repo
stringlengths
6
65
file_url
stringlengths
81
311
file_path
stringlengths
6
227
content
stringlengths
0
32.8k
language
stringclasses
1 value
license
stringclasses
7 values
commit_sha
stringlengths
40
40
retrieved_at
stringdate
2026-01-04 15:31:58
2026-01-04 20:25:31
truncated
bool
2 classes
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_cli/tests/snap_test.rs
crates/rome_cli/tests/snap_test.rs
use rome_cli::CliDiagnostic; use rome_console::fmt::{Formatter, Termcolor}; use rome_console::{markup, BufferConsole, Markup}; use rome_diagnostics::termcolor::NoColor; use rome_diagnostics::{print_diagnostic_to_string, Error}; use rome_formatter::IndentStyle; use rome_fs::{FileSystemExt, MemoryFileSystem}; use rome_json_formatter::context::JsonFormatOptions; use rome_json_formatter::format_node; use rome_json_parser::{parse_json, JsonParserOptions}; use std::borrow::Cow; use std::collections::BTreeMap; use std::env::{current_exe, temp_dir}; use std::fmt::Write as _; use std::path::{PathBuf, MAIN_SEPARATOR}; #[derive(Default)] struct InMessages { stdin: Option<String>, } pub(crate) struct CliSnapshot { /// input messages, coming from different sources in_messages: InMessages, /// the configuration, if set pub configuration: Option<String>, /// file name -> content pub files: BTreeMap<String, String>, /// messages written in console pub messages: Vec<String>, /// possible termination error of the CLI pub termination: Option<Error>, } impl CliSnapshot { pub fn from_result(result: Result<(), CliDiagnostic>) -> Self { Self { in_messages: InMessages::default(), configuration: None, files: BTreeMap::default(), messages: Vec::new(), termination: result.err().map(Error::from), } } } impl CliSnapshot { pub fn emit_content_snapshot(&self) -> String { let mut content = String::new(); if let Some(configuration) = &self.configuration { let parsed = parse_json( &redact_snapshot(configuration), JsonParserOptions::default(), ); let formatted = format_node( JsonFormatOptions::default().with_indent_style(IndentStyle::Space(2)), &parsed.syntax(), ) .expect("formatted JSON") .print() .expect("printed JSON"); content.push_str("## `rome.json`\n\n"); content.push_str("```json"); content.push('\n'); content.push_str(formatted.as_code()); content.push_str("```"); content.push_str("\n\n") } for (name, file_content) in &self.files { if !name.starts_with("rome.json") { let extension = 
name.split('.').last().unwrap(); let _ = write!(content, "## `{}`\n\n", redact_snapshot(name)); let _ = write!(content, "```{extension}"); content.push('\n'); content.push_str(&redact_snapshot(file_content)); content.push('\n'); content.push_str("```"); content.push_str("\n\n") } } if let Some(stdin) = &self.in_messages.stdin { content.push_str("# Input messages\n\n"); content.push_str("```block"); content.push('\n'); content.push_str(stdin); content.push('\n'); content.push_str("```"); content.push_str("\n\n") } if let Some(termination) = &self.termination { let message = print_diagnostic_to_string(termination); content.push_str("# Termination Message\n\n"); content.push_str("```block"); content.push('\n'); content.push_str(&redact_snapshot(&message)); content.push('\n'); content.push_str("```"); content.push_str("\n\n"); } if !self.messages.is_empty() { content.push_str("# Emitted Messages\n\n"); for message in &self.messages { content.push_str("```block"); content.push('\n'); content.push_str(&redact_snapshot(message)); content.push('\n'); content.push_str("```"); content.push_str("\n\n") } } content } } fn redact_snapshot(input: &str) -> Cow<'_, str> { let mut output = Cow::Borrowed(input); // There are some logs that print the timing, and we can't snapshot that message // otherwise at each run we invalid the previous snapshot. // // This is a workaround, and it might not work for all cases. 
const PATTERN: &str = "file(s) in "; if let Some(start) = output.find(PATTERN) { output .to_mut() .replace_range(start + PATTERN.len().., "<TIME>"); } // Normalize the name of the current executable to "rome" let current_exe = current_exe() .ok() .and_then(|path| Some(path.file_name()?.to_str()?.to_string())); if let Some(current_exe) = current_exe { replace(&mut output, &current_exe, "rome"); } output = replace_temp_dir(output); // Normalize Windows-specific path separators to "/" if cfg!(windows) { let mut rest = &*output; let mut result = String::new(); while let Some(index) = rest.find(MAIN_SEPARATOR) { let (before, after) = rest.split_at(index); result.push_str(before); // Paths are recognized if they start with ".\", ":\" (as in "C:\") // or ">\" (as in "<TEMP_DIR>\") if !before.ends_with(['.', ':', '>']) { let (sep, after) = after.split_at(1); result.push_str(sep); rest = after; continue; } // File paths are assumed to end at the first space or line breaks let path = if let Some(end) = after.find([' ', '\n']) { let (before, after) = after.split_at(end); rest = after; before } else { rest = ""; after }; result.push_str(&path.replace(MAIN_SEPARATOR, "/")); } if !result.is_empty() { result.push_str(&rest.replace(MAIN_SEPARATOR, "/")); output = Cow::Owned(result); } } output } /// Replace the path to the temporary directory with "<TEMP_DIR>" /// And normalizes the count of `-` at the end of the diagnostic fn replace_temp_dir(input: Cow<str>) -> Cow<str> { let mut result = String::new(); let mut rest = input.as_ref(); let temp_dir = temp_dir().display().to_string(); let temp_dir = temp_dir.trim_end_matches(MAIN_SEPARATOR); while let Some(index) = rest.find(temp_dir) { let (before, after) = rest.split_at(index); result.push_str(before); result.push_str("<TEMP_DIR>"); let after = after.split_at(temp_dir.len()).1; let header_line = after.lines().next().unwrap(); match header_line.split_once('\u{2501}') { Some((between_temp_and_line, _)) => { // Diagnostic header 
line, normalize the horizontal line result.push_str(between_temp_and_line); result.push_str(&"\u{2501}".repeat(20)); rest = after.split_at(header_line.len()).1; } None => { // Not a header line, only replace tempdir rest = after; } } } if result.is_empty() { input } else { result.push_str(rest); Cow::Owned(result) } } fn replace(input: &mut Cow<str>, from: &str, to: &str) { let mut rest = &**input; let mut result = String::new(); while let Some(index) = rest.find(from) { let (before, after) = rest.split_at(index); result.push_str(before); result.push_str(to); let (_, after) = after.split_at(from.len()); rest = after; } if !result.is_empty() { result.push_str(rest); *input = Cow::Owned(result); } } impl From<SnapshotPayload<'_>> for CliSnapshot { fn from(payload: SnapshotPayload<'_>) -> Self { let SnapshotPayload { result, console, fs, test_name: _, module_path: _, } = payload; let mut cli_snapshot = CliSnapshot::from_result(result); let config_path = PathBuf::from("rome.json"); let configuration = fs.open(&config_path).ok(); if let Some(mut configuration) = configuration { let mut buffer = String::new(); if configuration.read_to_string(&mut buffer).is_ok() { cli_snapshot.configuration = Some(buffer); } } let files: Vec<_> = fs .files() .map(|(file, entry)| { let content = entry.lock(); let content = std::str::from_utf8(content.as_slice()).unwrap(); (file.to_str().unwrap().to_string(), String::from(content)) }) .collect(); for (file, content) in files { cli_snapshot.files.insert(file, content); } let in_buffer = &console.in_buffer; for (index, message) in in_buffer.iter().enumerate() { if index == 0 { cli_snapshot.in_messages.stdin = Some(message.to_string()); } } for message in &console.out_buffer { let content = markup_to_string(markup! 
{ {message.content} }); cli_snapshot.messages.push(content) } cli_snapshot } } pub fn markup_to_string(markup: Markup) -> String { let mut buffer = Vec::new(); let mut write = Termcolor(NoColor::new(&mut buffer)); let mut fmt = Formatter::new(&mut write); fmt.write_markup(markup).unwrap(); String::from_utf8(buffer).unwrap() } pub struct SnapshotPayload<'a> { pub module_path: &'a str, pub test_name: &'a str, pub fs: MemoryFileSystem, pub console: BufferConsole, pub result: Result<(), CliDiagnostic>, } impl<'a> SnapshotPayload<'a> { pub fn new( module_path: &'a str, test_name: &'a str, fs: MemoryFileSystem, console: BufferConsole, result: Result<(), CliDiagnostic>, ) -> Self { Self { module_path, test_name, fs, console, result, } } } /// Function used to snapshot a session test of the a CLI run. pub fn assert_cli_snapshot(payload: SnapshotPayload<'_>) { let module_path = payload.module_path.to_owned(); let test_name = payload.test_name; let cli_snapshot = CliSnapshot::from(payload); let content = cli_snapshot.emit_content_snapshot(); let module_path = module_path.replace("::", "_"); let snapshot_path = PathBuf::from("snapshots").join(module_path); insta::with_settings!({ prepend_module_to_snapshot => false, snapshot_path => snapshot_path }, { insta::assert_snapshot!(test_name, content); }); }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_cli/tests/cases/mod.rs
crates/rome_cli/tests/cases/mod.rs
//! Add here test cases that are not related directly to a command, but to specific //! case that affects many commands mod config_extends;
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_cli/tests/cases/config_extends.rs
crates/rome_cli/tests/cases/config_extends.rs
use crate::run_cli; use crate::snap_test::{assert_cli_snapshot, SnapshotPayload}; use bpaf::Args; use rome_console::BufferConsole; use rome_fs::MemoryFileSystem; use rome_service::DynRef; use std::path::Path; #[test] fn extends_config_ok_formatter_no_linter() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let rome_json = Path::new("rome.json"); fs.insert( rome_json.into(), r#"{ "extends": ["format.json", "linter.json"] }"#, ); let format = Path::new("format.json"); fs.insert( format.into(), r#"{ "javascript": { "formatter": { "quoteStyle": "single" } } }"#, ); let lint = Path::new("linter.json"); fs.insert(lint.into(), r#"{ "linter": { "enabled": false } }"#); let test_file = Path::new("test.js"); fs.insert(test_file.into(), r#"debugger; console.log("string"); "#); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), test_file.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "extends_config_ok_formatter_no_linter", fs, console, result, )); } #[test] fn extends_config_ok_linter_not_formatter() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let rome_json = Path::new("rome.json"); fs.insert( rome_json.into(), r#"{ "extends": ["format.json", "linter.json"] }"#, ); let format = Path::new("format.json"); fs.insert(format.into(), r#"{ "formatter": { "enabled": true } }"#); let lint = Path::new("linter.json"); fs.insert( lint.into(), r#"{ "linter": { "rules": { "all": false, "suspicious": { "noDebugger": "warn" } } } } "#, ); let test_file = Path::new("test.js"); fs.insert(test_file.into(), r#"debugger; console.log("string"); "#); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), test_file.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); 
assert_cli_snapshot(SnapshotPayload::new( module_path!(), "extends_config_ok_linter_not_formatter", fs, console, result, )); } #[test] fn extends_should_raise_an_error_for_unresolved_configuration() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let rome_json = Path::new("rome.json"); fs.insert( rome_json.into(), r#"{ "extends": ["formatTYPO.json", "linter.json"] }"#, ); let format = Path::new("format.json"); fs.insert( format.into(), r#"{ "javascript": { "formatter": { "quoteStyle": "single" } } }"#, ); let lint = Path::new("linter.json"); fs.insert(lint.into(), r#"{ "linter": { "enabled": false } }"#); let test_file = Path::new("test.js"); fs.insert(test_file.into(), r#"debugger; console.log("string"); "#); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), test_file.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "extends_should_raise_an_error_for_unresolved_configuration", fs, console, result, )); } #[test] fn extends_should_raise_an_error_for_unresolved_configuration_and_show_verbose() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let rome_json = Path::new("rome.json"); fs.insert( rome_json.into(), r#"{ "extends": ["formatTYPO.json", "linter.json"] }"#, ); let format = Path::new("format.json"); fs.insert( format.into(), r#"{ "javascript": { "formatter": { "quoteStyle": "single" } } }"#, ); let lint = Path::new("linter.json"); fs.insert(lint.into(), r#"{ "linter": { "enabled": false } }"#); let test_file = Path::new("test.js"); fs.insert(test_file.into(), r#"debugger; console.log("string"); "#); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("check"), "--verbose", test_file.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); 
assert_cli_snapshot(SnapshotPayload::new( module_path!(), "extends_should_raise_an_error_for_unresolved_configuration_and_show_verbose", fs, console, result, )); } #[test] fn extends_resolves_when_using_config_path() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let rome_json = Path::new("config/rome.json"); fs.insert( rome_json.into(), r#"{ "extends": ["format.json", "linter.json"] }"#, ); let format = Path::new("config/format.json"); fs.insert( format.into(), r#"{ "javascript": { "formatter": { "quoteStyle": "single" } } }"#, ); let lint = Path::new("config/linter.json"); fs.insert(lint.into(), r#"{ "linter": { "enabled": true } }"#); let test_file = Path::new("test.js"); fs.insert(test_file.into(), r#"debugger; console.log("string"); "#); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("check"), "--config-path=config/", test_file.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "extends_resolves_when_using_config_path", fs, console, result, )); }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_cli/tests/commands/ci.rs
crates/rome_cli/tests/commands/ci.rs
use crate::configs::{CONFIG_DISABLED_FORMATTER, CONFIG_FILE_SIZE_LIMIT, CONFIG_LINTER_DISABLED}; use crate::snap_test::SnapshotPayload; use crate::{ assert_cli_snapshot, run_cli, CUSTOM_FORMAT_BEFORE, FORMATTED, LINT_ERROR, PARSE_ERROR, UNFORMATTED, }; use bpaf::Args; use rome_console::{BufferConsole, MarkupBuf}; use rome_fs::{FileSystemExt, MemoryFileSystem}; use rome_service::DynRef; use std::path::{Path, PathBuf}; const INCORRECT_CODE: &str = "let a = !b || !c"; const UNFORMATTED_AND_INCORRECT: &str = "statement( ) ; let a = !b || !c;"; const CI_CONFIGURATION: &str = r#" { "formatter": { "enabled": true }, "linter": { "enabled": true, "rules": { "recommended": true } } } "#; #[test] fn ci_help() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("ci"), "--help"].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "ci_help", fs, console, result, )); } #[test] fn ok() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("ci.js"); fs.insert(file_path.into(), FORMATTED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("ci"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, FORMATTED); if console.out_buffer.len() != 1 { panic!("unexpected console content: {:#?}", console.out_buffer); } drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "ci_ok", fs, console, result, )); } #[test] fn formatting_error() { let mut fs = MemoryFileSystem::default(); let mut console = 
BufferConsole::default(); let file_path = Path::new("ci.js"); fs.insert(file_path.into(), UNFORMATTED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("ci"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "formatting_error", fs, console, result, )); } #[test] fn ci_parse_error() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("ci.js"); fs.insert(file_path.into(), PARSE_ERROR.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("ci"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "ci_parse_error", fs, console, result, )); } #[test] fn ci_lint_error() { let mut fs = MemoryFileSystem::default(); let file_path = Path::new("ci.js"); fs.insert(file_path.into(), LINT_ERROR.as_bytes()); let mut console = BufferConsole::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("ci"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "ci_lint_error", fs, console, result, )); } #[test] fn ci_does_not_run_formatter() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); fs.insert( PathBuf::from("rome.json"), CONFIG_DISABLED_FORMATTER.as_bytes(), ); let input_file = Path::new("file.js"); fs.insert(input_file.into(), UNFORMATTED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("ci"), input_file.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(input_file) .expect("formatting target file was 
removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, UNFORMATTED); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "ci_does_not_run_formatter", fs, console, result, )); } #[test] fn ci_does_not_run_formatter_via_cli() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let input_file = Path::new("file.js"); fs.insert(input_file.into(), UNFORMATTED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("ci"), ("--formatter-enabled=false"), input_file.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(input_file) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, UNFORMATTED); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "ci_does_not_run_formatter_via_cli", fs, console, result, )); } #[test] fn ci_does_not_run_linter() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); fs.insert( PathBuf::from("rome.json"), CONFIG_LINTER_DISABLED.as_bytes(), ); let file_path = Path::new("file.js"); fs.insert(file_path.into(), CUSTOM_FORMAT_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("ci"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, CUSTOM_FORMAT_BEFORE); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "ci_does_not_run_linter", fs, console, result, 
)); } #[test] fn ci_does_not_run_linter_via_cli() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("file.js"); fs.insert(file_path.into(), UNFORMATTED_AND_INCORRECT.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("ci"), ("--linter-enabled=false"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, UNFORMATTED_AND_INCORRECT); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "ci_does_not_run_linter_via_cli", fs, console, result, )); } #[test] fn ci_does_not_organize_imports_via_cli() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("file.js"); let content = r#"import { lorem, foom, bar } from "foo"; import * as something from "../something"; "#; fs.insert(file_path.into(), content.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("ci"), ("--organize-imports-enabled=false"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); // assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut received = String::new(); file.read_to_string(&mut received) .expect("failed to read file from memory FS"); assert_eq!(received, content); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "ci_does_not_organize_imports_via_cli", fs, console, result, )); } #[test] fn ci_errors_for_all_disabled_checks() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); 
fs.insert(file_path.into(), CI_CONFIGURATION.as_bytes()); let file_path = Path::new("file.js"); fs.insert(file_path.into(), UNFORMATTED_AND_INCORRECT.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("ci"), ("--linter-enabled=false"), ("--formatter-enabled=false"), ("--organize-imports-enabled=false"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, UNFORMATTED_AND_INCORRECT); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "ci_errors_for_all_disabled_checks", fs, console, result, )); } #[test] fn file_too_large() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("ci.js"); fs.insert(file_path.into(), "statement();\n".repeat(80660).as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("ci"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); // Do not store the content of the file in the snapshot fs.remove(file_path); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "file_too_large", fs, console, result, )); } #[test] fn file_too_large_config_limit() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); fs.insert(PathBuf::from("rome.json"), CONFIG_FILE_SIZE_LIMIT); let file_path = Path::new("ci.js"); fs.insert(file_path.into(), "statement1();\nstatement2();"); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("ci"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), 
"file_too_large_config_limit", fs, console, result, )); } #[test] fn file_too_large_cli_limit() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("ci.js"); fs.insert(file_path.into(), "statement1();\nstatement2();"); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("ci"), ("--files-max-size=16"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "file_too_large_cli_limit", fs, console, result, )); } #[test] fn files_max_size_parse_error() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("ci.js"); fs.insert(file_path.into(), "statement1();\nstatement2();"); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("ci"), ("--files-max-size=-1"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "files_max_size_parse_error", fs, console, result, )); } #[test] fn ci_runs_linter_not_formatter_issue_3495() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); fs.insert(file_path.into(), CONFIG_DISABLED_FORMATTER.as_bytes()); let file_path = Path::new("file.js"); fs.insert(file_path.into(), INCORRECT_CODE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("ci"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("ci target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), 
"ci_runs_linter_not_formatter_issue_3495", fs, console, result, )); } #[test] fn max_diagnostics_default() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); for i in 0..60 { let file_path = PathBuf::from(format!("src/file_{i}.js")); fs.insert(file_path, UNFORMATTED.as_bytes()); } let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("ci"), ("src")].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut diagnostic_count = 0; let mut filtered_messages = Vec::new(); for msg in console.out_buffer { let MarkupBuf(nodes) = &msg.content; let is_diagnostic = nodes.iter().any(|node| { node.content .contains("File content differs from formatting output") || node.content.contains("format") || node.content.contains("lint") || node.content.contains("ci") }); if is_diagnostic { diagnostic_count += 1; } else { filtered_messages.push(msg); } } console.out_buffer = filtered_messages; for i in 0..60 { let file_path = format!("src/file_{i}.js"); fs.remove(Path::new(&file_path)); } assert_cli_snapshot(SnapshotPayload::new( module_path!(), "max_diagnostics_default", fs, console, result, )); assert_eq!(diagnostic_count, 20); } #[test] fn max_diagnostics() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); for i in 0..60 { let file_path = PathBuf::from(format!("src/file_{i}.js")); fs.insert(file_path, UNFORMATTED.as_bytes()); } let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("ci"), ("--max-diagnostics"), ("10"), ("src")].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut diagnostic_count = 0; let mut filtered_messages = Vec::new(); for msg in console.out_buffer { let MarkupBuf(nodes) = &msg.content; let is_diagnostic = nodes.iter().any(|node| { node.content .contains("File content differs from formatting output") || node.content.contains("format") || node.content.contains("ci") }); if is_diagnostic { 
diagnostic_count += 1; } else { filtered_messages.push(msg); } } console.out_buffer = filtered_messages; for i in 0..60 { let file_path = format!("src/file_{i}.js"); fs.remove(Path::new(&file_path)); } assert_cli_snapshot(SnapshotPayload::new( module_path!(), "max_diagnostics", fs, console, result, )); assert_eq!(diagnostic_count, 10); } #[test] fn print_verbose() { let mut fs = MemoryFileSystem::default(); let file_path = Path::new("ci.js"); fs.insert(file_path.into(), LINT_ERROR.as_bytes()); let mut console = BufferConsole::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("ci"), ("--verbose"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "print_verbose", fs, console, result, )); } #[test] fn ci_formatter_linter_organize_imports() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let rome_json = r#"{ "linter": { "enabled": true, "rules": { "recommended": true } }, "organizeImports": { "enabled": true } }"#; let input = r#" import { B, C } from "b.js" import A from "a.js" something( ) "#; let file_path = Path::new("rome.json"); fs.insert(file_path.into(), rome_json.as_bytes()); let file_path = Path::new("file.js"); fs.insert(file_path.into(), input.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("ci"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("ci target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "ci_formatter_linter_organize_imports", fs, console, result, )); } #[test] fn ignore_vcs_ignored_file() { let mut fs = 
MemoryFileSystem::default(); let mut console = BufferConsole::default(); let rome_json = r#"{ "vcs": { "enabled": true, "clientKind": "git", "useIgnoreFile": true } }"#; let git_ignore = r#" file2.js "#; let code2 = r#"foo.call(); bar.call();"#; let code1 = r#"array.map(sentence => sentence.split(' ')).flat();"#; // ignored files let file_path1 = Path::new("file1.js"); fs.insert(file_path1.into(), code1.as_bytes()); let file_path2 = Path::new("file2.js"); fs.insert(file_path2.into(), code2.as_bytes()); // configuration let config_path = Path::new("rome.json"); fs.insert(config_path.into(), rome_json.as_bytes()); // git folder let git_folder = Path::new(".git"); fs.insert(git_folder.into(), "".as_bytes()); // git ignore file let ignore_file = Path::new(".gitignore"); fs.insert(ignore_file.into(), git_ignore.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("ci"), file_path1.as_os_str().to_str().unwrap(), file_path2.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "ignore_vcs_ignored_file", fs, console, result, )); } #[test] fn ignore_vcs_ignored_file_via_cli() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let git_ignore = r#" file2.js "#; let code2 = r#"foo.call(); bar.call();"#; let code1 = r#"array.map(sentence => sentence.split(' ')).flat();"#; // ignored files let file_path1 = Path::new("file1.js"); fs.insert(file_path1.into(), code1.as_bytes()); let file_path2 = Path::new("file2.js"); fs.insert(file_path2.into(), code2.as_bytes()); // git folder let git_folder = Path::new("./.git"); fs.insert(git_folder.into(), "".as_bytes()); // git ignore file let ignore_file = Path::new("./.gitignore"); fs.insert(ignore_file.into(), git_ignore.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("ci"), ("--vcs-enabled=true"), 
("--vcs-client-kind=git"), ("--vcs-use-ignore-file=true"), ("--vcs-root=."), file_path1.as_os_str().to_str().unwrap(), file_path2.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "ignore_vcs_ignored_file_via_cli", fs, console, result, )); } #[test] fn ignores_unknown_file() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path1 = Path::new("test.txt"); fs.insert(file_path1.into(), *b"content"); let file_path2 = Path::new("test.js"); fs.insert(file_path2.into(), *b"console.log('bar');\n"); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("ci"), file_path1.as_os_str().to_str().unwrap(), file_path2.as_os_str().to_str().unwrap(), "--files-ignore-unknown=true", ] .as_slice(), ), ); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "ignores_unknown_file", fs, console, result, )); } #[test] fn doesnt_error_if_no_files_were_processed() { let mut console = BufferConsole::default(); let mut fs = MemoryFileSystem::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("ci"), "--no-errors-on-unmatched", ("file.js")].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "doesnt_error_if_no_files_were_processed", fs, console, result, )); }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_cli/tests/commands/version.rs
crates/rome_cli/tests/commands/version.rs
use crate::snap_test::SnapshotPayload; use crate::{assert_cli_snapshot, run_cli}; use bpaf::Args; use rome_console::BufferConsole; use rome_fs::MemoryFileSystem; use rome_service::DynRef; #[test] fn ok() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("--version")].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "version_ok", fs, console, result, )); } #[test] fn full() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("version")].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "version_full", fs, console, result, )); }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_cli/tests/commands/rage.rs
crates/rome_cli/tests/commands/rage.rs
use crate::run_cli; use crate::snap_test::{CliSnapshot, SnapshotPayload}; use bpaf::Args; use rome_cli::CliDiagnostic; use rome_console::{BufferConsole, Console}; use rome_fs::{FileSystem, MemoryFileSystem}; use rome_service::DynRef; use std::path::{Path, PathBuf}; use std::sync::{Mutex, MutexGuard}; use std::{env, fs}; #[test] fn ok() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let result = run_rage( DynRef::Borrowed(&mut fs), &mut console, Args::from([("rage")].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_rage_snapshot(SnapshotPayload::new( module_path!(), "rage_ok", fs, console, result, )); } #[test] fn with_configuration() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); fs.insert( Path::new("rome.json").to_path_buf(), r#"{ "formatter": { "enabled": false } }"#, ); let result = run_rage( DynRef::Borrowed(&mut fs), &mut console, Args::from([("rage")].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_rage_snapshot(SnapshotPayload::new( module_path!(), "with_configuration", fs, console, result, )); } #[test] fn with_malformed_configuration() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); fs.insert( Path::new("rome.json").to_path_buf(), r#"{ "formatter": { "enabled": } }"#, ); let result = run_rage( DynRef::Borrowed(&mut fs), &mut console, Args::from([("rage")].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_rage_snapshot(SnapshotPayload::new( module_path!(), "with_malformed_configuration", fs, console, result, )); } #[test] fn with_server_logs() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let result = { let log_dir = TestLogDir::new("rome-test-logs"); fs::create_dir_all(&log_dir.path).expect("Failed to create test log directory"); fs::write(log_dir.path.join("server.log.2022-10-14-16"), r#" 
┐rome_cli::commands::daemon::Running Server{pid=195434} ├─2547ms INFO rome_lsp::server Starting Rome Language Server... ├─15333ms INFO rome_lsp::server Starting Rome Language Server... ├─15347ms INFO rome_lsp::server Attempting to load the configuration from 'rome.json' file ├─15347ms INFO rome_service::configuration Attempting to load the configuration file at path "/home/micha/git/ant-design/rome.json" ├─15347ms ERROR rome_service::configuration Could not find the file configuration at "/home/micha/git/ant-design/rome.json" ├─15347ms ERROR rome_service::configuration Reason: Os { code: 2, kind: NotFound, message: "No such file or directory" } ├─┐rome_js_parser::parse::parse{file_id=FileId(0)} ├─┘ ├─┐rome_js_parser::parse::parse{file_id=FileId(1)} ├─┘ ├─16108ms INFO rome_lsp::server Starting Rome Language Server... ├─41801ms INFO rome_lsp::server Starting Rome Language Server... ├─41802ms INFO rome_lsp::server Sending shutdown signal INFO rome_cli::commands::daemon Received shutdown signal ├─41802ms ERROR tower_lsp::transport failed to encode message: failed to encode response: Broken pipe (os error 32) ┘ ┐rome_cli::commands::daemon::Running Server{pid=197796} ├─2822ms INFO rome_lsp::server Starting Rome Language Server... ├─7550ms INFO rome_lsp::server Starting Rome Language Server... ├─7551ms INFO rome_lsp::server Attempting to load the configuration from 'rome.json' file ├─7551ms INFO rome_service::configuration Attempting to load the configuration file at path "/home/micha/git/ant-design/rome.json" ├─7551ms ERROR rome_service::configuration Could not find the file configuration at "/home/micha/git/ant-design/rome.json" ├─7551ms ERROR rome_service::configuration Reason: Os { code: 2, kind: NotFound, message: "No such file or directory" } ├─┐rome_js_parser::parse::parse{file_id=FileId(0)} ├─┘ ├─┐rome_js_parser::parse::parse{file_id=FileId(1)} ├─┘ ├─7897ms INFO rome_lsp::server Starting Rome Language Server... 
"#, ).expect("Failed to write log file"); fs::write( log_dir.path.join("server.log.2022-10-14-15"), r#" Not most recent log file "#, ) .expect("Failed to write configuration file"); run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("rage")].as_slice()), ) }; assert!(result.is_ok(), "run_cli returned {result:?}"); assert_rage_snapshot(SnapshotPayload::new( module_path!(), "with_server_logs", fs, console, result, )); } /// Runs the `rage` command mocking out the log directory. fn run_rage<'app>( fs: DynRef<'app, dyn FileSystem>, console: &'app mut dyn Console, args: Args, ) -> Result<(), CliDiagnostic> { let _test_dir = TestLogDir::new("rome-rage-test"); run_cli(fs, console, args) } fn assert_rage_snapshot(payload: SnapshotPayload<'_>) { let test_name = payload.test_name; let module_path = payload.module_path; let mut snapshot = CliSnapshot::from(payload); // Replace any platform specific content that may yield unstable results. for message in snapshot.messages.iter_mut() { *message = message .lines() .map(|line| match line.trim_start().split_once(':') { Some(( "CPU Architecture" | "OS" | "NO_COLOR" | "TERM" | "ROME_LOG_DIR" | "Color support", value, )) => line.replace(value.trim_start(), "**PLACEHOLDER**"), _ => line.to_string(), }) .collect::<Vec<_>>() .join("\n"); } let content = snapshot.emit_content_snapshot(); let module_path = module_path.replace("::", "_"); let snapshot_path = PathBuf::from("../snapshots").join(module_path); insta::with_settings!({ prepend_module_to_snapshot => false, snapshot_path => snapshot_path }, { insta::assert_snapshot!(test_name, content); }); } /// Mutex to guarantee that the `rage` tests run sequentially. Necessary to avoid race conditions /// when reading the server logs. static RAGE_GUARD: Mutex<()> = Mutex::new(()); /// Mocks out the directory from which `rage` reads the server logs. Ensures that the test directory /// gets removed at the end of the test. 
struct TestLogDir { path: PathBuf, _guard: MutexGuard<'static, ()>, } impl TestLogDir { fn new(name: &str) -> Self { let guard = RAGE_GUARD.lock().unwrap(); let path = env::temp_dir().join(name); env::set_var("ROME_LOG_DIR", &path); Self { path, _guard: guard, } } } impl Drop for TestLogDir { fn drop(&mut self) { fs::remove_dir_all(&self.path).ok(); env::remove_var("ROME_LOG_DIR"); } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_cli/tests/commands/migrate.rs
crates/rome_cli/tests/commands/migrate.rs
use crate::run_cli; use crate::snap_test::{assert_cli_snapshot, SnapshotPayload}; use bpaf::Args; use rome_console::BufferConsole; use rome_fs::{FileSystemExt, MemoryFileSystem}; use rome_service::DynRef; use std::path::Path; #[test] fn migrate_help() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("migrate"), "--help"].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "migrate_help", fs, console, result, )); } #[test] fn migrate_config_up_to_date() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let configuration = r#"{ "linter": { "enabled": true } }"#; let configuration_path = Path::new("rome.json"); fs.insert(configuration_path.into(), configuration.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("migrate")].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs.open(configuration_path).expect("file to open"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, configuration); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "migrate_config_up_to_date", fs, console, result, )); } #[test] fn missing_configuration_file() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("migrate")].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "missing_configuration_file", fs, console, result, )); }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_cli/tests/commands/check.rs
crates/rome_cli/tests/commands/check.rs
use bpaf::Args; use std::env::temp_dir; use std::fs::{create_dir, create_dir_all, remove_dir_all, File}; use std::io::Write; #[cfg(target_family = "unix")] use std::os::unix::fs::symlink; #[cfg(target_os = "windows")] use std::os::windows::fs::{symlink_dir, symlink_file}; use std::path::{Path, PathBuf}; use crate::configs::{ CONFIG_FILE_SIZE_LIMIT, CONFIG_IGNORE_SYMLINK, CONFIG_LINTER_AND_FILES_IGNORE, CONFIG_LINTER_DISABLED, CONFIG_LINTER_DOWNGRADE_DIAGNOSTIC, CONFIG_LINTER_IGNORED_FILES, CONFIG_LINTER_SUPPRESSED_GROUP, CONFIG_LINTER_SUPPRESSED_RULE, CONFIG_LINTER_UPGRADE_DIAGNOSTIC, CONFIG_RECOMMENDED_GROUP, }; use crate::snap_test::{markup_to_string, SnapshotPayload}; use crate::{assert_cli_snapshot, run_cli, FORMATTED, LINT_ERROR, PARSE_ERROR}; use rome_console::{markup, BufferConsole, LogLevel, MarkupBuf}; use rome_fs::{ErrorEntry, FileSystemExt, MemoryFileSystem, OsFileSystem}; use rome_service::DynRef; const ERRORS: &str = r#" for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); "#; const NO_DEBUGGER: &str = "debugger;"; const NEW_SYMBOL: &str = "new Symbol(\"\");"; const FIX_BEFORE: &str = " (1 >= -0) "; const FIX_AFTER: &str = "1 >= 0; "; const CHECK_FORMAT_AFTER: &str = "1 >= -0; "; const APPLY_SUGGESTED_BEFORE: &str = "let a = 4; debugger; console.log(a); "; const APPLY_SUGGESTED_AFTER: &str = "const a = 4;\nconsole.log(a);\n"; const NO_DEBUGGER_BEFORE: &str = "debugger;\n"; const NO_DEBUGGER_AFTER: &str = "debugger;\n"; const 
UPGRADE_SEVERITY_CODE: &str = r#"if(!cond) { exprA(); } else { exprB() }"#; const NURSERY_UNSTABLE: &str = r#"if(a = b) {}"#; #[test] fn check_help() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), "--help"].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "check_help", fs, console, result, )); } #[test] fn ok() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js"); fs.insert(file_path.into(), FORMATTED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); } #[test] fn ok_read_only() { let mut fs = MemoryFileSystem::new_read_only(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js"); fs.insert(file_path.into(), FORMATTED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); } #[test] fn parse_error() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js"); fs.insert(file_path.into(), PARSE_ERROR.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "parse_error", fs, console, result, )); } #[test] fn lint_error() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js"); fs.insert(file_path.into(), 
LINT_ERROR.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "lint_error", fs, console, result, )); } #[test] fn maximum_diagnostics() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js"); fs.insert(file_path.into(), ERRORS.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); let messages = &console.out_buffer; assert_eq!( messages .iter() .filter(|m| m.level == LogLevel::Error) .count(), 20_usize ); assert!(messages .iter() .filter(|m| m.level == LogLevel::Log) .any(|m| { let content = format!("{:?}", m.content); content.contains("The number of diagnostics exceeds the number allowed by Rome") && content.contains("Diagnostics not shown") && content.contains("79") })); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "maximum_diagnostics", fs, console, result, )); } #[test] fn apply_ok() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); fs.insert(file_path.into(), FIX_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("check"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, FIX_AFTER); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "apply_ok", fs, console, result, )); } #[test] fn apply_noop() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); 
let file_path = Path::new("fix.js"); fs.insert(file_path.into(), FIX_AFTER.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("check"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "apply_noop", fs, console, result, )); } #[test] fn apply_suggested_error() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); fs.insert(file_path.into(), APPLY_SUGGESTED_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("check"), ("--apply-unsafe"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "apply_suggested_error", fs, console, result, )); } #[test] fn apply_suggested() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); fs.insert(file_path.into(), APPLY_SUGGESTED_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("check"), ("--apply-unsafe"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, APPLY_SUGGESTED_AFTER); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "apply_suggested", fs, console, result, )); } #[test] fn apply_unsafe_with_error() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); // last line doesn't have code fix let source = "let a = 4; debugger; console.log(a); function f() { arguments; } "; let expected = "const a = 4; console.log(a); function f() {\n\targuments;\n} "; let 
test1 = Path::new("test1.js"); fs.insert(test1.into(), source.as_bytes()); let test2 = Path::new("test2.js"); fs.insert(test2.into(), source.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("check"), ("--apply-unsafe"), test1.as_os_str().to_str().unwrap(), test2.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut file = fs .open(test1) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, expected); drop(file); content.clear(); let mut file = fs .open(test2) .expect("formatting target file was removed by the CLI"); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "apply_unsafe_with_error", fs, console, result, )); } #[test] fn no_lint_if_linter_is_disabled_when_run_apply() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); fs.insert(file_path.into(), FIX_BEFORE.as_bytes()); let config_path = Path::new("rome.json"); fs.insert(config_path.into(), CONFIG_LINTER_DISABLED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("check"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, CHECK_FORMAT_AFTER); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "no_lint_if_linter_is_disabled_when_run_apply", fs, console, result, )); } #[test] fn no_lint_if_linter_is_disabled() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); 
fs.insert(file_path.into(), FIX_BEFORE.as_bytes()); let config_path = Path::new("rome.json"); fs.insert(config_path.into(), CONFIG_LINTER_DISABLED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, FIX_BEFORE); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "no_lint_if_linter_is_disabled", fs, console, result, )); } #[test] fn should_disable_a_rule() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); fs.insert(file_path.into(), NO_DEBUGGER_BEFORE.as_bytes()); let config_path = Path::new("rome.json"); fs.insert(config_path.into(), CONFIG_LINTER_SUPPRESSED_RULE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("check"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, NO_DEBUGGER_AFTER); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "should_disable_a_rule", fs, console, result, )); } #[test] fn should_disable_a_rule_group() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); fs.insert(file_path.into(), FIX_BEFORE.as_bytes()); let config_path = Path::new("rome.json"); fs.insert( config_path.into(), CONFIG_LINTER_SUPPRESSED_GROUP.as_bytes(), ); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("check"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut buffer = 
String::new(); fs.open(file_path) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, "1 >= -0;\n"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "should_disable_a_rule_group", fs, console, result, )); } #[test] fn downgrade_severity() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); fs.insert( file_path.into(), CONFIG_LINTER_DOWNGRADE_DIAGNOSTIC.as_bytes(), ); let file_path = Path::new("file.js"); fs.insert(file_path.into(), NO_DEBUGGER.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); let messages = &console.out_buffer; assert_eq!( messages .iter() .filter(|m| m.level == LogLevel::Error) .filter(|m| { let content = format!("{:#?}", m.content); content.contains("suspicious/noDebugger") }) .count(), 1 ); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "downgrade_severity", fs, console, result, )); } #[test] fn upgrade_severity() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); fs.insert( file_path.into(), CONFIG_LINTER_UPGRADE_DIAGNOSTIC.as_bytes(), ); let file_path = Path::new("file.js"); fs.insert(file_path.into(), UPGRADE_SEVERITY_CODE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); let messages = &console.out_buffer; let error_count = messages .iter() .filter(|m| m.level == LogLevel::Error) .filter(|m| { let content = format!("{:?}", m.content); content.contains("style/noNegationElse") }) .count(); assert_eq!( error_count, 1, "expected 1 error-level message in console buffer, found {error_count:?}:\n{:?}", console.out_buffer ); 
assert_cli_snapshot(SnapshotPayload::new( module_path!(), "upgrade_severity", fs, console, result, )); } #[test] fn no_lint_when_file_is_ignored() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); fs.insert(file_path.into(), CONFIG_LINTER_IGNORED_FILES.as_bytes()); let file_path = Path::new("test.js"); fs.insert(file_path.into(), FIX_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("check"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, CHECK_FORMAT_AFTER); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "no_lint_when_file_is_ignored", fs, console, result, )); } #[test] fn no_lint_if_files_are_listed_in_ignore_option() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); fs.insert(file_path.into(), CONFIG_LINTER_AND_FILES_IGNORE.as_bytes()); let file_path_test1 = Path::new("test1.js"); fs.insert(file_path_test1.into(), FIX_BEFORE.as_bytes()); let file_path_test2 = Path::new("test2.js"); fs.insert(file_path_test2.into(), FIX_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("check"), ("--apply"), file_path_test1.as_os_str().to_str().unwrap(), file_path_test2.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path_test1) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, FIX_BEFORE); let mut buffer = String::new(); fs.open(file_path_test2) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, CHECK_FORMAT_AFTER); assert_cli_snapshot(SnapshotPayload::new( module_path!(), 
"no_lint_if_files_are_listed_in_ignore_option", fs, console, result, )); } /// Creating a symbolic link will fail on Windows if the current process is /// unprivileged. Since running tests as administrator is uncommon and /// constraining, this error gets silently ignored if we're not running on CI /// (the workflows are being being run with the correct permissions on CI) #[cfg(target_os = "windows")] macro_rules! check_windows_symlink { ($result:expr) => { match $result { Ok(res) => res, Err(err) if option_env!("CI") == Some("1") => panic!("failed to create symlink: {err}"), Err(_) => return, } }; } #[test] fn fs_error_dereferenced_symlink() { let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let root_path = temp_dir().join("check_rome_test_broken_symlink"); let subdir_path = root_path.join("prefix"); let _ = remove_dir_all(&root_path); create_dir_all(subdir_path).unwrap(); #[cfg(target_family = "unix")] { symlink(root_path.join("null"), root_path.join("broken_symlink")).unwrap(); } #[cfg(target_os = "windows")] { check_windows_symlink!(symlink_file( root_path.join("null"), root_path.join("broken_symlink") )); } let result = run_cli( DynRef::Owned(Box::new(OsFileSystem)), &mut console, Args::from([("check"), root_path.display().to_string().as_str()].as_slice()), ); remove_dir_all(root_path).unwrap(); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "fs_error_dereferenced_symlink", fs, console, result, )); } #[test] fn fs_error_infinite_symlink_expansion_to_dirs() { let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let root_path = temp_dir().join("check_rome_test_infinite_symlink_expansion_to_dirs"); let subdir1_path = root_path.join("prefix"); let subdir2_path = root_path.join("foo").join("bar"); let _ = remove_dir_all(&root_path); create_dir_all(&subdir1_path).unwrap(); create_dir_all(&subdir2_path).unwrap(); #[cfg(target_family = 
"unix")] { symlink(&subdir2_path, subdir1_path.join("symlink1")).unwrap(); symlink(subdir1_path, subdir2_path.join("symlink2")).unwrap(); } #[cfg(target_os = "windows")] { check_windows_symlink!(symlink_dir(&subdir2_path, &subdir1_path.join("symlink1"))); check_windows_symlink!(symlink_dir(subdir1_path, subdir2_path.join("symlink2"))); } let result = run_cli( DynRef::Owned(Box::new(OsFileSystem)), &mut console, Args::from([("check"), (root_path.display().to_string().as_str())].as_slice()), ); remove_dir_all(root_path).unwrap(); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "fs_error_infinite_symlink_expansion_to_dirs", fs, console, result, )); } #[test] fn fs_error_infinite_symlink_expansion_to_files() { let mut console = BufferConsole::default(); let root_path = temp_dir().join("check_rome_test_infinite_symlink_expansion_to_files"); let subdir1_path = root_path.join("prefix"); let subdir2_path = root_path.join("foo").join("bar"); let _ = remove_dir_all(&root_path); create_dir_all(&subdir1_path).unwrap(); create_dir_all(&subdir2_path).unwrap(); let symlink1_path = subdir1_path.join("symlink1"); let symlink2_path = subdir2_path.join("symlink2"); #[cfg(target_family = "unix")] { symlink(&symlink2_path, &symlink1_path).unwrap(); symlink(&symlink1_path, &symlink2_path).unwrap(); } #[cfg(target_os = "windows")] { check_windows_symlink!(symlink_dir(&symlink2_path, &symlink1_path)); check_windows_symlink!(symlink_dir(&symlink1_path, &symlink2_path)); } let result = run_cli( DynRef::Owned(Box::new(OsFileSystem)), &mut console, Args::from([("check"), (root_path.display().to_string().as_str())].as_slice()), ); remove_dir_all(root_path).unwrap(); assert!(result.is_err(), "run_cli returned {result:?}"); // Don't use a snapshot here, since the diagnostics can be reported in // arbitrary order: assert!(console .out_buffer .iter() .flat_map(|msg| msg.content.0.iter()) .any(|node| node.content.contains("Deeply 
nested symlink expansion"))); assert!(console .out_buffer .iter() .flat_map(|msg| msg.content.0.iter()) .any(|node| node.content.contains(&symlink1_path.display().to_string()))); assert!(console .out_buffer .iter() .flat_map(|msg| msg.content.0.iter()) .any(|node| node.content.contains(&symlink2_path.display().to_string()))); } #[test] fn fs_error_read_only() { let mut fs = MemoryFileSystem::new_read_only(); let mut console = BufferConsole::default(); let file_path = Path::new("test.js"); fs.insert(file_path.into(), *b"content"); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("check"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); // Do not store the content of the file in the snapshot fs.remove(file_path); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "fs_error_read_only", fs, console, result, )); } #[test] fn fs_error_unknown() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); fs.insert_error(PathBuf::from("prefix/ci.js"), ErrorEntry::UnknownFileType); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), ("prefix")].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "fs_error_unknown", fs, console, result, )); } // Symbolic link ignore pattern test // // Verifies, that ignore patterns to symbolic links are allowed. 
// // ├── rome.json // ├── hidden_nested // │ └── test // │ └── symlink_testcase1_2 -> hidden_testcase1 // ├── hidden_testcase1 // │ └── test // │ └── test.js // ok // ├── hidden_testcase2 // │ ├── test1.ts // ignored // │ ├── test2.ts // ignored // │ └── test.js // ok // └── src // ├── symlink_testcase1_1 -> hidden_nested // ├── symlink_testcase1_3 -> hidden_testcase1/test/test.js // └── symlink_testcase2 -> hidden_testcase2 #[test] fn fs_files_ignore_symlink() { let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let root_path = temp_dir().join("check_rome_test_files_ignore_symlink"); let src_path = root_path.join("src"); let testcase1_path = root_path.join("hidden_testcase1"); let testcase1_sub_path = testcase1_path.join("test"); let testcase1_sub_file_path = testcase1_sub_path.join("test.js"); let testcase2_path = root_path.join("hidden_testcase2"); let nested_path = root_path.join("hidden_nested"); let nested_sub_path = nested_path.join("test"); let _ = remove_dir_all(&root_path); create_dir(&root_path).unwrap(); create_dir(&src_path).unwrap(); create_dir_all(testcase1_sub_path.clone()).unwrap(); create_dir(testcase2_path.clone()).unwrap(); create_dir_all(nested_sub_path.clone()).unwrap(); // src/symlink_testcase1_1 let symlink_testcase1_1_path = src_path.join("symlink_testcase1_1"); // hidden_nested/test/symlink_testcase1_2 let symlink_testcase1_2_path = nested_sub_path.join("symlink_testcase1_2"); // src/symlink_testcase1_3 let symlink_testcase1_3_path = src_path.join("symlink_testcase1_3"); // src/symlink_testcase2 let symlink_testcase2_path = src_path.join("symlink_testcase2"); #[cfg(target_family = "unix")] { // src/symlink_testcase1_1 -> hidden_nested symlink(nested_path, symlink_testcase1_1_path).unwrap(); // hidden_nested/test/symlink_testcase1_2 -> hidden_testcase1 symlink(testcase1_path, symlink_testcase1_2_path).unwrap(); // src/symlink_testcase1_3 -> hidden_testcase1/test/test.js symlink(testcase1_sub_file_path, 
symlink_testcase1_3_path).unwrap(); // src/symlink_testcase2 -> hidden_testcase2 symlink(&testcase2_path, symlink_testcase2_path).unwrap(); } #[cfg(target_os = "windows")] { check_windows_symlink!(symlink_dir(nested_path, symlink_testcase1_1_path)); check_windows_symlink!(symlink_dir(testcase1_path, symlink_testcase1_2_path)); check_windows_symlink!(symlink_dir( testcase1_sub_file_path, symlink_testcase1_3_path )); check_windows_symlink!(symlink_dir(&testcase2_path, symlink_testcase2_path)); } let config_path = root_path.join("rome.json"); let mut config_file = File::create(config_path).unwrap(); config_file .write_all(CONFIG_IGNORE_SYMLINK.as_bytes()) .unwrap(); let files: [PathBuf; 4] = [ testcase1_sub_path.join("test.js"), // ok testcase2_path.join("test.js"), // ok testcase2_path.join("test1.ts"), // ignored testcase2_path.join("test2.ts"), // ignored ]; for file_path in files { let mut file = File::create(file_path).unwrap(); file.write_all(APPLY_SUGGESTED_BEFORE.as_bytes()).unwrap(); } let result = run_cli( DynRef::Owned(Box::new(OsFileSystem)), &mut console, Args::from( [ ("check"), ("--config-path"), (root_path.display().to_string().as_str()), ("--apply-unsafe"), (src_path.display().to_string().as_str()), ] .as_slice(), ), ); remove_dir_all(root_path).unwrap(); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "fs_files_ignore_symlink", fs, console, result, )); } #[test] fn file_too_large() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js"); fs.insert(file_path.into(), "statement();\n".repeat(80660).as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); // Do not store the content of the file in the snapshot fs.remove(file_path); 
assert_cli_snapshot(SnapshotPayload::new( module_path!(), "file_too_large", fs, console, result, )); } #[test] fn file_too_large_config_limit() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); fs.insert(PathBuf::from("rome.json"), CONFIG_FILE_SIZE_LIMIT); let file_path = Path::new("check.js"); fs.insert(file_path.into(), "statement1();\nstatement2();"); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("check"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "file_too_large_config_limit", fs, console, result, )); } #[test] fn file_too_large_cli_limit() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js"); fs.insert(file_path.into(), "statement1();\nstatement2();"); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("check"), ("--files-max-size=16"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "file_too_large_cli_limit", fs, console, result, )); } #[test] fn files_max_size_parse_error() { let mut fs = MemoryFileSystem::default();
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
true
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_cli/tests/commands/lint.rs
crates/rome_cli/tests/commands/lint.rs
use bpaf::Args; use std::env::temp_dir; use std::fs::{create_dir, create_dir_all, remove_dir_all, File}; use std::io::Write; #[cfg(target_family = "unix")] use std::os::unix::fs::symlink; #[cfg(target_os = "windows")] use std::os::windows::fs::{symlink_dir, symlink_file}; use std::path::{Path, PathBuf}; use crate::configs::{ CONFIG_FILE_SIZE_LIMIT, CONFIG_IGNORE_SYMLINK, CONFIG_LINTER_AND_FILES_IGNORE, CONFIG_LINTER_DISABLED, CONFIG_LINTER_DOWNGRADE_DIAGNOSTIC, CONFIG_LINTER_IGNORED_FILES, CONFIG_LINTER_SUPPRESSED_GROUP, CONFIG_LINTER_SUPPRESSED_RULE, CONFIG_LINTER_UPGRADE_DIAGNOSTIC, CONFIG_RECOMMENDED_GROUP, }; use crate::snap_test::{markup_to_string, SnapshotPayload}; use crate::{assert_cli_snapshot, run_cli, FORMATTED, LINT_ERROR, PARSE_ERROR}; use rome_console::{markup, BufferConsole, LogLevel, MarkupBuf}; use rome_fs::{ErrorEntry, FileSystemExt, MemoryFileSystem, OsFileSystem}; use rome_service::DynRef; const ERRORS: &str = r#" for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;); "#; const NO_DEBUGGER: &str = "debugger;"; const NEW_SYMBOL: &str = "new Symbol(\"\");"; const FIX_BEFORE: &str = "(1 >= -0)"; const FIX_AFTER: &str = "(1 >= 0)"; const APPLY_SUGGESTED_BEFORE: &str = "let a = 4; debugger; console.log(a); "; const APPLY_SUGGESTED_AFTER: &str = "const a = 4;\nconsole.log(a);\n"; const NO_DEBUGGER_BEFORE: &str = "debugger;\n"; const NO_DEBUGGER_AFTER: &str = "debugger;\n"; const UPGRADE_SEVERITY_CODE: &str = r#"if(!cond) { exprA(); 
} else { exprB() }"#; const NURSERY_UNSTABLE: &str = r#"if(a = b) {}"#; #[test] fn check_help() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("lint"), "--help"].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "check_help", fs, console, result, )); } #[test] fn ok() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js"); fs.insert(file_path.into(), FORMATTED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("lint"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); } #[test] fn ok_read_only() { let mut fs = MemoryFileSystem::new_read_only(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js"); fs.insert(file_path.into(), FORMATTED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("lint"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); } #[test] fn parse_error() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js"); fs.insert(file_path.into(), PARSE_ERROR.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("lint"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "parse_error", fs, console, result, )); } #[test] fn lint_error() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js"); fs.insert(file_path.into(), LINT_ERROR.as_bytes()); let result = run_cli( 
DynRef::Borrowed(&mut fs), &mut console, Args::from([("lint"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "lint_error", fs, console, result, )); } #[test] fn maximum_diagnostics() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js"); fs.insert(file_path.into(), ERRORS.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("lint"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); let messages = &console.out_buffer; assert_eq!( messages .iter() .filter(|m| m.level == LogLevel::Error) .count(), 20_usize ); assert!(messages .iter() .filter(|m| m.level == LogLevel::Log) .any(|m| { let content = format!("{:?}", m.content); content.contains("The number of diagnostics exceeds the number allowed by Rome") && content.contains("Diagnostics not shown") && content.contains("77") })); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "maximum_diagnostics", fs, console, result, )); } #[test] fn apply_ok() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); fs.insert(file_path.into(), FIX_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("lint"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, FIX_AFTER); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "apply_ok", fs, console, result, )); } #[test] fn apply_noop() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); 
fs.insert(file_path.into(), FIX_AFTER.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("lint"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "apply_noop", fs, console, result, )); } #[test] fn apply_suggested_error() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); fs.insert(file_path.into(), APPLY_SUGGESTED_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("lint"), ("--apply-unsafe"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "apply_suggested_error", fs, console, result, )); } #[test] fn apply_suggested() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); fs.insert(file_path.into(), APPLY_SUGGESTED_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("lint"), ("--apply-unsafe"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, APPLY_SUGGESTED_AFTER); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "apply_suggested", fs, console, result, )); } #[test] fn apply_unsafe_with_error() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); // last line doesn't have code fix let source = "let a = 4; debugger; console.log(a); function f() { arguments; } "; let expected = "const a = 4; console.log(a); function f() { arguments; } "; let test1 = Path::new("test1.js"); 
fs.insert(test1.into(), source.as_bytes()); let test2 = Path::new("test2.js"); fs.insert(test2.into(), source.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("lint"), ("--apply-unsafe"), test1.as_os_str().to_str().unwrap(), test2.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut file = fs .open(test1) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, expected); drop(file); content.clear(); let mut file = fs .open(test2) .expect("formatting target file was removed by the CLI"); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "apply_unsafe_with_error", fs, console, result, )); } #[test] fn no_lint_if_linter_is_disabled_when_run_apply() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); fs.insert(file_path.into(), FIX_BEFORE.as_bytes()); let config_path = Path::new("rome.json"); fs.insert(config_path.into(), CONFIG_LINTER_DISABLED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("lint"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, FIX_BEFORE); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "no_lint_if_linter_is_disabled_when_run_apply", fs, console, result, )); } #[test] fn no_lint_if_linter_is_disabled() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); fs.insert(file_path.into(), FIX_BEFORE.as_bytes()); let 
config_path = Path::new("rome.json"); fs.insert(config_path.into(), CONFIG_LINTER_DISABLED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("lint"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, FIX_BEFORE); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "no_lint_if_linter_is_disabled", fs, console, result, )); } #[test] fn should_disable_a_rule() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); fs.insert(file_path.into(), NO_DEBUGGER_BEFORE.as_bytes()); let config_path = Path::new("rome.json"); fs.insert(config_path.into(), CONFIG_LINTER_SUPPRESSED_RULE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("lint"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, NO_DEBUGGER_AFTER); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "should_disable_a_rule", fs, console, result, )); } #[test] fn should_disable_a_rule_group() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("fix.js"); fs.insert(file_path.into(), FIX_BEFORE.as_bytes()); let config_path = Path::new("rome.json"); fs.insert( config_path.into(), CONFIG_LINTER_SUPPRESSED_GROUP.as_bytes(), ); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("lint"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path) .unwrap() .read_to_string(&mut 
buffer) .unwrap(); assert_eq!(buffer, "(1 >= -0)"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "should_disable_a_rule_group", fs, console, result, )); } #[test] fn downgrade_severity() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); fs.insert( file_path.into(), CONFIG_LINTER_DOWNGRADE_DIAGNOSTIC.as_bytes(), ); let file_path = Path::new("file.js"); fs.insert(file_path.into(), NO_DEBUGGER.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("lint"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); println!("{console:?}"); assert!(result.is_ok(), "run_cli returned {result:?}"); let messages = &console.out_buffer; assert_eq!( messages .iter() .filter(|m| m.level == LogLevel::Error) .filter(|m| { let content = format!("{:#?}", m.content); content.contains("suspicious/noDebugger") }) .count(), 1 ); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "downgrade_severity", fs, console, result, )); } #[test] fn upgrade_severity() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); fs.insert( file_path.into(), CONFIG_LINTER_UPGRADE_DIAGNOSTIC.as_bytes(), ); let file_path = Path::new("file.js"); fs.insert(file_path.into(), UPGRADE_SEVERITY_CODE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("lint"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); let messages = &console.out_buffer; let error_count = messages .iter() .filter(|m| m.level == LogLevel::Error) .filter(|m| { let content = format!("{:?}", m.content); content.contains("style/noNegationElse") }) .count(); assert_eq!( error_count, 1, "expected 1 error-level message in console buffer, found {error_count:?}:\n{:?}", console.out_buffer ); assert_cli_snapshot(SnapshotPayload::new( module_path!(), 
"upgrade_severity", fs, console, result, )); } #[test] fn no_lint_when_file_is_ignored() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); fs.insert(file_path.into(), CONFIG_LINTER_IGNORED_FILES.as_bytes()); let file_path = Path::new("test.js"); fs.insert(file_path.into(), FIX_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("lint"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, FIX_BEFORE); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "no_lint_when_file_is_ignored", fs, console, result, )); } #[test] fn no_lint_if_files_are_listed_in_ignore_option() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); fs.insert(file_path.into(), CONFIG_LINTER_AND_FILES_IGNORE.as_bytes()); let file_path_test1 = Path::new("test1.js"); fs.insert(file_path_test1.into(), FIX_BEFORE.as_bytes()); let file_path_test2 = Path::new("test2.js"); fs.insert(file_path_test2.into(), FIX_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("lint"), ("--apply"), file_path_test1.as_os_str().to_str().unwrap(), file_path_test2.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut buffer = String::new(); fs.open(file_path_test1) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, FIX_BEFORE); let mut buffer = String::new(); fs.open(file_path_test2) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, FIX_BEFORE); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "no_lint_if_files_are_listed_in_ignore_option", fs, console, result, )); } /// 
Creating a symbolic link will fail on Windows if the current process is /// unprivileged. Since running tests as administrator is uncommon and /// constraining, this error gets silently ignored if we're not running on CI /// (the workflows are being being run with the correct permissions on CI) #[cfg(target_os = "windows")] macro_rules! check_windows_symlink { ($result:expr) => { match $result { Ok(res) => res, Err(err) if option_env!("CI") == Some("1") => panic!("failed to create symlink: {err}"), Err(_) => return, } }; } #[test] fn fs_error_dereferenced_symlink() { let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let root_path = temp_dir().join("lint_rome_test_broken_symlink"); let subdir_path = root_path.join("prefix"); let _ = remove_dir_all(&root_path); create_dir(&root_path).unwrap(); create_dir(subdir_path).unwrap(); #[cfg(target_family = "unix")] { symlink(root_path.join("null"), root_path.join("broken_symlink")).unwrap(); } #[cfg(target_os = "windows")] { check_windows_symlink!(symlink_file( root_path.join("null"), root_path.join("broken_symlink") )); } let result = run_cli( DynRef::Owned(Box::new(OsFileSystem)), &mut console, Args::from([("lint"), root_path.display().to_string().as_str()].as_slice()), ); remove_dir_all(root_path).unwrap(); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "fs_error_dereferenced_symlink", fs, console, result, )); } #[test] fn fs_error_infinite_symlink_expansion_to_dirs() { let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let root_path = temp_dir().join("lint_rome_test_infinite_symlink_expansion_to_dirs"); let subdir1_path = root_path.join("prefix"); let subdir2_path = root_path.join("foo").join("bar"); let _ = remove_dir_all(&root_path); create_dir_all(&subdir1_path).unwrap(); create_dir_all(&subdir2_path).unwrap(); #[cfg(target_family = "unix")] { symlink(&subdir2_path, 
subdir1_path.join("symlink1")).unwrap(); symlink(subdir1_path, subdir2_path.join("symlink2")).unwrap(); } #[cfg(target_os = "windows")] { check_windows_symlink!(symlink_dir(&subdir2_path, &subdir1_path.join("symlink1"))); check_windows_symlink!(symlink_dir(subdir1_path, subdir2_path.join("symlink2"))); } let result = run_cli( DynRef::Owned(Box::new(OsFileSystem)), &mut console, Args::from([("lint"), (root_path.display().to_string().as_str())].as_slice()), ); remove_dir_all(root_path).unwrap(); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "fs_error_infinite_symlink_expansion_to_dirs", fs, console, result, )); } #[test] fn fs_error_infinite_symlink_expansion_to_files() { let mut console = BufferConsole::default(); let root_path = temp_dir().join("lint_rome_test_infinite_symlink_expansion_to_files"); let subdir1_path = root_path.join("prefix"); let subdir2_path = root_path.join("foo").join("bar"); let _ = remove_dir_all(&root_path); create_dir_all(&subdir1_path).unwrap(); create_dir_all(&subdir2_path).unwrap(); let symlink1_path = subdir1_path.join("symlink1"); let symlink2_path = subdir2_path.join("symlink2"); #[cfg(target_family = "unix")] { symlink(&symlink2_path, &symlink1_path).unwrap(); symlink(&symlink1_path, &symlink2_path).unwrap(); } #[cfg(target_os = "windows")] { check_windows_symlink!(symlink_dir(&symlink2_path, &symlink1_path)); check_windows_symlink!(symlink_dir(&symlink1_path, &symlink2_path)); } let result = run_cli( DynRef::Owned(Box::new(OsFileSystem)), &mut console, Args::from([("lint"), (root_path.display().to_string().as_str())].as_slice()), ); remove_dir_all(root_path).unwrap(); assert!(result.is_err(), "run_cli returned {result:?}"); // Don't use a snapshot here, since the diagnostics can be reported in // arbitrary order: assert!(console .out_buffer .iter() .flat_map(|msg| msg.content.0.iter()) .any(|node| node.content.contains("Deeply nested symlink expansion"))); 
assert!(console .out_buffer .iter() .flat_map(|msg| msg.content.0.iter()) .any(|node| node.content.contains(&symlink1_path.display().to_string()))); assert!(console .out_buffer .iter() .flat_map(|msg| msg.content.0.iter()) .any(|node| node.content.contains(&symlink2_path.display().to_string()))); } #[test] fn fs_error_read_only() { let mut fs = MemoryFileSystem::new_read_only(); let mut console = BufferConsole::default(); let file_path = Path::new("test.js"); fs.insert(file_path.into(), *b"content"); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("lint"), ("--apply"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); // Do not store the content of the file in the snapshot fs.remove(file_path); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "fs_error_read_only", fs, console, result, )); } #[test] fn fs_error_unknown() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); fs.insert_error(PathBuf::from("prefix/ci.js"), ErrorEntry::UnknownFileType); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("lint"), ("prefix")].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "fs_error_unknown", fs, console, result, )); } // Symbolic link ignore pattern test // // Verifies, that ignore patterns to symbolic links are allowed. 
// // ├── rome.json // ├── hidden_nested // │ └── test // │ └── symlink_testcase1_2 -> hidden_testcase1 // ├── hidden_testcase1 // │ └── test // │ └── test.js // ok // ├── hidden_testcase2 // │ ├── test1.ts // ignored // │ ├── test2.ts // ignored // │ └── test.js // ok // └── src // ├── symlink_testcase1_1 -> hidden_nested // └── symlink_testcase2 -> hidden_testcase2 #[test] fn fs_files_ignore_symlink() { let fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let root_path = temp_dir().join("lint_rome_test_files_ignore_symlink"); let src_path = root_path.join("src"); let testcase1_path = root_path.join("hidden_testcase1"); let testcase1_sub_path = testcase1_path.join("test"); let testcase1_sub_file_path = testcase1_sub_path.join("test.js"); let testcase2_path = root_path.join("hidden_testcase2"); let nested_path = root_path.join("hidden_nested"); let nested_sub_path = nested_path.join("test"); let _ = remove_dir_all(&root_path); create_dir(&root_path).unwrap(); create_dir(&src_path).unwrap(); create_dir_all(testcase1_sub_path.clone()).unwrap(); create_dir(testcase2_path.clone()).unwrap(); create_dir_all(nested_sub_path.clone()).unwrap(); // src/symlink_testcase1_1 let symlink_testcase1_1_path = src_path.join("symlink_testcase1_1"); // hidden_nested/test/symlink_testcase1_2 let symlink_testcase1_2_path = nested_sub_path.join("symlink_testcase1_2"); // src/symlink_testcase1_3 let symlink_testcase1_3_path = src_path.join("symlink_testcase1_3"); // src/symlink_testcase2 let symlink_testcase2_path = src_path.join("symlink_testcase2"); #[cfg(target_family = "unix")] { // src/symlink_testcase1_1 -> hidden_nested symlink(nested_path, symlink_testcase1_1_path).unwrap(); // hidden_nested/test/symlink_testcase1_2 -> hidden_testcase1 symlink(testcase1_path, symlink_testcase1_2_path).unwrap(); // src/symlink_testcase1_3 -> hidden_testcase1/test/test.js symlink(testcase1_sub_file_path, symlink_testcase1_3_path).unwrap(); // src/symlink_testcase2 -> 
hidden_testcase2 symlink(&testcase2_path, symlink_testcase2_path).unwrap(); } #[cfg(target_os = "windows")] { check_windows_symlink!(symlink_dir(nested_path, symlink_testcase1_1_path)); check_windows_symlink!(symlink_dir(testcase1_path, symlink_testcase1_2_path)); check_windows_symlink!(symlink_dir( testcase1_sub_file_path, symlink_testcase1_3_path )); check_windows_symlink!(symlink_dir(&testcase2_path, symlink_testcase2_path)); } let config_path = root_path.join("rome.json"); let mut config_file = File::create(config_path).unwrap(); config_file .write_all(CONFIG_IGNORE_SYMLINK.as_bytes()) .unwrap(); let files: [PathBuf; 4] = [ testcase1_sub_path.join("test.js"), // ok testcase2_path.join("test.js"), // ok testcase2_path.join("test1.ts"), // ignored testcase2_path.join("test2.ts"), // ignored ]; for file_path in files { let mut file = File::create(file_path).unwrap(); file.write_all(APPLY_SUGGESTED_BEFORE.as_bytes()).unwrap(); } let result = run_cli( DynRef::Owned(Box::new(OsFileSystem)), &mut console, Args::from( [ ("lint"), ("--config-path"), (root_path.display().to_string().as_str()), ("--apply-unsafe"), (src_path.display().to_string().as_str()), ] .as_slice(), ), ); remove_dir_all(root_path).unwrap(); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "fs_files_ignore_symlink", fs, console, result, )); } #[test] fn file_too_large() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js"); fs.insert(file_path.into(), "statement();\n".repeat(80660).as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("lint"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); // Do not store the content of the file in the snapshot fs.remove(file_path); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "file_too_large", fs, console, result, )); } 
#[test] fn file_too_large_config_limit() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); fs.insert(PathBuf::from("rome.json"), CONFIG_FILE_SIZE_LIMIT); let file_path = Path::new("check.js"); fs.insert(file_path.into(), "statement1();\nstatement2();"); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("lint"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "file_too_large_config_limit", fs, console, result, )); } #[test] fn file_too_large_cli_limit() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js"); fs.insert(file_path.into(), "statement1();\nstatement2();"); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("lint"), ("--files-max-size=16"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "file_too_large_cli_limit", fs, console, result, )); } #[test] fn files_max_size_parse_error() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("check.js");
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
true
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_cli/tests/commands/mod.rs
crates/rome_cli/tests/commands/mod.rs
mod check; mod ci; mod format; mod init; mod lint; mod lsp_proxy; mod migrate; mod rage; mod version;
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_cli/tests/commands/format.rs
crates/rome_cli/tests/commands/format.rs
use crate::configs::{ CONFIG_DISABLED_FORMATTER, CONFIG_FILE_SIZE_LIMIT, CONFIG_FORMAT, CONFIG_FORMATTER_AND_FILES_IGNORE, CONFIG_FORMATTER_IGNORED_DIRECTORIES, CONFIG_FORMATTER_IGNORED_FILES, CONFIG_ISSUE_3175_1, CONFIG_ISSUE_3175_2, }; use crate::snap_test::{markup_to_string, SnapshotPayload}; use crate::{ assert_cli_snapshot, run_cli, CUSTOM_FORMAT_BEFORE, FORMATTED, LINT_ERROR, UNFORMATTED, }; use bpaf::Args; use rome_console::{markup, BufferConsole, MarkupBuf}; use rome_fs::{FileSystemExt, MemoryFileSystem}; use rome_service::DynRef; use std::path::{Path, PathBuf}; // six spaces const CUSTOM_FORMAT_AFTER: &str = r#"function f() { return { something }; } "#; const APPLY_JSX_QUOTE_STYLE_BEFORE: &str = r#" <div bar="foo" baz={"foo"} />"#; const APPLY_JSX_QUOTE_STYLE_AFTER: &str = r#"<div bar='foo' baz={"foo"} />; "#; const APPLY_QUOTE_STYLE_BEFORE: &str = r#" let a = "something"; let b = { "hey": "hello" };"#; const APPLY_QUOTE_STYLE_AFTER: &str = "let a = 'something'; let b = {\n\t'hey': 'hello',\n};\n"; const APPLY_TRAILING_COMMA_BEFORE: &str = r#" const a = [ longlonglonglongItem1longlonglonglongItem1, longlonglonglongItem1longlonglonglongItem2, longlonglonglongItem1longlonglonglongItem3, ]; "#; const APPLY_TRAILING_COMMA_AFTER: &str = r#"const a = [ longlonglonglongItem1longlonglonglongItem1, longlonglonglongItem1longlonglonglongItem2, longlonglonglongItem1longlonglonglongItem3 ]; "#; const APPLY_ARROW_PARENTHESES_BEFORE: &str = r#" action => {} (action) => {} ({ action }) => {} ([ action ]) => {} (...action) => {} (action = 1) => {} "#; const APPLY_ARROW_PARENTHESES_AFTER: &str = r#"action => {}; action => {}; ({ action }) => {}; ([action]) => {}; (...action) => {}; (action = 1) => {}; "#; const DEFAULT_CONFIGURATION_BEFORE: &str = r#"function f() { return { a, b } }"#; const DEFAULT_CONFIGURATION_AFTER: &str = "function f() { return { a, b }; } "; const CUSTOM_CONFIGURATION_BEFORE: &str = r#"function f() { return { a, b } }"#; const 
CUSTOM_CONFIGURATION_AFTER: &str = "function f() { return { a, b, }; } "; #[test] fn format_help() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), "--help"].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "format_help", fs, console, result, )); } #[test] fn print() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("format.js"); fs.insert(file_path.into(), UNFORMATTED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, UNFORMATTED); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "formatter_print", fs, console, result, )); } #[test] fn write() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("format.js"); fs.insert(file_path.into(), UNFORMATTED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("format"), ("--write"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, FORMATTED); assert_eq!(console.out_buffer.len(), 1); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), 
"formatter_write", fs, console, result, )); } #[test] fn write_only_files_in_correct_base() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_to_format = Path::new("src/format.js"); fs.insert( file_to_format.into(), <&str>::clone(&UNFORMATTED).as_bytes(), ); let file_to_not_format = Path::new("scripts/format.js"); fs.insert(file_to_not_format.into(), UNFORMATTED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), ("--write"), ("./src")].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_to_format) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, FORMATTED, "we test the file is formatted"); drop(file); let mut file = fs .open(file_to_not_format) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, UNFORMATTED, "we test the file is not formatted"); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "write_only_files_in_correct_base", fs, console, result, )); } // Ensures lint warnings are not printed in format mode #[test] fn lint_warning() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("format.js"); fs.insert(file_path.into(), LINT_ERROR.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), file_path.as_os_str().to_str().unwrap()].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); 
assert_eq!(content, LINT_ERROR); // The console buffer is expected to contain the following message: // 0: "Formatter would have printed the following content" // 1: "Compared 1 files" assert_eq!( console.out_buffer.len(), 2, "console {:#?}", console.out_buffer ); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "formatter_lint_warning", fs, console, result, )); } #[test] // FIXME: redact snapshot for custom paths in configuration #[cfg(not(windows))] fn custom_config_file_path() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let config_path = Path::new("/test/rome.json"); fs.insert(config_path.into(), CONFIG_FORMAT.as_bytes()); let file_path = Path::new("file.js"); fs.insert(file_path.into(), DEFAULT_CONFIGURATION_BEFORE.as_bytes()); let mut config_path = PathBuf::from(config_path); config_path.pop(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("format"), format!( "--config-path={}", config_path.display().to_string().as_str() ) .as_str(), ("--write"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, DEFAULT_CONFIGURATION_AFTER); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "custom_config_file_path", fs, console, result, )); } // Should throw an error when an invalid configuration path is specified #[test] // FIXME: redact snapshot for custom paths in configuration #[cfg(not(windows))] fn invalid_config_file_path() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let config_path = Path::new("test"); let file_path = Path::new("file.js"); fs.insert(file_path.into(), *b"content"); let result = run_cli( 
DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("format"), ("--config-path"), (config_path.display().to_string().as_str()), ("--write"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "invalid_config_file_path", fs, console, result, )); } #[test] fn applies_custom_configuration() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("file.js"); fs.insert(file_path.into(), CUSTOM_CONFIGURATION_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("format"), ("--line-width"), ("10"), ("--indent-style"), ("space"), ("--indent-size"), ("8"), ("--write"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, CUSTOM_CONFIGURATION_AFTER); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "applies_custom_configuration", fs, console, result, )); } #[test] fn applies_custom_configuration_over_config_file() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); fs.insert(file_path.into(), CONFIG_FORMAT.as_bytes()); let file_path = Path::new("file.js"); fs.insert(file_path.into(), CUSTOM_CONFIGURATION_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("format"), ("--line-width"), ("10"), ("--indent-style"), ("space"), ("--indent-size"), ("8"), ("--write"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) 
.expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, CUSTOM_CONFIGURATION_AFTER); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "applies_custom_configuration_over_config_file", fs, console, result, )); } #[test] fn applies_custom_configuration_over_config_file_issue_3175_v1() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); fs.insert(file_path.into(), CONFIG_ISSUE_3175_1.as_bytes()); let file_path = Path::new("file.js"); fs.insert(file_path.into(), "import React from 'react';\n".as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("format"), ("--quote-style"), ("single"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, "import React from 'react';\n"); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "applies_custom_configuration_over_config_file_issue_3175_v1", fs, console, result, )); } #[test] fn applies_custom_configuration_over_config_file_issue_3175_v2() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let source = r#"function f() { return 'hey'; } "#; let file_path = Path::new("rome.json"); fs.insert(file_path.into(), CONFIG_ISSUE_3175_2.as_bytes()); let file_path = Path::new("file.js"); fs.insert(file_path.into(), source.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("format"), ("--indent-style"), ("space"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); 
assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, source); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "applies_custom_configuration_over_config_file_issue_3175_v2", fs, console, result, )); } #[test] fn applies_custom_jsx_quote_style() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("file.js"); fs.insert(file_path.into(), APPLY_JSX_QUOTE_STYLE_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("format"), ("--jsx-quote-style"), ("single"), ("--quote-properties"), ("preserve"), ("--write"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, APPLY_JSX_QUOTE_STYLE_AFTER); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "applies_custom_jsx_quote_style", fs, console, result, )); } #[test] fn applies_custom_quote_style() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("file.js"); fs.insert(file_path.into(), APPLY_QUOTE_STYLE_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("format"), ("--quote-style"), ("single"), ("--quote-properties"), ("preserve"), ("--write"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut 
content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, APPLY_QUOTE_STYLE_AFTER); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "applies_custom_quote_style", fs, console, result, )); } #[test] fn applies_custom_trailing_comma() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("file.js"); fs.insert(file_path.into(), APPLY_TRAILING_COMMA_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("format"), ("--trailing-comma"), ("none"), ("--write"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, APPLY_TRAILING_COMMA_AFTER); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "applies_custom_trailing_comma", fs, console, result, )); } #[test] fn applies_custom_arrow_parentheses() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("file.js"); fs.insert(file_path.into(), APPLY_ARROW_PARENTHESES_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("format"), ("--arrow-parentheses"), ("as-needed"), ("--write"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, APPLY_ARROW_PARENTHESES_AFTER); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), 
"applies_custom_arrow_parentheses", fs, console, result, )); } #[test] fn trailing_comma_parse_errors() { let mut console = BufferConsole::default(); let mut fs = MemoryFileSystem::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), ("--trailing-comma"), ("NONE"), ("file.js")].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "trailing_comma_parse_errors", fs, console, result, )); } #[test] fn with_semicolons_options() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("file.js"); fs.insert(file_path.into(), UNFORMATTED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("format"), ("--semicolons=as-needed"), ("--write"), file_path.as_os_str().to_str().unwrap(), ] .as_slice(), ), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, "statement()\n"); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "with_semicolons_options", fs, console, result, )); } #[test] fn with_invalid_semicolons_option() { let mut console = BufferConsole::default(); let mut fs = MemoryFileSystem::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), ("--semicolons"), ("asneed"), ("file.js")].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "with_invalid_semicolons_option", fs, console, result, )); } #[test] fn indent_style_parse_errors() { let mut console = BufferConsole::default(); let mut fs = MemoryFileSystem::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, 
Args::from([("format"), ("--indent-style"), ("invalid"), ("file.js")].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "indent_style_parse_errors", fs, console, result, )); } #[test] fn indent_size_parse_errors_negative() { let mut console = BufferConsole::default(); let mut fs = MemoryFileSystem::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), ("--indent-size=-1"), ("file.js")].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "indent_size_parse_errors_negative", fs, console, result, )); } #[test] fn indent_size_parse_errors_overflow() { let mut console = BufferConsole::default(); let mut fs = MemoryFileSystem::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), ("--indent-size=257"), ("file.js")].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "indent_size_parse_errors_overflow", fs, console, result, )); } #[test] fn line_width_parse_errors_negative() { let mut console = BufferConsole::default(); let mut fs = MemoryFileSystem::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from(["format", "--line-width=-1", "file.js"].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "line_width_parse_errors_negative", fs, console, result, )); } #[test] fn line_width_parse_errors_overflow() { let mut console = BufferConsole::default(); let mut fs = MemoryFileSystem::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), ("--line-width"), ("321"), ("file.js")].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), 
"line_width_parse_errors_overflow", fs, console, result, )); } #[test] fn quote_properties_parse_errors_letter_case() { let mut console = BufferConsole::default(); let mut fs = MemoryFileSystem::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("format"), ("--quote-properties"), ("As-needed"), ("file.js"), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "quote_properties_parse_errors_letter_case", fs, console, result, )); } #[test] fn format_with_configuration() { let mut console = BufferConsole::default(); let mut fs = MemoryFileSystem::default(); let file_path = Path::new("rome.json"); fs.insert(file_path.into(), CONFIG_FORMAT.as_bytes()); let file_path = Path::new("file.js"); fs.insert(file_path.into(), CUSTOM_FORMAT_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), ("file.js"), ("--write")].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, CUSTOM_FORMAT_AFTER); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "format_with_configuration", fs, console, result, )); } #[test] fn format_is_disabled() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); fs.insert(file_path.into(), CONFIG_DISABLED_FORMATTER.as_bytes()); let file_path = Path::new("file.js"); fs.insert(file_path.into(), CUSTOM_FORMAT_BEFORE.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), ("file.js"), ("--write")].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) 
.expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, CUSTOM_FORMAT_BEFORE); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "format_is_disabled", fs, console, result, )); } #[test] fn format_stdin_successfully() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); console .in_buffer .push("function f() {return{}}".to_string()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), ("--stdin-file-path"), ("mock.js")].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let message = console .out_buffer .get(0) .expect("Console should have written a message"); let content = markup_to_string(markup! { {message.content} }); assert_eq!(content, "function f() {\n\treturn {};\n}\n"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "format_stdin_successfully", fs, console, result, )); } #[test] fn format_stdin_with_errors() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), ("--stdin-file-path"), ("mock.js")].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "format_stdin_with_errors", fs, console, result, )); } #[test] fn does_not_format_if_disabled() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); fs.insert(file_path.into(), CONFIG_DISABLED_FORMATTER.as_bytes()); console .in_buffer .push("function f() {return{}}".to_string()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), ("--stdin-file-path"), ("mock.js")].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let message = 
console .out_buffer .get(0) .expect("Console should have written a message"); let content = markup_to_string(markup! { {message.content} }); assert_eq!(content, "function f() {return{}}".to_string()); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "does_not_format_if_disabled", fs, console, result, )); } #[test] fn does_not_format_ignored_files() { let mut console = BufferConsole::default(); let mut fs = MemoryFileSystem::default(); let file_path = Path::new("rome.json"); fs.insert(file_path.into(), CONFIG_FORMATTER_IGNORED_FILES.as_bytes()); let file_path = Path::new("test.js"); fs.insert(file_path.into(), UNFORMATTED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("format"), ("test.js"), ("--write")].as_slice()), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut file = fs .open(file_path) .expect("formatting target file was removed by the CLI"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); assert_eq!(content, UNFORMATTED); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "does_not_format_ignored_files", fs, console, result, )); } #[test] fn does_not_format_if_files_are_listed_in_ignore_option() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("rome.json"); fs.insert( file_path.into(), CONFIG_FORMATTER_AND_FILES_IGNORE.as_bytes(), ); let file_path_test1 = Path::new("test1.js"); fs.insert(file_path_test1.into(), UNFORMATTED.as_bytes()); let file_path_test2 = Path::new("test2.js"); fs.insert(file_path_test2.into(), UNFORMATTED.as_bytes()); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from( [ ("format"), file_path_test1.as_os_str().to_str().unwrap(), file_path_test2.as_os_str().to_str().unwrap(), ("--write"), ] .as_slice(), ), ); assert!(result.is_err(), "run_cli returned {result:?}"); let mut buffer = String::new(); 
fs.open(file_path_test1) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, UNFORMATTED); let mut buffer = String::new(); fs.open(file_path_test2) .unwrap() .read_to_string(&mut buffer) .unwrap(); assert_eq!(buffer, UNFORMATTED); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "does_not_format_if_files_are_listed_in_ignore_option", fs, console, result, )); } #[test] fn does_not_format_ignored_directories() { let mut console = BufferConsole::default();
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
true
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_cli/tests/commands/init.rs
crates/rome_cli/tests/commands/init.rs
use crate::configs::{CONFIG_INIT_DEFAULT, CONFIG_INIT_DEFAULT_WHEN_INSTALLED}; use crate::run_cli; use crate::snap_test::{assert_cli_snapshot, SnapshotPayload}; use bpaf::Args; use rome_console::BufferConsole; use rome_fs::{FileSystemExt, MemoryFileSystem}; use rome_json_formatter::context::JsonFormatOptions; use rome_json_parser::{parse_json, JsonParserOptions}; use rome_service::DynRef; use std::path::Path; #[test] fn init_help() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("init"), "--help"].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "init_help", fs, console, result, )); } #[test] fn creates_config_file() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("init")].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let file_path = Path::new("rome.json"); let mut file = fs .open(file_path) .expect("configuration file was not written on disk"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); let parsed = parse_json(CONFIG_INIT_DEFAULT, JsonParserOptions::default()); let formatted = rome_json_formatter::format_node(JsonFormatOptions::default(), &parsed.syntax()) .expect("valid format document") .print() .expect("valid format document"); assert_eq!(content, formatted.as_code()); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "creates_config_file", fs, console, result, )); } #[test] fn creates_config_file_when_rome_installed_via_package_manager() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let file_path = Path::new("./node_modules/rome/configuration_schema.json"); fs.insert(file_path.into(), *b"{}"); let 
result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("init")].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); let file_path = Path::new("rome.json"); let mut file = fs .open(file_path) .expect("configuration file was not written on disk"); let mut content = String::new(); file.read_to_string(&mut content) .expect("failed to read file from memory FS"); let parsed = parse_json( CONFIG_INIT_DEFAULT_WHEN_INSTALLED, JsonParserOptions::default(), ); let formatted = rome_json_formatter::format_node(JsonFormatOptions::default(), &parsed.syntax()) .expect("valid format document") .print() .expect("valid format document"); assert_eq!(content, formatted.as_code()); drop(file); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "creates_config_file_when_rome_installed_via_package_manager", fs, console, result, )); }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_cli/tests/commands/lsp_proxy.rs
crates/rome_cli/tests/commands/lsp_proxy.rs
use crate::run_cli; use crate::snap_test::{assert_cli_snapshot, SnapshotPayload}; use bpaf::Args; use rome_console::BufferConsole; use rome_fs::MemoryFileSystem; use rome_service::DynRef; #[test] fn lsp_proxy_help() { let mut fs = MemoryFileSystem::default(); let mut console = BufferConsole::default(); let result = run_cli( DynRef::Borrowed(&mut fs), &mut console, Args::from([("lsp-proxy"), "--help"].as_slice()), ); assert!(result.is_ok(), "run_cli returned {result:?}"); assert_cli_snapshot(SnapshotPayload::new( module_path!(), "lsp_proxy_help", fs, console, result, )); }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/prelude.rs
crates/rome_js_parser/src/prelude.rs
pub(crate) use crate::parser::JsParser; pub use rome_parser::prelude::*;
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/lib.rs
crates/rome_js_parser/src/lib.rs
//! Extremely fast, lossless, and error tolerant JavaScript Parser. //! //! The parser uses an abstraction over non-whitespace tokens. //! This allows us to losslessly or lossly parse code without requiring explicit handling of whitespace. //! The parser yields events, not an AST, the events are resolved into untyped syntax nodes, which can then //! be casted into a typed AST. //! //! The parser is able to produce a valid AST from **any** source code. //! Erroneous productions are wrapped into `ERROR` syntax nodes, the original source code //! is completely represented in the final syntax nodes. //! //! You probably do not want to use the parser struct, unless you want to parse fragments of Js source code or make your own productions. //! Instead use functions such as [parse_script], and [parse_module] which offer abstracted versions for parsing. //! //! For more finer control, use [parse](crate::parse::parse) or [parse_js_with_cache], //! //! Notable features of the parser are: //! - Extremely fast parsing and lexing through the extremely fast lexer. //! - Ability to do Lossy or Lossless parsing on demand without explicit whitespace handling. //! - Customizable, able to parse any fragments of JS code at your discretion. //! - Completely error tolerant, able to produce an AST from any source code. //! - Zero cost for converting untyped nodes to a typed AST. //! - Ability to go from AST to SyntaxNodes to SyntaxTokens to source code and back very easily with nearly zero cost. //! - Very easy tree traversal through [`SyntaxNode`](rome_rowan::SyntaxNode). //! - Descriptive errors with multiple labels and notes. //! - Very cheap cloning, cloning an ast node or syntax node is the cost of adding a reference to an Rc. //! - Cheap incremental reparsing of changed text. //! //! The crate further includes utilities such as: //! - ANSI syntax highlighting of nodes or text through `lexer`. //! //! It is inspired by the rust analyzer parser but adapted for JavaScript. //! //! 
# Syntax Nodes vs AST Nodes //! The crate relies on a concept of untyped [rome_js_syntax::JsSyntaxNode]s vs typed [rome_rowan::AstNode]s. //! Syntax nodes represent the syntax tree in an untyped way. They represent a location in an immutable //! tree with two pointers. The syntax tree is composed of [rome_js_syntax::JsSyntaxNode]s and [rome_js_syntax::JsSyntaxToken]s in a nested //! tree structure. Each node can have parents, siblings, children, descendants, etc. //! //! [rome_rowan::AstNode]s represent a typed version of a syntax node. They have the same exact representation as syntax nodes //! therefore a conversion between either has zero runtime cost. Every piece of data of an ast node is optional, //! this is due to the fact that the parser is completely error tolerant. //! //! Each representation has its advantages: //! //! ### SyntaxNodes //! - Very simple traversing of the syntax tree through functions on them. //! - Easily able to convert to underlying text, range, or tokens. //! - Contain all whitespace bound to the underlying production (in the case of lossless parsing). //! - Can be easily converted into its typed representation with zero cost. //! - Can be turned into a pretty representation with fmt debug. //! //! ### AST Nodes //! - Easy access to properties of the underlying production. //! - Zero cost conversion to a syntax node. //! //! In conclusion, the use of both representations means we are not constrained to acting through //! typed nodes. Which makes traversal hard and you often have to resort to autogenerated visitor patterns. //! AST nodes are simply a way to easily access subproperties of a syntax node.event; //! //! ## Parser Tests //! //! Parser tests are comments that start with `test` or `test_err` followed by the test name, and then the code on its own line. //! //! ```rust,ignore //! // test js feature_name //! // let a = { new_feature : "" } //! // let b = { new_feature : "" } //! 
fn parse_new_feature(p: &mut Parser) -> ParsedSyntax {} //! ``` //! //! * `test`: Test for a valid program. Should not produce any diagnostics nor missing nodes. //! * `test_err`: Test for a program with syntax error. Must produce a diagnostic. //! //! By default, the test runs as a JavaScript Module. You can customize the source type by specifying the //! file type after `test` or `test_err` //! //! ```rust,ignore //! // test ts typescript_test //! // console.log("a"); //! if a { //! // .. //! } //! ``` //! //! The supported source types are: //! * `js` //! * `jsx` //! * `ts` //! * `tsx` //! * `d.ts` //! //! To enable script mode, add a `// script` comment to the code. //! //! To extract the test cases, run `cargo codegen test`. Running the codegen is necessary whenever you add, //! change, or remove inline tests . //! //! To update the test output, run //! //! //! **Linux/MacOs**: //! //! ```bash //! env UPDATE_EXPECT=1 cargo test //! ``` //! //! **Windows** //! //! ```powershell //! set UPDATE_EXPECT=1 & cargo test //! 
``` mod parser; #[macro_use] mod lexer; mod parse; mod rewrite; mod span; mod state; #[cfg(any(test, feature = "tests"))] pub mod test_utils; #[cfg(test)] mod tests; pub mod options; mod prelude; pub mod syntax; mod token_source; use crate::prelude::*; pub(crate) use crate::ParsedSyntax::{Absent, Present}; pub use crate::{ lexer::{LexContext, ReLexContext}, options::JsParserOptions, parse::*, }; pub(crate) use parser::{JsParser, ParseRecovery}; use rome_js_factory::JsSyntaxFactory; use rome_js_syntax::{JsLanguage, JsSyntaxKind, LanguageVariant}; use rome_parser::tree_sink::LosslessTreeSink; pub(crate) use state::{ParserState, StrictMode}; use std::fmt::Debug; pub enum JsSyntaxFeature { #[allow(unused)] #[doc(alias = "LooseMode")] SloppyMode, StrictMode, TypeScript, Jsx, } impl SyntaxFeature for JsSyntaxFeature { type Parser<'source> = JsParser<'source>; fn is_supported(&self, p: &JsParser) -> bool { match self { JsSyntaxFeature::SloppyMode => p.state().strict().is_none(), JsSyntaxFeature::StrictMode => p.state().strict().is_some(), JsSyntaxFeature::TypeScript => p.source_type().language().is_typescript(), JsSyntaxFeature::Jsx => p.source_type().variant() == LanguageVariant::Jsx, } } } pub(crate) type JsLosslessTreeSink<'source> = LosslessTreeSink<'source, JsLanguage, JsSyntaxFactory>;
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/parse.rs
crates/rome_js_parser/src/parse.rs
//! Utilities for high level parsing of js code. use crate::*; use rome_js_syntax::{ AnyJsRoot, JsFileSource, JsLanguage, JsModule, JsScript, JsSyntaxNode, ModuleKind, }; use rome_parser::event::Event; use rome_parser::token_source::Trivia; use rome_rowan::{AstNode, NodeCache}; use std::marker::PhantomData; /// A utility struct for managing the result of a parser job #[derive(Debug)] pub struct Parse<T> { root: JsSyntaxNode, errors: Vec<ParseDiagnostic>, _ty: PhantomData<T>, } impl<T> Parse<T> { pub fn new_module(root: JsSyntaxNode, errors: Vec<ParseDiagnostic>) -> Parse<T> { Self::new(root, errors) } pub fn new_script(root: JsSyntaxNode, errors: Vec<ParseDiagnostic>) -> Parse<T> { Self::new(root, errors) } pub fn new(root: JsSyntaxNode, errors: Vec<ParseDiagnostic>) -> Parse<T> { Parse { root, errors, _ty: PhantomData, } } pub fn cast<N: AstNode<Language = JsLanguage>>(self) -> Option<Parse<N>> { if N::can_cast(self.syntax().kind()) { Some(Parse::new(self.root, self.errors)) } else { None } } /// The syntax node represented by this Parse result /// /// ``` /// use rome_js_parser::{JsParserOptions, parse_script}; /// use rome_js_syntax::{JsIfStatement, JsSyntaxKind}; /// use rome_rowan::{AstNode, AstNodeList}; /// /// let parse = parse_script( /// " /// if (a > 5) { /// /* something */ /// } /// ", /// JsParserOptions::default() /// ); /// /// // The first stmt in the root syntax node (Script) is the if statement. /// let if_stmt = parse.tree().statements().first().unwrap(); /// /// assert_eq!(if_stmt.syntax().kind(), JsSyntaxKind::JS_IF_STATEMENT); /// ``` pub fn syntax(&self) -> JsSyntaxNode { self.root.clone() } /// Get the diagnostics which occurred when parsing pub fn diagnostics(&self) -> &[ParseDiagnostic] { self.errors.as_slice() } /// Get the diagnostics which occurred when parsing pub fn into_diagnostics(self) -> Vec<ParseDiagnostic> { self.errors } /// Returns [true] if the parser encountered some errors during the parsing. 
pub fn has_errors(&self) -> bool { self.errors.iter().any(|diagnostic| diagnostic.is_error()) } } impl<T: AstNode<Language = JsLanguage>> Parse<T> { /// Convert this parse result into a typed AST node. /// /// # Panics /// Panics if the node represented by this parse result mismatches. pub fn tree(&self) -> T { self.try_tree().unwrap_or_else(|| { panic!( "Expected tree to be a {} but root is:\n{:#?}", std::any::type_name::<T>(), self.syntax() ) }) } /// Try to convert this parse's untyped syntax node into an AST node. pub fn try_tree(&self) -> Option<T> { T::cast(self.syntax()) } /// Convert this parse into a result pub fn ok(self) -> Result<T, Vec<ParseDiagnostic>> { if !self.errors.iter().any(|d| d.is_error()) { Ok(self.tree()) } else { Err(self.errors) } } } fn parse_common( text: &str, source_type: JsFileSource, options: JsParserOptions, ) -> (Vec<Event<JsSyntaxKind>>, Vec<ParseDiagnostic>, Vec<Trivia>) { let mut parser = JsParser::new(text, source_type, options); syntax::program::parse(&mut parser); let (events, trivia, errors) = parser.finish(); (events, errors, trivia) } /// Parse text into a [`Parse`](Parse) which can then be turned into an untyped root [`JsSyntaxNode`](JsSyntaxNode). /// Or turned into a typed [`JsScript`](JsScript) with [`tree`](Parse::tree). /// /// ``` /// use rome_js_parser::{JsParserOptions, parse_script}; /// use rome_js_syntax::{JsSyntaxToken, JsFileSource, JsSyntaxList, JsComputedMemberExpression}; /// use rome_rowan::{AstNode, Direction}; /// /// let parse = parse_script("foo.bar[2]", JsParserOptions::default()); /// // Parse returns a JS Root which contains two lists, the directives and the statements, let's get the statements /// let stmt = parse.syntax().children().nth(1).unwrap(); /// // The untyped syntax node of `foo.bar[2]`, the root node is `Script`. /// let untyped_expr_node = stmt.first_child().unwrap(); /// /// // SyntaxNodes can be turned into a nice string representation. 
/// println!("{:#?}", untyped_expr_node); /// /// // You can then cast syntax nodes into a typed AST node. /// let typed_ast_node = JsComputedMemberExpression::cast(untyped_expr_node.first_child().unwrap()).unwrap(); /// /// // Everything on every ast node is optional because of error recovery. /// let prop = dbg!(typed_ast_node.member()).unwrap(); /// /// // You can then go back to an untyped SyntaxNode and get its range, text, parents, children, etc. /// assert_eq!(prop.syntax().text(), "2"); /// /// // Util has a function for yielding all tokens of a node. /// let tokens = untyped_expr_node.descendants_tokens(Direction::Next).map(|token| token.text_trimmed().to_string()).collect::<Vec<_>>(); /// /// assert_eq!(&tokens, &vec!["foo", ".", "bar", "[", "2", "]"]); /// ``` pub fn parse_script(text: &str, options: JsParserOptions) -> Parse<JsScript> { parse( text, JsFileSource::js_module().with_module_kind(ModuleKind::Script), options, ) .cast::<JsScript>() .unwrap() } /// Same as [parse_script] but configures the parser to parse an ECMAScript module instead of a script /// /// ### Examples /// /// Check the diagnostics emitted by the code /// ``` /// use rome_js_parser::{JsParserOptions, parse_module}; /// let source = r#" /// import { someModule } from "./someModule.js"; /// /// someModule(); /// "#; /// /// let parse = parse_module(source, JsParserOptions::default()); /// /// // Retrieve the diagnostics emitted /// assert_eq!(parse.diagnostics().len(), 0); /// ``` /// /// Retrieve the emitted AST and check its kind: /// ``` /// use rome_js_parser::{JsParserOptions, parse_module}; /// use rome_js_syntax::JsSyntaxKind; /// use rome_rowan::AstNode; /// let source = r#" /// import { someModule } from "./someModule.js"; /// /// someModule(); /// "#; /// let parse = parse_module(source, JsParserOptions::default()); /// /// let tree = parse.tree(); /// /// assert_eq!(tree.syntax().kind(), JsSyntaxKind::JS_MODULE); /// ``` /// pub fn parse_module(text: &str, options: 
JsParserOptions) -> Parse<JsModule> { parse(text, JsFileSource::js_module(), options) .cast::<JsModule>() .unwrap() } /// Parses the provided string as a EcmaScript program using the provided syntax features. /// /// ### Examples /// /// ``` /// use rome_js_parser::{JsParserOptions, parse}; /// use rome_js_syntax::{LanguageVariant, LanguageVersion, ModuleKind, JsFileSource}; /// // parse source text as TypeScript /// let mut module = JsFileSource::ts(); /// let mut parsed = parse("type F = {}", module, JsParserOptions::default()); /// assert_eq!(parsed.diagnostics().len(), 0); /// // parse source text as JSX /// module = JsFileSource::jsx(); /// parsed = parse("<Component></Component>", module, JsParserOptions::default()); /// assert_eq!(parsed.diagnostics().len(), 0); /// // parse source text with granular control /// module = JsFileSource::default() /// .with_version(LanguageVersion::ESNext) /// .with_module_kind(ModuleKind::Module) /// .with_variant(LanguageVariant::Jsx); /// parsed = parse("foo[bar]", module, JsParserOptions::default()); /// assert_eq!(parsed.diagnostics().len(), 0); /// ``` pub fn parse(text: &str, source_type: JsFileSource, options: JsParserOptions) -> Parse<AnyJsRoot> { let mut cache = NodeCache::default(); parse_js_with_cache(text, source_type, options, &mut cache) } /// Parses the provided string as a EcmaScript program using the provided syntax features and node cache. 
/// /// ### Examples /// /// ``` /// use rome_js_parser::{JsParserOptions, parse_js_with_cache}; /// use rome_js_syntax::JsFileSource; /// use rome_rowan::NodeCache; /// /// let source_type = JsFileSource::js_module(); /// let mut cache = NodeCache::default(); /// let mut source = "function f() { return 2 }"; /// /// let parsed = parse_js_with_cache(source, source_type, JsParserOptions::default(), &mut cache); /// assert_eq!(parsed.diagnostics().len(), 0); /// /// source = "function bar() { return 3 }"; /// let parsed = parse_js_with_cache(source, source_type, JsParserOptions::default(), &mut cache); /// assert_eq!(parsed.diagnostics().len(), 0); /// ``` pub fn parse_js_with_cache( text: &str, source_type: JsFileSource, options: JsParserOptions, cache: &mut NodeCache, ) -> Parse<AnyJsRoot> { tracing::debug_span!("parse").in_scope(move || { let (events, errors, tokens) = parse_common(text, source_type, options); let mut tree_sink = JsLosslessTreeSink::with_cache(text, &tokens, cache); rome_parser::event::process(&mut tree_sink, events, errors); let (green, parse_errors) = tree_sink.finish(); Parse::new(green, parse_errors) }) }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/tests.rs
crates/rome_js_parser/src/tests.rs
use crate::test_utils::has_bogus_nodes_or_empty_slots; use crate::{parse, parse_module, test_utils::assert_errors_are_absent, JsParserOptions, Parse}; use expect_test::expect_file; use rome_console::fmt::{Formatter, Termcolor}; use rome_console::markup; use rome_diagnostics::DiagnosticExt; use rome_diagnostics::PrintDiagnostic; use rome_js_syntax::{AnyJsRoot, JsFileSource, JsSyntaxKind}; use rome_js_syntax::{JsCallArguments, JsLogicalExpression, JsSyntaxToken}; use rome_rowan::{AstNode, Direction, TextSize}; use std::fmt::Write; use std::panic::catch_unwind; use std::path::{Path, PathBuf}; #[test] fn parser_smoke_test() { let src = r#" import "x" with { type: "json" } "#; let module = parse(src, JsFileSource::tsx(), JsParserOptions::default()); assert_errors_are_absent(&module, Path::new("parser_smoke_test")); } #[test] fn parser_missing_smoke_test() { let src = r#" console.log("Hello world"; "#; let module = parse_module(src, JsParserOptions::default()); let arg_list = module .syntax() .descendants() .find_map(JsCallArguments::cast) .unwrap(); let opening = arg_list.syntax().element_in_slot(0); let list = arg_list.syntax().element_in_slot(1); let closing = arg_list.syntax().element_in_slot(2); assert_eq!(opening.map(|o| o.to_string()), Some(String::from("("))); assert_eq!( list.map(|l| l.kind()), Some(JsSyntaxKind::JS_CALL_ARGUMENT_LIST) ); assert_eq!(closing, None); } fn try_parse(path: &str, text: &str, options: JsParserOptions) -> Parse<AnyJsRoot> { let res = catch_unwind(|| { let path = Path::new(path); // Files containing a // SCRIPT comment are parsed as script and not as module // This is needed to test features that are restricted in strict mode. 
let source_type = if text.contains("// SCRIPT") { JsFileSource::js_script() } else { path.try_into().unwrap() }; let parse = parse(text, source_type, options); assert_eq!( parse.syntax().to_string(), text, "Original source and re-printed tree differ\nParsed Tree: {:#?}", parse.syntax(), ); parse }); assert!(res.is_ok(), "Trying to parse `{}` panicked", path); res.unwrap() } fn try_parse_with_printed_ast( path: &str, text: &str, options: JsParserOptions, ) -> (Parse<AnyJsRoot>, String) { catch_unwind(|| { let parse = try_parse(path, text, options.clone()); let formatted = format!("{:#?}", &parse.tree()); (parse, formatted) }) .unwrap_or_else(|err| { // Re-parsing the source here seems silly. But the problem is, that `SyntaxNode`s aren't // unwind safe. That's why the same `ParseResult` can't be reused here. // This should be fine because this code is only executed for local tests. No checked-in // test should ever hit this line. let re_parsed = try_parse(path, text, options); panic!( "Printing the AST for `{}` panicked. That means it is malformed. 
Err: {:?}\n{:#?}", path, err, re_parsed.syntax() ); }) } #[cfg(test)] fn run_and_expect_no_errors(path: &str, _: &str, _: &str, _: &str) { let path = PathBuf::from(path); let text = std::fs::read_to_string(&path).unwrap(); let options_path = path.with_extension("options.json"); let options: JsParserOptions = std::fs::read_to_string(options_path) .ok() .and_then(|options| serde_json::from_str(&options).ok()) .unwrap_or_default(); let (parse, ast) = try_parse_with_printed_ast(path.to_str().unwrap(), &text, options); assert_errors_are_absent(&parse, &path); let actual = format!("{}\n\n{:#?}", ast, parse.syntax()); let path = path.with_extension("rast"); expect_file![path].assert_eq(&actual) } #[cfg(test)] fn run_and_expect_errors(path: &str, _: &str, _: &str, _: &str) { let path = PathBuf::from(path); let text = std::fs::read_to_string(&path).unwrap(); let options_path = path.with_extension("options.json"); let options: JsParserOptions = std::fs::read_to_string(options_path) .ok() .and_then(|options| serde_json::from_str(&options).ok()) .unwrap_or_default(); let (parse, ast) = try_parse_with_printed_ast(path.to_str().unwrap(), &text, options); assert_errors_are_present(&parse, &path); let mut actual = format!("{}\n\n{:#?}", ast, parse.syntax()); for diag in parse.diagnostics() { let mut write = rome_diagnostics::termcolor::Buffer::no_color(); let error = diag .clone() .with_file_path(path.file_name().unwrap().to_string_lossy().to_string()) .with_file_source_code(text.to_string()); Formatter::new(&mut Termcolor(&mut write)) .write_markup(markup! { {PrintDiagnostic::verbose(&error)} }) .expect("failed to emit diagnostic"); write!( actual, "--\n{}", std::str::from_utf8(write.as_slice()).expect("non utf8 in error buffer") ) .unwrap(); } write!(actual, "--\n{}", text).unwrap(); let path = path.with_extension("rast"); expect_file![path].assert_eq(&actual) } mod parser { mod ok { tests_macros::gen_tests! 
{"test_data/inline/ok/**/*.{js,ts,jsx,tsx}", crate::tests::run_and_expect_no_errors, ""} } mod err { tests_macros::gen_tests! {"test_data/inline/err/**/*.{js,ts,jsx,tsx}", crate::tests::run_and_expect_errors, ""} } } fn assert_errors_are_present(program: &Parse<AnyJsRoot>, path: &Path) { assert!( !program.diagnostics().is_empty(), "There should be errors in the file {:?}\nSyntax Tree: {:#?}", path.display(), program.syntax() ); } #[test] pub fn test_trivia_attached_to_tokens() { let text = "/**/let a = 1; // nice variable \n /*hey*/ let \t b = 2; // another nice variable"; let m = parse_module(text, JsParserOptions::default()); let mut tokens = m.syntax().descendants_tokens(Direction::Next); let is_let = |x: &JsSyntaxToken| x.text_trimmed() == "let"; let first_let = tokens.find(is_let).unwrap(); // first let leading trivia asserts let pieces: Vec<_> = first_let.leading_trivia().pieces().collect(); assert!(matches!(pieces.get(0).map(|x| x.text()), Some("/**/"))); assert!(matches!(pieces.get(1), None)); // first let trailing trivia asserts let pieces: Vec<_> = first_let.trailing_trivia().pieces().collect(); assert!(matches!(pieces.get(0).map(|x| x.text()), Some(" "))); assert!(matches!(pieces.get(1), None)); // second let leading trivia asserts let second_let = tokens.find(is_let).unwrap(); let pieces: Vec<_> = second_let.leading_trivia().pieces().collect(); assert_eq!(4, pieces.len()); assert!(matches!(pieces.get(0).map(|x| x.text()), Some("\n"))); assert!(matches!(pieces.get(1).map(|x| x.text()), Some(" "))); assert!(matches!(pieces.get(2).map(|x| x.text()), Some("/*hey*/"))); assert!(matches!(pieces.get(3).map(|x| x.text()), Some(" "))); // second let trailing trivia asserts let pieces: Vec<_> = second_let.trailing_trivia().pieces().collect(); assert_eq!(1, pieces.len()); assert!(matches!(pieces.get(0).map(|x| x.text()), Some(" \t "))); } #[test] pub fn jsroot_display_text_and_trimmed() { let code = " let a = 1; \n "; let root = parse_module(code, 
JsParserOptions::default()); let syntax = root.syntax(); assert_eq!(format!("{}", syntax), code); let syntax_text = syntax.text(); assert_eq!(format!("{}", syntax_text), code); let syntax_text = syntax.text_trimmed(); assert_eq!(format!("{}", syntax_text), code.trim()); } #[test] pub fn jsroot_ranges() { // 0123456789A let code = " let a = 1;"; let root = parse_module(code, JsParserOptions::default()); let syntax = root.syntax(); let first_let = syntax.first_token().unwrap(); let range = first_let.text_range(); assert_eq!(0usize, usize::from(range.start())); assert_eq!(5usize, usize::from(range.end())); let range = first_let.text_trimmed_range(); assert_eq!(1usize, usize::from(range.start())); assert_eq!(4usize, usize::from(range.end())); let eq = syntax .descendants_tokens(Direction::Next) .find(|x| x.text_trimmed() == "=") .unwrap(); let range = eq.text_range(); assert_eq!(7usize, usize::from(range.start())); assert_eq!(9usize, usize::from(range.end())); let range = eq.text_trimmed_range(); assert_eq!(7usize, usize::from(range.start())); assert_eq!(8usize, usize::from(range.end())); } #[test] pub fn node_range_must_be_correct() { // 0123456789A123456789B123456789 let text = " function foo() { let a = 1; }"; let root = parse_module(text, JsParserOptions::default()); let var_decl = root .syntax() .descendants() .find(|x| x.kind() == JsSyntaxKind::JS_VARIABLE_STATEMENT) .unwrap(); let range = var_decl.text_range(); assert_eq!(18usize, usize::from(range.start())); assert_eq!(29usize, usize::from(range.end())); let range = var_decl.text_trimmed_range(); assert_eq!(18usize, usize::from(range.start())); assert_eq!(28usize, usize::from(range.end())); } #[test] pub fn last_trivia_must_be_appended_to_eof() { // 0123456789A123456789B123456789CC let text = " function foo() { let a = 1; }\n"; let root = parse_module(text, JsParserOptions::default()); let syntax = root.syntax(); let range = syntax.text_range(); let start = range.start(); let end = range.end(); 
assert_eq!(TextSize::from(0), start); assert_eq!(TextSize::from(31), end); } #[test] pub fn just_trivia_must_be_appended_to_eof() { // 0123456789A123456789B123456789C123 let text = "// just trivia... nothing else...."; let root = parse_module(text, JsParserOptions::default()); let syntax = root.syntax(); let range = syntax.text_range(); let start = range.start(); let end = range.end(); assert_eq!(TextSize::from(0), start); assert_eq!(TextSize::from(34), end); } #[test] pub fn node_contains_comments() { let text = "true && true // comment"; let root = parse_module(text, JsParserOptions::default()); let syntax = root.syntax(); assert!(syntax.has_comments_descendants()); } #[test] fn parser_regexp_after_operator() { fn assert_no_errors(src: &str) { let module = parse(src, JsFileSource::js_script(), JsParserOptions::default()); assert_errors_are_absent(&module, Path::new("parser_regexp_after_operator")); } assert_no_errors(r#"a=/a/"#); assert_no_errors(r#"a==/a/"#); assert_no_errors(r#"a===/a/"#); assert_no_errors(r#"a!=/a/"#); assert_no_errors(r#"a!==/a/"#); } #[test] pub fn node_contains_trailing_comments() { let text = "true && (3 - 2 == 0) // comment"; let root = parse_module(text, JsParserOptions::default()); let syntax = root.syntax(); let node = syntax .descendants() .find(|n| n.kind() == JsSyntaxKind::JS_LOGICAL_EXPRESSION) .unwrap(); let logical_expression = JsLogicalExpression::cast(node).unwrap(); let right = logical_expression.right().unwrap(); assert!(right.syntax().has_trailing_comments()); assert!(!right.syntax().has_leading_comments()); } #[test] pub fn node_contains_leading_comments() { let text = r"true && // comment (3 - 2 == 0)"; let root = parse_module(text, JsParserOptions::default()); let syntax = root.syntax(); let node = syntax .descendants() .find(|n| n.kind() == JsSyntaxKind::JS_LOGICAL_EXPRESSION) .unwrap(); let logical_expression = JsLogicalExpression::cast(node).unwrap(); let right = logical_expression.right().unwrap(); 
assert!(right.syntax().has_leading_comments()); assert!(!right.syntax().has_trailing_comments()); } #[test] pub fn node_has_comments() { let text = r"true && // comment (3 - 2 == 0)"; let root = parse_module(text, JsParserOptions::default()); let syntax = root.syntax(); let node = syntax .descendants() .find(|n| n.kind() == JsSyntaxKind::JS_LOGICAL_EXPRESSION) .unwrap(); let logical_expression = JsLogicalExpression::cast(node).unwrap(); let right = logical_expression.right().unwrap(); assert!(right.syntax().has_comments_direct()); } #[test] fn diagnostics_print_correctly() { let text = r"const a"; let root = parse_module(text, JsParserOptions::default()); for diagnostic in root.diagnostics() { let mut write = rome_diagnostics::termcolor::Buffer::no_color(); let error = diagnostic .clone() .with_file_path("example.js") .with_file_source_code(text.to_string()); Formatter::new(&mut Termcolor(&mut write)) .write_markup(markup! { {PrintDiagnostic::verbose(&error)} }) .expect("failed to emit diagnostic"); eprintln!( "{}", std::str::from_utf8(write.as_slice()).expect("non utf8 in error buffer") ); } } #[ignore] #[test] pub fn quick_test() { let code = r#" (@dec a) => {} "#; let root = parse( code, JsFileSource::ts(), JsParserOptions::default().with_parse_class_parameter_decorators(), ); let syntax = root.syntax(); dbg!(&syntax, root.diagnostics(), root.has_errors()); if has_bogus_nodes_or_empty_slots(&syntax) { panic!( "modified tree has bogus nodes or empty slots:\n{syntax:#?} \n\n {}", syntax ) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/token_source.rs
crates/rome_js_parser/src/token_source.rs
use crate::lexer::{BufferedLexer, LexContext, Lexer, LexerCheckpoint, ReLexContext, TextRange}; use crate::prelude::*; use rome_js_syntax::JsSyntaxKind; use rome_js_syntax::JsSyntaxKind::EOF; use rome_parser::token_source::Trivia; use rome_rowan::{TextSize, TriviaPieceKind}; use std::collections::VecDeque; /// Token source for the parser that skips over any non-trivia token. pub struct JsTokenSource<'l> { lexer: BufferedLexer<'l>, /// List of the skipped trivia. Needed to construct the CST and compute the non-trivia token offsets. pub(super) trivia_list: Vec<Trivia>, /// Cache for the non-trivia token lookahead. For example for the source `let a = 10;` if the /// [TokenSource]'s currently positioned at the start of the file (`let`). The `nth(2)` non-trivia token, /// as returned by the [TokenSource], is the `=` token but retrieving it requires skipping over the /// two whitespace trivia tokens (first between `let` and `a`, second between `a` and `=`). /// The [TokenSource] state then is: /// /// * `non_trivia_lookahead`: [IDENT: 'a', EQ] /// * `lookahead_offset`: 4 (the `=` is the 4th token after the `let` keyword) non_trivia_lookahead: VecDeque<Lookahead>, /// Offset of the last cached lookahead token from the current [BufferedLexer] token. lookahead_offset: usize, } #[derive(Debug, Copy, Clone)] struct Lookahead { kind: JsSyntaxKind, after_newline: bool, } impl<'l> JsTokenSource<'l> { /// Creates a new token source. 
pub(crate) fn new(lexer: BufferedLexer<'l>) -> JsTokenSource<'l> { JsTokenSource { lexer, trivia_list: vec![], lookahead_offset: 0, non_trivia_lookahead: VecDeque::new(), } } /// Creates a new token source for the given string pub fn from_str(source: &'l str) -> JsTokenSource<'l> { let lexer = Lexer::from_str(source); let buffered = BufferedLexer::new(lexer); let mut source = JsTokenSource::new(buffered); source.next_non_trivia_token(LexContext::default(), true); source } #[inline] fn next_non_trivia_token(&mut self, context: LexContext, first_token: bool) { let mut processed_tokens = 0; let mut trailing = !first_token; // Drop the last cached lookahead, we're now moving past it self.non_trivia_lookahead.pop_front(); loop { let kind = self.lexer.next_token(context); processed_tokens += 1; let trivia_kind = TriviaPieceKind::try_from(kind); match trivia_kind { Err(_) => break, Ok(trivia_kind) => { // Trivia after and including the newline is considered the leading trivia of the next token if trivia_kind.is_newline() { trailing = false; } self.trivia_list .push(Trivia::new(trivia_kind, self.current_range(), trailing)); } } } if self.lookahead_offset != 0 { debug_assert!(self.lookahead_offset >= processed_tokens); self.lookahead_offset -= processed_tokens; } } #[inline(always)] pub fn has_unicode_escape(&self) -> bool { self.lexer.has_unicode_escape() } /// Returns `true` if the next token has any preceding trivia (either trailing trivia of the current /// token or leading trivia of the next token) pub fn has_next_preceding_trivia(&mut self) -> bool { let next_token_trivia = self .lexer .lookahead() .next() .and_then(|lookahead| TriviaPieceKind::try_from(lookahead.kind()).ok()); next_token_trivia.is_some() } #[inline(always)] fn lookahead(&mut self, n: usize) -> Option<Lookahead> { assert_ne!(n, 0); // Return the cached token if any if let Some(lookahead) = self.non_trivia_lookahead.get(n - 1) { return Some(*lookahead); } // Jump right to where we've left of last time 
rather than going through all tokens again. let iter = self.lexer.lookahead().skip(self.lookahead_offset); let mut remaining = n - self.non_trivia_lookahead.len(); for item in iter { self.lookahead_offset += 1; if !item.kind().is_trivia() { remaining -= 1; let lookahead = Lookahead { after_newline: item.has_preceding_line_break(), kind: item.kind(), }; self.non_trivia_lookahead.push_back(lookahead); if remaining == 0 { return Some(lookahead); } } } None } pub fn re_lex(&mut self, mode: ReLexContext) -> JsSyntaxKind { let current_kind = self.current(); let new_kind = self.lexer.re_lex(mode); // Only need to clear the lookahead cache when the token did change if current_kind != new_kind { self.non_trivia_lookahead.clear(); self.lookahead_offset = 0; } new_kind } /// Creates a checkpoint to which it can later return using [Self::rewind]. pub fn checkpoint(&self) -> TokenSourceCheckpoint { TokenSourceCheckpoint { trivia_len: self.trivia_list.len() as u32, lexer: self.lexer.checkpoint(), } } /// Restores the token source to a previous state pub fn rewind(&mut self, checkpoint: TokenSourceCheckpoint) { assert!(self.trivia_list.len() >= checkpoint.trivia_len as usize); self.trivia_list.truncate(checkpoint.trivia_len as usize); self.lexer.rewind(checkpoint.lexer); self.non_trivia_lookahead.clear(); self.lookahead_offset = 0; } } impl<'source> TokenSource for JsTokenSource<'source> { type Kind = JsSyntaxKind; /// Returns the kind of the current non-trivia token #[inline(always)] fn current(&self) -> JsSyntaxKind { self.lexer.current() } /// Returns the range of the current non-trivia token #[inline(always)] fn current_range(&self) -> TextRange { self.lexer.current_range() } #[inline(always)] fn text(&self) -> &'source str { self.lexer.source() } #[inline(always)] fn position(&self) -> TextSize { self.current_range().start() } /// Returns true if the current token is preceded by a line break #[inline(always)] fn has_preceding_line_break(&self) -> bool { 
self.lexer.has_preceding_line_break() } #[inline(always)] fn bump(&mut self) { self.bump_with_context(LexContext::Regular) } fn skip_as_trivia(&mut self) { self.skip_as_trivia_with_context(LexContext::Regular) } fn finish(self) -> (Vec<Trivia>, Vec<ParseDiagnostic>) { (self.trivia_list, self.lexer.finish()) } } impl<'source> BumpWithContext for JsTokenSource<'source> { type Context = LexContext; #[inline(always)] fn bump_with_context(&mut self, context: Self::Context) { if self.current() != EOF { if !context.is_regular() { self.lookahead_offset = 0; self.non_trivia_lookahead.clear(); } self.next_non_trivia_token(context, false); } } /// Skips the current token as skipped token trivia fn skip_as_trivia_with_context(&mut self, context: LexContext) { if self.current() != EOF { if !context.is_regular() { self.lookahead_offset = 0; self.non_trivia_lookahead.clear(); } self.trivia_list.push(Trivia::new( TriviaPieceKind::Skipped, self.current_range(), false, )); self.next_non_trivia_token(context, true) } } } impl<'source> NthToken for JsTokenSource<'source> { /// Gets the kind of the nth non-trivia token #[inline(always)] fn nth(&mut self, n: usize) -> JsSyntaxKind { if n == 0 { self.current() } else { self.lookahead(n).map_or(EOF, |lookahead| lookahead.kind) } } /// Returns true if the nth non-trivia token is preceded by a line break #[inline(always)] fn has_nth_preceding_line_break(&mut self, n: usize) -> bool { if n == 0 { self.has_preceding_line_break() } else { self.lookahead(n) .map_or(false, |lookahead| lookahead.after_newline) } } } #[derive(Debug)] pub struct TokenSourceCheckpoint { lexer: LexerCheckpoint, /// A `u32` should be enough because `TextSize` is also limited to `u32`. /// The worst case is a document where every character is its own token. 
This would /// result in `u32::MAX` tokens trivia_len: u32, } impl TokenSourceCheckpoint { /// byte offset in the source text pub(crate) fn current_start(&self) -> TextSize { self.lexer.current_start() } pub(crate) fn trivia_position(&self) -> usize { self.trivia_len as usize } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/parser.rs
crates/rome_js_parser/src/parser.rs
//! The physical parser structure. //! This may not hold your expectations of a traditional parser, //! the parser yields events like `Start node`, `Error`, etc. //! These events are then applied to a `TreeSink`. use rome_parser::*; pub(crate) mod rewrite_parser; pub(crate) mod single_token_parse_recovery; use crate::lexer::ReLexContext; pub(crate) use crate::parser::parse_recovery::{ParseRecovery, RecoveryError, RecoveryResult}; use crate::prelude::*; use crate::state::{ChangeParserState, ParserStateGuard}; use crate::*; use crate::{ state::ParserStateCheckpoint, token_source::{JsTokenSource, TokenSourceCheckpoint}, }; pub(crate) use parsed_syntax::ParsedSyntax; use rome_js_syntax::{ JsFileSource, JsSyntaxKind::{self}, }; use rome_parser::diagnostic::merge_diagnostics; use rome_parser::event::Event; use rome_parser::token_source::Trivia; use rome_parser::{ParserContext, ParserContextCheckpoint}; /// An extremely fast, error tolerant, completely lossless JavaScript parser /// /// The Parser yields lower level events instead of nodes. /// These events are then processed into a syntax tree through a [`TreeSink`] implementation. pub struct JsParser<'source> { pub(super) state: ParserState, pub source_type: JsFileSource, context: ParserContext<JsSyntaxKind>, source: JsTokenSource<'source>, options: JsParserOptions, } impl<'source> JsParser<'source> { /// Creates a new parser that parses the `source`. 
pub fn new(source: &'source str, source_type: JsFileSource, options: JsParserOptions) -> Self { let source = JsTokenSource::from_str(source); JsParser { state: ParserState::new(&source_type), source_type, context: ParserContext::default(), source, options, } } pub(crate) fn state(&self) -> &ParserState { &self.state } pub(crate) fn options(&self) -> &JsParserOptions { &self.options } pub(crate) fn state_mut(&mut self) -> &mut ParserState { &mut self.state } pub fn source_type(&self) -> JsFileSource { self.source_type } /// Whether the code we are parsing is a module pub const fn is_module(&self) -> bool { self.source_type.module_kind().is_module() } /// Re-lexes the current token in the specified context. Returns the kind /// of the re-lexed token (can be the same as before if the context doesn't make a difference for the current token) pub fn re_lex(&mut self, context: ReLexContext) -> JsSyntaxKind { self.source_mut().re_lex(context) } /// Stores the parser state and position before calling the function and restores the state /// and position before returning. /// /// Useful in situation where the parser must advance a few tokens to determine whatever a syntax is /// of one or the other kind. #[inline] pub fn lookahead<F, R>(&mut self, op: F) -> R where F: FnOnce(&mut JsParser) -> R, { let checkpoint = self.checkpoint(); let result = op(self); self.rewind(checkpoint); result } /// Applies the passed in change to the parser's state and reverts the /// changes when the returned [ParserStateGuard] goes out of scope. pub(crate) fn with_scoped_state<'p, C: ChangeParserState>( &'p mut self, change: C, ) -> ParserStateGuard<'p, 'source, C> { let snapshot = change.apply(self.state_mut()); ParserStateGuard::new(self, snapshot) } /// Applies the passed in change to the parser state before applying the passed `func` and /// restores the state to before the change before returning the result. 
#[inline] pub(crate) fn with_state<C, F, R>(&mut self, change: C, func: F) -> R where C: ChangeParserState, F: FnOnce(&mut JsParser) -> R, { let snapshot = change.apply(self.state_mut()); let result = func(self); C::restore(self.state_mut(), snapshot); result } pub fn checkpoint(&self) -> JsParserCheckpoint { JsParserCheckpoint { context: self.context.checkpoint(), source: self.source.checkpoint(), state: self.state.checkpoint(), } } pub fn rewind(&mut self, checkpoint: JsParserCheckpoint) { let JsParserCheckpoint { context, source, state, } = checkpoint; self.context.rewind(context); self.source.rewind(source); self.state.restore(state); } pub fn finish(self) -> (Vec<Event<JsSyntaxKind>>, Vec<Trivia>, Vec<ParseDiagnostic>) { let (trivia, source_diagnostics) = self.source.finish(); let (events, parse_diagnostics) = self.context.finish(); let diagnostics = merge_diagnostics(source_diagnostics, parse_diagnostics); (events, trivia, diagnostics) } } impl<'source> Parser for JsParser<'source> { type Kind = JsSyntaxKind; type Source = JsTokenSource<'source>; fn context(&self) -> &ParserContext<Self::Kind> { &self.context } fn context_mut(&mut self) -> &mut ParserContext<Self::Kind> { &mut self.context } fn source(&self) -> &Self::Source { &self.source } fn source_mut(&mut self) -> &mut Self::Source { &mut self.source } fn is_speculative_parsing(&self) -> bool { self.state.speculative_parsing } fn do_bump_with_context( &mut self, kind: Self::Kind, context: <Self::Source as BumpWithContext>::Context, ) where Self::Source: BumpWithContext, { let kind = if kind.is_keyword() && self.source().has_unicode_escape() { self.error(self.err_builder( format!( "'{}' keyword cannot contain escape character.", kind.to_string().expect("to return a value for a keyword") ), self.cur_range(), )); JsSyntaxKind::ERROR_TOKEN } else { kind }; let end = self.cur_range().end(); self.context_mut().push_token(kind, end); if self.context().is_skipping() { 
self.source_mut().skip_as_trivia_with_context(context); } else { self.source_mut().bump_with_context(context); } } fn do_bump(&mut self, kind: Self::Kind) { self.do_bump_with_context(kind, LexContext::Regular) } } pub struct JsParserCheckpoint { pub(super) context: ParserContextCheckpoint, pub(super) source: TokenSourceCheckpoint, state: ParserStateCheckpoint, } #[cfg(test)] mod tests { use crate::prelude::*; use crate::JsParserOptions; use rome_js_syntax::{JsFileSource, JsSyntaxKind}; #[test] #[should_panic( expected = "Marker must either be `completed` or `abandoned` to avoid that children are implicitly attached to a marker's parent." )] fn uncompleted_markers_panic() { let mut parser = JsParser::new( "'use strict'", JsFileSource::default(), JsParserOptions::default(), ); let _ = parser.start(); // drop the marker without calling complete or abandon } #[test] fn completed_marker_doesnt_panic() { let mut p = JsParser::new( "'use strict'", JsFileSource::default(), JsParserOptions::default(), ); let m = p.start(); p.expect(JsSyntaxKind::JS_STRING_LITERAL); m.complete(&mut p, JsSyntaxKind::JS_STRING_LITERAL_EXPRESSION); } #[test] fn abandoned_marker_doesnt_panic() { let mut p = JsParser::new( "'use strict'", JsFileSource::default(), JsParserOptions::default(), ); let m = p.start(); m.abandon(&mut p); } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/rewrite.rs
crates/rome_js_parser/src/rewrite.rs
//! Events emitted by the Parser which are then constructed into a syntax tree use crate::lexer::TextSize; use crate::parser::rewrite_parser::{RewriteParser, RewriteToken}; use crate::parser::JsParserCheckpoint; use crate::JsParser; use rome_js_syntax::JsSyntaxKind; use rome_parser::tree_sink::TreeSink; use rome_parser::{event::process, prelude::*}; struct RewriteParseEventsTreeSink<'r, 'p, T> { reparse: &'r mut T, parser: RewriteParser<'r, 'p>, } impl<'r, 'p, T: RewriteParseEvents> TreeSink for RewriteParseEventsTreeSink<'r, 'p, T> { type Kind = JsSyntaxKind; fn token(&mut self, kind: JsSyntaxKind, end: TextSize) { self.reparse .token(RewriteToken::new(kind, end), &mut self.parser); } fn start_node(&mut self, kind: JsSyntaxKind) { self.reparse.start_node(kind, &mut self.parser); } fn finish_node(&mut self) { self.reparse.finish_node(&mut self.parser); } fn errors(&mut self, _errors: Vec<ParseDiagnostic>) {} } /// Implement this trait if you want to change the tree structure /// from already parsed events. pub(crate) trait RewriteParseEvents { /// Called for a started node in the original tree fn start_node(&mut self, kind: JsSyntaxKind, p: &mut RewriteParser); /// Called for a finished node in the original tree fn finish_node(&mut self, p: &mut RewriteParser); /// Called for every token fn token(&mut self, token: RewriteToken, p: &mut RewriteParser) { p.bump(token) } } /// Allows rewriting a super grammar to a sub grammar by visiting each event emitted after the checkpoint. /// Useful if a node turned out to be of a different kind its subtree must be re-shaped /// (adding new nodes, dropping sub nodes, etc.). pub(crate) fn rewrite_events<T: RewriteParseEvents>( rewriter: &mut T, checkpoint: JsParserCheckpoint, p: &mut JsParser, ) { // Only rewind the events but do not reset the parser errors nor parser state. // The current parsed grammar is a super-set of the grammar that gets re-parsed. 
Thus, any // error that applied to the old grammar also applies to the sub-grammar. let events: Vec<_> = unsafe { p.context_mut() .split_off_events(checkpoint.context.event_position() + 1) }; let mut sink = RewriteParseEventsTreeSink { parser: RewriteParser::new(p, checkpoint.source), reparse: rewriter, }; process(&mut sink, events, Vec::default()); sink.parser.finish(); }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/state.rs
crates/rome_js_parser/src/state.rs
use crate::prelude::*; use bitflags::bitflags; use indexmap::IndexMap; use rome_js_syntax::JsFileSource; use rome_rowan::{TextRange, TextSize}; use std::collections::HashSet; use std::ops::{Deref, DerefMut, Range}; type LabelSet = IndexMap<String, LabelledItem>; #[derive(Debug, Clone, Eq, PartialEq)] pub(crate) enum LabelledItem { Iteration(TextRange), Other(TextRange), } impl LabelledItem { pub(crate) fn range(&self) -> &TextRange { match self { LabelledItem::Iteration(range) | LabelledItem::Other(range) => range, } } } #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub(crate) enum ExportDefaultItemKind { Expression, FunctionOverload, FunctionDeclaration, Interface, // Any other declaration Declaration, } impl ExportDefaultItemKind { pub(crate) fn is_overload(&self) -> bool { matches!(self, ExportDefaultItemKind::FunctionOverload) } pub(crate) fn is_function_declaration(&self) -> bool { matches!(self, ExportDefaultItemKind::FunctionDeclaration) } pub(crate) fn is_interface(&self) -> bool { matches!(self, ExportDefaultItemKind::Interface) } } #[derive(Debug, Clone, Eq, PartialEq)] pub(crate) struct ExportDefaultItem { pub kind: ExportDefaultItemKind, pub range: Range<usize>, } /// State kept by the parser while parsing. /// It is required for things such as strict mode or async functions #[derive(Debug)] pub(crate) struct ParserState { parsing_context: ParsingContextFlags, /// A list of labels for labelled statements used to report undefined label errors /// for break and continue, as well as duplicate labels. /// Often called label set in the spec. label_set: LabelSet, /// Whether we are in strict mode code strict: Option<StrictMode>, /// The exported default item, used for checking duplicate defaults pub default_item: Option<ExportDefaultItem>, /// If set, the parser reports bindings with identical names. The option stores the name of the /// node that disallows duplicate bindings, for example `let`, `const` or `import`. 
pub duplicate_binding_parent: Option<&'static str>, pub name_map: IndexMap<String, TextRange>, /// Indicates that the parser is speculatively parsing a syntax. Speculative parsing means that the /// parser tries to parse a syntax as one kind and determines at the end if the assumption was right /// by testing if the parser is at a specific token (or has no errors). For this approach to work, /// the parser isn't allowed to skip any tokens while doing error recovery because it may then successfully /// skip over all invalid tokens, so that it appears as if it was able to parse the syntax correctly. /// /// Speculative parsing is useful if a syntax is ambiguous and no amount of lookahead (except parsing the whole syntax) /// is sufficient to determine what syntax it is. For example, the syntax `(a, b) ...` /// in JavaScript is either a parenthesized expression or an arrow expression if `...` is a `=>`. /// The challenge is, that it isn't possible to tell which of the two kinds it is until the parser /// processed all of `(a, b)`. pub(crate) speculative_parsing: bool, /// Stores the token positions of all syntax that looks like an arrow expressions but aren't one. /// Optimization to reduce the back-tracking required when parsing parenthesized and arrow function expressions. 
pub(crate) not_parenthesized_arrow: HashSet<TextSize>, } #[derive(Debug, Clone, PartialEq, Eq)] pub(crate) enum StrictMode { Module, Explicit(TextRange), Class(TextRange), } impl ParserState { pub fn new(source_type: &JsFileSource) -> Self { let mut state = ParserState { parsing_context: ParsingContextFlags::TOP_LEVEL, label_set: IndexMap::new(), strict: source_type .module_kind() .is_module() .then_some(StrictMode::Module), default_item: None, name_map: IndexMap::new(), duplicate_binding_parent: None, not_parenthesized_arrow: Default::default(), speculative_parsing: false, }; if source_type.module_kind().is_module() { state.parsing_context |= ParsingContextFlags::IN_ASYNC } // test d.ts arguments_in_definition_file // function a(...arguments: any[]): void; if source_type.language().is_definition_file() { EnterAmbientContext.apply(&mut state); } state } pub fn in_function(&self) -> bool { self.parsing_context .contains(ParsingContextFlags::IN_FUNCTION) } pub fn in_generator(&self) -> bool { self.parsing_context .contains(ParsingContextFlags::IN_GENERATOR) } pub fn in_async(&self) -> bool { self.parsing_context.contains(ParsingContextFlags::IN_ASYNC) } pub fn in_ambient_context(&self) -> bool { self.parsing_context .contains(ParsingContextFlags::AMBIENT_CONTEXT) } pub fn in_constructor(&self) -> bool { self.parsing_context .contains(ParsingContextFlags::IN_CONSTRUCTOR) } pub fn is_top_level(&self) -> bool { self.parsing_context .contains(ParsingContextFlags::TOP_LEVEL) } pub fn continue_allowed(&self) -> bool { self.parsing_context .contains(ParsingContextFlags::CONTINUE_ALLOWED) } pub fn break_allowed(&self) -> bool { self.parsing_context .contains(ParsingContextFlags::BREAK_ALLOWED) } pub fn strict(&self) -> Option<&StrictMode> { self.strict.as_ref() } pub fn get_labelled_item(&self, label: &str) -> Option<&LabelledItem> { self.label_set.get(label) } pub(super) fn checkpoint(&self) -> ParserStateCheckpoint { ParserStateCheckpoint::snapshot(self) } pub(super) fn 
restore(&mut self, checkpoint: ParserStateCheckpoint) { checkpoint.rewind(self); } } /// Stores a checkpoint of the [ParserState]. /// Allows rewinding the state to its previous state. /// /// It's important that creating and rewinding a snapshot is cheap. Consider the performance implications /// before adding new unscoped state. #[derive(Debug)] pub(super) struct ParserStateCheckpoint { /// Additional data that we only want to store in debug mode #[cfg(debug_assertions)] debug_checkpoint: DebugParserStateCheckpoint, } impl ParserStateCheckpoint { /// Creates a snapshot of the passed in state. #[cfg(debug_assertions)] fn snapshot(state: &ParserState) -> Self { Self { debug_checkpoint: DebugParserStateCheckpoint::snapshot(state), } } #[cfg(not(debug_assertions))] fn snapshot(_: &ParserState) -> Self { Self {} } /// Restores the `state values` to the time when this snapshot was created. #[cfg(debug_assertions)] fn rewind(self, state: &mut ParserState) { self.debug_checkpoint.rewind(state); } #[cfg(not(debug_assertions))] fn rewind(self, _: &ParserState) {} } /// Most of the [ParserState] is scoped state. It should, therefore, not be necessary to rewind /// that state because that's already taken care of by `with_state` and `with_scoped_state`. /// But, you can never no and better be safe than sorry. That's why we use some heuristics /// to verify that non of the scoped state did change and assert for it when rewinding. 
#[derive(Debug, Clone)] #[cfg(debug_assertions)] pub(super) struct DebugParserStateCheckpoint { parsing_context: ParsingContextFlags, label_set_len: usize, strict: Option<StrictMode>, default_item: Option<ExportDefaultItem>, duplicate_binding_parent: Option<&'static str>, name_map_len: usize, } #[cfg(debug_assertions)] impl DebugParserStateCheckpoint { fn snapshot(state: &ParserState) -> Self { Self { parsing_context: state.parsing_context, label_set_len: state.label_set.len(), strict: state.strict.clone(), default_item: state.default_item.clone(), duplicate_binding_parent: state.duplicate_binding_parent, name_map_len: state.name_map.len(), } } fn rewind(self, state: &mut ParserState) { assert_eq!(state.parsing_context, self.parsing_context); assert_eq!(state.label_set.len(), self.label_set_len); assert_eq!(state.strict, self.strict); assert_eq!(state.default_item, self.default_item); assert_eq!( state.duplicate_binding_parent, self.duplicate_binding_parent ); assert_eq!(state.name_map.len(), self.name_map_len); } } /// Reverts state changes to their previous value when it goes out of scope. /// Can be used like a regular parser. 
pub(crate) struct ParserStateGuard<'parser, 't, C> where C: ChangeParserState, { snapshot: C::Snapshot, inner: &'parser mut JsParser<'t>, } impl<'parser, 't, C: ChangeParserState> ParserStateGuard<'parser, 't, C> { pub(super) fn new(parser: &'parser mut JsParser<'t>, snapshot: C::Snapshot) -> Self { Self { snapshot, inner: parser, } } } impl<'parser, 't, C: ChangeParserState> Drop for ParserStateGuard<'parser, 't, C> { fn drop(&mut self) { let snapshot = std::mem::take(&mut self.snapshot); C::restore(self.inner.state_mut(), snapshot); } } impl<'parser, 't, C: ChangeParserState> Deref for ParserStateGuard<'parser, 't, C> { type Target = JsParser<'t>; fn deref(&self) -> &Self::Target { self.inner } } impl<'parser, 't, C: ChangeParserState> DerefMut for ParserStateGuard<'parser, 't, C> { fn deref_mut(&mut self) -> &mut Self::Target { self.inner } } /// Implements a specific modification to the parser state that can later be reverted. pub(crate) trait ChangeParserState { type Snapshot: Default; /// Applies the change to the passed in state and returns snapshot that allows restoring the previous state. fn apply(self, state: &mut ParserState) -> Self::Snapshot; /// Restores the state to its previous value fn restore(state: &mut ParserState, value: Self::Snapshot); } #[derive(Default, Debug)] pub struct EnableStrictModeSnapshot(Option<StrictMode>); /// Enables strict mode pub(crate) struct EnableStrictMode(pub StrictMode); impl ChangeParserState for EnableStrictMode { type Snapshot = EnableStrictModeSnapshot; #[inline] fn apply(self, state: &mut ParserState) -> Self::Snapshot { EnableStrictModeSnapshot(std::mem::replace(&mut state.strict, Some(self.0))) } #[inline] fn restore(state: &mut ParserState, value: Self::Snapshot) { state.strict = value.0 } } bitflags! { #[derive(Debug, Default, Copy, Clone, Eq, PartialEq)] /// Flags describing the context of a function. 
pub(crate) struct SignatureFlags: u8 { /// Is the function in an async context const ASYNC = 1 << 0; /// Is the function in a generator context const GENERATOR = 1 << 1; /// Is the function a constructor (or constructor context) const CONSTRUCTOR = 1 << 2; } } impl From<SignatureFlags> for ParsingContextFlags { fn from(flags: SignatureFlags) -> Self { let mut parsing_context = ParsingContextFlags::empty(); if flags.contains(SignatureFlags::ASYNC) { parsing_context |= ParsingContextFlags::IN_ASYNC; } if flags.contains(SignatureFlags::GENERATOR) { parsing_context |= ParsingContextFlags::IN_GENERATOR; } if flags.contains(SignatureFlags::CONSTRUCTOR) { parsing_context |= ParsingContextFlags::IN_CONSTRUCTOR; } parsing_context } } bitflags! { /// Flags representing the parsing state. /// The reasons to use flags instead of individual boolean fields on `ParserState` are: /// * It's possible to use bit masks to define what state should be inherited. For example, /// functions inherit whether they're defined inside a parameter but override the `in_async` flag /// * It's easier to snapshot the previous state. Individual boolean fields would require that a change /// snapshots each individual boolean field to allow restoring the previous state. With bitflags, all that /// is needed is to copy away the flags field and restore it after. #[derive(Debug, Copy, Default, Clone, Eq, PartialEq)] pub(crate) struct ParsingContextFlags: u8 { /// Whether the parser is in a generator function like `function* a() {}` /// Matches the `Yield` parameter in the ECMA spec const IN_GENERATOR = 1 << 0; /// Whether the parser is inside a function const IN_FUNCTION = 1 << 1; /// Whatever the parser is inside a constructor const IN_CONSTRUCTOR = 1 << 2; /// Is async allowed in this context. Either because it's an async function or top level await is supported. 
/// Equivalent to the `Async` generator in the ECMA spec const IN_ASYNC = 1 << 3; /// Whether the parser is parsing a top-level statement (not inside a class, function, parameter) or not const TOP_LEVEL = 1 << 4; /// Whether the parser is in an iteration or switch statement and /// `break` is allowed. const BREAK_ALLOWED = 1 << 5; /// Whether the parser is in an iteration statement and `continue` is allowed. const CONTINUE_ALLOWED = 1 << 6; /// Whatever the parser is in a TypeScript ambient context const AMBIENT_CONTEXT = 1 << 7; const LOOP = Self::BREAK_ALLOWED.bits() | Self::CONTINUE_ALLOWED.bits(); /// Bitmask of all the flags that must be reset (shouldn't be inherited) when the parser enters a function const FUNCTION_RESET_MASK = Self::BREAK_ALLOWED.bits() | Self::CONTINUE_ALLOWED.bits() | Self::IN_CONSTRUCTOR.bits() | Self::IN_ASYNC.bits() | Self::IN_GENERATOR.bits() | Self::TOP_LEVEL.bits(); /// Bitmask of all the flags that must be reset (shouldn't be inherited) when entering parameters. 
const PARAMETER_RESET_MASK = Self::IN_CONSTRUCTOR.bits() | Self::IN_FUNCTION.bits() | Self::TOP_LEVEL.bits() | Self::IN_GENERATOR.bits() | Self::IN_ASYNC.bits(); } } #[derive(Debug, Default, Copy, Clone)] pub(crate) struct ParsingContextFlagsSnapshot(ParsingContextFlags); pub(crate) trait ChangeParserStateFlags { fn compute_new_flags(&self, existing: ParsingContextFlags) -> ParsingContextFlags; } impl<T: ChangeParserStateFlags> ChangeParserState for T { type Snapshot = ParsingContextFlagsSnapshot; fn apply(self, state: &mut ParserState) -> Self::Snapshot { let new_flags = self.compute_new_flags(state.parsing_context); ParsingContextFlagsSnapshot(std::mem::replace(&mut state.parsing_context, new_flags)) } fn restore(state: &mut ParserState, value: Self::Snapshot) { state.parsing_context = value.0 } } /// Enters the parsing of function/method parameters pub(crate) struct EnterParameters( /// Whether async and yield are reserved keywords pub(crate) SignatureFlags, ); impl ChangeParserStateFlags for EnterParameters { fn compute_new_flags(&self, existing: ParsingContextFlags) -> ParsingContextFlags { (existing - ParsingContextFlags::PARAMETER_RESET_MASK) | ParsingContextFlags::from(self.0) } } #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub(crate) enum BreakableKind { // Iteration statement like Do, While, For Iteration, // Switch statement Switch, } pub(crate) struct EnterBreakable(pub(crate) BreakableKind); impl ChangeParserStateFlags for EnterBreakable { fn compute_new_flags(&self, existing: ParsingContextFlags) -> ParsingContextFlags { let mut flags = existing | ParsingContextFlags::BREAK_ALLOWED; if self.0 == BreakableKind::Iteration { flags |= ParsingContextFlags::CONTINUE_ALLOWED; } flags } } #[derive(Debug, Clone, Default)] pub struct EnterFunctionSnapshot { parsing_context: ParsingContextFlags, label_set: LabelSet, } /// Enters the parsing of a function/method. 
Resets the relevant parser state and sets the state /// according to the passed [SignatureFlags] pub(crate) struct EnterFunction(pub(crate) SignatureFlags); impl ChangeParserState for EnterFunction { type Snapshot = EnterFunctionSnapshot; #[inline] fn apply(self, state: &mut ParserState) -> Self::Snapshot { let new_flags = (state.parsing_context - ParsingContextFlags::FUNCTION_RESET_MASK) | ParsingContextFlags::IN_FUNCTION | ParsingContextFlags::from(self.0); EnterFunctionSnapshot { parsing_context: std::mem::replace(&mut state.parsing_context, new_flags), label_set: std::mem::take(&mut state.label_set), } } #[inline] fn restore(state: &mut ParserState, value: Self::Snapshot) { state.parsing_context = value.parsing_context; state.label_set = value.label_set; } } pub(crate) struct EnterClassPropertyInitializer; impl ChangeParserStateFlags for EnterClassPropertyInitializer { fn compute_new_flags(&self, existing: ParsingContextFlags) -> ParsingContextFlags { existing - ParsingContextFlags::TOP_LEVEL - ParsingContextFlags::IN_ASYNC - ParsingContextFlags::IN_GENERATOR } } #[derive(Default, Debug, Clone)] pub(crate) struct EnterClassStaticInitializationBlockSnapshot { label_set: LabelSet, flags: ParsingContextFlags, } pub(crate) struct EnterClassStaticInitializationBlock; impl ChangeParserState for EnterClassStaticInitializationBlock { type Snapshot = EnterClassStaticInitializationBlockSnapshot; fn apply(self, state: &mut ParserState) -> Self::Snapshot { let flags = (state.parsing_context - ParsingContextFlags::FUNCTION_RESET_MASK - ParsingContextFlags::IN_FUNCTION) | ParsingContextFlags::IN_ASYNC; // allow async for better error recovery EnterClassStaticInitializationBlockSnapshot { flags: std::mem::replace(&mut state.parsing_context, flags), label_set: std::mem::take(&mut state.label_set), } } fn restore(state: &mut ParserState, value: Self::Snapshot) { state.parsing_context = value.flags; state.label_set = value.label_set; } } #[derive(Debug, Default)] pub(crate) 
struct WithLabelSnapshot { #[cfg(debug_assertions)] label_set_len: usize, } /// Adds the labelled item with the given label to the `label_set`. /// Removes the label when the change is undone. pub(crate) struct WithLabel(pub String, pub LabelledItem); impl ChangeParserState for WithLabel { type Snapshot = WithLabelSnapshot; fn apply(self, state: &mut ParserState) -> Self::Snapshot { #[cfg(debug_assertions)] let previous_len = state.label_set.len(); state.label_set.insert(self.0, self.1); WithLabelSnapshot { // Capturing the len is sufficient because: // * The labels are stored in an index map that uses insertion-order // * Labels are scoped and new labels are always appended to the end of the list #[cfg(debug_assertions)] label_set_len: previous_len, } } #[cfg(not(debug_assertions))] fn restore(state: &mut ParserState, _: Self::Snapshot) { state.label_set.pop(); } #[cfg(debug_assertions)] fn restore(state: &mut ParserState, value: Self::Snapshot) { assert_eq!(state.label_set.len(), value.label_set_len + 1); state.label_set.pop(); } } /// Sets the state changes needed when parsing a TS type declaration (async and await are not reserved identifiers) pub(crate) struct EnterType; impl ChangeParserStateFlags for EnterType { fn compute_new_flags(&self, existing: ParsingContextFlags) -> ParsingContextFlags { existing - ParsingContextFlags::IN_ASYNC - ParsingContextFlags::IN_GENERATOR } } #[derive(Default)] pub(crate) struct EnterAmbientContextSnapshot { flags: ParsingContextFlags, default_item: Option<ExportDefaultItem>, strict_mode: Option<StrictMode>, } pub(crate) struct EnterAmbientContext; impl ChangeParserState for EnterAmbientContext { type Snapshot = EnterAmbientContextSnapshot; fn apply(self, state: &mut ParserState) -> Self::Snapshot { let new_flags = state.parsing_context | ParsingContextFlags::AMBIENT_CONTEXT; EnterAmbientContextSnapshot { flags: std::mem::replace(&mut state.parsing_context, new_flags), default_item: state.default_item.take(), strict_mode: 
state.strict.take(), } } fn restore(state: &mut ParserState, value: Self::Snapshot) { state.parsing_context = value.flags; state.default_item = value.default_item; state.strict = value.strict_mode; } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/test_utils.rs
crates/rome_js_parser/src/test_utils.rs
use crate::Parse; use rome_console::fmt::{Formatter, Termcolor}; use rome_console::markup; use rome_diagnostics::DiagnosticExt; use rome_diagnostics::{termcolor::Buffer, PrintDiagnostic}; use rome_js_syntax::{JsLanguage, JsSyntaxNode}; use rome_rowan::{AstNode, SyntaxKind, SyntaxSlot}; use std::{fmt::Debug, path::Path}; /// This check is used in the parser test to ensure it doesn't emit /// bogus nodes without diagnostics, and in the analyzer tests to /// check the syntax trees resulting from code actions are correct pub fn has_bogus_nodes_or_empty_slots(node: &JsSyntaxNode) -> bool { node.descendants().any(|descendant| { let kind = descendant.kind(); if kind.is_bogus() { return true; } if kind.is_list() { return descendant .slots() .any(|slot| matches!(slot, SyntaxSlot::Empty)); } false }) } /// This function analyzes the parsing result of a file and panic with a /// detailed message if it contains any error-level diagnostic, bogus nodes, /// empty list slots or missing required children pub fn assert_errors_are_absent<T>(program: &Parse<T>, path: &Path) where T: AstNode<Language = JsLanguage> + Debug, { let syntax = program.syntax(); let debug_tree = format!("{:?}", program.tree()); let has_missing_children = debug_tree.contains("missing (required)"); if has_bogus_nodes_or_empty_slots(&syntax) { panic!( "modified tree has bogus nodes or empty slots:\n{syntax:#?} \n\n {}", syntax ) } if !program.has_errors() && !has_missing_children { return; } let mut buffer = Buffer::no_color(); for diagnostic in program.diagnostics() { let error = diagnostic .clone() .with_file_path(path.to_str().unwrap()) .with_file_source_code(syntax.to_string()); Formatter::new(&mut Termcolor(&mut buffer)) .write_markup(markup! { {PrintDiagnostic::verbose(&error)} }) .unwrap(); } panic!("There should be no errors in the file {:?} but the following errors where present:\n{}\n\nParsed tree:\n{:#?}", path.display(), std::str::from_utf8(buffer.as_slice()).unwrap(), &syntax ); }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax.rs
crates/rome_js_parser/src/syntax.rs
//! The Js syntax itself and parser functions. //! //! The actual parsing is done in these modules. //! Every single function is public, this is to allow people to //! use the parser for their specific needs, for example, parsing //! only an expression. //! //! Functions emit markers, see `CompletedMarker` and `Marker` docs for more info. mod assignment; mod auxiliary; mod binding; mod class; pub mod expr; mod function; mod js_parse_error; mod jsx; mod module; mod object; mod pattern; pub mod program; mod stmt; mod typescript;
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/span.rs
crates/rome_js_parser/src/span.rs
use rome_rowan::{Language, SyntaxElement, SyntaxNode, SyntaxToken, TextRange, TextSize}; use std::{fmt::Debug, ops::Range}; /// A value which can be used as the range inside of a diagnostic. /// /// This is essentially a hack to allow us to use SyntaxElement, SyntaxNode, etc directly pub trait Span { fn as_range(&self) -> TextRange; /// Make a new span which extends to another span /// /// ```text /// from to /// ^^^^^^^^^^^^ /// ``` fn join<T: Span>(&self, other: T) -> TextRange { TextRange::new(self.as_range().start(), other.as_range().end()) } /// Make a new span which is between another span /// /// ```text /// from to /// ^^^^^^ /// ``` fn between<T: Span>(&self, other: T) -> TextRange { TextRange::new(self.as_range().end(), other.as_range().start()) } /// Make a new span which extends until another span /// /// ```text /// from to /// ^^^^^^^^^^ /// ``` fn until<T: Span>(&self, other: T) -> TextRange { TextRange::new(self.as_range().start(), other.as_range().start()) } fn sub_start(&self, amount: TextSize) -> TextRange { self.as_range().sub_start(amount) } fn add_start(&self, amount: TextSize) -> TextRange { self.as_range().add_start(amount) } fn sub_end(&self, amount: TextSize) -> TextRange { self.as_range().sub_end(amount) } fn add_end(&self, amount: TextSize) -> TextRange { self.as_range().add_end(amount) } } impl<T: Span> Span for &T { fn as_range(&self) -> TextRange { (*self).as_range() } } impl<T: Span> Span for &mut T { fn as_range(&self) -> TextRange { (**self).as_range() } } impl<T: Copy> Span for Range<T> where TextSize: TryFrom<T>, <TextSize as TryFrom<T>>::Error: Debug, { fn as_range(&self) -> TextRange { TextRange::new( TextSize::try_from(self.start).expect("integer overflow"), TextSize::try_from(self.end).expect("integer overflow"), ) } } impl<T: Language> Span for SyntaxNode<T> { fn as_range(&self) -> TextRange { self.text_range() } } impl<T: Language> Span for SyntaxToken<T> { fn as_range(&self) -> TextRange { self.text_range() } } impl<T: 
Language> Span for SyntaxElement<T> { fn as_range(&self) -> TextRange { match self { SyntaxElement::Node(n) => n.text_range(), SyntaxElement::Token(t) => t.text_range(), } } } impl Span for TextRange { fn as_range(&self) -> TextRange { *self } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/options.rs
crates/rome_js_parser/src/options.rs
/// Options to pass to the JavaScript parser #[derive(Debug, Clone, Default)] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[cfg_attr(feature = "schemars", derive(schemars::JsonSchema))] pub struct JsParserOptions { /// Whether the parsing of the class parameter decorators should happen. /// /// This parameter decorators belong to the old language proposal. pub parse_class_parameter_decorators: bool, } impl JsParserOptions { pub fn with_parse_class_parameter_decorators(mut self) -> Self { self.parse_class_parameter_decorators = true; self } /// Should parse parameter decorators inside classes, e.g.: /// /// ```js /// class C { /// post(@Param() name) {} /// } /// ``` pub fn should_parse_parameter_decorators(&self) -> bool { self.parse_class_parameter_decorators } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/object.rs
crates/rome_js_parser/src/syntax/object.rs
#[allow(deprecated)] use crate::parser::single_token_parse_recovery::SingleTokenParseRecovery; use crate::parser::ParsedSyntax::{Absent, Present}; use crate::parser::{ParsedSyntax, RecoveryResult}; use crate::prelude::*; use crate::state::{EnterParameters, SignatureFlags}; use crate::syntax::class::parse_decorators; use crate::syntax::expr::{ is_nth_at_reference_identifier, parse_assignment_expression_or_higher, parse_expression, parse_reference_identifier, ExpressionContext, }; use crate::syntax::function::{ parse_formal_parameter, parse_function_body, parse_parameter_list, ParameterContext, }; use crate::syntax::js_parse_error; use crate::syntax::js_parse_error::decorators_not_allowed; use crate::syntax::typescript::ts_parse_error::{ ts_accessor_type_parameters_error, ts_only_syntax_error, ts_set_accessor_return_type_error, }; use crate::syntax::typescript::{ parse_ts_return_type_annotation, parse_ts_type_annotation, parse_ts_type_parameters, TypeContext, }; use crate::JsSyntaxFeature::TypeScript; use crate::{JsParser, ParseRecovery}; use rome_js_syntax::JsSyntaxKind::*; use rome_js_syntax::{JsSyntaxKind, T}; use rome_parser::parse_lists::ParseSeparatedList; // test js object_expr // let a = {}; // let b = {foo,} // // test_err js object_expr_err // let a = {, foo} // let b = { foo bar } // let b = { foo struct ObjectMembersList; impl ParseSeparatedList for ObjectMembersList { type Kind = JsSyntaxKind; type Parser<'source> = JsParser<'source>; const LIST_KIND: Self::Kind = JS_OBJECT_MEMBER_LIST; fn parse_element(&mut self, p: &mut JsParser) -> ParsedSyntax { parse_object_member(p) } fn is_at_list_end(&self, p: &mut JsParser) -> bool { p.at(T!['}']) } fn recover(&mut self, p: &mut JsParser, parsed_element: ParsedSyntax) -> RecoveryResult { parsed_element.or_recover( p, &ParseRecovery::new(JS_BOGUS_MEMBER, token_set![T![,], T!['}'], T![;], T![:]]) .enable_recovery_on_line_break(), js_parse_error::expected_object_member, ) } fn separating_element_kind(&mut self) -> 
JsSyntaxKind { T![,] } fn allow_trailing_separating_element(&self) -> bool { true } } /// An object literal such as `{ a: b, "b": 5 + 5 }`. pub(super) fn parse_object_expression(p: &mut JsParser) -> ParsedSyntax { if !p.at(T!['{']) { return Absent; } let m = p.start(); p.bump(T!['{']); ObjectMembersList.parse_list(p); p.expect(T!['}']); Present(m.complete(p, JS_OBJECT_EXPRESSION)) } /// An individual object property such as `"a": b` or `5: 6 + 6`. fn parse_object_member(p: &mut JsParser) -> ParsedSyntax { match p.cur() { // test js getter_object_member // let a = { // get foo() { // return foo; // }, // get "bar"() { // return "bar"; // }, // get ["a" + "b"]() { // return "a" + "b" // }, // get 5() { // return 5; // }, // get() { // return "This is a method and not a getter"; // } // } T![get] if !p.has_nth_preceding_line_break(1) && is_nth_at_type_member_name(p, 1) => { parse_getter_object_member(p) } // test js setter_object_member // let a = { // set foo(value) { // }, // set "bar"(value) { // }, // set ["a" + "b"](value) { // }, // set 5(value) { // }, // set() { // return "This is a method and not a setter"; // } // } // test_err js object_expr_setter // let b = { // set foo() { // return 5; // } // } T![set] if !p.has_nth_preceding_line_break(1) && is_nth_at_type_member_name(p, 1) => { parse_setter_object_member(p) } // test js object_expr_async_method // let a = { // async foo() {}, // async *foo() {} // } T![async] if is_parser_at_async_method_member(p) => parse_method_object_member(p), // test js object_expr_spread_prop // let a = {...foo} T![...] 
=> { let m = p.start(); p.bump_any(); parse_assignment_expression_or_higher(p, ExpressionContext::default()) .or_add_diagnostic(p, js_parse_error::expected_expression_assignment); Present(m.complete(p, JS_SPREAD)) } T![*] => { // test js object_expr_generator_method // let b = { *foo() {} } parse_method_object_member(p) } _ => { let m = p.start(); if is_nth_at_reference_identifier(p, 0) && !token_set![T!['('], T![<], T![:]].contains(p.nth(1)) { // test js object_expr_ident_prop // ({foo}) parse_reference_identifier(p).unwrap(); // There are multiple places where it's first needed to parse an expression to determine if // it is an assignment target or not. This requires that parse expression is valid for any // assignment expression. Thus, it's needed that the parser silently parses over a "{ arrow = test }" // property if p.at(T![=]) { // test js assignment_shorthand_prop_with_initializer // for ({ arrow = () => {} } of [{}]) {} // // test_err js object_shorthand_with_initializer // ({ arrow = () => {} }) p.error(p.err_builder("Did you mean to use a `:`? 
An `=` can only follow a property name when the containing object literal is part of a destructuring pattern.", p.cur_range())); p.bump(T![=]); parse_assignment_expression_or_higher(p, ExpressionContext::default()).ok(); return Present(m.complete(p, JS_BOGUS_MEMBER)); } return Present(m.complete(p, JS_SHORTHAND_PROPERTY_OBJECT_MEMBER)); } let checkpoint = p.checkpoint(); let member_name = parse_object_member_name(p) .or_add_diagnostic(p, js_parse_error::expected_object_member); // test js object_expr_method // let b = { // foo() {}, // "bar"(a, b, c) {}, // ["foo" + "bar"](a) {}, // 5(...rest) {} // } // test_err js object_expr_method // let b = { foo) } if p.at(T!['(']) || p.at(T![<]) { parse_method_object_member_body(p, SignatureFlags::empty()); Present(m.complete(p, JS_METHOD_OBJECT_MEMBER)) } else if member_name.is_some() { // test js object_prop_name // let a = {"foo": foo, [6 + 6]: foo, bar: foo, 7: foo} // test js object_expr_ident_literal_prop // let b = { a: true } // If the member name was a literal OR we're at a colon p.expect(T![:]); // test js object_prop_in_rhs // for ({ a: "x" in {} };;) {} parse_assignment_expression_or_higher(p, ExpressionContext::default()) .or_add_diagnostic(p, js_parse_error::expected_expression_assignment); Present(m.complete(p, JS_PROPERTY_OBJECT_MEMBER)) } else { // test_err js object_expr_error_prop_name // let a = { /: 6, /: /foo/ } // let b = {{}} // test_err js object_expr_non_ident_literal_prop // let d = {5} #[allow(deprecated)] SingleTokenParseRecovery::new(token_set![T![:], T![,]], JS_BOGUS).recover(p); if p.eat(T![:]) { parse_assignment_expression_or_higher(p, ExpressionContext::default()) .or_add_diagnostic(p, js_parse_error::expected_object_member); Present(m.complete(p, JS_PROPERTY_OBJECT_MEMBER)) } else { // It turns out that this isn't a valid member after all. 
Make sure to throw // away everything that has been parsed so far so that the caller can // do its error recovery p.rewind(checkpoint); m.abandon(p); Absent } } } } } /// Parses a getter object member: `{ get a() { return "a"; } }` fn parse_getter_object_member(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![get]) { return Absent; } let m = p.start(); p.expect(T![get]); parse_object_member_name(p).or_add_diagnostic(p, js_parse_error::expected_object_member_name); // test_err ts ts_object_getter_type_parameters // ({ get a<A>(): A {} }); if let Present(type_parameters) = parse_ts_type_parameters(p, TypeContext::default()) { p.error(ts_accessor_type_parameters_error(p, &type_parameters)) } p.expect(T!['(']); p.expect(T![')']); TypeScript .parse_exclusive_syntax(p, parse_ts_type_annotation, |p, annotation| { ts_only_syntax_error(p, "type annotation", annotation.range(p)) }) .ok(); parse_function_body(p, SignatureFlags::empty()) .or_add_diagnostic(p, js_parse_error::expected_function_body); Present(m.complete(p, JS_GETTER_OBJECT_MEMBER)) } /// Parses a setter object member like `{ set a(value) { .. 
} }` fn parse_setter_object_member(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![set]) { return Absent; } let m = p.start(); p.expect(T![set]); parse_object_member_name(p).or_add_diagnostic(p, js_parse_error::expected_object_member_name); // test_err ts ts_object_setter_type_parameters // ({ set a<A>(value: A) {} }); if let Present(type_parameters) = parse_ts_type_parameters(p, TypeContext::default()) { p.error(ts_accessor_type_parameters_error(p, &type_parameters)) } let has_l_paren = p.expect(T!['(']); p.with_state(EnterParameters(SignatureFlags::empty()), |p| { // test_err ts ts_decorator_object { "parse_class_parameter_decorators": true } // ({ // method(@dec x, second, @dec third = 'default') {} // method(@dec.fn() x, second, @dec.fn() third = 'default') {} // method(@dec() x, second, @dec() third = 'default') {} // set val(@dec x) {} // set val(@dec.fn() x) {} // set val(@dec() x) {} // }) let decorator_list = parse_decorators(p) .add_diagnostic_if_present(p, decorators_not_allowed) .map(|mut decorator_list| { decorator_list.change_to_bogus(p); decorator_list }) .into(); parse_formal_parameter( p, decorator_list, ParameterContext::Setter, ExpressionContext::default().and_object_expression_allowed(has_l_paren), ) .or_add_diagnostic(p, js_parse_error::expected_parameter); p.expect(T![')']); }); // test_err ts ts_object_setter_return_type // ({ set a(value: string): void {} }); if let Present(return_type_annotation) = parse_ts_return_type_annotation(p) { p.error(ts_set_accessor_return_type_error( p, &return_type_annotation, )); } parse_function_body(p, SignatureFlags::empty()) .or_add_diagnostic(p, js_parse_error::expected_function_body); Present(m.complete(p, JS_SETTER_OBJECT_MEMBER)) } // test js object_member_name // let a = {"foo": foo, [6 + 6]: foo, bar: foo, 7: foo} /// Parses a `JsAnyObjectMemberName` and returns its completion marker pub(crate) fn parse_object_member_name(p: &mut JsParser) -> ParsedSyntax { match p.cur() { T!['['] => 
parse_computed_member_name(p), _ => parse_literal_member_name(p), } } pub(crate) fn is_nth_at_type_member_name(p: &mut JsParser, offset: usize) -> bool { let nth = p.nth(offset); let start_names = token_set![ JS_STRING_LITERAL, JS_NUMBER_LITERAL, T![ident], T![await], T![yield], T!['['] ]; nth.is_keyword() || start_names.contains(nth) } pub(crate) fn is_at_object_member_name(p: &mut JsParser) -> bool { is_nth_at_type_member_name(p, 0) } pub(crate) fn parse_computed_member_name(p: &mut JsParser) -> ParsedSyntax { if !p.at(T!['[']) { return Absent; } let m = p.start(); p.expect(T!['[']); // test js computed_member_name_in // for ({["x" in {}]: 3} ;;) {} parse_expression(p, ExpressionContext::default()) .or_add_diagnostic(p, js_parse_error::expected_expression); p.expect(T![']']); Present(m.complete(p, JS_COMPUTED_MEMBER_NAME)) } pub(super) fn is_at_literal_member_name(p: &mut JsParser, offset: usize) -> bool { matches!( p.nth(offset), JS_STRING_LITERAL | JS_NUMBER_LITERAL | T![ident] ) || p.nth(offset).is_keyword() } pub(super) fn parse_literal_member_name(p: &mut JsParser) -> ParsedSyntax { let m = p.start(); match p.cur() { JS_STRING_LITERAL | JS_NUMBER_LITERAL | T![ident] => { p.bump_any(); } t if t.is_keyword() => { p.bump_remap(T![ident]); } _ => { m.abandon(p); return Absent; } } Present(m.complete(p, JS_LITERAL_MEMBER_NAME)) } /// Parses a method object member fn parse_method_object_member(p: &mut JsParser) -> ParsedSyntax { let is_async = is_parser_at_async_method_member(p); if !is_async && !p.at(T![*]) && !is_at_object_member_name(p) { return Absent; } let m = p.start(); let mut flags = SignatureFlags::empty(); // test js async_method // class foo { // async foo() {} // async *foo() {} // } if is_async { p.eat(T![async]); flags |= SignatureFlags::ASYNC; } if p.eat(T![*]) { flags |= SignatureFlags::GENERATOR; } parse_object_member_name(p).or_add_diagnostic(p, js_parse_error::expected_object_member_name); parse_method_object_member_body(p, flags); 
Present(m.complete(p, JS_METHOD_OBJECT_MEMBER)) } // test ts ts_method_object_member_body // ({ // x<A>(maybeA: any): maybeA is A { return true }, // y(a: string): string { return "string"; }, // async *id<R>(param: Promise<R>): AsyncIterableIterator<R> { yield await param }, // }) /// Parses the body of a method object member starting right after the member name. fn parse_method_object_member_body(p: &mut JsParser, flags: SignatureFlags) { TypeScript .parse_exclusive_syntax( p, |p| parse_ts_type_parameters(p, TypeContext::default().and_allow_const_modifier(true)), |p, type_parameters| { ts_only_syntax_error(p, "type parameters", type_parameters.range(p)) }, ) .ok(); parse_parameter_list(p, ParameterContext::Implementation, flags) .or_add_diagnostic(p, js_parse_error::expected_parameters); TypeScript .parse_exclusive_syntax(p, parse_ts_return_type_annotation, |p, annotation| { ts_only_syntax_error(p, "return type annotation", annotation.range(p)) }) .ok(); parse_function_body(p, flags).or_add_diagnostic(p, js_parse_error::expected_function_body); } fn is_parser_at_async_method_member(p: &mut JsParser) -> bool { p.at(T![async]) && !p.has_nth_preceding_line_break(1) && (is_nth_at_type_member_name(p, 1) || p.nth_at(1, T![*])) }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/expr.rs
crates/rome_js_parser/src/syntax/expr.rs
//! Expressions, these include `this`, identifiers, arrays, objects, //! binary expressions, unary expressions, and more. //! //! See the [ECMAScript spec](https://www.ecma-international.org/ecma-262/5.1/#sec-11). use super::typescript::*; use crate::lexer::{LexContext, ReLexContext}; use crate::parser::rewrite_parser::{RewriteMarker, RewriteParser}; use crate::parser::{JsParserCheckpoint, RecoveryResult}; use crate::prelude::*; use crate::rewrite::rewrite_events; use crate::rewrite::RewriteParseEvents; use crate::syntax::assignment::parse_assignment; use crate::syntax::assignment::AssignmentExprPrecedence; use crate::syntax::assignment::{expression_to_assignment, expression_to_assignment_pattern}; use crate::syntax::class::{parse_class_expression, parse_decorators}; use crate::syntax::function::{ is_at_async_function, parse_arrow_function_expression, parse_function_expression, LineBreak, }; use crate::syntax::js_parse_error; use crate::syntax::js_parse_error::{decorators_not_allowed, expected_simple_assignment_target}; use crate::syntax::js_parse_error::{ expected_expression, expected_identifier, invalid_assignment_error, private_names_only_allowed_on_left_side_of_in_expression, }; use crate::syntax::jsx::parse_jsx_tag_expression; use crate::syntax::object::parse_object_expression; use crate::syntax::stmt::{is_semi, STMT_RECOVERY_SET}; use crate::syntax::typescript::ts_parse_error::{expected_ts_type, ts_only_syntax_error}; use crate::JsSyntaxFeature::{Jsx, StrictMode, TypeScript}; use crate::ParsedSyntax::{Absent, Present}; use crate::{syntax, JsParser, ParseRecovery, ParsedSyntax}; use bitflags::bitflags; use rome_js_syntax::{JsSyntaxKind::*, *}; use rome_parser::diagnostic::expected_token; use rome_parser::parse_lists::ParseSeparatedList; use rome_parser::ParserProgress; pub const EXPR_RECOVERY_SET: TokenSet<JsSyntaxKind> = token_set![VAR_KW, R_PAREN, L_PAREN, L_BRACK, R_BRACK]; #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub(crate) struct 
ExpressionContext(ExpressionContextFlags); bitflags! { #[derive(Debug, Copy, Clone, Eq, PartialEq)] struct ExpressionContextFlags: u8 { /// Whether `in` should be counted in a binary expression. /// This is for `for...in` statements to prevent ambiguity. /// Corresponds to `[+In]` in the EcmaScript spec if true const INCLUDE_IN = 1 << 0; /// If false, object expressions are not allowed to be parsed /// inside an expression. /// /// Also applies for object patterns const ALLOW_OBJECT_EXPRESSION = 1 << 1; /// If `true` then, don't parse computed member expressions because they can as well indicate /// the start of a computed class member. const IN_DECORATOR = 1 << 2; /// If `true` allows a typescript type assertion. /// Currently disabled on "new" expressions. const ALLOW_TS_TYPE_ASSERTION = 1 << 3; } } impl ExpressionContext { pub(crate) fn and_include_in(self, include: bool) -> Self { self.and(ExpressionContextFlags::INCLUDE_IN, include) } pub(crate) fn and_object_expression_allowed(self, allowed: bool) -> Self { self.and(ExpressionContextFlags::ALLOW_OBJECT_EXPRESSION, allowed) } pub(crate) fn and_in_decorator(self, in_decorator: bool) -> Self { self.and(ExpressionContextFlags::IN_DECORATOR, in_decorator) } pub(crate) fn and_ts_type_assertion_allowed(self, allowed: bool) -> Self { self.and(ExpressionContextFlags::ALLOW_TS_TYPE_ASSERTION, allowed) } /// Returns true if object expressions or object patterns are valid in this context pub(crate) const fn is_object_expression_allowed(&self) -> bool { self.0 .contains(ExpressionContextFlags::ALLOW_OBJECT_EXPRESSION) } /// Returns `true` if the expression parsing includes binary in expressions. pub(crate) const fn is_in_included(&self) -> bool { self.0.contains(ExpressionContextFlags::INCLUDE_IN) } /// Returns `true` if currently parsing a decorator expression `@<expr>`. 
pub(crate) const fn is_in_decorator(&self) -> bool { self.0.contains(ExpressionContextFlags::IN_DECORATOR) } /// Adds the `flag` if `set` is `true`, otherwise removes the `flag` fn and(self, flag: ExpressionContextFlags, set: bool) -> Self { ExpressionContext(if set { self.0 | flag } else { self.0 - flag }) } } /// Sets the default flags for a context that parses a new root expression (for example, the condition of an if statement) /// or sub-expression of another expression (the alternate branch of a condition expression). impl Default for ExpressionContext { fn default() -> Self { ExpressionContext( ExpressionContextFlags::INCLUDE_IN | ExpressionContextFlags::ALLOW_OBJECT_EXPRESSION | ExpressionContextFlags::ALLOW_TS_TYPE_ASSERTION, ) } } /// Parses an expression or recovers to the point of where the next statement starts pub(crate) fn parse_expression_or_recover_to_next_statement( p: &mut JsParser, assign: bool, context: ExpressionContext, ) -> RecoveryResult { let func = if assign { syntax::expr::parse_assignment_expression_or_higher } else { syntax::expr::parse_expression }; func(p, context).or_recover( p, &ParseRecovery::new( JsSyntaxKind::JS_BOGUS_EXPRESSION, STMT_RECOVERY_SET.union(token_set![T!['}']]), ) .enable_recovery_on_line_break(), expected_expression, ) } /// A literal expression. 
/// /// `TRUE | FALSE | NUMBER | STRING | NULL` // test js literals // 5 // true // false // 5n // "foo" // 'bar' // null // 0, 0.0, 0n, 0e00 // "test\ // new-line"; // /^[يفمئامئ‍ئاسۆند]/i; //regex with unicode // test_err js literals // 00, 012, 08, 091, 0789 // parser errors // 01n, 0_0, 01.2 // lexer errors // "test // continues" // unterminated string literal pub(super) fn parse_literal_expression(p: &mut JsParser) -> ParsedSyntax { let literal_kind = match p.cur() { JsSyntaxKind::JS_NUMBER_LITERAL => { return parse_number_literal_expression(p) .or_else(|| parse_big_int_literal_expression(p)); } JsSyntaxKind::JS_STRING_LITERAL => JsSyntaxKind::JS_STRING_LITERAL_EXPRESSION, JsSyntaxKind::NULL_KW => JsSyntaxKind::JS_NULL_LITERAL_EXPRESSION, JsSyntaxKind::TRUE_KW | JsSyntaxKind::FALSE_KW => { JsSyntaxKind::JS_BOOLEAN_LITERAL_EXPRESSION } T![/] | T![/=] => { if p.re_lex(ReLexContext::Regex) == JS_REGEX_LITERAL { JS_REGEX_LITERAL_EXPRESSION } else { return Absent; } } _ => return Absent, }; let m = p.start(); p.bump_any(); Present(m.complete(p, literal_kind)) } pub(crate) fn parse_big_int_literal_expression(p: &mut JsParser) -> ParsedSyntax { if !p.at(JS_NUMBER_LITERAL) || !p.cur_text().ends_with('n') { return Absent; } let m = p.start(); p.bump_remap(JsSyntaxKind::JS_BIGINT_LITERAL); Present(m.complete(p, JS_BIGINT_LITERAL_EXPRESSION)) } pub(crate) fn parse_number_literal_expression(p: &mut JsParser) -> ParsedSyntax { let cur_src = p.cur_text(); if !p.at(JS_NUMBER_LITERAL) || cur_src.ends_with('n') { return Absent; } // Forbid legacy octal number in strict mode if p.state().strict().is_some() && cur_src.starts_with('0') && cur_src .chars() .nth(1) .filter(|c| c.is_ascii_digit()) .is_some() { let err_msg = if cur_src.contains(['8', '9']) { "Decimals with leading zeros are not allowed in strict mode." } else { "\"0\"-prefixed octal literals are deprecated; use the \"0o\" prefix instead." 
}; p.error(p.err_builder(err_msg, p.cur_range())); } let m = p.start(); p.bump_any(); Present(m.complete(p, JS_NUMBER_LITERAL_EXPRESSION)) } /// Parses an assignment expression or any higher expression /// https://tc39.es/ecma262/multipage/ecmascript-language-expressions.html#prod-AssignmentExpression pub(crate) fn parse_assignment_expression_or_higher( p: &mut JsParser, context: ExpressionContext, ) -> ParsedSyntax { let arrow_expression = parse_arrow_function_expression(p); if arrow_expression.is_present() { return arrow_expression; } parse_assignment_expression_or_higher_base(p, context) } fn parse_assignment_expression_or_higher_base( p: &mut JsParser, context: ExpressionContext, ) -> ParsedSyntax { // test js reparse_yield_as_identifier // // SCRIPT // function foo() { yield *bar; } // function bar() { yield; } // function baz() { yield } if p.at(T![yield]) && (p.state().in_generator() || is_nth_at_expression(p, 1)) { return Present(parse_yield_expression(p, context)); } let checkpoint = p.checkpoint(); parse_conditional_expr(p, context) .and_then(|target| parse_assign_expr_recursive(p, target, checkpoint, context)) } // test js assign_expr // foo += bar = b ??= 3; // foo -= bar; // (foo = bar); // [foo, bar] = baz; // [foo, bar = "default", ...rest] = baz; // [,,,foo,bar] = baz; // ({ bar, baz } = {}); // ({ bar: [baz = "baz"], foo = "foo", ...rest } = {}); // test_err js assign_expr_right // (foo = ); // test_err js assign_expr_left // ( = foo); // test js assign_eval_member_or_computed_expr // eval.foo = 10 // arguments[1] = "baz" // eval[2] = "Chungking Express" // test_err js assign_eval_or_arguments // eval = 0 // eval ??= 2 // eval *= 4 // arguments = "foo" // arguments ||= "baz" // ({ eval } = o) // ({ foo: { eval }}) = o fn parse_assign_expr_recursive( p: &mut JsParser, mut target: CompletedMarker, checkpoint: JsParserCheckpoint, context: ExpressionContext, ) -> ParsedSyntax { let assign_operator = p.cur(); if is_assign_token(assign_operator) { let 
target = if matches!( target.kind(p), JS_BINARY_EXPRESSION | TS_TYPE_ASSERTION_EXPRESSION ) { // Special handling for binary expressions and type assertions to avoid having to deal with `a as string = ...` // inside of the `ReparseAssignment` implementation because not using parentheses is valid // in for heads `for (a as any in []) {}` p.error(invalid_assignment_error(p, target.range(p))); target.change_kind(p, JS_BOGUS_ASSIGNMENT); target } else { expression_to_assignment_pattern(p, target, checkpoint) }; let m = target.precede(p); p.expect(assign_operator); parse_assignment_expression_or_higher(p, context.and_object_expression_allowed(true)) .or_add_diagnostic(p, js_parse_error::expected_expression_assignment); Present(m.complete(p, JS_ASSIGNMENT_EXPRESSION)) } else { Present(target) } } fn is_assign_token(kind: JsSyntaxKind) -> bool { matches!( kind, T![=] | T![+=] | T![-=] | T![*=] | T![/=] | T![%=] | T![<<=] | T![>>=] | T![>>>=] | T![&=] | T![|=] | T![^=] | T![&&=] | T![||=] | T![??=] | T![**=] ) } // test js yield_expr // function *foo() { // yield foo; // yield* foo; // yield; // yield // yield // } fn parse_yield_expression(p: &mut JsParser, context: ExpressionContext) -> CompletedMarker { let m = p.start(); let yield_range = p.cur_range(); p.expect(T![yield]); // test js yield_in_generator_function // function* foo() { yield 10; } // function* foo() { yield *bar; } // function* foo() { yield; } if !is_semi(p, 0) && (p.at(T![*]) || is_at_expression(p)) { let argument = p.start(); p.eat(T![*]); parse_assignment_expression_or_higher(p, context.and_object_expression_allowed(true)).ok(); argument.complete(p, JS_YIELD_ARGUMENT); } let mut yield_expr = m.complete(p, JS_YIELD_EXPRESSION); // test_err js yield_at_top_level_module // yield 10; // test_err js yield_at_top_level_script // // SCRIPT // yield 10; // test_err js yield_in_non_generator_function_script // // SCRIPT // function foo() { yield bar; } // function foo() { yield 10; } // test_err js 
yield_in_non_generator_function_module // function foo() { yield; } // function foo() { yield foo; } // function foo() { yield *foo; } if !(p.state().in_generator() && p.state().in_function()) { // test_err js yield_expr_in_parameter_initializer // function* test(a = yield "test") {} // function test2(a = yield "test") {} p.error(p.err_builder( "`yield` is only allowed within generator functions.", yield_range, )); yield_expr.change_to_bogus(p); } yield_expr } /// A conditional expression such as `foo ? bar : baz` // test js conditional_expr // foo ? bar : baz // foo ? bar : baz ? bar : baz pub(super) fn parse_conditional_expr(p: &mut JsParser, context: ExpressionContext) -> ParsedSyntax { // test_err js conditional_expr_err // foo ? bar baz // foo ? bar baz ? foo : bar // foo ? bar : let lhs = parse_binary_or_logical_expression(p, OperatorPrecedence::lowest(), context); if p.at(T![?]) { lhs.map(|marker| { let m = marker.precede(p); p.bump(T![?]); parse_assignment_expression_or_higher(p, ExpressionContext::default()) .or_add_diagnostic(p, js_parse_error::expected_expression_assignment); p.expect(T![:]); parse_assignment_expression_or_higher(p, context) .or_add_diagnostic(p, js_parse_error::expected_expression_assignment); m.complete(p, JS_CONDITIONAL_EXPRESSION) }) } else { lhs } } pub(crate) fn is_at_binary_operator(p: &JsParser, context: ExpressionContext) -> bool { let cur_kind = p.cur(); match cur_kind { T![in] => context.is_in_included(), kind => OperatorPrecedence::try_from_binary_operator(kind).is_some(), } } /// A binary expression such as `2 + 2` or `foo * bar + 2` or a logical expression 'a || b' fn parse_binary_or_logical_expression( p: &mut JsParser, left_precedence: OperatorPrecedence, context: ExpressionContext, ) -> ParsedSyntax { // test js private_name_presence_check // class A { // #prop; // test() { // #prop in this // } // } let left = parse_unary_expr(p, context).or_else(|| parse_private_name(p)); parse_binary_or_logical_expression_recursive(p, 
left, left_precedence, context) } // test js binary_expressions // 5 * 5 // 6 ** 6 ** 7 // 1 + 2 * 3 // (1 + 2) * 3 // 1 / 2 // 74 in foo // foo instanceof Array // foo ?? bar // a >> b // a >>> b // 1 + 1 + 1 + 1 // 5 + 6 - 1 * 2 / 1 ** 6 // class Test { #name; test() { true && #name in {} } } // test_err js binary_expressions_err // foo(foo +); // foo + * 2; // !foo * bar; fn parse_binary_or_logical_expression_recursive( p: &mut JsParser, mut left: ParsedSyntax, left_precedence: OperatorPrecedence, context: ExpressionContext, ) -> ParsedSyntax { // Use a loop to eat all binary expressions with the same precedence. // At first, the algorithm makes the impression that it recurse for every right-hand side expression. // This is true, but `parse_binary_or_logical_expression` immediately returns if the // current operator has the same or a lower precedence than the left-hand side expression. Thus, // the algorithm goes at most `count(OperatorPrecedence)` levels deep. loop { // test_err js js_right_shift_comments // 1 >> /* a comment */ > 2; let op = p.re_lex(ReLexContext::BinaryOperator); if (op == T![as] && p.has_preceding_line_break()) || (op == T![satisfies] && p.has_preceding_line_break()) || (op == T![in] && !context.is_in_included()) { break; } // This isn't spec compliant but improves error recovery in case the `}` is missing // inside of a JSX attribute expression value or an expression child. // Prevents that it parses `</` as less than followed by a RegEx if JSX is enabled and only if // there's no whitespace between the two tokens. // The downside of this is that `a </test/` will be incorrectly left unparsed. I think this is // a worth compromise and compatible with what TypeScript's doing. 
if Jsx.is_supported(p) && op == T![<] && p.nth_at(1, T![/]) && !p.source_mut().has_next_preceding_trivia() { // test_err jsx jsx_child_expression_missing_r_curly // <test>{ 4 + 3</test> break; } let new_precedence = match OperatorPrecedence::try_from_binary_operator(op) { Some(precedence) => precedence, // Not a binary operator None => break, }; let stop_at_current_operator = if new_precedence.is_right_to_left() { new_precedence < left_precedence } else { new_precedence <= left_precedence }; if stop_at_current_operator { break; } let op_range = p.cur_range(); let mut is_bogus = false; if let Present(left) = &mut left { // test js exponent_unary_parenthesized // (delete a.b) ** 2; // (void ident) ** 2; // (typeof ident) ** 2; // (-3) ** 2; // (+3) ** 2; // (~3) ** 2; // (!true) ** 2; // test_err js exponent_unary_unparenthesized // delete a.b ** 2; // void ident ** 2; // typeof ident ** 2; // -3 ** 2; // +3 ** 2; // ~3 ** 2; // !true ** 2; if op == T![**] && left.kind(p) == JS_UNARY_EXPRESSION { let err = p .err_builder( "unparenthesized unary expression can't appear on the left-hand side of '**'", left.range(p) ) .detail(op_range, "The operation") .detail(left.range(p), "The left-hand side"); p.error(err); is_bogus = true; } else if op != T![in] && left.kind(p) == JS_PRIVATE_NAME { p.error(private_names_only_allowed_on_left_side_of_in_expression( p, left.range(p), )); left.change_kind(p, JS_BOGUS_EXPRESSION); } } else { let err = p .err_builder( format!( "Expected an expression for the left hand side of the `{}` operator.", p.text(op_range), ), op_range, ) .hint("This operator requires a left hand side value"); p.error(err); } let m = left.precede(p); p.bump(op); // test ts ts_as_expression // let x: any = "string"; // let y = x as string; // let z = x as const; // let not_an_as_expression = x // as; // let precedence = "hello" as const + 3 as number as number; if op == T![as] { parse_ts_type(p, TypeContext::default()).or_add_diagnostic(p, expected_ts_type); let 
mut as_expression = m.complete(p, TS_AS_EXPRESSION); if TypeScript.is_unsupported(p) { p.error(ts_only_syntax_error( p, "'as' expression", as_expression.range(p), )); as_expression.change_to_bogus(p); } left = Present(as_expression); continue; } // test ts ts_satisfies_expression // interface A { // a: string // }; // let x = { a: 'test' } satisfies A; // let y = { a: 'test', b: 'test' } satisfies A; // const z = undefined satisfies 1; // let not_a_satisfies_expression = undefined // satisfies; // let precedence = "hello" satisfies string + 3 satisfies number satisfies number; // test_err js ts_satisfies_expression // let x = "hello" satisfies string; if op == T![satisfies] { parse_ts_type(p, TypeContext::default()).or_add_diagnostic(p, expected_ts_type); let mut satisfies_expression = m.complete(p, TS_SATISFIES_EXPRESSION); if TypeScript.is_unsupported(p) { p.error(ts_only_syntax_error( p, "'satisfies' expression", satisfies_expression.range(p), )); satisfies_expression.change_to_bogus(p); } left = Present(satisfies_expression); continue; } parse_binary_or_logical_expression(p, new_precedence, context) .or_add_diagnostic(p, expected_expression); let expression_kind = if is_bogus { JS_BOGUS_EXPRESSION } else { match op { // test js logical_expressions // foo ?? bar // a || b // a && b // // test_err js logical_expressions_err // foo ?? * 2; // !foo && bar; // foo(foo ||) T![??] | T![||] | T![&&] => JS_LOGICAL_EXPRESSION, T![instanceof] => JS_INSTANCEOF_EXPRESSION, T![in] => JS_IN_EXPRESSION, _ => JS_BINARY_EXPRESSION, } }; left = Present(m.complete(p, expression_kind)); } if let Present(left) = &mut left { // Left at this point becomes the right-hand side of a binary expression // or is a standalone expression. 
Private names aren't allowed as standalone expressions // nor on the right-hand side if left.kind(p) == JS_PRIVATE_NAME { // test_err js private_name_presence_check_recursive // class A { // #prop; // test() { // #prop in #prop in this; // 5 + #prop; // #prop // #prop + 5; // } // } left.change_kind(p, JS_BOGUS_EXPRESSION); p.error(private_names_only_allowed_on_left_side_of_in_expression( p, left.range(p), )); } } left } /// A member or new expression with subscripts. e.g. `new foo`, `new Foo()`, `foo`, or `foo().bar[5]` // test js new_exprs // new Foo() // new foo; // new.target // new new new new Foo(); // new Foo(bar, baz, 6 + 6, foo[bar] + ((foo) => {}) * foo?.bar) // test_err js new_exprs // new; fn parse_member_expression_or_higher(p: &mut JsParser, context: ExpressionContext) -> ParsedSyntax { parse_primary_expression(p, context) .map(|lhs| parse_member_expression_rest(p, lhs, context, true, &mut false)) } // test_err js subscripts_err // foo()?.baz[].; // BAR`b fn parse_member_expression_rest( p: &mut JsParser, lhs: CompletedMarker, context: ExpressionContext, allow_optional_chain: bool, in_optional_chain: &mut bool, ) -> CompletedMarker { let mut progress = ParserProgress::default(); let mut lhs = lhs; while !p.at(EOF) { progress.assert_progressing(p); lhs = match p.cur() { T![.] => parse_static_member_expression(p, lhs, T![.]).unwrap(), // Don't parse out `[` as a member expression because it may as well be the start of a computed class member T!['['] if !context.is_in_decorator() => { parse_computed_member_expression(p, lhs, false).unwrap() } T![?.] 
if allow_optional_chain => { let completed = if p.nth_at(1, T!['[']) { parse_computed_member_expression(p, lhs, true).unwrap() } else if is_nth_at_any_name(p, 1) { parse_static_member_expression(p, lhs, T![?.]).unwrap() } else if p.nth_at(1, BACKTICK) { let m = lhs.precede(p); p.bump(T![?.]); let template_literal = p.start(); parse_template_literal(p, template_literal, true, true); m.complete(p, JS_BOGUS_EXPRESSION) } else { // '(' or any other unexpected character break; }; *in_optional_chain = true; completed } T![!] if !p.has_preceding_line_break() => { // test ts ts_non_null_assertion_expression // let a = { b: {} }; // a!; // function test() {} // test()! // a.b.c!; // a!!!!!!; let m = lhs.precede(p); p.bump(T![!]); let mut non_null = m.complete(p, TS_NON_NULL_ASSERTION_EXPRESSION); if TypeScript.is_unsupported(p) { non_null.change_to_bogus(p); p.error(ts_only_syntax_error( p, "non-null assertions", non_null.range(p), )); } non_null } BACKTICK => { // test ts ts_optional_chain_call // (<A, B>() => {})?.<A, B>(); let m = match lhs.kind(p) { TS_INSTANTIATION_EXPRESSION => lhs.undo_completion(p), _ => lhs.precede(p), }; parse_template_literal(p, m, *in_optional_chain, true) } T![<] | T![<<] => { // only those two possible token in cur position `parse_ts_type_arguments_in_expression` could possibly return a `Present(_)` if let Present(_) = parse_ts_type_arguments_in_expression(p, context) { let new_marker = lhs.precede(p); lhs = new_marker.complete(p, JsSyntaxKind::TS_INSTANTIATION_EXPRESSION); continue; }; break; } _ => { break; } }; } lhs } // test_err ts ts_new_operator // new A<test><test>(); // test ts ts_new_operator // var c2 = new T<string>; // Ok // var x1 = new SS<number>(); // OK // var x3 = new SS(); // OK // var x4 = new SS; // OK fn parse_new_expr(p: &mut JsParser, context: ExpressionContext) -> ParsedSyntax { if !p.at(T![new]) { return Absent; } let m = p.start(); p.expect(T![new]); // new.target if p.eat(T![.]) { if p.at(T![ident]) && p.cur_text() 
== "target" { p.bump_remap(TARGET); } else if is_at_identifier(p) { let identifier_range = p.cur_range(); let name = p.cur_text(); let error = p .err_builder( format!("'{name}' is not a valid meta-property for keyword 'new'."), identifier_range, ) .hint("Did you mean 'target'?"); p.error(error); p.bump_remap(T![ident]); } else { p.error(expected_identifier(p, p.cur_range())); } return Present(m.complete(p, JS_NEW_TARGET_EXPRESSION)); } if let Some(lhs) = parse_primary_expression(p, context.and_ts_type_assertion_allowed(false)) .or_add_diagnostic(p, expected_expression) .map(|expr| parse_member_expression_rest(p, expr, context, false, &mut false)) { // test_err ts invalid_optional_chain_from_new_expressions // new Test<string>?.test(); // new Test?.test(); // new A.b?.c() // new (A.b)?.c() // new (A.b?.()).c() // new A.b?.()() if p.at(T![?.]) { let error = p .err_builder("Invalid optional chain from new expression.", p.cur_range()) .hint(format!("Did you mean to call '{}()'?", lhs.text(p))); p.error(error); } if let TS_INSTANTIATION_EXPRESSION = lhs.kind(p) { lhs.undo_completion(p).abandon(p) }; } // test ts ts_new_with_type_arguments // class Test<A, B, C> {} // new Test<A, B, C>(); if p.at(T!['(']) { parse_call_arguments(p).unwrap(); } Present(m.complete(p, JS_NEW_EXPRESSION)) } // test js super_expression // class Test extends B { // constructor() { // super(); // } // test() { // super.test(a, b); // super[1]; // } // } // // test_err js super_expression_err // class Test extends B { // test() { // super(); // super?.test(); // } // } // super(); fn parse_super_expression(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![super]) { return Absent; } let super_marker = p.start(); p.expect(T![super]); let mut super_expression = super_marker.complete(p, JS_SUPER_EXPRESSION); if p.at(T![?.]) { super_expression.change_kind(p, JS_BOGUS_EXPRESSION); p.error(p.err_builder( "Super doesn't support optional chaining as super can never be null", super_expression.range(p), )); } 
else if p.at(T!['(']) && !p.state().in_constructor() { p.error(p.err_builder( "`super` is only valid inside of a class constructor of a subclass.", super_expression.range(p), )); super_expression.change_kind(p, JS_BOGUS_EXPRESSION); } match p.cur() { T![.] | T!['['] | T!['('] | T![?.] => Present(super_expression), _ => parse_static_member_expression(p, super_expression, T![.]), } } // test js subscripts // foo`bar` // foo(bar)(baz)(baz)[bar] /// A static member expression for accessing a property // test js static_member_expression // foo.bar // foo.await // foo.yield // foo.for // foo?.for // foo?.bar // class Test { // #bar // test(other) { // this.#bar; // this?.#bar; // other.#bar; // other?.#bar; // } // } fn parse_static_member_expression( p: &mut JsParser, lhs: CompletedMarker, operator: JsSyntaxKind, ) -> ParsedSyntax { // test ts ts_instantiation_expression_property_access // f<b>?.(c); // f<b>?.[c]; // (f<b>).c; // (f<b>)?.c; // (f<b>)?.[c]; if lhs.kind(p) == TS_INSTANTIATION_EXPRESSION { // test_err ts ts_instantiation_expression_property_access // f<b>.c; // f<b>?.c; // a?.f<c>.d; // f<a>.g<b>; p.error(p.err_builder( "An instantiation expression cannot be followed by a property access.", lhs.range(p), ).hint("You can either wrap the instantiation expression in parentheses, or delete the type arguments.")); } let m = lhs.precede(p); p.expect(operator); parse_any_name(p).or_add_diagnostic(p, expected_identifier); Present(m.complete(p, JS_STATIC_MEMBER_EXPRESSION)) } pub(super) fn parse_private_name(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![#]) { return Absent; } let m = p.start(); let hash_end = p.cur_range().end(); p.expect(T![#]); if (is_nth_at_identifier_or_keyword(p, 0)) && hash_end != p.cur_range().start() { // test_err js private_name_with_space // class A { // # test; // } p.error( p.err_builder( "Unexpected space or comment between `#` and identifier", hash_end..p.cur_range().start(), ) .hint("remove the space here"), ); 
Present(m.complete(p, JS_BOGUS)) } else { if p.cur().is_keyword() { p.bump_remap(T![ident]); } else if p.at(T![ident]) { p.bump(T![ident]); } else { p.error(expected_identifier(p, p.cur_range())); } Present(m.complete(p, JS_PRIVATE_NAME)) } } pub(super) fn parse_any_name(p: &mut JsParser) -> ParsedSyntax { match p.cur() {
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
true
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/program.rs
crates/rome_js_parser/src/syntax/program.rs
//! Top level functions for parsing a script or module, also includes module specific items. use super::module::parse_module_body; use super::stmt::parse_statements; use crate::prelude::*; use crate::state::{ChangeParserState, EnableStrictMode}; use crate::syntax::stmt::parse_directives; use crate::JsParser; use rome_js_syntax::JsSyntaxKind::*; use rome_js_syntax::ModuleKind; // test_err js unterminated_unicode_codepoint // let s = "\u{200"; pub(crate) fn parse(p: &mut JsParser) -> CompletedMarker { let m = p.start(); p.eat(JS_SHEBANG); let (statement_list, strict_snapshot) = parse_directives(p); let result = match p.source_type().module_kind() { ModuleKind::Script => { parse_statements(p, false, statement_list); m.complete(p, JS_SCRIPT) } ModuleKind::Module => { parse_module_body(p, statement_list); m.complete(p, JS_MODULE) } }; if let Some(strict_snapshot) = strict_snapshot { EnableStrictMode::restore(p.state_mut(), strict_snapshot); } result }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/pattern.rs
crates/rome_js_parser/src/syntax/pattern.rs
//! Provides traits for parsing pattern like nodes use crate::prelude::*; use crate::syntax::expr::{parse_assignment_expression_or_higher, ExpressionContext}; use crate::syntax::js_parse_error; use crate::ParsedSyntax::{Absent, Present}; use crate::{JsParser, ParseRecovery, ParsedSyntax}; use rome_js_syntax::JsSyntaxKind::{EOF, JS_ARRAY_HOLE}; use rome_js_syntax::{JsSyntaxKind, TextRange, T}; use rome_parser::ParserProgress; /// Trait for parsing a pattern with an optional default of the form `pattern = default` pub(crate) trait ParseWithDefaultPattern { /// The syntax kind of the node for a pattern with a default value fn pattern_with_default_kind() -> JsSyntaxKind; /// Creates a diagnostic for the case where the pattern is missing. For example, if the /// code only contains ` = default` fn expected_pattern_error(p: &JsParser, range: TextRange) -> ParseDiagnostic; /// Parses a pattern (without its default value) fn parse_pattern(&self, p: &mut JsParser) -> ParsedSyntax; /// Parses a pattern and wraps it in a pattern with default if a `=` token follows the pattern fn parse_pattern_with_optional_default(&self, p: &mut JsParser) -> ParsedSyntax { let pattern = self.parse_pattern(p); // test_err js js_invalid_assignment // ([=[(p[=[(p%]>([=[(p[=[( if p.at(T![=]) { let with_default = pattern.precede_or_add_diagnostic(p, Self::expected_pattern_error); p.bump_any(); // eat the = token // test js pattern_with_default_in_keyword // for ([a = "a" in {}] in []) {} parse_assignment_expression_or_higher(p, ExpressionContext::default()) .or_add_diagnostic(p, js_parse_error::expected_expression_assignment); Present(with_default.complete(p, Self::pattern_with_default_kind())) } else { pattern } } } /// Trait for parsing an array like pattern of the form `[a, b = "c", { }]` pub(crate) trait ParseArrayPattern<P: ParseWithDefaultPattern> { /// The kind of a bogus pattern. 
Used in case the pattern contains elements that aren't valid patterns fn bogus_pattern_kind() -> JsSyntaxKind; /// The kind of the array like pattern (array assignment or array binding) fn array_pattern_kind() -> JsSyntaxKind; /// The kind of the rest pattern fn rest_pattern_kind() -> JsSyntaxKind; /// The kind of the list fn list_kind() -> JsSyntaxKind; /// Creates a diagnostic saying that the parser expected an element at the position passed as an argument. fn expected_element_error(p: &JsParser, range: TextRange) -> ParseDiagnostic; /// Creates a pattern with default instance. Used to parse the array elements. fn pattern_with_default(&self) -> P; /// Tries to parse an array like pattern fn parse_array_pattern(&self, p: &mut JsParser) -> ParsedSyntax { if !p.at(T!['[']) { return Absent; } let m = p.start(); p.bump(T!['[']); let elements = p.start(); let mut progress = ParserProgress::default(); { while !p.at(EOF) && !p.at(T![']']) { progress.assert_progressing(p); let recovery = ParseRecovery::new( Self::bogus_pattern_kind(), token_set!(EOF, T![,], T![']'], T![=], T![;], T![...], T![')']), ) .enable_recovery_on_line_break(); let element = self.parse_any_array_element(p, &recovery); if element .or_recover(p, &recovery, Self::expected_element_error) .is_err() { // Failed to recover break; } if !p.at(T![']']) { p.expect(T![,]); } } } elements.complete(p, Self::list_kind()); p.expect(T![']']); Present(m.complete(p, Self::array_pattern_kind())) } /// Parses a single array element fn parse_any_array_element( &self, p: &mut JsParser, recovery: &ParseRecovery<JsSyntaxKind>, ) -> ParsedSyntax { match p.cur() { T![,] => Present(p.start().complete(p, JS_ARRAY_HOLE)), T![...] 
=> self .parse_rest_pattern(p) .map(|rest_pattern| validate_rest_pattern(p, rest_pattern, T![']'], recovery)), _ => self .pattern_with_default() .parse_pattern_with_optional_default(p), } } /// Parses a rest element fn parse_rest_pattern(&self, p: &mut JsParser) -> ParsedSyntax { if !p.at(T![...]) { return Absent; } let m = p.start(); let rest_end = p.cur_range().end(); p.bump(T![...]); let with_default = self.pattern_with_default(); with_default.parse_pattern(p).or_add_diagnostic(p, |p, _| { P::expected_pattern_error(p, TextRange::new(rest_end, rest_end)) }); Present(m.complete(p, Self::rest_pattern_kind())) } } /// Trait for parsing an object pattern like node of the form `{ a, b: c}` pub(crate) trait ParseObjectPattern { /// Kind used when recovering from invalid properties. fn bogus_pattern_kind() -> JsSyntaxKind; /// The kind of the pattern like node this trait parses fn object_pattern_kind() -> JsSyntaxKind; /// The kind of the property list fn list_kind() -> JsSyntaxKind; /// Creates a diagnostic saying that a property is expected at the passed in range that isn't present. 
fn expected_property_pattern_error(p: &JsParser, range: TextRange) -> ParseDiagnostic; /// Parses the object pattern like node fn parse_object_pattern(&self, p: &mut JsParser) -> ParsedSyntax { if !p.at(T!['{']) { return Absent; } let m = p.start(); p.bump(T!['{']); let elements = p.start(); let mut progress = ParserProgress::default(); while !p.at(T!['}']) { progress.assert_progressing(p); if p.at(T![,]) { // missing element p.error(Self::expected_property_pattern_error(p, p.cur_range())); p.bump_any(); // bump , continue; } let recovery_set = ParseRecovery::new( Self::bogus_pattern_kind(), token_set!(EOF, T![,], T!['}'], T![...], T![;], T![')'], T![=]), ) .enable_recovery_on_line_break(); let pattern = self.parse_any_property_pattern(p, &recovery_set); if pattern .or_recover(p, &recovery_set, Self::expected_property_pattern_error) .is_err() { break; } if !p.at(T!['}']) { p.expect(T![,]); } } elements.complete(p, Self::list_kind()); p.expect(T!['}']); Present(m.complete(p, Self::object_pattern_kind())) } /// Parses a single property fn parse_any_property_pattern( &self, p: &mut JsParser, recovery: &ParseRecovery<JsSyntaxKind>, ) -> ParsedSyntax { if p.at(T![...]) { self.parse_rest_property_pattern(p) .map(|rest_pattern| validate_rest_pattern(p, rest_pattern, T!['}'], recovery)) } else { self.parse_property_pattern(p) } } /// Parses a shorthand `{ a }` or a "named" `{ a: b }` property fn parse_property_pattern(&self, p: &mut JsParser) -> ParsedSyntax; /// Parses a rest property `{ ...a }` fn parse_rest_property_pattern(&self, p: &mut JsParser) -> ParsedSyntax; } /// Validates if the parsed completed rest marker is a valid rest element inside of a /// array or object assignment target and converts it to an bogus assignment target if not. 
/// A rest element must be: /// /// * the last element /// * not followed by a trailing comma /// * not have a default value fn validate_rest_pattern( p: &mut JsParser, mut rest: CompletedMarker, end_token: JsSyntaxKind, recovery: &ParseRecovery<JsSyntaxKind>, ) -> CompletedMarker { if p.at(end_token) { return rest; } if p.at(T![=]) { let kind = rest.kind(p); let rest_range = rest.range(p); let rest_marker = rest.undo_completion(p); let default_start = p.cur_range().start(); p.bump(T![=]); if let Ok(recovered) = recovery.recover(p) { recovered.undo_completion(p).abandon(p); // append recovered content to parent } p.error( p.err_builder( "rest element cannot have a default", default_start..p.cur_range().start(), ) .detail( default_start..p.cur_range().start(), "Remove the default value here", ) .detail(rest_range, "Rest element"), ); let mut invalid = rest_marker.complete(p, kind); invalid.change_to_bogus(p); invalid } else if p.at(T![,]) && p.nth_at(1, end_token) { p.error( p.err_builder("rest element may not have a trailing comma", p.cur_range()) .detail(p.cur_range(), "Remove the trailing comma here") .detail(rest.range(p), "Rest element"), ); rest.change_to_bogus(p); rest } else { p.error( p.err_builder("rest element must be the last element", rest.range(p),) .hint( format!( "Move the rest element to the end of the pattern, right before the closing '{}'", end_token.to_string().unwrap(), ), ), ); rest.change_to_bogus(p); rest } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/module.rs
crates/rome_js_parser/src/syntax/module.rs
//! Implements the parsing logic for ES Module syntax use crate::lexer::TextSize; use crate::prelude::*; use crate::state::{EnterAmbientContext, ExportDefaultItem, ExportDefaultItemKind}; use crate::syntax::binding::{ is_at_identifier_binding, is_nth_at_identifier_binding, parse_binding, parse_identifier_binding, }; use crate::syntax::class::{ empty_decorator_list, is_at_export_class_declaration, is_at_export_default_class_declaration, is_at_ts_abstract_class_declaration, parse_class_declaration, parse_class_export_default_declaration, parse_decorators, }; use crate::syntax::expr::{ is_at_expression, is_nth_at_reference_identifier, parse_assignment_expression_or_higher, parse_name, parse_reference_identifier, ExpressionContext, }; use crate::syntax::function::{parse_function_export_default_declaration, LineBreak}; use crate::syntax::js_parse_error::{ decorators_not_allowed, duplicate_assertion_keys_error, expected_binding, expected_declaration, expected_export_clause, expected_export_default_declaration, expected_export_name_specifier, expected_expression, expected_identifier, expected_literal_export_name, expected_module_source, expected_named_import, expected_named_import_specifier, expected_statement, }; use crate::syntax::stmt::{parse_statement, semi, StatementContext, STMT_RECOVERY_SET}; use crate::syntax::typescript::ts_parse_error::ts_only_syntax_error; use crate::syntax::typescript::{ parse_ts_enum_declaration, parse_ts_import_equals_declaration_rest, parse_ts_interface_declaration, }; use crate::JsSyntaxFeature::TypeScript; use crate::{Absent, JsParser, ParseRecovery, ParsedSyntax, Present}; use rome_js_syntax::JsSyntaxKind::*; use rome_js_syntax::{JsSyntaxKind, TextRange, T}; use rome_parser::diagnostic::{expected_any, expected_node}; use rome_parser::parse_lists::ParseSeparatedList; use rome_parser::parse_recovery::RecoveryResult; use rome_parser::ParserProgress; use std::collections::HashMap; use super::auxiliary::{is_nth_at_declaration_clause, 
parse_declaration_clause}; // test js module // import a from "b"; // export { a }; // c(); // import { c } from "c"; pub(crate) fn parse_module_body(p: &mut JsParser, statement_list: Marker) { parse_module_item_list(p, ModuleItemListParent::Module, statement_list); } pub(crate) enum ModuleItemListParent { Module, Block, } impl ModuleItemListParent { fn is_module(&self) -> bool { matches!(self, ModuleItemListParent::Module) } #[inline] fn is_at_list_end(&self, p: &JsParser) -> bool { if p.at(EOF) { return true; } match self { ModuleItemListParent::Block => p.at(T!['}']), _ => false, } } } pub(crate) fn parse_module_item_list( p: &mut JsParser, parent: ModuleItemListParent, list_marker: Marker, ) { let mut progress = ParserProgress::default(); let recovery_set = if parent.is_module() { STMT_RECOVERY_SET } else { // test_err ts module_closing_curly // declare module A { // "name": "troublesome-lib", // "typings": "lib/index.d.ts", // "version": "0.0.1" // } // don't eat the closing `}` if inside a block STMT_RECOVERY_SET.union(token_set!(T!['}'])) }; while !parent.is_at_list_end(p) { progress.assert_progressing(p); let module_item = parse_module_item(p); let recovered = module_item.or_recover( p, &ParseRecovery::new(JS_BOGUS_STATEMENT, recovery_set), expected_statement, ); if recovered.is_err() { break; } } list_marker.complete(p, JS_MODULE_ITEM_LIST); } fn parse_module_item(p: &mut JsParser) -> ParsedSyntax { match p.cur() { T![import] if !token_set![T![.], T!['(']].contains(p.nth(1)) => { parse_import_or_import_equals_declaration(p) } T![export] => parse_export(p, Absent), T![@] => { let decorator_list = parse_decorators(p); match p.cur() { T![export] if is_at_export_class_declaration(p) || is_at_export_default_class_declaration(p) => { // test js decorator_export_top_level // @decorator // export class Foo { } // @first.field @second @(() => decorator)() // export class Bar {} // @before // export @after class Foo { } // @before // export abstract class Foo { } // 
@before // export @after abstract class Foo { } // test ts decorator_export_default_top_level_1 // @decorator // export default class Foo { } // test ts decorator_export_default_top_level_2 // @first.field @second @(() => decorator)() // export default class Bar {} // test ts decorator_export_default_top_level_3 // @before // export default @after class Foo { } // test ts decorator_export_default_top_level_4 // @before // export default abstract class Foo { } // test ts decorator_export_default_top_level_5 // @before // export default @after abstract class Foo { } parse_export(p, decorator_list) } T![class] => { // test js decorator_class_declaration_top_level // @decorator // class Foo { } // @first.field @second @(() => decorator)() // class Bar {} parse_class_declaration(p, decorator_list, StatementContext::StatementList) } T![abstract] if is_at_ts_abstract_class_declaration(p, LineBreak::DoCheck) => { // test ts decorator_abstract_class_declaration_top_level // @decorator abstract class A {} // @first.field @second @(() => decorator)() // abstract class Bar {} TypeScript.parse_exclusive_syntax( p, |p| { parse_class_declaration( p, decorator_list, StatementContext::StatementList, ) }, |p, abstract_class| { ts_only_syntax_error(p, "abstract classes", abstract_class.range(p)) }, ) } _ => { // test_err js decorator_class_declaration_top_level // @decorator // let a; // @decorator1 @decorator2 // function Foo() { } decorator_list .add_diagnostic_if_present(p, decorators_not_allowed) .map(|mut marker| { marker.change_kind(p, JS_BOGUS_STATEMENT); marker }); parse_module_item(p) } } } _ => parse_statement(p, StatementContext::StatementList), } } pub(crate) fn parse_import_or_import_equals_declaration(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![import]) { return Absent; } let start = p.cur_range().start(); let import = p.start(); p.bump(T![import]); debug_assert!(p.state().name_map.is_empty()); p.state_mut().duplicate_binding_parent = Some("import"); let statement = 
if is_at_identifier_binding(p) && (p.nth_at(1, T![=]) || p.nth_at(2, T![=])) { let import_equals = parse_ts_import_equals_declaration_rest(p, import, start); TypeScript.exclusive_syntax(p, import_equals, |p, decl| { ts_only_syntax_error(p, "'import =' declarations", decl.range(p)) }) } else { // test_err js import_err // import; // import *; // import * as c, { a, b } from "c"; // import { aa + bb, dd } from "c"; // import { ab, ac } from "c"; // import { default } from "c"; // import { "a" } from "c"; // import { as x } from "c"; // import 4 from "c"; // import y from 4; parse_import_clause(p).or_add_diagnostic(p, |p, range| { expected_any( &["default import", "namespace import", "named import"], range, ) .into_diagnostic(p) }); let end = p.cur_range().start(); semi(p, TextRange::new(start, end)); Present(import.complete(p, JS_IMPORT)) }; p.state_mut().duplicate_binding_parent = None; p.state_mut().name_map.clear(); statement } // test js import_default_clause // import foo from "test"; fn parse_import_clause(p: &mut JsParser) -> ParsedSyntax { if p.at(JS_STRING_LITERAL) { return parse_import_bare_clause(p); } let pos = p.source().position(); let m = p.start(); // test ts ts_import_clause_types // import type from "./mod"; // not a type // import type foo from "./mod"; // import type * as foo2 from "./mod"; // import type { foo3 } from "mod"; let is_typed = p.at(T![type]) && (matches!(p.nth(1), T![*] | T!['{']) || (is_nth_at_identifier_binding(p, 1) && !p.nth_at(1, T![from]))); if is_typed { p.eat(T![type]); } let clause = match p.cur() { T![*] => parse_import_namespace_clause_rest(p, m), T!['{'] => parse_import_named_clause_rest(p, m), _ if is_at_identifier_binding(p) => { parse_identifier_binding(p).unwrap(); parse_import_default_or_named_clause_rest(p, m, is_typed) } _ => { // SAFETY: Safe because the parser only eats the "type" keyword if it's followed by // either a *, {, or binding debug_assert_eq!(pos, p.source().position()); m.abandon(p); return Absent; } 
}; if is_typed { TypeScript.exclusive_syntax(p, clause, |p, clause| { ts_only_syntax_error(p, "'import type'", clause.range(p)) }) } else { Present(clause) } } /// Parses the rest of an import named or default clause. /// Rest meaning, everything after `type binding` fn parse_import_default_or_named_clause_rest( p: &mut JsParser, m: Marker, is_typed: bool, ) -> CompletedMarker { match p.cur() { T![,] | T!['{'] => { p.expect(T![,]); let default_specifier = m.complete(p, JS_DEFAULT_IMPORT_SPECIFIER); let default_start = default_specifier.range(p).start(); let named_clause = default_specifier.precede(p); parse_named_import(p).or_add_diagnostic(p, expected_named_import); if is_typed { let end = p.last_end().unwrap_or_else(|| p.cur_range().start()); // test_err ts ts_typed_default_import_with_named // import type A, { B, C } from './a'; p.error(p.err_builder("A type-only import can specify a default import or named bindings, but not both.", default_start..end,)) } p.expect(T![from]); parse_module_source(p).or_add_diagnostic(p, expected_module_source); parse_import_assertion(p).ok(); named_clause.complete(p, JS_IMPORT_NAMED_CLAUSE) } _ => { p.expect(T![from]); parse_module_source(p).or_add_diagnostic(p, expected_module_source); parse_import_assertion(p).ok(); m.complete(p, JS_IMPORT_DEFAULT_CLAUSE) } } } // test js import_bare_clause // import "test"; // import "no_semicolon" fn parse_import_bare_clause(p: &mut JsParser) -> ParsedSyntax { parse_module_source(p).map(|module_source| { let m = module_source.precede(p); parse_import_assertion(p).ok(); m.complete(p, JS_IMPORT_BARE_CLAUSE) }) } // test js import_decl // import * as foo from "bla"; fn parse_import_namespace_clause_rest(p: &mut JsParser, m: Marker) -> CompletedMarker { p.expect(T![*]); p.expect(T![as]); parse_binding(p).or_add_diagnostic(p, expected_binding); p.expect(T![from]); parse_module_source(p).or_add_diagnostic(p, expected_module_source); parse_import_assertion(p).ok(); m.complete(p, 
JS_IMPORT_NAMESPACE_CLAUSE) } // test js import_named_clause // import {} from "a"; // import { a, b, c, } from "b"; // import e, { f } from "b"; // import g, * as lorem from "c"; // import { f as x, default as w, "a-b-c" as y } from "b"; fn parse_import_named_clause_rest(p: &mut JsParser, m: Marker) -> CompletedMarker { parse_default_import_specifier(p).ok(); parse_named_import(p).or_add_diagnostic(p, expected_named_import); p.expect(T![from]); parse_module_source(p).or_add_diagnostic(p, expected_module_source); parse_import_assertion(p).ok(); m.complete(p, JS_IMPORT_NAMED_CLAUSE) } fn parse_default_import_specifier(p: &mut JsParser) -> ParsedSyntax { parse_binding(p).map(|binding| { let m = binding.precede(p); p.expect(T![,]); m.complete(p, JS_DEFAULT_IMPORT_SPECIFIER) }) } fn parse_named_import(p: &mut JsParser) -> ParsedSyntax { match p.cur() { T![*] => parse_namespace_import_specifier(p), _ => parse_named_import_specifier_list(p), } } fn parse_namespace_import_specifier(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![*]) { return Absent; } let m = p.start(); p.bump_any(); p.expect(T![as]); parse_binding(p).or_add_diagnostic(p, expected_binding); Present(m.complete(p, JS_NAMESPACE_IMPORT_SPECIFIER)) } fn parse_named_import_specifier_list(p: &mut JsParser) -> ParsedSyntax { if !p.at(T!['{']) { return Absent; } let m = p.start(); p.bump(T!['{']); NamedImportSpecifierList.parse_list(p); p.expect(T!['}']); Present(m.complete(p, JS_NAMED_IMPORT_SPECIFIERS)) } struct NamedImportSpecifierList; impl ParseSeparatedList for NamedImportSpecifierList { type Kind = JsSyntaxKind; type Parser<'source> = JsParser<'source>; const LIST_KIND: Self::Kind = JS_NAMED_IMPORT_SPECIFIER_LIST; fn parse_element(&mut self, p: &mut JsParser) -> ParsedSyntax { parse_any_named_import_specifier(p) } fn is_at_list_end(&self, p: &mut JsParser) -> bool { p.at(T!['}']) } fn recover(&mut self, p: &mut JsParser, parsed_element: ParsedSyntax) -> RecoveryResult { parsed_element.or_recover( p, 
&ParseRecovery::new( JS_BOGUS_NAMED_IMPORT_SPECIFIER, STMT_RECOVERY_SET.union(token_set![T![,], T!['}'], T![;]]), ) .enable_recovery_on_line_break(), expected_named_import_specifier, ) } fn separating_element_kind(&mut self) -> JsSyntaxKind { T![,] } fn allow_trailing_separating_element(&self) -> bool { true } } // test ts ts_named_import_specifier_with_type // import { type, type as } from "./mod"; // import { type as other } from "./mod" // import { type as as } from "./mod"; // import { type as as as } from "./mod" // import { type "test-abcd" as test } from "./mod"; // // test_err ts ts_named_import_specifier_error // import { default } from "./mod"; // import { type default } from "./mod"; // import { "literal-name" } from "./mod"; // import { type "literal-name" } from "./mod"; // import { fn parse_any_named_import_specifier(p: &mut JsParser) -> ParsedSyntax { if !is_nth_at_literal_export_name(p, 0) { // covers `type` and `as` too return Absent; } let m = p.start(); let metadata = specifier_metadata( p, is_nth_at_literal_export_name, is_nth_at_identifier_binding, ); if metadata.is_type { p.expect(T![type]); } let specifier = if metadata.has_alias || p.at(JS_STRING_LITERAL) || p.cur().is_non_contextual_keyword() { if metadata.is_local_name_missing { // test_err js import_as_identifier_err // import { as c } from "test"; p.error(expected_literal_export_name( p, TextRange::new(p.cur_range().start(), p.cur_range().start()), )); } else { // test js import_as_as_as_identifier // import { as as as } from "test"; parse_literal_export_name(p).or_add_diagnostic(p, expected_literal_export_name); } p.expect(T![as]); parse_binding(p).or_add_diagnostic(p, expected_binding); m.complete(p, JS_NAMED_IMPORT_SPECIFIER) } else { // test js import_as_identifier // import { as } from "test"; parse_binding(p).or_add_diagnostic(p, expected_identifier); m.complete(p, JS_SHORTHAND_NAMED_IMPORT_SPECIFIER) }; if metadata.is_type { TypeScript.exclusive_syntax(p, specifier, |p, specifier| 
{ ts_only_syntax_error(p, "'import { type x ident }'", specifier.range(p)) }) } else { Present(specifier) } } // test js import_assertion // import "x" assert { type: "json" } // import "foo" assert { "type": "json" }; // import foo from "foo.json" assert { type: "json" }; // import {test} from "foo.json" assert { for: "for" } // import foo_json from "foo.json" assert { type: "json", hasOwnProperty: "true" }; // import "x" assert // { type: "json" } // test js import_attribute // import "x" with { type: "json" } // import "foo" with { "type": "json" }; // import foo from "foo.json" with { type: "json" }; // import {test} from "foo.json" with { for: "for" } // import foo_json from "foo.json" with { type: "json", hasOwnProperty: "true" }; // import "x" with // { type: "json" } // test_err js import_assertion_err // import "foo" assert { type, "json" }; // import "bar" \u{61}ssert { type: "json" }; // import { foo } assert { type: "json" }; // import "lorem" // assert { type: "json" } // import foo2 from "foo.json" assert { "type": "json", type: "html", "type": "js" }; // import "x" assert; // import ipsum from "ipsum.json" assert { type: "json", lazy: true, startAtLine: 1 }; // import { a } from "a.json" assert // test_err js import_attribute_err // import "foo" with { type, "json" }; // import { foo } with { type: "json" }; // import "lorem" // assert { type: "json" } // import foo2 from "foo.json" with { "type": "json", type: "html", "type": "js" }; // import "x" with; // import ipsum from "ipsum.json" with { type: "json", lazy: true, startAtLine: 1 }; // import { a } from "a.json" with fn parse_import_assertion(p: &mut JsParser) -> ParsedSyntax { if p.has_preceding_line_break() { return Absent; } if !p.at(T![assert]) && !p.at(T![with]) { return Absent; } let m = p.start(); match p.cur() { T![assert] => { p.expect(T![assert]); } T![with] => { p.expect(T![with]); } _ => { m.abandon(p); return Absent; } }; // bump assert or with p.expect(T!['{']); 
ImportAssertionList::default().parse_list(p); p.expect(T!['}']); Present(m.complete(p, JS_IMPORT_ASSERTION)) } #[derive(Default)] struct ImportAssertionList { assertion_keys: HashMap<String, TextRange>, } impl ParseSeparatedList for ImportAssertionList { type Kind = JsSyntaxKind; type Parser<'source> = JsParser<'source>; const LIST_KIND: Self::Kind = JS_IMPORT_ASSERTION_ENTRY_LIST; fn parse_element(&mut self, p: &mut JsParser) -> ParsedSyntax { parse_import_assertion_entry(p, &mut self.assertion_keys) } fn is_at_list_end(&self, p: &mut JsParser) -> bool { p.at(T!['}']) } fn recover(&mut self, p: &mut JsParser, parsed_element: ParsedSyntax) -> RecoveryResult { parsed_element.or_recover( p, &ParseRecovery::new( JS_BOGUS_IMPORT_ASSERTION_ENTRY, STMT_RECOVERY_SET.union(token_set![T![,], T!['}']]), ) .enable_recovery_on_line_break(), |p, range| expected_node("import assertion entry", range).into_diagnostic(p), ) } fn separating_element_kind(&mut self) -> JsSyntaxKind { T![,] } fn allow_trailing_separating_element(&self) -> bool { true } } fn parse_import_assertion_entry( p: &mut JsParser, seen_assertion_keys: &mut HashMap<String, TextRange>, ) -> ParsedSyntax { let m = p.start(); let key_range = p.cur_range(); let key = match p.cur() { JS_STRING_LITERAL => Some(p.cur_text().trim_matches(&['\'', '"'][..])), T![ident] => Some(p.cur_text()), t if t.is_keyword() => Some(p.cur_text()), _ => None, } .map(String::from); match p.cur() { JS_STRING_LITERAL | T![ident] => { p.bump_any(); } t if t.is_keyword() => { p.bump_remap(T![ident]); } T![:] => { p.error( expected_any(&["identifier", "string literal"], p.cur_range()).into_diagnostic(p), ); } _ => { m.abandon(p); return Absent; } }; let mut valid = true; if let Some(key) = key { if let Some(first_use) = seen_assertion_keys.get(&key) { p.error(duplicate_assertion_keys_error( p, &key, first_use.to_owned(), key_range, )); valid = false; } else { seen_assertion_keys.insert(key, key_range); } }; p.expect(T![:]); 
p.expect(JS_STRING_LITERAL); let mut entry = m.complete(p, JS_IMPORT_ASSERTION_ENTRY); if !valid { entry.change_to_bogus(p); } Present(entry) } // test_err js export_err // export // // test_err js ts_export_syntax_in_js // let a, b, c; // export type { a }; // export { type b }; // export { type c as cc }; // export type { d } from "./d"; // export { type e } from "./e"; // export { type e as ee } from "./e"; // test_err js export_huge_function_in_script // // SCRIPT // export function A () { return "Kinsmen hot Moria tea serves. Sticky camp spell covering forged they're Oakenshield vines. Admirable relatives march regained wheel Ere eternally on rest parts unhappy? Leave hundreds market's Argonath answered avail grieve doing goodness! Wrong miserable well-wishers wander stood immediately neither Agreed goat poison holes fire? Nobody tosses a Dwarf. Brigands Bilbo Baggins prisoner stinker birthday injuries. Kili's loosened shy spiders till. Gandalf's death was not in vain. Nor would he have you give up hope. Bread kindly ghost Beorn's jelly. Andûril two-faced bitterness biding seemed says drinking splendor feed light unnoticed one! Carven nearest Eärendil fireworks former. Mattress smelling wandering teaching appear taste wise Mithril uprooted winter forebearers wheel. Let's beside Proudfoots succumbed! Excuse Anárion stolen helpless nudge study shown holding form? Changes point Snowbourn material side outer highest eaves flash-flame relic descendant lurking. Thousand death Agreed oppose whole? Glóin head's hurts feasting fight shiny legacy. Thror's broken odds suffice believe well-protected? Rightfully manners begged Maggot's fairer. Unheard-of grog shields sad wondering gardener killed gone Galadriel! Pan Frodo fingers spreads magic parting amount interest idly naked. It's some form of Elvish. I can't read it. 
Silverwork Wraiths riddled enchantment apple anywhere."; } pub(super) fn parse_export(p: &mut JsParser, decorators_list: ParsedSyntax) -> ParsedSyntax { if !p.at(T![export]) { return Absent; } let stmt_start = p.cur_range().start(); let decorators_list = decorators_list.or_else(|| empty_decorator_list(p)); let m = decorators_list.precede(p); p.bump(T![export]); let clause = if is_nth_at_declaration_clause(p, 0) { // test js export_class_clause // export class A {} // export class A extends B {} // test js export_function_clause // export function test(a, b) {} // export function* test2(a, b) {} // export async function test3(a, b, ) {} // test ts ts_export_enum_declaration // export enum A { X, Y } // export const enum B { X, Y } // test ts ts_export_interface_declaration // export interface A {} parse_declaration_clause(p, stmt_start) } else { match p.cur() { T!['{'] => parse_export_named_or_named_from_clause(p), T![default] => parse_export_default_clause(p), T![type] if p.nth_at(1, T!['{']) => parse_export_named_or_named_from_clause(p), T![type] if p.nth_at(1, T![*]) => parse_export_from_clause(p), T![*] => parse_export_from_clause(p), T![=] => TypeScript.parse_exclusive_syntax( p, parse_ts_export_assignment_clause, |p, clause| ts_only_syntax_error(p, "'export ='", clause.range(p)), ), T![from] => parse_export_from_clause(p), T![as] if p.nth_at(1, T![namespace]) => TypeScript.parse_exclusive_syntax( p, parse_ts_export_namespace_clause, |p, clause| ts_only_syntax_error(p, "'export as namespace'", clause.range(p)), ), T![declare] if !p.has_nth_preceding_line_break(1) => TypeScript.parse_exclusive_syntax( p, |p| parse_ts_export_declare_clause(p, stmt_start), |p, clause| ts_only_syntax_error(p, "'export declare'", clause.range(p)), ), _ if p.nth_at(1, T![from]) => parse_export_from_clause(p), _ => Absent, } }; clause.or_add_diagnostic(p, expected_export_clause); Present(m.complete(p, JS_EXPORT)) } fn parse_export_named_or_named_from_clause(p: &mut JsParser) -> 
ParsedSyntax { let checkpoint = p.checkpoint(); match parse_export_named_clause(p) { Present(_) if p.at(T![from]) => { p.rewind(checkpoint); parse_export_named_from_clause(p) } t => t, } } // test js export_named_clause // export { a }; // export { a as z, b as "y", c as default } // export { as }; // // test_err js export_named_clause_err // export { default as "b" }; // export { "a" as b }; // export { as b }; // export { a as 5 }; // export { a b c }; // // test ts ts_export_type_named // type A = string; // export type { A }; // // export { // // test_err ts ts_export_type // export type fn parse_export_named_clause(p: &mut JsParser) -> ParsedSyntax { if !matches!(p.cur(), T!['{'] | T![type]) { return Absent; } let start = p.cur_range().start(); let m = p.start(); let has_type = p.eat(T![type]); p.bump(T!['{']); ExportNamedSpecifierList.parse_list(p); p.expect(T!['}']); semi(p, TextRange::new(start, p.cur_range().start())); let clause = m.complete(p, JS_EXPORT_NAMED_CLAUSE); if has_type { TypeScript.exclusive_syntax(p, clause, |p, clause| { ts_only_syntax_error(p, "'export type' declarations", clause.range(p)) }) } else { Present(clause) } } struct ExportNamedSpecifierList; impl ParseSeparatedList for ExportNamedSpecifierList { type Kind = JsSyntaxKind; type Parser<'source> = JsParser<'source>; const LIST_KIND: Self::Kind = JS_EXPORT_NAMED_SPECIFIER_LIST; fn parse_element(&mut self, p: &mut JsParser) -> ParsedSyntax { parse_any_export_named_specifier(p) } fn is_at_list_end(&self, p: &mut JsParser) -> bool { p.at(T!['}']) } fn recover(&mut self, p: &mut JsParser, parsed_element: ParsedSyntax) -> RecoveryResult { parsed_element.or_recover( p, &ParseRecovery::new( JS_BOGUS, STMT_RECOVERY_SET.union(token_set![T![,], T!['}'], T![;]]), ) .enable_recovery_on_line_break(), expected_export_name_specifier, ) } fn separating_element_kind(&mut self) -> JsSyntaxKind { T![,] } fn allow_trailing_separating_element(&self) -> bool { true } } fn 
parse_any_export_named_specifier(p: &mut JsParser) -> ParsedSyntax { if !matches!(p.cur(), T![type] | T![as] | T![default]) && !is_nth_at_literal_export_name(p, 0) { return Absent; } let m = p.start(); let metadata = specifier_metadata( p, is_nth_at_reference_identifier, is_nth_at_literal_export_name, ); // test ts ts_export_named_type_specifier // export { type } // export { type type } // export { type as somethingElse } if metadata.is_type { p.expect(T![type]); } // test_err js export_as_identifier_err // export { as c } if metadata.is_local_name_missing { p.error(expected_identifier( p, TextRange::new(p.cur_range().start(), p.cur_range().start()), )); } else if is_nth_at_reference_identifier(p, 0) { parse_reference_identifier(p).ok(); } else { // We need to parse "default" or any string literal here so the "export ... from..." rewind later works. let is_string = matches!(p.cur(), JS_STRING_LITERAL); if let Some(export_name) = parse_literal_export_name(p).or_add_diagnostic(p, expected_identifier) { let error = if is_string { p.err_builder( "A string literal cannot be used as an export binding without `from`.", export_name.range(p), ) } else { p.err_builder( format!( "\"{}\" can only be used with \"export ... 
from ...\"", export_name.text(p) ), export_name.range(p), ) }; p.error(error); } } // test js export_as_identifier // export { as }; // export { as as as } // let specifier = if metadata.has_alias { p.expect(T![as]); parse_literal_export_name(p).or_add_diagnostic(p, expected_literal_export_name); m.complete(p, JS_EXPORT_NAMED_SPECIFIER) } else { m.complete(p, JS_EXPORT_NAMED_SHORTHAND_SPECIFIER) }; if metadata.is_type { TypeScript.exclusive_syntax(p, specifier, |p, specifier| { ts_only_syntax_error(p, "export { type ident }'", specifier.range(p)) }) } else { Present(specifier) } } #[derive(Default, Debug)] struct SpecifierMetadata { // Is this a type export (`export { type test }`) or a regular value export (`export { test }`) is_type: bool, // Is this an aliased export (`export { t as test }`) or not has_alias: bool, // For error recovery in case the local name is missing: `export { as test }` is_local_name_missing: bool, } // test ts ts_export_type_specifier // let as; // let type; // let a; // export { type }; // export { type as as }; // export { as as as }; // export { type as as as } // export { type type }; // export { type as }; // export { type a as aa }; fn specifier_metadata<LocalNamePred, AliasPred>( p: &mut JsParser, is_nth_name: LocalNamePred, is_nth_alias: AliasPred, ) -> SpecifierMetadata where LocalNamePred: Fn(&mut JsParser, usize) -> bool, AliasPred: Fn(&mut JsParser, usize) -> bool, { let mut metadata = SpecifierMetadata::default(); // This may be a typed import/export, but it could also be the name of the import/export: // ```ts // { type} // name: `type` // { type type } // name: `type` type-export: `true`
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
true
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/typescript.rs
crates/rome_js_parser/src/syntax/typescript.rs
//! TypeScript specific functions. use crate::prelude::*; mod statement; pub mod ts_parse_error; mod types; use crate::syntax::expr::{parse_identifier, parse_unary_expr, ExpressionContext}; use crate::syntax::js_parse_error::expected_expression; use crate::syntax::typescript::ts_parse_error::expected_ts_type; use crate::{Absent, JsParser, ParsedSyntax, Present}; use rome_js_syntax::{JsSyntaxKind::*, *}; use rome_parser::diagnostic::expected_token_any; use rome_rowan::SyntaxKind; pub(crate) use self::statement::*; use self::ts_parse_error::ts_member_cannot_be; pub(crate) use self::types::*; use super::binding::parse_identifier_binding; use super::class::is_nth_at_modifier; use super::expr::is_nth_at_identifier; use super::js_parse_error::expected_identifier; use super::stmt::optional_semi; pub(crate) enum TsIdentifierContext { Module, /// Inside of an `Interface` or `Type` declaration Type, } impl TsIdentifierContext { fn is_reserved_word(&self, name: &str) -> bool { match self { TsIdentifierContext::Module => is_reserved_module_name(name), TsIdentifierContext::Type => is_reserved_type_name(name), } } } fn parse_ts_identifier_binding( p: &mut JsParser, ts_identifier_context: TsIdentifierContext, ) -> ParsedSyntax { parse_identifier(p, TS_IDENTIFIER_BINDING).map(|mut ident| { if ident.kind(p).is_bogus() { return ident; } let name = p.text(ident.range(p)); let is_reserved_word_this_context = ts_identifier_context.is_reserved_word(name); if is_reserved_word_this_context { let error = p.err_builder(format!("Type alias cannot be {}", name), ident.range(p)); p.error(error); ident.change_to_bogus(p); } ident }) } // test ts ts_type_assertion_expression // let x = <const>"hello"; // let y = <string> x; // var d = <Error>({ name: "foo", message: "bar" }); pub(crate) fn parse_ts_type_assertion_expression( p: &mut JsParser, context: ExpressionContext, ) -> ParsedSyntax { if !p.at(T![<]) { return Absent; } let m = p.start(); p.bump(T![<]); parse_ts_type(p, 
TypeContext::default()).or_add_diagnostic(p, expected_ts_type); p.expect(T![>]); parse_unary_expr(p, context).or_add_diagnostic(p, expected_expression); Present(m.complete(p, TS_TYPE_ASSERTION_EXPRESSION)) } pub(crate) fn parse_ts_implements_clause(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![implements]) { return Absent; } // test_err js class_implements // class B implements C {} let m = p.start(); p.expect(T![implements]); expect_ts_type_list(p, "implements"); Present(m.complete(p, TS_IMPLEMENTS_CLAUSE)) } fn expect_ts_type_list(p: &mut JsParser, clause_name: &str) -> CompletedMarker { let list = p.start(); if parse_ts_name_with_type_arguments(p).is_absent() { p.error(p.err_builder( format!("'{}' list cannot be empty.", clause_name), p.cur_range().start()..p.cur_range().start(), )) } while p.at(T![,]) { let comma_range = p.cur_range(); p.bump(T![,]); // test_err ts ts_extends_trailing_comma // interface A {} // interface B extends A, {} if parse_ts_name_with_type_arguments(p).is_absent() { p.error(p.err_builder("Trailing comma not allowed.", comma_range)); break; } } list.complete(p, TS_TYPE_LIST) } fn parse_ts_name_with_type_arguments(p: &mut JsParser) -> ParsedSyntax { parse_ts_name(p).map(|name| { let m = name.precede(p); if !p.has_preceding_line_break() { parse_ts_type_arguments(p).ok(); } m.complete(p, TS_NAME_WITH_TYPE_ARGUMENTS) }) } pub(crate) fn try_parse<T, E>( p: &mut JsParser, func: impl FnOnce(&mut JsParser) -> Result<T, E>, ) -> Result<T, E> { let checkpoint = p.checkpoint(); let old_value = std::mem::replace(&mut p.state_mut().speculative_parsing, true); let res = func(p); p.state_mut().speculative_parsing = old_value; if res.is_err() { p.rewind(checkpoint); } res } /// Must be at `[ident:` or `<modifiers> [ident:` pub(crate) fn is_at_ts_index_signature_member(p: &mut JsParser) -> bool { let mut offset = 0; while is_nth_at_modifier(p, offset, false) { offset += 1; } if !p.nth_at(offset, T!['[']) { return false; } if !is_nth_at_identifier(p, 
offset + 1) { return false; } p.nth_at(offset + 2, T![:]) } #[derive(Clone, Copy)] pub(crate) enum MemberParent { Class, TypeOrInterface, } pub(crate) fn expect_ts_index_signature_member( p: &mut JsParser, m: Marker, parent: MemberParent, ) -> CompletedMarker { while is_nth_at_modifier(p, 0, false) { if p.eat(T![readonly]) { continue; } else { p.error(ts_member_cannot_be( p, p.cur_range(), "index signature", p.cur_text(), )); p.bump_any(); } } p.bump(T!['[']); let parameter = p.start(); parse_identifier_binding(p).or_add_diagnostic(p, expected_identifier); parse_ts_type_annotation(p).unwrap(); // It's a computed member name if the type annotation is missing parameter.complete(p, TS_INDEX_SIGNATURE_PARAMETER); p.expect(T![']']); parse_ts_type_annotation(p).or_add_diagnostic(p, |p, range| { p.err_builder("An index signature must have a type annotation", range) }); eat_members_separator(p, parent); m.complete( p, match parent { MemberParent::Class => TS_INDEX_SIGNATURE_CLASS_MEMBER, MemberParent::TypeOrInterface => TS_INDEX_SIGNATURE_TYPE_MEMBER, }, ) } fn eat_members_separator(p: &mut JsParser, parent: MemberParent) { let (comma, semi_colon) = match parent { MemberParent::Class => (false, true), MemberParent::TypeOrInterface => (true, true), }; debug_assert!(comma || semi_colon); let separator_eaten = comma && p.eat(T![,]); let separator_eaten = separator_eaten || (semi_colon && optional_semi(p)); if !separator_eaten { if semi_colon { let err = p .err_builder("';' expected'", p.cur_range()) .hint("An explicit or implicit semicolon is expected here..."); p.error(err); } else { let mut tokens = vec![]; if comma { tokens.push(T![,]); } if semi_colon { tokens.push(T![;]); } p.error(expected_token_any(&tokens)); } } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/class.rs
crates/rome_js_parser/src/syntax/class.rs
use crate::parser::{ParsedSyntax, RecoveryResult}; use crate::prelude::*; use crate::state::{ EnableStrictMode, EnterClassPropertyInitializer, EnterClassStaticInitializationBlock, EnterParameters, SignatureFlags, }; use crate::syntax::binding::parse_binding; use crate::syntax::expr::{ parse_assignment_expression_or_higher, parse_lhs_expr, parse_private_name, ExpressionContext, }; use crate::syntax::function::{ parse_any_parameter, parse_formal_parameter, parse_function_body, parse_parameter_list, parse_parameters_list, parse_ts_type_annotation_or_error, ParameterContext, }; use crate::syntax::js_parse_error; use crate::syntax::js_parse_error::{ decorator_must_precede_modifier, decorators_not_allowed, expected_binding, expected_expression, invalid_decorator_error, modifier_already_seen, modifier_cannot_be_used_with_modifier, modifier_must_precede_modifier, parameter_decorators_not_allowed, }; use crate::syntax::object::{ is_at_literal_member_name, parse_computed_member_name, parse_literal_member_name, }; use crate::syntax::stmt::{optional_semi, parse_statements, StatementContext}; use crate::syntax::typescript::ts_parse_error::{ ts_accessibility_modifier_already_seen, ts_accessor_type_parameters_error, ts_constructor_type_parameters_error, ts_modifier_cannot_appear_on_a_constructor_declaration, ts_modifier_cannot_appear_on_a_parameter, ts_only_syntax_error, ts_set_accessor_return_type_error, }; use crate::syntax::typescript::{ is_reserved_type_name, parse_ts_implements_clause, parse_ts_return_type_annotation, parse_ts_type_annotation, parse_ts_type_arguments, parse_ts_type_parameters, TypeContext, }; use crate::JsSyntaxFeature::TypeScript; use crate::ParsedSyntax::{Absent, Present}; use crate::{JsParser, StrictMode}; use bitflags::bitflags; use drop_bomb::DebugDropBomb; use rome_js_syntax::JsSyntaxKind::*; use rome_js_syntax::TextSize; use rome_js_syntax::{JsSyntaxKind, T}; use rome_parser::parse_lists::ParseNodeList; use rome_parser::parse_recovery::ParseRecovery; 
use rome_parser::ParserProgress; use rome_rowan::{SyntaxKind, TextRange}; use smallvec::SmallVec; use std::fmt::Debug; use std::ops::Add; use std::slice::Iter; use super::function::LineBreak; use super::js_parse_error::unexpected_body_inside_ambient_context; use super::typescript::ts_parse_error::{self, unexpected_abstract_member_with_body}; use super::typescript::{ expect_ts_index_signature_member, is_at_ts_index_signature_member, MemberParent, }; pub(crate) fn is_at_ts_abstract_class_declaration( p: &mut JsParser, should_check_line_break: LineBreak, ) -> bool { let is_abstract = p.at(T![abstract]) && p.nth_at(1, T![class]); if should_check_line_break == LineBreak::DoCheck { is_abstract && !p.has_nth_preceding_line_break(1) } else { is_abstract } } pub(crate) fn is_at_export_class_declaration(p: &mut JsParser) -> bool { p.at(T![export]) && (p.nth_at(1, T![class]) || p.nth_at(1, T![@]) || p.nth_at(1, T![abstract])) } pub(crate) fn is_at_export_default_class_declaration(p: &mut JsParser) -> bool { p.at(T![export]) && p.nth_at(1, T![default]) && (p.nth_at(2, T![class]) || p.nth_at(2, T![@]) || p.nth_at(2, T![abstract])) } /// Parses a class expression, e.g. 
let a = class {} pub(super) fn parse_class_expression( p: &mut JsParser, decorator_list: ParsedSyntax, ) -> ParsedSyntax { if !p.at(T![class]) { return Absent; } Present(parse_class(p, ClassKind::Expression, decorator_list)) } // test js class_declaration // class foo {} // class foo extends bar {} // class foo extends foo.bar {} // test_err js class_decl_err // class {} // class extends bar {} // class foo { set {} } // class extends {} // test ts ts_abstract_classes // abstract class A {} // abstract class ConcreteMembers { // name: string; // constructor(name: string) { this.name = name; } // display(): void { console.log(this.name); } // public get my_name() { return this.name; } // public set my_name(name) { this.name = name; } // #private_method() { } // } // abstract class AbstractMembers { // abstract name: string; // abstract display(); // abstract get my_name(); // abstract set my_name(val); // } // test_err ts typescript_abstract_classes_incomplete // abstract class {}; // test_err ts typescript_abstract_classes_invalid_abstract_constructor // abstract class A { abstract constructor();}; // test ts ts_decorate_computed_member // class Test { // @test // ['a']: string; // } // test ts ts_decorated_class_members // class Test { // @test prop: string; // @test method() {} // @test get getter() {} // @test set setter(a) {} // } // test_err ts ts_invalid_decorated_class_members // abstract class Test { // @test constructor() {} // @test declare prop; // @test method(); // @test [index: string]: string; // @test abstract method2(); // @test abstract get getter(); // @test abstract set setter(val); // } /// Parses a class declaration if it is valid and otherwise returns [Invalid]. 
/// /// A class can be invalid if /// * It uses an illegal identifier name pub(super) fn parse_class_declaration( p: &mut JsParser, decorator_list: ParsedSyntax, context: StatementContext, ) -> ParsedSyntax { if !matches!(p.cur(), T![abstract] | T![class]) { return Absent; } let mut class = parse_class(p, ClassKind::Declaration, decorator_list); if !class.kind(p).is_bogus() && context.is_single_statement() { // test_err js class_in_single_statement_context // if (true) class A {} p.error( p.err_builder( "Classes can only be declared at top level or inside a block", class.range(p), ) .hint("wrap the class in a block statement"), ); class.change_to_bogus(p) } Present(class) } // test js export_default_class_clause // export default class {} // test ts typescript_export_default_abstract_class_case // export default abstract class {} pub(super) fn parse_class_export_default_declaration( p: &mut JsParser, decorator_list: ParsedSyntax, ) -> ParsedSyntax { if !matches!(p.cur(), T![abstract] | T![class]) { return Absent; } Present(parse_class(p, ClassKind::ExportDefault, decorator_list)) } #[derive(Debug, Clone, Copy, Eq, PartialEq, Hash)] enum ClassKind { Declaration, Expression, ExportDefault, } impl ClassKind { fn is_id_optional(&self) -> bool { matches!(self, ClassKind::Expression | ClassKind::ExportDefault) } } impl From<ClassKind> for JsSyntaxKind { fn from(kind: ClassKind) -> Self { match kind { ClassKind::Declaration => JS_CLASS_DECLARATION, ClassKind::Expression => JS_CLASS_EXPRESSION, ClassKind::ExportDefault => JS_CLASS_EXPORT_DEFAULT_DECLARATION, } } } // test js class_named_abstract_is_valid_in_js // class abstract {} // test ts ts_class_named_abstract_is_valid_in_ts // class abstract {} #[inline] fn parse_class(p: &mut JsParser, kind: ClassKind, decorator_list: ParsedSyntax) -> CompletedMarker { let decorator_list = decorator_list.or_else(|| empty_decorator_list(p)); let m = decorator_list.precede(p); let is_abstract = p.eat(T![abstract]); let 
class_token_range = p.cur_range(); p.expect(T![class]); let p = &mut *p.with_scoped_state(EnableStrictMode(StrictMode::Class(p.cur_range()))); // test_err ts class_decl_no_id // class {} // class implements B {} let id = match p.cur() { T![implements] if TypeScript.is_supported(p) => Absent, T![extends] => Absent, _ => parse_binding(p), }; // parse class id match id { Present(id) => { let text = p.text(id.range(p)); if TypeScript.is_supported(p) && is_reserved_type_name(text) { let err = p .err_builder(format!( "`{}` cannot be used as a class name because it is already reserved as a type", text ),id.range(p), ); p.error(err); } } Absent => { if !kind.is_id_optional() { let err = p.err_builder( "class declarations must have a name", class_token_range.start()..p.cur_range().start(), ); p.error(err); } } } // test ts ts_class_type_parameters // class BuildError<A, B, C> {} TypeScript .parse_exclusive_syntax( p, |p| { parse_ts_type_parameters( p, TypeContext::default() .and_allow_in_out_modifier(true) .and_allow_const_modifier(true), ) }, |p, type_parameters| { ts_only_syntax_error(p, "class type parameters", type_parameters.range(p)) }, ) .ok(); eat_class_heritage_clause(p); p.expect(T!['{']); ClassMembersList { inside_abstract_class: is_abstract, } .parse_list(p); p.expect(T!['}']); m.complete(p, kind.into()) } // test_err js class_extends_err // class A extends bar extends foo {} // class B extends bar, foo {} // class C implements B {} // // test_err ts ts_class_heritage_clause_errors // class A {} // interface Int {} // class B implements Int extends A {} // class C implements Int implements Int {} // class D implements {} // class E extends {} // class F extends E, {} /// Eats a class's 'implements' and 'extends' clauses, attaching them to the current active node. 
/// Implements error recovery in case a class has multiple extends/implements clauses or if they appear /// out of order fn eat_class_heritage_clause(p: &mut JsParser) { let mut first_extends: Option<CompletedMarker> = None; let mut first_implements: Option<CompletedMarker> = None; loop { match p.cur() { T![extends] => { let current = parse_extends_clause(p).expect( "Expected extends clause because parser is positioned at extends keyword", ); match first_extends.as_ref() { None => { first_extends = { if let Some(first_implements) = first_implements.as_ref() { p.error( p.err_builder( "'extends' clause must precede 'implements' clause.", current.range(p), ) .detail( first_implements.range(p), "This is where implements was found", ), ) } Some(current) } } Some(first_extends) => p.error( p.err_builder("'extends' clause already seen.", current.range(p)) .detail(first_extends.range(p), "first 'extends' clause"), ), } } T![implements] => { let mut current = parse_ts_implements_clause(p).expect("expected 'implements' clause because parser is positioned at 'implements' keyword."); match first_implements.as_ref() { None => { first_implements = { if TypeScript.is_unsupported(p) { p.error(p.err_builder( "classes can only implement interfaces in TypeScript files", current.range(p), )); current.change_to_bogus(p); } Some(current) } } Some(first_implements) => { p.error( p.err_builder("'implements' clause already seen.", current.range(p)) .detail(first_implements.range(p), "first 'implements' clause"), ); } } } _ => break, } } } // test ts ts_extends_generic_type // type IHasVisualizationModel = string; // class D extends C<IHasVisualizationModel> { // x = "string"; // } fn parse_extends_clause(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![extends]) { return Absent; } let m = p.start(); let extends_end = p.cur_range().end(); p.expect(T![extends]); if parse_extends_expression(p).is_absent() { p.error(p.err_builder("'extends' list cannot be empty.", extends_end..extends_end)) } 
else { TypeScript .parse_exclusive_syntax(p, parse_ts_type_arguments, |p, arguments| { ts_only_syntax_error(p, "type arguments", arguments.range(p)) }) .ok(); } while p.at(T![,]) { let comma_range = p.cur_range(); p.bump(T![,]); let extra = p.start(); if parse_extends_expression(p).is_absent() { p.error(p.err_builder("Trailing comma not allowed.", comma_range)); extra.abandon(p); break; } parse_ts_type_arguments(p).ok(); let extra_class = extra.complete(p, JS_BOGUS); p.error(p.err_builder( "Classes can only extend a single class.", extra_class.range(p), )); } Present(m.complete(p, JS_EXTENDS_CLAUSE)) } fn parse_extends_expression(p: &mut JsParser) -> ParsedSyntax { if p.at(T!['{']) && p.nth_at(1, T!['}']) { // Don't eat the body of the class as an object expression except if we have // * extends {} { // * extends {} implements // * extends {}, if !matches!(p.nth(2), T![extends] | T![implements] | T!['{'] | T![,]) { return Absent; } } parse_lhs_expr(p, ExpressionContext::default()) } struct ClassMembersList { inside_abstract_class: bool, } impl ParseNodeList for ClassMembersList { type Kind = JsSyntaxKind; type Parser<'source> = JsParser<'source>; const LIST_KIND: JsSyntaxKind = JS_CLASS_MEMBER_LIST; fn parse_element(&mut self, p: &mut JsParser) -> ParsedSyntax { parse_class_member(p, self.inside_abstract_class) } fn is_at_list_end(&self, p: &mut JsParser) -> bool { p.at(T!['}']) } fn recover(&mut self, p: &mut JsParser, parsed_element: ParsedSyntax) -> RecoveryResult { // test_err js invalid_method_recover // class { // [1 + 1] = () => { // let a=; // }; // }; parsed_element.or_recover( p, &ParseRecovery::new( JS_BOGUS_MEMBER, token_set![ T![;], T![ident], T![readonly], T![private], T![protected], T![public], T![override], T![declare], T![static], T![accessor], T![async], T![yield], T!['}'], T![#], T![@], ], ), js_parse_error::expected_class_member, ) } } // test js class_declare // class B { declare() {} } // class B { declare = foo } // test js static_method // 
class foo { // static foo(bar) {} // static *foo() {} // static async foo() {} // static async *foo() {} // } fn parse_class_member(p: &mut JsParser, inside_abstract_class: bool) -> ParsedSyntax { let member_marker = p.start(); // test js class_empty_element // class foo { ;;;;;;;;;; get foo() {};;;;} if p.eat(T![;]) { return Present(member_marker.complete(p, JS_EMPTY_CLASS_MEMBER)); } let mut modifiers = parse_class_member_modifiers(p, false); if is_at_static_initialization_block_class_member(p) { return Present(parse_static_initialization_block_class_member( p, member_marker, modifiers, )); } let member = parse_class_member_impl(p, member_marker, &mut modifiers); match member { Present(mut member) => { let mut valid = true; if !inside_abstract_class { // test_err ts ts_concrete_class_with_abstract_members // class A { // abstract my_age: number; // abstract name(): string; // abstract get age(): number; // abstract set age(v); // } if let Some(abstract_token_range) = modifiers.get_first_range(ModifierKind::Abstract) { let err = p.err_builder( "Only abstract classes can have abstract members", abstract_token_range, ); p.error(err); valid = false; } } let modifiers_valid = modifiers.validate_and_complete(p, member.kind(p)); if !valid || !modifiers_valid { member.change_to_bogus(p); } Present(member) } Absent => { // If the modifier list contains a modifier other than a decorator, such modifiers can also be valid member names. 
debug_assert!(!modifiers .flags .contains(ModifierFlags::ALL_MODIFIERS_EXCEPT_DECORATOR)); // test_err ts ts_broken_class_member_modifiers // class C { // @decorator // } // class CC { // @ // } // class @ // class C@ modifiers.abandon(p); Absent } } } // test ts ts_index_signature_class_member // class A { // [a: number]: string; // } // class B { // [index: string]: { prop } // } // test ts ts_index_signature_class_member_can_be_static // class A { // static [a: number]: string; // } // class B { // static readonly [a: number]: string; // } fn parse_index_signature_class_member(p: &mut JsParser, member_marker: Marker) -> ParsedSyntax { TypeScript.parse_exclusive_syntax( p, |p| { Present(expect_ts_index_signature_member( p, member_marker, MemberParent::Class, )) }, |p, member| ts_only_syntax_error(p, "Index signatures", member.range(p)), ) } fn parse_class_member_impl( p: &mut JsParser, member_marker: Marker, modifiers: &mut ClassMemberModifiers, ) -> ParsedSyntax { let start_token_pos = p.source().position(); let generator_range = p.cur_range(); // Seems like we're at a generator method if p.at(T![*]) { p.bump_any(); // bump * token if is_at_constructor(p, modifiers) { let err = p.err_builder("constructors can't be generators", generator_range); p.error(err); } return Present(parse_method_class_member( p, member_marker, modifiers, SignatureFlags::GENERATOR, )); }; // Seems like we're at an async method if p.at(T![async]) && !p.nth_at(1, T![?]) && !p.nth_at(1, T![;]) && !p.nth_at(1, T![=]) && !is_at_method_class_member(p, 1) && !p.has_nth_preceding_line_break(1) { let async_range = p.cur_range(); p.expect(T![async]); let mut flags = SignatureFlags::ASYNC; if p.eat(T![*]) { flags |= SignatureFlags::GENERATOR; } return Present(if is_at_constructor(p, modifiers) { let err = p.err_builder("constructors cannot be async", async_range); p.error(err); parse_class_member_name(p, modifiers).unwrap(); parse_constructor_class_member_body(p, member_marker, modifiers) } else { 
parse_method_class_member(p, member_marker, modifiers, flags) }); } // Seems like we're at an index member if is_at_ts_index_signature_member(p) { return parse_index_signature_class_member(p, member_marker); } // test js getter_class_member // class Getters { // get foo() {} // get static() {} // static get bar() {} // get "baz"() {} // get ["a" + "b"]() {} // get 5() {} // get #private() {} // } // class NotGetters { // get() {} // async get() {} // static get() {} // } // // test_err js method_getter_err // class foo { // get {} // } // // test js setter_class_member // class Setters { // set foo(a) {} // set static(a) {} // static set bar(a) {} // set "baz"(a) {} // set ["a" + "b"](a) {} // set 5(a) {} // set #private(a) {} // } // class NotSetters { // set(a) {} // async set(a) {} // static set(a) {} // } // // test_err js setter_class_member // class Setters { // set foo() {} // } if matches!(p.cur(), T![get] | T![set]) && is_at_class_member_name(p, 1) { let is_getter = p.at(T![get]); if is_getter { p.expect(T![get]); } else { p.expect(T![set]); } // So we've seen a get that now must be followed by a getter/setter name parse_class_member_name(p, modifiers) .or_add_diagnostic(p, js_parse_error::expected_class_member_name); // test_err ts ts_getter_setter_type_parameters // class Test { // get a<A>(): A {} // set a<A>(value: A) {} // } if let Present(type_parameters) = parse_ts_type_parameters(p, TypeContext::default()) { p.error(ts_accessor_type_parameters_error(p, &type_parameters)) } let completed = if is_getter { p.expect(T!['(']); p.expect(T![')']); parse_ts_type_annotation_or_error(p).ok(); let member_kind = expect_accessor_body(p, &member_marker, modifiers); member_marker.complete(p, member_kind.as_getter_syntax_kind()) } else { let has_l_paren = p.expect(T!['(']); p.with_state(EnterParameters(SignatureFlags::empty()), |p| { let decorator_list = parse_parameter_decorators(p); // test ts ts_decorator_on_class_setter { "parse_class_parameter_decorators": 
true } // class A { // set val(@dec x) {} // set val(@dec.fn() x) {} // set val(@dec() x) {} // } // test_err ts ts_decorator_on_class_setter // class A { // set val(@dec x) {} // set val(@dec.fn() x) {} // set val(@dec() x) {} // } parse_formal_parameter( p, decorator_list, ParameterContext::ClassSetter, ExpressionContext::default().and_object_expression_allowed(has_l_paren), ) }) .or_add_diagnostic(p, js_parse_error::expected_parameter); p.expect(T![')']); // test_err ts ts_setter_return_type_annotation // class Test { // set a(value: string): void {} // } if let Present(return_type_annotation) = parse_ts_return_type_annotation(p) { p.error(ts_set_accessor_return_type_error( p, &return_type_annotation, )); } let member_kind = expect_accessor_body(p, &member_marker, modifiers); member_marker.complete(p, member_kind.as_setter_syntax_kind()) }; return Present(completed); } let is_constructor = is_at_constructor(p, modifiers); let member_name = parse_class_member_name(p, modifiers) .or_add_diagnostic(p, js_parse_error::expected_class_member_name); if is_at_method_class_member(p, 0) { // test js class_static_constructor_method // class B { static constructor() {} } // test js constructor_class_member // class Foo { // constructor(a) { // this.a = a; // } // } // class Bar { // "constructor"(b) { // this.b = b; // } // } return if is_constructor { Present(parse_constructor_class_member_body( p, member_marker, modifiers, )) } else { // test js method_class_member // class Test { // method() {} // async asyncMethod() {} // async* asyncGeneratorMethod() {} // * generatorMethod() {} // "foo"() {} // ["foo" + "bar"]() {} // 5() {} // #private() {} // } // class ContextualKeywords { // // Methods called static // static() {} // async static() {} // * static() {} // async* static() {} // declare() {} // get() {} // Method called get // set() {} // Method called set // } // class Static { // static method() {} // static async asyncMethod() {} // static async* 
asyncGeneratorMethod() {} // static * generatorMethod() {} // static static() {} // static async static() {} // static async* static() {} // static * static() {} // } Present(parse_method_class_member_rest( p, member_marker, modifiers, SignatureFlags::empty(), )) }; } match member_name { Some(_) => { // test js property_class_member // class foo { // property // declare; // initializedProperty = "a" // "a"; // 5 // ["a" + "b"] // static staticProperty // static staticInitializedProperty = 1 // #private // #privateInitialized = "a" // static #staticPrivate // static #staticPrivateInitializedProperty = 1 // } // // test_err js class_declare_member // class B { declare foo } // test ts ts_property_class_member_can_be_named_set_or_get // class B { set: String; get: Number } let mut property = parse_property_class_member_body(p, member_marker, modifiers); if !property.kind(p).is_bogus() && is_constructor { let err = p.err_builder( "class properties may not be called `constructor`", property.range(p), ); p.error(err); property.change_to_bogus(p); } Present(property) } None => { // test_err js block_stmt_in_class // class S{{}} debug_assert_eq!( p.source().position(), start_token_pos, "Parser shouldn't be progressing when returning Absent" ); member_marker.abandon(p); Absent } } } fn is_at_static_initialization_block_class_member(p: &mut JsParser) -> bool { p.at(T![static]) && p.nth_at(1, T!['{']) } // test js static_initialization_block_member // class A { // static a; // static { // this.a = "test"; // } // } // fn parse_static_initialization_block_class_member( p: &mut JsParser, member_marker: Marker, modifiers: ClassMemberModifiers, ) -> CompletedMarker { if modifiers.is_empty() { modifiers.abandon(p); } else { // test_err ts ts_class_initializer_with_modifiers // class A { // public static { } // } p.error(p.err_builder( "Static class blocks cannot have any modifier.", modifiers.list_marker.range(p), )); modifiers.validate_and_complete(p, 
JS_STATIC_INITIALIZATION_BLOCK_CLASS_MEMBER); } p.expect(T![static]); p.expect(T!['{']); p.with_state(EnterClassStaticInitializationBlock, |p| { let statement_list = p.start(); parse_statements(p, true, statement_list) }); p.expect(T!['}']); member_marker.complete(p, JS_STATIC_INITIALIZATION_BLOCK_CLASS_MEMBER) } /// Parses the body of a property class member (anything after the member name). If the current member is abstract, the [ParsedSyntax] /// will have kind TS_PROPERTY_SIGNATURE_CLASS_MEMBER, otehrwise will be JS_PROPERTY_CLASS_MEMBER. /// /// # Arguments /// /// * `p` - Parser being used /// * `member_marker` - Marker that will be completed at the end of this function /// * `modifiers` - All the member modifiers parsed previously. This will be used for validation and for the [ParsedSyntax::kind] fn parse_property_class_member_body( p: &mut JsParser, member_marker: Marker, modifiers: &ClassMemberModifiers, ) -> CompletedMarker { let annotation = parse_ts_property_annotation(p, modifiers).ok(); // test js class_await_property_initializer // // SCRIPT // async function* test() { // class A { // prop = await; // } // } // test_err js class_yield_property_initializer // // SCRIPT // async function* test() { // class A { // prop = yield; // } // } let initializer_syntax = p.with_state(EnterClassPropertyInitializer, |p| { parse_initializer_clause(p, ExpressionContext::default()) }); expect_member_semi(p, &member_marker, "class property"); let is_signature = modifiers.is_signature() || p.state().in_ambient_context(); let kind = if !is_signature { JS_PROPERTY_CLASS_MEMBER } else if initializer_syntax.is_present() { TS_INITIALIZED_PROPERTY_SIGNATURE_CLASS_MEMBER } else { TS_PROPERTY_SIGNATURE_CLASS_MEMBER }; let member = member_marker.complete(p, kind); if let Present(initializer) = &initializer_syntax { if modifiers.has(ModifierKind::Abstract) { // test_err ts ts_abstract_property_cannot_have_initiliazers // abstract class A { // abstract name: string = ""; // } 
p.error(p.err_builder( "Property cannot have an initializer because it is marked abstract.", initializer.range(p), )); } else if modifiers.has(ModifierKind::Declare) || p.state().in_ambient_context() { // test ts ts_readonly_property_initializer_ambient_context // declare class A { readonly prop = "test"; } // class B { declare readonly prop = "test"; } // declare class A { private readonly prop = "test"; } // class B { declare private readonly prop = "test"; } // declare class A { static readonly prop = "test"; } // class B { declare static readonly prop = "test"; } if !modifiers.has(ModifierKind::Readonly) { // test_err ts ts_property_initializer_ambient_context // declare class A { prop = "test"; } // class B { declare prop = "test"; } p.error(p.err_builder( "In ambient contexts, properties with initializers need to be readonly.", initializer.range(p), )); } else if let Some(annotation) = annotation {
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
true
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/function.rs
crates/rome_js_parser/src/syntax/function.rs
use crate::parser::ParsedSyntax; use crate::prelude::*; use crate::state::{EnterFunction, EnterParameters, SignatureFlags}; use crate::syntax::binding::{ is_at_identifier_binding, is_nth_at_identifier_binding, parse_binding, parse_binding_pattern, }; use crate::syntax::class::{ empty_decorator_list, parse_initializer_clause, parse_parameter_decorators, }; use crate::syntax::expr::{ is_nth_at_identifier, parse_assignment_expression_or_higher, ExpressionContext, }; use crate::syntax::js_parse_error; use crate::syntax::js_parse_error::{ decorators_not_allowed, expected_binding, expected_parameter, expected_parameters, }; use crate::syntax::stmt::{is_semi, parse_block_impl, semi, StatementContext}; use crate::syntax::typescript::ts_parse_error::ts_only_syntax_error; use crate::syntax::typescript::{ is_nth_at_type_parameter_modifier, parse_ts_return_type_annotation, parse_ts_type_annotation, parse_ts_type_parameters, try_parse, TypeContext, }; use crate::JsSyntaxFeature::TypeScript; use crate::ParsedSyntax::{Absent, Present}; use crate::{JsParser, JsSyntaxFeature, ParseRecovery}; use rome_js_syntax::JsSyntaxKind::*; use rome_js_syntax::{JsSyntaxKind, TextRange, T}; use rome_parser::ParserProgress; use rome_rowan::SyntaxKind; /// A function declaration, this could be async and or a generator. This takes a marker /// because you need to first advance over async or start a marker and feed it in. 
// test js function_decl // function foo1() {} // function *foo2() {} // async function *foo3() {} // async function foo4() {} // function *foo5() { // yield foo; // } // // test js function_declaration_script // // SCRIPT // function test(await) {} // // test_err js function_decl_err // function() {} // function foo {} // function {} // function *() {} // async function() {} // async function *() {} // function *foo2() {} // yield foo3; // function test2(): number {} // function foo4(await) {} // function foo5(yield) {} // // test_err js function_broken // function foo())})}{{{ {} // // test ts ts_function_statement // function test(a: string, b?: number, c="default") {} // function test2<A, B extends A, C = A>(a: A, b: B, c: C) {} pub(super) fn parse_function_declaration( p: &mut JsParser, context: StatementContext, ) -> ParsedSyntax { if !is_at_function(p) { return Absent; } let m = p.start(); let mut function = if p.state().in_ambient_context() { parse_ambient_function(p, m, AmbientFunctionKind::Declaration) } else { parse_function( p, m, FunctionKind::Declaration { single_statement_context: context.is_single_statement(), }, ) }; if context != StatementContext::StatementList && !function.kind(p).is_bogus() { if JsSyntaxFeature::StrictMode.is_supported(p) { // test_err js function_in_single_statement_context_strict // if (true) function a() {} // label1: function b() {} // while (true) function c() {} p.error(p.err_builder("In strict mode code, functions can only be declared at top level or inside a block", function.range(p)).hint( "wrap the function in a block statement")); function.change_to_bogus(p); } else if !matches!(context, StatementContext::If | StatementContext::Label) { // test js function_in_if_or_labelled_stmt_loose_mode // // SCRIPT // label1: function a() {} // if (true) function b() {} else function c() {} // if (true) function d() {} // if (true) "test"; else function e() {} p.error(p.err_builder("In non-strict mode code, functions can only be 
declared at top level, inside a block, or as the body of an if or labelled statement", function.range(p)).hint( "wrap the function in a block statement")); function.change_to_bogus(p); } } Present(function) } pub(super) fn parse_function_expression(p: &mut JsParser) -> ParsedSyntax { if !is_at_function(p) { return Absent; } let m = p.start(); Present(parse_function(p, m, FunctionKind::Expression)) } // test js export_default_function_clause // export default function test(a, b) {} // // test ts ts_export_default_function_overload // export default function test(a: string): string; // export default function test(a: string | undefined): string { return "hello" } // // test ts ts_export_function_overload // export function test(a: string): string; // export function test(a: string | undefined): string { return "hello" } pub(super) fn parse_function_export_default_declaration(p: &mut JsParser) -> ParsedSyntax { if !is_at_function(p) { return Absent; } let m = p.start(); Present(if p.state().in_ambient_context() { parse_ambient_function(p, m, AmbientFunctionKind::ExportDefault) } else { parse_function(p, m, FunctionKind::ExportDefault) }) } #[derive(PartialEq, Eq, Debug, Copy, Clone)] enum AmbientFunctionKind { Declaration, ExportDefault, } impl AmbientFunctionKind { const fn is_export_default(&self) -> bool { matches!(self, AmbientFunctionKind::ExportDefault) } } #[derive(PartialEq, Eq, Debug, Copy, Clone)] enum FunctionKind { Declaration { // https://tc39.es/ecma262/multipage/additional-ecmascript-features-for-web-browsers.html#sec-functiondeclarations-in-ifstatement-statement-clauses single_statement_context: bool, }, Expression, ExportDefault, } impl FunctionKind { const fn is_export_default(&self) -> bool { matches!(self, FunctionKind::ExportDefault) } fn is_id_optional(&self) -> bool { matches!(self, FunctionKind::Expression | FunctionKind::ExportDefault) } fn is_expression(&self) -> bool { matches!(self, FunctionKind::Expression) } fn 
is_in_single_statement_context(&self) -> bool { matches!( self, FunctionKind::Declaration { single_statement_context: true } ) } } impl From<FunctionKind> for JsSyntaxKind { fn from(kind: FunctionKind) -> Self { match kind { FunctionKind::Declaration { .. } => JS_FUNCTION_DECLARATION, FunctionKind::Expression => JS_FUNCTION_EXPRESSION, FunctionKind::ExportDefault => JS_FUNCTION_EXPORT_DEFAULT_DECLARATION, } } } fn is_at_function(p: &mut JsParser) -> bool { p.at_ts(token_set![T![async], T![function]]) || is_at_async_function(p, LineBreak::DoNotCheck) } #[inline] fn parse_function(p: &mut JsParser, m: Marker, kind: FunctionKind) -> CompletedMarker { let mut flags = SignatureFlags::empty(); let in_async = is_at_async_function(p, LineBreak::DoNotCheck); if in_async { // test_err js function_escaped_async // void \u0061sync function f(){} p.eat(T![async]); flags |= SignatureFlags::ASYNC; } p.expect(T![function]); let generator_range = if p.at(T![*]) { let range = p.cur_range(); p.bump(T![*]); flags |= SignatureFlags::GENERATOR; Some(range) } else { None }; let id = parse_function_id(p, kind, flags); if !kind.is_id_optional() { id.or_add_diagnostic(p, |p, range| { p.err_builder( "expected a name for the function in a function declaration, but found none", range, ) }); } TypeScript .parse_exclusive_syntax( p, |p| parse_ts_type_parameters(p, TypeContext::default().and_allow_const_modifier(true)), |p, marker| { p.err_builder( "type parameters can only be used in TypeScript files", marker.range(p), ) }, ) .ok(); let parameter_context = if !kind.is_expression() && TypeScript.is_supported(p) { // It isn't known at this point if this is a function overload definition (body is missing) // or a regular function implementation. // Let's go with the laxer of the two. Ideally, these verifications should be part of // a second compiler pass. 
ParameterContext::Declaration } else { ParameterContext::Implementation }; parse_parameter_list(p, parameter_context, flags) .or_add_diagnostic(p, js_parse_error::expected_parameters); TypeScript .parse_exclusive_syntax(p, parse_ts_return_type_annotation, |p, marker| { p.err_builder( "return types can only be used in TypeScript files", marker.range(p), ) }) .ok(); let body = parse_function_body(p, flags); // test ts ts_function_overload // function test(a: string): void; // function test(a: string | undefined): void {} // function no_semi(a: string) // function no_semi(a: string) {} // async function async_overload(a: string) // async function async_overload(a: string) {} if body.is_absent() && TypeScript.is_supported(p) && is_semi(p, 0) && !kind.is_in_single_statement_context() && !kind.is_expression() { p.eat(T![;]); // test_err ts ts_function_overload_generator // function* test(a: string); // function* test(a: string) {} if let Some(generator_range) = generator_range { p.error(p.err_builder( "An overload signature cannot be declared as a generator.", generator_range, )); } if kind.is_export_default() { m.complete(p, TS_DECLARE_FUNCTION_EXPORT_DEFAULT_DECLARATION) } else { m.complete(p, TS_DECLARE_FUNCTION_DECLARATION) } } else { body.or_add_diagnostic(p, js_parse_error::expected_function_body); let mut function = m.complete(p, kind.into()); // test_err js async_or_generator_in_single_statement_context // if (true) async function t() {} // if (true) function* t() {} if kind.is_in_single_statement_context() && (in_async || generator_range.is_some()) { p.error(p.err_builder("`async` and generator functions can only be declared at top level or inside a block", function.range(p) )); function.change_to_bogus(p); } function } } // test_err js break_in_nested_function // while (true) { // function helper() { // break; // } // } pub(super) fn parse_function_body(p: &mut JsParser, flags: SignatureFlags) -> ParsedSyntax { p.with_state(EnterFunction(flags), |p| { 
parse_block_impl(p, JS_FUNCTION_BODY) }) } fn parse_function_id(p: &mut JsParser, kind: FunctionKind, flags: SignatureFlags) -> ParsedSyntax { match kind { // Takes the async and generator restriction from the expression FunctionKind::Expression => { // test js function_expression_id // // SCRIPT // (function await() {}); // (function yield() {}); // (async function yield() {}); // (function* await() {}) // // test_err js function_expression_id_err // (async function await() {}); // (function* yield() {}); // function* test() { function yield() {} } p.with_state(EnterFunction(flags), parse_binding) } // Inherits the async and generator from the parent _ => { // test js function_id // // SCRIPT // function test() {} // function await(test) {} // async function await(test) {} // function yield(test) {} // function* yield(test) {} // // // test_err js function_id_err // function* test() { // function yield(test) {} // } parse_binding(p) } } } // test ts ts_declare_function // declare function test<A, B, R>(a: A, b: B): R; // declare function test2({ a }?: { a: "string" }) // declare // function not_a_declaration() {} // // test_err ts ts_declare_function_with_body // declare function test<A>(a: A): string { return "ambient function with a body"; } // // test ts ts_ambient_function // declare module a { // function test(): string; // } fn parse_ambient_function( p: &mut JsParser, m: Marker, kind: AmbientFunctionKind, ) -> CompletedMarker { let stmt_start = p.cur_range().start(); // test_err ts ts_declare_async_function // declare async function test(); let is_async = p.at(T![async]); if is_async { p.error(p.err_builder( "'async' modifier cannot be used in an ambient context.", p.cur_range(), )); p.bump(T![async]); } p.expect(T![function]); let is_generator = p.at(T![*]); if is_generator { // test_err ts ts_declare_generator_function // declare function* test(): void; // declare module 'x' { // export default function* test(): void // } p.error(p.err_builder( 
"Generators are not allowed in an ambient context.", p.cur_range(), )); p.bump(T![*]); } let binding = parse_binding(p); let binding_range = p.cur_range(); parse_ts_type_parameters(p, TypeContext::default().and_allow_const_modifier(true)).ok(); parse_parameter_list(p, ParameterContext::Declaration, SignatureFlags::empty()) .or_add_diagnostic(p, expected_parameters); parse_ts_return_type_annotation(p).ok(); if let Present(body) = parse_function_body(p, SignatureFlags::empty()) { p.error( p.err_builder( "A 'declare' function cannot have a function body", body.range(p), ) .hint("remove this body"), ); } semi(p, TextRange::new(stmt_start, p.cur_range().start())); if is_async { m.complete(p, JS_BOGUS_STATEMENT) } else if kind.is_export_default() { // test ts ts_declare_function_export_default_declaration // declare module 'x' { // export default function(option: any): void // } // declare module 'y' { // export default function test(option: any): void // } m.complete(p, TS_DECLARE_FUNCTION_EXPORT_DEFAULT_DECLARATION) } else { // test_err ts ts_declare_function_export_declaration_missing_id // declare module 'x' { // export function(option: any): void // } if binding.is_absent() { p.error(expected_binding(p, binding_range)); } // test ts ts_declare_function_export_declaration // declare module 'x' { // export function test(option: any): void // } m.complete(p, TS_DECLARE_FUNCTION_DECLARATION) } } pub(crate) fn parse_ts_type_annotation_or_error(p: &mut JsParser) -> ParsedSyntax { TypeScript.parse_exclusive_syntax(p, parse_ts_type_annotation, |p, annotation| { p.err_builder( "return types can only be used in TypeScript files", annotation.range(p), ) .hint("remove this type annotation") }) } /// Tells [is_at_async_function] if it needs to check line breaks #[derive(PartialEq, Eq)] pub(crate) enum LineBreak { // check line breaks DoCheck, // do not check line break DoNotCheck, } #[inline] /// Checks if the parser is inside a "async function" pub(super) fn 
is_at_async_function(p: &mut JsParser, should_check_line_break: LineBreak) -> bool { let async_function_tokens = p.at(T![async]) && p.nth_at(1, T![function]); if should_check_line_break == LineBreak::DoCheck { async_function_tokens && !p.has_nth_preceding_line_break(1) } else { async_function_tokens } } /// There are cases where the parser must speculatively parse a syntax. For example, /// parsing `<string>(test)` very much looks like an arrow expression *except* that it isn't followed /// by a `=>`. This enum tells a parse function if ambiguity should be tolerated or if it should stop if it is not. #[derive(Debug, Copy, Clone)] pub(crate) enum Ambiguity { /// Ambiguity is allowed. A parse method should continue even if an expected character is missing. Allowed, /// Ambiguity isn't allowed. A parse method should stop parsing if an expected character is missing /// and let the caller decide what to do in this case. Disallowed, } impl Ambiguity { fn is_disallowed(&self) -> bool { matches!(self, Ambiguity::Disallowed) } } pub(crate) fn parse_arrow_function_expression(p: &mut JsParser) -> ParsedSyntax { parse_parenthesized_arrow_function_expression(p) .or_else(|| parse_arrow_function_with_single_parameter(p)) } /// Tries to parse the header of a parenthesized arrow function expression. /// /// The header is everything coming before the (or everything up and including the `=>` token): /// `async (a) =>`. /// /// Returns the [Marker] for the parsed arrow function header that must be completed by the caller. /// /// ## Errors /// /// Returns `Err` if `ambiguity` is [Ambiguity::Disallowed] and the syntax /// is ambiguous. The `Err` contains the [Marker] of the syntax parsed to this point. It's up /// to the caller to abandon or complete the returned marker. 
/// /// For example, the parser speculatively tries to parse `<string>(test)` as an arrow /// function because the start very much looks like one, except that the `=>` token is missing /// (it's a TypeScript `<string>` cast followed by a parenthesized expression). fn try_parse_parenthesized_arrow_function_head( p: &mut JsParser, ambiguity: Ambiguity, ) -> Result<(Marker, SignatureFlags), Marker> { let m = p.start(); // test_err js arrow_escaped_async // \u0061sync () => {} let flags = if p.eat(T![async]) { SignatureFlags::ASYNC } else { SignatureFlags::empty() }; if p.at(T![<]) { parse_ts_type_parameters(p, TypeContext::default().and_allow_const_modifier(true)).ok(); if ambiguity.is_disallowed() && p.last() != Some(T![>]) { return Err(m); } } if !p.at(T!['(']) && ambiguity.is_disallowed() { return Err(m); } // test_err ts ts_decorator_on_arrow_function { "parse_class_parameter_decorators": true } // const method = (@dec x, second, @dec third = 'default') => {}; // const method = (@dec.fn() x, second, @dec.fn() third = 'default') => {}; // const method = (@dec() x, second, @dec() third = 'default') => {}; parse_parameter_list( p, ParameterContext::Arrow, arrow_function_parameter_flags(p, flags), ) .or_add_diagnostic(p, expected_parameters); if p.last() != Some(T![')']) && ambiguity.is_disallowed() { return Err(m); } TypeScript .parse_exclusive_syntax(p, parse_ts_return_type_annotation, |p, annotation| { ts_only_syntax_error(p, "return type annotation", annotation.range(p)) }) .ok(); if p.has_preceding_line_break() { p.error(p.err_builder("Line terminator not permitted before arrow.", p.cur_range())); } if !p.expect(T![=>]) && ambiguity.is_disallowed() { return Err(m); } Ok((m, flags)) } // test ts ts_arrow_function_type_parameters // let a = <A, B extends A, C = string>(a: A, b: B, c: C) => "hello"; // let b = async <A, B>(a: A, b: B): Promise<string> => "hello"; fn parse_possible_parenthesized_arrow_function_expression(p: &mut JsParser) -> ParsedSyntax { let 
start_pos = p.cur_range().start(); // Test if we already tried to parse this position as an arrow function and failed. // If so, bail out immediately. if p.state().not_parenthesized_arrow.contains(&start_pos) { return Absent; } match try_parse(p, |p| { try_parse_parenthesized_arrow_function_head(p, Ambiguity::Disallowed) }) { Ok((m, flags)) => { parse_arrow_body(p, flags).or_add_diagnostic(p, js_parse_error::expected_arrow_body); Present(m.complete(p, JS_ARROW_FUNCTION_EXPRESSION)) } Err(m) => { // SAFETY: Abandoning the marker here is safe because `try_parse` rewinds if // the callback returns `Err` (which is the case that this branch is handling). m.abandon(p); p.state_mut().not_parenthesized_arrow.insert(start_pos); Absent } } } fn parse_parenthesized_arrow_function_expression(p: &mut JsParser) -> ParsedSyntax { let is_parenthesized = is_parenthesized_arrow_function_expression(p); match is_parenthesized { IsParenthesizedArrowFunctionExpression::True => { let (m, flags) = try_parse_parenthesized_arrow_function_head(p, Ambiguity::Allowed).expect("'CompletedMarker' because function should never return 'Err' if called with 'Ambiguity::Allowed'."); parse_arrow_body(p, flags).or_add_diagnostic(p, js_parse_error::expected_arrow_body); Present(m.complete(p, JS_ARROW_FUNCTION_EXPRESSION)) } IsParenthesizedArrowFunctionExpression::Unknown => { parse_possible_parenthesized_arrow_function_expression(p) } IsParenthesizedArrowFunctionExpression::False => Absent, } } #[derive(Debug, Copy, Clone)] enum IsParenthesizedArrowFunctionExpression { True, False, Unknown, } // test js paren_or_arrow_expr // (foo); // (foo) => {}; // (5 + 5); // ({foo, bar, b: [f, ...baz]}) => {}; // (foo, ...bar) => {} // test_err js paren_or_arrow_expr_invalid_params // (5 + 5) => {} // (a, ,b) => {} // (a, b) =>; // (a: string; // (a, b) // => {} fn is_parenthesized_arrow_function_expression( p: &mut JsParser, ) -> IsParenthesizedArrowFunctionExpression { match p.cur() { // These could be the start 
of a parenthesized arrow function expression but needs further verification T!['('] | T![<] => { is_parenthesized_arrow_function_expression_impl(p, SignatureFlags::empty()) } T![async] => { // test js async_arrow_expr // let a = async foo => {} // let b = async (bar) => {} // async (foo, bar, ...baz) => foo if p.has_nth_preceding_line_break(1) { IsParenthesizedArrowFunctionExpression::False } else if matches!(p.nth(1), T!['('] | T![<]) { is_parenthesized_arrow_function_expression_impl(p, SignatureFlags::ASYNC) } else { IsParenthesizedArrowFunctionExpression::False } } // Not entirely correct but that's probably what the user intended T![=>] => IsParenthesizedArrowFunctionExpression::True, _ => IsParenthesizedArrowFunctionExpression::False, } } // Tests if the parser is at an arrow function expression fn is_parenthesized_arrow_function_expression_impl( p: &mut JsParser, flags: SignatureFlags, ) -> IsParenthesizedArrowFunctionExpression { let n = usize::from(flags.contains(SignatureFlags::ASYNC)); match p.nth(n) { T!['('] => { match p.nth(n + 1) { T![')'] => { // '()' is an arrow expression if followed by an '=>', a type annotation or body. // Otherwise, a parenthesized expression with a missing inner expression match p.nth(n + 2) { T![=>] | T![:] | T!['{'] => IsParenthesizedArrowFunctionExpression::True, _ => IsParenthesizedArrowFunctionExpression::False, } } // Rest parameter '(...a' is certainly not a parenthesized expression T![...] => IsParenthesizedArrowFunctionExpression::True, // '([ ...', '({ ... } can either be a parenthesized object or array expression or a destructing parameter T!['['] | T!['{'] => IsParenthesizedArrowFunctionExpression::Unknown, // '(@' can be a decorator or a parenthesized arrow function T![@] => IsParenthesizedArrowFunctionExpression::Unknown, // '(a...' 
_ if is_nth_at_identifier_binding(p, n + 1) || p.nth_at(n + 1, T![this]) => { match p.nth(n + 2) { // '(a: ' must be a type annotation T![:] => IsParenthesizedArrowFunctionExpression::True, // Unclear because it could either be // * '(a = ': an initializer or a parenthesized assignment expression // * '(a, ': separator to next parameter or a parenthesized sequence expression // * '(a)': a single parameter OR a parenthesized expression T![=] | T![,] | T![')'] => IsParenthesizedArrowFunctionExpression::Unknown, T![?] => { // Disambiguate between an optional parameter and a parenthesized conditional expression match p.nth(n + 3) { // '(a?:' | '(a?,' | '(a?=' | '(a?)' T![:] | T![,] | T![=] | T![')'] => { IsParenthesizedArrowFunctionExpression::True } _ => IsParenthesizedArrowFunctionExpression::False, } } _ => IsParenthesizedArrowFunctionExpression::False, } } _ => IsParenthesizedArrowFunctionExpression::False, } } // potential start of type parameters T![<] => { if is_nth_at_type_parameter_modifier(p, n + 1) && !JsSyntaxFeature::Jsx.is_supported(p) { // <const T>... IsParenthesizedArrowFunctionExpression::True } else if !is_nth_at_identifier(p, n + 1) { // <5... IsParenthesizedArrowFunctionExpression::False } // test jsx jsx_type_arguments // // These may look like a valid arrows but are JSX // <A extends>() =</A>; // <A extends="B">() =</A>; // <A extends ok>() =</A>; // test tsx tsx_type_arguments // // These are valid type arguments // <A extends B>() => {}; // <A=string>() => {}; // <A, B>() => {}; // <A extends B<C>>() => {} // <a... 
JSX override else if JsSyntaxFeature::Jsx.is_supported(p) { match p.nth(n + 2) { T![extends] => { // `<a extends=` OR `<a extends>` is a JSX start element // and a `extends` type refinement: `<A extends string>` if matches!(p.nth(n + 3), T![=] | T![>]) { IsParenthesizedArrowFunctionExpression::False } // `<A extends B>` Could be either else if is_nth_at_identifier(p, n + 3) { IsParenthesizedArrowFunctionExpression::Unknown } else { // <A extends B> must be type arguments IsParenthesizedArrowFunctionExpression::True } } // `<A=` or `<A,` or always type arguments and never JSX tags T![=] | T![,] => IsParenthesizedArrowFunctionExpression::True, _ => IsParenthesizedArrowFunctionExpression::False, } } else { // <a... IsParenthesizedArrowFunctionExpression::Unknown } } _ => IsParenthesizedArrowFunctionExpression::False, } } /// Computes the signature flags for parsing the parameters of an arrow expression. These /// have different semantics from parsing the body fn arrow_function_parameter_flags(p: &JsParser, mut flags: SignatureFlags) -> SignatureFlags { if p.state().in_generator() { // Arrow functions inherit whatever yield is a valid identifier name from the parent. 
flags |= SignatureFlags::GENERATOR; } // The arrow function is in an async context if the outer function is in an async context or itself is // declared async if p.state().in_async() { flags |= SignatureFlags::ASYNC; } flags } // test js arrow_expr_single_param // // SCRIPT // foo => {} // yield => {} // await => {} // baz => // {} fn parse_arrow_function_with_single_parameter(p: &mut JsParser) -> ParsedSyntax { if !is_arrow_function_with_single_parameter(p) { return Absent; } let m = p.start(); let is_async = p.at(T![async]) && is_nth_at_identifier_binding(p, 1); let flags = if is_async { p.eat(T![async]); SignatureFlags::ASYNC } else { SignatureFlags::empty() }; // test_err js async_arrow_expr_await_parameter // let a = async await => {} // async() => { (a = await) => {} }; // async() => { (a = await 10) => {} }; p.with_state(EnterParameters(arrow_function_parameter_flags(p, flags)), parse_binding) .expect("Expected function parameter to be present as guaranteed by is_arrow_function_with_simple_parameter"); p.bump(T![=>]); parse_arrow_body(p, flags).or_add_diagnostic(p, js_parse_error::expected_arrow_body); Present(m.complete(p, JS_ARROW_FUNCTION_EXPRESSION)) } fn is_arrow_function_with_single_parameter(p: &mut JsParser) -> bool { // a => ... if p.nth_at(1, T![=>]) { // test js single_parameter_arrow_function_with_parameter_named_async // let id = async => async; is_at_identifier_binding(p) && !p.has_nth_preceding_line_break(1) } // async ident => ... 
else { p.at(T![async]) && !p.has_nth_preceding_line_break(1) && is_nth_at_identifier_binding(p, 1) && !p.has_nth_preceding_line_break(2) && p.nth_at(2, T![=>]) } } fn parse_arrow_body(p: &mut JsParser, mut flags: SignatureFlags) -> ParsedSyntax { // test js arrow_in_constructor // class A { // constructor() { // () => { super() }; // () => super(); // } // } if p.state().in_constructor() { flags |= SignatureFlags::CONSTRUCTOR } if p.at(T!['{']) { parse_function_body(p, flags) } else { p.with_state(EnterFunction(flags), |p| { parse_assignment_expression_or_higher(p, ExpressionContext::default()) }) } } pub(crate) fn parse_any_parameter( p: &mut JsParser, decorator_list: ParsedSyntax, parameter_context: ParameterContext, expression_context: ExpressionContext, ) -> ParsedSyntax { let parameter = match p.cur() { T![...] => parse_rest_parameter(p, decorator_list, expression_context), T![this] => { // test_err ts ts_decorator_this_parameter_option { "parse_class_parameter_decorators": true } // class A { // method(@dec this) {} // method(@dec(val) this) {} // method(@dec.fn(val) this) {} // } decorator_list .add_diagnostic_if_present(p, decorators_not_allowed) .map(|mut decorator_list| { decorator_list.change_to_bogus(p); decorator_list }); parse_ts_this_parameter(p) } _ => parse_formal_parameter(p, decorator_list, parameter_context, expression_context), }; parameter.map(|mut parameter| { if parameter.kind(p) == TS_THIS_PARAMETER { if TypeScript.is_unsupported(p) { parameter.change_to_bogus(p); p.error(ts_only_syntax_error( p, "this parameter", parameter.range(p), )); } else if parameter_context.is_arrow_function() { // test_err ts ts_arrow_function_this_parameter // let a = (this: string) => {} parameter.change_to_bogus(p); p.error(p.err_builder( "An arrow function cannot have a 'this' parameter.",
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
true
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/auxiliary.rs
crates/rome_js_parser/src/syntax/auxiliary.rs
use crate::prelude::*; use crate::syntax::class::{parse_class_declaration, parse_decorators}; use crate::syntax::function::parse_function_declaration; use crate::syntax::js_parse_error::decorators_not_allowed; use crate::syntax::module::parse_import_or_import_equals_declaration; use crate::syntax::stmt::{ is_nth_at_variable_declarations, parse_variable_declaration, semi, StatementContext, VariableDeclarationParent, }; use crate::syntax::typescript::{ is_nth_at_any_ts_namespace_declaration, parse_any_ts_namespace_declaration_clause, parse_ts_enum_declaration, parse_ts_interface_declaration, parse_ts_type_alias_declaration, }; use crate::{Absent, JsParser, ParsedSyntax}; use rome_js_syntax::JsSyntaxKind::{JS_BOGUS_STATEMENT, JS_VARIABLE_DECLARATION_CLAUSE}; use rome_js_syntax::T; use rome_rowan::{TextRange, TextSize}; // test js export_variable_clause // export let a; // export const b = 3; // export var c, d, e = 3; // // test_err js export_variable_clause_error // export let a = ; // export const b; // export let d, c; pub(crate) fn parse_variable_declaration_clause(p: &mut JsParser) -> ParsedSyntax { let start = p.cur_range().start(); parse_variable_declaration(p, VariableDeclarationParent::Clause).map(|declaration| { let m = declaration.precede(p); semi(p, TextRange::new(start, p.cur_range().end())); m.complete(p, JS_VARIABLE_DECLARATION_CLAUSE) }) } pub(crate) fn is_nth_at_declaration_clause(p: &mut JsParser, n: usize) -> bool { if matches!( p.nth(n), T![function] | T![const] | T![enum] | T![class] | T![import] | T![@] ) { return true; } if is_nth_at_variable_declarations(p, n) { return true; } if p.has_nth_preceding_line_break(n + 1) { return false; } if p.nth_at(n, T![type]) && !p.nth_at(n + 1, T![*]) && !p.nth_at(n + 1, T!['{']) { return true; } if p.nth_at(n, T![interface]) { return true; } if p.nth_at(n, T![async]) && p.nth_at(n + 1, T![function]) { return true; } if is_nth_at_any_ts_namespace_declaration(p, n) { return true; } if p.nth_at(n, T![abstract]) 
&& p.nth_at(n + 1, T![class]) { return true; } false } pub(crate) fn parse_declaration_clause(p: &mut JsParser, stmt_start_pos: TextSize) -> ParsedSyntax { match p.cur() { T![function] => parse_function_declaration(p, StatementContext::StatementList), T![@] => { let decorator_list = parse_decorators(p); match p.cur() { T![class] | T![abstract] if !p.state().in_ambient_context() => { // test js decorator_export_class_clause // export @decorator class Bar {}; // export @first @second class Foo { // constructor() {} // } //test ts decorator_abstract_export_class_clause // export @decorator abstract class Bar {}; // export @first @second abstract class Foo { // constructor() {} // } parse_class_declaration(p, decorator_list, StatementContext::StatementList) } _ => { // test_err js decorator_export_class_clause // @decorator // export let a; // @decorator1 @decorator2 // export function Foo() { } decorator_list .add_diagnostic_if_present(p, decorators_not_allowed) .map(|mut marker| { marker.change_kind(p, JS_BOGUS_STATEMENT); marker }); parse_declaration_clause(p, stmt_start_pos) } } } T![class] | T![abstract] => { parse_class_declaration(p, Absent, StatementContext::StatementList) } T![const] => { if p.nth_at(1, T![enum]) { parse_ts_enum_declaration(p) } else { // test ts ts_ambient_const_variable_statement // declare const a, b, c, d = "test"; parse_variable_declaration_clause(p) } } // test ts ts_ambient_var_statement // declare var a, b, c; T![var] => parse_variable_declaration_clause(p), T![enum] => { // test ts ts_ambient_enum_statement // declare enum A { X, Y, Z } // declare const enum B { X, Y, Z } parse_ts_enum_declaration(p) } T![import] => parse_import_or_import_equals_declaration(p), T![async] => parse_function_declaration(p, StatementContext::StatementList), T![type] => { // test ts ts_declare_type_alias // declare type A = string; // declare type B = string | number & { a: string, b: number } parse_ts_type_alias_declaration(p) } T![interface] => { // test 
ts ts_ambient_interface // declare interface A { b: string, c: number } parse_ts_interface_declaration(p) } T![let] => { // test ts ts_ambient_let_variable_statement // declare let a, b, c, d; parse_variable_declaration_clause(p) } T![namespace] | T![global] | T![module] => { parse_any_ts_namespace_declaration_clause(p, stmt_start_pos) } _ => Absent, } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/js_parse_error.rs
crates/rome_js_parser/src/syntax/js_parse_error.rs
//! Provides factory function to create common diagnostics for the JavaScript syntax use crate::prelude::*; use crate::span::Span; use crate::JsParser; use crate::JsSyntaxFeature::TypeScript; use rome_js_syntax::TextRange; use rome_parser::diagnostic::{expected_any, expected_node}; pub(crate) fn expected_function_body(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("function body", range).into_diagnostic(p) } pub(crate) fn expected_class_member_name(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any( &[ "identifier", "string literal", "number literal", "private field name", "computed name", ], range, ) .into_diagnostic(p) } pub(crate) fn expected_arrow_body(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["function body", "expression"], range).into_diagnostic(p) } pub(crate) fn expected_object_member(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any( &[ "property", "shorthand property", "getter", "setter", "method", ], range, ) .into_diagnostic(p) } pub(crate) fn expected_array_element(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["property", "expression", "method"], range).into_diagnostic(p) } pub(crate) fn expected_object_member_name(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any( &[ "identifier", "string literal", "number literal", "computed property", ], range, ) .into_diagnostic(p) } pub(crate) fn expected_block_statement(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("block statement", range).into_diagnostic(p) } pub(crate) fn expected_catch_clause(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("catch clause", range).into_diagnostic(p) } pub(crate) fn expected_parameter(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("parameter", range).into_diagnostic(p) } pub(crate) fn expected_parameters(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("parenthesis '('", 
range).into_diagnostic(p) } pub(crate) fn expected_case_or_default(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["default", "case"], range).into_diagnostic(p) } pub(crate) fn expected_case(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("case", range).into_diagnostic(p) } pub(crate) fn expected_assignment_target(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["identifier", "assignment target"], range).into_diagnostic(p) } pub(crate) fn expected_simple_assignment_target(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["identifier", "member expression"], range).into_diagnostic(p) } pub(crate) fn expected_identifier(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("identifier", range).into_diagnostic(p) } pub(crate) fn expected_statement(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("statement", range).into_diagnostic(p) } pub(crate) fn expected_binding(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["identifier", "array pattern", "object pattern"], range).into_diagnostic(p) } pub(crate) fn expected_class_member(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["property ", "method", "getter", "setter"], range).into_diagnostic(p) } pub(crate) fn expected_class_parameters(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("class parameters", range).into_diagnostic(p) } pub(crate) fn expected_constructor_parameters(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("constructor parameters", range).into_diagnostic(p) } pub(crate) fn expected_class_method_body(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("class method body", range).into_diagnostic(p) } pub(crate) fn expected_module_source(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("string literal", range).into_diagnostic(p) } pub(crate) fn expected_named_import(p: &JsParser, range: 
TextRange) -> ParseDiagnostic { expected_any(&["namespace import", "named imports"], range).into_diagnostic(p) } pub(crate) fn expected_literal_export_name(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["string literal", "identifier"], range).into_diagnostic(p) } pub(crate) fn expected_export_clause(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["class", "function", "variable declaration"], range).into_diagnostic(p) } pub(crate) fn expected_export_name_specifier(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("export name", range).into_diagnostic(p) } pub(crate) fn expected_named_import_specifier(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("identifier", range).into_diagnostic(p) } pub(crate) fn duplicate_assertion_keys_error( p: &JsParser, key: &str, first_use: TextRange, duplicate_range: TextRange, ) -> ParseDiagnostic { p.err_builder("Duplicate assertion keys are not allowed", first_use) .detail(first_use, format!("First use of the key `{}`", key)) .detail(duplicate_range, "second use here") } pub(crate) fn expected_expression(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("expression", range).into_diagnostic(p) } pub(crate) fn expected_expression_assignment(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["expression", "assignment"], range).into_diagnostic(p) } pub(crate) fn expected_unary_expression(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("unary expression", range).into_diagnostic(p) } pub(crate) fn expected_property_or_signature(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["property", "signature"], range).into_diagnostic(p) } pub(crate) fn expected_declaration(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any( &[ "function", "class", "variable declaration", "interface", "enum", "type alias", ], range, ) .into_diagnostic(p) } pub(crate) fn expected_export_default_declaration( 
p: &JsParser, range: TextRange, ) -> ParseDiagnostic { let expected = if TypeScript.is_supported(p) { expected_any( &[ "class declaration", "function declaration", "interface declaration", ], range, ) } else { expected_any(&["class declaration", "function declaration"], range) }; expected.into_diagnostic(p) } pub(crate) fn unexpected_body_inside_ambient_context( p: &JsParser, range: TextRange, ) -> ParseDiagnostic { p.err_builder( "members inside ambient contexts should not have a body", range, ) } pub(crate) fn private_names_only_allowed_on_left_side_of_in_expression( p: &JsParser, private_name_range: TextRange, ) -> ParseDiagnostic { p.err_builder( "Private names are only allowed on the left side of a 'in' expression", private_name_range, ) } pub(crate) fn invalid_assignment_error(p: &JsParser, range: TextRange) -> ParseDiagnostic { p.err_builder( format!("Invalid assignment to `{}`", p.text(range.as_range()),), range, ) .hint("This expression cannot be assigned to") } pub(crate) fn modifier_already_seen( p: &JsParser, second_range: TextRange, first_range: TextRange, ) -> ParseDiagnostic { let modifier = p.text(second_range); p.err_builder(format!("'{modifier}' already seen"), second_range) .detail(second_range, "duplicate modifier") .detail(first_range, "first seen here") } pub(crate) fn modifier_cannot_be_used_with_modifier( p: &JsParser, range: TextRange, other_modifier_range: TextRange, ) -> ParseDiagnostic { let modifier = p.text(range); let other_modifier = p.text(other_modifier_range); p.err_builder( format!("'{modifier}' cannot be used with '{other_modifier}' modifier."), range, ) .detail(range, format!("'{modifier}' modifier")) .detail(other_modifier_range, format!("'{other_modifier}' modifier")) } pub(crate) fn modifier_must_precede_modifier( p: &JsParser, range: TextRange, to_precede_modifier_range: TextRange, ) -> ParseDiagnostic { let modifier_name = p.text(range); let to_precede_name = p.text(to_precede_modifier_range); p.err_builder( 
format!("'{modifier_name}' must precede '{to_precede_name}'",), range, ) .detail(range, "move this modifier") .detail(to_precede_modifier_range, "before this modifier") } pub(crate) fn invalid_decorator_error(p: &JsParser, range: TextRange) -> ParseDiagnostic { p.err_builder( format!("Invalid decorator `{}`", p.text(range.as_range()),), range, ) } pub(crate) fn parameter_decorators_not_allowed(p: &JsParser, range: TextRange) -> ParseDiagnostic { decorators_not_allowed(p, range).hint("You can enable parameter decorators by setting the `unsafeParameterDecoratorsEnabled` option to `true` in your configuration file.") } pub(crate) fn decorators_not_allowed(p: &JsParser, range: TextRange) -> ParseDiagnostic { p.err_builder("Decorators are not valid here.", range).hint( "Decorators are only valid on class declarations, class expressions, and class methods.", ) } pub(crate) fn decorator_must_precede_modifier(p: &JsParser, range: TextRange) -> ParseDiagnostic { p.err_builder( "Decorators must precede the name and all keywords of property declarations.", range, ) }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/binding.rs
crates/rome_js_parser/src/syntax/binding.rs
use crate::prelude::*; use crate::span::Span; use crate::syntax::class::parse_initializer_clause; use crate::syntax::expr::{is_nth_at_identifier, parse_identifier, ExpressionContext}; use crate::syntax::js_parse_error::{ expected_binding, expected_identifier, expected_object_member_name, }; use crate::syntax::object::{is_at_object_member_name, parse_object_member_name}; use crate::syntax::pattern::{ParseArrayPattern, ParseObjectPattern, ParseWithDefaultPattern}; use crate::JsSyntaxFeature::StrictMode; use crate::ParsedSyntax::{Absent, Present}; use crate::{JsParser, ParsedSyntax}; use rome_js_syntax::{JsSyntaxKind::*, *}; use rome_parser::diagnostic::expected_any; use rome_rowan::SyntaxKind as SyntaxKindTrait; pub(crate) fn parse_binding_pattern(p: &mut JsParser, context: ExpressionContext) -> ParsedSyntax { match p.cur() { T!['['] => ArrayBindingPattern.parse_array_pattern(p), T!['{'] if context.is_object_expression_allowed() => { ObjectBindingPattern.parse_object_pattern(p) } _ => parse_identifier_binding(p), } } #[inline] pub(crate) fn is_at_identifier_binding(p: &mut JsParser) -> bool { is_nth_at_identifier_binding(p, 0) } #[inline] pub(crate) fn is_nth_at_identifier_binding(p: &mut JsParser, n: usize) -> bool { is_nth_at_identifier(p, n) } #[inline] pub(crate) fn parse_binding(p: &mut JsParser) -> ParsedSyntax { parse_identifier_binding(p) } // test_err js binding_identifier_invalid // async () => { let await = 5; } // function *foo() { // let yield = 5; // } // let eval = 5; // let let = 5; // const let = 5; // let a, a; // // test_err js binding_identifier_invalid_script // // SCRIPT // let let = 5; // const let = 5; /// Parses an identifier binding or returns an invalid syntax if the identifier isn't valid in this context. 
/// An identifier may not be valid if: /// * it is named "eval" or "arguments" inside of strict mode /// * it is named "let" inside of a "let" or "const" declaration /// * the same identifier is bound multiple times inside of a `let` or const` declaration /// * it is named "yield" inside of a generator function or in strict mode /// * it is named "await" inside of an async function pub(crate) fn parse_identifier_binding(p: &mut JsParser) -> ParsedSyntax { let parsed = parse_identifier(p, JS_IDENTIFIER_BINDING); parsed.map(|mut identifier| { if identifier.kind(p).is_bogus() { return identifier; } let identifier_name = identifier.text(p); if StrictMode.is_supported(p) && matches!(identifier_name, "eval" | "arguments") { let err = p.err_builder( format!( "Illegal use of `{}` as an identifier in strict mode", identifier_name ), identifier.range(p), ); p.error(err); identifier.change_to_bogus(p); return identifier; } if let Some(parent) = p.state().duplicate_binding_parent { if identifier_name == "let" { let err = p .err_builder( format!( "`let` cannot be declared as a variable name inside of a `{}` declaration", parent, ), identifier.range(p), ) .hint("Rename the let identifier here"); p.error(err); identifier.change_to_bogus(p); return identifier; } if let Some(existing) = p.state().name_map.get(identifier_name) { let err = p .err_builder( format!( "Declarations inside of a `{}` declaration may not have duplicates", parent ), identifier.range(p), ) .detail( identifier.range(p), format!( "a second declaration of `{}` is not allowed", identifier_name ), ) .detail( existing.to_owned(), format!("`{}` is first declared here", identifier_name), ); p.error(err); identifier.change_to_bogus(p); return identifier; } let identifier_name = String::from(identifier_name); let identifier_range = identifier.range(p); p.state_mut() .name_map .insert(identifier_name, identifier_range.as_range()); } identifier }) } struct BindingPatternWithDefault; impl ParseWithDefaultPattern for 
BindingPatternWithDefault { #[inline] fn pattern_with_default_kind() -> JsSyntaxKind { JS_BINDING_PATTERN_WITH_DEFAULT } #[inline] fn expected_pattern_error(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_binding(p, range) } #[inline] fn parse_pattern(&self, p: &mut JsParser) -> ParsedSyntax { parse_binding_pattern(p, ExpressionContext::default()) } } struct ArrayBindingPattern; // test js array_binding // let a = "b"; // let [c, b] = [1, 2]; // let [d, ...abcd] = [1]; // let [e = "default", x] = [] // let [, f, ...rest] = [] // let [[...rest2], { g }] = [] // // test_err js array_binding_err // let [a b] = [1, 2]; // let [="default"] = [1, 2]; // let ["default"] = [1, 2]; // let [[c ] = []; // // test js array_binding_rest // let [ ...abcd ] = a; // let [ ...[x, y] ] = b; // let [ ...[ ...a ] ] = c; // // test_err js array_binding_rest_err // let [ ... ] = a; // let [ ...c = "default" ] = a; // let [ ...rest, other_assignment ] = a; impl ParseArrayPattern<BindingPatternWithDefault> for ArrayBindingPattern { #[inline] fn bogus_pattern_kind() -> JsSyntaxKind { JS_BOGUS_BINDING } #[inline] fn array_pattern_kind() -> JsSyntaxKind { JS_ARRAY_BINDING_PATTERN } #[inline] fn rest_pattern_kind() -> JsSyntaxKind { JS_ARRAY_BINDING_PATTERN_REST_ELEMENT } fn list_kind() -> JsSyntaxKind { JS_ARRAY_BINDING_PATTERN_ELEMENT_LIST } #[inline] fn expected_element_error(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any( &[ "identifier", "object pattern", "array pattern", "rest pattern", ], range, ) .into_diagnostic(p) } #[inline] fn pattern_with_default(&self) -> BindingPatternWithDefault { BindingPatternWithDefault } } // test_err js object_binding_pattern // let { 5 } } = { eval: "foo" }; // let { eval } = { eval: "foo" }; // let { 5, 6 } = { eval: "foo" }; // let { default , eval: } = {}; struct ObjectBindingPattern; impl ParseObjectPattern for ObjectBindingPattern { #[inline] fn bogus_pattern_kind() -> JsSyntaxKind { JS_BOGUS_BINDING } #[inline] fn 
object_pattern_kind() -> JsSyntaxKind { JS_OBJECT_BINDING_PATTERN } fn list_kind() -> JsSyntaxKind { JS_OBJECT_BINDING_PATTERN_PROPERTY_LIST } #[inline] fn expected_property_pattern_error(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["identifier", "member name", "rest pattern"], range).into_diagnostic(p) } // test js object_property_binding // let { foo: bar } = {} // let { foo: bar_bar = baz } = {} // // test_err js object_property_binding_err // let { foo: , bar } = {} // let { : lorem = "test" } = {} // let { , ipsum: bazz } = {} // // test js object_shorthand_property // let { a, b } = c // let { d = "default", e = call() } = c // // test_err js object_shorthand_property_err // let { a b } = c // let { = "test" } = c // let { , d } = c fn parse_property_pattern(&self, p: &mut JsParser) -> ParsedSyntax { if !is_at_object_member_name(p) && !p.at_ts(token_set![T![:], T![=]]) { return Absent; } let m = p.start(); let kind = if p.at(T![=]) || (is_at_identifier_binding(p) && !p.nth_at(1, T![:])) { parse_binding(p).or_add_diagnostic(p, expected_identifier); JS_OBJECT_BINDING_PATTERN_SHORTHAND_PROPERTY } else { parse_object_member_name(p).or_add_diagnostic(p, expected_object_member_name); if p.expect(T![:]) { parse_binding_pattern(p, ExpressionContext::default()) .or_add_diagnostic(p, expected_binding); } JS_OBJECT_BINDING_PATTERN_PROPERTY }; // test js destructuring_initializer_binding // const { value, f = (value) => value } = item let parent = p.state_mut().duplicate_binding_parent.take(); parse_initializer_clause(p, ExpressionContext::default()).ok(); p.state_mut().duplicate_binding_parent = parent; Present(m.complete(p, kind)) } // test js rest_property_binding // let { ...abcd } = a; // let { b: { ...a } } = c; // // test_err js rest_property_binding_err // let { ... 
} = a; // let { ...c = "default" } = a; // let { ...{a} } = b; // let { ...rest, other_assignment } = a; // let { ...rest2, } = a; // async function test() { // let { ...await } = a; // } fn parse_rest_property_pattern(&self, p: &mut JsParser) -> ParsedSyntax { if p.at(T![...]) { let m = p.start(); p.bump(T![...]); let inner = parse_binding_pattern(p, ExpressionContext::default()) .or_add_diagnostic(p, expected_identifier); if let Some(mut inner) = inner { if inner.kind(p) != JS_IDENTIFIER_BINDING { let inner_range = inner.range(p); // Don't add multiple errors if inner.kind(p) != JS_BOGUS_BINDING { p.error(p.err_builder("Expected identifier binding", inner_range,).hint( "Object rest patterns must bind to an identifier, other patterns are not allowed.")); } inner.change_kind(p, JS_BOGUS_BINDING); } } Present(m.complete(p, JS_OBJECT_BINDING_PATTERN_REST)) } else { Absent } } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/assignment.rs
crates/rome_js_parser/src/syntax/assignment.rs
use crate::parser::rewrite_parser::{RewriteMarker, RewriteParser, RewriteToken}; use crate::parser::JsParserCheckpoint; use crate::prelude::*; use crate::rewrite::{rewrite_events, RewriteParseEvents}; use crate::syntax::class::parse_initializer_clause; use crate::syntax::expr::{ is_at_identifier, parse_conditional_expr, parse_unary_expr, ExpressionContext, }; use crate::syntax::js_parse_error::{ expected_assignment_target, expected_identifier, expected_object_member_name, invalid_assignment_error, }; use crate::syntax::object::{is_at_object_member_name, parse_object_member_name}; use crate::syntax::pattern::{ParseArrayPattern, ParseObjectPattern, ParseWithDefaultPattern}; use crate::JsParser; use crate::ParsedSyntax::{Absent, Present}; use rome_js_syntax::{JsSyntaxKind::*, *}; use rome_parser::diagnostic::expected_any; use rome_rowan::AstNode; // test js assignment_target // foo += bar = b ??= 3; // a.foo -= bar; // (foo = bar); // (((foo))) = bar; // a["test"] = bar; // a.call().chain().member = x; // ++count === 3 // a['b'] = c[d] = "test" // test_err js invalid_assignment_target // ++a = b; // (++a) = b; // (a = b; // a?.b = b; // a?.["b"] = b; // (a +) = b; // test ts ts_non_null_assignment // let a; // a! &= 2; // let b = { a: null }; // b.a! 
&= 5 // test ts ts_as_assignment // let a: any; // type B<A> = { a: A }; // (a as string) = "string"; // ((a as any) as string) = null; // ({ b: a as string } = { b: "test" }); // ([ a as string ] = [ "test" ]); // for (a as string in []) {} // (a as B<string>) = { a: "test" }; // (<number> a) += 1 // test_err ts ts_as_assignment_no_parenthesize // let a: any; // a as string = "string"; // (a() as string) = "string"; // <number> a = 3; // test ts ts_satisfies_assignment // let a: any; // type B<A> = { a: A }; // (a satisfies string) = "string"; // ((a satisfies any) satisfies string) = null; // ({ b: a satisfies string } = { b: "test" }); // ([ a satisfies string ] = [ "test" ]); // for (a satisfies string in []) {} // (a satisfies B<string>) = { a: "test" }; // test_err ts ts_satisfies_assignment_no_parenthesize // let a: any; // a satisfies string = "string"; // (a() satisfies string) = "string"; /// Converts the passed in lhs expression to an assignment pattern /// The passed checkpoint allows to restore the parser to the state before it started parsing the expression. 
pub(crate) fn expression_to_assignment_pattern( p: &mut JsParser, target: CompletedMarker, checkpoint: JsParserCheckpoint, ) -> CompletedMarker { match target.kind(p) { JS_OBJECT_EXPRESSION => { p.rewind(checkpoint); ObjectAssignmentPattern.parse_object_pattern(p).unwrap() } JS_ARRAY_EXPRESSION => { p.rewind(checkpoint); ArrayAssignmentPattern.parse_array_pattern(p).unwrap() } _ => expression_to_assignment(p, target, checkpoint), } } // test js array_or_object_member_assignment // [{ // get y() { // throw new Test262Error('The property should not be accessed.'); // }, // set y(val) { // setValue = val; // } // }.y = 42] = [23]; // ({ x: { // get y() { // throw new Test262Error('The property should not be accessed.'); // }, // set y(val) { // setValue = val; // } // }.y = 42 } = { x: 23 }); pub(crate) fn parse_assignment_pattern(p: &mut JsParser) -> ParsedSyntax { let checkpoint = p.checkpoint(); let assignment_expression = parse_conditional_expr(p, ExpressionContext::default()); assignment_expression .map(|expression| expression_to_assignment_pattern(p, expression, checkpoint)) } /// Re-parses an expression as an assignment. pub(crate) fn expression_to_assignment( p: &mut JsParser, target: CompletedMarker, checkpoint: JsParserCheckpoint, ) -> CompletedMarker { try_expression_to_assignment(p, target, checkpoint).unwrap_or_else( // test_err js js_regex_assignment // /=0*_:m/=/*_:| |mut invalid_assignment_target| { // Doesn't seem to be a valid assignment target. Recover and create an error. 
invalid_assignment_target.change_kind(p, JS_BOGUS_ASSIGNMENT); p.error(invalid_assignment_error( p, invalid_assignment_target.range(p), )); invalid_assignment_target }, ) } pub(crate) enum AssignmentExprPrecedence { Unary, Conditional, } impl AssignmentExprPrecedence { fn parse_expression(&self, p: &mut JsParser, context: ExpressionContext) -> ParsedSyntax { match self { AssignmentExprPrecedence::Unary => parse_unary_expr(p, context), AssignmentExprPrecedence::Conditional => parse_conditional_expr(p, context), } } } pub(crate) fn parse_assignment( p: &mut JsParser, expr_kind: AssignmentExprPrecedence, context: ExpressionContext, ) -> ParsedSyntax { let checkpoint = p.checkpoint(); let assignment_expression = expr_kind.parse_expression(p, context); assignment_expression.map(|expr| expression_to_assignment(p, expr, checkpoint)) } struct AssignmentPatternWithDefault; impl ParseWithDefaultPattern for AssignmentPatternWithDefault { #[inline] fn pattern_with_default_kind() -> JsSyntaxKind { JS_ASSIGNMENT_WITH_DEFAULT } #[inline] fn expected_pattern_error(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_assignment_target(p, range) } #[inline] fn parse_pattern(&self, p: &mut JsParser) -> ParsedSyntax { parse_assignment_pattern(p) } } struct ArrayAssignmentPattern; // test js array_assignment_target // [foo, bar] = baz; // [,,,b,,c,] = baz; // [a = "test", a.b, call().b] = baz; // [((a))] = baz; // // test_err js array_assignment_target_err // [a a, ++b, ] = test; // [a, c, ...rest,] = test; // [a = , = "test"] = test; // [[a b] [c]]= test; // [a: b] = c impl ParseArrayPattern<AssignmentPatternWithDefault> for ArrayAssignmentPattern { #[inline] fn bogus_pattern_kind() -> JsSyntaxKind { JS_BOGUS_ASSIGNMENT } #[inline] fn array_pattern_kind() -> JsSyntaxKind { JS_ARRAY_ASSIGNMENT_PATTERN } // test js array_assignment_target_rest // ([ ...abcd ] = a); // ([ ...(abcd) ] = a); // ([ ...m.test ] = c); // ([ ...m[call()] ] = c); // ([ ...any.expression().b ] = c); // 
([ ...[x, y] ] = b); // ([ ...[ ...a ] ] = c); // // test_err js array_assignment_target_rest_err // ([ ... ] = a); // ([ ...c = "default" ] = a); // ([ ...rest, other_assignment ] = a); #[inline] fn rest_pattern_kind() -> JsSyntaxKind { JS_ARRAY_ASSIGNMENT_PATTERN_REST_ELEMENT } fn list_kind() -> JsSyntaxKind { JS_ARRAY_ASSIGNMENT_PATTERN_ELEMENT_LIST } #[inline] fn expected_element_error(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["assignment target", "rest element", "comma"], range).into_diagnostic(p) } #[inline] fn pattern_with_default(&self) -> AssignmentPatternWithDefault { AssignmentPatternWithDefault } } struct ObjectAssignmentPattern; // test js object_assignment_target // ({} = {}); // ({ bar, baz } = {}); // ({ bar: [baz = "baz"], foo = "foo", ...rest } = {}); impl ParseObjectPattern for ObjectAssignmentPattern { #[inline] fn bogus_pattern_kind() -> JsSyntaxKind { JS_BOGUS_ASSIGNMENT } #[inline] fn object_pattern_kind() -> JsSyntaxKind { JS_OBJECT_ASSIGNMENT_PATTERN } fn list_kind() -> JsSyntaxKind { JS_OBJECT_ASSIGNMENT_PATTERN_PROPERTY_LIST } #[inline] fn expected_property_pattern_error(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["assignment target", "rest property"], range).into_diagnostic(p) } // test js property_assignment_target // ({x}= {}); // ({x: y}= {}); // ({x: y.test().z}= {}); // ({x: ((z))}= {}); // ({x: z["computed"]}= {}); // ({x = "default"}= {}); // ({x: y = "default"}= {}); // ({0: y, [computed]: z} = {}); // // test_err js property_assignment_target_err // ({:y} = {}); // ({=y} = {}); // ({:="test"} = {}); // ({:=} = {}); // ({ a b } = {}); fn parse_property_pattern(&self, p: &mut JsParser) -> ParsedSyntax { let m = p.start(); let kind = if (is_at_identifier(p) || p.at(T![=])) && !p.nth_at(1, T![:]) { parse_assignment( p, AssignmentExprPrecedence::Conditional, ExpressionContext::default(), ) .or_add_diagnostic(p, expected_identifier); JS_OBJECT_ASSIGNMENT_PATTERN_SHORTHAND_PROPERTY } 
else if is_at_object_member_name(p) || p.at(T![:]) || p.nth_at(1, T![:]) { parse_object_member_name(p).or_add_diagnostic(p, expected_object_member_name); p.expect(T![:]); parse_assignment_pattern(p).or_add_diagnostic(p, expected_assignment_target); JS_OBJECT_ASSIGNMENT_PATTERN_PROPERTY } else { m.abandon(p); return Absent; }; parse_initializer_clause(p, ExpressionContext::default()).ok(); Present(m.complete(p, kind)) } // test js rest_property_assignment_target // ({ ...abcd } = a); // ({ ...(abcd) } = a); // ({ ...m.test } = c); // ({ ...m[call()] } = c); // ({ ...any.expression().b } = c); // ({ b: { ...a } } = c); // // test_err js rest_property_assignment_target_err // ({ ... } = a); // ({ ...c = "default" } = a); // ({ ...{a} } = b); // ({ ...rest, other_assignment } = a); // ({ ...rest, } = a); fn parse_rest_property_pattern(&self, p: &mut JsParser) -> ParsedSyntax { if !p.at(T![...]) { return Absent; } let m = p.start(); p.bump(T![...]); let target = parse_assignment_pattern(p).or_add_diagnostic(p, expected_assignment_target); if let Some(mut target) = target { if matches!( target.kind(p), JS_OBJECT_ASSIGNMENT_PATTERN | JS_ARRAY_ASSIGNMENT_PATTERN ) { target.change_kind(p, JS_BOGUS_ASSIGNMENT); p.error(p.err_builder( "object and array assignment targets are not allowed in rest patterns", target.range(p), )); } } Present(m.complete(p, JS_OBJECT_ASSIGNMENT_PATTERN_REST)) } } fn try_expression_to_assignment( p: &mut JsParser, target: CompletedMarker, checkpoint: JsParserCheckpoint, ) -> Result<CompletedMarker, CompletedMarker> { if !matches!( target.kind(p), JS_PARENTHESIZED_EXPRESSION | JS_STATIC_MEMBER_EXPRESSION | JS_COMPUTED_MEMBER_EXPRESSION | JS_IDENTIFIER_EXPRESSION | TS_NON_NULL_ASSERTION_EXPRESSION | TS_TYPE_ASSERTION_EXPRESSION | TS_AS_EXPRESSION | TS_SATISFIES_EXPRESSION ) { return Err(target); } // At this point it's guaranteed that the root node can be mapped to an assignment, // but it's not yet guaranteed if it is valid or not (for example, a 
static member expression // is valid, except if it uses optional chaining). let mut reparse_assignment = ReparseAssignment::new(); rewrite_events(&mut reparse_assignment, checkpoint, p); Ok(reparse_assignment.result.unwrap()) } struct ReparseAssignment { // Stores the unfinished parents // Index 0: Re-mapped kind of the node // Index 1: Started marker. A `None` marker means that this node should be dropped // from the re-written tree parents: Vec<(JsSyntaxKind, Option<RewriteMarker>)>, // Stores the completed assignment node (valid or invalid). result: Option<CompletedMarker>, // Tracks if the visitor is still inside an assignment inside_assignment: bool, } impl ReparseAssignment { pub fn new() -> Self { Self { parents: Vec::default(), result: None, inside_assignment: true, } } } /// Rewrites expressions to assignments /// * Converts parenthesized expression to parenthesized assignment /// * Converts computed/static member expressions to computed/static member assignment. /// Validates that the operator isn't `?.` . 
/// * Converts identifier expressions to identifier assignment, drops the inner reference identifier impl RewriteParseEvents for ReparseAssignment { fn start_node(&mut self, kind: JsSyntaxKind, p: &mut RewriteParser) { if !self.inside_assignment { self.parents.push((kind, Some(p.start()))); return; } // Make sure to also add the kind to the match in `try_expression_to_assignment` let mapped_kind = match kind { JS_PARENTHESIZED_EXPRESSION => JS_PARENTHESIZED_ASSIGNMENT, JS_STATIC_MEMBER_EXPRESSION => { self.inside_assignment = false; JS_STATIC_MEMBER_ASSIGNMENT } JS_COMPUTED_MEMBER_EXPRESSION => { self.inside_assignment = false; JS_COMPUTED_MEMBER_ASSIGNMENT } JS_IDENTIFIER_EXPRESSION => JS_IDENTIFIER_ASSIGNMENT, TS_NON_NULL_ASSERTION_EXPRESSION => TS_NON_NULL_ASSERTION_ASSIGNMENT, TS_AS_EXPRESSION => TS_AS_ASSIGNMENT, TS_SATISFIES_EXPRESSION => TS_SATISFIES_ASSIGNMENT, TS_TYPE_ASSERTION_EXPRESSION => TS_TYPE_ASSERTION_ASSIGNMENT, JS_REFERENCE_IDENTIFIER => { self.parents.push((kind, None)); // Omit reference identifiers return; } _ => { self.inside_assignment = false; if AnyTsType::can_cast(kind) && matches!( self.parents.last(), Some(( TS_AS_ASSIGNMENT | TS_SATISFIES_ASSIGNMENT | TS_TYPE_ASSERTION_ASSIGNMENT, _ )) ) { kind } else { JS_BOGUS_ASSIGNMENT } } }; self.parents.push((mapped_kind, Some(p.start()))); } fn finish_node(&mut self, p: &mut RewriteParser) { let (kind, m) = self.parents.pop().unwrap(); if let Some(m) = m { let mut completed = m.complete(p, kind); match kind { JS_IDENTIFIER_ASSIGNMENT => { // test_err js eval_arguments_assignment // eval = "test"; // arguments = "test"; let name = completed.text(p); if matches!(name, "eval" | "arguments") && p.is_strict_mode() { let error = p.err_builder( format!("Illegal use of `{}` as an identifier in strict mode", name), completed.range(p), ); p.error(error); completed.change_to_bogus(p); } } JS_BOGUS_ASSIGNMENT => { let range = completed.range(p); p.error( p.err_builder( format!("Invalid assignment to `{}`", 
completed.text(p)), range, ) .hint("This expression cannot be assigned to"), ); } _ => {} } self.result = Some(completed.into()); } if AnyTsType::can_cast(kind) && matches!( self.parents.last(), Some(( TS_TYPE_ASSERTION_ASSIGNMENT | TS_AS_ASSIGNMENT | TS_SATISFIES_ASSIGNMENT, _ )) ) { self.inside_assignment = true; } } fn token(&mut self, token: RewriteToken, p: &mut RewriteParser) { let parent = self.parents.last_mut(); if let Some((parent_kind, _)) = parent { if matches!( *parent_kind, JS_COMPUTED_MEMBER_ASSIGNMENT | JS_STATIC_MEMBER_ASSIGNMENT ) && token.kind == T![?.] { *parent_kind = JS_BOGUS_ASSIGNMENT } } p.bump(token) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/stmt.rs
crates/rome_js_parser/src/syntax/stmt.rs
//! Statements, these include `if`, `while`, `for`, `;`, and more. //! //! See the [ECMAScript spec](https://www.ecma-international.org/ecma-262/5.1/#sec-12). use super::binding::*; use super::class::is_at_ts_abstract_class_declaration; use super::expr::parse_expression; use super::module::parse_export; use super::typescript::*; use crate::parser::RecoveryResult; use crate::prelude::*; use crate::state::{ BreakableKind, ChangeParserState, EnableStrictMode, EnableStrictModeSnapshot, EnterBreakable, LabelledItem, StrictMode as StrictModeState, WithLabel, }; use crate::syntax::assignment::expression_to_assignment_pattern; use crate::syntax::class::{parse_class_declaration, parse_decorators, parse_initializer_clause}; use crate::syntax::expr::{ is_at_expression, is_at_identifier, is_nth_at_identifier, parse_assignment_expression_or_higher, parse_expression_or_recover_to_next_statement, parse_identifier, ExpressionContext, }; use crate::syntax::function::{is_at_async_function, parse_function_declaration, LineBreak}; use crate::syntax::js_parse_error; use crate::syntax::js_parse_error::{decorators_not_allowed, expected_binding, expected_statement}; use crate::syntax::module::parse_import_or_import_equals_declaration; use crate::syntax::typescript::ts_parse_error::{expected_ts_type, ts_only_syntax_error}; use crate::span::Span; use crate::JsSyntaxFeature::{StrictMode, TypeScript}; use crate::ParsedSyntax::{Absent, Present}; use crate::{parser, JsParser, JsSyntaxFeature, ParseRecovery}; use rome_js_syntax::{JsSyntaxKind::*, *}; use rome_parser::diagnostic::expected_token; use rome_parser::parse_lists::{ParseNodeList, ParseSeparatedList}; use rome_parser::ParserProgress; use rome_rowan::SyntaxKind; pub const STMT_RECOVERY_SET: TokenSet<JsSyntaxKind> = token_set![ L_CURLY, VAR_KW, FUNCTION_KW, IF_KW, FOR_KW, DO_KW, WHILE_KW, CONTINUE_KW, BREAK_KW, RETURN_KW, WITH_KW, SWITCH_KW, THROW_KW, TRY_KW, DEBUGGER_KW, FUNCTION_KW, CLASS_KW, IMPORT_KW, EXPORT_KW, ABSTRACT_KW, 
INTERFACE_KW, ENUM_KW, TYPE_KW, DECLARE_KW, MODULE_KW, NAMESPACE_KW, LET_KW, CONST_KW, USING_KW, MODULE_KW, NAMESPACE_KW, GLOBAL_KW, T![@], T![;] ]; /// Consume an explicit semicolon, or try to automatically insert one, /// or add an error to the parser if there was none and it could not be inserted // test js semicolons // let foo = bar; // let foo2 = b; // let foo3; // let foo4 // let foo5 // function foo6() { return true } pub(crate) fn semi(p: &mut JsParser, err_range: TextRange) -> bool { // test_err js semicolons_err // let foo = bar throw foo if !optional_semi(p) { let err = p .err_builder( "Expected a semicolon or an implicit semicolon after a statement, but found none", p.cur_range(), ) .detail( p.cur_range(), "An explicit or implicit semicolon is expected here...", ) .detail(err_range, "...Which is required to end this statement"); p.error(err); false } else { true } } /// Eats a semicolon if present but doesn't add an error none is present and the automatic /// semicolon insertion rule does not apply. /// /// Returns false if neither a semicolon was present and the current position doesn't allow an automatic /// semicolon insertion. pub(crate) fn optional_semi(p: &mut JsParser) -> bool { if p.eat(T![;]) { return true; } is_semi(p, 0) } pub(super) fn is_semi(p: &mut JsParser, offset: usize) -> bool { p.nth_at(offset, T![;]) || p.nth_at(offset, EOF) || p.nth_at(offset, T!['}']) || p.has_nth_preceding_line_break(offset) } #[derive(Debug, Clone, Copy, Eq, PartialEq)] pub(crate) enum StatementContext { If, Label, Do, While, With, For, // Block, Switch consequence, etc. 
StatementList, } impl StatementContext { pub(crate) fn is_single_statement(&self) -> bool { !matches!(self, StatementContext::StatementList) } pub(crate) fn is_statement_list(&self) -> bool { matches!(self, StatementContext::StatementList) } } /// A generic statement such as a block, if, while, with, etc /// /// Error handling and recovering happens inside this function, so the /// caller has to pass a recovery set. /// /// If not passed, [STMT_RECOVERY_SET] will be used as recovery set pub(crate) fn parse_statement(p: &mut JsParser, context: StatementContext) -> ParsedSyntax { match p.cur() { // test_err js import_decl_not_top_level // { // import foo from "bar"; // } // make sure we dont try parsing import.meta or import() as declarations T![import] if !token_set![T![.], T!['(']].contains(p.nth(1)) => { let mut import = parse_import_or_import_equals_declaration(p).unwrap(); if import.kind(p) == TS_IMPORT_EQUALS_DECLARATION { return Present(import); } import.change_kind(p, JS_BOGUS_STATEMENT); let error = match p.source_type().module_kind() { ModuleKind::Script => p .err_builder( "Illegal use of an import declaration outside of a module", import.range(p), ) .hint("not allowed inside scripts"), ModuleKind::Module => p .err_builder( "Illegal use of an import declaration not at the top level", import.range(p), ) .hint("move this declaration to the top level"), }; p.error(error); Present(import) } // test_err js export_decl_not_top_level // { // export { pain } from "life"; // } T![export] => parse_non_top_level_export(p, Absent), T![;] => parse_empty_statement(p), T!['{'] => parse_block_stmt(p), T![if] => parse_if_statement(p), T![with] => parse_with_statement(p), T![while] => parse_while_statement(p), T![const] | T![enum] if is_at_ts_enum_declaration(p) => { // test_err js enum_in_js // enum A {} TypeScript.parse_exclusive_syntax(p, parse_ts_enum_declaration, |p, declaration| { ts_only_syntax_error(p, "'enum's", declaration.range(p)) }) } T![var] => 
parse_variable_statement(p, context), T![const] => parse_variable_statement(p, context), T![using] if is_nth_at_using_declaration(p, 0) => parse_variable_statement(p, context), T![await] => { if is_nth_at_using_declaration(p, 0) { parse_variable_statement(p, context) } else { parse_expression_statement(p) } } T![for] => parse_for_statement(p), T![do] => parse_do_statement(p), T![switch] => parse_switch_statement(p), T![try] => parse_try_statement(p), T![return] => parse_return_statement(p), T![break] => parse_break_statement(p), T![continue] => parse_continue_statement(p), T![throw] => parse_throw_statement(p), T![debugger] => parse_debugger_statement(p), // function and async function T![function] => parse_function_declaration(p, context), T![async] if is_at_async_function(p, LineBreak::DoCheck) => { parse_function_declaration(p, context) } // class and abstract class T![class] => parse_class_declaration(p, Absent, context), T![@] => { let decorator_list = parse_decorators(p); match p.cur() { T![export] if p.nth_at(1, T![class]) => { // test_err js decorator_export // function foo() { // @decorator // export class Foo { } // @first.field @second @(() => decorator)() // export class Bar {} // } parse_non_top_level_export(p, decorator_list) } T![class] => { // test js decorator_class_declaration // function foo() { // @decorator // class Foo { } // @first.field @second @(() => decorator)() // class Bar {} // } parse_class_declaration(p, decorator_list, context) } T![abstract] if is_at_ts_abstract_class_declaration(p, LineBreak::DoCheck) => { // test ts decorator_abstract_class_declaration // function foo() { // @decorator abstract class A {} // @first.field @second @(() => decorator)() // abstract class Bar {} // } TypeScript.parse_exclusive_syntax( p, |p| parse_class_declaration(p, decorator_list, context), |p, abstract_class| { ts_only_syntax_error(p, "abstract classes", abstract_class.range(p)) }, ) } _ => { // test_err js decorator_class_declaration // function 
bar() { // @decorator // let a; // @decorator @decorator2 // function Foo() { } // } decorator_list .add_diagnostic_if_present(p, decorators_not_allowed) .map(|mut marker| { marker.change_kind(p, JS_BOGUS_STATEMENT); marker }); parse_statement(p, context) } } } T![abstract] if is_at_ts_abstract_class_declaration(p, LineBreak::DoCheck) => { // test_err js abstract_class_in_js // abstract class A {} TypeScript.parse_exclusive_syntax( p, |p| parse_class_declaration(p, Absent, context), |p, abstract_class| { ts_only_syntax_error(p, "abstract classes", abstract_class.range(p)) }, ) } T![ident] if p.nth_at(1, T![:]) => parse_labeled_statement(p, context), _ if is_at_identifier(p) && p.nth_at(1, T![:]) => parse_labeled_statement(p, context), T![let] if is_nth_at_let_variable_statement(p, 0) && (p.cur_text() == "let" || !p.has_nth_preceding_line_break(1)) => { // test_err js let_newline_in_async_function // async function f() { // let // await 0; // } // test js let_asi_rule // // SCRIPT // let // NO ASI // x = 1; // for await (var x of []) let // ASI // x = 1; // test_err js let_array_with_new_line // // SCRIPT // L: let // [a] = 0; if p.nth_at(1, T!['[']) || context.is_statement_list() || !p.has_nth_preceding_line_break(1) { parse_variable_statement(p, context) } else { parse_expression_statement(p) } } T![type] if !p.has_nth_preceding_line_break(1) && is_nth_at_identifier(p, 1) => { // test ts ts_type_variable // let type; // type = getFlowTypeInConstructor(symbol, getDeclaringConstructor(symbol)!); TypeScript.parse_exclusive_syntax( p, parse_ts_type_alias_declaration, |p, type_alias| ts_only_syntax_error(p, "type alias", type_alias.range(p)), ) } T![interface] if is_at_ts_interface_declaration(p) => { TypeScript.parse_exclusive_syntax(p, parse_ts_interface_declaration, |p, interface| { ts_only_syntax_error(p, "interface", interface.range(p)) }) } T![declare] if is_at_ts_declare_statement(p) => { let declare_range = p.cur_range(); TypeScript.parse_exclusive_syntax(p, 
parse_ts_declare_statement, |p, _| { p.err_builder( "The 'declare' modifier can only be used in TypeScript files.", declare_range, ) }) } T![async] if is_at_async_function(p, LineBreak::DoNotCheck) => { parse_function_declaration(p, context) } T![module] | T![namespace] | T![global] if is_at_any_ts_namespace_declaration(p) => { let name = p.cur_range(); TypeScript.parse_exclusive_syntax( p, parse_any_ts_namespace_declaration_statement, |p, declaration| { ts_only_syntax_error(p, p.text(name.as_range()), declaration.range(p)) }, ) } _ if is_at_expression(p) => parse_expression_statement(p), _ => Absent, } } pub(crate) fn parse_non_top_level_export( p: &mut JsParser, decorator_list: ParsedSyntax, ) -> ParsedSyntax { parse_export(p, decorator_list).map(|mut export| { let error = match p.source_type().module_kind() { ModuleKind::Module => p .err_builder( "Illegal use of an export declaration not at the top level", export.range(p), ) .hint("move this declaration to the top level"), ModuleKind::Script => p .err_builder( "Illegal use of an export declaration outside of a module", export.range(p), ) .hint("not allowed inside scripts"), }; p.error(error); export.change_kind(p, JS_BOGUS_STATEMENT); export }) } // test js labeled_statement // label1: 1 // label1: 1 // label2: 2 // // test_err js double_label // label1: { // label2: { // label1: {} // } // } // // test js labelled_function_declaration // // SCRIPT // label1: function a() {} // // test_err js labelled_function_declaration_strict_mode // label1: function a() {} fn parse_labeled_statement(p: &mut JsParser, context: StatementContext) -> ParsedSyntax { parse_identifier(p, JS_LABELED_STATEMENT).map(|identifier| { fn parse_body(p: &mut JsParser, context: StatementContext) -> ParsedSyntax { if is_at_identifier(p) && p.nth_at(1, T![:]) && StrictMode.is_unsupported(p) { // Re-use the parent context to catch `if (true) label1: label2: function A() {} parse_labeled_statement(p, context) } else { parse_statement(p, 
StatementContext::Label) } } p.bump(T![:]); let identifier_range = identifier.range(p); let is_valid_identifier = !identifier.kind(p).is_bogus(); let labelled_statement = identifier.undo_completion(p); let label = p.text(identifier_range); let body = match p.state().get_labelled_item(label) { None => { let labelled_item = match p.cur() { T![for] | T![do] | T![while] => LabelledItem::Iteration(identifier_range), _ => LabelledItem::Other(identifier_range) }; let change = WithLabel(String::from(label), labelled_item); p.with_state(change, |p| parse_body(p, context)) }, Some(label_item) if is_valid_identifier => { let err = p .err_builder("Duplicate statement labels are not allowed", identifier_range) .detail( identifier_range, format!("a second use of `{}` here is not allowed", label), ) .detail( label_item.range().to_owned(), format!("`{}` is first used as a label here", label), ); p.error(err); parse_body(p, context) }, Some(_) => { // Don't add another error, the identifier is already invalid parse_body(p, context) } }; match body.or_add_diagnostic(p, expected_statement) { Some(mut body) if context.is_single_statement() && body.kind(p) == JS_FUNCTION_DECLARATION => { // test_err js labelled_function_decl_in_single_statement_context // if (true) label1: label2: function a() {} p.error(p.err_builder("Labelled function declarations are only allowed at top-level or inside a block", body.range(p)).hint( "Wrap the labelled statement in a block statement")); body.change_to_bogus(p); }, // test js labelled_statement_in_single_statement_context // if (true) label1: var a = 10; _ => {} } labelled_statement.complete(p, JS_LABELED_STATEMENT) }) } // test js ts_keyword_assignments // declare = 1; // abstract = 2; // namespace = 3; // type = 4; // module = 5; // global = 6; // // test js ts_keywords_assignments_script // // SCRIPT // interface = 1; // private = 2; // protected = 3; // public = 4; // implements = 5; fn parse_expression_statement(p: &mut JsParser) -> ParsedSyntax 
{ let start = p.cur_range().start(); let expr = parse_expression_or_recover_to_next_statement(p, false, ExpressionContext::default()); if let Ok(expr) = expr { let m = expr.precede(p); semi(p, TextRange::new(start, p.cur_range().end())); Present(m.complete(p, JS_EXPRESSION_STATEMENT)) } else { Absent } } // test js debugger_stmt // debugger; // test_err js debugger_stmt // function foo() { // debugger { // var something = "lorem"; // } // } /// A debugger statement such as `debugger;` fn parse_debugger_statement(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![debugger]) { return Absent; } let m = p.start(); let range = p.cur_range(); p.expect(T![debugger]); // debugger keyword semi(p, range); Present(m.complete(p, JS_DEBUGGER_STATEMENT)) } /// A throw statement such as `throw new Error("uh oh");` // test js throw_stmt // throw new Error("foo"); // throw "foo" fn parse_throw_statement(p: &mut JsParser) -> ParsedSyntax { // test_err js throw_stmt_err // throw // new Error("oh no :(") // throw; if !p.at(T![throw]) { return Absent; } let m = p.start(); let start = p.cur_range().start(); p.expect(T![throw]); // throw keyword if p.has_preceding_line_break() { let mut err = p .err_builder( "Linebreaks between a throw statement and the error to be thrown are not allowed", p.cur_range(), ) .hint("A linebreak is not allowed here"); if is_at_expression(p) { err = err.detail(p.cur_range(), "Help: did you mean to throw this?"); } p.error(err); } else { parse_expression_or_recover_to_next_statement(p, false, ExpressionContext::default()).ok(); } semi(p, TextRange::new(start, p.cur_range().end())); Present(m.complete(p, JS_THROW_STATEMENT)) } // test js break_stmt // while (true) { // break; // foo: { // break foo; // } // } // test_err js break_stmt // function foo() { break; } // while (true) { // break foo; // } /// A break statement with an optional label such as `break a;` fn parse_break_statement(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![break]) { return Absent; } let 
m = p.start(); let start = p.cur_range(); p.expect(T![break]); // break keyword let error = if !p.has_preceding_line_break() && p.at(T![ident]) { let label_name = p.cur_text(); let error = match p.state().get_labelled_item(label_name) { Some(_) => None, None => Some( p.err_builder( format!("Use of undefined statement label `{}`", label_name,), p.cur_range(), ) .hint("This label is used, but it is never defined"), ), }; p.bump_any(); error } else if !p.state().break_allowed() { Some(p.err_builder("A `break` statement can only be used within an enclosing iteration or switch statement.", start, )) } else { None }; semi(p, TextRange::new(start.start(), p.cur_range().end())); if let Some(error) = error { p.error(error); Present(m.complete(p, JS_BOGUS_STATEMENT)) } else { Present(m.complete(p, JS_BREAK_STATEMENT)) } } // test js continue_stmt // outer: while(true) { // while (true) { // continue; // continue outer; // } // continue // } // test_err js continue_stmt // function foo() { continue; } // while (true) { // continue foo; // } // foo: { // continue foo; // } /// A continue statement with an optional label such as `continue a;` fn parse_continue_statement(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![continue]) { return Absent; } let m = p.start(); let start = p.cur_range(); p.expect(T![continue]); // continue keyword // test js async_continue_stmt // async: for(a of b) continue async; let error = if !p.has_preceding_line_break() && is_at_identifier(p) { let label_name = p.cur_text(); let error = match p.state().get_labelled_item(label_name) { Some(LabelledItem::Iteration(_)) => None, Some(LabelledItem::Other(range)) => { Some(p.err_builder("A `continue` statement can only jump to a label of an enclosing `for`, `while` or `do while` statement.", p.cur_range()) .detail(p.cur_range(), "This label") .detail(range.to_owned(), "points to non-iteration statement")) } None => { Some(p .err_builder(format!( "Use of undefined statement label `{}`", label_name ), 
p.cur_range()) .hint( "This label is used, but it is never defined", )) } }; p.bump_remap(T![ident]); error } else if !p.state().continue_allowed() { Some( p.err_builder( "A `continue` statement can only be used within an enclosing `for`, `while` or `do while` statement.",start ), ) } else { None }; semi(p, TextRange::new(start.start(), p.cur_range().end())); if let Some(error) = error { p.error(error); Present(m.complete(p, JS_BOGUS_STATEMENT)) } else { Present(m.complete(p, JS_CONTINUE_STATEMENT)) } } // test js return_stmt // () => { // return; // return foo; // return // } /// A return statement with an optional value such as `return a;` fn parse_return_statement(p: &mut JsParser) -> ParsedSyntax { // test_err js return_stmt_err // return; // return foo; if !p.at(T![return]) { return Absent; } let m = p.start(); let start = p.cur_range().start(); p.expect(T![return]); if !p.has_preceding_line_break() { parse_expression(p, ExpressionContext::default()).ok(); } semi(p, TextRange::new(start, p.cur_range().end())); let mut complete = m.complete(p, JS_RETURN_STATEMENT); if !p.state().in_function() { let err = p.err_builder( "Illegal return statement outside of a function", complete.range(p), ); p.error(err); complete.change_kind(p, JS_BOGUS_STATEMENT); } Present(complete) } // test js empty_stmt // ; /// An empty statement denoted by a single semicolon. fn parse_empty_statement(p: &mut JsParser) -> ParsedSyntax { if p.at(T![;]) { let m = p.start(); p.bump_any(); // bump ; m.complete(p, JS_EMPTY_STATEMENT).into() } else { Absent } } // test js block_stmt // {} // {{{{}}}} // { foo = bar; } /// A block statement consisting of statements wrapped in curly brackets. pub(crate) fn parse_block_stmt(p: &mut JsParser) -> ParsedSyntax { parse_block_impl(p, JS_BLOCK_STATEMENT) } /// A block wrapped in curly brackets. Can either be a function body or a block statement. 
pub(super) fn parse_block_impl(p: &mut JsParser, block_kind: JsSyntaxKind) -> ParsedSyntax { if !p.at(T!['{']) { return Absent; } let m = p.start(); p.bump(T!['{']); let (statement_list, strict_snapshot) = if block_kind == JS_FUNCTION_BODY { parse_directives(p) } else { (p.start(), None) }; parse_statements(p, true, statement_list); p.expect(T!['}']); if let Some(strict_snapshot) = strict_snapshot { EnableStrictMode::restore(p.state_mut(), strict_snapshot); } Present(m.complete(p, block_kind)) } // test js directives // // SCRIPT // "use new" // let a = 10; // "use strict"; // not a directive // function test() { // 'use strict'; // let b = 10; // 'use strict'; // not a directive // } // (function () { // "use strict"; // "use strict" // .length; // not a directive // let c = 10; // "use strict"; // not a directive // }); // let b = () => { // "use strict"; // let e = 10; // "use strict"; // not a directive // } // { // "use strict"; // not a directive // } // // // test js directives_redundant // // SCRIPT // function test() { // "use strict"; // function inner_a() { // "use strict"; // } // function inner_b() { // function inner_inner() { // "use strict"; // } // } // } /// Parses the directives and returns /// * The marker for the following statement list. 
May already contain a parsed out expression statement /// * A checkpoint containing the previous strict mode pub(crate) fn parse_directives(p: &mut JsParser) -> (Marker, Option<EnableStrictModeSnapshot>) { let list = p.start(); let mut directives_list = list.complete(p, JS_DIRECTIVE_LIST); let mut strict_mode_snapshot: Option<EnableStrictModeSnapshot> = None; let mut progress = ParserProgress::default(); let statement_list = loop { progress.assert_progressing(p); // Certainly not a directive, start statement list if !p.at(JS_STRING_LITERAL) { break p.start(); } let expression = parse_expression(p, ExpressionContext::default()) .expect("A string token always yields a valid expression"); // Something like "use strict".length isn't a valid directive if expression.kind(p) != JS_STRING_LITERAL_EXPRESSION { // Turned out not to be a directive. // Start statement list before the just parsed expression statement let statement = expression.precede(p).complete(p, JS_EXPRESSION_STATEMENT); break statement.precede(p); } let directive_range = expression.range(p); let directive = expression.undo_completion(p); semi(p, directive_range); let directive_text = p.text(directive_range); let directive_is_use_strict = directive_text == "\"use strict\"" || directive_text == "'use strict'"; if directive_is_use_strict && strict_mode_snapshot.is_none() { strict_mode_snapshot = Some( EnableStrictMode(StrictModeState::Explicit(directive_range)).apply(p.state_mut()), ); } directive.complete(p, JS_DIRECTIVE); // Extend the directive list to include the just parsed directive directives_list = directives_list .undo_completion(p) .complete(p, JS_DIRECTIVE_LIST); }; (statement_list, strict_mode_snapshot) } /// Top level items or items inside of a block statement, this also handles module items so we can /// easily recover from erroneous module declarations in scripts pub(crate) fn parse_statements(p: &mut JsParser, stop_on_r_curly: bool, statement_list: Marker) { let mut progress = 
ParserProgress::default(); // test_err js statements_closing_curly // { // "name": "troublesome-lib", // "typings": "lib/index.d.ts", // "version": "0.0.1" // } let recovery_set = if stop_on_r_curly { // Don't eat over the closing '}' STMT_RECOVERY_SET.union(token_set![T!['}']]) } else { STMT_RECOVERY_SET }; while !p.at(EOF) { progress.assert_progressing(p); if stop_on_r_curly && p.at(T!['}']) { break; } if parse_statement(p, StatementContext::StatementList) .or_recover( p, &ParseRecovery::new(JS_BOGUS_STATEMENT, recovery_set), expected_statement, ) .is_err() { break; } } statement_list.complete(p, JS_STATEMENT_LIST); } /// An expression wrapped in parentheses such as `()` /// Returns `true` if the closing parentheses is present fn parenthesized_expression(p: &mut JsParser) -> bool { let has_l_paren = p.expect(T!['(']); parse_expression( p, ExpressionContext::default().and_object_expression_allowed(has_l_paren), ) .or_add_diagnostic(p, js_parse_error::expected_expression); p.expect(T![')']) } /// An if statement such as `if (foo) { bar(); }` // test js if_stmt // if (true) {} else {} // if (true) {} // if (true) false // if (bar) {} else if (true) {} else {} fn parse_if_statement(p: &mut JsParser) -> ParsedSyntax { // test_err js if_stmt_err // if (true) else {} // if (true) else // if else {} // if () {} else {} // if (true)}}}} {} if !p.at(T![if]) { return Absent; } let m = p.start(); p.expect(T![if]); // (test) parenthesized_expression(p); // body parse_statement(p, StatementContext::If).or_add_diagnostic(p, expected_statement); // else clause if p.at(T![else]) { let else_clause = p.start(); p.expect(T![else]); parse_statement(p, StatementContext::If).or_add_diagnostic(p, expected_statement); else_clause.complete(p, JS_ELSE_CLAUSE); } Present(m.complete(p, JS_IF_STATEMENT)) } // test js with_statement // // SCRIPT // function f(x, o) { // with (o) { // console.log(x); // } // } /// A with statement such as `with (foo) something()` fn parse_with_statement(p: &mut 
JsParser) -> ParsedSyntax { if !p.at(T![with]) { return Absent; } let m = p.start(); p.expect(T![with]); parenthesized_expression(p); parse_statement(p, StatementContext::With).or_add_diagnostic(p, expected_statement); let with_stmt = m.complete(p, JS_WITH_STATEMENT); // or SloppyMode.exclusive_syntax(...) but this reads better with the error message, saying that // it's only forbidden in strict mode StrictMode.excluding_syntax(p, with_stmt, |p, marker| { p.err_builder( "`with` statements are not allowed in strict mode", marker.range(p), ) }) } /// A while statement such as `while(true) { do_something() }` // test js while_stmt // while (true) {} // while (5) {} fn parse_while_statement(p: &mut JsParser) -> ParsedSyntax { // test_err js while_stmt_err // while true {} // while {} // while (true {} // while true) } if !p.at(T![while]) { return Absent; } let m = p.start(); p.expect(T![while]); parenthesized_expression(p); p.with_state(EnterBreakable(BreakableKind::Iteration), |p| { parse_statement(p, StatementContext::While) }) .or_add_diagnostic(p, expected_statement); Present(m.complete(p, JS_WHILE_STATEMENT)) } pub(crate) fn is_nth_at_variable_declarations(p: &mut JsParser, n: usize) -> bool { match p.nth(n) { T![var] | T![const] => true, T![await] | T![using] if is_nth_at_using_declaration(p, n) => true, T![let] if is_nth_at_let_variable_statement(p, n) => true, _ => false, } } pub(crate) fn is_nth_at_using_declaration(p: &mut JsParser, n: usize) -> bool { let (maybe_using, next_cursor) = match p.nth(n) { T![using] => (true, n + 1), T![await] if p.nth_at(n + 1, T![using]) => (true, n + 2), _ => (false, n + 1), }; maybe_using && !p.has_nth_preceding_line_break(next_cursor) && !p.nth_at(next_cursor, T![await]) && is_nth_at_identifier(p, next_cursor) }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
true
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/jsx/mod.rs
crates/rome_js_parser/src/syntax/jsx/mod.rs
use crate::prelude::*; pub mod jsx_parse_errors; use rome_js_syntax::JsSyntaxKind::*; use rome_parser::diagnostic::expected_token; use rome_parser::parse_lists::ParseNodeList; use rome_rowan::TextRange; use crate::lexer::{JsSyntaxKind, LexContext, ReLexContext, T}; use crate::syntax::expr::{ is_nth_at_identifier_or_keyword, parse_expression, parse_name, ExpressionContext, }; use crate::syntax::js_parse_error::{expected_expression, expected_identifier}; use crate::syntax::jsx::jsx_parse_errors::{ jsx_expected_attribute, jsx_expected_attribute_value, jsx_expected_children, jsx_expected_closing_tag, }; use crate::JsSyntaxFeature::TypeScript; use crate::{parser::RecoveryResult, JsParser, ParseRecovery, ParsedSyntax}; use crate::{Absent, Present}; use super::typescript::parse_ts_type_arguments; // test jsx jsx_element_on_return // function f() { // return <div></div> // } // test jsx jsx_element_on_arrow_function // const f = () => <div></div>; // const f = () => (<div></div>); // test jsx jsx_element_as_statements // <div /> // test_err js jsx_or_type_assertion // // SCRIPT // function f() { // let a = <div>a</div>; // JSX // let b = <string>b; // type assertion // let c = <string>b<a>d; // type assertion // let d = <div>a</div>/; // ambiguous: JSX or "type assertion a less than regex /div>/". Probably JSX. // let d = <string>a</string>/; // } // test jsx jsx_equal_content // <span></span>; // <span>=</span>; pub(crate) fn parse_jsx_tag_expression(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![<]) { return Absent; } if !p.nth_at(1, T![>]) && !is_nth_at_identifier_or_keyword(p, 1) { return Absent; } let m = p.start(); // Safety: Safe because `parse_any_jsx_tag only returns Absent if the parser isn't positioned // at the `<` token which is tested for at the beginning of the function. parse_any_jsx_tag(p, true).unwrap(); Present(m.complete(p, JSX_TAG_EXPRESSION)) } // <a ...> or <a ... 
/> // ^ ^ // test jsx jsx_element_open_close // function f() { // return <div></div> // } // test jsx jsx_element_self_close // function f() { // return <div /> // } // test jsx jsx_closing_token_trivia // <closing / /* some comment */ >; // <open>< // /* some comment */ / open>; // test_err jsx jsx_invalid_text // <a> test ></a>; // <b> invalid }</b>; /// Parses a JSX tag (fragment or element) /// /// `in_expression` must be `true` if this element is a direct child of the `JsxElementExpression` (root of an expression). /// It should be false when parsing any child node. fn parse_any_jsx_tag(p: &mut JsParser, in_expression: bool) -> ParsedSyntax { match parse_any_jsx_opening_tag(p, in_expression) { Some(OpeningElement::SelfClosing(marker)) => Present(marker), Some(OpeningElement::Fragment(fragment_opening)) => { let opening_range = fragment_opening.range(p); let fragment = fragment_opening.precede(p); parse_jsx_children(p); expect_closing_fragment(p, in_expression, opening_range); Present(fragment.complete(p, JSX_FRAGMENT)) } Some(OpeningElement::Element { name, opening }) => { let opening_range = opening.range(p); let element = opening.precede(p); parse_jsx_children(p); expect_closing_element(p, in_expression, name, opening_range); Present(element.complete(p, JSX_ELEMENT)) } None => Absent, } } enum OpeningElement { Fragment(CompletedMarker), Element { name: Option<CompletedMarker>, opening: CompletedMarker, }, SelfClosing(CompletedMarker), } fn parse_any_jsx_opening_tag(p: &mut JsParser, in_expression: bool) -> Option<OpeningElement> { if !p.at(T![<]) { return None; } let m = p.start(); p.bump(T![<]); if p.at(T![>]) { // test jsx jsx_fragments // <></>; // <>abcd</>; // <> whitespace // </>; // < // /*comment */ // > // < // / // >; p.bump_with_context(T![>], LexContext::JsxChild); return Some(OpeningElement::Fragment( m.complete(p, JSX_OPENING_FRAGMENT), )); } let name = parse_jsx_any_element_name(p).or_add_diagnostic(p, expected_identifier); // Don't parse type 
arguments in JS because it prevents us from doing better error recovery in case the // `>` token of the opening element is missing: // `<test <inner></test>` The `inner` is it's own element and not the type arguments if TypeScript.is_supported(p) { // test tsx tsx_element_generics_type // <NonGeneric />; // <Generic<true> />; // <Generic<true>></Generic>; let _ = parse_ts_type_arguments(p); } JsxAttributeList.parse_list(p); if p.eat(T![/]) { // test_err jsx jsx_self_closing_element_missing_r_angle // <><test / some test followed by<a /></>; expect_jsx_token(p, T![>], !in_expression); Some(OpeningElement::SelfClosing( m.complete(p, JSX_SELF_CLOSING_ELEMENT), )) } else { // test_err jsx jsx_opening_element_missing_r_angle // <><test <inner> some content</inner></test></> expect_jsx_token(p, T![>], true); Some(OpeningElement::Element { opening: m.complete(p, JSX_OPENING_ELEMENT), name, }) } } fn expect_closing_fragment( p: &mut JsParser, in_expression: bool, opening_range: TextRange, ) -> CompletedMarker { let m = p.start(); p.expect(T![<]); p.expect(T![/]); // test_err jsx jsx_missing_closing_fragment // <>test</test>; // <>test<inner> some text</inner>; if let Present(name) = parse_jsx_any_element_name(p) { p.error( p.err_builder( "JSX fragment has no corresponding closing tag.", opening_range, ) .detail(opening_range, "Opening fragment") .detail(name.range(p), "Closing tag"), ); } // test_err jsx jsx_fragment_closing_missing_r_angle // <div><>test</ 5 more content</div> expect_jsx_token(p, T![>], !in_expression); m.complete(p, JSX_CLOSING_FRAGMENT) } fn expect_closing_element( p: &mut JsParser, in_expression: bool, opening_name_marker: Option<CompletedMarker>, opening_range: TextRange, ) -> CompletedMarker { let m = p.start(); p.expect(T![<]); p.expect(T![/]); let name_marker = parse_jsx_any_element_name(p); // test_err jsx jsx_closing_element_mismatch // <test></>; // <test></text>; // <some><nested></some></nested>; // <><5></test></>; if let 
Some(opening_name_marker) = opening_name_marker { let opening_name = opening_name_marker.text(p); let error = match name_marker { Present(name) if name.text(p) != opening_name => { let closing_end = if p.at(T![>]) { p.cur_range().end() } else { name.range(p).end() }; let closing_range = TextRange::new(m.start(), closing_end); Some(jsx_expected_closing_tag( p, opening_name, opening_range, closing_range, )) } Present(_) => None, Absent => { if p.at(T![>]) { let closing_range = TextRange::new(m.start(), p.cur_range().end()); Some(jsx_expected_closing_tag( p, opening_name, opening_range, closing_range, )) } else { Some(expected_identifier(p, p.cur_range())) } } }; if let Some(error) = error { p.error(error); } } // test_err jsx jsx_closing_missing_r_angle // <><test>abcd</test more content follows here</> expect_jsx_token(p, T![>], !in_expression); m.complete(p, JSX_CLOSING_ELEMENT) } /// Expects a JSX token that may be followed by JSX child content. /// Ensures that the child content is lexed with the [LexContext::JsxChild] context. fn expect_jsx_token(p: &mut JsParser, token: JsSyntaxKind, before_child_content: bool) { if !before_child_content { p.expect(token); } else if p.at(token) { p.bump_with_context(token, LexContext::JsxChild); } else { p.error(expected_token(token)); // Re-lex the current token as a JSX child. 
p.re_lex(ReLexContext::JsxChild); } } struct JsxChildrenList; impl ParseNodeList for JsxChildrenList { type Kind = JsSyntaxKind; type Parser<'source> = JsParser<'source>; const LIST_KIND: Self::Kind = JsSyntaxKind::JSX_CHILD_LIST; fn parse_element(&mut self, p: &mut JsParser) -> ParsedSyntax { match p.cur() { // test jsx jsx_element_children // <a> // <b> // <d></d> // <e></e> // </b> // <c></c> // </a> T![<] => parse_any_jsx_tag(p, false), T!['{'] => parse_jsx_expression_child(p), // test jsx jsx_text // <a>test</a>; // <a> whitespace handling </a>; // <a> multi // line // node // </a>; // <test>\u3333</test> // no error for invalid unicode escape JsSyntaxKind::JSX_TEXT_LITERAL => { let m = p.start(); p.bump(JSX_TEXT_LITERAL); ParsedSyntax::Present(m.complete(p, JSX_TEXT)) } _ => ParsedSyntax::Absent, } } fn is_at_list_end(&self, p: &mut JsParser) -> bool { let at_l_angle0 = p.at(T![<]); let at_slash1 = p.nth_at(1, T![/]); at_l_angle0 && at_slash1 } fn recover(&mut self, p: &mut JsParser, parsed_element: ParsedSyntax) -> RecoveryResult { parsed_element.or_recover( p, &ParseRecovery::new( JsSyntaxKind::JS_BOGUS, token_set![T![<], T![>], T!['{'], T!['}']], ), jsx_expected_children, ) } } #[inline] fn parse_jsx_children(p: &mut JsParser) { JsxChildrenList.parse_list(p); } fn parse_jsx_expression_child(p: &mut JsParser) -> ParsedSyntax { if !p.at(T!['{']) { return ParsedSyntax::Absent; } let m = p.start(); p.bump(T!['{']); // test jsx jsx_children_spread // <div>{...a}</div>; // <div>{...a}After</div>; let is_spread = p.eat(T![...]); let expr = parse_jsx_assignment_expression(p, is_spread); if is_spread { // test_err jsx jsx_spread_no_expression // <test>{...}</test> expr.or_add_diagnostic(p, expected_expression); } // test jsx jsx_children_expression_then_text // <test> // {/* comment */} // some // text // </test> // test_err jsx jsx_children_expression_missing_r_curly // <test> // { 5 + 3 // some text // </test> expect_jsx_token(p, T!['}'], true); let kind = if 
is_spread { JsSyntaxKind::JSX_SPREAD_CHILD } else { JsSyntaxKind::JSX_EXPRESSION_CHILD }; ParsedSyntax::Present(m.complete(p, kind)) } // test jsx jsx_member_element_name // <a.b.c.d></a.b.c.d>; // <a-b.c></a-b.c>; // <Abcd></Abcd>; // // test_err jsx jsx_namespace_member_element_name // <namespace:a></namespace:a>; // <namespace:a.b></namespace:a.b>; fn parse_jsx_any_element_name(p: &mut JsParser) -> ParsedSyntax { let name = parse_jsx_name_or_namespace(p); name.map(|mut name| { if name.kind(p) == JSX_NAME && (p.at(T![.]) || !is_intrinsic_element(name.text(p))) { name.change_kind(p, JSX_REFERENCE_IDENTIFIER) } else if name.kind(p) == JSX_NAMESPACE_NAME && p.at(T![.]) { let error = p.err_builder( "JSX property access expressions cannot include JSX namespace names.", name.range(p), ); p.error(error); name.change_to_bogus(p); } while p.at(T![.]) { let m = name.precede(p); p.bump(T![.]); parse_name(p).or_add_diagnostic(p, expected_identifier); name = m.complete(p, JSX_MEMBER_NAME) } name }) } /// Tests if this is an intrinsic element name. Intrinsic elements are such elements /// that are built in, for example HTML elements. This implementation uses React's semantic /// and assumes that anything starting with a lower case character is an intrinsic element, and /// that custom components start with an uper case character. 
/// /// Resources: [TypeScript's documentation on intrinsic elements](https://www.typescriptlang.org/docs/handbook/jsx.html#intrinsic-elements) fn is_intrinsic_element(element_name: &str) -> bool { if let Some(first) = element_name.chars().next() { first.is_lowercase() } else { false } } // test jsx jsx_any_name // <a-b-c-d-e></a-b-c-d-e>; // <a-b-c-d-e />; // <if />; // <namespace:name></namespace:name>; // <dashed-namespaced:dashed-name />; fn parse_jsx_name_or_namespace(p: &mut JsParser) -> ParsedSyntax { parse_jsx_name(p).map(|identifier| { if p.at(T![:]) { let m = identifier.precede(p); p.bump(T![:]); parse_jsx_name(p).or_add_diagnostic(p, expected_identifier); m.complete(p, JSX_NAMESPACE_NAME) } else { identifier } }) } fn parse_jsx_name(p: &mut JsParser) -> ParsedSyntax { p.re_lex(ReLexContext::JsxIdentifier); if p.at(JSX_IDENT) { let name = p.start(); p.bump(JSX_IDENT); Present(name.complete(p, JSX_NAME)) } else { Absent } } struct JsxAttributeList; // test jsx jsx_element_attributes // function f() { // return <div string_literal="a" expression={1} novalue el=<a/>></div>; // } // <div dashed-name='test' use:validate="abcd" />; // <div use-dashed_underscore:validate="ahaha" />; // <div multiline-string='test // continues here' />; // <div invalid-unicode-escape="\u10000\u20000" />; impl ParseNodeList for JsxAttributeList { type Kind = JsSyntaxKind; type Parser<'source> = JsParser<'source>; const LIST_KIND: Self::Kind = JsSyntaxKind::JSX_ATTRIBUTE_LIST; fn parse_element(&mut self, p: &mut JsParser) -> ParsedSyntax { if matches!(p.cur(), T!['{'] | T![...]) { parse_jsx_spread_attribute(p) } else { parse_jsx_attribute(p) } } fn is_at_list_end(&self, p: &mut JsParser) -> bool { matches!(p.cur(), T![>] | T![/] | T![<]) } fn recover(&mut self, p: &mut JsParser, parsed_element: ParsedSyntax) -> RecoveryResult { parsed_element.or_recover( p, &ParseRecovery::new( JsSyntaxKind::JS_BOGUS, token_set![T![/], T![>], T![<], T!['{'], T!['}'], T![...], T![ident]], ), 
jsx_expected_attribute, ) } } fn parse_jsx_attribute(p: &mut JsParser) -> ParsedSyntax { if !is_nth_at_identifier_or_keyword(p, 0) { return Absent; } let m = p.start(); // SAFETY: Guaranteed to succeed because the parser is at an identifier or keyword parse_jsx_name_or_namespace(p).unwrap(); let _ = parse_jsx_attribute_initializer_clause(p); Present(m.complete(p, JsSyntaxKind::JSX_ATTRIBUTE)) } // test jsx jsx_spread_attribute // let obj = {}; // <a {...obj} />; // // test_err jsx jsx_spread_attribute_error // let obj = {}; // <a {...obj, other} />; // <a ...obj} />; // <a {obj} />; // <div // {...{} /* // // @ts-ignore */ /* prettier-ignore */ // invalidProp="HelloWorld" // />; fn parse_jsx_spread_attribute(p: &mut JsParser) -> ParsedSyntax { if !matches!(p.cur(), T![...] | T!['{']) { return Absent; } let m = p.start(); p.expect(T!['{']); p.expect(T![...]); let argument = parse_expression(p, ExpressionContext::default()).map(|mut expr| { if expr.kind(p) == JS_SEQUENCE_EXPRESSION { p.error(p.err_builder( "Comma operator isn't a valid value for a JSX spread argument.", expr.range(p), )); expr.change_to_bogus(p); } expr }); argument.or_add_diagnostic(p, expected_expression); p.expect(T!['}']); Present(m.complete(p, JSX_SPREAD_ATTRIBUTE)) } fn parse_jsx_attribute_initializer_clause(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![=]) { return Absent; } let m = p.start(); p.bump_with_context(T![=], LexContext::JsxAttributeValue); // test_err jsx jsx_element_attribute_missing_value // function f() { // return <div string_literal= ></div>; // } parse_jsx_attribute_value(p).or_add_diagnostic(p, jsx_expected_attribute_value); ParsedSyntax::Present(m.complete(p, JsSyntaxKind::JSX_ATTRIBUTE_INITIALIZER_CLAUSE)) } fn parse_jsx_attribute_value(p: &mut JsParser) -> ParsedSyntax { match p.cur() { // test jsx jsx_element_attribute_expression // <div id={1} />; // <div className={prefix`none`} />; T!['{'] => parse_jsx_expression_attribute_value(p), // test jsx 
jsx_element_attribute_element // <div id=<a/> />; T![<] => parse_any_jsx_tag(p, true), // test jsx jsx_element_attribute_string_literal // <div id="a" />; JsSyntaxKind::JSX_STRING_LITERAL => { let m = p.start(); p.bump(JSX_STRING_LITERAL); ParsedSyntax::Present(m.complete(p, JSX_STRING)) } _ => ParsedSyntax::Absent, } } // test_err jsx jsx_element_attribute_expression_error // <div className={asdf asdf} />; fn parse_jsx_expression_attribute_value(p: &mut JsParser) -> ParsedSyntax { if !p.at(T!['{']) { return ParsedSyntax::Absent; } let m = p.start(); p.bump(T!['{']); parse_jsx_assignment_expression(p, false).or_add_diagnostic(p, expected_expression); if !p.expect(T!['}']) && p.nth_at(1, T!['}']) { p.parse_as_skipped_trivia_tokens(|p| { p.bump_any(); }); p.expect(T!['}']); } ParsedSyntax::Present(m.complete(p, JSX_EXPRESSION_ATTRIBUTE_VALUE)) } // test jsx jsx_children_expression // let x; // let a; // let b; // let key; // let f = () => {}; // <div> // {1} // {9007199254740991n} // {""} // {true} // {null} // {undefined} // {/a/} // {[]} // {x => console.log(x)} // {x = 1} // {await x} // {1 + 1} // {f()} // {a[b]} // {a?1:2} // {function f() {}} // {function () {}} // {a} // {import("a.js")} // {key in a} // {a instanceof Object} // {a && b} // {new f()} // {{}} // {(a)} // {a++} // {++a} // {a,b} // {a.b} // {super.a()} // {this} // {delete a.a} // {void a} // {typeof a} // {+a} // {-a} // {!a} // {~a} // {``} // {/* A JSX comment */} // {/* Multi // line // */} // {} // </div> // function *f() { // return <div> // {yield a} // </div>; // } // test_err jsx jsx_children_expressions_not_accepted // <div> // {import.meta} // {class A{}} // {super()} // {new.target} // </div> fn parse_jsx_assignment_expression(p: &mut JsParser, is_spread: bool) -> ParsedSyntax { let expr = parse_expression(p, ExpressionContext::default()); expr.map(|mut expr| { let msg = if is_spread { "This expression is not valid as a JSX spread expression" } else { "This expression is not valid as 
a JSX expression." }; let err = match expr.kind(p) { JsSyntaxKind::JS_IMPORT_META_EXPRESSION | JsSyntaxKind::JS_NEW_TARGET_EXPRESSION | JsSyntaxKind::JS_CLASS_EXPRESSION => Some(p.err_builder(msg, expr.range(p))), JsSyntaxKind::JS_SEQUENCE_EXPRESSION if is_spread => { Some(p.err_builder(msg, expr.range(p))) } _ => None, }; if let Some(err) = err { p.error(err); expr.change_to_bogus(p); } expr }) }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/jsx/jsx_parse_errors.rs
crates/rome_js_parser/src/syntax/jsx/jsx_parse_errors.rs
use crate::prelude::*; use crate::JsParser; use rome_js_syntax::TextRange; use rome_parser::diagnostic::{expected_any, expected_node}; pub(crate) fn jsx_expected_attribute(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("JSX attribute", range).into_diagnostic(p) } pub(crate) fn jsx_expected_attribute_value(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("JSX attribute value", range).into_diagnostic(p) } pub(crate) fn jsx_expected_children(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["JSX Expression", "Element", "text"], range).into_diagnostic(p) } pub(crate) fn jsx_expected_closing_tag( p: &JsParser, opening_name: &str, opening_range: TextRange, closing_range: TextRange, ) -> ParseDiagnostic { p.err_builder( format!("Expected corresponding JSX closing tag for '{opening_name}'."), opening_range, ) .detail(opening_range, "Opening tag") .detail(closing_range, "closing tag") }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/typescript/types.rs
crates/rome_js_parser/src/syntax/typescript/types.rs
use crate::parser::{RecoveryError, RecoveryResult}; use crate::prelude::*; use crate::state::{EnterType, SignatureFlags}; use crate::syntax::expr::{ is_at_binary_operator, is_at_expression, is_at_identifier, is_nth_at_identifier, is_nth_at_identifier_or_keyword, parse_big_int_literal_expression, parse_identifier, parse_literal_expression, parse_name, parse_number_literal_expression, parse_reference_identifier, parse_template_elements, ExpressionContext, }; use crate::syntax::function::{ parse_formal_parameter, parse_parameter_list, skip_parameter_start, ParameterContext, }; use crate::syntax::js_parse_error::{ decorators_not_allowed, expected_identifier, expected_object_member_name, expected_parameter, expected_parameters, expected_property_or_signature, modifier_already_seen, modifier_must_precede_modifier, }; use crate::syntax::object::{ is_at_object_member_name, is_nth_at_type_member_name, parse_object_member_name, }; use crate::syntax::stmt::optional_semi; use crate::syntax::typescript::try_parse; use crate::syntax::typescript::ts_parse_error::{ expected_ts_type, expected_ts_type_parameter, ts_const_modifier_cannot_appear_on_a_type_parameter, ts_in_out_modifier_cannot_appear_on_a_type_parameter, }; use bitflags::bitflags; use rome_parser::parse_lists::{ParseNodeList, ParseSeparatedList}; use smallvec::SmallVec; use crate::lexer::{LexContext, ReLexContext}; use crate::span::Span; use crate::syntax::class::parse_decorators; use crate::JsSyntaxFeature::TypeScript; use crate::{Absent, JsParser, ParseRecovery, ParsedSyntax, Present}; use rome_js_syntax::JsSyntaxKind::TS_TYPE_ANNOTATION; use rome_js_syntax::T; use rome_js_syntax::{JsSyntaxKind::*, *}; use super::{expect_ts_index_signature_member, is_at_ts_index_signature_member, MemberParent}; bitflags! { /// Context tracking state that applies to the parsing of all types #[derive(Debug, Default, Copy, Clone, Eq, PartialEq)] pub(crate) struct TypeContext: u8 { /// Whether conditional types `extends string ? 
string : number` are allowed in the current context. /// /// By default, conditional types are allowed. const DISALLOW_CONDITIONAL_TYPES = 1 << 0; /// Whether 'in' and 'out' modifiers are allowed in the current context. /// /// By default, 'in' and 'out' modifiers are not allowed. const ALLOW_IN_OUT_MODIFIER = 1 << 1; /// Whether 'const' modifier is allowed in the current context. /// /// By default, 'const' modifier is not allowed. const ALLOW_CONST_MODIFIER = 1 << 2; } } impl TypeContext { pub(crate) fn and_allow_conditional_types(self, allow: bool) -> Self { self.and(TypeContext::DISALLOW_CONDITIONAL_TYPES, !allow) } pub(crate) fn and_allow_in_out_modifier(self, allow: bool) -> Self { self.and(TypeContext::ALLOW_IN_OUT_MODIFIER, allow) } pub(crate) fn and_allow_const_modifier(self, allow: bool) -> Self { self.and(TypeContext::ALLOW_CONST_MODIFIER, allow) } pub(crate) const fn is_conditional_type_allowed(&self) -> bool { !self.contains(TypeContext::DISALLOW_CONDITIONAL_TYPES) } pub(crate) const fn is_in_out_modifier_allowed(&self) -> bool { self.contains(TypeContext::ALLOW_IN_OUT_MODIFIER) } pub(crate) const fn is_const_modifier_allowed(&self) -> bool { self.contains(TypeContext::ALLOW_CONST_MODIFIER) } /// Adds the `flag` if `set` is `true`, otherwise removes the `flag` fn and(self, flag: TypeContext, set: bool) -> Self { if set { self | flag } else { self - flag } } } pub(crate) fn is_reserved_type_name(name: &str) -> bool { name.len() <= 6 && name.len() >= 3 && matches!( name, "string" | "null" | "number" | "object" | "any" | "unknown" | "boolean" | "bigint" | "symbol" | "void" | "never" ) } pub(crate) fn is_reserved_module_name(name: &str) -> bool { name.len() == 4 && matches!(name, "void" | "null") } pub(crate) fn parse_ts_type_annotation(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![:]) { return Absent; } let m = p.start(); p.bump(T![:]); parse_ts_type(p, TypeContext::default()).or_add_diagnostic(p, expected_ts_type); Present(m.complete(p, 
TS_TYPE_ANNOTATION)) } // test ts ts_return_type_annotation // type A = (a) => a is string; // type B = { test(a): a is string } // type C = { (a): a is string } // const a = { test(x): x is string { return typeof x === "string" } } // class D { test(x): x is string { return typeof x === "string"; } } pub(crate) fn parse_ts_return_type_annotation(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![:]) { return Absent; } let m = p.start(); p.bump(T![:]); parse_ts_return_type(p, TypeContext::default()).or_add_diagnostic(p, expected_ts_type); Present(m.complete(p, TS_RETURN_TYPE_ANNOTATION)) } fn parse_ts_call_signature(p: &mut JsParser, context: TypeContext) { parse_ts_type_parameters(p, context).ok(); parse_parameter_list(p, ParameterContext::Declaration, SignatureFlags::empty()) .or_add_diagnostic(p, expected_parameters); parse_ts_return_type_annotation(p).ok(); } fn parse_ts_type_parameter_name(p: &mut JsParser) -> ParsedSyntax { parse_identifier(p, TS_TYPE_PARAMETER_NAME) } // test ts ts_type_parameters // type A<X extends string, Y = number, Z extends string | number = number> = { x: X, y: Y, z: Z } // // test_err ts ts_type_parameters_incomplete // type A<T pub(crate) fn parse_ts_type_parameters(p: &mut JsParser, context: TypeContext) -> ParsedSyntax { if !is_nth_at_ts_type_parameters(p, 0) { return Absent; } let m = p.start(); p.bump(T![<]); if p.at(T![>]) { p.error(expected_ts_type_parameter(p, p.cur_range())); } TsTypeParameterList(context).parse_list(p); p.expect(T![>]); Present(m.complete(p, TS_TYPE_PARAMETERS)) } struct TsTypeParameterList(TypeContext); impl ParseSeparatedList for TsTypeParameterList { type Kind = JsSyntaxKind; type Parser<'source> = JsParser<'source>; const LIST_KIND: Self::Kind = TS_TYPE_PARAMETER_LIST; fn parse_element(&mut self, p: &mut JsParser) -> ParsedSyntax { parse_ts_type_parameter(p, self.0) } fn is_at_list_end(&self, p: &mut JsParser) -> bool { p.at(T![>]) } fn recover(&mut self, p: &mut JsParser, parsed_element: ParsedSyntax) -> 
RecoveryResult { parsed_element.or_recover( p, &ParseRecovery::new( TS_BOGUS_TYPE, token_set![T![>], T![,], T![ident], T![yield], T![await]], ) .enable_recovery_on_line_break(), expected_ts_type_parameter, ) } fn separating_element_kind(&mut self) -> JsSyntaxKind { T![,] } fn allow_trailing_separating_element(&self) -> bool { true } } // test_err ts type_parameter_modifier // export default function foo<in T>() {} // export function foo<out T>() {} // export function foo1<in T>() {} // export function foo2<out T>() {} // let foo: Foo<in T> // let foo: Foo<out T> // declare function foo<in T>() // declare function foo<out T>() // declare let foo: Foo<in T> // declare let foo: Foo<out T> // foo = function <in T>() {} // foo = function <out T>() {} // class Foo { foo<in T>(): T {} } // class Foo { foo<out T>(): T {} } // foo = { foo<in T>(): T {} }; // foo = { foo<out T>(): T {} }; // <in T>() => {}; // <out T>() => {}; // <in T, out T>() => {}; // let x: <in T>() => {}; // let x: <out T>() => {}; // let x: <in T, out T>() => {}; // let x: new <in T>() => {}; // let x: new <out T>() => {}; // let x: new <in T, out T>() => {}; // let x: { y<in T>(): any }; // let x: { y<out T>(): any }; // let x: { y<in T, out T>(): any }; // type Foo<i\\u006E T> = {} // type Foo<ou\\u0074 T> = {} // type Foo<in in> = {} // type Foo<out in> = {} // type Foo<out in T> = {} // type Foo<public T> = {} // type Foo<innn T> = {} // type Foo<in out in T> = {} // type Foo<in out out T> = {} // function foo<in T>() {} // function foo<out T>() {} // type Foo<const U> = {}; // test tsx type_parameter_modifier_tsx // <in T></in>; // <out T></out>; // <const T></const>; // <in out T></in>; // <out in T></out>; // <in T extends={true}></in>; // <out T extends={true}></out>; // <in out T extends={true}></in>; // test ts type_parameter_modifier // type Foo<in T> = {} // type Foo<out> = {} // type Foo<out T> = {} // type Foo<in out> = {} // type Foo<out out> = {} // type Foo<in out out> = {} // type 
Foo<in X, out Y> = {} // type Foo<out X, in Y> = {} // type Foo<out X, out Y extends keyof X> = {} // class Foo<in T> {} // class Foo<out T> {} // export default class Foo<in T> {} // class Foo<out T> {} // interface Foo<in T> {} // interface Foo<out T> {} // declare class Foo<in T> {} // declare class Foo<out T> {} // declare interface Foo<in T> {} // declare interface Foo<out T> {} // function a<const T>() {} // function b<const T extends U>() {} // function c<T, const U>() {} // declare function d<const T>(); // <T>() => {}; // <const T>() => {}; // (function <const T>() {}); // (function <const T extends U>() {}); // (function <T, const U>() {}); // class A<const T> {} // class B<const T extends U> {} // class C<T, const U> {} // class D<in const T> {} // class E<const in T> {} // class F<in const out T> {} // (class <const T> {}); // (class <const T extends U> {}); // (class <T, const U> {}); // (class <in const T> {}); // (class <const in T> {}); // class _ { // method<const T>() {} // method<const T extends U>() {} // method<T, const U>() {} // } // declare module a { // function test<const T>(): T; // } // const obj = { // a<const T>(b: any): b is T { return true; } // } fn parse_ts_type_parameter(p: &mut JsParser, context: TypeContext) -> ParsedSyntax { let m = p.start(); parse_ts_type_parameter_modifiers(p, context).ok(); let name = parse_ts_type_parameter_name(p); parse_ts_type_constraint_clause(p, context).ok(); parse_ts_default_type_clause(p).ok(); if name.is_absent() { m.abandon(p); Absent } else { Present(m.complete(p, TS_TYPE_PARAMETER)) } } #[derive(Eq, PartialEq, Clone, Copy, Debug)] enum TypeParameterModifierKind { In, Out, Const, } /// Stores the range of a parsed modifier with its kind #[derive(Eq, PartialEq, Clone, Copy, Debug)] struct TypeParameterModifier { kind: TypeParameterModifierKind, range: TextRange, } impl TypeParameterModifier { const fn as_syntax_kind(&self) -> JsSyntaxKind { match self.kind { TypeParameterModifierKind::In => 
TS_IN_MODIFIER, TypeParameterModifierKind::Out => TS_OUT_MODIFIER, TypeParameterModifierKind::Const => TS_CONST_MODIFIER, } } } #[derive(Debug, Default)] struct ClassMemberModifierList(SmallVec<[TypeParameterModifier; 3]>); impl ClassMemberModifierList { fn add_modifier(&mut self, modifier: TypeParameterModifier) { self.0.push(modifier); } fn find(&self, modifier_kind: &TypeParameterModifierKind) -> Option<&TypeParameterModifier> { self.0 .iter() .find(|predicate| predicate.kind == *modifier_kind) } } pub(crate) fn is_nth_at_type_parameter_modifier(p: &mut JsParser, n: usize) -> bool { match p.nth(n) { T![in] | T![out] | T![const] => !p.nth_at(n + 1, T![,]) && !p.nth_at(n + 1, T![>]), _ => false, } } fn parse_ts_type_parameter_modifiers(p: &mut JsParser, context: TypeContext) -> ParsedSyntax { let list = p.start(); let mut modifiers = ClassMemberModifierList::default(); while is_nth_at_type_parameter_modifier(p, 0) { let modifier_kind = match p.cur() { T![in] => TypeParameterModifierKind::In, T![out] => TypeParameterModifierKind::Out, T![const] => TypeParameterModifierKind::Const, _ => unreachable!("keywords that are not 'in', 'out' and 'const' are checked earlier"), }; let m = p.start(); let text_range = p.cur_range(); p.bump_any(); if matches!( modifier_kind, TypeParameterModifierKind::In | TypeParameterModifierKind::Out, ) && !context.is_in_out_modifier_allowed() { p.error(ts_in_out_modifier_cannot_appear_on_a_type_parameter( p, text_range, )); m.abandon(p); continue; } if matches!(modifier_kind, TypeParameterModifierKind::Const) && !context.is_const_modifier_allowed() { p.error(ts_const_modifier_cannot_appear_on_a_type_parameter( p, text_range, )); m.abandon(p); continue; } // check for duplicate modifiers if let Some(existing_modifier) = modifiers.find(&modifier_kind) { p.error(modifier_already_seen( p, text_range, existing_modifier.range, )); m.abandon(p); continue; } // check for modifier precedence if let Some(ts_out_modifier) = 
modifiers.find(&TypeParameterModifierKind::Out) { if modifier_kind == TypeParameterModifierKind::In { p.error(modifier_must_precede_modifier( p, text_range, ts_out_modifier.range, )); m.abandon(p); continue; } } let modifier = TypeParameterModifier { kind: modifier_kind, range: text_range, }; modifiers.add_modifier(modifier); m.complete(p, modifier.as_syntax_kind()); } Present(list.complete(p, TS_TYPE_PARAMETER_MODIFIER_LIST)) } // test ts ts_type_constraint_clause // type A<X extends number> = X; // type B<X extends number | string> = { a: X } fn parse_ts_type_constraint_clause(p: &mut JsParser, context: TypeContext) -> ParsedSyntax { if !p.at(T![extends]) { return Absent; } let m = p.start(); p.expect(T![extends]); parse_ts_type(p, context).or_add_diagnostic(p, expected_ts_type); Present(m.complete(p, TS_TYPE_CONSTRAINT_CLAUSE)) } // test ts ts_default_type_clause // type A<X=string> = X; // type B<X extends number | string = string> = { a: X } fn parse_ts_default_type_clause(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![=]) { return Absent; } let m = p.start(); p.bump(T![=]); parse_ts_type(p, TypeContext::default()).or_add_diagnostic(p, expected_ts_type); Present(m.complete(p, TS_DEFAULT_TYPE_CLAUSE)) } fn is_nth_at_ts_type_parameters(p: &mut JsParser, n: usize) -> bool { p.nth_at(n, T![<]) } #[inline(always)] pub(crate) fn parse_ts_type(p: &mut JsParser, context: TypeContext) -> ParsedSyntax { p.with_state(EnterType, |p| { if is_at_constructor_type(p) { return parse_ts_constructor_type(p, context); } if is_at_function_type(p) { return parse_ts_function_type(p, context); } let left = parse_ts_union_type_or_higher(p, context); // test ts ts_conditional_type_call_signature_lhs // type X<V> = V extends (...args: any[]) => any ? (...args: Parameters<V>) => void : Function; if context.is_conditional_type_allowed() { left.map(|left| { // test ts ts_conditional_type // type A = number; // type B = string extends number ? 
string : number; // type C = A extends (B extends A ? number : string) ? void : number; // type D<T> = T extends [infer S extends string, ...unknown[]] ? S : never; // type E<U, T> = T extends (infer U extends number ? U : T ) ? U : T // type F<T> = T extends { [P in infer U extends keyof T ? 1 : 0]: 1; } ? 1 : 0; // type G<T> = T extends [unknown, infer S extends string] ? S : never; // type H = A extends () => B extends C ? D : E ? F : G; // type J<T> = T extends ((...a: any[]) => infer R extends string) ? R : never; if !p.has_preceding_line_break() && p.at(T![extends]) { let m = left.precede(p); p.expect(T![extends]); parse_ts_type(p, context.and_allow_conditional_types(false)) .or_add_diagnostic(p, expected_ts_type); p.expect(T![?]); parse_ts_type(p, context).or_add_diagnostic(p, expected_ts_type); p.expect(T![:]); parse_ts_type(p, context).or_add_diagnostic(p, expected_ts_type); m.complete(p, TS_CONDITIONAL_TYPE) } else { left } }) } else { left } }) } // test ts ts_union_type // type A = string | number; // type B = | A | void | null; // type C = A & C | C; fn parse_ts_union_type_or_higher(p: &mut JsParser, context: TypeContext) -> ParsedSyntax { parse_ts_union_or_intersection_type(p, IntersectionOrUnionType::Union, context) } // test ts ts_intersection_type // type A = string & number; // type B = & A & void & null; fn parse_ts_intersection_type_or_higher(p: &mut JsParser, context: TypeContext) -> ParsedSyntax { parse_ts_union_or_intersection_type(p, IntersectionOrUnionType::Intersection, context) } #[derive(Debug, Copy, Clone, Eq, PartialEq)] enum IntersectionOrUnionType { Intersection, Union, } impl IntersectionOrUnionType { #[inline] fn operator(&self) -> JsSyntaxKind { match self { IntersectionOrUnionType::Union => T![|], IntersectionOrUnionType::Intersection => T![&], } } #[inline] fn list_kind(&self) -> JsSyntaxKind { match self { IntersectionOrUnionType::Union => TS_UNION_TYPE_VARIANT_LIST, IntersectionOrUnionType::Intersection => 
TS_INTERSECTION_TYPE_ELEMENT_LIST, } } #[inline] fn kind(&self) -> JsSyntaxKind { match self { IntersectionOrUnionType::Union => TS_UNION_TYPE, IntersectionOrUnionType::Intersection => TS_INTERSECTION_TYPE, } } #[inline] fn parse_element(&self, p: &mut JsParser, context: TypeContext) -> ParsedSyntax { match self { IntersectionOrUnionType::Union => parse_ts_intersection_type_or_higher(p, context), IntersectionOrUnionType::Intersection => parse_ts_primary_type(p, context), } } } #[inline] fn parse_ts_union_or_intersection_type( p: &mut JsParser, ty_kind: IntersectionOrUnionType, context: TypeContext, ) -> ParsedSyntax { // Leading operator: `& A & B` if p.at(ty_kind.operator()) { let m = p.start(); p.bump(ty_kind.operator()); let list = p.start(); ty_kind .parse_element(p, context) .or_add_diagnostic(p, expected_ts_type); eat_ts_union_or_intersection_type_elements(p, ty_kind, context); list.complete(p, ty_kind.list_kind()); Present(m.complete(p, ty_kind.kind())) } else { let first = ty_kind.parse_element(p, context); if p.at(ty_kind.operator()) { let list = first.precede(p); eat_ts_union_or_intersection_type_elements(p, ty_kind, context); let completed_list = list.complete(p, ty_kind.list_kind()); let m = completed_list.precede(p); Present(m.complete(p, ty_kind.kind())) } else { // Not a union or intersection type first } } } #[inline] fn eat_ts_union_or_intersection_type_elements( p: &mut JsParser, ty_kind: IntersectionOrUnionType, context: TypeContext, ) { while p.at(ty_kind.operator()) { p.bump(ty_kind.operator()); ty_kind .parse_element(p, context) .or_add_diagnostic(p, expected_ts_type); } } fn parse_ts_primary_type(p: &mut JsParser, context: TypeContext) -> ParsedSyntax { // test ts ts_inferred_type // type A = infer B; // type B = { a: infer U; b: infer U}; if p.at(T![infer]) { let m = p.start(); p.expect(T![infer]); parse_ts_type_parameter_name(p).or_add_diagnostic(p, expected_identifier); try_parse_constraint_of_infer_type(p, context).ok(); return 
Present(m.complete(p, TS_INFER_TYPE)); } // test ts ts_type_operator // type A = { x: string, y: number }; // type B = keyof A; // type C = readonly string[]; // const d: unique symbol = Symbol(); let is_type_operator = matches!(p.cur(), T![unique] | T![keyof] | T![readonly]); if is_type_operator { let m = p.start(); p.bump_any(); parse_ts_primary_type(p, context).or_add_diagnostic(p, expected_ts_type); return Present(m.complete(p, TS_TYPE_OPERATOR_TYPE)); } parse_postfix_type_or_higher(p, context.and_allow_conditional_types(true)) } fn try_parse_constraint_of_infer_type(p: &mut JsParser, context: TypeContext) -> ParsedSyntax { if !p.at(T![extends]) { return Absent; } try_parse(p, |p| { let parsed = parse_ts_type_constraint_clause(p, context.and_allow_conditional_types(false)) .expect("Type constraint clause because parser is positioned at expect clause"); // Rewind if conditional types are allowed, and the parser is at the `?` token because // this should instead be parsed as a conditional type. 
if context.is_conditional_type_allowed() && p.at(T![?]) { Err(()) } else { Ok(Present(parsed)) } }) .unwrap_or(Absent) } fn parse_postfix_type_or_higher(p: &mut JsParser, context: TypeContext) -> ParsedSyntax { parse_ts_non_array_type(p, context).map(|primary_type| { let mut left = primary_type; while p.at(T!['[']) && !p.has_preceding_line_break() { let m = left.precede(p); p.bump(T!['[']); left = if parse_ts_type(p, context).is_present() { // test ts ts_indexed_access_type // type A = string[number]; // type B = string[number][number][number][]; p.expect(T![']']); m.complete(p, TS_INDEXED_ACCESS_TYPE) } else { // test ts ts_array_type // type A = string[]; // type B = { a: number } []; p.expect(T![']']); m.complete(p, TS_ARRAY_TYPE) } } left }) } fn parse_ts_non_array_type(p: &mut JsParser, context: TypeContext) -> ParsedSyntax { // test ts ts_predefined_type // type A = any // type B = number; // type C = object; // type D = boolean; // type E = bigint; // type F = string; // type G = symbol; // type H = void; // type I = undefined; // type J = null; // type K = never match p.cur() { T!['('] => parse_ts_parenthesized_type(p), T!['{'] => { if is_at_start_of_mapped_type(p) { parse_ts_mapped_type(p, context) } else { parse_ts_object_type(p) } } T!['['] => parse_ts_tuple_type(p, context), T![void] => { let m = p.start(); p.expect(T![void]); Present(m.complete(p, TS_VOID_TYPE)) } JS_NUMBER_LITERAL | JS_STRING_LITERAL | TRUE_KW | FALSE_KW | T![null] => { parse_ts_literal_type(p) } BACKTICK => parse_ts_template_literal_type(p, context), T![-] if p.nth_at(1, JS_NUMBER_LITERAL) => parse_ts_literal_type(p), T![this] => parse_ts_this_type(p), T![typeof] => { if p.nth_at(1, T![import]) { parse_ts_import_type(p) } else { parse_ts_typeof_type(p) } } T![import] => parse_ts_import_type(p), _ => { if !p.nth_at(1, T![.]) { let mapping = match p.cur() { T![any] => Some(TS_ANY_TYPE), T![unknown] => Some(TS_UNKNOWN_TYPE), T![number] => Some(TS_NUMBER_TYPE), T![object] => 
Some(TS_NON_PRIMITIVE_TYPE), T![boolean] => Some(TS_BOOLEAN_TYPE), T![bigint] => Some(TS_BIGINT_TYPE), T![string] => Some(TS_STRING_TYPE), T![symbol] => Some(TS_SYMBOL_TYPE), T![undefined] => Some(TS_UNDEFINED_TYPE), T![never] => Some(TS_NEVER_TYPE), _ => None, }; if let Some(literal_type_kind) = mapping { let m = p.start(); p.bump_any(); return Present(m.complete(p, literal_type_kind)); } } parse_ts_reference_type(p) } } } // test ts ts_reference_type // type A = object; // type B = string; // type C = A; // type D = B.a; // type E = D.c.b.a; fn parse_ts_reference_type(p: &mut JsParser) -> ParsedSyntax { parse_ts_name(p).map(|name| { let m = name.precede(p); if !p.has_preceding_line_break() { parse_ts_type_arguments(p).ok(); } m.complete(p, TS_REFERENCE_TYPE) }) } pub(crate) fn parse_ts_name(p: &mut JsParser) -> ParsedSyntax { let mut left = if p.cur().is_non_contextual_keyword() && !p.cur().is_future_reserved_keyword() { let m = p.start(); p.bump_remap(T![ident]); Present(m.complete(p, JS_REFERENCE_IDENTIFIER)) } else { parse_reference_identifier(p) }; while p.at(T![.]) { let m = left.precede_or_add_diagnostic(p, expected_identifier); p.bump(T![.]); parse_name(p).or_add_diagnostic(p, expected_identifier); left = Present(m.complete(p, TS_QUALIFIED_NAME)); } left } // test ts ts_typeof_type // let a = "test"; // type B = typeof a; // type T21 = typeof Array<string>; // type A<U> = InstanceType<typeof Array<U>>; // test tsx ts_typeof_type2 // type X = typeof Array // <div>a</div>; fn parse_ts_typeof_type(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![typeof]) { return Absent; } let m = p.start(); p.expect(T![typeof]); parse_ts_name(p).or_add_diagnostic(p, expected_identifier); if !p.has_preceding_line_break() { parse_ts_type_arguments(p).ok(); } Present(m.complete(p, TS_TYPEOF_TYPE)) } // test ts ts_this_type // class A { // method() { // type A = this; // } // predicate(): this is string { // return typeof this === "string" // } // } fn parse_ts_this_type(p: &mut 
JsParser) -> ParsedSyntax { if !p.at(T![this]) { return Absent; } let m = p.start(); p.expect(T![this]); Present(m.complete(p, TS_THIS_TYPE)) } // test ts ts_parenthesized_type // type A = (string) fn parse_ts_parenthesized_type(p: &mut JsParser) -> ParsedSyntax { if !p.at(T!['(']) { return Absent; } let m = p.start(); p.bump(T!['(']); parse_ts_type(p, TypeContext::default()).or_add_diagnostic(p, expected_ts_type); p.expect(T![')']); Present(m.complete(p, TS_PARENTHESIZED_TYPE)) } fn is_at_start_of_mapped_type(p: &mut JsParser) -> bool { if !p.at(T!['{']) { return false; } if p.nth_at(1, T![+]) || p.nth_at(1, T![-]) { return p.nth_at(2, T![readonly]); } let mut offset = 1; if p.nth_at(offset, T![readonly]) { offset += 1; } p.nth_at(offset, T!['[']) && (is_nth_at_identifier(p, offset + 1) || p.nth(offset + 1).is_keyword()) && p.nth_at(offset + 2, T![in]) } // test ts issue_2790 // var x: { // readonly [A in keyof B]?: any; // }; // test ts ts_mapped_type // type A = { [test in "a" | "b"] } // type OptionsFlags<Type> = { // [Property in keyof Type]: boolean; // }; // type CreateMutable<Type> = { // -readonly [Property in keyof Type]: Type[Property]; // }; // type Concrete<Type> = { // [Property in keyof Type]-?: Type[Property] // }; // type Getters<Type> = { // [Property in keyof Type as `get${Capitalize<string & Property>}`]: () => Type[Property] // }; fn parse_ts_mapped_type(p: &mut JsParser, context: TypeContext) -> ParsedSyntax { if !p.at(T!['{']) { return Absent; } let m = p.start(); p.bump(T!['{']); parse_ts_mapped_type_readonly_modifier_clause(p).ok(); p.expect(T!['[']); parse_ts_type_parameter_name(p).or_add_diagnostic(p, expected_ts_type_parameter); p.expect(T![in]); parse_ts_type(p, context).or_add_diagnostic(p, expected_ts_type); parse_ts_mapped_type_as_clause(p, context).ok(); p.expect(T![']']); parse_ts_mapped_type_optional_modifier_clause(p).ok(); parse_ts_type_annotation(p).ok(); p.eat(T![;]); p.expect(T!['}']); Present(m.complete(p, TS_MAPPED_TYPE)) } 
fn parse_ts_mapped_type_as_clause(p: &mut JsParser, context: TypeContext) -> ParsedSyntax { if !p.at(T![as]) { return Absent; } let m = p.start(); p.bump_remap(T![as]); parse_ts_type(p, context).or_add_diagnostic(p, expected_ts_type); Present(m.complete(p, TS_MAPPED_TYPE_AS_CLAUSE)) } fn parse_ts_mapped_type_readonly_modifier_clause(p: &mut JsParser) -> ParsedSyntax { if p.at(T![readonly]) { let m = p.start(); p.expect(T![readonly]); Present(m.complete(p, TS_MAPPED_TYPE_READONLY_MODIFIER_CLAUSE)) } else if p.at(T![+]) || p.at(T![-]) { let m = p.start(); p.bump_any(); p.expect(T![readonly]); Present(m.complete(p, TS_MAPPED_TYPE_READONLY_MODIFIER_CLAUSE)) } else { Absent } } fn parse_ts_mapped_type_optional_modifier_clause(p: &mut JsParser) -> ParsedSyntax { match p.cur() { T![?] => { let m = p.start(); p.bump(T![?]); Present(m.complete(p, TS_MAPPED_TYPE_OPTIONAL_MODIFIER_CLAUSE)) } T![-] | T![+] => { let m = p.start(); p.bump_any(); p.expect(T![?]); Present(m.complete(p, TS_MAPPED_TYPE_OPTIONAL_MODIFIER_CLAUSE)) } _ => Absent, } } // test ts ts_import_type // type A = typeof import("test"); // type B = import("test"); // type C = typeof import("test").a.b.c.d.e.f; // type D = import("test")<string>; // type E = import("test").C<string>; fn parse_ts_import_type(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![typeof]) && !p.at(T![import]) { return Absent; } let m = p.start(); p.eat(T![typeof]); p.expect(T![import]); p.expect(T!['(']); p.expect(JS_STRING_LITERAL); p.expect(T![')']); if p.at(T![.]) { let qualifier = p.start(); p.bump(T![.]); parse_ts_name(p).or_add_diagnostic(p, expected_identifier); qualifier.complete(p, TS_IMPORT_TYPE_QUALIFIER); } parse_ts_type_arguments(p).ok(); Present(m.complete(p, TS_IMPORT_TYPE)) } // test ts ts_object_type // type A = { a: string, b: number }; // type B = { a: string; b: number }; // type C = { a: string, b: number; c: string }; // type D = { // a: string // b: number // } fn parse_ts_object_type(p: &mut JsParser) -> 
ParsedSyntax { if !p.at(T!['{']) { return Absent; } let m = p.start(); p.bump(T!['{']); TypeMembers.parse_list(p); p.expect(T!['}']); Present(m.complete(p, TS_OBJECT_TYPE)) } pub(crate) struct TypeMembers; impl ParseNodeList for TypeMembers { type Kind = JsSyntaxKind; type Parser<'source> = JsParser<'source>; const LIST_KIND: Self::Kind = TS_TYPE_MEMBER_LIST; fn parse_element(&mut self, p: &mut JsParser) -> ParsedSyntax { parse_ts_type_member(p, TypeContext::default()) } fn is_at_list_end(&self, p: &mut JsParser) -> bool { p.at(T!['}']) } fn recover(&mut self, p: &mut JsParser, member: ParsedSyntax) -> RecoveryResult { member.or_recover( p, &ParseRecovery::new(JS_BOGUS, token_set![T!['}'], T![,], T![;]])
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
true
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/typescript/statement.rs
crates/rome_js_parser/src/syntax/typescript/statement.rs
use crate::parser::RecoveryResult; use crate::prelude::*; use crate::syntax::binding::{ is_nth_at_identifier_binding, parse_binding, parse_identifier_binding, }; use crate::syntax::class::parse_initializer_clause; use crate::syntax::expr::{is_nth_at_identifier, parse_name, ExpressionContext}; use super::ts_parse_error::expected_ts_enum_member; use crate::state::EnterAmbientContext; use crate::syntax::auxiliary::{is_nth_at_declaration_clause, parse_declaration_clause}; use crate::syntax::js_parse_error::{expected_identifier, expected_module_source}; use crate::syntax::module::{parse_module_item_list, parse_module_source, ModuleItemListParent}; use crate::syntax::stmt::{semi, STMT_RECOVERY_SET}; use crate::syntax::typescript::ts_parse_error::expected_ts_type; use crate::syntax::typescript::{ expect_ts_type_list, parse_ts_identifier_binding, parse_ts_implements_clause, parse_ts_name, parse_ts_type, parse_ts_type_parameters, TypeContext, TypeMembers, }; use crate::{syntax, Absent, JsParser, ParseRecovery, ParsedSyntax, Present}; use rome_js_syntax::{JsSyntaxKind::*, *}; use rome_parser::diagnostic::expected_token; use rome_parser::parse_lists::{ParseNodeList, ParseSeparatedList}; fn parse_literal_as_ts_enum_member(p: &mut JsParser) -> ParsedSyntax { let m = p.start(); match p.cur() { JS_STRING_LITERAL | T![ident] => { p.bump_any(); } t if t.is_keyword() => { p.bump_remap(T![ident]); } JS_NUMBER_LITERAL => { let err = p.err_builder("An enum member cannot have a numeric name", p.cur_range()); p.error(err); p.bump_any() } _ => { m.abandon(p); return Absent; } } Present(m.complete(p, JS_LITERAL_MEMBER_NAME)) } /// An individual enum member fn parse_ts_enum_member(p: &mut JsParser) -> ParsedSyntax { let member = p.start(); let name = match p.cur() { T!['['] => syntax::object::parse_computed_member_name(p), T![#] => { let err = p.err_builder("An `enum` member cannot be private", p.cur_range()); p.error(err); syntax::class::parse_private_class_member_name(p).map(|mut x| { 
x.change_to_bogus(p); x }) } _ => parse_literal_as_ts_enum_member(p), }; if name.is_absent() { member.abandon(p); return Absent; } let _ = parse_initializer_clause(p, ExpressionContext::default()); Present(member.complete(p, TS_ENUM_MEMBER)) } struct TsEnumMembersList; impl ParseSeparatedList for TsEnumMembersList { type Kind = JsSyntaxKind; type Parser<'source> = JsParser<'source>; const LIST_KIND: Self::Kind = TS_ENUM_MEMBER_LIST; fn parse_element(&mut self, p: &mut JsParser) -> ParsedSyntax { parse_ts_enum_member(p) } fn is_at_list_end(&self, p: &mut JsParser) -> bool { p.at(T!['}']) } fn recover(&mut self, p: &mut JsParser, parsed_element: ParsedSyntax) -> RecoveryResult { parsed_element.or_recover( p, &ParseRecovery::new( JS_BOGUS_MEMBER, STMT_RECOVERY_SET.union(token_set![JsSyntaxKind::IDENT, T![,], T!['}']]), ) .enable_recovery_on_line_break(), expected_ts_enum_member, ) } fn separating_element_kind(&mut self) -> JsSyntaxKind { T![,] } fn allow_trailing_separating_element(&self) -> bool { true } } #[inline(always)] fn is_reserved_enum_name(name: &str) -> bool { super::is_reserved_type_name(name) } fn parse_ts_enum_id(p: &mut JsParser, enum_token_range: TextRange) { match parse_binding(p) { Present(id) => { let text = p.text(id.range(p)); if is_reserved_enum_name(text) { let err = p.err_builder( format!( "`{}` cannot be used as a enum name because it is already reserved", text ), id.range(p), ); p.error(err); } } // test_err ts enum_decl_no_id // enum {A,B,C} // enum 1 {A,B,C} Absent => { if p.nth_at(1, L_CURLY) { let range = p.cur_range(); let m = p.start(); p.bump_any(); let _ = m.complete(p, JS_BOGUS_BINDING); let err = p.err_builder("invalid `enum` name", range); p.error(err); } else { let err = p.err_builder( "`enum` statements must have a name", TextRange::new(enum_token_range.start(), p.cur_range().start()), ); p.error(err); } } } } pub(crate) fn is_at_ts_enum_declaration(p: &mut JsParser) -> bool { is_nth_at_ts_enum_declaration(p, 0) } pub(crate) fn 
is_nth_at_ts_enum_declaration(p: &mut JsParser, n: usize) -> bool { match p.nth(n) { T![enum] => true, T![const] => p.nth_at(n + 1, T![enum]), _ => false, } } // test ts typescript_enum // enum A {} // enum B { a, b, c } // const enum C { A = 1, B = A * 2, ["A"] = 3, } // // test_err ts typescript_enum_incomplete // enum A { pub(crate) fn parse_ts_enum_declaration(p: &mut JsParser) -> ParsedSyntax { if !is_at_ts_enum_declaration(p) { return Absent; } let m = p.start(); p.eat(T![const]); let enum_token_range = p.cur_range(); p.expect(T![enum]); parse_ts_enum_id(p, enum_token_range); // test_err ts enum_no_l_curly // enum; // enum A; p.expect(T!['{']); TsEnumMembersList.parse_list(p); // test_err ts enum_no_r_curly // enum {; // enum A {; p.expect(T!['}']); Present(m.complete(p, TS_ENUM_DECLARATION)) } pub(crate) fn parse_ts_type_alias_declaration(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![type]) { return Absent; } let start = p.cur_range().start(); let m = p.start(); p.expect(T![type]); parse_ts_identifier_binding(p, super::TsIdentifierContext::Type) .or_add_diagnostic(p, expected_identifier); parse_ts_type_parameters(p, TypeContext::default().and_allow_in_out_modifier(true)).ok(); p.expect(T![=]); parse_ts_type(p, TypeContext::default()).or_add_diagnostic(p, expected_ts_type); semi(p, TextRange::new(start, p.cur_range().end())); Present(m.complete(p, TS_TYPE_ALIAS_DECLARATION)) } // test ts ts_declare_const_initializer // declare module test { const X; } pub(crate) fn parse_ts_declare_statement(p: &mut JsParser) -> ParsedSyntax { if !is_at_ts_declare_statement(p) { return Absent; } let stmt_start_pos = p.cur_range().start(); let m = p.start(); p.expect(T![declare]); p.with_state(EnterAmbientContext, |p| { // test_err ts ts_declare_const_initializer // declare @decorator class D {} // declare @decorator abstract class D {} parse_declaration_clause(p, stmt_start_pos) .expect("Expected a declaration as guaranteed by is_at_ts_declare_statement") }); 
Present(m.complete(p, TS_DECLARE_STATEMENT)) } #[inline] pub(crate) fn is_at_ts_declare_statement(p: &mut JsParser) -> bool { if !p.at(T![declare]) || p.has_nth_preceding_line_break(1) { return false; } is_nth_at_declaration_clause(p, 1) } #[inline] pub(crate) fn is_at_ts_interface_declaration(p: &mut JsParser) -> bool { if !p.at(T![interface]) || p.has_nth_preceding_line_break(1) { return false; } is_nth_at_identifier_binding(p, 1) || p.nth_at(1, T!['{']) } // test ts ts_interface // interface A {} // interface B { prop: string, method(): string, [index: number]: string, new(): B } // test ts ts_index_signature_interface_member // interface A { // [a: number]: string; // } // interface B { // [index: string]: { prop } // } // interface C { // readonly [a: number]: string; // } // test_err ts ts_index_signature_interface_member_cannot_be_static // interface A { // static [index: string]: string // } // interface B { // public [index: string]: string // } // interface C { // private [index: string]: string // } // test_err ts ts_index_signature_interface_member_cannot_have_visibility_modifiers // interface A { // public [a: number]: string; // } // interface B { // private [a: number]: string; // } // interface C { // protected [a: number]: string; // } pub(crate) fn parse_ts_interface_declaration(p: &mut JsParser) -> ParsedSyntax { if !is_at_ts_interface_declaration(p) { return Absent; } let m = p.start(); p.expect(T![interface]); parse_ts_identifier_binding(p, super::TsIdentifierContext::Type) .or_add_diagnostic(p, expected_identifier); parse_ts_type_parameters(p, TypeContext::default().and_allow_in_out_modifier(true)).ok(); eat_interface_heritage_clause(p); p.expect(T!['{']); TypeMembers.parse_list(p); p.expect(T!['}']); Present(m.complete(p, TS_INTERFACE_DECLARATION)) } // test_err ts ts_interface_heritage_clause_error // interface A {} // interface B implements A {} // interface C extends A extends B {} // interface D extends {} // interface E extends A, {} /// 
Eats an interface's `extends` or an `extends` (not allowed but for better recovery) clauses /// Attaches the clauses to the currently active node fn eat_interface_heritage_clause(p: &mut JsParser) { let mut first_extends: Option<CompletedMarker> = None; loop { if p.at(T![extends]) { let extends = parse_ts_extends_clause(p).expect( "expected an extends clause because parser is positioned at the extends keyword", ); if let Some(first_extends) = first_extends.as_ref() { p.error( p.err_builder("'extends' clause already seen.", extends.range(p)) .detail(first_extends.range(p), "first 'extends' clause"), ) } else { first_extends = Some(extends); } } else if p.at(T![implements]) { let implements = parse_ts_implements_clause(p).expect("positioned at the implements keyword"); p.error(p.err_builder( "Interface declaration cannot have 'implements' clause.", implements.range(p), )); } else { break; } } } // test ts ts_interface_extends_clause // interface A<Prop> { prop: Prop } // interface B extends A<string> {} // interface C extends A<number>, B {} fn parse_ts_extends_clause(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![extends]) { return Absent; } let m = p.start(); p.expect(T![extends]); expect_ts_type_list(p, "extends"); Present(m.complete(p, TS_EXTENDS_CLAUSE)) } #[inline] pub(crate) fn is_at_any_ts_namespace_declaration(p: &mut JsParser) -> bool { if p.has_nth_preceding_line_break(1) { return false; } if matches!(p.cur(), T![namespace] | T![module]) { return is_nth_at_identifier(p, 1) || p.nth_at(1, JS_STRING_LITERAL); } if p.at(T![global]) { return p.nth_at(1, T!['{']); } false } #[inline] pub(crate) fn is_nth_at_any_ts_namespace_declaration(p: &mut JsParser, n: usize) -> bool { if p.has_nth_preceding_line_break(n + 1) { return false; } if matches!(p.nth(n), T![namespace] | T![module]) { return is_nth_at_identifier(p, n + 1) || p.nth_at(n + 1, JS_STRING_LITERAL); } if p.nth_at(n, T![global]) { return p.nth_at(n + 1, T!['{']); } false } pub(crate) fn 
parse_any_ts_namespace_declaration_clause( p: &mut JsParser, stmt_start_pos: TextSize, ) -> ParsedSyntax { match p.cur() { T![global] => parse_ts_global_declaration(p), T![namespace] | T![module] => { parse_ts_namespace_or_module_declaration_clause(p, stmt_start_pos) } _ => Absent, } } pub(crate) fn parse_any_ts_namespace_declaration_statement(p: &mut JsParser) -> ParsedSyntax { parse_any_ts_namespace_declaration_clause(p, p.cur_range().start()) } // test ts ts_namespace_declaration // declare namespace a {} // declare namespace a.b.c.d {} // declare namespace a.b { function test(): string } // namespace X { } // // test ts ts_module_declaration // declare module a {} // declare module a.b.c.d {} // declare module a.b { function test(): string } // module X {} // // test ts ts_external_module_declaration // declare module "a"; // declare module "b" // declare module "import" {} // // test_err ts ts_module_err // declare module a; // missing body // declare module "a" declare module "b"; // missing semi fn parse_ts_namespace_or_module_declaration_clause( p: &mut JsParser, stmt_start_pos: TextSize, ) -> ParsedSyntax { if !matches!(p.cur(), T![namespace] | T![module]) { return Absent; } let m = p.start(); if !p.eat(T![namespace]) { p.expect(T![module]); if p.at(JS_STRING_LITERAL) { parse_module_source(p).expect("expected module source to be present because parser is positioned at a string literal"); let body = parse_ts_module_block(p); if body.is_absent() { if p.at(T![;]) { let body = p.start(); p.bump(T![;]); body.complete(p, TS_EMPTY_EXTERNAL_MODULE_DECLARATION_BODY); } else { semi(p, TextRange::new(stmt_start_pos, p.cur_range().end())); } } return Present(m.complete(p, TS_EXTERNAL_MODULE_DECLARATION)); } } parse_ts_module_name(p).or_add_diagnostic(p, expected_identifier); parse_ts_module_block(p).or_add_diagnostic(p, |_, _| expected_token(T!['{'])); Present(m.complete(p, TS_MODULE_DECLARATION)) } // test ts built_in_module_name // // 
https://github.com/rome/tools/issues/2959 // module number {} // module string {} // declare module never {} fn parse_ts_module_name(p: &mut JsParser) -> ParsedSyntax { let mut left = parse_ts_identifier_binding(p, super::TsIdentifierContext::Module); while p.at(T![.]) { let m = left.precede_or_add_diagnostic(p, expected_identifier); p.bump(T![.]); parse_name(p).or_add_diagnostic(p, expected_identifier); left = Present(m.complete(p, TS_QUALIFIED_MODULE_NAME)); } left } fn parse_ts_module_block(p: &mut JsParser) -> ParsedSyntax { if !p.at(T!['{']) { return Absent; } let m = p.start(); p.bump(T!['{']); let items_list = p.start(); parse_module_item_list(p, ModuleItemListParent::Block, items_list); p.expect(T!['}']); Present(m.complete(p, TS_MODULE_BLOCK)) } // test ts ts_global_declaration // declare module "./test" { // global { // let VERSION: string; // } // } // // test ts ts_global_variable // let global; // global // not a global declaration // console.log("a"); fn parse_ts_global_declaration(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![global]) { return Absent; } let m = p.start(); p.expect(T![global]); parse_ts_module_block(p).or_add_diagnostic(p, |_, _| expected_token(T!['{'])); Present(m.complete(p, TS_GLOBAL_DECLARATION)) } // test ts ts_import_equals_declaration // import x = require("./test"); // namespace a.b {} // import y = a; // import z = a.b; // import type A = require("./a"); // export import n = a; /// Parses everything after the `import` of an import equals declaration pub(crate) fn parse_ts_import_equals_declaration_rest( p: &mut JsParser, m: Marker, stmt_start_pos: TextSize, ) -> CompletedMarker { if is_nth_at_identifier_binding(p, 1) { p.eat(T![type]); } parse_identifier_binding(p).or_add_diagnostic(p, expected_identifier); p.expect(T![=]); if p.at(T![require]) { parse_ts_external_module_reference(p) .expect("Expect module reference to return Present because parser is at require token"); } else { parse_ts_name(p).or_add_diagnostic(p, 
expected_identifier); } semi(p, TextRange::new(stmt_start_pos, p.cur_range().end())); m.complete(p, TS_IMPORT_EQUALS_DECLARATION) } fn parse_ts_external_module_reference(p: &mut JsParser) -> ParsedSyntax { if !p.at(T![require]) { return Absent; } let m = p.start(); p.expect(T![require]); p.expect(T!['(']); parse_module_source(p).or_add_diagnostic(p, expected_module_source); p.expect(T![')']); Present(m.complete(p, TS_EXTERNAL_MODULE_REFERENCE)) }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/syntax/typescript/ts_parse_error.rs
crates/rome_js_parser/src/syntax/typescript/ts_parse_error.rs
use crate::prelude::*; use crate::JsParser; use rome_diagnostics::location::AsSpan; use rome_parser::diagnostic::{expected_any, expected_node}; use rome_rowan::TextRange; pub(crate) fn expected_ts_enum_member(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_any(&["identifier", "string literal", "computed name"], range).into_diagnostic(p) } pub(crate) fn unexpected_abstract_member_with_body( p: &JsParser, range: TextRange, ) -> ParseDiagnostic { p.err_builder("abstract members should not have a body", range) } pub(crate) fn abstract_member_cannot_be_async(p: &JsParser, range: &TextRange) -> ParseDiagnostic { p.err_builder("async members cannot be abstract", range) } pub(crate) fn ts_member_cannot_be( p: &JsParser, range: impl AsSpan, member_type_name: &str, modifier_name: &str, ) -> ParseDiagnostic { let msg = format!("{} members cannot be {}", member_type_name, modifier_name); p.err_builder(msg, range) } pub(crate) fn ts_modifier_cannot_appear_on_a_constructor_declaration( p: &JsParser, modifier_range: TextRange, ) -> ParseDiagnostic { let modifier = p.text(modifier_range); p.err_builder( format!("'{modifier}' cannot appear on a constructor declaration."), modifier_range, ) } pub(crate) fn ts_modifier_cannot_appear_on_a_parameter( p: &JsParser, modifier_range: TextRange, ) -> ParseDiagnostic { let modifier = p.text(modifier_range); p.err_builder( format!("'{modifier}' cannot appear on a parameter."), modifier_range, ) } pub(crate) fn ts_in_out_modifier_cannot_appear_on_a_type_parameter( p: &JsParser, modifier_range: TextRange, ) -> ParseDiagnostic { let modifier = p.text(modifier_range); p.err_builder( format!("'{modifier}' modifier can only appear on a type parameter of a class, interface or type alias.",), modifier_range, ) } pub(crate) fn ts_const_modifier_cannot_appear_on_a_type_parameter( p: &JsParser, modifier_range: TextRange, ) -> ParseDiagnostic { p.err_builder( "'const' modifier can only appear on a type parameter of a function, method or 
class.", modifier_range, ) } pub(crate) fn ts_accessibility_modifier_already_seen( p: &JsParser, second_range: TextRange, first_range: TextRange, ) -> ParseDiagnostic { p.err_builder("Accessibility modifier already seen.", second_range) .detail(second_range, "duplicate modifier") .detail(first_range, "first modifier") } pub(crate) fn ts_only_syntax_error( p: &JsParser, syntax: &str, range: TextRange, ) -> ParseDiagnostic { p.err_builder(format!("{} are a TypeScript only feature. Convert your file to a TypeScript file or remove the syntax.", syntax) ,range).hint( "TypeScript only syntax") } pub(crate) fn ts_accessor_type_parameters_error( p: &JsParser, type_parameters: &CompletedMarker, ) -> ParseDiagnostic { p.err_builder( "An accessor cannot have type parameters.", type_parameters.range(p), ) } pub(crate) fn ts_constructor_type_parameters_error( p: &JsParser, type_parameters: &CompletedMarker, ) -> ParseDiagnostic { p.err_builder( "constructors cannot have type parameters.", type_parameters.range(p), ) } pub(crate) fn ts_set_accessor_return_type_error( p: &JsParser, type_annotation: &CompletedMarker, ) -> ParseDiagnostic { p.err_builder( "A 'set' accessor cannot have a return type annotation.", type_annotation.range(p), ) } pub(crate) fn expected_ts_type(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("type", range).into_diagnostic(p) } pub(crate) fn expected_ts_type_parameter(p: &JsParser, range: TextRange) -> ParseDiagnostic { expected_node("type parameter", range).into_diagnostic(p) }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/parser/rewrite_parser.rs
crates/rome_js_parser/src/parser/rewrite_parser.rs
use crate::parser::JsParser; use crate::token_source::TokenSourceCheckpoint; use crate::prelude::*; use rome_console::fmt::Display; use rome_diagnostics::location::AsSpan; use rome_js_syntax::{JsSyntaxKind, TextRange}; use rome_parser::{ diagnostic::{ParseDiagnostic, ToDiagnostic}, event::Event, CompletedMarker, Marker, }; use rome_rowan::TextSize; /// Simplified parser API for when rewriting the AST structure with `rewrite_events`. /// /// The difference from the regular [Parser] is that the [TokenSource] must be detached during /// rewriting to avoid lexing previously lexed tokens in a different context. For example for `a[`test`] = "b"`. /// Template literal elements get lexed in the [TemplateElement] context. However, if the rewriter /// rewinds the token source then all tokens are lexed in the [LexContext::Regular] which yields /// complete different results. /// /// This is why the [RewriteParser] tracks the source offset without relying on the `TokenSource` /// and explicitly passes the positions to [Marker] and [CompletedMarker]. This further has the /// benefit that rewriting the events doesn't require re-lexing all tokens as well. pub(crate) struct RewriteParser<'parser, 'source> { /// The byte offset of the current token from the start of the source offset: TextSize, inner: &'parser mut JsParser<'source>, /// Offset to the next not yet processed trivia in [TokenSource::trivia_list]. trivia_offset: usize, } impl<'parser, 'source> RewriteParser<'parser, 'source> { pub fn new(p: &'parser mut JsParser<'source>, checkpoint: TokenSourceCheckpoint) -> Self { Self { inner: p, offset: checkpoint.current_start(), trivia_offset: checkpoint.trivia_position(), } } /// Starts a marker for a new node. 
pub fn start(&mut self) -> RewriteMarker { let pos = self.inner.context().events().len() as u32; self.skip_trivia(false); self.inner.context_mut().push_event(Event::tombstone()); RewriteMarker(Marker::new(pos, self.offset)) } /// Bumps the passed in token pub fn bump(&mut self, token: RewriteToken) { self.skip_trivia(false); debug_assert!(self.offset < token.end); self.inner.context_mut().push_token(token.kind, token.end); // test ts ts_decorator_assignment // @test(--a) // class Test {} // If the parser originally skipped this token as trivia, then make sure to also consume the trivia. if let Some(trivia) = self.inner.source().trivia_list.get(self.trivia_offset) { if trivia.kind().is_skipped() && trivia.offset() == self.offset { self.trivia_offset += 1; } } self.offset = token.end; self.skip_trivia(true); } fn skip_trivia(&mut self, trailing: bool) { let remaining_trivia = &self.inner.source().trivia_list[self.trivia_offset..]; for trivia in remaining_trivia { // Don't skip over any "skipped token trivia". These get consumed when bumping the token. if trailing != trivia.trailing() || self.offset != trivia.offset() || trivia.kind().is_skipped() { break; } self.trivia_offset += 1; self.offset += trivia.len(); } } /// Finishes the rewriter /// /// ## Panics /// If not all tokens have been consumed or if they have been consumed out of order pub fn finish(mut self) { self.skip_trivia(false); // Skip the leading trivia up to the current token. 
assert_eq!( self.offset, self.inner.source().position(), "Rewrite didn't consume all tokens" ); } /// Returns true if the parser is in strict mode pub fn is_strict_mode(&self) -> bool { self.inner.state().strict().is_some() } pub fn err_builder(&self, message: impl Display, span: impl AsSpan) -> ParseDiagnostic { self.inner.err_builder(message, span) } pub fn error(&mut self, diagnostic: impl ToDiagnostic<JsParser<'source>>) { self.inner.error(diagnostic) } } #[derive(Debug, Clone, Copy)] pub(crate) struct RewriteToken { pub(crate) kind: JsSyntaxKind, end: TextSize, } impl RewriteToken { pub fn new(kind: JsSyntaxKind, end: TextSize) -> Self { Self { kind, end } } } #[derive(Debug)] pub(crate) struct RewriteMarker(Marker); impl RewriteMarker { /// Completes the node with the specified kind pub fn complete(self, p: &mut RewriteParser, kind: JsSyntaxKind) -> RewriteCompletedMarker { RewriteCompletedMarker(self.0.complete(p.inner, kind)) } } #[derive(Debug)] pub(crate) struct RewriteCompletedMarker(CompletedMarker); impl RewriteCompletedMarker { /// Returns the range of the marker pub fn range(&self, p: &RewriteParser) -> TextRange { self.0.range(p.inner) } /// Returns the source text of the marker pub fn text<'a>(&self, p: &'a RewriteParser) -> &'a str { self.0.text(p.inner) } pub fn change_to_bogus(&mut self, p: &mut RewriteParser) { self.0.change_to_bogus(p.inner) } } impl From<RewriteCompletedMarker> for CompletedMarker { fn from(inner: RewriteCompletedMarker) -> Self { inner.0 } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/parser/single_token_parse_recovery.rs
crates/rome_js_parser/src/parser/single_token_parse_recovery.rs
use crate::lexer::{JsSyntaxKind, T}; use crate::prelude::*; use crate::JsParser; use rome_parser::diagnostic::ParseDiagnostic; /// This struct contains the information needed to the parser to recover from a certain error /// /// By default it doesn't check curly braces, use [with_braces_included] to turn opt-in the check #[derive(Debug)] #[deprecated(note = "Use ParsedSyntax with ParseRecovery instead")] pub(crate) struct SingleTokenParseRecovery { /// The [Diagnostic] to emit error: Option<ParseDiagnostic>, /// It tells the parser to recover if the position is inside a set of [tokens](TokenSet) recovery: TokenSet<JsSyntaxKind>, /// It tells the parser to recover if the current token is a curly brace include_braces: bool, /// The kind of the bogus node the parser inserts if it isn't able to recover because /// the current token is neither in the recovery set nor any of `{` or `}`. bogus_node_kind: JsSyntaxKind, } #[allow(deprecated)] impl SingleTokenParseRecovery { pub fn new(recovery: TokenSet<JsSyntaxKind>, bogus_node_kind: JsSyntaxKind) -> Self { Self { error: None, recovery, include_braces: false, bogus_node_kind, } } /// The main function that tells to the parser how to recover itself. /// /// Recover from an error with a [recovery set](TokenSet) or by using a `{` or `}`. /// /// If [SingleTokenParseRecovery] has an error, it gets tracked in the events. 
pub fn recover(&self, p: &mut JsParser) { let error = self.get_error(); if let Some(error) = error { p.error(error); } if p.state().speculative_parsing { return; } if !self.parsing_is_recoverable(p) { let m = p.start(); p.bump_any(); m.complete(p, self.get_bogus_node_kind()); } } /// Checks if the parsing phase is recoverable by checking curly braces and [tokens set](TokenSet) fn parsing_is_recoverable(&self, parser: &JsParser) -> bool { self.is_at_token_set(parser) || self.is_at_braces(parser) || self.is_at_eof(parser) } /// It returns the diagnostic fn get_error(&self) -> Option<ParseDiagnostic> { self.error.to_owned() } /// It returns the bogus node kind that will be used to complete the parsing fn get_bogus_node_kind(&self) -> JsSyntaxKind { self.bogus_node_kind } fn is_at_braces(&self, parser: &JsParser) -> bool { matches!(parser.cur(), T!['{'] | T!['}'] if self.include_braces) } fn is_at_token_set(&self, parser: &JsParser) -> bool { parser.at_ts(self.recovery) } fn is_at_eof(&self, parser: &JsParser) -> bool { parser.cur() == JsSyntaxKind::EOF } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/lexer/errors.rs
crates/rome_js_parser/src/lexer/errors.rs
use crate::prelude::*; pub fn invalid_digits_after_unicode_escape_sequence(start: usize, end: usize) -> ParseDiagnostic { ParseDiagnostic::new("invalid digits after unicode escape sequence", start..end) .hint("expected valid unicode escape sequence") }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/lexer/buffered_lexer.rs
crates/rome_js_parser/src/lexer/buffered_lexer.rs
use super::{LexContext, Lexer, LexerCheckpoint, ReLexContext, TextRange, TokenFlags}; use rome_js_syntax::{JsSyntaxKind, JsSyntaxKind::EOF}; use rome_parser::diagnostic::ParseDiagnostic; use std::collections::VecDeque; use std::iter::FusedIterator; /// Wrapper around a [Lexer] that supports lookahead. /// /// The underlying [Lexer] only supports inspecting the current token and lexing the next token. /// However, the current token is often not enough for the Parser to decide what the next node is, /// it often needs information about the next non-trivia tokens. /// /// The [BufferedLexer] adds support for lookahead by caching the lexed tokens and keeping track /// of the current position (and what the `nth` token is). This means, that every token /// only gets lexed once except if the buffer cached some lookahead tokens and: /// /// * `next_token` is called with a context other than [LexContext::default()]. /// * the lexer gets rewinded to a previous position /// * re-lexing the current token changes the kind of the token. That means, /// that any following token may turn out to be different as well, thus, it's necessary to clear the /// lookahead cache. #[derive(Debug)] pub(crate) struct BufferedLexer<'l> { /// Cache storing the lookahead tokens. That are, all tokens between the `current` token and /// the "current" of the [Lexer]. This is because the [Lexer]'s current token points to the /// furthest requested lookahead token. /// /// For example for the following source `let a = 2;`. The `current` token of the inner [Lexer] and /// of the [BufferedLexer] after one call to `next_token` is the `let` token. However, the `current` /// token diverges if the [BufferedLexer] performs lookahead. Let's say you do a lookahead of 4 (`=` token). /// Now, the [BufferedLexer] calls [Lexer::next_token] four times, moving the [Lexer]'s `current` /// token to the `=`. However, the `current` of the [BufferedLexer] still points to the `let` token. 
/// That's why the [BufferedLexer] stores the following information: /// * `current`: `let` (information about the `current` token from the consumer perspective) /// * `lookahead`: [WHITESPACE, IDENT: 'a', WHITESPACE]. The tokens that have been lexed to /// answer the "lookahead 4" request but haven't been returned yet. /// * [Lexer::current]: Points to `=` lookahead: VecDeque<LexerCheckpoint>, /// Stores the information of the current token in case the `lexer` is at least one token ahead. current: Option<LexerCheckpoint>, /// Underlying lexer. May be ahead if iterated with lookahead inner: Lexer<'l>, } impl<'l> BufferedLexer<'l> { /// Creates a new [BufferedLexer] wrapping the passed in [Lexer]. pub fn new(lexer: Lexer<'l>) -> Self { Self { inner: lexer, current: None, lookahead: VecDeque::new(), } } /// Returns the kind of the next token and any associated diagnostic. /// /// [See `Lexer.next_token`](Lexer::next_token) #[inline(always)] pub fn next_token(&mut self, context: LexContext) -> JsSyntaxKind { // Reset the lookahead if the context isn't the regular context because it's highly likely // that the lexer will return a different token. if !context.is_regular() { self.reset_lookahead(); } // Retrieve the next token from the lookahead cache if it isn't empty else if let Some(next) = self.lookahead.pop_front() { let kind = next.current_kind; // Store the lookahead as the current token if the lookahead isn't empty (in which case, // the lexer is still at least one token ahead). if self.lookahead.is_empty() { self.current = None; } else { self.current = Some(next); } return kind; } // The [BufferedLexer] and [Lexer] are now both at the same position. Clear the cached // current token and lex out the next token. 
self.current = None; self.inner.next_token(context) } /// Returns the kind of the current token #[inline(always)] pub fn current(&self) -> JsSyntaxKind { if let Some(current) = &self.current { current.current_kind } else { self.inner.current() } } /// Returns the range of the current token #[inline(always)] pub fn current_range(&self) -> TextRange { if let Some(current) = &self.current { TextRange::new(current.current_start, current.position) } else { self.inner.current_range() } } /// Tests if there's a line break before the current token. #[inline(always)] pub fn has_preceding_line_break(&self) -> bool { if let Some(current) = &self.current { current.has_preceding_line_break() } else { self.inner.has_preceding_line_break() } } /// Returns true if the current token is an identifier, and it contains any unicode escape sequences #[inline] pub fn has_unicode_escape(&self) -> bool { if let Some(current) = &self.current { current.has_unicode_escape() } else { self.inner.has_unicode_escape() } } /// Returns the source text #[inline] pub fn source(&self) -> &'l str { self.inner.source() } /// Creates a checkpoint representing the current lexer state. Allows rewinding /// the lexer to this position later on. pub fn checkpoint(&self) -> LexerCheckpoint { if let Some(current) = &self.current { current.clone() } else { self.inner.checkpoint() } } /// Rewinds the lexer to the state stored in the checkpoint. 
pub fn rewind(&mut self, checkpoint: LexerCheckpoint) { // test_err js js_rewind_at_eof_token // (([zAgRvz=[=(e{V{ self.inner.rewind(checkpoint); self.lookahead.clear(); self.current = None; } fn reset_lookahead(&mut self) { if let Some(current) = self.current.take() { self.inner.rewind(current); self.lookahead.clear(); } } /// Re-lex the current token in the given context /// See [Lexer::re_lex] pub fn re_lex(&mut self, context: ReLexContext) -> JsSyntaxKind { let current_kind = self.current(); let current_checkpoint = self.inner.checkpoint(); if let Some(current) = self.current.take() { self.inner.rewind(current); } let new_kind = self.inner.re_lex(context); if new_kind != current_kind { // The token has changed, clear the lookahead self.lookahead.clear(); } else if !self.lookahead.is_empty() { // It's still the same kind. So let's move the lexer back to the position it was before re-lexing // and keep the lookahead as is. self.current = Some(self.inner.checkpoint()); self.inner.rewind(current_checkpoint); } new_kind } /// Returns an iterator over the tokens following the current token to perform lookahead. /// For example, what's the 3rd token after the current token? 
#[inline(always)] pub fn lookahead<'s>(&'s mut self) -> LookaheadIterator<'s, 'l> { LookaheadIterator::new(self) } /// Consumes the buffered lexer and returns the lexing diagnostics pub fn finish(self) -> Vec<ParseDiagnostic> { self.inner.finish() } } #[derive(Debug)] pub(crate) struct LookaheadIterator<'l, 't> { buffered: &'l mut BufferedLexer<'t>, nth: usize, } impl<'l, 't> LookaheadIterator<'l, 't> { fn new(lexer: &'l mut BufferedLexer<'t>) -> Self { Self { buffered: lexer, nth: 0, } } } impl<'l, 't> Iterator for LookaheadIterator<'l, 't> { type Item = LookaheadToken; #[inline] fn next(&mut self) -> Option<Self::Item> { let lookbehind = &self.buffered.lookahead; self.nth += 1; // Is the `nth` token already in the cache, then return it if let Some(lookbehind) = lookbehind.get(self.nth - 1) { let lookahead = LookaheadToken::from(lookbehind); return Some(lookahead); } let lexer = &mut self.buffered.inner; // We're already at the end, calling next now only returns `EOF` again. End the iterator. if lexer.current() == EOF { return None; } // Store the current token before moving the inner lexer forward if we haven't done so. // Necessary to prevent that [BufferedLexer::current] moves forward when performing lookahead. if self.buffered.current.is_none() { self.buffered.current = Some(lexer.checkpoint()); } let kind = lexer.next_token(LexContext::default()); // Lex the next token and cache it in the lookahead cache. Needed to cache it right away // because of the diagnostic. 
let checkpoint = lexer.checkpoint(); self.buffered.lookahead.push_back(checkpoint); Some(LookaheadToken { kind, flags: lexer.current_flags, }) } } impl<'l, 't> FusedIterator for LookaheadIterator<'l, 't> {} #[derive(Debug)] pub struct LookaheadToken { kind: JsSyntaxKind, flags: TokenFlags, } impl LookaheadToken { pub fn kind(&self) -> JsSyntaxKind { self.kind } pub fn has_preceding_line_break(&self) -> bool { self.flags.has_preceding_line_break() } } impl From<&LexerCheckpoint> for LookaheadToken { fn from(checkpoint: &LexerCheckpoint) -> Self { LookaheadToken { kind: checkpoint.current_kind, flags: checkpoint.current_flags, } } } #[cfg(test)] mod tests { use super::BufferedLexer; use crate::lexer::{LexContext, Lexer, TextRange, TextSize}; use rome_js_syntax::JsSyntaxKind::{JS_NUMBER_LITERAL, NEWLINE, WHITESPACE}; use rome_js_syntax::T; #[test] fn without_lookahead() { let lexer = Lexer::from_str("let a\n = 5"); let mut buffered = BufferedLexer::new(lexer); buffered.next_token(LexContext::default()); assert_eq!(buffered.current(), T![let]); assert!(!buffered.has_preceding_line_break()); assert_eq!( buffered.current_range(), TextRange::at(TextSize::from(0), TextSize::from(3)) ); assert_eq!(buffered.next_token(LexContext::default()), WHITESPACE); assert_eq!(buffered.next_token(LexContext::default()), T![ident]); assert_eq!(buffered.next_token(LexContext::default()), NEWLINE); assert_eq!(buffered.next_token(LexContext::default()), WHITESPACE); assert_eq!(buffered.next_token(LexContext::default()), T![=]); assert!(buffered.has_preceding_line_break()); assert_eq!(buffered.next_token(LexContext::default()), WHITESPACE); assert_eq!( buffered.next_token(LexContext::default()), JS_NUMBER_LITERAL ); assert_eq!(buffered.next_token(LexContext::default()), T![EOF]); } #[test] fn lookahead() { let lexer = Lexer::from_str("let a\n = 5"); let mut buffered = BufferedLexer::new(lexer); buffered.next_token(LexContext::default()); assert_eq!(buffered.current(), T![let]); 
assert!(!buffered.has_preceding_line_break()); assert_eq!( buffered.current_range(), TextRange::at(TextSize::from(0), TextSize::from(3)) ); { let lookahead = buffered.lookahead().map(|l| l.kind).collect::<Vec<_>>(); assert_eq!( lookahead, vec![ WHITESPACE, T![ident], NEWLINE, WHITESPACE, T![=], WHITESPACE, JS_NUMBER_LITERAL, T![EOF] ] ); } assert_eq!(buffered.current(), T![let]); assert_eq!(buffered.next_token(LexContext::default()), WHITESPACE); { let mut lookahead = buffered.lookahead(); let nth1 = lookahead.next().unwrap(); let nth2 = lookahead.next().unwrap(); let nth3 = lookahead.next().unwrap(); let nth4 = lookahead.next().unwrap(); assert_eq!(nth1.kind(), T![ident]); assert_eq!(nth2.kind(), NEWLINE); assert_eq!(nth3.kind(), WHITESPACE); assert_eq!(nth4.kind(), T![=]); assert!(nth4.has_preceding_line_break()); } assert_eq!(buffered.next_token(LexContext::default()), T![ident]); assert_eq!(buffered.next_token(LexContext::default()), NEWLINE); assert_eq!(buffered.next_token(LexContext::default()), WHITESPACE); assert_eq!(buffered.next_token(LexContext::default()), T![=]); assert!(buffered.has_preceding_line_break()); assert_eq!(buffered.next_token(LexContext::default()), WHITESPACE); assert_eq!( buffered.next_token(LexContext::default()), JS_NUMBER_LITERAL ); assert_eq!(buffered.next_token(LexContext::default()), T![EOF]); } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/lexer/highlight.rs
crates/rome_js_parser/src/lexer/highlight.rs
use crate::*; pub use ansi_term::{self, ANSIGenericString, Color, Style}; use atty::is; /// A structure for syntax highlighting pieces of JavaScript source code /// using ANSI. /// /// The highlighter will auto detect if stderr or stdout are terminals, if /// they are not then it will return the original uncolored source code. /// All errors encountered while lexing are ignored. /// /// The highlighter is iterator based, which allows for coloring a part of code /// at a time. /// The highlighter's position can be controlled through various methods which allows /// for reuse of the highlighter without the need to rescan the source code #[derive(Debug, Clone, PartialEq, Eq, Hash)] pub struct Highlighter<'s> { pub source: &'s str, tokens: Vec<Token>, /// Current token position cur: usize, /// Current byte index in source cur_idx: usize, } macro_rules! rgb { ($r:expr, $g:expr, $b:expr) => { Color::RGB($r, $g, $b) }; } impl<'s> Highlighter<'s> { /// Make a new highlighter, this will invoke the lexer to get tokens. pub fn new(source: &'s str) -> Highlighter<'s> { let tokens = Lexer::from_str(source, 0).map(|t| t.0).collect(); Self { source, tokens, cur: 0, cur_idx: 0, } } fn check_terminal(&self) -> bool { is(atty::Stream::Stderr) && is(atty::Stream::Stdout) } /// Reset the highlighter to the start of the source code pub fn reset(&mut self) {} /// Consume the rest of the highlighter's tokens and turn them into an ANSI colored string. /// This returns an unaltered string if stdout and stderr are not terminals. 
pub fn color(&mut self) -> String { if !self.check_terminal() { let ret = self.source[self.cur_idx..self.source.len()].to_string(); self.cur = self.tokens.len(); self.cur_idx = self.source.len(); return ret; } self.map(|x| x.to_string()).collect() } fn src(&self) -> &'s str { let range = TextRange::at( TextSize::from(self.cur_idx as u32), self.tokens.get(self.cur).unwrap().len(), ); &self.source[range] } } const PURPLE_IDENT: [&str; 4] = ["let", "class", "await", "yield"]; const BUILTINS: [&str; 27] = [ "Math", "Promise", "Number", "String", "Date", "Infinity", "NaN", "undefined", "globalThis", "Object", "Function", "Symbol", "Boolean", "Error", "EvalError", "InternalError", "RangeError", "ReferenceError", "SyntaxError", "TypeError", "Number", "BigInt", "RegExp", "Array", "Map", "Set", "JSON", ]; impl<'s> Iterator for Highlighter<'s> { /// An individual colored token, you can see the color used by checking the string's style foreground type Item = ANSIGenericString<'s, str>; fn next(&mut self) -> Option<Self::Item> { if self.tokens.get(self.cur) == None { return None; } let color = match self.tokens.get(self.cur)?.kind { T!['{'] | T!['}'] | T!['('] | T![')'] => rgb![255, 215, 0], T![import] => rgb![97, 175, 239], T![ident] if PURPLE_IDENT.contains(&self.src()) => rgb![198, 120, 221], T![ident] if self.src() == "from" => rgb![97, 175, 239], T![ident] if BUILTINS.contains(&self.src()) => rgb![229, 192, 123], T![ident] => rgb![224, 108, 117], T![instanceof] | T![new] | T![?] 
| T![delete] | T![:] | T![const] => { rgb![198, 120, 221] } t if t.is_punct() => rgb![86, 182, 194], t if t.is_keyword() => rgb![198, 120, 221], JsSyntaxKind::JS_STRING_LITERAL | JsSyntaxKind::BACKTICK | JsSyntaxKind::TEMPLATE_CHUNK => { rgb![152, 195, 121] } JsSyntaxKind::JS_NUMBER_LITERAL => rgb![209, 154, 102], JsSyntaxKind::DOLLAR_CURLY => rgb![198, 120, 221], JsSyntaxKind::ERROR_TOKEN => rgb![244, 71, 71], JsSyntaxKind::COMMENT => rgb![127, 132, 142], _ => Color::White, }; let string = self.src(); let token_len: usize = self.tokens.get(self.cur).unwrap().len().into(); self.cur_idx += token_len; self.cur += 1; Some(color.paint(string)) } } /// Colors a piece of source code using ANSI. /// The string returned will be unaltered if stdout and stderr are not terminals. pub fn color(source: &str) -> String { Highlighter::new(source).color() }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/lexer/tests.rs
crates/rome_js_parser/src/lexer/tests.rs
#![cfg(test)] #![allow(unused_mut, unused_variables, unused_assignments)] use super::{LexContext, Lexer, TextSize}; use crate::span::Span; use quickcheck_macros::quickcheck; use rome_js_syntax::JsSyntaxKind::{self, EOF}; use std::sync::mpsc::channel; use std::thread; use std::time::Duration; // Assert the result of lexing a piece of source code, // and make sure the tokens yielded are fully lossless and the source can be reconstructed from only the tokens macro_rules! assert_lex { ($src:expr, $($kind:ident:$len:expr $(,)?)*) => {{ let mut lexer = Lexer::from_str($src); let mut idx = 0; let mut tok_idx = TextSize::default(); let mut new_str = String::with_capacity($src.len()); let mut tokens = vec![]; while lexer.next_token(LexContext::default()) != EOF { tokens.push((lexer.current(), lexer.current_range())); } $( assert_eq!( tokens[idx].0, rome_js_syntax::JsSyntaxKind::$kind, "expected token kind {}, but found {:?}", stringify!($kind), tokens[idx].0, ); assert_eq!( tokens[idx].1.len(), TextSize::from($len), "expected token length of {}, but found {:?} for token {:?}", $len, tokens[idx].1.len(), tokens[idx].0, ); new_str.push_str(&$src[tokens[idx].1.as_range()]); tok_idx += tokens[idx].1.len(); idx += 1; )* if idx < tokens.len() { panic!( "expected {} tokens but lexer returned {}, first unexpected token is '{:?}'", idx, tokens.len(), tokens[idx].0 ); } else { assert_eq!(idx, tokens.len()); } assert_eq!($src, new_str, "Failed to reconstruct input"); }}; } // This is for testing if the lexer is truly lossless // It parses random strings and puts them back together with the produced tokens and compares #[quickcheck] fn losslessness(string: String) -> bool { // using an mpsc channel allows us to spawn a thread and spawn the lexer there, then if // it takes more than 2 seconds we panic because it is 100% infinite recursion let cloned = string.clone(); let (sender, receiver) = channel(); thread::spawn(move || { let mut lexer = Lexer::from_str(&cloned); let mut tokens = 
vec![]; while lexer.next_token(LexContext::default()) != EOF { tokens.push(lexer.current_range()); } sender .send(tokens) .expect("Could not send tokens to receiver"); }); let token_ranges = receiver .recv_timeout(Duration::from_secs(2)) .unwrap_or_else(|_| { panic!( "Lexer is infinitely recursing with this code: ->{}<-", string ) }); let mut new_str = String::with_capacity(string.len()); let mut idx = TextSize::from(0); for range in token_ranges { new_str.push_str(&string[range.as_range()]); idx += range.len(); } string == new_str } #[test] fn empty() { assert_lex! { "", } } #[test] fn identifier() { assert_lex! { "Abcdefg", IDENT:7 } } #[test] fn punctuators() { assert_lex! { "!%%&()*+,-.:;<=>?[]^{}|~", BANG:1, PERCENT:1, PERCENT:1, AMP:1, L_PAREN:1, R_PAREN:1, STAR:1, PLUS:1, COMMA:1, MINUS:1, DOT:1, COLON:1, SEMICOLON:1, LTEQ:2, R_ANGLE:1, QUESTION:1, L_BRACK:1, R_BRACK:1, CARET:1, L_CURLY:1, R_CURLY:1, PIPE:1, TILDE:1, } } #[test] fn bang() { assert_lex!( r#"!/a/"#, BANG:1, SLASH:1, IDENT:1, SLASH:1 ); assert_lex!( r#"a!/a/"#, IDENT:1, BANG:1, SLASH:1, IDENT:1, SLASH:1 ); assert_lex!( "1 && !2", JS_NUMBER_LITERAL:1, WHITESPACE:1, AMP2:2, WHITESPACE:1, BANG:1 JS_NUMBER_LITERAL:1 ); } #[test] fn consecutive_punctuators() { assert_lex! { "&&&&^^^||", AMP2:2, AMP2:2, CARET:1, CARET:1, CARET:1, PIPE2:2, } } #[test] fn unicode_whitespace() { assert_lex! { " \u{00a0}\u{1680}\u{2000}\u{2001}\u{2002}\u{2003}\u{2004}\u{2005}\u{2006}\u{2007}\u{2008}\u{2009}\u{200A}\u{202F}\u{205F}\u{3000}", WHITESPACE:48 } } #[test] fn unicode_whitespace_ident_part() { assert_lex! { "Abcd\u{2006}", IDENT:4, WHITESPACE:3 // length is in bytes } } #[test] fn all_whitespace() { assert_lex! { "\n\t\t", NEWLINE:1 WHITESPACE:2 } assert_lex! { "\r\n\t\t", NEWLINE:2 WHITESPACE:2 } assert_lex! { "\n\n", NEWLINE:1 NEWLINE:1 } assert_lex! { "\r\n\r\n", NEWLINE:2 NEWLINE:2 } assert_lex! { "\r\r\r\r", NEWLINE:1 NEWLINE:1 NEWLINE:1 NEWLINE:1 } assert_lex! 
{ "\r\r\n\n\u{2028}\u{2029}", NEWLINE:1 NEWLINE:2 NEWLINE:1 NEWLINE:3 NEWLINE:3 } } #[test] fn empty_string() { assert_lex! { r#""""#, JS_STRING_LITERAL:2 } assert_lex! { "''", JS_STRING_LITERAL:2 } } #[test] fn simple_string() { assert_lex! { r#"'abcdefghijklmnopqrstuvwxyz123456789\'10🦀'"#, JS_STRING_LITERAL:45 } assert_lex! { r#""abcdefghijklmnopqrstuvwxyz123456789\"10🦀""#, JS_STRING_LITERAL:45 } } #[test] fn string_unicode_escape_invalid() { assert_lex! { r#""abcd\u21""#, ERROR_TOKEN:10 } assert_lex! { r#"'abcd\u21'"#, ERROR_TOKEN:10 } } #[test] fn string_unicode_escape_valid() { assert_lex! { r#""abcd\u2000a""#, JS_STRING_LITERAL:13 } assert_lex! { r#"'abcd\u2000a'"#, JS_STRING_LITERAL:13 } } #[test] fn string_unicode_escape_valid_resolving_to_endquote() { assert_lex! { r#""abcd\u0022a""#, JS_STRING_LITERAL:13 } assert_lex! { r#"'abcd\u0027a'"#, JS_STRING_LITERAL:13 } } #[test] fn string_hex_escape_invalid() { assert_lex! { r#""abcd \xZ0 \xGH""#, ERROR_TOKEN:16 } assert_lex! { r#"'abcd \xZ0 \xGH'"#, ERROR_TOKEN:16 } } #[test] fn string_hex_escape_valid() { assert_lex! { r#""abcd \x00 \xAB""#, JS_STRING_LITERAL:16 } assert_lex! { r#"'abcd \x00 \xAB'"#, JS_STRING_LITERAL:16 } } #[test] fn unterminated_string() { assert_lex! { r#""abcd"#, ERROR_TOKEN:5 } assert_lex! { r#"'abcd"#, ERROR_TOKEN:5 } } #[test] fn string_all_escapes() { assert_lex! { r#""\x\u2004\u20\ux\xNN""#, ERROR_TOKEN:21 } assert_lex! { r#"'\x\u2004\u20\ux\xNN'"#, ERROR_TOKEN:21 } } #[test] fn complex_string_1() { assert_lex! { r#" _this += "str'n\u200bg";"#, WHITESPACE:1, IDENT:5, WHITESPACE:1, PLUSEQ:2, WHITESPACE:1, JS_STRING_LITERAL:14, SEMICOLON:1 } assert_lex! { r#" _this += 'str"n\u200bg';"#, WHITESPACE:1, IDENT:5, WHITESPACE:1, PLUSEQ:2, WHITESPACE:1, JS_STRING_LITERAL:14, SEMICOLON:1 } } #[test] fn unterminated_string_length() { assert_lex! { "'abc", ERROR_TOKEN:4 } } #[test] fn unterminated_string_with_escape_len() { assert_lex! { "'abc\\", ERROR_TOKEN:5 } assert_lex! 
{ r#"'abc\x"#, ERROR_TOKEN:6 } assert_lex! { r#"'abc\x4"#, ERROR_TOKEN:7 } assert_lex! { r#"'abc\x45"#, ERROR_TOKEN:8 } assert_lex! { r#"'abc\u"#, ERROR_TOKEN:6 } assert_lex! { r#"'abc\u20"#, ERROR_TOKEN:8 } } #[test] fn dollarsign_underscore_idents() { assert_lex! { "$a", IDENT:2 } } #[test] fn labels_a() { assert_lex! { "await", AWAIT_KW:5 } assert_lex! { "awaited", IDENT:7 } } #[test] fn labels_b() { assert_lex! { "break", BREAK_KW:5 } assert_lex! { "breaking speed records", IDENT:8, WHITESPACE:1, IDENT:5, WHITESPACE:1, IDENT:7 } } #[test] fn labels_c() { assert_lex! { "continue, const, class, catch, case", CONTINUE_KW:8, COMMA:1, WHITESPACE:1, CONST_KW:5, COMMA:1, WHITESPACE:1, CLASS_KW:5, COMMA:1, WHITESPACE:1, CATCH_KW:5, COMMA:1, WHITESPACE:1, CASE_KW:4 } assert_lex! { "classy crabs", IDENT:6, WHITESPACE:1, IDENT:5 } } #[test] fn labels_d() { assert_lex! { "debugger default delete do", DEBUGGER_KW:8, WHITESPACE:1, DEFAULT_KW:7, WHITESPACE:1, DELETE_KW:6, WHITESPACE:1, DO_KW:2 } assert_lex! { "derive doot d", IDENT:6, WHITESPACE:1, IDENT:4, WHITESPACE:1, IDENT:1 } } #[test] fn labels_e() { assert_lex! { "else enum export extends", ELSE_KW:4, WHITESPACE:1, ENUM_KW:4, WHITESPACE:1, EXPORT_KW:6, WHITESPACE:1, EXTENDS_KW:7 } assert_lex! { "e exports elsey", IDENT:1, WHITESPACE:1, IDENT:7, WHITESPACE:1, IDENT:5 } } #[test] fn labels_f() { assert_lex! { "finally for function", FINALLY_KW:7, WHITESPACE:1, FOR_KW:3, WHITESPACE:1, FUNCTION_KW:8 } assert_lex! { "finally, foreign food!", FINALLY_KW:7, COMMA:1, WHITESPACE:1, IDENT:7, WHITESPACE:1, IDENT:4, BANG:1 } } #[test] fn labels_i() { assert_lex! { "i in instanceof if import", IDENT:1, WHITESPACE:1, IN_KW: 2, WHITESPACE:1, INSTANCEOF_KW:10, WHITESPACE:1, IF_KW:2, WHITESPACE:1, IMPORT_KW:6 } assert_lex! { "icecream is interesting, innit?", IDENT:8, WHITESPACE:1, IS_KW:2, WHITESPACE:1, IDENT:11, COMMA:1, WHITESPACE:1, IDENT:5, QUESTION:1 } } #[test] fn labels_n() { assert_lex! { "new", NEW_KW:3 } assert_lex! 
{ "newly n", IDENT:5, WHITESPACE:1, IDENT:1 } } #[test] fn labels_r() { assert_lex! { "return", RETURN_KW:6 } assert_lex! { "returning", IDENT:9 } } #[test] fn labels_s() { assert_lex! { "switch super", SWITCH_KW:6, WHITESPACE:1, SUPER_KW:5 } assert_lex! { "superb switching", IDENT:6, WHITESPACE:1, IDENT:9 } } #[test] fn labels_t() { assert_lex! { "this try throw typeof t", THIS_KW:4, WHITESPACE:1, TRY_KW:3, WHITESPACE:1, THROW_KW:5, WHITESPACE:1, TYPEOF_KW:6, WHITESPACE:1, IDENT:1 } assert_lex! { "thistle throwing tea", IDENT:7, WHITESPACE:1, IDENT:8, WHITESPACE:1, IDENT:3 } } #[test] fn labels_v() { assert_lex! { "var void v", VAR_KW:3, WHITESPACE:1, VOID_KW:4, WHITESPACE:1, IDENT:1 } assert_lex! { "variable voiding is bad", IDENT:8, WHITESPACE:1, IDENT:7, WHITESPACE:1, IS_KW:2, WHITESPACE:1, IDENT:3 } } #[test] fn labels_w() { assert_lex! { "with while w", WITH_KW:4, WHITESPACE:1, WHILE_KW:5, WHITESPACE:1, IDENT:1 } assert_lex! { "whiley withow", IDENT:6, WHITESPACE:1, IDENT:6 } } #[test] fn labels_y() { assert_lex! { "yield", YIELD_KW:5 } assert_lex! { "yielding", IDENT:8 } } #[test] fn number_basic() { assert_lex! { "1", JS_NUMBER_LITERAL:1 } assert_lex! { "123456 ", JS_NUMBER_LITERAL:6, WHITESPACE:1 } assert_lex! { "90", JS_NUMBER_LITERAL:2 } assert_lex! { ".13", JS_NUMBER_LITERAL:3 } } #[test] fn number_basic_err() { assert_lex! { "2_?", JS_NUMBER_LITERAL:2, // numeric separator error QUESTION:1 } assert_lex! { r#"25\u0046abcdef"#, ERROR_TOKEN:14 } assert_lex! { r#"25\uFEFFb"#, JS_NUMBER_LITERAL:2, ERROR_TOKEN:6, IDENT:1 } assert_lex! { r#".32\u0046abde"#, ERROR_TOKEN:13 } assert_lex! { r#".0_e1"#, // numeric separator error JS_NUMBER_LITERAL:5, } assert_lex! { r#"10e_1"#, // numeric separator error ERROR_TOKEN:5 } } #[test] fn number_leading_zero_err() { assert_lex! { r#"0_0"#, JS_NUMBER_LITERAL:3 // error: numeric separator can not be used after leading 0 } assert_lex! { r#"01.1"#, JS_NUMBER_LITERAL:4, // error: unexpected number } assert_lex! 
{ r#"01n"#, JS_NUMBER_LITERAL:3 // error: Octal literals are not allowed for BigInts. } } #[test] fn number_complex() { assert_lex! { "3e-5 123e+56", JS_NUMBER_LITERAL:4, WHITESPACE:1, JS_NUMBER_LITERAL:7 } assert_lex! { "3.14159e+1", JS_NUMBER_LITERAL:10 } assert_lex! { ".0e34", JS_NUMBER_LITERAL:5 } assert_lex! { "0e00", JS_NUMBER_LITERAL:4 } } #[test] fn dot_number_disambiguation() { assert_lex! { ".e+5", DOT:1, IDENT:1, PLUS:1, JS_NUMBER_LITERAL:1 } assert_lex! { ".0e+5", JS_NUMBER_LITERAL:5 } } #[test] fn binary_literals() { assert_lex! { "0b10101010, 0B10101010, 0b10101010n", JS_NUMBER_LITERAL:10, COMMA:1, WHITESPACE:1, JS_NUMBER_LITERAL:10, COMMA:1, WHITESPACE:1, JS_NUMBER_LITERAL:11 } } #[test] fn octal_literals() { assert_lex! { "0o01742242, 0B10101010, 0b10101010n", JS_NUMBER_LITERAL:10, COMMA:1, WHITESPACE:1, JS_NUMBER_LITERAL:10, COMMA:1, WHITESPACE:1, JS_NUMBER_LITERAL:11 } } #[test] fn bigint_literals() { assert_lex! { "0n 1743642n 1n", JS_NUMBER_LITERAL:2, WHITESPACE:1, JS_NUMBER_LITERAL:8, WHITESPACE:1, JS_NUMBER_LITERAL:2 } } #[test] fn shebang() { assert_lex! { "#! /bin/node", JS_SHEBANG:12 } assert_lex! { "#!/bin/node\n", JS_SHEBANG:11, NEWLINE:1 } assert_lex! { "#!/bin/node\r\n", JS_SHEBANG:11, NEWLINE:2 } assert_lex! { "#!/usr/bin/env deno\u{2028}", JS_SHEBANG:19, NEWLINE:3 } assert_lex! { "#0", ERROR_TOKEN:1, JS_NUMBER_LITERAL:1 } assert_lex! { "0#!/bin/deno", JS_NUMBER_LITERAL:1, HASH:1, BANG:1, SLASH:1, IDENT:3, SLASH:1, IDENT:4, } } #[test] fn single_line_comments() { assert_lex! { "//abc ", COMMENT:5, NEWLINE:1, WHITESPACE:4 } assert_lex! { "//a", COMMENT:3 } } #[test] fn block_comment() { assert_lex! { "/* */", MULTILINE_COMMENT:13 } assert_lex! { "/* */", COMMENT:5 } assert_lex! { "/* *", COMMENT:4 } } #[test] fn division() { assert_lex! 
{ "var a = 5 / 6", VAR_KW:3, WHITESPACE:1, IDENT:1, WHITESPACE:1, EQ:1, WHITESPACE:1, JS_NUMBER_LITERAL:1, WHITESPACE:1, SLASH:1, WHITESPACE:1, JS_NUMBER_LITERAL:1 } } #[test] fn fuzz_fail_1() { assert_lex! { "$\\u", IDENT:1, ERROR_TOKEN:2 } } #[test] fn fuzz_fail_2() { assert_lex! { "..", DOT:1, DOT:1 } } #[test] fn fuzz_fail_3() { assert_lex! { "0e", ERROR_TOKEN:2 } } #[test] fn fuzz_fail_4() { assert_lex! { "0o 0b 0x", ERROR_TOKEN:2, WHITESPACE:1, ERROR_TOKEN:2, WHITESPACE:1, ERROR_TOKEN:2 } } #[test] fn fuzz_fail_5() { assert_lex! { "//\u{2028}", COMMENT:2, NEWLINE:3 } } #[test] fn fuzz_fail_6() { assert_lex! { "//\u{200a}", COMMENT:5 } } #[test] fn unicode_ident_start_handling() { assert_lex! { "αβeta_tester", IDENT:14 } } #[test] fn unicode_ident_separated_by_unicode_whitespace() { assert_lex! { "β\u{FEFF}α", IDENT:2, WHITESPACE:3, IDENT:2 } } #[test] fn err_on_unterminated_unicode() { assert_lex! { "+\\u{A", PLUS:1 ERROR_TOKEN:4 } } #[test] fn issue_30() { assert_lex! { "let foo = { α: true }", LET_KW:3, WHITESPACE:1, IDENT:3, WHITESPACE:1, EQ:1, WHITESPACE:1, L_CURLY:1, WHITESPACE:1, IDENT:2, COLON:1, WHITESPACE:1, TRUE_KW:4, WHITESPACE:1, R_CURLY:1 } } #[test] fn at_token() { assert_lex! { "@", AT:1 } assert_lex! { "@foo", AT:1, IDENT:3 } } #[test] fn object_expr_getter() { assert_lex! { "({ get [foo]() {} })", L_PAREN:1 L_CURLY:1 WHITESPACE:1 GET_KW:3 WHITESPACE:1 L_BRACK:1 IDENT:3 R_BRACK:1 L_PAREN:1 R_PAREN:1 WHITESPACE:1 L_CURLY:1 R_CURLY:1 WHITESPACE:1 R_CURLY:1 R_PAREN:1 } } #[test] fn newline_space_must_be_two_tokens() { assert_lex! { "\n ", NEWLINE:1 WHITESPACE:1 } assert_lex! { " \n", WHITESPACE:1 NEWLINE:1 } assert_lex! { " \n ", WHITESPACE:1 NEWLINE:1 WHITESPACE:1 } assert_lex! { " a\n b \n ", WHITESPACE:1 IDENT:1 NEWLINE:1 WHITESPACE:1 IDENT:1 WHITESPACE:1 NEWLINE:1 WHITESPACE:1 } assert_lex! 
{ "a //COMMENT \n /*COMMENT*/ b /*COM\nMENT*/", IDENT:1 WHITESPACE:1 COMMENT:10 NEWLINE:1 WHITESPACE:1 COMMENT:11 WHITESPACE:1 IDENT:1 WHITESPACE:1 MULTILINE_COMMENT:12 } assert_lex! { "a //COMMENT \n /*COMMENT*/ b /*COM\nMENT*/", IDENT:1 WHITESPACE:1 COMMENT:10 NEWLINE:1 WHITESPACE:1 COMMENT:11 WHITESPACE:1 IDENT:1 WHITESPACE:1 MULTILINE_COMMENT:12 } //Now with CR assert_lex! { "\r\n ", NEWLINE:2 WHITESPACE:1 } assert_lex! { " \r\n", WHITESPACE:1 NEWLINE:2 } assert_lex! { " \r\n ", WHITESPACE:1 NEWLINE:2 WHITESPACE:1 } assert_lex! { " a\r\n b \r\n ", WHITESPACE:1 IDENT:1 NEWLINE:2 WHITESPACE:1 IDENT:1 WHITESPACE:1 NEWLINE:2 WHITESPACE:1 } assert_lex! { "a //COMMENT \r\n /*COMMENT*/ b /*COM\r\nMENT*/", IDENT:1 WHITESPACE:1 COMMENT:10 NEWLINE:2 WHITESPACE:1 COMMENT:11 WHITESPACE:1 IDENT:1 WHITESPACE:1 MULTILINE_COMMENT:13 } assert_lex! { "a //COMMENT \r\n /*COMMENT*/ b /*COM\r\nMENT*/", IDENT:1 WHITESPACE:1 COMMENT:10 NEWLINE:2 WHITESPACE:1 COMMENT:11 WHITESPACE:1 IDENT:1 WHITESPACE:1 MULTILINE_COMMENT:13 } } #[test] fn are_we_jsx() { assert_lex! 
{ r#"<some-div>{"Hey" + 1 + fn()}</some-div>"#, L_ANGLE:1 IDENT:4 MINUS:1 IDENT:3 R_ANGLE:1 L_CURLY:1 JS_STRING_LITERAL:5 WHITESPACE:1 PLUS:1 WHITESPACE:1 JS_NUMBER_LITERAL:1 WHITESPACE:1 PLUS:1 WHITESPACE:1 IDENT:2 L_PAREN:1 R_PAREN:1 R_CURLY:1 L_ANGLE:1 SLASH:1 IDENT:4 MINUS:1 IDENT:3 R_ANGLE:1 } } #[test] fn keywords() { let keywords = vec![ "break", "case", "catch", "class", "const", "continue", "debugger", "default", "delete", "do", "else", "enum", "export", "extends", "false", "finally", "for", "function", "if", "in", "instanceof", "import", "new", "null", "return", "super", "switch", "this", "throw", "try", "true", "typeof", "var", "void", "while", "with", // Strict mode contextual keywords "implements", "interface", "let", "package", "private", "protected", "public", "static", "yield", // contextual keywords "abstract", "as", "asserts", "assert", "any", "async", "await", "boolean", "constructor", "declare", "get", "infer", "is", "keyof", "module", "namespace", "never", "readonly", "require", "number", "object", "set", "string", "symbol", "type", "undefined", "unique", "unknown", "from", "global", "bigint", "override", "of", ]; for keyword in keywords { let kind = JsSyntaxKind::from_keyword(keyword).expect( "Expected `JsSyntaxKind::from_keyword` to return a kind for keyword {keyword}.", ); let mut lexer = Lexer::from_str(keyword); lexer.next_token(LexContext::default()); let lexed_kind = lexer.current(); assert_eq!( lexed_kind, kind, "Expected token '{keyword}' to be of kind {:?} but is {:?}.", kind, lexed_kind ); let lexed_range = lexer.current_range(); assert_eq!( lexed_range.len(), TextSize::from(keyword.len() as u32), "Expected lexed keyword to be of len {} but has length {:?}", keyword.len(), lexed_range.len() ); assert_eq!(lexer.next_token(LexContext::default()), EOF); } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_js_parser/src/lexer/mod.rs
crates/rome_js_parser/src/lexer/mod.rs
//! An extremely fast, lookup table based, ECMAScript lexer which yields SyntaxKind tokens used by the rome-js parser. //! For the purposes of error recovery, tokens may have an error attached to them, which is reflected in the Iterator Item. //! The lexer will also yield `COMMENT` and `WHITESPACE` tokens. //! //! The lexer operates on raw bytes to take full advantage of lookup table optimizations, these bytes **must** be valid utf8, //! therefore making a lexer from a `&[u8]` is unsafe since you must make sure the bytes are valid utf8. //! Do not use this to learn how to lex JavaScript, this is just needlessly fast and demonic because i can't control myself :) //! //! basic ANSI syntax highlighting is also offered through the `highlight` feature. //! //! # Warning ⚠️ //! //! `>>` and `>>>` are not emitted as single tokens, they are emitted as multiple `>` tokens. This is because of //! TypeScript parsing and productions such as `T<U<N>>` #![allow(clippy::or_fun_call)] #[rustfmt::skip] mod errors; mod tests; pub mod buffered_lexer; #[cfg(feature = "highlight")] mod highlight; use bitflags::bitflags; #[cfg(feature = "highlight")] pub use highlight::*; pub(crate) use buffered_lexer::BufferedLexer; use rome_js_syntax::JsSyntaxKind::*; pub use rome_js_syntax::*; use rome_js_unicode_table::{ is_id_continue, is_id_start, lookup_byte, Dispatch::{self, *}, }; use rome_parser::diagnostic::ParseDiagnostic; use self::errors::invalid_digits_after_unicode_escape_sequence; // The first utf8 byte of every valid unicode whitespace char, used for short circuiting whitespace checks const UNICODE_WHITESPACE_STARTS: [u8; 5] = [ // NBSP 0xC2, // BOM 0xEF, // Ogham space mark 0xE1, // En quad .. 
Hair space, narrow no break space, mathematical space 0xE2, // Ideographic space 0xE3, ]; // Unicode spaces, designated by the `Zs` unicode property const UNICODE_SPACES: [char; 19] = [ '\u{0020}', '\u{00A0}', '\u{1680}', '\u{2000}', '\u{2001}', '\u{2002}', '\u{2003}', '\u{2004}', '\u{2005}', '\u{2006}', '\u{2007}', '\u{2008}', '\u{2009}', '\u{200A}', '\u{200B}', '\u{202F}', '\u{205F}', '\u{3000}', '\u{FEFF}', ]; /// Context in which the lexer should lex the next token #[derive(Debug, Copy, Clone, Eq, PartialEq, Default)] pub enum LexContext { /// Default context for if the lexer isn't in any specific other context #[default] Regular, /// For lexing the elements of a JS template literal or TS template type. /// Doesn't skip whitespace trivia. TemplateElement { tagged: bool }, /// Lexes a token in a JSX children context. /// Returns one of /// - Whitespace trivia /// - JsxText /// - `<` end of the current element, or start of a new element /// - expression start: `{` /// - EOF JsxChild, /// Lexes a JSX Attribute value. Calls into normal lex token if positioned at anything /// that isn't `'` or `"`. JsxAttributeValue, } impl LexContext { /// Returns true if this is [LexContext::Regular] pub fn is_regular(&self) -> bool { matches!(self, LexContext::Regular) } } /// Context in which the [LexContext]'s current should be re-lexed. #[derive(Debug, Copy, Clone, Eq, PartialEq)] pub enum ReLexContext { /// Re-lexes a `/` or `/=` token as a regular expression. Regex, /// Re-lexes /// * `> >` as `>>` /// * `> > >` as `>>>`, /// * `> =` as '>=' /// * `> > =` as '>>=' /// * `> > > =` as `>>>=` BinaryOperator, /// Re-lexes `'<', '<'` as `<<` in places where a type argument is expected to support /// `B<<A>()>` TypeArgumentLessThan, /// Re-lexes an identifier or keyword as a JSX identifier (that allows `-` tokens) JsxIdentifier, /// See [LexContext::JsxChild] JsxChild, } bitflags! { /// Flags for a lexed token. 
#[derive(Debug, Copy, Clone, Eq, PartialEq)] pub(crate) struct TokenFlags: u8 { /// Indicates that there has been a line break between the last non-trivia token const PRECEDING_LINE_BREAK = 1 << 0; /// Indicates that an identifier contains an unicode escape sequence const UNICODE_ESCAPE = 1 << 1; } } impl TokenFlags { pub const fn has_preceding_line_break(&self) -> bool { self.contains(TokenFlags::PRECEDING_LINE_BREAK) } pub const fn has_unicode_escape(&self) -> bool { self.contains(TokenFlags::UNICODE_ESCAPE) } } /// An extremely fast, lookup table based, lossless ECMAScript lexer #[derive(Debug)] pub(crate) struct Lexer<'src> { /// Source text source: &'src str, /// The start byte position in the source text of the next token. position: usize, /// `true` if there has been a line break between the last non-trivia token and the next non-trivia token. after_newline: bool, /// Byte offset of the current token from the start of the source /// The range of the current token can be computed by `self.position - self.current_start` current_start: TextSize, /// The kind of the current token current_kind: JsSyntaxKind, /// Flags for the current token current_flags: TokenFlags, diagnostics: Vec<ParseDiagnostic>, } impl<'src> Lexer<'src> { /// Make a new lexer from a str, this is safe because strs are valid utf8 pub fn from_str(string: &'src str) -> Self { Self { source: string, after_newline: false, current_kind: TOMBSTONE, current_start: TextSize::from(0), current_flags: TokenFlags::empty(), position: 0, diagnostics: vec![], } } /// Returns the source code pub fn source(&self) -> &'src str { self.source } /// Returns the kind of the current token #[inline] pub const fn current(&self) -> JsSyntaxKind { self.current_kind } /// Returns the range of the current token (The token that was lexed by the last `next` call) #[inline] pub fn current_range(&self) -> TextRange { TextRange::new(self.current_start, TextSize::from(self.position as u32)) } /// Returns true if a line break 
precedes the current token. #[inline] pub const fn has_preceding_line_break(&self) -> bool { self.current_flags.has_preceding_line_break() } /// Returns `true` if the current token is an identifier and it contains a unicode escape sequence (`\u...`). #[inline] pub const fn has_unicode_escape(&self) -> bool { self.current_flags.has_unicode_escape() } /// Creates a checkpoint storing the current lexer state. /// /// Use `rewind` to restore the lexer to the state stored in the checkpoint. pub fn checkpoint(&self) -> LexerCheckpoint { LexerCheckpoint { position: TextSize::from(self.position as u32), current_start: self.current_start, current_flags: self.current_flags, current_kind: self.current_kind, after_line_break: self.after_newline, diagnostics_pos: self.diagnostics.len() as u32, } } /// Rewinds the lexer to the same state as when the passed in `checkpoint` was created. pub fn rewind(&mut self, checkpoint: LexerCheckpoint) { let LexerCheckpoint { position, current_start, current_flags, current_kind, after_line_break, diagnostics_pos, } = checkpoint; let new_pos = u32::from(position) as usize; self.position = new_pos; self.current_kind = current_kind; self.current_start = current_start; self.current_flags = current_flags; self.after_newline = after_line_break; self.diagnostics.truncate(diagnostics_pos as usize); } pub fn finish(self) -> Vec<ParseDiagnostic> { self.diagnostics } /// Lexes the next token. /// /// ## Return /// Returns its kind and any potential error. 
pub fn next_token(&mut self, context: LexContext) -> JsSyntaxKind { self.current_start = TextSize::from(self.position as u32); self.current_flags = TokenFlags::empty(); let kind = if self.is_eof() { EOF } else { match context { LexContext::Regular => self.lex_token(), LexContext::TemplateElement { tagged } => self.lex_template(tagged), LexContext::JsxChild => self.lex_jsx_child_token(), LexContext::JsxAttributeValue => self.lex_jsx_attribute_value(), } }; self.current_flags .set(TokenFlags::PRECEDING_LINE_BREAK, self.after_newline); self.current_kind = kind; if !kind.is_trivia() { self.after_newline = false; } kind } /// Lexes the current token again under the passed [ReLexContext]. /// Useful in case a token can have different meaning depending on the context. /// /// For example, a `/` must either be lexed as a `/` token or as a regular expression if it /// appears at the start of an expression. Re-lexing allows to always lex the `/` as a `/` token and /// call into `re_lex` when the parser is at a valid regular expression position, to see if the /// current token can be lexed out as a regular expression literal. /// /// ## Returns /// The new token kind and any associated diagnostic if current token has a different meaning under /// the passed [ReLexContext]. /// /// Returns the current kind without any diagnostic if not. Any cached lookahead remains valid in that case. 
pub fn re_lex(&mut self, context: ReLexContext) -> JsSyntaxKind { let old_position = self.position; self.position = u32::from(self.current_start) as usize; let re_lexed_kind = match context { ReLexContext::Regex if matches!(self.current(), T![/] | T![/=]) => self.read_regex(), ReLexContext::BinaryOperator => self.re_lex_binary_operator(), ReLexContext::TypeArgumentLessThan => self.re_lex_type_argument_less_than(), ReLexContext::JsxIdentifier => self.re_lex_jsx_identifier(old_position), ReLexContext::JsxChild if !self.is_eof() => self.lex_jsx_child_token(), _ => self.current(), }; if self.current() == re_lexed_kind { // Didn't re-lex anything. Return existing token again self.position = old_position; } else { self.current_kind = re_lexed_kind; } re_lexed_kind } fn re_lex_binary_operator(&mut self) -> JsSyntaxKind { if self.current_byte() == Some(b'>') { match self.next_byte() { Some(b'>') => match self.next_byte() { Some(b'>') => match self.next_byte() { Some(b'=') => self.eat_byte(T![>>>=]), _ => T![>>>], }, Some(b'=') => self.eat_byte(T![>>=]), _ => T![>>], }, Some(b'=') => self.eat_byte(T![>=]), _ => T![>], } } else { self.current_kind } } fn re_lex_type_argument_less_than(&mut self) -> JsSyntaxKind { if self.current() == T![<<] { self.advance(1); T![<] } else { self.current() } } fn re_lex_jsx_identifier(&mut self, current_end: usize) -> JsSyntaxKind { if self.current_kind.is_keyword() || self.current_kind == T![ident] { self.position = current_end; while let Some(current_byte) = self.current_byte() { match current_byte { b'-' => { self.advance(1); } b':' => { break; } _ => { let start = self.position; // consume ident advances by one position, so move back by one self.position -= 1; self.consume_ident(); // Didn't eat any identifier parts, break out if start == self.position { self.position = start; break; } } } } JSX_IDENT } else { self.current_kind } } fn lex_jsx_child_token(&mut self) -> JsSyntaxKind { debug_assert!(!self.is_eof()); // SAFETY: `lex_token` 
only calls this method if it isn't passed the EOF let chr = unsafe { self.current_unchecked() }; match chr { // `<`: empty jsx text, directly followed by another element or closing element b'<' => self.eat_byte(T![<]), // `{`: empty jsx text, directly followed by an expression b'{' => self.eat_byte(T!['{']), _ => { while let Some(chr) = self.current_byte() { // but not one of: { or < or > or } match chr { // Start of a new element, the closing tag, or an expression b'<' | b'{' => break, b'>' => { self.diagnostics.push(ParseDiagnostic::new( "Unexpected token. Did you mean `{'>'}` or `&gt;`?", self.position..self.position + 1, )); self.advance(1); } b'}' => { self.diagnostics.push(ParseDiagnostic::new( "Unexpected token. Did you mean `{'}'}` or `&rbrace;`?", self.position..self.position + 1, )); self.advance(1); } chr => { if chr.is_ascii() { self.advance(1); } else { self.advance_char_unchecked(); } } } } JSX_TEXT_LITERAL } } } fn lex_jsx_attribute_value(&mut self) -> JsSyntaxKind { debug_assert!(!self.is_eof()); // Safety: Guaranteed because we aren't at the end of the file let chr = unsafe { self.current_unchecked() }; match chr { b'\'' | b'"' => { self.consume_str_literal(true); JSX_STRING_LITERAL } _ => self.lex_token(), } } /// Bumps the current byte and creates a lexed token of the passed in kind fn eat_byte(&mut self, tok: JsSyntaxKind) -> JsSyntaxKind { self.next_byte(); tok } /// Consume just one newline/line break. /// /// ## Safety /// Must be called at a valid UT8 char boundary fn consume_newline(&mut self) -> bool { self.assert_at_char_boundary(); let start = self.position; match self.current_byte() { Some(b'\r') if self.peek_byte() == Some(b'\n') => self.advance(2), Some(b'\r' | b'\n') => self.advance(1), Some(chr) if !chr.is_ascii() => { let chr = self.current_char_unchecked(); if is_linebreak(chr) { self.advance(chr.len_utf8()); } } _ => {} } self.position != start } /// Consumes all whitespace until a non-whitespace or a newline is found. 
/// /// ## Safety /// Must be called at a valid UT8 char boundary fn consume_whitespaces(&mut self) { self.assert_at_char_boundary(); while let Some(chr) = self.current_byte() { match lookup_byte(chr) { Dispatch::WHS => { if let b'\r' | b'\n' = chr { break; } else { self.next_byte(); } } Dispatch::UNI => { let chr = self.current_char_unchecked(); if UNICODE_SPACES.contains(&chr) { self.advance(chr.len_utf8()); } else { break; } } _ => break, } } } /// Consume one newline or all whitespace until a non-whitespace or a newline is found. /// /// ## Safety /// Must be called at a valid UT8 char boundary fn consume_newline_or_whitespaces(&mut self) -> JsSyntaxKind { if self.consume_newline() { self.after_newline = true; NEWLINE } else { self.consume_whitespaces(); WHITESPACE } } /// Get the UTF8 char which starts at the current byte /// /// ## Safety /// Must be called at a valid UT8 char boundary fn current_char_unchecked(&self) -> char { // Precautionary measure for making sure the unsafe code below does not read over memory boundary debug_assert!(!self.is_eof()); self.assert_at_char_boundary(); // Safety: We know this is safe because we require the input to the lexer to be valid utf8 and we always call this when we are at a char let string = unsafe { std::str::from_utf8_unchecked(self.source.as_bytes().get_unchecked(self.position..)) }; let chr = if let Some(chr) = string.chars().next() { chr } else { // Safety: we always call this when we are at a valid char, so this branch is completely unreachable unsafe { core::hint::unreachable_unchecked(); } }; chr } /// Gets the current byte. /// /// ## Returns /// The current byte if the lexer isn't at the end of the file. 
#[inline] fn current_byte(&self) -> Option<u8> { if self.is_eof() { None } else { Some(self.source.as_bytes()[self.position]) } } /// Asserts that the lexer is at a UTF8 char boundary #[inline] fn assert_at_char_boundary(&self) { debug_assert!(self.source.is_char_boundary(self.position)); } /// Asserts that the lexer is currently positioned at `byte` #[inline] fn assert_byte(&self, byte: u8) { debug_assert_eq!(self.source.as_bytes()[self.position], byte); } /// Returns the current byte without checking if the lexer is at the end of the file. /// /// ## Safety /// Calling this function if the lexer is at or passed the end of file is undefined behaviour. #[inline] unsafe fn current_unchecked(&self) -> u8 { self.assert_at_char_boundary(); *self.source.as_bytes().get_unchecked(self.position) } /// Advances the position by one and returns the next byte value #[inline] fn next_byte(&mut self) -> Option<u8> { self.advance(1); self.current_byte() } /// Get the next byte but only advance the index if there is a next byte. /// This is really just a hack for certain methods like escapes #[inline] fn next_byte_bounded(&mut self) -> Option<u8> { if let Some(b) = self.source.as_bytes().get(self.position + 1) { self.advance(1); Some(*b) } else { if !self.is_eof() { // Move the cursor by one to position the Lexer at the EOF token self.advance(1); } None } } /// Peeks at the next byte #[inline] fn peek_byte(&self) -> Option<u8> { self.byte_at(1) } /// Returns the byte at position `self.position + offset` or `None` if it is out of bounds. #[inline] fn byte_at(&self, offset: usize) -> Option<u8> { self.source.as_bytes().get(self.position + offset).copied() } /// Advances the current position by `n` bytes. 
#[inline] fn advance(&mut self, n: usize) { self.position += n; } #[inline] fn advance_byte_or_char(&mut self, chr: u8) { if chr.is_ascii() { self.advance(1); } else { self.advance_char_unchecked(); } } /// Advances the current position by the current char UTF8 length /// /// ## Safety /// Must be called at a valid UT8 char boundary #[inline] fn advance_char_unchecked(&mut self) { let c = self.current_char_unchecked(); self.position += c.len_utf8(); } /// Returns `true` if the parser is at or passed the end of the file. #[inline] fn is_eof(&self) -> bool { self.position >= self.source.len() } // Read a `\u{000...}` escape sequence, this expects the cur char to be the `{` fn read_codepoint_escape(&mut self) -> Result<char, ()> { let start = self.position + 1; self.read_hexnumber(); let current_byte = self.current_byte(); // Abort on EOF if current_byte.is_none() { return Err(()); } if current_byte != Some(b'}') { // We should not yield diagnostics on a unicode char boundary. That wont make codespan panic // but it may cause a panic for other crates which just consume the diagnostics let invalid = self.current_char_unchecked(); let err = ParseDiagnostic::new( "expected hex digits for a unicode code point escape, but encountered an invalid character", self.position..self.position + invalid.len_utf8() ); self.diagnostics.push(err); self.position -= 1; return Err(()); } // Safety: We know for a fact this is in bounds because we must be on the possible char after the } at this point // which means its impossible for the range of the digits to be out of bounds. // We also know we cant possibly be indexing a unicode char boundary because a unicode char (which cant be a hexdigit) // would have triggered the if statement above. 
We also know this must be valid utf8, both because of read_hexnumber's behavior // and because input to the lexer must be valid utf8 let digits_str = unsafe { debug_assert!(self.source.as_bytes().get(start..self.position).is_some()); debug_assert!(std::str::from_utf8( self.source.as_bytes().get_unchecked(start..self.position) ) .is_ok()); std::str::from_utf8_unchecked( self.source.as_bytes().get_unchecked(start..self.position), ) }; match u32::from_str_radix(digits_str, 16) { Ok(digits) if digits <= 0x10FFFF => { let res = std::char::from_u32(digits); if let Some(chr) = res { Ok(chr) } else { let err = ParseDiagnostic::new( "invalid codepoint for unicode escape", start..self.position, ); self.diagnostics.push(err); Err(()) } } _ => { let err = ParseDiagnostic::new( "out of bounds codepoint for unicode codepoint escape sequence", start..self.position, ) .hint("Codepoints range from 0 to 0x10FFFF (1114111)"); self.diagnostics.push(err); Err(()) } } } // Read a `\u0000` escape sequence, this expects the current char to be the `u`, it also does not skip over the escape sequence // The pos after this method is the last hex digit fn read_unicode_escape(&mut self, advance: bool) -> Result<char, ()> { self.assert_byte(b'u'); for idx in 0..4 { match self.next_byte_bounded() { None => { if !advance { self.position -= idx + 1; } let err = invalid_digits_after_unicode_escape_sequence( self.position - 1, self.position + 1, ); self.diagnostics.push(err); return Err(()); } Some(b) if !b.is_ascii_hexdigit() => { let err = invalid_digits_after_unicode_escape_sequence( self.position - 1, self.position + 1, ); if !advance { self.position -= idx + 1; } self.diagnostics.push(err); return Err(()); } _ => {} } } unsafe { // Safety: input to the lexer is guaranteed to be valid utf8 and so is the range since we return if there is a wrong amount of digits beforehand let digits_str = std::str::from_utf8_unchecked( self.source .as_bytes() .get_unchecked((self.position - 3)..(self.position + 
1)), ); if let Ok(digits) = u32::from_str_radix(digits_str, 16) { if !advance { self.position -= 4; } Ok(std::char::from_u32_unchecked(digits)) } else { // Safety: we know this is unreachable because 4 hexdigits cannot make an out of bounds char, // and we make sure that the chars are actually hex digits core::hint::unreachable_unchecked(); } } } // Validate a `\x00 escape sequence, this expects the current char to be the `x`, it also does not skip over the escape sequence // The pos after this method is the last hex digit fn validate_hex_escape(&mut self) -> bool { self.assert_byte(b'x'); let diagnostic = ParseDiagnostic::new( "invalid digits after hex escape sequence", (self.position - 1)..(self.position + 1), ) .hint("Expected 2 hex digits following this"); for _ in 0..2 { match self.next_byte_bounded() { None => { self.diagnostics.push(diagnostic); return false; } Some(b) if !b.is_ascii_hexdigit() => { self.diagnostics.push(diagnostic); return false; } _ => {} } } true } /// Consume a `\..` escape sequence. 
/// /// ## Safety /// Must be called at a valid UT8 char boundary fn consume_escape_sequence(&mut self) -> bool { self.assert_at_char_boundary(); self.assert_byte(b'\\'); let cur = self.position; self.advance(1); // eats '\' if let Some(chr) = self.current_byte() { match chr { b'\\' | b'n' | b'r' | b't' | b'b' | b'v' | b'f' | b'\'' | b'"' => { self.advance(1); true } b'u' if self.peek_byte() == Some(b'{') => { self.advance(1); // eats '{' self.read_codepoint_escape().is_ok() } b'u' => self.read_unicode_escape(true).is_ok(), b'x' => self.validate_hex_escape(), b'\r' => { if let Some(b'\n') = self.next_byte() { self.advance(1); } true } chr => { self.advance_byte_or_char(chr); true } } } else { self.diagnostics.push( ParseDiagnostic::new("", cur..cur + 1) .hint("expected an escape sequence following a backslash, but found none"), ); false } } // Consume an identifier by recursively consuming IDENTIFIER_PART kind chars #[inline] fn consume_ident(&mut self) { loop { if self.next_byte_bounded().is_none() || self.cur_ident_part().is_none() { break; } } } /// Consumes the identifier at the current position, and fills the given buf with the UTF-8 /// encoded identifier that got consumed. /// /// Returns the number of bytes written into the buffer, and if any char was escaped. /// This method will stop writing into the buffer if the buffer is too small to /// fit the whole identifier. 
#[inline] fn consume_and_get_ident(&mut self, buf: &mut [u8]) -> (usize, bool) { let mut idx = 0; let mut any_escaped = false; while self.next_byte_bounded().is_some() { if let Some((c, escaped)) = self.cur_ident_part() { if let Some(buf) = buf.get_mut(idx..idx + 4) { let res = c.encode_utf8(buf); idx += res.len(); any_escaped |= escaped; } } else { return (idx, any_escaped); } } (idx, any_escaped) } /// Consume a string literal and advance the lexer, and returning a list of errors that occurred when reading the string /// This could include unterminated string and invalid escape sequences /// /// ## Safety /// Must be called at a valid UT8 char boundary fn consume_str_literal(&mut self, jsx_attribute: bool) -> bool { self.assert_at_char_boundary(); let quote = unsafe { self.current_unchecked() }; let start = self.position; let mut valid = true; self.advance(1); // eats the start quote while let Some(chr) = self.current_byte() { match chr { b'\\' if !jsx_attribute => { valid &= self.consume_escape_sequence(); } b'\r' | b'\n' if !jsx_attribute => { let unterminated = ParseDiagnostic::new("unterminated string literal", start..self.position) .detail(start..self.position, "") .detail(self.position..self.position + 2, "line breaks here"); self.diagnostics.push(unterminated); return false; } chr if chr == quote => { self.advance(1); return valid; } chr => { if chr.is_ascii() { self.advance(1); } else { self.advance_char_unchecked(); } } } } let unterminated = ParseDiagnostic::new("unterminated string literal", self.position..self.position) .detail(self.position..self.position, "input ends here") .detail(start..start + 1, "string literal starts here"); self.diagnostics.push(unterminated); false } /// Returns `Some(x)` if the current position is an identifier, with the character at /// the position. /// /// Boolean states if there are escaped characters. /// /// The character may be a char that was generated from a unicode escape sequence, /// e.g. 
`t` is returned, the actual source code is `\u{74}` #[inline] fn cur_ident_part(&mut self) -> Option<(char, bool)> { debug_assert!(!self.is_eof()); // Safety: we always call this method on a char let b = unsafe { self.current_unchecked() }; match lookup_byte(b) { IDT | DIG | ZER => Some((b as char, false)), // FIXME: This should use ID_Continue, not XID_Continue UNI => { let chr = self.current_char_unchecked(); let res = is_id_continue(chr); if res { self.advance(chr.len_utf8() - 1); Some((chr, false)) } else { None } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
true
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_formatter_test/src/snapshot_builder.rs
crates/rome_formatter_test/src/snapshot_builder.rs
use rome_diagnostics::console::fmt::{Formatter, Termcolor}; use rome_diagnostics::console::markup; use rome_diagnostics::PrintDiagnostic; use rome_diagnostics::{termcolor, DiagnosticExt}; use rome_formatter::Printed; use rome_parser::AnyParse; use std::ffi::OsStr; use std::fmt; use std::fmt::Write; use std::path::Path; #[derive(serde::Serialize)] struct TestInfo { test_file: String, } pub struct SnapshotOutput<'a> { content: &'a str, index: Option<usize>, } impl<'a> SnapshotOutput<'a> { pub fn new(content: &'a str) -> Self { SnapshotOutput { content, index: None, } } pub fn with_index(mut self, index: usize) -> Self { self.index = Some(index); self } } pub struct SnapshotBuilder<'a> { input_file: &'a Path, snapshot: String, } impl<'a> SnapshotBuilder<'a> { pub fn new(input_file: &'a Path) -> Self { SnapshotBuilder { input_file, snapshot: String::new(), } } pub fn with_input(mut self, input: &str) -> Self { writeln!(self.snapshot).unwrap(); writeln!(self.snapshot, "# Input").unwrap(); writeln!(self.snapshot).unwrap(); self.write_extension(); self.snapshot.push_str(input); writeln!(self.snapshot).unwrap(); writeln!(self.snapshot, "```").unwrap(); writeln!(self.snapshot).unwrap(); writeln!(self.snapshot).unwrap(); self } pub fn with_separator(mut self) -> Self { writeln!(self.snapshot, "=============================").unwrap(); writeln!(self.snapshot).unwrap(); self } pub fn with_prettier_diff(mut self, prettier_diff: &str) -> Self { writeln!(self.snapshot, "# Prettier differences").unwrap(); writeln!(self.snapshot).unwrap(); writeln!(self.snapshot, "```diff").unwrap(); self.snapshot.push_str(prettier_diff); writeln!(self.snapshot, "```").unwrap(); writeln!(self.snapshot).unwrap(); self } pub fn with_multiple_outputs(mut self) -> Self { writeln!(self.snapshot, "# Outputs").unwrap(); writeln!(self.snapshot).unwrap(); self } pub fn with_output_and_options<T>(mut self, output: SnapshotOutput, options: T) -> Self where T: fmt::Display, { self.write_output_header(&output); 
writeln!(self.snapshot).unwrap(); writeln!(self.snapshot, "-----").unwrap(); write!(self.snapshot, "{}", options).unwrap(); writeln!(self.snapshot, "-----").unwrap(); writeln!(self.snapshot).unwrap(); self.write_extension(); self.snapshot.push_str(output.content); writeln!(self.snapshot, "```").unwrap(); writeln!(self.snapshot).unwrap(); self } pub fn with_output(mut self, output: SnapshotOutput) -> Self { self.write_output_header(&output); writeln!(self.snapshot).unwrap(); self.write_extension(); self.snapshot.push_str(output.content); writeln!(self.snapshot, "```").unwrap(); writeln!(self.snapshot).unwrap(); self } pub fn with_unimplemented(mut self, formatted: &Printed) -> Self { if !formatted.verbatim_ranges().is_empty() { writeln!(self.snapshot).unwrap(); writeln!(self.snapshot).unwrap(); self.snapshot.push_str("## Unimplemented nodes/tokens"); writeln!(self.snapshot).unwrap(); writeln!(self.snapshot).unwrap(); for (range, text) in formatted.verbatim() { writeln!(self.snapshot, "{:?} => {:?}", text, range).unwrap(); } } self } pub fn with_errors(mut self, parsed: &AnyParse, parse_input: &str) -> Self { if !parsed.has_errors() { return self; } let file_name = self.input_file.file_name().and_then(OsStr::to_str).unwrap(); let mut buffer = termcolor::Buffer::no_color(); for diagnostic in parsed.diagnostics() { let error = diagnostic .clone() .with_file_path(file_name) .with_file_source_code(parse_input); Formatter::new(&mut Termcolor(&mut buffer)) .write_markup(markup! 
{ {PrintDiagnostic::verbose(&error)} }) .expect("failed to emit diagnostic"); } writeln!(self.snapshot, "# Errors").unwrap(); writeln!(self.snapshot, "```").unwrap(); writeln!( self.snapshot, "{}", std::str::from_utf8(buffer.as_slice()).expect("non utf8 in error buffer") ) .unwrap(); writeln!(self.snapshot, "```").unwrap(); writeln!(self.snapshot).unwrap(); self } pub fn with_lines_exceeding_max_width(mut self, output: &str, max_width: usize) -> Self { let mut lines_exceeding_max_width = output .lines() .enumerate() .filter(|(_, line)| line.len() > max_width) .peekable(); if lines_exceeding_max_width.peek().is_some() { writeln!( self.snapshot, "# Lines exceeding max width of {max_width} characters" ) .unwrap(); writeln!(self.snapshot, "```").unwrap(); for (index, line) in lines_exceeding_max_width { let line_number = index + 1; writeln!(self.snapshot, "{line_number:>5}: {line}").unwrap(); } writeln!(self.snapshot, "```").unwrap(); writeln!(self.snapshot).unwrap(); } self } pub fn finish(self, relative_file_name: &str) { let file_name = self.input_file.file_name().and_then(OsStr::to_str).unwrap(); let info = TestInfo { test_file: relative_file_name.to_owned(), }; insta::with_settings!({ prepend_module_to_snapshot => false, snapshot_path => self.input_file.parent().unwrap(), omit_expression => true, raw_info => &info.test_file.into() }, { insta::assert_snapshot!(file_name, self.snapshot); }); } } impl SnapshotBuilder<'_> { fn write_extension(&mut self) { let file_extension = self.input_file.extension().unwrap().to_str().unwrap(); writeln!(self.snapshot, "```{file_extension}").unwrap(); } fn write_output_header(&mut self, output: &SnapshotOutput) { if let Some(index) = output.index { writeln!(self.snapshot, "## Output {index}").unwrap(); } else { writeln!(self.snapshot, "# Output").unwrap(); } } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_formatter_test/src/test_prettier_snapshot.rs
crates/rome_formatter_test/src/test_prettier_snapshot.rs
use rome_rowan::{TextRange, TextSize}; use std::{ffi::OsStr, fs::read_to_string, ops::Range, path::Path}; use crate::check_reformat::CheckReformat; use crate::snapshot_builder::{SnapshotBuilder, SnapshotOutput}; use crate::utils::{get_prettier_diff, strip_prettier_placeholders, PrettierDiff}; use crate::TestFormatLanguage; use rome_formatter::FormatOptions; use rome_parser::AnyParse; const PRETTIER_IGNORE: &str = "prettier-ignore"; const ROME_IGNORE: &str = "rome-ignore format: prettier ignore"; pub struct PrettierTestFile<'a> { input_file: &'static Path, root_path: &'a Path, input_code: String, parse_input: String, range_start_index: Option<usize>, range_end_index: Option<usize>, } impl<'a> PrettierTestFile<'a> { pub fn new(input: &'static str, root_path: &'a Path) -> Self { let input_file = Path::new(input); assert!( input_file.is_file(), "The input '{}' must exist and be a file.", input_file.display() ); let mut input_code = read_to_string(input_file) .unwrap_or_else(|err| panic!("failed to read {:?}: {:?}", input_file, err)); let (_, range_start_index, range_end_index) = strip_prettier_placeholders(&mut input_code); let parse_input = input_code.replace(PRETTIER_IGNORE, ROME_IGNORE); PrettierTestFile { input_file, root_path, input_code, parse_input, range_start_index, range_end_index, } } fn range(&self) -> (Option<usize>, Option<usize>) { (self.range_start_index, self.range_end_index) } pub fn input_file(&self) -> &Path { self.input_file } pub fn parse_input(&self) -> &str { self.parse_input.as_str() } pub fn file_name(&self) -> &str { self.input_file .file_name() .and_then(OsStr::to_str) .expect("failed to get file name") } pub fn file_extension(&self) -> &OsStr { self.input_file .extension() .expect("failed to get file extension") } pub fn relative_file_name(&self) -> &'static str { self.input_file .strip_prefix(self.root_path) .unwrap_or_else(|_| { panic!( "failed to strip prefix {:?} from {:?}", self.root_path, self.input_file ) }) .to_str() .expect("failed 
to get relative file name") } } pub struct PrettierSnapshot<'a, L> where L: TestFormatLanguage, { test_file: PrettierTestFile<'a>, language: L, options: L::Options, } impl<'a, L> PrettierSnapshot<'a, L> where L: TestFormatLanguage, { pub fn new(test_file: PrettierTestFile<'a>, language: L, options: L::Options) -> Self { PrettierSnapshot { test_file, language, options, } } fn formatted(&self, parsed: &AnyParse) -> Option<String> { let has_errors = parsed.has_errors(); let syntax = parsed.syntax(); let range = self.test_file.range(); let result = match range { (Some(start), Some(end)) => { // Skip the reversed range tests as its impossible // to create a reversed TextRange anyway if end < start { return None; } self.language.format_range( self.options.clone(), &syntax, TextRange::new( TextSize::try_from(start).unwrap(), TextSize::try_from(end).unwrap(), ), ) } _ => self .language .format_node(self.options.clone(), &syntax) .map(|formatted| formatted.print().unwrap()), }; let formatted = result.expect("formatting failed"); let formatted = match range { (Some(_), Some(_)) => { let range = formatted .range() .expect("the result of format_range should have a range"); let formatted = formatted.as_code(); let mut output_code = self.test_file.parse_input.clone(); output_code.replace_range(Range::<usize>::from(range), formatted); output_code } _ => { let formatted = formatted.into_code(); if !has_errors { let check_reformat = CheckReformat::new( &syntax, &formatted, self.test_file.file_name(), &self.language, self.options.clone(), ); check_reformat.check_reformat(); } formatted } }; let formatted = formatted.replace(ROME_IGNORE, PRETTIER_IGNORE); Some(formatted) } pub fn test(self) { let parsed = self.language.parse(self.test_file().parse_input()); let formatted = match self.formatted(&parsed) { Some(formatted) => formatted, None => return, }; let relative_file_name = self.test_file().relative_file_name(); let input_file = self.test_file().input_file(); let prettier_diff = 
get_prettier_diff(input_file, relative_file_name, &formatted); let prettier_diff = match prettier_diff { PrettierDiff::Diff(prettier_diff) => prettier_diff, PrettierDiff::Same => return, }; let mut builder = SnapshotBuilder::new(input_file) .with_input(&self.test_file().input_code) .with_prettier_diff(&prettier_diff) .with_output(SnapshotOutput::new(&formatted)) .with_errors(&parsed, &self.test_file().parse_input); let max_width = self.options.line_width().value() as usize; builder = builder.with_lines_exceeding_max_width(&formatted, max_width); builder.finish(relative_file_name); } fn test_file(&self) -> &PrettierTestFile { &self.test_file } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_formatter_test/src/lib.rs
crates/rome_formatter_test/src/lib.rs
use rome_formatter::{ CstFormatContext, FormatContext, FormatLanguage, FormatOptions, FormatResult, Formatted, Printed, }; use rome_parser::AnyParse; use rome_rowan::{Language, SyntaxNode, TextRange}; pub mod check_reformat; pub mod diff_report; pub mod snapshot_builder; pub mod spec; pub mod test_prettier_snapshot; pub mod utils; pub trait TestFormatLanguage { type SyntaxLanguage: Language + 'static; type Options: FormatOptions + std::fmt::Display + Clone; type Context: CstFormatContext<Options = Self::Options>; type FormatLanguage: FormatLanguage<Context = Self::Context, SyntaxLanguage = Self::SyntaxLanguage> + 'static; fn parse(&self, text: &str) -> AnyParse; fn deserialize_format_options( &self, options: &str, ) -> Vec<<Self::Context as FormatContext>::Options>; fn format_node( &self, options: Self::Options, node: &SyntaxNode<Self::SyntaxLanguage>, ) -> FormatResult<Formatted<Self::Context>>; fn format_range( &self, options: Self::Options, node: &SyntaxNode<Self::SyntaxLanguage>, range: TextRange, ) -> FormatResult<Printed>; }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_formatter_test/src/diff_report.rs
crates/rome_formatter_test/src/diff_report.rs
use similar::{utils::diff_lines, Algorithm, ChangeTag}; use std::sync::Mutex; use std::{env, fmt::Write, fs::write, os::raw::c_int, str::FromStr, sync::Once}; use serde::Serialize; #[derive(Debug, PartialEq, Eq)] enum ReportType { Json, Markdown, } #[derive(Debug, Clone, Default, Serialize)] struct SingleFileMetricData { filename: String, single_file_compatibility: f64, #[serde(skip)] diff: Option<String>, } #[derive(Clone, Debug, Default, Serialize)] struct PrettierCompatibilityMetricData { file_based_average_prettier_similarity: f64, line_based_average_prettier_similarity: f64, files: Vec<SingleFileMetricData>, } impl FromStr for ReportType { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "json" => Ok(Self::Json), "markdown" => Ok(Self::Markdown), _ => Err("Only `json` and `markdown` are supported".to_string()), } } } struct DiffReportItem { file_name: &'static str, rome_formatted_result: String, prettier_formatted_result: String, } pub struct DiffReport { state: Mutex<Vec<DiffReportItem>>, } impl DiffReport { pub fn get() -> &'static Self { static REPORTER: DiffReport = DiffReport { state: Mutex::new(Vec::new()), }; // Use an atomic Once to register an exit callback the first time any // testing thread requests an instance of the Reporter static ONCE: Once = Once::new(); ONCE.call_once(|| { // Import the atexit function from libc extern "C" { fn atexit(f: extern "C" fn()) -> c_int; } // Trampoline function into the reporter printing logic with the // correct extern C ABI extern "C" fn print_report() { REPORTER.print(); } // Register the print_report function to be called when the process exits unsafe { atexit(print_report); } }); &REPORTER } pub fn report( &self, file_name: &'static str, rome_formatted_result: &str, prettier_formatted_result: &str, ) { match env::var("REPORT_PRETTIER") { Ok(value) if value == "1" => { if !Self::is_ignored(file_name) { self.state.lock().unwrap().push(DiffReportItem { file_name, 
rome_formatted_result: rome_formatted_result.to_owned(), prettier_formatted_result: prettier_formatted_result.to_owned(), }); } } _ => {} } } fn is_ignored(file_name: &str) -> bool { let patterns = [ "arrows-bind", "async-do-expressions", "async-do-expressions.js", "decimal.js", "do-expressions.js", "export-default-from", "function-bind.js", "module-blocks", "partial-application", "pipeline", "record", "throw-expressions.js", "v8intrinsic.js", "v8_intrinsic", "bind-expressions", "destructuring-private-fields", "/do/", "export-extension", "js/tuple", ]; patterns.iter().any(|pattern| file_name.contains(pattern)) } fn print(&self) { if let Some(report) = rome_rowan::check_live() { panic!("\n{report}") } // Only create the report file if the REPORT_PRETTIER // environment variable is set to 1 match env::var("REPORT_PRETTIER") { Ok(value) if value == "1" => { let report_type = match env::var("REPORT_TYPE") { Ok(value) => ReportType::from_str(&value).unwrap(), _ => ReportType::Markdown, }; let report_filename = match env::var("REPORT_FILENAME") { Ok(value) => value, _ => match report_type { ReportType::Json => "report.json".to_string(), ReportType::Markdown => "report.md".to_string(), }, }; self.report_prettier(report_type, report_filename); } _ => {} } } fn report_prettier(&self, report_type: ReportType, report_filename: String) { let mut state = self.state.lock().unwrap(); state.sort_by_key(|DiffReportItem { file_name, .. 
}| *file_name); let mut report_metric_data = PrettierCompatibilityMetricData::default(); let mut file_ratio_sum = 0_f64; let mut total_lines = 0; let mut total_matched_lines = 0; let mut file_count = 0; for DiffReportItem { file_name, rome_formatted_result, prettier_formatted_result, } in state.iter() { file_count += 1; let rome_lines = rome_formatted_result.lines().count(); let prettier_lines = prettier_formatted_result.lines().count(); let (matched_lines, ratio, diff) = if rome_formatted_result == prettier_formatted_result { (rome_lines, 1f64, None) } else { let mut matched_lines = 0; let mut diff = String::new(); for (tag, line) in diff_lines( Algorithm::default(), prettier_formatted_result, rome_formatted_result, ) { if matches!(tag, ChangeTag::Equal) { matched_lines += 1; } let line = line.strip_suffix('\n').unwrap_or(line); writeln!(diff, "{}{}", tag, line).unwrap(); } let ratio = matched_lines as f64 / rome_lines.max(prettier_lines) as f64; (matched_lines, ratio, Some(diff)) }; total_lines += rome_lines.max(prettier_lines); total_matched_lines += matched_lines; file_ratio_sum += ratio; let single_file_metric_data = SingleFileMetricData { diff, filename: file_name.to_string(), single_file_compatibility: ratio, }; report_metric_data.files.push(single_file_metric_data); } report_metric_data.file_based_average_prettier_similarity = file_ratio_sum / file_count as f64; report_metric_data.line_based_average_prettier_similarity = total_matched_lines as f64 / total_lines as f64; match report_type { ReportType::Json => self.report_json(report_filename, report_metric_data), ReportType::Markdown => self.report_markdown(report_filename, report_metric_data), } } fn report_markdown( &self, report_filename: String, report_metric_data: PrettierCompatibilityMetricData, ) { let mut report = String::new(); for SingleFileMetricData { filename, single_file_compatibility, diff, } in report_metric_data.files.iter() { writeln!(report, "# {}", filename).unwrap(); if let Some(diff) = 
diff { writeln!(report, "```diff").unwrap(); writeln!(report, "{diff}").unwrap(); writeln!(report, "```").unwrap() } writeln!(report).unwrap(); writeln!( report, "**Prettier Similarity**: {:.2}%", single_file_compatibility * 100_f64 ) .unwrap(); writeln!(report).unwrap(); writeln!(report).unwrap(); } let mut header = String::from("# Overall Metrics\n\n"); writeln!( header, "**Average compatibility**: {:.2}", report_metric_data.file_based_average_prettier_similarity * 100_f64, ) .unwrap(); header.push_str( r#" <details> <summary>Definition</summary> $$average = \frac\{\sum_{file}^\{files}compatibility_\{file}}\{files}$$ </details> "#, ); write!( header, "**Compatible lines**: {:.2}", report_metric_data.line_based_average_prettier_similarity * 100_f64 ) .unwrap(); header.push_str( r#" <details> <summary>Definition</summary> $$average = \frac{\sum_{file}^{files}matching\_lines_{file}}{max(lines_{rome}, lines_{prettier})}$$ </details> [Metric definition discussion](https://github.com/rome/tools/issues/2555#issuecomment-1124787893) "#, ); let report = format!("{header}\n\n{report}"); write(report_filename, report).unwrap(); } fn report_json( &self, report_filename: String, report_metric_data: PrettierCompatibilityMetricData, ) { let json_content = serde_json::to_string(&report_metric_data).unwrap(); write(report_filename, json_content).unwrap(); } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_formatter_test/src/utils.rs
crates/rome_formatter_test/src/utils.rs
use crate::diff_report::DiffReport; use similar::TextDiff; use std::ffi::OsStr; use std::fs::{read_to_string, remove_file}; use std::path::Path; struct StripPlaceholders { cursor: String, range_start_placeholder: String, range_end_placeholder: String, } /// Find and replace the cursor, range start and range end placeholders in a /// snapshot tests and return their indices in the resulting string impl StripPlaceholders { pub fn new( cursor: String, range_start_placeholder: String, range_end_placeholder: String, ) -> Self { StripPlaceholders { cursor, range_start_placeholder, range_end_placeholder, } } pub fn transform( &self, input_code: &mut String, ) -> (Option<usize>, Option<usize>, Option<usize>) { let mut cursor_index = None; let mut range_start_index = None; let mut range_end_index = None; if let Some(index) = input_code.find(&self.cursor) { input_code.replace_range(index..index + self.cursor.len(), ""); cursor_index = Some(index); } if let Some(index) = input_code.find(&self.range_start_placeholder) { input_code.replace_range(index..index + self.range_start_placeholder.len(), ""); range_start_index = Some(index); if let Some(cursor) = &mut cursor_index { if *cursor > index { *cursor -= self.range_start_placeholder.len(); } } } if let Some(index) = input_code.find(&self.range_end_placeholder) { input_code.replace_range(index..index + self.range_end_placeholder.len(), ""); range_end_index = Some(index); if let Some(cursor) = &mut cursor_index { if *cursor > index { *cursor -= self.range_end_placeholder.len(); } } if let Some(cursor) = &mut range_start_index { // Prettier has tests for reversed ranges if *cursor > index { *cursor -= self.range_end_placeholder.len(); } } } (cursor_index, range_start_index, range_end_index) } } const PRETTIER_CURSOR_PLACEHOLDER: &str = "<|>"; const PRETTIER_RANGE_START_PLACEHOLDER: &str = "<<<PRETTIER_RANGE_START>>>"; const PRETTIER_RANGE_END_PLACEHOLDER: &str = "<<<PRETTIER_RANGE_END>>>"; pub(crate) fn 
strip_prettier_placeholders( input_code: &mut String, ) -> (Option<usize>, Option<usize>, Option<usize>) { StripPlaceholders::new( PRETTIER_CURSOR_PLACEHOLDER.to_string(), PRETTIER_RANGE_START_PLACEHOLDER.to_string(), PRETTIER_RANGE_END_PLACEHOLDER.to_string(), ) .transform(input_code) } const ROME_CURSOR_PLACEHOLDER: &str = "<|>"; const ROME_RANGE_START_PLACEHOLDER: &str = "<<<ROME_RANGE_START>>>"; const ROME_RANGE_END_PLACEHOLDER: &str = "<<<ROME_RANGE_END>>>"; pub(crate) fn strip_rome_placeholders( input_code: &mut String, ) -> (Option<usize>, Option<usize>, Option<usize>) { StripPlaceholders::new( ROME_CURSOR_PLACEHOLDER.to_string(), ROME_RANGE_START_PLACEHOLDER.to_string(), ROME_RANGE_END_PLACEHOLDER.to_string(), ) .transform(input_code) } pub enum PrettierDiff { Diff(String), Same, } pub fn get_prettier_diff( input_file: &Path, relative_file_name: &'static str, formatted: &str, ) -> PrettierDiff { let input_extension = input_file.extension().and_then(OsStr::to_str); let prettier_snapshot_path = input_extension .map(|ext| input_file.with_extension(format!("{}.prettier-snap", ext))) .filter(|path| path.exists()); let prettier_snapshot_path = prettier_snapshot_path.expect("failed to find prettier snapshot"); let mut prettier_snapshot = read_to_string(prettier_snapshot_path).unwrap(); strip_prettier_placeholders(&mut prettier_snapshot); DiffReport::get().report(relative_file_name, formatted, &prettier_snapshot); if formatted == prettier_snapshot { // The output matches prettier's output. There's no need for a snapshot that duplicates the output. // Delete the snapshot file if it already exists, otherwise return early to not create a new snapshot. 
if let Some(input_extension) = input_extension { let snapshot_file_name = input_file.with_extension(format!("{}.snap", input_extension)); if snapshot_file_name.exists() && snapshot_file_name.is_file() { remove_file(snapshot_file_name).ok(); // not the end of the world if it fails } let new_snapshot_file_name = input_file.with_extension(format!("{}.snap.new", input_extension)); if new_snapshot_file_name.exists() && new_snapshot_file_name.is_file() { remove_file(new_snapshot_file_name).ok(); // not the end of the world if it fails } } PrettierDiff::Same } else { let mut prettier_differences = Vec::new(); TextDiff::from_lines(prettier_snapshot.as_str(), formatted) .unified_diff() .header("Prettier", "Rome") .to_writer(&mut prettier_differences) .unwrap(); PrettierDiff::Diff( String::from_utf8(prettier_differences).expect("Input file to be in UTF8"), ) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_formatter_test/src/check_reformat.rs
crates/rome_formatter_test/src/check_reformat.rs
use crate::TestFormatLanguage; use rome_diagnostics::console::fmt::{Formatter, Termcolor}; use rome_diagnostics::console::markup; use rome_diagnostics::termcolor; use rome_diagnostics::{DiagnosticExt, PrintDiagnostic}; use rome_rowan::SyntaxNode; /// Perform a second pass of formatting on a file, printing a diff if the /// output doesn't match the input /// pub struct CheckReformat<'a, L> where L: TestFormatLanguage, { root: &'a SyntaxNode<L::SyntaxLanguage>, text: &'a str, file_name: &'a str, language: &'a L, options: L::Options, } impl<'a, L> CheckReformat<'a, L> where L: TestFormatLanguage, { pub fn new( root: &'a SyntaxNode<L::SyntaxLanguage>, text: &'a str, file_name: &'a str, language: &'a L, options: L::Options, ) -> Self { CheckReformat { root, text, file_name, language, options, } } pub fn check_reformat(&self) { let re_parse = self.language.parse(self.text); // Panic if the result from the formatter has syntax errors if re_parse.has_errors() { let mut buffer = termcolor::Buffer::ansi(); for diagnostic in re_parse.diagnostics() { let error = diagnostic .clone() .with_file_path(self.file_name) .with_file_source_code(self.text.to_string()); Formatter::new(&mut Termcolor(&mut buffer)) .write_markup(markup! 
{ {PrintDiagnostic::verbose(&error)} }) .expect("failed to emit diagnostic"); } panic!( "formatter output had syntax errors where input had none:\n{}", std::str::from_utf8(buffer.as_slice()).expect("non utf8 in error buffer") ) } let formatted = self .language .format_node(self.options.clone(), &re_parse.syntax()) .unwrap(); let printed = formatted.print().unwrap(); if self.text != printed.as_code() { let input_format_element = self .language .format_node(self.options.clone(), self.root) .unwrap(); let pretty_input_ir = format!("{}", formatted.into_document()); let pretty_reformat_ir = format!("{}", input_format_element.into_document()); // Print a diff of the Formatter IR emitted for the input and the output let diff = similar_asserts::SimpleDiff::from_str( &pretty_input_ir, &pretty_reformat_ir, "input", "output", ); println!("{diff}"); similar_asserts::assert_eq!(self.text, printed.as_code()); } } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_formatter_test/src/spec.rs
crates/rome_formatter_test/src/spec.rs
use crate::check_reformat::CheckReformat; use crate::snapshot_builder::{SnapshotBuilder, SnapshotOutput}; use crate::utils::strip_rome_placeholders; use crate::TestFormatLanguage; use rome_console::EnvConsole; use rome_formatter::{FormatOptions, Printed}; use rome_fs::RomePath; use rome_parser::AnyParse; use rome_rowan::{TextRange, TextSize}; use rome_service::workspace::{FeatureName, FeaturesBuilder, SupportsFeatureParams}; use rome_service::App; use std::ops::Range; use std::path::{Path, PathBuf}; #[derive(Debug)] pub struct SpecTestFile<'a> { input_file: RomePath, root_path: &'a Path, input_code: String, range_start_index: Option<usize>, range_end_index: Option<usize>, } impl<'a> SpecTestFile<'a> { pub fn try_from_file(input_file: &'a str, root_path: &'a Path) -> Option<SpecTestFile<'a>> { let mut console = EnvConsole::default(); let app = App::with_console(&mut console); let file_path = &input_file; let spec_input_file = Path::new(input_file); assert!( spec_input_file.is_file(), "The input '{}' must exist and be a file.", spec_input_file.display() ); let mut input_file = RomePath::new(file_path); let can_format = app .workspace .file_features(SupportsFeatureParams { path: input_file.clone(), feature: FeaturesBuilder::new().with_formatter().build(), }) .unwrap(); if can_format.supports_for(&FeatureName::Format) { let mut input_code = input_file.get_buffer_from_file(); let (_, range_start_index, range_end_index) = strip_rome_placeholders(&mut input_code); Some(SpecTestFile { input_file, root_path, input_code, range_start_index, range_end_index, }) } else { None } } pub fn input_code(&self) -> &str { &self.input_code } pub fn file_name(&self) -> &str { self.input_file.file_name().unwrap().to_str().unwrap() } pub fn input_file(&self) -> &RomePath { &self.input_file } pub fn relative_file_name(&self) -> &str { self.input_file .strip_prefix(self.root_path) .unwrap_or_else(|_| { panic!( "failed to strip prefix {:?} from {:?}", self.root_path, self.input_file ) }) 
.to_str() .expect("failed to get relative file name") } fn range(&self) -> (Option<usize>, Option<usize>) { (self.range_start_index, self.range_end_index) } } pub struct SpecSnapshot<'a, L> where L: TestFormatLanguage, { test_file: SpecTestFile<'a>, test_directory: PathBuf, language: L, options: L::Options, } impl<'a, L> SpecSnapshot<'a, L> where L: TestFormatLanguage, { pub fn new( test_file: SpecTestFile<'a>, test_directory: &str, language: L, options: L::Options, ) -> Self { let test_directory = PathBuf::from(test_directory); SpecSnapshot { test_file, test_directory, language, options, } } fn formatted(&self, parsed: &AnyParse, options: L::Options) -> (String, Printed) { let has_errors = parsed.has_errors(); let syntax = parsed.syntax(); let range = self.test_file.range(); let result = match range { (Some(start), Some(end)) => self.language.format_range( options.clone(), &syntax, TextRange::new( TextSize::try_from(start).unwrap(), TextSize::try_from(end).unwrap(), ), ), _ => self .language .format_node(options.clone(), &syntax) .map(|formatted| formatted.print().unwrap()), }; let formatted = result.expect("formatting failed"); let output_code = match range { (Some(_), Some(_)) => { let range = formatted .range() .expect("the result of format_range should have a range"); let mut output_code = self.test_file.input_code.clone(); output_code.replace_range(Range::<usize>::from(range), formatted.as_code()); // Check if output code is a valid syntax let parsed = self.language.parse(&output_code); if parsed.has_errors() { panic!( "{:?} format range produced an invalid syntax tree: {:?}", self.test_file.input_file, output_code ) } output_code } _ => { let output_code = formatted.as_code(); if !has_errors { let check_reformat = CheckReformat::new( &syntax, output_code, self.test_file.file_name(), &self.language, options, ); check_reformat.check_reformat(); } output_code.to_string() } }; (output_code, formatted) } pub fn test(self) { let input_file = 
self.test_file().input_file().as_path(); let mut snapshot_builder = SnapshotBuilder::new(input_file) .with_input(self.test_file.input_code()) .with_separator() .with_multiple_outputs(); let parsed = self.language.parse(self.test_file.input_code()); let (output_code, printed) = self.formatted(&parsed, self.options.clone()); let max_width = self.options.line_width().value() as usize; snapshot_builder = snapshot_builder .with_output_and_options( SnapshotOutput::new(&output_code).with_index(1), self.options.clone(), ) .with_unimplemented(&printed) .with_lines_exceeding_max_width(&output_code, max_width); let options_path = self.test_directory.join("options.json"); if options_path.exists() { let mut options_path = RomePath::new(&options_path); // SAFETY: we checked its existence already, we assume we have rights to read it let test_options = self .language .deserialize_format_options(options_path.get_buffer_from_file().as_str()); for (index, options) in test_options.into_iter().enumerate() { let (output_code, printed) = self.formatted(&parsed, options.clone()); let max_width = options.line_width().value() as usize; snapshot_builder = snapshot_builder .with_output_and_options( SnapshotOutput::new(&output_code).with_index(index + 2), options, ) .with_unimplemented(&printed) .with_lines_exceeding_max_width(&output_code, max_width); } } snapshot_builder.finish(self.test_file.relative_file_name()); } fn test_file(&self) -> &SpecTestFile { &self.test_file } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_text_size/src/lib.rs
crates/rome_text_size/src/lib.rs
//! Newtypes for working with text sizes/ranges in a more type-safe manner. //! //! This library can help with two things: //! * Reducing storage requirements for offsets and ranges, under the //! assumption that 32 bits is enough. //! * Providing standard vocabulary types for applications where text ranges //! are pervasive. //! //! However, you should not use this library simply because you work with //! strings. In the overwhelming majority of cases, using `usize` and //! `std::ops::Range<usize>` is better. In particular, if you are publishing a //! library, using only std types in the interface would make it more //! interoperable. Similarly, if you are writing something like a lexer, which //! produces, but does not *store* text ranges, then sticking to `usize` would //! be better. //! //! Minimal Supported Rust Version: latest stable. #![forbid(unsafe_code)] #![warn(missing_debug_implementations, missing_docs)] mod range; mod size; mod traits; #[cfg(feature = "schemars")] mod schemars_impls; #[cfg(feature = "serde")] mod serde_impls; pub use crate::{range::TextRange, size::TextSize, traits::TextLen}; #[cfg(target_pointer_width = "16")] compile_error!("text-size assumes usize >= u32 and does not work on 16-bit targets");
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_text_size/src/range.rs
crates/rome_text_size/src/range.rs
use cmp::Ordering; use { crate::TextSize, std::{ cmp, convert::TryFrom, fmt, ops::{Add, AddAssign, Bound, Index, IndexMut, Range, RangeBounds, Sub, SubAssign}, }, }; /// A range in text, represented as a pair of [`TextSize`][struct@TextSize]. /// /// It is a logic error for `start` to be greater than `end`. #[derive(Default, Copy, Clone, Eq, PartialEq, Hash)] pub struct TextRange { // Invariant: start <= end start: TextSize, end: TextSize, } impl fmt::Debug for TextRange { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}..{}", self.start().raw, self.end().raw) } } impl TextRange { /// Creates a new `TextRange` with the given `start` and `end` (`start..end`). /// /// # Panics /// /// Panics if `end < start`. /// /// # Examples /// /// ```rust /// # use rome_text_size::*; /// let start = TextSize::from(5); /// let end = TextSize::from(10); /// let range = TextRange::new(start, end); /// /// assert_eq!(range.start(), start); /// assert_eq!(range.end(), end); /// assert_eq!(range.len(), end - start); /// ``` #[inline] pub fn new(start: TextSize, end: TextSize) -> TextRange { assert!(start <= end); TextRange { start, end } } /// Create a new `TextRange` with the given `offset` and `len` (`offset..offset + len`). /// /// # Examples /// /// ```rust /// # use rome_text_size::*; /// let text = "0123456789"; /// /// let offset = TextSize::from(2); /// let length = TextSize::from(5); /// let range = TextRange::at(offset, length); /// /// assert_eq!(range, TextRange::new(offset, offset + length)); /// assert_eq!(&text[range], "23456") /// ``` #[inline] pub fn at(offset: TextSize, len: TextSize) -> TextRange { TextRange::new(offset, offset + len) } /// Create a zero-length range at the specified offset (`offset..offset`). 
/// /// # Examples /// /// ```rust /// # use rome_text_size::*; /// let point: TextSize; /// # point = TextSize::from(3); /// let range = TextRange::empty(point); /// assert!(range.is_empty()); /// assert_eq!(range, TextRange::new(point, point)); /// ``` #[inline] pub fn empty(offset: TextSize) -> TextRange { TextRange { start: offset, end: offset, } } /// Create a range up to the given end (`..end`). /// /// # Examples /// /// ```rust /// # use rome_text_size::*; /// let point: TextSize; /// # point = TextSize::from(12); /// let range = TextRange::up_to(point); /// /// assert_eq!(range.len(), point); /// assert_eq!(range, TextRange::new(0.into(), point)); /// assert_eq!(range, TextRange::at(0.into(), point)); /// ``` #[inline] pub fn up_to(end: TextSize) -> TextRange { TextRange { start: 0.into(), end, } } } /// Identity methods. impl TextRange { /// The start point of this range. #[inline] pub const fn start(self) -> TextSize { self.start } /// The end point of this range. #[inline] pub const fn end(self) -> TextSize { self.end } /// The size of this range. #[inline] pub const fn len(self) -> TextSize { // HACK for const fn: math on primitives only TextSize { raw: self.end().raw - self.start().raw, } } /// Check if this range is empty. #[inline] pub const fn is_empty(self) -> bool { // HACK for const fn: math on primitives only self.start().raw == self.end().raw } } /// Manipulation methods. impl TextRange { /// Check if this range contains an offset. /// /// The end index is considered excluded. /// /// # Examples /// /// ```rust /// # use rome_text_size::*; /// let (start, end): (TextSize, TextSize); /// # start = 10.into(); end = 20.into(); /// let range = TextRange::new(start, end); /// assert!(range.contains(start)); /// assert!(!range.contains(end)); /// ``` #[inline] pub fn contains(self, offset: TextSize) -> bool { self.start() <= offset && offset < self.end() } /// Check if this range contains an offset. /// /// The end index is considered included. 
/// /// # Examples /// /// ```rust /// # use rome_text_size::*; /// let (start, end): (TextSize, TextSize); /// # start = 10.into(); end = 20.into(); /// let range = TextRange::new(start, end); /// assert!(range.contains_inclusive(start)); /// assert!(range.contains_inclusive(end)); /// ``` #[inline] pub fn contains_inclusive(self, offset: TextSize) -> bool { self.start() <= offset && offset <= self.end() } /// Check if this range completely contains another range. /// /// # Examples /// /// ```rust /// # use rome_text_size::*; /// let larger = TextRange::new(0.into(), 20.into()); /// let smaller = TextRange::new(5.into(), 15.into()); /// assert!(larger.contains_range(smaller)); /// assert!(!smaller.contains_range(larger)); /// /// // a range always contains itself /// assert!(larger.contains_range(larger)); /// assert!(smaller.contains_range(smaller)); /// ``` #[inline] pub fn contains_range(self, other: TextRange) -> bool { self.start() <= other.start() && other.end() <= self.end() } /// The range covered by both ranges, if it exists. /// If the ranges touch but do not overlap, the output range is empty. /// /// # Examples /// /// ```rust /// # use rome_text_size::*; /// assert_eq!( /// TextRange::intersect( /// TextRange::new(0.into(), 10.into()), /// TextRange::new(5.into(), 15.into()), /// ), /// Some(TextRange::new(5.into(), 10.into())), /// ); /// ``` #[inline] pub fn intersect(self, other: TextRange) -> Option<TextRange> { let start = cmp::max(self.start(), other.start()); let end = cmp::min(self.end(), other.end()); if end < start { return None; } Some(TextRange::new(start, end)) } /// Extends the range to cover `other` as well. 
/// /// # Examples /// /// ```rust /// # use rome_text_size::*; /// assert_eq!( /// TextRange::cover( /// TextRange::new(0.into(), 5.into()), /// TextRange::new(15.into(), 20.into()), /// ), /// TextRange::new(0.into(), 20.into()), /// ); /// ``` #[inline] pub fn cover(self, other: TextRange) -> TextRange { let start = cmp::min(self.start(), other.start()); let end = cmp::max(self.end(), other.end()); TextRange::new(start, end) } /// Extends the range to cover `other` offsets as well. /// /// # Examples /// /// ```rust /// # use rome_text_size::*; /// assert_eq!( /// TextRange::empty(0.into()).cover_offset(20.into()), /// TextRange::new(0.into(), 20.into()), /// ) /// ``` #[inline] pub fn cover_offset(self, offset: TextSize) -> TextRange { self.cover(TextRange::empty(offset)) } /// Add an offset to this range. /// /// Note that this is not appropriate for changing where a `TextRange` is /// within some string; rather, it is for changing the reference anchor /// that the `TextRange` is measured against. /// /// The unchecked version (`Add::add`) will _always_ panic on overflow, /// in contrast to primitive integers, which check in debug mode only. #[inline] pub fn checked_add(self, offset: TextSize) -> Option<TextRange> { Some(TextRange { start: self.start.checked_add(offset)?, end: self.end.checked_add(offset)?, }) } /// Subtract an offset from this range. /// /// Note that this is not appropriate for changing where a `TextRange` is /// within some string; rather, it is for changing the reference anchor /// that the `TextRange` is measured against. /// /// The unchecked version (`Sub::sub`) will _always_ panic on overflow, /// in contrast to primitive integers, which check in debug mode only. #[inline] pub fn checked_sub(self, offset: TextSize) -> Option<TextRange> { Some(TextRange { start: self.start.checked_sub(offset)?, end: self.end.checked_sub(offset)?, }) } /// Relative order of the two ranges (overlapping ranges are considered /// equal). 
/// /// /// This is useful when, for example, binary searching an array of disjoint /// ranges. /// /// # Examples /// /// ``` /// # use rome_text_size::*; /// # use std::cmp::Ordering; /// /// let a = TextRange::new(0.into(), 3.into()); /// let b = TextRange::new(4.into(), 5.into()); /// assert_eq!(a.ordering(b), Ordering::Less); /// /// let a = TextRange::new(0.into(), 3.into()); /// let b = TextRange::new(3.into(), 5.into()); /// assert_eq!(a.ordering(b), Ordering::Less); /// /// let a = TextRange::new(0.into(), 3.into()); /// let b = TextRange::new(2.into(), 5.into()); /// assert_eq!(a.ordering(b), Ordering::Equal); /// /// let a = TextRange::new(0.into(), 3.into()); /// let b = TextRange::new(2.into(), 2.into()); /// assert_eq!(a.ordering(b), Ordering::Equal); /// /// let a = TextRange::new(2.into(), 3.into()); /// let b = TextRange::new(2.into(), 2.into()); /// assert_eq!(a.ordering(b), Ordering::Greater); /// ``` #[inline] pub fn ordering(self, other: TextRange) -> Ordering { if self.end() <= other.start() { Ordering::Less } else if other.end() <= self.start() { Ordering::Greater } else { Ordering::Equal } } /// Subtracts an offset from the start position. /// /// /// ## Panics /// If `start - amount` is less than zero. /// /// ## Examples /// /// ``` /// use rome_text_size::{TextRange, TextSize}; /// /// let range = TextRange::new(TextSize::from(5), TextSize::from(10)); /// assert_eq!(range.sub_start(TextSize::from(2)), TextRange::new(TextSize::from(3), TextSize::from(10))); /// ``` #[inline] pub fn sub_start(&self, amount: TextSize) -> TextRange { TextRange::new(self.start() - amount, self.end()) } /// Adds an offset to the start position. 
/// /// ## Panics /// If `start + amount > end` /// /// ## Examples /// /// ``` /// use rome_text_size::{TextRange, TextSize}; /// /// let range = TextRange::new(TextSize::from(5), TextSize::from(10)); /// assert_eq!(range.add_start(TextSize::from(3)), TextRange::new(TextSize::from(8), TextSize::from(10))); /// ``` #[inline] pub fn add_start(&self, amount: TextSize) -> TextRange { TextRange::new(self.start() + amount, self.end()) } /// Subtracts an offset from the end position. /// /// /// ## Panics /// If `end - amount < 0` or `end - amount < start` /// /// ## Examples /// /// ``` /// use rome_text_size::{TextRange, TextSize}; /// /// let range = TextRange::new(TextSize::from(5), TextSize::from(10)); /// assert_eq!(range.sub_end(TextSize::from(2)), TextRange::new(TextSize::from(5), TextSize::from(8))); /// ``` #[inline] pub fn sub_end(&self, amount: TextSize) -> TextRange { TextRange::new(self.start(), self.end() - amount) } /// Adds an offset to the end position. /// /// /// ## Panics /// If `end + amount > u32::MAX` /// /// ## Examples /// /// ``` /// use rome_text_size::{TextRange, TextSize}; /// /// let range = TextRange::new(TextSize::from(5), TextSize::from(10)); /// assert_eq!(range.add_end(TextSize::from(2)), TextRange::new(TextSize::from(5), TextSize::from(12))); /// ``` #[inline] pub fn add_end(&self, amount: TextSize) -> TextRange { TextRange::new(self.start(), self.end() + amount) } } impl Index<TextRange> for str { type Output = str; #[inline] fn index(&self, index: TextRange) -> &str { &self[Range::<usize>::from(index)] } } impl Index<TextRange> for String { type Output = str; #[inline] fn index(&self, index: TextRange) -> &str { &self[Range::<usize>::from(index)] } } impl IndexMut<TextRange> for str { #[inline] fn index_mut(&mut self, index: TextRange) -> &mut str { &mut self[Range::<usize>::from(index)] } } impl IndexMut<TextRange> for String { #[inline] fn index_mut(&mut self, index: TextRange) -> &mut str { &mut self[Range::<usize>::from(index)] 
} } impl RangeBounds<TextSize> for TextRange { fn start_bound(&self) -> Bound<&TextSize> { Bound::Included(&self.start) } fn end_bound(&self) -> Bound<&TextSize> { Bound::Excluded(&self.end) } } impl<T> From<TextRange> for Range<T> where T: From<TextSize>, { #[inline] fn from(r: TextRange) -> Self { r.start().into()..r.end().into() } } macro_rules! ops { (impl $Op:ident for TextRange by fn $f:ident = $op:tt) => { impl $Op<&TextSize> for TextRange { type Output = TextRange; #[inline] fn $f(self, other: &TextSize) -> TextRange { self $op *other } } impl<T> $Op<T> for &TextRange where TextRange: $Op<T, Output=TextRange>, { type Output = TextRange; #[inline] fn $f(self, other: T) -> TextRange { *self $op other } } }; } impl Add<TextSize> for TextRange { type Output = TextRange; #[inline] fn add(self, offset: TextSize) -> TextRange { self.checked_add(offset) .expect("TextRange +offset overflowed") } } impl Sub<TextSize> for TextRange { type Output = TextRange; #[inline] fn sub(self, offset: TextSize) -> TextRange { self.checked_sub(offset) .expect("TextRange -offset overflowed") } } ops!(impl Add for TextRange by fn add = +); ops!(impl Sub for TextRange by fn sub = -); impl<A> AddAssign<A> for TextRange where TextRange: Add<A, Output = TextRange>, { #[inline] fn add_assign(&mut self, rhs: A) { *self = *self + rhs } } impl<S> SubAssign<S> for TextRange where TextRange: Sub<S, Output = TextRange>, { #[inline] fn sub_assign(&mut self, rhs: S) { *self = *self - rhs } } impl TryFrom<(usize, usize)> for TextRange { type Error = std::num::TryFromIntError; #[inline] fn try_from((start, end): (usize, usize)) -> Result<Self, Self::Error> { Ok(TextRange::new( TextSize::try_from(start)?, TextSize::try_from(end)?, )) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_text_size/src/traits.rs
crates/rome_text_size/src/traits.rs
use {crate::TextSize, std::convert::TryInto}; use priv_in_pub::Sealed; mod priv_in_pub { pub trait Sealed {} } /// Primitives with a textual length that can be passed to [`TextSize::of`]. pub trait TextLen: Copy + Sealed { /// The textual length of this primitive. fn text_len(self) -> TextSize; } impl Sealed for &'_ str {} impl TextLen for &'_ str { #[inline] fn text_len(self) -> TextSize { self.len().try_into().unwrap() } } impl Sealed for &'_ String {} impl TextLen for &'_ String { #[inline] fn text_len(self) -> TextSize { self.as_str().text_len() } } impl Sealed for char {} impl TextLen for char { #[inline] fn text_len(self) -> TextSize { (self.len_utf8() as u32).into() } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_text_size/src/schemars_impls.rs
crates/rome_text_size/src/schemars_impls.rs
//! This module implements the [JsonSchema] trait from the [schemars] crate for //! [TextSize] and [TextRange] if the `schemars` feature is enabled. This trait //! exposes meta-information on how a given type is serialized and deserialized //! using `serde`, and is currently used to generate autocomplete information //! for the `rome.json` configuration file and TypeScript types for the node.js //! bindings to the Workspace API use crate::{TextRange, TextSize}; use schemars::{gen::SchemaGenerator, schema::Schema, JsonSchema}; impl JsonSchema for TextSize { fn schema_name() -> String { String::from("TextSize") } fn json_schema(gen: &mut SchemaGenerator) -> Schema { // TextSize is represented as a raw u32, see serde_impls.rs for the // actual implementation <u32>::json_schema(gen) } } impl JsonSchema for TextRange { fn schema_name() -> String { String::from("TextRange") } fn json_schema(gen: &mut SchemaGenerator) -> Schema { // TextSize is represented as (TextSize, TextSize), see serde_impls.rs // for the actual implementation <(TextSize, TextSize)>::json_schema(gen) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_text_size/src/serde_impls.rs
crates/rome_text_size/src/serde_impls.rs
use { crate::{TextRange, TextSize}, serde::{de, Deserialize, Deserializer, Serialize, Serializer}, }; impl Serialize for TextSize { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.raw.serialize(serializer) } } impl<'de> Deserialize<'de> for TextSize { fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { u32::deserialize(deserializer).map(TextSize::from) } } impl Serialize for TextRange { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { (self.start(), self.end()).serialize(serializer) } } impl<'de> Deserialize<'de> for TextRange { #[allow(clippy::nonminimal_bool)] fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> where D: Deserializer<'de>, { let (start, end) = Deserialize::deserialize(deserializer)?; if !(start <= end) { return Err(de::Error::custom(format!( "invalid range: {:?}..{:?}", start, end ))); } Ok(TextRange::new(start, end)) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_text_size/src/size.rs
crates/rome_text_size/src/size.rs
use { crate::TextLen, std::{ convert::TryFrom, fmt, iter, num::TryFromIntError, ops::{Add, AddAssign, Sub, SubAssign}, u32, }, }; /// A measure of text length. Also, equivalently, an index into text. /// /// This is a UTF-8 bytes offset stored as `u32`, but /// most clients should treat it as an opaque measure. /// /// For cases that need to escape `TextSize` and return to working directly /// with primitive integers, `TextSize` can be converted losslessly to/from /// `u32` via [`From`] conversions as well as losslessly be converted [`Into`] /// `usize`. The `usize -> TextSize` direction can be done via [`TryFrom`]. /// /// These escape hatches are primarily required for unit testing and when /// converting from UTF-8 size to another coordinate space, such as UTF-16. #[derive(Clone, Copy, Default, PartialEq, Eq, PartialOrd, Ord, Hash)] pub struct TextSize { pub(crate) raw: u32, } impl fmt::Debug for TextSize { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { write!(f, "{}", self.raw) } } impl TextSize { /// The text size of some primitive text-like object. /// /// Accepts `char`, `&str`, and `&String`. /// /// # Examples /// /// ```rust /// # use rome_text_size::*; /// let char_size = TextSize::of('🦀'); /// assert_eq!(char_size, TextSize::from(4)); /// /// let str_size = TextSize::of("rust-analyzer"); /// assert_eq!(str_size, TextSize::from(13)); /// ``` #[inline] pub fn of<T: TextLen>(text: T) -> TextSize { text.text_len() } } /// Methods to act like a primitive integer type, where reasonably applicable. // Last updated for parity with Rust 1.42.0. impl TextSize { /// Checked addition. Returns `None` if overflow occurred. #[inline] pub fn checked_add(self, rhs: TextSize) -> Option<TextSize> { self.raw.checked_add(rhs.raw).map(|raw| TextSize { raw }) } /// Checked subtraction. Returns `None` if overflow occurred. 
#[inline] pub fn checked_sub(self, rhs: TextSize) -> Option<TextSize> { self.raw.checked_sub(rhs.raw).map(|raw| TextSize { raw }) } } impl From<u32> for TextSize { #[inline] fn from(raw: u32) -> Self { TextSize { raw } } } impl From<TextSize> for u32 { #[inline] fn from(value: TextSize) -> Self { value.raw } } impl TryFrom<usize> for TextSize { type Error = TryFromIntError; #[inline] fn try_from(value: usize) -> Result<Self, TryFromIntError> { Ok(u32::try_from(value)?.into()) } } impl From<TextSize> for usize { #[inline] fn from(value: TextSize) -> Self { value.raw as usize } } macro_rules! ops { (impl $Op:ident for TextSize by fn $f:ident = $op:tt) => { impl $Op<TextSize> for TextSize { type Output = TextSize; #[inline] fn $f(self, other: TextSize) -> TextSize { TextSize { raw: self.raw $op other.raw } } } impl $Op<&TextSize> for TextSize { type Output = TextSize; #[inline] fn $f(self, other: &TextSize) -> TextSize { self $op *other } } impl<T> $Op<T> for &TextSize where TextSize: $Op<T, Output=TextSize>, { type Output = TextSize; #[inline] fn $f(self, other: T) -> TextSize { *self $op other } } }; } ops!(impl Add for TextSize by fn add = +); ops!(impl Sub for TextSize by fn sub = -); impl<A> AddAssign<A> for TextSize where TextSize: Add<A, Output = TextSize>, { #[inline] fn add_assign(&mut self, rhs: A) { *self = *self + rhs } } impl<S> SubAssign<S> for TextSize where TextSize: Sub<S, Output = TextSize>, { #[inline] fn sub_assign(&mut self, rhs: S) { *self = *self - rhs } } impl<A> iter::Sum<A> for TextSize where TextSize: Add<A, Output = TextSize>, { #[inline] fn sum<I: Iterator<Item = A>>(iter: I) -> TextSize { iter.fold(0.into(), Add::add) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_text_size/tests/serde.rs
crates/rome_text_size/tests/serde.rs
use {rome_text_size::*, serde_test::*, std::ops}; fn size(x: u32) -> TextSize { TextSize::from(x) } fn range(x: ops::Range<u32>) -> TextRange { TextRange::new(x.start.into(), x.end.into()) } #[test] fn size_serialization() { assert_tokens(&size(00), &[Token::U32(00)]); assert_tokens(&size(10), &[Token::U32(10)]); assert_tokens(&size(20), &[Token::U32(20)]); assert_tokens(&size(30), &[Token::U32(30)]); } #[test] fn range_serialization() { assert_tokens( &range(00..10), &[ Token::Tuple { len: 2 }, Token::U32(00), Token::U32(10), Token::TupleEnd, ], ); assert_tokens( &range(10..20), &[ Token::Tuple { len: 2 }, Token::U32(10), Token::U32(20), Token::TupleEnd, ], ); assert_tokens( &range(20..30), &[ Token::Tuple { len: 2 }, Token::U32(20), Token::U32(30), Token::TupleEnd, ], ); assert_tokens( &range(30..40), &[ Token::Tuple { len: 2 }, Token::U32(30), Token::U32(40), Token::TupleEnd, ], ); } #[test] fn invalid_range_deserialization() { assert_tokens::<TextRange>( &range(62..92), &[ Token::Tuple { len: 2 }, Token::U32(62), Token::U32(92), Token::TupleEnd, ], ); assert_de_tokens_error::<TextRange>( &[ Token::Tuple { len: 2 }, Token::U32(92), Token::U32(62), Token::TupleEnd, ], "invalid range: 92..62", ); }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_text_size/tests/contructors.rs
crates/rome_text_size/tests/contructors.rs
use rome_text_size::TextSize; #[derive(Copy, Clone)] struct BadRope<'a>(&'a [&'a str]); impl BadRope<'_> { fn text_len(self) -> TextSize { self.0.iter().copied().map(TextSize::of).sum() } } #[test] fn main() { let x: char = 'c'; let _ = TextSize::of(x); let x: &str = "hello"; let _ = TextSize::of(x); let x: &String = &"hello".into(); let _ = TextSize::of(x); let _ = BadRope(&[""]).text_len(); }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_text_size/tests/main.rs
crates/rome_text_size/tests/main.rs
use {rome_text_size::*, std::ops}; fn size(x: u32) -> TextSize { TextSize::from(x) } fn range(x: ops::Range<u32>) -> TextRange { TextRange::new(x.start.into(), x.end.into()) } #[test] fn sum() { let xs: Vec<TextSize> = vec![size(0), size(1), size(2)]; assert_eq!(xs.iter().sum::<TextSize>(), size(3)); assert_eq!(xs.into_iter().sum::<TextSize>(), size(3)); } #[test] fn math() { assert_eq!(size(10) + size(5), size(15)); assert_eq!(size(10) - size(5), size(5)); } #[test] fn checked_math() { assert_eq!(size(1).checked_add(size(1)), Some(size(2))); assert_eq!(size(1).checked_sub(size(1)), Some(size(0))); assert_eq!(size(1).checked_sub(size(2)), None); assert_eq!(size(!0).checked_add(size(1)), None); } #[test] #[rustfmt::skip] fn contains() { assert!( range(2..4).contains_range(range(2..3))); assert!( ! range(2..4).contains_range(range(1..3))); } #[test] fn intersect() { assert_eq!(range(1..2).intersect(range(2..3)), Some(range(2..2))); assert_eq!(range(1..5).intersect(range(2..3)), Some(range(2..3))); assert_eq!(range(1..2).intersect(range(3..4)), None); } #[test] fn cover() { assert_eq!(range(1..2).cover(range(2..3)), range(1..3)); assert_eq!(range(1..5).cover(range(2..3)), range(1..5)); assert_eq!(range(1..2).cover(range(4..5)), range(1..5)); } #[test] fn cover_offset() { assert_eq!(range(1..3).cover_offset(size(0)), range(0..3)); assert_eq!(range(1..3).cover_offset(size(1)), range(1..3)); assert_eq!(range(1..3).cover_offset(size(2)), range(1..3)); assert_eq!(range(1..3).cover_offset(size(3)), range(1..3)); assert_eq!(range(1..3).cover_offset(size(4)), range(1..4)); } #[test] #[rustfmt::skip] fn contains_point() { assert!( ! range(1..3).contains(size(0))); assert!( range(1..3).contains(size(1))); assert!( range(1..3).contains(size(2))); assert!( ! range(1..3).contains(size(3))); assert!( ! range(1..3).contains(size(4))); assert!( ! 
range(1..3).contains_inclusive(size(0))); assert!( range(1..3).contains_inclusive(size(1))); assert!( range(1..3).contains_inclusive(size(2))); assert!( range(1..3).contains_inclusive(size(3))); assert!( ! range(1..3).contains_inclusive(size(4))); }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_text_size/tests/indexing.rs
crates/rome_text_size/tests/indexing.rs
use rome_text_size::*; #[test] fn main() { let range = TextRange::default(); let _ = &""[range]; let _ = &String::new()[range]; }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_text_size/tests/auto_traits.rs
crates/rome_text_size/tests/auto_traits.rs
use { rome_text_size::*, static_assertions::*, std::{ fmt::Debug, hash::Hash, marker::{Send, Sync}, panic::{RefUnwindSafe, UnwindSafe}, }, }; // auto traits assert_impl_all!(TextSize: Send, Sync, Unpin, UnwindSafe, RefUnwindSafe); assert_impl_all!(TextRange: Send, Sync, Unpin, UnwindSafe, RefUnwindSafe); // common traits assert_impl_all!(TextSize: Copy, Debug, Default, Hash, Ord); assert_impl_all!(TextRange: Copy, Debug, Default, Hash, Eq);
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_json_syntax/src/lib.rs
crates/rome_json_syntax/src/lib.rs
#[macro_use] mod generated; mod file_source; pub mod member_ext; pub mod string_ext; mod syntax_node; pub use self::generated::*; pub use file_source::JsonFileSource; pub use rome_rowan::{TextLen, TextRange, TextSize, TokenAtOffset, TriviaPieceKind, WalkEvent}; pub use syntax_node::*; use rome_rowan::{RawSyntaxKind, TokenText}; impl From<u16> for JsonSyntaxKind { fn from(d: u16) -> JsonSyntaxKind { assert!(d <= (JsonSyntaxKind::__LAST as u16)); unsafe { std::mem::transmute::<u16, JsonSyntaxKind>(d) } } } impl From<JsonSyntaxKind> for u16 { fn from(k: JsonSyntaxKind) -> u16 { k as u16 } } impl JsonSyntaxKind { pub fn is_trivia(self) -> bool { matches!(self, JsonSyntaxKind::NEWLINE | JsonSyntaxKind::WHITESPACE) } pub fn is_comments(self) -> bool { matches!( self, JsonSyntaxKind::COMMENT | JsonSyntaxKind::MULTILINE_COMMENT ) } #[inline] pub const fn is_keyword(self) -> bool { matches!(self, T![null] | T![true] | T![false]) } } impl rome_rowan::SyntaxKind for JsonSyntaxKind { const TOMBSTONE: Self = JsonSyntaxKind::TOMBSTONE; const EOF: Self = JsonSyntaxKind::EOF; fn is_bogus(&self) -> bool { matches!( self, JsonSyntaxKind::JSON_BOGUS | JsonSyntaxKind::JSON_BOGUS_VALUE ) } fn to_bogus(&self) -> Self { match self { JsonSyntaxKind::JSON_NUMBER_VALUE | JsonSyntaxKind::JSON_STRING_VALUE | JsonSyntaxKind::JSON_BOOLEAN_VALUE | JsonSyntaxKind::JSON_NULL_VALUE | JsonSyntaxKind::JSON_ARRAY_VALUE | JsonSyntaxKind::JSON_OBJECT_VALUE | JsonSyntaxKind::JSON_BOGUS_VALUE => JsonSyntaxKind::JSON_BOGUS_VALUE, _ => JsonSyntaxKind::JSON_BOGUS, } } #[inline] fn to_raw(&self) -> RawSyntaxKind { RawSyntaxKind(*self as u16) } #[inline] fn from_raw(raw: RawSyntaxKind) -> Self { Self::from(raw.0) } fn is_root(&self) -> bool { matches!(self, JsonSyntaxKind::JSON_ROOT) } fn is_list(&self) -> bool { JsonSyntaxKind::is_list(*self) } fn to_string(&self) -> Option<&'static str> { JsonSyntaxKind::to_string(self) } } impl TryFrom<JsonSyntaxKind> for TriviaPieceKind { type Error = (); fn 
try_from(value: JsonSyntaxKind) -> Result<Self, Self::Error> { if value.is_trivia() { match value { JsonSyntaxKind::NEWLINE => Ok(TriviaPieceKind::Newline), JsonSyntaxKind::WHITESPACE => Ok(TriviaPieceKind::Whitespace), _ => unreachable!("Not Trivia"), } } else if value.is_comments() { match value { JsonSyntaxKind::COMMENT => Ok(TriviaPieceKind::SingleLineComment), JsonSyntaxKind::MULTILINE_COMMENT => Ok(TriviaPieceKind::MultiLineComment), _ => unreachable!("Not Comment"), } } else { Err(()) } } } /// Text of `token`, excluding all trivia and removing quotes if `token` is a string literal. pub fn inner_string_text(token: &JsonSyntaxToken) -> TokenText { let mut text = token.token_text_trimmed(); if token.kind() == JsonSyntaxKind::JSON_STRING_LITERAL { // remove string delimiters // SAFETY: string literal token have a delimiters at the start and the end of the string let range = TextRange::new(1.into(), text.len() - TextSize::from(1)); text = text.slice(range); } text }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_json_syntax/src/syntax_node.rs
crates/rome_json_syntax/src/syntax_node.rs
//! This module defines the Concrete Syntax Tree used by Rome. //! //! The tree is entirely lossless, whitespace, comments, and errors are preserved. //! It also provides traversal methods including parent, children, and siblings of nodes. //! //! This is a simple wrapper around the `rowan` crate which does most of the heavy lifting and is language agnostic. use crate::{AnyJsonValue, JsonSyntaxKind}; use rome_rowan::Language; #[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] pub struct JsonLanguage; impl Language for JsonLanguage { type Kind = JsonSyntaxKind; type Root = AnyJsonValue; } pub type JsonSyntaxNode = rome_rowan::SyntaxNode<JsonLanguage>; pub type JsonSyntaxToken = rome_rowan::SyntaxToken<JsonLanguage>; pub type JsonSyntaxElement = rome_rowan::SyntaxElement<JsonLanguage>; pub type JsonSyntaxNodeChildren = rome_rowan::SyntaxNodeChildren<JsonLanguage>; pub type JsonSyntaxElementChildren = rome_rowan::SyntaxElementChildren<JsonLanguage>; pub type JsonSyntaxList = rome_rowan::SyntaxList<JsonLanguage>;
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_json_syntax/src/file_source.rs
crates/rome_json_syntax/src/file_source.rs
use crate::JsonLanguage; use rome_rowan::{FileSource, FileSourceError}; use std::path::Path; #[derive(Debug, Default)] pub struct JsonFileSource { #[allow(dead_code)] variant: JsonVariant, } #[derive(Debug, Default)] enum JsonVariant { #[default] Standard, #[allow(dead_code)] Jsonc, } impl JsonFileSource { pub fn json() -> Self { Self { variant: JsonVariant::Standard, } } pub fn jsonc() -> Self { Self { variant: JsonVariant::Jsonc, } } pub const fn is_jsonc(&self) -> bool { matches!(self.variant, JsonVariant::Jsonc) } } impl<'a> FileSource<'a, JsonLanguage> for JsonFileSource {} impl TryFrom<&Path> for JsonFileSource { type Error = FileSourceError; fn try_from(path: &Path) -> Result<Self, Self::Error> { let file_name = path .file_name() .ok_or_else(|| FileSourceError::MissingFileName(path.into()))? .to_str() .ok_or_else(|| FileSourceError::MissingFileName(path.into()))?; let extension = path .extension() .ok_or_else(|| FileSourceError::MissingFileExtension(path.into()))? .to_str() .ok_or_else(|| FileSourceError::MissingFileExtension(path.into()))?; compute_source_type_from_path_or_extension(file_name, extension) } } /// It deduce the [JsonFileSource] from the file name and its extension fn compute_source_type_from_path_or_extension( file_name: &str, extension: &str, ) -> Result<JsonFileSource, FileSourceError> { let source_type = if file_name.ends_with(".json") { JsonFileSource::json() } else { match extension { "json" => JsonFileSource::json(), "jsonc" => JsonFileSource::jsonc(), _ => { return Err(FileSourceError::UnknownExtension( file_name.into(), extension.into(), )) } } }; Ok(source_type) }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_json_syntax/src/member_ext.rs
crates/rome_json_syntax/src/member_ext.rs
use crate::{inner_string_text, JsonMemberName}; use rome_rowan::{SyntaxResult, TokenText}; impl JsonMemberName { pub fn inner_string_text(&self) -> SyntaxResult<TokenText> { Ok(inner_string_text(&self.value_token()?)) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_json_syntax/src/string_ext.rs
crates/rome_json_syntax/src/string_ext.rs
use crate::{inner_string_text, JsonStringValue}; use rome_rowan::{SyntaxResult, TokenText}; impl JsonStringValue { pub fn inner_string_text(&self) -> SyntaxResult<TokenText> { Ok(inner_string_text(&self.value_token()?)) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_json_syntax/src/generated.rs
crates/rome_json_syntax/src/generated.rs
#[rustfmt::skip] pub(super) mod nodes; #[rustfmt::skip] pub mod macros; #[macro_use] pub mod kind; pub use kind::*; pub use nodes::*;
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_json_syntax/src/generated/nodes_mut.rs
crates/rome_json_syntax/src/generated/nodes_mut.rs
//! Generated file, do not edit by hand, see `xtask/codegen` use crate::{generated::nodes::*, JsonSyntaxToken as SyntaxToken}; use rome_rowan::AstNode; use std::iter::once; impl JsonArrayValue { pub fn with_l_brack_token(self, element: SyntaxToken) -> Self { Self::unwrap_cast( self.syntax .splice_slots(0usize..=0usize, once(Some(element.into()))), ) } pub fn with_elements(self, element: Option<JsonArrayElementList>) -> Self { Self::unwrap_cast(self.syntax.splice_slots( 1usize..=1usize, once(element.map(|element| element.into_syntax().into())), )) } pub fn with_r_brack_token(self, element: SyntaxToken) -> Self { Self::unwrap_cast( self.syntax .splice_slots(2usize..=2usize, once(Some(element.into()))), ) } } impl JsonBooleanValue { pub fn with_value_token_token(self, element: SyntaxToken) -> Self { Self::unwrap_cast( self.syntax .splice_slots(0usize..=0usize, once(Some(element.into()))), ) } } impl JsonMember { pub fn with_name(self, element: JsonMemberName) -> Self { Self::unwrap_cast( self.syntax .splice_slots(0usize..=0usize, once(Some(element.into_syntax().into()))), ) } pub fn with_colon_token(self, element: SyntaxToken) -> Self { Self::unwrap_cast( self.syntax .splice_slots(1usize..=1usize, once(Some(element.into()))), ) } pub fn with_value(self, element: AnyJsonValue) -> Self { Self::unwrap_cast( self.syntax .splice_slots(2usize..=2usize, once(Some(element.into_syntax().into()))), ) } } impl JsonMemberName { pub fn with_value_token(self, element: SyntaxToken) -> Self { Self::unwrap_cast( self.syntax .splice_slots(0usize..=0usize, once(Some(element.into()))), ) } } impl JsonNullValue { pub fn with_value_token(self, element: SyntaxToken) -> Self { Self::unwrap_cast( self.syntax .splice_slots(0usize..=0usize, once(Some(element.into()))), ) } } impl JsonNumberValue { pub fn with_value_token(self, element: SyntaxToken) -> Self { Self::unwrap_cast( self.syntax .splice_slots(0usize..=0usize, once(Some(element.into()))), ) } } impl JsonObjectValue { pub fn 
with_l_curly_token(self, element: SyntaxToken) -> Self { Self::unwrap_cast( self.syntax .splice_slots(0usize..=0usize, once(Some(element.into()))), ) } pub fn with_json_member_list(self, element: Option<JsonMemberList>) -> Self { Self::unwrap_cast(self.syntax.splice_slots( 1usize..=1usize, once(element.map(|element| element.into_syntax().into())), )) } pub fn with_r_curly_token(self, element: SyntaxToken) -> Self { Self::unwrap_cast( self.syntax .splice_slots(2usize..=2usize, once(Some(element.into()))), ) } } impl JsonRoot { pub fn with_value(self, element: AnyJsonValue) -> Self { Self::unwrap_cast( self.syntax .splice_slots(0usize..=0usize, once(Some(element.into_syntax().into()))), ) } pub fn with_eof_token(self, element: SyntaxToken) -> Self { Self::unwrap_cast( self.syntax .splice_slots(1usize..=1usize, once(Some(element.into()))), ) } } impl JsonStringValue { pub fn with_value_token(self, element: SyntaxToken) -> Self { Self::unwrap_cast( self.syntax .splice_slots(0usize..=0usize, once(Some(element.into()))), ) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_json_syntax/src/generated/kind.rs
crates/rome_json_syntax/src/generated/kind.rs
//! Generated file, do not edit by hand, see `xtask/codegen` #![allow(clippy::all)] #![allow(bad_style, missing_docs, unreachable_pub)] #[doc = r" The kind of syntax node, e.g. `IDENT`, `FUNCTION_KW`, or `FOR_STMT`."] #[derive(Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] #[repr(u16)] pub enum JsonSyntaxKind { #[doc(hidden)] TOMBSTONE, #[doc = r" Marks the end of the file.May have trivia attached"] EOF, COLON, COMMA, L_PAREN, R_PAREN, L_CURLY, R_CURLY, L_BRACK, R_BRACK, NULL_KW, TRUE_KW, FALSE_KW, JSON_STRING_LITERAL, JSON_NUMBER_LITERAL, ERROR_TOKEN, NEWLINE, WHITESPACE, IDENT, COMMENT, MULTILINE_COMMENT, JSON_ROOT, JSON_NUMBER_VALUE, JSON_STRING_VALUE, JSON_BOOLEAN_VALUE, JSON_NULL_VALUE, JSON_ARRAY_VALUE, JSON_OBJECT_VALUE, JSON_MEMBER_LIST, JSON_MEMBER, JSON_MEMBER_NAME, JSON_ARRAY_ELEMENT_LIST, JSON_BOGUS, JSON_BOGUS_VALUE, #[doc(hidden)] __LAST, } use self::JsonSyntaxKind::*; impl JsonSyntaxKind { pub const fn is_punct(self) -> bool { match self { COLON | COMMA | L_PAREN | R_PAREN | L_CURLY | R_CURLY | L_BRACK | R_BRACK => true, _ => false, } } pub const fn is_literal(self) -> bool { match self { JSON_STRING_LITERAL | JSON_NUMBER_LITERAL => true, _ => false, } } pub const fn is_list(self) -> bool { match self { JSON_MEMBER_LIST | JSON_ARRAY_ELEMENT_LIST => true, _ => false, } } pub fn from_keyword(ident: &str) -> Option<JsonSyntaxKind> { let kw = match ident { "null" => NULL_KW, "true" => TRUE_KW, "false" => FALSE_KW, _ => return None, }; Some(kw) } pub const fn to_string(&self) -> Option<&'static str> { let tok = match self { COLON => ":", COMMA => ",", L_PAREN => "(", R_PAREN => ")", L_CURLY => "{", R_CURLY => "}", L_BRACK => "[", R_BRACK => "]", NULL_KW => "null", TRUE_KW => "true", FALSE_KW => "false", JSON_STRING_LITERAL => "string literal", _ => return None, }; Some(tok) } } #[doc = r" Utility macro for creating a SyntaxKind through simple macro syntax"] #[macro_export] macro_rules ! 
T { [:] => { $ crate :: JsonSyntaxKind :: COLON } ; [,] => { $ crate :: JsonSyntaxKind :: COMMA } ; ['('] => { $ crate :: JsonSyntaxKind :: L_PAREN } ; [')'] => { $ crate :: JsonSyntaxKind :: R_PAREN } ; ['{'] => { $ crate :: JsonSyntaxKind :: L_CURLY } ; ['}'] => { $ crate :: JsonSyntaxKind :: R_CURLY } ; ['['] => { $ crate :: JsonSyntaxKind :: L_BRACK } ; [']'] => { $ crate :: JsonSyntaxKind :: R_BRACK } ; [null] => { $ crate :: JsonSyntaxKind :: NULL_KW } ; [true] => { $ crate :: JsonSyntaxKind :: TRUE_KW } ; [false] => { $ crate :: JsonSyntaxKind :: FALSE_KW } ; [ident] => { $ crate :: JsonSyntaxKind :: IDENT } ; [EOF] => { $ crate :: JsonSyntaxKind :: EOF } ; [#] => { $ crate :: JsonSyntaxKind :: HASH } ; }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_json_syntax/src/generated/macros.rs
crates/rome_json_syntax/src/generated/macros.rs
//! Generated file, do not edit by hand, see `xtask/codegen` #[doc = r" Reconstruct an AstNode from a SyntaxNode"] #[doc = r""] #[doc = r" This macros performs a match over the [kind](rome_rowan::SyntaxNode::kind)"] #[doc = r" of the provided [rome_rowan::SyntaxNode] and constructs the appropriate"] #[doc = r" AstNode type for it, then execute the provided expression over it."] #[doc = r""] #[doc = r" # Examples"] #[doc = r""] #[doc = r" ```ignore"] #[doc = r" map_syntax_node!(syntax_node, node => node.format())"] #[doc = r" ```"] #[macro_export] macro_rules! map_syntax_node { ($ node : expr , $ pattern : pat => $ body : expr) => { match $node { node => match $crate::JsonSyntaxNode::kind(&node) { $crate::JsonSyntaxKind::JSON_ARRAY_VALUE => { let $pattern = unsafe { $crate::JsonArrayValue::new_unchecked(node) }; $body } $crate::JsonSyntaxKind::JSON_BOOLEAN_VALUE => { let $pattern = unsafe { $crate::JsonBooleanValue::new_unchecked(node) }; $body } $crate::JsonSyntaxKind::JSON_MEMBER => { let $pattern = unsafe { $crate::JsonMember::new_unchecked(node) }; $body } $crate::JsonSyntaxKind::JSON_MEMBER_NAME => { let $pattern = unsafe { $crate::JsonMemberName::new_unchecked(node) }; $body } $crate::JsonSyntaxKind::JSON_NULL_VALUE => { let $pattern = unsafe { $crate::JsonNullValue::new_unchecked(node) }; $body } $crate::JsonSyntaxKind::JSON_NUMBER_VALUE => { let $pattern = unsafe { $crate::JsonNumberValue::new_unchecked(node) }; $body } $crate::JsonSyntaxKind::JSON_OBJECT_VALUE => { let $pattern = unsafe { $crate::JsonObjectValue::new_unchecked(node) }; $body } $crate::JsonSyntaxKind::JSON_ROOT => { let $pattern = unsafe { $crate::JsonRoot::new_unchecked(node) }; $body } $crate::JsonSyntaxKind::JSON_STRING_VALUE => { let $pattern = unsafe { $crate::JsonStringValue::new_unchecked(node) }; $body } $crate::JsonSyntaxKind::JSON_BOGUS => { let $pattern = unsafe { $crate::JsonBogus::new_unchecked(node) }; $body } $crate::JsonSyntaxKind::JSON_BOGUS_VALUE => { let $pattern = unsafe 
{ $crate::JsonBogusValue::new_unchecked(node) }; $body } $crate::JsonSyntaxKind::JSON_ARRAY_ELEMENT_LIST => { let $pattern = unsafe { $crate::JsonArrayElementList::new_unchecked(node) }; $body } $crate::JsonSyntaxKind::JSON_MEMBER_LIST => { let $pattern = unsafe { $crate::JsonMemberList::new_unchecked(node) }; $body } _ => unreachable!(), }, } }; } pub(crate) use map_syntax_node;
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_json_syntax/src/generated/nodes.rs
crates/rome_json_syntax/src/generated/nodes.rs
//! Generated file, do not edit by hand, see `xtask/codegen` #![allow(clippy::enum_variant_names)] #![allow(clippy::match_like_matches_macro)] use crate::{ macros::map_syntax_node, JsonLanguage as Language, JsonSyntaxElement as SyntaxElement, JsonSyntaxElementChildren as SyntaxElementChildren, JsonSyntaxKind::{self as SyntaxKind, *}, JsonSyntaxList as SyntaxList, JsonSyntaxNode as SyntaxNode, JsonSyntaxToken as SyntaxToken, }; use rome_rowan::{support, AstNode, RawSyntaxKind, SyntaxKindSet, SyntaxResult}; #[allow(unused)] use rome_rowan::{ AstNodeList, AstNodeListIterator, AstSeparatedList, AstSeparatedListNodesIterator, }; #[cfg(feature = "serde")] use serde::ser::SerializeSeq; #[cfg(feature = "serde")] use serde::{Serialize, Serializer}; use std::fmt::{Debug, Formatter}; #[derive(Clone, PartialEq, Eq, Hash)] pub struct JsonArrayValue { pub(crate) syntax: SyntaxNode, } impl JsonArrayValue { #[doc = r" Create an AstNode from a SyntaxNode without checking its kind"] #[doc = r""] #[doc = r" # Safety"] #[doc = r" This function must be guarded with a call to [AstNode::can_cast]"] #[doc = r" or a match on [SyntaxNode::kind]"] #[inline] pub const unsafe fn new_unchecked(syntax: SyntaxNode) -> Self { Self { syntax } } pub fn as_fields(&self) -> JsonArrayValueFields { JsonArrayValueFields { l_brack_token: self.l_brack_token(), elements: self.elements(), r_brack_token: self.r_brack_token(), } } pub fn l_brack_token(&self) -> SyntaxResult<SyntaxToken> { support::required_token(&self.syntax, 0usize) } pub fn elements(&self) -> JsonArrayElementList { support::list(&self.syntax, 1usize) } pub fn r_brack_token(&self) -> SyntaxResult<SyntaxToken> { support::required_token(&self.syntax, 2usize) } } #[cfg(feature = "serde")] impl Serialize for JsonArrayValue { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.as_fields().serialize(serializer) } } #[cfg_attr(feature = "serde", derive(Serialize))] pub struct JsonArrayValueFields { pub 
l_brack_token: SyntaxResult<SyntaxToken>, pub elements: JsonArrayElementList, pub r_brack_token: SyntaxResult<SyntaxToken>, } #[derive(Clone, PartialEq, Eq, Hash)] pub struct JsonBooleanValue { pub(crate) syntax: SyntaxNode, } impl JsonBooleanValue { #[doc = r" Create an AstNode from a SyntaxNode without checking its kind"] #[doc = r""] #[doc = r" # Safety"] #[doc = r" This function must be guarded with a call to [AstNode::can_cast]"] #[doc = r" or a match on [SyntaxNode::kind]"] #[inline] pub const unsafe fn new_unchecked(syntax: SyntaxNode) -> Self { Self { syntax } } pub fn as_fields(&self) -> JsonBooleanValueFields { JsonBooleanValueFields { value_token: self.value_token(), } } pub fn value_token(&self) -> SyntaxResult<SyntaxToken> { support::required_token(&self.syntax, 0usize) } } #[cfg(feature = "serde")] impl Serialize for JsonBooleanValue { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.as_fields().serialize(serializer) } } #[cfg_attr(feature = "serde", derive(Serialize))] pub struct JsonBooleanValueFields { pub value_token: SyntaxResult<SyntaxToken>, } #[derive(Clone, PartialEq, Eq, Hash)] pub struct JsonMember { pub(crate) syntax: SyntaxNode, } impl JsonMember { #[doc = r" Create an AstNode from a SyntaxNode without checking its kind"] #[doc = r""] #[doc = r" # Safety"] #[doc = r" This function must be guarded with a call to [AstNode::can_cast]"] #[doc = r" or a match on [SyntaxNode::kind]"] #[inline] pub const unsafe fn new_unchecked(syntax: SyntaxNode) -> Self { Self { syntax } } pub fn as_fields(&self) -> JsonMemberFields { JsonMemberFields { name: self.name(), colon_token: self.colon_token(), value: self.value(), } } pub fn name(&self) -> SyntaxResult<JsonMemberName> { support::required_node(&self.syntax, 0usize) } pub fn colon_token(&self) -> SyntaxResult<SyntaxToken> { support::required_token(&self.syntax, 1usize) } pub fn value(&self) -> SyntaxResult<AnyJsonValue> { support::required_node(&self.syntax, 
2usize) } } #[cfg(feature = "serde")] impl Serialize for JsonMember { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.as_fields().serialize(serializer) } } #[cfg_attr(feature = "serde", derive(Serialize))] pub struct JsonMemberFields { pub name: SyntaxResult<JsonMemberName>, pub colon_token: SyntaxResult<SyntaxToken>, pub value: SyntaxResult<AnyJsonValue>, } #[derive(Clone, PartialEq, Eq, Hash)] pub struct JsonMemberName { pub(crate) syntax: SyntaxNode, } impl JsonMemberName { #[doc = r" Create an AstNode from a SyntaxNode without checking its kind"] #[doc = r""] #[doc = r" # Safety"] #[doc = r" This function must be guarded with a call to [AstNode::can_cast]"] #[doc = r" or a match on [SyntaxNode::kind]"] #[inline] pub const unsafe fn new_unchecked(syntax: SyntaxNode) -> Self { Self { syntax } } pub fn as_fields(&self) -> JsonMemberNameFields { JsonMemberNameFields { value_token: self.value_token(), } } pub fn value_token(&self) -> SyntaxResult<SyntaxToken> { support::required_token(&self.syntax, 0usize) } } #[cfg(feature = "serde")] impl Serialize for JsonMemberName { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.as_fields().serialize(serializer) } } #[cfg_attr(feature = "serde", derive(Serialize))] pub struct JsonMemberNameFields { pub value_token: SyntaxResult<SyntaxToken>, } #[derive(Clone, PartialEq, Eq, Hash)] pub struct JsonNullValue { pub(crate) syntax: SyntaxNode, } impl JsonNullValue { #[doc = r" Create an AstNode from a SyntaxNode without checking its kind"] #[doc = r""] #[doc = r" # Safety"] #[doc = r" This function must be guarded with a call to [AstNode::can_cast]"] #[doc = r" or a match on [SyntaxNode::kind]"] #[inline] pub const unsafe fn new_unchecked(syntax: SyntaxNode) -> Self { Self { syntax } } pub fn as_fields(&self) -> JsonNullValueFields { JsonNullValueFields { value_token: self.value_token(), } } pub fn value_token(&self) -> 
SyntaxResult<SyntaxToken> { support::required_token(&self.syntax, 0usize) } } #[cfg(feature = "serde")] impl Serialize for JsonNullValue { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.as_fields().serialize(serializer) } } #[cfg_attr(feature = "serde", derive(Serialize))] pub struct JsonNullValueFields { pub value_token: SyntaxResult<SyntaxToken>, } #[derive(Clone, PartialEq, Eq, Hash)] pub struct JsonNumberValue { pub(crate) syntax: SyntaxNode, } impl JsonNumberValue { #[doc = r" Create an AstNode from a SyntaxNode without checking its kind"] #[doc = r""] #[doc = r" # Safety"] #[doc = r" This function must be guarded with a call to [AstNode::can_cast]"] #[doc = r" or a match on [SyntaxNode::kind]"] #[inline] pub const unsafe fn new_unchecked(syntax: SyntaxNode) -> Self { Self { syntax } } pub fn as_fields(&self) -> JsonNumberValueFields { JsonNumberValueFields { value_token: self.value_token(), } } pub fn value_token(&self) -> SyntaxResult<SyntaxToken> { support::required_token(&self.syntax, 0usize) } } #[cfg(feature = "serde")] impl Serialize for JsonNumberValue { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.as_fields().serialize(serializer) } } #[cfg_attr(feature = "serde", derive(Serialize))] pub struct JsonNumberValueFields { pub value_token: SyntaxResult<SyntaxToken>, } #[derive(Clone, PartialEq, Eq, Hash)] pub struct JsonObjectValue { pub(crate) syntax: SyntaxNode, } impl JsonObjectValue { #[doc = r" Create an AstNode from a SyntaxNode without checking its kind"] #[doc = r""] #[doc = r" # Safety"] #[doc = r" This function must be guarded with a call to [AstNode::can_cast]"] #[doc = r" or a match on [SyntaxNode::kind]"] #[inline] pub const unsafe fn new_unchecked(syntax: SyntaxNode) -> Self { Self { syntax } } pub fn as_fields(&self) -> JsonObjectValueFields { JsonObjectValueFields { l_curly_token: self.l_curly_token(), json_member_list: self.json_member_list(), 
r_curly_token: self.r_curly_token(), } } pub fn l_curly_token(&self) -> SyntaxResult<SyntaxToken> { support::required_token(&self.syntax, 0usize) } pub fn json_member_list(&self) -> JsonMemberList { support::list(&self.syntax, 1usize) } pub fn r_curly_token(&self) -> SyntaxResult<SyntaxToken> { support::required_token(&self.syntax, 2usize) } } #[cfg(feature = "serde")] impl Serialize for JsonObjectValue { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.as_fields().serialize(serializer) } } #[cfg_attr(feature = "serde", derive(Serialize))] pub struct JsonObjectValueFields { pub l_curly_token: SyntaxResult<SyntaxToken>, pub json_member_list: JsonMemberList, pub r_curly_token: SyntaxResult<SyntaxToken>, } #[derive(Clone, PartialEq, Eq, Hash)] pub struct JsonRoot { pub(crate) syntax: SyntaxNode, } impl JsonRoot { #[doc = r" Create an AstNode from a SyntaxNode without checking its kind"] #[doc = r""] #[doc = r" # Safety"] #[doc = r" This function must be guarded with a call to [AstNode::can_cast]"] #[doc = r" or a match on [SyntaxNode::kind]"] #[inline] pub const unsafe fn new_unchecked(syntax: SyntaxNode) -> Self { Self { syntax } } pub fn as_fields(&self) -> JsonRootFields { JsonRootFields { value: self.value(), eof_token: self.eof_token(), } } pub fn value(&self) -> SyntaxResult<AnyJsonValue> { support::required_node(&self.syntax, 0usize) } pub fn eof_token(&self) -> SyntaxResult<SyntaxToken> { support::required_token(&self.syntax, 1usize) } } #[cfg(feature = "serde")] impl Serialize for JsonRoot { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.as_fields().serialize(serializer) } } #[cfg_attr(feature = "serde", derive(Serialize))] pub struct JsonRootFields { pub value: SyntaxResult<AnyJsonValue>, pub eof_token: SyntaxResult<SyntaxToken>, } #[derive(Clone, PartialEq, Eq, Hash)] pub struct JsonStringValue { pub(crate) syntax: SyntaxNode, } impl JsonStringValue { #[doc = r" Create 
an AstNode from a SyntaxNode without checking its kind"] #[doc = r""] #[doc = r" # Safety"] #[doc = r" This function must be guarded with a call to [AstNode::can_cast]"] #[doc = r" or a match on [SyntaxNode::kind]"] #[inline] pub const unsafe fn new_unchecked(syntax: SyntaxNode) -> Self { Self { syntax } } pub fn as_fields(&self) -> JsonStringValueFields { JsonStringValueFields { value_token: self.value_token(), } } pub fn value_token(&self) -> SyntaxResult<SyntaxToken> { support::required_token(&self.syntax, 0usize) } } #[cfg(feature = "serde")] impl Serialize for JsonStringValue { fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> where S: Serializer, { self.as_fields().serialize(serializer) } } #[cfg_attr(feature = "serde", derive(Serialize))] pub struct JsonStringValueFields { pub value_token: SyntaxResult<SyntaxToken>, } #[derive(Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(Serialize))] pub enum AnyJsonValue { JsonArrayValue(JsonArrayValue), JsonBogusValue(JsonBogusValue), JsonBooleanValue(JsonBooleanValue), JsonNullValue(JsonNullValue), JsonNumberValue(JsonNumberValue), JsonObjectValue(JsonObjectValue), JsonStringValue(JsonStringValue), } impl AnyJsonValue { pub fn as_json_array_value(&self) -> Option<&JsonArrayValue> { match &self { AnyJsonValue::JsonArrayValue(item) => Some(item), _ => None, } } pub fn as_json_bogus_value(&self) -> Option<&JsonBogusValue> { match &self { AnyJsonValue::JsonBogusValue(item) => Some(item), _ => None, } } pub fn as_json_boolean_value(&self) -> Option<&JsonBooleanValue> { match &self { AnyJsonValue::JsonBooleanValue(item) => Some(item), _ => None, } } pub fn as_json_null_value(&self) -> Option<&JsonNullValue> { match &self { AnyJsonValue::JsonNullValue(item) => Some(item), _ => None, } } pub fn as_json_number_value(&self) -> Option<&JsonNumberValue> { match &self { AnyJsonValue::JsonNumberValue(item) => Some(item), _ => None, } } pub fn as_json_object_value(&self) -> Option<&JsonObjectValue> { 
match &self { AnyJsonValue::JsonObjectValue(item) => Some(item), _ => None, } } pub fn as_json_string_value(&self) -> Option<&JsonStringValue> { match &self { AnyJsonValue::JsonStringValue(item) => Some(item), _ => None, } } } impl AstNode for JsonArrayValue { type Language = Language; const KIND_SET: SyntaxKindSet<Language> = SyntaxKindSet::from_raw(RawSyntaxKind(JSON_ARRAY_VALUE as u16)); fn can_cast(kind: SyntaxKind) -> bool { kind == JSON_ARRAY_VALUE } fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None } } fn syntax(&self) -> &SyntaxNode { &self.syntax } fn into_syntax(self) -> SyntaxNode { self.syntax } } impl std::fmt::Debug for JsonArrayValue { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("JsonArrayValue") .field( "l_brack_token", &support::DebugSyntaxResult(self.l_brack_token()), ) .field("elements", &self.elements()) .field( "r_brack_token", &support::DebugSyntaxResult(self.r_brack_token()), ) .finish() } } impl From<JsonArrayValue> for SyntaxNode { fn from(n: JsonArrayValue) -> SyntaxNode { n.syntax } } impl From<JsonArrayValue> for SyntaxElement { fn from(n: JsonArrayValue) -> SyntaxElement { n.syntax.into() } } impl AstNode for JsonBooleanValue { type Language = Language; const KIND_SET: SyntaxKindSet<Language> = SyntaxKindSet::from_raw(RawSyntaxKind(JSON_BOOLEAN_VALUE as u16)); fn can_cast(kind: SyntaxKind) -> bool { kind == JSON_BOOLEAN_VALUE } fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None } } fn syntax(&self) -> &SyntaxNode { &self.syntax } fn into_syntax(self) -> SyntaxNode { self.syntax } } impl std::fmt::Debug for JsonBooleanValue { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("JsonBooleanValue") .field( "value_token", &support::DebugSyntaxResult(self.value_token()), ) .finish() } } impl From<JsonBooleanValue> for SyntaxNode { fn 
from(n: JsonBooleanValue) -> SyntaxNode { n.syntax } } impl From<JsonBooleanValue> for SyntaxElement { fn from(n: JsonBooleanValue) -> SyntaxElement { n.syntax.into() } } impl AstNode for JsonMember { type Language = Language; const KIND_SET: SyntaxKindSet<Language> = SyntaxKindSet::from_raw(RawSyntaxKind(JSON_MEMBER as u16)); fn can_cast(kind: SyntaxKind) -> bool { kind == JSON_MEMBER } fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None } } fn syntax(&self) -> &SyntaxNode { &self.syntax } fn into_syntax(self) -> SyntaxNode { self.syntax } } impl std::fmt::Debug for JsonMember { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("JsonMember") .field("name", &support::DebugSyntaxResult(self.name())) .field( "colon_token", &support::DebugSyntaxResult(self.colon_token()), ) .field("value", &support::DebugSyntaxResult(self.value())) .finish() } } impl From<JsonMember> for SyntaxNode { fn from(n: JsonMember) -> SyntaxNode { n.syntax } } impl From<JsonMember> for SyntaxElement { fn from(n: JsonMember) -> SyntaxElement { n.syntax.into() } } impl AstNode for JsonMemberName { type Language = Language; const KIND_SET: SyntaxKindSet<Language> = SyntaxKindSet::from_raw(RawSyntaxKind(JSON_MEMBER_NAME as u16)); fn can_cast(kind: SyntaxKind) -> bool { kind == JSON_MEMBER_NAME } fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None } } fn syntax(&self) -> &SyntaxNode { &self.syntax } fn into_syntax(self) -> SyntaxNode { self.syntax } } impl std::fmt::Debug for JsonMemberName { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("JsonMemberName") .field( "value_token", &support::DebugSyntaxResult(self.value_token()), ) .finish() } } impl From<JsonMemberName> for SyntaxNode { fn from(n: JsonMemberName) -> SyntaxNode { n.syntax } } impl From<JsonMemberName> for SyntaxElement { fn from(n: 
JsonMemberName) -> SyntaxElement { n.syntax.into() } } impl AstNode for JsonNullValue { type Language = Language; const KIND_SET: SyntaxKindSet<Language> = SyntaxKindSet::from_raw(RawSyntaxKind(JSON_NULL_VALUE as u16)); fn can_cast(kind: SyntaxKind) -> bool { kind == JSON_NULL_VALUE } fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None } } fn syntax(&self) -> &SyntaxNode { &self.syntax } fn into_syntax(self) -> SyntaxNode { self.syntax } } impl std::fmt::Debug for JsonNullValue { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("JsonNullValue") .field( "value_token", &support::DebugSyntaxResult(self.value_token()), ) .finish() } } impl From<JsonNullValue> for SyntaxNode { fn from(n: JsonNullValue) -> SyntaxNode { n.syntax } } impl From<JsonNullValue> for SyntaxElement { fn from(n: JsonNullValue) -> SyntaxElement { n.syntax.into() } } impl AstNode for JsonNumberValue { type Language = Language; const KIND_SET: SyntaxKindSet<Language> = SyntaxKindSet::from_raw(RawSyntaxKind(JSON_NUMBER_VALUE as u16)); fn can_cast(kind: SyntaxKind) -> bool { kind == JSON_NUMBER_VALUE } fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None } } fn syntax(&self) -> &SyntaxNode { &self.syntax } fn into_syntax(self) -> SyntaxNode { self.syntax } } impl std::fmt::Debug for JsonNumberValue { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("JsonNumberValue") .field( "value_token", &support::DebugSyntaxResult(self.value_token()), ) .finish() } } impl From<JsonNumberValue> for SyntaxNode { fn from(n: JsonNumberValue) -> SyntaxNode { n.syntax } } impl From<JsonNumberValue> for SyntaxElement { fn from(n: JsonNumberValue) -> SyntaxElement { n.syntax.into() } } impl AstNode for JsonObjectValue { type Language = Language; const KIND_SET: SyntaxKindSet<Language> = 
SyntaxKindSet::from_raw(RawSyntaxKind(JSON_OBJECT_VALUE as u16)); fn can_cast(kind: SyntaxKind) -> bool { kind == JSON_OBJECT_VALUE } fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None } } fn syntax(&self) -> &SyntaxNode { &self.syntax } fn into_syntax(self) -> SyntaxNode { self.syntax } } impl std::fmt::Debug for JsonObjectValue { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("JsonObjectValue") .field( "l_curly_token", &support::DebugSyntaxResult(self.l_curly_token()), ) .field("json_member_list", &self.json_member_list()) .field( "r_curly_token", &support::DebugSyntaxResult(self.r_curly_token()), ) .finish() } } impl From<JsonObjectValue> for SyntaxNode { fn from(n: JsonObjectValue) -> SyntaxNode { n.syntax } } impl From<JsonObjectValue> for SyntaxElement { fn from(n: JsonObjectValue) -> SyntaxElement { n.syntax.into() } } impl AstNode for JsonRoot { type Language = Language; const KIND_SET: SyntaxKindSet<Language> = SyntaxKindSet::from_raw(RawSyntaxKind(JSON_ROOT as u16)); fn can_cast(kind: SyntaxKind) -> bool { kind == JSON_ROOT } fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None } } fn syntax(&self) -> &SyntaxNode { &self.syntax } fn into_syntax(self) -> SyntaxNode { self.syntax } } impl std::fmt::Debug for JsonRoot { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("JsonRoot") .field("value", &support::DebugSyntaxResult(self.value())) .field("eof_token", &support::DebugSyntaxResult(self.eof_token())) .finish() } } impl From<JsonRoot> for SyntaxNode { fn from(n: JsonRoot) -> SyntaxNode { n.syntax } } impl From<JsonRoot> for SyntaxElement { fn from(n: JsonRoot) -> SyntaxElement { n.syntax.into() } } impl AstNode for JsonStringValue { type Language = Language; const KIND_SET: SyntaxKindSet<Language> = SyntaxKindSet::from_raw(RawSyntaxKind(JSON_STRING_VALUE as 
u16)); fn can_cast(kind: SyntaxKind) -> bool { kind == JSON_STRING_VALUE } fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None } } fn syntax(&self) -> &SyntaxNode { &self.syntax } fn into_syntax(self) -> SyntaxNode { self.syntax } } impl std::fmt::Debug for JsonStringValue { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("JsonStringValue") .field( "value_token", &support::DebugSyntaxResult(self.value_token()), ) .finish() } } impl From<JsonStringValue> for SyntaxNode { fn from(n: JsonStringValue) -> SyntaxNode { n.syntax } } impl From<JsonStringValue> for SyntaxElement { fn from(n: JsonStringValue) -> SyntaxElement { n.syntax.into() } } impl From<JsonArrayValue> for AnyJsonValue { fn from(node: JsonArrayValue) -> AnyJsonValue { AnyJsonValue::JsonArrayValue(node) } } impl From<JsonBogusValue> for AnyJsonValue { fn from(node: JsonBogusValue) -> AnyJsonValue { AnyJsonValue::JsonBogusValue(node) } } impl From<JsonBooleanValue> for AnyJsonValue { fn from(node: JsonBooleanValue) -> AnyJsonValue { AnyJsonValue::JsonBooleanValue(node) } } impl From<JsonNullValue> for AnyJsonValue { fn from(node: JsonNullValue) -> AnyJsonValue { AnyJsonValue::JsonNullValue(node) } } impl From<JsonNumberValue> for AnyJsonValue { fn from(node: JsonNumberValue) -> AnyJsonValue { AnyJsonValue::JsonNumberValue(node) } } impl From<JsonObjectValue> for AnyJsonValue { fn from(node: JsonObjectValue) -> AnyJsonValue { AnyJsonValue::JsonObjectValue(node) } } impl From<JsonStringValue> for AnyJsonValue { fn from(node: JsonStringValue) -> AnyJsonValue { AnyJsonValue::JsonStringValue(node) } } impl AstNode for AnyJsonValue { type Language = Language; const KIND_SET: SyntaxKindSet<Language> = JsonArrayValue::KIND_SET .union(JsonBogusValue::KIND_SET) .union(JsonBooleanValue::KIND_SET) .union(JsonNullValue::KIND_SET) .union(JsonNumberValue::KIND_SET) .union(JsonObjectValue::KIND_SET) 
.union(JsonStringValue::KIND_SET); fn can_cast(kind: SyntaxKind) -> bool { matches!( kind, JSON_ARRAY_VALUE | JSON_BOGUS_VALUE | JSON_BOOLEAN_VALUE | JSON_NULL_VALUE | JSON_NUMBER_VALUE | JSON_OBJECT_VALUE | JSON_STRING_VALUE ) } fn cast(syntax: SyntaxNode) -> Option<Self> { let res = match syntax.kind() { JSON_ARRAY_VALUE => AnyJsonValue::JsonArrayValue(JsonArrayValue { syntax }), JSON_BOGUS_VALUE => AnyJsonValue::JsonBogusValue(JsonBogusValue { syntax }), JSON_BOOLEAN_VALUE => AnyJsonValue::JsonBooleanValue(JsonBooleanValue { syntax }), JSON_NULL_VALUE => AnyJsonValue::JsonNullValue(JsonNullValue { syntax }), JSON_NUMBER_VALUE => AnyJsonValue::JsonNumberValue(JsonNumberValue { syntax }), JSON_OBJECT_VALUE => AnyJsonValue::JsonObjectValue(JsonObjectValue { syntax }), JSON_STRING_VALUE => AnyJsonValue::JsonStringValue(JsonStringValue { syntax }), _ => return None, }; Some(res) } fn syntax(&self) -> &SyntaxNode { match self { AnyJsonValue::JsonArrayValue(it) => &it.syntax, AnyJsonValue::JsonBogusValue(it) => &it.syntax, AnyJsonValue::JsonBooleanValue(it) => &it.syntax, AnyJsonValue::JsonNullValue(it) => &it.syntax, AnyJsonValue::JsonNumberValue(it) => &it.syntax, AnyJsonValue::JsonObjectValue(it) => &it.syntax, AnyJsonValue::JsonStringValue(it) => &it.syntax, } } fn into_syntax(self) -> SyntaxNode { match self { AnyJsonValue::JsonArrayValue(it) => it.syntax, AnyJsonValue::JsonBogusValue(it) => it.syntax, AnyJsonValue::JsonBooleanValue(it) => it.syntax, AnyJsonValue::JsonNullValue(it) => it.syntax, AnyJsonValue::JsonNumberValue(it) => it.syntax, AnyJsonValue::JsonObjectValue(it) => it.syntax, AnyJsonValue::JsonStringValue(it) => it.syntax, } } } impl std::fmt::Debug for AnyJsonValue { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { match self { AnyJsonValue::JsonArrayValue(it) => std::fmt::Debug::fmt(it, f), AnyJsonValue::JsonBogusValue(it) => std::fmt::Debug::fmt(it, f), AnyJsonValue::JsonBooleanValue(it) => std::fmt::Debug::fmt(it, f), 
AnyJsonValue::JsonNullValue(it) => std::fmt::Debug::fmt(it, f), AnyJsonValue::JsonNumberValue(it) => std::fmt::Debug::fmt(it, f), AnyJsonValue::JsonObjectValue(it) => std::fmt::Debug::fmt(it, f), AnyJsonValue::JsonStringValue(it) => std::fmt::Debug::fmt(it, f), } } } impl From<AnyJsonValue> for SyntaxNode { fn from(n: AnyJsonValue) -> SyntaxNode { match n { AnyJsonValue::JsonArrayValue(it) => it.into(), AnyJsonValue::JsonBogusValue(it) => it.into(), AnyJsonValue::JsonBooleanValue(it) => it.into(), AnyJsonValue::JsonNullValue(it) => it.into(), AnyJsonValue::JsonNumberValue(it) => it.into(), AnyJsonValue::JsonObjectValue(it) => it.into(), AnyJsonValue::JsonStringValue(it) => it.into(), } } } impl From<AnyJsonValue> for SyntaxElement { fn from(n: AnyJsonValue) -> SyntaxElement { let node: SyntaxNode = n.into(); node.into() } } impl std::fmt::Display for AnyJsonValue { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } } impl std::fmt::Display for JsonArrayValue { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } } impl std::fmt::Display for JsonBooleanValue { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } } impl std::fmt::Display for JsonMember { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } } impl std::fmt::Display for JsonMemberName { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } } impl std::fmt::Display for JsonNullValue { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } } impl std::fmt::Display for JsonNumberValue { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } } impl std::fmt::Display for JsonObjectValue { fn fmt(&self, f: 
&mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } } impl std::fmt::Display for JsonRoot { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } } impl std::fmt::Display for JsonStringValue { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { std::fmt::Display::fmt(self.syntax(), f) } } #[derive(Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(Serialize))] pub struct JsonBogus { syntax: SyntaxNode, } impl JsonBogus { #[doc = r" Create an AstNode from a SyntaxNode without checking its kind"] #[doc = r""] #[doc = r" # Safety"] #[doc = r" This function must be guarded with a call to [AstNode::can_cast]"] #[doc = r" or a match on [SyntaxNode::kind]"] #[inline] pub const unsafe fn new_unchecked(syntax: SyntaxNode) -> Self { Self { syntax } } pub fn items(&self) -> SyntaxElementChildren { support::elements(&self.syntax) } } impl AstNode for JsonBogus { type Language = Language; const KIND_SET: SyntaxKindSet<Language> = SyntaxKindSet::from_raw(RawSyntaxKind(JSON_BOGUS as u16)); fn can_cast(kind: SyntaxKind) -> bool { kind == JSON_BOGUS } fn cast(syntax: SyntaxNode) -> Option<Self> { if Self::can_cast(syntax.kind()) { Some(Self { syntax }) } else { None } } fn syntax(&self) -> &SyntaxNode { &self.syntax } fn into_syntax(self) -> SyntaxNode { self.syntax } } impl std::fmt::Debug for JsonBogus { fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { f.debug_struct("JsonBogus") .field("items", &DebugSyntaxElementChildren(self.items())) .finish() } } impl From<JsonBogus> for SyntaxNode { fn from(n: JsonBogus) -> SyntaxNode { n.syntax } } impl From<JsonBogus> for SyntaxElement { fn from(n: JsonBogus) -> SyntaxElement { n.syntax.into() } } #[derive(Clone, PartialEq, Eq, Hash)] #[cfg_attr(feature = "serde", derive(Serialize))] pub struct JsonBogusValue { syntax: SyntaxNode, } impl JsonBogusValue { #[doc = r" Create an AstNode from 
a SyntaxNode without checking its kind"] #[doc = r""] #[doc = r" # Safety"] #[doc = r" This function must be guarded with a call to [AstNode::can_cast]"] #[doc = r" or a match on [SyntaxNode::kind]"] #[inline] pub const unsafe fn new_unchecked(syntax: SyntaxNode) -> Self { Self { syntax } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
true
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/settings.rs
crates/rome_service/src/settings.rs
use crate::{ configuration::FilesConfiguration, Configuration, ConfigurationDiagnostic, MatchOptions, Matcher, Rules, WorkspaceError, }; use indexmap::IndexSet; use rome_diagnostics::Category; use rome_formatter::{IndentStyle, LineWidth}; use rome_fs::RomePath; use rome_js_syntax::JsLanguage; use rome_json_syntax::JsonLanguage; use std::{ num::NonZeroU64, sync::{RwLock, RwLockReadGuard}, }; /// Global settings for the entire workspace #[derive(Debug, Default)] pub struct WorkspaceSettings { /// Formatter settings applied to all files in the workspaces pub formatter: FormatSettings, /// Linter settings applied to all files in the workspace pub linter: LinterSettings, /// Language specific settings pub languages: LanguagesSettings, /// Filesystem settings for the workspace pub files: FilesSettings, /// Analyzer settings pub organize_imports: OrganizeImportsSettings, } impl WorkspaceSettings { /// Retrieves the settings of the formatter pub fn formatter(&self) -> &FormatSettings { &self.formatter } /// Retrieves the settings of the linter pub fn linter(&self) -> &LinterSettings { &self.linter } /// Retrieves the settings of the organize imports pub fn organize_imports(&self) -> &OrganizeImportsSettings { &self.organize_imports } /// The (configuration)[Configuration] is merged into the workspace #[tracing::instrument(level = "debug", skip(self))] pub fn merge_with_configuration( &mut self, configuration: Configuration, ) -> Result<(), WorkspaceError> { // formatter part if let Some(formatter) = configuration.formatter { self.formatter = FormatSettings::try_from(formatter)?; } // linter part if let Some(linter) = configuration.linter { self.linter = LinterSettings::try_from(linter)?; } // Filesystem settings if let Some(files) = configuration.files { self.files = FilesSettings::try_from(files)?; } if let Some(organize_imports) = configuration.organize_imports { self.organize_imports = OrganizeImportsSettings::try_from(organize_imports)?; } // javascript settings let 
javascript = configuration.javascript; if let Some(javascript) = javascript { self.languages.javascript.globals = javascript.globals.map(|g| g.into_index_set()); let formatter = javascript.formatter; if let Some(formatter) = formatter { self.languages.javascript.formatter.quote_style = formatter.quote_style; self.languages.javascript.formatter.jsx_quote_style = formatter.jsx_quote_style; self.languages.javascript.formatter.quote_properties = formatter.quote_properties; self.languages.javascript.formatter.trailing_comma = formatter.trailing_comma; self.languages.javascript.formatter.semicolons = formatter.semicolons; self.languages.javascript.formatter.arrow_parentheses = formatter.arrow_parentheses; } if let Some(parser) = javascript.parser { self.languages .javascript .parser .parse_class_parameter_decorators = parser .unsafe_parameter_decorators_enabled .unwrap_or_default(); } let organize_imports = javascript.organize_imports; if let Some(_organize_imports) = organize_imports {} } // json settings let json = configuration.json; if let Some(json) = json { if let Some(parser) = json.parser { self.languages.json.parser.allow_comments = parser.allow_comments.unwrap_or_default(); } } Ok(()) } /// It retrieves the severity based on the `code` of the rule and the current configuration. /// /// The code of the has the following pattern: `{group}/{rule_name}`. /// /// It returns [None] if the `code` doesn't match any rule. 
pub fn get_severity_from_rule_code( &self, code: &Category, ) -> Option<rome_diagnostics::Severity> { let rules = self.linter.rules.as_ref(); if let Some(rules) = rules { rules.get_severity_from_code(code) } else { None } } } /// Formatter settings for the entire workspace #[derive(Debug)] pub struct FormatSettings { /// Enabled by default pub enabled: bool, /// Stores whether formatting should be allowed to proceed if a given file /// has syntax errors pub format_with_errors: bool, pub indent_style: Option<IndentStyle>, pub line_width: Option<LineWidth>, /// List of paths/files to matcher pub ignored_files: Matcher, } impl Default for FormatSettings { fn default() -> Self { Self { enabled: true, format_with_errors: false, indent_style: Some(IndentStyle::default()), line_width: Some(LineWidth::default()), ignored_files: Matcher::new(MatchOptions { case_sensitive: true, require_literal_leading_dot: false, require_literal_separator: false, }), } } } /// Linter settings for the entire workspace #[derive(Debug)] pub struct LinterSettings { /// Enabled by default pub enabled: bool, /// List of rules pub rules: Option<Rules>, /// List of paths/files to matcher pub ignored_files: Matcher, } impl Default for LinterSettings { fn default() -> Self { Self { enabled: true, rules: Some(Rules::default()), ignored_files: Matcher::new(MatchOptions { case_sensitive: true, require_literal_leading_dot: false, require_literal_separator: false, }), } } } /// Linter settings for the entire workspace #[derive(Debug)] pub struct OrganizeImportsSettings { /// Enabled by default pub enabled: bool, /// List of paths/files to matcher pub ignored_files: Matcher, } impl Default for OrganizeImportsSettings { fn default() -> Self { Self { // currently experimental enabled: false, ignored_files: Matcher::new(MatchOptions { case_sensitive: true, require_literal_leading_dot: false, require_literal_separator: false, }), } } } /// Static map of language names to language-specific settings 
#[derive(Debug, Default)] pub struct LanguagesSettings { pub javascript: LanguageSettings<JsLanguage>, pub json: LanguageSettings<JsonLanguage>, } pub trait Language: rome_rowan::Language { /// Formatter settings type for this language type FormatterSettings: Default; type LinterSettings: Default; /// Organize imports settings type for this language type OrganizeImportsSettings: Default; /// Fully resolved formatter options type for this language type FormatOptions: rome_formatter::FormatOptions; /// Settings that belong to the parser type ParserSettings: Default; /// Read the settings type for this language from the [LanguagesSettings] map fn lookup_settings(languages: &LanguagesSettings) -> &LanguageSettings<Self>; /// Resolve the formatter options from the global (workspace level), /// per-language and editor provided formatter settings fn resolve_format_options( global: &FormatSettings, language: &Self::FormatterSettings, path: &RomePath, ) -> Self::FormatOptions; } #[derive(Debug, Default)] pub struct LanguageSettings<L: Language> { /// Formatter settings for this language pub formatter: L::FormatterSettings, /// Linter settings for this language pub linter: L::LinterSettings, /// Globals variables/bindings that can be found in a file pub globals: Option<IndexSet<String>>, /// Organize imports settings for this language pub organize_imports: L::OrganizeImportsSettings, /// Parser settings for this language pub parser: L::ParserSettings, } /// Filesystem settings for the entire workspace #[derive(Debug)] pub struct FilesSettings { /// File size limit in bytes pub max_size: NonZeroU64, /// List of paths/files to matcher pub ignored_files: Matcher, /// Files not recognized by Rome should not emit a diagnostic pub ignore_unknown: bool, } /// Limit the size of files to 1.0 MiB by default const DEFAULT_FILE_SIZE_LIMIT: NonZeroU64 = // SAFETY: This constant is initialized with a non-zero value unsafe { NonZeroU64::new_unchecked(1024 * 1024) }; impl Default for 
FilesSettings { fn default() -> Self { Self { max_size: DEFAULT_FILE_SIZE_LIMIT, ignored_files: Matcher::new(MatchOptions { case_sensitive: true, require_literal_leading_dot: false, require_literal_separator: false, }), ignore_unknown: false, } } } impl TryFrom<FilesConfiguration> for FilesSettings { type Error = WorkspaceError; fn try_from(config: FilesConfiguration) -> Result<Self, Self::Error> { let mut matcher = Matcher::new(MatchOptions { case_sensitive: true, require_literal_leading_dot: false, require_literal_separator: false, }); if let Some(ignore) = config.ignore { for pattern in ignore.index_set() { matcher.add_pattern(pattern).map_err(|err| { WorkspaceError::Configuration( ConfigurationDiagnostic::new_invalid_ignore_pattern( pattern.to_string(), err.msg.to_string(), ), ) })?; } } Ok(Self { max_size: config.max_size.unwrap_or(DEFAULT_FILE_SIZE_LIMIT), ignored_files: matcher, ignore_unknown: config.ignore_unknown.unwrap_or_default(), }) } } /// Handle object holding a temporary lock on the workspace settings until /// the deferred language-specific options resolution is called #[derive(Debug)] pub struct SettingsHandle<'a> { inner: RwLockReadGuard<'a, WorkspaceSettings>, } impl<'a> SettingsHandle<'a> { pub(crate) fn new(settings: &'a RwLock<WorkspaceSettings>) -> Self { Self { inner: settings.read().unwrap(), } } } impl<'a> AsRef<WorkspaceSettings> for SettingsHandle<'a> { fn as_ref(&self) -> &WorkspaceSettings { &self.inner } } impl<'a> SettingsHandle<'a> { /// Resolve the formatting context for the given language pub(crate) fn format_options<L>(self, path: &RomePath) -> L::FormatOptions where L: Language, { L::resolve_format_options( &self.inner.formatter, &L::lookup_settings(&self.inner.languages).formatter, path, ) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/workspace.rs
crates/rome_service/src/workspace.rs
//! The [Workspace] is the main entry point for high-level clients (the Rome //! CLI and Language Server) into the various language-specific services of the //! Rome toolchain (parser, formatter, analyzer ...) //! //! # Documents //! //! A [Workspace] instance operates on a set of open documents managed by one //! or multiple clients, and provides language services for these documents //! like diagnostics, code actions or formatting in a language independent way. //! //! In this regard the [Workspace] trait shares many similarities with the //! Language Server Protocol, and in the context of the Language Server the //! state of the [Workspace] instance is intended to closely mirror the state //! of the actual in-editor workspace (the set of documents open in the //! [Workspace] is the set of files currently open in the editor) //! //! In the context of the CLI most commands will generally work on batches of //! files, and as such the set of "open documents" instead corresponds to the //! list of files the CLI is currently actively processing //! //! # State //! //! A [Workspace] instance is stateful: this is not reflected on the trait (all //! methods take an immutable `&self` borrow) because the interface is also //! required to be thread-safe ([Send] + [Sync]), but the workspace is allowed //! to internally cache data across calls (this is in fact the main reason for //! the use of the "open documents" set, those documents can serve as //! conceptual garbage collection roots to manage the caching and eviction of //! parse trees, intermediate analysis data or diagnostics) //! //! # Implementations //! //! Currently the [Workspace] trait is implemented for a single `WorkspaceServer` //! type. However it is eventually intended to also be implemented for a //! potential `WorkspaceClient` type and to operate on a remote workspace //! server through a transport layer. This would allow the CLI and Language //! 
Server process to share a the same [Workspace] instance in a common daemon //! process for instance //! //! # Errors //! //! Because of the aforementioned client-server abstraction, the [Workspace] //! is designed to let any operation fail: all methods return a [Result] with a //! [WorkspaceError] enum wrapping the underlying issue. Some common errors are: //! //! - [WorkspaceError::NotFound]: This error is returned when an operation is being //! run on a path that doesn't correspond to any open document: either the //! document has been closed or the client didn't open it in the first place //! - [WorkspaceError::SourceFileNotSupported]: This error is returned when an //! operation could not be completed because the language associated with the //! document does not implement the required capability: for instance trying to //! format a file with a language that does not have a formatter use crate::file_handlers::Capabilities; use crate::{Configuration, Deserialize, Serialize, WorkspaceError}; use rome_analyze::ActionCategory; pub use rome_analyze::RuleCategories; use rome_console::{markup, Markup, MarkupBuf}; use rome_diagnostics::CodeSuggestion; use rome_formatter::Printed; use rome_fs::RomePath; use rome_js_syntax::{TextRange, TextSize}; use rome_text_edit::TextEdit; use std::collections::HashMap; use std::{borrow::Cow, panic::RefUnwindSafe, sync::Arc}; pub use self::client::{TransportRequest, WorkspaceClient, WorkspaceTransport}; pub use crate::file_handlers::Language; use crate::settings::WorkspaceSettings; mod client; mod server; #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct SupportsFeatureParams { pub path: RomePath, pub feature: Vec<FeatureName>, } #[derive(Debug, serde::Serialize, serde::Deserialize, Default)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct SupportsFeatureResult { pub reason: Option<SupportKind>, } #[derive(Debug, serde::Serialize, 
serde::Deserialize, Default)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct FileFeaturesResult { pub features_supported: HashMap<FeatureName, SupportKind>, } impl FileFeaturesResult { /// By default, all features are not supported by a file. const WORKSPACE_FEATURES: [(FeatureName, SupportKind); 3] = [ (FeatureName::Lint, SupportKind::FileNotSupported), (FeatureName::Format, SupportKind::FileNotSupported), (FeatureName::OrganizeImports, SupportKind::FileNotSupported), ]; pub fn new() -> Self { Self { features_supported: HashMap::from(FileFeaturesResult::WORKSPACE_FEATURES), } } pub fn with_capabilities(mut self, capabilities: &Capabilities) -> Self { if capabilities.formatter.format.is_some() { self.features_supported .insert(FeatureName::Format, SupportKind::Supported); } if capabilities.analyzer.lint.is_some() { self.features_supported .insert(FeatureName::Lint, SupportKind::Supported); } if capabilities.analyzer.organize_imports.is_some() { self.features_supported .insert(FeatureName::OrganizeImports, SupportKind::Supported); } self } pub fn with_settings(mut self, settings: &WorkspaceSettings) -> Self { if !settings.formatter().enabled { self.features_supported .insert(FeatureName::Format, SupportKind::FeatureNotEnabled); } if !settings.linter().enabled { self.features_supported .insert(FeatureName::Lint, SupportKind::FeatureNotEnabled); } if !settings.organize_imports().enabled { self.features_supported .insert(FeatureName::OrganizeImports, SupportKind::FeatureNotEnabled); } self } pub fn ignored(&mut self, feature: FeatureName) { self.features_supported .insert(feature, SupportKind::Ignored); } /// Checks whether the file support the given `feature` pub fn supports_for(&self, feature: &FeatureName) -> bool { self.features_supported .get(feature) .map(|support_kind| matches!(support_kind, SupportKind::Supported)) .unwrap_or_default() } /// Loops through all the features of the current file, and if a feature is 
[SupportKind::FileNotSupported], /// it gets changed to [SupportKind::Ignored] pub fn ignore_not_supported(&mut self) { for support_kind in self.features_supported.values_mut() { if matches!(support_kind, SupportKind::FileNotSupported) { *support_kind = SupportKind::Ignored; } } } pub fn support_kind_for(&self, feature: &FeatureName) -> Option<&SupportKind> { self.features_supported.get(feature) } } impl SupportsFeatureResult { /// Whether the feature is intentionally disabled pub const fn is_not_enabled(&self) -> bool { matches!(self.reason, Some(SupportKind::FeatureNotEnabled)) } /// Whether the feature is supported pub const fn is_supported(&self) -> bool { matches!(self.reason, None) } /// Whether the feature is not supported, regardless of the reason pub const fn is_not_supported(&self) -> bool { matches!(self.reason, Some(_)) } } #[derive(Debug, serde::Serialize, serde::Deserialize, Eq, PartialEq)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub enum SupportKind { /// The feature is enabled for the file Supported, /// The file is ignored (configuration) Ignored, /// The feature is not enabled (configuration or the file doesn't need it) FeatureNotEnabled, /// The file is not capable of having this feature FileNotSupported, } impl SupportKind { pub const fn is_supported(&self) -> bool { matches!(self, SupportKind::Supported) } pub const fn is_not_enabled(&self) -> bool { matches!(self, SupportKind::FeatureNotEnabled) } } #[derive(Debug, Clone, Hash, serde::Serialize, serde::Deserialize, Eq, PartialEq)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub enum FeatureName { Format, Lint, OrganizeImports, } #[derive(Debug, Default)] pub struct FeaturesBuilder(Vec<FeatureName>); impl FeaturesBuilder { pub fn new() -> Self { Self::default() } pub fn with_formatter(mut self) -> Self { self.0.push(FeatureName::Format); self } pub fn with_linter(mut self) -> Self { self.0.push(FeatureName::Lint); self } pub fn with_organize_imports(mut 
self) -> Self { self.0.push(FeatureName::OrganizeImports); self } pub fn build(self) -> Vec<FeatureName> { self.0 } } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct UpdateSettingsParams { pub configuration: Configuration, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct OpenFileParams { pub path: RomePath, pub content: String, pub version: i32, #[serde(default)] pub language_hint: Language, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct GetSyntaxTreeParams { pub path: RomePath, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct GetSyntaxTreeResult { pub cst: String, pub ast: String, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct GetControlFlowGraphParams { pub path: RomePath, pub cursor: TextSize, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct GetFormatterIRParams { pub path: RomePath, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct GetFileContentParams { pub path: RomePath, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct ChangeFileParams { pub path: RomePath, pub content: String, pub version: i32, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct CloseFileParams { pub path: RomePath, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct PullDiagnosticsParams { pub path: 
RomePath, pub categories: RuleCategories, pub max_diagnostics: u64, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct PullDiagnosticsResult { pub diagnostics: Vec<rome_diagnostics::serde::Diagnostic>, pub errors: usize, pub skipped_diagnostics: u64, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct PullActionsParams { pub path: RomePath, pub range: TextRange, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct PullActionsResult { pub actions: Vec<CodeAction>, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct CodeAction { pub category: ActionCategory, pub rule_name: Option<(Cow<'static, str>, Cow<'static, str>)>, pub suggestion: CodeSuggestion, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct FormatFileParams { pub path: RomePath, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct FormatRangeParams { pub path: RomePath, pub range: TextRange, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct FormatOnTypeParams { pub path: RomePath, pub offset: TextSize, } #[derive(Debug, Clone, Copy, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] /// Which fixes should be applied during the analyzing phase pub enum FixFileMode { /// Applies [safe](rome_diagnostics::Applicability::Always) fixes SafeFixes, /// Applies [safe](rome_diagnostics::Applicability::Always) and suggested [safe](rome_diagnostics::Applicability::MaybeIncorrect) SafeAndUnsafeFixes, } #[derive(Debug, 
serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct FixFileParams { pub path: RomePath, pub fix_file_mode: FixFileMode, pub should_format: bool, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct FixFileResult { /// New source code for the file with all fixes applied pub code: String, /// List of all the code actions applied to the file pub actions: Vec<FixAction>, /// Number of errors pub errors: usize, /// number of skipped suggested fixes pub skipped_suggested_fixes: u32, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct FixAction { /// Name of the rule group and rule that emitted this code action pub rule_name: Option<(Cow<'static, str>, Cow<'static, str>)>, /// Source range at which this action was applied pub range: TextRange, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct RenameParams { pub path: RomePath, pub symbol_at: TextSize, pub new_name: String, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct RenameResult { /// Range of source code modified by this rename operation pub range: TextRange, /// List of text edit operations to apply on the source code pub indels: TextEdit, } #[derive(Debug, Eq, PartialEq, Clone, Default, Deserialize, Serialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct ServerInfo { /// The name of the server as defined by the server. pub name: String, /// The server's version as defined by the server. 
#[serde(skip_serializing_if = "Option::is_none")] pub version: Option<String>, } #[derive(Copy, Clone, Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct RageParams {} #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct RageResult { pub entries: Vec<RageEntry>, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub enum RageEntry { Section(String), Pair { name: String, value: MarkupBuf }, Markup(MarkupBuf), } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct OrganizeImportsParams { pub path: RomePath, } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct OrganizeImportsResult { pub code: String, } impl RageEntry { pub fn section(name: &str) -> Self { Self::Section(name.to_string()) } pub fn markup(markup: Markup) -> Self { Self::Markup(markup.to_owned()) } pub fn pair(name: &str, value: &str) -> Self { Self::pair_markup(name, markup!({ value })) } pub fn pair_markup(name: &str, value: Markup) -> Self { Self::Pair { name: name.to_string(), value: value.to_owned(), } } } #[derive(Debug, serde::Serialize, serde::Deserialize)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] pub struct IsPathIgnoredParams { pub rome_path: RomePath, pub feature: FeatureName, } pub trait Workspace: Send + Sync + RefUnwindSafe { /// Checks whether a certain feature is supported. 
There are different conditions: /// - Rome doesn't recognize a file, so it can't provide the feature; /// - the feature is disabled inside the configuration; /// - the file is ignored fn file_features( &self, params: SupportsFeatureParams, ) -> Result<FileFeaturesResult, WorkspaceError>; /// Checks if the current path is ignored by the workspace, against a particular feature. /// /// Takes as input the path of the file that workspace is currently processing and /// a list of paths to match against. /// /// If the file path matches, than `true` is returned and it should be considered ignored. fn is_path_ignored(&self, params: IsPathIgnoredParams) -> Result<bool, WorkspaceError>; /// Update the global settings for this workspace fn update_settings(&self, params: UpdateSettingsParams) -> Result<(), WorkspaceError>; /// Add a new file to the workspace fn open_file(&self, params: OpenFileParams) -> Result<(), WorkspaceError>; // Return a textual, debug representation of the syntax tree for a given document fn get_syntax_tree( &self, params: GetSyntaxTreeParams, ) -> Result<GetSyntaxTreeResult, WorkspaceError>; // Return a textual, debug representation of the control flow graph at a given position in the document fn get_control_flow_graph( &self, params: GetControlFlowGraphParams, ) -> Result<String, WorkspaceError>; // Return a textual, debug representation of the formatter IR for a given document fn get_formatter_ir(&self, params: GetFormatterIRParams) -> Result<String, WorkspaceError>; /// Return the content of a file fn get_file_content(&self, params: GetFileContentParams) -> Result<String, WorkspaceError>; /// Change the content of an open file fn change_file(&self, params: ChangeFileParams) -> Result<(), WorkspaceError>; /// Remove a file from the workspace fn close_file(&self, params: CloseFileParams) -> Result<(), WorkspaceError>; /// Retrieves the list of diagnostics associated to a file fn pull_diagnostics( &self, params: PullDiagnosticsParams, ) -> 
Result<PullDiagnosticsResult, WorkspaceError>; /// Retrieves the list of code actions available for a given cursor /// position within a file fn pull_actions(&self, params: PullActionsParams) -> Result<PullActionsResult, WorkspaceError>; /// Runs the given file through the formatter using the provided options /// and returns the resulting source code fn format_file(&self, params: FormatFileParams) -> Result<Printed, WorkspaceError>; /// Runs a range of an open document through the formatter fn format_range(&self, params: FormatRangeParams) -> Result<Printed, WorkspaceError>; /// Runs a "block" ending at the specified character of an open document /// through the formatter fn format_on_type(&self, params: FormatOnTypeParams) -> Result<Printed, WorkspaceError>; /// Return the content of the file with all safe code actions applied fn fix_file(&self, params: FixFileParams) -> Result<FixFileResult, WorkspaceError>; /// Return the content of the file after renaming a symbol fn rename(&self, params: RenameParams) -> Result<RenameResult, WorkspaceError>; /// Returns debug information about this workspace. fn rage(&self, params: RageParams) -> Result<RageResult, WorkspaceError>; /// Returns information about the server this workspace is connected to or `None` if the workspace isn't connected to a server. 
fn server_info(&self) -> Option<&ServerInfo>; /// Applies import sorting fn organize_imports( &self, params: OrganizeImportsParams, ) -> Result<OrganizeImportsResult, WorkspaceError>; } /// Convenience function for constructing a server instance of [Workspace] pub fn server() -> Box<dyn Workspace> { Box::new(server::WorkspaceServer::new()) } /// Convenience function for constructing a server instance of [Workspace] pub fn server_sync() -> Arc<dyn Workspace> { Arc::new(server::WorkspaceServer::new()) } /// Convenience function for constructing a client instance of [Workspace] pub fn client<T>(transport: T) -> Result<Box<dyn Workspace>, WorkspaceError> where T: WorkspaceTransport + RefUnwindSafe + Send + Sync + 'static, { Ok(Box::new(client::WorkspaceClient::new(transport)?)) } /// [RAII](https://en.wikipedia.org/wiki/Resource_acquisition_is_initialization) /// guard for an open file in a workspace, takes care of closing the file /// automatically on drop pub struct FileGuard<'app, W: Workspace + ?Sized> { workspace: &'app W, path: RomePath, } impl<'app, W: Workspace + ?Sized> FileGuard<'app, W> { pub fn open(workspace: &'app W, params: OpenFileParams) -> Result<Self, WorkspaceError> { let path = params.path.clone(); workspace.open_file(params)?; Ok(Self { workspace, path }) } pub fn get_syntax_tree(&self) -> Result<GetSyntaxTreeResult, WorkspaceError> { self.workspace.get_syntax_tree(GetSyntaxTreeParams { path: self.path.clone(), }) } pub fn get_control_flow_graph(&self, cursor: TextSize) -> Result<String, WorkspaceError> { self.workspace .get_control_flow_graph(GetControlFlowGraphParams { path: self.path.clone(), cursor, }) } pub fn change_file(&self, version: i32, content: String) -> Result<(), WorkspaceError> { self.workspace.change_file(ChangeFileParams { path: self.path.clone(), version, content, }) } pub fn get_file_content(&self) -> Result<String, WorkspaceError> { self.workspace.get_file_content(GetFileContentParams { path: self.path.clone(), }) } pub fn 
pull_diagnostics( &self, categories: RuleCategories, max_diagnostics: u64, ) -> Result<PullDiagnosticsResult, WorkspaceError> { self.workspace.pull_diagnostics(PullDiagnosticsParams { path: self.path.clone(), categories, max_diagnostics, }) } pub fn pull_actions(&self, range: TextRange) -> Result<PullActionsResult, WorkspaceError> { self.workspace.pull_actions(PullActionsParams { path: self.path.clone(), range, }) } pub fn format_file(&self) -> Result<Printed, WorkspaceError> { self.workspace.format_file(FormatFileParams { path: self.path.clone(), }) } pub fn format_range(&self, range: TextRange) -> Result<Printed, WorkspaceError> { self.workspace.format_range(FormatRangeParams { path: self.path.clone(), range, }) } pub fn format_on_type(&self, offset: TextSize) -> Result<Printed, WorkspaceError> { self.workspace.format_on_type(FormatOnTypeParams { path: self.path.clone(), offset, }) } pub fn fix_file( &self, fix_file_mode: FixFileMode, should_format: bool, ) -> Result<FixFileResult, WorkspaceError> { self.workspace.fix_file(FixFileParams { path: self.path.clone(), fix_file_mode, should_format, }) } pub fn organize_imports(&self) -> Result<OrganizeImportsResult, WorkspaceError> { self.workspace.organize_imports(OrganizeImportsParams { path: self.path.clone(), }) } } impl<'app, W: Workspace + ?Sized> Drop for FileGuard<'app, W> { fn drop(&mut self) { self.workspace .close_file(CloseFileParams { path: self.path.clone(), }) // `close_file` can only error if the file was already closed, in // this case it's generally better to silently matcher the error // than panic (especially in a drop handler) .ok(); } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/diagnostics.rs
crates/rome_service/src/diagnostics.rs
use crate::file_handlers::Language; use crate::ConfigurationDiagnostic; use rome_console::fmt::Bytes; use rome_console::markup; use rome_diagnostics::{category, Category, Diagnostic, DiagnosticTags, Location, Severity, Visit}; use rome_formatter::{FormatError, PrintError}; use rome_fs::FileSystemDiagnostic; use rome_js_analyze::utils::rename::RenameError; use rome_js_analyze::RuleError; use serde::{Deserialize, Serialize}; use std::error::Error; use std::fmt; use std::fmt::{Debug, Display, Formatter}; use std::process::{ExitCode, Termination}; #[derive(Serialize, Deserialize)] /// Generic errors thrown during rome operations pub enum WorkspaceError { /// Can't export the report of the CLI into a file ReportNotSerializable(ReportNotSerializable), /// The project contains uncommitted changes DirtyWorkspace(DirtyWorkspace), /// The file does not exist in the [crate::Workspace] NotFound(NotFound), /// A file is not supported. It contains the language and path of the file /// Use this error if Rome is trying to process a file that Rome can't understand SourceFileNotSupported(SourceFileNotSupported), /// The formatter encountered an error while formatting the file FormatError(FormatError), /// The formatter encountered an error while formatting the file PrintError(PrintError), /// The file could not be formatted since it has syntax errors and `format_with_errors` is disabled FormatWithErrorsDisabled(FormatWithErrorsDisabled), /// The file could not be analyzed because a rule caused an error. RuleError(RuleError), /// Thrown when Rome can't read a generic directory CantReadDirectory(CantReadDirectory), /// Thrown when Rome can't read a generic file CantReadFile(CantReadFile), /// Error thrown when validating the configuration. Once deserialized, further checks have to be done. Configuration(ConfigurationDiagnostic), /// Error thrown when Rome cannot rename a symbol. 
RenameError(RenameError), /// Error emitted by the underlying transport layer for a remote Workspace TransportError(TransportError), /// Emitted when the file is ignored and should not be processed FileIgnored(FileIgnored), /// Emitted when a file could not be parsed because it's larger than the size limit FileTooLarge(FileTooLarge), /// Diagnostics emitted when querying the file system FileSystem(FileSystemDiagnostic), } impl WorkspaceError { pub fn format_with_errors_disabled() -> Self { Self::FormatWithErrorsDisabled(FormatWithErrorsDisabled) } pub fn cant_read_file(path: String) -> Self { Self::CantReadFile(CantReadFile { path }) } pub fn not_found() -> Self { Self::NotFound(NotFound) } pub fn file_too_large(path: String, size: usize, limit: usize) -> Self { Self::FileTooLarge(FileTooLarge { path, size, limit }) } pub fn file_ignored(path: String) -> Self { Self::FileIgnored(FileIgnored { path }) } pub fn source_file_not_supported( language: Language, path: String, extension: Option<String>, ) -> Self { Self::SourceFileNotSupported(SourceFileNotSupported { language, path, extension, }) } pub fn report_not_serializable(reason: impl Into<String>) -> Self { Self::ReportNotSerializable(ReportNotSerializable { reason: reason.into(), }) } } impl Error for WorkspaceError {} impl Debug for WorkspaceError { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { std::fmt::Display::fmt(self, f) } } impl Display for WorkspaceError { fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result { Diagnostic::description(self, f) } } impl Termination for WorkspaceError { fn report(self) -> ExitCode { ExitCode::FAILURE } } impl Diagnostic for WorkspaceError { fn category(&self) -> Option<&'static Category> { match self { WorkspaceError::FormatWithErrorsDisabled(error) => error.category(), WorkspaceError::FormatError(err) => err.category(), WorkspaceError::PrintError(err) => err.category(), WorkspaceError::RuleError(error) => error.category(), WorkspaceError::Configuration(error) => 
error.category(), WorkspaceError::RenameError(error) => error.category(), WorkspaceError::TransportError(error) => error.category(), WorkspaceError::ReportNotSerializable(error) => error.category(), WorkspaceError::NotFound(error) => error.category(), WorkspaceError::DirtyWorkspace(error) => error.category(), WorkspaceError::SourceFileNotSupported(error) => error.category(), WorkspaceError::CantReadDirectory(error) => error.category(), WorkspaceError::CantReadFile(error) => error.category(), WorkspaceError::FileIgnored(error) => error.category(), WorkspaceError::FileTooLarge(error) => error.category(), WorkspaceError::FileSystem(error) => error.category(), } } fn description(&self, fmt: &mut Formatter<'_>) -> fmt::Result { match self { WorkspaceError::FormatWithErrorsDisabled(error) => error.description(fmt), WorkspaceError::FormatError(error) => Diagnostic::description(error, fmt), WorkspaceError::PrintError(error) => Diagnostic::description(error, fmt), WorkspaceError::RuleError(error) => Diagnostic::description(error, fmt), WorkspaceError::Configuration(error) => error.description(fmt), WorkspaceError::RenameError(error) => error.description(fmt), WorkspaceError::TransportError(error) => error.description(fmt), WorkspaceError::ReportNotSerializable(error) => error.description(fmt), WorkspaceError::NotFound(error) => error.description(fmt), WorkspaceError::DirtyWorkspace(error) => error.description(fmt), WorkspaceError::SourceFileNotSupported(error) => error.description(fmt), WorkspaceError::CantReadDirectory(error) => error.description(fmt), WorkspaceError::CantReadFile(error) => error.description(fmt), WorkspaceError::FileIgnored(error) => error.description(fmt), WorkspaceError::FileTooLarge(error) => error.description(fmt), WorkspaceError::FileSystem(error) => error.description(fmt), } } fn message(&self, fmt: &mut rome_console::fmt::Formatter<'_>) -> std::io::Result<()> { match self { WorkspaceError::FormatWithErrorsDisabled(error) => error.message(fmt), 
WorkspaceError::FormatError(err) => err.message(fmt), WorkspaceError::PrintError(err) => err.message(fmt), WorkspaceError::RuleError(error) => error.message(fmt), WorkspaceError::Configuration(error) => error.message(fmt), WorkspaceError::RenameError(error) => error.message(fmt), WorkspaceError::TransportError(error) => error.message(fmt), WorkspaceError::ReportNotSerializable(error) => error.message(fmt), WorkspaceError::NotFound(error) => error.message(fmt), WorkspaceError::DirtyWorkspace(error) => error.message(fmt), WorkspaceError::SourceFileNotSupported(error) => error.message(fmt), WorkspaceError::CantReadDirectory(error) => error.message(fmt), WorkspaceError::CantReadFile(error) => error.message(fmt), WorkspaceError::FileIgnored(error) => error.message(fmt), WorkspaceError::FileTooLarge(error) => error.message(fmt), WorkspaceError::FileSystem(error) => error.message(fmt), } } fn severity(&self) -> Severity { match self { WorkspaceError::FormatError(err) => err.severity(), WorkspaceError::PrintError(err) => err.severity(), WorkspaceError::RuleError(error) => error.severity(), WorkspaceError::Configuration(error) => error.severity(), WorkspaceError::RenameError(error) => error.severity(), WorkspaceError::TransportError(error) => error.severity(), WorkspaceError::ReportNotSerializable(error) => error.severity(), WorkspaceError::DirtyWorkspace(error) => error.severity(), WorkspaceError::NotFound(error) => error.severity(), WorkspaceError::SourceFileNotSupported(error) => error.severity(), WorkspaceError::FormatWithErrorsDisabled(error) => error.severity(), WorkspaceError::CantReadDirectory(error) => error.severity(), WorkspaceError::CantReadFile(error) => error.severity(), WorkspaceError::FileIgnored(error) => error.severity(), WorkspaceError::FileTooLarge(error) => error.severity(), WorkspaceError::FileSystem(error) => error.severity(), } } fn tags(&self) -> DiagnosticTags { match self { WorkspaceError::FormatError(err) => err.tags(), 
WorkspaceError::PrintError(err) => err.tags(), WorkspaceError::RuleError(error) => error.tags(), WorkspaceError::Configuration(error) => error.tags(), WorkspaceError::RenameError(error) => error.tags(), WorkspaceError::TransportError(error) => error.tags(), WorkspaceError::ReportNotSerializable(error) => error.tags(), WorkspaceError::DirtyWorkspace(error) => error.tags(), WorkspaceError::NotFound(error) => error.tags(), WorkspaceError::SourceFileNotSupported(error) => error.tags(), WorkspaceError::FormatWithErrorsDisabled(error) => error.tags(), WorkspaceError::CantReadDirectory(error) => error.tags(), WorkspaceError::CantReadFile(error) => error.tags(), WorkspaceError::FileIgnored(error) => error.tags(), WorkspaceError::FileTooLarge(error) => error.tags(), WorkspaceError::FileSystem(error) => error.tags(), } } fn location(&self) -> Location<'_> { match self { WorkspaceError::FormatError(err) => err.location(), WorkspaceError::PrintError(err) => err.location(), WorkspaceError::RuleError(error) => error.location(), WorkspaceError::Configuration(error) => error.location(), WorkspaceError::RenameError(error) => error.location(), WorkspaceError::TransportError(error) => error.location(), WorkspaceError::ReportNotSerializable(error) => error.location(), WorkspaceError::DirtyWorkspace(error) => error.location(), WorkspaceError::NotFound(error) => error.location(), WorkspaceError::SourceFileNotSupported(error) => error.location(), WorkspaceError::FormatWithErrorsDisabled(error) => error.location(), WorkspaceError::CantReadDirectory(error) => error.location(), WorkspaceError::CantReadFile(error) => error.location(), WorkspaceError::FileIgnored(error) => error.location(), WorkspaceError::FileTooLarge(error) => error.location(), WorkspaceError::FileSystem(error) => error.location(), } } fn source(&self) -> Option<&dyn Diagnostic> { match self { WorkspaceError::FormatError(error) => Diagnostic::source(error), WorkspaceError::PrintError(error) => Diagnostic::source(error), 
WorkspaceError::RuleError(error) => Diagnostic::source(error), WorkspaceError::Configuration(error) => Diagnostic::source(error), WorkspaceError::RenameError(error) => Diagnostic::source(error), WorkspaceError::TransportError(error) => Diagnostic::source(error), WorkspaceError::ReportNotSerializable(error) => Diagnostic::source(error), WorkspaceError::DirtyWorkspace(error) => Diagnostic::source(error), WorkspaceError::NotFound(error) => Diagnostic::source(error), WorkspaceError::SourceFileNotSupported(error) => Diagnostic::source(error), WorkspaceError::FormatWithErrorsDisabled(error) => Diagnostic::source(error), WorkspaceError::CantReadDirectory(error) => Diagnostic::source(error), WorkspaceError::CantReadFile(error) => Diagnostic::source(error), WorkspaceError::FileIgnored(error) => Diagnostic::source(error), WorkspaceError::FileTooLarge(error) => Diagnostic::source(error), WorkspaceError::FileSystem(error) => Diagnostic::source(error), } } fn advices(&self, visitor: &mut dyn Visit) -> std::io::Result<()> { match self { WorkspaceError::FormatError(err) => err.advices(visitor), WorkspaceError::PrintError(err) => err.advices(visitor), WorkspaceError::RuleError(error) => error.advices(visitor), WorkspaceError::Configuration(error) => error.advices(visitor), WorkspaceError::RenameError(error) => error.advices(visitor), WorkspaceError::TransportError(error) => error.advices(visitor), WorkspaceError::ReportNotSerializable(error) => error.advices(visitor), WorkspaceError::DirtyWorkspace(error) => error.advices(visitor), WorkspaceError::NotFound(error) => error.advices(visitor), WorkspaceError::SourceFileNotSupported(error) => error.advices(visitor), WorkspaceError::FormatWithErrorsDisabled(error) => error.advices(visitor), WorkspaceError::CantReadDirectory(error) => error.advices(visitor), WorkspaceError::CantReadFile(error) => error.advices(visitor), WorkspaceError::FileIgnored(error) => error.advices(visitor), WorkspaceError::FileTooLarge(error) => 
error.advices(visitor), WorkspaceError::FileSystem(error) => error.advices(visitor), } } fn verbose_advices(&self, visitor: &mut dyn Visit) -> std::io::Result<()> { match self { WorkspaceError::FormatError(err) => err.verbose_advices(visitor), WorkspaceError::PrintError(err) => err.verbose_advices(visitor), WorkspaceError::RuleError(error) => error.verbose_advices(visitor), WorkspaceError::Configuration(error) => error.verbose_advices(visitor), WorkspaceError::RenameError(error) => error.verbose_advices(visitor), WorkspaceError::TransportError(error) => error.verbose_advices(visitor), WorkspaceError::ReportNotSerializable(error) => error.verbose_advices(visitor), WorkspaceError::DirtyWorkspace(error) => error.verbose_advices(visitor), WorkspaceError::NotFound(error) => error.verbose_advices(visitor), WorkspaceError::SourceFileNotSupported(error) => error.verbose_advices(visitor), WorkspaceError::FormatWithErrorsDisabled(error) => error.verbose_advices(visitor), WorkspaceError::CantReadDirectory(error) => error.verbose_advices(visitor), WorkspaceError::CantReadFile(error) => error.verbose_advices(visitor), WorkspaceError::FileIgnored(error) => error.verbose_advices(visitor), WorkspaceError::FileTooLarge(error) => error.verbose_advices(visitor), WorkspaceError::FileSystem(error) => error.verbose_advices(visitor), } } } impl From<FormatError> for WorkspaceError { fn from(err: FormatError) -> Self { Self::FormatError(err) } } impl From<TransportError> for WorkspaceError { fn from(err: TransportError) -> Self { Self::TransportError(err) } } impl From<PrintError> for WorkspaceError { fn from(err: PrintError) -> Self { Self::PrintError(err) } } impl From<FileSystemDiagnostic> for WorkspaceError { fn from(err: FileSystemDiagnostic) -> Self { Self::FileSystem(err) } } #[derive(Debug, Serialize, Deserialize, Diagnostic)] #[diagnostic( category = "internalError/fs", message = "Uncommitted changes in repository" )] pub struct DirtyWorkspace; #[derive(Debug, Serialize, 
Deserialize, Diagnostic)] #[diagnostic( category = "internalError/fs", message( message("The report can't be serialized, here's why: "{self.reason}), description = "The report can't be serialized, here's why: {reason}" ) )] pub struct ReportNotSerializable { reason: String, } #[derive(Debug, Serialize, Deserialize, Diagnostic)] #[diagnostic( category = "internalError/fs", message = "The file does not exist in the workspace.", tags(INTERNAL) )] pub struct NotFound; #[derive(Debug, Serialize, Deserialize, Diagnostic)] #[diagnostic(category = "format", message = "Format with errors is disabled.")] pub struct FormatWithErrorsDisabled; #[derive(Debug, Serialize, Deserialize, Diagnostic)] #[diagnostic( category = "internalError/fs", message( message("Rome couldn't read the following directory, maybe for permissions reasons or it doesn't exists: "{self.path}), description = "Rome couldn't read the following directory, maybe for permissions reasons or it doesn't exists: {path}" ) )] pub struct CantReadDirectory { #[location(resource)] path: String, } #[derive(Debug, Serialize, Deserialize, Diagnostic)] #[diagnostic( category = "internalError/fs", message( message("Rome couldn't read the following file, maybe for permissions reasons or it doesn't exists: "{self.path}), description = "Rome couldn't read the following file, maybe for permissions reasons or it doesn't exists: {path}" ) )] pub struct CantReadFile { #[location(resource)] path: String, } #[derive(Debug, Serialize, Deserialize, Diagnostic)] #[diagnostic( category = "internalError/fs", message( message("The file "{self.path}" was ignored."), description = "The file {path} was ignored." 
), severity = Warning )] pub struct FileIgnored { #[location(resource)] path: String, } #[derive(Debug, Serialize, Deserialize)] pub struct FileTooLarge { path: String, size: usize, limit: usize, } impl Diagnostic for FileTooLarge { fn category(&self) -> Option<&'static Category> { Some(category!("internalError/fs")) } fn message(&self, fmt: &mut rome_console::fmt::Formatter<'_>) -> std::io::Result<()> { fmt.write_markup( markup!{ "Size of "{self.path}" is "{Bytes(self.size)}" which exceeds configured maximum of "{Bytes(self.limit)}" for this project. The file size limit exists to prevent us inadvertently slowing down and loading large files that we shouldn't." } ) } fn description(&self, fmt: &mut Formatter<'_>) -> fmt::Result { write!(fmt, "Size of {} is {} which exceeds configured maximum of {} for this project. \ The file size limit exists to prevent us inadvertently slowing down and loading large files that we shouldn't.", self.path, Bytes(self.size), Bytes(self.limit) ) } } #[derive(Debug, Serialize, Deserialize)] pub struct SourceFileNotSupported { language: Language, path: String, extension: Option<String>, } impl Diagnostic for SourceFileNotSupported { fn category(&self) -> Option<&'static Category> { Some(category!("internalError/io")) } fn severity(&self) -> Severity { Severity::Error } fn location(&self) -> Location<'_> { Location::builder().resource(&self.path).build() } fn message(&self, fmt: &mut rome_console::fmt::Formatter<'_>) -> std::io::Result<()> { if self.language != Language::Unknown { fmt.write_markup(markup! { "Rome doesn't support this feature for the language "{{&self.language}} }) } else if let Some(ext) = self.extension.as_ref() { fmt.write_markup(markup! 
{ "Rome could not determine the language for the file extension "{{ext}} }) } else { fmt.write_markup( markup!{ "Rome could not determine the language for the file "{self.path}" because it doesn't have a clear extension" } ) } } } #[derive(Debug, Serialize, Deserialize)] /// Error emitted by the underlying transport layer for a remote Workspace pub enum TransportError { /// Error emitted by the transport layer if the connection was lost due to an I/O error ChannelClosed, /// Error emitted by the transport layer if a request timed out Timeout, /// Error caused by a serialization or deserialization issue SerdeError(String), /// Generic error type for RPC errors that can't be deserialized into RomeError RPCError(String), } impl Display for TransportError { fn fmt(&self, fmt: &mut Formatter) -> fmt::Result { self.description(fmt) } } impl Diagnostic for TransportError { fn category(&self) -> Option<&'static Category> { Some(category!("internalError/io")) } fn severity(&self) -> Severity { Severity::Error } fn description(&self, fmt: &mut Formatter<'_>) -> fmt::Result { match self { TransportError::SerdeError(err) => write!(fmt, "serialization error: {err}"), TransportError::ChannelClosed => fmt.write_str( "a request to the remote workspace failed because the connection was interrupted", ), TransportError::Timeout => { fmt.write_str("the request to the remote workspace timed out") } TransportError::RPCError(err) => fmt.write_str(err), } } fn message(&self, fmt: &mut rome_console::fmt::Formatter<'_>) -> std::io::Result<()> { match self { TransportError::SerdeError(err) => write!(fmt, "serialization error: {err}"), TransportError::ChannelClosed => fmt.write_str( "a request to the remote workspace failed because the connection was interrupted", ), TransportError::Timeout => { fmt.write_str("the request to the remote workspace timed out") } TransportError::RPCError(err) => fmt.write_str(err), } } fn tags(&self) -> DiagnosticTags { DiagnosticTags::INTERNAL } } #[cfg(test)] 
mod test { use crate::diagnostics::{ CantReadDirectory, CantReadFile, DirtyWorkspace, FileIgnored, FileTooLarge, NotFound, SourceFileNotSupported, }; use crate::file_handlers::Language; use crate::{TransportError, WorkspaceError}; use rome_diagnostics::{print_diagnostic_to_string, DiagnosticExt, Error}; use rome_formatter::FormatError; use rome_fs::RomePath; use std::ffi::OsStr; fn snap_diagnostic(test_name: &str, diagnostic: Error) { let content = print_diagnostic_to_string(&diagnostic); insta::with_settings!({ prepend_module_to_snapshot => false, }, { insta::assert_snapshot!(test_name, content); }); } #[test] fn diagnostic_size() { assert_eq!(std::mem::size_of::<WorkspaceError>(), 104) } #[test] fn dirty_workspace() { snap_diagnostic( "dirty_workspace", WorkspaceError::DirtyWorkspace(DirtyWorkspace).into(), ) } #[test] fn file_ignored() { snap_diagnostic( "file_ignored", WorkspaceError::FileIgnored(FileIgnored { path: "example.js".to_string(), }) .with_file_path("example.js"), ) } #[test] fn cant_read_directory() { snap_diagnostic( "cant_read_directory", WorkspaceError::CantReadDirectory(CantReadDirectory { path: "example/".to_string(), }) .with_file_path("example/"), ) } #[test] fn cant_read_file() { snap_diagnostic( "cant_read_file", WorkspaceError::CantReadFile(CantReadFile { path: "example.js".to_string(), }) .with_file_path("example.js"), ) } #[test] fn not_found() { snap_diagnostic( "not_found", WorkspaceError::NotFound(NotFound).with_file_path("not_found.js"), ) } #[test] fn source_file_not_supported() { let path = RomePath::new("not_supported.toml"); snap_diagnostic( "source_file_not_supported", WorkspaceError::SourceFileNotSupported(SourceFileNotSupported { language: Language::Unknown, path: path.display().to_string(), extension: path .extension() .and_then(OsStr::to_str) .map(|s| s.to_string()), }) .with_file_path("not_supported.toml"), ) } #[test] fn file_too_large() { snap_diagnostic( "file_too_large", WorkspaceError::FileTooLarge(FileTooLarge { path: 
"example.js".to_string(), limit: 100, size: 500, }) .with_file_path("example.js"), ) } #[test] fn transport_channel_closed() { snap_diagnostic( "transport_channel_closed", TransportError::ChannelClosed.into(), ) } #[test] fn transport_timeout() { snap_diagnostic("transport_timeout", TransportError::Timeout.into()) } #[test] fn transport_rpc_error() { snap_diagnostic( "transport_rpc_error", TransportError::RPCError("Some generic error".to_string()).into(), ) } #[test] fn transport_serde_error() { snap_diagnostic( "transport_serde_error", TransportError::SerdeError("Some serialization/deserialization error".to_string()) .into(), ) } #[test] fn formatter_syntax_error() { snap_diagnostic( "formatter_syntax_error", WorkspaceError::FormatError(FormatError::SyntaxError).with_file_path("example.js"), ) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/lib.rs
crates/rome_service/src/lib.rs
use rome_console::Console; use rome_fs::{FileSystem, OsFileSystem}; use serde::{Deserialize, Serialize}; use std::ops::{Deref, DerefMut}; pub mod configuration; pub mod file_handlers; pub mod matcher; pub mod settings; pub mod workspace; mod diagnostics; #[cfg(feature = "schema")] pub mod workspace_types; pub use crate::configuration::{ create_config, load_config, Configuration, ConfigurationBasePath, ConfigurationDiagnostic, JavascriptFormatter, MergeWith, RuleConfiguration, Rules, }; pub use crate::matcher::{MatchOptions, Matcher, Pattern}; pub use crate::diagnostics::{TransportError, WorkspaceError}; /// Exports only for this crate pub use crate::file_handlers::JsFormatterSettings; pub use crate::workspace::Workspace; pub const VERSION: &str = match option_env!("ROME_VERSION") { Some(version) => version, None => env!("CARGO_PKG_VERSION"), }; /// This is the main entrypoint of the application. pub struct App<'app> { /// A reference to the internal virtual file system pub fs: DynRef<'app, dyn FileSystem>, /// A reference to the internal workspace pub workspace: WorkspaceRef<'app>, /// A reference to the internal console, where its buffer will be used to write messages and /// errors pub console: &'app mut dyn Console, } impl<'app> App<'app> { pub fn with_console(console: &'app mut dyn Console) -> Self { Self::with_filesystem_and_console(DynRef::Owned(Box::new(OsFileSystem)), console) } /// Create a new instance of the app using the specified [FileSystem] and [Console] implementation pub fn with_filesystem_and_console( fs: DynRef<'app, dyn FileSystem>, console: &'app mut dyn Console, ) -> Self { Self::new(fs, console, WorkspaceRef::Owned(workspace::server())) } /// Create a new instance of the app using the specified [FileSystem], [Console] and [Workspace] implementation pub fn new( fs: DynRef<'app, dyn FileSystem>, console: &'app mut dyn Console, workspace: WorkspaceRef<'app>, ) -> Self { Self { fs, console, workspace, } } } pub enum WorkspaceRef<'app> { 
Owned(Box<dyn Workspace>), Borrowed(&'app dyn Workspace), } impl<'app> Deref for WorkspaceRef<'app> { type Target = dyn Workspace + 'app; // False positive #[allow(clippy::explicit_auto_deref)] fn deref(&self) -> &Self::Target { match self { WorkspaceRef::Owned(inner) => &**inner, WorkspaceRef::Borrowed(inner) => *inner, } } } /// Clone of [std::borrow::Cow] specialized for storing a trait object and /// holding a mutable reference in the `Borrowed` variant instead of requiring /// the inner type to implement [std::borrow::ToOwned] pub enum DynRef<'app, T: ?Sized + 'app> { Owned(Box<T>), Borrowed(&'app mut T), } impl<'app, T: ?Sized + 'app> Deref for DynRef<'app, T> { type Target = T; fn deref(&self) -> &Self::Target { match self { DynRef::Owned(inner) => inner, DynRef::Borrowed(inner) => inner, } } } impl<'app, T: ?Sized + 'app> DerefMut for DynRef<'app, T> { fn deref_mut(&mut self) -> &mut Self::Target { match self { DynRef::Owned(inner) => inner, DynRef::Borrowed(inner) => inner, } } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/workspace_types.rs
crates/rome_service/src/workspace_types.rs
//! Utility functions to help with generating bindings for the [Workspace] API use std::collections::{HashSet, VecDeque}; use rome_js_syntax::{AnyJsDeclaration, AnyTsTupleTypeElement}; use schemars::{ gen::{SchemaGenerator, SchemaSettings}, schema::{InstanceType, RootSchema, Schema, SchemaObject, SingleOrVec}, JsonSchema, }; use serde_json::Value; use crate::{workspace::*, WorkspaceError}; use rome_js_factory::{ make, syntax::{AnyJsObjectMemberName, AnyTsName, AnyTsType, AnyTsTypeMember, T}, }; use rome_rowan::{AstSeparatedList, TriviaPieceKind}; /// Manages a queue of type definitions that need to be generated #[derive(Default)] pub struct ModuleQueue<'a> { /// Set of type names that have already been emitted visited: HashSet<&'a str>, /// Queue of type names and definitions that need to be generated queue: VecDeque<(&'a str, &'a SchemaObject)>, } impl<'a> ModuleQueue<'a> { /// Add a type definition to the queue if it hasn't been emitted already fn push_back(&mut self, item: (&'a str, &'a SchemaObject)) { if self.visited.insert(item.0) { self.queue.push_back(item); } } /// Pull a type name and definition from the queue fn pop_front(&mut self) -> Option<(&'a str, &'a SchemaObject)> { self.queue.pop_front() } pub fn visited(&self) -> &HashSet<&'a str> { &self.visited } } /// Generate a [TsType] node from the `instance_type` of a [SchemaObject] fn instance_type<'a>( queue: &mut ModuleQueue<'a>, root_schema: &'a RootSchema, schema: &'a SchemaObject, ty: InstanceType, ) -> AnyTsType { match ty { // If the instance type is an object, generate a TS object type with the corresponding properties InstanceType::Object => { let object = schema.object.as_deref().unwrap(); AnyTsType::from(make::ts_object_type( make::token(T!['{']), make::ts_type_member_list(object.properties.iter().map(|(property, schema)| { let (ts_type, optional, description) = schema_type(queue, root_schema, schema); assert!(!optional, "optional nested types are not supported"); let mut property = 
make::ident(property); if let Some(description) = description { let comment = format!("/**\n\t* {} \n\t */", description); let trivia = vec![ (TriviaPieceKind::Newline, "\n"), (TriviaPieceKind::MultiLineComment, comment.as_str()), (TriviaPieceKind::Newline, "\n"), ]; property = property.with_leading_trivia(trivia); } AnyTsTypeMember::from( make::ts_property_signature_type_member(AnyJsObjectMemberName::from( make::js_literal_member_name(property), )) .with_type_annotation(make::ts_type_annotation(make::token(T![:]), ts_type)) .build(), ) })), make::token(T!['}']), )) } // If the instance type is an array, generate a TS array type with the corresponding item type InstanceType::Array => { let array = schema.array.as_deref().unwrap(); let items = array.items.as_ref().unwrap(); match items { SingleOrVec::Single(schema) => { let (ts_type, optional, _) = schema_type(queue, root_schema, schema); assert!(!optional, "optional nested types are not supported"); AnyTsType::from(make::ts_array_type( ts_type, make::token(T!['[']), make::token(T![']']), )) } SingleOrVec::Vec(items) => AnyTsType::from(make::ts_tuple_type( make::token(T!['[']), make::ts_tuple_type_element_list( items.iter().map(|schema| { let (ts_type, optional, _) = schema_type(queue, root_schema, schema); assert!(!optional, "optional nested types are not supported"); AnyTsTupleTypeElement::AnyTsType(ts_type) }), items.iter().map(|_| make::token(T![,])), ), make::token(T![']']), )), } } // Map native types to the corresponding TS type InstanceType::Null => AnyTsType::from(make::ts_null_literal_type(make::token(T![null]))), InstanceType::Boolean => AnyTsType::from(make::ts_boolean_type(make::token(T![boolean]))), InstanceType::String => AnyTsType::from(make::ts_string_type(make::token(T![string]))), InstanceType::Number | InstanceType::Integer => { AnyTsType::from(make::ts_number_type(make::token(T![number]))) } } } /// Generate a literal [TsType] from a `serde_json` [Value] fn value_type(value: &Value) -> AnyTsType 
{ match value { Value::Null => AnyTsType::from(make::ts_null_literal_type(make::token(T![null]))), Value::Bool(true) => AnyTsType::from(make::ts_boolean_literal_type(make::token(T![true]))), Value::Bool(false) => { AnyTsType::from(make::ts_boolean_literal_type(make::token(T![false]))) } Value::Number(value) => AnyTsType::from( make::ts_number_literal_type(make::js_number_literal(value.as_f64().unwrap())).build(), ), Value::String(value) => { AnyTsType::from(make::ts_string_literal_type(make::js_string_literal(value))) } Value::Array(_) => unimplemented!(), Value::Object(_) => unimplemented!(), } } /// Generate a union [TsType] node from a list of [TsType]s, /// flattening any nested union type the iterator may emit fn make_union_type(items: impl IntoIterator<Item = AnyTsType>) -> AnyTsType { let mut result = Vec::new(); for item in items { if let AnyTsType::TsUnionType(union_type) = item { for item in union_type.types().iter() { result.push(item.unwrap()); } } else { result.push(item); } } let separators = (0..result.len().saturating_sub(1)).map(|_| make::token(T![|])); AnyTsType::from( make::ts_union_type(make::ts_union_type_variant_list(result, separators)).build(), ) } /// Generate a [TsType] node from a [SchemaObject], returning the generated /// TypeScript type along with a boolean flag indicating whether the type is /// considered "optional" in the schema fn schema_object_type<'a>( queue: &mut ModuleQueue<'a>, root_schema: &'a RootSchema, schema: &'a SchemaObject, ) -> (AnyTsType, bool, Option<&'a String>) { // Start by detecting enum types by inspecting the `enum_values` field, i // the field is set return a union type generated from the literal enum values let description = schema .metadata .as_ref() .and_then(|s| s.description.as_ref()); let ts_type = schema .enum_values .as_deref() .map(|enum_values| make_union_type(enum_values.iter().map(value_type))) // If the type isn't an enum, inspect its `instance_type` field, if the // field is set return a type 
annotation for the corresponding type .or_else(|| { Some(match schema.instance_type.as_ref()? { SingleOrVec::Single(ty) => instance_type(queue, root_schema, schema, **ty), SingleOrVec::Vec(types) => make_union_type( types .iter() .map(|ty| instance_type(queue, root_schema, schema, *ty)), ), }) }) // Otherwise inspect the `reference` field of the schema, if its set return // a TS reference type and add the corresponding type to the queue .or_else(|| { let reference = schema.reference.as_deref()?; let key = reference.trim_start_matches("#/components/schemas/"); match root_schema.definitions.get(key) { Some(Schema::Bool(_)) => unimplemented!(), Some(Schema::Object(schema)) => queue.push_back((key, schema)), None => panic!("definition for type {key:?} not found"), } Some(AnyTsType::from( make::ts_reference_type(AnyTsName::from(make::js_reference_identifier( make::ident(key), ))) .build(), )) }) // Finally try to inspect the subschemas for this type .or_else(|| { let subschemas = schema.subschemas.as_deref()?; // First try to inspect the `all_of` list of subschemas, if it's // set generate an intersection type from it subschemas .all_of .as_deref() .map(|all_of| { AnyTsType::from( make::ts_intersection_type(make::ts_intersection_type_element_list( all_of.iter().map(|ty| { let (ts_type, optional, _) = schema_type(queue, root_schema, ty); assert!(!optional, "optional nested types are not supported"); ts_type }), (0..all_of.len().saturating_sub(1)).map(|_| make::token(T![&])), )) .build(), ) }) // Otherwise try to inspect the `any_of` list of subschemas, and // generate the corresponding union type for it .or_else(|| { let any_of = subschemas .any_of .as_deref() .or(subschemas.one_of.as_deref())?; Some(make_union_type(any_of.iter().map(|ty| { let (ts_type, optional, _) = schema_type(queue, root_schema, ty); assert!(!optional, "optional nested types are not supported"); ts_type }))) }) }) .unwrap_or_else(|| { // this is temporary workaround to fix the `options` field, which 
is not used at the moment AnyTsType::from(make::ts_any_type(make::token(T![any]))) }); // Types are considered "optional" in the serialization protocol if they // have the `nullable` OpenAPI extension property, or if they have a default value let is_nullable = matches!(schema.extensions.get("nullable"), Some(Value::Bool(true))); let has_defaults = schema .metadata .as_ref() .map(|metadata| metadata.default.is_some()) .unwrap_or(false); (ts_type, is_nullable || has_defaults, description) } /// Generate a [TsType] node from a [Schema], returning the generated type /// along with a boolean flag indicating whether the type is considered /// "optional" in the schema fn schema_type<'a>( queue: &mut ModuleQueue<'a>, root_schema: &'a RootSchema, schema: &'a Schema, ) -> (AnyTsType, bool, Option<&'a String>) { match schema { // Types defined as `true` in the schema always pass validation, // map them to the `any` type Schema::Bool(true) => ( AnyTsType::from(make::ts_any_type(make::token(T![any]))), true, None, ), // Types defined as `false` in the schema never pass validation, // map them to the `never` type Schema::Bool(false) => ( AnyTsType::from(make::ts_never_type(make::token(T![never]))), false, None, ), Schema::Object(schema_object) => schema_object_type(queue, root_schema, schema_object), } } /// Generate and emit all the types defined in `root_schema` into the `module` pub fn generate_type<'a>( module: &mut Vec<(AnyJsDeclaration, Option<&'a String>)>, queue: &mut ModuleQueue<'a>, root_schema: &'a RootSchema, ) -> AnyTsType { // Read the root type of the schema and push it to the queue let root_name = root_schema .schema .metadata .as_deref() .and_then(|metadata| metadata.title.as_deref()) .unwrap(); match root_name { "Null" => return AnyTsType::TsVoidType(make::ts_void_type(make::token(T![void]))), "Boolean" => { return AnyTsType::TsBooleanType(make::ts_boolean_type(make::token(T![boolean]))) } "String" => return 
AnyTsType::TsStringType(make::ts_string_type(make::token(T![string]))), _ => {} } queue.push_back((root_name, &root_schema.schema)); while let Some((name, schema)) = queue.pop_front() { // Detect if the type being emitted is an object, emit it as an // interface definition if that's the case let is_interface = schema .instance_type .as_ref() .map(|instance_type| { if let SingleOrVec::Single(instance_type) = instance_type { matches!(**instance_type, InstanceType::Object) } else { false } }) .unwrap_or_else(|| schema.object.is_some()); if is_interface { let mut members = Vec::new(); // Create a property signature member in the interface for each // property of the corresponding schema object let object = schema.object.as_deref().unwrap(); for (property, schema) in &object.properties { let (ts_type, optional, description) = schema_type(queue, root_schema, schema); let mut property = make::ident(property); if let Some(description) = description { let comment = format!("/**\n\t* {} \n\t */", description); let trivia = vec![ (TriviaPieceKind::Newline, "\n"), (TriviaPieceKind::MultiLineComment, comment.as_str()), (TriviaPieceKind::Newline, "\n"), ]; property = property.with_leading_trivia(trivia); } let mut builder = make::ts_property_signature_type_member( AnyJsObjectMemberName::from(make::js_literal_member_name(property)), ) .with_type_annotation(make::ts_type_annotation(make::token(T![:]), ts_type)); if optional { builder = builder.with_optional_token(make::token(T![?])); } members.push(AnyTsTypeMember::from(builder.build())); } let description = schema .metadata .as_ref() .and_then(|s| s.description.as_ref()); let current_module = AnyJsDeclaration::from( make::ts_interface_declaration( make::token(T![interface]), make::ts_identifier_binding(make::ident(name)), make::token(T!['{']), make::ts_type_member_list(members), make::token(T!['}']), ) .build(), ); module.push((current_module, description)); } else { // If the schema for this type is not an object, emit it as a 
type alias let (ts_type, optional, description) = schema_object_type(queue, root_schema, schema); assert!(!optional, "optional nested types are not supported"); let current_module = AnyJsDeclaration::from( make::ts_type_alias_declaration( make::token(T![type]), make::ts_identifier_binding(make::ident(name)), make::token(T![=]), ts_type, ) .build(), ); module.push((current_module, description)); } } AnyTsType::TsReferenceType( make::ts_reference_type(AnyTsName::JsReferenceIdentifier( make::js_reference_identifier(make::ident(root_name)), )) .build(), ) } /// Signature metadata for a [Workspace] method pub struct WorkspaceMethod { /// Name of the method pub name: &'static str, /// Schema for the parameters object of the method pub params: RootSchema, /// Schema for the result object of the method pub result: RootSchema, } impl WorkspaceMethod { /// Construct a [WorkspaceMethod] from a name, a parameter type and a result type fn of<P, R>(name: &'static str) -> Self where P: JsonSchema, R: JsonSchema, { let params = SchemaGenerator::from(SchemaSettings::openapi3()).root_schema_for::<P>(); let result = SchemaGenerator::from(SchemaSettings::openapi3()).root_schema_for::<R>(); Self { name, params, result, } } /// Construct a [WorkspaceMethod] from a name and a function pointer fn from_method<T, P, R>( name: &'static str, _func: fn(T, P) -> Result<R, WorkspaceError>, ) -> Self where P: JsonSchema, R: JsonSchema, { Self::of::<P, R>(name) } } /// Helper macro for generated an OpenAPI schema for a type implementing JsonSchema macro_rules! 
workspace_method { ($name:ident) => { WorkspaceMethod::from_method(stringify!($name), <dyn Workspace>::$name) }; } /// Returns a list of signature for all the methods in the [Workspace] trait pub fn methods() -> [WorkspaceMethod; 17] { [ WorkspaceMethod::of::<SupportsFeatureParams, SupportsFeatureResult>("file_features"), workspace_method!(update_settings), workspace_method!(open_file), workspace_method!(change_file), workspace_method!(close_file), workspace_method!(get_syntax_tree), workspace_method!(organize_imports), workspace_method!(get_file_content), workspace_method!(get_control_flow_graph), workspace_method!(get_formatter_ir), workspace_method!(pull_diagnostics), workspace_method!(pull_actions), workspace_method!(format_file), workspace_method!(format_range), workspace_method!(format_on_type), workspace_method!(fix_file), workspace_method!(rename), ] }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/matcher/pattern.rs
crates/rome_service/src/matcher/pattern.rs
use crate::matcher::pattern::CharSpecifier::{CharRange, SingleChar}; use crate::matcher::pattern::MatchResult::{ EntirePatternDoesntMatch, Match, SubPatternDoesntMatch, }; use crate::matcher::pattern::PatternToken::{ AnyChar, AnyExcept, AnyRecursiveSequence, AnySequence, AnyWithin, Char, }; use std::error::Error; use std::path::Path; use std::str::FromStr; use std::{fmt, path}; /// A pattern parsing error. #[derive(Debug)] #[allow(missing_copy_implementations)] pub struct PatternError { /// The approximate character index of where the error occurred. pub pos: usize, /// A message describing the error. pub msg: &'static str, } impl Error for PatternError { fn description(&self) -> &str { self.msg } } impl fmt::Display for PatternError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "Pattern syntax error near position {}: {}", self.pos, self.msg ) } } /// A compiled Unix shell style pattern. /// /// - `?` matches any single character. /// /// - `*` matches any (possibly empty) sequence of characters. /// /// - `**` matches the current directory and arbitrary subdirectories. This /// sequence **must** form a single path component, so both `**a` and `b**` /// are invalid and will result in an error. A sequence of more than two /// consecutive `*` characters is also invalid. /// /// - `[...]` matches any character inside the brackets. Character sequences /// can also specify ranges of characters, as ordered by Unicode, so e.g. /// `[0-9]` specifies any character between 0 and 9 inclusive. An unclosed /// bracket is invalid. /// /// - `[!...]` is the negation of `[...]`, i.e. it matches any characters /// **not** in the brackets. /// /// - The metacharacters `?`, `*`, `[`, `]` can be matched by using brackets /// (e.g. `[?]`). When a `]` occurs immediately following `[` or `[!` then it /// is interpreted as being part of, rather then ending, the character set, so /// `]` and NOT `]` can be matched by `[]]` and `[!]]` respectively. 
The `-` /// character can be specified inside a character sequence pattern by placing /// it at the start or the end, e.g. `[abc-]`. #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default, Debug)] pub struct Pattern { original: String, tokens: Vec<PatternToken>, is_recursive: bool, } /// Show the original glob pattern. impl fmt::Display for Pattern { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.original.fmt(f) } } impl FromStr for Pattern { type Err = PatternError; fn from_str(s: &str) -> Result<Self, PatternError> { Self::new(s) } } #[derive(Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] enum PatternToken { Char(char), AnyChar, AnySequence, AnyRecursiveSequence, AnyWithin(Vec<CharSpecifier>), AnyExcept(Vec<CharSpecifier>), } #[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Debug)] enum CharSpecifier { SingleChar(char), CharRange(char, char), } #[derive(Copy, Clone, PartialEq)] enum MatchResult { Match, SubPatternDoesntMatch, EntirePatternDoesntMatch, } const ERROR_WILDCARDS: &str = "wildcards are either regular `*` or recursive `**`"; const ERROR_RECURSIVE_WILDCARDS: &str = "recursive wildcards must form a single path \ component"; const ERROR_INVALID_RANGE: &str = "invalid range pattern"; impl Pattern { /// This function compiles Unix shell style patterns. /// /// An invalid glob pattern will yield a `PatternError`. pub fn new(pattern: &str) -> Result<Self, PatternError> { let chars = pattern.chars().collect::<Vec<_>>(); let mut tokens = Vec::new(); let mut is_recursive = false; let mut i = 0; // A pattern is relative if it starts with "." followed by a separator, // eg. "./test" or ".\test" let is_relative = matches!(chars.get(..2), Some(['.', sep]) if path::is_separator(*sep)); if is_relative { // If a pattern starts with a relative prefix, strip it from the // pattern and replace it with a "**" sequence i += 2; tokens.push(AnyRecursiveSequence); } else { // A pattern is absolute if it starts with a path separator, eg. 
"/home" or "\\?\C:\Users" let mut is_absolute = chars.first().map_or(false, |c| path::is_separator(*c)); // On windows a pattern may also be absolute if it starts with a // drive letter, a colon and a separator, eg. "c:/Users" or "G:\Users" if cfg!(windows) && !is_absolute { is_absolute = matches!(chars.get(..3), Some(['a'..='z' | 'A'..='Z', ':', sep]) if path::is_separator(*sep)); } // If a pattern is not absolute, insert a "**" sequence in front if !is_absolute { tokens.push(AnyRecursiveSequence); } } while i < chars.len() { match chars[i] { '?' => { tokens.push(AnyChar); i += 1; } '*' => { let old = i; while i < chars.len() && chars[i] == '*' { i += 1; } let count = i - old; match count { count if count > 2 => { return Err(PatternError { pos: old + 2, msg: ERROR_WILDCARDS, }); } count if count == 2 => { // ** can only be an entire path component // i.e. a/**/b is valid, but a**/b or a/**b is not // invalid matches are treated literally let is_valid = if i == 2 || path::is_separator(chars[i - count - 1]) { // it ends in a '/' if i < chars.len() && path::is_separator(chars[i]) { i += 1; true // or the pattern ends here // this enables the existing globbing mechanism } else if i == chars.len() { true // `**` ends in non-separator } else { return Err(PatternError { pos: i, msg: ERROR_RECURSIVE_WILDCARDS, }); } // `**` begins with non-separator } else { return Err(PatternError { pos: old - 1, msg: ERROR_RECURSIVE_WILDCARDS, }); }; if is_valid { // collapse consecutive AnyRecursiveSequence to a // single one let tokens_len = tokens.len(); if !(tokens_len > 1 && tokens[tokens_len - 1] == AnyRecursiveSequence) { is_recursive = true; tokens.push(AnyRecursiveSequence); } } } _ => { tokens.push(AnySequence); } } } '[' => { if i + 4 <= chars.len() && chars[i + 1] == '!' 
{ match chars[i + 3..].iter().position(|x| *x == ']') { None => (), Some(j) => { let chars = &chars[i + 2..i + 3 + j]; let cs = parse_char_specifiers(chars); tokens.push(AnyExcept(cs)); i += j + 4; continue; } } } else if i + 3 <= chars.len() && chars[i + 1] != '!' { match chars[i + 2..].iter().position(|x| *x == ']') { None => (), Some(j) => { let cs = parse_char_specifiers(&chars[i + 1..i + 2 + j]); tokens.push(AnyWithin(cs)); i += j + 3; continue; } } } // if we get here then this is not a valid range pattern return Err(PatternError { pos: i, msg: ERROR_INVALID_RANGE, }); } c => { tokens.push(Char(c)); i += 1; } } } Ok(Self { tokens, original: pattern.to_string(), is_recursive, }) } /// Escape metacharacters within the given string by surrounding them in /// brackets. The resulting string will, when compiled into a `Pattern`, /// match the input string and nothing else. pub fn escape(s: &str) -> String { let mut escaped = String::new(); for c in s.chars() { match c { // note that ! does not need escaping because it is only special // inside brackets '?' | '*' | '[' | ']' => { escaped.push('['); escaped.push(c); escaped.push(']'); } c => { escaped.push(c); } } } escaped } /// Return if the given `str` matches this `Pattern` using the default /// match options (i.e. `MatchOptions::new()`). /// /// # Examples /// /// ```rust /// use crate::rome_service::Pattern; /// /// assert!(Pattern::new("c?t").unwrap().matches("cat")); /// assert!(Pattern::new("k[!e]tteh").unwrap().matches("kitteh")); /// assert!(Pattern::new("d*g").unwrap().matches("doog")); /// ``` pub fn matches(&self, str: &str) -> bool { self.matches_with(str, MatchOptions::new()) } /// Return if the given `Path`, when converted to a `str`, matches this /// `Pattern` using the default match options (i.e. `MatchOptions::new()`). 
pub fn matches_path(&self, path: &Path) -> bool { // FIXME (#9639): This needs to handle non-utf8 paths path.to_str().map_or(false, |s| self.matches(s)) } /// Return if the given `str` matches this `Pattern` using the specified /// match options. pub fn matches_with(&self, str: &str, options: MatchOptions) -> bool { self.matches_from(true, str.chars(), 0, options) == Match } /// Return if the given `Path`, when converted to a `str`, matches this /// `Pattern` using the specified match options. pub fn matches_path_with(&self, path: &Path, options: MatchOptions) -> bool { // FIXME (#9639): This needs to handle non-utf8 paths path.to_str() .map_or(false, |s| self.matches_with(s, options)) } /// Access the original glob pattern. pub fn as_str(&self) -> &str { &self.original } fn matches_from( &self, mut follows_separator: bool, mut file: std::str::Chars, i: usize, options: MatchOptions, ) -> MatchResult { for (ti, token) in self.tokens[i..].iter().enumerate() { match *token { AnySequence | AnyRecursiveSequence => { // ** must be at the start. debug_assert!(match *token { AnyRecursiveSequence => follows_separator, _ => true, }); // Empty match match self.matches_from(follows_separator, file.clone(), i + ti + 1, options) { SubPatternDoesntMatch => (), // keep trying m => return m, }; while let Some(c) = file.next() { if follows_separator && options.require_literal_leading_dot && c == '.' { return SubPatternDoesntMatch; } follows_separator = path::is_separator(c); match *token { AnyRecursiveSequence if !follows_separator => continue, AnySequence if options.require_literal_separator && follows_separator => { return SubPatternDoesntMatch } _ => (), } match self.matches_from( follows_separator, file.clone(), i + ti + 1, options, ) { SubPatternDoesntMatch => (), // keep trying m => return m, } } } _ => { let c = match file.next() { Some(c) => c, None => return EntirePatternDoesntMatch, }; let is_sep = path::is_separator(c); if !match *token { AnyChar | AnyWithin(..) 
| AnyExcept(..) if (options.require_literal_separator && is_sep) || (follows_separator && options.require_literal_leading_dot && c == '.') => { false } AnyChar => true, AnyWithin(ref specifiers) => in_char_specifiers(specifiers, c, options), AnyExcept(ref specifiers) => !in_char_specifiers(specifiers, c, options), Char(c2) => chars_eq(c, c2, options.case_sensitive), AnySequence | AnyRecursiveSequence => unreachable!(), } { return SubPatternDoesntMatch; } follows_separator = is_sep; } } } // Iter is fused. if file.next().is_none() { Match } else { SubPatternDoesntMatch } } } fn parse_char_specifiers(s: &[char]) -> Vec<CharSpecifier> { let mut cs = Vec::new(); let mut i = 0; while i < s.len() { if i + 3 <= s.len() && s[i + 1] == '-' { cs.push(CharRange(s[i], s[i + 2])); i += 3; } else { cs.push(SingleChar(s[i])); i += 1; } } cs } fn in_char_specifiers(specifiers: &[CharSpecifier], c: char, options: MatchOptions) -> bool { for &specifier in specifiers.iter() { match specifier { SingleChar(sc) => { if chars_eq(c, sc, options.case_sensitive) { return true; } } CharRange(start, end) => { // FIXME: work with non-ascii chars properly (issue #1347) if !options.case_sensitive && c.is_ascii() && start.is_ascii() && end.is_ascii() { let start = start.to_ascii_lowercase(); let end = end.to_ascii_lowercase(); let start_up = start.to_uppercase().next().unwrap(); let end_up = end.to_uppercase().next().unwrap(); // only allow case insensitive matching when // both start and end are within a-z or A-Z if start != start_up && end != end_up { let c = c.to_ascii_lowercase(); if c >= start && c <= end { return true; } } } if c >= start && c <= end { return true; } } } } false } /// A helper function to determine if two chars are (possibly case-insensitively) equal. 
fn chars_eq(a: char, b: char, case_sensitive: bool) -> bool { if cfg!(windows) && path::is_separator(a) && path::is_separator(b) { true } else if !case_sensitive && a.is_ascii() && b.is_ascii() { // FIXME: work with non-ascii chars properly (issue #9084) a.to_ascii_lowercase() == b.to_ascii_lowercase() } else { a == b } } /// Configuration options to modify the behaviour of `Pattern::matches_with(..)`. #[allow(missing_copy_implementations)] #[derive(Debug, Copy, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Default)] pub struct MatchOptions { /// Whether or not patterns should be matched in a case-sensitive manner. /// This currently only considers upper/lower case relationships between /// ASCII characters, but in future this might be extended to work with /// Unicode. pub case_sensitive: bool, /// Whether or not path-component separator characters (e.g. `/` on /// Posix) must be matched by a literal `/`, rather than by `*` or `?` or /// `[...]`. pub require_literal_separator: bool, /// Whether or not paths that contain components that start with a `.` /// will require that `.` appears literally in the pattern; `*`, `?`, `**`, /// or `[...]` will not match. This is useful because such files are /// conventionally considered hidden on Unix systems and it might be /// desirable to skip them when listing files. pub require_literal_leading_dot: bool, } impl MatchOptions { /// Constructs a new `MatchOptions` with default field values. This is used /// when calling functions that do not take an explicit `MatchOptions` /// parameter. 
/// /// This function always returns this value: /// /// ```rust,ignore /// MatchOptions { /// case_sensitive: true, /// require_literal_separator: false, /// require_literal_leading_dot: false /// } /// ``` pub fn new() -> Self { Self { case_sensitive: true, require_literal_separator: false, require_literal_leading_dot: false, } } } #[cfg(test)] mod test { use super::{MatchOptions, Pattern}; use std::path::Path; #[test] fn test_pattern_from_str() { assert!("a*b".parse::<Pattern>().unwrap().matches("a_b")); assert!("a/**b".parse::<Pattern>().unwrap_err().pos == 4); } #[test] fn test_wildcard_errors() { assert!(Pattern::new("a/**b").unwrap_err().pos == 4); assert!(Pattern::new("a/bc**").unwrap_err().pos == 3); assert!(Pattern::new("a/*****").unwrap_err().pos == 4); assert!(Pattern::new("a/b**c**d").unwrap_err().pos == 2); assert!(Pattern::new("a**b").unwrap_err().pos == 0); } #[test] fn test_unclosed_bracket_errors() { assert!(Pattern::new("abc[def").unwrap_err().pos == 3); assert!(Pattern::new("abc[!def").unwrap_err().pos == 3); assert!(Pattern::new("abc[").unwrap_err().pos == 3); assert!(Pattern::new("abc[!").unwrap_err().pos == 3); assert!(Pattern::new("abc[d").unwrap_err().pos == 3); assert!(Pattern::new("abc[!d").unwrap_err().pos == 3); assert!(Pattern::new("abc[]").unwrap_err().pos == 3); assert!(Pattern::new("abc[!]").unwrap_err().pos == 3); } #[test] fn test_wildcards() { assert!(Pattern::new("a*b").unwrap().matches("a_b")); assert!(Pattern::new("a*b*c").unwrap().matches("abc")); assert!(!Pattern::new("a*b*c").unwrap().matches("abcd")); assert!(Pattern::new("a*b*c").unwrap().matches("a_b_c")); assert!(Pattern::new("a*b*c").unwrap().matches("a___b___c")); assert!(Pattern::new("abc*abc*abc") .unwrap() .matches("abcabcabcabcabcabcabc")); assert!(!Pattern::new("abc*abc*abc") .unwrap() .matches("abcabcabcabcabcabcabca")); assert!(Pattern::new("a*a*a*a*a*a*a*a*a") .unwrap() .matches("aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa")); 
assert!(Pattern::new("a*b[xyz]c*d").unwrap().matches("abxcdbxcddd")); } #[test] fn test_recursive_wildcards() { let pat = Pattern::new("some/**/needle.txt").unwrap(); assert!(pat.matches("some/needle.txt")); assert!(pat.matches("some/one/needle.txt")); assert!(pat.matches("some/one/two/needle.txt")); assert!(pat.matches("some/other/needle.txt")); assert!(!pat.matches("some/other/notthis.txt")); // a single ** should be valid, for globs // Should accept anything let pat = Pattern::new("**").unwrap(); assert!(pat.is_recursive); assert!(pat.matches("abcde")); assert!(pat.matches("")); assert!(pat.matches(".asdf")); assert!(pat.matches("/x/.asdf")); // collapse consecutive wildcards let pat = Pattern::new("some/**/**/needle.txt").unwrap(); assert!(pat.matches("some/needle.txt")); assert!(pat.matches("some/one/needle.txt")); assert!(pat.matches("some/one/two/needle.txt")); assert!(pat.matches("some/other/needle.txt")); assert!(!pat.matches("some/other/notthis.txt")); // ** can begin the pattern let pat = Pattern::new("**/test").unwrap(); assert!(pat.matches("one/two/test")); assert!(pat.matches("one/test")); assert!(pat.matches("test")); // /** can begin the pattern let pat = Pattern::new("/**/test").unwrap(); assert!(pat.matches("/one/two/test")); assert!(pat.matches("/one/test")); assert!(pat.matches("/test")); assert!(!pat.matches("/one/notthis")); assert!(!pat.matches("/notthis")); // Only start sub-patterns on start of path segment. 
let pat = Pattern::new("**/.*").unwrap(); assert!(pat.matches(".abc")); assert!(pat.matches("abc/.abc")); assert!(!pat.matches("ab.c")); assert!(!pat.matches("abc/ab.c")); } #[test] fn test_range_pattern() { let pat = Pattern::new("a[0-9]b").unwrap(); for i in 0..10 { assert!(pat.matches(&format!("a{}b", i))); } assert!(!pat.matches("a_b")); let pat = Pattern::new("a[!0-9]b").unwrap(); for i in 0..10 { assert!(!pat.matches(&format!("a{}b", i))); } assert!(pat.matches("a_b")); let pats = ["[a-z123]", "[1a-z23]", "[123a-z]"]; for &p in pats.iter() { let pat = Pattern::new(p).unwrap(); for c in "abcdefghijklmnopqrstuvwxyz".chars() { assert!(pat.matches(&c.to_string())); } for c in "ABCDEFGHIJKLMNOPQRSTUVWXYZ".chars() { let options = MatchOptions { case_sensitive: false, ..MatchOptions::new() }; assert!(pat.matches_with(&c.to_string(), options)); } assert!(pat.matches("1")); assert!(pat.matches("2")); assert!(pat.matches("3")); } let pats = ["[abc-]", "[-abc]", "[a-c-]"]; for &p in pats.iter() { let pat = Pattern::new(p).unwrap(); assert!(pat.matches("a")); assert!(pat.matches("b")); assert!(pat.matches("c")); assert!(pat.matches("-")); assert!(!pat.matches("d")); } let pat = Pattern::new("[2-1]").unwrap(); assert!(!pat.matches("1")); assert!(!pat.matches("2")); assert!(Pattern::new("[-]").unwrap().matches("-")); assert!(!Pattern::new("[!-]").unwrap().matches("-")); } #[test] fn test_pattern_matches() { let txt_pat = Pattern::new("*hello.txt").unwrap(); assert!(txt_pat.matches("hello.txt")); assert!(txt_pat.matches("gareth_says_hello.txt")); assert!(txt_pat.matches("some/path/to/hello.txt")); assert!(txt_pat.matches("some\\path\\to\\hello.txt")); assert!(txt_pat.matches("/an/absolute/path/to/hello.txt")); assert!(!txt_pat.matches("hello.txt-and-then-some")); assert!(!txt_pat.matches("goodbye.txt")); let dir_pat = Pattern::new("*some/path/to/hello.txt").unwrap(); assert!(dir_pat.matches("some/path/to/hello.txt")); 
assert!(dir_pat.matches("a/bigger/some/path/to/hello.txt")); assert!(!dir_pat.matches("some/path/to/hello.txt-and-then-some")); assert!(!dir_pat.matches("some/other/path/to/hello.txt")); } #[test] fn test_pattern_escape() { let s = "_[_]_?_*_!_"; assert_eq!(Pattern::escape(s), "_[[]_[]]_[?]_[*]_!_".to_string()); assert!(Pattern::new(&Pattern::escape(s)).unwrap().matches(s)); } #[test] fn test_pattern_matches_case_insensitive() { let pat = Pattern::new("aBcDeFg").unwrap(); let options = MatchOptions { case_sensitive: false, require_literal_separator: false, require_literal_leading_dot: false, }; assert!(pat.matches_with("aBcDeFg", options)); assert!(pat.matches_with("abcdefg", options)); assert!(pat.matches_with("ABCDEFG", options)); assert!(pat.matches_with("AbCdEfG", options)); } #[test] fn test_pattern_matches_case_insensitive_range() { let pat_within = Pattern::new("[a]").unwrap(); let pat_except = Pattern::new("[!a]").unwrap(); let options_case_insensitive = MatchOptions { case_sensitive: false, require_literal_separator: false, require_literal_leading_dot: false, }; let options_case_sensitive = MatchOptions { case_sensitive: true, require_literal_separator: false, require_literal_leading_dot: false, }; assert!(pat_within.matches_with("a", options_case_insensitive)); assert!(pat_within.matches_with("A", options_case_insensitive)); assert!(!pat_within.matches_with("A", options_case_sensitive)); assert!(!pat_except.matches_with("a", options_case_insensitive)); assert!(!pat_except.matches_with("A", options_case_insensitive)); assert!(pat_except.matches_with("A", options_case_sensitive)); } #[test] fn test_pattern_matches_require_literal_separator() { let options_require_literal = MatchOptions { case_sensitive: true, require_literal_separator: true, require_literal_leading_dot: false, }; let options_not_require_literal = MatchOptions { case_sensitive: true, require_literal_separator: false, require_literal_leading_dot: false, }; assert!(Pattern::new("abc/def") 
.unwrap() .matches_with("abc/def", options_require_literal)); assert!(!Pattern::new("abc?def") .unwrap() .matches_with("abc/def", options_require_literal)); assert!(!Pattern::new("abc*def") .unwrap() .matches_with("abc/def", options_require_literal)); assert!(!Pattern::new("abc[/]def") .unwrap() .matches_with("abc/def", options_require_literal)); assert!(Pattern::new("abc/def") .unwrap() .matches_with("abc/def", options_not_require_literal)); assert!(Pattern::new("abc?def") .unwrap() .matches_with("abc/def", options_not_require_literal)); assert!(Pattern::new("abc*def") .unwrap() .matches_with("abc/def", options_not_require_literal)); assert!(Pattern::new("abc[/]def") .unwrap() .matches_with("abc/def", options_not_require_literal)); } #[test] fn test_pattern_matches_require_literal_leading_dot() { let options_require_literal_leading_dot = MatchOptions { case_sensitive: true, require_literal_separator: false, require_literal_leading_dot: true, }; let options_not_require_literal_leading_dot = MatchOptions { case_sensitive: true, require_literal_separator: false, require_literal_leading_dot: false, }; let f = |options| { Pattern::new("*.txt") .unwrap() .matches_with(".hello.txt", options) }; assert!(f(options_not_require_literal_leading_dot)); assert!(!f(options_require_literal_leading_dot)); let f = |options| { Pattern::new(".*.*") .unwrap() .matches_with(".hello.txt", options) }; assert!(f(options_not_require_literal_leading_dot)); assert!(f(options_require_literal_leading_dot)); let f = |options| { Pattern::new("aaa/bbb/*") .unwrap() .matches_with("aaa/bbb/.ccc", options) }; assert!(f(options_not_require_literal_leading_dot)); assert!(!f(options_require_literal_leading_dot)); let f = |options| { Pattern::new("aaa/bbb/*") .unwrap() .matches_with("aaa/bbb/c.c.c.", options) }; assert!(f(options_not_require_literal_leading_dot)); assert!(f(options_require_literal_leading_dot)); let f = |options| { Pattern::new("aaa/bbb/.*") .unwrap() .matches_with("aaa/bbb/.ccc", 
options) }; assert!(f(options_not_require_literal_leading_dot)); assert!(f(options_require_literal_leading_dot)); let f = |options| { Pattern::new("aaa/?bbb") .unwrap() .matches_with("aaa/.bbb", options) }; assert!(f(options_not_require_literal_leading_dot)); assert!(!f(options_require_literal_leading_dot)); let f = |options| { Pattern::new("aaa/[.]bbb") .unwrap() .matches_with("aaa/.bbb", options) }; assert!(f(options_not_require_literal_leading_dot)); assert!(!f(options_require_literal_leading_dot)); let f = |options| Pattern::new("**/*").unwrap().matches_with(".bbb", options); assert!(f(options_not_require_literal_leading_dot)); assert!(!f(options_require_literal_leading_dot)); } #[test] fn test_matches_path() { // on windows, (Path::new("a/b").as_str().unwrap() == "a\\b"), so this // tests that / and \ are considered equivalent on windows assert!(Pattern::new("a/b").unwrap().matches_path(Path::new("a/b"))); } #[test] fn test_path_join() { let pattern = Path::new("one").join(Path::new("**/*.rs")); assert!(Pattern::new(pattern.to_str().unwrap()).is_ok()); } #[test] fn test_pattern_relative() { assert!(Pattern::new("./b").unwrap().matches_path(Path::new("a/b"))); assert!(Pattern::new("b").unwrap().matches_path(Path::new("a/b"))); if cfg!(windows) { assert!(Pattern::new(".\\b") .unwrap() .matches_path(Path::new("a\\b"))); assert!(Pattern::new("b").unwrap().matches_path(Path::new("a\\b"))); } } #[test] fn test_pattern_absolute() { assert!(Pattern::new("/a/b") .unwrap() .matches_path(Path::new("/a/b")));
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
true
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/matcher/mod.rs
crates/rome_service/src/matcher/mod.rs
pub mod pattern; pub use pattern::{MatchOptions, Pattern, PatternError}; use rome_console::markup; use rome_diagnostics::Diagnostic; use std::collections::HashMap; use std::path::Path; use std::sync::RwLock; /// A data structure to use when there's need to match a string or a path a against /// a unix shell style patterns #[derive(Debug)] pub struct Matcher { patterns: Vec<Pattern>, options: MatchOptions, already_ignored: RwLock<HashMap<String, bool>>, } impl Matcher { /// Creates a new Matcher with given options. /// /// Check [glob website](https://docs.rs/glob/latest/glob/struct.MatchOptions.html) for [MatchOptions] pub fn new(options: MatchOptions) -> Self { Self { patterns: Vec::new(), options, already_ignored: RwLock::new(HashMap::default()), } } /// It adds a unix shell style pattern pub fn add_pattern(&mut self, pattern: &str) -> Result<(), PatternError> { let pattern = Pattern::new(pattern)?; self.patterns.push(pattern); Ok(()) } /// It matches the given string against the stored patterns. 
/// /// It returns [true] if there's at least a match pub fn matches(&self, source: &str) -> bool { let mut already_ignored = self.already_ignored.write().unwrap(); if let Some(matches) = already_ignored.get(source) { return *matches; } for pattern in &self.patterns { if pattern.matches_with(source, self.options) || source.contains(pattern.as_str()) { already_ignored.insert(source.to_string(), true); return true; } } already_ignored.insert(source.to_string(), false); false } /// It matches the given path against the stored patterns /// /// It returns [true] if there's a lest a match pub fn matches_path(&self, source: &Path) -> bool { let mut already_ignored = self.already_ignored.write().unwrap(); let source_as_string = source.to_str(); if let Some(source_as_string) = source_as_string { if let Some(matches) = already_ignored.get(source_as_string) { return *matches; } } let matches = { for pattern in &self.patterns { let matches = if pattern.matches_path_with(source, self.options) { true } else { // Here we cover cases where the user specifies single files inside the patterns. 
// The pattern library doesn't support single files, we here we just do a check // on contains source_as_string.map_or(false, |source| source.contains(pattern.as_str())) }; if matches { return true; } } false }; if let Some(source_as_string) = source_as_string { already_ignored.insert(source_as_string.to_string(), matches); } matches } } impl Diagnostic for PatternError { fn description(&self, fmt: &mut std::fmt::Formatter<'_>) -> std::fmt::Result { write!(fmt, "{}", self.msg) } fn message(&self, fmt: &mut rome_console::fmt::Formatter<'_>) -> std::io::Result<()> { fmt.write_markup(markup!({ self.msg })) } } #[cfg(test)] mod test { use crate::matcher::pattern::MatchOptions; use crate::matcher::Matcher; use std::env; #[test] fn matches() { let current = env::current_dir().unwrap(); let dir = format!("{}/**/*.rs", current.display()); let mut ignore = Matcher::new(MatchOptions::default()); ignore.add_pattern(&dir).unwrap(); let path = env::current_dir().unwrap().join("src/workspace.rs"); let result = ignore.matches(path.to_str().unwrap()); assert!(result); } #[test] fn matches_path() { let current = env::current_dir().unwrap(); let dir = format!("{}/**/*.rs", current.display()); let mut ignore = Matcher::new(MatchOptions::default()); ignore.add_pattern(&dir).unwrap(); let path = env::current_dir().unwrap().join("src/workspace.rs"); let result = ignore.matches_path(path.as_path()); assert!(result); } #[test] fn matches_single_path() { let dir = "workspace.rs"; let mut ignore = Matcher::new(MatchOptions { require_literal_separator: true, case_sensitive: true, require_literal_leading_dot: true, }); ignore.add_pattern(dir).unwrap(); let path = env::current_dir().unwrap().join("src/workspace.rs"); let result = ignore.matches(path.to_str().unwrap()); assert!(result); } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/workspace/client.rs
crates/rome_service/src/workspace/client.rs
use crate::workspace::{
    FileFeaturesResult, GetFileContentParams, IsPathIgnoredParams, OrganizeImportsParams,
    OrganizeImportsResult, RageParams, RageResult, ServerInfo,
};
use crate::{TransportError, Workspace, WorkspaceError};
use rome_formatter::Printed;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use serde_json::json;
use std::{
    panic::RefUnwindSafe,
    sync::atomic::{AtomicU64, Ordering},
};

use super::{
    ChangeFileParams, CloseFileParams, FixFileParams, FixFileResult, FormatFileParams,
    FormatOnTypeParams, FormatRangeParams, GetControlFlowGraphParams, GetFormatterIRParams,
    GetSyntaxTreeParams, GetSyntaxTreeResult, OpenFileParams, PullActionsParams, PullActionsResult,
    PullDiagnosticsParams, PullDiagnosticsResult, RenameParams, RenameResult,
    SupportsFeatureParams, UpdateSettingsParams,
};

/// Client-side proxy for a remote [Workspace]: every trait method is turned
/// into a request on the underlying transport `T` and the deserialized
/// response is returned to the caller.
pub struct WorkspaceClient<T> {
    // Transport used to exchange messages with the workspace server.
    transport: T,
    // Monotonically increasing counter used to allocate unique request ids.
    request_id: AtomicU64,
    // Server metadata captured from the `initialize` handshake, if provided.
    server_info: Option<ServerInfo>,
}

/// Abstraction over the wire protocol: implementors serialize the request,
/// send it, and deserialize the matching response.
pub trait WorkspaceTransport {
    fn request<P, R>(&self, request: TransportRequest<P>) -> Result<R, TransportError>
    where
        P: Serialize,
        R: DeserializeOwned;
}

/// A single request as handed to the transport layer.
#[derive(Debug)]
pub struct TransportRequest<P> {
    /// Unique id used to correlate this request with its response.
    pub id: u64,
    /// Name of the remote method to invoke.
    pub method: &'static str,
    /// Payload of the request, serialized by the transport.
    pub params: P,
}

/// Result payload of the `initialize` handshake.
#[derive(Debug, PartialEq, Eq, Clone, Default, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct InitializeResult {
    /// Information about the server.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub server_info: Option<ServerInfo>,
}

impl<T> WorkspaceClient<T>
where
    T: WorkspaceTransport + RefUnwindSafe + Send + Sync,
{
    /// Creates a new client and immediately performs the `initialize`
    /// handshake, recording any server info the server reports back.
    pub fn new(transport: T) -> Result<Self, WorkspaceError> {
        let mut client = Self {
            transport,
            request_id: AtomicU64::new(0),
            server_info: None,
        };

        // TODO: The current implementation of the JSON-RPC protocol in
        // tower_lsp doesn't allow any request to be sent before a call to
        // initialize, this is something we could be able to lift by using our
        // own RPC protocol implementation
        let value: InitializeResult = client.request(
            "initialize",
            json!({
                "capabilities": {},
                "clientInfo": {
                    "name": env!("CARGO_PKG_NAME"),
                    "version": crate::VERSION
                },
            }),
        )?;

        client.server_info = value.server_info;

        Ok(client)
    }

    /// Sends one request over the transport, allocating a fresh id for it.
    fn request<P, R>(&self, method: &'static str, params: P) -> Result<R, WorkspaceError>
    where
        P: Serialize,
        R: DeserializeOwned,
    {
        // Relaxed ordering is sufficient: ids only need to be unique, they
        // are not used to order other memory operations.
        let id = self.request_id.fetch_add(1, Ordering::Relaxed);
        let request = TransportRequest { id, method, params };

        let response = self.transport.request(request)?;

        Ok(response)
    }

    /// Asks the server to shut down, consuming the client.
    pub fn shutdown(self) -> Result<(), WorkspaceError> {
        self.request("rome/shutdown", ())
    }
}

// Every method below forwards its parameters verbatim to the corresponding
// remote method. The "rome/..." method names are the wire protocol and must
// stay in sync with the server-side dispatcher.
impl<T> Workspace for WorkspaceClient<T>
where
    T: WorkspaceTransport + RefUnwindSafe + Send + Sync,
{
    fn file_features(
        &self,
        params: SupportsFeatureParams,
    ) -> Result<FileFeaturesResult, WorkspaceError> {
        self.request("rome/file_features", params)
    }

    fn is_path_ignored(&self, params: IsPathIgnoredParams) -> Result<bool, WorkspaceError> {
        self.request("rome/is_path_ignored", params)
    }

    fn update_settings(&self, params: UpdateSettingsParams) -> Result<(), WorkspaceError> {
        self.request("rome/update_settings", params)
    }

    fn open_file(&self, params: OpenFileParams) -> Result<(), WorkspaceError> {
        self.request("rome/open_file", params)
    }

    fn get_syntax_tree(
        &self,
        params: GetSyntaxTreeParams,
    ) -> Result<GetSyntaxTreeResult, WorkspaceError> {
        self.request("rome/get_syntax_tree", params)
    }

    fn get_control_flow_graph(
        &self,
        params: GetControlFlowGraphParams,
    ) -> Result<String, WorkspaceError> {
        self.request("rome/get_control_flow_graph", params)
    }

    fn get_formatter_ir(&self, params: GetFormatterIRParams) -> Result<String, WorkspaceError> {
        self.request("rome/get_formatter_ir", params)
    }

    fn get_file_content(&self, params: GetFileContentParams) -> Result<String, WorkspaceError> {
        self.request("rome/get_file_content", params)
    }

    fn change_file(&self, params: ChangeFileParams) -> Result<(), WorkspaceError> {
        self.request("rome/change_file", params)
    }

    fn close_file(&self, params: CloseFileParams) -> Result<(), WorkspaceError> {
        self.request("rome/close_file", params)
    }

    fn pull_diagnostics(
        &self,
        params: PullDiagnosticsParams,
    ) -> Result<PullDiagnosticsResult, WorkspaceError> {
        self.request("rome/pull_diagnostics", params)
    }

    fn pull_actions(&self, params: PullActionsParams) -> Result<PullActionsResult, WorkspaceError> {
        self.request("rome/pull_actions", params)
    }

    fn format_file(&self, params: FormatFileParams) -> Result<Printed, WorkspaceError> {
        self.request("rome/format_file", params)
    }

    fn format_range(&self, params: FormatRangeParams) -> Result<Printed, WorkspaceError> {
        self.request("rome/format_range", params)
    }

    fn format_on_type(&self, params: FormatOnTypeParams) -> Result<Printed, WorkspaceError> {
        self.request("rome/format_on_type", params)
    }

    fn fix_file(&self, params: FixFileParams) -> Result<FixFileResult, WorkspaceError> {
        self.request("rome/fix_file", params)
    }

    fn rename(&self, params: RenameParams) -> Result<RenameResult, WorkspaceError> {
        self.request("rome/rename", params)
    }

    fn rage(&self, params: RageParams) -> Result<RageResult, WorkspaceError> {
        self.request("rome/rage", params)
    }

    fn server_info(&self) -> Option<&ServerInfo> {
        self.server_info.as_ref()
    }

    fn organize_imports(
        &self,
        params: OrganizeImportsParams,
    ) -> Result<OrganizeImportsResult, WorkspaceError> {
        self.request("rome/organize_imports", params)
    }
}
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/workspace/server.rs
crates/rome_service/src/workspace/server.rs
use super::{ ChangeFileParams, CloseFileParams, FeatureName, FixFileResult, FormatFileParams, FormatOnTypeParams, FormatRangeParams, GetControlFlowGraphParams, GetFormatterIRParams, GetSyntaxTreeParams, GetSyntaxTreeResult, OpenFileParams, PullActionsParams, PullActionsResult, PullDiagnosticsParams, PullDiagnosticsResult, RenameResult, SupportsFeatureParams, UpdateSettingsParams, }; use crate::file_handlers::{Capabilities, FixAllParams, Language, LintParams}; use crate::workspace::{ FileFeaturesResult, GetFileContentParams, IsPathIgnoredParams, OrganizeImportsParams, OrganizeImportsResult, RageEntry, RageParams, RageResult, ServerInfo, }; use crate::{ file_handlers::Features, settings::{SettingsHandle, WorkspaceSettings}, Rules, Workspace, WorkspaceError, }; use dashmap::{mapref::entry::Entry, DashMap}; use indexmap::IndexSet; use rome_analyze::{AnalysisFilter, RuleFilter}; use rome_diagnostics::{serde::Diagnostic as SerdeDiagnostic, Diagnostic, DiagnosticExt, Severity}; use rome_formatter::Printed; use rome_fs::RomePath; use rome_parser::AnyParse; use rome_rowan::NodeCache; use std::ffi::OsStr; use std::{panic::RefUnwindSafe, sync::RwLock}; use tracing::trace; pub(super) struct WorkspaceServer { /// features available throughout the application features: Features, /// global settings object for this workspace settings: RwLock<WorkspaceSettings>, /// Stores the document (text content + version number) associated with a URL documents: DashMap<RomePath, Document>, /// Stores the result of the parser (syntax tree + diagnostics) for a given URL syntax: DashMap<RomePath, AnyParse>, } /// The `Workspace` object is long lived, so we want it to be able to cross /// unwind boundaries. 
/// In return we have to make sure operations on the workspace either do not /// panic, of that panicking will not result in any broken invariant (it would /// not result in any undefined behavior as catching an unwind is safe, but it /// could lead to hard to debug issues) impl RefUnwindSafe for WorkspaceServer {} #[derive(Debug)] pub(crate) struct Document { pub(crate) content: String, pub(crate) version: i32, pub(crate) language_hint: Language, node_cache: NodeCache, } impl WorkspaceServer { /// Create a new [Workspace] /// /// This is implemented as a crate-private method instead of using /// [Default] to disallow instances of [Workspace] from being created /// outside of a [crate::App] pub(crate) fn new() -> Self { Self { features: Features::new(), settings: RwLock::default(), documents: DashMap::default(), syntax: DashMap::default(), } } fn settings(&self) -> SettingsHandle { SettingsHandle::new(&self.settings) } /// Get the supported capabilities for a given file path fn get_capabilities(&self, path: &RomePath) -> Capabilities { let language = self.get_language(path); self.features.get_capabilities(path, language) } /// Retrieves the supported language of a file fn get_language(&self, path: &RomePath) -> Language { self.documents .get(path) .map(|doc| doc.language_hint) .unwrap_or_default() } /// Return an error factory function for unsupported features at a given path fn build_capability_error<'a>( &'a self, path: &'a RomePath, // feature_name: &'a str, ) -> impl FnOnce() -> WorkspaceError + 'a { move || { let language_hint = self .documents .get(path) .map(|doc| doc.language_hint) .unwrap_or_default(); let language = Features::get_language(path).or(language_hint); WorkspaceError::source_file_not_supported( language, path.clone().display().to_string(), path.clone() .extension() .and_then(OsStr::to_str) .map(|s| s.to_string()), ) } } fn build_rule_filter_list<'a>(&'a self, rules: Option<&'a Rules>) -> Vec<RuleFilter> { if let Some(rules) = rules { let 
enabled: IndexSet<RuleFilter> = rules.as_enabled_rules(); enabled.into_iter().collect::<Vec<RuleFilter>>() } else { vec![] } } /// Get the parser result for a given file /// /// Returns and error if no file exists in the workspace with this path or /// if the language associated with the file has no parser capability fn get_parse( &self, rome_path: RomePath, feature: Option<FeatureName>, ) -> Result<AnyParse, WorkspaceError> { let ignored = if let Some(feature) = feature { self.is_path_ignored(IsPathIgnoredParams { rome_path: rome_path.clone(), feature, })? } else { false }; if ignored { return Err(WorkspaceError::file_ignored(format!( "{}", rome_path.to_path_buf().display() ))); } match self.syntax.entry(rome_path) { Entry::Occupied(entry) => Ok(entry.get().clone()), Entry::Vacant(entry) => { let rome_path = entry.key(); let capabilities = self.get_capabilities(rome_path); let mut document = self .documents .get_mut(rome_path) .ok_or_else(WorkspaceError::not_found)?; let parse = capabilities .parser .parse .ok_or_else(self.build_capability_error(rome_path))?; let size_limit = { let settings = self.settings(); let settings = settings.as_ref(); let limit = settings.files.max_size.get(); usize::try_from(limit).unwrap_or(usize::MAX) }; let document = &mut *document; let size = document.content.as_bytes().len(); if size >= size_limit { return Err(WorkspaceError::file_too_large( rome_path.to_path_buf().display().to_string(), size, size_limit, )); } let settings = self.settings(); let parsed = parse( rome_path, document.language_hint, document.content.as_str(), settings, &mut document.node_cache, ); Ok(entry.insert(parsed).clone()) } } } } impl Workspace for WorkspaceServer { fn file_features( &self, params: SupportsFeatureParams, ) -> Result<FileFeaturesResult, WorkspaceError> { let capabilities = self.get_capabilities(&params.path); let settings = self.settings.read().unwrap(); let mut file_features = FileFeaturesResult::new() .with_capabilities(&capabilities) 
.with_settings(&settings); if settings.files.ignore_unknown { let language = self.get_language(&params.path); if language == Language::Unknown { file_features.ignore_not_supported(); } } for feature in params.feature { let is_ignored = self.is_path_ignored(IsPathIgnoredParams { rome_path: params.path.clone(), feature: feature.clone(), })?; if is_ignored { file_features.ignored(feature); } } Ok(file_features) } fn is_path_ignored(&self, params: IsPathIgnoredParams) -> Result<bool, WorkspaceError> { let settings = self.settings(); let is_ignored_by_file_config = settings .as_ref() .files .ignored_files .matches_path(params.rome_path.as_path()); Ok(match params.feature { FeatureName::Format => { settings .as_ref() .formatter .ignored_files .matches_path(params.rome_path.as_path()) || is_ignored_by_file_config } FeatureName::Lint => { settings .as_ref() .linter .ignored_files .matches_path(params.rome_path.as_path()) || is_ignored_by_file_config } FeatureName::OrganizeImports => { settings .as_ref() .organize_imports .ignored_files .matches_path(params.rome_path.as_path()) || is_ignored_by_file_config } }) } /// Update the global settings for this workspace /// /// ## Panics /// This function may panic if the internal settings mutex has been poisoned /// by another thread having previously panicked while holding the lock #[tracing::instrument(level = "debug", skip(self))] fn update_settings(&self, params: UpdateSettingsParams) -> Result<(), WorkspaceError> { let mut settings = self.settings.write().unwrap(); settings.merge_with_configuration(params.configuration)?; Ok(()) } /// Add a new file to the workspace fn open_file(&self, params: OpenFileParams) -> Result<(), WorkspaceError> { self.syntax.remove(&params.path); self.documents.insert( params.path, Document { content: params.content, version: params.version, language_hint: params.language_hint, node_cache: NodeCache::default(), }, ); Ok(()) } fn get_syntax_tree( &self, params: GetSyntaxTreeParams, ) -> 
Result<GetSyntaxTreeResult, WorkspaceError> { let capabilities = self.get_capabilities(&params.path); let debug_syntax_tree = capabilities .debug .debug_syntax_tree .ok_or_else(self.build_capability_error(&params.path))?; // The feature name here can be any feature, in theory let parse = self.get_parse(params.path.clone(), None)?; let printed = debug_syntax_tree(&params.path, parse); Ok(printed) } fn get_control_flow_graph( &self, params: GetControlFlowGraphParams, ) -> Result<String, WorkspaceError> { let capabilities = self.get_capabilities(&params.path); let debug_control_flow = capabilities .debug .debug_control_flow .ok_or_else(self.build_capability_error(&params.path))?; let parse = self.get_parse(params.path.clone(), None)?; let printed = debug_control_flow(parse, params.cursor); Ok(printed) } fn get_file_content(&self, params: GetFileContentParams) -> Result<String, WorkspaceError> { let document = self .documents .get(&params.path) .ok_or(WorkspaceError::not_found())?; Ok(document.content.clone()) } fn get_formatter_ir(&self, params: GetFormatterIRParams) -> Result<String, WorkspaceError> { let capabilities = self.get_capabilities(&params.path); let debug_formatter_ir = capabilities .debug .debug_formatter_ir .ok_or_else(self.build_capability_error(&params.path))?; let settings = self.settings(); let parse = self.get_parse(params.path.clone(), Some(FeatureName::Format))?; if !settings.as_ref().formatter().format_with_errors && parse.has_errors() { return Err(WorkspaceError::format_with_errors_disabled()); } debug_formatter_ir(&params.path, parse, settings) } /// Change the content of an open file fn change_file(&self, params: ChangeFileParams) -> Result<(), WorkspaceError> { let mut document = self .documents .get_mut(&params.path) .ok_or_else(WorkspaceError::not_found)?; debug_assert!(params.version > document.version); document.version = params.version; document.content = params.content; self.syntax.remove(&params.path); Ok(()) } /// Remove a file from 
the workspace fn close_file(&self, params: CloseFileParams) -> Result<(), WorkspaceError> { self.documents .remove(&params.path) .ok_or_else(WorkspaceError::not_found)?; self.syntax.remove(&params.path); Ok(()) } /// Retrieves the list of diagnostics associated with a file fn pull_diagnostics( &self, params: PullDiagnosticsParams, ) -> Result<PullDiagnosticsResult, WorkspaceError> { let feature = if params.categories.is_syntax() { FeatureName::Format } else { FeatureName::Lint }; let parse = self.get_parse(params.path.clone(), Some(feature))?; let settings = self.settings.read().unwrap(); let (diagnostics, errors, skipped_diagnostics) = if let Some(lint) = self.get_capabilities(&params.path).analyzer.lint { let rules = settings.linter().rules.as_ref(); let mut rule_filter_list = self.build_rule_filter_list(rules); if settings.organize_imports.enabled { rule_filter_list.push(RuleFilter::Rule("correctness", "organizeImports")); } let mut filter = AnalysisFilter::from_enabled_rules(Some(rule_filter_list.as_slice())); filter.categories = params.categories; trace!("Analyzer filter to apply to lint: {:?}", &filter); let results = lint(LintParams { parse, filter, rules, settings: self.settings(), max_diagnostics: params.max_diagnostics, path: &params.path, }); ( results.diagnostics, results.errors, results.skipped_diagnostics, ) } else { let parse_diagnostics = parse.into_diagnostics(); let errors = parse_diagnostics .iter() .filter(|diag| diag.severity() <= Severity::Error) .count(); (parse_diagnostics, errors, 0) }; Ok(PullDiagnosticsResult { diagnostics: diagnostics .into_iter() .map(|diag| { let diag = diag.with_file_path(params.path.as_path().display().to_string()); SerdeDiagnostic::new(diag) }) .collect(), errors, skipped_diagnostics, }) } /// Retrieves the list of code actions available for a given cursor /// position within a file fn pull_actions(&self, params: PullActionsParams) -> Result<PullActionsResult, WorkspaceError> { let capabilities = 
self.get_capabilities(&params.path); let code_actions = capabilities .analyzer .code_actions .ok_or_else(self.build_capability_error(&params.path))?; let parse = self.get_parse(params.path.clone(), Some(FeatureName::Lint))?; let settings = self.settings.read().unwrap(); let rules = settings.linter().rules.as_ref(); Ok(code_actions( parse, params.range, rules, self.settings(), &params.path, )) } /// Runs the given file through the formatter using the provided options /// and returns the resulting source code fn format_file(&self, params: FormatFileParams) -> Result<Printed, WorkspaceError> { let capabilities = self.get_capabilities(&params.path); let format = capabilities .formatter .format .ok_or_else(self.build_capability_error(&params.path))?; let settings = self.settings(); let parse = self.get_parse(params.path.clone(), Some(FeatureName::Format))?; if !settings.as_ref().formatter().format_with_errors && parse.has_errors() { return Err(WorkspaceError::format_with_errors_disabled()); } format(&params.path, parse, settings) } fn format_range(&self, params: FormatRangeParams) -> Result<Printed, WorkspaceError> { let capabilities = self.get_capabilities(&params.path); let format_range = capabilities .formatter .format_range .ok_or_else(self.build_capability_error(&params.path))?; let settings = self.settings(); let parse = self.get_parse(params.path.clone(), Some(FeatureName::Format))?; if !settings.as_ref().formatter().format_with_errors && parse.has_errors() { return Err(WorkspaceError::format_with_errors_disabled()); } format_range(&params.path, parse, settings, params.range) } fn format_on_type(&self, params: FormatOnTypeParams) -> Result<Printed, WorkspaceError> { let capabilities = self.get_capabilities(&params.path); let format_on_type = capabilities .formatter .format_on_type .ok_or_else(self.build_capability_error(&params.path))?; let settings = self.settings(); let parse = self.get_parse(params.path.clone(), Some(FeatureName::Format))?; if 
!settings.as_ref().formatter().format_with_errors && parse.has_errors() { return Err(WorkspaceError::format_with_errors_disabled()); } format_on_type(&params.path, parse, settings, params.offset) } fn fix_file(&self, params: super::FixFileParams) -> Result<FixFileResult, WorkspaceError> { let capabilities = self.get_capabilities(&params.path); let fix_all = capabilities .analyzer .fix_all .ok_or_else(self.build_capability_error(&params.path))?; let settings = self.settings.read().unwrap(); let parse = self.get_parse(params.path.clone(), Some(FeatureName::Lint))?; let rules = settings.linter().rules.as_ref(); fix_all(FixAllParams { parse, rules, fix_file_mode: params.fix_file_mode, settings: self.settings(), should_format: params.should_format, rome_path: &params.path, }) } fn rename(&self, params: super::RenameParams) -> Result<RenameResult, WorkspaceError> { let capabilities = self.get_capabilities(&params.path); let rename = capabilities .analyzer .rename .ok_or_else(self.build_capability_error(&params.path))?; let parse = self.get_parse(params.path.clone(), None)?; let result = rename(&params.path, parse, params.symbol_at, params.new_name)?; Ok(result) } fn rage(&self, _: RageParams) -> Result<RageResult, WorkspaceError> { let entries = vec![ RageEntry::section("Workspace"), RageEntry::pair("Open Documents", &format!("{}", self.documents.len())), ]; Ok(RageResult { entries }) } fn server_info(&self) -> Option<&ServerInfo> { None } fn organize_imports( &self, params: OrganizeImportsParams, ) -> Result<OrganizeImportsResult, WorkspaceError> { let capabilities = self.get_capabilities(&params.path); let organize_imports = capabilities .analyzer .organize_imports .ok_or_else(self.build_capability_error(&params.path))?; let parse = self.get_parse(params.path, None)?; let result = organize_imports(parse)?; Ok(result) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/configuration/merge.rs
crates/rome_service/src/configuration/merge.rs
/// Simple trait to merge two values of the same type
pub trait MergeWith<T> {
    /// Merges one type with another
    fn merge_with(&mut self, other: T);

    /// Merges one type with another, if the condition is met
    fn merge_with_if(&mut self, other: T, condition: bool) {
        // Guard clause: when the condition doesn't hold, `other` is simply
        // dropped and `self` is left untouched.
        if !condition {
            return;
        }
        self.merge_with(other)
    }
}
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/configuration/javascript.rs
crates/rome_service/src/configuration/javascript.rs
use crate::configuration::merge::MergeWith; use bpaf::Bpaf; use rome_deserialize::StringSet; use rome_js_formatter::context::{ trailing_comma::TrailingComma, ArrowParentheses, QuoteProperties, QuoteStyle, Semicolons, }; use serde::{Deserialize, Serialize}; #[derive(Default, Debug, Deserialize, Serialize, Eq, PartialEq, Clone, Bpaf)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[serde(default, deny_unknown_fields)] pub struct JavascriptConfiguration { #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(external(javascript_formatter), optional)] pub formatter: Option<JavascriptFormatter>, #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(external(javascript_parser), optional)] pub parser: Option<JavascriptParser>, /// A list of global bindings that should be ignored by the analyzers /// /// If defined here, they should not emit diagnostics. #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(hide)] pub globals: Option<StringSet>, // #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(external(javascript_organize_imports), optional)] pub organize_imports: Option<JavascriptOrganizeImports>, } impl MergeWith<JavascriptConfiguration> for JavascriptConfiguration { fn merge_with(&mut self, other: JavascriptConfiguration) { if let Some(other_formatter) = other.formatter { let formatter = self .formatter .get_or_insert_with(JavascriptFormatter::default); formatter.merge_with(other_formatter); } } } impl MergeWith<Option<JavascriptFormatter>> for JavascriptConfiguration { fn merge_with(&mut self, other: Option<JavascriptFormatter>) { if let Some(other_formatter) = other { let formatter = self .formatter .get_or_insert_with(JavascriptFormatter::default); formatter.merge_with(other_formatter); } } } impl JavascriptConfiguration { pub(crate) const KNOWN_KEYS: &'static [&'static str] = &["formatter", "globals", "organizeImports", "parser"]; pub fn with_formatter() -> Self { Self { formatter: Some(JavascriptFormatter::default()), 
..JavascriptConfiguration::default() } } } #[derive(Default, Debug, Deserialize, Serialize, Eq, PartialEq, Clone, Bpaf)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[serde(rename_all = "camelCase", default, deny_unknown_fields)] pub struct JavascriptFormatter { /// The style for quotes. Defaults to double. #[bpaf(long("quote-style"), argument("double|single"), optional)] #[serde(skip_serializing_if = "Option::is_none")] pub quote_style: Option<QuoteStyle>, /// The style for JSX quotes. Defaults to double. #[bpaf(long("jsx-quote-style"), argument("double|single"), optional)] #[serde(skip_serializing_if = "Option::is_none")] pub jsx_quote_style: Option<QuoteStyle>, /// When properties in objects are quoted. Defaults to asNeeded. #[bpaf(long("quote-properties"), argument("preserve|as-needed"), optional)] #[serde(skip_serializing_if = "Option::is_none")] pub quote_properties: Option<QuoteProperties>, /// Print trailing commas wherever possible in multi-line comma-separated syntactic structures. Defaults to "all". #[bpaf(long("trailing-comma"), argument("all|es5|none"), optional)] #[serde(skip_serializing_if = "Option::is_none")] pub trailing_comma: Option<TrailingComma>, /// Whether the formatter prints semicolons for all statements or only in for statements where it is necessary because of ASI. #[bpaf(long("semicolons"), argument("always|as-needed"), optional)] #[serde(skip_serializing_if = "Option::is_none")] pub semicolons: Option<Semicolons>, /// Whether to add non-necessary parentheses to arrow functions. Defaults to "always". 
#[bpaf(long("arrow-parentheses"), argument("always|as-needed"), optional)] #[serde(skip_serializing_if = "Option::is_none")] pub arrow_parentheses: Option<ArrowParentheses>, } impl JavascriptFormatter { pub(crate) const KNOWN_KEYS: &'static [&'static str] = &[ "quoteStyle", "jsxQuoteStyle", "quoteProperties", "trailingComma", "semicolons", "arrowParentheses", ]; } impl MergeWith<JavascriptFormatter> for JavascriptFormatter { fn merge_with(&mut self, other: JavascriptFormatter) { if let Some(arrow_parentheses) = other.arrow_parentheses { self.arrow_parentheses = Some(arrow_parentheses); } if let Some(quote_properties) = other.quote_properties { self.quote_properties = Some(quote_properties); } if let Some(quote_style) = other.quote_style { self.quote_style = Some(quote_style); } if let Some(jsx_quote_style) = other.jsx_quote_style { self.jsx_quote_style = Some(jsx_quote_style); } if let Some(semicolons) = other.semicolons { self.semicolons = Some(semicolons); } if let Some(trailing_comma) = other.trailing_comma { self.trailing_comma = Some(trailing_comma); } } } #[derive(Debug, Default, Deserialize, Serialize, Eq, PartialEq, Clone, Bpaf)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[serde(default, deny_unknown_fields)] pub struct JavascriptOrganizeImports {} #[derive(Default, Debug, Deserialize, Serialize, Eq, PartialEq, Clone, Bpaf)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[serde(rename_all = "camelCase", default, deny_unknown_fields)] pub struct JavascriptParser { #[bpaf(hide)] #[serde(skip_serializing_if = "Option::is_none")] /// It enables the experimental and unsafe parsing of parameter decorators /// /// These decorators belong to an old proposal, and they are subject to change. 
pub unsafe_parameter_decorators_enabled: Option<bool>, } impl JavascriptParser { pub(crate) const KNOWN_KEYS: &'static [&'static str] = &["unsafeParameterDecoratorsEnabled"]; } impl MergeWith<JavascriptParser> for JavascriptParser { fn merge_with(&mut self, other: JavascriptParser) { if let Some(unsafe_parameter_decorators_enabled) = other.unsafe_parameter_decorators_enabled { self.unsafe_parameter_decorators_enabled = Some(unsafe_parameter_decorators_enabled); } } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/configuration/diagnostics.rs
crates/rome_service/src/configuration/diagnostics.rs
use crate::WorkspaceError;
use rome_console::fmt::Display;
use rome_console::{markup, MarkupBuf};
use rome_deserialize::DeserializationDiagnostic;
use rome_diagnostics::{
    Advices, Category, Diagnostic, DiagnosticTags, Location, LogCategory, MessageAndDescription,
    Severity, Visit,
};
use rome_rowan::SyntaxError;
use serde::{Deserialize, Serialize};
use std::fmt::{Debug, Formatter};

/// Series of errors that can be thrown while computing the configuration
#[derive(serde::Serialize, serde::Deserialize)]
pub enum ConfigurationDiagnostic {
    /// Thrown when the program can't serialize the configuration, while saving it
    SerializationError(SerializationError),

    /// Thrown when trying to **create** a new configuration file, but it exists already
    ConfigAlreadyExists(ConfigAlreadyExists),

    /// Error thrown when de-serialising the configuration from file, the issues can be many:
    /// - syntax error
    /// - incorrect fields
    /// - incorrect values
    Deserialization(DeserializationDiagnostic),

    /// When something is wrong with the configuration
    InvalidConfiguration(InvalidConfiguration),

    /// Thrown when the pattern inside the `ignore` field errors
    InvalidIgnorePattern(InvalidIgnorePattern),

    /// Thrown when there's something wrong with the files specified inside `"extends"`
    CantLoadExtendFile(CantLoadExtendFile),
}

// A rowan `SyntaxError` carries no message payload of its own, so it is folded
// into a generic "Syntax Error" deserialization diagnostic.
impl From<SyntaxError> for ConfigurationDiagnostic {
    fn from(_: SyntaxError) -> Self {
        ConfigurationDiagnostic::Deserialization(DeserializationDiagnostic::new(
            markup! {"Syntax Error"},
        ))
    }
}

impl From<DeserializationDiagnostic> for ConfigurationDiagnostic {
    fn from(value: DeserializationDiagnostic) -> Self {
        ConfigurationDiagnostic::Deserialization(value)
    }
}

impl ConfigurationDiagnostic {
    // Convenience constructors for each variant.
    pub(crate) fn new_serialization_error() -> Self {
        Self::SerializationError(SerializationError)
    }

    // Builds the human-readable message eagerly from the failing pattern and
    // the glob library's reason string.
    pub(crate) fn new_invalid_ignore_pattern(
        pattern: impl Into<String>,
        reason: impl Into<String>,
    ) -> Self {
        Self::InvalidIgnorePattern(InvalidIgnorePattern {
            message: format!(
                "Couldn't parse the {}, reason: {}",
                pattern.into(),
                reason.into()
            ),
        })
    }

    pub fn new_already_exists() -> Self {
        Self::ConfigAlreadyExists(ConfigAlreadyExists {})
    }

    pub fn invalid_configuration(message: impl Display) -> Self {
        Self::InvalidConfiguration(InvalidConfiguration {
            message: MessageAndDescription::from(markup! {{message}}.to_owned()),
        })
    }
}

// Debug defers to Display so both render the diagnostic description.
impl Debug for ConfigurationDiagnostic {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        std::fmt::Display::fmt(self, f)
    }
}

impl std::fmt::Display for ConfigurationDiagnostic {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        self.description(f)
    }
}

// Pure delegation: every `Diagnostic` method forwards to the inner variant.
// Keep the match arms in sync when adding a variant (the exhaustive match
// gives a compile error otherwise, which is intended).
impl Diagnostic for ConfigurationDiagnostic {
    fn severity(&self) -> Severity {
        match self {
            ConfigurationDiagnostic::SerializationError(error) => error.severity(),
            ConfigurationDiagnostic::ConfigAlreadyExists(error) => error.severity(),
            ConfigurationDiagnostic::Deserialization(error) => error.severity(),
            ConfigurationDiagnostic::InvalidIgnorePattern(error) => error.severity(),
            ConfigurationDiagnostic::CantLoadExtendFile(error) => error.severity(),
            ConfigurationDiagnostic::InvalidConfiguration(error) => error.severity(),
        }
    }

    fn category(&self) -> Option<&'static Category> {
        match self {
            ConfigurationDiagnostic::SerializationError(error) => error.category(),
            ConfigurationDiagnostic::ConfigAlreadyExists(error) => error.category(),
            ConfigurationDiagnostic::Deserialization(error) => error.category(),
            ConfigurationDiagnostic::InvalidIgnorePattern(error) => error.category(),
            ConfigurationDiagnostic::CantLoadExtendFile(error) => error.category(),
            ConfigurationDiagnostic::InvalidConfiguration(error) => error.category(),
        }
    }

    fn tags(&self) -> DiagnosticTags {
        match self {
            ConfigurationDiagnostic::SerializationError(error) => error.tags(),
            ConfigurationDiagnostic::ConfigAlreadyExists(error) => error.tags(),
            ConfigurationDiagnostic::Deserialization(error) => error.tags(),
            ConfigurationDiagnostic::InvalidIgnorePattern(error) => error.tags(),
            ConfigurationDiagnostic::CantLoadExtendFile(error) => error.tags(),
            ConfigurationDiagnostic::InvalidConfiguration(error) => error.tags(),
        }
    }

    fn location(&self) -> Location<'_> {
        match self {
            ConfigurationDiagnostic::SerializationError(error) => error.location(),
            ConfigurationDiagnostic::ConfigAlreadyExists(error) => error.location(),
            ConfigurationDiagnostic::Deserialization(error) => error.location(),
            ConfigurationDiagnostic::InvalidIgnorePattern(error) => error.location(),
            ConfigurationDiagnostic::CantLoadExtendFile(error) => error.location(),
            ConfigurationDiagnostic::InvalidConfiguration(error) => error.location(),
        }
    }

    fn source(&self) -> Option<&dyn Diagnostic> {
        match self {
            ConfigurationDiagnostic::SerializationError(error) => error.source(),
            ConfigurationDiagnostic::ConfigAlreadyExists(error) => error.source(),
            ConfigurationDiagnostic::Deserialization(error) => error.source(),
            ConfigurationDiagnostic::InvalidIgnorePattern(error) => error.source(),
            ConfigurationDiagnostic::CantLoadExtendFile(error) => error.source(),
            ConfigurationDiagnostic::InvalidConfiguration(error) => error.source(),
        }
    }

    fn message(&self, fmt: &mut rome_console::fmt::Formatter<'_>) -> std::io::Result<()> {
        match self {
            ConfigurationDiagnostic::SerializationError(error) => error.message(fmt),
            ConfigurationDiagnostic::ConfigAlreadyExists(error) => error.message(fmt),
            ConfigurationDiagnostic::Deserialization(error) => error.message(fmt),
            ConfigurationDiagnostic::InvalidIgnorePattern(error) => error.message(fmt),
            ConfigurationDiagnostic::CantLoadExtendFile(error) => error.message(fmt),
            ConfigurationDiagnostic::InvalidConfiguration(error) => error.message(fmt),
        }
    }

    fn description(&self, fmt: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            ConfigurationDiagnostic::SerializationError(error) => error.description(fmt),
            ConfigurationDiagnostic::ConfigAlreadyExists(error) => error.description(fmt),
            ConfigurationDiagnostic::Deserialization(error) => error.description(fmt),
            ConfigurationDiagnostic::InvalidIgnorePattern(error) => error.description(fmt),
            ConfigurationDiagnostic::CantLoadExtendFile(error) => error.description(fmt),
            ConfigurationDiagnostic::InvalidConfiguration(error) => error.description(fmt),
        }
    }

    fn advices(&self, visitor: &mut dyn Visit) -> std::io::Result<()> {
        match self {
            ConfigurationDiagnostic::SerializationError(error) => error.advices(visitor),
            ConfigurationDiagnostic::ConfigAlreadyExists(error) => error.advices(visitor),
            ConfigurationDiagnostic::Deserialization(error) => error.advices(visitor),
            ConfigurationDiagnostic::InvalidIgnorePattern(error) => error.advices(visitor),
            ConfigurationDiagnostic::CantLoadExtendFile(error) => error.advices(visitor),
            ConfigurationDiagnostic::InvalidConfiguration(error) => error.advices(visitor),
        }
    }

    fn verbose_advices(&self, visitor: &mut dyn Visit) -> std::io::Result<()> {
        match self {
            ConfigurationDiagnostic::SerializationError(error) => error.verbose_advices(visitor),
            ConfigurationDiagnostic::ConfigAlreadyExists(error) => error.verbose_advices(visitor),
            ConfigurationDiagnostic::Deserialization(error) => error.verbose_advices(visitor),
            ConfigurationDiagnostic::InvalidIgnorePattern(error) => error.verbose_advices(visitor),
            ConfigurationDiagnostic::CantLoadExtendFile(error) => error.verbose_advices(visitor),
            ConfigurationDiagnostic::InvalidConfiguration(error) => error.verbose_advices(visitor),
        }
    }
}

// Collected informational messages rendered as "info" log advices.
#[derive(Debug, Serialize, Deserialize, Default)]
pub struct ConfigurationAdvices {
    messages: Vec<MarkupBuf>,
}

impl Advices for ConfigurationAdvices {
    fn record(&self, visitor: &mut dyn Visit) -> std::io::Result<()> {
        for message in &self.messages {
            visitor.record_log(LogCategory::Info, message)?;
        }

        Ok(())
    }
}

#[derive(Debug, Serialize, Deserialize, Diagnostic)]
#[diagnostic(
    message = "Failed to serialize",
    category = "configuration",
    severity = Error
)]
pub struct SerializationError;

#[derive(Debug, Serialize, Deserialize, Diagnostic)]
#[diagnostic(
    message = "It seems that a configuration file already exists",
    category = "configuration",
    severity = Error
)]
pub struct ConfigAlreadyExists {}

#[derive(Debug, Serialize, Deserialize, Diagnostic)]
#[diagnostic(
    category = "configuration",
    severity = Error,
)]
pub struct InvalidIgnorePattern {
    #[message]
    #[description]
    message: String,
}

#[derive(Debug, Serialize, Deserialize, Diagnostic)]
#[diagnostic(
    category = "configuration",
    severity = Error,
)]
pub struct CantLoadExtendFile {
    // Path of the `extends` entry that failed to load; shown as the
    // diagnostic's resource location.
    #[location(resource)]
    file_path: String,
    #[message]
    #[description]
    message: MessageAndDescription,

    #[verbose_advice]
    verbose_advice: ConfigurationAdvices,
}

impl CantLoadExtendFile {
    pub fn new(file_path: impl Into<String>, message: impl Display) -> Self {
        Self {
            file_path: file_path.into(),
            message: MessageAndDescription::from(markup! {{message}}.to_owned()),
            verbose_advice: ConfigurationAdvices::default(),
        }
    }

    // Builder-style: appends one verbose advice message and returns self.
    // NOTE(review): parameter name `messsage` is a pre-existing typo; kept
    // as-is since it is a private binding with no API impact.
    pub fn with_verbose_advice(mut self, messsage: impl Display) -> Self {
        self.verbose_advice
            .messages
            .push(markup! {{messsage}}.to_owned());
        self
    }
}

impl From<CantLoadExtendFile> for WorkspaceError {
    fn from(value: CantLoadExtendFile) -> Self {
        WorkspaceError::Configuration(ConfigurationDiagnostic::CantLoadExtendFile(value))
    }
}

#[derive(Debug, Serialize, Deserialize, Diagnostic)]
#[diagnostic(
    category = "configuration",
    severity = Error,
)]
pub struct InvalidConfiguration {
    #[message]
    #[description]
    message: MessageAndDescription,
}

#[cfg(test)]
mod test {
    use crate::configuration::diagnostics::ConfigurationDiagnostic;
    use crate::{Configuration, MatchOptions, Matcher};
    use rome_deserialize::json::deserialize_from_json_str;
    use rome_diagnostics::{print_diagnostic_to_string, DiagnosticExt, Error};
    use rome_json_parser::JsonParserOptions;

    // Renders the diagnostic to a string and snapshots it with insta,
    // without prefixing the module path to the snapshot name.
    fn snap_diagnostic(test_name: &str, diagnostic: Error) {
        let content = print_diagnostic_to_string(&diagnostic);

        insta::with_settings!({
            prepend_module_to_snapshot => false,
        }, {
            insta::assert_snapshot!(test_name, content);
        });
    }

    // Guards against accidental size growth of the enum: each variant's
    // payload contributes to the size of every Result carrying it.
    #[test]
    fn diagnostic_size() {
        assert_eq!(std::mem::size_of::<ConfigurationDiagnostic>(), 104);
    }

    #[test]
    fn config_already_exists() {
        snap_diagnostic(
            "config_already_exists",
            ConfigurationDiagnostic::new_already_exists().with_file_path("rome.json"),
        )
    }

    #[test]
    fn incorrect_pattern() {
        let mut matcher = Matcher::new(MatchOptions {
            case_sensitive: true,
            require_literal_leading_dot: false,
            require_literal_separator: false,
        });
        // "*******" is not a valid glob, so add_pattern must fail.
        let pattern = "*******";
        if let Err(error) = matcher.add_pattern(pattern) {
            snap_diagnostic(
                "incorrect_pattern",
                ConfigurationDiagnostic::new_invalid_ignore_pattern(
                    pattern.to_string(),
                    error.msg.to_string(),
                )
                .with_file_path("rome.json"),
            )
        } else {
            panic!("Tha pattern should fail")
        }
    }

    #[test]
    fn deserialization_error() {
        // Deliberately malformed JSON: "formatter" key without a value.
        let content = "{ \n\n\"formatter\" }";
        let result =
            deserialize_from_json_str::<Configuration>(content, JsonParserOptions::default());

        assert!(result.has_errors());
        for diagnostic in result.into_diagnostics() {
            snap_diagnostic("deserialization_error", diagnostic)
        }
    }

    // Smoke test: a rule with unknown `options` must still deserialize
    // without panicking.
    #[test]
    fn deserialization_quick_check() {
        let content = r#"{ "linter": { "rules": { "recommended": true, "suspicious": { "noDebugger": { "level": "off", "options": { "hooks": [] } } } } } }"#;
        let _result =
            deserialize_from_json_str::<Configuration>(content, JsonParserOptions::default())
                .into_deserialized();
    }
}
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/configuration/vcs.rs
crates/rome_service/src/configuration/vcs.rs
use crate::configuration::merge::MergeWith;
use bpaf::Bpaf;
use serde::{Deserialize, Serialize};
use std::str::FromStr;

// File name consulted when the client kind is git; see `VcsClientKind::ignore_file`.
const GIT_IGNORE_FILE_NAME: &str = ".gitignore";

/// Set of properties to integrate Rome with a VCS software.
#[derive(Debug, Default, Deserialize, Serialize, Clone, Bpaf)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(deny_unknown_fields, rename_all = "camelCase")]
pub struct VcsConfiguration {
    // NOTE: the `///` doc comments below also become the bpaf `--help` text,
    // so they are user-facing and should be edited with that in mind.
    /// The kind of client.
    #[serde(skip_serializing_if = "Option::is_none")]
    #[bpaf(long("vcs-client-kind"), argument("git"), optional)]
    pub client_kind: Option<VcsClientKind>,

    /// Whether Rome should integrate itself with the VCS client
    #[bpaf(long("vcs-enabled"), argument("true|false"))]
    pub enabled: Option<bool>,

    /// Whether Rome should use the VCS ignore file. When [true], Rome will ignore the files
    /// specified in the ignore file.
    #[bpaf(long("vcs-use-ignore-file"), argument("true|false"))]
    pub use_ignore_file: Option<bool>,

    /// The folder where Rome should check for VCS files. By default, Rome will use the same
    /// folder where `rome.json` was found.
    ///
    /// If Rome can't find the configuration, it will attempt to use the current working directory.
    /// If no current working directory can't be found, Rome won't use the VCS integration, and a diagnostic
    /// will be emitted
    #[serde(skip_serializing_if = "Option::is_none")]
    #[bpaf(long("vcs-root"), argument("PATH"), optional)]
    pub root: Option<String>,
}

impl VcsConfiguration {
    // Only an explicit `enabled: true` activates the integration;
    // a missing value (`None`) counts as disabled.
    pub const fn is_enabled(&self) -> bool {
        matches!(self.enabled, Some(true))
    }
    pub const fn is_disabled(&self) -> bool {
        !self.is_enabled()
    }
    // Conversely, only an explicit `useIgnoreFile: false` disables the
    // ignore file; a missing value leaves it enabled.
    pub const fn ignore_file_disabled(&self) -> bool {
        matches!(self.use_ignore_file, Some(false))
    }
}

// Field-by-field merge: `Some` values in `other` overwrite `self`.
impl MergeWith<VcsConfiguration> for VcsConfiguration {
    fn merge_with(&mut self, other: VcsConfiguration) {
        if let Some(enabled) = other.enabled {
            self.enabled = Some(enabled);
        }
        if let Some(client_kind) = other.client_kind {
            self.client_kind = Some(client_kind);
        }
        if let Some(use_ignore_file) = other.use_ignore_file {
            self.use_ignore_file = Some(use_ignore_file);
        }
        if let Some(root) = other.root {
            self.root = Some(root);
        }
    }
}

#[derive(Debug, Default, Deserialize, Clone, Serialize)]
#[cfg_attr(feature = "schema", derive(schemars::JsonSchema))]
#[serde(rename_all = "camelCase")]
pub enum VcsClientKind {
    #[default]
    /// Integration with the git client as VCS
    Git,
}

impl VcsClientKind {
    // Values accepted by `FromStr` below; surfaced in error messages/diagnostics.
    pub const KNOWN_VALUES: &'static [&'static str] = &["git"];

    // Maps each client kind to the ignore file Rome should read.
    pub const fn ignore_file(&self) -> &'static str {
        match self {
            VcsClientKind::Git => GIT_IGNORE_FILE_NAME,
        }
    }
}

impl FromStr for VcsClientKind {
    type Err = &'static str;

    fn from_str(s: &str) -> Result<Self, Self::Err> {
        match s {
            "git" => Ok(Self::Git),
            _ => Err("Value not supported for VcsClientKind"),
        }
    }
}

impl VcsConfiguration {
    // The camelCase keys accepted under `vcs` in `rome.json`.
    pub const KNOWN_KEYS: &'static [&'static str] =
        &["clientKind", "enabled", "useIgnoreFile", "root"];
}
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/configuration/json.rs
crates/rome_service/src/configuration/json.rs
use crate::configuration::merge::MergeWith; use bpaf::Bpaf; use serde::{Deserialize, Serialize}; #[derive(Default, Debug, Deserialize, Serialize, Eq, PartialEq, Clone, Bpaf)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[serde(default, deny_unknown_fields)] pub struct JsonConfiguration { #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(external(json_parser), optional)] pub parser: Option<JsonParser>, } impl JsonConfiguration { pub const KNOWN_KEYS: &'static [&'static str] = &["parser"]; } impl MergeWith<JsonConfiguration> for JsonConfiguration { fn merge_with(&mut self, other: JsonConfiguration) { if let Some(other_parser) = other.parser { let parser = self.parser.get_or_insert_with(JsonParser::default); parser.merge_with(other_parser); } } } #[derive(Debug, Default, Deserialize, Serialize, Eq, PartialEq, Clone, Bpaf)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[serde(default, deny_unknown_fields)] pub struct JavascriptOrganizeImports {} #[derive(Default, Debug, Deserialize, Serialize, Eq, PartialEq, Clone, Bpaf)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[serde(rename_all = "camelCase", default, deny_unknown_fields)] pub struct JsonParser { #[bpaf(hide)] #[serde(skip_serializing_if = "Option::is_none")] /// Allow parsing comments in `.json` files pub allow_comments: Option<bool>, } impl JsonParser { pub(crate) const KNOWN_KEYS: &'static [&'static str] = &["allowComments"]; } impl MergeWith<JsonParser> for JsonParser { fn merge_with(&mut self, other: JsonParser) { if let Some(allow_comments) = other.allow_comments { self.allow_comments = Some(allow_comments); } } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/configuration/organize_imports.rs
crates/rome_service/src/configuration/organize_imports.rs
use crate::configuration::merge::MergeWith; use crate::settings::OrganizeImportsSettings; use crate::{ConfigurationDiagnostic, MatchOptions, Matcher, WorkspaceError}; use bpaf::Bpaf; use rome_deserialize::StringSet; use serde::{Deserialize, Serialize}; #[derive(Debug, Serialize, Deserialize, Eq, PartialEq, Clone, Bpaf)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[serde(rename_all = "camelCase", default, deny_unknown_fields)] pub struct OrganizeImports { /// Enables the organization of imports #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(hide)] pub enabled: Option<bool>, /// A list of Unix shell style patterns. The formatter will ignore files/folders that will /// match these patterns. #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(hide)] pub ignore: Option<StringSet>, } impl Default for OrganizeImports { fn default() -> Self { Self { enabled: Some(true), ignore: None, } } } impl OrganizeImports { pub const fn is_disabled(&self) -> bool { !self.is_enabled() } pub const fn is_enabled(&self) -> bool { matches!(self.enabled, Some(true)) } } impl MergeWith<OrganizeImports> for OrganizeImports { fn merge_with(&mut self, other: OrganizeImports) { if let Some(enabled) = other.enabled { self.enabled = Some(enabled) } } } impl TryFrom<OrganizeImports> for OrganizeImportsSettings { type Error = WorkspaceError; fn try_from(organize_imports: OrganizeImports) -> Result<Self, Self::Error> { let mut matcher = Matcher::new(MatchOptions { case_sensitive: true, require_literal_leading_dot: false, require_literal_separator: false, }); let is_disabled = organize_imports.is_disabled(); if let Some(ignore) = organize_imports.ignore { for pattern in ignore.index_set() { matcher.add_pattern(pattern).map_err(|err| { WorkspaceError::Configuration( ConfigurationDiagnostic::new_invalid_ignore_pattern( pattern.to_string(), err.msg.to_string(), ), ) })?; } } Ok(Self { enabled: !is_disabled, ignored_files: matcher, }) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/configuration/mod.rs
crates/rome_service/src/configuration/mod.rs
//! This module contains the configuration of `rome.json` //! //! The configuration is divided by "tool", and then it's possible to further customise it //! by language. The language might further options divided by tool. pub mod diagnostics; pub mod formatter; mod generated; pub mod javascript; pub mod json; pub mod linter; mod merge; pub mod organize_imports; mod parse; pub mod vcs; pub use crate::configuration::diagnostics::ConfigurationDiagnostic; use crate::configuration::generated::push_to_analyzer_rules; pub use crate::configuration::merge::MergeWith; use crate::configuration::organize_imports::{organize_imports, OrganizeImports}; use crate::configuration::vcs::{vcs_configuration, VcsConfiguration}; use crate::settings::{LanguagesSettings, LinterSettings}; use crate::{DynRef, WorkspaceError, VERSION}; use bpaf::Bpaf; pub use formatter::{formatter_configuration, FormatterConfiguration, PlainIndentStyle}; pub use javascript::{javascript_configuration, JavascriptConfiguration, JavascriptFormatter}; pub use json::{json_configuration, JsonConfiguration}; pub use linter::{linter_configuration, LinterConfiguration, RuleConfiguration, Rules}; use rome_analyze::{AnalyzerConfiguration, AnalyzerRules}; use rome_deserialize::json::deserialize_from_json_str; use rome_deserialize::{Deserialized, StringSet}; use rome_fs::{AutoSearchResult, FileSystem, OpenOptions}; use rome_js_analyze::metadata; use rome_json_formatter::context::JsonFormatOptions; use rome_json_parser::{parse_json, JsonParserOptions}; use serde::{Deserialize, Serialize}; use std::fmt::Debug; use std::io::ErrorKind; use std::num::NonZeroU64; use std::path::{Path, PathBuf}; /// The configuration that is contained inside the file `rome.json` #[derive(Debug, Deserialize, Serialize, Clone, Bpaf)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[serde(deny_unknown_fields, rename_all = "camelCase")] pub struct Configuration { /// A field for the [JSON schema](https://json-schema.org/) specification 
#[serde(rename(serialize = "$schema", deserialize = "$schema"))] #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(hide)] pub schema: Option<String>, /// The configuration of the VCS integration #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(external(vcs_configuration), optional, hide_usage)] pub vcs: Option<VcsConfiguration>, /// The configuration of the filesystem #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(external(files_configuration), optional, hide_usage)] pub files: Option<FilesConfiguration>, /// The configuration of the formatter #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(external(formatter_configuration), optional)] pub formatter: Option<FormatterConfiguration>, /// The configuration of the import sorting #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(external, optional)] pub organize_imports: Option<OrganizeImports>, /// The configuration for the linter #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(external(linter_configuration), optional)] pub linter: Option<LinterConfiguration>, /// Specific configuration for the JavaScript language #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(external(javascript_configuration), optional)] pub javascript: Option<JavascriptConfiguration>, /// Specific configuration for the Json language #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(external(json_configuration), optional)] pub json: Option<JsonConfiguration>, /// A list of paths to other JSON files, used to extends the current configuration. 
#[serde(skip_serializing_if = "Option::is_none")] #[bpaf(hide)] pub extends: Option<StringSet>, } impl Default for Configuration { fn default() -> Self { Self { files: None, linter: Some(LinterConfiguration { enabled: Some(true), ..LinterConfiguration::default() }), organize_imports: Some(OrganizeImports::default()), formatter: None, javascript: None, schema: None, vcs: None, extends: None, json: None, } } } impl Configuration { const KNOWN_KEYS: &'static [&'static str] = &[ "vcs", "files", "linter", "formatter", "javascript", "json", "$schema", "organizeImports", "extends", ]; pub fn is_formatter_disabled(&self) -> bool { self.formatter .as_ref() .map(|f| f.is_disabled()) .unwrap_or(false) } pub fn is_linter_disabled(&self) -> bool { self.linter .as_ref() .map(|f| f.is_disabled()) .unwrap_or(false) } pub fn is_organize_imports_disabled(&self) -> bool { self.organize_imports .as_ref() .map(|f| f.is_disabled()) .unwrap_or(false) } pub fn is_vcs_disabled(&self) -> bool { self.vcs .as_ref() .map(|f| matches!(f.enabled, Some(false))) .unwrap_or(true) } } impl MergeWith<Configuration> for Configuration { fn merge_with(&mut self, other_configuration: Configuration) { // files self.merge_with(other_configuration.files); // formatter self.merge_with(other_configuration.formatter); // javascript self.merge_with(other_configuration.javascript); // linter self.merge_with(other_configuration.linter); // organize imports self.merge_with(other_configuration.organize_imports); // VCS self.merge_with(other_configuration.vcs); } } impl MergeWith<Option<Configuration>> for Configuration { fn merge_with(&mut self, other_configuration: Option<Configuration>) { if let Some(other_configuration) = other_configuration { self.merge_with(other_configuration); } } } impl MergeWith<Option<VcsConfiguration>> for Configuration { fn merge_with(&mut self, other: Option<VcsConfiguration>) { if let Some(other_vcs) = other { let vcs = self.vcs.get_or_insert_with(VcsConfiguration::default); 
vcs.merge_with(other_vcs); } } } impl MergeWith<Option<OrganizeImports>> for Configuration { fn merge_with(&mut self, other: Option<OrganizeImports>) { if let Some(other_organize_imports) = other { let organize_imports = self .organize_imports .get_or_insert_with(OrganizeImports::default); organize_imports.merge_with(other_organize_imports); } } } impl MergeWith<Option<LinterConfiguration>> for Configuration { fn merge_with(&mut self, other: Option<LinterConfiguration>) { if let Some(other_linter) = other { let linter = self.linter.get_or_insert_with(LinterConfiguration::default); linter.merge_with(other_linter); } } } impl MergeWith<Option<FilesConfiguration>> for Configuration { fn merge_with(&mut self, other: Option<FilesConfiguration>) { if let Some(files_configuration) = other { let files = self.files.get_or_insert_with(FilesConfiguration::default); files.merge_with(files_configuration); }; } } impl MergeWith<Option<JavascriptConfiguration>> for Configuration { fn merge_with(&mut self, other: Option<JavascriptConfiguration>) { if let Some(other) = other { let js_configuration = self .javascript .get_or_insert_with(JavascriptConfiguration::default); js_configuration.merge_with(other); } } } impl MergeWith<Option<FormatterConfiguration>> for Configuration { fn merge_with(&mut self, other: Option<FormatterConfiguration>) { if let Some(other_formatter) = other { let formatter = self .formatter .get_or_insert_with(FormatterConfiguration::default); formatter.merge_with(other_formatter); } } } impl MergeWith<Option<JavascriptFormatter>> for Configuration { fn merge_with(&mut self, other: Option<JavascriptFormatter>) { let javascript_configuration = self .javascript .get_or_insert_with(JavascriptConfiguration::default); javascript_configuration.merge_with(other); } } /// The configuration of the filesystem #[derive(Default, Debug, Deserialize, Serialize, Clone, Bpaf)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[serde(rename_all = "camelCase", 
default, deny_unknown_fields)] pub struct FilesConfiguration { /// The maximum allowed size for source code files in bytes. Files above /// this limit will be ignored for performance reason. Defaults to 1 MiB #[bpaf(long("files-max-size"), argument("NUMBER"))] pub max_size: Option<NonZeroU64>, /// A list of Unix shell style patterns. Rome tools will ignore files/folders that will /// match these patterns. #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(hide)] pub ignore: Option<StringSet>, /// Tells Rome to not emit diagnostics when handling files that doesn't know #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(long("files-ignore-unknown"), argument("true|false"), optional)] pub ignore_unknown: Option<bool>, } impl FilesConfiguration { const KNOWN_KEYS: &'static [&'static str] = &["maxSize", "ignore", "ignoreUnknown"]; } impl MergeWith<FilesConfiguration> for FilesConfiguration { fn merge_with(&mut self, other: FilesConfiguration) { if let Some(ignore) = other.ignore { self.ignore = Some(ignore) } if let Some(max_size) = other.max_size { self.max_size = Some(max_size) } if let Some(ignore_unknown) = other.ignore_unknown { self.ignore_unknown = Some(ignore_unknown) } } } /// - [Result]: if an error occurred while loading the configuration file. /// - [Option]: sometimes not having a configuration file should not be an error, so we need this type. /// - [ConfigurationPayload]: The result of the operation type LoadConfig = Result<Option<ConfigurationPayload>, WorkspaceError>; pub struct ConfigurationPayload { /// The result of the deserialization pub deserialized: Deserialized<Configuration>, /// The path of where the `rome.json` file was found. This contains the `rome.json` name. pub configuration_file_path: PathBuf, /// The base path of where the `rome.json` file was found. /// This has to be used to resolve other configuration files. 
pub configuration_directory_path: PathBuf, } #[derive(Debug, Default, PartialEq)] pub enum ConfigurationBasePath { /// The default mode, not having a configuration file is not an error. #[default] None, /// The base path provided by the LSP, not having a configuration file is not an error. Lsp(PathBuf), /// The base path provided by the user, not having a configuration file is an error. /// Throws any kind of I/O errors. FromUser(PathBuf), } impl ConfigurationBasePath { const fn is_from_user(&self) -> bool { matches!(self, ConfigurationBasePath::FromUser(_)) } } /// Load the configuration from the file system. /// /// The configuration file will be read from the `file_system`. A [base path](ConfigurationBasePath) should be provided. /// /// The function will try to traverse upwards the file system until if finds a `rome.json` file, or there /// aren't directories anymore. /// /// If a the configuration base path was provided by the user, the function will error. If not, Rome will use /// its defaults. 
pub fn load_config( file_system: &DynRef<dyn FileSystem>, base_path: ConfigurationBasePath, ) -> LoadConfig { let config_name = file_system.config_name(); let working_directory = file_system.working_directory(); let configuration_directory = match base_path { ConfigurationBasePath::Lsp(ref path) | ConfigurationBasePath::FromUser(ref path) => { path.clone() } _ => match working_directory { Some(wd) => wd, None => PathBuf::new(), }, }; let should_error = base_path.is_from_user(); let result = file_system.auto_search(configuration_directory, config_name, should_error)?; if let Some(auto_search_result) = result { let AutoSearchResult { content, directory_path, file_path, } = auto_search_result; let deserialized = deserialize_from_json_str::<Configuration>(&content, JsonParserOptions::default()); Ok(Some(ConfigurationPayload { deserialized, configuration_file_path: file_path, configuration_directory_path: directory_path, })) } else { Ok(None) } } /// Creates a new configuration on file system /// /// ## Errors /// /// It fails if: /// - the configuration file already exists /// - the program doesn't have the write rights pub fn create_config( fs: &mut DynRef<dyn FileSystem>, mut configuration: Configuration, ) -> Result<(), WorkspaceError> { let path = PathBuf::from(fs.config_name()); let options = OpenOptions::default().write(true).create_new(true); let mut config_file = fs.open_with_options(&path, options).map_err(|err| { if err.kind() == ErrorKind::AlreadyExists { WorkspaceError::Configuration(ConfigurationDiagnostic::new_already_exists()) } else { WorkspaceError::cant_read_file(format!("{}", path.display())) } })?; // we now check if rome is installed inside `node_modules` and if so, we if VERSION == "0.0.0" { let schema_path = Path::new("./node_modules/rome/configuration_schema.json"); let options = OpenOptions::default().read(true); if fs.open_with_options(schema_path, options).is_ok() { configuration.schema = schema_path.to_str().map(String::from); } } else { 
configuration.schema = Some(format!( "https://docs.rome.tools/schemas/{}/schema.json", VERSION )); } let contents = serde_json::to_string_pretty(&configuration).map_err(|_| { WorkspaceError::Configuration(ConfigurationDiagnostic::new_serialization_error()) })?; let parsed = parse_json(&contents, JsonParserOptions::default()); let formatted = rome_json_formatter::format_node(JsonFormatOptions::default(), &parsed.syntax())? .print() .expect("valid format document"); config_file .set_content(formatted.as_code().as_bytes()) .map_err(|_| WorkspaceError::cant_read_file(format!("{}", path.display())))?; Ok(()) } /// Converts a [WorkspaceSettings] into a suited [configuration for the analyzer]. /// /// The function needs access to a filter, in order to have an easy access to the [metadata] of the /// rules. /// /// The third argument is a closure that accepts a reference to `linter_settings`. /// /// The closure is responsible to map the globals from the correct /// location of the settings. /// /// ## Examples /// /// ```rust /// use rome_service::configuration::to_analyzer_configuration; /// use rome_service::settings::{LanguagesSettings, WorkspaceSettings}; /// let mut settings = WorkspaceSettings::default(); /// settings.languages.javascript.globals = Some(["jQuery".to_string(), "React".to_string()].into()); /// // map globals from JS language /// let analyzer_configuration = /// to_analyzer_configuration(&settings.linter, &settings.languages, |settings| { /// if let Some(globals) = settings.javascript.globals.as_ref() { /// globals /// .iter() /// .map(|global| global.to_string()) /// .collect::<Vec<_>>() /// } else { /// vec![] /// } /// }); /// /// assert_eq!( /// analyzer_configuration.globals, /// vec!["jQuery".to_string(), "React".to_string()] /// ) /// ``` /// /// [WorkspaceSettings]: crate::settings::WorkspaceSettings /// [metadata]: rome_analyze::RegistryRuleMetadata /// [configuration for the analyzer]: AnalyzerConfiguration pub fn 
to_analyzer_configuration<ToGlobals>( linter_settings: &LinterSettings, language_settings: &LanguagesSettings, to_globals: ToGlobals, ) -> AnalyzerConfiguration where ToGlobals: FnOnce(&LanguagesSettings) -> Vec<String>, { let globals: Vec<String> = to_globals(language_settings); let mut analyzer_rules = AnalyzerRules::default(); if let Some(rules) = linter_settings.rules.as_ref() { push_to_analyzer_rules(rules, metadata(), &mut analyzer_rules); } AnalyzerConfiguration { globals, rules: analyzer_rules, } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/configuration/formatter.rs
crates/rome_service/src/configuration/formatter.rs
use crate::configuration::merge::MergeWith; use crate::settings::FormatSettings; use crate::{ConfigurationDiagnostic, MatchOptions, Matcher, WorkspaceError}; use bpaf::Bpaf; use rome_deserialize::StringSet; use rome_formatter::{IndentStyle, LineWidth}; use serde::{Deserialize, Serialize}; use std::str::FromStr; /// Options applied to the formatter #[derive(Deserialize, Serialize, Debug, Eq, PartialEq, Clone, Bpaf)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[serde(rename_all = "camelCase", default, deny_unknown_fields)] pub struct FormatterConfiguration { // if `false`, it disables the feature. `true` by default #[bpaf(hide)] pub enabled: Option<bool>, /// Stores whether formatting should be allowed to proceed if a given file /// has syntax errors #[bpaf(hide)] pub format_with_errors: Option<bool>, /// The indent style. #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(long("indent-style"), argument("tab|space"), optional)] pub indent_style: Option<PlainIndentStyle>, /// The size of the indentation, 2 by default #[serde(skip_serializing_if = "Option::is_none")] #[bpaf(long("indent-size"), argument("NUMBER"), optional)] pub indent_size: Option<u8>, /// What's the max width of a line. Defaults to 80. #[serde( deserialize_with = "deserialize_line_width", serialize_with = "serialize_line_width" )] #[bpaf(long("line-width"), argument("NUMBER"), optional)] pub line_width: Option<LineWidth>, /// A list of Unix shell style patterns. The formatter will ignore files/folders that will /// match these patterns. 
#[serde(skip_serializing_if = "Option::is_none")] #[bpaf(hide)] pub ignore: Option<StringSet>, } impl FormatterConfiguration { pub const fn is_disabled(&self) -> bool { matches!(self.enabled, Some(false)) } pub(crate) const KNOWN_KEYS: &'static [&'static str] = &[ "enabled", "formatWithErrors", "indentStyle", "indentSize", "lineWidth", "ignore", ]; } impl Default for FormatterConfiguration { fn default() -> Self { Self { enabled: Some(true), format_with_errors: Some(false), indent_size: Some(2), indent_style: Some(PlainIndentStyle::default()), line_width: Some(LineWidth::default()), ignore: None, } } } impl MergeWith<FormatterConfiguration> for FormatterConfiguration { fn merge_with(&mut self, other: FormatterConfiguration) { if let Some(enabled) = other.enabled { self.enabled = Some(enabled); } if let Some(indent_size) = other.indent_size { self.indent_size = Some(indent_size); } if let Some(indent_style) = other.indent_style { self.indent_style = Some(indent_style); } if let Some(line_width) = other.line_width { self.line_width = Some(line_width); } if let Some(format_with_errors) = other.format_with_errors { self.format_with_errors = Some(format_with_errors); } if let Some(ignore) = other.ignore { self.ignore = Some(ignore) } } } impl TryFrom<FormatterConfiguration> for FormatSettings { type Error = WorkspaceError; fn try_from(conf: FormatterConfiguration) -> Result<Self, Self::Error> { let indent_style = match conf.indent_style { Some(PlainIndentStyle::Tab) => IndentStyle::Tab, Some(PlainIndentStyle::Space) => { IndentStyle::Space(conf.indent_size.unwrap_or_default()) } None => IndentStyle::default(), }; let mut matcher = Matcher::new(MatchOptions { case_sensitive: true, require_literal_leading_dot: false, require_literal_separator: false, }); if let Some(ignore) = conf.ignore { for pattern in ignore.index_set() { matcher.add_pattern(pattern).map_err(|err| { WorkspaceError::Configuration( ConfigurationDiagnostic::new_invalid_ignore_pattern( pattern.to_string(), 
err.msg.to_string(), ), ) })?; } } Ok(Self { enabled: conf.enabled.unwrap_or_default(), indent_style: Some(indent_style), line_width: conf.line_width, format_with_errors: conf.format_with_errors.unwrap_or_default(), ignored_files: matcher, }) } } fn deserialize_line_width<'de, D>(deserializer: D) -> Result<Option<LineWidth>, D::Error> where D: serde::de::Deserializer<'de>, { let value: u16 = Deserialize::deserialize(deserializer)?; let line_width = LineWidth::try_from(value).map_err(serde::de::Error::custom)?; Ok(Some(line_width)) } pub fn serialize_line_width<S>(line_width: &Option<LineWidth>, s: S) -> Result<S::Ok, S::Error> where S: serde::ser::Serializer, { s.serialize_u16(line_width.unwrap_or_default().value()) } #[derive(Deserialize, Serialize, Debug, Eq, PartialEq, Clone, Default)] #[cfg_attr(feature = "schema", derive(schemars::JsonSchema))] #[serde(rename_all = "camelCase")] pub enum PlainIndentStyle { /// Tab #[default] Tab, /// Space Space, } impl PlainIndentStyle { pub(crate) const KNOWN_VALUES: &'static [&'static str] = &["tab", "space"]; } impl FromStr for PlainIndentStyle { type Err = String; fn from_str(s: &str) -> Result<Self, Self::Err> { match s { "tab" => Ok(PlainIndentStyle::Tab), "space" => Ok(PlainIndentStyle::Space), _ => Err("Unsupported value for this option".to_string()), } } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/configuration/generated.rs
crates/rome_service/src/configuration/generated.rs
//! Generated file, do not edit by hand, see `xtask/codegen` use crate::configuration::linter::*; use crate::{RuleConfiguration, Rules}; use rome_analyze::{AnalyzerRules, MetadataRegistry}; pub(crate) fn push_to_analyzer_rules( rules: &Rules, metadata: &MetadataRegistry, analyzer_rules: &mut AnalyzerRules, ) { if let Some(rules) = rules.a11y.as_ref() { for rule_name in &A11y::GROUP_RULES { if let Some(RuleConfiguration::WithOptions(rule_options)) = rules.get_rule_configuration(rule_name) { if let Some(possible_options) = &rule_options.options { if let Some(rule_key) = metadata.find_rule("a11y", rule_name) { let rule_options = possible_options.extract_option(&rule_key); analyzer_rules.push_rule(rule_key, rule_options); } } } } } if let Some(rules) = rules.complexity.as_ref() { for rule_name in &Complexity::GROUP_RULES { if let Some(RuleConfiguration::WithOptions(rule_options)) = rules.get_rule_configuration(rule_name) { if let Some(possible_options) = &rule_options.options { if let Some(rule_key) = metadata.find_rule("complexity", rule_name) { let rule_options = possible_options.extract_option(&rule_key); analyzer_rules.push_rule(rule_key, rule_options); } } } } } if let Some(rules) = rules.correctness.as_ref() { for rule_name in &Correctness::GROUP_RULES { if let Some(RuleConfiguration::WithOptions(rule_options)) = rules.get_rule_configuration(rule_name) { if let Some(possible_options) = &rule_options.options { if let Some(rule_key) = metadata.find_rule("correctness", rule_name) { let rule_options = possible_options.extract_option(&rule_key); analyzer_rules.push_rule(rule_key, rule_options); } } } } } if let Some(rules) = rules.nursery.as_ref() { for rule_name in &Nursery::GROUP_RULES { if let Some(RuleConfiguration::WithOptions(rule_options)) = rules.get_rule_configuration(rule_name) { if let Some(possible_options) = &rule_options.options { if let Some(rule_key) = metadata.find_rule("nursery", rule_name) { let rule_options = 
possible_options.extract_option(&rule_key); analyzer_rules.push_rule(rule_key, rule_options); } } } } } if let Some(rules) = rules.performance.as_ref() { for rule_name in &Performance::GROUP_RULES { if let Some(RuleConfiguration::WithOptions(rule_options)) = rules.get_rule_configuration(rule_name) { if let Some(possible_options) = &rule_options.options { if let Some(rule_key) = metadata.find_rule("performance", rule_name) { let rule_options = possible_options.extract_option(&rule_key); analyzer_rules.push_rule(rule_key, rule_options); } } } } } if let Some(rules) = rules.security.as_ref() { for rule_name in &Security::GROUP_RULES { if let Some(RuleConfiguration::WithOptions(rule_options)) = rules.get_rule_configuration(rule_name) { if let Some(possible_options) = &rule_options.options { if let Some(rule_key) = metadata.find_rule("security", rule_name) { let rule_options = possible_options.extract_option(&rule_key); analyzer_rules.push_rule(rule_key, rule_options); } } } } } if let Some(rules) = rules.style.as_ref() { for rule_name in &Style::GROUP_RULES { if let Some(RuleConfiguration::WithOptions(rule_options)) = rules.get_rule_configuration(rule_name) { if let Some(possible_options) = &rule_options.options { if let Some(rule_key) = metadata.find_rule("style", rule_name) { let rule_options = possible_options.extract_option(&rule_key); analyzer_rules.push_rule(rule_key, rule_options); } } } } } if let Some(rules) = rules.suspicious.as_ref() { for rule_name in &Suspicious::GROUP_RULES { if let Some(RuleConfiguration::WithOptions(rule_options)) = rules.get_rule_configuration(rule_name) { if let Some(possible_options) = &rule_options.options { if let Some(rule_key) = metadata.find_rule("suspicious", rule_name) { let rule_options = possible_options.extract_option(&rule_key); analyzer_rules.push_rule(rule_key, rule_options); } } } } } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/configuration/parse/mod.rs
crates/rome_service/src/configuration/parse/mod.rs
mod json;
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/configuration/parse/json/linter.rs
crates/rome_service/src/configuration/parse/json/linter.rs
use crate::configuration::linter::{RulePlainConfiguration, RuleWithOptions}; use crate::configuration::LinterConfiguration; use crate::{RuleConfiguration, Rules}; use rome_console::markup; use rome_deserialize::json::{has_only_known_keys, with_only_known_variants, VisitJsonNode}; use rome_deserialize::{DeserializationDiagnostic, StringSet, VisitNode}; use rome_js_analyze::options::PossibleOptions; use rome_json_syntax::{AnyJsonValue, JsonLanguage, JsonObjectValue, JsonSyntaxNode}; use rome_rowan::{AstNode, AstSeparatedList, SyntaxNode}; impl VisitJsonNode for LinterConfiguration {} impl VisitNode<JsonLanguage> for LinterConfiguration { fn visit_member_name( &mut self, node: &JsonSyntaxNode, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { has_only_known_keys(node, LinterConfiguration::KNOWN_KEYS, diagnostics) } fn visit_map( &mut self, key: &SyntaxNode<JsonLanguage>, value: &SyntaxNode<JsonLanguage>, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { let (name, value) = self.get_key_and_value(key, value, diagnostics)?; let name_text = name.text(); match name_text { "ignore" => { self.ignore = self .map_to_index_set_string(&value, name_text, diagnostics) .map(StringSet::new); } "enabled" => { self.enabled = self.map_to_boolean(&value, name_text, diagnostics); } "rules" => { let mut rules = Rules::default(); if are_recommended_and_all_correct(&value, name_text, diagnostics)? 
{ self.map_to_object(&value, name_text, &mut rules, diagnostics)?; self.rules = Some(rules); } } _ => {} } Some(()) } } impl RuleConfiguration { pub(crate) fn map_rule_configuration( &mut self, value: &AnyJsonValue, key_name: &str, rule_name: &str, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { let value = JsonObjectValue::cast_ref(value.syntax()).or_else(|| { diagnostics.push(DeserializationDiagnostic::new_incorrect_type_for_value( key_name, "object", value.range(), )); None })?; for element in value.json_member_list() { let element = element.ok()?; let key = element.name().ok()?; let value = element.value().ok()?; let (name, value) = self.get_key_and_value(key.syntax(), value.syntax(), diagnostics)?; let name_text = name.text(); match name_text { "level" => { if let RuleConfiguration::WithOptions(options) = self { let mut level = RulePlainConfiguration::default(); level.visit_member_value(value.syntax(), diagnostics)?; options.level = level; } else { let mut level = RulePlainConfiguration::default(); level.visit_member_value(value.syntax(), diagnostics)?; *self = RuleConfiguration::WithOptions(RuleWithOptions { level, ..RuleWithOptions::default() }) } } "options" => { let mut possible_options = PossibleOptions::default(); possible_options.map_to_rule_options(&value, name_text, rule_name, diagnostics); if let RuleConfiguration::WithOptions(options) = self { options.options = Some(possible_options) } else { *self = RuleConfiguration::WithOptions(RuleWithOptions { options: Some(possible_options), ..RuleWithOptions::default() }) } } _ => {} } } Some(()) } } impl VisitJsonNode for RuleConfiguration {} impl VisitNode<JsonLanguage> for RuleConfiguration { fn visit_member_name( &mut self, node: &SyntaxNode<JsonLanguage>, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { has_only_known_keys(node, &["level", "options"], diagnostics) } fn visit_member_value( &mut self, node: &SyntaxNode<JsonLanguage>, diagnostics: &mut 
Vec<DeserializationDiagnostic>, ) -> Option<()> { let node = with_only_known_variants(node, RulePlainConfiguration::KNOWN_KEYS, diagnostics)?; match node.inner_string_text().ok()?.text() { "error" => { *self = RuleConfiguration::Plain(RulePlainConfiguration::Error); } "warn" => { *self = RuleConfiguration::Plain(RulePlainConfiguration::Warn); } "off" => { *self = RuleConfiguration::Plain(RulePlainConfiguration::Off); } _ => {} } Some(()) } } impl VisitJsonNode for RulePlainConfiguration {} impl VisitNode<JsonLanguage> for RulePlainConfiguration { fn visit_member_value( &mut self, node: &SyntaxNode<JsonLanguage>, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { let node = with_only_known_variants(node, RulePlainConfiguration::KNOWN_KEYS, diagnostics)?; match node.inner_string_text().ok()?.text() { "error" => { *self = RulePlainConfiguration::Error; } "warn" => { *self = RulePlainConfiguration::Warn; } "off" => { *self = RulePlainConfiguration::Off; } _ => {} } Some(()) } } impl VisitJsonNode for RuleWithOptions {} impl VisitNode<JsonLanguage> for RuleWithOptions { fn visit_member_name( &mut self, node: &SyntaxNode<JsonLanguage>, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { has_only_known_keys(node, &["level", "options"], diagnostics) } fn visit_map( &mut self, key: &SyntaxNode<JsonLanguage>, value: &SyntaxNode<JsonLanguage>, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { let (name, value) = self.get_key_and_value(key, value, diagnostics)?; let name_text = name.text(); match name_text { "level" => { let mut rule_options = RulePlainConfiguration::default(); rule_options.visit_member_value(value.syntax(), diagnostics)?; self.level = rule_options; } "options" => { let mut possible_options = PossibleOptions::default(); self.map_to_object(&value, name_text, &mut possible_options, diagnostics); self.options = Some(possible_options); } _ => {} } Some(()) } } pub(crate) fn are_recommended_and_all_correct( 
current_node: &AnyJsonValue, name: &str, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<bool> { let value = JsonObjectValue::cast_ref(current_node.syntax()).or_else(|| { diagnostics.push(DeserializationDiagnostic::new_incorrect_type_for_value( name, "object", current_node.range(), )); None })?; let recommended = value.json_member_list().iter().find_map(|member| { let member = member.ok()?; if member.name().ok()?.inner_string_text().ok()?.text() == "recommended" { member.value().ok()?.as_json_boolean_value().cloned() } else { None } }); let all = value.json_member_list().iter().find_map(|member| { let member = member.ok()?; if member.name().ok()?.inner_string_text().ok()?.text() == "all" { member.value().ok()?.as_json_boolean_value().cloned() } else { None } }); if let (Some(recommended), Some(all)) = (recommended, all) { if recommended.value_token().ok()?.text() == "true" && all.value_token().ok()?.text() == "true" { diagnostics .push(DeserializationDiagnostic::new(markup!( <Emphasis>"'recommended'"</Emphasis>" and "<Emphasis>"'all'"</Emphasis>" can't be both "<Emphasis>"'true'"</Emphasis>". You should choose only one of them." )) .with_range(current_node.range()) .with_note(markup!("Rome will fallback to its defaults for this section."))); return Some(false); } } Some(true) }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/configuration/parse/json/javascript.rs
crates/rome_service/src/configuration/parse/json/javascript.rs
use crate::configuration::javascript::{JavascriptOrganizeImports, JavascriptParser}; use crate::configuration::{JavascriptConfiguration, JavascriptFormatter}; use rome_deserialize::json::{has_only_known_keys, VisitJsonNode}; use rome_deserialize::{DeserializationDiagnostic, StringSet, VisitNode}; use rome_js_formatter::context::trailing_comma::TrailingComma; use rome_js_formatter::context::{ArrowParentheses, QuoteProperties, QuoteStyle, Semicolons}; use rome_json_syntax::{JsonLanguage, JsonSyntaxNode}; use rome_rowan::SyntaxNode; impl VisitJsonNode for JavascriptConfiguration {} impl VisitNode<JsonLanguage> for JavascriptConfiguration { fn visit_member_name( &mut self, node: &JsonSyntaxNode, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { has_only_known_keys(node, JavascriptConfiguration::KNOWN_KEYS, diagnostics) } fn visit_map( &mut self, key: &SyntaxNode<JsonLanguage>, value: &SyntaxNode<JsonLanguage>, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { let (name, value) = self.get_key_and_value(key, value, diagnostics)?; let name_text = name.text(); match name_text { "formatter" => { let mut javascript_formatter = JavascriptFormatter::default(); self.map_to_object(&value, name_text, &mut javascript_formatter, diagnostics)?; self.formatter = Some(javascript_formatter); } "parser" => { let mut parser = JavascriptParser::default(); self.map_to_object(&value, name_text, &mut parser, diagnostics)?; self.parser = Some(parser); } "globals" => { self.globals = self .map_to_index_set_string(&value, name_text, diagnostics) .map(StringSet::new); } "organizeImports" => { let mut javascript_organize_imports = JavascriptOrganizeImports::default(); self.map_to_object( &value, name_text, &mut javascript_organize_imports, diagnostics, )?; self.organize_imports = Some(javascript_organize_imports); } _ => {} } Some(()) } } impl VisitJsonNode for JavascriptFormatter {} impl VisitNode<JsonLanguage> for JavascriptFormatter { fn visit_member_name( 
&mut self, node: &JsonSyntaxNode, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { has_only_known_keys(node, JavascriptFormatter::KNOWN_KEYS, diagnostics) } fn visit_map( &mut self, key: &SyntaxNode<JsonLanguage>, value: &SyntaxNode<JsonLanguage>, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { let (name, value) = self.get_key_and_value(key, value, diagnostics)?; let name_text = name.text(); match name_text { "jsxQuoteStyle" => { let mut jsx_quote_style = QuoteStyle::default(); self.map_to_known_string(&value, name_text, &mut jsx_quote_style, diagnostics)?; self.jsx_quote_style = Some(jsx_quote_style); } "quoteStyle" => { let mut quote_style = QuoteStyle::default(); self.map_to_known_string(&value, name_text, &mut quote_style, diagnostics)?; self.quote_style = Some(quote_style); } "trailingComma" => { let mut trailing_comma = TrailingComma::default(); self.map_to_known_string(&value, name_text, &mut trailing_comma, diagnostics)?; self.trailing_comma = Some(trailing_comma); } "quoteProperties" => { let mut quote_properties = QuoteProperties::default(); self.map_to_known_string(&value, name_text, &mut quote_properties, diagnostics)?; self.quote_properties = Some(quote_properties); } "semicolons" => { let mut semicolons = Semicolons::default(); self.map_to_known_string(&value, name_text, &mut semicolons, diagnostics)?; self.semicolons = Some(semicolons); } "arrowParentheses" => { let mut arrow_parentheses = ArrowParentheses::default(); self.map_to_known_string(&value, name_text, &mut arrow_parentheses, diagnostics)?; self.arrow_parentheses = Some(arrow_parentheses); } _ => {} } Some(()) } } impl VisitJsonNode for JavascriptOrganizeImports {} impl VisitNode<JsonLanguage> for JavascriptOrganizeImports { fn visit_map( &mut self, _key: &JsonSyntaxNode, _value: &JsonSyntaxNode, _diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { Some(()) } } impl VisitJsonNode for JavascriptParser {} impl VisitNode<JsonLanguage> for 
JavascriptParser { fn visit_member_name( &mut self, node: &JsonSyntaxNode, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { has_only_known_keys(node, JavascriptParser::KNOWN_KEYS, diagnostics) } fn visit_map( &mut self, key: &SyntaxNode<JsonLanguage>, value: &SyntaxNode<JsonLanguage>, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { let (name, value) = self.get_key_and_value(key, value, diagnostics)?; let name_text = name.text(); if name_text == "unsafeParameterDecoratorsEnabled" { self.unsafe_parameter_decorators_enabled = self.map_to_boolean(&value, name_text, diagnostics); } Some(()) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false
rome/tools
https://github.com/rome/tools/blob/392d188a49d70e495f13b1bb08cd7d9c43690f9b/crates/rome_service/src/configuration/parse/json/configuration.rs
crates/rome_service/src/configuration/parse/json/configuration.rs
use crate::configuration::json::JsonConfiguration; use crate::configuration::organize_imports::OrganizeImports; use crate::configuration::parse::json::vcs::validate_vcs_configuration; use crate::configuration::vcs::VcsConfiguration; use crate::configuration::{ FilesConfiguration, FormatterConfiguration, JavascriptConfiguration, LinterConfiguration, }; use crate::Configuration; use rome_deserialize::json::{has_only_known_keys, VisitJsonNode}; use rome_deserialize::{DeserializationDiagnostic, StringSet, VisitNode}; use rome_json_syntax::{JsonLanguage, JsonSyntaxNode}; use rome_rowan::SyntaxNode; impl VisitJsonNode for Configuration {} impl VisitNode<JsonLanguage> for Configuration { fn visit_member_name( &mut self, node: &JsonSyntaxNode, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { has_only_known_keys(node, Configuration::KNOWN_KEYS, diagnostics) } fn visit_map( &mut self, key: &SyntaxNode<JsonLanguage>, value: &SyntaxNode<JsonLanguage>, diagnostics: &mut Vec<DeserializationDiagnostic>, ) -> Option<()> { let (name, value) = self.get_key_and_value(key, value, diagnostics)?; let name_text = name.text(); match name_text { "$schema" => { self.schema = self.map_to_string(&value, name_text, diagnostics); } "files" => { let mut files = FilesConfiguration::default(); self.map_to_object(&value, name_text, &mut files, diagnostics)?; self.files = Some(files); } "vcs" => { let mut vcs = VcsConfiguration::default(); self.map_to_object(&value, name_text, &mut vcs, diagnostics)?; validate_vcs_configuration(&value, &mut vcs, diagnostics); self.vcs = Some(vcs); } "formatter" => { let mut formatter = FormatterConfiguration::default(); self.map_to_object(&value, name_text, &mut formatter, diagnostics)?; self.formatter = Some(formatter); } "linter" => { let mut linter = LinterConfiguration::default(); self.map_to_object(&value, name_text, &mut linter, diagnostics)?; self.linter = Some(linter); } "javascript" => { let mut javascript = 
JavascriptConfiguration::default(); self.map_to_object(&value, name_text, &mut javascript, diagnostics)?; self.javascript = Some(javascript); } "json" => { let mut json = JsonConfiguration::default(); self.map_to_object(&value, name_text, &mut json, diagnostics)?; self.json = Some(json); } "organizeImports" => { let mut organize_imports = OrganizeImports::default(); self.map_to_object(&value, name_text, &mut organize_imports, diagnostics)?; self.organize_imports = Some(organize_imports); } "extends" => { self.extends = self .map_to_index_set_string(&value, name_text, diagnostics) .map(StringSet::new); } _ => {} } Some(()) } }
rust
MIT
392d188a49d70e495f13b1bb08cd7d9c43690f9b
2026-01-04T15:38:12.578592Z
false