repo stringlengths 8 35 | pull_number int64 14 14.5k | instance_id stringlengths 13 40 | issue_numbers listlengths 1 3 | base_commit stringlengths 40 40 | patch stringlengths 344 132k | test_patch stringlengths 308 274k | problem_statement stringlengths 25 19.8k | hints_text stringlengths 0 37.4k | created_at stringlengths 19 19 | version stringlengths 3 4 | environment_setup_commit stringlengths 40 40 | FAIL_TO_PASS listlengths 1 1.1k | PASS_TO_PASS listlengths 0 7.38k | FAIL_TO_FAIL listlengths 0 1.72k | PASS_TO_FAIL listlengths 0 49 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
kkawakam/rustyline | 296 | kkawakam__rustyline-296 | [
"294"
] | fb048a885f2f4b9bc947fd7603d7b3939951b02a | diff --git a/src/edit.rs b/src/edit.rs
--- a/src/edit.rs
+++ b/src/edit.rs
@@ -210,6 +210,10 @@ impl<'out, 'prompt, H: Helper> State<'out, 'prompt, H> {
false
}
}
+
+ pub fn is_default_prompt(&self) -> bool {
+ self.layout.default_prompt
+ }
}
impl<'out, 'prompt, H: Helper> Refresher for State<'out, 'prompt, H> {
diff --git a/src/lib.rs b/src/lib.rs
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -526,12 +526,12 @@ fn readline_edit<H: Helper>(
editor.term.cursor = s.layout.cursor.col;
}
// Accept the line regardless of where the cursor is.
- s.edit_move_end()?;
- if s.has_hint() {
+ if s.has_hint() || !s.is_default_prompt() {
// Force a refresh without hints to leave the previous
// line as the user typed it after a newline.
s.refresh_line_with_msg(None)?;
}
+ s.edit_move_end()?;
break;
}
Cmd::BeginningOfHistory => {
| diff --git a/src/test/emacs.rs b/src/test/emacs.rs
--- a/src/test/emacs.rs
+++ b/src/test/emacs.rs
@@ -159,6 +159,7 @@ fn ctrl_n() {
KeyPress::Ctrl('N'),
KeyPress::Enter,
],
+ "",
("line2", ""),
);
}
diff --git a/src/test/emacs.rs b/src/test/emacs.rs
--- a/src/test/emacs.rs
+++ b/src/test/emacs.rs
@@ -169,6 +170,7 @@ fn ctrl_p() {
EditMode::Emacs,
&["line1"],
&[KeyPress::Ctrl('P'), KeyPress::Enter],
+ "",
("line1", ""),
);
}
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -10,12 +10,14 @@ fn down_key() {
*mode,
&["line1"],
&[KeyPress::Down, KeyPress::Enter],
+ "",
("", ""),
);
assert_history(
*mode,
&["line1", "line2"],
&[KeyPress::Up, KeyPress::Up, KeyPress::Down, KeyPress::Enter],
+ "",
("line2", ""),
);
assert_history(
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -27,6 +29,7 @@ fn down_key() {
KeyPress::Down, // restore original line
KeyPress::Enter,
],
+ "",
("a", ""),
);
assert_history(
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -37,6 +40,7 @@ fn down_key() {
KeyPress::Down, // noop
KeyPress::Enter,
],
+ "",
("a", ""),
);
}
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -45,17 +49,19 @@ fn down_key() {
#[test]
fn up_key() {
for mode in &[EditMode::Emacs, EditMode::Vi] {
- assert_history(*mode, &[], &[KeyPress::Up, KeyPress::Enter], ("", ""));
+ assert_history(*mode, &[], &[KeyPress::Up, KeyPress::Enter], "", ("", ""));
assert_history(
*mode,
&["line1"],
&[KeyPress::Up, KeyPress::Enter],
+ "",
("line1", ""),
);
assert_history(
*mode,
&["line1", "line2"],
&[KeyPress::Up, KeyPress::Up, KeyPress::Enter],
+ "",
("line1", ""),
);
}
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -68,6 +74,7 @@ fn ctrl_r() {
*mode,
&[],
&[KeyPress::Ctrl('R'), KeyPress::Char('o'), KeyPress::Enter],
+ "",
("o", ""),
);
assert_history(
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -79,6 +86,7 @@ fn ctrl_r() {
KeyPress::Right, // just to assert cursor pos
KeyPress::Enter,
],
+ "",
("cargo", ""),
);
assert_history(
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -90,6 +98,7 @@ fn ctrl_r() {
KeyPress::Right, // just to assert cursor pos
KeyPress::Enter,
],
+ "",
("ru", "stc"),
);
assert_history(
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -102,6 +111,7 @@ fn ctrl_r() {
KeyPress::Right, // just to assert cursor pos
KeyPress::Enter,
],
+ "",
("r", "ustc"),
);
assert_history(
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -114,6 +124,7 @@ fn ctrl_r() {
KeyPress::Right, // just to assert cursor pos
KeyPress::Enter,
],
+ "",
("r", "ustc"),
);
assert_history(
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -126,6 +137,7 @@ fn ctrl_r() {
KeyPress::Right, // just to assert cursor pos
KeyPress::Enter,
],
+ "",
("car", "go"),
);
assert_history(
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -138,11 +150,29 @@ fn ctrl_r() {
KeyPress::Ctrl('G'), // abort (FIXME: doesn't work with vi mode)
KeyPress::Enter,
],
+ "",
("a", ""),
);
}
}
+#[test]
+fn ctrl_r_with_long_prompt() {
+ for mode in &[EditMode::Emacs, EditMode::Vi] {
+ assert_history(
+ *mode,
+ &["rustc", "cargo"],
+ &[
+ KeyPress::Ctrl('R'),
+ KeyPress::Char('o'),
+ KeyPress::Enter,
+ ],
+ ">>>>>>>>>>>>>>>>>>>>>>>>>>> ",
+ ("cargo", ""),
+ );
+ }
+}
+
#[test]
fn ctrl_s() {
for mode in &[EditMode::Emacs, EditMode::Vi] {
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -157,6 +187,7 @@ fn ctrl_s() {
KeyPress::Right, // just to assert cursor pos
KeyPress::Enter,
],
+ "",
("car", "go"),
);
}
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -168,12 +199,14 @@ fn meta_lt() {
EditMode::Emacs,
&[""],
&[KeyPress::Meta('<'), KeyPress::Enter],
+ "",
("", ""),
);
assert_history(
EditMode::Emacs,
&["rustc", "cargo"],
&[KeyPress::Meta('<'), KeyPress::Enter],
+ "",
("rustc", ""),
);
}
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -184,12 +217,14 @@ fn meta_gt() {
EditMode::Emacs,
&[""],
&[KeyPress::Meta('>'), KeyPress::Enter],
+ "",
("", ""),
);
assert_history(
EditMode::Emacs,
&["rustc", "cargo"],
&[KeyPress::Meta('<'), KeyPress::Meta('>'), KeyPress::Enter],
+ "",
("", ""),
);
assert_history(
diff --git a/src/test/history.rs b/src/test/history.rs
--- a/src/test/history.rs
+++ b/src/test/history.rs
@@ -201,6 +236,7 @@ fn meta_gt() {
KeyPress::Meta('>'), // restore original line
KeyPress::Enter,
],
+ "",
("a", ""),
);
}
diff --git a/src/test/mod.rs b/src/test/mod.rs
--- a/src/test/mod.rs
+++ b/src/test/mod.rs
@@ -94,14 +94,22 @@ fn assert_cursor(mode: EditMode, initial: (&str, &str), keys: &[KeyPress], expec
// `entries`: history entries before `keys` pressed
// `keys`: keys to press
// `expected`: line status before enter key: strings before and after cursor
-fn assert_history(mode: EditMode, entries: &[&str], keys: &[KeyPress], expected: (&str, &str)) {
+fn assert_history(
+ mode: EditMode,
+ entries: &[&str],
+ keys: &[KeyPress],
+ prompt: &str,
+ expected: (&str, &str),
+) {
let mut editor = init_editor(mode, keys);
for entry in entries {
editor.history.add(*entry);
}
- let actual_line = editor.readline("").unwrap();
+ let actual_line = editor.readline(prompt).unwrap();
assert_eq!(expected.0.to_owned() + expected.1, actual_line);
- assert_eq!(expected.0.len(), editor.term.cursor);
+ if prompt.is_empty() {
+ assert_eq!(expected.0.len(), editor.term.cursor);
+ }
}
#[test]
diff --git a/src/test/vi_cmd.rs b/src/test/vi_cmd.rs
--- a/src/test/vi_cmd.rs
+++ b/src/test/vi_cmd.rs
@@ -441,6 +441,7 @@ fn j() {
*key,
KeyPress::Enter,
],
+ "",
("line2", ""),
);
}
diff --git a/src/test/vi_cmd.rs b/src/test/vi_cmd.rs
--- a/src/test/vi_cmd.rs
+++ b/src/test/vi_cmd.rs
@@ -457,6 +458,7 @@ fn k() {
EditMode::Vi,
&["line1"],
&[KeyPress::Esc, *key, KeyPress::Enter],
+ "",
("line1", ""),
);
}
| Crash when searching for a value
This code:
```
fn main() {
use std::io::{stdin, stdout, Write};
use rustyline::error::ReadlineError;
use rustyline::Editor;
loop {
let mut editor = Editor::<()>::new();
if editor.load_history("history.log").is_err() {
}
let result = editor.readline("Enter expression (ctrl-C to quit): ");
match result {
Ok(mut line) => {
editor.add_history_entry(line.as_str());
line = line.trim().to_string();
if line.len() > 0 {
println!("You entered: {}", line);
}
},
Err(ReadlineError::Interrupted) => {
break
},
Err(ReadlineError::Eof) => {
},
Err(err) => {
println!("Error: {:?}", err);
break
}
}
editor.save_history("history.log").unwrap();
}
}
```
Fails when run if you do a `ctrl-R` to search for a previous value (which _is_ found). The failure enters when you press Enter on the found value. Console listing with backtrace (Ubuntu-x64, rust 1.38.0):
```
$ RUST_BACKTRACE=full target/debug/expr_repl
Enter expression (ctrl-C to quit): abc
You entered: abc
(reverse-i-search)`ab': abc thread 'main' panicked at 'assertion failed: self.layout.cursor <= self.layout.end', /home/vinay/.cargo/registry/src/github.com-1ecc6299db9ec823/rustyline-5.0.3/src/edit.rs:125:13
stack backtrace:
0: 0x5565d5ee898b - backtrace::backtrace::libunwind::trace::h89fcc71e59e3bc5b
at /cargo/registry/src/github.com-1ecc6299db9ec823/backtrace-0.3.34/src/backtrace/libunwind.rs:88
1: 0x5565d5ee898b - backtrace::backtrace::trace_unsynchronized::h0bad9be1379e729a
at /cargo/registry/src/github.com-1ecc6299db9ec823/backtrace-0.3.34/src/backtrace/mod.rs:66
2: 0x5565d5ee898b - std::sys_common::backtrace::_print::hd3382a1f33c473da
at src/libstd/sys_common/backtrace.rs:47
3: 0x5565d5ee898b - std::sys_common::backtrace::print::h0ec6f03cfb8e76a6
at src/libstd/sys_common/backtrace.rs:36
4: 0x5565d5ee898b - std::panicking::default_hook::{{closure}}::h96cbf7b454e3f557
at src/libstd/panicking.rs:200
5: 0x5565d5ee8666 - std::panicking::default_hook::h95a8f00337383d83
at src/libstd/panicking.rs:214
6: 0x5565d5ee909d - std::panicking::rust_panic_with_hook::h92f98b46e22f14ed
at src/libstd/panicking.rs:477
7: 0x5565d5ed3df5 - std::panicking::begin_panic::h96738e055e5f4d65
at /rustc/625451e376bb2e5283fc4741caa0a3e8a2ca4d54/src/libstd/panicking.rs:411
8: 0x5565d5e678ec - rustyline::edit::State<H>::move_cursor::hdcb71b31d457715f
at /home/vinay/projects/rsconfig/<::std::macros::panic macros>:3
9: 0x5565d5e67e05 - rustyline::edit::State<H>::edit_move_end::hf8b47e6f67dc66dc
at /home/vinay/.cargo/registry/src/github.com-1ecc6299db9ec823/rustyline-5.0.3/src/edit.rs:406
10: 0x5565d5e5be9d - rustyline::readline_edit::ha6e91a48dad7739e
at /home/vinay/.cargo/registry/src/github.com-1ecc6299db9ec823/rustyline-5.0.3/src/lib.rs:529
11: 0x5565d5e55950 - rustyline::readline_raw::h8d10993a62be80cd
at /home/vinay/.cargo/registry/src/github.com-1ecc6299db9ec823/rustyline-5.0.3/src/lib.rs:626
12: 0x5565d5e5f2c2 - rustyline::Editor<H>::readline_with::h243448f87a7c18a2
at /home/vinay/.cargo/registry/src/github.com-1ecc6299db9ec823/rustyline-5.0.3/src/lib.rs:755
13: 0x5565d5e5f5ad - rustyline::Editor<H>::readline::h0d20b4b1450ecb3c
at /home/vinay/.cargo/registry/src/github.com-1ecc6299db9ec823/rustyline-5.0.3/src/lib.rs:731
14: 0x5565d5e62d19 - expr_repl::main::ha34c1f1010ec7764
at src/bin/expr_repl.rs:31
15: 0x5565d5e72ab0 - std::rt::lang_start::{{closure}}::h39369991cd93ebad
at /rustc/625451e376bb2e5283fc4741caa0a3e8a2ca4d54/src/libstd/rt.rs:64
16: 0x5565d5ee8aa3 - std::rt::lang_start_internal::{{closure}}::h4e93c1949c7a1955
at src/libstd/rt.rs:49
17: 0x5565d5ee8aa3 - std::panicking::try::do_call::h9440ccd4dc467eaa
at src/libstd/panicking.rs:296
18: 0x5565d5eeaaba - __rust_maybe_catch_panic
at src/libpanic_unwind/lib.rs:80
19: 0x5565d5ee95ad - std::panicking::try::hc046e7ee42ee744f
at src/libstd/panicking.rs:275
20: 0x5565d5ee95ad - std::panic::catch_unwind::h27dfc457c200aee0
at src/libstd/panic.rs:394
21: 0x5565d5ee95ad - std::rt::lang_start_internal::hea1b49a567afe309
at src/libstd/rt.rs:48
22: 0x5565d5e72a89 - std::rt::lang_start::hbc1f50dfdb872b3d
at /rustc/625451e376bb2e5283fc4741caa0a3e8a2ca4d54/src/libstd/rt.rs:64
23: 0x5565d5e6328a - main
24: 0x7f4952b44b97 - __libc_start_main
25: 0x5565d5e520da - _start
26: 0x0 - <unknown>
```
| 2019-10-23T02:57:12 | 5.0 | fb048a885f2f4b9bc947fd7603d7b3939951b02a | [
"test::history::ctrl_r_with_long_prompt"
] | [
"completion::tests::escape",
"completion::tests::extract_word",
"completion::tests::find_unclosed_quote",
"completion::tests::longest_common_prefix",
"completion::tests::unescape",
"highlight::tests::check_bracket",
"edit::test::edit_history_next",
"highlight::tests::find_matching_bracket",
"highlig... | [] | [] | |
kkawakam/rustyline | 706 | kkawakam__rustyline-706 | [
"705"
] | 97df24087bf7b32fd7647f7f9ba6643c0d092fe5 | diff --git a/src/keymap.rs b/src/keymap.rs
--- a/src/keymap.rs
+++ b/src/keymap.rs
@@ -711,7 +711,9 @@ impl<'b> InputState<'b> {
E(K::Char('$') | K::End, M::NONE) => Cmd::Move(Movement::EndOfLine),
E(K::Char('.'), M::NONE) => {
// vi-redo (repeat last command)
- if no_num_args {
+ if !self.last_cmd.is_repeatable() {
+ Cmd::Noop
+ } else if no_num_args {
self.last_cmd.redo(None, wrt)
} else {
self.last_cmd.redo(Some(n), wrt)
| diff --git a/src/test/vi_cmd.rs b/src/test/vi_cmd.rs
--- a/src/test/vi_cmd.rs
+++ b/src/test/vi_cmd.rs
@@ -13,10 +13,15 @@ fn dollar() {
);
}
-/*#[test]
+#[test]
fn dot() {
- // TODO
-}*/
+ assert_cursor(
+ EditMode::Vi,
+ ("", ""),
+ &[E::ESC, E::from('.'), E::ENTER],
+ ("", ""),
+ );
+}
#[test]
fn semi_colon() {
| Crash at rustyline-11.0.0/src/keymap.rs:205:18
Hi,
when I have rustyline configured the following way:
```
let config_builder = rustyline::config::Config::builder()
.max_history_size(10000)?
.history_ignore_dups( true)?
.auto_add_history(true)
.bell_style(rustyline::config::BellStyle::None)
.completion_type(rustyline::config::CompletionType::List)
.edit_mode( rustyline::EditMode::Vi);
let config = config_builder.build();
```
(note that vi mode is active)
Later on I am in the commandline via:
`rl.readline(">> ");`
And I press 'ESC' (so I am in the command mode of the vi mode)
and then I press '.' then it crashes.
`>> thread 'main' panicked at 'internal error: entered unreachable code', .../.cargo/registry/src/github.com-1ecc6299db9ec823/rustyline-11.0.0/src/keymap.rs:205:18`
'.' in the vi mode is the repetition of the latest command but in this situation there was no latest command yet.
Regards,
Frank Schwidom
| Thanks for the detailed bug report. | 2023-06-24T01:04:46 | 11.0 | 97df24087bf7b32fd7647f7f9ba6643c0d092fe5 | [
"test::vi_cmd::dot"
] | [
"binding::test::no_collision",
"binding::test::encode",
"completion::tests::find_unclosed_quote",
"completion::tests::escape",
"completion::tests::extract_word",
"completion::tests::unescape",
"completion::tests::longest_common_prefix",
"highlight::tests::check_bracket",
"highlight::tests::is_open_b... | [
"binding::test::size_of_event"
] | [] |
kkawakam/rustyline | 646 | kkawakam__rustyline-646 | [
"645"
] | 5682cd33e47ae84fc71c9bfc846c8760005d1a4a | diff --git a/src/command.rs b/src/command.rs
--- a/src/command.rs
+++ b/src/command.rs
@@ -51,7 +51,9 @@ pub fn execute<H: Helper>(
}
Cmd::Move(Movement::ViFirstPrint) => {
s.edit_move_home()?;
- s.edit_move_to_next_word(At::Start, Word::Big, 1)?;
+ if s.line.starts_with(char::is_whitespace) {
+ s.edit_move_to_next_word(At::Start, Word::Big, 1)?;
+ }
}
Cmd::Move(Movement::BackwardChar(n)) => {
// Move back a character.
| diff --git a/src/test/vi_cmd.rs b/src/test/vi_cmd.rs
--- a/src/test/vi_cmd.rs
+++ b/src/test/vi_cmd.rs
@@ -58,6 +58,16 @@ fn caret() {
);
}
+#[test]
+fn caret_no_whitespace() {
+ assert_cursor(
+ EditMode::Vi,
+ ("Hi", ""),
+ &[E::ESC, E::from('^'), E::ENTER],
+ ("", "Hi"),
+ );
+}
+
#[test]
fn a() {
assert_cursor(
| Caret key (`^`) does not work as expected in evcxr_repl in vi edit mode
I originally posted the issue in google/evcxr#239 , but it turned out to be a bug of thi scrate, rustyline.
> Caret key should jump the cursor to the first non-empty character in the current line. But it is implemented that it instead jumps ...
```
>> abc def
^ here.
```
> Seems that it instead moves the cursor to the character right after the first whitespace instead.
This seems due to a wrong implementation [here](https://github.com/kkawakam/rustyline/blob/05f94b12fb94a2329d5227b10a8520a93ad4a893/src/command.rs#L52-L55):
```rust
Cmd::Move(Movement::ViFirstPrint) => {
s.edit_move_home()?;
s.edit_move_to_next_word(At::Start, Word::Big, 1)?;
}
```
In this implementation, it first jumps to the beginning of the line, and then move to the "next" word considering any consecutive sequence of non-empty characters as a word. This is problematic only when the first character is a non-empty character; since the cursor is already at the beginning of a "word", it jumps it to the succeeding word, causing the unintuitive behavior.
I am nst sure how should I properly fix it. `s.edit_move_to_next_word` eventually calls `LineBuffer::next_word_pos` to find the actual position to jump to. Should I modify the signature of this method to accept an additional argument for "Inclusive/Exclusive" enum? Or shuold I call a different method in the `Move(ViFirstPrint)` match arm?
| Thanks for the bug report.
```diff
diff --git a/src/command.rs b/src/command.rs
index c16f966..652e9f1 100644
--- a/src/command.rs
+++ b/src/command.rs
@@ -51,7 +51,9 @@ pub fn execute<H: Helper>(
}
Cmd::Move(Movement::ViFirstPrint) => {
s.edit_move_home()?;
- s.edit_move_to_next_word(At::Start, Word::Big, 1)?;
+ if s.line.starts_with(char::is_whitespace) {
+ s.edit_move_to_next_word(At::Start, Word::Big, 1)?;
+ }
}
Cmd::Move(Movement::BackwardChar(n)) => {
// Move back a character.
```
not tested...
It worked! (I modified `examples/example.rs` to change `Emcas` to `Vi` and ran it.) | 2022-08-21T01:04:36 | 10.0 | 5682cd33e47ae84fc71c9bfc846c8760005d1a4a | [
"test::vi_cmd::caret_no_whitespace"
] | [
"completion::tests::escape",
"binding::test::encode",
"completion::tests::find_unclosed_quote",
"binding::test::no_collision",
"completion::tests::extract_word",
"completion::tests::longest_common_prefix",
"completion::tests::unescape",
"highlight::tests::check_bracket",
"highlight::tests::find_matc... | [
"binding::test::size_of_event"
] | [] |
kkawakam/rustyline | 511 | kkawakam__rustyline-511 | [
"509"
] | bd73bdd5fdd4b59e3a1809be175c47a91b8bb512 | diff --git a/src/binding.rs b/src/binding.rs
--- a/src/binding.rs
+++ b/src/binding.rs
@@ -71,15 +71,15 @@ impl KeyEvent {
fn encode(&self) -> u32 {
let mut u = match self.0 {
KeyCode::UnknownEscSeq => 0,
- KeyCode::Backspace => u32::from('H') | BASE_CONTROL,
- KeyCode::BackTab => u32::from('I') | BASE_CONTROL | BASE_SHIFT,
+ KeyCode::Backspace => u32::from('\x7f'),
+ KeyCode::BackTab => u32::from('\t') | BASE_SHIFT,
KeyCode::BracketedPasteStart => PASTE_START,
KeyCode::BracketedPasteEnd => PASTE_FINISH,
KeyCode::Char(c) => u32::from(c),
KeyCode::Delete => DELETE,
KeyCode::Down => DOWN,
KeyCode::End => END,
- KeyCode::Enter => u32::from('M') | BASE_CONTROL,
+ KeyCode::Enter => u32::from('\r'),
KeyCode::F(i) => INSERT + i as u32,
KeyCode::Esc => ESCAPE,
KeyCode::Home => HOME,
diff --git a/src/binding.rs b/src/binding.rs
--- a/src/binding.rs
+++ b/src/binding.rs
@@ -89,7 +89,7 @@ impl KeyEvent {
KeyCode::PageDown => PAGE_DOWN,
KeyCode::PageUp => PAGE_UP,
KeyCode::Right => RIGHT,
- KeyCode::Tab => u32::from('I') | BASE_CONTROL,
+ KeyCode::Tab => u32::from('\t'),
KeyCode::Up => UP,
};
if self.1.contains(Modifiers::CTRL) {
| diff --git a/src/binding.rs b/src/binding.rs
--- a/src/binding.rs
+++ b/src/binding.rs
@@ -204,7 +204,7 @@ pub trait ConditionalEventHandler: Send + Sync {
#[cfg(test)]
mod test {
use super::{Event, EventHandler};
- use crate::{Cmd, KeyEvent};
+ use crate::{Cmd, KeyCode, KeyEvent, Modifiers};
use radix_trie::Trie;
use smallvec::smallvec;
diff --git a/src/binding.rs b/src/binding.rs
--- a/src/binding.rs
+++ b/src/binding.rs
@@ -224,4 +224,16 @@ mod test {
let subtrie = trie.get_raw_descendant(&prefix);
assert!(subtrie.is_none())
}
+
+ #[test]
+ fn no_collision() {
+ use {Event as E, EventHandler as H, KeyCode as C, KeyEvent as K, Modifiers as M};
+ let mut trie = Trie::new();
+ trie.insert(E::from(K(C::Backspace, M::NONE)), H::from(Cmd::Noop));
+ trie.insert(E::from(K(C::Enter, M::NONE)), H::from(Cmd::Noop));
+ trie.insert(E::from(K(C::Tab, M::NONE)), H::from(Cmd::Noop));
+ trie.insert(E::from(K(C::Backspace, M::CTRL)), H::from(Cmd::Noop));
+ trie.insert(E::from(K(C::Enter, M::CTRL)), H::from(Cmd::Noop));
+ trie.insert(E::from(K(C::Tab, M::CTRL)), H::from(Cmd::Noop));
+ }
}
| Trie key collisions for some custom bind sequences
Here's a snippet that worked in Rustyline 7.1.0 but is broken in 8.0.0.
```
let mut rl = rustyline::Editor::<()>::new();
// Enable Ctrl-Backspace to delete the current word.
rl.bind_sequence(
rustyline::KeyEvent::new('\x08', rustyline::Modifiers::CTRL),
rustyline::Cmd::Kill(rustyline::Movement::BackwardWord(1, rustyline::Word::Emacs)),
);
let line = rl.readline("> ");
```
It appears that the encoding when putting `Event` into a radix trie has some collisions, so that Ctrl-Backspace and Backspace end up with the same encoding. This causes a crash when I type backspace:
```
thread 'stdio-thread' panicked at 'multiple-keys with the same bit representation.', ***\.cargo\registry\src\github.com-1ecc6299db9ec823\radix_trie-0.2.1\src\keys.rs:76:9
stack backtrace:
0: std::panicking::begin_panic<str>
at ***\.rustup\toolchains\stable-x86_64-pc-windows-msvc\lib\rustlib\src\rust\library\std\src\panicking.rs:519
1: radix_trie::keys::check_keys<rustyline::binding::Event>
at ***\.cargo\registry\src\github.com-1ecc6299db9ec823\radix_trie-0.2.1\src\keys.rs:76
2: radix_trie::trie_node::{{impl}}::value_checked::{{closure}}<rustyline::binding::Event,rustyline::binding::EventHandler,rustyline::binding::Event>
at ***\.cargo\registry\src\github.com-1ecc6299db9ec823\radix_trie-0.2.1\src\trie_node.rs:89
3: core::option::Option<alloc::boxed::Box<radix_trie::trie_node::KeyValue<rustyline::binding::Event, rustyline::binding::EventHandler>, alloc::alloc::Global>*>::map<alloc::boxed::Box<radix_trie::trie_node::KeyValue<rustyline::binding::Event, rustyline::bindi
at ***\.rustup\toolchains\stable-x86_64-pc-windows-msvc\lib\rustlib\src\rust\library\core\src\option.rs:487
4: radix_trie::trie_node::TrieNode<rustyline::binding::Event, rustyline::binding::EventHandler>::value_checked<rustyline::binding::Event,rustyline::binding::EventHandler,rustyline::binding::Event>
at ***\.cargo\registry\src\github.com-1ecc6299db9ec823\radix_trie-0.2.1\src\trie_node.rs:88
5: radix_trie::trie::{{impl}}::get::{{closure}}<rustyline::binding::Event,rustyline::binding::EventHandler,rustyline::binding::Event>
at ***\.cargo\registry\src\github.com-1ecc6299db9ec823\radix_trie-0.2.1\src\trie.rs:34
6: core::option::Option<radix_trie::trie_node::TrieNode<rustyline::binding::Event, rustyline::binding::EventHandler>*>::and_then<radix_trie::trie_node::TrieNode<rustyline::binding::Event, rustyline::binding::EventHandler>*,rustyline::binding::EventHandler*,c
at ***\.rustup\toolchains\stable-x86_64-pc-windows-msvc\lib\rustlib\src\rust\library\core\src\option.rs:724
7: radix_trie::Trie<rustyline::binding::Event, rustyline::binding::EventHandler>::get<rustyline::binding::Event,rustyline::binding::EventHandler,rustyline::binding::Event>
at ***\.cargo\registry\src\github.com-1ecc6299db9ec823\radix_trie-0.2.1\src\trie.rs:32
8: rustyline::keymap::InputState::custom_binding
at ***\.cargo\registry\src\github.com-1ecc6299db9ec823\rustyline-8.0.0\src\keymap.rs:434
9: rustyline::keymap::InputState::emacs<rustyline::tty::windows::ConsoleRawReader>
at ***\.cargo\registry\src\github.com-1ecc6299db9ec823\rustyline-8.0.0\src\keymap.rs:536
10: rustyline::keymap::InputState::next_cmd
at ***\.cargo\registry\src\github.com-1ecc6299db9ec823\rustyline-8.0.0\src\keymap.rs:413
11: rustyline::edit::State<tuple<>>::next_cmd<tuple<>>
at ***\.cargo\registry\src\github.com-1ecc6299db9ec823\rustyline-8.0.0\src\edit.rs:84
12: rustyline::readline_edit<tuple<>>
at ***\.cargo\registry\src\github.com-1ecc6299db9ec823\rustyline-8.0.0\src\lib.rs:479
13: rustyline::readline_raw<tuple<>>
at ***\.cargo\registry\src\github.com-1ecc6299db9ec823\rustyline-8.0.0\src\lib.rs:570
14: rustyline::Editor<tuple<>>::readline_with<tuple<>>
at ***\.cargo\registry\src\github.com-1ecc6299db9ec823\rustyline-8.0.0\src\lib.rs:700
15: rustyline::Editor<tuple<>>::readline<tuple<>>
at ***\.cargo\registry\src\github.com-1ecc6299db9ec823\rustyline-8.0.0\src\lib.rs:676
```
| On Unix platform, `Backspace` == `Ctrl-H`. So at least on Unix platform, afaik, we cannot make the difference.
But we should fix it on Windows platform.
Could please confirm that you add custom bindings for both `Backspace` and Ctrl-`Backspace` ?
Thanks. | 2021-04-04T17:24:56 | 8.0 | 36752fe1b30dbfeaaaaa7a7b30cfd7b578742c26 | [
"binding::test::no_collision"
] | [
"binding::test::encode",
"completion::tests::escape",
"completion::tests::unescape",
"edit::test::edit_history_next",
"highlight::tests::find_matching_bracket",
"highlight::tests::is_open_bracket",
"highlight::tests::matching_bracket",
"history::tests::add",
"highlight::tests::check_bracket",
"com... | [] | [] |
salsa-rs/salsa | 596 | salsa-rs__salsa-596 | [
"519"
] | c6c51a0ea0bb11ce5a449c4b9256c66c5e484fee | diff --git a/components/salsa-macros/src/tracked_fn.rs b/components/salsa-macros/src/tracked_fn.rs
--- a/components/salsa-macros/src/tracked_fn.rs
+++ b/components/salsa-macros/src/tracked_fn.rs
@@ -1,4 +1,5 @@
use proc_macro2::{Literal, Span, TokenStream};
+use quote::ToTokens;
use syn::{spanned::Spanned, ItemFn};
use crate::{db_lifetime, fn_util, hygiene::Hygiene, options::Options};
diff --git a/components/salsa-macros/src/tracked_fn.rs b/components/salsa-macros/src/tracked_fn.rs
--- a/components/salsa-macros/src/tracked_fn.rs
+++ b/components/salsa-macros/src/tracked_fn.rs
@@ -154,7 +155,8 @@ impl Macro {
));
}
- let (db_ident, db_path) = check_db_argument(&item.sig.inputs[0])?;
+ let (db_ident, db_path) =
+ check_db_argument(&item.sig.inputs[0], item.sig.generics.lifetimes().next())?;
Ok(ValidFn { db_ident, db_path })
}
diff --git a/components/salsa-macros/src/tracked_fn.rs b/components/salsa-macros/src/tracked_fn.rs
--- a/components/salsa-macros/src/tracked_fn.rs
+++ b/components/salsa-macros/src/tracked_fn.rs
@@ -202,6 +204,7 @@ fn function_type(item_fn: &syn::ItemFn) -> FunctionType {
pub fn check_db_argument<'arg>(
fn_arg: &'arg syn::FnArg,
+ explicit_lt: Option<&'arg syn::LifetimeParam>,
) -> syn::Result<(&'arg syn::Ident, &'arg syn::Path)> {
match fn_arg {
syn::FnArg::Receiver(_) => {
diff --git a/components/salsa-macros/src/tracked_fn.rs b/components/salsa-macros/src/tracked_fn.rs
--- a/components/salsa-macros/src/tracked_fn.rs
+++ b/components/salsa-macros/src/tracked_fn.rs
@@ -256,11 +259,23 @@ pub fn check_db_argument<'arg>(
));
}
- let extract_db_path = || -> Result<&'arg syn::Path, Span> {
- let syn::Type::Reference(ref_type) = &*typed.ty else {
- return Err(typed.ty.span());
- };
+ let tykind_error_msg =
+ "must have type `&dyn Db`, where `Db` is some Salsa Database trait";
+ let syn::Type::Reference(ref_type) = &*typed.ty else {
+ return Err(syn::Error::new(typed.ty.span(), tykind_error_msg));
+ };
+
+ if let Some(lt) = explicit_lt {
+ if ref_type.lifetime.is_none() {
+ return Err(syn::Error::new_spanned(
+ ref_type.and_token,
+ format!("must have a `{}` lifetime", lt.lifetime.to_token_stream()),
+ ));
+ }
+ }
+
+ let extract_db_path = || -> Result<&'arg syn::Path, Span> {
if let Some(m) = &ref_type.mutability {
return Err(m.span());
}
diff --git a/components/salsa-macros/src/tracked_fn.rs b/components/salsa-macros/src/tracked_fn.rs
--- a/components/salsa-macros/src/tracked_fn.rs
+++ b/components/salsa-macros/src/tracked_fn.rs
@@ -298,12 +313,8 @@ pub fn check_db_argument<'arg>(
Ok(path)
};
- let db_path = extract_db_path().map_err(|span| {
- syn::Error::new(
- span,
- "must have type `&dyn Db`, where `Db` is some Salsa Database trait",
- )
- })?;
+ let db_path =
+ extract_db_path().map_err(|span| syn::Error::new(span, tykind_error_msg))?;
Ok((db_ident, db_path))
}
| diff --git a/tests/compile-fail/tracked_fn_incompatibles.rs b/tests/compile-fail/tracked_fn_incompatibles.rs
--- a/tests/compile-fail/tracked_fn_incompatibles.rs
+++ b/tests/compile-fail/tracked_fn_incompatibles.rs
@@ -34,4 +34,38 @@ fn tracked_fn_with_too_many_arguments_for_specify(
) -> u32 {
}
+#[salsa::interned]
+struct MyInterned<'db> {
+ field: u32,
+}
+
+#[salsa::tracked]
+fn tracked_fn_with_lt_param_and_elided_lt_on_db_arg1<'db>(
+ db: &dyn Db,
+ interned: MyInterned<'db>,
+) -> u32 {
+ interned.field(db) * 2
+}
+
+#[salsa::tracked]
+fn tracked_fn_with_lt_param_and_elided_lt_on_db_arg2<'db_lifetime>(
+ db: &dyn Db,
+ interned: MyInterned<'db_lifetime>,
+) -> u32 {
+ interned.field(db) * 2
+}
+
+#[salsa::tracked]
+fn tracked_fn_with_lt_param_and_elided_lt_on_input<'db>(
+ db: &'db dyn Db,
+ interned: MyInterned,
+) -> u32 {
+ interned.field(db) * 2
+}
+
+#[salsa::tracked]
+fn tracked_fn_with_multiple_lts<'db1, 'db2>(db: &'db1 dyn Db, interned: MyInterned<'db2>) -> u32 {
+ interned.field(db) * 2
+}
+
fn main() {}
diff --git a/tests/compile-fail/tracked_fn_incompatibles.stderr b/tests/compile-fail/tracked_fn_incompatibles.stderr
--- a/tests/compile-fail/tracked_fn_incompatibles.stderr
+++ b/tests/compile-fail/tracked_fn_incompatibles.stderr
@@ -28,6 +28,35 @@ error: only functions with a single salsa struct as their input can be specified
29 | #[salsa::tracked(specify)]
| ^^^^^^^
+error: must have a `'db` lifetime
+ --> tests/compile-fail/tracked_fn_incompatibles.rs:44:9
+ |
+44 | db: &dyn Db,
+ | ^
+
+error: must have a `'db_lifetime` lifetime
+ --> tests/compile-fail/tracked_fn_incompatibles.rs:52:9
+ |
+52 | db: &dyn Db,
+ | ^
+
+error: only a single lifetime parameter is accepted
+ --> tests/compile-fail/tracked_fn_incompatibles.rs:67:39
+ |
+67 | fn tracked_fn_with_multiple_lts<'db1, 'db2>(db: &'db1 dyn Db, interned: MyInterned<'db2>) -> u32 {
+ | ^^^^
+
+error[E0106]: missing lifetime specifier
+ --> tests/compile-fail/tracked_fn_incompatibles.rs:61:15
+ |
+61 | interned: MyInterned,
+ | ^^^^^^^^^^ expected named lifetime parameter
+ |
+help: consider using the `'db` lifetime
+ |
+61 | interned: MyInterned<'db>,
+ | +++++
+
error[E0308]: mismatched types
--> tests/compile-fail/tracked_fn_incompatibles.rs:24:46
|
diff --git a/tests/tracked_fn_read_own_specify.rs b/tests/tracked_fn_read_own_specify.rs
--- a/tests/tracked_fn_read_own_specify.rs
+++ b/tests/tracked_fn_read_own_specify.rs
@@ -22,7 +22,7 @@ fn tracked_fn(db: &dyn LogDatabase, input: MyInput) -> u32 {
}
#[salsa::tracked(specify)]
-fn tracked_fn_extra<'db>(db: &dyn LogDatabase, input: MyTracked<'db>) -> u32 {
+fn tracked_fn_extra<'db>(db: &'db dyn LogDatabase, input: MyTracked<'db>) -> u32 {
db.push_log(format!("tracked_fn_extra({input:?})"));
0
}
| inconsistent behavior with respect to lifetime elision
In #518 the behavior with respect to lifetime elision for tracked functions is kind of inconsistent:
* For the `&dyn Db` parameter, we always infer the elided lifetime to be `'db`
* For other random inputs, we basically disallow elision: if you use a `'_`, it will wind up in the value of an associated type, and give an error.
* We'll also do one weird thing which is if you write `fn foo<'db>(db: &dyn Db, x: Foo<'db>)`, I believe the `db` becomes `&'db dyn Db`.
In reality, there is only one lifetime you can correctly use, which is `'db` -- salsa tracked functions must not take references except for a case like `TrackedStruct<'db>` or `InternedStruct<'db>`.
The current setup seems weird but there are two different choices we could make:
* Introduce a `'db` if it's not already there and replace all elided lifetimes with `'db`.
* Forbid elided lifetimes in the inputs unless there is exactly one (i.e., on the database). This is ~the current behavior, actually, except for the third bullet, which we should fix.
The former seems more convenient: there's only one thing you could want, so let's do it.
The latter is more consistent with non-tracked-functions, and it means if you remove the `#[salsa::tracked]`, your function keeps compiling.
I'm inclined towards the latter for now.
| 2024-10-16T00:56:26 | 0.18 | e4d36daf2dc4a096009753b6e326cf44e6c6e18a | [
"compile_fail"
] | [
"get_a_logs_after_changing_b",
"accumulate_once",
"change_a_from_2_to_0",
"change_a_from_2_to_1",
"accumulate_chain",
"accumulate_custom_clone",
"accumulate_custom_debug",
"accumulate_a_called_twice",
"accumulate_execution_order",
"test1",
"accumulate_no_duplicates",
"tests/compile-fail/accumu... | [] | [] | |
salsa-rs/salsa | 591 | salsa-rs__salsa-591 | [
"590"
] | c6c51a0ea0bb11ce5a449c4b9256c66c5e484fee | diff --git a/src/active_query.rs b/src/active_query.rs
--- a/src/active_query.rs
+++ b/src/active_query.rs
@@ -1,17 +1,17 @@
use rustc_hash::FxHashMap;
+use super::zalsa_local::{EdgeKind, QueryEdges, QueryOrigin, QueryRevisions};
+use crate::tracked_struct::IdentityHash;
use crate::{
accumulator::accumulated_map::AccumulatedMap,
durability::Durability,
hash::FxIndexSet,
key::{DatabaseKeyIndex, DependencyIndex},
- tracked_struct::{Disambiguator, KeyStruct},
+ tracked_struct::{Disambiguator, Identity},
zalsa_local::EMPTY_DEPENDENCIES,
- Cycle, Revision,
+ Cycle, Id, Revision,
};
-use super::zalsa_local::{EdgeKind, QueryEdges, QueryOrigin, QueryRevisions};
-
#[derive(Debug)]
pub(crate) struct ActiveQuery {
/// What query is executing
diff --git a/src/active_query.rs b/src/active_query.rs
--- a/src/active_query.rs
+++ b/src/active_query.rs
@@ -45,11 +45,11 @@ pub(crate) struct ActiveQuery {
/// This table starts empty as the query begins and is gradually populated.
/// Note that if a query executes in 2 different revisions but creates the same
/// set of tracked structs, they will get the same disambiguator values.
- disambiguator_map: FxHashMap<u64, Disambiguator>,
+ disambiguator_map: FxHashMap<IdentityHash, Disambiguator>,
/// Map from tracked struct keys (which include the hash + disambiguator) to their
/// final id.
- pub(crate) tracked_struct_ids: FxHashMap<KeyStruct, DatabaseKeyIndex>,
+ pub(crate) tracked_struct_ids: FxHashMap<Identity, Id>,
/// Stores the values accumulated to the given ingredient.
/// The type of accumulated value is erased but known to the ingredient.
diff --git a/src/active_query.rs b/src/active_query.rs
--- a/src/active_query.rs
+++ b/src/active_query.rs
@@ -155,10 +155,10 @@ impl ActiveQuery {
self.input_outputs.clone_from(&cycle_query.input_outputs);
}
- pub(super) fn disambiguate(&mut self, hash: u64) -> Disambiguator {
+ pub(super) fn disambiguate(&mut self, key: IdentityHash) -> Disambiguator {
let disambiguator = self
.disambiguator_map
- .entry(hash)
+ .entry(key)
.or_insert(Disambiguator(0));
let result = *disambiguator;
disambiguator.0 += 1;
diff --git a/src/function/diff_outputs.rs b/src/function/diff_outputs.rs
--- a/src/function/diff_outputs.rs
+++ b/src/function/diff_outputs.rs
@@ -32,10 +32,10 @@ where
if !old_outputs.is_empty() {
// Remove the outputs that are no longer present in the current revision
// to prevent that the next revision is seeded with a id mapping that no longer exists.
- revisions.tracked_struct_ids.retain(|_k, value| {
+ revisions.tracked_struct_ids.retain(|k, value| {
!old_outputs.contains(&DependencyIndex {
- ingredient_index: value.ingredient_index,
- key_index: Some(value.key_index),
+ ingredient_index: k.ingredient_index(),
+ key_index: Some(*value),
})
});
}
diff --git a/src/tracked_struct.rs b/src/tracked_struct.rs
--- a/src/tracked_struct.rs
+++ b/src/tracked_struct.rs
@@ -147,16 +147,35 @@ where
/// stored in the [`ActiveQuery`](`crate::active_query::ActiveQuery`)
/// struct and later moved to the [`Memo`](`crate::function::memo::Memo`).
#[derive(Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Copy, Clone)]
-pub(crate) struct KeyStruct {
- /// The hash of the `#[id]` fields of this struct.
- /// Note that multiple structs may share the same hash.
- data_hash: u64,
+pub(crate) struct Identity {
+ /// Hash of fields with id attribute
+ identity_hash: IdentityHash,
/// The unique disambiguator assigned within the active query
- /// to distinguish distinct tracked structs with the same hash.
+ /// to distinguish distinct tracked structs with the same identity_hash.
disambiguator: Disambiguator,
}
+impl Identity {
+ pub(crate) fn ingredient_index(&self) -> IngredientIndex {
+ self.identity_hash.ingredient_index
+ }
+}
+
+/// Stores the data that (almost) uniquely identifies a tracked struct.
+/// This includes the ingredient index of that struct type plus the hash of its id fields.
+/// This is mapped to a disambiguator -- a value that starts as 0 but increments each round,
+/// allowing for multiple tracked structs with the same hash and ingredient_index
+/// created within the query to each have a unique id.
+#[derive(Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Copy, Clone)]
+pub struct IdentityHash {
+ /// Index of the tracked struct ingredient.
+ ingredient_index: IngredientIndex,
+
+ /// Hash of the id fields.
+ hash: u64,
+}
+
// ANCHOR: ValueStruct
#[derive(Debug)]
pub struct Value<C>
diff --git a/src/tracked_struct.rs b/src/tracked_struct.rs
--- a/src/tracked_struct.rs
+++ b/src/tracked_struct.rs
@@ -255,17 +274,21 @@ where
) -> C::Struct<'db> {
let (zalsa, zalsa_local) = db.zalsas();
- let data_hash = crate::hash::hash(&C::id_fields(&fields));
+ let identity_hash = IdentityHash {
+ ingredient_index: self.ingredient_index,
+ hash: crate::hash::hash(&C::id_fields(&fields)),
+ };
+
+ let (current_deps, disambiguator) = zalsa_local.disambiguate(identity_hash);
- let (current_deps, disambiguator) = zalsa_local.disambiguate(data_hash);
+ let identity = Identity {
+ identity_hash,
- let key_struct = KeyStruct {
disambiguator,
- data_hash,
};
let current_revision = zalsa.current_revision();
- match zalsa_local.tracked_struct_id(&key_struct) {
+ match zalsa_local.tracked_struct_id(&identity) {
Some(id) => {
// The struct already exists in the intern map.
zalsa_local.add_output(self.database_key_index(id).into());
diff --git a/src/tracked_struct.rs b/src/tracked_struct.rs
--- a/src/tracked_struct.rs
+++ b/src/tracked_struct.rs
@@ -278,7 +301,7 @@ where
let id = self.allocate(zalsa, zalsa_local, current_revision, ¤t_deps, fields);
let key = self.database_key_index(id);
zalsa_local.add_output(key.into());
- zalsa_local.store_tracked_struct_id(key_struct, key);
+ zalsa_local.store_tracked_struct_id(identity, id);
C::struct_from_id(id)
}
}
diff --git a/src/zalsa_local.rs b/src/zalsa_local.rs
--- a/src/zalsa_local.rs
+++ b/src/zalsa_local.rs
@@ -10,8 +10,7 @@ use crate::runtime::StampedValue;
use crate::table::PageIndex;
use crate::table::Slot;
use crate::table::Table;
-use crate::tracked_struct::Disambiguator;
-use crate::tracked_struct::KeyStruct;
+use crate::tracked_struct::{Disambiguator, Identity, IdentityHash};
use crate::zalsa::IngredientIndex;
use crate::Accumulator;
use crate::Cancelled;
diff --git a/src/zalsa_local.rs b/src/zalsa_local.rs
--- a/src/zalsa_local.rs
+++ b/src/zalsa_local.rs
@@ -262,7 +261,7 @@ impl ZalsaLocal {
/// * the current dependencies (durability, changed_at) of current query
/// * the disambiguator index
#[track_caller]
- pub(crate) fn disambiguate(&self, data_hash: u64) -> (StampedValue<()>, Disambiguator) {
+ pub(crate) fn disambiguate(&self, key: IdentityHash) -> (StampedValue<()>, Disambiguator) {
assert!(
self.query_in_progress(),
"cannot create a tracked struct disambiguator outside of a tracked function"
diff --git a/src/zalsa_local.rs b/src/zalsa_local.rs
--- a/src/zalsa_local.rs
+++ b/src/zalsa_local.rs
@@ -270,7 +269,7 @@ impl ZalsaLocal {
self.with_query_stack(|stack| {
let top_query = stack.last_mut().unwrap();
- let disambiguator = top_query.disambiguate(data_hash);
+ let disambiguator = top_query.disambiguate(key);
(
StampedValue {
value: (),
diff --git a/src/zalsa_local.rs b/src/zalsa_local.rs
--- a/src/zalsa_local.rs
+++ b/src/zalsa_local.rs
@@ -283,32 +282,30 @@ impl ZalsaLocal {
}
#[track_caller]
- pub(crate) fn tracked_struct_id(&self, key_struct: &KeyStruct) -> Option<Id> {
+ pub(crate) fn tracked_struct_id(&self, identity: &Identity) -> Option<Id> {
debug_assert!(
self.query_in_progress(),
"cannot create a tracked struct disambiguator outside of a tracked function"
);
+
self.with_query_stack(|stack| {
let top_query = stack.last().unwrap();
- top_query
- .tracked_struct_ids
- .get(key_struct)
- .map(|index| index.key_index())
+ top_query.tracked_struct_ids.get(identity).copied()
})
}
#[track_caller]
- pub(crate) fn store_tracked_struct_id(&self, key_struct: KeyStruct, id: DatabaseKeyIndex) {
+ pub(crate) fn store_tracked_struct_id(&self, identity: Identity, id: Id) {
debug_assert!(
self.query_in_progress(),
"cannot create a tracked struct disambiguator outside of a tracked function"
);
self.with_query_stack(|stack| {
let top_query = stack.last_mut().unwrap();
- let old_id = top_query.tracked_struct_ids.insert(key_struct, id);
+ let old_id = top_query.tracked_struct_ids.insert(identity, id);
assert!(
old_id.is_none(),
- "overwrote a previous id for `{key_struct:?}`"
+ "overwrote a previous id for `{identity:?}`"
);
})
}
diff --git a/src/zalsa_local.rs b/src/zalsa_local.rs
--- a/src/zalsa_local.rs
+++ b/src/zalsa_local.rs
@@ -377,7 +374,7 @@ pub(crate) struct QueryRevisions {
/// previous revision. To handle this, `diff_outputs` compares
/// the structs from the old/new revision and retains
/// only entries that appeared in the new revision.
- pub(super) tracked_struct_ids: FxHashMap<KeyStruct, DatabaseKeyIndex>,
+ pub(super) tracked_struct_ids: FxHashMap<Identity, Id>,
pub(super) accumulated: AccumulatedMap,
}
diff --git a/src/zalsa_local.rs b/src/zalsa_local.rs
--- a/src/zalsa_local.rs
+++ b/src/zalsa_local.rs
@@ -536,10 +533,7 @@ impl ActiveQueryGuard<'_> {
}
/// Initialize the tracked struct ids with the values from the prior execution.
- pub(crate) fn seed_tracked_struct_ids(
- &self,
- tracked_struct_ids: &FxHashMap<KeyStruct, DatabaseKeyIndex>,
- ) {
+ pub(crate) fn seed_tracked_struct_ids(&self, tracked_struct_ids: &FxHashMap<Identity, Id>) {
self.local_state.with_query_stack(|stack| {
assert_eq!(stack.len(), self.push_len);
let frame = stack.last_mut().unwrap();
| diff --git /dev/null b/tests/hash_collision.rs
new file mode 100644
--- /dev/null
+++ b/tests/hash_collision.rs
@@ -0,0 +1,32 @@
+use std::hash::Hash;
+
+#[test]
+fn hello() {
+ use salsa::{Database, DatabaseImpl, Setter};
+
+ #[salsa::input]
+ struct Bool {
+ value: bool,
+ }
+
+ #[salsa::tracked]
+ struct True<'db> {}
+
+ #[salsa::tracked]
+ struct False<'db> {}
+
+ #[salsa::tracked]
+ fn hello(db: &dyn Database, bool: Bool) {
+ if bool.value(db) {
+ True::new(db);
+ } else {
+ False::new(db);
+ }
+ }
+
+ let mut db = DatabaseImpl::new();
+ let input = Bool::new(&db, false);
+ hello(&db, input);
+ input.set_value(&mut db).to(true);
+ hello(&db, input);
+}
| Input change cause panic on next queries
From Zulip (thanks Andrey for the minimal repro) :
```rust
#[test]
fn hello() {
use salsa::{Database, DatabaseImpl, Setter};
#[salsa::input]
struct Bool {
value: bool,
}
#[salsa::tracked]
struct True<'db> {}
#[salsa::tracked]
struct False<'db> {}
#[salsa::tracked]
fn hello(db: &dyn Database, bool: Bool) {
if bool.value(db) {
True::new(db);
} else {
False::new(db);
}
}
let mut db = DatabaseImpl::new();
let input = Bool::new(&db, false);
hello(&mut db, input);
input.set_value(&mut db).to(true);
hello(&mut db, input);
}
```
Output :
```
assertion `left == right` failed: page has hidden type `"salsa::table::Page<salsa::tracked_struct::Value<mitki_parse::hello::False>>"` but `"salsa::table::Page<salsa::tracked_struct::Value<mitki_parse::hello::True>>"` was expected
```
Stacktrace in `table.rs` :
```rust
impl dyn TablePage {
fn assert_type<T: Any>(&self) -> &T {
assert_eq!( // <-- Assertion failure here
Any::type_id(self),
TypeId::of::<T>(),
"page has hidden type `{:?}` but `{:?}` was expected",
self.hidden_type_name(),
std::any::type_name::<T>(),
);
// SAFETY: Assertion above
unsafe { transmute_data_ptr::<dyn TablePage, T>(self) }
}
}
```
| In struct::new only data_hash and nonce are used to distinguish structs. Can we add IngredientIndex to KeyStruct to fix this bug?
Following what was said on Zulip, I was able to fix this issue by adding the IngredientIndex to the hash in `new_struct` [here](https://github.com/salsa-rs/salsa/commit/b7cd2684623f9e7b7bcb592fb56fed9884f96532). If this is sufficient I'm happy to raise a PR
I would be leaning towards including the `IngredientIndex` in `KeyStruct`. That should also allow us to simplify `diff_outputs` to get the ingredient index directly from the `tracked_struct_ids` entrie's key.
> Following what was said on Zulip, I was able to fix this issue by adding the IngredientIndex to the hash in `new_struct` [here](https://github.com/salsa-rs/salsa/commit/b7cd2684623f9e7b7bcb592fb56fed9884f96532). If this is sufficient I'm happy to raise a PR
This one still can collide the same way, but with different inputs
@mnbjhu
```
use std::hash::{Hash, Hasher};
#[test]
fn hello() {
use salsa::{Database, DatabaseImpl, Setter};
#[salsa::input]
struct Bool {
value: bool,
}
#[salsa::tracked]
struct True<'db>{
#[id]
data: u64
};
#[salsa::tracked]
struct False<'db> {}
#[salsa::tracked]
fn hello(db: &dyn Database, bool: Bool) {
if bool.value(db) {
True::new(db, 9794039851661988595);
} else {
False::new(db);
}
}
let mut db = DatabaseImpl::new();
let input = Bool::new(&db, false);
hello(&mut db, input);
input.set_value(&mut db).to(true);
hello(&mut db, input);
}
``` | 2024-10-12T23:58:10 | 0.18 | e4d36daf2dc4a096009753b6e326cf44e6c6e18a | [
"hello"
] | [
"get_a_logs_after_changing_b",
"accumulate_once",
"change_a_from_2_to_0",
"change_a_from_2_to_1",
"accumulate_chain",
"accumulate_custom_clone",
"accumulate_custom_debug",
"accumulate_a_called_twice",
"accumulate_execution_order",
"test1",
"accumulate_no_duplicates",
"tests/compile-fail/accumu... | [] | [] |
salvo-rs/salvo | 844 | salvo-rs__salvo-844 | [
"842"
] | 81844145f73bad93435c68a4e7f1d231ec9d9eb4 | diff --git a/crates/core/src/http/request.rs b/crates/core/src/http/request.rs
--- a/crates/core/src/http/request.rs
+++ b/crates/core/src/http/request.rs
@@ -14,7 +14,6 @@ pub use http::request::Parts;
use http::uri::{Scheme, Uri};
use http::Extensions;
use http_body_util::{BodyExt, Limited};
-use indexmap::IndexMap;
use multimap::MultiMap;
use parking_lot::RwLock;
use serde::de::Deserialize;
diff --git a/crates/core/src/http/request.rs b/crates/core/src/http/request.rs
--- a/crates/core/src/http/request.rs
+++ b/crates/core/src/http/request.rs
@@ -25,6 +24,7 @@ use crate::fuse::TransProto;
use crate::http::body::ReqBody;
use crate::http::form::{FilePart, FormData};
use crate::http::{Mime, ParseError, Version};
+use crate::routing::PathParams;
use crate::serde::{from_request, from_str_map, from_str_multi_map, from_str_multi_val, from_str_val};
use crate::Error;
diff --git a/crates/core/src/http/request.rs b/crates/core/src/http/request.rs
--- a/crates/core/src/http/request.rs
+++ b/crates/core/src/http/request.rs
@@ -61,7 +61,7 @@ pub struct Request {
#[cfg(feature = "cookie")]
pub(crate) cookies: CookieJar,
- pub(crate) params: IndexMap<String, String>,
+ pub(crate) params: PathParams,
// accept: Option<Vec<Mime>>,
pub(crate) queries: OnceLock<MultiMap<String, String>>,
diff --git a/crates/core/src/http/request.rs b/crates/core/src/http/request.rs
--- a/crates/core/src/http/request.rs
+++ b/crates/core/src/http/request.rs
@@ -110,7 +110,7 @@ impl Request {
method: Method::default(),
#[cfg(feature = "cookie")]
cookies: CookieJar::default(),
- params: IndexMap::new(),
+ params: PathParams::new(),
queries: OnceLock::new(),
form_data: tokio::sync::OnceCell::new(),
payload: tokio::sync::OnceCell::new(),
diff --git a/crates/core/src/http/request.rs b/crates/core/src/http/request.rs
--- a/crates/core/src/http/request.rs
+++ b/crates/core/src/http/request.rs
@@ -171,7 +171,7 @@ impl Request {
#[cfg(feature = "cookie")]
cookies,
// accept: None,
- params: IndexMap::new(),
+ params: PathParams::new(),
form_data: tokio::sync::OnceCell::new(),
payload: tokio::sync::OnceCell::new(),
// multipart: OnceLock::new(),
diff --git a/crates/core/src/http/request.rs b/crates/core/src/http/request.rs
--- a/crates/core/src/http/request.rs
+++ b/crates/core/src/http/request.rs
@@ -567,12 +567,12 @@ impl Request {
}
/// Get params reference.
#[inline]
- pub fn params(&self) -> &IndexMap<String, String> {
+ pub fn params(&self) -> &PathParams {
&self.params
}
/// Get params mutable reference.
#[inline]
- pub fn params_mut(&mut self) -> &mut IndexMap<String, String> {
+ pub fn params_mut(&mut self) -> &mut PathParams {
&mut self.params
}
diff --git a/crates/core/src/routing/filters/path.rs b/crates/core/src/routing/filters/path.rs
--- a/crates/core/src/routing/filters/path.rs
+++ b/crates/core/src/routing/filters/path.rs
@@ -254,13 +254,13 @@ impl PathWisp for CharsWisp {
}
if chars.len() == max_width {
state.forward(max_width);
- state.params.insert(self.name.clone(), chars.into_iter().collect());
+ state.params.insert(&self.name, chars.into_iter().collect());
return true;
}
}
if chars.len() >= self.min_width {
state.forward(chars.len());
- state.params.insert(self.name.clone(), chars.into_iter().collect());
+ state.params.insert(&self.name, chars.into_iter().collect());
true
} else {
false
diff --git a/crates/core/src/routing/filters/path.rs b/crates/core/src/routing/filters/path.rs
--- a/crates/core/src/routing/filters/path.rs
+++ b/crates/core/src/routing/filters/path.rs
@@ -274,7 +274,7 @@ impl PathWisp for CharsWisp {
}
if chars.len() >= self.min_width {
state.forward(chars.len());
- state.params.insert(self.name.clone(), chars.into_iter().collect());
+ state.params.insert(&self.name, chars.into_iter().collect());
true
} else {
false
diff --git a/crates/core/src/routing/filters/path.rs b/crates/core/src/routing/filters/path.rs
--- a/crates/core/src/routing/filters/path.rs
+++ b/crates/core/src/routing/filters/path.rs
@@ -298,7 +298,7 @@ impl CombWisp {
impl PathWisp for CombWisp {
#[inline]
fn detect<'a>(&self, state: &mut PathState) -> bool {
- let mut offline = if let Some(part) = state.parts.get_mut(state.cursor.0) {
+ let mut offline = if let Some(part) = state.parts.get(state.cursor.0) {
part.clone()
} else {
return false;
diff --git a/crates/core/src/routing/filters/path.rs b/crates/core/src/routing/filters/path.rs
--- a/crates/core/src/routing/filters/path.rs
+++ b/crates/core/src/routing/filters/path.rs
@@ -403,7 +403,7 @@ impl PathWisp for NamedWisp {
}
if !rest.is_empty() || !self.0.starts_with("*+") {
let rest = rest.to_string();
- state.params.insert(self.0.clone(), rest);
+ state.params.insert(&self.0, rest);
state.cursor.0 = state.parts.len();
true
} else {
diff --git a/crates/core/src/routing/filters/path.rs b/crates/core/src/routing/filters/path.rs
--- a/crates/core/src/routing/filters/path.rs
+++ b/crates/core/src/routing/filters/path.rs
@@ -416,7 +416,7 @@ impl PathWisp for NamedWisp {
}
let picked = picked.expect("picked should not be `None`").to_owned();
state.forward(picked.len());
- state.params.insert(self.0.clone(), picked);
+ state.params.insert(&self.0, picked);
true
}
}
diff --git a/crates/core/src/routing/filters/path.rs b/crates/core/src/routing/filters/path.rs
--- a/crates/core/src/routing/filters/path.rs
+++ b/crates/core/src/routing/filters/path.rs
@@ -456,7 +456,7 @@ impl PathWisp for RegexWisp {
if let Some(cap) = cap {
let cap = cap.as_str().to_owned();
state.forward(cap.len());
- state.params.insert(self.name.clone(), cap);
+ state.params.insert(&self.name, cap);
true
} else {
false
diff --git a/crates/core/src/routing/filters/path.rs b/crates/core/src/routing/filters/path.rs
--- a/crates/core/src/routing/filters/path.rs
+++ b/crates/core/src/routing/filters/path.rs
@@ -472,7 +472,7 @@ impl PathWisp for RegexWisp {
if let Some(cap) = cap {
let cap = cap.as_str().to_owned();
state.forward(cap.len());
- state.params.insert(self.name.clone(), cap);
+ state.params.insert(&self.name, cap);
true
} else {
false
diff --git a/crates/core/src/routing/mod.rs b/crates/core/src/routing/mod.rs
--- a/crates/core/src/routing/mod.rs
+++ b/crates/core/src/routing/mod.rs
@@ -301,7 +301,7 @@
//!
//! #[handler]
//! fn serve_file(req: &mut Request) {
-//! let rest_path = req.param::<i64>("**rest_path");
+//! let rest_path = req.param::<i64>("rest_path");
//! }
//! ```
//!
diff --git a/crates/core/src/routing/mod.rs b/crates/core/src/routing/mod.rs
--- a/crates/core/src/routing/mod.rs
+++ b/crates/core/src/routing/mod.rs
@@ -375,6 +375,7 @@ mod router;
pub use router::Router;
use std::borrow::Cow;
+use std::ops::Deref;
use std::sync::Arc;
use indexmap::IndexMap;
diff --git a/crates/core/src/routing/mod.rs b/crates/core/src/routing/mod.rs
--- a/crates/core/src/routing/mod.rs
+++ b/crates/core/src/routing/mod.rs
@@ -388,8 +389,57 @@ pub struct DetectMatched {
pub goal: Arc<dyn Handler>,
}
-#[doc(hidden)]
-pub type PathParams = IndexMap<String, String>;
+/// The path parameters.
+#[derive(Clone, Default, Debug, Eq, PartialEq)]
+pub struct PathParams {
+ inner: IndexMap<String, String>,
+ greedy: bool,
+}
+impl Deref for PathParams {
+ type Target = IndexMap<String, String>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.inner
+ }
+}
+impl PathParams {
+ /// Create new `PathParams`.
+ pub fn new() -> Self {
+ PathParams::default()
+ }
+ /// If there is a wildcard param, it's value is `true`.
+ pub fn greedy(&self) -> bool {
+ self.greedy
+ }
+ /// Get the last param starts with '*', for example: <**rest>, <*?rest>.
+ pub fn tail(&self) -> Option<&str> {
+ if self.greedy {
+ self.inner.last().map(|(_, v)| &**v)
+ } else {
+ None
+ }
+ }
+
+ /// Insert new param.
+ pub fn insert(&mut self, name: &str, value: String) {
+ #[cfg(debug_assertions)]
+ {
+ if self.greedy {
+ panic!("only one wildcard param is allowed and it must be the last one.");
+ }
+ }
+ if name.starts_with("*+") || name.starts_with("*?") || name.starts_with("**") {
+ self.inner.insert(name[2..].to_owned(), value);
+ self.greedy = true;
+ } else if let Some(name) = name.strip_prefix('*') {
+ self.inner.insert(name.to_owned(), value);
+ self.greedy = true;
+ } else {
+ self.inner.insert(name.to_owned(), value);
+ }
+ }
+}
+
#[doc(hidden)]
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct PathState {
diff --git a/crates/core/src/routing/router.rs b/crates/core/src/routing/router.rs
--- a/crates/core/src/routing/router.rs
+++ b/crates/core/src/routing/router.rs
@@ -354,11 +354,11 @@ impl fmt::Debug for Router {
} else {
format!("{prefix}{SYMBOL_TEE}{SYMBOL_RIGHT}{SYMBOL_RIGHT}")
};
- let hd = if let Some(goal) = &router.goal {
- format!(" -> {}", goal.type_name())
- } else {
- "".into()
- };
+ let hd = router
+ .goal
+ .as_ref()
+ .map(|goal| format!(" -> {}", goal.type_name()))
+ .unwrap_or_default();
if !others.is_empty() {
writeln!(f, "{cp}{path}[{}]{hd}", others.join(","))?;
} else {
diff --git a/crates/core/src/serde/request.rs b/crates/core/src/serde/request.rs
--- a/crates/core/src/serde/request.rs
+++ b/crates/core/src/serde/request.rs
@@ -206,16 +206,16 @@ impl<'de> RequestDeserializer<'de> {
return false;
};
- let field_name: Cow<'_, str> = if let Some(rename) = field.rename {
- Cow::from(rename)
+ let field_name = if let Some(rename) = field.rename {
+ rename
} else if let Some(serde_rename) = field.serde_rename {
- Cow::from(serde_rename)
+ serde_rename
} else if let Some(rename_all) = self.metadata.rename_all {
- rename_all.apply_to_field(field.decl_name).into()
+ &*rename_all.apply_to_field(field.decl_name)
} else if let Some(serde_rename_all) = self.metadata.serde_rename_all {
- serde_rename_all.apply_to_field(field.decl_name).into()
+ &*serde_rename_all.apply_to_field(field.decl_name)
} else {
- field.decl_name.into()
+ field.decl_name
};
for source in sources {
diff --git a/crates/core/src/serde/request.rs b/crates/core/src/serde/request.rs
--- a/crates/core/src/serde/request.rs
+++ b/crates/core/src/serde/request.rs
@@ -237,7 +237,7 @@ impl<'de> RequestDeserializer<'de> {
}
}
SourceFrom::Query => {
- let mut value = self.queries.get_vec(field_name.as_ref());
+ let mut value = self.queries.get_vec(field_name);
if value.is_none() {
for alias in &field.aliases {
value = self.queries.get_vec(*alias);
diff --git a/crates/core/src/serde/request.rs b/crates/core/src/serde/request.rs
--- a/crates/core/src/serde/request.rs
+++ b/crates/core/src/serde/request.rs
@@ -254,8 +254,8 @@ impl<'de> RequestDeserializer<'de> {
}
SourceFrom::Header => {
let mut value = None;
- if self.headers.contains_key(field_name.as_ref()) {
- value = Some(self.headers.get_all(field_name.as_ref()))
+ if self.headers.contains_key(field_name) {
+ value = Some(self.headers.get_all(field_name))
} else {
for alias in &field.aliases {
if self.headers.contains_key(*alias) {
diff --git a/crates/core/src/serde/request.rs b/crates/core/src/serde/request.rs
--- a/crates/core/src/serde/request.rs
+++ b/crates/core/src/serde/request.rs
@@ -301,7 +301,7 @@ impl<'de> RequestDeserializer<'de> {
if let Some(payload) = &self.payload {
match payload {
Payload::FormData(form_data) => {
- let mut value = form_data.fields.get(field_name.as_ref());
+ let mut value = form_data.fields.get(field_name);
if value.is_none() {
for alias in &field.aliases {
value = form_data.fields.get(*alias);
diff --git a/crates/core/src/serde/request.rs b/crates/core/src/serde/request.rs
--- a/crates/core/src/serde/request.rs
+++ b/crates/core/src/serde/request.rs
@@ -318,7 +318,7 @@ impl<'de> RequestDeserializer<'de> {
return false;
}
Payload::JsonMap(ref map) => {
- let mut value = map.get(field_name.as_ref());
+ let mut value = map.get(field_name);
if value.is_none() {
for alias in &field.aliases {
value = map.get(alias);
diff --git a/crates/core/src/serde/request.rs b/crates/core/src/serde/request.rs
--- a/crates/core/src/serde/request.rs
+++ b/crates/core/src/serde/request.rs
@@ -346,7 +346,7 @@ impl<'de> RequestDeserializer<'de> {
}
SourceParser::MultiMap => {
if let Some(Payload::FormData(form_data)) = self.payload {
- let mut value = form_data.fields.get_vec(field_name.as_ref());
+ let mut value = form_data.fields.get_vec(field_name);
if value.is_none() {
for alias in &field.aliases {
value = form_data.fields.get_vec(*alias);
diff --git a/crates/oapi/src/swagger_ui/mod.rs b/crates/oapi/src/swagger_ui/mod.rs
--- a/crates/oapi/src/swagger_ui/mod.rs
+++ b/crates/oapi/src/swagger_ui/mod.rs
@@ -233,12 +233,16 @@ pub(crate) fn redirect_to_dir_url(req_uri: &Uri, res: &mut Response) {
#[async_trait]
impl Handler for SwaggerUi {
async fn handle(&self, req: &mut Request, _depot: &mut Depot, res: &mut Response, _ctrl: &mut FlowCtrl) {
- let path = req.params().get("**").map(|s| &**s).unwrap_or_default();
- // Redirect to dir url if path is empty and not end with '/'
- if path.is_empty() && !req.uri().path().ends_with('/') {
+ // Redirect to dir url if path is not end with '/'
+ if !req.uri().path().ends_with('/') {
redirect_to_dir_url(req.uri(), res);
return;
}
+ let Some(path) = req.params().tail() else {
+ res.render(StatusError::not_found().detail("The router params is incorrect. The params should ending with a wildcard."));
+ return;
+ };
+
let keywords = self
.keywords
.as_ref()
diff --git a/crates/proxy/src/lib.rs b/crates/proxy/src/lib.rs
--- a/crates/proxy/src/lib.rs
+++ b/crates/proxy/src/lib.rs
@@ -132,14 +132,12 @@ where
/// Url part getter. You can use this to get the proxied url path or query.
pub type UrlPartGetter = Box<dyn Fn(&Request, &Depot) -> Option<String> + Send + Sync + 'static>;
-/// Default url path getter. This getter will get the url path from request wildcard param, like `<**rest>`, `<*+rest>`.
+/// Default url path getter.
+///
+/// This getter will get the last param as the rest url path from request.
+/// In most case you should use wildcard param, like `<**rest>`, `<*+rest>`.
pub fn default_url_path_getter(req: &Request, _depot: &Depot) -> Option<String> {
- let param = req.params().iter().find(|(key, _)| key.starts_with('*'));
- if let Some((_, rest)) = param {
- Some(encode_url_path(rest))
- } else {
- None
- }
+ req.params().tail().map(encode_url_path)
}
/// Default url query getter. This getter just return the query string from request uri.
pub fn default_url_query_getter(req: &Request, _depot: &Depot) -> Option<String> {
diff --git a/crates/serve-static/src/dir.rs b/crates/serve-static/src/dir.rs
--- a/crates/serve-static/src/dir.rs
+++ b/crates/serve-static/src/dir.rs
@@ -287,14 +287,13 @@ impl DirInfo {
#[async_trait]
impl Handler for StaticDir {
async fn handle(&self, req: &mut Request, _depot: &mut Depot, res: &mut Response, _ctrl: &mut FlowCtrl) {
- let param = req.params().iter().find(|(key, _)| key.starts_with('*'));
let req_path = req.uri().path();
- let rel_path = if let Some((_, value)) = param {
- value.clone()
+ let rel_path = if let Some(rest) = req.params().tail() {
+ rest
} else {
- decode_url_path_safely(req_path)
+ &*decode_url_path_safely(req_path)
};
- let rel_path = format_url_path_safely(&rel_path);
+ let rel_path = format_url_path_safely(rel_path);
let mut files: HashMap<String, Metadata> = HashMap::new();
let mut dirs: HashMap<String, Metadata> = HashMap::new();
let is_dot_file = Path::new(&rel_path)
diff --git a/crates/serve-static/src/embed.rs b/crates/serve-static/src/embed.rs
--- a/crates/serve-static/src/embed.rs
+++ b/crates/serve-static/src/embed.rs
@@ -114,11 +114,10 @@ where
T: RustEmbed + Send + Sync + 'static,
{
async fn handle(&self, req: &mut Request, _depot: &mut Depot, res: &mut Response, _ctrl: &mut FlowCtrl) {
- let param = req.params().iter().find(|(key, _)| key.starts_with('*'));
- let req_path = if let Some((_, value)) = param {
- value.clone()
+ let req_path = if let Some(rest) = req.params().tail() {
+ rest
} else {
- decode_url_path_safely(req.uri().path())
+ &*decode_url_path_safely(req.uri().path())
};
let req_path = format_url_path_safely(&req_path);
let mut key_path = Cow::Borrowed(&*req_path);
diff --git a/examples/static-embed-file/src/main.rs b/examples/static-embed-file/src/main.rs
--- a/examples/static-embed-file/src/main.rs
+++ b/examples/static-embed-file/src/main.rs
@@ -10,7 +10,7 @@ struct Assets;
async fn main() {
tracing_subscriber::fmt().init();
- let router = Router::with_path("<*path>").get(serve_file);
+ let router = Router::with_path("<**rest>").get(serve_file);
let acceptor = TcpListener::new("0.0.0.0:5800").bind().await;
Server::new(acceptor).serve(router).await;
diff --git a/examples/static-embed-file/src/main.rs b/examples/static-embed-file/src/main.rs
--- a/examples/static-embed-file/src/main.rs
+++ b/examples/static-embed-file/src/main.rs
@@ -18,7 +18,7 @@ async fn main() {
#[handler]
async fn serve_file(req: &mut Request, res: &mut Response) {
- let path = req.param::<String>("*path").unwrap();
+ let path = req.param::<String>("rest").unwrap();
if let Some(file) = Assets::get(&path) {
file.render(req, res);
} else {
| diff --git a/crates/core/src/routing/filters/path.rs b/crates/core/src/routing/filters/path.rs
--- a/crates/core/src/routing/filters/path.rs
+++ b/crates/core/src/routing/filters/path.rs
@@ -930,11 +930,6 @@ mod tests {
let segments = PathParser::new("/").parse().unwrap();
assert!(segments.is_empty());
}
- #[test]
- fn test_parse_rest_without_name() {
- let segments = PathParser::new("/hello/<**>").parse().unwrap();
- assert_eq!(format!("{:?}", segments), r#"[ConstWisp("hello"), NamedWisp("**")]"#);
- }
#[test]
fn test_parse_single_const() {
diff --git a/crates/oapi/src/extract/parameter/path.rs b/crates/oapi/src/extract/parameter/path.rs
--- a/crates/oapi/src/extract/parameter/path.rs
+++ b/crates/oapi/src/extract/parameter/path.rs
@@ -158,7 +158,7 @@ mod tests {
let req = TestClient::get("http://127.0.0.1:5801").build_hyper();
let schema = req.uri().scheme().cloned().unwrap();
let mut req = Request::from_hyper(req, schema);
- req.params_mut().insert("param".to_string(), "param".to_string());
+ req.params_mut().insert("param", "param".to_string());
let result = PathParam::<String>::extract_with_arg(&mut req, "param").await;
assert_eq!(result.unwrap().0, "param");
}
diff --git a/crates/serve-static/src/lib.rs b/crates/serve-static/src/lib.rs
--- a/crates/serve-static/src/lib.rs
+++ b/crates/serve-static/src/lib.rs
@@ -190,21 +190,21 @@ mod tests {
let router = Router::new()
.push(Router::with_path("test1.txt").get(Assets::get("test1.txt").unwrap().into_handler()))
- .push(Router::with_path("files/<*path>").get(serve_file))
+ .push(Router::with_path("files/<**path>").get(serve_file))
.push(
- Router::with_path("dir/<*path>").get(
+ Router::with_path("dir/<**path>").get(
static_embed::<Assets>()
.defaults("index.html")
.fallback("fallback.html"),
),
)
- .push(Router::with_path("dir2/<*path>").get(static_embed::<Assets>()))
- .push(Router::with_path("dir3/<*path>").get(static_embed::<Assets>().fallback("notexist.html")));
+ .push(Router::with_path("dir2/<**path>").get(static_embed::<Assets>()))
+ .push(Router::with_path("dir3/<**path>").get(static_embed::<Assets>().fallback("notexist.html")));
let service = Service::new(router);
#[handler]
async fn serve_file(req: &mut Request, res: &mut Response) {
- let path = req.param::<String>("*path").unwrap();
+ let path = req.param::<String>("path").unwrap();
if let Some(file) = Assets::get(&path) {
file.render(req, res);
}
| 使用PathParam时,路由通配符<**id>不生效
**Is your feature request related to a problem? Please describe.**
定义了两个路由,分别为以下:↓
路由1:
``` rust
Router::with_path("/system/roles").get(role_controller::get_list)
```
路由2:
``` rust
Router::with_path("/system/roles/<id>").get(role_controller::get_one)
```
路由2相关代码定义:
```rust
use salvo::{
oapi::{endpoint, extract::PathParam},
Depot, Writer,
};
......
/// 获取单条角色
#[endpoint(operation_id = "system_role_findById", tags("系统->角色"))]
pub async fn get_one(
id: PathParam<i64>,
depot: &mut Depot,
) -> Result<AppResponse<SysRoleModel>, AppError> {
let ctx = depot.obtain_mut::<Context>().unwrap();
// let real_id = transform::decrypt_id(&*id)?;
let result = role_service::view(ctx, *id).await;
Ok(AppResponse(result))
}
```
## 具体情况
但当访问 /system/roles/ 路由时,即前端传参id可能为undefined时
**期望:走路由2,然后报错id不存在**
实际:走路由1
## 相关尝试:
```rust
// 1
Router::with_path("/system/roles/<**id>").get(role_controller::get_one)
// 2
Router::with_path("/system/roles/<**>").get(role_controller::get_one)
```
报错信息:
```
path parameter id not found or convert to type failed
```
**Describe the solution you'd like**
在使用endpoint openapi的情况下,也能通过**通配符或者其它方案实现上述期望
**Describe alternatives you've considered**
**Additional context**
| 2024-07-29T13:10:39 | 0.68 | 81844145f73bad93435c68a4e7f1d231ec9d9eb4 | [
"tests::test_serve_embed_files"
] | [
"conn::addr::tests::test_addr_ipv4",
"conn::addr::tests::test_addr_ipv6",
"catcher::tests::test_custom_catcher",
"catcher::tests::test_handle_error",
"conn::acme::config::tests::test_acme_config_builder",
"depot::test::test_depot",
"conn::joined::tests::test_joined_listener",
"error::tests::test_anyho... | [] | [] | |
salvo-rs/salvo | 785 | salvo-rs__salvo-785 | [
"619"
] | 8504240ebdebbbdd3b69d3156b5688ce9ae16293 | diff --git a/crates/oapi-macros/src/endpoint/mod.rs b/crates/oapi-macros/src/endpoint/mod.rs
--- a/crates/oapi-macros/src/endpoint/mod.rs
+++ b/crates/oapi-macros/src/endpoint/mod.rs
@@ -56,7 +56,7 @@ fn metadata(
let mut operation = #oapi::oapi::Operation::new();
#modifiers
if operation.operation_id.is_none() {
- operation.operation_id = Some(::std::any::type_name::<#name>().replace("::", "."));
+ operation.operation_id = Some(#oapi::oapi::naming::assign_name::<#name>(#oapi::oapi::naming::NameRule::Auto));
}
if !status_codes.is_empty() {
let responses = std::ops::DerefMut::deref_mut(&mut operation.responses);
diff --git a/crates/oapi-macros/src/schema/mod.rs b/crates/oapi-macros/src/schema/mod.rs
--- a/crates/oapi-macros/src/schema/mod.rs
+++ b/crates/oapi-macros/src/schema/mod.rs
@@ -86,7 +86,7 @@ impl TryToTokens for ToSchema<'_> {
Ok(quote! {
if ::std::any::TypeId::of::<Self>() == ::std::any::TypeId::of::<#ty>() {
- name = Some(#oapi::oapi::schema::naming::assign_name::<#ty>(#oapi::oapi::schema::naming::NameRule::Force(#name)));
+ name = Some(#oapi::oapi::naming::assign_name::<#ty>(#oapi::oapi::naming::NameRule::Force(#name)));
}
})
})
diff --git a/crates/oapi-macros/src/schema/mod.rs b/crates/oapi-macros/src/schema/mod.rs
--- a/crates/oapi-macros/src/schema/mod.rs
+++ b/crates/oapi-macros/src/schema/mod.rs
@@ -116,9 +116,9 @@ impl TryToTokens for ToSchema<'_> {
} else if let Some(name) = variant.name() {
let name = name.0.path.to_token_stream();
let name = quote!(#name).to_string();
- Some(quote! { #oapi::oapi::schema::naming::NameRule::Force(#name) })
+ Some(quote! { #oapi::oapi::naming::NameRule::Force(#name) })
} else {
- Some(quote! { #oapi::oapi::schema::naming::NameRule::Auto })
+ Some(quote! { #oapi::oapi::naming::NameRule::Auto })
};
let variant = variant.try_to_token_stream()?;
let body = match name_rule {
diff --git a/crates/oapi-macros/src/schema/mod.rs b/crates/oapi-macros/src/schema/mod.rs
--- a/crates/oapi-macros/src/schema/mod.rs
+++ b/crates/oapi-macros/src/schema/mod.rs
@@ -132,11 +132,11 @@ impl TryToTokens for ToSchema<'_> {
quote! {
let mut name = None;
#type_aliases
- let name = name.unwrap_or_else(||#oapi::oapi::schema::naming::assign_name::<#ident #ty_generics>(#name_rule));
+ let name = name.unwrap_or_else(||#oapi::oapi::naming::assign_name::<#ident #ty_generics>(#name_rule));
}
} else {
quote! {
- let name = #oapi::oapi::schema::naming::assign_name::<#ident #ty_generics>(#name_rule);
+ let name = #oapi::oapi::naming::assign_name::<#ident #ty_generics>(#name_rule);
}
};
quote! {
diff --git a/crates/oapi/src/lib.rs b/crates/oapi/src/lib.rs
--- a/crates/oapi/src/lib.rs
+++ b/crates/oapi/src/lib.rs
@@ -15,6 +15,8 @@ pub use endpoint::{Endpoint, EndpointArgRegister, EndpointOutRegister, EndpointR
pub mod extract;
mod routing;
pub use routing::RouterExt;
+/// Module for name schemas.
+pub mod naming;
cfg_feature! {
#![feature ="swagger-ui"]
diff --git a/crates/oapi/src/lib.rs b/crates/oapi/src/lib.rs
--- a/crates/oapi/src/lib.rs
+++ b/crates/oapi/src/lib.rs
@@ -290,7 +292,7 @@ impl<K: ToSchema, V: ToSchema> ToSchema for HashMap<K, V> {
impl ToSchema for StatusError {
fn to_schema(components: &mut Components) -> RefOr<schema::Schema> {
- let name = crate::schema::naming::assign_name::<StatusError>(Default::default());
+ let name = crate::naming::assign_name::<StatusError>(Default::default());
let ref_or = crate::RefOr::Ref(crate::Ref::new(format!("#/components/schemas/{}", name)));
if !components.schemas.contains_key(&name) {
components.schemas.insert(name.clone(), ref_or.clone());
diff --git a/crates/oapi/src/lib.rs b/crates/oapi/src/lib.rs
--- a/crates/oapi/src/lib.rs
+++ b/crates/oapi/src/lib.rs
@@ -323,7 +325,7 @@ where
E: ToSchema,
{
fn to_schema(components: &mut Components) -> RefOr<schema::Schema> {
- let name = crate::schema::naming::assign_name::<StatusError>(Default::default());
+ let name = crate::naming::assign_name::<StatusError>(Default::default());
let ref_or = crate::RefOr::Ref(crate::Ref::new(format!("#/components/schemas/{}", name)));
if !components.schemas.contains_key(&name) {
components.schemas.insert(name.clone(), ref_or.clone());
diff --git a/crates/oapi/src/openapi/schema/naming.rs b/crates/oapi/src/naming.rs
--- a/crates/oapi/src/openapi/schema/naming.rs
+++ b/crates/oapi/src/naming.rs
@@ -59,7 +59,7 @@ pub fn get_name<T: 'static>() -> String {
}
fn type_generic_part(type_name: &str) -> String {
- let re = Regex::new(r"^[^<]+").unwrap();
+ let re = Regex::new(r"^[^<]+").expect("Invalid regex");
let result = re.replace_all(type_name, "");
result.to_string()
}
diff --git a/crates/oapi/src/openapi/schema/naming.rs b/crates/oapi/src/naming.rs
--- a/crates/oapi/src/openapi/schema/naming.rs
+++ b/crates/oapi/src/naming.rs
@@ -70,6 +70,7 @@ pub trait Namer: Sync + Send + 'static {
}
/// A namer that generates wordy names.
+#[derive(Default, Debug, Clone, Copy)]
pub struct WordyNamer;
impl WordyNamer {
/// Create a new WordyNamer.
diff --git a/crates/oapi/src/openapi/schema/naming.rs b/crates/oapi/src/naming.rs
--- a/crates/oapi/src/openapi/schema/naming.rs
+++ b/crates/oapi/src/naming.rs
@@ -106,6 +107,7 @@ impl Namer for WordyNamer {
}
/// A namer that generates short names.
+#[derive(Default, Debug, Clone, Copy)]
pub struct ShortNamer;
impl ShortNamer {
/// Create a new ShortNamer.
diff --git a/crates/oapi/src/openapi/schema/naming.rs b/crates/oapi/src/naming.rs
--- a/crates/oapi/src/openapi/schema/naming.rs
+++ b/crates/oapi/src/naming.rs
@@ -117,7 +119,7 @@ impl Namer for ShortNamer {
fn assign_name(&self, type_id: TypeId, type_name: &'static str, rule: NameRule) -> String {
let name: String = match rule {
NameRule::Auto => {
- let re = Regex::new(r"([^:<>]+::)+").unwrap();
+ let re = Regex::new(r"([^:<>]+::)+").expect("Invalid regex");
let base = re.replace_all(type_name, "");
let mut name = base.to_string();
let mut count = 1;
diff --git a/crates/oapi/src/openapi/mod.rs b/crates/oapi/src/openapi/mod.rs
--- a/crates/oapi/src/openapi/mod.rs
+++ b/crates/oapi/src/openapi/mod.rs
@@ -1,4 +1,24 @@
//! Rust implementation of Openapi Spec V3.
+
+mod components;
+mod content;
+mod encoding;
+mod example;
+mod external_docs;
+mod header;
+pub mod info;
+pub mod operation;
+pub mod parameter;
+pub mod path;
+pub mod request_body;
+pub mod response;
+pub mod schema;
+pub mod security;
+pub mod server;
+mod tag;
+mod xml;
+
+use crate::{routing::NormNode, Endpoint};
use std::collections::{btree_map, BTreeSet};
use once_cell::sync::Lazy;
diff --git a/crates/oapi/src/openapi/mod.rs b/crates/oapi/src/openapi/mod.rs
--- a/crates/oapi/src/openapi/mod.rs
+++ b/crates/oapi/src/openapi/mod.rs
@@ -25,26 +45,6 @@ pub use self::{
xml::Xml,
};
-mod components;
-mod content;
-mod encoding;
-mod example;
-mod external_docs;
-mod header;
-pub mod info;
-pub mod operation;
-pub mod parameter;
-pub mod path;
-pub mod request_body;
-pub mod response;
-pub mod schema;
-pub mod security;
-pub mod server;
-mod tag;
-mod xml;
-
-use crate::{routing::NormNode, Endpoint};
-
static PATH_PARAMETER_NAME_REGEX: Lazy<Regex> = Lazy::new(|| Regex::new(r"\{([^}:]+)").expect("invalid regex"));
/// Root object of the OpenAPI document.
diff --git a/crates/oapi/src/openapi/schema/mod.rs b/crates/oapi/src/openapi/schema/mod.rs
--- a/crates/oapi/src/openapi/schema/mod.rs
+++ b/crates/oapi/src/openapi/schema/mod.rs
@@ -5,8 +5,6 @@
mod all_of;
mod any_of;
mod array;
-/// Module for name schemas.
-pub mod naming;
mod object;
mod one_of;
| diff --git a/crates/oapi-macros/src/lib.rs b/crates/oapi-macros/src/lib.rs
--- a/crates/oapi-macros/src/lib.rs
+++ b/crates/oapi-macros/src/lib.rs
@@ -250,7 +250,7 @@ mod tests {
let status_codes: &[salvo::http::StatusCode] = &[];
let mut operation = salvo::oapi::Operation::new();
if operation.operation_id.is_none() {
- operation.operation_id = Some(::std::any::type_name::<hello>().replace("::", "."));
+ operation.operation_id = Some(salvo::oapi::naming::assign_name::<hello>(salvo::oapi::naming::NameRule::Auto));
}
if !status_codes.is_empty() {
let responses = std::ops::DerefMut::deref_mut(&mut operation.responses);
| Find a way to rename OpenAPI object and operation globally
On the swagger spec the operationIds are very long
<img width="975" alt="Screenshot 2024-01-10 at 18 47 46" src="https://github.com/salvo-rs/salvo/assets/6940726/d3169bc1-287c-481d-94aa-cbd561ba54b7">
This results in very long auto generated code
<img width="739" alt="Screenshot 2024-01-10 at 18 48 07" src="https://github.com/salvo-rs/salvo/assets/6940726/ca20c83e-9bb6-4366-8742-eaf40ad007d9">
We need a way to reduce it.
| You can rename your operation_id for example:
```rust
#[endpoint(operation_id="AAAAAAA", tags("todos"), status_codes(200, 404))]
pub async fn update_todo(id: PathParam<u64>, updated: JsonBody<Todo>) -> Result<StatusCode, StatusError> {
...
}
```
@chrislearn yes indeed but it’s also on objects like return types; it would be nice to configure the root so it’s smaller across the whole project automatically :-) | 2024-05-23T10:59:27 | 0.67 | 8504240ebdebbbdd3b69d3156b5688ce9ae16293 | [
"tests::test_handler_for_fn"
] | [] | [] | [] |
salvo-rs/salvo | 784 | salvo-rs__salvo-784 | [
"619"
] | 156afc29722dd9d202b560d1896ebc145781b472 | diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -17,7 +17,6 @@ pub(crate) struct ComponentSchemaProps<'c> {
pub(crate) description: Option<&'c CommentAttributes>,
pub(crate) deprecated: Option<&'c Deprecated>,
pub(crate) object_name: &'c str,
- pub(crate) type_definition: bool,
}
#[derive(Debug)]
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -33,7 +32,6 @@ impl<'c> ComponentSchema {
description,
deprecated,
object_name,
- type_definition,
}: ComponentSchemaProps,
) -> DiagResult<Self> {
let mut tokens = TokenStream::new();
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -51,7 +49,6 @@ impl<'c> ComponentSchema {
object_name,
description_stream,
deprecated_stream,
- type_definition,
)?
}
Some(GenericType::Vec) => ComponentSchema::vec_to_tokens(
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -61,7 +58,6 @@ impl<'c> ComponentSchema {
object_name,
description_stream,
deprecated_stream,
- type_definition,
)?,
Some(GenericType::LinkedList) => ComponentSchema::vec_to_tokens(
&mut tokens,
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -70,7 +66,6 @@ impl<'c> ComponentSchema {
object_name,
description_stream,
deprecated_stream,
- type_definition,
)?,
Some(GenericType::Set) => ComponentSchema::vec_to_tokens(
&mut tokens,
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -79,7 +74,6 @@ impl<'c> ComponentSchema {
object_name,
description_stream,
deprecated_stream,
- type_definition,
)?,
#[cfg(feature = "smallvec")]
Some(GenericType::SmallVec) => ComponentSchema::vec_to_tokens(
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -89,7 +83,6 @@ impl<'c> ComponentSchema {
object_name,
description_stream,
deprecated_stream,
- type_definition,
)?,
Some(GenericType::Option) => {
// Add nullable feature if not already exists. Option is always nullable
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -109,7 +102,6 @@ impl<'c> ComponentSchema {
description,
deprecated,
object_name,
- type_definition,
})?
.to_tokens(&mut tokens);
}
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -130,7 +122,6 @@ impl<'c> ComponentSchema {
description,
deprecated,
object_name,
- type_definition,
})?
.to_tokens(&mut tokens);
}
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -141,7 +132,6 @@ impl<'c> ComponentSchema {
object_name,
description_stream,
deprecated_stream,
- type_definition,
)?,
}
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -155,7 +145,6 @@ impl<'c> ComponentSchema {
object_name: &str,
description_stream: Option<TokenStream>,
deprecated_stream: Option<TokenStream>,
- type_definition: bool,
) -> DiagResult<()> {
let oapi = crate::oapi_crate();
let example = features.pop_by(|feature| matches!(feature, Feature::Example(_)));
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -185,7 +174,6 @@ impl<'c> ComponentSchema {
description: None,
deprecated: None,
object_name,
- type_definition,
})?
.to_token_stream();
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -216,7 +204,6 @@ impl<'c> ComponentSchema {
object_name: &str,
description_stream: Option<TokenStream>,
deprecated_stream: Option<TokenStream>,
- type_definition: bool,
) -> DiagResult<()> {
let oapi = crate::oapi_crate();
let example = pop_feature!(features => Feature::Example(_));
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -257,7 +244,6 @@ impl<'c> ComponentSchema {
description: None,
deprecated: None,
object_name,
- type_definition,
})?;
let unique = match unique {
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -318,9 +304,11 @@ impl<'c> ComponentSchema {
object_name: &str,
description_stream: Option<TokenStream>,
deprecated_stream: Option<TokenStream>,
- type_definition: bool,
) -> DiagResult<()> {
let oapi = crate::oapi_crate();
+ let nullable = pop_feature!(features => Feature::Nullable(_))
+ .map(|f| f.try_to_token_stream())
+ .transpose()?;
match type_tree.value_type {
ValueType::Primitive => {
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -356,17 +344,11 @@ impl<'c> ComponentSchema {
}
tokens.extend(features.try_to_token_stream()?);
- let nullable = pop_feature!(features => Feature::Nullable(_));
- if let Some(nullable) = nullable {
- nullable.try_to_tokens(tokens)?;
- }
+ nullable.to_tokens(tokens);
}
ValueType::Object => {
let is_inline = features.is_inline();
- let nullable = pop_feature!(features => Feature::Nullable(_))
- .map(|f| f.try_to_token_stream())
- .transpose()?;
let default = pop_feature!(features => Feature::Default(_))
.map(|f| f.try_to_token_stream())
.transpose()?;
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -390,34 +372,24 @@ impl<'c> ComponentSchema {
nullable.to_tokens(tokens);
} else {
let type_path = &**type_tree.path.as_ref().expect("path should not be `None`");
- let schema = if type_definition {
- quote! {
- if std::any::TypeId::of::<#type_path>() == std::any::TypeId::of::<Self>() {
- #oapi::oapi::RefOr::<#oapi::oapi::Schema>::Ref(#oapi::oapi::schema::Ref::new("#"))
- } else {
- #oapi::oapi::RefOr::from(<#type_path as #oapi::oapi::ToSchema>::to_schema(components))
- }
- }
- } else {
- quote! {
- <#type_path as #oapi::oapi::ToSchema>::to_schema(components)
- }
- };
if is_inline {
let schema = if default.is_some() || nullable.is_some() {
quote_spanned! {type_path.span()=>
#oapi::oapi::schema::AllOf::new()
#nullable
- .item(#schema)
+ .item(<#type_path as #oapi::oapi::ToSchema>::to_schema(components))
#default
}
} else {
quote_spanned! {type_path.span() =>
- #schema
+ <#type_path as #oapi::oapi::ToSchema>::to_schema(components)
}
};
schema.to_tokens(tokens);
} else {
+ let schema = quote! {
+ #oapi::oapi::RefOr::from(<#type_path as #oapi::oapi::ToSchema>::to_schema(components))
+ };
let schema = if default.is_some() || nullable.is_some() {
quote! {
#oapi::oapi::schema::AllOf::new()
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -435,9 +407,6 @@ impl<'c> ComponentSchema {
}
}
ValueType::Tuple => {
- let nullable = pop_feature!(features => Feature::Nullable(_))
- .map(|f| f.try_to_token_stream())
- .transpose()?;
type_tree
.children
.as_ref()
diff --git a/crates/oapi-macros/src/component.rs b/crates/oapi-macros/src/component.rs
--- a/crates/oapi-macros/src/component.rs
+++ b/crates/oapi-macros/src/component.rs
@@ -457,7 +426,6 @@ impl<'c> ComponentSchema {
description: None,
deprecated: None,
object_name,
- type_definition,
})
})
.collect::<DiagResult<Vec<_>>>()
diff --git a/crates/oapi-macros/src/endpoint/mod.rs b/crates/oapi-macros/src/endpoint/mod.rs
--- a/crates/oapi-macros/src/endpoint/mod.rs
+++ b/crates/oapi-macros/src/endpoint/mod.rs
@@ -37,6 +37,15 @@ fn metadata(
}
}
}));
+ let modifiers = if modifiers.is_empty() {
+ None
+ } else {
+ Some(quote! {{
+ let mut components = &mut components;
+ let mut operation = &mut operation;
+ #(#modifiers)*
+ }})
+ };
let stream = quote! {
fn #tfn() -> ::std::any::TypeId {
::std::any::TypeId::of::<#name>()
diff --git a/crates/oapi-macros/src/endpoint/mod.rs b/crates/oapi-macros/src/endpoint/mod.rs
--- a/crates/oapi-macros/src/endpoint/mod.rs
+++ b/crates/oapi-macros/src/endpoint/mod.rs
@@ -44,11 +53,8 @@ fn metadata(
fn #cfn() -> #oapi::oapi::Endpoint {
let mut components = #oapi::oapi::Components::new();
let status_codes: &[#salvo::http::StatusCode] = &#status_codes;
- fn modify(components: &mut #oapi::oapi::Components, operation: &mut #oapi::oapi::Operation) {
- #(#modifiers)*
- }
let mut operation = #oapi::oapi::Operation::new();
- modify(&mut components, &mut operation);
+ #modifiers
if operation.operation_id.is_none() {
operation.operation_id = Some(::std::any::type_name::<#name>().replace("::", "."));
}
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -2,9 +2,12 @@ use std::{fmt::Display, mem};
use proc_macro2::{Ident, Span, TokenStream};
use quote::{quote, ToTokens};
-use syn::{parenthesized, parse::ParseStream, punctuated::Punctuated, token, LitStr, TypePath, WherePredicate};
+use syn::parse::ParseStream;
+use syn::punctuated::Punctuated;
+use syn::token::Comma;
+use syn::{parenthesized, token, LitStr, Token, Type, TypePath, WherePredicate};
-use super::{impl_name, parse_integer, parse_number, Feature, Parse, Validate, Validator};
+use super::{impl_get_name, parse_integer, parse_number, Feature, Parse, Validate, Validator};
use crate::{
parameter::{self, ParameterStyle},
parse_utils, schema,
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -34,7 +37,7 @@ impl From<Example> for Feature {
Feature::Example(value)
}
}
-impl_name!(Example = "example");
+impl_get_name!(Example = "example");
#[derive(Clone, Debug)]
pub(crate) struct Default(pub(crate) Option<AnyValue>);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -65,7 +68,7 @@ impl From<self::Default> for Feature {
Feature::Default(value)
}
}
-impl_name!(Default = "default");
+impl_get_name!(Default = "default");
#[derive(Clone, Debug)]
pub(crate) struct Inline(pub(crate) bool);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -84,7 +87,7 @@ impl From<Inline> for Feature {
Feature::Inline(value)
}
}
-impl_name!(Inline = "inline");
+impl_get_name!(Inline = "inline");
#[derive(Default, Clone, Debug)]
pub(crate) struct XmlAttr(pub(crate) schema::XmlAttr);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -138,7 +141,7 @@ impl From<XmlAttr> for Feature {
Feature::XmlAttr(value)
}
}
-impl_name!(XmlAttr = "xml");
+impl_get_name!(XmlAttr = "xml");
#[derive(Clone, Debug)]
pub(crate) struct Format(pub(crate) SchemaFormat<'static>);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -158,7 +161,7 @@ impl From<Format> for Feature {
Feature::Format(value)
}
}
-impl_name!(Format = "format");
+impl_get_name!(Format = "format");
#[derive(Clone, Debug)]
pub(crate) struct ValueType(pub(crate) syn::Type);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -178,7 +181,7 @@ impl From<ValueType> for Feature {
Feature::ValueType(value)
}
}
-impl_name!(ValueType = "value_type");
+impl_get_name!(ValueType = "value_type");
#[derive(Clone, Copy, Debug)]
pub(crate) struct WriteOnly(pub(crate) bool);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -197,7 +200,7 @@ impl From<WriteOnly> for Feature {
Feature::WriteOnly(value)
}
}
-impl_name!(WriteOnly = "write_only");
+impl_get_name!(WriteOnly = "write_only");
#[derive(Clone, Copy, Debug)]
pub(crate) struct ReadOnly(pub(crate) bool);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -216,26 +219,45 @@ impl From<ReadOnly> for Feature {
Feature::ReadOnly(value)
}
}
-impl_name!(ReadOnly = "read_only");
+impl_get_name!(ReadOnly = "read_only");
#[derive(Clone, Debug)]
-pub(crate) struct Symbol(pub(crate) String);
-impl Parse for Symbol {
+pub(crate) struct Name(pub(crate) TypePath);
+impl Parse for Name {
+ fn parse(input: syn::parse::ParseStream, _: Ident) -> syn::Result<Self> {
+ parse_utils::parse_next(input, || input.parse()).map(Self)
+ }
+}
+impl ToTokens for Name {
+ fn to_tokens(&self, stream: &mut proc_macro2::TokenStream) {
+ stream.extend(self.0.to_token_stream())
+ }
+}
+impl From<Name> for Feature {
+ fn from(value: Name) -> Self {
+ Feature::Name(value)
+ }
+}
+impl_get_name!(Name = "name");
+
+#[derive(Clone, Debug)]
+pub(crate) struct Title(pub(crate) String);
+impl Parse for Title {
fn parse(input: syn::parse::ParseStream, _: Ident) -> syn::Result<Self> {
parse_utils::parse_next_literal_str(input).map(Self)
}
}
-impl ToTokens for Symbol {
+impl ToTokens for Title {
fn to_tokens(&self, stream: &mut proc_macro2::TokenStream) {
stream.extend(self.0.to_token_stream())
}
}
-impl From<Symbol> for Feature {
- fn from(value: Symbol) -> Self {
- Feature::Symbol(value)
+impl From<Title> for Feature {
+ fn from(value: Title) -> Self {
+ Feature::Title(value)
}
}
-impl_name!(Symbol = "symbol");
+impl_get_name!(Title = "title");
#[derive(Clone, Copy, Debug)]
pub(crate) struct Nullable(pub(crate) bool);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -259,7 +281,7 @@ impl From<Nullable> for Feature {
Feature::Nullable(value)
}
}
-impl_name!(Nullable = "nullable");
+impl_get_name!(Nullable = "nullable");
#[derive(Clone, Debug)]
pub(crate) struct Rename(pub(crate) String);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -284,7 +306,7 @@ impl From<Rename> for Feature {
Feature::Rename(value)
}
}
-impl_name!(Rename = "rename");
+impl_get_name!(Rename = "rename");
#[derive(Clone, Debug)]
pub(crate) struct RenameAll(pub(crate) RenameRule);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -309,7 +331,7 @@ impl From<RenameAll> for Feature {
Feature::RenameAll(value)
}
}
-impl_name!(RenameAll = "rename_all");
+impl_get_name!(RenameAll = "rename_all");
#[derive(Clone, Debug)]
pub(crate) struct DefaultStyle(pub(crate) ParameterStyle);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -333,7 +355,7 @@ impl From<DefaultStyle> for Feature {
Feature::DefaultStyle(value)
}
}
-impl_name!(DefaultStyle = "default_style");
+impl_get_name!(DefaultStyle = "default_style");
#[derive(Clone, Debug)]
pub(crate) struct Style(pub(crate) ParameterStyle);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -357,7 +379,7 @@ impl From<Style> for Feature {
Feature::Style(value)
}
}
-impl_name!(Style = "style");
+impl_get_name!(Style = "style");
#[derive(Clone, Debug)]
pub(crate) struct AllowReserved(pub(crate) bool);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -376,7 +398,7 @@ impl From<AllowReserved> for Feature {
Feature::AllowReserved(value)
}
}
-impl_name!(AllowReserved = "allow_reserved");
+impl_get_name!(AllowReserved = "allow_reserved");
#[derive(Clone, Debug)]
pub(crate) struct Explode(pub(crate) bool);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -395,7 +417,7 @@ impl From<Explode> for Feature {
Feature::Explode(value)
}
}
-impl_name!(Explode = "explode");
+impl_get_name!(Explode = "explode");
#[derive(Clone, Debug)]
pub(crate) struct DefaultParameterIn(pub(crate) parameter::ParameterIn);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -414,7 +436,7 @@ impl From<DefaultParameterIn> for Feature {
Feature::DefaultParameterIn(value)
}
}
-impl_name!(DefaultParameterIn = "default_parameter_in");
+impl_get_name!(DefaultParameterIn = "default_parameter_in");
#[derive(Clone, Debug)]
pub(crate) struct ParameterIn(pub(crate) parameter::ParameterIn);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -433,7 +455,7 @@ impl From<ParameterIn> for Feature {
Feature::ParameterIn(value)
}
}
-impl_name!(ParameterIn = "parameter_in");
+impl_get_name!(ParameterIn = "parameter_in");
/// Specify names of unnamed fields with `names(...) attribute for `ToParameters` derive.
#[derive(Clone, Debug)]
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -458,7 +480,7 @@ impl From<Names> for Feature {
Feature::ToParametersNames(value)
}
}
-impl_name!(Names = "names");
+impl_get_name!(Names = "names");
#[derive(Clone, Debug)]
pub(crate) struct MultipleOf(pub(crate) f64, pub(crate) Ident);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -493,7 +515,7 @@ impl From<MultipleOf> for Feature {
Feature::MultipleOf(value)
}
}
-impl_name!(MultipleOf = "multiple_of");
+impl_get_name!(MultipleOf = "multiple_of");
#[derive(Clone, Debug)]
pub(crate) struct Maximum(pub(crate) f64, pub(crate) Ident);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -528,7 +550,7 @@ impl From<Maximum> for Feature {
Feature::Maximum(value)
}
}
-impl_name!(Maximum = "maximum");
+impl_get_name!(Maximum = "maximum");
#[derive(Clone, Debug)]
pub(crate) struct Minimum(pub(crate) f64, pub(crate) Ident);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -568,7 +590,7 @@ impl From<Minimum> for Feature {
Feature::Minimum(value)
}
}
-impl_name!(Minimum = "minimum");
+impl_get_name!(Minimum = "minimum");
#[derive(Clone, Debug)]
pub(crate) struct ExclusiveMaximum(pub(crate) f64, pub(crate) Ident);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -600,7 +622,7 @@ impl From<ExclusiveMaximum> for Feature {
Feature::ExclusiveMaximum(value)
}
}
-impl_name!(ExclusiveMaximum = "exclusive_maximum");
+impl_get_name!(ExclusiveMaximum = "exclusive_maximum");
#[derive(Clone, Debug)]
pub(crate) struct ExclusiveMinimum(pub(crate) f64, pub(crate) Ident);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -632,7 +654,7 @@ impl From<ExclusiveMinimum> for Feature {
Feature::ExclusiveMinimum(value)
}
}
-impl_name!(ExclusiveMinimum = "exclusive_minimum");
+impl_get_name!(ExclusiveMinimum = "exclusive_minimum");
#[derive(Clone, Debug)]
pub(crate) struct MaxLength(pub(crate) usize, pub(crate) Ident);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -670,7 +692,7 @@ impl From<MaxLength> for Feature {
Feature::MaxLength(value)
}
}
-impl_name!(MaxLength = "max_length");
+impl_get_name!(MaxLength = "max_length");
#[derive(Clone, Debug)]
pub(crate) struct MinLength(pub(crate) usize, pub(crate) Ident);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -708,7 +730,7 @@ impl From<MinLength> for Feature {
Feature::MinLength(value)
}
}
-impl_name!(MinLength = "min_length");
+impl_get_name!(MinLength = "min_length");
#[derive(Clone, Debug)]
pub(crate) struct Pattern(pub(crate) String, pub(crate) Ident);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -743,7 +765,7 @@ impl From<Pattern> for Feature {
Feature::Pattern(value)
}
}
-impl_name!(Pattern = "pattern");
+impl_get_name!(Pattern = "pattern");
#[derive(Clone, Debug)]
pub(crate) struct MaxItems(pub(crate) usize, pub(crate) Ident);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -778,7 +800,7 @@ impl From<MaxItems> for Feature {
Feature::MaxItems(value)
}
}
-impl_name!(MaxItems = "max_items");
+impl_get_name!(MaxItems = "max_items");
#[derive(Clone, Debug)]
pub(crate) struct MinItems(pub(crate) usize, pub(crate) Ident);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -813,7 +835,7 @@ impl From<MinItems> for Feature {
Feature::MinItems(value)
}
}
-impl_name!(MinItems = "min_items");
+impl_get_name!(MinItems = "min_items");
#[derive(Clone, Debug)]
#[allow(dead_code)]
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -836,7 +858,7 @@ impl From<MaxProperties> for Feature {
Feature::MaxProperties(value)
}
}
-impl_name!(MaxProperties = "max_properties");
+impl_get_name!(MaxProperties = "max_properties");
#[derive(Clone, Debug)]
#[allow(dead_code)]
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -859,7 +881,7 @@ impl From<MinProperties> for Feature {
Feature::MinProperties(value)
}
}
-impl_name!(MinProperties = "min_properties");
+impl_get_name!(MinProperties = "min_properties");
#[derive(Clone, Debug)]
pub(crate) struct SchemaWith(pub(crate) TypePath);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -881,7 +903,7 @@ impl From<SchemaWith> for Feature {
Feature::SchemaWith(value)
}
}
-impl_name!(SchemaWith = "schema_with");
+impl_get_name!(SchemaWith = "schema_with");
#[derive(Clone, Debug)]
pub(crate) struct Bound(pub(crate) Vec<WherePredicate>);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -905,7 +927,7 @@ impl From<Bound> for Feature {
Feature::Bound(value)
}
}
-impl_name!(Bound = "bound");
+impl_get_name!(Bound = "bound");
#[derive(Eq, PartialEq, Clone, Debug)]
pub(crate) struct SkipBound(pub(crate) bool);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -922,7 +944,7 @@ impl From<SkipBound> for Feature {
Feature::SkipBound(value)
}
}
-impl_name!(SkipBound = "skip_bound");
+impl_get_name!(SkipBound = "skip_bound");
#[derive(Clone, Debug)]
pub(crate) struct Description(pub(crate) String);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -949,7 +971,7 @@ impl From<Description> for Feature {
Self::Description(value)
}
}
-impl_name!(Description = "description");
+impl_get_name!(Description = "description");
/// Deprecated feature parsed from macro attributes.
///
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -977,7 +999,7 @@ impl From<Deprecated> for Feature {
}
}
-impl_name!(Deprecated = "deprecated");
+impl_get_name!(Deprecated = "deprecated");
/// Skip feature parsed from macro attributes.
#[derive(Clone, Debug)]
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -1001,7 +1023,7 @@ impl From<Skip> for Feature {
}
}
-impl_name!(Skip = "skip");
+impl_get_name!(Skip = "skip");
#[derive(Clone, Debug)]
pub(crate) struct AdditionalProperties(pub(crate) bool);
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -1024,7 +1046,7 @@ impl ToTokens for AdditionalProperties {
))
}
}
-impl_name!(AdditionalProperties = "additional_properties");
+impl_get_name!(AdditionalProperties = "additional_properties");
impl From<AdditionalProperties> for Feature {
fn from(value: AdditionalProperties) -> Self {
diff --git a/crates/oapi-macros/src/feature/items.rs b/crates/oapi-macros/src/feature/items.rs
--- a/crates/oapi-macros/src/feature/items.rs
+++ b/crates/oapi-macros/src/feature/items.rs
@@ -1067,4 +1089,84 @@ impl From<Required> for Feature {
Self::Required(value)
}
}
-impl_name!(Required = "required");
+impl_get_name!(Required = "required");
+
+#[derive(Clone, Debug)]
+pub(crate) struct Alias {
+ pub(crate) name: String,
+ pub(crate) ty: Type,
+}
+
+// impl Alias {
+// pub(crate) fn get_lifetimes(&self) -> Result<impl Iterator<Item = &GenericArgument>, Diagnostic> {
+// fn lifetimes_from_type(ty: &Type) -> Result<impl Iterator<Item = &GenericArgument>, Diagnostic> {
+// match ty {
+// Type::Path(type_path) => Ok(type_path
+// .path
+// .segments
+// .iter()
+// .flat_map(|segment| match &segment.arguments {
+// PathArguments::AngleBracketed(angle_bracketed_args) => {
+// Some(angle_bracketed_args.args.iter())
+// }
+// _ => None,
+// })
+// .flatten()
+// .flat_map(|arg| match arg {
+// GenericArgument::Type(type_argument) => {
+// lifetimes_from_type(type_argument).map(|iter| iter.collect::<Vec<_>>())
+// }
+// _ => Ok(vec![arg]),
+// })
+// .flat_map(|args| args.into_iter().filter(|generic_arg| matches!(generic_arg, syn::GenericArgument::Lifetime(lifetime) if lifetime.ident != "'static"))),
+// ),
+// _ => Err(Diagnostic::spanned(ty.span(),DiagLevel::Error, "AliasSchema `get_lifetimes` only supports syn::TypePath types"))
+// }
+// }
+
+// lifetimes_from_type(&self.ty)
+// }
+// }
+
+impl syn::parse::Parse for Alias {
+ fn parse(input: syn::parse::ParseStream) -> syn::Result<Self> {
+ let name = input.parse::<Ident>()?;
+ input.parse::<Token![=]>()?;
+
+ Ok(Self {
+ name: name.to_string(),
+ ty: input.parse::<Type>()?,
+ })
+ }
+}
+
+// pub(super) fn parse_aliases(attributes: &[Attribute]) -> DiagResult<Option<Punctuated<Alias, Comma>>> {
+// attributes
+// .iter()
+// .find(|attribute| attribute.path().is_ident("aliases"))
+// .map(|aliases| aliases.parse_args_with(Punctuated::<Alias, Comma>::parse_terminated))
+// .transpose()
+// .map_err(Into::into)
+// }
+
+#[derive(Default, Clone, Debug)]
+pub(crate) struct Aliases(pub(crate) Punctuated<Alias, Comma>);
+
+impl Parse for Aliases {
+ fn parse(input: syn::parse::ParseStream, _: Ident) -> syn::Result<Self> {
+ parse_utils::parse_punctuated_within_parenthesis(input).map(Self)
+ }
+}
+
+// impl ToTokens for Aliases {
+// fn to_tokens(&self, stream: &mut TokenStream) {
+// stream.extend(self.0.to_token_stream())
+// }
+// }
+
+impl From<Aliases> for Feature {
+ fn from(value: Aliases) -> Self {
+ Feature::Aliases(value)
+ }
+}
+impl_get_name!(Aliases = "aliases");
diff --git a/crates/oapi-macros/src/feature/macros.rs b/crates/oapi-macros/src/feature/macros.rs
--- a/crates/oapi-macros/src/feature/macros.rs
+++ b/crates/oapi-macros/src/feature/macros.rs
@@ -1,8 +1,8 @@
use crate::feature::{items::*, Feature, Validatable};
-macro_rules! impl_name {
+macro_rules! impl_get_name {
( $ident:ident = $name:literal ) => {
- impl crate::feature::Name for $ident {
+ impl crate::feature::GetName for $ident {
fn get_name() -> &'static str {
$name
}
diff --git a/crates/oapi-macros/src/feature/macros.rs b/crates/oapi-macros/src/feature/macros.rs
--- a/crates/oapi-macros/src/feature/macros.rs
+++ b/crates/oapi-macros/src/feature/macros.rs
@@ -10,14 +10,14 @@ macro_rules! impl_name {
impl Display for $ident {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
- let name = <Self as crate::feature::Name>::get_name();
+ let name = <Self as crate::feature::GetName>::get_name();
write!(f, "{name}")
}
}
};
}
-pub(crate) use impl_name;
+pub(crate) use impl_get_name;
macro_rules! is_validatable {
( $( $ident:ident => $validatable:literal ),* $(,)?) => {
diff --git a/crates/oapi-macros/src/feature/macros.rs b/crates/oapi-macros/src/feature/macros.rs
--- a/crates/oapi-macros/src/feature/macros.rs
+++ b/crates/oapi-macros/src/feature/macros.rs
@@ -38,7 +38,9 @@ is_validatable! {
Format => false,
WriteOnly => false,
ReadOnly => false,
- Symbol => false,
+ Name => false,
+ Title => false,
+ Aliases => false,
Nullable => false,
Rename => false,
DefaultStyle => false,
diff --git a/crates/oapi-macros/src/feature/macros.rs b/crates/oapi-macros/src/feature/macros.rs
--- a/crates/oapi-macros/src/feature/macros.rs
+++ b/crates/oapi-macros/src/feature/macros.rs
@@ -77,7 +79,7 @@ macro_rules! parse_features {
($ident:ident as $( $feature:path ),* $(,)?) => {
{
fn parse(input: syn::parse::ParseStream) -> syn::Result<Vec<crate::feature::Feature>> {
- let names = [$( <crate::feature::parse_features!(@as_ident $feature) as crate::feature::Name>::get_name(), )* ];
+ let names = [$( <crate::feature::parse_features!(@as_ident $feature) as crate::feature::GetName>::get_name(), )* ];
let mut features = Vec::<crate::feature::Feature>::new();
let attributes = names.join(", ");
diff --git a/crates/oapi-macros/src/feature/macros.rs b/crates/oapi-macros/src/feature/macros.rs
--- a/crates/oapi-macros/src/feature/macros.rs
+++ b/crates/oapi-macros/src/feature/macros.rs
@@ -91,7 +93,7 @@ macro_rules! parse_features {
let name = &*ident.to_string();
$(
- if name == <crate::feature::parse_features!(@as_ident $feature) as crate::feature::Name>::get_name() {
+ if name == <crate::feature::parse_features!(@as_ident $feature) as crate::feature::GetName>::get_name() {
features.push(<$feature as crate::feature::Parse>::parse(input, ident)?.into());
if !input.is_empty() {
input.parse::<syn::Token![,]>()?;
diff --git a/crates/oapi-macros/src/feature/mod.rs b/crates/oapi-macros/src/feature/mod.rs
--- a/crates/oapi-macros/src/feature/mod.rs
+++ b/crates/oapi-macros/src/feature/mod.rs
@@ -43,7 +43,7 @@ where
})
}
-pub(crate) trait Name {
+pub(crate) trait GetName {
fn get_name() -> &'static str
where
Self: Sized;
diff --git a/crates/oapi-macros/src/feature/mod.rs b/crates/oapi-macros/src/feature/mod.rs
--- a/crates/oapi-macros/src/feature/mod.rs
+++ b/crates/oapi-macros/src/feature/mod.rs
@@ -77,7 +77,9 @@ pub(crate) enum Feature {
ValueType(ValueType),
WriteOnly(WriteOnly),
ReadOnly(ReadOnly),
- Symbol(Symbol),
+ Name(Name),
+ Title(Title),
+ Aliases(Aliases),
Nullable(Nullable),
Rename(Rename),
RenameAll(RenameAll),
diff --git a/crates/oapi-macros/src/feature/mod.rs b/crates/oapi-macros/src/feature/mod.rs
--- a/crates/oapi-macros/src/feature/mod.rs
+++ b/crates/oapi-macros/src/feature/mod.rs
@@ -174,7 +176,14 @@ impl TryToTokens for Feature {
}
Feature::WriteOnly(write_only) => quote! { .write_only(#write_only) },
Feature::ReadOnly(read_only) => quote! { .read_only(#read_only) },
- Feature::Symbol(symbol) => quote! { .symbol(#symbol) },
+ Feature::Name(name) => quote! { .name(#name) },
+ Feature::Title(title) => quote! { .title(#title) },
+ Feature::Aliases(_) => quote! {
+ return Err(Diagnostic::spanned(
+ Span::call_site(),
+ DiagLevel::Error,
+ "Aliases feature does not support `TryToTokens`",
+ )); },
Feature::Nullable(nullable) => quote! { .nullable(#nullable) },
Feature::Required(required) => quote! { .required(#required) },
Feature::Rename(rename) => rename.to_token_stream(),
diff --git a/crates/oapi-macros/src/feature/mod.rs b/crates/oapi-macros/src/feature/mod.rs
--- a/crates/oapi-macros/src/feature/mod.rs
+++ b/crates/oapi-macros/src/feature/mod.rs
@@ -217,28 +226,28 @@ impl TryToTokens for Feature {
return Err(Diagnostic::spanned(
Span::call_site(),
DiagLevel::Error,
- "RenameAll feature does not support `ToTokens`",
+ "RenameAll feature does not support `TryToTokens`",
));
}
Feature::ValueType(_) => {
return Err(Diagnostic::spanned(
Span::call_site(),
DiagLevel::Error,
- "ValueType feature does not support `ToTokens`",
+ "ValueType feature does not support `TryToTokens`",
)
.help(
"ValueType is supposed to be used with `TypeTree` in same manner as a resolved struct/field type.",
));
}
Feature::Inline(_) | Feature::SkipBound(_) | Feature::Bound(_) => {
- // inline, skip_bound and bound feature is ignored by `ToTokens`
+ // inline, skip_bound and bound feature is ignored by `TryToTokens`
TokenStream::new()
}
Feature::ToParametersNames(_) => {
return Err(Diagnostic::spanned(
Span::call_site(),
DiagLevel::Error,
- "Names feature does not support `ToTokens`"
+ "Names feature does not support `TryToTokens`"
).help(
"Names is only used with ToParameters to artificially give names for unnamed struct type `ToParameters`."
));
diff --git a/crates/oapi-macros/src/feature/mod.rs b/crates/oapi-macros/src/feature/mod.rs
--- a/crates/oapi-macros/src/feature/mod.rs
+++ b/crates/oapi-macros/src/feature/mod.rs
@@ -259,7 +268,9 @@ impl Display for Feature {
Feature::Format(format) => format.fmt(f),
Feature::WriteOnly(write_only) => write_only.fmt(f),
Feature::ReadOnly(read_only) => read_only.fmt(f),
- Feature::Symbol(symbol) => symbol.fmt(f),
+ Feature::Name(name) => name.fmt(f),
+ Feature::Title(title) => title.fmt(f),
+ Feature::Aliases(aliases) => aliases.fmt(f),
Feature::Nullable(nullable) => nullable.fmt(f),
Feature::Rename(rename) => rename.fmt(f),
Feature::Style(style) => style.fmt(f),
diff --git a/crates/oapi-macros/src/feature/mod.rs b/crates/oapi-macros/src/feature/mod.rs
--- a/crates/oapi-macros/src/feature/mod.rs
+++ b/crates/oapi-macros/src/feature/mod.rs
@@ -305,7 +316,9 @@ impl Validatable for Feature {
Feature::Format(format) => format.is_validatable(),
Feature::WriteOnly(write_only) => write_only.is_validatable(),
Feature::ReadOnly(read_only) => read_only.is_validatable(),
- Feature::Symbol(symbol) => symbol.is_validatable(),
+ Feature::Name(name) => name.is_validatable(),
+ Feature::Title(title) => title.is_validatable(),
+ Feature::Aliases(aliases) => aliases.is_validatable(),
Feature::Nullable(nullable) => nullable.is_validatable(),
Feature::Rename(rename) => rename.is_validatable(),
Feature::Style(style) => style.is_validatable(),
diff --git a/crates/oapi-macros/src/lib.rs b/crates/oapi-macros/src/lib.rs
--- a/crates/oapi-macros/src/lib.rs
+++ b/crates/oapi-macros/src/lib.rs
@@ -70,12 +70,13 @@ pub fn derive_to_schema(input: TokenStream) -> TokenStream {
ident,
data,
generics,
+ vis,
..
} = syn::parse_macro_input!(input);
- match ToSchema::new(&data, &attrs, &ident, &generics).try_to_token_stream() {
+ match ToSchema::new(&data, &attrs, &ident, &generics, &vis).and_then(|s| s.try_to_token_stream()) {
Ok(stream) => stream.into(),
- Err(diag) => diag.emit_as_expr_tokens().into(),
+ Err(diag) => diag.emit_as_item_tokens().into(),
}
}
diff --git a/crates/oapi-macros/src/lib.rs b/crates/oapi-macros/src/lib.rs
--- a/crates/oapi-macros/src/lib.rs
+++ b/crates/oapi-macros/src/lib.rs
@@ -101,7 +102,7 @@ pub fn derive_to_parameters(input: TokenStream) -> TokenStream {
.try_to_token_stream();
match stream {
Ok(stream) => stream.into(),
- Err(diag) => diag.emit_as_expr_tokens().into(),
+ Err(diag) => diag.emit_as_item_tokens().into(),
}
}
diff --git a/crates/oapi-macros/src/lib.rs b/crates/oapi-macros/src/lib.rs
--- a/crates/oapi-macros/src/lib.rs
+++ b/crates/oapi-macros/src/lib.rs
@@ -122,7 +123,7 @@ pub fn derive_to_response(input: TokenStream) -> TokenStream {
let stream = ToResponse::new(attrs, &data, generics, ident).and_then(|s| s.try_to_token_stream());
match stream {
Ok(stream) => stream.into(),
- Err(diag) => diag.emit_as_expr_tokens().into(),
+ Err(diag) => diag.emit_as_item_tokens().into(),
}
}
diff --git a/crates/oapi-macros/src/lib.rs b/crates/oapi-macros/src/lib.rs
--- a/crates/oapi-macros/src/lib.rs
+++ b/crates/oapi-macros/src/lib.rs
@@ -150,7 +151,7 @@ pub fn to_responses(input: TokenStream) -> TokenStream {
match stream {
Ok(stream) => stream.into(),
- Err(diag) => diag.emit_as_expr_tokens().into(),
+ Err(diag) => diag.emit_as_item_tokens().into(),
}
}
diff --git a/crates/oapi-macros/src/lib.rs b/crates/oapi-macros/src/lib.rs
--- a/crates/oapi-macros/src/lib.rs
+++ b/crates/oapi-macros/src/lib.rs
@@ -182,7 +183,7 @@ pub fn schema(input: TokenStream) -> TokenStream {
let schema = syn::parse_macro_input!(input as Schema);
let type_tree = match TypeTree::from_type(&schema.ty) {
Ok(type_tree) => type_tree,
- Err(diag) => return diag.emit_as_expr_tokens().into(),
+ Err(diag) => return diag.emit_as_item_tokens().into(),
};
let stream = ComponentSchema::new(ComponentSchemaProps {
diff --git a/crates/oapi-macros/src/lib.rs b/crates/oapi-macros/src/lib.rs
--- a/crates/oapi-macros/src/lib.rs
+++ b/crates/oapi-macros/src/lib.rs
@@ -191,12 +192,11 @@ pub fn schema(input: TokenStream) -> TokenStream {
deprecated: None,
description: None,
object_name: "",
- type_definition: false,
})
.map(|s| s.to_token_stream());
match stream {
Ok(stream) => stream.into(),
- Err(diag) => diag.emit_as_expr_tokens().into(),
+ Err(diag) => diag.emit_as_item_tokens().into(),
}
}
diff --git a/crates/oapi-macros/src/operation/request_body.rs b/crates/oapi-macros/src/operation/request_body.rs
--- a/crates/oapi-macros/src/operation/request_body.rs
+++ b/crates/oapi-macros/src/operation/request_body.rs
@@ -139,7 +139,6 @@ impl TryToTokens for RequestBodyAttr<'_> {
description: None,
deprecated: None,
object_name: "",
- type_definition: false,
})?
.to_token_stream()
}
diff --git a/crates/oapi-macros/src/parameter/derive.rs b/crates/oapi-macros/src/parameter/derive.rs
--- a/crates/oapi-macros/src/parameter/derive.rs
+++ b/crates/oapi-macros/src/parameter/derive.rs
@@ -592,7 +592,6 @@ impl TryToTokens for Parameter<'_> {
description: None,
deprecated: None,
object_name: "",
- type_definition: false,
})?
.to_token_stream();
diff --git a/crates/oapi-macros/src/parameter/mod.rs b/crates/oapi-macros/src/parameter/mod.rs
--- a/crates/oapi-macros/src/parameter/mod.rs
+++ b/crates/oapi-macros/src/parameter/mod.rs
@@ -110,7 +110,6 @@ impl TryToTokens for ParameterSchema<'_> {
description: None,
deprecated: None,
object_name: "",
- type_definition: false,
})?,
required,
)
diff --git a/crates/oapi-macros/src/response/derive.rs b/crates/oapi-macros/src/response/derive.rs
--- a/crates/oapi-macros/src/response/derive.rs
+++ b/crates/oapi-macros/src/response/derive.rs
@@ -191,7 +191,7 @@ trait Response {
if let Some(metas) = attribute::find_nested_list(attr, "response").ok().flatten() {
if let Ok(metas) = metas.parse_args_with(Punctuated::<Meta, Token![,]>::parse_terminated) {
for meta in metas {
- if meta.path().is_ident("symbol") || meta.path().is_ident("content") {
+ if meta.path().is_ident("name") || meta.path().is_ident("content") {
return (false, ERROR);
}
}
diff --git a/crates/oapi-macros/src/response/derive.rs b/crates/oapi-macros/src/response/derive.rs
--- a/crates/oapi-macros/src/response/derive.rs
+++ b/crates/oapi-macros/src/response/derive.rs
@@ -278,11 +278,12 @@ impl NamedStructResponse<'_> {
let inline_schema = NamedStructSchema {
attributes,
fields,
+ aliases: None,
features: None,
generics: None,
rename_all: None,
struct_name: Cow::Owned(ident.to_string()),
- symbol: None,
+ name: None,
inline: None,
};
diff --git a/crates/oapi-macros/src/response/derive.rs b/crates/oapi-macros/src/response/derive.rs
--- a/crates/oapi-macros/src/response/derive.rs
+++ b/crates/oapi-macros/src/response/derive.rs
@@ -352,12 +353,13 @@ impl<'p> ToResponseNamedStructResponse<'p> {
let inline_schema = NamedStructSchema {
fields,
+ aliases: None,
features: None,
generics: None,
attributes,
struct_name: Cow::Owned(ident.to_string()),
rename_all: None,
- symbol: None,
+ name: None,
inline: None,
};
let response_type = PathType::InlineSchema(inline_schema.try_to_token_stream()?, ty);
diff --git a/crates/oapi-macros/src/response/derive.rs b/crates/oapi-macros/src/response/derive.rs
--- a/crates/oapi-macros/src/response/derive.rs
+++ b/crates/oapi-macros/src/response/derive.rs
@@ -478,7 +480,7 @@ impl<'r> EnumResponse<'r> {
description,
});
response_value.response_type = if contents.is_empty() {
- let inline_schema = EnumSchema::new(Cow::Owned(ident.to_string()), variants, attributes)?;
+ let inline_schema = EnumSchema::new(Cow::Owned(ident.to_string()), variants, attributes, None, None)?;
Some(PathType::InlineSchema(inline_schema.try_to_token_stream()?, ty))
} else {
diff --git a/crates/oapi-macros/src/response/mod.rs b/crates/oapi-macros/src/response/mod.rs
--- a/crates/oapi-macros/src/response/mod.rs
+++ b/crates/oapi-macros/src/response/mod.rs
@@ -268,7 +268,6 @@ impl TryToTokens for ResponseTuple<'_> {
description: None,
deprecated: None,
object_name: "",
- type_definition: false,
})?
.to_token_stream()
}
diff --git a/crates/oapi-macros/src/response/mod.rs b/crates/oapi-macros/src/response/mod.rs
--- a/crates/oapi-macros/src/response/mod.rs
+++ b/crates/oapi-macros/src/response/mod.rs
@@ -818,7 +817,6 @@ impl TryToTokens for Header {
description: None,
deprecated: None,
object_name: "",
- type_definition: false,
})?
.to_token_stream();
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -2,14 +2,16 @@ use std::borrow::Cow;
use proc_macro2::TokenStream;
use quote::{quote, ToTokens};
+use syn::punctuated::Punctuated;
use syn::spanned::Spanned;
-use syn::{punctuated::Punctuated, Attribute, Fields, Token, Variant};
+use syn::token::Comma;
+use syn::{Attribute, Fields, Generics, Token, Variant};
use crate::{
doc_comment::CommentAttributes,
feature::{
- parse_features, pop_feature, pop_feature_as_inner, Bound, Example, Feature, FeaturesExt, IntoInner, IsSkipped,
- Rename, RenameAll, SkipBound, Symbol, TryToTokensExt,
+ parse_features, pop_feature, pop_feature_as_inner, Alias, Bound, Example, Feature, FeaturesExt, IntoInner,
+ IsSkipped, Name, Rename, RenameAll, SkipBound, TryToTokensExt,
},
schema::{Inline, VariantRename},
serde_util::{self, SerdeContainer, SerdeEnumRepr, SerdeValue},
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -31,7 +33,10 @@ use super::{
#[derive(Debug)]
pub(crate) struct EnumSchema<'a> {
pub(super) schema_type: EnumSchemaType<'a>,
- pub(super) symbol: Option<Symbol>,
+ pub(super) name: Option<Name>,
+ pub(super) aliases: Option<Punctuated<Alias, Comma>>,
+ #[allow(dead_code)]
+ pub(crate) generics: Option<&'a Generics>,
pub(super) inline: Option<Inline>,
}
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -40,6 +45,8 @@ impl<'e> EnumSchema<'e> {
enum_name: Cow<'e, str>,
variants: &'e Punctuated<Variant, Token![,]>,
attributes: &'e [Attribute],
+ aliases: Option<Punctuated<Alias, Comma>>,
+ generics: Option<&'e Generics>,
) -> DiagResult<Self> {
if variants.iter().all(|variant| matches!(variant.fields, Fields::Unit)) {
#[cfg(feature = "repr")]
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -58,13 +65,15 @@ impl<'e> EnumSchema<'e> {
Ok(parse_features!(
input as crate::feature::Example,
crate::feature::Default,
- crate::feature::Symbol,
+ crate::feature::Name,
+ crate::feature::Title,
crate::feature::Inline
))
})?
.unwrap_or_default();
- let symbol = pop_feature_as_inner!(repr_enum_features => Feature::Symbol(_v));
+ let name = pop_feature_as_inner!(repr_enum_features => Feature::Name(_v));
+
let inline: Option<Inline> = pop_feature_as_inner!(repr_enum_features => Feature::Inline(_v));
Ok(Self {
schema_type: EnumSchemaType::Repr(ReprEnum {
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -73,8 +82,10 @@ impl<'e> EnumSchema<'e> {
enum_type,
enum_features: repr_enum_features,
}),
- symbol,
+ name,
+ aliases: aliases.clone(),
inline,
+ generics,
})
})
.unwrap_or_else(|| {
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -82,10 +93,11 @@ impl<'e> EnumSchema<'e> {
.parse_features::<EnumFeatures>()?
.into_inner()
.unwrap_or_default();
+
+ let name = pop_feature_as_inner!(simple_enum_features => Feature::Name(_v));
+
let rename_all = simple_enum_features.pop_rename_all_feature();
- let symbol = pop_feature_as_inner!(simple_enum_features => Feature::Symbol(_v));
let inline: Option<Inline> = pop_feature_as_inner!(simple_enum_features => Feature::Inline(_v));
-
Ok(Self {
schema_type: EnumSchemaType::Simple(SimpleEnum {
attributes,
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -93,8 +105,10 @@ impl<'e> EnumSchema<'e> {
enum_features: simple_enum_features,
rename_all,
}),
- symbol,
+ name,
+ aliases,
inline,
+ generics,
})
})
}
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -105,10 +119,18 @@ impl<'e> EnumSchema<'e> {
.parse_features::<EnumFeatures>()?
.into_inner()
.unwrap_or_default();
+
+ let generic_count = generics.map(|g| g.type_params().count()).unwrap_or_default();
+ let name = pop_feature_as_inner!(simple_enum_features => Feature::Name(_v));
+ if generic_count == 0 && !aliases.as_ref().map(|a| a.is_empty()).unwrap_or(true) {
+ return Err(Diagnostic::new(
+ DiagLevel::Error,
+ "aliases are only allowed for generic types",
+ ));
+ }
+
let rename_all = simple_enum_features.pop_rename_all_feature();
- let symbol: Option<Symbol> = pop_feature_as_inner!(simple_enum_features => Feature::Symbol(_v));
let inline: Option<Inline> = pop_feature_as_inner!(simple_enum_features => Feature::Inline(_v));
-
Ok(Self {
schema_type: EnumSchemaType::Simple(SimpleEnum {
attributes,
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -116,8 +138,10 @@ impl<'e> EnumSchema<'e> {
enum_features: simple_enum_features,
rename_all,
}),
- symbol,
+ name,
+ aliases,
inline,
+ generics,
})
}
} else {
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -125,10 +149,18 @@ impl<'e> EnumSchema<'e> {
.parse_features::<ComplexEnumFeatures>()?
.into_inner()
.unwrap_or_default();
+
+ let generic_count = generics.map(|g| g.type_params().count()).unwrap_or_default();
+ let name = pop_feature_as_inner!(enum_features => Feature::Name(_v));
+ if generic_count == 0 && !aliases.as_ref().map(|a| a.is_empty()).unwrap_or(true) {
+ return Err(Diagnostic::new(
+ DiagLevel::Error,
+ "aliases are only allowed for generic types",
+ ));
+ }
+
let rename_all = enum_features.pop_rename_all_feature();
- let symbol: Option<Symbol> = pop_feature_as_inner!(enum_features => Feature::Symbol(_v));
let inline: Option<Inline> = pop_feature_as_inner!(enum_features => Feature::Inline(_v));
-
Ok(Self {
schema_type: EnumSchemaType::Complex(ComplexEnum {
enum_name,
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -137,8 +169,10 @@ impl<'e> EnumSchema<'e> {
rename_all,
enum_features,
}),
- symbol,
+ name,
+ aliases,
inline,
+ generics,
})
}
}
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -408,11 +442,11 @@ impl ComplexEnum<'_> {
// TODO need to be able to split variant.attrs for variant and the struct representation!
match &variant.fields {
Fields::Named(named_fields) => {
- let (symbol_features, mut named_struct_features) = variant
+ let (title_features, mut named_struct_features) = variant
.attrs
.parse_features::<EnumNamedFieldVariantFeatures>()?
.into_inner()
- .map(|features| features.split_for_symbol())
+ .map(|features| features.split_for_title())
.unwrap_or_default();
if named_struct_features.is_skipped() {
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -431,7 +465,7 @@ impl ComplexEnum<'_> {
Ok(self::enum_variant::Variant::to_tokens(&ObjectVariant {
name: variant_name.unwrap_or(Cow::Borrowed(&name)),
- symbol: symbol_features
+ title: title_features
.first()
.map(TryToTokens::try_to_token_stream)
.transpose()?,
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -443,18 +477,19 @@ impl ComplexEnum<'_> {
features: Some(named_struct_features),
fields: &named_fields.named,
generics: None,
- symbol: None,
+ name: None,
+ aliases: None,
inline: None,
}
.try_to_token_stream()?,
}))
}
Fields::Unnamed(unnamed_fields) => {
- let (symbol_features, mut unnamed_struct_features) = variant
+ let (title_features, mut unnamed_struct_features) = variant
.attrs
.parse_features::<EnumUnnamedFieldVariantFeatures>()?
.into_inner()
- .map(|features| features.split_for_symbol())
+ .map(|features| features.split_for_title())
.unwrap_or_default();
if unnamed_struct_features.is_skipped() {
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -473,7 +508,7 @@ impl ComplexEnum<'_> {
Ok(self::enum_variant::Variant::to_tokens(&ObjectVariant {
name: variant_name.unwrap_or(Cow::Borrowed(&name)),
- symbol: symbol_features
+ title: title_features
.first()
.map(TryToTokens::try_to_token_stream)
.transpose()?,
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -483,7 +518,8 @@ impl ComplexEnum<'_> {
attributes: &variant.attrs,
features: Some(unnamed_struct_features),
fields: &unnamed_fields.unnamed,
- symbol: None,
+ name: None,
+ aliases: None,
inline: None,
}
.try_to_token_stream()?,
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -492,7 +528,7 @@ impl ComplexEnum<'_> {
Fields::Unit => {
let mut unit_features = feature::parse_schema_features_with(&variant.attrs, |input| {
Ok(parse_features!(
- input as crate::feature::Symbol,
+ input as crate::feature::Title,
RenameAll,
Rename,
Example
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -504,7 +540,7 @@ impl ComplexEnum<'_> {
return Ok(TokenStream::new());
}
- let symbol = pop_feature!(unit_features => Feature::Symbol(_));
+ let title = pop_feature!(unit_features => Feature::Title(_));
let variant_name = rename_enum_variant(
name.as_ref(),
&mut unit_features,
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -521,8 +557,8 @@ impl ComplexEnum<'_> {
let mut sev = Enum::new([SimpleEnumVariant {
value: variant_name.unwrap_or(Cow::Borrowed(&name)).to_token_stream(),
}]);
- if let Some(symbol) = symbol {
- sev = sev.symbol(symbol.try_to_token_stream()?);
+ if let Some(title) = title {
+ sev = sev.title(title.try_to_token_stream()?);
}
if let Some(example) = example {
sev = sev.example(example.try_to_token_stream()?);
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -557,7 +593,8 @@ impl ComplexEnum<'_> {
features: Some(named_struct_features),
fields: &named_fields.named,
generics: None,
- symbol: None,
+ name: None,
+ aliases: None,
inline: None,
}
.try_to_token_stream()
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -578,14 +615,15 @@ impl ComplexEnum<'_> {
attributes: &variant.attrs,
features: Some(unnamed_struct_features),
fields: &unnamed_fields.unnamed,
- symbol: None,
+ name: None,
+ aliases: None,
inline: None,
}
.try_to_token_stream()
}
Fields::Unit => {
let mut unit_features = feature::parse_schema_features_with(&variant.attrs, |input| {
- Ok(parse_features!(input as crate::feature::Symbol))
+ Ok(parse_features!(input as crate::feature::Title))
})?
.unwrap_or_default();
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -593,9 +631,9 @@ impl ComplexEnum<'_> {
return Ok(TokenStream::new());
}
- let symbol = pop_feature!(unit_features => Feature::Symbol(_));
+ let title = pop_feature!(unit_features => Feature::Title(_));
- UntaggedEnum::with_symbol(symbol).try_to_token_stream()
+ UntaggedEnum::with_title(title).try_to_token_stream()
}
}
}
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -614,11 +652,11 @@ impl ComplexEnum<'_> {
let oapi = crate::oapi_crate();
match &variant.fields {
Fields::Named(named_fields) => {
- let (symbol_features, mut named_struct_features) = variant
+ let (title_features, mut named_struct_features) = variant
.attrs
.parse_features::<EnumNamedFieldVariantFeatures>()?
.into_inner()
- .map(|features| features.split_for_symbol())
+ .map(|features| features.split_for_title())
.unwrap_or_default();
if named_struct_features.is_skipped() {
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -640,11 +678,12 @@ impl ComplexEnum<'_> {
features: Some(named_struct_features),
fields: &named_fields.named,
generics: None,
- symbol: None,
+ name: None,
+ aliases: None,
inline: None,
}
.try_to_token_stream()?;
- let symbol = symbol_features
+ let title = title_features
.first()
.map(TryToTokens::try_to_token_stream)
.transpose()?;
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -654,18 +693,18 @@ impl ComplexEnum<'_> {
}]);
Ok(quote! {
#named_enum
- #symbol
+ #title
.property(#tag, #variant_name_tokens)
.required(#tag)
})
}
Fields::Unnamed(unnamed_fields) => {
if unnamed_fields.unnamed.len() == 1 {
- let (symbol_features, mut unnamed_struct_features) = variant
+ let (title_features, mut unnamed_struct_features) = variant
.attrs
.parse_features::<EnumUnnamedFieldVariantFeatures>()?
.into_inner()
- .map(|features| features.split_for_symbol())
+ .map(|features| features.split_for_title())
.unwrap_or_default();
if unnamed_struct_features.is_skipped() {
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -685,12 +724,13 @@ impl ComplexEnum<'_> {
attributes: &variant.attrs,
features: Some(unnamed_struct_features),
fields: &unnamed_fields.unnamed,
- symbol: None,
+ name: None,
+ aliases: None,
inline: None,
}
.try_to_token_stream()?;
- let symbol = symbol_features
+ let title = title_features
.first()
.map(TryToTokens::try_to_token_stream)
.transpose()?;
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -707,7 +747,7 @@ impl ComplexEnum<'_> {
if is_reference {
Ok(quote! {
#oapi::oapi::schema::AllOf::new()
- #symbol
+ #title
.item(#unnamed_enum)
.item(#oapi::oapi::schema::Object::new()
.schema_type(#oapi::oapi::schema::SchemaType::Object)
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -718,7 +758,7 @@ impl ComplexEnum<'_> {
} else {
Ok(quote! {
#unnamed_enum
- #symbol
+ #name
.schema_type(#oapi::oapi::schema::SchemaType::Object)
.property(#tag, #variant_name_tokens)
.required(#tag)
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -734,7 +774,7 @@ impl ComplexEnum<'_> {
}
Fields::Unit => {
let mut unit_features = feature::parse_schema_features_with(&variant.attrs, |input| {
- Ok(parse_features!(input as crate::feature::Symbol, Rename))
+ Ok(parse_features!(input as crate::feature::Title, Rename))
})?
.unwrap_or_default();
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -742,7 +782,7 @@ impl ComplexEnum<'_> {
return Ok(TokenStream::new());
}
- let symbol = pop_feature!(unit_features => Feature::Symbol(_))
+ let title = pop_feature!(unit_features => Feature::Title(_))
.map(|f| f.try_to_token_stream())
.transpose()?;
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -761,7 +801,7 @@ impl ComplexEnum<'_> {
Ok(quote! {
#oapi::oapi::schema::Object::new()
- #symbol
+ #title
.property(#tag, #variant_tokens)
.required(#tag)
})
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -784,11 +824,11 @@ impl ComplexEnum<'_> {
let oapi = crate::oapi_crate();
match &variant.fields {
Fields::Named(named_fields) => {
- let (symbol_features, mut named_struct_features) = variant
+ let (title_features, mut named_struct_features) = variant
.attrs
.parse_features::<EnumNamedFieldVariantFeatures>()?
.into_inner()
- .map(|features| features.split_for_symbol())
+ .map(|features| features.split_for_title())
.unwrap_or_default();
if named_struct_features.is_skipped() {
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -810,18 +850,19 @@ impl ComplexEnum<'_> {
features: Some(named_struct_features),
fields: &named_fields.named,
generics: None,
- symbol: None,
+ name: None,
+ aliases: None,
inline: None,
}
.try_to_token_stream()?;
- let symbol = symbol_features.first().map(|s| s.try_to_token_stream()).transpose()?;
+ let title = title_features.first().map(|s| s.try_to_token_stream()).transpose()?;
let variant_name_tokens = Enum::new([SimpleEnumVariant {
value: variant_name.unwrap_or(Cow::Borrowed(&name)).to_token_stream(),
}]);
Ok(quote! {
#oapi::oapi::schema::Object::new()
- #symbol
+ #title
.schema_type(#oapi::oapi::schema::SchemaType::Object)
.property(#tag, #variant_name_tokens)
.required(#tag)
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -831,11 +872,11 @@ impl ComplexEnum<'_> {
}
Fields::Unnamed(unnamed_fields) => {
if unnamed_fields.unnamed.len() == 1 {
- let (symbol_features, mut unnamed_struct_features) = variant
+ let (title_features, mut unnamed_struct_features) = variant
.attrs
.parse_features::<EnumUnnamedFieldVariantFeatures>()?
.into_inner()
- .map(|features| features.split_for_symbol())
+ .map(|features| features.split_for_title())
.unwrap_or_default();
if unnamed_struct_features.is_skipped() {
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -855,22 +896,25 @@ impl ComplexEnum<'_> {
attributes: &variant.attrs,
features: Some(unnamed_struct_features),
fields: &unnamed_fields.unnamed,
- symbol: None,
+ name: None,
+ aliases: None,
inline: None,
}
.try_to_token_stream()?;
- let symbol = symbol_features
+ let title = title_features
.first()
.map(TryToTokens::try_to_token_stream)
- .transpose()?;
+ .transpose()?
+ .map(|title| quote! { .title(#title)});
let variant_name_tokens = Enum::new([SimpleEnumVariant {
value: variant_name.unwrap_or(Cow::Borrowed(&name)).to_token_stream(),
}]);
Ok(quote! {
#oapi::oapi::schema::Object::new()
- #symbol
+ #name
+ #title
.schema_type(#oapi::oapi::schema::SchemaType::Object)
.property(#tag, #variant_name_tokens)
.required(#tag)
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -888,7 +932,7 @@ impl ComplexEnum<'_> {
Fields::Unit => {
// In this case `content` is simply ignored - there is nothing to put in it.
let mut unit_features = feature::parse_schema_features_with(&variant.attrs, |input| {
- Ok(parse_features!(input as crate::feature::Symbol, Rename))
+ Ok(parse_features!(input as crate::feature::Title, Rename))
})?
.unwrap_or_default();
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -896,7 +940,7 @@ impl ComplexEnum<'_> {
return Ok(TokenStream::new());
}
- let symbol = pop_feature!(unit_features => Feature::Symbol(_))
+ let title = pop_feature!(unit_features => Feature::Title(_))
.map(|f| f.try_to_token_stream())
.transpose()?;
diff --git a/crates/oapi-macros/src/schema/enum_schemas.rs b/crates/oapi-macros/src/schema/enum_schemas.rs
--- a/crates/oapi-macros/src/schema/enum_schemas.rs
+++ b/crates/oapi-macros/src/schema/enum_schemas.rs
@@ -915,7 +959,7 @@ impl ComplexEnum<'_> {
Ok(quote! {
#oapi::oapi::schema::Object::new()
- #symbol
+ #title
.property(#tag, #variant_tokens)
.required(#tag)
})
diff --git a/crates/oapi-macros/src/schema/enum_variant.rs b/crates/oapi-macros/src/schema/enum_variant.rs
--- a/crates/oapi-macros/src/schema/enum_variant.rs
+++ b/crates/oapi-macros/src/schema/enum_variant.rs
@@ -64,9 +64,9 @@ where
pub(crate) struct ObjectVariant<'o, T: ToTokens> {
pub(crate) item: T,
- pub(crate) symbol: Option<TokenStream>,
- pub(crate) example: Option<TokenStream>,
pub(crate) name: Cow<'o, str>,
+ pub(crate) title: Option<TokenStream>,
+ pub(crate) example: Option<TokenStream>,
}
impl<T> Variant for ObjectVariant<'_, T>
diff --git a/crates/oapi-macros/src/schema/enum_variant.rs b/crates/oapi-macros/src/schema/enum_variant.rs
--- a/crates/oapi-macros/src/schema/enum_variant.rs
+++ b/crates/oapi-macros/src/schema/enum_variant.rs
@@ -75,14 +75,14 @@ where
{
fn to_tokens(&self) -> TokenStream {
let oapi = crate::oapi_crate();
- let symbol = &self.symbol;
+ let name = &self.name;
+ let title = &self.title;
let example = &self.example;
let variant = &self.item;
- let name = &self.name;
quote! {
#oapi::oapi::schema::Object::new()
- #symbol
+ #title
#example
.property(#name, #variant)
.required(#name)
diff --git a/crates/oapi-macros/src/schema/enum_variant.rs b/crates/oapi-macros/src/schema/enum_variant.rs
--- a/crates/oapi-macros/src/schema/enum_variant.rs
+++ b/crates/oapi-macros/src/schema/enum_variant.rs
@@ -91,7 +91,7 @@ where
}
pub(crate) struct Enum<'e, V: Variant> {
- pub(crate) symbol: Option<TokenStream>,
+ pub(crate) title: Option<TokenStream>,
pub(crate) example: Option<TokenStream>,
len: usize,
items: Array<'e, TokenStream>,
diff --git a/crates/oapi-macros/src/schema/enum_variant.rs b/crates/oapi-macros/src/schema/enum_variant.rs
--- a/crates/oapi-macros/src/schema/enum_variant.rs
+++ b/crates/oapi-macros/src/schema/enum_variant.rs
@@ -106,8 +106,8 @@ impl<V: Variant> Enum<'_, V> {
items.into_iter().collect()
}
- pub(crate) fn symbol<I: Into<TokenStream>>(mut self, symbol: I) -> Self {
- self.symbol = Some(symbol.into());
+ pub(crate) fn title<I: Into<TokenStream>>(mut self, title: I) -> Self {
+ self.title = Some(title.into());
self
}
diff --git a/crates/oapi-macros/src/schema/enum_variant.rs b/crates/oapi-macros/src/schema/enum_variant.rs
--- a/crates/oapi-macros/src/schema/enum_variant.rs
+++ b/crates/oapi-macros/src/schema/enum_variant.rs
@@ -129,7 +129,7 @@ where
fn to_tokens(&self, stream: &mut proc_macro2::TokenStream) {
let oapi = crate::oapi_crate();
let len = &self.len;
- let symbol = &self.symbol;
+ let title = &self.title;
let example = &self.example;
let items = &self.items;
let schema_type = &self.schema_type;
diff --git a/crates/oapi-macros/src/schema/enum_variant.rs b/crates/oapi-macros/src/schema/enum_variant.rs
--- a/crates/oapi-macros/src/schema/enum_variant.rs
+++ b/crates/oapi-macros/src/schema/enum_variant.rs
@@ -138,7 +138,7 @@ where
stream.extend(quote! {
#oapi::oapi::Object::new()
- #symbol
+ #title
#description
#example
.schema_type(#schema_type)
diff --git a/crates/oapi-macros/src/schema/enum_variant.rs b/crates/oapi-macros/src/schema/enum_variant.rs
--- a/crates/oapi-macros/src/schema/enum_variant.rs
+++ b/crates/oapi-macros/src/schema/enum_variant.rs
@@ -166,7 +166,7 @@ impl<V: Variant> FromIterator<V> for Enum<'_, V> {
.collect::<Array<TokenStream>>();
Self {
- symbol: None,
+ title: None,
example: None,
description: None,
len,
diff --git a/crates/oapi-macros/src/schema/enum_variant.rs b/crates/oapi-macros/src/schema/enum_variant.rs
--- a/crates/oapi-macros/src/schema/enum_variant.rs
+++ b/crates/oapi-macros/src/schema/enum_variant.rs
@@ -243,29 +243,29 @@ impl<'t, V: Variant> FromIterator<(Cow<'t, str>, V)> for TaggedEnum<V> {
}
pub(crate) struct UntaggedEnum {
- symbol: Option<Feature>,
+ title: Option<Feature>,
}
impl UntaggedEnum {
pub(crate) fn new() -> Self {
- Self { symbol: None }
+ Self { title: None }
}
- pub(crate) fn with_symbol(symbol: Option<Feature>) -> Self {
- Self { symbol }
+ pub(crate) fn with_title(title: Option<Feature>) -> Self {
+ Self { title }
}
}
impl TryToTokens for UntaggedEnum {
fn try_to_tokens(&self, tokens: &mut TokenStream) -> DiagResult<()> {
let oapi = crate::oapi_crate();
- let symbol = self.symbol.as_ref().map(|f| f.try_to_token_stream()).transpose()?;
+ let title = self.title.as_ref().map(|f| f.try_to_token_stream()).transpose()?;
tokens.extend(quote! {
#oapi::oapi::schema::Object::new()
.nullable(true)
.default_value(#oapi::oapi::__private::serde_json::Value::Null)
- #symbol
+ #title
});
Ok(())
}
diff --git a/crates/oapi-macros/src/schema/feature.rs b/crates/oapi-macros/src/schema/feature.rs
--- a/crates/oapi-macros/src/schema/feature.rs
+++ b/crates/oapi-macros/src/schema/feature.rs
@@ -2,10 +2,10 @@ use syn::parse::{Parse, ParseBuffer, ParseStream};
use syn::Attribute;
use crate::feature::{
- impl_into_inner, impl_merge, parse_features, AdditionalProperties, Bound, Default, Deprecated, Example,
+ impl_into_inner, impl_merge, parse_features, AdditionalProperties, Aliases, Bound, Default, Deprecated, Example,
ExclusiveMaximum, ExclusiveMinimum, Feature, Format, Inline, IntoInner, MaxItems, MaxLength, MaxProperties,
- Maximum, Merge, MinItems, MinLength, MinProperties, Minimum, MultipleOf, Nullable, Pattern, ReadOnly, Rename,
- RenameAll, Required, SchemaWith, Skip, SkipBound, Symbol, ValueType, WriteOnly, XmlAttr,
+ Maximum, Merge, MinItems, MinLength, MinProperties, Minimum, MultipleOf, Name, Nullable, Pattern, ReadOnly, Rename,
+ RenameAll, Required, SchemaWith, Skip, SkipBound, Title, ValueType, WriteOnly, XmlAttr,
};
use crate::{attribute, DiagResult, Diagnostic};
diff --git a/crates/oapi-macros/src/schema/feature.rs b/crates/oapi-macros/src/schema/feature.rs
--- a/crates/oapi-macros/src/schema/feature.rs
+++ b/crates/oapi-macros/src/schema/feature.rs
@@ -17,7 +17,9 @@ impl Parse for NamedFieldStructFeatures {
Ok(NamedFieldStructFeatures(parse_features!(
input as Example,
XmlAttr,
- Symbol,
+ Name,
+ Title,
+ Aliases,
RenameAll,
MaxProperties,
MinProperties,
diff --git a/crates/oapi-macros/src/schema/feature.rs b/crates/oapi-macros/src/schema/feature.rs
--- a/crates/oapi-macros/src/schema/feature.rs
+++ b/crates/oapi-macros/src/schema/feature.rs
@@ -41,7 +43,9 @@ impl Parse for UnnamedFieldStructFeatures {
Ok(UnnamedFieldStructFeatures(parse_features!(
input as Example,
Default,
- Symbol,
+ Name,
+ Title,
+ Aliases,
Format,
ValueType,
Inline,
diff --git a/crates/oapi-macros/src/schema/feature.rs b/crates/oapi-macros/src/schema/feature.rs
--- a/crates/oapi-macros/src/schema/feature.rs
+++ b/crates/oapi-macros/src/schema/feature.rs
@@ -62,7 +66,9 @@ impl Parse for EnumFeatures {
Ok(EnumFeatures(parse_features!(
input as Example,
Default,
- Symbol,
+ Name,
+ Title,
+ Aliases,
RenameAll,
Inline,
Deprecated,
diff --git a/crates/oapi-macros/src/schema/feature.rs b/crates/oapi-macros/src/schema/feature.rs
--- a/crates/oapi-macros/src/schema/feature.rs
+++ b/crates/oapi-macros/src/schema/feature.rs
@@ -82,7 +88,9 @@ impl Parse for ComplexEnumFeatures {
input as Example,
Default,
RenameAll,
- Symbol,
+ Name,
+ Title,
+ Aliases,
Inline,
Deprecated,
Bound,
diff --git a/crates/oapi-macros/src/schema/feature.rs b/crates/oapi-macros/src/schema/feature.rs
--- a/crates/oapi-macros/src/schema/feature.rs
+++ b/crates/oapi-macros/src/schema/feature.rs
@@ -136,7 +144,7 @@ impl Parse for EnumNamedFieldVariantFeatures {
Ok(EnumNamedFieldVariantFeatures(parse_features!(
input as Example,
XmlAttr,
- Symbol,
+ Title,
Rename,
RenameAll,
Deprecated,
diff --git a/crates/oapi-macros/src/schema/feature.rs b/crates/oapi-macros/src/schema/feature.rs
--- a/crates/oapi-macros/src/schema/feature.rs
+++ b/crates/oapi-macros/src/schema/feature.rs
@@ -154,7 +162,7 @@ impl Parse for EnumUnnamedFieldVariantFeatures {
Ok(EnumUnnamedFieldVariantFeatures(parse_features!(
input as Example,
Default,
- Symbol,
+ Title,
Format,
ValueType,
Rename,
diff --git a/crates/oapi-macros/src/schema/flattened_map_schema.rs b/crates/oapi-macros/src/schema/flattened_map_schema.rs
--- a/crates/oapi-macros/src/schema/flattened_map_schema.rs
+++ b/crates/oapi-macros/src/schema/flattened_map_schema.rs
@@ -17,7 +17,6 @@ impl FlattenedMapSchema {
description,
deprecated,
object_name,
- type_definition,
}: ComponentSchemaProps,
) -> DiagResult<Self> {
let mut tokens = TokenStream::new();
diff --git a/crates/oapi-macros/src/schema/flattened_map_schema.rs b/crates/oapi-macros/src/schema/flattened_map_schema.rs
--- a/crates/oapi-macros/src/schema/flattened_map_schema.rs
+++ b/crates/oapi-macros/src/schema/flattened_map_schema.rs
@@ -51,7 +50,6 @@ impl FlattenedMapSchema {
description: None,
deprecated: None,
object_name,
- type_definition,
})?;
tokens.extend(quote! {
diff --git a/crates/oapi-macros/src/schema/mod.rs b/crates/oapi-macros/src/schema/mod.rs
--- a/crates/oapi-macros/src/schema/mod.rs
+++ b/crates/oapi-macros/src/schema/mod.rs
@@ -1,9 +1,3 @@
-use std::borrow::Cow;
-
-use proc_macro2::{Ident, TokenStream};
-use quote::{quote, ToTokens};
-use syn::{parse_quote, Attribute, Data, Fields, FieldsNamed, FieldsUnnamed, Generics};
-
mod enum_schemas;
mod enum_variant;
mod feature;
diff --git a/crates/oapi-macros/src/schema/mod.rs b/crates/oapi-macros/src/schema/mod.rs
--- a/crates/oapi-macros/src/schema/mod.rs
+++ b/crates/oapi-macros/src/schema/mod.rs
@@ -11,6 +5,14 @@ mod flattened_map_schema;
mod struct_schemas;
mod xml;
+use std::borrow::Cow;
+
+use proc_macro2::{Ident, TokenStream};
+use quote::{quote, ToTokens};
+use syn::punctuated::Punctuated;
+use syn::token::Comma;
+use syn::{parse_quote, Attribute, Data, Fields, FieldsNamed, FieldsUnnamed, Generics, Visibility};
+
pub(crate) use self::{
enum_schemas::*,
feature::{FromAttributes, NamedFieldStructFeatures, UnnamedFieldStructFeatures},
diff --git a/crates/oapi-macros/src/schema/mod.rs b/crates/oapi-macros/src/schema/mod.rs
--- a/crates/oapi-macros/src/schema/mod.rs
+++ b/crates/oapi-macros/src/schema/mod.rs
@@ -23,7 +25,8 @@ use super::{
feature::{pop_feature_as_inner, Feature, FeaturesExt, IntoInner},
ComponentSchema, FieldRename, VariantRename,
};
-use crate::feature::{Bound, Inline, SkipBound, Symbol};
+use crate::feature::{Alias, Bound, Inline, Name, SkipBound};
+use crate::schema::feature::EnumFeatures;
use crate::serde_util::SerdeValue;
use crate::{bound, DiagLevel, DiagResult, Diagnostic, TryToTokens};
diff --git a/crates/oapi-macros/src/schema/mod.rs b/crates/oapi-macros/src/schema/mod.rs
--- a/crates/oapi-macros/src/schema/mod.rs
+++ b/crates/oapi-macros/src/schema/mod.rs
@@ -32,7 +35,8 @@ pub(crate) struct ToSchema<'a> {
attributes: &'a [Attribute],
generics: &'a Generics,
data: &'a Data,
- // vis: &'a Visibility,
+ // aliases: Option<Punctuated<AliasSchema, Comma>>,
+ //vis: &'a Visibility,
}
impl<'a> ToSchema<'a> {
diff --git a/crates/oapi-macros/src/schema/mod.rs b/crates/oapi-macros/src/schema/mod.rs
--- a/crates/oapi-macros/src/schema/mod.rs
+++ b/crates/oapi-macros/src/schema/mod.rs
@@ -41,15 +45,16 @@ impl<'a> ToSchema<'a> {
attributes: &'a [Attribute],
ident: &'a Ident,
generics: &'a Generics,
- // vis: &'a Visibility,
- ) -> Self {
- Self {
+ _vis: &'a Visibility,
+ ) -> DiagResult<Self> {
+ Ok(Self {
data,
ident,
attributes,
generics,
+ // aliases,
// vis,
- }
+ })
}
}
diff --git a/crates/oapi-macros/src/schema/mod.rs b/crates/oapi-macros/src/schema/mod.rs
--- a/crates/oapi-macros/src/schema/mod.rs
+++ b/crates/oapi-macros/src/schema/mod.rs
@@ -57,31 +62,37 @@ impl TryToTokens for ToSchema<'_> {
fn try_to_tokens(&self, tokens: &mut TokenStream) -> DiagResult<()> {
let oapi = crate::oapi_crate();
let ident = self.ident;
- let mut variant = SchemaVariant::new(self.data, self.attributes, ident, self.generics)?;
+ let mut variant = SchemaVariant::new(
+ self.data,
+ self.attributes,
+ ident,
+ self.generics,
+ // None::<Vec<(TypeTree, &TypeTree)>>,
+ )?;
- let (_, ty_generics, _) = self.generics.split_for_impl();
+ let aliases = variant.aliases();
+ let (_, ty_generics, _) = self.generics.split_for_impl();
let inline = variant.inline().as_ref().map(|i| i.0).unwrap_or(false);
- let symbol = if inline {
- None
- } else if let Some(symbol) = variant.symbol() {
- if self.generics.type_params().next().is_none() {
- Some(quote! { #symbol.to_string().replace(" :: ", ".") })
- } else {
- Some(quote! {
- {
- let full_name = std::any::type_name::<#ident #ty_generics>();
- if let Some((_, args)) = full_name.split_once('<') {
- format!("{}<{}", #symbol, args)
- } else {
- full_name.into()
- }
- }
- })
- }
- } else {
- Some(quote! { std::any::type_name::<#ident #ty_generics>().replace("::", ".") })
- };
+
+ let type_aliases = aliases
+ .as_ref()
+ .map(|aliases| {
+ aliases
+ .iter()
+ .map(|alias| {
+ let name = quote::format_ident!("{}", alias.name).to_string();
+ let ty = &alias.ty;
+
+ Ok(quote! {
+ if ::std::any::TypeId::of::<Self>() == ::std::any::TypeId::of::<#ty>() {
+ name = Some(#oapi::oapi::schema::naming::assign_name::<#ty>(#oapi::oapi::schema::naming::NameRule::Force(#name)));
+ }
+ })
+ })
+ .collect::<DiagResult<TokenStream>>()
+ })
+ .transpose()?;
let skip_bound = variant.pop_skip_bound();
let bound = if skip_bound == Some(SkipBound(true)) {
diff --git a/crates/oapi-macros/src/schema/mod.rs b/crates/oapi-macros/src/schema/mod.rs
--- a/crates/oapi-macros/src/schema/mod.rs
+++ b/crates/oapi-macros/src/schema/mod.rs
@@ -100,18 +111,43 @@ impl TryToTokens for ToSchema<'_> {
let (impl_generics, _, where_clause) = generics.split_for_impl();
+ let name_rule = if inline {
+ None
+ } else if let Some(name) = variant.name() {
+ let name = name.0.path.to_token_stream();
+ let name = quote!(#name).to_string();
+ Some(quote! { #oapi::oapi::schema::naming::NameRule::Force(#name) })
+ } else {
+ Some(quote! { #oapi::oapi::schema::naming::NameRule::Auto })
+ };
let variant = variant.try_to_token_stream()?;
- let body = match symbol {
+ let body = match name_rule {
None => {
quote! {
#variant.into()
}
}
- Some(symbol) => {
+ Some(name_rule) => {
+ let name_tokens = if type_aliases.is_some() {
+ quote! {
+ let mut name = None;
+ #type_aliases
+ let name = name.unwrap_or_else(||#oapi::oapi::schema::naming::assign_name::<#ident #ty_generics>(#name_rule));
+ }
+ } else {
+ quote! {
+ let name = #oapi::oapi::schema::naming::assign_name::<#ident #ty_generics>(#name_rule);
+ }
+ };
quote! {
- let schema = #variant;
- components.schemas.insert(#symbol, schema.into());
- #oapi::oapi::RefOr::Ref(#oapi::oapi::Ref::new(format!("#/components/schemas/{}", #symbol)))
+ #name_tokens
+ let ref_or = #oapi::oapi::RefOr::Ref(#oapi::oapi::Ref::new(format!("#/components/schemas/{}", name)));
+ if !components.schemas.contains_key(&name) {
+ components.schemas.insert(name.clone(), ref_or.clone());
+ let schema = #variant;
+ components.schemas.insert(name, schema);
+ }
+ ref_or
}
}
};
diff --git a/crates/oapi-macros/src/schema/mod.rs b/crates/oapi-macros/src/schema/mod.rs
--- a/crates/oapi-macros/src/schema/mod.rs
+++ b/crates/oapi-macros/src/schema/mod.rs
@@ -147,23 +183,45 @@ impl<'a> SchemaVariant<'a> {
let FieldsUnnamed { unnamed, .. } = fields;
let mut unnamed_features = attributes.parse_features::<UnnamedFieldStructFeatures>()?.into_inner();
- let symbol = pop_feature_as_inner!(unnamed_features => Feature::Symbol(_v));
+ let name = pop_feature_as_inner!(unnamed_features => Feature::Name(_v));
+ let aliases = pop_feature_as_inner!(unnamed_features => Feature::Aliases(_v));
+ if generics.type_params().count() == 0 && !aliases.as_ref().map(|a| a.0.is_empty()).unwrap_or(true)
+ {
+ return Err(Diagnostic::spanned(
+ ident.span(),
+ DiagLevel::Error,
+ "aliases are only allowed for generic types",
+ ));
+ }
+
let inline = pop_feature_as_inner!(unnamed_features => Feature::Inline(_v));
Ok(Self::Unnamed(UnnamedStructSchema {
struct_name: Cow::Owned(ident.to_string()),
attributes,
features: unnamed_features,
fields: unnamed,
- symbol,
+ name,
+ aliases: aliases.map(|a| a.0),
inline,
}))
}
Fields::Named(fields) => {
let FieldsNamed { named, .. } = fields;
- let mut named_features = attributes.parse_features::<NamedFieldStructFeatures>()?.into_inner();
- let symbol = pop_feature_as_inner!(named_features => Feature::Symbol(_v));
- let inline = pop_feature_as_inner!(named_features => Feature::Inline(_v));
+ let mut named_features: Option<Vec<Feature>> =
+ attributes.parse_features::<NamedFieldStructFeatures>()?.into_inner();
+
+ let generic_count = generics.type_params().count();
+ let name = pop_feature_as_inner!(named_features => Feature::Name(_v));
+ let aliases = pop_feature_as_inner!(named_features => Feature::Aliases(_v));
+ if generic_count == 0 && !aliases.as_ref().map(|a| a.0.is_empty()).unwrap_or(true) {
+ return Err(Diagnostic::spanned(
+ ident.span(),
+ DiagLevel::Error,
+ "aliases are only allowed for generic types",
+ ));
+ }
+ let inline = pop_feature_as_inner!(named_features => Feature::Inline(_v));
Ok(Self::Named(NamedStructSchema {
struct_name: Cow::Owned(ident.to_string()),
attributes,
diff --git a/crates/oapi-macros/src/schema/mod.rs b/crates/oapi-macros/src/schema/mod.rs
--- a/crates/oapi-macros/src/schema/mod.rs
+++ b/crates/oapi-macros/src/schema/mod.rs
@@ -171,17 +229,24 @@ impl<'a> SchemaVariant<'a> {
features: named_features,
fields: named,
generics: Some(generics),
- symbol,
+ name,
+ aliases: aliases.map(|a| a.0),
inline,
}))
}
Fields::Unit => Ok(Self::Unit(UnitStructVariant)),
},
- Data::Enum(content) => Ok(Self::Enum(EnumSchema::new(
- Cow::Owned(ident.to_string()),
- &content.variants,
- attributes,
- )?)),
+ Data::Enum(content) => {
+ let mut enum_features: Option<Vec<Feature>> = attributes.parse_features::<EnumFeatures>()?.into_inner();
+ let aliases = pop_feature_as_inner!(enum_features => Feature::Aliases(_v));
+ Ok(Self::Enum(EnumSchema::new(
+ Cow::Owned(ident.to_string()),
+ &content.variants,
+ attributes,
+ aliases.map(|a| a.0),
+ Some(generics),
+ )?))
+ }
_ => Err(Diagnostic::spanned(
ident.span(),
DiagLevel::Error,
diff --git a/crates/oapi-macros/src/schema/mod.rs b/crates/oapi-macros/src/schema/mod.rs
--- a/crates/oapi-macros/src/schema/mod.rs
+++ b/crates/oapi-macros/src/schema/mod.rs
@@ -190,20 +255,28 @@ impl<'a> SchemaVariant<'a> {
}
}
- fn symbol(&self) -> &Option<Symbol> {
+ fn name(&self) -> Option<&Name> {
+ match self {
+ Self::Enum(schema) => schema.name.as_ref(),
+ Self::Named(schema) => schema.name.as_ref(),
+ Self::Unnamed(schema) => schema.name.as_ref(),
+ _ => None,
+ }
+ }
+ fn inline(&self) -> Option<&Inline> {
match self {
- Self::Enum(schema) => &schema.symbol,
- Self::Named(schema) => &schema.symbol,
- Self::Unnamed(schema) => &schema.symbol,
- _ => &None,
+ Self::Enum(schema) => schema.inline.as_ref(),
+ Self::Named(schema) => schema.inline.as_ref(),
+ Self::Unnamed(schema) => schema.inline.as_ref(),
+ _ => None,
}
}
- fn inline(&self) -> &Option<Inline> {
+ fn aliases(&self) -> Option<&Punctuated<Alias, Comma>> {
match self {
- Self::Enum(schema) => &schema.inline,
- Self::Named(schema) => &schema.inline,
- Self::Unnamed(schema) => &schema.inline,
- _ => &None,
+ Self::Enum(schema) => schema.aliases.as_ref(),
+ Self::Named(schema) => schema.aliases.as_ref(),
+ Self::Unnamed(schema) => schema.aliases.as_ref(),
+ _ => None,
}
}
fn pop_skip_bound(&mut self) -> Option<SkipBound> {
diff --git a/crates/oapi-macros/src/schema/mod.rs b/crates/oapi-macros/src/schema/mod.rs
--- a/crates/oapi-macros/src/schema/mod.rs
+++ b/crates/oapi-macros/src/schema/mod.rs
@@ -274,13 +347,13 @@ impl TryToTokens for Property {
}
trait SchemaFeatureExt {
- fn split_for_symbol(self) -> (Vec<Feature>, Vec<Feature>);
+ fn split_for_title(self) -> (Vec<Feature>, Vec<Feature>);
}
impl SchemaFeatureExt for Vec<Feature> {
- fn split_for_symbol(self) -> (Vec<Feature>, Vec<Feature>) {
+ fn split_for_title(self) -> (Vec<Feature>, Vec<Feature>) {
self.into_iter()
- .partition(|feature| matches!(feature, Feature::Symbol(_)))
+ .partition(|feature| matches!(feature, Feature::Title(_)))
}
}
diff --git a/crates/oapi-macros/src/schema/struct_schemas.rs b/crates/oapi-macros/src/schema/struct_schemas.rs
--- a/crates/oapi-macros/src/schema/struct_schemas.rs
+++ b/crates/oapi-macros/src/schema/struct_schemas.rs
@@ -2,14 +2,15 @@ use std::borrow::Cow;
use proc_macro2::TokenStream;
use quote::{format_ident, quote, ToTokens};
+use syn::token::Comma;
use syn::{punctuated::Punctuated, spanned::Spanned, Attribute, Field, Generics, Token};
use crate::{
component::ComponentSchemaProps,
doc_comment::CommentAttributes,
feature::{
- pop_feature, pop_feature_as_inner, Bound, Feature, FeaturesExt, IntoInner, IsSkipped, RenameAll, SkipBound,
- Symbol, TryToTokensExt,
+ pop_feature, pop_feature_as_inner, Alias, Bound, Feature, FeaturesExt, IntoInner, IsSkipped, Name, RenameAll,
+ SkipBound, TryToTokensExt,
},
schema::Inline,
serde_util::{self, SerdeContainer},
diff --git a/crates/oapi-macros/src/schema/struct_schemas.rs b/crates/oapi-macros/src/schema/struct_schemas.rs
--- a/crates/oapi-macros/src/schema/struct_schemas.rs
+++ b/crates/oapi-macros/src/schema/struct_schemas.rs
@@ -31,7 +32,8 @@ pub(crate) struct NamedStructSchema<'a> {
pub(crate) rename_all: Option<RenameAll>,
#[allow(dead_code)]
pub(crate) generics: Option<&'a Generics>,
- pub(crate) symbol: Option<Symbol>,
+ pub(crate) name: Option<Name>,
+ pub(crate) aliases: Option<Punctuated<Alias, Token![,]>>,
pub(crate) inline: Option<Inline>,
}
diff --git a/crates/oapi-macros/src/schema/struct_schemas.rs b/crates/oapi-macros/src/schema/struct_schemas.rs
--- a/crates/oapi-macros/src/schema/struct_schemas.rs
+++ b/crates/oapi-macros/src/schema/struct_schemas.rs
@@ -56,6 +58,7 @@ impl NamedStructSchema<'_> {
container_rules: &Option<SerdeContainer>,
) -> DiagResult<NamedStructFieldOptions<'_>> {
let type_tree = &mut TypeTree::from_type(&field.ty)?;
+
let mut field_features = field.attrs.parse_features::<NamedFieldFeatures>()?.into_inner();
let schema_default = self
diff --git a/crates/oapi-macros/src/schema/struct_schemas.rs b/crates/oapi-macros/src/schema/struct_schemas.rs
--- a/crates/oapi-macros/src/schema/struct_schemas.rs
+++ b/crates/oapi-macros/src/schema/struct_schemas.rs
@@ -112,7 +115,6 @@ impl NamedStructSchema<'_> {
description: Some(&comments),
deprecated: deprecated.as_ref(),
object_name: self.struct_name.as_ref(),
- type_definition: true,
};
if flatten && type_tree.is_map() {
Property::FlattenedMap(FlattenedMapSchema::new(cs)?)
diff --git a/crates/oapi-macros/src/schema/struct_schemas.rs b/crates/oapi-macros/src/schema/struct_schemas.rs
--- a/crates/oapi-macros/src/schema/struct_schemas.rs
+++ b/crates/oapi-macros/src/schema/struct_schemas.rs
@@ -305,7 +307,8 @@ pub(super) struct UnnamedStructSchema<'a> {
pub(super) fields: &'a Punctuated<Field, Token![,]>,
pub(super) attributes: &'a [Attribute],
pub(super) features: Option<Vec<Feature>>,
- pub(super) symbol: Option<Symbol>,
+ pub(super) name: Option<Name>,
+ pub(super) aliases: Option<Punctuated<Alias, Comma>>,
pub(super) inline: Option<Inline>,
}
impl UnnamedStructSchema<'_> {
diff --git a/crates/oapi-macros/src/schema/struct_schemas.rs b/crates/oapi-macros/src/schema/struct_schemas.rs
--- a/crates/oapi-macros/src/schema/struct_schemas.rs
+++ b/crates/oapi-macros/src/schema/struct_schemas.rs
@@ -365,7 +368,6 @@ impl TryToTokens for UnnamedStructSchema<'_> {
description: Some(&CommentAttributes::from_attributes(self.attributes)),
deprecated: deprecated.as_ref(),
object_name: self.struct_name.as_ref(),
- type_definition: true,
})?
.to_token_stream(),
);
diff --git a/crates/oapi/Cargo.toml b/crates/oapi/Cargo.toml
--- a/crates/oapi/Cargo.toml
+++ b/crates/oapi/Cargo.toml
@@ -41,6 +41,7 @@ indexmap = { workspace = true, features = ["serde"] }
inventory = { workspace = true }
mime-infer = { workspace = true }
once_cell = { workspace = true }
+parking_lot = { workspace = true }
serde = { workspace = true }
serde_json = { workspace = true }
regex = { workspace = true }
diff --git a/crates/oapi/docs/derive_to_schema.md b/crates/oapi/docs/derive_to_schema.md
--- a/crates/oapi/docs/derive_to_schema.md
+++ b/crates/oapi/docs/derive_to_schema.md
@@ -433,10 +433,10 @@ enum Random {
}
```
-_**Add `symbol` to the enum.**_
+_**Add `name` to the enum.**_
```
#[derive(salvo_oapi::ToSchema)]
-#[salvo(schema(symbol = "UserType"))]
+#[salvo(schema(name = UserType))]
enum UserType {
Admin,
Moderator,
diff --git a/crates/oapi/docs/derive_to_schema.md b/crates/oapi/docs/derive_to_schema.md
--- a/crates/oapi/docs/derive_to_schema.md
+++ b/crates/oapi/docs/derive_to_schema.md
@@ -476,11 +476,11 @@ struct Value {
}
```
-_**Use `as` attribute to change the name and the path of the schema in the generated OpenAPI
+_**Use `name` attribute to change the name and the path of the schema in the generated OpenAPI
spec.**_
```
#[derive(salvo_oapi::ToSchema)]
- #[salvo(schema(symbol = "api::models::person::Person"))]
+ #[salvo(schema(name = api::models::person::Person))]
struct Person {
name: String,
}
diff --git a/crates/oapi/src/endpoint.rs b/crates/oapi/src/endpoint.rs
--- a/crates/oapi/src/endpoint.rs
+++ b/crates/oapi/src/endpoint.rs
@@ -141,7 +141,7 @@ impl<'a> EndpointOutRegister for &'a String {
}
}
-/// A component for all endpoints.
+/// A registry for all endpoints.
#[doc(hidden)]
#[non_exhaustive]
pub struct EndpointRegistry {
diff --git a/crates/oapi/src/endpoint.rs b/crates/oapi/src/endpoint.rs
--- a/crates/oapi/src/endpoint.rs
+++ b/crates/oapi/src/endpoint.rs
@@ -152,11 +152,11 @@ pub struct EndpointRegistry {
}
impl EndpointRegistry {
- /// Save the endpoint information to the components.
+ /// Save the endpoint information to the registry.
pub const fn save(type_id: fn() -> TypeId, creator: fn() -> Endpoint) -> Self {
Self { type_id, creator }
}
- /// Find the endpoint information from the components.
+ /// Find the endpoint information from the registry.
pub fn find(type_id: &TypeId) -> Option<fn() -> Endpoint> {
for record in inventory::iter::<EndpointRegistry> {
if (record.type_id)() == *type_id {
diff --git a/crates/oapi/src/lib.rs b/crates/oapi/src/lib.rs
--- a/crates/oapi/src/lib.rs
+++ b/crates/oapi/src/lib.rs
@@ -125,6 +125,14 @@ pub trait ToSchema {
/// Returns a tuple of name and schema or reference to a schema that can be referenced by the
/// name or inlined directly to responses, request bodies or parameters.
fn to_schema(components: &mut Components) -> RefOr<schema::Schema>;
+
+ // /// Optional set of alias schemas for the [`ToSchema::schema`].
+ // ///
+ // /// Typically there is no need to manually implement this method but it is instead implemented
+ // /// by derive [`macro@ToSchema`] when `#[aliases(...)]` attribute is defined.
+ // fn aliases() -> Vec<schema::Schema> {
+ // Vec::new()
+ // }
}
/// Represents _`nullable`_ type. This can be used anywhere where "nothing" needs to be evaluated.
diff --git a/crates/oapi/src/lib.rs b/crates/oapi/src/lib.rs
--- a/crates/oapi/src/lib.rs
+++ b/crates/oapi/src/lib.rs
@@ -282,21 +290,25 @@ impl<K: ToSchema, V: ToSchema> ToSchema for HashMap<K, V> {
impl ToSchema for StatusError {
fn to_schema(components: &mut Components) -> RefOr<schema::Schema> {
- let symbol = std::any::type_name::<StatusError>().replace("::", ".");
- let schema = Schema::from(
- Object::new()
- .property("code", u16::to_schema(components))
- .required("code")
- .required("name")
- .property("name", String::to_schema(components))
- .required("brief")
- .property("brief", String::to_schema(components))
- .required("detail")
- .property("detail", String::to_schema(components))
- .property("cause", String::to_schema(components)),
- );
- components.schemas.insert(symbol.clone(), schema.into());
- crate::RefOr::Ref(crate::Ref::new(format!("#/components/schemas/{}", symbol)))
+ let name = crate::schema::naming::assign_name::<StatusError>(Default::default());
+ let ref_or = crate::RefOr::Ref(crate::Ref::new(format!("#/components/schemas/{}", name)));
+ if !components.schemas.contains_key(&name) {
+ components.schemas.insert(name.clone(), ref_or.clone());
+ let schema = Schema::from(
+ Object::new()
+ .property("code", u16::to_schema(components))
+ .required("code")
+ .required("name")
+ .property("name", String::to_schema(components))
+ .required("brief")
+ .property("brief", String::to_schema(components))
+ .required("detail")
+ .property("detail", String::to_schema(components))
+ .property("cause", String::to_schema(components)),
+ );
+ components.schemas.insert(name, schema);
+ }
+ ref_or
}
}
impl ToSchema for salvo_core::Error {
diff --git a/crates/oapi/src/lib.rs b/crates/oapi/src/lib.rs
--- a/crates/oapi/src/lib.rs
+++ b/crates/oapi/src/lib.rs
@@ -311,12 +323,16 @@ where
E: ToSchema,
{
fn to_schema(components: &mut Components) -> RefOr<schema::Schema> {
- let symbol = std::any::type_name::<Self>().replace("::", ".");
- let schema = OneOf::new()
- .item(T::to_schema(components))
- .item(E::to_schema(components));
- components.schemas.insert(symbol.clone(), schema.into());
- crate::RefOr::Ref(crate::Ref::new(format!("#/components/schemas/{}", symbol)))
+ let name = crate::schema::naming::assign_name::<StatusError>(Default::default());
+ let ref_or = crate::RefOr::Ref(crate::Ref::new(format!("#/components/schemas/{}", name)));
+ if !components.schemas.contains_key(&name) {
+ components.schemas.insert(name.clone(), ref_or.clone());
+ let schema = OneOf::new()
+ .item(T::to_schema(components))
+ .item(E::to_schema(components));
+ components.schemas.insert(name, schema);
+ }
+ ref_or
}
}
diff --git a/crates/oapi/src/openapi/components.rs b/crates/oapi/src/openapi/components.rs
--- a/crates/oapi/src/openapi/components.rs
+++ b/crates/oapi/src/openapi/components.rs
@@ -6,7 +6,7 @@ use std::collections::BTreeMap;
use serde::{Deserialize, Serialize};
-use crate::{RefOr, Response, Responses, Schema, SecurityScheme};
+use crate::{RefOr, Response, Responses, Schema, Schemas, SecurityScheme};
/// Implements [OpenAPI Components Object][components] which holds supported
/// reusable objects.
diff --git a/crates/oapi/src/openapi/components.rs b/crates/oapi/src/openapi/components.rs
--- a/crates/oapi/src/openapi/components.rs
+++ b/crates/oapi/src/openapi/components.rs
@@ -81,7 +81,7 @@ impl Components {
///
/// Accepts two arguments where first is name of the schema and second is the schema itself.
pub fn add_schema<S: Into<String>, I: Into<RefOr<Schema>>>(mut self, name: S, schema: I) -> Self {
- self.schemas.insert(name.into(), schema.into());
+ self.schemas.insert(name, schema);
self
}
diff --git a/crates/oapi/src/openapi/mod.rs b/crates/oapi/src/openapi/mod.rs
--- a/crates/oapi/src/openapi/mod.rs
+++ b/crates/oapi/src/openapi/mod.rs
@@ -18,7 +18,7 @@ pub use self::{
path::{PathItem, PathItemType, Paths},
request_body::RequestBody,
response::{Response, Responses},
- schema::{Array, Discriminator, KnownFormat, Object, Ref, Schema, SchemaFormat, SchemaType, ToArray},
+ schema::{Array, Discriminator, KnownFormat, Object, Ref, Schema, SchemaFormat, SchemaType, Schemas, ToArray},
security::{SecurityRequirement, SecurityScheme},
server::{Server, ServerVariable, ServerVariables, Servers},
tag::Tag,
diff --git a/crates/oapi/src/openapi/mod.rs b/crates/oapi/src/openapi/mod.rs
--- a/crates/oapi/src/openapi/mod.rs
+++ b/crates/oapi/src/openapi/mod.rs
@@ -267,7 +267,7 @@ impl OpenApi {
///
/// Accepts two arguments where first is name of the schema and second is the schema itself.
pub fn add_schema<S: Into<String>, I: Into<RefOr<Schema>>>(mut self, name: S, schema: I) -> Self {
- self.components.schemas.insert(name.into(), schema.into());
+ self.components.schemas.insert(name, schema);
self
}
diff --git a/crates/oapi/src/openapi/schema/mod.rs b/crates/oapi/src/openapi/schema/mod.rs
--- a/crates/oapi/src/openapi/schema/mod.rs
+++ b/crates/oapi/src/openapi/schema/mod.rs
@@ -20,6 +16,102 @@ pub use array::{Array, ToArray};
pub use object::Object;
pub use one_of::OneOf;
+use std::collections::BTreeMap;
+use std::ops::{Deref, DerefMut};
+
+use serde::{Deserialize, Serialize};
+
+use crate::RefOr;
+
+/// Schemas collection for OpenApi.
+#[derive(Serialize, Deserialize, Default, Clone, Debug, PartialEq)]
+#[serde(rename_all = "camelCase")]
+pub struct Schemas(pub BTreeMap<String, RefOr<Schema>>);
+
+impl<K, R> From<BTreeMap<K, R>> for Schemas
+where
+ K: Into<String>,
+ R: Into<RefOr<Schema>>,
+{
+ fn from(inner: BTreeMap<K, R>) -> Self {
+ Self(inner.into_iter().map(|(k, v)| (k.into(), v.into())).collect())
+ }
+}
+impl<K, R, const N: usize> From<[(K, R); N]> for Schemas
+where
+ K: Into<String>,
+ R: Into<RefOr<Schema>>,
+{
+ fn from(inner: [(K, R); N]) -> Self {
+ Self(
+ <[(K, R)]>::into_vec(Box::new(inner))
+ .into_iter()
+ .map(|(k, v)| (k.into(), v.into()))
+ .collect(),
+ )
+ }
+}
+
+impl Deref for Schemas {
+ type Target = BTreeMap<String, RefOr<Schema>>;
+
+ fn deref(&self) -> &Self::Target {
+ &self.0
+ }
+}
+
+impl DerefMut for Schemas {
+ fn deref_mut(&mut self) -> &mut Self::Target {
+ &mut self.0
+ }
+}
+
+impl IntoIterator for Schemas {
+ type Item = (String, RefOr<Schema>);
+ type IntoIter = <BTreeMap<String, RefOr<Schema>> as IntoIterator>::IntoIter;
+
+ fn into_iter(self) -> Self::IntoIter {
+ self.0.into_iter()
+ }
+}
+
+impl Schemas {
+ /// Construct a new empty [`Schemas`]. This is effectively same as calling [`Schemas::default`].
+ pub fn new() -> Self {
+ Default::default()
+ }
+ /// Inserts a key-value pair into the instance and returns `self`.
+ pub fn schema<K: Into<String>, V: Into<RefOr<Schema>>>(mut self, key: K, value: V) -> Self {
+ self.insert(key, value);
+ self
+ }
+ /// Inserts a key-value pair into the instance.
+ pub fn insert<K: Into<String>, V: Into<RefOr<Schema>>>(&mut self, key: K, value: V) {
+ self.0.insert(key.into(), value.into());
+ }
+ /// Moves all elements from `other` into `self`, leaving `other` empty.
+ ///
+ /// If a key from `other` is already present in `self`, the respective
+ /// value from `self` will be overwritten with the respective value from `other`.
+ pub fn append(&mut self, other: &mut Schemas) {
+ let items = std::mem::take(&mut other.0);
+ for item in items {
+ self.insert(item.0, item.1);
+ }
+ }
+ /// Extends a collection with the contents of an iterator.
+ pub fn extend<I, K, V>(&mut self, iter: I)
+ where
+ I: IntoIterator<Item = (K, V)>,
+ K: Into<String>,
+ V: Into<RefOr<Schema>>,
+ {
+ for (k, v) in iter.into_iter() {
+ self.insert(k, v);
+ }
+ }
+}
+
/// Create an _`empty`_ [`Schema`] that serializes to _`null`_.
///
/// Can be used in places where an item can be serialized as `null`. This is used with unit type
diff --git /dev/null b/crates/oapi/src/openapi/schema/naming.rs
new file mode 100644
--- /dev/null
+++ b/crates/oapi/src/openapi/schema/naming.rs
@@ -0,0 +1,143 @@
+use std::any::TypeId;
+use std::collections::BTreeMap;
+
+use once_cell::sync::Lazy;
+use parking_lot::{RwLock, RwLockReadGuard};
+use regex::Regex;
+
+/// NameRule is used to specify the rule of naming.
+#[derive(Default, Debug, Clone, Copy)]
+pub enum NameRule {
+ /// Auto generate name by namer.
+ #[default]
+ Auto,
+ /// Force to use the given name.
+ Force(&'static str),
+}
+
+static GLOBAL_NAMER: Lazy<RwLock<Box<dyn Namer>>> = Lazy::new(|| RwLock::new(Box::new(WordyNamer::new())));
+static GLOBAL_NAMES: Lazy<RwLock<BTreeMap<String, (TypeId, &'static str)>>> = Lazy::new(Default::default);
+
+/// Set global namer.
+pub fn set_namer(namer: impl Namer) {
+ *GLOBAL_NAMER.write() = Box::new(namer);
+}
+
+#[doc(hidden)]
+pub fn namer() -> RwLockReadGuard<'static, Box<dyn Namer>> {
+ GLOBAL_NAMER.read()
+}
+
+fn type_info_by_name(name: &str) -> Option<(TypeId, &'static str)> {
+ GLOBAL_NAMES.read().get(name).cloned()
+}
+fn set_name_type_info(name: String, type_id: TypeId, type_name: &'static str) -> Option<(TypeId, &'static str)> {
+ GLOBAL_NAMES.write().insert(name.clone(), (type_id, type_name))
+}
+
+/// Assign name to type and returns the name. If the type is already named, return the existing name.
+pub fn assign_name<T: 'static>(rule: NameRule) -> String {
+ let type_id = TypeId::of::<T>();
+ let type_name = std::any::type_name::<T>();
+ for (name, (exist_id, _)) in GLOBAL_NAMES.read().iter() {
+ if *exist_id == type_id {
+ return name.clone();
+ }
+ }
+ namer().assign_name(type_id, type_name, rule)
+}
+
+/// Get the name of the type. Panic if the name is not exist.
+pub fn get_name<T: 'static>() -> String {
+ let type_id = TypeId::of::<T>();
+ for (name, (exist_id, _)) in GLOBAL_NAMES.read().iter() {
+ if *exist_id == type_id {
+ return name.clone();
+ }
+ }
+ panic!("Type not found in the name registry: {:?}", std::any::type_name::<T>());
+}
+
+fn type_generic_part(type_name: &str) -> String {
+ let re = Regex::new(r"^[^<]+").unwrap();
+ let result = re.replace_all(type_name, "");
+ result.to_string()
+}
+/// Namer is used to assign names to types.
+pub trait Namer: Sync + Send + 'static {
+ /// Assign name to type.
+ fn assign_name(&self, type_id: TypeId, type_name: &'static str, rule: NameRule) -> String;
+}
+
+/// A namer that generates wordy names.
+pub struct WordyNamer;
+impl WordyNamer {
+ /// Create a new WordyNamer.
+ pub fn new() -> Self {
+ Self
+ }
+}
+impl Namer for WordyNamer {
+ fn assign_name(&self, type_id: TypeId, type_name: &'static str, rule: NameRule) -> String {
+ let name = match rule {
+ NameRule::Auto => {
+ let base = type_name.replace("::", ".");
+ let mut name = base.to_string();
+ let mut count = 1;
+ while type_info_by_name(&name).map(|t| t.0) == Some(type_id) {
+ name = format!("{}{}", base, count);
+ count += 1;
+ }
+ name
+ }
+ NameRule::Force(name) => {
+ let name = format! {"{}{}", name, type_generic_part(type_name)};
+ if let Some((exist_id, exist_name)) = type_info_by_name(&name) {
+ if exist_id != type_id {
+ panic!("Duplicate name for types: {}, {}", exist_name, type_name);
+ }
+ }
+ name.to_string()
+ }
+ };
+ set_name_type_info(name.clone(), type_id, type_name);
+ name
+ }
+}
+
+/// A namer that generates short names.
+pub struct ShortNamer;
+impl ShortNamer {
+ /// Create a new ShortNamer.
+ pub fn new() -> Self {
+ Self
+ }
+}
+impl Namer for ShortNamer {
+ fn assign_name(&self, type_id: TypeId, type_name: &'static str, rule: NameRule) -> String {
+ let name: String = match rule {
+ NameRule::Auto => {
+ let re = Regex::new(r"([^:<>]+::)+").unwrap();
+ let base = re.replace_all(type_name, "");
+ let mut name = base.to_string();
+ let mut count = 1;
+ while type_info_by_name(&name).map(|t| t.0) == Some(type_id) {
+ name = format!("{}{}", base, count);
+ count += 1;
+ }
+ name
+ }
+ NameRule::Force(name) => {
+ let name = format! {"{}{}", name, type_generic_part(type_name)};
+ if let Some((exist_id, exist_name)) = type_info_by_name(&name) {
+ if exist_id != type_id {
+ panic!("Duplicate name for types: {}, {}", exist_name, type_name);
+ }
+ }
+ name.to_string()
+ }
+ };
+ set_name_type_info(name.clone(), type_id, type_name);
+ name
+ }
+}
diff --git a/crates/oapi/src/openapi/schema/object.rs b/crates/oapi/src/openapi/schema/object.rs
--- a/crates/oapi/src/openapi/schema/object.rs
+++ b/crates/oapi/src/openapi/schema/object.rs
@@ -30,9 +30,9 @@ pub struct Object {
#[serde(rename = "type")]
pub schema_type: SchemaType,
- /// Changes the [`Object`] symbol.
+ /// Changes the [`Object`] name.
#[serde(skip_serializing_if = "Option::is_none")]
- pub symbol: Option<String>,
+ pub name: Option<String>,
/// Additional format for detailing the schema type.
#[serde(skip_serializing_if = "Option::is_none")]
diff --git a/crates/oapi/src/openapi/schema/object.rs b/crates/oapi/src/openapi/schema/object.rs
--- a/crates/oapi/src/openapi/schema/object.rs
+++ b/crates/oapi/src/openapi/schema/object.rs
@@ -199,9 +199,9 @@ impl Object {
self
}
- /// Add or change the symbol of the [`Object`].
- pub fn symbol(mut self, symbol: impl Into<String>) -> Self {
- self.symbol = Some(symbol.into());
+ /// Add or change the name of the [`Object`].
+ pub fn name(mut self, name: impl Into<String>) -> Self {
+ self.name = Some(name.into());
self
}
diff --git a/examples/extract-data/src/main.rs b/examples/extract-data/src/main.rs
--- a/examples/extract-data/src/main.rs
+++ b/examples/extract-data/src/main.rs
@@ -55,7 +55,6 @@ struct BadMan<'a> {
default_source(from = "query"),
default_source(from = "param"),
default_source(from = "body"),
- rename_all = "camelCase"
))]
struct GoodMan<'a> {
id: i64,
diff --git a/examples/oapi-generics/src/main.rs b/examples/oapi-generics/src/main.rs
--- a/examples/oapi-generics/src/main.rs
+++ b/examples/oapi-generics/src/main.rs
@@ -3,6 +3,7 @@ use salvo::prelude::*;
use serde::{Deserialize, Serialize};
#[derive(Serialize, Deserialize, ToSchema, Debug)]
+#[salvo(schema(aliases(MyI32 = MyObject<i32>, MyStr = MyObject<String>)))]
struct MyObject<T: ToSchema + std::fmt::Debug + 'static> {
value: T,
}
| diff --git a/crates/oapi-macros/src/lib.rs b/crates/oapi-macros/src/lib.rs
--- a/crates/oapi-macros/src/lib.rs
+++ b/crates/oapi-macros/src/lib.rs
@@ -217,10 +217,10 @@ mod tests {
};
let item = parse2(input).unwrap();
assert_eq!(
- endpoint::generate(parse2(quote!{}).unwrap(), item)
+ endpoint::generate(parse2(quote! {}).unwrap(), item)
.unwrap()
.to_string(),
- quote!{
+ quote! {
#[allow(non_camel_case_types)]
#[derive(Debug)]
struct hello;
diff --git a/crates/oapi-macros/src/lib.rs b/crates/oapi-macros/src/lib.rs
--- a/crates/oapi-macros/src/lib.rs
+++ b/crates/oapi-macros/src/lib.rs
@@ -248,9 +248,7 @@ mod tests {
fn __macro_gen_oapi_endpoint_creator_hello() -> salvo::oapi::Endpoint {
let mut components = salvo::oapi::Components::new();
let status_codes: &[salvo::http::StatusCode] = &[];
- fn modify(components: &mut salvo::oapi::Components, operation: &mut salvo::oapi::Operation) {}
let mut operation = salvo::oapi::Operation::new();
- modify(&mut components, &mut operation);
if operation.operation_id.is_none() {
operation.operation_id = Some(::std::any::type_name::<hello>().replace("::", "."));
}
diff --git a/crates/oapi-macros/src/lib.rs b/crates/oapi-macros/src/lib.rs
--- a/crates/oapi-macros/src/lib.rs
+++ b/crates/oapi-macros/src/lib.rs
@@ -269,7 +267,9 @@ mod tests {
components,
}
}
- salvo::oapi::__private::inventory::submit! { salvo :: oapi :: EndpointRegistry :: save (__macro_gen_oapi_endpoint_type_id_hello , __macro_gen_oapi_endpoint_creator_hello) }
+ salvo::oapi::__private::inventory::submit! {
+ salvo::oapi::EndpointRegistry::save(__macro_gen_oapi_endpoint_type_id_hello, __macro_gen_oapi_endpoint_creator_hello)
+ }
}
.to_string()
);
diff --git a/crates/oapi/src/openapi/components.rs b/crates/oapi/src/openapi/components.rs
--- a/crates/oapi/src/openapi/components.rs
+++ b/crates/oapi/src/openapi/components.rs
@@ -23,7 +23,7 @@ pub struct Components {
///
/// [schema]: https://spec.openapis.org/oas/latest.html#schema-object
#[serde(skip_serializing_if = "BTreeMap::is_empty", default)]
- pub schemas: BTreeMap<String, RefOr<Schema>>,
+ pub schemas: Schemas,
/// Map of reusable response name, to [OpenAPI Response Object][response]s or [OpenAPI
/// Reference][reference]s to [OpenAPI Response Object][response]s.
diff --git a/crates/oapi/src/openapi/mod.rs b/crates/oapi/src/openapi/mod.rs
--- a/crates/oapi/src/openapi/mod.rs
+++ b/crates/oapi/src/openapi/mod.rs
@@ -1143,7 +1143,7 @@ mod tests {
#[test]
fn test_openapi_schema_work_with_generics() {
#[derive(Serialize, Deserialize, Clone, Debug, ToSchema)]
- #[salvo(schema(symbol = "City"))]
+ #[salvo(schema(name = City))]
pub(crate) struct CityDTO {
#[salvo(schema(rename = "id"))]
pub(crate) id: String,
diff --git a/crates/oapi/src/openapi/mod.rs b/crates/oapi/src/openapi/mod.rs
--- a/crates/oapi/src/openapi/mod.rs
+++ b/crates/oapi/src/openapi/mod.rs
@@ -1152,7 +1152,7 @@ mod tests {
}
#[derive(Serialize, Deserialize, Debug, ToSchema)]
- #[salvo(schema(symbol = "Response"))]
+ #[salvo(schema(name = Response))]
pub(crate) struct ApiResponse<T: Serialize + ToSchema + Send + Debug + 'static> {
#[salvo(schema(rename = "status"))]
/// status code
diff --git a/crates/oapi/src/openapi/schema/mod.rs b/crates/oapi/src/openapi/schema/mod.rs
--- a/crates/oapi/src/openapi/schema/mod.rs
+++ b/crates/oapi/src/openapi/schema/mod.rs
@@ -2,15 +2,11 @@
//! used to define field properties, enum values, array or object types.
//!
//! [schema]: https://spec.openapis.org/oas/latest.html#schema-object
-use std::collections::BTreeMap;
-
-use serde::{Deserialize, Serialize};
-
-use crate::RefOr;
-
mod all_of;
mod any_of;
mod array;
+/// Module for name schemas.
+pub mod naming;
mod object;
mod one_of;
diff --git a/crates/oapi/src/openapi/schema/mod.rs b/crates/oapi/src/openapi/schema/mod.rs
--- a/crates/oapi/src/openapi/schema/mod.rs
+++ b/crates/oapi/src/openapi/schema/mod.rs
@@ -477,13 +569,13 @@ mod tests {
}
#[test]
- fn test_object_with_symbol() {
- let json_value = Object::new().symbol("SomeName");
+ fn test_object_with_name() {
+ let json_value = Object::new().name("SomeName");
assert_json_eq!(
json_value,
json!({
"type": "object",
- "symbol": "SomeName"
+ "name": "SomeName"
})
);
}
| Find a way to rename OpenAPI object and operation globally
On the swagger spec the operationIds are very long
<img width="975" alt="Screenshot 2024-01-10 at 18 47 46" src="https://github.com/salvo-rs/salvo/assets/6940726/d3169bc1-287c-481d-94aa-cbd561ba54b7">
This results in very long auto generated code
<img width="739" alt="Screenshot 2024-01-10 at 18 48 07" src="https://github.com/salvo-rs/salvo/assets/6940726/ca20c83e-9bb6-4366-8742-eaf40ad007d9">
We need a way to reduce it.
| You can rename your operation_id for example:
```rust
#[endpoint(operation_id="AAAAAAA", tags("todos"), status_codes(200, 404))]
pub async fn update_todo(id: PathParam<u64>, updated: JsonBody<Todo>) -> Result<StatusCode, StatusError> {
...
}
```
@chrislearn yes indeed but it’s also on objects like return types; would be nice to configure the root so it’s smaller across the whole project automatically :-) | 2021-05-10T01:52:42 | 0.11
"tests::test_handler_for_fn"
] | [] | [] | [] |
salvo-rs/salvo | 25 | salvo-rs__salvo-25 | [
"24"
] | 9f6ca8954cf4435f061e5f83fe25064b57815ccc | diff --git a/core/src/routing/filter/impls/path.rs b/core/src/routing/filter/impls/path.rs
--- a/core/src/routing/filter/impls/path.rs
+++ b/core/src/routing/filter/impls/path.rs
@@ -57,6 +57,14 @@ where
C: Fn(char) -> bool + Sync + Send + 'static,
{
fn build(&self, name: String, _sign: String, args: Vec<String>) -> Result<Box<dyn PathPart>, String> {
+ if args.is_empty() {
+ return Ok(Box::new(CharPart {
+ name,
+ checker: self.0.clone(),
+ min_width: 1,
+ max_width: None,
+ }));
+ }
let ps = args[0].splitn(2, "..").map(|s| s.trim()).collect::<Vec<_>>();
let (min_width, max_width) = if ps.is_empty() {
(1, None)
| diff --git a/core/src/routing/filter/impls/path.rs b/core/src/routing/filter/impls/path.rs
--- a/core/src/routing/filter/impls/path.rs
+++ b/core/src/routing/filter/impls/path.rs
@@ -738,7 +746,15 @@ mod tests {
);
}
#[test]
- fn test_parse_num() {
+ fn test_parse_num0() {
+ let segments = PathParser::new(r"/first<id:num>").parse().unwrap();
+ assert_eq!(
+ format!("{:?}", segments),
+ r#"[CombPart([ConstPart("first"), CharPart { name: "id", min_width: 1, max_width: None }])]"#
+ );
+ }
+ #[test]
+ fn test_parse_num1() {
let segments = PathParser::new(r"/first<id:num(10)>").parse().unwrap();
assert_eq!(
format!("{:?}", segments),
| master/examples/routing.rs Compilation fails
- https://github.com/salvo-rs/salvo/blob/master/examples/routing.rs
**dependencies**
```
rustc 1.51.0 (2fd73fabe 2021-03-23)
Deepin GNU/Linux 20.2
[dependencies]
salvo = { version = "0.11", features = ["full"] }
tokio = { version = "1", features = ["full"] }
```
**code**
```
use salvo::prelude::*;
#[tokio::main]
async fn main() {
let debug_mode = true;
let admin_mode = true;
let router = Router::new()
.get(index)
.push(
Router::new()
.path("users")
.before(auth)
.post(create_user)
.push(Router::new().path(r"<id:num>").post(update_user).delete(delete_user)),
)
.push(
Router::new()
.path("users")
.get(list_users)
.push(Router::new().path(r"<id:num>").get(show_user)),
)
.then(|router| {
if debug_mode {
router.push(Router::new().path("debug").get(debug))
} else {
router
}
})
.then(|router| {
if admin_mode {
router.push(Router::new().path("admin").get(admin))
} else {
router
}
})
;
Server::new(router).bind(([0, 0, 0, 0], 7878)).await;
}
#[fn_handler]
async fn admin(res: &mut Response) {
res.render_plain_text("Admin page");
}
#[fn_handler]
async fn debug(res: &mut Response) {
res.render_plain_text("Debug page");
}
#[fn_handler]
async fn index(res: &mut Response) {
res.render_plain_text("Hello world!");
}
#[fn_handler]
async fn auth(res: &mut Response) {
res.render_plain_text("user has authed\n\n");
}
#[fn_handler]
async fn list_users(res: &mut Response) {
res.render_plain_text("list users");
}
#[fn_handler]
async fn show_user(res: &mut Response) {
res.render_plain_text("show user");
}
#[fn_handler]
async fn create_user(res: &mut Response) {
res.render_plain_text("user created");
}
#[fn_handler]
async fn update_user(res: &mut Response) {
res.render_plain_text("user updated");
}
#[fn_handler]
async fn delete_user(res: &mut Response) {
res.render_plain_text("user deleted");
}
```
**run**
```
github@github:~/Workspace/demo1$ RUST_BACKTRACE=1 cargo run
warning: unused variable: `debug_mode`
--> src/router.rs:7:9
|
7 | let debug_mode = true;
| ^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_debug_mode`
|
= note: `#[warn(unused_variables)]` on by default
warning: unused variable: `admin_mode`
--> src/router.rs:8:9
|
8 | let admin_mode = true;
| ^^^^^^^^^^ help: if this is intentional, prefix it with an underscore: `_admin_mode`
warning: 2 warnings emitted
Finished dev [unoptimized + debuginfo] target(s) in 0.10s
Running `target/debug/demo1`
thread 'main' panicked at 'index out of bounds: the len is 0 but the index is 0', /home/github/.cargo/registry/src/github.com-1ecc6299db9ec823/salvo_core-0.11.1/src/routing/filter/impls/path.rs:60:18
stack backtrace:
0: rust_begin_unwind
at /rustc/2fd73fabe469357a12c2c974c140f67e7cdd76d0/library/std/src/panicking.rs:493:5
1: core::panicking::panic_fmt
at /rustc/2fd73fabe469357a12c2c974c140f67e7cdd76d0/library/core/src/panicking.rs:92:14
2: core::panicking::panic_bounds_check
at /rustc/2fd73fabe469357a12c2c974c140f67e7cdd76d0/library/core/src/panicking.rs:69:5
3: <usize as core::slice::index::SliceIndex<[T]>>::index
at /home/github/.rustup/toolchains/stable-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library/core/src/slice/index.rs:182:10
4: core::slice::index::<impl core::ops::index::Index<I> for [T]>::index
at /home/github/.rustup/toolchains/stable-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library/core/src/slice/index.rs:15:9
5: <alloc::vec::Vec<T,A> as core::ops::index::Index<I>>::index
at /home/github/.rustup/toolchains/stable-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library/alloc/src/vec/mod.rs:2176:9
6: <salvo_core::routing::filter::impls::path::CharPartBuilder<C> as salvo_core::routing::filter::impls::path::PartBuilder>::build
at /home/github/.cargo/registry/src/github.com-1ecc6299db9ec823/salvo_core-0.11.1/src/routing/filter/impls/path.rs:60:18
7: salvo_core::routing::filter::impls::path::PathParser::scan_parts
at /home/github/.cargo/registry/src/github.com-1ecc6299db9ec823/salvo_core-0.11.1/src/routing/filter/impls/path.rs:486:40
8: salvo_core::routing::filter::impls::path::PathParser::parse
at /home/github/.cargo/registry/src/github.com-1ecc6299db9ec823/salvo_core-0.11.1/src/routing/filter/impls/path.rs:546:29
9: salvo_core::routing::filter::impls::path::PathFilter::new
at /home/github/.cargo/registry/src/github.com-1ecc6299db9ec823/salvo_core-0.11.1/src/routing/filter/impls/path.rs:589:32
10: salvo_core::routing::router::Router::path
at /home/github/.cargo/registry/src/github.com-1ecc6299db9ec823/salvo_core-0.11.1/src/routing/router.rs:152:21
11: demo1::main::{{closure}}
at ./src/main.rs:14:23
12: <core::future::from_generator::GenFuture<T> as core::future::future::Future>::poll
at /home/github/.rustup/toolchains/stable-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library/core/src/future/mod.rs:80:19
13: tokio::park::thread::CachedParkThread::block_on::{{closure}}
at /home/github/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.5.0/src/park/thread.rs:263:54
14: tokio::coop::with_budget::{{closure}}
at /home/github/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.5.0/src/coop.rs:106:9
15: std::thread::local::LocalKey<T>::try_with
at /home/github/.rustup/toolchains/stable-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library/std/src/thread/local.rs:272:16
16: std::thread::local::LocalKey<T>::with
at /home/github/.rustup/toolchains/stable-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library/std/src/thread/local.rs:248:9
17: tokio::coop::with_budget
at /home/github/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.5.0/src/coop.rs:99:5
18: tokio::coop::budget
at /home/github/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.5.0/src/coop.rs:76:5
19: tokio::park::thread::CachedParkThread::block_on
at /home/github/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.5.0/src/park/thread.rs:263:31
20: tokio::runtime::enter::Enter::block_on
at /home/github/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.5.0/src/runtime/enter.rs:151:13
21: tokio::runtime::thread_pool::ThreadPool::block_on
at /home/github/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.5.0/src/runtime/thread_pool/mod.rs:71:9
22: tokio::runtime::Runtime::block_on
at /home/github/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.5.0/src/runtime/mod.rs:452:43
23: demo1::main
at ./src/main.rs:3:1
24: core::ops::function::FnOnce::call_once
at /home/github/.rustup/toolchains/stable-x86_64-unknown-linux-gnu/lib/rustlib/src/rust/library/core/src/ops/function.rs:227:5
note: Some details are omitted, run with `RUST_BACKTRACE=full` for a verbose backtrace.
```
| 2021-05-02T11:19:57 | 0.11 | 9f6ca8954cf4435f061e5f83fe25064b57815ccc | [
"routing::filter::impls::path::tests::test_parse_num0"
] | [
"error::error_size_of",
"http::multipart::boundary::test::test_empty_stream",
"http::multipart::boundary::test::test_one_boundary",
"http::multipart::boundary::test::test_one_empty_field",
"http::multipart::boundary::test::test_one_nonempty_field",
"http::multipart::field::headers::test_header_end_split",... | [
"routing::filter::impls::path::tests::test_parse_multi_regex_with_prefix_and_suffix",
"routing::filter::impls::path::tests::test_parse_multi_regex",
"routing::filter::impls::path::tests::test_parse_multi_regex_with_prefix",
"routing::filter::impls::path::tests::test_parse_single_regex",
"routing::filter::im... | [] | |
mozilla/sccache | 1,724 | mozilla__sccache-1724 | [
"1723"
] | 6fffb2a20ea0f51ec4a722b0f9d0caa608716ffc | diff --git a/docs/S3.md b/docs/S3.md
--- a/docs/S3.md
+++ b/docs/S3.md
@@ -26,4 +26,4 @@ Sccache is able to load credentials from various sources. Including:
- AssumeRole: assume role with the role specified by `AWS_ROLE_ARN`.
- AssumeRoleWithWebIdentity: assume role with web webIdentity specified by `AWS_ROLE_ARN` and `AWS_WEB_IDENTITY_TOKEN_FILE`.
-Alternatively, the `SCCACHE_S3_NO_CREDENTIALS` environment variable can be set to use public readonly access to the S3 bucket, without the need for credentials. This can be useful for implementing a readonly cache for pull requests, which typically cannot be given access to credentials for security reasons.
+Alternatively, the `SCCACHE_S3_NO_CREDENTIALS` environment variable can be set to use public readonly access to the S3 bucket, without the need for credentials. Valid values for this environment variable are `true`, `1`, `false`, and `0`. This can be useful for implementing a readonly cache for pull requests, which typically cannot be given access to credentials for security reasons.
diff --git a/src/config.rs b/src/config.rs
--- a/src/config.rs
+++ b/src/config.rs
@@ -510,9 +510,14 @@ pub struct EnvConfig {
fn config_from_env() -> Result<EnvConfig> {
// ======= AWS =======
- let s3 = env::var("SCCACHE_BUCKET").ok().map(|bucket| {
+ let s3 = if let Ok(bucket) = env::var("SCCACHE_BUCKET") {
let region = env::var("SCCACHE_REGION").ok();
- let no_credentials = env::var("SCCACHE_S3_NO_CREDENTIALS").ok().is_some();
+ let no_credentials =
+ env::var("SCCACHE_S3_NO_CREDENTIALS").map_or(Ok(false), |val| match val.as_str() {
+ "true" | "1" => Ok(true),
+ "false" | "0" => Ok(false),
+ _ => bail!("SCCACHE_S3_NO_CREDENTIALS must be 'true', '1', 'false', or '0'."),
+ })?;
let use_ssl = env::var("SCCACHE_S3_USE_SSL")
.ok()
.map(|value| value != "off");
diff --git a/src/config.rs b/src/config.rs
--- a/src/config.rs
+++ b/src/config.rs
@@ -525,15 +530,18 @@ fn config_from_env() -> Result<EnvConfig> {
.map(|s| s.to_owned() + "/")
.unwrap_or_default();
- S3CacheConfig {
+ Some(S3CacheConfig {
bucket,
region,
no_credentials,
key_prefix,
endpoint,
use_ssl,
- }
- });
+ })
+ } else {
+ None
+ };
+
if s3.as_ref().map(|s3| s3.no_credentials).unwrap_or_default()
&& (env::var_os("AWS_ACCESS_KEY_ID").is_some()
|| env::var_os("AWS_SECRET_ACCESS_KEY").is_some())
| diff --git a/src/config.rs b/src/config.rs
--- a/src/config.rs
+++ b/src/config.rs
@@ -1080,8 +1088,8 @@ fn config_overrides() {
#[test]
#[serial]
-fn test_s3_no_credentials() {
- env::set_var("SCCACHE_S3_NO_CREDENTIALS", "1");
+fn test_s3_no_credentials_conflict() {
+ env::set_var("SCCACHE_S3_NO_CREDENTIALS", "true");
env::set_var("SCCACHE_BUCKET", "my-bucket");
env::set_var("AWS_ACCESS_KEY_ID", "aws-access-key-id");
env::set_var("AWS_SECRET_ACCESS_KEY", "aws-secret-access-key");
diff --git a/src/config.rs b/src/config.rs
--- a/src/config.rs
+++ b/src/config.rs
@@ -1098,6 +1106,68 @@ fn test_s3_no_credentials() {
env::remove_var("AWS_SECRET_ACCESS_KEY");
}
+#[test]
+#[serial]
+fn test_s3_no_credentials_invalid() {
+ env::set_var("SCCACHE_S3_NO_CREDENTIALS", "yes");
+ env::set_var("SCCACHE_BUCKET", "my-bucket");
+
+ let error = config_from_env().unwrap_err();
+ assert_eq!(
+ "SCCACHE_S3_NO_CREDENTIALS must be 'true', '1', 'false', or '0'.",
+ error.to_string()
+ );
+
+ env::remove_var("SCCACHE_S3_NO_CREDENTIALS");
+ env::remove_var("SCCACHE_BUCKET");
+}
+
+#[test]
+#[serial]
+fn test_s3_no_credentials_valid_true() {
+ env::set_var("SCCACHE_S3_NO_CREDENTIALS", "true");
+ env::set_var("SCCACHE_BUCKET", "my-bucket");
+
+ let env_cfg = config_from_env().unwrap();
+ match env_cfg.cache.s3 {
+ Some(S3CacheConfig {
+ ref bucket,
+ no_credentials,
+ ..
+ }) => {
+ assert_eq!(bucket, "my-bucket");
+ assert!(no_credentials);
+ }
+ None => unreachable!(),
+ };
+
+ env::remove_var("SCCACHE_S3_NO_CREDENTIALS");
+ env::remove_var("SCCACHE_BUCKET");
+}
+
+#[test]
+#[serial]
+fn test_s3_no_credentials_valid_false() {
+ env::set_var("SCCACHE_S3_NO_CREDENTIALS", "false");
+ env::set_var("SCCACHE_BUCKET", "my-bucket");
+
+ let env_cfg = config_from_env().unwrap();
+ match env_cfg.cache.s3 {
+ Some(S3CacheConfig {
+ ref bucket,
+ no_credentials,
+ ..
+ }) => {
+ assert_eq!(bucket, "my-bucket");
+ assert!(!no_credentials);
+ }
+ None => unreachable!(),
+ };
+
+ env::remove_var("SCCACHE_S3_NO_CREDENTIALS");
+ env::remove_var("SCCACHE_BUCKET");
+}
+
#[test]
fn test_gcs_service_account() {
env::set_var("SCCACHE_GCS_BUCKET", "my-bucket");
| Proposal: Make `SCCACHE_S3_NO_CREDENTIALS` require a value of `true`
It seems that `SCCACHE_S3_NO_CREDENTIALS` takes effect even when its value is set to non-truthy values like `SCCACHE_S3_NO_CREDENTIALS=false`.
From reviewing the source code, it looks like this is intended.
Can we switch this environment variable to require an explicit value of `true` to be enabled (e.g. `SCCACHE_S3_NO_CREDENTIALS=true`)?
The fact that `sccache` turns this feature on when this environment value is falsy (or even empty) is currently causing problems for one of the build tools that we use.
I'm happy to work on this assuming this proposal is approved.
| Sounds good. I am all for hardening options.
| 2023-04-13T00:37:43 | 0.4 | 6fffb2a20ea0f51ec4a722b0f9d0caa608716ffc | [
"config::test_s3_no_credentials_invalid",
"config::test_s3_no_credentials_valid_false",
"test::tests::test_server_port_in_use"
] | [
"cache::cache::test::test_normalize_key",
"compiler::args::tests::assert_tests::test_args_iter_no_conflict",
"cache::s3::test::test_endpoint_resolver",
"compiler::args::tests::assert_tests::test_arginfo_process_take_arg - should panic",
"compiler::args::tests::assert_tests::test_arginfo_process_take_maybe_c... | [
"test_dist_nobuilder",
"test_dist_failingserver",
"test_dist_basic",
"test_dist_restartedserver",
"test_auth",
"test_s3_invalid_args",
"test_rust_cargo_run_with_env_dep_parsing",
"test_rust_cargo_check",
"test_rust_cargo_build_nightly",
"test_run_log_no_perm",
"test_rust_cargo_check_nightly",
... | [] |
mozilla/sccache | 1,547 | mozilla__sccache-1547 | [
"1266"
] | 762f5c60b5f6e76812fe291f12002a4b93c5b26b | diff --git a/src/compiler/clang.rs b/src/compiler/clang.rs
--- a/src/compiler/clang.rs
+++ b/src/compiler/clang.rs
@@ -184,6 +184,7 @@ counted_array!(pub static ARGS: [ArgInfo<gcc::ArgData>; _] = [
// Note: this overrides the -fprofile-use option in gcc.rs.
take_arg!("-fprofile-use", PathBuf, Concatenated('='), ClangProfileUse),
take_arg!("-fsanitize-blacklist", PathBuf, Concatenated('='), ExtraHashFile),
+ flag!("-fuse-ctor-homing", PassThroughFlag),
take_arg!("-gcc-toolchain", OsString, Separated, PassThrough),
flag!("-gcodeview", PassThroughFlag),
take_arg!("-include-pch", PathBuf, CanBeSeparated, PreprocessorArgumentPath),
| diff --git a/src/compiler/clang.rs b/src/compiler/clang.rs
--- a/src/compiler/clang.rs
+++ b/src/compiler/clang.rs
@@ -550,6 +551,12 @@ mod test {
assert_eq!(ovec!["-Xclang", "-no-opaque-pointers"], a.preprocessor_args);
}
+ #[test]
+ fn test_parse_xclang_use_ctor_homing() {
+ let a = parses!("-c", "foo.c", "-o", "foo.o", "-Xclang", "-fuse-ctor-homing");
+ assert_eq!(ovec!["-Xclang", "-fuse-ctor-homing"], a.common_args);
+ }
+
#[test]
fn test_parse_fplugin() {
let a = parses!("-c", "foo.c", "-o", "foo.o", "-fplugin", "plugin.so");
| Can't handle UnknownFlag arguments with -Xclang (-fuse-ctor-homing)
Hi, I think I have no cache hit while building chromium on Ubuntu.
```
$ sccache -s
[2022-08-05T07:21:57Z DEBUG sccache::config] Attempting to read config file at "/home/user/.config/sccache/config"
[2022-08-05T07:21:57Z DEBUG sccache::config] Couldn't open config file: No such file or directory (os error 2)
[2022-08-05T07:21:57Z DEBUG sccache::commands] request_stats
Compile requests 700
Compile requests executed 0
Cache hits 0
Cache misses 0
Cache timeouts 0
Cache read errors 0
Forced recaches 0
Cache write errors 0
Compilation failures 0
Cache errors 0
Non-cacheable compilations 0
Non-cacheable calls 700
Non-compilation calls 0
Unsupported compiler calls 0
Average cache write 0.000 s
Average cache read miss 0.000 s
Average cache read hit 0.000 s
Failed distributed compilations 0
Non-cacheable reasons:
Can't handle UnknownFlag arguments with -Xclang 700
```
Here are debug logs
> [2022-08-05T05:37:09Z DEBUG sccache::server] parse_arguments: CannotCache(Can't handle UnknownFlag arguments with -Xclang): ["-MMD", "-MF", "obj/components/autofill_assistant/browser/browser/controller_observer.o.d", "-DDCHECK_ALWAYS_ON=1", "-DUSE_UDEV", "-DUSE_AURA=1", "-DUSE_GLIB=1", "-DUSE_OZONE=1", "-DNAVER_WHALE_BUILD", "-DWHALE_DEV_BUILD", "-D__STDC_CONSTANT_MACROS", "-D__STDC_FORMAT_MACROS", "-D_FILE_OFFSET_BITS=64", "-D_LARGEFILE_SOURCE", "-D_LARGEFILE64_SOURCE", "-D_GNU_SOURCE", "-DCR_CLANG_REVISION=\"llvmorg-15-init-17529-ga210f404-1\"", "-DCOMPONENT_BUILD", "-D_LIBCPP_ABI_NAMESPACE=Cr", "-D_LIBCPP_ABI_VERSION=2", "-D_LIBCPP_ENABLE_NODISCARD", "-D_LIBCPP_DEBUG=0", "-DCR_LIBCXX_REVISION=253791a5ba1afef6f94d739e89c726c48169b1da", "-DCR_SYSROOT_HASH=cb4fa34f1faddafb72cace35faf62a611f2ca7c9", "-D_DEBUG", "-DDYNAMIC_ANNOTATIONS_ENABLED=1", "-DGLIB_VERSION_MAX_ALLOWED=GLIB_VERSION_2_40", "-DGLIB_VERSION_MIN_REQUIRED=GLIB_VERSION_2_40", "-DLIBYUV_DISABLE_NEON", "-DWEBP_EXTERN=extern", "-DGL_GLEXT_PROTOTYPES", "-DUSE_GLX", "-DUSE_EGL", "-DVK_USE_PLATFORM_XCB_KHR", "-DVK_USE_PLATFORM_WAYLAND_KHR", "-D_WTL_NO_AUTOMATIC_NAMESPACE", "-DON_FOCUS_PING_ENABLED", "-DTOOLKIT_VIEWS=1", "-DGOOGLE_PROTOBUF_NO_RTTI", "-DGOOGLE_PROTOBUF_NO_STATIC_INITIALIZER", "-DGOOGLE_PROTOBUF_INTERNAL_DONATE_STEAL_INLINE=0", "-DHAVE_PTHREAD", "-DPROTOBUF_USE_DLLS", "-DABSL_CONSUME_DLL", "-DBORINGSSL_SHARED_LIBRARY", "-DU_USING_ICU_NAMESPACE=0", "-DU_ENABLE_DYLOAD=0", "-DUSE_CHROMIUM_ICU=1", "-DU_ENABLE_TRACING=1", "-DU_ENABLE_RESOURCE_TRACING=0", "-DICU_UTIL_DATA_IMPL=ICU_UTIL_DATA_FILE", "-DLEVELDB_PLATFORM_CHROMIUM=1", "-DLEVELDB_SHARED_LIBRARY", "-DSK_CODEC_DECODES_PNG", "-DSK_CODEC_DECODES_WEBP", "-DSK_ENCODE_PNG", "-DSK_ENCODE_WEBP", "-DSK_ENABLE_SKSL", "-DSK_UNTIL_CRBUG_1187654_IS_FIXED", "-DSK_USER_CONFIG_HEADER=\"../../skia/config/SkUserConfig.h\"", "-DSK_WIN_FONTMGR_NO_SIMULATIONS", "-DSK_GL", "-DSK_CODEC_DECODES_JPEG", "-DSK_ENCODE_JPEG", "-DSK_HAS_WUFFS_LIBRARY", 
"-DSK_VULKAN=1", "-DSKIA_DLL", "-DSKCMS_API=__attribute__((visibility(\"default\")))", "-DSK_SUPPORT_GPU=1", "-DSK_GPU_WORKAROUNDS_HEADER=\"gpu/config/gpu_driver_bug_workaround_autogen.h\"", "-DI18N_ADDRESS_VALIDATION_DATA_URL=\"https://chromium-i18n.appspot.com/ssl-aggregate-address/\"", "-DWEBRTC_ENABLE_SYMBOL_EXPORT", "-DWEBRTC_ENABLE_AVX2", "-DWEBRTC_NON_STATIC_TRACE_EVENT_HANDLERS=0", "-DWEBRTC_CHROMIUM_BUILD", "-DWEBRTC_POSIX", "-DWEBRTC_LINUX", "-DABSL_ALLOCATOR_NOTHROW=1", "-DWEBRTC_USE_BUILTIN_ISAC_FIX=0", "-DWEBRTC_USE_BUILTIN_ISAC_FLOAT=1", "-DWEBRTC_USE_X11", "-DWEBRTC_USE_PIPEWIRE", "-DWEBRTC_DLOPEN_PIPEWIRE", "-DWEBRTC_USE_GIO", "-DLOGGING_INSIDE_WEBRTC", "-DCRASHPAD_ZLIB_SOURCE_EXTERNAL", "-DV8_USE_EXTERNAL_STARTUP_DATA", "-DUSE_V8_CONTEXT_SNAPSHOT", "-DV8_CONTEXT_SNAPSHOT_FILENAME=\"v8_context_snapshot.bin\"", "-DATK_LIB_DIR=\"/usr/lib/x86_64-linux-gnu\"", "-DUSE_ATK_BRIDGE", "-DUSING_V8_SHARED", "-DV8_ENABLE_CHECKS", "-DV8_COMPRESS_POINTERS", "-DV8_COMPRESS_POINTERS_IN_SHARED_CAGE", "-DV8_31BIT_SMIS_ON_64BIT_ARCH", "-DV8_ENABLE_SANDBOX", "-DV8_SANDBOXED_POINTERS", "-DV8_DEPRECATION_WARNINGS", "-DCPPGC_CAGED_HEAP", "-DCPPGC_YOUNG_GENERATION", "-I../..", "-Igen", "-I../../buildtools/third_party/libc++", "-I../../third_party/perfetto/include", "-Igen/third_party/perfetto/build_config", "-Igen/third_party/perfetto", "-I../../third_party/libyuv/include", "-I../../third_party/jsoncpp/source/include", "-I../../third_party/libwebp/src/src", "-Igen/third_party/private_membership/src", "-Igen/third_party/shell-encryption/src", "-Igen/components/policy/proto", "-I../../third_party/vulkan-deps/vulkan-headers/src/include", "-I../../third_party/khronos", "-I../../gpu", "-Igen/third_party/dawn/include", "-I../../third_party/dawn/include", "-I../../third_party/wtl/include", "-I../../third_party/protobuf/src", "-Igen/protoc_out", "-I../../third_party/abseil-cpp", "-I../../third_party/boringssl/src/include", "-I../../third_party/ced/src", 
"-I../../third_party/icu/source/common", "-I../../third_party/icu/source/i18n", "-I../../third_party/leveldatabase", "-I../../third_party/leveldatabase/src", "-I../../third_party/leveldatabase/src/include", "-I../../net/third_party/quiche/overrides", "-I../../net/third_party/quiche/src/quiche/common/platform/default", "-I../../net/third_party/quiche/src", "-Igen/net/third_party/quiche/src", "-I../../third_party/skia", "-I../../third_party/wuffs/src/release/c", "-I../../third_party/vulkan/include", "-I../../third_party/libaddressinput/src/cpp/include", "-I../../third_party/webrtc_overrides", "-I../../third_party/webrtc", "-Igen/third_party/webrtc", "-I../../third_party/libwebm/source", "-I../../third_party/mesa_headers", "-I../../third_party/libaom/source/libaom", "-I../../third_party/crashpad/crashpad", "-I../../third_party/crashpad/crashpad/compat/linux", "-I../../third_party/crashpad/crashpad/compat/non_win", "-I../../third_party/zlib", "-I../../v8/include", "-Igen/v8/include", "-Igen/third_party/metrics_proto", "-I../../third_party/re2/src", "-Wall", "-Wextra", "-Wimplicit-fallthrough", "-Wunreachable-code-aggressive", "-Wthread-safety", "-Wno-missing-field-initializers", "-Wno-unused-parameter", "-Wloop-analysis", "-Wno-unneeded-internal-declaration", "-Wenum-compare-conditional", "-Wno-psabi", "-Wno-ignored-pragma-optimize", "-Wno-unqualified-std-cast-call", "-Wno-array-parameter", "-Wno-deprecated-builtins", "-Wmax-tokens", "-Wshadow", "-fno-delete-null-pointer-checks", "-fno-ident", "-fno-strict-aliasing", "--param=ssp-buffer-size=4", "-fstack-protector", "-funwind-tables", "-fPIC", "-pthread", "-fcolor-diagnostics", "-fmerge-all-constants", "-fcrash-diagnostics-dir=../../tools/clang/crashreports", "-mllvm", "-instcombine-lower-dbg-declare=0", "-ffp-contract=off", "-fcomplete-member-pointers", "-m64", "-msse3", "-Wno-builtin-macro-redefined", "-D__DATE__=", "-D__TIME__=", "-D__TIMESTAMP__=", "-ffile-compilation-dir=.", "-no-canonical-prefixes", 
"-ftrivial-auto-var-init=pattern", "-O0", "-fno-omit-frame-pointer", "-gdwarf-4", "-g2", "-gdwarf-aranges", "-ggnu-pubnames", "-Xclang", "-fuse-ctor-homing", "-fvisibility=hidden", "-Wheader-hygiene", "-Wstring-conversion", "-Wtautological-overlap-compare", "-isystem../../build/linux/debian_bullseye_amd64-sysroot/usr/include/glib-2.0", "-isystem../../build/linux/debian_bullseye_amd64-sysroot/usr/lib/x86_64-linux-gnu/glib-2.0/include", "-DPROTOBUF_ALLOW_DEPRECATED=1", "-isystem../../build/linux/debian_bullseye_amd64-sysroot/usr/include/nss", "-isystem../../build/linux/debian_bullseye_amd64-sysroot/usr/include/nspr", "-Wno-shadow", "-isystem../../build/linux/debian_bullseye_amd64-sysroot/usr/include/at-spi2-atk/2.0", "-isystem../../build/linux/debian_bullseye_amd64-sysroot/usr/include/at-spi-2.0", "-isystem../../build/linux/debian_bullseye_amd64-sysroot/usr/include/dbus-1.0", "-isystem../../build/linux/debian_bullseye_amd64-sysroot/usr/lib/x86_64-linux-gnu/dbus-1.0/include", "-isystem../../build/linux/debian_bullseye_amd64-sysroot/usr/include/atk-1.0", "-isystem../../build/linux/debian_bullseye_amd64-sysroot/usr/include/glib-2.0", "-isystem../../build/linux/debian_bullseye_amd64-sysroot/usr/lib/x86_64-linux-gnu/glib-2.0/include", "-Wno-undefined-bool-conversion", "-Wno-tautological-undefined-compare", "-std=c++17", "-Wno-trigraphs", "-fno-exceptions", "-fno-rtti", "-nostdinc++", "-isystem../../buildtools/third_party/libc++/trunk/include", "-isystem../../buildtools/third_party/libc++abi/trunk/include", "--sysroot=../../build/linux/debian_bullseye_amd64-sysroot", "-fvisibility-inlines-hidden", "-Wno-deprecated-declarations", "-c", "../../components/autofill_assistant/browser/controller_observer.cc", "-o", "obj/components/autofill_assistant/browser/browser/controller_observer.o"]
```-fuse-ctor-homing``` compile flag seems inserted only when configured for the debug build.
https://source.chromium.org/chromium/chromium/src/+/main:build/config/compiler/BUILD.gn;l=2393;drc=5ecdeae9114f5e3e72278378436575071fdaebe7
| 2023-01-10T01:55:40 | 0.7 | 762f5c60b5f6e76812fe291f12002a4b93c5b26b | [
"compiler::clang::test::test_parse_xclang_use_ctor_homing"
] | [
"cache::cache::test::test_normalize_key",
"compiler::args::tests::assert_tests::test_arginfo_process_take_concat_arg_delim - should panic",
"compiler::args::tests::assert_tests::test_arginfo_process_take_arg - should panic",
"compiler::args::tests::assert_tests::test_arginfo_process_take_concat_arg - should p... | [
"test_dist_failingserver",
"test_dist_restartedserver",
"test_dist_nobuilder",
"test_dist_basic",
"test_auth",
"test_s3_invalid_args",
"test_rust_cargo_check_nightly",
"test_run_log_no_perm",
"test_rust_cargo_build",
"test_rust_cargo_check",
"test_rust_cargo_run_with_env_dep_parsing",
"test_ru... | [] | |
mozilla/sccache | 1,157 | mozilla__sccache-1157 | [
"1156"
] | 5edd91a739df0f51f260766d7f68f6f59a89d56e | diff --git a/src/compiler/rust.rs b/src/compiler/rust.rs
--- a/src/compiler/rust.rs
+++ b/src/compiler/rust.rs
@@ -2276,10 +2276,12 @@ impl RlibDepReader {
#[cfg(feature = "dist-client")]
fn parse_rustc_z_ls(stdout: &str) -> Result<Vec<&str>> {
let mut lines = stdout.lines();
- match lines.next() {
- Some("=External Dependencies=") => {}
- Some(s) => bail!("Unknown first line from rustc -Z ls: {}", s),
- None => bail!("No output from rustc -Z ls"),
+ loop {
+ match lines.next() {
+ Some("=External Dependencies=") => break,
+ Some(_s) => {}
+ None => bail!("No output from rustc -Z ls"),
+ }
}
let mut dep_names = vec![];
| diff --git a/src/compiler/rust.rs b/src/compiler/rust.rs
--- a/src/compiler/rust.rs
+++ b/src/compiler/rust.rs
@@ -2895,13 +2897,38 @@ c:/foo/bar.rs:
#[cfg(feature = "dist-client")]
#[test]
- fn test_parse_rustc_z_ls() {
+ fn test_parse_rustc_z_ls_pre_1_55() {
let output = "=External Dependencies=
1 lucet_runtime
2 lucet_runtime_internals-1ff6232b6940e924
3 lucet_runtime_macros-c18e1952b835769e
+";
+ let res = parse_rustc_z_ls(output);
+ assert!(res.is_ok());
+ let res = res.unwrap();
+ assert_eq!(res.len(), 3);
+ assert_eq!(res[0], "lucet_runtime");
+ assert_eq!(res[1], "lucet_runtime_internals");
+ assert_eq!(res[2], "lucet_runtime_macros");
+ }
+
+ #[cfg(feature = "dist-client")]
+ #[test]
+ fn test_parse_rustc_z_ls_post_1_55() {
+ // This was introduced in rust 1.55 by
+ // https://github.com/rust-lang/rust/commit/cef3ab75b12155e0582dd8b7710b7b901215fdd6
+ let output = "Crate info:
+name lucet_runtime
+hash 6c42566fc9757bba stable_crate_id StableCrateId(11157525371370257329)
+proc_macro false
+=External Dependencies=
+1 lucet_runtime
+2 lucet_runtime_internals-1ff6232b6940e924
+3 lucet_runtime_macros-c18e1952b835769e
+
+
";
let res = parse_rustc_z_ls(output);
assert!(res.is_ok());
| RlibDepReader is failing due to different stdout on recent rust versions
According to `test_parse_rustc_z_ls` we would expect the full output to be:
```
=External Dependencies=
1 std-a46a068050a12a4b hash ce2bb5d0f13de257 host_hash None kind Explicit
2 core-98b4d121af2b7335 hash ab9053be6f183cd6 host_hash None kind Explicit
3 compiler_builtins-1d9f7e5920635d15 hash 0b4a866460b24be6 host_hash None kind Explicit
4 rustc_std_workspace_core-bad7ed93fdd31371 hash 201fd3a16cad3e5c host_hash None kind Explicit
5 alloc-8c0a241d0360fa53 hash b3e24736b3536b6f host_hash None kind Explicit
6 libc-70f9ed6d8e7a5ce6 hash 9a731ab7069e6899 host_hash None kind Explicit
7 unwind-58bbb7b1efa3a5e2 hash 5dc2b8d162f5ef24 host_hash None kind Explicit
8 cfg_if-4c9b082d197c16f8 hash 94c7cbb541b4fc5f host_hash None kind Explicit
9 hashbrown-8c2bdbab4845bf3d hash 561db7ad349a11d1 host_hash None kind Explicit
10 rustc_std_workspace_alloc-d578639df0547b30 hash 122612aa617d3817 host_hash None kind Explicit
11 rustc_demangle-5d7898c7fac3a07d hash 45c031f8530426e0 host_hash None kind Explicit
12 std_detect-d7523a4d118e6572 hash 59d61553eeb5a1b5 host_hash None kind Explicit
13 addr2line-d805b480c9102e58 hash 06df91844c7e08f3 host_hash None kind Explicit
14 gimli-06dfdbc22b935051 hash 3a962d45c852274d host_hash None kind Explicit
15 object-1bc822d886d4f7bd hash c613954836ca9b5f host_hash None kind Explicit
16 memchr-963eca8df4c224a6 hash 2c6c586e9ea93024 host_hash None kind Explicit
17 miniz_oxide-9062de483386e50b hash 8e1eef0af67304fb host_hash None kind Explicit
18 adler-b988ab269ff13602 hash a427f257a4331c32 host_hash None kind Explicit
19 panic_unwind-1df56095db9453cb hash dc8beb525870dabd host_hash None kind Implicit
20 cfg_if-b7d513a981139b3b hash 9b7f3df02a977cae host_hash None kind Explicit
21 dbus-af442345a3e22cf4 hash e341a2f6e400cbf2 host_hash None kind Explicit
22 libc-bd681fcff9055b0e hash 3b57693af1386972 host_hash None kind Explicit
23 libdbus_sys-c9dbe27110664aab hash 76e484ea88618765 host_hash None kind Explicit
24 panic_abort-5457de826abd770b hash 1468be7d81845dc1 host_hash None kind Implicit
```
But it looks like since some recent (at least rust 1.58 ?) we get more infos:
```
Crate info:
name audio_thread_priority-84ba74950db609eb
hash 6c42566fc9757bba stable_crate_id StableCrateId(11157525371370257329)
proc_macro false
=External Dependencies=
1 std-a46a068050a12a4b hash ce2bb5d0f13de257 host_hash None kind Explicit
2 core-98b4d121af2b7335 hash ab9053be6f183cd6 host_hash None kind Explicit
3 compiler_builtins-1d9f7e5920635d15 hash 0b4a866460b24be6 host_hash None kind Explicit
4 rustc_std_workspace_core-bad7ed93fdd31371 hash 201fd3a16cad3e5c host_hash None kind Explicit
5 alloc-8c0a241d0360fa53 hash b3e24736b3536b6f host_hash None kind Explicit
6 libc-70f9ed6d8e7a5ce6 hash 9a731ab7069e6899 host_hash None kind Explicit
7 unwind-58bbb7b1efa3a5e2 hash 5dc2b8d162f5ef24 host_hash None kind Explicit
8 cfg_if-4c9b082d197c16f8 hash 94c7cbb541b4fc5f host_hash None kind Explicit
9 hashbrown-8c2bdbab4845bf3d hash 561db7ad349a11d1 host_hash None kind Explicit
10 rustc_std_workspace_alloc-d578639df0547b30 hash 122612aa617d3817 host_hash None kind Explicit
11 rustc_demangle-5d7898c7fac3a07d hash 45c031f8530426e0 host_hash None kind Explicit
12 std_detect-d7523a4d118e6572 hash 59d61553eeb5a1b5 host_hash None kind Explicit
13 addr2line-d805b480c9102e58 hash 06df91844c7e08f3 host_hash None kind Explicit
14 gimli-06dfdbc22b935051 hash 3a962d45c852274d host_hash None kind Explicit
15 object-1bc822d886d4f7bd hash c613954836ca9b5f host_hash None kind Explicit
16 memchr-963eca8df4c224a6 hash 2c6c586e9ea93024 host_hash None kind Explicit
17 miniz_oxide-9062de483386e50b hash 8e1eef0af67304fb host_hash None kind Explicit
18 adler-b988ab269ff13602 hash a427f257a4331c32 host_hash None kind Explicit
19 panic_unwind-1df56095db9453cb hash dc8beb525870dabd host_hash None kind Implicit
20 cfg_if-b7d513a981139b3b hash 9b7f3df02a977cae host_hash None kind Explicit
21 dbus-af442345a3e22cf4 hash e341a2f6e400cbf2 host_hash None kind Explicit
22 libc-bd681fcff9055b0e hash 3b57693af1386972 host_hash None kind Explicit
23 libdbus_sys-c9dbe27110664aab hash 76e484ea88618765 host_hash None kind Explicit
24 panic_abort-5457de826abd770b hash 1468be7d81845dc1 host_hash None kind Implicit
```
This makes the parser failing and issuing a warning `"Failed to initialise RlibDepDecoder, distributed compiles will be inefficient`.
It also seems to be connected to https://bugzilla.mozilla.org/show_bug.cgi?id=1760743
| 2022-04-14T18:32:32 | 0.12 | 5edd91a739df0f51f260766d7f68f6f59a89d56e | [
"compiler::rust::test::test_parse_rustc_z_ls_post_1_55"
] | [
"azure::credentials::test::test_conn_str_with_endpoint_suffix_only",
"azure::credentials::test::test_parse_connection_string",
"azure::credentials::test::test_parse_connection_string_without_account_key",
"azure::blobstore::test::test_canonicalize_resource",
"azure::blobstore::test::test_signing",
"cache:... | [
"test_dist_restartedserver",
"test_dist_basic",
"test_dist_failingserver",
"test_dist_nobuilder",
"test_auth",
"test_rust_cargo_build",
"test_rust_cargo_check",
"test_rust_cargo_build_nightly",
"test_rust_cargo_check_nightly",
"test_rust_cargo_run_with_env_dep_parsing"
] | [] | |
rust-scraper/scraper | 187 | rust-scraper__scraper-187 | [
"169"
] | e8e3cc4edbdc64c07a396804aa137a37fb4c7b74 | diff --git a/src/selector.rs b/src/selector.rs
--- a/src/selector.rs
+++ b/src/selector.rs
@@ -86,6 +86,14 @@ pub struct Parser;
impl<'i> parser::Parser<'i> for Parser {
type Impl = Simple;
type Error = SelectorParseErrorKind<'i>;
+
+ fn parse_is_and_where(&self) -> bool {
+ true
+ }
+
+ fn parse_has(&self) -> bool {
+ true
+ }
}
/// A simple implementation of `SelectorImpl` with no pseudo-classes or pseudo-elements.
| diff --git a/src/selector.rs b/src/selector.rs
--- a/src/selector.rs
+++ b/src/selector.rs
@@ -222,4 +230,22 @@ mod tests {
let s = "<failing selector>";
let _sel: Selector = s.try_into().unwrap();
}
+
+ #[test]
+ fn has_selector() {
+ let s = ":has(a)";
+ let _sel: Selector = s.try_into().unwrap();
+ }
+
+ #[test]
+ fn is_selector() {
+ let s = ":is(a)";
+ let _sel: Selector = s.try_into().unwrap();
+ }
+
+ #[test]
+ fn where_selector() {
+ let s = ":where(a)";
+ let _sel: Selector = s.try_into().unwrap();
+ }
}
diff --git a/src/test.rs b/src/test.rs
--- a/src/test.rs
+++ b/src/test.rs
@@ -20,3 +20,27 @@ fn tag_with_newline() {
Some("https://github.com/causal-agent/scraper")
);
}
+
+#[test]
+fn has_selector() {
+ let document = Html::parse_fragment(
+ r#"
+ <div>
+ <div id="foo">
+ Hi There!
+ </div>
+ </div>
+ <ul>
+ <li>first</li>
+ <li>second</li>
+ <li>third</li>
+ </ul>
+ "#,
+ );
+
+ let selector = Selector::parse("div:has(div#foo) + ul > li:nth-child(2)").unwrap();
+
+ let mut iter = document.select(&selector);
+ let li = iter.next().unwrap();
+ assert_eq!(li.inner_html(), "second");
+}
| Support for `:has()` selector
Hi, do you plan to support [the `:has()` selector](https://developer.mozilla.org/en-US/docs/Web/CSS/:has)? To my understanding, this css keyword is needed for selecting objects based on the parent of another known object.
Consider the following example:
```html
<div>
<div id="foo">
Hi There!
</div>
</div>
<ul>
<li>first</li>
<li>second</li>
<li>third</li>
</ul>
```
In order to select the second list item, I would like to use the following selector:
```rust
let selector = Selector::parse("div:has(div#foo) + ul > li:nth-child(2)").unwrap();
```
This line however panics as of `scraper` version 0.18.1.
| I think this is still missing support in our upstream [`selectors`](https://crates.io/crates/selectors) dependency, at least in the version published on crates.io.
+1. I'm trying to scrape Wikipedia, which has this sort of nesting. [For example](https://en.wikipedia.org/w/index.php?title=List_of_TCP_and_UDP_port_numbers):
```html
<h2>
<span class="mw-headline" id="Registered_ports">Registered ports</span>
<!-- ... -->
</h2>
```
This selector: `h2:has(#Registered_ports) ~ .wikitable.sortable` would pick the first table after this `h2`, which is a good way to locate the content in lieu of a distinctive id/class on the table itself.
From what I can see selectors 0.25 (published to crates.io) does have `:has` support. See https://docs.rs/selectors/latest/selectors/parser/enum.Component.html#variant.Has Although there seem to be performance improvements in more recent unreleased commits.
https://github.com/servo/servo/issues/25133
I had taken a look into adding `:is()` support and it seems like both `:is()` and `:has()` are already supported by `selectors`. The [`Parser`](https://github.com/causal-agent/scraper/blob/v0.19.0/src/selector.rs#L86) impl needs to enable support by implementing `parse_is_and_where` and `parse_has`.
```rust
fn parse_is_and_where(&self) -> bool {
true
}
fn parse_has(&self) -> bool {
true
}
```
@causal-agent Should it be safe to enable support for these selectors? I can make a PR with these changes unless these selectors are not enabled for a reason.
> The [Parser](https://github.com/causal-agent/scraper/blob/v0.19.0/src/selector.rs#L86) impl needs to enable support by implementing parse_is_and_where and parse_has.
Thank you for looking into this!
> Should it be safe to enable support for these selectors? I can make a PR with these changes unless these selectors are not enabled for a reason.
I think only tests will answer that. Please open a PR, ideally including a test case. I can try to then also give it a spin in a code base containing a pretty diverse set of scrapers and see if anything breaks that is not caught by the tests here.
@jameshurst when your PR is ready, tag me. I will run some tests and review it ASAP. | 2024-07-16T19:15:25 | 0.19 | e8e3cc4edbdc64c07a396804aa137a37fb4c7b74 | [
"selector::tests::has_selector",
"selector::tests::is_selector",
"selector::tests::where_selector",
"test::has_selector"
] | [
"element_ref::element::tests::test_has_class",
"element_ref::element::tests::test_has_id",
"selector::tests::invalid_selector_conversions - should panic",
"selector::tests::selector_conversions",
"html::tests::html_is_send",
"test::tag_with_newline",
"html::tests::root_element_document_comment",
"elem... | [] | [] |
rust-scraper/scraper | 213 | rust-scraper__scraper-213 | [
"212"
] | e0d4ea7a3373b3a75bd79ad85af21243fac93e60 | diff --git a/scraper/src/error/utils.rs b/scraper/src/error/utils.rs
--- a/scraper/src/error/utils.rs
+++ b/scraper/src/error/utils.rs
@@ -1,14 +1,12 @@
use cssparser::Token;
pub(crate) fn render_token(token: &Token<'_>) -> String {
- // THIS TOOK FOREVER TO IMPLEMENT
-
match token {
- // TODO: Give these guys some better names
- Token::Ident(ident) => format!("{}", ident.clone()),
- Token::AtKeyword(value) => format!("@{}", value.clone()),
- Token::Hash(name) | Token::IDHash(name) => format!("#{}", name.clone()),
- Token::QuotedString(value) => format!("\"{}\"", value.clone()),
+ Token::Ident(ident) => ident.to_string(),
+ Token::AtKeyword(value) => format!("@{}", value),
+ Token::Hash(name) | Token::IDHash(name) => format!("#{}", name),
+ Token::QuotedString(value) => format!("\"{}\"", value),
+ Token::UnquotedUrl(value) => value.to_string(),
Token::Number {
has_sign: signed,
value: num,
diff --git a/scraper/src/error/utils.rs b/scraper/src/error/utils.rs
--- a/scraper/src/error/utils.rs
+++ b/scraper/src/error/utils.rs
@@ -27,39 +25,30 @@ pub(crate) fn render_token(token: &Token<'_>) -> String {
} => format!("{}{}", render_int(*signed, *num), unit),
Token::WhiteSpace(_) => String::from(" "),
Token::Comment(comment) => format!("/* {} */", comment),
- Token::Function(name) => format!("{}()", name.clone()),
- Token::BadString(string) => format!("<Bad String {:?}>", string.clone()),
- Token::BadUrl(url) => format!("<Bad URL {:?}>", url.clone()),
+ Token::Function(name) => format!("{}()", name),
+ Token::BadString(string) => format!("<Bad String {:?}>", string),
+ Token::BadUrl(url) => format!("<Bad URL {:?}>", url),
// Single-character token
- sc_token => render_single_char_token(sc_token),
+ Token::Colon => ":".into(),
+ Token::Semicolon => ";".into(),
+ Token::Comma => ",".into(),
+ Token::IncludeMatch => "~=".into(),
+ Token::DashMatch => "|=".into(),
+ Token::PrefixMatch => "^=".into(),
+ Token::SuffixMatch => "$=".into(),
+ Token::SubstringMatch => "*=".into(),
+ Token::CDO => "<!--".into(),
+ Token::CDC => "-->".into(),
+ Token::ParenthesisBlock => "<(".into(),
+ Token::SquareBracketBlock => "<[".into(),
+ Token::CurlyBracketBlock => "<{".into(),
+ Token::CloseParenthesis => "<)".into(),
+ Token::CloseSquareBracket => "<]".into(),
+ Token::CloseCurlyBracket => "<}".into(),
+ Token::Delim(delim) => (*delim).into(),
}
}
-fn render_single_char_token(token: &Token) -> String {
- String::from(match token {
- Token::Colon => ":",
- Token::Semicolon => ";",
- Token::Comma => ",",
- Token::IncludeMatch => "~=",
- Token::DashMatch => "|=",
- Token::PrefixMatch => "^=",
- Token::SuffixMatch => "$=",
- Token::SubstringMatch => "*=",
- Token::CDO => "<!--",
- Token::CDC => "-->",
- Token::ParenthesisBlock => "<(",
- Token::SquareBracketBlock => "<[",
- Token::CurlyBracketBlock => "<{",
- Token::CloseParenthesis => "<)",
- Token::CloseSquareBracket => "<]",
- Token::CloseCurlyBracket => "<}",
- other => panic!(
- "Token {:?} is not supposed to match as a single-character token!",
- other
- ),
- })
-}
-
fn render_number(signed: bool, num: f32, token: &Token) -> String {
let num = render_int(signed, num);
| diff --git a/scraper/src/error/utils.rs b/scraper/src/error/utils.rs
--- a/scraper/src/error/utils.rs
+++ b/scraper/src/error/utils.rs
@@ -89,3 +78,14 @@ fn render_int_signed(num: f32) -> String {
fn render_int_unsigned(num: f32) -> String {
format!("{}", num)
}
+
+#[cfg(test)]
+mod tests {
+ use crate::Selector;
+
+ #[test]
+ fn regression_test_issue212() {
+ let err = Selector::parse("div138293@!#@!!@#").unwrap_err();
+ assert_eq!(err.to_string(), "Token \"@\" was not expected");
+ }
+}
| Using map_err on a SelectorErrorKind error results in a panic
Hey there, absolutely love your work on this library!
Unfortunately, the way that the `SelectorErrorKind` error behaves is a bit unusual.
```rust
use anyhow::anyhow;
use scraper::Selector;
fn main() {
let selector_result = Selector::parse("div138293@!#@!!@#").map_err(|e| anyhow!("oh no: {e}"));
println!("====> {:?}", selector_result);
}
```
The following code panics instead of gracefully printing the result via `println!`.
```
thread 'main' panicked at C:\Users\Fots\.cargo\registry\src\index.crates.io-6f17d22bba15001f\scraper-0.20.0\src\error\utils.rs:56:18:
Token Delim('@') is not supposed to match as a single-character token!
stack backtrace:
0: std::panicking::begin_panic_handler
at /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14\library/std\src\panicking.rs:662
1: core::panicking::panic_fmt
at /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14\library/core\src\panicking.rs:74
2: scraper::error::utils::render_single_char_token
at C:\Users\Fots\.cargo\registry\src\index.crates.io-6f17d22bba15001f\scraper-0.20.0\src\error\utils.rs:56
3: scraper::error::utils::render_token
at C:\Users\Fots\.cargo\registry\src\index.crates.io-6f17d22bba15001f\scraper-0.20.0\src\error\utils.rs:34
4: scraper::error::impl$3::fmt
at C:\Users\Fots\.cargo\registry\src\index.crates.io-6f17d22bba15001f\scraper-0.20.0\src\error.rs:83
5: core::fmt::rt::Argument::fmt
at /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14\library/core\src\fmt\rt.rs:177
6: core::fmt::write
at /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14\library/core\src\fmt\mod.rs:1178
7: core::fmt::Write::write_fmt::impl$1::spec_write_fmt
at /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14\library/core\src\fmt\mod.rs:226
8: core::fmt::Write::write_fmt
at /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14\library/core\src\fmt\mod.rs:231
9: alloc::fmt::format::format_inner
at /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14\library/alloc\src\fmt.rs:637
10: alloc::fmt::format::closure$0
at /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14\library\alloc\src\fmt.rs:642
11: enum2$<core::option::Option<ref$<str$> > >::map_or_else<ref$<str$>,alloc::string::String,alloc::fmt::format::closure_env$0,alloc::string::String (*)(ref$<str$>)>
at /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14\library\core\src\option.rs:1211
12: alloc::fmt::format
at /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14\library\alloc\src\fmt.rs:642
13: anyhow::__private::format_err
at C:\Users\Fots\.cargo\registry\src\index.crates.io-6f17d22bba15001f\anyhow-1.0.89\src\lib.rs:693
14: ableton_downloads::main::closure$0
at .\src\main.rs:5
15: enum2$<core::result::Result<scraper::selector::Selector,enum2$<scraper::error::SelectorErrorKind> > >::map_err<scraper::selector::Selector,enum2$<scraper::error::SelectorErrorKind>,anyhow::Error,ableton_downloads::main::closure_env$0>
at /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14\library\core\src\result.rs:854
16: ableton_downloads::main
at .\src\main.rs:5
17: core::ops::function::FnOnce::call_once<void (*)(),tuple$<> >
at /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14\library\core\src\ops\function.rs:250
18: core::hint::black_box
at /rustc/f6e511eec7342f59a25f7c0534f1dbea00d01b14\library\core\src\hint.rs:388
note: Some details are omitted, run with `RUST_BACKTRACE=full` for a verbose backtrace.
```
The panic is found at https://github.com/rust-scraper/scraper/blob/master/scraper/src/error/utils.rs#L56.
Just as a bonus, it also is impossible to use the `.context` or `.with_context` anyhow methods to add context to this error enum due to the following trait bounds not being met.
e.g.
```rust
let selector_result = Selector::parse("div138293@!#@!!@#").context("unable to parse selector");
```
Results in the following:
```
error[E0599]: the method `context` exists for enum `Result<Selector, SelectorErrorKind<'_>>`, but its trait bounds were not satisfied
--> src/main.rs:5:64
|
5 | let selector_result = Selector::parse("div138293@!#@!!@#").context("unable to parse selector");
| ^^^^^^^ method cannot be called on `Result<Selector, SelectorErrorKind<'_>>` due to unsatisfied trait bounds
|
::: C:\Users\Fots\.rustup\toolchains\stable-x86_64-pc-windows-msvc\lib/rustlib/src/rust\library\core\src\result.rs:527:1
|
527 | pub enum Result<T, E> {
| --------------------- doesn't satisfy `_: Context<Selector, SelectorErrorKind<'_>>`
|
::: C:\Users\Fots\.cargo\registry\src\index.crates.io-6f17d22bba15001f\scraper-0.20.0\src\error.rs:13:1
|
13 | pub enum SelectorErrorKind<'a> {
| ------------------------------ doesn't satisfy `SelectorErrorKind<'_>: Send`, `SelectorErrorKind<'_>: Sync` or `_: StdError`
|
= note: the following trait bounds were not satisfied:
`SelectorErrorKind<'_>: anyhow::context::ext::StdError`
which is required by `Result<Selector, SelectorErrorKind<'_>>: anyhow::Context<Selector, SelectorErrorKind<'_>>`
`SelectorErrorKind<'_>: Send`
which is required by `Result<Selector, SelectorErrorKind<'_>>: anyhow::Context<Selector, SelectorErrorKind<'_>>`
`SelectorErrorKind<'_>: Sync`
which is required by `Result<Selector, SelectorErrorKind<'_>>: anyhow::Context<Selector, SelectorErrorKind<'_>>`
```
As per the output, I'm testing on Windows using the MSVC Rust toolchain.
Thanks heaps in advance! 😄
Fotis
| I think this is a bug in our `render_single_char_token` function which does not handle `cssparser`'s [`Token::Delim`](https://docs.rs/cssparser/0.31.2/cssparser/enum.Token.html#variant.Delim) yet. I guess this was an oversight when we last updated that dependencies.
We should add a regression test and make sure that `render_token` and `render_single_char_token` together handle all variants. | 2024-10-24T20:03:23 | 0.20 | e0d4ea7a3373b3a75bd79ad85af21243fac93e60 | [
"error::utils::tests::regression_test_issue212"
] | [
"html::tests::html_is_send",
"element_ref::element::tests::test_is_link",
"element_ref::tests::test_scope",
"html::tests::root_element_document_doctype",
"html::tests::root_element_document_comment",
"html::tests::root_element_fragment",
"element_ref::element::tests::test_has_id",
"element_ref::elemen... | [] | [] |
chmln/sd | 115 | chmln__sd-115 | [
"94"
] | 479f0403db6024d331f872ca3a1039db061a8d3b | diff --git a/src/replacer.rs b/src/replacer.rs
--- a/src/replacer.rs
+++ b/src/replacer.rs
@@ -1,6 +1,6 @@
use crate::{utils, Error, Result};
use regex::bytes::Regex;
-use std::{fs::File, io::prelude::*, path::Path};
+use std::{fs, fs::File, io::prelude::*, path::Path};
pub(crate) struct Replacer {
regex: Regex,
diff --git a/src/replacer.rs b/src/replacer.rs
--- a/src/replacer.rs
+++ b/src/replacer.rs
@@ -102,7 +102,7 @@ impl Replacer {
}
let source = File::open(path)?;
- let meta = source.metadata()?;
+ let meta = fs::metadata(path)?;
let mmap_source = unsafe { Mmap::map(&source)? };
let replaced = self.replace(&mmap_source);
diff --git a/src/replacer.rs b/src/replacer.rs
--- a/src/replacer.rs
+++ b/src/replacer.rs
@@ -123,7 +123,7 @@ impl Replacer {
drop(mmap_source);
drop(source);
- target.persist(path)?;
+ target.persist(fs::canonicalize(path)?)?;
Ok(())
}
}
| diff --git a/tests/cli.rs b/tests/cli.rs
--- a/tests/cli.rs
+++ b/tests/cli.rs
@@ -13,6 +13,18 @@ mod cli {
assert_eq!(content, std::fs::read_to_string(path).unwrap());
}
+ fn create_soft_link<P: AsRef<std::path::Path>>(
+ src: &P,
+ dst: &P,
+ ) -> Result<()> {
+ #[cfg(target_family = "unix")]
+ std::os::unix::fs::symlink(src, dst)?;
+ #[cfg(target_family = "windows")]
+ std::os::windows::fs::symlink_file(src, dst)?;
+
+ Ok(())
+ }
+
#[test]
fn in_place() -> Result<()> {
let mut file = tempfile::NamedTempFile::new()?;
diff --git a/tests/cli.rs b/tests/cli.rs
--- a/tests/cli.rs
+++ b/tests/cli.rs
@@ -41,6 +53,26 @@ mod cli {
Ok(())
}
+ #[test]
+ fn in_place_following_symlink() -> Result<()> {
+ let dir = tempfile::tempdir()?;
+ let path = dir.path();
+ let file = path.join("file");
+ let link = path.join("link");
+
+ create_soft_link(&file, &link)?;
+ std::fs::write(&file, "abc123def")?;
+
+ sd().args(&["abc\\d+", "", link.to_str().unwrap()])
+ .assert()
+ .success();
+
+ assert_file(&file.to_path_buf(), "def");
+ assert!(std::fs::symlink_metadata(link)?.file_type().is_symlink());
+
+ Ok(())
+ }
+
#[test]
fn replace_into_stdout() -> Result<()> {
let mut file = tempfile::NamedTempFile::new()?;
| When using sd over a symlink, symlink get replaced by a full-featured file
As stated in the issue title, when sd is used over a symlink, the symlink gets replaced with a complete file rewrited over it, copy of the file the symlink was pointing to.
| 2021-03-14T05:57:26 | 0.7 | 479f0403db6024d331f872ca3a1039db061a8d3b | [
"cli::in_place_following_symlink"
] | [
"replacer::tests::default_global",
"replacer::tests::case_sensitive_default",
"replacer::tests::escaped_char_preservation",
"replacer::tests::unescape_regex_replacements",
"replacer::tests::full_word_replace",
"replacer::tests::sanity_check_literal_replacements",
"replacer::tests::no_unescape_literal_re... | [] | [] | |
SeaQL/sea-orm | 1,953 | SeaQL__sea-orm-1953 | [
"1699"
] | 06c632712f3d167df0cda742dd228661b953ab7f | diff --git a/sea-orm-codegen/src/entity/writer.rs b/sea-orm-codegen/src/entity/writer.rs
--- a/sea-orm-codegen/src/entity/writer.rs
+++ b/sea-orm-codegen/src/entity/writer.rs
@@ -460,15 +460,23 @@ impl EntityWriter {
entity
.columns
.iter()
- .fold(TokenStream::new(), |mut ts, col| {
- if let sea_query::ColumnType::Enum { name, .. } = col.get_inner_col_type() {
- let enum_name = format_ident!("{}", name.to_string().to_upper_camel_case());
- ts.extend([quote! {
- use super::sea_orm_active_enums::#enum_name;
- }]);
- }
- ts
- })
+ .fold(
+ (TokenStream::new(), Vec::new()),
+ |(mut ts, mut enums), col| {
+ if let sea_query::ColumnType::Enum { name, .. } = col.get_inner_col_type() {
+ if !enums.contains(&name) {
+ enums.push(name);
+ let enum_name =
+ format_ident!("{}", name.to_string().to_upper_camel_case());
+ ts.extend([quote! {
+ use super::sea_orm_active_enums::#enum_name;
+ }]);
+ }
+ }
+ (ts, enums)
+ },
+ )
+ .0
}
pub fn gen_model_struct(
| diff --git a/sea-orm-codegen/src/entity/writer.rs b/sea-orm-codegen/src/entity/writer.rs
--- a/sea-orm-codegen/src/entity/writer.rs
+++ b/sea-orm-codegen/src/entity/writer.rs
@@ -814,7 +822,8 @@ mod tests {
};
use pretty_assertions::assert_eq;
use proc_macro2::TokenStream;
- use sea_query::{ColumnType, ForeignKeyAction, RcOrArc};
+ use quote::quote;
+ use sea_query::{Alias, ColumnType, ForeignKeyAction, RcOrArc, SeaRc};
use std::io::{self, BufRead, BufReader, Read};
fn setup() -> Vec<Entity> {
diff --git a/sea-orm-codegen/src/entity/writer.rs b/sea-orm-codegen/src/entity/writer.rs
--- a/sea-orm-codegen/src/entity/writer.rs
+++ b/sea-orm-codegen/src/entity/writer.rs
@@ -2275,4 +2284,129 @@ mod tests {
Ok(())
}
+
+ #[test]
+ fn test_gen_import_active_enum() -> io::Result<()> {
+ let entities = vec![
+ Entity {
+ table_name: "tea_pairing".to_owned(),
+ columns: vec![
+ Column {
+ name: "id".to_owned(),
+ col_type: ColumnType::Integer,
+ auto_increment: true,
+ not_null: true,
+ unique: false,
+ },
+ Column {
+ name: "first_tea".to_owned(),
+ col_type: ColumnType::Enum {
+ name: SeaRc::new(Alias::new("tea_enum")),
+ variants: vec![
+ SeaRc::new(Alias::new("everyday_tea")),
+ SeaRc::new(Alias::new("breakfast_tea")),
+ ],
+ },
+ auto_increment: false,
+ not_null: true,
+ unique: false,
+ },
+ Column {
+ name: "second_tea".to_owned(),
+ col_type: ColumnType::Enum {
+ name: SeaRc::new(Alias::new("tea_enum")),
+ variants: vec![
+ SeaRc::new(Alias::new("everyday_tea")),
+ SeaRc::new(Alias::new("breakfast_tea")),
+ ],
+ },
+ auto_increment: false,
+ not_null: true,
+ unique: false,
+ },
+ ],
+ relations: vec![],
+ conjunct_relations: vec![],
+ primary_keys: vec![PrimaryKey {
+ name: "id".to_owned(),
+ }],
+ },
+ Entity {
+ table_name: "tea_pairing_with_size".to_owned(),
+ columns: vec![
+ Column {
+ name: "id".to_owned(),
+ col_type: ColumnType::Integer,
+ auto_increment: true,
+ not_null: true,
+ unique: false,
+ },
+ Column {
+ name: "first_tea".to_owned(),
+ col_type: ColumnType::Enum {
+ name: SeaRc::new(Alias::new("tea_enum")),
+ variants: vec![
+ SeaRc::new(Alias::new("everyday_tea")),
+ SeaRc::new(Alias::new("breakfast_tea")),
+ ],
+ },
+ auto_increment: false,
+ not_null: true,
+ unique: false,
+ },
+ Column {
+ name: "second_tea".to_owned(),
+ col_type: ColumnType::Enum {
+ name: SeaRc::new(Alias::new("tea_enum")),
+ variants: vec![
+ SeaRc::new(Alias::new("everyday_tea")),
+ SeaRc::new(Alias::new("breakfast_tea")),
+ ],
+ },
+ auto_increment: false,
+ not_null: true,
+ unique: false,
+ },
+ Column {
+ name: "size".to_owned(),
+ col_type: ColumnType::Enum {
+ name: SeaRc::new(Alias::new("tea_size")),
+ variants: vec![
+ SeaRc::new(Alias::new("small")),
+ SeaRc::new(Alias::new("medium")),
+ SeaRc::new(Alias::new("huge")),
+ ],
+ },
+ auto_increment: false,
+ not_null: true,
+ unique: false,
+ },
+ ],
+ relations: vec![],
+ conjunct_relations: vec![],
+ primary_keys: vec![PrimaryKey {
+ name: "id".to_owned(),
+ }],
+ },
+ ];
+
+ assert_eq!(
+ quote!(
+ use super::sea_orm_active_enums::TeaEnum;
+ )
+ .to_string(),
+ EntityWriter::gen_import_active_enum(&entities[0]).to_string()
+ );
+
+ assert_eq!(
+ quote!(
+ use super::sea_orm_active_enums::TeaEnum;
+ use super::sea_orm_active_enums::TeaSize;
+ )
+ .to_string(),
+ EntityWriter::gen_import_active_enum(&entities[1]).to_string()
+ );
+
+ Ok(())
+ }
}
| sea-orm-codegen: Enum `use` statements getting generated multiple times
<!--
Welcome! Thank you for reporting bugs!
First of all, please star our repo. Your support is vital to the continued maintenance of SeaORM.
Want to ask a question? You can reach us via:
- Discord: https://discord.com/invite/uCPdDXzbdv
- GitHub Discussions: https://github.com/SeaQL/sea-orm/discussions/new
Please make sure that you are not asking for a missing feature; a bug is incorrect behavior -
either in the feature specification or implementation.
Please also make sure your description is clear and precise - maintainers don't have access to your
code and can't see what you have seen. Please avoid vague descriptions like "they are different"
or "the program crashes" - in either case, provide exact information.
If you are certain there is a bug, please provide a reproducible example, which helps the investigator
to pin-point the bug and the implementor to verify that a solution is satisfactory. Bug reports without
reproducible example may be closed immediately or dangle forever.
Finally, please search for existing issues and discussions before submission. Feel free to revive old
threads if you have new information to add, but please don't ask for ETA or "+1".
-->
## Description
When using PostgreSQL, a table that has two columns using the same enum causes the generated entity to have duplicate `use` statements, which cause a compilation error.
## Steps to Reproduce
1. Create a migration like this:
<details>
```Rust
use sea_orm_migration::prelude::*;
use sea_orm_migration::prelude::extension::postgres::Type;
use sea_orm_migration::sea_orm::DbBackend;
#[derive(DeriveMigrationName)]
pub struct Migration;
#[async_trait::async_trait]
impl MigrationTrait for Migration {
async fn up(&self, manager: &SchemaManager) -> Result<(), DbErr> {
assert_eq!(manager.get_database_backend(), DbBackend::Postgres);
manager.create_type(Type::create().as_enum(MyEnum::Table).values([MyEnum::A, MyEnum::B]).to_owned()).await?;
manager
.create_table(
Table::create()
.table(MyTable::Table)
.if_not_exists()
.col(
ColumnDef::new(MyTable::Id)
.integer()
.not_null()
.auto_increment()
.primary_key(),
)
.col(ColumnDef::new(MyTable::Col1).enumeration(MyEnum::Table, [MyEnum::A, MyEnum::B]).not_null())
.col(ColumnDef::new(MyTable::Col2).enumeration(MyEnum::Table, [MyEnum::A, MyEnum::B]).not_null())
.to_owned(),
)
.await
}
async fn down(&self, manager: &SchemaManager) -> Result<(), DbErr> {
manager
.drop_table(Table::drop().table(MyTable::Table).to_owned())
.await
}
}
/// Learn more at https://docs.rs/sea-query#iden
#[derive(Iden)]
enum MyTable {
Table,
Id,
Col1,
Col2,
}
#[derive(Iden)]
enum MyEnum {
Table,
A,
B,
}
```
</details>
2. Migrate: `sea migrate`
3. Generate entities: `sea generate entity -o src/entities`
4. Have a look at the generated file (in this case: `src/entities/my_table.rs`)
### Expected Behavior
The generated file has only one import and compiles without errors.
Generated file should look like this:
```Rust
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.0
use super::sea_orm_active_enums::MyEnum;
use sea_orm::entity::prelude::*;
/* ... */
```
### Actual Behavior
Generated file has duplicate use statements and looks like this:
```Rust
//! `SeaORM` Entity. Generated by sea-orm-codegen 0.12.0
use super::sea_orm_active_enums::MyEnum;
use super::sea_orm_active_enums::MyEnum;
use sea_orm::entity::prelude::*;
/* ... */
```
### Reproduces How Often
Always
### Workarounds
1. Manually remove the duplicate `use` statement each time
2. I've implemented a simple fix in https://github.com/SeaQL/sea-orm/commit/7cf319556d17c57cb95a91b96b573468a310bc44
Note: I tried to fix this and manage to solve it for my project where the bug originally showed up.
However, I would have liked to add some kind of tests for that but I didn't really understand how. - https://github.com/SeaQL/sea-orm/blob/master/CONTRIBUTING.md did state how to run all the tests (which were successful for my commit) but not how the test system works and where to add my own tests.
I currently don't have the time to try to understand how tests work, so unfortunately, I don't think it would be much help if I opened a PR myself (or would it?).
So if someone wants to pick this up - feel free to use my changes, the main thing that's missing is some test.
## Reproducible Example
Currently, it does not seem like I will have time to add a PR for that unfortunately.
## Versions
> sea-orm-cli: master (1431d80)
> Postgres (14.5 - should not matter)
> Windows 10 (should not matter)
| 2023-11-02T09:02:02 | 0.12 | c6e1a84a120dff5a73abe450fb56ed4483d9413d | [
"entity::writer::tests::test_gen_import_active_enum"
] | [
"entity::base_entity::tests::test_get_conjunct_relations_to_upper_camel_case",
"entity::base_entity::tests::test_get_conjunct_relations_via_snake_case",
"entity::base_entity::tests::test_get_eq_needed",
"entity::base_entity::tests::test_get_column_names_snake_case",
"entity::base_entity::tests::test_get_col... | [] | [] | |
SeaQL/sea-orm | 1,821 | SeaQL__sea-orm-1821 | [
"1819"
] | 131f9f11230b7fd2d318870fd0c2d441c80ed734 | diff --git a/sea-orm-codegen/src/entity/active_enum.rs b/sea-orm-codegen/src/entity/active_enum.rs
--- a/sea-orm-codegen/src/entity/active_enum.rs
+++ b/sea-orm-codegen/src/entity/active_enum.rs
@@ -26,7 +26,18 @@ impl ActiveEnum {
if v.chars().next().map(char::is_numeric).unwrap_or(false) {
format_ident!("_{}", v)
} else {
- format_ident!("{}", v.to_upper_camel_case())
+ let variant_name = v.to_upper_camel_case();
+ if variant_name.is_empty() {
+ println!("Warning: item '{}' in the enumeration '{}' cannot be converted into a valid Rust enum member name. It will be converted to its corresponding UTF-8 encoding. You can modify it later as needed.", v, enum_name);
+ let mut utf_string = String::new();
+ for c in v.chars() {
+ utf_string.push('U');
+ utf_string.push_str(&format!("{:04X}", c as u32));
+ }
+ format_ident!("{}", utf_string)
+ } else {
+ format_ident!("{}", variant_name)
+ }
}
});
| diff --git a/sea-orm-codegen/src/entity/active_enum.rs b/sea-orm-codegen/src/entity/active_enum.rs
--- a/sea-orm-codegen/src/entity/active_enum.rs
+++ b/sea-orm-codegen/src/entity/active_enum.rs
@@ -223,4 +234,53 @@ mod tests {
.to_string()
)
}
+
+ #[test]
+ fn test_enum_variant_utf8_encode() {
+ assert_eq!(
+ ActiveEnum {
+ enum_name: Alias::new("ty").into_iden(),
+ values: vec![
+ "Question",
+ "QuestionsAdditional",
+ "Answer",
+ "Other",
+ "/",
+ "//",
+ "A-B-C",
+ ]
+ .into_iter()
+ .map(|variant| Alias::new(variant).into_iden())
+ .collect(),
+ }
+ .impl_active_enum(
+ &WithSerde::None,
+ true,
+ &TokenStream::new(),
+ &TokenStream::new(),
+ )
+ .to_string(),
+ quote!(
+ #[derive(Debug, Clone, PartialEq, Eq, EnumIter, DeriveActiveEnum, Copy)]
+ #[sea_orm(rs_type = "String", db_type = "Enum", enum_name = "ty")]
+ pub enum Ty {
+ #[sea_orm(string_value = "Question")]
+ Question,
+ #[sea_orm(string_value = "QuestionsAdditional")]
+ QuestionsAdditional,
+ #[sea_orm(string_value = "Answer")]
+ Answer,
+ #[sea_orm(string_value = "Other")]
+ Other,
+ #[sea_orm(string_value = "/")]
+ U002F,
+ #[sea_orm(string_value = "//")]
+ U002FU002F,
+ #[sea_orm(string_value = "A-B-C")]
+ ABC,
+ }
+ )
+ .to_string()
+ )
+ }
}
| `sea-orm-cli` entities generator panics when an item in EnumType consist entirely without any letters or numbers
<!--
Welcome! Thank you for reporting bugs!
First of all, please star our repo. Your support is vital to the continued maintenance of SeaORM.
Want to ask a question? You can reach us via:
- Discord: https://discord.com/invite/uCPdDXzbdv
- GitHub Discussions: https://github.com/SeaQL/sea-orm/discussions/new
Please make sure that you are not asking for a missing feature; a bug is incorrect behavior -
either in the feature specification or implementation.
Please also make sure your description is clear and precise - maintainers don't have access to your
code and can't see what you have seen. Please avoid vague descriptions like "they are different"
or "the program crashes" - in either case, provide exact information.
If you are certain there is a bug, please provide a reproducible example, which helps the investigator
to pin-point the bug and the implementor to verify that a solution is satisfactory. Bug reports without
reproducible example may be closed immediately or dangle forever.
Finally, please search for existing issues and discussions before submission. Feel free to revive old
threads if you have new information to add, but please don't ask for ETA or "+1".
-->
## Description
When I use this enum type in MySQL:
```sql
ENUM('Question','QuestionsAdditional','Answer','Other','/')
```
The `sea-orm-cli` entities generator will panic because it cannot convert '/' into an enum item name in Rust. That's just fine, but what troubles me is the `sea-orm-cli` didn't give a clear explanation for this panic. It just shows this:
```
thread 'main' panicked at 'Ident is not allowed to be empty; use Option<Ident>', <HomePath>\.cargo\registry\src\mirrors.ustc.edu.cn-61ef6e0cd06fb9b8\proc-macro2-1.0.66\src\fallback.rs:752:9
```
this is not really a bug itself, but we could probably provide users with a clearer error message or do some extra handling to accommodate these values. And if you'd be willing to first introduce a friendly error message as a transitional step before proposing other better approaches later, I'd be happy to provide a pull request for that.
Code related are mainly in this file:`sea-orm-codegen/src/entity/active_enum.rs:23`
## Steps to Reproduce
1. Setup a MySQL server, and run this:
```sql
CREATE TABLE `test` (
`enum_column` enum('A','B','/') NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
```
2. Try to use `sea-orm-cli` to generate entity
### Expected Behavior
A friendly panic message is given, maybe saying 'We currently do not support enum items consisting entirely without any letters or numbers.', or just process this situation fine.
### Actual Behavior
A confusing error message unrelated to the specific cause was given.
### Reproduces How Often
Always
### Workarounds
this is fine :)
```sql
CREATE TABLE `test` (
`enum_column` enum('A','B','/') NOT NULL
) ENGINE=InnoDB DEFAULT CHARSET=utf8mb4
```
## Reproducible Example
<!-- Please add a minimal reproducible example under https://github.com/SeaQL/sea-orm/tree/master/issues, and open a PR subsequently. -->
## Versions
<!-- You can get this information from the output of `cargo tree | grep sea-` from the console. Also, please include the database and OS that you are running. -->

| What's more, I changed the source code of `sea-orm-codegen` to solve my problem, and it works fine. If you think this is ok, I can run the test and make a pull request.
`sea-orm-codegen/src/entity/active_enum.rs`
```rs
impl ActiveEnum {
pub fn impl_active_enum(&self, with_serde: &WithSerde, with_copy_enums: bool) -> TokenStream {
let enum_name = &self.enum_name.to_string();
let enum_iden = format_ident!("{}", enum_name.to_upper_camel_case());
let values: Vec<String> = self.values.iter().map(|v| v.to_string()).collect();
let variants = values.iter().map(|v| v.trim()).map(|v| {
if v.chars().next().map(char::is_numeric).unwrap_or(false) {
format_ident!("_{}", v)
} else {
let new_name = v.to_upper_camel_case();
if new_name.is_empty() {
// We can't find numbers or letters in the variant name.
let mut utf_string = String::new();
for c in v.chars() {
utf_string.push('U');
utf_string.push_str(&format!("{:04X}", c as u32));
}
format_ident!("{}", utf_string)
} else {
format_ident!("{}", new_name)
}
}
});
let extra_derive = with_serde.extra_derive();
let copy_derive = if with_copy_enums {
quote! { , Copy }
} else {
quote! {}
};
quote! {
#[derive(Debug, Clone, PartialEq, Eq, EnumIter, DeriveActiveEnum #copy_derive #extra_derive)]
#[sea_orm(rs_type = "String", db_type = "Enum", enum_name = #enum_name)]
pub enum #enum_iden {
#(
#[sea_orm(string_value = #values)]
#variants,
)*
}
}
}
}
```
For sure! A PR on better error messages and enum handling would be appreciated. | 2023-08-22T18:03:15 | 0.12 | c6e1a84a120dff5a73abe450fb56ed4483d9413d | [
"entity::active_enum::tests::test_enum_variant_utf8_encode"
] | [
"entity::active_enum::tests::test_enum_extra_derives",
"entity::base_entity::tests::test_get_column_names_snake_case",
"entity::base_entity::tests::test_get_column_rs_types",
"entity::base_entity::tests::test_get_conjunct_relations_to_upper_camel_case",
"entity::base_entity::tests::test_get_conjunct_relatio... | [] | [] |
SeaQL/sea-orm | 226 | SeaQL__sea-orm-226 | [
"225"
] | 125ee401610038ea0200142676353b6094cda8e5 | diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -27,8 +27,6 @@ impl Column {
ColumnType::Char(_)
| ColumnType::String(_)
| ColumnType::Text
- | ColumnType::Time(_)
- | ColumnType::Date
| ColumnType::Custom(_) => "String",
ColumnType::TinyInteger(_) => "i8",
ColumnType::SmallInteger(_) => "i16",
diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -37,6 +35,8 @@ impl Column {
ColumnType::Float(_) => "f32",
ColumnType::Double(_) => "f64",
ColumnType::Json | ColumnType::JsonBinary => "Json",
+ ColumnType::Date => "Date",
+ ColumnType::Time(_) => "Time",
ColumnType::DateTime(_) | ColumnType::Timestamp(_) => "DateTime",
ColumnType::TimestampWithTimeZone(_) => "DateTimeWithTimeZone",
ColumnType::Decimal(_) | ColumnType::Money(_) => "Decimal",
| diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -194,6 +194,11 @@ mod tests {
make_col!("CAKE_FILLING_ID", ColumnType::Double(None)),
make_col!("CAKE-FILLING-ID", ColumnType::Binary(None)),
make_col!("CAKE", ColumnType::Boolean),
+ make_col!("date", ColumnType::Date),
+ make_col!("time", ColumnType::Time(None)),
+ make_col!("date_time", ColumnType::DateTime(None)),
+ make_col!("timestamp", ColumnType::Timestamp(None)),
+ make_col!("timestamp_tz", ColumnType::TimestampWithTimeZone(None)),
]
}
diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -211,6 +216,11 @@ mod tests {
"cake_filling_id",
"cake_filling_id",
"cake",
+ "date",
+ "time",
+ "date_time",
+ "timestamp",
+ "timestamp_tz",
];
for (col, snack_case) in columns.into_iter().zip(snack_cases) {
assert_eq!(col.get_name_snake_case().to_string(), snack_case);
diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -231,6 +241,11 @@ mod tests {
"CakeFillingId",
"CakeFillingId",
"Cake",
+ "Date",
+ "Time",
+ "DateTime",
+ "Timestamp",
+ "TimestampTz",
];
for (col, camel_case) in columns.into_iter().zip(camel_cases) {
assert_eq!(col.get_name_camel_case().to_string(), camel_case);
diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -241,7 +256,21 @@ mod tests {
fn test_get_rs_type() {
let columns = setup();
let rs_types = vec![
- "String", "String", "i8", "i16", "i32", "i64", "f32", "f64", "Vec<u8>", "bool",
+ "String",
+ "String",
+ "i8",
+ "i16",
+ "i32",
+ "i64",
+ "f32",
+ "f64",
+ "Vec<u8>",
+ "bool",
+ "Date",
+ "Time",
+ "DateTime",
+ "DateTime",
+ "DateTimeWithTimeZone",
];
for (mut col, rs_type) in columns.into_iter().zip(rs_types) {
let rs_type: TokenStream = rs_type.parse().unwrap();
diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -271,6 +300,11 @@ mod tests {
"ColumnType::Double.def()",
"ColumnType::Binary.def()",
"ColumnType::Boolean.def()",
+ "ColumnType::Date.def()",
+ "ColumnType::Time.def()",
+ "ColumnType::DateTime.def()",
+ "ColumnType::Timestamp.def()",
+ "ColumnType::TimestampWithTimeZone.def()",
];
for (mut col, col_def) in columns.into_iter().zip(col_defs) {
let mut col_def: TokenStream = col_def.parse().unwrap();
| sea-orm-cli: does not recognize SQL Date on Postgres
```
sea-orm = {
version = "^0.2.4",
features = ["sqlx-postgres", "macros", "runtime-async-std-native-tls", ],
default-features = false
}
```
sea-orm-cli: 0.2.4
---
Creating a Postgres tables with:
Postgres sql:
```sql
create table stores
(
...
opening_date date not null,
...
);
```
Generating entities by **sea-orm-cli**
```console
sea-orm-cli generate entity
-u postgres://user:password@host:port/postgres
-o src/entity
```
results in:
```rust
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "stores")]
pub struct Model {
...
pub opening_date: String,
...
}
```
executing:
```rust
let orm = entity::stores::ActiveModel {
...
opening_date: Set(input.open_date.to_owned()),
...
};
```
🚨 leads to:
```console
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ expected struct `std::string::String`, found struct `NaiveDate`
```
As expected, manually replacing **String** with **Chrono:NaiveDate** fixes the error. But the CLI produces every time again String as type for SQL Date.
| Hey @shi-rudo, thanks for the feedback! Just found the faulty lines. I will create a PR fixing it ASAP.
https://github.com/SeaQL/sea-orm/blob/632290469b649d40b45760ff00f687a1467c5508/sea-orm-codegen/src/entity/column.rs#L30-L31
Please change... for now
```rust
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "stores")]
pub struct Model {
...
pub opening_date: Date, // Change `String` to `Date`
...
}
```
thanks for the fast response!
By the way, the same thing happens with SQL Time `pub opening_time: String` should be `Time` resp. `NaiveTime` Just a note in case you didn't notice this type in the next PR.
Yes, I will fix both `NaiveDate` & `NaiveTime` mappings. Thanks!! | 2021-10-05T23:03:30 | 0.2 | 8d06f4b9e9c2816cc09141d96df3b365cd6fafbd | [
"entity::column::tests::test_get_rs_type"
] | [
"entity::base_entity::tests::test_get_column_defs",
"entity::base_entity::tests::test_get_column_names_camel_case",
"entity::base_entity::tests::test_get_column_names_snake_case",
"entity::base_entity::tests::test_get_conjunct_relations_to_snake_case",
"entity::base_entity::tests::test_get_primary_key_names... | [] | [] |
SeaQL/sea-orm | 746 | SeaQL__sea-orm-746 | [
"704"
] | a0a2492a921c92b47cabff95be7f25ed0a4ebad2 | diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -68,7 +68,7 @@ impl Column {
},
ColumnType::Decimal(_) | ColumnType::Money(_) => "Decimal".to_owned(),
ColumnType::Uuid => "Uuid".to_owned(),
- ColumnType::Binary(_) => "Vec<u8>".to_owned(),
+ ColumnType::Binary(_) | ColumnType::VarBinary(_) => "Vec<u8>".to_owned(),
ColumnType::Boolean => "bool".to_owned(),
ColumnType::Enum(name, _) => name.to_camel_case(),
_ => unimplemented!(),
diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -128,7 +128,7 @@ impl Column {
}
ColumnType::Time(_) => quote! { ColumnType::Time.def() },
ColumnType::Date => quote! { ColumnType::Date.def() },
- ColumnType::Binary(BlobSize::Blob(_)) => quote! { ColumnType::Binary.def() },
+ ColumnType::Binary(BlobSize::Blob(_)) | ColumnType::VarBinary(_) => quote! { ColumnType::Binary.def() },
ColumnType::Binary(BlobSize::Tiny) => quote! { ColumnType::TinyBinary.def() },
ColumnType::Binary(BlobSize::Medium) => quote! { ColumnType::MediumBinary.def() },
ColumnType::Binary(BlobSize::Long) => quote! { ColumnType::LongBinary.def() },
| diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -266,6 +266,7 @@ mod tests {
make_col!("cake-filling-id", ColumnType::Float(None)),
make_col!("CAKE_FILLING_ID", ColumnType::Double(None)),
make_col!("CAKE-FILLING-ID", ColumnType::Binary(BlobSize::Blob(None))),
+ make_col!("CAKE-FILLING-ID", ColumnType::VarBinary(10)),
make_col!("CAKE", ColumnType::Boolean),
make_col!("date", ColumnType::Date),
make_col!("time", ColumnType::Time(None)),
diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -292,6 +293,7 @@ mod tests {
"cake_filling_id",
"cake_filling_id",
"cake_filling_id",
+ "cake_filling_id",
"cake",
"date",
"time",
diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -321,6 +323,7 @@ mod tests {
"CakeFillingId",
"CakeFillingId",
"CakeFillingId",
+ "CakeFillingId",
"Cake",
"Date",
"Time",
diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -351,6 +354,7 @@ mod tests {
"f32",
"f64",
"Vec<u8>",
+ "Vec<u8>",
"bool",
"Date",
"Time",
diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -393,6 +397,7 @@ mod tests {
"f32",
"f64",
"Vec<u8>",
+ "Vec<u8>",
"bool",
"TimeDate",
"TimeTime",
diff --git a/sea-orm-codegen/src/entity/column.rs b/sea-orm-codegen/src/entity/column.rs
--- a/sea-orm-codegen/src/entity/column.rs
+++ b/sea-orm-codegen/src/entity/column.rs
@@ -434,6 +439,7 @@ mod tests {
"ColumnType::Float.def()",
"ColumnType::Double.def()",
"ColumnType::Binary.def()",
+ "ColumnType::Binary.def()",
"ColumnType::Boolean.def()",
"ColumnType::Date.def()",
"ColumnType::Time.def()",
| mismatched types; Rust type `core::option::Option<alloc::string::String>` (as SQL type `VARCHAR`) is not compatible with SQL type `VARBINARY`
<!--
Welcome! Thanks for reporting bugs!
Do you want to ask a question? Are you looking for support?
Please ask us on
- Discord: https://discord.com/invite/uCPdDXzbdv
- GitHub Discussion
-->
## Description
create table with field like `varbinary(20)` will result in error with `find_by_id(id)`
```
error occurred while decoding column
mismatched types; Rust type `core::option::Option<alloc::string::String>` (as SQL type `VARCHAR`) is not compatible with SQL type `VARBINARY`
```
<!-- describe the bug -->
## Steps to Reproduce
1. create table `demo` which has a `varbinary(20)` field
```sql
CREATE TABLE `demo` (
`id` int unsigned NOT NULL AUTO_INCREMENT,
`secret` varbinary(20) NOT NULL,
PRIMARY KEY (`id`)
) ENGINE=InnoDB DEFAULT CHARSET=utf8
```
2. insert some demo data:
```sql
INSERT INTO demo (secret) VALUES (UNHEX('b2df817c8b751a3ae3d8ba1de4dc8369'));
select * from demo;
+----+------------------------------------+
| id | secret |
+----+------------------------------------+
| 1 | 0xb2df817c8b751a3ae3d8ba1de4dc8369 |
+----+------------------------------------+
```
3. generate entity using cli: `sea-orm-cli generate entity -t demo`
and we'll got
```rust
//! SeaORM Entity. Generated by sea-orm-codegen 0.8.0
use sea_orm::entity::prelude::*;
#[derive(Clone, Debug, PartialEq, DeriveEntityModel)]
#[sea_orm(table_name = "demo")]
pub struct Model {
#[sea_orm(primary_key)]
pub id: u32,
#[sea_orm(column_type = "Custom(\"VARBINARY(20)\".to_owned())")]
pub secret: String,
}
#[derive(Copy, Clone, Debug, EnumIter)]
pub enum Relation {}
impl RelationTrait for Relation {
fn def(&self) -> RelationDef {
panic!("No RelationDef")
}
}
impl ActiveModelBehavior for ActiveModel {}
```
5. fetch one row data:
```rust
use super::demo::Entity as Demo;
let hello: Option<demo::Model> = Demo::find_by_id(1).one(db).await?;
```
and we got the errror "mismatched types; Rust type core::option::Option<alloc::string::String> (as SQL type VARCHAR) is not compatible with SQL type VARBINARY"
### Expected Behavior
sea-orm-cli generate entity with table has `VARBINARY` fields execute `find_by_id` query has no problem
### Actual Behavior
sea-orm-cli generate entity with table has `VARBINARY` fields execute `find_by_id` query got "mismatched types; Rust type core::option::Option<alloc::string::String> (as SQL type VARCHAR) is not compatible with SQL type VARBINARY"
### Reproduces How Often
always reproducible
## Versions
<!-- you can get this information from copy and pasting the output of `cargo tree | grep sea-` from the console. Also, please include the database and OS that you are running. -->
## Additional Information
<!-- please provide a minimal reproducible example, if possible. Any other additional information that might be helpful. -->
| Hello, @ttys3! Thank you for the report. Can write a small example which reproduces error?
And I think varbinary is not equal to String. You can use Binary instead of String.
Hey @ttys3, I think `varbinary` should represented as `Vec<u8>` in Rust
@ikrivosheev @billy1624 I updated the issue with re-produce steps.
I think the problem is sea-orm-cli generate entity does not support table has VARBINARY fields
Ah... I understand, problem is in generator. Thank you for the report. | 2022-05-18T15:56:49 | 0.9 | 87a9eb9d15674830a35ef0c60c46d19fd364590e | [
"entity::column::tests::test_get_rs_type_with_chrono",
"entity::column::tests::test_get_rs_type_with_time",
"entity::column::tests::test_get_def"
] | [
"entity::base_entity::tests::test_get_column_defs",
"entity::base_entity::tests::test_get_conjunct_relations_to_camel_case",
"entity::base_entity::tests::test_get_column_rs_types",
"entity::base_entity::tests::test_get_column_names_camel_case",
"entity::base_entity::tests::test_get_column_names_snake_case",... | [] | [] |
skim-rs/skim | 563 | skim-rs__skim-563 | [
"194",
"495"
] | 8a579837daaeb71938c635aa5352cbde9669d5f8 | diff --git a/src/ansi.rs b/src/ansi.rs
--- a/src/ansi.rs
+++ b/src/ansi.rs
@@ -75,7 +75,7 @@ impl Perform for ANSIParser {
match code[0] {
0 => attr = Attr::default(),
1 => attr.effect |= Effect::BOLD,
- 2 => attr.effect |= !Effect::BOLD,
+ 2 => attr.effect |= Effect::DIM,
4 => attr.effect |= Effect::UNDERLINE,
5 => attr.effect |= Effect::BLINK,
7 => attr.effect |= Effect::REVERSE,
| diff --git a/src/ansi.rs b/src/ansi.rs
--- a/src/ansi.rs
+++ b/src/ansi.rs
@@ -606,4 +606,21 @@ mod tests {
assert_eq!(Some(('a', highlight)), it.next());
assert_eq!(None, it.next());
}
+
+ #[test]
+ fn test_ansi_dim() {
+ // https://github.com/lotabout/skim/issues/495
+ let input = "\x1B[2mhi\x1b[0m";
+ let ansistring = ANSIParser::default().parse_ansi(input);
+ let mut it = ansistring.iter();
+ let attr = Attr {
+ effect: Effect::DIM,
+ ..Attr::default()
+ };
+
+ assert_eq!(Some(('h', attr)), it.next());
+ assert_eq!(Some(('i', attr)), it.next());
+ assert_eq!(None, it.next());
+ assert_eq!(ansistring.stripped(), "hi");
+ }
}
| Color not working with `ag`
When using silver searcher, coloring of file path and line numbers aren't working.
Used the following command:
```
$ sk --ansi -i -c 'ag --color "{}"'
```
Dimmed colors not properly displayed
Whenever an item in the list of entries passed to `skim` is _dimmed_, `skim` does not properly understand this and translate it rather as _dimmed/underlined/reversed/blinking_. Example:
```sh
echo -e "\x1b[2;33mDIM\x1b[0m\nnormal\nnormal2" | sk --ansi
```
In other words, `skim` understands `2;33` as `2;4;5;7;33`.
Tested on 0.9.4 and 0.10.1.
| Confirmed. Will fix when I got time.
| 2024-04-03T00:37:59 | 0.10 | 8a579837daaeb71938c635aa5352cbde9669d5f8 | [
"ansi::tests::test_ansi_dim"
] | [
"ansi::tests::test_multi_bytes",
"ansi::tests::test_ansi_iterator",
"ansi::tests::test_highlight_indices",
"ansi::tests::test_merge_fragments",
"ansi::tests::test_multi_byte_359",
"ansi::tests::test_multiple_attributes",
"ansi::tests::test_normal_string",
"ansi::tests::test_reset",
"field::test::tes... | [] | [] |
skim-rs/skim | 362 | skim-rs__skim-362 | [
"359"
] | bedadf1a37d50a56a2cf279b7289cbef5b3ca206 | diff --git a/src/ansi.rs b/src/ansi.rs
--- a/src/ansi.rs
+++ b/src/ansi.rs
@@ -337,7 +337,7 @@ impl<'a> From<(&'a str, &'a [usize], Attr)> for AnsiString<'a> {
pub struct AnsiStringIterator<'a> {
fragments: &'a [(Attr, (u32, u32))],
fragment_idx: usize,
- chars_iter: std::str::CharIndices<'a>,
+ chars_iter: std::iter::Enumerate<std::str::Chars<'a>>,
}
impl<'a> AnsiStringIterator<'a> {
diff --git a/src/ansi.rs b/src/ansi.rs
--- a/src/ansi.rs
+++ b/src/ansi.rs
@@ -345,7 +345,7 @@ impl<'a> AnsiStringIterator<'a> {
Self {
fragments,
fragment_idx: 0,
- chars_iter: stripped.char_indices(),
+ chars_iter: stripped.chars().enumerate(),
}
}
}
diff --git a/src/helper/item.rs b/src/helper/item.rs
--- a/src/helper/item.rs
+++ b/src/helper/item.rs
@@ -119,9 +119,9 @@ impl SkimItem for DefaultSkimItem {
.collect(),
Matches::CharRange(start, end) => vec![(context.highlight_attr, (start as u32, end as u32))],
Matches::ByteRange(start, end) => {
- let start = context.text[..start].chars().count();
- let end = start + context.text[start..end].chars().count();
- vec![(context.highlight_attr, (start as u32, end as u32))]
+ let ch_start = context.text[..start].chars().count();
+ let ch_end = ch_start + context.text[start..end].chars().count();
+ vec![(context.highlight_attr, (ch_start as u32, ch_end as u32))]
}
Matches::None => vec![],
};
diff --git a/src/lib.rs b/src/lib.rs
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -164,9 +164,12 @@ impl<'a> From<DisplayContext<'a>> for AnsiString<'a> {
AnsiString::new_str(context.text, vec![(context.highlight_attr, (start as u32, end as u32))])
}
Matches::ByteRange(start, end) => {
- let start = context.text[..start].chars().count();
- let end = start + context.text[start..end].chars().count();
- AnsiString::new_str(context.text, vec![(context.highlight_attr, (start as u32, end as u32))])
+ let ch_start = context.text[..start].chars().count();
+ let ch_end = ch_start + context.text[start..end].chars().count();
+ AnsiString::new_str(
+ context.text,
+ vec![(context.highlight_attr, (ch_start as u32, ch_end as u32))],
+ )
}
Matches::None => AnsiString::new_str(context.text, vec![]),
}
| diff --git a/src/ansi.rs b/src/ansi.rs
--- a/src/ansi.rs
+++ b/src/ansi.rs
@@ -581,4 +581,16 @@ mod tests {
vec![(ao, (1, 2)), (an, (2, 6)), (ao, (6, 7)), (ao, (9, 11))]
);
}
+
+ #[test]
+ fn test_multi_byte_359() {
+ // https://github.com/lotabout/skim/issues/359
+ let highlight = Attr::default().effect(Effect::BOLD);
+ let ansistring = AnsiString::new_str("ああa", vec![(highlight, (2, 3))]);
+ let mut it = ansistring.iter();
+ assert_eq!(Some(('あ', Attr::default())), it.next());
+ assert_eq!(Some(('あ', Attr::default())), it.next());
+ assert_eq!(Some(('a', highlight)), it.next());
+ assert_eq!(None, it.next());
+ }
}
diff --git a/test/test_skim.py b/test/test_skim.py
--- a/test/test_skim.py
+++ b/test/test_skim.py
@@ -1063,6 +1063,10 @@ def test_preview_scroll_and_offset(self):
self.tmux.until(lambda lines: re.match(r'121.*121/1000', lines[0]))
self.tmux.send_keys(Key('Enter'))
+ def test_issue_359_multi_byte_and_regex(self):
+ self.tmux.send_keys(f"""echo 'ああa' | {self.sk("--regex -q 'a'")}""", Key('Enter'))
+ self.tmux.until(lambda lines: lines[-3].startswith('> ああa'))
+
def find_prompt(lines, interactive=False, reverse=False):
linen = -1
prompt = ">"
| Crashes with multi-byte characters input and --regex
After update to 0.9.1, I'm having trouble with sk.
```shell-session
$ sk --version
0.9.1
$ echo $SKIM_DEFAULT_OPTIONS
$ echo $LANG
ja_JP.UTF-8
$ od -tx1z <<< 'ああa'
0000000 e3 81 82 e3 81 82 61 0a >......a.<
0000010
$ echo 'ああa' | sk --regex
```
After inputting 'a', sk crashes with the following message:
```
thread 'main' panicked at 'byte index 2 is not a char boundary; it is inside 'あ' (bytes 0..3) of `ああa`', src/lib.rs:168:35
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
```
What should I do next? My environment is as follows:
OS: Arch Linux on Windows 10 x86_64
Kernel: 4.19.128-microsoft-standard
CPU: Intel i7-7700 (8) @ 3.600GHz
Shell: zsh 5.8
| 2020-10-21T22:25:28 | 0.9 | 2ad92dfc3d0f9f6b172e1c6659a235ff13fe0f64 | [
"ansi::tests::test_multi_byte_359"
] | [
"ansi::tests::test_highlight_indices",
"ansi::tests::test_ansi_iterator",
"ansi::tests::test_merge_fragments",
"ansi::tests::test_normal_string",
"ansi::tests::test_multiple_attributes",
"ansi::tests::test_reset",
"field::test::test_parse_field_range",
"global::tests::test",
"helper::selector::tests... | [] | [] | |
skim-rs/skim | 282 | skim-rs__skim-282 | [
"278"
] | dade97c7247eb971d7e4dda8d8f648d8c143aa7f | diff --git a/src/util.rs b/src/util.rs
--- a/src/util.rs
+++ b/src/util.rs
@@ -10,8 +10,18 @@ use crate::field::get_string_by_range;
use crate::item::ItemWrapper;
use crate::SkimItem;
+lazy_static! {
+ static ref RE_ESCAPE: Regex = Regex::new(r"['\U{00}]").unwrap();
+}
+
pub fn escape_single_quote(text: &str) -> String {
- text.replace("'", "'\\''")
+ RE_ESCAPE
+ .replace_all(text, |x: &Captures| match x.get(0).unwrap().as_str() {
+ "'" => "'\\''".to_string(),
+ "\0" => "\\0".to_string(),
+ _ => "".to_string(),
+ })
+ .to_string()
}
/// use to print a single line, properly handle the tabsteop and shift of a string
| diff --git a/src/util.rs b/src/util.rs
--- a/src/util.rs
+++ b/src/util.rs
@@ -413,4 +423,9 @@ mod tests {
assert_eq!("'cmd_query'", inject_command("{cq}", default_context));
assert_eq!("'a,b,c' 'x,y,z'", inject_command("{+}", default_context));
}
+
+ #[test]
+ fn test_escape_single_quote() {
+ assert_eq!("'\\''a'\\''\\0", escape_single_quote("'a'\0"));
+ }
}
diff --git a/test/test_skim.py b/test/test_skim.py
--- a/test/test_skim.py
+++ b/test/test_skim.py
@@ -70,9 +70,10 @@ def __repr__(self):
class TmuxOutput(list):
"""A list that contains the output of tmux"""
# match the status line
- # normal: `| 10/219 [2] 8.`
- # inline: `> query < 10/219 [2] 8.`
- RE = re.compile(r'(?:^|[^<-]*). ([0-9]+)/([0-9]+)(?: \[([0-9]+)\])? *([0-9]+)(\.)?$')
+ # normal: `| 10/219 [2] 8.`
+ # inline: `> query < 10/219 [2] 8.`
+ # preview: `> query < 10/219 [2] 8.│...`
+ RE = re.compile(r'(?:^|[^<-]*). ([0-9]+)/([0-9]+)(?: \[([0-9]+)\])? *([0-9]+)(\.)?(?: │)? *$')
def __init__(self, iteratable=[]):
super(TmuxOutput, self).__init__(iteratable)
self._counts = None
diff --git a/test/test_skim.py b/test/test_skim.py
--- a/test/test_skim.py
+++ b/test/test_skim.py
@@ -876,6 +877,12 @@ def test_if_non_matched(self):
self.tmux.send_keys(Key('Enter')) # not triggered anymore
self.tmux.until(lambda lines: lines.ready_with_matches(1))
+ def test_nul_in_execute(self):
+ """NUL should work in preview command see #278"""
+ self.tmux.send_keys(f"""echo -ne 'a\\0b' | {self.sk("--preview='echo -en {} | xxd'")}""", Key('Enter'))
+ self.tmux.until(lambda lines: lines.ready_with_lines(1))
+ self.tmux.until(lambda lines: lines.any_include('6100 62'))
+
def find_prompt(lines, interactive=False, reverse=False):
linen = -1
prompt = ">"
| Can't preview string with nul byte
```zsh
> sk --preview-window=down --preview='echo {}' <<<$'abc\0cba'
```
Got an error:

| 2020-03-01T14:58:58 | 0.8 | da913fb9de587a75158fe67b3756574dbd5e5efb | [
"util::tests::test_escape_single_quote"
] | [
"ansi::tests::test_normal_string",
"ansi::tests::test_reset",
"ansi::tests::test_ansi_iterator",
"ansi::tests::test_multiple_attributes",
"field::test::test_parse_field_range",
"field::test::test_get_string_by_field",
"field::test::test_parse_matching_fields",
"spinlock::tests::smoke",
"query::test:... | [] | [] | |
skim-rs/skim | 105 | skim-rs__skim-105 | [
"104"
] | 817b75c09663bf57888bee1ae08bd820dc4d7414 | diff --git a/shell/key-bindings.bash b/shell/key-bindings.bash
--- a/shell/key-bindings.bash
+++ b/shell/key-bindings.bash
@@ -58,7 +58,7 @@ __skim_history__() (
shopt -u nocaseglob nocasematch
line=$(
HISTTIMEFORMAT= history |
- SKIM_DEFAULT_OPTIONS="--height ${SKIM_TMUX_HEIGHT:-40%} $SKIM_DEFAULT_OPTIONS --tac --sync -n2..,.. --tiebreak=index --bind=ctrl-r:toggle-sort $SKIM_CTRL_R_OPTS -m" $(__skimcmd) |
+ SKIM_DEFAULT_OPTIONS="--height ${SKIM_TMUX_HEIGHT:-40%} $SKIM_DEFAULT_OPTIONS --tac --sync -n2..,.. --tiebreak=index $SKIM_CTRL_R_OPTS -m" $(__skimcmd) |
command grep '^ *[0-9]') &&
if [[ $- =~ H ]]; then
sed 's/^ *\([0-9]*\)\** .*/!\1/' <<< "$line"
diff --git a/shell/key-bindings.zsh b/shell/key-bindings.zsh
--- a/shell/key-bindings.zsh
+++ b/shell/key-bindings.zsh
@@ -70,7 +70,7 @@ skim-history-widget() {
local selected num
setopt localoptions noglobsubst noposixbuiltins pipefail 2> /dev/null
selected=( $(fc -rl 1 |
- SKIM_DEFAULT_OPTIONS="--height ${SKIM_TMUX_HEIGHT:-40%} $SKIM_DEFAULT_OPTIONS -n2..,.. --tiebreak=index --bind=ctrl-r:toggle-sort $SKIM_CTRL_R_OPTS --query=${(qqq)LBUFFER} -m" $(__skimcmd)) )
+ SKIM_DEFAULT_OPTIONS="--height ${SKIM_TMUX_HEIGHT:-40%} $SKIM_DEFAULT_OPTIONS -n2..,.. --tiebreak=index $SKIM_CTRL_R_OPTS --query=${(qqq)LBUFFER} -m" $(__skimcmd)) )
local ret=$?
if [ -n "$selected" ]; then
num=$selected[1]
diff --git a/src/field.rs b/src/field.rs
--- a/src/field.rs
+++ b/src/field.rs
@@ -103,10 +103,9 @@ impl FieldRange {
}
}
-
-// e.g. delimiter = Regex::new(",").unwrap()
-// Note that this is differnt with `to_index_pair`, it uses delimiters like ".*?,"
-pub fn get_string_by_field<'a>(delimiter: &Regex, text: &'a str, field: &FieldRange) -> Option<&'a str> {
+// ("|", "a|b||c") -> [(0, 2), (2, 4), (4, 5), (5, 6)]
+// explain: split to ["a|", "b|", "|", "c"]
+fn get_ranges_by_delimiter(delimiter: &Regex, text: &str) -> Vec<(usize, usize)> {
let mut ranges = Vec::new();
let mut last = 0;
for mat in delimiter.find_iter(text) {
diff --git a/src/field.rs b/src/field.rs
--- a/src/field.rs
+++ b/src/field.rs
@@ -114,6 +113,14 @@ pub fn get_string_by_field<'a>(delimiter: &Regex, text: &'a str, field: &FieldRa
last = mat.end();
}
ranges.push((last, text.len()));
+ ranges
+}
+
+
+// e.g. delimiter = Regex::new(",").unwrap()
+// Note that this is differnt with `to_index_pair`, it uses delimiters like ".*?,"
+pub fn get_string_by_field<'a>(delimiter: &Regex, text: &'a str, field: &FieldRange) -> Option<&'a str> {
+ let ranges = get_ranges_by_delimiter(delimiter, text);
if let Some((start, stop)) = field.to_index_pair(ranges.len()) {
let &(begin, _) = &ranges[start];
diff --git a/src/field.rs b/src/field.rs
--- a/src/field.rs
+++ b/src/field.rs
@@ -130,12 +137,7 @@ pub fn get_string_by_range<'a>(delimiter: &Regex, text: &'a str, range: &str) ->
// -> a vector of the matching fields.
pub fn parse_matching_fields(delimiter: &Regex, text: &str, fields: &[FieldRange]) -> Vec<(usize, usize)> {
- let mut ranges = delimiter
- .find_iter(text)
- .map(|m| (m.start(), m.end()))
- .collect::<Vec<(usize, usize)>>();
- let &(_, end) = ranges.last().unwrap_or(&(0, 0));
- ranges.push((end, text.len()));
+ let ranges = get_ranges_by_delimiter(delimiter, text);
let mut ret = Vec::new();
for field in fields {
diff --git a/src/field.rs b/src/field.rs
--- a/src/field.rs
+++ b/src/field.rs
@@ -150,17 +152,8 @@ pub fn parse_matching_fields(delimiter: &Regex, text: &str, fields: &[FieldRange
ret
}
-
-
-
-
pub fn parse_transform_fields(delimiter: &Regex, text: &str, fields: &[FieldRange]) -> String {
- let mut ranges = delimiter
- .find_iter(text)
- .map(|m| (m.start(), m.end()))
- .collect::<Vec<(usize, usize)>>();
- let &(_, end) = ranges.last().unwrap_or(&(0, 0));
- ranges.push((end, text.len()));
+ let ranges = get_ranges_by_delimiter(delimiter, text);
let mut ret = String::new();
for field in fields {
diff --git a/src/model.rs b/src/model.rs
--- a/src/model.rs
+++ b/src/model.rs
@@ -24,6 +24,7 @@ pub type ClosureType = Box<Fn(&mut Window) + Send>;
const SPINNER_DURATION: u32 = 200;
const SPINNERS: [char; 8] = ['-', '\\', '|', '/', '-', '\\', '|', '/'];
+const DELIMITER_STR: &'static str = r"[\t\n ]+";
lazy_static! {
static ref RE_FILEDS: Regex = Regex::new(r"\\?(\{-?[0-9.,q]*?})").unwrap();
diff --git a/src/model.rs b/src/model.rs
--- a/src/model.rs
+++ b/src/model.rs
@@ -94,7 +95,7 @@ impl Model {
multi_selection: false,
reverse: false,
preview_cmd: None,
- delimiter: Regex::new(r"[ \t\n]+").unwrap(),
+ delimiter: Regex::new(DELIMITER_STR).unwrap(),
output_ending: "\n",
print_query: false,
print_cmd: false,
diff --git a/src/model.rs b/src/model.rs
--- a/src/model.rs
+++ b/src/model.rs
@@ -121,7 +122,7 @@ impl Model {
}
if let Some(delimiter) = options.delimiter {
- self.delimiter = Regex::new(delimiter).unwrap_or_else(|_| Regex::new(r"[ \t\n]+").unwrap());
+ self.delimiter = Regex::new(delimiter).unwrap_or_else(|_| Regex::new(DELIMITER_STR).unwrap());
}
if options.print0 {
diff --git a/src/reader.rs b/src/reader.rs
--- a/src/reader.rs
+++ b/src/reader.rs
@@ -18,6 +18,8 @@ use regex::Regex;
use sender::CachedSender;
use std::env;
+const DELIMITER_STR: &'static str = r"[\t\n ]+";
+
struct ReaderOption {
pub use_ansi_color: bool,
pub default_arg: String,
diff --git a/src/reader.rs b/src/reader.rs
--- a/src/reader.rs
+++ b/src/reader.rs
@@ -35,7 +37,7 @@ impl ReaderOption {
default_arg: String::new(),
transform_fields: Vec::new(),
matching_fields: Vec::new(),
- delimiter: Regex::new(r".*?\t").unwrap(),
+ delimiter: Regex::new(DELIMITER_STR).unwrap(),
replace_str: "{}".to_string(),
line_ending: b'\n',
}
diff --git a/src/reader.rs b/src/reader.rs
--- a/src/reader.rs
+++ b/src/reader.rs
@@ -48,7 +50,7 @@ impl ReaderOption {
if let Some(delimiter) = options.delimiter {
self.delimiter =
- Regex::new(&(".*?".to_string() + delimiter)).unwrap_or_else(|_| Regex::new(r".*?[\t ]").unwrap());
+ Regex::new(delimiter).unwrap_or_else(|_| Regex::new(DELIMITER_STR).unwrap());
}
if let Some(transform_fields) = options.with_nth {
| diff --git a/src/field.rs b/src/field.rs
--- a/src/field.rs
+++ b/src/field.rs
@@ -251,7 +244,7 @@ mod test {
#[test]
fn test_parse_transform_fields() {
// delimiter is ","
- let re = Regex::new(".*?,").unwrap();
+ let re = Regex::new(",").unwrap();
assert_eq!(
super::parse_transform_fields(&re, &"A,B,C,D,E,F", &vec![Single(2), Single(4), Single(-1), Single(-7)]),
diff --git a/src/field.rs b/src/field.rs
--- a/src/field.rs
+++ b/src/field.rs
@@ -285,7 +278,7 @@ mod test {
#[test]
fn test_parse_matching_fields() {
// delimiter is ","
- let re = Regex::new(".*?,").unwrap();
+ let re = Regex::new(",").unwrap();
assert_eq!(
super::parse_matching_fields(
| zsh history binding malfunctional?
Perhaps I am missing something, but when using the zsh history binding, it doesn't work as before. A couple things I've tried that don't work: `^r` doesn't rotate the mode and `prefix-exact-match`. The search syntax isn't completely broken, as at least the `exact-match` type works.
Said features do work with `sk --ansi -c 'rg --color=always --line-number "{}"'`.
`export SKIM_DEFAULT_COMMAND='fd --type f'`
| 2018-11-10T09:56:39 | 0.5 | a33fe3635b080be7ccfcd7468a457db01fec05bb | [
"field::test::test_parse_transform_fields",
"field::test::test_parse_matching_fields"
] | [
"field::test::test_parse_field_range",
"model::tests::test_reshape_string",
"model::tests::test_accumulate_text_width",
"orderedvec::test::test_push_get",
"query::test::test_add_char",
"query::test::test_backward_delete_char",
"field::test::test_get_string_by_field",
"query::test::test_new_query",
"... | [] | [] | |
skim-rs/skim | 239 | skim-rs__skim-239 | [
"231",
"220"
] | 11c80768e0664878fc6fada1a31e7c9d0dfa8590 | diff --git a/src/ansi.rs b/src/ansi.rs
--- a/src/ansi.rs
+++ b/src/ansi.rs
@@ -69,9 +69,14 @@ impl Perform for ANSIParser {
return;
}
- let mut attr = self.last_attr;
- let mut iter = params.into_iter();
+ // \[[m => means reset
+ let mut attr = if params.is_empty() {
+ Attr::default()
+ } else {
+ self.last_attr
+ };
+ let mut iter = params.into_iter();
while let Some(&code) = iter.next() {
match code {
0 => attr = Attr::default(),
diff --git a/src/ansi.rs b/src/ansi.rs
--- a/src/ansi.rs
+++ b/src/ansi.rs
@@ -245,7 +250,8 @@ impl AnsiString {
}
pub fn has_attrs(&self) -> bool {
- self.fragments.len() > 1
+ // more than 1 fragments or is not default attr
+ self.fragments.len() > 1 || (!self.fragments.is_empty() && self.fragments[0].0 != Attr::default())
}
pub fn from_str(raw: &str) -> AnsiString {
| diff --git a/src/ansi.rs b/src/ansi.rs
--- a/src/ansi.rs
+++ b/src/ansi.rs
@@ -349,4 +355,11 @@ mod tests {
assert_eq!(Some(('i', attr)), it.next());
assert_eq!(None, it.next());
}
+
+ #[test]
+ fn test_reset() {
+ let input = "\x1B[35mA\x1B[mB";
+ let ansistring = ANSIParser::default().parse_ansi(input);
+ assert_eq!(ansistring.fragments.len(), 2);
+ }
}
| ANSI colors are ignored in some cases
I can reproduce it by:
```sh
/usr/bin/ls -1 --color | sk --ansi
```
Or...
```sh
e=$(echo -ne '\e')
sk --ansi <<EOF
$e[34mno
$e[36mno$e[0m
$e[36mno$e[0m
ABC$e[34myes
$e[36myes$e[39mABC
ABC$e[36myes$e[0m
ABC$e[36myes$e[0mABC
EOF
```
bat highlight-line not working properly
As it can be seen in the image, if I run `sk --preview 'bat --color always --style numbers --highlight-line 5 {1}'`, the (5th) line is only partially highlighted in the preview window:

It seems to be a problem with skim since bat line highlighting works just fine:

|
Should be fixed? Tested on MacOS + current master.

| 2019-12-11T23:58:55 | 0.6 | 11c80768e0664878fc6fada1a31e7c9d0dfa8590 | [
"ansi::tests::test_reset"
] | [
"ansi::tests::test_ansi_iterator",
"ansi::tests::test_multiple_attributes",
"ansi::tests::test_normal_string",
"field::test::test_parse_field_range",
"query::test::test_add_char",
"field::test::test_parse_matching_fields",
"query::test::test_new_query",
"query::test::test_backward_delete_char",
"fie... | [] | [] |
starship/starship | 2,613 | starship__starship-2613 | [
"2610"
] | 0f3a58352f4e84718b95bfb42340348cfc60ee25 | diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -1453,7 +1453,8 @@ symbol = "🌟 "
The `jobs` module shows the current number of jobs running.
The module will be shown only if there are background jobs running.
The module will show the number of jobs running if there is more than 1 job, or
-more than the `threshold` config value, if it exists.
+more than the `threshold` config value, if it exists. If `threshold` is set to 0,
+then the module will also show when there are 0 jobs running.
::: warning
diff --git a/src/modules/jobs.rs b/src/modules/jobs.rs
--- a/src/modules/jobs.rs
+++ b/src/modules/jobs.rs
@@ -15,11 +15,20 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
.trim()
.parse::<i64>()
.ok()?;
- if num_of_jobs == 0 {
+
+ if config.threshold < 0 {
+ log::warn!(
+ "threshold in [jobs] ({}) was less than zero",
+ config.threshold
+ );
+ return None;
+ }
+
+ if num_of_jobs == 0 && config.threshold > 0 {
return None;
}
- let module_number = if num_of_jobs > config.threshold {
+ let module_number = if num_of_jobs > config.threshold || config.threshold == 0 {
num_of_jobs.to_string()
} else {
"".to_string()
| diff --git a/src/modules/jobs.rs b/src/modules/jobs.rs
--- a/src/modules/jobs.rs
+++ b/src/modules/jobs.rs
@@ -109,4 +118,32 @@ mod test {
let expected = Some(format!("{} ", Color::Blue.bold().paint("✦3")));
assert_eq!(expected, actual);
}
+
+ #[test]
+ fn config_0_job_0() {
+ let actual = ModuleRenderer::new("jobs")
+ .config(toml::toml! {
+ [jobs]
+ threshold = 0
+ })
+ .jobs(0)
+ .collect();
+
+ let expected = Some(format!("{} ", Color::Blue.bold().paint("✦0")));
+ assert_eq!(expected, actual);
+ }
+
+ #[test]
+ fn config_0_job_1() {
+ let actual = ModuleRenderer::new("jobs")
+ .config(toml::toml! {
+ [jobs]
+ threshold = 0
+ })
+ .jobs(1)
+ .collect();
+
+ let expected = Some(format!("{} ", Color::Blue.bold().paint("✦1")));
+ assert_eq!(expected, actual);
+ }
}
| [jobs] Cannot take negative value (zero jobs not shown)
## Bug Report
#### Current Behavior
```toml
[jobs]
threshold = -1
```
does not show 0 (zero) in the output if there are no background jobs.
#### Expected Behavior
`0` (zero) is printed for zero background jobs
#### Environment
```
$ starship --version
starship 0.51.0
branch:
commit_hash:
build_time:2021-03-29 15:35:00
build_env:rustc 1.50.0,
```
- Shell type: bash
- Shell version: 4.4.23
- Terminal emulator: Konsole
- Operating system: NixOS Linux
-
| 2021-04-20T07:04:13 | 0.51 | 0f3a58352f4e84718b95bfb42340348cfc60ee25 | [
"modules::golang::tests::folder_with_go_file",
"modules::golang::tests::folder_with_go_mod",
"modules::jobs::test::config_0_job_0",
"modules::nodejs::tests::engines_node_version_match",
"modules::nodejs::tests::engines_node_version_not_match",
"modules::nodejs::tests::folder_with_ts_file",
"modules::oca... | [
"bug_report::tests::test_get_shell_info",
"bug_report::tests::test_get_config_path",
"config::tests::test_from_string",
"bug_report::tests::test_make_github_link",
"config::tests::table_get_styles_bold_italic_underline_green_dimmy_silly_caps",
"config::tests::test_load_nested_config",
"config::tests::te... | [
"modules::cmake::tests::buildfolder_with_cmake_cache",
"modules::crystal::tests::folder_with_cr_file",
"modules::dart::tests::folder_with_dart_file",
"modules::crystal::tests::folder_with_shard_file",
"modules::cmake::tests::folder_with_cmake_lists",
"modules::deno::tests::folder_with_deps_js",
"modules... | [
"modules::dotnet::tests::shows_latest_in_directory_with_fsproj",
"modules::dotnet::tests::shows_latest_in_directory_with_csproj",
"modules::dotnet::tests::shows_latest_in_directory_with_directory_build_props_file",
"modules::dotnet::tests::shows_latest_in_directory_with_directory_build_targets_file"
] | |
starship/starship | 2,569 | starship__starship-2569 | [
"2454"
] | 3b37bcd063f3c1b0d80e46a41a6bc9e1e933d4f7 | diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -2006,13 +2006,13 @@ symbol = "☁️ "
## Package Version
The `package` module is shown when the current directory is the repository for a
-package, and shows its current version. The module currently supports `npm`, `cargo`,
+package, and shows its current version. The module currently supports `npm`, `nimble`, `cargo`,
`poetry`, `composer`, `gradle`, `julia`, `mix` and `helm` packages.
- [**npm**](https://docs.npmjs.com/cli/commands/npm) – The `npm` package version is extracted from the `package.json` present
in the current directory
-- [**cargo**](https://doc.rust-lang.org/cargo/) – The `cargo` package version is extracted from the `Cargo.toml` present
- in the current directory
+- [**cargo**](https://doc.rust-lang.org/cargo/) – The `cargo` package version is extracted from the `Cargo.toml` present in the current directory
+- [**nimble**](https://github.com/nim-lang/nimble) - The `nimble` package version is extracted from the `*.nimble` file present in the current directory with the `nimble dump` command
- [**poetry**](https://python-poetry.org/) – The `poetry` package version is extracted from the `pyproject.toml` present
in the current directory
- [**composer**](https://getcomposer.org/) – The `composer` package version is extracted from the `composer.json` present
diff --git a/src/modules/package.rs b/src/modules/package.rs
--- a/src/modules/package.rs
+++ b/src/modules/package.rs
@@ -1,5 +1,3 @@
-use std::path::Path;
-
use super::{Context, Module, RootModuleConfig};
use crate::configs::package::PackageConfig;
use crate::formatter::StringFormatter;
diff --git a/src/modules/package.rs b/src/modules/package.rs
--- a/src/modules/package.rs
+++ b/src/modules/package.rs
@@ -16,7 +14,7 @@ use serde_json as json;
pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
let mut module = context.new_module("package");
let config: PackageConfig = PackageConfig::try_load(module.config);
- let module_version = get_package_version(&context.current_dir, &config)?;
+ let module_version = get_package_version(context, &config)?;
let parsed = StringFormatter::new(config.format).and_then(|formatter| {
formatter
diff --git a/src/modules/package.rs b/src/modules/package.rs
--- a/src/modules/package.rs
+++ b/src/modules/package.rs
@@ -54,6 +52,16 @@ fn extract_cargo_version(file_contents: &str) -> Option<String> {
Some(formatted_version)
}
+fn extract_nimble_version(context: &Context) -> Option<String> {
+ let cmd_output = context.exec_cmd("nimble", &["dump", "--json"])?;
+
+ let nimble_json: json::Value = json::from_str(&cmd_output.stdout).ok()?;
+ let raw_version = nimble_json.get("version")?.as_str()?;
+
+ let formatted_version = format_version(raw_version);
+ Some(formatted_version)
+}
+
fn extract_package_version(file_contents: &str, display_private: bool) -> Option<String> {
let package_json: json::Value = json::from_str(file_contents).ok()?;
diff --git a/src/modules/package.rs b/src/modules/package.rs
--- a/src/modules/package.rs
+++ b/src/modules/package.rs
@@ -194,9 +202,17 @@ fn extract_vpkg_version(file_contents: &str) -> Option<String> {
Some(formatted_version)
}
-fn get_package_version(base_dir: &Path, config: &PackageConfig) -> Option<String> {
+fn get_package_version(context: &Context, config: &PackageConfig) -> Option<String> {
+ let base_dir = &context.current_dir;
+
if let Ok(cargo_toml) = utils::read_file(base_dir.join("Cargo.toml")) {
extract_cargo_version(&cargo_toml)
+ } else if context
+ .try_begin_scan()?
+ .set_extensions(&["nimble"])
+ .is_match()
+ {
+ extract_nimble_version(context)
} else if let Ok(package_json) = utils::read_file(base_dir.join("package.json")) {
extract_package_version(&package_json, config.display_private)
} else if let Ok(poetry_toml) = utils::read_file(base_dir.join("pyproject.toml")) {
| diff --git a/src/modules/package.rs b/src/modules/package.rs
--- a/src/modules/package.rs
+++ b/src/modules/package.rs
@@ -236,7 +252,7 @@ fn format_version(version: &str) -> String {
#[cfg(test)]
mod tests {
use super::*;
- use crate::test::ModuleRenderer;
+ use crate::{test::ModuleRenderer, utils::CommandOutput};
use ansi_term::Color;
use std::fs::File;
use std::io;
diff --git a/src/modules/package.rs b/src/modules/package.rs
--- a/src/modules/package.rs
+++ b/src/modules/package.rs
@@ -274,6 +290,117 @@ mod tests {
project_dir.close()
}
+ #[test]
+ fn test_extract_nimble_package_version() -> io::Result<()> {
+ let config_name = "test_project.nimble";
+
+ let config_content = r##"
+version = "0.1.0"
+author = "Mr. nimble"
+description = "A new awesome nimble package"
+license = "MIT"
+"##;
+
+ let project_dir = create_project_dir()?;
+ fill_config(&project_dir, config_name, Some(&config_content))?;
+
+ let starship_config = toml::toml! {
+ [package]
+ disabled = false
+ };
+ let actual = ModuleRenderer::new("package")
+ .cmd(
+ "nimble dump --json",
+ Some(CommandOutput {
+ stdout: r##"
+{
+ "name": "test_project.nimble",
+ "version": "0.1.0",
+ "author": "Mr. nimble",
+ "desc": "A new awesome nimble package",
+ "license": "MIT",
+ "skipDirs": [],
+ "skipFiles": [],
+ "skipExt": [],
+ "installDirs": [],
+ "installFiles": [],
+ "installExt": [],
+ "requires": [],
+ "bin": [],
+ "binDir": "",
+ "srcDir": "",
+ "backend": "c"
+}
+"##
+ .to_owned(),
+ stderr: "".to_owned(),
+ }),
+ )
+ .path(project_dir.path())
+ .config(starship_config)
+ .collect();
+
+ let expected = Some(format!(
+ "is {} ",
+ Color::Fixed(208).bold().paint(format!("📦 {}", "v0.1.0"))
+ ));
+
+ assert_eq!(actual, expected);
+ project_dir.close()
+ }
+
+ #[test]
+ fn test_extract_nimble_package_version_for_nimble_directory_when_nimble_is_not_available(
+ ) -> io::Result<()> {
+ let config_name = "test_project.nimble";
+
+ let config_content = r##"
+version = "0.1.0"
+author = "Mr. nimble"
+description = "A new awesome nimble package"
+license = "MIT"
+"##;
+
+ let project_dir = create_project_dir()?;
+ fill_config(&project_dir, config_name, Some(&config_content))?;
+
+ let starship_config = toml::toml! {
+ [package]
+ disabled = false
+ };
+ let actual = ModuleRenderer::new("package")
+ .cmd("nimble dump --json", None)
+ .path(project_dir.path())
+ .config(starship_config)
+ .collect();
+
+ let expected = None;
+
+ assert_eq!(actual, expected);
+ project_dir.close()
+ }
+
+ #[test]
+ fn test_extract_nimble_package_version_for_non_nimble_directory() -> io::Result<()> {
+ // Only create an empty directory. There's no .nibmle file for this case.
+ let project_dir = create_project_dir()?;
+
+ let starship_config = toml::toml! {
+ [package]
+ disabled = false
+ };
+ let actual = ModuleRenderer::new("package")
+ .cmd("nimble dump --json", None)
+ .path(project_dir.path())
+ .config(starship_config)
+ .collect();
+
+ let expected = None;
+
+ assert_eq!(actual, expected);
+ project_dir.close()
+ }
+
#[test]
fn test_extract_package_version() -> io::Result<()> {
let config_name = "package.json";
| Package version: Nimble project support
## Feature Request
#### Describe the solution you'd like
Nimble projects contain a `version` property that should be included in the package version module.
#### Implementing
When a file with the extension `.nimble` is in the current directory the version can be read by using [`nimble dump`](https://github.com/nim-lang/nimble#nimble-dump) to parse & evaluate the file. The output can be in INI-compatible format (default) or JSON with the `--json` flag. You could use a parser for INI or JSON, or just use a regex (which is probably faster than parsing).
Running the command `nimble dump` in the project will either output the project information or error with an non-zero exit code.
#### Test cases
In a new directory create a file containing `example.nimble` filled with
```nims
# Comments
version = "1.2.3"
author = "Example Author"
description = "A Nim package"
license = "MIT"
```
Running `nimble dump` should output
```ini
name: "tmp"
version: "1.2.3"
author: "Example Author"
desc: "A Nim package"
license: "MIT"
skipDirs: ""
skipFiles: ""
skipExt: ""
installDirs: ""
installFiles: ""
installExt: ""
requires: ""
bin: ""
binDir: ""
srcDir: ""
backend: "c"
```
and with `--json`
```json
{
"name": "tmp",
"version": "1.2.3",
"author": "Example Author",
"desc": "A Nim package",
"license": "MIT",
"skipDirs": [],
"skipFiles": [],
"skipExt": [],
"installDirs": [],
"installFiles": [],
"installExt": [],
"requires": [],
"bin": [],
"binDir": "",
"srcDir": "",
"backend": "c"
}
```
| Hey, I'd like to take on this issue, if someone's not working on this already. :)
@aeruhxi As long as there are no related PRs go ahead. I believe nobody is currently working on it... Went through PRs and verified it, have fun implementing stuff :rocket: | 2021-04-09T20:50:43 | 0.53 | c4f977c48d29790f27d332c32d97eff40fd258f1 | [
"modules::golang::tests::folder_with_glide_yaml",
"modules::golang::tests::folder_with_go_file",
"modules::golang::tests::folder_with_go_mod",
"modules::golang::tests::folder_with_go_sum",
"modules::golang::tests::folder_with_go_version",
"modules::golang::tests::folder_with_gopkg_yml",
"modules::helm::... | [
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
"config::tests::table_get_styles_ordered",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_inverted_silly_caps",
"config::tests::table_get_styles_plain_and_broken_s... | [] | [
"modules::nodejs::tests::folder_with_ts_file",
"modules::ocaml::tests::folder_with_dune",
"modules::ocaml::tests::folder_with_merlin_file",
"modules::ocaml::tests::folder_with_jbuild_ignore",
"modules::ocaml::tests::folder_with_opam_file",
"modules::ocaml::tests::folder_with_dune_project",
"modules::oca... |
starship/starship | 2,456 | starship__starship-2456 | [
"2009"
] | 69b9bf72c3380217d82cec863edb7db0f29e6862 | diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -454,7 +454,7 @@ the module will be activated if any of the following conditions are met:
| Option | Default | Description |
| ------------------- | -------------------------------------- | -------------------------------------------- |
| `format` | `"via [$symbol($version )]($style)"` | The format for the module. |
-| `symbol` | `"喝 "` | The symbol used before the version of cmake. |
+| `symbol` | `"△ "` | The symbol used before the version of cmake. |
| `detect_extensions` | `[]` | Which extensions should trigger this moudle |
| `detect_files` | `["CMakeLists.txt", "CMakeCache.txt"]` | Which filenames should trigger this module |
| `detect_folders` | `[]` | Which folders should trigger this module |
diff --git a/src/configs/cmake.rs b/src/configs/cmake.rs
--- a/src/configs/cmake.rs
+++ b/src/configs/cmake.rs
@@ -17,7 +17,7 @@ impl<'a> RootModuleConfig<'a> for CMakeConfig<'a> {
fn new() -> Self {
CMakeConfig {
format: "via [$symbol($version )]($style)",
- symbol: "喝 ",
+ symbol: "△ ",
style: "bold blue",
disabled: false,
detect_extensions: vec![],
| diff --git a/src/modules/cmake.rs b/src/modules/cmake.rs
--- a/src/modules/cmake.rs
+++ b/src/modules/cmake.rs
@@ -77,7 +77,7 @@ mod tests {
let dir = tempfile::tempdir()?;
File::create(dir.path().join("CMakeLists.txt"))?.sync_all()?;
let actual = ModuleRenderer::new("cmake").path(dir.path()).collect();
- let expected = Some(format!("via {}", Color::Blue.bold().paint("喝 v3.17.3 ")));
+ let expected = Some(format!("via {}", Color::Blue.bold().paint("△ v3.17.3 ")));
assert_eq!(expected, actual);
dir.close()
}
diff --git a/src/modules/cmake.rs b/src/modules/cmake.rs
--- a/src/modules/cmake.rs
+++ b/src/modules/cmake.rs
@@ -87,7 +87,7 @@ mod tests {
let dir = tempfile::tempdir()?;
File::create(dir.path().join("CMakeCache.txt"))?.sync_all()?;
let actual = ModuleRenderer::new("cmake").path(dir.path()).collect();
- let expected = Some(format!("via {}", Color::Blue.bold().paint("喝 v3.17.3 ")));
+ let expected = Some(format!("via {}", Color::Blue.bold().paint("△ v3.17.3 ")));
assert_eq!(expected, actual);
dir.close()
}
| NerdFonts Codepoint Leakage
[Issue 365 on NerdFonts](https://github.com/ryanoasis/nerd-fonts/issues/365) affects the CMake symbol that we currently use in our module. There are two issues here:
- Users who are not using nerd-patched fonts will see a CJK symbol instead of however their terminal emulator displays an unknown symbol
- Users who use that section of the Unicode standard may have to choose between using our prompt and using the system in their language
We should keep this in mind moving forward and potentially pick a new CMake module symbol and avoid using glyphs located in the affected Unicode block until the issue is fixed by NerdFonts. Unfortunately, it looks like it's very much a breaking change from them, so it might be a while before that fix comes out.
| Maybe this is why I can see this broken symbol instead of the battery icon?

Manjaro with Konsole, Breeze shell theme with Fira Code 10pt font from system repository.
How can I solve it or what has broken it in the first place? This was fine up until a few days or weeks I think, can't remember, just got frustrated enough to try to solve it today :D
> Manjaro with Konsole, Breeze shell theme with Fira Code 10pt font from system repository.
Starship requires the use of Nerd Font variants of fonts. In this case, you'll want to change the font your terminal is using to the version of Fira Code downloaded here: https://www.nerdfonts.com/font-downloads
Thank you very much, it works now! Although, I installed `nerd-fonts-noto-sans-mono` and `ttf-nerd-fonts-symbols` from the official repository instead of replacing Fira Code, then `fc-cache -vf` to refresh the font cache, and restart Konsole.

| 2021-03-13T00:55:47 | 0.50 | 69b9bf72c3380217d82cec863edb7db0f29e6862 | [
"modules::cmake::tests::buildfolder_with_cmake_cache",
"modules::cmake::tests::folder_with_cmake_lists"
] | [
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_ordered",
"config::tests::table_get_styles_bold_italic_underline_green_dimmy_silly_caps",
"config::tests::test_from_bool",
"bug_report::tests::test_get_shell_info",
"config::tests::table_get_styles_plain_and_broken_styles",
"con... | [
"modules::directory::tests::linux::directory_in_root",
"modules::directory::tests::root_directory",
"modules::username::tests::logname_equals_user",
"modules::username::tests::current_user_not_logname",
"modules::username::tests::no_logname_env_variable",
"modules::username::tests::ssh_connection",
"mod... | [] |
starship/starship | 4,008 | starship__starship-4008 | [
"3870",
"3870"
] | 209181c36978265b6a138e72c36177fefb84c165 | diff --git a/.github/config-schema.json b/.github/config-schema.json
--- a/.github/config-schema.json
+++ b/.github/config-schema.json
@@ -748,7 +748,8 @@
"disabled": true,
"format": "[$symbol$context( \\($namespace\\))]($style) in ",
"style": "cyan bold",
- "symbol": "☸ "
+ "symbol": "☸ ",
+ "user_aliases": {}
},
"allOf": [
{
diff --git a/.github/config-schema.json b/.github/config-schema.json
--- a/.github/config-schema.json
+++ b/.github/config-schema.json
@@ -3207,6 +3208,13 @@
"additionalProperties": {
"type": "string"
}
+ },
+ "user_aliases": {
+ "default": {},
+ "type": "object",
+ "additionalProperties": {
+ "type": "string"
+ }
}
}
},
diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -2056,6 +2056,7 @@ To enable it, set `disabled` to `false` in your configuration file.
| `format` | `'[$symbol$context( \($namespace\))]($style) in '` | The format for the module. |
| `style` | `"cyan bold"` | The style for the module. |
| `context_aliases` | | Table of context aliases to display. |
+| `user_aliases` | | Table of user aliases to display. |
| `disabled` | `true` | Disables the `kubernetes` module. |
### Variables
diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -2083,11 +2084,14 @@ disabled = false
"dev.local.cluster.k8s" = "dev"
".*/openshift-cluster/.*" = "openshift"
"gke_.*_(?P<var_cluster>[\\w-]+)" = "gke-$var_cluster"
+[kubernetes.user_aliases]
+"dev.local.cluster.k8s" = "dev"
+"root/.*" = "root"
```
#### Regex Matching
-Additional to simple aliasing, `context_aliases` also supports
+Additional to simple aliasing, `context_aliases` and `user_aliases` also supports
extended matching and renaming using regular expressions.
The regular expression must match on the entire kube context,
diff --git a/src/configs/kubernetes.rs b/src/configs/kubernetes.rs
--- a/src/configs/kubernetes.rs
+++ b/src/configs/kubernetes.rs
@@ -10,6 +10,7 @@ pub struct KubernetesConfig<'a> {
pub style: &'a str,
pub disabled: bool,
pub context_aliases: HashMap<String, &'a str>,
+ pub user_aliases: HashMap<String, &'a str>,
}
impl<'a> Default for KubernetesConfig<'a> {
diff --git a/src/configs/kubernetes.rs b/src/configs/kubernetes.rs
--- a/src/configs/kubernetes.rs
+++ b/src/configs/kubernetes.rs
@@ -20,6 +21,7 @@ impl<'a> Default for KubernetesConfig<'a> {
style: "cyan bold",
disabled: true,
context_aliases: HashMap::new(),
+ user_aliases: HashMap::new(),
}
}
}
diff --git a/src/modules/kubernetes.rs b/src/modules/kubernetes.rs
--- a/src/modules/kubernetes.rs
+++ b/src/modules/kubernetes.rs
@@ -1,6 +1,7 @@
use yaml_rust::YamlLoader;
use std::borrow::Cow;
+use std::collections::HashMap;
use std::env;
use std::path;
diff --git a/src/modules/kubernetes.rs b/src/modules/kubernetes.rs
--- a/src/modules/kubernetes.rs
+++ b/src/modules/kubernetes.rs
@@ -82,22 +83,30 @@ fn get_kube_ctx_component(
Some(ctx_components)
}
+fn get_kube_user<'a>(config: &'a KubernetesConfig, kube_user: &'a str) -> Cow<'a, str> {
+ return get_alias(&config.user_aliases, kube_user).unwrap_or(Cow::Borrowed(kube_user));
+}
+
fn get_kube_context_name<'a>(config: &'a KubernetesConfig, kube_ctx: &'a str) -> Cow<'a, str> {
- if let Some(val) = config.context_aliases.get(kube_ctx) {
- return Cow::Borrowed(val);
+ return get_alias(&config.context_aliases, kube_ctx).unwrap_or(Cow::Borrowed(kube_ctx));
+}
+
+fn get_alias<'a>(
+ aliases: &'a HashMap<String, &'a str>,
+ alias_candidate: &'a str,
+) -> Option<Cow<'a, str>> {
+ if let Some(val) = aliases.get(alias_candidate) {
+ return Some(Cow::Borrowed(val));
}
- config
- .context_aliases
- .iter()
- .find_map(|(k, v)| {
- let re = regex::Regex::new(&format!("^{}$", k)).ok()?;
- match re.replace(kube_ctx, *v) {
- Cow::Owned(replaced) => Some(Cow::Owned(replaced)),
- _ => None,
- }
- })
- .unwrap_or(Cow::Borrowed(kube_ctx))
+ return aliases.iter().find_map(|(k, v)| {
+ let re = regex::Regex::new(&format!("^{}$", k)).ok()?;
+ let replaced = re.replace(alias_candidate, *v);
+ match replaced {
+ Cow::Owned(replaced) => Some(Cow::Owned(replaced)),
+ _ => None,
+ }
+ });
}
pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
diff --git a/src/modules/kubernetes.rs b/src/modules/kubernetes.rs
--- a/src/modules/kubernetes.rs
+++ b/src/modules/kubernetes.rs
@@ -157,7 +166,7 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
"user" => kube_user.and_then(|ctx| {
ctx.as_ref().map(|kube| {
// unwrap is safe as kube_user only holds kube.user.is_some()
- Ok(Cow::Borrowed(kube.user.as_ref().unwrap().as_str()))
+ Ok(get_kube_user(&config, kube.user.as_ref().unwrap().as_str()))
})
}),
"cluster" => kube_cluster.and_then(|ctx| {
| diff --git a/src/modules/kubernetes.rs b/src/modules/kubernetes.rs
--- a/src/modules/kubernetes.rs
+++ b/src/modules/kubernetes.rs
@@ -529,6 +538,127 @@ users: []
dir.close()
}
+ fn base_test_user_alias(
+ user_name: &str,
+ config: toml::Value,
+ expected: &str,
+ ) -> io::Result<()> {
+ let dir = tempfile::tempdir()?;
+
+ let filename = dir.path().join("config");
+
+ let mut file = File::create(&filename)?;
+ file.write_all(
+ format!(
+ "
+apiVersion: v1
+clusters: []
+contexts:
+ - context:
+ cluster: test_cluster
+ user: {}
+ namespace: test_namespace
+ name: test_context
+current-context: test_context
+kind: Config
+preferences: {{}}
+users: []
+",
+ user_name
+ )
+ .as_bytes(),
+ )?;
+ file.sync_all()?;
+
+ let actual = ModuleRenderer::new("kubernetes")
+ .path(dir.path())
+ .env("KUBECONFIG", filename.to_string_lossy().as_ref())
+ .config(config)
+ .collect();
+
+ let expected = Some(format!("{} in ", Color::Cyan.bold().paint(expected)));
+ assert_eq!(expected, actual);
+
+ dir.close()
+ }
+
+ #[test]
+ fn test_user_alias_simple() -> io::Result<()> {
+ base_test_user_alias(
+ "test_user",
+ toml::toml! {
+ [kubernetes]
+ disabled = false
+ format = "[$symbol$context( \\($user\\))]($style) in "
+ [kubernetes.user_aliases]
+ "test_user" = "test_alias"
+ ".*" = "literal match has precedence"
+ },
+ "☸ test_context (test_alias)",
+ )
+ }
+
+ #[test]
+ fn test_user_alias_regex() -> io::Result<()> {
+ base_test_user_alias(
+ "openshift-cluster/user",
+ toml::toml! {
+ [kubernetes]
+ disabled = false
+ format = "[$symbol$context( \\($user\\))]($style) in "
+ [kubernetes.user_aliases]
+ "openshift-cluster/.*" = "test_alias"
+ },
+ "☸ test_context (test_alias)",
+ )
+ }
+
+ #[test]
+ fn test_user_alias_regex_replace() -> io::Result<()> {
+ base_test_user_alias(
+ "gke_infra-user-28cccff6_europe-west4_cluster-1",
+ toml::toml! {
+ [kubernetes]
+ disabled = false
+ format = "[$symbol$context( \\($user\\))]($style) in "
+ [kubernetes.user_aliases]
+ "gke_.*_(?P<cluster>[\\w-]+)" = "example: $cluster"
+ },
+ "☸ test_context (example: cluster-1)",
+ )
+ }
+
+ #[test]
+ fn test_user_alias_broken_regex() -> io::Result<()> {
+ base_test_user_alias(
+ "input",
+ toml::toml! {
+ [kubernetes]
+ disabled = false
+ format = "[$symbol$context( \\($user\\))]($style) in "
+ [kubernetes.user_aliases]
+ "input[.*" = "this does not match"
+ },
+ "☸ test_context (input)",
+ )
+ }
+
+ #[test]
+ fn test_user_should_use_default_if_no_matching_alias() -> io::Result<()> {
+ base_test_user_alias(
+ "gke_infra-user-28cccff6_europe-west4_cluster-1",
+ toml::toml! {
+ [kubernetes]
+ disabled = false
+ format = "[$symbol$context( \\($user\\))]($style) in "
+ [kubernetes.user_aliases]
+ "([A-Z])\\w+" = "this does not match"
+ "gke_infra-user-28cccff6" = "this does not match"
+ },
+ "☸ test_context (gke_infra-user-28cccff6_europe-west4_cluster-1)",
+ )
+ }
+
#[test]
fn test_kube_user() -> io::Result<()> {
let dir = tempfile::tempdir()?;
| Add username alias to kubernetes module
Current Behavior
with the new OC client version 4.7 username also includes the server name and due to this in prompt big username is printing.
and this feature came from this https://github.com/starship/starship/pull/3569
Expected Behavior
we should be able to show only the username.
Environment
Starship version: [the output of starship --version]
```
starship 1.5.4
tag:v1.5.4
commit_hash:d420a63e
build_time:2022-03-24 19:18:47 +00:00
build_env:rustc 1.59.0 (9d1b2106e 2022-02-23),
- Shell type: [fish, zsh] bash
- Shell version: [the output of `fish --version` or `zsh --version`]
- Shell plugin manager: [if present, e.g. oh-my-fish, oh-my-zsh, fisher, antigen] Nothing
- Terminal emulator: [e.g. iTerm, Hyper, Terminator] putty/mobaX
- Operating system: [e.g. macOS 10.13.4, Windows 10] : RHEL
#### Starship Configuration
<!-- Can be found in ~/.config/starship.toml -->
[kubernetes]
format = '[☸︎ ($user on )$context \($namespace\)](dimmed green) '
disabled = false
oc client version
└─> oc version
Client Version: 4.7.0-202107141046.p0.git.8b4b094.assembly.stream-8b4b094
Kubernetes Version: v1.20.14+0d60930
oc whoami -c
/ocpcluster-ocpd-corp-abc-com:6443/pratik
Add username alias to kubernetes module
Current Behavior
with the new OC client version 4.7 username also includes the server name and due to this in prompt big username is printing.
and this feature came from this https://github.com/starship/starship/pull/3569
Expected Behavior
we should be able to show only the username.
Environment
Starship version: [the output of starship --version]
```
starship 1.5.4
tag:v1.5.4
commit_hash:d420a63e
build_time:2022-03-24 19:18:47 +00:00
build_env:rustc 1.59.0 (9d1b2106e 2022-02-23),
- Shell type: [fish, zsh] bash
- Shell version: [the output of `fish --version` or `zsh --version`]
- Shell plugin manager: [if present, e.g. oh-my-fish, oh-my-zsh, fisher, antigen] Nothing
- Terminal emulator: [e.g. iTerm, Hyper, Terminator] putty/mobaX
- Operating system: [e.g. macOS 10.13.4, Windows 10] : RHEL
#### Starship Configuration
<!-- Can be found in ~/.config/starship.toml -->
[kubernetes]
format = '[☸︎ ($user on )$context \($namespace\)](dimmed green) '
disabled = false
oc client version
└─> oc version
Client Version: 4.7.0-202107141046.p0.git.8b4b094.assembly.stream-8b4b094
Kubernetes Version: v1.20.14+0d60930
oc whoami -c
/ocpcluster-ocpd-corp-abc-com:6443/pratik
| Hi @jainpratik163 👋
By default, the `kubernetes` module is formatted to show the namespace:
```toml
[kubernetes]
format = '[$symbol$context( \($namespace\))]($style) in '
```
It can be replaced with any of the variables listed [here](https://starship.rs/config/#variables-33), including `$username`.
To do so, edit or create `~/.config/starship.toml`, and add the following:
```toml
[kubernetes]
format = '[$symbol$context( \($username\))]($style) in '
```
for the user, it is showing like this **pratik/ocpcluster-ocpd-corp-abc-com:6443** this is coming when we are using `oc client 4.7 ` onwards and ideally it should only show username as pratik, not cluster URL also.
and seems like some changes were made in oc client 4.7 for username in the config file.
and also as per doc, there is no such variable username mentioned for k8s , only the user was mentioned.
And for context, I already created an alias.
Hi @jainpratik163 can you share your kubernetes configuration (please redact any credentials or tokens first) so we can dig into this a bit.
@andytom below is config file for both version
1. **oc client version 4.7.x**
```
apiVersion: v1
clusters:
- cluster:
server: https://OCP-CLUSTER:6443
name: OCP-CLUSTER:6443
- context:
cluster: OCP-CLUSTER:6443
namespace: namespace-name
user: pratijai/OCP-CLUSTER:6443
name: namespace-name/OCP-CLUSTER:6443/pratijai
current-context: namespace-name/OCP-CLUSTER:6443/pratijai
kind: Config
preferences: {}
users:
- name: pratijai/OCP-CLUSTER:6443
user:
token: xyz
```
2. oc version lesser than 4.7
```
apiVersion: v1
clusters:
- cluster:
server: https://OCP-CLUSTER:6443
name: OCP-CLUSTER:6443
- context:
cluster: OCP-CLUSTER:6443
namespace: namespace-name
user: pratijai
name: namespace-name/OCP-CLUSTER:6443/pratijai
current-context: namespace-name/OCP-CLUSTER:6443/pratijai
kind: Config
preferences: {}
users:
- name: pratijai/OCP-CLUSTER:6443
user:
token: xyz
```
only diff is of user
Thanks @jainpratik163, I understand the problem a bit better now, so currently starship just shows username as it is in the configuration file but maybe we could add something like `context_alias` but for the username maybe `username_alias`, do you think that would solve that problem?
@andytom yes it will solve the problem if we can alias option where we can cut username only and rest rejected.
Any update on this?
Hi @jainpratik163 👋
By default, the `kubernetes` module is formatted to show the namespace:
```toml
[kubernetes]
format = '[$symbol$context( \($namespace\))]($style) in '
```
It can be replaced with any of the variables listed [here](https://starship.rs/config/#variables-33), including `$username`.
To do so, edit or create `~/.config/starship.toml`, and add the following:
```toml
[kubernetes]
format = '[$symbol$context( \($username\))]($style) in '
```
for the user, it is showing like this **pratik/ocpcluster-ocpd-corp-abc-com:6443** this is coming when we are using `oc client 4.7 ` onwards and ideally it should only show username as pratik, not cluster URL also.
and seems like some changes were made in oc client 4.7 for username in the config file.
and also as per doc, there is no such variable username mentioned for k8s , only the user was mentioned.
And for context, I already created an alias.
Hi @jainpratik163 can you share your kubernetes configuration (please redact any credentials or tokens first) so we can dig into this a bit.
@andytom below is config file for both version
1. **oc client version 4.7.x**
```
apiVersion: v1
clusters:
- cluster:
server: https://OCP-CLUSTER:6443
name: OCP-CLUSTER:6443
- context:
cluster: OCP-CLUSTER:6443
namespace: namespace-name
user: pratijai/OCP-CLUSTER:6443
name: namespace-name/OCP-CLUSTER:6443/pratijai
current-context: namespace-name/OCP-CLUSTER:6443/pratijai
kind: Config
preferences: {}
users:
- name: pratijai/OCP-CLUSTER:6443
user:
token: xyz
```
2. oc version lesser than 4.7
```
apiVersion: v1
clusters:
- cluster:
server: https://OCP-CLUSTER:6443
name: OCP-CLUSTER:6443
- context:
cluster: OCP-CLUSTER:6443
namespace: namespace-name
user: pratijai
name: namespace-name/OCP-CLUSTER:6443/pratijai
current-context: namespace-name/OCP-CLUSTER:6443/pratijai
kind: Config
preferences: {}
users:
- name: pratijai/OCP-CLUSTER:6443
user:
token: xyz
```
only diff is of user
Thanks @jainpratik163, I understand the problem a bit better now, so currently starship just shows username as it is in the configuration file but maybe we could add something like `context_alias` but for the username maybe `username_alias`, do you think that would solve that problem?
@andytom yes it will solve the problem if we can alias option where we can cut username only and rest rejected.
Any update on this? | 2022-05-23T20:39:00 | 1.6 | 209181c36978265b6a138e72c36177fefb84c165 | [
"modules::kubernetes::tests::test_user_alias_broken_regex",
"modules::kubernetes::tests::test_user_alias_simple",
"modules::kubernetes::tests::test_user_alias_regex",
"modules::kubernetes::tests::test_user_alias_regex_replace",
"modules::kubernetes::tests::test_user_should_use_default_if_no_matching_alias"
... | [
"bug_report::tests::test_make_github_link",
"config::tests::test_from_option",
"config::tests::test_from_i64",
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_inverted_silly_caps",
"config::tests::table_get_styles_plain_and_broken_styles",
"... | [] | [] |
starship/starship | 3,839 | starship__starship-3839 | [
"625"
] | 441ebb39b9cd451564959d259409d2395e7afb01 | diff --git a/.github/config-schema.json b/.github/config-schema.json
--- a/.github/config-schema.json
+++ b/.github/config-schema.json
@@ -122,7 +122,10 @@
"error_symbol": "[❯](bold red)",
"format": "$symbol ",
"success_symbol": "[❯](bold green)",
- "vicmd_symbol": "[❮](bold green)"
+ "vicmd_symbol": "[❮](bold green)",
+ "vimcmd_replace_one_symbol": "[❮](bold purple)",
+ "vimcmd_replace_symbol": "[❮](bold purple)",
+ "vimcmd_visual_symbol": "[❮](bold yellow)"
},
"allOf": [
{
diff --git a/.github/config-schema.json b/.github/config-schema.json
--- a/.github/config-schema.json
+++ b/.github/config-schema.json
@@ -1762,6 +1765,18 @@
"default": "[❮](bold green)",
"type": "string"
},
+ "vimcmd_visual_symbol": {
+ "default": "[❮](bold yellow)",
+ "type": "string"
+ },
+ "vimcmd_replace_symbol": {
+ "default": "[❮](bold purple)",
+ "type": "string"
+ },
+ "vimcmd_replace_one_symbol": {
+ "default": "[❮](bold purple)",
+ "type": "string"
+ },
"disabled": {
"default": false,
"type": "boolean"
diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -571,18 +571,23 @@ look at [this example](#with-custom-error-shape).
::: warning
`vicmd_symbol` is only supported in cmd, fish and zsh.
+`vimcmd_replace_one_symbol`, `vimcmd_replace_symbol`, and `vimcmd_visual_symbol`
+are only supported in fish due to [upstream issues with mode detection in zsh](https://github.com/starship/starship/issues/625#issuecomment-732454148).
:::
### Options
-| Option | Default | Description |
-| ---------------- | ------------------- | -------------------------------------------------------------------------------- |
-| `format` | `"$symbol "` | The format string used before the text input. |
-| `success_symbol` | `"[❯](bold green)"` | The format string used before the text input if the previous command succeeded. |
-| `error_symbol` | `"[❯](bold red)"` | The format string used before the text input if the previous command failed. |
-| `vicmd_symbol` | `"[❮](bold green)"` | The format string used before the text input if the shell is in vim normal mode. |
-| `disabled` | `false` | Disables the `character` module. |
+| Option | Default | Description |
+| -------------------------- | -------------------- | --------------------------------------------------------------------------------------- |
+| `format` | `"$symbol "` | The format string used before the text input. |
+| `success_symbol` | `"[❯](bold green)"` | The format string used before the text input if the previous command succeeded. |
+| `error_symbol` | `"[❯](bold red)"` | The format string used before the text input if the previous command failed. |
+| `vicmd_symbol` | `"[❮](bold green)"` | The format string used before the text input if the shell is in vim normal mode. |
+| `vicmd_replace_one_symbol` | `"[❮](bold purple)"` | The format string used before the text input if the shell is in vim `replace_one` mode. |
+| `vimcmd_replace_symbol` | `"[❮](bold purple)"` | The format string used before the text input if the shell is in vim replace mode. |
+| `vimcmd_visual_symbol` | `"[❮](bold yellow)"` | The format string used before the text input if the shell is in vim replace mode. |
+| `disabled` | `false` | Disables the `character` module. |
### Variables
diff --git a/src/configs/character.rs b/src/configs/character.rs
--- a/src/configs/character.rs
+++ b/src/configs/character.rs
@@ -8,6 +8,9 @@ pub struct CharacterConfig<'a> {
pub success_symbol: &'a str,
pub error_symbol: &'a str,
pub vicmd_symbol: &'a str,
+ pub vimcmd_visual_symbol: &'a str,
+ pub vimcmd_replace_symbol: &'a str,
+ pub vimcmd_replace_one_symbol: &'a str,
pub disabled: bool,
}
diff --git a/src/configs/character.rs b/src/configs/character.rs
--- a/src/configs/character.rs
+++ b/src/configs/character.rs
@@ -18,6 +21,9 @@ impl<'a> Default for CharacterConfig<'a> {
success_symbol: "[❯](bold green)",
error_symbol: "[❯](bold red)",
vicmd_symbol: "[❮](bold green)",
+ vimcmd_visual_symbol: "[❮](bold yellow)",
+ vimcmd_replace_symbol: "[❮](bold purple)",
+ vimcmd_replace_one_symbol: "[❮](bold purple)",
disabled: false,
}
}
diff --git a/src/modules/character.rs b/src/modules/character.rs
--- a/src/modules/character.rs
+++ b/src/modules/character.rs
@@ -4,7 +4,7 @@ use crate::formatter::StringFormatter;
/// Creates a module for the prompt character
///
-/// The character segment prints an arrow character in a color dependant on the
+/// The character segment prints an arrow character in a color dependent on the
/// exit-code of the last executed command:
/// - If the exit-code was "0", it will be formatted with `success_symbol`
/// (green arrow by default)
diff --git a/src/modules/character.rs b/src/modules/character.rs
--- a/src/modules/character.rs
+++ b/src/modules/character.rs
@@ -13,6 +13,9 @@ use crate::formatter::StringFormatter;
pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
enum ShellEditMode {
Normal,
+ Visual,
+ Replace,
+ ReplaceOne,
Insert,
}
const ASSUMED_MODE: ShellEditMode = ShellEditMode::Insert;
diff --git a/src/modules/character.rs b/src/modules/character.rs
--- a/src/modules/character.rs
+++ b/src/modules/character.rs
@@ -35,11 +38,17 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
(Shell::Fish, "default") | (Shell::Zsh, "vicmd") | (Shell::Cmd, "vi") => {
ShellEditMode::Normal
}
+ (Shell::Fish, "visual") => ShellEditMode::Visual,
+ (Shell::Fish, "replace") => ShellEditMode::Replace,
+ (Shell::Fish, "replace_one") => ShellEditMode::ReplaceOne,
_ => ASSUMED_MODE,
};
let symbol = match mode {
ShellEditMode::Normal => config.vicmd_symbol,
+ ShellEditMode::Visual => config.vimcmd_visual_symbol,
+ ShellEditMode::Replace => config.vimcmd_replace_symbol,
+ ShellEditMode::ReplaceOne => config.vimcmd_replace_one_symbol,
ShellEditMode::Insert => {
if exit_success {
config.success_symbol
| diff --git a/src/modules/character.rs b/src/modules/character.rs
--- a/src/modules/character.rs
+++ b/src/modules/character.rs
@@ -168,6 +177,9 @@ mod test {
fn fish_keymap() {
let expected_vicmd = Some(format!("{} ", Color::Green.bold().paint("❮")));
let expected_specified = Some(format!("{} ", Color::Green.bold().paint("V")));
+ let expected_visual = Some(format!("{} ", Color::Yellow.bold().paint("❮")));
+ let expected_replace = Some(format!("{} ", Color::Purple.bold().paint("❮")));
+ let expected_replace_one = expected_replace.clone();
let expected_other = Some(format!("{} ", Color::Green.bold().paint("❯")));
// fish keymap is default
diff --git a/src/modules/character.rs b/src/modules/character.rs
--- a/src/modules/character.rs
+++ b/src/modules/character.rs
@@ -188,11 +200,32 @@ mod test {
.collect();
assert_eq!(expected_specified, actual);
- // fish keymap is other
+ // fish keymap is visual
let actual = ModuleRenderer::new("character")
.shell(Shell::Fish)
.keymap("visual")
.collect();
+ assert_eq!(expected_visual, actual);
+
+ // fish keymap is replace
+ let actual = ModuleRenderer::new("character")
+ .shell(Shell::Fish)
+ .keymap("replace")
+ .collect();
+ assert_eq!(expected_replace, actual);
+
+ // fish keymap is replace_one
+ let actual = ModuleRenderer::new("character")
+ .shell(Shell::Fish)
+ .keymap("replace_one")
+ .collect();
+ assert_eq!(expected_replace_one, actual);
+
+ // fish keymap is other
+ let actual = ModuleRenderer::new("character")
+ .shell(Shell::Fish)
+ .keymap("other")
+ .collect();
assert_eq!(expected_other, actual);
}
| More informative VI mode indicator
## Feature Request
#### Is your feature request related to a problem? Please describe.
Recognise this might be a Fish-specific problem. Delving onwards...
Migrating from SpaceFish, where the vi-mode indicator gave feed back on whether you were in replace, visual, insert or normal mode, as described [here](https://github.com/matchai/spacefish/blob/master/docs/Options.md). I must admit, I'm a big fan of the character switching between insert and normal mode! But it gives me confusing information on visual or replace mode.
If you are in normal mode and then switch into either visual or replace mode, the prompt character switches back to insert mode. This is incredibly confusing, as there is now no clear indication of whether you are in insert, replace, or visual. When you then exit visual/replace mode, it switches back to the normal mode character, as expected.
#### Describe the solution you'd like
Solution A: Allow custom configuration of characters for each mode. Basically, the currently implementation expanded to have a character for visual and replace modes.
Solution B: Allow inclusion of the default `fish_mode_prompt`, though I suspect this would be more complicated.
#### Describe alternatives you've considered
I have manually configured `fish_mode_prompt`, but this is blocked/disabled by Starship.
| @rbpatt2019 I was able to add an indicator by adding this to my `starship.toml`.
```
[character]
symbol = "[I] >"
vicmd_symbol = "[N] >"
```
Would be great to be able to chose colors as well as the character change
@seanag0234 As was I! To clarify, what I'm looking for is an indicator not for Insert or command/normal mode (which I was able to achieve), but an indicator for visual mode and replace mode. Right now visual, replace, and insert mode are all indicated by the same symbol - " [I] >>>>" in my .toml - and there isn't an option to differentiate between the three.
> @rbpatt2019 I was able to add an indicator by adding this to my `starship.toml`.
>
> ```
> [character]
> symbol = "[I] >"
> vicmd_symbol = "[N] >"
> ```
I didn't get it to work ... Character stays the same in every VI-Mode...
My current config: https://raw.githubusercontent.com/Adrian-Grimm/DotFiles/master/.config/starship.toml
(Would be nice to have something like a config validation included... but that's another topic...)
As an update, I'm now using zsh on my work laptop and cannot find a way to add a visual/replace mode indicator there either.
@Adrian-Grimm & @rbpatt2019 - I was able to replicate the issue on bash and didn't see a bug filed for this, so I created one: #1171
Worth noting that `vicmd_symbol` seems to work just fine on Fish.
I haven't had a chance to test this behavior on Zsh.
After an hour of fiddling it seems that zsh does not report clear information regarding it's mode.
When inspecting `KEYMAP` variable in the `zle-keymap-select` hook, the only two values observed are `main` & `vicmd`, while I was switching from _Command_, _Replace_, _Visual_, _Insert_ and _Normal_ modes.
Based on this code that is in charge to identify the current mode,
```Rust
let mode = match (&context.shell, keymap.as_str()) {
(Shell::Fish, "default") | (Shell::Zsh, "vicmd") => ShellEditMode::Normal,
_ => ASSUMED_MODE,
};
```
it is not possible to know if the mode is different than Normal and Insert :(
---
As I was writing this message, I found a very informative comment [on a SO thread](https://stackoverflow.com/questions/39871079/detect-zsh-keymap-mode-for-vi-visual-mode) which states:
> Visual mode is not a single state in zsh: it's defined by the combination of the mark being set, region being active and vi command mode. You can change those states independently with a custom widget or with emacs widgets. viopp and visual are only ever used as local keymaps with vicmd remaining the selected keymap. This means you don't need to repeat many vi style bindings across three keymaps: bindings in vicmd are shared, Note how few bindings visual and viopp contain compared to vicmd. But it also means that they are never selected triggering the callback
My issue is solved: https://github.com/starship/starship/issues/1171#issuecomment-711085024
I'm hitting this right now in fish. starship only has two modes: "normal" and "insert". It maps Fish's "default" mode or Zsh's "vicmd" mode to "normal", and everything else to "insert".
https://github.com/starship/starship/blob/af43aeefba1cc12044f05a09a8b6f0ae309a556c/src/modules/character.rs#L34-L37
But fish has 6 different modes (default, insert, paste, replace, replace_one, and visual). "paste" can generally be ignored, it's just for bracketed paste mode (even `fish_default_mode_prompt` prints nothing for it). But the others should be configurable, both character and style.
---
FWIW anyone who wants to restore the original mode prompt can do so by just redefining it after sourcing the starship init. The simplest way is probably `source $__fish_data_dir/functions/fish_mode_prompt.fish`. The downside to this is having `fish_mode_prompt` defined means starship won't be rerun on mode changes, so you're giving up on having `character` reflect this. But it will sometimes get repainted in other modes, so you'll need to figure out workarounds for that.
I'd love to see a fix for this as well | 2022-04-06T02:00:34 | 1.5 | 230e85be37a0fc12999d1e6ff1209e7d5f99ecd1 | [
"modules::character::test::fish_keymap"
] | [
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_inverted_silly_caps",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
"config::tests::test_from_bool",
"config::tests::table_get_styles_ordered",
"config::tests::... | [] | [] |
starship/starship | 3,753 | starship__starship-3753 | [
"3689"
] | 52fa4bbab4393589aebbeded4351886e657a6275 | diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -1374,6 +1374,7 @@ The `git_branch` module shows the active branch of the repo in your current dire
| `truncation_length` | `2^63 - 1` | Truncates a git branch to `N` graphemes. |
| `truncation_symbol` | `"…"` | The symbol used to indicate a branch name was truncated. You can use `""` for no symbol. |
| `only_attached` | `false` | Only show the branch name when not in a detached `HEAD` state. |
+| `ignore_branches` | `[]` | A list of names to avoid displaying. Useful for "master" or "main". |
| `disabled` | `false` | Disables the `git_branch` module. |
### Variables
diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -1397,6 +1398,7 @@ The `git_branch` module shows the active branch of the repo in your current dire
symbol = "🌱 "
truncation_length = 4
truncation_symbol = ""
+ignore_branches = ["master", "main"]
```
## Git Commit
diff --git a/src/configs/git_branch.rs b/src/configs/git_branch.rs
--- a/src/configs/git_branch.rs
+++ b/src/configs/git_branch.rs
@@ -12,6 +12,7 @@ pub struct GitBranchConfig<'a> {
pub truncation_symbol: &'a str,
pub only_attached: bool,
pub always_show_remote: bool,
+ pub ignore_branches: Vec<&'a str>,
pub disabled: bool,
}
diff --git a/src/configs/git_branch.rs b/src/configs/git_branch.rs
--- a/src/configs/git_branch.rs
+++ b/src/configs/git_branch.rs
@@ -25,6 +26,7 @@ impl<'a> Default for GitBranchConfig<'a> {
truncation_symbol: "…",
only_attached: false,
always_show_remote: false,
+ ignore_branches: vec![],
disabled: false,
}
}
diff --git a/src/modules/git_branch.rs b/src/modules/git_branch.rs
--- a/src/modules/git_branch.rs
+++ b/src/modules/git_branch.rs
@@ -37,6 +37,14 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
let branch_name = repo.branch.as_ref()?;
let mut graphemes: Vec<&str> = branch_name.graphemes(true).collect();
+ if config
+ .ignore_branches
+ .iter()
+ .any(|ignored| branch_name.eq(ignored))
+ {
+ return None;
+ }
+
let mut remote_branch_graphemes: Vec<&str> = Vec::new();
let mut remote_name_graphemes: Vec<&str> = Vec::new();
if let Some(remote) = repo.remote.as_ref() {
| diff --git a/src/modules/git_branch.rs b/src/modules/git_branch.rs
--- a/src/modules/git_branch.rs
+++ b/src/modules/git_branch.rs
@@ -365,6 +373,29 @@ mod tests {
repo_dir.close()
}
+ #[test]
+ fn test_ignore_branches() -> io::Result<()> {
+ let repo_dir = fixture_repo(FixtureProvider::Git)?;
+
+ create_command("git")?
+ .args(&["checkout", "-b", "test_branch"])
+ .current_dir(repo_dir.path())
+ .output()?;
+
+ let actual = ModuleRenderer::new("git_branch")
+ .config(toml::toml! {
+ [git_branch]
+ ignore_branches = ["dummy", "test_branch"]
+ })
+ .path(&repo_dir.path())
+ .collect();
+
+ let expected = None;
+
+ assert_eq!(expected, actual);
+ repo_dir.close()
+ }
+
// This test is not possible until we switch to `git status --porcelain`
// where we can mock the env for the specific git process. This is because
// git2 does not care about our mocking and when we set the real `GIT_DIR`
| Only show git branch when not on master/main
I like to keep the prompt as simple as possible and for it to only show information when it is relevant and useful. I use a single-line prompt and it usually looks nearly identical to the standard Debian bash prompt (coloured) when there aren't any active starship modules.
While I would like the git branch symbol to appear when it a git directory, I only want the branch name to appear when not in main/master. Not only would it be a lot neater, it would be much better at reminding me when I am not in main, which I am often doing even when the branch name is always displaying.
Does anybody know of any workarounds that can make this possible?
| 2022-03-16T02:25:44 | 1.4 | 52fa4bbab4393589aebbeded4351886e657a6275 | [
"modules::git_branch::tests::test_ignore_branches"
] | [
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_inverted_silly_caps",
"config::tests::table_get_styles_ordered",
"config::tests::table_get_styles_plain_and_broken_s... | [] | [] | |
starship/starship | 3,750 | starship__starship-3750 | [
"3746"
] | 848bf693a4147db2651887f17751be11ba24e5a8 | diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -3083,7 +3083,7 @@ format = '[📦 \[$env\]]($style) '
## Status
The `status` module displays the exit code of the previous command.
-The module will be shown only if the exit code is not `0`.
+If $success_symbol is empty (default), the module will be shown only if the exit code is not `0`.
The status code will cast to a signed 32-bit integer.
::: tip
diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -3103,7 +3103,7 @@ This module is not supported on nu shell.
| ----------------------- | ----------------------------------------------------------------------------- | ------------------------------------------------------- |
| `format` | `"[$symbol$status]($style) "` | The format of the module |
| `symbol` | `"✖"` | The symbol displayed on program error |
-| `success_symbol` | `"✔️"` | The symbol displayed on program success |
+| `success_symbol` | `""` | The symbol displayed on program success |
| `not_executable_symbol` | `"🚫"` | The symbol displayed when file isn't executable |
| `not_found_symbol` | `"🔍"` | The symbol displayed when the command can't be found |
| `sigint_symbol` | `"🧱"` | The symbol displayed on SIGINT (Ctrl + c) |
diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -3140,8 +3140,9 @@ This module is not supported on nu shell.
[status]
style = "bg:blue"
-symbol = "🔴"
-format = '[\[$symbol $common_meaning$signal_name$maybe_int\]]($style) '
+symbol = "🔴 "
+success_symbol = "🟢 SUCCESS"
+format = '[\[$symbol$common_meaning$signal_name$maybe_int\]]($style) '
map_symbol = true
disabled = false
```
diff --git a/src/configs/status.rs b/src/configs/status.rs
--- a/src/configs/status.rs
+++ b/src/configs/status.rs
@@ -26,7 +26,7 @@ impl<'a> Default for StatusConfig<'a> {
StatusConfig {
format: "[$symbol$status]($style) ",
symbol: "✖",
- success_symbol: "✔️",
+ success_symbol: "",
not_executable_symbol: "🚫",
not_found_symbol: "🔍",
sigint_symbol: "🧱",
diff --git a/src/modules/status.rs b/src/modules/status.rs
--- a/src/modules/status.rs
+++ b/src/modules/status.rs
@@ -17,7 +17,7 @@ enum PipeStatusStatus<'a> {
/// Creates a module with the status of the last command
///
-/// Will display the status only if it is not 0
+/// Will display the status
pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
let mut module = context.new_module("status");
let config = StatusConfig::try_load(module.config);
diff --git a/src/modules/status.rs b/src/modules/status.rs
--- a/src/modules/status.rs
+++ b/src/modules/status.rs
@@ -43,8 +43,9 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
false => PipeStatusStatus::Disabled,
};
- // Exit code is zero and pipestatus is all zero or disabled/missing
+ // Exit code is zero while success_symbol and pipestatus are all zero or disabled/missing
if exit_code == "0"
+ && config.success_symbol.is_empty()
&& (match pipestatus_status {
PipeStatusStatus::Pipe(ps) => ps.iter().all(|s| s == "0"),
_ => true,
diff --git a/src/modules/status.rs b/src/modules/status.rs
--- a/src/modules/status.rs
+++ b/src/modules/status.rs
@@ -176,7 +177,7 @@ fn status_common_meaning(ex: ExitCode) -> Option<&'static str> {
return None;
}
match ex {
- 0 => Some(""),
+ 0 => Some(""), // SUCCESS can be defined by $success_symbol if the user wishes too.
1 => Some("ERROR"),
2 => Some("USAGE"),
126 => Some("NOPERM"),
| diff --git a/src/modules/status.rs b/src/modules/status.rs
--- a/src/modules/status.rs
+++ b/src/modules/status.rs
@@ -228,13 +229,59 @@ mod tests {
use crate::test::ModuleRenderer;
#[test]
- fn success_status() {
+ fn success_status_success_symbol_empty() {
let expected = None;
+ // Status code 0 and success_symbol = ""
+ let actual = ModuleRenderer::new("status")
+ .config(toml::toml! {
+ [status]
+ success_symbol = ""
+ disabled = false
+ })
+ .status(0)
+ .collect();
+ assert_eq!(expected, actual);
+
+ // Status code 0 and success_symbol is missing
+ let actual = ModuleRenderer::new("status")
+ .config(toml::toml! {
+ [status]
+ disabled = false
+ })
+ .status(0)
+ .collect();
+ assert_eq!(expected, actual);
+
+ // No status code and success_symbol = ""
+ let actual = ModuleRenderer::new("status")
+ .config(toml::toml! {
+ [status]
+ success_symbol = ""
+ disabled = false
+ })
+ .collect();
+ assert_eq!(expected, actual);
+
+ // No status code and success_symbol is missing
+ let actual = ModuleRenderer::new("status")
+ .config(toml::toml! {
+ [status]
+ disabled = false
+ })
+ .collect();
+ assert_eq!(expected, actual);
+ }
+
+ #[test]
+ fn success_status_success_symbol_filled() {
+ let expected = Some(format!("{} ", Color::Red.bold().paint("✔️0")));
+
// Status code 0
let actual = ModuleRenderer::new("status")
.config(toml::toml! {
[status]
+ success_symbol = "✔️"
disabled = false
})
.status(0)
diff --git a/src/modules/status.rs b/src/modules/status.rs
--- a/src/modules/status.rs
+++ b/src/modules/status.rs
@@ -245,6 +292,7 @@ mod tests {
let actual = ModuleRenderer::new("status")
.config(toml::toml! {
[status]
+ success_symbol = "✔️"
disabled = false
})
.collect();
| success_symbol option of the status module is being ignored
<!--
─────────────────────────────────────────────
⚠️ IMPORTANT: Please run the following command to create an issue:
starship bug-report
An issue will be pre-populated with your system's configuration,
making the process a whole lot quicker 😊
─────────────────────────────────────────────
-->
## Bug Report
#### Current Behavior
`success_symbol` option of the `status` module is being ignored.
<!-- A clear and concise description of the behavior. -->
#### Expected Behavior
Show the `success_symbol` when command succeed as describe in the [documentation](https://starship.rs/config/#options-61): "The symbol displayed on program success"
<!-- A clear and concise description of what you expected to happen. -->
#### Additional context/Screenshots
<!-- Add any other context about the problem here. If applicable, add screenshots to help explain. -->
```
❯ sh -c 'echo tst; exit 0;'
tst
❯ sh -c 'echo tst; exit 7;'
tst
❯ ✗ [7]
```
#### Possible Solution
<!--- Only if you have suggestions on a fix for the bug -->
ATM IDK. I will update this issue, if I come up with something.
#### Environment
- Starship version: `starship 1.4.2 branch: commit_hash: build_time:2022-03-10 18:01:38 +00:00 build_env:rustc 1.59.0`
- Shell type: zsh
- Shell version: `zsh 5.8 (x86_64-apple-darwin21.0)`
- Shell plugin manager: nothing
- Terminal emulators: iTerm and Alacritty
- Operating system: MacOS 12.2.1
#### Relevant Shell Configuration
<!--
Based on the shell you use, please paste the appropriate configuration.
The default location for your shell is:
Bash: ~/.bashrc
Zsh: ~/.zshrc
Fish: ~/.config/fish/config.fish
Xonsh: ~/.config/xonsh/rc.xsh
Elvish: ~/.config/elvish/rc.elv
Nushell: ~/.config/nu/config.toml
Ion: ~/.config/ion/initrc
-->
```
~
❯ cat ~/.zshrc
eval "$(starship init zsh)"
```
#### Starship Configuration
<!-- Can be found in ~/.config/starship.toml -->
```toml
format = " $character"
right_format = "$status "
[character]
success_symbol = "[❯](bold green)"
error_symbol = "[❯](bold green)"
vicmd_symbol = "[❮](bold green)"
[status]
style = "fg:blue"
symbol = "[✗](bold red)"
success_symbol = "[✔️](bold green)"
format = '$symbol$success_symbol [\[$int\]]($style) '
disabled = false
```
| Looking through the code, I see this comment:
https://github.com/starship/starship/blob/a10e24b2052047d431b6a44b0a202f605c39bc96/src/modules/status.rs#L20
Seems like a rather odd choice. Why to restrict the user from displaying success status if he wishes to?
Since it can easily be disabled by default by using: `success_symbol = ""`.
Also why is `success_symbol` needed if "display the status only if it is not 0"? | 2022-03-15T05:02:26 | 1.4 | 52fa4bbab4393589aebbeded4351886e657a6275 | [
"modules::status::tests::success_status_success_symbol_filled"
] | [
"config::tests::test_from_bool",
"config::tests::table_get_styles_with_none",
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_ordered",
"config::tests::table_get_styles_plain_and_broken_styles",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
... | [] | [] |
starship/starship | 3,569 | starship__starship-3569 | [
"3487"
] | 589576d3eb643dad0dd88dc5df0236625497177a | diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -1879,9 +1879,10 @@ kotlin_binary = "kotlinc"
## Kubernetes
-Displays the current [Kubernetes context](https://kubernetes.io/docs/concepts/configuration/organize-cluster-access-kubeconfig/#context) name and, if set, the namespace from the kubeconfig file.
+Displays the current [Kubernetes context](https://kubernetes.io/docs/concepts/configuration/organize-cluster-access-kubeconfig/#context) name and, if set, the namespace, user and cluster from the kubeconfig file.
The namespace needs to be set in the kubeconfig file, this can be done via
-`kubectl config set-context starship-cluster --namespace astronaut`.
+`kubectl config set-context starship-context --namespace astronaut`.
+Similarly the user and cluster can be set with `kubectl config set-context starship-context --user starship-user` and `kubectl config set-context starship-context --cluster starship-cluster`.
If the `$KUBECONFIG` env var is set the module will use that if not it will use the `~/.kube/config`.
::: tip
diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -1905,8 +1906,10 @@ To enable it, set `disabled` to `false` in your configuration file.
| Variable | Example | Description |
| --------- | -------------------- | ---------------------------------------- |
-| context | `starship-cluster` | The current kubernetes context |
+| context | `starship-context` | The current kubernetes context name |
| namespace | `starship-namespace` | If set, the current kubernetes namespace |
+| user | `starship-user` | If set, the current kubernetes user |
+| cluster | `starship-cluster` | If set, the current kubernetes cluster |
| symbol | | Mirrors the value of option `symbol` |
| style\* | | Mirrors the value of option `style` |
diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -1918,12 +1921,12 @@ To enable it, set `disabled` to `false` in your configuration file.
# ~/.config/starship.toml
[kubernetes]
-format = 'on [⛵ $context \($namespace\)](dimmed green) '
+format = 'on [⛵ ($user on )($cluster in )$context \($namespace\)](dimmed green) '
disabled = false
[kubernetes.context_aliases]
"dev.local.cluster.k8s" = "dev"
".*/openshift-cluster/.*" = "openshift"
-"gke_.*_(?P<cluster>[\\w-]+)" = "gke-$cluster"
+"gke_.*_(?P<var_cluster>[\\w-]+)" = "gke-$var_cluster"
```
#### Regex Matching
diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -1943,12 +1946,12 @@ and shortened using regular expressions:
# OpenShift contexts carry the namespace and user in the kube context: `namespace/name/user`:
".*/openshift-cluster/.*" = "openshift"
# Or better, to rename every OpenShift cluster at once:
-".*/(?P<cluster>[\\w-]+)/.*" = "$cluster"
+".*/(?P<var_cluster>[\\w-]+)/.*" = "$var_cluster"
# Contexts from GKE, AWS and other cloud providers usually carry additional information, like the region/zone.
# The following entry matches on the GKE format (`gke_projectname_zone_cluster-name`)
# and renames every matching kube context into a more readable format (`gke-cluster-name`):
-"gke_.*_(?P<cluster>[\\w-]+)" = "gke-$cluster"
+"gke_.*_(?P<var_cluster>[\\w-]+)" = "gke-$var_cluster"
```
## Line Break
diff --git a/src/modules/kubernetes.rs b/src/modules/kubernetes.rs
--- a/src/modules/kubernetes.rs
+++ b/src/modules/kubernetes.rs
@@ -10,6 +10,12 @@ use crate::configs::kubernetes::KubernetesConfig;
use crate::formatter::StringFormatter;
use crate::utils;
+struct KubeCtxComponents {
+ user: Option<String>,
+ namespace: Option<String>,
+ cluster: Option<String>,
+}
+
fn get_kube_context(filename: path::PathBuf) -> Option<String> {
let contents = utils::read_file(filename).ok()?;
diff --git a/src/modules/kubernetes.rs b/src/modules/kubernetes.rs
--- a/src/modules/kubernetes.rs
+++ b/src/modules/kubernetes.rs
@@ -27,7 +33,10 @@ fn get_kube_context(filename: path::PathBuf) -> Option<String> {
Some(current_ctx.to_string())
}
-fn get_kube_ns(filename: path::PathBuf, current_ctx: String) -> Option<String> {
+fn get_kube_ctx_component(
+ filename: path::PathBuf,
+ current_ctx: String,
+) -> Option<KubeCtxComponents> {
let contents = utils::read_file(filename).ok()?;
let yaml_docs = YamlLoader::load_from_str(&contents).ok()?;
diff --git a/src/modules/kubernetes.rs b/src/modules/kubernetes.rs
--- a/src/modules/kubernetes.rs
+++ b/src/modules/kubernetes.rs
@@ -36,18 +45,41 @@ fn get_kube_ns(filename: path::PathBuf, current_ctx: String) -> Option<String> {
}
let conf = &yaml_docs[0];
- let ns = conf["contexts"].as_vec().and_then(|contexts| {
+ let ctx_yaml = conf["contexts"].as_vec().and_then(|contexts| {
contexts
.iter()
.filter_map(|ctx| Some((ctx, ctx["name"].as_str()?)))
.find(|(_, name)| *name == current_ctx)
+ });
+
+ let ctx_components = KubeCtxComponents {
+ user: ctx_yaml
+ .and_then(|(ctx, _)| ctx["context"]["user"].as_str())
+ .and_then(|s| {
+ if s.is_empty() {
+ return None;
+ }
+ Some(s.to_owned())
+ }),
+ namespace: ctx_yaml
.and_then(|(ctx, _)| ctx["context"]["namespace"].as_str())
- })?;
+ .and_then(|s| {
+ if s.is_empty() {
+ return None;
+ }
+ Some(s.to_owned())
+ }),
+ cluster: ctx_yaml
+ .and_then(|(ctx, _)| ctx["context"]["cluster"].as_str())
+ .and_then(|s| {
+ if s.is_empty() {
+ return None;
+ }
+ Some(s.to_owned())
+ }),
+ };
- if ns.is_empty() {
- return None;
- }
- Some(ns.to_owned())
+ Some(ctx_components)
}
fn get_kube_context_name<'a>(config: &'a KubernetesConfig, kube_ctx: &'a str) -> Cow<'a, str> {
diff --git a/src/modules/kubernetes.rs b/src/modules/kubernetes.rs
--- a/src/modules/kubernetes.rs
+++ b/src/modules/kubernetes.rs
@@ -86,8 +118,22 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
let kube_ctx = env::split_paths(&kube_cfg).find_map(get_kube_context)?;
- let kube_ns =
- env::split_paths(&kube_cfg).find_map(|filename| get_kube_ns(filename, kube_ctx.clone()));
+ let ctx_components: Vec<Option<KubeCtxComponents>> = env::split_paths(&kube_cfg)
+ .map(|filename| get_kube_ctx_component(filename, kube_ctx.clone()))
+ .collect();
+
+ let kube_user = ctx_components.iter().find(|&ctx| match ctx {
+ Some(kube) => kube.user.is_some(),
+ None => false,
+ });
+ let kube_ns = ctx_components.iter().find(|&ctx| match ctx {
+ Some(kube) => kube.namespace.is_some(),
+ None => false,
+ });
+ let kube_cluster = ctx_components.iter().find(|&ctx| match ctx {
+ Some(kube) => kube.cluster.is_some(),
+ None => false,
+ });
let parsed = StringFormatter::new(config.format).and_then(|formatter| {
formatter
diff --git a/src/modules/kubernetes.rs b/src/modules/kubernetes.rs
--- a/src/modules/kubernetes.rs
+++ b/src/modules/kubernetes.rs
@@ -101,7 +147,26 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
})
.map(|variable| match variable {
"context" => Some(Ok(get_kube_context_name(&config, &kube_ctx))),
- "namespace" => kube_ns.as_ref().map(|s| Ok(Cow::Borrowed(s.as_str()))),
+
+ "namespace" => kube_ns.and_then(|ctx| {
+ ctx.as_ref().map(|kube| {
+ // unwrap is safe as kube_ns only holds kube.namespace.is_some()
+ Ok(Cow::Borrowed(kube.namespace.as_ref().unwrap().as_str()))
+ })
+ }),
+ "user" => kube_user.and_then(|ctx| {
+ ctx.as_ref().map(|kube| {
+ // unwrap is safe as kube_user only holds kube.user.is_some()
+ Ok(Cow::Borrowed(kube.user.as_ref().unwrap().as_str()))
+ })
+ }),
+ "cluster" => kube_cluster.and_then(|ctx| {
+ ctx.as_ref().map(|kube| {
+ // unwrap is safe as kube_cluster only holds kube.cluster.is_some()
+ Ok(Cow::Borrowed(kube.cluster.as_ref().unwrap().as_str()))
+ })
+ }),
+
_ => None,
})
.parse(None, Some(context))
| diff --git a/src/modules/kubernetes.rs b/src/modules/kubernetes.rs
--- a/src/modules/kubernetes.rs
+++ b/src/modules/kubernetes.rs
@@ -463,4 +528,162 @@ users: []
dir.close()
}
+
+ #[test]
+ fn test_kube_user() -> io::Result<()> {
+ let dir = tempfile::tempdir()?;
+
+ let filename = dir.path().join("config");
+
+ let mut file = File::create(&filename)?;
+ file.write_all(
+ b"
+apiVersion: v1
+clusters: []
+contexts:
+ - context:
+ cluster: test_cluster
+ user: test_user
+ namespace: test_namespace
+ name: test_context
+current-context: test_context
+kind: Config
+preferences: {}
+users: []
+",
+ )?;
+ file.sync_all()?;
+
+ let actual = ModuleRenderer::new("kubernetes")
+ .path(dir.path())
+ .env("KUBECONFIG", filename.to_string_lossy().as_ref())
+ .config(toml::toml! {
+ [kubernetes]
+ format = "($user)"
+ disabled = false
+ })
+ .collect();
+
+ let expected = Some("test_user".to_string());
+ assert_eq!(expected, actual);
+ dir.close()
+ }
+
+ #[test]
+ fn test_kube_cluster() -> io::Result<()> {
+ let dir = tempfile::tempdir()?;
+
+ let filename = dir.path().join("config");
+
+ let mut file = File::create(&filename)?;
+ file.write_all(
+ b"
+apiVersion: v1
+clusters: []
+contexts:
+ - context:
+ cluster: test_cluster
+ user: test_user
+ namespace: test_namespace
+ name: test_context
+current-context: test_context
+kind: Config
+preferences: {}
+users: []
+",
+ )?;
+ file.sync_all()?;
+
+ let actual = ModuleRenderer::new("kubernetes")
+ .path(dir.path())
+ .env("KUBECONFIG", filename.to_string_lossy().as_ref())
+ .config(toml::toml! {
+ [kubernetes]
+ format = "($cluster)"
+ disabled = false
+ })
+ .collect();
+
+ let expected = Some("test_cluster".to_string());
+ assert_eq!(expected, actual);
+ dir.close()
+ }
+
+ #[test]
+ fn test_kube_user_missing() -> io::Result<()> {
+ let dir = tempfile::tempdir()?;
+
+ let filename = dir.path().join("config");
+
+ let mut file = File::create(&filename)?;
+ file.write_all(
+ b"
+apiVersion: v1
+clusters: []
+contexts:
+ - context:
+ cluster: test_cluster
+ namespace: test_namespace
+ name: test_context
+current-context: test_context
+kind: Config
+preferences: {}
+users: []
+",
+ )?;
+ file.sync_all()?;
+
+ let actual = ModuleRenderer::new("kubernetes")
+ .path(dir.path())
+ .env("KUBECONFIG", filename.to_string_lossy().as_ref())
+ .config(toml::toml! {
+ [kubernetes]
+ format = "$symbol($user )($cluster )($namespace)"
+ disabled = false
+ })
+ .collect();
+
+ let expected = Some("☸ test_cluster test_namespace".to_string());
+ assert_eq!(expected, actual);
+ dir.close()
+ }
+
+ #[test]
+ fn test_kube_cluster_missing() -> io::Result<()> {
+ let dir = tempfile::tempdir()?;
+
+ let filename = dir.path().join("config");
+
+ let mut file = File::create(&filename)?;
+ file.write_all(
+ b"
+apiVersion: v1
+clusters: []
+contexts:
+ - context:
+ user: test_user
+ namespace: test_namespace
+ name: test_context
+current-context: test_context
+kind: Config
+preferences: {}
+users: []
+",
+ )?;
+ file.sync_all()?;
+
+ let actual = ModuleRenderer::new("kubernetes")
+ .path(dir.path())
+ .env("KUBECONFIG", filename.to_string_lossy().as_ref())
+ .config(toml::toml! {
+ [kubernetes]
+ format = "$symbol($user )($cluster )($namespace)"
+ disabled = false
+ })
+ .collect();
+
+ let expected = Some("☸ test_user test_namespace".to_string());
+ assert_eq!(expected, actual);
+ dir.close()
+ }
}
| How to set kubernetes username ie with whom we login into k8s cluster in prompt
<!--
─────────────────────────────────────────────
⚠️ IMPORTANT: Please run the following command to create an issue:
starship bug-report
An issue will be pre-populated with your system's configuration,
making the process a whole lot quicker 😊
─────────────────────────────────────────────
-->
## Bug Report
#### Current Behavior
<!-- A clear and concise description of the behavior. -->
Not able to set k8s username in prompt
#### Expected Behavior
we should able to show username
#### Environment
- Starship version: [the output of `starship --version`]
```
starship 1.1.1
tag:v1.1.1
commit_hash:9a4b2bb5
build_time:2021-12-21 20:12:04 +00:00
build_env:rustc 1.57.0 (f1edd0429 2021-11-29),
```
- Shell type: [fish, zsh] bash
- Shell version: [the output of `fish --version` or `zsh --version`]
- Shell plugin manager: [if present, e.g. oh-my-fish, oh-my-zsh, fisher, antigen] Nothing
- Terminal emulator: [e.g. iTerm, Hyper, Terminator] putty/mobaX
- Operating system: [e.g. macOS 10.13.4, Windows 10] : RHEL
#### Starship Configuration
<!-- Can be found in ~/.config/starship.toml -->
[kubernetes]
format = '[☸︎ $context \($namespace\)](dimmed green) '
disabled = false
| 2022-02-06T23:40:28 | 1.2 | 589576d3eb643dad0dd88dc5df0236625497177a | [
"modules::aws::tests::expiration_date_set",
"modules::kubernetes::tests::test_kube_cluster",
"modules::kubernetes::tests::test_kube_cluster_missing",
"modules::kubernetes::tests::test_kube_user_missing",
"modules::kubernetes::tests::test_kube_user"
] | [
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_inverted_silly_caps",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
"config::tests::table_get_styles_ordered",
"config::tests::table_get_styles_plain_and_broken_s... | [] | [] | |
starship/starship | 3,536 | starship__starship-3536 | [
"3221"
] | c89c13038a34a52291d253e6d4b15c0dd4aa5dfa | diff --git a/src/modules/directory.rs b/src/modules/directory.rs
--- a/src/modules/directory.rs
+++ b/src/modules/directory.rs
@@ -106,7 +106,9 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
let after_repo_root = contracted_path.replacen(repo_path_vec[0], "", 1);
let num_segments_after_root = after_repo_root.split('/').count();
- if ((num_segments_after_root - 1) as i64) < config.truncation_length {
+ if config.truncation_length == 0
+ || ((num_segments_after_root - 1) as i64) < config.truncation_length
+ {
let root = repo_path_vec[0];
let before = dir_string.replace(&contracted_path, "");
[prefix + &before, root.to_string(), after_repo_root]
| diff --git a/src/modules/directory.rs b/src/modules/directory.rs
--- a/src/modules/directory.rs
+++ b/src/modules/directory.rs
@@ -1669,6 +1671,36 @@ mod tests {
assert_eq!(expected, actual);
tmp_dir.close()
}
+
+ #[test]
+ fn highlight_git_root_dir_zero_truncation_length() -> io::Result<()> {
+ let (tmp_dir, _) = make_known_tempdir(Path::new("/tmp"))?;
+ let repo_dir = tmp_dir.path().join("above").join("repo");
+ let dir = repo_dir.join("src/sub/path");
+ fs::create_dir_all(&dir)?;
+ init_repo(&repo_dir).unwrap();
+
+ let actual = ModuleRenderer::new("directory")
+ .config(toml::toml! {
+ [directory]
+ truncation_length = 0
+ truncate_to_repo = false
+ repo_root_style = "green"
+ })
+ .path(dir)
+ .collect();
+ let expected = Some(format!(
+ "{}{}repo{} ",
+ Color::Cyan.bold().paint(convert_path_sep(
+ tmp_dir.path().join("above/").to_str().unwrap()
+ )),
+ Color::Green.prefix(),
+ Color::Cyan.bold().paint(convert_path_sep("/src/sub/path"))
+ ));
+ assert_eq!(expected, actual);
+ tmp_dir.close()
+ }
+
// sample for invalid unicode from https://doc.rust-lang.org/std/ffi/struct.OsStr.html#method.to_string_lossy
#[cfg(any(unix, target_os = "redox"))]
fn invalid_path() -> PathBuf {
| Custom repo_root_style does not work with truncation_length = 0
<!--
─────────────────────────────────────────────
⚠️ IMPORTANT: Please run the following command to create an issue:
starship bug-report
An issue will be pre-populated with your system's configuration,
making the process a whole lot quicker 😊
─────────────────────────────────────────────
-->
(URL generated by `starship bug-report` gave an HTTP ERROR 400 in the browser, so didn't use that. Sorry.)
## Bug Report
#### Current Behavior
Custom `repo_root_style` does not work with `truncation_length = 0`.
#### Expected Behavior
Custom `repo_root_style` should work with `truncation_length = 0`.
#### Additional context/Screenshots
If I remove `truncation_length = 0` from my config or set it to a value different from `0`, the directory which is the repo root correctly shows with the style defined in `repo_root_style`. If I keep `truncation_length = 0` in my config, the directory which is the repo root shows with the same style as the other directories.
According to the documentation on `truncation_length`:
> `0` means no truncation.
so I'm considering it as a valid value, and that should not interfere with `repo_root_style`.
#### Possible Solution
<!--- Only if you have suggestions on a fix for the bug -->
#### Environment
- Starship version:
```
starship 1.0.0
branch:
commit_hash:
build_time:2021-11-09 21:30:28 +00:00
build_env:rustc 1.56.1,
```
- Shell type: zsh
- Shell version: `zsh 5.8 (x86_64-apple-darwin20.0)`
- Shell plugin manager: Zim
- Terminal emulator: iTerm
- Operating system: macOS 11.6.1
#### Relevant Shell Configuration
<!--
Based on the shell you use, please paste the appropriate configuration.
Fish users: ~/.config/fish/config.fish
Zsh users: ~/.zshrc
-->
```bash
if (( ${+commands[starship]} )) eval "$(starship init zsh)"
```
#### Starship Configuration
<!-- Can be found in ~/.config/starship.toml -->
```bash
[directory]
truncation_length = 0
truncate_to_repo = false
repo_root_style = 'bold red'
read_only = ' '
```
| 2022-01-29T08:30:41 | 1.5 | 230e85be37a0fc12999d1e6ff1209e7d5f99ecd1 | [
"modules::directory::tests::highlight_git_root_dir_zero_truncation_length"
] | [
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_inverted_silly_caps",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
"config::tests::table_get_styles_ordered",
"config::tests::test_from_bool",
"config::tests::... | [] | [] | |
starship/starship | 3,359 | starship__starship-3359 | [
"3358"
] | c63e9a71bd958c576100fbbeaf5723bb22450fd9 | diff --git a/src/modules/rust.rs b/src/modules/rust.rs
--- a/src/modules/rust.rs
+++ b/src/modules/rust.rs
@@ -79,7 +79,7 @@ fn get_module_version(context: &Context, config: &RustConfig) -> Option<String>
.or_else(|| find_rust_toolchain_file(context))
.or_else(|| execute_rustup_default(context))
{
- match execute_rustup_run_rustc_version(&toolchain) {
+ match execute_rustup_run_rustc_version(context, &toolchain) {
RustupRunRustcVersionOutcome::RustcVersion(rustc_version) => {
format_rustc_version(&rustc_version, config.version_format)
}
diff --git a/src/modules/rust.rs b/src/modules/rust.rs
--- a/src/modules/rust.rs
+++ b/src/modules/rust.rs
@@ -87,12 +87,12 @@ fn get_module_version(context: &Context, config: &RustConfig) -> Option<String>
RustupRunRustcVersionOutcome::RustupNotWorking => {
// If `rustup` is not in `$PATH` or cannot be executed for other reasons, we can
// safely execute `rustc --version`.
- format_rustc_version(&execute_rustc_version()?, config.version_format)
+ format_rustc_version(&execute_rustc_version(context)?, config.version_format)
}
RustupRunRustcVersionOutcome::Err => None,
}
} else {
- format_rustc_version(&execute_rustc_version()?, config.version_format)
+ format_rustc_version(&execute_rustc_version(context)?, config.version_format)
}
}
diff --git a/src/modules/rust.rs b/src/modules/rust.rs
--- a/src/modules/rust.rs
+++ b/src/modules/rust.rs
@@ -123,15 +123,21 @@ fn extract_toolchain_from_rustup_override_list(stdout: &str, cwd: &Path) -> Opti
if stdout == "no overrides\n" {
return None;
}
+ // use display version of path, also allows stripping \\?\
+ let cwd = cwd.to_string_lossy();
+ // rustup strips \\?\ prefix
+ #[cfg(windows)]
+ let cwd = cwd.strip_prefix(r"\\?\").unwrap_or(&cwd);
+
stdout
.lines()
.filter_map(|line| {
- let mut words = line.split_whitespace();
- let dir = words.next()?;
- let toolchain = words.next()?;
- Some((dir, toolchain))
+ let (dir, toolchain) = line.split_once('\t')?;
+ Some((dir.trim(), toolchain.trim()))
})
- .find(|(dir, _)| cwd.starts_with(dir))
+ // find most specific match
+ .filter(|(dir, _)| cwd.starts_with(dir))
+ .max_by_key(|(dir, _)| dir.len())
.map(|(_, toolchain)| toolchain.to_owned())
}
diff --git a/src/modules/rust.rs b/src/modules/rust.rs
--- a/src/modules/rust.rs
+++ b/src/modules/rust.rs
@@ -199,9 +205,16 @@ fn find_rust_toolchain_file(context: &Context) -> Option<String> {
}
}
-fn execute_rustup_run_rustc_version(toolchain: &str) -> RustupRunRustcVersionOutcome {
+fn execute_rustup_run_rustc_version(
+ context: &Context,
+ toolchain: &str,
+) -> RustupRunRustcVersionOutcome {
create_command("rustup")
- .and_then(|mut cmd| cmd.args(&["run", toolchain, "rustc", "--version"]).output())
+ .and_then(|mut cmd| {
+ cmd.args(&["run", toolchain, "rustc", "--version"])
+ .current_dir(&context.current_dir)
+ .output()
+ })
.map(extract_toolchain_from_rustup_run_rustc_version)
.unwrap_or(RustupRunRustcVersionOutcome::RustupNotWorking)
}
diff --git a/src/modules/rust.rs b/src/modules/rust.rs
--- a/src/modules/rust.rs
+++ b/src/modules/rust.rs
@@ -222,11 +235,11 @@ fn extract_toolchain_from_rustup_run_rustc_version(output: Output) -> RustupRunR
RustupRunRustcVersionOutcome::Err
}
-fn execute_rustc_version() -> Option<String> {
- match create_command("rustc").ok()?.arg("--version").output() {
- Ok(output) => Some(String::from_utf8(output.stdout).unwrap()),
- Err(_) => None,
- }
+fn execute_rustc_version(context: &Context) -> Option<String> {
+ context
+ .exec_cmd("rustc", &["--version"])
+ .map(|o| o.stdout)
+ .filter(|s| !s.is_empty())
}
fn format_rustc_version(rustc_version: &str, version_format: &str) -> Option<String> {
| diff --git a/src/modules/rust.rs b/src/modules/rust.rs
--- a/src/modules/rust.rs
+++ b/src/modules/rust.rs
@@ -275,11 +288,13 @@ mod tests {
);
static OVERRIDES_INPUT: &str =
- "/home/user/src/a beta-x86_64-unknown-linux-gnu\n\
- /home/user/src/b nightly-x86_64-unknown-linux-gnu\n";
+ "/home/user/src/a \t beta-x86_64-unknown-linux-gnu\n\
+ /home/user/src/b \t nightly-x86_64-unknown-linux-gnu\n\
+ /home/user/src/b/d c \t stable-x86_64-pc-windows-msvc\n";
static OVERRIDES_CWD_A: &str = "/home/user/src/a/src";
static OVERRIDES_CWD_B: &str = "/home/user/src/b/tests";
static OVERRIDES_CWD_C: &str = "/home/user/src/c/examples";
+ static OVERRIDES_CWD_D: &str = "/home/user/src/b/d c/spaces";
assert_eq!(
extract_toolchain_from_rustup_override_list(OVERRIDES_INPUT, OVERRIDES_CWD_A.as_ref()),
Some("beta-x86_64-unknown-linux-gnu".to_owned()),
diff --git a/src/modules/rust.rs b/src/modules/rust.rs
--- a/src/modules/rust.rs
+++ b/src/modules/rust.rs
@@ -292,6 +307,29 @@ mod tests {
extract_toolchain_from_rustup_override_list(OVERRIDES_INPUT, OVERRIDES_CWD_C.as_ref()),
None,
);
+ assert_eq!(
+ extract_toolchain_from_rustup_override_list(OVERRIDES_INPUT, OVERRIDES_CWD_D.as_ref()),
+ Some("stable-x86_64-pc-windows-msvc".to_owned()),
+ );
+ }
+
+ #[test]
+ #[cfg(windows)]
+ fn test_extract_toolchain_from_rustup_override_list_win() {
+ static OVERRIDES_INPUT: &str =
+ "C:\\src \t beta-x86_64-unknown-linux-gnu\n";
+ static OVERRIDES_CWD_A: &str = r"\\?\C:\src";
+ static OVERRIDES_CWD_B: &str = r"C:\src";
+
+ assert_eq!(
+ extract_toolchain_from_rustup_override_list(OVERRIDES_INPUT, OVERRIDES_CWD_A.as_ref()),
+ Some("beta-x86_64-unknown-linux-gnu".to_owned()),
+ );
+
+ assert_eq!(
+ extract_toolchain_from_rustup_override_list(OVERRIDES_INPUT, OVERRIDES_CWD_B.as_ref()),
+ Some("beta-x86_64-unknown-linux-gnu".to_owned()),
+ );
}
#[cfg(any(unix, windows))]
| Rust version is incorrect when creating a rustup override
#### Current Behavior
<!-- A clear and concise description of the behavior. -->
I have created an override with rustup, `rustup override set nightly` in one of my folders.
#### Expected Behavior
<!-- A clear and concise description of what you expected to happen. -->
Starship thinks that the directory is using the stable toolchain when it isn't.
#### Additional context/Screenshots
<!-- Add any other context about the problem here. If applicable, add screenshots to help explain. -->
<img width="413" alt="image" src="https://user-images.githubusercontent.com/50248166/147595316-e58cc225-a141-4dd7-be78-a0a649a95f0b.png">
#### Possible Solution
<!--- Only if you have suggestions on a fix for the bug -->
#### Environment
- Starship version: 1.1.1
- powershell version: <unknown version>
- Operating system: Windows 10.0.22000
- Terminal emulator: Alacritty
- Git Commit Hash: 9a4b2bb5
- Branch/Tag: v1.1.1
- Rust Version: rustc 1.57.0 (f1edd0429 2021-11-29)
- Rust channel: stable-x86_64-pc-windows-msvc release
- Build Time: 2021-12-21 20:14:52 +00:00
#### Relevant Shell Configuration
```bash
<unknown config>
```
#### Starship Configuration
```toml
[time]
disabled = false
format = '[\[ $time \]]($style) '
time_format = "%T"
[nodejs]
disabled = true
[git_status]
disabled = true
[cmd_duration]
min_time = 500
format = "took [$duration](bold yellow)"
[battery]
disabled = true
[git_metrics]
disabled = false
[lua]
disabled = true
```
| starship is reading the output of `rustup override list` and tries to match it with the CWD. Related PR is https://github.com/starship/starship/pull/426 .
A wild guess would be that there is a problem with matching the path under Windows?
The place in the code is [here](https://github.com/syphar/starship/blob/eaa3cc18755eb91b5bcfd4c636881a40f954078b/src/modules/rust.rs#L112-L127), but I lack the machine to verify this. | 2021-12-29T06:01:52 | 1.1 | c63e9a71bd958c576100fbbeaf5723bb22450fd9 | [
"modules::rust::tests::test_extract_toolchain_from_rustup_override_list"
] | [
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_inverted_silly_caps",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
"config::tests::table_get_styles_ordered",
"config::tests::test_from_bool",
"config::tests::... | [] | [] |
starship/starship | 3,349 | starship__starship-3349 | [
"3341"
] | 10e89121595827ce99042d036b1ee55f8a3a6a08 | diff --git a/src/modules/dart.rs b/src/modules/dart.rs
--- a/src/modules/dart.rs
+++ b/src/modules/dart.rs
@@ -3,6 +3,7 @@ use super::{Context, Module, RootModuleConfig};
use crate::configs::dart::DartConfig;
use crate::formatter::StringFormatter;
use crate::formatter::VersionFormatter;
+use crate::utils::get_command_string_output;
/// Creates a module with the current Dart version
pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
diff --git a/src/modules/dart.rs b/src/modules/dart.rs
--- a/src/modules/dart.rs
+++ b/src/modules/dart.rs
@@ -32,8 +33,8 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
})
.map(|variable| match variable {
"version" => {
- let dart_version =
- parse_dart_version(&context.exec_cmd("dart", &["--version"])?.stderr)?;
+ let command = context.exec_cmd("dart", &["--version"])?;
+ let dart_version = parse_dart_version(&get_command_string_output(command))?;
VersionFormatter::format_module_version(
module.get_name(),
&dart_version,
| diff --git a/src/modules/dart.rs b/src/modules/dart.rs
--- a/src/modules/dart.rs
+++ b/src/modules/dart.rs
@@ -71,6 +72,7 @@ fn parse_dart_version(dart_version: &str) -> Option<String> {
#[cfg(test)]
mod tests {
use crate::test::ModuleRenderer;
+ use crate::utils::CommandOutput;
use ansi_term::Color;
use std::fs::{self, File};
use std::io;
diff --git a/src/modules/dart.rs b/src/modules/dart.rs
--- a/src/modules/dart.rs
+++ b/src/modules/dart.rs
@@ -138,4 +140,25 @@ mod tests {
assert_eq!(expected, actual);
dir.close()
}
+
+ #[test]
+ fn detect_version_output_in_stdout() -> io::Result<()> {
+ // after dart 2.15.0, version info output in stdout.
+ let dir = tempfile::tempdir()?;
+ File::create(dir.path().join("any.dart"))?.sync_all()?;
+
+ let actual = ModuleRenderer::new("dart")
+ .cmd(
+ "dart --version",
+ Some(CommandOutput {
+ stdout: String::from("Dart SDK version: 2.15.1 (stable) (Tue Dec 14 13:32:21 2021 +0100) on \"linux_x64\""),
+ stderr: String::default(),
+ }),
+ )
+ .path(dir.path())
+ .collect();
+ let expected = Some(format!("via {}", Color::Blue.bold().paint("🎯 v2.15.1 ")));
+ assert_eq!(expected, actual);
+ dir.close()
+ }
}
| Dart version is not shown at all.
#### Current Behavior
With default dart module config I am shown `via 🎯 `, when I am in directory with dart project.
#### Expected Behavior
It should show the version. The output should be `via 🎯 2.15.1`
#### Additional context/Screenshots
I have not installed dart directly but is shipped with flutter. The output of `dart --version` is `Dart SDK version: 2.15.1 (stable) (Tue Dec 14 13:32:21 2021 +0100) on "linux_x64"`. I cross checked with the parse function in code (I am not a Rust developer) - looks like the implementation is correct, not sure where it is failing
#### Possible Solution
<!--- Only if you have suggestions on a fix for the bug -->
#### Environment
- Starship version: 1.0.0
- bash version: GNU bash, version 5.0.17(1)-release (x86_64-pc-linux-gnu)
Copyright (C) 2019 Free Software Foundation, Inc.
License GPLv3+: GNU GPL version 3 or later <http://gnu.org/licenses/gpl.html>
This is free software; you are free to change and redistribute it.
There is NO WARRANTY, to the extent permitted by law.
- Operating system: Ubuntu 20.04
- Terminal emulator: <unknown terminal> <unknown version>
- Git Commit Hash: dfbf861f
- Branch/Tag: v1.0.0
- Rust Version: rustc 1.56.1 (59eed8a2a 2021-11-01)
- Rust channel: release
- Build Time: 2021-11-09 21:35:52 +00:00
#### Relevant Shell Configuration
```bash
# ~/.bashrc: executed by bash(1) for non-login shells.
# see /usr/share/doc/bash/examples/startup-files (in the package bash-doc)
# for examples
# If not running interactively, don't do anything
case $- in
*i*) ;;
*) return;;
esac
# don't put duplicate lines or lines starting with space in the history.
# See bash(1) for more options
HISTCONTROL=ignoreboth
# append to the history file, don't overwrite it
shopt -s histappend
# for setting history length see HISTSIZE and HISTFILESIZE in bash(1)
HISTSIZE=1000
HISTFILESIZE=2000
# check the window size after each command and, if necessary,
# update the values of LINES and COLUMNS.
shopt -s checkwinsize
# If set, the pattern "**" used in a pathname expansion context will
# match all files and zero or more directories and subdirectories.
#shopt -s globstar
# make less more friendly for non-text input files, see lesspipe(1)
[ -x /usr/bin/lesspipe ] && eval "$(SHELL=/bin/sh lesspipe)"
# set variable identifying the chroot you work in (used in the prompt below)
if [ -z "${debian_chroot:-}" ] && [ -r /etc/debian_chroot ]; then
debian_chroot=$(cat /etc/debian_chroot)
fi
# set a fancy prompt (non-color, unless we know we "want" color)
case "$TERM" in
xterm-color|*-256color) color_prompt=yes;;
esac
# uncomment for a colored prompt, if the terminal has the capability; turned
# off by default to not distract the user: the focus in a terminal window
# should be on the output of commands, not on the prompt
#force_color_prompt=yes
if [ -n "$force_color_prompt" ]; then
if [ -x /usr/bin/tput ] && tput setaf 1 >&/dev/null; then
# We have color support; assume it's compliant with Ecma-48
# (ISO/IEC-6429). (Lack of such support is extremely rare, and such
# a case would tend to support setf rather than setaf.)
color_prompt=yes
else
color_prompt=
fi
fi
if [ "$color_prompt" = yes ]; then
PS1='${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$ '
else
PS1='${debian_chroot:+($debian_chroot)}\u@\h:\w\$ '
fi
unset color_prompt force_color_prompt
# If this is an xterm set the title to user@host:dir
case "$TERM" in
xterm*|rxvt*)
PS1="\[\e]0;${debian_chroot:+($debian_chroot)}\u@\h: \w\a\]$PS1"
;;
*)
;;
esac
# enable color support of ls and also add handy aliases
if [ -x /usr/bin/dircolors ]; then
test -r ~/.dircolors && eval "$(dircolors -b ~/.dircolors)" || eval "$(dircolors -b)"
alias ls='ls --color=auto'
#alias dir='dir --color=auto'
#alias vdir='vdir --color=auto'
alias grep='grep --color=auto'
alias fgrep='fgrep --color=auto'
alias egrep='egrep --color=auto'
fi
# colored GCC warnings and errors
#export GCC_COLORS='error=01;31:warning=01;35:note=01;36:caret=01;32:locus=01:quote=01'
# some more ls aliases
alias ll='ls -alF'
alias la='ls -A'
alias l='ls -CF'
# Add an "alert" alias for long running commands. Use like so:
# sleep 10; alert
alias alert='notify-send --urgency=low -i "$([ $? = 0 ] && echo terminal || echo error)" "$(history|tail -n1|sed -e '\''s/^\s*[0-9]\+\s*//;s/[;&|]\s*alert$//'\'')"'
# Alias definitions.
# You may want to put all your additions into a separate file like
# ~/.bash_aliases, instead of adding them here directly.
# See /usr/share/doc/bash-doc/examples in the bash-doc package.
if [ -f ~/.bash_aliases ]; then
. ~/.bash_aliases
fi
# enable programmable completion features (you don't need to enable
# this, if it's already enabled in /etc/bash.bashrc and /etc/profile
# sources /etc/bash.bashrc).
if ! shopt -oq posix; then
if [ -f /usr/share/bash-completion/bash_completion ]; then
. /usr/share/bash-completion/bash_completion
elif [ -f /etc/bash_completion ]; then
. /etc/bash_completion
fi
fi
## show git branch name in PS1
# default PS1=\[\e]0;\u@\h: \w\a\]${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\$
git_branch() {
git branch 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \(.*\)/(\1)/'
}
export PS1="\[\e]0;\u@\h: \w\a\]${debian_chroot:+($debian_chroot)}\[\033[01;32m\]\u@\h\[\033[00m\]:\[\033[01;34m\]\w\[\033[00m\]\[\033[01;31m\]\$(git_branch)\[\033[00m\]\$ "
# Flutter env
export PATH="$PATH:/home/harsh/flutter/flutter/bin"
export CHROME_EXECUTABLE="/snap/bin/brave"
alias fork-flutter='/home/harsh/forks/flutter/bin/flutter'
# Android env
export ANDROID_HOME="/home/harsh/Android/Sdk"
export PATH="$PATH:/home/harsh/Android/Sdk/platform-tools"
# Home
export PATH="$PATH:/home/harsh"
# Next Home Flutter alias
. /home/harsh/AndroidStudioProjects/next_home/.alias
# Next Home Alexa Skill alias
. /home/harsh/PycharmProjects/next-home-alexa-skill/.alias
#k8s
export KUBECONFIG=$HOME/.kube/config
# Next Home helmfile alias
alias helmfile-local='helmfile --interactive -e local'
alias helmfile-staging='helmfile --interactive -e staging'
alias helmfile-production='helmfile --interactive -e production'
# set editor
export EDITOR=nano
# pgo
export PATH=/home/h
| What is the output of `command -v dart`? Is `dart` possibly a shell function that starship can't access?
In general, this line could also be causing your issues by restricting `PATH` to only `/home/h`
```sh
export PATH=/home/h
```
you might want to replace this with this:
```sh
export PATH="/home/h:$PATH"
```
Can you also try running the following in a directory where you expect the dart module to be shown and post the results.
```sh
env STARSHIP_LOG=trace starship module dart
```
@davidkna `dart` is a shell script - this are it content
```sh
#!/usr/bin/env bash
# Copyright 2014 The Flutter Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
# ---------------------------------- NOTE ---------------------------------- #
#
# Please keep the logic in this file consistent with the logic in the
# `dart.bat` script in the same directory to ensure that Flutter & Dart continue
# to work across all platforms!
#
# -------------------------------------------------------------------------- #
set -e
# Needed because if it is set, cd may print the path it changed to.
unset CDPATH
# On Mac OS, readlink -f doesn't work, so follow_links traverses the path one
# link at a time, and then cds into the link destination and find out where it
# ends up.
#
# The returned filesystem path must be a format usable by Dart's URI parser,
# since the Dart command line tool treats its argument as a file URI, not a
# filename. For instance, multiple consecutive slashes should be reduced to a
# single slash, since double-slashes indicate a URI "authority", and these are
# supposed to be filenames. There is an edge case where this will return
# multiple slashes: when the input resolves to the root directory. However, if
# that were the case, we wouldn't be running this shell, so we don't do anything
# about it.
#
# The function is enclosed in a subshell to avoid changing the working directory
# of the caller.
function follow_links() (
cd -P "$(dirname -- "$1")"
file="$PWD/$(basename -- "$1")"
while [[ -h "$file" ]]; do
cd -P "$(dirname -- "$file")"
file="$(readlink -- "$file")"
cd -P "$(dirname -- "$file")"
file="$PWD/$(basename -- "$file")"
done
echo "$file"
)
PROG_NAME="$(follow_links "${BASH_SOURCE[0]}")"
BIN_DIR="$(cd "${PROG_NAME%/*}" ; pwd -P)"
OS="$(uname -s)"
# If we're on Windows, invoke the batch script instead to get proper locking.
if [[ $OS =~ MINGW.* || $OS =~ CYGWIN.* ]]; then
exec "${BIN_DIR}/dart.bat" "$@"
fi
# To define `shared::execute()` function
source "$BIN_DIR/internal/shared.sh"
shared::execute "$@"
```
@andytom Output you requested
```
❯ env STARSHIP_LOG=trace starship module dart
[DEBUG] - (starship::config): STARSHIP_CONFIG is not set
[DEBUG] - (starship::config): Using default config path: /home/harsh/.config/starship.toml
[TRACE] - (starship::utils): Trying to read from "/home/harsh/.config/starship.toml"
[TRACE] - (starship::utils): File read successfully
[TRACE] - (starship::config): Config file content: "
command_timeout = 5000
[package]
disabled = false
[kubernetes]
disabled = false
#[dart]
#format = "via [$symbol($version )]($style)"
#version_format = "v${major}"
# --- disabled modules --- #
[aws]
disabled = true
[gcloud]
disabled = true
[nodejs]
#disabled = true
"
[DEBUG] - (starship::config): Config parsed: Table({"command_timeout": Integer(5000), "package": Table({"disabled": Boolean(false)}), "kubernetes": Table({"disabled": Boolean(false)}), "aws": Table({"disabled": Boolean(true)}), "gcloud": Table({"disabled": Boolean(true)}), "nodejs": Table({})})
[TRACE] - (starship::context): Received completed pipestatus of None
[TRACE] - (starship::config): No config found for "dart": Option "dart" not found
[TRACE] - (starship::context): Building HashSets of directory files, folders and extensions took 149.128µs
[TRACE] - (starship::utils): Executing command "dart" with args ["--version"]
[TRACE] - (starship::utils): Using "/home/harsh/flutter/flutter/bin/dart" as "dart"
[TRACE] - (starship::utils): stdout: "Dart SDK version: 2.15.1 (stable) (Tue Dec 14 13:32:21 2021 +0100) on \"linux_x64\"\n", stderr: "", exit code: "Some(0)", took 22.27421ms
[TRACE] - (starship::config): Parsing color_string: blue
[TRACE] - (starship::config): Read predefined color: blue
[TRACE] - (starship::modules): Took 23.54269ms to compute module "dart"
``` | 2021-12-24T16:52:43 | 1.1 | c63e9a71bd958c576100fbbeaf5723bb22450fd9 | [
"modules::dart::tests::detect_version_output_in_stdout"
] | [
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_with_none",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_inverted_silly_caps",
"config::tests::table_get_styles_plain_and_broken... | [] | [] |
starship/starship | 4,689 | starship__starship-4689 | [
"4608"
] | c9de67bd6629de7c31c753adbc6e31ddc807d8b6 | diff --git a/.github/config-schema.json b/.github/config-schema.json
--- a/.github/config-schema.json
+++ b/.github/config-schema.json
@@ -73,7 +73,7 @@
"disabled": false,
"format": "with [$symbol($version )]($style)",
"style": "bold blue",
- "symbol": "🦬 ",
+ "symbol": "🐃 ",
"version_format": "v${raw}"
},
"allOf": [
diff --git a/.github/config-schema.json b/.github/config-schema.json
--- a/.github/config-schema.json
+++ b/.github/config-schema.json
@@ -1854,7 +1854,7 @@
"type": "string"
},
"symbol": {
- "default": "🦬 ",
+ "default": "🐃 ",
"type": "string"
},
"style": {
diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -558,7 +558,7 @@ The `buf` module shows the currently installed version of [Buf](https://buf.buil
| ------------------- | ----------------------------------------------- | ----------------------------------------------------- |
| `format` | `'with [$symbol($version )]($style)'` | The format for the `buf` module. |
| `version_format` | `'v${raw}'` | The version format. |
-| `symbol` | `'🦬 '` | The symbol used before displaying the version of Buf. |
+| `symbol` | `'🐃 '` | The symbol used before displaying the version of Buf. |
| `detect_extensions` | `[]` | Which extensions should trigger this module. |
| `detect_files` | `['buf.yaml', 'buf.gen.yaml', 'buf.work.yaml']` | Which filenames should trigger this module. |
| `detect_folders` | `[]` | Which folders should trigger this modules. |
diff --git a/src/configs/buf.rs b/src/configs/buf.rs
--- a/src/configs/buf.rs
+++ b/src/configs/buf.rs
@@ -23,7 +23,7 @@ impl<'a> Default for BufConfig<'a> {
BufConfig {
format: "with [$symbol($version )]($style)",
version_format: "v${raw}",
- symbol: "🦬 ",
+ symbol: "🐃 ",
style: "bold blue",
disabled: false,
detect_extensions: vec![],
diff --git a/src/modules/buf.rs b/src/modules/buf.rs
--- a/src/modules/buf.rs
+++ b/src/modules/buf.rs
@@ -1,5 +1,4 @@
use super::{Context, Module, ModuleConfig};
-
use crate::configs::buf::BufConfig;
use crate::formatter::StringFormatter;
use crate::formatter::VersionFormatter;
| diff --git a/src/modules/buf.rs b/src/modules/buf.rs
--- a/src/modules/buf.rs
+++ b/src/modules/buf.rs
@@ -103,8 +102,10 @@ mod tests {
.sync_all()
.unwrap();
let actual = ModuleRenderer::new("buf").path(dir.path()).collect();
- let expected = Some(format!("with {}", Color::Blue.bold().paint("🦬 v1.0.0 ")));
+
+ let expected = Some(format!("with {}", Color::Blue.bold().paint("🐃 v1.0.0 ")));
assert_eq!(expected, actual);
+
dir.close().unwrap();
}
| "Buf" icon broken on Windows 10 or lower
#### Current Behavior
On Windows 10, the icon for "buf" shows as an unknown symbol.
#### Expected Behavior
The icon should be shown correctly.
#### Additional context/Screenshots
The icon showed properly (but without a version number because the command timed out when the window initially opened) when using Starship v1.8.0. After upgrading to v1.11.0, the version number shows, but the icon is broken.
This screenshot shows both versions:

#### Possible Solution
It looks like #4450 changed the symbol for `buf` from a letter "B" (which Windows can render) to a Buffalo/Bison emoji (which Windows 10 does not have). Note that the Buffalo/Bison emoji was only added to the Unicode standard in version 13.0 in 2020.
Maybe we can use the 🐃 (Water Buffalo) emoji instead, which has been part of the Unicode standard since 2010 and was in the Emoji 1.0 release.
#### Environment
- Starship version: 1.11.0
- pwsh version: 7.2.7
- Operating system: Windows 10.0.19044
- Terminal emulator: vscode 1.72.1
- Git Commit Hash:
- Branch/Tag: master
- Rust Version: rustc 1.64.0 (a55dd71d5 2022-09-19)
- Rust channel: stable-x86_64-pc-windows-msvc release
- Build Time: 2022-10-15 01:51:42 +00:00
#### Relevant Shell Configuration
```pwsh
# Chocolatey profile
$ChocolateyProfile = "$env:ChocolateyInstall\helpers\chocolateyProfile.psm1"
if (Test-Path($ChocolateyProfile)) {
Import-Module "$ChocolateyProfile"
}
$env:LC_ALL='C.UTF-8'
$env:LANG='en_US.UTF-8'
$OutputEncoding = [console]::InputEncoding = [console]::OutputEncoding = New-Object System.Text.UTF8Encoding
$env:PYTHONIOENCODING = "UTF-8"
# Allow Ctrl+D to close the terminal, like Linux does
Set-PSReadlineKeyHandler -Key ctrl+d -Function ViExit
Invoke-Expression (&starship init powershell)
```
#### Starship Configuration
```toml
<unknown config>
```
| 2022-12-06T11:14:19 | 1.11 | c9de67bd6629de7c31c753adbc6e31ddc807d8b6 | [
"modules::buf::tests::folder_with_buf_config"
] | [
"bug_report::tests::test_get_config_path",
"bug_report::tests::test_make_github_link",
"config::tests::table_get_palette",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_hidden_silly_caps",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
"config::tests... | [] | [] | |
starship/starship | 2,994 | starship__starship-2994 | [
"2377"
] | 2865f135f0d0c0337f67141960e454f47e9d45b2 | diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -502,6 +502,37 @@ the module will be activated if any of the following conditions are met:
\*: This variable can only be used as a part of a style string
+## COBOL / GNUCOBOL
+
+The `cobol` module shows the currently installed version of COBOL.
+By default, the module will be shown if any of the following conditions are met:
+
+- The current directory contains any files ending in `.cob` or `.COB`
+- The current directory contains any files ending in `.cbl` or `.CBL`
+
+### Options
+
+| Option | Default | Description |
+| ------------------- | ------------------------------------ | ------------------------------------------------------------------------ |
+| `symbol` | `"⚙️ "` | The symbol used before displaying the version of COBOL. |
+| `format` | `"via [$symbol($version )]($style)"` | The format for the module. |
+| `version_format` | `"v${raw}"` | The version format. Available vars are `raw`, `major`, `minor`, & `patch`|
+| `style` | `"bold blue"` | The style for the module. |
+| `detect_extensions` | `["cbl", "cob", "CBL", "COB"]` | Which extensions should trigger this module. |
+| `detect_files` | `[]` | Which filenames should trigger this module. |
+| `detect_folders` | `[]` | Which folders should trigger this module. |
+| `disabled` | `false` | Disables the `cobol` module. |
+
+### Variables
+
+| Variable | Example | Description |
+| -------- | --------- | ------------------------------------ |
+| version | `v3.1.2.0`| The version of `cobol` |
+| symbol | | Mirrors the value of option `symbol` |
+| style\* | | Mirrors the value of option `style` |
+
+\*: This variable can only be used as a part of a style string
+
## Command Duration
The `cmd_duration` module shows how long the last command took to execute.
diff --git a/docs/presets/README.md b/docs/presets/README.md
--- a/docs/presets/README.md
+++ b/docs/presets/README.md
@@ -115,6 +115,9 @@ format = '\[[$symbol($version)]($style)\]'
[cmd_duration]
format = "[⏱ $duration]($style)"
+[cobol]
+format = '\[[$symbol($version)]($style)\]'
+
[conda]
format = '\[[$symbol$environment]($style)\]'
diff --git a/docs/presets/README.md b/docs/presets/README.md
--- a/docs/presets/README.md
+++ b/docs/presets/README.md
@@ -276,6 +279,9 @@ deleted = "x"
[aws]
symbol = "aws "
+[cobol]
+symbol = "cobol "
+
[conda]
symbol = "conda "
diff --git a/docs/presets/README.md b/docs/presets/README.md
--- a/docs/presets/README.md
+++ b/docs/presets/README.md
@@ -379,6 +385,9 @@ This preset hides the version of language runtimes. If you work in containers or
[cmake]
format = "via [$symbol]($style)"
+[cobol]
+format = "via [$symbol]($style)"
+
[crystal]
format = "via [$symbol]($style)"
diff --git /dev/null b/src/configs/cobol.rs
new file mode 100644
--- /dev/null
+++ b/src/configs/cobol.rs
@@ -0,0 +1,31 @@
+use crate::config::ModuleConfig;
+
+use serde::Serialize;
+use starship_module_config_derive::ModuleConfig;
+
+#[derive(Clone, ModuleConfig, Serialize)]
+pub struct CobolConfig<'a> {
+ pub format: &'a str,
+ pub version_format: &'a str,
+ pub symbol: &'a str,
+ pub style: &'a str,
+ pub disabled: bool,
+ pub detect_extensions: Vec<&'a str>,
+ pub detect_files: Vec<&'a str>,
+ pub detect_folders: Vec<&'a str>,
+}
+
+impl<'a> Default for CobolConfig<'a> {
+ fn default() -> Self {
+ CobolConfig {
+ format: "via [$symbol($version )]($style)",
+ version_format: "v${raw}",
+ symbol: "⚙️ ",
+ style: "bold blue",
+ disabled: false,
+ detect_extensions: vec!["cbl", "cob", "CBL", "COB"],
+ detect_files: vec![],
+ detect_folders: vec![],
+ }
+ }
+}
diff --git a/src/configs/mod.rs b/src/configs/mod.rs
--- a/src/configs/mod.rs
+++ b/src/configs/mod.rs
@@ -8,6 +8,7 @@ pub mod battery;
pub mod character;
pub mod cmake;
pub mod cmd_duration;
+pub mod cobol;
pub mod conda;
pub mod crystal;
pub mod custom;
diff --git a/src/configs/mod.rs b/src/configs/mod.rs
--- a/src/configs/mod.rs
+++ b/src/configs/mod.rs
@@ -83,6 +84,7 @@ pub struct FullConfig<'a> {
character: character::CharacterConfig<'a>,
cmake: cmake::CMakeConfig<'a>,
cmd_duration: cmd_duration::CmdDurationConfig<'a>,
+ cobol: cobol::CobolConfig<'a>,
conda: conda::CondaConfig<'a>,
crystal: crystal::CrystalConfig<'a>,
dart: dart::DartConfig<'a>,
diff --git a/src/configs/mod.rs b/src/configs/mod.rs
--- a/src/configs/mod.rs
+++ b/src/configs/mod.rs
@@ -155,6 +157,7 @@ impl<'a> Default for FullConfig<'a> {
character: Default::default(),
cmake: Default::default(),
cmd_duration: Default::default(),
+ cobol: Default::default(),
conda: Default::default(),
crystal: Default::default(),
dart: Default::default(),
diff --git a/src/configs/starship_root.rs b/src/configs/starship_root.rs
--- a/src/configs/starship_root.rs
+++ b/src/configs/starship_root.rs
@@ -34,6 +34,7 @@ pub const PROMPT_ORDER: &[&str] = &[
// ↓ Toolchain version modules ↓
// (Let's keep these sorted alphabetically)
"cmake",
+ "cobol",
"dart",
"deno",
"dotnet",
diff --git a/src/module.rs b/src/module.rs
--- a/src/module.rs
+++ b/src/module.rs
@@ -14,6 +14,7 @@ pub const ALL_MODULES: &[&str] = &[
"character",
"cmake",
"cmd_duration",
+ "cobol",
"conda",
"crystal",
"dart",
diff --git a/src/modules/mod.rs b/src/modules/mod.rs
--- a/src/modules/mod.rs
+++ b/src/modules/mod.rs
@@ -3,6 +3,7 @@ mod aws;
mod character;
mod cmake;
mod cmd_duration;
+mod cobol;
mod conda;
mod crystal;
pub(crate) mod custom;
diff --git a/src/modules/mod.rs b/src/modules/mod.rs
--- a/src/modules/mod.rs
+++ b/src/modules/mod.rs
@@ -85,6 +86,7 @@ pub fn handle<'a>(module: &str, context: &'a Context) -> Option<Module<'a>> {
"character" => character::module(context),
"cmake" => cmake::module(context),
"cmd_duration" => cmd_duration::module(context),
+ "cobol" => cobol::module(context),
"conda" => conda::module(context),
"dart" => dart::module(context),
"deno" => deno::module(context),
diff --git a/src/modules/mod.rs b/src/modules/mod.rs
--- a/src/modules/mod.rs
+++ b/src/modules/mod.rs
@@ -169,6 +171,7 @@ pub fn description(module: &str) -> &'static str {
}
"cmake" => "The currently installed version of CMake",
"cmd_duration" => "How long the last command took to execute",
+ "cobol" => "The currently installed version of COBOL/GNUCOBOL",
"conda" => "The current conda environment, if $CONDA_DEFAULT_ENV is set",
"crystal" => "The currently installed version of Crystal",
"dart" => "The currently installed version of Dart",
diff --git a/src/utils.rs b/src/utils.rs
--- a/src/utils.rs
+++ b/src/utils.rs
@@ -85,6 +85,19 @@ pub fn exec_cmd(cmd: &str, args: &[&str], time_limit: Duration) -> Option<Comman
_ => format!("{} {}", cmd, args.join(" ")),
};
match command.as_str() {
+ "cobc -version" => Some(CommandOutput {
+ stdout: String::from("\
+cobc (GnuCOBOL) 3.1.2.0
+Copyright (C) 2020 Free Software Foundation, Inc.
+License GPLv3+: GNU GPL version 3 or later <https://gnu.org/licenses/gpl.html>
+This is free software; see the source for copying conditions. There is NO
+warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.
+Written by Keisuke Nishida, Roger While, Ron Norman, Simon Sobisch, Edward Hart
+Built Dec 24 2020 19:08:58
+Packaged Dec 23 2020 12:04:58 UTC
+C version \"10.2.0\""),
+ stderr: String::default(),
+ }),
"crystal --version" => Some(CommandOutput {
stdout: String::from(
"\
| diff --git /dev/null b/src/modules/cobol.rs
new file mode 100644
--- /dev/null
+++ b/src/modules/cobol.rs
@@ -0,0 +1,141 @@
+use super::{Context, Module, RootModuleConfig};
+
+use crate::configs::cobol::CobolConfig;
+use crate::formatter::StringFormatter;
+use crate::formatter::VersionFormatter;
+
+/// Creates a module with the current COBOL version
+pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
+ let mut module = context.new_module("cobol");
+ let config = CobolConfig::try_load(module.config);
+ let is_cobol_project = context
+ .try_begin_scan()?
+ .set_files(&config.detect_files)
+ .set_extensions(&config.detect_extensions)
+ .set_folders(&config.detect_folders)
+ .is_match();
+
+ if !is_cobol_project {
+ return None;
+ }
+
+ let parsed = StringFormatter::new(config.format).and_then(|formatter| {
+ formatter
+ .map_meta(|var, _| match var {
+ "symbol" => Some(config.symbol),
+ _ => None,
+ })
+ .map_style(|variable| match variable {
+ "style" => Some(Ok(config.style)),
+ _ => None,
+ })
+ .map(|variable| match variable {
+ "version" => {
+ let cobol_version =
+ get_cobol_version(&context.exec_cmd("cobc", &["-version"])?.stdout)?;
+
+ VersionFormatter::format_module_version(
+ module.get_name(),
+ &cobol_version,
+ config.version_format,
+ )
+ .map(Ok)
+ }
+ _ => None,
+ })
+ .parse(None)
+ });
+
+ module.set_segments(match parsed {
+ Ok(segments) => segments,
+ Err(error) => {
+ log::warn!("Error in module `cobol`:\n{}", error);
+ return None;
+ }
+ });
+
+ Some(module)
+}
+
+fn get_cobol_version(cobol_stdout: &str) -> Option<String> {
+ // cobol output looks like this:
+ // cobc (GnuCOBOL) 3.1.2.0
+ // ...
+
+ Some(
+ cobol_stdout
+ // split into ["cobc", "(GNUCOBOL)", "3.1.2.0"...]
+ .split_whitespace()
+ // return "3.1.2.0"
+ .nth(2)?
+ .to_string(),
+ )
+}
+
+#[cfg(test)]
+mod tests {
+ use crate::test::ModuleRenderer;
+ use ansi_term::Color;
+ use std::fs::File;
+ use std::io;
+
+ #[test]
+ fn folder_without_cobol_files() -> io::Result<()> {
+ let dir = tempfile::tempdir()?;
+
+ let actual = ModuleRenderer::new("cobol").path(dir.path()).collect();
+
+ let expected = None;
+
+ assert_eq!(expected, actual);
+ dir.close()
+ }
+
+ #[test]
+ fn folder_with_lowercase_cob_files() -> io::Result<()> {
+ let dir = tempfile::tempdir()?;
+ File::create(dir.path().join("main.cob"))?.sync_all()?;
+
+ let actual = ModuleRenderer::new("cobol").path(dir.path()).collect();
+
+ let expected = Some(format!("via {}", Color::Blue.bold().paint("⚙️ v3.1.2.0 ")));
+ assert_eq!(expected, actual);
+ dir.close()
+ }
+
+ #[test]
+ fn folder_with_lowercase_cbl_files() -> io::Result<()> {
+ let dir = tempfile::tempdir()?;
+ File::create(dir.path().join("main.cbl"))?.sync_all()?;
+
+ let actual = ModuleRenderer::new("cobol").path(dir.path()).collect();
+
+ let expected = Some(format!("via {}", Color::Blue.bold().paint("⚙️ v3.1.2.0 ")));
+ assert_eq!(expected, actual);
+ dir.close()
+ }
+
+ #[test]
+ fn folder_with_capital_cob_files() -> io::Result<()> {
+ let dir = tempfile::tempdir()?;
+ File::create(dir.path().join("MAIN.COB"))?.sync_all()?;
+
+ let actual = ModuleRenderer::new("cobol").path(dir.path()).collect();
+
+ let expected = Some(format!("via {}", Color::Blue.bold().paint("⚙️ v3.1.2.0 ")));
+ assert_eq!(expected, actual);
+ dir.close()
+ }
+
+ #[test]
+ fn folder_with_capital_cbl_files() -> io::Result<()> {
+ let dir = tempfile::tempdir()?;
+ File::create(dir.path().join("MAIN.CBL"))?.sync_all()?;
+
+ let actual = ModuleRenderer::new("cobol").path(dir.path()).collect();
+
+ let expected = Some(format!("via {}", Color::Blue.bold().paint("⚙️ v3.1.2.0 ")));
+ assert_eq!(expected, actual);
+ dir.close()
+ }
+}
| GNUCOBOL support
Hello,
I'd like to see GNUCOBOL support get added into Starship, it would function like the other language compilers in Starship.
The version would be gotten by:
`echo -n v && cobc -version | head -n 1 | awk '{ print $3 }'`
And it would be triggered by the detection of a .cbl/CBL or a .cob/COB file in the current folder.
I know that probably isn't the fastest way to get the version, but since I don't really know rust or do bash scripting, I'm leaving this part to the experts.
I've been getting into COBOL recently and I would like to see it added to Starship. I know that it may not be the most popular feature, but even then, if someone out there can provide a patch or similar, I'd be forever grateful.
Many thanks,
Zenithium
| Good news for any would-be implementors: `cobc -version` is pretty much instant, clocking in at a cool 1.4ms on my system and tying with `python --version` (which we already know is really fast, at least for cpython).
We wouldn't need any additional performance hacks--this would be a pretty straightforward module implementation.
@ZenithiumDev Any ideas for what we could use for a symbol? Unfortunately I think "C" is out of the picture since we already use that symbol for various other modules.
Hello again,
@chipbuster , thank you for your quick reply.
I haven't thought of the symbol, though, I'd prefer if the icon was something from the Font Awesome free icon set, as I and probably a lot of other people use it. | 2021-08-18T23:04:26 | 0.56 | 2865f135f0d0c0337f67141960e454f47e9d45b2 | [
"modules::cmake::tests::folder_with_cmake_lists",
"modules::cmake::tests::buildfolder_with_cmake_cache",
"modules::crystal::tests::folder_with_shard_file",
"modules::dart::tests::folder_with_dart_file",
"modules::crystal::tests::folder_with_cr_file",
"modules::dart::tests::folder_with_pubspec_lock_file",
... | [
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_inverted_silly_caps",
"config::tests::table_get_styles_with_none",
"config::tests::table_get_styles_plain_and_broken... | [
"modules::docker_context::tests::test_with_docker_compose_yml"
] | [
"modules::dotnet::tests::shows_multiple_tfms",
"modules::dotnet::tests::shows_pinned_in_directory_with_global_json",
"modules::docker_context::tests::test_with_dockerfile",
"modules::erlang::tests::test_with_config",
"modules::elm::tests::folder_with_elm_version",
"modules::ocaml::tests::folder_with_ml_fi... |
starship/starship | 2,973 | starship__starship-2973 | [
"2937"
] | 2865f135f0d0c0337f67141960e454f47e9d45b2 | diff --git a/src/modules/git_status.rs b/src/modules/git_status.rs
--- a/src/modules/git_status.rs
+++ b/src/modules/git_status.rs
@@ -230,43 +230,48 @@ struct RepoStatus {
}
impl RepoStatus {
- fn is_conflicted(status: &str) -> bool {
- status.starts_with("u ")
- }
-
- fn is_deleted(status: &str) -> bool {
+ fn is_deleted(short_status: &str) -> bool {
// is_wt_deleted || is_index_deleted
- status.starts_with("1 .D") || status.starts_with("1 D")
+ short_status.contains('D')
}
- fn is_renamed(status: &str) -> bool {
- // is_wt_renamed || is_index_renamed
- // Potentially a copy and not a rename
- status.starts_with("2 ")
+ fn is_modified(short_status: &str) -> bool {
+ // is_wt_modified || is_wt_added
+ short_status.ends_with('M') || short_status.ends_with('A')
}
- fn is_modified(status: &str) -> bool {
- // is_wt_modified
- status.starts_with("1 .M") || status.starts_with("1 .A")
+ fn is_staged(short_status: &str) -> bool {
+ // is_index_modified || is_index_added
+ short_status.starts_with('M') || short_status.starts_with('A')
}
- fn is_staged(status: &str) -> bool {
- // is_index_modified || is_index_new
- status.starts_with("1 M") || status.starts_with("1 A")
- }
+ fn parse_normal_status(&mut self, short_status: &str) {
+ if Self::is_deleted(short_status) {
+ self.deleted += 1;
+ }
+
+ if Self::is_modified(short_status) {
+ self.modified += 1;
+ }
- fn is_untracked(status: &str) -> bool {
- // is_wt_new
- status.starts_with("? ")
+ if Self::is_staged(short_status) {
+ self.staged += 1;
+ }
}
fn add(&mut self, s: &str) {
- self.conflicted += Self::is_conflicted(s) as usize;
- self.deleted += Self::is_deleted(s) as usize;
- self.renamed += Self::is_renamed(s) as usize;
- self.modified += Self::is_modified(s) as usize;
- self.staged += Self::is_staged(s) as usize;
- self.untracked += Self::is_untracked(s) as usize;
+ match s.chars().next() {
+ Some('1') => self.parse_normal_status(&s[2..4]),
+ Some('2') => {
+ self.renamed += 1;
+ self.parse_normal_status(&s[2..4])
+ }
+ Some('u') => self.conflicted += 1,
+ Some('?') => self.untracked += 1,
+ Some('!') => (),
+ Some(_) => log::error!("Unknown line type in git status output"),
+ None => log::error!("Missing line type in git status output"),
+ }
}
fn set_ahead_behind(&mut self, s: &str) {
| diff --git a/src/modules/git_status.rs b/src/modules/git_status.rs
--- a/src/modules/git_status.rs
+++ b/src/modules/git_status.rs
@@ -313,7 +318,7 @@ fn format_symbol(format_str: &str, config_path: &str) -> Option<Vec<Segment>> {
mod tests {
use ansi_term::{ANSIStrings, Color};
use std::fs::{self, File};
- use std::io;
+ use std::io::{self, prelude::*};
use std::path::Path;
use crate::test::{fixture_repo, FixtureProvider, ModuleRenderer};
diff --git a/src/modules/git_status.rs b/src/modules/git_status.rs
--- a/src/modules/git_status.rs
+++ b/src/modules/git_status.rs
@@ -701,6 +706,21 @@ mod tests {
repo_dir.close()
}
+ #[test]
+ fn shows_staged_and_modified_file() -> io::Result<()> {
+ let repo_dir = fixture_repo(FixtureProvider::Git)?;
+
+ create_staged_and_modified(repo_dir.path())?;
+
+ let actual = ModuleRenderer::new("git_status")
+ .path(&repo_dir.path())
+ .collect();
+ let expected = format_output("!+");
+
+ assert_eq!(expected, actual);
+ repo_dir.close()
+ }
+
#[test]
fn shows_renamed_file() -> io::Result<()> {
let repo_dir = fixture_repo(FixtureProvider::Git)?;
diff --git a/src/modules/git_status.rs b/src/modules/git_status.rs
--- a/src/modules/git_status.rs
+++ b/src/modules/git_status.rs
@@ -735,6 +755,21 @@ mod tests {
repo_dir.close()
}
+ #[test]
+ fn shows_renamed_and_modified_file() -> io::Result<()> {
+ let repo_dir = fixture_repo(FixtureProvider::Git)?;
+
+ create_renamed_and_modified(repo_dir.path())?;
+
+ let actual = ModuleRenderer::new("git_status")
+ .path(&repo_dir.path())
+ .collect();
+ let expected = format_output("»!");
+
+ assert_eq!(expected, actual);
+ repo_dir.close()
+ }
+
#[test]
fn shows_deleted_file() -> io::Result<()> {
let repo_dir = fixture_repo(FixtureProvider::Git)?;
diff --git a/src/modules/git_status.rs b/src/modules/git_status.rs
--- a/src/modules/git_status.rs
+++ b/src/modules/git_status.rs
@@ -769,6 +804,21 @@ mod tests {
repo_dir.close()
}
+ #[test]
+ fn doesnt_show_ignored_file() -> io::Result<()> {
+ let repo_dir = fixture_repo(FixtureProvider::Git)?;
+
+ create_staged_and_ignored(repo_dir.path())?;
+
+ let actual = ModuleRenderer::new("git_status")
+ .path(&repo_dir.path())
+ .collect();
+ let expected = format_output("+");
+
+ assert_eq!(expected, actual);
+ repo_dir.close()
+ }
+
#[test]
fn worktree_in_different_dir() -> io::Result<()> {
let worktree_dir = tempfile::tempdir()?;
diff --git a/src/modules/git_status.rs b/src/modules/git_status.rs
--- a/src/modules/git_status.rs
+++ b/src/modules/git_status.rs
@@ -953,6 +1003,22 @@ mod tests {
Ok(())
}
+ fn create_staged_and_modified(repo_dir: &Path) -> io::Result<()> {
+ let mut file = File::create(repo_dir.join("readme.md"))?;
+ file.sync_all()?;
+
+ create_command("git")?
+ .args(&["add", "."])
+ .current_dir(repo_dir)
+ .output()?;
+ barrier();
+
+ writeln!(&mut file, "modified")?;
+ file.sync_all()?;
+
+ Ok(())
+ }
+
fn create_renamed(repo_dir: &Path) -> io::Result<()> {
create_command("git")?
.args(&["mv", "readme.md", "readme.md.bak"])
diff --git a/src/modules/git_status.rs b/src/modules/git_status.rs
--- a/src/modules/git_status.rs
+++ b/src/modules/git_status.rs
@@ -969,9 +1035,47 @@ mod tests {
Ok(())
}
+ fn create_renamed_and_modified(repo_dir: &Path) -> io::Result<()> {
+ create_command("git")?
+ .args(&["mv", "readme.md", "readme.md.bak"])
+ .current_dir(repo_dir)
+ .output()?;
+ barrier();
+
+ create_command("git")?
+ .args(&["add", "-A"])
+ .current_dir(repo_dir)
+ .output()?;
+ barrier();
+
+ let mut file = File::create(repo_dir.join("readme.md.bak"))?;
+ writeln!(&mut file, "modified")?;
+ file.sync_all()?;
+
+ Ok(())
+ }
+
fn create_deleted(repo_dir: &Path) -> io::Result<()> {
fs::remove_file(repo_dir.join("readme.md"))?;
Ok(())
}
+
+ fn create_staged_and_ignored(repo_dir: &Path) -> io::Result<()> {
+ let mut file = File::create(repo_dir.join(".gitignore"))?;
+ writeln!(&mut file, "ignored.txt")?;
+ file.sync_all()?;
+
+ create_command("git")?
+ .args(&["add", ".gitignore"])
+ .current_dir(repo_dir)
+ .output()?;
+ barrier();
+
+ let mut file = File::create(repo_dir.join("ignored.txt"))?;
+ writeln!(&mut file, "modified")?;
+ file.sync_all()?;
+
+ Ok(())
+ }
}
| Git status: modified files are not shown if also staged
#### Current Behavior
When a file is both modified and staged, it will only be dis played as « staged ».
#### Expected Behavior
Take the file into account both in « staged » and « modified »
#### Additional context/Screenshots
##### Git status of repo
When already present file is modified:
```
❯ git -C /home/mmillet/test --no-optional-locks status --porcelain=2 --branch
# branch.oid 72870ff32e67c8d9c1da4df4550182418ee70675
# branch.head master
1 MM N... 100644 100644 100644 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 72943a16fb2c8f38f9dde202b7a70ccc19c52f34 test1
1 .M N... 100644 100644 100644 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 test2
1 AM N... 000000 100644 100644 0000000000000000000000000000000000000000 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 test3
000000000000000000 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 test3
```
`test1` was already in previous commit, and has been modified, added, and modified again
`test2` was already in previous commit, then modified
`test3` just added, then modified
When in this state, my indicator show only one added file :`test2` (if `test2` is not modified I do not see any file as modified).
Starship debug (I removed config since it's displayed at the bottom of the issue):
```
❯ env STARSHIP_LOG=trace starship module git_status
[DEBUG] - (starship::config): STARSHIP_CONFIG is not set
[DEBUG] - (starship::config): Using default config path: /home/mmillet/.config/starship.toml
[TRACE] - (starship::utils): Trying to read from "/home/mmillet/.config/starship.toml"
[TRACE] - (starship::utils): File read sucessfully
[TRACE] - (starship::config): Config file content: "
...
"
[DEBUG] - (starship::config): Config parsed: Table({"format": String("$username$hostname$shlvl$directory$git_branch$git_commit$git_state$git_status$hg_branch$docker_context$package$nodejs$ocaml$python$conda$memory_usage$aws$cmd_duration$line_break$jobs$battery$time$status$character"), "hostname": Table({"format": String("[$hostname]($style) | "), "ssh_only": Boolean(false)}), "directory": Table({"truncate_to_repo": Boolean(false), "fish_style_pwd_dir_length": Integer(2)}), "username": Table({"format": String("[$user]($style) | ")}), "git_status": Table({"style": String("bold cyan"), "ahead": String("↑${count}"), "diverged": String("↑${ahead_count}↓${behind_count}"), "behind": String("↓${count}"), "conflicted": String("✖${count}(red)"), "untracked": String("[…\u{200d}$count](244)"), "modified": String("✚$count(bold blue)"), "staged": String("[●$count](bold bright-yellow)"), "renamed": String("»(green)"), "deleted": String("✘(red)")})})
[DEBUG] - (starship::config): Config found for "git_status": Some(Table({"style": String("bold cyan"), "ahead": String("↑${count}"), "diverged": String("↑${ahead_count}↓${behind_count}"), "behind": String("↓${count}"), "conflicted": String("✖${count}(red)"), "untracked": String("[…\u{200d}$count](244)"), "modified": String("✚$count(bold blue)"), "staged": String("[●$count](bold bright-yellow)"), "renamed": String("»(green)"), "deleted": String("✘(red)")}))
[DEBUG] - (starship::modules::git_status): New repo status created
[TRACE] - (starship::utils): Executing command "git" with args ["-C", "/home/mmillet/test", "--no-optional-locks", "status", "--porcelain=2", "--branch"]
[TRACE] - (starship::utils): Executing command "git" with args ["-C", "/home/mmillet/test", "--no-optional-locks", "stash", "list"]
[TRACE] - (starship::utils): Using "/usr/bin/git" as "git"
[TRACE] - (starship::utils): Using "/usr/bin/git" as "git"
[TRACE] - (starship::utils): stdout: "# branch.oid 72870ff32e67c8d9c1da4df4550182418ee70675\n# branch.head master\n1 MM N... 100644 100644 100644 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 72943a16fb2c8f38f9dde202b7a70ccc19c52f34 test1\n1 .M N... 100644 100644 100644 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 test2\n1 AM N... 000000 100644 100644 0000000000000000000000000000000000000000 e69de29bb2d1d6434b8b29ae775ad8c2e48c5391 test3\n", stderr: "", exit code: "Some(0)", took 3.761845ms
[TRACE] - (starship::config): Parsing color_string: bright-yellow
[TRACE] - (starship::config): Read predefined color: bright-yellow
[TRACE] - (starship::utils): stdout: "", stderr: "", exit code: "Some(0)", took 4.6319ms
[TRACE] - (starship::config): Parsing color_string: cyan
[TRACE] - (starship::config): Read predefined color: cyan
[TRACE] - (starship::modules): Took 7.221036ms to compute module "git_status"
[✚1●2] ⏎
```
#### Possible Solution
<!--- Only if you have suggestions on a fix for the bug -->
#### Environment
- Starship version: 0.56.0
- fish version: fish, version 3.1.0
- Operating system: Ubuntu 20.04
- Terminal emulator: <unknown terminal> <unknown version>
- Git Commit Hash: f64cfa6a
- Branch/Tag: v0.56.0
- Rust Version: rustc 1.53.0 (53cb7b09b 2021-06-17)
- Rust channel: release
- Build Time: 2021-07-13 22:05:04
#### Relevant Shell Configuration
```bash
set -x PATH .local/bin $PATH
starship init fish | source
```
#### Starship Configuration
```toml
format = """
$username\
$hostname\
$shlvl\
$directory\
$git_branch\
$git_commit\
$git_state\
$git_status\
$hg_branch\
$docker_context\
$package\
$nodejs\
$ocaml\
$python\
$conda\
$memory_usage\
$aws\
$cmd_duration\
$line_break\
$jobs\
$battery\
$time\
$status\
$character"""
[hostname]
format = "[$hostname]($style) | "
ssh_only = false
[directory]
truncate_to_repo = false
fish_style_pwd_dir_length = 2
[username]
format = "[$user]($style) | "
[git_status]
style= "bold cyan"
ahead = "↑${count}"
diverged = "↑${ahead_count}↓${behind_count}"
behind = "↓${count}"
conflicted = "✖${count}(red)"
untracked = "[…$count](244)"
modified = "✚${count}(bold blue)"
staged = '[●$count](bold bright-yellow)'
renamed = "»(green)"
deleted = "✘(red)"
```
| 2021-08-09T19:38:30 | 0.56 | 2865f135f0d0c0337f67141960e454f47e9d45b2 | [
"modules::crystal::tests::folder_with_shard_file",
"modules::dart::tests::folder_with_pubspec_yaml_file",
"modules::cmake::tests::buildfolder_with_cmake_cache",
"modules::deno::tests::folder_with_deps_ts",
"modules::crystal::tests::folder_with_cr_file",
"modules::dart::tests::folder_with_dart_file",
"mo... | [
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_inverted_silly_caps",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
"config::tests::table_get_styles_ordered",
"config::tests::table_get_styles_plain_and_broken_s... | [
"modules::java::tests::folder_with_class_file",
"modules::java::tests::folder_with_gradle_file",
"modules::java::tests::folder_with_java_file_no_java_installed",
"modules::java::tests::folder_with_gradle_kotlin_build_file",
"modules::java::tests::folder_with_jar_archive",
"modules::java::tests::folder_wit... | [
"modules::golang::tests::folder_with_glide_yaml",
"modules::golang::tests::folder_with_go_mod",
"modules::golang::tests::folder_with_go_file"
] | |
starship/starship | 2,785 | starship__starship-2785 | [
"2567"
] | 14a2ada1ee1db32d2334789587b57f18fe6913b4 | diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -839,7 +839,6 @@ the following files are present in the current directory:
- `Directory.Build.props`
- `Directory.Build.targets`
- `Packages.props`
-- `*.sln`
- `*.csproj`
- `*.fsproj`
- `*.xproj`
diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -863,7 +862,7 @@ when there is a csproj file in the current directory.
| `version_format` | `"v${raw}"` | The version format. Available vars are `raw`, `major`, `minor`, & `patch` |
| `symbol` | `".NET "` | The symbol used before displaying the version of dotnet. |
| `heuristic` | `true` | Use faster version detection to keep starship snappy. |
-| `detect_extensions` | `["sln", "csproj", "fsproj", "xproj"]` | Which extensions should trigger this module. |
+| `detect_extensions` | `["csproj", "fsproj", "xproj"]` | Which extensions should trigger this module. |
| `detect_files` | `["global.json", "project.json", "Directory.Build.props", "Directory.Build.targets", "Packages.props"]` | Which filenames should trigger this module. |
| `detect_folders` | `[]` | Which folders should trigger this modules. |
| `style` | `"bold blue"` | The style for the module. |
diff --git a/src/configs/dotnet.rs b/src/configs/dotnet.rs
--- a/src/configs/dotnet.rs
+++ b/src/configs/dotnet.rs
@@ -25,7 +25,7 @@ impl<'a> Default for DotnetConfig<'a> {
style: "blue bold",
heuristic: true,
disabled: false,
- detect_extensions: vec!["sln", "csproj", "fsproj", "xproj"],
+ detect_extensions: vec!["csproj", "fsproj", "xproj"],
detect_files: vec
#### Possible Solution
.NET will currently trigger on seeing a `.sln` file, which is probably what's triggering it. However, the `.sln` file
in this directory appears to only be used to define C++ builds for Visual Studio.
We could remove the `.sln` file trigger, or we could display the `.NET` module only if a `.sln` file exists AND there are no `.vcxproj` files. I don't understand the VS build systems well enough to comment, but is it possible to have a `.sln` file without a `.<lang>proj` file?
#### Environment
- Starship version: [the output of `starship --version`]
- Shell type: bash
- Shell version: GNU bash, version 4.3.42(1)-release (x86_64-suse-linux-gnu)
- Shell plugin manager: None
- Terminal emulator: Alacritty
- Operating system: Some Linux distro (remote machine, lsb_release missing)
#### Relevant Shell Configuration
Can reproduce config if needed, but I don't think it's relevant.
#### Starship Configuration
<!-- Can be found in ~/.config/starship.toml -->
```bash
[git_status]
disabled=true
```
| Maybe one solution is not to consider the existence of a .sln file in "DotnetConfig.detect_extensions" as a proof of being in a dotnet project.
In the end a '.sln' file is a structure to organize projects in a solution, and it can have a mix of c++ and c#/vb/f# subprojects in different folders. | 2021-06-05T16:55:54 | 0.54 | dc92d664e22f945645c3ac124a592285d5109894 | [
"modules::aws::tests::expiration_date_set",
"modules::crystal::tests::folder_with_shard_file",
"modules::dart::tests::folder_with_pubspec_lock_file",
"modules::dart::tests::folder_with_dart_file",
"modules::cmake::tests::folder_with_cmake_lists",
"modules::dart::tests::folder_with_pubspec_yaml_file",
"m... | [
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_inverted_silly_caps",
"config::tests::table_get_styles_plain_and_broken_styles",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
"config::tests::table_get_styles_or... | [] | [
"modules::perl::tests::folder_with_makefile_file",
"modules::perl::tests::folder_with_cpanfile_file",
"modules::perl::tests::folder_with_buildfile_file",
"modules::perl::tests::folder_with_cpanfile_snapshot_file",
"modules::perl::tests::folder_with_meta_json_file",
"modules::perl::tests::folder_with_meta_... |
starship/starship | 2,782 | starship__starship-2782 | [
"2022"
] | 2d92e70f516e9ebab4ff20c4d47f0e6098731629 | diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -791,7 +791,8 @@ truncation_symbol = "…/"
The `docker_context` module shows the currently active
[Docker context](https://docs.docker.com/engine/context/working-with-contexts/) if it's not set to
-`default`.
+`default` or if the `DOCKER_HOST` or `DOCKER_CONTEXT` environment variables are set (as they are meant
+to override the context in use).
### Options
diff --git a/src/modules/docker_context.rs b/src/modules/docker_context.rs
--- a/src/modules/docker_context.rs
+++ b/src/modules/docker_context.rs
@@ -9,10 +9,16 @@ use crate::utils;
/// Creates a module with the currently active Docker context
///
/// Will display the Docker context if the following criteria are met:
-/// - There is a file named `$HOME/.docker/config.json`
+/// - There is a non-empty enviroment variable named DOCKER_HOST
+/// - Or there is a non-empty enviroment variable named DOCKER_CONTEXT
+/// - Or there is a file named `$HOME/.docker/config.json`
/// - Or a file named `$DOCKER_CONFIG/config.json`
/// - The file is JSON and contains a field named `currentContext`
/// - The value of `currentContext` is not `default`
+/// - If multiple criterias are met, we use the following order to define the docker context:
+/// - DOCKER_HOST, DOCKER_CONTEXT, $HOME/.docker/config.json, $DOCKER_CONFIG/config.json
+/// - (This is the same order docker follows, as DOCKER_HOST and DOCKER_CONTEXT override the
+/// config)
pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
let mut module = context.new_module("docker_context");
let config: DockerContextConfig = DockerContextConfig::try_load(module.config);
| diff --git a/src/modules/docker_context.rs b/src/modules/docker_context.rs
--- a/src/modules/docker_context.rs
+++ b/src/modules/docker_context.rs
@@ -35,50 +41,46 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
)
.join("config.json");
- if !docker_config.exists() {
- return None;
- }
+ let docker_context_env = std::array::IntoIter::new(["DOCKER_HOST", "DOCKER_CONTEXT"])
+ .find_map(|env| context.get_env(env));
- let json = utils::read_file(docker_config).ok()?;
- let parsed_json = serde_json::from_str(&json).ok()?;
-
- match parsed_json {
- serde_json::Value::Object(root) => {
- let current_context = root.get("currentContext")?;
- match current_context {
- serde_json::Value::String(ctx) => {
- let parsed = StringFormatter::new(config.format).and_then(|formatter| {
- formatter
- .map_meta(|variable, _| match variable {
- "symbol" => Some(config.symbol),
- _ => None,
- })
- .map_style(|variable| match variable {
- "style" => Some(Ok(config.style)),
- _ => None,
- })
- .map(|variable| match variable {
- "context" => Some(Ok(ctx)),
- _ => None,
- })
- .parse(None)
- });
-
- module.set_segments(match parsed {
- Ok(segments) => segments,
- Err(error) => {
- log::warn!("Error in module `docker_context`:\n{}", error);
- return None;
- }
- });
-
- Some(module)
- }
- _ => None,
+ let ctx = match docker_context_env {
+ Some(data) => data,
+ _ => {
+ if !docker_config.exists() {
+ return None;
}
+ let json = utils::read_file(docker_config).ok()?;
+ let parsed_json: serde_json::Value = serde_json::from_str(&json).ok()?;
+ parsed_json.get("currentContext")?.as_str()?.to_owned()
}
- _ => None,
- }
+ };
+
+ let parsed = StringFormatter::new(config.format).and_then(|formatter| {
+ formatter
+ .map_meta(|variable, _| match variable {
+ "symbol" => Some(config.symbol),
+ _ => None,
+ })
+ .map_style(|variable| match variable {
+ "style" => Some(Ok(config.style)),
+ _ => None,
+ })
+ .map(|variable| match variable {
+ "context" => Some(Ok(ctx.as_str())),
+ _ => None,
+ })
+ .parse(None)
+ });
+
+ module.set_segments(match parsed {
+ Ok(segments) => segments,
+ Err(error) => {
+ log::warn!("Error in module `docker_context`:\n{}", error);
+ return None;
+ }
+ });
+ Some(module)
}
#[cfg(test)]
diff --git a/src/modules/docker_context.rs b/src/modules/docker_context.rs
--- a/src/modules/docker_context.rs
+++ b/src/modules/docker_context.rs
@@ -264,4 +266,105 @@ mod tests {
cfg_dir.close()
}
+
+ #[test]
+ fn test_docker_host_env() -> io::Result<()> {
+ let cfg_dir = tempfile::tempdir()?;
+
+ let actual = ModuleRenderer::new("docker_context")
+ .env("DOCKER_HOST", "udp://starship@127.0.0.1:53")
+ .config(toml::toml! {
+ [docker_context]
+ only_with_files = false
+ })
+ .collect();
+ let expected = Some(format!(
+ "via {} ",
+ Color::Blue.bold().paint("🐳 udp://starship@127.0.0.1:53")
+ ));
+
+ assert_eq!(expected, actual);
+
+ cfg_dir.close()
+ }
+
+ #[test]
+ fn test_docker_context_env() -> io::Result<()> {
+ let cfg_dir = tempfile::tempdir()?;
+
+ let actual = ModuleRenderer::new("docker_context")
+ .env("DOCKER_CONTEXT", "starship")
+ .config(toml::toml! {
+ [docker_context]
+ only_with_files = false
+ })
+ .collect();
+ let expected = Some(format!("via {} ", Color::Blue.bold().paint("🐳 starship")));
+
+ assert_eq!(expected, actual);
+
+ cfg_dir.close()
+ }
+
+ #[test]
+ fn test_docker_context_overrides_config() -> io::Result<()> {
+ let cfg_dir = tempfile::tempdir()?;
+
+ let cfg_file = cfg_dir.path().join("config.json");
+
+ let config_content = serde_json::json!({
+ "currentContext": "starship"
+ });
+
+ let mut docker_config = File::create(&cfg_file)?;
+ docker_config.write_all(config_content.to_string().as_bytes())?;
+ docker_config.sync_all()?;
+
+ let actual = ModuleRenderer::new("docker_context")
+ .env("DOCKER_CONTEXT", "starship")
+ .env("DOCKER_CONFIG", cfg_dir.path().to_string_lossy())
+ .config(toml::toml! {
+ [docker_context]
+ only_with_files = false
+ })
+ .collect();
+ let expected = Some(format!("via {} ", Color::Blue.bold().paint("🐳 starship")));
+
+ assert_eq!(expected, actual);
+
+ cfg_dir.close()
+ }
+
+ #[test]
+ fn test_docker_host_overrides_docker_context_env_and_conf() -> io::Result<()> {
+ let cfg_dir = tempfile::tempdir()?;
+
+ let cfg_file = cfg_dir.path().join("config.json");
+
+ let config_content = serde_json::json!({
+ "currentContext": "starship"
+ });
+
+ let mut docker_config = File::create(&cfg_file)?;
+ docker_config.write_all(config_content.to_string().as_bytes())?;
+ docker_config.sync_all()?;
+
+ let actual = ModuleRenderer::new("docker_context")
+ .env("DOCKER_HOST", "udp://starship@127.0.0.1:53")
+ .env("DOCKER_CONTEXT", "starship")
+ .env("DOCKER_CONFIG", cfg_dir.path().to_string_lossy())
+ .config(toml::toml! {
+ [docker_context]
+ only_with_files = false
+ })
+ .collect();
+ let expected = Some(format!(
+ "via {} ",
+ Color::Blue.bold().paint("🐳 udp://starship@127.0.0.1:53")
+ ));
+
+ assert_eq!(expected, actual);
+
+ cfg_dir.close()
+ }
}
| DOCKER_HOST should also be used for docker_context
<!--
─────────────────────────────────────────────
⚠️ IMPORTANT: Please run the following command to create an issue:
starship bug-report
An issue will be pre-populated with your system's configuration,
making the process a whole lot quicker 😊
─────────────────────────────────────────────
-->
## Bug Report
#### Current Behavior
Per https://docs.docker.com/engine/context/working-with-contexts/#the-anatomy-of-a-context the Docker Context can also be overriden by the `DOCKER_HOST` environment variable.
#### Expected Behavior
`docker_context` should appear when `DOCKER_HOST` is set to a non-empty value.
#### Additional context/Screenshots
<!-- Add any other context about the problem here. If applicable, add screenshots to help explain. -->
The default is actually `Current DOCKER_HOST based configuration`
```none
❯ docker context ls
NAME TYPE DESCRIPTION DOCKER ENDPOINT KUBERNETES ENDPOINT ORCHESTRATOR
default * moby Current DOCKER_HOST based configuration ssh://trajano@54.224.104.16 swarm
```
#### Possible Solution
on Windows if `DOCKER_HOST != ''` then show the context but do not use the name instead show the endpoint value.
<!--- Only if you have suggestions on a fix for the bug -->
For now this `custom.docker_context` I use
```toml
[custom.docker_context]
description = "Shows the docker symbol if the current directory has Dockerfile or docker-compose.yml files"
when="[ $DOCKER_HOST ]"
command = "echo -n $DOCKER_HOST"
shell="sh"
symbol = "🐳 "
style = "blue bold"
format= "via [$symbol$output]($style)"
```
#### Environment
- Starship version: 0.47.0
- Shell type: bash
- Shell version: [the output of `fish --version` or `zsh --version`]
- Shell plugin manager: [e.g. oh-my-fish, oh-my-zsh, fisher, antigen]
- Terminal emulator: Windows Terminal
- Operating system: Windows 10
#### Relevant Shell Configuration
<!--
Based on the shell you use, please paste the appropriate configuration.
Fish users: ~/.config/fish/config.fish
Zsh users: ~/.zshrc
-->
```bash
export DOCKER_HOST=ssh://trajano@54.224.104.16
```
#### Starship Configuration
<!-- Can be found in ~/.config/starship.toml -->
```bash
[docker_context]
disabled=false
only_with_files=false
```
| 2021-06-04T23:13:44 | 0.55 | 2d92e70f516e9ebab4ff20c4d47f0e6098731629 | [
"modules::deno::tests::folder_with_mod_ts",
"modules::helm::tests::folder_with_helm_file"
] | [
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_inverted_silly_caps",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
"config::tests::table_get_styles_ordered",
"config::tests::table_get_styles_plain_and_broken_s... | [] | [
"modules::crystal::tests::folder_with_cr_file",
"modules::cmake::tests::buildfolder_with_cmake_cache",
"modules::cmake::tests::folder_with_cmake_lists",
"modules::dart::tests::folder_with_pubspec_yaml_file",
"modules::dart::tests::folder_with_pubspec_lock_file",
"modules::dart::tests::folder_with_dart_fil... | |
starship/starship | 2,702 | starship__starship-2702 | [
"1155"
] | c4f977c48d29790f27d332c32d97eff40fd258f1 | diff --git a/src/modules/git_status.rs b/src/modules/git_status.rs
--- a/src/modules/git_status.rs
+++ b/src/modules/git_status.rs
@@ -256,7 +256,7 @@ impl RepoStatus {
fn is_modified(status: &str) -> bool {
// is_wt_modified
- status.starts_with("1 .M")
+ status.starts_with("1 .M") || status.starts_with("1 .A")
}
fn is_staged(status: &str) -> bool {
| diff --git a/src/modules/git_status.rs b/src/modules/git_status.rs
--- a/src/modules/git_status.rs
+++ b/src/modules/git_status.rs
@@ -633,6 +633,21 @@ mod tests {
repo_dir.close()
}
+ #[test]
+ fn shows_added() -> io::Result<()> {
+ let repo_dir = fixture_repo(FixtureProvider::Git)?;
+
+ create_added(&repo_dir.path())?;
+
+ let actual = ModuleRenderer::new("git_status")
+ .path(&repo_dir.path())
+ .collect();
+ let expected = format_output("!");
+
+ assert_eq!(expected, actual);
+ repo_dir.close()
+ }
+
#[test]
fn shows_staged_file() -> io::Result<()> {
let repo_dir = fixture_repo(FixtureProvider::Git)?;
diff --git a/src/modules/git_status.rs b/src/modules/git_status.rs
--- a/src/modules/git_status.rs
+++ b/src/modules/git_status.rs
@@ -870,6 +885,18 @@ mod tests {
Ok(())
}
+ fn create_added(repo_dir: &Path) -> io::Result<()> {
+ File::create(repo_dir.join("license"))?.sync_all()?;
+
+ Command::new("git")
+ .args(&["add", "-A", "-N"])
+ .current_dir(repo_dir)
+ .output()?;
+ barrier();
+
+ Ok(())
+ }
+
fn create_modified(repo_dir: &Path) -> io::Result<()> {
File::create(repo_dir.join("readme.md"))?.sync_all()?;
| `git add -N` / `git add --intent-to-add` shows files as staged while they are not
#### Current Behavior
<!-- A clear and concise description of the behavior. -->
When adding an untracked file using `git add -N -- $file` or `git add --intent-to-add -- $file`, then starship shows that there are staged files. However, these files aren't staged; there is nothing to commit.
#### Expected Behavior
<!-- A clear and concise description of what you expected to happen. -->
Starship should count these files as containing unstaged changes.
#### Additional context/Screenshots
<!-- Add any other context about the problem here. If applicable, add screenshots to help explain. -->
An example:
In an empty dir, input:
```bash
git init
echo a >> b
git add b
git commit -m first
touch c
git status
```
Output
```
On branch master
Untracked files:
(use "git add <file>..." to include in what will be committed)
c
nothing added to commit but untracked files present (use "git add" to track)
```
Then input:
```bash
git add -N c
git status
```
Output:
```
On branch master
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
(use "git restore <file>..." to discard changes in working directory)
new file: c
no changes added to commit (use "git add" and/or "git commit -a")
```
Then input:
```bash
git commit -m second
```
Then output:
```
On branch master
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
(use "git restore <file>..." to discard changes in working directory)
new file: c
no changes added to commit (use "git add" and/or "git commit -a")
```
As can be seen, there is a new file that is not staged. I expect starship to count it as tracked and containing changes, but not as staged.
#### Environment
- Starship version: 0.41.0
- bash version: GNU bash, version 5.0.17(1)-release (x86_64-apple-darwin18.7.0)
- Operating system: Mac OS 10.14.6
- Terminal emulator: Apple_Terminal 421.2
#### Relevant Shell Configuration
```bash
<unknown config>
```
#### Starship Configuration
```toml
[battery]
disabled = true
[cmd_duration]
disabled = true
[git_branch]
symbol = " "
style = "bold 202" # ANSI color: https://i.stack.imgur.com/KTSQa.png
[git_state]
rebase = "悔"
merge = ""
revert = ""
cherry_pick = ""
bisect = "什"
[git_status]
conflicted = " !"
conflicted_count.enabled = true
ahead.value = " "
ahead.style = "bold green"
behind = " "
diverged.value = " 李"
diverged.style = "bold yellow"
untracked.value = " +"
untracked_count.enabled = true
stashed.value = " $"
stashed.style = "bold blue"
modified.value = " ~"
modified_count.enabled = true
staged.value = " ﰛ"
staged.style = "bold green"
staged_count.enabled = true
staged_count.style = "bold green"
renamed = " "
renamed_count.enabled = true
deleted = " -"
deleted_count.enabled = true
prefix = "["
suffix = " ]"
[java]
disabled = true
```
| Here is where it get wrong.
https://github.com/starship/starship/blob/02edad0c66474758d145ac17e36ba28836d1eced/src/modules/git_status.rs#L266
It should be easy to fix by reading the API of `libgit2`.
Currently there is a refactor that replaces this module with a faster alternative (see matchai/starship-poc, which should fix this issue.
We have just released v0.52.1 which should fix this so going to close this. Please feel free to reopen it if this is not the case.
I see that the issue has been addressed, but there is a new issue with the behaviour now. If you do the following on a clean state git repo:
```
touch file
```
Note that starship sees one untracked file. 👍
Then do:
```
git add -N
```
Note that starship no longer sees any changes, while `git status` notes that there is no clean state:
```
On branch master
Your branch is up to date with 'origin/master'.
Changes not staged for commit:
(use "git add <file>..." to update what will be committed)
(use "git restore <file>..." to discard changes in working directory)
new file: file
no changes added to commit (use "git add" and/or "git commit -a")
```
Please reopen this issue, or create a new issue. I can do that too if you want.
Thanks for the update @erikhuizinga. I think this should just be a case of updating the how we parse the git output. Can you run the following and post the results.
```
env STARSHIP_LOG=trace starship module git_status
```
Here you go:
```
[DEBUG] - (starship::config): STARSHIP_CONFIG is not set
[DEBUG] - (starship::config): Using default config path: ~/.config/starship.toml
[TRACE] - (starship::config): Config file content: "
add_newline = false
[battery]
disabled = true
[cmd_duration]
disabled = true
[git_branch]
symbol = " "
style = "bold 202" # ANSI color: https://i.stack.imgur.com/KTSQa.png
[git_state]
rebase = "悔"
merge = ""
revert = ""
cherry_pick = ""
bisect = "什"
[git_status]
style = 'bold white'
conflicted = "[ !$count](bold yellow)"
ahead = '[ ](bold green)'
behind = '[ ](bold red)'
diverged = '[ 李](bold yellow)'
untracked = ' ?$count'
stashed = '[ \$](bold blue)'
modified = ' ~$count'
staged = '[ +$count](bold green)'
renamed = ' $count'
deleted = '[ -$count](bold red)'
format = '([\[$all_status$ahead_behind \]]($style) )'
[git_commit]
tag_disabled = false
[java]
disabled = true
[kotlin]
disabled = true
[package]
disabled = true
"
[DEBUG] - (starship::config): Config parsed: Table({"add_newline": Boolean(false), "battery": Table({"disabled": Boolean(true)}), "cmd_duration": Table({"disabled": Boolean(true)}), "git_branch": Table({"symbol": String("\u{f418} "), "style": String("bold 202")}), "git_state": Table({"rebase": String("悔"), "merge": String("\u{f407}"), "revert": String("\u{f7d9}"), "cherry_pick": String("\u{e29b}"), "bisect": String("什")}), "git_status": Table({"style": String("bold white"), "conflicted": String("[ !$count](bold yellow)"), "ahead": String("[ \u{f55c}](bold green)"), "behind": String("[ \u{f544}](bold red)"), "diverged": String("[ 李](bold yellow)"), "untracked": String(" ?$count"), "stashed": String("[ \\$](bold blue)"), "modified": String(" ~$count"), "staged": String("[ +$count](bold green)"), "renamed": String(" \u{f553}$count"), "deleted": String("[ -$count](bold red)"), "format": String("([\\[$all_status$ahead_behind \\]]($style) )")}), "git_commit": Table({"tag_disabled": Boolean(false)}), "java": Table({"disabled": Boolean(true)}), "kotlin": Table({"disabled": Boolean(true)}), "package": Table({"disabled": Boolean(true)})})
[DEBUG] - (starship::config): Config found for "git_status": Some(Table({"style": String("bold white"), "conflicted": String("[ !$count](bold yellow)"), "ahead": String("[ \u{f55c}](bold green)"), "behind": String("[ \u{f544}](bold red)"), "diverged": String("[ 李](bold yellow)"), "untracked": String(" ?$count"), "stashed": String("[ \\$](bold blue)"), "modified": String(" ~$count"), "staged": String("[ +$count](bold green)"), "renamed": String(" \u{f553}$count"), "deleted": String("[ -$count](bold red)"), "format": String("([\\[$all_status$ahead_behind \\]]($style) )")}))
[DEBUG] - (starship::modules::git_status): New repo status created
[TRACE] - (starship::utils): Executing command "git" with args ["-C", "~/Desktop/tmp/", "--no-optional-locks", "status", "--porcelain=2", "--branch"]
[TRACE] - (starship::utils): Executing command "git" with args ["-C", "~/Desktop/tmp/", "--no-optional-locks", "stash", "list"]
[TRACE] - (starship::utils): Using "/usr/local/bin/git" as "git"
[TRACE] - (starship::utils): Using "/usr/local/bin/git" as "git"
[TRACE] - (starship::utils): stdout: "", stderr: "", exit code: "Some(0)", took 17.691478ms
[TRACE] - (starship::utils): stdout: "# branch.oid 96191a09a2b9cd25c6b2a1f5880ca23ea2a9e9b3\n# branch.head master\n1 .A N... 000000 000000 100644 0000000000000000000000000000000000000000 0000000000000000000000000000000000000000 file\n", stderr: "", exit code: "Some(0)", took 20.64079ms
[TRACE] - (starship::modules): Took 38.568812ms to compute module "git_status"
[DEBUG] - (starship::config): Config found for "git_status": Some(Table({"style": String("bold white"), "conflicted": String("[ !$count](bold yellow)"), "ahead": String("[ \u{f55c}](bold green)"), "behind": String("[ \u{f544}](bold red)"), "diverged": String("[ 李](bold yellow)"), "untracked": String(" ?$count"), "stashed": String("[ \\$](bold blue)"), "modified": String(" ~$count"), "staged": String("[ +$count](bold green)"), "renamed": String(" \u{f553}$count"), "deleted": String("[ -$count](bold red)"), "format": String("([\\[$all_status$ahead_behind \\]]($style) )")}))
```
(I replaced my home dir path with `~`, in case you're wondering)
That was after
```bash
cd ~/Desktop/tmp
git init
touch a
git add a
git commit -m a
touch file
git add -A -N
```
Thank you @erikhuizinga, I think this should just be a case of updating the way parse the git status output. From having a quick look I think all we need is to update the [is_modified method](https://github.com/starship/starship/blob/603da248527b39de6ecc6ca945a32937439352d7/src/modules/git_status.rs#L257) to accept lines that start with `1 .A` (and add a test 😀). I am away from my laptop at the moment so if someone else wants to submit the PR feel free if not I will sort this when I get back.
> and add a test 😀
➕➕
I'd be happy to take a crack at this as my first issue! | 2021-05-08T03:54:19 | 0.53 | c4f977c48d29790f27d332c32d97eff40fd258f1 | [
"modules::elm::tests::folder_with_elm_file",
"modules::elm::tests::folder_with_elm_json",
"modules::dotnet::tests::shows_single_tfm",
"modules::elixir::tests::test_with_mix_file",
"modules::elm::tests::folder_with_elm_version",
"modules::erlang::tests::test_with_config",
"modules::elm::tests::folder_wit... | [
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_inverted_silly_caps",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
"config::tests::table_get_styles_plain_and_broken_styles",
"config::tests::table_get_styles_or... | [] | [
"modules::golang::tests::folder_with_go_version",
"modules::helm::tests::folder_with_chart_file",
"modules::helm::tests::folder_with_helm_file",
"modules::golang::tests::folder_with_gopkg_lock",
"modules::golang::tests::folder_with_gopkg_yml",
"modules::nodejs::tests::engines_node_version_not_match",
"m... |
starship/starship | 6,314 | starship__starship-6314 | [
"6170"
] | cbc22a316db52f253719e258a3cd3c8fa4e1495b | diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -1267,7 +1267,7 @@ disabled = false
The `docker_context` module shows the currently active
[Docker context](https://docs.docker.com/engine/context/working-with-contexts/)
-if it's not set to `default` or if the `DOCKER_MACHINE_NAME`, `DOCKER_HOST` or
+if it's not set to `default` or `desktop-linux`, or if the `DOCKER_MACHINE_NAME`, `DOCKER_HOST` or
`DOCKER_CONTEXT` environment variables are set (as they are meant to override
the context in use).
diff --git a/src/modules/docker_context.rs b/src/modules/docker_context.rs
--- a/src/modules/docker_context.rs
+++ b/src/modules/docker_context.rs
@@ -14,7 +14,7 @@ use crate::utils;
/// - Or there is a file named `$HOME/.docker/config.json`
/// - Or a file named `$DOCKER_CONFIG/config.json`
/// - The file is JSON and contains a field named `currentContext`
-/// - The value of `currentContext` is not `default`
+/// - The value of `currentContext` is not `default` or `desktop-linux`
/// - If multiple criteria are met, we use the following order to define the docker context:
/// - `DOCKER_HOST`, `DOCKER_CONTEXT`, $HOME/.docker/config.json, $`DOCKER_CONFIG/config.json`
/// - (This is the same order docker follows, as `DOCKER_HOST` and `DOCKER_CONTEXT` override the
diff --git a/src/modules/docker_context.rs b/src/modules/docker_context.rs
--- a/src/modules/docker_context.rs
+++ b/src/modules/docker_context.rs
@@ -57,7 +57,8 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
}
};
- if ctx == "default" || ctx.starts_with("unix://") {
+ let default_contexts = ["default", "desktop-linux"];
+ if default_contexts.contains(&ctx.as_str()) || ctx.starts_with("unix://") {
return None;
}
| diff --git a/src/modules/docker_context.rs b/src/modules/docker_context.rs
--- a/src/modules/docker_context.rs
+++ b/src/modules/docker_context.rs
@@ -347,6 +348,24 @@ mod tests {
cfg_dir.close()
}
+ #[test]
+ fn test_docker_context_default_after_3_5() -> io::Result<()> {
+ let cfg_dir = tempfile::tempdir()?;
+
+ let actual = ModuleRenderer::new("docker_context")
+ .env("DOCKER_CONTEXT", "desktop-linux")
+ .config(toml::toml! {
+ [docker_context]
+ only_with_files = false
+ })
+ .collect();
+ let expected = None;
+
+ assert_eq!(expected, actual);
+
+ cfg_dir.close()
+ }
+
#[test]
fn test_docker_context_overrides_config() -> io::Result<()> {
let cfg_dir = tempfile::tempdir()?;
| Docker context is shown even though is is default
#### Current Behavior
Docker Desktop uses "desktop-linux" instead of "default" as the default context since version 3.5.
#### Expected Behavior
If the context is "desktop-linux" it should not be shown in my opinion.
#### Environment
- Starship version: 1.20.1
- zsh version: zsh 5.9 (x86_64-apple-darwin23.0)
- Operating system: Mac OS 14.5.0
- Terminal emulator: WarpTerminal v0.2024.07.30.08.02.stable_01
- Git Commit Hash:
- Branch/Tag:
- Rust Version: rustc 1.79.0 (129f3b996 2024-06-10) (Homebrew)
- Rust channel: release
- Build Time: 2024-07-27 09:07:23 +00:00
#### Relevant Shell Configuration
```bash
eval "$(starship init zsh)"
```
#### Starship Configuration
```toml
```
| 2024-10-12T07:26:51 | 1.20 | cbc22a316db52f253719e258a3cd3c8fa4e1495b | [
"modules::docker_context::tests::test_docker_context_default_after_3_5"
] | [
"config::tests::read_config_no_config_file_path_provided",
"bug_report::tests::test_get_config_path",
"config::tests::table_get_colors_palette",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_inverted_silly_caps",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_blink... | [
"modules::username::tests::show_always_false"
] | [] | |
starship/starship | 6,310 | starship__starship-6310 | [
"6309"
] | cbc22a316db52f253719e258a3cd3c8fa4e1495b | diff --git a/.github/config-schema.json b/.github/config-schema.json
--- a/.github/config-schema.json
+++ b/.github/config-schema.json
@@ -311,6 +311,7 @@
"detect_files": [
"deno.json",
"deno.jsonc",
+ "deno.lock",
"mod.ts",
"deps.ts",
"mod.js",
diff --git a/.github/config-schema.json b/.github/config-schema.json
--- a/.github/config-schema.json
+++ b/.github/config-schema.json
@@ -2739,6 +2740,7 @@
"default": [
"deno.json",
"deno.jsonc",
+ "deno.lock",
"mod.ts",
"deps.ts",
"mod.js",
diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -1097,20 +1097,20 @@ format = 'via [🔰 $version](bold red) '
The `deno` module shows you your currently installed version of [Deno](https://deno.land/).
By default the module will be shown if any of the following conditions are met:
-- The current directory contains a `deno.json`, `deno.jsonc`, `mod.ts`, `mod.js`, `deps.ts` or `deps.js` file
+- The current directory contains a `deno.json`, `deno.jsonc`, `deno.lock`, `mod.ts`, `mod.js`, `deps.ts` or `deps.js` file
### Options
-| Option | Default | Description |
-| ------------------- | ----------------------------------------------------------------------- | ------------------------------------------------------------------------- |
-| `format` | `'via [$symbol($version )]($style)'` | The format for the module. |
-| `version_format` | `'v${raw}'` | The version format. Available vars are `raw`, `major`, `minor`, & `patch` |
-| `symbol` | `'🦕 '` | A format string representing the symbol of Deno |
-| `detect_extensions` | `[]` | Which extensions should trigger this module. |
-| `detect_files` | `['deno.json', 'deno.jsonc', 'mod.ts', 'mod.js', 'deps.ts', 'deps.js']` | Which filenames should trigger this module. |
-| `detect_folders` | `[]` | Which folders should trigger this module. |
-| `style` | `'green bold'` | The style for the module. |
-| `disabled` | `false` | Disables the `deno` module. |
+| Option | Default | Description |
+| ------------------- | ------------------------------------------------------------------------------------ | ------------------------------------------------------------------------- |
+| `format` | `'via [$symbol($version )]($style)'` | The format for the module. |
+| `version_format` | `'v${raw}'` | The version format. Available vars are `raw`, `major`, `minor`, & `patch` |
+| `symbol` | `'🦕 '` | A format string representing the symbol of Deno |
+| `detect_extensions` | `[]` | Which extensions should trigger this module. |
+| `detect_files` | `['deno.json', 'deno.jsonc', 'deno.lock', 'mod.ts', 'mod.js', 'deps.ts', 'deps.js']` | Which filenames should trigger this module. |
+| `detect_folders` | `[]` | Which folders should trigger this module. |
+| `style` | `'green bold'` | The style for the module. |
+| `disabled` | `false` | Disables the `deno` module. |
### Variables
diff --git a/src/configs/deno.rs b/src/configs/deno.rs
--- a/src/configs/deno.rs
+++ b/src/configs/deno.rs
@@ -30,6 +30,7 @@ impl<'a> Default for DenoConfig<'a> {
detect_files: vec![
"deno.json",
"deno.jsonc",
+ "deno.lock",
"mod.ts",
"deps.ts",
"mod.js",
| diff --git a/src/modules/deno.rs b/src/modules/deno.rs
--- a/src/modules/deno.rs
+++ b/src/modules/deno.rs
@@ -103,6 +103,16 @@ mod tests {
dir.close()
}
+ #[test]
+ fn folder_with_deno_lock() -> io::Result<()> {
+ let dir = tempfile::tempdir()?;
+ File::create(dir.path().join("deno.lock"))?.sync_all()?;
+ let actual = ModuleRenderer::new("deno").path(dir.path()).collect();
+ let expected = Some(format!("via {}", Color::Green.bold().paint("🦕 v1.8.3 ")));
+ assert_eq!(expected, actual);
+ dir.close()
+ }
+
#[test]
fn folder_with_mod_ts() -> io::Result<()> {
let dir = tempfile::tempdir()?;
| Add `deno.lock` to default detect_files list for `deno`
I noticed when running `deno install` on a node project with only a `package.json` file that Starship does not pick up on `deno` usage. Because Deno 2 supports interoperability with `package.json` files, I think it would be useful if the default list of `detect_files` included the `deno.lock` file. Thanks!
| 2024-10-10T22:39:27 | 1.20 | cbc22a316db52f253719e258a3cd3c8fa4e1495b | [
"modules::deno::tests::folder_with_deno_lock"
] | [
"bug_report::tests::test_get_config_path",
"config::tests::read_config_no_config_file_path_provided",
"config::tests::table_get_colors_palette",
"config::tests::table_get_palette",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_blink_silly_caps",
"config::tests::table_get_styles_bold_... | [
"modules::username::tests::show_always_false"
] | [] | |
starship/starship | 5,081 | starship__starship-5081 | [
"5066"
] | de7e94884bc309814f6af79d68d664efb513e093 | diff --git a/docs/config/README.md b/docs/config/README.md
--- a/docs/config/README.md
+++ b/docs/config/README.md
@@ -2829,11 +2829,12 @@ By default the module will be shown if any of the following conditions are met:
### Variables
-| Variable | Example | Description |
-| -------- | ---------- | ------------------------------------ |
-| version | `v13.12.0` | The version of `node` |
-| symbol | | Mirrors the value of option `symbol` |
-| style\* | | Mirrors the value of option `style` |
+| Variable | Example | Description |
+| --------------- | ---------- | --------------------------------------------------------------------------------------------------------------------------------------------------------- |
+| version | `v13.12.0` | The version of `node` |
+| engines_version | `>=12.0.0` | `node` version requirement as set in the engines property of `package.json`. Will only show if the version requirement does not match the `node` version. |
+| symbol | | Mirrors the value of option `symbol` |
+| style\* | | Mirrors the value of option `style` |
*: This variable can only be used as a part of a style string
diff --git a/src/modules/nodejs.rs b/src/modules/nodejs.rs
--- a/src/modules/nodejs.rs
+++ b/src/modules/nodejs.rs
@@ -35,6 +35,8 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
.exec_cmd("node", &["--version"])
.map(|cmd| cmd.stdout)
});
+ let engines_version = Lazy::new(|| get_engines_version(context));
+
let parsed = StringFormatter::new(config.format).and_then(|formatter| {
formatter
.map_meta(|var, _| match var {
diff --git a/src/modules/nodejs.rs b/src/modules/nodejs.rs
--- a/src/modules/nodejs.rs
+++ b/src/modules/nodejs.rs
@@ -43,9 +45,11 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
})
.map_style(|variable| match variable {
"style" => {
- let engines_version = get_engines_version(context);
- let in_engines_range =
- check_engines_version(nodejs_version.as_deref(), engines_version);
+ let in_engines_range = check_engines_version(
+ nodejs_version.as_deref(),
+ engines_version.as_deref(),
+ );
+
if in_engines_range {
Some(Ok(config.style))
} else {
diff --git a/src/modules/nodejs.rs b/src/modules/nodejs.rs
--- a/src/modules/nodejs.rs
+++ b/src/modules/nodejs.rs
@@ -56,7 +60,7 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
})
.map(|variable| match variable {
"version" => {
- let version = nodejs_version
+ let node_ver = nodejs_version
.deref()
.as_ref()?
.trim_start_matches('v')
diff --git a/src/modules/nodejs.rs b/src/modules/nodejs.rs
--- a/src/modules/nodejs.rs
+++ b/src/modules/nodejs.rs
@@ -64,11 +68,20 @@ pub fn module<'a>(context: &'a Context) -> Option<Module<'a>> {
VersionFormatter::format_module_version(
module.get_name(),
- version,
+ node_ver,
config.version_format,
)
.map(Ok)
}
+ "engines_version" => {
+ let in_engines_range = check_engines_version(
+ nodejs_version.as_deref(),
+ engines_version.as_deref(),
+ );
+ let eng_ver = engines_version.as_deref()?.to_string();
+
+ (!in_engines_range).then_some(Ok(eng_ver))
+ }
_ => None,
})
.parse(None, Some(context))
diff --git a/src/modules/nodejs.rs b/src/modules/nodejs.rs
--- a/src/modules/nodejs.rs
+++ b/src/modules/nodejs.rs
@@ -89,16 +102,19 @@ fn get_engines_version(context: &Context) -> Option<String> {
let json_str = context.read_file_from_pwd("package.json")?;
let package_json: json::Value = json::from_str(&json_str).ok()?;
let raw_version = package_json.get("engines")?.get("node")?.as_str()?;
+
Some(raw_version.to_string())
}
-fn check_engines_version(nodejs_version: Option<&str>, engines_version: Option<String>) -> bool {
+fn check_engines_version(nodejs_version: Option<&str>, engines_version: Option<&str>) -> bool {
let (Some(nodejs_version), Some(engines_version)) = (nodejs_version, engines_version) else {
return true;
};
- let Ok(r) = VersionReq::parse(&engines_version) else {
+
+ let Ok(r) = VersionReq::parse(engines_version) else {
return true;
};
+
let re = Regex::new(r"\d+\.\d+\.\d+").unwrap();
let version = re
.captures(nodejs_version)
diff --git a/src/modules/nodejs.rs b/src/modules/nodejs.rs
--- a/src/modules/nodejs.rs
+++ b/src/modules/nodejs.rs
@@ -106,6 +122,7 @@ fn check_engines_version(nodejs_version: Option<&str>, engines_version: Option<S
.get(0)
.unwrap()
.as_str();
+
let v = match Version::parse(version) {
Ok(v) => v,
Err(_e) => return true,
| diff --git a/src/modules/nodejs.rs b/src/modules/nodejs.rs
--- a/src/modules/nodejs.rs
+++ b/src/modules/nodejs.rs
@@ -269,6 +286,80 @@ mod tests {
assert_eq!(expected, actual);
dir.close()
}
+
+ #[test]
+ fn show_expected_version_when_engines_does_not_match() -> io::Result<()> {
+ let dir = tempfile::tempdir()?;
+ let mut file = File::create(dir.path().join("package.json"))?;
+ file.write_all(
+ b"{
+ \"engines\":{
+ \"node\":\"<=11.0.0\"
+ }
+ }",
+ )?;
+ file.sync_all()?;
+
+ let actual = ModuleRenderer::new("nodejs")
+ .path(dir.path())
+ .config(toml::toml! {
+ [nodejs]
+ format = "via [$symbol($version )($engines_version )]($style)"
+ })
+ .collect();
+ let expected = Some(format!(
+ "via {}",
+ Color::Red.bold().paint(" v12.0.0 <=11.0.0 ")
+ ));
+
+ assert_eq!(expected, actual);
+ dir.close()
+ }
+
+ #[test]
+ fn do_not_show_expected_version_if_engines_match() -> io::Result<()> {
+ let dir = tempfile::tempdir()?;
+ let mut file = File::create(dir.path().join("package.json"))?;
+ file.write_all(
+ b"{
+ \"engines\":{
+ \"node\":\">=12.0.0\"
+ }
+ }",
+ )?;
+ file.sync_all()?;
+
+ let actual = ModuleRenderer::new("nodejs")
+ .path(dir.path())
+ .config(toml::toml! [
+ [nodejs]
+ format = "via [$symbol($version )($engines_version )]($style)"
+ ])
+ .collect();
+ let expected = Some(format!("via {}", Color::Green.bold().paint(" v12.0.0 ")));
+
+ assert_eq!(expected, actual);
+ dir.close()
+ }
+
+ #[test]
+ fn do_not_show_expected_version_if_no_set_engines_version() -> io::Result<()> {
+ let dir = tempfile::tempdir()?;
+ File::create(dir.path().join("package.json"))?.sync_all()?;
+
+ let actual = ModuleRenderer::new("nodejs")
+ .path(dir.path())
+ .config(toml::toml! {
+ [nodejs]
+ format = "via [$symbol($version )($engines_version )]($style)"
+ })
+ .collect();
+ let expected = Some(format!("via {}", Color::Green.bold().paint(" v12.0.0 ")));
+
+ assert_eq!(expected, actual);
+ dir.close()
+ }
+
#[test]
fn no_node_installed() -> io::Result<()> {
let dir = tempfile::tempdir()?;
| Add variable for the expected Node.js version
## Feature Request
For Node.js, Starship provides the [variable](https://starship.rs/config/#node-js) `version` and the [options](https://starship.rs/config/#node-js) `style` and `not_capable_style`. `version` is the version of the installed Node.js and it is displayed using the format specified in `style` when it matches the version specified in `package.json` (property `$.engines.node`) or using the format specified in `not_capable_style` when it does not match.
This is helpful in itself but it could be more helpful than than. When the version of the interpreter does not match the project requirements, the first execution of any `yarn` command in the directory tells not only that the current version of Node.js is not appropriate for the project but it also tells what version is expected.
Starship could bring the same information without even running `yarn`.
It already knows what is written in `package.json` and has the information in memory. This is how it detects if the current version of Node.js is good or not and decides to render it using `style` or `not_capable_style`. All it needs is to provide a variable similar to `version` that contains the Node.js version required by the project (the value of property `$.engines.node` from `package.json`).
Please add a new variable (it could be named `expected_version`) that contains the value of property `$.engines.node` from `package.json`).
---
A similar solution can be implemented for other languages that declare the version of the interpreter that they need to run (PHP, Python, Ruby etc.)
| It would be useful to show only `version` if the Node.js version matches the version expected by the project and to show both `version` and `expected_version` if the version of the interpreter does not match the requirements of the project.
Is this possible?
Sounded straightforward so I tried this~ Currently have a working implementation, shows expected version if `engines.node` and current node version don't match, and if the variable is included in the module format. Not including the `$expected_version` variable in format doesn't change the normal rendering.
Was thinking about adding an option to allow people to enable/disable it but if you don't put the variable in format then it won't show regardless? Just have to clean some stuff up. | 2023-04-10T00:19:08 | 1.13 | de7e94884bc309814f6af79d68d664efb513e093 | [
"modules::nodejs::tests::show_expected_version_when_engines_does_not_match"
] | [
"config::tests::table_get_palette",
"bug_report::tests::test_get_config_path",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_silly_caps",
"config::tests::table_get_styles_bold_italic_underline_green_dimmed_blink_silly_caps",
"config::tests::table_get_colors_palette",
"config::tests::... | [] | [] |
static-web-server/static-web-server | 166 | static-web-server__static-web-server-166 | [
"165"
] | 8017446d9d1099217b7d1ed451e57b2912d0cc1e | diff --git a/src/compression_static.rs b/src/compression_static.rs
--- a/src/compression_static.rs
+++ b/src/compression_static.rs
@@ -31,20 +31,22 @@ pub async fn precompressed_variant(
// Try to find the pre-compressed metadata variant for the given file path
if let Some(ext) = precomp_ext {
let mut filepath_precomp = file_path;
- let filename = filepath_precomp.file_name().unwrap().to_str().unwrap();
- let precomp_file_name = [filename, ".", ext].concat();
- filepath_precomp.set_file_name(precomp_file_name);
-
- tracing::trace!(
- "getting metadata for pre-compressed file variant {}",
- filepath_precomp.display()
- );
-
- if let Ok((meta, _)) = file_metadata(&filepath_precomp) {
- tracing::trace!("pre-compressed file variant found, serving it directly");
-
- let encoding = if ext == "gz" { "gzip" } else { ext };
- precompressed = Some((filepath_precomp, meta, encoding));
+ if let Some(filename) = filepath_precomp.file_name() {
+ let filename = filename.to_str().unwrap();
+ let precomp_file_name = [filename, ".", ext].concat();
+ filepath_precomp.set_file_name(precomp_file_name);
+
+ tracing::trace!(
+ "getting metadata for pre-compressed file variant {}",
+ filepath_precomp.display()
+ );
+
+ if let Ok((meta, _)) = file_metadata(&filepath_precomp) {
+ tracing::trace!("pre-compressed file variant found, serving it directly");
+
+ let encoding = if ext == "gz" { "gzip" } else { ext };
+ precompressed = Some((filepath_precomp, meta, encoding));
+ }
}
// Note: In error case like "no such file or dir" the workflow just continues
| diff --git a/tests/compression_static.rs b/tests/compression_static.rs
--- a/tests/compression_static.rs
+++ b/tests/compression_static.rs
@@ -126,4 +126,31 @@ mod tests {
"body and index_gz_buf are not equal in length"
);
}
+
+ #[tokio::test]
+ async fn compression_static_base_path_as_dot() {
+ let mut headers = HeaderMap::new();
+ headers.insert(
+ http::header::ACCEPT_ENCODING,
+ "gzip, deflate, br".parse().unwrap(),
+ );
+
+ let base_path = PathBuf::from(".");
+
+ let (_resp, _) = static_files::handle(&HandleOpts {
+ method: &Method::GET,
+ headers: &headers,
+ base_path: &base_path,
+ uri_path: "/",
+ uri_query: None,
+ dir_listing: true,
+ dir_listing_order: 6,
+ dir_listing_format: &DirListFmt::Html,
+ redirect_trailing_slash: true,
+ compression_static: true,
+ ignore_hidden_files: false,
+ })
+ .await
+ .expect("unexpected error response on `handle` function");
+ }
}
| Server crashes with compression-static enabled and root as "."
**Describe the bug**
The server crashes when it receives a request for "/" with static compression enabled and root as ".".
This only happens if the client supports compression
**To Reproduce**
run `static-web-server --root . --compression-static true` and access http://localhost from a client that supports compression
**Expected behavior**
Server should not crash
**Logs/Screenshots**
```
2022-12-02T02:38:26.080893Z INFO static_web_server::handler: incoming request: method=GET uri=/
2022-12-02T02:38:26.080900Z TRACE static_web_server::static_files: dir: base=".", route=""
2022-12-02T02:38:26.080912Z TRACE static_web_server::compression_static: preparing pre-compressed file path variant of .
thread 'static-web-server' panicked at 'called `Option::unwrap()` on a `None` value', src/compression_static.rs:34:53
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
[1] 368327 IOT instruction static-web-server -p 8080 --root . --compression-static true -g trace
```
**Environment and Specs**
- static-web-server: 2.14.0
- OS: Debian bookworm/sid Linux 6.0.0
- Client: Firefox 107.0
**Additional context**
Add any other context about the problem here if needed.
| 2022-12-02T11:22:49 | 2.14 | 8017446d9d1099217b7d1ed451e57b2912d0cc1e | [
"tests::compression_static_base_path_as_dot"
] | [
"basic_auth::tests::test_invalid_auth_encoding",
"basic_auth::tests::test_invalid_auth_encoding2",
"basic_auth::tests::test_invalid_auth",
"basic_auth::tests::test_invalid_auth_header",
"basic_auth::tests::test_invalid_auth_pairs",
"static_files::tests::test_sanitize_path",
"control_headers::tests::head... | [] | [] | |
static-web-server/static-web-server | 471 | static-web-server__static-web-server-471 | [
"470"
] | dd48972896d859344a84e5d2944d6662ccc22dcb | diff --git a/src/compression.rs b/src/compression.rs
--- a/src/compression.rs
+++ b/src/compression.rs
@@ -98,6 +98,11 @@ pub(crate) fn post_process<T>(
return Ok(resp);
}
+ let is_precompressed = resp.headers().get(CONTENT_ENCODING).is_some();
+ if is_precompressed {
+ return Ok(resp);
+ }
+
// Compression content encoding varies so use a `Vary` header
resp.headers_mut().insert(
hyper::header::VARY,
| diff --git a/tests/compression.rs b/tests/compression.rs
--- a/tests/compression.rs
+++ b/tests/compression.rs
@@ -16,7 +16,7 @@ pub mod tests {
#[tokio::test]
async fn compression_file() {
- let opts = fixture_settings("toml/handler.toml");
+ let opts = fixture_settings("toml/handler_fixtures.toml");
let general = General {
compression: true,
compression_static: true,
diff --git /dev/null b/tests/fixtures/toml/handler_fixtures.toml
new file mode 100644
--- /dev/null
+++ b/tests/fixtures/toml/handler_fixtures.toml
@@ -0,0 +1,9 @@
+[general]
+
+root = "tests/fixtures/public"
+
+[advanced]
+
+[[advanced.headers]]
+source = "**/*.{html,htm}"
+headers = { Server = "Static Web Server" }
| Incorrect `content-encoding` for `gzip` when file is already available + `compression-static`
### Search for duplicate issues
- [X] I already searched, and this issue is not a duplicate.
### Issue scope
Docker / Kubernetes
### Describe the bug
If I run `SWS` with my "frontend" package, with both regular and pre-compressed files (`.gzip`), I see an unexpected `content-encoding` returned by `SWS`: `content-encoding: gzip, gzip`.
After verification, the content is packaged twice, aka a `gzip` inside a `gzip`.
This generates issues with Apple based browsers, because they don't process `gzip` twice and so can't parse `js` or `css` after the first "unpackaging".
### How to reproduce it
Find attached files I have generated, with the unexpected result in `SWS`:
[app-css.zip](https://github.com/user-attachments/files/16589058/app-css.zip)
If I run the `SWS` with `docker run -it --rm -p 8080:8080 ui-for-testing`, using the following configuration:
```toml
[general]
host = "::"
port = 8080
log-level = "info"
cache-control-headers = true
compression = true
compression-static = true
security-headers = true
directory-listing = false
redirect-trailing-slash = true
ignore-hidden-files = true
```
I get the following result when I run the following `curl` call:
```sh
curl 'http://localhost:8080/app.min.css' \
-H 'Accept: */*' \
-H 'Accept-Encoding: gzip' \
-v -o nul &| grep content-encoding
< content-encoding: gzip, gzip
```
If I set `compression-static = false`, the result is "normal", but I suspect the compression is done by `SWS` and doesn't leverage the pre-built `gzip` file.
```
curl 'http://localhost:8080/app.min.css' \
-H 'Accept: */*' \
-H 'Accept-Encoding: gzip' \
-v -o nul &| grep content-encoding
< content-encoding: gzip
```
### Expected behavior
I expect the system to leverage the pre-build `gzip` and returns a `content-type` with only `gzip` once.
### Complementary information
From the investigation I did, this bug has been introduced after `2.31.0`.
If I change my `Dockerfile` to `2.32.0`, I have the issue, but I don't have it with `2.31.0`, of course with the same config and files.
### Build target
Docker linux/amd64
### Environment and specs
- [x] **static-web-server:** [e.g. v2.32.0+]
- [ ] **Rust:** [e.g. v1.78.0]
- [x] **OS:** Distrolesss
- [x] **Arch:** x86_64 (64-bit), ARM (32-bit), ARM64 (64-bit)
- [x] **Docker:** 27.1.1
- [x] **Client:** All (Safari, Chrome, Arc, `curl`)
### Additional context
This issue has been found while developing on my application, [Podcast-Server](https://gitlab.com/davinkevin/Podcast-Server/), and you can find the code used by the UI [here](https://gitlab.com/davinkevin/Podcast-Server/-/tree/main/ui/src/docker?ref_type=heads)
| 2024-08-13T07:20:43 | 2.32 | dd48972896d859344a84e5d2944d6662ccc22dcb | [
"tests::compression_file"
] | [
"basic_auth::tests::test_auth_disabled",
"basic_auth::tests::test_invalid_auth_encoding",
"basic_auth::tests::test_invalid_auth",
"basic_auth::tests::test_options_request",
"basic_auth::tests::test_invalid_auth_configuration",
"basic_auth::tests::test_invalid_auth_header",
"basic_auth::tests::test_inval... | [] | [] | |
static-web-server/static-web-server | 74 | static-web-server__static-web-server-74 | [
"73",
"73"
] | 4791f7cbb1654dc37ae2a47260292a0e588f452f | diff --git a/src/static_files.rs b/src/static_files.rs
--- a/src/static_files.rs
+++ b/src/static_files.rs
@@ -58,30 +58,32 @@ pub async fn handle(
let base = Arc::new(base_path.into());
let (filepath, meta, auto_index) = path_from_tail(base, uri_path).await?;
+ // NOTE: `auto_index` appends an `index.html` to an `uri_path` of kind directory only.
+
+ // Check for a trailing slash on the current directory path
+ // and redirect if that path doesn't end with the slash char
+ if auto_index && !uri_path.ends_with('/') {
+ let uri = [uri_path, "/"].concat();
+ let loc = match HeaderValue::from_str(uri.as_str()) {
+ Ok(val) => val,
+ Err(err) => {
+ tracing::error!("invalid header value from current uri: {:?}", err);
+ return Err(StatusCode::INTERNAL_SERVER_ERROR);
+ }
+ };
+
+ let mut resp = Response::new(Body::empty());
+ resp.headers_mut().insert(hyper::header::LOCATION, loc);
+ *resp.status_mut() = StatusCode::PERMANENT_REDIRECT;
+ tracing::trace!("uri doesn't end with a slash so redirecting permanently");
+ return Ok(resp);
+ }
+
// Directory listing
- // 1. Check if "directory listing" feature is enabled,
+ // 1. Check if "directory listing" feature is enabled
// if current path is a valid directory and
- // if it does not contain an `index.html` file
+ // if it does not contain an `index.html` file (if a proper auto index is generated)
if dir_listing && auto_index && !filepath.as_ref().exists() {
- // Redirect if current path does not end with a slash char
- if !uri_path.ends_with('/') {
- let uri = [uri_path, "/"].concat();
- let loc = match HeaderValue::from_str(uri.as_str()) {
- Ok(val) => val,
- Err(err) => {
- tracing::error!("invalid header value from current uri: {:?}", err);
- return Err(StatusCode::INTERNAL_SERVER_ERROR);
- }
- };
-
- let mut resp = Response::new(Body::empty());
- resp.headers_mut().insert(hyper::header::LOCATION, loc);
- *resp.status_mut() = StatusCode::PERMANENT_REDIRECT;
- tracing::trace!("uri doesn't end with a slash so redirect permanently");
-
- return Ok(resp);
- }
-
return directory_listing(
method,
uri_path,
| diff --git a/tests/static_files.rs b/tests/static_files.rs
--- a/tests/static_files.rs
+++ b/tests/static_files.rs
@@ -115,6 +115,30 @@ mod tests {
}
}
+ #[tokio::test]
+ async fn handle_trailing_slash_redirection() {
+ let mut res = static_files::handle(
+ &Method::GET,
+ &HeaderMap::new(),
+ root_dir(),
+ "assets",
+ None,
+ false,
+ 0,
+ )
+ .await
+ .expect("unexpected error response on `handle` function");
+
+ assert_eq!(res.status(), 308);
+ assert_eq!(res.headers()["location"], "assets/");
+
+ let body = hyper::body::to_bytes(res.body_mut())
+ .await
+ .expect("unexpected bytes error during `body` conversion");
+
+ assert_eq!(body, Bytes::new());
+ }
+
#[tokio::test]
async fn handle_append_index_on_dir() {
let buf = fs::read(root_dir().join("index.html"))
diff --git a/tests/static_files.rs b/tests/static_files.rs
--- a/tests/static_files.rs
+++ b/tests/static_files.rs
@@ -134,9 +158,22 @@ mod tests {
)
.await
{
- Ok(res) => {
- assert_eq!(res.status(), 200);
- assert_eq!(res.headers()["content-length"], buf.len().to_string());
+ Ok(mut res) => {
+ if uri.is_empty() {
+ // it should redirect permanently
+ assert_eq!(res.status(), 308);
+ assert_eq!(res.headers()["location"], "/");
+
+ let body = hyper::body::to_bytes(res.body_mut())
+ .await
+ .expect("unexpected bytes error during `body` conversion");
+
+ assert_eq!(body, Bytes::new());
+ } else {
+ // otherwise it should response with ok
+ assert_eq!(res.status(), 200);
+ assert_eq!(res.headers()["content-length"], buf.len().to_string());
+ }
}
Err(_) => {
panic!("expected a status 200 but not a status error")
| Add trailing slash when missing
**Is your feature request related to a problem? Please describe.**
Current, managing relative links is a pain because navigating to `website.com/myfolder` returns the index page of that folder with no redirect. This means that if `index.html` references `./image.png`, the browser will try to get it from `website.com/image.png` instead of `website.com/myfolder/image.png`.
**Describe the solution you'd like**
When serving a folder, check if there is a trailing slash. If there isn't, do a redirect to a path with the slash at the end.
Add trailing slash when missing
**Is your feature request related to a problem? Please describe.**
Current, managing relative links is a pain because navigating to `website.com/myfolder` returns the index page of that folder with no redirect. This means that if `index.html` references `./image.png`, the browser will try to get it from `website.com/image.png` instead of `website.com/myfolder/image.png`.
**Describe the solution you'd like**
When serving a folder, check if there is a trailing slash. If there isn't, do a redirect to a path with the slash at the end.
| Yeah in fact we do this already for the directory listing so it should require a minor tweak to make it happen.
Yeah in fact we do this already for the directory listing so it should require a minor tweak to make it happen. | 2022-01-02T06:12:21 | 2.3 | 4791f7cbb1654dc37ae2a47260292a0e588f452f | [
"tests::handle_append_index_on_dir",
"tests::handle_trailing_slash_redirection"
] | [
"basic_auth::tests::test_invalid_auth_encoding2",
"basic_auth::tests::test_invalid_auth",
"static_files::tests::test_sanitize_path",
"static_files::tests::test_reserve_at_least",
"basic_auth::tests::test_invalid_auth_encoding",
"basic_auth::tests::test_invalid_auth_header",
"basic_auth::tests::test_inva... | [] | [] |
TeXitoi/structopt | 14 | TeXitoi__structopt-14 | [
"13"
] | 52d2550d4e5bea30cf3a689abef208f654bff5c4 | diff --git a/examples/basic.rs b/examples/basic.rs
--- a/examples/basic.rs
+++ b/examples/basic.rs
@@ -11,23 +11,36 @@ extern crate structopt_derive;
use structopt::StructOpt;
+/// A basic example
#[derive(StructOpt, Debug)]
-#[structopt(name = "basic", about = "A basic example")]
+#[structopt(name = "basic")]
struct Opt {
- #[structopt(short = "d", long = "debug", help = "Activate debug mode")]
+ /// Activate debug mode
+ #[structopt(short = "d", long = "debug")]
debug: bool,
- #[structopt(short = "v", long = "verbose", help = "Verbose mode")]
+
+ /// Verbose mode
+ #[structopt(short = "v", long = "verbose")]
verbose: u64,
- #[structopt(short = "s", long = "speed", help = "Set speed", default_value = "42")]
+
+ /// Set speed
+ #[structopt(short = "s", long = "speed", default_value = "42")]
speed: f64,
- #[structopt(short = "o", long = "output", help = "Output file")]
+
+ /// Output file
+ #[structopt(short = "o", long = "output")]
output: String,
- #[structopt(short = "c", long = "car", help = "Number of car")]
+
+ /// Number of car
+ #[structopt(short = "c", long = "car")]
car: Option<i32>,
+
+ /// admin_level to consider
#[structopt(short = "l", long = "level")]
- #[structopt(help = "admin_level to consider")]
level: Vec<String>,
- #[structopt(name = "FILE", help = "Files to process")]
+
+ /// Files to process
+ #[structopt(name = "FILE")]
files: Vec<String>,
}
diff --git a/structopt-derive/src/lib.rs b/structopt-derive/src/lib.rs
--- a/structopt-derive/src/lib.rs
+++ b/structopt-derive/src/lib.rs
@@ -129,31 +129,64 @@ fn sub_type(t: &syn::Ty) -> Option<&syn::Ty> {
}
}
-fn extract_attrs<'a>(attrs: &'a [Attribute]) -> Box<Iterator<Item = (&'a Ident, &'a Lit)> + 'a> {
- let iter = attrs.iter()
+#[derive(Debug, Clone, Copy)]
+enum AttrSource { Struct, Field, }
+
+fn extract_attrs<'a>(attrs: &'a [Attribute], attr_source: AttrSource) -> Box<Iterator<Item = (Ident, Lit)> + 'a> {
+ let settings_attrs = attrs.iter()
.filter_map(|attr| match attr.value {
MetaItem::List(ref i, ref v) if i.as_ref() == "structopt" => Some(v),
_ => None,
}).flat_map(|v| v.iter().filter_map(|mi| match *mi {
- NestedMetaItem::MetaItem(MetaItem::NameValue(ref i, ref l)) => Some((i, l)),
+ NestedMetaItem::MetaItem(MetaItem::NameValue(ref i, ref l)) =>
+ Some((i.clone(), l.clone())),
_ => None,
}));
- Box::new(iter)
+
+ let doc_comments = attrs.iter()
+ .filter_map(move |attr| {
+ if let Attribute {
+ value: MetaItem::NameValue(ref name, Lit::Str(ref value, StrStyle::Cooked)),
+ is_sugared_doc: true,
+ ..
+ } = *attr {
+ if name != "doc" { return None; }
+ let text = value.trim_left_matches("//!")
+ .trim_left_matches("///")
+ .trim_left_matches("/*!")
+ .trim_left_matches("/**")
+ .trim();
+
+ // Clap's `App` has an `about` method to set a description,
+ // it's `Field`s have a `help` method instead.
+ if let AttrSource::Struct = attr_source {
+ Some(("about".into(), text.into()))
+ } else {
+ Some(("help".into(), text.into()))
+ }
+ } else {
+ None
+ }
+ });
+
+ Box::new(doc_comments.chain(settings_attrs))
}
-fn from_attr_or_env(attrs: &[(&Ident, &Lit)], key: &str, env: &str) -> Lit {
+fn from_attr_or_env(attrs: &[(Ident, Lit)], key: &str, env: &str) -> Lit {
let default = std::env::var(env).unwrap_or("".into());
attrs.iter()
- .find(|&&(i, _)| i.as_ref() == key)
- .map(|&(_, l)| l.clone())
+ .filter(|&&(ref i, _)| i.as_ref() == key)
+ .last()
+ .map(|&(_, ref l)| l.clone())
.unwrap_or_else(|| Lit::Str(default, StrStyle::Cooked))
}
fn gen_name(field: &Field) -> Ident {
- extract_attrs(&field.attrs)
- .find(|&(i, _)| i.as_ref() == "name")
- .and_then(|(_, l)| match *l {
- Lit::Str(ref s, _) => Some(Ident::new(s.clone())),
+ extract_attrs(&field.attrs, AttrSource::Field)
+ .filter(|&(ref i, _)| i.as_ref() == "name")
+ .last()
+ .and_then(|(_, ref l)| match l {
+ &Lit::Str(ref s, _) => Some(Ident::new(s.clone())),
_ => None,
})
.unwrap_or(field.ident.as_ref().unwrap().clone())
diff --git a/structopt-derive/src/lib.rs b/structopt-derive/src/lib.rs
--- a/structopt-derive/src/lib.rs
+++ b/structopt-derive/src/lib.rs
@@ -196,7 +229,7 @@ fn gen_from_clap(struct_name: &Ident, s: &[Field]) -> quote::Tokens {
}
fn gen_clap(ast: &DeriveInput, s: &[Field]) -> quote::Tokens {
- let struct_attrs: Vec<_> = extract_attrs(&ast.attrs).collect();
+ let struct_attrs: Vec<_> = extract_attrs(&ast.attrs, AttrSource::Struct).collect();
let name = from_attr_or_env(&struct_attrs, "name", "CARGO_PKG_NAME");
let version = from_attr_or_env(&struct_attrs, "version", "CARGO_PKG_VERSION");
let author = from_attr_or_env(&struct_attrs, "author", "CARGO_PKG_AUTHORS");
diff --git a/structopt-derive/src/lib.rs b/structopt-derive/src/lib.rs
--- a/structopt-derive/src/lib.rs
+++ b/structopt-derive/src/lib.rs
@@ -220,14 +253,14 @@ fn gen_clap(ast: &DeriveInput, s: &[Field]) -> quote::Tokens {
Ty::Option => quote!( .takes_value(true).multiple(false).#validator ),
Ty::Vec => quote!( .takes_value(true).multiple(true).#validator ),
Ty::Other => {
- let required = extract_attrs(&field.attrs)
- .find(|&(i, _)| i.as_ref() == "default_value")
+ let required = extract_attrs(&field.attrs, AttrSource::Field)
+ .find(|&(ref i, _)| i.as_ref() == "default_value")
.is_none();
quote!( .takes_value(true).multiple(false).required(#required).#validator )
},
};
- let from_attr = extract_attrs(&field.attrs)
- .filter(|&(i, _)| i.as_ref() != "name")
+ let from_attr = extract_attrs(&field.attrs, AttrSource::Field)
+ .filter(|&(ref i, _)| i.as_ref() != "name")
.map(|(i, l)| quote!(.#i(#l)));
quote!( .arg(_structopt::clap::Arg::with_name(stringify!(#name)) #modifier #(#from_attr)*) )
});
| diff --git /dev/null b/tests/doc-comments-help.rs
new file mode 100644
--- /dev/null
+++ b/tests/doc-comments-help.rs
@@ -0,0 +1,51 @@
+// Copyright (c) 2017 structopt Developers
+//
+// This work is free. You can redistribute it and/or modify it under
+// the terms of the Do What The Fuck You Want To Public License,
+// Version 2, as published by Sam Hocevar. See the COPYING file for
+// more details.
+
+extern crate structopt;
+#[macro_use]
+extern crate structopt_derive;
+
+use structopt::StructOpt;
+
+#[test]
+fn commets_intead_of_actual_help() {
+ /// Lorem ipsum
+ #[derive(StructOpt, PartialEq, Debug)]
+ struct LoremIpsum {
+ /// Fooify a bar
+ #[structopt(short = "f", long = "foo")]
+ foo: bool,
+ }
+
+ let mut output = Vec::new();
+ LoremIpsum::clap().write_long_help(&mut output).unwrap();
+ let output = String::from_utf8(output).unwrap();
+
+ assert!(output.contains("Lorem ipsum"));
+ assert!(output.contains("Fooify a bar"));
+}
+
+#[test]
+fn help_is_better_than_comments() {
+ /// Lorem ipsum
+ #[derive(StructOpt, PartialEq, Debug)]
+ #[structopt(name = "lorem-ipsum", about = "Dolor sit amet")]
+ struct LoremIpsum {
+ /// Fooify a bar
+ #[structopt(short = "f", long = "foo", help = "DO NOT PASS A BAR UNDER ANY CIRCUMSTANCES")]
+ foo: bool,
+ }
+
+ let mut output = Vec::new();
+ LoremIpsum::clap().write_long_help(&mut output).unwrap();
+ let output = String::from_utf8(output).unwrap();
+
+ println!("{}", output);
+ assert!(output.contains("Dolor sit amet"));
+ assert!(!output.contains("Lorem ipsum"));
+ assert!(output.contains("DO NOT PASS A BAR"));
+}
| Consider using field doc comment as help text by default
When no `help = "Foo"` argument is given in the `structopt` attribute on a field, you could use the field's doc comment if present. This might make the code a bit clearer while also reducing duplicating the text.
| 2017-06-15T21:19:07 | 0.0 | 52d2550d4e5bea30cf3a689abef208f654bff5c4 | [
"commets_intead_of_actual_help"
] | [
"help_is_better_than_comments",
"combined_flags",
"multiple_flag",
"unique_flag"
] | [] | [] | |
Peternator7/strum | 288 | Peternator7__strum-288 | [
"283"
] | fcb9841a744543b993eb1645db35dbda541e9a30 | diff --git a/strum_macros/src/helpers/type_props.rs b/strum_macros/src/helpers/type_props.rs
--- a/strum_macros/src/helpers/type_props.rs
+++ b/strum_macros/src/helpers/type_props.rs
@@ -21,6 +21,7 @@ pub struct StrumTypeProperties {
pub discriminant_others: Vec<TokenStream>,
pub discriminant_vis: Option<Visibility>,
pub use_phf: bool,
+ pub enum_repr: Option<TokenStream>,
}
impl HasTypeProperties for DeriveInput {
diff --git a/strum_macros/src/helpers/type_props.rs b/strum_macros/src/helpers/type_props.rs
--- a/strum_macros/src/helpers/type_props.rs
+++ b/strum_macros/src/helpers/type_props.rs
@@ -103,6 +104,17 @@ impl HasTypeProperties for DeriveInput {
}
}
+ let attrs = &self.attrs;
+ for attr in attrs {
+ if let Ok(list) = attr.meta.require_list() {
+ if let Some(ident) = list.path.get_ident() {
+ if ident == "repr" {
+ output.enum_repr = Some(list.tokens.clone())
+ }
+ }
+ }
+ }
+
Ok(output)
}
}
diff --git a/strum_macros/src/macros/enum_discriminants.rs b/strum_macros/src/macros/enum_discriminants.rs
--- a/strum_macros/src/macros/enum_discriminants.rs
+++ b/strum_macros/src/macros/enum_discriminants.rs
@@ -40,10 +40,16 @@ pub fn enum_discriminants_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {
// Pass through all other attributes
let pass_though_attributes = type_properties.discriminant_others;
+ let repr = type_properties.enum_repr.map(|repr| quote!(#[repr(#repr)]));
+
// Add the variants without fields, but exclude the `strum` meta item
let mut discriminants = Vec::new();
for variant in variants {
let ident = &variant.ident;
+ let discriminant = variant
+ .discriminant
+ .as_ref()
+ .map(|(_, expr)| quote!( = #expr));
// Don't copy across the "strum" meta attribute. Only passthrough the whitelisted
// attributes and proxy `#[strum_discriminants(...)]` attributes
diff --git a/strum_macros/src/macros/enum_discriminants.rs b/strum_macros/src/macros/enum_discriminants.rs
--- a/strum_macros/src/macros/enum_discriminants.rs
+++ b/strum_macros/src/macros/enum_discriminants.rs
@@ -81,7 +87,7 @@ pub fn enum_discriminants_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {
})
.collect::<Result<Vec<_>, _>>()?;
- discriminants.push(quote! { #(#attrs)* #ident });
+ discriminants.push(quote! { #(#attrs)* #ident #discriminant});
}
// Ideally:
diff --git a/strum_macros/src/macros/enum_discriminants.rs b/strum_macros/src/macros/enum_discriminants.rs
--- a/strum_macros/src/macros/enum_discriminants.rs
+++ b/strum_macros/src/macros/enum_discriminants.rs
@@ -153,6 +159,7 @@ pub fn enum_discriminants_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {
Ok(quote! {
/// Auto-generated discriminant enum variants
#derives
+ #repr
#(#[ #pass_though_attributes ])*
#discriminants_vis enum #discriminants_name {
#(#discriminants),*
diff --git a/strum_macros/src/macros/from_repr.rs b/strum_macros/src/macros/from_repr.rs
--- a/strum_macros/src/macros/from_repr.rs
+++ b/strum_macros/src/macros/from_repr.rs
@@ -1,62 +1,32 @@
use heck::ToShoutySnakeCase;
use proc_macro2::{Span, TokenStream};
-use quote::{format_ident, quote, ToTokens};
-use syn::{Data, DeriveInput, Fields, PathArguments, Type, TypeParen};
+use quote::{format_ident, quote};
+use syn::{Data, DeriveInput, Fields, Type};
-use crate::helpers::{non_enum_error, HasStrumVariantProperties};
+use crate::helpers::{non_enum_error, HasStrumVariantProperties, HasTypeProperties};
pub fn from_repr_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {
let name = &ast.ident;
let gen = &ast.generics;
let (impl_generics, ty_generics, where_clause) = gen.split_for_impl();
let vis = &ast.vis;
- let attrs = &ast.attrs;
let mut discriminant_type: Type = syn::parse("usize".parse().unwrap()).unwrap();
- for attr in attrs {
- let path = attr.path();
-
- let mut ts = if let Ok(ts) = attr
- .meta
- .require_list()
- .map(|metas| metas.to_token_stream().into_iter())
- {
- ts
- } else {
- continue;
- };
- // Discard the path
- let _ = ts.next();
- let tokens: TokenStream = ts.collect();
-
- if path.leading_colon.is_some() {
- continue;
- }
- if path.segments.len() != 1 {
- continue;
- }
- let segment = path.segments.first().unwrap();
- if segment.ident != "repr" {
- continue;
- }
- if segment.arguments != PathArguments::None {
- continue;
- }
- let typ_paren = match syn::parse2::<Type>(tokens.clone()) {
- Ok(Type::Paren(TypeParen { elem, .. })) => *elem,
- _ => continue,
- };
- let inner_path = match &typ_paren {
- Type::Path(t) => t,
- _ => continue,
- };
- if let Some(seg) = inner_path.path.segments.last() {
- for t in &[
- "u8", "u16", "u32", "u64", "usize", "i8", "i16", "i32", "i64", "isize",
- ] {
- if seg.ident == t {
- discriminant_type = typ_paren;
- break;
+ if let Some(type_path) = ast
+ .get_type_properties()
+ .ok()
+ .and_then(|tp| tp.enum_repr)
+ .and_then(|repr_ts| syn::parse2::<Type>(repr_ts).ok())
+ {
+ if let Type::Path(path) = type_path.clone() {
+ if let Some(seg) = path.path.segments.last() {
+ for t in &[
+ "u8", "u16", "u32", "u64", "usize", "i8", "i16", "i32", "i64", "isize",
+ ] {
+ if seg.ident == t {
+ discriminant_type = type_path;
+ break;
+ }
}
}
}
| diff --git a/strum_tests/tests/enum_discriminants.rs b/strum_tests/tests/enum_discriminants.rs
--- a/strum_tests/tests/enum_discriminants.rs
+++ b/strum_tests/tests/enum_discriminants.rs
@@ -1,6 +1,9 @@
-use enum_variant_type::EnumVariantType;
-use strum::{Display, EnumDiscriminants, EnumIter, EnumMessage, EnumString, IntoEnumIterator};
+use std::mem::{align_of, size_of};
+use enum_variant_type::EnumVariantType;
+use strum::{
+ Display, EnumDiscriminants, EnumIter, EnumMessage, EnumString, FromRepr, IntoEnumIterator,
+};
mod core {} // ensure macros call `::core`
diff --git a/strum_tests/tests/enum_discriminants.rs b/strum_tests/tests/enum_discriminants.rs
--- a/strum_tests/tests/enum_discriminants.rs
+++ b/strum_tests/tests/enum_discriminants.rs
@@ -305,3 +308,58 @@ fn crate_module_path_test() {
assert_eq!(expected, discriminants);
}
+
+#[allow(dead_code)]
+#[derive(EnumDiscriminants)]
+#[repr(u16)]
+enum WithReprUInt {
+ Variant0,
+ Variant1,
+}
+
+#[test]
+fn with_repr_uint() {
+ // These tests would not be proof of proper functioning on a 16 bit system
+ assert_eq!(size_of::<u16>(), size_of::<WithReprUIntDiscriminants>());
+ assert_eq!(
+ size_of::<WithReprUInt>(),
+ size_of::<WithReprUIntDiscriminants>()
+ )
+}
+
+#[allow(dead_code)]
+#[derive(EnumDiscriminants)]
+#[repr(align(16), u8)]
+enum WithReprAlign {
+ Variant0,
+ Variant1,
+}
+
+#[test]
+fn with_repr_align() {
+ assert_eq!(
+ align_of::<WithReprAlign>(),
+ align_of::<WithReprAlignDiscriminants>()
+ );
+ assert_eq!(16, align_of::<WithReprAlignDiscriminants>());
+}
+
+#[allow(dead_code)]
+#[derive(EnumDiscriminants)]
+#[strum_discriminants(derive(FromRepr))]
+enum WithExplicitDicriminantValue {
+ Variant0 = 42 + 100,
+ Variant1 = 11,
+}
+
+#[test]
+fn with_explicit_discriminant_value() {
+ assert_eq!(
+ WithExplicitDicriminantValueDiscriminants::from_repr(11),
+ Some(WithExplicitDicriminantValueDiscriminants::Variant1)
+ );
+ assert_eq!(
+ 142,
+ WithExplicitDicriminantValueDiscriminants::Variant0 as u8
+ );
+}
| EnumDiscriminants should inherit the repr of the enum they are derived from
The below example code will generate `VehicleDiscriminants` enum with `Car` and `Truck` variants but it will be `repr(usize)` instead of inheriting the `u8` repr of `Vehicle` additionally they will have the default enum representations (`Car = 0` and `Vehicle = 1` in this case)
```
// Custom discriminant tests
#[derive(EnumDiscriminants, Debug, PartialEq)]
#[strum_discriminants(derive(FromRepr))]
#[repr(u8)]
enum Vehicle {
Car(CarModel) = 1,
Truck(TruckModel) = 3,
}
```
I would have expected the above code to produce the following:
```
#[repr(u8)]
#[derive(FromRepr)]
enum VehicleDiscriminants {
Car = 1,
Truck = 3,
}
```
| It's a breaking change, but I agree it's certainly more inline with what would be expected. Happy to take a PR if you're interested. | 2023-08-01T22:43:37 | 0.25 | 597f8e941fb9dec5603f6892df4109b50f615160 | [
"with_explicit_discriminant_value",
"with_repr_uint",
"with_repr_align"
] | [
"arbitrary_attributes_pass_through",
"complicated_test",
"crate_module_path_test",
"from_ref_test",
"fields_test",
"filter_variant_attributes_pass_through",
"from_ref_test_complex",
"from_test",
"from_test_complex",
"override_visibility",
"split_attributes_test",
"renamed_test",
"with_defaul... | [] | [] |
Peternator7/strum | 268 | Peternator7__strum-268 | [
"267"
] | 025b1b5687d061bc4116f77578fa9fc2b3fc1f26 | diff --git a/strum_macros/src/macros/enum_count.rs b/strum_macros/src/macros/enum_count.rs
--- a/strum_macros/src/macros/enum_count.rs
+++ b/strum_macros/src/macros/enum_count.rs
@@ -2,11 +2,18 @@ use proc_macro2::TokenStream;
use quote::quote;
use syn::{Data, DeriveInput};
+use crate::helpers::variant_props::HasStrumVariantProperties;
use crate::helpers::{non_enum_error, HasTypeProperties};
pub(crate) fn enum_count_inner(ast: &DeriveInput) -> syn::Result<TokenStream> {
let n = match &ast.data {
- Data::Enum(v) => v.variants.len(),
+ Data::Enum(v) => v.variants.iter().try_fold(0usize, |acc, v| {
+ if v.get_variant_properties()?.disabled.is_none() {
+ Ok::<usize, syn::Error>(acc + 1usize)
+ } else {
+ Ok::<usize, syn::Error>(acc)
+ }
+ })?,
_ => return Err(non_enum_error()),
};
let type_properties = ast.get_type_properties()?;
| diff --git a/strum_tests/tests/enum_count.rs b/strum_tests/tests/enum_count.rs
--- a/strum_tests/tests/enum_count.rs
+++ b/strum_tests/tests/enum_count.rs
@@ -11,12 +11,29 @@ enum Week {
Saturday,
}
+#[allow(dead_code)]
+#[derive(Debug, EnumCount, EnumIter)]
+enum Pets {
+ Dog,
+ Cat,
+ Fish,
+ Bird,
+ #[strum(disabled)]
+ Hamster,
+}
+
#[test]
fn simple_test() {
assert_eq!(7, Week::COUNT);
assert_eq!(Week::iter().count(), Week::COUNT);
}
+#[test]
+fn disabled_test() {
+ assert_eq!(4, Pets::COUNT);
+ assert_eq!(Pets::iter().count(), Pets::COUNT);
+}
+
#[test]
fn crate_module_path_test() {
pub mod nested {
| Disabled variant still included in Count
The [additional attributes docs](https://docs.rs/strum/latest/strum/additional_attributes/index.html#attributes-on-variants) define the `disabled` attribute as:
> `disabled`: removes variant from generated code.
However, when deriving the `EnumCount` trait, the disabled variant is still counted in the final count.
Here is some example code:
```rust
use strum::EnumCount;
#[derive(strum::EnumCount)]
enum Fields {
Field0,
Field1,
Field2,
#[strum(disabled)]
Unknown,
}
fn main() {
println!("Count: {}", Fields::COUNT);
}
````
Expected output: `Count: 3`
Actual output: `Count: 4`
This issue seems similar to #244
| 2023-04-26T18:46:25 | 0.24 | 025b1b5687d061bc4116f77578fa9fc2b3fc1f26 | [
"disabled_test"
] | [
"crate_module_path_test",
"simple_test"
] | [] | [] | |
paritytech/substrate | 13,502 | paritytech__substrate-13502 | [
"13115"
] | 3e8fc43f624714b3637555823bb670719b52b59d | diff --git a/frame/message-queue/src/mock_helpers.rs b/frame/message-queue/src/mock_helpers.rs
--- a/frame/message-queue/src/mock_helpers.rs
+++ b/frame/message-queue/src/mock_helpers.rs
@@ -89,7 +89,7 @@ pub fn page<T: Config>(msg: &[u8]) -> PageOf<T> {
}
pub fn single_page_book<T: Config>() -> BookStateOf<T> {
- BookState { begin: 0, end: 1, count: 1, ..Default::default() }
+ BookState { begin: 0, end: 1, count: 1, message_count: 1, size: 1, ..Default::default() }
}
pub fn empty_book<T: Config>() -> BookStateOf<T> {
diff --git a/frame/message-queue/src/mock_helpers.rs b/frame/message-queue/src/mock_helpers.rs
--- a/frame/message-queue/src/mock_helpers.rs
+++ b/frame/message-queue/src/mock_helpers.rs
@@ -139,10 +139,8 @@ pub fn setup_bump_service_head<T: Config>(
current: <<T as Config>::MessageProcessor as ProcessMessage>::Origin,
next: <<T as Config>::MessageProcessor as ProcessMessage>::Origin,
) {
- let mut book = single_page_book::<T>();
- book.ready_neighbours = Some(Neighbours::<MessageOriginOf<T>> { prev: next.clone(), next });
- ServiceHead::<T>::put(¤t);
- BookStateFor::<T>::insert(¤t, &book);
+ crate::Pallet::<T>::enqueue_message(msg("1"), current);
+ crate::Pallet::<T>::enqueue_message(msg("1"), next);
}
/// Knit a queue into the ready-ring and write it back to storage.
diff --git a/frame/message-queue/src/mock_helpers.rs b/frame/message-queue/src/mock_helpers.rs
--- a/frame/message-queue/src/mock_helpers.rs
+++ b/frame/message-queue/src/mock_helpers.rs
@@ -164,11 +162,8 @@ pub fn unknit<T: Config>(o: &<<T as Config>::MessageProcessor as ProcessMessage>
pub fn build_ring<T: Config>(
queues: &[<<T as Config>::MessageProcessor as ProcessMessage>::Origin],
) {
- for queue in queues {
- BookStateFor::<T>::insert(queue, empty_book::<T>());
- }
- for queue in queues {
- knit::<T>(queue);
+ for queue in queues.iter() {
+ crate::Pallet::<T>::enqueue_message(msg("1"), queue.clone());
}
assert_ring::<T>(queues);
}
| diff --git a/frame/message-queue/src/integration_test.rs b/frame/message-queue/src/integration_test.rs
--- a/frame/message-queue/src/integration_test.rs
+++ b/frame/message-queue/src/integration_test.rs
@@ -22,8 +22,8 @@
use crate::{
mock::{
- new_test_ext, CountingMessageProcessor, IntoWeight, MockedWeightInfo, NumMessagesProcessed,
- YieldingQueues,
+ build_and_execute, CountingMessageProcessor, IntoWeight, MockedWeightInfo,
+ NumMessagesProcessed, YieldingQueues,
},
mock_helpers::MessageOrigin,
*,
diff --git a/frame/message-queue/src/integration_test.rs b/frame/message-queue/src/integration_test.rs
--- a/frame/message-queue/src/integration_test.rs
+++ b/frame/message-queue/src/integration_test.rs
@@ -123,7 +123,7 @@ fn stress_test_enqueue_and_service() {
let max_msg_len = MaxMessageLenOf::<Test>::get();
let mut rng = StdRng::seed_from_u64(42);
- new_test_ext::<Test>().execute_with(|| {
+ build_and_execute::<Test>(|| {
let mut msgs_remaining = 0;
for _ in 0..blocks {
// Start by enqueuing a large number of messages.
diff --git a/frame/message-queue/src/integration_test.rs b/frame/message-queue/src/integration_test.rs
--- a/frame/message-queue/src/integration_test.rs
+++ b/frame/message-queue/src/integration_test.rs
@@ -171,7 +171,7 @@ fn stress_test_queue_suspension() {
let max_msg_len = MaxMessageLenOf::<Test>::get();
let mut rng = StdRng::seed_from_u64(41);
- new_test_ext::<Test>().execute_with(|| {
+ build_and_execute::<Test>(|| {
let mut suspended = BTreeSet::<u32>::new();
let mut msgs_remaining = 0;
diff --git a/frame/message-queue/src/lib.rs b/frame/message-queue/src/lib.rs
--- a/frame/message-queue/src/lib.rs
+++ b/frame/message-queue/src/lib.rs
@@ -578,7 +578,12 @@ pub mod pallet {
}
}
- /// Check all assumptions about [`crate::Config`].
+ #[cfg(feature = "try-runtime")]
+ fn try_state(_: BlockNumberFor<T>) -> Result<(), sp_runtime::TryRuntimeError> {
+ Self::do_try_state()
+ }
+
+ /// Check all compile-time assumptions about [`crate::Config`].
fn integrity_test() {
assert!(!MaxMessageLenOf::<T>::get().is_zero(), "HeapSize too low");
}
diff --git a/frame/message-queue/src/lib.rs b/frame/message-queue/src/lib.rs
--- a/frame/message-queue/src/lib.rs
+++ b/frame/message-queue/src/lib.rs
@@ -1105,6 +1110,106 @@ impl<T: Config> Pallet<T> {
ItemExecutionStatus::Executed(is_processed)
}
+ /// Ensure the correctness of state of this pallet.
+ ///
+ /// # Assumptions-
+ ///
+ /// If `serviceHead` points to a ready Queue, then BookState of that Queue has:
+ ///
+ /// * `message_count` > 0
+ /// * `size` > 0
+ /// * `end` > `begin`
+ /// * Some(ready_neighbours)
+ /// * If `ready_neighbours.next` == self.origin, then `ready_neighbours.prev` == self.origin
+ /// (only queue in ring)
+ ///
+ /// For Pages(begin to end-1) in BookState:
+ ///
+ /// * `remaining` > 0
+ /// * `remaining_size` > 0
+ /// * `first` <= `last`
+ /// * Every page can be decoded into peek_* functions
+ #[cfg(any(test, feature = "try-runtime"))]
+ pub fn do_try_state() -> Result<(), sp_runtime::TryRuntimeError> {
+ // Checking memory corruption for BookStateFor
+ ensure!(
+ BookStateFor::<T>::iter_keys().count() == BookStateFor::<T>::iter_values().count(),
+ "Memory Corruption in BookStateFor"
+ );
+ // Checking memory corruption for Pages
+ ensure!(
+ Pages::<T>::iter_keys().count() == Pages::<T>::iter_values().count(),
+ "Memory Corruption in Pages"
+ );
+
+ // No state to check
+ if ServiceHead::<T>::get().is_none() {
+ return Ok(())
+ }
+
+ //loop around this origin
+ let starting_origin = ServiceHead::<T>::get().unwrap();
+
+ while let Some(head) = Self::bump_service_head(&mut WeightMeter::max_limit()) {
+ ensure!(
+ BookStateFor::<T>::contains_key(&head),
+ "Service head must point to an existing book"
+ );
+
+ let head_book_state = BookStateFor::<T>::get(&head);
+ ensure!(
+ head_book_state.message_count > 0,
+ "There must be some messages if in ReadyRing"
+ );
+ ensure!(head_book_state.size > 0, "There must be some message size if in ReadyRing");
+ ensure!(
+ head_book_state.end > head_book_state.begin,
+ "End > Begin if unprocessed messages exists"
+ );
+ ensure!(
+ head_book_state.ready_neighbours.is_some(),
+ "There must be neighbours if in ReadyRing"
+ );
+
+ if head_book_state.ready_neighbours.as_ref().unwrap().next == head {
+ ensure!(
+ head_book_state.ready_neighbours.as_ref().unwrap().prev == head,
+ "Can only happen if only queue in ReadyRing"
+ );
+ }
+
+ for page_index in head_book_state.begin..head_book_state.end {
+ let page = Pages::<T>::get(&head, page_index).unwrap();
+ let remaining_messages = page.remaining;
+ let mut counted_remaining_messages = 0;
+ ensure!(
+ remaining_messages > 0.into(),
+ "These must be some messages that have not been processed yet!"
+ );
+
+ for i in 0..u32::MAX {
+ if let Some((_, processed, _)) = page.peek_index(i as usize) {
+ if !processed {
+ counted_remaining_messages += 1;
+ }
+ } else {
+ break
+ }
+ }
+
+ ensure!(
+ remaining_messages == counted_remaining_messages.into(),
+ "Memory Corruption"
+ );
+ }
+
+ if head_book_state.ready_neighbours.as_ref().unwrap().next == starting_origin {
+ break
+ }
+ }
+ Ok(())
+ }
+
/// Print the pages in each queue and the messages in each page.
///
/// Processed messages are prefixed with a `*` and the current `begin`ning page with a `>`.
diff --git a/frame/message-queue/src/mock.rs b/frame/message-queue/src/mock.rs
--- a/frame/message-queue/src/mock.rs
+++ b/frame/message-queue/src/mock.rs
@@ -295,10 +295,15 @@ where
ext
}
-/// Run this closure in test externalities.
-pub fn test_closure<R>(f: impl FnOnce() -> R) -> R {
- let mut ext = new_test_ext::<Test>();
- ext.execute_with(f)
+/// Run the function pointer inside externalities and asserts the try_state hook at the end.
+pub fn build_and_execute<T: Config>(test: impl FnOnce() -> ())
+where
+ BlockNumberFor<T>: From<u32>,
+{
+ new_test_ext::<T>().execute_with(|| {
+ test();
+ MessageQueue::do_try_state().expect("All invariants must hold after a test");
+ });
}
/// Set the weight of a specific weight function.
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -27,22 +27,22 @@ use sp_core::blake2_256;
#[test]
fn mocked_weight_works() {
- test_closure(|| {
+ build_and_execute::<Test>(|| {
assert!(<Test as Config>::WeightInfo::service_queue_base().is_zero());
});
- test_closure(|| {
+ build_and_execute::<Test>(|| {
set_weight("service_queue_base", Weight::MAX);
assert_eq!(<Test as Config>::WeightInfo::service_queue_base(), Weight::MAX);
});
// The externalities reset it.
- test_closure(|| {
+ build_and_execute::<Test>(|| {
assert!(<Test as Config>::WeightInfo::service_queue_base().is_zero());
});
}
#[test]
fn enqueue_within_one_page_works() {
- test_closure(|| {
+ build_and_execute::<Test>(|| {
use MessageOrigin::*;
MessageQueue::enqueue_message(msg("a"), Here);
MessageQueue::enqueue_message(msg("b"), Here);
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -77,7 +77,7 @@ fn enqueue_within_one_page_works() {
#[test]
fn queue_priority_retains() {
- test_closure(|| {
+ build_and_execute::<Test>(|| {
use MessageOrigin::*;
assert_ring(&[]);
MessageQueue::enqueue_message(msg("a"), Everywhere(1));
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -108,11 +108,13 @@ fn queue_priority_retains() {
#[test]
fn queue_priority_reset_once_serviced() {
- test_closure(|| {
+ build_and_execute::<Test>(|| {
use MessageOrigin::*;
MessageQueue::enqueue_message(msg("a"), Everywhere(1));
MessageQueue::enqueue_message(msg("b"), Everywhere(2));
MessageQueue::enqueue_message(msg("c"), Everywhere(3));
+ MessageQueue::do_try_state().unwrap();
+ println!("{}", MessageQueue::debug_info());
// service head is 1, it will process a, leaving service head at 2. it also processes b and
// empties queue 2, so service head will end at 3.
assert_eq!(MessageQueue::service_queues(2.into_weight()), 2.into_weight());
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -135,7 +137,7 @@ fn queue_priority_reset_once_serviced() {
#[test]
fn service_queues_basic_works() {
use MessageOrigin::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
MessageQueue::enqueue_messages(vec![msg("a"), msg("ab"), msg("abc")].into_iter(), Here);
MessageQueue::enqueue_messages(vec![msg("x"), msg("xy"), msg("xyz")].into_iter(), There);
assert_eq!(QueueChanges::take(), vec![(Here, 3, 6), (There, 3, 6)]);
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -159,13 +161,14 @@ fn service_queues_basic_works() {
assert_eq!(MessageQueue::service_queues(Weight::MAX), 2.into_weight());
assert_eq!(MessagesProcessed::take(), vec![(vmsg("xy"), There), (vmsg("xyz"), There)]);
assert_eq!(QueueChanges::take(), vec![(There, 0, 0)]);
+ MessageQueue::do_try_state().unwrap();
});
}
#[test]
fn service_queues_failing_messages_works() {
use MessageOrigin::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
set_weight("service_page_item", 1.into_weight());
MessageQueue::enqueue_message(msg("badformat"), Here);
MessageQueue::enqueue_message(msg("corrupt"), Here);
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -211,7 +214,7 @@ fn service_queues_failing_messages_works() {
#[test]
fn service_queues_suspension_works() {
use MessageOrigin::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
MessageQueue::enqueue_messages(vec![msg("a"), msg("b"), msg("c")].into_iter(), Here);
MessageQueue::enqueue_messages(vec![msg("x"), msg("y"), msg("z")].into_iter(), There);
MessageQueue::enqueue_messages(
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -266,7 +269,7 @@ fn service_queues_suspension_works() {
#[test]
fn reap_page_permanent_overweight_works() {
use MessageOrigin::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
// Create 10 pages more than the stale limit.
let n = (MaxStale::get() + 10) as usize;
for _ in 0..n {
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -306,7 +309,7 @@ fn reaping_overweight_fails_properly() {
use MessageOrigin::*;
assert_eq!(MaxStale::get(), 2, "The stale limit is two");
- test_closure(|| {
+ build_and_execute::<Test>(|| {
// page 0
MessageQueue::enqueue_message(msg("weight=4"), Here);
MessageQueue::enqueue_message(msg("a"), Here);
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -376,7 +379,7 @@ fn reaping_overweight_fails_properly() {
#[test]
fn service_queue_bails() {
// Not enough weight for `service_queue_base`.
- test_closure(|| {
+ build_and_execute::<Test>(|| {
set_weight("service_queue_base", 2.into_weight());
let mut meter = WeightMeter::from_limit(1.into_weight());
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -384,7 +387,7 @@ fn service_queue_bails() {
assert!(meter.consumed().is_zero());
});
// Not enough weight for `ready_ring_unknit`.
- test_closure(|| {
+ build_and_execute::<Test>(|| {
set_weight("ready_ring_unknit", 2.into_weight());
let mut meter = WeightMeter::from_limit(1.into_weight());
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -392,7 +395,7 @@ fn service_queue_bails() {
assert!(meter.consumed().is_zero());
});
// Not enough weight for `service_queue_base` and `ready_ring_unknit`.
- test_closure(|| {
+ build_and_execute::<Test>(|| {
set_weight("service_queue_base", 2.into_weight());
set_weight("ready_ring_unknit", 2.into_weight());
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -407,7 +410,7 @@ fn service_page_works() {
use super::integration_test::Test; // Run with larger page size.
use MessageOrigin::*;
use PageExecutionStatus::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
set_weight("service_page_base_completion", 2.into_weight());
set_weight("service_page_item", 3.into_weight());
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -444,7 +447,7 @@ fn service_page_works() {
#[test]
fn service_page_bails() {
// Not enough weight for `service_page_base_completion`.
- test_closure(|| {
+ build_and_execute::<Test>(|| {
set_weight("service_page_base_completion", 2.into_weight());
let mut meter = WeightMeter::from_limit(1.into_weight());
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -461,7 +464,7 @@ fn service_page_bails() {
assert!(meter.consumed().is_zero());
});
// Not enough weight for `service_page_base_no_completion`.
- test_closure(|| {
+ build_and_execute::<Test>(|| {
set_weight("service_page_base_no_completion", 2.into_weight());
let mut meter = WeightMeter::from_limit(1.into_weight());
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -481,7 +484,7 @@ fn service_page_bails() {
#[test]
fn service_page_item_bails() {
- test_closure(|| {
+ build_and_execute::<Test>(|| {
let _guard = StorageNoopGuard::default();
let (mut page, _) = full_page::<Test>();
let mut weight = WeightMeter::from_limit(10.into_weight());
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -508,7 +511,7 @@ fn service_page_suspension_works() {
use MessageOrigin::*;
use PageExecutionStatus::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
let (page, mut msgs) = full_page::<Test>();
assert!(msgs >= 10, "pre-condition: need at least 10 msgs per page");
let mut book = book_for::<Test>(&page);
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -556,14 +559,8 @@ fn service_page_suspension_works() {
#[test]
fn bump_service_head_works() {
use MessageOrigin::*;
- test_closure(|| {
- // Create a ready ring with three queues.
- BookStateFor::<Test>::insert(Here, empty_book::<Test>());
- knit(&Here);
- BookStateFor::<Test>::insert(There, empty_book::<Test>());
- knit(&There);
- BookStateFor::<Test>::insert(Everywhere(0), empty_book::<Test>());
- knit(&Everywhere(0));
+ build_and_execute::<Test>(|| {
+ build_triple_ring();
// Bump 99 times.
for i in 0..99 {
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -579,9 +576,9 @@ fn bump_service_head_works() {
/// `bump_service_head` does nothing when called with an insufficient weight limit.
#[test]
fn bump_service_head_bails() {
- test_closure(|| {
+ build_and_execute::<Test>(|| {
set_weight("bump_service_head", 2.into_weight());
- setup_bump_service_head::<Test>(0.into(), 10.into());
+ setup_bump_service_head::<Test>(0.into(), 1.into());
let _guard = StorageNoopGuard::default();
let mut meter = WeightMeter::from_limit(1.into_weight());
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -592,7 +589,7 @@ fn bump_service_head_bails() {
#[test]
fn bump_service_head_trivial_works() {
- test_closure(|| {
+ build_and_execute::<Test>(|| {
set_weight("bump_service_head", 2.into_weight());
let mut meter = WeightMeter::max_limit();
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -605,22 +602,15 @@ fn bump_service_head_trivial_works() {
assert_eq!(ServiceHead::<Test>::get().unwrap(), 1.into(), "Bumped the head");
assert_eq!(meter.consumed(), 4.into_weight());
- assert_eq!(MessageQueue::bump_service_head(&mut meter), None, "Cannot bump");
+ assert_eq!(MessageQueue::bump_service_head(&mut meter), Some(1.into()), "Its a ring");
assert_eq!(meter.consumed(), 6.into_weight());
});
}
#[test]
fn bump_service_head_no_head_noops() {
- use MessageOrigin::*;
- test_closure(|| {
- // Create a ready ring with three queues.
- BookStateFor::<Test>::insert(Here, empty_book::<Test>());
- knit(&Here);
- BookStateFor::<Test>::insert(There, empty_book::<Test>());
- knit(&There);
- BookStateFor::<Test>::insert(Everywhere(0), empty_book::<Test>());
- knit(&Everywhere(0));
+ build_and_execute::<Test>(|| {
+ build_triple_ring();
// But remove the service head.
ServiceHead::<Test>::kill();
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -632,7 +622,7 @@ fn bump_service_head_no_head_noops() {
#[test]
fn service_page_item_consumes_correct_weight() {
- test_closure(|| {
+ build_and_execute::<Test>(|| {
let mut page = page::<Test>(b"weight=3");
let mut weight = WeightMeter::from_limit(10.into_weight());
let overweight_limit = 0.into_weight();
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -656,7 +646,7 @@ fn service_page_item_consumes_correct_weight() {
/// `service_page_item` skips a permanently `Overweight` message and marks it as `unprocessed`.
#[test]
fn service_page_item_skips_perm_overweight_message() {
- test_closure(|| {
+ build_and_execute::<Test>(|| {
let mut page = page::<Test>(b"TooMuch");
let mut weight = WeightMeter::from_limit(2.into_weight());
let overweight_limit = 0.into_weight();
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -695,7 +685,7 @@ fn service_page_item_skips_perm_overweight_message() {
#[test]
fn peek_index_works() {
use super::integration_test::Test; // Run with larger page size.
- test_closure(|| {
+ build_and_execute::<Test>(|| {
// Fill a page with messages.
let (mut page, msgs) = full_page::<Test>();
let msg_enc_len = ItemHeader::<<Test as Config>::Size>::max_encoded_len() + 4;
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -716,7 +706,7 @@ fn peek_index_works() {
#[test]
fn peek_first_and_skip_first_works() {
use super::integration_test::Test; // Run with larger page size.
- test_closure(|| {
+ build_and_execute::<Test>(|| {
// Fill a page with messages.
let (mut page, msgs) = full_page::<Test>();
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -739,7 +729,7 @@ fn peek_first_and_skip_first_works() {
#[test]
fn note_processed_at_pos_works() {
use super::integration_test::Test; // Run with larger page size.
- test_closure(|| {
+ build_and_execute::<Test>(|| {
let (mut page, msgs) = full_page::<Test>();
for i in 0..msgs {
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -775,7 +765,7 @@ fn note_processed_at_pos_idempotent() {
#[test]
fn is_complete_works() {
use super::integration_test::Test; // Run with larger page size.
- test_closure(|| {
+ build_and_execute::<Test>(|| {
let (mut page, msgs) = full_page::<Test>();
assert!(msgs > 3, "Boring");
let msg_enc_len = ItemHeader::<<Test as Config>::Size>::max_encoded_len() + 4;
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -931,8 +921,9 @@ fn page_from_message_max_len_works() {
#[test]
fn sweep_queue_works() {
use MessageOrigin::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
build_triple_ring();
+ QueueChanges::take();
let book = BookStateFor::<Test>::get(Here);
assert!(book.begin != book.end);
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -967,9 +958,8 @@ fn sweep_queue_works() {
#[test]
fn sweep_queue_wraps_works() {
use MessageOrigin::*;
- test_closure(|| {
- BookStateFor::<Test>::insert(Here, empty_book::<Test>());
- knit(&Here);
+ build_and_execute::<Test>(|| {
+ build_ring::<Test>(&[Here]);
MessageQueue::sweep_queue(Here);
let book = BookStateFor::<Test>::get(Here);
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -980,14 +970,14 @@ fn sweep_queue_wraps_works() {
#[test]
fn sweep_queue_invalid_noops() {
use MessageOrigin::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
assert_storage_noop!(MessageQueue::sweep_queue(Here));
});
}
#[test]
fn footprint_works() {
- test_closure(|| {
+ build_and_execute::<Test>(|| {
let origin = MessageOrigin::Here;
let (page, msgs) = full_page::<Test>();
let book = book_for::<Test>(&page);
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -1005,7 +995,7 @@ fn footprint_works() {
/// The footprint of an invalid queue is the default footprint.
#[test]
fn footprint_invalid_works() {
- test_closure(|| {
+ build_and_execute::<Test>(|| {
let origin = MessageOrigin::Here;
assert_eq!(MessageQueue::footprint(origin), Default::default());
})
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -1015,7 +1005,7 @@ fn footprint_invalid_works() {
#[test]
fn footprint_on_swept_works() {
use MessageOrigin::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
let mut book = empty_book::<Test>();
book.message_count = 3;
book.size = 10;
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -1031,7 +1021,7 @@ fn footprint_on_swept_works() {
#[test]
fn execute_overweight_works() {
- test_closure(|| {
+ build_and_execute::<Test>(|| {
set_weight("bump_service_head", 1.into_weight());
set_weight("service_queue_base", 1.into_weight());
set_weight("service_page_base_completion", 1.into_weight());
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -1091,7 +1081,7 @@ fn execute_overweight_works() {
fn permanently_overweight_book_unknits() {
use MessageOrigin::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
set_weight("bump_service_head", 1.into_weight());
set_weight("service_queue_base", 1.into_weight());
set_weight("service_page_base_completion", 1.into_weight());
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -1128,7 +1118,7 @@ fn permanently_overweight_book_unknits() {
fn permanently_overweight_book_unknits_multiple() {
use MessageOrigin::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
set_weight("bump_service_head", 1.into_weight());
set_weight("service_queue_base", 1.into_weight());
set_weight("service_page_base_completion", 1.into_weight());
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -1167,7 +1157,7 @@ fn permanently_overweight_book_unknits_multiple() {
fn ready_but_empty_does_not_panic() {
use MessageOrigin::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
BookStateFor::<Test>::insert(Here, empty_book::<Test>());
BookStateFor::<Test>::insert(There, empty_book::<Test>());
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -1187,7 +1177,7 @@ fn ready_but_empty_does_not_panic() {
fn ready_but_perm_overweight_does_not_panic() {
use MessageOrigin::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
MessageQueue::enqueue_message(msg("weight=9"), Here);
assert_eq!(MessageQueue::service_queues(8.into_weight()), 0.into_weight());
assert_ring(&[]);
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -1207,7 +1197,7 @@ fn ready_but_perm_overweight_does_not_panic() {
fn ready_ring_knit_basic_works() {
use MessageOrigin::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
BookStateFor::<Test>::insert(Here, empty_book::<Test>());
for i in 0..10 {
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -1227,7 +1217,7 @@ fn ready_ring_knit_basic_works() {
fn ready_ring_knit_and_unknit_works() {
use MessageOrigin::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
// Place three queues into the storage.
BookStateFor::<Test>::insert(Here, empty_book::<Test>());
BookStateFor::<Test>::insert(There, empty_book::<Test>());
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -1261,7 +1251,7 @@ fn enqueue_message_works() {
let max_msg_per_page = <Test as Config>::HeapSize::get() as u64 /
(ItemHeader::<<Test as Config>::Size>::max_encoded_len() as u64 + 1);
- test_closure(|| {
+ build_and_execute::<Test>(|| {
// Enqueue messages which should fill three pages.
let n = max_msg_per_page * 3;
for i in 1..=n {
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -1291,7 +1281,7 @@ fn enqueue_messages_works() {
let max_msg_per_page = <Test as Config>::HeapSize::get() as u64 /
(ItemHeader::<<Test as Config>::Size>::max_encoded_len() as u64 + 1);
- test_closure(|| {
+ build_and_execute::<Test>(|| {
// Enqueue messages which should fill three pages.
let n = max_msg_per_page * 3;
let msgs = vec![msg("a"); n as usize];
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -1320,7 +1310,7 @@ fn enqueue_messages_works() {
#[test]
fn service_queues_suspend_works() {
use MessageOrigin::*;
- test_closure(|| {
+ build_and_execute::<Test>(|| {
MessageQueue::enqueue_messages(vec![msg("a"), msg("ab"), msg("abc")].into_iter(), Here);
MessageQueue::enqueue_messages(vec![msg("x"), msg("xy"), msg("xyz")].into_iter(), There);
assert_eq!(QueueChanges::take(), vec![(Here, 3, 6), (There, 3, 6)]);
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -1387,7 +1377,7 @@ fn service_queues_suspend_works() {
/// Tests that manual overweight execution on a suspended queue errors with `QueueSuspended`.
#[test]
fn execute_overweight_respects_suspension() {
- test_closure(|| {
+ build_and_execute::<Test>(|| {
let origin = MessageOrigin::Here;
MessageQueue::enqueue_message(msg("weight=5"), origin);
// Mark the message as permanently overweight.
diff --git a/frame/message-queue/src/tests.rs b/frame/message-queue/src/tests.rs
--- a/frame/message-queue/src/tests.rs
+++ b/frame/message-queue/src/tests.rs
@@ -1433,7 +1423,7 @@ fn execute_overweight_respects_suspension() {
#[test]
fn service_queue_suspension_ready_ring_works() {
- test_closure(|| {
+ build_and_execute::<Test>(|| {
let origin = MessageOrigin::Here;
PausedQueues::set(vec![origin]);
MessageQueue::enqueue_message(msg("weight=5"), origin);
| Add `TryState` hook for `MessageQueue`
The `MessageQueue` pallet has quite a few storage assumptions which could be checked in a `try_state` hook.
:point_right: *Mentor* issues are meant for new-comers. Please ask before picking them up.
| i am a newcomer, can i pick this up?
Yes @mahmudsudo !
Please familiarize yourself with the `try_state` hook implementations that we have in other pallets and how to write them.
For example in the [bags-list](https://github.com/paritytech/substrate/blob/89498c0d756c649d71e82340bee44fcc7cfe8037/frame/bags-list/src/lib.rs#L271) pallet or in [nomination-pools](https://github.com/paritytech/substrate/blob/911f65be7949342e12601cc3015d5c47e9339de4/frame/nomination-pools/src/lib.rs#L2126).
Then take a look at the [message-queue](https://github.com/paritytech/substrate/blob/b3d9f3c57e7f799d49442e3456614431f2d0e51a/frame/message-queue/src/lib.rs#L827) and especially all the `debug_assert!`+`defensive!` in it. Ideally we check all these invariants in the `try_state` function.
So for example check that every page can be decoded into `peek_*` functions etc.
Hey @mahmudsudo , are you still working on this?
If not, I would like to take this over.
Looks stake, please go ahead @gitofdeepanshu
Hey @ggwpez
I have written the function taking following assumptions:
If `serviceHead` points to a ready Queue (i.e. has some value), then BookState of that Queue has:
* `message_count` > 0
* `size` > 0
* `end` > begin
* Some(ready_neighbours)
* If `ready_neighbours.next` == self.origin, then `ready_neighbours.prev` == self.origin (only queue in ring)
For a particular BookState of Queue present in ReadyRing, all pages from `begin` to `end`-1 should have:
* `remaining` > 0
* `remaining_size` > 0
* `first` <= `last`
* Every page can be decoded into peek_* functions
If all these assumptions are correct then all Queue present in ReadyRing must have atleast 1 message. (Is this correct?)
But there are some tests which add Queue with empty book into ReadyRing which causes try_state function to panic. Should I leave those tests or is there something wrong with my assumptions?
> If all these assumptions are correct then all Queue present in ReadyRing must have atleast 1 message. (Is this correct?)
> But there are some tests which add Queue with empty book into ReadyRing which causes try_state function to panic. Should I leave those tests or is there something wrong with my assumptions?
Looks good so far. Please open a Merge request with your changes. I will then go over them in more details.
But AFAIK yes; there should be no empty books in the ready ring. | 2023-03-01T20:02:06 | 0.0 | 3e8fc43f624714b3637555823bb670719b52b59d | [
"tests::bump_service_head_trivial_works",
"tests::bump_service_head_bails",
"tests::bump_service_head_works"
] | [
"integration_test::__construct_runtime_integrity_test::runtime_integrity_tests",
"mock::__construct_runtime_integrity_test::runtime_integrity_tests",
"tests::execute_overweight_respects_suspension",
"tests::bump_service_head_no_head_noops",
"tests::execute_overweight_works",
"tests::note_processed_at_pos_... | [] | [] |
FuelLabs/sway | 4,810 | FuelLabs__sway-4810 | [
"4482"
] | 8600ccec85647508bb0bce6147bb2de5574cbaa4 | diff --git a/sway-core/src/language/parsed/expression/mod.rs b/sway-core/src/language/parsed/expression/mod.rs
--- a/sway-core/src/language/parsed/expression/mod.rs
+++ b/sway-core/src/language/parsed/expression/mod.rs
@@ -142,6 +142,7 @@ pub struct ArrayIndexExpression {
#[derive(Debug, Clone)]
pub struct StorageAccessExpression {
pub field_names: Vec<Ident>,
+ pub storage_keyword_span: Span,
}
#[derive(Debug, Clone)]
diff --git a/sway-core/src/language/ty/declaration/storage.rs b/sway-core/src/language/ty/declaration/storage.rs
--- a/sway-core/src/language/ty/declaration/storage.rs
+++ b/sway-core/src/language/ty/declaration/storage.rs
@@ -58,6 +58,7 @@ impl TyStorageDecl {
decl_engine: &DeclEngine,
fields: Vec<Ident>,
storage_fields: &[TyStorageField],
+ storage_keyword_span: Span,
) -> CompileResult<(TyStorageAccess, TypeId)> {
let mut errors = vec![];
let warnings = vec![];
diff --git a/sway-core/src/language/ty/declaration/storage.rs b/sway-core/src/language/ty/declaration/storage.rs
--- a/sway-core/src/language/ty/declaration/storage.rs
+++ b/sway-core/src/language/ty/declaration/storage.rs
@@ -137,6 +138,7 @@ impl TyStorageDecl {
TyStorageAccess {
fields: type_checked_buf,
ix,
+ storage_keyword_span,
},
return_type,
),
diff --git a/sway-core/src/language/ty/expression/storage.rs b/sway-core/src/language/ty/expression/storage.rs
--- a/sway-core/src/language/ty/expression/storage.rs
+++ b/sway-core/src/language/ty/expression/storage.rs
@@ -9,6 +9,7 @@ use crate::{engine_threading::*, type_system::TypeId};
pub struct TyStorageAccess {
pub fields: Vec<TyStorageAccessDescriptor>,
pub(crate) ix: StateIndex,
+ pub storage_keyword_span: Span,
}
impl EqWithEngines for TyStorageAccess {}
diff --git a/sway-core/src/language/ty/expression/storage.rs b/sway-core/src/language/ty/expression/storage.rs
--- a/sway-core/src/language/ty/expression/storage.rs
+++ b/sway-core/src/language/ty/expression/storage.rs
@@ -22,9 +23,14 @@ impl PartialEqWithEngines for TyStorageAccess {
impl HashWithEngines for TyStorageAccess {
fn hash<H: Hasher>(&self, state: &mut H, engines: &Engines) {
- let TyStorageAccess { fields, ix } = self;
+ let TyStorageAccess {
+ fields,
+ ix,
+ storage_keyword_span,
+ } = self;
fields.hash(state, engines);
ix.hash(state);
+ storage_keyword_span.hash(state);
}
}
diff --git a/sway-core/src/language/ty/expression/storage.rs b/sway-core/src/language/ty/expression/storage.rs
--- a/sway-core/src/language/ty/expression/storage.rs
+++ b/sway-core/src/language/ty/expression/storage.rs
@@ -48,7 +54,7 @@ impl TyStorageAccess {
#[derive(Clone, Debug)]
pub struct TyStorageAccessDescriptor {
pub name: Ident,
- pub(crate) type_id: TypeId,
+ pub type_id: TypeId,
pub(crate) span: Span,
}
diff --git a/sway-core/src/semantic_analysis/ast_node/expression/typed_expression.rs b/sway-core/src/semantic_analysis/ast_node/expression/typed_expression.rs
--- a/sway-core/src/semantic_analysis/ast_node/expression/typed_expression.rs
+++ b/sway-core/src/semantic_analysis/ast_node/expression/typed_expression.rs
@@ -259,12 +259,15 @@ impl ty::TyExpression {
.with_help_text("");
Self::type_check_array_index(ctx, *prefix, *index, span)
}
- ExpressionKind::StorageAccess(StorageAccessExpression { field_names }) => {
+ ExpressionKind::StorageAccess(StorageAccessExpression {
+ field_names,
+ storage_keyword_span,
+ }) => {
let ctx = ctx
.by_ref()
.with_type_annotation(type_engine.insert(engines, TypeInfo::Unknown))
.with_help_text("");
- Self::type_check_storage_access(ctx, field_names, &span)
+ Self::type_check_storage_access(ctx, field_names, storage_keyword_span, &span)
}
ExpressionKind::IntrinsicFunction(IntrinsicFunctionExpression {
kind_binding,
diff --git a/sway-core/src/semantic_analysis/ast_node/expression/typed_expression.rs b/sway-core/src/semantic_analysis/ast_node/expression/typed_expression.rs
--- a/sway-core/src/semantic_analysis/ast_node/expression/typed_expression.rs
+++ b/sway-core/src/semantic_analysis/ast_node/expression/typed_expression.rs
@@ -896,6 +899,7 @@ impl ty::TyExpression {
fn type_check_storage_access(
ctx: TypeCheckContext,
checkee: Vec<Ident>,
+ storage_keyword_span: Span,
span: &Span,
) -> CompileResult<Self> {
let mut warnings = vec![];
diff --git a/sway-core/src/semantic_analysis/ast_node/expression/typed_expression.rs b/sway-core/src/semantic_analysis/ast_node/expression/typed_expression.rs
--- a/sway-core/src/semantic_analysis/ast_node/expression/typed_expression.rs
+++ b/sway-core/src/semantic_analysis/ast_node/expression/typed_expression.rs
@@ -919,8 +923,12 @@ impl ty::TyExpression {
// Do all namespace checking here!
let (storage_access, mut access_type) = check!(
- ctx.namespace
- .apply_storage_load(ctx.engines, checkee, &storage_fields,),
+ ctx.namespace.apply_storage_load(
+ ctx.engines,
+ checkee,
+ &storage_fields,
+ storage_keyword_span
+ ),
return err(warnings, errors),
warnings,
errors
diff --git a/sway-core/src/semantic_analysis/namespace/items.rs b/sway-core/src/semantic_analysis/namespace/items.rs
--- a/sway-core/src/semantic_analysis/namespace/items.rs
+++ b/sway-core/src/semantic_analysis/namespace/items.rs
@@ -59,6 +59,7 @@ impl Items {
engines: &Engines,
fields: Vec<Ident>,
storage_fields: &[ty::TyStorageField],
+ storage_keyword_span: Span,
) -> CompileResult<(ty::TyStorageAccess, TypeId)> {
let warnings = vec![];
let mut errors = vec![];
diff --git a/sway-core/src/semantic_analysis/namespace/items.rs b/sway-core/src/semantic_analysis/namespace/items.rs
--- a/sway-core/src/semantic_analysis/namespace/items.rs
+++ b/sway-core/src/semantic_analysis/namespace/items.rs
@@ -67,7 +68,13 @@ impl Items {
match self.declared_storage {
Some(ref decl_ref) => {
let storage = decl_engine.get_storage(&decl_ref.id().clone());
- storage.apply_storage_load(type_engine, decl_engine, fields, storage_fields)
+ storage.apply_storage_load(
+ type_engine,
+ decl_engine,
+ fields,
+ storage_fields,
+ storage_keyword_span,
+ )
}
None => {
errors.push(CompileError::NoDeclaredStorage {
diff --git a/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs b/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
--- a/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
+++ b/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
@@ -1988,7 +1988,7 @@ fn expr_to_expression(
// For example, `storage.foo.bar` would result in `Some([foo, bar])`.
let mut idents = vec![&name];
let mut base = &*target;
- let storage_access_field_names = loop {
+ let kind = loop {
match base {
// Parent is a projection itself, so check its parent.
Expr::FieldProjection { target, name, .. } => {
diff --git a/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs b/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
--- a/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
+++ b/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
@@ -2002,22 +2002,22 @@ fn expr_to_expression(
&& path_expr.prefix.generics_opt.is_none()
&& path_expr.prefix.name.as_str() == "storage" =>
{
- break Some(idents)
+ break ExpressionKind::StorageAccess(StorageAccessExpression {
+ field_names: idents.into_iter().rev().cloned().collect(),
+ storage_keyword_span: path_expr.prefix.name.span(),
+ })
}
// We'll never find `storage`, so stop here.
- _ => break None,
+ _ => {
+ break ExpressionKind::Subfield(SubfieldExpression {
+ prefix: Box::new(expr_to_expression(
+ context, handler, engines, *target,
+ )?),
+ field_to_access: name,
+ })
+ }
}
};
-
- let kind = match storage_access_field_names {
- Some(field_names) => ExpressionKind::StorageAccess(StorageAccessExpression {
- field_names: field_names.into_iter().rev().cloned().collect(),
- }),
- None => ExpressionKind::Subfield(SubfieldExpression {
- prefix: Box::new(expr_to_expression(context, handler, engines, *target)?),
- field_to_access: name,
- }),
- };
Expression { kind, span }
}
Expr::TupleFieldProjection {
diff --git a/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs b/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
--- a/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
+++ b/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
@@ -3768,7 +3768,7 @@ fn assignable_to_expression(
Assignable::FieldProjection { target, name, .. } => {
let mut idents = vec![&name];
let mut base = &*target;
- let storage_access_field_names_opt = loop {
+ let (storage_access_field_names_opt, storage_name_opt) = loop {
match base {
Assignable::FieldProjection { target, name, .. } => {
idents.push(name);
diff --git a/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs b/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
--- a/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
+++ b/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
@@ -3776,24 +3776,25 @@ fn assignable_to_expression(
}
Assignable::Var(name) => {
if name.as_str() == "storage" {
- break Some(idents);
+ break (Some(idents), Some(name.clone()));
}
- break None;
+ break (None, None);
}
- _ => break None,
+ _ => break (None, None),
}
};
- match storage_access_field_names_opt {
- Some(field_names) => {
+ match (storage_access_field_names_opt, storage_name_opt) {
+ (Some(field_names), Some(storage_name)) => {
let field_names = field_names.into_iter().rev().cloned().collect();
Expression {
kind: ExpressionKind::StorageAccess(StorageAccessExpression {
field_names,
+ storage_keyword_span: storage_name.span(),
}),
span,
}
}
- None => Expression {
+ _ => Expression {
kind: ExpressionKind::Subfield(SubfieldExpression {
prefix: Box::new(assignable_to_expression(
context, handler, engines, *target,
diff --git a/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs b/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
--- a/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
+++ b/sway-core/src/transform/to_parsed_lang/convert_parse_tree.rs
@@ -3840,6 +3841,7 @@ fn assignable_to_reassignment_target(
) -> Result<ReassignmentTarget, ErrorEmitted> {
let mut idents = Vec::new();
let mut base = &assignable;
+
loop {
match base {
Assignable::FieldProjection { target, name, .. } => {
diff --git a/sway-lsp/src/core/token.rs b/sway-lsp/src/core/token.rs
--- a/sway-lsp/src/core/token.rs
+++ b/sway-lsp/src/core/token.rs
@@ -69,7 +69,8 @@ pub enum TypedAstToken {
TypedTraitFn(ty::TyTraitFn),
TypedSupertrait(Supertrait),
TypedStorageField(ty::TyStorageField),
- TyStorageAccessDescriptor(ty::TyStorageAccessDescriptor),
+ TypedStorageAccess(ty::TyStorageAccess),
+ TypedStorageAccessDescriptor(ty::TyStorageAccessDescriptor),
TypedReassignment(ty::TyReassignment),
TypedArgument(TypeArgument),
TypedParameter(TypeParameter),
diff --git a/sway-lsp/src/traverse/parsed_tree.rs b/sway-lsp/src/traverse/parsed_tree.rs
--- a/sway-lsp/src/traverse/parsed_tree.rs
+++ b/sway-lsp/src/traverse/parsed_tree.rs
@@ -292,7 +292,16 @@ impl Parse for Expression {
prefix.parse(ctx);
index.parse(ctx);
}
- ExpressionKind::StorageAccess(StorageAccessExpression { field_names }) => {
+ ExpressionKind::StorageAccess(StorageAccessExpression {
+ field_names,
+ storage_keyword_span,
+ }) => {
+ let storage_ident = Ident::new(storage_keyword_span.clone());
+ ctx.tokens.insert(
+ to_ident_key(&storage_ident),
+ Token::from_parsed(AstToken::Ident(storage_ident), SymbolKind::Unknown),
+ );
+
field_names.iter().for_each(|field_name| {
ctx.tokens.insert(
to_ident_key(field_name),
diff --git a/sway-lsp/src/traverse/typed_tree.rs b/sway-lsp/src/traverse/typed_tree.rs
--- a/sway-lsp/src/traverse/typed_tree.rs
+++ b/sway-lsp/src/traverse/typed_tree.rs
@@ -470,15 +470,77 @@ impl Parse for ty::TyExpression {
address.parse(ctx);
}
ty::TyExpressionVariant::StorageAccess(storage_access) => {
- storage_access.fields.iter().for_each(|field| {
+ // collect storage keyword
+ if let Some(mut token) = ctx
+ .tokens
+ .try_get_mut(&to_ident_key(&Ident::new(
+ storage_access.storage_keyword_span.clone(),
+ )))
+ .try_unwrap()
+ {
+ token.typed = Some(TypedAstToken::TypedStorageAccess(storage_access.clone()));
+ if let Some(storage) = ctx.namespace.get_declared_storage(ctx.engines.de()) {
+ token.type_def = Some(TypeDefinition::Ident(storage.storage_keyword));
+ }
+ }
+ if let Some((head_field, tail_fields)) = storage_access.fields.split_first() {
+ // collect the first ident as a field of the storage definition
if let Some(mut token) = ctx
.tokens
- .try_get_mut(&to_ident_key(&field.name))
+ .try_get_mut(&to_ident_key(&head_field.name))
.try_unwrap()
{
- token.typed = Some(TypedAstToken::TyStorageAccessDescriptor(field.clone()));
+ token.typed = Some(TypedAstToken::TypedStorageAccessDescriptor(
+ head_field.clone(),
+ ));
+ if let Some(storage_field) = ctx
+ .namespace
+ .get_declared_storage(ctx.engines.de())
+ .and_then(|storage| {
+ storage
+ .fields
+ .into_iter()
+ .find(|f| f.name.as_str() == head_field.name.as_str())
+ })
+ {
+ token.type_def = Some(TypeDefinition::Ident(storage_field.name));
+ }
}
- });
+ // collect the rest of the idents as fields of their respective types
+ for (field, container_type_id) in tail_fields
+ .iter()
+ .zip(storage_access.fields.iter().map(|f| f.type_id))
+ {
+ if let Some(mut token) = ctx
+ .tokens
+ .try_get_mut(&to_ident_key(&field.name))
+ .try_unwrap()
+ {
+ token.typed = Some(TypedAstToken::Ident(field.name.clone()));
+ match ctx.engines.te().get(container_type_id) {
+ TypeInfo::Struct(decl_ref) => {
+ if let Some(field_name) = ctx
+ .engines
+ .de()
+ .get_struct(&decl_ref)
+ .fields
+ .iter()
+ .find(|struct_field| {
+ // find the corresponding field in the containing type declaration
+ struct_field.name.as_str() == field.name.as_str()
+ })
+ .map(|struct_field| struct_field.name.clone())
+ {
+ token.type_def = Some(TypeDefinition::Ident(field_name));
+ }
+ }
+ _ => {
+ token.type_def = Some(TypeDefinition::TypeId(field.type_id));
+ }
+ }
+ }
+ }
+ }
}
ty::TyExpressionVariant::IntrinsicFunction(kind) => {
kind.parse(ctx);
| diff --git a/sway-lsp/tests/fixtures/tokens/storage/src/main.sw b/sway-lsp/tests/fixtures/tokens/storage/src/main.sw
--- a/sway-lsp/tests/fixtures/tokens/storage/src/main.sw
+++ b/sway-lsp/tests/fixtures/tokens/storage/src/main.sw
@@ -19,7 +19,7 @@ abi StorageExample {
fn store_something();
}
-impl StorageExample for Contract {
+impl StorageExample for Contract {
#[storage(write)]
fn store_something() {
storage.var1.x.write(42);
diff --git a/sway-lsp/tests/lib.rs b/sway-lsp/tests/lib.rs
--- a/sway-lsp/tests/lib.rs
+++ b/sway-lsp/tests/lib.rs
@@ -1358,6 +1358,95 @@ async fn go_to_definition_for_abi() {
definition_check_with_req_offset(&mut service, &mut go_to, 16, 15, &mut i).await;
}
+#[tokio::test]
+async fn go_to_definition_for_storage() {
+ let (mut service, _) = LspService::new(ServerState::new);
+ let uri = init_and_open(
+ &mut service,
+ test_fixtures_dir().join("tokens/storage/src/main.sw"),
+ )
+ .await;
+ let mut i = 0..;
+
+ let mut go_to = GotoDefinition {
+ req_uri: &uri,
+ req_line: 24,
+ req_char: 9,
+ def_line: 12,
+ def_start_char: 0,
+ def_end_char: 7,
+ def_path: "sway-lsp/tests/fixtures/tokens/storage/src/main.sw",
+ };
+ // storage
+ let _ = lsp::definition_check(&mut service, &go_to, &mut i).await;
+ definition_check_with_req_offset(&mut service, &mut go_to, 25, 8, &mut i).await;
+ definition_check_with_req_offset(&mut service, &mut go_to, 26, 8, &mut i).await;
+
+ let mut go_to = GotoDefinition {
+ req_uri: &uri,
+ req_line: 24,
+ req_char: 17,
+ def_line: 13,
+ def_start_char: 4,
+ def_end_char: 8,
+ def_path: "sway-lsp/tests/fixtures/tokens/storage/src/main.sw",
+ };
+ // storage.var1
+ let _ = lsp::definition_check(&mut service, &go_to, &mut i).await;
+ definition_check_with_req_offset(&mut service, &mut go_to, 25, 17, &mut i).await;
+ definition_check_with_req_offset(&mut service, &mut go_to, 26, 17, &mut i).await;
+
+ let go_to = GotoDefinition {
+ req_uri: &uri,
+ req_line: 24,
+ req_char: 21,
+ def_line: 3,
+ def_start_char: 4,
+ def_end_char: 5,
+ def_path: "sway-lsp/tests/fixtures/tokens/storage/src/main.sw",
+ };
+ // storage.var1.x
+ let _ = lsp::definition_check(&mut service, &go_to, &mut i).await;
+
+ let go_to = GotoDefinition {
+ req_uri: &uri,
+ req_line: 25,
+ req_char: 21,
+ def_line: 4,
+ def_start_char: 4,
+ def_end_char: 5,
+ def_path: "sway-lsp/tests/fixtures/tokens/storage/src/main.sw",
+ };
+ // storage.var1.y
+ let _ = lsp::definition_check(&mut service, &go_to, &mut i).await;
+
+ let go_to = GotoDefinition {
+ req_uri: &uri,
+ req_line: 26,
+ req_char: 21,
+ def_line: 5,
+ def_start_char: 4,
+ def_end_char: 5,
+ def_path: "sway-lsp/tests/fixtures/tokens/storage/src/main.sw",
+ };
+ // storage.var1.z
+ let _ = lsp::definition_check(&mut service, &go_to, &mut i).await;
+
+ let go_to = GotoDefinition {
+ req_uri: &uri,
+ req_line: 26,
+ req_char: 23,
+ def_line: 9,
+ def_start_char: 4,
+ def_end_char: 5,
+ def_path: "sway-lsp/tests/fixtures/tokens/storage/src/main.sw",
+ };
+ // storage.var1.z.x
+ let _ = lsp::definition_check(&mut service, &go_to, &mut i).await;
+
+ shutdown_and_exit(&mut service).await;
+}
+
//------------------- HOVER DOCUMENTATION -------------------//
#[tokio::test]
| Add sway-lsp go_to_def for storage test back in.
This was removed in #4464. Just making a note to but it back in. The sway project that this tests against has been modified so the line and char's will need updating.
Old code for reference
```rust
#[tokio::test]
async fn go_to_definition_for_storage() {
let (mut service, _) = LspService::new(Backend::new);
let uri = init_and_open(
&mut service,
test_fixtures_dir().join("tokens/storage/src/main.sw"),
)
.await;
let mut i = 0..;
let mut go_to = GotoDefinition {
req_uri: &uri,
req_line: 24,
req_char: 9,
def_line: 12,
def_start_char: 0,
def_end_char: 7,
def_path: "sway-lsp/tests/fixtures/tokens/storage/src/main.sw",
};
// storage
let _ = lsp::definition_check(&mut service, &go_to, &mut i).await;
definition_check_with_req_offset(&mut service, &mut go_to, 25, 8, &mut i).await;
definition_check_with_req_offset(&mut service, &mut go_to, 26, 8, &mut i).await;
let mut go_to = GotoDefinition {
req_uri: &uri,
req_line: 24,
req_char: 17,
def_line: 13,
def_start_char: 4,
def_end_char: 8,
def_path: "sway-lsp/tests/fixtures/tokens/storage/src/main.sw",
};
// storage.var1
let _ = lsp::definition_check(&mut service, &go_to, &mut i).await;
definition_check_with_req_offset(&mut service, &mut go_to, 25, 17, &mut i).await;
definition_check_with_req_offset(&mut service, &mut go_to, 26, 17, &mut i).await;
let go_to = GotoDefinition {
req_uri: &uri,
req_line: 24,
req_char: 21,
def_line: 3,
def_start_char: 4,
def_end_char: 5,
def_path: "sway-lsp/tests/fixtures/tokens/storage/src/main.sw",
};
// storage.var1.x
let _ = lsp::definition_check(&mut service, &go_to, &mut i).await;
let go_to = GotoDefinition {
req_uri: &uri,
req_line: 25,
req_char: 21,
def_line: 4,
def_start_char: 4,
def_end_char: 5,
def_path: "sway-lsp/tests/fixtures/tokens/storage/src/main.sw",
};
// storage.var1.y
let _ = lsp::definition_check(&mut service, &go_to, &mut i).await;
let go_to = GotoDefinition {
req_uri: &uri,
req_line: 26,
req_char: 21,
def_line: 5,
def_start_char: 4,
def_end_char: 5,
def_path: "sway-lsp/tests/fixtures/tokens/storage/src/main.sw",
};
// storage.var1.z
let _ = lsp::definition_check(&mut service, &go_to, &mut i).await;
let go_to = GotoDefinition {
req_uri: &uri,
req_line: 26,
req_char: 23,
def_line: 9,
def_start_char: 4,
def_end_char: 5,
def_path: "sway-lsp/tests/fixtures/tokens/storage/src/main.sw",
};
// storage.var1.z.x
let _ = lsp::definition_check(&mut service, &go_to, &mut i).await;
shutdown_and_exit(&mut service).await;
}
```
| Just porting over some comments from 4464.
I just pulled down and installed this branch and it does seem like there has been a regression somewhere. Previously this test was testing go to definition for `storage` , `var1`, and `x`.
```rust
storage.var1.x = 42;
```
I installed your branch and manually tested go_to for the updated line but it doesn't find anything for each of the 3 tokens.
```rust
storage.var1.x.write(42);
```
Yeah it looks like it's this block was the logic that had the go to logic working.
```rust
impl Parse for ty::TyStorageReassignment {
fn parse(&self, ctx: &ParseContext) {
// collect storage keyword
if let Some(mut token) = ctx
.tokens
.try_get_mut(&to_ident_key(&Ident::new(
self.storage_keyword_span.clone(),
)))
.try_unwrap()
{
token.typed = Some(TypedAstToken::TyStorageResassignment(Box::new(
self.clone(),
)));
if let Some(storage) = ctx.namespace.get_declared_storage(ctx.engines.de()) {
token.type_def = Some(TypeDefinition::Ident(storage.storage_keyword));
}
}
if let Some((head_field, tail_fields)) = self.fields.split_first() {
// collect the first ident as a field of the storage definition
if let Some(mut token) = ctx
.tokens
.try_get_mut(&to_ident_key(&head_field.name))
.try_unwrap()
{
token.typed = Some(TypedAstToken::TypeCheckedStorageReassignDescriptor(
head_field.clone(),
));
if let Some(storage_field) = ctx
.namespace
.get_declared_storage(ctx.engines.de())
.and_then(|storage| {
// find the corresponding field in the storage declaration
storage
.fields
.into_iter()
.find(|f| f.name.as_str() == head_field.name.as_str())
})
{
token.type_def = Some(TypeDefinition::Ident(storage_field.name));
}
}
// collect the rest of the idents as fields of their respective types
for (field, container_type_id) in tail_fields
.iter()
.zip(self.fields.iter().map(|f| f.type_id))
{
if let Some(mut token) = ctx
.tokens
.try_get_mut(&to_ident_key(&field.name))
.try_unwrap()
{
token.typed = Some(TypedAstToken::TypeCheckedStorageReassignDescriptor(
field.clone(),
));
match ctx.engines.te().get(container_type_id) {
TypeInfo::Struct(decl_ref) => {
if let Some(field_name) = ctx
.engines
.de()
.get_struct(&decl_ref)
.fields
.iter()
.find(|struct_field| {
// find the corresponding field in the containing type declaration
struct_field.name.as_str() == field.name.as_str()
})
.map(|struct_field| struct_field.name.clone())
{
token.type_def = Some(TypeDefinition::Ident(field_name));
}
}
_ => {
token.type_def = Some(TypeDefinition::TypeId(field.type_id));
}
}
}
}
}
self.rhs.parse(ctx);
}
}
```
Inspecting the AST and it seems we can move this logic when looping over the fields in `TyExpressionVariant::StorageAccess`
```rust
TyStorageAccess {
fields: [
TyStorageAccessDescriptor {
name: BaseIdent {
name_override_opt: None,
span: Span {
src (ptr): 0x000000010ef27890,
path: Some(
"/private/var/folders/3z/79mvj_fn5298rz6zb4gnctw00000gn/T/SWAY_LSP_TEMP_DIRQs3zQJ/storage/src/main.sw",
),
start: 345,
end: 349,
as_str(): "var1",
},
is_raw_ident: false,
},
type_id: TypeId(
67840,
),
span: Span {
src (ptr): 0x000000010ef27890,
path: Some(
"/private/var/folders/3z/79mvj_fn5298rz6zb4gnctw00000gn/T/SWAY_LSP_TEMP_DIRQs3zQJ/storage/src/main.sw",
),
start: 345,
end: 349,
as_str(): "var1",
},
},
..
]
}
``` | 2023-07-18T12:28:23 | 0.18 | b33b63c1081ac7f6e932c7747a5aa6aacc36cae4 | [
"go_to_definition_for_storage"
] | [
"did_open",
"did_close",
"code_lens",
"code_action_function",
"code_action_abi",
"code_action_trait_fn_request",
"code_action_struct_existing_impl",
"completion",
"document_symbol",
"code_action_struct",
"code_action_struct_type_params",
"format",
"go_to_definition",
"did_change",
"go_to... | [
"hover_docs_for_functions",
"hover_docs_for_self_keywords"
] | [] |
FuelLabs/sway | 6,451 | FuelLabs__sway-6451 | [
"6447"
] | 4af5cd5f52b26194c06c720fd14c7f8ce123befb | diff --git a/swayfmt/src/items/item_storage/mod.rs b/swayfmt/src/items/item_storage/mod.rs
--- a/swayfmt/src/items/item_storage/mod.rs
+++ b/swayfmt/src/items/item_storage/mod.rs
@@ -275,6 +275,12 @@ impl LeafSpans for StorageEntry {
impl LeafSpans for StorageField {
fn leaf_spans(&self) -> Vec<ByteSpan> {
let mut collected_spans = vec![ByteSpan::from(self.name.span())];
+ if let Some(in_token) = &self.in_token {
+ collected_spans.push(ByteSpan::from(in_token.span()));
+ }
+ if let Some(key_expr) = &self.key_expr {
+ collected_spans.push(ByteSpan::from(key_expr.span()));
+ }
collected_spans.push(ByteSpan::from(self.colon_token.span()));
collected_spans.append(&mut self.ty.leaf_spans());
collected_spans.push(ByteSpan::from(self.eq_token.span()));
diff --git a/swayfmt/src/utils/language/punctuated.rs b/swayfmt/src/utils/language/punctuated.rs
--- a/swayfmt/src/utils/language/punctuated.rs
+++ b/swayfmt/src/utils/language/punctuated.rs
@@ -258,12 +258,15 @@ impl Format for StorageField {
formatter.with_shape(
formatter.shape.with_default_code_line(),
|formatter| -> Result<(), FormatterError> {
- write!(
- formatted_code,
- "{}{} ",
- self.name.span().as_str(),
- self.colon_token.span().as_str(),
- )?;
+ write!(formatted_code, "{}", self.name.span().as_str())?;
+ if let Some(in_token) = &self.in_token {
+ write!(formatted_code, " {}", in_token.span().as_str())?;
+ }
+ if let Some(key_expr) = &self.key_expr {
+ write!(formatted_code, " {}", key_expr.span().as_str())?;
+ }
+ write!(formatted_code, "{} ", self.colon_token.span().as_str())?;
+
self.ty.format(formatted_code, formatter)?;
write!(formatted_code, " {} ", self.eq_token.span().as_str())?;
diff --git a/swayfmt/src/utils/language/punctuated.rs b/swayfmt/src/utils/language/punctuated.rs
--- a/swayfmt/src/utils/language/punctuated.rs
+++ b/swayfmt/src/utils/language/punctuated.rs
@@ -272,7 +275,6 @@ impl Format for StorageField {
)?;
self.initializer.format(formatted_code, formatter)?;
-
Ok(())
}
}
| diff --git a/swayfmt/tests/mod.rs b/swayfmt/tests/mod.rs
--- a/swayfmt/tests/mod.rs
+++ b/swayfmt/tests/mod.rs
@@ -3129,3 +3129,29 @@ fn impl_func_where() {
"#},
);
}
+
+#[test]
+fn retain_in_keyword() {
+ check(
+ indoc! {r#"
+ contract;
+ use standards::src14::{SRC14, SRC14_TARGET_STORAGE};
+
+ storage {
+ SRC14 {
+ target in 0x7bb458adc1d118713319a5baa00a2d049dd64d2916477d2688d76970c898cd55:ContractId = ContractId::zero(),
+ },
+ }
+ "#},
+ indoc! {r#"
+ contract;
+ use standards::src14::{SRC14, SRC14_TARGET_STORAGE};
+
+ storage {
+ SRC14 {
+ target in 0x7bb458adc1d118713319a5baa00a2d049dd64d2916477d2688d76970c898cd55: ContractId = ContractId::zero(),
+ },
+ }
+ "#},
+ );
+}
| forc-fmt: removes `in` keyword
With forc 0.63.1 nightly:
<img width="441" alt="Screenshot 2024-08-21 at 17 49 20" src="https://github.com/user-attachments/assets/f626e873-0273-4b2f-85fe-3eb3623bf766">
using forc fmt removes the `in` keyword:
https://github.com/user-attachments/assets/1e65a067-2ae4-40aa-a891-a66d91b94114
| 2024-08-22T10:56:18 | 0.32 | eaac6499681135f1f499a2a9bf89bbaf4f82f78e | [
"retain_in_keyword"
] | [
"comment_between_closing_brace_and_else",
"chained_methods_1",
"broken_doc_comment",
"bug_whitespace_added_after_comment",
"chained_methods_0",
"comments_before_module_kind",
"abi_supertrait",
"comment_between_if_else_inline_to_multiline",
"comments_between_if_else",
"comments_empty_traits",
"co... | [] | [] | |
swc-project/swc | 2,227 | swc-project__swc-2227 | [
"2050"
] | 9ffe47106a70150579b80d103bd1a0193e5b8483 | diff --git a/Cargo.lock b/Cargo.lock
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2287,7 +2287,7 @@ checksum = "6446ced80d6c486436db5c078dde11a9f73d42b57fb273121e160b84f63d894c"
[[package]]
name = "swc"
-version = "0.52.0"
+version = "0.53.0"
dependencies = [
"ahash",
"anyhow",
diff --git a/Cargo.lock b/Cargo.lock
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2608,7 +2608,7 @@ dependencies = [
[[package]]
name = "swc_ecma_loader"
-version = "0.18.1"
+version = "0.18.2"
dependencies = [
"anyhow",
"dashmap",
diff --git a/Cargo.toml b/Cargo.toml
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -20,7 +20,7 @@ edition = "2018"
license = "Apache-2.0/MIT"
name = "swc"
repository = "https://github.com/swc-project/swc.git"
-version = "0.52.0"
+version = "0.53.0"
[lib]
name = "swc"
diff --git a/ecmascript/loader/Cargo.toml b/ecmascript/loader/Cargo.toml
--- a/ecmascript/loader/Cargo.toml
+++ b/ecmascript/loader/Cargo.toml
@@ -6,7 +6,7 @@ edition = "2018"
license = "Apache-2.0/MIT"
name = "swc_ecma_loader"
repository = "https://github.com/swc-project/swc.git"
-version = "0.18.1"
+version = "0.18.2"
[package.metadata.docs.rs]
all-features = true
diff --git a/ecmascript/loader/src/resolvers/tsc.rs b/ecmascript/loader/src/resolvers/tsc.rs
--- a/ecmascript/loader/src/resolvers/tsc.rs
+++ b/ecmascript/loader/src/resolvers/tsc.rs
@@ -135,7 +135,7 @@ where
);
let mut errors = vec![];
for target in to {
- let replaced = target.replace('*', capture.as_str());
+ let mut replaced = target.replace('*', capture.as_str());
let rel = format!("./{}", replaced);
let res = self.inner.resolve(base, &rel).with_context(|| {
diff --git a/ecmascript/loader/src/resolvers/tsc.rs b/ecmascript/loader/src/resolvers/tsc.rs
--- a/ecmascript/loader/src/resolvers/tsc.rs
+++ b/ecmascript/loader/src/resolvers/tsc.rs
@@ -150,8 +150,15 @@ where
Err(err) => err,
});
+ if cfg!(target_os = "windows") {
+ if replaced.starts_with("./") {
+ replaced = replaced[2..].to_string();
+ }
+ replaced = replaced.replace('/', "\\");
+ }
+
if to.len() == 1 {
- return Ok(FileName::Custom(replaced));
+ return Ok(FileName::Real(self.base_url.join(replaced)));
}
}
diff --git a/ecmascript/transforms/module/src/path.rs b/ecmascript/transforms/module/src/path.rs
--- a/ecmascript/transforms/module/src/path.rs
+++ b/ecmascript/transforms/module/src/path.rs
@@ -97,7 +97,7 @@ where
let rel_path = match rel_path {
Some(v) => v,
- None => return Ok(module_specifier.into()),
+ None => return Ok(to_specifier(&target.display().to_string())),
};
{
diff --git a/src/builder.rs b/src/builder.rs
--- a/src/builder.rs
+++ b/src/builder.rs
@@ -4,7 +4,7 @@ use crate::{
};
use compat::es2020::export_namespace_from;
use either::Either;
-use std::{cell::RefCell, collections::HashMap, rc::Rc, sync::Arc};
+use std::{cell::RefCell, collections::HashMap, path::PathBuf, rc::Rc, sync::Arc};
use swc_atoms::JsWord;
use swc_common::{
chain, comments::Comments, errors::Handler, sync::Lrc, util::take::Take, FileName, Mark,
diff --git a/src/builder.rs b/src/builder.rs
--- a/src/builder.rs
+++ b/src/builder.rs
@@ -142,7 +142,7 @@ impl<'a, 'b, P: swc_ecma_visit::Fold> PassBuilder<'a, 'b, P> {
/// - fixer if enabled
pub fn finalize<'cmt>(
self,
- base_url: String,
+ base_url: PathBuf,
paths: CompiledPaths,
base: &FileName,
syntax: Syntax,
diff --git a/src/config/mod.rs b/src/config/mod.rs
--- a/src/config/mod.rs
+++ b/src/config/mod.rs
@@ -208,6 +208,7 @@ impl Options {
minify: js_minify,
..
} = config.jsc;
+
let target = target.unwrap_or_default();
let syntax = syntax.unwrap_or_default();
diff --git a/src/config/mod.rs b/src/config/mod.rs
--- a/src/config/mod.rs
+++ b/src/config/mod.rs
@@ -774,7 +775,7 @@ pub struct JscConfig {
pub keep_class_names: bool,
#[serde(default)]
- pub base_url: String,
+ pub base_url: PathBuf,
#[serde(default)]
pub paths: Paths,
diff --git a/src/config/mod.rs b/src/config/mod.rs
--- a/src/config/mod.rs
+++ b/src/config/mod.rs
@@ -816,7 +817,7 @@ pub enum ModuleConfig {
impl ModuleConfig {
pub fn build(
cm: Arc<SourceMap>,
- base_url: String,
+ base_url: PathBuf,
paths: CompiledPaths,
base: &FileName,
root_mark: Mark,
diff --git a/src/config/mod.rs b/src/config/mod.rs
--- a/src/config/mod.rs
+++ b/src/config/mod.rs
@@ -1224,8 +1225,8 @@ impl Merge for ConstModulesConfig {
}
}
-fn build_resolver(base_url: String, paths: CompiledPaths) -> SwcImportResolver {
- static CACHE: Lazy<DashMap<(String, CompiledPaths), SwcImportResolver>> =
+fn build_resolver(base_url: PathBuf, paths: CompiledPaths) -> SwcImportResolver {
+ static CACHE: Lazy<DashMap<(PathBuf, CompiledPaths), SwcImportResolver>> =
Lazy::new(|| Default::default());
if let Some(cached) = CACHE.get(&(base_url.clone(), paths.clone())) {
diff --git a/src/lib.rs b/src/lib.rs
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -52,6 +52,8 @@ use swc_ecma_visit::{noop_visit_type, FoldWith, Visit, VisitWith};
mod builder;
pub mod config;
pub mod resolver {
+ use std::path::PathBuf;
+
use crate::config::CompiledPaths;
use fxhash::FxHashMap;
use swc_ecma_ast::TargetEnv;
diff --git a/src/lib.rs b/src/lib.rs
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -64,14 +66,10 @@ pub mod resolver {
pub fn paths_resolver(
target_env: TargetEnv,
alias: FxHashMap<String, String>,
- base_url: String,
+ base_url: PathBuf,
paths: CompiledPaths,
) -> CachingResolver<TsConfigResolver<NodeModulesResolver>> {
- let r = TsConfigResolver::new(
- NodeModulesResolver::new(target_env, alias),
- base_url.clone().into(),
- paths.clone(),
- );
+ let r = TsConfigResolver::new(NodeModulesResolver::new(target_env, alias), base_url, paths);
CachingResolver::new(40, r)
}
diff --git a/src/lib.rs b/src/lib.rs
--- a/src/lib.rs
+++ b/src/lib.rs
@@ -588,6 +586,35 @@ impl Compiler {
config.merge(&config_file.into_config(Some(path))?)
}
+ if let Some(c) = &mut config {
+ if c.jsc.base_url != PathBuf::new() {
+ let joined = dir.join(&c.jsc.base_url);
+ c.jsc.base_url = if cfg!(target_os = "windows")
+ && c.jsc.base_url.as_os_str() == "."
+ {
+ dir.canonicalize().with_context(|| {
+ format!(
+ "failed to canonicalize base url using the \
+ path of .swcrc\nDir: {}\n(Used logic for \
+ windows)",
+ dir.display(),
+ )
+ })?
+ } else {
+ joined.canonicalize().with_context(|| {
+ format!(
+ "failed to canonicalize base url using the \
+ path of .swcrc\nPath: {}\nDir: {}\nbaseUrl: \
+ {}",
+ joined.display(),
+ dir.display(),
+ c.jsc.base_url.display()
+ )
+ })?
+ };
+ }
+ }
+
return Ok(config);
}
| diff --git a/Cargo.lock b/Cargo.lock
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3161,7 +3161,7 @@ dependencies = [
[[package]]
name = "testing"
-version = "0.13.0"
+version = "0.13.1"
dependencies = [
"ansi_term 0.12.1",
"difference",
diff --git a/node-swc/__tests__/paths_test.mjs b/node-swc/__tests__/paths_test.mjs
--- a/node-swc/__tests__/paths_test.mjs
+++ b/node-swc/__tests__/paths_test.mjs
@@ -1,4 +1,8 @@
import swc from "../..";
+import { dirname } from 'path';
+import { fileURLToPath } from 'url';
+
+const __dirname = dirname(fileURLToPath(import.meta.url));
it("should respect paths", async () => {
const { code } = await swc.transform(`
diff --git a/node-swc/__tests__/paths_test.mjs b/node-swc/__tests__/paths_test.mjs
--- a/node-swc/__tests__/paths_test.mjs
+++ b/node-swc/__tests__/paths_test.mjs
@@ -13,6 +17,7 @@ it("should respect paths", async () => {
transform: {
},
+ baseUrl: __dirname,
paths: {
'@src/*': ['bar/*']
}
diff --git a/testing/Cargo.toml b/testing/Cargo.toml
--- a/testing/Cargo.toml
+++ b/testing/Cargo.toml
@@ -6,7 +6,7 @@ edition = "2018"
license = "Apache-2.0/MIT"
name = "testing"
repository = "https://github.com/swc-project/swc.git"
-version = "0.13.0"
+version = "0.13.1"
[dependencies]
ansi_term = "0.12.1"
diff --git a/testing/src/output.rs b/testing/src/output.rs
--- a/testing/src/output.rs
+++ b/testing/src/output.rs
@@ -2,7 +2,7 @@ use crate::paths;
use pretty_assertions::assert_eq;
use std::{
fmt,
- fs::{create_dir_all, remove_file, File},
+ fs::{create_dir_all, File},
io::Read,
ops::Deref,
path::Path,
diff --git a/testing/src/output.rs b/testing/src/output.rs
--- a/testing/src/output.rs
+++ b/testing/src/output.rs
@@ -79,22 +79,12 @@ impl NormalizedOutput {
String::new()
});
- let path_for_actual = paths::test_results_dir().join("ui").join(
- path.strip_prefix(&paths::manifest_dir())
- .unwrap_or_else(|_| {
- unreachable!(
- "failed to strip prefix: CARGO_MANIFEST_DIR\nPath: {}\nManifest dir: {}",
- path.display(),
- paths::manifest_dir().display()
- )
- }),
- );
- eprintln!("{}:{}", path.display(), path_for_actual.display());
- if self.0 == expected {
- let _ = remove_file(path_for_actual);
+ if expected == self.0 {
return Ok(());
}
- create_dir_all(path_for_actual.parent().unwrap()).expect("failed to run `mkdir -p`");
+
+ eprintln!("Comparing output to {}", path.display());
+ create_dir_all(path.parent().unwrap()).expect("failed to run `mkdir -p`");
let diff = Diff {
expected: NormalizedOutput(expected),
diff --git a/testing/src/output.rs b/testing/src/output.rs
--- a/testing/src/output.rs
+++ b/testing/src/output.rs
@@ -106,7 +96,7 @@ impl NormalizedOutput {
eprintln!(
"Assertion failed: \nActual file printed to {}",
- path_for_actual.display()
+ path.display()
);
}
diff --git /dev/null b/tests/fixture/issue-2050/input/.swcrc
new file mode 100644
--- /dev/null
+++ b/tests/fixture/issue-2050/input/.swcrc
@@ -0,0 +1,17 @@
+{
+ "jsc": {
+ "parser": {
+ "syntax": "typescript"
+ },
+ "target": "es2020",
+ "baseUrl": ".",
+ "paths": {
+ "~/*": [
+ "./*"
+ ]
+ }
+ },
+ "module": {
+ "type": "commonjs"
+ }
+}
diff --git /dev/null b/tests/fixture/issue-2050/input/index.ts
new file mode 100644
--- /dev/null
+++ b/tests/fixture/issue-2050/input/index.ts
@@ -0,0 +1,4 @@
+import A from './subfolder/A'
+
+
+console.log(A);
\ No newline at end of file
diff --git /dev/null b/tests/fixture/issue-2050/input/subfolder/A.ts
new file mode 100644
--- /dev/null
+++ b/tests/fixture/issue-2050/input/subfolder/A.ts
@@ -0,0 +1,5 @@
+import { B } from '~/subfolder/B';
+
+console.log(B);
+
+export const A = 400;
\ No newline at end of file
diff --git /dev/null b/tests/fixture/issue-2050/input/subfolder/B.ts
new file mode 100644
--- /dev/null
+++ b/tests/fixture/issue-2050/input/subfolder/B.ts
@@ -0,0 +1,1 @@
+export const B = 500;
\ No newline at end of file
diff --git /dev/null b/tests/fixture/issue-2050/output/index.ts
new file mode 100644
--- /dev/null
+++ b/tests/fixture/issue-2050/output/index.ts
@@ -0,0 +1,8 @@
+"use strict";
+var _a = _interopRequireDefault(require("./subfolder/A"));
+function _interopRequireDefault(obj) {
+ return obj && obj.__esModule ? obj : {
+ default: obj
+ };
+}
+console.log(_a.default);
diff --git /dev/null b/tests/fixture/issue-2050/output/subfolder/A.ts
new file mode 100644
--- /dev/null
+++ b/tests/fixture/issue-2050/output/subfolder/A.ts
@@ -0,0 +1,9 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.A = void 0;
+var _b = require("./B");
+console.log(_b.B);
+const A = 400;
+exports.A = A;
diff --git /dev/null b/tests/fixture/issue-2050/output/subfolder/B.ts
new file mode 100644
--- /dev/null
+++ b/tests/fixture/issue-2050/output/subfolder/B.ts
@@ -0,0 +1,7 @@
+"use strict";
+Object.defineProperty(exports, "__esModule", {
+ value: true
+});
+exports.B = void 0;
+const B = 500;
+exports.B = B;
diff --git a/tests/projects.rs b/tests/projects.rs
--- a/tests/projects.rs
+++ b/tests/projects.rs
@@ -783,15 +783,14 @@ fn should_visit() {
}
#[testing::fixture("tests/fixture/**/input/")]
-fn tests(dir: PathBuf) {
- let output = dir.parent().unwrap().join("output");
- let _ = create_dir_all(&output);
+fn tests(input_dir: PathBuf) {
+ let output = input_dir.parent().unwrap().join("output");
Tester::new()
.print_errors(|cm, handler| {
let c = Compiler::new(cm.clone());
- for entry in WalkDir::new(&dir) {
+ for entry in WalkDir::new(&input_dir) {
let entry = entry.unwrap();
if entry.metadata().unwrap().is_dir() {
continue;
diff --git a/tests/projects.rs b/tests/projects.rs
--- a/tests/projects.rs
+++ b/tests/projects.rs
@@ -805,6 +804,11 @@ fn tests(dir: PathBuf) {
continue;
}
+ let rel_path = entry
+ .path()
+ .strip_prefix(&input_dir)
+ .expect("failed to strip prefix");
+
let fm = cm.load_file(entry.path()).expect("failed to load file");
match c.process_js_file(
fm,
diff --git a/tests/projects.rs b/tests/projects.rs
--- a/tests/projects.rs
+++ b/tests/projects.rs
@@ -819,9 +823,11 @@ fn tests(dir: PathBuf) {
) {
Ok(v) => {
NormalizedOutput::from(v.code)
- .compare_to_file(output.join(entry.file_name()))
+ .compare_to_file(output.join(rel_path))
.unwrap();
+ let _ = create_dir_all(output.join(rel_path).parent().unwrap());
+
let map = v.map.map(|json| {
let json: serde_json::Value = serde_json::from_str(&json).unwrap();
serde_json::to_string_pretty(&json).unwrap()
diff --git a/tests/projects.rs b/tests/projects.rs
--- a/tests/projects.rs
+++ b/tests/projects.rs
@@ -829,8 +835,7 @@ fn tests(dir: PathBuf) {
NormalizedOutput::from(map.unwrap_or_default())
.compare_to_file(
- output
- .join(entry.path().with_extension("map").file_name().unwrap()),
+ output.join(rel_path.with_extension("map").file_name().unwrap()),
)
.unwrap();
}
diff --git a/tests/t.js /dev/null
--- a/tests/t.js
+++ /dev/null
@@ -1,4 +0,0 @@
-console.log('--Hello--');
-throw new Error('Error here');
-
-//# sourceMappingURL=data:application/json;base64,eyJ2ZXJzaW9uIjozLCJzb3VyY2VzIjpbIi9Vc2Vycy9rZHkxL3Byb2plY3RzL3N3Yy1idWdzL3Rlc3RzL3N0YWNrdHJhY2UvaXNzdWUtMTY4NS9pbnB1dC9pbmRleC5qcyJdLCJzb3VyY2VzQ29udGVudCI6WyJjb25zb2xlLmxvZygnLS1IZWxsby0tJylcblxudGhyb3cgbmV3IEVycm9yKCdFcnJvciBoZXJlJyk7Il0sIm5hbWVzIjpbXSwibWFwcGluZ3MiOiJBQUFBLE9BQU8sQ0FBQyxHQUFHLEVBQUMsU0FBVztBQUV2QixLQUFLLENBQUMsR0FBRyxDQUFDLEtBQUssRUFBQyxVQUFZIn0=
\ No newline at end of file
| Path aliases don't seem to respect nested folder structure
<!--
If you are using swc at work, please considering adding your company to https://swc.rs/users/
If then, your issue will be fixed more quickly.
-->
**Describe the bug**
After https://github.com/swc-project/swc/issues/1934 and https://github.com/swc-project/swc/issues/1935 were resolved in `1.2.76`, I gave it a quick try and run into another issue.
It seems to me that `paths` simply rewrites the path prefix but doesn't respect the location of given file. I also tried using `baseUrl` but no luck. Am I missing some configuration?
**Input code**
Using `~` as an alias for `src` and having a file `src/subfolder/A.ts` with an `import { B } from '~/subfolder/B.ts';` (and considering the `src/subfolder/B.ts` exists and exports `B`), we'll run into `Error: Cannot find module './subfolder/B'`.
This is because the import was rewritten to `./subfolder/B` instead of just `./B` or `../subfolder/B` (that should be correct as these files are in the same subfolder indeed).
The command I'm running is: `yarn swc src -s -d ./dist`
**Config**
Note that `baseUrl` probably has no effect but I tried to add it to conform with `tsconfig.json`.
```json
{
"jsc": {
"parser": {
"syntax": "typescript",
},
"target": "es2020",
"baseUrl": ".",
"paths": {
"~/*": ["./*"]
}
},
"module": {
"type": "commonjs"
}
}
```
Also note that in TypeScript the correct path in the config would be `"~/*": ["./src/*"]` in this case (including the `src` portion) since `baseUrl` is `.`. With SWC, however, I need to use just `./*`, probably because I'm passing `src` dir to the `swc` command.
**Expected behavior**
Paths should be rewritten using current file location (not just using a static prefix).
In the given example the import from `'~/subfolder/B.ts` was rewritten to `./subfolder/B` instead of just `./B` (or `../subfolder/B`).
**Version**
The version of @swc/core: 1.2.76
**Additional context**
TypeScript also uses `baseUrl` to set the base path relative to which imports are rewritten.
| Also worth noting that SWC currently only appears to transform ES-style `import`s.
Paths are not resolved for CJS imports with `require('~/file.ts')`
@schmod I don't think that's related to this issue, you may want to create a new one, I guess. There is also https://github.com/swc-project/swc/issues/1943 but that's about emitting to ES-style imports so I guess it's not related either.
@kdy1 This has no specific estimate or priority, right? Are the paths-related issues small fixes or large issues? Since decorators are fixed now I can't wait to test SWC on our backend codebase but unfortunately it's all path-aliased.
@JanJakes
> This has no specific estimate or priority, right
It has higher priority than issues without milestone.
> Are the paths-related issues small fixes or large issues?
I don't expect it to be a large task.
Eagerly waiting for this to be resolved :)
I'll fix this with the next version. | 2021-09-10T14:53:50 | 0.52 | 9ffe47106a70150579b80d103bd1a0193e5b8483 | [
"tests_tests__fixture__paths__cjs_001__input"
] | [
"config::tests::object",
"config::tests::issue_1532",
"config::tests::array",
"swcrc_simple",
"issue_1532",
"await_expr",
"await_expr_2",
"deno_10282_2",
"codegen_1",
"issue_1549",
"deno_10282_1",
"issue_1052",
"issue_226",
"issue_1984",
"issue_406",
"issue_225",
"issue_389",
"issu... | [
"env_query_chrome_71",
"project_env",
"tests_tests__fixture__issue_1271__input",
"tests_tests__fixture__issue_1235__case_1__input",
"tests_tests__fixture__issue_1314__case1__input",
"tests_tests__fixture__issue_1402__case1__input",
"tests_tests__fixture__issue_1448__case1__input",
"tests_tests__fixtur... | [] |
swc-project/swc | 2,053 | swc-project__swc-2053 | [
"2049"
] | 71080dbd26fdb09bc6b6e69b3e088e3994b704ce | diff --git a/ecmascript/transforms/module/Cargo.toml b/ecmascript/transforms/module/Cargo.toml
--- a/ecmascript/transforms/module/Cargo.toml
+++ b/ecmascript/transforms/module/Cargo.toml
@@ -6,7 +6,7 @@ edition = "2018"
license = "Apache-2.0/MIT"
name = "swc_ecma_transforms_module"
repository = "https://github.com/swc-project/swc.git"
-version = "0.31.0"
+version = "0.31.1"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
diff --git a/ecmascript/transforms/module/src/util.rs b/ecmascript/transforms/module/src/util.rs
--- a/ecmascript/transforms/module/src/util.rs
+++ b/ecmascript/transforms/module/src/util.rs
@@ -708,11 +708,27 @@ impl Scope {
args: vec![],
type_args: Default::default(),
});
+
+ let left = if let PatOrExpr::Pat(ref left_pat) = expr.left {
+ if let Pat::Ident(BindingIdent { ref id, .. }) = **left_pat {
+ let expr = match Self::fold_ident(folder, top_level, id.clone()) {
+ Ok(expr) => expr,
+ Err(ident) => Expr::Ident(ident),
+ };
+ PatOrExpr::Expr(Box::new(expr))
+ } else {
+ expr.left
+ }
+ } else {
+ expr.left
+ };
+
return Expr::Assign(AssignExpr {
right: Box::new(Expr::Seq(SeqExpr {
span: DUMMY_SP,
exprs: vec![expr.right, Box::new(throw)],
})),
+ left,
..expr
});
}
| diff --git a/ecmascript/transforms/module/tests/common_js.rs b/ecmascript/transforms/module/tests/common_js.rs
--- a/ecmascript/transforms/module/tests/common_js.rs
+++ b/ecmascript/transforms/module/tests/common_js.rs
@@ -1592,13 +1592,13 @@ var Bar = _interopRequireWildcard(require("bar"));
var _baz = require("baz");
-Foo = (42, (function() {
+_foo.default = (42, (function() {
throw new Error('"' + 'Foo' + '" is read-only.');
})());
Bar = (43, (function() {
throw new Error('"' + 'Bar' + '" is read-only.');
})());
-Baz = (44, (function() {
+_baz.Baz = (44, (function() {
throw new Error('"' + 'Baz' + '" is read-only.');
})());
({ Foo } = ( {
| A better transform for "misc import thorw"
<!--
If you are using swc at work, please considering adding your company to https://swc.rs/users/
If then, your issue will be fixed more quickly.
-->
**Describe the feature**
Now these code
``` js
import { bar } from 'foo';
bar = jest.fn()
```
will be transformed to
``` js
bar = (jest.fn(), (function() { throw new Error('"' + 'bar' + '" is read-only.') })())
```
If the assignee `bar` becomes `_foo.bar`(Babel's implement now), after jest plugin removes the assert, in the test, the test writer can override modules, for mock something.
| 2021-08-11T13:43:51 | 0.38 | 71080dbd26fdb09bc6b6e69b3e088e3994b704ce | [
"misc_import_const_throw"
] | [
"disable_strict_mode_strict_mode_false",
"interop_export_default_2",
"interop_export_default",
"interop_export_default_3",
"custom_usage",
"interop_export_default_11",
"custom_02",
"interop_export_default_10",
"interop_export_default_5",
"interop_export_default_7",
"for_of_as_array_for_of_import... | [] | [] | |
swc-project/swc | 2,514 | swc-project__swc-2514 | [
"2344"
] | 4abde38dd78fb1885b0acfcdd90be50795628f9d | diff --git a/ecmascript/transforms/module/src/util.rs b/ecmascript/transforms/module/src/util.rs
--- a/ecmascript/transforms/module/src/util.rs
+++ b/ecmascript/transforms/module/src/util.rs
@@ -506,6 +506,7 @@ impl Scope {
}
match expr {
+ // In a JavaScript module, this is undefined at the top level (i.e., outside functions).
Expr::This(ThisExpr { span }) if top_level => *undefined(span),
Expr::Ident(i) => match Self::fold_ident(folder, top_level, i) {
Ok(expr) => expr,
diff --git a/ecmascript/transforms/module/src/util.rs b/ecmascript/transforms/module/src/util.rs
--- a/ecmascript/transforms/module/src/util.rs
+++ b/ecmascript/transforms/module/src/util.rs
@@ -986,6 +987,24 @@ macro_rules! mark_as_nested {
mark_as_nested!(fold_constructor, Constructor);
mark_as_nested!(fold_setter_prop, SetterProp);
mark_as_nested!(fold_getter_prop, GetterProp);
+ mark_as_nested!(fold_static_block, StaticBlock);
+
+ fn fold_class_prop(&mut self, mut n: ClassProp) -> ClassProp {
+ use swc_common::util::take::Take;
+ if n.computed {
+ let key = n.key.take().fold_children_with(self);
+
+ let old = self.in_top_level;
+ self.in_top_level = false;
+ let mut n = n.fold_children_with(self);
+ self.in_top_level = old;
+
+ n.key = key;
+ n
+ } else {
+ n
+ }
+ }
};
($name:ident, $T:tt) => {
| diff --git a/ecmascript/transforms/module/tests/common_js.rs b/ecmascript/transforms/module/tests/common_js.rs
--- a/ecmascript/transforms/module/tests/common_js.rs
+++ b/ecmascript/transforms/module/tests/common_js.rs
@@ -26,6 +26,7 @@ fn syntax() -> Syntax {
Syntax::Es(EsConfig {
dynamic_import: true,
top_level_await: true,
+ static_blocks: true,
..Default::default()
})
}
diff --git a/ecmascript/transforms/module/tests/common_js.rs b/ecmascript/transforms/module/tests/common_js.rs
--- a/ecmascript/transforms/module/tests/common_js.rs
+++ b/ecmascript/transforms/module/tests/common_js.rs
@@ -3530,13 +3531,11 @@ test!(
misc_undefined_this_arrow_function,
r#"
var foo = () => this;
-
"#,
r#"
"use strict";
var foo = () => void 0;
-
"#
);
diff --git a/ecmascript/transforms/module/tests/common_js.rs b/ecmascript/transforms/module/tests/common_js.rs
--- a/ecmascript/transforms/module/tests/common_js.rs
+++ b/ecmascript/transforms/module/tests/common_js.rs
@@ -4940,6 +4939,95 @@ test!(
"
);
+test!(
+ syntax(),
+ |_| tr(Config {
+ ..Default::default()
+ }),
+ issue_2344_1,
+ "
+ class LoggingButton extends React.Component {
+ handleClick = () => {
+ console.log('this is:', this);
+ }
+ m() { this }
+ static a = () => this
+ }
+ ",
+ "
+ 'use strict';
+ class LoggingButton extends React.Component {
+ handleClick = () => {
+ console.log('this is:', this);
+ }
+ m() { this }
+ static a = () => this
+ }
+ "
+);
+
+test!(
+ syntax(),
+ |_| tr(Config {
+ ..Default::default()
+ }),
+ issue_2344_2,
+ "
+ class A {
+ // this is weird I know
+ [(() => this)()] = 123
+ }
+ class B {
+ // this is weird too I know
+ [(() => this)()]() {}
+ }
+ class C {
+ static [(() => this)()] = 1
+ }
+ class D {
+ static d = class {
+ [(() => this)()]() {}
+ }
+ }
+ ",
+ "
+ 'use strict';
+ class A {
+ [(() => void 0)()] = 123
+ }
+ class B {
+ [(() => void 0)()]() {}
+ }
+ class C {
+ static [(() => void 0)()] = 1
+ }
+ class D {
+ static d = class {
+ [(() => this)()]() {}
+ }
+ }
+ "
+);
+
+test!(
+ syntax(),
+ |_| tr(Config {
+ ..Default::default()
+ }),
+ issue_2344_3,
+ "
+ class A {
+ static { this.a = 123 }
+ }
+ ",
+ "
+ 'use strict';
+ class A {
+ static { this.a = 123 }
+ }
+ "
+);
+
#[testing::fixture("tests/fixture/commonjs/**/input.js")]
fn fixture(input: PathBuf) {
let dir = input.parent().unwrap().to_path_buf();
| Incorrect transpilation of an arrow function in class fields
Hi team, first, thanks for the awesome work!
I struggle with an issue when `swc` doesn't transpile correctly an arrow funciton when it used as a value of a class field (no matter JS or TS).
It's quite typical way for React users to avoid an explicit handler binding - https://reactjs.org/docs/handling-events.html.
The regular class fields `name=value` works as expected.
```js
class LoggingButton extends React.Component {
handleClick = () => {
console.log('this is:', this);
}
render () {
return (
<button onClick={this.handleClick}>
Click me
</button>
);
}
}
```
transpiles to `(void 0)`
```js
"use strict";
class LoggingButton extends React.Component {
handleClick = ()=>{
console.log('this is:', void 0);
};
render() {
return(/*#__PURE__*/ React.createElement("button", {
onClick: this.handleClick
}, "Click me"));
}
}
```
**Config**
```json
{
"test": [".*.js$", ".jsx$", ".tsx$"],
"exclude": [".*.ts$"],
"jsc": {
"target": "es2020",
"parser": {
"syntax": "ecmascript",
"jsx": true
}
},
"module": {
"type": "commonjs"
},
"env": {
"targets": {
"electron": "12"
},
"mode": "entry",
"coreJs": 3
}
}
```
**Expected behavior**
I guess It should be transpiled properly
**Version**
The version of @swc/core: `1.2.66`
| This is caused by commonjs transform.
Edit: I know
Another problem, although I don't know if it's within swc's scope to provide single transform:
When using only arrow transform, swc would transform following code
```js
class LoggingButton extends React.Component {
handleClick = () => {
console.log('this is:', this);
}
}
```
to
```js
var _this = this;
class LoggingButton extends React.Component {
handleClick = function() {
console.log('this is:', _this);
};
}
```
while babel would throw an error
> This is caused by commonjs transform. @kdy1 why do we transform top level this to undefined in cjs/amd? Neither babel nor tsc act like that.
I don't rememeber the reasoning and I it should be patched to not do something lke it.
> > This is caused by commonjs transform. @kdy1 why do we transform top level this to undefined in cjs/amd? Neither babel nor tsc act like that.
>
> I don't rememeber the reasoning and I it should be patched to not do something lke it.
My bad, top level arrow function contain this should be transpiled to undefined
> > > This is caused by commonjs transform. @kdy1 why do we transform top level this to undefined in cjs/amd? Neither babel nor tsc act like that.
> >
> >
> > I don't rememeber the reasoning and I it should be patched to not do something lke it.
>
> My bad, top level arrow function contain this should be transpiled to undefined
The conclusion is
> In a JavaScript module, this is undefined at the top level
per [rollup](https://rollupjs.org/guide/en/#error-this-is-undefined) and [awb](https://twitter.com/awbjs/status/535920492486483968) | 2021-10-23T03:02:56 | 0.77 | 430a06ce4ddf4e0ee96176789df86ed5df137702 | [
"issue_2344_3",
"issue_2344_1",
"issue_2344_2"
] | [
"for_of_as_array_for_of_import_commonjs",
"custom_02",
"fixture_tests__fixture__commonjs__issue_2211__2__input_js",
"fixture_tests__fixture__commonjs__issue_2297__input_js",
"fixture_tests__fixture__commonjs__issue_2211__1__input_js",
"disable_strict_mode_strict_mode_false",
"custom_usage",
"interop_e... | [] | [] |
swc-project/swc | 2,447 | swc-project__swc-2447 | [
"2351",
"2446"
] | 7f04ef47155cb8ec06eb4296d8c3c915c79221d6 | diff --git a/Cargo.lock b/Cargo.lock
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2707,7 +2707,7 @@ dependencies = [
[[package]]
name = "swc_ecma_parser"
-version = "0.73.13"
+version = "0.73.14"
dependencies = [
"either",
"enum_kind",
diff --git a/Cargo.lock b/Cargo.lock
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -2926,7 +2926,7 @@ dependencies = [
[[package]]
name = "swc_ecma_transforms_react"
-version = "0.50.0"
+version = "0.50.1"
dependencies = [
"ahash",
"base64 0.13.0",
diff --git a/Cargo.lock b/Cargo.lock
--- a/Cargo.lock
+++ b/Cargo.lock
@@ -3529,7 +3529,7 @@ checksum = "fd6fbd9a79829dd1ad0cc20627bf1ed606756a7f77edff7b66b7064f9cb327c6"
[[package]]
name = "wasm"
-version = "1.2.97"
+version = "1.2.98"
dependencies = [
"anyhow",
"console_error_panic_hook",
diff --git a/ecmascript/minifier/src/util/base54.rs b/ecmascript/minifier/src/util/base54.rs
--- a/ecmascript/minifier/src/util/base54.rs
+++ b/ecmascript/minifier/src/util/base54.rs
@@ -30,7 +30,8 @@ pub(crate) fn incr_base54(init: &mut usize) -> String {
base = 64;
}
- if ret.is_reserved() {
+ if ret.is_reserved() || ret.is_reserved_in_strict_bind() || ret.is_reserved_in_strict_mode(true)
+ {
return incr_base54(init);
}
diff --git a/ecmascript/parser/Cargo.toml b/ecmascript/parser/Cargo.toml
--- a/ecmascript/parser/Cargo.toml
+++ b/ecmascript/parser/Cargo.toml
@@ -7,7 +7,7 @@ include = ["Cargo.toml", "src/**/*.rs", "examples/**/*.rs"]
license = "Apache-2.0/MIT"
name = "swc_ecma_parser"
repository = "https://github.com/swc-project/swc.git"
-version = "0.73.13"
+version = "0.73.14"
[package.metadata.docs.rs]
all-features = true
diff --git a/ecmascript/parser/src/lexer/jsx.rs b/ecmascript/parser/src/lexer/jsx.rs
--- a/ecmascript/parser/src/lexer/jsx.rs
+++ b/ecmascript/parser/src/lexer/jsx.rs
@@ -157,9 +157,10 @@ impl<'a, I: Input> Lexer<'a, I> {
if ch == '\\' {
has_escape = true;
out.push_str(self.input.slice(chunk_start, cur_pos));
- if let Some(s) = self.read_escaped_char(&mut Raw(None))? {
- out.extend(s);
- }
+
+ let chars = self.read_jsx_escaped_char(&mut Raw(None))?;
+ out.extend(IntoIterator::into_iter(chars).flatten().flatten());
+
chunk_start = self.input.cur_pos();
continue;
}
diff --git a/ecmascript/parser/src/lexer/jsx.rs b/ecmascript/parser/src/lexer/jsx.rs
--- a/ecmascript/parser/src/lexer/jsx.rs
+++ b/ecmascript/parser/src/lexer/jsx.rs
@@ -191,6 +192,65 @@ impl<'a, I: Input> Lexer<'a, I> {
})
}
+ fn read_jsx_escaped_char(&mut self, raw: &mut Raw) -> LexResult<[Option<Char>; 2]> {
+ debug_assert_eq!(self.cur(), Some('\\'));
+ let start = self.cur_pos();
+ self.bump(); // '\'
+
+ let c = match self.cur() {
+ Some(c) => c,
+ None => self.error_span(pos_span(start), SyntaxError::InvalidStrEscape)?,
+ };
+
+ macro_rules! push_c_and_ret {
+ ($c:expr) => {{
+ raw.push(c);
+ $c
+ }};
+ }
+
+ let c = match c {
+ '\\' => push_c_and_ret!('\\'),
+ '\r' => {
+ raw.push_str("\r");
+ self.bump(); // remove '\r'
+
+ if self.eat(b'\n') {
+ raw.push_str("\n");
+ }
+ return Ok(Default::default());
+ }
+ '\n' | '\u{2028}' | '\u{2029}' => {
+ match c {
+ '\n' => raw.push_str("\n"),
+ '\u{2028}' => raw.push_str("\u{2028}"),
+ '\u{2029}' => raw.push_str("\u{2029}"),
+ _ => unreachable!(),
+ }
+ self.bump();
+ return Ok(Default::default());
+ }
+
+ // read hexadecimal escape sequences
+ 'x' => {
+ raw.push_str("0x");
+ self.bump(); // 'x'
+ return self.read_hex_char(start, 2, raw).map(|v| [Some(v), None]);
+ }
+
+ // read unicode escape sequences
+ 'u' => {
+ return self
+ .read_unicode_escape(start, raw)
+ .map(|v| [Some(v), None]);
+ }
+ _ => c,
+ };
+ self.input.bump();
+
+ Ok([Some('\\'.into()), Some(c.into())])
+ }
+
/// Read a JSX identifier (valid tag or attribute name).
///
/// Optimized version since JSX identifiers can"t contain
diff --git a/ecmascript/transforms/react/Cargo.toml b/ecmascript/transforms/react/Cargo.toml
--- a/ecmascript/transforms/react/Cargo.toml
+++ b/ecmascript/transforms/react/Cargo.toml
@@ -7,7 +7,7 @@ include = ["Cargo.toml", "src/**/*.rs"]
license = "Apache-2.0/MIT"
name = "swc_ecma_transforms_react"
repository = "https://github.com/swc-project/swc.git"
-version = "0.50.0"
+version = "0.50.1"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
diff --git a/ecmascript/transforms/react/Cargo.toml b/ecmascript/transforms/react/Cargo.toml
--- a/ecmascript/transforms/react/Cargo.toml
+++ b/ecmascript/transforms/react/Cargo.toml
@@ -23,7 +23,7 @@ string_enum = {version = "0.3.1", path = "../../../macros/string_enum"}
swc_atoms = {version = "0.2", path = "../../../atoms"}
swc_common = {version = "0.13.5", path = "../../../common"}
swc_ecma_ast = {version = "0.54.0", path = "../../ast"}
-swc_ecma_parser = {version = "0.73.0", path = "../../parser"}
+swc_ecma_parser = {version = "0.73.14", path = "../../parser"}
swc_ecma_transforms_base = {version = "0.38.0", path = "../base"}
swc_ecma_utils = {version = "0.47.0", path = "../../utils"}
swc_ecma_visit = {version = "0.40.0", path = "../../visit"}
diff --git a/ecmascript/transforms/react/src/jsx/mod.rs b/ecmascript/transforms/react/src/jsx/mod.rs
--- a/ecmascript/transforms/react/src/jsx/mod.rs
+++ b/ecmascript/transforms/react/src/jsx/mod.rs
@@ -1213,8 +1213,6 @@ fn transform_jsx_attr_str(v: &str) -> String {
'\u{000b}' => buf.push_str("\\v"),
'\0' => buf.push_str("\\x00"),
- '\\' => buf.push_str("\\\\"),
-
'\'' if single_quote => buf.push_str("\\'"),
'"' if !single_quote => buf.push_str("\""),
diff --git a/package.json b/package.json
--- a/package.json
+++ b/package.json
@@ -1,6 +1,6 @@
{
"name": "@swc/core",
- "version": "1.2.97",
+ "version": "1.2.98",
"description": "Super-fast alternative for babel",
"homepage": "https://swc.rs",
"main": "./index.js",
diff --git a/wasm/Cargo.toml b/wasm/Cargo.toml
--- a/wasm/Cargo.toml
+++ b/wasm/Cargo.toml
@@ -6,7 +6,7 @@ license = "Apache-2.0 AND MIT"
name = "wasm"
publish = false
repository = "https://github.com/swc-project/swc.git"
-version = "1.2.97"
+version = "1.2.98"
[lib]
crate-type = ["cdylib"]
| diff --git a/ecmascript/parser/src/lexer/tests.rs b/ecmascript/parser/src/lexer/tests.rs
--- a/ecmascript/parser/src/lexer/tests.rs
+++ b/ecmascript/parser/src/lexer/tests.rs
@@ -939,7 +939,7 @@ fn issue_299_01() {
Token::JSXName { name: "num".into() },
tok!('='),
Token::Str {
- value: " ".into(),
+ value: "\\ ".into(),
has_escape: true
},
Token::JSXTagEnd,
diff --git a/ecmascript/parser/src/lexer/tests.rs b/ecmascript/parser/src/lexer/tests.rs
--- a/ecmascript/parser/src/lexer/tests.rs
+++ b/ecmascript/parser/src/lexer/tests.rs
@@ -957,40 +957,6 @@ fn issue_299_01() {
#[test]
fn issue_299_02() {
- assert_eq!(
- lex_tokens(
- crate::Syntax::Es(crate::EsConfig {
- jsx: true,
- ..Default::default()
- }),
- "<Page num='\\''>ABC</Page>;"
- ),
- vec![
- Token::JSXTagStart,
- Token::JSXName {
- name: "Page".into()
- },
- Token::JSXName { name: "num".into() },
- tok!('='),
- Token::Str {
- value: "'".into(),
- has_escape: true
- },
- Token::JSXTagEnd,
- JSXText { raw: "ABC".into() },
- JSXTagStart,
- tok!('/'),
- JSXName {
- name: "Page".into()
- },
- JSXTagEnd,
- Semi,
- ]
- );
-}
-
-#[test]
-fn issue_299_03() {
assert_eq!(
lex_tokens(
crate::Syntax::Es(crate::EsConfig {
diff --git a/ecmascript/parser/src/lexer/tests.rs b/ecmascript/parser/src/lexer/tests.rs
--- a/ecmascript/parser/src/lexer/tests.rs
+++ b/ecmascript/parser/src/lexer/tests.rs
@@ -1007,7 +973,7 @@ fn issue_299_03() {
Token::JSXName { name: "num".into() },
tok!('='),
Token::Str {
- value: "\\".into(),
+ value: "\\\\".into(),
has_escape: true
},
Token::JSXTagEnd,
diff --git a/ecmascript/transforms/react/src/jsx/tests.rs b/ecmascript/transforms/react/src/jsx/tests.rs
--- a/ecmascript/transforms/react/src/jsx/tests.rs
+++ b/ecmascript/transforms/react/src/jsx/tests.rs
@@ -729,7 +729,7 @@ React.createElement("div", {
id: "w\xf4w"
});
React.createElement("div", {
- id: "w"
+ id: "\\w"
});
React.createElement("div", {
id: "w < w"
diff --git /dev/null b/ecmascript/transforms/react/tests/jsx/fixture/issue-299/1/input.js
new file mode 100644
--- /dev/null
+++ b/ecmascript/transforms/react/tests/jsx/fixture/issue-299/1/input.js
@@ -0,0 +1,1 @@
+<Page num='\\ '>ABC</Page>;
\ No newline at end of file
diff --git /dev/null b/ecmascript/transforms/react/tests/jsx/fixture/issue-299/1/output.mjs
new file mode 100644
--- /dev/null
+++ b/ecmascript/transforms/react/tests/jsx/fixture/issue-299/1/output.mjs
@@ -0,0 +1,3 @@
+React.createElement(Page, {
+ num: "\\\\ "
+}, "ABC");
diff --git /dev/null b/ecmascript/transforms/react/tests/jsx/fixture/issue-299/2/input.js
new file mode 100644
--- /dev/null
+++ b/ecmascript/transforms/react/tests/jsx/fixture/issue-299/2/input.js
@@ -0,0 +1,1 @@
+<Page num='\\\\'>ABC</Page>;
\ No newline at end of file
diff --git /dev/null b/ecmascript/transforms/react/tests/jsx/fixture/issue-299/2/output.mjs
new file mode 100644
--- /dev/null
+++ b/ecmascript/transforms/react/tests/jsx/fixture/issue-299/2/output.mjs
@@ -0,0 +1,3 @@
+React.createElement(Page, {
+ num: "\\\\\\\\"
+}, "ABC");
diff --git a/ecmascript/transforms/react/tests/jsx/fixture/react-automatic/should-escape-xhtml-jsxattribute/output.mjs b/ecmascript/transforms/react/tests/jsx/fixture/react-automatic/should-escape-xhtml-jsxattribute/output.mjs
--- a/ecmascript/transforms/react/tests/jsx/fixture/react-automatic/should-escape-xhtml-jsxattribute/output.mjs
+++ b/ecmascript/transforms/react/tests/jsx/fixture/react-automatic/should-escape-xhtml-jsxattribute/output.mjs
@@ -3,7 +3,7 @@ _jsx("div", {
id: "w\xf4w"
});
_jsx("div", {
- id: "w"
+ id: "\\w"
});
_jsx("div", {
id: "w < w"
diff --git a/ecmascript/transforms/react/tests/jsx/fixture/react/should-escape-xhtml-jsxattribute-babel-7/output.js b/ecmascript/transforms/react/tests/jsx/fixture/react/should-escape-xhtml-jsxattribute-babel-7/output.js
--- a/ecmascript/transforms/react/tests/jsx/fixture/react/should-escape-xhtml-jsxattribute-babel-7/output.js
+++ b/ecmascript/transforms/react/tests/jsx/fixture/react/should-escape-xhtml-jsxattribute-babel-7/output.js
@@ -2,7 +2,7 @@ React.createElement("div", {
id: "w\xf4w"
});
React.createElement("div", {
- id: "w"
+ id: "\\w"
});
React.createElement("div", {
id: "w < w"
diff --git a/ecmascript/transforms/react/tests/jsx/fixture/react/should-escape-xhtml-jsxattribute/output.js b/ecmascript/transforms/react/tests/jsx/fixture/react/should-escape-xhtml-jsxattribute/output.js
--- a/ecmascript/transforms/react/tests/jsx/fixture/react/should-escape-xhtml-jsxattribute/output.js
+++ b/ecmascript/transforms/react/tests/jsx/fixture/react/should-escape-xhtml-jsxattribute/output.js
@@ -1,9 +1,14 @@
+/*#__PURE__*/
React.createElement("div", {
- id: "w\xf4w"
+ id: "w\xF4w"
});
+
+/*#__PURE__*/
React.createElement("div", {
- id: "w"
+ id: "\\w"
});
+
+/*#__PURE__*/
React.createElement("div", {
id: "w < w"
-});
+});
\ No newline at end of file
diff --git /dev/null b/ecmascript/transforms/react/tests/jsx/fixture/vercel/2/input.js
new file mode 100644
--- /dev/null
+++ b/ecmascript/transforms/react/tests/jsx/fixture/vercel/2/input.js
@@ -0,0 +1,5 @@
+export default () => {
+ return <Input
+ pattern=".*\S+.*"
+ />
+}
\ No newline at end of file
diff --git /dev/null b/ecmascript/transforms/react/tests/jsx/fixture/vercel/2/output.mjs
new file mode 100644
--- /dev/null
+++ b/ecmascript/transforms/react/tests/jsx/fixture/vercel/2/output.mjs
@@ -0,0 +1,5 @@
+export default (()=>{
+ return React.createElement(Input, {
+ pattern: ".*\\S+.*"
+ });
+});
diff --git a/tests/fixture/issue-1661/case1/output/index.js b/tests/fixture/issue-1661/case1/output/index.js
--- a/tests/fixture/issue-1661/case1/output/index.js
+++ b/tests/fixture/issue-1661/case1/output/index.js
@@ -1,3 +1,3 @@
console.log(/*#__PURE__*/ React.createElement("h1", {
- value: "abc as"
+ value: "abc\\nas"
}, "s"));
diff --git a/tests/fixture/issue-2351/1/output/index.js b/tests/fixture/issue-2351/1/output/index.js
--- a/tests/fixture/issue-2351/1/output/index.js
+++ b/tests/fixture/issue-2351/1/output/index.js
@@ -1,3 +1,3 @@
var a = /*#__PURE__*/ React.createElement("abbr", {
- title: "d"
+ title: "\\d"
}, "\\d");
diff --git /dev/null b/tests/vercel/full/jsx/1/input/index.js
new file mode 100644
--- /dev/null
+++ b/tests/vercel/full/jsx/1/input/index.js
@@ -0,0 +1,5 @@
+export default () => {
+ return <Input
+ pattern=".*\S+.*"
+ />
+}
\ No newline at end of file
diff --git /dev/null b/tests/vercel/full/jsx/1/output/index.js
new file mode 100644
--- /dev/null
+++ b/tests/vercel/full/jsx/1/output/index.js
@@ -0,0 +1,6 @@
+import { jsx as a } from "react/jsx-runtime";
+export default function() {
+ return a(Input, {
+ pattern: ".*\\S+.*"
+ });
+};
| JSX attribute parsing applies incorrect escapes
**Describe the bug**
JSX attributes should be parsed as HTML (i.e. escapes are HTML entities), however SWC seems to parse them as JS strings (i.e. backslashes escape characters).
**Input code**
```js
const a = <abbr title="\d">\d</abbr>;
```
[See in REPL](https://swc.rs/repl#%7B%22rawCode%22%3A%22const%20a%20%3D%20%3Cabbr%20title%3D%5C%22%5C%5Cd%5C%22%3E%5C%5Cd%3C%2Fabbr%3E%3B%22%2C%22rawConfig%22%3A%22%7B%5Cn%20%20%5C%22jsc%5C%22%3A%20%7B%5Cn%20%20%20%20%5C%22parser%5C%22%3A%20%7B%5Cn%20%20%20%20%20%20%5C%22syntax%5C%22%3A%20%5C%22ecmascript%5C%22%2C%5Cn%20%20%20%20%20%20%5C%22jsx%5C%22%3A%20true%5Cn%20%20%20%20%7D%5Cn%20%20%7D%5Cn%7D%22%7D)
[And in the Babel REPL](https://babeljs.io/repl#?browsers=&build=&builtIns=false&corejs=3.6&spec=false&loose=false&code_lz=MYewdgzgLgBAhjAvDAPHARugTjKBLKAGwFNEAiAHQBMyA-alAeg21oG4g&debug=false&forceAllTransforms=false&shippedProposals=false&circleciRepo=&evaluate=true&fileSize=false&timeTravel=false&sourceType=module&lineWrap=false&presets=env%2Creact%2Ctypescript&prettier=false&targets=&version=7.15.7&externalPlugins=%40babel%2Fplugin-transform-classes%407.8.6&assumptions=%7B%7D)
**Config**
```json
{
"jsc": {
"parser": {
"syntax": "ecmascript",
"jsx": true
}
}
}
```
**Expected behavior**
The `title="\d"` should result in `\d` being the title, however the `\` is interpreted as a JS escape backslash and thus only `d` comes through.
Output should be (via [Babel REPL](https://babeljs.io/repl#?browsers=&build=&builtIns=false&corejs=3.6&spec=false&loose=false&code_lz=MYewdgzgLgBAhjAvDAPHARugTjKBLKAGwFNEAiAHQBMyA-alAeg21oG4g&debug=false&forceAllTransforms=false&shippedProposals=false&circleciRepo=&evaluate=true&fileSize=false&timeTravel=false&sourceType=module&lineWrap=false&presets=env%2Creact%2Ctypescript&prettier=false&targets=&version=7.15.7&externalPlugins=%40babel%2Fplugin-transform-classes%407.8.6&assumptions=%7B%7D)):
```js
var a = /*#__PURE__*/React.createElement("abbr", {
title: "\\d"
}, "\\d");
```
However, output is (via [SWC REPL](https://swc.rs/repl#%7B%22rawCode%22%3A%22const%20a%20%3D%20%3Cabbr%20title%3D%5C%22%5C%5Cd%5C%22%3E%5C%5Cd%3C%2Fabbr%3E%3B%22%2C%22rawConfig%22%3A%22%7B%5Cn%20%20%5C%22jsc%5C%22%3A%20%7B%5Cn%20%20%20%20%5C%22parser%5C%22%3A%20%7B%5Cn%20%20%20%20%20%20%5C%22syntax%5C%22%3A%20%5C%22ecmascript%5C%22%2C%5Cn%20%20%20%20%20%20%5C%22jsx%5C%22%3A%20true%5Cn%20%20%20%20%7D%5Cn%20%20%7D%5Cn%7D%22%7D)):
```js
var a = /*#__PURE__*/ React.createElement("abbr", {
title: "d"
}, "\\d");
```
**Version**
The version of @swc/core: `1.2.93`
**Additional context**
Add any other context about the problem here.
swc can minify a name to `let`, which is a syntax error
<!--
If you are using swc at work, please considering adding your company to https://swc.rs/users/
If then, your issue will be fixed more quickly.
-->
**Describe the bug**
Here's a quick demonstration
```
$ npm init -y && npm i @swc/cli @swc/core
$ echo '{"jsc":{"minify":{"mangle":{"topLevel":true}}}}' > .swcrc
$ node -p 'var a=[...Array(70000)].map((_,i)=>`v${i}`);`var ${a};export default [${a}]`' > foo.mjs
$ npx swc foo.mjs -ofoo.min.mjs
Successfully compiled 1 file with swc.
```
This generates a file with a syntax error:
```
$ node foo.min.mjs
...
SyntaxError: Unexpected strict mode reserved word
at Loader.moduleStrategy (node:internal/modules/esm/translators:146:18)
at async link (node:internal/modules/esm/module_job:67:21)
```
Running the file through swc again gives more information:
```
$ npx swc foo.min.mjs -ofoo.min.min.mjs
error: 'implements', 'interface', 'let', 'package', 'private', 'protected', 'public', 'static', or 'yield' cannot be used as an identifier in strict mode
--> foo.min.mjs:69399:5
|
69399 | let,
| ^^^
Caused by:
0: failed to process js file
1: failed to parse module: error was recoverable, but proceeding would result in wrong codegen
Error: Failed to compile 1 file with swc.
```
**Input code**
This appears to happen with any input file where the top-level scope has around 70,000 or more variables. You can generate such a file with the following command:
```
node -p 'var a=[...Array(70000)].map((_,i)=>`v${i}`);`var ${a};export default [${a}]`' > foo.mjs
```
**Config**
```json
{
"jsc": {
"minify": {
"mangle": {
"topLevel": true
}
}
}
}
```
**Expected behavior**
I expected minifying code with swc to generate valid minified output.
**Version**
The version of @swc/core: `1.2.97`
**Additional context**
N/A
| I don’t think the above PR fixes the issue; if I’m reading the test code correctly then it seems to enforce the broken behaviour.
| 2021-10-16T10:42:35 | 0.70 | 7f04ef47155cb8ec06eb4296d8c3c915c79221d6 | [
"jsx::tests::fixture_tests__jsx__fixture__vercel__2__input_js"
] | [
"error::size_of_error",
"lexer::number::tests::issue_480",
"lexer::number::tests::large_bin_number",
"lexer::number::tests::big_number_with_fract",
"lexer::number::tests::large_float_number",
"lexer::number::tests::num_big_many_zero",
"lexer::number::tests::num_big_exp",
"lexer::number::tests::num_inf... | [
"lexer::tests::issue_299_01",
"lexer::tests::issue_299_02",
"jsx::tests::fixture_tests__jsx__fixture__react__should_escape_xhtml_jsxattribute__input_js",
"jsx::tests::fixture_tests__jsx__fixture__react__should_escape_xhtml_jsxattribute_babel_7__input_js",
"jsx::tests::fixture_tests__jsx__fixture__react_auto... | [] |
swc-project/swc | 9,631 | swc-project__swc-9631 | [
"9630"
] | aa3bb873653d8c33a535a8b6547480e363575a05 | diff --git /dev/null b/.changeset/neat-ties-smoke.md
new file mode 100644
--- /dev/null
+++ b/.changeset/neat-ties-smoke.md
@@ -0,0 +1,6 @@
+---
+swc_ecma_codegen: patch
+swc_core: patch
+---
+
+fix(es/codegen): Emit space after div if rhs has leading comment
diff --git a/crates/swc_ecma_codegen/src/lib.rs b/crates/swc_ecma_codegen/src/lib.rs
--- a/crates/swc_ecma_codegen/src/lib.rs
+++ b/crates/swc_ecma_codegen/src/lib.rs
@@ -1267,6 +1267,13 @@ where
let need_post_space = if self.cfg.minify {
if is_kwd_op {
node.right.starts_with_alpha_num()
+ } else if node.op == op!("/") {
+ let span = node.right.span();
+
+ span.is_pure()
+ || self
+ .comments
+ .map_or(false, |comments| comments.has_leading(node.right.span().lo))
} else {
require_space_before_rhs(&node.right, &node.op)
}
| diff --git a/crates/swc_ecma_codegen/src/tests.rs b/crates/swc_ecma_codegen/src/tests.rs
--- a/crates/swc_ecma_codegen/src/tests.rs
+++ b/crates/swc_ecma_codegen/src/tests.rs
@@ -962,6 +962,19 @@ fn issue_8491_2() {
);
}
+#[test]
+fn issue_9630() {
+ test_from_to_custom_config(
+ "console.log(1 / /* @__PURE__ */ something())",
+ "console.log(1/ /* @__PURE__ */something())",
+ Config {
+ minify: true,
+ ..Default::default()
+ },
+ Default::default(),
+ );
+}
+
#[testing::fixture("tests/str-lits/**/*.txt")]
fn test_str_lit(input: PathBuf) {
test_str_lit_inner(input)
| Correctness - Division plus block comment is turned into line comment
### Describe the bug
When a division operator is followed by a block comment, the result must have a whitespace between them, otherwise it becomes a line comment.
```js
// original
a / /**/ b
// current behavior
a//**/b
// what it should look like
a/ /**/b
```
### Input code
```typescript
console.log(1 / /* @__PURE__ */something())
```
### Config
```json
{
"jsc": {
"externalHelpers": true,
"parser": {
"tsx": false,
"syntax": "typescript",
},
"preserveAllComments": true,
"target": "es5"
},
"minify": true,
}
```
### Playground link (or link to the minimal reproduction)
https://play.swc.rs/?version=1.7.28&code=H4sIAAAAAAAAA0vOzyvOz0nVy8lP1zBU0FfQ11JwiI8PCA1yjY9X0NIvzs9NLcnIzEvX0NQEAG54Sh4rAAAA&config=H4sIAAAAAAAAA12NPQ7CMAyF954i8tyVhQ2x9BpR9YqCnBDZBjWqendctXRg8%2Fvx95YuBHrqSNew%2BOkCs0FK5AFcIeqByRv9HtYoCjnL7pjOLqfIenTc01YsbjZZq9BRUjXa0%2FUHEjjogxvz%2FZUziv0tWZQHbGNAL9Qdn5RTSVM7q%2BsXeXWPCsAAAAA%3D
### SWC Info output
_No response_
### Expected behavior
The output should be this:
```js
console.log(1/ /* @__PURE__ */something())
```
### Actual behavior
The output is this:
```js
console.log(1//* @__PURE__ */something());
```
### Version
1.7.28
### Additional context
This just broke my builds very unexpectedly.
| 2024-10-11T10:23:50 | 2.0 | 82c6059121107a989b37c4ac0e8964cf54caaaae | [
"tests::issue_9630"
] | [
"expr::tests::conditional_expression",
"decl::tests::issue_1764",
"decl::tests::issue_275",
"expr::tests::bin_expr",
"expr::tests::array_spread",
"expr::tests::call_expression",
"decl::tests::class_static_block",
"decl::tests::single_argument_arrow_expression",
"expr::tests::bigint_property_key",
... | [
"tests::test_str_lit_tests__str_lits__pr_7985__splitted__16_txt",
"tests::test_str_lit_tests__str_lits__pr_7985__splitted__1_txt",
"tests::test_str_lit_tests__str_lits__pr_7985__splitted__15_txt",
"tests::test_str_lit_tests__str_lits__pr_7985__splitted__14_txt",
"tests::test_str_lit_tests__str_lits__pr_7985... | [] | |
sycamore-rs/sycamore | 760 | sycamore-rs__sycamore-760 | [
"718"
] | 1e492e2f7fe110663b26f44961aaf08c26717af7 | diff --git a/packages/sycamore-macro/src/component.rs b/packages/sycamore-macro/src/component.rs
--- a/packages/sycamore-macro/src/component.rs
+++ b/packages/sycamore-macro/src/component.rs
@@ -280,23 +280,31 @@ fn inline_props_impl(item: &mut ItemFn, attrs: Punctuated<Meta, Token![,]>) -> R
let props = inputs.clone().into_iter().collect::<Vec<_>>();
let generics: &mut Generics = &mut item.sig.generics;
let mut fields = Vec::new();
- inputs.into_iter().for_each(|arg| match arg {
- FnArg::Receiver(_) => {
- unreachable!("receiver cannot be a prop")
- }
- FnArg::Typed(pat_type) => match *pat_type.pat {
- Pat::Ident(ident_pat) => super::inline_props::push_field(
- &mut fields,
- generics,
- pat_type.attrs,
- ident_pat.clone().ident,
- *pat_type.ty,
- ),
- _ => {
- unreachable!("unexpected pattern!")
+ for arg in inputs {
+ match arg {
+ FnArg::Receiver(receiver) => {
+ return Err(syn::Error::new(
+ receiver.span(),
+ "`self` cannot be a property",
+ ))
}
- },
- });
+ FnArg::Typed(pat_type) => match *pat_type.pat {
+ Pat::Ident(ident_pat) => super::inline_props::push_field(
+ &mut fields,
+ generics,
+ pat_type.attrs,
+ ident_pat.clone().ident,
+ *pat_type.ty,
+ ),
+ _ => {
+ return Err(syn::Error::new(
+ pat_type.pat.span(),
+ "pattern must contain an identifier, properties cannot be unnamed",
+ ))
+ }
+ },
+ }
+ }
let generics_phantoms = generics.params.iter().enumerate().filter_map(|(i, param)| {
let phantom_ident = format_ident!("__phantom{i}");
diff --git a/packages/sycamore-macro/src/component.rs b/packages/sycamore-macro/src/component.rs
--- a/packages/sycamore-macro/src/component.rs
+++ b/packages/sycamore-macro/src/component.rs
@@ -342,8 +350,18 @@ fn inline_props_impl(item: &mut ItemFn, attrs: Punctuated<Meta, Token![,]>) -> R
// Get the ident (technically, patterns) of each prop.
let props_pats = props.iter().map(|arg| match arg {
- FnArg::Receiver(_) => unreachable!("receiver cannot be a prop"),
- FnArg::Typed(arg) => arg.pat.clone(),
+ FnArg::Receiver(_) => unreachable!(),
+ FnArg::Typed(arg) => match &*arg.pat {
+ Pat::Ident(pat) => {
+ if pat.subpat.is_some() {
+ let ident = &pat.ident;
+ quote! { #ident: #pat }
+ } else {
+ quote! { #pat }
+ }
+ }
+ _ => unreachable!(),
+ },
});
// Rewrite function signature.
let props_struct_generics = generics.split_for_impl().1;
| diff --git a/packages/sycamore-macro/tests/component/inline-props-fail.rs b/packages/sycamore-macro/tests/component/inline-props-fail.rs
--- a/packages/sycamore-macro/tests/component/inline-props-fail.rs
+++ b/packages/sycamore-macro/tests/component/inline-props-fail.rs
@@ -5,4 +5,20 @@ fn NotInlineProps() -> View {
view! {}
}
+#[component(inline_props)]
+fn ReceiverProp(self) -> View {
+ view! {}
+}
+
+struct Foo {
+ bar: i32,
+}
+
+#[component(inline_props)]
+fn PatternWithoutIdent(Foo { bar }: Foo) -> View {
+ view! {
+ (bar)
+ }
+}
+
fn main() {}
diff --git a/packages/sycamore-macro/tests/component/inline-props-fail.stderr b/packages/sycamore-macro/tests/component/inline-props-fail.stderr
--- a/packages/sycamore-macro/tests/component/inline-props-fail.stderr
+++ b/packages/sycamore-macro/tests/component/inline-props-fail.stderr
@@ -3,3 +3,23 @@ error: expected `inline_props`
|
3 | #[component(not_inline_props)]
| ^^^^^^^^^^^^^^^^
+
+error: `self` cannot be a property
+ --> tests/component/inline-props-fail.rs:9:17
+ |
+9 | fn ReceiverProp(self) -> View {
+ | ^^^^
+
+error: pattern must contain an identifier, properties cannot be unnamed
+ --> tests/component/inline-props-fail.rs:18:24
+ |
+18 | fn PatternWithoutIdent(Foo { bar }: Foo) -> View {
+ | ^^^
+
+error: `self` parameter is only allowed in associated functions
+ --> tests/component/inline-props-fail.rs:9:17
+ |
+9 | fn ReceiverProp(self) -> View {
+ | ^^^^ not semantically valid as function parameter
+ |
+ = note: associated functions are those in `impl` or `trait` definitions
diff --git a/packages/sycamore-macro/tests/component/inline-props-pass.rs b/packages/sycamore-macro/tests/component/inline-props-pass.rs
--- a/packages/sycamore-macro/tests/component/inline-props-pass.rs
+++ b/packages/sycamore-macro/tests/component/inline-props-pass.rs
@@ -1,4 +1,6 @@
-use sycamore::prelude::{component, view, Signal, View, Props};
+#![allow(unused_parens)]
+
+use sycamore::prelude::{component, view, Props, Signal, View};
#[component(inline_props)]
fn NoProps() -> View {
diff --git a/packages/sycamore-macro/tests/component/inline-props-pass.rs b/packages/sycamore-macro/tests/component/inline-props-pass.rs
--- a/packages/sycamore-macro/tests/component/inline-props-pass.rs
+++ b/packages/sycamore-macro/tests/component/inline-props-pass.rs
@@ -47,7 +49,10 @@ fn PropsWithImplGenerics(foo: impl std::fmt::Display + 'static) -> View {
}
#[component(inline_props)]
-fn PropsWithMixedImplGenerics<T: std::fmt::Display + 'static>(foo: T, bar: impl std::fmt::Display + 'static) -> View {
+fn PropsWithMixedImplGenerics<T: std::fmt::Display + 'static>(
+ foo: T,
+ bar: impl std::fmt::Display + 'static,
+) -> View {
view! {
(foo.to_string())
(bar.to_string())
diff --git a/packages/sycamore-macro/tests/component/inline-props-pass.rs b/packages/sycamore-macro/tests/component/inline-props-pass.rs
--- a/packages/sycamore-macro/tests/component/inline-props-pass.rs
+++ b/packages/sycamore-macro/tests/component/inline-props-pass.rs
@@ -57,7 +62,10 @@ fn PropsWithMixedImplGenerics<T: std::fmt::Display + 'static>(foo: T, bar: impl
#[component(inline_props)]
fn PropsWithVariousImplGenerics(
t1: [impl std::fmt::Display + 'static; 10],
- t2: (impl std::fmt::Display + 'static, impl std::fmt::Display + 'static),
+ t2: (
+ impl std::fmt::Display + 'static,
+ impl std::fmt::Display + 'static,
+ ),
t3: (impl std::fmt::Display + 'static),
t4: impl std::fmt::Display + 'static,
t5: *const (impl std::fmt::Display + 'static),
diff --git a/packages/sycamore-macro/tests/component/inline-props-pass.rs b/packages/sycamore-macro/tests/component/inline-props-pass.rs
--- a/packages/sycamore-macro/tests/component/inline-props-pass.rs
+++ b/packages/sycamore-macro/tests/component/inline-props-pass.rs
@@ -77,7 +85,9 @@ fn PropsWithVariousImplGenerics(
#[component(inline_props, derive(Clone), derive(Debug))]
fn AdditionalStructAttributes(dummy: String) -> View {
- let props = AdditionalStructAttributes_Props::builder().dummy(dummy).build();
+ let props = AdditionalStructAttributes_Props::builder()
+ .dummy(dummy)
+ .build();
view! {
(format!("{:?}", props.clone()))
diff --git a/packages/sycamore-macro/tests/component/inline-props-pass.rs b/packages/sycamore-macro/tests/component/inline-props-pass.rs
--- a/packages/sycamore-macro/tests/component/inline-props-pass.rs
+++ b/packages/sycamore-macro/tests/component/inline-props-pass.rs
@@ -85,10 +95,7 @@ fn AdditionalStructAttributes(dummy: String) -> View {
}
#[component(inline_props)]
-fn PropsWithAttributes(
- #[prop(default)]
- dummy: String,
-) -> View {
+fn PropsWithAttributes(#[prop(default)] dummy: String) -> View {
fn call_component() -> View {
view! {
PropsWithAttributes {}
diff --git a/packages/sycamore-macro/tests/component/inline-props-pass.rs b/packages/sycamore-macro/tests/component/inline-props-pass.rs
--- a/packages/sycamore-macro/tests/component/inline-props-pass.rs
+++ b/packages/sycamore-macro/tests/component/inline-props-pass.rs
@@ -99,4 +106,19 @@ fn PropsWithAttributes(
}
}
+#[derive(Debug)]
+struct Foo {
+ bar: u32,
+}
+
+#[component(inline_props)]
+fn PropsWithPatterns(mut a: u32, b @ Foo { bar }: Foo) -> View {
+ let _ = &mut a;
+ view! {
+ (a)
+ (format!("{b:?}"))
+ (bar)
+ }
+}
+
fn main() {}
| Inline props does not properly support patterns
**Describe the bug**
Using `#[component(inline_props)]` does not properly account for using patterns in function parameters.
**To Reproduce**
```rust
#[component(inline_props)]
fn Foo(mut value: i32) -> View {
view! {}
}
```
The problem occurs when generating code for the prop struct. This is what ends up being generated currently:
```rust
#[derive(Props)]
struct FooProps {
mut value: i32
}
```
where the `mut` is obviously out of place.
**Expected behavior**
Patterns work seamlessly with `#[component(inline_props)]`
**Environment**
- Sycamore: master
**Additional information**
Relevant file is `packages/sycamore-macro/src/component.rs`, specifically the `inline_props_impl` function.
| @lukechu10 i can work on this
> @lukechu10 i can work on this
Great! Let's me know if you run into any issues. | 2024-11-17T22:26:39 | 0.9 | 1e492e2f7fe110663b26f44961aaf08c26717af7 | [
"tests/component/inline-props-pass.rs",
"component_ui"
] | [
"tests/view/component-pass.rs",
"tests/view/element-pass.rs",
"tests/view/root-pass.rs",
"view_ui",
"tests/component/component-pass.rs",
"packages/sycamore-macro/src/lib.rs - cfg_not_ssr (line 90)",
"packages/sycamore-macro/src/lib.rs - cfg_ssr (line 66)"
] | [] | [] |
trishume/syntect | 530 | trishume__syntect-530 | [
"529",
"529"
] | 53413d566d6dc10e78cae2733a29e4aae7a98bdd | diff --git a/src/parsing/syntax_set.rs b/src/parsing/syntax_set.rs
--- a/src/parsing/syntax_set.rs
+++ b/src/parsing/syntax_set.rs
@@ -213,6 +213,7 @@ impl SyntaxSet {
/// This uses regexes that come with some sublime syntax grammars for matching things like
/// shebangs and mode lines like `-*- Mode: C -*-`
pub fn find_syntax_by_first_line<'a>(&'a self, s: &str) -> Option<&'a SyntaxReference> {
+ let s = s.strip_prefix("\u{feff}").unwrap_or(s); // Strip UTF-8 BOM
let cache = self.first_line_cache();
for &(ref reg, i) in cache.regexes.iter().rev() {
if reg.search(s, 0, s.len(), None) {
| diff --git a/src/parsing/syntax_set.rs b/src/parsing/syntax_set.rs
--- a/src/parsing/syntax_set.rs
+++ b/src/parsing/syntax_set.rs
@@ -1401,6 +1402,16 @@ mod tests {
assert_prototype_only_on(&["main"], &rebuilt, &rebuilt.syntaxes()[0]);
}
+ #[test]
+ fn find_syntax_set_from_line_with_bom() {
+ // Regression test for #529
+ let syntax_set = SyntaxSet::load_defaults_newlines();
+ let syntax_ref = syntax_set
+ .find_syntax_by_first_line("\u{feff}<?xml version=\"1.0\"?>")
+ .unwrap();
+ assert_eq!(syntax_ref.name, "XML");
+ }
+
fn assert_ops_contain(ops: &[(usize, ScopeStackOp)], expected: &(usize, ScopeStackOp)) {
assert!(
ops.contains(expected),
| UTF-8 BOM prevents syntax detection by the first line of file
This issue was reoprted to my project at first: https://github.com/rhysd/hgrep/issues/20
When [UTF-8 BOM](https://en.wikipedia.org/wiki/Byte_order_mark) is inserted at the head of file content, syntect fails to detect the syntax from the first line.
Repro 1:
```rust
// This works fine. It returns syntax definition for XML
let syntax_ref =
SyntaxSet::load_defaults_newlines()
.find_syntax_by_first_line("<?xml version=\"1.0\"?>")
.unwrap();
// This panics since no syntax definition was found
let syntax_ref =
SyntaxSet::load_defaults_newlines()
.find_syntax_by_first_line("\u{feff}<?xml version=\"1.0\"?>")
.unwrap();
```
Expected behavior is ignoring the UTF-8 BOM on `SyntaxSet::find_syntax_by_first_line`.
Repro 2:
1. Open an empty file `foo` in Vim
2. Run `:set fenc=utf-8 bomb`
3. Paste the following code and `:wq`
```xml
<?xml version="1.0"?>
<root></root>
```
Then try the following code:
```rust
let syntax_ref =
SyntaxSet::load_defaults_newlines()
.find_syntax_for_file("foo")
.unwrap(); // This panics
```
UTF-8 BOM prevents syntax detection by the first line of file
This issue was reoprted to my project at first: https://github.com/rhysd/hgrep/issues/20
When [UTF-8 BOM](https://en.wikipedia.org/wiki/Byte_order_mark) is inserted at the head of file content, syntect fails to detect the syntax from the first line.
Repro 1:
```rust
// This works fine. It returns syntax definition for XML
let syntax_ref =
SyntaxSet::load_defaults_newlines()
.find_syntax_by_first_line("<?xml version=\"1.0\"?>")
.unwrap();
// This panics since no syntax definition was found
let syntax_ref =
SyntaxSet::load_defaults_newlines()
.find_syntax_by_first_line("\u{feff}<?xml version=\"1.0\"?>")
.unwrap();
```
Expected behavior is ignoring the UTF-8 BOM on `SyntaxSet::find_syntax_by_first_line`.
Repro 2:
1. Open an empty file `foo` in Vim
2. Run `:set fenc=utf-8 bomb`
3. Paste the following code and `:wq`
```xml
<?xml version="1.0"?>
<root></root>
```
Then try the following code:
```rust
let syntax_ref =
SyntaxSet::load_defaults_newlines()
.find_syntax_for_file("foo")
.unwrap(); // This panics
```
| 2024-04-15T19:22:13 | 5.2 | 53413d566d6dc10e78cae2733a29e4aae7a98bdd | [
"parsing::syntax_set::tests::find_syntax_set_from_line_with_bom"
] | [
"highlighting::highlighter::tests::tricky_cases",
"highlighting::selector::tests::matching_works",
"highlighting::selector::tests::empty_stack_matching_works",
"highlighting::selector::tests::multiple_excludes_matching_works",
"highlighting::selector::tests::selectors_work",
"easy::tests::can_find_regions... | [
"public_api"
] | [] | |
trishume/syntect | 506 | trishume__syntect-506 | [
"469"
] | 100b8b2fa1545c27c792a5165a8ccfdbce2020b4 | diff --git a/src/highlighting/theme_load.rs b/src/highlighting/theme_load.rs
--- a/src/highlighting/theme_load.rs
+++ b/src/highlighting/theme_load.rs
@@ -300,7 +300,7 @@ impl ParseSettings for Theme {
_ => return Err(IncorrectSyntax),
};
let mut iter = items.into_iter();
- let settings = match iter.next() {
+ let mut settings = match iter.next() {
Some(Settings::Object(mut obj)) => {
match obj.remove("settings") {
Some(settings) => ThemeSettings::parse_settings(settings)?,
diff --git a/src/highlighting/theme_load.rs b/src/highlighting/theme_load.rs
--- a/src/highlighting/theme_load.rs
+++ b/src/highlighting/theme_load.rs
@@ -309,6 +309,20 @@ impl ParseSettings for Theme {
}
_ => return Err(UndefinedSettings),
};
+ if let Some(Settings::Object(obj)) = obj.remove("gutterSettings") {
+ for (key, value) in obj {
+ let color = Color::parse_settings(value).ok();
+ match &key[..] {
+ "background" => {
+ settings.gutter = settings.gutter.or(color)
+ }
+ "foreground" => {
+ settings.gutter_foreground = settings.gutter_foreground.or(color)
+ }
+ _ => (),
+ }
+ }
+ }
let mut scopes = Vec::new();
for json in iter {
// TODO option to disable best effort parsing and bubble up warnings
| diff --git a/src/highlighting/theme_set.rs b/src/highlighting/theme_set.rs
--- a/src/highlighting/theme_set.rs
+++ b/src/highlighting/theme_set.rs
@@ -108,7 +108,25 @@ mod tests {
r: 0xc0,
g: 0xc5,
b: 0xce,
- a: 0xFF,
+ a: 0xff,
+ }
+ );
+ assert_eq!(
+ theme.settings.gutter_foreground.unwrap(),
+ Color {
+ r: 0x65,
+ g: 0x73,
+ b: 0x7e,
+ a: 0xff,
+ }
+ );
+ assert_eq!(
+ theme.settings.gutter.unwrap(),
+ Color {
+ r: 0x34,
+ g: 0x3d,
+ b: 0x46,
+ a: 0xff,
}
);
// unreachable!();
| Support for gutterSettings
syntect currently reads from `gutter` and `gutterForeground` when loading themes: https://github.com/trishume/syntect/blob/c61ce60c72d67ad4e3dd06d60ff3b13ef4d2698c/src/highlighting/theme_load.rs#L254-L257
But many themes instead store their gutter settings in a `gutterSettings` key. See for instance the base16 themes at https://github.com/chriskempson/base16-textmate/tree/master/Themes . `gutterSettings` seems to be more popular than `gutterForeground`. A GitHub search for themes with `gutterSettings` returns [3k hits](https://github.com/search?q=path%3A*.tmTheme%20gutterSettings&type=code) while a search for `gutterForeground` returns [1.5k.](https://github.com/search?q=path%3A*.tmTheme+gutterForeground&type=code) Adding this would probably solve #315
| 2023-12-10T12:01:45 | 5.1 | 100b8b2fa1545c27c792a5165a8ccfdbce2020b4 | [
"highlighting::theme_set::tests::can_parse_common_themes"
] | [
"highlighting::selector::tests::empty_stack_matching_works",
"highlighting::highlighter::tests::tricky_cases",
"highlighting::selector::tests::matching_works",
"highlighting::selector::tests::selectors_work",
"highlighting::selector::tests::multiple_excludes_matching_works",
"dumps::tests::has_default_the... | [
"public_api"
] | [] | |
trishume/syntect | 456 | trishume__syntect-456 | [
"453"
] | 8678b7c68a7fbd195733aa23dcaa3dbaedaceb10 | diff --git a/src/util.rs b/src/util.rs
--- a/src/util.rs
+++ b/src/util.rs
@@ -228,7 +228,10 @@ impl<'a> Iterator for LinesWithEndings<'a> {
/// the `Vec<(Style, &str)>` returned by `highlight` methods. Look at the source
/// code for `modify_range` for an example usage.
#[allow(clippy::type_complexity)]
-pub fn split_at<'a, A: Clone>(v: &[(A, &'a str)], split_i: usize) -> (Vec<(A, &'a str)>, Vec<(A, &'a str)>) {
+pub fn split_at<'a, A: Clone>(
+ v: &[(A, &'a str)],
+ split_i: usize,
+) -> (Vec<(A, &'a str)>, Vec<(A, &'a str)>) {
// This function works by gradually reducing the problem into smaller sub-problems from the front
let mut rest = v;
let mut rest_split_i = split_i;
diff --git a/src/util.rs b/src/util.rs
--- a/src/util.rs
+++ b/src/util.rs
@@ -247,7 +250,14 @@ pub fn split_at<'a, A: Clone>(v: &[(A, &'a str)], split_i: usize) -> (Vec<(A, &'
let mut after = Vec::new();
// If necessary, split the token the split falls inside
if !rest.is_empty() && rest_split_i > 0 {
- let (sa, sb) = rest[0].1.split_at(rest_split_i);
+ let mut rest_split_index = rest_split_i;
+ // Splitting in the middle of a multibyte character causes panic,
+ // so if index is in the middle of such a character,
+ // reduce the index by 1.
+ while !rest[0].1.is_char_boundary(rest_split_index) && rest_split_index > 0 {
+ rest_split_index -= 1;
+ }
+ let (sa, sb) = rest[0].1.split_at(rest_split_index);
before.push((rest[0].0.clone(), sa));
after.push((rest[0].0.clone(), sb));
rest = &rest[1..];
| diff --git a/src/util.rs b/src/util.rs
--- a/src/util.rs
+++ b/src/util.rs
@@ -328,6 +338,31 @@ mod tests {
let (before, after) = split_at(l, 10); // out of bounds
assert_eq!((&before[..], &after[..]), (&[(0u8, "abc"), (1u8, "def"), (2u8, "ghi")][..], &[][..]));
+
+ let l = &[(0u8, "こんにちは"), (1u8, "世界"), (2u8, "!")];
+
+ let (before, after) = split_at(l, 3);
+
+ assert_eq!(
+ (&before[..], &after[..]),
+ (
+ &[(0u8, "こ")][..],
+ &[(0u8, "んにちは"), (1u8, "世界"), (2u8, "!")][..]
+ )
+ );
+
+ //Splitting inside a multibyte character could cause panic,
+ //so if index is inside such a character,
+ //index is decreased by 1.
+ let (before, after) = split_at(l, 4);
+
+ assert_eq!(
+ (&before[..], &after[..]),
+ (
+ &[(0u8, "こ")][..],
+ &[(0u8, "んにちは"), (1u8, "世界"), (2u8, "!")][..]
+ )
+ );
}
#[test]
| util::split_at panics with multibyte characters
Sorry if duplicated.
To reproduce panic, run the following code:
```rs
use syntect::easy::HighlightLines;
use syntect::highlighting::{Style, ThemeSet};
use syntect::parsing::SyntaxSet;
use syntect::util::{as_24_bit_terminal_escaped, split_at, LinesWithEndings};
fn main() {
let ps = SyntaxSet::load_defaults_newlines();
let ts = ThemeSet::load_defaults();
let syntax = ps.find_syntax_by_extension("rs").unwrap();
let mut h = HighlightLines::new(syntax, &ts.themes["base16-ocean.dark"]);
let s = "日本の首都は東京です";
for line in LinesWithEndings::from(s) {
let ranges: Vec<(Style, &str)> = h.highlight_line(line, &ps).unwrap();
//thread 'main' panicked at 'byte index 4 is not a char boundary; it is inside '本' (bytes 3..6) of `日本の首都は東京です`', library/core/src/str/mod.rs:127:5
let ranges = split_at(&ranges, 4);
let escaped = as_24_bit_terminal_escaped(&ranges.0, false);
print!("{}", escaped);
println!();
}
}
```
I think it would be better to return `Result<(Vec..., Vec...), Error>` instead of `(Vec... , Vec...)` to avoid this type of panic.
| Seems this panic comes from this line:
```
let (sa, sb) = rest[0].1.split_at(rest_split_i);
```
[[str - Rust](https://doc.rust-lang.org/std/primitive.str.html#method.split_at)](https://doc.rust-lang.org/std/primitive.str.html#method.split_at)
> Panics if mid is not on a UTF-8 code point boundary, or if it is past the end of the last code point of the string slice.
So it may be resolved by using `is_char_boundary()` ([[str - Rust](https://doc.rust-lang.org/std/primitive.str.html#method.is_char_boundary)](https://doc.rust-lang.org/std/primitive.str.html#method.is_char_boundary)), but depends on how you think about this. | 2022-11-03T13:57:07 | 5.0 | 8678b7c68a7fbd195733aa23dcaa3dbaedaceb10 | [
"util::tests::test_split_at"
] | [
"highlighting::selector::tests::empty_stack_matching_works",
"highlighting::highlighter::tests::tricky_cases",
"highlighting::selector::tests::matching_works",
"highlighting::selector::tests::multiple_excludes_matching_works",
"highlighting::selector::tests::selectors_work",
"dumps::tests::has_default_the... | [] | [] |
trishume/syntect | 170 | trishume__syntect-170 | [
"169"
] | 42f3da410ed78f3fcadf56e64f5990539c058110 | diff --git a/src/parsing/syntax_set.rs b/src/parsing/syntax_set.rs
--- a/src/parsing/syntax_set.rs
+++ b/src/parsing/syntax_set.rs
@@ -209,10 +209,10 @@ impl SyntaxSet {
path_obj: P)
-> io::Result<Option<&SyntaxDefinition>> {
let path: &Path = path_obj.as_ref();
+ let file_name = path.file_name().and_then(|n| n.to_str()).unwrap_or("");
let extension = path.extension().and_then(|x| x.to_str()).unwrap_or("");
- let ext_syntax = self.find_syntax_by_extension(extension)
- .or_else(|| self.find_syntax_by_extension(
- path.file_name().and_then(|n| n.to_str()).unwrap_or("")));
+ let ext_syntax = self.find_syntax_by_extension(file_name).or_else(
+ || self.find_syntax_by_extension(extension));
let line_syntax = if ext_syntax.is_none() {
let mut line = String::new();
let f = File::open(path)?;
| diff --git a/src/parsing/syntax_set.rs b/src/parsing/syntax_set.rs
--- a/src/parsing/syntax_set.rs
+++ b/src/parsing/syntax_set.rs
@@ -418,9 +418,25 @@ impl FirstLineCache {
mod tests {
use super::*;
use parsing::{Scope, syntax_definition};
+ use std::collections::HashMap;
+
#[test]
fn can_load() {
let mut ps = SyntaxSet::load_from_folder("testdata/Packages").unwrap();
+
+ let cmake_dummy_syntax = SyntaxDefinition {
+ name: "CMake".to_string(),
+ file_extensions: vec!["CMakeLists.txt".to_string(), "cmake".to_string()],
+ scope: Scope::new("source.cmake").unwrap(),
+ first_line_match: None,
+ hidden: false,
+ prototype: None,
+ variables: HashMap::new(),
+ contexts: HashMap::new(),
+ };
+
+ ps.add_syntax(cmake_dummy_syntax);
+
assert_eq!(&ps.find_syntax_by_first_line("#!/usr/bin/env node").unwrap().name,
"JavaScript");
ps.load_plain_text_syntax();
diff --git a/src/parsing/syntax_set.rs b/src/parsing/syntax_set.rs
--- a/src/parsing/syntax_set.rs
+++ b/src/parsing/syntax_set.rs
@@ -440,6 +456,10 @@ mod tests {
"Go");
assert_eq!(&ps.find_syntax_for_file(".bashrc").unwrap().unwrap().name,
"Bourne Again Shell (bash)");
+ assert_eq!(&ps.find_syntax_for_file("CMakeLists.txt").unwrap().unwrap().name,
+ "CMake");
+ assert_eq!(&ps.find_syntax_for_file("test.cmake").unwrap().unwrap().name,
+ "CMake");
assert_eq!(&ps.find_syntax_for_file("Rakefile").unwrap().unwrap().name, "Ruby");
assert!(&ps.find_syntax_by_first_line("derp derp hi lol").is_none());
assert_eq!(&ps.find_syntax_by_path("Packages/Rust/Rust.sublime-syntax").unwrap().name,
| Unable to find correct syntax for "CMakeLists.txt"
This [CMake.sublime-syntax](https://github.com/zyxar/Sublime-CMakeLists/blob/ff9a800a4ca942edd095de553ca05fba03b02275/CMake.sublime-syntax#L7) file specifies `CMakeLists.txt` as a "file extension", but `syntect` is currently not able to find the correct syntax, because it searches for the actual file extension first (`txt`) and finds the "Plain Text" syntax.
| 2018-05-30T04:56:38 | 2.0 | 42f3da410ed78f3fcadf56e64f5990539c058110 | [
"parsing::syntax_set::tests::can_load"
] | [
"highlighting::selector::tests::selectors_work",
"highlighting::selector::tests::multiple_excludes_matching_works",
"highlighting::selector::tests::matching_works",
"highlighting::selector::tests::empty_stack_matching_works",
"dumps::tests::has_default_themes",
"highlighting::theme_set::tests::can_parse_c... | [] | [] | |
trishume/syntect | 148 | trishume__syntect-148 | [
"139"
] | 7243993a5dd4fdb04e60091ac4b6f9ddfa68ee7a | diff --git a/src/parsing/yaml_load.rs b/src/parsing/yaml_load.rs
--- a/src/parsing/yaml_load.rs
+++ b/src/parsing/yaml_load.rs
@@ -54,8 +54,11 @@ struct ParserState<'a> {
lines_include_newline: bool,
}
+// `__start` must not include prototypes from the actual syntax definition,
+// otherwise it's possible that a prototype makes us pop out of `__start`.
static START_CONTEXTS: &'static str = "
__start:
+ - meta_include_prototype: false
- match: ''
push: __main
__main:
| diff --git a/src/parsing/parser.rs b/src/parsing/parser.rs
--- a/src/parsing/parser.rs
+++ b/src/parsing/parser.rs
@@ -675,6 +675,27 @@ mod tests {
expect_scope_stacks(&line, &expect, TEST_SYNTAX);
}
+ #[test]
+ fn can_parse_prototype_that_pops_main() {
+ let syntax = r#"
+name: test
+scope: source.test
+contexts:
+ prototype:
+ # This causes us to pop out of the main context. Sublime Text handles that
+ # by pushing main back automatically.
+ - match: (?=!)
+ pop: true
+ main:
+ - match: foo
+ scope: test.good
+"#;
+
+ let line = "foo!";
+ let expect = ["<source.test>, <test.good>"];
+ expect_scope_stacks(&line, &expect, syntax);
+ }
+
#[test]
fn can_parse_syntax_with_newline_in_character_class() {
let syntax = r#"
| Panicked at 'attempt to subtract with overflow', src/parsing/parser.rs
I got a panic when parsing a Java file, here's a minimal reproduction case:
```shell
$ echo '// TODO "%>"' > Minimal.java
$ RUST_BACKTRACE=1 cargo run --example synstats Minimal.java
```
Stack trace:
```
thread 'main' panicked at 'attempt to subtract with overflow', src/parsing/parser.rs:147:41
stack backtrace:
0: std::sys::unix::backtrace::tracing::imp::unwind_backtrace
at src/libstd/sys/unix/backtrace/tracing/gcc_s.rs:49
1: std::panicking::default_hook::{{closure}}
at src/libstd/sys_common/backtrace.rs:68
at src/libstd/sys_common/backtrace.rs:57
at src/libstd/panicking.rs:381
2: std::panicking::default_hook
at src/libstd/panicking.rs:397
3: std::panicking::begin_panic
at src/libstd/panicking.rs:577
4: std::panicking::begin_panic
at src/libstd/panicking.rs:538
5: std::panicking::try::do_call
at src/libstd/panicking.rs:522
6: std::panicking::try::do_call
at src/libstd/panicking.rs:498
7: <core::ops::range::Range<Idx> as core::fmt::Debug>::fmt
at src/libcore/panicking.rs:71
8: <core::ops::range::Range<Idx> as core::fmt::Debug>::fmt
at src/libcore/panicking.rs:51
9: syntect::parsing::parser::ParseState::parse_next_token
at src/parsing/parser.rs:147
10: <F as core::str::pattern::Pattern<'a>>::into_searcher
at src/parsing/parser.rs:122
11: synstats::count
at examples/synstats.rs:138
12: synstats::main
at examples/synstats.rs:163
13: std::rt::lang_start::{{closure}}
at /Users/travis/build/rust-lang/rust/src/libstd/rt.rs:74
14: std::panicking::try::do_call
at src/libstd/rt.rs:59
at src/libstd/panicking.rs:480
15: panic_unwind::dwarf::eh::read_encoded_pointer
at src/libpanic_unwind/lib.rs:101
16: std::sys_common::bytestring::debug_fmt_bytestring
at src/libstd/panicking.rs:459
at src/libstd/panic.rs:365
at src/libstd/rt.rs:58
17: std::rt::lang_start
at /Users/travis/build/rust-lang/rust/src/libstd/rt.rs:74
18: <synstats::Stats as core::default::Default>::default
```
In a release build the error looks like this:
```
thread 'main' panicked at 'index out of bounds: the len is 0 but the index is 18446744073709551615'
```
It looks like `self.stack` is empty here: https://github.com/trishume/syntect/blob/v2.0.0/src/parsing/parser.rs#L147
(CC @keith-hall)
| I'm guessing the `main` context gets pushed off the stack due to https://github.com/sublimehq/Packages/blob/1c1aa090b6e892620faf4c05e29dc4797deff59e/Java/Java.sublime-syntax#L26-L28. I thought we re-push it like ST does when it is the root syntax, but I guess it's not working properly in this case due to the lookahead not consuming anything. Nice find @robinst.
EDIT: https://github.com/trishume/syntect/issues/127 could be related | 2018-04-26T11:36:29 | 2.0 | 42f3da410ed78f3fcadf56e64f5990539c058110 | [
"parsing::parser::tests::can_parse_prototype_that_pops_main"
] | [
"highlighting::selector::tests::multiple_excludes_matching_works",
"highlighting::selector::tests::matching_works",
"highlighting::selector::tests::empty_stack_matching_works",
"highlighting::selector::tests::selectors_work",
"dumps::tests::has_default_themes",
"highlighting::theme_set::tests::can_parse_c... | [] | [] |
quickwit-oss/tantivy | 2,333 | quickwit-oss__tantivy-2333 | [
"2332"
] | 4e79e11007b6c9fc3d3965beefe33e1ffcf641d3 | diff --git a/query-grammar/src/query_grammar.rs b/query-grammar/src/query_grammar.rs
--- a/query-grammar/src/query_grammar.rs
+++ b/query-grammar/src/query_grammar.rs
@@ -218,27 +218,14 @@ fn term_or_phrase_infallible(inp: &str) -> JResult<&str, Option<UserInputLeaf>>
}
fn term_group(inp: &str) -> IResult<&str, UserInputAst> {
- let occur_symbol = alt((
- value(Occur::MustNot, char('-')),
- value(Occur::Must, char('+')),
- ));
-
map(
tuple((
terminated(field_name, multispace0),
- delimited(
- tuple((char('('), multispace0)),
- separated_list0(multispace1, tuple((opt(occur_symbol), term_or_phrase))),
- char(')'),
- ),
+ delimited(tuple((char('('), multispace0)), ast, char(')')),
)),
- |(field_name, terms)| {
- UserInputAst::Clause(
- terms
- .into_iter()
- .map(|(occur, leaf)| (occur, leaf.set_field(Some(field_name.clone())).into()))
- .collect(),
- )
+ |(field_name, mut ast)| {
+ ast.set_default_field(field_name);
+ ast
},
)(inp)
}
diff --git a/query-grammar/src/query_grammar.rs b/query-grammar/src/query_grammar.rs
--- a/query-grammar/src/query_grammar.rs
+++ b/query-grammar/src/query_grammar.rs
@@ -258,46 +245,18 @@ fn term_group_precond(inp: &str) -> IResult<&str, (), ()> {
}
fn term_group_infallible(inp: &str) -> JResult<&str, UserInputAst> {
- let (mut inp, (field_name, _, _, _)) =
+ let (inp, (field_name, _, _, _)) =
tuple((field_name, multispace0, char('('), multispace0))(inp).expect("precondition failed");
- let mut terms = Vec::new();
- let mut errs = Vec::new();
-
- let mut first_round = true;
- loop {
- let mut space_error = if first_round {
- first_round = false;
- Vec::new()
- } else {
- let (rest, (_, err)) = space1_infallible(inp)?;
- inp = rest;
- err
- };
- if inp.is_empty() {
- errs.push(LenientErrorInternal {
- pos: inp.len(),
- message: "missing )".to_string(),
- });
- break Ok((inp, (UserInputAst::Clause(terms), errs)));
- }
- if let Some(inp) = inp.strip_prefix(')') {
- break Ok((inp, (UserInputAst::Clause(terms), errs)));
- }
- // only append missing space error if we did not reach the end of group
- errs.append(&mut space_error);
-
- // here we do the assumption term_or_phrase_infallible always consume something if the
- // first byte is not `)` or ' '. If it did not, we would end up looping.
-
- let (rest, ((occur, leaf), mut err)) =
- tuple_infallible((occur_symbol, term_or_phrase_infallible))(inp)?;
- errs.append(&mut err);
- if let Some(leaf) = leaf {
- terms.push((occur, leaf.set_field(Some(field_name.clone())).into()));
- }
- inp = rest;
- }
+ let res = delimited_infallible(
+ nothing,
+ map(ast_infallible, |(mut ast, errors)| {
+ ast.set_default_field(field_name.to_string());
+ (ast, errors)
+ }),
+ opt_i_err(char(')'), "expected ')'"),
+ )(inp);
+ res
}
fn exists(inp: &str) -> IResult<&str, UserInputLeaf> {
diff --git a/query-grammar/src/user_input_ast.rs b/query-grammar/src/user_input_ast.rs
--- a/query-grammar/src/user_input_ast.rs
+++ b/query-grammar/src/user_input_ast.rs
@@ -44,6 +44,26 @@ impl UserInputLeaf {
},
}
}
+
+ pub(crate) fn set_default_field(&mut self, default_field: String) {
+ match self {
+ UserInputLeaf::Literal(ref mut literal) if literal.field_name.is_none() => {
+ literal.field_name = Some(default_field)
+ }
+ UserInputLeaf::All => {
+ *self = UserInputLeaf::Exists {
+ field: default_field,
+ }
+ }
+ UserInputLeaf::Range { ref mut field, .. } if field.is_none() => {
+ *field = Some(default_field)
+ }
+ UserInputLeaf::Set { ref mut field, .. } if field.is_none() => {
+ *field = Some(default_field)
+ }
+ _ => (), // field was already set, do nothing
+ }
+ }
}
impl Debug for UserInputLeaf {
diff --git a/query-grammar/src/user_input_ast.rs b/query-grammar/src/user_input_ast.rs
--- a/query-grammar/src/user_input_ast.rs
+++ b/query-grammar/src/user_input_ast.rs
@@ -205,6 +225,16 @@ impl UserInputAst {
pub fn or(asts: Vec<UserInputAst>) -> UserInputAst {
UserInputAst::compose(Occur::Should, asts)
}
+
+ pub(crate) fn set_default_field(&mut self, field: String) {
+ match self {
+ UserInputAst::Clause(clauses) => clauses
+ .iter_mut()
+ .for_each(|(_, ast)| ast.set_default_field(field.clone())),
+ UserInputAst::Leaf(leaf) => leaf.set_default_field(field),
+ UserInputAst::Boost(ref mut ast, _) => ast.set_default_field(field),
+ }
+ }
}
impl From<UserInputLiteral> for UserInputLeaf {
| diff --git a/query-grammar/src/query_grammar.rs b/query-grammar/src/query_grammar.rs
--- a/query-grammar/src/query_grammar.rs
+++ b/query-grammar/src/query_grammar.rs
@@ -1468,8 +1427,18 @@ mod test {
#[test]
fn test_parse_query_term_group() {
- test_parse_query_to_ast_helper(r#"field:(abc)"#, r#"(*"field":abc)"#);
+ test_parse_query_to_ast_helper(r#"field:(abc)"#, r#""field":abc"#);
test_parse_query_to_ast_helper(r#"field:(+a -"b c")"#, r#"(+"field":a -"field":"b c")"#);
+ test_parse_query_to_ast_helper(r#"field:(a AND "b c")"#, r#"(+"field":a +"field":"b c")"#);
+ test_parse_query_to_ast_helper(r#"field:(a OR "b c")"#, r#"(?"field":a ?"field":"b c")"#);
+ test_parse_query_to_ast_helper(
+ r#"field:(a OR (b AND c))"#,
+ r#"(?"field":a ?(+"field":b +"field":c))"#,
+ );
+ test_parse_query_to_ast_helper(
+ r#"field:(a [b TO c])"#,
+ r#"(*"field":a *"field":["b" TO "c"])"#,
+ );
test_is_parse_err(r#"field:(+a -"b c""#, r#"(+"field":a -"field":"b c")"#);
}
| support more syntax for field grouping
we support field grouping like this `title:(+return +"pink panther")`, but not like `title:(return AND "pink panther")`. That should be added to be as close as possible to lucene when it can reasonably be done
| after testing, what can be put inside parenthesis is basically anything. For instance `title:(return AND ("pink panther" OR body:Clouseau))` will work, and match the same as `title:return AND (title:"pink panther" OR body:Clouseau)`.
It works as if inside of parenthesis, the default field was title instead of whatever it's supposed to be, but accepting the use of other fields too. It also allows range, phrase, prefix... | 2024-03-22T00:21:27 | 1.5 | b644d78a326dca4abb4462065eb028e02d779eec | [
"query_grammar::test::test_parse_query_term_group"
] | [
"occur::test::test_occur_compose",
"query_grammar::test::test_date_time",
"query_grammar::test::test_occur_leaf",
"query_grammar::test::test_escaping",
"query_grammar::test::test_exist_query",
"query_grammar::test::test_field_name",
"query_grammar::test::test_parse_empty_to_ast",
"query_grammar::test:... | [] | [] |
quickwit-oss/tantivy | 874 | quickwit-oss__tantivy-874 | [
"796"
] | 2737822620362dd348b8d9396a26dd68cd928337 | diff --git a/Cargo.toml b/Cargo.toml
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -37,7 +37,7 @@ owning_ref = "0.4"
stable_deref_trait = "1.0.0"
rust-stemmers = "1.2"
downcast-rs = { version="1.0" }
-tantivy-query-grammar = { version="0.13", path="./query-grammar" }
+tantivy-query-grammar = { version="0.14.0-dev", path="./query-grammar" }
bitpacking = {version="0.8", default-features = false, features=["bitpacker4x"]}
census = "0.4"
fnv = "1.0.6"
diff --git a/query-grammar/Cargo.toml b/query-grammar/Cargo.toml
--- a/query-grammar/Cargo.toml
+++ b/query-grammar/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "tantivy-query-grammar"
-version = "0.13.0"
+version = "0.14.0-dev"
authors = ["Paul Masurel <paul.masurel@gmail.com>"]
license = "MIT"
categories = ["database-implementations", "data-structures"]
diff --git a/query-grammar/src/query_grammar.rs b/query-grammar/src/query_grammar.rs
--- a/query-grammar/src/query_grammar.rs
+++ b/query-grammar/src/query_grammar.rs
@@ -9,8 +9,10 @@ use combine::{
fn field<'a>() -> impl Parser<&'a str, Output = String> {
(
- letter(),
- many(satisfy(|c: char| c.is_alphanumeric() || c == '_')),
+ (letter().or(char('_'))),
+ many(satisfy(|c: char| {
+ c.is_alphanumeric() || c == '_' || c == '-'
+ })),
)
.skip(char(':'))
.map(|(s1, s2): (char, String)| format!("{}{}", s1, s2))
diff --git a/src/schema/field_entry.rs b/src/schema/field_entry.rs
--- a/src/schema/field_entry.rs
+++ b/src/schema/field_entry.rs
@@ -1,5 +1,5 @@
-use crate::schema::IntOptions;
use crate::schema::TextOptions;
+use crate::schema::{is_valid_field_name, IntOptions};
use crate::schema::FieldType;
use serde::de::{self, MapAccess, Visitor};
diff --git a/src/schema/field_entry.rs b/src/schema/field_entry.rs
--- a/src/schema/field_entry.rs
+++ b/src/schema/field_entry.rs
@@ -24,6 +24,7 @@ impl FieldEntry {
/// Creates a new u64 field entry in the schema, given
/// a name, and some options.
pub fn new_text(field_name: String, text_options: TextOptions) -> FieldEntry {
+ assert!(is_valid_field_name(&field_name));
FieldEntry {
name: field_name,
field_type: FieldType::Str(text_options),
diff --git a/src/schema/field_entry.rs b/src/schema/field_entry.rs
--- a/src/schema/field_entry.rs
+++ b/src/schema/field_entry.rs
@@ -33,6 +34,7 @@ impl FieldEntry {
/// Creates a new u64 field entry in the schema, given
/// a name, and some options.
pub fn new_u64(field_name: String, field_type: IntOptions) -> FieldEntry {
+ assert!(is_valid_field_name(&field_name));
FieldEntry {
name: field_name,
field_type: FieldType::U64(field_type),
diff --git a/src/schema/field_entry.rs b/src/schema/field_entry.rs
--- a/src/schema/field_entry.rs
+++ b/src/schema/field_entry.rs
@@ -42,6 +44,7 @@ impl FieldEntry {
/// Creates a new i64 field entry in the schema, given
/// a name, and some options.
pub fn new_i64(field_name: String, field_type: IntOptions) -> FieldEntry {
+ assert!(is_valid_field_name(&field_name));
FieldEntry {
name: field_name,
field_type: FieldType::I64(field_type),
diff --git a/src/schema/field_entry.rs b/src/schema/field_entry.rs
--- a/src/schema/field_entry.rs
+++ b/src/schema/field_entry.rs
@@ -51,6 +54,7 @@ impl FieldEntry {
/// Creates a new f64 field entry in the schema, given
/// a name, and some options.
pub fn new_f64(field_name: String, field_type: IntOptions) -> FieldEntry {
+ assert!(is_valid_field_name(&field_name));
FieldEntry {
name: field_name,
field_type: FieldType::F64(field_type),
diff --git a/src/schema/field_entry.rs b/src/schema/field_entry.rs
--- a/src/schema/field_entry.rs
+++ b/src/schema/field_entry.rs
@@ -60,6 +64,7 @@ impl FieldEntry {
/// Creates a new date field entry in the schema, given
/// a name, and some options.
pub fn new_date(field_name: String, field_type: IntOptions) -> FieldEntry {
+ assert!(is_valid_field_name(&field_name));
FieldEntry {
name: field_name,
field_type: FieldType::Date(field_type),
diff --git a/src/schema/field_entry.rs b/src/schema/field_entry.rs
--- a/src/schema/field_entry.rs
+++ b/src/schema/field_entry.rs
@@ -68,6 +73,7 @@ impl FieldEntry {
/// Creates a field entry for a facet.
pub fn new_facet(field_name: String) -> FieldEntry {
+ assert!(is_valid_field_name(&field_name));
FieldEntry {
name: field_name,
field_type: FieldType::HierarchicalFacet,
diff --git a/src/schema/field_entry.rs b/src/schema/field_entry.rs
--- a/src/schema/field_entry.rs
+++ b/src/schema/field_entry.rs
@@ -76,6 +82,7 @@ impl FieldEntry {
/// Creates a field entry for a bytes field
pub fn new_bytes(field_name: String) -> FieldEntry {
+ assert!(is_valid_field_name(&field_name));
FieldEntry {
name: field_name,
field_type: FieldType::Bytes,
diff --git a/src/schema/mod.rs b/src/schema/mod.rs
--- a/src/schema/mod.rs
+++ b/src/schema/mod.rs
@@ -149,14 +149,16 @@ pub use self::int_options::IntOptions;
use once_cell::sync::Lazy;
use regex::Regex;
+/// Regular expression representing the restriction on a valid field names.
+pub const FIELD_NAME_PATTERN: &'static str = r#"^[_a-zA-Z][_\-a-zA-Z0-9]*$"#;
+
/// Validator for a potential `field_name`.
/// Returns true iff the name can be use for a field name.
///
/// A field name must start by a letter `[a-zA-Z]`.
/// The other characters can be any alphanumic character `[a-ZA-Z0-9]` or `_`.
pub fn is_valid_field_name(field_name: &str) -> bool {
- static FIELD_NAME_PTN: Lazy<Regex> =
- Lazy::new(|| Regex::new("^[a-zA-Z][_a-zA-Z0-9]*$").unwrap());
+ static FIELD_NAME_PTN: Lazy<Regex> = Lazy::new(|| Regex::new(FIELD_NAME_PATTERN).unwrap());
FIELD_NAME_PTN.is_match(field_name)
}
| diff --git a/query-grammar/src/query_grammar.rs b/query-grammar/src/query_grammar.rs
--- a/query-grammar/src/query_grammar.rs
+++ b/query-grammar/src/query_grammar.rs
@@ -279,6 +281,8 @@ pub fn parse_to_ast<'a>() -> impl Parser<&'a str, Output = UserInputAST> {
#[cfg(test)]
mod test {
+ type TestParseResult = Result<(), StringStreamError>;
+
use super::*;
use combine::parser::Parser;
diff --git a/query-grammar/src/query_grammar.rs b/query-grammar/src/query_grammar.rs
--- a/query-grammar/src/query_grammar.rs
+++ b/query-grammar/src/query_grammar.rs
@@ -296,9 +300,10 @@ mod test {
}
#[test]
- fn test_occur_symbol() {
- assert_eq!(super::occur_symbol().parse("-"), Ok((Occur::MustNot, "")));
- assert_eq!(super::occur_symbol().parse("+"), Ok((Occur::Must, "")));
+ fn test_occur_symbol() -> TestParseResult {
+ assert_eq!(super::occur_symbol().parse("-")?, (Occur::MustNot, ""));
+ assert_eq!(super::occur_symbol().parse("+")?, (Occur::Must, ""));
+ Ok(())
}
#[test]
diff --git a/query-grammar/src/query_grammar.rs b/query-grammar/src/query_grammar.rs
--- a/query-grammar/src/query_grammar.rs
+++ b/query-grammar/src/query_grammar.rs
@@ -410,6 +415,25 @@ mod test {
assert_eq!(format!("{:?}", ast), "\"abc\"");
}
+ #[test]
+ fn test_field_name() -> TestParseResult {
+ assert_eq!(
+ super::field().parse("my-field-name:a")?,
+ ("my-field-name".to_string(), "a")
+ );
+ assert_eq!(
+ super::field().parse("my_field_name:a")?,
+ ("my_field_name".to_string(), "a")
+ );
+ assert!(super::field().parse(":a").is_err());
+ assert!(super::field().parse("-my_field:a").is_err());
+ assert_eq!(
+ super::field().parse("_my_field:a")?,
+ ("_my_field".to_string(), "a")
+ );
+ Ok(())
+ }
+
#[test]
fn test_range_parser() {
// testing the range() parser separately
diff --git a/src/schema/field_entry.rs b/src/schema/field_entry.rs
--- a/src/schema/field_entry.rs
+++ b/src/schema/field_entry.rs
@@ -268,6 +275,12 @@ mod tests {
use crate::schema::TEXT;
use serde_json;
+ #[test]
+ #[should_panic]
+ fn test_invalid_field_name_should_panic() {
+ FieldEntry::new_text("-hello".to_string(), TEXT);
+ }
+
#[test]
fn test_json_serialization() {
let field_value = FieldEntry::new_text(String::from("title"), TEXT);
diff --git a/src/schema/mod.rs b/src/schema/mod.rs
--- a/src/schema/mod.rs
+++ b/src/schema/mod.rs
@@ -170,6 +172,11 @@ mod tests {
assert!(is_valid_field_name("text"));
assert!(is_valid_field_name("text0"));
assert!(!is_valid_field_name("0text"));
+ assert!(is_valid_field_name("field-name"));
+ assert!(is_valid_field_name("field_name"));
+ assert!(!is_valid_field_name("field!name"));
+ assert!(!is_valid_field_name("-fieldname"));
+ assert!(is_valid_field_name("_fieldname"));
assert!(!is_valid_field_name(""));
assert!(!is_valid_field_name("シャボン玉"));
assert!(is_valid_field_name("my_text_field"));
| Restrict characters in field names or fix query parser.
As reported in https://jstrong.dev/posts/2020/building-a-site-search-with-tantivy/.
Right now `my-field` is an accepted field, but the query parser is confused when parsing
`my-field:myfieldvalue`
Either fix the queryparser or restrict the available characters in field name.
| 2020-08-30T22:09:27 | 1.3 | 2737822620362dd348b8d9396a26dd68cd928337 | [
"query_grammar::test::test_field_name"
] | [
"occur::test::test_occur_compose",
"query_grammar::test::test_occur_symbol",
"query_grammar::test::test_occur_leaf",
"query_grammar::test::test_parse_empty_to_ast",
"query_grammar::test::test_parse_query_default_clause",
"query_grammar::test::test_parse_query_single_term",
"query_grammar::test::test_mus... | [] | [] | |
xd009642/tarpaulin | 1,206 | xd009642__tarpaulin-1206 | [
"1160"
] | 89c72d9590e376a3e300ae8158f8f58d4327d68b | diff --git a/src/source_analysis/attributes.rs b/src/source_analysis/attributes.rs
--- a/src/source_analysis/attributes.rs
+++ b/src/source_analysis/attributes.rs
@@ -55,8 +55,24 @@ pub(crate) fn check_cfg_attr(attr: &Meta) -> bool {
}
}
}
+ } else if id.is_ident("cfg_attr") {
+ if let Meta::List(ml) = attr {
+ let tarp_cfged_ignores = &["no_coverage"];
+ if let NestedMeta::Meta(Meta::Path(ref i)) = ml.nested[0] {
+ if i.is_ident("tarpaulin") {
+ for p in ml.nested.iter().skip(1) {
+ if let NestedMeta::Meta(Meta::Path(ref path)) = p {
+ if tarp_cfged_ignores.iter().any(|x| path.is_ident(x)) {
+ ignore_span = true;
+ break;
+ }
+ }
+ }
+ }
+ }
+ }
} else {
- let skip_attrs = vec!["tarpaulin", "skip"];
+ let skip_attrs = &["tarpaulin", "skip"];
let mut n = 0;
ignore_span = true;
for (segment, attr) in id.segments.iter().zip(skip_attrs.iter()) {
| diff --git a/CHANGELOG.md b/CHANGELOG.md
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -16,6 +16,7 @@ operating systems
- Set `LLVM_PROFILE_FILE` for building tests and delete the generated profraws to ignore build script
coverage
- Remove dependency on memmap
+- Filter out expressions or items with `#[cfg_attr(tarpaulin, no_coverage)]`
## [0.24.0] 2023-01-24
### Added
diff --git a/src/source_analysis/tests.rs b/src/source_analysis/tests.rs
--- a/src/source_analysis/tests.rs
+++ b/src/source_analysis/tests.rs
@@ -816,6 +816,11 @@ fn tarpaulin_skip_attr() {
fn uncovered3() {
println!(\"zombie lincoln\");
}
+
+ #[cfg_attr(tarpaulin, no_coverage)]
+ fn uncovered4() {
+ println!(\"zombie lincoln\");
+ }
",
file: Path::new(""),
ignore_mods: RefCell::new(HashSet::new()),
diff --git a/src/source_analysis/tests.rs b/src/source_analysis/tests.rs
--- a/src/source_analysis/tests.rs
+++ b/src/source_analysis/tests.rs
@@ -834,6 +839,7 @@ fn tarpaulin_skip_attr() {
assert!(lines.ignore.contains(&Lines::Line(18)));
assert!(lines.ignore.contains(&Lines::Line(22)));
assert!(lines.ignore.contains(&Lines::Line(23)));
+ assert!(lines.ignore.contains(&Lines::Line(28)));
let ctx = Context {
config: &config,
| `#[no_coverage]` is working but `#[cfg_attr(tarpaulin, no_coverage)]` not
**Describe the bug**
I am conditionally add `no_coverage` attribute only if `--cfg tarpaulin` is set.
```rust
#[cfg_attr(tarpaulin, no_coverage)]
mod info;
```
It's not working but putting just `#[no_coverage]` works as expected.
**To Reproduce**
1. Create function and add attribute `#[cfg_attr(tarpaulin, no_coverage)]`.
2. Run `cargo tarpaulin`.
3. Check report - function is included in coverage lines.
4. Change attribute into `#[no_coverage]`.
5. Run `cargo tarpaulin`.
6. Check report - function is ignored as expected.
**Expected behavior**
Item should be ignored from coverage.
**Live example**
Check my [`ferric-bytes/chksum-build`](https://github.com/ferric-bytes/chksum-build) repo - specially code below.
https://github.com/ferric-bytes/chksum-build/blob/299e286ddfea98edc0687d63c8959ce7e3ebd264/src/lib.rs#L178-L179
**Possible fix**
As far as I understand some extra checks should be added to this function.
https://github.com/xd009642/tarpaulin/blob/343e912bf33a3872ff9f49d48686831d7ca701da/src/source_analysis/attributes.rs#L35-L73
| 2023-01-31T04:49:15 | 0.24 | 89c72d9590e376a3e300ae8158f8f58d4327d68b | [
"source_analysis::tests::tarpaulin_skip_attr"
] | [
"cargo::tests::llvm_cov_compatible_version",
"cargo::tests::llvm_cov_incompatible_version",
"branching::tests::for_branches",
"branching::tests::if_elif_else",
"cargo::tests::parse_rustflags_from_toml",
"branching::tests::while_branches",
"branching::tests::match_branches",
"branching::tests::if_branc... | [
"cargo_run_coverage",
"examples_coverage",
"doc_test_bootstrap",
"array_coverage",
"failure_thresholds::coverage_above_threshold",
"access_env_var",
"breaks_expr_coverage",
"continues_expr_coverage",
"config_file_coverage",
"dot_rs_in_dir_name",
"boxes_coverage",
"failure_thresholds::coverage_... | [] | |
tealdeer-rs/tealdeer | 236 | tealdeer-rs__tealdeer-236 | [
"182"
] | f8785fbc3ee4e64281c9643ea34efd0f0ec263c1 | diff --git a/src/config.rs b/src/config.rs
--- a/src/config.rs
+++ b/src/config.rs
@@ -173,7 +173,10 @@ impl Default for RawDirectoriesConfig {
fn default() -> Self {
Self {
custom_pages_dir: get_app_root(AppDataType::UserData, &crate::APP_INFO)
- .map(|path| path.join("pages"))
+ .map(|path| {
+ // Note: The `join("")` call ensures that there's a trailing slash
+ path.join("pages").join("")
+ })
.ok(),
}
}
diff --git a/src/main.rs b/src/main.rs
--- a/src/main.rs
+++ b/src/main.rs
@@ -257,7 +257,7 @@ fn show_config_path(enable_styles: bool) {
}
/// Show file paths
-fn show_paths() {
+fn show_paths(config: &Config) {
let config_dir = get_config_dir().map_or_else(
|e| format!("[Error: {}]", e),
|(mut path, source)| {
diff --git a/src/main.rs b/src/main.rs
--- a/src/main.rs
+++ b/src/main.rs
@@ -289,13 +289,21 @@ fn show_paths() {
path.push(""); // Trailing path separator
path.into_os_string()
.into_string()
- .unwrap_or_else(|_| String::from("[Invalid]"))
+ .unwrap_or_else(|_| "[Invalid]".to_string())
},
);
- println!("Config dir: {}", config_dir);
- println!("Config path: {}", config_path);
- println!("Cache dir: {}", cache_dir);
- println!("Pages dir: {}", pages_dir);
+ let custom_pages_dir = config.directories.custom_pages_dir.as_deref().map_or_else(
+ || "[None]".to_string(),
+ |path| {
+ path.to_str()
+ .map_or_else(|| "[Invalid]".to_string(), ToString::to_string)
+ },
+ );
+ println!("Config dir: {}", config_dir);
+ println!("Config path: {}", config_path);
+ println!("Cache dir: {}", cache_dir);
+ println!("Pages dir: {}", pages_dir);
+ println!("Custom pages dir: {}", custom_pages_dir);
}
/// Create seed config file and exit
diff --git a/src/main.rs b/src/main.rs
--- a/src/main.rs
+++ b/src/main.rs
@@ -416,14 +424,6 @@ fn main() {
);
show_config_path(enable_styles);
}
- if args.show_paths {
- show_paths();
- }
-
- // Create a basic config and exit
- if args.seed_config {
- create_config_and_exit(enable_styles);
- }
// Look up config file, if none is found fall back to default config.
let config = match Config::load(enable_styles) {
diff --git a/src/main.rs b/src/main.rs
--- a/src/main.rs
+++ b/src/main.rs
@@ -438,10 +438,21 @@ fn main() {
}
};
+ // Set up pager
if args.pager || config.display.use_pager {
configure_pager(enable_styles);
}
+ // Show various paths
+ if args.show_paths {
+ show_paths(&config);
+ }
+
+ // Create a basic config and exit
+ if args.seed_config {
+ create_config_and_exit(enable_styles);
+ }
+
// Specify target OS
let platform: PlatformType = args.platform.unwrap_or_else(PlatformType::current);
| diff --git a/tests/lib.rs b/tests/lib.rs
--- a/tests/lib.rs
+++ b/tests/lib.rs
@@ -284,17 +284,18 @@ fn test_setup_seed_config() {
fn test_show_paths() {
let testenv = TestEnv::new();
+ // Show general commands
testenv
.command()
.args(["--show-paths"])
.assert()
.success()
.stdout(contains(format!(
- "Config dir: {}",
+ "Config dir: {}",
testenv.config_dir.path().to_str().unwrap(),
)))
.stdout(contains(format!(
- "Config path: {}",
+ "Config path: {}",
testenv
.config_dir
.path()
diff --git a/tests/lib.rs b/tests/lib.rs
--- a/tests/lib.rs
+++ b/tests/lib.rs
@@ -303,11 +304,11 @@ fn test_show_paths() {
.unwrap(),
)))
.stdout(contains(format!(
- "Cache dir: {}",
+ "Cache dir: {}",
testenv.cache_dir.path().to_str().unwrap(),
)))
.stdout(contains(format!(
- "Pages dir: {}",
+ "Pages dir: {}",
testenv
.cache_dir
.path()
diff --git a/tests/lib.rs b/tests/lib.rs
--- a/tests/lib.rs
+++ b/tests/lib.rs
@@ -315,6 +316,23 @@ fn test_show_paths() {
.to_str()
.unwrap(),
)));
+
+ // Set custom pages directory
+ testenv.write_config(format!(
+ "[directories]\ncustom_pages_dir = '{}'",
+ testenv.custom_pages_dir.path().to_str().unwrap()
+ ));
+
+ // Now ensure that this path is contained in the output
+ testenv
+ .command()
+ .args(["--show-paths"])
+ .assert()
+ .success()
+ .stdout(contains(format!(
+ "Custom pages dir: {}",
+ testenv.custom_pages_dir.path().to_str().unwrap(),
+ )));
}
#[test]
| Include custom pages and Patches directory in `--show-paths` option
Quote from PR #142
> We should also include the custom pages directory in the new --show-paths option output, but that's probably best done in a separate PR, in order not to hold up this one.
Acceptance Criteria:
* Using `--show-paths` includes the location of the custom_pages directory in the output
* Tests are created / updated for new and changed code
| 2021-12-20T07:29:13 | 1.4 | 0d1300c218cee551c3390acdfcd7a3745285d62c | [
"test_show_paths"
] | [
"cache::tests::test_page_lookup_result_iter_no_patch",
"cache::tests::test_page_lookup_result_iter_with_patch",
"formatter::tests::test_empty_command",
"formatter::tests::test_highlight_code_segment",
"formatter::tests::test_is_freestanding_substring",
"formatter::tests::test_i18n",
"line_iterator::test... | [
"test_create_cache_directory_path",
"test_cache_location_not_a_directory",
"test_autoupdate_cache",
"test_pager_flag_enable",
"test_quiet_cache",
"test_quiet_failures",
"test_quiet_old_cache",
"test_spaces_find_command",
"test_update_cache"
] | [] | |
Keats/tera | 908 | Keats__tera-908 | [
"882"
] | 3b2e96f624bd898cc96e964cd63194d58701ca4a | diff --git a/src/parser/mod.rs b/src/parser/mod.rs
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -427,6 +427,7 @@ fn parse_logic_val(pair: Pair<Rule>) -> TeraResult<Expr> {
Rule::in_cond => expr = Some(parse_in_condition(p)?),
Rule::comparison_expr => expr = Some(parse_comparison_expression(p)?),
Rule::string_expr_filter => expr = Some(parse_string_expr_with_filters(p)?),
+ Rule::logic_expr => expr = Some(parse_logic_expr(p)?),
_ => unreachable!(),
};
}
diff --git a/src/parser/tera.pest b/src/parser/tera.pest
--- a/src/parser/tera.pest
+++ b/src/parser/tera.pest
@@ -98,7 +98,7 @@ comparison_expr = { (string_expr_filter | comparison_val) ~ (comparison_op ~ (st
in_cond_container = {string_expr_filter | array_filter | dotted_square_bracket_ident}
in_cond = !{ (string_expr_filter | basic_expr_filter) ~ op_not? ~ "in" ~ in_cond_container }
-logic_val = !{ op_not? ~ (in_cond | comparison_expr) }
+logic_val = !{ op_not? ~ (in_cond | comparison_expr) | "(" ~ logic_expr ~ ")" }
logic_expr = !{ logic_val ~ ((op_or | op_and) ~ logic_val)* }
array = !{ "[" ~ (logic_val ~ ",")* ~ logic_val? ~ "]"}
| diff --git a/src/parser/tests/parser.rs b/src/parser/tests/parser.rs
--- a/src/parser/tests/parser.rs
+++ b/src/parser/tests/parser.rs
@@ -360,6 +360,30 @@ fn parse_variable_tag_negated_expr() {
);
}
+#[test]
+fn parse_variable_tag_negated_expr_with_parentheses() {
+ let ast = parse("{{ (not id or not true) and not 1 + 1 }}").unwrap();
+ assert_eq!(
+ ast[0],
+ Node::VariableBlock(
+ WS::default(),
+ Expr::new(ExprVal::Logic(LogicExpr {
+ lhs: Box::new(Expr::new(ExprVal::Logic(LogicExpr {
+ lhs: Box::new(Expr::new_negated(ExprVal::Ident("id".to_string()))),
+ operator: LogicOperator::Or,
+ rhs: Box::new(Expr::new_negated(ExprVal::Bool(true))),
+ },))),
+ operator: LogicOperator::And,
+ rhs: Box::new(Expr::new_negated(ExprVal::Math(MathExpr {
+ lhs: Box::new(Expr::new(ExprVal::Int(1))),
+ operator: MathOperator::Add,
+ rhs: Box::new(Expr::new(ExprVal::Int(1))),
+ },))),
+ },))
+ )
+ );
+}
+
#[test]
fn parse_variable_tag_simple_test() {
let ast = parse("{{ id is defined }}").unwrap();
diff --git a/src/renderer/tests/basic.rs b/src/renderer/tests/basic.rs
--- a/src/renderer/tests/basic.rs
+++ b/src/renderer/tests/basic.rs
@@ -476,6 +476,19 @@ fn render_if_elif_else() {
("{% if 'n' in name %}Admin{% else %}Hmm{% endif %}", "Admin"),
// function in if
("{% if get_true() %}Truth{% endif %}", "Truth"),
+ // Parentheses around logic expressions
+ ("{% if age >= 18 and name == 'john' %}Truth{% endif %}", "Truth"),
+ ("{% if (age >= 18) and (name == 'john') %}Truth{% endif %}", "Truth"),
+ ("{% if (age >= 18) or (name == 'john') %}Truth{% endif %}", "Truth"),
+ ("{% if (age < 18) or (name == 'john') %}Truth{% endif %}", "Truth"),
+ ("{% if (age >= 18) or (name != 'john') %}Truth{% endif %}", "Truth"),
+ ("{% if (age < 18) and (name != 'john') %}Truth{% endif %}", ""),
+ ("{% if (age >= 18) and (name != 'john') %}Truth{% endif %}", ""),
+ ("{% if (age >= 18 and name == 'john') %}Truth{% endif %}", "Truth"),
+ ("{% if (age < 18 and name == 'john') %}Truth{% endif %}", ""),
+ ("{% if (age >= 18 and name != 'john') %}Truth{% endif %}", ""),
+ ("{% if age >= 18 or name == 'john' and is_false %}Truth{% endif %}", "Truth"),
+ ("{% if (age >= 18 or name == 'john') and is_false %}Truth{% endif %}", ""),
];
for (input, expected) in inputs {
| `if` statement with parens fails to parse
The following fails to parse with tera but works fine with jinja2:
```
{% if (brand.id == "barfoo") or (brand.id == "foobar") %}LOL{% endif %}
```
with the following error:
```
Tera error: Error { kind: Msg("Failed to parse 'test.html'"), source: Some(Error { kind: Msg(" --> 1:17\n |\n1 | {% if (brand.id == \"barfoo\") or (brand.id == \"foobar\") %}LOL{% endif %}\n | ^---\n |\n = expected `+`, `-`, `*`, `/`, or `%`"), source: None }) }
```
Changing the template to something more reasonable also fails with the exact same error:
```
{% if (brand.id == "barfoo" or brand.id == "foobar") %}LOL{% endif %}
```
I know the first example is a bit silly and you're not aiming for 100% jinja compatibility but do you think this could be fixed?
Thanks in advance!
| I've just added a test in the next version for that (https://github.com/Keats/tera2/commit/c5dc6c9b5cfad6ae54ecc4a6f4bfc58641e29e3f) and it works
Do you have a rough timeline for when we can expect terra2? Are we talking days, weeks or months? 😄
Definitely closer to months. A beta release will be tested for quite some time in Zola before actually releasing v2 proper | 2024-04-30T16:58:32 | 1.19 | 3b2e96f624bd898cc96e964cd63194d58701ca4a | [
"parser::tests::parser::parse_variable_tag_negated_expr_with_parentheses",
"renderer::tests::basic::render_if_elif_else"
] | [
"builtins::filters::array::tests::test_filter_empty",
"builtins::filters::array::tests::test_first",
"builtins::filters::array::tests::test_concat_single_value",
"builtins::filters::array::tests::test_concat_array",
"builtins::filters::array::tests::test_first_empty",
"builtins::filters::array::tests::tes... | [] | [] |
Keats/tera | 820 | Keats__tera-820 | [
"819"
] | 226d0108cdb64c8d056e46c5c9a67a4a4e8549ea | diff --git a/CHANGELOG.md b/CHANGELOG.md
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,9 @@
# Changelog
+## 1.18.1 (2023-03-15)
+
+- Fix panic on invalid globs to Tera::new
+
## 1.18.0 (2023-03-08)
- Add `abs` filter
diff --git a/Cargo.toml b/Cargo.toml
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "tera"
-version = "1.18.0"
+version = "1.18.1"
authors = ["Vincent Prouillet <hello@prouilletvincent.com>"]
license = "MIT"
readme = "README.md"
diff --git a/src/tera.rs b/src/tera.rs
--- a/src/tera.rs
+++ b/src/tera.rs
@@ -132,7 +132,7 @@ impl Tera {
// See https://github.com/Keats/tera/issues/574 for the Tera discussion
// and https://github.com/Gilnaa/globwalk/issues/28 for the upstream issue.
let (parent_dir, glob_end) = glob.split_at(glob.find('*').unwrap());
- let parent_dir = std::fs::canonicalize(parent_dir).unwrap();
+ let parent_dir = std::fs::canonicalize(parent_dir)?;
let dir = parent_dir.join(glob_end).into_os_string().into_string().unwrap();
// We are parsing all the templates on instantiation
| diff --git a/src/tera.rs b/src/tera.rs
--- a/src/tera.rs
+++ b/src/tera.rs
@@ -1219,4 +1219,12 @@ mod tests {
}
}
}
+
+ // https://github.com/Keats/tera/issues/819
+ #[test]
+ fn doesnt_panic_on_invalid_glob() {
+ let tera = Tera::new("\\dev/null/*");
+ println!("{:?}", tera);
+ assert!(tera.is_err());
+ }
}
| Non existent path causes panic instead of Error result
New versions of tera panic instead of returning Error when passed an invalid glob path. For example:
[rsgen-avro](https://github.com/lerouxrgd/rsgen-avro/blob/9e3eb0bfd0f7ba7a583bb7f3166fd67c41f4e708/src/templates.rs#L338
)
let mut tera = Tera::new("/dev/null/*")?;
Problem caused by use of unwrap() instead of ? operator, for example:
[tera.rs](https://github.com/Keats/tera/blob/226d0108cdb64c8d056e46c5c9a67a4a4e8549ea/src/tera.rs#L135)
let parent_dir = std::fs::canonicalize(parent_dir).unwrap();
| That's not good, I'll fix it today unless someone beats me to it
On Wed, 15 Mar 2023, 02:23 chupaty, ***@***.***> wrote:
> New versions of tera panic instead of returning Error when passed an
> invalid glob path. For example:
> rsgen-avro
> <https://github.com/lerouxrgd/rsgen-avro/blob/9e3eb0bfd0f7ba7a583bb7f3166fd67c41f4e708/src/templates.rs#L338>
>
> let mut tera = Tera::new("/dev/null/*")?;
>
> Problem caused by use of unwrap() instead of ? operator, for example:
>
> tera.rs
> <https://github.com/Keats/tera/blob/226d0108cdb64c8d056e46c5c9a67a4a4e8549ea/src/tera.rs#L135>
> let parent_dir = std::fs::canonicalize(parent_dir).unwrap();
>
> —
> Reply to this email directly, view it on GitHub
> <https://github.com/Keats/tera/issues/819>, or unsubscribe
> <https://github.com/notifications/unsubscribe-auth/AAFGDI2UPDC2CXBK4QZ25KLW4EKXVANCNFSM6AAAAAAV3FAIN4>
> .
> You are receiving this because you are subscribed to this thread.Message
> ID: ***@***.***>
>
| 2023-03-15T17:14:22 | 1.18 | 226d0108cdb64c8d056e46c5c9a67a4a4e8549ea | [
"tera::tests::doesnt_panic_on_invalid_glob"
] | [
"builtins::filters::array::tests::test_first",
"builtins::filters::array::tests::test_filter_empty",
"builtins::filters::array::tests::test_concat_array",
"builtins::filters::array::tests::test_concat_single_value",
"builtins::filters::array::tests::test_first_empty",
"builtins::filters::array::tests::tes... | [] | [] |
Keats/tera | 772 | Keats__tera-772 | [
"771"
] | da358f389f589649b4b99bef70195a870f5f73ff | diff --git a/src/builtins/filters/string.rs b/src/builtins/filters/string.rs
--- a/src/builtins/filters/string.rs
+++ b/src/builtins/filters/string.rs
@@ -61,7 +61,7 @@ const PYTHON_ENCODE_SET: &AsciiSet = &USERINFO_ENCODE_SET
lazy_static! {
static ref STRIPTAGS_RE: Regex = Regex::new(r"(<!--.*?-->|<[^>]*>)").unwrap();
- static ref WORDS_RE: Regex = Regex::new(r"\b(?P<first>\w)(?P<rest>\w*)\b").unwrap();
+ static ref WORDS_RE: Regex = Regex::new(r"\b(?P<first>[\w'])(?P<rest>[\w']*)\b").unwrap();
static ref SPACELESS_RE: Regex = Regex::new(r">\s+<").unwrap();
}
| diff --git a/src/builtins/filters/string.rs b/src/builtins/filters/string.rs
--- a/src/builtins/filters/string.rs
+++ b/src/builtins/filters/string.rs
@@ -662,6 +662,7 @@ mod tests {
("\tfoo\tbar\t", "\tFoo\tBar\t"),
("foo bar ", "Foo Bar "),
("foo bar\t", "Foo Bar\t"),
+ ("foo's bar", "Foo's Bar"),
];
for (input, expected) in tests {
let result = title(&to_value(input).unwrap(), &HashMap::new());
| Title case filter with apostrophe - Capitalize issue
With this input:
{{ "Steiner's theorem" | title }}
Current result is:
Steiner'**S** Theorem
Expected result is:
Steiner'**s** Theorem
First mentioned here: https://zola.discourse.group/t/title-case-filter-with-apostrophes/1526
Can be reproduced here: https://tera.netlify.app/playground/
| I have a fix and a test case for it.
I'll submit a PR. | 2022-11-05T14:50:47 | 1.17 | da358f389f589649b4b99bef70195a870f5f73ff | [
"builtins::filters::string::tests::test_title"
] | [
"builtins::filters::array::tests::test_filter",
"builtins::filters::array::tests::test_filter_empty",
"builtins::filters::array::tests::test_filter_no_value",
"builtins::filters::array::tests::test_concat_array",
"builtins::filters::array::tests::test_concat_single_value",
"builtins::filters::array::tests... | [] | [] |
Keats/tera | 744 | Keats__tera-744 | [
"738"
] | 76824f6a4bcdb0f845eaa3eac189c98cf25fd4ed | diff --git a/src/renderer/processor.rs b/src/renderer/processor.rs
--- a/src/renderer/processor.rs
+++ b/src/renderer/processor.rs
@@ -303,8 +303,8 @@ impl<'a> Processor<'a> {
}
fn eval_in_condition(&mut self, in_cond: &'a In) -> Result<bool> {
- let lhs = self.eval_expression(&in_cond.lhs)?;
- let rhs = self.eval_expression(&in_cond.rhs)?;
+ let lhs = self.safe_eval_expression(&in_cond.lhs)?;
+ let rhs = self.safe_eval_expression(&in_cond.rhs)?;
let present = match *rhs {
Value::Array(ref v) => v.contains(&lhs),
| diff --git a/src/renderer/tests/basic.rs b/src/renderer/tests/basic.rs
--- a/src/renderer/tests/basic.rs
+++ b/src/renderer/tests/basic.rs
@@ -151,6 +151,7 @@ fn render_variable_block_logic_expr() {
hashmap.insert("b", 10);
hashmap.insert("john", 100);
context.insert("object", &hashmap);
+ context.insert("urls", &vec!["https://test"]);
let inputs = vec![
("{{ (1.9 + a) | round > 10 }}", "false"),
diff --git a/src/renderer/tests/basic.rs b/src/renderer/tests/basic.rs
--- a/src/renderer/tests/basic.rs
+++ b/src/renderer/tests/basic.rs
@@ -177,6 +178,8 @@ fn render_variable_block_logic_expr() {
("{{ name not in 'hello' }}", "true"),
("{{ name in ['bob', 2, 'john'] }}", "true"),
("{{ a in ['bob', 2, 'john'] }}", "true"),
+ ("{{ \"https://test\" in [\"https://test\"] }}", "true"),
+ ("{{ \"https://test\" in urls }}", "true"),
("{{ 'n' in name }}", "true"),
("{{ '<' in malicious }}", "true"),
("{{ 'a' in object }}", "true"),
| The template's `in` doesn't work when a slash (/) exists due to escaping
Given the Rust + template
```
context.insert("friends", &vec!["https://Bob", "Kate"]);
...
Output: {% if "https://Bob" in friends %}TRUE{% endif %}
```
the output will be blank.
If we instead run:
- `{% if "https://Bob" in ["https://Bob", "Kate"] %}`
- `{% if "https://Bob"|safe in friends %}TRUE{% endif %}`
then the statement works as expected.
---
After [adding a test to ensure I wasn't mad](https://github.com/Smerity/tera/commit/2ccb8785f18cb55d82f0fdc7bf5ab66ec00583b0) I traced the code for running an `in` to `eval_in_condition` which calls `eval_expression` on the `lhs` and `rhs` inputs. As our example has an `ExprVal::String` on the left we end up escaping `https://Bob`. Thus, to run the above correctly we need to instead run:
```
context.insert("friends", &vec!["https://Bob", "Kate"]);
...
Output: {% if "https://Bob"|safe in friends %}TRUE{% endif %}
```
This works as we prevent the `ExprVal::String` from being escaped.
**If this is expected behaviour:** I'll add a section to the [containing docs](https://tera.netlify.app/docs/#containing)
**If it isn't expected behaviour:** I can investigate this issue further (though would appreciate someone with more Tera knowledge as I'm uncertain how problematic making the `lhs` / `rhs` "unsafe" (i.e. skipping the safety) might be)
| Weird that the literal would be escaped when checking with `in`, definitely a bug | 2022-08-08T19:13:52 | 1.16 | 76824f6a4bcdb0f845eaa3eac189c98cf25fd4ed | [
"renderer::tests::basic::render_variable_block_logic_expr"
] | [
"builtins::filters::array::tests::test_concat_array",
"builtins::filters::array::tests::test_filter_empty",
"builtins::filters::array::tests::test_join_empty",
"builtins::filters::array::tests::test_last_empty",
"builtins::filters::array::tests::test_map_empty",
"builtins::filters::array::tests::test_map"... | [
"renderer::tests::basic::render_variable_block_ident",
"renderer::tests::basic::render_variable_block_lit_expr"
] | [] |
Keats/tera | 703 | Keats__tera-703 | [
"702"
] | bf8a3d8305cb3ddae4cc191a3172b1ec646ff3df | diff --git a/src/builtins/filters/array.rs b/src/builtins/filters/array.rs
--- a/src/builtins/filters/array.rs
+++ b/src/builtins/filters/array.rs
@@ -268,11 +268,6 @@ pub fn slice(value: &Value, args: &HashMap<String, Value>) -> Result<Value> {
None => 0,
};
- // Not an error, but returns an empty Vec
- if start > arr.len() {
- return Ok(Vec::<Value>::new().into());
- }
-
let mut end = match args.get("end") {
Some(val) => get_index(try_get_value!("slice", "end", f64, val), &arr),
None => arr.len(),
diff --git a/src/builtins/filters/array.rs b/src/builtins/filters/array.rs
--- a/src/builtins/filters/array.rs
+++ b/src/builtins/filters/array.rs
@@ -282,6 +277,11 @@ pub fn slice(value: &Value, args: &HashMap<String, Value>) -> Result<Value> {
end = arr.len();
}
+ // Not an error, but returns an empty Vec
+ if start >= end {
+ return Ok(Vec::<Value>::new().into());
+ }
+
Ok(arr[start..end].into())
}
| diff --git a/src/builtins/filters/array.rs b/src/builtins/filters/array.rs
--- a/src/builtins/filters/array.rs
+++ b/src/builtins/filters/array.rs
@@ -646,6 +646,8 @@ mod tests {
(make_args(Some(1), Some(2.0)), vec![2]),
(make_args(None, Some(-2.0)), vec![1, 2, 3]),
(make_args(None, None), vec![1, 2, 3, 4, 5]),
+ (make_args(Some(3), Some(1.0)), vec![]),
+ (make_args(Some(9), None), vec![]),
];
for (args, expected) in inputs {
| if `start > end` then `slice` filter panicks
Hello!
```
{{ [1, "foo", 1, "true", "FOO", true, 2] | slice(start=4, end=2) }}
```
panick:
```
thread 'thread '<unnamed>thread '<unnamed>' panicked at 'slice index starts at 4 but ends at 2' panicked at 'slice index starts at 4 but ends at 2<unnamed>' panicked at '', thread 'slice index starts at 4 but ends at 2<unnamed>' panicked at 'slice index starts at 4 but ends at 2', /home/rootkea/.cargo/registry/src/github.com-1ecc6299db9ec823/tera-1.15.0/src/builtins/filters/array.rs/home/rootkea/.cargo/registry/src/github.com-1ecc6299db9ec823/tera-1.15.0/src/builtins/filters/array.rs:285:8
note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace
:285:8
', ', /home/rootkea/.cargo/registry/src/github.com-1ecc6299db9ec823/tera-1.15.0/src/builtins/filters/array.rs:285:8
thread 'thread '<unnamed>' panicked at 'slice index starts at 4 but ends at 2', /home/rootkea/.cargo/registry/src/github.com-1ecc6299db9ec823/tera-1.15.0/src/builtins/filters/array.rs:285:8
/home/rootkea/.cargo/registry/src/github.com-1ecc6299db9ec823/tera-1.15.0/src/builtins/filters/array.rs:285:8
thread '<unnamed>' panicked at 'slice index starts at 4 but ends at 2thread '<unnamed>' panicked at 'slice index starts at 4 but ends at 2', /home/rootkea/.cargo/registry/src/github.com-1ecc6299db9ec823/tera-1.15.0/src/builtins/filters/array.rs:285:8
thread '<unnamed>' panicked at 'slice index starts at 4 but ends at 2', /home/rootkea/.cargo/registry/src/github.com-1ecc6299db9ec823/tera-1.15.0/src/builtins/filters/array.rs:285:8
thread '<unnamed>' panicked at 'slice index starts at 4 but ends at 2', /home/rootkea/.cargo/registry/src/github.com-1ecc6299db9ec823/tera-1.15.0/src/builtins/filters/array.rs:285:8
<unnamed>', /home/rootkea/.cargo/registry/src/github.com-1ecc6299db9ec823/tera-1.15.0/src/builtins/filters/array.rs:285:8
' panicked at 'thread 'slice index starts at 4 but ends at 2', /home/rootkea/.cargo/registry/src/github.com-1ecc6299db9ec823/tera-1.15.0/src/builtins/filters/array.rs:285:8
<unnamed>' panicked at 'slice index starts at 4 but ends at 2', /home/rootkea/.cargo/registry/src/github.com-1ecc6299db9ec823/tera-1.15.0/src/builtins/filters/array.rs:285:8
```
Interestingly, if `start == end` empty array is returned. So what to do when `start > end`?
1. Return empty array OR
2. throw error
| I think return an empty array so as to be consistent since following doesn't panick and simply returns empty array even if `start > end`:
```
{{ [1, "foo", 1, "true", "FOO", true, 2] | slice(start=8, end=2) }}
```
Preparing a PR now... | 2022-01-08T03:16:53 | 1.15 | bf8a3d8305cb3ddae4cc191a3172b1ec646ff3df | [
"builtins::filters::array::tests::test_slice"
] | [
"builtins::filters::array::tests::test_filter_empty",
"builtins::filters::array::tests::test_concat_single_value",
"builtins::filters::array::tests::test_concat_array",
"builtins::filters::array::tests::test_first",
"builtins::filters::array::tests::test_first_empty",
"builtins::filters::array::tests::tes... | [
"renderer::tests::basic::render_variable_block_lit_expr",
"renderer::tests::basic::render_variable_block_ident"
] | [] |
Keats/tera | 623 | Keats__tera-623 | [
"590"
] | 006f8e895f2dad8085378bb7feabcf6fcd38187b | diff --git a/src/renderer/processor.rs b/src/renderer/processor.rs
--- a/src/renderer/processor.rs
+++ b/src/renderer/processor.rs
@@ -63,11 +63,11 @@ fn evaluate_sub_variables<'a>(key: &str, call_stack: &CallStack<'a>) -> Result<S
Ok(new_key
.replace("/", "~1") // https://tools.ietf.org/html/rfc6901#section-3
- .replace("['", ".")
- .replace("[\"", ".")
+ .replace("['", ".\"")
+ .replace("[\"", ".\"")
.replace("[", ".")
- .replace("']", "")
- .replace("\"]", "")
+ .replace("']", "\"")
+ .replace("\"]", "\"")
.replace("]", ""))
}
| diff --git a/src/context.rs b/src/context.rs
--- a/src/context.rs
+++ b/src/context.rs
@@ -204,7 +204,15 @@ impl ValueTruthy for Value {
/// Converts a dotted path to a json pointer one
#[inline]
pub fn get_json_pointer(key: &str) -> String {
- ["/", &key.replace(".", "/")].join("")
+ lazy_static::lazy_static! {
+ // Split the key into dot-separated segments, respecting quoted strings as single units
+ // to fix https://github.com/Keats/tera/issues/590
+ static ref JSON_POINTER_REGEX: regex::Regex = regex::Regex::new("\"[^\"]*\"|[^.]+").unwrap();
+ }
+
+ let mut segments = vec![""];
+ segments.extend(JSON_POINTER_REGEX.find_iter(key).map(|mat| mat.as_str().trim_matches('"')));
+ segments.join("/")
}
#[cfg(test)]
diff --git a/src/tera.rs b/src/tera.rs
--- a/src/tera.rs
+++ b/src/tera.rs
@@ -951,6 +951,27 @@ mod tests {
assert_eq!(result, "Hello world");
}
+ #[test]
+ fn test_render_map_with_dotted_keys() {
+ let mut my_tera = Tera::default();
+ my_tera
+ .add_raw_templates(vec![
+ ("dots", r#"{{ map["a.b.c"] }}"#),
+ ("urls", r#"{{ map["https://example.com"] }}"#),
+ ])
+ .unwrap();
+
+ let mut map = HashMap::new();
+ map.insert("a.b.c", "success");
+ map.insert("https://example.com", "success");
+
+ let mut tera_context = Context::new();
+ tera_context.insert("map", &map);
+
+ my_tera.render("dots", &tera_context).unwrap();
+ my_tera.render("urls", &tera_context).unwrap();
+ }
+
#[test]
fn test_extend_no_overlap() {
let mut my_tera = Tera::default();
| URLs in HashMap keys results in "Failed to render"
First off: thanks for making & maintaining this! Love it :)
It seems like the square bracket syntax does not support (some of the characters in) URLs.
```rust
let mut tera_context = tera::Context::new();
let mut map= HashMap::new();
map.insert("https://example.com/url", "url value, not working");
map.insert("simplekey", "simple key value, working");
tera_context.insert("map", &map);
```
```
{{ map["simplekey"] }} => works fine
{{ map["https://example.com/url"]}} => "Failed to render \'map_example.html\'"
```
It took me a while to find the cause of this issue - the error wasn't clear. (but error handling isn't what this issue is about)
| I think that's because the map key in the parser doesn't support characters like `:` or `/` maybe? A bit weird and shouldn't be too hard to fix if it's that.
I did a few tests, and it's actually the `.` that's the problem.
The map index is interpreted as a JSON Pointer into the context when rendered, but the dot is not escaped (since it is valid JSON Pointer notation), which means that `map["https://example.com"]` actually performs a lookup against `map["https://example"].com`.
It appears that serde (probably in accordance with the json spec) makes no distinction between `{ "google.com": 3 }` and `{ "google": { "com": 3} }`, but I don't know for sure.
It would appear to be a general issue with any string containing dots.
Here's the test I used:
```rust
#[test]
fn test_render_map_with_complex_key() {
let mut my_tera = Tera::default();
my_tera
.add_raw_template("test", r#"{{ map["a.b.c"] }}"#)
.unwrap();
let mut map= HashMap::new();
map.insert("a.b.c", "success");
let mut tera_context = Context::new();
tera_context.insert("map", &map);
my_tera.render("test", &tera_context).unwrap();
}
``` | 2021-04-14T20:29:02 | 1.7 | 006f8e895f2dad8085378bb7feabcf6fcd38187b | [
"tera::tests::test_render_map_with_dotted_keys"
] | [
"builtins::filters::array::tests::test_filter_empty",
"builtins::filters::array::tests::test_concat_single_value",
"builtins::filters::array::tests::test_concat_array",
"builtins::filters::array::tests::test_first",
"builtins::filters::array::tests::test_first_empty",
"builtins::filters::array::tests::tes... | [
"renderer::tests::basic::render_variable_block_lit_expr",
"renderer::tests::basic::render_variable_block_ident"
] | [] |
Keats/tera | 508 | Keats__tera-508 | [
"497"
] | 414438c7a259a213d1ce109ed08703672c7d6568 | diff --git a/CHANGELOG.md b/CHANGELOG.md
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,5 +1,11 @@
# Changelog
+## 1.3.0 (2020-05-16)
+
+- Add a `urlencode_strict` filter
+- Add more array literals feature in templates
+- Make `filter` filter value argument optional
+
## 1.2.0 (2020-03-29)
- Add `trim_start`, `trim_end`, `trim_start_matches` and `trim_end_matches` filters
diff --git a/Cargo.toml b/Cargo.toml
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "tera"
-version = "1.2.0"
+version = "1.3.0"
authors = ["Vincent Prouillet <hello@prouilletvincent.com>"]
license = "MIT"
readme = "README.md"
diff --git a/docs/content/docs/_index.md b/docs/content/docs/_index.md
--- a/docs/content/docs/_index.md
+++ b/docs/content/docs/_index.md
@@ -962,7 +962,7 @@ or by author name:
Filter the array values, returning only the values where the `attribute` is equal to the `value`.
Values with missing `attribute` or where `attribute` is null will be discarded.
-Both `attribute` and `value` are mandatory.
+`attribute` is mandatory.
Example:
diff --git a/docs/content/docs/_index.md b/docs/content/docs/_index.md
--- a/docs/content/docs/_index.md
+++ b/docs/content/docs/_index.md
@@ -994,6 +994,8 @@ or by author name:
{{ posts | filter(attribute="author.name", value="Vincent") }}
```
+If `value` is not passed, it will drop any elements where the attribute is `null`.
+
#### map
Retrieves an attribute from each object in an array. The `attribute` argument is mandatory and specifies what to extract.
diff --git a/docs/content/docs/_index.md b/docs/content/docs/_index.md
--- a/docs/content/docs/_index.md
+++ b/docs/content/docs/_index.md
@@ -1146,11 +1148,11 @@ Converts a value into a float. The `default` argument can be used to specify th
#### json_encode
Transforms any value into a JSON representation. This filter is better used together with `safe` or when automatic escape is disabled.
-Example: `{{ value | safe | json_encode() }}`
+Example: `{{ value | json_encode() | safe }}`
It accepts a parameter `pretty` (boolean) to print a formatted JSON instead of a one-liner.
-Example: `{{ value | safe | json_encode(pretty=true) }}`
+Example: `{{ value | json_encode(pretty=true) | safe }}`
#### as_str
Returns a string representation of the given value.
diff --git a/src/builtins/filters/array.rs b/src/builtins/filters/array.rs
--- a/src/builtins/filters/array.rs
+++ b/src/builtins/filters/array.rs
@@ -186,6 +186,7 @@ pub fn group_by(value: &Value, args: &HashMap<String, Value>) -> Result<Value> {
/// Filter the array values, returning only the values where the `attribute` is equal to the `value`
/// Values without the `attribute` or with a null `attribute` are discarded
+/// If the `value` is not passed, discard all elements where the attribute is null.
pub fn filter(value: &Value, args: &HashMap<String, Value>) -> Result<Value> {
let mut arr = try_get_value!("filter", "value", Vec<Value>, value);
if arr.is_empty() {
diff --git a/src/builtins/filters/array.rs b/src/builtins/filters/array.rs
--- a/src/builtins/filters/array.rs
+++ b/src/builtins/filters/array.rs
@@ -196,23 +197,17 @@ pub fn filter(value: &Value, args: &HashMap<String, Value>) -> Result<Value> {
Some(val) => try_get_value!("filter", "attribute", String, val),
None => return Err(Error::msg("The `filter` filter has to have an `attribute` argument")),
};
- let value = match args.get("value") {
- Some(val) => val,
- None => return Err(Error::msg("The `filter` filter has to have a `value` argument")),
- };
+ let value = args.get("value").unwrap_or(&Value::Null);
let json_pointer = get_json_pointer(&key);
arr = arr
.into_iter()
.filter(|v| {
- if let Some(val) = v.pointer(&json_pointer) {
- if val.is_null() {
- false
- } else {
- val == value
- }
+ let val = v.pointer(&json_pointer).unwrap_or(&Value::Null);
+ if value.is_null() {
+ !val.is_null()
} else {
- false
+ val == value
}
})
.collect::<Vec<_>>();
diff --git a/src/builtins/filters/string.rs b/src/builtins/filters/string.rs
--- a/src/builtins/filters/string.rs
+++ b/src/builtins/filters/string.rs
@@ -8,8 +8,6 @@ use serde_json::value::{to_value, Value};
#[cfg(feature = "builtins")]
use percent_encoding::{percent_encode, AsciiSet, NON_ALPHANUMERIC};
#[cfg(feature = "builtins")]
-use slug;
-#[cfg(feature = "builtins")]
use unic_segment::GraphemeIndices;
use crate::errors::{Error, Result};
diff --git a/src/builtins/functions.rs b/src/builtins/functions.rs
--- a/src/builtins/functions.rs
+++ b/src/builtins/functions.rs
@@ -184,8 +184,8 @@ pub fn get_env(args: &HashMap<String, Value>) -> Result<Value> {
Some(res) => Ok(Value::String(res)),
None => match args.get("default") {
Some(default) => Ok(default.clone()),
- None => Err(Error::msg(format!("Environment variable `{}` not found", &name)))
- }
+ None => Err(Error::msg(format!("Environment variable `{}` not found", &name))),
+ },
}
}
diff --git a/src/context.rs b/src/context.rs
--- a/src/context.rs
+++ b/src/context.rs
@@ -116,7 +116,7 @@ impl ValueRender for Value {
buf.push(']');
Cow::Owned(buf)
}
- Value::Object(_) => Cow::Owned("[object]".to_owned()),
+ Value::Object(_) => Cow::Borrowed("[object]"),
}
}
}
diff --git a/src/errors.rs b/src/errors.rs
--- a/src/errors.rs
+++ b/src/errors.rs
@@ -1,4 +1,3 @@
-use serde_json;
use std::convert::Into;
use std::error::Error as StdError;
use std::fmt;
diff --git a/src/renderer/processor.rs b/src/renderer/processor.rs
--- a/src/renderer/processor.rs
+++ b/src/renderer/processor.rs
@@ -982,8 +982,11 @@ impl<'a> Processor<'a> {
// which template are we in?
if let Some(&(ref name, ref _template, ref level)) = self.blocks.last() {
- let block_def =
- self.template.blocks_definitions.get(&(*name).to_string()).and_then(|b| b.get(*level));
+ let block_def = self
+ .template
+ .blocks_definitions
+ .get(&(*name).to_string())
+ .and_then(|b| b.get(*level));
if let Some(&(ref tpl_name, _)) = block_def {
if tpl_name != &self.template.name {
| diff --git a/src/builtins/filters/array.rs b/src/builtins/filters/array.rs
--- a/src/builtins/filters/array.rs
+++ b/src/builtins/filters/array.rs
@@ -734,6 +729,37 @@ mod tests {
assert_eq!(res.unwrap(), to_value(expected).unwrap());
}
+ #[test]
+ fn test_filter_no_value() {
+ let input = json!([
+ {"id": 1, "year": 2015},
+ {"id": 2, "year": 2015},
+ {"id": 3, "year": 2016},
+ {"id": 4, "year": 2017},
+ {"id": 5, "year": 2017},
+ {"id": 6, "year": 2017},
+ {"id": 7, "year": 2018},
+ {"id": 8},
+ {"id": 9, "year": null},
+ ]);
+ let mut args = HashMap::new();
+ args.insert("attribute".to_string(), to_value("year").unwrap());
+
+ let expected = json!([
+ {"id": 1, "year": 2015},
+ {"id": 2, "year": 2015},
+ {"id": 3, "year": 2016},
+ {"id": 4, "year": 2017},
+ {"id": 5, "year": 2017},
+ {"id": 6, "year": 2017},
+ {"id": 7, "year": 2018},
+ ]);
+
+ let res = filter(&input, &args);
+ assert!(res.is_ok());
+ assert_eq!(res.unwrap(), to_value(expected).unwrap());
+ }
+
#[test]
fn test_map_empty() {
let res = map(&json!([]), &HashMap::new());
diff --git a/src/parser/tests/parser.rs b/src/parser/tests/parser.rs
--- a/src/parser/tests/parser.rs
+++ b/src/parser/tests/parser.rs
@@ -116,9 +116,11 @@ fn parse_variable_tag_array_lit() {
ast[0],
Node::VariableBlock(
WS::default(),
- Expr::new(
- ExprVal::Array(vec![Expr::new(ExprVal::Int(1)), Expr::new(ExprVal::Int(2)), Expr::new(ExprVal::Int(3))]),
- )
+ Expr::new(ExprVal::Array(vec![
+ Expr::new(ExprVal::Int(1)),
+ Expr::new(ExprVal::Int(2)),
+ Expr::new(ExprVal::Int(3))
+ ]),)
)
);
}
diff --git a/src/parser/tests/parser.rs b/src/parser/tests/parser.rs
--- a/src/parser/tests/parser.rs
+++ b/src/parser/tests/parser.rs
@@ -134,7 +136,11 @@ fn parse_variable_tag_array_lit_with_filter() {
Node::VariableBlock(
WS::default(),
Expr::with_filters(
- ExprVal::Array(vec![Expr::new(ExprVal::Int(1)), Expr::new(ExprVal::Int(2)), Expr::new(ExprVal::Int(3))]),
+ ExprVal::Array(vec![
+ Expr::new(ExprVal::Int(1)),
+ Expr::new(ExprVal::Int(2)),
+ Expr::new(ExprVal::Int(3))
+ ]),
vec![FunctionCall { name: "length".to_string(), args: HashMap::new() },],
)
)
diff --git a/src/parser/tests/parser.rs b/src/parser/tests/parser.rs
--- a/src/parser/tests/parser.rs
+++ b/src/parser/tests/parser.rs
@@ -472,7 +478,7 @@ fn parse_variable_tag_macro_call_with_array_with_filters() {
"some".to_string(),
Expr::with_filters(
ExprVal::Array(vec![Expr::new(ExprVal::Int(1)), Expr::new(ExprVal::Int(2))]),
- vec![FunctionCall { name: "reverse".to_string(), args: HashMap::new() },],
+ vec![FunctionCall { name: "reverse".to_string(), args: HashMap::new() }],
),
);
diff --git a/src/parser/tests/parser.rs b/src/parser/tests/parser.rs
--- a/src/parser/tests/parser.rs
+++ b/src/parser/tests/parser.rs
@@ -675,12 +681,13 @@ fn parse_set_array_with_filter() {
WS::default(),
Set {
key: "hello".to_string(),
- value: Expr::with_filters(ExprVal::Array(vec![
- Expr::new(ExprVal::Int(1)),
- Expr::new(ExprVal::Bool(true)),
- Expr::new(ExprVal::String("hello".to_string())),
- ]),
- vec![FunctionCall { name: "length".to_string(), args: HashMap::new() },],
+ value: Expr::with_filters(
+ ExprVal::Array(vec![
+ Expr::new(ExprVal::Int(1)),
+ Expr::new(ExprVal::Bool(true)),
+ Expr::new(ExprVal::String("hello".to_string())),
+ ]),
+ vec![FunctionCall { name: "length".to_string(), args: HashMap::new() },],
),
global: false,
},
diff --git a/src/parser/tests/parser.rs b/src/parser/tests/parser.rs
--- a/src/parser/tests/parser.rs
+++ b/src/parser/tests/parser.rs
@@ -776,7 +783,8 @@ fn parse_filter_section_preserves_ws() {
body: vec![
Node::Text(" ".to_string()),
Node::VariableBlock(WS::default(), Expr::new(ExprVal::Ident("a".to_string()))),
- Node::Text(" B ".to_string())]
+ Node::Text(" B ".to_string())
+ ]
},
WS::default(),
)
diff --git a/src/parser/tests/parser.rs b/src/parser/tests/parser.rs
--- a/src/parser/tests/parser.rs
+++ b/src/parser/tests/parser.rs
@@ -920,10 +928,8 @@ fn parse_value_forloop_array_with_filter() {
Forloop {
key: None,
value: "item".to_string(),
- container: Expr::with_filters(ExprVal::Array(vec![
- Expr::new(ExprVal::Int(1)),
- Expr::new(ExprVal::Int(2)),
- ]),
+ container: Expr::with_filters(
+ ExprVal::Array(vec![Expr::new(ExprVal::Int(1)), Expr::new(ExprVal::Int(2)),]),
vec![FunctionCall { name: "reverse".to_string(), args: HashMap::new() },],
),
body: vec![Node::Text("A".to_string())],
diff --git a/src/renderer/tests/basic.rs b/src/renderer/tests/basic.rs
--- a/src/renderer/tests/basic.rs
+++ b/src/renderer/tests/basic.rs
@@ -41,6 +41,7 @@ fn render_variable_block_lit_expr() {
(r#"{{ "{{ hey }}" }}"#, "{{ hey }}"),
("{{ true }}", "true"),
("{{ false }}", "false"),
+ ("{{ false and true or true }}", "true"),
("{{ 1 + 1 }}", "2"),
("{{ 1 + 1.1 }}", "2.1"),
("{{ 3 - 1 }}", "2"),
diff --git a/src/renderer/tests/whitespace.rs b/src/renderer/tests/whitespace.rs
--- a/src/renderer/tests/whitespace.rs
+++ b/src/renderer/tests/whitespace.rs
@@ -92,3 +92,13 @@ fn can_remove_whitespace_inheritance() {
assert_eq!(tera.render("tpl", &context).unwrap(), expected);
}
}
+
+// https://github.com/Keats/tera/issues/475
+#[test]
+fn works_with_filter_section() {
+ let mut context = Context::new();
+ context.insert("d", "d");
+ let input = r#"{% filter upper %} {{ "c" }} d{% endfilter %}"#;
+ let res = Tera::one_off(input, &context, true).unwrap();
+ assert_eq!(res, " C D");
+}
| Way to exclude / filter out before sorting
I would like to sort an array by an attribute and exclude ones that don't have that attribute. Currently I'm getting the following error
```
Null is not a sortable value
```
**Example template**
```
{% for post in posts
| sort(attribute="date")
| reverse
%}
...
{% endfor %}
```
**My suggested ways of doing this**
1. Make the `value` arg on `filter` optional and if not given then filter out nulls.
```
{% for post in posts
| filter(attribute="date")
| sort(attribute="date")
| reverse
%}
...
{% endif %
```
2. Sort doesn't fail on missing or null attributes and excludes by default (might be too magical).
3. Optional argument to sort to allow this.
| **Option 1**
```rust
pub fn filter(value: &Value, args: &HashMap<String, Value>) -> Result<Value> {
let mut arr = try_get_value!("filter", "value", Vec<Value>, value);
if arr.is_empty() {
return Ok(arr.into());
}
let key = match args.get("attribute") {
Some(val) => try_get_value!("filter", "attribute", String, val),
None => {
return Err(Error::msg(
"The `filter` filter has to have an `attribute` argument",
))
}
};
let value = args.get("value").unwrap_or(&json::Value::Null);
let json_pointer = get_json_pointer(&key);
arr = arr
.into_iter()
.filter(|v| {
let val = v.pointer(&json_pointer).unwrap_or(&json::Value::Null);
if value.is_null() {
!val.is_null()
} else {
val == value
}
})
.collect::<Vec<_>>();
Ok(to_value(arr).unwrap())
}
``` | 2020-05-16T16:47:33 | 1.2 | 414438c7a259a213d1ce109ed08703672c7d6568 | [
"builtins::filters::array::tests::test_filter_no_value"
] | [
"builtins::filters::array::tests::test_filter_empty",
"builtins::filters::array::tests::test_concat_array",
"builtins::filters::array::tests::test_concat_single_value",
"builtins::filters::array::tests::test_first",
"builtins::filters::array::tests::test_first_empty",
"builtins::filters::array::tests::tes... | [
"renderer::tests::basic::render_variable_block_lit_expr",
"renderer::tests::basic::render_variable_block_ident"
] | [] |
Keats/tera | 350 | Keats__tera-350 | [
"348"
] | 507ae7cc7043a38b8c3a49df1e6dc7c4728bde00 | diff --git a/CHANGELOG.md b/CHANGELOG.md
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -1,10 +1,16 @@
# Changelog
+## 0.11.19 (2018-10-31)
+
+- Allow function calls in math expressions
+- Allow string concatenation to start with a number
+- Allow function calls in string concatenations
+- Add a `concat` filter to concat arrays or push an element to an array
+
## 0.11.18 (2018-10-16)
- Allow concatenation of strings and numbers
-
## 0.11.17 (2018-10-09)
- Clear local context on each forloop iteration
diff --git a/Cargo.toml b/Cargo.toml
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,6 +1,6 @@
[package]
name = "tera"
-version = "0.11.18"
+version = "0.11.19"
authors = ["Vincent Prouillet <prouillet.vincent@gmail.com>"]
license = "MIT"
readme = "README.md"
diff --git a/docs/content/docs/templates.md b/docs/content/docs/templates.md
--- a/docs/content/docs/templates.md
+++ b/docs/content/docs/templates.md
@@ -727,6 +727,25 @@ or by author name:
{{ posts | filter(attribute="author.name", value="Vincent") }}
```
+#### concat
+Appends values to an array.
+
+```jinja2
+{{ posts | concat(with=drafts) }}
+```
+
+The filter takes an array and returns a new array with the value(s) from the `with` parameter
+added. If the `with` parameter is an array, all of its values will be appended one by one to the new array and
+not as an array.
+
+This filter can also be used to append a single value to an array if the value passed to `with` is not an array:
+
+```jinja2
+{% set pages_id = pages_id | concat(with=id) %}
+```
+
+The `with` attribute is mandatory.
+
#### urlencode
Percent-encodes a string.
diff --git a/src/parser/mod.rs b/src/parser/mod.rs
--- a/src/parser/mod.rs
+++ b/src/parser/mod.rs
@@ -186,6 +186,13 @@ fn parse_string_concat(pair: Pair<Rule>) -> ExprVal {
}
values.push(ExprVal::Ident(p.as_str().to_string()))
}
+ Rule::fn_call => {
+ if !current_str.is_empty() {
+ values.push(ExprVal::String(current_str));
+ current_str = String::new();
+ }
+ values.push(ExprVal::FunctionCall(parse_fn_call(p)))
+ }
_ => unreachable!("Got {:?} in parse_string_concat", p),
};
}
diff --git a/src/parser/tera.pest b/src/parser/tera.pest
--- a/src/parser/tera.pest
+++ b/src/parser/tera.pest
@@ -71,7 +71,7 @@ dotted_square_bracket_ident = @{
dotted_ident ~ ( ("." ~ all_chars+) | square_brackets )*
}
-string_concat = { (string | dotted_square_bracket_ident) ~ ("~" ~ (float | int | string | dotted_square_bracket_ident))+ }
+string_concat = { (fn_call | float | int | string | dotted_square_bracket_ident) ~ ("~" ~ (fn_call | float | int | string | dotted_square_bracket_ident))+ }
// ----------------------------------------------------
diff --git a/src/renderer/processor.rs b/src/renderer/processor.rs
--- a/src/renderer/processor.rs
+++ b/src/renderer/processor.rs
@@ -297,6 +297,14 @@ impl<'a> Processor<'a> {
i
),
},
+ ExprVal::FunctionCall(ref fn_call) => match *self.eval_tera_fn_call(fn_call)? {
+ Value::String(ref v) => res.push_str(&v),
+ Value::Number(ref v) => res.push_str(&v.to_string()),
+ _ => bail!(
+ "Tried to concat a value that is not a string or a number from function call {}",
+ fn_call.name
+ ),
+ },
_ => unreachable!(),
};
}
diff --git a/src/renderer/processor.rs b/src/renderer/processor.rs
--- a/src/renderer/processor.rs
+++ b/src/renderer/processor.rs
@@ -656,9 +664,21 @@ impl<'a> Processor<'a> {
}
}
}
+ ExprVal::FunctionCall(ref fn_call) => {
+ let v = self.eval_tera_fn_call(fn_call)?;
+ if v.is_i64() {
+ Some(Number::from(v.as_i64().unwrap()))
+ } else if v.is_u64() {
+ Some(Number::from(v.as_u64().unwrap()))
+ } else if v.is_f64() {
+ Some(Number::from_f64(v.as_f64().unwrap()).unwrap())
+ } else {
+ bail!("Function `{}` was used in a math operation but is not returning a number", fn_call.name,)
+ }
+ },
ExprVal::String(ref val) => bail!("Tried to do math with a string: `{}`", val),
ExprVal::Bool(val) => bail!("Tried to do math with a boolean: `{}`", val),
- _ => unreachable!("unimplemented"),
+ _ => unreachable!("unimplemented math expression for {:?}", expr),
};
Ok(result)
diff --git a/src/tera.rs b/src/tera.rs
--- a/src/tera.rs
+++ b/src/tera.rs
@@ -543,6 +543,7 @@ impl Tera {
self.register_filter("slice", array::slice);
self.register_filter("group_by", array::group_by);
self.register_filter("filter", array::filter);
+ self.register_filter("concat", array::concat);
self.register_filter("pluralize", number::pluralize);
self.register_filter("round", number::round);
| diff --git a/src/builtins/filters/array.rs b/src/builtins/filters/array.rs
--- a/src/builtins/filters/array.rs
+++ b/src/builtins/filters/array.rs
@@ -174,6 +174,33 @@ pub fn slice(value: Value, args: HashMap<String, Value>) -> Result<Value> {
Ok(arr[start..end].into())
}
+/// Concat the array with another one if the `with` parameter is an array or
+/// just append it otherwise
+pub fn concat(value: Value, mut args: HashMap<String, Value>) -> Result<Value> {
+ let mut arr = try_get_value!("concat", "value", Vec<Value>, value);
+
+ let value = match args.remove("with") {
+ Some(val) => val,
+ None => bail!("The `concat` filter has to have a `with` argument"),
+ };
+
+ if value.is_array() {
+ match value {
+ Value::Array(vals) => {
+ for val in vals {
+ arr.push(val);
+ }
+ },
+ _ => unreachable!("Got something other than an array??")
+ }
+ } else {
+ arr.push(value);
+ }
+
+ Ok(to_value(arr).unwrap())
+}
+
+
#[cfg(test)]
mod tests {
use super::*;
diff --git a/src/builtins/filters/array.rs b/src/builtins/filters/array.rs
--- a/src/builtins/filters/array.rs
+++ b/src/builtins/filters/array.rs
@@ -469,4 +496,47 @@ mod tests {
assert!(res.is_ok());
assert_eq!(res.unwrap(), to_value(expected).unwrap());
}
+
+ #[test]
+ fn test_concat_array() {
+ let input = json!([
+ 1,
+ 2,
+ 3,
+ ]);
+ let mut args = HashMap::new();
+ args.insert("with".to_string(), json!([3, 4]));
+ let expected = json!([
+ 1,
+ 2,
+ 3,
+ 3,
+ 4,
+ ]);
+
+ let res = concat(input, args);
+ assert!(res.is_ok());
+ assert_eq!(res.unwrap(), to_value(expected).unwrap());
+ }
+
+ #[test]
+ fn test_concat_single_value() {
+ let input = json!([
+ 1,
+ 2,
+ 3,
+ ]);
+ let mut args = HashMap::new();
+ args.insert("with".to_string(), json!(4));
+ let expected = json!([
+ 1,
+ 2,
+ 3,
+ 4,
+ ]);
+
+ let res = concat(input, args);
+ assert!(res.is_ok());
+ assert_eq!(res.unwrap(), to_value(expected).unwrap());
+ }
}
diff --git a/src/parser/tera.pest b/src/parser/tera.pest
--- a/src/parser/tera.pest
+++ b/src/parser/tera.pest
@@ -79,7 +79,7 @@ string_concat = { (string | dotted_square_bracket_ident) ~ ("~" ~ (float | int |
/// We'll use precedence climbing on those in the parser phase
// boolean first so they are not caught as identifiers
-basic_val = _{ boolean | test | macro_call | fn_call | string_concat | dotted_square_bracket_ident | float | int | string }
+basic_val = _{ boolean | string_concat | test | macro_call | fn_call | dotted_square_bracket_ident | float | int | string }
basic_op = _{ op_plus | op_minus | op_times | op_slash | op_modulo }
basic_expr = { ("(" ~ basic_expr ~ ")" | basic_val) ~ (basic_op ~ basic_val)* }
basic_expr_filter = { basic_expr ~ filter* }
diff --git a/src/parser/tests/lexer.rs b/src/parser/tests/lexer.rs
--- a/src/parser/tests/lexer.rs
+++ b/src/parser/tests/lexer.rs
@@ -117,9 +117,13 @@ fn lex_string_concat() {
"'hello' ~ `hey`",
"'hello' ~ 1",
"'hello' ~ 3.14",
+ "1 ~ 'hello'",
+ "3.14 ~ 'hello'",
"'hello' ~ ident",
"ident ~ 'hello'",
"'hello' ~ ident[0]",
+ "'hello' ~ a_function()",
+ "a_function() ~ 'hello'",
r#"'hello' ~ "hey""#,
r#"a_string ~ " world""#,
"'hello' ~ ident ~ `ho`",
diff --git a/src/renderer/tests/basic.rs b/src/renderer/tests/basic.rs
--- a/src/renderer/tests/basic.rs
+++ b/src/renderer/tests/basic.rs
@@ -1,6 +1,9 @@
+use std::collections::BTreeMap;
+
+use serde_json::Value;
+
use context::Context;
use errors::Result;
-use std::collections::BTreeMap;
use tera::Tera;
use super::Review;
diff --git a/src/renderer/tests/basic.rs b/src/renderer/tests/basic.rs
--- a/src/renderer/tests/basic.rs
+++ b/src/renderer/tests/basic.rs
@@ -8,6 +11,8 @@ use super::Review;
fn render_template(content: &str, context: &Context) -> Result<String> {
let mut tera = Tera::default();
tera.add_raw_template("hello.html", content).unwrap();
+ tera.register_function("get_number", Box::new(|_| Ok(Value::Number(10.into()))));
+ tera.register_function("get_string", Box::new(|_| Ok(Value::String("Hello".to_string()))));
tera.render("hello.html", context)
}
diff --git a/src/renderer/tests/basic.rs b/src/renderer/tests/basic.rs
--- a/src/renderer/tests/basic.rs
+++ b/src/renderer/tests/basic.rs
@@ -96,6 +101,8 @@ fn render_variable_block_ident() {
("{{ 1 + 1 + 1 }}", "3"),
("{{ 2 - 2 - 1 }}", "-1"),
("{{ 1 - 1 + 1 }}", "1"),
+ ("{{ 1 + get_number() }}", "11"),
+ ("{{ get_number() + 1 }}", "11"),
("{{ (1.9 + a) | round }}", "4"),
("{{ 1.9 + a | round }}", "4"),
("{{ numbers | length - 1 }}", "2"),
diff --git a/src/renderer/tests/basic.rs b/src/renderer/tests/basic.rs
--- a/src/renderer/tests/basic.rs
+++ b/src/renderer/tests/basic.rs
@@ -504,6 +511,10 @@ fn can_do_string_concat() {
(r#"{{ "hello" ~ " world" }}"#, "hello world"),
(r#"{{ "hello" ~ 1 }}"#, "hello1"),
(r#"{{ "hello" ~ 3.14 }}"#, "hello3.14"),
+ (r#"{{ 3.14 ~ "hello"}}"#, "3.14hello"),
+ (r#"{{ "hello" ~ get_string() }}"#, "helloHello"),
+ (r#"{{ get_string() ~ "hello" }}"#, "Hellohello"),
+ (r#"{{ get_string() ~ 3.14 }}"#, "Hello3.14"),
(r#"{{ a_string ~ " world" }}"#, "hello world"),
(r#"{{ a_string ~ ' world ' ~ another_string }}"#, "hello world xXx"),
(r#"{{ a_string ~ another_string }}"#, "helloxXx"),
diff --git a/src/renderer/tests/basic.rs b/src/renderer/tests/basic.rs
--- a/src/renderer/tests/basic.rs
+++ b/src/renderer/tests/basic.rs
@@ -621,3 +632,22 @@ fn redefining_loop_value_doesnt_break_loop() {
assert_eq!(result.unwrap(), "abclol efghlol ijklmlol ");
}
+
+#[test]
+fn can_use_concat_to_push_to_array() {
+ let mut tera = Tera::default();
+ tera.add_raw_template(
+ "tpl",
+ r#"
+{%- set ids = [] -%}
+{% for i in range(end=5) -%}
+{%- set_global ids = ids | concat(with=i) -%}
+{%- endfor -%}
+{{ids}}"#,
+ )
+ .unwrap();
+ let context = Context::new();
+ let result = tera.render("tpl", &context);
+
+ assert_eq!(result.unwrap(), "[0, 1, 2, 3, 4]");
+}
| Expressions involving functions
Some expressions that involve function calls don't work
One such example is math operations on numbers:
```jinja2
{% set a = 3 %}
{% set b = a + func_returning_integer() %}
```
This throws an unimplemented panic in `src/renderer/processor.rs:661`
The workaround is to use an intermediate variable:
```jinja2
{% set a = 3 %}
{% set tmp = func_returning_integer() %}
{% set b = a + tmp %}
```
Another similar code is with string concatenation:
```jinja2
{% set a = "abc" %}
{% set b = a ~ func_returning_string() %}
```
This one fails to parse, the workaround is the same.
I'm not sure if this is by design or an implementation limitation, but I'd say at least the first one should work...
| That's a bug - well an unimplemented thing really. It should be easy to fix | 2018-10-23T23:44:21 | 0.11 | 549510506fb972654a625df5bb54d02f07127b9a | [
"parser::tests::lexer::lex_string_concat",
"renderer::tests::basic::can_use_concat_to_push_to_array",
"renderer::tests::basic::can_do_string_concat"
] | [
"builtins::filters::array::tests::test_concat_array",
"builtins::filters::array::tests::test_concat_single_value",
"builtins::filters::array::tests::test_first",
"builtins::filters::array::tests::test_first_empty",
"builtins::filters::array::tests::test_filter",
"builtins::filters::array::tests::test_join... | [
"parser::tests::errors::invalid_filter_section_missing_name",
"parser::tests::errors::invalid_fn_call_missing_value",
"parser::tests::errors::invalid_op",
"parser::tests::errors::missing_container_name_in_forloop",
"parser::tests::errors::missing_expression_in_if",
"parser::tests::errors::missing_value_in... | [] |
latex-lsp/texlab | 1,211 | latex-lsp__texlab-1211 | [
"1131"
] | 50c70923dffd48033c53cbe81a2b965abc8721b4 | diff --git a/CHANGELOG.md b/CHANGELOG.md
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -10,6 +10,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
### Added
- Add `texlab.inlayHints.maxLength` setting to allow limiting inlay hint text length ([#1212](https://github.com/latex-lsp/texlab/issues/1212))
+- Allow suppressing diagnostics using `% texlab: ignore` magic comments ([#1211](https://github.com/latex-lsp/texlab/pull/1211))
### Fixed
diff --git a/crates/base-db/src/semantics/tex.rs b/crates/base-db/src/semantics/tex.rs
--- a/crates/base-db/src/semantics/tex.rs
+++ b/crates/base-db/src/semantics/tex.rs
@@ -37,6 +37,7 @@ pub struct Semantics {
pub graphics_paths: FxHashSet<String>,
pub can_be_root: bool,
pub can_be_compiled: bool,
+ pub diagnostic_suppressions: Vec<TextRange>,
}
impl Semantics {
diff --git a/crates/base-db/src/semantics/tex.rs b/crates/base-db/src/semantics/tex.rs
--- a/crates/base-db/src/semantics/tex.rs
+++ b/crates/base-db/src/semantics/tex.rs
@@ -46,11 +47,15 @@ impl Semantics {
latex::SyntaxElement::Node(node) => {
self.process_node(conf, &node);
}
- latex::SyntaxElement::Token(token) => {
- if token.kind() == latex::COMMAND_NAME {
+ latex::SyntaxElement::Token(token) => match token.kind() {
+ latex::COMMAND_NAME => {
self.commands.push(Span::command(&token));
}
- }
+ latex::COMMENT if token.text().contains("texlab: ignore") => {
+ self.diagnostic_suppressions.push(token.text_range());
+ }
+ _ => {}
+ },
};
}
diff --git a/crates/diagnostics/src/manager.rs b/crates/diagnostics/src/manager.rs
--- a/crates/diagnostics/src/manager.rs
+++ b/crates/diagnostics/src/manager.rs
@@ -1,5 +1,8 @@
-use base_db::{deps::Project, util::filter_regex_patterns, Document, Owner, Workspace};
+use base_db::{
+ deps::Project, util::filter_regex_patterns, Document, DocumentData, Owner, Workspace,
+};
use multimap::MultiMap;
+use rowan::TextRange;
use rustc_hash::{FxHashMap, FxHashSet};
use url::Url;
diff --git a/crates/diagnostics/src/manager.rs b/crates/diagnostics/src/manager.rs
--- a/crates/diagnostics/src/manager.rs
+++ b/crates/diagnostics/src/manager.rs
@@ -16,7 +19,7 @@ pub struct Manager {
impl Manager {
/// Updates the syntax-based diagnostics for the given document.
pub fn update_syntax(&mut self, workspace: &Workspace, document: &Document) {
- if !Self::is_relevant(document) {
+ if !Self::is_relevant_document(document) {
return;
}
diff --git a/crates/diagnostics/src/manager.rs b/crates/diagnostics/src/manager.rs
--- a/crates/diagnostics/src/manager.rs
+++ b/crates/diagnostics/src/manager.rs
@@ -76,7 +79,7 @@ impl Manager {
for document in workspace
.iter()
- .filter(|document| Self::is_relevant(document))
+ .filter(|document| Self::is_relevant_document(document))
{
let project = Project::from_child(workspace, document);
super::citations::detect_undefined_citations(&project, document, &mut results);
diff --git a/crates/diagnostics/src/manager.rs b/crates/diagnostics/src/manager.rs
--- a/crates/diagnostics/src/manager.rs
+++ b/crates/diagnostics/src/manager.rs
@@ -87,28 +90,78 @@ impl Manager {
super::labels::detect_duplicate_labels(workspace, &mut results);
super::labels::detect_undefined_and_unused_labels(workspace, &mut results);
- let config = &workspace.config().diagnostics;
-
- results.retain(|uri, _| workspace.lookup(uri).map_or(false, Self::is_relevant));
+ results.retain(|uri, _| {
+ workspace
+ .lookup(uri)
+ .map_or(false, Self::is_relevant_document)
+ });
- for diagnostics in results.values_mut() {
- diagnostics.retain(|diagnostic| {
- filter_regex_patterns(
- diagnostic.message(),
- &config.allowed_patterns,
- &config.ignored_patterns,
- )
- });
+ for (uri, diagnostics) in results.iter_mut() {
+ diagnostics
+ .retain_mut(|diagnostic| Self::filter_diagnostic(workspace, uri, diagnostic));
}
results
}
- fn is_relevant(document: &Document) -> bool {
+ fn is_relevant_document(document: &Document) -> bool {
match document.owner {
Owner::Client => true,
Owner::Server => true,
Owner::Distro => false,
}
}
+
+ fn filter_diagnostic(workspace: &Workspace, uri: &Url, diagnostic: &mut Diagnostic) -> bool {
+ let config = &workspace.config().diagnostics;
+
+ if !filter_regex_patterns(
+ diagnostic.message(),
+ &config.allowed_patterns,
+ &config.ignored_patterns,
+ ) {
+ return false;
+ }
+
+ let Some(document) = workspace.lookup(uri) else {
+ return false;
+ };
+
+ let Some(primary_range) = diagnostic.range(&document.line_index) else {
+ return false;
+ };
+
+ if Self::is_ignored(workspace, &document.uri, &primary_range) {
+ return false;
+ }
+
+ let Some(additional_locations) = diagnostic.additional_locations_mut() else {
+ return true;
+ };
+
+ additional_locations.retain(|(uri, range)| !Self::is_ignored(workspace, uri, range));
+ if additional_locations.is_empty() {
+ return false;
+ }
+
+ true
+ }
+
+ fn is_ignored(workspace: &Workspace, uri: &Url, diag_range: &TextRange) -> bool {
+ let Some(document) = workspace.lookup(uri) else {
+ return false;
+ };
+
+ let DocumentData::Tex(data) = &document.data else {
+ return false;
+ };
+
+ let diag_line_col = document.line_index.line_col(diag_range.start());
+
+ data.semantics
+ .diagnostic_suppressions
+ .iter()
+ .map(|r| document.line_index.line_col(r.start()))
+ .any(|r| r.line == diag_line_col.line || r.line + 1 == diag_line_col.line)
+ }
}
diff --git a/crates/diagnostics/src/types.rs b/crates/diagnostics/src/types.rs
--- a/crates/diagnostics/src/types.rs
+++ b/crates/diagnostics/src/types.rs
@@ -1,9 +1,9 @@
-use line_index::LineCol;
+use line_index::{LineCol, LineIndex};
use rowan::TextRange;
use syntax::BuildError;
use url::Url;
-#[derive(Debug, PartialEq, Eq, Clone)]
+#[derive(PartialEq, Eq, Clone)]
pub enum TexError {
UnexpectedRCurly,
ExpectingRCurly,
diff --git a/crates/diagnostics/src/types.rs b/crates/diagnostics/src/types.rs
--- a/crates/diagnostics/src/types.rs
+++ b/crates/diagnostics/src/types.rs
@@ -14,7 +14,28 @@ pub enum TexError {
DuplicateLabel(Vec<(Url, TextRange)>),
}
-#[derive(Debug, PartialEq, Eq, Clone)]
+impl std::fmt::Debug for TexError {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Self::UnexpectedRCurly => write!(f, "UnexpectedRCurly"),
+ Self::ExpectingRCurly => write!(f, "ExpectingRCurly"),
+ Self::MismatchedEnvironment => write!(f, "MismatchedEnvironment"),
+ Self::UnusedLabel => write!(f, "UnusedLabel"),
+ Self::UndefinedLabel => write!(f, "UndefinedLabel"),
+ Self::UndefinedCitation => write!(f, "UndefinedCitation"),
+ Self::DuplicateLabel(locations) => {
+ let mut t = f.debug_tuple("DuplicateLabel");
+ for (uri, range) in locations {
+ t.field(&(uri.as_str(), range));
+ }
+
+ t.finish()
+ }
+ }
+ }
+}
+
+#[derive(PartialEq, Eq, Clone)]
pub enum BibError {
ExpectingLCurly,
ExpectingKey,
diff --git a/crates/diagnostics/src/types.rs b/crates/diagnostics/src/types.rs
--- a/crates/diagnostics/src/types.rs
+++ b/crates/diagnostics/src/types.rs
@@ -25,6 +46,27 @@ pub enum BibError {
DuplicateEntry(Vec<(Url, TextRange)>),
}
+impl std::fmt::Debug for BibError {
+ fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
+ match self {
+ Self::ExpectingLCurly => write!(f, "ExpectingLCurly"),
+ Self::ExpectingKey => write!(f, "ExpectingKey"),
+ Self::ExpectingRCurly => write!(f, "ExpectingRCurly"),
+ Self::ExpectingEq => write!(f, "ExpectingEq"),
+ Self::ExpectingFieldValue => write!(f, "ExpectingFieldValue"),
+ Self::UnusedEntry => write!(f, "UnusedEntry"),
+ Self::DuplicateEntry(locations) => {
+ let mut t = f.debug_tuple("DuplicateEntry");
+ for (uri, range) in locations {
+ t.field(&(uri.as_str(), range));
+ }
+
+ t.finish()
+ }
+ }
+ }
+}
+
#[derive(Debug, PartialEq, Eq, Clone)]
pub struct ChktexError {
pub start: LineCol,
diff --git a/crates/diagnostics/src/types.rs b/crates/diagnostics/src/types.rs
--- a/crates/diagnostics/src/types.rs
+++ b/crates/diagnostics/src/types.rs
@@ -74,4 +116,42 @@ impl Diagnostic {
Diagnostic::Chktex(error) => &error.message,
}
}
+
+ pub fn range(&self, line_index: &LineIndex) -> Option<TextRange> {
+ Some(match self {
+ Diagnostic::Tex(range, _) => *range,
+ Diagnostic::Bib(range, _) => *range,
+ Diagnostic::Build(range, _) => *range,
+ Diagnostic::Chktex(error) => {
+ let start = line_index.offset(error.start)?;
+ let end = line_index.offset(error.end)?;
+ TextRange::new(start, end)
+ }
+ })
+ }
+
+ pub fn additional_locations_mut(&mut self) -> Option<&mut Vec<(Url, TextRange)>> {
+ match self {
+ Diagnostic::Tex(_, err) => match err {
+ TexError::UnexpectedRCurly
+ | TexError::ExpectingRCurly
+ | TexError::MismatchedEnvironment
+ | TexError::UnusedLabel
+ | TexError::UndefinedLabel
+ | TexError::UndefinedCitation => None,
+ TexError::DuplicateLabel(locations) => Some(locations),
+ },
+ Diagnostic::Bib(_, err) => match err {
+ BibError::ExpectingLCurly
+ | BibError::ExpectingKey
+ | BibError::ExpectingRCurly
+ | BibError::ExpectingEq
+ | BibError::ExpectingFieldValue
+ | BibError::UnusedEntry => None,
+ BibError::DuplicateEntry(locations) => Some(locations),
+ },
+ Diagnostic::Chktex(_) => None,
+ Diagnostic::Build(_, _) => None,
+ }
+ }
}
| diff --git a/crates/diagnostics/src/tests.rs b/crates/diagnostics/src/tests.rs
--- a/crates/diagnostics/src/tests.rs
+++ b/crates/diagnostics/src/tests.rs
@@ -283,6 +283,33 @@ fn test_citation_undefined() {
)
}
+#[test]
+fn test_citation_undefined_ignore() {
+ check(
+ r#"
+%! main.tex
+% texlab: ignore
+\cite{foo}
+"#,
+ expect![[r#"
+ []
+ "#]],
+ )
+}
+
+#[test]
+fn test_citation_undefined_ignore_single_line() {
+ check(
+ r#"
+%! main.tex
+\cite{foo} % texlab: ignore
+"#,
+ expect![[r#"
+ []
+ "#]],
+ )
+}
+
#[test]
fn test_citation_unused() {
check(
diff --git a/crates/diagnostics/src/tests.rs b/crates/diagnostics/src/tests.rs
--- a/crates/diagnostics/src/tests.rs
+++ b/crates/diagnostics/src/tests.rs
@@ -306,3 +333,46 @@ fn test_citation_unused() {
"#]],
)
}
+
+#[test]
+fn test_label_duplicate() {
+ check(
+ r#"
+%! main.tex
+\label{foo}
+ ^^^
+\label{foo}
+ ^^^
+\label{foo} % texlab: ignore
+
+\ref{foo}
+"#,
+ expect![[r#"
+ [
+ (
+ "file:///texlab/main.tex",
+ [
+ Tex(
+ 7..10,
+ DuplicateLabel(
+ (
+ "file:///texlab/main.tex",
+ 19..22,
+ ),
+ ),
+ ),
+ Tex(
+ 19..22,
+ DuplicateLabel(
+ (
+ "file:///texlab/main.tex",
+ 7..10,
+ ),
+ ),
+ ),
+ ],
+ ),
+ ]
+ "#]],
+ )
+}
| ignore single warnings/errors
It is useful sometimes to ignore single warnings. For example I would like to have labels for each section even if I do not cite the section. so for example I would like to ignore the warnings on those lines. E.g.
\section{test}
\label{test} % type: ignore
so even if I do not have sometime \label{test} I would not get unused label warning
| Seems to be a duplicate of #1092 | 2024-09-15T16:47:24 | 5.19 | 50c70923dffd48033c53cbe81a2b965abc8721b4 | [
"tests::test_citation_undefined_ignore",
"tests::test_citation_undefined_ignore_single_line",
"tests::test_label_duplicate"
] | [
"tests::test_label_undefined",
"tests::test_bib_entry_missing_name",
"tests::test_bib_entry_missing_l_delim",
"tests::test_citation_unused",
"tests::test_label_unused",
"tests::test_bib_entry_missing_r_delim",
"tests::test_tex_unmatched_braces",
"tests::test_citation_undefined",
"tests::test_tex_env... | [] | [] |
latex-lsp/texlab | 1,291 | latex-lsp__texlab-1291 | [
"1279"
] | e99406e06519db99c4855d1e5979427cd4075f42 | diff --git a/crates/parser/src/latex.rs b/crates/parser/src/latex.rs
--- a/crates/parser/src/latex.rs
+++ b/crates/parser/src/latex.rs
@@ -113,6 +113,7 @@ impl<'a> Parser<'a> {
Token::Pipe | Token::Word | Token::Comma => self.text(context),
Token::Eq => self.eat(),
Token::Dollar => self.formula(),
+ Token::Href => self.eat(),
Token::CommandName(name) => match name {
CommandName::Generic => self.generic_command(),
CommandName::BeginEnvironment if context.allow_environment => self.environment(),
diff --git a/crates/parser/src/latex.rs b/crates/parser/src/latex.rs
--- a/crates/parser/src/latex.rs
+++ b/crates/parser/src/latex.rs
@@ -1295,6 +1296,7 @@ impl<'a> Parser<'a> {
self.builder.finish_node();
}
+
}
pub fn parse_latex(text: &str, config: &SyntaxConfig) -> GreenNode {
diff --git a/crates/parser/src/latex/lexer.rs b/crates/parser/src/latex/lexer.rs
--- a/crates/parser/src/latex/lexer.rs
+++ b/crates/parser/src/latex/lexer.rs
@@ -40,6 +40,7 @@ impl<'a> Lexer<'a> {
Token::Eq => SyntaxKind::EQUALITY_SIGN,
Token::Pipe => SyntaxKind::WORD,
Token::Word => SyntaxKind::WORD,
+ Token::Href => SyntaxKind::HREF,
Token::Dollar => SyntaxKind::DOLLAR,
Token::CommandName(_) => SyntaxKind::COMMAND_NAME,
};
diff --git a/crates/parser/src/latex/lexer/types.rs b/crates/parser/src/latex/lexer/types.rs
--- a/crates/parser/src/latex/lexer/types.rs
+++ b/crates/parser/src/latex/lexer/types.rs
@@ -43,6 +43,9 @@ pub enum Token {
#[regex(r"[^\s\\%\{\},\$\[\]\(\)=\|]+")]
Word,
+ #[regex(r"[a-zA-Z]+:\/\/[^{}]+")]
+ Href,
+
#[regex(r"\$\$?")]
Dollar,
diff --git a/crates/syntax/src/latex/kind.rs b/crates/syntax/src/latex/kind.rs
--- a/crates/syntax/src/latex/kind.rs
+++ b/crates/syntax/src/latex/kind.rs
@@ -16,6 +16,7 @@ pub enum SyntaxKind {
COMMA,
EQUALITY_SIGN,
WORD,
+ HREF,
DOLLAR,
COMMAND_NAME,
| diff --git a/crates/parser/src/latex/tests.rs b/crates/parser/src/latex/tests.rs
--- a/crates/parser/src/latex/tests.rs
+++ b/crates/parser/src/latex/tests.rs
@@ -3690,3 +3690,541 @@ fn test_label_brackets_unbalanced() {
"#]],
);
}
+
+#[test]
+fn test_href_with_space() {
+ check(
+ r#"\href{https://www.test.com/url%20%with%20space}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..59
+ PREAMBLE@0..59
+ GENERIC_COMMAND@0..59
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..47
+ L_CURLY@5..6 "{"
+ HREF@6..46 "https://www.test.com/ ..."
+ R_CURLY@46..47 "}"
+ CURLY_GROUP@47..59
+ L_CURLY@47..48 "{"
+ TEXT@48..58
+ WORD@48..49 "A"
+ WHITESPACE@49..50 " "
+ WORD@50..54 "test"
+ WHITESPACE@54..55 " "
+ WORD@55..58 "URL"
+ R_CURLY@58..59 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_href_lone_space() {
+ check(
+ r#"\href{http://example.com/%20}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..41
+ PREAMBLE@0..41
+ GENERIC_COMMAND@0..41
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..29
+ L_CURLY@5..6 "{"
+ HREF@6..28 "http://example.com/%20"
+ R_CURLY@28..29 "}"
+ CURLY_GROUP@29..41
+ L_CURLY@29..30 "{"
+ TEXT@30..40
+ WORD@30..31 "A"
+ WHITESPACE@31..32 " "
+ WORD@32..36 "test"
+ WHITESPACE@36..37 " "
+ WORD@37..40 "URL"
+ R_CURLY@40..41 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_href_with_variables() {
+ check(
+ r#"\href{https://example.com/path/to/resource?param=100%25complete}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..76
+ PREAMBLE@0..76
+ GENERIC_COMMAND@0..76
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..64
+ L_CURLY@5..6 "{"
+ HREF@6..63 "https://example.com/p ..."
+ R_CURLY@63..64 "}"
+ CURLY_GROUP@64..76
+ L_CURLY@64..65 "{"
+ TEXT@65..75
+ WORD@65..66 "A"
+ WHITESPACE@66..67 " "
+ WORD@67..71 "test"
+ WHITESPACE@71..72 " "
+ WORD@72..75 "URL"
+ R_CURLY@75..76 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_href_ftp_filename() {
+ check(
+ r#"\href{ftp://ftp.example.com/%file.txt}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..50
+ PREAMBLE@0..50
+ GENERIC_COMMAND@0..50
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..38
+ L_CURLY@5..6 "{"
+ HREF@6..37 "ftp://ftp.example.com ..."
+ R_CURLY@37..38 "}"
+ CURLY_GROUP@38..50
+ L_CURLY@38..39 "{"
+ TEXT@39..49
+ WORD@39..40 "A"
+ WHITESPACE@40..41 " "
+ WORD@41..45 "test"
+ WHITESPACE@45..46 " "
+ WORD@46..49 "URL"
+ R_CURLY@49..50 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_href_ampersand() {
+ check(
+ r#"\href{https://www.example.com/search?q=cats%26dogs}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..63
+ PREAMBLE@0..63
+ GENERIC_COMMAND@0..63
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..51
+ L_CURLY@5..6 "{"
+ HREF@6..50 "https://www.example.c ..."
+ R_CURLY@50..51 "}"
+ CURLY_GROUP@51..63
+ L_CURLY@51..52 "{"
+ TEXT@52..62
+ WORD@52..53 "A"
+ WHITESPACE@53..54 " "
+ WORD@54..58 "test"
+ WHITESPACE@58..59 " "
+ WORD@59..62 "URL"
+ R_CURLY@62..63 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_href_top_anchor() {
+ check(
+ r#"\href{http://example.com/#%top-anchor}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..50
+ PREAMBLE@0..50
+ GENERIC_COMMAND@0..50
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..38
+ L_CURLY@5..6 "{"
+ HREF@6..37 "http://example.com/#% ..."
+ R_CURLY@37..38 "}"
+ CURLY_GROUP@38..50
+ L_CURLY@38..39 "{"
+ TEXT@39..49
+ WORD@39..40 "A"
+ WHITESPACE@40..41 " "
+ WORD@41..45 "test"
+ WHITESPACE@45..46 " "
+ WORD@46..49 "URL"
+ R_CURLY@49..50 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_href_numeric_percentage() {
+ check(
+ r#"\href{https://example.com?query=100%&ref=bookmark}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..62
+ PREAMBLE@0..62
+ GENERIC_COMMAND@0..62
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..50
+ L_CURLY@5..6 "{"
+ HREF@6..49 "https://example.com?q ..."
+ R_CURLY@49..50 "}"
+ CURLY_GROUP@50..62
+ L_CURLY@50..51 "{"
+ TEXT@51..61
+ WORD@51..52 "A"
+ WHITESPACE@52..53 " "
+ WORD@53..57 "test"
+ WHITESPACE@57..58 " "
+ WORD@58..61 "URL"
+ R_CURLY@61..62 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_href_nonstandard_char() {
+ check(
+ r#"\href{https://example.com/%E2%9C%93}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..48
+ PREAMBLE@0..48
+ GENERIC_COMMAND@0..48
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..36
+ L_CURLY@5..6 "{"
+ HREF@6..35 "https://example.com/% ..."
+ R_CURLY@35..36 "}"
+ CURLY_GROUP@36..48
+ L_CURLY@36..37 "{"
+ TEXT@37..47
+ WORD@37..38 "A"
+ WHITESPACE@38..39 " "
+ WORD@39..43 "test"
+ WHITESPACE@43..44 " "
+ WORD@44..47 "URL"
+ R_CURLY@47..48 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_href_nested() {
+ check(
+ r#"\href{http://example.com/%25nested%2525}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..52
+ PREAMBLE@0..52
+ GENERIC_COMMAND@0..52
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..40
+ L_CURLY@5..6 "{"
+ HREF@6..39 "http://example.com/%2 ..."
+ R_CURLY@39..40 "}"
+ CURLY_GROUP@40..52
+ L_CURLY@40..41 "{"
+ TEXT@41..51
+ WORD@41..42 "A"
+ WHITESPACE@42..43 " "
+ WORD@43..47 "test"
+ WHITESPACE@47..48 " "
+ WORD@48..51 "URL"
+ R_CURLY@51..52 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_url_with_percent_in_path() {
+ check(
+ r#"\href{http://example.com/path/%name%}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..49
+ PREAMBLE@0..49
+ GENERIC_COMMAND@0..49
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..37
+ L_CURLY@5..6 "{"
+ HREF@6..36 "http://example.com/pa ..."
+ R_CURLY@36..37 "}"
+ CURLY_GROUP@37..49
+ L_CURLY@37..38 "{"
+ TEXT@38..48
+ WORD@38..39 "A"
+ WHITESPACE@39..40 " "
+ WORD@40..44 "test"
+ WHITESPACE@44..45 " "
+ WORD@45..48 "URL"
+ R_CURLY@48..49 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_url_with_multiple_percent_encoding() {
+ check(
+ r#"\href{https://example.com/special/%A3%24values}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..59
+ PREAMBLE@0..59
+ GENERIC_COMMAND@0..59
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..47
+ L_CURLY@5..6 "{"
+ HREF@6..46 "https://example.com/s ..."
+ R_CURLY@46..47 "}"
+ CURLY_GROUP@47..59
+ L_CURLY@47..48 "{"
+ TEXT@48..58
+ WORD@48..49 "A"
+ WHITESPACE@49..50 " "
+ WORD@50..54 "test"
+ WHITESPACE@54..55 " "
+ WORD@55..58 "URL"
+ R_CURLY@58..59 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_url_with_nested_percent_and_space() {
+ check(
+ r#"\href{https://example.com/query?var1=%25var2=%20end}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..64
+ PREAMBLE@0..64
+ GENERIC_COMMAND@0..64
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..52
+ L_CURLY@5..6 "{"
+ HREF@6..51 "https://example.com/q ..."
+ R_CURLY@51..52 "}"
+ CURLY_GROUP@52..64
+ L_CURLY@52..53 "{"
+ TEXT@53..63
+ WORD@53..54 "A"
+ WHITESPACE@54..55 " "
+ WORD@55..59 "test"
+ WHITESPACE@59..60 " "
+ WORD@60..63 "URL"
+ R_CURLY@63..64 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_url_with_triple_percent_encoding() {
+ check(
+ r#"\href{https://example.com/%2520%2520%2520}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..54
+ PREAMBLE@0..54
+ GENERIC_COMMAND@0..54
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..42
+ L_CURLY@5..6 "{"
+ HREF@6..41 "https://example.com/% ..."
+ R_CURLY@41..42 "}"
+ CURLY_GROUP@42..54
+ L_CURLY@42..43 "{"
+ TEXT@43..53
+ WORD@43..44 "A"
+ WHITESPACE@44..45 " "
+ WORD@45..49 "test"
+ WHITESPACE@49..50 " "
+ WORD@50..53 "URL"
+ R_CURLY@53..54 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_href_with_percent_character() {
+ check(
+ r#"\href{http://example.com/%}{Example with Percent}"#,
+ expect![[r#"
+ ROOT@0..49
+ PREAMBLE@0..49
+ GENERIC_COMMAND@0..49
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..27
+ L_CURLY@5..6 "{"
+ HREF@6..26 "http://example.com/%"
+ R_CURLY@26..27 "}"
+ CURLY_GROUP@27..49
+ L_CURLY@27..28 "{"
+ TEXT@28..48
+ WORD@28..35 "Example"
+ WHITESPACE@35..36 " "
+ WORD@36..40 "with"
+ WHITESPACE@40..41 " "
+ WORD@41..48 "Percent"
+ R_CURLY@48..49 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_href_with_percent_encoded_query() {
+ check(
+ r#"\href{https://example.com/search?q=%25complete}{Search Query}"#,
+ expect![[r#"
+ ROOT@0..61
+ PREAMBLE@0..61
+ GENERIC_COMMAND@0..61
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..47
+ L_CURLY@5..6 "{"
+ HREF@6..46 "https://example.com/s ..."
+ R_CURLY@46..47 "}"
+ CURLY_GROUP@47..61
+ L_CURLY@47..48 "{"
+ TEXT@48..60
+ WORD@48..54 "Search"
+ WHITESPACE@54..55 " "
+ WORD@55..60 "Query"
+ R_CURLY@60..61 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_url_with_double_percent_signs() {
+ check(
+ r#"\href{https://example.com/path/%%encoded%%}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..55
+ PREAMBLE@0..55
+ GENERIC_COMMAND@0..55
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..43
+ L_CURLY@5..6 "{"
+ HREF@6..42 "https://example.com/p ..."
+ R_CURLY@42..43 "}"
+ CURLY_GROUP@43..55
+ L_CURLY@43..44 "{"
+ TEXT@44..54
+ WORD@44..45 "A"
+ WHITESPACE@45..46 " "
+ WORD@46..50 "test"
+ WHITESPACE@50..51 " "
+ WORD@51..54 "URL"
+ R_CURLY@54..55 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_url_with_deep_nested_percent_encoding() {
+ check(
+ r#"\href{http://example.com/%%252525}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..46
+ PREAMBLE@0..46
+ GENERIC_COMMAND@0..46
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..34
+ L_CURLY@5..6 "{"
+ HREF@6..33 "http://example.com/%% ..."
+ R_CURLY@33..34 "}"
+ CURLY_GROUP@34..46
+ L_CURLY@34..35 "{"
+ TEXT@35..45
+ WORD@35..36 "A"
+ WHITESPACE@36..37 " "
+ WORD@37..41 "test"
+ WHITESPACE@41..42 " "
+ WORD@42..45 "URL"
+ R_CURLY@45..46 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_url_with_spaces_in_percent_encoding() {
+ check(
+ r#"\href{https://example.com/file%20name%20with%20spaces}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..66
+ PREAMBLE@0..66
+ GENERIC_COMMAND@0..66
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..54
+ L_CURLY@5..6 "{"
+ HREF@6..53 "https://example.com/f ..."
+ R_CURLY@53..54 "}"
+ CURLY_GROUP@54..66
+ L_CURLY@54..55 "{"
+ TEXT@55..65
+ WORD@55..56 "A"
+ WHITESPACE@56..57 " "
+ WORD@57..61 "test"
+ WHITESPACE@61..62 " "
+ WORD@62..65 "URL"
+ R_CURLY@65..66 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_url_with_percent_and_path_structure() {
+ check(
+ r#"\href{http://example.com/%/path/to/%/something}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..59
+ PREAMBLE@0..59
+ GENERIC_COMMAND@0..59
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..47
+ L_CURLY@5..6 "{"
+ HREF@6..46 "http://example.com/%/ ..."
+ R_CURLY@46..47 "}"
+ CURLY_GROUP@47..59
+ L_CURLY@47..48 "{"
+ TEXT@48..58
+ WORD@48..49 "A"
+ WHITESPACE@49..50 " "
+ WORD@50..54 "test"
+ WHITESPACE@54..55 " "
+ WORD@55..58 "URL"
+ R_CURLY@58..59 "}"
+
+ "#]],
+ );
+}
+
+#[test]
+fn test_url_with_parentheses_and_percent_encoding() {
+ check(
+ r#"\href{https://example.com/query?file=report%20%28draft%29%20v1}{A test URL}"#,
+ expect![[r#"
+ ROOT@0..75
+ PREAMBLE@0..75
+ GENERIC_COMMAND@0..75
+ COMMAND_NAME@0..5 "\\href"
+ CURLY_GROUP@5..63
+ L_CURLY@5..6 "{"
+ HREF@6..62 "https://example.com/q ..."
+ R_CURLY@62..63 "}"
+ CURLY_GROUP@63..75
+ L_CURLY@63..64 "{"
+ TEXT@64..74
+ WORD@64..65 "A"
+ WHITESPACE@65..66 " "
+ WORD@66..70 "test"
+ WHITESPACE@70..71 " "
+ WORD@71..74 "URL"
+ R_CURLY@74..75 "}"
+
+ "#]],
+ );
+}
| [bug] texlab considers links containing `%` in `\href` as a comment
Links to files containing spaces contain `%`, and `\href` allows you to insert the link as is. texlab seems to understand it as a comment and considers the `{` from `\href` to not be closed, therefore giving an error expecting a `}`.
Reproducible example:
```
\documentclass[12pt]{article}
\usepackage{hyperref}
\begin{document}
\href{https://github.com/latex-lsp/texlab/example%20file.pdf}{URL with \%}.
\end{document}
```
Thanks!
| You can escape the `%` as `\%`,
```
\href{https://github.com/latex-lsp/texlab/example\%20file.pdf}{URL with \%}.
```
Alternatively, you can use a space instead of `%20`,
```
\href{https://github.com/latex-lsp/texlab/example file.pdf}{URL with \%}.
```
Both of these produce the desired result without the diagnostic.
The hyperref documentation says:
> The special characters # and % do not need to be escaped in any way (unless
the command is used in the argument of another command).
I read this to mean that hyperref does not care if you escape the `%` or not.
Alright, thanks. I assume there is no intention to support having these characters as is (without escaping them) then?
I’m open to working on it if you have a suggestion for how to do it. I thought about it for a bit and it’s not obvious to me how to do it without some complicated look ahead while walking the tree. @pfoerster thoughts?
Great, thanks a lot! I am very new to LSPs, I'll let you know if I come up with something.
> I’m open to working on it if you have a suggestion for how to do it. I thought about it for a bit and it’s not obvious to me how to do it without some complicated look ahead while walking the tree. @pfoerster thoughts?
I think you need to do this at the parser/lexer level and introduce a `PATH` token. Walking the tree would be difficult because you have to split and tokenize the `COMMENT` token again.
I think I have this working now, and I've added one unit test for this command:
```
\href{https://www.test.com/url%20%with%20space}{A test URL}
```
I could use suggestions for other test cases to check before I push a PR. | 2024-12-14T08:26:40 | 5.21 | e99406e06519db99c4855d1e5979427cd4075f42 | [
"latex::tests::test_href_nonstandard_char",
"latex::tests::test_href_numeric_percentage",
"latex::tests::test_href_top_anchor",
"latex::tests::test_href_with_percent_character",
"latex::tests::test_href_with_percent_encoded_query",
"latex::tests::test_href_lone_space",
"latex::tests::test_href_with_spac... | [
"bibtex::tests::test_aho_2006",
"bibtex::tests::test_aksin_2006",
"bibtex::tests::test_averroes_1998",
"bibtex::tests::test_jain_1999",
"bibtex::tests::test_kastenholz_2006",
"bibtex::tests::test_betram_1996",
"bibtex::tests::test_almendro_1998",
"bibtex::tests::test_comment",
"bibtex::tests::test_k... | [] | [] |
latex-lsp/texlab | 1,251 | latex-lsp__texlab-1251 | [
"910"
] | 5fa0f4b49638497f8238a8a1e29d4ccfff50c74a | diff --git a/crates/base-db/src/semantics/auxiliary.rs b/crates/base-db/src/semantics/auxiliary.rs
--- a/crates/base-db/src/semantics/auxiliary.rs
+++ b/crates/base-db/src/semantics/auxiliary.rs
@@ -5,6 +5,7 @@ use syntax::latex::{self, HasCurly};
#[derive(Debug, Clone, Default)]
pub struct Semantics {
pub label_numbers: FxHashMap<String, String>,
+ pub section_numbers: FxHashMap<String, String>,
}
impl Semantics {
diff --git a/crates/base-db/src/semantics/auxiliary.rs b/crates/base-db/src/semantics/auxiliary.rs
--- a/crates/base-db/src/semantics/auxiliary.rs
+++ b/crates/base-db/src/semantics/auxiliary.rs
@@ -18,6 +19,9 @@ impl Semantics {
if let Some(label_number) = latex::LabelNumber::cast(node.clone()) {
self.process_label_number(&label_number);
}
+ if let Some(toc_line) = latex::TocContentsLine::cast(node.clone()) {
+ self.process_toc_line(&toc_line);
+ }
}
fn process_label_number(&mut self, label_number: &latex::LabelNumber) -> Option<()> {
diff --git a/crates/base-db/src/semantics/auxiliary.rs b/crates/base-db/src/semantics/auxiliary.rs
--- a/crates/base-db/src/semantics/auxiliary.rs
+++ b/crates/base-db/src/semantics/auxiliary.rs
@@ -40,4 +44,36 @@ impl Semantics {
self.label_numbers.insert(name, text);
Some(())
}
+
+ fn process_toc_line(&mut self, toc_line: &latex::TocContentsLine) -> Option<()> {
+ let unit = toc_line.unit().and_then(|u| u.content_text())?.to_string();
+
+ if [
+ "part",
+ "chapter",
+ "section",
+ "subsection",
+ "subsubsection",
+ "paragraph",
+ "subparagraph",
+ ]
+ .contains(&unit.as_str())
+ {
+ let line = toc_line.line()?;
+ let name = line.syntax().children().find_map(|child| {
+ latex::Text::cast(child.clone())?;
+ Some(child)
+ })?;
+ let name = name.to_string();
+
+ let num_line = line
+ .syntax()
+ .children()
+ .find_map(|child| latex::TocNumberLine::cast(child.clone()))?;
+ let number = num_line.number()?;
+ let number = number.content_text()?.replace(['{', '}'], "");
+ self.section_numbers.insert(name, number);
+ }
+ Some(())
+ }
}
diff --git a/crates/parser/src/latex.rs b/crates/parser/src/latex.rs
--- a/crates/parser/src/latex.rs
+++ b/crates/parser/src/latex.rs
@@ -159,6 +159,8 @@ impl<'a> Parser<'a> {
CommandName::VerbatimBlock => self.verbatim_block(),
CommandName::GraphicsPath => self.graphics_path(),
CommandName::BibItem => self.bibitem(),
+ CommandName::TocContentsLine => self.toc_contents_line(),
+ CommandName::TocNumberLine => self.toc_number_line(),
},
}
}
diff --git a/crates/parser/src/latex.rs b/crates/parser/src/latex.rs
--- a/crates/parser/src/latex.rs
+++ b/crates/parser/src/latex.rs
@@ -1261,6 +1263,38 @@ impl<'a> Parser<'a> {
self.builder.finish_node();
}
+
+ fn toc_contents_line(&mut self) {
+ self.builder.start_node(TOC_CONTENTS_LINE.into());
+ self.eat();
+ self.trivia();
+
+ if self.lexer.peek() == Some(Token::LCurly) {
+ self.curly_group();
+ }
+
+ if self.lexer.peek() == Some(Token::LCurly) {
+ self.curly_group();
+ }
+
+ if self.lexer.peek() == Some(Token::LCurly) {
+ self.curly_group();
+ }
+
+ self.builder.finish_node();
+ }
+
+ fn toc_number_line(&mut self) {
+ self.builder.start_node(TOC_NUMBER_LINE.into());
+ self.eat();
+ self.trivia();
+
+ if self.lexer.peek() == Some(Token::LCurly) {
+ self.curly_group();
+ }
+
+ self.builder.finish_node();
+ }
}
pub fn parse_latex(text: &str, config: &SyntaxConfig) -> GreenNode {
diff --git a/crates/parser/src/latex/lexer/commands.rs b/crates/parser/src/latex/lexer/commands.rs
--- a/crates/parser/src/latex/lexer/commands.rs
+++ b/crates/parser/src/latex/lexer/commands.rs
@@ -97,6 +97,8 @@ pub fn classify(name: &str, config: &SyntaxConfig) -> CommandName {
"fi" => CommandName::EndBlockComment,
"verb" => CommandName::VerbatimBlock,
"bibitem" => CommandName::BibItem,
+ "contentsline" => CommandName::TocContentsLine,
+ "numberline" => CommandName::TocNumberLine,
_ if config.citation_commands.contains(name) => CommandName::Citation,
_ if config.label_definition_commands.contains(name) => CommandName::LabelDefinition,
diff --git a/crates/parser/src/latex/lexer/types.rs b/crates/parser/src/latex/lexer/types.rs
--- a/crates/parser/src/latex/lexer/types.rs
+++ b/crates/parser/src/latex/lexer/types.rs
@@ -96,6 +96,8 @@ pub enum CommandName {
EndBlockComment,
VerbatimBlock,
BibItem,
+ TocContentsLine,
+ TocNumberLine,
}
#[derive(Debug, PartialEq, Eq, PartialOrd, Ord, Clone, Copy, Hash)]
diff --git a/crates/symbols/src/document/tex.rs b/crates/symbols/src/document/tex.rs
--- a/crates/symbols/src/document/tex.rs
+++ b/crates/symbols/src/document/tex.rs
@@ -67,16 +67,14 @@ impl<'a> SymbolBuilder<'a> {
let group_text = group.content_text()?;
let kind = SymbolKind::Section;
- let symbol = match self.find_label(section.syntax()) {
- Some(label) => {
- let name = match self.find_label_number(&label.text) {
- Some(number) => format!("{number} {group_text}"),
- None => group_text,
- };
+ let name = match self.find_section_number(&group_text) {
+ Some(number) => format!("{number} {group_text}"),
+ None => group_text,
+ };
- Symbol::new_label(name, kind, range, label)
- }
- None => Symbol::new_simple(group_text, kind, range, range),
+ let symbol = match self.find_label(section.syntax()) {
+ Some(label) => Symbol::new_label(name, kind, range, label),
+ None => Symbol::new_simple(name, kind, range, range)
};
Some(symbol)
diff --git a/crates/symbols/src/document/tex.rs b/crates/symbols/src/document/tex.rs
--- a/crates/symbols/src/document/tex.rs
+++ b/crates/symbols/src/document/tex.rs
@@ -247,6 +245,15 @@ impl<'a> SymbolBuilder<'a> {
Some(Span { text, range })
}
+ fn find_section_number(&self, name: &str) -> Option<&str> {
+ self.project
+ .documents
+ .iter()
+ .filter_map(|document| document.data.as_aux())
+ .find_map(|data| data.semantics.section_numbers.get(name))
+ .map(String::as_str)
+ }
+
fn find_label_number(&self, name: &str) -> Option<&str> {
self.project
.documents
diff --git a/crates/syntax/src/latex/cst.rs b/crates/syntax/src/latex/cst.rs
--- a/crates/syntax/src/latex/cst.rs
+++ b/crates/syntax/src/latex/cst.rs
@@ -762,3 +762,31 @@ impl BibItem {
self.syntax().children().find_map(CurlyGroupWord::cast)
}
}
+
+cst_node!(TocContentsLine, TOC_CONTENTS_LINE);
+
+impl TocContentsLine {
+ pub fn command(&self) -> Option<SyntaxToken> {
+ self.syntax().first_token()
+ }
+
+ pub fn unit(&self) -> Option<CurlyGroup> {
+ self.syntax().children().find_map(CurlyGroup::cast)
+ }
+
+ pub fn line(&self) -> Option<CurlyGroup> {
+ self.syntax().children().filter_map(CurlyGroup::cast).nth(1)
+ }
+}
+
+cst_node!(TocNumberLine, TOC_NUMBER_LINE);
+
+impl TocNumberLine {
+ pub fn command(&self) -> Option<SyntaxToken> {
+ self.syntax().first_token()
+ }
+
+ pub fn number(&self) -> Option<CurlyGroup> {
+ self.syntax().children().find_map(CurlyGroup::cast)
+ }
+}
diff --git a/crates/syntax/src/latex/kind.rs b/crates/syntax/src/latex/kind.rs
--- a/crates/syntax/src/latex/kind.rs
+++ b/crates/syntax/src/latex/kind.rs
@@ -83,6 +83,8 @@ pub enum SyntaxKind {
GRAPHICS_PATH,
BLOCK_COMMENT,
BIBITEM,
+ TOC_CONTENTS_LINE,
+ TOC_NUMBER_LINE,
ROOT,
}
| diff --git a/crates/symbols/src/document/tests.rs b/crates/symbols/src/document/tests.rs
--- a/crates/symbols/src/document/tests.rs
+++ b/crates/symbols/src/document/tests.rs
@@ -281,44 +281,44 @@ fn test_section() {
check(
&fixture,
expect![[r#"
- [
- Symbol {
- name: "Foo",
- kind: Section,
- label: None,
- full_range: 43..56,
- selection_range: 43..56,
- children: [],
- },
- Symbol {
- name: "2 Bar",
- kind: Section,
- label: Some(
- Span(
- "sec:bar",
- 71..86,
+ [
+ Symbol {
+ name: "1 Foo",
+ kind: Section,
+ label: None,
+ full_range: 43..56,
+ selection_range: 43..56,
+ children: [],
+ },
+ Symbol {
+ name: "2 Bar",
+ kind: Section,
+ label: Some(
+ Span(
+ "sec:bar",
+ 71..86,
+ ),
),
- ),
- full_range: 58..119,
- selection_range: 71..86,
- children: [
- Symbol {
- name: "Baz",
- kind: Section,
- label: Some(
- Span(
- "sec:baz",
- 104..119,
+ full_range: 58..119,
+ selection_range: 71..86,
+ children: [
+ Symbol {
+ name: "Baz",
+ kind: Section,
+ label: Some(
+ Span(
+ "sec:baz",
+ 104..119,
+ ),
),
- ),
- full_range: 88..119,
- selection_range: 104..119,
- children: [],
- },
- ],
- },
- ]
- "#]],
+ full_range: 88..119,
+ selection_range: 104..119,
+ children: [],
+ },
+ ],
+ },
+ ]
+ "#]],
);
}
| Numbering on textDocument/documentSymbol
I noticed, that when requesting for textDocument/documentSymbol the resulting sections (SymbolKind.Module = 2) only have a number if they are assigned with a label.
A small example would be a document with
```
\section{Section}\label{sec:outer} % 1 Section
\subsection{Subsection} % 1.1 Subsection
\section{Section'} % 2 Section'
\subsection{Subsection'}\label{sec:inner} % 2.1 Subsection'
```
The resulting document symbols would be:
```
1 Section
Subsection
Section'
2.1 Subsection'
```
So the numbers are correct. But when a section has no label, it is not assigned any number.
Similarly,
```
\section{Section}\label{sec:outer} % 1 Section
\subsection{Subsection} % 1.1 Subsection
\section{Section'} % 2 Section'
\subsection{Dummy} % 2.1 Dummy
\subsection*{Subsection'}\label{sec:inner} % Subsection'
```
would result in
```
1 Section
Subsection
Section'
Dummy
2.1 Subsection'
```
even though the last subsection does not have a number (it is assigned the number from the previous section, and when Dummy section is removed the number would be 2).
[This is not bad, since we can not use the label anyway, because there is no number.]
I guess my expectation would be for the document symbol to always include the number that appears in the actual document. So the names that are after the % in the code.
I have not looked at the code yet, so I don't know if this is feasible in the current way the document is parsed. I also noticed a similar thing for equations. Equations in the symbol list will be numbered only if they have a label.
| 2024-10-26T11:03:29 | 5.20 | 5fa0f4b49638497f8238a8a1e29d4ccfff50c74a | [
"document::tests::test_section"
] | [
"document::tests::test_equation",
"workspace::sort::tests::test_no_cycles",
"document::tests::test_float",
"workspace::sort::tests::test_two_layers",
"workspace::sort::tests::test_multiple_roots",
"document::tests::test_theorem_amsthm",
"document::tests::test_theorem_thmtools",
"workspace::sort::tests... | [] | [] | |
XAMPPRocky/tokei | 535 | XAMPPRocky__tokei-535 | [
"330"
] | ee19d28195647556607a7f0465ea96e94cf48eda | diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -65,6 +65,24 @@ let x = /* There is a reason
10;
```
+The `verbatim_quotes` property expects an array of strings, as some languages
+have multiple syntaxes for defining verbatim strings. A verbatim string
+in the context of Tokei is a string literal that can have unescaped `"`s. For example [`CSharp`](https://docs.microsoft.com/en-us/dotnet/csharp/programming-guide/strings/#regular-and-verbatim-string-literals)
+
+```json
+"CSharp": {
+ "verbatim_quotes": [
+ [
+ "@\\\"",
+ "\\\""
+ ]
+ ]
+```
+
+```csharp
+const string BasePath = @"C:\";
+```
+
Some languages have a single, standard filename with no extension
like `Makefile` or `Dockerfile`. These can be defined with the
`filenames` property:
diff --git a/build.rs b/build.rs
--- a/build.rs
+++ b/build.rs
@@ -46,6 +46,7 @@ fn generate_languages(out_dir: &OsStr) -> Result<(), Box<dyn error::Error>> {
}
sort_prop!("quotes");
+ sort_prop!("verbatim_quotes");
sort_prop!("multi_line");
}
diff --git a/languages.json b/languages.json
--- a/languages.json
+++ b/languages.json
@@ -208,6 +208,7 @@
"line_comment": ["//"],
"multi_line_comments": [["/*", "*/"]],
"quotes": [["\\\"", "\\\""]],
+ "verbatim_quotes": [["R\\\"(", ")\\\""]],
"extensions": ["cc", "cpp", "cxx", "c++", "pcc", "tpp"]
},
"CppHeader": {
diff --git a/languages.json b/languages.json
--- a/languages.json
+++ b/languages.json
@@ -229,6 +230,7 @@
"line_comment": ["//"],
"multi_line_comments": [["/*", "*/"]],
"quotes": [["\\\"", "\\\""]],
+ "verbatim_quotes": [["@\\\"", "\\\""]],
"extensions": ["cs", "csx"]
},
"CShell": {
diff --git a/languages.json b/languages.json
--- a/languages.json
+++ b/languages.json
@@ -399,6 +401,7 @@
"line_comment": ["//"],
"multi_line_comments": [["(*", "*)"]],
"quotes": [["\\\"", "\\\""]],
+ "verbatim_quotes": [["@\\\"", "\\\""]],
"extensions": ["fs", "fsi", "fsx", "fsscript"]
},
"Futhark": {
diff --git a/languages.json b/languages.json
--- a/languages.json
+++ b/languages.json
@@ -938,7 +941,8 @@
"multi_line_comments": [["/*", "*/"]],
"nested": true,
"extensions": ["rs"],
- "quotes": [["\\\"", "\\\""], ["r#\\\"", "\\\"#"], ["#\\\"", "\\\"#"]]
+ "quotes": [["\\\"", "\\\""], ["#\\\"", "\\\"#"]],
+ "verbatim_quotes": [["r##\\\"", "\\\"##"], ["r#\\\"", "\\\"#"]]
},
"ReStructuredText": {
"blank": true,
diff --git a/src/language/language_type.hbs.rs b/src/language/language_type.hbs.rs
--- a/src/language/language_type.hbs.rs
+++ b/src/language/language_type.hbs.rs
@@ -169,6 +169,28 @@ impl LanguageType {
}
}
+ /// Returns the verbatim quotes of a language.
+ /// ```
+ /// use tokei::LanguageType;
+ /// let lang = LanguageType::CSharp;
+ /// assert_eq!(lang.verbatim_quotes(), &[("@\"", "\"")]);
+ /// ```
+ pub fn verbatim_quotes(self) -> &'static [(&'static str, &'static str)] {
+ match self {
+ {{#each languages}}
+ {{#if this.verbatim_quotes}}
+ {{~@key}} =>
+ &[
+ {{~#each this.verbatim_quotes}}
+ ( {{~#each this}}"{{this}}",{{~/each}} ),
+ {{~/each}}
+ ],
+ {{~/if}}
+ {{~/each}}
+ _ => &[],
+ }
+ }
+
/// Returns the doc quotes of a language.
/// ```
/// use tokei::LanguageType;
diff --git a/src/language/syntax.rs b/src/language/syntax.rs
--- a/src/language/syntax.rs
+++ b/src/language/syntax.rs
@@ -23,6 +23,7 @@ pub(crate) struct SyntaxCounter {
pub(crate) quote: Option<&'static str>,
pub(crate) quote_is_doc_quote: bool,
pub(crate) stack: Vec<&'static str>,
+ pub(crate) quote_is_verbatim: bool,
}
#[derive(Clone, Debug)]
diff --git a/src/language/syntax.rs b/src/language/syntax.rs
--- a/src/language/syntax.rs
+++ b/src/language/syntax.rs
@@ -36,6 +37,7 @@ pub(crate) struct SharedMatchers {
pub multi_line_comments: &'static [(&'static str, &'static str)],
pub nested_comments: &'static [(&'static str, &'static str)],
pub string_literals: &'static [(&'static str, &'static str)],
+ pub verbatim_string_literals: &'static [(&'static str, &'static str)],
}
impl SharedMatchers {
diff --git a/src/language/syntax.rs b/src/language/syntax.rs
--- a/src/language/syntax.rs
+++ b/src/language/syntax.rs
@@ -72,6 +74,7 @@ impl SharedMatchers {
multi_line_comments: language.multi_line_comments(),
nested_comments: language.nested_comments(),
string_literals: language.quotes(),
+ verbatim_string_literals: language.verbatim_quotes(),
}
}
}
diff --git a/src/language/syntax.rs b/src/language/syntax.rs
--- a/src/language/syntax.rs
+++ b/src/language/syntax.rs
@@ -81,6 +84,7 @@ impl SyntaxCounter {
Self {
shared: SharedMatchers::new(language),
quote_is_doc_quote: false,
+ quote_is_verbatim: false,
stack: Vec::with_capacity(1),
quote: None,
}
diff --git a/src/language/syntax.rs b/src/language/syntax.rs
--- a/src/language/syntax.rs
+++ b/src/language/syntax.rs
@@ -119,10 +123,24 @@ impl SyntaxCounter {
{
trace!("Start Doc {:?}", start);
self.quote = Some(end);
+ self.quote_is_verbatim = false;
self.quote_is_doc_quote = true;
return Some(start.len());
}
+ if let Some((start, end)) = self
+ .shared
+ .verbatim_string_literals
+ .iter()
+ .find(|(s, _)| window.starts_with(s.as_bytes()))
+ {
+ trace!("Start verbatim {:?}", start);
+ self.quote = Some(end);
+ self.quote_is_verbatim = true;
+ self.quote_is_doc_quote = false;
+ return Some(start.len());
+ }
+
if let Some((start, end)) = self
.shared
.string_literals
diff --git a/src/language/syntax.rs b/src/language/syntax.rs
--- a/src/language/syntax.rs
+++ b/src/language/syntax.rs
@@ -131,6 +149,7 @@ impl SyntaxCounter {
{
trace!("Start {:?}", start);
self.quote = Some(end);
+ self.quote_is_verbatim = false;
self.quote_is_doc_quote = false;
return Some(start.len());
}
diff --git a/src/language/syntax.rs b/src/language/syntax.rs
--- a/src/language/syntax.rs
+++ b/src/language/syntax.rs
@@ -192,9 +211,9 @@ impl SyntaxCounter {
let quote = self.quote.take().unwrap();
trace!("End {:?}", quote);
Some(quote.len())
- } else if window.starts_with(br"\") {
+ } else if window.starts_with(br"\") && !self.quote_is_verbatim {
// Tell the state machine to skip the next character because it
- // has been escaped.
+ // has been escaped if the string isn't a verbatim string.
Some(2)
} else {
None
| diff --git a/CONTRIBUTING.md b/CONTRIBUTING.md
--- a/CONTRIBUTING.md
+++ b/CONTRIBUTING.md
@@ -123,11 +141,13 @@ The comment should use the syntax of the language you're testing.
A good example of a test file is [`tests/data/rust.rs`].
```rust
-// 39 lines 32 code 2 comments 5 blanks
+// 41 lines 33 code 3 comments 5 blanks
/* /**/ */
fn main() {
- let start = "/*";
+ let start = r##"/*\"
+\"##;
+ // comment
loop {
if x.len() >= 2 && x[0] == '*' && x[1] == '/' { // found the */
break;
diff --git a/tests/data/cpp.cpp b/tests/data/cpp.cpp
--- a/tests/data/cpp.cpp
+++ b/tests/data/cpp.cpp
@@ -1,45 +1,46 @@
-/* 45 lines 37 code 2 comments 6 blanks */
+/* 46 lines 37 code 3 comments 6 blanks */
#include <stdio.h>
// bubble_sort_function
-void bubble_sort (int a[10], int n) {
- int t;
- int j = n;
- int s = 1;
- while (s > 0) {
- s = 0;
- int i = 1;
- while (i < j) {
- if (a[i] < a[i - 1]) {
- t = a[i];
- a[i] = a[i - 1];
- a[i - 1] = t;
- s = 1;
- }
- i++;
- }
- j--;
+void bubble_sort(int a[10], int n) {
+ int t;
+ int j = n;
+ int s = 1;
+ while (s > 0) {
+ s = 0;
+ int i = 1;
+ while (i < j) {
+ if (a[i] < a[i - 1]) {
+ t = a[i];
+ a[i] = a[i - 1];
+ a[i - 1] = t;
+ s = 1;
+ }
+ i++;
}
+ j--;
+ }
}
-void main() {
- int a[] = {4, 65, 2, -31, 0, 99, 2, 83, 782, 1};
- int n = 10;
- int i = 0;
+int main() {
+ int a[] = {4, 65, 2, -31, 0, 99, 2, 83, 782, 1};
+ int n = 10;
+ int i = 0;
- printf("Before sorting:\n\n");
- while (i < n) {
- printf("%d ", a[i]);
- i++;
- }
+ printf(R"(Before sorting:\n\n" )");
+ // Single line comment
+ while (i < n) {
+ printf("%d ", a[i]);
+ i++;
+ }
- bubble_sort(a, n);
+ bubble_sort(a, n);
- printf("\n\nAfter sorting:\n\n");
- i = 0;
- while (i < n) {
- printf("%d ", a[i]);
- i++;
- }
+ printf("\n\nAfter sorting:\n\n");
+ i = 0;
+ while (i < n) {
+ printf("%d ", a[i]);
+ i++;
+ }
}
diff --git /dev/null b/tests/data/csharp.cs
new file mode 100644
--- /dev/null
+++ b/tests/data/csharp.cs
@@ -0,0 +1,26 @@
+// 26 lines 14 code 7 comments 5 blanks
+namespace Ns
+{
+ /*
+
+ multi-line comment
+
+ */
+ public class Cls
+ {
+ private const string BasePath = @"a:\";
+
+ [Fact]
+ public void MyTest()
+ {
+ // Arrange.
+ Foo();
+
+ // Act.
+ Bar();
+
+ // Assert.
+ Baz();
+ }
+ }
+}
diff --git a/tests/data/fsharp.fs b/tests/data/fsharp.fs
--- a/tests/data/fsharp.fs
+++ b/tests/data/fsharp.fs
@@ -1,4 +1,4 @@
-(* 13 lines 5 code 4 comments 4 blanks *)
+(* 15 lines 6 code 5 comments 4 blanks *)
// Comment
diff --git a/tests/data/fsharp.fs b/tests/data/fsharp.fs
--- a/tests/data/fsharp.fs
+++ b/tests/data/fsharp.fs
@@ -11,3 +11,5 @@ let bar = "(*
Code
*)"
+let baz = @"a:\"
+// Comment
diff --git a/tests/data/rust.rs b/tests/data/rust.rs
--- a/tests/data/rust.rs
+++ b/tests/data/rust.rs
@@ -1,8 +1,10 @@
-// 39 lines 32 code 2 comments 5 blanks
+// 41 lines 33 code 3 comments 5 blanks
/* /**/ */
fn main() {
- let start = "/*";
+ let start = r##"/*##\"
+\"##;
+ // comment
loop {
if x.len() >= 2 && x[0] == '*' && x[1] == '/' { // found the */
break;
| C# line count is broken if verbatim string with backslash is present
**Describe the bug**
Count is broken when there is a line in C# file with verbatim string in it like this:
`private const string BasePath = @"a:\";`.
**To Reproduce**
1. Download file [SccTokeiFailure.zip](https://github.com/boyter/scc/files/3132587/SccTokeiFailure.zip)
2. Extract
3. `tokei`
**Actual behavior**
```
-------------------------------------------------------------------------------
Language Files Lines Code Comments Blanks
-------------------------------------------------------------------------------
C# 1 20 17 0 3
-------------------------------------------------------------------------------
Total 1 20 17 0 3
-------------------------------------------------------------------------------
```
**Expected behavior**
```
-------------------------------------------------------------------------------
Language Files Lines Code Comments Blanks
-------------------------------------------------------------------------------
C# 1 20 14 3 3
-------------------------------------------------------------------------------
Total 1 20 14 3 3
-------------------------------------------------------------------------------
```
**Tested on**
* Debian 9.7
* Windows 10
| Thank you for your issue! Yes tokei has no concept of verbatim strings. Do you know of any more languages with verbatim strings? I know Rust has them. I'll try to add this for the next version of Tokei.
F# has them same way as C#. C++ has Raw Strings. Groovy has some form of that too AFAIK.
Python has them too, e.g. normally are used to write literal regular expression as-in `r'.+\.+'`.
@XAMPPRocky I'd be happy to look into this, if you don't mind :smile:
@NickHackman You're more than welcome to, it's important to be aware that some languages have simple verbatim strings where it's essentially a different quote syntax, and there are languages like Rust that allow you to have a more complex syntax such as `r####"Hello World"####` where the preceding `#`'s must match the `#`'s at the end. This is pretty uncommon however and I would accept something that only handles them like a quote syntax and for Rust includes `r#` and `r##` as those are two most common levels. | 2020-05-19T12:32:23 | 11.2 | ee19d28195647556607a7f0465ea96e94cf48eda | [
"cpp",
"csharp",
"fsharp",
"rust"
] | [
"utils::ext::tests::contains",
"language::language_type::tests::rust_allows_nested",
"utils::ext::tests::trim",
"utils::ext::tests::is_whitespace",
"utils::fs::tests::ignore_directory_with_extension",
"utils::fs::tests::no_ignore_parent",
"utils::fs::tests::no_ignore",
"utils::fs::tests::hidden",
"u... | [] | [] |
XAMPPRocky/tokei | 868 | XAMPPRocky__tokei-868 | [
"859"
] | 9ea344445cd938efe24d2132fb3b4347d8c8a6bc | diff --git a/src/language/language_type.rs b/src/language/language_type.rs
--- a/src/language/language_type.rs
+++ b/src/language/language_type.rs
@@ -260,7 +260,8 @@ impl LanguageType {
.collect::<Vec<_>>();
for (language, stats) in iter {
- *jupyter_stats.blobs.entry(language).or_default() += stats;
+ *jupyter_stats.blobs.entry(language).or_default() += &stats;
+ jupyter_stats += &stats;
}
Some(jupyter_stats)
diff --git a/src/stats.rs b/src/stats.rs
--- a/src/stats.rs
+++ b/src/stats.rs
@@ -46,12 +46,18 @@ impl CodeStats {
impl ops::AddAssign for CodeStats {
fn add_assign(&mut self, rhs: Self) {
+ self.add_assign(&rhs);
+ }
+}
+
+impl ops::AddAssign<&'_ CodeStats> for CodeStats {
+ fn add_assign(&mut self, rhs: &'_ CodeStats) {
self.blanks += rhs.blanks;
self.code += rhs.code;
self.comments += rhs.comments;
- for (language, stats) in rhs.blobs {
- *self.blobs.entry(language).or_default() += stats;
+ for (language, stats) in &rhs.blobs {
+ *self.blobs.entry(*language).or_default() += stats;
}
}
}
| diff --git a/src/language/language_type.rs b/src/language/language_type.rs
--- a/src/language/language_type.rs
+++ b/src/language/language_type.rs
@@ -271,8 +272,29 @@ impl LanguageType {
mod tests {
use super::*;
+ use std::{fs, path::Path};
+
#[test]
fn rust_allows_nested() {
assert!(LanguageType::Rust.allows_nested());
}
+
+ #[test]
+ fn jupyter_notebook_has_correct_totals() {
+ let sample_notebook =
+ fs::read_to_string(Path::new("tests").join("data").join("jupyter.ipynb")).unwrap();
+
+ let CodeStats {
+ blanks,
+ code,
+ comments,
+ ..
+ } = LanguageType::Jupyter
+ .parse_jupyter(sample_notebook.as_bytes(), &Config::default())
+ .unwrap();
+
+ assert_eq!(blanks, 115);
+ assert_eq!(code, 528);
+ assert_eq!(comments, 333);
+ }
}
| Jupyter Notebooks lines does not contribute to total sum
Hi!
It seams to me that `.ipynb` files get counted, but somehow do not contribute to the grand total.
Example with the table from below:
415 lines of JavaScript + 2536 lines of Python = 2951 lines. Note that we have not added the "Jupyter Notebooks" lines here. 2951 is the number in the last line of the table (grand total).
Correct would be 415 lines of JavaScript + 2536 lines of Python + 264 lines of Jupyter Notebooks = 3215 lines total.
```
❯ tokei src/bachelorarbeit simulation_results_and_analysis/*.ipynb
===============================================================================
Language Files Lines Code Comments Blanks
===============================================================================
JavaScript 5 415 300 30 85
Python 35 2536 2142 68 326
-------------------------------------------------------------------------------
Jupyter Notebooks 4 0 0 0 0
|- Markdown 2 30 0 28 2
|- Python 4 234 222 7 5
(Total) 264 222 35 7
===============================================================================
Total 44 2951 2442 98 411
===============================================================================
```
Tokei version:
```
❯ tokei -V
tokei 12.1.2 compiled with serialization support: json, cbor, yaml
```
Counting results without Jupyter Notebook for comparison:
```
❯ tokei src/bachelorarbeit
===============================================================================
Language Files Lines Code Comments Blanks
===============================================================================
JavaScript 5 415 300 30 85
Python 35 2536 2142 68 326
===============================================================================
Total 40 2951 2442 98 411
===============================================================================
```
I assume what happens is that the line `Jupyter Notebooks 4 0 0 0 0` gets added to the grand total. Because the number of files is correctly count in grand total. The other columns are not added to the grand total.
| 2021-12-19T13:58:12 | 12.1 | 9ea344445cd938efe24d2132fb3b4347d8c8a6bc | [
"language::language_type::tests::jupyter_notebook_has_correct_totals"
] | [
"language::language_type::tests::rust_allows_nested",
"utils::ext::tests::is_whitespace",
"utils::ext::tests::contains",
"utils::ext::tests::trim",
"utils::fs::tests::ignore_directory_with_extension",
"utils::fs::tests::no_ignore_dot",
"utils::fs::tests::no_ignore_implies_dot",
"utils::fs::tests::no_i... | [] | [] | |
hyperium/tonic | 395 | hyperium__tonic-395 | [
"379"
] | 034c8502549fef85cd81332bc401bd2492f66ed3 | diff --git a/tonic/src/status.rs b/tonic/src/status.rs
--- a/tonic/src/status.rs
+++ b/tonic/src/status.rs
@@ -474,10 +474,11 @@ impl Status {
}
if !self.details.is_empty() {
+ let details = base64::encode_config(&self.details[..], base64::STANDARD_NO_PAD);
+
header_map.insert(
GRPC_STATUS_DETAILS_HEADER,
- HeaderValue::from_maybe_shared(self.details.clone())
- .map_err(invalid_header_value_byte)?,
+ HeaderValue::from_maybe_shared(details).map_err(invalid_header_value_byte)?,
);
}
diff --git a/tonic/src/status.rs b/tonic/src/status.rs
--- a/tonic/src/status.rs
+++ b/tonic/src/status.rs
@@ -501,12 +502,6 @@ impl Status {
details: Bytes,
metadata: MetadataMap,
) -> Status {
- let details = if details.is_empty() {
- details
- } else {
- base64::encode_config(&details[..], base64::STANDARD_NO_PAD).into()
- };
-
Status {
code,
message: message.into(),
| diff --git a/tonic/src/status.rs b/tonic/src/status.rs
--- a/tonic/src/status.rs
+++ b/tonic/src/status.rs
@@ -837,4 +832,23 @@ mod tests {
assert_eq!(Status::data_loss("").code(), Code::DataLoss);
assert_eq!(Status::unauthenticated("").code(), Code::Unauthenticated);
}
+
+ #[test]
+ fn details() {
+ const DETAILS: &[u8] = &[0, 2, 3];
+
+ let status = Status::with_details(Code::Unavailable, "some message", DETAILS.into());
+
+ assert_eq!(&status.details()[..], DETAILS);
+
+ let header_map = status.to_header_map().unwrap();
+
+ let b64_details = base64::encode_config(&DETAILS[..], base64::STANDARD_NO_PAD);
+
+ assert_eq!(header_map[super::GRPC_STATUS_DETAILS_HEADER], b64_details);
+
+ let status = Status::from_header_map(&header_map).unwrap();
+
+ assert_eq!(&status.details()[..], DETAILS);
+ }
}
| `Status::details` leaks base64 encoding
Currently, the `Status::with_details` will encode the details blob into base64 but when the getter `Status::details` gets called it will return the base64 encoded version where in-fact this should return the original blob.
| 2020-07-11T00:31:22 | 0.2 | 034c8502549fef85cd81332bc401bd2492f66ed3 | [
"status::tests::details"
] | [
"codec::buffer::tests::decode_buf",
"metadata::map::tests::test_from_headers_takes_http_headers",
"metadata::map::tests::test_values_categorizes_binary_entries",
"metadata::map::tests::test_values_categorizes_ascii_entries",
"codec::buffer::tests::encode_buf",
"metadata::map::tests::test_iter_mut_categori... | [] | [] | |
rome/tools | 4,732 | rome__tools-4732 | [
"4193"
] | c1973600b0e5222ea7a294b82f744a8b08cc40c5 | diff --git a/CHANGELOG.md b/CHANGELOG.md
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -40,6 +40,11 @@
- Fixed the diagnostics emitted when running the `rome format` command;
+- Rome doesn't warn anymore when discovering (possibly infinite) symbolic links between directories.
+ This fixes [#4193](https://github.com/rome/tools/issues/4193) which resulted in incorrect warnings
+ when a single file or directory was pointed at by multiple symbolic links. Symbolic links to other
+ symbolic links do still trigger warnings if they are too deeply nested.
+
### Configuration
#### Other changes
diff --git a/crates/rome_fs/src/fs.rs b/crates/rome_fs/src/fs.rs
--- a/crates/rome_fs/src/fs.rs
+++ b/crates/rome_fs/src/fs.rs
@@ -306,8 +306,8 @@ pub enum ErrorKind {
UnknownFileType,
/// Dereferenced (broken) symbolic link
DereferencedSymlink(String),
- /// Symbolic link cycle or symbolic link infinite expansion
- InfiniteSymlinkExpansion(String),
+ /// Too deeply nested symbolic link expansion
+ DeeplyNestedSymlinkExpansion(String),
}
impl console::fmt::Display for ErrorKind {
diff --git a/crates/rome_fs/src/fs.rs b/crates/rome_fs/src/fs.rs
--- a/crates/rome_fs/src/fs.rs
+++ b/crates/rome_fs/src/fs.rs
@@ -316,7 +316,9 @@ impl console::fmt::Display for ErrorKind {
ErrorKind::CantReadFile(_) => fmt.write_str("Rome couldn't read the file"),
ErrorKind::UnknownFileType => fmt.write_str("Unknown file type"),
ErrorKind::DereferencedSymlink(_) => fmt.write_str("Dereferenced symlink"),
- ErrorKind::InfiniteSymlinkExpansion(_) => fmt.write_str("Infinite symlink expansion"),
+ ErrorKind::DeeplyNestedSymlinkExpansion(_) => {
+ fmt.write_str("Deeply nested symlink expansion")
+ }
}
}
}
diff --git a/crates/rome_fs/src/fs.rs b/crates/rome_fs/src/fs.rs
--- a/crates/rome_fs/src/fs.rs
+++ b/crates/rome_fs/src/fs.rs
@@ -327,7 +329,9 @@ impl std::fmt::Display for ErrorKind {
ErrorKind::CantReadFile(_) => fmt.write_str("Rome couldn't read the file"),
ErrorKind::UnknownFileType => write!(fmt, "Unknown file type"),
ErrorKind::DereferencedSymlink(_) => write!(fmt, "Dereferenced symlink"),
- ErrorKind::InfiniteSymlinkExpansion(_) => write!(fmt, "Infinite symlink expansion"),
+ ErrorKind::DeeplyNestedSymlinkExpansion(_) => {
+ write!(fmt, "Deeply nested symlink expansion")
+ }
}
}
}
diff --git a/crates/rome_fs/src/fs.rs b/crates/rome_fs/src/fs.rs
--- a/crates/rome_fs/src/fs.rs
+++ b/crates/rome_fs/src/fs.rs
@@ -348,9 +352,9 @@ impl Advices for ErrorKind {
LogCategory::Info,
&format!("Rome encountered a file system entry that is a broken symbolic link: {}", path),
),
- ErrorKind::InfiniteSymlinkExpansion(path) => visitor.record_log(
+ ErrorKind::DeeplyNestedSymlinkExpansion(path) => visitor.record_log(
LogCategory::Error,
- &format!("Rome encountered a file system entry that leads to an infinite symbolic link expansion, causing an infinite cycle: {}", path),
+ &format!("Rome encountered a file system entry with too many nested symbolic links, possibly forming an infinite cycle: {}", path),
),
}
}
diff --git a/crates/rome_fs/src/fs/memory.rs b/crates/rome_fs/src/fs/memory.rs
--- a/crates/rome_fs/src/fs/memory.rs
+++ b/crates/rome_fs/src/fs/memory.rs
@@ -54,7 +54,7 @@ type FileEntry = Arc<Mutex<Vec<u8>>>;
pub enum ErrorEntry {
UnknownFileType,
DereferencedSymlink(PathBuf),
- InfiniteSymlinkExpansion(PathBuf),
+ DeeplyNestedSymlinkExpansion(PathBuf),
}
impl MemoryFileSystem {
diff --git a/crates/rome_fs/src/fs/memory.rs b/crates/rome_fs/src/fs/memory.rs
--- a/crates/rome_fs/src/fs/memory.rs
+++ b/crates/rome_fs/src/fs/memory.rs
@@ -254,8 +254,10 @@ impl<'scope> TraversalScope<'scope> for MemoryTraversalScope<'scope> {
ErrorEntry::DereferencedSymlink(path) => {
ErrorKind::DereferencedSymlink(path.to_string_lossy().to_string())
}
- ErrorEntry::InfiniteSymlinkExpansion(path) => {
- ErrorKind::InfiniteSymlinkExpansion(path.to_string_lossy().to_string())
+ ErrorEntry::DeeplyNestedSymlinkExpansion(path) => {
+ ErrorKind::DeeplyNestedSymlinkExpansion(
+ path.to_string_lossy().to_string(),
+ )
}
},
severity: Severity::Warning,
diff --git a/crates/rome_fs/src/fs/os.rs b/crates/rome_fs/src/fs/os.rs
--- a/crates/rome_fs/src/fs/os.rs
+++ b/crates/rome_fs/src/fs/os.rs
@@ -7,7 +7,7 @@ use crate::{
};
use rayon::{scope, Scope};
use rome_diagnostics::{adapters::IoError, DiagnosticExt, Error, Severity};
-use std::fs::DirEntry;
+use std::fs::{DirEntry, FileType};
use std::{
env,
ffi::OsStr,
diff --git a/crates/rome_fs/src/fs/os.rs b/crates/rome_fs/src/fs/os.rs
--- a/crates/rome_fs/src/fs/os.rs
+++ b/crates/rome_fs/src/fs/os.rs
@@ -17,6 +17,8 @@ use std::{
path::{Path, PathBuf},
};
+const MAX_SYMLINK_DEPTH: u8 = 3;
+
/// Implementation of [FileSystem] that directly calls through to the underlying OS
pub struct OsFileSystem;
diff --git a/crates/rome_fs/src/fs/os.rs b/crates/rome_fs/src/fs/os.rs
--- a/crates/rome_fs/src/fs/os.rs
+++ b/crates/rome_fs/src/fs/os.rs
@@ -104,8 +106,8 @@ impl<'scope> OsTraversalScope<'scope> {
}
impl<'scope> TraversalScope<'scope> for OsTraversalScope<'scope> {
- fn spawn(&self, ctx: &'scope dyn TraversalContext, path: PathBuf) {
- let file_type = match path.metadata() {
+ fn spawn(&self, ctx: &'scope dyn TraversalContext, mut path: PathBuf) {
+ let mut file_type = match path.metadata() {
Ok(meta) => meta.file_type(),
Err(err) => {
ctx.push_diagnostic(
diff --git a/crates/rome_fs/src/fs/os.rs b/crates/rome_fs/src/fs/os.rs
--- a/crates/rome_fs/src/fs/os.rs
+++ b/crates/rome_fs/src/fs/os.rs
@@ -114,36 +116,15 @@ impl<'scope> TraversalScope<'scope> for OsTraversalScope<'scope> {
return;
}
};
- if file_type.is_symlink() {
- tracing::info!("Translating symlink: {:?}", path);
- let path = match fs::read_link(&path) {
- Ok(path) => path,
- Err(err) => {
- ctx.push_diagnostic(
- IoError::from(err).with_file_path(path.to_string_lossy().to_string()),
- );
- return;
- }
- };
- if let Err(err) = fs::symlink_metadata(&path) {
- if err.kind() == IoErrorKind::NotFound {
- let path = path.to_string_lossy().to_string();
- ctx.push_diagnostic(Error::from(FileSystemDiagnostic {
- path: path.clone(),
- error_kind: ErrorKind::DereferencedSymlink(path),
- severity: Severity::Warning,
- }));
- } else {
- ctx.push_diagnostic(
- IoError::from(err).with_file_path(path.to_string_lossy().to_string()),
- );
- }
+ if file_type.is_symlink() {
+ let Ok((target_path, target_file_type)) = expand_symbolic_link(path, ctx) else {
return;
};
- return self.spawn(ctx, path);
- };
+ path = target_path;
+ file_type = target_file_type;
+ }
let _ = ctx.interner().intern_path(path.clone());
diff --git a/crates/rome_fs/src/fs/os.rs b/crates/rome_fs/src/fs/os.rs
--- a/crates/rome_fs/src/fs/os.rs
+++ b/crates/rome_fs/src/fs/os.rs
@@ -196,15 +177,12 @@ fn handle_dir<'scope>(
};
for entry in iter {
- let entry = match entry {
- Ok(entry) => entry,
+ match entry {
+ Ok(entry) => handle_dir_entry(scope, ctx, entry, origin_path.clone()),
Err(err) => {
ctx.push_diagnostic(IoError::from(err).with_file_path(path.display().to_string()));
- continue;
}
- };
-
- handle_dir_entry(scope, ctx, entry, origin_path.clone());
+ }
}
}
diff --git a/crates/rome_fs/src/fs/os.rs b/crates/rome_fs/src/fs/os.rs
--- a/crates/rome_fs/src/fs/os.rs
+++ b/crates/rome_fs/src/fs/os.rs
@@ -230,54 +208,24 @@ fn handle_dir_entry<'scope>(
};
if file_type.is_symlink() {
- tracing::info!("Translating symlink: {:?}", path);
- let target_path = match fs::read_link(&path) {
- Ok(path) => path,
- Err(err) => {
- ctx.push_diagnostic(
- IoError::from(err).with_file_path(path.to_string_lossy().to_string()),
- );
- return;
- }
- };
-
- file_type = match path.metadata() {
- Ok(meta) => meta.file_type(),
- Err(err) => {
- if err.kind() == IoErrorKind::NotFound {
- let path = path.to_string_lossy().to_string();
- ctx.push_diagnostic(Error::from(FileSystemDiagnostic {
- path: path.clone(),
- error_kind: ErrorKind::DereferencedSymlink(path),
- severity: Severity::Warning,
- }));
- } else {
- ctx.push_diagnostic(
- IoError::from(err).with_file_path(path.to_string_lossy().to_string()),
- );
- }
- return;
- }
+ let Ok((target_path, target_file_type)) = expand_symbolic_link(path.clone(), ctx) else {
+ return;
};
- if file_type.is_dir() {
+ if target_file_type.is_dir() {
// Override the origin path of the symbolic link
origin_path = Some(path);
}
path = target_path;
- };
+ file_type = target_file_type;
+ }
let inserted = ctx.interner().intern_path(path.clone());
- // Determine whether an equivalent path already exists
if !inserted {
- let path = path.to_string_lossy().to_string();
- ctx.push_diagnostic(Error::from(FileSystemDiagnostic {
- path: path.clone(),
- error_kind: ErrorKind::InfiniteSymlinkExpansion(path),
- severity: Severity::Warning,
- }));
+ // If the path was already inserted, it could have been pointed at by
+ // multiple symlinks. No need to traverse again.
return;
}
diff --git a/crates/rome_fs/src/fs/os.rs b/crates/rome_fs/src/fs/os.rs
--- a/crates/rome_fs/src/fs/os.rs
+++ b/crates/rome_fs/src/fs/os.rs
@@ -338,6 +286,86 @@ fn handle_dir_entry<'scope>(
}));
}
+/// Indicates a symbolic link could not be expanded.
+///
+/// Has no fields, since the diagnostics are already generated inside
+/// [follow_symlink()] and the caller doesn't need to do anything except
+/// an early return.
+struct SymlinkExpansionError;
+
+/// Expands symlinks by recursively following them up to [MAX_SYMLINK_DEPTH].
+///
+/// ## Returns
+///
+/// Returns a tuple where the first element is the target path being pointed to
+/// and the second element is the target file type.
+fn expand_symbolic_link(
+ mut path: PathBuf,
+ ctx: &dyn TraversalContext,
+) -> Result<(PathBuf, FileType), SymlinkExpansionError> {
+ let mut symlink_depth = 0;
+ loop {
+ symlink_depth += 1;
+ if symlink_depth > MAX_SYMLINK_DEPTH {
+ let path = path.to_string_lossy().to_string();
+ ctx.push_diagnostic(Error::from(FileSystemDiagnostic {
+ path: path.clone(),
+ error_kind: ErrorKind::DeeplyNestedSymlinkExpansion(path),
+ severity: Severity::Warning,
+ }));
+ return Err(SymlinkExpansionError);
+ }
+
+ let (target_path, target_file_type) = follow_symlink(&path, ctx)?;
+
+ if target_file_type.is_symlink() {
+ path = target_path;
+ continue;
+ }
+
+ return Ok((target_path, target_file_type));
+ }
+}
+
+fn follow_symlink(
+ path: &Path,
+ ctx: &dyn TraversalContext,
+) -> Result<(PathBuf, FileType), SymlinkExpansionError> {
+ tracing::info!("Translating symlink: {path:?}");
+
+ let target_path = fs::read_link(path).map_err(|err| {
+ ctx.push_diagnostic(IoError::from(err).with_file_path(path.to_string_lossy().to_string()));
+ SymlinkExpansionError
+ })?;
+
+ // Make sure relative symlinks are resolved:
+ let target_path = path
+ .parent()
+ .map(|parent_dir| parent_dir.join(&target_path))
+ .unwrap_or(target_path);
+
+ let target_file_type = match fs::symlink_metadata(&target_path) {
+ Ok(meta) => meta.file_type(),
+ Err(err) => {
+ if err.kind() == IoErrorKind::NotFound {
+ let path = path.to_string_lossy().to_string();
+ ctx.push_diagnostic(Error::from(FileSystemDiagnostic {
+ path: path.clone(),
+ error_kind: ErrorKind::DereferencedSymlink(path),
+ severity: Severity::Warning,
+ }));
+ } else {
+ ctx.push_diagnostic(
+ IoError::from(err).with_file_path(path.to_string_lossy().to_string()),
+ );
+ }
+ return Err(SymlinkExpansionError);
+ }
+ };
+
+ Ok((target_path, target_file_type))
+}
+
impl From<fs::FileType> for ErrorKind {
fn from(_: fs::FileType) -> Self {
Self::UnknownFileType
| diff --git a/Cargo.toml b/Cargo.toml
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -61,10 +61,10 @@ rome_parser = { version = "0.0.1", path = "./crates/rome_parser"
rome_rowan = { version = "0.0.1", path = "./crates/rome_rowan" }
rome_service = { path = "./crates/rome_service" }
rome_suppression = { version = "0.0.1", path = "./crates/rome_suppression" }
+rome_test_utils = { path = "./crates/rome_test_utils" }
rome_text_edit = { version = "0.0.1", path = "./crates/rome_text_edit" }
rome_text_size = { version = "0.0.1", path = "./crates/rome_text_size" }
tests_macros = { path = "./crates/tests_macros" }
-rome_test_utils = { path = "./crates/rome_test_utils" }
# Crates needed in the workspace
bitflags = "2.3.1"
diff --git a/crates/rome_cli/tests/commands/check.rs b/crates/rome_cli/tests/commands/check.rs
--- a/crates/rome_cli/tests/commands/check.rs
+++ b/crates/rome_cli/tests/commands/check.rs
@@ -797,15 +797,11 @@ fn fs_error_dereferenced_symlink() {
let fs = MemoryFileSystem::default();
let mut console = BufferConsole::default();
- let root_path = temp_dir().join("rome_test_broken_symlink");
+ let root_path = temp_dir().join("check_rome_test_broken_symlink");
let subdir_path = root_path.join("prefix");
- #[allow(unused_must_use)]
- {
- remove_dir_all(root_path.display().to_string().as_str());
- }
- create_dir(root_path.display().to_string().as_str()).unwrap();
- create_dir(subdir_path).unwrap();
+ let _ = remove_dir_all(&root_path);
+ create_dir_all(subdir_path).unwrap();
#[cfg(target_family = "unix")]
{
diff --git a/crates/rome_cli/tests/commands/check.rs b/crates/rome_cli/tests/commands/check.rs
--- a/crates/rome_cli/tests/commands/check.rs
+++ b/crates/rome_cli/tests/commands/check.rs
@@ -840,39 +836,28 @@ fn fs_error_dereferenced_symlink() {
}
#[test]
-fn fs_error_infinite_symlink_exapansion() {
+fn fs_error_infinite_symlink_expansion_to_dirs() {
let fs = MemoryFileSystem::default();
let mut console = BufferConsole::default();
- let root_path = temp_dir().join("rome_test_infinite_symlink_exapansion");
+ let root_path = temp_dir().join("check_rome_test_infinite_symlink_expansion_to_dirs");
let subdir1_path = root_path.join("prefix");
let subdir2_path = root_path.join("foo").join("bar");
- #[allow(unused_must_use)]
- {
- remove_dir_all(root_path.display().to_string().as_str());
- }
- create_dir(root_path.display().to_string().as_str()).unwrap();
- create_dir(subdir1_path.clone()).unwrap();
-
- create_dir_all(subdir2_path.clone()).unwrap();
+ let _ = remove_dir_all(&root_path);
+ create_dir_all(&subdir1_path).unwrap();
+ create_dir_all(&subdir2_path).unwrap();
#[cfg(target_family = "unix")]
{
- symlink(subdir1_path.clone(), root_path.join("self_symlink1")).unwrap();
- symlink(subdir1_path, subdir2_path.join("self_symlink2")).unwrap();
+ symlink(&subdir2_path, subdir1_path.join("symlink1")).unwrap();
+ symlink(subdir1_path, subdir2_path.join("symlink2")).unwrap();
}
#[cfg(target_os = "windows")]
{
- check_windows_symlink!(symlink_dir(
- subdir1_path.clone(),
- root_path.join("self_symlink1")
- ));
- check_windows_symlink!(symlink_dir(
- subdir1_path,
- subdir2_path.join("self_symlink2")
- ));
+ check_windows_symlink!(symlink_dir(&subdir2_path, &subdir1_path.join("symlink1")));
+ check_windows_symlink!(symlink_dir(subdir1_path, subdir2_path.join("symlink2")));
}
let result = run_cli(
diff --git a/crates/rome_cli/tests/commands/check.rs b/crates/rome_cli/tests/commands/check.rs
--- a/crates/rome_cli/tests/commands/check.rs
+++ b/crates/rome_cli/tests/commands/check.rs
@@ -887,13 +872,69 @@ fn fs_error_infinite_symlink_exapansion() {
assert_cli_snapshot(SnapshotPayload::new(
module_path!(),
- "fs_error_infinite_symlink_expansion",
+ "fs_error_infinite_symlink_expansion_to_dirs",
fs,
console,
result,
));
}
+#[test]
+fn fs_error_infinite_symlink_expansion_to_files() {
+ let mut console = BufferConsole::default();
+
+ let root_path = temp_dir().join("check_rome_test_infinite_symlink_expansion_to_files");
+ let subdir1_path = root_path.join("prefix");
+ let subdir2_path = root_path.join("foo").join("bar");
+
+ let _ = remove_dir_all(&root_path);
+ create_dir_all(&subdir1_path).unwrap();
+ create_dir_all(&subdir2_path).unwrap();
+
+ let symlink1_path = subdir1_path.join("symlink1");
+ let symlink2_path = subdir2_path.join("symlink2");
+
+ #[cfg(target_family = "unix")]
+ {
+ symlink(&symlink2_path, &symlink1_path).unwrap();
+ symlink(&symlink1_path, &symlink2_path).unwrap();
+ }
+
+ #[cfg(target_os = "windows")]
+ {
+ check_windows_symlink!(symlink_dir(&symlink2_path, &symlink1_path));
+ check_windows_symlink!(symlink_dir(&symlink1_path, &symlink2_path));
+ }
+
+ let result = run_cli(
+ DynRef::Owned(Box::new(OsFileSystem)),
+ &mut console,
+ Args::from([("check"), (root_path.display().to_string().as_str())].as_slice()),
+ );
+
+ remove_dir_all(root_path).unwrap();
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ // Don't use a snapshot here, since the diagnostics can be reported in
+ // arbitrary order:
+ assert!(console
+ .out_buffer
+ .iter()
+ .flat_map(|msg| msg.content.0.iter())
+ .any(|node| node.content.contains("Deeply nested symlink expansion")));
+ assert!(console
+ .out_buffer
+ .iter()
+ .flat_map(|msg| msg.content.0.iter())
+ .any(|node| node.content.contains(&symlink1_path.display().to_string())));
+ assert!(console
+ .out_buffer
+ .iter()
+ .flat_map(|msg| msg.content.0.iter())
+ .any(|node| node.content.contains(&symlink2_path.display().to_string())));
+}
+
#[test]
fn fs_error_read_only() {
let mut fs = MemoryFileSystem::new_read_only();
diff --git a/crates/rome_cli/tests/commands/check.rs b/crates/rome_cli/tests/commands/check.rs
--- a/crates/rome_cli/tests/commands/check.rs
+++ b/crates/rome_cli/tests/commands/check.rs
@@ -970,28 +1011,27 @@ fn fs_error_unknown() {
// │ └── test.js // ok
// └── src
// ├── symlink_testcase1_1 -> hidden_nested
+// ├── symlink_testcase1_3 -> hidden_testcase1/test/test.js
// └── symlink_testcase2 -> hidden_testcase2
#[test]
fn fs_files_ignore_symlink() {
let fs = MemoryFileSystem::default();
let mut console = BufferConsole::default();
- let root_path = temp_dir().join("rome_test_files_ignore_symlink");
+ let root_path = temp_dir().join("check_rome_test_files_ignore_symlink");
let src_path = root_path.join("src");
let testcase1_path = root_path.join("hidden_testcase1");
let testcase1_sub_path = testcase1_path.join("test");
+ let testcase1_sub_file_path = testcase1_sub_path.join("test.js");
let testcase2_path = root_path.join("hidden_testcase2");
let nested_path = root_path.join("hidden_nested");
let nested_sub_path = nested_path.join("test");
- #[allow(unused_must_use)]
- {
- remove_dir_all(root_path.display().to_string().as_str());
- }
- create_dir(root_path.display().to_string().as_str()).unwrap();
- create_dir(src_path.clone()).unwrap();
+ let _ = remove_dir_all(&root_path);
+ create_dir(&root_path).unwrap();
+ create_dir(&src_path).unwrap();
create_dir_all(testcase1_sub_path.clone()).unwrap();
create_dir(testcase2_path.clone()).unwrap();
create_dir_all(nested_sub_path.clone()).unwrap();
diff --git a/crates/rome_cli/tests/commands/check.rs b/crates/rome_cli/tests/commands/check.rs
--- a/crates/rome_cli/tests/commands/check.rs
+++ b/crates/rome_cli/tests/commands/check.rs
@@ -1000,27 +1040,32 @@ fn fs_files_ignore_symlink() {
let symlink_testcase1_1_path = src_path.join("symlink_testcase1_1");
// hidden_nested/test/symlink_testcase1_2
let symlink_testcase1_2_path = nested_sub_path.join("symlink_testcase1_2");
+ // src/symlink_testcase1_3
+ let symlink_testcase1_3_path = src_path.join("symlink_testcase1_3");
// src/symlink_testcase2
let symlink_testcase2_path = src_path.join("symlink_testcase2");
#[cfg(target_family = "unix")]
{
- // src/test/symlink_testcase1_1 -> hidden_nested
+ // src/symlink_testcase1_1 -> hidden_nested
symlink(nested_path, symlink_testcase1_1_path).unwrap();
// hidden_nested/test/symlink_testcase1_2 -> hidden_testcase1
symlink(testcase1_path, symlink_testcase1_2_path).unwrap();
+ // src/symlink_testcase1_3 -> hidden_testcase1/test/test.js
+ symlink(testcase1_sub_file_path, symlink_testcase1_3_path).unwrap();
// src/symlink_testcase2 -> hidden_testcase2
- symlink(testcase2_path.clone(), symlink_testcase2_path).unwrap();
+ symlink(&testcase2_path, symlink_testcase2_path).unwrap();
}
#[cfg(target_os = "windows")]
{
- check_windows_symlink!(symlink_dir(nested_path.clone(), symlink_testcase1_1_path));
+ check_windows_symlink!(symlink_dir(nested_path, symlink_testcase1_1_path));
+ check_windows_symlink!(symlink_dir(testcase1_path, symlink_testcase1_2_path));
check_windows_symlink!(symlink_dir(
- testcase1_path.clone(),
- symlink_testcase1_2_path
+ testcase1_sub_file_path,
+ symlink_testcase1_3_path
));
- check_windows_symlink!(symlink_dir(testcase2_path.clone(), symlink_testcase2_path));
+ check_windows_symlink!(symlink_dir(&testcase2_path, symlink_testcase2_path));
}
let config_path = root_path.join("rome.json");
diff --git a/crates/rome_cli/tests/commands/lint.rs b/crates/rome_cli/tests/commands/lint.rs
--- a/crates/rome_cli/tests/commands/lint.rs
+++ b/crates/rome_cli/tests/commands/lint.rs
@@ -794,14 +794,11 @@ fn fs_error_dereferenced_symlink() {
let fs = MemoryFileSystem::default();
let mut console = BufferConsole::default();
- let root_path = temp_dir().join("rome_test_broken_symlink");
+ let root_path = temp_dir().join("lint_rome_test_broken_symlink");
let subdir_path = root_path.join("prefix");
- #[allow(unused_must_use)]
- {
- remove_dir_all(root_path.display().to_string().as_str());
- }
- create_dir(root_path.display().to_string().as_str()).unwrap();
+ let _ = remove_dir_all(&root_path);
+ create_dir(&root_path).unwrap();
create_dir(subdir_path).unwrap();
#[cfg(target_family = "unix")]
diff --git a/crates/rome_cli/tests/commands/lint.rs b/crates/rome_cli/tests/commands/lint.rs
--- a/crates/rome_cli/tests/commands/lint.rs
+++ b/crates/rome_cli/tests/commands/lint.rs
@@ -837,39 +834,28 @@ fn fs_error_dereferenced_symlink() {
}
#[test]
-fn fs_error_infinite_symlink_exapansion() {
+fn fs_error_infinite_symlink_expansion_to_dirs() {
let fs = MemoryFileSystem::default();
let mut console = BufferConsole::default();
- let root_path = temp_dir().join("rome_test_infinite_symlink_exapansion");
+ let root_path = temp_dir().join("lint_rome_test_infinite_symlink_expansion_to_dirs");
let subdir1_path = root_path.join("prefix");
let subdir2_path = root_path.join("foo").join("bar");
- #[allow(unused_must_use)]
- {
- remove_dir_all(root_path.display().to_string().as_str());
- }
- create_dir(root_path.display().to_string().as_str()).unwrap();
- create_dir(subdir1_path.clone()).unwrap();
-
- create_dir_all(subdir2_path.clone()).unwrap();
+ let _ = remove_dir_all(&root_path);
+ create_dir_all(&subdir1_path).unwrap();
+ create_dir_all(&subdir2_path).unwrap();
#[cfg(target_family = "unix")]
{
- symlink(subdir1_path.clone(), root_path.join("self_symlink1")).unwrap();
- symlink(subdir1_path, subdir2_path.join("self_symlink2")).unwrap();
+ symlink(&subdir2_path, subdir1_path.join("symlink1")).unwrap();
+ symlink(subdir1_path, subdir2_path.join("symlink2")).unwrap();
}
#[cfg(target_os = "windows")]
{
- check_windows_symlink!(symlink_dir(
- subdir1_path.clone(),
- root_path.join("self_symlink1")
- ));
- check_windows_symlink!(symlink_dir(
- subdir1_path,
- subdir2_path.join("self_symlink2")
- ));
+ check_windows_symlink!(symlink_dir(&subdir2_path, &subdir1_path.join("symlink1")));
+ check_windows_symlink!(symlink_dir(subdir1_path, subdir2_path.join("symlink2")));
}
let result = run_cli(
diff --git a/crates/rome_cli/tests/commands/lint.rs b/crates/rome_cli/tests/commands/lint.rs
--- a/crates/rome_cli/tests/commands/lint.rs
+++ b/crates/rome_cli/tests/commands/lint.rs
@@ -884,13 +870,69 @@ fn fs_error_infinite_symlink_exapansion() {
assert_cli_snapshot(SnapshotPayload::new(
module_path!(),
- "fs_error_infinite_symlink_expansion",
+ "fs_error_infinite_symlink_expansion_to_dirs",
fs,
console,
result,
));
}
+#[test]
+fn fs_error_infinite_symlink_expansion_to_files() {
+ let mut console = BufferConsole::default();
+
+ let root_path = temp_dir().join("lint_rome_test_infinite_symlink_expansion_to_files");
+ let subdir1_path = root_path.join("prefix");
+ let subdir2_path = root_path.join("foo").join("bar");
+
+ let _ = remove_dir_all(&root_path);
+ create_dir_all(&subdir1_path).unwrap();
+ create_dir_all(&subdir2_path).unwrap();
+
+ let symlink1_path = subdir1_path.join("symlink1");
+ let symlink2_path = subdir2_path.join("symlink2");
+
+ #[cfg(target_family = "unix")]
+ {
+ symlink(&symlink2_path, &symlink1_path).unwrap();
+ symlink(&symlink1_path, &symlink2_path).unwrap();
+ }
+
+ #[cfg(target_os = "windows")]
+ {
+ check_windows_symlink!(symlink_dir(&symlink2_path, &symlink1_path));
+ check_windows_symlink!(symlink_dir(&symlink1_path, &symlink2_path));
+ }
+
+ let result = run_cli(
+ DynRef::Owned(Box::new(OsFileSystem)),
+ &mut console,
+ Args::from([("lint"), (root_path.display().to_string().as_str())].as_slice()),
+ );
+
+ remove_dir_all(root_path).unwrap();
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ // Don't use a snapshot here, since the diagnostics can be reported in
+ // arbitrary order:
+ assert!(console
+ .out_buffer
+ .iter()
+ .flat_map(|msg| msg.content.0.iter())
+ .any(|node| node.content.contains("Deeply nested symlink expansion")));
+ assert!(console
+ .out_buffer
+ .iter()
+ .flat_map(|msg| msg.content.0.iter())
+ .any(|node| node.content.contains(&symlink1_path.display().to_string())));
+ assert!(console
+ .out_buffer
+ .iter()
+ .flat_map(|msg| msg.content.0.iter())
+ .any(|node| node.content.contains(&symlink2_path.display().to_string())));
+}
+
#[test]
fn fs_error_read_only() {
let mut fs = MemoryFileSystem::new_read_only();
diff --git a/crates/rome_cli/tests/commands/lint.rs b/crates/rome_cli/tests/commands/lint.rs
--- a/crates/rome_cli/tests/commands/lint.rs
+++ b/crates/rome_cli/tests/commands/lint.rs
@@ -973,22 +1015,20 @@ fn fs_files_ignore_symlink() {
let fs = MemoryFileSystem::default();
let mut console = BufferConsole::default();
- let root_path = temp_dir().join("rome_test_files_ignore_symlink");
+ let root_path = temp_dir().join("lint_rome_test_files_ignore_symlink");
let src_path = root_path.join("src");
let testcase1_path = root_path.join("hidden_testcase1");
let testcase1_sub_path = testcase1_path.join("test");
+ let testcase1_sub_file_path = testcase1_sub_path.join("test.js");
let testcase2_path = root_path.join("hidden_testcase2");
let nested_path = root_path.join("hidden_nested");
let nested_sub_path = nested_path.join("test");
- #[allow(unused_must_use)]
- {
- remove_dir_all(root_path.display().to_string().as_str());
- }
- create_dir(root_path.display().to_string().as_str()).unwrap();
- create_dir(src_path.clone()).unwrap();
+ let _ = remove_dir_all(&root_path);
+ create_dir(&root_path).unwrap();
+ create_dir(&src_path).unwrap();
create_dir_all(testcase1_sub_path.clone()).unwrap();
create_dir(testcase2_path.clone()).unwrap();
create_dir_all(nested_sub_path.clone()).unwrap();
diff --git a/crates/rome_cli/tests/commands/lint.rs b/crates/rome_cli/tests/commands/lint.rs
--- a/crates/rome_cli/tests/commands/lint.rs
+++ b/crates/rome_cli/tests/commands/lint.rs
@@ -997,27 +1037,32 @@ fn fs_files_ignore_symlink() {
let symlink_testcase1_1_path = src_path.join("symlink_testcase1_1");
// hidden_nested/test/symlink_testcase1_2
let symlink_testcase1_2_path = nested_sub_path.join("symlink_testcase1_2");
+ // src/symlink_testcase1_3
+ let symlink_testcase1_3_path = src_path.join("symlink_testcase1_3");
// src/symlink_testcase2
let symlink_testcase2_path = src_path.join("symlink_testcase2");
#[cfg(target_family = "unix")]
{
- // src/test/symlink_testcase1_1 -> hidden_nested
+ // src/symlink_testcase1_1 -> hidden_nested
symlink(nested_path, symlink_testcase1_1_path).unwrap();
// hidden_nested/test/symlink_testcase1_2 -> hidden_testcase1
symlink(testcase1_path, symlink_testcase1_2_path).unwrap();
+ // src/symlink_testcase1_3 -> hidden_testcase1/test/test.js
+ symlink(testcase1_sub_file_path, symlink_testcase1_3_path).unwrap();
// src/symlink_testcase2 -> hidden_testcase2
- symlink(testcase2_path.clone(), symlink_testcase2_path).unwrap();
+ symlink(&testcase2_path, symlink_testcase2_path).unwrap();
}
#[cfg(target_os = "windows")]
{
- check_windows_symlink!(symlink_dir(nested_path.clone(), symlink_testcase1_1_path));
+ check_windows_symlink!(symlink_dir(nested_path, symlink_testcase1_1_path));
+ check_windows_symlink!(symlink_dir(testcase1_path, symlink_testcase1_2_path));
check_windows_symlink!(symlink_dir(
- testcase1_path.clone(),
- symlink_testcase1_2_path
+ testcase1_sub_file_path,
+ symlink_testcase1_3_path
));
- check_windows_symlink!(symlink_dir(testcase2_path.clone(), symlink_testcase2_path));
+ check_windows_symlink!(symlink_dir(&testcase2_path, symlink_testcase2_path));
}
let config_path = root_path.join("rome.json");
diff --git a/crates/rome_cli/tests/snapshots/main_commands_check/fs_error_dereferenced_symlink.snap b/crates/rome_cli/tests/snapshots/main_commands_check/fs_error_dereferenced_symlink.snap
--- a/crates/rome_cli/tests/snapshots/main_commands_check/fs_error_dereferenced_symlink.snap
+++ b/crates/rome_cli/tests/snapshots/main_commands_check/fs_error_dereferenced_symlink.snap
@@ -16,11 +16,11 @@ internalError/io ━━━━━━━━━━━━━━━━━━━━━
# Emitted Messages
```block
-<TEMP_DIR>/rome_test_broken_symlink/broken_symlink internalError/fs ━━━━━━━━━━━━━━━━━━━━
+<TEMP_DIR>/check_rome_test_broken_symlink/broken_symlink internalError/fs ━━━━━━━━━━━━━━━━━━━━
! Dereferenced symlink
- i Rome encountered a file system entry that is a broken symbolic link: <TEMP_DIR>/rome_test_broken_symlink/broken_symlink
+ i Rome encountered a file system entry that is a broken symbolic link: <TEMP_DIR>/check_rome_test_broken_symlink/broken_symlink
```
diff --git a/crates/rome_cli/tests/snapshots/main_commands_check/fs_error_infinite_symlink_expansion.snap /dev/null
--- a/crates/rome_cli/tests/snapshots/main_commands_check/fs_error_infinite_symlink_expansion.snap
+++ /dev/null
@@ -1,42 +0,0 @@
----
-source: crates/rome_cli/tests/snap_test.rs
-expression: content
----
-# Termination Message
-
-```block
-internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
-
- × No files were processed in the specified paths.
-
-
-
-```
-
-# Emitted Messages
-
-```block
-<TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix internalError/fs ━━━━━━━━━━━━━━━━━━━━
-
- ! Infinite symlink expansion
-
- × Rome encountered a file system entry that leads to an infinite symbolic link expansion, causing an infinite cycle: <TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix
-
-
-```
-
-```block
-<TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix internalError/fs ━━━━━━━━━━━━━━━━━━━━
-
- ! Infinite symlink expansion
-
- × Rome encountered a file system entry that leads to an infinite symbolic link expansion, causing an infinite cycle: <TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix
-
-
-```
-
-```block
-Checked 0 file(s) in <TIME>
-```
-
-
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_check/fs_error_infinite_symlink_expansion_to_dirs.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_check/fs_error_infinite_symlink_expansion_to_dirs.snap
@@ -0,0 +1,22 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+Checked 0 file(s) in <TIME>
+```
+
+
diff --git a/crates/rome_cli/tests/snapshots/main_commands_check/fs_error_infinite_symlink_exapansion_unix.snap b/crates/rome_cli/tests/snapshots/main_commands_check/fs_error_infinite_symlink_expansion_unix.snap
--- a/crates/rome_cli/tests/snapshots/main_commands_check/fs_error_infinite_symlink_exapansion_unix.snap
+++ b/crates/rome_cli/tests/snapshots/main_commands_check/fs_error_infinite_symlink_expansion_unix.snap
@@ -11,21 +11,21 @@ no files were processed in the specified paths.
# Emitted Messages
```block
-<TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix internalError/fs ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+<TEMP_DIR>/rome_test_infinite_symlink_expansion/prefix internalError/fs ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
! Infinite symlink expansion
- × Rome encountered a file system entry that leads to an infinite symbolic link expansion, causing an infinite cycle: <TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix
+ × Rome encountered a file system entry that leads to an infinite symbolic link expansion, causing an infinite cycle: <TEMP_DIR>/rome_test_infinite_symlink_expansion/prefix
```
```block
-<TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix internalError/fs ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+<TEMP_DIR>/rome_test_infinite_symlink_expansion/prefix internalError/fs ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
! Infinite symlink expansion
- × Rome encountered a file system entry that leads to an infinite symbolic link expansion, causing an infinite cycle: <TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix
+ × Rome encountered a file system entry that leads to an infinite symbolic link expansion, causing an infinite cycle: <TEMP_DIR>/rome_test_infinite_symlink_expansion/prefix
```
diff --git a/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_dereferenced_symlink.snap b/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_dereferenced_symlink.snap
--- a/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_dereferenced_symlink.snap
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_dereferenced_symlink.snap
@@ -16,11 +16,11 @@ internalError/io ━━━━━━━━━━━━━━━━━━━━━
# Emitted Messages
```block
-<TEMP_DIR>/rome_test_broken_symlink/broken_symlink internalError/fs ━━━━━━━━━━━━━━━━━━━━
+<TEMP_DIR>/lint_rome_test_broken_symlink/broken_symlink internalError/fs ━━━━━━━━━━━━━━━━━━━━
! Dereferenced symlink
- i Rome encountered a file system entry that is a broken symbolic link: <TEMP_DIR>/rome_test_broken_symlink/broken_symlink
+ i Rome encountered a file system entry that is a broken symbolic link: <TEMP_DIR>/lint_rome_test_broken_symlink/broken_symlink
```
diff --git a/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_infinite_symlink_expansion.snap /dev/null
--- a/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_infinite_symlink_expansion.snap
+++ /dev/null
@@ -1,42 +0,0 @@
----
-source: crates/rome_cli/tests/snap_test.rs
-expression: content
----
-# Termination Message
-
-```block
-internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
-
- × No files were processed in the specified paths.
-
-
-
-```
-
-# Emitted Messages
-
-```block
-<TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix internalError/fs ━━━━━━━━━━━━━━━━━━━━
-
- ! Infinite symlink expansion
-
- × Rome encountered a file system entry that leads to an infinite symbolic link expansion, causing an infinite cycle: <TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix
-
-
-```
-
-```block
-<TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix internalError/fs ━━━━━━━━━━━━━━━━━━━━
-
- ! Infinite symlink expansion
-
- × Rome encountered a file system entry that leads to an infinite symbolic link expansion, causing an infinite cycle: <TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix
-
-
-```
-
-```block
-Checked 0 file(s) in <TIME>
-```
-
-
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_infinite_symlink_expansion_to_dirs.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_infinite_symlink_expansion_to_dirs.snap
@@ -0,0 +1,22 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+Checked 0 file(s) in <TIME>
+```
+
+
diff --git a/crates/rome_js_analyze/Cargo.toml b/crates/rome_js_analyze/Cargo.toml
--- a/crates/rome_js_analyze/Cargo.toml
+++ b/crates/rome_js_analyze/Cargo.toml
@@ -31,12 +31,12 @@ serde_json = { workspace = true }
smallvec = { workspace = true }
[dev-dependencies]
-countme = { workspace = true, features = ["enable"] }
-insta = { workspace = true, features = ["glob"] }
-rome_js_parser = { workspace = true, features = ["tests"] }
-rome_text_edit = { workspace = true }
-tests_macros = { workspace = true }
-rome_test_utils = { workspace = true }
+countme = { workspace = true, features = ["enable"] }
+insta = { workspace = true, features = ["glob"] }
+rome_js_parser = { workspace = true, features = ["tests"] }
+rome_test_utils = { workspace = true }
+rome_text_edit = { workspace = true }
+tests_macros = { workspace = true }
[features]
schema = ["schemars", "rome_deserialize/schema"]
diff --git a/crates/rome_js_transform/Cargo.toml b/crates/rome_js_transform/Cargo.toml
--- a/crates/rome_js_transform/Cargo.toml
+++ b/crates/rome_js_transform/Cargo.toml
@@ -11,17 +11,17 @@ version = "0.1.0"
[dependencies]
lazy_static = { workspace = true }
rome_analyze = { workspace = true }
+rome_console = { workspace = true }
rome_diagnostics = { workspace = true }
rome_js_factory = { workspace = true }
rome_js_syntax = { workspace = true }
rome_rowan = { workspace = true }
-rome_console = {workspace= true}
[dev-dependencies]
+insta = { workspace = true }
+rome_analyze = { workspace = true }
rome_js_formatter = { workspace = true }
rome_js_parser = { workspace = true }
-rome_analyze = { workspace = true }
-rome_test_utils = { workspace = true }
-tests_macros= { workspace = true }
-insta = { workspace = true }
+rome_test_utils = { workspace = true }
+tests_macros = { workspace = true }
diff --git a/crates/rome_json_analyze/Cargo.toml b/crates/rome_json_analyze/Cargo.toml
--- a/crates/rome_json_analyze/Cargo.toml
+++ b/crates/rome_json_analyze/Cargo.toml
@@ -21,5 +21,5 @@ insta = { workspace = true, features = ["glob"] }
rome_json_factory = { workspace = true }
rome_json_parser = { workspace = true }
rome_service = { workspace = true }
+rome_test_utils = { workspace = true }
tests_macros = { workspace = true }
-rome_test_utils = { workspace = true }
diff --git a/crates/rome_test_utils/Cargo.toml b/crates/rome_test_utils/Cargo.toml
--- a/crates/rome_test_utils/Cargo.toml
+++ b/crates/rome_test_utils/Cargo.toml
@@ -1,25 +1,24 @@
[package]
-name = "rome_test_utils"
-version = "0.1.0"
edition = "2021"
+name = "rome_test_utils"
publish = false
+version = "0.1.0"
# See more keys and their definitions at https://doc.rust-lang.org/cargo/reference/manifest.html
[dependencies]
-rome_js_parser = {workspace = true }
-rome_json_parser = {workspace = true }
-rome_js_syntax = {workspace = true }
-rome_json_syntax = {workspace = true }
-rome_analyze = {workspace = true }
-rome_rowan = {workspace = true }
-rome_diagnostics = {workspace = true }
-rome_service = {workspace = true }
-rome_console = {workspace = true }
-rome_deserialize = {workspace = true }
-countme = { workspace = true, features = ["enable"] }
-similar = { version = "2.2.1" }
+countme = { workspace = true, features = ["enable"] }
json_comments = "0.2.1"
-serde = { workspace = true }
-serde_json = { workspace = true }
-
+rome_analyze = { workspace = true }
+rome_console = { workspace = true }
+rome_deserialize = { workspace = true }
+rome_diagnostics = { workspace = true }
+rome_js_parser = { workspace = true }
+rome_js_syntax = { workspace = true }
+rome_json_parser = { workspace = true }
+rome_json_syntax = { workspace = true }
+rome_rowan = { workspace = true }
+rome_service = { workspace = true }
+serde = { workspace = true }
+serde_json = { workspace = true }
+similar = { version = "2.2.1" }
| 🐛 Infinite symlink expansion error when there's no infinite symlink AFAICT
### Environment information
```block
CLI:
Version: 11.0.0
Color support: true
Platform:
CPU Architecture: x86_64
OS: linux
Environment:
ROME_LOG_DIR: unset
NO_COLOR: unset
TERM: "xterm-256color"
Rome Configuration:
Status: loaded
Formatter disabled: false
Linter disabled: false
Workspace:
Open Documents: 0
Discovering running Rome servers...
```
### What happened?
1. running `npx rome check .`
2. It complains about a bunch of file symlinks I have
For example:
```
../tools/node/npm-runner.js internalError/fs ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
⚠ Infinite symlink expansion
✖ Rome encountered a file system entry that leads to an infinite symbolic link expansion, causing an infinite cycle: ../tools/node/npm-runner.js
```
Problems:
1. I have no idea what working directory it's in because I run rome from `.` and the npm-runner.js script is located at `workflow/tools/node/npm-runner.js`
2. I have no idea what the source of the symlink is that it's having problems with
3. npm-runner.js is not a symlink:
```
$ ls -l workflow/tools/node/npm-runner.js
-rwxr-xr-x 1 vlovich vlovich 7707 Jan 24 18:20 workflow/tools/node/npm-runner.js
```
Relevant information:
An example of a symlink looks like:
```
ls -l scripts/my-tool
lrwxrwxrwx 1 vlovich vlovich 36 Jan 12 20:22 scripts/my-tool -> ../workflow/tools/node/npm-runner.js
```
`workflow` is a submodule to another project. I get the similar errors when running rome from within the submodule (or even as a standalone checkout) so most of the errors about the symlinks that exist within workflow (the npm-runner.js script is reused within workflow and within projects that import it).
### Expected result
Simple symlinks to JS files shouldn't cause infinite symlink errors.
### Code of Conduct
- [X] I agree to follow Rome's Code of Conduct
| The symbolic link detection is optimized for speed and does not cache the entire resolution tree. For this reason, the displayed error message is not very detailed. The warning message is shown, when an already traversed file path is found more than once. The interner is a cache of all traversed file paths.
https://github.com/rome/tools/blob/99ee725674d3bbb7641a20f06c3e99c0788f14c5/crates/rome_fs/src/fs/os.rs#L240-L243
This warning message can only be caused by using symbolic links. Please verify, that no symbolic link points to a parent directory where the file is located.
There are no symbolic links to directories. All symbolic links in this setup always resolve to a specific file. So it may go through a parent directory (e.g. ln -s `../sibling-folder/file ./my-symlink`), but I don't see why symlinks to files would ever trigger an infinite loop check. I would think that you just either `readlink` and cap how many indirect links you can go through before a non symlink must be hit or use `realpath` and let the kernel let you know via `ELOOP` error and then only trigger infinite symlink detection when the symlink resolves to a directory.
My reading of the code is that any symlinks in multiple directories will trigger this because the infinite symlink expansion warning gets triggered if the same file appears in 2 separate directories (even `ln -s ./file1 ./file2` I think triggers). I believe this is tautologically true for any file symlink.
Yes, exactly. The provided example is too generic, but I assume there might be some symbolic link inside of a node_modules dependency.
I explicitly create a symlink in my project. That's tripping it up. What I'm suggesting is that if ln -s file1 file2 or ln -s dir1 dir2 triggers the warning, the warning isn't actually useful as it's not catching any issue. It needs to be a bit more careful (eg encountering a symlink directory while following a symlink directory).
This should be fixed now?
I think #4395 is a duplicate of this issue, but the issue is not fixed in Rome in 12.1.3 yet (which included the fix from #4166). | 2023-07-28T19:32:19 | 0.0 | 85645de58d9f2a6b692bba6d9f19456a201eb774 | [
"commands::check::fs_files_ignore_symlink"
] | [
"diagnostics::test::termination_diagnostic_size",
"metrics::tests::test_timing",
"metrics::tests::test_layer",
"commands::check::applies_organize_imports_from_cli",
"cases::config_extends::extends_should_raise_an_error_for_unresolved_configuration",
"cases::config_extends::extends_resolves_when_using_conf... | [
"commands::lint::fs_files_ignore_symlink"
] | [] |
rome/tools | 4,629 | rome__tools-4629 | [
"4452"
] | 08645a80c6139ab969aa1917f7e01dae3845d236 | diff --git /dev/null b/crates/rome_cli/src/commands/lint.rs
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/src/commands/lint.rs
@@ -0,0 +1,94 @@
+use crate::cli_options::CliOptions;
+use crate::configuration::{load_configuration, LoadedConfiguration};
+use crate::vcs::store_path_to_ignore_from_vcs;
+use crate::{execute_mode, CliDiagnostic, CliSession, Execution, TraversalMode};
+use rome_service::workspace::{FixFileMode, UpdateSettingsParams};
+use rome_service::{Configuration, MergeWith};
+use std::ffi::OsString;
+use std::path::PathBuf;
+
+pub(crate) struct LintCommandPayload {
+ pub(crate) apply: bool,
+ pub(crate) apply_unsafe: bool,
+ pub(crate) cli_options: CliOptions,
+ pub(crate) configuration: Option<Configuration>,
+ pub(crate) paths: Vec<OsString>,
+ pub(crate) stdin_file_path: Option<String>,
+}
+
+/// Handler for the "lint" command of the Rome CLI
+pub(crate) fn lint(
+ mut session: CliSession,
+ payload: LintCommandPayload,
+) -> Result<(), CliDiagnostic> {
+ let LintCommandPayload {
+ apply,
+ apply_unsafe,
+ cli_options,
+ configuration,
+ paths,
+ stdin_file_path,
+ } = payload;
+
+ let fix_file_mode = if apply && apply_unsafe {
+ return Err(CliDiagnostic::incompatible_arguments(
+ "--apply",
+ "--apply-unsafe",
+ ));
+ } else if !apply && !apply_unsafe {
+ None
+ } else if apply && !apply_unsafe {
+ Some(FixFileMode::SafeFixes)
+ } else {
+ Some(FixFileMode::SafeAndUnsafeFixes)
+ };
+
+ let LoadedConfiguration {
+ configuration: mut fs_configuration,
+ directory_path: configuration_path,
+ ..
+ } = load_configuration(&mut session, &cli_options)?
+ .or_diagnostic(session.app.console, cli_options.verbose)?;
+
+ fs_configuration.merge_with(configuration);
+
+ // check if support of git ignore files is enabled
+ let vcs_base_path = configuration_path.or(session.app.fs.working_directory());
+ store_path_to_ignore_from_vcs(
+ &mut session,
+ &mut fs_configuration,
+ vcs_base_path,
+ &cli_options,
+ )?;
+
+ let stdin = if let Some(stdin_file_path) = stdin_file_path {
+ let console = &mut session.app.console;
+ let input_code = console.read();
+ if let Some(input_code) = input_code {
+ let path = PathBuf::from(stdin_file_path);
+ Some((path, input_code))
+ } else {
+ // we provided the argument without a piped stdin, we bail
+ return Err(CliDiagnostic::missing_argument("stdin", "lint"));
+ }
+ } else {
+ None
+ };
+
+ session
+ .app
+ .workspace
+ .update_settings(UpdateSettingsParams {
+ configuration: fs_configuration,
+ })?;
+
+ execute_mode(
+ Execution::new(TraversalMode::Lint {
+ fix_file_mode,
+ stdin,
+ }),
+ session,
+ &cli_options,
+ paths,
+ )
+}
diff --git a/crates/rome_cli/src/commands/mod.rs b/crates/rome_cli/src/commands/mod.rs
--- a/crates/rome_cli/src/commands/mod.rs
+++ b/crates/rome_cli/src/commands/mod.rs
@@ -14,6 +14,7 @@ pub(crate) mod ci;
pub(crate) mod daemon;
pub(crate) mod format;
pub(crate) mod init;
+pub(crate) mod lint;
pub(crate) mod migrate;
pub(crate) mod rage;
pub(crate) mod version;
diff --git a/crates/rome_cli/src/commands/mod.rs b/crates/rome_cli/src/commands/mod.rs
--- a/crates/rome_cli/src/commands/mod.rs
+++ b/crates/rome_cli/src/commands/mod.rs
@@ -75,6 +76,26 @@ pub enum RomeCommand {
#[bpaf(positional("PATH"), many)]
paths: Vec<OsString>,
},
+ /// Run various checks on a set of files.
+ #[bpaf(command)]
+ Lint {
+ /// Apply safe fixes, formatting
+ #[bpaf(long("apply"), switch)]
+ apply: bool,
+ /// Apply safe fixes and unsafe fixes, formatting and import sorting
+ #[bpaf(long("apply-unsafe"), switch)]
+ apply_unsafe: bool,
+ #[bpaf(external, hide_usage, optional)]
+ configuration: Option<Configuration>,
+ #[bpaf(external, hide_usage)]
+ cli_options: CliOptions,
+ /// A file name with its extension to pass when reading from standard in, e.g. echo 'let a;' | rome lint --stdin-file-path=file.js"
+ #[bpaf(long("stdin-file-path"), argument("PATH"), hide_usage)]
+ stdin_file_path: Option<String>,
+ /// Single file, single path or list of paths
+ #[bpaf(positional("PATH"), many)]
+ paths: Vec<OsString>,
+ },
/// Run the formatter on a set of files.
#[bpaf(command)]
Format {
diff --git a/crates/rome_cli/src/commands/mod.rs b/crates/rome_cli/src/commands/mod.rs
--- a/crates/rome_cli/src/commands/mod.rs
+++ b/crates/rome_cli/src/commands/mod.rs
@@ -160,6 +181,7 @@ impl RomeCommand {
RomeCommand::Start => None,
RomeCommand::Stop => None,
RomeCommand::Check { cli_options, .. } => cli_options.colors.as_ref(),
+ RomeCommand::Lint { cli_options, .. } => cli_options.colors.as_ref(),
RomeCommand::Ci { cli_options, .. } => cli_options.colors.as_ref(),
RomeCommand::Format { cli_options, .. } => cli_options.colors.as_ref(),
RomeCommand::Init => None,
diff --git a/crates/rome_cli/src/commands/mod.rs b/crates/rome_cli/src/commands/mod.rs
--- a/crates/rome_cli/src/commands/mod.rs
+++ b/crates/rome_cli/src/commands/mod.rs
@@ -177,6 +199,7 @@ impl RomeCommand {
RomeCommand::Start => false,
RomeCommand::Stop => false,
RomeCommand::Check { cli_options, .. } => cli_options.use_server,
+ RomeCommand::Lint { cli_options, .. } => cli_options.use_server,
RomeCommand::Ci { cli_options, .. } => cli_options.use_server,
RomeCommand::Format { cli_options, .. } => cli_options.use_server,
RomeCommand::Init => false,
diff --git a/crates/rome_cli/src/commands/mod.rs b/crates/rome_cli/src/commands/mod.rs
--- a/crates/rome_cli/src/commands/mod.rs
+++ b/crates/rome_cli/src/commands/mod.rs
@@ -198,6 +221,7 @@ impl RomeCommand {
RomeCommand::Start => false,
RomeCommand::Stop => false,
RomeCommand::Check { cli_options, .. } => cli_options.verbose,
+ RomeCommand::Lint { cli_options, .. } => cli_options.verbose,
RomeCommand::Format { cli_options, .. } => cli_options.verbose,
RomeCommand::Ci { cli_options, .. } => cli_options.verbose,
RomeCommand::Init => false,
diff --git /dev/null b/crates/rome_cli/src/execute/lint_file.rs
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/src/execute/lint_file.rs
@@ -0,0 +1,85 @@
+use crate::execute::diagnostics::{ResultExt, ResultIoExt};
+use crate::execute::process_file::{FileResult, FileStatus, Message};
+use crate::execute::traverse::TraversalOptions;
+use crate::CliDiagnostic;
+use rome_diagnostics::{category, Error};
+use rome_fs::{OpenOptions, RomePath};
+use rome_service::file_handlers::Language;
+use rome_service::workspace::{FileGuard, OpenFileParams, RuleCategories};
+use std::path::Path;
+use std::sync::atomic::Ordering;
+
+pub(crate) struct LintFile<'ctx, 'app> {
+ pub(crate) ctx: &'app TraversalOptions<'ctx, 'app>,
+ pub(crate) path: &'app Path,
+}
+
+/// Lints a single file and returns a [FileResult]
+pub(crate) fn lint_file(payload: LintFile) -> FileResult {
+ let LintFile { ctx, path } = payload;
+ let rome_path = RomePath::new(path);
+ let mut errors = 0;
+ let open_options = OpenOptions::default()
+ .read(true)
+ .write(ctx.execution.requires_write_access());
+ let mut file = ctx
+ .fs
+ .open_with_options(path, open_options)
+ .with_file_path(path.display().to_string())?;
+
+ let mut input = String::new();
+ file.read_to_string(&mut input)
+ .with_file_path(path.display().to_string())?;
+
+ let file_guard = FileGuard::open(
+ ctx.workspace,
+ OpenFileParams {
+ path: rome_path,
+ version: 0,
+ content: input.clone(),
+ language_hint: Language::default(),
+ },
+ )
+ .with_file_path_and_code(path.display().to_string(), category!("internalError/fs"))?;
+ if let Some(fix_mode) = ctx.execution.as_fix_file_mode() {
+ let fixed = file_guard
+ .fix_file(*fix_mode, false)
+ .with_file_path_and_code(path.display().to_string(), category!("lint"))?;
+
+ ctx.push_message(Message::SkippedFixes {
+ skipped_suggested_fixes: fixed.skipped_suggested_fixes,
+ });
+
+ if fixed.code != input {
+ file.set_content(fixed.code.as_bytes())
+ .with_file_path(path.display().to_string())?;
+ file_guard.change_file(file.file_version(), fixed.code)?;
+ }
+ errors = fixed.errors;
+ }
+
+ let max_diagnostics = ctx.remaining_diagnostics.load(Ordering::Relaxed);
+ let result = file_guard
+ .pull_diagnostics(RuleCategories::LINT, max_diagnostics.into())
+ .with_file_path_and_code(path.display().to_string(), category!("lint"))?;
+
+ let no_diagnostics = result.diagnostics.is_empty() && result.skipped_diagnostics == 0;
+ let result = if no_diagnostics || ctx.execution.is_format() {
+ FileStatus::Success
+ } else {
+ FileStatus::Message(Message::Diagnostics {
+ name: path.display().to_string(),
+ content: input.clone(),
+ diagnostics: result.diagnostics.into_iter().map(Error::from).collect(),
+ skipped_diagnostics: result.skipped_diagnostics,
+ })
+ };
+ ctx.increment_processed();
+ if errors > 0 {
+ return Ok(FileStatus::Message(Message::ApplyError(
+ CliDiagnostic::file_apply_error(path.display().to_string()),
+ )));
+ } else {
+ Ok(result)
+ }
+}
diff --git a/crates/rome_cli/src/execute/mod.rs b/crates/rome_cli/src/execute/mod.rs
--- a/crates/rome_cli/src/execute/mod.rs
+++ b/crates/rome_cli/src/execute/mod.rs
@@ -1,4 +1,5 @@
mod diagnostics;
+mod lint_file;
mod migrate;
mod process_file;
mod std_in;
diff --git a/crates/rome_cli/src/execute/mod.rs b/crates/rome_cli/src/execute/mod.rs
--- a/crates/rome_cli/src/execute/mod.rs
+++ b/crates/rome_cli/src/execute/mod.rs
@@ -11,6 +12,7 @@ use rome_diagnostics::MAXIMUM_DISPLAYABLE_DIAGNOSTICS;
use rome_fs::RomePath;
use rome_service::workspace::{FeatureName, FixFileMode};
use std::ffi::OsString;
+use std::fmt::{Display, Formatter};
use std::path::PathBuf;
/// Useful information during the traversal of files and virtual content
diff --git a/crates/rome_cli/src/execute/mod.rs b/crates/rome_cli/src/execute/mod.rs
--- a/crates/rome_cli/src/execute/mod.rs
+++ b/crates/rome_cli/src/execute/mod.rs
@@ -48,6 +50,18 @@ pub(crate) enum TraversalMode {
/// 2. The content of the file
stdin: Option<(PathBuf, String)>,
},
+ /// This mode is enabled when running the command `rome lint`
+ Lint {
+ /// The type of fixes that should be applied when analyzing a file.
+ ///
+ /// It's [None] if the `check` command is called without `--apply` or `--apply-suggested`
+ /// arguments.
+ fix_file_mode: Option<FixFileMode>,
+ /// An optional tuple.
+ /// 1. The virtual path to the file
+ /// 2. The content of the file
+ stdin: Option<(PathBuf, String)>,
+ },
/// This mode is enabled when running the command `rome ci`
CI,
/// This mode is enabled when running the command `rome format`
diff --git a/crates/rome_cli/src/execute/mod.rs b/crates/rome_cli/src/execute/mod.rs
--- a/crates/rome_cli/src/execute/mod.rs
+++ b/crates/rome_cli/src/execute/mod.rs
@@ -68,6 +82,18 @@ pub(crate) enum TraversalMode {
},
}
+impl Display for TraversalMode {
+ fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
+ match self {
+ TraversalMode::Check { .. } => write!(f, "check"),
+ TraversalMode::CI { .. } => write!(f, "ci"),
+ TraversalMode::Format { .. } => write!(f, "format"),
+ TraversalMode::Migrate { .. } => write!(f, "migrate"),
+ TraversalMode::Lint { .. } => write!(f, "lint"),
+ }
+ }
+}
+
/// Tells to the execution of the traversal how the information should be reported
#[derive(Copy, Clone, Default)]
pub(crate) enum ReportMode {
diff --git a/crates/rome_cli/src/execute/mod.rs b/crates/rome_cli/src/execute/mod.rs
--- a/crates/rome_cli/src/execute/mod.rs
+++ b/crates/rome_cli/src/execute/mod.rs
@@ -111,10 +137,12 @@ impl Execution {
/// `true` only when running the traversal in [TraversalMode::Check] and `should_fix` is `true`
pub(crate) fn as_fix_file_mode(&self) -> Option<&FixFileMode> {
- if let TraversalMode::Check { fix_file_mode, .. } = &self.traversal_mode {
- fix_file_mode.as_ref()
- } else {
- None
+ match &self.traversal_mode {
+ TraversalMode::Check { fix_file_mode, .. }
+ | TraversalMode::Lint { fix_file_mode, .. } => fix_file_mode.as_ref(),
+ TraversalMode::Format { .. } | TraversalMode::CI | TraversalMode::Migrate { .. } => {
+ None
+ }
}
}
diff --git a/crates/rome_cli/src/execute/mod.rs b/crates/rome_cli/src/execute/mod.rs
--- a/crates/rome_cli/src/execute/mod.rs
+++ b/crates/rome_cli/src/execute/mod.rs
@@ -126,6 +154,10 @@ impl Execution {
matches!(self.traversal_mode, TraversalMode::Check { .. })
}
+ pub(crate) const fn is_lint(&self) -> bool {
+ matches!(self.traversal_mode, TraversalMode::Lint { .. })
+ }
+
pub(crate) const fn is_check_apply(&self) -> bool {
matches!(
self.traversal_mode,
diff --git a/crates/rome_cli/src/execute/mod.rs b/crates/rome_cli/src/execute/mod.rs
--- a/crates/rome_cli/src/execute/mod.rs
+++ b/crates/rome_cli/src/execute/mod.rs
@@ -153,7 +185,8 @@ impl Execution {
/// Whether the traversal mode requires write access to files
pub(crate) const fn requires_write_access(&self) -> bool {
match self.traversal_mode {
- TraversalMode::Check { fix_file_mode, .. } => fix_file_mode.is_some(),
+ TraversalMode::Check { fix_file_mode, .. }
+ | TraversalMode::Lint { fix_file_mode, .. } => fix_file_mode.is_some(),
TraversalMode::CI => false,
TraversalMode::Format { write, .. } => write,
TraversalMode::Migrate { write: dry_run, .. } => dry_run,
diff --git a/crates/rome_cli/src/execute/mod.rs b/crates/rome_cli/src/execute/mod.rs
--- a/crates/rome_cli/src/execute/mod.rs
+++ b/crates/rome_cli/src/execute/mod.rs
@@ -162,19 +195,10 @@ impl Execution {
pub(crate) fn as_stdin_file(&self) -> Option<&(PathBuf, String)> {
match &self.traversal_mode {
- TraversalMode::Format { stdin, .. } => stdin.as_ref(),
- TraversalMode::Check { stdin, .. } => stdin.as_ref(),
- _ => None,
- }
- }
-
- /// Returns the subcommand of the [traversal mode](TraversalMode) execution
- pub(crate) fn traversal_mode_subcommand(&self) -> &'static str {
- match self.traversal_mode {
- TraversalMode::Check { .. } => "check",
- TraversalMode::CI { .. } => "ci",
- TraversalMode::Format { .. } => "format",
- TraversalMode::Migrate { .. } => "migrate",
+ TraversalMode::Format { stdin, .. }
+ | TraversalMode::Lint { stdin, .. }
+ | TraversalMode::Check { stdin, .. } => stdin.as_ref(),
+ TraversalMode::CI { .. } | TraversalMode::Migrate { .. } => None,
}
}
}
diff --git a/crates/rome_cli/src/execute/mod.rs b/crates/rome_cli/src/execute/mod.rs
--- a/crates/rome_cli/src/execute/mod.rs
+++ b/crates/rome_cli/src/execute/mod.rs
@@ -197,10 +221,10 @@ pub(crate) fn execute_mode(
max_diagnostics
} else {
- // The command `rome check` gives a default value of 20.
+ // The commands `rome check` and `rome lint` give a default value of 20.
// In case of other commands that pass here, we limit to 50 to avoid to delay the terminal.
match &mode.traversal_mode {
- TraversalMode::Check { .. } => 20,
+ TraversalMode::Check { .. } | TraversalMode::Lint { .. } => 20,
TraversalMode::CI | TraversalMode::Format { .. } | TraversalMode::Migrate { .. } => 50,
}
};
diff --git a/crates/rome_cli/src/execute/process_file.rs b/crates/rome_cli/src/execute/process_file.rs
--- a/crates/rome_cli/src/execute/process_file.rs
+++ b/crates/rome_cli/src/execute/process_file.rs
@@ -1,4 +1,5 @@
use crate::execute::diagnostics::{ResultExt, ResultIoExt, SkippedDiagnostic, UnhandledDiagnostic};
+use crate::execute::lint_file::{lint_file, LintFile};
use crate::execute::traverse::TraversalOptions;
use crate::execute::TraversalMode;
use crate::{CliDiagnostic, FormatterReportFileDetail};
diff --git a/crates/rome_cli/src/execute/process_file.rs b/crates/rome_cli/src/execute/process_file.rs
--- a/crates/rome_cli/src/execute/process_file.rs
+++ b/crates/rome_cli/src/execute/process_file.rs
@@ -156,6 +157,7 @@ pub(crate) fn process_file(ctx: &TraversalOptions, path: &Path) -> FileResult {
}),
),
TraversalMode::Format { .. } => file_features.support_kind_for(&FeatureName::Format),
+ TraversalMode::Lint { .. } => file_features.support_kind_for(&FeatureName::Lint),
TraversalMode::Migrate { .. } => None,
};
diff --git a/crates/rome_cli/src/execute/process_file.rs b/crates/rome_cli/src/execute/process_file.rs
--- a/crates/rome_cli/src/execute/process_file.rs
+++ b/crates/rome_cli/src/execute/process_file.rs
@@ -173,6 +175,20 @@ pub(crate) fn process_file(ctx: &TraversalOptions, path: &Path) -> FileResult {
};
}
+ // NOTE: this is a work in progress that will be refactored over time
+ //
+ // With time, we will create a separate file for each traversal mode. Reason to do so
+ // is to keep the business logics of each traversal separate. Doing so would allow us to
+ // lower the changes to break the business logic of other traversal.
+ //
+ // This would definitely repeat the code, but it's worth the effort in the long run.
+ if let TraversalMode::Lint { .. } = ctx.execution.traversal_mode {
+ // the unsupported case should be handled already at this point
+ if file_features.supports_for(&FeatureName::Lint) {
+ return lint_file(LintFile { ctx, path });
+ }
+ }
+
let open_options = OpenOptions::default()
.read(true)
.write(ctx.execution.requires_write_access());
diff --git a/crates/rome_cli/src/execute/process_file.rs b/crates/rome_cli/src/execute/process_file.rs
--- a/crates/rome_cli/src/execute/process_file.rs
+++ b/crates/rome_cli/src/execute/process_file.rs
@@ -201,7 +217,7 @@ pub(crate) fn process_file(ctx: &TraversalOptions, path: &Path) -> FileResult {
if let Some(fix_mode) = ctx.execution.as_fix_file_mode() {
let fixed = file_guard
- .fix_file(*fix_mode)
+ .fix_file(*fix_mode, file_features.supports_for(&FeatureName::Format))
.with_file_path_and_code(path.display().to_string(), category!("lint"))?;
ctx.push_message(Message::SkippedFixes {
diff --git a/crates/rome_cli/src/execute/process_file.rs b/crates/rome_cli/src/execute/process_file.rs
--- a/crates/rome_cli/src/execute/process_file.rs
+++ b/crates/rome_cli/src/execute/process_file.rs
@@ -330,7 +346,9 @@ pub(crate) fn process_file(ctx: &TraversalOptions, path: &Path) -> FileResult {
let should_write = match ctx.execution.traversal_mode() {
// In check mode do not run the formatter and return the result immediately,
// but only if the argument `--apply` is not passed.
- TraversalMode::Check { .. } => ctx.execution.as_fix_file_mode().is_some(),
+ TraversalMode::Check { .. } | TraversalMode::Lint { .. } => {
+ ctx.execution.as_fix_file_mode().is_some()
+ }
TraversalMode::CI { .. } => false,
TraversalMode::Format { write, .. } => *write,
TraversalMode::Migrate { write: dry_run, .. } => *dry_run,
diff --git a/crates/rome_cli/src/execute/std_in.rs b/crates/rome_cli/src/execute/std_in.rs
--- a/crates/rome_cli/src/execute/std_in.rs
+++ b/crates/rome_cli/src/execute/std_in.rs
@@ -51,7 +51,7 @@ pub(crate) fn run<'a>(
<Warn>"The content was not formatted because the formatter is currently disabled."</Warn>
})
}
- } else if mode.is_check() {
+ } else if mode.is_check() || mode.is_lint() {
let mut diagnostics = Vec::new();
let mut new_content = Cow::Borrowed(content);
diff --git a/crates/rome_cli/src/execute/std_in.rs b/crates/rome_cli/src/execute/std_in.rs
--- a/crates/rome_cli/src/execute/std_in.rs
+++ b/crates/rome_cli/src/execute/std_in.rs
@@ -75,6 +75,8 @@ pub(crate) fn run<'a>(
let fix_file_result = workspace.fix_file(FixFileParams {
fix_file_mode: *fix_file_mode,
path: rome_path.clone(),
+ should_format: mode.is_check()
+ && file_features.supports_for(&FeatureName::Format),
})?;
if fix_file_result.code != new_content {
version += 1;
diff --git a/crates/rome_cli/src/execute/std_in.rs b/crates/rome_cli/src/execute/std_in.rs
--- a/crates/rome_cli/src/execute/std_in.rs
+++ b/crates/rome_cli/src/execute/std_in.rs
@@ -87,7 +89,7 @@ pub(crate) fn run<'a>(
}
}
- if file_features.supports_for(&FeatureName::OrganizeImports) {
+ if file_features.supports_for(&FeatureName::OrganizeImports) && mode.is_check() {
let result = workspace.organize_imports(OrganizeImportsParams {
path: rome_path.clone(),
})?;
diff --git a/crates/rome_cli/src/execute/std_in.rs b/crates/rome_cli/src/execute/std_in.rs
--- a/crates/rome_cli/src/execute/std_in.rs
+++ b/crates/rome_cli/src/execute/std_in.rs
@@ -115,7 +117,7 @@ pub(crate) fn run<'a>(
diagnostics.extend(result.diagnostics);
}
- if file_features.supports_for(&FeatureName::Format) {
+ if file_features.supports_for(&FeatureName::Format) && mode.is_check() {
let printed = workspace.format_file(FormatFileParams {
path: rome_path.clone(),
})?;
diff --git a/crates/rome_cli/src/execute/traverse.rs b/crates/rome_cli/src/execute/traverse.rs
--- a/crates/rome_cli/src/execute/traverse.rs
+++ b/crates/rome_cli/src/execute/traverse.rs
@@ -66,7 +66,7 @@ pub(crate) fn traverse(
if inputs.is_empty() && execution.as_stdin_file().is_none() {
return Err(CliDiagnostic::missing_argument(
"<INPUT>",
- execution.traversal_mode_subcommand(),
+ format!("{}", execution.traversal_mode),
));
}
diff --git a/crates/rome_cli/src/execute/traverse.rs b/crates/rome_cli/src/execute/traverse.rs
--- a/crates/rome_cli/src/execute/traverse.rs
+++ b/crates/rome_cli/src/execute/traverse.rs
@@ -130,7 +130,7 @@ pub(crate) fn traverse(
if execution.should_report_to_terminal() {
match execution.traversal_mode() {
- TraversalMode::Check { .. } => {
+ TraversalMode::Check { .. } | TraversalMode::Lint { .. } => {
if execution.as_fix_file_mode().is_some() {
console.log(markup! {
<Info>"Fixed "{count}" file(s) in "{duration}</Info>
diff --git a/crates/rome_cli/src/execute/traverse.rs b/crates/rome_cli/src/execute/traverse.rs
--- a/crates/rome_cli/src/execute/traverse.rs
+++ b/crates/rome_cli/src/execute/traverse.rs
@@ -676,6 +676,7 @@ impl<'ctx, 'app> TraversalContext for TraversalOptions<'ctx, 'app> {
|| file_features.supports_for(&FeatureName::OrganizeImports)
}
TraversalMode::Format { .. } => file_features.supports_for(&FeatureName::Format),
+ TraversalMode::Lint { .. } => file_features.supports_for(&FeatureName::Lint),
// Imagine if Rome can't handle its own configuration file...
TraversalMode::Migrate { .. } => true,
}
diff --git a/crates/rome_cli/src/lib.rs b/crates/rome_cli/src/lib.rs
--- a/crates/rome_cli/src/lib.rs
+++ b/crates/rome_cli/src/lib.rs
@@ -27,6 +27,7 @@ use crate::cli_options::ColorsArg;
use crate::commands::check::CheckCommandPayload;
use crate::commands::ci::CiCommandPayload;
use crate::commands::format::FormatCommandPayload;
+use crate::commands::lint::LintCommandPayload;
pub use crate::commands::{parse_command, RomeCommand};
pub use diagnostics::CliDiagnostic;
pub(crate) use execute::{execute_mode, Execution, TraversalMode};
diff --git a/crates/rome_cli/src/lib.rs b/crates/rome_cli/src/lib.rs
--- a/crates/rome_cli/src/lib.rs
+++ b/crates/rome_cli/src/lib.rs
@@ -98,6 +99,24 @@ impl<'app> CliSession<'app> {
formatter_enabled,
},
),
+ RomeCommand::Lint {
+ apply,
+ apply_unsafe,
+ cli_options,
+ configuration: rome_configuration,
+ paths,
+ stdin_file_path,
+ } => commands::lint::lint(
+ self,
+ LintCommandPayload {
+ apply_unsafe,
+ apply,
+ cli_options,
+ configuration: rome_configuration,
+ paths,
+ stdin_file_path,
+ },
+ ),
RomeCommand::Ci {
linter_enabled,
formatter_enabled,
diff --git a/crates/rome_lsp/src/handlers/analysis.rs b/crates/rome_lsp/src/handlers/analysis.rs
--- a/crates/rome_lsp/src/handlers/analysis.rs
+++ b/crates/rome_lsp/src/handlers/analysis.rs
@@ -162,9 +162,17 @@ fn fix_all(
line_index: &LineIndex,
diagnostics: &[lsp::Diagnostic],
) -> Result<Option<CodeActionOrCommand>, WorkspaceError> {
+ let should_format = session
+ .workspace
+ .file_features(SupportsFeatureParams {
+ path: rome_path.clone(),
+ feature: vec![FeatureName::Format],
+ })?
+ .supports_for(&FeatureName::Format);
let fixed = session.workspace.fix_file(FixFileParams {
path: rome_path,
fix_file_mode: FixFileMode::SafeFixes,
+ should_format,
})?;
if fixed.actions.is_empty() {
diff --git a/crates/rome_service/src/workspace.rs b/crates/rome_service/src/workspace.rs
--- a/crates/rome_service/src/workspace.rs
+++ b/crates/rome_service/src/workspace.rs
@@ -373,6 +373,7 @@ pub enum FixFileMode {
pub struct FixFileParams {
pub path: RomePath,
pub fix_file_mode: FixFileMode,
+ pub should_format: bool,
}
#[derive(Debug, serde::Serialize, serde::Deserialize)]
diff --git a/crates/rome_service/src/workspace.rs b/crates/rome_service/src/workspace.rs
--- a/crates/rome_service/src/workspace.rs
+++ b/crates/rome_service/src/workspace.rs
@@ -667,10 +668,15 @@ impl<'app, W: Workspace + ?Sized> FileGuard<'app, W> {
})
}
- pub fn fix_file(&self, fix_file_mode: FixFileMode) -> Result<FixFileResult, WorkspaceError> {
+ pub fn fix_file(
+ &self,
+ fix_file_mode: FixFileMode,
+ should_format: bool,
+ ) -> Result<FixFileResult, WorkspaceError> {
self.workspace.fix_file(FixFileParams {
path: self.path.clone(),
fix_file_mode,
+ should_format,
})
}
diff --git a/crates/rome_service/src/workspace/server.rs b/crates/rome_service/src/workspace/server.rs
--- a/crates/rome_service/src/workspace/server.rs
+++ b/crates/rome_service/src/workspace/server.rs
@@ -516,13 +516,12 @@ impl Workspace for WorkspaceServer {
let parse = self.get_parse(params.path.clone(), Some(FeatureName::Lint))?;
let rules = settings.linter().rules.as_ref();
- let should_format = settings.formatter().enabled;
fix_all(FixAllParams {
parse,
rules,
fix_file_mode: params.fix_file_mode,
settings: self.settings(),
- should_format,
+ should_format: params.should_format,
rome_path: ¶ms.path,
})
}
diff --git a/npm/backend-jsonrpc/src/workspace.ts b/npm/backend-jsonrpc/src/workspace.ts
--- a/npm/backend-jsonrpc/src/workspace.ts
+++ b/npm/backend-jsonrpc/src/workspace.ts
@@ -1393,6 +1393,7 @@ export interface FormatOnTypeParams {
export interface FixFileParams {
fix_file_mode: FixFileMode;
path: RomePath;
+ should_format: boolean;
}
/**
* Which fixes should be applied during the analyzing phase
| diff --git /dev/null b/crates/rome_cli/tests/commands/lint.rs
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/commands/lint.rs
@@ -0,0 +1,2215 @@
+use bpaf::Args;
+use std::env::temp_dir;
+use std::fs::{create_dir, create_dir_all, remove_dir_all, File};
+use std::io::Write;
+#[cfg(target_family = "unix")]
+use std::os::unix::fs::symlink;
+#[cfg(target_os = "windows")]
+use std::os::windows::fs::{symlink_dir, symlink_file};
+use std::path::{Path, PathBuf};
+
+use crate::configs::{
+ CONFIG_FILE_SIZE_LIMIT, CONFIG_IGNORE_SYMLINK, CONFIG_LINTER_AND_FILES_IGNORE,
+ CONFIG_LINTER_DISABLED, CONFIG_LINTER_DOWNGRADE_DIAGNOSTIC, CONFIG_LINTER_IGNORED_FILES,
+ CONFIG_LINTER_SUPPRESSED_GROUP, CONFIG_LINTER_SUPPRESSED_RULE,
+ CONFIG_LINTER_UPGRADE_DIAGNOSTIC, CONFIG_RECOMMENDED_GROUP,
+};
+use crate::snap_test::{markup_to_string, SnapshotPayload};
+use crate::{assert_cli_snapshot, run_cli, FORMATTED, LINT_ERROR, PARSE_ERROR};
+use rome_console::{markup, BufferConsole, LogLevel, MarkupBuf};
+use rome_fs::{ErrorEntry, FileSystemExt, MemoryFileSystem, OsFileSystem};
+use rome_service::DynRef;
+
+const ERRORS: &str = r#"
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+"#;
+
+const NO_DEBUGGER: &str = "debugger;";
+const NEW_SYMBOL: &str = "new Symbol(\"\");";
+
+const FIX_BEFORE: &str = "(1 >= -0)";
+const FIX_AFTER: &str = "(1 >= 0)";
+
+const APPLY_SUGGESTED_BEFORE: &str = "let a = 4;
+debugger;
+console.log(a);
+";
+
+const APPLY_SUGGESTED_AFTER: &str = "const a = 4;\nconsole.log(a);\n";
+
+const NO_DEBUGGER_BEFORE: &str = "debugger;\n";
+const NO_DEBUGGER_AFTER: &str = "debugger;\n";
+
+const UPGRADE_SEVERITY_CODE: &str = r#"if(!cond) { exprA(); } else { exprB() }"#;
+
+const NURSERY_UNSTABLE: &str = r#"if(a = b) {}"#;
+
+#[test]
+fn check_help() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), "--help"]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "check_help",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn ok() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("check.js");
+ fs.insert(file_path.into(), FORMATTED.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+}
+
+#[test]
+fn ok_read_only() {
+ let mut fs = MemoryFileSystem::new_read_only();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("check.js");
+ fs.insert(file_path.into(), FORMATTED.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+}
+
+#[test]
+fn parse_error() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("check.js");
+ fs.insert(file_path.into(), PARSE_ERROR.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "parse_error",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn lint_error() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("check.js");
+ fs.insert(file_path.into(), LINT_ERROR.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "lint_error",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn maximum_diagnostics() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+ let file_path = Path::new("check.js");
+ fs.insert(file_path.into(), ERRORS.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ let messages = &console.out_buffer;
+
+ assert_eq!(
+ messages
+ .iter()
+ .filter(|m| m.level == LogLevel::Error)
+ .count(),
+ 20_usize
+ );
+
+ assert!(messages
+ .iter()
+ .filter(|m| m.level == LogLevel::Log)
+ .any(|m| {
+ let content = format!("{:?}", m.content);
+ content.contains("The number of diagnostics exceeds the number allowed by Rome")
+ && content.contains("Diagnostics not shown")
+ && content.contains("76")
+ }));
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "maximum_diagnostics",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn apply_ok() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("fix.js");
+ fs.insert(file_path.into(), FIX_BEFORE.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--apply"),
+ file_path.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ let mut buffer = String::new();
+ fs.open(file_path)
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .unwrap();
+
+ assert_eq!(buffer, FIX_AFTER);
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "apply_ok",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn apply_noop() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("fix.js");
+ fs.insert(file_path.into(), FIX_AFTER.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--apply"),
+ file_path.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "apply_noop",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn apply_suggested_error() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("fix.js");
+ fs.insert(file_path.into(), APPLY_SUGGESTED_BEFORE.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--apply-unsafe"),
+ ("--apply"),
+ file_path.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "apply_suggested_error",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn apply_suggested() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("fix.js");
+ fs.insert(file_path.into(), APPLY_SUGGESTED_BEFORE.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--apply-unsafe"),
+ file_path.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ let mut buffer = String::new();
+ fs.open(file_path)
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .unwrap();
+
+ assert_eq!(buffer, APPLY_SUGGESTED_AFTER);
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "apply_suggested",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn apply_unsafe_with_error() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ // last line doesn't have code fix
+ let source = "let a = 4;
+debugger;
+console.log(a);
+function f() { arguments; }
+";
+
+ let expected = "const a = 4;
+console.log(a);
+function f() { arguments; }
+";
+
+ let test1 = Path::new("test1.js");
+ fs.insert(test1.into(), source.as_bytes());
+
+ let test2 = Path::new("test2.js");
+ fs.insert(test2.into(), source.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--apply-unsafe"),
+ test1.as_os_str().to_str().unwrap(),
+ test2.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ let mut file = fs
+ .open(test1)
+ .expect("formatting target file was removed by the CLI");
+
+ let mut content = String::new();
+ file.read_to_string(&mut content)
+ .expect("failed to read file from memory FS");
+
+ assert_eq!(content, expected);
+ drop(file);
+
+ content.clear();
+
+ let mut file = fs
+ .open(test2)
+ .expect("formatting target file was removed by the CLI");
+
+ file.read_to_string(&mut content)
+ .expect("failed to read file from memory FS");
+
+ drop(file);
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "apply_unsafe_with_error",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn no_lint_if_linter_is_disabled_when_run_apply() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("fix.js");
+ fs.insert(file_path.into(), FIX_BEFORE.as_bytes());
+
+ let config_path = Path::new("rome.json");
+ fs.insert(config_path.into(), CONFIG_LINTER_DISABLED.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--apply"),
+ file_path.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ let mut buffer = String::new();
+ fs.open(file_path)
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .unwrap();
+
+ assert_eq!(buffer, FIX_BEFORE);
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "no_lint_if_linter_is_disabled_when_run_apply",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn no_lint_if_linter_is_disabled() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("fix.js");
+ fs.insert(file_path.into(), FIX_BEFORE.as_bytes());
+
+ let config_path = Path::new("rome.json");
+ fs.insert(config_path.into(), CONFIG_LINTER_DISABLED.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ let mut buffer = String::new();
+ fs.open(file_path)
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .unwrap();
+
+ assert_eq!(buffer, FIX_BEFORE);
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "no_lint_if_linter_is_disabled",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn should_disable_a_rule() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("fix.js");
+ fs.insert(file_path.into(), NO_DEBUGGER_BEFORE.as_bytes());
+
+ let config_path = Path::new("rome.json");
+ fs.insert(config_path.into(), CONFIG_LINTER_SUPPRESSED_RULE.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--apply"),
+ file_path.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ let mut buffer = String::new();
+ fs.open(file_path)
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .unwrap();
+
+ assert_eq!(buffer, NO_DEBUGGER_AFTER);
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "should_disable_a_rule",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn should_disable_a_rule_group() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("fix.js");
+ fs.insert(file_path.into(), FIX_BEFORE.as_bytes());
+
+ let config_path = Path::new("rome.json");
+ fs.insert(
+ config_path.into(),
+ CONFIG_LINTER_SUPPRESSED_GROUP.as_bytes(),
+ );
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--apply"),
+ file_path.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ let mut buffer = String::new();
+ fs.open(file_path)
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .unwrap();
+
+ assert_eq!(buffer, "(1 >= -0)");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "should_disable_a_rule_group",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn downgrade_severity() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+ let file_path = Path::new("rome.json");
+ fs.insert(
+ file_path.into(),
+ CONFIG_LINTER_DOWNGRADE_DIAGNOSTIC.as_bytes(),
+ );
+
+ let file_path = Path::new("file.js");
+ fs.insert(file_path.into(), NO_DEBUGGER.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ println!("{console:?}");
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ let messages = &console.out_buffer;
+
+ assert_eq!(
+ messages
+ .iter()
+ .filter(|m| m.level == LogLevel::Error)
+ .filter(|m| {
+ let content = format!("{:#?}", m.content);
+ content.contains("suspicious/noDebugger")
+ })
+ .count(),
+ 1
+ );
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "downgrade_severity",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn upgrade_severity() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+ let file_path = Path::new("rome.json");
+ fs.insert(
+ file_path.into(),
+ CONFIG_LINTER_UPGRADE_DIAGNOSTIC.as_bytes(),
+ );
+
+ let file_path = Path::new("file.js");
+ fs.insert(file_path.into(), UPGRADE_SEVERITY_CODE.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ let messages = &console.out_buffer;
+
+ let error_count = messages
+ .iter()
+ .filter(|m| m.level == LogLevel::Error)
+ .filter(|m| {
+ let content = format!("{:?}", m.content);
+ content.contains("style/noNegationElse")
+ })
+ .count();
+
+ assert_eq!(
+ error_count, 1,
+ "expected 1 error-level message in console buffer, found {error_count:?}:\n{:?}",
+ console.out_buffer
+ );
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "upgrade_severity",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn no_lint_when_file_is_ignored() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("rome.json");
+ fs.insert(file_path.into(), CONFIG_LINTER_IGNORED_FILES.as_bytes());
+
+ let file_path = Path::new("test.js");
+ fs.insert(file_path.into(), FIX_BEFORE.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--apply"),
+ file_path.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ let mut buffer = String::new();
+ fs.open(file_path)
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .unwrap();
+
+ assert_eq!(buffer, FIX_BEFORE);
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "no_lint_when_file_is_ignored",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn no_lint_if_files_are_listed_in_ignore_option() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("rome.json");
+ fs.insert(file_path.into(), CONFIG_LINTER_AND_FILES_IGNORE.as_bytes());
+
+ let file_path_test1 = Path::new("test1.js");
+ fs.insert(file_path_test1.into(), FIX_BEFORE.as_bytes());
+
+ let file_path_test2 = Path::new("test2.js");
+ fs.insert(file_path_test2.into(), FIX_BEFORE.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--apply"),
+ file_path_test1.as_os_str().to_str().unwrap(),
+ file_path_test2.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ let mut buffer = String::new();
+ fs.open(file_path_test1)
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .unwrap();
+
+ assert_eq!(buffer, FIX_BEFORE);
+
+ let mut buffer = String::new();
+ fs.open(file_path_test2)
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .unwrap();
+
+ assert_eq!(buffer, FIX_BEFORE);
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "no_lint_if_files_are_listed_in_ignore_option",
+ fs,
+ console,
+ result,
+ ));
+}
+
+/// Creating a symbolic link will fail on Windows if the current process is
+/// unprivileged. Since running tests as administrator is uncommon and
+/// constraining, this error gets silently ignored if we're not running on CI
+/// (the workflows are being run with the correct permissions on CI)
+#[cfg(target_os = "windows")]
+macro_rules! check_windows_symlink {
+ ($result:expr) => {
+ match $result {
+ Ok(res) => res,
+ Err(err) if option_env!("CI") == Some("1") => panic!("failed to create symlink: {err}"),
+ Err(_) => return,
+ }
+ };
+}
+
+#[test]
+fn fs_error_dereferenced_symlink() {
+ let fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let root_path = temp_dir().join("rome_test_broken_symlink");
+ let subdir_path = root_path.join("prefix");
+
+ #[allow(unused_must_use)]
+ {
+ remove_dir_all(root_path.display().to_string().as_str());
+ }
+ create_dir(root_path.display().to_string().as_str()).unwrap();
+ create_dir(subdir_path).unwrap();
+
+ #[cfg(target_family = "unix")]
+ {
+ symlink(root_path.join("null"), root_path.join("broken_symlink")).unwrap();
+ }
+
+ #[cfg(target_os = "windows")]
+ {
+ check_windows_symlink!(symlink_file(
+ root_path.join("null"),
+ root_path.join("broken_symlink")
+ ));
+ }
+
+ let result = run_cli(
+ DynRef::Owned(Box::new(OsFileSystem)),
+ &mut console,
+ Args::from(&[("lint"), root_path.display().to_string().as_str()]),
+ );
+
+ remove_dir_all(root_path).unwrap();
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "fs_error_dereferenced_symlink",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn fs_error_infinite_symlink_exapansion() {
+ let fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let root_path = temp_dir().join("rome_test_infinite_symlink_exapansion");
+ let subdir1_path = root_path.join("prefix");
+ let subdir2_path = root_path.join("foo").join("bar");
+
+ #[allow(unused_must_use)]
+ {
+ remove_dir_all(root_path.display().to_string().as_str());
+ }
+ create_dir(root_path.display().to_string().as_str()).unwrap();
+ create_dir(subdir1_path.clone()).unwrap();
+
+ create_dir_all(subdir2_path.clone()).unwrap();
+
+ #[cfg(target_family = "unix")]
+ {
+ symlink(subdir1_path.clone(), root_path.join("self_symlink1")).unwrap();
+ symlink(subdir1_path, subdir2_path.join("self_symlink2")).unwrap();
+ }
+
+ #[cfg(target_os = "windows")]
+ {
+ check_windows_symlink!(symlink_dir(
+ subdir1_path.clone(),
+ root_path.join("self_symlink1")
+ ));
+ check_windows_symlink!(symlink_dir(
+ subdir1_path,
+ subdir2_path.join("self_symlink2")
+ ));
+ }
+
+ let result = run_cli(
+ DynRef::Owned(Box::new(OsFileSystem)),
+ &mut console,
+ Args::from(&[("lint"), (root_path.display().to_string().as_str())]),
+ );
+
+ remove_dir_all(root_path).unwrap();
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "fs_error_infinite_symlink_expansion",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn fs_error_read_only() {
+ let mut fs = MemoryFileSystem::new_read_only();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("test.js");
+ fs.insert(file_path.into(), *b"content");
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--apply"),
+ file_path.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ // Do not store the content of the file in the snapshot
+ fs.remove(file_path);
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "fs_error_read_only",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn fs_error_unknown() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ fs.insert_error(PathBuf::from("prefix/ci.js"), ErrorEntry::UnknownFileType);
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), ("prefix")]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "fs_error_unknown",
+ fs,
+ console,
+ result,
+ ));
+}
+
+// Symbolic link ignore pattern test
+//
+// Verifies that ignore patterns to symbolic links are allowed.
+//
+// ├── rome.json
+// ├── hidden_nested
+// │ └── test
+// │ └── symlink_testcase1_2 -> hidden_testcase1
+// ├── hidden_testcase1
+// │ └── test
+// │ └── test.js // ok
+// ├── hidden_testcase2
+// │ ├── test1.ts // ignored
+// │ ├── test2.ts // ignored
+// │ └── test.js // ok
+// └── src
+// ├── symlink_testcase1_1 -> hidden_nested
+// └── symlink_testcase2 -> hidden_testcase2
+#[test]
+fn fs_files_ignore_symlink() {
+ let fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let root_path = temp_dir().join("rome_test_files_ignore_symlink");
+ let src_path = root_path.join("src");
+
+ let testcase1_path = root_path.join("hidden_testcase1");
+ let testcase1_sub_path = testcase1_path.join("test");
+ let testcase2_path = root_path.join("hidden_testcase2");
+
+ let nested_path = root_path.join("hidden_nested");
+ let nested_sub_path = nested_path.join("test");
+
+ #[allow(unused_must_use)]
+ {
+ remove_dir_all(root_path.display().to_string().as_str());
+ }
+ create_dir(root_path.display().to_string().as_str()).unwrap();
+ create_dir(src_path.clone()).unwrap();
+ create_dir_all(testcase1_sub_path.clone()).unwrap();
+ create_dir(testcase2_path.clone()).unwrap();
+ create_dir_all(nested_sub_path.clone()).unwrap();
+
+ // src/symlink_testcase1_1
+ let symlink_testcase1_1_path = src_path.join("symlink_testcase1_1");
+ // hidden_nested/test/symlink_testcase1_2
+ let symlink_testcase1_2_path = nested_sub_path.join("symlink_testcase1_2");
+ // src/symlink_testcase2
+ let symlink_testcase2_path = src_path.join("symlink_testcase2");
+
+ #[cfg(target_family = "unix")]
+ {
+ // src/test/symlink_testcase1_1 -> hidden_nested
+ symlink(nested_path, symlink_testcase1_1_path).unwrap();
+ // hidden_nested/test/symlink_testcase1_2 -> hidden_testcase1
+ symlink(testcase1_path, symlink_testcase1_2_path).unwrap();
+ // src/symlink_testcase2 -> hidden_testcase2
+ symlink(testcase2_path.clone(), symlink_testcase2_path).unwrap();
+ }
+
+ #[cfg(target_os = "windows")]
+ {
+ check_windows_symlink!(symlink_dir(nested_path.clone(), symlink_testcase1_1_path));
+ check_windows_symlink!(symlink_dir(
+ testcase1_path.clone(),
+ symlink_testcase1_2_path
+ ));
+ check_windows_symlink!(symlink_dir(testcase2_path.clone(), symlink_testcase2_path));
+ }
+
+ let config_path = root_path.join("rome.json");
+ let mut config_file = File::create(config_path).unwrap();
+ config_file
+ .write_all(CONFIG_IGNORE_SYMLINK.as_bytes())
+ .unwrap();
+
+ let files: [PathBuf; 4] = [
+ testcase1_sub_path.join("test.js"), // ok
+ testcase2_path.join("test.js"), // ok
+ testcase2_path.join("test1.ts"), // ignored
+ testcase2_path.join("test2.ts"), // ignored
+ ];
+
+ for file_path in files {
+ let mut file = File::create(file_path).unwrap();
+ file.write_all(APPLY_SUGGESTED_BEFORE.as_bytes()).unwrap();
+ }
+
+ let result = run_cli(
+ DynRef::Owned(Box::new(OsFileSystem)),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--config-path"),
+ (root_path.display().to_string().as_str()),
+ ("--apply-unsafe"),
+ (src_path.display().to_string().as_str()),
+ ]),
+ );
+
+ remove_dir_all(root_path).unwrap();
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "fs_files_ignore_symlink",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn file_too_large() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("check.js");
+ fs.insert(file_path.into(), "statement();\n".repeat(80660).as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ // Do not store the content of the file in the snapshot
+ fs.remove(file_path);
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "file_too_large",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn file_too_large_config_limit() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ fs.insert(PathBuf::from("rome.json"), CONFIG_FILE_SIZE_LIMIT);
+
+ let file_path = Path::new("check.js");
+ fs.insert(file_path.into(), "statement1();\nstatement2();");
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "file_too_large_config_limit",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn file_too_large_cli_limit() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("check.js");
+ fs.insert(file_path.into(), "statement1();\nstatement2();");
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--files-max-size=16"),
+ file_path.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "file_too_large_cli_limit",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn files_max_size_parse_error() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("check.js");
+ fs.insert(file_path.into(), "statement1();\nstatement2();");
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--files-max-size=-1"),
+ file_path.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "files_max_size_parse_error",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn max_diagnostics_default() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ // Creates 40 diagnostics.
+ for i in 0..40 {
+ let file_path = PathBuf::from(format!("src/file_{i}.js"));
+ fs.insert(file_path, LINT_ERROR.as_bytes());
+ }
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), ("src")]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ let mut diagnostic_count = 0;
+ let mut filtered_messages = Vec::new();
+
+ for msg in console.out_buffer {
+ let MarkupBuf(nodes) = &msg.content;
+ let is_diagnostic = nodes.iter().any(|node| {
+ node.content.contains("useWhile")
+ || node.content.contains("useBlockStatements")
+ || node.content.contains("noConstantCondition")
+ });
+
+ if is_diagnostic {
+ diagnostic_count += 1;
+ } else {
+ filtered_messages.push(msg);
+ }
+ }
+
+ console.out_buffer = filtered_messages;
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "max_diagnostics_default",
+ fs,
+ console,
+ result,
+ ));
+ assert_eq!(diagnostic_count, 20);
+}
+
+#[test]
+fn max_diagnostics() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ for i in 0..20 {
+ let file_path = PathBuf::from(format!("src/file_{i}.js"));
+ fs.insert(file_path, LINT_ERROR.as_bytes());
+ }
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--max-diagnostics"),
+ ("10"),
+ Path::new("src").as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+ let mut diagnostic_count = 0;
+ let mut filtered_messages = Vec::new();
+
+ for msg in console.out_buffer {
+ let MarkupBuf(nodes) = &msg.content;
+ let is_diagnostic = nodes.iter().any(|node| {
+ node.content.contains("useWhile")
+ || node.content.contains("useBlockStatements")
+ || node.content.contains("noConstantCondition")
+ });
+
+ if is_diagnostic {
+ diagnostic_count += 1;
+ } else {
+ filtered_messages.push(msg);
+ }
+ }
+
+ console.out_buffer = filtered_messages;
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "max_diagnostics",
+ fs,
+ console,
+ result,
+ ));
+
+ assert_eq!(diagnostic_count, 10);
+}
+
+#[test]
+fn no_supported_file_found() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), "."]),
+ );
+
+ eprintln!("{:?}", console.out_buffer);
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "no_supported_file_found",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn deprecated_suppression_comment() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("file.js");
+ fs.insert(
+ file_path.into(),
+ *b"// rome-ignore lint(suspicious/noDoubleEquals): test
+a == b;",
+ );
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "deprecated_suppression_comment",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn print_verbose() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("check.js");
+ fs.insert(file_path.into(), LINT_ERROR.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--verbose"),
+ file_path.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "print_verbose",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn unsupported_file() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("check.txt");
+ fs.insert(file_path.into(), LINT_ERROR.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "unsupported_file",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn suppression_syntax_error() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("check.js");
+ fs.insert(file_path.into(), *b"// rome-ignore(:\n");
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "suppression_syntax_error",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn config_recommended_group() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("rome.json");
+ fs.insert(file_path.into(), CONFIG_RECOMMENDED_GROUP.as_bytes());
+
+ let file_path = Path::new("check.js");
+ fs.insert(file_path.into(), NEW_SYMBOL.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+ assert!(result.is_err(), "run_cli returned {result:?}");
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "config_recommended_group",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn nursery_unstable() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("check.js");
+ fs.insert(file_path.into(), NURSERY_UNSTABLE.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "nursery_unstable",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn all_rules() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let rome_json = r#"{
+ "linter": {
+ "rules": { "all": true }
+ }
+ }"#;
+
+ let file_path = Path::new("fix.js");
+ fs.insert(file_path.into(), FIX_BEFORE.as_bytes());
+
+ let config_path = Path::new("rome.json");
+ fs.insert(config_path.into(), rome_json.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "all_rules",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn top_level_all_down_level_not_all() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let rome_json = r#"{
+ "linter": {
+ "rules": {
+ "all": true,
+ "style": {
+ "all": false
+ }
+ }
+ }
+ }"#;
+
+ // style/noArguments
+ // style/noShoutyConstants
+ // style/useSingleVarDeclarator
+ let code = r#"
+ function f() {arguments;}
+ const FOO = "FOO";
+ var x, y;
+ "#;
+
+ let file_path = Path::new("fix.js");
+ fs.insert(file_path.into(), code.as_bytes());
+
+ let config_path = Path::new("rome.json");
+ fs.insert(config_path.into(), rome_json.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "top_level_all_down_level_not_all",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn top_level_not_all_down_level_all() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let rome_json = r#"{
+ "linter": {
+ "rules": {
+ "all": false,
+ "style": {
+ "all": true
+ }
+ }
+ }
+ }"#;
+
+ // style/noArguments
+ // style/noShoutyConstants
+ // style/useSingleVarDeclarator
+ let code = r#"
+ function f() {arguments;}
+ const FOO = "FOO";
+ var x, y;
+ "#;
+
+ let file_path = Path::new("fix.js");
+ fs.insert(file_path.into(), code.as_bytes());
+
+ let config_path = Path::new("rome.json");
+ fs.insert(config_path.into(), rome_json.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "top_level_not_all_down_level_all",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn ignore_configured_globals() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let rome_json = r#"{
+ "javascript": {
+ "globals": ["foo", "bar"]
+ }
+ }"#;
+
+ // style/useSingleVarDeclarator
+ let code = r#"foo.call(); bar.call();"#;
+
+ let file_path = Path::new("fix.js");
+ fs.insert(file_path.into(), code.as_bytes());
+
+ let config_path = Path::new("rome.json");
+ fs.insert(config_path.into(), rome_json.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "ignore_configured_globals",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn ignore_vcs_ignored_file() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let rome_json = r#"{
+ "vcs": {
+ "enabled": true,
+ "clientKind": "git",
+ "useIgnoreFile": true
+ }
+ }"#;
+
+ let git_ignore = r#"
+file2.js
+"#;
+
+ let code2 = r#"foo.call(); bar.call();"#;
+ let code1 = r#"array.map(sentence => sentence.split(' ')).flat();"#;
+
+ // ignored files
+ let file_path1 = Path::new("file1.js");
+ fs.insert(file_path1.into(), code1.as_bytes());
+ let file_path2 = Path::new("file2.js");
+ fs.insert(file_path2.into(), code2.as_bytes());
+
+ // configuration
+ let config_path = Path::new("rome.json");
+ fs.insert(config_path.into(), rome_json.as_bytes());
+
+ // git folder
+ let git_folder = Path::new(".git");
+ fs.insert(git_folder.into(), "".as_bytes());
+
+ // git ignore file
+ let ignore_file = Path::new(".gitignore");
+ fs.insert(ignore_file.into(), git_ignore.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ file_path1.as_os_str().to_str().unwrap(),
+ file_path2.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "ignore_vcs_ignored_file",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn ignore_vcs_os_independent_parse() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let rome_json = r#"{
+ "vcs": {
+ "enabled": true,
+ "clientKind": "git",
+ "useIgnoreFile": true
+ }
+ }"#;
+
+ let git_ignore = "something.js\nfile2.js\r\nfile3.js";
+
+ let code3 = r#"console.log('rome is cool');"#;
+ let code2 = r#"foo.call(); bar.call();"#;
+ let code1 = r#"blah.call();"#;
+
+ let file_path1 = Path::new("file1.js");
+ fs.insert(file_path1.into(), code1.as_bytes());
+
+ // ignored files
+ let file_path2 = Path::new("file2.js");
+ fs.insert(file_path2.into(), code2.as_bytes());
+ let file_path3 = Path::new("file3.js");
+ fs.insert(file_path3.into(), code3.as_bytes());
+
+ // configuration
+ let config_path = Path::new("rome.json");
+ fs.insert(config_path.into(), rome_json.as_bytes());
+
+ // git folder
+ let git_folder = Path::new(".git");
+ fs.insert(git_folder.into(), "".as_bytes());
+
+ // git ignore file
+ let ignore_file = Path::new(".gitignore");
+ fs.insert(ignore_file.into(), git_ignore.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ file_path1.as_os_str().to_str().unwrap(),
+ file_path2.as_os_str().to_str().unwrap(),
+ file_path3.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "ignore_vcs_os_independent_parse",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn ignore_vcs_ignored_file_via_cli() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let git_ignore = r#"
+file2.js
+"#;
+
+ let code2 = r#"foo.call(); bar.call();"#;
+ let code1 = r#"array.map(sentence => sentence.split(' ')).flat();"#;
+
+ // ignored files
+ let file_path1 = Path::new("file1.js");
+ fs.insert(file_path1.into(), code1.as_bytes());
+ let file_path2 = Path::new("file2.js");
+ fs.insert(file_path2.into(), code2.as_bytes());
+
+ // git folder
+ let git_folder = Path::new("./.git");
+ fs.insert(git_folder.into(), "".as_bytes());
+
+ // git ignore file
+ let ignore_file = Path::new("./.gitignore");
+ fs.insert(ignore_file.into(), git_ignore.as_bytes());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ ("--vcs-enabled=true"),
+ ("--vcs-client-kind=git"),
+ ("--vcs-use-ignore-file=true"),
+ ("--vcs-root=."),
+ file_path1.as_os_str().to_str().unwrap(),
+ file_path2.as_os_str().to_str().unwrap(),
+ ]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "ignore_vcs_ignored_file_via_cli",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn check_stdin_apply_successfully() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ console
+ .in_buffer
+ .push("function f() {return{}} class Foo { constructor() {} }".to_string());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), "--apply", ("--stdin-file-path"), ("mock.js")]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ let message = console
+ .out_buffer
+ .get(0)
+ .expect("Console should have written a message");
+
+ let content = markup_to_string(markup! {
+ {message.content}
+ });
+
+ assert_eq!(content, "function f() {return{}} class Foo { }");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "check_stdin_apply_successfully",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn check_stdin_apply_unsafe_successfully() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ console
+ .in_buffer
+ .push("function f() {return{}} class Foo { constructor() {} }".to_string());
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ "--apply-unsafe",
+ ("--stdin-file-path"),
+ ("mock.js"),
+ ]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ let message = console
+ .out_buffer
+ .get(0)
+ .expect("Console should have written a message");
+
+ let content = markup_to_string(markup! {
+ {message.content}
+ });
+
+ assert_eq!(content, "function f() {return{}} class Foo { }");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "check_stdin_apply_unsafe_successfully",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn should_apply_correct_file_source() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("file.ts");
+ fs.insert(
+ file_path.into(),
+ "type A = { a: string }; type B = Partial<A>".as_bytes(),
+ );
+
+ let config_path = Path::new("rome.json");
+ fs.insert(
+ config_path.into(),
+ r#"{
+ "linter": {
+ "rules": {
+ "recommended": true,
+ "correctness": {
+ "noUndeclaredVariables": "error"
+ }
+ }
+ }
+ }"#
+ .as_bytes(),
+ );
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ let mut buffer = String::new();
+ fs.open(file_path)
+ .unwrap()
+ .read_to_string(&mut buffer)
+ .unwrap();
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "should_apply_correct_file_source",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn apply_unsafe_no_assign_in_expression() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("fix.js");
+ fs.insert(
+ file_path.into(),
+ "res.onAborted(() => (aborted = true));".as_bytes(),
+ );
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ file_path.as_os_str().to_str().unwrap(),
+ ("--apply-unsafe"),
+ ]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "apply_unsafe_no_assign_in_expression",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn should_not_enable_all_recommended_rules() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let configuration = r#" {
+ "$schema": "https://docs.rome.tools/schemas/12.1.0/schema.json",
+ "organizeImports": {
+ "enabled": false
+ },
+ "linter": {
+ "enabled": true,
+ "rules": {
+ "recommended": false,
+ "a11y": {},
+ "complexity": {},
+ "correctness": {},
+ "performance": {},
+ "security": {},
+ "style": {},
+ "suspicious": {}
+ }
+ }
+ }"#;
+
+ let configuration_path = Path::new("rome.json");
+ fs.insert(configuration_path.into(), configuration.as_bytes());
+
+ let file_path = Path::new("fix.js");
+ fs.insert(
+ file_path.into(),
+ r#"
+ LOOP: for (const x of xs) {
+ if (x > 0) {
+ break;
+ }
+ f(x);
+ }
+ "#,
+ );
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "should_not_enable_all_recommended_rules",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn should_not_disable_recommended_rules_for_a_group() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let configuration = r#" {
+ "$schema": "https://docs.rome.tools/schemas/12.1.0/schema.json",
+ "organizeImports": {
+ "enabled": false
+ },
+ "linter": {
+ "enabled": true,
+ "rules": {
+ "recommended": true,
+ "complexity": {
+ "noUselessSwitchCase": "off"
+ }
+ }
+ }
+}"#;
+
+ let configuration_path = Path::new("rome.json");
+ fs.insert(configuration_path.into(), configuration.as_bytes());
+
+ let file_path = Path::new("fix.js");
+ fs.insert(
+ file_path.into(),
+ r#"const array = ["split", "the text", "into words"];
+// next line should error because of the recommended rule
+array.map((sentence) => sentence.split(" ")).flat();
+ "#,
+ );
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "should_not_disable_recommended_rules_for_a_group",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn should_not_enable_nursery_rules() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let configuration = r#" {
+ "$schema": "https://docs.rome.tools/schemas/12.1.0/schema.json",
+ "organizeImports": {
+ "enabled": false
+ },
+ "linter": {
+ "enabled": true,
+ "rules": {
+ "recommended": true,
+ "nursery": {
+ "noAccumulatingSpread": "error"
+ }
+ }
+ }
+}"#;
+
+ let configuration_path = Path::new("rome.json");
+ fs.insert(configuration_path.into(), configuration.as_bytes());
+
+ let file_path = Path::new("fix.ts");
+ fs.insert(
+ file_path.into(),
+ r#"const bannedType: Boolean = true;
+
+if (true) {
+ const obj = {};
+ obj["useLiteralKey"];
+}
+ "#,
+ );
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "should_not_enable_nursery_rules",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn apply_bogus_argument() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path = Path::new("fix.js");
+ fs.insert(
+ file_path.into(),
+ "function _13_1_3_fun(arguments) { }".as_bytes(),
+ );
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ file_path.as_os_str().to_str().unwrap(),
+ ("--apply-unsafe"),
+ ]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "apply_bogus_argument",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn ignores_unknown_file() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path1 = Path::new("test.txt");
+ fs.insert(file_path1.into(), *b"content");
+
+ let file_path2 = Path::new("test.js");
+ fs.insert(file_path2.into(), *b"console.log('bar');\n");
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[
+ ("lint"),
+ file_path1.as_os_str().to_str().unwrap(),
+ file_path2.as_os_str().to_str().unwrap(),
+ "--files-ignore-unknown=true",
+ ]),
+ );
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "ignores_unknown_file",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn check_json_files() {
+ let mut fs = MemoryFileSystem::default();
+ let mut console = BufferConsole::default();
+
+ let file_path1 = Path::new("test.json");
+ fs.insert(
+ file_path1.into(),
+ r#"{ "foo": true, "foo": true }"#.as_bytes(),
+ );
+
+ let configuration = Path::new("rome.json");
+ fs.insert(
+ configuration.into(),
+ r#"{
+ "linter": {
+ "rules": {
+ "nursery": {
+ "noDuplicateJsonKeys": "error"
+ }
+ }
+ }
+ }"#
+ .as_bytes(),
+ );
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), file_path1.as_os_str().to_str().unwrap()]),
+ );
+
+ assert!(result.is_err(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "check_json_files",
+ fs,
+ console,
+ result,
+ ));
+}
+
+#[test]
+fn doesnt_error_if_no_files_were_processed() {
+ let mut console = BufferConsole::default();
+ let mut fs = MemoryFileSystem::default();
+
+ let result = run_cli(
+ DynRef::Borrowed(&mut fs),
+ &mut console,
+ Args::from(&[("lint"), "--no-errors-on-unmatched", ("file.js")]),
+ );
+
+ assert!(result.is_ok(), "run_cli returned {result:?}");
+
+ assert_cli_snapshot(SnapshotPayload::new(
+ module_path!(),
+ "doesnt_error_if_no_files_were_processed",
+ fs,
+ console,
+ result,
+ ));
+}
diff --git a/crates/rome_cli/tests/commands/mod.rs b/crates/rome_cli/tests/commands/mod.rs
--- a/crates/rome_cli/tests/commands/mod.rs
+++ b/crates/rome_cli/tests/commands/mod.rs
@@ -2,6 +2,7 @@ mod check;
mod ci;
mod format;
mod init;
+mod lint;
mod lsp_proxy;
mod migrate;
mod rage;
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/all_rules.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/all_rules.snap
@@ -0,0 +1,53 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "linter": {
+ "rules": { "all": true }
+ }
+}
+```
+
+## `fix.js`
+
+```js
+(1 >= -0)
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+fix.js:1:2 lint/suspicious/noCompareNegZero FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Do not use the >= operator to compare against -0.
+
+ > 1 │ (1 >= -0)
+ │ ^^^^^^^
+
+ i Safe fix: Replace -0 with 0
+
+ 1 │ (1·>=·-0)
+ │ -
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/apply_bogus_argument.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/apply_bogus_argument.snap
@@ -0,0 +1,39 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `fix.js`
+
+```js
+function _13_1_3_fun(arguments) { }
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+fix.js:1:22 parse ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Illegal use of `arguments` as an identifier in strict mode
+
+ > 1 │ function _13_1_3_fun(arguments) { }
+ │ ^^^^^^^^^
+
+
+```
+
+```block
+Fixed 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/apply_noop.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/apply_noop.snap
@@ -0,0 +1,17 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `fix.js`
+
+```js
+(1 >= 0)
+```
+
+# Emitted Messages
+
+```block
+Fixed 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/apply_ok.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/apply_ok.snap
@@ -0,0 +1,17 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `fix.js`
+
+```js
+(1 >= 0)
+```
+
+# Emitted Messages
+
+```block
+Fixed 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/apply_suggested.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/apply_suggested.snap
@@ -0,0 +1,19 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `fix.js`
+
+```js
+const a = 4;
+console.log(a);
+
+```
+
+# Emitted Messages
+
+```block
+Fixed 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/apply_suggested_error.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/apply_suggested_error.snap
@@ -0,0 +1,25 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `fix.js`
+
+```js
+let a = 4;
+debugger;
+console.log(a);
+
+```
+
+# Termination Message
+
+```block
+flags/invalid ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Incompatible arguments --apply and --apply-unsafe
+
+
+
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/apply_unsafe_no_assign_in_expression.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/apply_unsafe_no_assign_in_expression.snap
@@ -0,0 +1,17 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `fix.js`
+
+```js
+res.onAborted(() => (aborted === true));
+```
+
+# Emitted Messages
+
+```block
+Fixed 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/apply_unsafe_with_error.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/apply_unsafe_with_error.snap
@@ -0,0 +1,56 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `test1.js`
+
+```js
+const a = 4;
+console.log(a);
+function f() { arguments; }
+
+```
+
+## `test2.js`
+
+```js
+const a = 4;
+console.log(a);
+function f() { arguments; }
+
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+test1.js internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Fixes applied to the file, but there are still diagnostics to address.
+
+
+```
+
+```block
+test2.js internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Fixes applied to the file, but there are still diagnostics to address.
+
+
+```
+
+```block
+Fixed 2 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/check_help.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/check_help.snap
@@ -0,0 +1,71 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+# Emitted Messages
+
+```block
+Run various checks on a set of files.
+
+Usage: [--apply] [--apply-unsafe] [<PATH>]...
+
+Available positional items:
+ <PATH> Single file, single path or list of paths
+
+Available options:
+ --apply Apply safe fixes, formatting
+ --apply-unsafe Apply safe fixes and unsafe fixes, formatting and import sorting
+ The configuration that is contained inside the file `rome.json`
+ Set of properties to integrate Rome with a VCS software.
+ --vcs-client-kind <git> The kind of client.
+ --vcs-enabled <true|false> Whether Rome should integrate itself with the VCS client
+ --vcs-use-ignore-file <true|false> Whether Rome should use the VCS ignore file. When [true],
+ Rome will ignore the files specified in the ignore file.
+ --vcs-root <PATH> The folder where Rome should check for VCS files. By default, Rome will
+ use the same folder where `rome.json` was found. If Rome can't find the
+ configuration, it will attempt to use the current working directory. If no
+ current working directory can't be found, Rome won't use the VCS integration,
+ and a diagnostic will be emitted
+
+ The configuration of the filesystem
+ --files-max-size <NUMBER> The maximum allowed size for source code files in bytes. Files
+ above this limit will be ignored for performance reason. Defaults to 1 MiB
+ --files-ignore-unknown <true|false> Tells Rome to not emit diagnostics when handling files
+ that doesn't know
+
+ Options applied to the formatter
+ --indent-style <tab|space> The indent style.
+ --indent-size <NUMBER> The size of the indentation, 2 by default
+ --line-width <NUMBER> What's the max width of a line. Defaults to 80.
+
+ --quote-style <double|single> The style for quotes. Defaults to double.
+ --jsx-quote-style <double|single> The style for JSX quotes. Defaults to double.
+ --quote-properties <preserve|as-needed> When properties in objects are quoted. Defaults to
+ asNeeded.
+ --trailing-comma <all|es5|none> Print trailing commas wherever possible in multi-line
+ comma-separated syntactic structures. Defaults to "all".
+ --semicolons <always|as-needed> Whether the formatter prints semicolons for all statements
+ or only in for statements where it is necessary because of ASI.
+
+ Global options applied to all commands
+ --colors <off|force> Set the formatting mode for markup: "off" prints everything as plain
+ text, "force" forces the formatting of markup using ANSI even if the console
+ output is determined to be incompatible
+ --use-server Connect to a running instance of the Rome daemon server.
+ --verbose Print additional verbose advices on diagnostics
+ --config-path <PATH> Set the filesystem path to the directory of the rome.json configuration
+ file
+ --max-diagnostics <NUMBER> Cap the amount of diagnostics displayed (default: 20)
+ --skip-errors Skip over files containing syntax errors instead of emitting an error
+ diagnostic.
+ --no-errors-on-unmatched Silence errors that would be emitted in case no files were
+ processed during the execution of the command.
+ --json Reports information using the JSON format
+
+ --stdin-file-path <PATH> A file name with its extension to pass when reading from standard
+ in, e.g. echo 'let a;' | rome lint --stdin-file-path=file.js"
+ -h, --help Prints help information
+
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/check_json_files.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/check_json_files.snap
@@ -0,0 +1,60 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "linter": {
+ "rules": {
+ "nursery": {
+ "noDuplicateJsonKeys": "error"
+ }
+ }
+ }
+}
+```
+
+## `test.json`
+
+```json
+{ "foo": true, "foo": true }
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+test.json:1:3 lint/nursery/noDuplicateJsonKeys ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × The key foo was already declared.
+
+ > 1 │ { "foo": true, "foo": true }
+ │ ^^^^^
+
+ i This where a duplicated key was declared again.
+
+ > 1 │ { "foo": true, "foo": true }
+ │ ^^^^^
+
+ i If a key is defined multiple times, only the last definition takes effect. Previous definitions are ignored.
+
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/check_stdin_apply_successfully.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/check_stdin_apply_successfully.snap
@@ -0,0 +1,17 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+# Input messages
+
+```block
+function f() {return{}} class Foo { constructor() {} }
+```
+
+# Emitted Messages
+
+```block
+function f() {return{}} class Foo { }
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/check_stdin_apply_unsafe_successfully.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/check_stdin_apply_unsafe_successfully.snap
@@ -0,0 +1,17 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+# Input messages
+
+```block
+function f() {return{}} class Foo { constructor() {} }
+```
+
+# Emitted Messages
+
+```block
+function f() {return{}} class Foo { }
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/config_recommended_group.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/config_recommended_group.snap
@@ -0,0 +1,58 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "linter": {
+ "rules": {
+ "recommended": false,
+ "correctness": {
+ "recommended": true
+ }
+ }
+ }
+}
+```
+
+## `check.js`
+
+```js
+new Symbol("");
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+check.js:1:1 lint/correctness/noNewSymbol FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Symbol cannot be called as a constructor.
+
+ > 1 │ new Symbol("");
+ │ ^^^^^^^^^^^^^^
+
+ i Suggested fix: Remove new.
+
+ 1 │ new·Symbol("");
+ │ ----
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/deprecated_suppression_comment.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/deprecated_suppression_comment.snap
@@ -0,0 +1,36 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `file.js`
+
+```js
+// rome-ignore lint(suspicious/noDoubleEquals): test
+a == b;
+```
+
+# Emitted Messages
+
+```block
+file.js:1:1 suppressions/deprecatedSyntax FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Suppression is using a deprecated syntax
+
+ > 1 │ // rome-ignore lint(suspicious/noDoubleEquals): test
+ │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ 2 │ a == b;
+
+ i Safe fix: Rewrite suppression to use the newer syntax
+
+ 1 │ - //·rome-ignore·lint(suspicious/noDoubleEquals):·test
+ 1 │ + //·rome-ignore·lint/suspicious/noDoubleEquals:·test
+ 2 2 │ a == b;
+
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/doesnt_error_if_no_files_were_processed.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/doesnt_error_if_no_files_were_processed.snap
@@ -0,0 +1,11 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+# Emitted Messages
+
+```block
+Checked 0 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/downgrade_severity.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/downgrade_severity.snap
@@ -0,0 +1,47 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "linter": {
+ "rules": {
+ "recommended": true,
+ "suspicious": {
+ "noDebugger": "warn"
+ }
+ }
+ }
+}
+```
+
+## `file.js`
+
+```js
+debugger;
+```
+
+# Emitted Messages
+
+```block
+file.js:1:1 lint/suspicious/noDebugger FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! This is an unexpected use of the debugger statement.
+
+ > 1 │ debugger;
+ │ ^^^^^^^^^
+
+ i Suggested fix: Remove debugger statement
+
+ 1 │ debugger;
+ │ ---------
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/file_too_large.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/file_too_large.snap
@@ -0,0 +1,34 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+check.js lint ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Size of check.js is 1.0 MiB which exceeds configured maximum of 1.0 MiB for this project. The file size limit exists to prevent us inadvertently slowing down and loading large files that we shouldn't.
+
+
+```
+
+```block
+Checked 0 file(s) in <TIME>
+```
+
+```block
+Skipped 1 file(s)
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/file_too_large_cli_limit.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/file_too_large_cli_limit.snap
@@ -0,0 +1,41 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `check.js`
+
+```js
+statement1();
+statement2();
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+check.js lint ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Size of check.js is 27 B which exceeds configured maximum of 16 B for this project. The file size limit exists to prevent us inadvertently slowing down and loading large files that we shouldn't.
+
+
+```
+
+```block
+Checked 0 file(s) in <TIME>
+```
+
+```block
+Skipped 1 file(s)
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/file_too_large_config_limit.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/file_too_large_config_limit.snap
@@ -0,0 +1,51 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "files": {
+ "maxSize": 16
+ }
+}
+```
+
+## `check.js`
+
+```js
+statement1();
+statement2();
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+check.js lint ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Size of check.js is 27 B which exceeds configured maximum of 16 B for this project. The file size limit exists to prevent us inadvertently slowing down and loading large files that we shouldn't.
+
+
+```
+
+```block
+Checked 0 file(s) in <TIME>
+```
+
+```block
+Skipped 1 file(s)
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/files_max_size_parse_error.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/files_max_size_parse_error.snap
@@ -0,0 +1,26 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `check.js`
+
+```js
+statement1();
+statement2();
+```
+
+# Termination Message
+
+```block
+flags/invalid ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Failed to parse CLI arguments.
+
+ Caused by:
+ Couldn't parse "-1": invalid digit found in string
+
+
+
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_dereferenced_symlink.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_dereferenced_symlink.snap
@@ -0,0 +1,32 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+<TEMP_DIR>/rome_test_broken_symlink/broken_symlink internalError/fs ━━━━━━━━━━━━━━━━━━━━
+
+ ! Dereferenced symlink
+
+ i Rome encountered a file system entry that is a broken symbolic link: <TEMP_DIR>/rome_test_broken_symlink/broken_symlink
+
+
+```
+
+```block
+Checked 0 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_infinite_symlink_expansion.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_infinite_symlink_expansion.snap
@@ -0,0 +1,42 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+<TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix internalError/fs ━━━━━━━━━━━━━━━━━━━━
+
+ ! Infinite symlink expansion
+
+ × Rome encountered a file system entry that leads to an infinite symbolic link expansion, causing an infinite cycle: <TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix
+
+
+```
+
+```block
+<TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix internalError/fs ━━━━━━━━━━━━━━━━━━━━
+
+ ! Infinite symlink expansion
+
+ × Rome encountered a file system entry that leads to an infinite symbolic link expansion, causing an infinite cycle: <TEMP_DIR>/rome_test_infinite_symlink_exapansion/prefix
+
+
+```
+
+```block
+Checked 0 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_read_only.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_read_only.snap
@@ -0,0 +1,36 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+test.js internalError/io INTERNAL ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × cannot acquire write access to file in read-only filesystem
+
+ ! This diagnostic was derived from an internal Rome error. Potential bug, please report it if necessary.
+
+
+```
+
+```block
+Fixed 0 file(s) in <TIME>
+```
+
+```block
+Skipped 1 file(s)
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_unknown.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/fs_error_unknown.snap
@@ -0,0 +1,32 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+prefix/ci.js internalError/fs ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Unknown file type
+
+ i Rome encountered a file system entry that's neither a file, directory or symbolic link
+
+
+```
+
+```block
+Checked 0 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/fs_files_ignore_symlink.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/fs_files_ignore_symlink.snap
@@ -0,0 +1,11 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+# Emitted Messages
+
+```block
+Fixed 2 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/ignore_configured_globals.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/ignore_configured_globals.snap
@@ -0,0 +1,27 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "javascript": {
+ "globals": ["foo", "bar"]
+ }
+}
+```
+
+## `fix.js`
+
+```js
+foo.call(); bar.call();
+```
+
+# Emitted Messages
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/ignore_vcs_ignored_file.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/ignore_vcs_ignored_file.snap
@@ -0,0 +1,76 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "vcs": {
+ "enabled": true,
+ "clientKind": "git",
+ "useIgnoreFile": true
+ }
+}
+```
+
+## `.git`
+
+```git
+
+```
+
+## `.gitignore`
+
+```gitignore
+
+file2.js
+
+```
+
+## `file1.js`
+
+```js
+array.map(sentence => sentence.split(' ')).flat();
+```
+
+## `file2.js`
+
+```js
+foo.call(); bar.call();
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+file1.js:1:1 lint/complexity/useFlatMap FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × The call chain .map().flat() can be replaced with a single .flatMap() call.
+
+ > 1 │ array.map(sentence => sentence.split(' ')).flat();
+ │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+ i Safe fix: Replace the chain with .flatMap().
+
+ - array.map(sentence·=>·sentence.split('·')).flat();
+ + array.flatMap(sentence·=>·sentence.split('·'));
+
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/ignore_vcs_ignored_file_via_cli.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/ignore_vcs_ignored_file_via_cli.snap
@@ -0,0 +1,64 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `./.git`
+
+```git
+
+```
+
+## `./.gitignore`
+
+```gitignore
+
+file2.js
+
+```
+
+## `file1.js`
+
+```js
+array.map(sentence => sentence.split(' ')).flat();
+```
+
+## `file2.js`
+
+```js
+foo.call(); bar.call();
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+file1.js:1:1 lint/complexity/useFlatMap FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × The call chain .map().flat() can be replaced with a single .flatMap() call.
+
+ > 1 │ array.map(sentence => sentence.split(' ')).flat();
+ │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+ i Safe fix: Replace the chain with .flatMap().
+
+ - array.map(sentence·=>·sentence.split('·')).flat();
+ + array.flatMap(sentence·=>·sentence.split('·'));
+
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/ignore_vcs_os_independent_parse.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/ignore_vcs_os_independent_parse.snap
@@ -0,0 +1,55 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "vcs": {
+ "enabled": true,
+ "clientKind": "git",
+ "useIgnoreFile": true
+ }
+}
+```
+
+## `.git`
+
+```git
+
+```
+
+## `.gitignore`
+
+```gitignore
+something.js
+file2.js
+file3.js
+```
+
+## `file1.js`
+
+```js
+blah.call();
+```
+
+## `file2.js`
+
+```js
+foo.call(); bar.call();
+```
+
+## `file3.js`
+
+```js
+console.log('rome is cool');
+```
+
+# Emitted Messages
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/ignores_unknown_file.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/ignores_unknown_file.snap
@@ -0,0 +1,24 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `test.js`
+
+```js
+console.log('bar');
+
+```
+
+## `test.txt`
+
+```txt
+content
+```
+
+# Emitted Messages
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/lint_error.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/lint_error.snap
@@ -0,0 +1,59 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `check.js`
+
+```js
+for(;true;);
+
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+check.js:1:1 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ > 1 │ for(;true;);
+ │ ^^^^^^^^^^^
+ 2 │
+
+ i Suggested fix: Use a while loop
+
+ 1 │ - for(;true;);
+ 1 │ + while·(true);
+ 2 2 │
+
+
+```
+
+```block
+check.js:1:6 lint/nursery/noConstantCondition ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Unexpected constant condition.
+
+ > 1 │ for(;true;);
+ │ ^^^^
+ 2 │
+
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/max_diagnostics.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/max_diagnostics.snap
@@ -0,0 +1,167 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `src/file_0.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_1.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_10.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_11.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_12.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_13.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_14.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_15.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_16.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_17.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_18.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_19.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_2.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_3.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_4.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_5.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_6.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_7.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_8.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_9.js`
+
+```js
+for(;true;);
+
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+The number of diagnostics exceeds the number allowed by Rome.
+Diagnostics not shown: 30.
+```
+
+```block
+Checked 20 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/max_diagnostics_default.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/max_diagnostics_default.snap
@@ -0,0 +1,307 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `src/file_0.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_1.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_10.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_11.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_12.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_13.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_14.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_15.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_16.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_17.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_18.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_19.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_2.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_20.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_21.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_22.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_23.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_24.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_25.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_26.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_27.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_28.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_29.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_3.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_30.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_31.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_32.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_33.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_34.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_35.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_36.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_37.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_38.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_39.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_4.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_5.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_6.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_7.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_8.js`
+
+```js
+for(;true;);
+
+```
+
+## `src/file_9.js`
+
+```js
+for(;true;);
+
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+The number of diagnostics exceeds the number allowed by Rome.
+Diagnostics not shown: 60.
+```
+
+```block
+Checked 40 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/maximum_diagnostics.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/maximum_diagnostics.snap
@@ -0,0 +1,498 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `check.js`
+
+```js
+
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+check.js:2:1 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ > 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 1 1 │
+ 2 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 2 │ + while·(true);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:2:13 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ > 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 1 1 │
+ 2 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 2 │ + for(;true;);while·(true);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:2:25 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ > 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 1 1 │
+ 2 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 2 │ + for(;true;);for(;true;);while·(true);for(;true;);for(;true;);for(;true;);
+ 3 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:2:37 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ > 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 1 1 │
+ 2 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 2 │ + for(;true;);for(;true;);for(;true;);while·(true);for(;true;);for(;true;);
+ 3 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:2:49 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ > 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 1 1 │
+ 2 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 2 │ + for(;true;);for(;true;);for(;true;);for(;true;);while·(true);for(;true;);
+ 3 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:2:61 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ > 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 1 1 │
+ 2 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 2 │ + for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);while·(true);
+ 3 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:3:1 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ > 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 1 1 │
+ 2 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ + while·(true);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:3:13 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ > 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 1 1 │
+ 2 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ + for(;true;);while·(true);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:3:25 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ > 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 1 1 │
+ 2 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ + for(;true;);for(;true;);while·(true);for(;true;);for(;true;);for(;true;);
+ 4 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:3:37 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ > 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 1 1 │
+ 2 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ + for(;true;);for(;true;);for(;true;);while·(true);for(;true;);for(;true;);
+ 4 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:3:49 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ > 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 1 1 │
+ 2 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ + for(;true;);for(;true;);for(;true;);for(;true;);while·(true);for(;true;);
+ 4 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:3:61 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ > 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 1 1 │
+ 2 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ + for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);while·(true);
+ 4 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:4:1 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ > 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 2 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ + while·(true);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 6 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:4:13 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ > 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 2 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ + for(;true;);while·(true);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 6 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:4:25 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ > 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 2 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ + for(;true;);for(;true;);while·(true);for(;true;);for(;true;);for(;true;);
+ 5 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 6 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:4:37 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ > 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 2 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ + for(;true;);for(;true;);for(;true;);while·(true);for(;true;);for(;true;);
+ 5 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 6 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:4:49 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ > 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 2 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ + for(;true;);for(;true;);for(;true;);for(;true;);while·(true);for(;true;);
+ 5 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 6 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:4:61 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ > 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 2 2 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 3 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ + for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);while·(true);
+ 5 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 6 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:5:1 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ > 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 7 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 3 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 │ + while·(true);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 6 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 7 7 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+check.js:5:13 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ > 5 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ │ ^^^^^^^^^^^
+ 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 7 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+ i Suggested fix: Use a while loop
+
+ 3 3 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 4 4 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 │ - for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 5 │ + for(;true;);while·(true);for(;true;);for(;true;);for(;true;);for(;true;);
+ 6 6 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+ 7 7 │ for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);for(;true;);
+
+
+```
+
+```block
+The number of diagnostics exceeds the number allowed by Rome.
+Diagnostics not shown: 76.
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/no_lint_if_files_are_listed_in_ignore_option.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/no_lint_if_files_are_listed_in_ignore_option.snap
@@ -0,0 +1,48 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "files": {
+ "ignore": ["test1.js"]
+ },
+ "linter": {
+ "enabled": true,
+ "ignore": ["test2.js"]
+ }
+}
+```
+
+## `test1.js`
+
+```js
+(1 >= -0)
+```
+
+## `test2.js`
+
+```js
+(1 >= -0)
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+Fixed 0 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/no_lint_if_linter_is_disabled.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/no_lint_if_linter_is_disabled.snap
@@ -0,0 +1,38 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "linter": {
+ "enabled": false
+ }
+}
+```
+
+## `fix.js`
+
+```js
+(1 >= -0)
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+Checked 0 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/no_lint_if_linter_is_disabled_when_run_apply.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/no_lint_if_linter_is_disabled_when_run_apply.snap
@@ -0,0 +1,38 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "linter": {
+ "enabled": false
+ }
+}
+```
+
+## `fix.js`
+
+```js
+(1 >= -0)
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+Fixed 0 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/no_lint_when_file_is_ignored.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/no_lint_when_file_is_ignored.snap
@@ -0,0 +1,39 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "linter": {
+ "enabled": true,
+ "ignore": ["test.js"]
+ }
+}
+```
+
+## `test.js`
+
+```js
+(1 >= -0)
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+Fixed 0 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/no_supported_file_found.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/no_supported_file_found.snap
@@ -0,0 +1,22 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+Checked 0 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/nursery_unstable.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/nursery_unstable.snap
@@ -0,0 +1,46 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `check.js`
+
+```js
+if(a = b) {}
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+check.js:1:4 lint/suspicious/noAssignInExpressions FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × The assignment should not be in an expression.
+
+ > 1 │ if(a = b) {}
+ │ ^^^^^
+
+ i The use of assignments in expressions is confusing.
+ Expressions are often considered as side-effect free.
+
+ i Suggested fix: Did you mean '==='?
+
+ 1 │ if(a·===·b)·{}
+ │ ++
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/parse_error.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/parse_error.snap
@@ -0,0 +1,47 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `check.js`
+
+```js
+if
+
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+check.js:2:1 parse ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × expected `(` but instead the file ends
+
+ 1 │ if
+ > 2 │
+ │
+
+ i the file ends here
+
+ 1 │ if
+ > 2 │
+ │
+
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/print_verbose.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/print_verbose.snap
@@ -0,0 +1,59 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `check.js`
+
+```js
+for(;true;);
+
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+check.js:1:1 lint/style/useWhile FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use while loops instead of for loops.
+
+ > 1 │ for(;true;);
+ │ ^^^^^^^^^^^
+ 2 │
+
+ i Suggested fix: Use a while loop
+
+ 1 │ - for(;true;);
+ 1 │ + while·(true);
+ 2 2 │
+
+
+```
+
+```block
+check.js:1:6 lint/nursery/noConstantCondition ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Unexpected constant condition.
+
+ > 1 │ for(;true;);
+ │ ^^^^
+ 2 │
+
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/should_apply_correct_file_source.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/should_apply_correct_file_source.snap
@@ -0,0 +1,32 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "linter": {
+ "rules": {
+ "recommended": true,
+ "correctness": {
+ "noUndeclaredVariables": "error"
+ }
+ }
+ }
+}
+```
+
+## `file.ts`
+
+```ts
+type A = { a: string }; type B = Partial<A>
+```
+
+# Emitted Messages
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/should_disable_a_rule.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/should_disable_a_rule.snap
@@ -0,0 +1,33 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "linter": {
+ "rules": {
+ "recommended": true,
+ "suspicious": {
+ "noDebugger": "off"
+ }
+ }
+ }
+}
+```
+
+## `fix.js`
+
+```js
+debugger;
+
+```
+
+# Emitted Messages
+
+```block
+Fixed 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/should_disable_a_rule_group.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/should_disable_a_rule_group.snap
@@ -0,0 +1,32 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "linter": {
+ "rules": {
+ "recommended": true,
+ "suspicious": {
+ "recommended": false
+ }
+ }
+ }
+}
+```
+
+## `fix.js`
+
+```js
+(1 >= -0)
+```
+
+# Emitted Messages
+
+```block
+Fixed 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/should_not_disable_recommended_rules_for_a_group.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/should_not_disable_recommended_rules_for_a_group.snap
@@ -0,0 +1,73 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "$schema": "https://docs.rome.tools/schemas/12.1.0/schema.json",
+ "organizeImports": {
+ "enabled": false
+ },
+ "linter": {
+ "enabled": true,
+ "rules": {
+ "recommended": true,
+ "complexity": {
+ "noUselessSwitchCase": "off"
+ }
+ }
+ }
+}
+```
+
+## `fix.js`
+
+```js
+const array = ["split", "the text", "into words"];
+// next line should error because of the recommended rule
+array.map((sentence) => sentence.split(" ")).flat();
+
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+fix.js:3:1 lint/complexity/useFlatMap FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × The call chain .map().flat() can be replaced with a single .flatMap() call.
+
+ 1 │ const array = ["split", "the text", "into words"];
+ 2 │ // next line should error because of the recommended rule
+ > 3 │ array.map((sentence) => sentence.split(" ")).flat();
+ │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+ 4 │
+
+ i Safe fix: Replace the chain with .flatMap().
+
+ 1 1 │ const array = ["split", "the text", "into words"];
+ 2 2 │ // next line should error because of the recommended rule
+ 3 │ - array.map((sentence)·=>·sentence.split("·")).flat();
+ 3 │ + array.flatMap((sentence)·=>·sentence.split("·"));
+ 4 4 │
+
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/should_not_enable_all_recommended_rules.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/should_not_enable_all_recommended_rules.snap
@@ -0,0 +1,48 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "$schema": "https://docs.rome.tools/schemas/12.1.0/schema.json",
+ "organizeImports": {
+ "enabled": false
+ },
+ "linter": {
+ "enabled": true,
+ "rules": {
+ "recommended": false,
+ "a11y": {},
+ "complexity": {},
+ "correctness": {},
+ "performance": {},
+ "security": {},
+ "style": {},
+ "suspicious": {}
+ }
+ }
+}
+```
+
+## `fix.js`
+
+```js
+
+ LOOP: for (const x of xs) {
+ if (x > 0) {
+ break;
+ }
+ f(x);
+ }
+
+```
+
+# Emitted Messages
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/should_not_enable_nursery_rules.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/should_not_enable_nursery_rules.snap
@@ -0,0 +1,43 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "$schema": "https://docs.rome.tools/schemas/12.1.0/schema.json",
+ "organizeImports": {
+ "enabled": false
+ },
+ "linter": {
+ "enabled": true,
+ "rules": {
+ "recommended": true,
+ "nursery": {
+ "noAccumulatingSpread": "error"
+ }
+ }
+ }
+}
+```
+
+## `fix.ts`
+
+```ts
+const bannedType: Boolean = true;
+
+if (true) {
+ const obj = {};
+ obj["useLiteralKey"];
+}
+
+```
+
+# Emitted Messages
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/suppression_syntax_error.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/suppression_syntax_error.snap
@@ -0,0 +1,41 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `check.js`
+
+```js
+// rome-ignore(:
+
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+check.js:1:15 suppressions/parse ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × unexpected token, expected one of ':' or whitespace
+
+ > 1 │ // rome-ignore(:
+ │ ^
+ 2 │
+
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/top_level_all_down_level_not_all.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/top_level_all_down_level_not_all.snap
@@ -0,0 +1,137 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "linter": {
+ "rules": {
+ "all": true,
+ "style": {
+ "all": false
+ }
+ }
+ }
+}
+```
+
+## `fix.js`
+
+```js
+
+ function f() {arguments;}
+ const FOO = "FOO";
+ var x, y;
+
+```
+
+# Emitted Messages
+
+```block
+fix.js:2:19 lint/correctness/noUndeclaredVariables ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! The arguments variable is undeclared
+
+ > 2 │ function f() {arguments;}
+ │ ^^^^^^^^^
+ 3 │ const FOO = "FOO";
+ 4 │ var x, y;
+
+
+```
+
+```block
+fix.js:2:14 lint/correctness/noUnusedVariables ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! This function is unused.
+
+ > 2 │ function f() {arguments;}
+ │ ^
+ 3 │ const FOO = "FOO";
+ 4 │ var x, y;
+
+ i Unused variables usually are result of incomplete refactoring, typos and other source of bugs.
+
+
+```
+
+```block
+fix.js:3:11 lint/correctness/noUnusedVariables FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! This variable is unused.
+
+ 2 │ function f() {arguments;}
+ > 3 │ const FOO = "FOO";
+ │ ^^^
+ 4 │ var x, y;
+ 5 │
+
+ i Unused variables usually are result of incomplete refactoring, typos and other source of bugs.
+
+ i Suggested fix: If this is intentional, prepend FOO with an underscore.
+
+ 1 1 │
+ 2 2 │ function f() {arguments;}
+ 3 │ - ····const·FOO·=·"FOO";
+ 3 │ + ····const·_FOO·=·"FOO";
+ 4 4 │ var x, y;
+ 5 5 │
+
+
+```
+
+```block
+fix.js:4:9 lint/correctness/noUnusedVariables FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! This variable is unused.
+
+ 2 │ function f() {arguments;}
+ 3 │ const FOO = "FOO";
+ > 4 │ var x, y;
+ │ ^
+ 5 │
+
+ i Unused variables usually are result of incomplete refactoring, typos and other source of bugs.
+
+ i Suggested fix: If this is intentional, prepend x with an underscore.
+
+ 2 2 │ function f() {arguments;}
+ 3 3 │ const FOO = "FOO";
+ 4 │ - ····var·x,·y;
+ 4 │ + ····var·_x,·y;
+ 5 5 │
+
+
+```
+
+```block
+fix.js:4:12 lint/correctness/noUnusedVariables FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! This variable is unused.
+
+ 2 │ function f() {arguments;}
+ 3 │ const FOO = "FOO";
+ > 4 │ var x, y;
+ │ ^
+ 5 │
+
+ i Unused variables usually are result of incomplete refactoring, typos and other source of bugs.
+
+ i Suggested fix: If this is intentional, prepend y with an underscore.
+
+ 2 2 │ function f() {arguments;}
+ 3 3 │ const FOO = "FOO";
+ 4 │ - ····var·x,·y;
+ 4 │ + ····var·x,·_y;
+ 5 5 │
+
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/top_level_not_all_down_level_all.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/top_level_not_all_down_level_all.snap
@@ -0,0 +1,111 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "linter": {
+ "rules": {
+ "all": false,
+ "style": {
+ "all": true
+ }
+ }
+ }
+}
+```
+
+## `fix.js`
+
+```js
+
+ function f() {arguments;}
+ const FOO = "FOO";
+ var x, y;
+
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+fix.js:4:5 lint/style/useSingleVarDeclarator FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Declare variables separately
+
+ 2 │ function f() {arguments;}
+ 3 │ const FOO = "FOO";
+ > 4 │ var x, y;
+ │ ^^^^^^^^^
+ 5 │
+
+ i Suggested fix: Break out into multiple declarations
+
+ 2 2 │ function f() {arguments;}
+ 3 3 │ const FOO = "FOO";
+ 4 │ - ····var·x,·y;
+ 4 │ + ····var·x;
+ 5 │ + ····var·y;
+ 5 6 │
+
+
+```
+
+```block
+fix.js:2:19 lint/style/noArguments ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use the rest parameters instead of arguments.
+
+ > 2 │ function f() {arguments;}
+ │ ^^^^^^^^^
+ 3 │ const FOO = "FOO";
+ 4 │ var x, y;
+
+ i arguments does not have Array.prototype methods and can be inconvenient to use.
+
+
+```
+
+```block
+fix.js:4:5 lint/style/noVar FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Use let or const instead of var.
+
+ 2 │ function f() {arguments;}
+ 3 │ const FOO = "FOO";
+ > 4 │ var x, y;
+ │ ^^^^^^^^
+ 5 │
+
+ i A variable declared with var is accessible in the whole module. Thus, the variable can be accessed before its initialization and outside the block where it is declared.
+
+ i See MDN web docs for more details.
+
+ i Suggested fix: Use 'let' instead.
+
+ 2 2 │ function f() {arguments;}
+ 3 3 │ const FOO = "FOO";
+ 4 │ - ····var·x,·y;
+ 4 │ + ····let·x,·y;
+ 5 5 │
+
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/unsupported_file.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/unsupported_file.snap
@@ -0,0 +1,29 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `check.txt`
+
+```txt
+for(;true;);
+
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × No files were processed in the specified paths.
+
+
+
+```
+
+# Emitted Messages
+
+```block
+Checked 0 file(s) in <TIME>
+```
+
+
diff --git /dev/null b/crates/rome_cli/tests/snapshots/main_commands_lint/upgrade_severity.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_cli/tests/snapshots/main_commands_lint/upgrade_severity.snap
@@ -0,0 +1,59 @@
+---
+source: crates/rome_cli/tests/snap_test.rs
+expression: content
+---
+## `rome.json`
+
+```json
+{
+ "linter": {
+ "rules": {
+ "recommended": true,
+ "style": {
+ "noNegationElse": "error"
+ }
+ }
+ }
+}
+```
+
+## `file.js`
+
+```js
+if(!cond) { exprA(); } else { exprB() }
+```
+
+# Termination Message
+
+```block
+internalError/io ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Some errors were emitted while running checks
+
+
+
+```
+
+# Emitted Messages
+
+```block
+file.js:1:1 lint/style/noNegationElse FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ × Invert blocks when performing a negation test.
+
+ > 1 │ if(!cond) { exprA(); } else { exprB() }
+ │ ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
+
+ i Suggested fix: Exchange alternate and consequent of the node
+
+ - if(!cond)·{·exprA();·}·else·{·exprB()·}
+ + if(cond)·{·exprB()·}·else·{·exprA();·}
+
+
+```
+
+```block
+Checked 1 file(s) in <TIME>
+```
+
+
| 📎 New `rome lint` command
### Description
Rome has a command called `rome check`, which does multiple things:
- check formatting
- checks linting
- check imports
And it will contain more in the future. To avoid possible friction in the future, I think Rome should ship a new command called `rome lint`, which only checks the code against lint rules. It will also have `-apply` and `--apply-unsafe` arguments.
| > Rome has a command called rome check, which does multiple things:
>
> - check formatting
I think that `rome check` does not check formatting, in contrast to `rome ci`.
Do you think that it should? What could be the difference with `rome ci`?
Otherwise, I like the idea.
> > Rome has a command called rome check, which does multiple things:
> >
> > - check formatting
>
> I think that `rome check` does not check formatting, in contrast to `rome ci`.
I am about to change that logic (I think it is a bug), but yes it does format. It also checks the imports.
> Do you think that it should? What could be the difference with `rome ci`?
>
The CI commands wouldn't allow to change any code, it would show the diagnostics on GitHub (check discussions proposal), etc.
> Otherwise, I like the idea.
Nice! If you want, I can guide you :) | 2023-06-29T02:43:27 | 0.1 | 08645a80c6139ab969aa1917f7e01dae3845d236 | [
"commands::check::fs_files_ignore_symlink"
] | [
"diagnostics::test::termination_diagnostic_size",
"metrics::tests::test_timing",
"metrics::tests::test_layer",
"cases::config_extends::extends_should_raise_an_error_for_unresolved_configuration_and_show_verbose",
"cases::config_extends::extends_should_raise_an_error_for_unresolved_configuration",
"command... | [] | [] |
rome/tools | 4,618 | rome__tools-4618 | [
"3989"
] | 08645a80c6139ab969aa1917f7e01dae3845d236 | diff --git a/CHANGELOG.md b/CHANGELOG.md
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -117,6 +117,10 @@ multiple files:
This rules disallow the use of `void`.
+- Add [`noNonoctalDecimalEscape`](https://docs.rome.tools/lint/rules/nononoctaldecimalescape/)
+
+ This rule disallows `\8` and `\9` escape sequences in string literals.
+
#### Other changes
- [`noRedeclare`](https://docs.rome.tools/lint/rules/noredeclare/): allow redeclare of index signatures are in different type members [#4478](https://github.com/rome/tools/issues/4478)
diff --git a/crates/rome_diagnostics_categories/src/categories.rs b/crates/rome_diagnostics_categories/src/categories.rs
--- a/crates/rome_diagnostics_categories/src/categories.rs
+++ b/crates/rome_diagnostics_categories/src/categories.rs
@@ -103,6 +103,7 @@ define_categories! {
"lint/nursery/noGlobalIsFinite": "https://docs.rome.tools/lint/rules/noGlobalIsFinite",
"lint/nursery/useArrowFunction": "https://docs.rome.tools/lint/rules/useArrowFunction",
"lint/nursery/noVoid": "https://docs.rome.tools/lint/rules/noVoid",
+ "lint/nursery/noNonoctalDecimalEscape": "https://docs.rome.tools/lint/rules/noNonoctalDecimalEscape",
// Insert new nursery rule here
diff --git a/crates/rome_js_analyze/src/analyzers/nursery.rs b/crates/rome_js_analyze/src/analyzers/nursery.rs
--- a/crates/rome_js_analyze/src/analyzers/nursery.rs
+++ b/crates/rome_js_analyze/src/analyzers/nursery.rs
@@ -4,6 +4,7 @@ use rome_analyze::declare_group;
pub(crate) mod no_confusing_arrow;
pub(crate) mod no_duplicate_jsx_props;
pub(crate) mod no_for_each;
+pub(crate) mod no_nonoctal_decimal_escape;
pub(crate) mod no_self_assign;
pub(crate) mod no_static_only_class;
pub(crate) mod no_void;
diff --git a/crates/rome_js_analyze/src/analyzers/nursery.rs b/crates/rome_js_analyze/src/analyzers/nursery.rs
--- a/crates/rome_js_analyze/src/analyzers/nursery.rs
+++ b/crates/rome_js_analyze/src/analyzers/nursery.rs
@@ -14,4 +15,4 @@ pub(crate) mod use_is_nan;
pub(crate) mod use_literal_enum_members;
pub(crate) mod use_literal_keys;
pub(crate) mod use_simple_number_keys;
-declare_group! { pub (crate) Nursery { name : "nursery" , rules : [self :: no_confusing_arrow :: NoConfusingArrow , self :: no_duplicate_jsx_props :: NoDuplicateJsxProps , self :: no_for_each :: NoForEach , self :: no_self_assign :: NoSelfAssign , self :: no_static_only_class :: NoStaticOnlyClass , self :: no_void :: NoVoid , self :: use_arrow_function :: UseArrowFunction , self :: use_grouped_type_import :: UseGroupedTypeImport , self :: use_heading_content :: UseHeadingContent , self :: use_is_nan :: UseIsNan , self :: use_literal_enum_members :: UseLiteralEnumMembers , self :: use_literal_keys :: UseLiteralKeys , self :: use_simple_number_keys :: UseSimpleNumberKeys ,] } }
+declare_group! { pub (crate) Nursery { name : "nursery" , rules : [self :: no_confusing_arrow :: NoConfusingArrow , self :: no_duplicate_jsx_props :: NoDuplicateJsxProps , self :: no_for_each :: NoForEach , self :: no_nonoctal_decimal_escape :: NoNonoctalDecimalEscape , self :: no_self_assign :: NoSelfAssign , self :: no_static_only_class :: NoStaticOnlyClass , self :: no_void :: NoVoid , self :: use_arrow_function :: UseArrowFunction , self :: use_grouped_type_import :: UseGroupedTypeImport , self :: use_heading_content :: UseHeadingContent , self :: use_is_nan :: UseIsNan , self :: use_literal_enum_members :: UseLiteralEnumMembers , self :: use_literal_keys :: UseLiteralKeys , self :: use_simple_number_keys :: UseSimpleNumberKeys ,] } }
diff --git a/crates/rome_service/src/configuration/linter/rules.rs b/crates/rome_service/src/configuration/linter/rules.rs
--- a/crates/rome_service/src/configuration/linter/rules.rs
+++ b/crates/rome_service/src/configuration/linter/rules.rs
@@ -1860,6 +1860,15 @@ pub struct Nursery {
)]
#[serde(skip_serializing_if = "Option::is_none")]
pub no_noninteractive_tabindex: Option<RuleConfiguration>,
+ #[doc = "Disallow \\8 and \\9 escape sequences in string literals."]
+ #[bpaf(
+ long("no-nonoctal-decimal-escape"),
+ argument("on|off|warn"),
+ optional,
+ hide
+ )]
+ #[serde(skip_serializing_if = "Option::is_none")]
+ pub no_nonoctal_decimal_escape: Option<RuleConfiguration>,
#[doc = "Enforce explicit role property is not the same as implicit/default role property on an element."]
#[bpaf(long("no-redundant-roles"), argument("on|off|warn"), optional, hide)]
#[serde(skip_serializing_if = "Option::is_none")]
diff --git a/crates/rome_service/src/configuration/linter/rules.rs b/crates/rome_service/src/configuration/linter/rules.rs
--- a/crates/rome_service/src/configuration/linter/rules.rs
+++ b/crates/rome_service/src/configuration/linter/rules.rs
@@ -1947,7 +1956,7 @@ pub struct Nursery {
}
impl Nursery {
const GROUP_NAME: &'static str = "nursery";
- pub(crate) const GROUP_RULES: [&'static str; 28] = [
+ pub(crate) const GROUP_RULES: [&'static str; 29] = [
"noAccumulatingSpread",
"noAriaUnsupportedElements",
"noBannedTypes",
diff --git a/crates/rome_service/src/configuration/linter/rules.rs b/crates/rome_service/src/configuration/linter/rules.rs
--- a/crates/rome_service/src/configuration/linter/rules.rs
+++ b/crates/rome_service/src/configuration/linter/rules.rs
@@ -1960,6 +1969,7 @@ impl Nursery {
"noGlobalIsFinite",
"noGlobalIsNan",
"noNoninteractiveTabindex",
+ "noNonoctalDecimalEscape",
"noRedundantRoles",
"noSelfAssign",
"noStaticOnlyClass",
diff --git a/crates/rome_service/src/configuration/linter/rules.rs b/crates/rome_service/src/configuration/linter/rules.rs
--- a/crates/rome_service/src/configuration/linter/rules.rs
+++ b/crates/rome_service/src/configuration/linter/rules.rs
@@ -1977,7 +1987,7 @@ impl Nursery {
"useNamingConvention",
"useSimpleNumberKeys",
];
- const RECOMMENDED_RULES: [&'static str; 16] = [
+ const RECOMMENDED_RULES: [&'static str; 17] = [
"noAriaUnsupportedElements",
"noBannedTypes",
"noConstantCondition",
diff --git a/crates/rome_service/src/configuration/linter/rules.rs b/crates/rome_service/src/configuration/linter/rules.rs
--- a/crates/rome_service/src/configuration/linter/rules.rs
+++ b/crates/rome_service/src/configuration/linter/rules.rs
@@ -1985,6 +1995,7 @@ impl Nursery {
"noDuplicateJsxProps",
"noGlobalIsFinite",
"noGlobalIsNan",
+ "noNonoctalDecimalEscape",
"noRedundantRoles",
"noSelfAssign",
"noStaticOnlyClass",
diff --git a/crates/rome_service/src/configuration/linter/rules.rs b/crates/rome_service/src/configuration/linter/rules.rs
--- a/crates/rome_service/src/configuration/linter/rules.rs
+++ b/crates/rome_service/src/configuration/linter/rules.rs
@@ -1995,7 +2006,7 @@ impl Nursery {
"useLiteralEnumMembers",
"useLiteralKeys",
];
- const RECOMMENDED_RULES_AS_FILTERS: [RuleFilter<'static>; 16] = [
+ const RECOMMENDED_RULES_AS_FILTERS: [RuleFilter<'static>; 17] = [
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[1]),
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[2]),
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[5]),
diff --git a/crates/rome_service/src/configuration/linter/rules.rs b/crates/rome_service/src/configuration/linter/rules.rs
--- a/crates/rome_service/src/configuration/linter/rules.rs
+++ b/crates/rome_service/src/configuration/linter/rules.rs
@@ -2006,14 +2017,15 @@ impl Nursery {
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[12]),
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[13]),
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[14]),
- RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[17]),
- RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[19]),
+ RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[15]),
+ RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[18]),
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[20]),
- RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[23]),
+ RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[21]),
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[24]),
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[25]),
+ RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[26]),
];
- const ALL_RULES_AS_FILTERS: [RuleFilter<'static>; 28] = [
+ const ALL_RULES_AS_FILTERS: [RuleFilter<'static>; 29] = [
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[0]),
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[1]),
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[2]),
diff --git a/crates/rome_service/src/configuration/linter/rules.rs b/crates/rome_service/src/configuration/linter/rules.rs
--- a/crates/rome_service/src/configuration/linter/rules.rs
+++ b/crates/rome_service/src/configuration/linter/rules.rs
@@ -2042,6 +2054,7 @@ impl Nursery {
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[25]),
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[26]),
RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[27]),
+ RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[28]),
];
#[doc = r" Retrieves the recommended rules"]
pub(crate) fn is_recommended(&self) -> bool { matches!(self.recommended, Some(true)) }
diff --git a/crates/rome_service/src/configuration/linter/rules.rs b/crates/rome_service/src/configuration/linter/rules.rs
--- a/crates/rome_service/src/configuration/linter/rules.rs
+++ b/crates/rome_service/src/configuration/linter/rules.rs
@@ -2112,86 +2125,91 @@ impl Nursery {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[11]));
}
}
- if let Some(rule) = self.no_redundant_roles.as_ref() {
+ if let Some(rule) = self.no_nonoctal_decimal_escape.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[12]));
}
}
- if let Some(rule) = self.no_self_assign.as_ref() {
+ if let Some(rule) = self.no_redundant_roles.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[13]));
}
}
- if let Some(rule) = self.no_static_only_class.as_ref() {
+ if let Some(rule) = self.no_self_assign.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[14]));
}
}
- if let Some(rule) = self.no_void.as_ref() {
+ if let Some(rule) = self.no_static_only_class.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[15]));
}
}
- if let Some(rule) = self.use_aria_prop_types.as_ref() {
+ if let Some(rule) = self.no_void.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[16]));
}
}
- if let Some(rule) = self.use_arrow_function.as_ref() {
+ if let Some(rule) = self.use_aria_prop_types.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[17]));
}
}
- if let Some(rule) = self.use_camel_case.as_ref() {
+ if let Some(rule) = self.use_arrow_function.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[18]));
}
}
- if let Some(rule) = self.use_exhaustive_dependencies.as_ref() {
+ if let Some(rule) = self.use_camel_case.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[19]));
}
}
- if let Some(rule) = self.use_grouped_type_import.as_ref() {
+ if let Some(rule) = self.use_exhaustive_dependencies.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[20]));
}
}
- if let Some(rule) = self.use_heading_content.as_ref() {
+ if let Some(rule) = self.use_grouped_type_import.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[21]));
}
}
- if let Some(rule) = self.use_hook_at_top_level.as_ref() {
+ if let Some(rule) = self.use_heading_content.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[22]));
}
}
- if let Some(rule) = self.use_is_nan.as_ref() {
+ if let Some(rule) = self.use_hook_at_top_level.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[23]));
}
}
- if let Some(rule) = self.use_literal_enum_members.as_ref() {
+ if let Some(rule) = self.use_is_nan.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[24]));
}
}
- if let Some(rule) = self.use_literal_keys.as_ref() {
+ if let Some(rule) = self.use_literal_enum_members.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[25]));
}
}
- if let Some(rule) = self.use_naming_convention.as_ref() {
+ if let Some(rule) = self.use_literal_keys.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[26]));
}
}
- if let Some(rule) = self.use_simple_number_keys.as_ref() {
+ if let Some(rule) = self.use_naming_convention.as_ref() {
if rule.is_enabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[27]));
}
}
+ if let Some(rule) = self.use_simple_number_keys.as_ref() {
+ if rule.is_enabled() {
+ index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[28]));
+ }
+ }
index_set
}
pub(crate) fn get_disabled_rules(&self) -> IndexSet<RuleFilter> {
diff --git a/crates/rome_service/src/configuration/linter/rules.rs b/crates/rome_service/src/configuration/linter/rules.rs
--- a/crates/rome_service/src/configuration/linter/rules.rs
+++ b/crates/rome_service/src/configuration/linter/rules.rs
@@ -2256,86 +2274,91 @@ impl Nursery {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[11]));
}
}
- if let Some(rule) = self.no_redundant_roles.as_ref() {
+ if let Some(rule) = self.no_nonoctal_decimal_escape.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[12]));
}
}
- if let Some(rule) = self.no_self_assign.as_ref() {
+ if let Some(rule) = self.no_redundant_roles.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[13]));
}
}
- if let Some(rule) = self.no_static_only_class.as_ref() {
+ if let Some(rule) = self.no_self_assign.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[14]));
}
}
- if let Some(rule) = self.no_void.as_ref() {
+ if let Some(rule) = self.no_static_only_class.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[15]));
}
}
- if let Some(rule) = self.use_aria_prop_types.as_ref() {
+ if let Some(rule) = self.no_void.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[16]));
}
}
- if let Some(rule) = self.use_arrow_function.as_ref() {
+ if let Some(rule) = self.use_aria_prop_types.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[17]));
}
}
- if let Some(rule) = self.use_camel_case.as_ref() {
+ if let Some(rule) = self.use_arrow_function.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[18]));
}
}
- if let Some(rule) = self.use_exhaustive_dependencies.as_ref() {
+ if let Some(rule) = self.use_camel_case.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[19]));
}
}
- if let Some(rule) = self.use_grouped_type_import.as_ref() {
+ if let Some(rule) = self.use_exhaustive_dependencies.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[20]));
}
}
- if let Some(rule) = self.use_heading_content.as_ref() {
+ if let Some(rule) = self.use_grouped_type_import.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[21]));
}
}
- if let Some(rule) = self.use_hook_at_top_level.as_ref() {
+ if let Some(rule) = self.use_heading_content.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[22]));
}
}
- if let Some(rule) = self.use_is_nan.as_ref() {
+ if let Some(rule) = self.use_hook_at_top_level.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[23]));
}
}
- if let Some(rule) = self.use_literal_enum_members.as_ref() {
+ if let Some(rule) = self.use_is_nan.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[24]));
}
}
- if let Some(rule) = self.use_literal_keys.as_ref() {
+ if let Some(rule) = self.use_literal_enum_members.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[25]));
}
}
- if let Some(rule) = self.use_naming_convention.as_ref() {
+ if let Some(rule) = self.use_literal_keys.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[26]));
}
}
- if let Some(rule) = self.use_simple_number_keys.as_ref() {
+ if let Some(rule) = self.use_naming_convention.as_ref() {
if rule.is_disabled() {
index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[27]));
}
}
+ if let Some(rule) = self.use_simple_number_keys.as_ref() {
+ if rule.is_disabled() {
+ index_set.insert(RuleFilter::Rule(Self::GROUP_NAME, Self::GROUP_RULES[28]));
+ }
+ }
index_set
}
#[doc = r" Checks if, given a rule name, matches one of the rules contained in this category"]
diff --git a/crates/rome_service/src/configuration/linter/rules.rs b/crates/rome_service/src/configuration/linter/rules.rs
--- a/crates/rome_service/src/configuration/linter/rules.rs
+++ b/crates/rome_service/src/configuration/linter/rules.rs
@@ -2344,10 +2367,10 @@ impl Nursery {
pub(crate) fn is_recommended_rule(rule_name: &str) -> bool {
Self::RECOMMENDED_RULES.contains(&rule_name)
}
- pub(crate) fn recommended_rules_as_filters() -> [RuleFilter<'static>; 16] {
+ pub(crate) fn recommended_rules_as_filters() -> [RuleFilter<'static>; 17] {
Self::RECOMMENDED_RULES_AS_FILTERS
}
- pub(crate) fn all_rules_as_filters() -> [RuleFilter<'static>; 28] { Self::ALL_RULES_AS_FILTERS }
+ pub(crate) fn all_rules_as_filters() -> [RuleFilter<'static>; 29] { Self::ALL_RULES_AS_FILTERS }
#[doc = r" Select preset rules"]
pub(crate) fn collect_preset_rules(
&self,
diff --git a/crates/rome_service/src/configuration/linter/rules.rs b/crates/rome_service/src/configuration/linter/rules.rs
--- a/crates/rome_service/src/configuration/linter/rules.rs
+++ b/crates/rome_service/src/configuration/linter/rules.rs
@@ -2380,6 +2403,7 @@ impl Nursery {
"noGlobalIsFinite" => self.no_global_is_finite.as_ref(),
"noGlobalIsNan" => self.no_global_is_nan.as_ref(),
"noNoninteractiveTabindex" => self.no_noninteractive_tabindex.as_ref(),
+ "noNonoctalDecimalEscape" => self.no_nonoctal_decimal_escape.as_ref(),
"noRedundantRoles" => self.no_redundant_roles.as_ref(),
"noSelfAssign" => self.no_self_assign.as_ref(),
"noStaticOnlyClass" => self.no_static_only_class.as_ref(),
diff --git a/crates/rome_service/src/configuration/parse/json/rules.rs b/crates/rome_service/src/configuration/parse/json/rules.rs
--- a/crates/rome_service/src/configuration/parse/json/rules.rs
+++ b/crates/rome_service/src/configuration/parse/json/rules.rs
@@ -1360,6 +1360,7 @@ impl VisitNode<JsonLanguage> for Nursery {
"noGlobalIsFinite",
"noGlobalIsNan",
"noNoninteractiveTabindex",
+ "noNonoctalDecimalEscape",
"noRedundantRoles",
"noSelfAssign",
"noStaticOnlyClass",
diff --git a/crates/rome_service/src/configuration/parse/json/rules.rs b/crates/rome_service/src/configuration/parse/json/rules.rs
--- a/crates/rome_service/src/configuration/parse/json/rules.rs
+++ b/crates/rome_service/src/configuration/parse/json/rules.rs
@@ -1611,6 +1612,24 @@ impl VisitNode<JsonLanguage> for Nursery {
));
}
},
+ "noNonoctalDecimalEscape" => match value {
+ AnyJsonValue::JsonStringValue(_) => {
+ let mut configuration = RuleConfiguration::default();
+ self.map_to_known_string(&value, name_text, &mut configuration, diagnostics)?;
+ self.no_nonoctal_decimal_escape = Some(configuration);
+ }
+ AnyJsonValue::JsonObjectValue(_) => {
+ let mut configuration = RuleConfiguration::default();
+ self.map_to_object(&value, name_text, &mut configuration, diagnostics)?;
+ self.no_nonoctal_decimal_escape = Some(configuration);
+ }
+ _ => {
+ diagnostics.push(DeserializationDiagnostic::new_incorrect_type(
+ "object or string",
+ value.range(),
+ ));
+ }
+ },
"noRedundantRoles" => match value {
AnyJsonValue::JsonStringValue(_) => {
let mut configuration = RuleConfiguration::default();
diff --git a/crates/rome_text_size/src/range.rs b/crates/rome_text_size/src/range.rs
--- a/crates/rome_text_size/src/range.rs
+++ b/crates/rome_text_size/src/range.rs
@@ -3,7 +3,9 @@ use cmp::Ordering;
use {
crate::TextSize,
std::{
- cmp, fmt,
+ cmp,
+ convert::TryFrom,
+ fmt,
ops::{Add, AddAssign, Bound, Index, IndexMut, Range, RangeBounds, Sub, SubAssign},
},
};
diff --git a/crates/rome_text_size/src/range.rs b/crates/rome_text_size/src/range.rs
--- a/crates/rome_text_size/src/range.rs
+++ b/crates/rome_text_size/src/range.rs
@@ -529,3 +531,14 @@ where
*self = *self - rhs
}
}
+
+impl TryFrom<(usize, usize)> for TextRange {
+ type Error = std::num::TryFromIntError;
+ #[inline]
+ fn try_from((start, end): (usize, usize)) -> Result<Self, Self::Error> {
+ Ok(TextRange::new(
+ TextSize::try_from(start)?,
+ TextSize::try_from(end)?,
+ ))
+ }
+}
diff --git a/editors/vscode/configuration_schema.json b/editors/vscode/configuration_schema.json
--- a/editors/vscode/configuration_schema.json
+++ b/editors/vscode/configuration_schema.json
@@ -824,6 +824,13 @@
{ "type": "null" }
]
},
+ "noNonoctalDecimalEscape": {
+ "description": "Disallow \\8 and \\9 escape sequences in string literals.",
+ "anyOf": [
+ { "$ref": "#/definitions/RuleConfiguration" },
+ { "type": "null" }
+ ]
+ },
"noRedundantRoles": {
"description": "Enforce explicit role property is not the same as implicit/default role property on an element.",
"anyOf": [
diff --git a/npm/backend-jsonrpc/src/workspace.ts b/npm/backend-jsonrpc/src/workspace.ts
--- a/npm/backend-jsonrpc/src/workspace.ts
+++ b/npm/backend-jsonrpc/src/workspace.ts
@@ -553,6 +553,10 @@ export interface Nursery {
* Enforce that tabIndex is not assigned to non-interactive HTML elements.
*/
noNoninteractiveTabindex?: RuleConfiguration;
+ /**
+ * Disallow \8 and \9 escape sequences in string literals.
+ */
+ noNonoctalDecimalEscape?: RuleConfiguration;
/**
* Enforce explicit role property is not the same as implicit/default role property on an element.
*/
diff --git a/npm/backend-jsonrpc/src/workspace.ts b/npm/backend-jsonrpc/src/workspace.ts
--- a/npm/backend-jsonrpc/src/workspace.ts
+++ b/npm/backend-jsonrpc/src/workspace.ts
@@ -1131,6 +1135,7 @@ export type Category =
| "lint/nursery/noGlobalIsFinite"
| "lint/nursery/useArrowFunction"
| "lint/nursery/noVoid"
+ | "lint/nursery/noNonoctalDecimalEscape"
| "lint/performance/noDelete"
| "lint/security/noDangerouslySetInnerHtml"
| "lint/security/noDangerouslySetInnerHtmlWithChildren"
diff --git a/npm/rome/configuration_schema.json b/npm/rome/configuration_schema.json
--- a/npm/rome/configuration_schema.json
+++ b/npm/rome/configuration_schema.json
@@ -824,6 +824,13 @@
{ "type": "null" }
]
},
+ "noNonoctalDecimalEscape": {
+ "description": "Disallow \\8 and \\9 escape sequences in string literals.",
+ "anyOf": [
+ { "$ref": "#/definitions/RuleConfiguration" },
+ { "type": "null" }
+ ]
+ },
"noRedundantRoles": {
"description": "Enforce explicit role property is not the same as implicit/default role property on an element.",
"anyOf": [
diff --git a/website/src/components/generated/NumberOfRules.astro b/website/src/components/generated/NumberOfRules.astro
--- a/website/src/components/generated/NumberOfRules.astro
+++ b/website/src/components/generated/NumberOfRules.astro
@@ -1,2 +1,2 @@
<!-- this file is auto generated, use `cargo lintdoc` to update it -->
- <p>Rome's linter has a total of <strong><a href='/lint/rules'>147 rules</a></strong><p>
\ No newline at end of file
+ <p>Rome's linter has a total of <strong><a href='/lint/rules'>148 rules</a></strong><p>
\ No newline at end of file
diff --git a/website/src/pages/lint/rules/index.mdx b/website/src/pages/lint/rules/index.mdx
--- a/website/src/pages/lint/rules/index.mdx
+++ b/website/src/pages/lint/rules/index.mdx
@@ -969,6 +969,12 @@ Use <code>Number.isNaN</code> instead of global <code>isNaN</code>.
Enforce that <code>tabIndex</code> is not assigned to non-interactive HTML elements.
</section>
<section class="rule">
+<h3 data-toc-exclude id="noNonoctalDecimalEscape">
+ <a href="/lint/rules/noNonoctalDecimalEscape">noNonoctalDecimalEscape</a>
+</h3>
+Disallow <code>\8</code> and <code>\9</code> escape sequences in string literals.
+</section>
+<section class="rule">
<h3 data-toc-exclude id="noRedundantRoles">
<a href="/lint/rules/noRedundantRoles">noRedundantRoles</a>
</h3>
| diff --git /dev/null b/crates/rome_js_analyze/src/analyzers/nursery/no_nonoctal_decimal_escape.rs
new file mode 100644
--- /dev/null
+++ b/crates/rome_js_analyze/src/analyzers/nursery/no_nonoctal_decimal_escape.rs
@@ -0,0 +1,381 @@
+use crate::JsRuleAction;
+use rome_analyze::{context::RuleContext, declare_rule, ActionCategory, Ast, Rule, RuleDiagnostic};
+use rome_console::markup;
+use rome_diagnostics::Applicability;
+use rome_js_factory::make;
+use rome_js_syntax::JsStringLiteralExpression;
+use rome_rowan::{AstNode, BatchMutationExt, TextRange};
+use std::ops::Range;
+
+declare_rule! {
+ /// Disallow `\8` and `\9` escape sequences in string literals.
+ ///
+ /// Since ECMAScript 2021, the escape sequences \8 and \9 have been defined as non-octal decimal escape sequences.
+ /// However, most JavaScript engines consider them to be "useless" escapes. For example:
+ ///
+ /// ```js
+ /// "\8" === "8"; // true
+ /// "\9" === "9"; // true
+ /// ```
+ ///
+ /// Although this syntax is deprecated, it is still supported for compatibility reasons.
+ /// If the ECMAScript host is not a web browser, this syntax is optional.
+ /// However, web browsers are still required to support it, but only in non-strict mode.
+ /// Regardless of your targeted environment, it is recommended to avoid using these escape sequences in new code.
+ ///
+ /// Source: https://eslint.org/docs/latest/rules/no-nonoctal-decimal-escape
+ ///
+ /// ## Examples
+ ///
+ /// ### Invalid
+ ///
+ /// ```js,expect_diagnostic
+ /// const x = "\8";
+ /// ```
+ ///
+ /// ```js,expect_diagnostic
+ /// const x = "Don't use \8 and \9 escapes.";
+ /// ```
+ ///
+ /// ```js,expect_diagnostic
+ /// const x = "\0\8";
+ /// ```
+ ///
+ /// ## Valid
+ ///
+ /// ```js
+ /// const x = "8";
+ /// ```
+ ///
+ /// ```js
+ /// const x = "Don't use \\8 and \\9 escapes.";
+ /// ```
+ ///
+ /// ```js
+ /// const x = "\0\u0038";;
+ /// ```
+ ///
+ pub(crate) NoNonoctalDecimalEscape {
+ version: "next",
+ name: "noNonoctalDecimalEscape",
+ recommended: true,
+ }
+}
+
+#[derive(Debug)]
+pub(crate) enum FixSuggestionKind {
+ Refactor,
+ EscapeBackslash,
+}
+
+#[derive(Debug)]
+pub(crate) struct RuleState {
+ kind: FixSuggestionKind,
+ diagnostics_text_range: TextRange,
+ replace_from: String,
+ replace_to: String,
+ replace_string_range: Range<usize>,
+}
+
+impl Rule for NoNonoctalDecimalEscape {
+ type Query = Ast<JsStringLiteralExpression>;
+ type State = RuleState;
+ type Signals = Vec<Self::State>;
+ type Options = ();
+
+ fn run(ctx: &RuleContext<Self>) -> Self::Signals {
+ let node = ctx.query();
+ let mut signals: Self::Signals = Vec::new();
+ let Some(token) = node.value_token().ok() else {
+ return signals
+ };
+ let text = token.text();
+ if !is_octal_escape_sequence(text) {
+ return signals;
+ }
+ let matches = lex_escape_sequences(text);
+
+ for EscapeSequence {
+ previous_escape,
+ decimal_escape,
+ decimal_escape_range: (decimal_escape_string_start, decimal_escape_string_end),
+ } in matches.iter()
+ {
+ let text_range_start = usize::from(node.range().start());
+ let decimal_escape_range_start = text_range_start + decimal_escape_string_start;
+ let decimal_escape_range_end = decimal_escape_range_start + decimal_escape.len();
+ let Some(decimal_escape_range) =
+ TextRange::try_from((decimal_escape_range_start, decimal_escape_range_end)).ok() else { continue };
+
+ let Some(decimal_char) = decimal_escape.chars().nth(1) else { continue };
+
+ let replace_string_range = *decimal_escape_string_start..*decimal_escape_string_end;
+
+ if let Some(previous_escape) = previous_escape {
+ if *previous_escape == "\\0" {
+ if let Some(unicode_escape) = get_unicode_escape('\0') {
+ let Some(previous_escape_range_start) = text.find(previous_escape) else { continue };
+ let Some(unicode_escape_text_range) = TextRange::try_from((
+ text_range_start + previous_escape_range_start,
+ decimal_escape_range_end
+ )).ok() else { continue };
+
+ let replace_string_range =
+ previous_escape_range_start..*decimal_escape_string_end;
+
+ // \0\8 -> \u00008
+ signals.push(RuleState {
+ kind: FixSuggestionKind::Refactor,
+ diagnostics_text_range: unicode_escape_text_range,
+ replace_from: format!("{previous_escape}{decimal_escape}"),
+ replace_to: format!("{unicode_escape}{decimal_char}"),
+ replace_string_range,
+ });
+ }
+
+ let Some(decimal_char_unicode_escaped) = get_unicode_escape(decimal_char) else { continue };
+ // \8 -> \u0038
+ signals.push(RuleState {
+ kind: FixSuggestionKind::Refactor,
+ diagnostics_text_range: decimal_escape_range,
+ replace_from: decimal_escape.to_string(),
+ replace_to: decimal_char_unicode_escaped,
+ replace_string_range: replace_string_range.clone(),
+ });
+ } else {
+ // \8 -> 8
+ signals.push(RuleState {
+ kind: FixSuggestionKind::Refactor,
+ diagnostics_text_range: decimal_escape_range,
+ replace_from: decimal_escape.to_string(),
+ replace_to: decimal_char.to_string(),
+ replace_string_range: replace_string_range.clone(),
+ })
+ }
+ }
+ // \8 -> \\8
+ signals.push(RuleState {
+ kind: FixSuggestionKind::EscapeBackslash,
+ diagnostics_text_range: decimal_escape_range,
+ replace_to: format!("\\{}", decimal_escape),
+ replace_from: decimal_escape.to_string(),
+ replace_string_range,
+ });
+ }
+ signals
+ }
+
+ fn diagnostic(
+ _: &RuleContext<Self>,
+ RuleState {
+ diagnostics_text_range,
+ ..
+ }: &Self::State,
+ ) -> Option<RuleDiagnostic> {
+ Some(RuleDiagnostic::new(
+ rule_category!(),
+ diagnostics_text_range,
+ markup! {
+ "Don't use "<Emphasis>"`\\8`"</Emphasis>" and "<Emphasis>"`\\9`"</Emphasis>" escape sequences in string literals."
+ },
+ ).note(
+ markup! {
+ "The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used."
+ }
+ ))
+ }
+
+ fn action(
+ ctx: &RuleContext<Self>,
+ RuleState {
+ kind,
+ replace_from,
+ replace_to,
+ replace_string_range,
+ ..
+ }: &Self::State,
+ ) -> Option<JsRuleAction> {
+ let mut mutation = ctx.root().begin();
+ let node = ctx.query();
+ let prev_token = node.value_token().ok()?;
+ let replaced = safe_replace_by_range(
+ prev_token.text().to_string(),
+ replace_string_range.to_owned(),
+ replace_to,
+ )?;
+
+ let next_token = make::ident(&replaced);
+
+ mutation.replace_token(prev_token, next_token);
+
+ Some(JsRuleAction {
+ category: ActionCategory::QuickFix,
+ applicability: Applicability::MaybeIncorrect,
+ message: match kind {
+ FixSuggestionKind::Refactor => {
+ markup! ("Replace "<Emphasis>{replace_from}</Emphasis>" with "<Emphasis>{replace_to}</Emphasis>". This maintains the current functionality.").to_owned()
+ }
+ FixSuggestionKind::EscapeBackslash => {
+ markup! ("Replace "<Emphasis>{replace_from}</Emphasis>" with "<Emphasis>{replace_to}</Emphasis>" to include the actual backslash character." ).to_owned()
+ }
+ },
+ mutation,
+ })
+ }
+}
+
+fn safe_replace_by_range(
+ mut target: String,
+ range: Range<usize>,
+ replace_with: &str,
+) -> Option<String> {
+ debug_assert!(target.len() >= range.end, "Range out of bounds");
+ debug_assert!(
+ target.is_char_boundary(range.start) && target.is_char_boundary(range.end),
+ "Range does not fall on char boundary"
+ );
+ target.replace_range(range, replace_with);
+ Some(target)
+}
+
+/// Returns true if input is octal decimal escape sequence and is not in JavaScript regular expression
+fn is_octal_escape_sequence(input: &str) -> bool {
+ let mut in_regex = false;
+ let mut prev_char_was_escape = false;
+ for ch in input.chars() {
+ match ch {
+ '/' if !prev_char_was_escape => in_regex = !in_regex,
+ '8' | '9' if prev_char_was_escape && !in_regex => return true,
+ '\\' => prev_char_was_escape = !prev_char_was_escape,
+ _ => prev_char_was_escape = false,
+ }
+ }
+ false
+}
+
+#[derive(Debug, PartialEq)]
+struct EscapeSequence {
+ previous_escape: Option<String>,
+ decimal_escape: String,
+ /// The range of the decimal escape sequence in the string literal
+ decimal_escape_range: (usize, usize),
+}
+
+/// Returns a list of escape sequences in the given string literal
+fn lex_escape_sequences(input: &str) -> Vec<EscapeSequence> {
+ let mut result = Vec::new();
+ let mut previous_escape = None;
+ let mut decimal_escape_start = None;
+ let mut chars = input.char_indices().peekable();
+
+ while let Some((i, ch)) = chars.next() {
+ match ch {
+ '\\' => match chars.peek() {
+ Some((_, '0')) => {
+ previous_escape = Some("\\0".to_string());
+ // Consume '0'
+ let _ = chars.next();
+ }
+ Some((_, '8'..='9')) => {
+ decimal_escape_start = Some(i);
+ }
+ _ => (),
+ },
+ '8' | '9' if decimal_escape_start.is_some() => {
+ result.push(EscapeSequence {
+ previous_escape: previous_escape.take(),
+ decimal_escape: match ch {
+ '8' => "\\8".to_string(),
+ '9' => "\\9".to_string(),
+ _ => unreachable!(),
+ },
+ decimal_escape_range: (decimal_escape_start.unwrap(), i + ch.len_utf8()),
+ });
+ decimal_escape_start = None;
+ }
+ _ => previous_escape = Some(ch.to_string()),
+ }
+ }
+ result
+}
+
+/// Returns unicode escape sequence "\uXXXX" that represents the given character
+pub(crate) fn get_unicode_escape(ch: char) -> Option<String> {
+ Some(format!("\\u{:04x}", ch as u32))
+}
+
+#[cfg(test)]
+mod tests {
+ use super::*;
+
+ #[test]
+ fn test_is_octal_escape_sequence() {
+ assert!(!is_octal_escape_sequence(""));
+ assert!(!is_octal_escape_sequence("Hello World!"));
+ assert!(!is_octal_escape_sequence("\\0"));
+ assert!(!is_octal_escape_sequence("\\7"));
+ assert!(is_octal_escape_sequence("\\8"));
+ assert!(is_octal_escape_sequence("\\9"));
+ assert!(!is_octal_escape_sequence("/\\8/"));
+ assert!(!is_octal_escape_sequence("/\\9/"));
+ assert!(is_octal_escape_sequence("\\0\\8"));
+ assert!(is_octal_escape_sequence("\\7\\9"));
+ }
+
+ #[test]
+ fn test_get_unicode_escape() {
+ assert_eq!(get_unicode_escape('\0'), Some("\\u0000".to_string()));
+ assert_eq!(get_unicode_escape('a'), Some("\\u0061".to_string()));
+ assert_eq!(get_unicode_escape('👍'), Some("\\u1f44d".to_string()));
+ }
+
+ #[test]
+ fn test_parse_escape_sequences() {
+ assert_eq!(
+ lex_escape_sequences("test\\8\\9"),
+ vec![
+ EscapeSequence {
+ previous_escape: Some("t".to_string()),
+ decimal_escape: "\\8".to_string(),
+ decimal_escape_range: (4, 6)
+ },
+ EscapeSequence {
+ previous_escape: None,
+ decimal_escape: "\\9".to_string(),
+ decimal_escape_range: (6, 8)
+ }
+ ]
+ );
+ assert_eq!(
+ lex_escape_sequences("\\0\\8"),
+ vec![EscapeSequence {
+ previous_escape: Some("\\0".to_string()),
+ decimal_escape: "\\8".to_string(),
+ decimal_escape_range: (2, 4)
+ },]
+ );
+ assert_eq!(
+ lex_escape_sequences("👍\\8\\9"),
+ vec![
+ EscapeSequence {
+ previous_escape: Some("👍".to_string()),
+ decimal_escape: "\\8".to_string(),
+ decimal_escape_range: (4, 6)
+ },
+ EscapeSequence {
+ previous_escape: None,
+ decimal_escape: "\\9".to_string(),
+ decimal_escape_range: (6, 8)
+ }
+ ]
+ );
+ assert_eq!(
+ lex_escape_sequences("\\\\ \\8"),
+ vec![EscapeSequence {
+ previous_escape: Some(" ".to_string()),
+ decimal_escape: "\\8".to_string(),
+ decimal_escape_range: (3, 5)
+ },]
+ )
+ }
+}
diff --git /dev/null b/crates/rome_js_analyze/tests/specs/nursery/noNonoctalDecimalEscape/invalid.js
new file mode 100644
--- /dev/null
+++ b/crates/rome_js_analyze/tests/specs/nursery/noNonoctalDecimalEscape/invalid.js
@@ -0,0 +1,44 @@
+let x = "\8"
+let x = "\9"
+let x = "\"\8\""
+let x = "f\9"
+let x = "fo\9"
+let x = "foo\9"
+let x = "foo\8bar"
+let x = "👍\8"
+let x = "\\\8"
+let x = "\\\\\9"
+let x = "foo\\\8"
+let x = "\\ \8"
+let x = "\\1\9"
+let x = "foo\\1\9"
+let x = "\\n\\n\8\\n"
+let x = "\\n.\\n\8\\n"
+let x = "\\n.\\nn\8\\n"
+let x = "\\👍\8"
+let x = "\\\8\9"
+let x = "\8\\\9"
+let x = "\8 \\\9"
+let x = "\8\8"
+let x = "\9\8"
+let x = "foo\8bar\9baz"
+let x = "\8\\1\9"
+let x = "\9\\n9\\\9\9"
+let x = "\8\\\\\9"
+let x = "var foo = '\8'; bar('\9')"
+let x = "var foo = '8'\n bar = '\9'"
+let x = "\\\n\8"
+let x = "\\\r\n\9"
+let x = "\\\\\n\8"
+let x = "foo\\\nbar\9baz"
+let x = "\\0\8"
+let x = "foo\\0\9bar"
+let x = "\\1\\0\8"
+let x = "\\0\8\9"
+let x = "\8\\0\9"
+let x = "0\8"
+let x = "\\0\8"
+let x = "\0 \8"
+let x = "\01\8"
+let x = "\0\1\8"
+let x = "\0\\n\8"
diff --git /dev/null b/crates/rome_js_analyze/tests/specs/nursery/noNonoctalDecimalEscape/invalid.js.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_js_analyze/tests/specs/nursery/noNonoctalDecimalEscape/invalid.js.snap
@@ -0,0 +1,2579 @@
+---
+source: crates/rome_js_analyze/tests/spec_tests.rs
+expression: invalid.js
+---
+# Input
+```js
+let x = "\8"
+let x = "\9"
+let x = "\"\8\""
+let x = "f\9"
+let x = "fo\9"
+let x = "foo\9"
+let x = "foo\8bar"
+let x = "👍\8"
+let x = "\\\8"
+let x = "\\\\\9"
+let x = "foo\\\8"
+let x = "\\ \8"
+let x = "\\1\9"
+let x = "foo\\1\9"
+let x = "\\n\\n\8\\n"
+let x = "\\n.\\n\8\\n"
+let x = "\\n.\\nn\8\\n"
+let x = "\\👍\8"
+let x = "\\\8\9"
+let x = "\8\\\9"
+let x = "\8 \\\9"
+let x = "\8\8"
+let x = "\9\8"
+let x = "foo\8bar\9baz"
+let x = "\8\\1\9"
+let x = "\9\\n9\\\9\9"
+let x = "\8\\\\\9"
+let x = "var foo = '\8'; bar('\9')"
+let x = "var foo = '8'\n bar = '\9'"
+let x = "\\\n\8"
+let x = "\\\r\n\9"
+let x = "\\\\\n\8"
+let x = "foo\\\nbar\9baz"
+let x = "\\0\8"
+let x = "foo\\0\9bar"
+let x = "\\1\\0\8"
+let x = "\\0\8\9"
+let x = "\8\\0\9"
+let x = "0\8"
+let x = "\\0\8"
+let x = "\0 \8"
+let x = "\01\8"
+let x = "\0\1\8"
+let x = "\0\\n\8"
+
+```
+
+# Diagnostics
+```
+invalid.js:1:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ > 1 │ let x = "\8"
+ │ ^^
+ 2 │ let x = "\9"
+ 3 │ let x = "\"\8\""
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 1 │ let·x·=·"\8"
+ │ -
+
+```
+
+```
+invalid.js:1:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ > 1 │ let x = "\8"
+ │ ^^
+ 2 │ let x = "\9"
+ 3 │ let x = "\"\8\""
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 1 │ let·x·=·"\\8"
+ │ +
+
+```
+
+```
+invalid.js:2:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 1 │ let x = "\8"
+ > 2 │ let x = "\9"
+ │ ^^
+ 3 │ let x = "\"\8\""
+ 4 │ let x = "f\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 2 │ let·x·=·"\9"
+ │ -
+
+```
+
+```
+invalid.js:2:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 1 │ let x = "\8"
+ > 2 │ let x = "\9"
+ │ ^^
+ 3 │ let x = "\"\8\""
+ 4 │ let x = "f\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 2 │ let·x·=·"\\9"
+ │ +
+
+```
+
+```
+invalid.js:3:12 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 1 │ let x = "\8"
+ 2 │ let x = "\9"
+ > 3 │ let x = "\"\8\""
+ │ ^^
+ 4 │ let x = "f\9"
+ 5 │ let x = "fo\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 3 │ let·x·=·"\"\8\""
+ │ -
+
+```
+
+```
+invalid.js:3:12 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 1 │ let x = "\8"
+ 2 │ let x = "\9"
+ > 3 │ let x = "\"\8\""
+ │ ^^
+ 4 │ let x = "f\9"
+ 5 │ let x = "fo\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 3 │ let·x·=·"\"\\8\""
+ │ +
+
+```
+
+```
+invalid.js:4:11 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 2 │ let x = "\9"
+ 3 │ let x = "\"\8\""
+ > 4 │ let x = "f\9"
+ │ ^^
+ 5 │ let x = "fo\9"
+ 6 │ let x = "foo\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 2 2 │ let x = "\9"
+ 3 3 │ let x = "\"\8\""
+ 4 │ - let·x·=·"f\9"
+ 4 │ + let·x·=·"f9"
+ 5 5 │ let x = "fo\9"
+ 6 6 │ let x = "foo\9"
+
+
+```
+
+```
+invalid.js:4:11 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 2 │ let x = "\9"
+ 3 │ let x = "\"\8\""
+ > 4 │ let x = "f\9"
+ │ ^^
+ 5 │ let x = "fo\9"
+ 6 │ let x = "foo\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 4 │ let·x·=·"f\\9"
+ │ +
+
+```
+
+```
+invalid.js:5:12 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 3 │ let x = "\"\8\""
+ 4 │ let x = "f\9"
+ > 5 │ let x = "fo\9"
+ │ ^^
+ 6 │ let x = "foo\9"
+ 7 │ let x = "foo\8bar"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 3 3 │ let x = "\"\8\""
+ 4 4 │ let x = "f\9"
+ 5 │ - let·x·=·"fo\9"
+ 5 │ + let·x·=·"fo9"
+ 6 6 │ let x = "foo\9"
+ 7 7 │ let x = "foo\8bar"
+
+
+```
+
+```
+invalid.js:5:12 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 3 │ let x = "\"\8\""
+ 4 │ let x = "f\9"
+ > 5 │ let x = "fo\9"
+ │ ^^
+ 6 │ let x = "foo\9"
+ 7 │ let x = "foo\8bar"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 5 │ let·x·=·"fo\\9"
+ │ +
+
+```
+
+```
+invalid.js:6:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 4 │ let x = "f\9"
+ 5 │ let x = "fo\9"
+ > 6 │ let x = "foo\9"
+ │ ^^
+ 7 │ let x = "foo\8bar"
+ 8 │ let x = "👍\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 4 4 │ let x = "f\9"
+ 5 5 │ let x = "fo\9"
+ 6 │ - let·x·=·"foo\9"
+ 6 │ + let·x·=·"foo9"
+ 7 7 │ let x = "foo\8bar"
+ 8 8 │ let x = "👍\8"
+
+
+```
+
+```
+invalid.js:6:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 4 │ let x = "f\9"
+ 5 │ let x = "fo\9"
+ > 6 │ let x = "foo\9"
+ │ ^^
+ 7 │ let x = "foo\8bar"
+ 8 │ let x = "👍\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 6 │ let·x·=·"foo\\9"
+ │ +
+
+```
+
+```
+invalid.js:7:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 5 │ let x = "fo\9"
+ 6 │ let x = "foo\9"
+ > 7 │ let x = "foo\8bar"
+ │ ^^
+ 8 │ let x = "👍\8"
+ 9 │ let x = "\\\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 5 5 │ let x = "fo\9"
+ 6 6 │ let x = "foo\9"
+ 7 │ - let·x·=·"foo\8bar"
+ 7 │ + let·x·=·"foo8bar"
+ 8 8 │ let x = "👍\8"
+ 9 9 │ let x = "\\\8"
+
+
+```
+
+```
+invalid.js:7:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 5 │ let x = "fo\9"
+ 6 │ let x = "foo\9"
+ > 7 │ let x = "foo\8bar"
+ │ ^^
+ 8 │ let x = "👍\8"
+ 9 │ let x = "\\\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 7 │ let·x·=·"foo\\8bar"
+ │ +
+
+```
+
+```
+invalid.js:8:11 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 6 │ let x = "foo\9"
+ 7 │ let x = "foo\8bar"
+ > 8 │ let x = "👍\8"
+ │ ^^
+ 9 │ let x = "\\\8"
+ 10 │ let x = "\\\\\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 8 │ let·x·=·"👍\8"
+ │ -
+
+```
+
+```
+invalid.js:8:11 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 6 │ let x = "foo\9"
+ 7 │ let x = "foo\8bar"
+ > 8 │ let x = "👍\8"
+ │ ^^
+ 9 │ let x = "\\\8"
+ 10 │ let x = "\\\\\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 8 │ let·x·=·"👍\\8"
+ │ +
+
+```
+
+```
+invalid.js:9:12 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 7 │ let x = "foo\8bar"
+ 8 │ let x = "👍\8"
+ > 9 │ let x = "\\\8"
+ │ ^^
+ 10 │ let x = "\\\\\9"
+ 11 │ let x = "foo\\\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 9 │ let·x·=·"\\\8"
+ │ -
+
+```
+
+```
+invalid.js:9:12 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 7 │ let x = "foo\8bar"
+ 8 │ let x = "👍\8"
+ > 9 │ let x = "\\\8"
+ │ ^^
+ 10 │ let x = "\\\\\9"
+ 11 │ let x = "foo\\\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 9 │ let·x·=·"\\\\8"
+ │ +
+
+```
+
+```
+invalid.js:10:14 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 8 │ let x = "👍\8"
+ 9 │ let x = "\\\8"
+ > 10 │ let x = "\\\\\9"
+ │ ^^
+ 11 │ let x = "foo\\\8"
+ 12 │ let x = "\\ \8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 10 │ let·x·=·"\\\\\9"
+ │ -
+
+```
+
+```
+invalid.js:10:14 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 8 │ let x = "👍\8"
+ 9 │ let x = "\\\8"
+ > 10 │ let x = "\\\\\9"
+ │ ^^
+ 11 │ let x = "foo\\\8"
+ 12 │ let x = "\\ \8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 10 │ let·x·=·"\\\\\\9"
+ │ +
+
+```
+
+```
+invalid.js:11:15 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 9 │ let x = "\\\8"
+ 10 │ let x = "\\\\\9"
+ > 11 │ let x = "foo\\\8"
+ │ ^^
+ 12 │ let x = "\\ \8"
+ 13 │ let x = "\\1\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 11 │ let·x·=·"foo\\\8"
+ │ -
+
+```
+
+```
+invalid.js:11:15 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 9 │ let x = "\\\8"
+ 10 │ let x = "\\\\\9"
+ > 11 │ let x = "foo\\\8"
+ │ ^^
+ 12 │ let x = "\\ \8"
+ 13 │ let x = "\\1\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 11 │ let·x·=·"foo\\\\8"
+ │ +
+
+```
+
+```
+invalid.js:12:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 10 │ let x = "\\\\\9"
+ 11 │ let x = "foo\\\8"
+ > 12 │ let x = "\\ \8"
+ │ ^^
+ 13 │ let x = "\\1\9"
+ 14 │ let x = "foo\\1\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 12 │ let·x·=·"\\·\8"
+ │ -
+
+```
+
+```
+invalid.js:12:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 10 │ let x = "\\\\\9"
+ 11 │ let x = "foo\\\8"
+ > 12 │ let x = "\\ \8"
+ │ ^^
+ 13 │ let x = "\\1\9"
+ 14 │ let x = "foo\\1\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 12 │ let·x·=·"\\·\\8"
+ │ +
+
+```
+
+```
+invalid.js:13:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 11 │ let x = "foo\\\8"
+ 12 │ let x = "\\ \8"
+ > 13 │ let x = "\\1\9"
+ │ ^^
+ 14 │ let x = "foo\\1\9"
+ 15 │ let x = "\\n\\n\8\\n"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 11 11 │ let x = "foo\\\8"
+ 12 12 │ let x = "\\ \8"
+ 13 │ - let·x·=·"\\1\9"
+ 13 │ + let·x·=·"\\19"
+ 14 14 │ let x = "foo\\1\9"
+ 15 15 │ let x = "\\n\\n\8\\n"
+
+
+```
+
+```
+invalid.js:13:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 11 │ let x = "foo\\\8"
+ 12 │ let x = "\\ \8"
+ > 13 │ let x = "\\1\9"
+ │ ^^
+ 14 │ let x = "foo\\1\9"
+ 15 │ let x = "\\n\\n\8\\n"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 13 │ let·x·=·"\\1\\9"
+ │ +
+
+```
+
+```
+invalid.js:14:16 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 12 │ let x = "\\ \8"
+ 13 │ let x = "\\1\9"
+ > 14 │ let x = "foo\\1\9"
+ │ ^^
+ 15 │ let x = "\\n\\n\8\\n"
+ 16 │ let x = "\\n.\\n\8\\n"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 12 12 │ let x = "\\ \8"
+ 13 13 │ let x = "\\1\9"
+ 14 │ - let·x·=·"foo\\1\9"
+ 14 │ + let·x·=·"foo\\19"
+ 15 15 │ let x = "\\n\\n\8\\n"
+ 16 16 │ let x = "\\n.\\n\8\\n"
+
+
+```
+
+```
+invalid.js:14:16 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 12 │ let x = "\\ \8"
+ 13 │ let x = "\\1\9"
+ > 14 │ let x = "foo\\1\9"
+ │ ^^
+ 15 │ let x = "\\n\\n\8\\n"
+ 16 │ let x = "\\n.\\n\8\\n"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 14 │ let·x·=·"foo\\1\\9"
+ │ +
+
+```
+
+```
+invalid.js:15:16 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 13 │ let x = "\\1\9"
+ 14 │ let x = "foo\\1\9"
+ > 15 │ let x = "\\n\\n\8\\n"
+ │ ^^
+ 16 │ let x = "\\n.\\n\8\\n"
+ 17 │ let x = "\\n.\\nn\8\\n"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 13 13 │ let x = "\\1\9"
+ 14 14 │ let x = "foo\\1\9"
+ 15 │ - let·x·=·"\\n\\n\8\\n"
+ 15 │ + let·x·=·"\\n\\n8\\n"
+ 16 16 │ let x = "\\n.\\n\8\\n"
+ 17 17 │ let x = "\\n.\\nn\8\\n"
+
+
+```
+
+```
+invalid.js:15:16 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 13 │ let x = "\\1\9"
+ 14 │ let x = "foo\\1\9"
+ > 15 │ let x = "\\n\\n\8\\n"
+ │ ^^
+ 16 │ let x = "\\n.\\n\8\\n"
+ 17 │ let x = "\\n.\\nn\8\\n"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 15 │ let·x·=·"\\n\\n\\8\\n"
+ │ +
+
+```
+
+```
+invalid.js:16:17 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 14 │ let x = "foo\\1\9"
+ 15 │ let x = "\\n\\n\8\\n"
+ > 16 │ let x = "\\n.\\n\8\\n"
+ │ ^^
+ 17 │ let x = "\\n.\\nn\8\\n"
+ 18 │ let x = "\\👍\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 14 14 │ let x = "foo\\1\9"
+ 15 15 │ let x = "\\n\\n\8\\n"
+ 16 │ - let·x·=·"\\n.\\n\8\\n"
+ 16 │ + let·x·=·"\\n.\\n8\\n"
+ 17 17 │ let x = "\\n.\\nn\8\\n"
+ 18 18 │ let x = "\\👍\8"
+
+
+```
+
+```
+invalid.js:16:17 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 14 │ let x = "foo\\1\9"
+ 15 │ let x = "\\n\\n\8\\n"
+ > 16 │ let x = "\\n.\\n\8\\n"
+ │ ^^
+ 17 │ let x = "\\n.\\nn\8\\n"
+ 18 │ let x = "\\👍\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 16 │ let·x·=·"\\n.\\n\\8\\n"
+ │ +
+
+```
+
+```
+invalid.js:17:18 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 15 │ let x = "\\n\\n\8\\n"
+ 16 │ let x = "\\n.\\n\8\\n"
+ > 17 │ let x = "\\n.\\nn\8\\n"
+ │ ^^
+ 18 │ let x = "\\👍\8"
+ 19 │ let x = "\\\8\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 15 15 │ let x = "\\n\\n\8\\n"
+ 16 16 │ let x = "\\n.\\n\8\\n"
+ 17 │ - let·x·=·"\\n.\\nn\8\\n"
+ 17 │ + let·x·=·"\\n.\\nn8\\n"
+ 18 18 │ let x = "\\👍\8"
+ 19 19 │ let x = "\\\8\9"
+
+
+```
+
+```
+invalid.js:17:18 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 15 │ let x = "\\n\\n\8\\n"
+ 16 │ let x = "\\n.\\n\8\\n"
+ > 17 │ let x = "\\n.\\nn\8\\n"
+ │ ^^
+ 18 │ let x = "\\👍\8"
+ 19 │ let x = "\\\8\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 17 │ let·x·=·"\\n.\\nn\\8\\n"
+ │ +
+
+```
+
+```
+invalid.js:18:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 16 │ let x = "\\n.\\n\8\\n"
+ 17 │ let x = "\\n.\\nn\8\\n"
+ > 18 │ let x = "\\👍\8"
+ │ ^^
+ 19 │ let x = "\\\8\9"
+ 20 │ let x = "\8\\\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 18 │ let·x·=·"\\👍\8"
+ │ -
+
+```
+
+```
+invalid.js:18:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 16 │ let x = "\\n.\\n\8\\n"
+ 17 │ let x = "\\n.\\nn\8\\n"
+ > 18 │ let x = "\\👍\8"
+ │ ^^
+ 19 │ let x = "\\\8\9"
+ 20 │ let x = "\8\\\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 18 │ let·x·=·"\\👍\\8"
+ │ +
+
+```
+
+```
+invalid.js:19:12 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 17 │ let x = "\\n.\\nn\8\\n"
+ 18 │ let x = "\\👍\8"
+ > 19 │ let x = "\\\8\9"
+ │ ^^
+ 20 │ let x = "\8\\\9"
+ 21 │ let x = "\8 \\\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 19 │ let·x·=·"\\\8\9"
+ │ -
+
+```
+
+```
+invalid.js:19:12 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 17 │ let x = "\\n.\\nn\8\\n"
+ 18 │ let x = "\\👍\8"
+ > 19 │ let x = "\\\8\9"
+ │ ^^
+ 20 │ let x = "\8\\\9"
+ 21 │ let x = "\8 \\\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 19 │ let·x·=·"\\\\8\9"
+ │ +
+
+```
+
+```
+invalid.js:19:14 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 17 │ let x = "\\n.\\nn\8\\n"
+ 18 │ let x = "\\👍\8"
+ > 19 │ let x = "\\\8\9"
+ │ ^^
+ 20 │ let x = "\8\\\9"
+ 21 │ let x = "\8 \\\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 19 │ let·x·=·"\\\8\\9"
+ │ +
+
+```
+
+```
+invalid.js:20:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 18 │ let x = "\\👍\8"
+ 19 │ let x = "\\\8\9"
+ > 20 │ let x = "\8\\\9"
+ │ ^^
+ 21 │ let x = "\8 \\\9"
+ 22 │ let x = "\8\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 20 │ let·x·=·"\8\\\9"
+ │ -
+
+```
+
+```
+invalid.js:20:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 18 │ let x = "\\👍\8"
+ 19 │ let x = "\\\8\9"
+ > 20 │ let x = "\8\\\9"
+ │ ^^
+ 21 │ let x = "\8 \\\9"
+ 22 │ let x = "\8\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 20 │ let·x·=·"\\8\\\9"
+ │ +
+
+```
+
+```
+invalid.js:20:14 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 18 │ let x = "\\👍\8"
+ 19 │ let x = "\\\8\9"
+ > 20 │ let x = "\8\\\9"
+ │ ^^
+ 21 │ let x = "\8 \\\9"
+ 22 │ let x = "\8\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 20 │ let·x·=·"\8\\\\9"
+ │ +
+
+```
+
+```
+invalid.js:21:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 19 │ let x = "\\\8\9"
+ 20 │ let x = "\8\\\9"
+ > 21 │ let x = "\8 \\\9"
+ │ ^^
+ 22 │ let x = "\8\8"
+ 23 │ let x = "\9\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 21 │ let·x·=·"\8·\\\9"
+ │ -
+
+```
+
+```
+invalid.js:21:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 19 │ let x = "\\\8\9"
+ 20 │ let x = "\8\\\9"
+ > 21 │ let x = "\8 \\\9"
+ │ ^^
+ 22 │ let x = "\8\8"
+ 23 │ let x = "\9\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 21 │ let·x·=·"\\8·\\\9"
+ │ +
+
+```
+
+```
+invalid.js:21:15 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 19 │ let x = "\\\8\9"
+ 20 │ let x = "\8\\\9"
+ > 21 │ let x = "\8 \\\9"
+ │ ^^
+ 22 │ let x = "\8\8"
+ 23 │ let x = "\9\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 21 │ let·x·=·"\8·\\\\9"
+ │ +
+
+```
+
+```
+invalid.js:21:15 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 19 │ let x = "\\\8\9"
+ 20 │ let x = "\8\\\9"
+ > 21 │ let x = "\8 \\\9"
+ │ ^^
+ 22 │ let x = "\8\8"
+ 23 │ let x = "\9\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 21 │ let·x·=·"\8·\\\9"
+ │ -
+
+```
+
+```
+invalid.js:22:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 20 │ let x = "\8\\\9"
+ 21 │ let x = "\8 \\\9"
+ > 22 │ let x = "\8\8"
+ │ ^^
+ 23 │ let x = "\9\8"
+ 24 │ let x = "foo\8bar\9baz"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 22 │ let·x·=·"\8\8"
+ │ -
+
+```
+
+```
+invalid.js:22:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 20 │ let x = "\8\\\9"
+ 21 │ let x = "\8 \\\9"
+ > 22 │ let x = "\8\8"
+ │ ^^
+ 23 │ let x = "\9\8"
+ 24 │ let x = "foo\8bar\9baz"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 22 │ let·x·=·"\\8\8"
+ │ +
+
+```
+
+```
+invalid.js:22:12 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 20 │ let x = "\8\\\9"
+ 21 │ let x = "\8 \\\9"
+ > 22 │ let x = "\8\8"
+ │ ^^
+ 23 │ let x = "\9\8"
+ 24 │ let x = "foo\8bar\9baz"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 22 │ let·x·=·"\8\\8"
+ │ +
+
+```
+
+```
+invalid.js:23:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 21 │ let x = "\8 \\\9"
+ 22 │ let x = "\8\8"
+ > 23 │ let x = "\9\8"
+ │ ^^
+ 24 │ let x = "foo\8bar\9baz"
+ 25 │ let x = "\8\\1\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 23 │ let·x·=·"\9\8"
+ │ -
+
+```
+
+```
+invalid.js:23:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 21 │ let x = "\8 \\\9"
+ 22 │ let x = "\8\8"
+ > 23 │ let x = "\9\8"
+ │ ^^
+ 24 │ let x = "foo\8bar\9baz"
+ 25 │ let x = "\8\\1\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 23 │ let·x·=·"\\9\8"
+ │ +
+
+```
+
+```
+invalid.js:23:12 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 21 │ let x = "\8 \\\9"
+ 22 │ let x = "\8\8"
+ > 23 │ let x = "\9\8"
+ │ ^^
+ 24 │ let x = "foo\8bar\9baz"
+ 25 │ let x = "\8\\1\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 23 │ let·x·=·"\9\\8"
+ │ +
+
+```
+
+```
+invalid.js:24:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 22 │ let x = "\8\8"
+ 23 │ let x = "\9\8"
+ > 24 │ let x = "foo\8bar\9baz"
+ │ ^^
+ 25 │ let x = "\8\\1\9"
+ 26 │ let x = "\9\\n9\\\9\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 22 22 │ let x = "\8\8"
+ 23 23 │ let x = "\9\8"
+ 24 │ - let·x·=·"foo\8bar\9baz"
+ 24 │ + let·x·=·"foo8bar\9baz"
+ 25 25 │ let x = "\8\\1\9"
+ 26 26 │ let x = "\9\\n9\\\9\9"
+
+
+```
+
+```
+invalid.js:24:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 22 │ let x = "\8\8"
+ 23 │ let x = "\9\8"
+ > 24 │ let x = "foo\8bar\9baz"
+ │ ^^
+ 25 │ let x = "\8\\1\9"
+ 26 │ let x = "\9\\n9\\\9\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 24 │ let·x·=·"foo\\8bar\9baz"
+ │ +
+
+```
+
+```
+invalid.js:24:18 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 22 │ let x = "\8\8"
+ 23 │ let x = "\9\8"
+ > 24 │ let x = "foo\8bar\9baz"
+ │ ^^
+ 25 │ let x = "\8\\1\9"
+ 26 │ let x = "\9\\n9\\\9\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 24 │ let·x·=·"foo\8bar\\9baz"
+ │ +
+
+```
+
+```
+invalid.js:24:18 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 22 │ let x = "\8\8"
+ 23 │ let x = "\9\8"
+ > 24 │ let x = "foo\8bar\9baz"
+ │ ^^
+ 25 │ let x = "\8\\1\9"
+ 26 │ let x = "\9\\n9\\\9\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 22 22 │ let x = "\8\8"
+ 23 23 │ let x = "\9\8"
+ 24 │ - let·x·=·"foo\8bar\9baz"
+ 24 │ + let·x·=·"foo\8bar9baz"
+ 25 25 │ let x = "\8\\1\9"
+ 26 26 │ let x = "\9\\n9\\\9\9"
+
+
+```
+
+```
+invalid.js:25:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 23 │ let x = "\9\8"
+ 24 │ let x = "foo\8bar\9baz"
+ > 25 │ let x = "\8\\1\9"
+ │ ^^
+ 26 │ let x = "\9\\n9\\\9\9"
+ 27 │ let x = "\8\\\\\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 25 │ let·x·=·"\8\\1\9"
+ │ -
+
+```
+
+```
+invalid.js:25:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 23 │ let x = "\9\8"
+ 24 │ let x = "foo\8bar\9baz"
+ > 25 │ let x = "\8\\1\9"
+ │ ^^
+ 26 │ let x = "\9\\n9\\\9\9"
+ 27 │ let x = "\8\\\\\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 25 │ let·x·=·"\\8\\1\9"
+ │ +
+
+```
+
+```
+invalid.js:25:15 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 23 │ let x = "\9\8"
+ 24 │ let x = "foo\8bar\9baz"
+ > 25 │ let x = "\8\\1\9"
+ │ ^^
+ 26 │ let x = "\9\\n9\\\9\9"
+ 27 │ let x = "\8\\\\\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 25 │ let·x·=·"\8\\1\\9"
+ │ +
+
+```
+
+```
+invalid.js:25:15 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 23 │ let x = "\9\8"
+ 24 │ let x = "foo\8bar\9baz"
+ > 25 │ let x = "\8\\1\9"
+ │ ^^
+ 26 │ let x = "\9\\n9\\\9\9"
+ 27 │ let x = "\8\\\\\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 23 23 │ let x = "\9\8"
+ 24 24 │ let x = "foo\8bar\9baz"
+ 25 │ - let·x·=·"\8\\1\9"
+ 25 │ + let·x·=·"\8\\19"
+ 26 26 │ let x = "\9\\n9\\\9\9"
+ 27 27 │ let x = "\8\\\\\9"
+
+
+```
+
+```
+invalid.js:26:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 24 │ let x = "foo\8bar\9baz"
+ 25 │ let x = "\8\\1\9"
+ > 26 │ let x = "\9\\n9\\\9\9"
+ │ ^^
+ 27 │ let x = "\8\\\\\9"
+ 28 │ let x = "var foo = '\8'; bar('\9')"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 26 │ let·x·=·"\9\\n9\\\9\9"
+ │ -
+
+```
+
+```
+invalid.js:26:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 24 │ let x = "foo\8bar\9baz"
+ 25 │ let x = "\8\\1\9"
+ > 26 │ let x = "\9\\n9\\\9\9"
+ │ ^^
+ 27 │ let x = "\8\\\\\9"
+ 28 │ let x = "var foo = '\8'; bar('\9')"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 26 │ let·x·=·"\\9\\n9\\\9\9"
+ │ +
+
+```
+
+```
+invalid.js:26:18 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 24 │ let x = "foo\8bar\9baz"
+ 25 │ let x = "\8\\1\9"
+ > 26 │ let x = "\9\\n9\\\9\9"
+ │ ^^
+ 27 │ let x = "\8\\\\\9"
+ 28 │ let x = "var foo = '\8'; bar('\9')"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 26 │ let·x·=·"\9\\n9\\\9\9"
+ │ -
+
+```
+
+```
+invalid.js:26:18 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 24 │ let x = "foo\8bar\9baz"
+ 25 │ let x = "\8\\1\9"
+ > 26 │ let x = "\9\\n9\\\9\9"
+ │ ^^
+ 27 │ let x = "\8\\\\\9"
+ 28 │ let x = "var foo = '\8'; bar('\9')"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 26 │ let·x·=·"\9\\n9\\\\9\9"
+ │ +
+
+```
+
+```
+invalid.js:26:20 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 24 │ let x = "foo\8bar\9baz"
+ 25 │ let x = "\8\\1\9"
+ > 26 │ let x = "\9\\n9\\\9\9"
+ │ ^^
+ 27 │ let x = "\8\\\\\9"
+ 28 │ let x = "var foo = '\8'; bar('\9')"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 26 │ let·x·=·"\9\\n9\\\9\\9"
+ │ +
+
+```
+
+```
+invalid.js:27:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 25 │ let x = "\8\\1\9"
+ 26 │ let x = "\9\\n9\\\9\9"
+ > 27 │ let x = "\8\\\\\9"
+ │ ^^
+ 28 │ let x = "var foo = '\8'; bar('\9')"
+ 29 │ let x = "var foo = '8'\n bar = '\9'"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 27 │ let·x·=·"\8\\\\\9"
+ │ -
+
+```
+
+```
+invalid.js:27:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 25 │ let x = "\8\\1\9"
+ 26 │ let x = "\9\\n9\\\9\9"
+ > 27 │ let x = "\8\\\\\9"
+ │ ^^
+ 28 │ let x = "var foo = '\8'; bar('\9')"
+ 29 │ let x = "var foo = '8'\n bar = '\9'"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 27 │ let·x·=·"\\8\\\\\9"
+ │ +
+
+```
+
+```
+invalid.js:27:16 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 25 │ let x = "\8\\1\9"
+ 26 │ let x = "\9\\n9\\\9\9"
+ > 27 │ let x = "\8\\\\\9"
+ │ ^^
+ 28 │ let x = "var foo = '\8'; bar('\9')"
+ 29 │ let x = "var foo = '8'\n bar = '\9'"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 27 │ let·x·=·"\8\\\\\\9"
+ │ +
+
+```
+
+```
+invalid.js:28:21 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 26 │ let x = "\9\\n9\\\9\9"
+ 27 │ let x = "\8\\\\\9"
+ > 28 │ let x = "var foo = '\8'; bar('\9')"
+ │ ^^
+ 29 │ let x = "var foo = '8'\n bar = '\9'"
+ 30 │ let x = "\\\n\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 28 │ let·x·=·"var·foo·=·'\8';·bar('\9')"
+ │ -
+
+```
+
+```
+invalid.js:28:21 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 26 │ let x = "\9\\n9\\\9\9"
+ 27 │ let x = "\8\\\\\9"
+ > 28 │ let x = "var foo = '\8'; bar('\9')"
+ │ ^^
+ 29 │ let x = "var foo = '8'\n bar = '\9'"
+ 30 │ let x = "\\\n\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 28 │ let·x·=·"var·foo·=·'\\8';·bar('\9')"
+ │ +
+
+```
+
+```
+invalid.js:28:31 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 26 │ let x = "\9\\n9\\\9\9"
+ 27 │ let x = "\8\\\\\9"
+ > 28 │ let x = "var foo = '\8'; bar('\9')"
+ │ ^^
+ 29 │ let x = "var foo = '8'\n bar = '\9'"
+ 30 │ let x = "\\\n\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 28 │ let·x·=·"var·foo·=·'\8';·bar('\\9')"
+ │ +
+
+```
+
+```
+invalid.js:28:31 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 26 │ let x = "\9\\n9\\\9\9"
+ 27 │ let x = "\8\\\\\9"
+ > 28 │ let x = "var foo = '\8'; bar('\9')"
+ │ ^^
+ 29 │ let x = "var foo = '8'\n bar = '\9'"
+ 30 │ let x = "\\\n\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 28 │ let·x·=·"var·foo·=·'\8';·bar('\9')"
+ │ -
+
+```
+
+```
+invalid.js:29:34 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 27 │ let x = "\8\\\\\9"
+ 28 │ let x = "var foo = '\8'; bar('\9')"
+ > 29 │ let x = "var foo = '8'\n bar = '\9'"
+ │ ^^
+ 30 │ let x = "\\\n\8"
+ 31 │ let x = "\\\r\n\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 29 │ let·x·=·"var·foo·=·'8'\n··bar·=·'\9'"
+ │ -
+
+```
+
+```
+invalid.js:29:34 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 27 │ let x = "\8\\\\\9"
+ 28 │ let x = "var foo = '\8'; bar('\9')"
+ > 29 │ let x = "var foo = '8'\n bar = '\9'"
+ │ ^^
+ 30 │ let x = "\\\n\8"
+ 31 │ let x = "\\\r\n\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 29 │ let·x·=·"var·foo·=·'8'\n··bar·=·'\\9'"
+ │ +
+
+```
+
+```
+invalid.js:30:14 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 28 │ let x = "var foo = '\8'; bar('\9')"
+ 29 │ let x = "var foo = '8'\n bar = '\9'"
+ > 30 │ let x = "\\\n\8"
+ │ ^^
+ 31 │ let x = "\\\r\n\9"
+ 32 │ let x = "\\\\\n\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 28 28 │ let x = "var foo = '\8'; bar('\9')"
+ 29 29 │ let x = "var foo = '8'\n bar = '\9'"
+ 30 │ - let·x·=·"\\\n\8"
+ 30 │ + let·x·=·"\\\n8"
+ 31 31 │ let x = "\\\r\n\9"
+ 32 32 │ let x = "\\\\\n\8"
+
+
+```
+
+```
+invalid.js:30:14 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 28 │ let x = "var foo = '\8'; bar('\9')"
+ 29 │ let x = "var foo = '8'\n bar = '\9'"
+ > 30 │ let x = "\\\n\8"
+ │ ^^
+ 31 │ let x = "\\\r\n\9"
+ 32 │ let x = "\\\\\n\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 30 │ let·x·=·"\\\n\\8"
+ │ +
+
+```
+
+```
+invalid.js:31:16 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 29 │ let x = "var foo = '8'\n bar = '\9'"
+ 30 │ let x = "\\\n\8"
+ > 31 │ let x = "\\\r\n\9"
+ │ ^^
+ 32 │ let x = "\\\\\n\8"
+ 33 │ let x = "foo\\\nbar\9baz"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 29 29 │ let x = "var foo = '8'\n bar = '\9'"
+ 30 30 │ let x = "\\\n\8"
+ 31 │ - let·x·=·"\\\r\n\9"
+ 31 │ + let·x·=·"\\\r\n9"
+ 32 32 │ let x = "\\\\\n\8"
+ 33 33 │ let x = "foo\\\nbar\9baz"
+
+
+```
+
+```
+invalid.js:31:16 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 29 │ let x = "var foo = '8'\n bar = '\9'"
+ 30 │ let x = "\\\n\8"
+ > 31 │ let x = "\\\r\n\9"
+ │ ^^
+ 32 │ let x = "\\\\\n\8"
+ 33 │ let x = "foo\\\nbar\9baz"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 31 │ let·x·=·"\\\r\n\\9"
+ │ +
+
+```
+
+```
+invalid.js:32:16 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 30 │ let x = "\\\n\8"
+ 31 │ let x = "\\\r\n\9"
+ > 32 │ let x = "\\\\\n\8"
+ │ ^^
+ 33 │ let x = "foo\\\nbar\9baz"
+ 34 │ let x = "\\0\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 30 30 │ let x = "\\\n\8"
+ 31 31 │ let x = "\\\r\n\9"
+ 32 │ - let·x·=·"\\\\\n\8"
+ 32 │ + let·x·=·"\\\\\n8"
+ 33 33 │ let x = "foo\\\nbar\9baz"
+ 34 34 │ let x = "\\0\8"
+
+
+```
+
+```
+invalid.js:32:16 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 30 │ let x = "\\\n\8"
+ 31 │ let x = "\\\r\n\9"
+ > 32 │ let x = "\\\\\n\8"
+ │ ^^
+ 33 │ let x = "foo\\\nbar\9baz"
+ 34 │ let x = "\\0\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 32 │ let·x·=·"\\\\\n\\8"
+ │ +
+
+```
+
+```
+invalid.js:33:20 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 31 │ let x = "\\\r\n\9"
+ 32 │ let x = "\\\\\n\8"
+ > 33 │ let x = "foo\\\nbar\9baz"
+ │ ^^
+ 34 │ let x = "\\0\8"
+ 35 │ let x = "foo\\0\9bar"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with 9. This maintains the current functionality.
+
+ 31 31 │ let x = "\\\r\n\9"
+ 32 32 │ let x = "\\\\\n\8"
+ 33 │ - let·x·=·"foo\\\nbar\9baz"
+ 33 │ + let·x·=·"foo\\\nbar9baz"
+ 34 34 │ let x = "\\0\8"
+ 35 35 │ let x = "foo\\0\9bar"
+
+
+```
+
+```
+invalid.js:33:20 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 31 │ let x = "\\\r\n\9"
+ 32 │ let x = "\\\\\n\8"
+ > 33 │ let x = "foo\\\nbar\9baz"
+ │ ^^
+ 34 │ let x = "\\0\8"
+ 35 │ let x = "foo\\0\9bar"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 33 │ let·x·=·"foo\\\nbar\\9baz"
+ │ +
+
+```
+
+```
+invalid.js:34:11 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 32 │ let x = "\\\\\n\8"
+ 33 │ let x = "foo\\\nbar\9baz"
+ > 34 │ let x = "\\0\8"
+ │ ^^^^
+ 35 │ let x = "foo\\0\9bar"
+ 36 │ let x = "\\1\\0\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \0\8 with \u00008. This maintains the current functionality.
+
+ 32 32 │ let x = "\\\\\n\8"
+ 33 33 │ let x = "foo\\\nbar\9baz"
+ 34 │ - let·x·=·"\\0\8"
+ 34 │ + let·x·=·"\\u00008"
+ 35 35 │ let x = "foo\\0\9bar"
+ 36 36 │ let x = "\\1\\0\8"
+
+
+```
+
+```
+invalid.js:34:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 32 │ let x = "\\\\\n\8"
+ 33 │ let x = "foo\\\nbar\9baz"
+ > 34 │ let x = "\\0\8"
+ │ ^^
+ 35 │ let x = "foo\\0\9bar"
+ 36 │ let x = "\\1\\0\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \u0038. This maintains the current functionality.
+
+ 32 32 │ let x = "\\\\\n\8"
+ 33 33 │ let x = "foo\\\nbar\9baz"
+ 34 │ - let·x·=·"\\0\8"
+ 34 │ + let·x·=·"\\0\u0038"
+ 35 35 │ let x = "foo\\0\9bar"
+ 36 36 │ let x = "\\1\\0\8"
+
+
+```
+
+```
+invalid.js:34:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 32 │ let x = "\\\\\n\8"
+ 33 │ let x = "foo\\\nbar\9baz"
+ > 34 │ let x = "\\0\8"
+ │ ^^
+ 35 │ let x = "foo\\0\9bar"
+ 36 │ let x = "\\1\\0\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 34 │ let·x·=·"\\0\\8"
+ │ +
+
+```
+
+```
+invalid.js:35:14 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 33 │ let x = "foo\\\nbar\9baz"
+ 34 │ let x = "\\0\8"
+ > 35 │ let x = "foo\\0\9bar"
+ │ ^^^^
+ 36 │ let x = "\\1\\0\8"
+ 37 │ let x = "\\0\8\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \0\9 with \u00009. This maintains the current functionality.
+
+ 33 33 │ let x = "foo\\\nbar\9baz"
+ 34 34 │ let x = "\\0\8"
+ 35 │ - let·x·=·"foo\\0\9bar"
+ 35 │ + let·x·=·"foo\\u00009bar"
+ 36 36 │ let x = "\\1\\0\8"
+ 37 37 │ let x = "\\0\8\9"
+
+
+```
+
+```
+invalid.js:35:16 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 33 │ let x = "foo\\\nbar\9baz"
+ 34 │ let x = "\\0\8"
+ > 35 │ let x = "foo\\0\9bar"
+ │ ^^
+ 36 │ let x = "\\1\\0\8"
+ 37 │ let x = "\\0\8\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \u0039. This maintains the current functionality.
+
+ 33 33 │ let x = "foo\\\nbar\9baz"
+ 34 34 │ let x = "\\0\8"
+ 35 │ - let·x·=·"foo\\0\9bar"
+ 35 │ + let·x·=·"foo\\0\u0039bar"
+ 36 36 │ let x = "\\1\\0\8"
+ 37 37 │ let x = "\\0\8\9"
+
+
+```
+
+```
+invalid.js:35:16 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 33 │ let x = "foo\\\nbar\9baz"
+ 34 │ let x = "\\0\8"
+ > 35 │ let x = "foo\\0\9bar"
+ │ ^^
+ 36 │ let x = "\\1\\0\8"
+ 37 │ let x = "\\0\8\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 35 │ let·x·=·"foo\\0\\9bar"
+ │ +
+
+```
+
+```
+invalid.js:36:14 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 34 │ let x = "\\0\8"
+ 35 │ let x = "foo\\0\9bar"
+ > 36 │ let x = "\\1\\0\8"
+ │ ^^^^
+ 37 │ let x = "\\0\8\9"
+ 38 │ let x = "\8\\0\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \0\8 with \u00008. This maintains the current functionality.
+
+ 34 34 │ let x = "\\0\8"
+ 35 35 │ let x = "foo\\0\9bar"
+ 36 │ - let·x·=·"\\1\\0\8"
+ 36 │ + let·x·=·"\\1\\u00008"
+ 37 37 │ let x = "\\0\8\9"
+ 38 38 │ let x = "\8\\0\9"
+
+
+```
+
+```
+invalid.js:36:16 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 34 │ let x = "\\0\8"
+ 35 │ let x = "foo\\0\9bar"
+ > 36 │ let x = "\\1\\0\8"
+ │ ^^
+ 37 │ let x = "\\0\8\9"
+ 38 │ let x = "\8\\0\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \u0038. This maintains the current functionality.
+
+ 34 34 │ let x = "\\0\8"
+ 35 35 │ let x = "foo\\0\9bar"
+ 36 │ - let·x·=·"\\1\\0\8"
+ 36 │ + let·x·=·"\\1\\0\u0038"
+ 37 37 │ let x = "\\0\8\9"
+ 38 38 │ let x = "\8\\0\9"
+
+
+```
+
+```
+invalid.js:36:16 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 34 │ let x = "\\0\8"
+ 35 │ let x = "foo\\0\9bar"
+ > 36 │ let x = "\\1\\0\8"
+ │ ^^
+ 37 │ let x = "\\0\8\9"
+ 38 │ let x = "\8\\0\9"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 36 │ let·x·=·"\\1\\0\\8"
+ │ +
+
+```
+
+```
+invalid.js:37:11 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 35 │ let x = "foo\\0\9bar"
+ 36 │ let x = "\\1\\0\8"
+ > 37 │ let x = "\\0\8\9"
+ │ ^^^^
+ 38 │ let x = "\8\\0\9"
+ 39 │ let x = "0\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \0\8 with \u00008. This maintains the current functionality.
+
+ 35 35 │ let x = "foo\\0\9bar"
+ 36 36 │ let x = "\\1\\0\8"
+ 37 │ - let·x·=·"\\0\8\9"
+ 37 │ + let·x·=·"\\u00008\9"
+ 38 38 │ let x = "\8\\0\9"
+ 39 39 │ let x = "0\8"
+
+
+```
+
+```
+invalid.js:37:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 35 │ let x = "foo\\0\9bar"
+ 36 │ let x = "\\1\\0\8"
+ > 37 │ let x = "\\0\8\9"
+ │ ^^
+ 38 │ let x = "\8\\0\9"
+ 39 │ let x = "0\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 37 │ let·x·=·"\\0\\8\9"
+ │ +
+
+```
+
+```
+invalid.js:37:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 35 │ let x = "foo\\0\9bar"
+ 36 │ let x = "\\1\\0\8"
+ > 37 │ let x = "\\0\8\9"
+ │ ^^
+ 38 │ let x = "\8\\0\9"
+ 39 │ let x = "0\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \u0038. This maintains the current functionality.
+
+ 35 35 │ let x = "foo\\0\9bar"
+ 36 36 │ let x = "\\1\\0\8"
+ 37 │ - let·x·=·"\\0\8\9"
+ 37 │ + let·x·=·"\\0\u0038\9"
+ 38 38 │ let x = "\8\\0\9"
+ 39 39 │ let x = "0\8"
+
+
+```
+
+```
+invalid.js:37:15 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 35 │ let x = "foo\\0\9bar"
+ 36 │ let x = "\\1\\0\8"
+ > 37 │ let x = "\\0\8\9"
+ │ ^^
+ 38 │ let x = "\8\\0\9"
+ 39 │ let x = "0\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 37 │ let·x·=·"\\0\8\\9"
+ │ +
+
+```
+
+```
+invalid.js:38:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 36 │ let x = "\\1\\0\8"
+ 37 │ let x = "\\0\8\9"
+ > 38 │ let x = "\8\\0\9"
+ │ ^^
+ 39 │ let x = "0\8"
+ 40 │ let x = "\\0\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 38 │ let·x·=·"\8\\0\9"
+ │ -
+
+```
+
+```
+invalid.js:38:10 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 36 │ let x = "\\1\\0\8"
+ 37 │ let x = "\\0\8\9"
+ > 38 │ let x = "\8\\0\9"
+ │ ^^
+ 39 │ let x = "0\8"
+ 40 │ let x = "\\0\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 38 │ let·x·=·"\\8\\0\9"
+ │ +
+
+```
+
+```
+invalid.js:38:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 36 │ let x = "\\1\\0\8"
+ 37 │ let x = "\\0\8\9"
+ > 38 │ let x = "\8\\0\9"
+ │ ^^^^
+ 39 │ let x = "0\8"
+ 40 │ let x = "\\0\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \0\9 with \u00009. This maintains the current functionality.
+
+ 36 36 │ let x = "\\1\\0\8"
+ 37 37 │ let x = "\\0\8\9"
+ 38 │ - let·x·=·"\8\\0\9"
+ 38 │ + let·x·=·"\8\\u00009"
+ 39 39 │ let x = "0\8"
+ 40 40 │ let x = "\\0\8"
+
+
+```
+
+```
+invalid.js:38:15 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 36 │ let x = "\\1\\0\8"
+ 37 │ let x = "\\0\8\9"
+ > 38 │ let x = "\8\\0\9"
+ │ ^^
+ 39 │ let x = "0\8"
+ 40 │ let x = "\\0\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \u0039. This maintains the current functionality.
+
+ 36 36 │ let x = "\\1\\0\8"
+ 37 37 │ let x = "\\0\8\9"
+ 38 │ - let·x·=·"\8\\0\9"
+ 38 │ + let·x·=·"\8\\0\u0039"
+ 39 39 │ let x = "0\8"
+ 40 40 │ let x = "\\0\8"
+
+
+```
+
+```
+invalid.js:38:15 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 36 │ let x = "\\1\\0\8"
+ 37 │ let x = "\\0\8\9"
+ > 38 │ let x = "\8\\0\9"
+ │ ^^
+ 39 │ let x = "0\8"
+ 40 │ let x = "\\0\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \9 with \\9 to include the actual backslash character.
+
+ 38 │ let·x·=·"\8\\0\\9"
+ │ +
+
+```
+
+```
+invalid.js:39:11 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 37 │ let x = "\\0\8\9"
+ 38 │ let x = "\8\\0\9"
+ > 39 │ let x = "0\8"
+ │ ^^
+ 40 │ let x = "\\0\8"
+ 41 │ let x = "\0 \8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 37 37 │ let x = "\\0\8\9"
+ 38 38 │ let x = "\8\\0\9"
+ 39 │ - let·x·=·"0\8"
+ 39 │ + let·x·=·"08"
+ 40 40 │ let x = "\\0\8"
+ 41 41 │ let x = "\0 \8"
+
+
+```
+
+```
+invalid.js:39:11 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 37 │ let x = "\\0\8\9"
+ 38 │ let x = "\8\\0\9"
+ > 39 │ let x = "0\8"
+ │ ^^
+ 40 │ let x = "\\0\8"
+ 41 │ let x = "\0 \8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 39 │ let·x·=·"0\\8"
+ │ +
+
+```
+
+```
+invalid.js:40:11 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 38 │ let x = "\8\\0\9"
+ 39 │ let x = "0\8"
+ > 40 │ let x = "\\0\8"
+ │ ^^^^
+ 41 │ let x = "\0 \8"
+ 42 │ let x = "\01\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \0\8 with \u00008. This maintains the current functionality.
+
+ 38 38 │ let x = "\8\\0\9"
+ 39 39 │ let x = "0\8"
+ 40 │ - let·x·=·"\\0\8"
+ 40 │ + let·x·=·"\\u00008"
+ 41 41 │ let x = "\0 \8"
+ 42 42 │ let x = "\01\8"
+
+
+```
+
+```
+invalid.js:40:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 38 │ let x = "\8\\0\9"
+ 39 │ let x = "0\8"
+ > 40 │ let x = "\\0\8"
+ │ ^^
+ 41 │ let x = "\0 \8"
+ 42 │ let x = "\01\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \u0038. This maintains the current functionality.
+
+ 38 38 │ let x = "\8\\0\9"
+ 39 39 │ let x = "0\8"
+ 40 │ - let·x·=·"\\0\8"
+ 40 │ + let·x·=·"\\0\u0038"
+ 41 41 │ let x = "\0 \8"
+ 42 42 │ let x = "\01\8"
+
+
+```
+
+```
+invalid.js:40:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 38 │ let x = "\8\\0\9"
+ 39 │ let x = "0\8"
+ > 40 │ let x = "\\0\8"
+ │ ^^
+ 41 │ let x = "\0 \8"
+ 42 │ let x = "\01\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 40 │ let·x·=·"\\0\\8"
+ │ +
+
+```
+
+```
+invalid.js:41:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 39 │ let x = "0\8"
+ 40 │ let x = "\\0\8"
+ > 41 │ let x = "\0 \8"
+ │ ^^
+ 42 │ let x = "\01\8"
+ 43 │ let x = "\0\1\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 41 │ let·x·=·"\0·\8"
+ │ -
+
+```
+
+```
+invalid.js:41:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 39 │ let x = "0\8"
+ 40 │ let x = "\\0\8"
+ > 41 │ let x = "\0 \8"
+ │ ^^
+ 42 │ let x = "\01\8"
+ 43 │ let x = "\0\1\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 41 │ let·x·=·"\0·\\8"
+ │ +
+
+```
+
+```
+invalid.js:42:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 40 │ let x = "\\0\8"
+ 41 │ let x = "\0 \8"
+ > 42 │ let x = "\01\8"
+ │ ^^
+ 43 │ let x = "\0\1\8"
+ 44 │ let x = "\0\\n\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 40 40 │ let x = "\\0\8"
+ 41 41 │ let x = "\0 \8"
+ 42 │ - let·x·=·"\01\8"
+ 42 │ + let·x·=·"\018"
+ 43 43 │ let x = "\0\1\8"
+ 44 44 │ let x = "\0\\n\8"
+
+
+```
+
+```
+invalid.js:42:13 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 40 │ let x = "\\0\8"
+ 41 │ let x = "\0 \8"
+ > 42 │ let x = "\01\8"
+ │ ^^
+ 43 │ let x = "\0\1\8"
+ 44 │ let x = "\0\\n\8"
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 42 │ let·x·=·"\01\\8"
+ │ +
+
+```
+
+```
+invalid.js:43:14 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 41 │ let x = "\0 \8"
+ 42 │ let x = "\01\8"
+ > 43 │ let x = "\0\1\8"
+ │ ^^
+ 44 │ let x = "\0\\n\8"
+ 45 │
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 41 41 │ let x = "\0 \8"
+ 42 42 │ let x = "\01\8"
+ 43 │ - let·x·=·"\0\1\8"
+ 43 │ + let·x·=·"\0\18"
+ 44 44 │ let x = "\0\\n\8"
+ 45 45 │
+
+
+```
+
+```
+invalid.js:43:14 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 41 │ let x = "\0 \8"
+ 42 │ let x = "\01\8"
+ > 43 │ let x = "\0\1\8"
+ │ ^^
+ 44 │ let x = "\0\\n\8"
+ 45 │
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 43 │ let·x·=·"\0\1\\8"
+ │ +
+
+```
+
+```
+invalid.js:44:15 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 42 │ let x = "\01\8"
+ 43 │ let x = "\0\1\8"
+ > 44 │ let x = "\0\\n\8"
+ │ ^^
+ 45 │
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with 8. This maintains the current functionality.
+
+ 42 42 │ let x = "\01\8"
+ 43 43 │ let x = "\0\1\8"
+ 44 │ - let·x·=·"\0\\n\8"
+ 44 │ + let·x·=·"\0\\n8"
+ 45 45 │
+
+
+```
+
+```
+invalid.js:44:15 lint/nursery/noNonoctalDecimalEscape FIXABLE ━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━━
+
+ ! Don't use `\8` and `\9` escape sequences in string literals.
+
+ 42 │ let x = "\01\8"
+ 43 │ let x = "\0\1\8"
+ > 44 │ let x = "\0\\n\8"
+ │ ^^
+ 45 │
+
+ i The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.
+
+ i Suggested fix: Replace \8 with \\8 to include the actual backslash character.
+
+ 44 │ let·x·=·"\0\\n\\8"
+ │ +
+
+```
+
+
diff --git /dev/null b/crates/rome_js_analyze/tests/specs/nursery/noNonoctalDecimalEscape/valid.js
new file mode 100644
--- /dev/null
+++ b/crates/rome_js_analyze/tests/specs/nursery/noNonoctalDecimalEscape/valid.js
@@ -0,0 +1,41 @@
+// /* should not generate diagnostics */
+let x = "8"
+let x = "var \u8888"
+let x = /\8/
+let x = ""
+let x = "foo"
+let x = "8"
+let x = "9"
+let x = "foo8"
+let x = "foo9bar"
+let x = "\ "
+let x = "\\"
+let x = "\a"
+let x = "\n"
+let x = "\0"
+let x = "\1"
+let x = "\7"
+let x = "\01"
+let x = "\08"
+let x = "\19"
+let x = "\t9"
+let x = "\👍8"
+let x = "\\8"
+let x = "\\9"
+let x = "\\8\\9"
+let x = "\\ \\8"
+let x = "\\\\9"
+let x = "\\9bar"
+let x = "a\\8"
+let x = "foo\\8"
+let x = "foo\\8bar"
+let x = "9\\9"
+let x = "n\n8"
+let x = "n\nn\n8"
+let x = "\1.8"
+let x = "\1\28"
+let x = "\x99"
+let x = "\\\x38"
+let x = "\u99999"
+let x = "\n8"
+let x = "\n\\9"
diff --git /dev/null b/crates/rome_js_analyze/tests/specs/nursery/noNonoctalDecimalEscape/valid.js.snap
new file mode 100644
--- /dev/null
+++ b/crates/rome_js_analyze/tests/specs/nursery/noNonoctalDecimalEscape/valid.js.snap
@@ -0,0 +1,51 @@
+---
+source: crates/rome_js_analyze/tests/spec_tests.rs
+expression: valid.js
+---
+# Input
+```js
+// /* should not generate diagnostics */
+let x = "8"
+let x = "var \u8888"
+let x = /\8/
+let x = ""
+let x = "foo"
+let x = "8"
+let x = "9"
+let x = "foo8"
+let x = "foo9bar"
+let x = "\ "
+let x = "\\"
+let x = "\a"
+let x = "\n"
+let x = "\0"
+let x = "\1"
+let x = "\7"
+let x = "\01"
+let x = "\08"
+let x = "\19"
+let x = "\t9"
+let x = "\👍8"
+let x = "\\8"
+let x = "\\9"
+let x = "\\8\\9"
+let x = "\\ \\8"
+let x = "\\\\9"
+let x = "\\9bar"
+let x = "a\\8"
+let x = "foo\\8"
+let x = "foo\\8bar"
+let x = "9\\9"
+let x = "n\n8"
+let x = "n\nn\n8"
+let x = "\1.8"
+let x = "\1\28"
+let x = "\x99"
+let x = "\\\x38"
+let x = "\u99999"
+let x = "\n8"
+let x = "\n\\9"
+
+```
+
+
diff --git /dev/null b/website/src/pages/lint/rules/noNonoctalDecimalEscape.md
new file mode 100644
--- /dev/null
+++ b/website/src/pages/lint/rules/noNonoctalDecimalEscape.md
@@ -0,0 +1,170 @@
+---
+title: Lint Rule noNonoctalDecimalEscape
+parent: lint/rules/index
+---
+
+# noNonoctalDecimalEscape (since vnext)
+
+Disallow `\8` and `\9` escape sequences in string literals.
+
+Since ECMAScript 2021, the escape sequences \8 and \9 have been defined as non-octal decimal escape sequences.
+However, most JavaScript engines consider them to be "useless" escapes. For example:
+
+```jsx
+"\8" === "8"; // true
+"\9" === "9"; // true
+```
+
+nursery/noNonoctalDecimalEscape.js:1:2 <a href="https://docs.rome.tools/lint/rules/noNonoctalDecimalEscape">lint/nursery/noNonoctalDecimalEscape</a> <span style="color: #000; background-color: #ddd;"> FIXABLE </span> ━━━━━━━━━━━━━━
+
+<strong><span style="color: Tomato;"> </span></strong><strong><span style="color: Tomato;">✖</span></strong> <span style="color: Tomato;">Don't use </span><span style="color: Tomato;"><strong>`\8`</strong></span><span style="color: Tomato;"> and </span><span style="color: Tomato;"><strong>`\9`</strong></span><span style="color: Tomato;"> escape sequences in string literals.</span>
+
+<strong><span style="color: Tomato;"> </span></strong><strong><span style="color: Tomato;">></span></strong> <strong>1 │ </strong>"\8" === "8"; // true
+ <strong> │ </strong> <strong><span style="color: Tomato;">^</span></strong><strong><span style="color: Tomato;">^</span></strong>
+ <strong>2 │ </strong>"\9" === "9"; // true
+ <strong>3 │ </strong>
+
+<strong><span style="color: rgb(38, 148, 255);"> </span></strong><strong><span style="color: rgb(38, 148, 255);">ℹ</span></strong> <span style="color: rgb(38, 148, 255);">The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.</span>
+
+<strong><span style="color: rgb(38, 148, 255);"> </span></strong><strong><span style="color: rgb(38, 148, 255);">ℹ</span></strong> <span style="color: rgb(38, 148, 255);">Suggested fix</span><span style="color: rgb(38, 148, 255);">: </span><span style="color: rgb(38, 148, 255);">Replace </span><span style="color: rgb(38, 148, 255);"><strong>\8</strong></span><span style="color: rgb(38, 148, 255);"> with </span><span style="color: rgb(38, 148, 255);"><strong>8</strong></span><span style="color: rgb(38, 148, 255);">. This maintains the current functionality.</span>
+
+ <strong>1</strong> <strong> │ </strong><span style="color: Tomato;">-</span> <span style="color: Tomato;">"</span><span style="color: Tomato;"><strong>\</strong></span><span style="color: Tomato;">8</span><span style="color: Tomato;">"</span><span style="color: Tomato;"><span style="opacity: 0.8;"><strong>·</strong></span></span><span style="color: Tomato;">=</span><span style="color: Tomato;">=</span><span style="color: Tomato;">=</span><span style="color: Tomato;"><span style="opacity: 0.8;">·</span></span><span style="color: Tomato;">"</span><span style="color: Tomato;">8</span><span style="color: Tomato;">"</span><span style="color: Tomato;">;</span><span style="color: Tomato;"><span style="opacity: 0.8;">·</span></span><span style="color: Tomato;">/</span><span style="color: Tomato;">/</span><span style="color: Tomato;"><span style="opacity: 0.8;">·</span></span><span style="color: Tomato;">t</span><span style="color: Tomato;">r</span><span style="color: Tomato;">u</span><span style="color: Tomato;">e</span>
+ <strong>1</strong><strong> │ </strong><span style="color: MediumSeaGreen;">+</span> <span style="color: MediumSeaGreen;">"</span><span style="color: MediumSeaGreen;">8</span><span style="color: MediumSeaGreen;">"</span><span style="color: MediumSeaGreen;"><span style="opacity: 0.8;"><strong>·</strong></span></span><span style="color: MediumSeaGreen;"><span style="opacity: 0.8;"><strong>·</strong></span></span><span style="color: MediumSeaGreen;">=</span><span style="color: MediumSeaGreen;">=</span><span style="color: MediumSeaGreen;">=</span><span style="color: MediumSeaGreen;"><span style="opacity: 0.8;">·</span></span><span style="color: MediumSeaGreen;">"</span><span style="color: MediumSeaGreen;">8</span><span style="color: MediumSeaGreen;">"</span><span style="color: MediumSeaGreen;">;</span><span style="color: MediumSeaGreen;"><span style="opacity: 0.8;">·</span></span><span style="color: MediumSeaGreen;">/</span><span style="color: MediumSeaGreen;">/</span><span style="color: MediumSeaGreen;"><span style="opacity: 0.8;">·</span></span><span style="color: MediumSeaGreen;">t</span><span style="color: MediumSeaGreen;">r</span><span style="color: MediumSeaGreen;">u</span><span style="color: MediumSeaGreen;">e</span>
+ <strong>2</strong> <strong>2</strong><strong> │ </strong> "\9" === "9"; // true
+ <strong>3</strong> <strong>3</strong><strong> │ </strong>
+
+Although this syntax is deprecated, it is still supported for compatibility reasons.
+If the ECMAScript host is not a web browser, this syntax is optional.
+However, web browsers are still required to support it, but only in non-strict mode.
+Regardless of your targeted environment, it is recommended to avoid using these escape sequences in new code.
+
+Source: https://eslint.org/docs/latest/rules/no-nonoctal-decimal-escape
+
+## Examples
+
+### Invalid
+
+```jsx
+const x = "\8";
+```
+
+<pre class="language-text"><code class="language-text">nursery/noNonoctalDecimalEscape.js:1:12 <a href="https://docs.rome.tools/lint/rules/noNonoctalDecimalEscape">lint/nursery/noNonoctalDecimalEscape</a> <span style="color: #000; background-color: #ddd;"> FIXABLE </span> ━━━━━━━━━━━━━
+
+<strong><span style="color: Tomato;"> </span></strong><strong><span style="color: Tomato;">✖</span></strong> <span style="color: Tomato;">Don't use </span><span style="color: Tomato;"><strong>`\8`</strong></span><span style="color: Tomato;"> and </span><span style="color: Tomato;"><strong>`\9`</strong></span><span style="color: Tomato;"> escape sequences in string literals.</span>
+
+<strong><span style="color: Tomato;"> </span></strong><strong><span style="color: Tomato;">></span></strong> <strong>1 │ </strong>const x = "\8";
+ <strong> │ </strong> <strong><span style="color: Tomato;">^</span></strong><strong><span style="color: Tomato;">^</span></strong>
+ <strong>2 │ </strong>
+
+<strong><span style="color: rgb(38, 148, 255);"> </span></strong><strong><span style="color: rgb(38, 148, 255);">ℹ</span></strong> <span style="color: rgb(38, 148, 255);">The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.</span>
+
+<strong><span style="color: rgb(38, 148, 255);"> </span></strong><strong><span style="color: rgb(38, 148, 255);">ℹ</span></strong> <span style="color: rgb(38, 148, 255);">Suggested fix</span><span style="color: rgb(38, 148, 255);">: </span><span style="color: rgb(38, 148, 255);">Replace </span><span style="color: rgb(38, 148, 255);"><strong>\8</strong></span><span style="color: rgb(38, 148, 255);"> with </span><span style="color: rgb(38, 148, 255);"><strong>8</strong></span><span style="color: rgb(38, 148, 255);">. This maintains the current functionality.</span>
+
+<strong> </strong><strong> 1 │ </strong>const<span style="opacity: 0.8;">·</span>x<span style="opacity: 0.8;">·</span>=<span style="opacity: 0.8;">·</span>"<span style="color: Tomato;">\</span>8";
+<strong> </strong><strong> │ </strong> <span style="color: Tomato;">-</span>
+nursery/noNonoctalDecimalEscape.js:1:12 <a href="https://docs.rome.tools/lint/rules/noNonoctalDecimalEscape">lint/nursery/noNonoctalDecimalEscape</a> <span style="color: #000; background-color: #ddd;"> FIXABLE </span> ━━━━━━━━━━━━━
+
+<strong><span style="color: Tomato;"> </span></strong><strong><span style="color: Tomato;">✖</span></strong> <span style="color: Tomato;">Don't use </span><span style="color: Tomato;"><strong>`\8`</strong></span><span style="color: Tomato;"> and </span><span style="color: Tomato;"><strong>`\9`</strong></span><span style="color: Tomato;"> escape sequences in string literals.</span>
+
+<strong><span style="color: Tomato;"> </span></strong><strong><span style="color: Tomato;">></span></strong> <strong>1 │ </strong>const x = "\8";
+ <strong> │ </strong> <strong><span style="color: Tomato;">^</span></strong><strong><span style="color: Tomato;">^</span></strong>
+ <strong>2 │ </strong>
+
+<strong><span style="color: rgb(38, 148, 255);"> </span></strong><strong><span style="color: rgb(38, 148, 255);">ℹ</span></strong> <span style="color: rgb(38, 148, 255);">The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.</span>
+
+<strong><span style="color: rgb(38, 148, 255);"> </span></strong><strong><span style="color: rgb(38, 148, 255);">ℹ</span></strong> <span style="color: rgb(38, 148, 255);">Suggested fix</span><span style="color: rgb(38, 148, 255);">: </span><span style="color: rgb(38, 148, 255);">Replace </span><span style="color: rgb(38, 148, 255);"><strong>\8</strong></span><span style="color: rgb(38, 148, 255);"> with </span><span style="color: rgb(38, 148, 255);"><strong>\\8</strong></span><span style="color: rgb(38, 148, 255);"> to include the actual backslash character.</span>
+
+<strong> </strong><strong> 1 │ </strong>const<span style="opacity: 0.8;">·</span>x<span style="opacity: 0.8;">·</span>=<span style="opacity: 0.8;">·</span>"\<span style="color: MediumSeaGreen;">\</span>8";
+<strong> </strong><strong> │ </strong> <span style="color: MediumSeaGreen;">+</span>
+</code></pre>
+
+```jsx
+const x = "Don't use \8 and \9 escapes.";
+```
+
+<pre class="language-text"><code class="language-text">nursery/noNonoctalDecimalEscape.js:1:22 <a href="https://docs.rome.tools/lint/rules/noNonoctalDecimalEscape">lint/nursery/noNonoctalDecimalEscape</a> <span style="color: #000; background-color: #ddd;"> FIXABLE </span> ━━━━━━━━━━━━━
+
+<strong><span style="color: Tomato;"> </span></strong><strong><span style="color: Tomato;">✖</span></strong> <span style="color: Tomato;">Don't use </span><span style="color: Tomato;"><strong>`\8`</strong></span><span style="color: Tomato;"> and </span><span style="color: Tomato;"><strong>`\9`</strong></span><span style="color: Tomato;"> escape sequences in string literals.</span>
+
+<strong><span style="color: Tomato;"> </span></strong><strong><span style="color: Tomato;">></span></strong> <strong>1 │ </strong>const x = "Don't use \8 and \9 escapes.";
+ <strong> │ </strong> <strong><span style="color: Tomato;">^</span></strong><strong><span style="color: Tomato;">^</span></strong>
+ <strong>2 │ </strong>
+
+<strong><span style="color: rgb(38, 148, 255);"> </span></strong><strong><span style="color: rgb(38, 148, 255);">ℹ</span></strong> <span style="color: rgb(38, 148, 255);">The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.</span>
+
+<strong><span style="color: rgb(38, 148, 255);"> </span></strong><strong><span style="color: rgb(38, 148, 255);">ℹ</span></strong> <span style="color: rgb(38, 148, 255);">Suggested fix</span><span style="color: rgb(38, 148, 255);">: </span><span style="color: rgb(38, 148, 255);">Replace </span><span style="color: rgb(38, 148, 255);"><strong>\8</strong></span><span style="color: rgb(38, 148, 255);"> with </span><span style="color: rgb(38, 148, 255);"><strong>8</strong></span><span style="color: rgb(38, 148, 255);">. This maintains the current functionality.</span>
+
+<strong> </strong><strong> 1 │ </strong>const<span style="opacity: 0.8;">·</span>x<span style="opacity: 0.8;">·</span>=<span style="opacity: 0.8;">·</span>"Don't<span style="opacity: 0.8;">·</span>use<span style="opacity: 0.8;">·</span><span style="color: Tomato;">\</span>8<span style="opacity: 0.8;">·</span>and<span style="opacity: 0.8;">·</span>\9<span style="opacity: 0.8;">·</span>escapes.";
+<strong> </strong><strong> │ </strong> <span style="color: Tomato;">-</span>
+nursery/noNonoctalDecimalEscape.js:1:22 <a href="https://docs.rome.tools/lint/rules/noNonoctalDecimalEscape">lint/nursery/noNonoctalDecimalEscape</a> <span style="color: #000; background-color: #ddd;"> FIXABLE </span> ━━━━━━━━━━━━━
+
+<strong><span style="color: Tomato;"> </span></strong><strong><span style="color: Tomato;">✖</span></strong> <span style="color: Tomato;">Don't use </span><span style="color: Tomato;"><strong>`\8`</strong></span><span style="color: Tomato;"> and </span><span style="color: Tomato;"><strong>`\9`</strong></span><span style="color: Tomato;"> escape sequences in string literals.</span>
+
+<strong><span style="color: Tomato;"> </span></strong><strong><span style="color: Tomato;">></span></strong> <strong>1 │ </strong>const x = "Don't use \8 and \9 escapes.";
+ <strong> │ </strong> <strong><span style="color: Tomato;">^</span></strong><strong><span style="color: Tomato;">^</span></strong>
+ <strong>2 │ </strong>
+
+<strong><span style="color: rgb(38, 148, 255);"> </span></strong><strong><span style="color: rgb(38, 148, 255);">ℹ</span></strong> <span style="color: rgb(38, 148, 255);">The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.</span>
+
+<strong><span style="color: rgb(38, 148, 255);"> </span></strong><strong><span style="color: rgb(38, 148, 255);">ℹ</span></strong> <span style="color: rgb(38, 148, 255);">Suggested fix</span><span style="color: rgb(38, 148, 255);">: </span><span style="color: rgb(38, 148, 255);">Replace </span><span style="color: rgb(38, 148, 255);"><strong>\8</strong></span><span style="color: rgb(38, 148, 255);"> with </span><span style="color: rgb(38, 148, 255);"><strong>\\8</strong></span><span style="color: rgb(38, 148, 255);"> to include the actual backslash character.</span>
+
+<strong> </strong><strong> 1 │ </strong>const<span style="opacity: 0.8;">·</span>x<span style="opacity: 0.8;">·</span>=<span style="opacity: 0.8;">·</span>"Don't<span style="opacity: 0.8;">·</span>use<span style="opacity: 0.8;">·</span>\<span style="color: MediumSeaGreen;">\</span>8<span style="opacity: 0.8;">·</span>and<span style="opacity: 0.8;">·</span>\9<span style="opacity: 0.8;">·</span>escapes.";
+<strong> </strong><strong> │ </strong> <span style="color: MediumSeaGreen;">+</span>
+</code></pre>
+
+```jsx
+const x = "\0\8";
+```
+
+<pre class="language-text"><code class="language-text">nursery/noNonoctalDecimalEscape.js:1:12 <a href="https://docs.rome.tools/lint/rules/noNonoctalDecimalEscape">lint/nursery/noNonoctalDecimalEscape</a> <span style="color: #000; background-color: #ddd;"> FIXABLE </span> ━━━━━━━━━━━━━
+
+<strong><span style="color: Tomato;"> </span></strong><strong><span style="color: Tomato;">✖</span></strong> <span style="color: Tomato;">Don't use </span><span style="color: Tomato;"><strong>`\8`</strong></span><span style="color: Tomato;"> and </span><span style="color: Tomato;"><strong>`\9`</strong></span><span style="color: Tomato;"> escape sequences in string literals.</span>
+
+<strong><span style="color: Tomato;"> </span></strong><strong><span style="color: Tomato;">></span></strong> <strong>1 │ </strong>const x = "\0\8";
+ <strong> │ </strong> <strong><span style="color: Tomato;">^</span></strong><strong><span style="color: Tomato;">^</span></strong><strong><span style="color: Tomato;">^</span></strong><strong><span style="color: Tomato;">^</span></strong>
+ <strong>2 │ </strong>
+
+<strong><span style="color: rgb(38, 148, 255);"> </span></strong><strong><span style="color: rgb(38, 148, 255);">ℹ</span></strong> <span style="color: rgb(38, 148, 255);">The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.</span>
+
+<strong><span style="color: rgb(38, 148, 255);"> </span></strong><strong><span style="color: rgb(38, 148, 255);">ℹ</span></strong> <span style="color: rgb(38, 148, 255);">Suggested fix</span><span style="color: rgb(38, 148, 255);">: </span><span style="color: rgb(38, 148, 255);">Replace </span><span style="color: rgb(38, 148, 255);"><strong>\0\8</strong></span><span style="color: rgb(38, 148, 255);"> with </span><span style="color: rgb(38, 148, 255);"><strong>\u00008</strong></span><span style="color: rgb(38, 148, 255);">. This maintains the current functionality.</span>
+
+ <strong>1</strong> <strong> │ </strong><span style="color: Tomato;">-</span> <span style="color: Tomato;">c</span><span style="color: Tomato;">o</span><span style="color: Tomato;">n</span><span style="color: Tomato;">s</span><span style="color: Tomato;">t</span><span style="color: Tomato;"><span style="opacity: 0.8;">·</span></span><span style="color: Tomato;">x</span><span style="color: Tomato;"><span style="opacity: 0.8;">·</span></span><span style="color: Tomato;">=</span><span style="color: Tomato;"><span style="opacity: 0.8;">·</span></span><span style="color: Tomato;">"</span><span style="color: Tomato;">\</span><span style="color: Tomato;"><strong>0</strong></span><span style="color: Tomato;"><strong>\</strong></span><span style="color: Tomato;"><strong>8</strong></span><span style="color: Tomato;">"</span><span style="color: Tomato;">;</span>
+ <strong>1</strong><strong> │ </strong><span style="color: MediumSeaGreen;">+</span> <span style="color: MediumSeaGreen;">c</span><span style="color: MediumSeaGreen;">o</span><span style="color: MediumSeaGreen;">n</span><span style="color: MediumSeaGreen;">s</span><span style="color: MediumSeaGreen;">t</span><span style="color: MediumSeaGreen;"><span style="opacity: 0.8;">·</span></span><span style="color: MediumSeaGreen;">x</span><span style="color: MediumSeaGreen;"><span style="opacity: 0.8;">·</span></span><span style="color: MediumSeaGreen;">=</span><span style="color: MediumSeaGreen;"><span style="opacity: 0.8;">·</span></span><span style="color: MediumSeaGreen;">"</span><span style="color: MediumSeaGreen;">\</span><span style="color: MediumSeaGreen;"><strong>u</strong></span><span style="color: MediumSeaGreen;"><strong>0</strong></span><span style="color: MediumSeaGreen;"><strong>0</strong></span><span style="color: MediumSeaGreen;"><strong>0</strong></span><span style="color: MediumSeaGreen;"><strong>0</strong></span><span style="color: MediumSeaGreen;"><strong>8</strong></span><span style="color: MediumSeaGreen;">"</span><span style="color: MediumSeaGreen;">;</span>
+ <strong>2</strong> <strong>2</strong><strong> │ </strong>
+
+nursery/noNonoctalDecimalEscape.js:1:14 <a href="https://docs.rome.tools/lint/rules/noNonoctalDecimalEscape">lint/nursery/noNonoctalDecimalEscape</a> <span style="color: #000; background-color: #ddd;"> FIXABLE </span> ━━━━━━━━━━━━━
+
+<strong><span style="color: Tomato;"> </span></strong><strong><span style="color: Tomato;">✖</span></strong> <span style="color: Tomato;">Don't use </span><span style="color: Tomato;"><strong>`\8`</strong></span><span style="color: Tomato;"> and </span><span style="color: Tomato;"><strong>`\9`</strong></span><span style="color: Tomato;"> escape sequences in string literals.</span>
+
+<strong><span style="color: Tomato;"> </span></strong><strong><span style="color: Tomato;">></span></strong> <strong>1 │ </strong>const x = "\0\8";
+ <strong> │ </strong> <strong><span style="color: Tomato;">^</span></strong><strong><span style="color: Tomato;">^</span></strong>
+ <strong>2 │ </strong>
+
+<strong><span style="color: rgb(38, 148, 255);"> </span></strong><strong><span style="color: rgb(38, 148, 255);">ℹ</span></strong> <span style="color: rgb(38, 148, 255);">The nonoctal decimal escape is a deprecated syntax that is left for compatibility and should not be used.</span>
+
+<strong><span style="color: rgb(38, 148, 255);"> </span></strong><strong><span style="color: rgb(38, 148, 255);">ℹ</span></strong> <span style="color: rgb(38, 148, 255);">Suggested fix</span><span style="color: rgb(38, 148, 255);">: </span><span style="color: rgb(38, 148, 255);">Replace </span><span style="color: rgb(38, 148, 255);"><strong>\8</strong></span><span style="color: rgb(38, 148, 255);"> with </span><span style="color: rgb(38, 148, 255);"><strong>\u0038</strong></span><span style="color: rgb(38, 148, 255);">. This maintains the current functionality.</span>
+
+ <strong>1</strong> <strong> │ </strong><span style="color: Tomato;">-</span> <span style="color: Tomato;">c</span><span style="color: Tomato;">o</span><span style="color: Tomato;">n</span><span style="color: Tomato;">s</span><span style="color: Tomato;">t</span><span style="color: Tomato;"><span style="opacity: 0.8;">·</span></span><span style="color: Tomato;">x</span><span style="color: Tomato;"><span style="opacity: 0.8;">·</span></span><span style="color: Tomato;">=</span><span style="color: Tomato;"><span style="opacity: 0.8;">·</span></span><span style="color: Tomato;">"</span><span style="color: Tomato;">\</span><span style="color: Tomato;">0</span><span style="color: Tomato;">\</span><span style="color: Tomato;"><strong>8</strong></span><span style="color: Tomato;">"</span><span style="color: Tomato;">;</span>
+ <strong>1</strong><strong> │ </strong><span style="color: MediumSeaGreen;">+</span> <span style="color: MediumSeaGreen;">c</span><span style="color: MediumSeaGreen;">o</span><span style="color: MediumSeaGreen;">n</span><span style="color: MediumSeaGreen;">s</span><span style="color: MediumSeaGreen;">t</span><span style="color: MediumSeaGreen;"><span style="opacity: 0.8;">·</span></span><span style="color: MediumSeaGreen;">x</span><span style="color: MediumSeaGreen;"><span style="opacity: 0.8;">·</span></span><span style="color: MediumSeaGreen;">=</span><span style="color: MediumSeaGreen;"><span style="opacity: 0.8;">·</span></span><span style="color: MediumSeaGreen;">"</span><span style="color: MediumSeaGreen;">\</span><span style="color: MediumSeaGreen;">0</span><span style="color: MediumSeaGreen;">\</span><span style="color: MediumSeaGreen;"><strong>u</strong></span><span style="color: MediumSeaGreen;"><strong>0</strong></span><span style="color: MediumSeaGreen;"><strong>0</strong></span><span style="color: MediumSeaGreen;"><strong>3</strong></span><span style="color: MediumSeaGreen;"><strong>8</strong></span><span style="color: MediumSeaGreen;">"</span><span style="color: MediumSeaGreen;">;</span>
+ <strong>2</strong> <strong>2</strong><strong> │ </strong>
+
+</code></pre>
+
+## Valid
+
+```jsx
+const x = "8";
+```
+
+```jsx
+const x = "Don't use \\8 and \\9 escapes.";
+```
+
+```jsx
+const x = "\0\u0038";;
+```
+
+## Related links
+
+- [Disable a rule](/linter/#disable-a-lint-rule)
+- [Rule options](/linter/#rule-options)
| `noNonoctalDecimalEscape`, no-nonoctal-decimal-escape
https://eslint.org/docs/latest/rules/no-nonoctal-decimal-escape
| 👋 @rome/staff please triage this issue by adding one of the following labels: `S-Bug: confirmed`, `S-Planned` , `S-Wishlist` or `umbrella`
👋 @rome/staff please triage this issue by adding one of the following labels: `S-Bug: confirmed`, `S-Planned` , `S-Wishlist` or `umbrella`
I'll work on this. | 2023-06-26T01:26:59 | 0.1 | 08645a80c6139ab969aa1917f7e01dae3845d236 | [
"specs::nursery::no_nonoctal_decimal_escape::valid_js",
"specs::nursery::no_nonoctal_decimal_escape::invalid_js"
] | [
"aria_services::tests::test_extract_attributes",
"utils::batch::tests::ok_remove_formal_parameter_from_class_constructor_first",
"react::hooks::test::ok_react_stable_captures",
"utils::batch::tests::ok_remove_first_member",
"utils::batch::tests::ok_remove_formal_parameter_first",
"utils::batch::tests::ok_... | [] | [] |
tokio-rs/tracing | 2,794 | tokio-rs__tracing-2794 | [
"1934"
] | f622a1e83ebe23a41a5e85269b66bee9c24b09f8 | diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -3,7 +3,6 @@ name: CI
on:
push:
branches:
- - master
- "v0.1.x"
pull_request: {}
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -27,9 +26,6 @@ env:
RUSTUP_MAX_RETRIES: 10
# Don't emit giant backtraces in the CI logs.
RUST_BACKTRACE: short
- MSRV: 1.49.0
- # TODO: remove this once tracing's MSRV is bumped.
- APPENDER_MSRV: 1.53.0
jobs:
### check jobs ###
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -169,7 +121,7 @@ jobs:
shell: bash
check-msrv:
- # Run `cargo check` on our minimum supported Rust version (1.56.0). This
+ # Run `cargo check` on our minimum supported Rust version (1.63.0). This
# checks with minimal versions; maximal versions are checked above.
name: "cargo check (+MSRV -Zminimal-versions)"
needs: check
diff --git a/Cargo.toml b/Cargo.toml
--- a/Cargo.toml
+++ b/Cargo.toml
@@ -1,5 +1,5 @@
[workspace]
-
+resolver = "2"
members = [
"tracing",
"tracing-core",
diff --git a/examples/Cargo.toml b/examples/Cargo.toml
--- a/examples/Cargo.toml
+++ b/examples/Cargo.toml
@@ -3,7 +3,7 @@ name = "tracing-examples"
version = "0.0.0"
publish = false
edition = "2018"
-rust-version = "1.56.0"
+rust-version = "1.63.0"
[features]
default = []
diff --git a/tracing-appender/Cargo.toml b/tracing-appender/Cargo.toml
--- a/tracing-appender/Cargo.toml
+++ b/tracing-appender/Cargo.toml
@@ -18,7 +18,7 @@ categories = [
]
keywords = ["logging", "tracing", "file-appender", "non-blocking-writer"]
edition = "2018"
-rust-version = "1.53.0"
+rust-version = "1.63.0"
[dependencies]
crossbeam-channel = "0.5.6"
diff --git a/tracing-appender/README.md b/tracing-appender/README.md
--- a/tracing-appender/README.md
+++ b/tracing-appender/README.md
@@ -36,7 +36,7 @@ allows events and spans to be recorded in a non-blocking manner through a
dedicated logging thread. It also provides a [`RollingFileAppender`][file_appender]
that can be used with _or_ without the non-blocking writer.
-*Compiler support: [requires `rustc` 1.53+][msrv]*
+*Compiler support: [requires `rustc` 1.63+][msrv]*
[msrv]: #supported-rust-versions
diff --git a/tracing-appender/src/lib.rs b/tracing-appender/src/lib.rs
--- a/tracing-appender/src/lib.rs
+++ b/tracing-appender/src/lib.rs
@@ -7,7 +7,7 @@
//! a dedicated logging thread. It also provides a [`RollingFileAppender`][file_appender] that can
//! be used with _or_ without the non-blocking writer.
//!
-//! *Compiler support: [requires `rustc` 1.53+][msrv]*
+//! *Compiler support: [requires `rustc` 1.63+][msrv]*
//!
//! [msrv]: #supported-rust-versions
//! [file_appender]: rolling::RollingFileAppender
diff --git a/tracing-appender/src/lib.rs b/tracing-appender/src/lib.rs
--- a/tracing-appender/src/lib.rs
+++ b/tracing-appender/src/lib.rs
@@ -190,7 +190,7 @@ pub(crate) mod sync;
/// });
/// # }
/// ```
-pub fn non_blocking<T: Write + Send + Sync + 'static>(writer: T) -> (NonBlocking, WorkerGuard) {
+pub fn non_blocking<T: Write + Send + 'static>(writer: T) -> (NonBlocking, WorkerGuard) {
NonBlocking::new(writer)
}
diff --git a/tracing-appender/src/non_blocking.rs b/tracing-appender/src/non_blocking.rs
--- a/tracing-appender/src/non_blocking.rs
+++ b/tracing-appender/src/non_blocking.rs
@@ -146,11 +146,11 @@ impl NonBlocking {
///
/// [default]: NonBlockingBuilder::default
/// [builder]: NonBlockingBuilder
- pub fn new<T: Write + Send + Sync + 'static>(writer: T) -> (NonBlocking, WorkerGuard) {
+ pub fn new<T: Write + Send + 'static>(writer: T) -> (NonBlocking, WorkerGuard) {
NonBlockingBuilder::default().finish(writer)
}
- fn create<T: Write + Send + Sync + 'static>(
+ fn create<T: Write + Send + 'static>(
writer: T,
buffered_lines_limit: usize,
is_lossy: bool,
diff --git a/tracing-appender/src/non_blocking.rs b/tracing-appender/src/non_blocking.rs
--- a/tracing-appender/src/non_blocking.rs
+++ b/tracing-appender/src/non_blocking.rs
@@ -221,7 +221,7 @@ impl NonBlockingBuilder {
}
/// Completes the builder, returning the configured `NonBlocking`.
- pub fn finish<T: Write + Send + Sync + 'static>(self, writer: T) -> (NonBlocking, WorkerGuard) {
+ pub fn finish<T: Write + Send + 'static>(self, writer: T) -> (NonBlocking, WorkerGuard) {
NonBlocking::create(
writer,
self.buffered_lines_limit,
diff --git a/tracing-appender/src/worker.rs b/tracing-appender/src/worker.rs
--- a/tracing-appender/src/worker.rs
+++ b/tracing-appender/src/worker.rs
@@ -4,7 +4,7 @@ use std::fmt::Debug;
use std::io::Write;
use std::{io, thread};
-pub(crate) struct Worker<T: Write + Send + Sync + 'static> {
+pub(crate) struct Worker<T: Write + Send + 'static> {
writer: T,
receiver: Receiver<Msg>,
shutdown: Receiver<()>,
diff --git a/tracing-appender/src/worker.rs b/tracing-appender/src/worker.rs
--- a/tracing-appender/src/worker.rs
+++ b/tracing-appender/src/worker.rs
@@ -18,7 +18,7 @@ pub(crate) enum WorkerState {
Shutdown,
}
-impl<T: Write + Send + Sync + 'static> Worker<T> {
+impl<T: Write + Send + 'static> Worker<T> {
pub(crate) fn new(receiver: Receiver<Msg>, writer: T, shutdown: Receiver<()>) -> Worker<T> {
Self {
writer,
diff --git a/tracing-attributes/Cargo.toml b/tracing-attributes/Cargo.toml
--- a/tracing-attributes/Cargo.toml
+++ b/tracing-attributes/Cargo.toml
@@ -28,7 +28,7 @@ keywords = ["logging", "tracing", "macro", "instrument", "log"]
license = "MIT"
readme = "README.md"
edition = "2018"
-rust-version = "1.56.0"
+rust-version = "1.63.0"
[lib]
proc-macro = true
diff --git a/tracing-attributes/README.md b/tracing-attributes/README.md
--- a/tracing-attributes/README.md
+++ b/tracing-attributes/README.md
@@ -37,7 +37,7 @@ structured, event-based diagnostic information. This crate provides the
Note that this macro is also re-exported by the main `tracing` crate.
-*Compiler support: [requires `rustc` 1.56+][msrv]*
+*Compiler support: [requires `rustc` 1.63+][msrv]*
[msrv]: #supported-rust-versions
diff --git a/tracing-attributes/src/attr.rs b/tracing-attributes/src/attr.rs
--- a/tracing-attributes/src/attr.rs
+++ b/tracing-attributes/src/attr.rs
@@ -268,19 +268,14 @@ impl Parse for Skips {
}
}
-#[derive(Clone, Debug, Hash, PartialEq, Eq)]
+#[derive(Clone, Debug, Hash, PartialEq, Eq, Default)]
pub(crate) enum FormatMode {
+ #[default]
Default,
Display,
Debug,
}
-impl Default for FormatMode {
- fn default() -> Self {
- FormatMode::Default
- }
-}
-
#[derive(Clone, Debug)]
pub(crate) struct Fields(pub(crate) Punctuated<Field, Token![,]>);
diff --git a/tracing-attributes/src/expand.rs b/tracing-attributes/src/expand.rs
--- a/tracing-attributes/src/expand.rs
+++ b/tracing-attributes/src/expand.rs
@@ -422,10 +422,13 @@ impl RecordType {
"i32",
"u64",
"i64",
+ "u128",
+ "i128",
"f32",
"f64",
"usize",
"isize",
+ "String",
"NonZeroU8",
"NonZeroI8",
"NonZeroU16",
diff --git a/tracing-attributes/src/expand.rs b/tracing-attributes/src/expand.rs
--- a/tracing-attributes/src/expand.rs
+++ b/tracing-attributes/src/expand.rs
@@ -434,6 +437,8 @@ impl RecordType {
"NonZeroI32",
"NonZeroU64",
"NonZeroI64",
+ "NonZeroU128",
+ "NonZeroI128",
"NonZeroUsize",
"NonZeroIsize",
"Wrapping",
diff --git a/tracing-attributes/src/lib.rs b/tracing-attributes/src/lib.rs
--- a/tracing-attributes/src/lib.rs
+++ b/tracing-attributes/src/lib.rs
@@ -6,7 +6,7 @@
//!
//! Note that this macro is also re-exported by the main `tracing` crate.
//!
-//! *Compiler support: [requires `rustc` 1.56+][msrv]*
+//! *Compiler support: [requires `rustc` 1.63+][msrv]*
//!
//! [msrv]: #supported-rust-versions
//!
diff --git a/tracing-core/Cargo.toml b/tracing-core/Cargo.toml
--- a/tracing-core/Cargo.toml
+++ b/tracing-core/Cargo.toml
@@ -24,7 +24,7 @@ categories = [
]
keywords = ["logging", "tracing", "profiling"]
edition = "2018"
-rust-version = "1.56.0"
+rust-version = "1.63.0"
[features]
default = ["std", "valuable/std"]
diff --git a/tracing-core/README.md b/tracing-core/README.md
--- a/tracing-core/README.md
+++ b/tracing-core/README.md
@@ -53,7 +53,7 @@ The crate provides:
In addition, it defines the global callsite registry and per-thread current
dispatcher which other components of the tracing system rely on.
-*Compiler support: [requires `rustc` 1.56+][msrv]*
+*Compiler support: [requires `rustc` 1.63+][msrv]*
[msrv]: #supported-rust-versions
diff --git a/tracing-core/src/lib.rs b/tracing-core/src/lib.rs
--- a/tracing-core/src/lib.rs
+++ b/tracing-core/src/lib.rs
@@ -23,7 +23,7 @@
//! In addition, it defines the global callsite registry and per-thread current
//! dispatcher which other components of the tracing system rely on.
//!
-//! *Compiler support: [requires `rustc` 1.56+][msrv]*
+//! *Compiler support: [requires `rustc` 1.63+][msrv]*
//!
//! [msrv]: #supported-rust-versions
//!
diff --git a/tracing-core/src/metadata.rs b/tracing-core/src/metadata.rs
--- a/tracing-core/src/metadata.rs
+++ b/tracing-core/src/metadata.rs
@@ -222,10 +222,9 @@ pub struct Level(LevelInner);
/// A filter comparable to a verbosity [`Level`].
///
-/// If a [`Level`] is considered less than a `LevelFilter`, it should be
-/// considered enabled; if greater than or equal to the `LevelFilter`,
-/// that level is disabled. See [`LevelFilter::current`] for more
-/// details.
+/// If a [`Level`] is considered less than or equal to a `LevelFilter`, it
+/// should be considered enabled; if greater than the `LevelFilter`, that level
+/// is disabled. See [`LevelFilter::current`] for more details.
///
/// Note that this is essentially identical to the `Level` type, but with the
/// addition of an [`OFF`] level that completely disables all trace
diff --git a/tracing-error/Cargo.toml b/tracing-error/Cargo.toml
--- a/tracing-error/Cargo.toml
+++ b/tracing-error/Cargo.toml
@@ -32,7 +32,7 @@ keywords = [
"backtrace"
]
edition = "2018"
-rust-version = "1.56.0"
+rust-version = "1.63.0"
[features]
default = ["traced-error"]
diff --git a/tracing-error/README.md b/tracing-error/README.md
--- a/tracing-error/README.md
+++ b/tracing-error/README.md
@@ -48,7 +48,7 @@ The crate provides the following:
**Note**: This crate is currently experimental.
-*Compiler support: [requires `rustc` 1.56+][msrv]*
+*Compiler support: [requires `rustc` 1.63+][msrv]*
[msrv]: #supported-rust-versions
diff --git a/tracing-error/src/lib.rs b/tracing-error/src/lib.rs
--- a/tracing-error/src/lib.rs
+++ b/tracing-error/src/lib.rs
@@ -18,7 +18,7 @@
//!
//! **Note**: This crate is currently experimental.
//!
-//! *Compiler support: [requires `rustc` 1.56+][msrv]*
+//! *Compiler support: [requires `rustc` 1.63+][msrv]*
//!
//! [msrv]: #supported-rust-versions
//!
diff --git a/tracing-flame/Cargo.toml b/tracing-flame/Cargo.toml
--- a/tracing-flame/Cargo.toml
+++ b/tracing-flame/Cargo.toml
@@ -19,7 +19,7 @@ categories = [
"asynchronous",
]
keywords = ["tracing", "subscriber", "flamegraph", "profiling"]
-rust-version = "1.56.0"
+rust-version = "1.63.0"
[features]
default = ["smallvec"]
diff --git a/tracing-flame/README.md b/tracing-flame/README.md
--- a/tracing-flame/README.md
+++ b/tracing-flame/README.md
@@ -26,7 +26,7 @@ flamegraph/flamechart. Flamegraphs/flamecharts are useful for identifying perfor
bottlenecks in an application. For more details, see Brendan Gregg's [post]
on flamegraphs.
-*Compiler support: [requires `rustc` 1.56+][msrv]*
+*Compiler support: [requires `rustc` 1.63+][msrv]*
[msrv]: #supported-rust-versions
[post]: http://www.brendangregg.com/flamegraphs.html
diff --git a/tracing-flame/src/lib.rs b/tracing-flame/src/lib.rs
--- a/tracing-flame/src/lib.rs
+++ b/tracing-flame/src/lib.rs
@@ -10,7 +10,7 @@
//! issues bottlenecks in an application. For more details, see Brendan Gregg's [post]
//! on flamegraphs.
//!
-//! *Compiler support: [requires `rustc` 1.56+][msrv]*
+//! *Compiler support: [requires `rustc` 1.63+][msrv]*
//!
//! [msrv]: #supported-rust-versions
//! [post]: http://www.brendangregg.com/flamegraphs.html
diff --git a/tracing-futures/Cargo.toml b/tracing-futures/Cargo.toml
--- a/tracing-futures/Cargo.toml
+++ b/tracing-futures/Cargo.toml
@@ -16,7 +16,7 @@ categories = [
]
keywords = ["logging", "profiling", "tracing", "futures", "async"]
license = "MIT"
-rust-version = "1.56.0"
+rust-version = "1.63.0"
[features]
default = ["std-future", "std"]
diff --git a/tracing-futures/src/lib.rs b/tracing-futures/src/lib.rs
--- a/tracing-futures/src/lib.rs
+++ b/tracing-futures/src/lib.rs
@@ -15,7 +15,7 @@
//! * [`WithSubscriber`] allows a `tracing` [`Subscriber`] to be attached to a
//! future, sink, stream, or executor.
//!
-//! *Compiler support: [requires `rustc` 1.56+][msrv]*
+//! *Compiler support: [requires `rustc` 1.63+][msrv]*
//!
//! [msrv]: #supported-rust-versions
//!
diff --git a/tracing-journald/Cargo.toml b/tracing-journald/Cargo.toml
--- a/tracing-journald/Cargo.toml
+++ b/tracing-journald/Cargo.toml
@@ -13,7 +13,7 @@ categories = [
"development-tools::profiling",
]
keywords = ["tracing", "journald"]
-rust-version = "1.56.0"
+rust-version = "1.63.0"
[dependencies]
libc = "0.2.126"
diff --git a/tracing-journald/README.md b/tracing-journald/README.md
--- a/tracing-journald/README.md
+++ b/tracing-journald/README.md
@@ -28,7 +28,7 @@ scoped, structured, and async-aware diagnostics. `tracing-journald` provides a
and events to [`systemd-journald`][journald], on Linux distributions that use
`systemd`.
-*Compiler support: [requires `rustc` 1.56+][msrv]*
+*Compiler support: [requires `rustc` 1.63+][msrv]*
[msrv]: #supported-rust-versions
[`tracing`]: https://crates.io/crates/tracing
diff --git a/tracing-journald/src/lib.rs b/tracing-journald/src/lib.rs
--- a/tracing-journald/src/lib.rs
+++ b/tracing-journald/src/lib.rs
@@ -11,7 +11,7 @@
//! and events to [`systemd-journald`][journald], on Linux distributions that
//! use `systemd`.
//!
-//! *Compiler support: [requires `rustc` 1.56+][msrv]*
+//! *Compiler support: [requires `rustc` 1.63+][msrv]*
//!
//! [msrv]: #supported-rust-versions
//! [`tracing`]: https://crates.io/crates/tracing
diff --git a/tracing-log/Cargo.toml b/tracing-log/Cargo.toml
--- a/tracing-log/Cargo.toml
+++ b/tracing-log/Cargo.toml
@@ -15,7 +15,7 @@ categories = [
keywords = ["logging", "tracing", "log"]
license = "MIT"
readme = "README.md"
-rust-version = "1.56.0"
+rust-version = "1.63.0"
[features]
default = ["log-tracer", "std"]
diff --git a/tracing-log/Cargo.toml b/tracing-log/Cargo.toml
--- a/tracing-log/Cargo.toml
+++ b/tracing-log/Cargo.toml
@@ -28,7 +28,7 @@ tracing-core = { path = "../tracing-core", version = "0.1.28"}
log = { version = "0.4.17" }
once_cell = "1.13.0"
lru = { version = "0.7.7", optional = true }
-ahash = { version = "0.7.6", optional = true }
+ahash = { version = "0.7.7", optional = true }
[dev-dependencies]
tracing = { path = "../tracing", version = "0.1.35"}
diff --git a/tracing-log/src/lib.rs b/tracing-log/src/lib.rs
--- a/tracing-log/src/lib.rs
+++ b/tracing-log/src/lib.rs
@@ -14,7 +14,7 @@
//! - [`LogTracer`], a [`log::Log`] implementation that consumes [`log::Record`]s
//! and outputs them as [`tracing::Event`].
//!
-//! *Compiler support: [requires `rustc` 1.56+][msrv]*
+//! *Compiler support: [requires `rustc` 1.63+][msrv]*
//!
//! [msrv]: #supported-rust-versions
//!
diff --git a/tracing-macros/Cargo.toml b/tracing-macros/Cargo.toml
--- a/tracing-macros/Cargo.toml
+++ b/tracing-macros/Cargo.toml
@@ -15,7 +15,7 @@ categories = [
]
keywords = ["logging", "tracing"]
license = "MIT"
-rust-version = "1.56.0"
+rust-version = "1.63.0"
[dependencies]
tracing = { path = "../tracing", version = "0.1.35" }
diff --git a/tracing-mock/Cargo.toml b/tracing-mock/Cargo.toml
--- a/tracing-mock/Cargo.toml
+++ b/tracing-mock/Cargo.toml
@@ -14,7 +14,7 @@ readme = "README.md"
repository = "https://github.com/tokio-rs/tracing"
homepage = "https://tokio.rs"
edition = "2018"
-rust-version = "1.56.0"
+rust-version = "1.63.0"
publish = false
[dependencies]
diff --git a/tracing-serde/Cargo.toml b/tracing-serde/Cargo.toml
--- a/tracing-serde/Cargo.toml
+++ b/tracing-serde/Cargo.toml
@@ -16,7 +16,7 @@ categories = [
"encoding",
]
keywords = ["logging", "tracing", "serialization"]
-rust-version = "1.56.0"
+rust-version = "1.63.0"
[features]
valuable = ["valuable_crate", "valuable-serde", "tracing-core/valuable"]
diff --git a/tracing-serde/README.md b/tracing-serde/README.md
--- a/tracing-serde/README.md
+++ b/tracing-serde/README.md
@@ -36,7 +36,7 @@ and tracing data to monitor your services in production.
The `tracing` crate provides the APIs necessary for instrumenting
libraries and applications to emit trace data.
-*Compiler support: [requires `rustc` 1.56+][msrv]*
+*Compiler support: [requires `rustc` 1.63+][msrv]*
[msrv]: #supported-rust-versions
diff --git a/tracing-serde/src/lib.rs b/tracing-serde/src/lib.rs
--- a/tracing-serde/src/lib.rs
+++ b/tracing-serde/src/lib.rs
@@ -32,7 +32,7 @@
//! The `tracing` crate provides the APIs necessary for instrumenting
//! libraries and applications to emit trace data.
//!
-//! *Compiler support: [requires `rustc` 1.56+][msrv]*
+//! *Compiler support: [requires `rustc` 1.63+][msrv]*
//!
//! [msrv]: #supported-rust-versions
//!
diff --git a/tracing-subscriber/Cargo.toml b/tracing-subscriber/Cargo.toml
--- a/tracing-subscriber/Cargo.toml
+++ b/tracing-subscriber/Cargo.toml
@@ -20,7 +20,7 @@ categories = [
"asynchronous",
]
keywords = ["logging", "tracing", "metrics", "subscriber"]
-rust-version = "1.56.0"
+rust-version = "1.63.0"
[features]
diff --git a/tracing-subscriber/src/filter/env/builder.rs b/tracing-subscriber/src/filter/env/builder.rs
--- a/tracing-subscriber/src/filter/env/builder.rs
+++ b/tracing-subscriber/src/filter/env/builder.rs
@@ -170,15 +170,16 @@ impl Builder {
self.parse_lossy(var)
}
- /// Returns a new [`EnvFilter`] from the directives in the in the configured
- /// environment variable, or an error if the environment variable is not set
- /// or contains invalid directives.
+ /// Returns a new [`EnvFilter`] from the directives in the configured
+ /// environment variable. If the environment variable is unset, no directive is added.
+ ///
+ /// An error is returned if the environment contains invalid directives.
pub fn from_env(&self) -> Result<EnvFilter, FromEnvError> {
let var = env::var(self.env_var_name()).unwrap_or_default();
self.parse(var).map_err(Into::into)
}
- /// Returns a new [`EnvFilter`] from the directives in the in the configured
+ /// Returns a new [`EnvFilter`] from the directives in the configured
/// environment variable, or an error if the environment variable is not set
/// or contains invalid directives.
pub fn try_from_env(&self) -> Result<EnvFilter, FromEnvError> {
diff --git a/tracing-subscriber/src/fmt/mod.rs b/tracing-subscriber/src/fmt/mod.rs
--- a/tracing-subscriber/src/fmt/mod.rs
+++ b/tracing-subscriber/src/fmt/mod.rs
@@ -16,7 +16,7 @@
//! tracing-subscriber = "0.3"
//! ```
//!
-//! *Compiler support: [requires `rustc` 1.56+][msrv]*
+//! *Compiler support: [requires `rustc` 1.63+][msrv]*
//!
//! [msrv]: super#supported-rust-versions
//!
diff --git a/tracing-subscriber/src/lib.rs b/tracing-subscriber/src/lib.rs
--- a/tracing-subscriber/src/lib.rs
+++ b/tracing-subscriber/src/lib.rs
@@ -10,7 +10,7 @@
//! `tracing-subscriber` is intended for use by both `Subscriber` authors and
//! application authors using `tracing` to instrument their applications.
//!
-//! *Compiler support: [requires `rustc` 1.56+][msrv]*
+//! *Compiler support: [requires `rustc` 1.63+][msrv]*
//!
//! [msrv]: #supported-rust-versions
//!
diff --git a/tracing-tower/Cargo.toml b/tracing-tower/Cargo.toml
--- a/tracing-tower/Cargo.toml
+++ b/tracing-tower/Cargo.toml
@@ -15,7 +15,7 @@ categories = [
]
keywords = ["logging", "tracing"]
license = "MIT"
-rust-version = "1.56.0"
+rust-version = "1.63.0"
[features]
default = ["tower-layer", "tower-make"]
diff --git a/tracing/Cargo.toml b/tracing/Cargo.toml
--- a/tracing/Cargo.toml
+++ b/tracing/Cargo.toml
@@ -25,7 +25,7 @@ categories = [
]
keywords = ["logging", "tracing", "metrics", "async"]
edition = "2018"
-rust-version = "1.56.0"
+rust-version = "1.63.0"
[dependencies]
tracing-core = { path = "../tracing-core", version = "0.1.32", default-features = false }
diff --git a/tracing/README.md b/tracing/README.md
--- a/tracing/README.md
+++ b/tracing/README.md
@@ -47,7 +47,7 @@ data as well as textual messages.
The `tracing` crate provides the APIs necessary for instrumenting libraries
and applications to emit trace data.
-*Compiler support: [requires `rustc` 1.56+][msrv]*
+*Compiler support: [requires `rustc` 1.63+][msrv]*
[msrv]: #supported-rust-versions
diff --git a/tracing/src/lib.rs b/tracing/src/lib.rs
--- a/tracing/src/lib.rs
+++ b/tracing/src/lib.rs
@@ -19,7 +19,7 @@
//! The `tracing` crate provides the APIs necessary for instrumenting libraries
//! and applications to emit trace data.
//!
-//! *Compiler support: [requires `rustc` 1.56+][msrv]*
+//! *Compiler support: [requires `rustc` 1.63+][msrv]*
//!
//! [msrv]: #supported-rust-versions
//! # Core Concepts
diff --git a/tracing/src/lib.rs b/tracing/src/lib.rs
--- a/tracing/src/lib.rs
+++ b/tracing/src/lib.rs
@@ -984,7 +984,10 @@ pub mod subscriber;
pub mod __macro_support {
pub use crate::callsite::Callsite;
use crate::{subscriber::Interest, Metadata};
- pub use core::concat;
+ // Re-export the `core` functions that are used in macros. This allows
+ // a crate to be named `core` and avoid name clashes.
+ // See here: https://github.com/tokio-rs/tracing/issues/2761
+ pub use core::{concat, format_args, iter::Iterator, option::Option};
/// Callsite implementation used by macro-generated code.
///
diff --git a/tracing/src/macros.rs b/tracing/src/macros.rs
--- a/tracing/src/macros.rs
+++ b/tracing/src/macros.rs
@@ -627,7 +627,7 @@ macro_rules! event {
target: $target,
parent: $parent,
$lvl,
- { message = ::core::format_args!($($arg)+), $($fields)* }
+ { message = $crate::__macro_support::format_args!($($arg)+), $($fields)* }
)
);
(name: $name:expr, target: $target:expr, parent: $parent:expr, $lvl:expr, $($k:ident).+ = $($fields:tt)* ) => (
diff --git a/tracing/src/macros.rs b/tracing/src/macros.rs
--- a/tracing/src/macros.rs
+++ b/tracing/src/macros.rs
@@ -678,7 +678,7 @@ macro_rules! event {
name: $name,
target: $target,
$lvl,
- { message = ::core::format_args!($($arg)+), $($fields)* }
+ { message = $crate::__macro_support::format_args!($($arg)+), $($fields)* }
)
);
(name: $name:expr, target: $target:expr, $lvl:expr, $($k:ident).+ = $($fields:tt)* ) => (
diff --git a/tracing/src/macros.rs b/tracing/src/macros.rs
--- a/tracing/src/macros.rs
+++ b/tracing/src/macros.rs
@@ -736,7 +736,7 @@ macro_rules! event {
target: $target,
parent: $parent,
$lvl,
- { message = ::core::format_args!($($arg)+), $($fields)* }
+ { message = $crate::__macro_support::format_args!($($arg)+), $($fields)* }
)
);
(target: $target:expr, parent: $parent:expr, $lvl:expr, $($k:ident).+ = $($fields:tt)* ) => (
diff --git a/tracing/src/macros.rs b/tracing/src/macros.rs
--- a/tracing/src/macros.rs
+++ b/tracing/src/macros.rs
@@ -789,7 +789,7 @@ macro_rules! event {
name: $name,
parent: $parent,
$lvl,
- { message = ::core::format_args!($($arg)+), $($fields)* }
+ { message = $crate::__macro_support::format_args!($($arg)+), $($fields)* }
)
);
(name: $name:expr, parent: $parent:expr, $lvl:expr, $($k:ident).+ = $($fields:tt)* ) => (
diff --git a/tracing/src/macros.rs b/tracing/src/macros.rs
--- a/tracing/src/macros.rs
+++ b/tracing/src/macros.rs
@@ -839,7 +839,7 @@ macro_rules! event {
$crate::event!(
name: $name,
$lvl,
- { message = ::core::format_args!($($arg)+), $($fields)* }
+ { message = $crate::__macro_support::format_args!($($arg)+), $($fields)* }
)
);
(name: $name:expr, $lvl:expr, $($k:ident).+ = $($fields:tt)* ) => (
diff --git a/tracing/src/macros.rs b/tracing/src/macros.rs
--- a/tracing/src/macros.rs
+++ b/tracing/src/macros.rs
@@ -894,7 +894,7 @@ macro_rules! event {
$crate::event!(
target: $target,
$lvl,
- { message = ::core::format_args!($($arg)+), $($fields)* }
+ { message = $crate::__macro_support::format_args!($($arg)+), $($fields)* }
)
);
(target: $target:expr, $lvl:expr, $($k:ident).+ = $($fields:tt)* ) => (
diff --git a/tracing/src/macros.rs b/tracing/src/macros.rs
--- a/tracing/src/macros.rs
+++ b/tracing/src/macros.rs
@@ -910,7 +910,7 @@ macro_rules! event {
target: module_path!(),
parent: $parent,
$lvl,
- { message = ::core::format_args!($($arg)+), $($fields)* }
+ { message = $crate::__macro_support::format_args!($($arg)+), $($fields)* }
)
);
(parent: $parent:expr, $lvl:expr, $($k:ident).+ = $($field:tt)*) => (
diff --git a/tracing/src/macros.rs b/tracing/src/macros.rs
--- a/tracing/src/macros.rs
+++ b/tracing/src/macros.rs
@@ -970,7 +970,7 @@ macro_rules! event {
$crate::event!(
target: module_path!(),
$lvl,
- { message = ::core::format_args!($($arg)+), $($fields)* }
+ { message = $crate::__macro_support::format_args!($($arg)+), $($fields)* }
)
);
( $lvl:expr, { $($fields:tt)* }, $($arg:tt)+ ) => (
diff --git a/tracing/src/macros.rs b/tracing/src/macros.rs
--- a/tracing/src/macros.rs
+++ b/tracing/src/macros.rs
@@ -2802,79 +2802,79 @@ macro_rules! valueset {
// };
(@ { $(,)* $($out:expr),* }, $next:expr, $($k:ident).+ = ?$val:expr, $($rest:tt)*) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&debug(&$val) as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&debug(&$val) as &dyn Value)) },
$next,
$($rest)*
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, $($k:ident).+ = %$val:expr, $($rest:tt)*) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&display(&$val) as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&display(&$val) as &dyn Value)) },
$next,
$($rest)*
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, $($k:ident).+ = $val:expr, $($rest:tt)*) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&$val as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&$val as &dyn Value)) },
$next,
$($rest)*
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, $($k:ident).+, $($rest:tt)*) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&$($k).+ as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&$($k).+ as &dyn Value)) },
$next,
$($rest)*
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, ?$($k:ident).+, $($rest:tt)*) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&debug(&$($k).+) as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&debug(&$($k).+) as &dyn Value)) },
$next,
$($rest)*
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, %$($k:ident).+, $($rest:tt)*) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&display(&$($k).+) as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&display(&$($k).+) as &dyn Value)) },
$next,
$($rest)*
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, $($k:ident).+ = ?$val:expr) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&debug(&$val) as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&debug(&$val) as &dyn Value)) },
$next,
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, $($k:ident).+ = %$val:expr) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&display(&$val) as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&display(&$val) as &dyn Value)) },
$next,
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, $($k:ident).+ = $val:expr) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&$val as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&$val as &dyn Value)) },
$next,
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, $($k:ident).+) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&$($k).+ as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&$($k).+ as &dyn Value)) },
$next,
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, ?$($k:ident).+) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&debug(&$($k).+) as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&debug(&$($k).+) as &dyn Value)) },
$next,
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, %$($k:ident).+) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&display(&$($k).+) as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&display(&$($k).+) as &dyn Value)) },
$next,
)
};
diff --git a/tracing/src/macros.rs b/tracing/src/macros.rs
--- a/tracing/src/macros.rs
+++ b/tracing/src/macros.rs
@@ -2882,40 +2882,40 @@ macro_rules! valueset {
// Handle literal names
(@ { $(,)* $($out:expr),* }, $next:expr, $k:literal = ?$val:expr, $($rest:tt)*) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&debug(&$val) as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&debug(&$val) as &dyn Value)) },
$next,
$($rest)*
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, $k:literal = %$val:expr, $($rest:tt)*) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&display(&$val) as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&display(&$val) as &dyn Value)) },
$next,
$($rest)*
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, $k:literal = $val:expr, $($rest:tt)*) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&$val as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&$val as &dyn Value)) },
$next,
$($rest)*
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, $k:literal = ?$val:expr) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&debug(&$val) as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&debug(&$val) as &dyn Value)) },
$next,
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, $k:literal = %$val:expr) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&display(&$val) as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&display(&$val) as &dyn Value)) },
$next,
)
};
(@ { $(,)* $($out:expr),* }, $next:expr, $k:literal = $val:expr) => {
$crate::valueset!(
- @ { $($out),*, (&$next, ::core::option::Option::Some(&$val as &dyn Value)) },
+ @ { $($out),*, (&$next, $crate::__macro_support::Option::Some(&$val as &dyn Value)) },
$next,
)
};
diff --git a/tracing/src/macros.rs b/tracing/src/macros.rs
--- a/tracing/src/macros.rs
+++ b/tracing/src/macros.rs
@@ -2963,7 +2963,7 @@ macro_rules! valueset {
// Remainder is unparsable, but exists --- must be format args!
(@ { $(,)* $($out:expr),* }, $next:expr, $($rest:tt)+) => {
- $crate::valueset!(@ { (&$next, ::core::option::Option::Some(&::core::format_args!($($rest)+) as &dyn Value)), $($out),* }, $next, )
+ $crate::valueset!(@ { (&$next, $crate::__macro_support::Option::Some(&$crate::__macro_support::format_args!($($rest)+) as &dyn Value)), $($out),* }, $next, )
};
// === entry ===
diff --git a/tracing/src/macros.rs b/tracing/src/macros.rs
--- a/tracing/src/macros.rs
+++ b/tracing/src/macros.rs
@@ -2974,7 +2974,7 @@ macro_rules! valueset {
let mut iter = $fields.iter();
$fields.value_set($crate::valueset!(
@ { },
- ::core::iter::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
+ $crate::__macro_support::Iterator::next(&mut iter).expect("FieldSet corrupted (this is a bug)"),
$($kvs)+
))
}
| diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -40,16 +36,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- - uses: actions-rs/toolchain@v1
- with:
- toolchain: stable
- profile: minimal
- override: true
+ - uses: dtolnay/rust-toolchain@stable
- name: Check
- uses: actions-rs/cargo@v1
- with:
- command: check
- args: --all --tests --benches
+ run: cargo check --all --tests --benches
style:
# Check style.
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -58,17 +47,11 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- - uses: actions-rs/toolchain@v1
+ - uses: dtolnay/rust-toolchain@stable
with:
- toolchain: stable
components: rustfmt
- profile: minimal
- override: true
- name: rustfmt
- uses: actions-rs/cargo@v1
- with:
- command: fmt
- args: --all -- --check
+ run: cargo fmt --all -- --check
warnings:
# Check for any warnings. This is informational and thus is allowed to fail.
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -76,46 +59,15 @@ jobs:
needs: check
steps:
- uses: actions/checkout@v3
- - uses: actions-rs/toolchain@v1
+ - uses: dtolnay/rust-toolchain@stable
with:
- toolchain: stable
components: clippy
- profile: minimal
- name: Clippy
uses: actions-rs/clippy-check@v1
with:
token: ${{ secrets.GITHUB_TOKEN }}
args: --all --examples --tests --benches -- -D warnings
- minimal-versions:
- # Check for minimal-versions errors where a dependency is too
- # underconstrained to build on the minimal supported version of all
- # dependencies in the dependency graph.
- name: cargo check (-Zminimal-versions)
- needs: check
- runs-on: ubuntu-latest
- steps:
- - uses: actions/checkout@v3
- - uses: actions-rs/toolchain@v1
- with:
- toolchain: nightly
- profile: minimal
- override: true
- - name: install cargo-hack
- uses: taiki-e/install-action@cargo-hack
- - name: "check --all-features -Z minimal-versions"
- run: |
- # Remove dev-dependencies from Cargo.toml to prevent the next `cargo update`
- # from determining minimal versions based on dev-dependencies.
- cargo hack --remove-dev-deps --workspace
- # Update Cargo.lock to minimal version dependencies.
- cargo update -Z minimal-versions
- cargo hack check \
- --package tracing \
- --package tracing-core \
- --package tracing-subscriber \
- --all-features --ignore-private
-
cargo-hack:
needs: check
name: cargo check (feature combinations)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -191,32 +143,51 @@ jobs:
- tracing-tower
- tracing
toolchain:
- - 1.56.0
+ - 1.63.0
- stable
steps:
- uses: actions/checkout@v3
- - name: "install Rust ${{ env.APPENDER_MSRV }}"
- uses: actions-rs/toolchain@v1
- with:
- toolchain: ${{ env.APPENDER_MSRV }}
- profile: minimal
- - name: "install Rust nightly"
- uses: actions-rs/toolchain@v1
- with:
- toolchain: nightly
- profile: minimal
- - name: Select minimal versions
- uses: actions-rs/cargo@v1
+ - name: install Rust nightly
+ uses: dtolnay/rust-toolchain@nightly
+ - name: "install Rust ${{ matrix.toolchain }}"
+ uses: dtolnay/rust-toolchain@master
with:
- command: update
- args: -Z minimal-versions
- toolchain: nightly
- - name: Check
- uses: actions-rs/cargo@v1
- with:
- command: check
- args: --all-features --locked -p tracing-appender
- toolchain: ${{ env.APPENDER_MSRV }}
+ toolchain: ${{ matrix.toolchain }}
+ - name: install cargo-hack
+ uses: taiki-e/install-action@cargo-hack
+ - name: install cargo-minimal-versions
+ uses: taiki-e/install-action@cargo-minimal-versions
+ - name: cargo minimal-versions check
+ working-directory: ${{ matrix.subcrate }}
+ # tracing and tracing-subscriber have too many features to be checked by
+ # cargo-hack --feature-powerset with all features in the powerset, so
+ # exclude some
+ run: |
+ CARGO_MINVER=(cargo minimal-versions check --feature-powerset --no-dev-deps)
+ case "${{ matrix.subcrate }}" in
+ tracing)
+ EXCLUDE_FEATURES=(
+ max_level_off max_level_error max_level_warn max_level_info
+ max_level_debug max_level_trace release_max_level_off
+ release_max_level_error release_max_level_warn
+ release_max_level_info release_max_level_debug
+ release_max_level_trace
+ )
+ ${CARGO_MINVER[@]} --exclude-features "${EXCLUDE_FEATURES[*]}"
+ ;;
+ tracing-subscriber)
+ INCLUDE_FEATURES=(fmt ansi json registry env-filter)
+ ${CARGO_MINVER[@]} --include-features "${INCLUDE_FEATURES[*]}"
+ ;;
+ tracing-futures)
+ EXCLUDE_FEATURES=(futures-01 futures_01 tokio tokio_01)
+ ${CARGO_MINVER[@]} --exclude-features "${EXCLUDE_FEATURES[*]}"
+ ;;
+ *)
+ ${CARGO_MINVER[@]}
+ ;;
+ esac
+ shell: bash
### test jobs #############################################################
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -240,11 +211,10 @@ jobs:
runs-on: ${{ matrix.os }}
steps:
- uses: actions/checkout@v3
- - uses: actions-rs/toolchain@v1
+ - name: "install Rust ${{ matrix.rust }}"
+ uses: dtolnay/rust-toolchain@master
with:
toolchain: ${{ matrix.rust }}
- profile: minimal
- override: true
- name: install cargo-nextest
uses: taiki-e/install-action@nextest
- name: Run tests
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -283,16 +253,11 @@ jobs:
fail-fast: false
steps:
- uses: actions/checkout@v3
- - uses: actions-rs/toolchain@v1
+ - uses: dtolnay/rust-toolchain@stable
with:
target: wasm32-unknown-unknown
- toolchain: stable
- override: true
- name: build all tests
- uses: actions-rs/cargo@v1
- with:
- command: test
- args: --no-run -p ${{ matrix.subcrate }}
+ run: cargo test --no-run -p ${{ matrix.subcrate }}
test-wasm:
name: cargo test (wasm)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -304,11 +269,9 @@ jobs:
- tracing
steps:
- uses: actions/checkout@v3
- - uses: actions-rs/toolchain@v1
+ - uses: dtolnay/rust-toolchain@stable
with:
target: wasm32-unknown-unknown
- toolchain: stable
- override: true
- name: install test runner for wasm
uses: taiki-e/install-action@wasm-pack
- name: run wasm tests
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -316,22 +279,14 @@ jobs:
test-features-stable:
# Feature flag tests that run on stable Rust.
- # TODO(david): once tracing's MSRV goes up to Rust 1.51, we should be able to switch to
- # using cargo's V2 feature resolver (https://doc.rust-lang.org/cargo/reference/resolver.html#resolver-versions)
- # and avoid cd'ing into each crate's directory.
name: cargo test (feature-specific)
needs: check
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v3
- - uses: actions-rs/toolchain@v1
- with:
- toolchain: stable
- profile: minimal
- override: true
+ - uses: dtolnay/rust-toolchain@stable
- name: "Test log support"
run: cargo test
- working-directory: "tracing/test-log-support"
- name: "Test static max level"
run: cargo test
working-directory: "tracing/test_static_max_level_features"
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -340,21 +295,12 @@ jobs:
working-directory: "tracing/test_static_max_level_features"
- name: "Test tracing-core no-std support"
run: cargo test --no-default-features
- working-directory: tracing-core
- name: "Test tracing no-std support"
- run: cargo test --lib --no-default-features
- working-directory: tracing
+ run: cargo test --no-default-features
# this skips running doctests under the `--no-default-features` flag,
# as rustdoc isn't aware of cargo's feature flags.
- - name: "Test tracing-subscriber no-std support"
+ - name: "Test tracing-subscriber with all features disabled"
run: cargo test --lib --tests --no-default-features
- working-directory: tracing-subscriber
- - name: "Test tracing-subscriber with liballoc only"
- run: cargo test --lib --tests --no-default-features --features "alloc"
- working-directory: tracing-subscriber
- - name: "Test tracing-subscriber with no default features"
- run: cargo test --lib --tests --no-default-features --features "std"
- working-directory: tracing-subscriber
# all required checks except for the main test run (which we only require
# specific matrix combinations from)
diff --git a/.github/workflows/CI.yml b/.github/workflows/CI.yml
--- a/.github/workflows/CI.yml
+++ b/.github/workflows/CI.yml
@@ -363,11 +309,10 @@ jobs:
runs-on: ubuntu-latest
needs:
- style
- - minimal-versions
- cargo-hack
- check-msrv
- test-build-wasm
- test-wasm
- test-features-stable
steps:
- - run: exit 0
+ - run: exit 0
\ No newline at end of file
diff --git a/README.md b/README.md
--- a/README.md
+++ b/README.md
@@ -252,7 +252,7 @@ attachment that `Future::instrument` does.
## Supported Rust Versions
Tracing is built against the latest stable release. The minimum supported
-version is 1.56. The current Tracing version is not guaranteed to build on Rust
+version is 1.63. The current Tracing version is not guaranteed to build on Rust
versions earlier than the minimum supported version.
Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-appender/README.md b/tracing-appender/README.md
--- a/tracing-appender/README.md
+++ b/tracing-appender/README.md
@@ -146,7 +146,7 @@ fn main() {
## Supported Rust Versions
`tracing-appender` is built against the latest stable release. The minimum supported
-version is 1.53. The current `tracing-appender` version is not guaranteed to build on
+version is 1.63. The current `tracing-appender` version is not guaranteed to build on
Rust versions earlier than the minimum supported version.
Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-appender/src/lib.rs b/tracing-appender/src/lib.rs
--- a/tracing-appender/src/lib.rs
+++ b/tracing-appender/src/lib.rs
@@ -124,7 +124,7 @@
//! ## Supported Rust Versions
//!
//! `tracing-appender` is built against the latest stable release. The minimum supported
-//! version is 1.53. The current `tracing-appender` version is not guaranteed to build on
+//! version is 1.63. The current `tracing-appender` version is not guaranteed to build on
//! Rust versions earlier than the minimum supported version.
//!
//! Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-attributes/README.md b/tracing-attributes/README.md
--- a/tracing-attributes/README.md
+++ b/tracing-attributes/README.md
@@ -69,7 +69,7 @@ pub fn my_function(my_arg: usize) {
## Supported Rust Versions
Tracing is built against the latest stable release. The minimum supported
-version is 1.56. The current Tracing version is not guaranteed to build on Rust
+version is 1.63. The current Tracing version is not guaranteed to build on Rust
versions earlier than the minimum supported version.
Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-attributes/src/lib.rs b/tracing-attributes/src/lib.rs
--- a/tracing-attributes/src/lib.rs
+++ b/tracing-attributes/src/lib.rs
@@ -41,7 +41,7 @@
//! ## Supported Rust Versions
//!
//! Tracing is built against the latest stable release. The minimum supported
-//! version is 1.56. The current Tracing version is not guaranteed to build on
+//! version is 1.63. The current Tracing version is not guaranteed to build on
//! Rust versions earlier than the minimum supported version.
//!
//! Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-attributes/tests/instrument.rs b/tracing-attributes/tests/instrument.rs
--- a/tracing-attributes/tests/instrument.rs
+++ b/tracing-attributes/tests/instrument.rs
@@ -51,7 +51,7 @@ fn override_everything() {
#[test]
fn fields() {
#[instrument(target = "my_target", level = "debug")]
- fn my_fn(arg1: usize, arg2: bool) {}
+ fn my_fn(arg1: usize, arg2: bool, arg3: String) {}
let span = expect::span()
.named("my_fn")
diff --git a/tracing-attributes/tests/instrument.rs b/tracing-attributes/tests/instrument.rs
--- a/tracing-attributes/tests/instrument.rs
+++ b/tracing-attributes/tests/instrument.rs
@@ -68,6 +68,7 @@ fn fields() {
expect::field("arg1")
.with_value(&2usize)
.and(expect::field("arg2").with_value(&false))
+ .and(expect::field("arg3").with_value(&"Cool".to_string()))
.only(),
),
)
diff --git a/tracing-attributes/tests/instrument.rs b/tracing-attributes/tests/instrument.rs
--- a/tracing-attributes/tests/instrument.rs
+++ b/tracing-attributes/tests/instrument.rs
@@ -79,6 +80,7 @@ fn fields() {
expect::field("arg1")
.with_value(&3usize)
.and(expect::field("arg2").with_value(&true))
+ .and(expect::field("arg3").with_value(&"Still Cool".to_string()))
.only(),
),
)
diff --git a/tracing-attributes/tests/instrument.rs b/tracing-attributes/tests/instrument.rs
--- a/tracing-attributes/tests/instrument.rs
+++ b/tracing-attributes/tests/instrument.rs
@@ -89,8 +91,8 @@ fn fields() {
.run_with_handle();
with_default(subscriber, || {
- my_fn(2, false);
- my_fn(3, true);
+ my_fn(2, false, "Cool".to_string());
+ my_fn(3, true, "Still Cool".to_string());
});
handle.assert_finished();
diff --git a/tracing-core/README.md b/tracing-core/README.md
--- a/tracing-core/README.md
+++ b/tracing-core/README.md
@@ -99,7 +99,7 @@ The following crate feature flags are available:
## Supported Rust Versions
Tracing is built against the latest stable release. The minimum supported
-version is 1.56. The current Tracing version is not guaranteed to build on Rust
+version is 1.63. The current Tracing version is not guaranteed to build on Rust
versions earlier than the minimum supported version.
Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-core/src/lib.rs b/tracing-core/src/lib.rs
--- a/tracing-core/src/lib.rs
+++ b/tracing-core/src/lib.rs
@@ -92,7 +92,7 @@
//! ## Supported Rust Versions
//!
//! Tracing is built against the latest stable release. The minimum supported
-//! version is 1.56. The current Tracing version is not guaranteed to build on
+//! version is 1.63. The current Tracing version is not guaranteed to build on
//! Rust versions earlier than the minimum supported version.
//!
//! Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-error/README.md b/tracing-error/README.md
--- a/tracing-error/README.md
+++ b/tracing-error/README.md
@@ -186,7 +186,7 @@ fn main() {
## Supported Rust Versions
Tracing is built against the latest stable release. The minimum supported
-version is 1.56. The current Tracing version is not guaranteed to build on Rust
+version is 1.63. The current Tracing version is not guaranteed to build on Rust
versions earlier than the minimum supported version.
Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-error/src/lib.rs b/tracing-error/src/lib.rs
--- a/tracing-error/src/lib.rs
+++ b/tracing-error/src/lib.rs
@@ -167,7 +167,7 @@
//! ## Supported Rust Versions
//!
//! Tracing is built against the latest stable release. The minimum supported
-//! version is 1.56. The current Tracing version is not guaranteed to build on
+//! version is 1.63. The current Tracing version is not guaranteed to build on
//! Rust versions earlier than the minimum supported version.
//!
//! Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-flame/README.md b/tracing-flame/README.md
--- a/tracing-flame/README.md
+++ b/tracing-flame/README.md
@@ -106,7 +106,7 @@ _flamechart_, which _does not_ sort or collapse identical stack frames.
## Supported Rust Versions
Tracing is built against the latest stable release. The minimum supported
-version is 1.56. The current Tracing version is not guaranteed to build on Rust
+version is 1.63. The current Tracing version is not guaranteed to build on Rust
versions earlier than the minimum supported version.
Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-flame/src/lib.rs b/tracing-flame/src/lib.rs
--- a/tracing-flame/src/lib.rs
+++ b/tracing-flame/src/lib.rs
@@ -95,7 +95,7 @@
//! ## Supported Rust Versions
//!
//! Tracing is built against the latest stable release. The minimum supported
-//! version is 1.56. The current Tracing version is not guaranteed to build on
+//! version is 1.63. The current Tracing version is not guaranteed to build on
//! Rust versions earlier than the minimum supported version.
//!
//! Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-futures/README.md b/tracing-futures/README.md
--- a/tracing-futures/README.md
+++ b/tracing-futures/README.md
@@ -51,14 +51,14 @@ The crate provides the following traits:
[`Subscriber`]: https://docs.rs/tracing/latest/tracing/subscriber/index.html
[`tracing`]: https://crates.io/crates/tracing
-*Compiler support: [requires `rustc` 1.56+][msrv]*
+*Compiler support: [requires `rustc` 1.63+][msrv]*
[msrv]: #supported-rust-versions
## Supported Rust Versions
Tracing is built against the latest stable release. The minimum supported
-version is 1.56. The current Tracing version is not guaranteed to build on Rust
+version is 1.63. The current Tracing version is not guaranteed to build on Rust
versions earlier than the minimum supported version.
Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-futures/src/lib.rs b/tracing-futures/src/lib.rs
--- a/tracing-futures/src/lib.rs
+++ b/tracing-futures/src/lib.rs
@@ -59,7 +59,7 @@
//! ## Supported Rust Versions
//!
//! Tracing is built against the latest stable release. The minimum supported
-//! version is 1.56. The current Tracing version is not guaranteed to build on
+//! version is 1.63. The current Tracing version is not guaranteed to build on
//! Rust versions earlier than the minimum supported version.
//!
//! Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-journald/README.md b/tracing-journald/README.md
--- a/tracing-journald/README.md
+++ b/tracing-journald/README.md
@@ -38,7 +38,7 @@ and events to [`systemd-journald`][journald], on Linux distributions that use
## Supported Rust Versions
Tracing is built against the latest stable release. The minimum supported
-version is 1.56. The current Tracing version is not guaranteed to build on Rust
+version is 1.63. The current Tracing version is not guaranteed to build on Rust
versions earlier than the minimum supported version.
Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-journald/src/lib.rs b/tracing-journald/src/lib.rs
--- a/tracing-journald/src/lib.rs
+++ b/tracing-journald/src/lib.rs
@@ -20,7 +20,7 @@
//! ## Supported Rust Versions
//!
//! Tracing is built against the latest stable release. The minimum supported
-//! version is 1.56. The current Tracing version is not guaranteed to build on
+//! version is 1.63. The current Tracing version is not guaranteed to build on
//! Rust versions earlier than the minimum supported version.
//!
//! Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-log/README.md b/tracing-log/README.md
--- a/tracing-log/README.md
+++ b/tracing-log/README.md
@@ -54,14 +54,14 @@ This crate provides:
[`tracing::Subscriber`]: https://docs.rs/tracing/latest/tracing/trait.Subscriber.html
[`tracing::Event`]: https://docs.rs/tracing/latest/tracing/struct.Event.html
-*Compiler support: [requires `rustc` 1.56+][msrv]*
+*Compiler support: [requires `rustc` 1.63+][msrv]*
[msrv]: #supported-rust-versions
## Supported Rust Versions
Tracing is built against the latest stable release. The minimum supported
-version is 1.56. The current Tracing version is not guaranteed to build on Rust
+version is 1.63. The current Tracing version is not guaranteed to build on Rust
versions earlier than the minimum supported version.
Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-log/src/lib.rs b/tracing-log/src/lib.rs
--- a/tracing-log/src/lib.rs
+++ b/tracing-log/src/lib.rs
@@ -75,7 +75,7 @@
//! ## Supported Rust Versions
//!
//! Tracing is built against the latest stable release. The minimum supported
-//! version is 1.56. The current Tracing version is not guaranteed to build on
+//! version is 1.63. The current Tracing version is not guaranteed to build on
//! Rust versions earlier than the minimum supported version.
//!
//! Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-mock/README.md b/tracing-mock/README.md
--- a/tracing-mock/README.md
+++ b/tracing-mock/README.md
@@ -29,7 +29,7 @@ structured, event-based diagnostic information. `tracing-mock` provides
tools for making assertions about what `tracing` diagnostics are emitted
by code under test.
-*Compiler support: [requires `rustc` 1.56+][msrv]*
+*Compiler support: [requires `rustc` 1.63+][msrv]*
[msrv]: #supported-rust-versions
diff --git a/tracing-mock/README.md b/tracing-mock/README.md
--- a/tracing-mock/README.md
+++ b/tracing-mock/README.md
@@ -154,7 +154,7 @@ handle.assert_finished();
## Supported Rust Versions
Tracing is built against the latest stable release. The minimum supported
-version is 1.56. The current Tracing version is not guaranteed to build on Rust
+version is 1.63. The current Tracing version is not guaranteed to build on Rust
versions earlier than the minimum supported version.
Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-serde/README.md b/tracing-serde/README.md
--- a/tracing-serde/README.md
+++ b/tracing-serde/README.md
@@ -97,7 +97,7 @@ trace data.
## Supported Rust Versions
Tracing is built against the latest stable release. The minimum supported
-version is 1.56. The current Tracing version is not guaranteed to build on Rust
+version is 1.63. The current Tracing version is not guaranteed to build on Rust
versions earlier than the minimum supported version.
Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-serde/src/lib.rs b/tracing-serde/src/lib.rs
--- a/tracing-serde/src/lib.rs
+++ b/tracing-serde/src/lib.rs
@@ -142,7 +142,7 @@
//! ## Supported Rust Versions
//!
//! Tracing is built against the latest stable release. The minimum supported
-//! version is 1.56. The current Tracing version is not guaranteed to build on
+//! version is 1.63. The current Tracing version is not guaranteed to build on
//! Rust versions earlier than the minimum supported version.
//!
//! Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-subscriber/README.md b/tracing-subscriber/README.md
--- a/tracing-subscriber/README.md
+++ b/tracing-subscriber/README.md
@@ -21,7 +21,7 @@ Utilities for implementing and composing [`tracing`][tracing] subscribers.
[crates-badge]: https://img.shields.io/crates/v/tracing-subscriber.svg
[crates-url]: https://crates.io/crates/tracing-subscriber
[docs-badge]: https://docs.rs/tracing-subscriber/badge.svg
-[docs-url]: https://docs.rs/tracing-subscriber/0.3.15
+[docs-url]: https://docs.rs/tracing-subscriber/latest
[docs-master-badge]: https://img.shields.io/badge/docs-master-blue
[docs-master-url]: https://tracing-rs.netlify.com/tracing_subscriber
[mit-badge]: https://img.shields.io/badge/license-MIT-blue.svg
diff --git a/tracing-subscriber/README.md b/tracing-subscriber/README.md
--- a/tracing-subscriber/README.md
+++ b/tracing-subscriber/README.md
@@ -32,14 +32,14 @@ Utilities for implementing and composing [`tracing`][tracing] subscribers.
[discord-url]: https://discord.gg/EeF3cQw
[maint-badge]: https://img.shields.io/badge/maintenance-experimental-blue.svg
-*Compiler support: [requires `rustc` 1.56+][msrv]*
+*Compiler support: [requires `rustc` 1.63+][msrv]*
[msrv]: #supported-rust-versions
## Supported Rust Versions
Tracing is built against the latest stable release. The minimum supported
-version is 1.56. The current Tracing version is not guaranteed to build on Rust
+version is 1.63. The current Tracing version is not guaranteed to build on Rust
versions earlier than the minimum supported version.
Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing-subscriber/src/lib.rs b/tracing-subscriber/src/lib.rs
--- a/tracing-subscriber/src/lib.rs
+++ b/tracing-subscriber/src/lib.rs
@@ -138,7 +138,7 @@
//! ## Supported Rust Versions
//!
//! Tracing is built against the latest stable release. The minimum supported
-//! version is 1.56. The current Tracing version is not guaranteed to build on
+//! version is 1.63. The current Tracing version is not guaranteed to build on
//! Rust versions earlier than the minimum supported version.
//!
//! Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing/Cargo.toml b/tracing/Cargo.toml
--- a/tracing/Cargo.toml
+++ b/tracing/Cargo.toml
@@ -40,7 +40,7 @@ log = "0.4.17"
tracing-mock = { path = "../tracing-mock" }
[target.'cfg(target_arch = "wasm32")'.dev-dependencies]
-wasm-bindgen-test = "^0.3"
+wasm-bindgen-test = "0.3.38"
[features]
default = ["std", "attributes"]
diff --git a/tracing/README.md b/tracing/README.md
--- a/tracing/README.md
+++ b/tracing/README.md
@@ -445,7 +445,7 @@ undergoing active development. They may be less stable than `tracing` and
## Supported Rust Versions
Tracing is built against the latest stable release. The minimum supported
-version is 1.56. The current Tracing version is not guaranteed to build on Rust
+version is 1.63. The current Tracing version is not guaranteed to build on Rust
versions earlier than the minimum supported version.
Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing/src/lib.rs b/tracing/src/lib.rs
--- a/tracing/src/lib.rs
+++ b/tracing/src/lib.rs
@@ -871,7 +871,7 @@
//! ## Supported Rust Versions
//!
//! Tracing is built against the latest stable release. The minimum supported
-//! version is 1.56. The current Tracing version is not guaranteed to build on
+//! version is 1.63. The current Tracing version is not guaranteed to build on
//! Rust versions earlier than the minimum supported version.
//!
//! Tracing follows the same compiler support policies as the rest of the Tokio
diff --git a/tracing/tests/macros.rs b/tracing/tests/macros.rs
--- a/tracing/tests/macros.rs
+++ b/tracing/tests/macros.rs
@@ -5,10 +5,6 @@ extern crate tracing;
#[cfg(target_arch = "wasm32")]
extern crate wasm_bindgen_test;
-// TODO: remove this once https://github.com/tokio-rs/tracing/pull/2675#issuecomment-1667628907 is resolved
-#[cfg(target_arch = "wasm32")]
-use ::core::option::Option::None;
-
use tracing::{
callsite, debug, debug_span, enabled, error, error_span, event, event_enabled, info, info_span,
span, span_enabled, trace, trace_span, warn, warn_span, Level,
| `NonBlocking` requires its inner writer to be `Sync` and I'm not sure why
## Bug Report
### Version
`tracing-appender` master (according to tracing.rs) and 0.2.0 (according to docs.rs)
### Crates
tracing-appender
### Description
`tracing_appender::non_blocking::NonBlocking` requires its wrapped writer to be `Send + Sync + 'static`. The `Send + 'static` bounds make sense as the writer is given to a spawned worker thread, but I'm not sure why it's `Sync`. It doesn't occur in the type signature or fields of any of the `tracing_appender::non_blocking` types and so this bound is not necessary to make those types `Sync`, and I don't see any way in which access to the writer is shared across threads. It's moved into the spawned thread and accessed exclusively from there until the thread shuts down.
| 2023-11-07T08:23:27 | 0.1 | 96c0e297f1d5f1585166efcadf6057a804905447 | [
"fields"
] | [
"rolling::test::test_never_date_rounding - should panic",
"rolling::test::test_rotations",
"rolling::test::write_daily_log",
"rolling::test::write_hourly_log",
"rolling::test::write_never_log",
"rolling::test::test_make_writer",
"rolling::test::write_minutely_log",
"rolling::test::test_path_concatenat... | [
"string_field",
"tracing-error/src/backtrace.rs - backtrace::SpanTrace::capture (line 79)",
"tests::futures_03_tests::sink_enter_exit_is_reasonable",
"tests::futures_03_tests::stream_enter_exit_is_reasonable",
"journal_fields",
"internal_null_byte",
"multiline_message",
"multiple_spans_metadata",
"m... | [] | |
obi1kenobi/trustfall | 350 | obi1kenobi__trustfall-350 | [
"343"
] | 269ae7db3b16a63a1b0c1bcbdb1c4d91d0819ba4 | diff --git a/trustfall_core/src/ir/mod.rs b/trustfall_core/src/ir/mod.rs
--- a/trustfall_core/src/ir/mod.rs
+++ b/trustfall_core/src/ir/mod.rs
@@ -67,7 +67,7 @@ impl Eid {
/// [`Adapter::resolve_neighbors`]: crate::interpreter::Adapter::resolve_neighbors
#[derive(Debug, Clone, Default, PartialEq, Eq, Serialize, Deserialize)]
pub struct EdgeParameters {
- contents: Arc<BTreeMap<Arc<str>, FieldValue>>,
+ pub(crate) contents: Arc<BTreeMap<Arc<str>, FieldValue>>,
}
impl EdgeParameters {
diff --git a/trustfall_core/src/schema/adapter/mod.rs b/trustfall_core/src/schema/adapter/mod.rs
--- a/trustfall_core/src/schema/adapter/mod.rs
+++ b/trustfall_core/src/schema/adapter/mod.rs
@@ -33,7 +33,7 @@ use super::Schema;
///
/// // Create an adapter that queries
/// // the schema in the local `schema.graphql` file.
-/// # [allow(unused_variables)]
+/// # #[allow(unused_variables)]
/// let adapter = SchemaAdapter::new(&schema);
///
/// // Run queries using the adapter, etc.
diff --git a/trustfall_core/src/serialization/mod.rs b/trustfall_core/src/serialization/mod.rs
--- a/trustfall_core/src/serialization/mod.rs
+++ b/trustfall_core/src/serialization/mod.rs
@@ -62,3 +136,13 @@ impl TryIntoStruct for BTreeMap<Arc<str>, FieldValue> {
S::deserialize(deserializer)
}
}
+
+impl<'a> TryIntoStruct for &'a crate::ir::EdgeParameters {
+ type Error = deserializers::Error;
+
+ fn try_into_struct<S: DeserializeOwned>(self) -> Result<S, deserializers::Error> {
+ let data = (*self.contents).clone();
+ let deserializer = deserializers::QueryResultDeserializer::new(data);
+ S::deserialize(deserializer)
+ }
+}
| diff --git a/trustfall_core/src/serialization/mod.rs b/trustfall_core/src/serialization/mod.rs
--- a/trustfall_core/src/serialization/mod.rs
+++ b/trustfall_core/src/serialization/mod.rs
@@ -9,8 +9,40 @@ mod deserializers;
#[cfg(test)]
mod tests;
-/// Deserialize Trustfall query results into a Rust struct.
+/// Deserialize Trustfall query results or edge parameters into a Rust struct.
///
+/// # Use with query results
+///
+/// Running a Trustfall query produces an iterator of `BTreeMap<Arc<str>, FieldValue>` outputs
+/// representing the query results. These maps all have a common "shape" — the same keys and
+/// the same value types — as determined by the query and schema.
+///
+/// This trait allows deserializing those query result maps into a dedicated struct,
+/// to get you easy access to strongly-typed data instead of [`FieldValue`] enums.
+///
+/// ## Example
+///
+/// Say we ran a query like:
+/// ```graphql
+/// query {
+/// Order {
+/// item_name @output
+/// quantity @output
+/// }
+/// }
+/// ```
+///
+/// Each of this query's outputs contain a string named `item_name` and an integer named `quantity`.
+/// This trait allows us to define an output struct type:
+/// ```rust
+/// #[derive(Debug, PartialEq, Eq, serde::Deserialize)]
+/// struct Output {
+/// item_name: String,
+/// quantity: i64,
+/// }
+/// ```
+///
+/// We can then unpack the query results into an iterator of such structs:
/// ```rust
/// # use std::{collections::BTreeMap, sync::Arc};
/// # use maplit::btreemap;
diff --git a/trustfall_core/src/serialization/mod.rs b/trustfall_core/src/serialization/mod.rs
--- a/trustfall_core/src/serialization/mod.rs
+++ b/trustfall_core/src/serialization/mod.rs
@@ -19,20 +51,20 @@ mod tests;
/// # fn run_query() -> Result<Box<dyn Iterator<Item = BTreeMap<Arc<str>, FieldValue>>>, ()> {
/// # Ok(Box::new(vec![
/// # btreemap! {
-/// # Arc::from("number") => FieldValue::Int64(42),
-/// # Arc::from("text") => FieldValue::String("the answer to everything".to_string()),
+/// # Arc::from("item_name") => FieldValue::String("widget".to_string()),
+/// # Arc::from("quantity") => FieldValue::Int64(42),
/// # }
/// # ].into_iter()))
/// # }
+/// #
+/// # #[derive(Debug, PartialEq, Eq, serde::Deserialize)]
+/// # struct Output {
+/// # item_name: String,
+/// # quantity: i64,
+/// # }
///
/// use trustfall_core::TryIntoStruct;
///
-/// #[derive(Debug, PartialEq, Eq, serde::Deserialize)]
-/// struct Output {
-/// number: i64,
-/// text: String,
-/// }
-///
/// let results: Vec<_> = run_query()
/// .expect("bad query arguments")
/// .map(|v| v.try_into_struct().expect("struct definition did not match query result shape"))
diff --git a/trustfall_core/src/serialization/mod.rs b/trustfall_core/src/serialization/mod.rs
--- a/trustfall_core/src/serialization/mod.rs
+++ b/trustfall_core/src/serialization/mod.rs
@@ -41,13 +73,55 @@ mod tests;
/// assert_eq!(
/// vec![
/// Output {
-/// number: 42,
-/// text: "the answer to everything".to_string(),
+/// item_name: "widget".to_string(),
+/// quantity: 42,
/// },
/// ],
/// results,
/// );
/// ```
+///
+/// # Use with edge parameters
+///
+/// Edges defined in Trustfall schemas may take parameters, for example:
+/// ```graphql
+/// type NewsWebsite {
+/// latest_stories(count: Int!): [Story!]!
+/// }
+/// ```
+///
+/// This trait can be used to deserialize [`&EdgeParameters`](crate::ir::EdgeParameters)
+/// into a struct specific to the parameters of that edge:
+/// ```rust
+/// #[derive(Debug, PartialEq, Eq, serde::Deserialize)]
+/// struct LatestStoriesParameters {
+/// count: usize
+/// }
+/// ```
+///
+/// For example:
+/// ```rust
+/// # use trustfall_core::{ir::EdgeParameters, interpreter::ContextIterator};
+/// #
+/// # #[derive(Debug, Clone)]
+/// # struct Vertex;
+/// #
+/// # #[derive(Debug, PartialEq, Eq, serde::Deserialize)]
+/// # struct LatestStoriesParameters {
+/// # count: usize
+/// # }
+///
+/// use trustfall_core::TryIntoStruct;
+///
+/// fn resolve_latest_stories(contexts: ContextIterator<Vertex>, parameters: &EdgeParameters) {
+/// let parameters: LatestStoriesParameters = parameters
+/// .try_into_struct()
+/// .expect("edge parameters did not match struct definition");
+/// let count = parameters.count;
+///
+/// // then resolve the edge with the given count
+/// }
+/// ```
pub trait TryIntoStruct {
type Error;
| implement TryIntoStruct for EdgeParameters
using the TryIntoStruct trait: https://github.com/obi1kenobi/trustfall/releases/tag/trustfall-v0.5.0 , we can unpack edge params into a struct like:
```rs
struct Params {
ends_with_regex: Vec<String>
}
```
| 2023-07-08T03:38:50 | 0.5 | 4d47a2d6ef00ee57d7fb01f5c56e03c9ebbf9514 | [
"trustfall_core/src/serialization/mod.rs - serialization::TryIntoStruct (line 103)",
"trustfall_core/src/schema/adapter/mod.rs - schema::adapter::SchemaAdapter (line 27)"
] | [
"frontend::tests::test_coercion_to_nonexistent_type",
"frontend::tests::test_coercion_of_non_interface",
"frontend::tests::test_avoid_output_name_conflict_with_alias_name",
"frontend::tests::test_alias_driven_output_names",
"frontend::tests::test_both_missing_and_unused",
"frontend::tests::test_alias_and_... | [] | [] | |
crate-ci/typos | 410 | crate-ci__typos-410 | [
"409"
] | a329a99ec746285b3f31f11109f738a05a77dbec | diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -216,7 +216,14 @@ mod parser {
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
<T as nom::InputIter>::Item: AsChar + Copy,
{
- terminated(
+ fn is_sep(c: impl AsChar) -> bool {
+ let c = c.as_char();
+ // Avoid markdown throwing off our ordinal detection
+ ['_'].contains(&c)
+ }
+
+ recognize(tuple((
+ take_while(is_sep),
take_while1(is_dec_digit),
alt((
pair(char('s'), char('t')),
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -224,7 +231,8 @@ mod parser {
pair(char('r'), char('d')),
pair(char('t'), char('h')),
)),
- )(input)
+ take_while(is_sep),
+ )))(input)
}
fn dec_literal<T>(input: T) -> IResult<T, T>
| diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -935,10 +943,10 @@ mod test {
fn tokenize_ignore_ordinal() {
let parser = TokenizerBuilder::new().build();
- let input = "Hello 1st 2nd 3rd 4th World";
+ let input = "Hello 1st 2nd 3rd 4th __5th__ World";
let expected: Vec<Identifier> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
- Identifier::new_unchecked("World", Case::None, 22),
+ Identifier::new_unchecked("World", Case::None, 30),
];
let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
assert_eq!(expected, actual);
| Underscore after ordinal number causes it to be marked as typo
Bolding "2nd" using markdown is causing it to be marked as a typo
```
error: `nd` should be `and`
--> ./gatsby/content/blog/2022/01/2022-01-21-twim.mdx:41:156
|
41 | > Work on preparing the release of Matrix v1.2 is currently underway. As of today, the Spec Core Team is aiming for a release of Matrix v1.2 on __February 2nd__.
| ^^
```
Related: https://github.com/crate-ci/typos/issues/331
| 2022-01-25T10:01:37 | 1.3 | 4b2e66487c0d94e0f0ab1765a9e74bfed663aa1e | [
"tokens::test::tokenize_ignore_ordinal"
] | [
"tokens::test::split_ident",
"tokens::test::tokenize_dot_separated_words",
"tokens::test::tokenize_c_escape",
"tokens::test::tokenize_empty_is_empty",
"tokens::test::tokenize_double_escape",
"tokens::test::tokenize_ignore_email",
"tokens::test::tokenize_ignore_base64",
"tokens::test::tokenize_ignore_e... | [] | [] | |
crate-ci/typos | 345 | crate-ci__typos-345 | [
"331"
] | 894c8d71ca692c3847007b083a5ec8cae63c4aae | diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -182,6 +182,7 @@ mod parser {
terminated(hash_literal, sep1),
terminated(hex_literal, sep1),
terminated(dec_literal, sep1),
+ terminated(ordinal_literal, sep1),
terminated(base64_literal, sep1),
terminated(email_literal, sep1),
terminated(url_literal, sep1),
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -199,6 +200,30 @@ mod parser {
take_while1(is_ignore_char)(input)
}
+ fn ordinal_literal<T>(input: T) -> IResult<T, T>
+ where
+ T: nom::InputTakeAtPosition
+ + nom::InputTake
+ + nom::InputIter
+ + nom::InputLength
+ + nom::Offset
+ + nom::Slice<std::ops::RangeTo<usize>>
+ + nom::Slice<std::ops::RangeFrom<usize>>
+ + Clone,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ <T as nom::InputIter>::Item: AsChar + Copy,
+ {
+ terminated(
+ take_while1(is_dec_digit),
+ alt((
+ pair(char('s'), char('t')),
+ pair(char('n'), char('d')),
+ pair(char('r'), char('d')),
+ pair(char('t'), char('h')),
+ )),
+ )(input)
+ }
+
fn dec_literal<T>(input: T) -> IResult<T, T>
where
T: nom::InputTakeAtPosition,
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -434,6 +459,11 @@ mod parser {
}
}
+ #[inline]
+ fn is_dec_digit(i: impl AsChar + Copy) -> bool {
+ i.is_dec_digit()
+ }
+
#[inline]
fn is_dec_digit_with_sep(i: impl AsChar + Copy) -> bool {
i.is_dec_digit() || is_digit_sep(i.as_char())
| diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -884,6 +914,21 @@ mod test {
assert_eq!(expected, actual);
}
+ #[test]
+ fn tokenize_ignore_ordinal() {
+ let parser = TokenizerBuilder::new().build();
+
+ let input = "Hello 1st 2nd 3rd 4th World";
+ let expected: Vec<Identifier> = vec![
+ Identifier::new_unchecked("Hello", Case::None, 0),
+ Identifier::new_unchecked("World", Case::None, 22),
+ ];
+ let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
+ assert_eq!(expected, actual);
+ let actual: Vec<_> = parser.parse_str(input).collect();
+ assert_eq!(expected, actual);
+ }
+
#[test]
fn tokenize_ignore_hex() {
let parser = TokenizerBuilder::new().build();
| RFE: better handling of ordinal numbers
Currently there are false positives like:
```shellsession
$ echo 2nd | typos - | head -n 1
error: `nd` should be `and`
```
One approach to these would be to treat everything where a sequence of numbers is followed by `th`, `nd`, or `rd` as ok.
But it would be nice to do better, e.g. in regex pseudospec, stopping at first match
```
([0-9]*11)st -> \1th
([0-9]*12)nd -> \1th
([0-9]*13)rd -> \1th
([0-9]*1)th -> \1st
([0-9]*2)th -> \1nd
([0-9]*3)th -> \1rd
([0-9]+)(nd|rd) -> \1th
([0-9]+)th -> <this is ok>
```
| My concern is that adding a dynamic dictionary would slow things down for a rare case.
Let's start by just ignoring ordinals in the parser
That works for me, thanks for considering.
But just so I understand: ignoring ordinals would need to be done dynamically as well, right? Do you think the additional cost of doing the corrections while at it would be a concern?
- The above is 8 parse attempts with 2 branches
- `[0-9]+(st|nd|rd|th)` is 1 attempt with 4 branches,
You are welcome to try both and benchmark it. `cargo bench --bench checks` will give some idea of how it works (`Typos` group will include parse and correct) but to get the full idea, you'd want to run the end-to-end benchmarks
So something like:
```bash
./benchsuite/uut/typos.sh download
./benchsuite/fixtures/ripgrep_built.sh download
./typos/benchsuite/fixtures/linux_clean.sh download
./benchsuite/benchsuite.sh
# Backup the report, it has the date but not time in the file name
# Make a changes
./benchsuite/uut/typos.sh clean
./benchsuite/uut/typos.sh download
./benchsuite/benchsuite.sh
# Compare report
```
I might do that one day.
I wasn't suggesting following my "regex pseudospec" to the letter -- it was a pseudo one just for illustration purposes. We could very well narrow it down with the `[0-9]+(st|nd|rd|th)` which we'd do anyway to ignore these, and only if that matches, do a few further comparisons within that branch to provide fixes if we find any.
In the grand scheme of things, assuming input not crafted specifically to provoke a bad case here, I still believe the overhead by the fixes would be negligible, if not almost unnoticeable in practice (possibly visible in benchmarks though) when done along those lines.
But yeah, not insisting at all, avoiding the false positives would be a very nice first step. | 2021-09-14T21:56:39 | 1.1 | 894c8d71ca692c3847007b083a5ec8cae63c4aae | [
"tokens::test::tokenize_ignore_ordinal"
] | [
"tokens::test::split_ident",
"tokens::test::tokenize_empty_is_empty",
"tokens::test::tokenize_dot_separated_words",
"tokens::test::tokenize_c_escape",
"tokens::test::tokenize_ignore_email",
"tokens::test::tokenize_ignore_base64",
"tokens::test::tokenize_ignore_hex",
"tokens::test::tokenize_ignore_min_... | [] | [] |
crate-ci/typos | 293 | crate-ci__typos-293 | [
"288"
] | a46cc76baebd75257e806ce35b3747ed922a683d | diff --git a/CHANGELOG.md b/CHANGELOG.md
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,26 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
<!-- next-header -->
## [Unreleased] - ReleaseDate
+#### Change of Behavior
+
+- `ignore-hex` and `identifier-leading-digit` are deprecated and `typos` acts as
+ if `ignore-hex=true` and `identifier-leading-digit=false`.
+
+#### Features
+
+- Automatically ignore
+ - UUIDs
+ - SHAs
+ - base64 encoded data (must be at least 90 bytes)
+ - emails
+ - URLs
+
+#### Performance
+
+- Due to new literal detection, finding identifiers is takes 10x longer.
+ Combined with word splitting, its only takes 3x longer. The majority of the
+ time is spent in dictionary lookups, so we don't expect this to have too much impact in the end.
+
## [1.0.10] - 2021-06-28
#### Bug Fixes
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -4,8 +4,6 @@ use bstr::ByteSlice;
#[derive(Debug, Clone, PartialEq, Eq, Hash)]
pub struct TokenizerBuilder {
unicode: bool,
- ignore_hex: bool,
- leading_digits: bool,
}
impl TokenizerBuilder {
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -19,39 +17,15 @@ impl TokenizerBuilder {
self
}
- /// Specify that hexadecimal numbers should be ignored.
- pub fn ignore_hex(&mut self, yes: bool) -> &mut Self {
- self.ignore_hex = yes;
- self
- }
-
- /// Specify that leading digits are allowed for Identifiers.
- pub fn leading_digits(&mut self, yes: bool) -> &mut Self {
- self.leading_digits = yes;
- self
- }
-
pub fn build(&self) -> Tokenizer {
- let TokenizerBuilder {
- unicode,
- leading_digits,
- ignore_hex,
- } = self.clone();
- Tokenizer {
- unicode,
- leading_digits,
- ignore_hex,
- }
+ let TokenizerBuilder { unicode } = self.clone();
+ Tokenizer { unicode }
}
}
impl Default for TokenizerBuilder {
fn default() -> Self {
- Self {
- unicode: true,
- leading_digits: false,
- ignore_hex: true,
- }
+ Self { unicode: true }
}
}
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -59,8 +33,6 @@ impl Default for TokenizerBuilder {
#[derive(Debug, Clone)]
pub struct Tokenizer {
unicode: bool,
- leading_digits: bool,
- ignore_hex: bool,
}
impl Tokenizer {
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -70,9 +42,9 @@ impl Tokenizer {
pub fn parse_str<'c>(&'c self, content: &'c str) -> impl Iterator<Item = Identifier<'c>> {
let iter = if self.unicode && !ByteSlice::is_ascii(content.as_bytes()) {
- itertools::Either::Left(unicode_parser::iter_literals(content))
+ itertools::Either::Left(unicode_parser::iter_identifiers(content))
} else {
- itertools::Either::Right(ascii_parser::iter_literals(content.as_bytes()))
+ itertools::Either::Right(ascii_parser::iter_identifiers(content.as_bytes()))
};
iter.filter_map(move |identifier| {
let offset = offset(content.as_bytes(), identifier.as_bytes());
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -82,10 +54,11 @@ impl Tokenizer {
pub fn parse_bytes<'c>(&'c self, content: &'c [u8]) -> impl Iterator<Item = Identifier<'c>> {
let iter = if self.unicode && !ByteSlice::is_ascii(content) {
- let iter = Utf8Chunks::new(content).flat_map(move |c| unicode_parser::iter_literals(c));
+ let iter =
+ Utf8Chunks::new(content).flat_map(move |c| unicode_parser::iter_identifiers(c));
itertools::Either::Left(iter)
} else {
- itertools::Either::Right(ascii_parser::iter_literals(content))
+ itertools::Either::Right(ascii_parser::iter_identifiers(content))
};
iter.filter_map(move |identifier| {
let offset = offset(content, identifier.as_bytes());
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -95,17 +68,6 @@ impl Tokenizer {
fn transform<'i>(&self, identifier: &'i str, offset: usize) -> Option<Identifier<'i>> {
debug_assert!(!identifier.is_empty());
- if self.leading_digits {
- if is_number(identifier.as_bytes()) {
- return None;
- }
-
- if self.ignore_hex && is_hex(identifier.as_bytes()) {
- return None;
- }
- } else if is_digit(identifier.as_bytes()[0]) {
- return None;
- }
let case = Case::None;
Some(Identifier::new_unchecked(identifier, case, offset))
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -164,98 +126,348 @@ impl<'s> Iterator for Utf8Chunks<'s> {
}
}
-fn is_number(ident: &[u8]) -> bool {
- ident.iter().all(|b| is_digit(*b) || is_digit_sep(*b))
-}
+mod parser {
+ use nom::branch::*;
+ use nom::bytes::complete::*;
+ use nom::character::complete::*;
+ use nom::combinator::*;
+ use nom::sequence::*;
+ use nom::{AsChar, IResult};
-fn is_hex(ident: &[u8]) -> bool {
- if ident.len() < 3 {
- false
- } else {
- ident[0] == b'0'
- && ident[1] == b'x'
- && ident[2..]
- .iter()
- .all(|b| is_hex_digit(*b) || is_digit_sep(*b))
+ pub(crate) fn next_identifier<T>(input: T) -> IResult<T, T>
+ where
+ T: nom::InputTakeAtPosition
+ + nom::InputTake
+ + nom::InputIter
+ + nom::InputLength
+ + nom::Slice<std::ops::RangeFrom<usize>>
+ + nom::Slice<std::ops::RangeTo<usize>>
+ + nom::Offset
+ + Clone
+ + PartialEq
+ + std::fmt::Debug,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ <T as nom::InputIter>::Item: AsChar + Copy,
+ {
+ preceded(ignore, identifier)(input)
}
-}
-#[inline]
-fn is_digit(chr: u8) -> bool {
- chr.is_ascii_digit()
-}
+ fn identifier<T>(input: T) -> IResult<T, T>
+ where
+ T: nom::InputTakeAtPosition,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ {
+ // Generally a language would be `{XID_Start}{XID_Continue}*` but going with only
+ // `{XID_Continue}+` because XID_Continue is a superset of XID_Start and rather catch odd
+ // or unexpected cases than strip off start characters to a word since we aren't doing a
+ // proper word boundary parse
+ take_while1(is_xid_continue)(input)
+ }
-#[inline]
-fn is_digit_sep(chr: u8) -> bool {
- // `_`: number literal separator in Rust and other languages
- // `'`: number literal separator in C++
- chr == b'_' || chr == b'\''
-}
+ fn ignore<T>(input: T) -> IResult<T, T>
+ where
+ T: nom::InputTakeAtPosition
+ + nom::InputTake
+ + nom::InputIter
+ + nom::InputLength
+ + nom::Slice<std::ops::RangeFrom<usize>>
+ + nom::Slice<std::ops::RangeTo<usize>>
+ + nom::Offset
+ + Clone
+ + PartialEq
+ + std::fmt::Debug,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ <T as nom::InputIter>::Item: AsChar + Copy,
+ {
+ take_many0(alt((
+ terminated(uuid_literal, sep1),
+ terminated(hash_literal, sep1),
+ terminated(hex_literal, sep1),
+ terminated(dec_literal, sep1),
+ terminated(base64_literal, sep1),
+ terminated(email_literal, sep1),
+ terminated(url_literal, sep1),
+ sep1,
+ )))(input)
+ }
+
+ fn sep1<T>(input: T) -> IResult<T, T>
+ where
+ T: nom::InputTakeAtPosition,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ {
+ take_till1(is_xid_continue)(input)
+ }
-#[inline]
-fn is_hex_digit(chr: u8) -> bool {
- chr.is_ascii_hexdigit()
-}
+ fn dec_literal<T>(input: T) -> IResult<T, T>
+ where
+ T: nom::InputTakeAtPosition,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ {
+ take_while1(is_dec_digit_with_sep)(input)
+ }
-mod parser {
- use nom::bytes::complete::*;
- use nom::sequence::*;
- use nom::IResult;
+ fn hex_literal<T>(input: T) -> IResult<T, T>
+ where
+ T: nom::InputTakeAtPosition
+ + nom::InputTake
+ + nom::InputIter
+ + nom::InputLength
+ + nom::Slice<std::ops::RangeFrom<usize>>
+ + Clone,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ <T as nom::InputIter>::Item: AsChar + Copy,
+ {
+ preceded(
+ pair(char('0'), alt((char('x'), char('X')))),
+ take_while1(is_hex_digit_with_sep),
+ )(input)
+ }
- pub(crate) trait AsChar: nom::AsChar {
- #[allow(clippy::wrong_self_convention)]
- fn is_xid_continue(self) -> bool;
+ fn uuid_literal<T>(input: T) -> IResult<T, T>
+ where
+ T: nom::InputTakeAtPosition
+ + nom::InputTake
+ + nom::InputIter
+ + nom::InputLength
+ + nom::Offset
+ + nom::Slice<std::ops::RangeTo<usize>>
+ + nom::Slice<std::ops::RangeFrom<usize>>
+ + Clone,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ <T as nom::InputIter>::Item: AsChar + Copy,
+ {
+ recognize(tuple((
+ take_while_m_n(8, 8, is_lower_hex_digit),
+ char('-'),
+ take_while_m_n(4, 4, is_lower_hex_digit),
+ char('-'),
+ take_while_m_n(4, 4, is_lower_hex_digit),
+ char('-'),
+ take_while_m_n(4, 4, is_lower_hex_digit),
+ char('-'),
+ take_while_m_n(12, 12, is_lower_hex_digit),
+ )))(input)
+ }
+
+ fn hash_literal<T>(input: T) -> IResult<T, T>
+ where
+ T: nom::InputTakeAtPosition
+ + nom::InputTake
+ + nom::InputIter
+ + nom::InputLength
+ + nom::Offset
+ + nom::Slice<std::ops::RangeTo<usize>>
+ + nom::Slice<std::ops::RangeFrom<usize>>
+ + Clone,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ <T as nom::InputIter>::Item: AsChar + Copy,
+ {
+ // Size considerations:
+ // - sha-1 is git's original hash
+ // - sha-256 is git's new hash
+ // - Git hashes can be abbreviated but we need a good abbreviation that won't be mistaken
+ // for a variable name
+ const SHA_1_MAX: usize = 40;
+ const SHA_256_MAX: usize = 64;
+ take_while_m_n(SHA_1_MAX, SHA_256_MAX, is_lower_hex_digit)(input)
}
- impl AsChar for u8 {
- fn is_xid_continue(self) -> bool {
- (b'a'..=b'z').contains(&self)
- || (b'A'..=b'Z').contains(&self)
- || (b'0'..=b'9').contains(&self)
- || self == b'_'
+ fn base64_literal<T>(input: T) -> IResult<T, T>
+ where
+ T: nom::InputTakeAtPosition
+ + nom::InputTake
+ + nom::InputIter
+ + nom::InputLength
+ + nom::Offset
+ + nom::Slice<std::ops::RangeTo<usize>>
+ + nom::Slice<std::ops::RangeFrom<usize>>
+ + std::fmt::Debug
+ + Clone,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ <T as nom::InputIter>::Item: AsChar + Copy,
+ {
+ let (padding, captured) = take_while1(is_base64_digit)(input.clone())?;
+ if captured.input_len() < 90 {
+ return Err(nom::Err::Error(nom::error::Error::new(
+ input,
+ nom::error::ErrorKind::LengthValue,
+ )));
}
- }
- impl AsChar for char {
- fn is_xid_continue(self) -> bool {
- unicode_xid::UnicodeXID::is_xid_continue(self)
+ const CHUNK: usize = 4;
+ let padding_offset = input.offset(&padding);
+ let mut padding_len = CHUNK - padding_offset % CHUNK;
+ if padding_len == CHUNK {
+ padding_len = 0;
}
+
+ let (after, _) = take_while_m_n(padding_len, padding_len, is_base64_padding)(padding)?;
+ let after_offset = input.offset(&after);
+ Ok(input.take_split(after_offset))
}
- pub(crate) fn next_literal<T>(input: T) -> IResult<T, T>
+ fn email_literal<T>(input: T) -> IResult<T, T>
where
- T: nom::InputTakeAtPosition,
- <T as nom::InputTakeAtPosition>::Item: AsChar,
+ T: nom::InputTakeAtPosition
+ + nom::InputTake
+ + nom::InputIter
+ + nom::InputLength
+ + nom::Offset
+ + nom::Slice<std::ops::RangeTo<usize>>
+ + nom::Slice<std::ops::RangeFrom<usize>>
+ + std::fmt::Debug
+ + Clone,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ <T as nom::InputIter>::Item: AsChar + Copy,
{
- preceded(literal_sep, identifier)(input)
+ recognize(tuple((
+ take_while1(is_localport_char),
+ char('@'),
+ take_while1(is_domain_char),
+ )))(input)
}
- fn literal_sep<T>(input: T) -> IResult<T, T>
+ fn url_literal<T>(input: T) -> IResult<T, T>
where
- T: nom::InputTakeAtPosition,
- <T as nom::InputTakeAtPosition>::Item: AsChar,
+ T: nom::InputTakeAtPosition
+ + nom::InputTake
+ + nom::InputIter
+ + nom::InputLength
+ + nom::Offset
+ + nom::Slice<std::ops::RangeTo<usize>>
+ + nom::Slice<std::ops::RangeFrom<usize>>
+ + std::fmt::Debug
+ + Clone,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ <T as nom::InputIter>::Item: AsChar + Copy,
{
- take_till(AsChar::is_xid_continue)(input)
- }
-
- fn identifier<T>(input: T) -> IResult<T, T>
+ recognize(tuple((
+ opt(terminated(
+ take_while1(is_scheme_char),
+ // HACK: Technically you can skip `//` if you don't have a domain but that would
+ // get messy to support.
+ tuple((char(':'), char('/'), char('/'))),
+ )),
+ tuple((
+ opt(terminated(take_while1(is_localport_char), char('@'))),
+ take_while1(is_domain_char),
+ opt(preceded(char(':'), take_while1(AsChar::is_dec_digit))),
+ )),
+ char('/'),
+ // HACK: Too lazy to enumerate
+ take_while(is_localport_char),
+ )))(input)
+ }
+
+ fn take_many0<I, E, F>(mut f: F) -> impl FnMut(I) -> IResult<I, I, E>
where
- T: nom::InputTakeAtPosition,
- <T as nom::InputTakeAtPosition>::Item: AsChar,
+ I: nom::Offset + nom::InputTake + Clone + PartialEq + std::fmt::Debug,
+ F: nom::Parser<I, I, E>,
+ E: nom::error::ParseError<I>,
{
- // Generally a language would be `{XID_Start}{XID_Continue}*` but going with only
- // `{XID_Continue}+` because XID_Continue is a superset of XID_Start and rather catch odd
- // or unexpected cases than strip off start characters to a word since we aren't doing a
- // proper word boundary parse
- take_while1(AsChar::is_xid_continue)(input)
+ move |i: I| {
+ let mut current = i.clone();
+ loop {
+ match f.parse(current.clone()) {
+ Err(nom::Err::Error(_)) => {
+ let offset = i.offset(¤t);
+ let (after, before) = i.take_split(offset);
+ return Ok((after, before));
+ }
+ Err(e) => {
+ return Err(e);
+ }
+ Ok((next, _)) => {
+ if next == current {
+ return Err(nom::Err::Error(E::from_error_kind(
+ i,
+ nom::error::ErrorKind::Many0,
+ )));
+ }
+
+ current = next;
+ }
+ }
+ }
+ }
+ }
+
+ #[inline]
+ fn is_dec_digit_with_sep(i: impl AsChar + Copy) -> bool {
+ i.is_dec_digit() || is_digit_sep(i.as_char())
+ }
+
+ #[inline]
+ fn is_hex_digit_with_sep(i: impl AsChar + Copy) -> bool {
+ i.is_hex_digit() || is_digit_sep(i.as_char())
+ }
+
+ #[inline]
+ fn is_lower_hex_digit(i: impl AsChar + Copy) -> bool {
+ let c = i.as_char();
+ ('a'..='f').contains(&c) || ('0'..='9').contains(&c)
+ }
+
+ #[inline]
+ fn is_base64_digit(i: impl AsChar + Copy) -> bool {
+ let c = i.as_char();
+ ('a'..='z').contains(&c)
+ || ('A'..='Z').contains(&c)
+ || ('0'..='9').contains(&c)
+ || c == '+'
+ || c == '/'
+ }
+
+ #[inline]
+ fn is_base64_padding(i: impl AsChar + Copy) -> bool {
+ let c = i.as_char();
+ c == '='
+ }
+
+ #[inline]
+ fn is_localport_char(i: impl AsChar + Copy) -> bool {
+ let c = i.as_char();
+ ('a'..='z').contains(&c)
+ || ('A'..='Z').contains(&c)
+ || ('0'..='9').contains(&c)
+ || "!#$%&'*+-/=?^_`{|}~().".find(c).is_some()
+ }
+
+ #[inline]
+ fn is_domain_char(i: impl AsChar + Copy) -> bool {
+ let c = i.as_char();
+ ('a'..='z').contains(&c)
+ || ('A'..='Z').contains(&c)
+ || ('0'..='9').contains(&c)
+ || "-().".find(c).is_some()
+ }
+
+ #[inline]
+ fn is_scheme_char(i: impl AsChar + Copy) -> bool {
+ let c = i.as_char();
+ ('a'..='z').contains(&c) || ('0'..='9').contains(&c) || "+.-".find(c).is_some()
+ }
+
+ #[inline]
+ fn is_xid_continue(i: impl AsChar + Copy) -> bool {
+ let c = i.as_char();
+ unicode_xid::UnicodeXID::is_xid_continue(c)
+ }
+
+ #[inline]
+ fn is_digit_sep(chr: char) -> bool {
+ // `_`: number literal separator in Rust and other languages
+ // `'`: number literal separator in C++
+ chr == '_' || chr == '\''
}
}
mod unicode_parser {
- use super::parser::next_literal;
+ use super::parser::next_identifier;
- pub(crate) fn iter_literals(mut input: &str) -> impl Iterator<Item = &str> {
- std::iter::from_fn(move || match next_literal(input) {
+ pub(crate) fn iter_identifiers(mut input: &str) -> impl Iterator<Item = &str> {
+ std::iter::from_fn(move || match next_identifier(input) {
Ok((i, o)) => {
input = i;
debug_assert_ne!(o, "");
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -267,10 +479,10 @@ mod unicode_parser {
}
mod ascii_parser {
- use super::parser::next_literal;
+ use super::parser::next_identifier;
- pub(crate) fn iter_literals(mut input: &[u8]) -> impl Iterator<Item = &str> {
- std::iter::from_fn(move || match next_literal(input) {
+ pub(crate) fn iter_identifiers(mut input: &[u8]) -> impl Iterator<Item = &str> {
+ std::iter::from_fn(move || match next_identifier(input) {
Ok((i, o)) => {
input = i;
debug_assert_ne!(o, b"");
diff --git a/docs/comparison.md b/docs/comparison.md
--- a/docs/comparison.md
+++ b/docs/comparison.md
@@ -8,7 +8,12 @@
| Per-Lang Dict | Yes | ? | No | Yes |
| CamelCase | Yes | ? | No | Yes |
| snake_case | Yes | ? | No | Yes |
+| Ignore email | Yes | yes | No | No |
+| Ignore url | Yes | yes | No | No |
| Ignore Hex | Yes | ? | No | Yes |
+| Ignore UUID | Yes | ? | No | No |
+| Ignore base64 | Yes | ? | No | No |
+| Ignore SHAs | Yes | ? | No | No |
| C-Escapes | No ([#20][def-3]) | ? | No | Yes |
| Encodings | UTF-8 / UTF-16 | ? | Auto | Auto |
| Whole-project | Yes | Yes | Yes | No |
diff --git a/docs/reference.md b/docs/reference.md
--- a/docs/reference.md
+++ b/docs/reference.md
@@ -26,8 +26,6 @@ Configuration is read from the following (in precedence order)
| default.check-filename | \- | bool | Verifying spelling in file names. |
| default.check-file | \- | bool | Verifying spelling in files. |
| default.unicode | --unicode | bool | Allow unicode characters in identifiers (and not just ASCII) |
-| default.ignore-hex | \- | bool | Do not check identifiers that appear to be hexadecimal values. |
-| default.identifier-leading-digits | \- | bool | Allow identifiers to start with digits, in addition to letters. |
| default.locale | --locale | en, en-us, en-gb, en-ca, en-au | English dialect to correct to. |
| default.extend-identifiers | \- | table of strings | Corrections for identifiers. When the correction is blank, the word is never valid. When the correction is the key, the word is always valid. |
| default.extend-words | \- | table of strings | Corrections for identifiers. When the correction is blank, the word is never valid. When the correction is the key, the word is always valid. |
diff --git a/src/policy.rs b/src/policy.rs
--- a/src/policy.rs
+++ b/src/policy.rs
@@ -224,10 +224,15 @@ impl<'s> ConfigEngine<'s> {
tokenizer.unwrap_or_else(crate::config::TokenizerConfig::from_defaults);
let dict_config = dict.unwrap_or_else(crate::config::DictConfig::from_defaults);
+ if !tokenizer_config.ignore_hex() {
+ log::warn!("`ignore-hex` is deprecated");
+ if !tokenizer_config.identifier_leading_digits() {
+ log::warn!("`identifier-leading-digits` is deprecated");
+ }
+ }
+
let tokenizer = typos::tokens::TokenizerBuilder::new()
.unicode(tokenizer_config.unicode())
- .ignore_hex(tokenizer_config.ignore_hex())
- .leading_digits(tokenizer_config.identifier_leading_digits())
.build();
let dict = crate::dict::BuiltIn::new(dict_config.locale());
| diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -613,11 +825,8 @@ mod test {
}
#[test]
- fn tokenize_ignore_hex_enabled() {
- let parser = TokenizerBuilder::new()
- .ignore_hex(true)
- .leading_digits(true)
- .build();
+ fn tokenize_ignore_hex() {
+ let parser = TokenizerBuilder::new().build();
let input = "Hello 0xDEADBEEF World";
let expected: Vec<Identifier> = vec![
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -631,17 +840,13 @@ mod test {
}
#[test]
- fn tokenize_ignore_hex_disabled() {
- let parser = TokenizerBuilder::new()
- .ignore_hex(false)
- .leading_digits(true)
- .build();
+ fn tokenize_ignore_uuid() {
+ let parser = TokenizerBuilder::new().build();
- let input = "Hello 0xDEADBEEF World";
+ let input = "Hello 123e4567-e89b-12d3-a456-426652340000 World";
let expected: Vec<Identifier> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
- Identifier::new_unchecked("0xDEADBEEF", Case::None, 6),
- Identifier::new_unchecked("World", Case::None, 17),
+ Identifier::new_unchecked("World", Case::None, 43),
];
let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
assert_eq!(expected, actual);
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -650,18 +855,13 @@ mod test {
}
#[test]
- fn tokenize_leading_digits_enabled() {
- let parser = TokenizerBuilder::new()
- .ignore_hex(false)
- .leading_digits(true)
- .build();
+ fn tokenize_ignore_hash() {
+ let parser = TokenizerBuilder::new().build();
- let input = "Hello 0Hello 124 0xDEADBEEF World";
+ let input = "Hello 485865fd0412e40d041e861506bb3ac11a3a91e3 World";
let expected: Vec<Identifier> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
- Identifier::new_unchecked("0Hello", Case::None, 6),
- Identifier::new_unchecked("0xDEADBEEF", Case::None, 17),
- Identifier::new_unchecked("World", Case::None, 28),
+ Identifier::new_unchecked("World", Case::None, 47),
];
let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
assert_eq!(expected, actual);
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -670,15 +870,73 @@ mod test {
}
#[test]
- fn tokenize_leading_digits_disabled() {
- let parser = TokenizerBuilder::new()
- .ignore_hex(false)
- .leading_digits(false)
- .build();
+ fn tokenize_ignore_base64() {
+ let parser = TokenizerBuilder::new().build();
+
+ let input = "Good Iy9+btvut+d92V+v84444ziIqJKHK879KJH59//X1Iy9+btvut+d92V+v84444ziIqJKHK879KJH59//X122Iy9+btvut+d92V+v84444ziIqJKHK879KJH59//X12== Bye";
+ let expected: Vec<Identifier> = vec![
+ Identifier::new_unchecked("Good", Case::None, 0),
+ Identifier::new_unchecked("Bye", Case::None, 134),
+ ];
+ let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
+ assert_eq!(expected, actual);
+ let actual: Vec<_> = parser.parse_str(input).collect();
+ assert_eq!(expected, actual);
+ }
+
+ #[test]
+ fn tokenize_ignore_email() {
+ let parser = TokenizerBuilder::new().build();
+
+ let input = "Good example@example.com Bye";
+ let expected: Vec<Identifier> = vec![
+ Identifier::new_unchecked("Good", Case::None, 0),
+ Identifier::new_unchecked("Bye", Case::None, 25),
+ ];
+ let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
+ assert_eq!(expected, actual);
+ let actual: Vec<_> = parser.parse_str(input).collect();
+ assert_eq!(expected, actual);
+ }
+
+ #[test]
+ fn tokenize_ignore_min_url() {
+ let parser = TokenizerBuilder::new().build();
+
+ let input = "Good example.com/hello Bye";
+ let expected: Vec<Identifier> = vec![
+ Identifier::new_unchecked("Good", Case::None, 0),
+ Identifier::new_unchecked("Bye", Case::None, 23),
+ ];
+ let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
+ assert_eq!(expected, actual);
+ let actual: Vec<_> = parser.parse_str(input).collect();
+ assert_eq!(expected, actual);
+ }
+
+ #[test]
+ fn tokenize_ignore_max_url() {
+ let parser = TokenizerBuilder::new().build();
+
+ let input = "Good http://user@example.com:3142/hello?query=value&extra=two#fragment Bye";
+ let expected: Vec<Identifier> = vec![
+ Identifier::new_unchecked("Good", Case::None, 0),
+ Identifier::new_unchecked("Bye", Case::None, 71),
+ ];
+ let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
+ assert_eq!(expected, actual);
+ let actual: Vec<_> = parser.parse_str(input).collect();
+ assert_eq!(expected, actual);
+ }
+
+ #[test]
+ fn tokenize_leading_digits() {
+ let parser = TokenizerBuilder::new().build();
let input = "Hello 0Hello 124 0xDEADBEEF World";
let expected: Vec<Identifier> = vec![
Identifier::new_unchecked("Hello", Case::None, 0),
+ Identifier::new_unchecked("0Hello", Case::None, 6),
Identifier::new_unchecked("World", Case::None, 28),
];
let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
| Don't Change Typos inside of URL
I tried to apply this automatically to some repos and it tried to fix some URLs. Obviously, that won't work if the original URL has a typo and the URL is not under your control. Absolute HTTPS URLs should not be changed or at least there should be an option to disable the changing of them.
| Things that might be useful from another tool
- email handling: https://github.com/client9/misspell/blob/master/notwords.go#L66
- host handling: https://github.com/client9/misspell/blob/master/notwords.go#L71
- escape sequences: https://github.com/client9/misspell/blob/master/notwords.go#L77
- urls: https://github.com/client9/misspell/blob/master/url.go
RFC 5322 <http://www.ietf.org/rfc/rfc5322.txt> ie: http://emailregex.com/
exists
On Thu, Jun 17, 2021 at 3:42 PM Ed Page ***@***.***> wrote:
> Things that might be useful from another tool
>
> - email handling:
> https://github.com/client9/misspell/blob/master/notwords.go#L66
> - host handling:
> https://github.com/client9/misspell/blob/master/notwords.go#L71
> - escape sequences:
> https://github.com/client9/misspell/blob/master/notwords.go#L77
> - urls: https://github.com/client9/misspell/blob/master/url.go
>
> —
> You are receiving this because you authored the thread.
> Reply to this email directly, view it on GitHub
> <https://github.com/crate-ci/typos/issues/288#issuecomment-863514434>, or
> unsubscribe
> <https://github.com/notifications/unsubscribe-auth/AAPVMX4FZVRX3MQI5B6PQHLTTJF35ANCNFSM464JUMBQ>
> .
>
Even worse, I found it mangles go get urls for people who have intentional typos in their usernames. This breaks the repo. Furthermore, that regex may not catch it. Here is a diff in a go repo I tried this on.
```
- "github.com/Unknwon/goconfig"
+ "github.com/Unknown/goconfig"
``` | 2021-06-30T03:44:26 | 1.0 | a46cc76baebd75257e806ce35b3747ed922a683d | [
"tokens::test::tokenize_ignore_base64",
"tokens::test::tokenize_ignore_email",
"tokens::test::tokenize_ignore_max_url",
"tokens::test::tokenize_ignore_min_url",
"tokens::test::tokenize_ignore_uuid",
"tokens::test::tokenize_leading_digits"
] | [
"tokens::test::split_ident",
"tokens::test::tokenize_empty_is_empty",
"tokens::test::tokenize_dot_separated_words",
"tokens::test::tokenize_ignore_hash",
"tokens::test::tokenize_ignore_hex",
"tokens::test::tokenize_namespace_separated_words",
"tokens::test::tokenize_space_separated_words",
"tokens::te... | [] | [] |
crate-ci/typos | 279 | crate-ci__typos-279 | [
"277"
] | 04f5d40e574f04461859fd638c1c7b5b78eaecf7 | diff --git a/CHANGELOG.md b/CHANGELOG.md
--- a/CHANGELOG.md
+++ b/CHANGELOG.md
@@ -7,6 +7,11 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
<!-- next-header -->
## [Unreleased] - ReleaseDate
+#### Bug Fixes
+
+- Fix the prior `typos <file>` fix that broke all other forms
+- Extend the fix to other modes (`--dump-config`, etc)
+
## [1.0.5] - 2021-06-05
#### Bug Fixes
diff --git a/src/bin/typos-cli/main.rs b/src/bin/typos-cli/main.rs
--- a/src/bin/typos-cli/main.rs
+++ b/src/bin/typos-cli/main.rs
@@ -66,11 +66,13 @@ fn run_dump_config(args: &args::Args, output_path: &std::path::Path) -> proc_exi
let path = &args.path[0];
let cwd = if path == std::path::Path::new("-") {
- global_cwd.as_path()
+ global_cwd
} else if path.is_file() {
- path.parent().unwrap()
+ let mut cwd = path.canonicalize().with_code(proc_exit::Code::USAGE_ERR)?;
+ cwd.pop();
+ cwd
} else {
- path.as_path()
+ path.canonicalize().with_code(proc_exit::Code::USAGE_ERR)?
};
let cwd = cwd.canonicalize().with_code(proc_exit::Code::USAGE_ERR)?;
diff --git a/src/bin/typos-cli/main.rs b/src/bin/typos-cli/main.rs
--- a/src/bin/typos-cli/main.rs
+++ b/src/bin/typos-cli/main.rs
@@ -108,11 +110,13 @@ fn run_type_list(args: &args::Args) -> proc_exit::ExitResult {
let path = &args.path[0];
let cwd = if path == std::path::Path::new("-") {
- global_cwd.as_path()
+ global_cwd
} else if path.is_file() {
- path.parent().unwrap()
+ let mut cwd = path.canonicalize().with_code(proc_exit::Code::USAGE_ERR)?;
+ cwd.pop();
+ cwd
} else {
- path.as_path()
+ path.canonicalize().with_code(proc_exit::Code::USAGE_ERR)?
};
let cwd = cwd.canonicalize().with_code(proc_exit::Code::USAGE_ERR)?;
diff --git a/src/bin/typos-cli/main.rs b/src/bin/typos-cli/main.rs
--- a/src/bin/typos-cli/main.rs
+++ b/src/bin/typos-cli/main.rs
@@ -178,7 +182,7 @@ fn run_checks(
cwd.pop();
cwd
} else {
- path.clone()
+ path.canonicalize().with_code(proc_exit::Code::USAGE_ERR)?
};
engine
| diff --git a/tests/cli.rs b/tests/cli.rs
--- a/tests/cli.rs
+++ b/tests/cli.rs
@@ -29,3 +29,16 @@ fn test_file_failure() {
cmd.arg("README.md");
cmd.assert().code(2);
}
+
+#[test]
+fn test_relative_dir_failure() {
+ let mut cmd = Command::cargo_bin("typos").unwrap();
+ cmd.arg(".");
+ cmd.assert().code(2);
+}
+
+#[test]
+fn test_assumed_dir_failure() {
+ let mut cmd = Command::cargo_bin("typos").unwrap();
+ cmd.assert().code(2);
+}
| typos-cli Crash Report
name = 'typos-cli'
operating_system = 'windows'
crate_version = '1.0.5'
explanation = '''
Panic occurred in file 'C:\Users\brian\.cargo\registry\src\github.com-1ecc6299db9ec823\typos-cli-1.0.5\src\policy.rs' at line 90
'''
cause = '`walk()` should be called first'
method = 'Panic'
backtrace = '''
0: 0x7ff7059f4f79 - <unresolved>
1: 0x7ff705a1a1e0 - <unresolved>
2: 0x7ff705a1a032 - <unresolved>
3: 0x7ff7058a68b3 - <unresolved>
4: 0x7ff7058b071e - <unresolved>
5: 0x7ff7058a5b64 - <unresolved>
6: 0x7ff7058ecd9d - <unresolved>
7: 0x7ff7058ede8f - <unresolved>
8: 0x7ff7058f56c4 - <unresolved>
9: 0x7ff7058f032f - <unresolved>
10: 0x7ff7058f55e9 - <unresolved>
11: 0x7ff7059f850a - <unresolved>
12: 0x7fffb9f37034 - BaseThreadInitThunk
13: 0x7fffba122651 - RtlUserThreadStart'''
| Same error here.
Tried running ``typos`` on Manjaro Linux using the v1.0.5 precompiled binary and this crash occurred.
Same for `mac os`, when running `typos` binary after `cargo install`:
```
name = 'typos-cli'
operating_system = 'unix:OSX'
crate_version = '1.0.5'
explanation = '''
Panic occurred in file '/Users/coderaiser/.cargo/registry/src/github.com-1ecc6299db9ec823/typos-cli-1.0.5/src/policy.rs' at line 90
'''
cause = '`walk()` should be called first'
method = 'Panic'
backtrace = ''
``` | 2021-06-07T19:30:00 | 1.0 | a46cc76baebd75257e806ce35b3747ed922a683d | [
"test_assumed_dir_failure",
"test_relative_dir_failure"
] | [
"config::test::test_extend_glob_extends",
"config::test::test_extend_glob_updates",
"config::test::test_from_defaults",
"config::test::test_update_from_defaults",
"dict::test::test_case_correct",
"config::test::test_update_from_nothing",
"dict::test::test_dict_correct",
"dict::test::test_dict_to_varco... | [] | [] |
crate-ci/typos | 254 | crate-ci__typos-254 | [
"253"
] | e6c595c5851a55a461710ec63cf27b5f7c57f475 | diff --git a/crates/typos-vars/codegen/src/main.rs b/crates/typos-vars/codegen/src/main.rs
--- a/crates/typos-vars/codegen/src/main.rs
+++ b/crates/typos-vars/codegen/src/main.rs
@@ -78,6 +78,7 @@ fn generate_variations<W: std::io::Write>(file: &mut W) {
let mut smallest = usize::MAX;
let mut largest = usize::MIN;
+ let mut no_invalid = true;
writeln!(
file,
diff --git a/crates/typos-vars/codegen/src/main.rs b/crates/typos-vars/codegen/src/main.rs
--- a/crates/typos-vars/codegen/src/main.rs
+++ b/crates/typos-vars/codegen/src/main.rs
@@ -97,6 +98,8 @@ fn generate_variations<W: std::io::Write>(file: &mut W) {
builder.entry(unicase::UniCase::new(word), &value);
smallest = std::cmp::min(smallest, word.len());
largest = std::cmp::max(largest, word.len());
+
+ no_invalid &= !is_always_invalid(data);
}
let codegenned = builder.build();
writeln!(file, "{}", codegenned).unwrap();
diff --git a/crates/typos-vars/codegen/src/main.rs b/crates/typos-vars/codegen/src/main.rs
--- a/crates/typos-vars/codegen/src/main.rs
+++ b/crates/typos-vars/codegen/src/main.rs
@@ -110,6 +113,10 @@ fn generate_variations<W: std::io::Write>(file: &mut W) {
)
.unwrap();
+ writeln!(file).unwrap();
+ writeln!(file, "pub const NO_INVALID: bool = {:?};", no_invalid,).unwrap();
+
+ writeln!(file).unwrap();
for (symbol, entry) in entries.iter() {
if !referenced_symbols.contains(symbol.as_str()) {
continue;
diff --git a/crates/typos-vars/codegen/src/main.rs b/crates/typos-vars/codegen/src/main.rs
--- a/crates/typos-vars/codegen/src/main.rs
+++ b/crates/typos-vars/codegen/src/main.rs
@@ -156,6 +163,15 @@ fn is_always_valid(data: &[(&str, varcon::CategorySet)]) -> bool {
false
}
+fn is_always_invalid(data: &[(&str, varcon::CategorySet)]) -> bool {
+ for (_symbol, set) in data.iter() {
+ if set.is_empty() {
+ return true;
+ }
+ }
+ false
+}
+
fn entries() -> BTreeMap<String, varcon_core::Entry> {
varcon::VARCON
.iter()
diff --git a/crates/typos-vars/src/vars_codegen.rs b/crates/typos-vars/src/vars_codegen.rs
--- a/crates/typos-vars/src/vars_codegen.rs
+++ b/crates/typos-vars/src/vars_codegen.rs
@@ -113083,6 +113083,9 @@ pub static VARS_DICTIONARY: phf::Map<
};
pub const WORD_RANGE: std::ops::RangeInclusive<usize> = 2..=24;
+
+pub const NO_INVALID: bool = true;
+
pub(crate) static ENTRY_ABETTORS_7043394254318611656: VariantsMap =
[&["abettors"], &["abetters"], &["abettors"], &["abetters"]];
diff --git a/src/dict.rs b/src/dict.rs
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -48,8 +48,10 @@ impl BuiltIn {
.for_each(|mut s| case_correct(&mut s, word_token.case()));
Some(corrections)
}
+}
- #[cfg(feature = "dict")]
+#[cfg(feature = "dict")]
+impl BuiltIn {
// Not using `Status` to avoid the allocations
fn correct_with_dict(&self, word: &str) -> Option<&'static [&'static str]> {
if typos_dict::WORD_RANGE.contains(&word.len()) {
diff --git a/src/dict.rs b/src/dict.rs
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -58,40 +60,42 @@ impl BuiltIn {
None
}
}
+}
- #[cfg(not(feature = "dict"))]
+#[cfg(not(feature = "dict"))]
+impl BuiltIn {
fn correct_with_dict(&self, _word: &str) -> Option<&'static [&'static str]> {
None
}
+}
- #[cfg(feature = "vars")]
+#[cfg(feature = "vars")]
+impl BuiltIn {
fn chain_with_vars(&self, corrections: &'static [&'static str]) -> Status<'static> {
- let mut chained: Vec<_> = corrections
- .iter()
- .flat_map(|c| match self.correct_with_vars(c) {
- Some(Status::Valid) | None => vec![Cow::Borrowed(*c)],
- Some(Status::Corrections(vars)) => vars,
- Some(Status::Invalid) => {
- unreachable!("correct_with_vars should always have valid suggestions")
- }
- })
- .collect();
- if chained.len() != 1 {
- chained.sort_unstable();
- chained.dedup();
+ if self.is_vars_enabled() {
+ let mut chained: Vec<_> = corrections
+ .iter()
+ .flat_map(|c| match self.correct_with_vars(c) {
+ Some(Status::Valid) | None => vec![Cow::Borrowed(*c)],
+ Some(Status::Corrections(vars)) => vars,
+ Some(Status::Invalid) => {
+ unreachable!("correct_with_vars should always have valid suggestions")
+ }
+ })
+ .collect();
+ if chained.len() != 1 {
+ chained.sort_unstable();
+ chained.dedup();
+ }
+ debug_assert!(!chained.is_empty());
+ Status::Corrections(chained)
+ } else {
+ Status::Corrections(corrections.iter().map(|c| Cow::Borrowed(*c)).collect())
}
- debug_assert!(!chained.is_empty());
- Status::Corrections(chained)
- }
-
- #[cfg(not(feature = "vars"))]
- fn chain_with_vars(&self, corrections: &'static [&'static str]) -> Status<'static> {
- Status::Corrections(corrections.iter().map(|c| Cow::Borrowed(*c)).collect())
}
- #[cfg(feature = "vars")]
fn correct_with_vars(&self, word: &str) -> Option<Status<'static>> {
- if typos_vars::WORD_RANGE.contains(&word.len()) {
+ if self.is_vars_enabled() && typos_vars::WORD_RANGE.contains(&word.len()) {
map_lookup(&typos_vars::VARS_DICTIONARY, word)
.map(|variants| self.select_variant(variants))
} else {
diff --git a/src/dict.rs b/src/dict.rs
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -99,12 +103,12 @@ impl BuiltIn {
}
}
- #[cfg(not(feature = "vars"))]
- fn correct_with_vars(&self, _word: &str) -> Option<Status<'static>> {
- None
+ fn is_vars_enabled(&self) -> bool {
+ #![allow(clippy::assertions_on_constants)]
+ debug_assert!(typos_vars::NO_INVALID);
+ self.locale.is_some()
}
- #[cfg(feature = "vars")]
fn select_variant(
&self,
vars: &'static [(u8, &'static typos_vars::VariantsMap)],
diff --git a/src/dict.rs b/src/dict.rs
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -148,6 +152,17 @@ impl BuiltIn {
}
}
+#[cfg(not(feature = "vars"))]
+impl BuiltIn {
+ fn chain_with_vars(&self, corrections: &'static [&'static str]) -> Status<'static> {
+ Status::Corrections(corrections.iter().map(|c| Cow::Borrowed(*c)).collect())
+ }
+
+ fn correct_with_vars(&self, _word: &str) -> Option<Status<'static>> {
+ None
+ }
+}
+
impl typos::Dictionary for BuiltIn {
fn correct_ident<'s, 'w>(&'s self, ident: typos::tokens::Identifier<'w>) -> Option<Status<'s>> {
BuiltIn::correct_ident(self, ident)
| diff --git a/src/dict.rs b/src/dict.rs
--- a/src/dict.rs
+++ b/src/dict.rs
@@ -296,7 +311,7 @@ mod test {
typos::tokens::Case::Lower,
0,
));
- assert_eq!(correction, Some(Status::Valid));
+ assert_eq!(correction, None);
}
#[cfg(feature = "vars")]
| Speed up varcom support
When there are no hits, its only about a 10% cost. When there are hits, it can go up to 50%
For "code"
```
check_file/FoundFiles/code
time: [23.589 us 23.753 us 23.933 us]
thrpt: [12.114 MiB/s 12.205 MiB/s 12.290 MiB/s]
check_file/Identifiers/code
time: [26.211 us 26.350 us 26.498 us]
thrpt: [10.941 MiB/s 11.003 MiB/s 11.061 MiB/s]
check_file/Words/code time: [28.795 us 28.913 us 29.046 us]
thrpt: [9.9814 MiB/s 10.027 MiB/s 10.068 MiB/s]
check_file/Typos/code time: [32.651 us 32.788 us 32.934 us]
thrpt: [8.8029 MiB/s 8.8421 MiB/s 8.8794 MiB/s]
and with varcon
check_file/Typos/code time: [35.860 us 36.021 us 36.187 us]
thrpt: [8.0117 MiB/s 8.0486 MiB/s 8.0846 MiB/s]
change:
time: [+8.6784% +9.7190% +10.748%] (p = 0.00 < 0.05)
thrpt: [-9.7049% -8.8581% -7.9854%]
Performance has regressed.
```
For "corpus" (high token count compared to non-tokens, lots of corrections)
```
check_file/FoundFiles/corpus
time: [53.392 us 53.820 us 54.261 us]
thrpt: [10.464 GiB/s 10.550 GiB/s 10.635 GiB/s]
check_file/Identifiers/corpus
time: [2.5148 ms 2.5232 ms 2.5327 ms]
thrpt: [229.57 MiB/s 230.44 MiB/s 231.21 MiB/s]
check_file/Words/corpus time: [6.5589 ms 6.5755 ms 6.5940 ms]
thrpt: [88.177 MiB/s 88.425 MiB/s 88.649 MiB/s]
check_file/Typos/corpus time: [17.806 ms 17.900 ms 18.008 ms]
thrpt: [32.288 MiB/s 32.482 MiB/s 32.654 MiB/s]
and with varcon
check_file/Typos/corpus time: [26.966 ms 27.215 ms 27.521 ms]
thrpt: [21.127 MiB/s 21.365 MiB/s 21.562 MiB/s]
change:
time: [+50.409% +52.035% +53.975%] (p = 0.00 < 0.05)
thrpt: [-35.054% -34.226% -33.515%]
Performance has regressed.
```
| I wonder if there is cost in doing two different hashings and lookups.
We might be able to speed this up by merging the two dictionaries. | 2021-05-19T10:09:40 | 0.3 | e6c595c5851a55a461710ec63cf27b5f7c57f475 | [
"dict::test::test_varcon_same_locale"
] | [
"config::test::test_update_from_defaults",
"config::test::test_extend_glob_updates",
"config::test::test_extend_glob_extends",
"config::test::test_update_from_nothing",
"dict::test::test_case_correct",
"file::test::test_extract_line_end",
"file::test::test_extract_line_middle",
"file::test::test_fix_b... | [] | [] |
crate-ci/typos | 543 | crate-ci__typos-543 | [
"542"
] | 7d2ad4148d33c6af9f7516112f3ea57eb1bb50bc | diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -306,8 +306,12 @@ mod parser {
+ nom::InputTake
+ nom::InputIter
+ nom::InputLength
+ + nom::Offset
+ + nom::Slice<std::ops::RangeTo<usize>>
+ nom::Slice<std::ops::RangeFrom<usize>>
+ Clone
+ + Default
+ + PartialEq
+ std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
<T as nom::InputIter>::Item: AsChar + Copy,
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -315,8 +319,8 @@ mod parser {
preceded(
char('#'),
alt((
- take_while_m_n(3, 8, is_lower_hex_digit),
- take_while_m_n(3, 8, is_upper_hex_digit),
+ terminated(take_while_m_n(3, 8, is_lower_hex_digit), peek(sep1)),
+ terminated(take_while_m_n(3, 8, is_upper_hex_digit), peek(sep1)),
)),
)(input)
}
| diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -1351,12 +1355,12 @@ mod test {
fn tokenize_color() {
let parser = TokenizerBuilder::new().build();
- let input = "#[derive(Clone)] #aaa # #111 #AABBCC #hello #AABBCCDD World";
+ let input = "#[derive(Clone)] #aaa # #111 #AABBCC #hello #AABBCCDD #1175BA World";
let expected: Vec<Identifier> = vec![
Identifier::new_unchecked("derive", Case::None, 2),
Identifier::new_unchecked("Clone", Case::None, 9),
Identifier::new_unchecked("hello", Case::None, 38),
- Identifier::new_unchecked("World", Case::None, 54),
+ Identifier::new_unchecked("World", Case::None, 62),
];
let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
assert_eq!(expected, actual);
| Hex colors still being picked up
I saw #462 , but it seems colors are still being picked up
version: `typos-cli 1.11.11
```
error: `BA` should be `BY`, `BE`
--> /local/home/deguzim/src/DeGuzim-Config/tampermonkey-scripts/Labunk-documents/presentations/1.better-build-with-brazil-gradle/remarkjs.css:12:17
|
12 | color: #1175BA;
| ^^
|
error: `BA` should be `BY`, `BE`
```
~/typos.toml:
```
1 # Repository: https://github.com/crate-ci/typo
2 # See: https://github.com/crate-ci/typos#false-positives
3
4 [default.extend-words]
5 # bre is an alias in script files
6 bre = "bre"
```
| Our CSS color parser looks like
```rust
preceded(
char('#'),
alt((
take_while_m_n(3, 8, is_lower_hex_digit),
take_while_m_n(3, 8, is_upper_hex_digit),
)),
)(input)
```
The problem is with `#1175BA`, `#1175` matches as a lower hex digit, satisfies that branch, and things move on. This then gets rejected when we later look for a identifier separator and so the whole thing gets rejected and treated as an identifier.
We need to peek into the parse and make sure a separator follows the first case so we can instead try the second | 2022-08-26T05:06:12 | 1.11 | 7d2ad4148d33c6af9f7516112f3ea57eb1bb50bc | [
"tokens::test::tokenize_color"
] | [
"tokens::test::split_ident",
"tokens::test::tokenize_c_escape",
"tokens::test::tokenize_dot_separated_words",
"tokens::test::tokenize_empty_is_empty",
"tokens::test::tokenize_double_escape",
"tokens::test::tokenize_ignore_base64_case_1",
"tokens::test::tokenize_ignore_base64_case_2",
"tokens::test::to... | [] | [] |
crate-ci/typos | 486 | crate-ci__typos-486 | [
"481"
] | 8cd9cef88ac00b9a94d03ec367a8f1564ebff31b | diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -142,6 +142,7 @@ mod parser {
+ nom::Slice<std::ops::RangeTo<usize>>
+ nom::Offset
+ Clone
+ + Default
+ PartialEq
+ std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -172,6 +173,7 @@ mod parser {
+ nom::Slice<std::ops::RangeTo<usize>>
+ nom::Offset
+ Clone
+ + Default
+ PartialEq
+ std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -181,15 +183,15 @@ mod parser {
// CAUTION: If adding an ignorable literal, if it doesn't start with `is_xid_continue`,
// - Update `is_ignore_char` to make sure `sep1` doesn't eat it all up
// - Make sure you always consume it
- terminated(uuid_literal, sep1),
- terminated(hash_literal, sep1),
- terminated(hex_literal, sep1),
- terminated(dec_literal, sep1),
- terminated(ordinal_literal, sep1),
- terminated(base64_literal, sep1),
- terminated(email_literal, sep1),
- terminated(url_literal, sep1),
- terminated(css_color, sep1),
+ terminated(uuid_literal, peek(sep1)),
+ terminated(hash_literal, peek(sep1)),
+ terminated(base64_literal, peek(sep1)), // base64 should be quoted or something
+ terminated(ordinal_literal, peek(sep1)),
+ terminated(hex_literal, peek(sep1)),
+ terminated(dec_literal, peek(sep1)), // Allow digit-prefixed words
+ terminated(email_literal, peek(sep1)),
+ terminated(url_literal, peek(sep1)),
+ terminated(css_color, peek(sep1)),
c_escape,
printf,
other,
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -198,10 +200,24 @@ mod parser {
fn sep1<T>(input: T) -> IResult<T, T>
where
- T: nom::InputTakeAtPosition + std::fmt::Debug,
+ T: nom::InputTakeAtPosition
+ + nom::InputTake
+ + nom::InputIter
+ + nom::InputLength
+ + nom::Slice<std::ops::RangeFrom<usize>>
+ + nom::Slice<std::ops::RangeTo<usize>>
+ + nom::Offset
+ + Clone
+ + Default
+ + PartialEq
+ + std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ <T as nom::InputIter>::Item: AsChar + Copy,
{
- take_while1(is_ignore_char)(input)
+ alt((
+ recognize(satisfy(|c| !is_xid_continue(c))),
+ map(eof, |_| T::default()),
+ ))(input)
}
fn other<T>(input: T) -> IResult<T, T>
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -391,7 +407,16 @@ mod parser {
<T as nom::InputIter>::Item: AsChar + Copy,
{
let (padding, captured) = take_while1(is_base64_digit)(input.clone())?;
+
+ const CHUNK: usize = 4;
+ let padding_offset = input.offset(&padding);
+ let mut padding_len = CHUNK - padding_offset % CHUNK;
+ if padding_len == CHUNK {
+ padding_len = 0;
+ }
+
if captured.input_len() < 90
+ && padding_len == 0
&& captured
.iter_elements()
.all(|c| !['/', '+'].contains(&c.as_char()))
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -402,14 +427,8 @@ mod parser {
)));
}
- const CHUNK: usize = 4;
- let padding_offset = input.offset(&padding);
- let mut padding_len = CHUNK - padding_offset % CHUNK;
- if padding_len == CHUNK {
- padding_len = 0;
- }
-
let (after, _) = take_while_m_n(padding_len, padding_len, is_base64_padding)(padding)?;
+
let after_offset = input.offset(&after);
Ok(input.take_split(after_offset))
}
| diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -1132,12 +1151,6 @@ mod test {
("D41D8CD98F00B204E9800998ECF8427E", true),
// A 31-character hexadecimal string: too short to be a hash.
("D41D8CD98F00B204E9800998ECF8427", false),
- // A 40-character string, but with non-hex characters (in
- // several positions.)
- ("Z85865fd0412e40d041e861506bb3ac11a3a91e3", false),
- ("485865fd04Z2e40d041e861506bb3ac11a3a91e3", false),
- ("485865fd0412e40d041e8Z1506bb3ac11a3a91e3", false),
- ("485865fd0412e40d041e861506bb3ac11a3a91eZ", false),
] {
let input = format!("Hello {} World", hashlike);
let mut expected: Vec<Identifier> = vec![
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -1154,6 +1167,22 @@ mod test {
}
}
+ #[test]
+ fn tokenize_hash_in_mixed_path() {
+ let parser = TokenizerBuilder::new().build();
+
+ let input = " /// at /rustc/c7087fe00d2ba919df1d813c040a5d47e43b0fe7\\/src\\libstd\\rt.rs:51";
+ let expected: Vec<Identifier> = vec![
+ Identifier::new_unchecked("at", Case::None, 25),
+ // `rustc...` looks like the start of a URL
+ Identifier::new_unchecked("rs", Case::None, 91),
+ ];
+ let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
+ assert_eq!(expected, actual);
+ let actual: Vec<_> = parser.parse_str(input).collect();
+ assert_eq!(expected, actual);
+ }
+
#[test]
fn tokenize_ignore_base64_case_1() {
let parser = TokenizerBuilder::new().build();
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -1181,6 +1210,21 @@ mod test {
assert_eq!(expected, actual);
}
+ #[test]
+ fn tokenize_ignore_base64_case_3() {
+ let parser = TokenizerBuilder::new().build();
+
+ let input = r#" "integrity": "sha512-hCmlUAIlUiav8Xdqw3Io4LcpA1DOt7h3LSTAC4G6JGHFFaWzI6qvFt9oilvl8BmkbBRX1IhM90ZAmpk68zccQA==","#;
+ let expected: Vec<Identifier> = vec![
+ Identifier::new_unchecked("integrity", Case::None, 8),
+ Identifier::new_unchecked("sha512", Case::None, 21),
+ ];
+ let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
+ assert_eq!(expected, actual);
+ let actual: Vec<_> = parser.parse_str(input).collect();
+ assert_eq!(expected, actual);
+ }
+
#[test]
fn tokenize_ignore_email() {
let parser = TokenizerBuilder::new().build();
| `/`, `_`, `.`, etc before or after natively ignored item (guid, hash, etc) causes it to be spell checked
In part of our code base we had a set of comments referencing text from a backtrace, with lines like this that triggers as typos with this tool:
```
error: `ba` should be `by`, `be`
--> ./src/panic.rs:94:45
|
94 | /// at /rustc/c7087fe00d2ba919df1d813c040a5d47e43b0fe7\/src\libstd\rt.rs:51
```
the same long hexadecimal number by itself in a comment doesn't trigger though (probably thanks to the fix for #326).
```
/// testing: c7087fe00d2ba919df1d813c040a5d47e43b0fe7
```
Is there, or should there be, some specific detection of paths so it doesn't spellcheck the path itself or does detect that one directory here was really a long hexadecimal number and should be treated just as a separate hexadecimal number is?
| Created tests for various combinations in #483. Specifically the issue is having an ignored token next to another ignored token (escape sequences). We don't gracefully handle that.
A workaround is to change the paths to use `/` exclusively until this is fixed.
Thanks! Will try the workaround also.
Also had some issues with a JWT token that had a big JSON text embedded into it with very long encoded strings for the crypto keys that also had some essentially paths in it
Can confirm that replacing `\/` in the paths that failed on with `/` fixed the false positives here, thx! | 2022-05-11T03:02:43 | 1.7 | 8cd9cef88ac00b9a94d03ec367a8f1564ebff31b | [
"tokens::test::tokenize_hash_in_mixed_path",
"tokens::test::tokenize_ignore_base64_case_3"
] | [
"tokens::test::split_ident",
"tokens::test::tokenize_dot_separated_words",
"tokens::test::tokenize_c_escape",
"tokens::test::tokenize_empty_is_empty",
"tokens::test::tokenize_double_escape",
"tokens::test::tokenize_color",
"tokens::test::tokenize_ignore_base64_case_2",
"tokens::test::tokenize_ignore_e... | [] | [] |
crate-ci/typos | 463 | crate-ci__typos-463 | [
"462"
] | 74cb409ce77e2c47e7b703b7bc68b1245ee72bc5 | diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -189,6 +189,7 @@ mod parser {
terminated(base64_literal, sep1),
terminated(email_literal, sep1),
terminated(url_literal, sep1),
+ terminated(css_color, sep1),
c_escape,
printf,
other,
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -283,6 +284,27 @@ mod parser {
)(input)
}
+ fn css_color<T>(input: T) -> IResult<T, T>
+ where
+ T: nom::InputTakeAtPosition
+ + nom::InputTake
+ + nom::InputIter
+ + nom::InputLength
+ + nom::Slice<std::ops::RangeFrom<usize>>
+ + Clone
+ + std::fmt::Debug,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ <T as nom::InputIter>::Item: AsChar + Copy,
+ {
+ preceded(
+ char('#'),
+ alt((
+ take_while_m_n(3, 8, is_lower_hex_digit),
+ take_while_m_n(3, 8, is_upper_hex_digit),
+ )),
+ )(input)
+ }
+
fn uuid_literal<T>(input: T) -> IResult<T, T>
where
T: nom::InputTakeAtPosition
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -620,8 +642,13 @@ mod parser {
#[inline]
fn is_ignore_char(i: impl AsChar + Copy) -> bool {
let c = i.as_char();
- // See c_escape and printf
- !unicode_xid::UnicodeXID::is_xid_continue(c) && c != '\\' && c != '%'
+ !unicode_xid::UnicodeXID::is_xid_continue(c) &&
+ // See c_escape
+ c != '\\' &&
+ // See printf
+ c != '%' &&
+ // See css_color
+ c != '#'
}
#[inline]
| diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -1248,6 +1275,23 @@ mod test {
assert_eq!(expected, actual);
}
+ #[test]
+ fn tokenize_color() {
+ let parser = TokenizerBuilder::new().build();
+
+ let input = "#[derive(Clone)] #aaa # #111 #AABBCC #hello #AABBCCDD World";
+ let expected: Vec<Identifier> = vec![
+ Identifier::new_unchecked("derive", Case::None, 2),
+ Identifier::new_unchecked("Clone", Case::None, 9),
+ Identifier::new_unchecked("hello", Case::None, 38),
+ Identifier::new_unchecked("World", Case::None, 54),
+ ];
+ let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
+ assert_eq!(expected, actual);
+ let actual: Vec<_> = parser.parse_str(input).collect();
+ assert_eq!(expected, actual);
+ }
+
#[test]
fn tokenize_template() {
let parser = TokenizerBuilder::new().build();
| [question] Ignore hex color value
Hello, when I use it, I found that there are some scenes that do not need to be corrected. For example, when expressing colors in hexadecimal in a stylesheet file, e.g. `color: #ba431b`
error: `ba` should be `by`, `be`
--> ./index.less:3:9
|
| color:#ba431b
How should I handle this situation better, ignoring the whole *.less ? Or use `default.extend-words`
| 2022-04-18T22:20:19 | 1.6 | 74cb409ce77e2c47e7b703b7bc68b1245ee72bc5 | [
"tokens::test::tokenize_color"
] | [
"tokens::test::split_ident",
"tokens::test::tokenize_c_escape",
"tokens::test::tokenize_dot_separated_words",
"tokens::test::tokenize_empty_is_empty",
"tokens::test::tokenize_double_escape",
"tokens::test::tokenize_ignore_base64_case_1",
"tokens::test::tokenize_ignore_base64_case_2",
"tokens::test::to... | [] | [] | |
crate-ci/typos | 434 | crate-ci__typos-434 | [
"433"
] | 05773fe8157dcfbbe8e4d537590ce0792083b691 | diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -422,13 +422,33 @@ mod parser {
tuple((char(':'), char('/'), char('/'))),
)),
tuple((
- opt(terminated(take_while1(is_localport_char), char('@'))),
+ opt(terminated(url_userinfo, char('@'))),
take_while1(is_domain_char),
opt(preceded(char(':'), take_while1(AsChar::is_dec_digit))),
)),
char('/'),
// HACK: Too lazy to enumerate
- take_while(is_localport_char),
+ take_while(is_path_query_fragment),
+ )))(input)
+ }
+
+ fn url_userinfo<T>(input: T) -> IResult<T, T>
+ where
+ T: nom::InputTakeAtPosition
+ + nom::InputTake
+ + nom::InputIter
+ + nom::InputLength
+ + nom::Offset
+ + nom::Slice<std::ops::RangeTo<usize>>
+ + nom::Slice<std::ops::RangeFrom<usize>>
+ + Clone
+ + std::fmt::Debug,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ <T as nom::InputIter>::Item: AsChar + Copy,
+ {
+ recognize(tuple((
+ take_while1(is_localport_char),
+ opt(preceded(char(':'), take_while(is_localport_char))),
)))(input)
}
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -564,6 +584,33 @@ mod parser {
|| "-().".find(c).is_some()
}
+ #[inline]
+ fn is_path_query_fragment(i: impl AsChar + Copy) -> bool {
+ let c = i.as_char();
+ is_pchar(c) || "/?#".find(c).is_some()
+ }
+
+ #[inline]
+ fn is_pchar(i: impl AsChar + Copy) -> bool {
+ let c = i.as_char();
+ is_uri_unreserved(c) || is_uri_sub_delims(c) || "%:@".find(c).is_some()
+ }
+
+ #[inline]
+ fn is_uri_unreserved(i: impl AsChar + Copy) -> bool {
+ let c = i.as_char();
+ ('a'..='z').contains(&c)
+ || ('A'..='Z').contains(&c)
+ || ('0'..='9').contains(&c)
+ || "-._~".find(c).is_some()
+ }
+
+ #[inline]
+ fn is_uri_sub_delims(i: impl AsChar + Copy) -> bool {
+ let c = i.as_char();
+ "!$&'()*+,;=".find(c).is_some()
+ }
+
#[inline]
fn is_scheme_char(i: impl AsChar + Copy) -> bool {
let c = i.as_char();
| diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -1113,10 +1160,11 @@ mod test {
fn tokenize_ignore_max_url() {
let parser = TokenizerBuilder::new().build();
- let input = "Good http://user@example.com:3142/hello?query=value&extra=two#fragment Bye";
+ let input =
+ "Good http://user:password@example.com:3142/hello?query=value&extra=two#fragment,split Bye";
let expected: Vec<Identifier> = vec![
Identifier::new_unchecked("Good", Case::None, 0),
- Identifier::new_unchecked("Bye", Case::None, 71),
+ Identifier::new_unchecked("Bye", Case::None, 86),
];
let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
assert_eq!(expected, actual);
| Typo detected in URL
```
error: `ba` should be `by`, `be`
--> content/team/index.md:32:400
|
32 | <a href="https://xxx.service-now.com/nav_to.do?uri=%2F$oc.do%3Fsysparm_include_view%3Ddaily,weekly,monthly%26sysparm_timezone%3DUS%2FCentral%26sysparm_timeline_enabled%3Dfalse%26sysparm_current_view%3Dmonthly%26sysparm_group_id%3Dxxx%26sysparm_rotas%3Dxxx%26sysparm_rosters%3Dxxx%26sysparm_show_gaps%3Dtrue%26sysparm_show_conflicts%3Dtrue%26sysparm_start_date%3D2022-02-04">On-Call Calendar</a>
```
I've stripped out part of the url, but it's flagging a 35 char string of hex as a typo. My workaround for now was to ignore the `index.md` files as they are basically static content.
| I've copied that line into a file and ran typos v1.4.0 against it and ma not seeing any problems.
What version of typos are you using? If you isolate it into a single file and run typos against it, do you see the message?
Hi,
Just updated to 1.4.0 (was on 1.3.3) and I still see the same error. I noticed the error goes away if I strip out the comma separating the hex strings.
e.g. `https://xxx.com/.*[a-z0-9]{35}/` does not trigger the error, whereas `https://xxx.com/.*[a-z0-9]{35},[a-z0-9]{35}/` does seem to. Hopefully that makes sense | 2022-02-14T22:50:40 | 1.4 | 05773fe8157dcfbbe8e4d537590ce0792083b691 | [
"tokens::test::tokenize_ignore_max_url"
] | [
"tokens::test::split_ident",
"tokens::test::tokenize_dot_separated_words",
"tokens::test::tokenize_c_escape",
"tokens::test::tokenize_double_escape",
"tokens::test::tokenize_empty_is_empty",
"tokens::test::tokenize_ignore_base64_case_1",
"tokens::test::tokenize_ignore_email",
"tokens::test::tokenize_i... | [] | [] |
crate-ci/typos | 412 | crate-ci__typos-412 | [
"411"
] | 4b2e66487c0d94e0f0ab1765a9e74bfed663aa1e | diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -152,7 +152,7 @@ mod parser {
fn identifier<T>(input: T) -> IResult<T, T>
where
- T: nom::InputTakeAtPosition,
+ T: nom::InputTakeAtPosition + std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
{
// Generally a language would be `{XID_Start}{XID_Continue}*` but going with only
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -191,18 +191,39 @@ mod parser {
terminated(url_literal, sep1),
c_escape,
printf,
- sep1,
+ other,
)))(input)
}
fn sep1<T>(input: T) -> IResult<T, T>
where
- T: nom::InputTakeAtPosition,
+ T: nom::InputTakeAtPosition + std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
{
take_while1(is_ignore_char)(input)
}
+ fn other<T>(input: T) -> IResult<T, T>
+ where
+ T: nom::InputTakeAtPosition
+ + nom::InputTake
+ + nom::InputIter
+ + nom::InputLength
+ + nom::Slice<std::ops::RangeFrom<usize>>
+ + nom::Slice<std::ops::RangeTo<usize>>
+ + nom::Offset
+ + Clone
+ + PartialEq
+ + std::fmt::Debug,
+ <T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
+ <T as nom::InputIter>::Item: AsChar + Copy,
+ {
+ recognize(tuple((
+ satisfy(|c| !is_xid_continue(c)),
+ take_while(is_ignore_char),
+ )))(input)
+ }
+
fn ordinal_literal<T>(input: T) -> IResult<T, T>
where
T: nom::InputTakeAtPosition
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -212,7 +233,8 @@ mod parser {
+ nom::Offset
+ nom::Slice<std::ops::RangeTo<usize>>
+ nom::Slice<std::ops::RangeFrom<usize>>
- + Clone,
+ + Clone
+ + std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
<T as nom::InputIter>::Item: AsChar + Copy,
{
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -237,7 +259,7 @@ mod parser {
fn dec_literal<T>(input: T) -> IResult<T, T>
where
- T: nom::InputTakeAtPosition,
+ T: nom::InputTakeAtPosition + std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
{
take_while1(is_dec_digit_with_sep)(input)
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -250,7 +272,8 @@ mod parser {
+ nom::InputIter
+ nom::InputLength
+ nom::Slice<std::ops::RangeFrom<usize>>
- + Clone,
+ + Clone
+ + std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
<T as nom::InputIter>::Item: AsChar + Copy,
{
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -269,7 +292,8 @@ mod parser {
+ nom::Offset
+ nom::Slice<std::ops::RangeTo<usize>>
+ nom::Slice<std::ops::RangeFrom<usize>>
- + Clone,
+ + Clone
+ + std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
<T as nom::InputIter>::Item: AsChar + Copy,
{
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -295,7 +319,8 @@ mod parser {
+ nom::Offset
+ nom::Slice<std::ops::RangeTo<usize>>
+ nom::Slice<std::ops::RangeFrom<usize>>
- + Clone,
+ + Clone
+ + std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
<T as nom::InputIter>::Item: AsChar + Copy,
{
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -325,8 +350,8 @@ mod parser {
+ nom::Offset
+ nom::Slice<std::ops::RangeTo<usize>>
+ nom::Slice<std::ops::RangeFrom<usize>>
- + std::fmt::Debug
- + Clone,
+ + Clone
+ + std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
<T as nom::InputIter>::Item: AsChar + Copy,
{
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -359,8 +384,8 @@ mod parser {
+ nom::Offset
+ nom::Slice<std::ops::RangeTo<usize>>
+ nom::Slice<std::ops::RangeFrom<usize>>
- + std::fmt::Debug
- + Clone,
+ + Clone
+ + std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
<T as nom::InputIter>::Item: AsChar + Copy,
{
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -380,8 +405,8 @@ mod parser {
+ nom::Offset
+ nom::Slice<std::ops::RangeTo<usize>>
+ nom::Slice<std::ops::RangeFrom<usize>>
- + std::fmt::Debug
- + Clone,
+ + Clone
+ + std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
<T as nom::InputIter>::Item: AsChar + Copy,
{
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -412,8 +437,8 @@ mod parser {
+ nom::Offset
+ nom::Slice<std::ops::RangeTo<usize>>
+ nom::Slice<std::ops::RangeFrom<usize>>
- + std::fmt::Debug
- + Clone,
+ + Clone
+ + std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
<T as nom::InputIter>::Item: AsChar + Copy,
{
diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -433,8 +458,8 @@ mod parser {
+ nom::Offset
+ nom::Slice<std::ops::RangeTo<usize>>
+ nom::Slice<std::ops::RangeFrom<usize>>
- + std::fmt::Debug
- + Clone,
+ + Clone
+ + std::fmt::Debug,
<T as nom::InputTakeAtPosition>::Item: AsChar + Copy,
<T as nom::InputIter>::Item: AsChar + Copy,
{
| diff --git a/crates/typos/src/tokens.rs b/crates/typos/src/tokens.rs
--- a/crates/typos/src/tokens.rs
+++ b/crates/typos/src/tokens.rs
@@ -1159,6 +1184,22 @@ mod test {
assert_eq!(expected, actual);
}
+ #[test]
+ fn tokenize_template() {
+ let parser = TokenizerBuilder::new().build();
+
+ let input = "Hello {{% foo %}} world!";
+ let expected: Vec<Identifier> = vec![
+ Identifier::new_unchecked("Hello", Case::None, 0),
+ Identifier::new_unchecked("foo", Case::None, 10),
+ Identifier::new_unchecked("world", Case::None, 18),
+ ];
+ let actual: Vec<_> = parser.parse_bytes(input.as_bytes()).collect();
+ assert_eq!(expected, actual);
+ let actual: Vec<_> = parser.parse_str(input).collect();
+ assert_eq!(expected, actual);
+ }
+
#[test]
fn split_ident() {
let cases = [
| Missed typo that is in the list
In https://github.com/matrix-org/matrix-doc/pull/3658#discussion_r792092382 the misspelt word ("defintions" instead of "definitions") was not caught, but it *is* already in `words.csv`.
CC @aaronraimist who added the CI workflow.
| Thanks for reporting this!
It looks like our tokenizer has a bug where its incorrectly ending at templates. In this case, its ending at:
```
Some API endpoints may allow or require the use of `POST` requests
without a transaction ID. Where this is optional, the use of a `PUT`
request is strongly recommended.
{{% http-api spec="client-server" api="versions" %}}
```
(I used the `--identifiers` flag to discover this) | 2022-01-26T23:40:54 | 1.3 | 4b2e66487c0d94e0f0ab1765a9e74bfed663aa1e | [
"tokens::test::tokenize_template"
] | [
"tokens::test::split_ident",
"tokens::test::tokenize_c_escape",
"tokens::test::tokenize_dot_separated_words",
"tokens::test::tokenize_empty_is_empty",
"tokens::test::tokenize_double_escape",
"tokens::test::tokenize_ignore_base64",
"tokens::test::tokenize_ignore_escape",
"tokens::test::tokenize_ignore_... | [] | [] |
crate-ci/typos | 776 | crate-ci__typos-776 | [
"733"
] | 2158ddd42cda41bf7d69c14ffe8114eec91e3f2e | diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -359,6 +359,7 @@ accapt,accept
accapted,accepted
accapts,accepts
acccept,accept
+accceptable,acceptable
acccepted,accepted
acccepting,accepting
acccepts,accepts
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -707,6 +708,7 @@ accuartely,accurately
accuastion,accusation
acculumate,accumulate
acculumated,accumulated
+acculumating,accumulating
acculumation,accumulation
accumalate,accumulate
accumalated,accumulated
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -1111,6 +1113,7 @@ acual,actual
acually,actually
acuired,acquired
acuires,acquires
+acumalated,accumulated
acumulate,accumulate
acumulated,accumulated
acumulates,accumulates
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -3392,6 +3395,7 @@ antrophology,anthropology
antry,entry
antyhing,anything
anual,annual
+anualized,annualized
anually,annually
anuglar,angular
anuled,annulled
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -3720,6 +3724,7 @@ applikations,applications
applikay,appliqué
applikays,appliqués
appling,applying,appalling
+applizes,applies
appllied,applied
applly,apply
appluad,applaud
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -4966,6 +4971,7 @@ asssembler,assembler
asssembly,assembly
asssert,assert
asssertion,assertion
+assset,asset
asssits,assists
asssociate,associated
asssociated,associated
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -5125,6 +5131,7 @@ asynchnous,asynchronous
asynchonous,asynchronous
asynchonously,asynchronously
asynchornous,asynchronous
+asynchornously,asynchronously
asynchoronous,asynchronous
asynchrnous,asynchronous
asynchrnously,asynchronously
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -6046,6 +6053,7 @@ aviable,available
aviaiton,aviation
avialability,availability
avialable,available
+avialible,available
avilability,availability
avilable,available
aviod,avoid
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -6220,6 +6228,7 @@ backwardss,backwards
backware,backward
backwark,backward
backwars,backward,backwards
+backword,backward
backwrad,backward
baclony,balcony
bactracking,backtracking
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -6242,6 +6251,7 @@ bahaviour,behaviour
baisc,basic
baiscly,basically
baised,raised
+baises,biases
bakc,back
bakcers,backers
bakcrefs,backrefs
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -7782,6 +7792,7 @@ buildd,build,builds
builded,built
buildes,builders
buildins,buildings
+buildning,building
buildpackge,buildpackage
buildpackges,buildpackages
builing,building
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -9963,6 +9974,7 @@ cicrulating,circulating
cicular,circular
ciculars,circulars
cielings,ceilings
+cients,clients
cigarattes,cigarettes
cigarete,cigarets,cigarette
cigaretes,cigarettes
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -10437,6 +10449,7 @@ clinicos,clinics
clipbaord,clipboard
clipboad,clipboard
clipboads,clipboards
+cliped,clipped
clipoard,clipboard
clipoards,clipboards
clipoing,clipping
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -10536,6 +10549,7 @@ cnosoles,consoles
cntain,contain
cntains,contains
cnter,center
+coachig,coaching
coalace,coalesce
coalacece,coalesce,coalescence
coalaced,coalesced
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -10772,6 +10786,7 @@ colera,cholera
colerscheme,colorscheme
colescing,coalescing
colgone,cologne
+colide,collide
colision,collision
colission,collision
collabarate,collaborate
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -10810,6 +10825,7 @@ collaspes,collapses
collaspible,collapsible
collasping,collapsing
collataral,collateral
+collater,collator
collaterial,collateral
collaterol,collateral
collationg,collation
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -10987,6 +11003,7 @@ comapres,compares
comapring,comparing
comaprison,comparison
comaprisons,comparisons
+comapt,compat
comaptibele,compatible
comaptibelities,compatibilities
comaptibelity,compatibility
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -11843,6 +11860,7 @@ completito,completion
completley,completely
completly,completely
completness,completeness
+completor,completer,completion
complets,completes
complette,complete
complettly,completely
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -13389,6 +13407,7 @@ constriant,constraint
constriants,constraints
constrint,constraint
constrints,constraints
+constrol,control
constrollers,controllers
construc,construct
construccion,construction
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -14061,6 +14080,7 @@ conventionnal,conventional
convento,convention
convenvient,convenient
conver,convert
+converage,converge,coverage
converastion,conversations
converastions,conservation
converdation,conservation
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -15412,6 +15432,7 @@ cuplrit,culprit
curage,courage
curageous,courageous
curatin,curtain
+curature,curvature
curce,course,curse,curve
curch,church
curcial,crucial
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -17086,6 +17107,7 @@ depcits,depicts
depcrecated,deprecated
depden,depend
depdence,dependence
+depdencencies,dependencies
depdencente,dependence
depdencentes,dependences
depdences,dependences
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -17901,6 +17923,7 @@ detectes,detects
detectetd,detected
detectie,detectives
detectiona,detection,detections
+detectionn,detection
detectivs,detectives
detectoare,detector
detectsion,detection
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -18051,6 +18074,7 @@ developmetn,developments
developmetns,developments
developmets,developments
developmnet,developments
+developoment,development
developors,develops
developp,develop
developpe,develop
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -21180,6 +21204,7 @@ emipres,empires
emision,emission
emiss,remiss,amiss,amass
emissed,amassed,amiss
+emitable,emittable
emited,emitted
emiting,emitting
emition,emission,emotion
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -21202,6 +21227,7 @@ emmitted,emitted
emmitting,emitting
emnity,enmity
emobdiment,embodiment
+emoiji,emoji
emotionaly,emotionally
emotionella,emotionally
emoty,empty
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -22585,6 +22611,7 @@ evey,every
eveyone,everyone
eveyr,every
eveyrones,everyones
+eveything,everything
evidencd,evidenced
evidende,evidenced
evidentally,evidently
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -22676,6 +22703,7 @@ examinated,examined
examind,examined
examinerad,examined
examing,examining
+examininig,examining
examinining,examining
examle,example
examles,examples
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -25170,6 +25198,7 @@ fingertits,fingertips
fingertops,fingertips
fingertrips,fingertips
finialization,finalization
+finialize,finalize
finializing,finalizing
finilizes,finalizes
fininsh,finnish
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -25256,6 +25285,7 @@ fixe,fixed,fixes,fix,fixme,fixer
fixel,pixel
fixels,pixels
fixeme,fixme
+fixutre,fixture
fixwd,fixed
fizeek,physique
flacons,falcons
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -25783,6 +25813,7 @@ formost,foremost
formt,format
formua,formula
formual,formula
+formuala,formula
formuale,formulae
formuals,formulas
formualte,formulate
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -26099,6 +26130,7 @@ fromates,formats
fromating,formatting
fromation,formation
fromats,formats
+fromatted,formatted
fromatting,formatting
frome,from
fromed,formed
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -26947,6 +26979,7 @@ gnerating,generating
gneration,generation
gnerations,generations
gneric,generic
+gnored,ignored
gnorung,ignoring
goalkeaper,goalkeeper
goalkeepr,goalkeeper
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -28677,6 +28710,7 @@ idenfitifer,identifier
idenfitifers,identifiers
idenfitify,identify
idenfity,identify
+idenifiable,identifiable
idenitfy,identify
idenities,identities
idenitify,identify
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -29757,6 +29791,7 @@ incomaptibele,incompatible
incomaptibelities,incompatibilities
incomaptibelity,incompatibility
incomaptible,incompatible
+incomatible,incompatible
incombatibilities,incompatibilities
incombatibility,incompatibility
incomfort,discomfort,uncomfortable
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -30714,6 +30749,7 @@ inherintly,inherently
inheritablility,inheritability
inheritage,heritage,inheritance
inheritence,inheritance
+inheritences,inheritances
inherith,inherit
inherithed,inherited
inherithing,inheriting
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -32970,6 +33006,7 @@ iteratered,iterated
iteratior,iterator
iteratiors,iterators
iteratons,iterations
+itereate,iterate
itereating,iterating
itereator,iterator
iterface,interface
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -35496,6 +35534,7 @@ maxumum,maximum
mayalsia,malaysia
mayalsian,malaysian
mayballine,maybelline
+maybed,maybe
maybee,maybe
maybelle,maybelline
maybelleine,maybelline
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -37155,6 +37194,7 @@ monolopy,monopoly
monolothic,monolithic
monolouge,monologue
monolythic,monolithic
+monomorpize,monomorphize
monontonicity,monotonicity
monopace,monospace
monopilies,monopolies
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -37965,6 +38005,7 @@ nealy,nearly,newly
neares,nearest
nearset,nearest
neast,nearest,beast
+necassary,necessary
necassery,necessary
necassry,necessary
necause,because
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -38864,6 +38905,7 @@ normaized,normalized
normale,normal
normales,normals
normalis,normals
+normalizd,normalized
normall,normal,normally
normallized,normalized
normalls,normals
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -39576,6 +39618,7 @@ offsett,offset
offsited,offside
offspirng,offspring
offsrping,offspring
+offst,offset
offstets,offsets
offten,often
oficial,official
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -39746,7 +39789,7 @@ ontraio,ontario
ontrolled,controlled
onveience,convenience
onventions,conventions
-onw,own
+onw,own,now
onwed,owned
onwee,ennui
onwer,owner
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -39970,6 +40013,7 @@ opprotunity,opportunity
opproximate,approximate
opps,oops
oppsofite,opposite
+oppurtinity,opportunity
oppurtunities,opportunities
oppurtunity,opportunity
opration,operation
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -40070,6 +40114,7 @@ optonally,optionally
optons,options
opulate,populate,opiate,opulent
opulates,populates,opiates
+oputput,output
opyion,option
opyions,options
oracels,oracles
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -40572,6 +40617,7 @@ outweights,outweighs
outwieghs,outweighs
ouur,our
ouurs,ours
+ove,oven,over
oveerun,overrun
oveflow,overflow
oveflowed,overflowed
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -41243,6 +41289,7 @@ pararaph,paragraph
parareter,parameter
parargaph,paragraph
parargaphs,paragraphs
+pararm,param
pararmeter,parameter
pararmeters,parameters
paraside,paradise
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -41888,6 +41935,7 @@ pensioen,pension
pensle,pencil
penssylvania,pennsylvania
pentagoon,pentagon
+pentalty,penalty
pentsylvania,pennsylvania
pentuim,pentium
penultimante,penultimate
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -43177,6 +43225,7 @@ poicy,policy
poignat,poignant
poiint,point
poiints,points
+poin,point
poind,point
poindcloud,pointcloud
poineer,pioneer
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -43304,6 +43353,7 @@ polotic,politic
polotical,political
polotically,politically
polotics,politics
+polpulate,populate
poltic,politic
poltical,political
poltically,politically
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -43694,6 +43744,7 @@ potition,position
potocol,protocol
potrait,portrait
potrayed,portrayed
+poulate,populate
poulations,populations
pount,point,pound
pounts,points
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -43957,6 +44008,7 @@ predicessor,predecessor
predicessors,predecessors
prediceted,predicated
prediciment,predicament
+prediciotn,prediction
predicited,predicated
predicitng,predicting
prediciton,prediction
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -43964,7 +44016,7 @@ predicitons,predictions
predicitve,predictive
predickted,predicated
predictave,predictive
-predicte,predictive
+predicte,predictive,predicted
predictible,predictable
predictie,predictive
predictin,prediction
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -44159,6 +44211,7 @@ premptively,preemptively
premuim,premium
premuims,premiums
preocess,process
+preocessing,processing,preprocessing
preocupation,preoccupation
preoperty,property
preorded,preordered
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -44545,6 +44598,7 @@ primive,primitive
primordal,primordial
primtiive,primitive
primtive,primitive
+primtives,primitives
princepals,principals
princeple,principle
princeples,principles
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -45283,10 +45337,12 @@ promisse,promise,promises,promised
promissed,promised
promisses,promises
promissing,promising
+promitives,primitives
promixity,proximity
prommpt,prompt
prommpts,prompts
promocional,promotional
+promordials,primordials
promose,promotes
promoteurs,promotes
promotheus,prometheus
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -45441,6 +45497,7 @@ propetrys,properties
propety,property
propetys,properties
propgated,propagated
+propgating,propagating
prophacies,prophecies
prophacy,prophecy
prophechies,prophecies
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -45844,6 +45901,7 @@ pruposefully,purposefully
pruposely,purposely
prusuit,pursuit
prviate,private
+prvode,provide
pryamid,pyramid
pryamids,pyramids
psace,space,pace
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -46644,7 +46702,7 @@ realtive,relative,reactive
realtively,relatively
realtives,relatives
realtivity,relativity
-realy,really,relay
+realy,really,relay,real
realyl,really
reamde,readme
reamin,remain
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -46786,6 +46844,7 @@ rebuplican,republican
rebuplicans,republicans
recahed,reached
recal,recall
+recalcelated,recalculated
recalcualte,recalculate
recalcualted,recalculated
recalcualtes,recalculates
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -49074,6 +49133,7 @@ requird,required
requireing,requiring
requiremenet,requirement
requiremenets,requirements
+requiremenht,requirement
requiremnt,requirement
requirment,requirement
requirmentes,requirements
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -49645,6 +49705,8 @@ resursively,recursively
resuse,reuse
resused,reused,refused,resumed
resut,result
+resutl,result
+resutls,results
resuts,results
resycn,resync
retailate,retaliate
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -50305,6 +50367,7 @@ ruleboook,rulebook
rulle,rule
rumatic,rheumatic
rumorus,rumors
+rumtime,runtime
rumuors,rumors
runing,running,ruining
runn,run
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -51172,6 +51235,8 @@ segement,segment
segementation,segmentation
segemented,segmented
segements,segments
+segemnt,segment
+segemntation,segmentation
segemnts,segments
segergation,segregation
segfualt,segfault
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -51193,6 +51258,7 @@ segmentes,segments
segmetn,segment
segmetned,segmented
segmetns,segments
+segmnet,segment
segragated,segregated
segragation,segregation
segregacion,segregation
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -52902,6 +52968,7 @@ slooth,sleuth,sloth,sooth
sloothed,sleuthing
sloothing,sleuthing
slooths,sleuths
+slotable,slottable
sloughtering,slaughtering
slowy,slowly
slq,sql
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -54241,6 +54308,7 @@ ssoaiating,associating
ssome,some
ssudo,sudo
stabalization,stabilization
+stabalized,stabilized
stabel,stable
stabelized,stabilized
stabilitation,stabilization
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -54477,6 +54545,7 @@ stawk,stalk
stcokbrush,stockbrush
stdanard,standard
stdanards,standards
+stderrr,stderr
steadilly,steadily
steadliy,steadily
stealhty,stealthy
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -57147,6 +57216,7 @@ termniations,terminations
termniator,terminator
termniators,terminators
termo,thermo
+termonology,terminology
termostat,thermostat
termperatue,temperature
termperatues,temperatures
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -57821,6 +57893,7 @@ togueter,together
toi,to,toy
toiletts,toilets
tolarable,tolerable
+tolearnce,tolerance
tolelerance,tolerance
tolen,token
tolens,tokens
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -57844,6 +57917,7 @@ tomatos,tomatoes
tommorow,tomorrow
tommorrow,tomorrow
tomorrrow,tomorrow
+tonange,tonnage
tongiht,tonight
tonguers,tongues
tonihgt,tonight
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -57994,6 +58068,7 @@ toxen,toxin
toxicitity,toxicity
toxicitiy,toxicity
toxiticy,toxicity
+tpos,typos
tpye,type
tpyed,typed
tpyes,types
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -58039,6 +58114,7 @@ trageting,targeting
tragets,targets
tragicallly,tragically
tragicaly,tragically
+traids,traits,triads
traige,triage
traiger,triager
traigers,triagers
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -59039,6 +59115,7 @@ tunrtable,turntable
tuotiral,tutorial
tuotirals,tutorials
tupel,tuple
+tupes,tuples
tupless,tuples
tupparware,tupperware
tupperwears,tupperware
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -59285,7 +59362,9 @@ unadvertantly,inadvertently
unadvertedly,inadvertently
unadvertent,inadvertent
unadvertently,inadvertently
+unafected,unaffected
unahppy,unhappy
+unalbe,unable
unale,unable
unalllowed,unallowed
unambigious,unambiguous
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -59923,6 +60002,7 @@ unfortuneatly,unfortunately
unfortunetely,unfortunately
unfortunetly,unfortunately
unfortuntaly,unfortunately
+unfortuntely,unfortunately
unforunate,unfortunate
unforunately,unfortunately
unforutunate,unfortunate
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -60423,7 +60503,7 @@ unroated,unrotated
unrosponsive,unresponsive
unrpoven,unproven
unrwitten,unwritten
-unsable,unusable,usable,unstable
+unsable,unusable,usable,unstable,unable
unsanfe,unsafe
unsccessful,unsuccessful
unscubscribe,subscribe
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -61283,6 +61363,7 @@ veamant,vehement
veamantly,vehemently
vebrose,verbose
vechiles,vehicles
+vecotor,vector
vecotr,vector
vecotrs,vectors
vectices,vertices
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -62244,6 +62325,7 @@ weas,was
weathliest,wealthiest
webage,webpage
webapge,webpage
+webassemby,webassembly
webbooks,webhooks
webhools,webhooks
webiste,website
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -62263,6 +62345,7 @@ wednessday,wednesdays,wednesday
wednsday,wednesday
wednseday,wednesday
wednsedays,wednesdays
+wee,we
weerd,weird
weerdly,weirdly
weev,weave
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -62460,6 +62543,7 @@ whta,what
whther,whether
whtielist,whitelist
whtihin,within
+whule,while,whole
whyt,what,why
whyth,with
whythout,without
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -62718,6 +62802,7 @@ wordl,world
wordlview,worldview
wordlwide,worldwide
wordpres,wordpress
+wordpresss,wordpress
worfklow,workflow
worfklows,workflows
worflow,workflow
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -62758,6 +62843,7 @@ workboos,workbooks
workd,worked,works
worke,work,worked,works
workes,works,workers
+workfaround,workaround
workfore,workforce
workfow,workflow
workfows,workflows
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -62885,6 +62971,7 @@ wron,wrong
wronf,wrong
wronly,wrongly
wront,wrong
+wrorker,worker
wrteched,wretched
wrtie,write
wrting,writing
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -843,6 +843,7 @@ pub static WORD_WRO_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("nf"),
dictgen::InsensitiveStr::Ascii("nly"),
dictgen::InsensitiveStr::Ascii("nt"),
+ dictgen::InsensitiveStr::Ascii("rker"),
],
values: &[
&["word"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -860,6 +861,7 @@ pub static WORD_WRO_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["wrong"],
&["wrongly"],
&["wrong"],
+ &["worker"],
],
range: 1..=6,
};
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -1285,6 +1287,7 @@ pub static WORD_WORK_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("d"),
dictgen::InsensitiveStr::Ascii("e"),
dictgen::InsensitiveStr::Ascii("es"),
+ dictgen::InsensitiveStr::Ascii("faround"),
dictgen::InsensitiveStr::Ascii("fore"),
dictgen::InsensitiveStr::Ascii("fow"),
dictgen::InsensitiveStr::Ascii("fows"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -1345,6 +1348,7 @@ pub static WORD_WORK_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["worked", "works"],
&["work", "worked", "works"],
&["works", "workers"],
+ &["workaround"],
&["workforce"],
&["workflow"],
&["workflows"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -1423,9 +1427,16 @@ pub static WORD_WORD_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("lview"),
dictgen::InsensitiveStr::Ascii("lwide"),
dictgen::InsensitiveStr::Ascii("pres"),
+ dictgen::InsensitiveStr::Ascii("presss"),
],
- values: &[&["world"], &["worldview"], &["worldwide"], &["wordpress"]],
- range: 1..=5,
+ values: &[
+ &["world"],
+ &["worldview"],
+ &["worldwide"],
+ &["wordpress"],
+ &["wordpress"],
+ ],
+ range: 1..=6,
};
static WORD_WORC_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -2451,7 +2462,7 @@ static WORD_WH_CHILDREN: [Option<&dictgen::DictTrieNode<&'static [&'static str]>
Some(&WORD_WHR_NODE),
Some(&WORD_WHS_NODE),
Some(&WORD_WHT_NODE),
- None,
+ Some(&WORD_WHU_NODE),
None,
None,
None,
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -2474,6 +2485,17 @@ pub static WORD_WHY_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
range: 1..=5,
};
+static WORD_WHU_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
+ children: dictgen::DictTrieChild::Flat(&WORD_WHU_CHILDREN),
+ value: None,
+};
+
+pub static WORD_WHU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
+ keys: &[dictgen::InsensitiveStr::Ascii("le")],
+ values: &[&["while", "whole"]],
+ range: 2..=2,
+};
+
static WORD_WHT_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
children: dictgen::DictTrieChild::Flat(&WORD_WHT_CHILDREN),
value: None,
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -3125,7 +3147,7 @@ pub static WORD_WEG_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
static WORD_WEE_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
children: dictgen::DictTrieChild::Flat(&WORD_WEE_CHILDREN),
- value: None,
+ value: Some(&["we"]),
};
pub static WORD_WEE_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -3196,6 +3218,7 @@ pub static WORD_WEB_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
keys: &[
dictgen::InsensitiveStr::Ascii("age"),
dictgen::InsensitiveStr::Ascii("apge"),
+ dictgen::InsensitiveStr::Ascii("assemby"),
dictgen::InsensitiveStr::Ascii("books"),
dictgen::InsensitiveStr::Ascii("hools"),
dictgen::InsensitiveStr::Ascii("iste"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -3206,6 +3229,7 @@ pub static WORD_WEB_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
values: &[
&["webpage"],
&["webpage"],
+ &["webassembly"],
&["webhooks"],
&["webhooks"],
&["website"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -3213,7 +3237,7 @@ pub static WORD_WEB_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["website"],
&["websites"],
],
- range: 3..=5,
+ range: 3..=7,
};
static WORD_WEA_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -6548,6 +6572,7 @@ static WORD_VEC_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::
pub static WORD_VEC_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
dictgen::InsensitiveStr::Ascii("hiles"),
+ dictgen::InsensitiveStr::Ascii("otor"),
dictgen::InsensitiveStr::Ascii("otr"),
dictgen::InsensitiveStr::Ascii("otrs"),
dictgen::InsensitiveStr::Ascii("tices"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -6563,6 +6588,7 @@ pub static WORD_VEC_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
values: &[
&["vehicles"],
&["vector"],
+ &["vector"],
&["vectors"],
&["vertices"],
&["vector"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -9547,7 +9573,7 @@ pub static WORD_UNSA_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("ble"),
dictgen::InsensitiveStr::Ascii("nfe"),
],
- values: &[&["unusable", "usable", "unstable"], &["unsafe"]],
+ values: &[&["unusable", "usable", "unstable", "unable"], &["unsafe"]],
range: 3..=3,
};
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -11193,6 +11219,7 @@ pub static WORD_UNF_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("ortunetely"),
dictgen::InsensitiveStr::Ascii("ortunetly"),
dictgen::InsensitiveStr::Ascii("ortuntaly"),
+ dictgen::InsensitiveStr::Ascii("ortuntely"),
dictgen::InsensitiveStr::Ascii("orunate"),
dictgen::InsensitiveStr::Ascii("orunately"),
dictgen::InsensitiveStr::Ascii("orutunate"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -11246,6 +11273,7 @@ pub static WORD_UNF_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["unfortunately"],
&["unfortunately"],
&["unfortunately"],
+ &["unfortunately"],
&["unfortunate"],
&["unfortunately"],
&["unfortunate"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -13263,7 +13291,7 @@ static WORD_UNA_CHILDREN: [Option<&dictgen::DictTrieNode<&'static [&'static str]
Some(&WORD_UNAC_NODE),
Some(&WORD_UNAD_NODE),
None,
- None,
+ Some(&WORD_UNAF_NODE),
None,
Some(&WORD_UNAH_NODE),
None,
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -13561,10 +13589,11 @@ static WORD_UNAL_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen:
pub static WORD_UNAL_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
+ dictgen::InsensitiveStr::Ascii("be"),
dictgen::InsensitiveStr::Ascii("e"),
dictgen::InsensitiveStr::Ascii("llowed"),
],
- values: &[&["unable"], &["unallowed"]],
+ values: &[&["unable"], &["unable"], &["unallowed"]],
range: 1..=6,
};
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -13579,6 +13608,17 @@ pub static WORD_UNAH_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
range: 3..=3,
};
+static WORD_UNAF_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
+ children: dictgen::DictTrieChild::Flat(&WORD_UNAF_CHILDREN),
+ value: None,
+};
+
+pub static WORD_UNAF_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
+ keys: &[dictgen::InsensitiveStr::Ascii("ected")],
+ values: &[&["unaffected"]],
+ range: 5..=5,
+};
+
static WORD_UNAD_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
children: dictgen::DictTrieChild::Flat(&WORD_UNAD_CHILDREN),
value: None,
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -14351,6 +14391,7 @@ static WORD_TUP_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::
pub static WORD_TUP_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
dictgen::InsensitiveStr::Ascii("el"),
+ dictgen::InsensitiveStr::Ascii("es"),
dictgen::InsensitiveStr::Ascii("less"),
dictgen::InsensitiveStr::Ascii("parware"),
dictgen::InsensitiveStr::Ascii("perwears"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -14360,6 +14401,7 @@ pub static WORD_TUP_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
values: &[
&["tuple"],
&["tuples"],
+ &["tuples"],
&["tupperware"],
&["tupperware"],
&["tuple"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -17625,6 +17667,7 @@ static WORD_TRAI_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen:
pub static WORD_TRAI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
+ dictgen::InsensitiveStr::Ascii("ds"),
dictgen::InsensitiveStr::Ascii("ge"),
dictgen::InsensitiveStr::Ascii("ger"),
dictgen::InsensitiveStr::Ascii("gers"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -17658,6 +17701,7 @@ pub static WORD_TRAI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("tour"),
],
values: &[
+ &["traits", "triads"],
&["triage"],
&["triager"],
&["triagers"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -17843,12 +17887,13 @@ static WORD_TP_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::D
pub static WORD_TP_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
+ dictgen::InsensitiveStr::Ascii("os"),
dictgen::InsensitiveStr::Ascii("ye"),
dictgen::InsensitiveStr::Ascii("yed"),
dictgen::InsensitiveStr::Ascii("yes"),
dictgen::InsensitiveStr::Ascii("yo"),
],
- values: &[&["type"], &["typed"], &["types"], &["typo"]],
+ values: &[&["typos"], &["type"], &["typed"], &["types"], &["typo"]],
range: 2..=3,
};
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -18277,12 +18322,19 @@ static WORD_TON_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::
pub static WORD_TON_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
+ dictgen::InsensitiveStr::Ascii("ange"),
dictgen::InsensitiveStr::Ascii("giht"),
dictgen::InsensitiveStr::Ascii("guers"),
dictgen::InsensitiveStr::Ascii("ihgt"),
dictgen::InsensitiveStr::Ascii("uges"),
],
- values: &[&["tonight"], &["tongues"], &["tonight"], &["tongues"]],
+ values: &[
+ &["tonnage"],
+ &["tonight"],
+ &["tongues"],
+ &["tonight"],
+ &["tongues"],
+ ],
range: 4..=5,
};
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -18317,6 +18369,7 @@ static WORD_TOL_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::
pub static WORD_TOL_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
dictgen::InsensitiveStr::Ascii("arable"),
+ dictgen::InsensitiveStr::Ascii("earnce"),
dictgen::InsensitiveStr::Ascii("elerance"),
dictgen::InsensitiveStr::Ascii("en"),
dictgen::InsensitiveStr::Ascii("ens"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -18339,6 +18392,7 @@ pub static WORD_TOL_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
values: &[
&["tolerable"],
&["tolerance"],
+ &["tolerance"],
&["token"],
&["tokens"],
&["tolerable"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -20499,6 +20553,7 @@ pub static WORD_TES_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("ellated"),
dictgen::InsensitiveStr::Ascii("ellation"),
dictgen::InsensitiveStr::Ascii("ellator"),
+ dictgen::InsensitiveStr::Ascii("ing"),
dictgen::InsensitiveStr::Ascii("itcle"),
dictgen::InsensitiveStr::Ascii("itcles"),
dictgen::InsensitiveStr::Ascii("ited"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -20530,6 +20585,7 @@ pub static WORD_TES_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("tin"),
dictgen::InsensitiveStr::Ascii("tng"),
dictgen::InsensitiveStr::Ascii("tomony"),
+ dictgen::InsensitiveStr::Ascii("tsdata"),
dictgen::InsensitiveStr::Ascii("tsing"),
],
values: &[
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -20818,6 +20876,7 @@ pub static WORD_TERM_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("niator"),
dictgen::InsensitiveStr::Ascii("niators"),
dictgen::InsensitiveStr::Ascii("o"),
+ dictgen::InsensitiveStr::Ascii("onology"),
dictgen::InsensitiveStr::Ascii("ostat"),
dictgen::InsensitiveStr::Ascii("peratue"),
dictgen::InsensitiveStr::Ascii("peratues"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -20879,6 +20938,7 @@ pub static WORD_TERM_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["terminator"],
&["terminators"],
&["thermo"],
+ &["terminology"],
&["thermostat"],
&["temperature"],
&["temperatures"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -29725,9 +29785,10 @@ pub static WORD_STD_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
keys: &[
dictgen::InsensitiveStr::Ascii("anard"),
dictgen::InsensitiveStr::Ascii("anards"),
+ dictgen::InsensitiveStr::Ascii("errr"),
],
- values: &[&["standard"], &["standards"]],
- range: 5..=6,
+ values: &[&["standard"], &["standards"], &["stderr"]],
+ range: 4..=6,
};
static WORD_STC_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -30485,6 +30546,7 @@ static WORD_STAB_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen:
pub static WORD_STAB_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
dictgen::InsensitiveStr::Ascii("alization"),
+ dictgen::InsensitiveStr::Ascii("alized"),
dictgen::InsensitiveStr::Ascii("el"),
dictgen::InsensitiveStr::Ascii("elized"),
dictgen::InsensitiveStr::Ascii("ilitation"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -30507,6 +30569,7 @@ pub static WORD_STAB_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
],
values: &[
&["stabilization"],
+ &["stabilized"],
&["stable"],
&["stabilized"],
&["stabilization"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -34736,109 +34799,127 @@ pub static WORD_SM_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictg
};
static WORD_SL_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
- children: dictgen::DictTrieChild::Flat(&WORD_SL_CHILDREN),
+ children: dictgen::DictTrieChild::Nested(&WORD_SL_CHILDREN),
+ value: None,
+};
+
+static WORD_SL_CHILDREN: [Option<&dictgen::DictTrieNode<&'static [&'static str]>>; 26] = [
+ Some(&WORD_SLA_NODE),
+ None,
+ None,
+ Some(&WORD_SLD_NODE),
+ Some(&WORD_SLE_NODE),
+ None,
+ None,
+ None,
+ Some(&WORD_SLI_NODE),
+ None,
+ None,
+ None,
+ None,
+ None,
+ Some(&WORD_SLO_NODE),
+ None,
+ Some(&WORD_SLQ_NODE),
+ None,
+ None,
+ None,
+ Some(&WORD_SLU_NODE),
+ None,
+ None,
+ None,
+ None,
+ None,
+];
+
+static WORD_SLU_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
+ children: dictgen::DictTrieChild::Flat(&WORD_SLU_CHILDREN),
value: None,
};
-pub static WORD_SL_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
+pub static WORD_SLU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
- dictgen::InsensitiveStr::Ascii("ac"),
- dictgen::InsensitiveStr::Ascii("ach"),
- dictgen::InsensitiveStr::Ascii("aches"),
- dictgen::InsensitiveStr::Ascii("anguage"),
- dictgen::InsensitiveStr::Ascii("anguages"),
- dictgen::InsensitiveStr::Ascii("aptoon"),
- dictgen::InsensitiveStr::Ascii("ase"),
- dictgen::InsensitiveStr::Ascii("ases"),
- dictgen::InsensitiveStr::Ascii("ashs"),
- dictgen::InsensitiveStr::Ascii("aughted"),
- dictgen::InsensitiveStr::Ascii("aughterd"),
- dictgen::InsensitiveStr::Ascii("augterhouses"),
- dictgen::InsensitiveStr::Ascii("augther"),
- dictgen::InsensitiveStr::Ascii("augthered"),
- dictgen::InsensitiveStr::Ascii("augthering"),
- dictgen::InsensitiveStr::Ascii("avage"),
- dictgen::InsensitiveStr::Ascii("averly"),
- dictgen::InsensitiveStr::Ascii("ayign"),
- dictgen::InsensitiveStr::Ascii("diers"),
- dictgen::InsensitiveStr::Ascii("ect"),
- dictgen::InsensitiveStr::Ascii("ected"),
- dictgen::InsensitiveStr::Ascii("ecting"),
- dictgen::InsensitiveStr::Ascii("ection"),
- dictgen::InsensitiveStr::Ascii("eect"),
- dictgen::InsensitiveStr::Ascii("eeped"),
- dictgen::InsensitiveStr::Ascii("eepp"),
- dictgen::InsensitiveStr::Ascii("efies"),
- dictgen::InsensitiveStr::Ascii("efishness"),
- dictgen::InsensitiveStr::Ascii("ewth"),
- dictgen::InsensitiveStr::Ascii("ewthed"),
- dictgen::InsensitiveStr::Ascii("ewthing"),
- dictgen::InsensitiveStr::Ascii("ewths"),
- dictgen::InsensitiveStr::Ascii("icable"),
- dictgen::InsensitiveStr::Ascii("ienced"),
- dictgen::InsensitiveStr::Ascii("ient"),
- dictgen::InsensitiveStr::Ascii("iently"),
- dictgen::InsensitiveStr::Ascii("ighlty"),
- dictgen::InsensitiveStr::Ascii("ighly"),
- dictgen::InsensitiveStr::Ascii("ightl"),
- dictgen::InsensitiveStr::Ascii("ighty"),
- dictgen::InsensitiveStr::Ascii("ignt"),
- dictgen::InsensitiveStr::Ascii("igntly"),
- dictgen::InsensitiveStr::Ascii("igth"),
- dictgen::InsensitiveStr::Ascii("igthly"),
- dictgen::InsensitiveStr::Ascii("igtly"),
- dictgen::InsensitiveStr::Ascii("iped"),
- dictgen::InsensitiveStr::Ascii("ipperies"),
- dictgen::InsensitiveStr::Ascii("ipperly"),
- dictgen::InsensitiveStr::Ascii("ippes"),
- dictgen::InsensitiveStr::Ascii("ippey"),
- dictgen::InsensitiveStr::Ascii("iseshow"),
- dictgen::InsensitiveStr::Ascii("ite"),
- dictgen::InsensitiveStr::Ascii("ooth"),
- dictgen::InsensitiveStr::Ascii("oothed"),
- dictgen::InsensitiveStr::Ascii("oothing"),
- dictgen::InsensitiveStr::Ascii("ooths"),
- dictgen::InsensitiveStr::Ascii("oughtering"),
- dictgen::InsensitiveStr::Ascii("owy"),
- dictgen::InsensitiveStr::Ascii("q"),
- dictgen::InsensitiveStr::Ascii("uaghter"),
- dictgen::InsensitiveStr::Ascii("uaghtered"),
- dictgen::InsensitiveStr::Ascii("uaghtering"),
- dictgen::InsensitiveStr::Ascii("uggify"),
+ dictgen::InsensitiveStr::Ascii("aghter"),
+ dictgen::InsensitiveStr::Ascii("aghtered"),
+ dictgen::InsensitiveStr::Ascii("aghtering"),
+ dictgen::InsensitiveStr::Ascii("ggify"),
],
values: &[
- &["slack"],
- &["slash"],
- &["slashes"],
- &["language"],
- &["languages"],
- &["splatoon"],
- &["slash"],
- &["slashes"],
- &["slashes"],
- &["slaughtered"],
- &["slaughtered"],
- &["slaughterhouses"],
&["slaughter"],
&["slaughtered"],
&["slaughtering"],
- &["salvage"],
- &["slavery"],
- &["slaying"],
- &["sliders"],
- &["select"],
- &["selected"],
- &["selecting"],
- &["selection"],
- &["select"],
- &["slept"],
- &["sleep"],
- &["selfies"],
- &["selfishness"],
- &["sleuth"],
- &["sleuthed"],
+ &["slugify"],
+ ],
+ range: 5..=9,
+};
+
+static WORD_SLQ_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
+ children: dictgen::DictTrieChild::Flat(&WORD_SLQ_CHILDREN),
+ value: Some(&["sql"]),
+};
+
+pub static WORD_SLQ_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
+ keys: &[],
+ values: &[],
+ range: 0..=0,
+};
+
+static WORD_SLO_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
+ children: dictgen::DictTrieChild::Flat(&WORD_SLO_CHILDREN),
+ value: None,
+};
+
+pub static WORD_SLO_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
+ keys: &[
+ dictgen::InsensitiveStr::Ascii("oth"),
+ dictgen::InsensitiveStr::Ascii("othed"),
+ dictgen::InsensitiveStr::Ascii("othing"),
+ dictgen::InsensitiveStr::Ascii("oths"),
+ dictgen::InsensitiveStr::Ascii("table"),
+ dictgen::InsensitiveStr::Ascii("ughtering"),
+ dictgen::InsensitiveStr::Ascii("wy"),
+ ],
+ values: &[
+ &["sleuth", "sloth", "sooth"],
+ &["sleuthing"],
&["sleuthing"],
&["sleuths"],
+ &["slottable"],
+ &["slaughtering"],
+ &["slowly"],
+ ],
+ range: 2..=9,
+};
+
+static WORD_SLI_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
+ children: dictgen::DictTrieChild::Flat(&WORD_SLI_CHILDREN),
+ value: None,
+};
+
+pub static WORD_SLI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
+ keys: &[
+ dictgen::InsensitiveStr::Ascii("cable"),
+ dictgen::InsensitiveStr::Ascii("enced"),
+ dictgen::InsensitiveStr::Ascii("ent"),
+ dictgen::InsensitiveStr::Ascii("ently"),
+ dictgen::InsensitiveStr::Ascii("ghlty"),
+ dictgen::InsensitiveStr::Ascii("ghly"),
+ dictgen::InsensitiveStr::Ascii("ghtl"),
+ dictgen::InsensitiveStr::Ascii("ghty"),
+ dictgen::InsensitiveStr::Ascii("gnt"),
+ dictgen::InsensitiveStr::Ascii("gntly"),
+ dictgen::InsensitiveStr::Ascii("gth"),
+ dictgen::InsensitiveStr::Ascii("gthly"),
+ dictgen::InsensitiveStr::Ascii("gtly"),
+ dictgen::InsensitiveStr::Ascii("ped"),
+ dictgen::InsensitiveStr::Ascii("pperies"),
+ dictgen::InsensitiveStr::Ascii("pperly"),
+ dictgen::InsensitiveStr::Ascii("ppes"),
+ dictgen::InsensitiveStr::Ascii("ppey"),
+ dictgen::InsensitiveStr::Ascii("seshow"),
+ dictgen::InsensitiveStr::Ascii("te"),
+ ],
+ values: &[
&["sliceable"],
&["silenced"],
&["silent"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -34859,19 +34940,107 @@ pub static WORD_SL_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictg
&["slippery"],
&["slideshow"],
&["elite", "site", "sleight", "slide"],
- &["sleuth", "sloth", "sooth"],
- &["sleuthing"],
+ ],
+ range: 2..=7,
+};
+
+static WORD_SLE_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
+ children: dictgen::DictTrieChild::Flat(&WORD_SLE_CHILDREN),
+ value: None,
+};
+
+pub static WORD_SLE_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
+ keys: &[
+ dictgen::InsensitiveStr::Ascii("ct"),
+ dictgen::InsensitiveStr::Ascii("cted"),
+ dictgen::InsensitiveStr::Ascii("cting"),
+ dictgen::InsensitiveStr::Ascii("ction"),
+ dictgen::InsensitiveStr::Ascii("ect"),
+ dictgen::InsensitiveStr::Ascii("eped"),
+ dictgen::InsensitiveStr::Ascii("epp"),
+ dictgen::InsensitiveStr::Ascii("fies"),
+ dictgen::InsensitiveStr::Ascii("fishness"),
+ dictgen::InsensitiveStr::Ascii("wth"),
+ dictgen::InsensitiveStr::Ascii("wthed"),
+ dictgen::InsensitiveStr::Ascii("wthing"),
+ dictgen::InsensitiveStr::Ascii("wths"),
+ ],
+ values: &[
+ &["select"],
+ &["selected"],
+ &["selecting"],
+ &["selection"],
+ &["select"],
+ &["slept"],
+ &["sleep"],
+ &["selfies"],
+ &["selfishness"],
+ &["sleuth"],
+ &["sleuthed"],
&["sleuthing"],
&["sleuths"],
- &["slaughtering"],
- &["slowly"],
- &["sql"],
+ ],
+ range: 2..=8,
+};
+
+static WORD_SLD_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
+ children: dictgen::DictTrieChild::Flat(&WORD_SLD_CHILDREN),
+ value: None,
+};
+
+pub static WORD_SLD_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
+ keys: &[dictgen::InsensitiveStr::Ascii("iers")],
+ values: &[&["sliders"]],
+ range: 4..=4,
+};
+
+static WORD_SLA_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
+ children: dictgen::DictTrieChild::Flat(&WORD_SLA_CHILDREN),
+ value: None,
+};
+
+pub static WORD_SLA_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
+ keys: &[
+ dictgen::InsensitiveStr::Ascii("c"),
+ dictgen::InsensitiveStr::Ascii("ch"),
+ dictgen::InsensitiveStr::Ascii("ches"),
+ dictgen::InsensitiveStr::Ascii("nguage"),
+ dictgen::InsensitiveStr::Ascii("nguages"),
+ dictgen::InsensitiveStr::Ascii("ptoon"),
+ dictgen::InsensitiveStr::Ascii("se"),
+ dictgen::InsensitiveStr::Ascii("ses"),
+ dictgen::InsensitiveStr::Ascii("shs"),
+ dictgen::InsensitiveStr::Ascii("ughted"),
+ dictgen::InsensitiveStr::Ascii("ughterd"),
+ dictgen::InsensitiveStr::Ascii("ugterhouses"),
+ dictgen::InsensitiveStr::Ascii("ugther"),
+ dictgen::InsensitiveStr::Ascii("ugthered"),
+ dictgen::InsensitiveStr::Ascii("ugthering"),
+ dictgen::InsensitiveStr::Ascii("vage"),
+ dictgen::InsensitiveStr::Ascii("verly"),
+ dictgen::InsensitiveStr::Ascii("yign"),
+ ],
+ values: &[
+ &["slack"],
+ &["slash"],
+ &["slashes"],
+ &["language"],
+ &["languages"],
+ &["splatoon"],
+ &["slash"],
+ &["slashes"],
+ &["slashes"],
+ &["slaughtered"],
+ &["slaughtered"],
+ &["slaughterhouses"],
&["slaughter"],
&["slaughtered"],
&["slaughtering"],
- &["slugify"],
+ &["salvage"],
+ &["slavery"],
+ &["slaying"],
],
- range: 1..=12,
+ range: 1..=11,
};
static WORD_SK_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -40247,6 +40416,8 @@ pub static WORD_SEG_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("ementation"),
dictgen::InsensitiveStr::Ascii("emented"),
dictgen::InsensitiveStr::Ascii("ements"),
+ dictgen::InsensitiveStr::Ascii("emnt"),
+ dictgen::InsensitiveStr::Ascii("emntation"),
dictgen::InsensitiveStr::Ascii("emnts"),
dictgen::InsensitiveStr::Ascii("ergation"),
dictgen::InsensitiveStr::Ascii("fualt"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -40268,6 +40439,7 @@ pub static WORD_SEG_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("metn"),
dictgen::InsensitiveStr::Ascii("metned"),
dictgen::InsensitiveStr::Ascii("metns"),
+ dictgen::InsensitiveStr::Ascii("mnet"),
dictgen::InsensitiveStr::Ascii("ragated"),
dictgen::InsensitiveStr::Ascii("ragation"),
dictgen::InsensitiveStr::Ascii("regacion"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -40291,6 +40463,8 @@ pub static WORD_SEG_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["segmentation"],
&["segmented"],
&["segments"],
+ &["segment"],
+ &["segmentation"],
&["segments"],
&["segregation"],
&["segfault"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -40312,6 +40486,7 @@ pub static WORD_SEG_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["segment"],
&["segmented"],
&["segments"],
+ &["segment"],
&["segregated"],
&["segregation"],
&["segregation"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -43111,6 +43286,7 @@ pub static WORD_RU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictg
dictgen::InsensitiveStr::Ascii("lle"),
dictgen::InsensitiveStr::Ascii("matic"),
dictgen::InsensitiveStr::Ascii("morus"),
+ dictgen::InsensitiveStr::Ascii("mtime"),
dictgen::InsensitiveStr::Ascii("muors"),
dictgen::InsensitiveStr::Ascii("ning"),
dictgen::InsensitiveStr::Ascii("nn"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -43153,6 +43329,7 @@ pub static WORD_RU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictg
&["rule"],
&["rheumatic"],
&["rumors"],
+ &["runtime"],
&["rumors"],
&["running", "ruining"],
&["run"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -45580,6 +45757,8 @@ pub static WORD_RESU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("se"),
dictgen::InsensitiveStr::Ascii("sed"),
dictgen::InsensitiveStr::Ascii("t"),
+ dictgen::InsensitiveStr::Ascii("tl"),
+ dictgen::InsensitiveStr::Ascii("tls"),
dictgen::InsensitiveStr::Ascii("ts"),
],
values: &[
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -45627,6 +45806,8 @@ pub static WORD_RESU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["reuse"],
&["reused", "refused", "resumed"],
&["result"],
+ &["result"],
+ &["results"],
&["results"],
],
range: 1..=9,
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -47492,6 +47673,7 @@ pub static WORD_REQUI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = di
dictgen::InsensitiveStr::Ascii("reing"),
dictgen::InsensitiveStr::Ascii("remenet"),
dictgen::InsensitiveStr::Ascii("remenets"),
+ dictgen::InsensitiveStr::Ascii("remenht"),
dictgen::InsensitiveStr::Ascii("remnt"),
dictgen::InsensitiveStr::Ascii("rment"),
dictgen::InsensitiveStr::Ascii("rmentes"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -47525,6 +47707,7 @@ pub static WORD_REQUI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = di
&["requirements"],
&["requirement"],
&["requirement"],
+ &["requirement"],
&["requirements"],
&["requirements"],
&["requisite"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -55374,6 +55557,7 @@ pub static WORD_RECA_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
keys: &[
dictgen::InsensitiveStr::Ascii("hed"),
dictgen::InsensitiveStr::Ascii("l"),
+ dictgen::InsensitiveStr::Ascii("lcelated"),
dictgen::InsensitiveStr::Ascii("lcualte"),
dictgen::InsensitiveStr::Ascii("lcualted"),
dictgen::InsensitiveStr::Ascii("lcualtes"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -55397,6 +55581,7 @@ pub static WORD_RECA_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
values: &[
&["reached"],
&["recall"],
+ &["recalculated"],
&["recalculate"],
&["recalculated"],
&["recalculates"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -55980,7 +56165,7 @@ pub static WORD_REAL_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["relatively"],
&["relatives"],
&["relativity"],
- &["really", "relay"],
+ &["really", "relay", "real"],
&["really"],
],
range: 1..=10,
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -58633,9 +58818,12 @@ static WORD_PRV_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::
};
pub static WORD_PRV_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
- keys: &[dictgen::InsensitiveStr::Ascii("iate")],
- values: &[&["private"]],
- range: 4..=4,
+ keys: &[
+ dictgen::InsensitiveStr::Ascii("iate"),
+ dictgen::InsensitiveStr::Ascii("ode"),
+ ],
+ values: &[&["private"], &["provide"]],
+ range: 3..=4,
};
static WORD_PRU_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -60019,9 +60207,12 @@ static WORD_PROPG_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen
};
pub static WORD_PROPG_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
- keys: &[dictgen::InsensitiveStr::Ascii("ated")],
- values: &[&["propagated"]],
- range: 4..=4,
+ keys: &[
+ dictgen::InsensitiveStr::Ascii("ated"),
+ dictgen::InsensitiveStr::Ascii("ating"),
+ ],
+ values: &[&["propagated"], &["propagating"]],
+ range: 4..=5,
};
static WORD_PROPE_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -60380,10 +60571,12 @@ pub static WORD_PROM_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("issed"),
dictgen::InsensitiveStr::Ascii("isses"),
dictgen::InsensitiveStr::Ascii("issing"),
+ dictgen::InsensitiveStr::Ascii("itives"),
dictgen::InsensitiveStr::Ascii("ixity"),
dictgen::InsensitiveStr::Ascii("mpt"),
dictgen::InsensitiveStr::Ascii("mpts"),
dictgen::InsensitiveStr::Ascii("ocional"),
+ dictgen::InsensitiveStr::Ascii("ordials"),
dictgen::InsensitiveStr::Ascii("ose"),
dictgen::InsensitiveStr::Ascii("oteurs"),
dictgen::InsensitiveStr::Ascii("otheus"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -60443,10 +60636,12 @@ pub static WORD_PROM_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["promised"],
&["promises"],
&["promising"],
+ &["primitives"],
&["proximity"],
&["prompt"],
&["prompts"],
&["promotional"],
+ &["primordials"],
&["promotes"],
&["promotes"],
&["prometheus"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -62651,6 +62846,7 @@ pub static WORD_PRIM_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("ordal"),
dictgen::InsensitiveStr::Ascii("tiive"),
dictgen::InsensitiveStr::Ascii("tive"),
+ dictgen::InsensitiveStr::Ascii("tives"),
],
values: &[
&["primaries"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -62676,6 +62872,7 @@ pub static WORD_PRIM_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["primordial"],
&["primitive"],
&["primitive"],
+ &["primitives"],
],
range: 2..=7,
};
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -63778,6 +63975,7 @@ static WORD_PREO_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen:
pub static WORD_PREO_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
dictgen::InsensitiveStr::Ascii("cess"),
+ dictgen::InsensitiveStr::Ascii("cessing"),
dictgen::InsensitiveStr::Ascii("cupation"),
dictgen::InsensitiveStr::Ascii("perty"),
dictgen::InsensitiveStr::Ascii("rded"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -63790,6 +63988,7 @@ pub static WORD_PREO_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
],
values: &[
&["process"],
+ &["processing", "preprocessing"],
&["preoccupation"],
&["property"],
&["preordered"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -64431,6 +64630,7 @@ pub static WORD_PREDI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = di
dictgen::InsensitiveStr::Ascii("cessors"),
dictgen::InsensitiveStr::Ascii("ceted"),
dictgen::InsensitiveStr::Ascii("ciment"),
+ dictgen::InsensitiveStr::Ascii("ciotn"),
dictgen::InsensitiveStr::Ascii("cited"),
dictgen::InsensitiveStr::Ascii("citng"),
dictgen::InsensitiveStr::Ascii("citon"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -64459,6 +64659,7 @@ pub static WORD_PREDI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = di
&["predecessors"],
&["predicated"],
&["predicament"],
+ &["prediction"],
&["predicated"],
&["predicting"],
&["prediction"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -64466,7 +64667,7 @@ pub static WORD_PREDI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = di
&["predictive"],
&["predicated"],
&["predictive"],
- &["predictive"],
+ &["predictive", "predicted"],
&["predictable"],
&["predictive"],
&["prediction"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -65306,12 +65507,14 @@ static WORD_POU_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::
pub static WORD_POU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
+ dictgen::InsensitiveStr::Ascii("late"),
dictgen::InsensitiveStr::Ascii("lations"),
dictgen::InsensitiveStr::Ascii("nt"),
dictgen::InsensitiveStr::Ascii("nts"),
dictgen::InsensitiveStr::Ascii("pular"),
],
values: &[
+ &["populate"],
&["populations"],
&["point", "pound"],
&["points"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -66452,7 +66655,7 @@ static WORD_POL_CHILDREN: [Option<&dictgen::DictTrieNode<&'static [&'static str]
None,
None,
Some(&WORD_POLO_NODE),
- None,
+ Some(&WORD_POLP_NODE),
None,
None,
None,
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -66564,6 +66767,17 @@ pub static WORD_POLT_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
range: 2..=6,
};
+static WORD_POLP_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
+ children: dictgen::DictTrieChild::Flat(&WORD_POLP_CHILDREN),
+ value: None,
+};
+
+pub static WORD_POLP_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
+ keys: &[dictgen::InsensitiveStr::Ascii("ulate")],
+ values: &[&["populate"]],
+ range: 5..=5,
+};
+
static WORD_POLO_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
children: dictgen::DictTrieChild::Flat(&WORD_POLO_CHILDREN),
value: None,
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -66804,6 +67018,7 @@ pub static WORD_POI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("gnat"),
dictgen::InsensitiveStr::Ascii("int"),
dictgen::InsensitiveStr::Ascii("ints"),
+ dictgen::InsensitiveStr::Ascii("n"),
dictgen::InsensitiveStr::Ascii("nd"),
dictgen::InsensitiveStr::Ascii("ndcloud"),
dictgen::InsensitiveStr::Ascii("neer"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -66867,6 +67082,7 @@ pub static WORD_POI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["point"],
&["points"],
&["point"],
+ &["point"],
&["pointcloud"],
&["pioneer"],
&["pointer"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -71153,10 +71369,11 @@ static WORD_PENT_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen:
pub static WORD_PENT_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
dictgen::InsensitiveStr::Ascii("agoon"),
+ dictgen::InsensitiveStr::Ascii("alty"),
dictgen::InsensitiveStr::Ascii("sylvania"),
dictgen::InsensitiveStr::Ascii("uim"),
],
- values: &[&["pentagon"], &["pennsylvania"], &["pentium"]],
+ values: &[&["pentagon"], &["penalty"], &["pennsylvania"], &["pentium"]],
range: 3..=8,
};
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -73631,6 +73848,7 @@ pub static WORD_PARAR_CHILDREN: dictgen::DictTable<&'static [&'static str]> = di
dictgen::InsensitiveStr::Ascii("eter"),
dictgen::InsensitiveStr::Ascii("gaph"),
dictgen::InsensitiveStr::Ascii("gaphs"),
+ dictgen::InsensitiveStr::Ascii("m"),
dictgen::InsensitiveStr::Ascii("meter"),
dictgen::InsensitiveStr::Ascii("meters"),
],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -73640,10 +73858,11 @@ pub static WORD_PARAR_CHILDREN: dictgen::DictTable<&'static [&'static str]> = di
&["parameter"],
&["paragraph"],
&["paragraphs"],
+ &["param"],
&["parameter"],
&["parameters"],
],
- range: 3..=6,
+ range: 1..=6,
};
static WORD_PARAP_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -74852,7 +75071,7 @@ pub static WORD_OVR_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
static WORD_OVE_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
children: dictgen::DictTrieChild::Nested(&WORD_OVE_CHILDREN),
- value: None,
+ value: Some(&["oven", "over"]),
};
static WORD_OVE_CHILDREN: [Option<&dictgen::DictTrieNode<&'static [&'static str]>>; 26] = [
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -77739,10 +77958,12 @@ pub static WORD_OPU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
keys: &[
dictgen::InsensitiveStr::Ascii("late"),
dictgen::InsensitiveStr::Ascii("lates"),
+ dictgen::InsensitiveStr::Ascii("tput"),
],
values: &[
&["populate", "opiate", "opulent"],
&["populates", "opiates"],
+ &["output"],
],
range: 4..=5,
};
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -78101,6 +78322,7 @@ pub static WORD_OPP_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("roximate"),
dictgen::InsensitiveStr::Ascii("s"),
dictgen::InsensitiveStr::Ascii("sofite"),
+ dictgen::InsensitiveStr::Ascii("urtinity"),
dictgen::InsensitiveStr::Ascii("urtunities"),
dictgen::InsensitiveStr::Ascii("urtunity"),
],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -78144,6 +78366,7 @@ pub static WORD_OPP_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["approximate"],
&["oops"],
&["opposite"],
+ &["opportunity"],
&["opportunities"],
&["opportunity"],
],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -78814,7 +79037,7 @@ pub static WORD_ON_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictg
&["controlled"],
&["convenience"],
&["conventions"],
- &["own"],
+ &["own", "now"],
&["owned"],
&["ennui"],
&["owner"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -79295,6 +79518,7 @@ pub static WORD_OFFS_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("ited"),
dictgen::InsensitiveStr::Ascii("pirng"),
dictgen::InsensitiveStr::Ascii("rping"),
+ dictgen::InsensitiveStr::Ascii("t"),
dictgen::InsensitiveStr::Ascii("tets"),
],
values: &[
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -79310,9 +79534,10 @@ pub static WORD_OFFS_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["offside"],
&["offspring"],
&["offspring"],
+ &["offset"],
&["offsets"],
],
- range: 2..=5,
+ range: 1..=5,
};
static WORD_OFFR_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -81717,6 +81942,7 @@ pub static WORD_NOR_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("male"),
dictgen::InsensitiveStr::Ascii("males"),
dictgen::InsensitiveStr::Ascii("malis"),
+ dictgen::InsensitiveStr::Ascii("malizd"),
dictgen::InsensitiveStr::Ascii("mall"),
dictgen::InsensitiveStr::Ascii("mallized"),
dictgen::InsensitiveStr::Ascii("malls"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -81781,6 +82007,7 @@ pub static WORD_NOR_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["normal"],
&["normals"],
&["normals"],
+ &["normalized"],
&["normal", "normally"],
&["normalized"],
&["normals"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -84992,11 +85219,12 @@ static WORD_NECA_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen:
pub static WORD_NECA_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
+ dictgen::InsensitiveStr::Ascii("ssary"),
dictgen::InsensitiveStr::Ascii("ssery"),
dictgen::InsensitiveStr::Ascii("ssry"),
dictgen::InsensitiveStr::Ascii("use"),
],
- values: &[&["necessary"], &["necessary"], &["because"]],
+ values: &[&["necessary"], &["necessary"], &["necessary"], &["because"]],
range: 3..=5,
};
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -87589,6 +87817,7 @@ pub static WORD_MONO_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("lothic"),
dictgen::InsensitiveStr::Ascii("louge"),
dictgen::InsensitiveStr::Ascii("lythic"),
+ dictgen::InsensitiveStr::Ascii("morpize"),
dictgen::InsensitiveStr::Ascii("ntonicity"),
dictgen::InsensitiveStr::Ascii("pace"),
dictgen::InsensitiveStr::Ascii("pilies"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -87628,6 +87857,7 @@ pub static WORD_MONO_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["monolithic"],
&["monologue"],
&["monolithic"],
+ &["monomorphize"],
&["monotonicity"],
&["monospace"],
&["monopolies"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -92799,6 +93029,7 @@ pub static WORD_MAY_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("alsia"),
dictgen::InsensitiveStr::Ascii("alsian"),
dictgen::InsensitiveStr::Ascii("balline"),
+ dictgen::InsensitiveStr::Ascii("bed"),
dictgen::InsensitiveStr::Ascii("bee"),
dictgen::InsensitiveStr::Ascii("belle"),
dictgen::InsensitiveStr::Ascii("belleine"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -92820,6 +93051,7 @@ pub static WORD_MAY_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["malaysian"],
&["maybelline"],
&["maybe"],
+ &["maybe"],
&["maybelline"],
&["maybelline"],
&["maybelline"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -98396,6 +98628,7 @@ pub static WORD_LAT_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("nern"),
dictgen::InsensitiveStr::Ascii("ops"),
dictgen::InsensitiveStr::Ascii("set"),
+ dictgen::InsensitiveStr::Ascii("st"),
dictgen::InsensitiveStr::Ascii("titude"),
],
values: &[
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -101199,6 +101433,7 @@ pub static WORD_ITE_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("ratior"),
dictgen::InsensitiveStr::Ascii("ratiors"),
dictgen::InsensitiveStr::Ascii("ratons"),
+ dictgen::InsensitiveStr::Ascii("reate"),
dictgen::InsensitiveStr::Ascii("reating"),
dictgen::InsensitiveStr::Ascii("reator"),
dictgen::InsensitiveStr::Ascii("rface"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -101239,6 +101474,7 @@ pub static WORD_ITE_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["iterator"],
&["iterators"],
&["iterations"],
+ &["iterate"],
&["iterating"],
&["iterator"],
&["interface"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -108394,6 +108630,7 @@ pub static WORD_INH_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("eritablility"),
dictgen::InsensitiveStr::Ascii("eritage"),
dictgen::InsensitiveStr::Ascii("eritence"),
+ dictgen::InsensitiveStr::Ascii("eritences"),
dictgen::InsensitiveStr::Ascii("erith"),
dictgen::InsensitiveStr::Ascii("erithed"),
dictgen::InsensitiveStr::Ascii("erithing"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -108437,6 +108674,7 @@ pub static WORD_INH_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["inheritability"],
&["heritage", "inheritance"],
&["inheritance"],
+ &["inheritances"],
&["inherit"],
&["inherited"],
&["inheriting"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -111555,6 +111793,7 @@ pub static WORD_INCOMA_CHILDREN: dictgen::DictTable<&'static [&'static str]> = d
dictgen::InsensitiveStr::Ascii("ptibelities"),
dictgen::InsensitiveStr::Ascii("ptibelity"),
dictgen::InsensitiveStr::Ascii("ptible"),
+ dictgen::InsensitiveStr::Ascii("tible"),
],
values: &[
&["incompatibility"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -111563,8 +111802,9 @@ pub static WORD_INCOMA_CHILDREN: dictgen::DictTable<&'static [&'static str]> = d
&["incompatibilities"],
&["incompatibility"],
&["incompatible"],
+ &["incompatible"],
],
- range: 6..=11,
+ range: 5..=11,
};
static WORD_INCOH_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -115472,13 +115712,20 @@ static WORD_IDENI_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen
pub static WORD_IDENI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
+ dictgen::InsensitiveStr::Ascii("fiable"),
dictgen::InsensitiveStr::Ascii("tfy"),
dictgen::InsensitiveStr::Ascii("ties"),
dictgen::InsensitiveStr::Ascii("tify"),
dictgen::InsensitiveStr::Ascii("ty"),
],
- values: &[&["identify"], &["identities"], &["identify"], &["identity"]],
- range: 2..=4,
+ values: &[
+ &["identifiable"],
+ &["identify"],
+ &["identities"],
+ &["identify"],
+ &["identity"],
+ ],
+ range: 2..=6,
};
static WORD_IDENF_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -121780,6 +122027,7 @@ pub static WORD_GN_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictg
dictgen::InsensitiveStr::Ascii("eration"),
dictgen::InsensitiveStr::Ascii("erations"),
dictgen::InsensitiveStr::Ascii("eric"),
+ dictgen::InsensitiveStr::Ascii("ored"),
dictgen::InsensitiveStr::Ascii("orung"),
],
values: &[
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -121795,6 +122043,7 @@ pub static WORD_GN_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictg
&["generation"],
&["generations"],
&["generic"],
+ &["ignored"],
&["ignoring"],
],
range: 4..=8,
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -124568,6 +124817,7 @@ pub static WORD_FRO_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("mating"),
dictgen::InsensitiveStr::Ascii("mation"),
dictgen::InsensitiveStr::Ascii("mats"),
+ dictgen::InsensitiveStr::Ascii("matted"),
dictgen::InsensitiveStr::Ascii("matting"),
dictgen::InsensitiveStr::Ascii("me"),
dictgen::InsensitiveStr::Ascii("med"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -124613,6 +124863,7 @@ pub static WORD_FRO_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["formatting"],
&["formation"],
&["formats"],
+ &["formatted"],
&["formatting"],
&["from"],
&["formed"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -125665,6 +125916,7 @@ pub static WORD_FORM_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("t"),
dictgen::InsensitiveStr::Ascii("ua"),
dictgen::InsensitiveStr::Ascii("ual"),
+ dictgen::InsensitiveStr::Ascii("uala"),
dictgen::InsensitiveStr::Ascii("uale"),
dictgen::InsensitiveStr::Ascii("uals"),
dictgen::InsensitiveStr::Ascii("ualte"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -125723,6 +125975,7 @@ pub static WORD_FORM_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["format"],
&["formula"],
&["formula"],
+ &["formula"],
&["formulae"],
&["formulas"],
&["formulate"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -127310,6 +127563,7 @@ pub static WORD_FIX_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("el"),
dictgen::InsensitiveStr::Ascii("els"),
dictgen::InsensitiveStr::Ascii("eme"),
+ dictgen::InsensitiveStr::Ascii("utre"),
dictgen::InsensitiveStr::Ascii("wd"),
],
values: &[
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -127317,9 +127571,10 @@ pub static WORD_FIX_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["pixel"],
&["pixels"],
&["fixme"],
+ &["fixture"],
&["fixed"],
],
- range: 1..=3,
+ range: 1..=4,
};
static WORD_FIV_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -127592,6 +127847,7 @@ static WORD_FINI_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen:
pub static WORD_FINI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
dictgen::InsensitiveStr::Ascii("alization"),
+ dictgen::InsensitiveStr::Ascii("alize"),
dictgen::InsensitiveStr::Ascii("alizing"),
dictgen::InsensitiveStr::Ascii("lizes"),
dictgen::InsensitiveStr::Ascii("nsh"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -127607,6 +127863,7 @@ pub static WORD_FINI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
],
values: &[
&["finalization"],
+ &["finalize"],
&["finalizing"],
&["finalizes"],
&["finnish"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -135502,6 +135759,7 @@ pub static WORD_EXAM_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("ind"),
dictgen::InsensitiveStr::Ascii("inerad"),
dictgen::InsensitiveStr::Ascii("ing"),
+ dictgen::InsensitiveStr::Ascii("ininig"),
dictgen::InsensitiveStr::Ascii("inining"),
dictgen::InsensitiveStr::Ascii("le"),
dictgen::InsensitiveStr::Ascii("les"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -135530,6 +135788,7 @@ pub static WORD_EXAM_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["examined"],
&["examining"],
&["examining"],
+ &["examining"],
&["example"],
&["examples"],
&["example"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -135913,8 +136172,9 @@ pub static WORD_EVEY_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("one"),
dictgen::InsensitiveStr::Ascii("r"),
dictgen::InsensitiveStr::Ascii("rones"),
+ dictgen::InsensitiveStr::Ascii("thing"),
],
- values: &[&["everyone"], &["every"], &["everyones"]],
+ values: &[&["everyone"], &["every"], &["everyones"], &["everything"]],
range: 1..=5,
};
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -140165,6 +140425,7 @@ static WORD_EMO_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::
pub static WORD_EMO_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
dictgen::InsensitiveStr::Ascii("bdiment"),
+ dictgen::InsensitiveStr::Ascii("iji"),
dictgen::InsensitiveStr::Ascii("tionaly"),
dictgen::InsensitiveStr::Ascii("tionella"),
dictgen::InsensitiveStr::Ascii("ty"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -140173,6 +140434,7 @@ pub static WORD_EMO_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
],
values: &[
&["embodiment"],
+ &["emoji"],
&["emotionally"],
&["emotionally"],
&["empty"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -140263,6 +140525,7 @@ pub static WORD_EMI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("sion"),
dictgen::InsensitiveStr::Ascii("ss"),
dictgen::InsensitiveStr::Ascii("ssed"),
+ dictgen::InsensitiveStr::Ascii("table"),
dictgen::InsensitiveStr::Ascii("ted"),
dictgen::InsensitiveStr::Ascii("ting"),
dictgen::InsensitiveStr::Ascii("tion"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -140275,6 +140538,7 @@ pub static WORD_EMI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["emission"],
&["remiss", "amiss", "amass"],
&["amassed", "amiss"],
+ &["emittable"],
&["emitted"],
&["emitting"],
&["emission", "emotion"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -151113,6 +151377,7 @@ pub static WORD_DEVE_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("lopmetns"),
dictgen::InsensitiveStr::Ascii("lopmets"),
dictgen::InsensitiveStr::Ascii("lopmnet"),
+ dictgen::InsensitiveStr::Ascii("lopoment"),
dictgen::InsensitiveStr::Ascii("lopors"),
dictgen::InsensitiveStr::Ascii("lopp"),
dictgen::InsensitiveStr::Ascii("loppe"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -151168,6 +151433,7 @@ pub static WORD_DEVE_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["developments"],
&["developments"],
&["developments"],
+ &["development"],
&["develops"],
&["develop"],
&["develop"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -151677,6 +151943,7 @@ pub static WORD_DETEC_CHILDREN: dictgen::DictTable<&'static [&'static str]> = di
dictgen::InsensitiveStr::Ascii("tetd"),
dictgen::InsensitiveStr::Ascii("tie"),
dictgen::InsensitiveStr::Ascii("tiona"),
+ dictgen::InsensitiveStr::Ascii("tionn"),
dictgen::InsensitiveStr::Ascii("tivs"),
dictgen::InsensitiveStr::Ascii("toare"),
dictgen::InsensitiveStr::Ascii("tsion"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -151700,6 +151967,7 @@ pub static WORD_DETEC_CHILDREN: dictgen::DictTable<&'static [&'static str]> = di
&["detected"],
&["detectives"],
&["detection", "detections"],
+ &["detection"],
&["detectives"],
&["detector"],
&["detection"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -154437,6 +154705,7 @@ pub static WORD_DEPD_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
keys: &[
dictgen::InsensitiveStr::Ascii("en"),
dictgen::InsensitiveStr::Ascii("ence"),
+ dictgen::InsensitiveStr::Ascii("encencies"),
dictgen::InsensitiveStr::Ascii("encente"),
dictgen::InsensitiveStr::Ascii("encentes"),
dictgen::InsensitiveStr::Ascii("ences"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -154474,6 +154743,7 @@ pub static WORD_DEPD_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
values: &[
&["depend"],
&["dependence"],
+ &["dependencies"],
&["dependence"],
&["dependences"],
&["dependences"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -160108,8 +160378,9 @@ pub static WORD_CURA_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("ge"),
dictgen::InsensitiveStr::Ascii("geous"),
dictgen::InsensitiveStr::Ascii("tin"),
+ dictgen::InsensitiveStr::Ascii("ture"),
],
- values: &[&["courage"], &["courageous"], &["curtain"]],
+ values: &[&["courage"], &["courageous"], &["curtain"], &["curvature"]],
range: 2..=5,
};
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -164947,11 +165218,16 @@ static WORD_CONVERA_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictg
pub static WORD_CONVERA_CHILDREN: dictgen::DictTable<&'static [&'static str]> =
dictgen::DictTable {
keys: &[
+ dictgen::InsensitiveStr::Ascii("ge"),
dictgen::InsensitiveStr::Ascii("stion"),
dictgen::InsensitiveStr::Ascii("stions"),
],
- values: &[&["conversations"], &["conservation"]],
- range: 5..=6,
+ values: &[
+ &["converge", "coverage"],
+ &["conversations"],
+ &["conservation"],
+ ],
+ range: 2..=6,
};
static WORD_CONVEN_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -167306,9 +167582,12 @@ static WORD_CONSTRO_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictg
pub static WORD_CONSTRO_CHILDREN: dictgen::DictTable<&'static [&'static str]> =
dictgen::DictTable {
- keys: &[dictgen::InsensitiveStr::Ascii("llers")],
- values: &[&["controllers"]],
- range: 5..=5,
+ keys: &[
+ dictgen::InsensitiveStr::Ascii("l"),
+ dictgen::InsensitiveStr::Ascii("llers"),
+ ],
+ values: &[&["control"], &["controllers"]],
+ range: 1..=5,
};
static WORD_CONSTRI_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -172466,6 +172745,7 @@ pub static WORD_COMPLE_CHILDREN: dictgen::DictTable<&'static [&'static str]> = d
dictgen::InsensitiveStr::Ascii("tley"),
dictgen::InsensitiveStr::Ascii("tly"),
dictgen::InsensitiveStr::Ascii("tness"),
+ dictgen::InsensitiveStr::Ascii("tor"),
dictgen::InsensitiveStr::Ascii("ts"),
dictgen::InsensitiveStr::Ascii("tte"),
dictgen::InsensitiveStr::Ascii("ttly"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -172528,6 +172808,7 @@ pub static WORD_COMPLE_CHILDREN: dictgen::DictTable<&'static [&'static str]> = d
&["completely"],
&["completely"],
&["completeness"],
+ &["completer", "completion"],
&["completes"],
&["complete"],
&["completely"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -174916,6 +175197,7 @@ pub static WORD_COMA_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("pring"),
dictgen::InsensitiveStr::Ascii("prison"),
dictgen::InsensitiveStr::Ascii("prisons"),
+ dictgen::InsensitiveStr::Ascii("pt"),
dictgen::InsensitiveStr::Ascii("ptibele"),
dictgen::InsensitiveStr::Ascii("ptibelities"),
dictgen::InsensitiveStr::Ascii("ptibelity"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -174956,6 +175238,7 @@ pub static WORD_COMA_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["comparing"],
&["comparison"],
&["comparisons"],
+ &["compat"],
&["compatible"],
&["compatibilities"],
&["compatibility"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -175508,6 +175791,7 @@ pub static WORD_COLLA_CHILDREN: dictgen::DictTable<&'static [&'static str]> = di
dictgen::InsensitiveStr::Ascii("spible"),
dictgen::InsensitiveStr::Ascii("sping"),
dictgen::InsensitiveStr::Ascii("taral"),
+ dictgen::InsensitiveStr::Ascii("ter"),
dictgen::InsensitiveStr::Ascii("terial"),
dictgen::InsensitiveStr::Ascii("terol"),
dictgen::InsensitiveStr::Ascii("tiong"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -175550,6 +175834,7 @@ pub static WORD_COLLA_CHILDREN: dictgen::DictTable<&'static [&'static str]> = di
&["collapsible"],
&["collapsing"],
&["collateral"],
+ &["collator"],
&["collateral"],
&["collateral"],
&["collation"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -175565,11 +175850,12 @@ static WORD_COLI_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen:
pub static WORD_COLI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
+ dictgen::InsensitiveStr::Ascii("de"),
dictgen::InsensitiveStr::Ascii("sion"),
dictgen::InsensitiveStr::Ascii("ssion"),
],
- values: &[&["collision"], &["collision"]],
- range: 4..=5,
+ values: &[&["collide"], &["collision"], &["collision"]],
+ range: 2..=5,
};
static WORD_COLG_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -176060,7 +176346,7 @@ static WORD_COA_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::
static WORD_COA_CHILDREN: [Option<&dictgen::DictTrieNode<&'static [&'static str]>>; 26] = [
None,
None,
- None,
+ Some(&WORD_COAC_NODE),
None,
None,
None,
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -176333,6 +176619,17 @@ pub static WORD_COALA_CHILDREN: dictgen::DictTable<&'static [&'static str]> = di
range: 2..=7,
};
+static WORD_COAC_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
+ children: dictgen::DictTrieChild::Flat(&WORD_COAC_CHILDREN),
+ value: None,
+};
+
+pub static WORD_COAC_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
+ keys: &[dictgen::InsensitiveStr::Ascii("hig")],
+ values: &[&["coaching"]],
+ range: 3..=3,
+};
+
static WORD_CN_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
children: dictgen::DictTrieChild::Flat(&WORD_CN_CHILDREN),
value: None,
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -176663,6 +176960,7 @@ pub static WORD_CLI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("pbaord"),
dictgen::InsensitiveStr::Ascii("pboad"),
dictgen::InsensitiveStr::Ascii("pboads"),
+ dictgen::InsensitiveStr::Ascii("ped"),
dictgen::InsensitiveStr::Ascii("poard"),
dictgen::InsensitiveStr::Ascii("poards"),
dictgen::InsensitiveStr::Ascii("poing"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -176722,6 +177020,7 @@ pub static WORD_CLI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["clipboard"],
&["clipboard"],
&["clipboards"],
+ &["clipped"],
&["clipboard"],
&["clipboards"],
&["clipping"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -178284,9 +178583,12 @@ static WORD_CIE_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::
};
pub static WORD_CIE_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
- keys: &[dictgen::InsensitiveStr::Ascii("lings")],
- values: &[&["ceilings"]],
- range: 5..=5,
+ keys: &[
+ dictgen::InsensitiveStr::Ascii("lings"),
+ dictgen::InsensitiveStr::Ascii("nts"),
+ ],
+ values: &[&["ceilings"], &["clients"]],
+ range: 3..=5,
};
static WORD_CIC_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -185439,6 +185741,7 @@ pub static WORD_BUI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("lded"),
dictgen::InsensitiveStr::Ascii("ldes"),
dictgen::InsensitiveStr::Ascii("ldins"),
+ dictgen::InsensitiveStr::Ascii("ldning"),
dictgen::InsensitiveStr::Ascii("ldpackge"),
dictgen::InsensitiveStr::Ascii("ldpackges"),
dictgen::InsensitiveStr::Ascii("ling"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -185483,6 +185786,7 @@ pub static WORD_BUI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["built"],
&["builders"],
&["buildings"],
+ &["building"],
&["buildpackage"],
&["buildpackages"],
&["building"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -190705,8 +191009,9 @@ pub static WORD_BAI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("sc"),
dictgen::InsensitiveStr::Ascii("scly"),
dictgen::InsensitiveStr::Ascii("sed"),
+ dictgen::InsensitiveStr::Ascii("ses"),
],
- values: &[&["basic"], &["basically"], &["raised"]],
+ values: &[&["basic"], &["basically"], &["raised"], &["biases"]],
range: 2..=4,
};
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -190912,6 +191217,7 @@ pub static WORD_BACKW_CHILDREN: dictgen::DictTable<&'static [&'static str]> = di
dictgen::InsensitiveStr::Ascii("are"),
dictgen::InsensitiveStr::Ascii("ark"),
dictgen::InsensitiveStr::Ascii("ars"),
+ dictgen::InsensitiveStr::Ascii("ord"),
dictgen::InsensitiveStr::Ascii("rad"),
],
values: &[
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -190921,6 +191227,7 @@ pub static WORD_BACKW_CHILDREN: dictgen::DictTable<&'static [&'static str]> = di
&["backward"],
&["backward", "backwards"],
&["backward"],
+ &["backward"],
],
range: 2..=5,
};
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -191637,6 +191944,7 @@ pub static WORD_AVI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("aiton"),
dictgen::InsensitiveStr::Ascii("alability"),
dictgen::InsensitiveStr::Ascii("alable"),
+ dictgen::InsensitiveStr::Ascii("alible"),
dictgen::InsensitiveStr::Ascii("lability"),
dictgen::InsensitiveStr::Ascii("lable"),
dictgen::InsensitiveStr::Ascii("od"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -191653,6 +191961,7 @@ pub static WORD_AVI_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["aviation"],
&["availability"],
&["available"],
+ &["available"],
&["availability"],
&["available"],
&["avoid"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -194919,6 +195228,7 @@ pub static WORD_ASY_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("nchonous"),
dictgen::InsensitiveStr::Ascii("nchonously"),
dictgen::InsensitiveStr::Ascii("nchornous"),
+ dictgen::InsensitiveStr::Ascii("nchornously"),
dictgen::InsensitiveStr::Ascii("nchoronous"),
dictgen::InsensitiveStr::Ascii("nchrnous"),
dictgen::InsensitiveStr::Ascii("nchrnously"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -194957,6 +195267,7 @@ pub static WORD_ASY_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["asynchronous"],
&["asynchronously"],
&["asynchronous"],
+ &["asynchronously"],
&["asynchronous"],
&["asynchronous"],
&["asynchronously"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -195353,6 +195664,7 @@ pub static WORD_ASSS_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("embly"),
dictgen::InsensitiveStr::Ascii("ert"),
dictgen::InsensitiveStr::Ascii("ertion"),
+ dictgen::InsensitiveStr::Ascii("et"),
dictgen::InsensitiveStr::Ascii("its"),
dictgen::InsensitiveStr::Ascii("ociate"),
dictgen::InsensitiveStr::Ascii("ociated"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -195369,6 +195681,7 @@ pub static WORD_ASSS_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["assembly"],
&["assert"],
&["assertion"],
+ &["asset"],
&["assists"],
&["associated"],
&["associated"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -195377,7 +195690,7 @@ pub static WORD_ASSS_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["assumes"],
&["assuming"],
],
- range: 3..=8,
+ range: 2..=8,
};
static WORD_ASSO_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -199478,6 +199791,7 @@ pub static WORD_APPL_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("ikay"),
dictgen::InsensitiveStr::Ascii("ikays"),
dictgen::InsensitiveStr::Ascii("ing"),
+ dictgen::InsensitiveStr::Ascii("izes"),
dictgen::InsensitiveStr::Ascii("lied"),
dictgen::InsensitiveStr::Ascii("ly"),
dictgen::InsensitiveStr::Ascii("uad"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -199541,6 +199855,7 @@ pub static WORD_APPL_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["appliqué"],
&["appliqués"],
&["applying", "appalling"],
+ &["applies"],
&["applied"],
&["apply"],
&["applaud"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -200294,6 +200609,7 @@ static WORD_ANU_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::
pub static WORD_ANU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
dictgen::InsensitiveStr::Ascii("al"),
+ dictgen::InsensitiveStr::Ascii("alized"),
dictgen::InsensitiveStr::Ascii("ally"),
dictgen::InsensitiveStr::Ascii("glar"),
dictgen::InsensitiveStr::Ascii("led"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -200309,6 +200625,7 @@ pub static WORD_ANU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
],
values: &[
&["annual"],
+ &["annualized"],
&["annually"],
&["angular"],
&["annulled"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -200322,7 +200639,7 @@ pub static WORD_ANU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["aneurism"],
&["anywhere"],
],
- range: 2..=5,
+ range: 2..=6,
};
static WORD_ANT_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen::DictTrieNode {
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -207749,6 +208066,7 @@ pub static WORD_ACU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
dictgen::InsensitiveStr::Ascii("ally"),
dictgen::InsensitiveStr::Ascii("ired"),
dictgen::InsensitiveStr::Ascii("ires"),
+ dictgen::InsensitiveStr::Ascii("malated"),
dictgen::InsensitiveStr::Ascii("mulate"),
dictgen::InsensitiveStr::Ascii("mulated"),
dictgen::InsensitiveStr::Ascii("mulates"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -207776,6 +208094,7 @@ pub static WORD_ACU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["actually"],
&["acquired"],
&["acquires"],
+ &["accumulated"],
&["accumulate"],
&["accumulated"],
&["accumulates"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -208881,6 +209200,7 @@ pub static WORD_ACCU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
dictgen::InsensitiveStr::Ascii("astion"),
dictgen::InsensitiveStr::Ascii("lumate"),
dictgen::InsensitiveStr::Ascii("lumated"),
+ dictgen::InsensitiveStr::Ascii("lumating"),
dictgen::InsensitiveStr::Ascii("lumation"),
dictgen::InsensitiveStr::Ascii("malate"),
dictgen::InsensitiveStr::Ascii("malated"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -208943,6 +209263,7 @@ pub static WORD_ACCU_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
&["accusation"],
&["accumulate"],
&["accumulated"],
+ &["accumulating"],
&["accumulation"],
&["accumulate"],
&["accumulated"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -210024,6 +210345,7 @@ static WORD_ACCC_NODE: dictgen::DictTrieNode<&'static [&'static str]> = dictgen:
pub static WORD_ACCC_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dictgen::DictTable {
keys: &[
dictgen::InsensitiveStr::Ascii("ept"),
+ dictgen::InsensitiveStr::Ascii("eptable"),
dictgen::InsensitiveStr::Ascii("epted"),
dictgen::InsensitiveStr::Ascii("epting"),
dictgen::InsensitiveStr::Ascii("epts"),
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -210054,6 +210376,7 @@ pub static WORD_ACCC_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dic
],
values: &[
&["accept"],
+ &["acceptable"],
&["accepted"],
&["accepting"],
&["accepts"],
| diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -33785,6 +33822,7 @@ latitute,latitude
latnern,lantern
latops,laptops
latset,latest
+latst,latest
lattitude,latitude
lauch,launch
lauched,launched
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -57221,6 +57291,7 @@ tesellate,tessellate
tesellated,tessellated
tesellation,tessellation
tesellator,tessellator
+tesing,testing
tesitcle,testicle
tesitcles,testicles
tesited,tested
diff --git a/crates/typos-dict/assets/words.csv b/crates/typos-dict/assets/words.csv
--- a/crates/typos-dict/assets/words.csv
+++ b/crates/typos-dict/assets/words.csv
@@ -57252,6 +57323,7 @@ testimoney,testimony
testin,testing
testng,testing
testomony,testimony
+testsdata,testdata
testsing,testing
tetrahedora,tetrahedra
tetrahedoren,tetrahedron
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -20541,6 +20597,7 @@ pub static WORD_TES_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["tessellated"],
&["tessellation"],
&["tessellator"],
+ &["testing"],
&["testicle"],
&["testicles"],
&["tested"],
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -20572,6 +20629,7 @@ pub static WORD_TES_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["testing"],
&["testing"],
&["testimony"],
+ &["testdata"],
&["testing"],
],
range: 1..=9,
diff --git a/crates/typos-dict/src/dict_codegen.rs b/crates/typos-dict/src/dict_codegen.rs
--- a/crates/typos-dict/src/dict_codegen.rs
+++ b/crates/typos-dict/src/dict_codegen.rs
@@ -98411,6 +98644,7 @@ pub static WORD_LAT_CHILDREN: dictgen::DictTable<&'static [&'static str]> = dict
&["lantern"],
&["laptops"],
&["latest"],
+ &["latest"],
&["latitude"],
],
range: 2..=7,
| Non-critical corrections - June 2023 Edition
Please list any corrections you noticed `typos` doesn't fix and we'll roll these up into a single PR, making it lighterweight for adding new corrections.
| Probably safe to include:
```
acculumating -> accumulating
anualized -> annualized
assset -> asset
avialible -> available
baises -> biases
cliped -> clipped
curature -> curvature
depdencencies -> dependencies
eveything -> everything
formuala -> formula
hense -> hence
pararm -> param
segmnet -> segment
termonology -> terminology
tolearnce -> tolerance
tonange -> tonnage
traids -> triads
verticla -> vertical
```
Might have other matches?
```
tupes -> tuples
poin -> point
```
collater -> collator
```
resutls -> results
```
idenifiable -> identifiable
Hi!
Here are a few typos discovered by `codespell` but ignored by `typos`:
```
usefull -> useful
wont -> won't
should'nt -> shouldn't
te -> the, be, we, to
wee -> we
direciton -> direction
forcaster -> forecaster
backword -> backward
appropritate -> appropriate
```
creat -> create, great
(note, my typo was `creat -> create`, but dictionary matching suggested `great`.)
tpos -> typos (maybe also others like `tops`)
I hit this typo when adding typos CI
> Probably safe to include:
>
> ```
> [...]
> traids -> triads
> [...]
> ```
Maybe also list 'traits' as a possible correction?
oppurtinity -> opportunity
`coachig` -> `coaching`
```
unalbe -> unable
```
onw -> now
it's already recognized as a typo, but the only suggestion is `own`
```
buildning -> building
detectionn -> detection
necassary -> necessary
oputput -> output
ove -> oven, over
polpulate -> populate
poulate -> populate
preocessing -> processing, preprocessing
prediciotn -> prediction
requiremenht -> requirement
vecotor -> vector
```
```
implementationis -> implementations | implementation is
latst -> latest
propgating -> propagating
> ```
> latst -> latest
> ```
I'd suggest also suggesting `last` as a possible correction.
```
acumalated -> accumulated
developoment -> development
incomatible -> incompatible
tesing -> testing
recalcelated -> recalculated
```
```
fromatted -> formatted
```
unsable -> unable, unstable
pentalty -> penalty
predicte -> predicted
realy -> real
accceptable -> acceptable
asynchornously -> asynchronously
cients -> clients
colide -> collide
comapt -> compat
completor -> completer, completion
constrol -> control
converage -> coverage
emitable -> emittable
emoiji -> emoji
examininig -> examining
finialize -> finalize
fixutre -> fixture
gnored -> ignored
inheritences -> inheritances
itereate -> iterate
maybed -> maybe
monomorpize -> monomorphize
normalizd -> normalized
offst -> offset
primtives -> primitives
promitives -> primitives
promordials -> primordials
prvode -> provide
rumtime -> runtime
slotable -> slottable
stabalized -> stabilized
stderrr -> stderr
testsdata -> testdata
unafected -> unaffected
unfortuntely -> unfortunately
webassemby -> webassembly
workfaround -> workaround
wrorker -> worker
```
wordpresss -> wordpress
```
> realy -> real
I'd suggest also suggesting `really` as a possible correction.
Yeah that correction is already there ... I didn't bother to include suggestions already present.
whule -> while, whole
applizes -> applies
Hello,
Sorry to bother, however, why does: `contiguities,continuities` exist?
`contiguities` seems to be a correct word.
I've update my personal typos.toml, but I'd like to understand the reasoning behind it
Thanks in advance,
B.R.
@vinchona I PR is up to fix that. However, in the future I would recommend creating Issues or Discussions for that as that isn't on-topic for the current issue.
```
segemnt -> segment
segemntation -> segmentation
```
```
buildning -> building
detectionn -> detection
detectionns -> detections
infomatrion -> information
raduis -> radius
radus -> radius
realstic -> realistic
segmnet -> segment
segmnets -> segments
unifom -> uniform
unifomly -> uniformly
vectorr -> vector
vectorrs -> vectors
writre -> writer, write
```
| 2023-07-03T22:18:57 | 1.64 | d4258b1aa03ddeb54b2c52fc85348745b7bd060f | [
"codegen"
] | [
"test_varcon_removal",
"test_merge_duplicates",
"test_preserve_correction_order",
"test_duplicate_correction_removal",
"test_varcon_best_match",
"test_cycle_removal",
"verify"
] | [] | [] |
wez/wezterm | 4,764 | wez__wezterm-4764 | [
"4730",
"4730"
] | 4921f139d35590ab35415021221a2a6f5cf10ab3 | diff --git a/vtparse/src/lib.rs b/vtparse/src/lib.rs
--- a/vtparse/src/lib.rs
+++ b/vtparse/src/lib.rs
@@ -444,6 +444,7 @@ impl VTParser {
for src in &self.params[0..self.num_params] {
if let CsiParam::Integer(value) = src {
res[i] = *value;
+ } else if let CsiParam::P(b';') = src {
i += 1;
}
}
| diff --git a/vtparse/src/lib.rs b/vtparse/src/lib.rs
--- a/vtparse/src/lib.rs
+++ b/vtparse/src/lib.rs
@@ -1103,4 +1104,26 @@ mod test {
]
);
}
+
+ #[test]
+ fn test_ommitted_dcs_param() {
+ assert_eq!(
+ parse_as_vec("\x1bP;1q\x1b\\".as_bytes()),
+ vec![
+ VTAction::DcsHook {
+ byte: b'q',
+ params: vec![0, 1],
+ intermediates: vec![],
+ ignored_excess_intermediates: false,
+ },
+ VTAction::DcsUnhook,
+ VTAction::EscDispatch {
+ params: vec![],
+ intermediates: vec![],
+ ignored_excess_intermediates: false,
+ byte: b'\\',
+ }
+ ]
+ );
+ }
}
| Sixel parser ignores `P2` parameter if `P1` is blank
### What Operating System(s) are you seeing this problem on?
Linux Wayland
### Which Wayland compositor or X11 Window manager(s) are you using?
kwin
### WezTerm version
20231228-084719-ff274374
### Did you try the latest nightly build to see if the issue is better (or worse!) than your current version?
Yes, and I updated the version box above to show the version of the nightly that I tried
### Describe the bug
A sixel sequence follows the pattern:
```
DCS P1; P2; P3; q s..s ST
```
where `P1` is the macro parameter and `P2` selects how the terminal draws the background color.
If `P1` is blank but `P2` is set to `1` as follows:
```
\x1bP;1q ...
```
then unspecified pixel are set to the current background color instead of remaining at their current color as expected.
### To Reproduce
Run the following twice in the terminal:
```bash
echo -e '\eP;1q;#1;2;100;0;0#1??}}GG}}??}}??--#0;2;0;100;100\e\\'
```

The second sixel graphic has a cyan background, which is not expected: it should be transparent, because `P2` was set to `1`.
The third sixel graphic shows the expected output (`P1` is not blank and `P2` is `1`, resulting in transparent background).
### Configuration
default
### Expected Behavior
I would expect sixel device control string parameters to be parsed even if preceding parameters are blank.
### Logs
_No response_
### Anything else?
_No response_
Sixel parser ignores `P2` parameter if `P1` is blank
### What Operating System(s) are you seeing this problem on?
Linux Wayland
### Which Wayland compositor or X11 Window manager(s) are you using?
kwin
### WezTerm version
20231228-084719-ff274374
### Did you try the latest nightly build to see if the issue is better (or worse!) than your current version?
Yes, and I updated the version box above to show the version of the nightly that I tried
### Describe the bug
A sixel sequence follows the pattern:
```
DCS P1; P2; P3; q s..s ST
```
where `P1` is the macro parameter and `P2` selects how the terminal draws the background color.
If `P1` is blank but `P2` is set to `1` as follows:
```
\x1bP;1q ...
```
then unspecified pixel are set to the current background color instead of remaining at their current color as expected.
### To Reproduce
Run the following twice in the terminal:
```bash
echo -e '\eP;1q;#1;2;100;0;0#1??}}GG}}??}}??--#0;2;0;100;100\e\\'
```

The second sixel graphic has a cyan background, which is not expected: it should be transparent, because `P2` was set to `1`.
The third sixel graphic shows the expected output (`P1` is not blank and `P2` is `1`, resulting in transparent background).
### Configuration
default
### Expected Behavior
I would expect sixel device control string parameters to be parsed even if preceding parameters are blank.
### Logs
_No response_
### Anything else?
_No response_
| It appears that the sixel parameter parser ignores any leading parameter deliminators.
I would not expect the following to render an image with a transparent background:

It looks like omitted parameters get added at the end of the parameter array:

I think perhaps there is a bug in `VTParser.as_integer_params`?
Presumably `i` should be incremented whether the parameter is a `CsiParam::Integer` or not?
```diff
fn as_integer_params(&self) -> [i64; MAX_PARAMS] {
let mut res = [0i64; MAX_PARAMS];
let mut i = 0;
for src in &self.params[0..self.num_params] {
if let CsiParam::Integer(value) = src {
res[i] = *value;
- i += 1;
}
+ i += 1;
}
res
}
```
It appears that the sixel parameter parser ignores any leading parameter deliminators.
I would not expect the following to render an image with a transparent background:

It looks like omitted parameters get added at the end of the parameter array:

I think perhaps there is a bug in `VTParser.as_integer_params`?
Presumably `i` should be incremented whether the parameter is a `CsiParam::Integer` or not?
```diff
fn as_integer_params(&self) -> [i64; MAX_PARAMS] {
let mut res = [0i64; MAX_PARAMS];
let mut i = 0;
for src in &self.params[0..self.num_params] {
if let CsiParam::Integer(value) = src {
res[i] = *value;
- i += 1;
}
+ i += 1;
}
res
}
``` | 2024-01-08T23:37:59 | 0.18 | 4921f139d35590ab35415021221a2a6f5cf10ab3 | [
"test::test_ommitted_dcs_param"
] | [
"test::kitty_img",
"test::osc_fedora_vte",
"test::osc_utf8",
"test::print_utf8",
"test::sixel",
"test::test_colon_rgb",
"test::test_csi_intermediates",
"test::test_csi_omitted_param",
"test::test_csi_too_many_params",
"test::test_decset",
"test::test_fancy_underline",
"test::test_mixed",
"te... | [] | [] |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.