repo
stringlengths
8
35
pull_number
int64
14
14.5k
instance_id
stringlengths
13
40
issue_numbers
listlengths
1
3
base_commit
stringlengths
40
40
patch
stringlengths
344
132k
test_patch
stringlengths
308
274k
problem_statement
stringlengths
25
19.8k
hints_text
stringlengths
0
37.4k
created_at
stringlengths
19
19
version
stringlengths
3
4
environment_setup_commit
stringlengths
40
40
FAIL_TO_PASS
listlengths
1
1.1k
PASS_TO_PASS
listlengths
0
7.38k
FAIL_TO_FAIL
listlengths
0
1.72k
PASS_TO_FAIL
listlengths
0
49
casey/just
1,833
casey__just-1833
[ "1019" ]
8fc91b298c6b436b4dd54da34d365399f13fa172
diff --git a/README.md b/README.md --- a/README.md +++ b/README.md @@ -1323,6 +1323,23 @@ $ just The executable is at: /bin/just ``` +#### Just Process ID + +- `just_pid()` - Process ID of the `just` executable. + +For example: + +```just +pid: + @echo The process ID is: {{ just_pid() }} +``` + +```sh +$ just +The process ID is: 420 +``` + + #### String Manipulation - `quote(s)` - Replace all single quotes with `'\''` and prepend and append diff --git a/src/function.rs b/src/function.rs --- a/src/function.rs +++ b/src/function.rs @@ -41,6 +41,7 @@ pub(crate) fn get(name: &str) -> Option<Function> { "invocation_directory_native" => Nullary(invocation_directory_native), "join" => BinaryPlus(join), "just_executable" => Nullary(just_executable), + "just_pid" => Nullary(just_pid), "justfile" => Nullary(justfile), "justfile_directory" => Nullary(justfile_directory), "kebabcase" => Unary(kebabcase), diff --git a/src/function.rs b/src/function.rs --- a/src/function.rs +++ b/src/function.rs @@ -251,6 +252,10 @@ fn just_executable(_context: &FunctionContext) -> Result<String, String> { }) } +fn just_pid(_context: &FunctionContext) -> Result<String, String> { + Ok(std::process::id().to_string()) +} + fn justfile(context: &FunctionContext) -> Result<String, String> { context .search
diff --git a/tests/functions.rs b/tests/functions.rs --- a/tests/functions.rs +++ b/tests/functions.rs @@ -651,3 +651,14 @@ fn sha256_file() { .stdout("177b3d79aaafb53a7a4d7aaba99a82f27c73370e8cb0295571aade1e4fea1cd2") .run(); } + +#[test] +fn just_pid() { + let Output { stdout, pid, .. } = Test::new() + .args(["--evaluate", "x"]) + .justfile("x := just_pid()") + .stdout_regex(r"\d+") + .run(); + + assert_eq!(stdout.parse::<u32>().unwrap(), pid); +} diff --git a/tests/invocation_directory.rs b/tests/invocation_directory.rs --- a/tests/invocation_directory.rs +++ b/tests/invocation_directory.rs @@ -85,7 +85,9 @@ fn test_invocation_directory() { #[test] fn invocation_directory_native() { - let Output { stdout, tempdir } = Test::new() + let Output { + stdout, tempdir, .. + } = Test::new() .justfile("x := invocation_directory_native()") .args(["--evaluate", "x"]) .stdout_regex(".*") diff --git a/tests/test.rs b/tests/test.rs --- a/tests/test.rs +++ b/tests/test.rs @@ -35,6 +35,7 @@ macro_rules! test { } pub(crate) struct Output { + pub(crate) pid: u32, pub(crate) stdout: String, pub(crate) tempdir: TempDir, } diff --git a/tests/test.rs b/tests/test.rs --- a/tests/test.rs +++ b/tests/test.rs @@ -210,6 +211,8 @@ impl Test { .spawn() .expect("just invocation failed"); + let pid = child.id(); + { let mut stdin_handle = child.stdin.take().expect("failed to unwrap stdin handle"); diff --git a/tests/test.rs b/tests/test.rs --- a/tests/test.rs +++ b/tests/test.rs @@ -257,8 +260,9 @@ impl Test { } Output { - tempdir: self.tempdir, + pid, stdout: output_stdout.into(), + tempdir: self.tempdir, } } }
Add function that returns PID of the just process Mentioned by @page-down in #1011.
2024-01-11T00:55:03
1.22
8fc91b298c6b436b4dd54da34d365399f13fa172
[ "functions::just_pid" ]
[ "analyzer::tests::duplicate_variadic_parameter", "analyzer::tests::duplicate_parameter", "analyzer::tests::duplicate_variable", "analyzer::tests::duplicate_recipe", "analyzer::tests::duplicate_alias", "analyzer::tests::alias_shadows_recipe_after", "analyzer::tests::alias_shadows_recipe_before", "analy...
[ "functions::env_var_functions", "fmt::write_error" ]
[]
casey/just
1,809
casey__just-1809
[ "1807" ]
743ab2fc8293c18f4b44f83e65cef5ec5080da3b
diff --git a/README.md b/README.md --- a/README.md +++ b/README.md @@ -2669,6 +2669,13 @@ $ just --unstable bar b B ``` +Or with path syntax: + +```sh +$ just --unstable bar::b +B +``` + If a module is named `foo`, just will search for the module file in `foo.just`, `foo/mod.just`, `foo/justfile`, and `foo/.justfile`. In the latter two cases, the module file may have any capitalization. diff --git a/src/justfile.rs b/src/justfile.rs --- a/src/justfile.rs +++ b/src/justfile.rs @@ -2,7 +2,7 @@ use {super::*, serde::Serialize}; #[derive(Debug)] struct Invocation<'src: 'run, 'run> { - arguments: &'run [&'run str], + arguments: Vec<&'run str>, recipe: &'run Recipe<'src>, settings: &'run Settings<'src>, scope: &'run Scope<'src, 'run>, diff --git a/src/justfile.rs b/src/justfile.rs --- a/src/justfile.rs +++ b/src/justfile.rs @@ -209,7 +209,7 @@ impl<'src> Justfile<'src> { _ => {} } - let argvec: Vec<&str> = if !arguments.is_empty() { + let mut remaining: Vec<&str> = if !arguments.is_empty() { arguments.iter().map(String::as_str).collect() } else if let Some(recipe) = &self.default { recipe.check_can_be_default_recipe()?; diff --git a/src/justfile.rs b/src/justfile.rs --- a/src/justfile.rs +++ b/src/justfile.rs @@ -220,15 +220,29 @@ impl<'src> Justfile<'src> { return Err(Error::NoDefaultRecipe); }; - let arguments = argvec.as_slice(); - let mut missing = Vec::new(); let mut invocations = Vec::new(); - let mut remaining = arguments; let mut scopes = BTreeMap::new(); let arena: Arena<Scope> = Arena::new(); - while let Some((first, mut rest)) = remaining.split_first() { + while let Some(first) = remaining.first().copied() { + if first.contains("::") { + if first.starts_with(':') || first.ends_with(':') || first.contains(":::") { + missing.push(first.to_string()); + remaining = remaining[1..].to_vec(); + continue; + } + + remaining = first + .split("::") + .chain(remaining[1..].iter().copied()) + .collect(); + + continue; + } + + let rest = &remaining[1..]; + if let 
Some((invocation, consumed)) = self.invocation( 0, &mut Vec::new(), diff --git a/src/justfile.rs b/src/justfile.rs --- a/src/justfile.rs +++ b/src/justfile.rs @@ -241,12 +255,12 @@ impl<'src> Justfile<'src> { first, rest, )? { - rest = &rest[consumed..]; + remaining = rest[consumed..].to_vec(); invocations.push(invocation); } else { - missing.push((*first).to_owned()); + missing.push(first.to_string()); + remaining = rest.to_vec(); } - remaining = rest; } if !missing.is_empty() { diff --git a/src/justfile.rs b/src/justfile.rs --- a/src/justfile.rs +++ b/src/justfile.rs @@ -273,7 +287,7 @@ impl<'src> Justfile<'src> { Self::run_recipe( &context, invocation.recipe, - invocation.arguments, + &invocation.arguments, &dotenv, search, &mut ran, diff --git a/src/justfile.rs b/src/justfile.rs --- a/src/justfile.rs +++ b/src/justfile.rs @@ -306,7 +320,7 @@ impl<'src> Justfile<'src> { search: &'run Search, parent: &'run Scope<'src, 'run>, first: &'run str, - rest: &'run [&'run str], + rest: &[&'run str], ) -> RunResult<'src, Option<(Invocation<'src, 'run>, usize)>> { if let Some(module) = self.modules.get(first) { path.push(first); diff --git a/src/justfile.rs b/src/justfile.rs --- a/src/justfile.rs +++ b/src/justfile.rs @@ -327,7 +341,7 @@ impl<'src> Justfile<'src> { Invocation { settings: &module.settings, recipe, - arguments: &[], + arguments: Vec::new(), scope, }, depth, diff --git a/src/justfile.rs b/src/justfile.rs --- a/src/justfile.rs +++ b/src/justfile.rs @@ -352,7 +366,7 @@ impl<'src> Justfile<'src> { if recipe.parameters.is_empty() { Ok(Some(( Invocation { - arguments: &[], + arguments: Vec::new(), recipe, scope: parent, settings: &self.settings, diff --git a/src/justfile.rs b/src/justfile.rs --- a/src/justfile.rs +++ b/src/justfile.rs @@ -373,7 +387,7 @@ impl<'src> Justfile<'src> { } Ok(Some(( Invocation { - arguments: &rest[..argument_count], + arguments: rest[..argument_count].to_vec(), recipe, scope: parent, settings: &self.settings,
diff --git a/tests/modules.rs b/tests/modules.rs --- a/tests/modules.rs +++ b/tests/modules.rs @@ -52,6 +52,74 @@ fn module_recipes_can_be_run_as_subcommands() { .run(); } +#[test] +fn module_recipes_can_be_run_with_path_syntax() { + Test::new() + .write("foo.just", "foo:\n @echo FOO") + .justfile( + " + mod foo + ", + ) + .test_round_trip(false) + .arg("--unstable") + .arg("foo::foo") + .stdout("FOO\n") + .run(); +} + +#[test] +fn nested_module_recipes_can_be_run_with_path_syntax() { + Test::new() + .write("foo.just", "mod bar") + .write("bar.just", "baz:\n @echo BAZ") + .justfile( + " + mod foo + ", + ) + .test_round_trip(false) + .arg("--unstable") + .arg("foo::bar::baz") + .stdout("BAZ\n") + .run(); +} + +#[test] +fn invalid_path_syntax() { + Test::new() + .test_round_trip(false) + .arg(":foo::foo") + .stderr("error: Justfile does not contain recipe `:foo::foo`.\n") + .status(EXIT_FAILURE) + .run(); + + Test::new() + .test_round_trip(false) + .arg("foo::foo:") + .stderr("error: Justfile does not contain recipe `foo::foo:`.\n") + .status(EXIT_FAILURE) + .run(); + + Test::new() + .test_round_trip(false) + .arg("foo:::foo") + .stderr("error: Justfile does not contain recipe `foo:::foo`.\n") + .status(EXIT_FAILURE) + .run(); +} + +#[test] +fn missing_recipe_after_invalid_path() { + Test::new() + .test_round_trip(false) + .arg(":foo::foo") + .arg("bar") + .stderr("error: Justfile does not contain recipes `:foo::foo` or `bar`.\n") + .status(EXIT_FAILURE) + .run(); +} + #[test] fn assignments_are_evaluated_in_modules() { Test::new()
completions and modules don't work together The completions script uses `just --summary` to fetch a list of recipes. A recipe `bar` inside module `foo` is listed as `foo::bar`, however module recipes can't be invoked like this ("Justfile does not contain recipe `foo::bar`"). I'm not sure the best solution although I do personally find `foo::bar` easier syntax to read as `foo bar` in my mind looks like a recipe `foo` that takes a parameter `bar`. I wonder whether `foo::bar` should be added as additional valid syntax to run a module's recipe?
Yah, this is definitely annoying. I would have actually liked to have different recipes be tab separated in the output of `just --summary`, so that recipes in modules could be space separated, i.e., if there's a recipe `foo` and a recipe `bar` in module `baz`, `just --summary` would output `foo\tbar\sbaz`, so then you could split on tabs to the valid recipes. Unfortunately, this would break any scripts which relied on splitting the output of `just --summary` on spaces, or on whitespace, so I opted to use the `::` syntax instead. Eventually, when recipes can have dependencies that are in other modules, I'll probably wind up using the same syntax for dependencies: ``` mod baz foo: baz::bar ``` I'm not opposed to adding `foo::bar` as an additional syntax to call recipes in modules from the command line. It seems a bit inelegant to add it just to make the lives of completion scripts easier, but I think some people will prefer that syntax anyhow, and if it becomes the actual syntax for dependencies in other modules, being able to use that syntax on the command line makes sense. Another thing that the completion scripts could do is split on `::`, but I have basically never touched the completion scripts personally, and always left them to other brave souls, so I don't know how annoying this would be to do. Differentiating between spaces and tabs in completion results sounds messy to me, and like you said, probably backwards-incompatible. Developers tend to be used to the concept of namespace separators, and `::` is a common one. In the past I've wanted to use `:` to create categories of recipe, e.g. `make:migration` but couldn't because `:` is not valid in a recipe name. I would use this feature to put all the make recipes in `make.just` and reference them as a module, which would tidy things up. 
Unfortunately if invoking module recipes by `::` became the only way, the output of `just --list` would probably need to change from the neat tabbed output to a flat list with module recipes listed as `foo::bar`, otherwise it wouldn't be clear how to invoke them. Not sure what's best either way, so just sharing my thoughts. > Unfortunately, this would break any scripts which relied on splitting the output of just --summary on spaces Not sure if that'd be something worth doing as (at least with bash) it's not the completion script's job to split up the input: https://www.gnu.org/software/bash/manual/html_node/Bash-Variables.html#index-COMP_005fWORDS It does mention `COMP_WORDBREAKS` but imo completions are fragile enough even without trying to change the defaults. > Unfortunately if invoking module recipes by `::` became the only way, the output of `just --list` would probably need to change from the neat tabbed output to a flat list with module recipes listed as `foo::bar`, otherwise it wouldn't be clear how to invoke them. I'd definitely allow both `::` separated and space separated from the command line, since I personally like space-separated, and space is one character, whereas `::` is two characters, and requires shift. > Not sure if that'd be something worth doing as (at least with bash) it's not the completion script's job to split up the input: Ahh, good catch. So I guess separating by tabs wouldn't work anyway.
2024-01-01T05:52:45
1.21
743ab2fc8293c18f4b44f83e65cef5ec5080da3b
[ "modules::module_recipes_can_be_run_with_path_syntax", "modules::nested_module_recipes_can_be_run_with_path_syntax" ]
[ "analyzer::tests::duplicate_alias", "analyzer::tests::duplicate_parameter", "analyzer::tests::duplicate_variable", "analyzer::tests::alias_shadows_recipe_after", "analyzer::tests::alias_shadows_recipe_before", "analyzer::tests::duplicate_recipe", "analyzer::tests::duplicate_variadic_parameter", "analy...
[ "functions::env_var_functions", "fmt::write_error" ]
[]
casey/just
775
casey__just-775
[ "774" ]
6f42c8b737996ad1a6e52ef742b983f170cdb229
diff --git a/README.adoc b/README.adoc --- a/README.adoc +++ b/README.adoc @@ -622,6 +622,22 @@ script: ./{{justfile_directory()}}/scripts/some_script ``` +==== Just Executable + +- `just_executable()` - Absolute path to the just executable. + +For example: + +```make +executable: + @echo The executable is at: {{just_executable()}} +``` + +``` +$ just +The executable is at: /bin/just +``` + ==== Dotenv Integration `just` will load environment variables from a file named `.env`. This file can be located in the same directory as your justfile or in a parent directory. These variables are environment variables, not `just` variables, and so must be accessed using `$VARIABLE_NAME` in recipes and backticks. diff --git a/src/function.rs b/src/function.rs --- a/src/function.rs +++ b/src/function.rs @@ -18,6 +18,7 @@ lazy_static! { ("invocation_directory", Nullary(invocation_directory)), ("env_var", Unary(env_var)), ("env_var_or_default", Binary(env_var_or_default)), + ("just_executable", Nullary(just_executable)), ] .into_iter() .collect(); diff --git a/src/function.rs b/src/function.rs --- a/src/function.rs +++ b/src/function.rs @@ -123,3 +124,15 @@ fn env_var_or_default( Ok(value) => Ok(value), } } + +fn just_executable(_context: &FunctionContext) -> Result<String, String> { + let exe_path = + std::env::current_exe().map_err(|e| format!("Error getting current executable: {}", e))?; + + exe_path.to_str().map(str::to_owned).ok_or_else(|| { + format!( + "Executable path is not valid unicode: {}", + exe_path.to_string_lossy() + ) + }) +}
diff --git a/tests/misc.rs b/tests/misc.rs --- a/tests/misc.rs +++ b/tests/misc.rs @@ -1203,6 +1203,18 @@ test! { status: EXIT_FAILURE, } +test! { + name: test_just_executable_function, + justfile: " + a: + @printf 'Executable path is: %s\\n' '{{ just_executable() }}' + ", + args: ("a"), + stdout: format!("Executable path is: {}\n", executable_path("just").to_str().unwrap()).as_str(), + stderr: "", + status: EXIT_SUCCESS, +} + test! { name: infallable_command, justfile: r#"
Function to get just' executable path (for when just is not in $PATH) Hello, I have a simple justfile like: ```make default: @just --list test: echo "it works!" ``` I'm using the [Nix package manager](https://nixos.org/) which allows me to run software without really installing them, and thus they are not in PATH. I'm trying to run `just` like this as a bootstrapping step: `nix run nixpkgs/nixpkgs-unstable#just` to run the first (default) recipe, but it fails rightfully with `sh: just: command not found` when trying to run `@just --list` in the `default` recipe. I would like a function to get the full path to the `just` binary that is currently running, to be able to re-run it from a recipe. The usage would be something like this: ```make default: @{{just_executable()}} --list ``` And this would expand at runtime to something like (here using Nix): ```make default: @/nix/store/ajq3r70f4pz5sqh5p83bbhzrkdj1cdrz-just-0.8.4/bin/just --list ```
2021-03-28T01:39:42
0.8
6f42c8b737996ad1a6e52ef742b983f170cdb229
[ "misc::test_just_executable_function" ]
[ "analyzer::tests::duplicate_alias", "analyzer::tests::duplicate_parameter", "analyzer::tests::alias_shadows_recipe_before", "analyzer::tests::alias_shadows_recipe_after", "analyzer::tests::duplicate_variable", "analyzer::tests::duplicate_recipe", "analyzer::tests::extra_whitespace", "analyzer::tests::...
[ "config::tests::help", "misc::env_var_functions" ]
[]
casey/just
656
casey__just-656
[ "642" ]
cea4a1608199672934f9db1b1b2c11f0feb3fd2f
diff --git a/src/justfile.rs b/src/justfile.rs --- a/src/justfile.rs +++ b/src/justfile.rs @@ -149,7 +149,7 @@ impl<'src> Justfile<'src> { while let Some((argument, mut tail)) = rest.split_first() { if let Some(recipe) = self.get_recipe(argument) { if recipe.parameters.is_empty() { - grouped.push((recipe, &tail[0..0])); + grouped.push((recipe, &[][..])); } else { let argument_range = recipe.argument_range(); let argument_count = cmp::min(tail.len(), recipe.max_arguments()); diff --git a/src/lexer.rs b/src/lexer.rs --- a/src/lexer.rs +++ b/src/lexer.rs @@ -678,7 +678,7 @@ impl<'src> Lexer<'src> { match self.next { Some('\'') => break, None => return Err(self.error(UnterminatedString)), - _ => {}, + Some(_) => {}, } self.advance()?; diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -3,29 +3,28 @@ clippy::comparison_chain, clippy::else_if_without_else, clippy::enum_glob_use, + clippy::expect_used, clippy::filter_map, clippy::if_not_else, clippy::implicit_return, clippy::indexing_slicing, clippy::integer_arithmetic, clippy::let_underscore_must_use, + clippy::map_unwrap_or, clippy::match_same_arms, clippy::missing_docs_in_private_items, clippy::missing_errors_doc, clippy::missing_inline_in_public_items, clippy::needless_pass_by_value, clippy::non_ascii_literal, - clippy::option_expect_used, - clippy::option_map_unwrap_or, - clippy::option_unwrap_used, clippy::panic, clippy::print_stdout, - clippy::result_expect_used, clippy::shadow_unrelated, clippy::string_add, clippy::struct_excessive_bools, clippy::too_many_lines, clippy::unreachable, + clippy::unwrap_used, clippy::use_debug, clippy::wildcard_enum_match_arm, clippy::wildcard_imports diff --git a/src/load_dotenv.rs b/src/load_dotenv.rs --- a/src/load_dotenv.rs +++ b/src/load_dotenv.rs @@ -6,8 +6,14 @@ pub(crate) fn load_dotenv() -> RunResult<'static, BTreeMap<String, String>> { #![allow(deprecated)] match dotenv::dotenv_iter() { Ok(iter) => { - let result: dotenv::Result<BTreeMap<String, 
String>> = iter.collect(); - result.map_err(|dotenv_error| RuntimeError::Dotenv { dotenv_error }) + let mut dotenv = BTreeMap::new(); + for result in iter { + let (key, value) = result.map_err(|dotenv_error| RuntimeError::Dotenv { dotenv_error })?; + if env::var_os(&key).is_none() { + dotenv.insert(key, value); + } + } + Ok(dotenv) }, Err(dotenv_error) => if dotenv_error.not_found() { diff --git a/src/tree.rs b/src/tree.rs --- a/src/tree.rs +++ b/src/tree.rs @@ -101,8 +101,7 @@ impl<'text> Tree<'text> { /// Like `push`, but modify self in-place pub(crate) fn push_mut(&mut self, tree: impl Into<Tree<'text>>) { - let tree = mem::replace(self, Tree::List(Vec::new())).push(tree.into()); - mem::replace(self, tree); + *self = mem::replace(self, Tree::List(Vec::new())).push(tree.into()); } }
diff --git a/tests/integration.rs b/tests/integration.rs --- a/tests/integration.rs +++ b/tests/integration.rs @@ -1,4 +1,5 @@ use std::{ + collections::BTreeMap, env, fs, io::Write, path::Path, diff --git a/tests/integration.rs b/tests/integration.rs --- a/tests/integration.rs +++ b/tests/integration.rs @@ -16,6 +17,9 @@ macro_rules! test { name: $name:ident, justfile: $justfile:expr, $(args: ($($arg:tt)*),)? + $(env: { + $($env_key:literal : $env_value:literal,)* + },)? $(stdin: $stdin:expr,)? $(stdout: $stdout:expr,)? $(stderr: $stderr:expr,)? diff --git a/tests/integration.rs b/tests/integration.rs --- a/tests/integration.rs +++ b/tests/integration.rs @@ -24,6 +28,11 @@ macro_rules! test { ) => { #[test] fn $name() { + #[allow(unused_mut)] + let mut env = BTreeMap::new(); + + $($(env.insert($env_key.to_string(), $env_value.to_string());)*)? + Test { justfile: $justfile, $(args: &[$($arg)*],)? diff --git a/tests/integration.rs b/tests/integration.rs --- a/tests/integration.rs +++ b/tests/integration.rs @@ -32,6 +41,7 @@ macro_rules! test { $(stderr: $stderr,)? $(status: $status,)? $(shell: $shell,)? + env, ..Test::default() }.run(); } diff --git a/tests/integration.rs b/tests/integration.rs --- a/tests/integration.rs +++ b/tests/integration.rs @@ -41,6 +51,7 @@ macro_rules! 
test { struct Test<'a> { justfile: &'a str, args: &'a [&'a str], + env: BTreeMap<String, String>, stdin: &'a str, stdout: &'a str, stderr: &'a str, diff --git a/tests/integration.rs b/tests/integration.rs --- a/tests/integration.rs +++ b/tests/integration.rs @@ -53,6 +64,7 @@ impl<'a> Default for Test<'a> { Test { justfile: "", args: &[], + env: BTreeMap::new(), stdin: "", stdout: "", stderr: "", diff --git a/tests/integration.rs b/tests/integration.rs --- a/tests/integration.rs +++ b/tests/integration.rs @@ -86,6 +98,7 @@ impl<'a> Test<'a> { let mut child = command .args(self.args) + .envs(self.env) .current_dir(tmp.path()) .stdin(Stdio::piped()) .stdout(Stdio::piped()) diff --git a/tests/integration.rs b/tests/integration.rs --- a/tests/integration.rs +++ b/tests/integration.rs @@ -2070,6 +2083,18 @@ echo: stderr: "echo DEFAULT\n", } +test! { + name: dotenv_env_var_override, + justfile: " +# +echo: + echo $DOTENV_KEY + ", + env: {"DOTENV_KEY": "not-the-dotenv-value",}, + stdout: "not-the-dotenv-value\n", + stderr: "echo $DOTENV_KEY\n", +} + test! { name: invalid_escape_sequence_message, justfile: r#"
Bug: env var priority: CLI value should override all others I would expect the order of env var priority would be (highest taken first): 1. CLI 2. `.env` file 3. Defaults from `env_var_or_default(...)` However the `.env` value clobbers the value given on the CLI Reproduction: `justfile`: ``` export SOME_VAL := env_var_or_default("SOME_VAL", "default") @test: echo "SOME_VAL=$SOME_VAL" ``` `.env`: ``` SOME_VAL=from-file ``` `just` gives: ``` SOME_VAL=from-file ``` which is to be expected, but `SOME_VAL=fromcli just` gives: ``` SOME_VAL=from-file ``` instead of: ``` SOME_VAL=fromcli ``` So there's no way to set env vars on the CLI if they are already set in the `.env` file.
Thanks for opening this issue! I agree that this is a bug. I believe that most dotenv implementations don't set environment variables that are already present in the environment. As a workaround for now, what does `just SOME_VAL=fromcli` do? Just supports arguments of the form `NAME=VALUE`, which can override justfile variables. (Note that it's after `just`, not before it, so it's interpreted by Just and not the shell.) Ah! `just SOME_VAL=fromcli` gives: ``` SOME_VAL=fromcli ``` so that is a workaround! I think the way to fix this is to modify `load_dotenv` in `load_dotenv.rs` to remove anything from the dotenv dictionary that is set in the environment. This would be a breaking change, so I'm eager to hear from people who use `.env` files with just whether or not this would be desirable. In my opinion the change is very welcome, even if it's breaking. I think most people would probably expect that this behavior already is implemented in the first place too I'm definitely leaning towards doing this. Is this the norm across other .env implementations? I think it is, but I don't have a ton of experience with any of them. so i did some digging to be certain, and it looks to be the norm for [node's dotenv](https://www.npmjs.com/package/dotenv) they don't override existing variables > We will never modify any environment variables that have already been set. In particular, if there is a variable in your .env file which collides with one that already exists in your environment, then that variable will be skipped. 
for [go's godotenv](https://pkg.go.dev/github.com/joho/godotenv?tab=doc#Load), their Load function (which is used in the usage example), has the same behavior (they also have an Overload function that does override though) > It's important to note that it WILL NOT OVERRIDE an env variable that already exists - consider the .env file to set dev vars or sensible defaults and the same for [ruby](https://github.com/bkeepers/dotenv#why-is-it-not-overriding-existing-env-variables) > By default, it won't overwrite existing environment variables as dotenv assumes the deployment environment has more knowledge about configuration than the application does. To overwrite existing environment variables you can use Dotenv.overload. so in all these cases if the user supplies the variable themselves, it "wins" over what is in the dotfile
2020-07-17T12:30:27
0.6
cea4a1608199672934f9db1b1b2c11f0feb3fd2f
[ "dotenv_env_var_override" ]
[ "analyzer::tests::duplicate_alias", "analyzer::tests::duplicate_variable", "analyzer::tests::duplicate_parameter", "analyzer::tests::alias_shadows_recipe_before", "analyzer::tests::alias_shadows_recipe_after", "analyzer::tests::extra_whitespace", "analyzer::tests::duplicate_variadic_parameter", "analy...
[ "config::tests::help", "env_var_functions" ]
[]
casey/just
2,497
casey__just-2497
[ "2496" ]
cdf104bf8cce5faa1e8649856cf747d6584d6e36
diff --git a/README.md b/README.md --- a/README.md +++ b/README.md @@ -1986,6 +1986,7 @@ change their behavior. | `[no-cd]`<sup>1.9.0</sup> | recipe | Don't change directory before executing recipe. | | `[no-exit-message]`<sup>1.7.0</sup> | recipe | Don't print an error message if recipe fails. | | `[no-quiet]`<sup>1.23.0</sup> | recipe | Override globally quiet recipes and always echo out the recipe. | +| `[openbsd]`<sup>master</sup> | recipe | Enable recipe on OpenBSD. | | `[positional-arguments]`<sup>1.29.0</sup> | recipe | Turn on [positional arguments](#positional-arguments) for this recipe. | | `[private]`<sup>1.10.0</sup> | alias, recipe | Make recipe, alias, or variable private. See [Private Recipes](#private-recipes). | | `[script]`<sup>1.33.0</sup> | recipe | Execute recipe as script. See [script recipes](#script-recipes) for more details. | diff --git a/crates-io-readme.md b/crates-io-readme.md --- a/crates-io-readme.md +++ b/crates-io-readme.md @@ -1,6 +1,7 @@ `just` is a handy way to save and run project-specific commands. 
-Commands are stored in a file called `justfile` or `Justfile` with syntax inspired by `make`: +Commands are stored in a file called `justfile` or `Justfile` with syntax +inspired by `make`: ```make build: diff --git a/src/attribute.rs b/src/attribute.rs --- a/src/attribute.rs +++ b/src/attribute.rs @@ -18,6 +18,7 @@ pub(crate) enum Attribute<'src> { NoCd, NoExitMessage, NoQuiet, + Openbsd, PositionalArguments, Private, Script(Option<Interpreter<'src>>), diff --git a/src/attribute.rs b/src/attribute.rs --- a/src/attribute.rs +++ b/src/attribute.rs @@ -36,6 +37,7 @@ impl AttributeDiscriminant { | Self::NoCd | Self::NoExitMessage | Self::NoQuiet + | Self::Openbsd | Self::PositionalArguments | Self::Private | Self::Unix diff --git a/src/attribute.rs b/src/attribute.rs --- a/src/attribute.rs +++ b/src/attribute.rs @@ -83,6 +85,7 @@ impl<'src> Attribute<'src> { AttributeDiscriminant::NoCd => Self::NoCd, AttributeDiscriminant::NoExitMessage => Self::NoExitMessage, AttributeDiscriminant::NoQuiet => Self::NoQuiet, + AttributeDiscriminant::Openbsd => Self::Openbsd, AttributeDiscriminant::PositionalArguments => Self::PositionalArguments, AttributeDiscriminant::Private => Self::Private, AttributeDiscriminant::Script => Self::Script({ diff --git a/src/attribute.rs b/src/attribute.rs --- a/src/attribute.rs +++ b/src/attribute.rs @@ -131,6 +134,7 @@ impl Display for Attribute<'_> { | Self::NoCd | Self::NoExitMessage | Self::NoQuiet + | Self::Openbsd | Self::PositionalArguments | Self::Private | Self::Script(None) diff --git a/src/recipe.rs b/src/recipe.rs --- a/src/recipe.rs +++ b/src/recipe.rs @@ -113,17 +113,19 @@ impl<'src, D> Recipe<'src, D> { } pub(crate) fn enabled(&self) -> bool { - let windows = self.attributes.contains(&Attribute::Windows); let linux = self.attributes.contains(&Attribute::Linux); let macos = self.attributes.contains(&Attribute::Macos); + let openbsd = self.attributes.contains(&Attribute::Openbsd); let unix = self.attributes.contains(&Attribute::Unix); + 
let windows = self.attributes.contains(&Attribute::Windows); - (!windows && !linux && !macos && !unix) - || (cfg!(target_os = "windows") && windows) + (!windows && !linux && !macos && !openbsd && !unix) || (cfg!(target_os = "linux") && (linux || unix)) || (cfg!(target_os = "macos") && (macos || unix)) - || (cfg!(windows) && windows) + || (cfg!(target_os = "openbsd") && (openbsd || unix)) + || (cfg!(target_os = "windows") && windows) || (cfg!(unix) && unix) + || (cfg!(windows) && windows) } fn print_exit_message(&self) -> bool {
diff --git a/README.md b/README.md --- a/README.md +++ b/README.md @@ -48,9 +48,9 @@ Yay, all your tests passed! [`make`'s complexity and idiosyncrasies](#what-are-the-idiosyncrasies-of-make-that-just-avoids). No need for `.PHONY` recipes! -- Linux, MacOS, and Windows are supported with no additional dependencies. - (Although if your system doesn't have an `sh`, you'll need to - [choose a different shell](#shell).) +- Linux, MacOS, Windows, and other reasonable unices are supported with no + additional dependencies. (Although if your system doesn't have an `sh`, + you'll need to [choose a different shell](#shell).) - Errors are specific and informative, and syntax errors are reported along with their source context. diff --git a/crates-io-readme.md b/crates-io-readme.md --- a/crates-io-readme.md +++ b/crates-io-readme.md @@ -15,8 +16,9 @@ test TEST: build ./test --test {{TEST}} ``` -`just` produces detailed error messages and avoids `make`'s idiosyncrasies, so debugging a justfile is easier and less surprising than debugging a makefile. +`just` produces detailed error messages and avoids `make`'s idiosyncrasies, so +debugging a justfile is easier and less surprising than debugging a makefile. -It works on Linux, MacOS, and Windows. +It works on all operating systems supported by Rust. Read more on [GitHub](https://github.com/casey/just). 
diff --git a/tests/attributes.rs b/tests/attributes.rs --- a/tests/attributes.rs +++ b/tests/attributes.rs @@ -6,9 +6,10 @@ fn all() { .justfile( " [macos] - [windows] [linux] + [openbsd] [unix] + [windows] [no-exit-message] foo: exit 1 diff --git a/tests/attributes.rs b/tests/attributes.rs --- a/tests/attributes.rs +++ b/tests/attributes.rs @@ -48,7 +49,7 @@ fn multiple_attributes_one_line() { Test::new() .justfile( " - [macos, windows,linux] + [macos,windows,linux,openbsd] [no-exit-message] foo: exit 1 diff --git a/tests/attributes.rs b/tests/attributes.rs --- a/tests/attributes.rs +++ b/tests/attributes.rs @@ -64,7 +65,7 @@ fn multiple_attributes_one_line_error_message() { Test::new() .justfile( " - [macos, windows linux] + [macos,windows linux,openbsd] [no-exit-message] foo: exit 1 diff --git a/tests/attributes.rs b/tests/attributes.rs --- a/tests/attributes.rs +++ b/tests/attributes.rs @@ -73,10 +74,10 @@ fn multiple_attributes_one_line_error_message() { .stderr( " error: Expected ']', ':', ',', or '(', but found identifier - ——▶ justfile:1:17 + ——▶ justfile:1:16 │ - 1 │ [macos, windows linux] - │ ^^^^^ + 1 │ [macos,windows linux,openbsd] + │ ^^^^^ ", ) .status(1) diff --git a/tests/attributes.rs b/tests/attributes.rs --- a/tests/attributes.rs +++ b/tests/attributes.rs @@ -88,7 +89,7 @@ fn multiple_attributes_one_line_duplicate_check() { Test::new() .justfile( " - [macos, windows, linux] + [macos, windows, linux, openbsd] [linux] foo: exit 1 diff --git a/tests/os_attributes.rs b/tests/os_attributes.rs --- a/tests/os_attributes.rs +++ b/tests/os_attributes.rs @@ -47,6 +47,10 @@ fn os() { [linux] foo: echo quxx + + [openbsd] + foo: + echo bob ", ) .stdout(if cfg!(target_os = "macos") { diff --git a/tests/os_attributes.rs b/tests/os_attributes.rs --- a/tests/os_attributes.rs +++ b/tests/os_attributes.rs @@ -55,6 +59,8 @@ fn os() { "baz\n" } else if cfg!(target_os = "linux") { "quxx\n" + } else if cfg!(target_os = "openbsd") { + "bob\n" } else { panic!("unexpected 
os family") }) diff --git a/tests/os_attributes.rs b/tests/os_attributes.rs --- a/tests/os_attributes.rs +++ b/tests/os_attributes.rs @@ -64,6 +70,8 @@ fn os() { "echo baz\n" } else if cfg!(target_os = "linux") { "echo quxx\n" + } else if cfg!(target_os = "openbsd") { + "echo bob\n" } else { panic!("unexpected os family") }) diff --git a/tests/os_attributes.rs b/tests/os_attributes.rs --- a/tests/os_attributes.rs +++ b/tests/os_attributes.rs @@ -75,10 +83,11 @@ fn all() { Test::new() .justfile( " - [macos] - [windows] [linux] + [macos] + [openbsd] [unix] + [windows] foo: echo bar ",
Feature request: add attribute `openbsd` Trying to compile a recent version of [optimism](https://github.com/ethereum-optimism/optimism/blob/op-node/v1.10.0/CONTRIBUTING.md#development-quick-start) on OpenBSD/adJ 7.6beta1 I found that it uses `just`. On OpenBSD/adJ 7.6beta1, it was straightforward to compile `just` and run its tests except for two tests that depend on attributes for the supported platforms (`linux`, `windows`, `macos`): ```sh % uname -a OpenBSD selah.pasosdeJesus.org 7.6 APRENDIENDODEJESUS.MP#1 amd64 % rustc --version rustc 1.81.0 (eeb90cda1 2024-09-04) (built from a source tarball) % git clone git@github.com:casey/just.git/ ... % cargo build ... % cargo install --path . ... % RUST_BACKTRACE=1 LANG=POSIX cargo test ... failures: ---- attributes::multiple_attributes_one_line stdout ---- Bad stderr: Diff < left / right > : <error: Justfile contains no recipes. >exit 1 thread 'attributes::multiple_attributes_one_line' panicked at tests/attributes.rs:59:6: Output mismatch. stack backtrace: 0: rust_begin_unwind 1: core::panicking::panic_fmt 2: integration::test::Test::run at ./tests/test.rs:293:7 3: integration::attributes::multiple_attributes_one_line at ./tests/attributes.rs:48:3 4: integration::attributes::multiple_attributes_one_line::{{closure}} at ./tests/attributes.rs:47:34 5: core::ops::function::FnOnce::call_once at /usr/obj/ports/rust-1.81.0/rustc-1.81.0-src/library/core/src/ops/function.rs:250:5 note: Some details are omitted, run with `RUST_BACKTRACE=full` for a verbose backtrace. 
---- os_attributes::os stdout ---- thread 'os_attributes::os' panicked at tests/os_attributes.rs:59:7: unexpected os family stack backtrace: 0: rust_begin_unwind 1: core::panicking::panic_fmt 2: integration::os_attributes::os at ./tests/os_attributes.rs:59:7 3: integration::os_attributes::os::{{closure}} at ./tests/os_attributes.rs:35:8 4: core::ops::function::FnOnce::call_once at /usr/obj/ports/rust-1.81.0/rustc-1.81.0-src/library/core/src/ops/function.rs:250:5 note: Some details are omitted, run with `RUST_BACKTRACE=full` for a verbose backtrace. failures: attributes::multiple_attributes_one_line os_attributes::os test result: FAILED. 850 passed; 2 failed; 7 ignored; 0 measured; 0 filtered out; finished in 20.05s error: test failed, to rerun pass `--test integration` ``` Since Rust supports OpenBSD: <https://doc.rust-lang.org/rustc/platform-support/openbsd.html>, IMHO it makes sense to add initial support for OpenBSD in `just` by adding an attribute `openbsd` to enable recipes on OpenBSD.
2024-12-02T08:24:32
1.37
cdf104bf8cce5faa1e8649856cf747d6584d6e36
[ "attributes::all", "attributes::multiple_attributes_one_line_duplicate_check", "attributes::multiple_attributes_one_line", "os_attributes::all", "os_attributes::os" ]
[ "analyzer::tests::alias_shadows_recipe_after", "analyzer::tests::duplicate_alias", "analyzer::tests::alias_shadows_recipe_before", "analyzer::tests::duplicate_variable", "analyzer::tests::duplicate_variadic_parameter", "analyzer::tests::duplicate_parameter", "analyzer::tests::duplicate_recipe", "analy...
[ "functions::env_var_functions" ]
[]
casey/just
2,462
casey__just-2462
[ "1702" ]
eb6e3741b8b02f53b22c4f684cf58587e2af42f5
diff --git a/README.md b/README.md --- a/README.md +++ b/README.md @@ -1851,6 +1851,24 @@ for details. `requirement`, e.g., `">=0.1.0"`, returning `"true"` if so and `"false"` otherwise. +#### Style + +- `style(name)`<sup>master</sup> - Return a named terminal display attribute + escape sequence used by `just`. Unlike terminal display attribute escape + sequence constants, which contain standard colors and styles, `style(name)` + returns an escape sequence used by `just` itself, and can be used to make + recipe output match `just`'s own output. + + Recognized values for `name` are `'command'`, for echoed recipe lines, + `error`, and `warning`. + + For example, to style an error message: + + ```just + scary: + @echo '{{ style("error") }}OH NO{{ NORMAL }}' + ``` + ##### XDG Directories<sup>1.23.0</sup> These functions return paths to user-specific directories for things like diff --git a/src/function.rs b/src/function.rs --- a/src/function.rs +++ b/src/function.rs @@ -98,6 +98,7 @@ pub(crate) fn get(name: &str) -> Option<Function> { "snakecase" => Unary(snakecase), "source_directory" => Nullary(source_directory), "source_file" => Nullary(source_file), + "style" => Unary(style), "titlecase" => Unary(titlecase), "trim" => Unary(trim), "trim_end" => Unary(trim_end), diff --git a/src/function.rs b/src/function.rs --- a/src/function.rs +++ b/src/function.rs @@ -623,6 +624,20 @@ fn source_file(context: Context) -> FunctionResult { }) } +fn style(context: Context, s: &str) -> FunctionResult { + match s { + "command" => Ok( + Color::always() + .command(context.evaluator.context.config.command_color) + .prefix() + .to_string(), + ), + "error" => Ok(Color::always().error().prefix().to_string()), + "warning" => Ok(Color::always().warning().prefix().to_string()), + _ => Err(format!("unknown style: `{s}`")), + } +} + fn titlecase(_context: Context, s: &str) -> FunctionResult { Ok(s.to_title_case()) }
diff --git a/src/color.rs b/src/color.rs --- a/src/color.rs +++ b/src/color.rs @@ -35,7 +35,6 @@ impl Color { Self::default() } - #[cfg(test)] pub(crate) fn always() -> Self { Self { use_color: UseColor::Always, diff --git a/tests/functions.rs b/tests/functions.rs --- a/tests/functions.rs +++ b/tests/functions.rs @@ -1183,3 +1183,78 @@ bar: .args(["foo", "bar"]) .run(); } + +#[test] +fn style_command_default() { + Test::new() + .justfile( + r#" + foo: + @echo '{{ style("command") }}foo{{NORMAL}}' + "#, + ) + .stdout("\x1b[1mfoo\x1b[0m\n") + .run(); +} + +#[test] +fn style_command_non_default() { + Test::new() + .justfile( + r#" + foo: + @echo '{{ style("command") }}foo{{NORMAL}}' + "#, + ) + .args(["--command-color", "red"]) + .stdout("\x1b[1;31mfoo\x1b[0m\n") + .run(); +} + +#[test] +fn style_error() { + Test::new() + .justfile( + r#" + foo: + @echo '{{ style("error") }}foo{{NORMAL}}' + "#, + ) + .stdout("\x1b[1;31mfoo\x1b[0m\n") + .run(); +} + +#[test] +fn style_warning() { + Test::new() + .justfile( + r#" + foo: + @echo '{{ style("warning") }}foo{{NORMAL}}' + "#, + ) + .stdout("\x1b[1;33mfoo\x1b[0m\n") + .run(); +} + +#[test] +fn style_unknown() { + Test::new() + .justfile( + r#" + foo: + @echo '{{ style("hippo") }}foo{{NORMAL}}' + "#, + ) + .stderr( + r#" + error: Call to function `style` failed: unknown style: `hippo` + ——▶ justfile:2:13 + │ + 2 │ @echo '{{ style("hippo") }}foo{{NORMAL}}' + │ ^^^^^ + "#, + ) + .status(EXIT_FAILURE) + .run(); +}
Get color code `just` will use for echoed recipe commands? Sometimes in shebang recipes it is useful to be able to echo some individual commands before running, the same as `just` echoes commands during a non-shebang recipe. Prior to https://github.com/casey/just/pull/1670 , simply using bold color when echoing those commands would do the job. Now, it's no longer known in advance how echoed commands would look. Could a `just` function to help this please be added? Perhaps it could wrap a string in color codes same as `just` would wrap an echoed recipe line? Or, alternatively, perhaps it could take no arguments and return the color code `just` would use for echoing recipe lines? Thanks :slightly_smiling_face:
+1 - I am finding myself using bash scripts more and more in justfiles - I'd like to have a way to echo commands and match just. Currently I have a `just echo` recipe that just assumes that the command is bold: ```just [private] echo $text: #!/usr/bin/env bash bold=$(tput bold) normal=$(tput sgr0) echo "${bold}$text${normal}" ``` We could expose color codes as constants, like we do with `HEX` and friends. So you could do: ``` foo: echo {{GREEN}}whatever{{NORMAL}} ``` Colors could be exposed both by their color name, like `GREEN`, and by their role in just's color formatting, like `ERROR`. Constants cannot be removed after they are added, so the names would need careful thought. > We could expose color codes as constants, (Tag https://github.com/casey/just/issues/1645 ) > Colors could be exposed both by their color name, like `GREEN`, and by their role in just's color formatting, like `ERROR`. Interesting idea. This looks easier to use than a function. Would it be problematic that constants would contain the raw `\x1B` character, which may need to be escaped where it's used? Although if the raw `\x1B` character does works fine in most cases, there is `replace(GREEN, "\u{1B}", '\x1B')` for the times when it'd be an issue, which would also ensure the correct escape sequence for whatever specific context :thinking: I just did a little testing, and they do need to be quoted, but not escaped, since the shell interprets both `[` and `;` as special characters. So you need to do: ```just foo: echo 'foo {{CYAN}}bar{{NORMAL}} baz' ``` There are also other useful escape sequences, like clearing the screen, bold text, and cursor manipulation. Foreground and background can also be manipulated independently. There are a few different kinds of color codes. There are eight standard colors (red, green, black, etc), eight high-intensity colors, 216 colors in a sort of color cube, 24 grayscale colors in steps, and 24-bit colors, which are arbitrary RGB values. 
Maybe we should have constants for the eight standard colors, as well as things like bold, normal style, and clear screen, and a function which can be used for constructing arbitrarily complex escape sequences. So this: ``` style('bold', 'fg:red', 'bg:#00f') ``` Would produce the escape sequence: ``` \x1b[1;31;48;2;0;0;255m ``` This is bold (1), red foreground color (31), blue background color as 24-bit color (48 is set background color, 2 introduces a 24-bit RGB color code with arguments 0, 0, and 255). In addition to the standard color codes, a few more "semantic" color codes, which match `just` would be useful. These would match common colors used in `just` output. `WARNING` and `ERROR`, which are bold yellow and red respectively. Check out #2461. I added constants for color escape sequences. This is nice, but to actually address this issue, we need to make it possible to get the escape sequences that `just` uses for echoing commands. I think we should do this with the `style()` function, and have it support semantic values, like `style('command')`, `style('warning')`, `style('error')`, which would return the escape sequence that `just` uses. @laniakea64 What do you think about the above for getting semantic escape sequences which may change dynamically based on config and settings, or are semantic in nature, like `warning`? > @laniakea64 What do you think about the above for getting semantic escape sequences which may change dynamically based on config and settings, or are semantic in nature, like `warning`? So to be clear, applying this proposal to the original use case would look like this? - ``` foo: #!/bin/bash echo '{{style('command')}}cat /some/file.txt{{NORMAL}}' >&2 cat /some/file.txt ``` This sounds good to me :+1:
2024-11-12T09:10:18
1.36
eb6e3741b8b02f53b22c4f684cf58587e2af42f5
[ "functions::style_unknown", "functions::style_command_default", "functions::style_command_non_default", "functions::style_error", "functions::style_warning" ]
[ "analyzer::tests::duplicate_variadic_parameter", "analyzer::tests::alias_shadows_recipe_after", "analyzer::tests::duplicate_recipe", "analyzer::tests::alias_shadows_recipe_before", "analyzer::tests::duplicate_variable", "analyzer::tests::duplicate_alias", "analyzer::tests::extra_whitespace", "analyzer...
[ "functions::env_var_functions" ]
[]
casey/just
2,353
casey__just-2353
[ "2350" ]
beef0e1a0fd1ac3f5a62212947408d32e1fd9d6f
diff --git a/src/analyzer.rs b/src/analyzer.rs --- a/src/analyzer.rs +++ b/src/analyzer.rs @@ -52,10 +52,10 @@ impl<'src> Analyzer<'src> { -> CompileResult<'src> { if let Some((first_type, original)) = definitions.get(name.lexeme()) { if !(*first_type == second_type && duplicates_allowed) { - let (original, redefinition) = if name.line < original.line { - (name, *original) + let ((first_type, second_type), (original, redefinition)) = if name.line < original.line { + ((second_type, *first_type), (name, *original)) } else { - (*original, name) + ((*first_type, second_type), (*original, name)) }; return Err(redefinition.token.error(Redefinition {
diff --git a/src/analyzer.rs b/src/analyzer.rs --- a/src/analyzer.rs +++ b/src/analyzer.rs @@ -383,7 +383,7 @@ mod tests { line: 2, column: 6, width: 3, - kind: Redefinition { first_type: "alias", second_type: "recipe", name: "foo", first: 0 }, + kind: Redefinition { first_type: "recipe", second_type: "alias", name: "foo", first: 0 }, } analysis_error! { diff --git a/tests/error_messages.rs b/tests/error_messages.rs --- a/tests/error_messages.rs +++ b/tests/error_messages.rs @@ -109,3 +109,21 @@ fn file_paths_not_in_subdir_are_absolute() { ) .run(); } + +#[test] +fn redefinition_errors_properly_swap_types() { + Test::new() + .write("foo.just", "foo:") + .justfile("foo:\n echo foo\n\nmod foo 'foo.just'") + .status(EXIT_FAILURE) + .stderr( + " +error: Recipe `foo` defined on line 1 is redefined as a module on line 4 + ——▶ justfile:4:5 + │ +4 │ mod foo 'foo.just' + │ ^^^ +", + ) + .run(); +}
Bug in redefinition compile error reporting While working on #2344 I noticed that the error reporting for redefinition compile errors was not properly swapping types in the messaging when the redefinition is between two types. Examples: #### Aliases ```just foo: echo foo alias foo := bar bar: echo bar ``` ``` error: Alias `foo` defined on line 1 is redefined as a recipe on line 4 ——▶ just-test:4:7 │ 4 │ alias foo := bar │ ^^^ ``` #### Modules ```just foo: echo foo mod foo "foo.just" ``` ``` error: Module `foo` defined on line 1 is redefined as a recipe on line 4 ——▶ just-test:4:5 │ 4 │ mod foo "foo.just" │ ^^^ ``` Should I pull the relevant code changes from #2344 and put a new PR in addressing the bug?
Thanks for the report! > Should I pull the relevant code changes from https://github.com/casey/just/pull/2344 and put a new PR in addressing the bug? Yes, that would be awesome!
2024-09-07T23:16:08
1.35
90ae09bb8495bf132695a69369ba41f7305ad5e0
[ "analyzer::tests::alias_shadows_recipe_after", "error_messages::redefinition_errors_properly_swap_types" ]
[ "analyzer::tests::duplicate_parameter", "analyzer::tests::duplicate_variadic_parameter", "analyzer::tests::duplicate_variable", "analyzer::tests::duplicate_alias", "analyzer::tests::alias_shadows_recipe_before", "analyzer::tests::duplicate_recipe", "analyzer::tests::extra_whitespace", "analyzer::tests...
[ "functions::env_var_functions" ]
[]
casey/just
2,329
casey__just-2329
[ "2327" ]
10ebaecadbf81d7995b5b21179c42db92a2a52f7
diff --git a/src/subcommand.rs b/src/subcommand.rs --- a/src/subcommand.rs +++ b/src/subcommand.rs @@ -147,7 +147,7 @@ impl Subcommand { }; match Self::run_inner(config, loader, arguments, overrides, &search) { - Err((err @ Error::UnknownRecipe { .. }, true)) => { + Err((err @ (Error::UnknownRecipe { .. } | Error::UnknownSubmodule { .. }), true)) => { match search.justfile.parent().unwrap().parent() { Some(parent) => { unknown_recipes_errors.get_or_insert(err);
diff --git a/tests/fallback.rs b/tests/fallback.rs --- a/tests/fallback.rs +++ b/tests/fallback.rs @@ -362,3 +362,15 @@ fn stop_fallback_when_fallback_is_false() { .status(EXIT_FAILURE) .run(); } + +#[test] +fn works_with_modules() { + Test::new() + .write("bar/justfile", "set fallback := true") + .write("foo.just", "baz:\n @echo BAZ") + .justfile("mod foo") + .args(["foo::baz"]) + .current_dir("bar") + .stdout("BAZ\n") + .run(); +}
Fallback for submodules @casey Thanks for adding submodule support, it's working great. Currently, when you invoke a submodule recipe within a directory containing a justfile with `set fallback` but no submodules defined (the requested submodule is defined in a parent justfile), the following error is encountered: ```pwsh ❯ just myapp::start error: Justfile does not contain submodule `myapp` ``` It would be nice if the `fallback` setting (or another one, e.g. `fallback-submodules`) would allow `just` to look for a requested submodule in parent justfiles. Sometimes you are in a nested folder structure, and you quickly want to call a recipe defined in the root justfile. Since fallback does not work for submodules, the workaround is to add the submodule to the justfile at your level in the hierarchy (even though the recipe you want to run is unrelated to that justfile), or to cd back to the root and run the recipe from there, or to add a recipe in the root justfile that calls into its own submodule definition. ### Example This is what happens currently: ```pwsh ❯ cd proj/foo/bar ❯ just --version just 1.34.0 ❯ just hello hello ❯ just myapp::start error: Justfile does not contain submodule `myapp` ``` This is what should happen: ```pwsh ❯ cd proj/foo/bar ❯ just hello hello ❯ just myapp::start starting ``` #### proj/justfile ```justfile mod myapp 'justfile-myapp.just' default: just --list ``` #### proj/justfile-myapp.just ```justfile start: @echo "starting" ``` #### proj/foo/bar/justfile ```justfile set fallback # set fallback-submodules hello: @echo "hello" ```
2024-08-29T06:09:05
1.34
10ebaecadbf81d7995b5b21179c42db92a2a52f7
[ "fallback::works_with_modules" ]
[ "analyzer::tests::duplicate_alias", "analyzer::tests::duplicate_variadic_parameter", "analyzer::tests::duplicate_parameter", "analyzer::tests::duplicate_variable", "analyzer::tests::alias_shadows_recipe_after", "analyzer::tests::alias_shadows_recipe_before", "analyzer::tests::duplicate_recipe", "analy...
[ "functions::env_var_functions" ]
[]
casey/just
2,151
casey__just-2151
[ "2146" ]
637023e86fda2005a98c022816e15ee6545f86ee
diff --git a/README.md b/README.md --- a/README.md +++ b/README.md @@ -1686,6 +1700,7 @@ Recipes may be annotated with attributes that change their behavior. | `[no-cd]`<sup>1.9.0</sup> | Don't change directory before executing recipe. | | `[no-exit-message]`<sup>1.7.0</sup> | Don't print an error message if recipe fails. | | `[no-quiet]`<sup>1.23.0</sup> | Override globally quiet recipes and always echo out the recipe. | +| `[positional-arguments]`<sup>master</sup> | Turn on [positional arguments](#positional-arguments) for this recipe. | | `[private]`<sup>1.10.0</sup> | See [Private Recipes](#private-recipes). | | `[unix]`<sup>1.8.0</sup> | Enable recipe on Unixes. (Includes MacOS). | | `[windows]`<sup>1.8.0</sup> | Enable recipe on Windows. | diff --git a/README.md b/README.md --- a/README.md +++ b/README.md @@ -3362,9 +3377,9 @@ foo argument: touch "$1" ``` -This defeats `just`'s ability to catch typos, for example if you type `$2`, but -works for all possible values of `argument`, including those with double -quotes. +This defeats `just`'s ability to catch typos, for example if you type `$2` +instead of `$1`, but works for all possible values of `argument`, including +those with double quotes. 
#### Exported Arguments diff --git a/src/attribute.rs b/src/attribute.rs --- a/src/attribute.rs +++ b/src/attribute.rs @@ -17,6 +17,7 @@ pub(crate) enum Attribute<'src> { NoCd, NoExitMessage, NoQuiet, + PositionalArguments, Private, Unix, Windows, diff --git a/src/attribute.rs b/src/attribute.rs --- a/src/attribute.rs +++ b/src/attribute.rs @@ -32,6 +33,7 @@ impl AttributeDiscriminant { | Self::NoCd | Self::NoExitMessage | Self::NoQuiet + | Self::PositionalArguments | Self::Private | Self::Unix | Self::Windows => 0..=0, diff --git a/src/attribute.rs b/src/attribute.rs --- a/src/attribute.rs +++ b/src/attribute.rs @@ -78,6 +80,7 @@ impl<'src> Attribute<'src> { NoCd => Self::NoCd, NoExitMessage => Self::NoExitMessage, NoQuiet => Self::NoQuiet, + PositionalArguments => Self::PositionalArguments, Private => Self::Private, Unix => Self::Unix, Windows => Self::Windows, diff --git a/src/attribute.rs b/src/attribute.rs --- a/src/attribute.rs +++ b/src/attribute.rs @@ -98,6 +101,7 @@ impl<'src> Attribute<'src> { | Self::NoCd | Self::NoExitMessage | Self::NoQuiet + | Self::PositionalArguments | Self::Private | Self::Unix | Self::Windows => None, diff --git a/src/recipe.rs b/src/recipe.rs --- a/src/recipe.rs +++ b/src/recipe.rs @@ -106,6 +106,10 @@ impl<'src, D> Recipe<'src, D> { !self.private && !self.attributes.contains(&Attribute::Private) } + pub(crate) fn takes_positional_arguments(&self, settings: &Settings) -> bool { + settings.positional_arguments || self.attributes.contains(&Attribute::PositionalArguments) + } + pub(crate) fn change_directory(&self) -> bool { !self.attributes.contains(&Attribute::NoCd) } diff --git a/src/recipe.rs b/src/recipe.rs --- a/src/recipe.rs +++ b/src/recipe.rs @@ -263,7 +267,7 @@ impl<'src, D> Recipe<'src, D> { cmd.arg(command); - if context.settings.positional_arguments { + if self.takes_positional_arguments(context.settings) { cmd.arg(self.name.lexeme()); cmd.args(positional); } diff --git a/src/recipe.rs b/src/recipe.rs --- 
a/src/recipe.rs +++ b/src/recipe.rs @@ -415,7 +419,7 @@ impl<'src, D> Recipe<'src, D> { output_error, })?; - if context.settings.positional_arguments { + if self.takes_positional_arguments(context.settings) { command.args(positional); }
diff --git a/README.md b/README.md --- a/README.md +++ b/README.md @@ -996,6 +996,20 @@ $ just test foo "bar baz" - bar baz ``` +Positional arguments may also be turned on on a per-recipe basis with the +`[positional-arguments]` attribute<sup>master</sup>: + +```just +[positional-arguments] +@foo bar: + echo $0 + echo $1 +``` + +Note that PowerShell does not handle positional arguments in the same way as +other shells, so turning on positional arguments will likely break recipes that +use PowerShell. + #### Shell The `shell` setting controls the command used to invoke recipe lines and diff --git a/tests/positional_arguments.rs b/tests/positional_arguments.rs --- a/tests/positional_arguments.rs +++ b/tests/positional_arguments.rs @@ -24,6 +24,31 @@ test! { "#, } +test! { + name: linewise_with_attribute, + justfile: r#" + [positional-arguments] + foo bar baz: + echo $0 + echo $1 + echo $2 + echo "$@" + "#, + args: ("foo", "hello", "goodbye"), + stdout: " + foo + hello + goodbye + hello goodbye + ", + stderr: r#" + echo $0 + echo $1 + echo $2 + echo "$@" + "#, +} + test! { name: variadic_linewise, justfile: r#" diff --git a/tests/positional_arguments.rs b/tests/positional_arguments.rs --- a/tests/positional_arguments.rs +++ b/tests/positional_arguments.rs @@ -51,6 +76,18 @@ test! { stdout: "hello\n", } +test! { + name: shebang_with_attribute, + justfile: " + [positional-arguments] + foo bar: + #!/bin/sh + echo $1 + ", + args: ("foo", "hello"), + stdout: "hello\n", +} + test! { name: variadic_shebang, justfile: r#"
`set positional-arguments` appends target at the end of the command on Windows but not on Linux Hi ! The title says it all. Here is a justfile to reproduce the issue. ```just # Enable positional args set positional-arguments set windows-shell := ["powershell.exe", "-NoLogo", "-Command"] set shell := ["bash", "-c"] target: echo hey ``` On Linux (my wsl) with just version 1.25.2, I get the following result ``` echo hey hey ``` and on Windows Powershell with just version 1.28.0, I get the following result: ``` echo hey hey target ``` Removing `set positional-arguments` makes it behave the same way in both Windows and Linux. Did I understand something wrong ? Thanks for any help !
Positional arguments will include the name of the recipe being run before arguments, since bash expects the name of the executable to be the first argument, with arguments afterwards being the actual positional arguments. So on Windows, I think it's running something like: ``` powershell.exe -NoLogo -Command 'echo hey' target ``` From a bunch of different issues over the years, I think the way powershell handles arguments is super weird, and positional arguments just don't really work with powershell. I don't know what the best workaround is, or if maybe the best answer is just to document that powershell and positional arguments aren't a good match. That's exactly what happens ! The issue here is that a perfectly fine script with bash does not work on powershell when positional arguments are enabled. That is very annoying. On our side, we rely on positionnal arguments so we cannot completely disable it. However, when we use powershell, we do not need positionnal arguments. We need it for other recipes. I can give you a few suggestions: - allow to set settings from the command line - allow to use dynamic conditions on settings - add a setting to remove the file name ($0) from the command sent to shell Do you think something is doable ? Thanks! We could allow `positional-arguments` to be set from the command line. However, I'm a bit hesitant to make settings which recipes depend on to operate to be set on the command line. I.e., if a setting is obligatory for a recipe to work correctly, it should be set in the justfile, so that it's always set. An argument against this is the fact that the `--shell` and related arguments can be set from the command line. So maybe the best option is a `[positional-arguments]` attribute which can be set on recipes that require it, so you can put it just on the recipes that require it. I think that the attribute is a good compromise until powershell is less weird. 
For your use case, would it be better to have an attribute that turns off positional arguments, or one that turns on positional arguments? I.e., do you have more recipes that use positional arguments, or more recipes that use powershell and so will break if you use positional arguments? It feels more natural to enable it with the addition of the attribute.
2024-06-14T03:27:11
1.28
637023e86fda2005a98c022816e15ee6545f86ee
[ "positional_arguments::linewise_with_attribute", "positional_arguments::shebang_with_attribute", "readme::readme" ]
[ "analyzer::tests::duplicate_parameter", "analyzer::tests::duplicate_recipe", "analyzer::tests::duplicate_alias", "analyzer::tests::alias_shadows_recipe_after", "analyzer::tests::duplicate_variable", "analyzer::tests::extra_whitespace", "analyzer::tests::alias_shadows_recipe_before", "analyzer::tests::...
[ "functions::env_var_functions" ]
[]
casey/just
2,128
casey__just-2128
[ "2123" ]
7c30fb4944add30f22dbf74404674234b76af0d8
diff --git a/src/error.rs b/src/error.rs --- a/src/error.rs +++ b/src/error.rs @@ -20,7 +20,7 @@ pub(crate) enum Error<'src> { token: Token<'src>, output_error: OutputError, }, - CacheDirIo { + RuntimeDirIo { io_error: io::Error, path: PathBuf, }, diff --git a/src/error.rs b/src/error.rs --- a/src/error.rs +++ b/src/error.rs @@ -287,9 +287,6 @@ impl<'src> ColorDisplay for Error<'src> { }?, OutputError::Utf8(utf8_error) => write!(f, "Backtick succeeded but stdout was not utf8: {utf8_error}")?, } - CacheDirIo { io_error, path } => { - write!(f, "I/O error in cache dir `{}`: {io_error}", path.display())?; - } ChooserInvoke { shell_binary, shell_arguments, chooser, io_error} => { let chooser = chooser.to_string_lossy(); write!(f, "Chooser `{shell_binary} {shell_arguments} {chooser}` invocation failed: {io_error}")?; diff --git a/src/error.rs b/src/error.rs --- a/src/error.rs +++ b/src/error.rs @@ -407,6 +404,9 @@ impl<'src> ColorDisplay for Error<'src> { write!(f, "Recipe `{recipe}` was not confirmed")?; } RegexCompile { source } => write!(f, "{source}")?, + RuntimeDirIo { io_error, path } => { + write!(f, "I/O error in runtime dir `{}`: {io_error}", path.display())?; + } Search { search_error } => Display::fmt(search_error, f)?, Shebang { recipe, command, argument, io_error} => { if let Some(argument) = argument { diff --git a/src/recipe.rs b/src/recipe.rs --- a/src/recipe.rs +++ b/src/recipe.rs @@ -353,9 +353,9 @@ impl<'src, D> Recipe<'src, D> { let tempdir = match &context.settings.tempdir { Some(tempdir) => tempdir_builder.tempdir_in(context.search.working_directory.join(tempdir)), None => { - if let Some(cache_dir) = dirs::cache_dir() { - let path = cache_dir.join("just"); - fs::create_dir_all(&path).map_err(|io_error| Error::CacheDirIo { + if let Some(runtime_dir) = dirs::runtime_dir() { + let path = runtime_dir.join("just"); + fs::create_dir_all(&path).map_err(|io_error| Error::RuntimeDirIo { io_error, path: path.clone(), })?;
diff --git a/tests/tempdir.rs b/tests/tempdir.rs --- a/tests/tempdir.rs +++ b/tests/tempdir.rs @@ -5,8 +5,8 @@ pub(crate) fn tempdir() -> TempDir { builder.prefix("just-test-tempdir"); - if let Some(cache_dir) = dirs::cache_dir() { - let path = cache_dir.join("just"); + if let Some(runtime_dir) = dirs::runtime_dir() { + let path = runtime_dir.join("just"); fs::create_dir_all(&path).unwrap(); builder.tempdir_in(path) } else {
Shebang recipes no longer works when HOME is read-only As the temporary directories are now created in `~/.cache` on Linux by default. Figured this out while trying to update Just in nixpkgs[^1]. [^1]: https://github.com/NixOS/nixpkgs/pull/316156 IMO, it would be nice to fall back gracefully to the platform's temporary directory when we could not use the cache directory. I am willing to open a PR with something like the following diff, if you find this acceptable. ```diff diff --git a/src/error.rs b/src/error.rs index 06b0edf..1cc6ab6 100644 --- a/src/error.rs +++ b/src/error.rs @@ -20,10 +20,6 @@ pub(crate) enum Error<'src> { token: Token<'src>, output_error: OutputError, }, - CacheDirIo { - io_error: io::Error, - path: PathBuf, - }, ChooserInvoke { shell_binary: String, shell_arguments: String, @@ -287,9 +283,6 @@ impl<'src> ColorDisplay for Error<'src> { }?, OutputError::Utf8(utf8_error) => write!(f, "Backtick succeeded but stdout was not utf8: {utf8_error}")?, } - CacheDirIo { io_error, path } => { - write!(f, "I/O error in cache dir `{}`: {io_error}", path.display())?; - } ChooserInvoke { shell_binary, shell_arguments, chooser, io_error} => { let chooser = chooser.to_string_lossy(); write!(f, "Chooser `{shell_binary} {shell_arguments} {chooser}` invocation failed: {io_error}")?; diff --git a/src/recipe.rs b/src/recipe.rs index 97dc847..c19910f 100644 --- a/src/recipe.rs +++ b/src/recipe.rs @@ -355,11 +355,10 @@ impl<'src, D> Recipe<'src, D> { None => { if let Some(cache_dir) = dirs::cache_dir() { let path = cache_dir.join("just"); - fs::create_dir_all(&path).map_err(|io_error| Error::CacheDirIo { - io_error, - path: path.clone(), - })?; - tempdir_builder.tempdir_in(path) + fs::create_dir_all(&path).map_or_else( + |_| tempdir_builder.tempdir(), + |_| tempdir_builder.tempdir_in(path), + ) } else { tempdir_builder.tempdir() } diff --git a/tests/tempdir.rs b/tests/tempdir.rs index a7d2a5f..867aa3d 100644 --- a/tests/tempdir.rs +++ b/tests/tempdir.rs @@ -7,8 +7,7 
@@ pub(crate) fn tempdir() -> TempDir { if let Some(cache_dir) = dirs::cache_dir() { let path = cache_dir.join("just"); - fs::create_dir_all(&path).unwrap(); - builder.tempdir_in(path) + fs::create_dir_all(&path).map_or_else(|_| builder.tempdir(), |_| builder.tempdir_in(path)) } else { builder.tempdir() } ```
Thank you for opening this issue! I changed the location shebang scripts are written to in #2067. The precipitating issue was that someone's `/tmp` directory was mounted `noexec`, so shebang scripts written there weren't runnable. I sort of had a hunch that #2067 would break something else and I didn't know what, but it makes sense that read-only home would be the culprit. Thinking out loud here, I'm wondering which is more common, having a read-only homedir or a noexec tmpdir? #2067 wasn't a particularly principled choice, I got a report of an issue with a particular configuration and made a change which fixed things for that configuration, but there is now a new issue with a different configuration, so it would be perfectly reasonable to just change it back. Also, I'm not entirely sure that the cache dir is the right way to put shebang scripts anyways. Shebang scripts aren't reused once they're written, so there's no benefit to persisting them in the cache dir, vs having them be cleared on restart, as they would be in the tempdir. Maybe the way to fix this is: 1. Allow users to set, via environment variable and flag, where just puts its tempfiles. This would let users with weird configurations put them wherever they want. 2. Probably default to the tmpdir instead of the cache dir, because that might be the better default. 3. Check if /tmp is noexec, and if so, fall back to the cache dir. What about using `dirs.runtime_dir()`? It is only returning something on Linux: > XDG_RUNTIME_DIR: the base directory relative to which user-specific non-essential runtime files and other file objects (such as sockets, named pipes, ...) should be stored. The directory MUST be owned by the user, who MUST be the only one having read and write access to it; its permissions MUST be 0700. 
See also https://github.com/systemd/systemd/issues/4081#issuecomment-252301790 It'd make sense to not use world writable temporary locations from a security perspective anyway, so maybe cache_dir could be used as a fallback for Windows and Mac. --- <details> <summary>Tested, that using the below diff is sufficient to fix build in nixpkgs</summary> ```diff diff --git a/src/recipe.rs b/src/recipe.rs index 97dc847..230f685 100644 --- a/src/recipe.rs +++ b/src/recipe.rs @@ -353,7 +353,7 @@ impl<'src, D> Recipe<'src, D> { let tempdir = match &context.settings.tempdir { Some(tempdir) => tempdir_builder.tempdir_in(context.search.working_directory.join(tempdir)), None => { - if let Some(cache_dir) = dirs::cache_dir() { + if let Some(cache_dir) = dirs::runtime_dir() { let path = cache_dir.join("just"); fs::create_dir_all(&path).map_err(|io_error| Error::CacheDirIo { io_error, diff --git a/tests/tempdir.rs b/tests/tempdir.rs index a7d2a5f..17a6f63 100644 --- a/tests/tempdir.rs +++ b/tests/tempdir.rs @@ -5,7 +5,7 @@ pub(crate) fn tempdir() -> TempDir { builder.prefix("just-test-tempdir"); - if let Some(cache_dir) = dirs::cache_dir() { + if let Some(cache_dir) = dirs::runtime_dir() { let path = cache_dir.join("just"); fs::create_dir_all(&path).unwrap(); builder.tempdir_in(path) ``` </details>
2024-06-06T03:00:29
1.27
7c30fb4944add30f22dbf74404674234b76af0d8
[ "choose::status_error" ]
[ "analyzer::tests::duplicate_variadic_parameter", "analyzer::tests::duplicate_parameter", "analyzer::tests::duplicate_alias", "analyzer::tests::alias_shadows_recipe_after", "analyzer::tests::duplicate_recipe", "analyzer::tests::duplicate_variable", "analyzer::tests::alias_shadows_recipe_before", "analy...
[ "functions::env_var_functions" ]
[]
casey/just
2,116
casey__just-2116
[ "2093" ]
d38c1add13580d163ba211b4ce6d6d0e9e18b14a
diff --git a/README.md b/README.md --- a/README.md +++ b/README.md @@ -812,6 +812,7 @@ foo: | `dotenv-filename` | string | - | Load a `.env` file with a custom name, if present. | | `dotenv-load` | boolean | `false` | Load a `.env` file, if present. | | `dotenv-path` | string | - | Load a `.env` file from a custom path and error if not present. Overrides `dotenv-filename`. | +| `dotenv-required` | boolean | `false` | Error if a `.env` file isn't found. | | `export` | boolean | `false` | Export all variables as environment variables. | | `fallback` | boolean | `false` | Search `justfile` in parent directory if the first recipe on the command line is not found. | | `ignore-comments` | boolean | `false` | Ignore recipe lines beginning with `#`. | diff --git a/README.md b/README.md --- a/README.md +++ b/README.md @@ -877,17 +878,25 @@ bar #### Dotenv Settings -If `dotenv-load`, `dotenv-filename` or `dotenv-path` is set, `just` will load -environment variables from a file. +If any of `dotenv-load`, `dotenv-filename`, `dotenv-path`, or `dotenv-required` +are set, `just` will try to load environment variables from a file. -If `dotenv-path` is set, `just` will look for a file at the given path. It is -an error if a dotenv file is not found at `dotenv-path`, but not an error if a -dotenv file is not found with `dotenv-filename`. +If `dotenv-path` is set, `just` will look for a file at the given path, which +may be absolute, or relative to the working directory. -Otherwise, `just` looks for a file named `.env` by default, unless -`dotenv-filename` set, in which case the value of `dotenv-filename` is used. -This file can be located in the same directory as your `justfile` or in a -parent directory. +If `dotenv-filename` is set `just` will look for a file at the given path, +relative to the working directory and each of its ancestors. 
+ +If `dotenv-filename` is not set, but `dotenv-load` or `dotenv-required` are +set, just will look for a file named `.env`, relative to the working directory +and each of its ancestors. + +`dotenv-filename` and `dotenv-path` and similar, but `dotenv-path` is only +checked relative to the working directory, whereas `dotenv-filename` is checked +relative to the working directory and each of its ancestors. + +It is not an error if an environment file is not found, unless +`dotenv-required` is set. The loaded variables are environment variables, not `just` variables, and so must be accessed using `$VARIABLE_NAME` in recipes and backticks. diff --git a/src/error.rs b/src/error.rs --- a/src/error.rs +++ b/src/error.rs @@ -79,6 +79,7 @@ pub(crate) enum Error<'src> { Dotenv { dotenv_error: dotenvy::Error, }, + DotenvRequired, DumpJson { serde_json_error: serde_json::Error, }, diff --git a/src/error.rs b/src/error.rs --- a/src/error.rs +++ b/src/error.rs @@ -347,6 +348,9 @@ impl<'src> ColorDisplay for Error<'src> { Dotenv { dotenv_error } => { write!(f, "Failed to load environment file: {dotenv_error}")?; } + DotenvRequired => { + write!(f, "Dotenv file not found")?; + } DumpJson { serde_json_error } => { write!(f, "Failed to dump JSON to stdout: {serde_json_error}")?; } diff --git a/src/keyword.rs b/src/keyword.rs --- a/src/keyword.rs +++ b/src/keyword.rs @@ -10,6 +10,7 @@ pub(crate) enum Keyword { DotenvFilename, DotenvLoad, DotenvPath, + DotenvRequired, Else, Export, Fallback, diff --git a/src/load_dotenv.rs b/src/load_dotenv.rs --- a/src/load_dotenv.rs +++ b/src/load_dotenv.rs @@ -1,7 +1,5 @@ use super::*; -const DEFAULT_DOTENV_FILENAME: &str = ".env"; - pub(crate) fn load_dotenv( config: &Config, settings: &Settings, diff --git a/src/load_dotenv.rs b/src/load_dotenv.rs --- a/src/load_dotenv.rs +++ b/src/load_dotenv.rs @@ -17,16 +15,21 @@ pub(crate) fn load_dotenv( .as_ref() .or(settings.dotenv_path.as_ref()); - if !settings.dotenv_load.unwrap_or_default() && 
dotenv_filename.is_none() && dotenv_path.is_none() + if !settings.dotenv_load + && dotenv_filename.is_none() + && dotenv_path.is_none() + && !settings.dotenv_required { return Ok(BTreeMap::new()); } if let Some(path) = dotenv_path { - return load_from_file(&working_directory.join(path)); + if path.is_file() { + return load_from_file(&working_directory.join(path)); + } } - let filename = dotenv_filename.map_or(DEFAULT_DOTENV_FILENAME, |s| s.as_str()); + let filename = dotenv_filename.map_or(".env", |s| s.as_str()); for directory in working_directory.ancestors() { let path = directory.join(filename); diff --git a/src/load_dotenv.rs b/src/load_dotenv.rs --- a/src/load_dotenv.rs +++ b/src/load_dotenv.rs @@ -35,7 +38,11 @@ pub(crate) fn load_dotenv( } } - Ok(BTreeMap::new()) + if settings.dotenv_required { + Err(Error::DotenvRequired) + } else { + Ok(BTreeMap::new()) + } } fn load_from_file(path: &Path) -> RunResult<'static, BTreeMap<String, String>> { diff --git a/src/node.rs b/src/node.rs --- a/src/node.rs +++ b/src/node.rs @@ -284,6 +284,7 @@ impl<'src> Node<'src> for Set<'src> { Setting::AllowDuplicateRecipes(value) | Setting::AllowDuplicateVariables(value) | Setting::DotenvLoad(value) + | Setting::DotenvRequired(value) | Setting::Export(value) | Setting::Fallback(value) | Setting::PositionalArguments(value) diff --git a/src/parser.rs b/src/parser.rs --- a/src/parser.rs +++ b/src/parser.rs @@ -917,6 +917,7 @@ impl<'run, 'src> Parser<'run, 'src> { Some(Setting::AllowDuplicateVariables(self.parse_set_bool()?)) } Keyword::DotenvLoad => Some(Setting::DotenvLoad(self.parse_set_bool()?)), + Keyword::DotenvRequired => Some(Setting::DotenvRequired(self.parse_set_bool()?)), Keyword::Export => Some(Setting::Export(self.parse_set_bool()?)), Keyword::Fallback => Some(Setting::Fallback(self.parse_set_bool()?)), Keyword::IgnoreComments => Some(Setting::IgnoreComments(self.parse_set_bool()?)), diff --git a/src/setting.rs b/src/setting.rs --- a/src/setting.rs +++ b/src/setting.rs 
@@ -7,6 +7,7 @@ pub(crate) enum Setting<'src> { DotenvFilename(String), DotenvLoad(bool), DotenvPath(String), + DotenvRequired(bool), Export(bool), Fallback(bool), IgnoreComments(bool), diff --git a/src/setting.rs b/src/setting.rs --- a/src/setting.rs +++ b/src/setting.rs @@ -24,6 +25,7 @@ impl<'src> Display for Setting<'src> { Self::AllowDuplicateRecipes(value) | Self::AllowDuplicateVariables(value) | Self::DotenvLoad(value) + | Self::DotenvRequired(value) | Self::Export(value) | Self::Fallback(value) | Self::IgnoreComments(value) diff --git a/src/settings.rs b/src/settings.rs --- a/src/settings.rs +++ b/src/settings.rs @@ -10,8 +10,9 @@ pub(crate) struct Settings<'src> { pub(crate) allow_duplicate_recipes: bool, pub(crate) allow_duplicate_variables: bool, pub(crate) dotenv_filename: Option<String>, - pub(crate) dotenv_load: Option<bool>, + pub(crate) dotenv_load: bool, pub(crate) dotenv_path: Option<PathBuf>, + pub(crate) dotenv_required: bool, pub(crate) export: bool, pub(crate) fallback: bool, pub(crate) ignore_comments: bool, diff --git a/src/settings.rs b/src/settings.rs --- a/src/settings.rs +++ b/src/settings.rs @@ -39,11 +40,14 @@ impl<'src> Settings<'src> { settings.dotenv_filename = Some(filename); } Setting::DotenvLoad(dotenv_load) => { - settings.dotenv_load = Some(dotenv_load); + settings.dotenv_load = dotenv_load; } Setting::DotenvPath(path) => { settings.dotenv_path = Some(PathBuf::from(path)); } + Setting::DotenvRequired(dotenv_required) => { + settings.dotenv_required = dotenv_required; + } Setting::Export(export) => { settings.export = export; }
diff --git a/tests/command.rs b/tests/command.rs --- a/tests/command.rs +++ b/tests/command.rs @@ -47,16 +47,21 @@ test! { status: 2, } -test! { - name: env_is_loaded, - justfile: " - set dotenv-load - - x: - echo XYZ - ", - args: ("--command", "sh", "-c", "printf $DOTENV_KEY"), - stdout: "dotenv-value", +#[test] +fn env_is_loaded() { + Test::new() + .justfile( + " + set dotenv-load + + x: + echo XYZ + ", + ) + .args(["--command", "sh", "-c", "printf $DOTENV_KEY"]) + .write(".env", "DOTENV_KEY=dotenv-value") + .stdout("dotenv-value") + .run(); } test! { diff --git a/tests/dotenv.rs b/tests/dotenv.rs --- a/tests/dotenv.rs +++ b/tests/dotenv.rs @@ -12,40 +12,54 @@ fn dotenv() { .run(); } -test! { - name: set_false, - justfile: r#" - set dotenv-load := false - - foo: - if [ -n "${DOTENV_KEY+1}" ]; then echo defined; else echo undefined; fi - "#, - stdout: "undefined\n", - stderr: "if [ -n \"${DOTENV_KEY+1}\" ]; then echo defined; else echo undefined; fi\n", +#[test] +fn set_false() { + Test::new() + .justfile( + r#" + set dotenv-load := false + + @foo: + if [ -n "${DOTENV_KEY+1}" ]; then echo defined; else echo undefined; fi + "#, + ) + .write(".env", "DOTENV_KEY=dotenv-value") + .stdout("undefined\n") + .run(); } -test! { - name: set_implicit, - justfile: r#" - set dotenv-load +#[test] +fn set_implicit() { + Test::new() + .justfile( + " + set dotenv-load - foo: - echo $DOTENV_KEY - "#, - stdout: "dotenv-value\n", - stderr: "echo $DOTENV_KEY\n", + foo: + echo $DOTENV_KEY + ", + ) + .write(".env", "DOTENV_KEY=dotenv-value") + .stdout("dotenv-value\n") + .stderr("echo $DOTENV_KEY\n") + .run(); } -test! 
{ - name: set_true, - justfile: r#" - set dotenv-load := true +#[test] +fn set_true() { + Test::new() + .justfile( + " + set dotenv-load := true - foo: - echo $DOTENV_KEY - "#, - stdout: "dotenv-value\n", - stderr: "echo $DOTENV_KEY\n", + foo: + echo $DOTENV_KEY + ", + ) + .write(".env", "DOTENV_KEY=dotenv-value") + .stdout("dotenv-value\n") + .stderr("echo $DOTENV_KEY\n") + .run(); } #[test] diff --git a/tests/dotenv.rs b/tests/dotenv.rs --- a/tests/dotenv.rs +++ b/tests/dotenv.rs @@ -53,32 +67,28 @@ fn no_warning() { Test::new() .justfile( " - foo: - echo ${DOTENV_KEY:-unset} - ", + foo: + echo ${DOTENV_KEY:-unset} + ", ) + .write(".env", "DOTENV_KEY=dotenv-value") .stdout("unset\n") .stderr("echo ${DOTENV_KEY:-unset}\n") .run(); } #[test] -fn path_not_found() { +fn dotenv_required() { Test::new() .justfile( " - foo: - echo $JUST_TEST_VARIABLE - ", + set dotenv-required + + foo: + ", ) - .args(["--dotenv-path", ".env.prod"]) - .stderr(if cfg!(windows) { - "error: Failed to load environment file: The system cannot find the file specified. (os \ - error 2)\n" - } else { - "error: Failed to load environment file: No such file or directory (os error 2)\n" - }) - .status(EXIT_FAILURE) + .stderr("error: Dotenv file not found\n") + .status(1) .run(); } diff --git a/tests/dotenv.rs b/tests/dotenv.rs --- a/tests/dotenv.rs +++ b/tests/dotenv.rs @@ -87,9 +97,9 @@ fn path_resolves() { Test::new() .justfile( " - foo: - @echo $JUST_TEST_VARIABLE - ", + foo: + @echo $JUST_TEST_VARIABLE + ", ) .tree(tree! { subdir: { diff --git a/tests/dotenv.rs b/tests/dotenv.rs --- a/tests/dotenv.rs +++ b/tests/dotenv.rs @@ -107,9 +117,9 @@ fn filename_resolves() { Test::new() .justfile( " - foo: - @echo $JUST_TEST_VARIABLE - ", + foo: + @echo $JUST_TEST_VARIABLE + ", ) .tree(tree! 
{ ".env.special": "JUST_TEST_VARIABLE=bar" diff --git a/tests/dotenv.rs b/tests/dotenv.rs --- a/tests/dotenv.rs +++ b/tests/dotenv.rs @@ -145,11 +155,11 @@ fn path_flag_overwrites_no_load() { Test::new() .justfile( " - set dotenv-load := false + set dotenv-load := false - foo: - @echo $JUST_TEST_VARIABLE - ", + foo: + @echo $JUST_TEST_VARIABLE + ", ) .tree(tree! { subdir: { diff --git a/tests/dotenv.rs b/tests/dotenv.rs --- a/tests/dotenv.rs +++ b/tests/dotenv.rs @@ -227,12 +237,12 @@ fn program_argument_has_priority_for_dotenv_filename() { fn program_argument_has_priority_for_dotenv_path() { Test::new() .justfile( - r#" - set dotenv-path := "subdir/.env" + " + set dotenv-path := 'subdir/.env' foo: @echo $JUST_TEST_VARIABLE - "#, + ", ) .tree(tree! { subdir: { diff --git a/tests/dotenv.rs b/tests/dotenv.rs --- a/tests/dotenv.rs +++ b/tests/dotenv.rs @@ -257,8 +267,111 @@ fn dotenv_path_is_relative_to_working_directory() { @echo $DOTENV_KEY ", ) + .write(".env", "DOTENV_KEY=dotenv-value") .tree(tree! 
{ subdir: { } }) .current_dir("subdir") .stdout("dotenv-value\n") .run(); } + +#[test] +fn dotenv_variable_in_recipe() { + Test::new() + .justfile( + " + set dotenv-load + + echo: + echo $DOTENV_KEY + ", + ) + .write(".env", "DOTENV_KEY=dotenv-value") + .stdout("dotenv-value\n") + .stderr("echo $DOTENV_KEY\n") + .run(); +} + +#[test] +fn dotenv_variable_in_backtick() { + Test::new() + .justfile( + " + set dotenv-load + X:=`echo $DOTENV_KEY` + echo: + echo {{X}} + ", + ) + .write(".env", "DOTENV_KEY=dotenv-value") + .stdout("dotenv-value\n") + .stderr("echo dotenv-value\n") + .run(); +} + +#[test] +fn dotenv_variable_in_function_in_recipe() { + Test::new() + .justfile( + " + set dotenv-load + echo: + echo {{env_var_or_default('DOTENV_KEY', 'foo')}} + echo {{env_var('DOTENV_KEY')}} + ", + ) + .write(".env", "DOTENV_KEY=dotenv-value") + .stdout("dotenv-value\ndotenv-value\n") + .stderr("echo dotenv-value\necho dotenv-value\n") + .run(); +} + +#[test] +fn dotenv_variable_in_function_in_backtick() { + Test::new() + .justfile( + " + set dotenv-load + X:=env_var_or_default('DOTENV_KEY', 'foo') + Y:=env_var('DOTENV_KEY') + echo: + echo {{X}} + echo {{Y}} +", + ) + .write(".env", "DOTENV_KEY=dotenv-value") + .stdout("dotenv-value\ndotenv-value\n") + .stderr("echo dotenv-value\necho dotenv-value\n") + .run(); +} + +#[test] +fn no_dotenv() { + Test::new() + .justfile( + " + X:=env_var_or_default('DOTENV_KEY', 'DEFAULT') + echo: + echo {{X}} + ", + ) + .write(".env", "DOTENV_KEY=dotenv-value") + .arg("--no-dotenv") + .stdout("DEFAULT\n") + .stderr("echo DEFAULT\n") + .run(); +} +#[test] +fn dotenv_env_var_override() { + Test::new() + .justfile( + " + echo: + echo $DOTENV_KEY + ", + ) + .write(".env", "DOTENV_KEY=dotenv-value") + .env("DOTENV_KEY", "not-the-dotenv-value") + .stdout("not-the-dotenv-value\n") + .stderr("echo $DOTENV_KEY\n") + .run(); +} diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -46,8 +46,9 @@ fn alias() { 
"allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "positional_arguments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -84,8 +85,9 @@ fn assignment() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "ignore_comments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -136,8 +138,9 @@ fn body() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "ignore_comments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -200,8 +203,9 @@ fn dependencies() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "ignore_comments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -302,8 +306,9 @@ fn dependency_argument() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "ignore_comments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -366,8 +371,9 @@ fn duplicate_recipes() { "allow_duplicate_recipes": true, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": 
null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "ignore_comments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -408,8 +414,9 @@ fn duplicate_variables() { "allow_duplicate_recipes": false, "allow_duplicate_variables": true, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "ignore_comments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -453,8 +460,9 @@ fn doc_comment() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "ignore_comments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -484,8 +492,9 @@ fn empty_justfile() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "ignore_comments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -636,8 +645,9 @@ fn parameters() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "ignore_comments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -721,8 +731,9 @@ fn priors() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, 
"ignore_comments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -766,8 +777,9 @@ fn private() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "ignore_comments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -811,8 +823,9 @@ fn quiet() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "ignore_comments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -870,6 +883,7 @@ fn settings() { "dotenv_filename": "filename", "dotenv_load": true, "dotenv_path": "path", + "dotenv_required": false, "export": true, "fallback": true, "ignore_comments": true, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -919,8 +933,9 @@ fn shebang() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "ignore_comments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -964,8 +979,9 @@ fn simple() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "ignore_comments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -1012,8 +1028,9 @@ fn attribute() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": 
null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "positional_arguments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -1073,8 +1090,9 @@ fn module() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "positional_arguments": false, diff --git a/tests/json.rs b/tests/json.rs --- a/tests/json.rs +++ b/tests/json.rs @@ -1093,8 +1111,9 @@ fn module() { "allow_duplicate_recipes": false, "allow_duplicate_variables": false, "dotenv_filename": null, - "dotenv_load": null, + "dotenv_load": false, "dotenv_path": null, + "dotenv_required": false, "export": false, "fallback": false, "positional_arguments": false, diff --git a/tests/misc.rs b/tests/misc.rs --- a/tests/misc.rs +++ b/tests/misc.rs @@ -1589,84 +1589,6 @@ echo: stderr: "echo 1\n", } -test! { - name: dotenv_variable_in_recipe, - justfile: " -# -set dotenv-load - -echo: - echo $DOTENV_KEY - ", - stdout: "dotenv-value\n", - stderr: "echo $DOTENV_KEY\n", -} - -test! { - name: dotenv_variable_in_backtick, - justfile: " -# -set dotenv-load -X:=`echo $DOTENV_KEY` -echo: - echo {{X}} - ", - stdout: "dotenv-value\n", - stderr: "echo dotenv-value\n", -} -test! { - name: dotenv_variable_in_function_in_recipe, - justfile: " -# -set dotenv-load -echo: - echo {{env_var_or_default('DOTENV_KEY', 'foo')}} - echo {{env_var('DOTENV_KEY')}} - ", - stdout: "dotenv-value\ndotenv-value\n", - stderr: "echo dotenv-value\necho dotenv-value\n", -} - -test! 
{ - name: dotenv_variable_in_function_in_backtick, - justfile: " -# -set dotenv-load -X:=env_var_or_default('DOTENV_KEY', 'foo') -Y:=env_var('DOTENV_KEY') -echo: - echo {{X}} - echo {{Y}} - ", - stdout: "dotenv-value\ndotenv-value\n", - stderr: "echo dotenv-value\necho dotenv-value\n", -} - -test! { - name: no_dotenv, - justfile: " -# -X:=env_var_or_default('DOTENV_KEY', 'DEFAULT') -echo: - echo {{X}} - ", - args: ("--no-dotenv"), - stdout: "DEFAULT\n", - stderr: "echo DEFAULT\n", -} - -test! { - name: dotenv_env_var_override, - justfile: " -# -echo: - echo $DOTENV_KEY - ", - env: {"DOTENV_KEY": "not-the-dotenv-value",}, - stdout: "not-the-dotenv-value\n", - stderr: "echo $DOTENV_KEY\n", -} - test! { name: invalid_escape_sequence_message, justfile: r#" diff --git a/tests/modules.rs b/tests/modules.rs --- a/tests/modules.rs +++ b/tests/modules.rs @@ -515,7 +515,6 @@ fn missing_optional_modules_do_not_conflict() { #[test] fn root_dotenv_is_available_to_submodules() { Test::new() - .write("foo.just", "foo:\n @echo $DOTENV_KEY") .justfile( " set dotenv-load diff --git a/tests/modules.rs b/tests/modules.rs --- a/tests/modules.rs +++ b/tests/modules.rs @@ -523,10 +522,10 @@ fn root_dotenv_is_available_to_submodules() { mod foo ", ) + .write("foo.just", "foo:\n @echo $DOTENV_KEY") + .write(".env", "DOTENV_KEY=dotenv-value") .test_round_trip(false) - .arg("--unstable") - .arg("foo") - .arg("foo") + .args(["--unstable", "foo", "foo"]) .stdout("dotenv-value\n") .run(); } diff --git a/tests/modules.rs b/tests/modules.rs --- a/tests/modules.rs +++ b/tests/modules.rs @@ -534,10 +533,6 @@ fn root_dotenv_is_available_to_submodules() { #[test] fn dotenv_settings_in_submodule_are_ignored() { Test::new() - .write( - "foo.just", - "set dotenv-load := false\nfoo:\n @echo $DOTENV_KEY", - ) .justfile( " set dotenv-load diff --git a/tests/modules.rs b/tests/modules.rs --- a/tests/modules.rs +++ b/tests/modules.rs @@ -545,10 +540,13 @@ fn dotenv_settings_in_submodule_are_ignored() { mod 
foo ", ) + .write( + "foo.just", + "set dotenv-load := false\nfoo:\n @echo $DOTENV_KEY", + ) + .write(".env", "DOTENV_KEY=dotenv-value") .test_round_trip(false) - .arg("--unstable") - .arg("foo") - .arg("foo") + .args(["--unstable", "foo", "foo"]) .stdout("dotenv-value\n") .run(); } diff --git a/tests/shell_expansion.rs b/tests/shell_expansion.rs --- a/tests/shell_expansion.rs +++ b/tests/shell_expansion.rs @@ -82,6 +82,7 @@ fn shell_expanded_strings_can_be_used_in_settings() { echo $DOTENV_KEY ", ) + .write(".env", "DOTENV_KEY=dotenv-value") .env("JUST_TEST_VARIABLE", ".env") .stdout("dotenv-value\n") .run(); diff --git a/tests/test.rs b/tests/test.rs --- a/tests/test.rs +++ b/tests/test.rs @@ -201,9 +201,8 @@ impl Test { } else { self.stdout.clone() }; - let stderr = unindent(&self.stderr); - fs::write(self.tempdir.path().join(".env"), "DOTENV_KEY=dotenv-value").unwrap(); + let stderr = unindent(&self.stderr); let mut command = Command::new(executable_path("just"));
`dotenv-path` causes error if the specified envfile does not exist **Steps to reproduce:** 1. Create this `justfile` in an empty directory: ```just set dotenv-load := true set dotenv-path := ".env" sayhi: @echo "Hello world" ``` 2. execute `just sayhi` **Expected** > Hello World **Actual** ``` error: Failed to load environment file: No such file or directory (os error 2) ``` **Additional context** * just version 1.27.0 (cargo) * setting `dotenv-load` to `false` still reproduces the issue * [Documentation for `dotenv-path`](https://github.com/casey/just#table-of-settings) states (my emphasis): > Load a .env file from a custom path, **if present**."
I just tested this, and it looks like `dotenv-path` has always failed if the environment file isn't present. Whether or not this is the best behavior is up for debate. On the one hand, if you have a truly optional `.env` file, then it would be nice to run if it isn't found. On the other hand, if the `.env` file is required for recipes to run correctly, you would want an error. For now I'm going to update the docs, and leave this issue open to collect feedback. In my case, the env file is required for most actions. However, I have some additional requirements (specifically, docker compose secrets) which makes setting up the env file manually a bit of a pain. For this reason, I have a special task to help the user generate their env file, which is placed in a special location. If `dotenv-path` did not care about the presence of the env file, as originally documented, my users could start using `just` from a fresh clone. Now though, there is a manual step required first in creating an empty env file. Perhaps you're right that `dotenv-path` and `dotenv-filename` should not define the behaviour for the case of a missing env file. However, I really think there should be an option to support cases where `just` itself is used to generate the envfile (using `just` to interact with [`vercel env`](https://vercel.com/docs/cli/env) is yet another example of this)! Perhaps something like `dotenv-optional` or `dotenv-fallback: ignore/fail` Also, forcing the presence of an env file seems a bit weird in general. The env file is supposed to offer one *alternative* way of defining environment variables. It does not necessarily have to be *the* way. Consider a CI environment such as GitHub actions where environment variables are pre-defined. If `just` throws an error simply because the env file does not exist, I'm now forced to add an extra step of setting up an env file, despite my environment having been correctly configured from the start. 
I think maybe the best option is to add a new setting, `set dotenv-required`, or something like that, which, when set, makes not finding a `.env` file an error, and at the same time time, making `set dotenv-path` not cause an error if an environment file is not found. This would make `set dotenv-path` and `set dotenv-file` work the same way, with respect to a dotenv file not being found, and allow users to opt-in to getting an error if a `.env` is not found.
2024-05-31T07:04:23
1.27
7c30fb4944add30f22dbf74404674234b76af0d8
[ "dotenv::dotenv_required", "json::body", "json::attribute", "json::alias", "json::dependencies", "json::assignment", "json::dependency_argument", "json::module", "json::doc_comment", "json::empty_justfile", "json::duplicate_recipes", "json::quiet", "json::priors", "json::parameters", "js...
[ "analyzer::tests::duplicate_alias", "analyzer::tests::duplicate_variable", "analyzer::tests::duplicate_parameter", "analyzer::tests::alias_shadows_recipe_after", "analyzer::tests::alias_shadows_recipe_before", "analyzer::tests::duplicate_recipe", "analyzer::tests::duplicate_variadic_parameter", "analy...
[ "functions::env_var_functions" ]
[]
casey/just
2,112
casey__just-2112
[ "2107" ]
de1256f1bddd47322b3e37e6ca8c3a4ac34b33d0
diff --git a/src/subcommand.rs b/src/subcommand.rs --- a/src/subcommand.rs +++ b/src/subcommand.rs @@ -90,7 +90,7 @@ impl Subcommand { Dump => Self::dump(config, ast, justfile)?, Format => Self::format(config, &search, src, ast)?, Groups => Self::groups(config, justfile), - List { path } => Self::list_module(config, justfile, path)?, + List { path } => Self::list(config, justfile, path)?, Show { path } => Self::show(config, justfile, path)?, Summary => Self::summary(config, justfile), Variables => Self::variables(justfile), diff --git a/src/subcommand.rs b/src/subcommand.rs --- a/src/subcommand.rs +++ b/src/subcommand.rs @@ -479,11 +479,7 @@ impl Subcommand { Ok(()) } - fn list_module( - config: &Config, - mut module: &Justfile, - path: &ModulePath, - ) -> Result<(), Error<'static>> { + fn list(config: &Config, mut module: &Justfile, path: &ModulePath) -> Result<(), Error<'static>> { for name in &path.path { module = module .modules diff --git a/src/subcommand.rs b/src/subcommand.rs --- a/src/subcommand.rs +++ b/src/subcommand.rs @@ -491,21 +487,11 @@ impl Subcommand { .ok_or_else(|| Error::UnknownSubmodule { path: path.clone() })?; } - Self::list(config, 0, module); - - Ok(()) - } - - fn list(config: &Config, level: usize, justfile: &Justfile) { let aliases = if config.no_aliases { BTreeMap::new() } else { let mut aliases = BTreeMap::<&str, Vec<&str>>::new(); - for alias in justfile - .aliases - .values() - .filter(|alias| !alias.is_private()) - { + for alias in module.aliases.values().filter(|alias| !alias.is_private()) { aliases .entry(alias.target.name.lexeme()) .or_default() diff --git a/src/subcommand.rs b/src/subcommand.rs --- a/src/subcommand.rs +++ b/src/subcommand.rs @@ -517,7 +503,7 @@ impl Subcommand { let signature_widths = { let mut signature_widths: BTreeMap<&str, usize> = BTreeMap::new(); - for (name, recipe) in &justfile.recipes { + for (name, recipe) in &module.recipes { if !recipe.is_public() { continue; } diff --git a/src/subcommand.rs 
b/src/subcommand.rs --- a/src/subcommand.rs +++ b/src/subcommand.rs @@ -545,13 +531,11 @@ impl Subcommand { .max() .unwrap_or(0); - if level == 0 { - print!("{}", config.list_heading); - } + print!("{}", config.list_heading); let groups = { let mut groups = BTreeMap::<Option<String>, Vec<&Recipe>>::new(); - for recipe in justfile.public_recipes(config) { + for recipe in module.public_recipes(config) { let recipe_groups = recipe.groups(); if recipe_groups.is_empty() { groups.entry(None).or_default().push(recipe); diff --git a/src/subcommand.rs b/src/subcommand.rs --- a/src/subcommand.rs +++ b/src/subcommand.rs @@ -572,7 +556,7 @@ impl Subcommand { let no_groups = groups.contains_key(&None) && groups.len() == 1; if !no_groups { - print!("{}", config.list_prefix.repeat(level + 1)); + print!("{}", config.list_prefix); if let Some(group_name) = group { println!("[{group_name}]"); } else { diff --git a/src/subcommand.rs b/src/subcommand.rs --- a/src/subcommand.rs +++ b/src/subcommand.rs @@ -596,7 +580,7 @@ impl Subcommand { for line in doc.lines() { println!( "{}{} {}", - config.list_prefix.repeat(level + 1), + config.list_prefix, config.color.stdout().doc().paint("#"), config.color.stdout().doc().paint(line), ); diff --git a/src/subcommand.rs b/src/subcommand.rs --- a/src/subcommand.rs +++ b/src/subcommand.rs @@ -606,7 +590,7 @@ impl Subcommand { print!( "{}{}", - config.list_prefix.repeat(level + 1), + config.list_prefix, RecipeSignature { name, recipe }.color_display(config.color.stdout()) ); diff --git a/src/subcommand.rs b/src/subcommand.rs --- a/src/subcommand.rs +++ b/src/subcommand.rs @@ -626,14 +610,11 @@ impl Subcommand { } } - for (i, module) in justfile.modules(config).into_iter().enumerate() { - if i + groups.len() > 0 { - println!(); - } - - println!("{}{}:", config.list_prefix.repeat(level + 1), module.name()); - Self::list(config, level + 1, module); + for submodule in module.modules(config) { + println!("{}{} ...", config.list_prefix, submodule.name(),); 
} + + Ok(()) } fn show<'src>(
diff --git a/tests/groups.rs b/tests/groups.rs --- a/tests/groups.rs +++ b/tests/groups.rs @@ -144,31 +144,3 @@ fn list_groups_with_custom_prefix() { ) .run(); } - -#[test] -fn list_with_groups_in_modules() { - Test::new() - .justfile( - " - [group('FOO')] - foo: - - mod bar - ", - ) - .write("bar.just", "[group('BAZ')]\nbaz:") - .test_round_trip(false) - .args(["--unstable", "--list"]) - .stdout( - " - Available recipes: - [FOO] - foo - - bar: - [BAZ] - baz - ", - ) - .run(); -} diff --git a/tests/list.rs b/tests/list.rs --- a/tests/list.rs +++ b/tests/list.rs @@ -1,54 +1,5 @@ use super::*; -#[test] -fn list_displays_recipes_in_submodules() { - Test::new() - .write("foo.just", "bar:\n @echo FOO") - .justfile( - " - mod foo - ", - ) - .test_round_trip(false) - .arg("--unstable") - .arg("--list") - .stdout( - " - Available recipes: - foo: - bar - ", - ) - .run(); -} - -#[test] -fn modules_are_space_separated_in_output() { - Test::new() - .write("foo.just", "foo:") - .write("bar.just", "bar:") - .justfile( - " - mod foo - - mod bar - ", - ) - .test_round_trip(false) - .args(["--unstable", "--list"]) - .stdout( - " - Available recipes: - bar: - bar - - foo: - foo - ", - ) - .run(); -} - #[test] fn modules_unsorted() { Test::new() diff --git a/tests/list.rs b/tests/list.rs --- a/tests/list.rs +++ b/tests/list.rs @@ -66,72 +17,13 @@ fn modules_unsorted() { .stdout( " Available recipes: - foo: - foo - - bar: - bar - ", - ) - .run(); -} - -#[test] -fn module_recipe_list_alignment_ignores_private_recipes() { - Test::new() - .write( - "foo.just", - " -# foos -foo: - @echo FOO - -[private] -barbarbar: - @echo BAR - -@_bazbazbaz: - @echo BAZ - ", - ) - .justfile("mod foo") - .test_round_trip(false) - .arg("--unstable") - .arg("--list") - .stdout( - " - Available recipes: - foo: - foo # foos + foo ... + bar ... 
", ) .run(); } -#[test] -fn nested_modules_are_properly_indented() { - Test::new() - .write("foo.just", "mod bar") - .write("bar.just", "baz:\n @echo FOO") - .justfile( - " - mod foo - ", - ) - .test_round_trip(false) - .arg("--unstable") - .arg("--list") - .stdout( - " - Available recipes: - foo: - bar: - baz - ", - ) - .run(); -} - #[test] fn unsorted_list_order() { Test::new()
Improve `--list` output with modules Collecting feedback from #929. To sum up: 1. [x] `--list` output with modules is too verbose and nested 2. [x] It should be possible to do `--list SUBMODULE` and only see `--list` output for that submodule, and iteratively descend into submodules with For 1, I think I agree. The current output is very verbose and gets unwieldy quickly if there are a lot of submodules, and gets quite nested if the module hierarchy is deep. I think that, by default, `--list` should not print out the recipes in submodules, but only the submodules themselves. So something like: ``` $ just --list Available recipes: foo bar Available submodules: baz bob ``` If there are no recipes at the top level, it would just be: ``` $ just --list Available submodules: baz bob ``` I'm undecided about whether to completely remove the nested output, or require `--verbose` or `--nested` or whatever to get it. For 2, I'm working on a patch that allows `--list` to take a module path, which are space- or `::`- separated module names, and will descend into that module, and only print the `--list` output for that module. @gl-yziquel and @valscion what do you think?
I just merged #2108, which allows passing a submodule path to `--list`, so the second item should be good. My use case would be commands and subcommands. So, ideally, I'd need the module to be on the same level as the recipes. As to the nesting layout, I'd advise gating behind unstable some directive that controls what is being shown, and iteratively collect feedback as to the layout needs. I'd started with a [folded] directive on the mod declaration to keep the same behaviour as today but add that directive to get the submodule recipes folded. IMO, [folded] should be the default and [unfolded] the explicitly requested functionality, but, well... Again, in the ideal world, the default command of the module should/could be listed as just a recipe in the toplevel, when [folded]. Another alternative to this: ``` $ just --list Available recipes: foo bar Available submodules: baz bob ``` Would be listing submodules along with recipes: ``` $ just --list Available recipes: foo bar baz bob ``` This could be done with some kind of visual indication: ``` $ just --list Available recipes: foo bar baz… bob… ``` @casey The visual indication like that would be fine. The only point I'd like, which is perhaps too far fetched, is the ability to do something like: ```console $ just --list --unsorted Available recipes: foo bar… baz bob… ``` But I'd really advise for ASCII '...' instead of non-ASCII '…'. I do work, more than occasionally, in a raw tty unix terminal provided by the ubuntu OS when no graphics are available. And I noticed that the fancy utf-8 characters do not pass well. In general terminal tools, so be it. But just should be able to run in such a context, with really low-level stuff. IMO. I'm in favor of not displaying the submodules under their own heading as I do think for the one who calls `just`, the submodules should be an implementation detail. 
That is, submodules should be the way the Justfile is organized but should not force the caller of `just` to know where each recipe came from. I also don't feel strongly about this as either way as long as the submodule recipes are not shown in the initial `just --list` output, I'm happy. I do concur with @gl-yziquel that using plain ASCII would be better than the ellipsis character `…`. So like this: ``` $ just --list Available recipes: foo bar baz ... bob ... ``` The space between `baz` and `...` is on purpose as it would signal one can call `just baz something` — that is, the space is meaningful in there.
2024-05-30T10:12:22
1.27
7c30fb4944add30f22dbf74404674234b76af0d8
[ "list::modules_unsorted" ]
[ "analyzer::tests::alias_shadows_recipe_after", "analyzer::tests::duplicate_variable", "analyzer::tests::duplicate_alias", "analyzer::tests::alias_shadows_recipe_before", "assignment_resolver::tests::circular_variable_dependency", "assignment_resolver::tests::self_variable_dependency", "analyzer::tests::...
[ "functions::env_var_functions" ]
[]
casey/just
909
casey__just-909
[ "908" ]
6cf3d204e60c972d607dd8b5f6b1eccdf65613c1
diff --git a/src/assignment_resolver.rs b/src/assignment_resolver.rs --- a/src/assignment_resolver.rs +++ b/src/assignment_resolver.rs @@ -5,7 +5,6 @@ use CompilationErrorKind::*; pub(crate) struct AssignmentResolver<'src: 'run, 'run> { assignments: &'run Table<'src, Assignment<'src>>, stack: Vec<&'src str>, - seen: BTreeSet<&'src str>, evaluated: BTreeSet<&'src str>, } diff --git a/src/assignment_resolver.rs b/src/assignment_resolver.rs --- a/src/assignment_resolver.rs +++ b/src/assignment_resolver.rs @@ -14,9 +13,8 @@ impl<'src: 'run, 'run> AssignmentResolver<'src, 'run> { assignments: &Table<'src, Assignment<'src>>, ) -> CompilationResult<'src, ()> { let mut resolver = AssignmentResolver { - stack: empty(), - seen: empty(), - evaluated: empty(), + stack: Vec::new(), + evaluated: BTreeSet::new(), assignments, }; diff --git a/src/assignment_resolver.rs b/src/assignment_resolver.rs --- a/src/assignment_resolver.rs +++ b/src/assignment_resolver.rs @@ -32,7 +30,6 @@ impl<'src: 'run, 'run> AssignmentResolver<'src, 'run> { return Ok(()); } - self.seen.insert(name); self.stack.push(name); if let Some(assignment) = self.assignments.get(name) { diff --git a/src/assignment_resolver.rs b/src/assignment_resolver.rs --- a/src/assignment_resolver.rs +++ b/src/assignment_resolver.rs @@ -53,6 +50,9 @@ impl<'src: 'run, 'run> AssignmentResolver<'src, 'run> { token, }); } + + self.stack.pop(); + Ok(()) } diff --git a/src/assignment_resolver.rs b/src/assignment_resolver.rs --- a/src/assignment_resolver.rs +++ b/src/assignment_resolver.rs @@ -62,7 +62,7 @@ impl<'src: 'run, 'run> AssignmentResolver<'src, 'run> { let variable = name.lexeme(); if self.evaluated.contains(variable) { Ok(()) - } else if self.seen.contains(variable) { + } else if self.stack.contains(&variable) { let token = self.assignments[variable].name.token(); self.stack.push(variable); Err(token.error(CircularVariableDependency { diff --git a/src/color.rs b/src/color.rs --- a/src/color.rs +++ b/src/color.rs @@ -41,21 
+41,21 @@ impl Color { pub(crate) fn auto() -> Self { Self { use_color: UseColor::Auto, - ..default() + ..Color::default() } } pub(crate) fn always() -> Self { Self { use_color: UseColor::Always, - ..default() + ..Color::default() } } pub(crate) fn never() -> Self { Self { use_color: UseColor::Never, - ..default() + ..Color::default() } } diff --git a/src/common.rs b/src/common.rs --- a/src/common.rs +++ b/src/common.rs @@ -33,9 +33,7 @@ pub(crate) use unicode_width::{UnicodeWidthChar, UnicodeWidthStr}; pub(crate) use crate::{config_error, setting}; // functions -pub(crate) use crate::{ - default::default, empty::empty, load_dotenv::load_dotenv, output::output, unindent::unindent, -}; +pub(crate) use crate::{load_dotenv::load_dotenv, output::output, unindent::unindent}; // traits pub(crate) use crate::{ diff --git a/src/default.rs /dev/null --- a/src/default.rs +++ /dev/null @@ -1,3 +0,0 @@ -pub(crate) fn default<T: Default>() -> T { - Default::default() -} diff --git a/src/empty.rs /dev/null --- a/src/empty.rs +++ /dev/null @@ -1,5 +0,0 @@ -use crate::common::*; - -pub(crate) fn empty<T, C: iter::FromIterator<T>>() -> C { - iter::empty().collect() -} diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -71,10 +71,8 @@ mod compiler; mod config; mod config_error; mod count; -mod default; mod delimiter; mod dependency; -mod empty; mod enclosure; mod error; mod error_result_ext; diff --git a/src/recipe_resolver.rs b/src/recipe_resolver.rs --- a/src/recipe_resolver.rs +++ b/src/recipe_resolver.rs @@ -14,7 +14,7 @@ impl<'src: 'run, 'run> RecipeResolver<'src, 'run> { assignments: &Table<'src, Assignment<'src>>, ) -> CompilationResult<'src, Table<'src, Rc<Recipe<'src>>>> { let mut resolver = RecipeResolver { - resolved_recipes: empty(), + resolved_recipes: Table::new(), unresolved_recipes, assignments, };
diff --git a/tests/misc.rs b/tests/misc.rs --- a/tests/misc.rs +++ b/tests/misc.rs @@ -1410,6 +1410,25 @@ test! { status: EXIT_FAILURE, } +test! { + name: variable_circular_dependency_with_additional_variable, + justfile: " + a := '' + x := y + y := x + + a: + ", + args: ("a"), + stdout: "", + stderr: "error: Variable `x` depends on its own value: `x -> y -> x` + | +2 | x := y + | ^ +", + status: EXIT_FAILURE, +} + test! { name: plus_variadic_recipe, justfile: "
Bad error message for cyclical variable declarations `just` output an error message for cyclical variable definitions where the reported cycle contains the `TARGET` variable. That seems like a bug. Here's the justfile: ``` just export TARGET := "vagrant" a := b b := a ``` Here's the error message: ``` (2) $ just --evaluate error: Variable `a` depends on its own value: `TARGET -> a -> b -> a` | 17 | a := b | ^ ```
2021-07-20T09:04:06
0.9
6cf3d204e60c972d607dd8b5f6b1eccdf65613c1
[ "misc::variable_circular_dependency_with_additional_variable" ]
[ "analyzer::tests::duplicate_alias", "analyzer::tests::duplicate_variadic_parameter", "analyzer::tests::duplicate_parameter", "analyzer::tests::duplicate_recipe", "analyzer::tests::duplicate_variable", "analyzer::tests::parameter_shadows_varible", "analyzer::tests::extra_whitespace", "analyzer::tests::...
[ "config::tests::help", "functions::env_var_functions" ]
[]
jtroo/kanata
664
jtroo__kanata-664
[ "663" ]
73e7c0c9ede97fe920df57a10f73eb860e9dbd51
diff --git a/cfg_samples/kanata.kbd b/cfg_samples/kanata.kbd --- a/cfg_samples/kanata.kbd +++ b/cfg_samples/kanata.kbd @@ -189,6 +189,12 @@ If you need help, please feel welcome to ask in the GitHub discussions. ;; The default limit is 128 keys. ;; ;; dynamic-macro-max-presses 1000 + + ;; This configuration makes multiple tap-hold actions that are activated near + ;; in time expire their timeout quicker. Without this, the timeout for the 2nd + ;; tap-hold onwards will start from 0ms after the previous tap-hold expires. + ;; + ;; concurrent-tap-hold yes ) ;; deflocalkeys-* enables you to define and use key names that match your locale diff --git a/docs/config.adoc b/docs/config.adoc --- a/docs/config.adoc +++ b/docs/config.adoc @@ -463,6 +463,18 @@ The default length limit is 128 keys. ) ---- +=== concurrent-tap-hold [[concurrent-tap-hold]] +This configuration makes multiple tap-hold actions +that are activated near in time expire their timeout quicker. +By default this is disabled. +When disabled, the timeout for a following tap-hold +will start from 0ms **after** the previous tap-hold expires. +When enabled, the timeout will start +as soon as the tap-hold action is pressed +even if a previous tap-hold action is still held and has not expired. + +concurrent-tap-hold yes + [[linux-only-linux-dev]] === Linux only: linux-dev <<table-of-contents,Back to ToC>> diff --git a/docs/config.adoc b/docs/config.adoc --- a/docs/config.adoc +++ b/docs/config.adoc @@ -734,8 +746,8 @@ The `defcfg` entry is treated as a list with pairs of strings. For example: This will be treated as configuration `a` having value `1` and configuration `b` having value `2`. -An example defcfg containing all of the options is shown below. It should be -noted options that are Linux-only or Windows-only will be ignored when used on +An example defcfg containing many of the options is shown below. 
It should be +noted options that are Linux-only, Windows-only, or macOS-only will be ignored when used on a non-applicable operating system. [source] diff --git a/keyberon/src/layout.rs b/keyberon/src/layout.rs --- a/keyberon/src/layout.rs +++ b/keyberon/src/layout.rs @@ -84,6 +84,7 @@ where pub action_queue: ActionQueue<'a, T>, pub rpt_action: Option<&'a Action<'a, T>>, pub historical_keys: ArrayDeque<[KeyCode; 8], arraydeque::behavior::Wrapping>, + pub quick_tap_hold_timeout: bool, rpt_multikey_key_buffer: MultiKeyBuffer<'a, T>, } diff --git a/keyberon/src/layout.rs b/keyberon/src/layout.rs --- a/keyberon/src/layout.rs +++ b/keyberon/src/layout.rs @@ -438,7 +439,7 @@ impl<'a, T: std::fmt::Debug> WaitingState<'a, T> { } else { Some(WaitingAction::Timeout) } - } else if (self.timeout == 0) && (!skip_timeout) { + } else if self.timeout == 0 && (!skip_timeout) { Some(WaitingAction::Timeout) } else { None diff --git a/keyberon/src/layout.rs b/keyberon/src/layout.rs --- a/keyberon/src/layout.rs +++ b/keyberon/src/layout.rs @@ -881,6 +882,7 @@ impl<'a, const C: usize, const R: usize, const L: usize, T: 'a + Copy + std::fmt rpt_action: None, historical_keys: ArrayDeque::new(), rpt_multikey_key_buffer: unsafe { MultiKeyBuffer::new() }, + quick_tap_hold_timeout: false, } } /// Iterates on the key codes of the current state. 
diff --git a/keyberon/src/layout.rs b/keyberon/src/layout.rs --- a/keyberon/src/layout.rs +++ b/keyberon/src/layout.rs @@ -1254,7 +1256,11 @@ impl<'a, const C: usize, const R: usize, const L: usize, T: 'a + Copy + std::fmt { let waiting: WaitingState<T> = WaitingState { coord, - timeout: *timeout, + timeout: if self.quick_tap_hold_timeout { + timeout.saturating_sub(delay) + } else { + *timeout + }, delay, ticks: 0, hold, diff --git a/parser/src/cfg/defcfg.rs b/parser/src/cfg/defcfg.rs --- a/parser/src/cfg/defcfg.rs +++ b/parser/src/cfg/defcfg.rs @@ -18,6 +18,7 @@ pub struct CfgOptions { pub movemouse_inherit_accel_state: bool, pub movemouse_smooth_diagonals: bool, pub dynamic_macro_max_presses: u16, + pub concurrent_tap_hold: bool, #[cfg(any(target_os = "linux", target_os = "unknown"))] pub linux_dev: Vec<String>, #[cfg(any(target_os = "linux", target_os = "unknown"))] diff --git a/parser/src/cfg/defcfg.rs b/parser/src/cfg/defcfg.rs --- a/parser/src/cfg/defcfg.rs +++ b/parser/src/cfg/defcfg.rs @@ -56,6 +57,7 @@ impl Default for CfgOptions { movemouse_inherit_accel_state: false, movemouse_smooth_diagonals: false, dynamic_macro_max_presses: 128, + concurrent_tap_hold: false, #[cfg(any(target_os = "linux", target_os = "unknown"))] linux_dev: vec![], #[cfg(any(target_os = "linux", target_os = "unknown"))] diff --git a/parser/src/cfg/defcfg.rs b/parser/src/cfg/defcfg.rs --- a/parser/src/cfg/defcfg.rs +++ b/parser/src/cfg/defcfg.rs @@ -281,6 +283,9 @@ pub fn parse_defcfg(expr: &[SExpr]) -> Result<CfgOptions> { "movemouse-inherit-accel-state" => { cfg.movemouse_inherit_accel_state = parse_defcfg_val_bool(val, label)? } + "concurrent-tap-hold" => { + cfg.concurrent_tap_hold = parse_defcfg_val_bool(val, label)? 
+ } _ => bail_expr!(key, "Unknown defcfg option {}", label), }; } diff --git a/parser/src/cfg/defcfg.rs b/parser/src/cfg/defcfg.rs --- a/parser/src/cfg/defcfg.rs +++ b/parser/src/cfg/defcfg.rs @@ -380,7 +385,7 @@ pub fn parse_dev(val: &SExpr) -> Result<Vec<String>> { let trimmed_path = path.t.trim_matches('"').to_string(); if trimmed_path.is_empty() { bail_span!( - &path, + path, "an empty string is not a valid device name or path" ) } diff --git a/parser/src/cfg/defcfg.rs b/parser/src/cfg/defcfg.rs --- a/parser/src/cfg/defcfg.rs +++ b/parser/src/cfg/defcfg.rs @@ -388,7 +393,7 @@ pub fn parse_dev(val: &SExpr) -> Result<Vec<String>> { Ok(acc) } SExpr::List(inner_list) => { - bail_span!(&inner_list, "expected strings, found a list") + bail_span!(inner_list, "expected strings, found a list") } }); diff --git a/parser/src/cfg/mod.rs b/parser/src/cfg/mod.rs --- a/parser/src/cfg/mod.rs +++ b/parser/src/cfg/mod.rs @@ -254,15 +254,10 @@ fn parse_cfg( )> { let mut s = ParsedState::default(); let (cfg, src, layer_info, klayers, seqs, overrides) = parse_cfg_raw(p, &mut s)?; - Ok(( - cfg, - src, - layer_info, - create_key_outputs(&klayers, &overrides), - create_layout(klayers, s.a), - seqs, - overrides, - )) + let key_outputs = create_key_outputs(&klayers, &overrides); + let mut layout = create_layout(klayers, s.a); + layout.bm().quick_tap_hold_timeout = cfg.concurrent_tap_hold; + Ok((cfg, src, layer_info, key_outputs, layout, seqs, overrides)) } #[cfg(all(not(feature = "interception_driver"), target_os = "windows"))]
diff --git a/keyberon/src/layout.rs b/keyberon/src/layout.rs --- a/keyberon/src/layout.rs +++ b/keyberon/src/layout.rs @@ -1873,6 +1879,66 @@ mod test { assert_keys(&[], layout.keycodes()); } + #[test] + fn simultaneous_hold() { + static LAYERS: Layers<3, 1, 1> = [[[ + HoldTap(&HoldTapAction { + timeout: 200, + hold: k(LAlt), + timeout_action: k(LAlt), + tap: k(Space), + config: HoldTapConfig::Default, + tap_hold_interval: 0, + }), + HoldTap(&HoldTapAction { + timeout: 200, + hold: k(RAlt), + timeout_action: k(RAlt), + tap: k(A), + config: HoldTapConfig::Default, + tap_hold_interval: 0, + }), + HoldTap(&HoldTapAction { + timeout: 200, + hold: k(LCtrl), + timeout_action: k(LCtrl), + tap: k(A), + config: HoldTapConfig::Default, + tap_hold_interval: 0, + }), + ]]]; + let mut layout = Layout::new(&LAYERS); + layout.quick_tap_hold_timeout = true; + + // Press and release another key before timeout + assert_eq!(CustomEvent::NoEvent, layout.tick()); + assert_keys(&[], layout.keycodes()); + layout.event(Press(0, 0)); + assert_eq!(CustomEvent::NoEvent, layout.tick()); + assert_keys(&[], layout.keycodes()); + layout.event(Press(0, 1)); + assert_eq!(CustomEvent::NoEvent, layout.tick()); + assert_keys(&[], layout.keycodes()); + layout.event(Press(0, 2)); + assert_eq!(CustomEvent::NoEvent, layout.tick()); + assert_keys(&[], layout.keycodes()); + + for _ in 0..196 { + assert_eq!(CustomEvent::NoEvent, layout.tick()); + assert_keys(&[], layout.keycodes()); + } + assert_eq!(CustomEvent::NoEvent, layout.tick()); + assert_keys(&[LAlt], layout.keycodes()); + assert_eq!(CustomEvent::NoEvent, layout.tick()); + assert_keys(&[LAlt], layout.keycodes()); + assert_eq!(CustomEvent::NoEvent, layout.tick()); + assert_keys(&[LAlt, RAlt], layout.keycodes()); + assert_eq!(CustomEvent::NoEvent, layout.tick()); + assert_keys(&[LAlt, RAlt], layout.keycodes()); + assert_eq!(CustomEvent::NoEvent, layout.tick()); + assert_keys(&[LAlt, RAlt, LCtrl], layout.keycodes()); + } + #[test] fn multiple_actions() 
{ static LAYERS: Layers<2, 1, 2> = [ diff --git a/parser/src/cfg/tests.rs b/parser/src/cfg/tests.rs --- a/parser/src/cfg/tests.rs +++ b/parser/src/cfg/tests.rs @@ -1317,6 +1317,7 @@ fn parse_all_defcfg() { movemouse-inherit-accel-state yes movemouse-smooth-diagonals yes dynamic-macro-max-presses 1000 + concurrent-tap-hold yes linux-dev /dev/input/dev1:/dev/input/dev2 linux-dev-names-include "Name 1:Name 2" linux-dev-names-exclude "Name 3:Name 4"
Bug: tap-hold delay is cumulative ### Requirements - [X] I've searched [issues](https://github.com/jtroo/kanata/issues) to see if this has not been reported before. ### Describe the bug Multiple tap-hold modifiers pressed simultaneously trigger one at a time with a delay between each ### Relevant kanata config ``` (defvar tt 150 ht 250 ) (defalias alt_s (tap-hold $tt $ht s lalt) ctl_d (tap-hold $tt $ht d lctl) sft_f (tap-hold $tt $ht f lsft) ) ``` ### To Reproduce 1. Visit https://config.qmk.fm/#/test 2. Hold s-d-f ### Expected behavior A-C-S should be pressed after 250ms Instead you see A 250ms C 250ms S 250ms for a total of 750ms. This makes timing combos with multiple modifiers really slow. ### Kanata version 1.5.0-prerelease-3 ### Debug logs _No response_ ### Operating system Linux ### Additional context _No response_
Looks like this is long-persisting behaviour in keyberon. <details> <summary>Test to add in keyberon (fails in latest main)</summary> ```rust #[test] fn simultaneous_hold() { static LAYERS: Layers<3, 1, 1> = [[[ HoldTap(&HoldTapAction { timeout: 200, hold: k(LAlt), timeout_action: k(LAlt), tap: k(Space), config: HoldTapConfig::Default, tap_hold_interval: 0, }), HoldTap(&HoldTapAction { timeout: 200, hold: k(RAlt), timeout_action: k(RAlt), tap: k(A), config: HoldTapConfig::Default, tap_hold_interval: 0, }), HoldTap(&HoldTapAction { timeout: 200, hold: k(LCtrl), timeout_action: k(LCtrl), tap: k(A), config: HoldTapConfig::Default, tap_hold_interval: 0, }), ]]]; let mut layout = Layout::new(&LAYERS); assert_eq!(CustomEvent::NoEvent, layout.tick()); assert_keys(&[], layout.keycodes()); layout.event(Press(0, 0)); assert_eq!(CustomEvent::NoEvent, layout.tick()); assert_keys(&[], layout.keycodes()); layout.event(Press(0, 1)); assert_eq!(CustomEvent::NoEvent, layout.tick()); assert_keys(&[], layout.keycodes()); layout.event(Press(0, 2)); assert_eq!(CustomEvent::NoEvent, layout.tick()); assert_keys(&[], layout.keycodes()); for _ in 0..196 { assert_eq!(CustomEvent::NoEvent, layout.tick()); assert_keys(&[], layout.keycodes()); } assert_eq!(CustomEvent::NoEvent, layout.tick()); assert_keys(&[LAlt], layout.keycodes()); assert_eq!(CustomEvent::NoEvent, layout.tick()); assert_keys(&[LAlt], layout.keycodes()); assert_eq!(CustomEvent::NoEvent, layout.tick()); assert_keys(&[LAlt, RAlt], layout.keycodes()); assert_eq!(CustomEvent::NoEvent, layout.tick()); assert_keys(&[LAlt, RAlt], layout.keycodes()); assert_eq!(CustomEvent::NoEvent, layout.tick()); assert_keys(&[LAlt, RAlt, LCtrl], layout.keycodes()); } ``` </details> The change to make would be here: https://github.com/jtroo/kanata/blob/5ad5116de42ddfd585d2027523e967b2f5e2eced/keyberon/src/layout.rs#L441C2-L441C2 With: ```rust } else if (self.timeout.saturating_sub(self.delay) == 0) && (!skip_timeout) { ``` Or maybe ```rust } else 
if self.timeout <= self.delay && (!skip_timeout) { ``` Unfortunately this breaks a bunch of existing tests, and might break other use cases that somehow depend on the existing delayed behaviour. So IMO best option is to keep the existing behaviour and add a way to change to the desired quick behaviour. E.g. have it adjustable in `defcfg`, and something like: - have a settable field in the keyberon `Layout` that changes the behaviours in `WaitingState`, based on the defcfg entry - have a global atomic in keyberon that the defcfg entry adjusts
2023-12-13T16:06:19
1.5
73e7c0c9ede97fe920df57a10f73eb860e9dbd51
[ "cfg::tests::parse_all_defcfg" ]
[ "cfg::test_collect_strings", "cfg::tests::disallow_ancestor_seq", "cfg::tests::chord_in_macro_dont_panic", "cfg::tests::disallow_descendent_seq", "cfg::tests::disallow_nested_tap_hold", "cfg::tests::parse_device_paths", "cfg::tests::disallow_multiple_waiting_actions", "cfg::tests::list_action_not_in_l...
[]
[]
kube-rs/kube
1,324
kube-rs__kube-1324
[ "1322" ]
9c81f1f22802b2e9e1c63568ff49c4ddf92ffe2a
diff --git a/kube-runtime/src/scheduler.rs b/kube-runtime/src/scheduler.rs --- a/kube-runtime/src/scheduler.rs +++ b/kube-runtime/src/scheduler.rs @@ -131,6 +131,9 @@ impl<'a, T: Hash + Eq + Clone, R> SchedulerProj<'a, T, R> { pub fn pop_queue_message_into_pending(&mut self, cx: &mut Context<'_>) { while let Poll::Ready(Some(msg)) = self.queue.poll_expired(cx) { let msg = msg.into_inner(); + self.scheduled.remove_entry(&msg).expect( + "Expired message was popped from the Scheduler queue, but was not in the metadata map", + ); self.pending.insert(msg); } }
diff --git a/kube-runtime/src/controller/runner.rs b/kube-runtime/src/controller/runner.rs --- a/kube-runtime/src/controller/runner.rs +++ b/kube-runtime/src/controller/runner.rs @@ -444,6 +444,18 @@ mod tests { advance(Duration::from_secs(3)).await; assert!(poll!(runner.as_mut()).is_pending()); + // Send the third message again and check it's ran + sched_tx + .send(ScheduleRequest { + message: 3, + run_at: Instant::now(), + }) + .await + .unwrap(); + advance(Duration::from_secs(3)).await; + assert!(poll!(runner.as_mut()).is_pending()); + assert_eq!(*count.lock().unwrap(), 4); + let (mut sched_tx, sched_rx) = mpsc::unbounded(); let mut runner = Box::pin( Runner::new(scheduler(sched_rx), 1, |_| {
Controller concurrency appears to cause reconciliations to be skipped ### Current and expected behavior I have a controller that uses `reconcile_on` for a kind of relationship where some resource (a pod) is referenced by my CRD. When the external resource is modified, I reconcile a subset of all instances of my resource. In my actual code I use `reconcile_on`, but the same happens when using `reconcile_all_on`. This demonstrates the problem: ```rust #[tokio::main] async fn main() -> Result<()> { println!("Starting controller"); let client = Client::try_default().await?; let things = Api::<MyCRD>::namespaced(client.clone(), "dev"); let pods = Api::<Pod>::namespaced(client.clone(), "dev"); let (_, writer) = reflector::store(); let pods = kube::runtime::reflector( writer, watcher(pods, watcher::Config::default(), ); let (mut tx, rx) = futures::channel::mpsc::channel(100); let mut stream = Box::pin(pods.touched_objects()); tokio::task::spawn(async move { while let Some(evt) = stream.next().await { println!("Trigger reconciliation from change to pod"); let _ = tx.try_send(()); } }); let config = Config::default().concurrency(2); Controller::new(things, watcher::Config::default()) .with_config(config) .reconcile_all_on(rx) .shutdown_on_signal() .run( |j, _| async move { println!("Reconcile {:?}", j.metadata.name); Ok::<_, ControllerError>(Action::requeue(Duration::from_secs(100))) }, |_, _, _| Action::requeue(Duration::from_secs(10)), Arc::new(()), ) .for_each(|res| async move { match res { Ok(o) => info!("Reconciled {o:?}"), Err(e) => warn!("Failed to reconcile: {e}"), } }) .await; Ok(()) } ``` I have 3 instances of `MyCRD` with names "inst-1", "inst-2", "inst-3". 
I expect roughly the following log output when I start the controller, then modify a pod: ``` Starting controller Reconcile Some("inst-1") Reconcile Some("inst-2") Reconcile Some("inst-3") Trigger reconciliation from change to pod Reconcile Some("inst-1") Reconcile Some("inst-2") Reconcile Some("inst-3") ``` Instead, what I get is ``` Starting controller Reconcile Some("inst-1") Reconcile Some("inst-2") Reconcile Some("inst-3") Trigger reconciliation from change to pod Reconcile Some("inst-2") Reconcile Some("inst-3") ... ``` Consistently, one of the instances are skipped. If you set concurrency to 1 instead, then two are skipped. Removing concurrency makes it so that none are skipped. Is this intentional? If so, what am I doing wrong? I can work around this by just implementing the concurrency with a semaphore in my reconcile method, but this feels like a bug to me. ### Possible solution _No response_ ### Additional context _No response_ ### Environment Server Version: v1.27.4 Running minikube ### Configuration and features ```toml k8s-openapi = { version = "0.20.0", features = ["latest"] } kube = { version = "0.86.0", features = ["runtime", "derive", "unstable-runtime"] } ``` ### Affected crates kube-runtime ### Would you like to work on fixing this bug? None
Thanks for the report. If true, this is not intentional. At most what you should expect to see is deduplication of the objects that are queued to run (i.e. if you queued up multiple `inst-1` reconciliations while `inst-1` was already running, then you'd only get the last). The concurrency was added relatively recently in https://github.com/kube-rs/kube/pull/1277/files I don't have time to look at this at the moment, so putting a help wanted on it. Might be good to get a failing test case for this setup as a starting point. Alright, thanks. If I find the time I'll see if I can figure out why this is happening, since I'm able to reproduce.
2023-10-25T15:25:31
0.86
9c81f1f22802b2e9e1c63568ff49c4ddf92ffe2a
[ "controller::runner::tests::runner_should_respect_max_concurrent_executions" ]
[ "controller::future_hash_map::tests::fhm_should_forward_all_values_and_shut_down", "controller::runner::tests::runner_should_wait_for_readiness", "reflector::store::tests::should_allow_getting_clusterscoped_object_by_clusterscoped_ref", "reflector::store::tests::should_allow_getting_namespaced_object_by_names...
[ "utils::stream_subscribe::tests::a_lagging_subscriber_does_not_impact_a_well_behaved_subscriber" ]
[]
kube-rs/kube
1,281
kube-rs__kube-1281
[ "1278" ]
3af6b7f1df216380dd9c9bd13a163fe5281d67fa
diff --git a/kube-core/src/params.rs b/kube-core/src/params.rs --- a/kube-core/src/params.rs +++ b/kube-core/src/params.rs @@ -103,18 +103,22 @@ impl ListParams { } if let Some(continue_token) = &self.continue_token { qp.append_pair("continue", continue_token); - } - - if let Some(rv) = &self.resource_version { - qp.append_pair("resourceVersion", rv.as_str()); - } - match &self.version_match { - None => {} - Some(VersionMatch::NotOlderThan) => { - qp.append_pair("resourceVersionMatch", "NotOlderThan"); - } - Some(VersionMatch::Exact) => { - qp.append_pair("resourceVersionMatch", "Exact"); + } else { + // When there's a continue token, we don't want to set resourceVersion + if let Some(rv) = &self.resource_version { + if rv != "0" || (rv == "0" && self.limit.is_none()) { + qp.append_pair("resourceVersion", rv.as_str()); + + match &self.version_match { + None => {} + Some(VersionMatch::NotOlderThan) => { + qp.append_pair("resourceVersionMatch", "NotOlderThan"); + } + Some(VersionMatch::Exact) => { + qp.append_pair("resourceVersionMatch", "Exact"); + } + } + } } } }
diff --git a/kube-core/src/request.rs b/kube-core/src/request.rs --- a/kube-core/src/request.rs +++ b/kube-core/src/request.rs @@ -746,16 +746,31 @@ mod test { } #[test] - fn list_not_older() { + fn list_paged_any_semantic() { + let url = corev1::Pod::url_path(&(), Some("ns")); + let gp = ListParams::default().limit(50).match_any(); + let req = Request::new(url).list(&gp).unwrap(); + assert_eq!(req.uri().query().unwrap(), "&limit=50"); + } + + #[test] + fn list_paged_with_continue_any_semantic() { + let url = corev1::Pod::url_path(&(), Some("ns")); + let gp = ListParams::default().limit(50).continue_token("1234").match_any(); + let req = Request::new(url).list(&gp).unwrap(); + assert_eq!(req.uri().query().unwrap(), "&limit=50&continue=1234"); + } + + #[test] + fn list_paged_with_continue_starting_at() { let url = corev1::Pod::url_path(&(), Some("ns")); let gp = ListParams::default() - .at("20") - .matching(VersionMatch::NotOlderThan); + .limit(50) + .continue_token("1234") + .at("9999") + .matching(VersionMatch::Exact); let req = Request::new(url).list(&gp).unwrap(); - assert_eq!( - req.uri().query().unwrap(), - "&resourceVersion=20&resourceVersionMatch=NotOlderThan" - ); + assert_eq!(req.uri().query().unwrap(), "&limit=50&continue=1234"); } #[test]
ResourceVersionMatch should be cleared when doing a paged call ### Current and expected behavior There's a [validation the APIServer](https://github.com/kubernetes/apimachinery/blob/master/pkg/apis/meta/internalversion/validation/validation.go#L38) that disallows the use of ResourceVersionMatch and Continue on the same API call. This seems to become problematic with the introduction of paged lists on watcher. ### Possible solution There's a similar PR on client-go ( https://github.com/kubernetes/kubernetes/pull/107311 ) for reference. So I guess the simplest option is to change `populate_qp` so that `resourceVersionMatch` is omitted when `continue_token` is Some. What you think? ### Additional context _No response_ ### Environment N/A ### Configuration and features ```toml kube = { version = "=0.84.0", default-features = false, features = ["client", "runtime", "ws", "gzip", "rustls-tls"] } k8s-openapi = { version = "0.18.0", features = ["v1_24"] } k8s-metrics = "=0.12.0" ``` ### Affected crates kube-client ### Would you like to work on fixing this bug? maybe
While investigating the above, I also noticed that combining `any_semantic` with `page_size` doesn't seem to work as intended. ```rust let cf = watcher::Config::default() .fields(&field_selector) .labels(&label_selector) .any_semantic() .page_size(1) ``` The end result is a query that looks like this: ``` /api/v1/pods?&fieldSelector=&labelSelector=&limit=1&resourceVersion=0&resourceVersionMatch=NotOlderThan ``` And as per https://github.com/kubernetes/kubernetes/issues/59684 when `resourceVersion=0`, the `limit` param is ignored. According to this [table](https://kubernetes.io/docs/reference/using-api/api-concepts/#semantics-for-get-and-list), if we don't set `resourceVersionMatch` when using `limit` and `resourceVersion="0"`, we still achieve the `Any` semantic. So I guess omitting resourceVersionMatch when paging would solve both problems. Will try this now. Ah, thanks for the report. I imagine we could add in a way to unset this via https://github.com/kube-rs/kube/blob/5e98a92c881b9387af9e0f32cc38b92246a5aa7e/kube-runtime/src/watcher.rs#L418-L420 when paging.
2023-08-17T19:35:20
0.85
3af6b7f1df216380dd9c9bd13a163fe5281d67fa
[ "request::test::list_paged_any_semantic", "request::test::list_paged_with_continue_any_semantic", "request::test::list_paged_with_continue_starting_at" ]
[ "discovery::test_to_plural_native", "duration::tests::parses_the_same_as_go", "conversion::types::tests::simple_request_parses", "metadata::test::can_convert_and_derive_partial_metadata", "params::test::delete_param_serialize", "gvk::tests::gvk_yaml", "object::test::simplified_k8s_object", "dynamic::t...
[]
[]
kube-rs/kube
1,541
kube-rs__kube-1541
[ "1540", "1540" ]
e57b06011e30696e8f85e29b0d246460bae9c96e
diff --git a/kube-core/src/request.rs b/kube-core/src/request.rs --- a/kube-core/src/request.rs +++ b/kube-core/src/request.rs @@ -80,6 +80,7 @@ impl Request { /// Get a single instance pub fn get(&self, name: &str, gp: &GetParams) -> Result<http::Request<Vec<u8>>, Error> { + validate_name(name)?; let urlstr = if let Some(rv) = &gp.resource_version { let target = format!("{}/{}?", self.url_path, name); form_urlencoded::Serializer::new(target) diff --git a/kube-core/src/request.rs b/kube-core/src/request.rs --- a/kube-core/src/request.rs +++ b/kube-core/src/request.rs @@ -106,6 +107,7 @@ impl Request { /// Delete an instance of a resource pub fn delete(&self, name: &str, dp: &DeleteParams) -> Result<http::Request<Vec<u8>>, Error> { + validate_name(name)?; let target = format!("{}/{}?", self.url_path, name); let mut qp = form_urlencoded::Serializer::new(target); let urlstr = qp.finish(); diff --git a/kube-core/src/request.rs b/kube-core/src/request.rs --- a/kube-core/src/request.rs +++ b/kube-core/src/request.rs @@ -149,6 +151,7 @@ impl Request { pp: &PatchParams, patch: &Patch<P>, ) -> Result<http::Request<Vec<u8>>, Error> { + validate_name(name)?; pp.validate(patch)?; let target = format!("{}/{}?", self.url_path, name); let mut qp = form_urlencoded::Serializer::new(target); diff --git a/kube-core/src/request.rs b/kube-core/src/request.rs --- a/kube-core/src/request.rs +++ b/kube-core/src/request.rs @@ -171,6 +174,7 @@ impl Request { pp: &PostParams, data: Vec<u8>, ) -> Result<http::Request<Vec<u8>>, Error> { + validate_name(name)?; let target = format!("{}/{}?", self.url_path, name); let mut qp = form_urlencoded::Serializer::new(target); pp.populate_qp(&mut qp); diff --git a/kube-core/src/request.rs b/kube-core/src/request.rs --- a/kube-core/src/request.rs +++ b/kube-core/src/request.rs @@ -188,6 +192,7 @@ impl Request { subresource_name: &str, name: &str, ) -> Result<http::Request<Vec<u8>>, Error> { + validate_name(name)?; let target = format!("{}/{}/{}", 
self.url_path, name, subresource_name); let mut qp = form_urlencoded::Serializer::new(target); let urlstr = qp.finish(); diff --git a/kube-core/src/request.rs b/kube-core/src/request.rs --- a/kube-core/src/request.rs +++ b/kube-core/src/request.rs @@ -203,6 +208,7 @@ impl Request { pp: &PostParams, data: Vec<u8>, ) -> Result<http::Request<Vec<u8>>, Error> { + validate_name(name)?; let target = format!("{}/{}/{}?", self.url_path, name, subresource_name); let mut qp = form_urlencoded::Serializer::new(target); pp.populate_qp(&mut qp); diff --git a/kube-core/src/request.rs b/kube-core/src/request.rs --- a/kube-core/src/request.rs +++ b/kube-core/src/request.rs @@ -219,6 +225,7 @@ impl Request { pp: &PatchParams, patch: &Patch<P>, ) -> Result<http::Request<Vec<u8>>, Error> { + validate_name(name)?; pp.validate(patch)?; let target = format!("{}/{}/{}?", self.url_path, name, subresource_name); let mut qp = form_urlencoded::Serializer::new(target); diff --git a/kube-core/src/request.rs b/kube-core/src/request.rs --- a/kube-core/src/request.rs +++ b/kube-core/src/request.rs @@ -240,6 +247,7 @@ impl Request { pp: &PostParams, data: Vec<u8>, ) -> Result<http::Request<Vec<u8>>, Error> { + validate_name(name)?; let target = format!("{}/{}/{}?", self.url_path, name, subresource_name); let mut qp = form_urlencoded::Serializer::new(target); pp.populate_qp(&mut qp); diff --git a/kube-core/src/request.rs b/kube-core/src/request.rs --- a/kube-core/src/request.rs +++ b/kube-core/src/request.rs @@ -256,6 +264,7 @@ impl Request { impl Request { /// Get a single metadata instance for a named resource pub fn get_metadata(&self, name: &str, gp: &GetParams) -> Result<http::Request<Vec<u8>>, Error> { + validate_name(name)?; let urlstr = if let Some(rv) = &gp.resource_version { let target = format!("{}/{}?", self.url_path, name); form_urlencoded::Serializer::new(target) diff --git a/kube-core/src/request.rs b/kube-core/src/request.rs --- a/kube-core/src/request.rs +++ 
b/kube-core/src/request.rs @@ -310,6 +319,7 @@ impl Request { pp: &PatchParams, patch: &Patch<P>, ) -> Result<http::Request<Vec<u8>>, Error> { + validate_name(name)?; pp.validate(patch)?; let target = format!("{}/{}?", self.url_path, name); let mut qp = form_urlencoded::Serializer::new(target);
diff --git a/kube-core/src/request.rs b/kube-core/src/request.rs --- a/kube-core/src/request.rs +++ b/kube-core/src/request.rs @@ -324,6 +334,14 @@ impl Request { } } +/// Names must not be empty as otherwise API server would interpret a `get` as `list`, or a `delete` as `delete_collection` +fn validate_name(name: &str) -> Result<(), Error> { + if name.is_empty() { + return Err(Error::Validation("A non-empty name is required".into())); + } + Ok(()) +} + /// Extensive tests for Request of k8s_openapi::Resource structs /// /// Cheap sanity check to ensure type maps work as expected diff --git a/kube-core/src/request.rs b/kube-core/src/request.rs --- a/kube-core/src/request.rs +++ b/kube-core/src/request.rs @@ -331,7 +349,7 @@ impl Request { mod test { use crate::{ params::{GetParams, PostParams, VersionMatch, WatchParams}, - request::Request, + request::{Error, Request}, resource::Resource, }; use http::header; diff --git a/kube-core/src/request.rs b/kube-core/src/request.rs --- a/kube-core/src/request.rs +++ b/kube-core/src/request.rs @@ -490,6 +508,13 @@ mod test { ); } + #[test] + fn get_empty_name() { + let url = appsv1::Deployment::url_path(&(), Some("ns")); + let req = Request::new(url).get("", &GetParams::any()); + assert!(matches!(req, Err(Error::Validation(_)))); + } + #[test] fn list_path() { let url = appsv1::Deployment::url_path(&(), Some("ns"));
`Api::get("")` (get with empty name), will send a command to the k8s-api-server that looks like a `list` and thus will result in a serde error. ### Current and expected behavior When callind `Api::get()` with a `name:&str` which is `""` kube will generate an Api call to the k8s server which is equal to the list call (prob because the trailing / is ignored here https://github.com/kube-rs/kube/blob/0.92.1/kube-core/src/request.rs#L84 . The ApiServer will send a List, e.g. `PodList` (or whatever) back instead a single instance. As a result serde will always fail here https://github.com/kube-rs/kube/blob/0.92.1/kube-client/src/client/mod.rs#L249 while we could argue that the behavior from `kube` side is completely valid, it is a bit inconvenient and makes debugging a tiny bit harder. as an empty string is probably a invalid name for a Kubernetes ressource (needs to be verified), we can fail early on and don't need to do the actually request, especially dont fail with that serde error. For performance reason maybe only in debug mode. If you tell me where to make the change I can also provide an PR. (In case this bug is valid for you at all) ### Possible solution Check `&name` within `Api::get()` and return an error, maybe here: https://github.com/kube-rs/kube/blob/0.92.1/kube-core/src/request.rs#L83 and return `Error::Validation` ### Additional context _No response_ ### Environment AWS cluster: Server Version: v1.30.2-eks-db838b0 should be independent of this though ### Configuration and features ```toml kube = { version = "0.92", features = ["runtime", "client", "derive"] } k8s-openapi = { version = "0.22", features = ["v1_30"] } ``` ### Affected crates kube-core, kube-client ### Would you like to work on fixing this bug? yes `Api::get("")` (get with empty name), will send a command to the k8s-api-server that looks like a `list` and thus will result in a serde error. 
### Current and expected behavior When callind `Api::get()` with a `name:&str` which is `""` kube will generate an Api call to the k8s server which is equal to the list call (prob because the trailing / is ignored here https://github.com/kube-rs/kube/blob/0.92.1/kube-core/src/request.rs#L84 . The ApiServer will send a List, e.g. `PodList` (or whatever) back instead a single instance. As a result serde will always fail here https://github.com/kube-rs/kube/blob/0.92.1/kube-client/src/client/mod.rs#L249 while we could argue that the behavior from `kube` side is completely valid, it is a bit inconvenient and makes debugging a tiny bit harder. as an empty string is probably a invalid name for a Kubernetes ressource (needs to be verified), we can fail early on and don't need to do the actually request, especially dont fail with that serde error. For performance reason maybe only in debug mode. If you tell me where to make the change I can also provide an PR. (In case this bug is valid for you at all) ### Possible solution Check `&name` within `Api::get()` and return an error, maybe here: https://github.com/kube-rs/kube/blob/0.92.1/kube-core/src/request.rs#L83 and return `Error::Validation` ### Additional context _No response_ ### Environment AWS cluster: Server Version: v1.30.2-eks-db838b0 should be independent of this though ### Configuration and features ```toml kube = { version = "0.92", features = ["runtime", "client", "derive"] } k8s-openapi = { version = "0.22", features = ["v1_30"] } ``` ### Affected crates kube-core, kube-client ### Would you like to work on fixing this bug? yes
Thanks for the report. I agree we should fail early on this. I imagine this should be handled using the `Error::Validation` variant exposed in kube-core/../request.rs. This variant commonly brings up user errors and is typically passed up early. We could probably return it in all the `get_` functions implemented on `Request` in request.rs (which I assume will error similarly) if `name.is_empty()`. Thanks for the report. I agree we should fail early on this. I imagine this should be handled using the `Error::Validation` variant exposed in kube-core/../request.rs. This variant commonly brings up user errors and is typically passed up early. We could probably return it in all the `get_` functions implemented on `Request` in request.rs (which I assume will error similarly) if `name.is_empty()`.
2024-07-20T20:21:54
0.92
e57b06011e30696e8f85e29b0d246460bae9c96e
[ "request::test::get_empty_name" ]
[ "admission::test::v1_webhook_unmarshals", "dynamic::test::can_parse_dynamic_object_into_pod", "kubelet_debug::test::node_portforward_test", "dynamic::test::raw_resource_in_default_group", "discovery::test_to_plural_native", "object::test::k8s_object_list_default_types", "object::test::simplified_k8s_obj...
[]
[]
lalrpop/lalrpop
898
lalrpop__lalrpop-898
[ "897" ]
1eb87a040d564746191af9c021c5a01470ec3625
diff --git a/lalrpop/src/lr1/lane_table/construct/merge.rs b/lalrpop/src/lr1/lane_table/construct/merge.rs --- a/lalrpop/src/lr1/lane_table/construct/merge.rs +++ b/lalrpop/src/lr1/lane_table/construct/merge.rs @@ -199,6 +199,7 @@ impl<'m> ContextSets<'m> { } fn union(&mut self, source: StateIndex, target: StateIndex) -> bool { + debug!("state_sets: {:?}", self.state_sets); let set1 = self.state_sets[&source]; let set2 = self.state_sets[&target]; let result = self.unify.unify_var_var(set1, set2).is_ok(); diff --git a/lalrpop/src/lr1/lane_table/lane/mod.rs b/lalrpop/src/lr1/lane_table/lane/mod.rs --- a/lalrpop/src/lr1/lane_table/lane/mod.rs +++ b/lalrpop/src/lr1/lane_table/lane/mod.rs @@ -60,6 +60,7 @@ impl<'trace, 'grammar, L: Lookahead> LaneTracer<'trace, 'grammar, L> { Action::Reduce(prod) => { let item = Item::lr0(prod, prod.symbols.len()); + self.table.add_lookahead(state, conflict, &TokenSet::new()); self.continue_trace(state, conflict, item, &mut visited_set); } } diff --git a/lalrpop/src/lr1/lane_table/lane/mod.rs b/lalrpop/src/lr1/lane_table/lane/mod.rs --- a/lalrpop/src/lr1/lane_table/lane/mod.rs +++ b/lalrpop/src/lr1/lane_table/lane/mod.rs @@ -72,6 +73,7 @@ impl<'trace, 'grammar, L: Lookahead> LaneTracer<'trace, 'grammar, L> { item: Lr0Item<'grammar>, visited: &mut Set<(StateIndex, Lr0Item<'grammar>)>, ) { + debug!("continue_trace: state={:?}, index={:?}", state, item.index); if !visited.insert((state, item)) { return; }
diff --git a/lalrpop/src/lr1/lane_table/test.rs b/lalrpop/src/lr1/lane_table/test.rs --- a/lalrpop/src/lr1/lane_table/test.rs +++ b/lalrpop/src/lr1/lane_table/test.rs @@ -104,6 +104,30 @@ Y: () = { ) } +/// A variation on G1 to omit the possibility of shifting +pub fn example_g2() -> Grammar { + normalized_grammar( + r#" +grammar; + +pub G = { + "a" X "d", + "a" Y "c", + "b" X "c", + "b" Y "d", +}; + +X = { + "e" +}; + +Y = { + "e" +}; +"#, + ) +} + fn build_table<'grammar>( grammar: &'grammar Grammar, goal: &str, diff --git a/lalrpop/src/lr1/lane_table/test.rs b/lalrpop/src/lr1/lane_table/test.rs --- a/lalrpop/src/lr1/lane_table/test.rs +++ b/lalrpop/src/lr1/lane_table/test.rs @@ -294,6 +318,27 @@ P: () = { ) } +// The G1 example has a non-conflicting shift in the state with the reduce/reduce conflict. This +// test exercises the case where the reduce/reduce is the only difference. +#[test] +fn example_g2_build() { + let _tls = Tls::test(); + let grammar = example_g2(); + + let _lr1_tls = Lr1Tls::install(grammar.terminals.clone()); + let lr0_err = build::build_lr0_states(&grammar, nt("__G")).unwrap_err(); + let states = build_states(&grammar, nt("__G")).expect("failed to build lane table states"); + + // we require more *states* than LR(0), not just different lookahead + assert_eq!(states.len() - lr0_err.states.len(), 1); + + let tree = interpret::interpret(&states, tokens!["a", "e", "d"]).unwrap(); + expect_debug(&tree, r#"[__G: [G: "a", [X: "e"], "d"]]"#); + + let tree = interpret::interpret(&states, tokens!["b", "e", "d"]).unwrap(); + expect_debug(&tree, r#"[__G: [G: "b", [Y: "e"], "d"]]"#); +} + #[test] fn large_conflict_1() { let _tls = Tls::test(); diff --git a/lalrpop/src/lr1/lane_table/test.rs b/lalrpop/src/lr1/lane_table/test.rs --- a/lalrpop/src/lr1/lane_table/test.rs +++ b/lalrpop/src/lr1/lane_table/test.rs @@ -320,7 +365,7 @@ fn large_conflict_1() { | S5 | | | ["a"] | ["r"] | {S16} | | S7 | | | ["c", "w"] | ["d"] | {S16} | | S16 | | | | | {S27} | -| 
S27 | ["s"] | ["k"] | | | {S32} | +| S27 | ["s"] | ["k"] | [] | [] | {S32} | | S32 | | | ["z"] | ["u"] | {S16} | "# .trim_start(),
Lalrpop 0.20.2 exception "no entry found for key" Repo: https://github.com/Storyyeller/cubiml-demo/commit/502da7af9ed5b37571df10cbcf4fb41a45fba1ab run "RUST_BACKTRACE=full lalrpop src/grammar.lalr" produces the error ``` processing file `src/grammar.lalr` thread 'main' panicked at /home/rsg/.cargo/registry/src/index.crates.io-6f17d22bba15001f/lalrpop-0.20.2/src/lr1/lane_table/construct/merge.rs:203:35: no entry found for key stack backtrace: 0: 0x563f5a075586 - <std::sys_common::backtrace::_print::DisplayBacktrace as core::fmt::Display>::fmt::h67bb2d10ecb8af10 1: 0x563f5a0987f0 - core::fmt::write::h5987d5bee25e3bd6 2: 0x563f5a072f9f - std::io::Write::write_fmt::h0876e0f4bddb923c 3: 0x563f5a075364 - std::sys_common::backtrace::print::h1d97d5831d0262fb 4: 0x563f5a0768c7 - std::panicking::default_hook::{{closure}}::h39fabb1b3e65f47a 5: 0x563f5a076629 - std::panicking::default_hook::h416520d6bb2cb53f 6: 0x563f5a076d58 - std::panicking::rust_panic_with_hook::h99ef5cbc4b3b0ae4 7: 0x563f5a076c32 - std::panicking::begin_panic_handler::{{closure}}::h2284f60633c08873 8: 0x563f5a075a86 - std::sys_common::backtrace::__rust_end_short_backtrace::h84bc49437da26b5c 9: 0x563f5a076984 - rust_begin_unwind 10: 0x563f59dd5685 - core::panicking::panic_fmt::hfa0af2309df9012f 11: 0x563f59dd5643 - core::option::expect_failed::hee4b38d562c1aee6 12: 0x563f59ebb3be - lalrpop::lr1::lane_table::construct::merge::ContextSets::union::h4b30069552aa0692 13: 0x563f59eba3fc - lalrpop::lr1::lane_table::construct::merge::Merge::walk::h8c444bf665268054 14: 0x563f59eb9dcd - lalrpop::lr1::lane_table::construct::merge::Merge::start::h5f6b8034a006f3f3 15: 0x563f59f0dce5 - lalrpop::lr1::lane_table::construct::LaneTableConstruct::construct::hb8c92e7d18984d4c 16: 0x563f59de8ed4 - lalrpop::lr1::lane_table::build_lane_table_states::h43ea42028cbdd64f 17: 0x563f59eeeb6b - lalrpop::lr1::build::build_lr1_states::h57c4adf8e06ee2bf 18: 0x563f59e88c21 - lalrpop::lr1::build_states::h595add96ad68ebc1 19: 0x563f59de390d - 
lalrpop::build::process_file_into::hc55a95faee152d69 20: 0x563f59ddae92 - lalrpop::build::process_file::h95c324464dd82c3a 21: 0x563f59dd8662 - lalrpop::api::Configuration::process_file::h909a3be569a4f8d5 22: 0x563f59dd7bbf - lalrpop::main::ha7aa5380d9a52229 23: 0x563f59ddab13 - std::sys_common::backtrace::__rust_begin_short_backtrace::heb7abe9d39a778b9 24: 0x563f59ddab31 - std::rt::lang_start::{{closure}}::h69aa2e285f69979b 25: 0x563f5a06dc81 - std::rt::lang_start_internal::h000d84e53c076064 26: 0x563f59dd7fe5 - main 27: 0x7f5c274f9083 - __libc_start_main at /build/glibc-e2p3jK/glibc-2.31/csu/../csu/libc-start.c:308:16 28: 0x563f59dd5e3e - _start 29: 0x0 - <unknown> ``` lalrpop version is 0.20.2
Thanks for the report! I'm looking into this. This part of the code is hit when the grammar is not LALR(1), so the lane table algorithm needs to split states in order to resolve inconsistencies. I checked our existing test coverage, and we have a few tests that hit this path, but all of them resolve shift-reduce conflicts, while this grammar needs to resolve a reduce-reduce conflict (Note that this is not an actual conflict in the grammar, but rather a step in the lane table algorithm, which initially treats the grammar as LR(0), and then resolves conflicts as needed in order to handle full LR(1) in a space efficient way). So I imagine this code path just doesn't have much test coverage. It looks to me as though the problem is that the conflicting state isn't being added to the lane table, leading to the panic reported here, since we attempt to access the state when resolving. I tried simply adding it to the lane table, but that seems to cause a lot of reduce/reduce conflicts in valid grammars - I'm not sure why. That's all pretty tentative. My next step will be to try to come up with a much more minimal reproducer that will be easier to reason about. We'll need that anyways to add test coverage for this case. Hopefully with a smaller reproducer it will be easier to figure out exactly what's going on here. My time over the next few days to work on this will likely be spotty, but I wanted to update that I'm looking at it and slowly narrowing in towards a root cause. The minimal example came together quicker than I expected. I can reproduce with this grammar: ``` grammar; pub G2 = { "a" X "d", "a" Y "c", "b" X "c", "b" Y "d", }; X = { "e" }; Y = { "e" }; ``` This is based on the G1 grammar described [here](https://smallcultfollowing.com/babysteps/blog/2017/03/17/the-lane-table-algorithm/), which we test. The difference is that while the actual conflict in G1 is reduce/reduce, it happens that the conflicting state also has a shift action. 
This grammar, which I'm calling G2 has a reduce/reduce conflict that can be resolved via the lane table, but no additional shift action. The code adds the original state to the lane table on a shift action, but not a reduce action, so the presence of a consistent shift action ensures that the missing state ends up in the lane table. I'm pretty sure we need to get that state into the table in this case as well, but my naive attempts to do that have broken other things, so I need to investigate exactly why. Okay, I think I have it. Some tweaks to exactly how I added the state to the lane table, and things are mostly working. The G2 grammar I posted above builds successfully, and the cubimpl grammar shared in the report no longer panics, but instead produces conflict messages. That grammar is pretty involved, so I have no idea if it should be valid or not, but we're not panicing anymore, so I suspect the conflicts are legitimate, particular since the more understandable G2 works. I need to do some cleanup and put G2 into a unit test, and then I'll submit a PR.
2024-05-17T22:00:48
0.20
1eb87a040d564746191af9c021c5a01470ec3625
[ "lr1::lane_table::test::example_g2_build", "lr1::lane_table::test::large_conflict_1" ]
[ "collections::multimap::push", "collections::multimap::push_nil", "lexer::dfa::overlap::null", "lexer::dfa::overlap::alphabet", "lexer::dfa::overlap::stagger", "lexer::dfa::overlap::empty_range", "lexer::dfa::overlap::repeat", "lexer::dfa::test::alternatives", "lexer::dfa::test::alternatives_contrac...
[]
[]
amethyst/legion
96
amethyst__legion-96
[ "92" ]
d0be105a76e42d3027724eb017d08245ec7c4b0d
diff --git a/legion_core/src/entity.rs b/legion_core/src/entity.rs --- a/legion_core/src/entity.rs +++ b/legion_core/src/entity.rs @@ -301,7 +301,19 @@ impl EntityAllocator { impl Drop for EntityAllocator { fn drop(&mut self) { for block in self.blocks.write().blocks.drain(..) { - if let Some(block) = block { + if let Some(mut block) = block { + // If any entity in the block is in an allocated state, clear + // and repopulate the free list. This forces all entities into an + // unallocated state. Bump versions of all entity indexes to + // ensure that we don't reuse the same entity. + if block.free.len() < block.versions.len() { + block.free.clear(); + for (i, version) in block.versions.iter_mut().enumerate() { + *version += Wrapping(1); + block.free.push(i as u32 + block.start); + } + } + self.allocator.lock().free(block); } }
diff --git a/tests/world_api.rs b/tests/world_api.rs --- a/tests/world_api.rs +++ b/tests/world_api.rs @@ -422,3 +422,24 @@ fn mutate_change_tag() { assert_eq!(2, query_model_5.iter(&world).count()); } + +// This test repeatedly creates a world with new entities and drops it, reproducing +// https://github.com/TomGillen/legion/issues/92 +#[test] +fn lots_of_deletes() { + let _ = tracing_subscriber::fmt::try_init(); + + let universe = Universe::new(); + + for _ in 0..10000 { + let shared = (Model(5),); + let components = vec![ + (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)), + (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)), + (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)), + ]; + + let mut world = universe.create_world(); + world.insert(shared, components).to_vec(); + } +}
Panic occurs when repeatedly deleting worlds with entities still allocated in them The below test case reproduces a bug I mentioned a couple days ago in discord. EntityAllocator::create_entity has a bit of code that may call BlockAllocator::allocate() and then immediately call EntityBlock::allocate() on the block that it returns. This call panics when it's unwrapped. The root cause appears to be that worlds with allocated entities do not return the entities to the entity allocator block when the world is dropped. I think this could be resolved by having any dropped world return all Entities back to the entity allocator. It also seems like any entity block that is getting returned to the pool shouldn't have any entities allocated from it. It may be worth adding a debug_assert!() for this. This test case fails 100%. It passes if world.delete(entity) is called for all entities. ```rust #[test] fn lots_of_deletes() { let _ = tracing_subscriber::fmt::try_init(); let universe = Universe::new(); for _ in 0..10000 { let shared = (Model(5),); let components = vec![ (Pos(1., 2., 3.), Rot(0.1, 0.2, 0.3)), (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)), (Pos(4., 5., 6.), Rot(0.4, 0.5, 0.6)), ]; let mut world = universe.create_world(); let entities = world.insert(shared, components).to_vec(); } } ```
I looked into making world implement Drop and free all entities, but iterating through all the archetypes to find the entities and free them in sequence is a bit complex and slow. I found another option that is quick and simple, if a little bit more error prone since it depends on implementation details. ```rust impl Drop for EntityAllocator { fn drop(&mut self) { for mut block in self.blocks.drain(..) { // If any entity in the block is in an allocated state, clear and repopulate the free // list, forcing all entities into an unallocated state, but without loosing version // info if block.free.len() < block.versions.len() { block.free.clear(); for i in 0..block.versions.len() { block.free.push(i as u32 + block.start); } } self.allocator.lock().free(block); } } } ``` Happy to PR this if you think this is a good way to go. Thanks! Versions need to be incremented when an entity ID is free'd, else that entity will be handed out again. If you think this is a reasonable solution other than the issue you mentioned, maybe we could bump all the versions by one? Or would you prefer a different approach? Bumping all the versions up one should work.
2020-02-07T14:52:06
0.2
d0be105a76e42d3027724eb017d08245ec7c4b0d
[ "lots_of_deletes" ]
[ "slice_vec_iterator_split", "slice_vec_iterate", "query_iter_chunks_tag", "query_cached_read_entity_data", "query_try_read_entity_data", "query_try_write_entity_data", "query_on_changed_no_changes", "query_read_entity_data", "query_read_entity_data_par", "query_mixed_entity_data_tuple", "query_w...
[]
[]
libreddit/libreddit
732
libreddit__libreddit-732
[ "689" ]
dfa57c890df0ef0b620294e2126714887c2605fa
diff --git a/README.md b/README.md --- a/README.md +++ b/README.md @@ -231,6 +231,8 @@ Assign a default value for each user-modifiable setting by passing environment v | `USE_HLS` | `["on", "off"]` | `off` | | `HIDE_HLS_NOTIFICATION` | `["on", "off"]` | `off` | | `AUTOPLAY_VIDEOS` | `["on", "off"]` | `off` | +| `HIDE_AWARDS` | `["on", "off"]` | `off` | +| `SUBSCRIPTIONS` | `+`-delimited list of subreddits (`sub1+sub2+sub3+...`) | _(none)_ | | `HIDE_AWARDS` | `["on", "off"]` | `off` | `DISABLE_VISIT_REDDIT_CONFIRMATION` | `["on", "off"]` | `off` | diff --git a/app.json b/app.json --- a/app.json +++ b/app.json @@ -50,6 +50,9 @@ "LIBREDDIT_BANNER": { "required": false }, + "LIBREDDIT_DEFAULT_SUBSCRIPTIONS": { + "required": false + }, "LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION": { "required": false } diff --git a/src/client.rs b/src/client.rs --- a/src/client.rs +++ b/src/client.rs @@ -1,20 +1,20 @@ use cached::proc_macro::cached; use futures_lite::{future::Boxed, FutureExt}; -use hyper::{body, body::Buf, client, header, Body, Method, Request, Response, Uri, Client}; +use hyper::client::HttpConnector; +use hyper::{body, body::Buf, client, header, Body, Client, Method, Request, Response, Uri}; +use hyper_rustls::HttpsConnector; use libflate::gzip; +use once_cell::sync::Lazy; use percent_encoding::{percent_encode, CONTROLS}; use serde_json::Value; use std::{io, result::Result}; -use hyper::client::HttpConnector; -use hyper_rustls::HttpsConnector; -use once_cell::sync::Lazy; use crate::dbg_msg; use crate::server::RequestExt; const REDDIT_URL_BASE: &str = "https://www.reddit.com"; -static CLIENT: Lazy<Client<HttpsConnector<HttpConnector>>> = Lazy::new(||{ +static CLIENT: Lazy<Client<HttpsConnector<HttpConnector>>> = Lazy::new(|| { let https = hyper_rustls::HttpsConnectorBuilder::new().with_native_roots().https_only().enable_http1().build(); client::Client::builder().build(https) }); diff --git a/src/client.rs b/src/client.rs --- a/src/client.rs +++ 
b/src/client.rs @@ -142,7 +142,14 @@ fn request(method: &'static Method, path: String, redirect: bool, quarantine: bo .header("Accept-Encoding", if method == Method::GET { "gzip" } else { "identity" }) .header("Accept-Language", "en-US,en;q=0.5") .header("Connection", "keep-alive") - .header("Cookie", if quarantine { "_options=%7B%22pref_quarantine_optin%22%3A%20true%2C%20%22pref_gated_sr_optin%22%3A%20true%7D" } else { "" }) + .header( + "Cookie", + if quarantine { + "_options=%7B%22pref_quarantine_optin%22%3A%20true%2C%20%22pref_gated_sr_optin%22%3A%20true%7D" + } else { + "" + }, + ) .body(Body::empty()); async move { diff --git a/src/config.rs b/src/config.rs --- a/src/config.rs +++ b/src/config.rs @@ -52,6 +52,9 @@ pub struct Config { #[serde(rename = "LIBREDDIT_DEFAULT_HIDE_AWARDS")] pub(crate) default_hide_awards: Option<String>, + #[serde(rename = "LIBREDDIT_DEFAULT_SUBSCRIPTIONS")] + pub(crate) default_subscriptions: Option<String>, + #[serde(rename = "LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION")] pub(crate) default_disable_visit_reddit_confirmation: Option<String>, diff --git a/src/config.rs b/src/config.rs --- a/src/config.rs +++ b/src/config.rs @@ -84,6 +87,7 @@ impl Config { default_use_hls: parse("LIBREDDIT_DEFAULT_USE_HLS"), default_hide_hls_notification: parse("LIBREDDIT_DEFAULT_HIDE_HLS"), default_hide_awards: parse("LIBREDDIT_DEFAULT_HIDE_AWARDS"), + default_subscriptions: parse("LIBREDDIT_DEFAULT_SUBSCRIPTIONS"), default_disable_visit_reddit_confirmation: parse("LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION"), banner: parse("LIBREDDIT_BANNER"), } diff --git a/src/config.rs b/src/config.rs --- a/src/config.rs +++ b/src/config.rs @@ -104,6 +108,7 @@ fn get_setting_from_config(name: &str, config: &Config) -> Option<String> { "LIBREDDIT_DEFAULT_HIDE_HLS_NOTIFICATION" => config.default_hide_hls_notification.clone(), "LIBREDDIT_DEFAULT_WIDE" => config.default_wide.clone(), "LIBREDDIT_DEFAULT_HIDE_AWARDS" => 
config.default_hide_awards.clone(), + "LIBREDDIT_DEFAULT_SUBSCRIPTIONS" => config.default_subscriptions.clone(), "LIBREDDIT_DEFAULT_DISABLE_VISIT_REDDIT_CONFIRMATION" => config.default_disable_visit_reddit_confirmation.clone(), "LIBREDDIT_BANNER" => config.banner.clone(), _ => None, diff --git a/src/instance_info.rs b/src/instance_info.rs --- a/src/instance_info.rs +++ b/src/instance_info.rs @@ -139,6 +139,7 @@ impl InstanceInfo { ["Blur NSFW", &convert(&self.config.default_blur_nsfw)], ["Use HLS", &convert(&self.config.default_use_hls)], ["Hide HLS notification", &convert(&self.config.default_hide_hls_notification)], + ["Subscriptions", &convert(&self.config.default_subscriptions)], ]) .with_header_row(["Default preferences"]), ); diff --git a/src/instance_info.rs b/src/instance_info.rs --- a/src/instance_info.rs +++ b/src/instance_info.rs @@ -153,10 +154,10 @@ impl InstanceInfo { Deploy date: {}\n Deploy timestamp: {}\n Compile mode: {}\n + SFW only: {:?}\n Config:\n Banner: {:?}\n Hide awards: {:?}\n - SFW only: {:?}\n Default theme: {:?}\n Default front page: {:?}\n Default layout: {:?}\n diff --git a/src/instance_info.rs b/src/instance_info.rs --- a/src/instance_info.rs +++ b/src/instance_info.rs @@ -166,15 +167,16 @@ impl InstanceInfo { Default show NSFW: {:?}\n Default blur NSFW: {:?}\n Default use HLS: {:?}\n - Default hide HLS notification: {:?}\n", + Default hide HLS notification: {:?}\n + Default subscriptions: {:?}\n", self.crate_version, self.git_commit, self.deploy_date, self.deploy_unix_ts, self.compile_mode, + self.config.sfw_only, self.config.banner, self.config.default_hide_awards, - self.config.sfw_only, self.config.default_theme, self.config.default_front_page, self.config.default_layout, diff --git a/src/instance_info.rs b/src/instance_info.rs --- a/src/instance_info.rs +++ b/src/instance_info.rs @@ -184,7 +186,8 @@ impl InstanceInfo { self.config.default_show_nsfw, self.config.default_blur_nsfw, self.config.default_use_hls, - 
self.config.default_hide_hls_notification + self.config.default_hide_hls_notification, + self.config.default_subscriptions, ) } StringType::Html => self.to_table(), diff --git a/src/main.rs b/src/main.rs --- a/src/main.rs +++ b/src/main.rs @@ -161,7 +161,7 @@ async fn main() { let mut app = server::Server::new(); // Force evaluation of statics. In instance_info case, we need to evaluate - // the timestamp so deploy date is accurate - in config case, we need to + // the timestamp so deploy date is accurate - in config case, we need to // evaluate the configuration to avoid paying penalty at first request. Lazy::force(&config::CONFIG);
diff --git a/src/config.rs b/src/config.rs --- a/src/config.rs +++ b/src/config.rs @@ -147,3 +152,8 @@ fn test_alt_env_config_precedence() { write("libreddit.toml", config_to_write).unwrap(); assert_eq!(get_setting("LIBREDDIT_DEFAULT_COMMENT_SORT"), Some("top".into())) } +#[test] +#[sealed_test(env = [("LIBREDDIT_DEFAULT_SUBSCRIPTIONS", "news+bestof")])] +fn test_default_subscriptions() { + assert_eq!(get_setting("LIBREDDIT_DEFAULT_SUBSCRIPTIONS"), Some("news+bestof".into())); +}
LIBREDDIT_DEFAULT_SUBSCRIPTIONS setting is no longer read Since upgrading to v0.27.0, the $LIBREDDIT_DEFAULT_SUBSCRIPTIONS setting is not honoured as it was in previous versions. The result is that I see the a feed of random posts rather than the feeds that I am interested in. I think this was caused by the changes introduced by pull request #664. Thanks Keith
This configuration was undocumented so I didn't know to include it in my PR. I'm going to fix this right after #685 goes in to avoid a conflict. Yeah I realised that after posting so not surprising you didn't pick it up. Grateful for the fix in due course. Thanks Keith Thanks allot for your work. Sadly it seems as if it didn't make it into the 0.27.1 release. Would be great to see it fixed at some point. For the time being i'm forced to stick with 0.26 as i don't want to loose this setting. Apologies, I was hoping we could get #685 in before 0.27.1 - I'm going to add it as soon as it goes in so we can get it for the next release which should be soon as we've been working through the PR. It's a sizable change so we want to make sure it is done right. Appreciate the efforts. As a workaround, you can save a bookmark to https://your.libreddit.site/r/sub1+sub2+sub3/. Keith Great to see that blocking PR made it into the new release. Hi, apologies (again!) for the stagnation. I was planning on having this go right after 685 but we've cut a new release. I'm working on this right now!
2023-02-27T03:34:30
0.29
dfa57c890df0ef0b620294e2126714887c2605fa
[ "config::test_default_subscriptions" ]
[ "server::tests::test_determine_compressor", "utils::tests::format_num_works", "config::test_env_config_precedence", "config::test_alt_env_config_precedence", "config::test_config", "utils::tests::rewrite_urls_removes_backslashes", "config::test_env_var", "utils::tests::test_format_url", "server::tes...
[]
[]
libreddit/libreddit
461
libreddit__libreddit-461
[ "281" ]
ed11135af8e5a469e6e53ccded4350919ca065da
diff --git a/Cargo.lock b/Cargo.lock --- a/Cargo.lock +++ b/Cargo.lock @@ -540,7 +540,7 @@ checksum = "efaa7b300f3b5fe8eb6bf21ce3895e1751d9665086af2d64b42f19701015ff4f" [[package]] name = "libreddit" -version = "0.22.4" +version = "0.22.5" dependencies = [ "askama", "async-recursion", diff --git a/Cargo.toml b/Cargo.toml --- a/Cargo.toml +++ b/Cargo.toml @@ -3,7 +3,7 @@ name = "libreddit" description = " Alternative private front-end to Reddit" license = "AGPL-3.0" repository = "https://github.com/spikecodes/libreddit" -version = "0.22.4" +version = "0.22.5" authors = ["spikecodes <19519553+spikecodes@users.noreply.github.com>"] edition = "2021" diff --git a/src/post.rs b/src/post.rs --- a/src/post.rs +++ b/src/post.rs @@ -102,7 +102,7 @@ async fn parse_post(json: &serde_json::Value) -> Post { let body = if val(post, "removed_by_category") == "moderator" { format!("<div class=\"md\"><p>[removed] — <a href=\"https://www.reveddit.com{}\">view removed post</a></p></div>", permalink) } else { - rewrite_urls(&val(post, "selftext_html")).replace("\\", "") + rewrite_urls(&val(post, "selftext_html")) }; // Build a post using data parsed from Reddit post API diff --git a/src/post.rs b/src/post.rs --- a/src/post.rs +++ b/src/post.rs @@ -201,7 +201,7 @@ fn parse_comments(json: &serde_json::Value, post_link: &str, post_author: &str, let body = if val(&comment, "author") == "[deleted]" && val(&comment, "body") == "[removed]" { format!("<div class=\"md\"><p>[removed] — <a href=\"https://www.reveddit.com{}{}\">view removed comment</a></p></div>", post_link, id) } else { - rewrite_urls(&val(&comment, "body_html")).to_string() + rewrite_urls(&val(&comment, "body_html")) }; let author = Author { diff --git a/src/subreddit.rs b/src/subreddit.rs --- a/src/subreddit.rs +++ b/src/subreddit.rs @@ -336,10 +336,10 @@ pub async fn sidebar(req: Request<Body>) -> Result<Response<Body>, String> { match json(path, quarantined).await { // If success, receive JSON in response Ok(response) => 
template(WikiTemplate { - wiki: rewrite_urls(&val(&response, "description_html").replace("\\", "")), + wiki: rewrite_urls(&val(&response, "description_html")), // wiki: format!( // "{}<hr><h1>Moderators</h1><br><ul>{}</ul>", - // rewrite_urls(&val(&response, "description_html").replace("\\", "")), + // rewrite_urls(&val(&response, "description_html"), // moderators(&sub, quarantined).await.unwrap_or(vec!["Could not fetch moderators".to_string()]).join(""), // ), sub, diff --git a/src/subreddit.rs b/src/subreddit.rs --- a/src/subreddit.rs +++ b/src/subreddit.rs @@ -411,7 +411,7 @@ async fn subreddit(sub: &str, quarantined: bool) -> Result<Subreddit, String> { name: esc!(&res, "display_name"), title: esc!(&res, "title"), description: esc!(&res, "public_description"), - info: rewrite_urls(&val(&res, "description_html").replace("\\", "")), + info: rewrite_urls(&val(&res, "description_html")), // moderators: moderators_list(sub, quarantined).await.unwrap_or_default(), icon: format_url(&icon), members: format_num(members), diff --git a/src/utils.rs b/src/utils.rs --- a/src/utils.rs +++ b/src/utils.rs @@ -607,8 +607,12 @@ pub fn format_url(url: &str) -> String { // Rewrite Reddit links to Libreddit in body of text pub fn rewrite_urls(input_text: &str) -> String { + let text1 = - Regex::new(r#"href="(https|http|)://(www\.|old\.|np\.|amp\.|)(reddit\.com|redd\.it)/"#).map_or(String::new(), |re| re.replace_all(input_text, r#"href="/"#).to_string()); + Regex::new(r#"href="(https|http|)://(www\.|old\.|np\.|amp\.|)(reddit\.com|redd\.it)/"#) + .map_or(String::new(), |re| re.replace_all(input_text, r#"href="/"#).to_string()) + // Remove (html-encoded) "\" from URLs. + .replace("%5C", "").replace(r"\", ""); // Rewrite external media previews to Libreddit Regex::new(r"https://external-preview\.redd\.it(.*)[^?]").map_or(String::new(), |re| {
diff --git a/src/utils.rs b/src/utils.rs --- a/src/utils.rs +++ b/src/utils.rs @@ -710,6 +714,7 @@ pub async fn error(req: Request<Body>, msg: String) -> Result<Response<Body>, St #[cfg(test)] mod tests { use super::format_num; + use super::rewrite_urls; #[test] fn format_num_works() { diff --git a/src/utils.rs b/src/utils.rs --- a/src/utils.rs +++ b/src/utils.rs @@ -719,4 +724,13 @@ mod tests { assert_eq!(format_num(1001), ("1.0k".to_string(), "1001".to_string())); assert_eq!(format_num(1_999_999), ("2.0m".to_string(), "1999999".to_string())); } + + #[test] + fn rewrite_urls_removes_backslashes() { + let comment_body_html = r#"<a href=\"https://www.reddit.com/r/linux%5C_gaming/comments/x/just%5C_a%5C_test%5C/\">https://www.reddit.com/r/linux\\_gaming/comments/x/just\\_a\\_test/</a>"#; + assert_eq!( + rewrite_urls(comment_body_html), + r#"<a href="https://www.reddit.com/r/linux_gaming/comments/x/just_a_test/">https://www.reddit.com/r/linux_gaming/comments/x/just_a_test/</a>"# + ) + } }
'\_' in URLs needs to be changed to '_' ## Describe the bug Apparently new Reddit changes `_` to `\_` in URLs. An when new Reddit or the app display the URL they change it back again. However libreddit doesn't do that yet (and neither old.reddit). ## To reproduce See here: https://libredd.it/r/NatureIsFuckingLit/comments/pfywdx/isistius_brasiliensis_cookiecutter_shark_mouth/hb7tusx and here: https://old.reddit.com/r/NatureIsFuckingLit/comments/pfywdx/isistius_brasiliensis_cookiecutter_shark_mouth/hb7tusx/ But if opened in new Reddit the URL is correct: https://www.reddit.com/r/NatureIsFuckingLit/comments/pfywdx/isistius_brasiliensis_cookiecutter_shark_mouth/hb7tusx/ ## Expected behavior `\_` in Urls needs to be displayed as `_`
Sorry, I'm trying to reproduce the error but I'm not sure if I understand what the problem is. The Libreddit and old.reddit.com links work fine for me, is that intentional? And aside from the domain, all three links have the same URL paths. Sorry about the confusion, I was unclear in my description. I don't mean the Reddit links themselves, but the comment they link to. In libreddit and old.reddit the comment reads: `https://www.wikiwand.com/en/Cookiecutter\_shark` In new Reddit and the app the comment shows the working link: `https://www.wikiwand.com/en/Cookiecutter_shark` +1, just encountered this for the first time myself: https://libredd.it/r/linux_gaming/comments/qwsc86/game_feels_choppy/hl8ap3v/?context=3 Okay, I think I got it. While we are replacing backslashes all over the place, we never remove the HTML-encoded version of it, which also shows up sometimes ("%5C"). I have a working patch ready for PR soon.
2022-03-23T03:00:19
0.22
ed11135af8e5a469e6e53ccded4350919ca065da
[ "utils::tests::rewrite_urls_removes_backslashes" ]
[ "utils::tests::format_num_works" ]
[]
[]
parcel-bundler/lightningcss
885
parcel-bundler__lightningcss-885
[ "859", "884" ]
7f290350ed0cc53b1267d6810417f0611135eeee
diff --git a/napi/src/transformer.rs b/napi/src/transformer.rs --- a/napi/src/transformer.rs +++ b/napi/src/transformer.rs @@ -312,6 +312,7 @@ impl<'i> Visitor<'i, AtRule<'i>> for JsVisitor { CssRule::Nesting(..) => "nesting", CssRule::Viewport(..) => "viewport", CssRule::StartingStyle(..) => "starting-style", + CssRule::ViewTransition(..) => "view-transition", CssRule::Unknown(v) => { let name = v.name.as_ref(); if let Some(visit) = rule_map.custom(stage, "unknown", name) { diff --git a/node/ast.d.ts b/node/ast.d.ts --- a/node/ast.d.ts +++ b/node/ast.d.ts @@ -1,4 +1,4 @@ -/* tslint:disable */ +/* eslint-disable */ /** * This file was automatically generated by json-schema-to-typescript. * DO NOT MODIFY IT BY HAND. Instead, modify the source JSONSchema file, diff --git a/node/ast.d.ts b/node/ast.d.ts --- a/node/ast.d.ts +++ b/node/ast.d.ts @@ -89,6 +89,10 @@ export type Rule<D = Declaration, M = MediaQuery> = | { type: "starting-style"; value: StartingStyleRule<D, M>; } +| { + type: "view-transition"; + value: ViewTransitionRule; + } | { type: "ignored"; } diff --git a/node/ast.d.ts b/node/ast.d.ts --- a/node/ast.d.ts +++ b/node/ast.d.ts @@ -2342,6 +2346,12 @@ export type PropertyId = | { property: "view-transition-name"; } + | { + property: "view-transition-class"; + } + | { + property: "view-transition-group"; + } | { property: "color-scheme"; } diff --git a/node/ast.d.ts b/node/ast.d.ts --- a/node/ast.d.ts +++ b/node/ast.d.ts @@ -3819,7 +3829,15 @@ export type Declaration = } | { property: "view-transition-name"; - value: String; + value: ViewTransitionName; + } + | { + property: "view-transition-class"; + value: NoneOrCustomIdentList; + } + | { + property: "view-transition-group"; + value: ViewTransitionGroup; } | { property: "color-scheme"; diff --git a/node/ast.d.ts b/node/ast.d.ts --- a/node/ast.d.ts +++ b/node/ast.d.ts @@ -6406,6 +6424,21 @@ export type ContainerNameList = type: "names"; value: String[]; }; +/** + * A value for the 
[view-transition-name](https://drafts.csswg.org/css-view-transitions-1/#view-transition-name-prop) property. + */ +export type ViewTransitionName = + "none" | "auto" | String; +/** + * The `none` keyword, or a space-separated list of custom idents. + */ +export type NoneOrCustomIdentList = + "none" | String[]; +/** + * A value for the [view-transition-group](https://drafts.csswg.org/css-view-transitions-2/#view-transition-group-prop) property. + */ +export type ViewTransitionGroup = + "normal" | "contain" | "nearest" | String; /** * A [CSS-wide keyword](https://drafts.csswg.org/css-cascade-5/#defaulting-keywords). */ diff --git a/node/ast.d.ts b/node/ast.d.ts --- a/node/ast.d.ts +++ b/node/ast.d.ts @@ -6733,6 +6766,16 @@ export type PseudoClass = kind: "autofill"; vendorPrefix: VendorPrefix; } + | { + kind: "active-view-transition"; + } + | { + kind: "active-view-transition-type"; + /** + * A view transition type. + */ + type: String[]; + } | { kind: "local"; /** diff --git a/node/ast.d.ts b/node/ast.d.ts --- a/node/ast.d.ts +++ b/node/ast.d.ts @@ -6864,28 +6907,28 @@ export type PseudoElement = /** * A part name selector. */ - partName: ViewTransitionPartName; + part: ViewTransitionPartSelector; } | { kind: "view-transition-image-pair"; /** * A part name selector. */ - partName: ViewTransitionPartName; + part: ViewTransitionPartSelector; } | { kind: "view-transition-old"; /** * A part name selector. */ - partName: ViewTransitionPartName; + part: ViewTransitionPartSelector; } | { kind: "view-transition-new"; /** * A part name selector. */ - partName: ViewTransitionPartName; + part: ViewTransitionPartSelector; } | { kind: "custom"; diff --git a/node/ast.d.ts b/node/ast.d.ts --- a/node/ast.d.ts +++ b/node/ast.d.ts @@ -7413,6 +7456,28 @@ export type StyleQuery<D = Declaration> = | { operator: Operator; type: "operation"; }; +/** + * A property within a `@view-transition` rule. + * + * See [ViewTransitionRule](ViewTransitionRule). 
+ */ +export type ViewTransitionProperty = + | { + property: "navigation"; + value: Navigation; + } + | { + property: "types"; + value: NoneOrCustomIdentList; + } + | { + property: "custom"; + value: CustomProperty; + }; +/** + * A value for the [navigation](https://drafts.csswg.org/css-view-transitions-2/#view-transition-navigation-descriptor) property in a `@view-transition` rule. + */ +export type Navigation = "none" | "auto"; export type DefaultAtRule = null; /** diff --git a/node/ast.d.ts b/node/ast.d.ts --- a/node/ast.d.ts +++ b/node/ast.d.ts @@ -9126,6 +9191,19 @@ export interface AttrOperation { operator: AttrSelectorOperator; value: string; } +/** + * A [view transition part selector](https://w3c.github.io/csswg-drafts/css-view-transitions-1/#typedef-pt-name-selector). + */ +export interface ViewTransitionPartSelector { + /** + * A list of view transition classes. + */ + classes: String[]; + /** + * The view transition part name. + */ + name?: ViewTransitionPartName | null; +} /** * A [@keyframes](https://drafts.csswg.org/css-animations/#keyframes) rule. */ diff --git a/node/ast.d.ts b/node/ast.d.ts --- a/node/ast.d.ts +++ b/node/ast.d.ts @@ -9545,6 +9623,19 @@ export interface StartingStyleRule<D = Declaration, M = MediaQuery> { */ rules: Rule<D, M>[]; } +/** + * A [@view-transition](https://drafts.csswg.org/css-view-transitions-2/#view-transition-rule) rule. + */ +export interface ViewTransitionRule { + /** + * The location of the rule in the source file. + */ + loc: Location2; + /** + * Declarations in the `@view-transition` rule. + */ + properties: ViewTransitionProperty[]; +} /** * An unknown at-rule, stored as raw tokens. 
*/ diff --git a/scripts/build-ast.js b/scripts/build-ast.js --- a/scripts/build-ast.js +++ b/scripts/build-ast.js @@ -75,6 +75,27 @@ compileFromFile('node/ast.json', { if (types[2].type === 'TSTypeLiteral' && types[2].members[0].key.name === 'timelinerange') { path.get('typeAnnotation.types.2').replaceWith(path.node.typeAnnotation.types[2].members[0].typeAnnotation.typeAnnotation); } + } else if ( + path.node.id.name === 'NoneOrCustomIdentList' && + path.node.typeAnnotation.type === 'TSUnionType' && + path.node.typeAnnotation.types[1].type === 'TSTypeLiteral' && + path.node.typeAnnotation.types[1].members[0].key.name === 'idents' + ) { + path.get('typeAnnotation.types.1').replaceWith(path.node.typeAnnotation.types[1].members[0].typeAnnotation.typeAnnotation); + } else if ( + path.node.id.name === 'ViewTransitionGroup' && + path.node.typeAnnotation.type === 'TSUnionType' && + path.node.typeAnnotation.types[3].type === 'TSTypeLiteral' && + path.node.typeAnnotation.types[3].members[0].key.name === 'custom' + ) { + path.get('typeAnnotation.types.3').replaceWith(path.node.typeAnnotation.types[3].members[0].typeAnnotation.typeAnnotation); + } else if ( + path.node.id.name === 'ViewTransitionName' && + path.node.typeAnnotation.type === 'TSUnionType' && + path.node.typeAnnotation.types[2].type === 'TSTypeLiteral' && + path.node.typeAnnotation.types[2].members[0].key.name === 'custom' + ) { + path.get('typeAnnotation.types.2').replaceWith(path.node.typeAnnotation.types[2].members[0].typeAnnotation.typeAnnotation); } } }); diff --git a/scripts/build-prefixes.js b/scripts/build-prefixes.js --- a/scripts/build-prefixes.js +++ b/scripts/build-prefixes.js @@ -331,6 +331,7 @@ let mdnFeatures = { lightDark: mdn.css.types.color['light-dark'].__compat.support, accentSystemColor: mdn.css.types.color['system-color'].accentcolor_accentcolortext.__compat.support, animationTimelineShorthand: mdn.css.properties.animation['animation-timeline_included'].__compat.support, + viewTransition: 
mdn.css.selectors['view-transition'].__compat.support, }; for (let key in mdn.css.types.length) { diff --git a/src/compat.rs b/src/compat.rs --- a/src/compat.rs +++ b/src/compat.rs @@ -212,6 +212,7 @@ pub enum Feature { VbUnit, VhUnit, ViUnit, + ViewTransition, ViewportPercentageUnitsDynamic, ViewportPercentageUnitsLarge, ViewportPercentageUnitsSmall, diff --git a/src/compat.rs b/src/compat.rs --- a/src/compat.rs +++ b/src/compat.rs @@ -3464,6 +3465,46 @@ impl Feature { return false; } } + Feature::ViewTransition => { + if let Some(version) = browsers.chrome { + if version < 7143424 { + return false; + } + } + if let Some(version) = browsers.edge { + if version < 7143424 { + return false; + } + } + if let Some(version) = browsers.opera { + if version < 4849664 { + return false; + } + } + if let Some(version) = browsers.safari { + if version < 1179648 { + return false; + } + } + if let Some(version) = browsers.ios_saf { + if version < 1179648 { + return false; + } + } + if let Some(version) = browsers.samsung { + if version < 1376256 { + return false; + } + } + if let Some(version) = browsers.android { + if version < 7143424 { + return false; + } + } + if browsers.firefox.is_some() || browsers.ie.is_some() { + return false; + } + } Feature::QUnit => { if let Some(version) = browsers.chrome { if version < 4128768 { diff --git a/src/parser.rs b/src/parser.rs --- a/src/parser.rs +++ b/src/parser.rs @@ -9,6 +9,7 @@ use crate::rules::layer::{LayerBlockRule, LayerStatementRule}; use crate::rules::property::PropertyRule; use crate::rules::scope::ScopeRule; use crate::rules::starting_style::StartingStyleRule; +use crate::rules::view_transition::ViewTransitionRule; use crate::rules::viewport::ViewportRule; use crate::rules::{ diff --git a/src/parser.rs b/src/parser.rs --- a/src/parser.rs +++ b/src/parser.rs @@ -214,6 +215,8 @@ pub enum AtRulePrelude<'i, T> { StartingStyle, /// A @scope rule prelude. 
Scope(Option<SelectorList<'i>>, Option<SelectorList<'i>>), + /// A @view-transition rule prelude. + ViewTransition, /// An unknown prelude. Unknown(CowArcStr<'i>, TokenList<'i>), /// A custom prelude. diff --git a/src/parser.rs b/src/parser.rs --- a/src/parser.rs +++ b/src/parser.rs @@ -249,7 +252,8 @@ impl<'i, T> AtRulePrelude<'i, T> { | Self::Import(..) | Self::CustomMedia(..) | Self::Viewport(..) - | Self::Charset => false, + | Self::Charset + | Self::ViewTransition => false, } } } diff --git a/src/parser.rs b/src/parser.rs --- a/src/parser.rs +++ b/src/parser.rs @@ -669,6 +673,9 @@ impl<'a, 'o, 'b, 'i, T: crate::traits::AtRuleParser<'i>> AtRuleParser<'i> for Ne AtRulePrelude::Scope(scope_start, scope_end) }, + "view-transition" => { + AtRulePrelude::ViewTransition + }, "nest" if self.is_in_style_rule => { self.options.warn(input.new_custom_error(ParserError::DeprecatedNestRule)); let selector_parser = SelectorParser { diff --git a/src/parser.rs b/src/parser.rs --- a/src/parser.rs +++ b/src/parser.rs @@ -833,6 +840,13 @@ impl<'a, 'o, 'b, 'i, T: crate::traits::AtRuleParser<'i>> AtRuleParser<'i> for Ne self.rules.0.push(CssRule::StartingStyle(StartingStyleRule { rules, loc })); Ok(()) } + AtRulePrelude::ViewTransition => { + self + .rules + .0 + .push(CssRule::ViewTransition(ViewTransitionRule::parse(input, loc)?)); + Ok(()) + } AtRulePrelude::Nest(selectors) => { let (declarations, rules) = self.parse_nested(input, true)?; self.rules.0.push(CssRule::Nesting(NestingRule { diff --git a/src/properties/mod.rs b/src/properties/mod.rs --- a/src/properties/mod.rs +++ b/src/properties/mod.rs @@ -133,7 +133,7 @@ use crate::traits::{Parse, ParseWithOptions, Shorthand, ToCss}; use crate::values::number::{CSSInteger, CSSNumber}; use crate::values::string::CowArcStr; use crate::values::{ - alpha::*, color::*, easing::EasingFunction, ident::CustomIdent, ident::DashedIdentReference, image::*, + alpha::*, color::*, easing::EasingFunction, ident::DashedIdentReference, 
ident::NoneOrCustomIdentList, image::*, length::*, position::*, rect::*, shape::FillRule, size::Size2D, time::Time, }; use crate::vendor_prefix::VendorPrefix; diff --git a/src/properties/mod.rs b/src/properties/mod.rs --- a/src/properties/mod.rs +++ b/src/properties/mod.rs @@ -1638,7 +1638,10 @@ define_properties! { "container": Container(Container<'i>) shorthand: true, // https://w3c.github.io/csswg-drafts/css-view-transitions-1/ - "view-transition-name": ViewTransitionName(CustomIdent<'i>), + "view-transition-name": ViewTransitionName(ViewTransitionName<'i>), + // https://drafts.csswg.org/css-view-transitions-2/ + "view-transition-class": ViewTransitionClass(NoneOrCustomIdentList<'i>), + "view-transition-group": ViewTransitionGroup(ViewTransitionGroup<'i>), // https://drafts.csswg.org/css-color-adjust/ "color-scheme": ColorScheme(ColorScheme), diff --git a/src/properties/transition.rs b/src/properties/transition.rs --- a/src/properties/transition.rs +++ b/src/properties/transition.rs @@ -10,6 +10,7 @@ use crate::prefixes::Feature; use crate::printer::Printer; use crate::properties::masking::get_webkit_mask_property; use crate::traits::{Parse, PropertyHandler, Shorthand, ToCss, Zero}; +use crate::values::ident::CustomIdent; use crate::values::{easing::EasingFunction, time::Time}; use crate::vendor_prefix::VendorPrefix; #[cfg(feature = "visitor")] diff --git a/src/properties/transition.rs b/src/properties/transition.rs --- a/src/properties/transition.rs +++ b/src/properties/transition.rs @@ -106,6 +107,50 @@ impl<'i> ToCss for Transition<'i> { } } +/// A value for the [view-transition-name](https://drafts.csswg.org/css-view-transitions-1/#view-transition-name-prop) property. 
+#[derive(Debug, Clone, PartialEq, Default, Parse, ToCss)] +#[cfg_attr(feature = "visitor", derive(Visit))] +#[cfg_attr( + feature = "serde", + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "kebab-case") +)] +#[cfg_attr(feature = "jsonschema", derive(schemars::JsonSchema))] +#[cfg_attr(feature = "into_owned", derive(static_self::IntoOwned))] +pub enum ViewTransitionName<'i> { + /// The element will not participate independently in a view transition. + #[default] + None, + /// The `auto` keyword. + Auto, + /// A custom name. + #[cfg_attr(feature = "serde", serde(borrow, untagged))] + Custom(CustomIdent<'i>), +} + +/// A value for the [view-transition-group](https://drafts.csswg.org/css-view-transitions-2/#view-transition-group-prop) property. +#[derive(Debug, Clone, PartialEq, Default, Parse, ToCss)] +#[cfg_attr(feature = "visitor", derive(Visit))] +#[cfg_attr( + feature = "serde", + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "kebab-case") +)] +#[cfg_attr(feature = "jsonschema", derive(schemars::JsonSchema))] +#[cfg_attr(feature = "into_owned", derive(static_self::IntoOwned))] +pub enum ViewTransitionGroup<'i> { + /// The `normal` keyword. + #[default] + Normal, + /// The `contain` keyword. + Contain, + /// The `nearest` keyword. + Nearest, + /// A custom group. 
+ #[cfg_attr(feature = "serde", serde(borrow, untagged))] + Custom(CustomIdent<'i>), +} + #[derive(Default)] pub(crate) struct TransitionHandler<'i> { properties: Option<(SmallVec<[PropertyId<'i>; 1]>, VendorPrefix)>, diff --git a/src/rules/mod.rs b/src/rules/mod.rs --- a/src/rules/mod.rs +++ b/src/rules/mod.rs @@ -55,6 +55,7 @@ pub mod starting_style; pub mod style; pub mod supports; pub mod unknown; +pub mod view_transition; pub mod viewport; use self::font_palette_values::FontPaletteValuesRule; diff --git a/src/rules/mod.rs b/src/rules/mod.rs --- a/src/rules/mod.rs +++ b/src/rules/mod.rs @@ -97,6 +98,7 @@ use std::hash::{BuildHasherDefault, Hasher}; use style::StyleRule; use supports::SupportsRule; use unknown::UnknownAtRule; +use view_transition::ViewTransitionRule; use viewport::ViewportRule; #[derive(Clone)] diff --git a/src/rules/mod.rs b/src/rules/mod.rs --- a/src/rules/mod.rs +++ b/src/rules/mod.rs @@ -174,6 +176,8 @@ pub enum CssRule<'i, R = DefaultAtRule> { Scope(ScopeRule<'i, R>), /// A `@starting-style` rule. StartingStyle(StartingStyleRule<'i, R>), + /// A `@view-transition` rule. + ViewTransition(ViewTransitionRule<'i>), /// A placeholder for a rule that was removed. Ignored, /// An unknown at-rule. 
diff --git a/src/rules/mod.rs b/src/rules/mod.rs --- a/src/rules/mod.rs +++ b/src/rules/mod.rs @@ -318,6 +322,10 @@ impl<'i, 'de: 'i, R: serde::Deserialize<'de>> serde::Deserialize<'de> for CssRul let rule = StartingStyleRule::deserialize(deserializer)?; Ok(CssRule::StartingStyle(rule)) } + "view-transition" => { + let rule = ViewTransitionRule::deserialize(deserializer)?; + Ok(CssRule::ViewTransition(rule)) + } "ignored" => Ok(CssRule::Ignored), "unknown" => { let rule = UnknownAtRule::deserialize(deserializer)?; diff --git a/src/rules/mod.rs b/src/rules/mod.rs --- a/src/rules/mod.rs +++ b/src/rules/mod.rs @@ -358,6 +366,7 @@ impl<'a, 'i, T: ToCss> ToCss for CssRule<'i, T> { CssRule::StartingStyle(rule) => rule.to_css(dest), CssRule::Container(container) => container.to_css(dest), CssRule::Scope(scope) => scope.to_css(dest), + CssRule::ViewTransition(rule) => rule.to_css(dest), CssRule::Unknown(unknown) => unknown.to_css(dest), CssRule::Custom(rule) => rule.to_css(dest).map_err(|_| PrinterError { kind: PrinterErrorKind::FmtError, diff --git /dev/null b/src/rules/view_transition.rs new file mode 100644 --- /dev/null +++ b/src/rules/view_transition.rs @@ -0,0 +1,196 @@ +//! The `@view-transition` rule. + +use super::Location; +use crate::error::{ParserError, PrinterError}; +use crate::printer::Printer; +use crate::properties::custom::CustomProperty; +use crate::stylesheet::ParserOptions; +use crate::traits::{Parse, ToCss}; +use crate::values::ident::NoneOrCustomIdentList; +#[cfg(feature = "visitor")] +use crate::visitor::Visit; +use cssparser::*; + +/// A [@view-transition](https://drafts.csswg.org/css-view-transitions-2/#view-transition-rule) rule. 
+#[derive(Debug, PartialEq, Clone)] +#[cfg_attr(feature = "visitor", derive(Visit))] +#[cfg_attr(feature = "into_owned", derive(static_self::IntoOwned))] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "jsonschema", derive(schemars::JsonSchema))] +pub struct ViewTransitionRule<'i> { + /// Declarations in the `@view-transition` rule. + #[cfg_attr(feature = "serde", serde(borrow))] + pub properties: Vec<ViewTransitionProperty<'i>>, + /// The location of the rule in the source file. + #[cfg_attr(feature = "visitor", skip_visit)] + pub loc: Location, +} + +/// A property within a `@view-transition` rule. +/// +/// See [ViewTransitionRule](ViewTransitionRule). +#[derive(Debug, Clone, PartialEq)] +#[cfg_attr(feature = "visitor", derive(Visit))] +#[cfg_attr(feature = "into_owned", derive(static_self::IntoOwned))] +#[cfg_attr( + feature = "serde", + derive(serde::Serialize, serde::Deserialize), + serde(tag = "property", content = "value", rename_all = "kebab-case") +)] +#[cfg_attr(feature = "jsonschema", derive(schemars::JsonSchema))] +pub enum ViewTransitionProperty<'i> { + /// The `navigation` property. + Navigation(Navigation), + /// The `types` property. + #[cfg_attr(feature = "serde", serde(borrow))] + Types(NoneOrCustomIdentList<'i>), + /// An unknown or unsupported property. + Custom(CustomProperty<'i>), +} + +/// A value for the [navigation](https://drafts.csswg.org/css-view-transitions-2/#view-transition-navigation-descriptor) +/// property in a `@view-transition` rule. +#[derive(Debug, Clone, PartialEq, Default, Parse, ToCss)] +#[cfg_attr(feature = "visitor", derive(Visit))] +#[cfg_attr(feature = "into_owned", derive(static_self::IntoOwned))] +#[cfg_attr( + feature = "serde", + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "kebab-case") +)] +#[cfg_attr(feature = "jsonschema", derive(schemars::JsonSchema))] +pub enum Navigation { + /// There will be no transition. 
+ #[default] + None, + /// The transition will be enabled if the navigation is same-origin. + Auto, +} + +pub(crate) struct ViewTransitionDeclarationParser; + +impl<'i> cssparser::DeclarationParser<'i> for ViewTransitionDeclarationParser { + type Declaration = ViewTransitionProperty<'i>; + type Error = ParserError<'i>; + + fn parse_value<'t>( + &mut self, + name: CowRcStr<'i>, + input: &mut cssparser::Parser<'i, 't>, + ) -> Result<Self::Declaration, cssparser::ParseError<'i, Self::Error>> { + let state = input.state(); + match_ignore_ascii_case! { &name, + "navigation" => { + // https://drafts.csswg.org/css-view-transitions-2/#view-transition-navigation-descriptor + if let Ok(navigation) = Navigation::parse(input) { + return Ok(ViewTransitionProperty::Navigation(navigation)); + } + }, + "types" => { + // https://drafts.csswg.org/css-view-transitions-2/#types-cross-doc + if let Ok(types) = NoneOrCustomIdentList::parse(input) { + return Ok(ViewTransitionProperty::Types(types)); + } + }, + _ => return Err(input.new_custom_error(ParserError::InvalidDeclaration)) + } + + input.reset(&state); + return Ok(ViewTransitionProperty::Custom(CustomProperty::parse( + name.into(), + input, + &ParserOptions::default(), + )?)); + } +} + +/// Default methods reject all at rules. 
+impl<'i> AtRuleParser<'i> for ViewTransitionDeclarationParser { + type Prelude = (); + type AtRule = ViewTransitionProperty<'i>; + type Error = ParserError<'i>; +} + +impl<'i> QualifiedRuleParser<'i> for ViewTransitionDeclarationParser { + type Prelude = (); + type QualifiedRule = ViewTransitionProperty<'i>; + type Error = ParserError<'i>; +} + +impl<'i> RuleBodyItemParser<'i, ViewTransitionProperty<'i>, ParserError<'i>> for ViewTransitionDeclarationParser { + fn parse_qualified(&self) -> bool { + false + } + + fn parse_declarations(&self) -> bool { + true + } +} + +impl<'i> ViewTransitionRule<'i> { + pub(crate) fn parse<'t>( + input: &mut Parser<'i, 't>, + loc: Location, + ) -> Result<Self, ParseError<'i, ParserError<'i>>> { + let mut decl_parser = ViewTransitionDeclarationParser; + let mut parser = RuleBodyParser::new(input, &mut decl_parser); + let mut properties = vec![]; + while let Some(decl) = parser.next() { + if let Ok(decl) = decl { + properties.push(decl); + } + } + + Ok(ViewTransitionRule { properties, loc }) + } +} + +impl<'i> ToCss for ViewTransitionRule<'i> { + fn to_css<W>(&self, dest: &mut Printer<W>) -> Result<(), PrinterError> + where + W: std::fmt::Write, + { + #[cfg(feature = "sourcemap")] + dest.add_mapping(self.loc); + dest.write_str("@view-transition")?; + dest.whitespace()?; + dest.write_char('{')?; + dest.indent(); + let len = self.properties.len(); + for (i, prop) in self.properties.iter().enumerate() { + dest.newline()?; + prop.to_css(dest)?; + if i != len - 1 || !dest.minify { + dest.write_char(';')?; + } + } + dest.dedent(); + dest.newline()?; + dest.write_char('}') + } +} + +impl<'i> ToCss for ViewTransitionProperty<'i> { + fn to_css<W>(&self, dest: &mut Printer<W>) -> Result<(), PrinterError> + where + W: std::fmt::Write, + { + macro_rules! 
property { + ($prop: literal, $value: expr) => {{ + dest.write_str($prop)?; + dest.delim(':', false)?; + $value.to_css(dest) + }}; + } + + match self { + ViewTransitionProperty::Navigation(f) => property!("navigation", f), + ViewTransitionProperty::Types(t) => property!("types", t), + ViewTransitionProperty::Custom(custom) => { + dest.write_str(custom.name.as_ref())?; + dest.delim(':', false)?; + custom.value.to_css(dest, true) + } + } + } +} diff --git a/src/selector.rs b/src/selector.rs --- a/src/selector.rs +++ b/src/selector.rs @@ -1,7 +1,7 @@ //! CSS selectors. use crate::compat::Feature; -use crate::error::{ParserError, PrinterError}; +use crate::error::{ParserError, PrinterError, SelectorError}; use crate::parser::ParserFlags; use crate::printer::Printer; use crate::properties::custom::TokenList; diff --git a/src/selector.rs b/src/selector.rs --- a/src/selector.rs +++ b/src/selector.rs @@ -21,6 +21,7 @@ use parcel_selectors::{ attr::{AttrSelectorOperator, ParsedAttrSelectorOperation, ParsedCaseSensitivity}, parser::SelectorImpl, }; +use smallvec::SmallVec; use std::collections::HashSet; use std::fmt; diff --git a/src/selector.rs b/src/selector.rs --- a/src/selector.rs +++ b/src/selector.rs @@ -177,6 +178,9 @@ impl<'a, 'o, 'i> parcel_selectors::parser::Parser<'i> for SelectorParser<'a, 'o, "-webkit-autofill" => Autofill(VendorPrefix::WebKit), "-o-autofill" => Autofill(VendorPrefix::O), + // https://drafts.csswg.org/css-view-transitions-2/#pseudo-classes-for-selective-vt + "active-view-transition" => ActiveViewTransition, + // https://webkit.org/blog/363/styling-scrollbars/ "horizontal" => WebKitScrollbar(WebKitScrollbarPseudoClass::Horizontal), "vertical" => WebKitScrollbar(WebKitScrollbarPseudoClass::Vertical), diff --git a/src/selector.rs b/src/selector.rs --- a/src/selector.rs +++ b/src/selector.rs @@ -221,6 +225,11 @@ impl<'a, 'o, 'i> parcel_selectors::parser::Parser<'i> for SelectorParser<'a, 'o, Lang { languages } }, "dir" => Dir { direction: 
Direction::parse(parser)? }, + // https://drafts.csswg.org/css-view-transitions-2/#the-active-view-transition-type-pseudo + "active-view-transition-type" => { + let kind = Parse::parse(parser)?; + ActiveViewTransitionType { kind } + }, "local" if self.options.css_modules.is_some() => Local { selector: Box::new(Selector::parse(self, parser)?) }, "global" if self.options.css_modules.is_some() => Global { selector: Box::new(Selector::parse(self, parser)?) }, _ => { diff --git a/src/selector.rs b/src/selector.rs --- a/src/selector.rs +++ b/src/selector.rs @@ -303,10 +312,10 @@ impl<'a, 'o, 'i> parcel_selectors::parser::Parser<'i> for SelectorParser<'a, 'o, let pseudo_element = match_ignore_ascii_case! { &name, "cue" => CueFunction { selector: Box::new(Selector::parse(self, arguments)?) }, "cue-region" => CueRegionFunction { selector: Box::new(Selector::parse(self, arguments)?) }, - "view-transition-group" => ViewTransitionGroup { part_name: ViewTransitionPartName::parse(arguments)? }, - "view-transition-image-pair" => ViewTransitionImagePair { part_name: ViewTransitionPartName::parse(arguments)? }, - "view-transition-old" => ViewTransitionOld { part_name: ViewTransitionPartName::parse(arguments)? }, - "view-transition-new" => ViewTransitionNew { part_name: ViewTransitionPartName::parse(arguments)? }, + "view-transition-group" => ViewTransitionGroup { part: ViewTransitionPartSelector::parse(arguments)? }, + "view-transition-image-pair" => ViewTransitionImagePair { part: ViewTransitionPartSelector::parse(arguments)? }, + "view-transition-old" => ViewTransitionOld { part: ViewTransitionPartSelector::parse(arguments)? }, + "view-transition-new" => ViewTransitionNew { part: ViewTransitionPartSelector::parse(arguments)? 
}, _ => { if !name.starts_with('-') { self.options.warn(arguments.new_custom_error(SelectorParseErrorKind::UnsupportedPseudoElement(name.clone()))); diff --git a/src/selector.rs b/src/selector.rs --- a/src/selector.rs +++ b/src/selector.rs @@ -507,6 +516,15 @@ pub enum PseudoClass<'i> { #[cfg_attr(feature = "serde", serde(with = "PrefixWrapper"))] Autofill(VendorPrefix), + /// The [:active-view-transition](https://drafts.csswg.org/css-view-transitions-2/#the-active-view-transition-pseudo) pseudo class. + ActiveViewTransition, + /// The [:active-view-transition-type()](https://drafts.csswg.org/css-view-transitions-2/#the-active-view-transition-type-pseudo) pseudo class. + ActiveViewTransitionType { + /// A view transition type. + #[cfg_attr(feature = "serde", serde(rename = "type"))] + kind: SmallVec<[CustomIdent<'i>; 1]>, + }, + // CSS modules /// The CSS modules :local() pseudo class. Local { diff --git a/src/selector.rs b/src/selector.rs --- a/src/selector.rs +++ b/src/selector.rs @@ -763,6 +781,13 @@ where // https://html.spec.whatwg.org/multipage/semantics-other.html#selector-autofill Autofill(prefix) => write_prefixed!(prefix, "autofill"), + ActiveViewTransition => dest.write_str(":active-view-transition"), + ActiveViewTransitionType { kind } => { + dest.write_str(":active-view-transition-type(")?; + kind.to_css(dest)?; + dest.write_char(')') + } + Local { selector } => serialize_selector(selector, dest, context, false), Global { selector } => { let css_module = std::mem::take(&mut dest.css_module); diff --git a/src/selector.rs b/src/selector.rs --- a/src/selector.rs +++ b/src/selector.rs @@ -902,25 +927,25 @@ pub enum PseudoElement<'i> { #[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] ViewTransitionGroup { /// A part name selector. 
- part_name: ViewTransitionPartName<'i>, + part: ViewTransitionPartSelector<'i>, }, /// The [::view-transition-image-pair()](https://w3c.github.io/csswg-drafts/css-view-transitions-1/#view-transition-image-pair-pt-name-selector) functional pseudo element. #[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] ViewTransitionImagePair { /// A part name selector. - part_name: ViewTransitionPartName<'i>, + part: ViewTransitionPartSelector<'i>, }, /// The [::view-transition-old()](https://w3c.github.io/csswg-drafts/css-view-transitions-1/#view-transition-old-pt-name-selector) functional pseudo element. #[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] ViewTransitionOld { /// A part name selector. - part_name: ViewTransitionPartName<'i>, + part: ViewTransitionPartSelector<'i>, }, /// The [::view-transition-new()](https://w3c.github.io/csswg-drafts/css-view-transitions-1/#view-transition-new-pt-name-selector) functional pseudo element. #[cfg_attr(feature = "serde", serde(rename_all = "camelCase"))] ViewTransitionNew { /// A part name selector. - part_name: ViewTransitionPartName<'i>, + part: ViewTransitionPartSelector<'i>, }, /// An unknown pseudo element. Custom { diff --git a/src/selector.rs b/src/selector.rs --- a/src/selector.rs +++ b/src/selector.rs @@ -965,44 +990,17 @@ pub enum WebKitScrollbarPseudoElement { /// A [view transition part name](https://w3c.github.io/csswg-drafts/css-view-transitions-1/#typedef-pt-name-selector). 
#[derive(PartialEq, Eq, Clone, Debug, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] #[cfg_attr(feature = "into_owned", derive(static_self::IntoOwned))] pub enum ViewTransitionPartName<'i> { /// * + #[cfg_attr(feature = "serde", serde(rename = "*"))] All, /// <custom-ident> + #[cfg_attr(feature = "serde", serde(borrow, untagged))] Name(CustomIdent<'i>), } -#[cfg(feature = "serde")] -#[cfg_attr(docsrs, doc(cfg(feature = "serde")))] -impl<'i> serde::Serialize for ViewTransitionPartName<'i> { - fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error> - where - S: serde::Serializer, - { - match self { - ViewTransitionPartName::All => serializer.serialize_str("*"), - ViewTransitionPartName::Name(name) => serializer.serialize_str(&name.0), - } - } -} - -#[cfg(feature = "serde")] -#[cfg_attr(docsrs, doc(cfg(feature = "serde")))] -impl<'i, 'de: 'i> serde::Deserialize<'de> for ViewTransitionPartName<'i> { - fn deserialize<D>(deserializer: D) -> Result<Self, D::Error> - where - D: serde::Deserializer<'de>, - { - let s = CowArcStr::deserialize(deserializer)?; - if s == "*" { - Ok(ViewTransitionPartName::All) - } else { - Ok(ViewTransitionPartName::Name(CustomIdent(s))) - } - } -} - #[cfg(feature = "jsonschema")] #[cfg_attr(docsrs, doc(cfg(feature = "jsonschema")))] impl<'a> schemars::JsonSchema for ViewTransitionPartName<'a> { diff --git a/src/selector.rs b/src/selector.rs --- a/src/selector.rs +++ b/src/selector.rs @@ -1041,6 +1039,55 @@ impl<'i> ToCss for ViewTransitionPartName<'i> { } } +/// A [view transition part selector](https://w3c.github.io/csswg-drafts/css-view-transitions-1/#typedef-pt-name-selector). 
+#[derive(PartialEq, Eq, Clone, Debug, Hash)] +#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] +#[cfg_attr(feature = "jsonschema", derive(schemars::JsonSchema))] +#[cfg_attr(feature = "into_owned", derive(static_self::IntoOwned))] +pub struct ViewTransitionPartSelector<'i> { + /// The view transition part name. + #[cfg_attr(feature = "serde", serde(borrow))] + name: Option<ViewTransitionPartName<'i>>, + /// A list of view transition classes. + classes: Vec<CustomIdent<'i>>, +} + +impl<'i> Parse<'i> for ViewTransitionPartSelector<'i> { + fn parse<'t>(input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i, ParserError<'i>>> { + input.skip_whitespace(); + let name = input.try_parse(ViewTransitionPartName::parse).ok(); + let mut classes = Vec::new(); + while let Ok(token) = input.next_including_whitespace() { + if matches!(token, Token::Delim('.')) { + match input.next_including_whitespace() { + Ok(Token::Ident(id)) => classes.push(CustomIdent(id.into())), + _ => return Err(input.new_custom_error(ParserError::SelectorError(SelectorError::InvalidState))), + } + } else { + return Err(input.new_custom_error(ParserError::SelectorError(SelectorError::InvalidState))); + } + } + + Ok(ViewTransitionPartSelector { name, classes }) + } +} + +impl<'i> ToCss for ViewTransitionPartSelector<'i> { + fn to_css<W>(&self, dest: &mut Printer<W>) -> Result<(), PrinterError> + where + W: std::fmt::Write, + { + if let Some(name) = &self.name { + name.to_css(dest)?; + } + for class in &self.classes { + dest.write_char('.')?; + class.to_css(dest)?; + } + Ok(()) + } +} + impl<'i> cssparser::ToCss for PseudoElement<'i> { fn to_css<W>(&self, dest: &mut W) -> std::fmt::Result where diff --git a/src/selector.rs b/src/selector.rs --- a/src/selector.rs +++ b/src/selector.rs @@ -1138,24 +1185,24 @@ where }) } ViewTransition => dest.write_str("::view-transition"), - ViewTransitionGroup { part_name } => { + ViewTransitionGroup { part } => { 
dest.write_str("::view-transition-group(")?; - part_name.to_css(dest)?; + part.to_css(dest)?; dest.write_char(')') } - ViewTransitionImagePair { part_name } => { + ViewTransitionImagePair { part } => { dest.write_str("::view-transition-image-pair(")?; - part_name.to_css(dest)?; + part.to_css(dest)?; dest.write_char(')') } - ViewTransitionOld { part_name } => { + ViewTransitionOld { part } => { dest.write_str("::view-transition-old(")?; - part_name.to_css(dest)?; + part.to_css(dest)?; dest.write_char(')') } - ViewTransitionNew { part_name } => { + ViewTransitionNew { part } => { dest.write_str("::view-transition-new(")?; - part_name.to_css(dest)?; + part.to_css(dest)?; dest.write_char(')') } Custom { name: val } => { diff --git a/src/selector.rs b/src/selector.rs --- a/src/selector.rs +++ b/src/selector.rs @@ -1836,7 +1883,9 @@ pub(crate) fn is_compatible(selectors: &[Selector], targets: Targets) -> bool { | PseudoClass::Blank | PseudoClass::UserInvalid | PseudoClass::UserValid - | PseudoClass::Defined => return false, + | PseudoClass::Defined + | PseudoClass::ActiveViewTransition + | PseudoClass::ActiveViewTransitionType { .. } => return false, PseudoClass::Custom { .. } | _ => return false, } diff --git a/src/selector.rs b/src/selector.rs --- a/src/selector.rs +++ b/src/selector.rs @@ -1852,6 +1901,11 @@ pub(crate) fn is_compatible(selectors: &[Selector], targets: Targets) -> bool { PseudoElement::Backdrop(prefix) if *prefix == VendorPrefix::None => Feature::Dialog, PseudoElement::Cue => Feature::Cue, PseudoElement::CueFunction { selector: _ } => Feature::CueFunction, + PseudoElement::ViewTransition + | PseudoElement::ViewTransitionNew { .. } + | PseudoElement::ViewTransitionOld { .. } + | PseudoElement::ViewTransitionGroup { .. } + | PseudoElement::ViewTransitionImagePair { .. 
} => Feature::ViewTransition, PseudoElement::Custom { name: _ } | _ => return false, }, diff --git a/src/values/ident.rs b/src/values/ident.rs --- a/src/values/ident.rs +++ b/src/values/ident.rs @@ -75,6 +75,69 @@ impl<'i> CustomIdent<'i> { /// A list of CSS [`<custom-ident>`](https://www.w3.org/TR/css-values-4/#custom-idents) values. pub type CustomIdentList<'i> = SmallVec<[CustomIdent<'i>; 1]>; +/// The `none` keyword, or a space-separated list of custom idents. +#[derive(Debug, Clone, PartialEq, Default)] +#[cfg_attr(feature = "visitor", derive(Visit))] +#[cfg_attr(feature = "into_owned", derive(static_self::IntoOwned))] +#[cfg_attr( + feature = "serde", + derive(serde::Serialize, serde::Deserialize), + serde(rename_all = "kebab-case") +)] +#[cfg_attr(feature = "jsonschema", derive(schemars::JsonSchema))] +pub enum NoneOrCustomIdentList<'i> { + /// None. + #[default] + None, + /// A list of idents. + #[cfg_attr(feature = "serde", serde(borrow, untagged))] + Idents(SmallVec<[CustomIdent<'i>; 1]>), +} + +impl<'i> Parse<'i> for NoneOrCustomIdentList<'i> { + fn parse<'t>(input: &mut Parser<'i, 't>) -> Result<Self, ParseError<'i, ParserError<'i>>> { + let mut types = SmallVec::new(); + loop { + if let Ok(ident) = input.try_parse(CustomIdent::parse) { + if ident == "none" { + if types.is_empty() { + return Ok(NoneOrCustomIdentList::None); + } else { + return Err(input.new_custom_error(ParserError::InvalidValue)); + } + } + + types.push(ident); + } else { + return Ok(NoneOrCustomIdentList::Idents(types)); + } + } + } +} + +impl<'i> ToCss for NoneOrCustomIdentList<'i> { + fn to_css<W>(&self, dest: &mut Printer<W>) -> Result<(), PrinterError> + where + W: std::fmt::Write, + { + match self { + NoneOrCustomIdentList::None => dest.write_str("none"), + NoneOrCustomIdentList::Idents(types) => { + let mut first = true; + for ident in types { + if !first { + dest.write_char(' ')?; + } else { + first = false; + } + ident.to_css(dest)?; + } + Ok(()) + } + } + } +} + /// A CSS 
[`<dashed-ident>`](https://www.w3.org/TR/css-values-4/#dashed-idents) declaration. /// /// Dashed idents are used in cases where an identifier can be either author defined _or_ CSS-defined.
diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -182,6 +182,7 @@ mod tests { expected_exports: CssModuleExports, expected_references: CssModuleReferences, config: crate::css_modules::Config<'i>, + minify: bool, ) { let mut stylesheet = StyleSheet::parse( &source, diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -193,7 +194,12 @@ mod tests { ) .unwrap(); stylesheet.minify(MinifyOptions::default()).unwrap(); - let res = stylesheet.to_css(PrinterOptions::default()).unwrap(); + let res = stylesheet + .to_css(PrinterOptions { + minify, + ..Default::default() + }) + .unwrap(); assert_eq!(res.code, expected); assert_eq!(res.exports.unwrap(), expected_exports); assert_eq!(res.references.unwrap(), expected_references); diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -6856,6 +6862,19 @@ mod tests { ":root::view-transition {position: fixed}", ":root::view-transition{position:fixed}", ); + minify_test( + ":root:active-view-transition {position: fixed}", + ":root:active-view-transition{position:fixed}", + ); + minify_test( + ":root:active-view-transition-type(slide-in) {position: fixed}", + ":root:active-view-transition-type(slide-in){position:fixed}", + ); + minify_test( + ":root:active-view-transition-type(slide-in, reverse) {position: fixed}", + ":root:active-view-transition-type(slide-in,reverse){position:fixed}", + ); + for name in &[ "view-transition-group", "view-transition-image-pair", diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -6866,14 +6885,42 @@ mod tests { &format!(":root::{}(*) {{position: fixed}}", name), &format!(":root::{}(*){{position:fixed}}", name), ); + minify_test( + &format!(":root::{}(*.class) {{position: fixed}}", name), + &format!(":root::{}(*.class){{position:fixed}}", name), + ); + minify_test( + &format!(":root::{}(*.class.class) {{position: fixed}}", name), + &format!(":root::{}(*.class.class){{position:fixed}}", name), + ); minify_test( 
&format!(":root::{}(foo) {{position: fixed}}", name), &format!(":root::{}(foo){{position:fixed}}", name), ); + minify_test( + &format!(":root::{}(foo.class) {{position: fixed}}", name), + &format!(":root::{}(foo.class){{position:fixed}}", name), + ); + minify_test( + &format!(":root::{}(foo.bar.baz) {{position: fixed}}", name), + &format!(":root::{}(foo.bar.baz){{position:fixed}}", name), + ); minify_test( &format!(":root::{}(foo):only-child {{position: fixed}}", name), &format!(":root::{}(foo):only-child{{position:fixed}}", name), ); + minify_test( + &format!(":root::{}(foo.bar.baz):only-child {{position: fixed}}", name), + &format!(":root::{}(foo.bar.baz):only-child{{position:fixed}}", name), + ); + minify_test( + &format!(":root::{}(.foo) {{position: fixed}}", name), + &format!(":root::{}(.foo){{position:fixed}}", name), + ); + minify_test( + &format!(":root::{}(.foo.bar) {{position: fixed}}", name), + &format!(":root::{}(.foo.bar){{position:fixed}}", name), + ); error_test( &format!(":root::{}(foo):first-child {{position: fixed}}", name), ParserError::SelectorError(SelectorError::InvalidPseudoClassAfterPseudoElement), diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -6882,6 +6929,30 @@ mod tests { &format!(":root::{}(foo)::before {{position: fixed}}", name), ParserError::SelectorError(SelectorError::InvalidState), ); + error_test( + &format!(":root::{}(*.*) {{position: fixed}}", name), + ParserError::SelectorError(SelectorError::InvalidState), + ); + error_test( + &format!(":root::{}(*. cls) {{position: fixed}}", name), + ParserError::SelectorError(SelectorError::InvalidState), + ); + error_test( + &format!(":root::{}(foo .bar) {{position: fixed}}", name), + ParserError::SelectorError(SelectorError::InvalidState), + ); + error_test( + &format!(":root::{}(*.cls. 
c) {{position: fixed}}", name), + ParserError::SelectorError(SelectorError::InvalidState), + ); + error_test( + &format!(":root::{}(*.cls>cls) {{position: fixed}}", name), + ParserError::SelectorError(SelectorError::InvalidState), + ); + error_test( + &format!(":root::{}(*.cls.foo.*) {{position: fixed}}", name), + ParserError::SelectorError(SelectorError::InvalidState), + ); } minify_test(".foo ::deep .bar {width: 20px}", ".foo ::deep .bar{width:20px}"); diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -23860,6 +23931,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -23906,6 +23978,7 @@ mod tests { // custom_idents: false, ..Default::default() }, + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -23951,6 +24024,7 @@ mod tests { custom_idents: false, ..Default::default() }, + false, ); #[cfg(feature = "grid")] diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -23995,6 +24069,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); #[cfg(feature = "grid")] diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24033,6 +24108,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); #[cfg(feature = "grid")] diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24073,6 +24149,7 @@ mod tests { grid: false, ..Default::default() }, + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24089,6 +24166,7 @@ mod tests { map! 
{}, HashMap::new(), Default::default(), + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24123,6 +24201,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); // :global(:local(.hi)) { diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24155,6 +24234,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24184,6 +24264,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24221,6 +24302,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24240,6 +24322,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24259,6 +24342,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24278,6 +24362,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24297,6 +24382,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24327,6 +24413,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24348,6 +24435,7 @@ mod tests { pattern: crate::css_modules::Pattern::parse("test-[hash]-[local]").unwrap(), ..Default::default() }, + false, ); let stylesheet = StyleSheet::parse( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ 
-24409,6 +24497,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24478,6 +24567,7 @@ mod tests { dashed_idents: true, ..Default::default() }, + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24497,6 +24587,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); css_modules_test( r#" diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24514,6 +24605,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); css_modules_test( r#" diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24531,6 +24623,7 @@ mod tests { }, HashMap::new(), Default::default(), + false, ); css_modules_test( r#" diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24551,6 +24644,7 @@ mod tests { animation: false, ..Default::default() }, + false, ); css_modules_test( r#" diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24569,6 +24663,7 @@ mod tests { }, HashMap::new(), crate::css_modules::Config { ..Default::default() }, + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24591,6 +24686,7 @@ mod tests { pattern: crate::css_modules::Pattern::parse("[content-hash]-[local]").unwrap(), ..Default::default() }, + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24616,6 +24712,7 @@ mod tests { }, HashMap::new(), crate::css_modules::Config { ..Default::default() }, + false, ); css_modules_test( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -24643,8 +24740,142 @@ mod tests { container: false, ..Default::default() }, + false, + ); + + css_modules_test( + ".foo { view-transition-name: bar }", + ".EgL3uq_foo{view-transition-name:EgL3uq_bar}", + map! 
{ + "foo" => "EgL3uq_foo", + "bar" => "EgL3uq_bar" + }, + HashMap::new(), + Default::default(), + true, + ); + css_modules_test( + ".foo { view-transition-name: none }", + ".EgL3uq_foo{view-transition-name:none}", + map! { + "foo" => "EgL3uq_foo" + }, + HashMap::new(), + Default::default(), + true, + ); + css_modules_test( + ".foo { view-transition-name: auto }", + ".EgL3uq_foo{view-transition-name:auto}", + map! { + "foo" => "EgL3uq_foo" + }, + HashMap::new(), + Default::default(), + true, + ); + + css_modules_test( + ".foo { view-transition-class: bar baz qux }", + ".EgL3uq_foo{view-transition-class:EgL3uq_bar EgL3uq_baz EgL3uq_qux}", + map! { + "foo" => "EgL3uq_foo", + "bar" => "EgL3uq_bar", + "baz" => "EgL3uq_baz", + "qux" => "EgL3uq_qux" + }, + HashMap::new(), + Default::default(), + true, + ); + + css_modules_test( + ".foo { view-transition-group: contain }", + ".EgL3uq_foo{view-transition-group:contain}", + map! { + "foo" => "EgL3uq_foo" + }, + HashMap::new(), + Default::default(), + true, + ); + css_modules_test( + ".foo { view-transition-group: bar }", + ".EgL3uq_foo{view-transition-group:EgL3uq_bar}", + map! { + "foo" => "EgL3uq_foo", + "bar" => "EgL3uq_bar" + }, + HashMap::new(), + Default::default(), + true, + ); + + css_modules_test( + "@view-transition { types: foo bar baz }", + "@view-transition{types:EgL3uq_foo EgL3uq_bar EgL3uq_baz}", + map! { + "foo" => "EgL3uq_foo", + "bar" => "EgL3uq_bar", + "baz" => "EgL3uq_baz" + }, + HashMap::new(), + Default::default(), + true, + ); + + css_modules_test( + ":root:active-view-transition-type(foo, bar) { color: red }", + ":root:active-view-transition-type(EgL3uq_foo,EgL3uq_bar){color:red}", + map! 
{ + "foo" => "EgL3uq_foo", + "bar" => "EgL3uq_bar" + }, + HashMap::new(), + Default::default(), + true, ); + for name in &[ + "view-transition-group", + "view-transition-image-pair", + "view-transition-new", + "view-transition-old", + ] { + css_modules_test( + &format!(":root::{}(foo) {{position: fixed}}", name), + &format!(":root::{}(EgL3uq_foo){{position:fixed}}", name), + map! { + "foo" => "EgL3uq_foo" + }, + HashMap::new(), + Default::default(), + true, + ); + css_modules_test( + &format!(":root::{}(.bar) {{position: fixed}}", name), + &format!(":root::{}(.EgL3uq_bar){{position:fixed}}", name), + map! { + "bar" => "EgL3uq_bar" + }, + HashMap::new(), + Default::default(), + true, + ); + css_modules_test( + &format!(":root::{}(foo.bar.baz) {{position: fixed}}", name), + &format!(":root::{}(EgL3uq_foo.EgL3uq_bar.EgL3uq_baz){{position:fixed}}", name), + map! { + "foo" => "EgL3uq_foo", + "bar" => "EgL3uq_bar", + "baz" => "EgL3uq_baz" + }, + HashMap::new(), + Default::default(), + true, + ); + } + // Stable hashes between project roots. 
fn test_project_root(project_root: &str, filename: &str, hash: &str) { let stylesheet = StyleSheet::parse( diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -28143,6 +28374,7 @@ mod tests { dashed_idents: true, ..Default::default() }, + false, ); } diff --git a/src/lib.rs b/src/lib.rs --- a/src/lib.rs +++ b/src/lib.rs @@ -28490,4 +28722,24 @@ mod tests { ".foo{--bar:currentcolor;--foo:1.1em;all:unset}", ); } + + #[test] + fn test_view_transition() { + minify_test( + "@view-transition { navigation: auto }", + "@view-transition{navigation:auto}", + ); + minify_test( + "@view-transition { navigation: auto; types: none; }", + "@view-transition{navigation:auto;types:none}", + ); + minify_test( + "@view-transition { navigation: auto; types: foo bar; }", + "@view-transition{navigation:auto;types:foo bar}", + ); + minify_test( + "@layer { @view-transition { navigation: auto; types: foo bar; } }", + "@layer{@view-transition{navigation:auto;types:foo bar}}", + ); + } }
Support `@view-transition` - MDN: https://developer.mozilla.org/en-US/docs/Web/CSS/@view-transition - Spec: https://drafts.csswg.org/css-view-transitions-2/#view-transition-rule View Transition Class Support in CSS Modules As part of View Transitions there's now a new type of "class" defined on the the element using view-transition-class either through CSS or using inline styles. https://developer.chrome.com/blog/view-transitions-update-io24#view-transition-class Ideally these would be supported to be namespaced by CSS modules when used inside a `.module.css` files and then exported by name to JS so that it can be referred to in JS like any other class. If I specify either `::view-transition-group(...)`, `::view-transition-image-pair(...)`, `::view-transition-old(...)` or `::view-transition-new(...)` with a class selector in the parenthesis, that should become a generated name and then exported to JS. ``` // my.module.css ::view-transition-group(.slideIn) { ... } ``` ``` import { slideIn } from './my.module.css'; <div style={{ viewTransitionClass: slideIn }} /> ``` Turns into: ``` ::view-transition-group(.xyz) { ... } ``` ``` const slideIn = 'xyz'; <div style={{ viewTransitionClass: slideIn }} /> ``` This becomes key to make reusable animations that can be applied to components without worrying about global namespaces and ensuring they're only included in the CSS if used. This works as expected with CSS Modules in the old Webpack loaders but errors with lightningcss (which affects both Parcel and Turbopack). Due to the parsing failing on the class names. ``` @parcel/transformer-css: Unexpected token Delim('.') 4 | > 5 | ::view-transition-new(.foo) { > | ^ 6 | animation-duration: 1s; 7 | } ``` Related to #859 but a different problem.
2025-01-01T11:25:06
1.0
7f290350ed0cc53b1267d6810417f0611135eeee
[ "tests::test_css_modules", "tests::test_view_transition", "tests::test_selectors" ]
[ "media_query::tests::test_negated_interval_parens", "media_query::tests::test_and", "tests::test_api", "tests::test_at_scope", "tests::test_all", "tests::test_border_spacing", "bundler::tests::test_source_map", "bundler::tests::test_license_comments", "tests::test_break", "tests::test_border_radiu...
[]
[]
lycheeverse/lychee
1,547
lycheeverse__lychee-1547
[ "1533" ]
e43086c2e912248c9e245d26c135e7ba941d4c47
diff --git /dev/null b/fixtures/configs/format.toml new file mode 100644 --- /dev/null +++ b/fixtures/configs/format.toml @@ -0,0 +1,1 @@ +format = "json" diff --git a/lychee-bin/src/options.rs b/lychee-bin/src/options.rs --- a/lychee-bin/src/options.rs +++ b/lychee-bin/src/options.rs @@ -46,9 +46,10 @@ const TIMEOUT_STR: &str = concatcp!(DEFAULT_TIMEOUT_SECS); const RETRY_WAIT_TIME_STR: &str = concatcp!(DEFAULT_RETRY_WAIT_TIME_SECS); /// The format to use for the final status report -#[derive(Debug, Deserialize, Default, Clone, Display, EnumIter, VariantNames)] +#[derive(Debug, Deserialize, Default, Clone, Display, EnumIter, VariantNames, PartialEq)] #[non_exhaustive] #[strum(serialize_all = "snake_case")] +#[serde(rename_all = "snake_case")] pub(crate) enum StatsFormat { #[default] Compact, diff --git a/lychee-bin/src/options.rs b/lychee-bin/src/options.rs --- a/lychee-bin/src/options.rs +++ b/lychee-bin/src/options.rs @@ -544,6 +545,7 @@ impl Config { exclude_link_local: false; exclude_loopback: false; exclude_mail: false; + format: StatsFormat::default(); remap: Vec::<String>::new(); fallback_extensions: Vec::<String>::new(); header: Vec::<String>::new();
diff --git a/lychee-bin/tests/cli.rs b/lychee-bin/tests/cli.rs --- a/lychee-bin/tests/cli.rs +++ b/lychee-bin/tests/cli.rs @@ -1723,4 +1723,26 @@ mod cli { Ok(()) } + + #[tokio::test] + async fn test_json_format_in_config() -> Result<()> { + let mock_server = mock_server!(StatusCode::OK); + let config = fixtures_path().join("configs").join("format.toml"); + let mut cmd = main_command(); + cmd.arg("--config") + .arg(config) + .arg("-") + .write_stdin(mock_server.uri()) + .env_clear() + .assert() + .success(); + + // Check that the output is in JSON format + let output = cmd.output().unwrap(); + let output = std::str::from_utf8(&output.stdout).unwrap(); + let json: serde_json::Value = serde_json::from_str(output)?; + assert_eq!(json["total"], 1); + + Ok(()) + } }
Setting the `format` flag within a custom `lychee.toml` config file does not work Hello, I'm just trying to see if I can define the format inside my custom `lychee.toml` config file, but I found a couple of issues: With the following config ```toml format = "json" ``` I get the following error message: ```bash [ERROR] Error while loading config: Cannot load configuration file `./lychee.toml`: Failed to parse configuration file Caused by: TOML parse error at line 38, column 10 | 38 | format = "json" | ^^^^^^ unknown variant `json`, expected one of `Compact`, `Detailed`, `Json`, `Markdown`, `Raw` ``` So then, I decided to change from `json` to `Json`, but it does not work and the output is shown in the default format `compact`.
2024-10-27T08:08:28
0.16
e43086c2e912248c9e245d26c135e7ba941d4c47
[ "cli::test_lycheecache_exclude_custom_status_codes", "cli::test_json_format_in_config" ]
[ "cli::test_dump_inputs_glob_all", "cli::test_dump_inputs_glob_md", "cli::test_dump_inputs_path", "cli::test_dump_inputs_stdin", "cli::test_basic_auth", "cli::test_dump_inputs_url", "cli::test_config_accept", "cli::test_dump_to_file", "cli::test_email_html_with_subject", "cli::test_email_markdown_w...
[ "cli::test_crates_io_quirk", "cli::test_check_github_no_token", "cli::test_caching_across_files", "cli::test_cookie_jar", "cli::test_email", "cli::test_exclude_email_by_default", "cli::test_exclude_multiple_urls", "cli::test_excluded_paths", "cli::test_formatted_file_output", "cli::test_include_hi...
[]
lycheeverse/lychee
1,546
lycheeverse__lychee-1546
[ "1538" ]
3094bbca335373156e5b73c89027a172e842a4e1
diff --git /dev/null b/fixtures/TEST_STYLESHEET_LINK.md new file mode 100644 --- /dev/null +++ b/fixtures/TEST_STYLESHEET_LINK.md @@ -0,0 +1,1 @@ +<link href="/@global/global.css" rel="stylesheet"> diff --git a/lychee-lib/src/extract/html/html5ever.rs b/lychee-lib/src/extract/html/html5ever.rs --- a/lychee-lib/src/extract/html/html5ever.rs +++ b/lychee-lib/src/extract/html/html5ever.rs @@ -92,7 +92,7 @@ impl TokenSink for LinkExtractor { return TokenSinkResult::Continue; } - for attr in attrs { + for attr in &attrs { let urls = LinkExtractor::extract_urls_from_elem_attr( &attr.name.local, &name, diff --git a/lychee-lib/src/extract/html/html5ever.rs b/lychee-lib/src/extract/html/html5ever.rs --- a/lychee-lib/src/extract/html/html5ever.rs +++ b/lychee-lib/src/extract/html/html5ever.rs @@ -104,8 +104,11 @@ impl TokenSink for LinkExtractor { Some(urls) => urls .into_iter() .filter(|url| { - // Only accept email addresses, which occur in `href` attributes - // and start with `mailto:`. Technically, email addresses could + // Only accept email addresses which + // - occur in `href` attributes + // - start with `mailto:` + // + // Technically, email addresses could // also occur in plain text, but we don't want to extract those // because of the high false positive rate. 
// diff --git a/lychee-lib/src/extract/html/html5ever.rs b/lychee-lib/src/extract/html/html5ever.rs --- a/lychee-lib/src/extract/html/html5ever.rs +++ b/lychee-lib/src/extract/html/html5ever.rs @@ -115,6 +118,18 @@ impl TokenSink for LinkExtractor { let is_phone = url.starts_with("tel:"); let is_href = attr.name.local.as_ref() == "href"; + if attrs.iter().any(|attr| { + &attr.name.local == "rel" && attr.value.contains("stylesheet") + }) { + // Skip virtual/framework-specific stylesheet paths that start with /@ or @ + // These are typically resolved by dev servers or build tools rather than being real URLs + // Examples: /@global/style.css, @tailwind/base.css as in + // `<link href="/@global/style.css" rel="stylesheet">` + if url.starts_with("/@") || url.starts_with('@') { + return false; + } + } + !is_email || (is_mailto && is_href) || (is_phone && is_href) }) .map(|url| RawUri { diff --git a/lychee-lib/src/extract/html/html5gum.rs b/lychee-lib/src/extract/html/html5gum.rs --- a/lychee-lib/src/extract/html/html5gum.rs +++ b/lychee-lib/src/extract/html/html5gum.rs @@ -183,6 +183,22 @@ impl LinkExtractor { return; } + // Skip virtual/framework-specific stylesheet paths that start with /@ or @ + // These are typically resolved by dev servers or build tools rather than being real URLs + // Examples: /@global/style.css, @tailwind/base.css + if self + .current_attributes + .get("rel") + .map_or(false, |rel| rel.contains("stylesheet")) + { + if let Some(href) = self.current_attributes.get("href") { + if href.starts_with("/@") || href.starts_with('@') { + self.current_attributes.clear(); + return; + } + } + } + let new_urls = self .extract_urls_from_elem_attr() .into_iter()
diff --git a/lychee-bin/tests/cli.rs b/lychee-bin/tests/cli.rs --- a/lychee-bin/tests/cli.rs +++ b/lychee-bin/tests/cli.rs @@ -231,6 +231,17 @@ mod cli { Ok(()) } + #[test] + fn test_stylesheet_misinterpreted_as_email() -> Result<()> { + test_json_output!( + "TEST_STYLESHEET_LINK.md", + MockResponseStats { + total: 0, + ..MockResponseStats::default() + } + ) + } + /// Test that a GitHub link can be checked without specifying the token. #[test] fn test_check_github_no_token() -> Result<()> { diff --git a/lychee-lib/src/extract/html/html5ever.rs b/lychee-lib/src/extract/html/html5ever.rs --- a/lychee-lib/src/extract/html/html5ever.rs +++ b/lychee-lib/src/extract/html/html5ever.rs @@ -466,4 +481,14 @@ mod tests { let uris = extract_html(input, false); assert!(uris.is_empty()); } + + #[test] + fn test_skip_emails_in_stylesheets() { + let input = r#" + <link href="/@global/global.css" rel="stylesheet"> + "#; + + let uris = extract_html(input, false); + assert!(uris.is_empty()); + } } diff --git a/lychee-lib/src/extract/html/html5gum.rs b/lychee-lib/src/extract/html/html5gum.rs --- a/lychee-lib/src/extract/html/html5gum.rs +++ b/lychee-lib/src/extract/html/html5gum.rs @@ -662,4 +678,14 @@ mod tests { let uris = extract_html(input, false); assert!(uris.is_empty()); } + + #[test] + fn test_skip_emails_in_stylesheets() { + let input = r#" + <link href="/@global/global.css" rel="stylesheet"> + "#; + + let uris = extract_html(input, false); + assert!(uris.is_empty()); + } }
Interpreting stylesheet link with `@` as mail > lychee 0.16.1 --- ```sh > lychee -v test.html ``` with `test.html`: ```html <link href="/@global/global.css" rel="stylesheet"> ``` results in: ```sh [EXCLUDED] mailto:/@global/global.css ``` Without the `@`, no check happens.
2024-10-27T07:18:28
0.16
e43086c2e912248c9e245d26c135e7ba941d4c47
[ "cli::test_stylesheet_misinterpreted_as_email" ]
[ "cli::test_dump_inputs_glob_all", "cli::test_dump_inputs_glob_md", "cli::test_dump_inputs_path", "cli::test_dump_inputs_stdin", "cli::test_dump_inputs_url", "cli::test_basic_auth", "cli::test_dump_to_file", "cli::test_config_smoketest", "cli::test_cache_config", "cli::test_config_accept", "cli::...
[ "cli::test_caching_across_files", "cli::test_check_github_no_token", "cli::test_crates_io_quirk", "cli::test_cookie_jar", "cli::test_email", "cli::test_exclude_email_by_default", "cli::test_excluded_paths", "cli::test_exclude_multiple_urls", "cli::test_formatted_file_output", "cli::test_include_hi...
[]
mitsuhiko/minijinja
624
mitsuhiko__minijinja-624
[ "623" ]
765039b495d59bceafa89aa5874c7f171512a086
diff --git a/CHANGELOG.md b/CHANGELOG.md --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +15,8 @@ All notable changes to MiniJinja are documented here. - Reversing bytes and convergint them implicitly to strings will now work more consistently. #619 - Added type hints for the Python binding and relaxed maturin constraint. #590 +- `minijinja-cli` now allows the template name to be set to an empty + string when `--template` is used, to allow suppliying a data file. #624 ## 2.4.0 diff --git a/minijinja-cli/src/cli.rs b/minijinja-cli/src/cli.rs --- a/minijinja-cli/src/cli.rs +++ b/minijinja-cli/src/cli.rs @@ -409,12 +409,13 @@ pub fn execute() -> Result<i32, Error> { .map(|x| x.as_str()), ) { (None, Some(STDIN_STDOUT)) => (Cow::Borrowed(STDIN_STDOUT), None), + (None, Some("")) => bail!("Empty template names are only valid with --template."), (None, Some(rel_name)) => ( Cow::Owned(cwd.join(rel_name).to_string_lossy().to_string()), None, ), - (Some(source), Some(STDIN_STDOUT)) => (Cow::Borrowed("<string>"), Some(source.clone())), - _ => unreachable!(), + (Some(source), None | Some("")) => (Cow::Borrowed("<string>"), Some(source.clone())), + _ => bail!("When --template is used, a template cannot be passed as argument (only an empty argument is allowed)."), }; let mut output = Output::new(matches.get_one::<PathBuf>("output").unwrap())?; diff --git a/minijinja-cli/src/command.rs b/minijinja-cli/src/command.rs --- a/minijinja-cli/src/command.rs +++ b/minijinja-cli/src/command.rs @@ -3,6 +3,7 @@ /// application and by build.rs to generate shell completions. use std::path::PathBuf; +use clap::builder::ArgPredicate; use clap::{arg, command, value_parser, ArgAction, Command}; const ADVANCED: &str = "Advanced"; diff --git a/minijinja-cli/src/command.rs b/minijinja-cli/src/command.rs --- a/minijinja-cli/src/command.rs +++ b/minijinja-cli/src/command.rs @@ -324,9 +325,12 @@ pub(super) fn make_command() -> Command { This is the path to the input template in MiniJinja/Jinja2 syntax. 
\ If not provided this defaults to '-' which means the template is \ loaded from stdin. When the format is set to 'auto' which is the \ - default, the extension of the filename is used to detect the format.") + default, the extension of the filename is used to detect the format.\n\n\ + \ + This argument can be set to an empty string when --template is provided \ + to allow a data file to be supplied.") .default_value("-") - .conflicts_with("template"), + .default_value_if("template", ArgPredicate::IsPresent, None), arg!(data_file: [DATA_FILE] "Path to the data file") .long_help("\ Path to the data file in the given format.\n\n\
diff --git a/minijinja-cli/tests/test_basic.rs b/minijinja-cli/tests/test_basic.rs --- a/minijinja-cli/tests/test_basic.rs +++ b/minijinja-cli/tests/test_basic.rs @@ -656,6 +656,63 @@ fn test_template_string() { "###); } +#[test] +#[allow(clippy::suspicious_command_arg_space)] +fn test_empty_template_name_with_string_template() { + let input = file_with_contents_and_ext(r#"{"name": "Peter"}"#, ".json"); + assert_cmd_snapshot!( + cli() + .arg("-tHello {{ name }}") + .arg("") + .arg(input.path()) + .arg("--no-newline"), + @r###" + success: true + exit_code: 0 + ----- stdout ----- + Hello Peter + ----- stderr ----- + "###); +} + +#[test] +#[allow(clippy::suspicious_command_arg_space)] +fn test_template_name_with_string_template_fails() { + let input = file_with_contents_and_ext(r#"{"name": "Peter"}"#, ".json"); + assert_cmd_snapshot!( + cli() + .arg("-tHello {{ name }}") + .arg("invalid.tmpl") + .arg(input.path()) + .arg("--no-newline"), + @r###" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + error: When --template is used, a template cannot be passed as argument (only an empty argument is allowed). + "###); +} + +#[test] +fn test_empty_template_name_errors() { + let input = file_with_contents_and_ext(r#"{"name": "Peter"}"#, ".json"); + assert_cmd_snapshot!( + cli() + .arg("") + .arg(input.path()) + .arg("--no-newline"), + @r###" + success: false + exit_code: 1 + ----- stdout ----- + + ----- stderr ----- + error: Empty template names are only valid with --template. + "###); +} + #[test] fn test_print_config_fully_loaded() { assert_cmd_snapshot!(
feat(cli): ability to pass data on stdin while using template source passed by parameter 2.4's new `--template` flag is great, particularly for Nushellers for whom process substitution isn't available 😅 When the `--template <TEMPLATE_STRING>` is used, potentially the next file parameter could be assumed to be the `DATA_FILE`? ```bash $ minijinja-cli -t 'Hi {{ name }}' -D name=@mitsuhiko Hi @mitsuhiko # unable to pass the data as stdin (or other file)? $ jo name='\@mitsuhiko' | minijinja-cli -f json -t 'Hi {{ name }}' - error: the argument '--template <TEMPLATE_STRING>' cannot be used with '[TEMPLATE_FILE]' Usage: minijinja-cli --format <FORMAT> --template <TEMPLATE_STRING> [TEMPLATE_FILE] [DATA_FILE] For more information, try '--help'. ``` Thank you for everything 🙏
2024-10-31T18:52:57
2.4
84e0b845102356703f374f4be75a89567eee3b21
[ "test_empty_template_name_errors", "test_empty_template_name_with_string_template", "test_template_name_with_string_template_fails" ]
[ "test_json5", "test_cbor", "test_json", "test_explicit_format", "test_include", "test_line_statement", "test_context_stdin", "test_no_newline", "test_ini", "test_preserve_order_json", "test_print_config_fully_loaded", "test_load_config", "test_preserve_order_cbor", "test_preserve_order_tom...
[]
[ "test_help" ]
mitsuhiko/minijinja
611
mitsuhiko__minijinja-611
[ "610" ]
3e225c35d699a740e060f8b57876c02121d655f5
diff --git a/CHANGELOG.md b/CHANGELOG.md --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,12 @@ All notable changes to MiniJinja are documented here. +## 2.4.1 + +- `minijinja-cli` now supports preservation of order in maps. #611 +- Fixed an issue where CBOR was not correctly deserialized in + `minijinja-cli`. #611 + ## 2.4.0 - Updated version of `minijinja-cli` with support for better documentation, diff --git a/minijinja-cli/Cargo.toml b/minijinja-cli/Cargo.toml --- a/minijinja-cli/Cargo.toml +++ b/minijinja-cli/Cargo.toml @@ -12,7 +12,7 @@ readme = "README.md" rust-version = "1.65" [features] -default = ["toml", "yaml", "querystring", "cbor", "datetime", "json5", "repl", "unicode", "contrib"] +default = ["toml", "yaml", "querystring", "cbor", "datetime", "json5", "repl", "unicode", "contrib", "preserve_order"] yaml = ["serde_yml"] querystring = ["serde_qs"] cbor = ["ciborium"] diff --git a/minijinja-cli/Cargo.toml b/minijinja-cli/Cargo.toml --- a/minijinja-cli/Cargo.toml +++ b/minijinja-cli/Cargo.toml @@ -24,6 +24,7 @@ unicode = ["minijinja/unicode"] ini = ["configparser"] contrib = ["minijinja-contrib"] toml = ["dep:toml", "home"] +preserve_order = ["minijinja/preserve_order"] [dependencies] anyhow = "1.0.74" diff --git a/minijinja-cli/README.md b/minijinja-cli/README.md --- a/minijinja-cli/README.md +++ b/minijinja-cli/README.md @@ -193,6 +193,7 @@ selected when the defaults are turned off: * `completions`: enables the generation of completions * `unicode`: enables the unicode identifier support * `contrib`: enables the `minijinja_contrib` based functionality including the `--py-compat` flag +* `preserve_order`: enables order preservation for maps Additionally if the `ASSET_OUT_DIR` environment variable is set during compilation manpage (and optionally completions) are generated into that diff --git a/minijinja-cli/src/cli.rs b/minijinja-cli/src/cli.rs --- a/minijinja-cli/src/cli.rs +++ b/minijinja-cli/src/cli.rs @@ -1,6 +1,6 @@ use std::borrow::Cow; use 
std::collections::BTreeMap; -use std::io::Write; +use std::io::{Read, Write}; use std::path::{Path, PathBuf}; use std::sync::Mutex; use std::{fs, io}; diff --git a/minijinja-cli/src/cli.rs b/minijinja-cli/src/cli.rs --- a/minijinja-cli/src/cli.rs +++ b/minijinja-cli/src/cli.rs @@ -67,13 +67,14 @@ fn load_data( selector: Option<&str>, ) -> Result<(BTreeMap<String, Value>, bool), Error> { let (contents, stdin_used) = if path == Path::new(STDIN_STDOUT) { - ( - io::read_to_string(io::stdin()).context("unable to read data from stdin")?, - true, - ) + let mut buf = Vec::<u8>::new(); + io::stdin() + .read_to_end(&mut buf) + .context("unable to read data from stdin")?; + (buf, true) } else { ( - fs::read_to_string(path) + fs::read(path) .with_context(|| format!("unable to read data file '{}'", path.display()))?, false, ) diff --git a/minijinja-cli/src/cli.rs b/minijinja-cli/src/cli.rs --- a/minijinja-cli/src/cli.rs +++ b/minijinja-cli/src/cli.rs @@ -89,24 +90,28 @@ fn load_data( }; let mut data: Value = match format { - "json" => preferred_json::from_str(&contents)?, + "json" => preferred_json::from_slice(&contents)?, #[cfg(feature = "querystring")] - "querystring" => Value::from(serde_qs::from_str::<BTreeMap<String, Value>>(&contents)?), + "querystring" => Value::from(serde_qs::from_bytes::<BTreeMap<String, Value>>(&contents)?), #[cfg(feature = "yaml")] "yaml" => { // for merge keys to work we need to manually call `apply_merge`. // For this reason we need to deserialize into a serde_yml::Value // before converting it into a final value. - let mut v: serde_yml::Value = serde_yml::from_str(&contents)?; + let mut v: serde_yml::Value = serde_yml::from_slice(&contents)?; v.apply_merge()?; Value::from_serialize(v) } #[cfg(feature = "toml")] - "toml" => toml::from_str(&contents)?, + "toml" => { + let contents = String::from_utf8(contents).context("invalid utf-8")?; + toml::from_str(&contents)? 
+ } #[cfg(feature = "cbor")] - "cbor" => ciborium::from_reader(contents.as_bytes())?, + "cbor" => ciborium::from_reader(&contents[..])?, #[cfg(feature = "ini")] "ini" => { + let contents = String::from_utf8(contents).context("invalid utf-8")?; let mut config = configparser::ini::Ini::new(); config .read(contents)
diff --git a/minijinja-cli/tests/test_basic.rs b/minijinja-cli/tests/test_basic.rs --- a/minijinja-cli/tests/test_basic.rs +++ b/minijinja-cli/tests/test_basic.rs @@ -12,13 +12,13 @@ fn file_with_contents(contents: &str) -> NamedTempFile { file_with_contents_and_ext(contents, "") } -fn file_with_contents_and_ext(contents: &str, ext: &str) -> NamedTempFile { +fn file_with_contents_and_ext<X: AsRef<[u8]>>(contents: X, ext: &str) -> NamedTempFile { let mut f = tempfile::Builder::new() .prefix("minijinja-testfile--") .suffix(ext) .tempfile() .unwrap(); - f.write_all(contents.as_bytes()).unwrap(); + f.write_all(contents.as_ref()).unwrap(); f } diff --git a/minijinja-cli/tests/test_basic.rs b/minijinja-cli/tests/test_basic.rs --- a/minijinja-cli/tests/test_basic.rs +++ b/minijinja-cli/tests/test_basic.rs @@ -232,6 +232,29 @@ fn test_querystring() { "###); } +#[test] +#[cfg(feature = "cbor")] +fn test_cbor() { + let input = file_with_contents_and_ext( + [0xa1, 0x63, 0x66, 0x6f, 0x6f, 0x63, 0x62, 0x61, 0x72], + ".cbor", + ); + let tmpl = file_with_contents(r#"Hello {{ foo }}!"#); + + assert_cmd_snapshot!( + cli() + .arg(tmpl.path()) + .arg(input.path()), + @r###" + success: true + exit_code: 0 + ----- stdout ----- + Hello bar! 
+ + ----- stderr ----- + "###); +} + #[test] #[cfg(feature = "ini")] fn test_ini() { diff --git a/minijinja-cli/tests/test_basic.rs b/minijinja-cli/tests/test_basic.rs --- a/minijinja-cli/tests/test_basic.rs +++ b/minijinja-cli/tests/test_basic.rs @@ -270,6 +293,128 @@ fn test_ini() { "###); } +#[test] +#[cfg(feature = "preserve_order")] +fn test_preserve_order_json() { + let input = file_with_contents_and_ext(r#"{"x": {"c": 3, "a": 1, "b": 2}}"#, ".json"); + let tmpl = + file_with_contents("{% for key, value in x|items %}{{ key }}: {{ value }}\n{% endfor %}"); + + assert_cmd_snapshot!( + cli() + .arg(tmpl.path()) + .arg(input.path()), + @r###" + success: true + exit_code: 0 + ----- stdout ----- + c: 3 + a: 1 + b: 2 + + + ----- stderr ----- + "###); +} + +#[test] +#[cfg(all(feature = "preserve_order", feature = "yaml"))] +fn test_preserve_order_yaml() { + let input = file_with_contents_and_ext( + r#" +x: + c: 3 + a: 1 + b: 2 +"#, + ".yaml", + ); + let tmpl = + file_with_contents("{% for key, value in x|items %}{{ key }}: {{ value }}\n{% endfor %}"); + + assert_cmd_snapshot!( + cli() + .arg(tmpl.path()) + .arg(input.path()), + @r###" + success: true + exit_code: 0 + ----- stdout ----- + c: 3 + a: 1 + b: 2 + + + ----- stderr ----- + "###); +} + +#[test] +#[cfg(all(feature = "preserve_order", feature = "toml"))] +fn test_preserve_order_toml() { + let input = file_with_contents_and_ext( + r#" +[x] +c = 3 +a = 1 +b = 2 +"#, + ".toml", + ); + let tmpl = + file_with_contents("{% for key, value in x|items %}{{ key }}: {{ value }}\n{% endfor %}"); + + assert_cmd_snapshot!( + cli() + .arg(tmpl.path()) + .arg(input.path()), + @r###" + success: true + exit_code: 0 + ----- stdout ----- + c: 3 + a: 1 + b: 2 + + + ----- stderr ----- + "###); +} + +#[test] +#[cfg(all(feature = "preserve_order", feature = "cbor"))] +fn test_preserve_order_cbor() { + let input = file_with_contents_and_ext( + [ + 0xa1, // map(1) + 0x61, 0x78, // "x" + 0xa3, // map(3) + 0x61, 0x63, 0x03, // "c": 3 + 
0x61, 0x61, 0x01, // "a": 1 + 0x61, 0x62, 0x02, // "b": 2 + ], + ".cbor", + ); + let tmpl = + file_with_contents("{% for key, value in x|items %}{{ key }}: {{ value }}\n{% endfor %}"); + + assert_cmd_snapshot!( + cli() + .arg(tmpl.path()) + .arg(input.path()), + @r###" + success: true + exit_code: 0 + ----- stdout ----- + c: 3 + a: 1 + b: 2 + + + ----- stderr ----- + "###); +} + #[test] fn test_context_stdin() { let tmpl = file_with_contents(r#"Hello {{ foo }}!"#);
Keys gets reordered, unlike with the Python implementation Please consider the following input data: ```yaml --- fruits: cantaloupe: colour: orange banana: colour: yellow apple: colour: green ``` and this template: ```jinja2 {%- for fruit in fruits -%} {{ fruit }}: {{ fruits[fruit].colour }} {% endfor -%} ``` Running `minijinja-cli --py-compat --no-newline fruits.txt.j2 properties.yaml` on those result in the following output: ``` apple: green banana: yellow cantaloupe: orange ``` The `--py-compat` flag makes no difference. The need for `--no-newline` is surprising as the closing brace is already instructed to strip whitespace, but without unintended side effects. **Additional helpful information:** - Version of minijinja: 2.4.0 (85c9682) - Version of rustc: rustc 1.81.0 (eeb90cda1 2024-09-04) (built from a source tarball) - Operating system and version: OpenBSD 7.6 GENERIC.MP#338 amd64 **Expected result** Given the stated goal to _Stay as close as possible to Jinja2_ it is unexpected that the fruits gets reordered (i.e. sorted alphabetically?) when the Python implementation keeps their order as given in the yaml file. Compare with the results from e.g. running either `j2 fruits.txt.j2 properties.yaml` or `jinja2-3.9 fruits.txt.j2 properties.yaml`. Where j2 comes from j2cli ([kolypto/j2cli](https://github.com/kolypto/j2cli)) in Debian, version 0.3.12b-4 and jinja2-3.9 comes from py39-jinja2-cli-0.8.2 ([mattrobenholt/jinja2-cli](https://github.com/mattrobenolt/jinja2-cli)) in FreeBSD. ``` cantaloupe: orange banana: yellow apple: green ``` I have not looked at minijinja's source code. Neither have I investigated nor reflected on whether yaml is giving guarantees on retaining order, but in practise it has always done so for years on multiple machines. Given that the behaviour deviating, filing a bug seems appropriate.
2024-10-25T23:46:25
2.4
84e0b845102356703f374f4be75a89567eee3b21
[ "test_cbor" ]
[ "test_json5", "test_explicit_format", "test_include", "test_json", "test_load_config", "test_line_statement", "test_context_stdin", "test_no_include", "test_no_newline", "test_querystring", "test_ini", "test_print_config_fully_loaded", "test_help", "test_dump", "test_stdin_template", "...
[]
[]
mitsuhiko/minijinja
584
mitsuhiko__minijinja-584
[ "583" ]
4cf56e9c7083bbda17fe49e05677928bcfca92ca
diff --git a/CHANGELOG.md b/CHANGELOG.md --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,11 @@ All notable changes to MiniJinja are documented here. +## 2.3.1 + +- Fixes a regresion in `PartialEq` / `Eq` in `Value` caused by changes + in 2.3.0. #584 + ## 2.3.0 - Fixes some compiler warnings in Rust 1.81. #575 diff --git a/minijinja/src/value/merge_object.rs b/minijinja/src/value/merge_object.rs --- a/minijinja/src/value/merge_object.rs +++ b/minijinja/src/value/merge_object.rs @@ -1,7 +1,7 @@ use std::collections::BTreeSet; use std::sync::Arc; -use crate::value::{Enumerator, Object, ObjectExt, Value}; +use crate::value::{Enumerator, Object, Value}; /// Utility struct used by [`context!`](crate::context) to merge /// multiple values. diff --git a/minijinja/src/value/merge_object.rs b/minijinja/src/value/merge_object.rs --- a/minijinja/src/value/merge_object.rs +++ b/minijinja/src/value/merge_object.rs @@ -17,22 +17,14 @@ impl Object for MergeObject { } fn enumerate(self: &Arc<Self>) -> Enumerator { - self.mapped_enumerator(|this| { - let mut seen = BTreeSet::new(); - Box::new( - this.0 - .iter() - .flat_map(|v| v.try_iter().ok()) - .flatten() - .filter_map(move |v| { - if seen.contains(&v) { - None - } else { - seen.insert(v.clone()); - Some(v) - } - }), - ) - }) + // we collect here the whole internal object once on iteration so that + // we have an enumerator with a known length. + let items = self + .0 + .iter() + .flat_map(|v| v.try_iter().ok()) + .flatten() + .collect::<BTreeSet<_>>(); + Enumerator::Iter(Box::new(items.into_iter())) } } diff --git a/minijinja/src/value/mod.rs b/minijinja/src/value/mod.rs --- a/minijinja/src/value/mod.rs +++ b/minijinja/src/value/mod.rs @@ -493,12 +493,34 @@ impl PartialEq for Value { } match (a.repr(), b.repr()) { (ObjectRepr::Map, ObjectRepr::Map) => { - if a.enumerator_len() != b.enumerator_len() { + // only if we have known lengths can we compare the enumerators + // ahead of time. 
This function has a fallback for when a + // map has an unknown length. That's generally a bad idea, but + // it makes sense supporting regardless as silent failures are + // not a lot of fun. + let mut need_length_fallback = true; + if let (Some(a_len), Some(b_len)) = + (a.enumerator_len(), b.enumerator_len()) + { + if a_len != b_len { + return false; + } + need_length_fallback = false; + } + let mut a_count = 0; + if !a.try_iter_pairs().map_or(false, |mut ak| { + ak.all(|(k, v1)| { + a_count += 1; + b.get_value(&k).map_or(false, |v2| v1 == v2) + }) + }) { return false; } - a.try_iter_pairs().map_or(false, |mut ak| { - ak.all(|(k, v1)| b.get_value(&k).map_or(false, |v2| v1 == v2)) - }) + if !need_length_fallback { + true + } else { + a_count == b.try_iter().map_or(0, |x| x.count()) + } } ( ObjectRepr::Seq | ObjectRepr::Iterable,
diff --git a/minijinja/tests/test_value.rs b/minijinja/tests/test_value.rs --- a/minijinja/tests/test_value.rs +++ b/minijinja/tests/test_value.rs @@ -5,7 +5,7 @@ use insta::{assert_debug_snapshot, assert_snapshot}; use similar_asserts::assert_eq; use minijinja::value::{DynObject, Enumerator, Kwargs, Object, ObjectRepr, Rest, Value}; -use minijinja::{args, render, Environment, Error, ErrorKind}; +use minijinja::{args, context, render, Environment, Error, ErrorKind}; #[test] fn test_sort() { diff --git a/minijinja/tests/test_value.rs b/minijinja/tests/test_value.rs --- a/minijinja/tests/test_value.rs +++ b/minijinja/tests/test_value.rs @@ -1060,6 +1060,68 @@ fn test_float_eq() { assert_ne!(xa, xb); } +#[test] +fn test_eq_regression() { + // merged objects used to not have a length. let's make sure that they have + let vars = context! {}; + let new_vars = context! {..vars.clone()}; + assert_eq!(vars.len(), Some(0)); + assert_eq!(new_vars.len(), Some(0)); + assert_eq!(&vars, &new_vars); + + // we also want to make sure that objects with unknown lengths are properly checked. + #[derive(Debug)] + struct MadMap; + + impl Object for MadMap { + fn get_value(self: &Arc<Self>, key: &Value) -> Option<Value> { + match key.as_str()? { + "a" => Some(Value::from(1)), + "b" => Some(Value::from(2)), + _ => None, + } + } + + fn enumerate(self: &Arc<Self>) -> Enumerator { + let mut idx = 0; + Enumerator::Iter(Box::new(std::iter::from_fn(move || { + let new_idx = { + idx += 1; + idx + }; + match new_idx { + 1 => Some(Value::from("a")), + 2 => Some(Value::from("b")), + _ => None, + } + }))) + } + } + + let normal_map = context! { + a => 1, + b => 2 + }; + let mad_map = Value::from_object(MadMap); + assert_eq!(mad_map.len(), None); + assert_eq!(mad_map, normal_map); + assert_eq!(normal_map, mad_map); + assert_ne!( + mad_map, + context! { + a => 1, + b => 2, + c => 3, + } + ); + assert_ne!( + mad_map, + context! { + a => 1, + } + ); +} + #[test] fn test_sorting() { let mut values = vec![
Values comparison fails after 2.3.0 ### Discussed in https://github.com/mitsuhiko/minijinja/discussions/582 <div type='discussions-op-text'> <sup>Originally posted by **pando85** September 17, 2024</sup> Hello, I'm using minijinja since 2.0.0 and it is wonderful. But in the last update my program tests where broken because something like this: ``` #[test] fn test_minijinja() { let vars = context! {}; let new_vars = context! {..vars.clone()}; assert_eq!(vars, new_vars); } ``` Anyone could guide me about why in minijinja 2.2.0 this worked and now I get an error? Is this an intended behavior? Any workarounds? Thanks! </div>
2024-09-18T01:10:00
0.12
58722074ccd0d781bafcdbf21d38b59f07c2a5b8
[ "test_eq_regression" ]
[ "test_complex_key", "test_deserialize", "test_float_eq", "test_float_to_string", "test_call_kwargs", "test_kwargs_error", "test_filter_basics", "test_map_custom_iter", "test_obj_downcast", "test_downcast_arg", "test_object_btree_set", "test_object_hash_set", "test_object_btree_map", "test_...
[]
[]
mitsuhiko/minijinja
579
mitsuhiko__minijinja-579
[ "577" ]
22c2d4192416cac1826bb1cc5edbae99ffa7802f
diff --git a/CHANGELOG.md b/CHANGELOG.md --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,12 +2,9 @@ All notable changes to MiniJinja are documented here. -## 2.3.1 - -- Fixes some compiler warnings in Rust 1.81. #575 - ## 2.3.0 +- Fixes some compiler warnings in Rust 1.81. #575 - Fixes incorrect ordering of maps when the keys of those maps were not in consistent order. #569 - Implemented the missing `groupby` filter. #570 diff --git a/CHANGELOG.md b/CHANGELOG.md --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -15,6 +12,8 @@ All notable changes to MiniJinja are documented here. Jinja2 and supports an optional flag to make it case sensitive. It also now lets one check individual attributes instead of values. #571 +- Changed sort order of `Ord` to avoid accidentally non total order + that could cause panics on Rust 1.81. #579 ## 2.2.0 diff --git a/minijinja/src/value/mod.rs b/minijinja/src/value/mod.rs --- a/minijinja/src/value/mod.rs +++ b/minijinja/src/value/mod.rs @@ -540,7 +540,11 @@ fn f64_total_cmp(left: f64, right: f64) -> Ordering { impl Ord for Value { fn cmp(&self, other: &Self) -> Ordering { - let value_ordering = match (&self.0, &other.0) { + let kind_ordering = self.kind().cmp(&other.kind()); + if matches!(kind_ordering, Ordering::Less | Ordering::Greater) { + return kind_ordering; + } + match (&self.0, &other.0) { (ValueRepr::None, ValueRepr::None) => Ordering::Equal, (ValueRepr::Undefined, ValueRepr::Undefined) => Ordering::Equal, (ValueRepr::String(ref a, _), ValueRepr::String(ref b, _)) => a.cmp(b), diff --git a/minijinja/src/value/mod.rs b/minijinja/src/value/mod.rs --- a/minijinja/src/value/mod.rs +++ b/minijinja/src/value/mod.rs @@ -550,34 +554,38 @@ impl Ord for Value { Some(ops::CoerceResult::F64(a, b)) => f64_total_cmp(a, b), Some(ops::CoerceResult::I128(a, b)) => a.cmp(&b), Some(ops::CoerceResult::Str(a, b)) => a.cmp(b), - None => match (self.kind(), other.kind()) { - (ValueKind::Seq, ValueKind::Seq) => match (self.try_iter(), other.try_iter()) { - 
(Ok(a), Ok(b)) => a.cmp(b), - _ => self.len().cmp(&other.len()), - }, - (ValueKind::Map, ValueKind::Map) => { - if let (Some(a), Some(b)) = (self.as_object(), other.as_object()) { - if a.is_same_object(b) { - Ordering::Equal - } else { - // This is not really correct. Because the keys can be in arbitrary - // order this could just sort really weirdly as a result. However - // we don't want to pay the cost of actually sorting the keys for - // ordering so we just accept this for now. - match (a.try_iter_pairs(), b.try_iter_pairs()) { - (Some(a), Some(b)) => a.cmp(b), - _ => self.len().cmp(&other.len()), + None => { + if let (Some(a), Some(b)) = (self.as_object(), other.as_object()) { + if a.is_same_object(b) { + Ordering::Equal + } else { + match (a.repr(), b.repr()) { + (ObjectRepr::Map, ObjectRepr::Map) => { + // This is not really correct. Because the keys can be in arbitrary + // order this could just sort really weirdly as a result. However + // we don't want to pay the cost of actually sorting the keys for + // ordering so we just accept this for now. + match (a.try_iter_pairs(), b.try_iter_pairs()) { + (Some(a), Some(b)) => a.cmp(b), + _ => unreachable!(), + } } + ( + ObjectRepr::Seq | ObjectRepr::Iterable, + ObjectRepr::Seq | ObjectRepr::Iterable, + ) => match (a.try_iter(), b.try_iter()) { + (Some(a), Some(b)) => a.cmp(b), + _ => unreachable!(), + }, + (_, _) => unreachable!(), } - } else { - unreachable!(); } + } else { + unreachable!() } - _ => Ordering::Equal, - }, + } }, - }; - value_ordering.then((self.kind() as usize).cmp(&(other.kind() as usize))) + } } }
diff --git a/minijinja/tests/snapshots/test_templates__vm@coerce.txt.snap b/minijinja/tests/snapshots/test_templates__vm@coerce.txt.snap --- a/minijinja/tests/snapshots/test_templates__vm@coerce.txt.snap +++ b/minijinja/tests/snapshots/test_templates__vm@coerce.txt.snap @@ -2,6 +2,7 @@ source: minijinja/tests/test_templates.rs description: "{{ \"World\"[0] == \"W\" }}\n{{ \"W\" == \"W\" }}\n{{ 1.0 == 1 }}\n{{ 1 != 2 }}\n{{ none == none }}\n{{ none != undefined }}\n{{ undefined == undefined }}\n{{ true == true }}\n{{ 1 == true }}\n{{ 0 == false }}\n{{ 1 != 0 }}\n{{ \"a\" < \"b\" }}\n{{ \"a\"[0] < \"b\" }}\n{{ false < true }}\n{{ 0 < true }}\n{{ [0, 0] == [0, 0] }}\n{{ [\"a\"] == [\"a\"[0]] }}" info: {} +input_file: minijinja/tests/inputs/coerce.txt --- true true diff --git a/minijinja/tests/snapshots/test_templates__vm@coerce.txt.snap b/minijinja/tests/snapshots/test_templates__vm@coerce.txt.snap --- a/minijinja/tests/snapshots/test_templates__vm@coerce.txt.snap +++ b/minijinja/tests/snapshots/test_templates__vm@coerce.txt.snap @@ -17,7 +18,6 @@ true true true true +false true true -true - diff --git a/minijinja/tests/snapshots/test_templates__vm@filters.txt.snap b/minijinja/tests/snapshots/test_templates__vm@filters.txt.snap --- a/minijinja/tests/snapshots/test_templates__vm@filters.txt.snap +++ b/minijinja/tests/snapshots/test_templates__vm@filters.txt.snap @@ -70,8 +70,8 @@ sort: [1, 2, 4, 9, 111] sort-reverse: [111, 9, 4, 2, 1] sort-case-insensitive: ["a", "B", "C", "z"] sort-case-sensitive: ["B", "C", "a", "z"] -sort-case-insensitive-mixed: [false, 0, true, 1, "false", "False", "true", "True"] -sort-case-sensitive-mixed: [false, 0, true, 1, "False", "True", "false", "true"] +sort-case-insensitive-mixed: [false, true, 0, 1, "false", "False", "true", "True"] +sort-case-sensitive-mixed: [false, true, 0, 1, "False", "True", "false", "true"] sort-attribute [{"name": "a"}, {"name": "b"}] d: true json: {"a":"b","c":"d"} diff --git a/minijinja/tests/test_value.rs 
b/minijinja/tests/test_value.rs --- a/minijinja/tests/test_value.rs +++ b/minijinja/tests/test_value.rs @@ -1,11 +1,11 @@ use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet, LinkedList, VecDeque}; use std::sync::Arc; -use insta::assert_snapshot; +use insta::{assert_debug_snapshot, assert_snapshot}; use similar_asserts::assert_eq; use minijinja::value::{DynObject, Enumerator, Kwargs, Object, ObjectRepr, Rest, Value}; -use minijinja::{args, render, Environment, Error}; +use minijinja::{args, render, Environment, Error, ErrorKind}; #[test] fn test_sort() { diff --git a/minijinja/tests/test_value.rs b/minijinja/tests/test_value.rs --- a/minijinja/tests/test_value.rs +++ b/minijinja/tests/test_value.rs @@ -69,13 +69,13 @@ fn test_sort_different_types() { [ undefined, none, + false, + true, -inf, -100, -75.0, -50.0, - false, 0, - true, 1, 30, 80, diff --git a/minijinja/tests/test_value.rs b/minijinja/tests/test_value.rs --- a/minijinja/tests/test_value.rs +++ b/minijinja/tests/test_value.rs @@ -1059,3 +1059,69 @@ fn test_float_eq() { let xb = Value::from(i64::MAX as f64); assert_ne!(xa, xb); } + +#[test] +fn test_sorting() { + let mut values = vec![ + Value::from(-f64::INFINITY), + Value::from(1.0), + Value::from(f64::NAN), + Value::from(f64::INFINITY), + Value::from(42.0), + Value::from(41), + Value::from(128), + Value::from(-2), + Value::from(-5.0), + Value::from(32i32), + Value::from(true), + Value::from(false), + Value::from(vec![1, 2, 3]), + Value::from(vec![1, 2, 3, 4]), + Value::from(vec![1]), + Value::from("whatever"), + Value::from("floats"), + Value::from("the"), + Value::from("boat"), + Value::UNDEFINED, + Value::from(()), + Value::from(Error::new(ErrorKind::InvalidOperation, "shit hit the fan")), + ]; + values.sort(); + assert_debug_snapshot!(&values, @r###" + [ + undefined, + none, + false, + true, + -inf, + -5.0, + -2, + 1.0, + 32, + 41, + 42.0, + 128, + inf, + NaN, + "boat", + "floats", + "the", + "whatever", + [ + 1, + ], + [ + 1, + 2, + 3, + ], 
+ [ + 1, + 2, + 3, + 4, + ], + <invalid value: invalid operation: shit hit the fan>, + ] + "###); +}
Violation of `PartialEq`, `Eq`, `PartialOrd` and `Ord` contracts in `minijinja::Value` ## Description The `Ord` and `PartialEq` implementations of `minijinja::Value` work as follows: 1. Coerce the types of the two `Value`s to be equal - Equal types are left as-is - When both sides are integers, everything is converted to `i128` - When either side is a float, both sides are coerced to a float, - Otherwise try to coerce to `i128` on both sides 2. If this coercion was successful, do the comparison on the resulting values 2. Otherwise, try to compare the values as if they were objects This violates the transitivity property of the comparison traits, which, in addition to producing unexpected but niche behaviour, also can cause panics while sorting [due to the new sorting algorithms of Rust 1.81 panicking on invalid `Ord` implementations](https://blog.rust-lang.org/2024/09/05/Rust-1.81.0.html#new-sort-implementations). ## Reproduction steps I wrote a small program that demonstrates the issue ```rust use minijinja::Value; fn main() { let a = Value::from(i64::MAX as i128); let b = Value::from(i64::MAX as f64); let c = Value::from(i64::MAX as i128 + 1); assert_eq!(a, b, "Equal because they are the same value"); assert_eq!(b, c, "Equal because the float approximations are equal"); assert_ne!(a, c, "Not equal because they are different values"); } ``` Additional helpful information: - Version of minijinja: 2.2.0 - Version of rustc: 1.81.0 - Operating system and version: Ubuntu 22.04 Though none of the above should matter, I think this bug applies to all versions. ## What did you expect Equality and comparison operators should maintain their transitive properties. This is a hard problem with many corner cases. 
It would involve checking: - if the float is an integer, otherwise the comparison should be false - if the float is within the relevant integer range, otherwise the comparison should be false - and then the comparison should happen in integers, not floats This is much more complicated than the existing solution, so it's understandable you went this route, but it does have the issue described above. Also the above fix only works for `(Partial)Eq`, fixing `Ord` is much more complicated.
Unfortunately the strictness in the new sort behavior is very annoying to deal with for types like Jinja's `Value` type. I don't think the current implementation of `Ord` can cause a panic with the new sort algorithm but I'm not 100% sure. The code internally undergoes a coercion step that is lossy if one side is a float. That one is obviously incorrect but I'm not sure yet what the right fix is. Refs https://github.com/rust-lang/rust/issues/129561 That BTW is incorrect: `Value::from(i64::MAX as f64)`. The largest value that can be represented as a float is `2^53`. > That BTW is incorrect: `Value::from(i64::MAX as f64)`. The largest value that can be represented as a float is `2^53`. You are right, though my intent was not to showcase the first value where this goes wrong, just "a" value where this goes wrong. `i64::MAX` was a convenient constant for that.
2024-09-17T01:18:33
0.12
58722074ccd0d781bafcdbf21d38b59f07c2a5b8
[ "test_sorting", "test_sort_different_types" ]
[ "test_complex_key", "test_kwargs_error", "test_float_eq", "test_deserialize", "test_filter_basics", "test_float_to_string", "test_call_kwargs", "test_downcast_arg", "test_map_custom_iter", "test_obj_downcast", "test_object_btree_set", "test_object_hash_set", "test_object_btree_map", "test_...
[]
[]
mitsuhiko/minijinja
569
mitsuhiko__minijinja-569
[ "568" ]
42ea1a12ecc78d74d6005307542ba0482205e478
diff --git a/CHANGELOG.md b/CHANGELOG.md --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,11 @@ All notable changes to MiniJinja are documented here. +## 2.2.1 + +- Fixes incorrect ordering of maps when the keys of those maps + were not in consistent order. #569 + ## 2.2.0 - Fixes a bug where some enums did not deserialize correctly when diff --git a/minijinja/src/value/mod.rs b/minijinja/src/value/mod.rs --- a/minijinja/src/value/mod.rs +++ b/minijinja/src/value/mod.rs @@ -488,14 +488,29 @@ impl PartialEq for Value { Some(ops::CoerceResult::Str(a, b)) => a == b, None => { if let (Some(a), Some(b)) = (self.as_object(), other.as_object()) { - if a.repr() != b.repr() { - false - } else if let (Some(ak), Some(bk)) = - (a.try_iter_pairs(), b.try_iter_pairs()) - { - ak.eq(bk) - } else { - false + if a.is_same_object(b) { + return true; + } + match (a.repr(), b.repr()) { + (ObjectRepr::Map, ObjectRepr::Map) => { + if a.enumerator_len() != b.enumerator_len() { + return false; + } + a.try_iter_pairs().map_or(false, |mut ak| { + ak.all(|(k, v1)| b.get_value(&k).map_or(false, |v2| v1 == v2)) + }) + } + ( + ObjectRepr::Seq | ObjectRepr::Iterable, + ObjectRepr::Seq | ObjectRepr::Iterable, + ) => { + if let (Some(ak), Some(bk)) = (a.try_iter(), b.try_iter()) { + ak.eq(bk) + } else { + false + } + } + _ => false, } } else { false diff --git a/minijinja/src/value/mod.rs b/minijinja/src/value/mod.rs --- a/minijinja/src/value/mod.rs +++ b/minijinja/src/value/mod.rs @@ -540,13 +555,24 @@ impl Ord for Value { (Ok(a), Ok(b)) => a.cmp(b), _ => self.len().cmp(&other.len()), }, - (ValueKind::Map, ValueKind::Map) => match ( - self.as_object().and_then(|x| x.try_iter_pairs()), - other.as_object().and_then(|x| x.try_iter_pairs()), - ) { - (Some(a), Some(b)) => a.cmp(b), - _ => self.len().cmp(&other.len()), - }, + (ValueKind::Map, ValueKind::Map) => { + if let (Some(a), Some(b)) = (self.as_object(), other.as_object()) { + if a.is_same_object(b) { + Ordering::Equal + } else { + // This is not 
really correct. Because the keys can be in arbitrary + // order this could just sort really weirdly as a result. However + // we don't want to pay the cost of actually sorting the keys for + // ordering so we just accept this for now. + match (a.try_iter_pairs(), b.try_iter_pairs()) { + (Some(a), Some(b)) => a.cmp(b), + _ => self.len().cmp(&other.len()), + } + } + } else { + unreachable!(); + } + } _ => Ordering::Equal, }, }, diff --git a/minijinja/src/value/object.rs b/minijinja/src/value/object.rs --- a/minijinja/src/value/object.rs +++ b/minijinja/src/value/object.rs @@ -646,6 +646,11 @@ unsafe impl Sync for DynObject {} impl DynObject { impl_object_helpers!(pub &Self); + + /// Checks if this dyn object is the same as another. + pub(crate) fn is_same_object(&self, other: &DynObject) -> bool { + self.ptr == other.ptr && self.vtable == other.vtable + } } impl Hash for DynObject { diff --git a/minijinja/src/value/object.rs b/minijinja/src/value/object.rs --- a/minijinja/src/value/object.rs +++ b/minijinja/src/value/object.rs @@ -758,6 +763,10 @@ macro_rules! impl_str_map_helper { Box::new(this.keys().map(|k| intern(k.as_ref())).map(Value::from)) }) } + + fn enumerator_len(self: &Arc<Self>) -> Option<usize> { + Some(self.len()) + } } }; } diff --git a/minijinja/src/value/object.rs b/minijinja/src/value/object.rs --- a/minijinja/src/value/object.rs +++ b/minijinja/src/value/object.rs @@ -834,6 +843,10 @@ macro_rules! impl_value_map { fn enumerate(self: &Arc<Self>) -> Enumerator { self.$enumerator(|this| Box::new(this.keys().cloned())) } + + fn enumerator_len(self: &Arc<Self>) -> Option<usize> { + Some(self.len()) + } } impl<V> From<$map_type<Value, V>> for Value
diff --git a/minijinja/tests/test_value.rs b/minijinja/tests/test_value.rs --- a/minijinja/tests/test_value.rs +++ b/minijinja/tests/test_value.rs @@ -1016,3 +1016,36 @@ fn test_downcast_arg() { "A|B" ); } + +#[test] +fn test_map_eq() { + #[derive(Debug, Copy, Clone)] + struct Thing { + rev: bool, + } + + impl Object for Thing { + fn get_value(self: &Arc<Self>, key: &Value) -> Option<Value> { + match key.as_str()? { + "a" => Some(Value::from(1)), + "b" => Some(Value::from(2)), + _ => None, + } + } + + fn enumerate(self: &Arc<Self>) -> Enumerator { + if self.rev { + Enumerator::Str(&["b", "a"]) + } else { + Enumerator::Str(&["a", "b"]) + } + } + } + + let t1 = Value::from_object(Thing { rev: false }); + let t2 = Value::from_object(Thing { rev: true }); + + assert_snapshot!(t1.to_string(), @r###"{"a": 1, "b": 2}"###); + assert_snapshot!(t2.to_string(), @r###"{"b": 2, "a": 1}"###); + assert_eq!(t1, t2); +}
`Value::PartialEq` not comparing iterators correctly Thank you for implementing jinja template rendering in Rust! I noticed that the `PartialEq` trait for `Value` doesn't handle iterators correctly. I am specifically interested in comparing `Value`s of `HashMap`s. `PartialEq` iterates over two iterators item-by-item and compares whether two consumed items are equal. However, in the case of `HashMap` for example, the order is arbitrary. So, comparing two Values with HashMaps in it currently doesn't work. https://github.com/mitsuhiko/minijinja/blob/42ea1a12ecc78d74d6005307542ba0482205e478/minijinja/src/value/mod.rs#L493-L495 The following piece of code I would expect to succeed, but it is flaky, it fails sometimes but also succeeds sometimes: ```rust use std::collections::HashMap; use minijinja::Value; assert_eq!( Value::from( [("key1", "value1"), ("key2", "value2")] .iter() .cloned() .collect::<HashMap<&str, &str>>() ), Value::from( [("key1", "value1"), ("key2", "value2")] .iter() .cloned() .collect::<HashMap<&str, &str>>() ), ); ``` Is this intentional? If not, could we maybe, in the case of `HashMap`, use the [standard `PartialEq` implementation](https://doc.rust-lang.org/src/std/collections/hash/map.rs.html#1281-1287) of `HashMap`?
No, that's not intentional. The logic for all objects should be changed to have one object iterate and then perform lookups into the other. The same issue exists today for `Ord`.
2024-09-01T04:37:13
0.12
58722074ccd0d781bafcdbf21d38b59f07c2a5b8
[ "test_map_eq" ]
[ "test_complex_key", "test_float_to_string", "test_deserialize", "test_kwargs_error", "test_call_kwargs", "test_filter_basics", "test_map_custom_iter", "test_downcast_arg", "test_obj_downcast", "test_object_btree_set", "test_object_hash_set", "test_object_linked_list", "test_object_btree_map"...
[]
[]
mitsuhiko/minijinja
554
mitsuhiko__minijinja-554
[ "553" ]
c4423efae4a3b7e1199411e18a1c853aa0da616e
diff --git a/minijinja/src/value/deserialize.rs b/minijinja/src/value/deserialize.rs --- a/minijinja/src/value/deserialize.rs +++ b/minijinja/src/value/deserialize.rs @@ -246,10 +246,30 @@ impl<'de> de::Deserializer<'de> for ValueDeserializer { visitor.visit_enum(EnumDeserializer { variant, value }) } + #[inline] + fn deserialize_unit_struct<V>(self, _name: &'static str, visitor: V) -> Result<V::Value, Error> + where + V: Visitor<'de>, + { + self.deserialize_unit(visitor) + } + + #[inline] + fn deserialize_newtype_struct<V>( + self, + _name: &'static str, + visitor: V, + ) -> Result<V::Value, Error> + where + V: Visitor<'de>, + { + visitor.visit_newtype_struct(self) + } + forward_to_deserialize_any! { bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string unit - seq bytes byte_buf map unit_struct - tuple_struct struct tuple ignored_any identifier newtype_struct + seq bytes byte_buf map + tuple_struct struct tuple ignored_any identifier } } diff --git a/minijinja/src/value/deserialize.rs b/minijinja/src/value/deserialize.rs --- a/minijinja/src/value/deserialize.rs +++ b/minijinja/src/value/deserialize.rs @@ -403,11 +423,31 @@ impl<'de, 'v> de::Deserializer<'de> for &'v Value { ValueDeserializer::new(self.clone()).deserialize_any(visitor) } + fn deserialize_option<V: de::Visitor<'de>>(self, visitor: V) -> Result<V::Value, Error> { + ValueDeserializer::new(self.clone()).deserialize_option(visitor) + } + + fn deserialize_enum<V: de::Visitor<'de>>( + self, + name: &'static str, + variants: &'static [&'static str], + visitor: V, + ) -> Result<V::Value, Error> { + ValueDeserializer::new(self.clone()).deserialize_enum(name, variants, visitor) + } + + fn deserialize_newtype_struct<V: de::Visitor<'de>>( + self, + name: &'static str, + visitor: V, + ) -> Result<V::Value, Error> { + ValueDeserializer::new(self.clone()).deserialize_newtype_struct(name, visitor) + } + forward_to_deserialize_any! 
{ bool u8 u16 u32 u64 i8 i16 i32 i64 f32 f64 char str string unit seq bytes byte_buf map unit_struct tuple_struct struct tuple ignored_any identifier - option enum newtype_struct } }
diff --git a/CHANGELOG.md b/CHANGELOG.md --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,11 @@ All notable changes to MiniJinja are documented here. +## 2.1.3 + +- Fixes a bug where some enums did not deserialize correctly when + used with `ViaDeserialize`. #554 + ## 2.1.2 - Flush filter and test cache when processing extended template. diff --git a/minijinja/tests/test_value.rs b/minijinja/tests/test_value.rs --- a/minijinja/tests/test_value.rs +++ b/minijinja/tests/test_value.rs @@ -560,6 +560,7 @@ fn test_complex_key() { #[test] #[cfg(feature = "deserialization")] fn test_deserialize() { + use minijinja::value::{from_args, ViaDeserialize}; use serde::Deserialize; #[derive(Deserialize, Debug, PartialEq, Eq)] diff --git a/minijinja/tests/test_value.rs b/minijinja/tests/test_value.rs --- a/minijinja/tests/test_value.rs +++ b/minijinja/tests/test_value.rs @@ -572,6 +573,34 @@ fn test_deserialize() { let point = Point::deserialize(point_value).unwrap(); assert_eq!(point, Point { x: 42, y: -23 }); + + #[derive(Debug, serde::Serialize, serde::Deserialize, Eq, PartialEq)] + enum SimpleEnum { + B, + C, + D, + } + + #[derive(Debug, serde::Serialize, serde::Deserialize, Eq, PartialEq)] + enum TaggedUnion { + V(String), + } + + #[derive(Debug, serde::Serialize, serde::Deserialize, Eq, PartialEq)] + struct UnitStruct(String); + + let spe = Value::from_serialize(SimpleEnum::B); + let spu = Value::from_serialize(UnitStruct("hello".into())); + let spt = Value::from_serialize(TaggedUnion::V("workd".into())); + + let a: ( + ViaDeserialize<SimpleEnum>, + ViaDeserialize<UnitStruct>, + ViaDeserialize<TaggedUnion>, + ) = from_args(args!(spe, spu, spt)).unwrap(); + assert_eq!((a.0).0, SimpleEnum::B); + assert_eq!((a.1).0, UnitStruct("hello".into())); + assert_eq!((a.2).0, TaggedUnion::V("workd".into())); } #[test]
Serialized Value cannot Deserialize ## Description I'm using Value::from_serialize pass context to template. Then using `ViaDeserialize<Type>` deserialize the value. however, it failed. here are some log: <details> ```console called `Result::unwrap()` on an `Err` value: Error { kind: CannotDeserialize, detail: "invalid type: map, expected enum Type", name: "op.idl", line: 24 } ----------------------------------- op.idl ------------------------------------ 22 | {% for param in params %} 24 > {{ param.ty | to_ty }} {{ param.ident }}; i ^^^^^ cannot deserialize ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ Referenced variables: { base_name: <macro base_name>, in_params: 0, loop: <loop 0/1>, param: { "ident": "com", "ty": { "Path": [ "Command", ], }, } } ``` </details> here is my enum defination: ```rs enum Type { Array(TypeArray), Path(TypePath), Primative(PrimativeType), } ``` as far as I know, plain enum, unit struct cannot be deserialize. It seems enum would be serialized to &str? Additional helpful information: - Version of minijinja: v2.1.1 - Version of rustc: 1.80.0 - Operating system and version: macos 14.5 ## What did you expect Serialization and deserialization operations should be symmetric
I try 1.0.10, I report the same error. Can you provide a minimal reproduction case that shows the issue? I am not entirely sure what you are trying to attempt from the code provided. Here is a example to deserialzie Enum: <details> ```rust #[derive(Debug, serde::Serialize, serde::Deserialize)] enum SimpleEnum { B, C, D, } #[derive(Debug, serde::Serialize, serde::Deserialize)] enum TaggedUnion { V(String), } #[derive(Debug, serde::Serialize, serde::Deserialize)] struct UnitStruct(String); let mut env = Environment::new(); env.add_function("pe", |v: ViaDeserialize<SimpleEnum>| { println!("{:?}", v.0); }); env.add_function("pu", |v: ViaDeserialize<UnitStruct>| { println!("{:?}", v.0); }); env.add_function("pt", |v: ViaDeserialize<TaggedUnion>| { println!("{:?}", v.0); }); let mut ctx = HashMap::new(); ctx.insert("spe", Value::from_serialize(SimpleEnum::B)); ctx.insert("spu", Value::from_serialize(UnitStruct("hello".into()))); ctx.insert("spt", Value::from_serialize(TaggedUnion::V("workd".into()))); let _ = env .render_str( r#" {{ pe(spe) }} {# failed #} {{ pu(spu) }} {# failed #} {{ pt(spt) }} {# failed #} "#, ctx, ) .unwrap(); ``` </details> There are some interesting bugs clearly with the value deserializer. Basic testing shows that `SimpleEnum::deserialize(&Value::from("B"))` succeeds but `SimpleEnum::deserialize(Value::from("B"))` fails. I will get that fixed.
2024-08-09T17:46:10
0.12
58722074ccd0d781bafcdbf21d38b59f07c2a5b8
[ "test_deserialize" ]
[ "test_complex_key", "test_float_to_string", "test_call_kwargs", "test_filter_basics", "test_kwargs_error", "test_map_custom_iter", "test_downcast_arg", "test_obj_downcast", "test_object_hash_set", "test_object_btree_set", "test_object_btree_map", "test_object_hash_map", "test_object_vec", ...
[]
[]
mitsuhiko/minijinja
435
mitsuhiko__minijinja-435
[ "434" ]
f338684ca544f81ad6ee06f7ecb2bbc0c65677de
diff --git a/CHANGELOG.md b/CHANGELOG.md --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,11 @@ All notable changes to MiniJinja are documented here. +## 1.0.14 + +- Fixed a bug with broken closure handling when working with nested + `{% macro %}` or `{% call %}` blocks. #435 + ## 1.0.13 - `minijinja-cli` now supports an `-o` or `--output` parameter to write diff --git a/minijinja/src/compiler/meta.rs b/minijinja/src/compiler/meta.rs --- a/minijinja/src/compiler/meta.rs +++ b/minijinja/src/compiler/meta.rs @@ -43,8 +43,7 @@ pub fn find_macro_closure<'a>(m: &ast::Macro<'a>) -> HashSet<&'a str> { nested_out: None, assigned: vec![Default::default()], }; - m.args.iter().for_each(|arg| track_assign(arg, &mut state)); - m.body.iter().for_each(|node| track_walk(node, &mut state)); + tracker_visit_macro(m, &mut state); state.out } diff --git a/minijinja/src/compiler/meta.rs b/minijinja/src/compiler/meta.rs --- a/minijinja/src/compiler/meta.rs +++ b/minijinja/src/compiler/meta.rs @@ -73,6 +72,15 @@ fn tracker_visit_expr_opt<'a>(expr: &Option<ast::Expr<'a>>, state: &mut Assignme } } +#[cfg(feature = "macros")] +fn tracker_visit_macro<'a>(m: &ast::Macro<'a>, state: &mut AssignmentTracker<'a>) { + m.args.iter().for_each(|arg| track_assign(arg, state)); + m.defaults + .iter() + .for_each(|expr| tracker_visit_expr(expr, state)); + m.body.iter().for_each(|node| track_walk(node, state)); +} + fn tracker_visit_expr<'a>(expr: &ast::Expr<'a>, state: &mut AssignmentTracker<'a>) { match expr { ast::Expr::Var(var) => { diff --git a/minijinja/src/compiler/meta.rs b/minijinja/src/compiler/meta.rs --- a/minijinja/src/compiler/meta.rs +++ b/minijinja/src/compiler/meta.rs @@ -247,9 +255,17 @@ fn track_walk<'a>(node: &ast::Stmt<'a>, state: &mut AssignmentTracker<'a>) { #[cfg(feature = "macros")] ast::Stmt::Macro(stmt) => { state.assign(stmt.name); + tracker_visit_macro(stmt, state); } #[cfg(feature = "macros")] - ast::Stmt::CallBlock(_) => {} + ast::Stmt::CallBlock(stmt) => { + 
tracker_visit_expr(&stmt.call.expr, state); + stmt.call + .args + .iter() + .for_each(|x| tracker_visit_expr(x, state)); + tracker_visit_macro(&stmt.macro_decl, state); + } ast::Stmt::Do(stmt) => { tracker_visit_expr(&stmt.call.expr, state); stmt.call
diff --git a/minijinja/tests/test_macros.rs b/minijinja/tests/test_macros.rs --- a/minijinja/tests/test_macros.rs +++ b/minijinja/tests/test_macros.rs @@ -2,6 +2,7 @@ use std::sync::atomic::AtomicBool; use std::sync::Arc; +use insta::assert_snapshot; use similar_asserts::assert_eq; use minijinja::value::{Kwargs, StructObject, Value}; diff --git a/minijinja/tests/test_macros.rs b/minijinja/tests/test_macros.rs --- a/minijinja/tests/test_macros.rs +++ b/minijinja/tests/test_macros.rs @@ -168,3 +169,45 @@ fn test_no_leak() { "{}<macro meh><macro foo>{}<macro foo>{}<macro foo>{}<macro foo>{}" ); } + +/// https://github.com/mitsuhiko/minijinja/issues/434 +#[test] +fn test_nested_macro_bug() { + let rv = render!( + r#" + {% set a = 42 %} + {% macro m1(var) -%} + {{ var }} + {%- endmacro %} + + {% macro m2(x=a) -%} + {{ m1(x) }} + {%- endmacro %} + + {{ m2() }} + "# + ); + assert_snapshot!(rv.trim(), @"42"); +} + +/// https://github.com/mitsuhiko/minijinja/issues/434 +#[test] +fn test_caller_bug() { + let rv = render!( + r#" + {% set a = 42 %} + {% set b = 23 %} + + {% macro m1(var) -%} + {{ caller(var) }} + {%- endmacro %} + + {% macro m2(x=a) -%} + {% call(var) m1(x) %}{{ var }}|{{ b }}{% endcall %} + {%- endmacro %} + + {{ m2() }} + "# + ); + assert_snapshot!(rv.trim(), @"42|23"); +}
"TemplateError: unknown function" when called from inside a macro ## Reproduction steps Running the (Python) code ```python import minijinja env = minijinja.Environment(undefined_behavior="strict") print(env.render_str(""" {% macro collapsible(title) -%} {{ caller() }} {%- endmacro %} {% macro autograder_ui_hint() -%} {% call collapsible("Tips for using the autograder") %} ABC {% endcall %} {%- endmacro %} {{ autograder_ui_hint() }}""")) ``` gives me ``` Traceback (most recent call last): File "/home/andreas/tmp/a.py", line 4, in <module> print(env.render_str(""" ^^^^^^^^^^^^^^^^^^ minijinja.TemplateError: unknown function: collapsible is unknown (in <string>:9) ---------------------------------- <string> ----------------------------------- 6 | {%- endmacro %} 7 | 8 | {% macro autograder_ui_hint() -%} 9 > {% call collapsible("Tips for using the autograder") %} i ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ unknown function 10 | ABC 11 | {% endcall %} 12 | {%- endmacro %} ~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~ No referenced variables ------------------------------------------------------------------------------- ``` Additional helpful information: - Version of minijinja (Python): 1.0.13 (binary from PyPI) - Version of rustc: ? - Operating system and version: Linux/Debian 12 ## What did you expect Since `collapsible` is defined above, it should also be callable from within the `autograder_ui_hint` macro. Jinja2 (Python) allows this.
Thanks. It looks like the closure tracker does not properly recognize the need to build closures within macros. Will have a look at this.
2024-03-20T06:23:06
1.0
d904531ea45bd3c694bad01e658785afb40b8d6d
[ "test_nested_macro_bug", "test_caller_bug" ]
[ "test_context_merge_custom", "test_context_merge", "test_context", "test_args", "test_macro_passing", "test_render", "test_no_leak" ]
[]
[]
mitsuhiko/minijinja
269
mitsuhiko__minijinja-269
[ "266" ]
8d7215c96c9312b8fd8286149e0a15d36b8a087c
diff --git a/CHANGELOG.md b/CHANGELOG.md --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -2,6 +2,13 @@ All notable changes to MiniJinja are documented here. +## 1.0.0 + +- Removed support for `State::current_call`. This property wasn't too useful + and unreliable. Supporting it properly for nested invocations would require + calls to take a mutable state or use interior mutability which did not seem + reasonable for this. (#269) + ## 0.34.0 - Updated `self_cell` and `percent-encoding` dependencies. (#264) diff --git a/minijinja-py/src/state.rs b/minijinja-py/src/state.rs --- a/minijinja-py/src/state.rs +++ b/minijinja-py/src/state.rs @@ -49,12 +49,6 @@ impl StateRef { with_state(|state| Ok(state.current_block().map(|x| x.into()))) } - /// Returns the current call - #[getter] - pub fn get_current_call(&self) -> PyResult<Option<String>> { - with_state(|state| Ok(state.current_call().map(|x| x.into()))) - } - /// Looks up a variable in the context #[pyo3(text_signature = "(self, name)")] pub fn lookup(&self, name: &str) -> PyResult<Py<PyAny>> { diff --git a/minijinja/src/vm/mod.rs b/minijinja/src/vm/mod.rs --- a/minijinja/src/vm/mod.rs +++ b/minijinja/src/vm/mod.rs @@ -144,7 +144,6 @@ impl<'env> Vm<'env> { env: self.env, ctx, current_block: None, - current_call: None, auto_escape: state.auto_escape(), instructions, blocks: BTreeMap::default(), diff --git a/minijinja/src/vm/mod.rs b/minijinja/src/vm/mod.rs --- a/minijinja/src/vm/mod.rs +++ b/minijinja/src/vm/mod.rs @@ -514,7 +513,6 @@ impl<'env> Vm<'env> { stack.push(out.end_capture(state.auto_escape)); } Instruction::ApplyFilter(name, arg_count, local_id) => { - state.current_call = Some(name); let filter = ctx_ok!(get_or_lookup_local(&mut loaded_filters, *local_id, || { state.env.get_filter(name) diff --git a/minijinja/src/vm/mod.rs b/minijinja/src/vm/mod.rs --- a/minijinja/src/vm/mod.rs +++ b/minijinja/src/vm/mod.rs @@ -578,16 +571,12 @@ impl<'env> Vm<'env> { format!("{name} is unknown"), )); } - - state.current_call = 
None; } Instruction::CallMethod(name, arg_count) => { - state.current_call = Some(name); let args = stack.slice_top(*arg_count); a = ctx_ok!(args[0].call_method(state, name, &args[1..])); stack.drop_top(*arg_count); stack.push(a); - state.current_call = None; } Instruction::CallObject(arg_count) => { let args = stack.slice_top(*arg_count); diff --git a/minijinja/src/vm/mod.rs b/minijinja/src/vm/mod.rs --- a/minijinja/src/vm/mod.rs +++ b/minijinja/src/vm/mod.rs @@ -602,13 +591,9 @@ impl<'env> Vm<'env> { stack.pop(); } Instruction::FastSuper => { - // Note that we don't store 'current_call' here since it - // would only be visible (and unused) internally. ctx_ok!(self.perform_super(state, out, false)); } Instruction::FastRecurse => { - // Note that we don't store 'current_call' here since it - // would only be visible (and unused) internally. recurse_loop!(false); } // Explanation on the behavior of `LoadBlocks` and rendering of diff --git a/minijinja/src/vm/state.rs b/minijinja/src/vm/state.rs --- a/minijinja/src/vm/state.rs +++ b/minijinja/src/vm/state.rs @@ -28,7 +28,6 @@ pub struct State<'vm, 'env> { pub(crate) env: &'env Environment<'env>, pub(crate) ctx: Context<'env>, pub(crate) current_block: Option<&'env str>, - pub(crate) current_call: Option<&'env str>, pub(crate) auto_escape: AutoEscape, pub(crate) instructions: &'vm Instructions<'env>, pub(crate) blocks: BTreeMap<&'env str, BlockStack<'vm, 'env>>, diff --git a/minijinja/src/vm/state.rs b/minijinja/src/vm/state.rs --- a/minijinja/src/vm/state.rs +++ b/minijinja/src/vm/state.rs @@ -45,7 +44,6 @@ impl<'vm, 'env> fmt::Debug for State<'vm, 'env> { let mut ds = f.debug_struct("State"); ds.field("name", &self.instructions.name()); ds.field("current_block", &self.current_block); - ds.field("current_call", &self.current_call); ds.field("auto_escape", &self.auto_escape); ds.field("ctx", &self.ctx); ds.field("env", &self.env); diff --git a/minijinja/src/vm/state.rs b/minijinja/src/vm/state.rs --- 
a/minijinja/src/vm/state.rs +++ b/minijinja/src/vm/state.rs @@ -66,7 +64,6 @@ impl<'vm, 'env> State<'vm, 'env> { env, ctx, current_block: None, - current_call: None, auto_escape, instructions, blocks,
diff --git a/minijinja-py/tests/test_state.py b/minijinja-py/tests/test_state.py --- a/minijinja-py/tests/test_state.py +++ b/minijinja-py/tests/test_state.py @@ -9,7 +9,6 @@ def my_func(state): assert state.name == "template-name" assert state.auto_escape is None assert state.current_block == "foo" - assert state.current_call == "my_func" assert state.lookup("bar") == 23 assert state.lookup("aha") is None assert state.lookup("my_func") is my_func diff --git a/minijinja-py/tests/test_state.py b/minijinja-py/tests/test_state.py --- a/minijinja-py/tests/test_state.py +++ b/minijinja-py/tests/test_state.py @@ -33,7 +32,6 @@ def my_func(state): assert state.name == "template-name" assert state.auto_escape is None assert state.current_block == "foo" - assert state.current_call == "my_func" assert state.lookup("bar") == 23 assert state.lookup("aha") is None assert state.env is env diff --git a/minijinja-py/tests/test_state.py b/minijinja-py/tests/test_state.py --- a/minijinja-py/tests/test_state.py +++ b/minijinja-py/tests/test_state.py @@ -57,7 +55,6 @@ def my_filter(state, value): assert state.name == "template-name" assert state.auto_escape is None assert state.current_block == "foo" - assert state.current_call == "myfilter" assert state.lookup("bar") == 23 assert state.lookup("aha") is None assert state.env is env diff --git a/minijinja-py/tests/test_state.py b/minijinja-py/tests/test_state.py --- a/minijinja-py/tests/test_state.py +++ b/minijinja-py/tests/test_state.py @@ -81,7 +78,6 @@ def my_test(state, value): assert state.name == "template-name" assert state.auto_escape is None assert state.current_block == "foo" - assert state.current_call == "mytest" assert state.lookup("bar") == 23 assert state.lookup("aha") is None assert state.env is env diff --git a/minijinja/src/vm/mod.rs b/minijinja/src/vm/mod.rs --- a/minijinja/src/vm/mod.rs +++ b/minijinja/src/vm/mod.rs @@ -529,10 +527,8 @@ impl<'env> Vm<'env> { a = ctx_ok!(filter.apply_to(state, args)); 
stack.drop_top(*arg_count); stack.push(a); - state.current_call = Some(name); } Instruction::PerformTest(name, arg_count, local_id) => { - state.current_call = Some(name); let test = ctx_ok!(get_or_lookup_local(&mut loaded_tests, *local_id, || { state.env.get_test(name) }) diff --git a/minijinja/src/vm/mod.rs b/minijinja/src/vm/mod.rs --- a/minijinja/src/vm/mod.rs +++ b/minijinja/src/vm/mod.rs @@ -543,11 +539,8 @@ impl<'env> Vm<'env> { let rv = ctx_ok!(test.perform(state, args)); stack.drop_top(*arg_count); stack.push(Value::from(rv)); - state.current_call = None; } Instruction::CallFunction(name, arg_count) => { - state.current_call = Some(name); - // super is a special function reserved for super-ing into blocks. if *name == "super" { if *arg_count != 0 { diff --git a/minijinja/src/vm/state.rs b/minijinja/src/vm/state.rs --- a/minijinja/src/vm/state.rs +++ b/minijinja/src/vm/state.rs @@ -107,13 +104,6 @@ impl<'vm, 'env> State<'vm, 'env> { self.current_block } - /// Returns the name of the item (filter, function, test, method) currently - /// being called. - #[inline(always)] - pub fn current_call(&self) -> Option<&str> { - self.current_call - } - /// Looks up a variable by name in the context. 
#[inline(always)] pub fn lookup(&self, name: &str) -> Option<Value> { diff --git a/minijinja/tests/snapshots/test_templates__vm@debug.txt.snap b/minijinja/tests/snapshots/test_templates__vm@debug.txt.snap --- a/minijinja/tests/snapshots/test_templates__vm@debug.txt.snap +++ b/minijinja/tests/snapshots/test_templates__vm@debug.txt.snap @@ -8,9 +8,6 @@ input_file: minijinja/tests/inputs/debug.txt State { name: "debug.txt", current_block: None, - current_call: Some( - "debug", - ), auto_escape: None, ctx: { "x": 0, diff --git a/minijinja/tests/test_templates.rs b/minijinja/tests/test_templates.rs --- a/minijinja/tests/test_templates.rs +++ b/minijinja/tests/test_templates.rs @@ -304,107 +304,6 @@ fn test_loop_changed() { assert_eq!(rv, "12345"); } -#[test] -fn test_current_call_state() { - use minijinja::value::{Object, Value}; - use std::fmt; - - #[derive(Debug)] - struct MethodAndFunc; - - impl fmt::Display for MethodAndFunc { - fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { - write!(f, "{self:?}") - } - } - - impl Object for MethodAndFunc { - fn call_method(&self, state: &State, name: &str, args: &[Value]) -> Result<Value, Error> { - assert_eq!(name, state.current_call().unwrap()); - let args = args - .iter() - .map(|v| v.to_string()) - .collect::<Vec<_>>() - .join(", "); - - Ok(format!("{}({args})", state.current_call().unwrap()).into()) - } - - fn call(&self, state: &State, args: &[Value]) -> Result<Value, Error> { - let args = args - .iter() - .map(|v| v.to_string()) - .collect::<Vec<_>>() - .join(", "); - - Ok(format!("{}({args})", state.current_call().unwrap()).into()) - } - } - - fn current_call(state: &State, value: Option<&str>) -> String { - format!("{}({})", state.current_call().unwrap(), value.unwrap_or("")) - } - - fn check_test(state: &State, value: &str) -> bool { - state.current_call() == Some(value) - } - - let mut env = Environment::new(); - env.add_function("fn_call_a", current_call); - env.add_function("fn_call_b", current_call); - 
env.add_filter("filter_call", current_call); - env.add_test("my_test", check_test); - env.add_test("another_test", check_test); - env.add_global("object", Value::from_object(MethodAndFunc)); - - env.add_template( - "test", - r#" - {{ fn_call_a() }} - {{ "foo" | filter_call }} - {{ fn_call_a() | filter_call }} - {{ fn_call_b() | filter_call }} - {{ fn_call_a(fn_call_b()) }} - {{ fn_call_a(fn_call_b()) | filter_call }} - - {{ "my_test" is my_test }} - {{ "another_test" is my_test }} - {{ "another_test" is another_test }} - - {{ object.foo() }} - {{ object.bar() }} - {{ object.foo(object.bar(object.baz())) }} - {{ object(object.bar()) }} - {{ object.baz(object()) }} - "#, - ) - .unwrap(); - - let tmpl = env.get_template("test").unwrap(); - let rv = tmpl.render(context!()).unwrap(); - assert_eq!( - rv, - r#" - fn_call_a() - filter_call(foo) - filter_call(fn_call_a()) - filter_call(fn_call_b()) - fn_call_a(fn_call_b()) - filter_call(fn_call_a(fn_call_b())) - - true - false - true - - foo() - bar() - foo(bar(baz())) - object(bar()) - baz(object()) - "# - ); -} - // ideally this would work, but unfortunately the way serde flatten works makes it // impossible for us to support with the internal optimizations in the value model. // see https://github.com/mitsuhiko/minijinja/issues/222
Remove State.current_call This requires a mutable reference to the state which is not always available. It also means that `current_call` is currently only set when the engine calls into values, but not when anything else calls. While this does not happen that frequently today, it does mean that it's not possible today to expose calls in any meaningful way to filters or others. This problem already exists internally as `map` for instance invokes other filters, but `current_call` is not updated. I'm not sure if `current_call` is particularly useful today. Maybe it could be removed? It might be possible to replicate the main uses of this via the `Object` traits for method calls (dynamic dispatch) and by potentially adding a way to hook the invocation of filters and tests.
2023-06-05T02:25:38
0.34
41aff0bb65c3ad079cb7e7f7b0243e720bda9d54
[ "test_vm" ]
[ "compiler::instructions::test_sizes", "compiler::lexer::test_find_marker", "compiler::lexer::test_is_basic_tag", "compiler::lexer::test_basic_identifiers", "filters::builtins::test_seq_object_borrow", "filters::builtins::test_basics", "filters::builtins::test_rest_args", "filters::builtins::test_optio...
[ "test_flattening - should panic", "test_flattening_sub_item_bad_lookup - should panic", "test_flattening_sub_item_bad_attr - should panic", "test_flattening_sub_item_shielded_print" ]
[]
moka-rs/moka
60
moka-rs__moka-60
[ "59" ]
3d928344310d53fc9c93105d6d6aeabc84ed047e
diff --git a/CHANGELOG.md b/CHANGELOG.md --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,14 @@ # Moka &mdash; Change Log +## Version 0.6.3 + +### Fixed + +- Fix a bug in `get_or_insert_with` and `get_or_try_insert_with` methods of + `future::Cache`, which caused a panic if previously inserting task aborted. + ([#59][gh-issue-0059]) + + ## Version 0.6.2 ### Removed diff --git a/CHANGELOG.md b/CHANGELOG.md --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -160,6 +169,7 @@ [resolving-error-on-32bit]: https://github.com/moka-rs/moka#resolving-compile-errors-on-some-32-bit-platforms +[gh-issue-0059]: https://github.com/moka-rs/moka/issues/59/ [gh-issue-0043]: https://github.com/moka-rs/moka/issues/43/ [gh-issue-0038]: https://github.com/moka-rs/moka/issues/38/ [gh-issue-0031]: https://github.com/moka-rs/moka/issues/31/ diff --git a/Cargo.toml b/Cargo.toml --- a/Cargo.toml +++ b/Cargo.toml @@ -1,6 +1,6 @@ [package] name = "moka" -version = "0.6.2" +version = "0.6.3" authors = ["Tatsuya Kawano <tatsuya@hibaridb.org>"] edition = "2018" diff --git a/src/future/value_initializer.rs b/src/future/value_initializer.rs --- a/src/future/value_initializer.rs +++ b/src/future/value_initializer.rs @@ -7,8 +7,6 @@ use std::{ }; type ErrorObject = Arc<dyn Any + Send + Sync + 'static>; -type WaiterValue<V> = Option<Result<V, ErrorObject>>; -type Waiter<V> = Arc<RwLock<WaiterValue<V>>>; pub(crate) enum InitResult<V, E> { Initialized(V), diff --git a/src/future/value_initializer.rs b/src/future/value_initializer.rs --- a/src/future/value_initializer.rs +++ b/src/future/value_initializer.rs @@ -16,6 +14,77 @@ pub(crate) enum InitResult<V, E> { InitErr(Arc<E>), } +enum WaiterValue<V> { + Computing, + Ready(Result<V, ErrorObject>), + // https://github.com/moka-rs/moka/issues/43 + InitFuturePanicked, + // https://github.com/moka-rs/moka/issues/59 + EnclosingFutureAborted, +} + +type Waiter<V> = Arc<RwLock<WaiterValue<V>>>; + +struct WaiterGuard<'a, K, V, S> +// NOTE: We usually do not attach trait 
bounds to here at the struct definition, but +// the Drop trait requires these bounds here. +where + Arc<K>: Eq + Hash, + V: Clone, + S: BuildHasher, +{ + is_waiter_value_set: bool, + key: &'a Arc<K>, + type_id: TypeId, + value_initializer: &'a ValueInitializer<K, V, S>, + write_lock: &'a mut WaiterValue<V>, +} + +impl<'a, K, V, S> WaiterGuard<'a, K, V, S> +where + Arc<K>: Eq + Hash, + V: Clone, + S: BuildHasher, +{ + fn new( + key: &'a Arc<K>, + type_id: TypeId, + value_initializer: &'a ValueInitializer<K, V, S>, + write_lock: &'a mut WaiterValue<V>, + ) -> Self { + Self { + is_waiter_value_set: false, + key, + type_id, + value_initializer, + write_lock, + } + } + + fn set_waiter_value(&mut self, v: WaiterValue<V>) { + *self.write_lock = v; + self.is_waiter_value_set = true; + } +} + +impl<'a, K, V, S> Drop for WaiterGuard<'a, K, V, S> +where + Arc<K>: Eq + Hash, + V: Clone, + S: BuildHasher, +{ + fn drop(&mut self) { + if !self.is_waiter_value_set { + // Value is not set. This means the future containing + // `get_or_*_insert_with` has been aborted. Remove our waiter to prevent + // the issue described in https://github.com/moka-rs/moka/issues/59 + *self.write_lock = WaiterValue::EnclosingFutureAborted; + self.value_initializer.remove_waiter(self.key, self.type_id); + self.is_waiter_value_set = true; + } + } +} + pub(crate) struct ValueInitializer<K, V, S> { // TypeId is the type ID of the concrete error type of generic type E in // try_init_or_read(). We use the type ID as a part of the key to ensure that diff --git a/src/future/value_initializer.rs b/src/future/value_initializer.rs --- a/src/future/value_initializer.rs +++ b/src/future/value_initializer.rs @@ -44,8 +113,8 @@ where { // This closure will be called after the init closure has returned a value. // It will convert the returned value (from init) into an InitResult. 
- let post_init = |_key, value: V, lock: &mut WaiterValue<V>| { - *lock = Some(Ok(value.clone())); + let post_init = |_key, value: V, mut guard: WaiterGuard<'_, K, V, S>| { + guard.set_waiter_value(WaiterValue::Ready(Ok(value.clone()))); InitResult::Initialized(value) }; diff --git a/src/future/value_initializer.rs b/src/future/value_initializer.rs --- a/src/future/value_initializer.rs +++ b/src/future/value_initializer.rs @@ -64,14 +133,15 @@ where // This closure will be called after the init closure has returned a value. // It will convert the returned value (from init) into an InitResult. - let post_init = |key, value: Result<V, E>, lock: &mut WaiterValue<V>| match value { + let post_init = |key, value: Result<V, E>, mut guard: WaiterGuard<'_, K, V, S>| match value + { Ok(value) => { - *lock = Some(Ok(value.clone())); + guard.set_waiter_value(WaiterValue::Ready(Ok(value.clone()))); InitResult::Initialized(value) } Err(e) => { let err: ErrorObject = Arc::new(e); - *lock = Some(Err(Arc::clone(&err))); + guard.set_waiter_value(WaiterValue::Ready(Err(Arc::clone(&err)))); self.remove_waiter(key, type_id); InitResult::InitErr(err.downcast().unwrap()) } diff --git a/src/future/value_initializer.rs b/src/future/value_initializer.rs --- a/src/future/value_initializer.rs +++ b/src/future/value_initializer.rs @@ -91,7 +161,7 @@ where ) -> InitResult<V, E> where F: Future<Output = O>, - C: FnMut(&'a Arc<K>, O, &mut WaiterValue<V>) -> InitResult<V, E>, + C: FnMut(&'a Arc<K>, O, WaiterGuard<'_, K, V, S>) -> InitResult<V, E>, E: Send + Sync + 'static, { use futures_util::FutureExt; diff --git a/src/future/value_initializer.rs b/src/future/value_initializer.rs --- a/src/future/value_initializer.rs +++ b/src/future/value_initializer.rs @@ -102,19 +172,25 @@ where let mut retries = 0; loop { - let waiter = Arc::new(RwLock::new(None)); + let waiter = Arc::new(RwLock::new(WaiterValue::Computing)); let mut lock = waiter.write().await; match self.try_insert_waiter(key, type_id, 
&waiter) { None => { // Our waiter was inserted. Let's resolve the init future. + + // Create a guard. This will ensure to remove our waiter when the + // enclosing future has been aborted: + // https://github.com/moka-rs/moka/issues/59 + let mut waiter_guard = WaiterGuard::new(key, type_id, self, &mut lock); + // Catching panic is safe here as we do not try to resolve the future again. match AssertUnwindSafe(init).catch_unwind().await { // Resolved. - Ok(value) => return post_init(key, value, &mut lock), + Ok(value) => return post_init(key, value, waiter_guard), // Panicked. Err(payload) => { - *lock = None; + waiter_guard.set_waiter_value(WaiterValue::InitFuturePanicked); // Remove the waiter so that others can retry. self.remove_waiter(key, type_id); resume_unwind(payload); diff --git a/src/future/value_initializer.rs b/src/future/value_initializer.rs --- a/src/future/value_initializer.rs +++ b/src/future/value_initializer.rs @@ -126,22 +202,30 @@ where // for a read lock to become available. std::mem::drop(lock); match &*res.read().await { - Some(Ok(value)) => return ReadExisting(value.clone()), - Some(Err(e)) => return InitErr(Arc::clone(e).downcast().unwrap()), - // None means somebody else's init future has been panicked. - None => { + WaiterValue::Ready(Ok(value)) => return ReadExisting(value.clone()), + WaiterValue::Ready(Err(e)) => { + return InitErr(Arc::clone(e).downcast().unwrap()) + } + // Somebody else's init future has been panicked. + WaiterValue::InitFuturePanicked => { retries += 1; - if retries < MAX_RETRIES { - // Retry from the beginning. - continue; - } else { - panic!( - r#"Too many retries. Tried to read the return value from the `init` \ - future but failed {} times. Maybe the `init` kept panicking?"#, - retries - ); - } + panic_if_retry_exhausted_for_panicking(retries, MAX_RETRIES); + // Retry from the beginning. + continue; } + // Somebody else (a future containing `get_or_insert_with`/ + // `get_or_try_insert_with`) has been aborted. 
+ WaiterValue::EnclosingFutureAborted => { + retries += 1; + panic_if_retry_exhausted_for_aborting(retries, MAX_RETRIES); + // Retry from the beginning. + continue; + } + // Unexpected state. + WaiterValue::Computing => panic!( + "Got unexpected state `Computing` after resolving `init` future. \ + This might be a bug in Moka" + ), } } } diff --git a/src/future/value_initializer.rs b/src/future/value_initializer.rs --- a/src/future/value_initializer.rs +++ b/src/future/value_initializer.rs @@ -168,3 +252,24 @@ where .insert_with_or_modify((key, type_id), || waiter, |_, w| Arc::clone(w)) } } + +fn panic_if_retry_exhausted_for_panicking(retries: usize, max: usize) { + if retries >= max { + panic!( + "Too many retries. Tried to read the return value from the `init` future \ + but failed {} times. Maybe the `init` kept panicking?", + retries + ); + } +} + +fn panic_if_retry_exhausted_for_aborting(retries: usize, max: usize) { + if retries >= max { + panic!( + "Too many retries. Tried to read the return value from the `init` future \ + but failed {} times. Maybe the future containing `get_or_insert_with`/\ + `get_or_try_insert_with` kept being aborted?", + retries + ); + } +} diff --git a/src/sync/value_initializer.rs b/src/sync/value_initializer.rs --- a/src/sync/value_initializer.rs +++ b/src/sync/value_initializer.rs @@ -131,8 +131,8 @@ where continue; } else { panic!( - r#"Too many retries. Tried to read the return value from the `init` \ - closure but failed {} times. Maybe the `init` kept panicking?"#, + "Too many retries. Tried to read the return value from the `init` \ + closure but failed {} times. Maybe the `init` kept panicking?", retries ); }
diff --git a/src/future/cache.rs b/src/future/cache.rs --- a/src/future/cache.rs +++ b/src/future/cache.rs @@ -1361,4 +1361,67 @@ mod tests { Ok(5) ); } + + #[tokio::test] + // https://github.com/moka-rs/moka/issues/59 + async fn abort_get_or_insert_with() { + use tokio::time::{sleep, Duration}; + + let cache = Cache::new(16); + let semaphore = Arc::new(tokio::sync::Semaphore::new(0)); + + let handle; + { + let cache_ref = cache.clone(); + let semaphore_ref = semaphore.clone(); + + handle = tokio::task::spawn(async move { + let _ = cache_ref + .get_or_insert_with(1, async move { + semaphore_ref.add_permits(1); + sleep(Duration::from_millis(50)).await; + unreachable!(); + }) + .await; + }); + } + + let _ = semaphore.acquire().await.expect("semaphore acquire failed"); + handle.abort(); + + assert_eq!(cache.get_or_insert_with(1, async { 5 }).await, 5); + } + + #[tokio::test] + // https://github.com/moka-rs/moka/issues/59 + async fn abort_get_or_try_insert_with() { + use tokio::time::{sleep, Duration}; + + let cache = Cache::new(16); + let semaphore = Arc::new(tokio::sync::Semaphore::new(0)); + + let handle; + { + let cache_ref = cache.clone(); + let semaphore_ref = semaphore.clone(); + + handle = tokio::task::spawn(async move { + let _ = cache_ref + .get_or_try_insert_with(1, async move { + semaphore_ref.add_permits(1); + sleep(Duration::from_millis(50)).await; + unreachable!(); + }) + .await as Result<_, Arc<Infallible>>; + }); + } + + let _ = semaphore.acquire().await.expect("semaphore acquire failed"); + handle.abort(); + + assert_eq!( + cache.get_or_try_insert_with(1, async { Ok(5) }).await as Result<_, Arc<Infallible>>, + Ok(5) + ); + } }
moka::future::Cache::get_or_insert_with() panics if previously inserting task aborted When writing a web server, it appears that `hyper::Server` can abort tasks, if the requester has gone away. `moka::future::Cache` does not like that and panics. Is this bug? Or is there a recommended way to deal with it? Minimized example: ```toml [package] name = "moka-future-bug" version = "0.1.0" edition = "2021" [dependencies] moka = { version = "0.6.2", features = ["future"] } tokio = { version = "1.15.0", features = ["full"] } ``` ```rust use moka::future::Cache; use std::time::Duration; #[tokio::main] async fn main() { let cache_a: Cache<(), ()> = Cache::new(1); let cache_b = cache_a.clone(); let handle = tokio::task::spawn(async move { cache_b .get_or_insert_with((), async { tokio::time::sleep(Duration::from_millis(1000)).await; }) .await; }); tokio::time::sleep(Duration::from_millis(500)).await; handle.abort(); cache_a.get_or_insert_with((), async {}).await; // panics! } ``` Backtrace: ``` thread 'main' panicked at 'Too many retries. Tried to read the return value from the `init` \ future but failed 200 times. 
Maybe the `init` kept panicking?', /home/niklas/.cargo/registry/src/github.com-1ecc6299db9ec823/moka-0.6.2/src/future/value_initializer.rs:138:33 stack backtrace: 0: rust_begin_unwind at /rustc/efec545293b9263be9edfb283a7aa66350b3acbf/library/std/src/panicking.rs:498:5 1: core::panicking::panic_fmt at /rustc/efec545293b9263be9edfb283a7aa66350b3acbf/library/core/src/panicking.rs:107:14 2: moka::future::value_initializer::ValueInitializer<K,V,S>::do_try_init::{{closure}} at /home/niklas/.cargo/registry/src/github.com-1ecc6299db9ec823/moka-0.6.2/src/future/value_initializer.rs:138:33 3: <core::future::from_generator::GenFuture<T> as core::future::future::Future>::poll at /rustc/efec545293b9263be9edfb283a7aa66350b3acbf/library/core/src/future/mod.rs:80:19 4: moka::future::value_initializer::ValueInitializer<K,V,S>::init_or_read::{{closure}} at /home/niklas/.cargo/registry/src/github.com-1ecc6299db9ec823/moka-0.6.2/src/future/value_initializer.rs:53:9 5: <core::future::from_generator::GenFuture<T> as core::future::future::Future>::poll at /rustc/efec545293b9263be9edfb283a7aa66350b3acbf/library/core/src/future/mod.rs:80:19 6: moka::future::cache::Cache<K,V,S>::get_or_insert_with_hash_and_fun::{{closure}} at /home/niklas/.cargo/registry/src/github.com-1ecc6299db9ec823/moka-0.6.2/src/future/cache.rs:621:15 7: <core::future::from_generator::GenFuture<T> as core::future::future::Future>::poll at /rustc/efec545293b9263be9edfb283a7aa66350b3acbf/library/core/src/future/mod.rs:80:19 8: moka::future::cache::Cache<K,V,S>::get_or_insert_with::{{closure}} at /home/niklas/.cargo/registry/src/github.com-1ecc6299db9ec823/moka-0.6.2/src/future/cache.rs:363:9 9: <core::future::from_generator::GenFuture<T> as core::future::future::Future>::poll at /rustc/efec545293b9263be9edfb283a7aa66350b3acbf/library/core/src/future/mod.rs:80:19 10: moka_future_bug::main::{{closure}} at ./src/main.rs:21:5 11: <core::future::from_generator::GenFuture<T> as core::future::future::Future>::poll at 
/rustc/efec545293b9263be9edfb283a7aa66350b3acbf/library/core/src/future/mod.rs:80:19 12: tokio::park::thread::CachedParkThread::block_on::{{closure}} at /home/niklas/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.15.0/src/park/thread.rs:263:54 13: tokio::coop::with_budget::{{closure}} at /home/niklas/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.15.0/src/coop.rs:102:9 14: std::thread::local::LocalKey<T>::try_with at /rustc/efec545293b9263be9edfb283a7aa66350b3acbf/library/std/src/thread/local.rs:413:16 15: std::thread::local::LocalKey<T>::with at /rustc/efec545293b9263be9edfb283a7aa66350b3acbf/library/std/src/thread/local.rs:389:9 16: tokio::coop::with_budget at /home/niklas/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.15.0/src/coop.rs:95:5 17: tokio::coop::budget at /home/niklas/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.15.0/src/coop.rs:72:5 18: tokio::park::thread::CachedParkThread::block_on at /home/niklas/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.15.0/src/park/thread.rs:263:31 19: tokio::runtime::enter::Enter::block_on at /home/niklas/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.15.0/src/runtime/enter.rs:151:13 20: tokio::runtime::thread_pool::ThreadPool::block_on at /home/niklas/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.15.0/src/runtime/thread_pool/mod.rs:77:9 21: tokio::runtime::Runtime::block_on at /home/niklas/.cargo/registry/src/github.com-1ecc6299db9ec823/tokio-1.15.0/src/runtime/mod.rs:463:43 22: moka_future_bug::main at ./src/main.rs:21:5 23: core::ops::function::FnOnce::call_once at /rustc/efec545293b9263be9edfb283a7aa66350b3acbf/library/core/src/ops/function.rs:227:5 note: Some details are omitted, run with `RUST_BACKTRACE=full` for a verbose backtrace. ```
Thank you for reporting. I confirmed the issue and I think it is a bug. I will investigate and fix it. Hey @tatsuya6502 lmk if you think I could help here Hi @barkanido — Thank you for the offer. I will do this by myself because this one is a bit tricky to solve. I will need some experiments.
2021-12-28T20:41:28
0.6
3d928344310d53fc9c93105d6d6aeabc84ed047e
[ "future::cache::tests::abort_get_or_insert_with", "future::cache::tests::abort_get_or_try_insert_with" ]
[ "common::deque::tests::iter", "common::frequency_sketch::tests::increment_max", "common::deque::tests::drop", "common::frequency_sketch::tests::increment_distinct", "common::deque::tests::basics", "common::frequency_sketch::tests::increment_once", "common::frequency_sketch::tests::index_of_around_zero",...
[]
[]
moka-rs/moka
156
moka-rs__moka-156
[ "155" ]
59542ca6db81618097210b2d5f0612ee61a492fa
diff --git a/CHANGELOG.md b/CHANGELOG.md --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,5 +1,18 @@ # Moka Cache &mdash; Change Log +## Version 0.8.6 + +### Fixed + +- Fix a bug caused `invalidate_all` and `invalidate_entries_if` of the following + caches will not invalidate entries inserted just before calling them + ([#155][gh-issue-0155]): + - `sync::Cache` + - `sync::SegmentedCache` + - `future::Cache` + - Experimental `dash::Cache` + + ## Version 0.8.5 ### Added diff --git a/CHANGELOG.md b/CHANGELOG.md --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -368,6 +381,7 @@ The minimum supported Rust version (MSRV) is now 1.51.0 (2021-03-25). [panic_in_quanta]: https://github.com/moka-rs/moka#integer-overflow-in-quanta-crate-on-some-x86_64-machines [resolving-error-on-32bit]: https://github.com/moka-rs/moka#compile-errors-on-some-32-bit-platforms +[gh-issue-0155]: https://github.com/moka-rs/moka/issues/155/ [gh-issue-0123]: https://github.com/moka-rs/moka/issues/123/ [gh-issue-0119]: https://github.com/moka-rs/moka/issues/119/ [gh-issue-0107]: https://github.com/moka-rs/moka/issues/107/ diff --git a/src/common/concurrent.rs b/src/common/concurrent.rs --- a/src/common/concurrent.rs +++ b/src/common/concurrent.rs @@ -81,6 +81,11 @@ impl<K> KeyDate<K> { pub(crate) fn last_modified(&self) -> Option<Instant> { self.entry_info.last_modified() } + + // #[cfg(any(feature = "sync", feature = "future"))] + pub(crate) fn is_dirty(&self) -> bool { + self.entry_info.is_dirty() + } } pub(crate) struct KeyHashDate<K> { diff --git a/src/common/concurrent.rs b/src/common/concurrent.rs --- a/src/common/concurrent.rs +++ b/src/common/concurrent.rs @@ -212,10 +217,6 @@ impl<K, V> ValueEntry<K, V> { write_order_q_node: other_nodes.write_order_q_node, } }; - // To prevent this updated ValueEntry from being evicted by an expiration policy, - // set the max value to the timestamps. They will be replaced with the real - // timestamps when applying writes. 
- entry_info.reset_timestamps(); Self { value, info: entry_info, diff --git a/src/common/concurrent.rs b/src/common/concurrent.rs --- a/src/common/concurrent.rs +++ b/src/common/concurrent.rs @@ -231,8 +232,16 @@ impl<K, V> ValueEntry<K, V> { self.info.is_admitted() } - pub(crate) fn set_is_admitted(&self, value: bool) { - self.info.set_is_admitted(value); + pub(crate) fn set_admitted(&self, value: bool) { + self.info.set_admitted(value); + } + + pub(crate) fn is_dirty(&self) -> bool { + self.info.is_dirty() + } + + pub(crate) fn set_dirty(&self, value: bool) { + self.info.set_dirty(value); } #[inline] diff --git a/src/common/concurrent/atomic_time/atomic_time.rs b/src/common/concurrent/atomic_time/atomic_time.rs --- a/src/common/concurrent/atomic_time/atomic_time.rs +++ b/src/common/concurrent/atomic_time/atomic_time.rs @@ -21,8 +21,10 @@ impl Default for AtomicInstant { // quanta v0.10.0 no longer provides `quanta::Instant::as_u64` method. impl AtomicInstant { - pub(crate) fn reset(&self) { - self.instant.store(std::u64::MAX, Ordering::Release); + pub(crate) fn new(timestamp: Instant) -> Self { + let ai = Self::default(); + ai.set_instant(timestamp); + ai } pub(crate) fn is_set(&self) -> bool { diff --git a/src/common/concurrent/atomic_time/atomic_time_compat.rs b/src/common/concurrent/atomic_time/atomic_time_compat.rs --- a/src/common/concurrent/atomic_time/atomic_time_compat.rs +++ b/src/common/concurrent/atomic_time/atomic_time_compat.rs @@ -15,8 +15,10 @@ impl Default for AtomicInstant { } impl AtomicInstant { - pub(crate) fn reset(&self) { - *self.instant.write() = None; + pub(crate) fn new(timestamp: Instant) -> Self { + let ai = Self::default(); + ai.set_instant(timestamp); + ai } pub(crate) fn is_set(&self) -> bool { diff --git a/src/common/concurrent/entry_info.rs b/src/common/concurrent/entry_info.rs --- a/src/common/concurrent/entry_info.rs +++ b/src/common/concurrent/entry_info.rs @@ -4,7 +4,14 @@ use super::AccessTime; use 
crate::common::{concurrent::atomic_time::AtomicInstant, time::Instant}; pub(crate) struct EntryInfo { + /// `is_admitted` indicates that the entry has been admitted to the + /// cache. When `false`, it means the entry is _temporary_ admitted to + /// the cache or evicted from the cache (so it should not have LRU nodes). is_admitted: AtomicBool, + /// `is_dirty` indicates that the entry has been inserted (or updated) + /// in the hash table, but the history of the insertion has not yet + /// been applied to the LRU deques and LFU estimator. + is_dirty: AtomicBool, last_accessed: AtomicInstant, last_modified: AtomicInstant, policy_weight: AtomicU32, diff --git a/src/common/concurrent/entry_info.rs b/src/common/concurrent/entry_info.rs --- a/src/common/concurrent/entry_info.rs +++ b/src/common/concurrent/entry_info.rs @@ -12,14 +19,15 @@ pub(crate) struct EntryInfo { impl EntryInfo { #[inline] - pub(crate) fn new(policy_weight: u32) -> Self { + pub(crate) fn new(timestamp: Instant, policy_weight: u32) -> Self { #[cfg(feature = "unstable-debug-counters")] super::debug_counters::InternalGlobalDebugCounters::entry_info_created(); Self { is_admitted: Default::default(), - last_accessed: Default::default(), - last_modified: Default::default(), + is_dirty: AtomicBool::new(true), + last_accessed: AtomicInstant::new(timestamp), + last_modified: AtomicInstant::new(timestamp), policy_weight: AtomicU32::new(policy_weight), } } diff --git a/src/common/concurrent/entry_info.rs b/src/common/concurrent/entry_info.rs --- a/src/common/concurrent/entry_info.rs +++ b/src/common/concurrent/entry_info.rs @@ -30,14 +38,18 @@ impl EntryInfo { } #[inline] - pub(crate) fn set_is_admitted(&self, value: bool) { + pub(crate) fn set_admitted(&self, value: bool) { self.is_admitted.store(value, Ordering::Release); } #[inline] - pub(crate) fn reset_timestamps(&self) { - self.last_accessed.reset(); - self.last_modified.reset(); + pub(crate) fn is_dirty(&self) -> bool { + 
self.is_dirty.load(Ordering::Acquire) + } + + #[inline] + pub(crate) fn set_dirty(&self, value: bool) { + self.is_dirty.store(value, Ordering::Release); } #[inline] diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -258,6 +258,7 @@ where #[inline] pub(crate) fn do_insert_with_hash(&self, key: Arc<K>, hash: u64, value: V) -> WriteOp<K, V> { + let ts = self.inner.current_time_from_expiration_clock(); let weight = self.inner.weigh(&key, &value); let mut insert_op = None; let mut update_op = None; diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -273,7 +274,7 @@ where // prevent this new ValueEntry from being evicted by an expiration policy. // 3. This method will update the policy_weight with the new weight. let old_weight = entry.policy_weight(); - *entry = self.new_value_entry_from(value.clone(), weight, entry); + *entry = self.new_value_entry_from(value.clone(), ts, weight, entry); update_op = Some(WriteOp::Upsert { key_hash: KeyHash::new(Arc::clone(&key), hash), value_entry: TrioArc::clone(entry), diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -283,7 +284,7 @@ where }) // Insert .or_insert_with(|| { - let entry = self.new_value_entry(value.clone(), weight); + let entry = self.new_value_entry(value.clone(), ts, weight); insert_op = Some(WriteOp::Upsert { key_hash: KeyHash::new(Arc::clone(&key), hash), value_entry: TrioArc::clone(&entry), diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -301,8 +302,13 @@ where } #[inline] - fn new_value_entry(&self, value: V, policy_weight: u32) -> TrioArc<ValueEntry<K, V>> { - let info = TrioArc::new(EntryInfo::new(policy_weight)); + fn new_value_entry( + &self, + value: V, + timestamp: Instant, + policy_weight: u32, + ) -> 
TrioArc<ValueEntry<K, V>> { + let info = TrioArc::new(EntryInfo::new(timestamp, policy_weight)); TrioArc::new(ValueEntry::new(value, info)) } diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -310,10 +316,16 @@ where fn new_value_entry_from( &self, value: V, + timestamp: Instant, policy_weight: u32, other: &ValueEntry<K, V>, ) -> TrioArc<ValueEntry<K, V>> { let info = TrioArc::clone(other.entry_info()); + // To prevent this updated ValueEntry from being evicted by an expiration policy, + // set the dirty flag to true. It will be reset to false when the write is applied. + info.set_dirty(true); + info.set_last_accessed(timestamp); + info.set_last_modified(timestamp); info.set_policy_weight(policy_weight); TrioArc::new(ValueEntry::new_from(value, info, other)) } diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -773,7 +785,6 @@ where use WriteOp::*; let freq = self.frequency_sketch.read(); let ch = &self.write_op_ch; - let ts = self.current_time_from_expiration_clock(); for _ in 0..count { match ch.try_recv() { diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -782,9 +793,7 @@ where value_entry: entry, old_weight, new_weight, - }) => { - self.handle_upsert(kh, entry, old_weight, new_weight, ts, deqs, &freq, counters) - } + }) => self.handle_upsert(kh, entry, old_weight, new_weight, deqs, &freq, counters), Ok(Remove(KvEntry { key: _key, entry })) => { Self::handle_remove(deqs, entry, counters) } diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -800,13 +809,11 @@ where entry: TrioArc<ValueEntry<K, V>>, old_weight: u32, new_weight: u32, - timestamp: Instant, deqs: &mut Deques<K>, freq: &FrequencySketch, counters: &mut EvictionCounters, ) { - entry.set_last_accessed(timestamp); - 
entry.set_last_modified(timestamp); + entry.set_dirty(false); if entry.is_admitted() { // The entry has been already admitted, so treat this as an update. diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -971,7 +978,7 @@ where if self.is_write_order_queue_enabled() { deqs.push_back_wo(KeyDate::new(key, entry.entry_info()), entry); } - entry.set_is_admitted(true); + entry.set_admitted(true); } fn handle_remove( diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -980,7 +987,7 @@ where counters: &mut EvictionCounters, ) { if entry.is_admitted() { - entry.set_is_admitted(false); + entry.set_admitted(false); counters.saturating_sub(1, entry.policy_weight()); // The following two unlink_* functions will unset the deq nodes. deqs.unlink_ao(&entry); diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -998,7 +1005,7 @@ where counters: &mut EvictionCounters, ) { if entry.is_admitted() { - entry.set_is_admitted(false); + entry.set_admitted(false); counters.saturating_sub(1, entry.policy_weight()); // The following two unlink_* functions will unset the deq nodes. Deques::unlink_ao_from_deque(ao_deq_name, ao_deq, &entry); diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -1052,6 +1059,7 @@ where for _ in 0..batch_size { // Peek the front node of the deque and check if it is expired. let key = deq.peek_front().and_then(|node| { + // TODO: Skip the entry if it is dirty. See `evict_lru_entries` method as an example. 
if is_expired_entry_ao(tti, va, &*node, now) { Some(Arc::clone(node.element.key())) } else { diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -1090,7 +1098,7 @@ where write_order_deq: &mut Deque<KeyDate<K>>, ) -> bool { if let Some(entry) = self.cache.get(key) { - if entry.last_accessed().is_none() { + if entry.is_dirty() { // The key exists and the entry has been updated. Deques::move_to_back_ao_in_deque(deq_name, deq, &entry); Deques::move_to_back_wo_in_deque(write_order_deq, &entry); diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -1124,6 +1132,7 @@ where let va = &self.valid_after(); for _ in 0..batch_size { let key = deqs.write_order.peek_front().and_then(|node| { + // TODO: Skip the entry if it is dirty. See `evict_lru_entries` method as an example. if is_expired_entry_wo(ttl, va, &*node, now) { Some(Arc::clone(node.element.key())) } else { diff --git a/src/dash/base_cache.rs b/src/dash/base_cache.rs --- a/src/dash/base_cache.rs +++ b/src/dash/base_cache.rs @@ -1181,15 +1190,20 @@ where } let maybe_key_and_ts = deq.peek_front().map(|node| { + let entry_info = node.element.entry_info(); ( Arc::clone(node.element.key()), - node.element.entry_info().last_modified(), + entry_info.is_dirty(), + entry_info.last_modified(), ) }); let (key, ts) = match maybe_key_and_ts { - Some((key, Some(ts))) => (key, ts), - Some((key, None)) => { + Some((key, false, Some(ts))) => (key, ts), + // TODO: Remove the second pattern `Some((_key, false, None))` once we change + // `last_modified` and `last_accessed` in `EntryInfo` from `Option<Instant>` to + // `Instant`. 
+ Some((key, true, _)) | Some((key, false, None)) => { if self.try_skip_updated_entry(&key, DEQ_NAME, deq, write_order_deq) { continue; } else { diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -299,6 +299,7 @@ where #[inline] pub(crate) fn do_insert_with_hash(&self, key: Arc<K>, hash: u64, value: V) -> WriteOp<K, V> { + let ts = self.inner.current_time_from_expiration_clock(); let weight = self.inner.weigh(&key, &value); let op_cnt1 = Rc::new(AtomicU8::new(0)); let op_cnt2 = Rc::clone(&op_cnt1); diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -318,7 +319,7 @@ where hash, // on_insert || { - let entry = self.new_value_entry(value.clone(), weight); + let entry = self.new_value_entry(value.clone(), ts, weight); let cnt = op_cnt1.fetch_add(1, Ordering::Relaxed); op1 = Some(( cnt, diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -339,7 +340,7 @@ where // prevent this new ValueEntry from being evicted by an expiration policy. // 3. This method will update the policy_weight with the new weight. 
let old_weight = old_entry.policy_weight(); - let entry = self.new_value_entry_from(value.clone(), weight, old_entry); + let entry = self.new_value_entry_from(value.clone(), ts, weight, old_entry); let cnt = op_cnt2.fetch_add(1, Ordering::Relaxed); op2 = Some(( cnt, diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -374,8 +375,13 @@ where } #[inline] - fn new_value_entry(&self, value: V, policy_weight: u32) -> TrioArc<ValueEntry<K, V>> { - let info = TrioArc::new(EntryInfo::new(policy_weight)); + fn new_value_entry( + &self, + value: V, + timestamp: Instant, + policy_weight: u32, + ) -> TrioArc<ValueEntry<K, V>> { + let info = TrioArc::new(EntryInfo::new(timestamp, policy_weight)); TrioArc::new(ValueEntry::new(value, info)) } diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -383,10 +389,16 @@ where fn new_value_entry_from( &self, value: V, + timestamp: Instant, policy_weight: u32, other: &ValueEntry<K, V>, ) -> TrioArc<ValueEntry<K, V>> { let info = TrioArc::clone(other.entry_info()); + // To prevent this updated ValueEntry from being evicted by an expiration policy, + // set the dirty flag to true. It will be reset to false when the write is applied. 
+ info.set_dirty(true); + info.set_last_accessed(timestamp); + info.set_last_modified(timestamp); info.set_policy_weight(policy_weight); TrioArc::new(ValueEntry::new_from(value, info, other)) } diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -954,7 +966,6 @@ where use WriteOp::*; let freq = self.frequency_sketch.read(); let ch = &self.write_op_ch; - let ts = self.current_time_from_expiration_clock(); for _ in 0..count { match ch.try_recv() { diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -963,9 +974,7 @@ where value_entry: entry, old_weight, new_weight, - }) => { - self.handle_upsert(kh, entry, old_weight, new_weight, ts, deqs, &freq, counters) - } + }) => self.handle_upsert(kh, entry, old_weight, new_weight, deqs, &freq, counters), Ok(Remove(KvEntry { key: _key, entry })) => { Self::handle_remove(deqs, entry, counters) } diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -981,13 +990,11 @@ where entry: TrioArc<ValueEntry<K, V>>, old_weight: u32, new_weight: u32, - timestamp: Instant, deqs: &mut Deques<K>, freq: &FrequencySketch, counters: &mut EvictionCounters, ) { - entry.set_last_accessed(timestamp); - entry.set_last_modified(timestamp); + entry.set_dirty(false); if entry.is_admitted() { // The entry has been already admitted, so treat this as an update. 
diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -1154,7 +1161,7 @@ where if self.is_write_order_queue_enabled() { deqs.push_back_wo(KeyDate::new(key, entry.entry_info()), entry); } - entry.set_is_admitted(true); + entry.set_admitted(true); } fn handle_remove( diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -1163,7 +1170,7 @@ where counters: &mut EvictionCounters, ) { if entry.is_admitted() { - entry.set_is_admitted(false); + entry.set_admitted(false); counters.saturating_sub(1, entry.policy_weight()); // The following two unlink_* functions will unset the deq nodes. deqs.unlink_ao(&entry); diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -1181,7 +1188,7 @@ where counters: &mut EvictionCounters, ) { if entry.is_admitted() { - entry.set_is_admitted(false); + entry.set_admitted(false); counters.saturating_sub(1, entry.policy_weight()); // The following two unlink_* functions will unset the deq nodes. Deques::unlink_ao_from_deque(ao_deq_name, ao_deq, &entry); diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -1235,6 +1242,7 @@ where for _ in 0..batch_size { // Peek the front node of the deque and check if it is expired. let key_hash = deq.peek_front().and_then(|node| { + // TODO: Skip the entry if it is dirty. See `evict_lru_entries` method as an example. 
if is_expired_entry_ao(tti, va, &*node, now) { Some((Arc::clone(node.element.key()), node.element.hash())) } else { diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -1274,7 +1282,7 @@ where write_order_deq: &mut Deque<KeyDate<K>>, ) -> bool { if let Some(entry) = self.cache.get(key, hash) { - if entry.last_accessed().is_none() { + if entry.is_dirty() { // The key exists and the entry has been updated. Deques::move_to_back_ao_in_deque(deq_name, deq, &entry); Deques::move_to_back_wo_in_deque(write_order_deq, &entry); diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -1308,6 +1316,7 @@ where let va = &self.valid_after(); for _ in 0..batch_size { let key = deqs.write_order.peek_front().and_then(|node| { + // TODO: Skip the entry if it is dirty. See `evict_lru_entries` method as an example. if is_expired_entry_wo(ttl, va, &*node, now) { Some(Arc::clone(node.element.key())) } else { diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -1401,11 +1410,13 @@ where while len < batch_size { if let Some(kd) = iter.next() { - if let Some(ts) = kd.last_modified() { - let key = kd.key(); - let hash = self.hash(key); - candidates.push(KeyDateLite::new(key, hash, ts)); - len += 1; + if !kd.is_dirty() { + if let Some(ts) = kd.last_modified() { + let key = kd.key(); + let hash = self.hash(key); + candidates.push(KeyDateLite::new(key, hash, ts)); + len += 1; + } } } else { break; diff --git a/src/sync_base/base_cache.rs b/src/sync_base/base_cache.rs --- a/src/sync_base/base_cache.rs +++ b/src/sync_base/base_cache.rs @@ -1435,16 +1446,21 @@ where } let maybe_key_hash_ts = deq.peek_front().map(|node| { + let entry_info = node.element.entry_info(); ( Arc::clone(node.element.key()), node.element.hash(), - 
node.element.entry_info().last_modified(), + entry_info.is_dirty(), + entry_info.last_modified(), ) }); let (key, hash, ts) = match maybe_key_hash_ts { - Some((key, hash, Some(ts))) => (key, hash, ts), - Some((key, hash, None)) => { + Some((key, hash, false, Some(ts))) => (key, hash, ts), + // TODO: Remove the second pattern `Some((_key, false, None))` once we change + // `last_modified` and `last_accessed` in `EntryInfo` from `Option<Instant>` to + // `Instant`. + Some((key, hash, true, _)) | Some((key, hash, false, None)) => { if self.try_skip_updated_entry(&key, hash, DEQ_NAME, deq, write_order_deq) { continue; } else {
diff --git a/src/common/concurrent/entry_info.rs b/src/common/concurrent/entry_info.rs --- a/src/common/concurrent/entry_info.rs +++ b/src/common/concurrent/entry_info.rs @@ -78,3 +90,35 @@ impl AccessTime for EntryInfo { self.last_modified.set_instant(timestamp); } } + +#[cfg(test)] +mod test { + use super::EntryInfo; + + // Ignore this test by default as struct size may change in the future. + // #[ignore] + #[test] + fn check_struct_size() { + use std::mem::size_of; + + // As of Rust 1.61. + let size = if cfg!(target_pointer_width = "64") { + if cfg!(feature = "quanta") { + 24 + } else { + 72 + } + } else if cfg!(target_pointer_width = "32") { + if cfg!(feature = "quanta") { + 24 + } else { + 40 + } + } else { + // ignore + return; + }; + + assert_eq!(size_of::<EntryInfo>(), size); + } +} diff --git a/src/dash/cache.rs b/src/dash/cache.rs --- a/src/dash/cache.rs +++ b/src/dash/cache.rs @@ -818,7 +818,10 @@ mod tests { assert!(cache.contains_key(&"a")); assert!(cache.contains_key(&"b")); assert!(cache.contains_key(&"c")); - cache.sync(); + + // `cache.sync()` is no longer needed here before invalidating. The last + // modified timestamp of the entries were updated when they were inserted. + // https://github.com/moka-rs/moka/issues/155 cache.invalidate_all(); cache.sync(); diff --git a/src/future/cache.rs b/src/future/cache.rs --- a/src/future/cache.rs +++ b/src/future/cache.rs @@ -1374,7 +1374,10 @@ mod tests { assert!(cache.contains_key(&"a")); assert!(cache.contains_key(&"b")); assert!(cache.contains_key(&"c")); - cache.sync(); + + // `cache.sync()` is no longer needed here before invalidating. The last + // modified timestamp of the entries were updated when they were inserted. 
+ // https://github.com/moka-rs/moka/issues/155 cache.invalidate_all(); cache.sync(); diff --git a/src/future/cache.rs b/src/future/cache.rs --- a/src/future/cache.rs +++ b/src/future/cache.rs @@ -1392,6 +1395,19 @@ mod tests { assert!(cache.contains_key(&"d")); } + // This test is for https://github.com/moka-rs/moka/issues/155 + #[tokio::test] + async fn invalidate_all_without_sync() { + let cache = Cache::new(1024); + + assert_eq!(cache.get(&0), None); + cache.insert(0, 1).await; + assert_eq!(cache.get(&0), Some(1)); + + cache.invalidate_all(); + assert_eq!(cache.get(&0), None); + } + #[tokio::test] async fn invalidate_entries_if() -> Result<(), Box<dyn std::error::Error>> { use std::collections::HashSet; diff --git a/src/sync/cache.rs b/src/sync/cache.rs --- a/src/sync/cache.rs +++ b/src/sync/cache.rs @@ -1194,7 +1194,10 @@ mod tests { assert!(cache.contains_key(&"a")); assert!(cache.contains_key(&"b")); assert!(cache.contains_key(&"c")); - cache.sync(); + + // `cache.sync()` is no longer needed here before invalidating. The last + // modified timestamp of the entries were updated when they were inserted. + // https://github.com/moka-rs/moka/issues/155 cache.invalidate_all(); cache.sync(); diff --git a/src/sync/segment.rs b/src/sync/segment.rs --- a/src/sync/segment.rs +++ b/src/sync/segment.rs @@ -874,7 +874,10 @@ mod tests { assert!(cache.contains_key(&"a")); assert!(cache.contains_key(&"b")); assert!(cache.contains_key(&"c")); - cache.sync(); + + // `cache.sync()` is no longer needed here before invalidating. The last + // modified timestamp of the entries were updated when they were inserted. + // https://github.com/moka-rs/moka/issues/155 cache.invalidate_all(); cache.sync();
Bug in `moka::future::Cache::invalidate_all`? Elements not being invalidated immediatelly. Hi. According to the documentation of `invalidate_all`: ``` pub fn invalidate_all(&self) Discards all cached values. This method returns immediately and a background thread will evict all the cached values inserted before the time when this method was called. It is guaranteed that the get method must not return these invalidated values even if they have not been evicted. Like the invalidate method, this method does not clear the historic popularity estimator of keys so that it retains the client activities of trying to retrieve an item. ``` From this I surmised that, even though the actual removal of elements occurs in the background, they would be somehow marked as invalid, and as such a subsequent `get` would not see them. However, this does not seem to be happening. Here's a small minimal example that exemplefies this ```rust #[tokio::test] async fn test_cache_invalidate() { let cache = Cache::new(1024 as u64); assert_eq!(cache.get(&0), None); cache.insert(0, 1).await; assert_eq!(cache.get(&0), Some(1)); cache.invalidate_all(); assert_eq!(cache.get(&0), None); } ``` This fails in the last line (the get returns `Some(1)`) Is this a bug or am I misreading the documentation?
2022-06-25T16:59:57
0.8
59542ca6db81618097210b2d5f0612ee61a492fa
[ "dash::cache::tests::invalidate_all", "future::cache::tests::invalidate_all", "future::cache::tests::invalidate_all_without_sync", "sync::cache::tests::invalidate_all", "sync::segment::tests::invalidate_all" ]
[ "cht::map::bucket::tests::get_insert_remove", "cht::segment::tests::concurrent_overlapped_growth", "cht::segment::tests::concurrent_insert_if_not_present", "cht::segment::tests::concurrent_insert_with_or_modify", "cht::segment::tests::concurrent_overlapped_insertion", "cht::segment::tests::concurrent_over...
[]
[]
Y2Z/monolith
321
Y2Z__monolith-321
[ "320" ]
7c61b462ddd8f3b0e982a8cba229b1ddf371457f
diff --git a/src/main.rs b/src/main.rs --- a/src/main.rs +++ b/src/main.rs @@ -175,12 +175,21 @@ fn main() { { match retrieve_asset(&mut cache, &client, &target_url, &target_url, &options, 0) { Ok((retrieved_data, final_url, media_type, charset)) => { - // Make sure the media type is text/html - if !media_type.eq_ignore_ascii_case("text/html") { - if !options.silent { - eprintln!("Unsupported document media type"); - } - process::exit(1); + // Provide output as text without processing it, the way browsers do + if !media_type.eq_ignore_ascii_case("text/html") + && !media_type.eq_ignore_ascii_case("application/xhtml+xml") + { + // Define output + let mut output = + Output::new(&options.output).expect("Could not prepare output"); + + // Write retrieved data into STDOUT or file + output + .write(&retrieved_data) + .expect("Could not write output"); + + // Nothing else to do past this point + process::exit(0); } if options diff --git a/src/main.rs b/src/main.rs --- a/src/main.rs +++ b/src/main.rs @@ -324,6 +333,6 @@ fn main() { // Define output let mut output = Output::new(&options.output).expect("Could not prepare output"); - // Write result into stdout or file - output.write(&result).expect("Could not write HTML output"); + // Write result into STDOUT or file + output.write(&result).expect("Could not write output"); }
diff --git a/tests/cli/data_url.rs b/tests/cli/data_url.rs --- a/tests/cli/data_url.rs +++ b/tests/cli/data_url.rs @@ -196,17 +196,14 @@ mod failing { let mut cmd = Command::cargo_bin(env!("CARGO_PKG_NAME")).unwrap(); let out = cmd.arg("data:,Hello%2C%20World!").output().unwrap(); - // STDERR should contain error description - assert_eq!( - String::from_utf8_lossy(&out.stderr), - "Unsupported document media type\n" - ); + // STDERR should be empty + assert_eq!(String::from_utf8_lossy(&out.stderr), ""); - // STDOUT should contain HTML - assert_eq!(String::from_utf8_lossy(&out.stdout), ""); + // STDOUT should contain text + assert_eq!(String::from_utf8_lossy(&out.stdout), "Hello, World!\n"); - // Exit code should be 1 - out.assert().code(1); + // Exit code should be 0 + out.assert().code(0); } #[test] diff --git a/tests/cli/data_url.rs b/tests/cli/data_url.rs --- a/tests/cli/data_url.rs +++ b/tests/cli/data_url.rs @@ -221,7 +218,7 @@ mod failing { // STDERR should be empty assert_eq!(String::from_utf8_lossy(&out.stderr), ""); - // STDOUT should contain HTML with no JS in it + // STDOUT should contain HTML without contents of local JS file assert_eq!( String::from_utf8_lossy(&out.stdout), "<html><head><script src=\"data:application/javascript;base64,\"></script></head><body></body></html>\n"
Unsupported document media type [This check](https://github.com/Y2Z/monolith/blob/7c61b462ddd8f3b0e982a8cba229b1ddf371457f/src/main.rs#L183) is probably too aggressive and definitely lacks any mechanism to override it. ``` $ monolith https://www.devever.net/~hl/strawberry_farewell https://www.devever.net/~hl/strawberry_farewell Unsupported document media type ```
Good point, if the browser is capable of opening the page, monolith should save it without complaints. The check will be removed from the next release, thank you for reporting this bug.
2022-11-10T21:59:48
2.6
7c61b462ddd8f3b0e982a8cba229b1ddf371457f
[ "cli::data_url::failing::bad_input_data_url" ]
[ "cli::basic::failing::bad_input_empty_target", "cli::basic::passing::print_help_information", "cli::basic::passing::print_version", "cli::base_url::passing::keep_existing_when_none_provided", "cli::data_url::passing::remove_css_from_data_url", "cli::basic::passing::stdin_target_input", "cli::data_url::p...
[]
[]
moonrepo/moon
399
moonrepo__moon-399
[ "398" ]
3b04fcbf7af702e14b5bc8c574ebebe4446750e9
diff --git /dev/null b/.yarn/versions/4d057106.yml new file mode 100644 --- /dev/null +++ b/.yarn/versions/4d057106.yml @@ -0,0 +1,9 @@ +releases: + "@moonrepo/cli": patch + "@moonrepo/core-linux-arm64-gnu": patch + "@moonrepo/core-linux-arm64-musl": patch + "@moonrepo/core-linux-x64-gnu": patch + "@moonrepo/core-linux-x64-musl": patch + "@moonrepo/core-macos-arm64": patch + "@moonrepo/core-macos-x64": patch + "@moonrepo/core-windows-x64-msvc": patch diff --git a/crates/cli/src/app.rs b/crates/cli/src/app.rs --- a/crates/cli/src/app.rs +++ b/crates/cli/src/app.rs @@ -289,10 +289,15 @@ pub enum Commands { )] Check { #[arg(help = "List of project IDs to explicitly check")] + #[clap(group = "projects")] ids: Vec<ProjectID>, #[arg(long, help = "Generate a run report for the current actions")] report: bool, + + #[arg(long, help = "Run check for all projects in the workspace")] + #[clap(group = "projects")] + all: bool, }, // moon ci diff --git a/crates/cli/src/commands/check.rs b/crates/cli/src/commands/check.rs --- a/crates/cli/src/commands/check.rs +++ b/crates/cli/src/commands/check.rs @@ -1,12 +1,16 @@ use crate::commands::run::{run, RunOptions}; use crate::helpers::load_workspace; +use moon_logger::trace; use moon_project::Project; use std::env; pub struct CheckOptions { pub report: bool, + pub all: bool, } +const LOG_TARGET: &str = "moon:check"; + pub async fn check( project_ids: &Vec<String>, options: CheckOptions, diff --git a/crates/cli/src/commands/check.rs b/crates/cli/src/commands/check.rs --- a/crates/cli/src/commands/check.rs +++ b/crates/cli/src/commands/check.rs @@ -15,9 +19,18 @@ pub async fn check( let mut projects: Vec<Project> = vec![]; // Load projects - if project_ids.is_empty() { + if options.all { + trace!(target: LOG_TARGET, "Running check on all projects"); + projects.extend(workspace.projects.all_projects()?); + } else if project_ids.is_empty() { + trace!(target: LOG_TARGET, "Loading from path"); 
projects.push(workspace.projects.load_from_path(env::current_dir()?)?); } else { + trace!( + target: LOG_TARGET, + "Running for specific projects: {}", + project_ids.join(", ") + ); for id in project_ids { projects.push(workspace.projects.load(id)?); } diff --git a/crates/cli/src/lib.rs b/crates/cli/src/lib.rs --- a/crates/cli/src/lib.rs +++ b/crates/cli/src/lib.rs @@ -80,7 +80,16 @@ pub async fn run_cli() { }) .await } - Commands::Check { ids, report } => check(ids, CheckOptions { report: *report }).await, + Commands::Check { ids, report, all } => { + check( + ids, + CheckOptions { + report: *report, + all: *all, + }, + ) + .await + } Commands::Clean { lifetime } => { clean(CleanOptions { cache_lifetime: lifetime.to_owned(), diff --git a/crates/project-graph/src/graph.rs b/crates/project-graph/src/graph.rs --- a/crates/project-graph/src/graph.rs +++ b/crates/project-graph/src/graph.rs @@ -199,6 +199,14 @@ impl ProjectGraph { Ok(()) } + /// Return a list of all projects in the graph. + #[track_caller] + pub fn all_projects(&self) -> Result<Vec<Project>, ProjectError> { + self.load_all()?; + let graph = self.graph.read().expect(READ_ERROR); + Ok(graph.raw_nodes().iter().map(|n| n.weight.clone()).collect()) + } + /// Find and return a project based on the initial path location. /// This will attempt to find the closest matching project source. #[track_caller]
diff --git a/crates/cli/tests/check_test.rs b/crates/cli/tests/check_test.rs --- a/crates/cli/tests/check_test.rs +++ b/crates/cli/tests/check_test.rs @@ -56,11 +56,31 @@ fn runs_tasks_from_multiple_project() { assert!(predicate::str::contains("depsA:dependencyOrder").eval(&output)); // dep of noop } +#[test] +fn runs_for_all_projects_even_when_not_in_root_dir() { + let fixture = create_sandbox_with_git("cases"); + let assert = create_moon_command(fixture.path().join("base")) + .arg("check") + .arg("--all") + .assert(); + assert.stderr(predicate::str::contains("all projects")); +} + +#[test] +fn runs_on_all_projects_from_root_directory() { + let fixture = create_sandbox_with_git("cases"); + let assert = create_moon_command(fixture.path()) + .arg("check") + .arg("--all") + .assert(); + assert.stderr(predicate::str::contains("all projects")); +} + mod reports { use super::*; #[test] - fn doesnt_create_a_report_by_default() { + fn does_not_create_a_report_by_default() { let fixture = create_sandbox_with_git("cases"); create_moon_command(fixture.path())
Run `check` on all projects if no project is specified I am writing a pre-push hook that builds and tests all projects. The `moon check` is the perfect command for this. However I have to list all the projects projects manually. This is error prone and I have to remember to update the command everytime I add a project. I propose a `--all` flag to the `moon check` command that will run the command on all the projects configured in the `.moon/workspace.yml` file.
Looking into the source code, I think this should be possible with just `moon check` (with no args). However that fails with the following error: <img width="484" alt="image" src="https://user-images.githubusercontent.com/60938164/198818730-4186058e-eb44-46d6-91b9-4d0dfb7fed23.png">
2022-10-29T17:32:47
1.0
3b04fcbf7af702e14b5bc8c574ebebe4446750e9
[ "runs_on_all_projects_from_root_directory", "runs_for_all_projects_even_when_not_in_root_dir" ]
[ "runs_tasks_in_project_using_cwd", "reports::does_not_create_a_report_by_default", "runs_tasks_in_project", "reports::creates_report_when_option_passed" ]
[ "runs_tasks_from_multiple_project" ]
[]