repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/pip_debug.rs | crates/uv/tests/it/pip_debug.rs | use crate::common::{TestContext, uv_snapshot};
/// `uv pip debug` is intentionally unsupported; verify it exits with code 2 and
/// points users at `uvx pip debug` instead.
#[test]
fn debug_warn() {
let context = TestContext::new("3.12");
uv_snapshot!(context.pip_debug(), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: pip's `debug` is unsupported (consider using `uvx pip debug` instead)
"
);
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/workspace.rs | crates/uv/tests/it/workspace.rs | use std::collections::BTreeMap;
use std::env;
use std::path::PathBuf;
use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::fixture::{FileWriteStr, PathChild};
use assert_fs::prelude::FileTouch;
use indoc::indoc;
use insta::{assert_json_snapshot, assert_snapshot};
use serde::{Deserialize, Serialize};
use crate::common::{TestContext, copy_dir_ignore, make_project, uv_snapshot};
/// Absolute path to the `test/workspaces` fixture directory, located two levels
/// above the crate's current working directory.
fn workspaces_dir() -> PathBuf {
    let crate_dir = env::current_dir().unwrap();
    let repo_root = crate_dir.parent().unwrap().parent().unwrap();
    repo_root.join("test").join("workspaces")
}
/// Sync the `bird-feeder` example nested under the `albatross-in-example` workspace, then sync
/// again and expect a no-op audit.
#[test]
#[cfg(feature = "pypi")]
fn test_albatross_in_examples_bird_feeder() {
    let context = TestContext::new("3.12");
    let workspace = context.temp_dir.child("workspace");
    copy_dir_ignore(workspaces_dir(), &workspace).unwrap();
    let current_dir = workspace
        .join("albatross-in-example")
        .join("examples")
        .join("bird-feeder");
    // First sync creates the environment and installs the example and its dependency.
    // (Restored `&current_dir`, which had been corrupted to `¤t_dir` by entity decoding.)
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
    Creating virtual environment at: .venv
    Resolved 2 packages in [TIME]
    Prepared 2 packages in [TIME]
    Installed 2 packages in [TIME]
    + bird-feeder==1.0.0 (from file://[TEMP_DIR]/workspace/albatross-in-example/examples/bird-feeder)
    + iniconfig==2.0.0
    "
    );
    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));
    // Second sync is a no-op: same resolution, environment merely audited.
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 2 packages in [TIME]
    Audited 2 packages in [TIME]
    "
    );
    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));
}
/// Sync the `albatross-in-example` project root, then sync again and expect a no-op audit.
#[test]
#[cfg(feature = "pypi")]
fn test_albatross_in_examples() {
    let context = TestContext::new("3.12");
    let workspace = context.temp_dir.child("workspace");
    copy_dir_ignore(workspaces_dir(), &workspace).unwrap();
    let current_dir = workspace.join("albatross-in-example");
    // First sync installs the project and its dependency.
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
    Creating virtual environment at: .venv
    Resolved 2 packages in [TIME]
    Prepared 2 packages in [TIME]
    Installed 2 packages in [TIME]
    + albatross==0.1.0 (from file://[TEMP_DIR]/workspace/albatross-in-example)
    + iniconfig==2.0.0
    "
    );
    context.assert_file(current_dir.join("check_installed_albatross.py"));
    // Second sync is a no-op audit.
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 2 packages in [TIME]
    Audited 2 packages in [TIME]
    "
    );
    context.assert_file(current_dir.join("check_installed_albatross.py"));
}
/// Sync the standalone `albatross-just-project`, then sync again and expect a no-op audit.
#[test]
#[cfg(feature = "pypi")]
fn test_albatross_just_project() {
    let context = TestContext::new("3.12");
    let workspace = context.temp_dir.child("workspace");
    copy_dir_ignore(workspaces_dir(), &workspace).unwrap();
    let current_dir = workspace.join("albatross-just-project");
    // First sync installs the project and its dependency.
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
    Creating virtual environment at: .venv
    Resolved 2 packages in [TIME]
    Prepared 2 packages in [TIME]
    Installed 2 packages in [TIME]
    + albatross==0.1.0 (from file://[TEMP_DIR]/workspace/albatross-just-project)
    + iniconfig==2.0.0
    "
    );
    context.assert_file(current_dir.join("check_installed_albatross.py"));
    // Second sync is a no-op audit.
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 2 packages in [TIME]
    Audited 2 packages in [TIME]
    "
    );
    context.assert_file(current_dir.join("check_installed_albatross.py"));
}
/// Sync `albatross-project-in-excluded` from several directories: the root, an excluded
/// project (which gets its own environment), and an unmanaged member (which errors).
#[test]
#[cfg(feature = "pypi")]
fn test_albatross_project_in_excluded() {
    let context = TestContext::new("3.12");
    let workspace = context.temp_dir.child("workspace");
    copy_dir_ignore(workspaces_dir(), &workspace).unwrap();
    let current_dir = workspace.join("albatross-project-in-excluded");
    // Syncing from the root installs the root project and its dependency.
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
    Creating virtual environment at: .venv
    Resolved 2 packages in [TIME]
    Prepared 2 packages in [TIME]
    Installed 2 packages in [TIME]
    + albatross==0.1.0 (from file://[TEMP_DIR]/workspace/albatross-project-in-excluded)
    + iniconfig==2.0.0
    "
    );
    // The excluded `bird-feeder` project is its own root with its own `.venv`.
    let current_dir = workspace
        .join("albatross-project-in-excluded")
        .join("excluded")
        .join("bird-feeder");
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
    Creating virtual environment at: .venv
    Resolved 2 packages in [TIME]
    Prepared 1 package in [TIME]
    Installed 2 packages in [TIME]
    + bird-feeder==1.0.0 (from file://[TEMP_DIR]/workspace/albatross-project-in-excluded/excluded/bird-feeder)
    + iniconfig==2.0.0
    "
    );
    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));
    // Re-syncing is a no-op audit.
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 2 packages in [TIME]
    Audited 2 packages in [TIME]
    "
    );
    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));
    // `packages/seeds` is marked as unmanaged, so syncing from it must fail.
    let current_dir = workspace
        .join("albatross-project-in-excluded")
        .join("packages")
        .join("seeds");
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: The project is marked as unmanaged: `[TEMP_DIR]/workspace/albatross-project-in-excluded/packages/seeds`
    "
    );
}
/// Sync `albatross-root-workspace` from its root: all workspace members and their
/// dependencies land in a single shared environment.
#[test]
#[cfg(feature = "pypi")]
fn test_albatross_root_workspace() {
    let context = TestContext::new("3.12");
    let workspace = context.temp_dir.child("workspace");
    copy_dir_ignore(workspaces_dir(), &workspace).unwrap();
    let current_dir = workspace.join("albatross-root-workspace");
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
    Creating virtual environment at: .venv
    Resolved 5 packages in [TIME]
    Prepared 5 packages in [TIME]
    Installed 5 packages in [TIME]
    + albatross==0.1.0 (from file://[TEMP_DIR]/workspace/albatross-root-workspace)
    + bird-feeder==1.0.0 (from file://[TEMP_DIR]/workspace/albatross-root-workspace/packages/bird-feeder)
    + idna==3.6
    + iniconfig==2.0.0
    + seeds==1.0.0 (from file://[TEMP_DIR]/workspace/albatross-root-workspace/packages/seeds)
    "
    );
    context.assert_file(current_dir.join("check_installed_albatross.py"));
    // Second sync is a no-op audit.
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 5 packages in [TIME]
    Audited 5 packages in [TIME]
    "
    );
    context.assert_file(current_dir.join("check_installed_albatross.py"));
}
/// Sync from the `bird-feeder` member of `albatross-root-workspace`: the environment is
/// created at the workspace root, and only the member's dependency closure is installed.
#[test]
#[cfg(feature = "pypi")]
fn test_albatross_root_workspace_bird_feeder() {
    let context = TestContext::new("3.12");
    let workspace = context.temp_dir.child("workspace");
    copy_dir_ignore(workspaces_dir(), &workspace).unwrap();
    let current_dir = workspace
        .join("albatross-root-workspace")
        .join("packages")
        .join("bird-feeder");
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
    Creating virtual environment at: [TEMP_DIR]/workspace/albatross-root-workspace/.venv
    Resolved 5 packages in [TIME]
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
    + bird-feeder==1.0.0 (from file://[TEMP_DIR]/workspace/albatross-root-workspace/packages/bird-feeder)
    + idna==3.6
    + iniconfig==2.0.0
    + seeds==1.0.0 (from file://[TEMP_DIR]/workspace/albatross-root-workspace/packages/seeds)
    "
    );
    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));
    // Second sync is a no-op audit.
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 5 packages in [TIME]
    Audited 4 packages in [TIME]
    "
    );
    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));
}
/// Sync `albatross-root-workspace` from within a member and check the albatross script.
///
/// NOTE(review): this test changes into `packages/bird-feeder` (mirroring
/// `test_albatross_root_workspace_bird_feeder`) yet asserts the albatross check script —
/// confirm the intended working directory against the fixture layout.
#[test]
#[cfg(feature = "pypi")]
fn test_albatross_root_workspace_albatross() {
    let context = TestContext::new("3.12");
    let workspace = context.temp_dir.child("workspace");
    copy_dir_ignore(workspaces_dir(), &workspace).unwrap();
    let current_dir = workspace
        .join("albatross-root-workspace")
        .join("packages")
        .join("bird-feeder");
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
    Creating virtual environment at: [TEMP_DIR]/workspace/albatross-root-workspace/.venv
    Resolved 5 packages in [TIME]
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
    + bird-feeder==1.0.0 (from file://[TEMP_DIR]/workspace/albatross-root-workspace/packages/bird-feeder)
    + idna==3.6
    + iniconfig==2.0.0
    + seeds==1.0.0 (from file://[TEMP_DIR]/workspace/albatross-root-workspace/packages/seeds)
    "
    );
    context.assert_file(current_dir.join("check_installed_albatross.py"));
    // Second sync is a no-op audit.
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 5 packages in [TIME]
    Audited 4 packages in [TIME]
    "
    );
    context.assert_file(current_dir.join("check_installed_albatross.py"));
}
/// Sync from the `bird-feeder` member of the virtual `albatross-virtual-workspace`: the
/// shared environment lives at the workspace root.
#[test]
#[cfg(feature = "pypi")]
fn test_albatross_virtual_workspace() {
    let context = TestContext::new("3.12");
    let workspace = context.temp_dir.child("workspace");
    copy_dir_ignore(workspaces_dir(), &workspace).unwrap();
    let current_dir = workspace
        .join("albatross-virtual-workspace")
        .join("packages")
        .join("bird-feeder");
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
    Creating virtual environment at: [TEMP_DIR]/workspace/albatross-virtual-workspace/.venv
    Resolved 7 packages in [TIME]
    Prepared 5 packages in [TIME]
    Installed 5 packages in [TIME]
    + anyio==4.3.0
    + bird-feeder==1.0.0 (from file://[TEMP_DIR]/workspace/albatross-virtual-workspace/packages/bird-feeder)
    + idna==3.6
    + seeds==1.0.0 (from file://[TEMP_DIR]/workspace/albatross-virtual-workspace/packages/seeds)
    + sniffio==1.3.1
    "
    );
    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));
    // Second sync is a no-op audit.
    uv_snapshot!(context.filters(), context.sync().current_dir(&current_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 7 packages in [TIME]
    Audited 5 packages in [TIME]
    "
    );
    context.assert_file(current_dir.join("check_installed_bird_feeder.py"));
}
/// Check that `uv run --package` works in a virtual workspace.
#[test]
#[cfg(feature = "pypi")]
fn test_uv_run_with_package_virtual_workspace() -> Result<()> {
let context = TestContext::new("3.12");
let work_dir = context.temp_dir.join("albatross-virtual-workspace");
copy_dir_ignore(
workspaces_dir().join("albatross-virtual-workspace"),
&work_dir,
)?;
// Normalize the interpreter path in output so the snapshot is machine-independent.
let mut filters = context.filters();
filters.push((
r"Using Python 3.12.\[X\] interpreter at: .*",
"Using Python 3.12.[X] interpreter at: [PYTHON]",
));
// Run from the `bird-feeder` member.
uv_snapshot!(filters, context
.run()
.arg("--package")
.arg("bird-feeder")
.arg("packages/bird-feeder/check_installed_bird_feeder.py")
.current_dir(&work_dir), @r"
success: true
exit_code: 0
----- stdout -----
Success
----- stderr -----
warning: `VIRTUAL_ENV=[VENV]/` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
Resolved 7 packages in [TIME]
Prepared 5 packages in [TIME]
Installed 5 packages in [TIME]
+ anyio==4.3.0
+ bird-feeder==1.0.0 (from file://[TEMP_DIR]/albatross-virtual-workspace/packages/bird-feeder)
+ idna==3.6
+ seeds==1.0.0 (from file://[TEMP_DIR]/albatross-virtual-workspace/packages/seeds)
+ sniffio==1.3.1
"
);
// Running the `albatross` package afterwards reuses the environment and only installs the delta.
uv_snapshot!(context.filters(), context
.run()
.arg("--package")
.arg("albatross")
.arg("packages/albatross/check_installed_albatross.py")
.current_dir(&work_dir), @r"
success: true
exit_code: 0
----- stdout -----
Success
----- stderr -----
warning: `VIRTUAL_ENV=[VENV]/` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead
Resolved 7 packages in [TIME]
Prepared 2 packages in [TIME]
Installed 2 packages in [TIME]
+ albatross==0.1.0 (from file://[TEMP_DIR]/albatross-virtual-workspace/packages/albatross)
+ iniconfig==2.0.0
"
);
Ok(())
}
/// Check that `uv run` works from a virtual workspace root, which should sync all packages in the
/// workspace.
#[test]
#[cfg(feature = "pypi")]
fn test_uv_run_virtual_workspace_root() -> Result<()> {
let context = TestContext::new("3.12");
let work_dir = context.temp_dir.join("albatross-virtual-workspace");
copy_dir_ignore(
workspaces_dir().join("albatross-virtual-workspace"),
&work_dir,
)?;
// Running from the root installs all seven packages (every member plus transitive deps).
uv_snapshot!(context.filters(), context
.run()
.arg("packages/albatross/check_installed_albatross.py")
.current_dir(&work_dir), @r"
success: true
exit_code: 0
----- stdout -----
Success
----- stderr -----
warning: `VIRTUAL_ENV=[VENV]/` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
Resolved 7 packages in [TIME]
Prepared 7 packages in [TIME]
Installed 7 packages in [TIME]
+ albatross==0.1.0 (from file://[TEMP_DIR]/albatross-virtual-workspace/packages/albatross)
+ anyio==4.3.0
+ bird-feeder==1.0.0 (from file://[TEMP_DIR]/albatross-virtual-workspace/packages/bird-feeder)
+ idna==3.6
+ iniconfig==2.0.0
+ seeds==1.0.0 (from file://[TEMP_DIR]/albatross-virtual-workspace/packages/seeds)
+ sniffio==1.3.1
"
);
Ok(())
}
/// Check that `uv run --package` works in a root workspace.
#[test]
#[cfg(feature = "pypi")]
fn test_uv_run_with_package_root_workspace() -> Result<()> {
let context = TestContext::new("3.12");
let work_dir = context.temp_dir.join("albatross-root-workspace");
copy_dir_ignore(workspaces_dir().join("albatross-root-workspace"), &work_dir)?;
// Normalize the interpreter path in output so the snapshot is machine-independent.
let mut filters = context.filters();
filters.push((
r"Using Python 3.12.\[X\] interpreter at: .*",
"Using Python 3.12.[X] interpreter at: [PYTHON]",
));
// Run the `bird-feeder` member first; this creates the shared environment.
uv_snapshot!(filters, context
.run()
.arg("--package")
.arg("bird-feeder")
.arg("packages/bird-feeder/check_installed_bird_feeder.py")
.current_dir(&work_dir), @r"
success: true
exit_code: 0
----- stdout -----
Success
----- stderr -----
warning: `VIRTUAL_ENV=[VENV]/` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
Resolved 5 packages in [TIME]
Prepared 4 packages in [TIME]
Installed 4 packages in [TIME]
+ bird-feeder==1.0.0 (from file://[TEMP_DIR]/albatross-root-workspace/packages/bird-feeder)
+ idna==3.6
+ iniconfig==2.0.0
+ seeds==1.0.0 (from file://[TEMP_DIR]/albatross-root-workspace/packages/seeds)
"
);
// Running the root `albatross` package afterwards only installs the delta.
uv_snapshot!(context.filters(), context
.run()
.arg("--package")
.arg("albatross")
.arg("check_installed_albatross.py")
.current_dir(&work_dir), @r"
success: true
exit_code: 0
----- stdout -----
Success
----- stderr -----
warning: `VIRTUAL_ENV=[VENV]/` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead
Resolved 5 packages in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ albatross==0.1.0 (from file://[TEMP_DIR]/albatross-root-workspace)
"
);
Ok(())
}
/// Check that `uv run --isolated` creates isolated virtual environments.
#[test]
#[cfg(feature = "pypi")]
fn test_uv_run_isolate() -> Result<()> {
let context = TestContext::new("3.12");
let work_dir = context.temp_dir.join("albatross-root-workspace");
copy_dir_ignore(workspaces_dir().join("albatross-root-workspace"), &work_dir)?;
// Normalize the interpreter path in output so the snapshot is machine-independent.
let mut filters = context.filters();
filters.push((
r"Using Python 3.12.\[X\] interpreter at: .*",
"Using Python 3.12.[X] interpreter at: [PYTHON]",
));
// Install the root package.
uv_snapshot!(context.filters(), context
.run()
.arg("--package")
.arg("albatross")
.arg("check_installed_albatross.py")
.current_dir(&work_dir), @r"
success: true
exit_code: 0
----- stdout -----
Success
----- stderr -----
warning: `VIRTUAL_ENV=[VENV]/` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
Resolved 5 packages in [TIME]
Prepared 5 packages in [TIME]
Installed 5 packages in [TIME]
+ albatross==0.1.0 (from file://[TEMP_DIR]/albatross-root-workspace)
+ bird-feeder==1.0.0 (from file://[TEMP_DIR]/albatross-root-workspace/packages/bird-feeder)
+ idna==3.6
+ iniconfig==2.0.0
+ seeds==1.0.0 (from file://[TEMP_DIR]/albatross-root-workspace/packages/seeds)
"
);
// Run in `bird-feeder`. We shouldn't be able to import `albatross`, but we _can_ due to our
// virtual environment semantics. Specifically, we only make the changes necessary to run a
// given command, so we don't remove `albatross` from the environment.
uv_snapshot!(filters, context
.run()
.arg("--package")
.arg("bird-feeder")
.arg("check_installed_albatross.py")
.current_dir(&work_dir), @r"
success: true
exit_code: 0
----- stdout -----
Success
----- stderr -----
warning: `VIRTUAL_ENV=[VENV]/` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead
Resolved 5 packages in [TIME]
Audited 4 packages in [TIME]
"
);
// If we `--isolated`, though, we use an isolated virtual environment, so `albatross` is not
// available.
// TODO(charlie): This should show the resolution output, but `--isolated` is coupled to
// `--no-project` right now.
uv_snapshot!(filters, context
.run()
.arg("--isolated")
.arg("--package")
.arg("bird-feeder")
.arg("check_installed_albatross.py")
.current_dir(&work_dir), @r#"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
Installed 4 packages in [TIME]
+ bird-feeder==1.0.0 (from file://[TEMP_DIR]/albatross-root-workspace/packages/bird-feeder)
+ idna==3.6
+ iniconfig==2.0.0
+ seeds==1.0.0 (from file://[TEMP_DIR]/albatross-root-workspace/packages/seeds)
Traceback (most recent call last):
File "[TEMP_DIR]/albatross-root-workspace/check_installed_albatross.py", line 1, in <module>
from albatross import fly
ModuleNotFoundError: No module named 'albatross'
"#
);
Ok(())
}
/// Lock the given fixture workspace from each of the given subdirectories in turn and
/// assert that every invocation produces an identical `uv.lock`.
fn workspace_lock_idempotence(workspace: &str, subdirectories: &[&str]) -> Result<()> {
    let mut baseline: Option<String> = None;
    for subdir in subdirectories {
        // Each resolution runs in a fresh copy of the fixture workspace.
        let context = TestContext::new("3.12");
        let work_dir = context.temp_dir.join(workspace);
        copy_dir_ignore(workspaces_dir().join(workspace), &work_dir)?;
        context
            .lock()
            .current_dir(work_dir.join(subdir))
            .assert()
            .success();
        let lockfile = fs_err::read_to_string(work_dir.join("uv.lock"))?;
        match &baseline {
            // Every subsequent lockfile must match the first one produced.
            Some(expected) => assert_eq!(expected, &lockfile),
            None => baseline = Some(lockfile),
        }
    }
    Ok(())
}
/// Check that the resolution is the same no matter where in the workspace we are.
#[test]
#[cfg(feature = "pypi")]
fn workspace_lock_idempotence_root_workspace() -> Result<()> {
// Lock from the root and from each member package; all lockfiles must agree.
workspace_lock_idempotence(
"albatross-root-workspace",
&[".", "packages/bird-feeder", "packages/seeds"],
)?;
Ok(())
}
/// Check that the resolution is the same no matter where in the workspace we are, and that locking
/// works even if there is no root project.
#[test]
#[cfg(feature = "pypi")]
fn workspace_lock_idempotence_virtual_workspace() -> Result<()> {
// Lock from the virtual root and from each member; all lockfiles must agree.
workspace_lock_idempotence(
"albatross-virtual-workspace",
&[
".",
"packages/albatross",
"packages/bird-feeder",
"packages/seeds",
],
)?;
Ok(())
}
/// Extract just the sources from the lockfile, to test path resolution.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
struct SourceLock {
    package: Vec<Package>,
}

impl SourceLock {
    /// Map each locked package name to its raw `source` table, ordered by name.
    fn sources(&self) -> BTreeMap<String, toml::Value> {
        let mut by_name = BTreeMap::new();
        for entry in &self.package {
            by_name.insert(entry.name.clone(), entry.source.clone());
        }
        by_name
    }
}
/// A single `[[package]]` entry from `uv.lock`, reduced to the fields under test.
#[derive(Deserialize, Serialize, Debug, PartialEq)]
struct Package {
// Package name as written in the lockfile.
name: String,
// The raw `source` table (e.g. `{ editable = "packages/a" }` in the snapshots below).
source: toml::Value,
}
/// Test path dependencies from one workspace into another.
///
/// We have a main workspace with packages `a` and `b`, and a second workspace with `c`, `d` and
/// `e`. We have `a -> b`, `b -> c`, `c -> d`. `e` should not be installed.
#[test]
fn workspace_to_workspace_paths_dependencies() -> Result<()> {
let context = TestContext::new("3.12");
// Build the main workspace ...
let main_workspace = context.temp_dir.child("main-workspace");
main_workspace
.child("pyproject.toml")
.write_str(indoc! {r#"
[tool.uv.workspace]
members = ["packages/*"]
"#})?;
// ... with a ...
let deps = indoc! {r#"
dependencies = ["b"]
[tool.uv.sources]
b = { workspace = true }
"#};
make_project(&main_workspace.join("packages").join("a"), "a", deps)?;
// ... and b.
let deps = indoc! {r#"
dependencies = ["c"]
[tool.uv.sources]
c = { path = "../../../other-workspace/packages/c", editable = true }
"#};
make_project(&main_workspace.join("packages").join("b"), "b", deps)?;
// Build the second workspace ...
let other_workspace = context.temp_dir.child("other-workspace");
other_workspace
.child("pyproject.toml")
.write_str(indoc! {r#"
[tool.uv.workspace]
members = ["packages/*"]
"#})?;
// ... with c ...
let deps = indoc! {r#"
dependencies = ["d"]
[tool.uv.sources]
d = { workspace = true }
"#};
make_project(&other_workspace.join("packages").join("c"), "c", deps)?;
// ... and d ...
let deps = indoc! {r"
dependencies = []
"};
make_project(&other_workspace.join("packages").join("d"), "d", deps)?;
// ... and e.
let deps = indoc! {r#"
dependencies = ["numpy>=2.0.0,<3"]
"#};
make_project(&other_workspace.join("packages").join("e"), "e", deps)?;
// Locking from the main workspace crosses into `other-workspace` via the path source on `b`;
// `e` is unreachable from `a`/`b` and must not appear (only 4 packages resolve).
uv_snapshot!(context.filters(), context.lock().current_dir(&main_workspace), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Resolved 4 packages in [TIME]
"###
);
let lock: SourceLock =
toml::from_str(&fs_err::read_to_string(main_workspace.join("uv.lock"))?)?;
// All sources are editable paths expressed relative to the main workspace root.
assert_json_snapshot!(lock.sources(), @r###"
{
"a": {
"editable": "packages/a"
},
"b": {
"editable": "packages/b"
},
"c": {
"editable": "../other-workspace/packages/c"
},
"d": {
"editable": "../other-workspace/packages/d"
}
}
"###);
Ok(())
}
/// Ensure that workspace discovery errors if a member is missing a `pyproject.toml`.
#[test]
fn workspace_empty_member() -> Result<()> {
let context = TestContext::new("3.12");
// Build the main workspace ...
let workspace = context.temp_dir.child("workspace");
workspace.child("pyproject.toml").write_str(indoc! {r#"
[tool.uv.workspace]
members = ["packages/*"]
"#})?;
// ... with a ...
let deps = indoc! {r#"
dependencies = ["b"]
[tool.uv.sources]
b = { workspace = true }
"#};
make_project(&workspace.join("packages").join("a"), "a", deps)?;
// ... and b.
let deps = indoc! {r"
"};
make_project(&workspace.join("packages").join("b"), "b", deps)?;
// ... and an empty c.
fs_err::create_dir_all(workspace.join("packages").join("c"))?;
uv_snapshot!(context.filters(), context.lock().current_dir(&workspace), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Workspace member `[TEMP_DIR]/workspace/packages/c` is missing a `pyproject.toml` (matches: `packages/*`)
"###
);
Ok(())
}
/// Ensure that workspace discovery ignores hidden directories.
#[test]
fn workspace_hidden_files() -> Result<()> {
let context = TestContext::new("3.12");
// Build the main workspace ...
let workspace = context.temp_dir.child("workspace");
workspace.child("pyproject.toml").write_str(indoc! {r#"
[tool.uv.workspace]
members = ["packages/*"]
"#})?;
// ... with a ...
let deps = indoc! {r#"
dependencies = ["b"]
[tool.uv.sources]
b = { workspace = true }
"#};
make_project(&workspace.join("packages").join("a"), "a", deps)?;
// ... and b.
let deps = indoc! {r"
"};
make_project(&workspace.join("packages").join("b"), "b", deps)?;
// ... and a hidden c.
fs_err::create_dir_all(workspace.join("packages").join(".c"))?;
// The hidden, empty `.c` directory is skipped: only `a` and `b` resolve.
uv_snapshot!(context.filters(), context.lock().current_dir(&workspace), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Resolved 2 packages in [TIME]
"###
);
let lock: SourceLock = toml::from_str(&fs_err::read_to_string(workspace.join("uv.lock"))?)?;
assert_json_snapshot!(lock.sources(), @r###"
{
"a": {
"editable": "packages/a"
},
"b": {
"editable": "packages/b"
}
}
"###);
Ok(())
}
/// Ensure that workspace discovery accepts valid hidden directories.
#[test]
fn workspace_hidden_member() -> Result<()> {
let context = TestContext::new("3.12");
// Build the main workspace ...
let workspace = context.temp_dir.child("workspace");
workspace.child("pyproject.toml").write_str(indoc! {r#"
[tool.uv.workspace]
members = ["packages/*"]
"#})?;
// ... with a ...
let deps = indoc! {r#"
dependencies = ["b"]
[tool.uv.sources]
b = { workspace = true }
"#};
make_project(&workspace.join("packages").join("a"), "a", deps)?;
// ... and b.
let deps = indoc! {r#"
dependencies = ["c"]
[tool.uv.sources]
c = { workspace = true }
"#};
make_project(&workspace.join("packages").join("b"), "b", deps)?;
// ... and a hidden (but valid) .c.
let deps = indoc! {r"
dependencies = []
"};
make_project(&workspace.join("packages").join(".c"), "c", deps)?;
// `.c` contains a valid project named `c`, so it is discovered despite being hidden.
uv_snapshot!(context.filters(), context.lock().current_dir(&workspace), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Resolved 3 packages in [TIME]
"###
);
let lock: SourceLock = toml::from_str(&fs_err::read_to_string(workspace.join("uv.lock"))?)?;
assert_json_snapshot!(lock.sources(), @r###"
{
"a": {
"editable": "packages/a"
},
"b": {
"editable": "packages/b"
},
"c": {
"editable": "packages/.c"
}
}
"###);
Ok(())
}
/// Ensure that a project that matches no workspace `members` pattern resolves on its own,
/// not as part of the enclosing workspace.
#[test]
fn workspace_non_included_member() -> Result<()> {
let context = TestContext::new("3.12");
// Build the main workspace ...
let workspace = context.temp_dir.child("workspace");
workspace.child("pyproject.toml").write_str(indoc! {r#"
[tool.uv.workspace]
members = ["packages/*"]
"#})?;
// ... with a ...
let deps = indoc! {r#"
dependencies = ["b"]
[tool.uv.sources]
b = { workspace = true }
"#};
make_project(&workspace.join("packages").join("a"), "a", deps)?;
// ... and b.
let deps = indoc! {r"
dependencies = []
"};
make_project(&workspace.join("packages").join("b"), "b", deps)?;
// ... and c, which is _not_ a member, but also isn't explicitly excluded.
let deps = indoc! {r"
dependencies = []
"};
make_project(&workspace.join("c"), "c", deps)?;
// Locking from `c` should not include any workspace members.
uv_snapshot!(context.filters(), context.lock().current_dir(workspace.join("c")), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Resolved 1 package in [TIME]
"###
);
let lock: SourceLock = toml::from_str(&fs_err::read_to_string(
workspace.join("c").join("uv.lock"),
)?)?;
// Only `c` itself appears, rooted at its own directory.
assert_json_snapshot!(lock.sources(), @r###"
{
"c": {
"editable": "."
}
}
"###);
Ok(())
}
/// Ensure workspace members inherit sources from the root, if not specified in the member.
///
/// In such cases, relative paths should be resolved relative to the workspace root, rather than
/// relative to the member.
#[test]
fn workspace_inherit_sources() -> Result<()> {
let context = TestContext::new("3.12");
// Create the workspace root.
let workspace = context.temp_dir.child("workspace");
workspace.child("pyproject.toml").write_str(indoc! {r#"
[project]
name = "workspace"
version = "0.1.0"
dependencies = []
requires-python = ">=3.12"
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/workspace_dir.rs | crates/uv/tests/it/workspace_dir.rs | use std::env;
use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::fixture::PathChild;
use crate::common::{TestContext, copy_dir_ignore, uv_snapshot};
/// Test basic output for a simple workspace with one member.
#[test]
fn workspace_dir_simple() {
let context = TestContext::new("3.12");
// Initialize a workspace with one member
context.init().arg("foo").assert().success();
let workspace = context.temp_dir.child("foo");
// `uv workspace dir` prints the workspace root plus an experimental-feature warning.
uv_snapshot!(context.filters(), context.workspace_dir().current_dir(&workspace), @r"
success: true
exit_code: 0
----- stdout -----
[TEMP_DIR]/foo
----- stderr -----
warning: The `uv workspace dir` command is experimental and may change without warning. Pass `--preview-features workspace-dir` to disable this warning.
"
);
}
/// Workspace dir output when run with `--package`.
#[test]
fn workspace_dir_specific_package() {
let context = TestContext::new("3.12");
// Initialize a workspace (`foo`) with a nested member (`foo/bar`).
context.init().arg("foo").assert().success();
context.init().arg("foo/bar").assert().success();
let workspace = context.temp_dir.child("foo");
// root workspace
uv_snapshot!(context.filters(), context.workspace_dir().current_dir(&workspace), @r"
success: true
exit_code: 0
----- stdout -----
[TEMP_DIR]/foo
----- stderr -----
warning: The `uv workspace dir` command is experimental and may change without warning. Pass `--preview-features workspace-dir` to disable this warning.
"
);
// with --package bar
uv_snapshot!(context.filters(), context.workspace_dir().arg("--package").arg("bar").current_dir(&workspace), @r"
success: true
exit_code: 0
----- stdout -----
[TEMP_DIR]/foo/bar
----- stderr -----
warning: The `uv workspace dir` command is experimental and may change without warning. Pass `--preview-features workspace-dir` to disable this warning.
"
);
}
/// Test output when run from a workspace member directory.
#[test]
fn workspace_metadata_from_member() -> Result<()> {
let context = TestContext::new("3.12");
let workspace = context.temp_dir.child("workspace");
// Copy the `albatross-root-workspace` fixture into the temp dir.
let albatross_workspace = context
.workspace_root
.join("test/workspaces/albatross-root-workspace");
copy_dir_ignore(albatross_workspace, &workspace)?;
let member_dir = workspace.join("packages").join("bird-feeder");
// The workspace root — not the member directory — is printed.
uv_snapshot!(context.filters(), context.workspace_dir().current_dir(&member_dir), @r"
success: true
exit_code: 0
----- stdout -----
[TEMP_DIR]/workspace
----- stderr -----
warning: The `uv workspace dir` command is experimental and may change without warning. Pass `--preview-features workspace-dir` to disable this warning.
"
);
Ok(())
}
/// Test workspace dir error output for a non-existent package.
#[test]
fn workspace_dir_package_doesnt_exist() {
let context = TestContext::new("3.12");
// Initialize a workspace with one member
context.init().arg("foo").assert().success();
let workspace = context.temp_dir.child("foo");
uv_snapshot!(context.filters(), context.workspace_dir().arg("--package").arg("bar").current_dir(&workspace), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
warning: The `uv workspace dir` command is experimental and may change without warning. Pass `--preview-features workspace-dir` to disable this warning.
error: Package `bar` not found in workspace.
"
);
}
/// Test workspace dir error output when not in a project.
#[test]
fn workspace_metadata_no_project() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.workspace_dir(), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
warning: The `uv workspace dir` command is experimental and may change without warning. Pass `--preview-features workspace-dir` to disable this warning.
error: No `pyproject.toml` found in current directory or any parent directory
"
);
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/publish.rs | crates/uv/tests/it/publish.rs | use crate::common::{TestContext, uv_snapshot, venv_bin_path};
use assert_cmd::assert::OutputAssertExt;
use assert_fs::fixture::{FileTouch, FileWriteStr, PathChild};
use fs_err::OpenOptions;
use indoc::{formatdoc, indoc};
use serde_json::json;
use sha2::{Digest, Sha256};
use std::env;
use std::env::current_dir;
use std::io::Write;
use uv_static::EnvVars;
use wiremock::matchers::{basic_auth, method, path};
use wiremock::{Mock, MockServer, ResponseTemplate};
#[test]
fn username_password_no_longer_supported() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.publish()
.arg("-u")
.arg("dummy")
.arg("-p")
.arg("dummy")
.arg("--publish-url")
.arg("https://test.pypi.org/legacy/")
.arg("../../test/links/ok-1.0.0-py3-none-any.whl"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Publishing 1 file to https://test.pypi.org/legacy/
Uploading ok-1.0.0-py3-none-any.whl ([SIZE])
error: Failed to publish `../../test/links/ok-1.0.0-py3-none-any.whl` to https://test.pypi.org/legacy/
Caused by: Upload failed with status code 403 Forbidden. Server says: 403 Username/Password authentication is no longer supported. Migrate to API Tokens or Trusted Publishers instead. See https://test.pypi.org/help/#apitoken and https://test.pypi.org/help/#trusted-publishers
"###
);
}
#[test]
fn invalid_token() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.publish()
.arg("-u")
.arg("__token__")
.arg("-p")
.arg("dummy")
.arg("--publish-url")
.arg("https://test.pypi.org/legacy/")
.arg("../../test/links/ok-1.0.0-py3-none-any.whl"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Publishing 1 file to https://test.pypi.org/legacy/
Uploading ok-1.0.0-py3-none-any.whl ([SIZE])
error: Failed to publish `../../test/links/ok-1.0.0-py3-none-any.whl` to https://test.pypi.org/legacy/
Caused by: Upload failed with status code 403 Forbidden. Server says: 403 Invalid or non-existent authentication information. See https://test.pypi.org/help/#invalid-auth for more information.
"###
);
}
/// Emulate a missing `permission` `id-token: write` situation.
#[test]
fn mixed_credentials() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.publish()
.arg("--username")
.arg("ferris")
.arg("--password")
.arg("ZmVycmlz")
.arg("--publish-url")
.arg("https://test.pypi.org/legacy/")
.arg("--trusted-publishing")
.arg("always")
.arg("../../test/links/ok-1.0.0-py3-none-any.whl")
// Emulate CI
.env(EnvVars::GITHUB_ACTIONS, "true"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Publishing 1 file to https://test.pypi.org/legacy/
error: a username and a password are not allowed when using trusted publishing
"###
);
}
/// Emulate a missing `permission` `id-token: write` situation.
#[test]
fn missing_trusted_publishing_permission() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.publish()
.arg("--publish-url")
.arg("https://test.pypi.org/legacy/")
.arg("--trusted-publishing")
.arg("always")
.arg("../../test/links/ok-1.0.0-py3-none-any.whl")
// Emulate CI
.env(EnvVars::GITHUB_ACTIONS, "true"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Publishing 1 file to https://test.pypi.org/legacy/
error: Failed to obtain token for trusted publishing
Caused by: Failed to obtain OIDC token: is the `id-token: write` permission missing?
Caused by: GitHub Actions detection error
Caused by: insufficient permissions: missing ACTIONS_ID_TOKEN_REQUEST_URL
"
);
}
/// Check the error when there are no credentials provided on GitHub Actions. Is it an incorrect
/// trusted publishing configuration?
#[test]
fn no_credentials() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.publish()
.arg("--publish-url")
.arg("https://test.pypi.org/legacy/")
.arg("../../test/links/ok-1.0.0-py3-none-any.whl")
// Emulate CI
.env(EnvVars::GITHUB_ACTIONS, "true"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Publishing 1 file to https://test.pypi.org/legacy/
Note: Neither credentials nor keyring are configured, and there was an error fetching the trusted publishing token. If you don't want to use trusted publishing, you can ignore this error, but you need to provide credentials.
error: Trusted publishing failed
Caused by: Failed to obtain OIDC token: is the `id-token: write` permission missing?
Caused by: GitHub Actions detection error
Caused by: insufficient permissions: missing ACTIONS_ID_TOKEN_REQUEST_URL
Uploading ok-1.0.0-py3-none-any.whl ([SIZE])
error: Failed to publish `../../test/links/ok-1.0.0-py3-none-any.whl` to https://test.pypi.org/legacy/
Caused by: Failed to send POST request
Caused by: Missing credentials for https://test.pypi.org/legacy/
"
);
}
/// Hint people that it's not `--skip-existing` but `--check-url`.
#[test]
fn skip_existing_redirect() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.publish()
.arg("--skip-existing")
.arg("--publish-url")
.arg("https://test.pypi.org/legacy/"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: `uv publish` does not support `--skip-existing` because there is not a reliable way to identify when an upload fails due to an existing distribution. Instead, use `--check-url` to provide the URL to the simple API for your index. uv will check the index for existing distributions before attempting uploads.
"###
);
}
#[test]
fn dubious_filenames() {
let context = TestContext::new("3.12");
context.temp_dir.child("not-a-wheel.whl").touch().unwrap();
context.temp_dir.child("data.tar.gz").touch().unwrap();
context
.temp_dir
.child("not-sdist-1-2-3-asdf.zip")
.touch()
.unwrap();
uv_snapshot!(context.filters(), context.publish()
.arg("-u")
.arg("dummy")
.arg("-p")
.arg("dummy")
.arg("--publish-url")
.arg("https://test.pypi.org/legacy/")
.arg(context.temp_dir.join("*")), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
warning: Skipping file that looks like a distribution, but is not a valid distribution filename: `[TEMP_DIR]/data.tar.gz`
warning: Skipping file that looks like a distribution, but is not a valid distribution filename: `[TEMP_DIR]/not-a-wheel.whl`
warning: Skipping file that looks like a distribution, but is not a valid distribution filename: `[TEMP_DIR]/not-sdist-1-2-3-asdf.zip`
error: No files found to publish
"
);
}
/// Check that we (don't) use the keyring and warn for missing keyring behaviors correctly.
#[test]
fn check_keyring_behaviours() {
let context = TestContext::new("3.12");
// Install our keyring plugin
context
.pip_install()
.arg(
context
.workspace_root
.join("test")
.join("packages")
.join("keyring_test_plugin"),
)
.assert()
.success();
// Ok: The keyring may be used for the index page.
uv_snapshot!(context.filters(), context.publish()
.arg("-u")
.arg("dummy")
.arg("-p")
.arg("dummy")
.arg("--keyring-provider")
.arg("subprocess")
.arg("--check-url")
.arg("https://test.pypi.org/simple/")
.arg("--publish-url")
.arg("https://test.pypi.org/legacy/?ok")
.arg("../../test/links/ok-1.0.0-py3-none-any.whl")
.env(EnvVars::PATH, venv_bin_path(&context.venv)), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Publishing 1 file to https://test.pypi.org/legacy/?ok
Uploading ok-1.0.0-py3-none-any.whl ([SIZE])
error: Failed to publish `../../test/links/ok-1.0.0-py3-none-any.whl` to https://test.pypi.org/legacy/?ok
Caused by: Upload failed with status code 403 Forbidden. Server says: 403 Username/Password authentication is no longer supported. Migrate to API Tokens or Trusted Publishers instead. See https://test.pypi.org/help/#apitoken and https://test.pypi.org/help/#trusted-publishers
"###
);
// Warn: The keyring is unused.
uv_snapshot!(context.filters(), context.publish()
.arg("-u")
.arg("dummy")
.arg("-p")
.arg("dummy")
.arg("--keyring-provider")
.arg("subprocess")
.arg("--publish-url")
.arg("https://test.pypi.org/legacy/?ok")
.arg("../../test/links/ok-1.0.0-py3-none-any.whl")
.env(EnvVars::PATH, venv_bin_path(&context.venv)), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Publishing 1 file to https://test.pypi.org/legacy/?ok
warning: Using `--keyring-provider` with a password or token and no check URL has no effect
Uploading ok-1.0.0-py3-none-any.whl ([SIZE])
error: Failed to publish `../../test/links/ok-1.0.0-py3-none-any.whl` to https://test.pypi.org/legacy/?ok
Caused by: Upload failed with status code 403 Forbidden. Server says: 403 Username/Password authentication is no longer supported. Migrate to API Tokens or Trusted Publishers instead. See https://test.pypi.org/help/#apitoken and https://test.pypi.org/help/#trusted-publishers
"###
);
// Warn: There is no keyring entry for the user dummy.
// https://github.com/astral-sh/uv/issues/7963#issuecomment-2453558043
uv_snapshot!(context.filters(), context.publish()
.arg("-u")
.arg("dummy")
.arg("--keyring-provider")
.arg("subprocess")
.arg("--check-url")
.arg("https://test.pypi.org/simple/")
.arg("--publish-url")
.arg("https://test.pypi.org/legacy/?ok")
.arg("../../test/links/ok-1.0.0-py3-none-any.whl")
.env(EnvVars::PATH, venv_bin_path(&context.venv)), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Publishing 1 file to https://test.pypi.org/legacy/?ok
Keyring request for dummy@https://test.pypi.org/legacy/?ok
Keyring request for dummy@test.pypi.org
warning: Keyring has no password for URL `https://test.pypi.org/legacy/?ok` and username `dummy`
Uploading ok-1.0.0-py3-none-any.whl ([SIZE])
Keyring request for dummy@https://test.pypi.org/legacy/?ok
Keyring request for dummy@test.pypi.org
error: Failed to publish `../../test/links/ok-1.0.0-py3-none-any.whl` to https://test.pypi.org/legacy/?ok
Caused by: Upload failed with status code 403 Forbidden. Server says: 403 Username/Password authentication is no longer supported. Migrate to API Tokens or Trusted Publishers instead. See https://test.pypi.org/help/#apitoken and https://test.pypi.org/help/#trusted-publishers
"
);
// Ok: There is a keyring entry for the user dummy.
// https://github.com/astral-sh/uv/issues/7963#issuecomment-2453558043
uv_snapshot!(context.filters(), context.publish()
.arg("-u")
.arg("dummy")
.arg("--keyring-provider")
.arg("subprocess")
.arg("--publish-url")
.arg("https://test.pypi.org/legacy/?ok")
.arg("../../test/links/ok-1.0.0-py3-none-any.whl")
.env(EnvVars::KEYRING_TEST_CREDENTIALS, r#"{"https://test.pypi.org/legacy/?ok": {"dummy": "dummy"}}"#)
.env(EnvVars::PATH, venv_bin_path(&context.venv)), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Publishing 1 file to https://test.pypi.org/legacy/?ok
Keyring request for dummy@https://test.pypi.org/legacy/?ok
Uploading ok-1.0.0-py3-none-any.whl ([SIZE])
error: Failed to publish `../../test/links/ok-1.0.0-py3-none-any.whl` to https://test.pypi.org/legacy/?ok
Caused by: Upload failed with status code 403 Forbidden. Server says: 403 Username/Password authentication is no longer supported. Migrate to API Tokens or Trusted Publishers instead. See https://test.pypi.org/help/#apitoken and https://test.pypi.org/help/#trusted-publishers
"
);
}
#[test]
fn invalid_index() {
let context = TestContext::new("3.12");
let pyproject_toml = indoc! {r#"
[project]
name = "foo"
version = "0.1.0"
[[tool.uv.index]]
explicit = true
name = "foo"
url = "https://example.com"
[[tool.uv.index]]
name = "internal"
url = "https://internal.example.org"
"#};
context
.temp_dir
.child("pyproject.toml")
.write_str(pyproject_toml)
.unwrap();
let ok_wheel = current_dir()
.unwrap()
.join("../../test/links/ok-1.0.0-py3-none-any.whl");
// No such index
uv_snapshot!(context.filters(), context.publish()
.arg("-u")
.arg("__token__")
.arg("-p")
.arg("dummy")
.arg("--index")
.arg("bar")
.arg(&ok_wheel)
.current_dir(context.temp_dir.path()), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Index not found: `bar`. Found indexes: `foo`, `internal`
"###
);
// Index does not have a publish URL
uv_snapshot!(context.filters(), context.publish()
.arg("-u")
.arg("__token__")
.arg("-p")
.arg("dummy")
.arg("--index")
.arg("foo")
.arg(&ok_wheel)
.current_dir(context.temp_dir.path()), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Index is missing a publish URL: `foo`
"###
);
}
/// Ensure that we read index credentials from the environment when publishing.
///
/// <https://github.com/astral-sh/uv/issues/11836#issuecomment-3022735011>
#[tokio::test]
async fn read_index_credential_env_vars_for_check_url() {
let context = TestContext::new("3.12");
let server = MockServer::start().await;
context
.init()
.arg("--name")
.arg("astral-test-private")
.arg(".")
.assert()
.success();
context.build().arg("--wheel").assert().success();
let mut file = OpenOptions::new()
.write(true)
.append(true)
.create(false)
.open(context.temp_dir.join("pyproject.toml"))
.unwrap();
file.write_all(
formatdoc! {
r#"
[[tool.uv.index]]
name = "private-index"
url = "{index_uri}/simple/"
publish-url = "{index_uri}/upload"
"#,
index_uri = server.uri()
}
.as_bytes(),
)
.unwrap();
let filename = "astral_test_private-0.1.0-py3-none-any.whl";
let wheel = context.temp_dir.join("dist").join(filename);
let sha256 = format!("{:x}", Sha256::digest(fs_err::read(&wheel).unwrap()));
let simple_index = json! ({
"files": [
{
"filename": filename,
"hashes": {
"sha256": sha256
},
"url": format!("{}/{}", server.uri(), filename),
}
]
});
Mock::given(method("GET"))
.and(path("/simple/astral-test-private/"))
.and(basic_auth("username", "secret"))
.respond_with(ResponseTemplate::new(200).set_body_raw(
simple_index.to_string().into_bytes(),
"application/vnd.pypi.simple.v1+json",
))
.mount(&server)
.await;
// Test that we fail without credentials
uv_snapshot!(context.filters(), context.publish()
.current_dir(&context.temp_dir)
.arg(&wheel)
.arg("--index")
.arg("private-index")
.arg("--trusted-publishing")
.arg("never"),
@r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Publishing 1 file to http://[LOCALHOST]/upload
Uploading astral_test_private-0.1.0-py3-none-any.whl ([SIZE])
error: Failed to publish `dist/astral_test_private-0.1.0-py3-none-any.whl` to http://[LOCALHOST]/upload
Caused by: Failed to send POST request
Caused by: Missing credentials for http://[LOCALHOST]/upload
"
);
// Test that it works with credentials
uv_snapshot!(context.filters(), context.publish()
.current_dir(&context.temp_dir)
.arg(&wheel)
.arg("--index")
.arg("private-index")
.env(EnvVars::index_username("PRIVATE_INDEX"), "username")
.env(EnvVars::index_password("PRIVATE_INDEX"), "secret")
.arg("--trusted-publishing")
.arg("never"),
@r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Publishing 1 file to http://[LOCALHOST]/upload
File astral_test_private-0.1.0-py3-none-any.whl already exists, skipping
"
);
}
/// Native GitLab CI trusted publishing using `PYPI_ID_TOKEN`
#[tokio::test]
async fn gitlab_trusted_publishing_pypi_id_token() {
let context = TestContext::new("3.12");
let server = MockServer::start().await;
// Audience endpoint (PyPI)
Mock::given(method("GET"))
.and(path("/_/oidc/audience"))
.respond_with(
ResponseTemplate::new(200).set_body_raw("{\"audience\":\"pypi\"}", "application/json"),
)
.mount(&server)
.await;
// Mint token endpoint returns a short-lived API token
Mock::given(method("POST"))
.and(path("/_/oidc/mint-token"))
.respond_with(
ResponseTemplate::new(200).set_body_raw("{\"token\":\"apitoken\"}", "application/json"),
)
.mount(&server)
.await;
// Upload endpoint requires the minted token as Basic auth
Mock::given(method("POST"))
.and(path("/upload"))
.and(basic_auth("__token__", "apitoken"))
.respond_with(ResponseTemplate::new(200))
.mount(&server)
.await;
uv_snapshot!(context.filters(), context.publish()
.arg("--trusted-publishing")
.arg("always")
.arg("--publish-url")
.arg(format!("{}/upload", server.uri()))
.arg("../../test/links/ok-1.0.0-py3-none-any.whl")
.env(EnvVars::GITLAB_CI, "true")
.env_remove(EnvVars::GITHUB_ACTIONS)
.env(EnvVars::PYPI_ID_TOKEN, "gitlab-oidc-jwt"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Publishing 1 file to http://[LOCALHOST]/upload
Uploading ok-1.0.0-py3-none-any.whl ([SIZE])
"
);
}
/// Native GitLab CI trusted publishing using `TESTPYPI_ID_TOKEN`
#[tokio::test]
async fn gitlab_trusted_publishing_testpypi_id_token() {
let context = TestContext::new("3.12");
let server = MockServer::start().await;
// Audience endpoint (TestPyPI)
Mock::given(method("GET"))
.and(path("/_/oidc/audience"))
.respond_with(
ResponseTemplate::new(200)
.set_body_raw("{\"audience\":\"testpypi\"}", "application/json"),
)
.mount(&server)
.await;
// Mint token endpoint returns a short-lived API token
Mock::given(method("POST"))
.and(path("/_/oidc/mint-token"))
.respond_with(
ResponseTemplate::new(200).set_body_raw("{\"token\":\"apitoken\"}", "application/json"),
)
.mount(&server)
.await;
// Upload endpoint requires the minted token as Basic auth
Mock::given(method("POST"))
.and(path("/upload"))
.and(basic_auth("__token__", "apitoken"))
.respond_with(ResponseTemplate::new(200))
.mount(&server)
.await;
uv_snapshot!(context.filters(), context.publish()
.arg("--trusted-publishing")
.arg("always")
.arg("--publish-url")
.arg(format!("{}/upload", server.uri()))
.arg("../../test/links/ok-1.0.0-py3-none-any.whl")
// Emulate GitLab CI with TESTPYPI_ID_TOKEN present
.env(EnvVars::GITLAB_CI, "true")
.env_remove(EnvVars::GITHUB_ACTIONS)
.env(EnvVars::TESTPYPI_ID_TOKEN, "gitlab-oidc-jwt"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Publishing 1 file to http://[LOCALHOST]/upload
Uploading ok-1.0.0-py3-none-any.whl ([SIZE])
"
);
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/cache_prune.rs | crates/uv/tests/it/cache_prune.rs | use anyhow::Result;
use assert_cmd::prelude::*;
use assert_fs::prelude::*;
use indoc::indoc;
use uv_static::EnvVars;
use crate::common::TestContext;
use crate::common::uv_snapshot;
/// `cache prune` should be a no-op if there's nothing out-of-date in the cache.
#[test]
fn prune_no_op() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("anyio")?;
// Install a requirement, to populate the cache.
context
.pip_sync()
.arg("requirements.txt")
.assert()
.success();
let filters: Vec<_> = context
.filters()
.into_iter()
.chain(std::iter::once((r"Removed \d+ files", "Removed [N] files")))
.collect();
uv_snapshot!(&filters, context.prune().arg("--verbose"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
DEBUG uv [VERSION] ([COMMIT] DATE)
DEBUG Acquired exclusive lock for `[CACHE_DIR]/`
Pruning cache at: [CACHE_DIR]/
No unused entries found
DEBUG Released lock at `[CACHE_DIR]/.lock`
");
Ok(())
}
/// `cache prune` should remove any stale top-level directories from the cache.
#[test]
fn prune_stale_directory() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("anyio")?;
// Install a requirement, to populate the cache.
context
.pip_sync()
.arg("requirements.txt")
.assert()
.success();
// Add a stale directory to the cache.
let simple = context.cache_dir.child("simple-v4");
simple.create_dir_all()?;
let filters: Vec<_> = context
.filters()
.into_iter()
.chain(std::iter::once((r"Removed \d+ files", "Removed [N] files")))
.collect();
uv_snapshot!(&filters, context.prune().arg("--verbose"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
DEBUG uv [VERSION] ([COMMIT] DATE)
DEBUG Acquired exclusive lock for `[CACHE_DIR]/`
Pruning cache at: [CACHE_DIR]/
DEBUG Removing dangling cache bucket: [CACHE_DIR]/simple-v4
Removed 1 directory
DEBUG Released lock at `[CACHE_DIR]/.lock`
");
Ok(())
}
/// `cache prune` should remove all cached environments from the cache.
#[test]
fn prune_cached_env() {
let context = TestContext::new("3.12").with_filtered_counts();
let tool_dir = context.temp_dir.child("tools");
let bin_dir = context.temp_dir.child("bin");
let filters: Vec<_> = context
.filters()
.into_iter()
.chain(std::iter::once((r"Removed \d+ files", "Removed [N] files")))
.collect();
uv_snapshot!(&filters, context.tool_run()
.arg("pytest@8.0.0")
.arg("--version")
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
pytest 8.0.0
----- stderr -----
Resolved [N] packages in [TIME]
Prepared [N] packages in [TIME]
Installed [N] packages in [TIME]
+ iniconfig==2.0.0
+ packaging==24.0
+ pluggy==1.4.0
+ pytest==8.0.0
"###);
let filters: Vec<_> = context
.filters()
.into_iter()
.chain([
// The cache entry does not have a stable key, so we filter it out
(
r"\[CACHE_DIR\](\\|\/)(.*?)(\\|\/).*",
"[CACHE_DIR]/$2/[ENTRY]",
),
])
.collect();
uv_snapshot!(filters, context.prune().arg("--verbose"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
DEBUG uv [VERSION] ([COMMIT] DATE)
DEBUG Acquired exclusive lock for `[CACHE_DIR]/`
Pruning cache at: [CACHE_DIR]/
DEBUG Removing dangling cache environment: [CACHE_DIR]/environments-v2/[ENTRY]
DEBUG Removing dangling cache archive: [CACHE_DIR]/archive-v0/[ENTRY]
Removed [N] files ([SIZE])
DEBUG Released lock at `[CACHE_DIR]/.lock`
");
}
/// `cache prune` should remove any stale symlink from the cache.
#[test]
fn prune_stale_symlink() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("anyio")?;
// Install a requirement, to populate the cache.
context
.pip_sync()
.arg("requirements.txt")
.assert()
.success();
// Remove the wheels directory, causing the symlink to become stale.
let wheels = context.cache_dir.child("wheels-v5");
fs_err::remove_dir_all(wheels)?;
let filters: Vec<_> = context
.filters()
.into_iter()
.chain([
// The cache entry does not have a stable key, so we filter it out
(
r"\[CACHE_DIR\](\\|\/)(.*?)(\\|\/).*",
"[CACHE_DIR]/$2/[ENTRY]",
),
])
.collect();
uv_snapshot!(filters, context.prune().arg("--verbose"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
DEBUG uv [VERSION] ([COMMIT] DATE)
DEBUG Acquired exclusive lock for `[CACHE_DIR]/`
Pruning cache at: [CACHE_DIR]/
DEBUG Removing dangling cache archive: [CACHE_DIR]/archive-v0/[ENTRY]
Removed 44 files ([SIZE])
DEBUG Released lock at `[CACHE_DIR]/.lock`
");
Ok(())
}
#[tokio::test]
async fn prune_force() -> Result<()> {
let context = TestContext::new("3.12").with_filtered_counts();
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("typing-extensions\niniconfig")?;
// Install a requirement, to populate the cache.
context
.pip_sync()
.arg("requirements.txt")
.assert()
.success();
// When unlocked, `--force` should still take a lock
uv_snapshot!(context.filters(), context.prune().arg("--verbose").arg("--force"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
DEBUG uv [VERSION] ([COMMIT] DATE)
DEBUG Acquired exclusive lock for `[CACHE_DIR]/`
Pruning cache at: [CACHE_DIR]/
No unused entries found
DEBUG Released lock at `[CACHE_DIR]/.lock`
");
// Add a stale directory to the cache.
let simple = context.cache_dir.child("simple-v4");
simple.create_dir_all()?;
// When locked, `--force` should proceed without blocking
let _cache = uv_cache::Cache::from_path(context.cache_dir.path())
.with_exclusive_lock()
.await;
uv_snapshot!(context.filters(), context.prune().arg("--verbose").arg("--force"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
DEBUG uv [VERSION] ([COMMIT] DATE)
DEBUG Lock is busy for `[CACHE_DIR]/`
DEBUG Cache is currently in use, proceeding due to `--force`
Pruning cache at: [CACHE_DIR]/
DEBUG Removing dangling cache bucket: [CACHE_DIR]/simple-v4
Removed 1 directory
");
Ok(())
}
/// `cache prune --ci` should remove all unzipped archives.
#[test]
fn prune_unzipped() -> Result<()> {
let context = TestContext::new("3.12").with_exclude_newer("2025-01-01T00:00Z");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! { r"
source-distribution==0.0.1
iniconfig
" })?;
let filters: Vec<_> = std::iter::once((r"Removed \d+ files", "Removed [N] files"))
.chain(context.filters())
.collect();
// Install a requirement, to populate the cache.
uv_snapshot!(&filters, context.pip_install().arg("-r").arg("requirements.txt").arg("--reinstall"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 2 packages in [TIME]
Installed 2 packages in [TIME]
+ iniconfig==2.0.0
+ source-distribution==0.0.1
"###);
uv_snapshot!(&filters, context.prune().arg("--ci"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Pruning cache at: [CACHE_DIR]/
Removed [N] files ([SIZE])
"###);
context.venv().arg("--clear").assert().success();
// Reinstalling the source distribution should not require re-downloading the source
// distribution.
requirements_txt.write_str(indoc! { r"
source-distribution==0.0.1
" })?;
uv_snapshot!(&filters, context.pip_install().arg("-r").arg("requirements.txt").arg("--offline"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ source-distribution==0.0.1
"###);
// But reinstalling the other package should require a download, since we pruned the wheel.
requirements_txt.write_str(indoc! { r"
iniconfig
" })?;
uv_snapshot!(&filters, context.pip_install().arg("-r").arg("requirements.txt").arg("--offline"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because all versions of iniconfig need to be downloaded from a registry and you require iniconfig, we can conclude that your requirements are unsatisfiable.
hint: Pre-releases are available for `iniconfig` in the requested range (e.g., 0.2.dev0), but pre-releases weren't enabled (try: `--prerelease=allow`)
hint: Packages were unavailable because the network was disabled. When the network is disabled, registry packages may only be read from the cache.
");
Ok(())
}
/// `cache prune` should remove any stale source distribution revisions.
#[test]
fn prune_stale_revision() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.temp_dir.child("src").child("__init__.py").touch()?;
context.temp_dir.child("README").touch()?;
let filters: Vec<_> = context
.filters()
.into_iter()
.chain(std::iter::once((r"Removed \d+ files", "Removed [N] files")))
.collect();
// Install the same package twice, with `--reinstall`.
uv_snapshot!(&filters, context
.pip_install()
.arg(".")
.arg("--reinstall"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ project==0.1.0 (from file://[TEMP_DIR]/)
"###);
uv_snapshot!(&filters, context
.pip_install()
.arg(".")
.arg("--reinstall"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME]
~ project==0.1.0 (from file://[TEMP_DIR]/)
"###);
let filters: Vec<_> = filters
.into_iter()
.chain([
// The cache entry does not have a stable key, so we filter it out
(
r"\[CACHE_DIR\](\\|\/)(.*?)(\\|\/).*",
"[CACHE_DIR]/$2/[ENTRY]",
),
])
.collect();
// Pruning should remove the unused revision.
uv_snapshot!(&filters, context.prune().arg("--verbose"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
DEBUG uv [VERSION] ([COMMIT] DATE)
DEBUG Acquired exclusive lock for `[CACHE_DIR]/`
Pruning cache at: [CACHE_DIR]/
DEBUG Removing dangling source revision: [CACHE_DIR]/sdists-v9/[ENTRY]
DEBUG Removing dangling cache archive: [CACHE_DIR]/archive-v0/[ENTRY]
Removed [N] files ([SIZE])
DEBUG Released lock at `[CACHE_DIR]/.lock`
");
// Uninstall and reinstall the package. We should use the cached version.
uv_snapshot!(&filters, context
.pip_uninstall()
.arg("."), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Uninstalled 1 package in [TIME]
- project==0.1.0 (from file://[TEMP_DIR]/)
"###);
uv_snapshot!(&filters, context
.pip_install()
.arg("."), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ project==0.1.0 (from file://[TEMP_DIR]/)
"###);
Ok(())
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/python_upgrade.rs | crates/uv/tests/it/python_upgrade.rs | use crate::common::{TestContext, uv_snapshot};
use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::fixture::FileTouch;
use assert_fs::prelude::PathChild;
use uv_static::EnvVars;
// Upgrading a single minor version: rejects patch-version arguments, bumps an
// installed patch release to the latest supported patch, is a no-op when already
// current, honors `--reinstall`, and upgrades pre-releases to the final release.
#[test]
fn python_upgrade() {
    let context: TestContext = TestContext::new_with_versions(&[])
        .with_python_download_cache()
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs();

    // Install an earlier patch version
    uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.10.17"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.17 in [TIME]
    + cpython-3.10.17-[PLATFORM] (python3.10)
    ");

    // Don't accept patch version as argument to upgrade command
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10.17"), @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
    error: `uv python upgrade` only accepts minor versions, got: 3.10.17
    ");

    // Upgrade patch version
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.19 in [TIME]
    + cpython-3.10.19-[PLATFORM] (python3.10)
    ");

    // Should be a no-op when already upgraded
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Python 3.10 is already on the latest supported patch release
    ");

    // Should reinstall on `--reinstall`
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10").arg("--reinstall"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.19 in [TIME]
    ~ cpython-3.10.19-[PLATFORM] (python3.10)
    ");

    // Install an earlier pre-release version
    uv_snapshot!(context.filters(), context.python_install().arg("3.14.0rc2"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.14.0rc2 in [TIME]
    + cpython-3.14.0rc2-[PLATFORM] (python3.14)
    ");

    // Upgrade the pre-release version (no `--preview`, so the experimental warning is shown)
    uv_snapshot!(context.filters(), context.python_upgrade(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    warning: `uv python upgrade` is experimental and may change without warning. Pass `--preview-features python-upgrade` to disable this warning
    Installed Python 3.14.2 in [TIME]
    + cpython-3.14.2-[PLATFORM] (python3.14)
    ");
}
// `uv python upgrade` with no version argument: no-op when nothing is installed,
// upgrades every installed minor version that is behind, and is a no-op once all
// are current.
#[test]
fn python_upgrade_without_version() {
    let context: TestContext = TestContext::new_with_versions(&[])
        .with_python_download_cache()
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs();

    // Should be a no-op when no versions have been installed
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    There are no installed versions to upgrade
    ");

    // Install earlier patch versions for different minor versions
    uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.11.8").arg("3.12.8").arg("3.13.1"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed 3 versions in [TIME]
    + cpython-3.11.8-[PLATFORM] (python3.11)
    + cpython-3.12.8-[PLATFORM] (python3.12)
    + cpython-3.13.1-[PLATFORM] (python3.13)
    ");

    // Redact the 3.13 patch number so the snapshot doesn't churn with new releases.
    let mut filters = context.filters().clone();
    filters.push((r"3.13.\d+", "3.13.[X]"));

    // Upgrade one patch version
    uv_snapshot!(filters, context.python_upgrade().arg("--preview").arg("3.13"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.13.[X] in [TIME]
    + cpython-3.13.[X]-[PLATFORM] (python3.13)
    ");

    // Providing no minor version to `uv python upgrade` should upgrade the rest
    // of the patch versions
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed 2 versions in [TIME]
    + cpython-3.11.14-[PLATFORM] (python3.11)
    + cpython-3.12.12-[PLATFORM] (python3.12)
    ");

    // Should be a no-op when every version is already upgraded
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    All versions already on latest supported patch release
    ");
}
// Virtual environments created against a minor version should transparently pick
// up a new patch release after `uv python upgrade`, without being recreated.
#[test]
fn python_upgrade_transparent_from_venv() {
    let context: TestContext = TestContext::new_with_versions(&["3.13"])
        .with_python_download_cache()
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs();

    // Install an earlier patch version
    uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.10.17"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.17 in [TIME]
    + cpython-3.10.17-[PLATFORM] (python3.10)
    ");

    // Create a virtual environment
    uv_snapshot!(context.filters(), context.venv(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.10.17
    Creating virtual environment at: .venv
    Activate with: source .venv/[BIN]/activate
    ");

    uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.10.17
    ----- stderr -----
    "
    );

    let second_venv = ".venv2";

    // Create a second virtual environment with minor version request
    uv_snapshot!(context.filters(), context.venv().arg(second_venv).arg("-p").arg("3.10"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.10.17
    Creating virtual environment at: .venv2
    Activate with: source .venv2/[BIN]/activate
    ");

    uv_snapshot!(context.filters(), context.run().arg("python").arg("--version")
        .env(EnvVars::VIRTUAL_ENV, second_venv), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.10.17
    ----- stderr -----
    "
    );

    // Upgrade patch version
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.19 in [TIME]
    + cpython-3.10.19-[PLATFORM] (python3.10)
    ");

    // First virtual environment should reflect upgraded patch
    uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.10.19
    ----- stderr -----
    "
    );

    // Second virtual environment should reflect upgraded patch
    uv_snapshot!(context.filters(), context.run().arg("python").arg("--version")
        .env(EnvVars::VIRTUAL_ENV, second_venv), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.10.19
    ----- stderr -----
    "
    );
}
// Installing Python in preview mode should not prevent virtual environments
// from transparently upgrading.
#[test]
fn python_upgrade_transparent_from_venv_preview() {
    let context: TestContext = TestContext::new_with_versions(&["3.13"])
        .with_python_download_cache()
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs();

    // Install an earlier patch version using `--preview`
    uv_snapshot!(context.filters(), context.python_install().arg("3.10.17").arg("--preview"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.17 in [TIME]
    + cpython-3.10.17-[PLATFORM] (python3.10)
    ");

    // Create a virtual environment
    uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.10"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.10.17
    Creating virtual environment at: .venv
    Activate with: source .venv/[BIN]/activate
    ");

    uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.10.17
    ----- stderr -----
    "
    );

    // Upgrade patch version
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.19 in [TIME]
    + cpython-3.10.19-[PLATFORM] (python3.10)
    ");

    // Virtual environment should reflect upgraded patch
    uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.10.19
    ----- stderr -----
    "
    );
}
// A `.python-version` pin to an exact patch release should keep winning after an
// upgrade: the environment stays on the pinned interpreter.
#[test]
fn python_upgrade_ignored_with_python_pin() {
    let context: TestContext = TestContext::new_with_versions(&["3.13"])
        .with_python_download_cache()
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs();

    // Install an earlier patch version
    uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.10.17"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.17 in [TIME]
    + cpython-3.10.17-[PLATFORM] (python3.10)
    ");

    // Create a virtual environment
    uv_snapshot!(context.filters(), context.venv(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.10.17
    Creating virtual environment at: .venv
    Activate with: source .venv/[BIN]/activate
    ");

    // Pin to older patch version
    uv_snapshot!(context.filters(), context.python_pin().arg("3.10.17"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `.python-version` to `3.10.17`
    ----- stderr -----
    ");

    // Upgrade patch version
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.19 in [TIME]
    + cpython-3.10.19-[PLATFORM] (python3.10)
    ");

    // Virtual environment should continue to respect pinned patch version
    uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.10.17
    ----- stderr -----
    "
    );
}
// Virtual environments record patch versions. `uv venv -p 3.x.y` will
// prevent transparent upgrades.
#[test]
fn python_no_transparent_upgrade_with_venv_patch_specification() {
    let context: TestContext = TestContext::new_with_versions(&["3.13"])
        .with_python_download_cache()
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs();

    // Install an earlier patch version
    uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.10.17"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.17 in [TIME]
    + cpython-3.10.17-[PLATFORM] (python3.10)
    ");

    // Create a virtual environment with a patch version
    uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.10.17"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.10.17
    Creating virtual environment at: .venv
    Activate with: source .venv/[BIN]/activate
    ");

    uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.10.17
    ----- stderr -----
    "
    );

    // Upgrade patch version
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.19 in [TIME]
    + cpython-3.10.19-[PLATFORM] (python3.10)
    ");

    // The virtual environment Python version remains the same.
    uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.10.17
    ----- stderr -----
    "
    );
}
// Transparent upgrades should work for virtual environments created within
// virtual environments.
#[test]
fn python_transparent_upgrade_venv_venv() {
    let context: TestContext = TestContext::new_with_versions(&["3.13"])
        .with_python_download_cache()
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_filtered_virtualenv_bin()
        .with_managed_python_dirs();

    // Install an earlier patch version
    uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.10.17"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.17 in [TIME]
    + cpython-3.10.17-[PLATFORM] (python3.10)
    ");

    // Create an initial virtual environment
    uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.10"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.10.17
    Creating virtual environment at: .venv
    Activate with: source .venv/[BIN]/activate
    ");

    // Path of the interpreter inside the first venv (platform-dependent layout).
    let venv_python = if cfg!(windows) {
        context.venv.child("Scripts/python.exe")
    } else {
        context.venv.child("bin/python")
    };

    let second_venv = ".venv2";

    // Create a new virtual environment from within a virtual environment
    uv_snapshot!(context.filters(), context.venv()
        .arg(second_venv)
        .arg("-p").arg(venv_python.as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.10.17 interpreter at: .venv/[BIN]/python
    Creating virtual environment at: .venv2
    Activate with: source .venv2/[BIN]/activate
    ");

    // Check version from within second virtual environment
    uv_snapshot!(context.filters(), context.run()
        .arg("python").arg("--version")
        .env(EnvVars::VIRTUAL_ENV, second_venv), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.10.17
    ----- stderr -----
    "
    );

    // Upgrade patch version
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.19 in [TIME]
    + cpython-3.10.19-[PLATFORM] (python3.10)
    ");

    // Should have transparently upgraded in second virtual environment
    uv_snapshot!(context.filters(), context.run()
        .arg("python").arg("--version")
        .env(EnvVars::VIRTUAL_ENV, second_venv), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.10.19
    ----- stderr -----
    "
    );
}
// Transparent upgrades should work for virtual environments created using
// the `venv` module.
#[test]
fn python_upgrade_transparent_from_venv_module() {
    let context = TestContext::new_with_versions(&["3.13"])
        .with_python_download_cache()
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs()
        .with_filtered_python_install_bin();

    let bin_dir = context.temp_dir.child("bin");

    // Install earlier patch version
    uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.9"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.12.9 in [TIME]
    + cpython-3.12.9-[PLATFORM] (python3.12)
    ");

    // Create a virtual environment using venv module (`--without-pip` keeps it minimal)
    uv_snapshot!(context.filters(), context.run().arg("python").arg("-m").arg("venv").arg(context.venv.as_os_str()).arg("--without-pip")
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    ");

    uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.12.9
    ----- stderr -----
    "
    );

    // Upgrade patch version
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.12"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.12.12 in [TIME]
    + cpython-3.12.12-[PLATFORM] (python3.12)
    "
    );

    // Virtual environment should reflect upgraded patch
    uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.12.12
    ----- stderr -----
    "
    );
}
// Transparent Python upgrades should work in environments created using
// the `venv` module within an existing virtual environment.
#[test]
fn python_upgrade_transparent_from_venv_module_in_venv() {
    let context = TestContext::new_with_versions(&["3.13"])
        .with_python_download_cache()
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs()
        .with_filtered_python_install_bin();

    let bin_dir = context.temp_dir.child("bin");

    // Install earlier patch version
    uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.10.17"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.17 in [TIME]
    + cpython-3.10.17-[PLATFORM] (python3.10)
    ");

    // Create first virtual environment
    uv_snapshot!(context.filters(), context.venv().arg("-p").arg("3.10"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.10.17
    Creating virtual environment at: .venv
    Activate with: source .venv/[BIN]/activate
    ");

    let second_venv = ".venv2";

    // Create a virtual environment using the `venv` module from within the first virtual environment.
    uv_snapshot!(context.filters(), context.run()
        .arg("python").arg("-m").arg("venv").arg(second_venv).arg("--without-pip")
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    ");

    // Check version within second virtual environment
    uv_snapshot!(context.filters(), context.run()
        .env(EnvVars::VIRTUAL_ENV, second_venv)
        .arg("python").arg("--version"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.10.17
    ----- stderr -----
    "
    );

    // Upgrade patch version
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.10"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.10.19 in [TIME]
    + cpython-3.10.19-[PLATFORM] (python3.10)
    "
    );

    // Second virtual environment should reflect upgraded patch.
    uv_snapshot!(context.filters(), context.run()
        .env(EnvVars::VIRTUAL_ENV, second_venv)
        .arg("python").arg("--version"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.10.19
    ----- stderr -----
    "
    );
}
// Tests that `uv python upgrade 3.12` will warn if trying to install over non-managed
// interpreter, and that `uv python install --force` replaces the executable.
#[test]
fn python_upgrade_force_install() -> Result<()> {
    let context = TestContext::new_with_versions(&["3.13"])
        .with_python_download_cache()
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_empty_python_install_mirror()
        .with_managed_python_dirs();

    // Create an unmanaged (empty) executable where uv would place its `bin` link.
    context
        .bin_dir
        .child(format!("python3.12{}", std::env::consts::EXE_SUFFIX))
        .touch()?;

    // Try to upgrade with a non-managed interpreter installed in `bin`: the
    // install succeeds, but the executable is left alone and a warning is shown.
    uv_snapshot!(context.filters(), context.python_upgrade().arg("--preview").arg("3.12"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    warning: Executable already exists at `[BIN]/python3.12` but is not managed by uv; use `uv python install 3.12 --force` to replace it
    Installed Python 3.12.12 in [TIME]
    + cpython-3.12.12-[PLATFORM]
    ");

    // Force the `bin` install, replacing the unmanaged executable.
    // (Previously the target `3.12` was passed twice; once is sufficient.)
    uv_snapshot!(context.filters(), context.python_install().arg("3.12").arg("--force").arg("--preview"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.12.12 in [TIME]
    + cpython-3.12.12-[PLATFORM] (python3.12)
    ");

    Ok(())
}
// Upgrading with only an alternative implementation (PyPy) installed should not
// pull in CPython; with nothing to do, the command reports everything current.
#[test]
fn python_upgrade_implementation() {
    let context = TestContext::new_with_versions(&[])
        .with_python_download_cache()
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_empty_python_install_mirror()
        .with_managed_python_dirs();

    // Install pypy
    context.python_install().arg("pypy@3.11").assert().success();

    // Run the upgrade, we should not install cpython
    uv_snapshot!(context.filters(), context.python_upgrade(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    warning: `uv python upgrade` is experimental and may change without warning. Pass `--preview-features python-upgrade` to disable this warning
    All versions already on latest supported patch release
    ");
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/pip_list.rs | crates/uv/tests/it/pip_list.rs | use anyhow::Result;
use assert_cmd::prelude::*;
use assert_fs::fixture::ChildPath;
use assert_fs::fixture::FileWriteStr;
use assert_fs::fixture::PathChild;
use assert_fs::prelude::*;
use crate::common::{TestContext, uv_snapshot};
// `pip list --format columns` in an empty environment prints nothing (no header).
#[test]
fn list_empty_columns() {
    let context = TestContext::new("3.12");

    uv_snapshot!(context.pip_list()
        .arg("--format")
        .arg("columns"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    "###
    );
}
// `pip list --format freeze` in an empty environment prints nothing.
#[test]
fn list_empty_freeze() {
    let context = TestContext::new("3.12");

    uv_snapshot!(context.pip_list()
        .arg("--format")
        .arg("freeze"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    "###
    );
}
// `pip list --format json` in an empty environment prints an empty JSON array.
#[test]
fn list_empty_json() {
    let context = TestContext::new("3.12");

    uv_snapshot!(context.pip_list()
        .arg("--format")
        .arg("json"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    []
    ----- stderr -----
    "###
    );
}
// A single non-editable install is listed in the default columns format
// without the "Editable project location" column.
#[test]
#[cfg(feature = "pypi")]
fn list_single_no_editable() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("MarkupSafe==2.1.3")?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
    + markupsafe==2.1.3
    "###
    );

    context.assert_command("import markupsafe").success();

    uv_snapshot!(context.pip_list(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Package Version
    ---------- -------
    markupsafe 2.1.3
    ----- stderr -----
    "###
    );

    Ok(())
}
// `pip list --outdated` (columns format) shows only packages with a newer
// release, along with the latest version and its distribution type.
#[test]
#[cfg(feature = "pypi")]
fn list_outdated_columns() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("anyio==3.0.0")?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 3 packages in [TIME]
    Prepared 3 packages in [TIME]
    Installed 3 packages in [TIME]
    + anyio==3.0.0
    + idna==3.6
    + sniffio==1.3.1
    "###
    );

    // Only `anyio` is pinned behind the latest release, so only it appears.
    uv_snapshot!(context.pip_list().arg("--outdated"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Package Version Latest Type
    ------- ------- ------ -----
    anyio 3.0.0 4.3.0 wheel
    ----- stderr -----
    "###
    );

    Ok(())
}
// `pip list --outdated --format json` emits `latest_version`/`latest_filetype`
// fields for outdated packages.
#[test]
#[cfg(feature = "pypi")]
fn list_outdated_json() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("anyio==3.0.0")?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 3 packages in [TIME]
    Prepared 3 packages in [TIME]
    Installed 3 packages in [TIME]
    + anyio==3.0.0
    + idna==3.6
    + sniffio==1.3.1
    "###
    );

    uv_snapshot!(context.pip_list().arg("--outdated").arg("--format").arg("json"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [{"name":"anyio","version":"3.0.0","latest_version":"4.3.0","latest_filetype":"wheel"}]
    ----- stderr -----
    "###
    );

    Ok(())
}
// `--outdated` is rejected with the `freeze` format since freeze output has no
// column for the latest version.
#[test]
fn list_outdated_freeze() {
    let context = TestContext::new("3.12");

    uv_snapshot!(context.pip_list().arg("--outdated").arg("--format").arg("freeze"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: `--outdated` cannot be used with `--format freeze`
    "###
    );
}
// Git-sourced installs are excluded from `--outdated` output; only the
// registry-sourced package appears.
#[test]
#[cfg(feature = "git")]
fn list_outdated_git() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc::indoc! {r"
        iniconfig==1.0.0
        uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0.0.1
    "})?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 2 packages in [TIME]
    Prepared 2 packages in [TIME]
    Installed 2 packages in [TIME]
    + iniconfig==1.0.0
    + uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979)
    "###
    );

    uv_snapshot!(context.pip_list().arg("--outdated"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Package Version Latest Type
    --------- ------- ------ -----
    iniconfig 1.0.0 2.0.0 wheel
    ----- stderr -----
    "###
    );

    Ok(())
}
// `--outdated` consults the index given by `--index-url`: against test.pypi.org
// the "latest" anyio differs from the default index.
#[test]
#[cfg(feature = "pypi")]
fn list_outdated_index() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("anyio==3.0.0")?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 3 packages in [TIME]
    Prepared 3 packages in [TIME]
    Installed 3 packages in [TIME]
    + anyio==3.0.0
    + idna==3.6
    + sniffio==1.3.1
    "###
    );

    uv_snapshot!(context.pip_list()
        .arg("--outdated")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Package Version Latest Type
    ------- ------- ------ -----
    anyio 3.0.0 3.5.0 wheel
    ----- stderr -----
    "###
    );

    Ok(())
}
// Editable installs show their project location in an extra column of the
// default `pip list` output.
#[test]
#[cfg(feature = "pypi")]
fn list_editable() {
    let context = TestContext::new("3.12");

    // Install the editable package.
    uv_snapshot!(context.filters(), context.pip_install()
        .arg("-e")
        .arg(context.workspace_root.join("test/packages/poetry_editable")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 4 packages in [TIME]
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
    + anyio==4.3.0
    + idna==3.6
    + poetry-editable==0.1.0 (from file://[WORKSPACE]/test/packages/poetry_editable)
    + sniffio==1.3.1
    "###
    );

    // Normalize the table layout: replace the dashed underline row and collapse
    // runs of spaces so column widths don't affect the snapshot.
    let filters = context
        .filters()
        .into_iter()
        .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")])
        .collect::<Vec<_>>();

    uv_snapshot!(filters, context.pip_list(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Package Version Editable project location
    [UNDERLINE]
    anyio 4.3.0
    idna 3.6
    poetry-editable 0.1.0 [WORKSPACE]/test/packages/poetry_editable
    sniffio 1.3.1
    ----- stderr -----
    "###
    );
}
// `--editable` and `--exclude-editable` filter the listing to (or away from)
// editable installs, and are mutually exclusive.
#[test]
#[cfg(feature = "pypi")]
fn list_editable_only() {
    let context = TestContext::new("3.12");

    // Install the editable package.
    uv_snapshot!(context.filters(), context.pip_install()
        .arg("-e")
        .arg(context.workspace_root.join("test/packages/poetry_editable")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 4 packages in [TIME]
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
    + anyio==4.3.0
    + idna==3.6
    + poetry-editable==0.1.0 (from file://[WORKSPACE]/test/packages/poetry_editable)
    + sniffio==1.3.1
    "###
    );

    // Normalize the table layout: replace the dashed underline row and collapse
    // runs of spaces so column widths don't affect the snapshot.
    let filters = context
        .filters()
        .into_iter()
        .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")])
        .collect::<Vec<_>>();

    uv_snapshot!(filters, context.pip_list()
        .arg("--editable"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Package Version Editable project location
    [UNDERLINE]
    poetry-editable 0.1.0 [WORKSPACE]/test/packages/poetry_editable
    ----- stderr -----
    "###
    );

    uv_snapshot!(filters, context.pip_list()
        .arg("--exclude-editable"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Package Version
    [UNDERLINE]
    anyio 4.3.0
    idna 3.6
    sniffio 1.3.1
    ----- stderr -----
    "###
    );

    // Combining both flags is a clap-level conflict.
    uv_snapshot!(filters, context.pip_list()
        .arg("--editable")
        .arg("--exclude-editable"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: the argument '--editable' cannot be used with '--exclude-editable'
    Usage: uv pip list --cache-dir [CACHE_DIR] --editable --exclude-newer <EXCLUDE_NEWER>
    For more information, try '--help'.
    "###
    );
}
// `--exclude NAME` drops matching packages from the listing; an exclusion that
// matches nothing is a no-op, and the flag may be repeated.
#[test]
#[cfg(feature = "pypi")]
fn list_exclude() {
    let context = TestContext::new("3.12");

    // Install the editable package.
    uv_snapshot!(context.filters(), context.pip_install()
        .arg("-e")
        .arg(context.workspace_root.join("test/packages/poetry_editable")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 4 packages in [TIME]
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
    + anyio==4.3.0
    + idna==3.6
    + poetry-editable==0.1.0 (from file://[WORKSPACE]/test/packages/poetry_editable)
    + sniffio==1.3.1
    "###
    );

    // Normalize the table layout: replace the dashed underline row and collapse
    // runs of spaces so column widths don't affect the snapshot.
    let filters = context
        .filters()
        .into_iter()
        .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")])
        .collect::<Vec<_>>();

    // Excluding a package that isn't installed changes nothing.
    uv_snapshot!(filters, context.pip_list()
        .arg("--exclude")
        .arg("numpy"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Package Version Editable project location
    [UNDERLINE]
    anyio 4.3.0
    idna 3.6
    poetry-editable 0.1.0 [WORKSPACE]/test/packages/poetry_editable
    sniffio 1.3.1
    ----- stderr -----
    "###
    );

    // Excluding the only editable package also drops the editable column.
    uv_snapshot!(filters, context.pip_list()
        .arg("--exclude")
        .arg("poetry-editable"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Package Version
    [UNDERLINE]
    anyio 4.3.0
    idna 3.6
    sniffio 1.3.1
    ----- stderr -----
    "###
    );

    uv_snapshot!(filters, context.pip_list()
        .arg("--exclude")
        .arg("numpy")
        .arg("--exclude")
        .arg("poetry-editable"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Package Version
    [UNDERLINE]
    anyio 4.3.0
    idna 3.6
    sniffio 1.3.1
    ----- stderr -----
    "###
    );
}
// JSON output: editable installs carry `editable_project_location`, and the
// `--editable`/`--exclude-editable` filters apply to JSON too.
// NOTE(review): skipped on Windows, presumably due to path separators in the
// JSON `editable_project_location` value — confirm.
#[test]
#[cfg(feature = "pypi")]
#[cfg(not(windows))]
fn list_format_json() {
    let context = TestContext::new("3.12");

    // Install the editable package.
    uv_snapshot!(context.filters(), context.pip_install()
        .arg("-e")
        .arg(context.workspace_root.join("test/packages/poetry_editable")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 4 packages in [TIME]
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
    + anyio==4.3.0
    + idna==3.6
    + poetry-editable==0.1.0 (from file://[WORKSPACE]/test/packages/poetry_editable)
    + sniffio==1.3.1
    "###
    );

    // Normalize the table layout: replace the dashed underline row and collapse
    // runs of spaces so column widths don't affect the snapshot.
    let filters: Vec<_> = context
        .filters()
        .into_iter()
        .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")])
        .collect();

    uv_snapshot!(filters, context.pip_list()
        .arg("--format=json"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [{"name":"anyio","version":"4.3.0"},{"name":"idna","version":"3.6"},{"name":"poetry-editable","version":"0.1.0","editable_project_location":"[WORKSPACE]/test/packages/poetry_editable"},{"name":"sniffio","version":"1.3.1"}]
    ----- stderr -----
    "###
    );

    uv_snapshot!(filters, context.pip_list()
        .arg("--format=json")
        .arg("--editable"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [{"name":"poetry-editable","version":"0.1.0","editable_project_location":"[WORKSPACE]/test/packages/poetry_editable"}]
    ----- stderr -----
    "###
    );

    uv_snapshot!(filters, context.pip_list()
        .arg("--format=json")
        .arg("--exclude-editable"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [{"name":"anyio","version":"4.3.0"},{"name":"idna","version":"3.6"},{"name":"sniffio","version":"1.3.1"}]
    ----- stderr -----
    "###
    );
}
// Freeze output: editables are listed as plain `name==version` (no path), and
// the editable filters apply.
#[test]
#[cfg(feature = "pypi")]
fn list_format_freeze() {
    let context = TestContext::new("3.12");

    // Install the editable package.
    uv_snapshot!(context.filters(), context
        .pip_install()
        .arg("-e")
        .arg(context.workspace_root.join("test/packages/poetry_editable")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 4 packages in [TIME]
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
    + anyio==4.3.0
    + idna==3.6
    + poetry-editable==0.1.0 (from file://[WORKSPACE]/test/packages/poetry_editable)
    + sniffio==1.3.1
    "###
    );

    // Normalize the table layout: replace the dashed underline row and collapse
    // runs of spaces so column widths don't affect the snapshot.
    let filters = context
        .filters()
        .into_iter()
        .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")])
        .collect::<Vec<_>>();

    uv_snapshot!(filters, context.pip_list()
        .arg("--format=freeze"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    anyio==4.3.0
    idna==3.6
    poetry-editable==0.1.0
    sniffio==1.3.1
    ----- stderr -----
    "###
    );

    uv_snapshot!(filters, context.pip_list()
        .arg("--format=freeze")
        .arg("--editable"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    poetry-editable==0.1.0
    ----- stderr -----
    "###
    );

    uv_snapshot!(filters, context.pip_list()
        .arg("--format=freeze")
        .arg("--exclude-editable"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    anyio==4.3.0
    idna==3.6
    sniffio==1.3.1
    ----- stderr -----
    "###
    );
}
// Legacy setuptools editables (`.egg-link` + `easy-install.pth`) are detected
// and listed with their project location.
#[test]
fn list_legacy_editable() -> Result<()> {
    let context = TestContext::new("3.12");

    let site_packages = ChildPath::new(context.site_packages());

    // Fabricate a legacy editable install: a project directory with egg-info
    // metadata, plus an `.egg-link` pointing at it from site-packages.
    let target = context.temp_dir.child("zstandard_project");
    target.child("zstd").create_dir_all()?;
    target.child("zstd").child("__init__.py").write_str("")?;
    target.child("zstandard.egg-info").create_dir_all()?;
    target
        .child("zstandard.egg-info")
        .child("PKG-INFO")
        .write_str(
            "Metadata-Version: 2.1
Name: zstandard
Version: 0.22.0
",
        )?;

    site_packages
        .child("zstandard.egg-link")
        .write_str(target.path().to_str().unwrap())?;
    site_packages.child("easy-install.pth").write_str(&format!(
        "something\n{}\nanother thing\n",
        target.path().to_str().unwrap()
    ))?;

    // Normalize the table layout: replace the dashed underline row and collapse
    // runs of spaces so column widths don't affect the snapshot.
    let filters = context
        .filters()
        .into_iter()
        .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")])
        .collect::<Vec<_>>();

    uv_snapshot!(filters, context.pip_list()
        .arg("--editable"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Package Version Editable project location
    [UNDERLINE]
    zstandard 0.22.0 [TEMP_DIR]/zstandard_project
    ----- stderr -----
    "###
    );

    Ok(())
}
// A legacy editable whose egg-info declares an unparseable version makes
// `pip list` fail with a metadata error that names the `.egg-link` file.
#[test]
fn list_legacy_editable_invalid_version() -> Result<()> {
    let context = TestContext::new("3.12");

    let site_packages = ChildPath::new(context.site_packages());

    // `0.1-bulbasaur` is not a valid PEP 440 version.
    let target = context.temp_dir.child("paramiko_project");
    target.child("paramiko.egg-info").create_dir_all()?;
    target
        .child("paramiko.egg-info")
        .child("PKG-INFO")
        .write_str(
            "Metadata-Version: 1.0
Name: paramiko
Version: 0.1-bulbasaur
",
        )?;

    site_packages
        .child("paramiko.egg-link")
        .write_str(target.path().to_str().unwrap())?;

    // Normalize the table layout: replace the dashed underline row and collapse
    // runs of spaces so column widths don't affect the snapshot.
    let filters = context
        .filters()
        .into_iter()
        .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")])
        .collect::<Vec<_>>();

    uv_snapshot!(filters, context.pip_list()
        .arg("--editable"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Failed to read metadata from: `[SITE_PACKAGES]/paramiko.egg-link`
      Caused by: after parsing `0.1-b`, found `ulbasaur`, which is not part of a valid version
    "###
    );

    Ok(())
}
/// `uv pip list --format=freeze` must still print package output when
/// `--quiet` is passed: quiet suppresses diagnostics, not the requested
/// listing (in all three modes: default, `--editable`, `--exclude-editable`).
#[test]
#[cfg(feature = "pypi")]
fn list_ignores_quiet_flag_format_freeze() {
    let context = TestContext::new("3.12");

    // Install the editable package.
    uv_snapshot!(context.filters(), context
        .pip_install()
        .arg("-e")
        .arg(context.workspace_root.join("test/packages/poetry_editable")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 4 packages in [TIME]
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
     + anyio==4.3.0
     + idna==3.6
     + poetry-editable==0.1.0 (from file://[WORKSPACE]/test/packages/poetry_editable)
     + sniffio==1.3.1
    "###
    );

    // Normalize table formatting (underline row, space runs) for the snapshots.
    let filters = context
        .filters()
        .into_iter()
        .chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")])
        .collect::<Vec<_>>();

    // `--quiet` must not suppress the freeze listing.
    uv_snapshot!(filters, context.pip_list()
        .arg("--format=freeze")
        .arg("--quiet"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    anyio==4.3.0
    idna==3.6
    poetry-editable==0.1.0
    sniffio==1.3.1
    ----- stderr -----
    "###
    );

    // Same with `--editable`: only the editable install is listed.
    uv_snapshot!(filters, context.pip_list()
        .arg("--format=freeze")
        .arg("--editable")
        .arg("--quiet"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    poetry-editable==0.1.0
    ----- stderr -----
    "###
    );

    // And with `--exclude-editable`: the editable install is omitted.
    uv_snapshot!(filters, context.pip_list()
        .arg("--format=freeze")
        .arg("--exclude-editable")
        .arg("--quiet"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    anyio==4.3.0
    idna==3.6
    sniffio==1.3.1
    ----- stderr -----
    "###
    );
}
/// `uv pip list --target` should list packages installed into a `--target`
/// directory, and those packages must not leak into the default environment's
/// listing.
#[test]
#[cfg(feature = "pypi")]
fn list_target() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?;

    let target = context.temp_dir.child("target");

    // Install packages to a target directory.
    context
        .pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--target")
        .arg(target.path())
        .assert()
        .success();

    // List packages in the target directory.
    uv_snapshot!(context.filters(), context.pip_list()
        .arg("--target")
        .arg(target.path()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Package    Version
    ---------- -------
    markupsafe 2.1.3
    tomli      2.0.1
    ----- stderr -----
    "###
    );

    // Without --target, the packages should not be visible.
    uv_snapshot!(context.pip_list(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    "###
    );

    Ok(())
}
/// `uv pip list --prefix` should list packages installed into a `--prefix`
/// directory, and those packages must not leak into the default environment's
/// listing. Mirrors `list_target` for the prefix layout.
#[test]
#[cfg(feature = "pypi")]
fn list_prefix() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?;

    let prefix = context.temp_dir.child("prefix");

    // Install packages to a prefix directory.
    context
        .pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--prefix")
        .arg(prefix.path())
        .assert()
        .success();

    // List packages in the prefix directory.
    uv_snapshot!(context.filters(), context.pip_list()
        .arg("--prefix")
        .arg(prefix.path()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Package    Version
    ---------- -------
    markupsafe 2.1.3
    tomli      2.0.1
    ----- stderr -----
    "###
    );

    // Without --prefix, the packages should not be visible.
    uv_snapshot!(context.pip_list(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    "###
    );

    Ok(())
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/pip_install.rs | crates/uv/tests/it/pip_install.rs | use std::io::Cursor;
use std::process::Command;
use anyhow::Result;
use assert_cmd::prelude::*;
use assert_fs::prelude::*;
use flate2::write::GzEncoder;
use fs_err as fs;
use fs_err::File;
use indoc::{formatdoc, indoc};
use predicates::prelude::predicate;
use url::Url;
use wiremock::{
Mock, MockServer, ResponseTemplate,
matchers::{basic_auth, method, path},
};
#[cfg(feature = "git")]
use crate::common::{self, decode_token};
use crate::common::{
DEFAULT_PYTHON_VERSION, TestContext, build_vendor_links_url, download_to_disk, get_bin,
packse_index_url, uv_snapshot, venv_bin_path,
};
use uv_fs::Simplified;
use uv_static::EnvVars;
/// `uv pip install -r` with a nonexistent requirements file should fail with a
/// clear "file not found" error and must not create the file as a side effect.
#[test]
fn missing_requirements_txt() {
    let context = TestContext::new("3.12");
    // Never written: used only to assert the path stays absent afterwards.
    let requirements_txt = context.temp_dir.child("requirements.txt");

    uv_snapshot!(context.filters(), context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: File not found: `requirements.txt`
    "###
    );

    requirements_txt.assert(predicates::path::missing());
}
/// An empty requirements file is accepted: uv warns that it contains no
/// dependencies and audits the (empty) set of requirements successfully.
#[test]
fn empty_requirements_txt() -> Result<()> {
    let context = TestContext::new("3.12");

    // Create the requirements file with no contents at all.
    context.temp_dir.child("requirements.txt").touch()?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    warning: Requirements file `requirements.txt` does not contain any dependencies
    Audited in [TIME]
    "###
    );

    Ok(())
}
/// `uv pip install -r pyproject.toml` with no such file should fail with a
/// "file not found" error, same as a missing `requirements.txt`.
#[test]
fn missing_pyproject_toml() {
    let context = TestContext::new("3.12");

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("pyproject.toml"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: File not found: `pyproject.toml`
    "###
    );
}
/// A `--find-links` path that does not exist should produce a hard error
/// (exit code 2) naming the missing directory, not a resolver failure.
#[test]
fn missing_find_links() -> Result<()> {
    // The OS error text differs per platform, so it's filtered to [OS ERROR 2].
    let context = TestContext::new("3.12").with_filtered_missing_file_error();
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("flask")?;

    uv_snapshot!(context.filters(), context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--find-links")
        .arg("./missing")
        .arg("--strict"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Failed to read `--find-links` directory: [TEMP_DIR]/missing
    Caused by: [OS ERROR 2]
    "###
    );

    Ok(())
}
/// A syntactically invalid `pyproject.toml` fails with a TOML parse error.
/// The same parse error also appears once as a warning, because settings
/// discovery reads the file before the install command does.
#[test]
fn invalid_pyproject_toml_syntax() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // Not valid TOML: a bare expression with no key/value structure.
    pyproject_toml.write_str("123 - 456")?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("pyproject.toml"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    warning: Failed to parse `pyproject.toml` during settings discovery:
      TOML parse error at line 1, column 5
        |
      1 | 123 - 456
        |     ^
      key with no value, expected `=`
    error: Failed to parse: `pyproject.toml`
      Caused by: TOML parse error at line 1, column 5
      |
    1 | 123 - 456
      |     ^
    key with no value, expected `=`
    "
    );

    Ok(())
}
/// A `pyproject.toml` with a `[project]` table but no `project.name` violates
/// the PEP 621 schema and should fail with a pointed error.
#[test]
fn invalid_pyproject_toml_project_schema() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // Valid TOML, but missing the required `project.name` field.
    pyproject_toml.write_str("[project]")?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("pyproject.toml"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Failed to parse: `pyproject.toml`
      Caused by: TOML parse error at line 1, column 1
      |
    1 | [project]
      | ^^^^^^^^^
    `pyproject.toml` is using the `[project]` table, but the required `project.name` field is not set
    "###
    );

    Ok(())
}
/// An invalid `[tool.uv]` option type in `pyproject.toml` should only warn
/// during settings discovery — the install itself must still succeed.
#[test]
fn invalid_pyproject_toml_option_schema() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    // `index-url` must be a string; a boolean is a schema violation.
    pyproject_toml.write_str(indoc! {r"
        [tool.uv]
        index-url = true
    "})?;

    uv_snapshot!(context.pip_install()
        .arg("iniconfig"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    warning: Failed to parse `pyproject.toml` during settings discovery:
      TOML parse error at line 2, column 13
        |
      2 | index-url = true
        |             ^^^^
      invalid type: boolean `true`, expected a string
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + iniconfig==2.0.0
    "###
    );

    Ok(())
}
/// An unknown key under `[tool.uv]` should only warn during settings
/// discovery; installing from the same `pyproject.toml` still succeeds.
#[test]
fn invalid_pyproject_toml_option_unknown_field() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [tool.uv]
        unknown = "field"

        [build-system]
        requires = ["setuptools"]
        build-backend = "setuptools.build_meta"
    "#})?;

    // The full list of valid fields is long and changes over time; truncate it
    // so the snapshot stays stable when new options are added.
    let mut filters = context.filters();
    filters.push((
        "expected one of `required-version`, `native-tls`, .*",
        "expected one of `required-version`, `native-tls`, [...]",
    ));

    uv_snapshot!(filters, context.pip_install()
        .arg("-r")
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    warning: Failed to parse `pyproject.toml` during settings discovery:
      TOML parse error at line 2, column 1
        |
      2 | unknown = "field"
        | ^^^^^^^
      unknown field `unknown`, expected one of `required-version`, `native-tls`, [...]
    Resolved in [TIME]
    Audited in [TIME]
    "###
    );

    Ok(())
}
/// Passing an arbitrary `.toml` file to `-r` is treated as a PEP 751 lockfile
/// candidate: the filename must match the `pylock.*.toml` pattern, otherwise
/// uv rejects it with a naming error.
#[test]
fn invalid_toml_filename() -> Result<()> {
    let context = TestContext::new("3.12");
    let test_toml = context.temp_dir.child("test.toml");
    test_toml.touch()?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("test.toml"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: `test.toml` is not a valid PEP 751 filename: expected TOML file to start with `pylock.` and end with `.toml` (e.g., `pylock.toml`, `pylock.dev.toml`)
    "
    );

    Ok(())
}
/// Project-only settings (here, `managed`) are not allowed in `uv.toml`.
/// This covers the automatic-discovery path: a `uv.toml` found in the working
/// directory causes a hard error before any install work happens.
#[test]
fn invalid_uv_toml_option_disallowed_automatic_discovery() -> Result<()> {
    let context = TestContext::new("3.12");
    let uv_toml = context.temp_dir.child("uv.toml");
    uv_toml.write_str(indoc! {r"
        managed = true
    "})?;

    uv_snapshot!(context.pip_install()
        .arg("iniconfig"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Failed to parse: `uv.toml`. The `managed` field is not allowed in a `uv.toml` file. `managed` is only applicable in the context of a project, and should be placed in a `pyproject.toml` file instead.
    "###
    );

    Ok(())
}
/// Same as `invalid_uv_toml_option_disallowed_automatic_discovery`, but for a
/// config file passed explicitly via `--config-file`: the `managed` field is
/// still rejected, with the provided filename in the error.
#[test]
fn invalid_uv_toml_option_disallowed_command_line() -> Result<()> {
    let context = TestContext::new("3.12");
    let uv_toml = context.temp_dir.child("foo.toml");
    uv_toml.write_str(indoc! {r"
        managed = true
    "})?;

    uv_snapshot!(context.pip_install()
        .arg("iniconfig")
        .arg("--config-file")
        .arg("foo.toml"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Failed to parse: `foo.toml`. The `managed` field is not allowed in a `uv.toml` file. `managed` is only applicable in the context of a project, and should be placed in a `pyproject.toml` file instead.
    "
    );

    Ok(())
}
/// Credentials supplied for an index in `uv.toml` should be reused when the
/// same index URL (same host and username) is passed on the command line
/// without a password — the install succeeds against the basic-auth proxy.
#[test]
fn cache_uv_toml_credentials() -> Result<()> {
    let context = TestContext::new("3.12");
    let uv_toml = context.temp_dir.child("uv.toml");
    // Full credentials (user + password) live only in the config file.
    uv_toml.write_str(indoc! {r#"
        [pip]
        extra-index-url = ["https://public:heron@pypi-proxy.fly.dev/basic-auth/simple/"]
    "#})?;

    // Provide an extra index with the same username and URL as in `uv.toml` but
    // no password.
    uv_snapshot!(context.pip_install()
        .arg("iniconfig")
        .arg("--extra-index-url")
        .arg("https://public@pypi-proxy.fly.dev/basic-auth/simple/"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + iniconfig==2.0.0
    "
    );

    Ok(())
}
/// For indirect, non-user controlled pyproject.toml, we don't enforce correctness.
///
/// If we fail to extract the PEP 621 metadata, we fall back to treating it as a source
/// tree, as there are some cases where the `pyproject.toml` may not be a valid PEP
/// 621 file, but might still resolve under PEP 517. (If the source tree doesn't
/// resolve under PEP 517, we'll catch that later.)
///
/// For example, Hatch's "Context formatting" API is not compliant with PEP 621, as
/// it expects dynamic processing by the build backend for the static metadata
/// fields. See: <https://hatch.pypa.io/latest/config/context/>
#[test]
fn invalid_pyproject_toml_requirement_indirect() -> Result<()> {
    let context = TestContext::new("3.12");
    // A path dependency whose `pyproject.toml` has an invalid PEP 508
    // requirement (`flask==1.0.x`): uv defers the failure to the PEP 517
    // build, so the error below comes from setuptools, not from uv's parser.
    let pyproject_toml = context.temp_dir.child("path_dep/pyproject.toml");
    pyproject_toml.write_str(
        r#"[project]
name = "project"
version = "0.1.0"
dependencies = ["flask==1.0.x"]
"#,
    )?;
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("./path_dep")?;

    uv_snapshot!(context.filters(), context.pip_install()
        .arg("-r")
        .arg("requirements.txt"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × Failed to build `project @ file://[TEMP_DIR]/path_dep`
      ├─▶ The build backend returned an error
      ╰─▶ Call to `setuptools.build_meta:__legacy__.build_wheel` failed (exit status: 1)

          [stdout]
          configuration error: `project.dependencies[0]` must be pep508
          DESCRIPTION:
              Project dependency specification according to PEP 508

          GIVEN VALUE:
              "flask==1.0.x"

          OFFENDING RULE: 'format'

          DEFINITION:
              {
                  "$id": "#/definitions/dependency",
                  "title": "Dependency",
                  "type": "string",
                  "format": "pep508"
              }

          [stderr]
          Traceback (most recent call last):
            File "<string>", line 14, in <module>
            File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 325, in get_requires_for_build_wheel
              return self._get_build_requires(config_settings, requirements=['wheel'])
                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
            File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires
              self.run_setup()
            File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 487, in run_setup
              super().run_setup(setup_script=setup_script)
            File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup
              exec(code, locals())
            File "<string>", line 1, in <module>
            File "[CACHE_DIR]/builds-v0/[TMP]/__init__.py", line 104, in setup
              return distutils.core.setup(**attrs)
                     ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
            File "[CACHE_DIR]/builds-v0/[TMP]/core.py", line 159, in setup
              dist.parse_config_files()
            File "[CACHE_DIR]/builds-v0/[TMP]/_virtualenv.py", line 20, in parse_config_files
              result = old_parse_config_files(self, *args, **kwargs)
                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
            File "[CACHE_DIR]/builds-v0/[TMP]/dist.py", line 631, in parse_config_files
              pyprojecttoml.apply_configuration(self, filename, ignore_option_errors)
            File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 68, in apply_configuration
              config = read_configuration(filepath, True, ignore_option_errors, dist)
                       ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
            File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 129, in read_configuration
              validate(subset, filepath)
            File "[CACHE_DIR]/builds-v0/[TMP]/pyprojecttoml.py", line 57, in validate
              raise ValueError(f"{error}/n{summary}") from None
          ValueError: invalid pyproject.toml config: `project.dependencies[0]`.
          configuration error: `project.dependencies[0]` must be pep508
          hint: This usually indicates a problem with the package or the build environment.
    "###
    );

    Ok(())
}
/// A malformed `--python-version` value (`311` instead of `3.11`) should be
/// rejected at argument-parsing time with a clap error.
#[test]
fn invalid_python_version() {
    let context = TestContext::new("3.12");

    uv_snapshot!(context.filters(), context.pip_install()
        .arg("flask")
        .arg("--python-version")
        .arg("311"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: invalid value '311' for '--python-version <PYTHON_VERSION>': Python version `311` has an invalid major version (311)

    For more information, try '--help'.
    "
    );
}
/// Running `uv install` (without the `pip` subcommand) should fail with a tip
/// pointing the user at `uv pip install`.
#[test]
fn missing_pip() {
    // Invoke the raw binary directly: no TestContext, since we're testing
    // top-level argument parsing rather than any environment behavior.
    uv_snapshot!(Command::new(get_bin()).arg("install"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: unrecognized subcommand 'install'

      tip: a similar subcommand exists: 'uv pip install'

    Usage: uv [OPTIONS] <COMMAND>

    For more information, try '--help'.
    "###);
}
/// Conflicting requirements (`flask>=3.0.2` needs `werkzeug>=3.0.0`, but the
/// user pins `werkzeug<1.0.0`) should produce a PubGrub "no solution" report.
#[test]
fn no_solution() {
    let context = TestContext::new("3.12");

    uv_snapshot!(context.pip_install()
        .arg("flask>=3.0.2")
        .arg("WerkZeug<1.0.0")
        .arg("--strict"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only flask<=3.0.2 is available and flask==3.0.2 depends on werkzeug>=3.0.0, we can conclude that flask>=3.0.2 depends on werkzeug>=3.0.0.
          And because you require flask>=3.0.2 and werkzeug<1.0.0, we can conclude that your requirements are unsatisfiable.
    "###);
}
/// Install a package from the command line into a virtual environment.
///
/// Smoke test for the happy path: Flask plus its full dependency closure is
/// resolved and installed, and is importable afterwards.
#[test]
fn install_package() {
    let context = TestContext::new("3.12");

    // Install Flask.
    uv_snapshot!(context.pip_install()
        .arg("Flask")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 7 packages in [TIME]
    Prepared 7 packages in [TIME]
    Installed 7 packages in [TIME]
     + blinker==1.7.0
     + click==8.1.7
     + flask==3.0.2
     + itsdangerous==2.1.2
     + jinja2==3.1.3
     + markupsafe==2.1.5
     + werkzeug==3.0.1
    "###
    );

    // The installed package must be importable from the environment.
    context.assert_command("import flask").success();
}
/// Install a package from a `requirements.txt` into a virtual environment.
///
/// Also verifies that a subsequent install from a different requirements file
/// is additive: previously-installed packages are left in place.
#[test]
fn install_requirements_txt() -> Result<()> {
    let context = TestContext::new("3.12");

    // Install Flask.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("Flask")?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 7 packages in [TIME]
    Prepared 7 packages in [TIME]
    Installed 7 packages in [TIME]
     + blinker==1.7.0
     + click==8.1.7
     + flask==3.0.2
     + itsdangerous==2.1.2
     + jinja2==3.1.3
     + markupsafe==2.1.5
     + werkzeug==3.0.1
    "###
    );

    context.assert_command("import flask").success();

    // Install iniconfig (which shouldn't remove other packages).
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("iniconfig")?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + iniconfig==2.0.0
    "
    );

    // Flask must still be present after the second, unrelated install.
    context.assert_command("import flask").success();

    Ok(())
}
/// Install a package from a `requirements.txt` passed via `-r -` into a virtual environment.
#[test]
#[allow(clippy::disallowed_types)]
fn install_from_stdin() -> Result<()> {
    let context = TestContext::new("3.12");

    // Write the requirements to a file, then feed that file to uv as stdin.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("Flask")?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("-")
        .arg("--strict").stdin(std::fs::File::open(requirements_txt)?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 7 packages in [TIME]
    Prepared 7 packages in [TIME]
    Installed 7 packages in [TIME]
     + blinker==1.7.0
     + click==8.1.7
     + flask==3.0.2
     + itsdangerous==2.1.2
     + jinja2==3.1.3
     + markupsafe==2.1.5
     + werkzeug==3.0.1
    "###
    );

    context.assert_command("import flask").success();

    Ok(())
}
/// Install a package from a `requirements.txt` passed via `-r /dev/stdin` into a virtual environment.
///
/// Unix-only variant of `install_from_stdin`: `/dev/stdin` is a device path
/// that does not exist on Windows.
#[test]
#[cfg(not(windows))]
#[allow(clippy::disallowed_types)]
fn install_from_dev_stdin() -> Result<()> {
    let context = TestContext::new("3.12");

    // Write the requirements to a file, then feed that file to uv as stdin.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("Flask")?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("/dev/stdin")
        .arg("--strict").stdin(std::fs::File::open(requirements_txt)?), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 7 packages in [TIME]
    Prepared 7 packages in [TIME]
    Installed 7 packages in [TIME]
     + blinker==1.7.0
     + click==8.1.7
     + flask==3.0.2
     + itsdangerous==2.1.2
     + jinja2==3.1.3
     + markupsafe==2.1.5
     + werkzeug==3.0.1
    "###
    );

    context.assert_command("import flask").success();

    Ok(())
}
/// Install a package from a remote `requirements.txt` into a virtual environment.
///
/// The file is served behind HTTP basic auth: the install must fail without
/// credentials and succeed when user/password are embedded in the URL.
#[tokio::test]
async fn install_remote_requirements_txt() -> Result<()> {
    let context = TestContext::new("3.12");

    let username = "user";
    let password = "password";
    let requirements_txt = "Flask";
    // NOTE(review): only the URI is kept here; the `MockServer` created inside
    // the helper is dropped when it returns. Wiremock documents that dropped
    // servers are shut down — verify the server is still reachable at request
    // time (TODO confirm).
    let server_url = start_requirements_server(username, password, requirements_txt).await;
    let mut requirements_url = Url::parse(&format!("{}/requirements.txt", &server_url))?;

    // Should fail without credentials
    uv_snapshot!(context.filters(), context.pip_install()
        .arg("-r")
        .arg(requirements_url.as_str())
        .arg("--strict"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Error while accessing remote requirements file: `http://[LOCALHOST]/requirements.txt`
    "
    );

    let _ = requirements_url.set_username(username);
    let _ = requirements_url.set_password(Some(password));

    // Should succeed with credentials
    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg(requirements_url.as_str())
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 7 packages in [TIME]
    Prepared 7 packages in [TIME]
    Installed 7 packages in [TIME]
     + blinker==1.7.0
     + click==8.1.7
     + flask==3.0.2
     + itsdangerous==2.1.2
     + jinja2==3.1.3
     + markupsafe==2.1.5
     + werkzeug==3.0.1
    "###
    );

    context.assert_command("import flask").success();

    let requirements_txt = "iniconfig";
    // Update the mock server to serve a new requirements.txt
    let server_url = start_requirements_server(username, password, requirements_txt).await;
    let mut requirements_url = Url::parse(&format!("{}/requirements.txt", &server_url))?;
    let _ = requirements_url.set_username(username);
    let _ = requirements_url.set_password(Some(password));

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg(requirements_url.as_str())
        .arg("--strict"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + iniconfig==2.0.0
    "
    );

    // Flask from the first install must still be importable.
    context.assert_command("import flask").success();

    Ok(())
}
/// Start a wiremock server that serves `requirements_txt` at
/// `/requirements.txt` behind HTTP basic auth, returning the server's base
/// URI. Requests without the given `username`/`password` receive a 401.
///
/// The authenticated mock is mounted first so it takes precedence: wiremock
/// evaluates mounted mocks in mount order, and the bare 401 mock only matches
/// requests the first mock rejected.
///
/// NOTE(review): only the URI (a `String`) escapes this function — the
/// `MockServer` itself is dropped on return. Wiremock documents that a
/// dropped server is shut down; confirm the returned URI remains serviceable
/// for the callers in this file (TODO confirm, e.g. via wiremock's server
/// pooling behavior).
async fn start_requirements_server(
    username: &str,
    password: &str,
    requirements_txt: &str,
) -> String {
    let server = MockServer::start().await;

    // Serve the requirements file, but only with valid credentials.
    Mock::given(method("GET"))
        .and(path("/requirements.txt"))
        .and(basic_auth(username, password))
        .respond_with(ResponseTemplate::new(200).set_body_string(requirements_txt))
        .mount(&server)
        .await;

    // Unauthenticated (or wrongly-authenticated) requests get a 401.
    Mock::given(method("GET"))
        .and(path("/requirements.txt"))
        .respond_with(ResponseTemplate::new(401))
        .mount(&server)
        .await;

    server.uri()
}
/// Warn (but don't fail) when unsupported flags are set in the `requirements.txt`.
///
/// `--pre` gets a hint to pass it on the command line; `--prefer-binary` is
/// simply ignored with a warning. The install itself still succeeds.
#[test]
fn install_unsupported_flag() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(indoc! {r"
        --pre
        --prefer-binary :all:
        iniconfig
    "})?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    warning: Ignoring unsupported option in `requirements.txt`: `--pre` (hint: pass `--pre` on the command line instead)
    warning: Ignoring unsupported option in `requirements.txt`: `--prefer-binary`
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + iniconfig==2.0.0
    "###
    );

    Ok(())
}
/// Install a requirements file with pins that conflict
///
/// This is likely to occur in the real world when compiled on one platform then installed on another.
/// The resolver should report the `click==7.0.0` / `flask==3.0.2` conflict.
#[test]
fn install_requirements_txt_conflicting_pins() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");

    // We pin `click` to a conflicting requirement
    requirements_txt.write_str(
        r"
blinker==1.7.0
click==7.0.0
flask==3.0.2
itsdangerous==2.1.2
jinja2==3.1.3
markupsafe==2.1.5
werkzeug==3.0.1
",
    )?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because flask==3.0.2 depends on click>=8.1.3 and you require click==7.0.0, we can conclude that your requirements and flask==3.0.2 are incompatible.
          And because you require flask==3.0.2, we can conclude that your requirements are unsatisfiable.
    "###
    );

    Ok(())
}
/// `uv pip install -r script.py` reads PEP 723 inline script metadata and
/// installs the script's declared dependencies; re-running after the metadata
/// changes installs only the newly-added packages.
#[test]
fn install_with_dependencies_from_script() -> Result<()> {
    let context = TestContext::new("3.12");
    let script = context.temp_dir.child("script.py");
    // PEP 723 inline metadata block declaring `anyio`.
    script.write_str(indoc! {r#"
        # /// script
        # requires-python = ">=3.11"
        # dependencies = [
        #   "anyio",
        # ]
        # ///

        import anyio
    "#})?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("script.py")
        .arg("--strict"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 3 packages in [TIME]
    Prepared 3 packages in [TIME]
    Installed 3 packages in [TIME]
     + anyio==4.3.0
     + idna==3.6
     + sniffio==1.3.1
    "
    );

    // Update the script file.
    script.write_str(indoc! {r#"
        # /// script
        # requires-python = ">=3.11"
        # dependencies = [
        #   "anyio",
        #   "iniconfig",
        # ]
        # ///

        import anyio
    "#})?;

    // Only the added dependency should be installed on the second run.
    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("script.py")
        .arg("--strict"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 4 packages in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + iniconfig==2.0.0
    "
    );

    Ok(())
}
/// Install a `pyproject.toml` file with a `poetry` section.
///
/// There is no `[project]` table, so metadata comes from building the source
/// tree via poetry-core; `--extra test` pulls in the optional `iniconfig`.
#[test]
fn install_pyproject_toml_poetry() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[tool.poetry]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]

[tool.poetry.dependencies]
python = "^3.10"
anyio = "^3"
iniconfig = { version = "*", optional = true }

[tool.poetry.extras]
test = ["iniconfig"]

[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
"#,
    )?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("pyproject.toml")
        .arg("--extra")
        .arg("test"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 4 packages in [TIME]
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
     + anyio==3.7.1
     + idna==3.6
     + iniconfig==2.0.0
     + sniffio==1.3.1
    "###
    );

    Ok(())
}
/// Respect installed versions when resolving.
///
/// Covers four phases: (1) an unconstrained requirement is satisfied by the
/// already-installed version (audit only); (2) a stricter pin forces an
/// upgrade; (3) `--reinstall-package` upgrades to the latest even though the
/// installed version satisfies the requirement; (4) `--reinstall-package`
/// reinstalls (`~`) even when the installed version is already the latest.
#[test]
fn respect_installed_and_reinstall() -> Result<()> {
    let context = TestContext::new("3.12");

    // Install Flask.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("Flask==2.3.2")?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 7 packages in [TIME]
    Prepared 7 packages in [TIME]
    Installed 7 packages in [TIME]
     + blinker==1.7.0
     + click==8.1.7
     + flask==2.3.2
     + itsdangerous==2.1.2
     + jinja2==3.1.3
     + markupsafe==2.1.5
     + werkzeug==3.0.1
    "###
    );

    context.assert_command("import flask").success();

    // Re-install Flask. We should respect the existing version.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("Flask")?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Audited 1 package in [TIME]
    "###
    );

    context.assert_command("import flask").success();

    // Install a newer version of Flask. We should upgrade it.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("Flask==2.3.3")?;

    // Counts vary below (uninstall + install), so filter them for stability.
    let context = context.with_filtered_counts();

    uv_snapshot!(context.filters(), context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Uninstalled [N] packages in [TIME]
    Installed [N] packages in [TIME]
     - flask==2.3.2
     + flask==2.3.3
    "###
    );

    // Re-install Flask. We should upgrade it.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("Flask")?;

    uv_snapshot!(context.filters(), context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--reinstall-package")
        .arg("Flask")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Uninstalled [N] packages in [TIME]
    Installed [N] packages in [TIME]
     - flask==2.3.3
     + flask==3.0.2
    "###
    );

    // Re-install Flask. We should install even though the version is current
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("Flask")?;

    uv_snapshot!(context.filters(), context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--reinstall-package")
        .arg("Flask")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Uninstalled [N] packages in [TIME]
    Installed [N] packages in [TIME]
     ~ flask==3.0.2
    "###
    );

    Ok(())
}
/// Installing a package with an extra (`httpx[http2]`) after installing it
/// without one should add only the extra's dependencies, leaving the
/// already-installed base distribution untouched.
#[test]
fn reinstall_extras() -> Result<()> {
    let context = TestContext::new("3.12");

    // Install httpx.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("httpx")?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 7 packages in [TIME]
    Prepared 7 packages in [TIME]
    Installed 7 packages in [TIME]
     + anyio==4.3.0
     + certifi==2024.2.2
     + h11==0.14.0
     + httpcore==1.0.4
     + httpx==0.27.0
     + idna==3.6
     + sniffio==1.3.1
    "###
    );

    context.assert_command("import httpx").success();

    // Re-install httpx, with an extra.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("httpx[http2]")?;

    // Only the `http2` extra's dependencies should be newly installed.
    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 10 packages in [TIME]
    Prepared 3 packages in [TIME]
    Installed 3 packages in [TIME]
     + h2==4.1.0
     + hpack==4.0.0
     + hyperframe==6.0.1
    "###
    );

    context.assert_command("import httpx").success();

    Ok(())
}
/// Warn, but don't fail, when uninstalling incomplete packages.
///
/// An installed distribution whose `RECORD` file has been deleted cannot be
/// cleanly uninstalled; upgrading it should emit a warning about the missing
/// `RECORD` and proceed with the install anyway.
#[test]
fn reinstall_incomplete() -> Result<()> {
    let context = TestContext::new("3.12");

    // Install anyio.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("anyio==3.7.0")?;

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 3 packages in [TIME]
    Prepared 3 packages in [TIME]
    Installed 3 packages in [TIME]
     + anyio==3.7.0
     + idna==3.6
     + sniffio==1.3.1
    "###
    );

    // Manually remove the `RECORD` file.
    fs_err::remove_file(context.site_packages().join("anyio-3.7.0.dist-info/RECORD"))?;

    // Re-install anyio.
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("anyio==4.0.0")?;

    uv_snapshot!(context.filters(), context.pip_install()
        .arg("-r")
        .arg("requirements.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 3 packages in [TIME]
    Prepared 1 package in [TIME]
    warning: Failed to uninstall package at [SITE_PACKAGES]/anyio-3.7.0.dist-info due to missing `RECORD` file. Installation may result in an incomplete environment.
    Uninstalled 1 package in [TIME]
    Installed 1 package in [TIME]
     - anyio==3.7.0
     + anyio==4.0.0
    "###
    );

    Ok(())
}
#[test]
fn exact_install_removes_extraneous_packages() -> Result<()> {
let context = TestContext::new("3.12").with_filtered_counts();
// Install anyio
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("anyio==3.7.0")?;
uv_snapshot!(context.filters(), context.pip_install()
.arg("--exact")
.arg("-r")
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/build_backend.rs | crates/uv/tests/it/build_backend.rs | use crate::common::{TestContext, uv_snapshot, venv_bin_path};
use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::fixture::{FileTouch, FileWriteBin, FileWriteStr, PathChild, PathCreateDir};
use flate2::bufread::GzDecoder;
use fs_err::File;
use indoc::{formatdoc, indoc};
use std::env;
use std::io::BufReader;
use std::path::Path;
use std::process::Command;
use tempfile::TempDir;
use uv_static::EnvVars;
/// Python smoke-test script for the installed `built_by_uv` package: imports
/// from both the top-level module and a nested submodule, so a successful run
/// proves the wheel's package layout is importable.
const BUILT_BY_UV_TEST_SCRIPT: &str = indoc! {r#"
from built_by_uv import greet
from built_by_uv.arithmetic.circle import area
print(greet())
print(f"Area of a circle with r=2: {area(2)}")
"#};
/// Test that build backend works if we invoke it directly.
///
/// We can't test end-to-end here including the PEP 517 bridge code since we don't have a uv wheel.
#[test]
#[cfg(feature = "pypi")]
fn built_by_uv_direct_wheel() -> Result<()> {
let context = TestContext::new("3.12");
let built_by_uv = Path::new("../../test/packages/built-by-uv");
let temp_dir = TempDir::new()?;
// Build a wheel by invoking the build backend CLI directly.
uv_snapshot!(context
.build_backend()
.arg("build-wheel")
.arg(temp_dir.path())
.current_dir(built_by_uv), @r###"
success: true
exit_code: 0
----- stdout -----
built_by_uv-0.1.0-py3-none-any.whl
----- stderr -----
"###);
// Install the freshly built wheel.
context
.pip_install()
.arg(temp_dir.path().join("built_by_uv-0.1.0-py3-none-any.whl"))
.assert()
.success();
// The installed package is importable and produces the expected output.
uv_snapshot!(context.python_command()
.arg("-c")
.arg(BUILT_BY_UV_TEST_SCRIPT), @r###"
success: true
exit_code: 0
----- stdout -----
Hello 👋
Area of a circle with r=2: 12.56636
----- stderr -----
"###);
// The console script declared by the package was installed into the venv's bin.
uv_snapshot!(Command::new("say-hi")
.env(EnvVars::PATH, venv_bin_path(&context.venv)), @r###"
success: true
exit_code: 0
----- stdout -----
Hi from a script!
----- stderr -----
"###);
Ok(())
}
/// Test that source tree -> source dist -> wheel works.
///
/// We can't test end-to-end here including the PEP 517 bridge code since we don't have a uv wheel,
/// so we call the build backend directly.
#[test]
#[cfg(feature = "pypi")]
fn built_by_uv_direct() -> Result<()> {
let context = TestContext::new("3.12");
let built_by_uv = Path::new("../../test/packages/built-by-uv");
// Step 1: build a source distribution from the source tree.
let sdist_dir = TempDir::new()?;
uv_snapshot!(context
.build_backend()
.arg("build-sdist")
.arg(sdist_dir.path())
.current_dir(built_by_uv), @r###"
success: true
exit_code: 0
----- stdout -----
built_by_uv-0.1.0.tar.gz
----- stderr -----
"###);
// Step 2: unpack the sdist into a fresh directory.
let sdist_tree = TempDir::new()?;
let sdist_reader = BufReader::new(File::open(
sdist_dir.path().join("built_by_uv-0.1.0.tar.gz"),
)?);
tar::Archive::new(GzDecoder::new(sdist_reader)).unpack(sdist_tree.path())?;
// Dropped eagerly: later steps must not depend on the sdist output directory.
drop(sdist_dir);
// Step 3: build a wheel from the unpacked sdist.
let wheel_dir = TempDir::new()?;
uv_snapshot!(context
.build_backend()
.arg("build-wheel")
.arg(wheel_dir.path())
.current_dir(sdist_tree.path().join("built_by_uv-0.1.0")), @r###"
success: true
exit_code: 0
----- stdout -----
built_by_uv-0.1.0-py3-none-any.whl
----- stderr -----
"###);
drop(sdist_tree);
// Step 4: install the wheel and smoke-test the installed package.
context
.pip_install()
.arg(wheel_dir.path().join("built_by_uv-0.1.0-py3-none-any.whl"))
.assert()
.success();
drop(wheel_dir);
uv_snapshot!(context.python_command()
.arg("-c")
.arg(BUILT_BY_UV_TEST_SCRIPT), @r###"
success: true
exit_code: 0
----- stdout -----
Hello 👋
Area of a circle with r=2: 12.56636
----- stderr -----
"###);
Ok(())
}
/// Test that editables work.
///
/// We can't test end-to-end here including the PEP 517 bridge code since we don't have a uv wheel,
/// so we call the build backend directly.
#[test]
#[cfg(feature = "pypi")]
fn built_by_uv_editable() -> Result<()> {
let context = TestContext::new("3.12");
let built_by_uv = Path::new("../../test/packages/built-by-uv");
// Without the editable, pytest fails.
context.pip_install().arg("pytest").assert().success();
context
.python_command()
.arg("-m")
.arg("pytest")
.current_dir(built_by_uv)
.assert()
.failure();
// Build and install the editable. Normally, this should be one step with the editable never
// been seen, but we have to split it for the test.
let wheel_dir = TempDir::new()?;
uv_snapshot!(context
.build_backend()
.arg("build-wheel")
.arg(wheel_dir.path())
.current_dir(built_by_uv), @r###"
success: true
exit_code: 0
----- stdout -----
built_by_uv-0.1.0-py3-none-any.whl
----- stderr -----
"###);
context
.pip_install()
.arg(wheel_dir.path().join("built_by_uv-0.1.0-py3-none-any.whl"))
.assert()
.success();
drop(wheel_dir);
// Now, pytest passes.
uv_snapshot!(context.python_command()
.arg("-m")
.arg("pytest")
// Avoid showing absolute paths and column dependent layout
.arg("--quiet")
.arg("--capture=no")
.current_dir(built_by_uv), @r###"
success: true
exit_code: 0
----- stdout -----
..
2 passed in [TIME]
----- stderr -----
"###);
Ok(())
}
/// The executable bit on data-directory scripts survives the wheel round-trip:
/// a shell script shipped via `tool.uv.build-backend.data.scripts` is runnable
/// from the venv's `bin` directory after installation.
#[cfg(all(unix, feature = "git"))]
#[test]
fn preserve_executable_bit() -> Result<()> {
    use std::io::Write;

    let context = TestContext::new("3.12");
    let project_dir = context.temp_dir.path().join("preserve_executable_bit");

    // Scaffold a library project.
    context
        .init()
        .arg("--lib")
        .arg(&project_dir)
        .assert()
        .success();

    // Register a `scripts` data directory in the build-backend settings.
    // `append(true)` already implies write access, so an explicit `write(true)`
    // is redundant (clippy: `ineffective_open_options`).
    fs_err::OpenOptions::new()
        .append(true)
        .open(project_dir.join("pyproject.toml"))?
        .write_all(
            indoc! {r#"
[tool.uv.build-backend.data]
scripts = "scripts"
"#}
            .as_bytes(),
        )?;

    // Ship a shell script via the data directory.
    fs_err::create_dir(project_dir.join("scripts"))?;
    fs_err::write(
        project_dir.join("scripts").join("greet.sh"),
        indoc! {r#"
echo "Hi from the shell"
"#},
    )?;

    // Build the wheel with the build backend CLI.
    context
        .build_backend()
        .arg("build-wheel")
        .arg(context.temp_dir.path())
        .current_dir(project_dir)
        .assert()
        .success();

    let wheel = context
        .temp_dir
        .path()
        .join("preserve_executable_bit-0.1.0-py3-none-any.whl");
    context.pip_install().arg(wheel).assert().success();

    // The script must be directly executable from the venv's bin directory,
    // i.e. its executable bit was preserved through sdist/wheel packaging.
    uv_snapshot!(Command::new("greet.sh")
        .env(EnvVars::PATH, venv_bin_path(&context.venv)), @r###"
success: true
exit_code: 0
----- stdout -----
Hi from the shell
----- stderr -----
"###);
    Ok(())
}
/// Test `tool.uv.build-backend.module-name`.
///
/// We include only the module specified by `module-name`, ignoring the project name and all other
/// potential modules.
#[test]
fn rename_module() -> Result<()> {
let context = TestContext::new("3.12");
let temp_dir = TempDir::new()?;
// Project is named `foo`, but the build backend is told to package module `bar`.
context
.temp_dir
.child("pyproject.toml")
.write_str(indoc! {r#"
[project]
name = "foo"
version = "1.0.0"
[tool.uv.build-backend]
module-name = "bar"
[build-system]
requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#})?;
// This is the module we would usually include, but due to the renaming by `module-name` must
// ignore.
context
.temp_dir
.child("src/foo/__init__.py")
.write_str(r#"print("Hi from foo")"#)?;
// This module would be ignored from just `project.name`, but is selected due to the renaming.
context
.temp_dir
.child("src/bar/__init__.py")
.write_str(r#"print("Hi from bar")"#)?;
// The wheel keeps the project name (`foo`) even though the module is `bar`.
uv_snapshot!(context
.build_backend()
.arg("build-wheel")
.arg(temp_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
foo-1.0.0-py3-none-any.whl
----- stderr -----
"###);
context
.pip_install()
.arg(temp_dir.path().join("foo-1.0.0-py3-none-any.whl"))
.assert()
.success();
// Importing the module with the `module-name` name succeeds.
uv_snapshot!(context.python_command()
.arg("-c")
.arg("import bar"), @r###"
success: true
exit_code: 0
----- stdout -----
Hi from bar
----- stderr -----
"###);
// Importing the package name fails, it was overridden by `module-name`.
uv_snapshot!(context.python_command()
.arg("-c")
.arg("import foo"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
Traceback (most recent call last):
File "<string>", line 1, in <module>
ModuleNotFoundError: No module named 'foo'
"###);
Ok(())
}
/// Test `tool.uv.build-backend.module-name` for editable builds.
///
/// Same setup as `rename_module`, but exercising the `build-editable` hook.
#[test]
fn rename_module_editable_build() -> Result<()> {
let context = TestContext::new("3.12");
let temp_dir = TempDir::new()?;
context
.temp_dir
.child("pyproject.toml")
.write_str(indoc! {r#"
[project]
name = "foo"
version = "1.0.0"
[tool.uv.build-backend]
module-name = "bar"
[build-system]
requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#})?;
context
.temp_dir
.child("src/bar/__init__.py")
.write_str(r#"print("Hi from bar")"#)?;
// Build the editable wheel directly via the build backend CLI.
uv_snapshot!(context
.build_backend()
.arg("build-editable")
.arg(temp_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
foo-1.0.0-py3-none-any.whl
----- stderr -----
"###);
context
.pip_install()
.arg(temp_dir.path().join("foo-1.0.0-py3-none-any.whl"))
.assert()
.success();
// Importing the module with the `module-name` name succeeds.
uv_snapshot!(context.python_command()
.arg("-c")
.arg("import bar"), @r###"
success: true
exit_code: 0
----- stdout -----
Hi from bar
----- stderr -----
"###);
Ok(())
}
/// Check that the build succeeds even if the module name mismatches by case.
#[test]
fn build_module_name_normalization() -> Result<()> {
let context = TestContext::new("3.12");
let wheel_dir = context.temp_dir.path().join("dist");
fs_err::create_dir(&wheel_dir)?;
context
.temp_dir
.child("pyproject.toml")
.write_str(indoc! {r#"
[project]
name = "django-plugin"
version = "1.0.0"
[build-system]
requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
[tool.uv.build-backend]
module-name = "Django_plugin"
"#})?;
fs_err::create_dir_all(context.temp_dir.join("src"))?;
// Error case 1: No matching module.
uv_snapshot!(context
.build_backend()
.arg("build-wheel")
.arg(&wheel_dir), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Expected a Python module at: src/Django_plugin/__init__.py
");
fs_err::create_dir_all(context.temp_dir.join("src/Django_plugin"))?;
// Error case 2: A matching module, but no `__init__.py`.
uv_snapshot!(context
.build_backend()
.arg("build-wheel")
.arg(&wheel_dir), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Expected a Python module at: src/Django_plugin/__init__.py
");
// Use `Django_plugin` instead of `django_plugin`
context
.temp_dir
.child("src/Django_plugin/__init__.py")
.write_str(r#"print("Hi from bar")"#)?;
uv_snapshot!(context
.build_backend()
.arg("build-wheel")
.arg(&wheel_dir), @r"
success: true
exit_code: 0
----- stdout -----
django_plugin-1.0.0-py3-none-any.whl
----- stderr -----
");
context
.pip_install()
.arg("--no-index")
.arg("--find-links")
.arg(&wheel_dir)
.arg("django-plugin")
.assert()
.success();
uv_snapshot!(context.python_command()
.arg("-c")
.arg("import Django_plugin"), @r"
success: true
exit_code: 0
----- stdout -----
Hi from bar
----- stderr -----
");
// Former error case 3, now accepted: Multiple modules a matching name.
// Requires a case-sensitive filesystem.
#[cfg(target_os = "linux")]
{
context
.temp_dir
.child("src/django_plugin/__init__.py")
.write_str(r#"print("Hi from bar")"#)?;
uv_snapshot!(context
.build_backend()
.arg("build-wheel")
.arg(&wheel_dir), @r"
success: true
exit_code: 0
----- stdout -----
django_plugin-1.0.0-py3-none-any.whl
----- stderr -----
");
}
Ok(())
}
/// Building a source distribution succeeds when the archive contains a path
/// long enough to exceed the classic tar header name limit.
#[test]
fn build_sdist_with_long_path() -> Result<()> {
    let context = TestContext::new("3.12");
    let output_dir = TempDir::new()?;

    // Minimal project metadata for a `foo` package.
    context
        .temp_dir
        .child("pyproject.toml")
        .write_str(indoc! {r#"
[project]
name = "foo"
version = "1.0.0"
[build-system]
requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#})?;
    context
        .temp_dir
        .child("src/foo/__init__.py")
        .write_str(r#"print("Hi from foo")"#)?;

    // A submodule nested under a ~100-character directory name, pushing the
    // archive entry past the classic tar limits.
    let os = "o".repeat(100);
    context
        .temp_dir
        .child(format!("src/foo/l{os}ng/__init__.py"))
        .write_str(r#"print("Hi from foo")"#)?;

    uv_snapshot!(context
        .build_backend()
        .arg("build-sdist")
        .arg(output_dir.path()), @r###"
success: true
exit_code: 0
----- stdout -----
foo-1.0.0.tar.gz
----- stderr -----
"###);
    Ok(())
}
/// Building a source distribution fails with a clear error when the expected
/// module is missing, both when `src/` is absent and when it is empty.
#[test]
fn sdist_error_without_module() -> Result<()> {
    let context = TestContext::new("3.12");
    let sdist_out = TempDir::new()?;

    context
        .temp_dir
        .child("pyproject.toml")
        .write_str(indoc! {r#"
[project]
name = "foo"
version = "1.0.0"
[build-system]
requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#})?;

    // Without any `src/` directory, the module cannot be found.
    uv_snapshot!(context
        .build_backend()
        .arg("build-sdist")
        .arg(sdist_out.path()), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Expected a Python module at: src/foo/__init__.py
");

    // An empty `src/` directory produces the same error.
    fs_err::create_dir(context.temp_dir.join("src"))?;
    uv_snapshot!(context
        .build_backend()
        .arg("build-sdist")
        .arg(sdist_out.path()), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Expected a Python module at: src/foo/__init__.py
");
    Ok(())
}
/// Two distributions contributing parts to the same namespace package
/// (`complex_project.part_a` and `complex_project.part_b`) can be built,
/// installed together (both regular and editable), and imported across the
/// namespace boundary.
#[test]
fn complex_namespace_packages() -> Result<()> {
let context = TestContext::new("3.12");
let dist = context.temp_dir.child("dist");
dist.create_dir_all()?;
let init_py_a = indoc! {"
def one():
return 1
"};
// `part_b` imports from `part_a` across the shared namespace.
let init_py_b = indoc! {"
from complex_project.part_a import one
def two():
return one() + one()
"};
let projects = [
("complex-project", "part_a", init_py_a),
("complex-project", "part_b", init_py_b),
];
// Scaffold and build each namespace member; `module-name` declares the dotted
// submodule each distribution owns.
for (project_name, part_name, init_py) in projects {
let project = context
.temp_dir
.child(format!("{project_name}-{part_name}"));
let project_name_dist_info = project_name.replace('-', "_");
let pyproject_toml = formatdoc! {r#"
[project]
name = "{project_name}-{part_name}"
version = "1.0.0"
[tool.uv.build-backend]
module-name = "{project_name_dist_info}.{part_name}"
[build-system]
requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#
};
project.child("pyproject.toml").write_str(&pyproject_toml)?;
project
.child("src")
.child(project_name_dist_info)
.child(part_name)
.child("__init__.py")
.write_str(init_py)?;
context
.build()
.arg(project.path())
.arg("--out-dir")
.arg(dist.path())
.assert()
.success();
}
// Both parts install side by side from the local wheel directory.
uv_snapshot!(
context.filters(),
context
.pip_install()
.arg("complex-project-part-a")
.arg("complex-project-part-b")
.arg("--offline")
.arg("--find-links")
.arg(dist.path()),
@r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 2 packages in [TIME]
Installed 2 packages in [TIME]
+ complex-project-part-a==1.0.0
+ complex-project-part-b==1.0.0
"
);
// The cross-part import works: `two()` calls into `part_a`.
uv_snapshot!(context.python_command()
.arg("-c")
.arg("from complex_project.part_b import two; print(two())"),
@r"
success: true
exit_code: 0
----- stdout -----
2
----- stderr -----
"
);
// Test editable installs
uv_snapshot!(
context.filters(),
context
.pip_install()
.arg("-e")
.arg("complex-project-part_a")
.arg("-e")
.arg("complex-project-part_b")
.arg("--offline"),
@r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 2 packages in [TIME]
Uninstalled 2 packages in [TIME]
Installed 2 packages in [TIME]
- complex-project-part-a==1.0.0
+ complex-project-part-a==1.0.0 (from file://[TEMP_DIR]/complex-project-part_a)
- complex-project-part-b==1.0.0
+ complex-project-part-b==1.0.0 (from file://[TEMP_DIR]/complex-project-part_b)
"
);
// The namespace still resolves after switching to editable installs.
uv_snapshot!(context.python_command()
.arg("-c")
.arg("from complex_project.part_b import two; print(two())"),
@r"
success: true
exit_code: 0
----- stdout -----
2
----- stderr -----
"
);
Ok(())
}
/// A `project.license-files` glob that matches no files is a hard error, even
/// when other globs in the list do match.
#[test]
fn license_glob_without_matches_errors() -> Result<()> {
let context = TestContext::new("3.12");
let project = context.temp_dir.child("missing-license");
context
.init()
.arg("--lib")
.arg(project.path())
.assert()
.success();
project
.child("LICENSE.txt")
.write_str("permissive license")?;
// `abc` matches nothing; `LICENSE.txt` exists.
project.child("pyproject.toml").write_str(indoc! {r#"
[project]
name = "missing-license"
version = "1.0.0"
license-files = ["abc", "LICENSE.txt"]
[build-system]
requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#
})?;
uv_snapshot!(context
.build_backend()
.arg("build-wheel")
.arg(context.temp_dir.path())
.current_dir(project.path()), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Invalid project metadata
Caused by: `project.license-files` glob `abc` did not match any files
");
Ok(())
}
/// License files referenced by `project.license-files` must be valid UTF-8;
/// a file containing invalid bytes fails the build with a clear error.
#[test]
fn license_file_must_be_utf8() -> Result<()> {
    let context = TestContext::new("3.12");
    let project_dir = context.temp_dir.child("license-utf8");

    context
        .init()
        .arg("--lib")
        .arg(project_dir.path())
        .assert()
        .success();

    // `0xff` is never valid in UTF-8, so this license file cannot be decoded.
    project_dir.child("LICENSE.bin").write_binary(&[0xff])?;
    project_dir.child("pyproject.toml").write_str(indoc! {r#"
[project]
name = "license-utf8"
version = "1.0.0"
license-files = ["LICENSE.bin"]
[build-system]
requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#
    })?;

    uv_snapshot!(context
        .build_backend()
        .arg("build-wheel")
        .arg(context.temp_dir.path())
        .current_dir(project_dir.path()), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Invalid project metadata
Caused by: License file `LICENSE.bin` must be UTF-8 encoded
");
    Ok(())
}
/// Test that a symlinked file (here: license) gets included.
#[test]
#[cfg(unix)]
fn symlinked_file() -> Result<()> {
let context = TestContext::new("3.12");
let project = context.temp_dir.child("project");
context
.init()
.arg("--lib")
.arg(project.path())
.assert()
.success();
project.child("pyproject.toml").write_str(indoc! {r#"
[project]
name = "project"
version = "1.0.0"
license-files = ["LICENSE"]
[build-system]
requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#
})?;
let license_file = context.temp_dir.child("LICENSE");
let license_symlink = project.child("LICENSE");
let license_text = "Project license";
license_file.write_str(license_text)?;
fs_err::os::unix::fs::symlink(license_file.path(), license_symlink.path())?;
uv_snapshot!(context
.build_backend()
.arg("build-sdist")
.arg(context.temp_dir.path())
.current_dir(project.path()), @r"
success: true
exit_code: 0
----- stdout -----
project-1.0.0.tar.gz
----- stderr -----
");
uv_snapshot!(context
.build_backend()
.arg("build-wheel")
.arg(context.temp_dir.path())
.current_dir(project.path()), @r"
success: true
exit_code: 0
----- stdout -----
project-1.0.0-py3-none-any.whl
----- stderr -----
");
uv_snapshot!(context.filters(), context.pip_install().arg("project-1.0.0-py3-none-any.whl"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ project==1.0.0 (from file://[TEMP_DIR]/project-1.0.0-py3-none-any.whl)
");
// Check that we included the actual license text and not a broken symlink.
let installed_license = context
.site_packages()
.join("project-1.0.0.dist-info")
.join("licenses")
.join("LICENSE");
assert!(
fs_err::symlink_metadata(&installed_license)?
.file_type()
.is_file()
);
let license = fs_err::read_to_string(&installed_license)?;
assert_eq!(license, license_text);
Ok(())
}
/// Ignore invalid build backend settings when not building.
///
/// They may be from another `uv_build` version that has a different schema.
#[test]
fn invalid_build_backend_settings_are_ignored() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
// The `requires` range points at a (hypothetical) future uv_build whose schema
// may legitimately differ from the current one.
pyproject_toml.write_str(indoc! {r#"
[project]
name = "built-by-uv"
version = "0.1.0"
requires-python = ">=3.12"
[tool.uv.build-backend]
# Error: `source-include` must be a list
source-include = "data/build-script.py"
[build-system]
requires = ["uv_build>=10000,<10001"]
build-backend = "uv_build"
"#})?;
// Since we are not building, this must pass without complaining about the error in
// `tool.uv.build-backend`.
uv_snapshot!(context.filters(), context.lock(), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
");
Ok(())
}
/// Error when there is a relative module root outside the project root, such as
/// `tool.uv.build-backend.module-root = ".."`.
#[test]
fn error_on_relative_module_root_outside_project_root() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
[tool.uv.build-backend]
module-root = ".."
[build-system]
requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#})?;
context.temp_dir.child("__init__.py").touch()?;
// Both the sdist and the wheel build must reject the escaping module root.
uv_snapshot!(context.filters(), context.build(), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Building source distribution (uv build backend)...
× Failed to build `[TEMP_DIR]/`
╰─▶ Module root must be inside the project: ..
");
uv_snapshot!(context.filters(), context.build().arg("--wheel"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Building wheel (uv build backend)...
× Failed to build `[TEMP_DIR]/`
╰─▶ Module root must be inside the project: ..
");
Ok(())
}
/// Error when there is a relative data directory outside the project root, such as
/// `tool.uv.build-backend.data.headers = "../headers"`.
#[test]
fn error_on_relative_data_dir_outside_project_root() -> Result<()> {
let context = TestContext::new("3.12");
let project = context.temp_dir.child("project");
project.create_dir_all()?;
let pyproject_toml = project.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
[tool.uv.build-backend.data]
headers = "../header"
[build-system]
requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
"#})?;
let project_module = project.child("src/project");
project_module.create_dir_all()?;
project_module.child("__init__.py").touch()?;
// NOTE(review): the config says `../header` but this creates `headers` —
// looks like the error fires regardless of whether the target exists; confirm
// whether the name mismatch is intentional.
context.temp_dir.child("headers").create_dir_all()?;
uv_snapshot!(context.filters(), context.build().arg("project"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Building source distribution (uv build backend)...
× Failed to build `[TEMP_DIR]/project`
╰─▶ The path for the data directory headers must be inside the project: ../header
");
uv_snapshot!(context.filters(), context.build().arg("project").arg("--wheel"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Building wheel (uv build backend)...
× Failed to build `[TEMP_DIR]/project`
╰─▶ The path for the data directory headers must be inside the project: ../header
");
Ok(())
}
/// Show an explicit error when a virtual environment ends up inside the source
/// tree: both sdist and wheel builds must refuse to package it.
#[test]
fn venv_in_source_tree() {
    let context = TestContext::new("3.12");

    // Scaffold a library project named `foo`.
    context
        .init()
        .arg("--lib")
        .arg("--name")
        .arg("foo")
        .assert()
        .success();

    // Create a venv inside the module directory itself.
    let venv_path = context.temp_dir.join("src").join("foo").join(".venv");
    context.venv().arg(&venv_path).assert().success();

    uv_snapshot!(context.filters(), context.build(), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Building source distribution (uv build backend)...
× Failed to build `[TEMP_DIR]/`
╰─▶ Virtual environments must not be added to source distributions or wheels, remove the directory or exclude it from the build: src/foo/.venv
");
    uv_snapshot!(context.filters(), context.build().arg("--wheel"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Building wheel (uv build backend)...
× Failed to build `[TEMP_DIR]/`
╰─▶ Virtual environments must not be added to source distributions or wheels, remove the directory or exclude it from the build: src/foo/.venv
");
}
/// Show a warning when the build backend is passed redundant module names
///
/// Entries in `module-name` that duplicate or nest under another entry
/// (e.g. `foo.bar` under `foo`) are redundant and warned about; the warning is
/// suppressed when sources are disabled (proxy for "not user-controlled").
#[test]
fn warn_on_redundant_module_names() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
[build-system]
requires = ["uv_build>=0.7,<10000"]
build-backend = "uv_build"
[tool.uv.build-backend]
module-name = ["foo", "foo.bar", "foo", "foo.bar.baz", "foobar", "bar", "foobar.baz", "baz.bar"]
"#})?;
// Create every distinct top-level module referenced above.
let foo_module = context.temp_dir.child("src/foo");
foo_module.create_dir_all()?;
foo_module.child("__init__.py").touch()?;
let foobar_module = context.temp_dir.child("src/foobar");
foobar_module.create_dir_all()?;
foobar_module.child("__init__.py").touch()?;
let bazbar_module = context.temp_dir.child("src/baz/bar");
bazbar_module.create_dir_all()?;
bazbar_module.child("__init__.py").touch()?;
let bar_module = context.temp_dir.child("src/bar");
bar_module.create_dir_all()?;
bar_module.child("__init__.py").touch()?;
// Warnings should be printed when invoking `uv build`
uv_snapshot!(context.filters(), context.build(), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Building source distribution (uv build backend)...
warning: Ignoring redundant module names in `tool.uv.build-backend.module-name`: `foo.bar`, `foo`, `foo.bar.baz`, `foobar.baz`
Building wheel from source distribution (uv build backend)...
Successfully built dist/project-0.1.0.tar.gz
Successfully built dist/project-0.1.0-py3-none-any.whl
");
// But warnings shouldn't be printed in cases when the user might not
// control the thing being built. Sources being enabled is a workable proxy
// for this.
uv_snapshot!(context.filters(), context.build().arg("--no-sources"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Building source distribution (uv build backend)...
Building wheel from source distribution (uv build backend)...
Successfully built dist/project-0.1.0.tar.gz
Successfully built dist/project-0.1.0-py3-none-any.whl
");
Ok(())
}
/// A type error in `pyproject.toml` (here: `name = 1`) surfaces as a TOML
/// parse error with the offending line quoted, rather than a panic or an
/// opaque failure.
#[test]
fn invalid_pyproject_toml() -> Result<()> {
let context = TestContext::new("3.12");
context
.temp_dir
.child("child")
.child("pyproject.toml")
.write_str(indoc! {r#"
[project]
name = 1
version = "1.0.0"
[build-system]
requires = ["uv_build>=0.9,<10000"]
build-backend = "uv_build"
"#})?;
uv_snapshot!(context.filters(), context.build().arg("child"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Building source distribution (uv build backend)...
× Failed to build `[TEMP_DIR]/child`
├─▶ Invalid metadata format in: child/pyproject.toml
╰─▶ TOML parse error at line 2, column 8
|
2 | name = 1
| ^
invalid type: integer `1`, expected a string
");
Ok(())
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/show_settings.rs | crates/uv/tests/it/show_settings.rs | use std::path::Path;
use std::process::Command;
use assert_fs::prelude::*;
use uv_static::EnvVars;
use crate::common::{TestContext, uv_snapshot};
/// Prepare a command for settings snapshots.
///
/// Clears all inherited environment variables, then pins the machine-specific
/// ones to fixed values so `--show-settings` output is stable across hosts,
/// and points both XDG config locations at `cwd` so neither system nor user
/// configuration is loaded.
fn add_shared_args(mut command: Command, cwd: &Path) -> Command {
    // Start from an empty environment to drop any user-defined variables.
    command.env_clear();
    // Fix concurrency- and linking-related settings to static values.
    command.env(EnvVars::UV_LINK_MODE, "clone");
    command.env(EnvVars::UV_CONCURRENT_DOWNLOADS, "50");
    command.env(EnvVars::UV_CONCURRENT_BUILDS, "16");
    command.env(EnvVars::UV_CONCURRENT_INSTALLS, "8");
    // Set an explicit `XDG_CONFIG_DIRS` to avoid loading system configuration,
    // and an explicit `XDG_CONFIG_HOME` to avoid loading user configuration.
    command.env(EnvVars::XDG_CONFIG_DIRS, cwd);
    command.env(EnvVars::XDG_CONFIG_HOME, cwd);
    if cfg!(unix) {
        // Pin the locale so output is not locale-dependent in tests.
        command.env(EnvVars::LC_ALL, "C");
    }
    command
}
/// Read from a `uv.toml` file in the current directory.
#[test]
#[cfg_attr(
windows,
ignore = "Configuration tests are not yet supported on Windows"
)]
fn resolve_uv_toml() -> anyhow::Result<()> {
let context = TestContext::new("3.12");
// Write a `uv.toml` file to the directory.
let config = context.temp_dir.child("uv.toml");
config.write_str(indoc::indoc! {r#"
[pip]
resolution = "lowest-direct"
generate-hashes = true
index-url = "https://pypi.org/simple"
"#})?;
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio>3.0.0")?;
// Resolution should use the lowest direct version, and generate hashes.
uv_snapshot!(context.filters(), add_shared_args(context.pip_compile(), context.temp_dir.path())
.arg("--show-settings")
.arg("requirements.in"), @r#"
success: true
exit_code: 0
----- stdout -----
GlobalSettings {
required_version: None,
quiet: 0,
verbose: 0,
color: Auto,
network_settings: NetworkSettings {
connectivity: Online,
native_tls: false,
allow_insecure_host: [],
timeout: [TIME],
retries: 3,
},
concurrency: Concurrency {
downloads: 50,
builds: 16,
installs: 8,
},
show_settings: true,
preview: Preview {
flags: PreviewFeatures(
0x0,
),
},
python_preference: Managed,
python_downloads: Automatic,
no_progress: false,
installer_metadata: true,
}
CacheSettings {
no_cache: false,
cache_dir: Some(
"[CACHE_DIR]/",
),
}
PipCompileSettings {
format: None,
src_file: [
"requirements.in",
],
constraints: [],
overrides: [],
excludes: [],
build_constraints: [],
constraints_from_workspace: [],
overrides_from_workspace: [],
excludes_from_workspace: [],
build_constraints_from_workspace: [],
environments: SupportedEnvironments(
[],
),
refresh: None(
Timestamp(
SystemTime {
tv_sec: [TIME],
tv_nsec: [TIME],
},
),
),
settings: PipSettings {
index_locations: IndexLocations {
indexes: [
Index {
name: None,
url: Pypi(
VerbatimUrl {
url: DisplaySafeUrl {
scheme: "https",
cannot_be_a_base: false,
username: "",
password: None,
host: Some(
Domain(
"pypi.org",
),
),
port: None,
path: "/simple",
query: None,
fragment: None,
},
given: Some(
"https://pypi.org/simple",
),
},
),
explicit: false,
default: true,
origin: None,
format: Simple,
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
cache_control: None,
},
],
flat_index: [],
no_index: false,
},
python: None,
install_mirrors: PythonInstallMirrors {
python_install_mirror: None,
pypy_install_mirror: None,
python_downloads_json_url: None,
},
system: false,
extras: ExtrasSpecification(
ExtrasSpecificationInner {
include: Some(
[],
),
exclude: [],
only_extras: false,
history: ExtrasSpecificationHistory {
extra: [],
only_extra: [],
no_extra: [],
all_extras: false,
no_default_extras: false,
defaults: List(
[],
),
},
},
),
groups: [],
break_system_packages: false,
target: None,
prefix: None,
index_strategy: FirstIndex,
keyring_provider: Disabled,
torch_backend: None,
build_isolation: Isolate,
extra_build_dependencies: ExtraBuildDependencies(
{},
),
extra_build_variables: ExtraBuildVariables(
{},
),
build_options: BuildOptions {
no_binary: None,
no_build: None,
},
allow_empty_requirements: false,
strict: false,
dependency_mode: Transitive,
resolution: LowestDirect,
prerelease: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
dependency_metadata: DependencyMetadata(
{},
),
output_file: None,
no_strip_extras: false,
no_strip_markers: false,
no_annotate: false,
no_header: false,
custom_compile_command: None,
generate_hashes: true,
config_setting: ConfigSettings(
{},
),
config_settings_package: PackageConfigSettings(
{},
),
python_version: None,
python_platform: None,
universal: false,
exclude_newer: ExcludeNewer {
global: None,
package: ExcludeNewerPackage(
{},
),
},
no_emit_package: [],
emit_index_url: false,
emit_find_links: false,
emit_build_options: false,
emit_marker_expression: false,
emit_index_annotation: false,
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
sources: Enabled,
hash_checking: Some(
Verify,
),
upgrade: None,
reinstall: None,
},
}
----- stderr -----
"#
);
// Resolution should use the highest version, and generate hashes.
uv_snapshot!(context.filters(), add_shared_args(context.pip_compile(), context.temp_dir.path())
.arg("--show-settings")
.arg("requirements.in")
.arg("--resolution=highest"), @r#"
success: true
exit_code: 0
----- stdout -----
GlobalSettings {
required_version: None,
quiet: 0,
verbose: 0,
color: Auto,
network_settings: NetworkSettings {
connectivity: Online,
native_tls: false,
allow_insecure_host: [],
timeout: [TIME],
retries: 3,
},
concurrency: Concurrency {
downloads: 50,
builds: 16,
installs: 8,
},
show_settings: true,
preview: Preview {
flags: PreviewFeatures(
0x0,
),
},
python_preference: Managed,
python_downloads: Automatic,
no_progress: false,
installer_metadata: true,
}
CacheSettings {
no_cache: false,
cache_dir: Some(
"[CACHE_DIR]/",
),
}
PipCompileSettings {
format: None,
src_file: [
"requirements.in",
],
constraints: [],
overrides: [],
excludes: [],
build_constraints: [],
constraints_from_workspace: [],
overrides_from_workspace: [],
excludes_from_workspace: [],
build_constraints_from_workspace: [],
environments: SupportedEnvironments(
[],
),
refresh: None(
Timestamp(
SystemTime {
tv_sec: [TIME],
tv_nsec: [TIME],
},
),
),
settings: PipSettings {
index_locations: IndexLocations {
indexes: [
Index {
name: None,
url: Pypi(
VerbatimUrl {
url: DisplaySafeUrl {
scheme: "https",
cannot_be_a_base: false,
username: "",
password: None,
host: Some(
Domain(
"pypi.org",
),
),
port: None,
path: "/simple",
query: None,
fragment: None,
},
given: Some(
"https://pypi.org/simple",
),
},
),
explicit: false,
default: true,
origin: None,
format: Simple,
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
cache_control: None,
},
],
flat_index: [],
no_index: false,
},
python: None,
install_mirrors: PythonInstallMirrors {
python_install_mirror: None,
pypy_install_mirror: None,
python_downloads_json_url: None,
},
system: false,
extras: ExtrasSpecification(
ExtrasSpecificationInner {
include: Some(
[],
),
exclude: [],
only_extras: false,
history: ExtrasSpecificationHistory {
extra: [],
only_extra: [],
no_extra: [],
all_extras: false,
no_default_extras: false,
defaults: List(
[],
),
},
},
),
groups: [],
break_system_packages: false,
target: None,
prefix: None,
index_strategy: FirstIndex,
keyring_provider: Disabled,
torch_backend: None,
build_isolation: Isolate,
extra_build_dependencies: ExtraBuildDependencies(
{},
),
extra_build_variables: ExtraBuildVariables(
{},
),
build_options: BuildOptions {
no_binary: None,
no_build: None,
},
allow_empty_requirements: false,
strict: false,
dependency_mode: Transitive,
resolution: Highest,
prerelease: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
dependency_metadata: DependencyMetadata(
{},
),
output_file: None,
no_strip_extras: false,
no_strip_markers: false,
no_annotate: false,
no_header: false,
custom_compile_command: None,
generate_hashes: true,
config_setting: ConfigSettings(
{},
),
config_settings_package: PackageConfigSettings(
{},
),
python_version: None,
python_platform: None,
universal: false,
exclude_newer: ExcludeNewer {
global: None,
package: ExcludeNewerPackage(
{},
),
},
no_emit_package: [],
emit_index_url: false,
emit_find_links: false,
emit_build_options: false,
emit_marker_expression: false,
emit_index_annotation: false,
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
sources: Enabled,
hash_checking: Some(
Verify,
),
upgrade: None,
reinstall: None,
},
}
----- stderr -----
"#
);
// Resolution should use the highest version, and omit hashes.
uv_snapshot!(context.filters(), add_shared_args(context.pip_compile(), context.temp_dir.path())
.arg("--show-settings")
.arg("requirements.in")
.arg("--resolution=highest")
.arg("--no-generate-hashes"), @r#"
success: true
exit_code: 0
----- stdout -----
GlobalSettings {
required_version: None,
quiet: 0,
verbose: 0,
color: Auto,
network_settings: NetworkSettings {
connectivity: Online,
native_tls: false,
allow_insecure_host: [],
timeout: [TIME],
retries: 3,
},
concurrency: Concurrency {
downloads: 50,
builds: 16,
installs: 8,
},
show_settings: true,
preview: Preview {
flags: PreviewFeatures(
0x0,
),
},
python_preference: Managed,
python_downloads: Automatic,
no_progress: false,
installer_metadata: true,
}
CacheSettings {
no_cache: false,
cache_dir: Some(
"[CACHE_DIR]/",
),
}
PipCompileSettings {
format: None,
src_file: [
"requirements.in",
],
constraints: [],
overrides: [],
excludes: [],
build_constraints: [],
constraints_from_workspace: [],
overrides_from_workspace: [],
excludes_from_workspace: [],
build_constraints_from_workspace: [],
environments: SupportedEnvironments(
[],
),
refresh: None(
Timestamp(
SystemTime {
tv_sec: [TIME],
tv_nsec: [TIME],
},
),
),
settings: PipSettings {
index_locations: IndexLocations {
indexes: [
Index {
name: None,
url: Pypi(
VerbatimUrl {
url: DisplaySafeUrl {
scheme: "https",
cannot_be_a_base: false,
username: "",
password: None,
host: Some(
Domain(
"pypi.org",
),
),
port: None,
path: "/simple",
query: None,
fragment: None,
},
given: Some(
"https://pypi.org/simple",
),
},
),
explicit: false,
default: true,
origin: None,
format: Simple,
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
cache_control: None,
},
],
flat_index: [],
no_index: false,
},
python: None,
install_mirrors: PythonInstallMirrors {
python_install_mirror: None,
pypy_install_mirror: None,
python_downloads_json_url: None,
},
system: false,
extras: ExtrasSpecification(
ExtrasSpecificationInner {
include: Some(
[],
),
exclude: [],
only_extras: false,
history: ExtrasSpecificationHistory {
extra: [],
only_extra: [],
no_extra: [],
all_extras: false,
no_default_extras: false,
defaults: List(
[],
),
},
},
),
groups: [],
break_system_packages: false,
target: None,
prefix: None,
index_strategy: FirstIndex,
keyring_provider: Disabled,
torch_backend: None,
build_isolation: Isolate,
extra_build_dependencies: ExtraBuildDependencies(
{},
),
extra_build_variables: ExtraBuildVariables(
{},
),
build_options: BuildOptions {
no_binary: None,
no_build: None,
},
allow_empty_requirements: false,
strict: false,
dependency_mode: Transitive,
resolution: Highest,
prerelease: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
dependency_metadata: DependencyMetadata(
{},
),
output_file: None,
no_strip_extras: false,
no_strip_markers: false,
no_annotate: false,
no_header: false,
custom_compile_command: None,
generate_hashes: false,
config_setting: ConfigSettings(
{},
),
config_settings_package: PackageConfigSettings(
{},
),
python_version: None,
python_platform: None,
universal: false,
exclude_newer: ExcludeNewer {
global: None,
package: ExcludeNewerPackage(
{},
),
},
no_emit_package: [],
emit_index_url: false,
emit_find_links: false,
emit_build_options: false,
emit_marker_expression: false,
emit_index_annotation: false,
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
sources: Enabled,
hash_checking: Some(
Verify,
),
upgrade: None,
reinstall: None,
},
}
----- stderr -----
"#
);
Ok(())
}
/// Read from a `pyproject.toml` file in the current directory.
///
/// We prefer `uv.toml` when both are present, but respect `pyproject.toml` otherwise.
#[test]
#[cfg_attr(
windows,
ignore = "Configuration tests are not yet supported on Windows"
)]
fn resolve_pyproject_toml() -> anyhow::Result<()> {
let context = TestContext::new("3.12");
// Write a `uv.toml` file to the directory.
let config = context.temp_dir.child("uv.toml");
config.write_str(indoc::indoc! {r#"
[pip]
resolution = "lowest-direct"
generate-hashes = true
index-url = "https://pypi.org/simple"
"#})?;
// Write a `pyproject.toml` file to the directory.
let pyproject = context.temp_dir.child("pyproject.toml");
pyproject.write_str(indoc::indoc! {r#"
[project]
name = "example"
version = "0.0.0"
"#})?;
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("anyio>3.0.0")?;
// Resolution should use the lowest direct version, and generate hashes.
uv_snapshot!(context.filters(), add_shared_args(context.pip_compile(), context.temp_dir.path())
.arg("--show-settings")
.arg("requirements.in"), @r#"
success: true
exit_code: 0
----- stdout -----
GlobalSettings {
required_version: None,
quiet: 0,
verbose: 0,
color: Auto,
network_settings: NetworkSettings {
connectivity: Online,
native_tls: false,
allow_insecure_host: [],
timeout: [TIME],
retries: 3,
},
concurrency: Concurrency {
downloads: 50,
builds: 16,
installs: 8,
},
show_settings: true,
preview: Preview {
flags: PreviewFeatures(
0x0,
),
},
python_preference: Managed,
python_downloads: Automatic,
no_progress: false,
installer_metadata: true,
}
CacheSettings {
no_cache: false,
cache_dir: Some(
"[CACHE_DIR]/",
),
}
PipCompileSettings {
format: None,
src_file: [
"requirements.in",
],
constraints: [],
overrides: [],
excludes: [],
build_constraints: [],
constraints_from_workspace: [],
overrides_from_workspace: [],
excludes_from_workspace: [],
build_constraints_from_workspace: [],
environments: SupportedEnvironments(
[],
),
refresh: None(
Timestamp(
SystemTime {
tv_sec: [TIME],
tv_nsec: [TIME],
},
),
),
settings: PipSettings {
index_locations: IndexLocations {
indexes: [
Index {
name: None,
url: Pypi(
VerbatimUrl {
url: DisplaySafeUrl {
scheme: "https",
cannot_be_a_base: false,
username: "",
password: None,
host: Some(
Domain(
"pypi.org",
),
),
port: None,
path: "/simple",
query: None,
fragment: None,
},
given: Some(
"https://pypi.org/simple",
),
},
),
explicit: false,
default: true,
origin: None,
format: Simple,
publish_url: None,
authenticate: Auto,
ignore_error_codes: None,
cache_control: None,
},
],
flat_index: [],
no_index: false,
},
python: None,
install_mirrors: PythonInstallMirrors {
python_install_mirror: None,
pypy_install_mirror: None,
python_downloads_json_url: None,
},
system: false,
extras: ExtrasSpecification(
ExtrasSpecificationInner {
include: Some(
[],
),
exclude: [],
only_extras: false,
history: ExtrasSpecificationHistory {
extra: [],
only_extra: [],
no_extra: [],
all_extras: false,
no_default_extras: false,
defaults: List(
[],
),
},
},
),
groups: [],
break_system_packages: false,
target: None,
prefix: None,
index_strategy: FirstIndex,
keyring_provider: Disabled,
torch_backend: None,
build_isolation: Isolate,
extra_build_dependencies: ExtraBuildDependencies(
{},
),
extra_build_variables: ExtraBuildVariables(
{},
),
build_options: BuildOptions {
no_binary: None,
no_build: None,
},
allow_empty_requirements: false,
strict: false,
dependency_mode: Transitive,
resolution: LowestDirect,
prerelease: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
dependency_metadata: DependencyMetadata(
{},
),
output_file: None,
no_strip_extras: false,
no_strip_markers: false,
no_annotate: false,
no_header: false,
custom_compile_command: None,
generate_hashes: true,
config_setting: ConfigSettings(
{},
),
config_settings_package: PackageConfigSettings(
{},
),
python_version: None,
python_platform: None,
universal: false,
exclude_newer: ExcludeNewer {
global: None,
package: ExcludeNewerPackage(
{},
),
},
no_emit_package: [],
emit_index_url: false,
emit_find_links: false,
emit_build_options: false,
emit_marker_expression: false,
emit_index_annotation: false,
annotation_style: Split,
link_mode: Clone,
compile_bytecode: false,
sources: Enabled,
hash_checking: Some(
Verify,
),
upgrade: None,
reinstall: None,
},
}
----- stderr -----
"#
);
// Remove the `uv.toml` file.
fs_err::remove_file(config.path())?;
// Resolution should use the highest version, and omit hashes.
uv_snapshot!(context.filters(), add_shared_args(context.pip_compile(), context.temp_dir.path())
.arg("--show-settings")
.arg("requirements.in"), @r#"
success: true
exit_code: 0
----- stdout -----
GlobalSettings {
required_version: None,
quiet: 0,
verbose: 0,
color: Auto,
network_settings: NetworkSettings {
connectivity: Online,
native_tls: false,
allow_insecure_host: [],
timeout: [TIME],
retries: 3,
},
concurrency: Concurrency {
downloads: 50,
builds: 16,
installs: 8,
},
show_settings: true,
preview: Preview {
flags: PreviewFeatures(
0x0,
),
},
python_preference: Managed,
python_downloads: Automatic,
no_progress: false,
installer_metadata: true,
}
CacheSettings {
no_cache: false,
cache_dir: Some(
"[CACHE_DIR]/",
),
}
PipCompileSettings {
format: None,
src_file: [
"requirements.in",
],
constraints: [],
overrides: [],
excludes: [],
build_constraints: [],
constraints_from_workspace: [],
overrides_from_workspace: [],
excludes_from_workspace: [],
build_constraints_from_workspace: [],
environments: SupportedEnvironments(
[],
),
refresh: None(
Timestamp(
SystemTime {
tv_sec: [TIME],
tv_nsec: [TIME],
},
),
),
settings: PipSettings {
index_locations: IndexLocations {
indexes: [],
flat_index: [],
no_index: false,
},
python: None,
install_mirrors: PythonInstallMirrors {
python_install_mirror: None,
pypy_install_mirror: None,
python_downloads_json_url: None,
},
system: false,
extras: ExtrasSpecification(
ExtrasSpecificationInner {
include: Some(
[],
),
exclude: [],
only_extras: false,
history: ExtrasSpecificationHistory {
extra: [],
only_extra: [],
no_extra: [],
all_extras: false,
no_default_extras: false,
defaults: List(
[],
),
},
},
),
groups: [],
break_system_packages: false,
target: None,
prefix: None,
index_strategy: FirstIndex,
keyring_provider: Disabled,
torch_backend: None,
build_isolation: Isolate,
extra_build_dependencies: ExtraBuildDependencies(
{},
),
extra_build_variables: ExtraBuildVariables(
{},
),
build_options: BuildOptions {
no_binary: None,
no_build: None,
},
allow_empty_requirements: false,
strict: false,
dependency_mode: Transitive,
resolution: Highest,
prerelease: IfNecessaryOrExplicit,
fork_strategy: RequiresPython,
dependency_metadata: DependencyMetadata(
{},
),
output_file: None,
no_strip_extras: false,
no_strip_markers: false,
no_annotate: false,
no_header: false,
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/version.rs | crates/uv/tests/it/version.rs | use anyhow::{Ok, Result};
use assert_cmd::assert::OutputAssertExt;
use assert_fs::prelude::*;
use indoc::indoc;
use insta::assert_snapshot;
use crate::common::TestContext;
use crate::common::uv_snapshot;
/// Querying the version with a bare `uv version` prints `<name> <version>`
/// and leaves `pyproject.toml` untouched.
#[test]
fn version_get() -> Result<()> {
    let ctx = TestContext::new("3.12");

    // Seed a minimal project manifest.
    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.10.31"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version(), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 1.10.31

    ----- stderr -----
    ");

    // A read-only query must not modify the manifest on disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "1.10.31"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// Querying the version with `--output-format json` emits a JSON object
/// (name, version, commit info) and leaves `pyproject.toml` untouched.
#[test]
fn version_get_json() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.10.31"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--output-format").arg("json"), @r#"
    success: true
    exit_code: 0

    ----- stdout -----
    {
      "package_name": "myproject",
      "version": "1.10.31",
      "commit_info": null
    }

    ----- stderr -----
    "#);

    // A read-only query must not modify the manifest on disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "1.10.31"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// Querying the version with `--short` prints only the bare version number
/// and leaves `pyproject.toml` untouched.
#[test]
fn version_get_short() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.10.31"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--short"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    1.10.31

    ----- stderr -----
    ");

    // A read-only query must not modify the manifest on disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "1.10.31"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// Passing an explicit version sets it verbatim, reports the transition on
/// stdout, and persists the new value to `pyproject.toml`.
#[test]
fn version_set_value() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.10.31"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("1.1.1"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 1.10.31 => 1.1.1

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The new version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r###"
        [project]
        name = "myproject"
        version = "1.1.1"
        requires-python = ">=3.12"
        "###
    );

    Ok(())
}
/// Setting an explicit version with `--short` prints only the new version
/// number while still persisting it to `pyproject.toml`.
#[test]
fn version_set_value_short() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.10.31"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("1.1.1")
        .arg("--short"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    1.1.1

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The new version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r###"
        [project]
        name = "myproject"
        version = "1.1.1"
        requires-python = ">=3.12"
        "###
    );

    Ok(())
}
/// `--bump patch` increments the patch component and persists the result.
#[test]
fn version_bump_patch() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.10.31"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("patch"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 1.10.31 => 1.10.32

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "1.10.32"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// `--bump patch=<N>` sets the patch component to an explicit value rather
/// than incrementing it.
#[test]
fn version_bump_patch_value() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.10.31"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("patch=40"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 1.10.31 => 1.10.40

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "1.10.40"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// `--bump minor=<N>` sets the minor component to an explicit value and
/// zeroes the patch component.
#[test]
fn version_bump_minor_value() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.2.3"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("minor=10"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 1.2.3 => 1.10.0

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "1.10.0"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// `--bump major=<N>` sets the major component to an explicit value and
/// zeroes the minor and patch components.
#[test]
fn version_bump_major_value() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "2.3.4"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("major=7"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 2.3.4 => 7.0.0

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "7.0.0"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// `--bump patch --short` prints only the resulting version number while
/// still writing the bump back to `pyproject.toml`.
#[test]
fn version_bump_patch_short() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.10.31"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("patch")
        .arg("--short"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    1.10.32

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "1.10.32"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// `--bump patch=<N>` is rejected when the requested value would not
/// increase the version; the manifest must be left unchanged.
#[test]
fn version_bump_patch_value_must_increase() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "0.0.12"
        requires-python = ">=3.12"
        "#,
    )?;

    // 0.0.11 < 0.0.12, so this must fail.
    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("patch=11"), @r"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    error: 0.0.12 => 0.0.11 didn't increase the version; provide the exact version to force an update
    ");

    // The failed command must not touch the manifest.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "0.0.12"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// Comments attached to the `version` key must survive a bump: comments
/// immediately above it, the end-of-line comment, and the comment after it
/// all stay in place while only the version value changes.
#[test]
fn version_bump_preserves_preceding_comments() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "hello-world"
        # pre-1: stays above version
        # pre-2: stays below pre-1
        version = "0.1.0" # eol: stays on same line
        # after-version: remains after version
        description = "Add your description here"
        "#,
    )?;

    // Bump the patch component; only the exit status matters here.
    ctx.version().arg("--bump").arg("patch").assert().success();

    // All comment positions survive the in-place edit.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "hello-world"
        # pre-1: stays above version
        # pre-2: stays below pre-1
        version = "0.1.1" # eol: stays on same line
        # after-version: remains after version
        description = "Add your description here"
        "#
    );

    Ok(())
}
/// `--bump minor` increments the minor component and zeroes the patch.
#[test]
fn version_bump_minor() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.10.31"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("minor"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 1.10.31 => 1.11.0

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "1.11.0"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// `--bump major` increments the major component and zeroes minor and patch.
#[test]
fn version_major_version() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.10.31"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("major"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 1.10.31 => 2.0.0

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "2.0.0"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// Bumping the patch of a two-component version ("0.1") appends the missing
/// component rather than failing.
#[test]
fn version_patch_uncompleted() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "0.1"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("patch"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 0.1 => 0.1.1

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "0.1.1"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// Bumping the minor of a two-component version ("0.1") keeps the short
/// form — no patch component is invented.
#[test]
fn version_minor_uncompleted() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "0.1"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("minor"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 0.1 => 0.2

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "0.2"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// Bumping the major of a two-component version ("0.1") keeps the short
/// form — no patch component is invented.
#[test]
fn version_major_uncompleted() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "0.1"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("major"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 0.1 => 1.0

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "1.0"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// Bumping the major of a `.dev` version drops the dev suffix along with
/// the bump.
#[test]
fn version_major_dev() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.10.31.dev10"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("major"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 1.10.31.dev10 => 2.0.0

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "2.0.0"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// Bumping the major of a version with epoch, pre-release, post-release,
/// dev, and local segments: the epoch and local segment are retained, while
/// the pre/post/dev suffixes are cleared by the bump.
#[test]
fn version_major_complex_mess() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1!2a3.post4.dev5+deadbeef6"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("major"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 1!2a3.post4.dev5+deadbeef6 => 1!3+deadbeef6

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "1!3+deadbeef6"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// Passing many `--bump` flags in a messy order (with duplicates) to a
/// complex version. Extremely permissive semantics could allow this, but it
/// is currently rejected — `--bump post` cannot be combined with other
/// `--bump` values — and the manifest must be left unchanged.
#[test]
fn many_bump_complex() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "9!2.3.4a5.post6.dev7+deadbeef6"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("major")
        .arg("--bump").arg("patch")
        .arg("--bump").arg("alpha")
        .arg("--bump").arg("minor")
        .arg("--bump").arg("dev")
        .arg("--bump").arg("minor")
        .arg("--bump").arg("post")
        .arg("--bump").arg("post"), @r"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    error: `--bump post` cannot be used with another `--bump` value, got: major, patch, alpha, minor, dev, minor, post, post
    ");

    // The failed command must not touch the manifest.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "9!2.3.4a5.post6.dev7+deadbeef6"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// `--bump stable` strips pre-release, post-release, and dev suffixes,
/// keeping the release (plus epoch and local segment) intact.
#[test]
fn bump_stable() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "9!2.3.4a5.post6.dev7+deadbeef6"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("stable"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 9!2.3.4a5.post6.dev7+deadbeef6 => 9!2.3.4+deadbeef6

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "9!2.3.4+deadbeef6"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// `--bump alpha` increments the existing alpha number and clears the
/// post-release and dev suffixes.
#[test]
fn bump_alpha() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "9!2.3.4a5.post6.dev7+deadbeef6"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("alpha"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 9!2.3.4a5.post6.dev7+deadbeef6 => 9!2.3.4a6+deadbeef6

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "9!2.3.4a6+deadbeef6"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// `--bump beta` on an alpha version moves to the first beta (`b1`) and
/// clears the post-release and dev suffixes.
#[test]
fn bump_beta() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "9!2.3.4a5.post6.dev7+deadbeef6"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("beta"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 9!2.3.4a5.post6.dev7+deadbeef6 => 9!2.3.4b1+deadbeef6

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "9!2.3.4b1+deadbeef6"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// `--bump beta=<N>` on a version that already has a beta component sets
/// the beta number to the explicit value.
#[test]
fn bump_beta_with_value_existing() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.2.3b4"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("beta=42"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 1.2.3b4 => 1.2.3b42

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "1.2.3b42"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
/// `--bump beta=<N>` combined with `--bump patch` on a stable version: the
/// patch is incremented and a fresh beta component with the explicit value
/// is attached.
#[test]
fn bump_beta_with_value_new() -> Result<()> {
    let ctx = TestContext::new("3.12");

    let manifest = ctx.temp_dir.child("pyproject.toml");
    manifest.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.2.3"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(ctx.filters(), ctx.version()
        .arg("--bump").arg("beta=5")
        .arg("--bump").arg("patch"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 1.2.3 => 1.2.4b5

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version is written back to disk.
    assert_snapshot!(
        fs_err::read_to_string(&manifest)?,
        @r#"
        [project]
        name = "myproject"
        version = "1.2.4b5"
        requires-python = ">=3.12"
        "#
    );

    Ok(())
}
// --bump rc
/// `--bump rc` promotes the version to the first release candidate, dropping
/// the alpha, post, and dev segments while preserving the epoch and local
/// segment (`9!2.3.4a5.post6.dev7+deadbeef6` => `9!2.3.4rc1+deadbeef6`).
#[test]
fn bump_rc() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "myproject"
        version = "9!2.3.4a5.post6.dev7+deadbeef6"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(context.filters(), context.version()
        .arg("--bump").arg("rc"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 9!2.3.4a5.post6.dev7+deadbeef6 => 9!2.3.4rc1+deadbeef6

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version must also be written back to `pyproject.toml`.
    let pyproject = fs_err::read_to_string(&pyproject_toml)?;
    assert_snapshot!(
        pyproject,
        @r#"
    [project]
    name = "myproject"
    version = "9!2.3.4rc1+deadbeef6"
    requires-python = ">=3.12"
    "#
    );
    Ok(())
}
// --bump post
/// `--bump post` increments the post segment (`post6` => `post7`) while the
/// dev segment is dropped; the pre-release (`a5`), epoch, and local segment
/// are preserved, per the snapshot.
#[test]
fn bump_post() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "myproject"
        version = "9!2.3.4a5.post6.dev7+deadbeef6"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(context.filters(), context.version()
        .arg("--bump").arg("post"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 9!2.3.4a5.post6.dev7+deadbeef6 => 9!2.3.4a5.post7+deadbeef6

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version must also be written back to `pyproject.toml`.
    let pyproject = fs_err::read_to_string(&pyproject_toml)?;
    assert_snapshot!(
        pyproject,
        @r#"
    [project]
    name = "myproject"
    version = "9!2.3.4a5.post7+deadbeef6"
    requires-python = ">=3.12"
    "#
    );
    Ok(())
}
/// `--bump post=N` sets the post segment to an explicit value and clears the
/// dev segment (`1.2.3.post4.dev9` => `1.2.3.post10`).
#[test]
fn bump_post_with_value_clears_dev() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.2.3.post4.dev9"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(context.filters(), context.version()
        .arg("--bump").arg("post=10"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 1.2.3.post4.dev9 => 1.2.3.post10

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version must also be written back to `pyproject.toml`.
    let pyproject = fs_err::read_to_string(&pyproject_toml)?;
    assert_snapshot!(
        pyproject,
        @r#"
    [project]
    name = "myproject"
    version = "1.2.3.post10"
    requires-python = ">=3.12"
    "#
    );
    Ok(())
}
// --bump dev
/// `--bump dev` increments only the dev segment (`dev7` => `dev8`); all other
/// segments — epoch, release, pre-release, post, local — are left intact.
#[test]
fn bump_dev() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "myproject"
        version = "9!2.3.4a5.post6.dev7+deadbeef6"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(context.filters(), context.version()
        .arg("--bump").arg("dev"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 9!2.3.4a5.post6.dev7+deadbeef6 => 9!2.3.4a5.post6.dev8+deadbeef6

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version must also be written back to `pyproject.toml`.
    let pyproject = fs_err::read_to_string(&pyproject_toml)?;
    assert_snapshot!(
        pyproject,
        @r#"
    [project]
    name = "myproject"
    version = "9!2.3.4a5.post6.dev8+deadbeef6"
    requires-python = ">=3.12"
    "#
    );
    Ok(())
}
/// `--bump dev=N` sets the dev segment to an explicit value
/// (`0.1.0.dev4` => `0.1.0.dev42`).
#[test]
fn bump_dev_with_value() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "myproject"
        version = "0.1.0.dev4"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(context.filters(), context.version()
        .arg("--bump").arg("dev=42"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 0.1.0.dev4 => 0.1.0.dev42

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version must also be written back to `pyproject.toml`.
    let pyproject = fs_err::read_to_string(&pyproject_toml)?;
    assert_snapshot!(
        pyproject,
        @r#"
    [project]
    name = "myproject"
    version = "0.1.0.dev42"
    requires-python = ">=3.12"
    "#
    );
    Ok(())
}
/// Combining `--bump patch` with an explicit `--bump dev=N` bumps the patch
/// component and attaches the requested dev segment in a single invocation
/// (`0.0.1` => `0.0.2.dev66463664`).
#[test]
fn bump_patch_and_dev_value() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "myproject"
        version = "0.0.1"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(context.filters(), context.version()
        .arg("--bump").arg("patch")
        .arg("--bump").arg("dev=66463664"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 0.0.1 => 0.0.2.dev66463664

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    let pyproject = fs_err::read_to_string(&pyproject_toml)?;
    assert_snapshot!(
        pyproject,
        @r#"
    [project]
    name = "myproject"
    version = "0.0.2.dev66463664"
    requires-python = ">=3.12"
    "#
    );
    Ok(())
}
/// Multiple explicit `--bump` values are applied in their semantic order, not
/// CLI order: `dev=0` is passed before `patch=10`, yet the result is
/// `0.1.10.dev0` — the patch value is applied before the dev value.
#[test]
fn bump_patch_and_dev_explicit_values_sorted() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "myproject"
        version = "0.1.2.dev3"
        requires-python = ">=3.12"
        "#,
    )?;

    // Note `dev=0` precedes `patch=10` on the command line.
    uv_snapshot!(context.filters(), context.version()
        .arg("--bump").arg("dev=0")
        .arg("--bump").arg("patch=10"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 0.1.2.dev3 => 0.1.10.dev0

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    let pyproject = fs_err::read_to_string(&pyproject_toml)?;
    assert_snapshot!(
        pyproject,
        @r#"
    [project]
    name = "myproject"
    version = "0.1.10.dev0"
    requires-python = ">=3.12"
    "#
    );
    Ok(())
}
// Bump major but the input version is .post
/// `--bump major` on a post-release resets the minor/patch components and
/// drops the post segment (`1.10.31.post10` => `2.0.0`).
#[test]
fn version_major_post() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.10.31.post10"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(context.filters(), context.version()
        .arg("--bump").arg("major"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    myproject 1.10.31.post10 => 2.0.0

    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    ");

    // The bumped version must also be written back to `pyproject.toml`.
    let pyproject = fs_err::read_to_string(&pyproject_toml)?;
    assert_snapshot!(
        pyproject,
        @r#"
    [project]
    name = "myproject"
    version = "2.0.0"
    requires-python = ">=3.12"
    "#
    );
    Ok(())
}
/// `--bump stable` is a flag-like bump: attaching an explicit value
/// (`stable=1`) is rejected with a usage error.
#[test]
fn bump_stable_with_value_fails() -> Result<()> {
    let context = TestContext::new("3.12");

    // A valid project is required so the failure is attributable to the
    // `--bump` argument itself rather than to project discovery.
    context
        .temp_dir
        .child("pyproject.toml")
        .write_str(
            r#"
        [project]
        name = "myproject"
        version = "1.2.3"
        requires-python = ">=3.12"
        "#,
        )?;

    uv_snapshot!(context.filters(), context.version().arg("--bump").arg("stable=1"), @r"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    error: `--bump stable` does not accept a value
    ");

    Ok(())
}
/// A `--bump` component with an empty explicit value (`patch=`) is rejected.
#[test]
fn bump_empty_value_fails() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.2.3"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(context.filters(), context.version()
        .arg("--bump").arg("patch="), @r"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    error: `--bump` values cannot be empty
    ");

    Ok(())
}
/// A non-numeric explicit `--bump` value (`dev=foo`) is rejected with an
/// error naming the offending component.
#[test]
fn bump_invalid_numeric_value_fails() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "myproject"
        version = "1.2.3"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(context.filters(), context.version()
        .arg("--bump").arg("dev=foo"), @r"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    error: invalid numeric value `foo` for `--bump dev`
    ");

    Ok(())
}
// --bump stable but it decreases the version
/// `--bump stable` from a post-release would strip the post segment and
/// thereby *decrease* the version (`2.3.4.post6` => `2.3.4`); uv refuses and
/// suggests providing an exact version instead.
#[test]
fn bump_decrease_stable() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "myproject"
        version = "2.3.4.post6"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(context.filters(), context.version()
        .arg("--bump").arg("stable"), @r"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    error: 2.3.4.post6 => 2.3.4 didn't increase the version; provide the exact version to force an update
    ");

    Ok(())
}
// --bump alpha but it decreases the version by reverting beta
/// `--bump alpha` on a beta would move *backwards* in the pre-release
/// ordering (`2.3.4b5` => `2.3.4a1`); uv refuses the decrease.
#[test]
fn bump_decrease_alpha_beta() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "myproject"
        version = "2.3.4b5"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(context.filters(), context.version()
        .arg("--bump").arg("alpha"), @r"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    error: 2.3.4b5 => 2.3.4a1 didn't increase the version; provide the exact version to force an update
    ");

    Ok(())
}
// --bump alpha but it decreases the version from a stable
/// `--bump alpha` on a stable version would decrease it (`2.3.4` =>
/// `2.3.4a1`); the error includes a hint to also bump a release component,
/// e.g. `--bump <major|minor|patch>`.
#[test]
fn bump_decrease_alpha_stable() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "myproject"
        version = "2.3.4"
        requires-python = ">=3.12"
        "#,
    )?;

    uv_snapshot!(context.filters(), context.version()
        .arg("--bump").arg("alpha"), @r"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    error: 2.3.4 => 2.3.4a1 didn't increase the version; when bumping to a pre-release version you also need to increase a release version component, e.g., with `--bump <major|minor|patch>`
    ");

    Ok(())
}
// --bump major twice
#[test]
fn bump_double_major() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "myproject"
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/cache_clean.rs | crates/uv/tests/it/cache_clean.rs | use anyhow::Result;
use assert_cmd::prelude::*;
use assert_fs::prelude::*;
use uv_cache::Cache;
use uv_static::EnvVars;
use crate::common::{TestContext, uv_snapshot};
/// `cache clean` should remove all packages.
///
/// The cache is first populated via a `pip sync`, then cleared; the snapshot
/// asserts the exclusive cache lock is acquired and released around the wipe.
#[test]
fn clean_all() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("typing-extensions\niniconfig")?;

    // Install a requirement, to populate the cache.
    context
        .pip_sync()
        .arg("requirements.txt")
        .assert()
        .success();

    uv_snapshot!(context.with_filtered_counts().filters(), context.clean().arg("--verbose"), @r"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    DEBUG uv [VERSION] ([COMMIT] DATE)
    DEBUG Acquired exclusive lock for `[CACHE_DIR]/`
    Clearing cache at: [CACHE_DIR]/
    DEBUG Released lock at `[CACHE_DIR]/.lock`
    Removed [N] files ([SIZE])
    ");

    Ok(())
}
/// `cache clean --force` behavior with respect to the cache lock:
/// when the lock is free, `--force` still acquires it normally; when another
/// process holds the lock, `--force` proceeds anyway instead of waiting.
#[tokio::test]
async fn clean_force() -> Result<()> {
    let context = TestContext::new("3.12").with_filtered_counts();

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("typing-extensions\niniconfig")?;

    // Install a requirement, to populate the cache.
    context
        .pip_sync()
        .arg("requirements.txt")
        .assert()
        .success();

    // When unlocked, `--force` should still take a lock
    uv_snapshot!(context.filters(), context.clean().arg("--verbose").arg("--force"), @r"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    DEBUG uv [VERSION] ([COMMIT] DATE)
    DEBUG Acquired exclusive lock for `[CACHE_DIR]/`
    Clearing cache at: [CACHE_DIR]/
    DEBUG Released lock at `[CACHE_DIR]/.lock`
    Removed [N] files ([SIZE])
    ");

    // Install a requirement, to re-populate the cache.
    context
        .pip_sync()
        .arg("requirements.txt")
        .assert()
        .success();

    // When locked, `--force` should proceed without blocking
    // (the guard stays bound so the lock is held for the whole snapshot).
    let _cache = uv_cache::Cache::from_path(context.cache_dir.path())
        .with_exclusive_lock()
        .await;

    uv_snapshot!(context.filters(), context.clean().arg("--verbose").arg("--force"), @r"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    DEBUG uv [VERSION] ([COMMIT] DATE)
    DEBUG Lock is busy for `[CACHE_DIR]/`
    DEBUG Cache is currently in use, proceeding due to `--force`
    Clearing cache at: [CACHE_DIR]/
    Removed [N] files ([SIZE])
    ");

    Ok(())
}
/// `cache clean iniconfig` should remove a single package (`iniconfig`).
///
/// Verifies that the package's simple-index entry (`iniconfig.rkyv`) exists
/// after install, is gone after the targeted clean, and that a subsequent
/// `cache prune` finds nothing left to remove.
#[test]
fn clean_package_pypi() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("anyio\niniconfig")?;

    // Install a requirement, to populate the cache.
    context
        .pip_sync()
        .arg("requirements.txt")
        .assert()
        .success();

    // Assert that the `.rkyv` file is created for `iniconfig`.
    let rkyv = context
        .cache_dir
        .child("simple-v18")
        .child("pypi")
        .child("iniconfig.rkyv");
    assert!(
        rkyv.exists(),
        "Expected the `.rkyv` file to exist for `iniconfig`"
    );

    let filters: Vec<_> = context
        .filters()
        .into_iter()
        .chain([
            // The cache entry does not have a stable key, so we filter it out.
            (
                r"\[CACHE_DIR\](\\|\/)(.+)(\\|\/).*",
                "[CACHE_DIR]/$2/[ENTRY]",
            ),
            // The file count varies by operating system, so we filter it out.
            ("Removed \\d+ files?", "Removed [N] files"),
        ])
        .collect();

    uv_snapshot!(&filters, context.clean().arg("--verbose").arg("iniconfig"), @r"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    DEBUG uv [VERSION] ([COMMIT] DATE)
    DEBUG Acquired exclusive lock for `[CACHE_DIR]/`
    DEBUG Removing dangling cache entry: [CACHE_DIR]/archive-v0/[ENTRY]
    Removed [N] files ([SIZE])
    DEBUG Released lock at `[CACHE_DIR]/.lock`
    ");

    // Assert that the `.rkyv` file is removed for `iniconfig`.
    assert!(
        !rkyv.exists(),
        "Expected the `.rkyv` file to be removed for `iniconfig`"
    );

    // Running `uv cache prune` should have no effect.
    uv_snapshot!(&filters, context.prune().arg("--verbose"), @r"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    DEBUG uv [VERSION] ([COMMIT] DATE)
    DEBUG Acquired exclusive lock for `[CACHE_DIR]/`
    Pruning cache at: [CACHE_DIR]/
    No unused entries found
    DEBUG Released lock at `[CACHE_DIR]/.lock`
    ");

    Ok(())
}
/// `cache clean iniconfig` should remove a single package (`iniconfig`).
///
/// Same as `clean_package_pypi`, but the package was installed from an
/// explicit `--index-url`, so the `.rkyv` entry lives under a hashed index
/// subdirectory rather than the `pypi` directory.
#[test]
fn clean_package_index() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("anyio\niniconfig")?;

    // Install a requirement, to populate the cache.
    context
        .pip_sync()
        .arg("requirements.txt")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple")
        .assert()
        .success();

    // Assert that the `.rkyv` file is created for `iniconfig`.
    // NOTE(review): `e8208120cae3ba69` is presumably the hash of the index
    // URL above; it will change if the URL or hashing scheme changes.
    let rkyv = context
        .cache_dir
        .child("simple-v18")
        .child("index")
        .child("e8208120cae3ba69")
        .child("iniconfig.rkyv");
    assert!(
        rkyv.exists(),
        "Expected the `.rkyv` file to exist for `iniconfig`"
    );

    let filters: Vec<_> = context
        .filters()
        .into_iter()
        .chain([
            // The cache entry does not have a stable key, so we filter it out.
            (
                r"\[CACHE_DIR\](\\|\/)(.+)(\\|\/).*",
                "[CACHE_DIR]/$2/[ENTRY]",
            ),
            // The file count varies by operating system, so we filter it out.
            ("Removed \\d+ files?", "Removed [N] files"),
        ])
        .collect();

    uv_snapshot!(&filters, context.clean().arg("--verbose").arg("iniconfig"), @r"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    DEBUG uv [VERSION] ([COMMIT] DATE)
    DEBUG Acquired exclusive lock for `[CACHE_DIR]/`
    DEBUG Removing dangling cache entry: [CACHE_DIR]/archive-v0/[ENTRY]
    Removed [N] files ([SIZE])
    DEBUG Released lock at `[CACHE_DIR]/.lock`
    ");

    // Assert that the `.rkyv` file is removed for `iniconfig`.
    assert!(
        !rkyv.exists(),
        "Expected the `.rkyv` file to be removed for `iniconfig`"
    );

    Ok(())
}
/// When another process holds the exclusive cache lock and `UV_LOCK_TIMEOUT`
/// is small, `cache clean` (without `--force`) times out with an error that
/// suggests both `--force` and raising `UV_LOCK_TIMEOUT`.
#[tokio::test]
async fn cache_timeout() {
    let context = TestContext::new("3.12");

    // Simulate another uv process running and locking the cache, e.g., with a source build.
    let _cache = Cache::from_path(context.cache_dir.path())
        .with_exclusive_lock()
        .await;

    uv_snapshot!(context.filters(), context.clean().env(EnvVars::UV_LOCK_TIMEOUT, "1"), @r"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    Cache is currently in-use, waiting for other uv processes to finish (use `--force` to override)
    error: Timeout ([TIME]) when waiting for lock on `[CACHE_DIR]/` at `[CACHE_DIR]/.lock`, is another uv process running? You can set `UV_LOCK_TIMEOUT` to increase the timeout.
    ");
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/sync.rs | crates/uv/tests/it/sync.rs | use anyhow::Result;
use assert_cmd::prelude::*;
use assert_fs::{fixture::ChildPath, prelude::*};
use indoc::{formatdoc, indoc};
use insta::assert_snapshot;
use predicates::prelude::predicate;
use tempfile::tempdir_in;
use uv_fs::Simplified;
use uv_static::EnvVars;
use crate::common::{TestContext, download_to_disk, packse_index_url, uv_snapshot, venv_bin_path};
/// A bare `uv sync` resolves the project, installs its dependencies, and
/// writes a `uv.lock` as a side effect.
#[test]
fn sync() -> Result<()> {
    let context = TestContext::new("3.12");

    // Minimal project with a single dependency.
    context
        .temp_dir
        .child("pyproject.toml")
        .write_str(
            r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]
        "#,
        )?;

    // Running `uv sync` should generate a lockfile.
    uv_snapshot!(context.filters(), context.sync(), @r"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Resolved 2 packages in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + iniconfig==2.0.0
    ");

    assert!(context.temp_dir.child("uv.lock").exists());

    Ok(())
}
/// `uv sync --locked` errors when no lockfile exists, and errors — without
/// touching the lockfile — when the lockfile is out of date with respect to
/// `pyproject.toml`.
#[test]
fn locked() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio==3.7.0"]
        "#,
    )?;

    // Without a lockfile on disk, `--locked` must fail outright.
    uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    error: Unable to find lockfile at `uv.lock`. To create a lockfile, run `uv lock` or `uv sync`.
    ");

    // Lock the initial requirements and remember the lockfile contents.
    context.lock().assert().success();
    let before = context.read("uv.lock");

    // Change the requirements so the lockfile becomes stale.
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]
        "#,
    )?;

    // A stale lockfile under `--locked` is an error...
    uv_snapshot!(context.filters(), context.sync().arg("--locked"), @r"
    success: false
    exit_code: 1

    ----- stdout -----

    ----- stderr -----
    Resolved 2 packages in [TIME]
    The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
    ");

    // ...and the lockfile on disk must be left untouched.
    let after = context.read("uv.lock");
    assert_eq!(before, after);

    Ok(())
}
/// `uv sync --frozen` errors when no lockfile exists, but — unlike
/// `--locked` — installs from a *stale* lockfile without re-resolving: the
/// snapshot shows `anyio` installed even after the requirement changed to
/// `iniconfig`.
#[test]
fn frozen() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio==3.7.0"]
        "#,
    )?;

    // Running with `--frozen` should error, if no lockfile is present.
    uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    error: Unable to find lockfile at `uv.lock`. To create a lockfile, run `uv lock` or `uv sync`.
    "###);

    context.lock().assert().success();

    // Update the requirements.
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]
        "#,
    )?;

    // Running with `--frozen` should install the stale lockfile.
    uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Prepared 3 packages in [TIME]
    Installed 3 packages in [TIME]
     + anyio==3.7.0
     + idna==3.6
     + sniffio==1.3.1
    ");

    Ok(())
}
/// Syncing a workspace with no members and no `[project]` table succeeds,
/// produces an (empty) lockfile, and warns that `requires-python` defaults
/// to `>=3.12`; a second sync is a no-op with the same warning.
#[test]
fn empty() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r"
        [tool.uv.workspace]
        members = []
        ",
    )?;

    // Running `uv sync` should generate an empty lockfile.
    uv_snapshot!(context.filters(), context.sync(), @r"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    warning: No `requires-python` value found in the workspace. Defaulting to `>=3.12`.
    Resolved in [TIME]
    Audited in [TIME]
    ");

    assert!(context.temp_dir.child("uv.lock").exists());

    // Running `uv sync` again should succeed.
    uv_snapshot!(context.filters(), context.sync(), @r"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    warning: No `requires-python` value found in the workspace. Defaulting to `>=3.12`.
    Resolved in [TIME]
    Audited in [TIME]
    ");

    Ok(())
}
/// Sync an individual package within a workspace.
///
/// `uv sync --package child` installs only the member and its own
/// dependencies (`iniconfig`), not the root's (`anyio`), even though the
/// whole workspace (6 packages) is resolved.
#[test]
fn package() -> Result<()> {
    let context = TestContext::new("3.12");

    // Workspace root depends on the member via a workspace source.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "root"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["child", "anyio>3"]

        [tool.uv.sources]
        child = { workspace = true }

        [tool.uv.workspace]
        members = ["child"]
        "#,
    )?;

    let src = context.temp_dir.child("src").child("albatross");
    src.create_dir_all()?;
    let init = src.child("__init__.py");
    init.touch()?;

    // The workspace member, built with setuptools so it is installable.
    let child = context.temp_dir.child("child");
    fs_err::create_dir_all(&child)?;

    let pyproject_toml = child.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "child"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig>=1"]

        [build-system]
        requires = ["setuptools>=42"]
        build-backend = "setuptools.build_meta"
        "#,
    )?;

    let src = child.child("src").child("albatross");
    src.create_dir_all()?;
    let init = src.child("__init__.py");
    init.touch()?;

    uv_snapshot!(context.filters(), context.sync().arg("--package").arg("child"), @r"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Resolved 6 packages in [TIME]
    Prepared 2 packages in [TIME]
    Installed 2 packages in [TIME]
     + child==0.1.0 (from file://[TEMP_DIR]/child)
     + iniconfig==2.0.0
    ");

    Ok(())
}
/// Sync multiple packages within a workspace.
///
/// Repeating `--package` installs the union of the named members'
/// dependencies; a follow-up sync that adds a member only installs the
/// delta (`baz` + `iniconfig`).
#[test]
fn multiple_packages() -> Result<()> {
    let context = TestContext::new("3.12");

    // Root depends on all three members via workspace sources.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "root"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["foo", "bar", "baz"]

        [tool.uv.sources]
        foo = { workspace = true }
        bar = { workspace = true }
        baz = { workspace = true }

        [tool.uv.workspace]
        members = ["packages/*"]
        "#,
    )?;

    context
        .temp_dir
        .child("packages")
        .child("foo")
        .child("pyproject.toml")
        .write_str(
            r#"
        [project]
        name = "foo"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio"]
        "#,
        )?;

    context
        .temp_dir
        .child("packages")
        .child("bar")
        .child("pyproject.toml")
        .write_str(
            r#"
        [project]
        name = "bar"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["typing-extensions"]
        "#,
        )?;

    context
        .temp_dir
        .child("packages")
        .child("baz")
        .child("pyproject.toml")
        .write_str(
            r#"
        [project]
        name = "baz"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]
        "#,
        )?;

    // Sync `foo` and `bar`.
    uv_snapshot!(context.filters(), context.sync()
        .arg("--package").arg("foo")
        .arg("--package").arg("bar"), @r"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Resolved 9 packages in [TIME]
    Prepared 6 packages in [TIME]
    Installed 6 packages in [TIME]
     + anyio==4.3.0
     + bar==0.1.0 (from file://[TEMP_DIR]/packages/bar)
     + foo==0.1.0 (from file://[TEMP_DIR]/packages/foo)
     + idna==3.6
     + sniffio==1.3.1
     + typing-extensions==4.10.0
    ");

    // Sync `foo`, `bar`, and `baz`.
    uv_snapshot!(context.filters(), context.sync()
        .arg("--package").arg("foo")
        .arg("--package").arg("bar")
        .arg("--package").arg("baz"), @r"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Resolved 9 packages in [TIME]
    Prepared 2 packages in [TIME]
    Installed 2 packages in [TIME]
     + baz==0.1.0 (from file://[TEMP_DIR]/packages/baz)
     + iniconfig==2.0.0
    ");

    Ok(())
}
/// Test json output
///
/// Covers `uv sync --output-format json` across lock actions: `create` on the
/// first sync, `use` under `--frozen`, `check` under `--locked`, failure when
/// `--locked` detects a stale lockfile, and that JSON is still emitted under
/// `--quiet`.
#[test]
fn sync_json() -> Result<()> {
    let context = TestContext::new("3.12")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin();

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]
        "#,
    )?;

    // First sync: the lockfile does not exist yet, so `lock.action` is `create`.
    uv_snapshot!(context.filters(), context.sync()
        .arg("--output-format").arg("json"), @r#"
    success: true
    exit_code: 0

    ----- stdout -----
    {
      "schema": {
        "version": "preview"
      },
      "target": "project",
      "project": {
        "path": "[TEMP_DIR]/",
        "workspace": {
          "path": "[TEMP_DIR]/"
        }
      },
      "sync": {
        "environment": {
          "path": "[VENV]/",
          "python": {
            "path": "[VENV]/[BIN]/[PYTHON]",
            "version": "3.12.[X]",
            "implementation": "cpython"
          }
        },
        "action": "check",
        "changes": [
          {
            "name": "iniconfig",
            "version": "2.0.0",
            "action": "installed"
          }
        ]
      },
      "lock": {
        "path": "[TEMP_DIR]/uv.lock",
        "action": "create"
      },
      "dry_run": false
    }

    ----- stderr -----
    Resolved 2 packages in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + iniconfig==2.0.0
    "#);

    assert!(context.temp_dir.child("uv.lock").exists());

    // `--frozen` consumes the existing lockfile as-is: `lock.action` is `use`.
    uv_snapshot!(context.filters(), context.sync()
        .arg("--frozen")
        .arg("--output-format").arg("json"), @r#"
    success: true
    exit_code: 0

    ----- stdout -----
    {
      "schema": {
        "version": "preview"
      },
      "target": "project",
      "project": {
        "path": "[TEMP_DIR]/",
        "workspace": {
          "path": "[TEMP_DIR]/"
        }
      },
      "sync": {
        "environment": {
          "path": "[VENV]/",
          "python": {
            "path": "[VENV]/[BIN]/[PYTHON]",
            "version": "3.12.[X]",
            "implementation": "cpython"
          }
        },
        "action": "check",
        "changes": []
      },
      "lock": {
        "path": "[TEMP_DIR]/uv.lock",
        "action": "use"
      },
      "dry_run": false
    }

    ----- stderr -----
    Audited 1 package in [TIME]
    "#);

    // `--locked` re-resolves and validates the lockfile: `lock.action` is `check`.
    uv_snapshot!(context.filters(), context.sync()
        .arg("--locked")
        .arg("--output-format").arg("json"), @r#"
    success: true
    exit_code: 0

    ----- stdout -----
    {
      "schema": {
        "version": "preview"
      },
      "target": "project",
      "project": {
        "path": "[TEMP_DIR]/",
        "workspace": {
          "path": "[TEMP_DIR]/"
        }
      },
      "sync": {
        "environment": {
          "path": "[VENV]/",
          "python": {
            "path": "[VENV]/[BIN]/[PYTHON]",
            "version": "3.12.[X]",
            "implementation": "cpython"
          }
        },
        "action": "check",
        "changes": []
      },
      "lock": {
        "path": "[TEMP_DIR]/uv.lock",
        "action": "check"
      },
      "dry_run": false
    }

    ----- stderr -----
    Resolved 2 packages in [TIME]
    Audited 1 package in [TIME]
    "#);

    // Invalidate the lockfile by changing the requirements.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig<2"]
        "#,
    )?;

    // A stale lockfile under `--locked` fails; no JSON is emitted on stdout.
    uv_snapshot!(context.filters(), context.sync()
        .arg("--locked")
        .arg("--output-format").arg("json"), @r"
    success: false
    exit_code: 1

    ----- stdout -----

    ----- stderr -----
    Resolved 2 packages in [TIME]
    The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
    ");

    // Test that JSON output is shown even with --quiet flag
    uv_snapshot!(context.filters(), context.sync()
        .arg("--quiet")
        .arg("--frozen")
        .arg("--output-format").arg("json"), @r#"
    success: true
    exit_code: 0

    ----- stdout -----
    {
      "schema": {
        "version": "preview"
      },
      "target": "project",
      "project": {
        "path": "[TEMP_DIR]/",
        "workspace": {
          "path": "[TEMP_DIR]/"
        }
      },
      "sync": {
        "environment": {
          "path": "[VENV]/",
          "python": {
            "path": "[VENV]/[BIN]/[PYTHON]",
            "version": "3.12.[X]",
            "implementation": "cpython"
          }
        },
        "action": "check",
        "changes": []
      },
      "lock": {
        "path": "[TEMP_DIR]/uv.lock",
        "action": "use"
      },
      "dry_run": false
    }

    ----- stderr -----
    "#);

    Ok(())
}
/// Test --dry json output
///
/// With `--dry-run`, the JSON reports the *intended* actions (`create` for
/// both environment and lockfile, `dry_run: true`) and stderr uses
/// "Would download / Would install" phrasing; nothing is actually installed.
#[test]
fn sync_dry_json() -> Result<()> {
    let context = TestContext::new_with_versions(&["3.12"])
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin();

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]
        "#,
    )?;

    // Running `uv sync` should report intent to create the environment and lockfile
    uv_snapshot!(context.filters(), context.sync()
        .arg("--output-format").arg("json")
        .arg("--dry-run"), @r#"
    success: true
    exit_code: 0

    ----- stdout -----
    {
      "schema": {
        "version": "preview"
      },
      "target": "project",
      "project": {
        "path": "[TEMP_DIR]/",
        "workspace": {
          "path": "[TEMP_DIR]/"
        }
      },
      "sync": {
        "environment": {
          "path": "[VENV]/",
          "python": {
            "path": "[VENV]/[BIN]/[PYTHON]",
            "version": "3.12.[X]",
            "implementation": "cpython"
          }
        },
        "action": "create",
        "changes": [
          {
            "name": "iniconfig",
            "version": "2.0.0",
            "action": "installed"
          }
        ]
      },
      "lock": {
        "path": "[TEMP_DIR]/uv.lock",
        "action": "create"
      },
      "dry_run": true
    }

    ----- stderr -----
    Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
    Resolved 2 packages in [TIME]
    Would download 1 package
    Would install 1 package
     + iniconfig==2.0.0
    "#);

    Ok(())
}
/// Ensure that we use the maximum Python version when a workspace contains mixed requirements.
///
/// The root requires `>=3.12`, a member requires `>=3.9`: syncing with
/// `-p 3.12` succeeds, while `-p 3.9` is rejected because the root's
/// `requires-python` wins for the workspace.
#[test]
fn mixed_requires_python() -> Result<()> {
    let context = TestContext::new_with_versions(&["3.9", "3.12"]);

    // Create a workspace root with a minimum Python requirement of Python 3.12.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "albatross"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["bird-feeder", "anyio>3"]

        [tool.uv.sources]
        bird-feeder = { workspace = true }

        [tool.uv.workspace]
        members = ["packages/*"]
        "#,
    )?;

    let src = context.temp_dir.child("src").child("albatross");
    src.create_dir_all()?;
    let init = src.child("__init__.py");
    init.touch()?;

    // Create a child with a minimum Python requirement of Python 3.9.
    let child = context.temp_dir.child("packages").child("bird-feeder");
    child.create_dir_all()?;

    let src = context.temp_dir.child("src").child("bird_feeder");
    src.create_dir_all()?;
    let init = src.child("__init__.py");
    init.touch()?;

    let pyproject_toml = child.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "bird-feeder"
        version = "0.1.0"
        requires-python = ">=3.9"

        [build-system]
        requires = ["setuptools>=42"]
        build-backend = "setuptools.build_meta"
        "#,
    )?;

    // Running `uv sync` should succeed, locking for Python 3.12.
    uv_snapshot!(context.filters(), context.sync().arg("-p").arg("3.12"), @r"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
    Creating virtual environment at: .venv
    Resolved 5 packages in [TIME]
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
     + anyio==4.3.0
     + bird-feeder==0.1.0 (from file://[TEMP_DIR]/packages/bird-feeder)
     + idna==3.6
     + sniffio==1.3.1
    ");

    // Running `uv sync` again should fail.
    uv_snapshot!(context.filters(), context.sync().arg("-p").arg("3.9"), @r"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    Using CPython 3.9.[X] interpreter at: [PYTHON-3.9]
    error: The requested interpreter resolved to Python 3.9.[X], which is incompatible with the project's Python requirement: `>=3.12` (from workspace member `albatross`'s `project.requires-python`).
    ");

    Ok(())
}
/// Ensure that group requires-python solves an actual problem
#[test]
#[cfg(not(windows))]
#[cfg(feature = "python-eol")]
fn group_requires_python_useful_defaults() -> Result<()> {
let context = TestContext::new_with_versions(&["3.8", "3.9"]);
// Require 3.8 for our project, but have a dev-dependency on a version of sphinx that needs 3.9
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "pharaohs-tomp"
version = "0.1.0"
requires-python = ">=3.8"
dependencies = ["anyio"]
[dependency-groups]
dev = ["sphinx>=7.2.6"]
"#,
)?;
let src = context.temp_dir.child("src").child("albatross");
src.create_dir_all()?;
let init = src.child("__init__.py");
init.touch()?;
// Running `uv sync --no-dev` should ideally succeed, locking for Python 3.8.
// ...but once we pick the 3.8 interpreter the lock freaks out because it sees
// that the dependency-group containing sphinx will never successfully install,
// even though it's not enabled!
uv_snapshot!(context.filters(), context.sync()
.arg("--no-dev"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
Using CPython 3.8.[X] interpreter at: [PYTHON-3.8]
Creating virtual environment at: .venv
× No solution found when resolving dependencies for split (markers: python_full_version == '3.8.*'):
╰─▶ Because the requested Python version (>=3.8) does not satisfy Python>=3.9 and sphinx==7.2.6 depends on Python>=3.9, we can conclude that sphinx==7.2.6 cannot be used.
And because only sphinx<=7.2.6 is available, we can conclude that sphinx>=7.2.6 cannot be used.
And because pharaohs-tomp:dev depends on sphinx>=7.2.6 and your project requires pharaohs-tomp:dev, we can conclude that your project's requirements are unsatisfiable.
hint: The `requires-python` value (>=3.8) includes Python versions that are not supported by your dependencies (e.g., sphinx==7.2.6 only supports >=3.9). Consider using a more restrictive `requires-python` value (like >=3.9).
");
// Running `uv sync` should always fail, as now sphinx is involved
uv_snapshot!(context.filters(), context.sync(), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies for split (markers: python_full_version == '3.8.*'):
╰─▶ Because the requested Python version (>=3.8) does not satisfy Python>=3.9 and sphinx==7.2.6 depends on Python>=3.9, we can conclude that sphinx==7.2.6 cannot be used.
And because only sphinx<=7.2.6 is available, we can conclude that sphinx>=7.2.6 cannot be used.
And because pharaohs-tomp:dev depends on sphinx>=7.2.6 and your project requires pharaohs-tomp:dev, we can conclude that your project's requirements are unsatisfiable.
hint: The `requires-python` value (>=3.8) includes Python versions that are not supported by your dependencies (e.g., sphinx==7.2.6 only supports >=3.9). Consider using a more restrictive `requires-python` value (like >=3.9).
");
// Adding group requires python should fix it
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "pharaohs-tomp"
version = "0.1.0"
requires-python = ">=3.8"
dependencies = ["anyio"]
[dependency-groups]
dev = ["sphinx>=7.2.6"]
[tool.uv.dependency-groups]
dev = {requires-python = ">=3.9"}
"#,
)?;
// Running `uv sync --no-dev` should succeed, still using the Python 3.8.
uv_snapshot!(context.filters(), context.sync()
.arg("--no-dev"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 29 packages in [TIME]
Prepared 5 packages in [TIME]
Installed 5 packages in [TIME]
+ anyio==4.3.0
+ exceptiongroup==1.2.0
+ idna==3.6
+ sniffio==1.3.1
+ typing-extensions==4.10.0
");
// Running `uv sync` should succeed, bumping to Python 3.9 as sphinx is now involved.
uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using CPython 3.9.[X] interpreter at: [PYTHON-3.9]
Removed virtual environment at: .venv
Creating virtual environment at: .venv
Resolved 29 packages in [TIME]
Prepared 22 packages in [TIME]
Installed 27 packages in [TIME]
+ alabaster==0.7.16
+ anyio==4.3.0
+ babel==2.14.0
+ certifi==2024.2.2
+ charset-normalizer==3.3.2
+ docutils==0.20.1
+ exceptiongroup==1.2.0
+ idna==3.6
+ imagesize==1.4.1
+ importlib-metadata==7.1.0
+ jinja2==3.1.3
+ markupsafe==2.1.5
+ packaging==24.0
+ pygments==2.17.2
+ requests==2.31.0
+ sniffio==1.3.1
+ snowballstemmer==2.2.0
+ sphinx==7.2.6
+ sphinxcontrib-applehelp==1.0.8
+ sphinxcontrib-devhelp==1.0.6
+ sphinxcontrib-htmlhelp==2.0.5
+ sphinxcontrib-jsmath==1.0.1
+ sphinxcontrib-qthelp==1.0.7
+ sphinxcontrib-serializinghtml==1.1.10
+ typing-extensions==4.10.0
+ urllib3==2.2.1
+ zipp==3.18.1
");
Ok(())
}
/// Ensure that group requires-python solves an actual problem
#[test]
#[cfg(not(windows))]
#[cfg(feature = "python-eol")]
fn group_requires_python_useful_non_defaults() -> Result<()> {
let context = TestContext::new_with_versions(&["3.8", "3.9"]);
// Require 3.8 for our project, but have a dev-dependency on a version of sphinx that needs 3.9
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "pharaohs-tomp"
version = "0.1.0"
requires-python = ">=3.8"
dependencies = ["anyio"]
[dependency-groups]
mygroup = ["sphinx>=7.2.6"]
"#,
)?;
let src = context.temp_dir.child("src").child("albatross");
src.create_dir_all()?;
let init = src.child("__init__.py");
init.touch()?;
// Running `uv sync` should ideally succeed, locking for Python 3.8.
// ...but once we pick the 3.8 interpreter the lock freaks out because it sees
// that the dependency-group containing sphinx will never successfully install,
// even though it's not enabled, or even a default!
uv_snapshot!(context.filters(), context.sync(), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
Using CPython 3.8.[X] interpreter at: [PYTHON-3.8]
Creating virtual environment at: .venv
× No solution found when resolving dependencies for split (markers: python_full_version == '3.8.*'):
╰─▶ Because the requested Python version (>=3.8) does not satisfy Python>=3.9 and sphinx==7.2.6 depends on Python>=3.9, we can conclude that sphinx==7.2.6 cannot be used.
And because only sphinx<=7.2.6 is available, we can conclude that sphinx>=7.2.6 cannot be used.
And because pharaohs-tomp:mygroup depends on sphinx>=7.2.6 and your project requires pharaohs-tomp:mygroup, we can conclude that your project's requirements are unsatisfiable.
hint: The `requires-python` value (>=3.8) includes Python versions that are not supported by your dependencies (e.g., sphinx==7.2.6 only supports >=3.9). Consider using a more restrictive `requires-python` value (like >=3.9).
");
// Running `uv sync --group mygroup` should definitely fail, as now sphinx is involved
uv_snapshot!(context.filters(), context.sync()
.arg("--group").arg("mygroup"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies for split (markers: python_full_version == '3.8.*'):
╰─▶ Because the requested Python version (>=3.8) does not satisfy Python>=3.9 and sphinx==7.2.6 depends on Python>=3.9, we can conclude that sphinx==7.2.6 cannot be used.
And because only sphinx<=7.2.6 is available, we can conclude that sphinx>=7.2.6 cannot be used.
And because pharaohs-tomp:mygroup depends on sphinx>=7.2.6 and your project requires pharaohs-tomp:mygroup, we can conclude that your project's requirements are unsatisfiable.
hint: The `requires-python` value (>=3.8) includes Python versions that are not supported by your dependencies (e.g., sphinx==7.2.6 only supports >=3.9). Consider using a more restrictive `requires-python` value (like >=3.9).
");
// Adding group requires python should fix it
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "pharaohs-tomp"
version = "0.1.0"
requires-python = ">=3.8"
dependencies = ["anyio"]
[dependency-groups]
mygroup = ["sphinx>=7.2.6"]
[tool.uv.dependency-groups]
mygroup = {requires-python = ">=3.9"}
"#,
)?;
// Running `uv sync` should succeed, locking for the previous picked Python 3.8.
uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 29 packages in [TIME]
Prepared 5 packages in [TIME]
Installed 5 packages in [TIME]
+ anyio==4.3.0
+ exceptiongroup==1.2.0
+ idna==3.6
+ sniffio==1.3.1
+ typing-extensions==4.10.0
");
// Running `uv sync --group mygroup` should pass, bumping the interpreter to 3.9,
// as the group requires-python saves us
uv_snapshot!(context.filters(), context.sync()
.arg("--group").arg("mygroup"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using CPython 3.9.[X] interpreter at: [PYTHON-3.9]
Removed virtual environment at: .venv
Creating virtual environment at: .venv
Resolved 29 packages in [TIME]
Prepared 22 packages in [TIME]
Installed 27 packages in [TIME]
+ alabaster==0.7.16
+ anyio==4.3.0
+ babel==2.14.0
+ certifi==2024.2.2
+ charset-normalizer==3.3.2
+ docutils==0.20.1
+ exceptiongroup==1.2.0
+ idna==3.6
+ imagesize==1.4.1
+ importlib-metadata==7.1.0
+ jinja2==3.1.3
+ markupsafe==2.1.5
+ packaging==24.0
+ pygments==2.17.2
+ requests==2.31.0
+ sniffio==1.3.1
+ snowballstemmer==2.2.0
+ sphinx==7.2.6
+ sphinxcontrib-applehelp==1.0.8
+ sphinxcontrib-devhelp==1.0.6
+ sphinxcontrib-htmlhelp==2.0.5
+ sphinxcontrib-jsmath==1.0.1
+ sphinxcontrib-qthelp==1.0.7
+ sphinxcontrib-serializinghtml==1.1.10
+ typing-extensions==4.10.0
+ urllib3==2.2.1
+ zipp==3.18.1
");
Ok(())
}
#[test]
fn check() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["iniconfig"]
"#,
)?;
// Running `uv sync --check` should fail.
uv_snapshot!(context.filters(), context.sync().arg("--check"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
Would use project environment at: .venv
Resolved 2 packages in [TIME]
Would create lockfile at: uv.lock
Would download 1 package
Would install 1 package
+ iniconfig==2.0.0
The environment is outdated; run `uv sync` to update the environment
"###);
// Sync the environment.
uv_snapshot!(context.filters(), context.sync(), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
");
assert!(context.temp_dir.child("uv.lock").exists());
// Running `uv sync --check` should pass now that the environment is up to date.
uv_snapshot!(context.filters(), context.sync().arg("--check"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Would use project environment at: .venv
Resolved 2 packages in [TIME]
Found up-to-date lockfile at: uv.lock
Audited 1 package in [TIME]
Would make no changes
");
Ok(())
}
/// Sync development dependencies in a (legacy) non-project workspace root.
#[test]
fn sync_legacy_non_project_dev_dependencies() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[tool.uv]
dev-dependencies = ["anyio>3", "requests[socks]", "typing-extensions ; sys_platform == ''"]
[tool.uv.workspace]
members = ["child"]
"#,
)?;
context
.temp_dir
.child("src")
.child("albatross")
.child("__init__.py")
.touch()?;
let child = context.temp_dir.child("child");
let pyproject_toml = child.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "child"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["iniconfig>=1"]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
"#,
)?;
child
.child("src")
.child("child")
.child("__init__.py")
.touch()?;
// Syncing with `--no-dev` should omit all dependencies except `iniconfig`.
uv_snapshot!(context.filters(), context.sync().arg("--no-dev"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 11 packages in [TIME]
Prepared 2 packages in [TIME]
Installed 2 packages in [TIME]
+ child==0.1.0 (from file://[TEMP_DIR]/child)
+ iniconfig==2.0.0
");
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/network.rs | crates/uv/tests/it/network.rs | use std::{env, io};
use assert_fs::fixture::{ChildPath, FileWriteStr, PathChild};
use http::StatusCode;
use serde_json::json;
use uv_static::EnvVars;
use wiremock::matchers::method;
use wiremock::{Mock, MockServer, ResponseTemplate};
use crate::common::{TestContext, uv_snapshot};
fn connection_reset(_request: &wiremock::Request) -> io::Error {
io::Error::new(io::ErrorKind::ConnectionReset, "Connection reset by peer")
}
/// Answers with a retryable HTTP status 500.
async fn http_error_server() -> (MockServer, String) {
let server = MockServer::start().await;
Mock::given(method("GET"))
.respond_with(ResponseTemplate::new(StatusCode::INTERNAL_SERVER_ERROR))
.mount(&server)
.await;
let mock_server_uri = server.uri();
(server, mock_server_uri)
}
/// Answers with a retryable connection reset IO error.
async fn io_error_server() -> (MockServer, String) {
let server = MockServer::start().await;
Mock::given(method("GET"))
.respond_with_err(connection_reset)
.mount(&server)
.await;
let mock_server_uri = server.uri();
(server, mock_server_uri)
}
/// Answers with a retryable HTTP status 500 for 2 times, then with a retryable connection reset
/// IO error.
///
/// Tests different errors paths inside uv, which retries 3 times by default, for a total for 4
/// requests.
async fn mixed_error_server() -> (MockServer, String) {
let server = MockServer::start().await;
Mock::given(method("GET"))
.respond_with_err(connection_reset)
.up_to_n_times(2)
.mount(&server)
.await;
Mock::given(method("GET"))
.respond_with(ResponseTemplate::new(StatusCode::INTERNAL_SERVER_ERROR))
.up_to_n_times(2)
.mount(&server)
.await;
let mock_server_uri = server.uri();
(server, mock_server_uri)
}
/// Check the simple index error message when the server returns HTTP status 500, a retryable error.
#[tokio::test]
async fn simple_http_500() {
let context = TestContext::new("3.12");
let (_server_drop_guard, mock_server_uri) = http_error_server().await;
let filters = vec![(mock_server_uri.as_str(), "[SERVER]")];
uv_snapshot!(filters, context
.pip_install()
.arg("tqdm")
.arg("--index-url")
.arg(&mock_server_uri)
.env_remove(EnvVars::UV_HTTP_RETRIES)
.env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Request failed after 3 retries
Caused by: Failed to fetch: `[SERVER]/tqdm/`
Caused by: HTTP status server error (500 Internal Server Error) for url ([SERVER]/tqdm/)
");
}
/// Check the simple index error message when the server returns a retryable IO error.
#[tokio::test]
async fn simple_io_err() {
let context = TestContext::new("3.12");
let (_server_drop_guard, mock_server_uri) = io_error_server().await;
let filters = vec![(mock_server_uri.as_str(), "[SERVER]")];
uv_snapshot!(filters, context
.pip_install()
.arg("tqdm")
.arg("--index-url")
.arg(&mock_server_uri)
.env_remove(EnvVars::UV_HTTP_RETRIES)
.env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Request failed after 3 retries
Caused by: Failed to fetch: `[SERVER]/tqdm/`
Caused by: error sending request for url ([SERVER]/tqdm/)
Caused by: client error (SendRequest)
Caused by: connection closed before message completed
");
}
/// Check the find links error message when the server returns HTTP status 500, a retryable error.
#[tokio::test]
async fn find_links_http_500() {
let context = TestContext::new("3.12");
let (_server_drop_guard, mock_server_uri) = http_error_server().await;
let filters = vec![(mock_server_uri.as_str(), "[SERVER]")];
uv_snapshot!(filters, context
.pip_install()
.arg("tqdm")
.arg("--no-index")
.arg("--find-links")
.arg(&mock_server_uri)
.env_remove(EnvVars::UV_HTTP_RETRIES)
.env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to read `--find-links` URL: [SERVER]/
Caused by: Request failed after 3 retries
Caused by: Failed to fetch: `[SERVER]/`
Caused by: HTTP status server error (500 Internal Server Error) for url ([SERVER]/)
");
}
/// Check the find links error message when the server returns a retryable IO error.
#[tokio::test]
async fn find_links_io_error() {
let context = TestContext::new("3.12");
let (_server_drop_guard, mock_server_uri) = io_error_server().await;
let filters = vec![(mock_server_uri.as_str(), "[SERVER]")];
uv_snapshot!(filters, context
.pip_install()
.arg("tqdm")
.arg("--no-index")
.arg("--find-links")
.arg(&mock_server_uri)
.env_remove(EnvVars::UV_HTTP_RETRIES)
.env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to read `--find-links` URL: [SERVER]/
Caused by: Request failed after 3 retries
Caused by: Failed to fetch: `[SERVER]/`
Caused by: error sending request for url ([SERVER]/)
Caused by: client error (SendRequest)
Caused by: connection closed before message completed
");
}
/// Check the error message for a find links index page, a non-streaming request, when the server
/// returns different kinds of retryable errors.
#[tokio::test]
async fn find_links_mixed_error() {
let context = TestContext::new("3.12");
let (_server_drop_guard, mock_server_uri) = mixed_error_server().await;
let filters = vec![(mock_server_uri.as_str(), "[SERVER]")];
uv_snapshot!(filters, context
.pip_install()
.arg("tqdm")
.arg("--no-index")
.arg("--find-links")
.arg(&mock_server_uri)
.env_remove(EnvVars::UV_HTTP_RETRIES)
.env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to read `--find-links` URL: [SERVER]/
Caused by: Request failed after 3 retries
Caused by: Failed to fetch: `[SERVER]/`
Caused by: HTTP status server error (500 Internal Server Error) for url ([SERVER]/)
");
}
/// Check the direct package URL error message when the server returns HTTP status 500, a retryable
/// error.
#[tokio::test]
async fn direct_url_http_500() {
let context = TestContext::new("3.12");
let (_server_drop_guard, mock_server_uri) = http_error_server().await;
let tqdm_url = format!(
"{mock_server_uri}/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl"
);
let filters = vec![(mock_server_uri.as_str(), "[SERVER]")];
uv_snapshot!(filters, context
.pip_install()
.arg(format!("tqdm @ {tqdm_url}"))
.env_remove(EnvVars::UV_HTTP_RETRIES)
.env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× Failed to download `tqdm @ [SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl`
├─▶ Request failed after 3 retries
├─▶ Failed to fetch: `[SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl`
╰─▶ HTTP status server error (500 Internal Server Error) for url ([SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl)
");
}
/// Check the direct package URL error message when the server returns a retryable IO error.
#[tokio::test]
async fn direct_url_io_error() {
let context = TestContext::new("3.12");
let (_server_drop_guard, mock_server_uri) = io_error_server().await;
let tqdm_url = format!(
"{mock_server_uri}/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl"
);
let filters = vec![(mock_server_uri.as_str(), "[SERVER]")];
uv_snapshot!(filters, context
.pip_install()
.arg(format!("tqdm @ {tqdm_url}"))
.env_remove(EnvVars::UV_HTTP_RETRIES)
.env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r#"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× Failed to download `tqdm @ [SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl`
├─▶ Request failed after 3 retries
├─▶ Failed to fetch: `[SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl`
├─▶ error sending request for url ([SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl)
├─▶ client error (SendRequest)
╰─▶ connection closed before message completed
"#);
}
/// Check the error message for direct package URL, a streaming request, when the server returns
/// different kinds of retryable errors.
#[tokio::test]
async fn direct_url_mixed_error() {
let context = TestContext::new("3.12");
let (_server_drop_guard, mock_server_uri) = mixed_error_server().await;
let tqdm_url = format!(
"{mock_server_uri}/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl"
);
let filters = vec![(mock_server_uri.as_str(), "[SERVER]")];
uv_snapshot!(filters, context
.pip_install()
.arg(format!("tqdm @ {tqdm_url}"))
.env_remove(EnvVars::UV_HTTP_RETRIES)
.env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× Failed to download `tqdm @ [SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl`
├─▶ Request failed after 3 retries
├─▶ Failed to fetch: `[SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl`
╰─▶ HTTP status server error (500 Internal Server Error) for url ([SERVER]/packages/d0/30/dc54f88dd4a2b5dc8a0279bdd7270e735851848b762aeb1c1184ed1f6b14/tqdm-4.67.1-py3-none-any.whl)
");
}
fn write_python_downloads_json(context: &TestContext, mock_server_uri: &String) -> ChildPath {
let python_downloads_json = context.temp_dir.child("python_downloads.json");
let interpreter = json!({
"cpython-3.10.0-darwin-aarch64-none": {
"arch": {
"family": "aarch64",
"variant": null
},
"libc": "none",
"major": 3,
"minor": 10,
"name": "cpython",
"os": "darwin",
"patch": 0,
"prerelease": "",
"sha256": null,
"url": format!("{mock_server_uri}/astral-sh/python-build-standalone/releases/download/20211017/cpython-3.10.0-aarch64-apple-darwin-pgo%2Blto-20211017T1616.tar.zst"),
"variant": null
}
});
python_downloads_json
.write_str(&serde_json::to_string(&interpreter).unwrap())
.unwrap();
python_downloads_json
}
/// Check the Python install error message when the server returns HTTP status 500, a retryable
/// error.
#[tokio::test]
async fn python_install_http_500() {
let context = TestContext::new("3.12")
.with_filtered_python_keys()
.with_filtered_exe_suffix()
.with_managed_python_dirs();
let (_server_drop_guard, mock_server_uri) = http_error_server().await;
let python_downloads_json = write_python_downloads_json(&context, &mock_server_uri);
let filters = vec![(mock_server_uri.as_str(), "[SERVER]")];
uv_snapshot!(filters, context
.python_install()
.arg("cpython-3.10.0-darwin-aarch64-none")
.arg("--python-downloads-json-url")
.arg(python_downloads_json.path())
.env_remove(EnvVars::UV_HTTP_RETRIES)
.env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
error: Failed to install cpython-3.10.0-macos-aarch64-none
Caused by: Request failed after 3 retries
Caused by: Failed to download [SERVER]/astral-sh/python-build-standalone/releases/download/20211017/cpython-3.10.0-aarch64-apple-darwin-pgo%2Blto-20211017T1616.tar.zst
Caused by: HTTP status server error (500 Internal Server Error) for url ([SERVER]/astral-sh/python-build-standalone/releases/download/20211017/cpython-3.10.0-aarch64-apple-darwin-pgo%2Blto-20211017T1616.tar.zst)
");
}
/// Check the Python install error message when the server returns a retryable IO error.
#[tokio::test]
async fn python_install_io_error() {
let context = TestContext::new("3.12")
.with_filtered_python_keys()
.with_filtered_exe_suffix()
.with_managed_python_dirs();
let (_server_drop_guard, mock_server_uri) = io_error_server().await;
let python_downloads_json = write_python_downloads_json(&context, &mock_server_uri);
let filters = vec![(mock_server_uri.as_str(), "[SERVER]")];
uv_snapshot!(filters, context
.python_install()
.arg("cpython-3.10.0-darwin-aarch64-none")
.arg("--python-downloads-json-url")
.arg(python_downloads_json.path())
.env_remove(EnvVars::UV_HTTP_RETRIES)
.env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
error: Failed to install cpython-3.10.0-macos-aarch64-none
Caused by: Request failed after 3 retries
Caused by: Failed to download [SERVER]/astral-sh/python-build-standalone/releases/download/20211017/cpython-3.10.0-aarch64-apple-darwin-pgo%2Blto-20211017T1616.tar.zst
Caused by: error sending request for url ([SERVER]/astral-sh/python-build-standalone/releases/download/20211017/cpython-3.10.0-aarch64-apple-darwin-pgo%2Blto-20211017T1616.tar.zst)
Caused by: client error (SendRequest)
Caused by: connection closed before message completed
");
}
#[tokio::test]
async fn install_http_retries() {
let context = TestContext::new("3.12");
let server = MockServer::start().await;
// Create a server that always fails, so we can see the number of retries used
Mock::given(method("GET"))
.respond_with(ResponseTemplate::new(503))
.mount(&server)
.await;
uv_snapshot!(context.filters(), context.pip_install()
.arg("anyio")
.arg("--index")
.arg(server.uri())
.env(EnvVars::UV_HTTP_RETRIES, "foo"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse environment variable `UV_HTTP_RETRIES` with invalid value `foo`: invalid digit found in string
"
);
uv_snapshot!(context.filters(), context.pip_install()
.arg("anyio")
.arg("--index")
.arg(server.uri())
.env(EnvVars::UV_HTTP_RETRIES, "-1"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse environment variable `UV_HTTP_RETRIES` with invalid value `-1`: invalid digit found in string
"
);
uv_snapshot!(context.filters(), context.pip_install()
.arg("anyio")
.arg("--index")
.arg(server.uri())
.env(EnvVars::UV_HTTP_RETRIES, "999999999999"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse environment variable `UV_HTTP_RETRIES` with invalid value `999999999999`: number too large to fit in target type
"
);
uv_snapshot!(context.filters(), context.pip_install()
.arg("anyio")
.arg("--index")
.arg(server.uri())
.env(EnvVars::UV_HTTP_RETRIES, "5")
.env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Request failed after 5 retries
Caused by: Failed to fetch: `http://[LOCALHOST]/anyio/`
Caused by: HTTP status server error (503 Service Unavailable) for url (http://[LOCALHOST]/anyio/)
"
);
}
/// Test problem details with a 403 error containing license compliance information
#[tokio::test]
async fn rfc9457_problem_details_license_violation() {
let context = TestContext::new("3.12");
let server = MockServer::start().await;
let problem_json = r#"{
"type": "https://example.com/probs/license-violation",
"title": "License Compliance Issue",
"status": 403,
"detail": "This package version has a license that violates organizational policy."
}"#;
// Mock HEAD request to return 200 OK
Mock::given(method("HEAD"))
.respond_with(ResponseTemplate::new(StatusCode::OK))
.mount(&server)
.await;
// Mock GET request to return 403 with problem details
Mock::given(method("GET"))
.respond_with(
ResponseTemplate::new(StatusCode::FORBIDDEN)
.set_body_raw(problem_json, "application/problem+json"),
)
.mount(&server)
.await;
let mock_server_uri = server.uri();
let tqdm_url = format!("{mock_server_uri}/packages/tqdm-4.67.1-py3-none-any.whl");
let filters = vec![(mock_server_uri.as_str(), "[SERVER]")];
uv_snapshot!(filters, context
.pip_install()
.arg(format!("tqdm @ {tqdm_url}"))
.env_remove(EnvVars::UV_HTTP_RETRIES), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× Failed to download `tqdm @ [SERVER]/packages/tqdm-4.67.1-py3-none-any.whl`
├─▶ Failed to fetch: `[SERVER]/packages/tqdm-4.67.1-py3-none-any.whl`
├─▶ Server message: License Compliance Issue, This package version has a license that violates organizational policy.
╰─▶ HTTP status client error (403 Forbidden) for url ([SERVER]/packages/tqdm-4.67.1-py3-none-any.whl)
");
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/tool_dir.rs | crates/uv/tests/it/tool_dir.rs | use assert_fs::fixture::PathChild;
use uv_static::EnvVars;
use crate::common::{TestContext, uv_snapshot};
#[test]
fn tool_dir() {
let context = TestContext::new("3.12");
let tool_dir = context.temp_dir.child("tools");
let bin_dir = context.temp_dir.child("bin");
uv_snapshot!(context.filters(), context.tool_dir()
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
[TEMP_DIR]/tools
----- stderr -----
"###);
}
#[test]
fn tool_dir_bin() {
let context = TestContext::new("3.12");
let tool_dir = context.temp_dir.child("tools");
let bin_dir = context.temp_dir.child("bin");
uv_snapshot!(context.filters(), context.tool_dir().arg("--bin")
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
[TEMP_DIR]/bin
----- stderr -----
"###);
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/cache_size.rs | crates/uv/tests/it/cache_size.rs | use assert_cmd::assert::OutputAssertExt;
use crate::common::{TestContext, uv_snapshot};
/// Test that `cache size` returns 0 for an empty cache directory (raw output).
#[test]
fn cache_size_empty_raw() {
let context = TestContext::new("3.12");
// Clean cache first to ensure truly empty state
context.clean().assert().success();
uv_snapshot!(context.cache_size().arg("--preview"), @r"
success: true
exit_code: 0
----- stdout -----
0
----- stderr -----
");
}
/// Test that `cache size` returns raw bytes after installing packages.
#[test]
fn cache_size_with_packages_raw() {
let context = TestContext::new("3.12");
// Install a requirement to populate the cache.
context.pip_install().arg("iniconfig").assert().success();
// Check cache size is now positive (raw bytes).
uv_snapshot!(context.with_filtered_cache_size().filters(), context.cache_size().arg("--preview"), @r"
success: true
exit_code: 0
----- stdout -----
[SIZE]
----- stderr -----
");
}
/// Test that `cache size --human` returns human-readable format after installing packages.
#[test]
fn cache_size_with_packages_human() {
let context = TestContext::new("3.12");
// Install a requirement to populate the cache.
context.pip_install().arg("iniconfig").assert().success();
// Check cache size with --human flag
uv_snapshot!(context.with_filtered_cache_size().filters(), context.cache_size().arg("--preview").arg("--human"), @r"
success: true
exit_code: 0
----- stdout -----
[SIZE]
----- stderr -----
");
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/python_find.rs | crates/uv/tests/it/python_find.rs | use assert_cmd::assert::OutputAssertExt;
use assert_fs::prelude::{FileTouch, PathChild};
use assert_fs::{fixture::FileWriteStr, prelude::PathCreateDir};
use indoc::indoc;
use uv_platform::{Arch, Os};
use uv_static::EnvVars;
use crate::common::{TestContext, uv_snapshot, venv_bin_path};
/// Exercise the core `uv python find` request forms: empty path, version
/// requests, implementation requests, full/partial key syntax, and path order.
#[test]
fn python_find() {
    let mut context: TestContext =
        TestContext::new_with_versions(&["3.11", "3.12"]).with_filtered_python_sources();

    // No interpreters on the path
    uv_snapshot!(context.filters(), context.python_find().env(EnvVars::UV_TEST_PYTHON_PATH, ""), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: No interpreter found in [PYTHON SOURCES]
    ");

    // We find the first interpreter on the path
    uv_snapshot!(context.filters(), context.python_find(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    ");

    // Request Python 3.12
    uv_snapshot!(context.filters(), context.python_find().arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    "###);

    // Request Python 3.12 via a version specifier
    uv_snapshot!(context.filters(), context.python_find().arg("==3.12.*"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    "###);

    // Request Python 3.11
    uv_snapshot!(context.filters(), context.python_find().arg("3.11"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    "###);

    // Request CPython (implementation name only; resolves to the first match)
    uv_snapshot!(context.filters(), context.python_find().arg("cpython"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    ");

    // Request CPython 3.12
    uv_snapshot!(context.filters(), context.python_find().arg("cpython@3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    "###);

    // Request CPython 3.12 via partial key syntax
    uv_snapshot!(context.filters(), context.python_find().arg("cpython-3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    "###);

    // Request Python 3.12 via partial key syntax with placeholders
    uv_snapshot!(context.filters(), context.python_find().arg("any-3.12-any"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    "###);

    // Request CPython 3.12 for the current platform (full key syntax)
    let os = Os::from_env();
    let arch = Arch::from_env();
    uv_snapshot!(context.filters(), context.python_find()
        .arg(format!("cpython-3.12-{os}-{arch}")), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    ");

    // Request PyPy (which should be missing)
    uv_snapshot!(context.filters(), context.python_find().arg("pypy"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: No interpreter found for PyPy in [PYTHON SOURCES]
    ");

    // Swap the order of the Python versions; the first on the path now wins
    context.python_versions.reverse();

    uv_snapshot!(context.filters(), context.python_find(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    ");

    // Request Python 3.11
    uv_snapshot!(context.filters(), context.python_find().arg("3.11"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    "###);
}
/// `uv python find` should respect a `.python-version` pin, including pins in
/// parent directories, with explicit requests and `--no-config` overriding it.
#[test]
fn python_find_pin() {
    let context: TestContext = TestContext::new_with_versions(&["3.11", "3.12"]);

    // Pin to a version
    uv_snapshot!(context.filters(), context.python_pin().arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `.python-version` to `3.12`
    ----- stderr -----
    "###);

    // We should find the pinned version, not the first on the path
    uv_snapshot!(context.filters(), context.python_find(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    "###);

    // Unless explicitly requested
    uv_snapshot!(context.filters(), context.python_find().arg("3.11"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    "###);

    // Or `--no-config` is used
    uv_snapshot!(context.filters(), context.python_find().arg("--no-config"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    ");

    let child_dir = context.temp_dir.child("child");
    child_dir.create_dir_all().unwrap();

    // We should also find pinned versions in the parent directory
    uv_snapshot!(context.filters(), context.python_find().current_dir(&child_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    "###);

    uv_snapshot!(context.filters(), context.python_pin().arg("3.11").current_dir(&child_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `.python-version` to `3.11`
    ----- stderr -----
    "###);

    // Unless the child directory also has a pin
    uv_snapshot!(context.filters(), context.python_find().current_dir(&child_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    "###);
}
/// Arbitrary (non-version) names in `.python-version` files are rejected by
/// `uv python pin` and ignored — with a warning — by `uv python find`.
#[test]
fn python_find_pin_arbitrary_name() {
    let context: TestContext = TestContext::new_with_versions(&["3.11", "3.12"]);

    // Try to pin to an arbitrary name
    uv_snapshot!(context.filters(), context.python_pin().arg("foo"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Requests for arbitrary names (e.g., `foo`) are not supported in version files
    ");

    // Pin to an arbitrary name, bypassing uv
    context
        .temp_dir
        .child(".python-version")
        .write_str("foo")
        .unwrap();

    // The arbitrary name should be ignored
    uv_snapshot!(context.filters(), context.python_find(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    warning: Ignoring unsupported Python request `foo` in version file: [TEMP_DIR]/.python-version
    ");

    // The pin should be updatable
    uv_snapshot!(context.filters(), context.python_pin().arg("3.11"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `.python-version` to `3.11`
    ----- stderr -----
    warning: Ignoring unsupported Python request `foo` in version file: [TEMP_DIR]/.python-version
    ");

    // Warnings shouldn't appear afterwards...
    uv_snapshot!(context.filters(), context.python_pin().arg("3.12"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `.python-version` from `3.11` -> `3.12`
    ----- stderr -----
    ");

    // Pin in a sub-directory
    context.temp_dir.child("foo").create_dir_all().unwrap();
    context
        .temp_dir
        .child("foo")
        .child(".python-version")
        .write_str("foo")
        .unwrap();

    // The arbitrary name should be ignored, but we won't walk up to the parent `.python-version`
    // file (which contains 3.12); this behavior is a little questionable but we probably want to
    // ignore all empty version files if we want to change this?
    uv_snapshot!(context.filters(), context.python_find().current_dir(context.temp_dir.child("foo").path()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    warning: Ignoring unsupported Python request `foo` in version file: [TEMP_DIR]/foo/.python-version
    ");
}
/// `uv python find` should respect `project.requires-python`, warning when an
/// explicit request or pin resolves to an incompatible interpreter.
#[test]
fn python_find_project() {
    let context: TestContext = TestContext::new_with_versions(&["3.10", "3.11", "3.12"]);

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml
        .write_str(indoc! {r#"
            [project]
            name = "project"
            version = "0.1.0"
            requires-python = ">=3.11"
            dependencies = ["anyio==3.7.0"]
        "#})
        .unwrap();

    // We should respect the project's required version, not the first on the path
    uv_snapshot!(context.filters(), context.python_find(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    ");

    // Unless explicitly requested
    uv_snapshot!(context.filters(), context.python_find().arg("3.10"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.10]
    ----- stderr -----
    warning: The requested interpreter resolved to Python 3.10.[X], which is incompatible with the project's Python requirement: `>=3.11` (from `project.requires-python`)
    ");

    // Or `--no-project` is used
    uv_snapshot!(context.filters(), context.python_find().arg("--no-project"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.10]
    ----- stderr -----
    ");

    // But a pin should take precedence
    uv_snapshot!(context.filters(), context.python_pin().arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `.python-version` to `3.12`
    ----- stderr -----
    "###);

    uv_snapshot!(context.filters(), context.python_find(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    "###);

    // Create a pin that's incompatible with the project
    uv_snapshot!(context.filters(), context.python_pin().arg("3.10").arg("--no-workspace"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `.python-version` from `3.12` -> `3.10`
    ----- stderr -----
    "###);

    // We should warn on subsequent uses, but respect the pinned version?
    uv_snapshot!(context.filters(), context.python_find(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.10]
    ----- stderr -----
    warning: The Python request from `.python-version` resolved to Python 3.10.[X], which is incompatible with the project's Python requirement: `>=3.11` (from `project.requires-python`)
    Use `uv python pin` to update the `.python-version` file to a compatible version
    ");

    // Unless the pin file is outside the project, in which case we should just ignore it
    let child_dir = context.temp_dir.child("child");
    child_dir.create_dir_all().unwrap();

    let pyproject_toml = child_dir.child("pyproject.toml");
    pyproject_toml
        .write_str(indoc! {r#"
            [project]
            name = "project"
            version = "0.1.0"
            requires-python = ">=3.11"
            dependencies = ["anyio==3.7.0"]
        "#})
        .unwrap();

    uv_snapshot!(context.filters(), context.python_find().current_dir(&child_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    ");
}
/// Test how `uv python find` reacts to a pyproject.toml with no `[project]`
/// table and nothing else useful to uv (only third-party tool config).
#[test]
fn virtual_empty() {
    // testing how `uv python find` reacts to a pyproject with no `[project]` and nothing useful to it
    let context = TestContext::new_with_versions(&["3.10", "3.11", "3.12"]);

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml
        .write_str(indoc! {r#"
            [tool.mycooltool]
            wow = "someconfig"
        "#})
        .unwrap();

    // Ask for the python
    uv_snapshot!(context.filters(), context.python_find(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.10]
    ----- stderr -----
    ");

    // Ask for the python (--no-project)
    uv_snapshot!(context.filters(), context.python_find()
        .arg("--no-project"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.10]
    ----- stderr -----
    ");

    // Ask for specific python (3.11)
    uv_snapshot!(context.filters(), context.python_find().arg("3.11"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    ");

    // Create a pin
    uv_snapshot!(context.filters(), context.python_pin().arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `.python-version` to `3.12`
    ----- stderr -----
    "###);

    // Ask for the python
    uv_snapshot!(context.filters(), context.python_find(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    "###);

    // Ask for specific python (3.11)
    uv_snapshot!(context.filters(), context.python_find().arg("3.11"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    ");

    // Ask for the python again — NOTE(review): the original comment said
    // `--no-project`, but no flag is passed here; the pin is still respected
    // (the snapshot shows 3.12, the pinned version).
    uv_snapshot!(context.filters(), context.python_find(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    "###);
}
/// Basic `uv python find` behavior when the pyproject.toml is fully virtual:
/// no `[project]` table, only `[dependency-groups]` (which shouldn't matter).
#[test]
fn virtual_dependency_group() {
    // testing basic `uv python find` functionality
    // when the pyproject.toml is fully virtual (no `[project]`, but `[dependency-groups]` defined,
    // which really shouldn't matter)
    let context = TestContext::new_with_versions(&["3.10", "3.11", "3.12"]);

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml
        .write_str(indoc! {r#"
            [dependency-groups]
            foo = ["sortedcontainers"]
            bar = ["iniconfig"]
            dev = ["sniffio"]
        "#})
        .unwrap();

    // Ask for the python
    uv_snapshot!(context.filters(), context.python_find(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.10]
    ----- stderr -----
    ");

    // Ask for the python (--no-project)
    uv_snapshot!(context.filters(), context.python_find()
        .arg("--no-project"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.10]
    ----- stderr -----
    ");

    // Ask for specific python (3.11)
    uv_snapshot!(context.filters(), context.python_find().arg("3.11"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    ");

    // Create a pin
    uv_snapshot!(context.filters(), context.python_pin().arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `.python-version` to `3.12`
    ----- stderr -----
    "###);

    // Ask for the python
    uv_snapshot!(context.filters(), context.python_find(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    "###);

    // Ask for specific python (3.11)
    uv_snapshot!(context.filters(), context.python_find().arg("3.11"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    ");

    // Ask for the python again — NOTE(review): the original comment said
    // `--no-project`, but no flag is passed here; the pin is still respected
    // (the snapshot shows 3.12, the pinned version).
    uv_snapshot!(context.filters(), context.python_find(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    "###);
}
/// Virtual environment discovery for `uv python find`: precedence of the local
/// `.venv`, `--system`/`UV_SYSTEM_PYTHON` overrides, parent/child lookup, and
/// interaction with `VIRTUAL_ENV` and `PATH` ordering.
#[test]
fn python_find_venv() {
    let context: TestContext = TestContext::new_with_versions(&["3.11", "3.12"])
        // Enable additional filters for Windows compatibility
        .with_filtered_exe_suffix()
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin();

    // Create a virtual environment
    uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.12").arg("-q"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    ");

    // We should find it first
    // TODO(zanieb): On Windows, this has in a different display path for virtual environments which
    // is super annoying and requires some changes to how we represent working directories in the
    // test context to resolve.
    #[cfg(not(windows))]
    uv_snapshot!(context.filters(), context.python_find(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [VENV]/[BIN]/[PYTHON]
    ----- stderr -----
    ");

    // Even if the `VIRTUAL_ENV` is not set (the test context includes this by default)
    #[cfg(not(windows))]
    uv_snapshot!(context.filters(), context.python_find().env_remove(EnvVars::VIRTUAL_ENV), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [VENV]/[BIN]/[PYTHON]
    ----- stderr -----
    ");

    let child_dir = context.temp_dir.child("child");
    child_dir.create_dir_all().unwrap();

    // Unless the system flag is passed
    uv_snapshot!(context.filters(), context.python_find().arg("--system"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    ");

    // Or, `UV_SYSTEM_PYTHON` is set
    uv_snapshot!(context.filters(), context.python_find().env(EnvVars::UV_SYSTEM_PYTHON, "1"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    ");

    // Unless, `--no-system` is included
    // TODO(zanieb): Report this as a bug upstream — this should be allowed.
    uv_snapshot!(context.filters(), context.python_find().arg("--no-system").env(EnvVars::UV_SYSTEM_PYTHON, "1"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: the argument '--no-system' cannot be used with '--system'
    Usage: uv python find --cache-dir [CACHE_DIR] [REQUEST]
    For more information, try '--help'.
    ");

    // We should find virtual environments from a child directory
    #[cfg(not(windows))]
    uv_snapshot!(context.filters(), context.python_find().current_dir(&child_dir).env_remove(EnvVars::VIRTUAL_ENV), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [VENV]/[BIN]/[PYTHON]
    ----- stderr -----
    ");

    // A virtual environment in the child directory takes precedence over the parent
    uv_snapshot!(context.filters(), context.venv().arg("--python").arg("3.11").arg("-q").current_dir(&child_dir), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    ");

    #[cfg(not(windows))]
    uv_snapshot!(context.filters(), context.python_find().current_dir(&child_dir).env_remove(EnvVars::VIRTUAL_ENV), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [TEMP_DIR]/child/.venv/[BIN]/[PYTHON]
    ----- stderr -----
    ");

    // But if we delete the parent virtual environment
    fs_err::remove_dir_all(context.temp_dir.child(".venv")).unwrap();

    // And query from there... we should not find the child virtual environment
    uv_snapshot!(context.filters(), context.python_find(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    ");

    // Unless, it is requested by path
    #[cfg(not(windows))]
    uv_snapshot!(context.filters(), context.python_find().arg("child/.venv"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [TEMP_DIR]/child/.venv/[BIN]/[PYTHON]
    ----- stderr -----
    ");

    // Or activated via `VIRTUAL_ENV`
    #[cfg(not(windows))]
    uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, child_dir.join(".venv").as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [TEMP_DIR]/child/.venv/[BIN]/[PYTHON]
    ----- stderr -----
    ");

    // Or at the front of the PATH
    #[cfg(not(windows))]
    uv_snapshot!(context.filters(), context.python_find().env(EnvVars::UV_TEST_PYTHON_PATH, child_dir.join(".venv").join("bin").as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [TEMP_DIR]/child/.venv/[BIN]/[PYTHON]
    ----- stderr -----
    ");

    // This holds even if there are other directories before it in the path, as long as they do
    // not contain a Python executable
    #[cfg(not(windows))]
    {
        let path = std::env::join_paths(&[
            context.temp_dir.to_path_buf(),
            child_dir.join(".venv").join("bin"),
        ])
        .unwrap();
        uv_snapshot!(context.filters(), context.python_find().env(EnvVars::UV_TEST_PYTHON_PATH, path.as_os_str()), @r"
        success: true
        exit_code: 0
        ----- stdout -----
        [TEMP_DIR]/child/.venv/[BIN]/[PYTHON]
        ----- stderr -----
        ");
    }

    // But, if there's an executable _before_ the virtual environment — we prefer that
    #[cfg(not(windows))]
    {
        let path = std::env::join_paths(
            std::env::split_paths(&context.python_path())
                .chain(std::iter::once(child_dir.join(".venv").join("bin"))),
        )
        .unwrap();
        uv_snapshot!(context.filters(), context.python_find().env(EnvVars::UV_TEST_PYTHON_PATH, path.as_os_str()), @r"
        success: true
        exit_code: 0
        ----- stdout -----
        [PYTHON-3.11]
        ----- stderr -----
        ");
    }
}
/// Requests for Python versions below the supported floor (<3.7), future
/// versions, and free-threading on pre-3.13 should fail with clear errors.
#[cfg(unix)]
#[test]
fn python_find_unsupported_version() {
    let context: TestContext = TestContext::new_with_versions(&["3.12"]);

    // Request a low version
    uv_snapshot!(context.filters(), context.python_find().arg("3.6"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Invalid version request: Python <3.7 is not supported but 3.6 was requested.
    "###);

    // Request a low version with a patch
    uv_snapshot!(context.filters(), context.python_find().arg("3.6.9"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Invalid version request: Python <3.7 is not supported but 3.6.9 was requested.
    "###);

    // Request a really low version
    uv_snapshot!(context.filters(), context.python_find().arg("2.6"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Invalid version request: Python <3.7 is not supported but 2.6 was requested.
    "###);

    // Request a really low version with a patch
    uv_snapshot!(context.filters(), context.python_find().arg("2.6.8"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Invalid version request: Python <3.7 is not supported but 2.6.8 was requested.
    "###);

    // Request a future version
    uv_snapshot!(context.filters(), context.python_find().arg("4.2"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: No interpreter found for Python 4.2 in virtual environments, managed installations, or search path
    "###);

    // Request a low version with a range
    uv_snapshot!(context.filters(), context.python_find().arg("<3.0"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: No interpreter found for Python <3.0 in virtual environments, managed installations, or search path
    "###);

    // Request free-threaded Python on unsupported version
    uv_snapshot!(context.filters(), context.python_find().arg("3.12t"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Invalid version request: Python <3.13 does not support free-threading but 3.12+freethreaded was requested.
    ");
}
/// Handling of broken virtual environments: a missing interpreter in an
/// *active* venv is fatal, while an inactive or `pyvenv.cfg`-less venv is
/// silently skipped in favor of other interpreters.
#[test]
fn python_find_venv_invalid() {
    let context: TestContext = TestContext::new("3.12")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin()
        .with_filtered_exe_suffix();

    // We find the virtual environment
    uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [VENV]/[BIN]/[PYTHON]
    ----- stderr -----
    ");

    // If the binaries are missing from a virtual environment, we fail
    fs_err::remove_dir_all(venv_bin_path(&context.venv)).unwrap();
    uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str()), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Failed to inspect Python interpreter from active virtual environment at `.venv/[BIN]/[PYTHON]`
    Caused by: Python interpreter not found at `[VENV]/[BIN]/[PYTHON]`
    ");

    // Unless the virtual environment is not active
    uv_snapshot!(context.filters(), context.python_find(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    ");

    // If there's not a `pyvenv.cfg` file, it's also non-fatal, we ignore the environment
    fs_err::remove_file(context.venv.join("pyvenv.cfg")).unwrap();
    uv_snapshot!(context.filters(), context.python_find().env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    ");
}
/// `--managed-python`, `--no-managed-python`, and `--python-preference system`
/// should restrict or prioritize which interpreter sources are considered.
#[test]
fn python_find_managed() {
    let context: TestContext = TestContext::new_with_versions(&["3.11", "3.12"])
        .with_filtered_python_sources()
        .with_versions_as_managed(&["3.12"]);

    // We find the managed interpreter
    uv_snapshot!(context.filters(), context.python_find().arg("--managed-python"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    ");

    // Request an interpreter that cannot be satisfied (3.11 is not managed here)
    uv_snapshot!(context.filters(), context.python_find().arg("--managed-python").arg("3.11"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: No interpreter found for Python 3.11 in virtual environments or managed installations
    ");

    // Fresh context with the opposite arrangement: only 3.11 is managed
    let context: TestContext = TestContext::new_with_versions(&["3.11", "3.12"])
        .with_filtered_python_sources()
        .with_versions_as_managed(&["3.11"]);

    // We find the unmanaged interpreter with managed Python disabled
    uv_snapshot!(context.filters(), context.python_find().arg("--no-managed-python"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    ");

    // Request an interpreter that cannot be satisfied (3.11 is managed-only)
    uv_snapshot!(context.filters(), context.python_find().arg("--no-managed-python").arg("3.11"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: No interpreter found for Python 3.11 in [PYTHON SOURCES]
    ");

    // We find the unmanaged interpreter with system Python preferred
    uv_snapshot!(context.filters(), context.python_find().arg("--python-preference").arg("system"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.12]
    ----- stderr -----
    ");

    // But, if no system Python meets the request, we'll use the managed interpreter
    uv_snapshot!(context.filters(), context.python_find().arg("--python-preference").arg("system").arg("3.11"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTHON-3.11]
    ----- stderr -----
    ");
}
/// See: <https://github.com/astral-sh/uv/issues/11825>
///
/// A `pythonX.Y`-named executable should satisfy patch-level range requests
/// (e.g., `>=3.11.4`), since the actual patch version is queried at runtime.
///
/// This test will not succeed on macOS if using a Homebrew provided interpreter. The interpreter
/// reports `sys.executable` as the canonicalized path instead of `[TEMP_DIR]/...`. For this reason,
/// it's marked as requiring our `python-managed` feature — but it does not enforce that these are
/// used in the test context.
#[test]
#[cfg(unix)]
#[cfg(feature = "python-managed")]
fn python_required_python_major_minor() {
    let context: TestContext = TestContext::new_with_versions(&["3.11", "3.12"]);

    // Find the Python 3.11 executable.
    let path = &context.python_versions.first().unwrap().1;

    // Symlink it to `python3.11`.
    fs_err::create_dir_all(context.temp_dir.child("child")).unwrap();
    fs_err::os::unix::fs::symlink(path, context.temp_dir.child("child").join("python3.11"))
        .unwrap();

    // Find `python3.11`, which is `>=3.11.4`.
    uv_snapshot!(context.filters(), context.python_find().arg(">=3.11.4, <3.12").env(EnvVars::UV_TEST_PYTHON_PATH, context.temp_dir.child("child").path()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [TEMP_DIR]/child/python3.11
    ----- stderr -----
    ");

    // Find `python3.11`, which is `>3.11.4`.
    uv_snapshot!(context.filters(), context.python_find().arg(">3.11.4, <3.12").env(EnvVars::UV_TEST_PYTHON_PATH, context.temp_dir.child("child").path()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [TEMP_DIR]/child/python3.11
    ----- stderr -----
    ");

    // Fail to find any matching Python interpreter.
    uv_snapshot!(context.filters(), context.python_find().arg(">3.11.255, <3.12").env(EnvVars::UV_TEST_PYTHON_PATH, context.temp_dir.child("child").path()), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: No interpreter found for Python >3.11.[X], <3.12 in virtual environments, managed installations, or search path
    "###);
}
/// `uv python find --script` should report the interpreter from the script's
/// cached environment once one has been created via `uv sync --script`.
#[test]
fn python_find_script() {
    let context = TestContext::new("3.13")
        .with_filtered_virtualenv_bin()
        .with_filtered_python_names()
        .with_filtered_exe_suffix();

    // Create a PEP 723 script
    uv_snapshot!(context.filters(), context.init().arg("--script").arg("foo.py"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Initialized script at `foo.py`
    ");

    // Create the script's environment
    uv_snapshot!(context.filters(), context.sync().arg("--script").arg("foo.py"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Creating script environment at: [CACHE_DIR]/environments-v2/foo-[HASH]
    Resolved in [TIME]
    Audited in [TIME]
    ");

    // The script environment's interpreter should be reported
    uv_snapshot!(context.filters(), context.python_find().arg("--script").arg("foo.py"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [CACHE_DIR]/environments-v2/foo-[HASH]/[BIN]/[PYTHON]
    ----- stderr -----
    ");
}
/// `uv python find --script` without a cached script environment falls back to
/// a regular interpreter search (here, the test context's venv).
#[test]
fn python_find_script_no_environment() {
    let context = TestContext::new("3.13")
        .with_filtered_virtualenv_bin()
        .with_filtered_python_names()
        .with_filtered_exe_suffix();

    // A PEP 723 script with no synced environment
    let script = context.temp_dir.child("foo.py");
    script
        .write_str(indoc! {r"
            # /// script
            # dependencies = []
            # ///
        "})
        .unwrap();

    uv_snapshot!(context.filters(), context.python_find().arg("--script").arg("foo.py"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [VENV]/[BIN]/[PYTHON]
    ----- stderr -----
    ");
}
/// `uv python find --script` with no interpreters available should fail with a
/// hint about the disabled managed-Python download.
#[test]
fn python_find_script_python_not_found() {
    // No interpreters on the path at all
    let context = TestContext::new_with_versions(&[]).with_filtered_python_sources();

    let script = context.temp_dir.child("foo.py");
    script
        .write_str(indoc! {r"
            # /// script
            # dependencies = []
            # ///
        "})
        .unwrap();

    uv_snapshot!(context.filters(), context.python_find().arg("--script").arg("foo.py"), @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
    No interpreter found in [PYTHON SOURCES]
    hint: A managed Python download is available, but Python downloads are set to 'never'
    ");
}
/// `uv python find --script` should fail when the script's `requires-python`
/// cannot be satisfied by any available interpreter.
#[test]
fn python_find_script_no_such_version() {
    let context = TestContext::new("3.13")
        .with_filtered_virtualenv_bin()
        .with_filtered_python_names()
        .with_filtered_exe_suffix()
        .with_filtered_python_sources();

    // A script that the available 3.13 interpreter can satisfy
    let script = context.temp_dir.child("foo.py");
    script
        .write_str(indoc! {r#"
            # /// script
            # requires-python = ">=3.13"
            # dependencies = []
            # ///
        "#})
        .unwrap();

    uv_snapshot!(context.filters(), context.sync().arg("--script").arg("foo.py"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Creating script environment at: [CACHE_DIR]/environments-v2/foo-[HASH]
    Resolved in [TIME]
    Audited in [TIME]
    ");

    // Raise the requirement beyond any available interpreter
    script
        .write_str(indoc! {r#"
            # /// script
            # requires-python = ">=3.15"
            # dependencies = []
            # ///
        "#})
        .unwrap();

    uv_snapshot!(context.filters(), context.python_find().arg("--script").arg("foo.py"), @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
    No interpreter found for Python >=3.15 in [PYTHON SOURCES]
    ");
}
#[test]
fn python_find_show_version() {
let context: TestContext =
TestContext::new_with_versions(&["3.11", "3.12"]).with_filtered_python_sources();
// No interpreters found
uv_snapshot!(context.filters(), context.python_find().env(EnvVars::UV_TEST_PYTHON_PATH, "").arg("--show-version"), @r"
success: false
exit_code: 2
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/tool_list.rs | crates/uv/tests/it/tool_list.rs | use crate::common::{self, TestContext, uv_snapshot};
use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::fixture::PathChild;
use fs_err as fs;
use insta::assert_snapshot;
use uv_static::EnvVars;
/// `uv tool list` should show each installed tool with its version and
/// entrypoints.
#[test]
fn tool_list() {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `black`
    context
        .tool_install()
        .arg("black==24.2.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();

    uv_snapshot!(context.filters(), context.tool_list()
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0
    - black
    - blackd
    ----- stderr -----
    "###);
}
/// `uv tool list --show-paths` should include the environment and entrypoint
/// paths alongside each tool.
#[test]
fn tool_list_paths() {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `black`
    context
        .tool_install()
        .arg("black==24.2.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();

    uv_snapshot!(context.filters(), context.tool_list().arg("--show-paths")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0 ([TEMP_DIR]/tools/black)
    - black ([TEMP_DIR]/bin/black)
    - blackd ([TEMP_DIR]/bin/blackd)
    ----- stderr -----
    "###);
}
/// Windows variant of `tool_list_paths`: verifies native backslash paths and
/// `.exe` suffixes are displayed (standard filters are disabled so the raw
/// platform paths are asserted).
#[cfg(windows)]
#[test]
fn tool_list_paths_windows() {
    let context = TestContext::new("3.12")
        .clear_filters()
        .with_filtered_windows_temp_dir();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `black`
    context
        .tool_install()
        .arg("black==24.2.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();

    uv_snapshot!(context.filters_without_standard_filters(), context.tool_list().arg("--show-paths")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0 ([TEMP_DIR]\tools\black)
    - black ([TEMP_DIR]\bin\black.exe)
    - blackd ([TEMP_DIR]\bin\blackd.exe)
    ----- stderr -----
    "###);
}
/// `uv tool list` with no tools installed should print a friendly message to
/// stderr and succeed.
#[test]
fn tool_list_empty() {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    uv_snapshot!(context.filters(), context.tool_list()
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    No tools installed
    "###);
}
/// A tool directory without its `uv-receipt.toml` should be skipped with a
/// warning rather than failing the listing.
#[test]
fn tool_list_missing_receipt() {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `black`
    context
        .tool_install()
        .arg("black==24.2.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();

    // Delete the receipt to simulate a corrupted installation
    fs_err::remove_file(tool_dir.join("black").join("uv-receipt.toml")).unwrap();

    uv_snapshot!(context.filters(), context.tool_list()
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    warning: Ignoring malformed tool `black` (run `uv tool uninstall black` to remove)
    "###);
}
/// A tool whose environment lost its interpreter should be reported as invalid
/// (with a reinstall hint) while other tools still list normally.
#[test]
fn tool_list_bad_environment() -> Result<()> {
    let context = TestContext::new("3.12")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `black`
    context
        .tool_install()
        .arg("black==24.2.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();

    // Install `ruff`
    context
        .tool_install()
        .arg("ruff==0.3.4")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();

    let venv_path = common::venv_bin_path(tool_dir.path().join("black"));
    // Remove the python interpreter for black
    fs::remove_dir_all(venv_path.clone())?;

    uv_snapshot!(
        context.filters(),
        context
            .tool_list()
            .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
            .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()),
        @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ruff v0.3.4
    - ruff
    ----- stderr -----
    warning: Invalid environment at `tools/black`: missing Python executable at `tools/black/[BIN]/[PYTHON]` (run `uv tool install black --reinstall` to reinstall)
    "
    );

    Ok(())
}
// Receipt format compatibility: a legacy (string-requirement) receipt is still
// readable, while an unparseable one causes the tool to be ignored with a
// warning rather than failing the whole `uv tool list` invocation.
#[test]
fn tool_list_deprecated() -> Result<()> {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Install `black`
    context
        .tool_install()
        .arg("black==24.2.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();
    // Ensure that we have a modern tool receipt.
    insta::with_settings!({
        filters => context.filters(),
    }, {
        assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
        [tool]
        requirements = [{ name = "black", specifier = "==24.2.0" }]
        entrypoints = [
            { name = "black", install-path = "[TEMP_DIR]/bin/black", from = "black" },
            { name = "blackd", install-path = "[TEMP_DIR]/bin/blackd", from = "black" },
        ]
        [tool.options]
        exclude-newer = "2024-03-25T00:00:00Z"
        "###);
    });
    // Replace with a legacy receipt.
    // NOTE: the literal `[TEMP_DIR]` placeholders are written as-is; listing
    // does not resolve the entrypoint paths, so this is sufficient here.
    fs::write(
        tool_dir.join("black").join("uv-receipt.toml"),
        r#"
        [tool]
        requirements = ["black==24.2.0"]
        entrypoints = [
            { name = "black", install-path = "[TEMP_DIR]/bin/black", from = "black" },
            { name = "blackd", install-path = "[TEMP_DIR]/bin/blackd", from = "black" },
        ]
    "#,
    )?;
    // Ensure that we can still list the tool.
    uv_snapshot!(context.filters(), context.tool_list()
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0
    - black
    - blackd
    ----- stderr -----
    "###);
    // Replace with an invalid receipt.
    // `black<>24.2.0` is not a valid requirement specifier.
    fs::write(
        tool_dir.join("black").join("uv-receipt.toml"),
        r#"
        [tool]
        requirements = ["black<>24.2.0"]
        entrypoints = [
            { name = "black", install-path = "[TEMP_DIR]/bin/black", from = "black" },
            { name = "blackd", install-path = "[TEMP_DIR]/bin/blackd", from = "black" },
        ]
    "#,
    )?;
    // Ensure that listing fails.
    uv_snapshot!(context.filters(), context.tool_list()
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    warning: Ignoring malformed tool `black` (run `uv tool uninstall black` to remove)
    "###);
    Ok(())
}
// `--show-version-specifiers` appends the originally-requested specifier (e.g.
// `[required: <24.3.0]`) after the resolved version; tools installed without a
// specifier (flask) show no bracket. Also verified in combination with
// `--show-paths`.
#[test]
fn tool_list_show_version_specifiers() {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Install `black` with a version specifier
    context
        .tool_install()
        .arg("black<24.3.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();
    // Install `flask`
    context
        .tool_install()
        .arg("flask")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();
    uv_snapshot!(context.filters(), context.tool_list().arg("--show-version-specifiers")
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0 [required: <24.3.0]
    - black
    - blackd
    flask v3.0.2
    - flask
    ----- stderr -----
    "###);
    // with paths
    uv_snapshot!(context.filters(), context.tool_list().arg("--show-version-specifiers").arg("--show-paths")
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0 [required: <24.3.0] ([TEMP_DIR]/tools/black)
    - black ([TEMP_DIR]/bin/black)
    - blackd ([TEMP_DIR]/bin/blackd)
    flask v3.0.2 ([TEMP_DIR]/tools/flask)
    - flask ([TEMP_DIR]/bin/flask)
    ----- stderr -----
    "###);
}
// `--show-with` appends the additional `--with` requirements (e.g.
// `[with: requests, black==24.2.0]`) to each tool line. Checked alone and in
// combination with `--show-paths` and `--show-version-specifiers`.
#[test]
fn tool_list_show_with() {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Install `black` without additional requirements
    context
        .tool_install()
        .arg("black==24.2.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();
    // Install `flask` with additional requirements
    context
        .tool_install()
        .arg("flask")
        .arg("--with")
        .arg("requests")
        .arg("--with")
        .arg("black==24.2.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();
    // Install `ruff` with version specifier and additional requirements
    context
        .tool_install()
        .arg("ruff==0.3.4")
        .arg("--with")
        .arg("requests")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();
    // Test with --show-with
    uv_snapshot!(context.filters(), context.tool_list().arg("--show-with")
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0
    - black
    - blackd
    flask v3.0.2 [with: requests, black==24.2.0]
    - flask
    ruff v0.3.4 [with: requests]
    - ruff
    ----- stderr -----
    "###);
    // Test with both --show-with and --show-paths
    uv_snapshot!(context.filters(), context.tool_list().arg("--show-with").arg("--show-paths")
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0 ([TEMP_DIR]/tools/black)
    - black ([TEMP_DIR]/bin/black)
    - blackd ([TEMP_DIR]/bin/blackd)
    flask v3.0.2 [with: requests, black==24.2.0] ([TEMP_DIR]/tools/flask)
    - flask ([TEMP_DIR]/bin/flask)
    ruff v0.3.4 [with: requests] ([TEMP_DIR]/tools/ruff)
    - ruff ([TEMP_DIR]/bin/ruff)
    ----- stderr -----
    "###);
    // Test with both --show-with and --show-version-specifiers
    uv_snapshot!(context.filters(), context.tool_list().arg("--show-with").arg("--show-version-specifiers")
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0 [required: ==24.2.0]
    - black
    - blackd
    flask v3.0.2 [with: requests, black==24.2.0]
    - flask
    ruff v0.3.4 [required: ==0.3.4] [with: requests]
    - ruff
    ----- stderr -----
    "###);
    // Test with all flags
    uv_snapshot!(context.filters(), context.tool_list()
    .arg("--show-with")
    .arg("--show-version-specifiers")
    .arg("--show-paths")
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0 [required: ==24.2.0] ([TEMP_DIR]/tools/black)
    - black ([TEMP_DIR]/bin/black)
    - blackd ([TEMP_DIR]/bin/blackd)
    flask v3.0.2 [with: requests, black==24.2.0] ([TEMP_DIR]/tools/flask)
    - flask ([TEMP_DIR]/bin/flask)
    ruff v0.3.4 [required: ==0.3.4] [with: requests] ([TEMP_DIR]/tools/ruff)
    - ruff ([TEMP_DIR]/bin/ruff)
    ----- stderr -----
    "###);
}
// `--show-extras` appends the extras the tool was installed with (e.g.
// `[extras: async, dotenv]`). Checked alone and combined with `--show-with`,
// `--show-paths`, and `--show-version-specifiers`, verifying bracket ordering.
#[test]
fn tool_list_show_extras() {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Install `black` without extras
    context
        .tool_install()
        .arg("black==24.2.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();
    // Install `flask` with extras and additional requirements
    context
        .tool_install()
        .arg("flask[async,dotenv]")
        .arg("--with")
        .arg("requests")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();
    // Test with --show-extras only
    uv_snapshot!(context.filters(), context.tool_list().arg("--show-extras")
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0
    - black
    - blackd
    flask v3.0.2 [extras: async, dotenv]
    - flask
    ----- stderr -----
    "###);
    // Test with both --show-extras and --show-with
    uv_snapshot!(context.filters(), context.tool_list().arg("--show-extras").arg("--show-with")
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0
    - black
    - blackd
    flask v3.0.2 [extras: async, dotenv] [with: requests]
    - flask
    ----- stderr -----
    "###);
    // Test with --show-extras and --show-paths
    uv_snapshot!(context.filters(), context.tool_list().arg("--show-extras").arg("--show-paths")
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0 ([TEMP_DIR]/tools/black)
    - black ([TEMP_DIR]/bin/black)
    - blackd ([TEMP_DIR]/bin/blackd)
    flask v3.0.2 [extras: async, dotenv] ([TEMP_DIR]/tools/flask)
    - flask ([TEMP_DIR]/bin/flask)
    ----- stderr -----
    "###);
    // Test with --show-extras and --show-version-specifiers
    uv_snapshot!(context.filters(), context.tool_list().arg("--show-extras").arg("--show-version-specifiers")
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0 [required: ==24.2.0]
    - black
    - blackd
    flask v3.0.2 [extras: async, dotenv]
    - flask
    ----- stderr -----
    "###);
    // Test with all flags including --show-extras
    uv_snapshot!(context.filters(), context.tool_list()
    .arg("--show-extras")
    .arg("--show-with")
    .arg("--show-version-specifiers")
    .arg("--show-paths")
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0 [required: ==24.2.0] ([TEMP_DIR]/tools/black)
    - black ([TEMP_DIR]/bin/black)
    - blackd ([TEMP_DIR]/bin/blackd)
    flask v3.0.2 [extras: async, dotenv] [with: requests] ([TEMP_DIR]/tools/flask)
    - flask ([TEMP_DIR]/bin/flask)
    ----- stderr -----
    "###);
}
// `--show-python` appends the interpreter used by the tool environment (e.g.
// `[CPython 3.12.[X]]` — patch version filtered by the test context).
#[test]
fn tool_list_show_python() {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Install `black` with python 3.12
    context
        .tool_install()
        .arg("black==24.2.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();
    // Test with --show-python
    uv_snapshot!(context.filters(), context.tool_list().arg("--show-python")
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0 [CPython 3.12.[X]]
    - black
    - blackd
    ----- stderr -----
    "###);
}
// All display flags combined: verifies the annotation ordering on each line is
// `[required: ...] [extras: ...] [with: ...] [CPython ...] (path)`.
#[test]
fn tool_list_show_all() {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Install `black` without extras
    context
        .tool_install()
        .arg("black==24.2.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();
    // Install `flask` with extras and additional requirements
    context
        .tool_install()
        .arg("flask[async,dotenv]")
        .arg("--with")
        .arg("requests")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();
    // Test with all flags
    uv_snapshot!(context.filters(), context.tool_list()
    .arg("--show-extras")
    .arg("--show-with")
    .arg("--show-version-specifiers")
    .arg("--show-paths")
    .arg("--show-python")
    .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
    .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black v24.2.0 [required: ==24.2.0] [CPython 3.12.[X]] ([TEMP_DIR]/tools/black)
    - black ([TEMP_DIR]/bin/black)
    - blackd ([TEMP_DIR]/bin/blackd)
    flask v3.0.2 [extras: async, dotenv] [with: requests] [CPython 3.12.[X]] ([TEMP_DIR]/tools/flask)
    - flask ([TEMP_DIR]/bin/flask)
    ----- stderr -----
    "###);
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/help.rs | crates/uv/tests/it/help.rs | use uv_static::EnvVars;
use crate::common::{TestContext, uv_snapshot};
// `uv help` (the long-form help subcommand): snapshots the full top-level help
// text. Note the trailer differs from `uv --help`: this form prints
// "Use `uv help <command>` ..." and includes the `generate-shell-completion`
// command in the listing.
#[test]
fn help() {
    // No Python versions needed; this only exercises argument parsing.
    let context = TestContext::new_with_versions(&[]);
    // The `uv help` command should show the long help message
    uv_snapshot!(context.filters(), context.help(), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    An extremely fast Python package manager.
    Usage: uv [OPTIONS] <COMMAND>
    Commands:
      auth                       Manage authentication
      run                        Run a command or script
      init                       Create a new project
      add                        Add dependencies to the project
      remove                     Remove dependencies from the project
      version                    Read or update the project's version
      sync                       Update the project's environment
      lock                       Update the project's lockfile
      export                     Export the project's lockfile to an alternate format
      tree                       Display the project's dependency tree
      format                     Format Python code in the project
      tool                       Run and install commands provided by Python packages
      python                     Manage Python versions and installations
      pip                        Manage Python packages with a pip-compatible interface
      venv                       Create a virtual environment
      build                      Build Python packages into source distributions and wheels
      publish                    Upload distributions to an index
      cache                      Manage uv's cache
      self                       Manage the uv executable
      generate-shell-completion  Generate shell completion
      help                       Display documentation for a command
    Cache options:
      -n, --no-cache               Avoid reading from or writing to the cache, instead using a temporary
                                   directory for the duration of the operation [env: UV_NO_CACHE=]
          --cache-dir [CACHE_DIR]  Path to the cache directory [env: UV_CACHE_DIR=]
    Python options:
          --managed-python       Require use of uv-managed Python versions [env: UV_MANAGED_PYTHON=]
          --no-managed-python    Disable use of uv-managed Python versions [env: UV_NO_MANAGED_PYTHON=]
          --no-python-downloads  Disable automatic downloads of Python. [env:
                                 "UV_PYTHON_DOWNLOADS=never"]
    Global options:
      -q, --quiet...
              Use quiet output
      -v, --verbose...
              Use verbose output
          --color <COLOR_CHOICE>
              Control the use of color in output [possible values: auto, always, never]
          --native-tls
              Whether to load TLS certificates from the platform's native certificate store [env:
              UV_NATIVE_TLS=]
          --offline
              Disable network access [env: UV_OFFLINE=]
          --allow-insecure-host <ALLOW_INSECURE_HOST>
              Allow insecure connections to a host [env: UV_INSECURE_HOST=]
          --no-progress
              Hide all progress outputs [env: UV_NO_PROGRESS=]
          --directory <DIRECTORY>
              Change to the given directory prior to running the command [env: UV_WORKING_DIR=]
          --project <PROJECT>
              Discover a project in the given directory [env: UV_PROJECT=]
          --config-file <CONFIG_FILE>
              The path to a `uv.toml` file to use for configuration [env: UV_CONFIG_FILE=]
          --no-config
              Avoid discovering configuration files (`pyproject.toml`, `uv.toml`) [env: UV_NO_CONFIG=]
      -h, --help
              Display the concise help for this command
      -V, --version
              Display the uv version
    Use `uv help <command>` for more information on a specific command.
    ----- stderr -----
    "#);
}
// `uv --help` (the concise flag form): same command/option listing as `uv help`
// except `generate-shell-completion` is hidden and the trailer is the short
// "Use `uv help` for more details."
#[test]
fn help_flag() {
    let context = TestContext::new_with_versions(&[]);
    uv_snapshot!(context.filters(), context.command().arg("--help"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    An extremely fast Python package manager.
    Usage: uv [OPTIONS] <COMMAND>
    Commands:
      auth     Manage authentication
      run      Run a command or script
      init     Create a new project
      add      Add dependencies to the project
      remove   Remove dependencies from the project
      version  Read or update the project's version
      sync     Update the project's environment
      lock     Update the project's lockfile
      export   Export the project's lockfile to an alternate format
      tree     Display the project's dependency tree
      format   Format Python code in the project
      tool     Run and install commands provided by Python packages
      python   Manage Python versions and installations
      pip      Manage Python packages with a pip-compatible interface
      venv     Create a virtual environment
      build    Build Python packages into source distributions and wheels
      publish  Upload distributions to an index
      cache    Manage uv's cache
      self     Manage the uv executable
      help     Display documentation for a command
    Cache options:
      -n, --no-cache               Avoid reading from or writing to the cache, instead using a temporary
                                   directory for the duration of the operation [env: UV_NO_CACHE=]
          --cache-dir [CACHE_DIR]  Path to the cache directory [env: UV_CACHE_DIR=]
    Python options:
          --managed-python       Require use of uv-managed Python versions [env: UV_MANAGED_PYTHON=]
          --no-managed-python    Disable use of uv-managed Python versions [env: UV_NO_MANAGED_PYTHON=]
          --no-python-downloads  Disable automatic downloads of Python. [env:
                                 "UV_PYTHON_DOWNLOADS=never"]
    Global options:
      -q, --quiet...
              Use quiet output
      -v, --verbose...
              Use verbose output
          --color <COLOR_CHOICE>
              Control the use of color in output [possible values: auto, always, never]
          --native-tls
              Whether to load TLS certificates from the platform's native certificate store [env:
              UV_NATIVE_TLS=]
          --offline
              Disable network access [env: UV_OFFLINE=]
          --allow-insecure-host <ALLOW_INSECURE_HOST>
              Allow insecure connections to a host [env: UV_INSECURE_HOST=]
          --no-progress
              Hide all progress outputs [env: UV_NO_PROGRESS=]
          --directory <DIRECTORY>
              Change to the given directory prior to running the command [env: UV_WORKING_DIR=]
          --project <PROJECT>
              Discover a project in the given directory [env: UV_PROJECT=]
          --config-file <CONFIG_FILE>
              The path to a `uv.toml` file to use for configuration [env: UV_CONFIG_FILE=]
          --no-config
              Avoid discovering configuration files (`pyproject.toml`, `uv.toml`) [env: UV_NO_CONFIG=]
      -h, --help
              Display the concise help for this command
      -V, --version
              Display the uv version
    Use `uv help` for more details.
    ----- stderr -----
    "#);
}
// `uv -h`: the short flag must produce output identical to `uv --help`
// (clap renders the same concise help for both).
#[test]
fn help_short_flag() {
    let context = TestContext::new_with_versions(&[]);
    uv_snapshot!(context.filters(), context.command().arg("-h"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    An extremely fast Python package manager.
    Usage: uv [OPTIONS] <COMMAND>
    Commands:
      auth     Manage authentication
      run      Run a command or script
      init     Create a new project
      add      Add dependencies to the project
      remove   Remove dependencies from the project
      version  Read or update the project's version
      sync     Update the project's environment
      lock     Update the project's lockfile
      export   Export the project's lockfile to an alternate format
      tree     Display the project's dependency tree
      format   Format Python code in the project
      tool     Run and install commands provided by Python packages
      python   Manage Python versions and installations
      pip      Manage Python packages with a pip-compatible interface
      venv     Create a virtual environment
      build    Build Python packages into source distributions and wheels
      publish  Upload distributions to an index
      cache    Manage uv's cache
      self     Manage the uv executable
      help     Display documentation for a command
    Cache options:
      -n, --no-cache               Avoid reading from or writing to the cache, instead using a temporary
                                   directory for the duration of the operation [env: UV_NO_CACHE=]
          --cache-dir [CACHE_DIR]  Path to the cache directory [env: UV_CACHE_DIR=]
    Python options:
          --managed-python       Require use of uv-managed Python versions [env: UV_MANAGED_PYTHON=]
          --no-managed-python    Disable use of uv-managed Python versions [env: UV_NO_MANAGED_PYTHON=]
          --no-python-downloads  Disable automatic downloads of Python. [env:
                                 "UV_PYTHON_DOWNLOADS=never"]
    Global options:
      -q, --quiet...
              Use quiet output
      -v, --verbose...
              Use verbose output
          --color <COLOR_CHOICE>
              Control the use of color in output [possible values: auto, always, never]
          --native-tls
              Whether to load TLS certificates from the platform's native certificate store [env:
              UV_NATIVE_TLS=]
          --offline
              Disable network access [env: UV_OFFLINE=]
          --allow-insecure-host <ALLOW_INSECURE_HOST>
              Allow insecure connections to a host [env: UV_INSECURE_HOST=]
          --no-progress
              Hide all progress outputs [env: UV_NO_PROGRESS=]
          --directory <DIRECTORY>
              Change to the given directory prior to running the command [env: UV_WORKING_DIR=]
          --project <PROJECT>
              Discover a project in the given directory [env: UV_PROJECT=]
          --config-file <CONFIG_FILE>
              The path to a `uv.toml` file to use for configuration [env: UV_CONFIG_FILE=]
          --no-config
              Avoid discovering configuration files (`pyproject.toml`, `uv.toml`) [env: UV_NO_CONFIG=]
      -h, --help
              Display the concise help for this command
      -V, --version
              Display the uv version
    Use `uv help` for more details.
    ----- stderr -----
    "#);
}
// `uv help python`: the long-form help for a subcommand, including the full
// Python-discovery prose, the subcommand list, and the expanded (multi-line)
// option descriptions that the concise `-h` form would truncate.
#[test]
fn help_subcommand() {
    let context = TestContext::new_with_versions(&[]);
    uv_snapshot!(context.filters(), context.help().arg("python"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----
    Manage Python versions and installations
    Generally, uv first searches for Python in a virtual environment, either active or in a
    `.venv` directory in the current working directory or any parent directory. If a virtual
    environment is not required, uv will then search for a Python interpreter. Python
    interpreters are found by searching for Python executables in the `PATH` environment
    variable.
    On Windows, the registry is also searched for Python executables.
    By default, uv will download Python if a version cannot be found. This behavior can be
    disabled with the `--no-python-downloads` flag or the `python-downloads` setting.
    The `--python` option allows requesting a different interpreter.
    The following Python version request formats are supported:
    - `<version>` e.g. `3`, `3.12`, `3.12.3`
    - `<version-specifier>` e.g. `>=3.12,<3.13`
    - `<version><short-variant>` (e.g., `3.13t`, `3.12.0d`)
    - `<version>+<variant>` (e.g., `3.13+freethreaded`, `3.12.0+debug`)
    - `<implementation>` e.g. `cpython` or `cp`
    - `<implementation>@<version>` e.g. `cpython@3.12`
    - `<implementation><version>` e.g. `cpython3.12` or `cp312`
    - `<implementation><version-specifier>` e.g. `cpython>=3.12,<3.13`
    - `<implementation>-<version>-<os>-<arch>-<libc>` e.g. `cpython-3.12.3-macos-aarch64-none`
    Additionally, a specific system Python interpreter can often be requested with:
    - `<executable-path>` e.g. `/opt/homebrew/bin/python3`
    - `<executable-name>` e.g. `mypython3`
    - `<install-dir>` e.g. `/some/environment/`
    When the `--python` option is used, normal discovery rules apply but discovered interpreters
    are checked for compatibility with the request, e.g., if `pypy` is requested, uv will first
    check if the virtual environment contains a PyPy interpreter then check if each executable
    in the path is a PyPy interpreter.
    uv supports discovering CPython, PyPy, and GraalPy interpreters. Unsupported interpreters
    will be skipped during discovery. If an unsupported interpreter implementation is requested,
    uv will exit with an error.
    Usage: uv python [OPTIONS] <COMMAND>
    Commands:
      list          List the available Python installations
      install       Download and install Python versions
      upgrade       Upgrade installed Python versions
      find          Search for a Python installation
      pin           Pin to a specific Python version
      dir           Show the uv Python installation directory
      uninstall     Uninstall Python versions
      update-shell  Ensure that the Python executable directory is on the `PATH`
    Cache options:
      -n, --no-cache
              Avoid reading from or writing to the cache, instead using a temporary directory for the
              duration of the operation
              [env: UV_NO_CACHE=]
          --cache-dir [CACHE_DIR]
              Path to the cache directory.
              Defaults to `$XDG_CACHE_HOME/uv` or `$HOME/.cache/uv` on macOS and Linux, and
              `%LOCALAPPDATA%/uv/cache` on Windows.
              To view the location of the cache directory, run `uv cache dir`.
              [env: UV_CACHE_DIR=]
    Python options:
          --managed-python
              Require use of uv-managed Python versions.
              By default, uv prefers using Python versions it manages. However, it will use system
              Python versions if a uv-managed Python is not installed. This option disables use of
              system Python versions.
              [env: UV_MANAGED_PYTHON=]
          --no-managed-python
              Disable use of uv-managed Python versions.
              Instead, uv will search for a suitable Python version on the system.
              [env: UV_NO_MANAGED_PYTHON=]
          --no-python-downloads
              Disable automatic downloads of Python. [env: "UV_PYTHON_DOWNLOADS=never"]
    Global options:
      -q, --quiet...
              Use quiet output.
              Repeating this option, e.g., `-qq`, will enable a silent mode in which uv will write no
              output to stdout.
      -v, --verbose...
              Use verbose output.
              You can configure fine-grained logging using the `RUST_LOG` environment variable.
              (<https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives>)
          --color <COLOR_CHOICE>
              Control the use of color in output.
              By default, uv will automatically detect support for colors when writing to a terminal.
              Possible values:
              - auto:   Enables colored output only when the output is going to a terminal or TTY with
                support
              - always: Enables colored output regardless of the detected environment
              - never:  Disables colored output
          --native-tls
              Whether to load TLS certificates from the platform's native certificate store.
              By default, uv loads certificates from the bundled `webpki-roots` crate. The
              `webpki-roots` are a reliable set of trust roots from Mozilla, and including them in uv
              improves portability and performance (especially on macOS).
              However, in some cases, you may want to use the platform's native certificate store,
              especially if you're relying on a corporate trust root (e.g., for a mandatory proxy)
              that's included in your system's certificate store.
              [env: UV_NATIVE_TLS=]
          --offline
              Disable network access.
              When disabled, uv will only use locally cached data and locally available files.
              [env: UV_OFFLINE=]
          --allow-insecure-host <ALLOW_INSECURE_HOST>
              Allow insecure connections to a host.
              Can be provided multiple times.
              Expects to receive either a hostname (e.g., `localhost`), a host-port pair (e.g.,
              `localhost:8080`), or a URL (e.g., `https://localhost`).
              WARNING: Hosts included in this list will not be verified against the system's certificate
              store. Only use `--allow-insecure-host` in a secure network with verified sources, as it
              bypasses SSL verification and could expose you to MITM attacks.
              [env: UV_INSECURE_HOST=]
          --no-progress
              Hide all progress outputs.
              For example, spinners or progress bars.
              [env: UV_NO_PROGRESS=]
          --directory <DIRECTORY>
              Change to the given directory prior to running the command.
              Relative paths are resolved with the given directory as the base.
              See `--project` to only change the project root directory.
              [env: UV_WORKING_DIR=]
          --project <PROJECT>
              Discover a project in the given directory.
              All `pyproject.toml`, `uv.toml`, and `.python-version` files will be discovered by walking
              up the directory tree from the project root, as will the project's virtual environment
              (`.venv`).
              Other command-line arguments (such as relative paths) will be resolved relative to the
              current working directory.
              See `--directory` to change the working directory entirely.
              This setting has no effect when used in the `uv pip` interface.
              [env: UV_PROJECT=]
          --config-file <CONFIG_FILE>
              The path to a `uv.toml` file to use for configuration.
              While uv configuration can be included in a `pyproject.toml` file, it is not allowed in
              this context.
              [env: UV_CONFIG_FILE=]
          --no-config
              Avoid discovering configuration files (`pyproject.toml`, `uv.toml`).
              Normally, configuration files are discovered in the current directory, parent directories,
              or user configuration directories.
              [env: UV_NO_CONFIG=]
      -h, --help
              Display the concise help for this command
    Use `uv help python <command>` for more information on a specific command.
    ----- stderr -----
    "#);
}
#[test]
fn help_subsubcommand() {
let context = TestContext::new_with_versions(&[]);
uv_snapshot!(context.filters(), context.help().env_remove(EnvVars::UV_PYTHON_INSTALL_DIR).arg("python").arg("install"), @r#"
success: true
exit_code: 0
----- stdout -----
Download and install Python versions.
Supports CPython and PyPy. CPython distributions are downloaded from the Astral
`python-build-standalone` project. PyPy distributions are downloaded from `python.org`. The
available Python versions are bundled with each uv release. To install new Python versions, you may
need upgrade uv.
Python versions are installed into the uv Python directory, which can be retrieved with `uv python
dir`.
By default, Python executables are added to a directory on the path with a minor version suffix,
e.g., `python3.13`. To install `python3` and `python`, use the `--default` flag. Use `uv python dir
--bin` to see the target directory.
Multiple Python versions may be requested.
See `uv help python` to view supported request formats.
Usage: uv python install [OPTIONS] [TARGETS]...
Arguments:
[TARGETS]...
The Python version(s) to install.
If not provided, the requested Python version(s) will be read from the `UV_PYTHON`
environment variable then `.python-versions` or `.python-version` files. If none of the
above are present, uv will check if it has installed any Python versions. If not, it will
install the latest stable version of Python.
See `uv help python` to view supported request formats.
[env: UV_PYTHON=]
Options:
-i, --install-dir <INSTALL_DIR>
The directory to store the Python installation in.
If provided, `UV_PYTHON_INSTALL_DIR` will need to be set for subsequent operations for uv
to discover the Python installation.
See `uv python dir` to view the current Python installation directory. Defaults to
`~/.local/share/uv/python`.
[env: UV_PYTHON_INSTALL_DIR=]
--no-bin
Do not install a Python executable into the `bin` directory.
This can also be set with `UV_PYTHON_INSTALL_BIN=0`.
--no-registry
Do not register the Python installation in the Windows registry.
This can also be set with `UV_PYTHON_INSTALL_REGISTRY=0`.
--mirror <MIRROR>
Set the URL to use as the source for downloading Python installations.
The provided URL will replace
`https://github.com/astral-sh/python-build-standalone/releases/download` in, e.g.,
`https://github.com/astral-sh/python-build-standalone/releases/download/20240713/cpython-3.12.4%2B20240713-aarch64-apple-darwin-install_only.tar.gz`.
Distributions can be read from a local directory by using the `file://` URL scheme.
--pypy-mirror <PYPY_MIRROR>
Set the URL to use as the source for downloading PyPy installations.
The provided URL will replace `https://downloads.python.org/pypy` in, e.g.,
`https://downloads.python.org/pypy/pypy3.8-v7.3.7-osx64.tar.bz2`.
Distributions can be read from a local directory by using the `file://` URL scheme.
--python-downloads-json-url <PYTHON_DOWNLOADS_JSON_URL>
URL pointing to JSON of custom Python installations
-r, --reinstall
Reinstall the requested Python version, if it's already installed.
By default, uv will exit successfully if the version is already installed.
-f, --force
Replace existing Python executables during installation.
By default, uv will refuse to replace executables that it does not manage.
Implies `--reinstall`.
-U, --upgrade
Upgrade existing Python installations to the latest patch version.
By default, uv will not upgrade already-installed Python versions to newer patch releases.
With `--upgrade`, uv will upgrade to the latest available patch version for the specified
minor version(s).
If the requested versions are not yet installed, uv will install them.
This option is only supported for minor version requests, e.g., `3.12`; uv will exit with
an error if a patch version, e.g., `3.12.2`, is requested.
--default
Use as the default Python version.
By default, only a `python{major}.{minor}` executable is installed, e.g., `python3.10`.
When the `--default` flag is used, `python{major}`, e.g., `python3`, and `python`
executables are also installed.
Alternative Python variants will still include their tag. For example, installing
3.13+freethreaded with `--default` will include `python3t` and `pythont` instead of
`python3` and `python`.
If multiple Python versions are requested, uv will exit with an error.
Cache options:
-n, --no-cache
Avoid reading from or writing to the cache, instead using a temporary directory for the
duration of the operation
[env: UV_NO_CACHE=]
--cache-dir [CACHE_DIR]
Path to the cache directory.
Defaults to `$XDG_CACHE_HOME/uv` or `$HOME/.cache/uv` on macOS and Linux, and
`%LOCALAPPDATA%/uv/cache` on Windows.
To view the location of the cache directory, run `uv cache dir`.
[env: UV_CACHE_DIR=]
Python options:
--managed-python
Require use of uv-managed Python versions.
By default, uv prefers using Python versions it manages. However, it will use system
Python versions if a uv-managed Python is not installed. This option disables use of
system Python versions.
[env: UV_MANAGED_PYTHON=]
--no-managed-python
Disable use of uv-managed Python versions.
Instead, uv will search for a suitable Python version on the system.
[env: UV_NO_MANAGED_PYTHON=]
--no-python-downloads
Disable automatic downloads of Python. [env: "UV_PYTHON_DOWNLOADS=never"]
Global options:
-q, --quiet...
Use quiet output.
Repeating this option, e.g., `-qq`, will enable a silent mode in which uv will write no
output to stdout.
-v, --verbose...
Use verbose output.
You can configure fine-grained logging using the `RUST_LOG` environment variable.
(<https://docs.rs/tracing-subscriber/latest/tracing_subscriber/filter/struct.EnvFilter.html#directives>)
--color <COLOR_CHOICE>
Control the use of color in output.
By default, uv will automatically detect support for colors when writing to a terminal.
Possible values:
- auto: Enables colored output only when the output is going to a terminal or TTY with
support
- always: Enables colored output regardless of the detected environment
- never: Disables colored output
--native-tls
Whether to load TLS certificates from the platform's native certificate store.
By default, uv loads certificates from the bundled `webpki-roots` crate. The
`webpki-roots` are a reliable set of trust roots from Mozilla, and including them in uv
improves portability and performance (especially on macOS).
However, in some cases, you may want to use the platform's native certificate store,
especially if you're relying on a corporate trust root (e.g., for a mandatory proxy)
that's included in your system's certificate store.
[env: UV_NATIVE_TLS=]
--offline
Disable network access.
When disabled, uv will only use locally cached data and locally available files.
[env: UV_OFFLINE=]
--allow-insecure-host <ALLOW_INSECURE_HOST>
Allow insecure connections to a host.
Can be provided multiple times.
Expects to receive either a hostname (e.g., `localhost`), a host-port pair (e.g.,
`localhost:8080`), or a URL (e.g., `https://localhost`).
WARNING: Hosts included in this list will not be verified against the system's certificate
store. Only use `--allow-insecure-host` in a secure network with verified sources, as it
bypasses SSL verification and could expose you to MITM attacks.
[env: UV_INSECURE_HOST=]
--no-progress
Hide all progress outputs.
For example, spinners or progress bars.
[env: UV_NO_PROGRESS=]
--directory <DIRECTORY>
Change to the given directory prior to running the command.
Relative paths are resolved with the given directory as the base.
See `--project` to only change the project root directory.
[env: UV_WORKING_DIR=]
--project <PROJECT>
Discover a project in the given directory.
All `pyproject.toml`, `uv.toml`, and `.python-version` files will be discovered by walking
up the directory tree from the project root, as will the project's virtual environment
(`.venv`).
Other command-line arguments (such as relative paths) will be resolved relative to the
current working directory.
See `--directory` to change the working directory entirely.
This setting has no effect when used in the `uv pip` interface.
[env: UV_PROJECT=]
--config-file <CONFIG_FILE>
The path to a `uv.toml` file to use for configuration.
While uv configuration can be included in a `pyproject.toml` file, it is not allowed in
this context.
[env: UV_CONFIG_FILE=]
--no-config
Avoid discovering configuration files (`pyproject.toml`, `uv.toml`).
Normally, configuration files are discovered in the current directory, parent directories,
or user configuration directories.
[env: UV_NO_CONFIG=]
-h, --help
Display the concise help for this command
----- stderr -----
"#);
}
#[test]
fn help_flag_subcommand() {
let context = TestContext::new_with_versions(&[]);
uv_snapshot!(context.filters(), context.command().arg("python").arg("--help"), @r#"
success: true
exit_code: 0
----- stdout -----
Manage Python versions and installations
Usage: uv python [OPTIONS] <COMMAND>
Commands:
list List the available Python installations
install Download and install Python versions
upgrade Upgrade installed Python versions
find Search for a Python installation
pin Pin to a specific Python version
dir Show the uv Python installation directory
uninstall Uninstall Python versions
update-shell Ensure that the Python executable directory is on the `PATH`
Cache options:
-n, --no-cache Avoid reading from or writing to the cache, instead using a temporary
directory for the duration of the operation [env: UV_NO_CACHE=]
--cache-dir [CACHE_DIR] Path to the cache directory [env: UV_CACHE_DIR=]
Python options:
--managed-python Require use of uv-managed Python versions [env: UV_MANAGED_PYTHON=]
--no-managed-python Disable use of uv-managed Python versions [env: UV_NO_MANAGED_PYTHON=]
--no-python-downloads Disable automatic downloads of Python. [env:
"UV_PYTHON_DOWNLOADS=never"]
Global options:
-q, --quiet...
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/lock.rs | crates/uv/tests/it/lock.rs | use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::prelude::*;
use indoc::{formatdoc, indoc};
use insta::assert_snapshot;
use std::io::BufReader;
use url::Url;
#[cfg(feature = "git")]
use crate::common::{READ_ONLY_GITHUB_TOKEN, decode_token};
use crate::common::{
TestContext, build_vendor_links_url, download_to_disk, packse_index_url, uv_snapshot,
venv_bin_path,
};
use uv_fs::Simplified;
use uv_static::EnvVars;
#[test]
fn lock_wheel_registry() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["anyio==3.7.0"]
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
"###);
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "anyio"
version = "3.7.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
{ name = "sniffio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c6/b3/fefbf7e78ab3b805dec67d698dc18dd505af7a18a8dd08868c9b4fa736b5/anyio-3.7.0.tar.gz", hash = "sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce", size = 142737, upload-time = "2023-05-27T11:12:46.688Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/68/fe/7ce1926952c8a403b35029e194555558514b365ad77d75125f521a2bec62/anyio-3.7.0-py3-none-any.whl", hash = "sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0", size = 80873, upload-time = "2023-05-27T11:12:44.474Z" },
]
[[package]]
name = "idna"
version = "3.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" },
]
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "anyio" },
]
[package.metadata]
requires-dist = [{ name = "anyio", specifier = "==3.7.0" }]
[[package]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
]
"#
);
});
// Re-run with `--locked`.
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
"###);
// Re-run with `--offline`. We shouldn't need a network connection to validate an
// already-correct lockfile with immutable metadata.
uv_snapshot!(context.filters(), context.lock().arg("--locked").arg("--offline").arg("--no-cache"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
"###);
// Install from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Prepared 3 packages in [TIME]
Installed 3 packages in [TIME]
+ anyio==3.7.0
+ idna==3.6
+ sniffio==1.3.1
"###);
// Re-install from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 3 packages in [TIME]
"###);
Ok(())
}
/// Lock a requirement from PyPI.
#[test]
fn lock_sdist_registry() -> Result<()> {
let context = TestContext::new("3.12").with_exclude_newer("2025-01-29T00:00:00Z");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["source-distribution==0.0.1"]
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2025-01-29T00:00:00Z"
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "source-distribution" },
]
[package.metadata]
requires-dist = [{ name = "source-distribution", specifier = "==0.0.1" }]
[[package]]
name = "source-distribution"
version = "0.0.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/10/1f/57aa4cce1b1abf6b433106676e15f9fa2c92ed2bd4cf77c3b50a9e9ac773/source_distribution-0.0.1.tar.gz", hash = "sha256:1f83ed7498336c7f2ab9b002cf22583d91115ebc624053dc4eb3a45694490106", size = 2157, upload-time = "2024-05-24T01:00:30.259Z" }
"#
);
});
// Re-run with `--locked`.
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
// Install from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ source-distribution==0.0.1
"###);
// Re-install from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 1 package in [TIME]
"###);
Ok(())
}
/// Lock a Git requirement using `tool.uv.sources`.
#[test]
#[cfg(feature = "git")]
fn lock_sdist_git() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["uv-public-pypackage"]
[tool.uv.sources]
uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", tag = "0.0.1" }
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "uv-public-pypackage" },
]
[package.metadata]
requires-dist = [{ name = "uv-public-pypackage", git = "https://github.com/astral-test/uv-public-pypackage?tag=0.0.1" }]
[[package]]
name = "uv-public-pypackage"
version = "0.1.0"
source = { git = "https://github.com/astral-test/uv-public-pypackage?tag=0.0.1#0dacfd662c64cb4ceb16e6cf65a157a8b715b979" }
"#
);
});
// Re-run with `--locked`.
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
// Re-run with `--offline`. We shouldn't need a network connection to validate an
// already-correct lockfile with immutable metadata.
uv_snapshot!(context.filters(), context.lock().arg("--locked").arg("--offline").arg("--no-cache"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
// Install from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979)
"###);
// Re-lock with a precise commit that maps to the same tag.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["uv-public-pypackage"]
[tool.uv.sources]
uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", rev = "0dacfd662c64cb4ceb16e6cf65a157a8b715b979" }
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "uv-public-pypackage" },
]
[package.metadata]
requires-dist = [{ name = "uv-public-pypackage", git = "https://github.com/astral-test/uv-public-pypackage?rev=0dacfd662c64cb4ceb16e6cf65a157a8b715b979" }]
[[package]]
name = "uv-public-pypackage"
version = "0.1.0"
source = { git = "https://github.com/astral-test/uv-public-pypackage?rev=0dacfd662c64cb4ceb16e6cf65a157a8b715b979#0dacfd662c64cb4ceb16e6cf65a157a8b715b979" }
"#
);
});
// Re-lock with a different commit.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["uv-public-pypackage"]
[tool.uv.sources]
uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", rev = "b270df1a2fb5d012294e9aaf05e7e0bab1e6a389" }
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Updated uv-public-pypackage v0.1.0 (0dacfd66) -> v0.1.0 (b270df1a)
");
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "uv-public-pypackage" },
]
[package.metadata]
requires-dist = [{ name = "uv-public-pypackage", git = "https://github.com/astral-test/uv-public-pypackage?rev=b270df1a2fb5d012294e9aaf05e7e0bab1e6a389" }]
[[package]]
name = "uv-public-pypackage"
version = "0.1.0"
source = { git = "https://github.com/astral-test/uv-public-pypackage?rev=b270df1a2fb5d012294e9aaf05e7e0bab1e6a389#b270df1a2fb5d012294e9aaf05e7e0bab1e6a389" }
"#
);
});
// Re-lock with a different tag (which matches the new commit).
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["uv-public-pypackage"]
[tool.uv.sources]
uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", tag = "0.0.2" }
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "uv-public-pypackage" },
]
[package.metadata]
requires-dist = [{ name = "uv-public-pypackage", git = "https://github.com/astral-test/uv-public-pypackage?tag=0.0.2" }]
[[package]]
name = "uv-public-pypackage"
version = "0.1.0"
source = { git = "https://github.com/astral-test/uv-public-pypackage?tag=0.0.2#b270df1a2fb5d012294e9aaf05e7e0bab1e6a389" }
"#
);
});
Ok(())
}
/// Lock a Git requirement using PEP 508.
#[test]
#[cfg(feature = "git")]
fn lock_sdist_git_subdirectory() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["example-pkg-a @ git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a"]
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "example-pkg-a"
version = "1"
source = { git = "https://github.com/pypa/sample-namespace-packages.git?subdirectory=pkg_resources%2Fpkg_a&rev=df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45" }
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "example-pkg-a" },
]
[package.metadata]
requires-dist = [{ name = "example-pkg-a", git = "https://github.com/pypa/sample-namespace-packages.git?subdirectory=pkg_resources%2Fpkg_a&rev=df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45" }]
"#
);
});
// Re-run with `--locked`.
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
// Install from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ example-pkg-a==1 (from git+https://github.com/pypa/sample-namespace-packages.git@df7530eeb8fa0cb7dbb8ecb28363e8e36bfa2f45#subdirectory=pkg_resources/pkg_a)
"###);
// Re-install from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 1 package in [TIME]
"###);
Ok(())
}
/// Lock a Git requirement using PEP 508.
#[test]
#[cfg(feature = "git")]
fn lock_sdist_git_pep508() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.1"]
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "uv-public-pypackage" },
]
[package.metadata]
requires-dist = [{ name = "uv-public-pypackage", git = "https://github.com/astral-test/uv-public-pypackage.git?rev=0.0.1" }]
[[package]]
name = "uv-public-pypackage"
version = "0.1.0"
source = { git = "https://github.com/astral-test/uv-public-pypackage.git?rev=0.0.1#0dacfd662c64cb4ceb16e6cf65a157a8b715b979" }
"#
);
});
// Re-run with `--locked`.
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
// Re-lock with a precise commit that maps to the same tag.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0dacfd662c64cb4ceb16e6cf65a157a8b715b979"]
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "uv-public-pypackage" },
]
[package.metadata]
requires-dist = [{ name = "uv-public-pypackage", git = "https://github.com/astral-test/uv-public-pypackage.git?rev=0dacfd662c64cb4ceb16e6cf65a157a8b715b979" }]
[[package]]
name = "uv-public-pypackage"
version = "0.1.0"
source = { git = "https://github.com/astral-test/uv-public-pypackage.git?rev=0dacfd662c64cb4ceb16e6cf65a157a8b715b979#0dacfd662c64cb4ceb16e6cf65a157a8b715b979" }
"#
);
});
// Re-lock with a different commit.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@b270df1a2fb5d012294e9aaf05e7e0bab1e6a389"]
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Updated uv-public-pypackage v0.1.0 (0dacfd66) -> v0.1.0 (b270df1a)
");
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "uv-public-pypackage" },
]
[package.metadata]
requires-dist = [{ name = "uv-public-pypackage", git = "https://github.com/astral-test/uv-public-pypackage.git?rev=b270df1a2fb5d012294e9aaf05e7e0bab1e6a389" }]
[[package]]
name = "uv-public-pypackage"
version = "0.1.0"
source = { git = "https://github.com/astral-test/uv-public-pypackage.git?rev=b270df1a2fb5d012294e9aaf05e7e0bab1e6a389#b270df1a2fb5d012294e9aaf05e7e0bab1e6a389" }
"#
);
});
// Re-lock with a different tag (which matches the new commit).
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage.git@0.0.2"]
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "uv-public-pypackage" },
]
[package.metadata]
requires-dist = [{ name = "uv-public-pypackage", git = "https://github.com/astral-test/uv-public-pypackage.git?rev=0.0.2" }]
[[package]]
name = "uv-public-pypackage"
version = "0.1.0"
source = { git = "https://github.com/astral-test/uv-public-pypackage.git?rev=0.0.2#b270df1a2fb5d012294e9aaf05e7e0bab1e6a389" }
"#
);
});
Ok(())
}
/// Lock a Git requirement using `tool.uv.sources` with a short revision.
///
/// The short (7-character) revision `0dacfd6` declared in `pyproject.toml`
/// should be preserved verbatim in the lockfile's `requires-dist` metadata,
/// while the resolved `source` entry pins the full commit SHA. Also verifies
/// that the lockfile revalidates with `--locked` (including offline with no
/// cache, since Git metadata is immutable) and installs with `--frozen`.
#[test]
#[cfg(feature = "git")]
fn lock_sdist_git_short_rev() -> Result<()> {
let context = TestContext::new("3.12");
// Project with a single dependency sourced from Git at a short revision.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["uv-public-pypackage"]
[tool.uv.sources]
uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", rev = "0dacfd6" }
"#,
)?;
// Initial lock: resolves the project plus its single Git dependency.
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
// The lockfile keeps the short rev in `requires-dist` and pins the full
// SHA (`0dacfd66...`) in the package `source`.
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "uv-public-pypackage" },
]
[package.metadata]
requires-dist = [{ name = "uv-public-pypackage", git = "https://github.com/astral-test/uv-public-pypackage?rev=0dacfd6" }]
[[package]]
name = "uv-public-pypackage"
version = "0.1.0"
source = { git = "https://github.com/astral-test/uv-public-pypackage?rev=0dacfd6#0dacfd662c64cb4ceb16e6cf65a157a8b715b979" }
"#
);
});
// Re-run with `--locked`.
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
// Re-run with `--offline`. We shouldn't need a network connection to validate an
// already-correct lockfile with immutable metadata.
uv_snapshot!(context.filters(), context.lock().arg("--locked").arg("--offline").arg("--no-cache"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"###);
// Install from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979)
"###);
// Re-install from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 1 package in [TIME]
"###);
Ok(())
}
/// Lock a requirement from a direct URL to a wheel.
#[test]
fn lock_wheel_url() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["anyio @ https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl"]
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
"###);
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "anyio"
version = "4.3.0"
source = { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl" }
dependencies = [
{ name = "idna" },
{ name = "sniffio" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8" },
]
[package.metadata]
requires-dist = [
{ name = "anyio", extras = ["trio"], marker = "extra == 'test'" },
{ name = "coverage", extras = ["toml"], marker = "extra == 'test'", specifier = ">=7" },
{ name = "exceptiongroup", marker = "python_full_version < '3.11'", specifier = ">=1.0.2" },
{ name = "exceptiongroup", marker = "extra == 'test'", specifier = ">=1.2.0" },
{ name = "hypothesis", marker = "extra == 'test'", specifier = ">=4.0" },
{ name = "idna", specifier = ">=2.8" },
{ name = "packaging", marker = "extra == 'doc'" },
{ name = "psutil", marker = "extra == 'test'", specifier = ">=5.9" },
{ name = "pytest", marker = "extra == 'test'", specifier = ">=7.0" },
{ name = "pytest-mock", marker = "extra == 'test'", specifier = ">=3.6.1" },
{ name = "sniffio", specifier = ">=1.1" },
{ name = "sphinx", marker = "extra == 'doc'", specifier = ">=7" },
{ name = "sphinx-autodoc-typehints", marker = "extra == 'doc'", specifier = ">=1.2.0" },
{ name = "sphinx-rtd-theme", marker = "extra == 'doc'" },
{ name = "trio", marker = "extra == 'trio'", specifier = ">=0.23" },
{ name = "trustme", marker = "extra == 'test'" },
{ name = "typing-extensions", marker = "python_full_version < '3.11'", specifier = ">=4.1" },
{ name = "uvloop", marker = "platform_python_implementation == 'CPython' and sys_platform != 'win32' and extra == 'test'", specifier = ">=0.17" },
]
provides-extras = ["doc", "test", "trio"]
[[package]]
name = "idna"
version = "3.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" },
]
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "anyio" },
]
[package.metadata]
requires-dist = [{ name = "anyio", url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl" }]
[[package]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
wheels = [
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/workflow.rs | crates/uv/tests/it/workflow.rs | use crate::common::{TestContext, diff_snapshot, uv_snapshot};
use anyhow::Result;
use assert_fs::fixture::{FileWriteStr, PathChild};
use insta::assert_snapshot;
/// Round-trip `uv add` / `uv remove` of a single new package (`tzdata`) on
/// the `packse` fixture project.
///
/// Asserts the exact lockfile diff produced by each step, and that after the
/// remove the lockfile is identical to the initial one (the final diff
/// snapshot is empty).
#[test]
fn packse_add_remove_one_package() {
let context = TestContext::new("3.12");
// Use the vendored `packse` project as a realistic ecosystem fixture.
context.copy_ecosystem_project("packse");
// Initial lock. The fixture still uses the deprecated
// `tool.uv.dev-dependencies` field, hence the warning.
uv_snapshot!(context.filters(), context.lock(), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
warning: The `tool.uv.dev-dependencies` field (used in `pyproject.toml`) is deprecated and will be removed in a future release; use `dependency-groups.dev` instead
Resolved 49 packages in [TIME]
");
// Capture the pristine lockfile so we can compare after the round-trip.
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(lock);
});
// `uv add --no-sync tzdata`: the diff should add `tzdata` to the project's
// dependencies, to `requires-dist`, and as a new `[[package]]` entry.
let diff = context.diff_lock(|context| {
let mut add_cmd = context.add();
add_cmd.arg("--no-sync").arg("tzdata");
add_cmd
});
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(diff, @r#"
--- old
+++ new
@@ -307,20 +307,21 @@
name = "packse"
version = "0.0.0"
source = { editable = "." }
dependencies = [
{ name = "chevron-blue" },
{ name = "hatchling" },
{ name = "msgspec" },
{ name = "pyyaml" },
{ name = "setuptools" },
{ name = "twine" },
+ { name = "tzdata" },
]
[package.optional-dependencies]
index = [
{ name = "pypiserver" },
]
serve = [
{ name = "pypiserver" },
{ name = "watchfiles" },
]
@@ -335,20 +336,21 @@
[package.metadata]
requires-dist = [
{ name = "chevron-blue", specifier = ">=0.2.1" },
{ name = "hatchling", specifier = ">=1.20.0" },
{ name = "msgspec", specifier = ">=0.18.4" },
{ name = "packse", extras = ["index"], marker = "extra == 'serve'" },
{ name = "pypiserver", marker = "extra == 'index'", specifier = ">=2.0.1" },
{ name = "pyyaml", specifier = ">=6.0.1" },
{ name = "setuptools", specifier = ">=69.1.1" },
{ name = "twine", specifier = ">=4.0.2" },
+ { name = "tzdata", specifier = ">=2024.1" },
{ name = "watchfiles", marker = "extra == 'serve'", specifier = ">=0.21.0" },
]
provides-extras = ["index", "serve"]
[package.metadata.requires-dev]
dev = [
{ name = "psutil", specifier = ">=5.9.7" },
{ name = "pytest", specifier = ">=7.4.3" },
{ name = "syrupy", specifier = ">=4.6.0" },
]
@@ -601,20 +603,29 @@
{ name = "rfc3986" },
{ name = "rich" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d3/cc/8025ad5102a5c754023092143b8b511e184ec087dfbfb357d7d88fb82bff/twine-5.0.0.tar.gz", hash = "sha256:89b0cc7d370a4b66421cc6102f269aa910fe0f1861c124f573cf2ddedbc10cf4", size = 222119, upload-time = "2024-02-11T19:59:40.377Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9a/d4/4db90c4a2b8c1006ea3e6291f36b50b66e45887cf17b3b958b5d646fb837/twine-5.0.0-py3-none-any.whl", hash = "sha256:a262933de0b484c53408f9edae2e7821c1c45a3314ff2df9bdd343aa7ab8edc0", size = 37138, upload-time = "2024-02-11T19:59:38.163Z" },
]
[[package]]
+name = "tzdata"
+version = "2024.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/74/5b/e025d02cb3b66b7b76093404392d4b44343c69101cc85f4d180dd5784717/tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd", size = 190559, upload-time = "2024-02-11T23:22:40.2Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/65/58/f9c9e6be752e9fcb8b6a0ee9fb87e6e7a1f6bcab2cdc73f02bb7ba91ada0/tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252", size = 345370, upload-time = "2024-02-11T23:22:38.223Z" },
+]
+
+[[package]]
name = "urllib3"
version = "2.2.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/7a/50/7fd50a27caa0652cd4caf224aa87741ea41d3265ad13f010886167cfcc79/urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19", size = 291020, upload-time = "2024-02-18T03:55:57.539Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a2/73/a68704750a7679d0b6d3ad7aa8d4da8e14e151ae82e6fee774e6e0d05ec8/urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d", size = 121067, upload-time = "2024-02-18T03:55:54.704Z" },
]
[[package]]
name = "watchfiles"
"#);
});
// `uv remove --no-sync tzdata`: the diff should be the exact inverse of the add.
let diff = context.diff_lock(|context| {
let mut remove_cmd = context.remove();
remove_cmd.arg("--no-sync").arg("tzdata");
remove_cmd
});
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(diff, @r#"
--- old
+++ new
@@ -307,21 +307,20 @@
name = "packse"
version = "0.0.0"
source = { editable = "." }
dependencies = [
{ name = "chevron-blue" },
{ name = "hatchling" },
{ name = "msgspec" },
{ name = "pyyaml" },
{ name = "setuptools" },
{ name = "twine" },
- { name = "tzdata" },
]
[package.optional-dependencies]
index = [
{ name = "pypiserver" },
]
serve = [
{ name = "pypiserver" },
{ name = "watchfiles" },
]
@@ -336,21 +335,20 @@
[package.metadata]
requires-dist = [
{ name = "chevron-blue", specifier = ">=0.2.1" },
{ name = "hatchling", specifier = ">=1.20.0" },
{ name = "msgspec", specifier = ">=0.18.4" },
{ name = "packse", extras = ["index"], marker = "extra == 'serve'" },
{ name = "pypiserver", marker = "extra == 'index'", specifier = ">=2.0.1" },
{ name = "pyyaml", specifier = ">=6.0.1" },
{ name = "setuptools", specifier = ">=69.1.1" },
{ name = "twine", specifier = ">=4.0.2" },
- { name = "tzdata", specifier = ">=2024.1" },
{ name = "watchfiles", marker = "extra == 'serve'", specifier = ">=0.21.0" },
]
provides-extras = ["index", "serve"]
[package.metadata.requires-dev]
dev = [
{ name = "psutil", specifier = ">=5.9.7" },
{ name = "pytest", specifier = ">=7.4.3" },
{ name = "syrupy", specifier = ">=4.6.0" },
]
@@ -600,29 +598,20 @@
{ name = "readme-renderer" },
{ name = "requests" },
{ name = "requests-toolbelt" },
{ name = "rfc3986" },
{ name = "rich" },
{ name = "urllib3" },
]
sdist = { url = "https://files.pythonhosted.org/packages/d3/cc/8025ad5102a5c754023092143b8b511e184ec087dfbfb357d7d88fb82bff/twine-5.0.0.tar.gz", hash = "sha256:89b0cc7d370a4b66421cc6102f269aa910fe0f1861c124f573cf2ddedbc10cf4", size = 222119, upload-time = "2024-02-11T19:59:40.377Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/9a/d4/4db90c4a2b8c1006ea3e6291f36b50b66e45887cf17b3b958b5d646fb837/twine-5.0.0-py3-none-any.whl", hash = "sha256:a262933de0b484c53408f9edae2e7821c1c45a3314ff2df9bdd343aa7ab8edc0", size = 37138, upload-time = "2024-02-11T19:59:38.163Z" },
-]
-
-[[package]]
-name = "tzdata"
-version = "2024.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/74/5b/e025d02cb3b66b7b76093404392d4b44343c69101cc85f4d180dd5784717/tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd", size = 190559, upload-time = "2024-02-11T23:22:40.2Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/65/58/f9c9e6be752e9fcb8b6a0ee9fb87e6e7a1f6bcab2cdc73f02bb7ba91ada0/tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252", size = 345370, upload-time = "2024-02-11T23:22:38.223Z" },
]
[[package]]
name = "urllib3"
version = "2.2.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/7a/50/7fd50a27caa0652cd4caf224aa87741ea41d3265ad13f010886167cfcc79/urllib3-2.2.1.tar.gz", hash = "sha256:d0570876c61ab9e520d776c38acbbb5b05a776d3f9ff98a5c8fd5162a444cf19", size = 291020, upload-time = "2024-02-18T03:55:57.539Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/a2/73/a68704750a7679d0b6d3ad7aa8d4da8e14e151ae82e6fee774e6e0d05ec8/urllib3-2.2.1-py3-none-any.whl", hash = "sha256:450b20ec296a467077128bff42b73080516e71b56ff59a60a02bef2232c4fa9d", size = 121067, upload-time = "2024-02-18T03:55:54.704Z" },
]
"#);
});
// Back to where we started.
let new_lock = context.read("uv.lock");
let diff = diff_snapshot(&lock, &new_lock);
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(diff, @r###""###);
});
}
/// Adding a dependency the project already declares (`pyyaml`) must be a
/// no-op: the lockfile diff produced by `uv add --no-sync pyyaml` is empty.
#[test]
fn packse_add_remove_existing_package_noop() {
    let context = TestContext::new("3.12");
    // Use the vendored `packse` project as the fixture.
    context.copy_ecosystem_project("packse");
    // Produce the initial lockfile for the fixture project.
    uv_snapshot!(context.filters(), context.lock(), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
warning: The `tool.uv.dev-dependencies` field (used in `pyproject.toml`) is deprecated and will be removed in a future release; use `dependency-groups.dev` instead
Resolved 49 packages in [TIME]
");
    // Snapshot the full lockfile contents.
    let lockfile = context.read("uv.lock");
    insta::with_settings!({
        filters => context.filters(),
    }, {
        assert_snapshot!(lockfile);
    });
    // Re-adding an already-declared dependency must leave the lockfile untouched.
    let lock_diff = context.diff_lock(|ctx| {
        let mut cmd = ctx.add();
        cmd.arg("--no-sync");
        cmd.arg("pyyaml");
        cmd
    });
    insta::with_settings!({
        filters => context.filters(),
    }, {
        assert_snapshot!(lock_diff, @"");
    });
}
/// This test adds a new direct dependency that was already a
/// transitive dependency.
///
/// Promoting `sniffio` to a direct dependency should only touch the project's
/// own `dependencies` and `requires-dist` entries (no new `[[package]]` is
/// added, since it was already locked); removing it restores the original
/// lockfile exactly (the final diff snapshot is empty).
#[test]
fn packse_promote_transitive_to_direct_then_remove() {
let context = TestContext::new("3.12");
// Use the vendored `packse` project as a realistic ecosystem fixture.
context.copy_ecosystem_project("packse");
// Initial lock. The fixture still uses the deprecated
// `tool.uv.dev-dependencies` field, hence the warning.
uv_snapshot!(context.filters(), context.lock(), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
warning: The `tool.uv.dev-dependencies` field (used in `pyproject.toml`) is deprecated and will be removed in a future release; use `dependency-groups.dev` instead
Resolved 49 packages in [TIME]
");
// Capture the pristine lockfile so we can compare after the round-trip.
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(lock);
});
// `uv add --no-sync sniffio`: promote the existing transitive dependency.
let diff = context.diff_lock(|context| {
let mut add_cmd = context.add();
add_cmd.arg("--no-sync").arg("sniffio");
add_cmd
});
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(diff, @r###"
--- old
+++ new
@@ -306,20 +306,21 @@
[[package]]
name = "packse"
version = "0.0.0"
source = { editable = "." }
dependencies = [
{ name = "chevron-blue" },
{ name = "hatchling" },
{ name = "msgspec" },
{ name = "pyyaml" },
{ name = "setuptools" },
+ { name = "sniffio" },
{ name = "twine" },
]
[package.optional-dependencies]
index = [
{ name = "pypiserver" },
]
serve = [
{ name = "pypiserver" },
{ name = "watchfiles" },
@@ -334,20 +335,21 @@
[package.metadata]
requires-dist = [
{ name = "chevron-blue", specifier = ">=0.2.1" },
{ name = "hatchling", specifier = ">=1.20.0" },
{ name = "msgspec", specifier = ">=0.18.4" },
{ name = "packse", extras = ["index"], marker = "extra == 'serve'" },
{ name = "pypiserver", marker = "extra == 'index'", specifier = ">=2.0.1" },
{ name = "pyyaml", specifier = ">=6.0.1" },
{ name = "setuptools", specifier = ">=69.1.1" },
+ { name = "sniffio", specifier = ">=1.3.1" },
{ name = "twine", specifier = ">=4.0.2" },
{ name = "watchfiles", marker = "extra == 'serve'", specifier = ">=0.21.0" },
]
provides-extras = ["index", "serve"]
[package.metadata.requires-dev]
dev = [
{ name = "psutil", specifier = ">=5.9.7" },
{ name = "pytest", specifier = ">=7.4.3" },
{ name = "syrupy", specifier = ">=4.6.0" },
"###);
});
// `uv remove --no-sync sniffio`: demote it back; the diff is the exact inverse.
let diff = context.diff_lock(|context| {
let mut remove_cmd = context.remove();
remove_cmd.arg("--no-sync").arg("sniffio");
remove_cmd
});
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(diff, @r###"
--- old
+++ new
@@ -306,21 +306,20 @@
[[package]]
name = "packse"
version = "0.0.0"
source = { editable = "." }
dependencies = [
{ name = "chevron-blue" },
{ name = "hatchling" },
{ name = "msgspec" },
{ name = "pyyaml" },
{ name = "setuptools" },
- { name = "sniffio" },
{ name = "twine" },
]
[package.optional-dependencies]
index = [
{ name = "pypiserver" },
]
serve = [
{ name = "pypiserver" },
{ name = "watchfiles" },
@@ -335,21 +334,20 @@
[package.metadata]
requires-dist = [
{ name = "chevron-blue", specifier = ">=0.2.1" },
{ name = "hatchling", specifier = ">=1.20.0" },
{ name = "msgspec", specifier = ">=0.18.4" },
{ name = "packse", extras = ["index"], marker = "extra == 'serve'" },
{ name = "pypiserver", marker = "extra == 'index'", specifier = ">=2.0.1" },
{ name = "pyyaml", specifier = ">=6.0.1" },
{ name = "setuptools", specifier = ">=69.1.1" },
- { name = "sniffio", specifier = ">=1.3.1" },
{ name = "twine", specifier = ">=4.0.2" },
{ name = "watchfiles", marker = "extra == 'serve'", specifier = ">=0.21.0" },
]
provides-extras = ["index", "serve"]
[package.metadata.requires-dev]
dev = [
{ name = "psutil", specifier = ">=5.9.7" },
{ name = "pytest", specifier = ">=7.4.3" },
{ name = "syrupy", specifier = ">=4.6.0" },
"###);
});
// Back to where we started.
let new_lock = context.read("uv.lock");
let diff = diff_snapshot(&lock, &new_lock);
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(diff, @r###""###);
});
}
/// Add and then remove `tzdata` from a project that depends on `jax==0.4.17`.
///
/// Unlike the `packse` round-trip tests, this round trip does NOT return to
/// the original lockfile: the `zipp` dependency of `importlib-metadata` picks
/// up a `python_full_version < '3.10'` marker along the way and keeps it
/// after the remove. The trailing (deliberately non-empty) diff snapshot
/// documents this known resolution instability.
#[test]
fn jax_instability() -> Result<()> {
let context = TestContext::new("3.12");
// Minimal project pinned to jax==0.4.17 with a >=3.9 Python floor, so
// version-dependent markers (e.g. on `zipp`) are in play.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "uv-lock-instability"
version = "0.1.0"
description = "whatever"
requires-python = ">=3.9.0"
dependencies = ["jax==0.4.17"]
"#,
)?;
// Initial lock.
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 8 packages in [TIME]
"###);
// Capture the initial lockfile for the final comparison.
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(lock);
});
// `uv add --no-sync tzdata`. Besides adding `tzdata`, the diff also flips
// the `zipp` dependency of `importlib-metadata` to carry a
// `python_full_version < '3.10'` marker — unrelated churn.
let diff = context.diff_lock(|context| {
let mut add_cmd = context.add();
add_cmd.arg("--no-sync").arg("tzdata");
add_cmd
});
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(diff, @r#"
--- old
+++ new
@@ -9,21 +9,21 @@
]
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "importlib-metadata"
version = "7.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
- { name = "zipp" },
+ { name = "zipp", marker = "python_full_version < '3.10'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a0/fc/c4e6078d21fc4fa56300a241b87eae76766aa380a23fc450fc85bb7bf547/importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2", size = 52120, upload-time = "2024-03-20T19:51:32.429Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2d/0a/679461c511447ffaf176567d5c496d1de27cbe34a87df6677d7171b2fbd4/importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570", size = 24409, upload-time = "2024-03-20T19:51:30.241Z" },
]
[[package]]
name = "jax"
version = "0.4.17"
source = { registry = "https://pypi.org/simple" }
@@ -150,28 +150,41 @@
{ url = "https://files.pythonhosted.org/packages/f3/31/91a2a3c5eb85d2bfa86d7c98f2df5d77dcdefb3d80ca9f9037ad04393acf/scipy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e646d8571804a304e1da01040d21577685ce8e2db08ac58e543eaca063453e1c", size = 45816713, upload-time = "2024-01-20T21:12:26.619Z" },
{ url = "https://files.pythonhosted.org/packages/ed/be/49a3f999dc91f1a653847f38c34763dcdeaa8a327f3665bdfe9bf5555109/scipy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:913d6e7956c3a671de3b05ccb66b11bc293f56bfdef040583a7221d9e22a2e35", size = 38929252, upload-time = "2024-01-20T21:12:33.197Z" },
{ url = "https://files.pythonhosted.org/packages/32/48/f605bad3e610efe05a51b56698578f7a98f900513a4bad2c9f12df845cd6/scipy-1.12.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba1b0c7256ad75401c73e4b3cf09d1f176e9bd4248f0d3112170fb2ec4db067", size = 31356374, upload-time = "2024-01-20T21:12:39.176Z" },
{ url = "https://files.pythonhosted.org/packages/5f/40/ac3cc2719c67c97a88d746e93fda89b9447b65a47e408fdd415c370bab2a/scipy-1.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:730badef9b827b368f351eacae2e82da414e13cf8bd5051b4bdfd720271a5371", size = 34787482, upload-time = "2024-01-20T21:12:47.32Z" },
{ url = "https://files.pythonhosted.org/packages/a6/9d/f864266894b67cdb5731ab531afba68713da3d6d8252f698ccab775d3f68/scipy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6546dc2c11a9df6926afcbdd8a3edec28566e4e785b915e849348c6dd9f3f490", size = 38473470, upload-time = "2024-01-20T21:12:53.545Z" },
{ url = "https://files.pythonhosted.org/packages/43/e7/a170210e15434befff4dad019aa301a5c350f573b925a68dd84a57d86b43/scipy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:196ebad3a4882081f62a5bf4aeb7326aa34b110e533aab23e4374fcccb0890dc", size = 38659602, upload-time = "2024-01-20T21:13:00.074Z" },
{ url = "https://files.pythonhosted.org/packages/92/f6/eb15f6086c82e62d98ae9f8644c518003e34c03b2ac25683ea932bb30047/scipy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:b360f1b6b2f742781299514e99ff560d1fe9bd1bff2712894b52abe528d1fd1e", size = 46211895, upload-time = "2024-01-20T21:13:09.431Z" },
]
[[package]]
+name = "tzdata"
+version = "2024.1"
+source = { registry = "https://pypi.org/simple" }
+sdist = { url = "https://files.pythonhosted.org/packages/74/5b/e025d02cb3b66b7b76093404392d4b44343c69101cc85f4d180dd5784717/tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd", size = 190559, upload-time = "2024-02-11T23:22:40.2Z" }
+wheels = [
+ { url = "https://files.pythonhosted.org/packages/65/58/f9c9e6be752e9fcb8b6a0ee9fb87e6e7a1f6bcab2cdc73f02bb7ba91ada0/tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252", size = 345370, upload-time = "2024-02-11T23:22:38.223Z" },
+]
+
+[[package]]
name = "uv-lock-instability"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "jax" },
+ { name = "tzdata" },
]
[package.metadata]
-requires-dist = [{ name = "jax", specifier = "==0.4.17" }]
+requires-dist = [
+ { name = "jax", specifier = "==0.4.17" },
+ { name = "tzdata", specifier = ">=2024.1" },
+]
[[package]]
name = "zipp"
version = "3.18.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/3e/ef/65da662da6f9991e87f058bc90b91a935ae655a16ae5514660d6460d1298/zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715", size = 21220, upload-time = "2024-03-14T21:09:04.099Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c2/0a/ba9d0ee9536d3ef73a3448e931776e658b36f128d344e175bc32b092a8bf/zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b", size = 8247, upload-time = "2024-03-14T21:09:02.638Z" },
]
"#);
});
// `uv remove --no-sync tzdata`: removes `tzdata`, but note the diff does
// NOT revert the `zipp` marker change.
let diff = context.diff_lock(|context| {
let mut remove_cmd = context.remove();
remove_cmd.arg("--no-sync").arg("tzdata");
remove_cmd
});
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(diff, @r#"
--- old
+++ new
@@ -150,41 +150,28 @@
{ url = "https://files.pythonhosted.org/packages/f3/31/91a2a3c5eb85d2bfa86d7c98f2df5d77dcdefb3d80ca9f9037ad04393acf/scipy-1.12.0-cp312-cp312-win_amd64.whl", hash = "sha256:e646d8571804a304e1da01040d21577685ce8e2db08ac58e543eaca063453e1c", size = 45816713, upload-time = "2024-01-20T21:12:26.619Z" },
{ url = "https://files.pythonhosted.org/packages/ed/be/49a3f999dc91f1a653847f38c34763dcdeaa8a327f3665bdfe9bf5555109/scipy-1.12.0-cp39-cp39-macosx_10_9_x86_64.whl", hash = "sha256:913d6e7956c3a671de3b05ccb66b11bc293f56bfdef040583a7221d9e22a2e35", size = 38929252, upload-time = "2024-01-20T21:12:33.197Z" },
{ url = "https://files.pythonhosted.org/packages/32/48/f605bad3e610efe05a51b56698578f7a98f900513a4bad2c9f12df845cd6/scipy-1.12.0-cp39-cp39-macosx_12_0_arm64.whl", hash = "sha256:bba1b0c7256ad75401c73e4b3cf09d1f176e9bd4248f0d3112170fb2ec4db067", size = 31356374, upload-time = "2024-01-20T21:12:39.176Z" },
{ url = "https://files.pythonhosted.org/packages/5f/40/ac3cc2719c67c97a88d746e93fda89b9447b65a47e408fdd415c370bab2a/scipy-1.12.0-cp39-cp39-manylinux_2_17_aarch64.manylinux2014_aarch64.whl", hash = "sha256:730badef9b827b368f351eacae2e82da414e13cf8bd5051b4bdfd720271a5371", size = 34787482, upload-time = "2024-01-20T21:12:47.32Z" },
{ url = "https://files.pythonhosted.org/packages/a6/9d/f864266894b67cdb5731ab531afba68713da3d6d8252f698ccab775d3f68/scipy-1.12.0-cp39-cp39-manylinux_2_17_x86_64.manylinux2014_x86_64.whl", hash = "sha256:6546dc2c11a9df6926afcbdd8a3edec28566e4e785b915e849348c6dd9f3f490", size = 38473470, upload-time = "2024-01-20T21:12:53.545Z" },
{ url = "https://files.pythonhosted.org/packages/43/e7/a170210e15434befff4dad019aa301a5c350f573b925a68dd84a57d86b43/scipy-1.12.0-cp39-cp39-musllinux_1_1_x86_64.whl", hash = "sha256:196ebad3a4882081f62a5bf4aeb7326aa34b110e533aab23e4374fcccb0890dc", size = 38659602, upload-time = "2024-01-20T21:13:00.074Z" },
{ url = "https://files.pythonhosted.org/packages/92/f6/eb15f6086c82e62d98ae9f8644c518003e34c03b2ac25683ea932bb30047/scipy-1.12.0-cp39-cp39-win_amd64.whl", hash = "sha256:b360f1b6b2f742781299514e99ff560d1fe9bd1bff2712894b52abe528d1fd1e", size = 46211895, upload-time = "2024-01-20T21:13:09.431Z" },
]
[[package]]
-name = "tzdata"
-version = "2024.1"
-source = { registry = "https://pypi.org/simple" }
-sdist = { url = "https://files.pythonhosted.org/packages/74/5b/e025d02cb3b66b7b76093404392d4b44343c69101cc85f4d180dd5784717/tzdata-2024.1.tar.gz", hash = "sha256:2674120f8d891909751c38abcdfd386ac0a5a1127954fbc332af6b5ceae07efd", size = 190559, upload-time = "2024-02-11T23:22:40.2Z" }
-wheels = [
- { url = "https://files.pythonhosted.org/packages/65/58/f9c9e6be752e9fcb8b6a0ee9fb87e6e7a1f6bcab2cdc73f02bb7ba91ada0/tzdata-2024.1-py2.py3-none-any.whl", hash = "sha256:9068bc196136463f5245e51efda838afa15aaeca9903f49050dfa2679db4d252", size = 345370, upload-time = "2024-02-11T23:22:38.223Z" },
-]
-
-[[package]]
name = "uv-lock-instability"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "jax" },
- { name = "tzdata" },
]
[package.metadata]
-requires-dist = [
- { name = "jax", specifier = "==0.4.17" },
- { name = "tzdata", specifier = ">=2024.1" },
-]
+requires-dist = [{ name = "jax", specifier = "==0.4.17" }]
[[package]]
name = "zipp"
version = "3.18.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/3e/ef/65da662da6f9991e87f058bc90b91a935ae655a16ae5514660d6460d1298/zipp-3.18.1.tar.gz", hash = "sha256:2884ed22e7d8961de1c9a05142eb69a247f120291bc0206a00a7642f09b5b715", size = 21220, upload-time = "2024-03-14T21:09:04.099Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c2/0a/ba9d0ee9536d3ef73a3448e931776e658b36f128d344e175bc32b092a8bf/zipp-3.18.1-py3-none-any.whl", hash = "sha256:206f5a15f2af3dbaee80769fb7dc6f249695e940acca08dfb2a4769fe61e538b", size = 8247, upload-time = "2024-03-14T21:09:02.638Z" },
]
"#);
});
// Back to where we started.
//
// Note that this is wrong! This demonstrates that `uv` sometimes does
// not produce a stable resolution.
//
// See: https://github.com/astral-sh/uv/issues/6063
// See: https://github.com/astral-sh/uv/issues/6158
let new_lock = context.read("uv.lock");
let diff = diff_snapshot(&lock, &new_lock);
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(diff, @r#"
--- old
+++ new
@@ -9,21 +9,21 @@
]
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "importlib-metadata"
version = "7.1.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
- { name = "zipp" },
+ { name = "zipp", marker = "python_full_version < '3.10'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/a0/fc/c4e6078d21fc4fa56300a241b87eae76766aa380a23fc450fc85bb7bf547/importlib_metadata-7.1.0.tar.gz", hash = "sha256:b78938b926ee8d5f020fc4772d487045805a55ddbad2ecf21c6d60938dc7fcd2", size = 52120, upload-time = "2024-03-20T19:51:32.429Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/2d/0a/679461c511447ffaf176567d5c496d1de27cbe34a87df6677d7171b2fbd4/importlib_metadata-7.1.0-py3-none-any.whl", hash = "sha256:30962b96c0c223483ed6cc7280e7f0199feb01a0e40cfae4d4450fc6fab1f570", size = 24409, upload-time = "2024-03-20T19:51:30.241Z" },
]
[[package]]
name = "jax"
version = "0.4.17"
source = { registry = "https://pypi.org/simple" }
"#);
});
Ok(())
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/lock_conflict.rs | crates/uv/tests/it/lock_conflict.rs | use anyhow::Result;
use assert_fs::prelude::*;
use insta::assert_snapshot;
use crate::common::{TestContext, uv_snapshot};
// All of the tests in this file should use `tool.uv.conflicts` in some way.
//
// They are split from `lock.rs` somewhat arbitrarily. Mostly because there are
// a lot of them, and `lock.rs` was growing large enough as it is.
/// This tests a "basic" case for specifying conflicting extras.
///
/// Namely, we check that 1) without declaring them conflicting,
/// resolution fails, 2) declaring them conflicting, resolution
/// succeeds, 3) install succeeds, 4) install fails when requesting two
/// or more extras that are declared to conflict with each other.
///
/// This test was inspired by:
/// <https://github.com/astral-sh/uv/issues/8024>
#[test]
fn extra_basic() -> Result<()> {
let context = TestContext::new("3.12");
// First we test that resolving with two extras that have
// conflicting dependencies fails.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
[project.optional-dependencies]
extra1 = ["sortedcontainers==2.3.0"]
extra2 = ["sortedcontainers==2.4.0"]
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because project[extra2] depends on sortedcontainers==2.4.0 and project[extra1] depends on sortedcontainers==2.3.0, we can conclude that project[extra1] and project[extra2] are incompatible.
And because your project requires project[extra1] and project[extra2], we can conclude that your project's requirements are unsatisfiable.
"###);
// And now with the same extra configuration, we tell uv about
// the conflicting extras, which forces it to resolve each in
// their own fork.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
[tool.uv]
conflicts = [
[
{ extra = "extra1" },
{ extra = "extra2" },
],
]
[project.optional-dependencies]
extra1 = ["sortedcontainers==2.3.0"]
extra2 = ["sortedcontainers==2.4.0"]
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 3 packages in [TIME]
"###);
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
conflicts = [[
{ package = "project", extra = "extra1" },
{ package = "project", extra = "extra2" },
]]
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
[package.optional-dependencies]
extra1 = [
{ name = "sortedcontainers", version = "2.3.0", source = { registry = "https://pypi.org/simple" } },
]
extra2 = [
{ name = "sortedcontainers", version = "2.4.0", source = { registry = "https://pypi.org/simple" } },
]
[package.metadata]
requires-dist = [
{ name = "sortedcontainers", marker = "extra == 'extra1'", specifier = "==2.3.0" },
{ name = "sortedcontainers", marker = "extra == 'extra2'", specifier = "==2.4.0" },
]
provides-extras = ["extra1", "extra2"]
[[package]]
name = "sortedcontainers"
version = "2.3.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/14/10/6a9481890bae97da9edd6e737c9c3dec6aea3fc2fa53b0934037b35c89ea/sortedcontainers-2.3.0.tar.gz", hash = "sha256:59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1", size = 30509, upload-time = "2020-11-09T00:03:52.258Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/20/4d/a7046ae1a1a4cc4e9bbed194c387086f06b25038be596543d026946330c9/sortedcontainers-2.3.0-py2.py3-none-any.whl", hash = "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f", size = 29479, upload-time = "2020-11-09T00:03:50.723Z" },
]
[[package]]
name = "sortedcontainers"
version = "2.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" },
]
"#
);
});
// Re-run with `--locked`.
uv_snapshot!(context.filters(), context.lock().arg("--locked"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 3 packages in [TIME]
"###);
// Install from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited in [TIME]
"###);
// Another install, but with one of the extras enabled.
uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--extra=extra1"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ sortedcontainers==2.3.0
"###);
// Another install, but with the other extra enabled.
uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--extra=extra2"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Prepared 1 package in [TIME]
Uninstalled 1 package in [TIME]
Installed 1 package in [TIME]
- sortedcontainers==2.3.0
+ sortedcontainers==2.4.0
"###);
// And finally, installing both extras should error.
uv_snapshot!(context.filters(), context.sync().arg("--frozen").arg("--all-extras"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Extras `extra1` and `extra2` are incompatible with the declared conflicts: {`project[extra1]`, `project[extra2]`}
"###);
// As should exporting them.
uv_snapshot!(context.filters(), context.export().arg("--frozen").arg("--all-extras"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Extras `extra1` and `extra2` are incompatible with the declared conflicts: {`project[extra1]`, `project[extra2]`}
"###);
Ok(())
}
/// Like `extra_basic`, but defines three conflicting
/// extras instead of two.
#[test]
fn extra_basic_three_extras() -> Result<()> {
    let context = TestContext::new("3.12");

    // First we test that resolving with two extras that have
    // conflicting dependencies fails.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"

        [project.optional-dependencies]
        extra1 = ["sortedcontainers==2.2.0"]
        extra2 = ["sortedcontainers==2.3.0"]
        project3 = ["sortedcontainers==2.4.0"]
        "#,
    )?;
    uv_snapshot!(context.filters(), context.lock(), @r###"
    success: false
    exit_code: 1

    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because project[extra2] depends on sortedcontainers==2.3.0 and project[extra1] depends on sortedcontainers==2.2.0, we can conclude that project[extra1] and project[extra2] are incompatible.
          And because your project requires project[extra1] and project[extra2], we can conclude that your project's requirements are unsatisfiable.
    "###);

    // And now with the same extra configuration, we tell uv about
    // the conflicting extras, which forces it to resolve each in
    // their own fork.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"

        [tool.uv]
        conflicts = [
            [
              { extra = "extra1" },
              { extra = "extra2" },
              { extra = "project3" },
            ],
        ]

        [project.optional-dependencies]
        extra1 = ["sortedcontainers==2.2.0"]
        extra2 = ["sortedcontainers==2.3.0"]
        project3 = ["sortedcontainers==2.4.0"]
        "#,
    )?;
    uv_snapshot!(context.filters(), context.lock(), @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###);

    // All three `sortedcontainers` versions should be present in the
    // lockfile, one per conflicting extra.
    let lock = context.read("uv.lock");
    insta::with_settings!({
        filters => context.filters(),
    }, {
        assert_snapshot!(
            lock, @r#"
            version = 1
            revision = 3
            requires-python = ">=3.12"
            conflicts = [[
                { package = "project", extra = "extra1" },
                { package = "project", extra = "extra2" },
                { package = "project", extra = "project3" },
            ]]

            [options]
            exclude-newer = "2024-03-25T00:00:00Z"

            [[package]]
            name = "project"
            version = "0.1.0"
            source = { virtual = "." }

            [package.optional-dependencies]
            extra1 = [
                { name = "sortedcontainers", version = "2.2.0", source = { registry = "https://pypi.org/simple" } },
            ]
            extra2 = [
                { name = "sortedcontainers", version = "2.3.0", source = { registry = "https://pypi.org/simple" } },
            ]
            project3 = [
                { name = "sortedcontainers", version = "2.4.0", source = { registry = "https://pypi.org/simple" } },
            ]

            [package.metadata]
            requires-dist = [
                { name = "sortedcontainers", marker = "extra == 'extra1'", specifier = "==2.2.0" },
                { name = "sortedcontainers", marker = "extra == 'extra2'", specifier = "==2.3.0" },
                { name = "sortedcontainers", marker = "extra == 'project3'", specifier = "==2.4.0" },
            ]
            provides-extras = ["extra1", "extra2", "project3"]

            [[package]]
            name = "sortedcontainers"
            version = "2.2.0"
            source = { registry = "https://pypi.org/simple" }
            sdist = { url = "https://files.pythonhosted.org/packages/83/c9/466c0f9b42a0563366bb7c39906d9c6673315f81516f55e3a23a99f52234/sortedcontainers-2.2.0.tar.gz", hash = "sha256:331f5b7acb6bdfaf0b0646f5f86c087e414c9ae9d85e2076ad2eacb17ec2f4ff", size = 30402, upload-time = "2020-06-07T04:54:25.487Z" }
            wheels = [
                { url = "https://files.pythonhosted.org/packages/0c/75/4f79725a6ad966f1985d96c5aeda0b27d00c23afa14e8566efcdee1380ad/sortedcontainers-2.2.0-py2.py3-none-any.whl", hash = "sha256:f0694fbe8d090fab0fbabbfecad04756fbbb35dc3c0f89e0f6965396fe815d25", size = 29386, upload-time = "2020-06-07T04:54:23.921Z" },
            ]

            [[package]]
            name = "sortedcontainers"
            version = "2.3.0"
            source = { registry = "https://pypi.org/simple" }
            sdist = { url = "https://files.pythonhosted.org/packages/14/10/6a9481890bae97da9edd6e737c9c3dec6aea3fc2fa53b0934037b35c89ea/sortedcontainers-2.3.0.tar.gz", hash = "sha256:59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1", size = 30509, upload-time = "2020-11-09T00:03:52.258Z" }
            wheels = [
                { url = "https://files.pythonhosted.org/packages/20/4d/a7046ae1a1a4cc4e9bbed194c387086f06b25038be596543d026946330c9/sortedcontainers-2.3.0-py2.py3-none-any.whl", hash = "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f", size = 29479, upload-time = "2020-11-09T00:03:50.723Z" },
            ]

            [[package]]
            name = "sortedcontainers"
            version = "2.4.0"
            source = { registry = "https://pypi.org/simple" }
            sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" }
            wheels = [
                { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" },
            ]
            "#
        );
    });

    Ok(())
}
/// This tests that extras don't conflict with one another when they are in
/// distinct groups of extras.
#[test]
fn extra_multiple_not_conflicting1() -> Result<()> {
    let context = TestContext::new("3.12");

    // Two independent conflict groups: extra1/extra2 and
    // project3/project4. The extras themselves are empty, so only the
    // conflict *declarations* are being exercised here.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"

        [tool.uv]
        conflicts = [
            [
              { extra = "extra1" },
              { extra = "extra2" },
            ],
            [
              { extra = "project3" },
              { extra = "project4" },
            ],
        ]

        [project.optional-dependencies]
        extra1 = []
        extra2 = []
        project3 = []
        project4 = []
        "#,
    )?;
    uv_snapshot!(context.filters(), context.lock(), @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    // Install from the lockfile.
    uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Audited in [TIME]
    "###);

    // extra1/extra2 conflict!
    uv_snapshot!(
        context.filters(),
        context.sync().arg("--frozen").arg("--extra=extra1").arg("--extra=extra2"),
        @r###"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    error: Extras `extra1` and `extra2` are incompatible with the declared conflicts: {`project[extra1]`, `project[extra2]`}
    "###);

    // project3/project4 conflict!
    uv_snapshot!(
        context.filters(),
        context.sync().arg("--frozen").arg("--extra=project3").arg("--extra=project4"),
        @r###"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    error: Extras `project3` and `project4` are incompatible with the declared conflicts: {`project[project3]`, `project[project4]`}
    "###);

    // ... but extra1/project3 does not.
    uv_snapshot!(
        context.filters(),
        context.sync().arg("--frozen").arg("--extra=extra1").arg("--extra=project3"),
        @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Audited in [TIME]
    "###);

    // ... and neither does extra2/project3.
    uv_snapshot!(
        context.filters(),
        context.sync().arg("--frozen").arg("--extra=extra2").arg("--extra=project3"),
        @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Audited in [TIME]
    "###);

    // And similarly, with project 4.
    uv_snapshot!(
        context.filters(),
        context.sync().arg("--frozen").arg("--extra=extra1").arg("--extra=project4"),
        @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Audited in [TIME]
    "###);

    // ... and neither does extra2/project4.
    uv_snapshot!(
        context.filters(),
        context.sync().arg("--frozen").arg("--extra=extra2").arg("--extra=project4"),
        @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Audited in [TIME]
    "###);

    Ok(())
}
/// This tests that if the user has conflicting extras, but puts them in two
/// distinct groups of extras, then resolution still fails. (Because the only
/// way to resolve them in different forks is to define the extras as directly
/// conflicting.)
#[test]
fn extra_multiple_not_conflicting2() -> Result<()> {
    let context = TestContext::new("3.12");

    // Cross-group conflicts: extra1/project3 pin one version and
    // extra2/project4 pin another, with no conflicts declared yet.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"

        [project.optional-dependencies]
        extra1 = ["sortedcontainers==2.3.0"]
        extra2 = ["sortedcontainers==2.4.0"]
        project3 = ["sortedcontainers==2.3.0"]
        project4 = ["sortedcontainers==2.4.0"]
        "#,
    )?;
    // Fails, as expected.
    uv_snapshot!(context.filters(), context.lock(), @r###"
    success: false
    exit_code: 1

    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because project[extra2] depends on sortedcontainers==2.4.0 and project[extra1] depends on sortedcontainers==2.3.0, we can conclude that project[extra1] and project[extra2] are incompatible.
          And because your project requires project[extra1] and project[extra2], we can conclude that your project's requirements are unsatisfiable.
    "###);

    // If we define extra1/extra2 as conflicting and project3/project4
    // as conflicting, that still isn't enough! That's because extra1
    // conflicts with project4 and extra2 conflicts with project3.
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"

        [tool.uv]
        conflicts = [
            [
              { extra = "extra1" },
              { extra = "extra2" },
            ],
            [
              { extra = "project3" },
              { extra = "project4" },
            ],
        ]

        [project.optional-dependencies]
        extra1 = ["sortedcontainers==2.3.0"]
        extra2 = ["sortedcontainers==2.4.0"]
        project3 = ["sortedcontainers==2.3.0"]
        project4 = ["sortedcontainers==2.4.0"]
        "#,
    )?;
    uv_snapshot!(context.filters(), context.lock(), @r"
    success: false
    exit_code: 1

    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies for split (included: project[extra2], project[project3]; excluded: project[extra1], project[project4]):
      ╰─▶ Because project[project3] depends on sortedcontainers==2.3.0 and project[extra2] depends on sortedcontainers==2.4.0, we can conclude that project[extra2] and project[project3] are incompatible.
          And because your project requires project[extra2] and project[project3], we can conclude that your project's requirements are unsatisfiable.
    ");

    // One could try to declare all pairs of conflicting extras as
    // conflicting, but this doesn't quite work either. For example,
    // the first group of conflicting extra, extra1/extra2,
    // specifically allows project4 to be co-mingled with extra1 (and
    // similarly, project3 with extra2), which are conflicting.
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"

        [tool.uv]
        conflicts = [
            [
              { extra = "extra1" },
              { extra = "extra2" },
            ],
            [
              { extra = "project3" },
              { extra = "project4" },
            ],
            [
              { extra = "extra1" },
              { extra = "project4" },
            ],
            [
              { extra = "extra2" },
              { extra = "project3" },
            ],
        ]

        [project.optional-dependencies]
        extra1 = ["sortedcontainers==2.3.0"]
        extra2 = ["sortedcontainers==2.4.0"]
        project3 = ["sortedcontainers==2.3.0"]
        project4 = ["sortedcontainers==2.4.0"]
        "#,
    )?;
    uv_snapshot!(context.filters(), context.lock(), @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###);

    // We can also fix this by just putting them all in one big
    // group, even though extra1/project3 don't conflict and
    // extra2/project4 don't conflict.
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"

        [tool.uv]
        conflicts = [
            [
              { extra = "extra1" },
              { extra = "extra2" },
              { extra = "project3" },
              { extra = "project4" },
            ],
        ]

        [project.optional-dependencies]
        extra1 = ["sortedcontainers==2.3.0"]
        extra2 = ["sortedcontainers==2.4.0"]
        project3 = ["sortedcontainers==2.3.0"]
        project4 = ["sortedcontainers==2.4.0"]
        "#,
    )?;
    uv_snapshot!(context.filters(), context.lock(), @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###);

    Ok(())
}
/// This tests that we handle two independent sets of conflicting
/// extras correctly.
#[test]
fn extra_multiple_independent() -> Result<()> {
    let context = TestContext::new("3.12");

    // If we don't declare any conflicting extras, then resolution
    // will of course fail.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"

        [project.optional-dependencies]
        extra1 = ["sortedcontainers==2.3.0"]
        extra2 = ["sortedcontainers==2.4.0"]
        project3 = ["anyio==4.1.0"]
        project4 = ["anyio==4.2.0"]
        "#,
    )?;
    uv_snapshot!(context.filters(), context.lock(), @r###"
    success: false
    exit_code: 1

    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because project[extra2] depends on sortedcontainers==2.4.0 and project[extra1] depends on sortedcontainers==2.3.0, we can conclude that project[extra1] and project[extra2] are incompatible.
          And because your project requires project[extra1] and project[extra2], we can conclude that your project's requirements are unsatisfiable.
    "###);

    // OK, responding to the error, we declare our anyio extras
    // as conflicting. But now we should see sortedcontainers as
    // conflicting.
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"

        [tool.uv]
        conflicts = [
            [
              { extra = "project3" },
              { extra = "project4" },
            ],
        ]

        [project.optional-dependencies]
        extra1 = ["sortedcontainers==2.3.0"]
        extra2 = ["sortedcontainers==2.4.0"]
        project3 = ["anyio==4.1.0"]
        project4 = ["anyio==4.2.0"]
        "#,
    )?;
    uv_snapshot!(context.filters(), context.lock(), @r"
    success: false
    exit_code: 1

    ----- stdout -----

    ----- stderr -----
      × No solution found when resolving dependencies for split (included: project[project4]; excluded: project[project3]):
      ╰─▶ Because project[extra2] depends on sortedcontainers==2.4.0 and project[extra1] depends on sortedcontainers==2.3.0, we can conclude that project[extra1] and project[extra2] are incompatible.
          And because your project requires project[extra1] and project[extra2], we can conclude that your project's requirements are unsatisfiable.
    ");

    // Once we declare ALL our conflicting extras, resolution succeeds.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"

        [tool.uv]
        conflicts = [
            [
              { extra = "extra1" },
              { extra = "extra2" },
            ],
            [
              { extra = "project3" },
              { extra = "project4" },
            ],
        ]

        [project.optional-dependencies]
        extra1 = ["sortedcontainers==2.3.0"]
        extra2 = ["sortedcontainers==2.4.0"]
        project3 = ["anyio==4.1.0"]
        project4 = ["anyio==4.2.0"]
        "#,
    )?;
    uv_snapshot!(context.filters(), context.lock(), @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###);

    // The lockfile should record both conflict groups, and contain both
    // versions of `anyio` and both versions of `sortedcontainers`.
    let lock = context.read("uv.lock");
    insta::with_settings!({
        filters => context.filters(),
    }, {
        assert_snapshot!(
            lock, @r#"
            version = 1
            revision = 3
            requires-python = ">=3.12"
            conflicts = [[
                { package = "project", extra = "extra1" },
                { package = "project", extra = "extra2" },
            ], [
                { package = "project", extra = "project3" },
                { package = "project", extra = "project4" },
            ]]

            [options]
            exclude-newer = "2024-03-25T00:00:00Z"

            [[package]]
            name = "anyio"
            version = "4.1.0"
            source = { registry = "https://pypi.org/simple" }
            dependencies = [
                { name = "idna" },
                { name = "sniffio" },
            ]
            sdist = { url = "https://files.pythonhosted.org/packages/6e/57/075e07fb01ae2b740289ec9daec670f60c06f62d04b23a68077fd5d73fab/anyio-4.1.0.tar.gz", hash = "sha256:5a0bec7085176715be77df87fc66d6c9d70626bd752fcc85f57cdbee5b3760da", size = 155773, upload-time = "2023-11-22T23:23:54.066Z" }
            wheels = [
                { url = "https://files.pythonhosted.org/packages/85/4f/d010eca6914703d8e6be222165d02c3e708ed909cdb2b7af3743667f302e/anyio-4.1.0-py3-none-any.whl", hash = "sha256:56a415fbc462291813a94528a779597226619c8e78af7de0507333f700011e5f", size = 83924, upload-time = "2023-11-22T23:23:52.595Z" },
            ]

            [[package]]
            name = "anyio"
            version = "4.2.0"
            source = { registry = "https://pypi.org/simple" }
            dependencies = [
                { name = "idna" },
                { name = "sniffio" },
            ]
            sdist = { url = "https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f", size = 158770, upload-time = "2023-12-16T17:06:57.709Z" }
            wheels = [
                { url = "https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee", size = 85481, upload-time = "2023-12-16T17:06:55.989Z" },
            ]

            [[package]]
            name = "idna"
            version = "3.6"
            source = { registry = "https://pypi.org/simple" }
            sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" }
            wheels = [
                { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" },
            ]

            [[package]]
            name = "project"
            version = "0.1.0"
            source = { virtual = "." }

            [package.optional-dependencies]
            extra1 = [
                { name = "sortedcontainers", version = "2.3.0", source = { registry = "https://pypi.org/simple" } },
            ]
            extra2 = [
                { name = "sortedcontainers", version = "2.4.0", source = { registry = "https://pypi.org/simple" } },
            ]
            project3 = [
                { name = "anyio", version = "4.1.0", source = { registry = "https://pypi.org/simple" } },
            ]
            project4 = [
                { name = "anyio", version = "4.2.0", source = { registry = "https://pypi.org/simple" } },
            ]

            [package.metadata]
            requires-dist = [
                { name = "anyio", marker = "extra == 'project3'", specifier = "==4.1.0" },
                { name = "anyio", marker = "extra == 'project4'", specifier = "==4.2.0" },
                { name = "sortedcontainers", marker = "extra == 'extra1'", specifier = "==2.3.0" },
                { name = "sortedcontainers", marker = "extra == 'extra2'", specifier = "==2.4.0" },
            ]
            provides-extras = ["extra1", "extra2", "project3", "project4"]

            [[package]]
            name = "sniffio"
            version = "1.3.1"
            source = { registry = "https://pypi.org/simple" }
            sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
            wheels = [
                { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
            ]

            [[package]]
            name = "sortedcontainers"
            version = "2.3.0"
            source = { registry = "https://pypi.org/simple" }
            sdist = { url = "https://files.pythonhosted.org/packages/14/10/6a9481890bae97da9edd6e737c9c3dec6aea3fc2fa53b0934037b35c89ea/sortedcontainers-2.3.0.tar.gz", hash = "sha256:59cc937650cf60d677c16775597c89a960658a09cf7c1a668f86e1e4464b10a1", size = 30509, upload-time = "2020-11-09T00:03:52.258Z" }
            wheels = [
                { url = "https://files.pythonhosted.org/packages/20/4d/a7046ae1a1a4cc4e9bbed194c387086f06b25038be596543d026946330c9/sortedcontainers-2.3.0-py2.py3-none-any.whl", hash = "sha256:37257a32add0a3ee490bb170b599e93095eed89a55da91fa9f48753ea12fd73f", size = 29479, upload-time = "2020-11-09T00:03:50.723Z" },
            ]

            [[package]]
            name = "sortedcontainers"
            version = "2.4.0"
            source = { registry = "https://pypi.org/simple" }
            sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" }
            wheels = [
                { url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" },
            ]
            "#
        );
    });

    Ok(())
}
#[test]
fn extra_config_change_ignore_lockfile() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
[tool.uv]
conflicts = [
[
{ extra = "extra1" },
{ extra = "extra2" },
],
]
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/tree.rs | crates/uv/tests/it/tree.rs | use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::prelude::*;
use indoc::{formatdoc, indoc};
use insta::assert_snapshot;
use url::Url;
use crate::common::{TestContext, uv_snapshot};
#[test]
fn nested_dependencies() -> Result<()> {
    let context = TestContext::new("3.12");

    // A single direct dependency whose transitive closure gives the
    // rendered tree some depth (including a shared `numpy` node).
    context.temp_dir.child("pyproject.toml").write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = [
            "scikit-learn==1.4.1.post1"
        ]
        "#,
    )?;

    uv_snapshot!(context.filters(), context.tree().arg("--universal"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    └── scikit-learn v1.4.1.post1
        ├── joblib v1.3.2
        ├── numpy v1.26.4
        ├── scipy v1.12.0
        │   └── numpy v1.26.4
        └── threadpoolctl v3.4.0

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // `uv tree` should update the lockfile
    assert!(!context.read("uv.lock").is_empty());

    Ok(())
}
#[test]
fn nested_platform_dependencies() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = [
"jupyter-client"
]
"#,
)?;
uv_snapshot!(context.filters(), context.tree().arg("--python-platform").arg("linux"), @r###"
success: true
exit_code: 0
----- stdout -----
project v0.1.0
└── jupyter-client v8.6.1
├── jupyter-core v5.7.2
│ ├── platformdirs v4.2.0
│ └── traitlets v5.14.2
├── python-dateutil v2.9.0.post0
│ └── six v1.16.0
├── pyzmq v25.1.2
├── tornado v6.4
└── traitlets v5.14.2
----- stderr -----
Resolved 12 packages in [TIME]
"###
);
uv_snapshot!(context.filters(), context.tree().arg("--universal"), @r###"
success: true
exit_code: 0
----- stdout -----
project v0.1.0
└── jupyter-client v8.6.1
├── jupyter-core v5.7.2
│ ├── platformdirs v4.2.0
│ ├── pywin32 v306
│ └── traitlets v5.14.2
├── python-dateutil v2.9.0.post0
│ └── six v1.16.0
├── pyzmq v25.1.2
│ └── cffi v1.16.0
│ └── pycparser v2.21
├── tornado v6.4
└── traitlets v5.14.2
----- stderr -----
Resolved 12 packages in [TIME]
"###
);
// `uv tree` should update the lockfile
let lock = context.read("uv.lock");
assert!(!lock.is_empty());
Ok(())
}
#[test]
fn invert() -> Result<()> {
    let context = TestContext::new("3.12");

    // `scikit-learn` gives us a diamond-ish graph: `numpy` is reachable
    // both directly and via `scipy`, which exercises de-duplication.
    context.temp_dir.child("pyproject.toml").write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = [
            "scikit-learn==1.4.1.post1"
        ]
        "#,
    )?;

    // By default, repeated subtrees are collapsed with a `(*)` marker.
    uv_snapshot!(context.filters(), context.tree().arg("--invert"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    joblib v1.3.2
    └── scikit-learn v1.4.1.post1
        └── project v0.1.0
    numpy v1.26.4
    ├── scikit-learn v1.4.1.post1 (*)
    └── scipy v1.12.0
        └── scikit-learn v1.4.1.post1 (*)
    threadpoolctl v3.4.0
    └── scikit-learn v1.4.1.post1 (*)
    (*) Package tree already displayed

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // With `--no-dedupe`, every repeated subtree is rendered in full.
    uv_snapshot!(context.filters(), context.tree().arg("--invert").arg("--no-dedupe"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    joblib v1.3.2
    └── scikit-learn v1.4.1.post1
        └── project v0.1.0
    numpy v1.26.4
    ├── scikit-learn v1.4.1.post1
    │   └── project v0.1.0
    └── scipy v1.12.0
        └── scikit-learn v1.4.1.post1
            └── project v0.1.0
    threadpoolctl v3.4.0
    └── scikit-learn v1.4.1.post1
        └── project v0.1.0

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    Ok(())
}
/// `uv tree --frozen` must render from the existing lockfile without
/// re-resolving, even if the manifest has since changed.
#[test]
fn frozen() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio"]
        "#,
    )?;

    uv_snapshot!(context.filters(), context.tree().arg("--universal"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    └── anyio v4.3.0
        ├── idna v3.6
        └── sniffio v1.3.1

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    // `uv tree` should update the lockfile
    let lock = context.read("uv.lock");
    assert!(!lock.is_empty());

    // Update the project dependencies.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]
        "#,
    )?;

    // Running with `--frozen` should show the stale tree: `anyio`, not
    // the newly-declared `iniconfig`.
    uv_snapshot!(context.filters(), context.tree().arg("--frozen"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    └── anyio v4.3.0
        ├── idna v3.6
        └── sniffio v1.3.1

    ----- stderr -----
    "###
    );

    Ok(())
}
#[test]
fn outdated() -> Result<()> {
    let context = TestContext::new("3.12");

    // Pin a dependency well behind its latest release so `--outdated`
    // has something to annotate.
    context.temp_dir.child("pyproject.toml").write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio==3.0.0"]
        "#,
    )?;

    // The pinned entry is flagged with the latest available version.
    uv_snapshot!(context.filters(), context.tree().arg("--outdated").arg("--universal"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    └── anyio v3.0.0 (latest: v4.3.0)
        ├── idna v3.6
        └── sniffio v1.3.1

    ----- stderr -----
    Resolved 4 packages in [TIME]
    "###
    );

    Ok(())
}
/// `black` depends on `click`, which pulls in `colorama` only on
/// Windows; this checks that `uv tree` includes or omits the
/// platform-gated branch depending on the resolution mode.
#[test]
fn platform_dependencies() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = [
            "black"
        ]
        "#,
    )?;

    // When `--universal` is _not_ provided, `colorama` should _not_ be included.
    // (Skipped on Windows, where the host platform *would* include it.)
    #[cfg(not(windows))]
    uv_snapshot!(context.filters(), context.tree(), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    └── black v24.3.0
        ├── click v8.1.7
        ├── mypy-extensions v1.0.0
        ├── packaging v24.0
        ├── pathspec v0.12.1
        └── platformdirs v4.2.0

    ----- stderr -----
    Resolved 8 packages in [TIME]
    "###);

    // Unless `--python-platform` is set to `windows`, in which case it should be included.
    uv_snapshot!(context.filters(), context.tree().arg("--python-platform").arg("windows"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    └── black v24.3.0
        ├── click v8.1.7
        │   └── colorama v0.4.6
        ├── mypy-extensions v1.0.0
        ├── packaging v24.0
        ├── pathspec v0.12.1
        └── platformdirs v4.2.0

    ----- stderr -----
    Resolved 8 packages in [TIME]
    "###);

    // When `--universal` _is_ provided, should include `colorama`, even though it's only
    // included on Windows.
    uv_snapshot!(context.filters(), context.tree().arg("--universal"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    └── black v24.3.0
        ├── click v8.1.7
        │   └── colorama v0.4.6
        ├── mypy-extensions v1.0.0
        ├── packaging v24.0
        ├── pathspec v0.12.1
        └── platformdirs v4.2.0

    ----- stderr -----
    Resolved 8 packages in [TIME]
    "###
    );

    // `uv tree` should update the lockfile
    let lock = context.read("uv.lock");
    assert!(!lock.is_empty());

    Ok(())
}
/// As [`platform_dependencies`], but with `--invert`: the platform-only
/// dependency (`colorama`) should only appear as a root of the inverted tree
/// when targeting Windows.
#[test]
fn platform_dependencies_inverted() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = [
            "click"
        ]
        "#,
    )?;

    // Under `--python-platform linux`, `colorama` should _not_ be included.
    uv_snapshot!(context.filters(), context.tree().arg("--invert").arg("--python-platform").arg("linux"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    click v8.1.7
    └── project v0.1.0

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###);

    // Unless `--python-platform` is set to `windows`, in which case it should be included.
    uv_snapshot!(context.filters(), context.tree().arg("--invert").arg("--python-platform").arg("windows"), @r#"
    success: true
    exit_code: 0

    ----- stdout -----
    colorama v0.4.6
    └── click v8.1.7
        └── project v0.1.0

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "#);

    Ok(())
}
/// A universal tree can contain the same package at two different versions when
/// the requirements fork on a marker (here, `anyio` split on `sys_platform`).
#[test]
fn repeated_dependencies() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = [
            "anyio < 2 ; sys_platform == 'win32'",
            "anyio > 2 ; sys_platform == 'linux'",
        ]
        "#,
    )?;

    // Should include both versions of `anyio`, which have different dependencies.
    uv_snapshot!(context.filters(), context.tree().arg("--universal"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    ├── anyio v1.4.0
    │   ├── async-generator v1.10
    │   ├── idna v3.6
    │   └── sniffio v1.3.1
    └── anyio v4.3.0
        ├── idna v3.6
        └── sniffio v1.3.1

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###
    );

    // `uv tree` should update the lockfile
    let lock = context.read("uv.lock");
    assert!(!lock.is_empty());

    Ok(())
}
/// In this case, a package is included twice at the same version, but pointing to different direct
/// URLs. Each copy of `dependency v0.0.1` pins a different `anyio`, so the tree
/// must show both subtrees rather than deduplicating on name/version alone.
#[test]
fn repeated_version() -> Result<()> {
    let context = TestContext::new("3.12");

    // First copy of `dependency`, pinned to `anyio==3.7.0`.
    let v1 = context.temp_dir.child("v1");
    fs_err::create_dir_all(&v1)?;
    let pyproject_toml = v1.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "dependency"
        version = "0.0.1"
        requires-python = ">=3.12"
        dependencies = ["anyio==3.7.0"]
        "#,
    )?;

    // Second copy, same name and version, but pinned to `anyio==3.0.0`.
    let v2 = context.temp_dir.child("v2");
    fs_err::create_dir_all(&v2)?;
    let pyproject_toml = v2.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "dependency"
        version = "0.0.1"
        requires-python = ">=3.12"
        dependencies = ["anyio==3.0.0"]
        "#,
    )?;

    // The root selects one copy per platform via disjoint markers.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(&formatdoc! {
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = [
          "dependency @ {} ; sys_platform == 'darwin'",
          "dependency @ {} ; sys_platform != 'darwin'",
        ]
        "#,
        Url::from_file_path(context.temp_dir.join("v1")).unwrap(),
        Url::from_file_path(context.temp_dir.join("v2")).unwrap(),
    })?;

    uv_snapshot!(context.filters(), context.tree().arg("--universal"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    ├── dependency v0.0.1
    │   └── anyio v3.7.0
    │       ├── idna v3.6
    │       └── sniffio v1.3.1
    └── dependency v0.0.1
        └── anyio v3.0.0
            ├── idna v3.6
            └── sniffio v1.3.1

    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    // `uv tree` should update the lockfile
    let lock = context.read("uv.lock");
    assert!(!lock.is_empty());

    Ok(())
}
/// Legacy `tool.uv.dev-dependencies` entries appear in the tree annotated with
/// `(group: dev)`, emit a deprecation warning, and are hidden by `--no-dev`.
#[test]
fn dev_dependencies() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]

        [tool.uv]
        dev-dependencies = ["anyio"]
        "#,
    )?;

    uv_snapshot!(context.filters(), context.tree(), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    ├── iniconfig v2.0.0
    └── anyio v4.3.0 (group: dev)
        ├── idna v3.6
        └── sniffio v1.3.1

    ----- stderr -----
    warning: The `tool.uv.dev-dependencies` field (used in `pyproject.toml`) is deprecated and will be removed in a future release; use `dependency-groups.dev` instead
    Resolved 5 packages in [TIME]
    "
    );

    // `--no-dev` drops the dev group but still resolves the full lockfile.
    uv_snapshot!(context.filters(), context.tree().arg("--no-dev"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    └── iniconfig v2.0.0

    ----- stderr -----
    warning: The `tool.uv.dev-dependencies` field (used in `pyproject.toml`) is deprecated and will be removed in a future release; use `dependency-groups.dev` instead
    Resolved 5 packages in [TIME]
    "
    );

    // `uv tree` should update the lockfile
    let lock = context.read("uv.lock");
    assert!(!lock.is_empty());

    Ok(())
}
/// As [`dev_dependencies`], but inverted: the `(group: dev)` annotation should
/// land on the root project edge, and `--no-dev` should prune those edges.
#[test]
fn dev_dependencies_inverted() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]

        [tool.uv]
        dev-dependencies = ["anyio"]
        "#,
    )?;

    uv_snapshot!(context.filters(), context.tree().arg("--universal").arg("--invert"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    idna v3.6
    └── anyio v4.3.0
        └── project v0.1.0 (group: dev)
    iniconfig v2.0.0
    └── project v0.1.0
    sniffio v1.3.1
    └── anyio v4.3.0 (*)
    (*) Package tree already displayed

    ----- stderr -----
    warning: The `tool.uv.dev-dependencies` field (used in `pyproject.toml`) is deprecated and will be removed in a future release; use `dependency-groups.dev` instead
    Resolved 5 packages in [TIME]
    "
    );

    uv_snapshot!(context.filters(), context.tree().arg("--universal").arg("--invert").arg("--no-dev"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    iniconfig v2.0.0
    └── project v0.1.0

    ----- stderr -----
    warning: The `tool.uv.dev-dependencies` field (used in `pyproject.toml`) is deprecated and will be removed in a future release; use `dependency-groups.dev` instead
    Resolved 5 packages in [TIME]
    "
    );

    // `uv tree` should update the lockfile
    let lock = context.read("uv.lock");
    assert!(!lock.is_empty());

    Ok(())
}
/// Optional dependencies (extras) are annotated in the tree: both an extra on a
/// third-party requirement (`flask[dotenv]`) and the project's own extra
/// (`async`) should be labelled.
#[test]
fn optional_dependencies() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig", "flask[dotenv]"]

        [project.optional-dependencies]
        async = ["anyio"]
        "#,
    )?;

    uv_snapshot!(context.filters(), context.tree().arg("--universal"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    ├── flask[dotenv] v3.0.2
    │   ├── blinker v1.7.0
    │   ├── click v8.1.7
    │   │   └── colorama v0.4.6
    │   ├── itsdangerous v2.1.2
    │   ├── jinja2 v3.1.3
    │   │   └── markupsafe v2.1.5
    │   ├── werkzeug v3.0.1
    │   │   └── markupsafe v2.1.5
    │   └── python-dotenv v1.0.1 (extra: dotenv)
    ├── iniconfig v2.0.0
    └── anyio v4.3.0 (extra: async)
        ├── idna v3.6
        └── sniffio v1.3.1

    ----- stderr -----
    Resolved 14 packages in [TIME]
    "###
    );

    // `uv tree` should update the lockfile
    let lock = context.read("uv.lock");
    assert!(!lock.is_empty());

    Ok(())
}
/// As [`optional_dependencies`], but inverted: extra annotations should follow
/// the inverted edges (e.g. `project[dotenv]`, `flask v3.0.2 (extra: dotenv)`).
#[test]
fn optional_dependencies_inverted() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig", "flask[dotenv]"]

        [project.optional-dependencies]
        async = ["anyio"]
        "#,
    )?;

    uv_snapshot!(context.filters(), context.tree().arg("--universal").arg("--invert"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    blinker v1.7.0
    └── flask v3.0.2
        └── project[dotenv] v0.1.0
    colorama v0.4.6
    └── click v8.1.7
        └── flask v3.0.2 (*)
    idna v3.6
    └── anyio v4.3.0
        └── project v0.1.0 (extra: async)
    iniconfig v2.0.0
    └── project v0.1.0
    itsdangerous v2.1.2
    └── flask v3.0.2 (*)
    markupsafe v2.1.5
    ├── jinja2 v3.1.3
    │   └── flask v3.0.2 (*)
    └── werkzeug v3.0.1
        └── flask v3.0.2 (*)
    python-dotenv v1.0.1
    └── flask v3.0.2 (extra: dotenv) (*)
    sniffio v1.3.1
    └── anyio v4.3.0 (*)
    (*) Package tree already displayed

    ----- stderr -----
    Resolved 14 packages in [TIME]
    "###
    );

    // `uv tree` should update the lockfile
    let lock = context.read("uv.lock");
    assert!(!lock.is_empty());

    Ok(())
}
#[test]
fn package() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["scikit-learn==1.4.1.post1", "pandas"]
"#,
)?;
uv_snapshot!(context.filters(), context.tree(), @r###"
success: true
exit_code: 0
----- stdout -----
project v0.1.0
├── pandas v2.2.1
│ ├── numpy v1.26.4
│ ├── python-dateutil v2.9.0.post0
│ │ └── six v1.16.0
│ ├── pytz v2024.1
│ └── tzdata v2024.1
└── scikit-learn v1.4.1.post1
├── joblib v1.3.2
├── numpy v1.26.4
├── scipy v1.12.0
│ └── numpy v1.26.4
└── threadpoolctl v3.4.0
----- stderr -----
Resolved 11 packages in [TIME]
"###
);
uv_snapshot!(context.filters(), context.tree().arg("--package").arg("scipy"), @r###"
success: true
exit_code: 0
----- stdout -----
scipy v1.12.0
└── numpy v1.26.4
----- stderr -----
Resolved 11 packages in [TIME]
"###
);
uv_snapshot!(context.filters(), context.tree().arg("--package").arg("numpy").arg("--invert"), @r###"
success: true
exit_code: 0
----- stdout -----
numpy v1.26.4
├── pandas v2.2.1
│ └── project v0.1.0
├── scikit-learn v1.4.1.post1
│ └── project v0.1.0
└── scipy v1.12.0
└── scikit-learn v1.4.1.post1 (*)
(*) Package tree already displayed
----- stderr -----
Resolved 11 packages in [TIME]
"###
);
// `uv tree` should update the lockfile
let lock = context.read("uv.lock");
assert!(!lock.is_empty());
Ok(())
}
/// Exercises dependency-group selection flags: by default only `dev` is shown;
/// `--only-group`, `--group`, `--all-groups`, and `--no-group` each adjust
/// which `[dependency-groups]` entries appear in the tree.
#[test]
fn group() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["typing-extensions"]

        [dependency-groups]
        foo = ["anyio"]
        bar = ["iniconfig"]
        dev = ["sniffio"]
        "#,
    )?;

    context.lock().assert().success();

    // Default: base dependencies plus the implicit `dev` group only.
    uv_snapshot!(context.filters(), context.tree(), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    ├── typing-extensions v4.10.0
    └── sniffio v1.3.1 (group: dev)

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###);

    // `--only-group bar`: the group's dependencies, without the base or `dev`.
    uv_snapshot!(context.filters(), context.tree().arg("--only-group").arg("bar"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    └── iniconfig v2.0.0 (group: bar)

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###);

    // `--group foo`: adds `foo` on top of the defaults.
    uv_snapshot!(context.filters(), context.tree().arg("--group").arg("foo"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    ├── typing-extensions v4.10.0
    ├── sniffio v1.3.1 (group: dev)
    └── anyio v4.3.0 (group: foo)
        ├── idna v3.6
        └── sniffio v1.3.1

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###);

    // `--group` is repeatable.
    uv_snapshot!(context.filters(), context.tree().arg("--group").arg("foo").arg("--group").arg("bar"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    ├── typing-extensions v4.10.0
    ├── iniconfig v2.0.0 (group: bar)
    ├── sniffio v1.3.1 (group: dev)
    └── anyio v4.3.0 (group: foo)
        ├── idna v3.6
        └── sniffio v1.3.1

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###);

    // `--all-groups`: everything.
    uv_snapshot!(context.filters(), context.tree().arg("--all-groups"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    ├── typing-extensions v4.10.0
    ├── iniconfig v2.0.0 (group: bar)
    ├── sniffio v1.3.1 (group: dev)
    └── anyio v4.3.0 (group: foo)
        ├── idna v3.6
        └── sniffio v1.3.1

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###);

    // `--no-group` subtracts from `--all-groups`.
    uv_snapshot!(context.filters(), context.tree().arg("--all-groups").arg("--no-group").arg("bar"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    ├── typing-extensions v4.10.0
    ├── sniffio v1.3.1 (group: dev)
    └── anyio v4.3.0 (group: foo)
        ├── idna v3.6
        └── sniffio v1.3.1

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "###);

    Ok(())
}
/// The tree renderer must terminate on dependency cycles (`testtools` and
/// `fixtures` depend on each other), marking repeated subtrees with `(*)`.
#[test]
fn cycle() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["testtools==2.3.0", "fixtures==3.0.0"]
        "#,
    )?;

    uv_snapshot!(context.filters(), context.tree().arg("--universal"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    ├── fixtures v3.0.0
    │   ├── pbr v6.0.0
    │   ├── six v1.16.0
    │   └── testtools v2.3.0
    │       ├── extras v1.0.0
    │       ├── fixtures v3.0.0 (*)
    │       ├── pbr v6.0.0
    │       ├── python-mimeparse v1.6.0
    │       ├── six v1.16.0
    │       ├── traceback2 v1.4.0
    │       │   └── linecache2 v1.0.0
    │       └── unittest2 v1.1.0
    │           ├── argparse v1.4.0
    │           ├── six v1.16.0
    │           └── traceback2 v1.4.0 (*)
    └── testtools v2.3.0 (*)
    (*) Package tree already displayed

    ----- stderr -----
    Resolved 11 packages in [TIME]
    "###
    );

    // `--package` is repeatable; each selection renders its own subtree.
    uv_snapshot!(context.filters(), context.tree().arg("--package").arg("traceback2").arg("--package").arg("six"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    six v1.16.0
    traceback2 v1.4.0
    └── linecache2 v1.0.0

    ----- stderr -----
    Resolved 11 packages in [TIME]
    "###
    );

    // Inverted selection must also handle the cycle.
    uv_snapshot!(context.filters(), context.tree().arg("--package").arg("traceback2").arg("--package").arg("six").arg("--invert"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    six v1.16.0
    ├── fixtures v3.0.0
    │   ├── project v0.1.0
    │   └── testtools v2.3.0
    │       ├── fixtures v3.0.0 (*)
    │       └── project v0.1.0
    ├── testtools v2.3.0 (*)
    └── unittest2 v1.1.0
        └── testtools v2.3.0 (*)
    traceback2 v1.4.0
    ├── testtools v2.3.0 (*)
    └── unittest2 v1.1.0 (*)
    (*) Package tree already displayed

    ----- stderr -----
    Resolved 11 packages in [TIME]
    "###
    );

    // `uv tree` should update the lockfile
    let lock = context.read("uv.lock");
    assert!(!lock.is_empty());

    Ok(())
}
/// A workspace member used as a `dev` dependency of the root: it should appear
/// under the root with a `(group: dev)` label, yet still be listed as a
/// workspace member in its own right (deduplicated with `(*)`).
#[test]
fn workspace_dev() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio"]

        [dependency-groups]
        dev = ["child"]

        [tool.uv.workspace]
        members = ["child"]

        [tool.uv.sources]
        child = { workspace = true }
        "#,
    )?;

    let child = context.temp_dir.child("child");
    let pyproject_toml = child.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "child"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]
        "#,
    )?;

    uv_snapshot!(context.filters(), context.tree().arg("--universal"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    ├── anyio v4.3.0
    │   ├── idna v3.6
    │   └── sniffio v1.3.1
    └── child v0.1.0 (group: dev)
        └── iniconfig v2.0.0
    child v0.1.0 (*)
    (*) Package tree already displayed

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "
    );

    // Under `--no-dev`, the member should still be included, since we show the entire workspace.
    // But it shouldn't be considered a dependency of the root.
    uv_snapshot!(context.filters(), context.tree().arg("--universal").arg("--no-dev"), @r"
    success: true
    exit_code: 0

    ----- stdout -----
    project v0.1.0
    └── anyio v4.3.0
        ├── idna v3.6
        └── sniffio v1.3.1
    child v0.1.0
    └── iniconfig v2.0.0

    ----- stderr -----
    Resolved 6 packages in [TIME]
    "
    );

    // `uv tree` should update the lockfile
    let lock = context.read("uv.lock");
    assert!(!lock.is_empty());

    Ok(())
}
/// A "virtual" workspace with no `[project]` table: `uv tree` should still work,
/// rendering only the dependency-group entries, and warn that it had to default
/// `requires-python`.
#[test]
fn non_project() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [tool.uv.workspace]
        members = []

        [dependency-groups]
        async = ["anyio"]
        "#,
    )?;

    uv_snapshot!(context.filters(), context.tree().arg("--universal"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    anyio v4.3.0 (group: async)
    ├── idna v3.6
    └── sniffio v1.3.1

    ----- stderr -----
    warning: No `requires-python` value found in the workspace. Defaulting to `>=3.12`.
    Resolved 3 packages in [TIME]
    "###
    );

    // `uv tree` should update the lockfile
    let lock = context.read("uv.lock");
    assert!(!lock.is_empty());

    Ok(())
}
/// A virtual workspace root with a concrete member: the root's dependency group
/// and the member's own dependencies should both appear, and inversion should
/// attribute edges to the member correctly.
#[test]
fn non_project_member() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [tool.uv.workspace]
        members = ["child"]

        [dependency-groups]
        async = ["anyio"]
        "#,
    )?;

    let child = context.temp_dir.child("child");
    child.child("pyproject.toml").write_str(
        r#"
        [project]
        name = "child"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig", "sniffio", "anyio"]

        [build-system]
        requires = ["setuptools>=42"]
        build-backend = "setuptools.build_meta"
        "#,
    )?;

    uv_snapshot!(context.filters(), context.tree().arg("--universal"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    anyio v4.3.0 (group: async)
    ├── idna v3.6
    └── sniffio v1.3.1
    child v0.1.0
    ├── anyio v4.3.0 (*)
    ├── iniconfig v2.0.0
    └── sniffio v1.3.1
    (*) Package tree already displayed

    ----- stderr -----
    Resolved 5 packages in [TIME]
    "###
    );

    uv_snapshot!(context.filters(), context.tree().arg("--universal").arg("--invert"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----
    idna v3.6
    └── anyio v4.3.0
        └── child v0.1.0
    iniconfig v2.0.0
    └── child v0.1.0
    sniffio v1.3.1
    ├── anyio v4.3.0 (*)
    └── child v0.1.0
    (*) Package tree already displayed

    ----- stderr -----
    Resolved 5 packages in [TIME]
    "###
    );

    // `uv tree` should update the lockfile
    let lock = context.read("uv.lock");
    assert!(!lock.is_empty());

    Ok(())
}
#[test]
fn script() -> Result<()> {
let context = TestContext::new("3.12");
let script = context.temp_dir.child("script.py");
script.write_str(indoc! {r#"
# /// script
# requires-python = ">=3.11"
# dependencies = [
# "requests<3",
# "rich",
# ]
# ///
import requests
from rich.pretty import pprint
resp = requests.get("https://peps.python.org/api/peps.json")
data = resp.json()
pprint([(k, v["title"]) for k, v in data.items()][:10])
"#})?;
uv_snapshot!(context.filters(), context.tree().arg("--script").arg(script.path()), @r###"
success: true
exit_code: 0
----- stdout -----
rich v13.7.1
├── markdown-it-py v3.0.0
│ └── mdurl v0.1.2
└── pygments v2.17.2
requests v2.31.0
├── certifi v2024.2.2
├── charset-normalizer v3.3.2
├── idna v3.6
└── urllib3 v2.2.1
----- stderr -----
Resolved 9 packages in [TIME]
"###);
// If the lockfile didn't exist already, it shouldn't be persisted to disk.
assert!(!context.temp_dir.child("uv.lock").exists());
// Explicitly lock the script.
uv_snapshot!(context.filters(), context.lock().arg("--script").arg(script.path()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 9 packages in [TIME]
"###);
let lock = context.read("script.py.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.11"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/self_update.rs | crates/uv/tests/it/self_update.rs | use std::process::Command;
use axoupdater::{
ReleaseSourceType,
test::helpers::{RuntestArgs, perform_runtest},
};
use uv_static::EnvVars;
use crate::common::{TestContext, get_bin, uv_snapshot};
/// End-to-end `uv self update` check: installs a release via axoupdater's test
/// harness, runs the updater, and verifies the resulting binary still works.
#[test]
fn check_self_update() {
    // This test mutates CARGO_HOME to faithfully emulate a real self-update, so it is
    // gated on CI machines (detected via the conventional "CI" environment variable),
    // where it cannot clobber a developer's local installation.
    let running_in_ci = std::env::var(EnvVars::CI).is_ok_and(|value| !value.is_empty());
    if !running_in_ci {
        return;
    }

    // Describe the release to install and the `uv self update` invocation to exercise.
    let runtest_args = RuntestArgs {
        app_name: String::from("uv"),
        package: String::from("uv"),
        owner: String::from("astral-sh"),
        bin: get_bin(),
        binaries: vec![String::from("uv")],
        args: vec![String::from("self"), String::from("update")],
        release_type: ReleaseSourceType::GitHub,
    };

    // Install the application, then run the updater against it.
    let binary = perform_runtest(&runtest_args);

    // The updated binary should still behave like a normal `uv`.
    let status = Command::new(binary)
        .arg("--version")
        .status()
        .expect("failed to run 'uv --version'");
    assert!(status.success(), "'uv --version' returned non-zero");
}
/// `uv self update --offline` cannot reach the release source, so it must fail
/// fast with a clear error rather than attempting (and hanging on) a download.
#[test]
fn test_self_update_offline_error() {
    let context = TestContext::new("3.12");

    uv_snapshot!(context.self_update().arg("--offline"),
    @r"
    success: false
    exit_code: 1

    ----- stdout -----

    ----- stderr -----
    error: Self-update is not possible because network connectivity is disabled (i.e., with `--offline`)
    ");
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/build.rs | crates/uv/tests/it/build.rs | use crate::common::{DEFAULT_PYTHON_VERSION, TestContext, uv_snapshot};
use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::prelude::*;
use fs_err::File;
use indoc::indoc;
use insta::assert_snapshot;
use predicates::prelude::predicate;
use std::env::current_dir;
use uv_static::EnvVars;
use zip::ZipArchive;
/// Basic `uv build` flows: building an explicit path, building the CWD, failing
/// on a directory that isn't a Python project, and honoring `--out-dir`.
#[test]
fn build_basic() -> Result<()> {
    let context = TestContext::new("3.12");

    // Normalize Windows path separators in snapshot output.
    let filters = context
        .filters()
        .into_iter()
        .chain([(r"\\\.", "")])
        .collect::<Vec<_>>();

    let project = context.temp_dir.child("project");

    let pyproject_toml = project.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio==3.7.0"]

        [build-system]
        requires = ["hatchling"]
        build-backend = "hatchling.build"
        "#,
    )?;

    project
        .child("src")
        .child("project")
        .child("__init__.py")
        .touch()?;
    project.child("README").touch()?;

    // Build the specified path.
    uv_snapshot!(&filters, context.build().arg("project"), @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Building source distribution...
    Building wheel from source distribution...
    Successfully built project/dist/project-0.1.0.tar.gz
    Successfully built project/dist/project-0.1.0-py3-none-any.whl
    "###);

    project
        .child("dist")
        .child("project-0.1.0.tar.gz")
        .assert(predicate::path::is_file());
    project
        .child("dist")
        .child("project-0.1.0-py3-none-any.whl")
        .assert(predicate::path::is_file());

    fs_err::remove_dir_all(project.child("dist"))?;

    // Build the current working directory.
    uv_snapshot!(&filters, context.build().current_dir(project.path()), @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Building source distribution...
    Building wheel from source distribution...
    Successfully built dist/project-0.1.0.tar.gz
    Successfully built dist/project-0.1.0-py3-none-any.whl
    "###);

    project
        .child("dist")
        .child("project-0.1.0.tar.gz")
        .assert(predicate::path::is_file());
    project
        .child("dist")
        .child("project-0.1.0-py3-none-any.whl")
        .assert(predicate::path::is_file());

    fs_err::remove_dir_all(project.child("dist"))?;

    // Error if there's nothing to build (the temp dir itself is not a project).
    uv_snapshot!(&filters, context.build(), @r###"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    Building source distribution...
      × Failed to build `[TEMP_DIR]/`
      ╰─▶ [TEMP_DIR]/ does not appear to be a Python project, as neither `pyproject.toml` nor `setup.py` are present in the directory
    "###);

    // Build to a specified path.
    uv_snapshot!(&filters, context.build().arg("--out-dir").arg("out").current_dir(project.path()), @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Building source distribution...
    Building wheel from source distribution...
    Successfully built out/project-0.1.0.tar.gz
    Successfully built out/project-0.1.0-py3-none-any.whl
    "###);

    project
        .child("out")
        .child("project-0.1.0.tar.gz")
        .assert(predicate::path::is_file());
    project
        .child("out")
        .child("project-0.1.0-py3-none-any.whl")
        .assert(predicate::path::is_file());

    Ok(())
}
/// `uv build --sdist` should produce only the source distribution, never the wheel.
#[test]
fn build_sdist() -> Result<()> {
    let context = TestContext::new("3.12");

    // Normalize Windows path separators in snapshot output.
    let filters = context
        .filters()
        .into_iter()
        .chain([(r"\\\.", "")])
        .collect::<Vec<_>>();

    let project = context.temp_dir.child("project");

    let pyproject_toml = project.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio==3.7.0"]

        [build-system]
        requires = ["hatchling"]
        build-backend = "hatchling.build"
        "#,
    )?;

    project
        .child("src")
        .child("project")
        .child("__init__.py")
        .touch()?;
    project.child("README").touch()?;

    // Build the specified path.
    uv_snapshot!(&filters, context.build().arg("--sdist").current_dir(&project), @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Building source distribution...
    Successfully built dist/project-0.1.0.tar.gz
    "###);

    project
        .child("dist")
        .child("project-0.1.0.tar.gz")
        .assert(predicate::path::is_file());
    // No wheel should be produced.
    project
        .child("dist")
        .child("project-0.1.0-py3-none-any.whl")
        .assert(predicate::path::missing());

    Ok(())
}
/// `uv build --wheel` should produce only the wheel, never the source distribution.
#[test]
fn build_wheel() -> Result<()> {
    let context = TestContext::new("3.12");

    // Normalize Windows path separators in snapshot output.
    let filters = context
        .filters()
        .into_iter()
        .chain([(r"\\\.", "")])
        .collect::<Vec<_>>();

    let project = context.temp_dir.child("project");

    let pyproject_toml = project.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio==3.7.0"]

        [build-system]
        requires = ["hatchling"]
        build-backend = "hatchling.build"
        "#,
    )?;

    project
        .child("src")
        .child("project")
        .child("__init__.py")
        .touch()?;
    project.child("README").touch()?;

    // Build the specified path.
    uv_snapshot!(&filters, context.build().arg("--wheel").current_dir(&project), @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Building wheel...
    Successfully built dist/project-0.1.0-py3-none-any.whl
    "###);

    // No sdist should be produced.
    project
        .child("dist")
        .child("project-0.1.0.tar.gz")
        .assert(predicate::path::missing());
    project
        .child("dist")
        .child("project-0.1.0-py3-none-any.whl")
        .assert(predicate::path::is_file());

    Ok(())
}
/// `uv build --sdist --wheel` should produce both artifacts, with the wheel
/// built directly (not from the sdist).
#[test]
fn build_sdist_wheel() -> Result<()> {
    let context = TestContext::new("3.12");

    // Normalize Windows path separators in snapshot output.
    let filters = context
        .filters()
        .into_iter()
        .chain([(r"\\\.", "")])
        .collect::<Vec<_>>();

    let project = context.temp_dir.child("project");

    let pyproject_toml = project.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio==3.7.0"]

        [build-system]
        requires = ["hatchling"]
        build-backend = "hatchling.build"
        "#,
    )?;

    project
        .child("src")
        .child("project")
        .child("__init__.py")
        .touch()?;
    project.child("README").touch()?;

    // Build the specified path.
    uv_snapshot!(&filters, context.build().arg("--sdist").arg("--wheel").current_dir(&project), @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Building source distribution...
    Building wheel...
    Successfully built dist/project-0.1.0.tar.gz
    Successfully built dist/project-0.1.0-py3-none-any.whl
    "###);

    project
        .child("dist")
        .child("project-0.1.0.tar.gz")
        .assert(predicate::path::is_file());
    project
        .child("dist")
        .child("project-0.1.0-py3-none-any.whl")
        .assert(predicate::path::is_file());

    Ok(())
}
/// Building from an existing sdist archive: requires an explicit `--wheel`,
/// rejects `--sdist`, and rejects wheel files as inputs.
#[test]
fn build_wheel_from_sdist() -> Result<()> {
    let context = TestContext::new("3.12");

    // Normalize Windows path separators in snapshot output.
    let filters = context
        .filters()
        .into_iter()
        .chain([(r"\\\.", "")])
        .collect::<Vec<_>>();

    let project = context.temp_dir.child("project");

    let pyproject_toml = project.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio==3.7.0"]

        [build-system]
        requires = ["hatchling"]
        build-backend = "hatchling.build"
        "#,
    )?;

    project
        .child("src")
        .child("project")
        .child("__init__.py")
        .touch()?;
    project.child("README").touch()?;

    // Build the sdist.
    uv_snapshot!(&filters, context.build().arg("--sdist").current_dir(&project), @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Building source distribution...
    Successfully built dist/project-0.1.0.tar.gz
    "###);

    project
        .child("dist")
        .child("project-0.1.0.tar.gz")
        .assert(predicate::path::is_file());
    project
        .child("dist")
        .child("project-0.1.0-py3-none-any.whl")
        .assert(predicate::path::missing());

    // Error if `--wheel` is not specified.
    uv_snapshot!(&filters, context.build().arg("./dist/project-0.1.0.tar.gz").current_dir(&project), @r###"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
      × Failed to build `[TEMP_DIR]/project/dist/project-0.1.0.tar.gz`
      ╰─▶ Pass `--wheel` explicitly to build a wheel from a source distribution
    "###);

    // Error if `--sdist` is specified.
    uv_snapshot!(&filters, context.build().arg("./dist/project-0.1.0.tar.gz").arg("--sdist").current_dir(&project), @r###"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
      × Failed to build `[TEMP_DIR]/project/dist/project-0.1.0.tar.gz`
      ╰─▶ Building an `--sdist` from a source distribution is not supported
    "###);

    // Build the wheel from the sdist.
    uv_snapshot!(&filters, context.build().arg("./dist/project-0.1.0.tar.gz").arg("--wheel").current_dir(&project), @r###"
    success: true
    exit_code: 0

    ----- stdout -----

    ----- stderr -----
    Building wheel from source distribution...
    Successfully built dist/project-0.1.0-py3-none-any.whl
    "###);

    project
        .child("dist")
        .child("project-0.1.0.tar.gz")
        .assert(predicate::path::is_file());
    project
        .child("dist")
        .child("project-0.1.0-py3-none-any.whl")
        .assert(predicate::path::is_file());

    // Passing a wheel is an error.
    uv_snapshot!(&filters, context.build().arg("./dist/project-0.1.0-py3-none-any.whl").arg("--wheel").current_dir(&project), @r###"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
      × Failed to build `[TEMP_DIR]/project/dist/project-0.1.0-py3-none-any.whl`
      ╰─▶ `dist/project-0.1.0-py3-none-any.whl` is not a valid build source. Expected to receive a source directory, or a source distribution ending in one of: `.tar.gz`, `.zip`, `.tar.bz2`, `.tar.lz`, `.tar.lzma`, `.tar.xz`, `.tar.zst`, `.tar`, `.tbz`, `.tgz`, `.tlz`, or `.txz`.
    "###);

    Ok(())
}
/// A failing build backend should surface the backend's traceback and a clear
/// top-level error. The `setup.py` fixture is written with leading indentation
/// on purpose: that indentation is what triggers the `IndentationError`
/// asserted in the snapshot below.
#[test]
fn build_fail() -> Result<()> {
    let context = TestContext::new("3.12");

    // Normalize Windows path separators in snapshot output.
    let filters = context
        .filters()
        .into_iter()
        .chain([(r"\\\.", "")])
        .collect::<Vec<_>>();

    let project = context.temp_dir.child("project");

    let pyproject_toml = project.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio==3.7.0"]

        [build-system]
        requires = ["setuptools>=42"]
        build-backend = "setuptools.build_meta"
        "#,
    )?;

    project
        .child("src")
        .child("project")
        .child("__init__.py")
        .touch()?;
    project.child("README").touch()?;

    // NOTE: the indentation inside this string is intentional — it makes the
    // `setup.py` syntactically invalid, forcing the backend to fail.
    project.child("setup.py").write_str(
        r#"
        from setuptools import setup

        setup(
            name="project",
            version="0.1.0",
            packages=["project"],
            install_requires=["foo==3.7.0"],
        )
        "#,
    )?;

    // Build the specified path.
    uv_snapshot!(&filters, context.build().arg("project"), @r###"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    Building source distribution...
    Traceback (most recent call last):
      File "<string>", line 14, in <module>
      File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 328, in get_requires_for_build_sdist
        return self._get_build_requires(config_settings, requirements=[])
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
      File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 295, in _get_build_requires
        self.run_setup()
      File "[CACHE_DIR]/builds-v0/[TMP]/build_meta.py", line 311, in run_setup
        exec(code, locals())
      File "<string>", line 2
        from setuptools import setup
    IndentationError: unexpected indent
      × Failed to build `[TEMP_DIR]/project`
      ├─▶ The build backend returned an error
      ╰─▶ Call to `setuptools.build_meta.build_sdist` failed (exit status: 1)

      hint: This usually indicates a problem with the package or the build environment.
    "###);

    Ok(())
}
/// Build individual workspace members, all members, and verify the error
/// handling when no workspace (or an unknown member) is targeted.
#[test]
fn build_workspace() -> Result<()> {
    let context = TestContext::new("3.12");

    // Collapse the per-package prefixes (`[project]`, `[member]`) into a single
    // `[PKG]` marker: `--all` builds run concurrently, so the interleaving of
    // per-package log lines is non-deterministic.
    let filters = context
        .filters()
        .into_iter()
        .chain([
            (r"\\\.", ""),
            (r"\[project\]", "[PKG]"),
            (r"\[member\]", "[PKG]"),
        ])
        .collect::<Vec<_>>();

    let project = context.temp_dir.child("project");

    let pyproject_toml = project.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio==3.7.0"]
        [tool.uv.workspace]
        members = ["packages/*"]
        [build-system]
        requires = ["hatchling"]
        build-backend = "hatchling.build"
        "#,
    )?;

    project
        .child("src")
        .child("project")
        .child("__init__.py")
        .touch()?;
    project.child("README").touch()?;

    let member = project.child("packages").child("member");
    fs_err::create_dir_all(member.path())?;

    member.child("pyproject.toml").write_str(
        r#"
        [project]
        name = "member"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]
        [build-system]
        requires = ["hatchling"]
        build-backend = "hatchling.build"
        "#,
    )?;

    member
        .child("src")
        .child("member")
        .child("__init__.py")
        .touch()?;
    member.child("README").touch()?;

    // A "virtual" member with no `[build-system]`: per the snapshots below,
    // `--all` does not build it.
    let r#virtual = project.child("packages").child("virtual");
    fs_err::create_dir_all(r#virtual.path())?;

    r#virtual.child("pyproject.toml").write_str(
        r#"
        [project]
        name = "virtual"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]
        "#,
    )?;

    r#virtual
        .child("src")
        .child("virtual")
        .child("__init__.py")
        .touch()?;
    r#virtual.child("README").touch()?;

    // Build the member.
    uv_snapshot!(&filters, context.build().arg("--package").arg("member").current_dir(&project), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Building source distribution...
    Building wheel from source distribution...
    Successfully built dist/member-0.1.0.tar.gz
    Successfully built dist/member-0.1.0-py3-none-any.whl
    "###);

    project
        .child("dist")
        .child("member-0.1.0.tar.gz")
        .assert(predicate::path::is_file());
    project
        .child("dist")
        .child("member-0.1.0-py3-none-any.whl")
        .assert(predicate::path::is_file());

    // Build all packages.
    uv_snapshot!(&filters, context.build().arg("--all").arg("--no-build-logs").current_dir(&project), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    [PKG] Building source distribution...
    [PKG] Building source distribution...
    [PKG] Building wheel from source distribution...
    [PKG] Building wheel from source distribution...
    Successfully built dist/member-0.1.0.tar.gz
    Successfully built dist/member-0.1.0-py3-none-any.whl
    Successfully built dist/project-0.1.0.tar.gz
    Successfully built dist/project-0.1.0-py3-none-any.whl
    "###);

    project
        .child("dist")
        .child("member-0.1.0.tar.gz")
        .assert(predicate::path::is_file());
    project
        .child("dist")
        .child("member-0.1.0-py3-none-any.whl")
        .assert(predicate::path::is_file());
    project
        .child("dist")
        .child("project-0.1.0.tar.gz")
        .assert(predicate::path::is_file());
    project
        .child("dist")
        .child("project-0.1.0-py3-none-any.whl")
        .assert(predicate::path::is_file());

    // If a source is provided, discover the workspace from the source.
    uv_snapshot!(&filters, context.build().arg("./project").arg("--package").arg("member"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Building source distribution...
    Building wheel from source distribution...
    Successfully built project/dist/member-0.1.0.tar.gz
    Successfully built project/dist/member-0.1.0-py3-none-any.whl
    "###);

    // If a source is provided, discover the workspace from the source.
    uv_snapshot!(&filters, context.build().arg("./project").arg("--all").arg("--no-build-logs"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    [PKG] Building source distribution...
    [PKG] Building source distribution...
    [PKG] Building wheel from source distribution...
    [PKG] Building wheel from source distribution...
    Successfully built project/dist/member-0.1.0.tar.gz
    Successfully built project/dist/member-0.1.0-py3-none-any.whl
    Successfully built project/dist/project-0.1.0.tar.gz
    Successfully built project/dist/project-0.1.0-py3-none-any.whl
    "###);

    // Fail when `--package` is provided without a workspace.
    uv_snapshot!(&filters, context.build().arg("--package").arg("member"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: `--package` was provided, but no workspace was found
    Caused by: No `pyproject.toml` found in current directory or any parent directory
    "###);

    // Fail when `--all` is provided without a workspace.
    uv_snapshot!(&filters, context.build().arg("--all"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: `--all-packages` was provided, but no workspace was found
    Caused by: No `pyproject.toml` found in current directory or any parent directory
    "###);

    // Fail when `--package` is a non-existent member without a workspace.
    uv_snapshot!(&filters, context.build().arg("--package").arg("fail").current_dir(&project), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Package `fail` not found in workspace
    "###);

    Ok(())
}
/// `uv build --all` should keep building the remaining workspace members when
/// one member's build fails: `project` and `member_a` succeed even though
/// `member_b` errors out.
#[test]
fn build_all_with_failure() -> Result<()> {
    let context = TestContext::new("3.12");

    // Collapse the per-package prefixes into `[PKG]` since build log
    // interleaving across packages is non-deterministic.
    let filters = context
        .filters()
        .into_iter()
        .chain([
            (r"\\\.", ""),
            (r"\[project\]", "[PKG]"),
            (r"\[member-\w+\]", "[PKG]"),
        ])
        .collect::<Vec<_>>();

    let project = context.temp_dir.child("project");

    let pyproject_toml = project.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio==3.7.0"]
        [tool.uv.workspace]
        members = ["packages/*"]
        [build-system]
        requires = ["hatchling"]
        build-backend = "hatchling.build"
        "#,
    )?;

    project
        .child("src")
        .child("project")
        .child("__init__.py")
        .touch()?;
    project.child("README").touch()?;

    let member_a = project.child("packages").child("member_a");
    fs_err::create_dir_all(member_a.path())?;
    let member_b = project.child("packages").child("member_b");
    fs_err::create_dir_all(member_b.path())?;

    member_a.child("pyproject.toml").write_str(
        r#"
        [project]
        name = "member_a"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]
        [build-system]
        requires = ["hatchling"]
        build-backend = "hatchling.build"
        "#,
    )?;

    member_a
        .child("src")
        .child("member_a")
        .child("__init__.py")
        .touch()?;
    member_a.child("README").touch()?;

    member_b.child("pyproject.toml").write_str(
        r#"
        [project]
        name = "member_b"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]
        [build-system]
        requires = ["setuptools>=42"]
        build-backend = "setuptools.build_meta"
        "#,
    )?;

    member_b
        .child("src")
        .child("member_b")
        .child("__init__.py")
        .touch()?;
    member_b.child("README").touch()?;

    // member_b build should fail: the mis-indented `setup.py` below raises an
    // `IndentationError` when executed by the setuptools backend.
    member_b.child("setup.py").write_str(
        r#"
        from setuptools import setup
        setup(
            name="project",
            version="0.1.0",
            packages=["project"],
            install_requires=["foo==3.7.0"],
        )
        "#,
    )?;

    // Build all the packages
    uv_snapshot!(&filters, context.build().arg("--all").arg("--no-build-logs").current_dir(&project), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    [PKG] Building source distribution...
    [PKG] Building source distribution...
    [PKG] Building source distribution...
    [PKG] Building wheel from source distribution...
    [PKG] Building wheel from source distribution...
    Successfully built dist/member_a-0.1.0.tar.gz
    Successfully built dist/member_a-0.1.0-py3-none-any.whl
    × Failed to build `member-b @ [TEMP_DIR]/project/packages/member_b`
    ├─▶ The build backend returned an error
    ╰─▶ Call to `setuptools.build_meta.build_sdist` failed (exit status: 1)
    hint: This usually indicates a problem with the package or the build environment.
    Successfully built dist/project-0.1.0.tar.gz
    Successfully built dist/project-0.1.0-py3-none-any.whl
    "###);

    // project and member_a should be built, regardless of member_b build failure
    project
        .child("dist")
        .child("project-0.1.0.tar.gz")
        .assert(predicate::path::is_file());
    project
        .child("dist")
        .child("project-0.1.0-py3-none-any.whl")
        .assert(predicate::path::is_file());
    project
        .child("dist")
        .child("member_a-0.1.0.tar.gz")
        .assert(predicate::path::is_file());
    project
        .child("dist")
        .child("member_a-0.1.0-py3-none-any.whl")
        .assert(predicate::path::is_file());

    Ok(())
}
/// A `--build-constraint` that conflicts with `build-system.requires`
/// (`hatchling==0.1.0` vs. `hatchling>=1.0`) should fail resolution and
/// produce no artifacts.
#[test]
fn build_constraints() -> Result<()> {
    let context = TestContext::new("3.12");

    // Strip escaped Windows path separators so snapshots match on all platforms.
    let filters = context
        .filters()
        .into_iter()
        .chain([(r"\\\.", "")])
        .collect::<Vec<_>>();

    let project = context.temp_dir.child("project");

    // Pin the build backend to a version excluded by the project below.
    let constraints = project.child("constraints.txt");
    constraints.write_str("hatchling==0.1.0")?;

    let pyproject_toml = project.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["anyio==3.7.0"]
        [build-system]
        requires = ["hatchling>=1.0"]
        build-backend = "hatchling.build"
        "#,
    )?;

    project
        .child("src")
        .child("project")
        .child("__init__.py")
        .touch()?;
    project.child("README").touch()?;

    uv_snapshot!(&filters, context.build().arg("--build-constraint").arg("constraints.txt").current_dir(&project), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    Building source distribution...
    × Failed to build `[TEMP_DIR]/project`
    ├─▶ Failed to resolve requirements from `build-system.requires`
    ├─▶ No solution found when resolving: `hatchling>=1.0`
    ╰─▶ Because you require hatchling>=1.0 and hatchling==0.1.0, we can conclude that your requirements are unsatisfiable.
    "###);

    // No distributions should be produced on resolution failure.
    project
        .child("dist")
        .child("project-0.1.0.tar.gz")
        .assert(predicate::path::missing());
    project
        .child("dist")
        .child("project-0.1.0-py3-none-any.whl")
        .assert(predicate::path::missing());

    Ok(())
}
#[test]
fn build_sha() -> Result<()> {
let context = TestContext::new(DEFAULT_PYTHON_VERSION);
let filters = context
.filters()
.into_iter()
.chain([(r"\\\.", "")])
.collect::<Vec<_>>();
let project = context.temp_dir.child("project");
let pyproject_toml = project.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.8"
dependencies = ["anyio==3.7.0"]
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"
"#,
)?;
project
.child("src")
.child("project")
.child("__init__.py")
.touch()?;
project.child("README").touch()?;
// Reject an incorrect hash.
let constraints = project.child("constraints.txt");
constraints.write_str(indoc::indoc! {r"
hatchling==1.22.4 \
--hash=sha256:a248cb506794bececcddeddb1678bc722f9cfcacf02f98f7c0af6b9ed893caf2 \
--hash=sha256:e16da5bfc396af7b29daa3164851dd04991c994083f56cb054b5003675caecdc
packaging==24.0 \
--hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \
--hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9
# via hatchling
pathspec==0.12.1 \
--hash=sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 \
--hash=sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712
# via hatchling
pluggy==1.4.0 \
--hash=sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981 \
--hash=sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be
# via hatchling
tomli==2.0.1 \
--hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
--hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
# via hatchling
trove-classifiers==2024.3.3 \
--hash=sha256:3a84096861b385ec422c79995d1f6435dde47a9b63adaa3c886e53232ba7e6e0 \
--hash=sha256:df7edff9c67ff86b733628998330b180e81d125b1e096536d83ac0fd79673fdc
# via hatchling
"})?;
uv_snapshot!(&filters, context.build().arg("--build-constraint").arg("constraints.txt").current_dir(&project), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Building source distribution...
× Failed to build `[TEMP_DIR]/project`
├─▶ Failed to install requirements from `build-system.requires`
├─▶ Failed to download `hatchling==1.22.4`
╰─▶ Hash mismatch for `hatchling==1.22.4`
Expected:
sha256:a248cb506794bececcddeddb1678bc722f9cfcacf02f98f7c0af6b9ed893caf2
sha256:e16da5bfc396af7b29daa3164851dd04991c994083f56cb054b5003675caecdc
Computed:
sha256:f56da5bfc396af7b29daa3164851dd04991c994083f56cb054b5003675caecdc
"###);
project
.child("dist")
.child("project-0.1.0.tar.gz")
.assert(predicate::path::missing());
project
.child("dist")
.child("project-0.1.0-py3-none-any.whl")
.assert(predicate::path::missing());
fs_err::remove_dir_all(project.child("dist"))?;
// Reject a missing hash with `--requires-hashes`.
uv_snapshot!(&filters, context.build().arg("--build-constraint").arg("constraints.txt").arg("--require-hashes").current_dir(&project), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Building source distribution...
× Failed to build `[TEMP_DIR]/project`
├─▶ Failed to install requirements from `build-system.requires`
├─▶ Failed to download `hatchling==1.22.4`
╰─▶ Hash mismatch for `hatchling==1.22.4`
Expected:
sha256:a248cb506794bececcddeddb1678bc722f9cfcacf02f98f7c0af6b9ed893caf2
sha256:e16da5bfc396af7b29daa3164851dd04991c994083f56cb054b5003675caecdc
Computed:
sha256:f56da5bfc396af7b29daa3164851dd04991c994083f56cb054b5003675caecdc
"###);
project
.child("dist")
.child("project-0.1.0.tar.gz")
.assert(predicate::path::missing());
project
.child("dist")
.child("project-0.1.0-py3-none-any.whl")
.assert(predicate::path::missing());
fs_err::remove_dir_all(project.child("dist"))?;
// Reject a missing hash.
let constraints = project.child("constraints.txt");
constraints.write_str("hatchling==1.22.4")?;
uv_snapshot!(&filters, context.build().arg("--build-constraint").arg("constraints.txt").arg("--require-hashes").current_dir(&project), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Building source distribution...
× Failed to build `[TEMP_DIR]/project`
├─▶ Failed to resolve requirements from `build-system.requires`
├─▶ No solution found when resolving: `hatchling`
╰─▶ In `--require-hashes` mode, all requirements must be pinned upfront with `==`, but found: `hatchling`
"###);
project
.child("dist")
.child("project-0.1.0.tar.gz")
.assert(predicate::path::missing());
project
.child("dist")
.child("project-0.1.0-py3-none-any.whl")
.assert(predicate::path::missing());
fs_err::remove_dir_all(project.child("dist"))?;
// Accept a correct hash.
let constraints = project.child("constraints.txt");
constraints.write_str(indoc::indoc! {r"
hatchling==1.22.4 \
--hash=sha256:8a2dcec96d7fb848382ef5848e5ac43fdae641f35a08a3fab5116bd495f3416e \
--hash=sha256:f56da5bfc396af7b29daa3164851dd04991c994083f56cb054b5003675caecdc
packaging==24.0 \
--hash=sha256:2ddfb553fdf02fb784c234c7ba6ccc288296ceabec964ad2eae3777778130bc5 \
--hash=sha256:eb82c5e3e56209074766e6885bb04b8c38a0c015d0a30036ebe7ece34c9989e9
# via hatchling
pathspec==0.12.1 \
--hash=sha256:a0d503e138a4c123b27490a4f7beda6a01c6f288df0e4a8b79c7eb0dc7b4cc08 \
--hash=sha256:a482d51503a1ab33b1c67a6c3813a26953dbdc71c31dacaef9a838c4e29f5712
# via hatchling
pluggy==1.4.0 \
--hash=sha256:7db9f7b503d67d1c5b95f59773ebb58a8c1c288129a88665838012cfb07b8981 \
--hash=sha256:8c85c2876142a764e5b7548e7d9a0e0ddb46f5185161049a79b7e974454223be
# via hatchling
tomli==2.0.1 \
--hash=sha256:939de3e7a6161af0c887ef91b7d41a53e7c5a1ca976325f429cb46ea9bc30ecc \
--hash=sha256:de526c12914f0c550d15924c62d72abc48d6fe7364aa87328337a31007fe8a4f
# via hatchling
trove-classifiers==2024.3.3 \
--hash=sha256:3a84096861b385ec422c79995d1f6435dde47a9b63adaa3c886e53232ba7e6e0 \
--hash=sha256:df7edff9c67ff86b733628998330b180e81d125b1e096536d83ac0fd79673fdc
# via hatchling
"})?;
uv_snapshot!(&filters, context.build().arg("--build-constraint").arg("constraints.txt").current_dir(&project), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Building source distribution...
Building wheel from source distribution...
Successfully built dist/project-0.1.0.tar.gz
Successfully built dist/project-0.1.0-py3-none-any.whl
"###);
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/python_module.rs | crates/uv/tests/it/python_module.rs | use assert_cmd::assert::OutputAssertExt;
use assert_fs::prelude::{FileTouch, FileWriteStr, PathChild, PathCreateDir};
use indoc::{formatdoc, indoc};
use uv_fs::Simplified;
use uv_static::EnvVars;
use crate::common::{TestContext, site_packages_path, uv_snapshot};
/// Filter the user scheme, which differs between Windows and Unix.
fn user_scheme_bin_filter() -> (String, String) {
    // Windows keeps user-scheme scripts under a per-version Python directory
    // inside the user config dir; Unix uses `~/.local`.
    let pattern = if cfg!(windows) {
        r"\[USER_CONFIG_DIR\][\\/]Python[\\/]Python\d+"
    } else {
        r"\[HOME\]/\.local"
    };
    (pattern.to_string(), "[USER_SCHEME]".to_string())
}
// Python snippet shared by the tests below: prints `uv.find_uv_bin()` so the
// resolved binary path can be asserted via snapshots.
//
// Override sys.base_prefix with a path that's guaranteed not to contain
// uv, as otherwise the tests may pick up an already installed uv
// when testing against the system Python install. See #15368.
const TEST_SCRIPT: &str = "
import sys
import uv
sys.base_prefix = '/dev/null'
print(uv.find_uv_bin())
";
/// `uv.find_uv_bin()` should resolve to the `uv` executable installed inside
/// the active virtual environment.
#[test]
fn find_uv_bin_venv() {
    let ctx = TestContext::new("3.12")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin()
        .with_filtered_exe_suffix()
        .with_filter(user_scheme_bin_filter())
        // Target installs always use "bin" on all platforms. On Windows,
        // `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
        .with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));

    // Install the fake `uv` package into the virtual environment.
    let fake_uv = ctx.workspace_root.join("test/packages/fake-uv");
    uv_snapshot!(ctx.filters(), ctx.pip_install()
        .arg(&fake_uv), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
    + uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
    "
    );

    // We should find the binary in the virtual environment
    uv_snapshot!(ctx.filters(), ctx.python_command()
        .arg("-c")
        .arg(TEST_SCRIPT), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [VENV]/[BIN]/uv
    ----- stderr -----
    "
    );
}
/// `uv.find_uv_bin()` should locate the binary from a `pip install --target`
/// directory when that directory is placed on `PYTHONPATH`.
#[test]
fn find_uv_bin_target() {
    let context = TestContext::new("3.12")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin()
        .with_filtered_exe_suffix()
        .with_filter(user_scheme_bin_filter())
        // Target installs always use "bin" on all platforms. On Windows,
        // `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
        .with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));

    // Install in a target directory
    uv_snapshot!(context.filters(), context.pip_install()
        .arg(context.workspace_root.join("test/packages/fake-uv"))
        .arg("--target")
        .arg("target"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON]
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
    + uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
    "
    );

    // We should find the binary in the target directory
    uv_snapshot!(context.filters(), context.python_command()
        .arg("-c")
        .arg(TEST_SCRIPT)
        .env(EnvVars::PYTHONPATH, context.temp_dir.child("target").path()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [TEMP_DIR]/target/[BIN]/uv
    ----- stderr -----
    "
    );
}
/// `uv.find_uv_bin()` should locate the binary from a `pip install --prefix`
/// layout when the prefix's site-packages is placed on `PYTHONPATH`.
#[test]
fn find_uv_bin_prefix() {
    let context = TestContext::new("3.12")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin()
        .with_filtered_exe_suffix()
        .with_filter(user_scheme_bin_filter())
        // Target installs always use "bin" on all platforms. On Windows,
        // `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
        .with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));

    // Install in a prefix directory
    let prefix = context.temp_dir.child("prefix");
    uv_snapshot!(context.filters(), context.pip_install()
        .arg(context.workspace_root.join("test/packages/fake-uv"))
        .arg("--prefix")
        .arg(prefix.path()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using CPython 3.12.[X] interpreter at: .venv/[BIN]/[PYTHON]
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
    + uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
    "
    );

    // We should find the binary in the prefix directory
    uv_snapshot!(context.filters(), context.python_command()
        .arg("-c")
        .arg(TEST_SCRIPT)
        .env(
            EnvVars::PYTHONPATH,
            site_packages_path(&context.temp_dir.join("prefix"), "python3.12"),
        ), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [TEMP_DIR]/prefix/[BIN]/uv
    ----- stderr -----
    "
    );
}
/// `uv.find_uv_bin()` should fall back to `sys.base_prefix` when the binary is
/// only present in the base environment (simulated by mutating `base_prefix`
/// to point at a second venv that has fake-uv installed).
#[test]
fn find_uv_bin_base_prefix() {
    let context = TestContext::new("3.12")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin()
        .with_filtered_exe_suffix()
        .with_filter(user_scheme_bin_filter())
        // Target installs always use "bin" on all platforms. On Windows,
        // `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
        .with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));

    // Test base prefix fallback by mutating sys.base_prefix
    // First, create a "base" environment with fake-uv installed
    let base_venv = context.temp_dir.child("base-venv");
    context.venv().arg(base_venv.path()).assert().success();

    // Install fake-uv in the "base" venv
    uv_snapshot!(context.filters(), context.pip_install()
        .arg("--python")
        .arg(base_venv.path())
        .arg(context.workspace_root.join("test/packages/fake-uv")), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Using Python 3.12.[X] environment at: base-venv
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
    + uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
    "
    );

    // Recreate the default (empty) venv so `uv` is NOT found there.
    context.venv().assert().success();

    // Mutate `base_prefix` to simulate lookup in a system Python installation
    uv_snapshot!(context.filters(), context.python_command()
        .arg("-c")
        .arg(format!(r#"import sys, uv; sys.base_prefix = "{}"; print(uv.find_uv_bin())"#, base_venv.path().portable_display()))
        .env(EnvVars::PYTHONPATH, site_packages_path(base_venv.path(), "python3.12")), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [TEMP_DIR]/base-venv/[BIN]/uv
    ----- stderr -----
    "
    );
}
/// `uv.find_uv_bin()` should resolve to the binary inside the ephemeral
/// `--with` overlay environment created by `uv run`.
#[test]
fn find_uv_bin_in_ephemeral_environment() -> anyhow::Result<()> {
    let context = TestContext::new("3.12")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin()
        .with_filtered_exe_suffix()
        .with_filter(user_scheme_bin_filter())
        // Target installs always use "bin" on all platforms. On Windows,
        // `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
        .with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));

    // Create a minimal pyproject.toml
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! { r#"
        [project]
        name = "test-project"
        version = "1.0.0"
        requires-python = ">=3.8"
        dependencies = []
        "#
    })?;

    // We should find the binary in an ephemeral `--with` environment
    uv_snapshot!(context.filters(), context.run()
        .arg("--with")
        .arg(context.workspace_root.join("test/packages/fake-uv"))
        .arg("python")
        .arg("-c")
        .arg(TEST_SCRIPT), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [CACHE_DIR]/archive-v0/[HASH]/[BIN]/uv
    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited in [TIME]
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
    + uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
    "
    );

    Ok(())
}
/// When `uv` is a dependency of the project itself, `uv.find_uv_bin()` should
/// find the binary in the project environment even when running inside an
/// ephemeral `--with` overlay that does not contain it.
#[test]
fn find_uv_bin_in_parent_of_ephemeral_environment() -> anyhow::Result<()> {
    let context = TestContext::new("3.12")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin()
        .with_filtered_exe_suffix()
        .with_filter(user_scheme_bin_filter())
        // Target installs always use "bin" on all platforms. On Windows,
        // `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
        .with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));

    // Add the fake-uv package as a dependency
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(&formatdoc! { r#"
        [project]
        name = "test-project"
        version = "1.0.0"
        requires-python = ">=3.8"
        dependencies = ["uv"]
        [tool.uv.sources]
        uv = {{ path = "{}" }}
        "#,
        context.workspace_root.join("test/packages/fake-uv").portable_display()
    })?;

    // When running in an ephemeral environment, we should find the binary in the project
    // environment
    uv_snapshot!(context.filters(), context.run()
        .arg("--with")
        .arg("anyio")
        .arg("python")
        .arg("-c")
        .arg(TEST_SCRIPT),
        @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [VENV]/[BIN]/uv
    ----- stderr -----
    Resolved 2 packages in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
    + uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
    Resolved 3 packages in [TIME]
    Prepared 3 packages in [TIME]
    Installed 3 packages in [TIME]
    + anyio==4.3.0
    + idna==3.6
    + sniffio==1.3.1
    "
    );

    Ok(())
}
/// The virtual environment binary takes precedence over the user-scheme bin
/// directory; once the venv copy is removed, lookup falls back to the
/// user-scheme location.
#[test]
fn find_uv_bin_user_bin() {
    let context = TestContext::new("3.12")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin()
        .with_filtered_exe_suffix()
        .with_filter(user_scheme_bin_filter())
        // Target installs always use "bin" on all platforms. On Windows,
        // `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
        .with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));

    // Add uv to `~/.local/bin`
    let bin = if cfg!(unix) {
        context.home_dir.child(".local").child("bin")
    } else {
        context
            .user_config_dir
            .child("Python")
            .child("Python312")
            .child("Scripts")
    };
    bin.create_dir_all().unwrap();
    // An empty file is enough: only the path's existence is checked.
    bin.child(format!("uv{}", std::env::consts::EXE_SUFFIX))
        .touch()
        .unwrap();

    // Install in a virtual environment
    uv_snapshot!(context.filters(), context.pip_install()
        .arg(context.workspace_root.join("test/packages/fake-uv")), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
    + uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
    "
    );

    // We should find the binary in the virtual environment first
    uv_snapshot!(context.filters(), context.python_command()
        .arg("-c")
        .arg(TEST_SCRIPT), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [VENV]/[BIN]/uv
    ----- stderr -----
    "
    );

    // Remove the virtual environment one for some reason
    fs_err::remove_file(if cfg!(unix) {
        context.venv.child("bin").child("uv")
    } else {
        context.venv.child("Scripts").child("uv.exe")
    })
    .unwrap();

    // We should find the binary in the bin now
    uv_snapshot!(context.filters(), context.python_command()
        .arg("-c")
        .arg(TEST_SCRIPT), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [USER_SCHEME]/[BIN]/uv
    ----- stderr -----
    "
    );
}
/// When no `uv` binary exists anywhere, `find_uv_bin` should raise
/// `UvNotFound` listing every location that was searched.
#[test]
fn find_uv_bin_error_message() {
    let mut context = TestContext::new("3.12")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin()
        .with_filtered_exe_suffix()
        .with_filter(user_scheme_bin_filter())
        // Target installs always use "bin" on all platforms. On Windows,
        // `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
        .with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));

    // Add filters for Python bin directories using with_filtered_path
    // This inserts at the beginning, so these filters are applied first
    let python_info: Vec<_> = context.python_versions.clone();
    for (version, executable) in &python_info {
        let bin_dir = if cfg!(windows) {
            // On Windows, the Python executable is in the root, not the bin directory
            executable
                .canonicalize()
                .unwrap()
                .parent()
                .unwrap()
                .join("Scripts")
        } else {
            executable
                .canonicalize()
                .unwrap()
                .parent()
                .unwrap()
                .to_path_buf()
        };
        context = context.with_filtered_path(&bin_dir, &format!("PYTHON-BIN-{version}"));
    }

    // Install in a virtual environment
    uv_snapshot!(context.filters(), context.pip_install()
        .arg(context.workspace_root.join("test/packages/fake-uv")), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
    + uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
    "
    );

    // Remove the virtual environment executable for some reason
    fs_err::remove_file(if cfg!(unix) {
        context.venv.child("bin").child("uv")
    } else {
        context.venv.child("Scripts").child("uv.exe")
    })
    .unwrap();

    // The error enumerates the full search order: venv, base prefix (mutated
    // to /dev/null by TEST_SCRIPT), package-adjacent bin, then user scheme.
    uv_snapshot!(context.filters(), context.python_command()
        .arg("-c")
        .arg(TEST_SCRIPT), @r#"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
    Traceback (most recent call last):
    File "<string>", line 6, in <module>
    File "[SITE_PACKAGES]/uv/_find_uv.py", line 50, in find_uv_bin
    raise UvNotFound(
    uv._find_uv.UvNotFound: Could not find the uv binary in any of the following locations:
    - [VENV]/[BIN]
    - /dev/null/[BIN]
    - [SITE_PACKAGES]/[BIN]
    - [USER_SCHEME]/[BIN]
    "#
    );
}
/// `uv.find_uv_bin()` should resolve the venv binary on (EOL) Python 3.8.
#[cfg(feature = "python-eol")]
#[test]
fn find_uv_bin_py38() {
    let ctx = TestContext::new("3.8")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin()
        .with_filtered_exe_suffix()
        .with_filter(user_scheme_bin_filter())
        // Target installs always use "bin" on all platforms. On Windows,
        // `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
        .with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));

    // Install the fake `uv` package into the virtual environment.
    let fake_uv = ctx.workspace_root.join("test/packages/fake-uv");
    uv_snapshot!(ctx.filters(), ctx.pip_install()
        .arg(&fake_uv), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
    + uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
    "
    );

    // We should find the binary in the virtual environment
    uv_snapshot!(ctx.filters(), ctx.python_command()
        .arg("-c")
        .arg(TEST_SCRIPT), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [VENV]/[BIN]/uv
    ----- stderr -----
    "
    );
}
/// `uv.find_uv_bin()` should resolve the venv binary on Python 3.9.
#[test]
fn find_uv_bin_py39() {
    let ctx = TestContext::new("3.9")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin()
        .with_filtered_exe_suffix()
        .with_filter(user_scheme_bin_filter())
        // Target installs always use "bin" on all platforms. On Windows,
        // `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
        .with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));

    // Install the fake `uv` package into the virtual environment.
    let fake_uv = ctx.workspace_root.join("test/packages/fake-uv");
    uv_snapshot!(ctx.filters(), ctx.pip_install()
        .arg(&fake_uv), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
    + uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
    "
    );

    // We should find the binary in the virtual environment
    uv_snapshot!(ctx.filters(), ctx.python_command()
        .arg("-c")
        .arg(TEST_SCRIPT), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [VENV]/[BIN]/uv
    ----- stderr -----
    "
    );
}
/// `uv.find_uv_bin()` should resolve the venv binary on Python 3.10.
#[test]
fn find_uv_bin_py310() {
    let ctx = TestContext::new("3.10")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin()
        .with_filtered_exe_suffix()
        .with_filter(user_scheme_bin_filter())
        // Target installs always use "bin" on all platforms. On Windows,
        // `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
        .with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));

    // Install the fake `uv` package into the virtual environment.
    let fake_uv = ctx.workspace_root.join("test/packages/fake-uv");
    uv_snapshot!(ctx.filters(), ctx.pip_install()
        .arg(&fake_uv), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
    + uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
    "
    );

    // We should find the binary in the virtual environment
    uv_snapshot!(ctx.filters(), ctx.python_command()
        .arg("-c")
        .arg(TEST_SCRIPT), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [VENV]/[BIN]/uv
    ----- stderr -----
    "
    );
}
/// `uv.find_uv_bin()` should resolve the venv binary on Python 3.11.
#[test]
fn find_uv_bin_py311() {
    let ctx = TestContext::new("3.11")
        .with_filtered_python_names()
        .with_filtered_virtualenv_bin()
        .with_filtered_exe_suffix()
        .with_filter(user_scheme_bin_filter())
        // Target installs always use "bin" on all platforms. On Windows,
        // `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
        .with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));

    // Install the fake `uv` package into the virtual environment.
    let fake_uv = ctx.workspace_root.join("test/packages/fake-uv");
    uv_snapshot!(ctx.filters(), ctx.pip_install()
        .arg(&fake_uv), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
    + uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
    "
    );

    // We should find the binary in the virtual environment
    uv_snapshot!(ctx.filters(), ctx.python_command()
        .arg("-c")
        .arg(TEST_SCRIPT), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [VENV]/[BIN]/uv
    ----- stderr -----
    "
    );
}
#[test]
fn find_uv_bin_py312() {
let context = TestContext::new("3.12")
.with_filtered_python_names()
.with_filtered_virtualenv_bin()
.with_filtered_exe_suffix()
.with_filter(user_scheme_bin_filter())
// Target installs always use "bin" on all platforms. On Windows,
// `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
.with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));
// Install in a virtual environment
uv_snapshot!(context.filters(), context.pip_install()
.arg(context.workspace_root.join("test/packages/fake-uv")), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
"
);
// We should find the binary in the virtual environment
uv_snapshot!(context.filters(), context.python_command()
.arg("-c")
.arg(TEST_SCRIPT), @r"
success: true
exit_code: 0
----- stdout -----
[VENV]/[BIN]/uv
----- stderr -----
"
);
}
#[test]
fn find_uv_bin_py313() {
let context = TestContext::new("3.13")
.with_filtered_python_names()
.with_filtered_virtualenv_bin()
.with_filtered_exe_suffix()
.with_filter(user_scheme_bin_filter())
// Target installs always use "bin" on all platforms. On Windows,
// `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
.with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));
// Install in a virtual environment
uv_snapshot!(context.filters(), context.pip_install()
.arg(context.workspace_root.join("test/packages/fake-uv")), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
"
);
// We should find the binary in the virtual environment
uv_snapshot!(context.filters(), context.python_command()
.arg("-c")
.arg(TEST_SCRIPT), @r"
success: true
exit_code: 0
----- stdout -----
[VENV]/[BIN]/uv
----- stderr -----
"
);
}
#[test]
fn find_uv_bin_py314() {
let context = TestContext::new("3.14")
.with_filtered_python_names()
.with_filtered_virtualenv_bin()
.with_filtered_exe_suffix()
.with_filter(user_scheme_bin_filter())
// Target installs always use "bin" on all platforms. On Windows,
// `with_filtered_virtualenv_bin` only filters "Scripts", not "bin"
.with_filter((r"[\\/]bin".to_string(), "/[BIN]".to_string()));
// Install in a virtual environment
uv_snapshot!(context.filters(), context.pip_install()
.arg(context.workspace_root.join("test/packages/fake-uv")), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ uv==0.1.0 (from file://[WORKSPACE]/test/packages/fake-uv)
"
);
// We should find the binary in the virtual environment
uv_snapshot!(context.filters(), context.python_command()
.arg("-c")
.arg(TEST_SCRIPT), @r"
success: true
exit_code: 0
----- stdout -----
[VENV]/[BIN]/uv
----- stderr -----
"
);
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/pip_freeze.rs | crates/uv/tests/it/pip_freeze.rs | use anyhow::Result;
use assert_cmd::prelude::*;
use assert_fs::fixture::ChildPath;
use assert_fs::prelude::*;
use crate::common::{TestContext, uv_snapshot};
/// Sync two pinned packages into an environment, then assert that
/// `pip freeze --strict` reports exactly those pins (names normalized to
/// lowercase, sorted).
#[test]
fn freeze_many() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?;

    // Run `pip sync`.
    context
        .pip_sync()
        .arg(requirements_txt.path())
        .assert()
        .success();

    // Run `pip freeze`.
    uv_snapshot!(context.pip_freeze()
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    markupsafe==2.1.3
    tomli==2.0.1
    ----- stderr -----
    "###
    );

    Ok(())
}
/// List a package with multiple installed distributions in a virtual environment.
///
/// Two separate environments are synced with different `pip` versions, then the
/// second environment's `.dist-info` directory is copied into the first to
/// simulate a corrupted environment containing two distributions of the same
/// package. `pip freeze` should list both versions and warn about the duplicates.
#[test]
#[cfg(unix)]
fn freeze_duplicate() -> Result<()> {
    use uv_fs::copy_dir_all;

    // Sync a version of `pip` into a virtual environment.
    let context1 = TestContext::new("3.12");
    let requirements_txt = context1.temp_dir.child("requirements.txt");
    requirements_txt.write_str("pip==21.3.1")?;

    // Run `pip sync`.
    context1
        .pip_sync()
        .arg(requirements_txt.path())
        .assert()
        .success();

    // Sync a different version of `pip` into a virtual environment.
    let context2 = TestContext::new("3.12");
    let requirements_txt = context2.temp_dir.child("requirements.txt");
    requirements_txt.write_str("pip==22.1.1")?;

    // Run `pip sync`.
    context2
        .pip_sync()
        .arg(requirements_txt.path())
        .assert()
        .success();

    // Copy the virtual environment to a new location.
    copy_dir_all(
        context2.site_packages().join("pip-22.1.1.dist-info"),
        context1.site_packages().join("pip-22.1.1.dist-info"),
    )?;

    // Run `pip freeze`.
    uv_snapshot!(context1.filters(), context1.pip_freeze().arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    pip==21.3.1
    pip==22.1.1
    ----- stderr -----
    warning: The package `pip` has multiple installed distributions:
    - [SITE_PACKAGES]/pip-21.3.1.dist-info
    - [SITE_PACKAGES]/pip-22.1.1.dist-info
    "###
    );

    Ok(())
}
/// List a direct URL package in a virtual environment.
///
/// The direct-URL package should be rendered as `name @ url` rather than
/// `name==version`. The `--strict` warnings are expected: only `anyio` and
/// `iniconfig` were synced, so anyio's dependencies are missing.
#[test]
fn freeze_url() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("anyio\niniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl")?;

    // Run `pip sync`.
    context
        .pip_sync()
        .arg(requirements_txt.path())
        .assert()
        .success();

    // Run `pip freeze`.
    uv_snapshot!(context.pip_freeze()
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    anyio==4.3.0
    iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl
    ----- stderr -----
    warning: The package `anyio` requires `idna>=2.8`, but it's not installed
    warning: The package `anyio` requires `sniffio>=1.1`, but it's not installed
    "###
    );

    Ok(())
}
/// Freeze an environment containing an editable install. The editable package
/// should be rendered as `-e <url>`, and omitted entirely when
/// `--exclude-editable` is passed.
#[test]
fn freeze_with_editable() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(&format!(
        "anyio\n-e {}",
        context
            .workspace_root
            .join("test/packages/poetry_editable")
            .display()
    ))?;

    // Run `pip sync`.
    context
        .pip_sync()
        .arg(requirements_txt.path())
        .assert()
        .success();

    // Run `pip freeze`.
    uv_snapshot!(context.filters(), context.pip_freeze()
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    anyio==4.3.0
    -e file://[WORKSPACE]/test/packages/poetry_editable
    ----- stderr -----
    warning: The package `anyio` requires `idna>=2.8`, but it's not installed
    warning: The package `anyio` requires `sniffio>=1.1`, but it's not installed
    "###
    );

    // Exclude editable package.
    uv_snapshot!(context.filters(), context.pip_freeze()
        .arg("--exclude-editable")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    anyio==4.3.0
    ----- stderr -----
    warning: The package `anyio` requires `idna>=2.8`, but it's not installed
    warning: The package `anyio` requires `sniffio>=1.1`, but it's not installed
    "###
    );

    Ok(())
}
/// Show an `.egg-info` package in a virtual environment.
///
/// The `.egg-info` directory and its metadata files are created by hand (no
/// installer involved); `pip freeze` should still discover and report it.
#[test]
fn freeze_with_egg_info() -> Result<()> {
    let context = TestContext::new("3.12");

    let site_packages = ChildPath::new(context.site_packages());

    // Manually create an `.egg-info` directory.
    site_packages
        .child("zstandard-0.22.0-py3.12.egg-info")
        .create_dir_all()?;
    site_packages
        .child("zstandard-0.22.0-py3.12.egg-info")
        .child("top_level.txt")
        .write_str("zstd")?;
    site_packages
        .child("zstandard-0.22.0-py3.12.egg-info")
        .child("SOURCES.txt")
        .write_str("")?;
    site_packages
        .child("zstandard-0.22.0-py3.12.egg-info")
        .child("PKG-INFO")
        .write_str("")?;
    site_packages
        .child("zstandard-0.22.0-py3.12.egg-info")
        .child("dependency_links.txt")
        .write_str("")?;
    site_packages
        .child("zstandard-0.22.0-py3.12.egg-info")
        .child("entry_points.txt")
        .write_str("")?;

    // Manually create the package directory.
    site_packages.child("zstd").create_dir_all()?;
    site_packages
        .child("zstd")
        .child("__init__.py")
        .write_str("")?;

    // Run `pip freeze`.
    uv_snapshot!(context.filters(), context.pip_freeze(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    zstandard==0.22.0
    ----- stderr -----
    "###);

    Ok(())
}
/// Show an `.egg-info` package in a virtual environment. In this case, the filename omits the
/// Python version.
#[test]
fn freeze_with_egg_info_no_py() -> Result<()> {
    let context = TestContext::new("3.12");
    let site_packages = ChildPath::new(context.site_packages());

    // Manually create an `.egg-info` directory (no `-pyX.Y` suffix in the name).
    let egg_info = site_packages.child("zstandard-0.22.0.egg-info");
    egg_info.create_dir_all()?;
    egg_info.child("top_level.txt").write_str("zstd")?;
    for metadata_file in [
        "SOURCES.txt",
        "PKG-INFO",
        "dependency_links.txt",
        "entry_points.txt",
    ] {
        egg_info.child(metadata_file).write_str("")?;
    }

    // Manually create the package directory.
    let package_dir = site_packages.child("zstd");
    package_dir.create_dir_all()?;
    package_dir.child("__init__.py").write_str("")?;

    // Run `pip freeze`; the version-less `.egg-info` name must still be parsed.
    uv_snapshot!(context.filters(), context.pip_freeze(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    zstandard==0.22.0
    ----- stderr -----
    "###);

    Ok(())
}
/// Show a set of `.egg-info` files in a virtual environment.
///
/// Covers `.egg-info` as a single metadata *file* (not a directory), both with
/// and without a `-pyX.Y` suffix in the filename.
#[test]
fn freeze_with_egg_info_file() -> Result<()> {
    let context = TestContext::new("3.11");

    let site_packages = ChildPath::new(context.site_packages());

    // Manually create a `.egg-info` file with python version.
    site_packages
        .child("pycurl-7.45.1-py3.11.egg-info")
        .write_str(indoc::indoc! {"
        Metadata-Version: 1.1
        Name: pycurl
        Version: 7.45.1
        "})?;

    // Manually create another `.egg-info` file with no python version.
    site_packages
        .child("vtk-9.2.6.egg-info")
        .write_str(indoc::indoc! {"
        Metadata-Version: 1.1
        Name: vtk
        Version: 9.2.6
        "})?;

    // Run `pip freeze`.
    uv_snapshot!(context.filters(), context.pip_freeze(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    pycurl==7.45.1
    vtk==9.2.6
    ----- stderr -----
    "###);

    Ok(())
}
/// Freeze a legacy (setuptools `develop`-style) editable install: a
/// `.egg-link` file in site-packages pointing at a project directory that
/// contains an `.egg-info`. It should be rendered as `-e <path>`.
#[test]
fn freeze_with_legacy_editable() -> Result<()> {
    let context = TestContext::new("3.12");

    let site_packages = ChildPath::new(context.site_packages());

    let target = context.temp_dir.child("zstandard_project");
    target.child("zstd").create_dir_all()?;
    target.child("zstd").child("__init__.py").write_str("")?;

    target.child("zstandard.egg-info").create_dir_all()?;
    target
        .child("zstandard.egg-info")
        .child("PKG-INFO")
        .write_str(
            "Metadata-Version: 2.1
    Name: zstandard
    Version: 0.22.0
    ",
        )?;

    // The `.egg-link` file's contents are the path to the project directory.
    site_packages
        .child("zstandard.egg-link")
        .write_str(target.path().to_str().unwrap())?;

    // Run `pip freeze`.
    uv_snapshot!(context.filters(), context.pip_freeze(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    -e [TEMP_DIR]/zstandard_project
    ----- stderr -----
    "###);

    Ok(())
}
/// Freeze packages that were synced into a `--target` directory by pointing
/// `pip freeze --path` at that directory instead of the active environment.
#[test]
fn freeze_path() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?;

    let target = context.temp_dir.child("install-path");

    // Run `pip sync`.
    context
        .pip_sync()
        .arg(requirements_txt.path())
        .arg("--target")
        .arg(target.path())
        .assert()
        .success();

    // Run `pip freeze`.
    uv_snapshot!(context.filters(), context.pip_freeze()
        .arg("--path")
        .arg(target.path()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    markupsafe==2.1.3
    tomli==2.0.1
    ----- stderr -----
    ");

    Ok(())
}
/// Freeze across multiple `--path` directories at once: packages from both
/// directories are merged and deduplicated in the output.
#[test]
fn freeze_multiple_paths() -> Result<()> {
    let context = TestContext::new("3.12");

    // Two overlapping requirement sets: `markupsafe` appears in both.
    let requirements_txt1 = context.temp_dir.child("requirements1.txt");
    requirements_txt1.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?;
    let requirements_txt2 = context.temp_dir.child("requirements2.txt");
    requirements_txt2.write_str("MarkupSafe==2.1.3\nrequests==2.31.0")?;

    let target1 = context.temp_dir.child("install-path1");
    let target2 = context.temp_dir.child("install-path2");

    // Sync each requirements file into its own target directory.
    context
        .pip_sync()
        .arg(requirements_txt1.path())
        .arg("--target")
        .arg(target1.path())
        .assert()
        .success();
    context
        .pip_sync()
        .arg(requirements_txt2.path())
        .arg("--target")
        .arg(target2.path())
        .assert()
        .success();

    // Run `pip freeze` over both paths; `markupsafe` is listed only once.
    uv_snapshot!(context.filters(), context.pip_freeze().arg("--path").arg(target1.path()).arg("--path").arg(target2.path()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    markupsafe==2.1.3
    requests==2.31.0
    tomli==2.0.1
    ----- stderr -----
    ");

    Ok(())
}
// We follow pip in just ignoring nonexistent paths
#[test]
fn freeze_nonexistent_path() {
    let context = TestContext::new("3.12");

    // Point `--path` at a directory that was never created.
    let nonexistent_dir = context.temp_dir.child("blahblah");
    assert!(!nonexistent_dir.exists());

    // Run `pip freeze`; a missing path yields empty output, not an error.
    uv_snapshot!(context.filters(), context.pip_freeze()
        .arg("--path")
        .arg(nonexistent_dir.path()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    ");
}
/// `pip freeze --quiet` should still print the package list on stdout; the
/// flag only suppresses diagnostic output.
#[test]
fn freeze_with_quiet_flag() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?;

    // Run `pip sync`.
    context
        .pip_sync()
        .arg(requirements_txt.path())
        .assert()
        .success();

    // Run `pip freeze` with `--quiet` flag.
    uv_snapshot!(context.pip_freeze().arg("--quiet"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    markupsafe==2.1.3
    tomli==2.0.1
    ----- stderr -----
    "###
    );

    Ok(())
}
/// Freeze packages installed into a `--target` directory via
/// `pip freeze --target`; without the flag, the target's packages must not
/// leak into the default environment's output.
#[test]
fn freeze_target() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?;

    let target = context.temp_dir.child("target");

    // Install packages to a target directory.
    context
        .pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--target")
        .arg(target.path())
        .assert()
        .success();

    // Freeze packages in the target directory.
    uv_snapshot!(context.filters(), context.pip_freeze()
        .arg("--target")
        .arg(target.path()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    markupsafe==2.1.3
    tomli==2.0.1
    ----- stderr -----
    "###
    );

    // Without --target, the packages should not be visible.
    uv_snapshot!(context.pip_freeze(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    "###
    );

    Ok(())
}
/// Freeze packages installed under a `--prefix` directory via
/// `pip freeze --prefix`; without the flag, the prefix's packages must not
/// appear in the default environment's output.
#[test]
fn freeze_prefix() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("MarkupSafe==2.1.3\ntomli==2.0.1")?;

    let prefix = context.temp_dir.child("prefix");

    // Install packages to a prefix directory.
    context
        .pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--prefix")
        .arg(prefix.path())
        .assert()
        .success();

    // Freeze packages in the prefix directory.
    uv_snapshot!(context.filters(), context.pip_freeze()
        .arg("--prefix")
        .arg(prefix.path()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    markupsafe==2.1.3
    tomli==2.0.1
    ----- stderr -----
    "###
    );

    // Without --prefix, the packages should not be visible.
    uv_snapshot!(context.pip_freeze(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    "###
    );

    Ok(())
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/pip_check.rs | crates/uv/tests/it/pip_check.rs | use anyhow::Result;
use assert_fs::fixture::FileWriteStr;
use assert_fs::fixture::PathChild;
use crate::common::TestContext;
use crate::common::uv_snapshot;
/// `pip check` reports success when every installed package's requirements are
/// satisfied by the environment.
#[test]
fn check_compatible_packages() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("requests==2.31.0")?;

    uv_snapshot!(context
        .pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 5 packages in [TIME]
    Prepared 5 packages in [TIME]
    Installed 5 packages in [TIME]
    + certifi==2024.2.2
    + charset-normalizer==3.3.2
    + idna==3.6
    + requests==2.31.0
    + urllib3==2.2.1
    "###
    );

    uv_snapshot!(context.pip_check(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Checked 5 packages in [TIME]
    All installed packages are compatible
    "###
    );

    Ok(())
}
/// requests 2.31.0 requires idna (<4,>=2.5);
/// this test force-installs idna 2.4 to trigger a failure.
///
/// `pip check` should exit non-zero and report the single incompatibility.
#[test]
fn check_incompatible_packages() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("requests==2.31.0")?;

    uv_snapshot!(context
        .pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 5 packages in [TIME]
    Prepared 5 packages in [TIME]
    Installed 5 packages in [TIME]
    + certifi==2024.2.2
    + charset-normalizer==3.3.2
    + idna==3.6
    + requests==2.31.0
    + urllib3==2.2.1
    "###
    );

    // Downgrade `idna` below the range `requests` declares.
    let requirements_txt_idna = context.temp_dir.child("requirements_idna.txt");
    requirements_txt_idna.write_str("idna==2.4")?;

    uv_snapshot!(context
        .pip_install()
        .arg("-r")
        .arg("requirements_idna.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Uninstalled 1 package in [TIME]
    Installed 1 package in [TIME]
    - idna==3.6
    + idna==2.4
    warning: The package `requests` requires `idna>=2.5,<4`, but `2.4` is installed
    "###
    );

    uv_snapshot!(context.pip_check(), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
    Checked 5 packages in [TIME]
    Found 1 incompatibility
    The package `requests` requires `idna>=2.5,<4`, but `2.4` is installed
    "###
    );

    Ok(())
}
/// requests 2.31.0 requires idna (<4,>=2.5) and urllib3<3,>=1.21.1;
/// this test force-installs idna 2.4 and urllib3 1.20 to trigger a failure
/// with multiple incompatible packages.
///
/// `pip check` should exit non-zero and list both incompatibilities.
#[test]
fn check_multiple_incompatible_packages() -> Result<()> {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("requests==2.31.0")?;

    uv_snapshot!(context
        .pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 5 packages in [TIME]
    Prepared 5 packages in [TIME]
    Installed 5 packages in [TIME]
    + certifi==2024.2.2
    + charset-normalizer==3.3.2
    + idna==3.6
    + requests==2.31.0
    + urllib3==2.2.1
    "###
    );

    // Downgrade both `idna` and `urllib3` below the ranges `requests` declares.
    let requirements_txt_two = context.temp_dir.child("requirements_two.txt");
    requirements_txt_two.write_str("idna==2.4\nurllib3==1.20")?;

    uv_snapshot!(context
        .pip_install()
        .arg("-r")
        .arg("requirements_two.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 2 packages in [TIME]
    Prepared 2 packages in [TIME]
    Uninstalled 2 packages in [TIME]
    Installed 2 packages in [TIME]
    - idna==3.6
    + idna==2.4
    - urllib3==2.2.1
    + urllib3==1.20
    warning: The package `requests` requires `idna>=2.5,<4`, but `2.4` is installed
    warning: The package `requests` requires `urllib3>=1.21.1,<3`, but `1.20` is installed
    "###
    );

    uv_snapshot!(context.pip_check(), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
    Checked 5 packages in [TIME]
    Found 2 incompatibilities
    The package `requests` requires `idna>=2.5,<4`, but `2.4` is installed
    The package `requests` requires `urllib3>=1.21.1,<3`, but `1.20` is installed
    "###
    );

    Ok(())
}
/// `pip check --python-version` validates installed packages' `Requires-Python`
/// against the given version rather than the interpreter's own version:
/// `urllib3` declares `>=3.8`, so checking against 3.7 must fail.
#[test]
fn check_python_version() {
    let context = TestContext::new("3.12");

    uv_snapshot!(context
        .pip_install()
        .arg("urllib3")
        .arg("--strict"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
    + urllib3==2.2.1
    "
    );

    uv_snapshot!(context.filters(), context.pip_check().arg("--python-version").arg("3.7"), @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
    Checked 1 package in [TIME]
    Found 1 incompatibility
    The package `urllib3` requires Python >=3.8, but `3.12.[X]` is installed
    "
    );
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/pip_compile.rs | crates/uv/tests/it/pip_compile.rs | #![allow(clippy::disallowed_types)]
use std::env::current_dir;
use std::fs;
use std::io::Cursor;
use anyhow::Result;
use assert_fs::prelude::*;
use flate2::write::GzEncoder;
use fs_err::File;
use indoc::indoc;
use url::Url;
use wiremock::matchers::{method, path};
use wiremock::{Mock, MockServer, ResponseTemplate};
use uv_fs::Simplified;
use uv_static::EnvVars;
use crate::common::{
DEFAULT_PYTHON_VERSION, TestContext, download_to_disk, packse_index_url, uv_snapshot,
};
/// Resolve a specific version of `anyio` from a `requirements.in` file.
#[test]
fn compile_requirements_in() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio==3.7.0")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    anyio==3.7.0
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###);

    Ok(())
}
/// Resolve a specific version of `anyio` from a `requirements.in` file with a `--annotation-style=line` flag.
///
/// The `# via …` annotations are emitted on the same line as each requirement.
#[test]
fn compile_requirements_in_annotation_line() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio==3.7.0")?;

    uv_snapshot!(context
        .pip_compile()
        .arg("--annotation-style=line")
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --annotation-style=line requirements.in
    anyio==3.7.0      # via -r requirements.in
    idna==3.6         # via anyio
    sniffio==1.3.1    # via anyio
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###);

    Ok(())
}
/// Resolve a specific version of `anyio` from a `requirements.in` file on stdin
/// when passed a path of `-`.
#[test]
fn compile_requirements_in_stdin() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio==3.7.0")?;

    // The file is fed via stdin, so the root requirement has no `-r` annotation.
    uv_snapshot!(context
        .pip_compile()
        .stdin(fs::File::open(requirements_in)?)
        .arg("-"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] -
    anyio==3.7.0
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###);

    Ok(())
}
/// Fail with a clear error when the input requirements file does not exist,
/// and assert that the file was not created as a side effect.
#[test]
fn missing_requirements_in() {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: File not found: `requirements.in`
    "###
    );

    requirements_in.assert(predicates::path::missing());
}
/// `pip compile` should succeed without an existing virtual environment, and
/// should not create one as a side effect.
#[test]
fn missing_venv() -> Result<()> {
    let context = TestContext::new("3.12");
    context.temp_dir.child("requirements.in").touch()?;
    fs_err::remove_dir_all(context.venv.path())?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    ----- stderr -----
    warning: Requirements file `requirements.in` does not contain any dependencies
    Resolved in [TIME]
    "###
    );

    context.venv.assert(predicates::path::missing());

    Ok(())
}
/// Compile to an `--output-file` that already exists but is empty; the empty
/// file is overwritten rather than treated as an error.
#[test]
fn empty_output() -> Result<()> {
    let context = TestContext::new("3.12");
    context
        .temp_dir
        .child("requirements.in")
        .write_str("iniconfig==1.1.1")?;
    context.temp_dir.child("requirements.txt").touch()?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--output-file")
        .arg("requirements.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --output-file requirements.txt
    iniconfig==1.1.1
        # via -r requirements.in
    ----- stderr -----
    Resolved 1 package in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a specific version of `anyio` from a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
    requires = ["setuptools>=42"]

    [project]
    name = "project"
    version = "0.1.0"
    dependencies = [
        "anyio==3.7.0",
    ]
    "#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    anyio==3.7.0
        # via project (pyproject.toml)
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a specific version of `anyio` from a `pyproject.toml` file. Despite the version being
/// dynamic, we shouldn't need to build the package, since the requirements are static.
#[test]
fn compile_pyproject_toml_dynamic_version() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
    requires = ["setuptools>=42"]

    [project]
    name = "project"
    dynamic = ["version"]
    dependencies = [
        "anyio==3.7.0",
    ]
    "#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
    anyio==3.7.0
        # via project (pyproject.toml)
    idna==3.6
        # via anyio
    sniffio==1.3.1
        # via anyio
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a specific version of `anyio` from a `pyproject.toml` file with `--annotation-style=line`.
#[test]
fn compile_pyproject_toml_with_line_annotation() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"[build-system]
    requires = ["setuptools>=42"]

    [project]
    name = "project"
    version = "0.1.0"
    dependencies = [
        "anyio==3.7.0",
    ]
    "#,
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("--annotation-style=line")
        .arg("pyproject.toml"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] --annotation-style=line pyproject.toml
    anyio==3.7.0      # via project (pyproject.toml)
    idna==3.6         # via anyio
    sniffio==1.3.1    # via anyio
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}
/// An invalid `tool.uv.sources` entry (a workspace source for a non-member)
/// should be rejected eagerly, before any package build is attempted.
#[test]
fn compile_pyproject_toml_eager_validation() -> Result<()> {
    let context = TestContext::new("3.12");
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        dynamic = ["version"]
        requires-python = ">=3.10"
        dependencies = ["anyio==4.7.0"]

        [tool.uv.sources]
        anyio = { workspace = true }
    "#})?;

    // This should fail without attempting to build the package.
    uv_snapshot!(context
        .pip_compile()
        .arg("pyproject.toml"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Failed to parse entry: `anyio`
      Caused by: `anyio` references a workspace in `tool.uv.sources` (e.g., `anyio = { workspace = true }`), but is not a workspace member
    "###);

    Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file.
///
/// The `idna<3.4` constraint pins anyio's transitive dependency to 3.3, and the
/// constraint file appears in the `# via` annotation for `idna`.
#[test]
fn compile_constraints_txt() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio==3.7.0")?;

    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("idna<3.4")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    anyio==3.7.0
        # via -r requirements.in
    idna==3.3
        # via
        #   -c constraints.txt
        #   anyio
    sniffio==1.3.1
        # via anyio
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a package from a `requirements.in` file, with an inline constraint.
#[test]
fn compile_constraints_inline() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio==3.7.0")?;
    // NOTE(review): `write_str` replaces the file's contents, so this second
    // call clobbers the `anyio==3.7.0` line above -- the requirements file ends
    // up containing only `-c constraints.txt`, which is why the snapshot below
    // resolves zero packages and the inline constraint is never exercised
    // against `anyio`. Presumably the intent was to write both lines in a
    // single call; confirm and update the expected snapshot accordingly.
    requirements_in.write_str("-c constraints.txt")?;

    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("idna<3.4")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in
    ----- stderr -----
    Resolved in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file that
/// uses markers.
#[test]
fn compile_constraints_markers() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("anyio")?;

    // Constrain a transitive dependency based on the Python version.
    //
    // `write_str` replaces the file's contents, so both constraints must be
    // written in a single call; the previous pair of consecutive `write_str`
    // calls left only the second constraint in the file, meaning the
    // marker-based selection was never actually exercised. On Python 3.12 only
    // the `python_version > '3.7'` constraint applies, so the expected output
    // is unchanged.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    // If constraints are ignored, these will conflict
    constraints_txt.write_str(
        "sniffio==1.2.0;python_version<='3.7'\nsniffio==1.3.0;python_version>'3.7'",
    )?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    anyio==4.3.0
        # via -r requirements.in
    idna==3.6
        # via anyio
    sniffio==1.3.0
        # via
        #   -c constraints.txt
        #   anyio
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );

    Ok(())
}
/// Resolve a package from a `requirements.in` file, with a `constraints.txt` file that uses an
/// extra. The constraint should be enforced, but the extra should _not_ be included in the output
/// (though it currently _is_ included).
#[test]
fn compile_constraint_extra() -> Result<()> {
    let context = TestContext::new("3.12");
    let requirements_in = context.temp_dir.child("requirements.in");
    requirements_in.write_str("flask")?;

    // Constrain `flask` (with an extra) to an upper bound.
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("flask[dotenv]<24.3.0")?;

    uv_snapshot!(context.filters(), context.pip_compile()
        .arg("requirements.in")
        .arg("--constraint")
        .arg("constraints.txt"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    # This file was autogenerated by uv via the following command:
    #    uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt
    blinker==1.7.0
        # via flask
    click==8.1.7
        # via flask
    flask==3.0.2
        # via
        #   -c constraints.txt
        #   -r requirements.in
    itsdangerous==2.1.2
        # via flask
    jinja2==3.1.3
        # via flask
    markupsafe==2.1.5
        # via
        #   jinja2
        #   werkzeug
    werkzeug==3.0.1
        # via flask
    ----- stderr -----
    Resolved 7 packages in [TIME]
    "###
    );

    Ok(())
}
/// This is a regression test for a case where `uv pip compile --universal`
/// would include dependencies with marker expressions that always evaluate
/// to false (for example, `python_version < '0'`).
///
/// See: <https://github.com/astral-sh/uv/issues/8676>
#[test]
fn compile_constraints_omit_impossible_dependencies() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str(
"\
apache-airflow[microsoft.azure]==2.3.4
",
)?;
// Pin a handful of (transitive) packages so the resolution is stable across runs.
let constraints_txt = context.temp_dir.child("constraints.txt");
constraints_txt.write_str(
"\
apache-airflow-providers-microsoft-azure==4.2.0
click==8.1.3
colorama==0.4.5
msal-extensions==1.0.0
portalocker==2.5.1
",
)?;
// Extra output filters on top of the default context filters, to keep the
// snapshot platform-independent.
let filters: Vec<_> = [
// 3.10 may not be installed
(
"warning: The requested Python version 3.10 is not available; .* will be used to build dependencies instead.\n",
"",
),
// These aren't used on Windows, so we filter them out.
(".*colorama==.*\n", ""),
(".*tzdata==.*\n", ""),
]
.into_iter()
.chain(context.filters())
.collect();
// `--universal` resolution: no entry below should carry an always-false marker.
uv_snapshot!(filters, context.pip_compile()
.arg("requirements.in")
.arg("--constraint")
.arg("constraints.txt")
.arg("--annotation-style")
.arg("line")
.arg("--python-version")
.arg("3.10")
.arg("--universal"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] requirements.in --constraint constraints.txt --annotation-style line --python-version 3.10 --universal
a2wsgi==1.10.4 # via connexion
adal==1.2.7 # via azure-kusto-data, msrestazure
aiohttp==3.9.3 # via apache-airflow-providers-http
aiosignal==1.3.1 # via aiohttp
alembic==1.13.1 # via apache-airflow
anyio==4.3.0 # via httpx, starlette
apache-airflow==2.3.4 # via apache-airflow-providers-microsoft-azure, -r requirements.in
apache-airflow-providers-common-sql==1.4.0 # via apache-airflow-providers-sqlite
apache-airflow-providers-ftp==3.3.1 # via apache-airflow
apache-airflow-providers-http==4.3.0 # via apache-airflow
apache-airflow-providers-imap==3.1.1 # via apache-airflow
apache-airflow-providers-microsoft-azure==4.2.0 # via apache-airflow, -c constraints.txt
apache-airflow-providers-sqlite==3.3.2 # via apache-airflow
apispec==3.3.2 # via flask-appbuilder
argcomplete==3.2.3 # via apache-airflow
asgiref==3.8.1 # via apache-airflow-providers-http, connexion, flask
async-timeout==4.0.3 ; python_full_version < '3.11' # via aiohttp
attrs==23.2.0 # via aiohttp, cattrs, jsonschema, referencing
azure-batch==14.1.0 # via apache-airflow-providers-microsoft-azure
azure-common==1.1.28 # via azure-batch, azure-mgmt-containerinstance, azure-mgmt-datafactory, azure-mgmt-datalake-store, azure-mgmt-resource, azure-storage-common, azure-storage-file
azure-core==1.30.1 # via azure-cosmos, azure-identity, azure-keyvault-secrets, azure-mgmt-core, azure-servicebus, azure-storage-blob, msrest
azure-cosmos==4.6.0 # via apache-airflow-providers-microsoft-azure
azure-datalake-store==0.0.53 # via apache-airflow-providers-microsoft-azure
azure-identity==1.15.0 # via apache-airflow-providers-microsoft-azure
azure-keyvault-secrets==4.8.0 # via apache-airflow-providers-microsoft-azure
azure-kusto-data==0.0.45 # via apache-airflow-providers-microsoft-azure
azure-mgmt-containerinstance==1.5.0 # via apache-airflow-providers-microsoft-azure
azure-mgmt-core==1.4.0 # via azure-mgmt-datafactory, azure-mgmt-resource
azure-mgmt-datafactory==1.1.0 # via apache-airflow-providers-microsoft-azure
azure-mgmt-datalake-nspkg==3.0.1 # via azure-mgmt-datalake-store
azure-mgmt-datalake-store==0.5.0 # via apache-airflow-providers-microsoft-azure
azure-mgmt-nspkg==3.0.2 # via azure-mgmt-datalake-nspkg
azure-mgmt-resource==23.0.1 # via apache-airflow-providers-microsoft-azure
azure-nspkg==3.0.2 # via azure-mgmt-nspkg
azure-servicebus==7.12.1 ; platform_machine != 'aarch64' # via apache-airflow-providers-microsoft-azure
azure-storage-blob==12.8.1 # via apache-airflow-providers-microsoft-azure
azure-storage-common==2.1.0 # via apache-airflow-providers-microsoft-azure, azure-storage-file
azure-storage-file==2.1.0 # via apache-airflow-providers-microsoft-azure
babel==2.14.0 # via flask-babel
blinker==1.7.0 # via apache-airflow
cachelib==0.9.0 # via flask-caching
cattrs==23.2.3 # via apache-airflow
certifi==2024.2.2 # via httpcore, httpx, msrest, requests
cffi==1.16.0 # via azure-datalake-store, cryptography
charset-normalizer==3.3.2 # via requests
click==8.1.3 # via flask, flask-appbuilder, -c constraints.txt
colorlog==4.8.0 # via apache-airflow
connexion==3.0.6 # via apache-airflow
cron-descriptor==1.4.3 # via apache-airflow
croniter==2.0.3 # via apache-airflow
cryptography==42.0.5 # via adal, apache-airflow, azure-identity, azure-storage-blob, azure-storage-common, msal, pyjwt
deprecated==1.2.14 # via apache-airflow
dill==0.3.8 # via apache-airflow
dnspython==2.6.1 # via email-validator
docutils==0.20.1 # via python-daemon
email-validator==1.3.1 # via flask-appbuilder
exceptiongroup==1.2.0 ; python_full_version < '3.11' # via anyio, cattrs
flask==2.2.5 # via apache-airflow, connexion, flask-appbuilder, flask-babel, flask-caching, flask-jwt-extended, flask-login, flask-session, flask-sqlalchemy, flask-wtf
flask-appbuilder==4.1.3 # via apache-airflow
flask-babel==2.0.0 # via flask-appbuilder
flask-caching==2.1.0 # via apache-airflow
flask-jwt-extended==4.6.0 # via flask-appbuilder
flask-login==0.6.3 # via apache-airflow, flask-appbuilder
flask-session==0.7.0 # via apache-airflow
flask-sqlalchemy==2.5.1 # via flask-appbuilder
flask-wtf==0.15.1 # via apache-airflow, flask-appbuilder
frozenlist==1.4.1 # via aiohttp, aiosignal
graphviz==0.20.3 # via apache-airflow
greenlet==3.0.3 ; platform_machine == 'AMD64' or platform_machine == 'WIN32' or platform_machine == 'aarch64' or platform_machine == 'amd64' or platform_machine == 'ppc64le' or platform_machine == 'win32' or platform_machine == 'x86_64' # via sqlalchemy
gunicorn==21.2.0 # via apache-airflow
h11==0.14.0 # via httpcore
httpcore==1.0.4 # via httpx
httpx==0.27.0 # via apache-airflow, connexion
idna==3.6 # via anyio, email-validator, httpx, requests, yarl
inflection==0.5.1 # via connexion
isodate==0.6.1 # via azure-keyvault-secrets, azure-mgmt-resource, azure-servicebus, msrest
itsdangerous==2.1.2 # via apache-airflow, flask, flask-wtf
jinja2==3.1.3 # via apache-airflow, connexion, flask, flask-babel, python-nvd3, swagger-ui-bundle
jsonschema==4.21.1 # via apache-airflow, connexion, flask-appbuilder
jsonschema-specifications==2023.12.[X] # via jsonschema
lazy-object-proxy==1.10.0 # via apache-airflow
linkify-it-py==2.0.3 # via apache-airflow
lockfile==0.12.2 # via apache-airflow, python-daemon
mako==1.3.2 # via alembic
markdown==3.6 # via apache-airflow
markdown-it-py==3.0.0 # via apache-airflow, mdit-py-plugins, rich
markupsafe==2.1.5 # via apache-airflow, jinja2, mako, werkzeug, wtforms
marshmallow==3.21.1 # via flask-appbuilder, marshmallow-enum, marshmallow-oneofschema, marshmallow-sqlalchemy
marshmallow-enum==1.5.1 # via flask-appbuilder
marshmallow-oneofschema==3.1.1 # via apache-airflow
marshmallow-sqlalchemy==0.26.1 # via flask-appbuilder
mdit-py-plugins==0.4.0 # via apache-airflow
mdurl==0.1.2 # via markdown-it-py
msal==1.28.0 # via azure-datalake-store, azure-identity, msal-extensions
msal-extensions==1.0.0 # via azure-identity, -c constraints.txt
msgspec==0.18.6 # via flask-session
msrest==0.7.1 # via azure-mgmt-containerinstance, azure-mgmt-datafactory, azure-storage-blob, msrestazure
msrestazure==0.6.4 # via azure-batch, azure-kusto-data, azure-mgmt-containerinstance, azure-mgmt-datalake-store
multidict==6.0.5 # via aiohttp, yarl
oauthlib==3.2.2 # via requests-oauthlib
packaging==24.0 # via apache-airflow, gunicorn, marshmallow
pathspec==0.9.0 # via apache-airflow
pendulum==3.0.0 # via apache-airflow
pluggy==1.4.0 # via apache-airflow
portalocker==2.5.1 # via msal-extensions, -c constraints.txt
prison==0.2.1 # via flask-appbuilder
psutil==5.9.8 # via apache-airflow
pycparser==2.21 # via cffi
pygments==2.17.2 # via apache-airflow, rich
pyjwt==2.8.0 # via adal, apache-airflow, flask-appbuilder, flask-jwt-extended, msal
python-daemon==3.0.1 # via apache-airflow
python-dateutil==2.9.0.post0 # via adal, apache-airflow, azure-kusto-data, azure-storage-common, croniter, flask-appbuilder, pendulum, time-machine
python-multipart==0.0.9 # via connexion
python-nvd3==0.15.0 # via apache-airflow
python-slugify==8.0.4 # via apache-airflow, python-nvd3
pytz==2024.1 # via croniter, flask-babel
pywin32==306 ; sys_platform == 'win32' # via portalocker
pyyaml==6.0.1 # via apispec, connexion
referencing==0.34.0 # via jsonschema, jsonschema-specifications
requests==2.31.0 # via adal, apache-airflow-providers-http, azure-core, azure-datalake-store, azure-kusto-data, azure-storage-common, connexion, msal, msrest, requests-oauthlib, requests-toolbelt
requests-oauthlib==2.0.0 # via msrest
requests-toolbelt==1.0.0 # via apache-airflow-providers-http
rich==13.7.1 # via apache-airflow
rpds-py==0.18.0 # via jsonschema, referencing
setproctitle==1.3.3 # via apache-airflow
setuptools==69.2.0 # via python-daemon
six==1.16.0 # via azure-core, isodate, msrestazure, prison, python-dateutil
sniffio==1.3.1 # via anyio, httpx
sqlalchemy==1.4.52 # via alembic, apache-airflow, flask-appbuilder, flask-sqlalchemy, marshmallow-sqlalchemy, sqlalchemy-jsonfield, sqlalchemy-utils
sqlalchemy-jsonfield==1.0.2 # via apache-airflow
sqlalchemy-utils==0.41.2 # via flask-appbuilder
sqlparse==0.4.4 # via apache-airflow-providers-common-sql
starlette==0.37.2 # via connexion
swagger-ui-bundle==1.1.0 # via connexion
tabulate==0.9.0 # via apache-airflow
tenacity==8.2.3 # via apache-airflow
termcolor==2.4.0 # via apache-airflow
text-unidecode==1.3 # via python-slugify
time-machine==2.14.1 ; implementation_name != 'pypy' # via pendulum
typing-extensions==4.10.0 # via alembic, anyio, apache-airflow, asgiref, azure-core, azure-cosmos, azure-keyvault-secrets, azure-servicebus, cattrs, connexion
uc-micro-py==1.0.3 # via linkify-it-py
unicodecsv==0.14.1 # via apache-airflow
urllib3==2.2.1 # via requests
werkzeug==3.0.1 # via apache-airflow, connexion, flask, flask-jwt-extended, flask-login
wrapt==1.16.0 # via deprecated
wtforms==2.3.3 # via flask-appbuilder, flask-wtf
yarl==1.9.4 # via aiohttp
----- stderr -----
Resolved 149 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from an optional extra in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra() -> Result<()> {
let context = TestContext::new("3.12");
// A project with no base dependencies and a single optional extra, `foo`.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools>=42"]
[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
]
"#,
)?;
// Selecting `--extra foo` should pull in `anyio` and its transitive dependencies.
uv_snapshot!(context.filters(), context.pip_compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("foo"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra foo
anyio==3.7.0
# via project (pyproject.toml)
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Resolve a package from an extra with non-normalized names in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra_name_normalization() -> Result<()> {
let context = TestContext::new("3.12");
// Declare the extra with mixed case and mixed separators (`-`, `_`, `.`).
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools>=42"]
[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies."FrIeNdLy-._.-bArD" = [
"anyio==3.7.0",
]
"#,
)?;
// Request the extra with a differently-cased, differently-separated spelling;
// both should normalize to the same extra name.
uv_snapshot!(context.filters(), context.pip_compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("FRiENDlY-...-_-BARd"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra FRiENDlY-...-_-BARd
anyio==3.7.0
# via project (pyproject.toml)
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Request an extra that does not exist in a `pyproject.toml` file.
#[test]
fn compile_pyproject_toml_extra_missing() -> Result<()> {
let context = TestContext::new("3.12");
// The project declares only the `foo` extra.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[build-system]
requires = ["setuptools>=42"]
[project]
name = "project"
version = "0.1.0"
dependencies = []
optional-dependencies.foo = [
"anyio==3.7.0",
]
"#,
)?;
// Requesting the undeclared `bar` extra should fail with a clear error (exit code 2).
uv_snapshot!(context.filters(), context.pip_compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("bar"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Requested extra not found: bar
"###
);
Ok(())
}
/// Compile a `pyproject.toml` file with a `poetry` section.
#[test]
fn compile_pyproject_toml_poetry() -> Result<()> {
let context = TestContext::new("3.12");
// Poetry-style metadata only (no `[project]` table); dependencies and the `test`
// extra are declared under `[tool.poetry.*]`.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[tool.poetry]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]
[tool.poetry.dependencies]
python = "^3.10"
anyio = "^3"
pytest = { version = "*", optional = true }
[tool.poetry.extras]
test = ["pytest"]
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
"#,
)?;
// Selecting the `test` extra should include `pytest` alongside `anyio`.
uv_snapshot!(context.filters(), context.pip_compile()
.arg("pyproject.toml")
.arg("--extra")
.arg("test"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] pyproject.toml --extra test
anyio==3.7.1
# via poetry-editable (pyproject.toml)
idna==3.6
# via anyio
iniconfig==2.0.0
# via pytest
packaging==24.0
# via pytest
pluggy==1.4.0
# via pytest
pytest==8.1.1
# via poetry-editable (pyproject.toml)
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 7 packages in [TIME]
"###
);
Ok(())
}
/// Compile a `pyproject.toml` file with a `poetry` section and a `project` section without a
/// `dependencies` field, which should be treated as an empty list.
#[test]
fn compile_pyproject_toml_poetry_empty_dependencies() -> Result<()> {
let context = TestContext::new("3.12");
// `[project]` has no `dependencies` key; the actual dependencies come from
// `[tool.poetry.dependencies]`.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]
[tool.poetry]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]
[tool.poetry.dependencies]
python = "^3.10"
anyio = "^3"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
"#,
)?;
// The Poetry dependencies should still be resolved despite the bare `[project]` table.
uv_snapshot!(context.filters(), context.pip_compile()
.arg("pyproject.toml"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile --cache-dir [CACHE_DIR] pyproject.toml
anyio==3.7.1
# via poetry-editable (pyproject.toml)
idna==3.6
# via anyio
sniffio==1.3.1
# via anyio
----- stderr -----
Resolved 3 packages in [TIME]
"###
);
Ok(())
}
/// Compile a `pyproject.toml` file with a `poetry` section and a `project` section with an invalid
/// `dependencies` field.
#[test]
fn compile_pyproject_toml_poetry_invalid_dependencies() -> Result<()> {
let context = TestContext::new("3.12");
// `[project.dependencies]` is written as a TOML table, but PEP 621 requires an
// array of requirement strings — this must be rejected at parse time.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"[project]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]
[tool.poetry]
name = "poetry-editable"
version = "0.1.0"
description = ""
authors = ["Astral Software Inc. <hey@astral.sh>"]
[project.dependencies]
python = "^3.12"
msgspec = "^0.18.4"
[tool.poetry.dependencies]
python = "^3.10"
anyio = "^3"
[build-system]
requires = ["poetry-core"]
build-backend = "poetry.core.masonry.api"
"#,
)?;
// Expect a TOML parse error pointing at the offending table, not a resolution attempt.
uv_snapshot!(context.filters(), context.pip_compile()
.arg("pyproject.toml"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse: `pyproject.toml`
Caused by: TOML parse error at line 13, column 1
|
13 | [project.dependencies]
| ^^^^^^^^^^^^^^^^^^^^^^
invalid type: map, expected a sequence
"###
);
Ok(())
}
/// Compile a `pyproject.toml` file that uses setuptools as the build backend.
#[test]
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/python_dir.rs | crates/uv/tests/it/python_dir.rs | use assert_fs::fixture::PathChild;
use uv_static::EnvVars;
use crate::common::{TestContext, uv_snapshot};
#[test]
fn python_dir() {
let context = TestContext::new("3.12");
let python_dir = context.temp_dir.child("python");
uv_snapshot!(context.filters(), context.python_dir()
.env(EnvVars::UV_PYTHON_INSTALL_DIR, python_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
[TEMP_DIR]/python
----- stderr -----
"###);
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/auth.rs | crates/uv/tests/it/auth.rs | use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::{fixture::PathChild, prelude::FileWriteStr};
#[cfg(feature = "native-auth")]
use uv_static::EnvVars;
use crate::common::{TestContext, uv_snapshot};
#[test]
#[cfg(feature = "native-auth")]
// End-to-end check of native (system keyring) auth scoped to a *realm* (a bare
// domain): login at the domain level, then `uv add` against an index under that
// domain should pick up the stored credentials.
fn add_package_native_auth_realm() -> Result<()> {
let context = TestContext::new("3.12").with_real_home();
// Clear state before the test
// (best-effort: the exit status is deliberately not checked).
context
.auth_logout()
.arg("https://pypi-proxy.fly.dev")
.arg("--username")
.arg("public")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth")
.status()?;
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc::indoc! { r#"
[project]
name = "foo"
version = "1.0.0"
requires-python = ">=3.11, <4"
dependencies = []
"#
})?;
// Try to add a package without credentials.
uv_snapshot!(context.add().arg("anyio").arg("--default-index").arg("https://public@pypi-proxy.fly.dev/basic-auth/simple")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because anyio was not found in the package registry and your project depends on anyio, we can conclude that your project's requirements are unsatisfiable.
hint: An index URL (https://pypi-proxy.fly.dev/basic-auth/simple) could not be queried due to a lack of valid authentication credentials (401 Unauthorized).
help: If you want to add the package regardless of the failed resolution, provide the `--frozen` flag to skip locking and syncing.
"
);
// Login to the domain
uv_snapshot!(context.auth_login()
.arg("pypi-proxy.fly.dev")
.arg("--username")
.arg("public")
.arg("--password")
.arg("heron")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Stored credentials for public@https://pypi-proxy.fly.dev/
"
);
// Try to add the original package without credentials again. This should use credentials
// storied in the system keyring.
uv_snapshot!(context.add().arg("anyio").arg("--default-index").arg("https://public@pypi-proxy.fly.dev/basic-auth/simple")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
Prepared 3 packages in [TIME]
Installed 3 packages in [TIME]
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
"
);
// Logout of the domain
uv_snapshot!(context.auth_logout()
.arg("pypi-proxy.fly.dev")
.arg("--username")
.arg("public")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Removed credentials for public@https://pypi-proxy.fly.dev/
"
);
// Authentication should fail again
// (use a different package so the earlier successful resolution is not reused).
uv_snapshot!(context.add().arg("iniconfig").arg("--default-index").arg("https://public@pypi-proxy.fly.dev/basic-auth/simple")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because iniconfig was not found in the package registry and your project depends on iniconfig, we can conclude that your project's requirements are unsatisfiable.
hint: An index URL (https://pypi-proxy.fly.dev/basic-auth/simple) could not be queried due to a lack of valid authentication credentials (401 Unauthorized).
help: If you want to add the package regardless of the failed resolution, provide the `--frozen` flag to skip locking and syncing.
"
);
Ok(())
}
#[test]
#[cfg(feature = "native-auth")]
fn add_package_native_auth() -> Result<()> {
let context = TestContext::new("3.12").with_real_home();
// Clear state before the test
context
.auth_logout()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.arg("--username")
.arg("public")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth")
.status()?;
// Configure `pyproject.toml` with native keyring provider.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc::indoc! { r#"
[project]
name = "foo"
version = "1.0.0"
requires-python = ">=3.11, <4"
dependencies = []
"#
})?;
// Try to add a package without credentials.
uv_snapshot!(context.add().arg("anyio").arg("--default-index").arg("https://public@pypi-proxy.fly.dev/basic-auth/simple")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because anyio was not found in the package registry and your project depends on anyio, we can conclude that your project's requirements are unsatisfiable.
hint: An index URL (https://pypi-proxy.fly.dev/basic-auth/simple) could not be queried due to a lack of valid authentication credentials (401 Unauthorized).
help: If you want to add the package regardless of the failed resolution, provide the `--frozen` flag to skip locking and syncing.
"
);
// Login to the index
uv_snapshot!(context.auth_login()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.arg("--username")
.arg("public")
.arg("--password")
.arg("heron")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Stored credentials for public@https://pypi-proxy.fly.dev/basic-auth
"
);
// Try to add the original package without credentials again. This should use
// credentials storied in the system keyring.
uv_snapshot!(context.add().arg("anyio").arg("--default-index").arg("https://public@pypi-proxy.fly.dev/basic-auth/simple")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
Prepared 3 packages in [TIME]
Installed 3 packages in [TIME]
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
"
);
// Logout of the index
uv_snapshot!(context.auth_logout()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.arg("--username")
.arg("public")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Removed credentials for public@https://pypi-proxy.fly.dev/basic-auth
"
);
// Authentication should fail again
uv_snapshot!(context.add().arg("iniconfig").arg("--default-index").arg("https://public@pypi-proxy.fly.dev/basic-auth/simple")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because iniconfig was not found in the package registry and your project depends on iniconfig, we can conclude that your project's requirements are unsatisfiable.
hint: An index URL (https://pypi-proxy.fly.dev/basic-auth/simple) could not be queried due to a lack of valid authentication credentials (401 Unauthorized).
help: If you want to add the package regardless of the failed resolution, provide the `--frozen` flag to skip locking and syncing.
"
);
Ok(())
}
#[test]
#[cfg(feature = "native-auth")]
// Exercises `uv auth token` against index-URL-scoped credentials: missing
// credentials, username/password login, token login, and username handling
// (mismatch, URL-embedded, and CLI/URL conflict).
fn token_native_auth() -> Result<()> {
let context = TestContext::new_with_versions(&[]).with_real_home();
// Clear state before the test
// (best-effort: the exit status is deliberately not checked).
context
.auth_logout()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.arg("--username")
.arg("public")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth")
.status()?;
// Without persisted credentials
uv_snapshot!(context.auth_token()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to fetch credentials for https://pypi-proxy.fly.dev/basic-auth/simple
");
// Without persisted credentials (with a username in the request)
uv_snapshot!(context.auth_token()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.arg("--username")
.arg("public")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to fetch credentials for public@https://pypi-proxy.fly.dev/basic-auth/simple
");
// Login to the index
uv_snapshot!(context.auth_login()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.arg("--username")
.arg("public")
.arg("--password")
.arg("heron")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Stored credentials for public@https://pypi-proxy.fly.dev/basic-auth
"
);
// Show the credentials
uv_snapshot!(context.auth_token()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.arg("--username")
.arg("public")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
heron
----- stderr -----
");
// Without the username
// TODO(zanieb): Add a hint here if we can?
uv_snapshot!(context.auth_token()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to fetch credentials for https://pypi-proxy.fly.dev/basic-auth/simple
");
// With a mismatched username
// TODO(zanieb): Add a hint here if we can?
uv_snapshot!(context.auth_token()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.arg("--username")
.arg("private")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to fetch credentials for private@https://pypi-proxy.fly.dev/basic-auth/simple
");
// Login to the index with a token
uv_snapshot!(context.auth_login()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.arg("--token")
.arg("heron")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Stored credentials for https://pypi-proxy.fly.dev/basic-auth
"
);
// Retrieve the token without a username
uv_snapshot!(context.auth_token()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
heron
----- stderr -----
");
// NOTE(review): unlike the other cleanup calls, this logout does not set
// UV_PREVIEW_FEATURES=native-auth — confirm whether that is intentional.
context
.auth_logout()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.arg("--username")
.arg("public")
.status()?;
// Retrieve token using URL with embedded username (no --username needed)
uv_snapshot!(context.auth_token()
.arg("https://public@pypi-proxy.fly.dev/basic-auth/simple")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
heron
----- stderr -----
");
// Conflict between --username and URL username is rejected
uv_snapshot!(context.auth_token()
.arg("https://public@pypi-proxy.fly.dev/basic-auth/simple")
.arg("--username")
.arg("different")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Cannot specify a username both via the URL and CLI; found `--username different` and `public`
");
Ok(())
}
#[test]
#[cfg(feature = "native-auth")]
// Exercises `uv auth token` with *realm*-scoped (bare domain) credentials:
// realm-level lookups should also satisfy child URLs, while mismatched
// usernames or ports must fail.
fn token_native_auth_realm() -> Result<()> {
let context = TestContext::new_with_versions(&[]).with_real_home();
// Clear state before the test
// (best-effort: the exit statuses are deliberately not checked).
context
.auth_logout()
.arg("pypi-proxy.fly.dev")
.arg("--username")
.arg("public")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth")
.status()?;
context
.auth_logout()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.arg("--username")
.arg("public")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth")
.status()?;
// Without persisted credentials
// NOTE(review): this snapshot succeeds and prints `heron` even though
// credentials were just cleared — confirm the intended behavior here.
uv_snapshot!(context.auth_token()
.arg("pypi-proxy.fly.dev")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
heron
----- stderr -----
");
// Without persisted credentials (with a username in the request)
uv_snapshot!(context.auth_token()
.arg("pypi-proxy.fly.dev")
.arg("--username")
.arg("public")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to fetch credentials for public@https://pypi-proxy.fly.dev/
");
// Login to the index
uv_snapshot!(context.auth_login()
.arg("pypi-proxy.fly.dev")
.arg("--username")
.arg("public")
.arg("--password")
.arg("heron")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Stored credentials for public@https://pypi-proxy.fly.dev/
"
);
// Show the credentials
uv_snapshot!(context.auth_token()
.arg("pypi-proxy.fly.dev")
.arg("--username")
.arg("public")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
heron
----- stderr -----
");
// Show the credentials for a child URL
uv_snapshot!(context.auth_token()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.arg("--username")
.arg("public")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
heron
----- stderr -----
");
// Without the username
uv_snapshot!(context.auth_token()
.arg("pypi-proxy.fly.dev")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
heron
----- stderr -----
");
// Without the username
uv_snapshot!(context.auth_token()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
heron
----- stderr -----
");
// With a mismatched username
// TODO(zanieb): Add a hint here if we can?
uv_snapshot!(context.auth_token()
.arg("pypi-proxy.fly.dev")
.arg("--username")
.arg("private")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to fetch credentials for private@https://pypi-proxy.fly.dev/
");
// With a mismatched port
uv_snapshot!(context.auth_token()
.arg("https://pypi-proxy.fly.dev:1000")
.arg("--username")
.arg("public")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to fetch credentials for public@https://pypi-proxy.fly.dev:1000/
");
// Login to the index with a token
uv_snapshot!(context.auth_login()
.arg("pypi-proxy.fly.dev")
.arg("--token")
.arg("heron")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Stored credentials for https://pypi-proxy.fly.dev/
"
);
// Retrieve the token without a username
uv_snapshot!(context.auth_token()
.arg("pypi-proxy.fly.dev")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
heron
----- stderr -----
");
// NOTE(review): unlike the other cleanup calls, this logout does not set
// UV_PREVIEW_FEATURES=native-auth — confirm whether that is intentional.
context
.auth_logout()
.arg("pypi-proxy.fly.dev")
.arg("--username")
.arg("public")
.status()?;
// Retrieve token using URL with embedded username (no --username needed)
uv_snapshot!(context.auth_token()
.arg("https://public@pypi-proxy.fly.dev/basic-auth/simple")
.env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
success: true
exit_code: 0
----- stdout -----
heron
----- stderr -----
");
Ok(())
}
/// `uv auth login` against the native (OS keyring) credential store.
///
/// Covers argument validation — a missing service name, a missing username,
/// and a missing password are each rejected with a distinct error — followed
/// by a successful username/password login.
#[test]
#[cfg(feature = "native-auth")]
fn login_native_auth() -> Result<()> {
    // Uses the real home directory so the native keyring backend is reachable.
    let context = TestContext::new_with_versions(&[]).with_real_home();
    // Clear state before the test
    context
        .auth_logout()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .arg("--username")
        .arg("public")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth")
        .status()?;
    // Without a service name
    uv_snapshot!(context.auth_login(), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: the following required arguments were not provided:
    <SERVICE>
    Usage: uv auth login --cache-dir [CACHE_DIR] <SERVICE>
    For more information, try '--help'.
    ");
    // Without a username (or token)
    uv_snapshot!(context.auth_login()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: No username provided; did you mean to provide `--username` or `--token`?
    ");
    // Without a password
    uv_snapshot!(context.auth_login()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .arg("--username")
        .arg("public")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: No password provided; did you mean to provide `--password` or `--token`?
    ");
    // Successful
    // Note: credentials are stored for the realm root (`/basic-auth`), not the
    // full `/simple` path passed on the command line.
    uv_snapshot!(context.auth_login()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .arg("--username")
        .arg("public")
        .arg("--password")
        .arg("heron")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Stored credentials for public@https://pypi-proxy.fly.dev/basic-auth
    "
    );
    Ok(())
}
/// `uv auth login --token` against the native credential store.
///
/// A token login stores credentials without a username in the success message
/// (internally the `__token__` placeholder username is used, which is why the
/// pre-test cleanup logs out `__token__`).
#[test]
#[cfg(feature = "native-auth")]
fn login_token_native_auth() -> Result<()> {
    let context = TestContext::new_with_versions(&[]).with_real_home();
    // Clear state before the test
    context
        .auth_logout()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .arg("--username")
        .arg("__token__")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth")
        .status()?;
    // Successful with token
    uv_snapshot!(context.auth_login()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .arg("--token")
        .arg("test-token")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Stored credentials for https://pypi-proxy.fly.dev/basic-auth
    "
    );
    Ok(())
}
/// `uv auth logout` against the native credential store.
///
/// Exercises: missing service name, logging out before logging in (with and
/// without a username — note the asymmetry: without a username logout reports
/// success even when nothing was stored, while with a username it errors),
/// a full login/logout round-trip, a username embedded in the URL, and the
/// conflict errors when a URL username clashes with `--username` or `--token`.
#[test]
#[cfg(feature = "native-auth")]
fn logout_native_auth() -> Result<()> {
    let context = TestContext::new_with_versions(&[]).with_real_home();
    // Clear state before the test
    context
        .auth_logout()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .arg("--username")
        .arg("public")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth")
        .status()?;
    // Without a service name
    uv_snapshot!(context.auth_logout(), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: the following required arguments were not provided:
    <SERVICE>
    Usage: uv auth logout --cache-dir [CACHE_DIR] <SERVICE>
    For more information, try '--help'.
    ");
    // Logout before logging in
    uv_snapshot!(context.auth_logout()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Removed credentials for https://pypi-proxy.fly.dev/basic-auth
    ");
    // Logout before logging in (with a username)
    uv_snapshot!(context.auth_logout()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .arg("--username")
        .arg("public")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Unable to remove credentials for public@https://pypi-proxy.fly.dev/basic-auth
    Caused by: No matching entry found in secure storage
    ");
    // Login with a username
    uv_snapshot!(context.auth_login()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .arg("--username")
        .arg("public")
        .arg("--password")
        .arg("heron")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Stored credentials for public@https://pypi-proxy.fly.dev/basic-auth
    "
    );
    // Logout without a username
    // TODO(zanieb): Add a hint here if we can?
    uv_snapshot!(context.auth_logout()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Unable to remove credentials for https://pypi-proxy.fly.dev/basic-auth
    Caused by: No matching entry found in secure storage
    ");
    // Logout with a username
    uv_snapshot!(context.auth_logout()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .arg("--username")
        .arg("public")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Removed credentials for public@https://pypi-proxy.fly.dev/basic-auth
    ");
    // Login again
    context
        .auth_login()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .arg("--username")
        .arg("public")
        .arg("--password")
        .arg("heron")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth")
        .assert()
        .success();
    // Logout with a username in the URL
    uv_snapshot!(context.auth_logout()
        .arg("https://public@pypi-proxy.fly.dev/basic-auth/simple")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Removed credentials for public@https://pypi-proxy.fly.dev/basic-auth
    ");
    // Conflict between --username and a URL username is rejected
    uv_snapshot!(context.auth_logout()
        .arg("https://public@pypi-proxy.fly.dev/basic-auth/simple")
        .arg("--username")
        .arg("foo")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Cannot specify a username both via the URL and CLI; found `--username foo` and `public`
    ");
    // Conflict between --token and a URL username is rejected
    uv_snapshot!(context.auth_login()
        .arg("https://public@pypi-proxy.fly.dev/basic-auth/simple")
        .arg("--token")
        .arg("foo")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: When using `--token`, a username cannot not be provided; found: public
    ");
    Ok(())
}
/// `uv auth logout` for token-based credentials in the native store.
///
/// A token stored via `--token` can be removed without specifying a username.
#[test]
#[cfg(feature = "native-auth")]
fn logout_token_native_auth() -> Result<()> {
    let context = TestContext::new_with_versions(&[]).with_real_home();
    // Clear state before the test
    context
        .auth_logout()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth")
        .status()?;
    // Login with a token
    uv_snapshot!(context.auth_login()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .arg("--token")
        .arg("test-token")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Stored credentials for https://pypi-proxy.fly.dev/basic-auth
    "
    );
    // Logout without a username
    uv_snapshot!(context.auth_logout()
        .arg("https://pypi-proxy.fly.dev/basic-auth/simple")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Removed credentials for https://pypi-proxy.fly.dev/basic-auth
    ");
    Ok(())
}
/// URL normalization and validation of the `<SERVICE>` argument to
/// `uv auth login`.
///
/// Covers: `https://` being prepended to bare domains (with or without a
/// path), rejection of `http://` for non-local hosts (but acceptance for
/// `localhost`), rejection of unparseable service strings, credentials
/// embedded in the URL, and the conflict errors when URL-embedded
/// credentials clash with `--username`, `--password`, or `--token`.
///
/// Note: paths are dropped during normalization — `example.com/simple`
/// stores credentials for `https://example.com/`.
#[test]
#[cfg(feature = "native-auth")]
fn login_native_auth_url() {
    let context = TestContext::new_with_versions(&[]).with_real_home();
    // A domain-only service name gets https:// prepended
    uv_snapshot!(context.auth_login()
        .arg("example.com")
        .arg("--username")
        .arg("test")
        .arg("--password")
        .arg("test")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Stored credentials for test@https://example.com/
    ");
    // HTTP URLs are not allowed - only HTTPS
    uv_snapshot!(context.auth_login()
        .arg("http://example.com")
        .arg("--username")
        .arg("test")
        .arg("--password")
        .arg("test")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: invalid value 'http://example.com' for '<SERVICE>': HTTPS is required for non-local hosts
    For more information, try '--help'.
    ");
    // HTTP URLs are fine for localhost
    uv_snapshot!(context.auth_login()
        .arg("http://localhost:1324")
        .arg("--username")
        .arg("test")
        .arg("--password")
        .arg("test")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Stored credentials for test@http://localhost:1324/
    ");
    uv_snapshot!(context.auth_login()
        .arg("https://example.com")
        .arg("--username")
        .arg("test")
        .arg("--password")
        .arg("test")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Stored credentials for test@https://example.com/
    ");
    // A domain-only service with a path also gets https:// prepended
    uv_snapshot!(context.auth_login()
        .arg("example.com/simple")
        .arg("--username")
        .arg("test")
        .arg("--password")
        .arg("test")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Stored credentials for test@https://example.com/
    ");
    // An invalid URL is rejected
    uv_snapshot!(context.auth_login()
        .arg("not a valid url")
        .arg("--username")
        .arg("test")
        .arg("--password")
        .arg("test")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: invalid value 'not a valid url' for '<SERVICE>': invalid international domain name
    For more information, try '--help'.
    ");
    // URL with embedded credentials works
    uv_snapshot!(context.auth_login()
        .arg("https://test:password@example.com/simple")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Stored credentials for test@https://example.com/
    ");
    // URL with embedded username and separate password works
    uv_snapshot!(context.auth_login()
        .arg("https://test@example.com/simple")
        .arg("--password")
        .arg("password")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Stored credentials for test@https://example.com/
    ");
    // Conflict between --username and URL username is rejected
    uv_snapshot!(context.auth_login()
        .arg("https://test@example.com/simple")
        .arg("--username")
        .arg("different")
        .arg("--password")
        .arg("password")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Cannot specify a username both via the URL and CLI; found `--username different` and `test`
    ");
    // Conflict between --password and URL password is rejected
    uv_snapshot!(context.auth_login()
        .arg("https://test:password@example.com/simple")
        .arg("--password")
        .arg("different")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Cannot specify a password both via the URL and CLI
    ");
    // Conflict between --token and URL credentials is rejected
    uv_snapshot!(context.auth_login()
        .arg("https://test:password@example.com/simple")
        .arg("--token")
        .arg("some-token")
        .env(EnvVars::UV_PREVIEW_FEATURES, "native-auth"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: When using `--token`, a username cannot not be provided; found: test
    ");
}
#[test]
fn login_text_store() {
let context = TestContext::new_with_versions(&[]);
// Login with a username and password
uv_snapshot!(context.auth_login()
.arg("https://pypi-proxy.fly.dev/basic-auth/simple")
.arg("--username")
.arg("public")
.arg("--password")
.arg("heron"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Stored credentials for public@https://pypi-proxy.fly.dev/basic-auth
"
);
// Login with a token
uv_snapshot!(context.auth_login()
.arg("https://example.com/simple")
.arg("--token")
.arg("test-token"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Stored credentials for https://example.com/
"
);
// Empty username should fail
uv_snapshot!(context.auth_login()
.arg("https://example.com/simple")
.arg("--username")
.arg("")
.arg("--password")
.arg("testpass"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Username cannot be empty
"
);
// Empty password should fail
uv_snapshot!(context.auth_login()
.arg("https://example.com/simple")
.arg("--username")
.arg("testuser")
.arg("--password")
.arg(""), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Password cannot be empty
"
);
// HTTP should fail
uv_snapshot!(context.auth_login()
.arg("http://example.com/simple")
.arg("--username")
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/tool_run.rs | crates/uv/tests/it/tool_run.rs | use crate::common::{TestContext, uv_snapshot};
use anyhow::Result;
use assert_cmd::prelude::*;
use assert_fs::prelude::*;
use indoc::indoc;
use uv_fs::copy_dir_all;
use uv_static::EnvVars;
/// Argument splitting for `uv tool run`: arguments before the command belong
/// to uv, arguments after the command belong to the tool, and `--` explicitly
/// separates uv's arguments from the command's.
///
/// The pushed regex filters collapse the (long, version-dependent) uv and
/// pytest help texts into stable placeholders for the snapshots.
#[test]
fn tool_run_args() {
    let context = TestContext::new("3.12").with_filtered_counts();
    let mut filters = context.filters();
    filters.push((
        r"Usage: uv tool run \[OPTIONS\] (?s).*",
        "[UV TOOL RUN HELP]",
    ));
    filters.push((r"usage: pytest \[options\] (?s).*", "[PYTEST HELP]"));
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // We treat arguments before the command as uv tool run arguments
    uv_snapshot!(filters, context.tool_run()
        .arg("--help")
        .arg("pytest")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Run a command provided by a Python package
    [UV TOOL RUN HELP]
    ");
    // We don't treat arguments after the command as uv tool run arguments
    uv_snapshot!(filters, context.tool_run()
        .arg("pytest")
        .arg("--help")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    [PYTEST HELP]
    ");
    // Can use `--` to separate uv arguments from the command arguments.
    uv_snapshot!(filters, context.tool_run()
        .arg("--")
        .arg("pytest")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    pytest 8.1.1
    ----- stderr -----
    Resolved [N] packages in [TIME]
    "###);
}
/// The `<tool>@<version>` request syntax for `uv tool run`.
///
/// Covers: a valid pinned version, an empty version specifier (parse error),
/// a non-version suffix (resolved as a path and failing), and the fact that
/// `@` is *not* treated as a version separator when `--from` is used.
#[test]
fn tool_run_at_version() {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("pytest@8.0.0")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    pytest 8.0.0
    ----- stderr -----
    Resolved 4 packages in [TIME]
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
    + iniconfig==2.0.0
    + packaging==24.0
    + pluggy==1.4.0
    + pytest==8.0.0
    "###);
    // An empty version specifier fails to parse: the trailing `@` is parsed
    // as the start of a `name @ url` requirement, and the URL is missing.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("pytest@")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Failed to parse: `pytest@`
    Caused by: Expected URL
    pytest@
    ^
    "###);
    // A non-version suffix (`pytest@invalid`) parses as a `name @ path`
    // requirement, so resolution fails on the nonexistent local path.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("pytest@invalid")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
    × Failed to resolve tool requirement
    ╰─▶ Distribution not found at: file://[TEMP_DIR]/invalid
    "###);
    let filters = context
        .filters()
        .into_iter()
        .chain([(
            // The error message is different on Windows
            "Caused by: program not found",
            "Caused by: No such file or directory (os error 2)",
        )])
        .collect::<Vec<_>>();
    // When `--from` is used, `@` is not treated as a version request
    uv_snapshot!(filters, context.tool_run()
        .arg("--from")
        .arg("pytest")
        .arg("pytest@8.0.0")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
    Resolved 4 packages in [TIME]
    Prepared 1 package in [TIME]
    Installed 4 packages in [TIME]
    + iniconfig==2.0.0
    + packaging==24.0
    + pluggy==1.4.0
    + pytest==8.1.1
    An executable named `pytest@8.0.0` is not provided by package `pytest`.
    The following executables are available:
    - py.test
    - pytest
    ");
}
/// `uv tool run --from <pkg>==<version> <cmd>`: pinning the tool version via
/// a `--from` requirement rather than the `@` syntax.
#[test]
fn tool_run_from_version() {
    let context = TestContext::new("3.12");
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--from")
        .arg("pytest==8.0.0")
        .arg("pytest")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    pytest 8.0.0
    ----- stderr -----
    Resolved 4 packages in [TIME]
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
    + iniconfig==2.0.0
    + packaging==24.0
    + pluggy==1.4.0
    + pytest==8.0.0
    "###);
}
/// `uv tool run --constraints`: a constraints file caps transitive versions
/// (`pluggy<1.4.0` forces `pluggy==1.3.0`, which in turn pulls an older
/// compatible `pytest==8.0.2`).
#[test]
fn tool_run_constraints() {
    let context = TestContext::new("3.12");
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    let constraints_txt = context.temp_dir.child("constraints.txt");
    constraints_txt.write_str("pluggy<1.4.0").unwrap();
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--constraints")
        .arg("constraints.txt")
        .arg("pytest")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    pytest 8.0.2
    ----- stderr -----
    Resolved 4 packages in [TIME]
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
    + iniconfig==2.0.0
    + packaging==24.0
    + pluggy==1.3.0
    + pytest==8.0.2
    "###);
}
/// `uv tool run --overrides`: unlike constraints, overrides replace the
/// declared dependency outright, so `pluggy==1.3.0` is installed while
/// `pytest` stays at its latest version (8.1.1) rather than being downgraded.
#[test]
fn tool_run_overrides() {
    let context = TestContext::new("3.12");
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    let overrides_txt = context.temp_dir.child("overrides.txt");
    overrides_txt.write_str("pluggy<1.4.0").unwrap();
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--overrides")
        .arg("overrides.txt")
        .arg("pytest")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    pytest 8.1.1
    ----- stderr -----
    Resolved 4 packages in [TIME]
    Prepared 4 packages in [TIME]
    Installed 4 packages in [TIME]
    + iniconfig==2.0.0
    + packaging==24.0
    + pluggy==1.3.0
    + pytest==8.1.1
    "###);
}
/// Error reporting when the requested executable does not exist: a wrong
/// executable name lists the package's available executables, and a package
/// with no executables at all gets its own message.
#[test]
fn tool_run_suggest_valid_commands() {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // `orange` is not an executable of `black`; the error lists what is.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--from")
        .arg("black")
        .arg("orange")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
    Resolved 6 packages in [TIME]
    Prepared 6 packages in [TIME]
    Installed 6 packages in [TIME]
    + black==24.3.0
    + click==8.1.7
    + mypy-extensions==1.0.0
    + packaging==24.0
    + pathspec==0.12.1
    + platformdirs==4.2.0
    An executable named `orange` is not provided by package `black`.
    The following executables are available:
    - black
    - blackd
    ");
    // `fastapi-cli` 0.0.1 ships no executables at all.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("fastapi-cli")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
    Resolved 3 packages in [TIME]
    Prepared 3 packages in [TIME]
    Installed 3 packages in [TIME]
    + fastapi-cli==0.0.1
    + importlib-metadata==1.7.0
    + zipp==3.18.1
    Package `fastapi-cli` does not provide any executables.
    ");
}
/// When the requested executable is provided by a *dependency* rather than
/// the `--from` package itself, uv warns and suggests the correct `--from`
/// (here, the `fastapi` command comes from `fastapi-cli`, not `fastapi`).
#[test]
fn tool_run_warn_executable_not_in_from() {
    // FastAPI 0.111 is only available from this date onwards.
    let context = TestContext::new("3.12")
        .with_exclude_newer("2024-05-04T00:00:00Z")
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    let mut filters = context.filters();
    // `uvloop` is a platform-dependent dependency; drop it from the install list.
    filters.push(("\\+ uvloop(.+)\n ", ""));
    // Strip off the `fastapi` command output.
    filters.push(("(?s)fastapi` instead.*", "fastapi` instead."));
    uv_snapshot!(filters, context.tool_run()
        .arg("--from")
        .arg("fastapi")
        .arg("fastapi")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    Resolved 35 packages in [TIME]
    Prepared 35 packages in [TIME]
    Installed 35 packages in [TIME]
    + annotated-types==0.6.0
    + anyio==4.3.0
    + certifi==2024.2.2
    + click==8.1.7
    + dnspython==2.6.1
    + email-validator==2.1.1
    + fastapi==0.111.0
    + fastapi-cli==0.0.2
    + h11==0.14.0
    + httpcore==1.0.5
    + httptools==0.6.1
    + httpx==0.27.0
    + idna==3.7
    + jinja2==3.1.3
    + markdown-it-py==3.0.0
    + markupsafe==2.1.5
    + mdurl==0.1.2
    + orjson==3.10.3
    + pydantic==2.7.1
    + pydantic-core==2.18.2
    + pygments==2.17.2
    + python-dotenv==1.0.1
    + python-multipart==0.0.9
    + pyyaml==6.0.1
    + rich==13.7.1
    + shellingham==1.5.4
    + sniffio==1.3.1
    + starlette==0.37.2
    + typer==0.12.3
    + typing-extensions==4.11.0
    + ujson==5.9.0
    + uvicorn==0.29.0
    + watchfiles==0.21.0
    + websockets==12.0
    warning: An executable named `fastapi` is not provided by package `fastapi` but is available via the dependency `fastapi-cli`. Consider using `uv tool run --from fastapi-cli fastapi` instead.
    ");
}
/// Reuse of an installed tool environment by `uv tool run`.
///
/// After `uv tool install black==24.1.0`: a bare `tool run black` reuses the
/// installed environment; `--isolated`, an `@version` request, `--with`, and
/// a `--from` pin each force a fresh (ephemeral) environment instead.
#[test]
fn tool_run_from_install() {
    let context = TestContext::new("3.12").with_filtered_counts();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Install `black` at a specific version.
    context
        .tool_install()
        .arg("black==24.1.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();
    // Verify that `tool run black` uses the already-installed version.
    // (No resolve/install lines in stderr: the environment is reused.)
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("black")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.1.0 (compiled: yes)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    "###);
    // Verify that `--isolated` uses an isolated environment.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--isolated")
        .arg("black")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.3.0 (compiled: yes)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
    + black==24.3.0
    + click==8.1.7
    + mypy-extensions==1.0.0
    + packaging==24.0
    + pathspec==0.12.1
    + platformdirs==4.2.0
    "###);
    // Verify that `tool run black` at a different version installs the new version.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("black@24.1.1")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.1.1 (compiled: yes)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
    + black==24.1.1
    + click==8.1.7
    + mypy-extensions==1.0.0
    + packaging==24.0
    + pathspec==0.12.1
    + platformdirs==4.2.0
    "###);
    // Verify that `--with` installs a new version.
    // TODO(charlie): This could (in theory) layer the `--with` requirements on top of the existing
    // environment.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--with")
        .arg("iniconfig")
        .arg("black")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.3.0 (compiled: yes)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
    + black==24.3.0
    + click==8.1.7
    + iniconfig==2.0.0
    + mypy-extensions==1.0.0
    + packaging==24.0
    + pathspec==0.12.1
    + platformdirs==4.2.0
    "###);
    // Verify that `tool run black` at a different version (via `--from`) installs the new version.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--from")
        .arg("black==24.2.0")
        .arg("black")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.2.0 (compiled: yes)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
    + black==24.2.0
    + click==8.1.7
    + mypy-extensions==1.0.0
    + packaging==24.0
    + pathspec==0.12.1
    + platformdirs==4.2.0
    "###);
}
/// Interaction between constraints/overrides and an installed tool
/// environment.
///
/// With `flask==3.0.0` installed: a constraint or override that the installed
/// environment already satisfies reuses it; an incompatible one forces a
/// fresh resolution. An override that enables a new extra (`flask[dotenv]`)
/// also invalidates the installed environment.
#[test]
fn tool_run_from_install_constraints() {
    let context = TestContext::new("3.12").with_filtered_counts();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Install `flask` at a specific version.
    context
        .tool_install()
        .arg("flask==3.0.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .assert()
        .success();
    // Verify that `tool run flask` uses the already-installed version.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("flask")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.12.[X]
    Flask 3.0.0
    Werkzeug 3.0.1
    ----- stderr -----
    "###);
    // Verify that `tool run flask` with a compatible constraint uses the already-installed version.
    context
        .temp_dir
        .child("constraints.txt")
        .write_str("werkzeug<4.0.0")
        .unwrap();
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--constraints")
        .arg("constraints.txt")
        .arg("flask")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.12.[X]
    Flask 3.0.0
    Werkzeug 3.0.1
    ----- stderr -----
    "###);
    // Verify that `tool run flask` with an incompatible constraint installs a new version.
    // (Constraining werkzeug<3 also forces flask itself down to 2.3.3.)
    context
        .temp_dir
        .child("constraints.txt")
        .write_str("werkzeug<3.0.0")
        .unwrap();
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--constraints")
        .arg("constraints.txt")
        .arg("flask")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.12.[X]
    Flask 2.3.3
    Werkzeug 2.3.8
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
    + blinker==1.7.0
    + click==8.1.7
    + flask==2.3.3
    + itsdangerous==2.1.2
    + jinja2==3.1.3
    + markupsafe==2.1.5
    + werkzeug==2.3.8
    "###);
    // Verify that `tool run flask` with a compatible override uses the already-installed version.
    context
        .temp_dir
        .child("override.txt")
        .write_str("werkzeug==3.0.1")
        .unwrap();
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--override")
        .arg("override.txt")
        .arg("flask")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.12.[X]
    Flask 3.0.0
    Werkzeug 3.0.1
    ----- stderr -----
    "###);
    // Verify that `tool run flask` with an incompatible override installs a new version.
    // (Overriding werkzeug allows flask itself to float up to 3.0.2.)
    context
        .temp_dir
        .child("override.txt")
        .write_str("werkzeug==3.0.0")
        .unwrap();
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--override")
        .arg("override.txt")
        .arg("flask")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.12.[X]
    Flask 3.0.2
    Werkzeug 3.0.0
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
    + blinker==1.7.0
    + click==8.1.7
    + flask==3.0.2
    + itsdangerous==2.1.2
    + jinja2==3.1.3
    + markupsafe==2.1.5
    + werkzeug==3.0.0
    "###);
    // Verify that an override that enables a new extra also invalidates the environment.
    context
        .temp_dir
        .child("override.txt")
        .write_str("flask[dotenv]")
        .unwrap();
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--override")
        .arg("override.txt")
        .arg("flask")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.12.[X]
    Flask 3.0.2
    Werkzeug 3.0.1
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
    + blinker==1.7.0
    + click==8.1.7
    + flask==3.0.2
    + itsdangerous==2.1.2
    + jinja2==3.1.3
    + markupsafe==2.1.5
    + python-dotenv==1.0.1
    + werkzeug==3.0.1
    "###);
}
/// Verify environment caching for `uv tool run`: repeat invocations reuse the
/// cached environment; `--refresh`/`--refresh-package` still reuse it; varying
/// the interpreter or adding `--with` requirements creates a fresh environment.
#[test]
fn tool_run_cache() {
    let context = TestContext::new_with_versions(&["3.11", "3.12"]).with_filtered_counts();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Verify that `tool run black` installs the latest version.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("-p")
        .arg("3.12")
        .arg("black")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.3.0 (compiled: yes)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + black==24.3.0
     + click==8.1.7
     + mypy-extensions==1.0.0
     + packaging==24.0
     + pathspec==0.12.1
     + platformdirs==4.2.0
    "###);
    // Verify that `tool run black` uses the cached version.
    // (Only "Resolved" appears in stderr — nothing is re-installed.)
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("-p")
        .arg("3.12")
        .arg("black")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.3.0 (compiled: yes)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    "###);
    // Verify that `--refresh` allows cache reuse.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("-p")
        .arg("3.12")
        .arg("--refresh")
        .arg("black")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.3.0 (compiled: yes)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    ");
    // Verify that `--refresh-package` allows cache reuse.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("-p")
        .arg("3.12")
        .arg("--refresh-package")
        .arg("packaging")
        .arg("black")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.3.0 (compiled: yes)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    ");
    // Verify that varying the interpreter leads to a fresh environment.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("-p")
        .arg("3.11")
        .arg("black")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.3.0 (compiled: yes)
    Python (CPython) 3.11.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + black==24.3.0
     + click==8.1.7
     + mypy-extensions==1.0.0
     + packaging==24.0
     + pathspec==0.12.1
     + platformdirs==4.2.0
    "###);
    // But that re-invoking with the previous interpreter retains the cached version.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("-p")
        .arg("3.12")
        .arg("black")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.3.0 (compiled: yes)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    "###);
    // Verify that `--with` leads to a fresh environment.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("-p")
        .arg("3.12")
        .arg("--with")
        .arg("iniconfig")
        .arg("black")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.3.0 (compiled: yes)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + black==24.3.0
     + click==8.1.7
     + iniconfig==2.0.0
     + mypy-extensions==1.0.0
     + packaging==24.0
     + pathspec==0.12.1
     + platformdirs==4.2.0
    "###);
}
/// Verify that `uv tool run` accepts a direct wheel URL as the requirement,
/// both via `--from` (named and bare URL) and as the positional target, and
/// that subsequent invocations reuse the cached environment.
#[test]
fn tool_run_url() {
    let context = TestContext::new("3.12").with_filtered_counts();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // `--from` with a PEP 508 "name @ url" requirement.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--from")
        .arg("flask @ https://files.pythonhosted.org/packages/61/80/ffe1da13ad9300f87c93af113edd0638c75138c42a0994becfacac078c06/flask-3.0.3-py3-none-any.whl")
        .arg("flask")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.12.[X]
    Flask 3.0.3
    Werkzeug 3.0.1
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + blinker==1.7.0
     + click==8.1.7
     + flask==3.0.3 (from https://files.pythonhosted.org/packages/61/80/ffe1da13ad9300f87c93af113edd0638c75138c42a0994becfacac078c06/flask-3.0.3-py3-none-any.whl)
     + itsdangerous==2.1.2
     + jinja2==3.1.3
     + markupsafe==2.1.5
     + werkzeug==3.0.1
    "###);
    // `--from` with a bare URL; environment is reused from the first run.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--from")
        .arg("https://files.pythonhosted.org/packages/61/80/ffe1da13ad9300f87c93af113edd0638c75138c42a0994becfacac078c06/flask-3.0.3-py3-none-any.whl")
        .arg("flask")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.12.[X]
    Flask 3.0.3
    Werkzeug 3.0.1
    ----- stderr -----
    Resolved [N] packages in [TIME]
    "###);
    // Positional "name @ url" requirement.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("flask @ https://files.pythonhosted.org/packages/61/80/ffe1da13ad9300f87c93af113edd0638c75138c42a0994becfacac078c06/flask-3.0.3-py3-none-any.whl")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.12.[X]
    Flask 3.0.3
    Werkzeug 3.0.1
    ----- stderr -----
    Resolved [N] packages in [TIME]
    "###);
    // Positional bare URL.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("https://files.pythonhosted.org/packages/61/80/ffe1da13ad9300f87c93af113edd0638c75138c42a0994becfacac078c06/flask-3.0.3-py3-none-any.whl")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.12.[X]
    Flask 3.0.3
    Werkzeug 3.0.1
    ----- stderr -----
    Resolved [N] packages in [TIME]
    "###);
}
/// Test running a tool with a Git requirement.
///
/// Covers positional and `--from` forms, both as a bare Git URL and as a
/// PEP 508 "name @ git+url" requirement; also verifies behavior after the
/// cache is cleared.
#[test]
#[cfg(feature = "git")]
fn tool_run_git() {
    let context = TestContext::new("3.12").with_filtered_counts();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Positional bare Git URL.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("git+https://github.com/psf/black@24.2.0")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.2.0 (compiled: no)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + black==24.2.0 (from git+https://github.com/psf/black@6fdf8a4af28071ed1d079c01122b34c5d587207a)
     + click==8.1.7
     + mypy-extensions==1.0.0
     + packaging==24.0
     + pathspec==0.12.1
     + platformdirs==4.2.0
    "###);
    // Positional "name @ git+url"; environment is reused.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("black @ git+https://github.com/psf/black@24.2.0")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.2.0 (compiled: no)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    "###);
    // Clear the cache.
    fs_err::remove_dir_all(&context.cache_dir).expect("Failed to remove cache dir.");
    // After clearing the cache, `--from` with a bare Git URL re-installs.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--from")
        .arg("git+https://github.com/psf/black@24.2.0")
        .arg("black")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.2.0 (compiled: no)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + black==24.2.0 (from git+https://github.com/psf/black@6fdf8a4af28071ed1d079c01122b34c5d587207a)
     + click==8.1.7
     + mypy-extensions==1.0.0
     + packaging==24.0
     + pathspec==0.12.1
     + platformdirs==4.2.0
    ");
    // `--from` with "name @ git+url"; environment is reused.
    uv_snapshot!(context.filters(), context.tool_run()
        .arg("--from")
        .arg("black @ git+https://github.com/psf/black@24.2.0")
        .arg("black")
        .arg("--version")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.2.0 (compiled: no)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    Resolved [N] packages in [TIME]
    "###);
}
/// Test running a tool with a Git LFS enabled requirement.
#[test]
#[cfg(feature = "git-lfs")]
fn tool_run_git_lfs() {
let context = TestContext::new("3.13")
.with_filtered_counts()
.with_filtered_exe_suffix()
.with_git_lfs_config();
let tool_dir = context.temp_dir.child("tools");
let bin_dir = context.temp_dir.child("bin");
uv_snapshot!(context.filters(), context.tool_run()
.arg("--lfs")
.arg("git+https://github.com/astral-sh/test-lfs-repo@c6d77ab63d91104f32ab5e5ae2943f4d26ff875f")
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
Hello from test-lfs-repo!
----- stderr -----
Resolved [N] packages in [TIME]
Prepared [N] packages in [TIME]
Installed [N] packages in [TIME]
+ test-lfs-repo==0.1.0 (from git+https://github.com/astral-sh/test-lfs-repo@c6d77ab63d91104f32ab5e5ae2943f4d26ff875f#lfs=true)
"###);
uv_snapshot!(context.filters(), context.tool_run()
.arg("--lfs")
.arg("test-lfs-repo @ git+https://github.com/astral-sh/test-lfs-repo@c6d77ab63d91104f32ab5e5ae2943f4d26ff875f")
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
Hello from test-lfs-repo!
----- stderr -----
Resolved [N] packages in [TIME]
"###);
// Clear the cache.
fs_err::remove_dir_all(&context.cache_dir).expect("Failed to remove cache dir.");
uv_snapshot!(context.filters(), context.tool_run()
.arg("--from")
.arg("git+https://github.com/astral-sh/test-lfs-repo@c6d77ab63d91104f32ab5e5ae2943f4d26ff875f")
.arg("--lfs")
.arg("test-lfs-repo-assets")
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
success: true
exit_code: 0
----- stdout -----
Hello from test-lfs-repo! LFS_TEST=True ANOTHER_LFS_TEST=True
----- stderr -----
Resolved [N] packages in [TIME]
Prepared [N] packages in [TIME]
Installed [N] packages in [TIME]
+ test-lfs-repo==0.1.0 (from git+https://github.com/astral-sh/test-lfs-repo@c6d77ab63d91104f32ab5e5ae2943f4d26ff875f#lfs=true)
");
uv_snapshot!(context.filters(), context.tool_run()
.arg("--from")
.arg("test-lfs-repo @ git+https://github.com/astral-sh/test-lfs-repo@c6d77ab63d91104f32ab5e5ae2943f4d26ff875f")
.arg("--lfs")
.arg("test-lfs-repo-assets")
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/tool_install.rs | crates/uv/tests/it/tool_install.rs | #[cfg(any(feature = "git", feature = "git-lfs"))]
use std::collections::BTreeSet;
use std::process::Command;
use anyhow::Result;
use assert_fs::{
assert::PathAssert,
fixture::{FileTouch, FileWriteStr, PathChild},
};
use indoc::indoc;
use insta::assert_snapshot;
use predicates::prelude::predicate;
use uv_fs::copy_dir_all;
use uv_static::EnvVars;
use crate::common::{TestContext, uv_snapshot};
/// Basic `uv tool install` flow: install `black`, verify the generated
/// entrypoint script and receipt, run the installed executable, then install
/// a second tool (`flask`) alongside it and verify the same artifacts.
#[test]
fn tool_install() {
    let context = TestContext::new("3.12")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Install `black`
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("black")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + black==24.3.0
     + click==8.1.7
     + mypy-extensions==1.0.0
     + packaging==24.0
     + pathspec==0.12.1
     + platformdirs==4.2.0
    Installed 2 executables: black, blackd
    "###);
    // The tool environment and its receipt should exist on disk.
    tool_dir.child("black").assert(predicate::path::is_dir());
    tool_dir
        .child("black")
        .child("uv-receipt.toml")
        .assert(predicate::path::exists());
    let executable = bin_dir.child(format!("black{}", std::env::consts::EXE_SUFFIX));
    assert!(executable.exists());
    // On Windows, we can't snapshot an executable file.
    #[cfg(not(windows))]
    insta::with_settings!({
        filters => context.filters(),
    }, {
        // Should run black in the virtual environment
        assert_snapshot!(fs_err::read_to_string(executable).unwrap(), @r###"
        #![TEMP_DIR]/tools/black/bin/python
        # -*- coding: utf-8 -*-
        import sys
        from black import patched_main
        if __name__ == "__main__":
            if sys.argv[0].endswith("-script.pyw"):
                sys.argv[0] = sys.argv[0][:-11]
            elif sys.argv[0].endswith(".exe"):
                sys.argv[0] = sys.argv[0][:-4]
            sys.exit(patched_main())
        "###);
    });
    insta::with_settings!({
        filters => context.filters(),
    }, {
        // We should have a tool receipt
        assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
        [tool]
        requirements = [{ name = "black" }]
        entrypoints = [
            { name = "black", install-path = "[TEMP_DIR]/bin/black", from = "black" },
            { name = "blackd", install-path = "[TEMP_DIR]/bin/blackd", from = "black" },
        ]
        [tool.options]
        exclude-newer = "2024-03-25T00:00:00Z"
        "###);
    });
    // The installed entrypoint should be runnable via PATH.
    uv_snapshot!(context.filters(), Command::new("black").arg("--version").env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.3.0 (compiled: yes)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    "###);
    // Install another tool
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("flask")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + blinker==1.7.0
     + click==8.1.7
     + flask==3.0.2
     + itsdangerous==2.1.2
     + jinja2==3.1.3
     + markupsafe==2.1.5
     + werkzeug==3.0.1
    Installed 1 executable: flask
    "###);
    tool_dir.child("flask").assert(predicate::path::is_dir());
    assert!(
        bin_dir
            .child(format!("flask{}", std::env::consts::EXE_SUFFIX))
            .exists()
    );
    #[cfg(not(windows))]
    insta::with_settings!({
        filters => context.filters(),
    }, {
        assert_snapshot!(fs_err::read_to_string(bin_dir.join("flask")).unwrap(), @r###"
        #![TEMP_DIR]/tools/flask/bin/python
        # -*- coding: utf-8 -*-
        import sys
        from flask.cli import main
        if __name__ == "__main__":
            if sys.argv[0].endswith("-script.pyw"):
                sys.argv[0] = sys.argv[0][:-11]
            elif sys.argv[0].endswith(".exe"):
                sys.argv[0] = sys.argv[0][:-4]
            sys.exit(main())
        "###);
    });
    uv_snapshot!(context.filters(), Command::new("flask").arg("--version").env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.12.[X]
    Flask 3.0.2
    Werkzeug 3.0.1
    ----- stderr -----
    "###);
    insta::with_settings!({
        filters => context.filters(),
    }, {
        assert_snapshot!(fs_err::read_to_string(tool_dir.join("flask").join("uv-receipt.toml")).unwrap(), @r###"
        [tool]
        requirements = [{ name = "flask" }]
        entrypoints = [
            { name = "flask", install-path = "[TEMP_DIR]/bin/flask", from = "flask" },
        ]
        [tool.options]
        exclude-newer = "2024-03-25T00:00:00Z"
        "###);
    });
}
/// Verify interaction between `uv tool install` and the global pinned Python
/// version: the tool is installed against the pin in effect at install time,
/// and `--reinstall` keeps the originally-installed interpreter even after the
/// global pin changes.
#[test]
fn tool_install_with_global_python() -> Result<()> {
    let context = TestContext::new_with_versions(&["3.11", "3.12"])
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Seed a global pin of 3.11 in the user config directory.
    let uv = context.user_config_dir.child("uv");
    let versions = uv.child(".python-version");
    versions.write_str("3.11")?;
    // Install a tool
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("flask")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + blinker==1.7.0
     + click==8.1.7
     + flask==3.0.2
     + itsdangerous==2.1.2
     + jinja2==3.1.3
     + markupsafe==2.1.5
     + werkzeug==3.0.1
    Installed 1 executable: flask
    "###);
    tool_dir.child("flask").assert(predicate::path::is_dir());
    assert!(
        bin_dir
            .child(format!("flask{}", std::env::consts::EXE_SUFFIX))
            .exists()
    );
    // The tool should run on the pinned 3.11 interpreter.
    uv_snapshot!(context.filters(), Command::new("flask").arg("--version").env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.11.[X]
    Flask 3.0.2
    Werkzeug 3.0.1
    ----- stderr -----
    "###);
    // Change global version
    uv_snapshot!(context.filters(), context.python_pin().arg("3.12").arg("--global"),
    @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `[UV_USER_CONFIG_DIR]/.python-version` from `3.11` -> `3.12`
    ----- stderr -----
    "
    );
    // Install flask again
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("flask")
        .arg("--reinstall")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Uninstalled [N] packages in [TIME]
    Installed [N] packages in [TIME]
     ~ blinker==1.7.0
     ~ click==8.1.7
     ~ flask==3.0.2
     ~ itsdangerous==2.1.2
     ~ jinja2==3.1.3
     ~ markupsafe==2.1.5
     ~ werkzeug==3.0.1
    Installed 1 executable: flask
    ");
    // Currently, when reinstalling a tool we use the original version the tool
    // was installed with, not the most up-to-date global version
    uv_snapshot!(context.filters(), Command::new("flask").arg("--version").env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.11.[X]
    Flask 3.0.2
    Werkzeug 3.0.1
    ----- stderr -----
    "###);
    Ok(())
}
/// Verify that `uv tool install` supports `--with-editable` for a local
/// package alongside an additional `--with` requirement.
#[test]
fn tool_install_with_editable() -> Result<()> {
    let context = TestContext::new("3.12")
        .with_exclude_newer("2025-01-18T00:00:00Z")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Copy the local `anyio_local` fixture into the temp dir so the editable
    // path is relative to the test's working directory.
    let anyio_local = context.temp_dir.child("src").child("anyio_local");
    copy_dir_all(
        context.workspace_root.join("test/packages/anyio_local"),
        &anyio_local,
    )?;
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("--with-editable")
        .arg("./src/anyio_local")
        .arg("--with")
        .arg("iniconfig")
        .arg("executable-application")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + anyio==4.3.0+foo (from file://[TEMP_DIR]/src/anyio_local)
     + executable-application==0.3.0
     + iniconfig==2.0.0
    Installed 1 executable: app
    "###);
    Ok(())
}
/// Verify that `uv tool install` honors a satisfiable `--build-constraints`
/// file: the install succeeds and the constraint is recorded in the receipt
/// as `build-constraint-dependencies`.
#[test]
fn tool_install_with_compatible_build_constraints() -> Result<()> {
    let context = TestContext::new("3.9")
        .with_exclude_newer("2024-05-04T00:00:00Z")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // A build constraint that's compatible with building `requests==1.2`
    // (an sdist-only release that requires setuptools at build time).
    let constraints_txt = context.temp_dir.child("build_constraints.txt");
    constraints_txt.write_str("setuptools>=40")?;
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("black")
        .arg("--with")
        .arg("requests==1.2")
        .arg("--build-constraints")
        .arg("build_constraints.txt")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + black==24.4.2
     + click==8.1.7
     + mypy-extensions==1.0.0
     + packaging==24.0
     + pathspec==0.12.1
     + platformdirs==4.2.1
     + requests==1.2.0
     + tomli==2.0.1
     + typing-extensions==4.11.0
    Installed 2 executables: black, blackd
    ");
    tool_dir
        .child("black")
        .child("uv-receipt.toml")
        .assert(predicate::path::exists());
    insta::with_settings!({
        filters => context.filters(),
    }, {
        // We should have a tool receipt
        assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
        [tool]
        requirements = [
            { name = "black" },
            { name = "requests", specifier = "==1.2" },
        ]
        build-constraint-dependencies = [{ name = "setuptools", specifier = ">=40" }]
        entrypoints = [
            { name = "black", install-path = "[TEMP_DIR]/bin/black", from = "black" },
            { name = "blackd", install-path = "[TEMP_DIR]/bin/blackd", from = "black" },
        ]
        [tool.options]
        exclude-newer = "2024-05-04T00:00:00Z"
        "###);
    });
    Ok(())
}
/// Verify that `uv tool install` fails cleanly when a `--build-constraints`
/// file conflicts with a package's build requirements, and that no receipt
/// is left behind after the failure.
#[test]
fn tool_install_with_incompatible_build_constraints() -> Result<()> {
    let context = TestContext::new("3.9")
        .with_exclude_newer("2024-05-04T00:00:00Z")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // `setuptools==2` conflicts with the `setuptools>=40.8.0` that
    // `requests==1.2`'s `setup.py` build requires.
    let constraints_txt = context.temp_dir.child("build_constraints.txt");
    constraints_txt.write_str("setuptools==2")?;
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("black")
        .arg("--with")
        .arg("requests==1.2")
        .arg("--build-constraints")
        .arg("build_constraints.txt")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × Failed to download and build `requests==1.2.0`
      ├─▶ Failed to resolve requirements from `setup.py` build
      ├─▶ No solution found when resolving: `setuptools>=40.8.0`
      ╰─▶ Because you require setuptools>=40.8.0 and setuptools==2, we can conclude that your requirements are unsatisfiable.
    ");
    // The failed install must not leave a receipt behind.
    tool_dir
        .child("black")
        .child("uv-receipt.toml")
        .assert(predicate::path::missing());
    Ok(())
}
/// Verify that installing a package which provides no executables fails with
/// a hint pointing at a dependency (`fastapi-cli`) that does provide one.
#[test]
fn tool_install_suggest_other_packages_with_executable() {
    // FastAPI 0.111 is only available from this date onwards.
    let context = TestContext::new("3.12")
        .with_exclude_newer("2024-05-04T00:00:00Z")
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Filter out the platform-dependent `uvloop` line from the install list.
    let mut filters = context.filters();
    filters.push(("\\+ uvloop(.+)\n ", ""));
    uv_snapshot!(filters, context.tool_install()
        .arg("fastapi==0.111.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str()), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    No executables are provided by package `fastapi`; removing tool
    hint: An executable with the name `fastapi` is available via dependency `fastapi-cli`.
          Did you mean `uv tool install fastapi-cli`?
    ----- stderr -----
    Resolved 35 packages in [TIME]
    Prepared 35 packages in [TIME]
    Installed 35 packages in [TIME]
     + annotated-types==0.6.0
     + anyio==4.3.0
     + certifi==2024.2.2
     + click==8.1.7
     + dnspython==2.6.1
     + email-validator==2.1.1
     + fastapi==0.111.0
     + fastapi-cli==0.0.2
     + h11==0.14.0
     + httpcore==1.0.5
     + httptools==0.6.1
     + httpx==0.27.0
     + idna==3.7
     + jinja2==3.1.3
     + markdown-it-py==3.0.0
     + markupsafe==2.1.5
     + mdurl==0.1.2
     + orjson==3.10.3
     + pydantic==2.7.1
     + pydantic-core==2.18.2
     + pygments==2.17.2
     + python-dotenv==1.0.1
     + python-multipart==0.0.9
     + pyyaml==6.0.1
     + rich==13.7.1
     + shellingham==1.5.4
     + sniffio==1.3.1
     + starlette==0.37.2
     + typer==0.12.3
     + typing-extensions==4.11.0
     + ujson==5.9.0
     + uvicorn==0.29.0
     + watchfiles==0.21.0
     + websockets==12.0
    error: Failed to install entrypoints for `fastapi`
    ");
}
/// Test installing a tool at a version.
///
/// Pins `black==24.2.0` and verifies the install output, the generated
/// entrypoint script, the receipt (which should record the version
/// specifier), and the runnable executable.
#[test]
fn tool_install_version() {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Install `black`
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("black==24.2.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 6 packages in [TIME]
    Prepared 6 packages in [TIME]
    Installed 6 packages in [TIME]
     + black==24.2.0
     + click==8.1.7
     + mypy-extensions==1.0.0
     + packaging==24.0
     + pathspec==0.12.1
     + platformdirs==4.2.0
    Installed 2 executables: black, blackd
    "###);
    tool_dir.child("black").assert(predicate::path::is_dir());
    tool_dir
        .child("black")
        .child("uv-receipt.toml")
        .assert(predicate::path::exists());
    let executable = bin_dir.child(format!("black{}", std::env::consts::EXE_SUFFIX));
    assert!(executable.exists());
    // On Windows, we can't snapshot an executable file.
    #[cfg(not(windows))]
    insta::with_settings!({
        filters => context.filters(),
    }, {
        // Should run black in the virtual environment
        assert_snapshot!(fs_err::read_to_string(executable).unwrap(), @r###"
        #![TEMP_DIR]/tools/black/bin/python
        # -*- coding: utf-8 -*-
        import sys
        from black import patched_main
        if __name__ == "__main__":
            if sys.argv[0].endswith("-script.pyw"):
                sys.argv[0] = sys.argv[0][:-11]
            elif sys.argv[0].endswith(".exe"):
                sys.argv[0] = sys.argv[0][:-4]
            sys.exit(patched_main())
        "###);
    });
    insta::with_settings!({
        filters => context.filters(),
    }, {
        // We should have a tool receipt
        assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
        [tool]
        requirements = [{ name = "black", specifier = "==24.2.0" }]
        entrypoints = [
            { name = "black", install-path = "[TEMP_DIR]/bin/black", from = "black" },
            { name = "blackd", install-path = "[TEMP_DIR]/bin/blackd", from = "black" },
        ]
        [tool.options]
        exclude-newer = "2024-03-25T00:00:00Z"
        "###);
    });
    // The pinned version should be what actually runs.
    uv_snapshot!(context.filters(), Command::new("black").arg("--version").env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    black, 24.2.0 (compiled: yes)
    Python (CPython) 3.12.[X]
    ----- stderr -----
    "###);
}
/// Test an editable installation of a tool.
///
/// Installs a local editable `black`, verifies the script/receipt, then
/// re-requests `black` by name (reuses the editable) and at a pinned
/// registry version (replaces the editable install).
#[test]
fn tool_install_editable() {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Install `black` as an editable package.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("-e")
        .arg(context.workspace_root.join("test/packages/black_editable"))
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + black==0.1.0 (from file://[WORKSPACE]/test/packages/black_editable)
    Installed 1 executable: black
    "###);
    tool_dir.child("black").assert(predicate::path::is_dir());
    tool_dir
        .child("black")
        .child("uv-receipt.toml")
        .assert(predicate::path::exists());
    let executable = bin_dir.child(format!("black{}", std::env::consts::EXE_SUFFIX));
    assert!(executable.exists());
    // On Windows, we can't snapshot an executable file.
    #[cfg(not(windows))]
    insta::with_settings!({
        filters => context.filters(),
    }, {
        // Should run black in the virtual environment
        assert_snapshot!(fs_err::read_to_string(&executable).unwrap(), @r###"
        #![TEMP_DIR]/tools/black/bin/python
        # -*- coding: utf-8 -*-
        import sys
        from black import main
        if __name__ == "__main__":
            if sys.argv[0].endswith("-script.pyw"):
                sys.argv[0] = sys.argv[0][:-11]
            elif sys.argv[0].endswith(".exe"):
                sys.argv[0] = sys.argv[0][:-4]
            sys.exit(main())
        "###);
    });
    insta::with_settings!({
        filters => context.filters(),
    }, {
        // We should have a tool receipt
        assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
        [tool]
        requirements = [{ name = "black", editable = "[WORKSPACE]/test/packages/black_editable" }]
        entrypoints = [
            { name = "black", install-path = "[TEMP_DIR]/bin/black", from = "black" },
        ]
        [tool.options]
        exclude-newer = "2024-03-25T00:00:00Z"
        "###);
    });
    // The editable's stub entrypoint prints a greeting rather than a version.
    uv_snapshot!(context.filters(), Command::new("black").arg("--version").env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Hello world!
    ----- stderr -----
    "###);
    // Request `black`. It should reinstall from the registry.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("black")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Audited 1 package in [TIME]
    Installed 1 executable: black
    "###);
    insta::with_settings!({
        filters => context.filters(),
    }, {
        // We should have a tool receipt
        assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
        [tool]
        requirements = [{ name = "black" }]
        entrypoints = [
            { name = "black", install-path = "[TEMP_DIR]/bin/black", from = "black" },
        ]
        [tool.options]
        exclude-newer = "2024-03-25T00:00:00Z"
        "###);
    });
    // Request `black` at a different version. It should install a new version.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("black")
        .arg("--from")
        .arg("black==24.2.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 6 packages in [TIME]
    Prepared 6 packages in [TIME]
    Uninstalled 1 package in [TIME]
    Installed 6 packages in [TIME]
     - black==0.1.0 (from file://[WORKSPACE]/test/packages/black_editable)
     + black==24.2.0
     + click==8.1.7
     + mypy-extensions==1.0.0
     + packaging==24.0
     + pathspec==0.12.1
     + platformdirs==4.2.0
    Installed 2 executables: black, blackd
    "###);
    insta::with_settings!({
        filters => context.filters(),
    }, {
        // We should have a tool receipt
        assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
        [tool]
        requirements = [{ name = "black", specifier = "==24.2.0" }]
        entrypoints = [
            { name = "black", install-path = "[TEMP_DIR]/bin/black", from = "black" },
            { name = "blackd", install-path = "[TEMP_DIR]/bin/blackd", from = "black" },
        ]
        [tool.options]
        exclude-newer = "2024-03-25T00:00:00Z"
        "###);
    });
}
/// Ensure that we remove any existing entrypoints upon error.
///
/// Installs `black` normally, then replaces it with a local editable package
/// that exposes no entrypoints (which must fail and remove the tool), then
/// verifies that `black` can be re-installed without `--force`.
#[test]
fn tool_install_remove_on_empty() -> Result<()> {
    let context = TestContext::new("3.12").with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");
    // Request `black`. It should reinstall from the registry.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("black")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 6 packages in [TIME]
    Prepared 6 packages in [TIME]
    Installed 6 packages in [TIME]
     + black==24.3.0
     + click==8.1.7
     + mypy-extensions==1.0.0
     + packaging==24.0
     + pathspec==0.12.1
     + platformdirs==4.2.0
    Installed 2 executables: black, blackd
    "###);
    insta::with_settings!({
        filters => context.filters(),
    }, {
        // We should have a tool receipt
        assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
        [tool]
        requirements = [{ name = "black" }]
        entrypoints = [
            { name = "black", install-path = "[TEMP_DIR]/bin/black", from = "black" },
            { name = "blackd", install-path = "[TEMP_DIR]/bin/blackd", from = "black" },
        ]
        [tool.options]
        exclude-newer = "2024-03-25T00:00:00Z"
        "###);
    });
    // Install `black` as an editable package, but without any entrypoints.
    let black = context.temp_dir.child("black");
    fs_err::create_dir_all(black.path())?;
    let pyproject_toml = black.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "black"
        version = "0.1.0"
        description = "Black without any entrypoints"
        authors = []
        dependencies = []
        requires-python = ">=3.11,<3.13"
        [build-system]
        requires = ["hatchling"]
        build-backend = "hatchling.build"
        "#
    })?;
    // Provide a minimal importable package so the build succeeds.
    let src = black.child("src").child("black");
    fs_err::create_dir_all(src.path())?;
    let init = src.child("__init__.py");
    init.touch()?;
    // The entrypoint-less install must fail and remove the tool.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("-e")
        .arg(black.path())
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    No executables are provided by package `black`; removing tool
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Uninstalled 6 packages in [TIME]
    Installed 1 package in [TIME]
     - black==24.3.0
     + black==0.1.0 (from file://[TEMP_DIR]/black)
     - click==8.1.7
     - mypy-extensions==1.0.0
     - packaging==24.0
     - pathspec==0.12.1
     - platformdirs==4.2.0
    error: Failed to install entrypoints for `black`
    ");
    // Re-request `black`. It should reinstall, without requiring `--force`.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("black")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 6 packages in [TIME]
    Installed 6 packages in [TIME]
     + black==24.3.0
     + click==8.1.7
     + mypy-extensions==1.0.0
     + packaging==24.0
     + pathspec==0.12.1
     + platformdirs==4.2.0
    Installed 2 executables: black, blackd
    "###);
    insta::with_settings!({
        filters => context.filters(),
    }, {
        // We should have a tool receipt
        assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
        [tool]
        requirements = [{ name = "black" }]
        entrypoints = [
            { name = "black", install-path = "[TEMP_DIR]/bin/black", from = "black" },
            { name = "blackd", install-path = "[TEMP_DIR]/bin/blackd", from = "black" },
        ]
        [tool.options]
        exclude-newer = "2024-03-25T00:00:00Z"
        "###);
    });
    Ok(())
}
/// Test an editable installation of a tool using `--from`.
#[test]
fn tool_install_editable_from() {
let context = TestContext::new("3.12").with_filtered_exe_suffix();
let tool_dir = context.temp_dir.child("tools");
let bin_dir = context.temp_dir.child("bin");
// Install `black` as an editable package.
uv_snapshot!(context.filters(), context.tool_install()
.arg("black")
.arg("-e")
.arg("--from")
.arg(context.workspace_root.join("test/packages/black_editable"))
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
.env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ black==0.1.0 (from file://[WORKSPACE]/test/packages/black_editable)
Installed 1 executable: black
"###);
tool_dir.child("black").assert(predicate::path::is_dir());
tool_dir
.child("black")
.child("uv-receipt.toml")
.assert(predicate::path::exists());
let executable = bin_dir.child(format!("black{}", std::env::consts::EXE_SUFFIX));
assert!(executable.exists());
// On Windows, we can't snapshot an executable file.
#[cfg(not(windows))]
insta::with_settings!({
filters => context.filters(),
}, {
// Should run black in the virtual environment
assert_snapshot!(fs_err::read_to_string(&executable).unwrap(), @r###"
#![TEMP_DIR]/tools/black/bin/python
# -*- coding: utf-8 -*-
import sys
from black import main
if __name__ == "__main__":
if sys.argv[0].endswith("-script.pyw"):
sys.argv[0] = sys.argv[0][:-11]
elif sys.argv[0].endswith(".exe"):
sys.argv[0] = sys.argv[0][:-4]
sys.exit(main())
"###);
});
insta::with_settings!({
filters => context.filters(),
}, {
// We should have a tool receipt
assert_snapshot!(fs_err::read_to_string(tool_dir.join("black").join("uv-receipt.toml")).unwrap(), @r###"
[tool]
requirements = [{ name = "black", editable = "[WORKSPACE]/test/packages/black_editable" }]
entrypoints = [
{ name = "black", install-path = "[TEMP_DIR]/bin/black", from = "black" },
]
[tool.options]
exclude-newer = "2024-03-25T00:00:00Z"
"###);
});
uv_snapshot!(context.filters(), Command::new("black").arg("--version").env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
Hello world!
----- stderr -----
"###);
}
/// Test installing a tool with `uv tool install --from`
#[test]
fn tool_install_from() {
let context = TestContext::new("3.12").with_filtered_exe_suffix();
let tool_dir = context.temp_dir.child("tools");
let bin_dir = context.temp_dir.child("bin");
// Install `black` using `--from` to specify the version
uv_snapshot!(context.filters(), context.tool_install()
.arg("black")
.arg("--from")
.arg("black==24.2.0")
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
.env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 6 packages in [TIME]
Prepared 6 packages in [TIME]
Installed 6 packages in [TIME]
+ black==24.2.0
+ click==8.1.7
+ mypy-extensions==1.0.0
+ packaging==24.0
+ pathspec==0.12.1
+ platformdirs==4.2.0
Installed 2 executables: black, blackd
"###);
// Attempt to install `black` using `--from` with a different package name
uv_snapshot!(context.filters(), context.tool_install()
.arg("black")
.arg("--from")
.arg("flask==24.2.0")
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
.env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Package name (`flask`) provided with `--from` does not match install request (`black`)
"###);
// Attempt to install `black` using `--from` with a different version
uv_snapshot!(context.filters(), context.tool_install()
.arg("black==24.2.0")
.arg("--from")
.arg("black==24.3.0")
.env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
.env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
.env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Package requirement (`black==24.3.0`) provided with `--from` conflicts with install request (`black==24.2.0`)
"###);
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/format.rs | crates/uv/tests/it/format.rs | use anyhow::Result;
use assert_fs::prelude::*;
use indoc::indoc;
use insta::assert_snapshot;
use crate::common::{TestContext, uv_snapshot};
#[test]
fn format_project() -> Result<()> {
let context = TestContext::new_with_versions(&[]);
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
"#})?;
// Create an unformatted Python file
let main_py = context.temp_dir.child("main.py");
main_py.write_str(indoc! {r"
x = 1
"})?;
uv_snapshot!(context.filters(), context.format(), @r"
success: true
exit_code: 0
----- stdout -----
1 file reformatted
----- stderr -----
warning: `uv format` is experimental and may change without warning. Pass `--preview-features format` to disable this warning.
");
// Check that the file was formatted
let formatted_content = fs_err::read_to_string(&main_py)?;
assert_snapshot!(formatted_content, @r"
x = 1
");
Ok(())
}
#[test]
fn format_missing_pyproject_toml() -> Result<()> {
let context = TestContext::new_with_versions(&[]);
// Create an unformatted Python file
let main_py = context.temp_dir.child("main.py");
main_py.write_str(indoc! {r"
x = 1
"})?;
uv_snapshot!(context.filters(), context.format(), @r"
success: true
exit_code: 0
----- stdout -----
1 file reformatted
----- stderr -----
warning: `uv format` is experimental and may change without warning. Pass `--preview-features format` to disable this warning.
");
// Check that the file was formatted
let formatted_content = fs_err::read_to_string(&main_py)?;
assert_snapshot!(formatted_content, @r"
x = 1
");
Ok(())
}
#[test]
fn format_missing_project_in_pyproject_toml() -> Result<()> {
let context = TestContext::new_with_versions(&[]);
// Create an empty pyproject.toml with no [project] section
context.temp_dir.child("pyproject.toml");
// Create an unformatted Python file
let main_py = context.temp_dir.child("main.py");
main_py.write_str(indoc! {r"
x = 1
"})?;
uv_snapshot!(context.filters(), context.format(), @r"
success: true
exit_code: 0
----- stdout -----
1 file reformatted
----- stderr -----
warning: `uv format` is experimental and may change without warning. Pass `--preview-features format` to disable this warning.
");
// Check that the file was formatted
let formatted_content = fs_err::read_to_string(&main_py)?;
assert_snapshot!(formatted_content, @r"
x = 1
");
Ok(())
}
#[test]
fn format_unmanaged_project() -> Result<()> {
let context = TestContext::new_with_versions(&[]);
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
[tool.uv]
managed = false
"#})?;
// Create an unformatted Python file
let main_py = context.temp_dir.child("main.py");
main_py.write_str(indoc! {r"
x = 1
"})?;
uv_snapshot!(context.filters(), context.format(), @r"
success: true
exit_code: 0
----- stdout -----
1 file reformatted
----- stderr -----
warning: `uv format` is experimental and may change without warning. Pass `--preview-features format` to disable this warning.
");
// Check that the file was formatted
let formatted_content = fs_err::read_to_string(&main_py)?;
assert_snapshot!(formatted_content, @r"
x = 1
");
Ok(())
}
#[test]
fn format_from_project_root() -> Result<()> {
let context = TestContext::new_with_versions(&[]);
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
"#})?;
// Create an unformatted Python file
let main_py = context.temp_dir.child("main.py");
main_py.write_str(indoc! {r"
x = 1
"})?;
let subdir = context.temp_dir.child("subdir");
fs_err::create_dir_all(&subdir)?;
// Using format from a subdirectory should still run in the project root
uv_snapshot!(context.filters(), context.format().current_dir(&subdir), @r"
success: true
exit_code: 0
----- stdout -----
1 file reformatted
----- stderr -----
warning: `uv format` is experimental and may change without warning. Pass `--preview-features format` to disable this warning.
");
// Check that the file was formatted
let formatted_content = fs_err::read_to_string(&main_py)?;
assert_snapshot!(formatted_content, @r"
x = 1
");
Ok(())
}
#[test]
fn format_no_project() -> Result<()> {
let context = TestContext::new_with_versions(&[]);
let main_py = context.temp_dir.child("main.py");
main_py.write_str(indoc! {r"
x = 1
"})?;
uv_snapshot!(context.filters(), context.format().arg("--no-project"), @r"
success: true
exit_code: 0
----- stdout -----
1 file reformatted
----- stderr -----
warning: `uv format` is experimental and may change without warning. Pass `--preview-features format` to disable this warning.
");
// Check that the file was formatted
let formatted_content = fs_err::read_to_string(&main_py)?;
assert_snapshot!(formatted_content, @r"
x = 1
");
Ok(())
}
#[test]
fn format_relative_project() -> Result<()> {
let context = TestContext::new_with_versions(&[]);
let pyproject_toml = context.temp_dir.child("project").child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
"#})?;
// Create an unformatted Python file in the relative project
let relative_project_main_py = context.temp_dir.child("project").child("main.py");
relative_project_main_py.write_str(indoc! {r"
x = 1
"})?;
// Create another unformatted Python file in the root directory
let root_main_py = context.temp_dir.child("main.py");
root_main_py.write_str(indoc! {r"
x = 1
"})?;
uv_snapshot!(context.filters(), context.format().arg("--project").arg("project"), @r"
success: true
exit_code: 0
----- stdout -----
1 file reformatted
----- stderr -----
warning: `uv format` is experimental and may change without warning. Pass `--preview-features format` to disable this warning.
");
// Check that the relative project file was formatted
let relative_project_content = fs_err::read_to_string(&relative_project_main_py)?;
assert_snapshot!(relative_project_content, @r"
x = 1
");
// Check that the root file was not formatted
let root_content = fs_err::read_to_string(&root_main_py)?;
assert_snapshot!(root_content, @r"
x = 1
");
Ok(())
}
#[test]
fn format_fails_malformed_pyproject() -> Result<()> {
let context = TestContext::new_with_versions(&[]);
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str("malformed pyproject.toml")?;
// Create an unformatted Python file
let main_py = context.temp_dir.child("main.py");
main_py.write_str(indoc! {r"
x = 1
"})?;
uv_snapshot!(context.filters(), context.format(), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
warning: Failed to parse `pyproject.toml` during settings discovery:
TOML parse error at line 1, column 11
|
1 | malformed pyproject.toml
| ^
key with no value, expected `=`
warning: `uv format` is experimental and may change without warning. Pass `--preview-features format` to disable this warning.
error: Failed to parse: `pyproject.toml`
Caused by: TOML parse error at line 1, column 11
|
1 | malformed pyproject.toml
| ^
key with no value, expected `=`
");
// Check that the file is not formatted
let formatted_content = fs_err::read_to_string(&main_py)?;
assert_snapshot!(formatted_content, @r"
x = 1
");
Ok(())
}
#[test]
fn format_check() -> Result<()> {
let context = TestContext::new_with_versions(&[]);
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
"#})?;
// Create an unformatted Python file
let main_py = context.temp_dir.child("main.py");
main_py.write_str(indoc! {r"
x = 1
"})?;
uv_snapshot!(context.filters(), context.format().arg("--check"), @r"
success: false
exit_code: 1
----- stdout -----
Would reformat: main.py
1 file would be reformatted
----- stderr -----
warning: `uv format` is experimental and may change without warning. Pass `--preview-features format` to disable this warning.
");
// Verify the file wasn't modified
let content = fs_err::read_to_string(&main_py)?;
assert_snapshot!(content, @r"
x = 1
");
Ok(())
}
#[test]
fn format_diff() -> Result<()> {
let context = TestContext::new_with_versions(&[]);
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
"#})?;
// Create an unformatted Python file
let main_py = context.temp_dir.child("main.py");
main_py.write_str(indoc! {r"
x = 1
"})?;
uv_snapshot!(context.filters(), context.format().arg("--diff"), @r#"
success: false
exit_code: 1
----- stdout -----
--- main.py
+++ main.py
@@ -1 +1 @@
-x = 1
+x = 1
----- stderr -----
warning: `uv format` is experimental and may change without warning. Pass `--preview-features format` to disable this warning.
1 file would be reformatted
"#);
// Verify the file wasn't modified
let content = fs_err::read_to_string(&main_py)?;
assert_snapshot!(content, @r"
x = 1
");
Ok(())
}
#[test]
fn format_with_ruff_args() -> Result<()> {
let context = TestContext::new_with_versions(&[]);
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
"#})?;
// Create a Python file with a long line
let main_py = context.temp_dir.child("main.py");
main_py.write_str(indoc! {r#"
def hello():
print("This is a very long line that should normally be wrapped by the formatter but we will configure it to have a longer line length")
"#})?;
// Run format with custom line length
uv_snapshot!(context.filters(), context.format().arg("--").arg("main.py").arg("--line-length").arg("200"), @r"
success: true
exit_code: 0
----- stdout -----
1 file left unchanged
----- stderr -----
warning: `uv format` is experimental and may change without warning. Pass `--preview-features format` to disable this warning.
");
// Check that the line wasn't wrapped (since we set a long line length)
let formatted_content = fs_err::read_to_string(&main_py)?;
assert_snapshot!(formatted_content, @r#"
def hello():
print("This is a very long line that should normally be wrapped by the formatter but we will configure it to have a longer line length")
"#);
Ok(())
}
#[test]
fn format_specific_files() -> Result<()> {
let context = TestContext::new_with_versions(&[]);
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
"#})?;
// Create multiple unformatted Python files
let main_py = context.temp_dir.child("main.py");
main_py.write_str(indoc! {r"
x = 1
"})?;
let utils_py = context.temp_dir.child("utils.py");
utils_py.write_str(indoc! {r"
x = 1
"})?;
uv_snapshot!(context.filters(), context.format().arg("--").arg("main.py"), @r"
success: true
exit_code: 0
----- stdout -----
1 file reformatted
----- stderr -----
warning: `uv format` is experimental and may change without warning. Pass `--preview-features format` to disable this warning.
");
let main_content = fs_err::read_to_string(&main_py)?;
assert_snapshot!(main_content, @r"
x = 1
");
// Unchanged
let utils_content = fs_err::read_to_string(&utils_py)?;
assert_snapshot!(utils_content, @r"
x = 1
");
Ok(())
}
#[test]
fn format_version_option() -> Result<()> {
let context = TestContext::new_with_versions(&[]);
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.11"
dependencies = []
"#})?;
let main_py = context.temp_dir.child("main.py");
main_py.write_str(indoc! {r"
x = 1
"})?;
// Run format with specific Ruff version
// TODO(zanieb): It'd be nice to assert on the version used here somehow? Maybe we should emit
// the version we're using to stderr? Alas there's not a way to get the Ruff version from the
// format command :)
uv_snapshot!(context.filters(), context.format().arg("--version").arg("0.8.2"), @r"
success: true
exit_code: 0
----- stdout -----
1 file reformatted
----- stderr -----
warning: `uv format` is experimental and may change without warning. Pass `--preview-features format` to disable this warning.
");
Ok(())
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/run.rs | crates/uv/tests/it/run.rs | #![allow(clippy::disallowed_types)]
use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::{fixture::ChildPath, prelude::*};
use indoc::indoc;
use insta::assert_snapshot;
use predicates::{prelude::predicate, str::contains};
use std::path::Path;
use uv_fs::copy_dir_all;
use uv_python::PYTHON_VERSION_FILENAME;
use uv_static::EnvVars;
use crate::common::{TestContext, uv_snapshot};
#[test]
fn run_with_python_version() -> Result<()> {
let context = TestContext::new_with_versions(&["3.12", "3.11", "3.9"]);
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! { r#"
[project]
name = "foo"
version = "1.0.0"
requires-python = ">=3.11, <4"
dependencies = [
"anyio==3.6.0 ; python_version == '3.11'",
"anyio==3.7.0 ; python_version == '3.12'",
]
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#
})?;
let test_script = context.temp_dir.child("main.py");
test_script.write_str(indoc! { r#"
import importlib.metadata
import platform
print(platform.python_version())
print(importlib.metadata.version("anyio"))
"#
})?;
// Our tests change files in <1s, so we must disable CPython bytecode caching with `-B` or we'll
// get stale files, see https://github.com/python/cpython/issues/75953.
let mut command = context.run();
let command_with_args = command.arg("python").arg("-B").arg("main.py");
uv_snapshot!(context.filters(), command_with_args, @r###"
success: true
exit_code: 0
----- stdout -----
3.12.[X]
3.7.0
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
Resolved 5 packages in [TIME]
Prepared 4 packages in [TIME]
Installed 4 packages in [TIME]
+ anyio==3.7.0
+ foo==1.0.0 (from file://[TEMP_DIR]/)
+ idna==3.6
+ sniffio==1.3.1
"###);
// This is the same Python, no reinstallation.
let mut command = context.run();
let command_with_args = command
.arg("-p")
.arg("3.12")
.arg("python")
.arg("-B")
.arg("main.py");
uv_snapshot!(context.filters(), command_with_args, @r###"
success: true
exit_code: 0
----- stdout -----
3.12.[X]
3.7.0
----- stderr -----
Resolved 5 packages in [TIME]
Audited 4 packages in [TIME]
"###);
// This time, we target Python 3.11 instead.
let mut command = context.run();
let command_with_args = command
.arg("-p")
.arg("3.11")
.arg("python")
.arg("-B")
.arg("main.py")
.env_remove(EnvVars::VIRTUAL_ENV);
uv_snapshot!(context.filters(), command_with_args, @r###"
success: true
exit_code: 0
----- stdout -----
3.11.[X]
3.6.0
----- stderr -----
Using CPython 3.11.[X] interpreter at: [PYTHON-3.11]
Removed virtual environment at: .venv
Creating virtual environment at: .venv
Resolved 5 packages in [TIME]
Prepared 1 package in [TIME]
Installed 4 packages in [TIME]
+ anyio==3.6.0
+ foo==1.0.0 (from file://[TEMP_DIR]/)
+ idna==3.6
+ sniffio==1.3.1
"###);
// This time, we target Python 3.9 instead.
let mut command = context.run();
let command_with_args = command
.arg("-p")
.arg("3.9")
.arg("python")
.arg("-B")
.arg("main.py")
.env_remove(EnvVars::VIRTUAL_ENV);
uv_snapshot!(context.filters(), command_with_args, @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Using CPython 3.9.[X] interpreter at: [PYTHON-3.9]
error: The requested interpreter resolved to Python 3.9.[X], which is incompatible with the project's Python requirement: `>=3.11, <4` (from `project.requires-python`)
");
Ok(())
}
#[test]
fn run_args() -> Result<()> {
let context = TestContext::new("3.12");
let mut filters = context.filters();
filters.push((r"Usage: (uv|\.exe) run \[OPTIONS\] (?s).*", "[UV RUN HELP]"));
filters.push((r"usage: .*(\n|.*)*", "usage: [PYTHON HELP]"));
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! { r#"
[project]
name = "foo"
version = "1.0.0"
requires-python = ">=3.8"
dependencies = []
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#
})?;
// We treat arguments before the command as uv arguments
uv_snapshot!(filters, context.run().arg("--help").arg("python"), @r"
success: true
exit_code: 0
----- stdout -----
Run a command or script
[UV RUN HELP]
");
// We don't treat arguments after the command as uv arguments
uv_snapshot!(filters, context.run().arg("python").arg("--help"), @r"
success: true
exit_code: 0
----- stdout -----
usage: [PYTHON HELP]
");
// Can use `--` to separate uv arguments from the command arguments.
uv_snapshot!(filters, context.run().arg("--").arg("python").arg("--version"), @r###"
success: true
exit_code: 0
----- stdout -----
Python 3.12.[X]
----- stderr -----
Resolved 1 package in [TIME]
Audited 1 package in [TIME]
"###);
Ok(())
}
/// Run without specifying any arguments.
///
/// This should list the available scripts.
#[test]
fn run_no_args() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! { r#"
[project]
name = "foo"
version = "1.0.0"
requires-python = ">=3.8"
dependencies = []
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#
})?;
// Run without specifying any arguments.
#[cfg(not(windows))]
uv_snapshot!(context.filters(), context.run(), @r###"
success: false
exit_code: 2
----- stdout -----
Provide a command or script to invoke with `uv run <command>` or `uv run <script>.py`.
The following commands are available in the environment:
- python
- python3
- python3.12
See `uv run --help` for more information.
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ foo==1.0.0 (from file://[TEMP_DIR]/)
"###);
#[cfg(windows)]
uv_snapshot!(context.filters(), context.run(), @r###"
success: false
exit_code: 2
----- stdout -----
Provide a command or script to invoke with `uv run <command>` or `uv run <script>.py`.
The following commands are available in the environment:
- pydoc.bat
- python
- pythonw
See `uv run --help` for more information.
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ foo==1.0.0 (from file://[TEMP_DIR]/)
"###);
Ok(())
}
/// Run a PEP 723-compatible script. The script should take precedence over the workspace
/// dependencies.
#[test]
fn run_pep723_script() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! { r#"
[project]
name = "foo"
version = "1.0.0"
requires-python = ">=3.8"
dependencies = ["anyio"]
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#
})?;
// If the script contains a PEP 723 tag, we should install its requirements.
let test_script = context.temp_dir.child("main.py");
test_script.write_str(indoc! { r#"
# /// script
# requires-python = ">=3.11"
# dependencies = [
# "iniconfig",
# ]
# ///
import iniconfig
"#
})?;
// Running the script should install the requirements.
uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
"###);
// Running again should use the existing environment.
uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
"###);
// But neither invocation should create a lockfile.
assert!(!context.temp_dir.child("main.py.lock").exists());
// Otherwise, the script requirements should _not_ be available, but the project requirements
// should.
let test_non_script = context.temp_dir.child("main.py");
test_non_script.write_str(indoc! { r"
import iniconfig
"
})?;
uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
Resolved 6 packages in [TIME]
Prepared 4 packages in [TIME]
Installed 4 packages in [TIME]
+ anyio==4.3.0
+ foo==1.0.0 (from file://[TEMP_DIR]/)
+ idna==3.6
+ sniffio==1.3.1
Traceback (most recent call last):
File "[TEMP_DIR]/main.py", line 1, in <module>
import iniconfig
ModuleNotFoundError: No module named 'iniconfig'
"###);
// But the script should be runnable.
let test_non_script = context.temp_dir.child("main.py");
test_non_script.write_str(indoc! { r#"
import idna
print("Hello, world!")
"#
})?;
uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
Hello, world!
----- stderr -----
Resolved 6 packages in [TIME]
Audited 4 packages in [TIME]
"###);
// If the script contains a PEP 723 tag, it can omit the dependencies field.
let test_script = context.temp_dir.child("main.py");
test_script.write_str(indoc! { r#"
# /// script
# requires-python = ">=3.11"
# ///
print("Hello, world!")
"#
})?;
// Running the script should install the requirements.
uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
Hello, world!
----- stderr -----
"###);
// Running a script with `--locked` should warn.
uv_snapshot!(context.filters(), context.run().arg("--locked").arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
Hello, world!
----- stderr -----
warning: No lockfile found for Python script (ignoring `--locked`); run `uv lock --script` to generate a lockfile
"###);
// If the script can't be resolved, we should reference the script.
let test_script = context.temp_dir.child("main.py");
test_script.write_str(indoc! { r#"
# /// script
# requires-python = ">=3.11"
# dependencies = [
# "add",
# ]
# ///
"#
})?;
// Running a script with `--group` should warn.
uv_snapshot!(context.filters(), context.run().arg("--group").arg("foo").arg("main.py"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving script dependencies:
╰─▶ Because there are no versions of add and you require add, we can conclude that your requirements are unsatisfiable.
"###);
// If the script can't be resolved, we should reference the script.
let test_script = context.temp_dir.child("main.py");
test_script.write_str(indoc! { r#"
# /// script
# requires-python = ">=3.11"
# dependencies = [
# "add",
# ]
# ///
"#
})?;
uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("main.py"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving script dependencies:
╰─▶ Because there are no versions of add and you require add, we can conclude that your requirements are unsatisfiable.
"###);
// If the script contains an unclosed PEP 723 tag, we should error.
let test_script = context.temp_dir.child("main.py");
test_script.write_str(indoc! { r#"
# /// script
# requires-python = ">=3.11"
# dependencies = [
# "iniconfig",
# ]
# ///
import iniconfig
"#
})?;
uv_snapshot!(context.filters(), context.run().arg("--no-project").arg("main.py"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: An opening tag (`# /// script`) was found without a closing tag (`# ///`). Ensure that every line between the opening and closing tags (including empty lines) starts with a leading `#`.
"###);
Ok(())
}
#[test]
fn run_pep723_script_requires_python() -> Result<()> {
let context = TestContext::new_with_versions(&["3.9", "3.11"]);
// If we have a `.python-version` that's incompatible with the script, we should error.
let python_version = context.temp_dir.child(PYTHON_VERSION_FILENAME);
python_version.write_str("3.9")?;
// If the script contains a PEP 723 tag, we should install its requirements.
let test_script = context.temp_dir.child("main.py");
test_script.write_str(indoc! { r#"
# /// script
# requires-python = ">=3.11"
# dependencies = [
# "iniconfig",
# ]
# ///
import iniconfig
x: str | int = "hello"
print(x)
"#
})?;
uv_snapshot!(context.filters(), context.run().arg("main.py"), @r#"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
warning: The Python request from `.python-version` resolved to Python 3.9.[X], which is incompatible with the script's Python requirement: `>=3.11`
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
Traceback (most recent call last):
File "[TEMP_DIR]/main.py", line 10, in <module>
x: str | int = "hello"
TypeError: unsupported operand type(s) for |: 'type' and 'type'
"#);
// Delete the `.python-version` file to allow the script to run.
fs_err::remove_file(&python_version)?;
uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
hello
----- stderr -----
Resolved 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
"###);
Ok(())
}
/// Run a `.pyw` script. The script should be executed with `pythonw.exe`.
#[test]
fn run_pythonw_script() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! { r#"
[project]
name = "foo"
version = "1.0.0"
requires-python = ">=3.8"
dependencies = ["anyio"]
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#
})?;
let test_script = context.temp_dir.child("main.pyw");
test_script.write_str(indoc! { r"
import anyio
"
})?;
uv_snapshot!(context.filters(), context.run().arg("main.pyw"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 6 packages in [TIME]
Prepared 4 packages in [TIME]
Installed 4 packages in [TIME]
+ anyio==4.3.0
+ foo==1.0.0 (from file://[TEMP_DIR]/)
+ idna==3.6
+ sniffio==1.3.1
"###);
Ok(())
}
/// Run a PEP 723-compatible script with `tool.uv` metadata.
///
/// Exercises two `tool.uv` knobs embedded in the script's inline metadata:
/// `resolution = "lowest-direct"` and `tool.uv.sources` (a Git source).
#[test]
#[cfg(feature = "git")]
fn run_pep723_script_metadata() -> Result<()> {
    let context = TestContext::new("3.12");

    // If the script contains a PEP 723 tag, we should install its requirements.
    let test_script = context.temp_dir.child("main.py");
    test_script.write_str(indoc! { r#"
        # /// script
        # requires-python = ">=3.11"
        # dependencies = [
        #   "iniconfig>1",
        # ]
        #
        # [tool.uv]
        # resolution = "lowest-direct"
        # ///

        import iniconfig
       "#
    })?;

    // With `resolution = "lowest-direct"`, the lowest version satisfying
    // `iniconfig>1` (1.0.1) should be installed rather than the latest.
    uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + iniconfig==1.0.1
    "###);

    // Respect `tool.uv.sources`.
    let test_script = context.temp_dir.child("main.py");
    test_script.write_str(indoc! { r#"
        # /// script
        # requires-python = ">=3.11"
        # dependencies = [
        #   "uv-public-pypackage",
        # ]
        #
        # [tool.uv.sources]
        # uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", rev = "0dacfd662c64cb4ceb16e6cf65a157a8b715b979" }
        # ///

        import uv_public_pypackage
       "#
    })?;

    // The script should succeed with the specified source.
    uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979)
    "###);

    Ok(())
}
/// Run a PEP 723-compatible script with a `[[tool.uv.index]]`.
///
/// The script declares an explicit extra index (Test PyPI) and pins `idna` to
/// it via `tool.uv.sources`; resolution should therefore pick the version
/// available on Test PyPI (2.7) rather than the latest from PyPI.
#[test]
fn run_pep723_script_index() -> Result<()> {
    let context = TestContext::new("3.12");

    let test_script = context.temp_dir.child("main.py");
    test_script.write_str(indoc! { r#"
        # /// script
        # requires-python = ">=3.11"
        # dependencies = [
        #   "idna>=2",
        # ]
        #
        # [[tool.uv.index]]
        # name = "test"
        # url = "https://test.pypi.org/simple"
        # explicit = true
        #
        # [tool.uv.sources]
        # idna = { index = "test" }
        # ///

        import idna
       "#
    })?;

    uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + idna==2.7
    "###);

    Ok(())
}
/// Run a PEP 723-compatible script with `tool.uv` constraints.
///
/// `constraint-dependencies = ["idna<=3"]` caps the transitive `idna`
/// dependency of `anyio`, so the snapshot shows `idna==3.0` instead of the
/// newest release.
#[test]
fn run_pep723_script_constraints() -> Result<()> {
    let context = TestContext::new("3.12");

    let test_script = context.temp_dir.child("main.py");
    test_script.write_str(indoc! { r#"
        # /// script
        # requires-python = ">=3.11"
        # dependencies = [
        #   "anyio>=3",
        # ]
        #
        # [tool.uv]
        # constraint-dependencies = ["idna<=3"]
        # ///

        import anyio
       "#
    })?;

    uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 3 packages in [TIME]
    Prepared 3 packages in [TIME]
    Installed 3 packages in [TIME]
     + anyio==4.3.0
     + idna==3.0
     + sniffio==1.3.1
    "###);

    Ok(())
}
/// Run a PEP 723-compatible script with `tool.uv` overrides.
///
/// Unlike constraints, `override-dependencies` replaces the transitive
/// requirement outright: `idna<=2` forces `idna==2.0` even though `anyio`
/// would normally pull a newer version.
#[test]
fn run_pep723_script_overrides() -> Result<()> {
    let context = TestContext::new("3.12");

    let test_script = context.temp_dir.child("main.py");
    test_script.write_str(indoc! { r#"
        # /// script
        # requires-python = ">=3.11"
        # dependencies = [
        #   "anyio>=3",
        # ]
        #
        # [tool.uv]
        # override-dependencies = ["idna<=2"]
        # ///

        import anyio
       "#
    })?;

    uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 3 packages in [TIME]
    Prepared 3 packages in [TIME]
    Installed 3 packages in [TIME]
     + anyio==4.3.0
     + idna==2.0
     + sniffio==1.3.1
    "###);

    Ok(())
}
/// Run a PEP 723-compatible script with `tool.uv` build constraints.
///
/// `build-constraint-dependencies` limits the versions used in *build*
/// environments (here, the `setuptools` needed to build `requests==1.2`'s
/// sdist), not the script's runtime dependencies. An unsatisfiable build
/// constraint fails the build; a satisfiable one lets it proceed.
#[test]
fn run_pep723_script_build_constraints() -> Result<()> {
    let context = TestContext::new("3.9");

    let test_script = context.temp_dir.child("main.py");

    // Incompatible build constraints.
    test_script.write_str(indoc! { r#"
        # /// script
        # requires-python = ">=3.9"
        # dependencies = [
        #   "anyio>=3",
        #   "requests==1.2"
        # ]
        #
        # [tool.uv]
        # build-constraint-dependencies = ["setuptools==1"]
        # ///

        import anyio
       "#
    })?;

    // `setuptools==1` conflicts with the sdist build's `setuptools>=40.8.0`
    // requirement, so the build (not the resolution) fails.
    uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
      × Failed to download and build `requests==1.2.0`
      ├─▶ Failed to resolve requirements from `setup.py` build
      ├─▶ No solution found when resolving: `setuptools>=40.8.0`
      ╰─▶ Because you require setuptools>=40.8.0 and setuptools==1, we can conclude that your requirements are unsatisfiable.
    "###);

    // Compatible build constraints.
    test_script.write_str(indoc! { r#"
        # /// script
        # requires-python = ">=3.9"
        # dependencies = [
        #   "anyio>=3",
        #   "requests==1.2"
        # ]
        #
        # [tool.uv]
        # build-constraint-dependencies = ["setuptools>=40"]
        # ///

        import anyio
       "#
    })?;

    uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 6 packages in [TIME]
    Prepared 6 packages in [TIME]
    Installed 6 packages in [TIME]
     + anyio==4.3.0
     + exceptiongroup==1.2.0
     + idna==3.6
     + requests==1.2.0
     + sniffio==1.3.1
     + typing-extensions==4.10.0
    "###);

    Ok(())
}
/// Run a PEP 723-compatible script with a lockfile.
#[test]
fn run_pep723_script_lock() -> Result<()> {
let context = TestContext::new("3.12");
let test_script = context.temp_dir.child("main.py");
test_script.write_str(indoc! { r#"
# /// script
# requires-python = ">=3.11"
# dependencies = [
# "iniconfig",
# ]
# ///
import iniconfig
print("Hello, world!")
"#
})?;
// Without a lockfile, running with `--locked` should warn.
uv_snapshot!(context.filters(), context.run().arg("--locked").arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
Hello, world!
----- stderr -----
warning: No lockfile found for Python script (ignoring `--locked`); run `uv lock --script` to generate a lockfile
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ iniconfig==2.0.0
"###);
// Explicitly lock the script.
uv_snapshot!(context.filters(), context.lock().arg("--script").arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
"###);
let lock = context.read("main.py.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.11"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[manifest]
requirements = [{ name = "iniconfig" }]
[[package]]
name = "iniconfig"
version = "2.0.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646, upload-time = "2023-01-07T11:08:11.254Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892, upload-time = "2023-01-07T11:08:09.864Z" },
]
"#
);
});
// Run the script.
uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
Hello, world!
----- stderr -----
Resolved 1 package in [TIME]
Audited 1 package in [TIME]
"###);
// With a lockfile, running with `--locked` should not warn.
uv_snapshot!(context.filters(), context.run().arg("--locked").arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
Hello, world!
----- stderr -----
Resolved 1 package in [TIME]
Audited 1 package in [TIME]
"###);
// Modify the metadata.
test_script.write_str(indoc! { r#"
# /// script
# requires-python = ">=3.11"
# dependencies = [
# "anyio",
# ]
# ///
import anyio
print("Hello, world!")
"#
})?;
// Re-running the script with `--locked` should error.
uv_snapshot!(context.filters(), context.run().arg("--locked").arg("main.py"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
Resolved 3 packages in [TIME]
error: The lockfile at `uv.lock` needs to be updated, but `--locked` was provided. To update the lockfile, run `uv lock`.
"###);
// Re-running the script with `--frozen` should also error, but at runtime.
uv_snapshot!(context.filters(), context.run().arg("--frozen").arg("main.py"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
Audited 1 package in [TIME]
Traceback (most recent call last):
File "[TEMP_DIR]/main.py", line 8, in <module>
import anyio
ModuleNotFoundError: No module named 'anyio'
"###);
// Re-running the script should update the lockfile.
uv_snapshot!(context.filters(), context.run().arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
Hello, world!
----- stderr -----
Resolved 3 packages in [TIME]
Prepared 3 packages in [TIME]
Installed 3 packages in [TIME]
+ anyio==4.3.0
+ idna==3.6
+ sniffio==1.3.1
"###);
let lock = context.read("main.py.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.11"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[manifest]
requirements = [{ name = "anyio" }]
[[package]]
name = "anyio"
version = "4.3.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
{ name = "sniffio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642, upload-time = "2024-02-19T08:36:28.641Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584, upload-time = "2024-02-19T08:36:26.842Z" },
]
[[package]]
name = "idna"
version = "3.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" },
]
[[package]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
]
"#
);
});
Ok(())
}
/// With `managed = false`, we should avoid installing the project itself.
///
/// The snapshot shows no "Resolved/Installed" output at all: `uv run` skips
/// syncing entirely for an unmanaged project and just executes the command.
#[test]
fn run_managed_false() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! { r#"
        [project]
        name = "foo"
        version = "1.0.0"
        requires-python = ">=3.8"
        dependencies = ["anyio"]

        [build-system]
        requires = ["setuptools>=42"]
        build-backend = "setuptools.build_meta"

        [tool.uv]
        managed = false
        "#
    })?;

    uv_snapshot!(context.filters(), context.run().arg("python").arg("--version"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Python 3.12.[X]

    ----- stderr -----
    "###);

    Ok(())
}
/// `uv run` syncs inexactly by default (extraneous packages are kept); with
/// `--exact`, packages no longer in the project's dependencies are removed.
#[test]
fn run_exact() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! { r#"
        [project]
        name = "foo"
        version = "1.0.0"
        requires-python = ">=3.8"
        dependencies = ["iniconfig"]
        "#
    })?;

    uv_snapshot!(context.filters(), context.run().arg("python").arg("-c").arg("import iniconfig"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 2 packages in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + iniconfig==2.0.0
    "###);

    // Remove `iniconfig`.
    pyproject_toml.write_str(indoc! { r#"
        [project]
        name = "foo"
        version = "1.0.0"
        requires-python = ">=3.8"
        dependencies = ["anyio"]
        "#
    })?;

    // By default, `uv run` uses inexact semantics, so both `iniconfig` and `anyio` should still be available.
    uv_snapshot!(context.filters(), context.run().arg("python").arg("-c").arg("import iniconfig; import anyio"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 6 packages in [TIME]
    Prepared 3 packages in [TIME]
    Installed 3 packages in [TIME]
     + anyio==4.3.0
     + idna==3.6
     + sniffio==1.3.1
    "###);

    // But under `--exact`, `iniconfig` should not be available: it is
    // uninstalled during the sync, so the import fails.
    uv_snapshot!(context.filters(), context.run().arg("--exact").arg("python").arg("-c").arg("import iniconfig"), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
    Resolved 6 packages in [TIME]
    Uninstalled 1 package in [TIME]
     - iniconfig==2.0.0
    Traceback (most recent call last):
      File "<string>", line 1, in <module>
    ModuleNotFoundError: No module named 'iniconfig'
    "###);

    Ok(())
}
#[test]
fn run_with() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/python_pin.rs | crates/uv/tests/it/python_pin.rs | use std::path::PathBuf;
use crate::common::{TestContext, uv_snapshot};
use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::fixture::{FileWriteStr, PathChild, PathCreateDir};
use insta::assert_snapshot;
use uv_platform::{Arch, Os};
use uv_python::{PYTHON_VERSION_FILENAME, PYTHON_VERSIONS_FILENAME};
use uv_static::EnvVars;
/// End-to-end behavior of `uv python pin`: reading the pin, creating and
/// updating `.python-version` with version numbers, implementation names,
/// full/partial installation keys, and concrete interpreter paths.
///
/// Output wording: "Pinned" is printed when the canonical value written equals
/// what was already in the file (or the file is new); "Updated ... from X ->
/// Y" when it changes.
#[test]
fn python_pin() {
    let context: TestContext = TestContext::new_with_versions(&["3.11", "3.12"]);

    // Without arguments, we attempt to read the current pin (which does not exist yet)
    uv_snapshot!(context.filters(), context.python_pin(), @r"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    error: No Python version file found; specify a version to create one
    ");

    // Given an argument, we pin to that version
    uv_snapshot!(context.filters(), context.python_pin().arg("any"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `.python-version` to `any`

    ----- stderr -----
    "###);

    let python_version = context.read(PYTHON_VERSION_FILENAME);
    assert_snapshot!(python_version, @r#"any"#);

    // Without arguments, we read the current pin
    uv_snapshot!(context.filters(), context.python_pin(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    any

    ----- stderr -----
    "###);

    // We should not mutate the file
    let python_version = context.read(PYTHON_VERSION_FILENAME);
    assert_snapshot!(python_version, @r#"any"#);

    // Request Python 3.12
    uv_snapshot!(context.filters(), context.python_pin().arg("3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `.python-version` from `any` -> `3.12`

    ----- stderr -----
    "###);

    let python_version = context.read(PYTHON_VERSION_FILENAME);
    assert_snapshot!(python_version, @r###"
    3.12
    "###);

    // Request Python 3.11
    uv_snapshot!(context.filters(), context.python_pin().arg("3.11"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `.python-version` from `3.12` -> `3.11`

    ----- stderr -----
    "###);

    let python_version = context.read(PYTHON_VERSION_FILENAME);
    assert_snapshot!(python_version, @r###"
    3.11
    "###);

    // Request CPython
    uv_snapshot!(context.filters(), context.python_pin().arg("cpython"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `.python-version` from `3.11` -> `cpython`

    ----- stderr -----
    "###);

    let python_version = context.read(PYTHON_VERSION_FILENAME);
    assert_snapshot!(python_version, @r###"
    cpython
    "###);

    // Request CPython 3.12
    uv_snapshot!(context.filters(), context.python_pin().arg("cpython@3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `.python-version` from `cpython` -> `cpython@3.12`

    ----- stderr -----
    "###);

    let python_version = context.read(PYTHON_VERSION_FILENAME);
    assert_snapshot!(python_version, @r###"
    cpython@3.12
    "###);

    // Request CPython 3.12 via non-canonical syntax; `cp3.12` canonicalizes to
    // `cpython@3.12`, matching the existing pin, so this prints "Pinned".
    uv_snapshot!(context.filters(), context.python_pin().arg("cp3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `.python-version` to `cpython@3.12`

    ----- stderr -----
    "###);

    let python_version = context.read(PYTHON_VERSION_FILENAME);
    assert_snapshot!(python_version, @r###"
    cpython@3.12
    "###);

    // Request CPython 3.12 via partial key syntax; this is stored as a full
    // installation key with `any` wildcards for the unspecified segments.
    uv_snapshot!(context.filters(), context.python_pin().arg("cpython-3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `.python-version` from `cpython@3.12` -> `cpython-3.12-any-any-any`

    ----- stderr -----
    "###);

    let python_version = context.read(PYTHON_VERSION_FILENAME);
    assert_snapshot!(python_version, @r###"
    cpython-3.12-any-any-any
    "###);

    // Request a specific path (the first interpreter from the test context,
    // which is Python 3.11).
    uv_snapshot!(context.filters(), context.python_pin().arg(&context.python_versions.first().unwrap().1), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `.python-version` from `cpython-3.12-any-any-any` -> `[PYTHON-3.11]`

    ----- stderr -----
    "###);

    let python_version = context.read(PYTHON_VERSION_FILENAME);
    insta::with_settings!({
        filters => context.filters(),
    }, {
        assert_snapshot!(python_version, @r###"
        [PYTHON-3.11]
        "###);
    });

    // Request an implementation that is not installed
    // (skip on Windows because the snapshot is different and the behavior is not platform dependent)
    #[cfg(unix)]
    {
        // The pin is still written; only a warning is emitted.
        uv_snapshot!(context.filters(), context.python_pin().arg("pypy"), @r"
        success: true
        exit_code: 0
        ----- stdout -----
        Updated `.python-version` from `[PYTHON-3.11]` -> `pypy`

        ----- stderr -----
        warning: No interpreter found for PyPy in managed installations or search path
        ");

        let python_version = context.read(PYTHON_VERSION_FILENAME);
        assert_snapshot!(python_version, @r###"
        pypy
        "###);
    }

    // Request a version that is not installed
    // (skip on Windows because the snapshot is different and the behavior is not platform dependent)
    #[cfg(unix)]
    {
        uv_snapshot!(context.filters(), context.python_pin().arg("3.7"), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        Updated `.python-version` from `pypy` -> `3.7`

        ----- stderr -----
        warning: No interpreter found for Python 3.7 in managed installations or search path
        "###);

        let python_version = context.read(PYTHON_VERSION_FILENAME);
        assert_snapshot!(python_version, @r###"
        3.7
        "###);
    }
}
// If there is no project-level `.python-version` file, respect the global pin.
#[test]
fn python_pin_global_if_no_local() -> Result<()> {
    let context: TestContext = TestContext::new_with_versions(&["3.11", "3.12"]);

    // The global pin lives under the user config dir; create `<config>/uv`
    // so the `--global` write has a parent directory.
    let uv = context.user_config_dir.child("uv");
    uv.create_dir_all()?;

    // Without arguments, we attempt to read the current pin (which does not exist yet)
    uv_snapshot!(context.filters(), context.python_pin(), @r"
    success: false
    exit_code: 2

    ----- stdout -----

    ----- stderr -----
    error: No Python version file found; specify a version to create one
    ");

    // Given an argument, we globally pin to that version
    uv_snapshot!(context.filters(), context.python_pin().arg("3.11").arg("--global"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `[UV_USER_CONFIG_DIR]/.python-version` to `3.11`

    ----- stderr -----
    ");

    // If no local pin, use global.
    uv_snapshot!(context.filters(), context.python_pin(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    3.11

    ----- stderr -----
    "###);

    Ok(())
}
// If there is a project-level `.python-version` file, it takes precedence over
// the global pin.
#[test]
fn python_pin_global_use_local_if_available() -> Result<()> {
    let context: TestContext = TestContext::new_with_versions(&["3.11", "3.12"]);

    let uv = context.user_config_dir.child("uv");
    uv.create_dir_all()?;

    // Given an argument, we globally pin to that version
    uv_snapshot!(context.filters(), context.python_pin().arg("3.12").arg("--global"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `[UV_USER_CONFIG_DIR]/.python-version` to `3.12`

    ----- stderr -----
    ");

    // With no local, we get the global pin
    uv_snapshot!(context.filters(), context.python_pin(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    3.12

    ----- stderr -----
    "###);

    // Confirm the global `.python-version` file was written on disk.
    let mut global_version_path = PathBuf::from(uv.path());
    global_version_path.push(PYTHON_VERSION_FILENAME);

    let global_python_version = context.read(&global_version_path);
    insta::with_settings!({
        filters => context.filters(),
    }, {
        assert_snapshot!(global_python_version, @r###"
        3.12
        "###);
    });

    // Request Python 3.11 for local .python-version
    uv_snapshot!(context.filters(), context.python_pin().arg("3.11"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `.python-version` to `3.11`

    ----- stderr -----
    "###);

    // Local should override global
    uv_snapshot!(context.filters(), context.python_pin(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    3.11

    ----- stderr -----
    "###);

    // We should still be able to check global pin
    uv_snapshot!(context.filters(), context.python_pin().arg("--global"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    3.12

    ----- stderr -----
    "###);

    // Local .python-version exists and has the right version.
    let local_python_version = context.read(PYTHON_VERSION_FILENAME);
    assert_snapshot!(local_python_version, @r###"
    3.11
    "###);

    // Global .python-version still exists and has the right version.
    let global_python_version = context.read(&global_version_path);
    insta::with_settings!({
        filters => context.filters(),
    }, {
        assert_snapshot!(global_python_version, @r###"
        3.12
        "###);
    });

    Ok(())
}
/// `uv python pin --global` should create the user config directory (and any
/// missing parents) rather than failing when it does not exist yet.
#[test]
fn python_pin_global_creates_parent_dirs() {
    let context: TestContext = TestContext::new_with_versions(&["3.12"]);
    let uv_global_config_dir = context.user_config_dir.child("uv");

    // Precondition: unlike the other global-pin tests, do NOT create the
    // directory up front.
    assert!(
        !uv_global_config_dir.exists(),
        "Global config directory should not exist yet."
    );

    uv_snapshot!(context.filters(), context.python_pin().arg("3.12").arg("--global"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `[UV_USER_CONFIG_DIR]/.python-version` to `3.12`

    ----- stderr -----
    ");

    assert!(
        uv_global_config_dir.exists(),
        "Global config directory should be automatically created (if missing) after global pin."
    );
}
/// We do not need a Python interpreter to pin without `--resolved`
/// (skip on Windows because the snapshot is different and the behavior is not platform dependent)
///
/// The context is created with no interpreters at all; the pin is still
/// written, with only a warning that the version cannot be found.
#[cfg(unix)]
#[test]
fn python_pin_no_python() {
    let context: TestContext = TestContext::new_with_versions(&[]);

    uv_snapshot!(context.filters(), context.python_pin().arg("3.12"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `.python-version` to `3.12`

    ----- stderr -----
    warning: No interpreter found for Python 3.12 in managed installations or search path
    ");
}
/// `uv python pin` validates the requested version against the project's
/// `requires-python`.
///
/// Exact incompatible requests are hard errors; range requests that *resolve*
/// to an incompatible interpreter only warn (the pin is still written); and
/// `--no-project` / `--no-workspace` skip the check entirely. Reading the pin
/// also re-validates against the current `requires-python`.
#[test]
fn python_pin_compatible_with_requires_python() -> Result<()> {
    let context: TestContext =
        TestContext::new_with_versions(&["3.10", "3.11"]).with_filtered_python_sources();
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.11"
        dependencies = ["iniconfig"]
        "#,
    )?;

    // An exact version below `requires-python` is rejected outright.
    uv_snapshot!(context.filters(), context.python_pin().arg("3.10"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: The requested Python version `3.10` is incompatible with the project `requires-python` value of `>=3.11`.
    "###);

    // Request a implementation version that is incompatible
    uv_snapshot!(context.filters(), context.python_pin().arg("cpython@3.10"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: The requested Python version `cpython@3.10` is incompatible with the project `requires-python` value of `>=3.11`.
    "###);

    // Request an incompatible version with project discovery turned off
    uv_snapshot!(context.filters(), context.python_pin().arg("cpython@3.10").arg("--no-project"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `.python-version` to `cpython@3.10`

    ----- stderr -----
    "###);

    // And, as an alias, workspace discovery
    uv_snapshot!(context.filters(), context.python_pin().arg("cpython@3.10").arg("--no-workspace"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Pinned `.python-version` to `cpython@3.10`

    ----- stderr -----
    "###);

    // Request a complex version range that resolves to an incompatible version;
    // ranges warn instead of erroring, and the pin is written anyway.
    uv_snapshot!(context.filters(), context.python_pin().arg(">3.8,<3.11"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `.python-version` from `cpython@3.10` -> `>3.8, <3.11`

    ----- stderr -----
    warning: The requested Python version `>3.8, <3.11` resolves to `3.10.[X]` which is incompatible with the project `requires-python` value of `>=3.11`.
    "###);

    // Request a version that is compatible
    uv_snapshot!(context.filters(), context.python_pin().arg("3.11"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `.python-version` from `>3.8, <3.11` -> `3.11`

    ----- stderr -----
    "###);

    // Request a version that is compatible and uses a Python variant
    // (`3.13t` canonicalizes to `3.13+freethreaded`).
    uv_snapshot!(context.filters(), context.python_pin().arg("3.13t"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `.python-version` from `3.11` -> `3.13+freethreaded`

    ----- stderr -----
    warning: No interpreter found for Python 3.13+freethreaded in [PYTHON SOURCES]
    ");

    // Request a implementation version that is compatible
    uv_snapshot!(context.filters(), context.python_pin().arg("cpython@3.11"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `.python-version` from `3.13+freethreaded` -> `cpython@3.11`

    ----- stderr -----
    ");

    let python_version = context.read(PYTHON_VERSION_FILENAME);
    insta::with_settings!({
        filters => context.filters(),
    }, {
        assert_snapshot!(python_version, @r###"
        cpython@3.11
        "###);
    });

    // Updating `requires-python` should affect `uv python pin` compatibilities.
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = ["iniconfig"]
        "#,
    )?;

    // Reading the existing pin now warns, since it no longer satisfies the
    // (stricter) `requires-python`.
    uv_snapshot!(context.filters(), context.python_pin(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    cpython@3.11

    ----- stderr -----
    warning: The pinned Python version `cpython@3.11` is incompatible with the project `requires-python` value of `>=3.12`.
    "###);

    // Request a implementation that resolves to a compatible version
    uv_snapshot!(context.filters(), context.python_pin().arg("cpython"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `.python-version` from `cpython@3.11` -> `cpython`

    ----- stderr -----
    warning: The requested Python version `cpython` resolves to `3.10.[X]` which is incompatible with the project `requires-python` value of `>=3.12`.
    "###);

    uv_snapshot!(context.filters(), context.python_pin(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    cpython

    ----- stderr -----
    warning: The pinned Python version `cpython` resolves to `3.10.[X]` which is incompatible with the project `requires-python` value of `>=3.12`.
    "###);

    // Request a complex version range that resolves to a compatible version
    uv_snapshot!(context.filters(), context.python_pin().arg(">3.8,<3.12"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    Updated `.python-version` from `cpython` -> `>3.8, <3.12`

    ----- stderr -----
    warning: The requested Python version `>3.8, <3.12` resolves to `3.10.[X]` which is incompatible with the project `requires-python` value of `>=3.12`.
    "###);

    uv_snapshot!(context.filters(), context.python_pin(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    >3.8, <3.12

    ----- stderr -----
    warning: The pinned Python version `>3.8, <3.12` resolves to `3.10.[X]` which is incompatible with the project `requires-python` value of `>=3.12`.
    "###);

    Ok(())
}
/// Reading a pin that names an uninstalled Python (here 3.12, with only 3.10
/// and 3.11 available) warns that the pinned version cannot be resolved, but
/// still prints the pin and exits successfully.
///
/// The two branches exist only because the "searched locations" suffix of the
/// warning differs on Windows (which also checks the registry).
#[test]
fn warning_pinned_python_version_not_installed() -> Result<()> {
    let context: TestContext = TestContext::new_with_versions(&["3.10", "3.11"]);
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.11"
        dependencies = ["iniconfig"]
        "#,
    )?;

    // Write the pin file directly, bypassing `uv python pin`.
    let python_version_file = context.temp_dir.child(PYTHON_VERSION_FILENAME);
    python_version_file.write_str(r"3.12")?;
    if cfg!(windows) {
        uv_snapshot!(context.filters(), context.python_pin(), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        3.12

        ----- stderr -----
        warning: Failed to resolve pinned Python version `3.12`: No interpreter found for Python 3.12 in managed installations, search path, or registry
        "###);
    } else {
        uv_snapshot!(context.filters(), context.python_pin(), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        3.12

        ----- stderr -----
        warning: Failed to resolve pinned Python version `3.12`: No interpreter found for Python 3.12 in managed installations or search path
        "###);
    }

    Ok(())
}
/// We do need a Python interpreter for `--resolved` pins
///
/// With `--resolved`, the request must resolve to a concrete installed
/// interpreter before it can be written; with no interpreters available this
/// is a hard error (unlike the warn-and-pin behavior without `--resolved`).
#[test]
fn python_pin_resolve_no_python() {
    let context: TestContext = TestContext::new_with_versions(&[]).with_filtered_python_sources();

    uv_snapshot!(context.filters(), context.python_pin().arg("--resolved").arg("3.12"), @r"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: No interpreter found for Python 3.12 in [PYTHON SOURCES]

    hint: A managed Python download is available for Python 3.12, but Python downloads are set to 'never'
    ");
}
#[test]
fn python_pin_resolve() {
let context: TestContext = TestContext::new_with_versions(&["3.12", "3.13"]);
// We pin the first interpreter on the path
uv_snapshot!(context.filters(), context.python_pin().arg("--resolved").arg("any"), @r###"
success: true
exit_code: 0
----- stdout -----
Pinned `.python-version` to `[PYTHON-3.12]`
----- stderr -----
"###);
let python_version = context.read(PYTHON_VERSION_FILENAME);
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(python_version, @"[PYTHON-3.12]");
});
// Request Python 3.13
uv_snapshot!(context.filters(), context.python_pin().arg("--resolved").arg("3.13"), @r###"
success: true
exit_code: 0
----- stdout -----
Updated `.python-version` from `[PYTHON-3.12]` -> `[PYTHON-3.13]`
----- stderr -----
"###);
let python_version = context.read(PYTHON_VERSION_FILENAME);
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(python_version, @"[PYTHON-3.13]");
});
// Request Python 3.13
uv_snapshot!(context.filters(), context.python_pin().arg("--resolved").arg("3.13"), @r###"
success: true
exit_code: 0
----- stdout -----
Pinned `.python-version` to `[PYTHON-3.13]`
----- stderr -----
"###);
let python_version = context.read(PYTHON_VERSION_FILENAME);
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(python_version, @"[PYTHON-3.13]");
});
// Request CPython
uv_snapshot!(context.filters(), context.python_pin().arg("--resolved").arg("cpython"), @r###"
success: true
exit_code: 0
----- stdout -----
Updated `.python-version` from `[PYTHON-3.13]` -> `[PYTHON-3.12]`
----- stderr -----
"###);
let python_version = context.read(PYTHON_VERSION_FILENAME);
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(python_version, @"[PYTHON-3.12]");
});
// Request CPython 3.13
uv_snapshot!(context.filters(), context.python_pin().arg("--resolved").arg("cpython@3.13"), @r###"
success: true
exit_code: 0
----- stdout -----
Updated `.python-version` from `[PYTHON-3.12]` -> `[PYTHON-3.13]`
----- stderr -----
"###);
let python_version = context.read(PYTHON_VERSION_FILENAME);
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(python_version, @"[PYTHON-3.13]");
});
// Request CPython 3.13 via partial key syntax
uv_snapshot!(context.filters(), context.python_pin().arg("--resolved").arg("cpython-3.13"), @r###"
success: true
exit_code: 0
----- stdout -----
Pinned `.python-version` to `[PYTHON-3.13]`
----- stderr -----
"###);
let python_version = context.read(PYTHON_VERSION_FILENAME);
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(python_version, @"[PYTHON-3.13]");
});
// Request CPython 3.13 for the current platform
let os = Os::from_env();
let arch = Arch::from_env();
uv_snapshot!(context.filters(), context.python_pin().arg("--resolved")
.arg(format!("cpython-3.13-{os}-{arch}"))
, @r###"
success: true
exit_code: 0
----- stdout -----
Pinned `.python-version` to `[PYTHON-3.13]`
----- stderr -----
"###);
let python_version = context.read(PYTHON_VERSION_FILENAME);
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(python_version, @"[PYTHON-3.13]");
});
// Request an implementation that is not installed
// (skip on Windows because the snapshot is different and the behavior is not platform dependent)
#[cfg(unix)]
uv_snapshot!(context.filters(), context.python_pin().arg("--resolved").arg("pypy"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: No interpreter found for PyPy in managed installations or search path
hint: A managed Python download is available for PyPy, but Python downloads are set to 'never'
");
let python_version = context.read(PYTHON_VERSION_FILENAME);
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(python_version, @"[PYTHON-3.13]");
});
// Request a version that is not installed
// (skip on Windows because the snapshot is different and the behavior is not platform dependent)
#[cfg(unix)]
uv_snapshot!(context.filters(), context.python_pin().arg("--resolved").arg("3.7"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: No interpreter found for Python 3.7 in managed installations or search path
"###);
let python_version = context.read(PYTHON_VERSION_FILENAME);
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(python_version, @"[PYTHON-3.13]");
});
}
#[test]
fn python_pin_with_comments() -> Result<()> {
let context = TestContext::new_with_versions(&[]);
let content = indoc::indoc! {r"
3.12
# 3.11
3.10
"};
let version_file = context.temp_dir.child(PYTHON_VERSION_FILENAME);
version_file.write_str(content)?;
uv_snapshot!(context.filters(), context.python_pin(), @r###"
success: true
exit_code: 0
----- stdout -----
3.12
3.10
----- stderr -----
"###);
fs_err::remove_file(version_file)?;
let versions_file = context.temp_dir.child(PYTHON_VERSIONS_FILENAME);
versions_file.write_str(content)?;
uv_snapshot!(context.filters(), context.python_pin(), @r###"
success: true
exit_code: 0
----- stdout -----
3.12
3.10
----- stderr -----
"###);
Ok(())
}
#[test]
#[cfg(feature = "python-managed")]
fn python_pin_install() {
let context: TestContext = TestContext::new_with_versions(&[]).with_filtered_python_sources();
// Should not install 3.12 when downloads are not automatic
uv_snapshot!(context.filters(), context.python_pin().arg("3.12"), @r"
success: true
exit_code: 0
----- stdout -----
Pinned `.python-version` to `3.12`
----- stderr -----
warning: No interpreter found for Python 3.12 in [PYTHON SOURCES]
");
uv_snapshot!(context.filters(), context.python_pin().arg("3.12").env(EnvVars::UV_PYTHON_DOWNLOADS, "auto"), @r"
success: true
exit_code: 0
----- stdout -----
Pinned `.python-version` to `3.12`
----- stderr -----
");
}
#[test]
fn python_pin_rm() {
let context: TestContext = TestContext::new_with_versions(&["3.12"]);
uv_snapshot!(context.filters(), context.python_pin().arg("--rm"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: No Python version file found
");
// Create and remove a local pin
context.python_pin().arg("3.12").assert().success();
uv_snapshot!(context.filters(), context.python_pin().arg("--rm"), @r"
success: true
exit_code: 0
----- stdout -----
Removed Python version file at `.python-version`
----- stderr -----
");
uv_snapshot!(context.filters(), context.python_pin().arg("--rm").arg("--global"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: No global Python pin found
");
// Global does not detect the local pin
context.python_pin().arg("3.12").assert().success();
uv_snapshot!(context.filters(), context.python_pin().arg("--rm").arg("--global"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: No global Python pin found
");
context
.python_pin()
.arg("3.12")
.arg("--global")
.assert()
.success();
uv_snapshot!(context.filters(), context.python_pin().arg("--rm").arg("--global"), @r"
success: true
exit_code: 0
----- stdout -----
Removed global Python pin at `[UV_USER_CONFIG_DIR]/.python-version`
----- stderr -----
");
// Add the global pin again
context
.python_pin()
.arg("3.12")
.arg("--global")
.assert()
.success();
// Remove the local pin
uv_snapshot!(context.filters(), context.python_pin().arg("--rm"), @r"
success: true
exit_code: 0
----- stdout -----
Removed Python version file at `.python-version`
----- stderr -----
");
// The global pin should not be removed without `--global`
uv_snapshot!(context.filters(), context.python_pin().arg("--rm"), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: No Python version file found; use `--rm --global` to remove the global pin
");
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/init.rs | crates/uv/tests/it/init.rs | use std::process::Command;
use anyhow::Result;
use assert_cmd::prelude::OutputAssertExt;
use assert_fs::prelude::*;
use indoc::indoc;
use insta::assert_snapshot;
use predicates::prelude::predicate;
use uv_static::EnvVars;
use crate::common::{TestContext, uv_snapshot};
#[test]
fn init() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.init().arg("foo"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo` at `[TEMP_DIR]/foo`
"###);
let pyproject = context.read("foo/pyproject.toml");
let _ = context.read("foo/README.md");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r###"
[project]
name = "foo"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = []
"###
);
});
// Run `uv lock` in the new project.
uv_snapshot!(context.filters(), context.lock().current_dir(context.temp_dir.join("foo")), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Resolved 1 package in [TIME]
"###);
let python_version = context.read("foo/.python-version");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
python_version, @"3.12"
);
});
}
#[test]
fn init_bare() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.init().arg("foo").arg("--bare"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo` at `[TEMP_DIR]/foo`
"###);
// No extra files should be created
context
.temp_dir
.child("foo/README.md")
.assert(predicate::path::missing());
context
.temp_dir
.child("foo/hello.py")
.assert(predicate::path::missing());
context
.temp_dir
.child("foo/.python-version")
.assert(predicate::path::missing());
context
.temp_dir
.child("foo/.git")
.assert(predicate::path::missing());
let pyproject = context.read("foo/pyproject.toml");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r###"
[project]
name = "foo"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
"###
);
});
}
/// Run `uv init --app` to create an application project
#[test]
fn init_application() -> Result<()> {
let context = TestContext::new("3.12");
let child = context.temp_dir.child("foo");
child.create_dir_all()?;
let pyproject_toml = child.join("pyproject.toml");
let main_py = child.join("main.py");
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--app"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo`
"###);
let pyproject = fs_err::read_to_string(&pyproject_toml)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r###"
[project]
name = "foo"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = []
"###
);
});
let hello = fs_err::read_to_string(main_py)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
hello, @r###"
def main():
print("Hello from foo!")
if __name__ == "__main__":
main()
"###
);
});
uv_snapshot!(context.filters(), context.run().current_dir(&child).arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
Hello from foo!
----- stderr -----
warning: `VIRTUAL_ENV=[VENV]/` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
Resolved 1 package in [TIME]
Audited in [TIME]
"###);
Ok(())
}
/// When `main.py` already exists, we don't create it again
#[test]
fn init_application_hello_exists() -> Result<()> {
let context = TestContext::new("3.12");
let child = context.temp_dir.child("foo");
child.create_dir_all()?;
let pyproject_toml = child.join("pyproject.toml");
let main_py = child.child("main.py");
main_py.touch()?;
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--app"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo`
"###);
let pyproject = fs_err::read_to_string(&pyproject_toml)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r###"
[project]
name = "foo"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = []
"###
);
});
let hello = fs_err::read_to_string(main_py)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
hello, @""
);
});
Ok(())
}
/// When other Python files already exists, we still create `main.py`
#[test]
fn init_application_other_python_exists() -> Result<()> {
let context = TestContext::new("3.12");
let child = context.temp_dir.child("foo");
child.create_dir_all()?;
let pyproject_toml = child.join("pyproject.toml");
let main_py = child.join("main.py");
let other_py = child.child("foo.py");
other_py.touch()?;
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--app"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo`
"###);
let pyproject = fs_err::read_to_string(&pyproject_toml)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r###"
[project]
name = "foo"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = []
"###
);
});
let hello = fs_err::read_to_string(main_py)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
hello, @r###"
def main():
print("Hello from foo!")
if __name__ == "__main__":
main()
"###
);
});
Ok(())
}
/// Run `uv init --app --package` to create a packaged application project
#[test]
fn init_application_package() -> Result<()> {
let context = TestContext::new("3.12");
let child = context.temp_dir.child("foo");
child.create_dir_all()?;
let pyproject_toml = child.join("pyproject.toml");
let init_py = child.join("src").join("foo").join("__init__.py");
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--app").arg("--package"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo`
"###);
let pyproject = fs_err::read_to_string(&pyproject_toml)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = []
[project.scripts]
foo = "foo:main"
[build-system]
requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
build-backend = "uv_build"
"#
);
});
let init = fs_err::read_to_string(init_py)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
init, @r###"
def main() -> None:
print("Hello from foo!")
"###
);
});
uv_snapshot!(context.filters(), context.run().current_dir(&child).arg("foo"), @r###"
success: true
exit_code: 0
----- stdout -----
Hello from foo!
----- stderr -----
warning: `VIRTUAL_ENV=[VENV]/` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ foo==0.1.0 (from file://[TEMP_DIR]/foo)
"###);
Ok(())
}
/// Run `uv init --lib` to create an library project
#[test]
fn init_library() -> Result<()> {
let context = TestContext::new("3.12");
let child = context.temp_dir.child("foo");
child.create_dir_all()?;
let pyproject_toml = child.join("pyproject.toml");
let init_py = child.join("src").join("foo").join("__init__.py");
let py_typed = child.join("src").join("foo").join("py.typed");
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--lib"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo`
"###);
let pyproject = fs_err::read_to_string(&pyproject_toml)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = []
[build-system]
requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
build-backend = "uv_build"
"#
);
});
let init = fs_err::read_to_string(init_py)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
init, @r###"
def hello() -> str:
return "Hello from foo!"
"###
);
});
let py_typed = fs_err::read_to_string(py_typed)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
py_typed, @""
);
});
uv_snapshot!(context.filters(), context.run().current_dir(&child).arg("python").arg("-c").arg("import foo; print(foo.hello())"), @r###"
success: true
exit_code: 0
----- stdout -----
Hello from foo!
----- stderr -----
warning: `VIRTUAL_ENV=[VENV]/` does not match the project environment path `.venv` and will be ignored; use `--active` to target the active environment instead
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Creating virtual environment at: .venv
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ foo==0.1.0 (from file://[TEMP_DIR]/foo)
"###);
Ok(())
}
/// Test the uv build backend with using `uv init --package --preview`. To be merged with the regular
/// init lib test once the uv build backend becomes the stable default.
#[test]
fn init_package_preview() -> Result<()> {
let context = TestContext::new("3.12");
let child = context.temp_dir.child("foo");
child.create_dir_all()?;
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--package").arg("--preview"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo`
"###);
let pyproject = fs_err::read_to_string(child.join("pyproject.toml"))?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = []
[project.scripts]
foo = "foo:main"
[build-system]
requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
build-backend = "uv_build"
"#
);
});
Ok(())
}
#[test]
fn init_bare_lib() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.init().arg("foo").arg("--bare").arg("--lib"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo` at `[TEMP_DIR]/foo`
"###);
// No extra files should be created
context
.temp_dir
.child("foo/README.md")
.assert(predicate::path::missing());
context
.temp_dir
.child("foo/src")
.assert(predicate::path::missing());
context
.temp_dir
.child("foo/.git")
.assert(predicate::path::missing());
context
.temp_dir
.child("foo/.python-version")
.assert(predicate::path::missing());
let pyproject = context.read("foo/pyproject.toml");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
[build-system]
requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
build-backend = "uv_build"
"#
);
});
}
#[test]
fn init_bare_package() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.init().arg("foo").arg("--bare").arg("--package"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo` at `[TEMP_DIR]/foo`
"###);
// No extra files should be created
context
.temp_dir
.child("foo/README.md")
.assert(predicate::path::missing());
context
.temp_dir
.child("foo/src")
.assert(predicate::path::missing());
context
.temp_dir
.child("foo/.git")
.assert(predicate::path::missing());
context
.temp_dir
.child("foo/.python-version")
.assert(predicate::path::missing());
let pyproject = context.read("foo/pyproject.toml");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
[build-system]
requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
build-backend = "uv_build"
"#
);
});
}
#[test]
fn init_bare_opt_in() {
let context = TestContext::new("3.12");
// With `--bare`, you can still opt-in to extras
// TODO(zanieb): Add option for `--readme`
uv_snapshot!(context.filters(), context.init().arg("foo").arg("--bare")
.arg("--description").arg("foo")
.arg("--pin-python")
.arg("--vcs").arg("git"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo` at `[TEMP_DIR]/foo`
"###);
context
.temp_dir
.child("foo/README.md")
.assert(predicate::path::missing());
context
.temp_dir
.child("foo/src")
.assert(predicate::path::missing());
context
.temp_dir
.child("foo/.git")
.assert(predicate::path::is_dir());
context
.temp_dir
.child("foo/.python-version")
.assert(predicate::path::is_file());
let pyproject = context.read("foo/pyproject.toml");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r###"
[project]
name = "foo"
version = "0.1.0"
description = "foo"
requires-python = ">=3.12"
dependencies = []
"###
);
});
}
// General init --script correctness test
#[test]
fn init_script() -> Result<()> {
let context = TestContext::new("3.12");
let child = context.temp_dir.child("foo");
child.create_dir_all()?;
let script = child.join("main.py");
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--script").arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized script at `main.py`
"###);
let script = fs_err::read_to_string(&script)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
script, @r###"
# /// script
# requires-python = ">=3.12"
# dependencies = []
# ///
def main() -> None:
print("Hello from main.py!")
if __name__ == "__main__":
main()
"###
);
});
uv_snapshot!(context.filters(), context.run().current_dir(&child).arg("python").arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
Hello from main.py!
----- stderr -----
"###);
Ok(())
}
/// Using `--bare` with `--script` omits the default script content.
#[test]
fn init_script_bare() -> Result<()> {
let context = TestContext::new("3.12");
let child = context.temp_dir.child("foo");
child.create_dir_all()?;
let script = child.join("main.py");
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--script").arg("--bare").arg("main.py"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized script at `main.py`
"###);
let script = fs_err::read_to_string(&script)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
script, @r###"
# /// script
# requires-python = ">=3.12"
# dependencies = []
# ///
"###
);
});
Ok(())
}
// Ensure python versions passed as arguments are present in file metadata
#[test]
fn init_script_python_version() -> Result<()> {
let context = TestContext::new("3.11");
let child = context.temp_dir.child("foo");
child.create_dir_all()?;
let script = child.join("version.py");
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--script").arg("version.py").arg("--python").arg("3.11"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized script at `version.py`
"###);
let script = fs_err::read_to_string(&script)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
script, @r###"
# /// script
# requires-python = ">=3.11"
# dependencies = []
# ///
def main() -> None:
print("Hello from version.py!")
if __name__ == "__main__":
main()
"###
);
});
Ok(())
}
// Init script should create parent directories if they don't exist
#[test]
fn init_script_create_directory() -> Result<()> {
let context = TestContext::new("3.12");
let child = context.temp_dir.child("foo");
child.create_dir_all()?;
let script = child.join("test").join("dir.py");
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--script").arg("test/dir.py"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized script at `test/dir.py`
"###);
let script = fs_err::read_to_string(&script)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
script, @r###"
# /// script
# requires-python = ">=3.12"
# dependencies = []
# ///
def main() -> None:
print("Hello from dir.py!")
if __name__ == "__main__":
main()
"###
);
});
Ok(())
}
// Init script should fail if file is already a PEP 723 script
#[test]
fn init_script_file_conflicts() -> Result<()> {
let context = TestContext::new("3.12");
let child = context.temp_dir.child("foo");
child.create_dir_all()?;
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--script").arg("name_conflict.py"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized script at `name_conflict.py`
"###);
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--script").arg("name_conflict.py"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: `name_conflict.py` is already a PEP 723 script; use `uv run` to execute it
"###);
let contents = "print(\"Hello, world!\")";
fs_err::write(child.join("existing_script.py"), contents)?;
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--script").arg("existing_script.py"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized script at `existing_script.py`
"###);
let existing_script = fs_err::read_to_string(child.join("existing_script.py"))?;
assert_snapshot!(
existing_script, @r###"
# /// script
# requires-python = ">=3.12"
# dependencies = []
# ///
print("Hello, world!")
"###
);
Ok(())
}
// Init script should not trash an existing shebang.
#[test]
fn init_script_shebang() -> Result<()> {
let context = TestContext::new("3.12");
let script_path = context.temp_dir.child("script.py");
let contents = "#! /usr/bin/env python3\nprint(\"Hello, world!\")";
fs_err::write(&script_path, contents)?;
uv_snapshot!(context.filters(), context.init().arg("--script").arg("script.py"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
warning: If you execute script.py directly, it might ignore its inline metadata.
Consider replacing its shebang with: #!/usr/bin/env -S uv run --script
Initialized script at `script.py`
");
let resulting_script = fs_err::read_to_string(&script_path)?;
assert_snapshot!(resulting_script, @r#"
#! /usr/bin/env python3
#
# /// script
# requires-python = ">=3.12"
# dependencies = []
# ///
print("Hello, world!")
"#
);
// If the shebang already contains `uv`, the result is the same, but we suppress the warning.
let contents = "#!/usr/bin/env -S uv run --script\nprint(\"Hello, world!\")";
fs_err::write(&script_path, contents)?;
uv_snapshot!(context.filters(), context.init().arg("--script").arg("script.py"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized script at `script.py`
");
let resulting_script = fs_err::read_to_string(&script_path)?;
assert_snapshot!(resulting_script, @r#"
#!/usr/bin/env -S uv run --script
#
# /// script
# requires-python = ">=3.12"
# dependencies = []
# ///
print("Hello, world!")
"#
);
Ok(())
}
// Make sure that `uv init --script` picks the latest non-pre-release version of Python
// for the `requires-python` constraint.
#[cfg(feature = "python-patch")]
#[test]
fn init_script_picks_latest_stable_version() -> Result<()> {
let managed_versions = &["3.14.0rc2", "3.13", "3.12"];
// If we do not mark these versions as managed, they would have `PythonSource::SearchPath(First)`, which
// would mean that pre-releases would be preferred without opt-in (see `PythonSource::allows_prereleases`).
let context =
TestContext::new_with_versions(managed_versions).with_versions_as_managed(managed_versions);
let script_path = context.temp_dir.join("main.py");
uv_snapshot!(context.filters(), context.init().arg("--script").arg("main.py"), @r#"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized script at `main.py`
"#);
let resulting_script = fs_err::read_to_string(&script_path)?;
assert_snapshot!(
resulting_script, @r#"
# /// script
# requires-python = ">=3.13"
# dependencies = []
# ///
def main() -> None:
print("Hello from main.py!")
if __name__ == "__main__":
main()
"#
);
Ok(())
}
/// Run `uv init --lib` with an existing py.typed file
#[test]
fn init_py_typed_exists() -> Result<()> {
let context = TestContext::new("3.12");
let child = context.temp_dir.child("foo");
child.create_dir_all()?;
let foo = child.child("src").child("foo");
foo.create_dir_all()?;
let py_typed = foo.join("py.typed");
fs_err::write(&py_typed, "partial")?;
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--lib"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo`
"###);
let py_typed = fs_err::read_to_string(py_typed)?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
py_typed, @"partial"
);
});
Ok(())
}
/// Using `uv init --lib --no-package` isn't allowed
#[test]
fn init_library_no_package() -> Result<()> {
let context = TestContext::new("3.12");
let child = context.temp_dir.child("foo");
child.create_dir_all()?;
uv_snapshot!(context.filters(), context.init().current_dir(&child).arg("--lib").arg("--no-package"), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: the argument '--lib' cannot be used with '--no-package'
Usage: uv init --cache-dir [CACHE_DIR] --lib [PATH]
For more information, try '--help'.
"###);
Ok(())
}
/// Ensure that `uv init` initializes the cache.
#[test]
fn init_cache() -> Result<()> {
let context = TestContext::new("3.12");
fs_err::remove_dir_all(&context.cache_dir)?;
uv_snapshot!(context.filters(), context.init().arg("foo"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo` at `[TEMP_DIR]/foo`
"###);
Ok(())
}
#[test]
fn init_no_readme() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.init().arg("foo").arg("--no-readme"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo` at `[TEMP_DIR]/foo`
"###);
let pyproject = context.read("foo/pyproject.toml");
let _ = fs_err::read_to_string(context.temp_dir.join("foo/README.md")).unwrap_err();
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r###"
[project]
name = "foo"
version = "0.1.0"
description = "Add your description here"
requires-python = ">=3.12"
dependencies = []
"###
);
});
}
#[test]
fn init_no_pin_python() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.init().arg("foo").arg("--no-pin-python"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo` at `[TEMP_DIR]/foo`
"###);
let pyproject = context.read("foo/pyproject.toml");
let _ = fs_err::read_to_string(context.temp_dir.join("foo/.python-version")).unwrap_err();
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r###"
[project]
name = "foo"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = []
"###
);
});
}
#[test]
fn init_library_current_dir() -> Result<()> {
let context = TestContext::new("3.12");
let dir = context.temp_dir.join("foo");
fs_err::create_dir(&dir)?;
uv_snapshot!(context.filters(), context.init().arg("--lib").current_dir(&dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo`
"###);
let pyproject = fs_err::read_to_string(dir.join("pyproject.toml"))?;
let init_py = fs_err::read_to_string(dir.join("src/foo/__init__.py"))?;
let _ = fs_err::read_to_string(dir.join("README.md")).unwrap();
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = []
[build-system]
requires = ["uv_build>=[CURRENT_VERSION],<[NEXT_BREAKING]"]
build-backend = "uv_build"
"#
);
});
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
init_py, @r###"
def hello() -> str:
return "Hello from foo!"
"###
);
});
// Run `uv lock` in the new project.
uv_snapshot!(context.filters(), context.lock().current_dir(&dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Resolved 1 package in [TIME]
"###);
Ok(())
}
#[test]
fn init_application_current_dir() -> Result<()> {
let context = TestContext::new("3.12");
let dir = context.temp_dir.join("foo");
fs_err::create_dir(&dir)?;
uv_snapshot!(context.filters(), context.init().arg("--app").current_dir(&dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo`
"###);
let pyproject = fs_err::read_to_string(dir.join("pyproject.toml"))?;
let main_py = fs_err::read_to_string(dir.join("main.py"))?;
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r###"
[project]
name = "foo"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = []
"###
);
});
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
main_py, @r###"
def main():
print("Hello from foo!")
if __name__ == "__main__":
main()
"###
);
});
// Run `uv lock` in the new project.
uv_snapshot!(context.filters(), context.lock().current_dir(&dir), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Using CPython 3.12.[X] interpreter at: [PYTHON-3.12]
Resolved 1 package in [TIME]
"###);
Ok(())
}
#[test]
fn init_dot_args() -> Result<()> {
let context = TestContext::new("3.12");
let dir = context.temp_dir.join("foo");
fs_err::create_dir(&dir)?;
uv_snapshot!(context.filters(), context.init().current_dir(&dir).arg(".").arg("--lib"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Initialized project `foo` at `[TEMP_DIR]/foo`
"###);
let pyproject = fs_err::read_to_string(dir.join("pyproject.toml"))?;
let init_py = fs_err::read_to_string(dir.join("src/foo/__init__.py"))?;
let _ = fs_err::read_to_string(dir.join("README.md")).unwrap();
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject, @r#"
[project]
name = "foo"
version = "0.1.0"
description = "Add your description here"
readme = "README.md"
requires-python = ">=3.12"
dependencies = []
[build-system]
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/pip_install_scenarios.rs | crates/uv/tests/it/pip_install_scenarios.rs | //! DO NOT EDIT
//!
//! Generated with `./scripts/sync_scenarios.sh`
//! Scenarios from <https://github.com/astral-sh/packse/tree/0.3.53/scenarios>
//!
#![cfg(all(feature = "python", feature = "pypi", unix))]
use std::process::Command;
use uv_static::EnvVars;
use crate::common::{TestContext, build_vendor_links_url, packse_index_url, uv_snapshot};
/// Create a `pip install` command with options shared across all scenarios.
///
/// Points the resolver at the packse scenario index and the vendored
/// find-links directory, and drops `UV_EXCLUDE_NEWER` so scenario packages
/// are never filtered out by timestamp.
fn command(context: &TestContext) -> Command {
    let mut cmd = context.pip_install();
    cmd.arg("--index-url")
        .arg(packse_index_url())
        .arg("--find-links")
        .arg(build_vendor_links_url())
        .env_remove(EnvVars::UV_EXCLUDE_NEWER);
    cmd
}
/// The user requires an exact version of package `a` but only other versions exist
///
/// ```text
/// requires-exact-version-does-not-exist
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ └── requires a==2.0.0
/// │ └── unsatisfied: no matching version
/// └── a
/// └── a-1.0.0
/// ```
#[test]
fn requires_exact_version_does_not_exist() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((r"requires-exact-version-does-not-exist-", "package-"));

    // Expect failure: only `a==1.0.0` exists, so the pin `a==2.0.0` cannot resolve.
    uv_snapshot!(filters, command(&context)
        .arg("requires-exact-version-does-not-exist-a==2.0.0")
        , @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because there is no version of package-a==2.0.0 and you require package-a==2.0.0, we can conclude that your requirements are unsatisfiable.
    ");

    context.assert_not_installed("requires_exact_version_does_not_exist_a");
}
/// The user requires a version of `a` greater than `1.0.0` but only smaller or equal versions exist
///
/// ```text
/// requires-greater-version-does-not-exist
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ └── requires a>1.0.0
/// │ └── unsatisfied: no matching version
/// └── a
/// ├── a-0.1.0
/// └── a-1.0.0
/// ```
#[test]
fn requires_greater_version_does_not_exist() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((r"requires-greater-version-does-not-exist-", "package-"));

    // Expect failure: only `a<=1.0.0` is published, so `a>1.0.0` cannot resolve.
    uv_snapshot!(filters, command(&context)
        .arg("requires-greater-version-does-not-exist-a>1.0.0")
        , @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only package-a<=1.0.0 is available and you require package-a>1.0.0, we can conclude that your requirements are unsatisfiable.
    ");

    context.assert_not_installed("requires_greater_version_does_not_exist_a");
}
/// The user requires a version of `a` less than `1.0.0` but only larger versions exist
///
/// ```text
/// requires-less-version-does-not-exist
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ └── requires a<2.0.0
/// │ └── unsatisfied: no matching version
/// └── a
/// ├── a-2.0.0
/// ├── a-3.0.0
/// └── a-4.0.0
/// ```
#[test]
fn requires_less_version_does_not_exist() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((r"requires-less-version-does-not-exist-", "package-"));

    // Expect failure: only `a>=2.0.0` is published, so `a<2.0.0` cannot resolve.
    uv_snapshot!(filters, command(&context)
        .arg("requires-less-version-does-not-exist-a<2.0.0")
        , @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only package-a>=2.0.0 is available and you require package-a<2.0.0, we can conclude that your requirements are unsatisfiable.
    ");

    context.assert_not_installed("requires_less_version_does_not_exist_a");
}
/// The user requires any version of package `a` which does not exist.
///
/// ```text
/// requires-package-does-not-exist
/// ├── environment
/// │ └── python3.12
/// └── root
/// └── requires a
/// └── unsatisfied: no versions for package
/// ```
#[test]
fn requires_package_does_not_exist() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((r"requires-package-does-not-exist-", "package-"));

    // Expect failure: package `a` is not present in the scenario registry at all.
    uv_snapshot!(filters, command(&context)
        .arg("requires-package-does-not-exist-a")
        , @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because package-a was not found in the package registry and you require package-a, we can conclude that your requirements are unsatisfiable.
    ");

    context.assert_not_installed("requires_package_does_not_exist_a");
}
/// The user requires package `a` but `a` requires package `b` which does not exist
///
/// ```text
/// transitive-requires-package-does-not-exist
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ └── requires a
/// │ └── satisfied by a-1.0.0
/// └── a
/// └── a-1.0.0
/// └── requires b
/// └── unsatisfied: no versions for package
/// ```
#[test]
fn transitive_requires_package_does_not_exist() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((r"transitive-requires-package-does-not-exist-", "package-"));

    // Expect failure: `a` exists but its dependency `b` is missing from the registry.
    uv_snapshot!(filters, command(&context)
        .arg("transitive-requires-package-does-not-exist-a")
        , @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because package-b was not found in the package registry and package-a==1.0.0 depends on package-b, we can conclude that package-a==1.0.0 cannot be used.
          And because only package-a==1.0.0 is available and you require package-a, we can conclude that your requirements are unsatisfiable.
    ");

    // The missing transitive dependency prevents `a` itself from being installed.
    context.assert_not_installed("transitive_requires_package_does_not_exist_a");
}
/// There is a non-contiguous range of compatible versions for the requested package `a`, but another dependency `c` excludes the range. This is the same as `dependency-excludes-range-of-compatible-versions` but some of the versions of `a` are incompatible for another reason e.g. dependency on non-existent package `d`.
///
/// ```text
/// dependency-excludes-non-contiguous-range-of-compatible-versions
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ ├── requires a
/// │ │ ├── satisfied by a-1.0.0
/// │ │ ├── satisfied by a-2.0.0
/// │ │ ├── satisfied by a-2.1.0
/// │ │ ├── satisfied by a-2.2.0
/// │ │ ├── satisfied by a-2.3.0
/// │ │ ├── satisfied by a-2.4.0
/// │ │ └── satisfied by a-3.0.0
/// │ ├── requires b<3.0.0,>=2.0.0
/// │ │ └── satisfied by b-2.0.0
/// │ └── requires c
/// │ ├── satisfied by c-1.0.0
/// │ └── satisfied by c-2.0.0
/// ├── a
/// │ ├── a-1.0.0
/// │ │ └── requires b==1.0.0
/// │ │ └── satisfied by b-1.0.0
/// │ ├── a-2.0.0
/// │ │ └── requires b==2.0.0
/// │ │ └── satisfied by b-2.0.0
/// │ ├── a-2.1.0
/// │ │ ├── requires b==2.0.0
/// │ │ │ └── satisfied by b-2.0.0
/// │ │ └── requires d
/// │ │ └── unsatisfied: no versions for package
/// │ ├── a-2.2.0
/// │ │ └── requires b==2.0.0
/// │ │ └── satisfied by b-2.0.0
/// │ ├── a-2.3.0
/// │ │ ├── requires b==2.0.0
/// │ │ │ └── satisfied by b-2.0.0
/// │ │ └── requires d
/// │ │ └── unsatisfied: no versions for package
/// │ ├── a-2.4.0
/// │ │ └── requires b==2.0.0
/// │ │ └── satisfied by b-2.0.0
/// │ └── a-3.0.0
/// │ └── requires b==3.0.0
/// │ └── satisfied by b-3.0.0
/// ├── b
/// │ ├── b-1.0.0
/// │ ├── b-2.0.0
/// │ └── b-3.0.0
/// └── c
/// ├── c-1.0.0
/// │ └── requires a<2.0.0
/// │ └── satisfied by a-1.0.0
/// └── c-2.0.0
/// └── requires a>=3.0.0
/// └── satisfied by a-3.0.0
/// ```
#[test]
fn dependency_excludes_non_contiguous_range_of_compatible_versions() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((
        r"dependency-excludes-non-contiguous-range-of-compatible-versions-",
        "package-",
    ));

    // Expect failure: `c` (all versions) excludes the `a` 2.x range that is the
    // only range compatible with the requested `b>=2.0.0,<3.0.0`.
    uv_snapshot!(filters, command(&context)
        .arg("dependency-excludes-non-contiguous-range-of-compatible-versions-a")
        .arg("dependency-excludes-non-contiguous-range-of-compatible-versions-b<3.0.0,>=2.0.0")
        .arg("dependency-excludes-non-contiguous-range-of-compatible-versions-c")
        , @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because package-a==1.0.0 depends on package-b==1.0.0 and only the following versions of package-a are available:
              package-a==1.0.0
              package-a>=2.0.0
          we can conclude that package-a<2.0.0 depends on package-b==1.0.0.
          And because only package-a<=3.0.0 is available, we can conclude that package-a<2.0.0 depends on package-b==1.0.0. (1)
          Because only the following versions of package-c are available:
              package-c==1.0.0
              package-c==2.0.0
          and package-c==1.0.0 depends on package-a<2.0.0, we can conclude that package-c<2.0.0 depends on package-a<2.0.0.
          And because package-c==2.0.0 depends on package-a>=3.0.0, we can conclude that all versions of package-c depend on one of:
              package-a<2.0.0
              package-a>=3.0.0
          And because we know from (1) that package-a<2.0.0 depends on package-b==1.0.0, we can conclude that package-a!=3.0.0, package-b!=1.0.0, all versions of package-c are incompatible.
          And because package-a==3.0.0 depends on package-b==3.0.0, we can conclude that all versions of package-c depend on one of:
              package-b<=1.0.0
              package-b>=3.0.0
          And because you require package-b>=2.0.0,<3.0.0 and package-c, we can conclude that your requirements are unsatisfiable.
    ");

    // Only the `2.x` versions of `a` are available since `a==1.0.0` and `a==3.0.0` require incompatible versions of `b`, but all available versions of `c` exclude that range of `a` so resolution fails.
    context
        .assert_not_installed("dependency_excludes_non_contiguous_range_of_compatible_versions_a");
    context
        .assert_not_installed("dependency_excludes_non_contiguous_range_of_compatible_versions_b");
    context
        .assert_not_installed("dependency_excludes_non_contiguous_range_of_compatible_versions_c");
}
/// There is a range of compatible versions for the requested package `a`, but another dependency `c` excludes that range.
///
/// ```text
/// dependency-excludes-range-of-compatible-versions
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ ├── requires a
/// │ │ ├── satisfied by a-1.0.0
/// │ │ ├── satisfied by a-2.0.0
/// │ │ ├── satisfied by a-2.1.0
/// │ │ ├── satisfied by a-2.2.0
/// │ │ ├── satisfied by a-2.3.0
/// │ │ └── satisfied by a-3.0.0
/// │ ├── requires b<3.0.0,>=2.0.0
/// │ │ └── satisfied by b-2.0.0
/// │ └── requires c
/// │ ├── satisfied by c-1.0.0
/// │ └── satisfied by c-2.0.0
/// ├── a
/// │ ├── a-1.0.0
/// │ │ └── requires b==1.0.0
/// │ │ └── satisfied by b-1.0.0
/// │ ├── a-2.0.0
/// │ │ └── requires b==2.0.0
/// │ │ └── satisfied by b-2.0.0
/// │ ├── a-2.1.0
/// │ │ └── requires b==2.0.0
/// │ │ └── satisfied by b-2.0.0
/// │ ├── a-2.2.0
/// │ │ └── requires b==2.0.0
/// │ │ └── satisfied by b-2.0.0
/// │ ├── a-2.3.0
/// │ │ └── requires b==2.0.0
/// │ │ └── satisfied by b-2.0.0
/// │ └── a-3.0.0
/// │ └── requires b==3.0.0
/// │ └── satisfied by b-3.0.0
/// ├── b
/// │ ├── b-1.0.0
/// │ ├── b-2.0.0
/// │ └── b-3.0.0
/// └── c
/// ├── c-1.0.0
/// │ └── requires a<2.0.0
/// │ └── satisfied by a-1.0.0
/// └── c-2.0.0
/// └── requires a>=3.0.0
/// └── satisfied by a-3.0.0
/// ```
#[test]
fn dependency_excludes_range_of_compatible_versions() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((
        r"dependency-excludes-range-of-compatible-versions-",
        "package-",
    ));

    // Expect failure: every version of `c` excludes the `a` 2.x range, which is
    // the only range compatible with the requested `b>=2.0.0,<3.0.0`.
    uv_snapshot!(filters, command(&context)
        .arg("dependency-excludes-range-of-compatible-versions-a")
        .arg("dependency-excludes-range-of-compatible-versions-b<3.0.0,>=2.0.0")
        .arg("dependency-excludes-range-of-compatible-versions-c")
        , @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because package-a==1.0.0 depends on package-b==1.0.0 and only the following versions of package-a are available:
              package-a==1.0.0
              package-a>=2.0.0
          we can conclude that package-a<2.0.0 depends on package-b==1.0.0.
          And because only package-a<=3.0.0 is available, we can conclude that package-a<2.0.0 depends on package-b==1.0.0. (1)
          Because only the following versions of package-c are available:
              package-c==1.0.0
              package-c==2.0.0
          and package-c==1.0.0 depends on package-a<2.0.0, we can conclude that package-c<2.0.0 depends on package-a<2.0.0.
          And because package-c==2.0.0 depends on package-a>=3.0.0, we can conclude that all versions of package-c depend on one of:
              package-a<2.0.0
              package-a>=3.0.0
          And because we know from (1) that package-a<2.0.0 depends on package-b==1.0.0, we can conclude that package-a!=3.0.0, package-b!=1.0.0, all versions of package-c are incompatible.
          And because package-a==3.0.0 depends on package-b==3.0.0, we can conclude that all versions of package-c depend on one of:
              package-b<=1.0.0
              package-b>=3.0.0
          And because you require package-b>=2.0.0,<3.0.0 and package-c, we can conclude that your requirements are unsatisfiable.
    ");

    // Only the `2.x` versions of `a` are available since `a==1.0.0` and `a==3.0.0` require incompatible versions of `b`, but all available versions of `c` exclude that range of `a` so resolution fails.
    context.assert_not_installed("dependency_excludes_range_of_compatible_versions_a");
    context.assert_not_installed("dependency_excludes_range_of_compatible_versions_b");
    context.assert_not_installed("dependency_excludes_range_of_compatible_versions_c");
}
/// Only one version of the requested package `a` is compatible, but the user has banned that version.
///
/// ```text
/// excluded-only-compatible-version
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ ├── requires a!=2.0.0
/// │ │ ├── satisfied by a-1.0.0
/// │ │ └── satisfied by a-3.0.0
/// │ └── requires b<3.0.0,>=2.0.0
/// │ └── satisfied by b-2.0.0
/// ├── a
/// │ ├── a-1.0.0
/// │ │ └── requires b==1.0.0
/// │ │ └── satisfied by b-1.0.0
/// │ ├── a-2.0.0
/// │ │ └── requires b==2.0.0
/// │ │ └── satisfied by b-2.0.0
/// │ └── a-3.0.0
/// │ └── requires b==3.0.0
/// │ └── satisfied by b-3.0.0
/// └── b
/// ├── b-1.0.0
/// ├── b-2.0.0
/// └── b-3.0.0
/// ```
#[test]
fn excluded_only_compatible_version() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((r"excluded-only-compatible-version-", "package-"));

    // Expect failure: `a==2.0.0` is the only version compatible with the
    // requested `b` range, but the user excluded it with `a!=2.0.0`.
    uv_snapshot!(filters, command(&context)
        .arg("excluded-only-compatible-version-a!=2.0.0")
        .arg("excluded-only-compatible-version-b<3.0.0,>=2.0.0")
        , @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only the following versions of package-a are available:
              package-a==1.0.0
              package-a==2.0.0
              package-a==3.0.0
          and package-a==1.0.0 depends on package-b==1.0.0, we can conclude that package-a<2.0.0 depends on package-b==1.0.0.
          And because package-a==3.0.0 depends on package-b==3.0.0, we can conclude that all of:
              package-a<2.0.0
              package-a>2.0.0
          depend on one of:
              package-b==1.0.0
              package-b==3.0.0
          And because you require one of:
              package-a<2.0.0
              package-a>2.0.0
          and package-b>=2.0.0,<3.0.0, we can conclude that your requirements are unsatisfiable.
    ");

    // Only `a==1.2.0` is available since `a==1.0.0` and `a==3.0.0` require incompatible versions of `b`. The user has excluded that version of `a` so resolution fails.
    context.assert_not_installed("excluded_only_compatible_version_a");
    context.assert_not_installed("excluded_only_compatible_version_b");
}
/// Only one version of the requested package is available, but the user has banned that version.
///
/// ```text
/// excluded-only-version
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ └── requires a!=1.0.0
/// │ └── unsatisfied: no matching version
/// └── a
/// └── a-1.0.0
/// ```
#[test]
fn excluded_only_version() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((r"excluded-only-version-", "package-"));

    // Expect failure: `a==1.0.0` is the sole release and the user banned it.
    uv_snapshot!(filters, command(&context)
        .arg("excluded-only-version-a!=1.0.0")
        , @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only package-a==1.0.0 is available and you require one of:
              package-a<1.0.0
              package-a>1.0.0
          we can conclude that your requirements are unsatisfiable.
    ");

    // Only `a==1.0.0` is available but the user excluded it.
    context.assert_not_installed("excluded_only_version_a");
}
/// Multiple optional dependencies are requested for the package via an 'all' extra.
///
/// ```text
/// all-extras-required
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ └── requires a[all]
/// │ ├── satisfied by a-1.0.0
/// │ ├── satisfied by a-1.0.0[all]
/// │ ├── satisfied by a-1.0.0[extra_b]
/// │ └── satisfied by a-1.0.0[extra_c]
/// ├── a
/// │ ├── a-1.0.0
/// │ ├── a-1.0.0[all]
/// │ │ ├── requires a[extra_b]
/// │ │ │ ├── satisfied by a-1.0.0
/// │ │ │ ├── satisfied by a-1.0.0[all]
/// │ │ │ ├── satisfied by a-1.0.0[extra_b]
/// │ │ │ └── satisfied by a-1.0.0[extra_c]
/// │ │ └── requires a[extra_c]
/// │ │ ├── satisfied by a-1.0.0
/// │ │ ├── satisfied by a-1.0.0[all]
/// │ │ ├── satisfied by a-1.0.0[extra_b]
/// │ │ └── satisfied by a-1.0.0[extra_c]
/// │ ├── a-1.0.0[extra_b]
/// │ │ └── requires b
/// │ │ └── satisfied by b-1.0.0
/// │ └── a-1.0.0[extra_c]
/// │ └── requires c
/// │ └── satisfied by c-1.0.0
/// ├── b
/// │ └── b-1.0.0
/// └── c
/// └── c-1.0.0
/// ```
#[test]
fn all_extras_required() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((r"all-extras-required-", "package-"));

    // Expect success: the `all` extra transitively enables `extra_b` and
    // `extra_c`, pulling in both `b` and `c`.
    uv_snapshot!(filters, command(&context)
        .arg("all-extras-required-a[all]")
        , @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 3 packages in [TIME]
    Prepared 3 packages in [TIME]
    Installed 3 packages in [TIME]
     + package-a==1.0.0
     + package-b==1.0.0
     + package-c==1.0.0
    ");

    context.assert_installed("all_extras_required_a", "1.0.0");
    context.assert_installed("all_extras_required_b", "1.0.0");
    context.assert_installed("all_extras_required_c", "1.0.0");
}
/// Optional dependencies are requested for the package, the extra is only available on an older version.
///
/// ```text
/// extra-does-not-exist-backtrack
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ └── requires a[extra]
/// │ ├── satisfied by a-2.0.0
/// │ ├── satisfied by a-3.0.0
/// │ ├── satisfied by a-1.0.0
/// │ └── satisfied by a-1.0.0[extra]
/// ├── a
/// │ ├── a-2.0.0
/// │ ├── a-3.0.0
/// │ ├── a-1.0.0
/// │ └── a-1.0.0[extra]
/// │ └── requires b==1.0.0
/// │ └── satisfied by b-1.0.0
/// └── b
/// └── b-1.0.0
/// ```
#[test]
fn extra_does_not_exist_backtrack() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((r"extra-does-not-exist-backtrack-", "package-"));

    // Expect success with a warning: the latest `a` (3.0.0) lacks the extra,
    // but missing extras do not force backtracking to the older `a==1.0.0`.
    uv_snapshot!(filters, command(&context)
        .arg("extra-does-not-exist-backtrack-a[extra]")
        , @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + package-a==3.0.0
    warning: The package `package-a==3.0.0` does not have an extra named `extra`
    ");

    // The resolver should not backtrack to `a==1.0.0` because missing extras are allowed during resolution. `b` should not be installed.
    context.assert_installed("extra_does_not_exist_backtrack_a", "3.0.0");
}
/// One of two incompatible optional dependencies are requested for the package.
///
/// ```text
/// extra-incompatible-with-extra-not-requested
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ └── requires a[extra_c]
/// │ ├── satisfied by a-1.0.0
/// │ ├── satisfied by a-1.0.0[extra_b]
/// │ └── satisfied by a-1.0.0[extra_c]
/// ├── a
/// │ ├── a-1.0.0
/// │ ├── a-1.0.0[extra_b]
/// │ │ └── requires b==1.0.0
/// │ │ └── satisfied by b-1.0.0
/// │ └── a-1.0.0[extra_c]
/// │ └── requires b==2.0.0
/// │ └── satisfied by b-2.0.0
/// └── b
/// ├── b-1.0.0
/// └── b-2.0.0
/// ```
#[test]
fn extra_incompatible_with_extra_not_requested() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((r"extra-incompatible-with-extra-not-requested-", "package-"));

    // Expect success: only `extra_c` is requested, so the conflicting
    // `extra_b` requirement on `b` never comes into play.
    uv_snapshot!(filters, command(&context)
        .arg("extra-incompatible-with-extra-not-requested-a[extra_c]")
        , @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 2 packages in [TIME]
    Prepared 2 packages in [TIME]
    Installed 2 packages in [TIME]
     + package-a==1.0.0
     + package-b==2.0.0
    ");

    // Because the user does not request both extras, it is okay that one is incompatible with the other.
    context.assert_installed("extra_incompatible_with_extra_not_requested_a", "1.0.0");
    context.assert_installed("extra_incompatible_with_extra_not_requested_b", "2.0.0");
}
/// Multiple optional dependencies are requested for the package, but they have conflicting requirements with each other.
///
/// ```text
/// extra-incompatible-with-extra
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ └── requires a[extra_b,extra_c]
/// │ ├── satisfied by a-1.0.0
/// │ ├── satisfied by a-1.0.0[extra_b]
/// │ └── satisfied by a-1.0.0[extra_c]
/// ├── a
/// │ ├── a-1.0.0
/// │ ├── a-1.0.0[extra_b]
/// │ │ └── requires b==1.0.0
/// │ │ └── satisfied by b-1.0.0
/// │ └── a-1.0.0[extra_c]
/// │ └── requires b==2.0.0
/// │ └── satisfied by b-2.0.0
/// └── b
/// ├── b-1.0.0
/// └── b-2.0.0
/// ```
#[test]
fn extra_incompatible_with_extra() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((r"extra-incompatible-with-extra-", "package-"));

    // Expect failure: `extra_b` pins `b==1.0.0` while `extra_c` pins `b==2.0.0`.
    uv_snapshot!(filters, command(&context)
        .arg("extra-incompatible-with-extra-a[extra_b,extra_c]")
        , @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only package-a[extra-b]==1.0.0 is available and package-a[extra-b]==1.0.0 depends on package-b==1.0.0, we can conclude that all versions of package-a[extra-b] depend on package-b==1.0.0.
          And because package-a[extra-c]==1.0.0 depends on package-b==2.0.0 and only package-a[extra-c]==1.0.0 is available, we can conclude that all versions of package-a[extra-b] and all versions of package-a[extra-c] are incompatible.
          And because you require package-a[extra-b] and package-a[extra-c], we can conclude that your requirements are unsatisfiable.
    ");

    // Because both `extra_b` and `extra_c` are requested and they require incompatible versions of `b`, `a` cannot be installed.
    context.assert_not_installed("extra_incompatible_with_extra_a");
}
/// Optional dependencies are requested for the package, but the extra is not compatible with other requested versions.
///
/// ```text
/// extra-incompatible-with-root
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ ├── requires a[extra]
/// │ │ ├── satisfied by a-1.0.0
/// │ │ └── satisfied by a-1.0.0[extra]
/// │ └── requires b==2.0.0
/// │ └── satisfied by b-2.0.0
/// ├── a
/// │ ├── a-1.0.0
/// │ └── a-1.0.0[extra]
/// │ └── requires b==1.0.0
/// │ └── satisfied by b-1.0.0
/// └── b
/// ├── b-1.0.0
/// └── b-2.0.0
/// ```
#[test]
fn extra_incompatible_with_root() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((r"extra-incompatible-with-root-", "package-"));

    // Expect failure: the extra pins `b==1.0.0` while the root requests `b==2.0.0`.
    uv_snapshot!(filters, command(&context)
        .arg("extra-incompatible-with-root-a[extra]")
        .arg("extra-incompatible-with-root-b==2.0.0")
        , @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × No solution found when resolving dependencies:
      ╰─▶ Because only package-a[extra]==1.0.0 is available and package-a[extra]==1.0.0 depends on package-b==1.0.0, we can conclude that all versions of package-a[extra] depend on package-b==1.0.0.
          And because you require package-a[extra] and package-b==2.0.0, we can conclude that your requirements are unsatisfiable.
    ");

    // Because the user requested `b==2.0.0` but the requested extra requires `b==1.0.0`, the dependencies cannot be satisfied.
    context.assert_not_installed("extra_incompatible_with_root_a");
    context.assert_not_installed("extra_incompatible_with_root_b");
}
/// Optional dependencies are requested for the package.
///
/// ```text
/// extra-required
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ └── requires a[extra]
/// │ ├── satisfied by a-1.0.0
/// │ └── satisfied by a-1.0.0[extra]
/// ├── a
/// │ ├── a-1.0.0
/// │ └── a-1.0.0[extra]
/// │ └── requires b
/// │ └── satisfied by b-1.0.0
/// └── b
/// └── b-1.0.0
/// ```
#[test]
fn extra_required() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((r"extra-required-", "package-"));

    // Expect success: requesting `a[extra]` pulls in the optional dependency `b`.
    uv_snapshot!(filters, command(&context)
        .arg("extra-required-a[extra]")
        , @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 2 packages in [TIME]
    Prepared 2 packages in [TIME]
    Installed 2 packages in [TIME]
     + package-a==1.0.0
     + package-b==1.0.0
    ");

    context.assert_installed("extra_required_a", "1.0.0");
    context.assert_installed("extra_required_b", "1.0.0");
}
/// Optional dependencies are requested for the package, but the extra does not exist.
///
/// ```text
/// missing-extra
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ └── requires a[extra]
/// │ └── satisfied by a-1.0.0
/// └── a
/// └── a-1.0.0
/// ```
#[test]
fn missing_extra() {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages
    let mut filters = context.filters();
    filters.push((r"missing-extra-", "package-"));

    // Expect success with a warning: the requested extra does not exist on `a`.
    uv_snapshot!(filters, command(&context)
        .arg("missing-extra-a[extra]")
        , @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 1 package in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + package-a==1.0.0
    warning: The package `package-a==1.0.0` does not have an extra named `extra`
    ");

    // Missing extras are ignored during resolution.
    context.assert_installed("missing_extra_a", "1.0.0");
}
/// Multiple optional dependencies are requested for the package.
///
/// ```text
/// multiple-extras-required
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ └── requires a[extra_b,extra_c]
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/tool_upgrade.rs | crates/uv/tests/it/tool_upgrade.rs | use assert_fs::prelude::*;
use insta::assert_snapshot;
use uv_static::EnvVars;
use crate::common::{TestContext, uv_snapshot};
#[test]
fn tool_upgrade_empty() {
    // `uv tool upgrade --all` must be a no-op (and succeed) when no tools are
    // installed, with or without an explicit interpreter request.
    let context = TestContext::new("3.12")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tools_dir = context.temp_dir.child("tools");
    let bins_dir = context.temp_dir.child("bin");

    // No tools installed yet: nothing to upgrade.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("--all")
        .env(EnvVars::PATH, bins_dir.as_os_str())
        .env(EnvVars::UV_TOOL_DIR, tools_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bins_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Nothing to upgrade
    "###);

    // Same, even when a different interpreter is requested.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("--all")
        .arg("-p")
        .arg("3.13")
        .env(EnvVars::PATH, bins_dir.as_os_str())
        .env(EnvVars::UV_TOOL_DIR, tools_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bins_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Nothing to upgrade
    "###);

    // Install the latest `babel`.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("babel")
        .arg("--index-url")
        .arg("https://pypi.org/simple/")
        .env(EnvVars::PATH, bins_dir.as_os_str())
        .env(EnvVars::UV_TOOL_DIR, tools_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bins_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.14.0
    Installed 1 executable: pybabel
    "###);

    // The tool is already at its latest version: still nothing to upgrade.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("--all")
        .env(EnvVars::PATH, bins_dir.as_os_str())
        .env(EnvVars::UV_TOOL_DIR, tools_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bins_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Nothing to upgrade
    "###);

    // Requesting the interpreter the tool already uses changes nothing either.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("--all")
        .arg("-p")
        .arg("3.12")
        .env(EnvVars::PATH, bins_dir.as_os_str())
        .env(EnvVars::UV_TOOL_DIR, tools_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bins_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Nothing to upgrade
    "###);
}
#[test]
fn tool_upgrade_name() {
    // Upgrading a single named tool should move it from an outdated version
    // (installed from Test PyPI) to the latest version on PyPI, dropping
    // dependencies that are no longer required.
    let context = TestContext::new("3.12")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tools_dir = context.temp_dir.child("tools");
    let bins_dir = context.temp_dir.child("bin");

    // Install `babel` from Test PyPI, to get an outdated version.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("babel")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .env(EnvVars::PATH, bins_dir.as_os_str())
        .env(EnvVars::UV_TOOL_DIR, tools_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bins_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.6.0
     + pytz==2018.5
    Installed 1 executable: pybabel
    "###);

    // Upgrade `babel` by installing from PyPI, which should upgrade to the latest version.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("babel")
        .arg("--index-url")
        .arg("https://pypi.org/simple/")
        .env(EnvVars::PATH, bins_dir.as_os_str())
        .env(EnvVars::UV_TOOL_DIR, tools_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bins_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Updated babel v2.6.0 -> v2.14.0
     - babel==2.6.0
     + babel==2.14.0
     - pytz==2018.5
    Installed 1 executable: pybabel
    "###);
}
/// Upgrade several tools at once by passing multiple names to `uv tool upgrade`.
#[test]
fn tool_upgrade_multiple_names() {
    let context = TestContext::new("3.12")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `python-dotenv` from Test PyPI, to get an outdated version.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("python-dotenv")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + python-dotenv==0.10.2.post2
    Installed 1 executable: dotenv
    "###);

    // Install `babel` from Test PyPI, to get an outdated version.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("babel")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.6.0
     + pytz==2018.5
    Installed 1 executable: pybabel
    "###);

    // Upgrade `babel` and `python-dotenv` from PyPI; both environments should be updated.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("babel")
        .arg("python-dotenv")
        .arg("--index-url")
        .arg("https://pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Updated babel v2.6.0 -> v2.14.0
     - babel==2.6.0
     + babel==2.14.0
     - pytz==2018.5
    Installed 1 executable: pybabel
    Updated python-dotenv v0.10.2.post2 -> v1.0.1
     - python-dotenv==0.10.2.post2
     + python-dotenv==1.0.1
    Installed 1 executable: dotenv
    "###);
}
/// Upgrading a tool that was installed with an exact version pin (`babel==2.6.0`)
/// should leave the tool itself pinned, upgrade only its dependencies, and emit
/// a hint suggesting `uv tool install babel@latest`.
#[test]
fn tool_upgrade_pinned_hint() {
    let context = TestContext::new("3.12")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install a specific version of `babel` so the receipt records an exact pin.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("babel==2.6.0")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.6.0
     + pytz==2018.5
    Installed 1 executable: pybabel
    "###);

    // Attempt to upgrade `babel`; it should remain pinned and emit a hint explaining why.
    // Only the transitive `pytz` dependency is updated.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("babel")
        .arg("--index-url")
        .arg("https://pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Modified babel environment
     - pytz==2018.5
     + pytz==2024.1
    hint: `babel` is pinned to `2.6.0` (installed with an exact version pin); reinstall with `uv tool install babel@latest` to upgrade to a new version.
    "###);
}
/// Like `tool_upgrade_pinned_hint`, but the install requirement carries multiple
/// specifiers (`>=2.0,==2.6.0`): the exact pin within the mixed constraint should
/// still be detected and trigger the hint.
#[test]
fn tool_upgrade_pinned_hint_with_mixed_constraint() {
    let context = TestContext::new("3.12")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install a specific version of `babel` with an additional constraint to ensure the requirement
    // contains multiple specifiers while still including an exact pin.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("babel>=2.0,==2.6.0")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.6.0
     + pytz==2018.5
    Installed 1 executable: pybabel
    "###);

    // Attempt to upgrade `babel`; it should remain pinned and emit a hint explaining why.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("babel")
        .arg("--index-url")
        .arg("https://pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Modified babel environment
     - pytz==2018.5
     + pytz==2024.1
    hint: `babel` is pinned to `2.6.0` (installed with an exact version pin); reinstall with `uv tool install babel@latest` to upgrade to a new version.
    "###);
}
/// `uv tool upgrade --all` should upgrade every installed tool.
#[test]
fn tool_upgrade_all() {
    let context = TestContext::new("3.12")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `python-dotenv` from Test PyPI, to get an outdated version.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("python-dotenv")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + python-dotenv==0.10.2.post2
    Installed 1 executable: dotenv
    "###);

    // Install `babel` from Test PyPI, to get an outdated version.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("babel")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.6.0
     + pytz==2018.5
    Installed 1 executable: pybabel
    "###);

    // Upgrade all from PyPI; both tool environments should be updated.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("--all")
        .arg("--index-url")
        .arg("https://pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Updated babel v2.6.0 -> v2.14.0
     - babel==2.6.0
     + babel==2.14.0
     - pytz==2018.5
    Installed 1 executable: pybabel
    Updated python-dotenv v0.10.2.post2 -> v1.0.1
     - python-dotenv==0.10.2.post2
     + python-dotenv==1.0.1
    Installed 1 executable: dotenv
    "###);
}
/// Upgrading a tool that was never installed should fail when named explicitly,
/// while `--all` with no tools installed is a successful no-op.
#[test]
fn tool_upgrade_non_existing_package() {
    let context = TestContext::new("3.12")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Attempt to upgrade `black`, which is not installed.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("black")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
    error: Failed to upgrade black
      Caused by: `black` is not installed; run `uv tool install black` to install
    "###);

    // Attempt to upgrade all; with no tools installed this is a no-op.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("--all")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Nothing to upgrade
    "###);
}
/// A failure to upgrade one tool (here: a corrupted receipt for `python-dotenv`)
/// should not prevent the remaining tools from being upgraded; the command still
/// exits non-zero to report the failure.
#[test]
fn tool_upgrade_not_stop_if_upgrade_fails() -> anyhow::Result<()> {
    let context = TestContext::new("3.12")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `python-dotenv` from Test PyPI, to get an outdated version.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("python-dotenv")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + python-dotenv==0.10.2.post2
    Installed 1 executable: dotenv
    "###);

    // Install `babel` from Test PyPI, to get an outdated version.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("babel")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.6.0
     + pytz==2018.5
    Installed 1 executable: pybabel
    "###);

    // Break the receipt for python-dotenv so its upgrade must fail.
    tool_dir
        .child("python-dotenv")
        .child("uv-receipt.toml")
        .write_str("Invalid receipt")?;

    // Upgrade all from PyPI: `babel` still upgrades, then the broken
    // `python-dotenv` receipt is reported as an error.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("--all")
        .arg("--index-url")
        .arg("https://pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: false
    exit_code: 1
    ----- stdout -----

    ----- stderr -----
    Updated babel v2.6.0 -> v2.14.0
     - babel==2.6.0
     + babel==2.14.0
     - pytz==2018.5
    Installed 1 executable: pybabel
    error: Failed to upgrade python-dotenv
      Caused by: `python-dotenv` is missing a valid receipt; run `uv tool install --force python-dotenv` to reinstall
    "###);

    Ok(())
}
/// Upgrades respect the resolution settings recorded at install time
/// (`--resolution=lowest-direct`), unless overridden on the upgrade command line.
#[test]
fn tool_upgrade_settings() {
    let context = TestContext::new("3.12")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `black` with `lowest-direct`.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("black>=23")
        .arg("--resolution=lowest-direct")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + black==23.1.0
     + click==8.1.7
     + mypy-extensions==1.0.0
     + packaging==24.0
     + pathspec==0.12.1
     + platformdirs==4.2.0
    Installed 2 executables: black, blackd
    "###);

    // Upgrade `black`. This should be a no-op, since the resolution is set to `lowest-direct`.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("black")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Nothing to upgrade
    "###);

    // Upgrade `black`, but override the resolution; now the latest version is selected.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("black")
        .arg("--resolution=highest")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Updated black v23.1.0 -> v24.3.0
     - black==23.1.0
     + black==24.3.0
    Installed 2 executables: black, blackd
    "###);
}
/// Upgrades must respect the version constraint recorded in the install
/// requirement (`babel<2.10`): the tool is updated, but not beyond the bound.
#[test]
fn tool_upgrade_respect_constraints() {
    let context = TestContext::new("3.12")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `babel` from Test PyPI, to get an outdated version.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("babel<2.10")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.6.0
     + pytz==2018.5
    Installed 1 executable: pybabel
    "###);

    // Upgrade `babel` from PyPI. It should be updated, but not beyond the constraint.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("babel")
        .arg("--index-url")
        .arg("https://pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Updated babel v2.6.0 -> v2.9.1
     - babel==2.6.0
     + babel==2.9.1
     - pytz==2018.5
     + pytz==2024.1
    Installed 1 executable: pybabel
    "###);
}
/// Constraints for an upgrade can be supplied inline (`babel<2.12.0`) or via
/// `--upgrade-package`; both cap the selected version for that invocation only.
/// Passing `--upgrade`/`--upgrade-package` explicitly warns, since both are
/// enabled by default on `uv tool upgrade`.
#[test]
fn tool_upgrade_constraint() {
    let context = TestContext::new("3.12")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `babel` from Test PyPI, to get an outdated version.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("babel")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.6.0
     + pytz==2018.5
    Installed 1 executable: pybabel
    "###);

    // Upgrade `babel`, but apply a constraint inline; the upgrade stops below 2.12.0.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("babel<2.12.0")
        .arg("--index-url")
        .arg("https://pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Updated babel v2.6.0 -> v2.11.0
     - babel==2.6.0
     + babel==2.11.0
     - pytz==2018.5
     + pytz==2024.1
    Installed 1 executable: pybabel
    "###);

    // Upgrade `babel`, but apply a constraint via `--upgrade-package`; this warns
    // (the flag is implied) but still caps the version below 2.14.0.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("babel")
        .arg("--index-url")
        .arg("https://pypi.org/simple/")
        .arg("--upgrade-package")
        .arg("babel<2.14.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    warning: `--upgrade-package` is enabled by default on `uv tool upgrade`
    Updated babel v2.11.0 -> v2.13.1
     - babel==2.11.0
     + babel==2.13.1
     - pytz==2024.1
     + setuptools==69.2.0
    Installed 1 executable: pybabel
    "###);

    // Upgrade `babel` without a constraint; now the latest version is selected.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("babel")
        .arg("--index-url")
        .arg("https://pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Updated babel v2.13.1 -> v2.14.0
     - babel==2.13.1
     + babel==2.14.0
     - setuptools==69.2.0
    Installed 1 executable: pybabel
    "###);

    // Passing `--upgrade` explicitly should warn.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("babel")
        .arg("--index-url")
        .arg("https://pypi.org/simple/")
        .arg("--upgrade")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    warning: `--upgrade` is enabled by default on `uv tool upgrade`
    Nothing to upgrade
    "###);
}
/// Upgrade a tool, but only by upgrading one of its dependencies, and not the
/// tool itself: `babel` is installed with an exact pin, so only the transitive
/// `pytz` dependency moves.
#[test]
fn tool_upgrade_with() {
    let context = TestContext::new("3.12")
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `babel` from Test PyPI, to get an outdated version.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("babel==2.6.0")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.6.0
     + pytz==2018.5
    Installed 1 executable: pybabel
    "###);

    // Upgrade `babel` from PyPI. It shouldn't be updated, but `pytz` should be.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("babel")
        .arg("--index-url")
        .arg("https://pypi.org/simple/")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Modified babel environment
     - pytz==2018.5
     + pytz==2024.1
    hint: `babel` is pinned to `2.6.0` (installed with an exact version pin); reinstall with `uv tool install babel@latest` to upgrade to a new version.
    "###);
}
/// Upgrading with `--python` rebuilds the tool environment on the requested
/// interpreter; the environment's `pyvenv.cfg` is checked to confirm the switch.
#[test]
fn tool_upgrade_python() {
    let context = TestContext::new_with_versions(&["3.11", "3.12"])
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `babel` on Python 3.11.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("babel==2.6.0")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .arg("--python").arg("3.11")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.6.0
     + pytz==2018.5
    Installed 1 executable: pybabel
    "###);

    // Upgrade with `--python 3.12`: the environment is rebuilt on 3.12.
    uv_snapshot!(
        context.filters(),
        context.tool_upgrade().arg("babel")
            .arg("--python").arg("3.12")
            .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
            .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
            .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.6.0
     + pytz==2018.5
    Installed 1 executable: pybabel
    Upgraded tool environment for `babel` to Python 3.12
    "###
    );

    // Confirm the rebuilt environment records the 3.12 interpreter in `pyvenv.cfg`.
    insta::with_settings!({
        filters => context.filters(),
    }, {
        let content = fs_err::read_to_string(tool_dir.join("babel").join("pyvenv.cfg")).unwrap();
        let lines: Vec<&str> = content.split('\n').collect();
        assert_snapshot!(lines[lines.len() - 3], @"version_info = 3.12.[X]");
    });
}
/// `uv tool upgrade --all --python 3.12` should rebuild every installed tool
/// environment on the requested interpreter.
#[test]
fn tool_upgrade_python_with_all() {
    let context = TestContext::new_with_versions(&["3.11", "3.12"])
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `babel` on Python 3.11.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("babel==2.6.0")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .arg("--python").arg("3.11")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.6.0
     + pytz==2018.5
    Installed 1 executable: pybabel
    "###);

    // Install `python-dotenv` on Python 3.11.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("python-dotenv")
        .arg("--index-url")
        .arg("https://test.pypi.org/simple/")
        .arg("--python").arg("3.11")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + python-dotenv==0.10.2.post2
    Installed 1 executable: dotenv
    "###);

    // Upgrade all tools to Python 3.12: both environments are rebuilt.
    uv_snapshot!(
        context.filters(),
        context.tool_upgrade().arg("--all")
            .arg("--python").arg("3.12")
            .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
            .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
            .env(EnvVars::PATH, bin_dir.as_os_str()), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.6.0
     + pytz==2018.5
    Installed 1 executable: pybabel
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + python-dotenv==0.10.2.post2
    Installed 1 executable: dotenv
    Upgraded tool environments for `babel` and `python-dotenv` to Python 3.12
    "###
    );

    // Confirm `babel`'s environment now records the 3.12 interpreter.
    insta::with_settings!({
        filters => context.filters(),
    }, {
        let content = fs_err::read_to_string(tool_dir.join("babel").join("pyvenv.cfg")).unwrap();
        let lines: Vec<&str> = content.split('\n').collect();
        assert_snapshot!(lines[lines.len() - 3], @"version_info = 3.12.[X]");
    });

    // Confirm `python-dotenv`'s environment now records the 3.12 interpreter.
    insta::with_settings!({
        filters => context.filters(),
    }, {
        let content = fs_err::read_to_string(tool_dir.join("python-dotenv").join("pyvenv.cfg")).unwrap();
        let lines: Vec<&str> = content.split('\n').collect();
        assert_snapshot!(lines[lines.len() - 3], @"version_info = 3.12.[X]");
    });
}
/// Upgrade a tool together with any additional entrypoints from other
/// packages (installed via `--with-executables-from`): after a `--python`
/// upgrade, both the tool's own executables and the extra ones must be
/// re-installed.
#[test]
fn test_tool_upgrade_additional_entrypoints() {
    let context = TestContext::new_with_versions(&["3.11", "3.12"])
        .with_filtered_counts()
        .with_filtered_exe_suffix();
    let tool_dir = context.temp_dir.child("tools");
    let bin_dir = context.temp_dir.child("bin");

    // Install `babel` entrypoint, and all additional ones from `black` too.
    uv_snapshot!(context.filters(), context.tool_install()
        .arg("--python")
        .arg("3.11")
        .arg("--with-executables-from")
        .arg("black")
        .arg("babel==2.14.0")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved [N] packages in [TIME]
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.14.0
     + black==24.3.0
     + click==8.1.7
     + mypy-extensions==1.0.0
     + packaging==24.0
     + pathspec==0.12.1
     + platformdirs==4.2.0
    Installed 2 executables from `black`: black, blackd
    Installed 1 executable: pybabel
    ");

    // Upgrade python, and make sure that all the entrypoints above get
    // re-installed.
    uv_snapshot!(context.filters(), context.tool_upgrade()
        .arg("--python")
        .arg("3.12")
        .arg("babel")
        .env(EnvVars::UV_TOOL_DIR, tool_dir.as_os_str())
        .env(EnvVars::XDG_BIN_HOME, bin_dir.as_os_str())
        .env(EnvVars::PATH, bin_dir.as_os_str()), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Prepared [N] packages in [TIME]
    Installed [N] packages in [TIME]
     + babel==2.14.0
     + black==24.3.0
     + click==8.1.7
     + mypy-extensions==1.0.0
     + packaging==24.0
     + pathspec==0.12.1
     + platformdirs==4.2.0
    Installed 2 executables from `black`: black, blackd
    Installed 1 executable: pybabel
    Upgraded tool environment for `babel` to Python 3.12
    ");
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/lock_scenarios.rs | crates/uv/tests/it/lock_scenarios.rs | //! DO NOT EDIT
//!
//! Generated with `./scripts/sync_scenarios.sh`
//! Scenarios from <https://github.com/astral-sh/packse/tree/0.3.53/scenarios>
//!
#![cfg(all(feature = "python", feature = "pypi"))]
#![allow(clippy::needless_raw_string_hashes)]
#![allow(clippy::doc_markdown)]
#![allow(clippy::doc_lazy_continuation)]
use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::prelude::*;
use insta::assert_snapshot;
use uv_static::EnvVars;
use crate::common::{TestContext, packse_index_url, uv_snapshot};
/// There are two packages, `a` and `b`. We select `a` with `a==2.0.0` first, and then `b`, but `a==2.0.0` conflicts with all new versions of `b`, so we backtrack through versions of `b`.
///
/// We need to detect this conflict and prioritize `b` over `a` instead of backtracking down to the too old version of `b==1.0.0` that doesn't depend on `a` anymore.
///
/// ```text
/// wrong-backtracking-basic
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ ├── requires a
/// │ │ ├── satisfied by a-1.0.0
/// │ │ └── satisfied by a-2.0.0
/// │ └── requires b
/// │ ├── satisfied by b-1.0.0
/// │ ├── satisfied by b-2.0.0
/// │ ├── satisfied by b-2.0.1
/// │ ├── satisfied by b-2.0.2
/// │ ├── satisfied by b-2.0.3
/// │ ├── satisfied by b-2.0.4
/// │ ├── satisfied by b-2.0.5
/// │ ├── satisfied by b-2.0.6
/// │ ├── satisfied by b-2.0.7
/// │ ├── satisfied by b-2.0.8
/// │ └── satisfied by b-2.0.9
/// ├── a
/// │ ├── a-1.0.0
/// │ └── a-2.0.0
/// ├── b
/// │ ├── b-1.0.0
/// │ │ └── requires too-old
/// │ │ └── satisfied by too-old-1.0.0
/// │ ├── b-2.0.0
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-2.0.1
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-2.0.2
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-2.0.3
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-2.0.4
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-2.0.5
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-2.0.6
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-2.0.7
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-2.0.8
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ └── b-2.0.9
/// │ └── requires a==1.0.0
/// │ └── satisfied by a-1.0.0
/// └── too-old
/// └── too-old-1.0.0
/// ```
#[test]
fn wrong_backtracking_basic() -> Result<()> {
    let context = TestContext::new("3.12");

    // In addition to the standard filters, swap out package names for shorter messages.
    let mut filters = context.filters();
    filters.push((r"wrong-backtracking-basic-", "package-"));

    // A project that requires both `a` and `b` from the packse scenario index.
    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(
        r###"
        [project]
        name = "project"
        version = "0.1.0"
        dependencies = [
          '''wrong-backtracking-basic-a''',
          '''wrong-backtracking-basic-b''',
        ]
        requires-python = ">=3.12"
        "###,
    )?;

    // Lock against the packse scenario index; `UV_EXCLUDE_NEWER` is removed so
    // the scenario's own package versions are visible.
    let mut cmd = context.lock();
    cmd.env_remove(EnvVars::UV_EXCLUDE_NEWER);
    cmd.arg("--index-url").arg(packse_index_url());
    uv_snapshot!(filters, cmd, @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 3 packages in [TIME]
    "
    );

    // The expected resolution: `b==2.0.9` (the newest) with `a==1.0.0`, i.e. the
    // resolver prioritized `b` over `a` rather than backtracking to `b==1.0.0`.
    let lock = context.read("uv.lock");

    insta::with_settings!({
        filters => filters,
    }, {
        assert_snapshot!(
            lock, @r#"
        version = 1
        revision = 3
        requires-python = ">=3.12"

        [[package]]
        name = "project"
        version = "0.1.0"
        source = { virtual = "." }
        dependencies = [
            { name = "package-a" },
            { name = "package-b" },
        ]

        [package.metadata]
        requires-dist = [
            { name = "package-a" },
            { name = "package-b" },
        ]

        [[package]]
        name = "package-a"
        version = "1.0.0"
        source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
        sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_basic_a-1.0.0.tar.gz", hash = "sha256:b4abd2c802ca129d5855225fe456f2a36068c50d0ae545e37a8e08ef0f580b38" }
        wheels = [
            { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_basic_a-1.0.0-py3-none-any.whl", hash = "sha256:d669cb8614076ad7fc83f46b97abb94d86ada4ad5341d070874e96640ef808ad" },
        ]

        [[package]]
        name = "package-b"
        version = "2.0.9"
        source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
        dependencies = [
            { name = "package-a" },
        ]
        sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_basic_b-2.0.9.tar.gz", hash = "sha256:aec746d9adae60458015ad7c11b1b9c589031928c07d9c13f1dff23d473b2480" }
        wheels = [
            { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_basic_b-2.0.9-py3-none-any.whl", hash = "sha256:30c0b2450c13c06d70ccb8804e41d3be9dacc911e27f30ac58b7880d8fe8e705" },
        ]
        "#
        );
    });

    // Assert the idempotence of `uv lock` when resolving from the lockfile (`--locked`).
    context
        .lock()
        .arg("--locked")
        .env_remove(EnvVars::UV_EXCLUDE_NEWER)
        .arg("--index-url")
        .arg(packse_index_url())
        .assert()
        .success();

    Ok(())
}
/// There are three packages, `a`, `b` and `b-inner`. Unlike wrong-backtracking-basic, `b` depends on `b-inner` and `a` and `b-inner` conflict, to add a layer of indirection.
///
/// We select `a` with `a==2.0.0` first, then `b`, and then `b-inner`, but `a==2.0.0` conflicts with all new versions of `b-inner`, so we backtrack through versions of `b-inner`.
///
/// We need to detect this conflict and prioritize `b` and `b-inner` over `a` instead of backtracking down to the too old version of `b-inner==1.0.0` that doesn't depend on `a` anymore.
///
/// ```text
/// wrong-backtracking-indirect
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ ├── requires a
/// │ │ ├── satisfied by a-1.0.0
/// │ │ └── satisfied by a-2.0.0
/// │ └── requires b
/// │ └── satisfied by b-1.0.0
/// ├── a
/// │ ├── a-1.0.0
/// │ └── a-2.0.0
/// ├── b
/// │ └── b-1.0.0
/// │ └── requires b-inner
/// │ ├── satisfied by b-inner-1.0.0
/// │ ├── satisfied by b-inner-2.0.0
/// │ ├── satisfied by b-inner-2.0.1
/// │ ├── satisfied by b-inner-2.0.2
/// │ ├── satisfied by b-inner-2.0.3
/// │ ├── satisfied by b-inner-2.0.4
/// │ ├── satisfied by b-inner-2.0.5
/// │ ├── satisfied by b-inner-2.0.6
/// │ ├── satisfied by b-inner-2.0.7
/// │ ├── satisfied by b-inner-2.0.8
/// │ └── satisfied by b-inner-2.0.9
/// ├── b-inner
/// │ ├── b-inner-1.0.0
/// │ │ └── requires too-old
/// │ │ └── satisfied by too-old-1.0.0
/// │ ├── b-inner-2.0.0
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-inner-2.0.1
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-inner-2.0.2
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-inner-2.0.3
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-inner-2.0.4
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-inner-2.0.5
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-inner-2.0.6
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-inner-2.0.7
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ ├── b-inner-2.0.8
/// │ │ └── requires a==1.0.0
/// │ │ └── satisfied by a-1.0.0
/// │ └── b-inner-2.0.9
/// │ └── requires a==1.0.0
/// │ └── satisfied by a-1.0.0
/// └── too-old
/// └── too-old-1.0.0
/// ```
#[test]
fn wrong_backtracking_indirect() -> Result<()> {
let context = TestContext::new("3.12");
// In addition to the standard filters, swap out package names for shorter messages
let mut filters = context.filters();
filters.push((r"wrong-backtracking-indirect-", "package-"));
// Root project requires both `a` and `b` from the packse scenario.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r###"
[project]
name = "project"
version = "0.1.0"
dependencies = [
'''wrong-backtracking-indirect-a''',
'''wrong-backtracking-indirect-b''',
]
requires-python = ">=3.12"
"###,
)?;
// Resolve against the live packse index; scenario packages postdate the
// global exclude-newer cutoff, so that env var is removed for this command.
let mut cmd = context.lock();
cmd.env_remove(EnvVars::UV_EXCLUDE_NEWER);
cmd.arg("--index-url").arg(packse_index_url());
uv_snapshot!(filters, cmd, @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
"
);
// Snapshot the generated lockfile with the same package-name filters applied.
let lock = context.read("uv.lock");
insta::with_settings!({
filters => filters,
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "package-a" },
{ name = "package-b" },
]
[package.metadata]
requires-dist = [
{ name = "package-a" },
{ name = "package-b" },
]
[[package]]
name = "package-a"
version = "2.0.0"
source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_indirect_a-2.0.0.tar.gz", hash = "sha256:8f28371115dab396e098ce46f514d6bdf15a42c81cc75aa78c675db61e1ed67e" }
wheels = [
{ url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_indirect_a-2.0.0-py3-none-any.whl", hash = "sha256:ea2a2e5008c2ca39195650f532c8ff6c129a91ca92018490fe53f9f0d323414e" },
]
[[package]]
name = "package-b"
version = "1.0.0"
source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
dependencies = [
{ name = "package-b-inner" },
]
sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_indirect_b-1.0.0.tar.gz", hash = "sha256:2463de4ba18fe6b1f03b8458724a399c85c98a354a1861ea02e485757f096e3b" }
wheels = [
{ url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_indirect_b-1.0.0-py3-none-any.whl", hash = "sha256:cf4fc24449def13876f05f0c8cae59e4104ab9694b58ad6e92cc3fe25ecea6b3" },
]
[[package]]
name = "package-b-inner"
version = "1.0.0"
source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
dependencies = [
{ name = "package-too-old" },
]
sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_indirect_b_inner-1.0.0.tar.gz", hash = "sha256:e1ddc7be17380b754483067727ad9fa4e40f2f9b837982e9b9124ee9425ad72e" }
wheels = [
{ url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_indirect_b_inner-1.0.0-py3-none-any.whl", hash = "sha256:c58bcad2e57e160ec81d3e7a057f2c9a1a5fb74a3e3d18d82d79ab0dc5ce85dd" },
]
[[package]]
name = "package-too-old"
version = "1.0.0"
source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_indirect_too_old-1.0.0.tar.gz", hash = "sha256:de078b8acaad7f58f194407633aac7fda37550f5fbe778ecd837599ed3872a4d" }
wheels = [
{ url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/wrong_backtracking_indirect_too_old-1.0.0-py3-none-any.whl", hash = "sha256:d6ddc9421418ce70582869cf38c4f0322fc4061be36edc501555a616d560e7ba" },
]
"#
);
});
// Assert the idempotence of `uv lock` when resolving from the lockfile (`--locked`).
context
.lock()
.arg("--locked")
.env_remove(EnvVars::UV_EXCLUDE_NEWER)
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
/// This test ensures that multiple non-conflicting but also
/// non-overlapping dependency specifications with the same package name
/// are allowed and supported.
///
/// At time of writing, this provokes a fork in the resolver, but it
/// arguably shouldn't since the requirements themselves do not conflict
/// with one another. However, this does impact resolution. Namely, it
/// leaves the `a>=1` fork free to choose `a==2.0.0` since it behaves as if
/// the `a<2` constraint doesn't exist.
///
/// ```text
/// fork-allows-non-conflicting-non-overlapping-dependencies
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ ├── requires a>=1; sys_platform == "linux"
/// │ │ ├── satisfied by a-1.0.0
/// │ │ └── satisfied by a-2.0.0
/// │ └── requires a<2; sys_platform == "darwin"
/// │ └── satisfied by a-1.0.0
/// └── a
/// ├── a-1.0.0
/// └── a-2.0.0
/// ```
#[test]
fn fork_allows_non_conflicting_non_overlapping_dependencies() -> Result<()> {
let context = TestContext::new("3.12");
// In addition to the standard filters, swap out package names for shorter messages
let mut filters = context.filters();
filters.push((
r"fork-allows-non-conflicting-non-overlapping-dependencies-",
"package-",
));
// Marker-disjoint requirements on `a`: `>=1` on Linux, `<2` on macOS.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r###"
[project]
name = "project"
version = "0.1.0"
dependencies = [
'''fork-allows-non-conflicting-non-overlapping-dependencies-a>=1; sys_platform == "linux"''',
'''fork-allows-non-conflicting-non-overlapping-dependencies-a<2; sys_platform == "darwin"''',
]
requires-python = ">=3.12"
"###
)?;
// Lock against the live packse index; the exclude-newer cutoff is dropped
// because the scenario packages postdate it.
let mut cmd = context.lock();
cmd.env_remove(EnvVars::UV_EXCLUDE_NEWER);
cmd.arg("--index-url").arg(packse_index_url());
uv_snapshot!(filters, cmd, @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"
);
// Snapshot the lockfile with the same name filters applied.
let lock = context.read("uv.lock");
insta::with_settings!({
filters => filters,
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
resolution-markers = [
"sys_platform == 'darwin'",
"sys_platform == 'linux'",
"sys_platform != 'darwin' and sys_platform != 'linux'",
]
[[package]]
name = "package-a"
version = "1.0.0"
source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_allows_non_conflicting_non_overlapping_dependencies_a-1.0.0.tar.gz", hash = "sha256:836b578e798d4aaba37ea42ebde338fd422d61d2bc4a93524b9c9cf77a7539d7" }
wheels = [
{ url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_allows_non_conflicting_non_overlapping_dependencies_a-1.0.0-py3-none-any.whl", hash = "sha256:fce4343aac09c16fee45735fd638efc462aa97496c3e3332b6f8babdbd1e1e4d" },
]
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "package-a", marker = "sys_platform == 'darwin' or sys_platform == 'linux'" },
]
[package.metadata]
requires-dist = [
{ name = "package-a", marker = "sys_platform == 'darwin'", specifier = "<2" },
{ name = "package-a", marker = "sys_platform == 'linux'", specifier = ">=1" },
]
"#
);
});
// Assert the idempotence of `uv lock` when resolving from the lockfile (`--locked`).
context
.lock()
.arg("--locked")
.env_remove(EnvVars::UV_EXCLUDE_NEWER)
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
/// This test ensures that multiple non-conflicting dependency
/// specifications with the same package name are allowed and supported.
///
/// This test exists because the universal resolver forks itself based on
/// duplicate dependency specifications by looking at package name. So at
/// first glance, a case like this could perhaps cause an errant fork.
/// While it's difficult to test for "does not create a fork" (at time of
/// writing, the implementation does not fork), we can at least check that
/// this case is handled correctly without issue. Namely, forking should
/// only occur when there are duplicate dependency specifications with
/// disjoint marker expressions.
///
/// ```text
/// fork-allows-non-conflicting-repeated-dependencies
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ ├── requires a>=1
/// │ │ ├── satisfied by a-1.0.0
/// │ │ └── satisfied by a-2.0.0
/// │ └── requires a<2
/// │ └── satisfied by a-1.0.0
/// └── a
/// ├── a-1.0.0
/// └── a-2.0.0
/// ```
#[test]
fn fork_allows_non_conflicting_repeated_dependencies() -> Result<()> {
let context = TestContext::new("3.12");
// In addition to the standard filters, swap out package names for shorter messages
let mut filters = context.filters();
filters.push((
r"fork-allows-non-conflicting-repeated-dependencies-",
"package-",
));
// Overlapping (unconditional) requirements `a>=1` and `a<2` — compatible,
// so resolution should succeed with `a==1.0.0` and no fork.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r###"
[project]
name = "project"
version = "0.1.0"
dependencies = [
'''fork-allows-non-conflicting-repeated-dependencies-a>=1''',
'''fork-allows-non-conflicting-repeated-dependencies-a<2''',
]
requires-python = ">=3.12"
"###,
)?;
// Lock against the live packse index without the exclude-newer cutoff.
let mut cmd = context.lock();
cmd.env_remove(EnvVars::UV_EXCLUDE_NEWER);
cmd.arg("--index-url").arg(packse_index_url());
uv_snapshot!(filters, cmd, @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
"
);
// Snapshot the lockfile with the same name filters applied.
let lock = context.read("uv.lock");
insta::with_settings!({
filters => filters,
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[[package]]
name = "package-a"
version = "1.0.0"
source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_allows_non_conflicting_repeated_dependencies_a-1.0.0.tar.gz", hash = "sha256:4666c8498ab4aa641bacb39c2fb379ed87730d4de89bd7797c388a3c748f9f89" }
wheels = [
{ url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_allows_non_conflicting_repeated_dependencies_a-1.0.0-py3-none-any.whl", hash = "sha256:77f0eb64e8c3bef8dec459d90c0394069eccb40d0c6c978d97bc1c4089d7d626" },
]
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "package-a" },
]
[package.metadata]
requires-dist = [
{ name = "package-a", specifier = "<2" },
{ name = "package-a", specifier = ">=1" },
]
"#
);
});
// Assert the idempotence of `uv lock` when resolving from the lockfile (`--locked`).
context
.lock()
.arg("--locked")
.env_remove(EnvVars::UV_EXCLUDE_NEWER)
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
/// An extremely basic test of universal resolution. In this case, the resolution
/// should contain two distinct versions of `a` depending on `sys_platform`.
///
/// ```text
/// fork-basic
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ ├── requires a>=2; sys_platform == "linux"
/// │ │ └── satisfied by a-2.0.0
/// │ └── requires a<2; sys_platform == "darwin"
/// │ └── satisfied by a-1.0.0
/// └── a
/// ├── a-1.0.0
/// └── a-2.0.0
/// ```
#[test]
fn fork_basic() -> Result<()> {
let context = TestContext::new("3.12");
// In addition to the standard filters, swap out package names for shorter messages
let mut filters = context.filters();
filters.push((r"fork-basic-", "package-"));
// Disjoint marker requirements on `a` force a fork: `>=2` on Linux, `<2` on macOS.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r###"
[project]
name = "project"
version = "0.1.0"
dependencies = [
'''fork-basic-a>=2; sys_platform == "linux"''',
'''fork-basic-a<2; sys_platform == "darwin"''',
]
requires-python = ">=3.12"
"###,
)?;
// Lock against the live packse index without the exclude-newer cutoff.
let mut cmd = context.lock();
cmd.env_remove(EnvVars::UV_EXCLUDE_NEWER);
cmd.arg("--index-url").arg(packse_index_url());
uv_snapshot!(filters, cmd, @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 3 packages in [TIME]
"
);
// The lockfile should contain both `a==1.0.0` and `a==2.0.0`, each gated
// by the resolution-markers of its fork.
let lock = context.read("uv.lock");
insta::with_settings!({
filters => filters,
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
resolution-markers = [
"sys_platform == 'darwin'",
"sys_platform == 'linux'",
"sys_platform != 'darwin' and sys_platform != 'linux'",
]
[[package]]
name = "package-a"
version = "1.0.0"
source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
resolution-markers = [
"sys_platform == 'darwin'",
]
sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_basic_a-1.0.0.tar.gz", hash = "sha256:a81cba8fd1453d8fdf35ba4b3d8c536d2f9fa4e2ceb6312f497ec608a5262663" }
wheels = [
{ url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_basic_a-1.0.0-py3-none-any.whl", hash = "sha256:900f299c08b6e0c1ec013259c13ff9279f5672fb395418fd00f937a3830a7edb" },
]
[[package]]
name = "package-a"
version = "2.0.0"
source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }
resolution-markers = [
"sys_platform == 'linux'",
]
sdist = { url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_basic_a-2.0.0.tar.gz", hash = "sha256:3bbd7b86a9b7870ddcfdf343ed8555f414729053b171b3b072c1fef21478feb2" }
wheels = [
{ url = "https://astral-sh.github.io/packse/PACKSE_VERSION/files/fork_basic_a-2.0.0-py3-none-any.whl", hash = "sha256:4fa08d0429882d46c4c5262630b174b44aa633fc9d0c3f6a90e17406d6d7eb5a" },
]
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "package-a", version = "1.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'darwin'" },
{ name = "package-a", version = "2.0.0", source = { registry = "https://astral-sh.github.io/packse/PACKSE_VERSION/simple-html/" }, marker = "sys_platform == 'linux'" },
]
[package.metadata]
requires-dist = [
{ name = "package-a", marker = "sys_platform == 'darwin'", specifier = "<2" },
{ name = "package-a", marker = "sys_platform == 'linux'", specifier = ">=2" },
]
"#
);
});
// Assert the idempotence of `uv lock` when resolving from the lockfile (`--locked`).
context
.lock()
.arg("--locked")
.env_remove(EnvVars::UV_EXCLUDE_NEWER)
.arg("--index-url")
.arg(packse_index_url())
.assert()
.success();
Ok(())
}
/// We have a conflict after forking. This scenario exists to test the error message.
///
/// ```text
/// conflict-in-fork
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ ├── requires a>=2; sys_platform == "os1"
/// │ │ └── satisfied by a-2.0.0
/// │ └── requires a<2; sys_platform == "os2"
/// │ └── satisfied by a-1.0.0
/// ├── a
/// │ ├── a-1.0.0
/// │ │ ├── requires b
/// │ │ │ └── satisfied by b-1.0.0
/// │ │ └── requires c
/// │ │ └── satisfied by c-1.0.0
/// │ └── a-2.0.0
/// ├── b
/// │ └── b-1.0.0
/// │ └── requires d==1
/// │ └── satisfied by d-1.0.0
/// ├── c
/// │ └── c-1.0.0
/// │ └── requires d==2
/// │ └── satisfied by d-2.0.0
/// └── d
/// ├── d-1.0.0
/// └── d-2.0.0
/// ```
#[test]
fn conflict_in_fork() -> Result<()> {
let context = TestContext::new("3.12");
// In addition to the standard filters, swap out package names for shorter messages
let mut filters = context.filters();
filters.push((r"conflict-in-fork-", "package-"));
// Disjoint markers fork the resolution; the `os2` fork (`a<2`) then hits an
// unsolvable `d==1` vs `d==2` conflict via `b` and `c`.
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r###"
[project]
name = "project"
version = "0.1.0"
dependencies = [
'''conflict-in-fork-a>=2; sys_platform == "os1"''',
'''conflict-in-fork-a<2; sys_platform == "os2"''',
]
requires-python = ">=3.12"
"###,
)?;
// Lock against the live packse index without the exclude-newer cutoff.
let mut cmd = context.lock();
cmd.env_remove(EnvVars::UV_EXCLUDE_NEWER);
cmd.arg("--index-url").arg(packse_index_url());
// The error message should name the failing fork's markers and suggest
// limiting environments, since the current platform is neither `os1` nor `os2`.
uv_snapshot!(filters, cmd, @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies for split (markers: sys_platform == 'os2'):
╰─▶ Because only package-b==1.0.0 is available and package-b==1.0.0 depends on package-d==1, we can conclude that all versions of package-b depend on package-d==1.
And because package-c==1.0.0 depends on package-d==2 and only package-c==1.0.0 is available, we can conclude that all versions of package-b and all versions of package-c are incompatible.
And because package-a==1.0.0 depends on package-b and package-c, we can conclude that package-a==1.0.0 cannot be used.
And because only the following versions of package-a{sys_platform == 'os2'} are available:
package-a{sys_platform == 'os2'}==1.0.0
package-a{sys_platform == 'os2'}>=2
and your project depends on package-a{sys_platform == 'os2'}<2, we can conclude that your project's requirements are unsatisfiable.
hint: The resolution failed for an environment that is not the current one, consider limiting the environments with `tool.uv.environments`.
"
);
Ok(())
}
/// This test ensures that conflicting dependency specifications lead to an
/// unsatisfiable result.
///
/// In particular, this is a case that should not fork even though there
/// are conflicting requirements because their marker expressions are
/// overlapping. (Well, there aren't any marker expressions here, which
/// means they are both unconditional.)
///
/// ```text
/// fork-conflict-unsatisfiable
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ ├── requires a>=2
/// │ │ ├── satisfied by a-2.0.0
/// │ │ └── satisfied by a-3.0.0
/// │ └── requires a<2
/// │ └── satisfied by a-1.0.0
/// └── a
/// ├── a-1.0.0
/// ├── a-2.0.0
/// └── a-3.0.0
/// ```
#[test]
fn fork_conflict_unsatisfiable() -> Result<()> {
let context = TestContext::new("3.12");
// Shorten the scenario-prefixed package names so the snapshot stays readable.
let mut filters = context.filters();
filters.push((r"fork-conflict-unsatisfiable-", "package-"));
// Two unconditional, mutually exclusive requirements on the same package:
// no markers means no fork, and the conflict must surface as an error.
context
.temp_dir
.child("pyproject.toml")
.write_str(
r###"
[project]
name = "project"
version = "0.1.0"
dependencies = [
'''fork-conflict-unsatisfiable-a>=2''',
'''fork-conflict-unsatisfiable-a<2''',
]
requires-python = ">=3.12"
"###,
)?;
// Resolve against the live packse index; the scenario packages postdate the
// default exclude-newer cutoff, so that variable is dropped.
let mut lock_cmd = context.lock();
lock_cmd
.env_remove(EnvVars::UV_EXCLUDE_NEWER)
.arg("--index-url")
.arg(packse_index_url());
uv_snapshot!(filters, lock_cmd, @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because your project depends on package-a>=2 and package-a<2, we can conclude that your project's requirements are unsatisfiable.
"
);
Ok(())
}
/// This tests that sibling dependencies of a package that provokes a
/// fork are correctly filtered out of forks where they are otherwise
/// impossible.
///
/// In this case, a previous version of the universal resolver would
/// include both `b` and `c` in *both* of the forks produced by the
/// conflicting dependency specifications on `a`. This in turn led to
/// transitive dependency specifications on both `d==1.0.0` and `d==2.0.0`.
/// Since the universal resolver only forks based on local conditions, this
/// led to a failed resolution.
///
/// The correct thing to do here is to ensure that `b` is only part of the
/// `a==4.4.0` fork and `c` is only par of the `a==4.3.0` fork.
///
/// ```text
/// fork-filter-sibling-dependencies
/// ├── environment
/// │ └── python3.12
/// ├── root
/// │ ├── requires a==4.4.0; sys_platform == "linux"
/// │ │ └── satisfied by a-4.4.0
/// │ ├── requires a==4.3.0; sys_platform == "darwin"
/// │ │ └── satisfied by a-4.3.0
/// │ ├── requires b==1.0.0; sys_platform == "linux"
/// │ │ └── satisfied by b-1.0.0
/// │ └── requires c==1.0.0; sys_platform == "darwin"
/// │ └── satisfied by c-1.0.0
/// ├── a
/// │ ├── a-4.3.0
/// │ └── a-4.4.0
/// ├── b
/// │ └── b-1.0.0
/// │ └── requires d==1.0.0
/// │ └── satisfied by d-1.0.0
/// ├── c
/// │ └── c-1.0.0
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/pip_compile_scenarios.rs | crates/uv/tests/it/pip_compile_scenarios.rs | //! DO NOT EDIT
//!
//! Generated with `./scripts/sync_scenarios.sh`
//! Scenarios from <https://github.com/astral-sh/packse/tree/0.3.53/scenarios>
//!
#![cfg(all(feature = "python", feature = "pypi", unix))]
use std::env;
use std::process::Command;
use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::fixture::{FileWriteStr, PathChild};
use predicates::prelude::predicate;
use uv_static::EnvVars;
use crate::common::{
TestContext, build_vendor_links_url, get_bin, packse_index_url, python_path_with_versions,
uv_snapshot,
};
/// Build the shared `pip compile` invocation used by every scenario test,
/// provisioning the requested Python toolchains and pointing resolution at
/// the packse index plus the vendored build-dependency links.
fn command(context: &TestContext, python_versions: &[&str]) -> Command {
// Make the requested interpreters discoverable via `UV_TEST_PYTHON_PATH`.
let python_path = python_path_with_versions(&context.temp_dir, python_versions)
.expect("Failed to create Python test path");
let mut cmd = Command::new(get_bin());
cmd.args(["pip", "compile", "requirements.in"]);
cmd.arg("--index-url").arg(packse_index_url());
cmd.arg("--find-links").arg(build_vendor_links_url());
context.add_shared_options(&mut cmd, true);
// Scenario packages are published after the global exclude-newer cutoff.
cmd.env_remove(EnvVars::UV_EXCLUDE_NEWER);
cmd.env(EnvVars::UV_TEST_PYTHON_PATH, python_path);
cmd
}
/// The user requires a package which requires a compatible Python version, but they request an incompatible Python version for package resolution.
///
/// ```text
/// compatible-python-incompatible-override
/// ├── environment
/// │ └── python3.11
/// ├── root
/// │ └── requires a==1.0.0
/// │ └── satisfied by a-1.0.0
/// └── a
/// └── a-1.0.0
/// └── requires python>=3.10
/// ```
#[test]
fn compatible_python_incompatible_override() -> Result<()> {
let context = TestContext::new("3.11");
// No additional interpreters are provisioned beyond the active 3.11.
let python_versions = &[];
// In addition to the standard filters, swap out package names for shorter messages
let mut filters = context.filters();
filters.push((r"compatible-python-incompatible-override-", "package-"));
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("compatible-python-incompatible-override-a==1.0.0")?;
// Targeting 3.9 conflicts with `a`'s `Requires-Python: >=3.10`, so
// resolution must fail with a hint to raise `--python-version`.
let output = uv_snapshot!(filters, command(&context, python_versions)
.arg("--python-version=3.9")
, @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
warning: The requested Python version 3.9 is not available; 3.11.[X] will be used to build dependencies instead.
× No solution found when resolving dependencies:
╰─▶ Because the requested Python version (>=3.9) does not satisfy Python>=3.10 and package-a==1.0.0 depends on Python>=3.10, we can conclude that package-a==1.0.0 cannot be used.
And because you require package-a==1.0.0, we can conclude that your requirements are unsatisfiable.
hint: The `--python-version` value (>=3.9) includes Python versions that are not supported by your dependencies (e.g., package-a==1.0.0 only supports >=3.10). Consider using a higher `--python-version` value.
"
);
output.assert().failure();
Ok(())
}
/// The user requires a package which requires a incompatible Python version, but they request a compatible Python version for package resolution. There are only source distributions available for the package. The user has a compatible Python version installed elsewhere on their system.
///
/// ```text
/// incompatible-python-compatible-override-available-no-wheels
/// ├── environment
/// │ ├── python3.11
/// │ └── python3.9 (active)
/// ├── root
/// │ └── requires a==1.0.0
/// │ └── satisfied by a-1.0.0
/// └── a
/// └── a-1.0.0
/// └── requires python>=3.10 (incompatible with environment)
/// ```
#[test]
fn incompatible_python_compatible_override_available_no_wheels() -> Result<()> {
let context = TestContext::new("3.9");
// Provision an extra 3.11 interpreter so a compatible build Python exists.
let python_versions = &["3.11"];
// In addition to the standard filters, swap out package names for shorter messages
let mut filters = context.filters();
filters.push((
r"incompatible-python-compatible-override-available-no-wheels-",
"package-",
));
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in
.write_str("incompatible-python-compatible-override-available-no-wheels-a==1.0.0")?;
// Since there is a compatible Python version available on the system, it should be used to build the source distributions.
let output = uv_snapshot!(filters, command(&context, python_versions)
.arg("--python-version=3.11")
, @r"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --cache-dir [CACHE_DIR] --python-version=3.11
package-a==1.0.0
# via -r requirements.in
----- stderr -----
Resolved 1 package in [TIME]
"
);
// Also check the unfiltered pin appears in the raw stdout.
output.assert().success().stdout(predicate::str::contains(
"incompatible-python-compatible-override-available-no-wheels-a==1.0.0",
));
Ok(())
}
/// The user requires a package which requires a incompatible Python version, but they request a compatible Python version for package resolution. There is a wheel available for the package, but it does not have a compatible tag.
///
/// ```text
/// incompatible-python-compatible-override-no-compatible-wheels
/// ├── environment
/// │ └── python3.9
/// ├── root
/// │ └── requires a==1.0.0
/// │ └── satisfied by a-1.0.0
/// └── a
/// └── a-1.0.0
/// └── requires python>=3.10 (incompatible with environment)
/// ```
#[test]
fn incompatible_python_compatible_override_no_compatible_wheels() -> Result<()> {
let context = TestContext::new("3.9");
// Only the (incompatible) active 3.9 interpreter is available.
let python_versions = &[];
// In addition to the standard filters, swap out package names for shorter messages
let mut filters = context.filters();
filters.push((
r"incompatible-python-compatible-override-no-compatible-wheels-",
"package-",
));
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in
.write_str("incompatible-python-compatible-override-no-compatible-wheels-a==1.0.0")?;
// Since there are no compatible wheels for the package and it is not compatible with the local installation, we cannot build the source distribution to determine its dependencies. However, the source distribution includes static metadata, which we can use to determine dependencies without building the package.
let output = uv_snapshot!(filters, command(&context, python_versions)
.arg("--python-version=3.11")
, @r"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --cache-dir [CACHE_DIR] --python-version=3.11
package-a==1.0.0
# via -r requirements.in
----- stderr -----
warning: The requested Python version 3.11 is not available; 3.9.[X] will be used to build dependencies instead.
Resolved 1 package in [TIME]
"
);
output.assert().success();
Ok(())
}
/// The user requires a package which requires a incompatible Python version, but they request a compatible Python version for package resolution. There are only source distributions available for the compatible version of the package, but there is an incompatible version with a wheel available.
///
/// ```text
/// incompatible-python-compatible-override-other-wheel
/// ├── environment
/// │ └── python3.9
/// ├── root
/// │ └── requires a
/// │ ├── satisfied by a-1.0.0
/// │ └── satisfied by a-2.0.0
/// └── a
/// ├── a-1.0.0
/// │ └── requires python>=3.10 (incompatible with environment)
/// └── a-2.0.0
/// └── requires python>=3.12 (incompatible with environment)
/// ```
#[test]
fn incompatible_python_compatible_override_other_wheel() -> Result<()> {
let context = TestContext::new("3.9");
// Only the (incompatible) active 3.9 interpreter is available.
let python_versions = &[];
// In addition to the standard filters, swap out package names for shorter messages
let mut filters = context.filters();
filters.push((
r"incompatible-python-compatible-override-other-wheel-",
"package-",
));
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("incompatible-python-compatible-override-other-wheel-a")?;
// Since there are no wheels for the version of the package compatible with the target and it is not compatible with the local installation, we cannot build the source distribution to determine its dependencies. However, the source distribution includes static metadata, which we can use to determine dependencies without building the package.
let output = uv_snapshot!(filters, command(&context, python_versions)
.arg("--python-version=3.11")
, @r"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --cache-dir [CACHE_DIR] --python-version=3.11
package-a==1.0.0
# via -r requirements.in
----- stderr -----
warning: The requested Python version 3.11 is not available; 3.9.[X] will be used to build dependencies instead.
Resolved 1 package in [TIME]
"
);
output.assert().success();
Ok(())
}
/// The user requires a package which requires a incompatible Python version, but they request a compatible Python version for package resolution. There are only source distributions available for the package.
///
/// ```text
/// incompatible-python-compatible-override-unavailable-no-wheels
/// ├── environment
/// │ └── python3.9
/// ├── root
/// │ └── requires a==1.0.0
/// │ └── satisfied by a-1.0.0
/// └── a
/// └── a-1.0.0
/// └── requires python>=3.10 (incompatible with environment)
/// ```
#[test]
fn incompatible_python_compatible_override_unavailable_no_wheels() -> Result<()> {
let context = TestContext::new("3.9");
// Only the (incompatible) active 3.9 interpreter is available.
let python_versions = &[];
// In addition to the standard filters, swap out package names for shorter messages
let mut filters = context.filters();
filters.push((
r"incompatible-python-compatible-override-unavailable-no-wheels-",
"package-",
));
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in
.write_str("incompatible-python-compatible-override-unavailable-no-wheels-a==1.0.0")?;
// Since there are no wheels for the package and it is not compatible with the local installation, we cannot build the source distribution to determine its dependencies. However, the source distribution includes static metadata, which we can use to determine dependencies without building the package.
let output = uv_snapshot!(filters, command(&context, python_versions)
.arg("--python-version=3.11")
, @r"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --cache-dir [CACHE_DIR] --python-version=3.11
package-a==1.0.0
# via -r requirements.in
----- stderr -----
warning: The requested Python version 3.11 is not available; 3.9.[X] will be used to build dependencies instead.
Resolved 1 package in [TIME]
"
);
output.assert().success();
Ok(())
}
/// The user requires a package which requires a Python version greater than the current version, but they use an alternative Python version for package resolution.
///
/// ```text
/// incompatible-python-compatible-override
/// ├── environment
/// │ └── python3.9
/// ├── root
/// │ └── requires a==1.0.0
/// │ └── satisfied by a-1.0.0
/// └── a
/// └── a-1.0.0
/// └── requires python>=3.10 (incompatible with environment)
/// ```
#[test]
fn incompatible_python_compatible_override() -> Result<()> {
let context = TestContext::new("3.9");
// Only the (incompatible) active 3.9 interpreter is available.
let python_versions = &[];
// In addition to the standard filters, swap out package names for shorter messages
let mut filters = context.filters();
filters.push((r"incompatible-python-compatible-override-", "package-"));
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("incompatible-python-compatible-override-a==1.0.0")?;
// Overriding the target to 3.11 satisfies `a`'s `Requires-Python: >=3.10`,
// so resolution succeeds even though the active interpreter is 3.9.
let output = uv_snapshot!(filters, command(&context, python_versions)
.arg("--python-version=3.11")
, @r"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --cache-dir [CACHE_DIR] --python-version=3.11
package-a==1.0.0
# via -r requirements.in
----- stderr -----
warning: The requested Python version 3.11 is not available; 3.9.[X] will be used to build dependencies instead.
Resolved 1 package in [TIME]
"
);
// Also check the unfiltered pin appears in the raw stdout.
output.assert().success().stdout(predicate::str::contains(
"incompatible-python-compatible-override-a==1.0.0",
));
Ok(())
}
/// The user requires a package which requires a Python version with a patch version and the user provides a target version without a patch version.
///
/// ```text
/// python-patch-override-no-patch
/// ├── environment
/// │ └── python3.9.20
/// ├── root
/// │ └── requires a==1.0.0
/// │ └── satisfied by a-1.0.0
/// └── a
/// └── a-1.0.0
/// └── requires python>=3.9.4
/// ```
#[cfg(feature = "python-patch")]
#[test]
fn python_patch_override_no_patch() -> Result<()> {
let context = TestContext::new("3.9.20");
// No extra interpreters beyond the active 3.9.20.
let python_versions = &[];
// In addition to the standard filters, swap out package names for shorter messages
let mut filters = context.filters();
filters.push((r"python-patch-override-no-patch-", "package-"));
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("python-patch-override-no-patch-a==1.0.0")?;
// Since the resolver is asked to solve with 3.9, the minimum compatible Python requirement is treated as 3.9.0.
let output = uv_snapshot!(filters, command(&context, python_versions)
.arg("--python-version=3.9")
, @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× No solution found when resolving dependencies:
╰─▶ Because the requested Python version (>=3.9) does not satisfy Python>=3.9.4 and package-a==1.0.0 depends on Python>=3.9.4, we can conclude that package-a==1.0.0 cannot be used.
And because you require package-a==1.0.0, we can conclude that your requirements are unsatisfiable.
hint: The `--python-version` value (>=3.9) includes Python versions that are not supported by your dependencies (e.g., package-a==1.0.0 only supports >=3.9.4). Consider using a higher `--python-version` value.
"
);
output.assert().failure();
Ok(())
}
/// The user requires a package which requires a Python version with a patch version and the user provides a target version with a compatible patch version.
///
/// ```text
/// python-patch-override-patch-compatible
/// ├── environment
/// │ └── python3.9.20
/// ├── root
/// │ └── requires a==1.0.0
/// │ └── satisfied by a-1.0.0
/// └── a
/// └── a-1.0.0
/// └── requires python>=3.9.0
/// ```
#[cfg(feature = "python-patch")]
#[test]
fn python_patch_override_patch_compatible() -> Result<()> {
let context = TestContext::new("3.9.20");
let python_versions = &[];
// In addition to the standard filters, swap out package names for shorter messages
let mut filters = context.filters();
filters.push((r"python-patch-override-patch-compatible-", "package-"));
let requirements_in = context.temp_dir.child("requirements.in");
requirements_in.write_str("python-patch-override-patch-compatible-a==1.0.0")?;
let output = uv_snapshot!(filters, command(&context, python_versions)
.arg("--python-version=3.9.0")
, @r"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv pip compile requirements.in --cache-dir [CACHE_DIR] --python-version=3.9.0
package-a==1.0.0
# via -r requirements.in
----- stderr -----
warning: The requested Python version 3.9.0 is not available; 3.9.20 will be used to build dependencies instead.
Resolved 1 package in [TIME]
"
);
output.assert().success().stdout(predicate::str::contains(
"python-patch-override-patch-compatible-a==1.0.0",
));
Ok(())
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/pip_show.rs | crates/uv/tests/it/pip_show.rs | use std::env::current_dir;
use anyhow::Result;
use assert_cmd::prelude::*;
use assert_fs::fixture::FileWriteStr;
use assert_fs::fixture::PathChild;
use indoc::indoc;
use uv_static::EnvVars;
use crate::common::{TestContext, uv_snapshot};
#[test]
fn show_empty() {
let context = TestContext::new("3.12");
uv_snapshot!(context.pip_show(), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
warning: Please provide a package name or names.
"###
);
}
#[test]
#[cfg(feature = "pypi")]
fn show_requires_multiple() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("requests==2.31.0")?;
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
Prepared 5 packages in [TIME]
Installed 5 packages in [TIME]
+ certifi==2024.2.2
+ charset-normalizer==3.3.2
+ idna==3.6
+ requests==2.31.0
+ urllib3==2.2.1
"###
);
context.assert_command("import requests").success();
uv_snapshot!(context.filters(), context.pip_show()
.arg("requests"), @r###"
success: true
exit_code: 0
----- stdout -----
Name: requests
Version: 2.31.0
Location: [SITE_PACKAGES]/
Requires: certifi, charset-normalizer, idna, urllib3
Required-by:
----- stderr -----
"###
);
Ok(())
}
/// Asserts that the Python version marker in the metadata is correctly evaluated.
/// `click` v8.1.7 requires `importlib-metadata`, but only when `python_version < "3.8"`.
#[test]
#[cfg(feature = "pypi")]
fn show_python_version_marker() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("click==8.1.7")?;
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ click==8.1.7
"###
);
context.assert_command("import click").success();
let mut filters = context.filters();
if cfg!(windows) {
filters.push(("Requires: colorama", "Requires:"));
}
uv_snapshot!(filters, context.pip_show()
.arg("click"), @r###"
success: true
exit_code: 0
----- stdout -----
Name: click
Version: 8.1.7
Location: [SITE_PACKAGES]/
Requires:
Required-by:
----- stderr -----
"###
);
Ok(())
}
#[test]
#[cfg(feature = "pypi")]
fn show_found_single_package() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("MarkupSafe==2.1.3")?;
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 1 package in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ markupsafe==2.1.3
"###
);
context.assert_command("import markupsafe").success();
uv_snapshot!(context.filters(), context.pip_show()
.arg("markupsafe"), @r###"
success: true
exit_code: 0
----- stdout -----
Name: markupsafe
Version: 2.1.3
Location: [SITE_PACKAGES]/
Requires:
Required-by:
----- stderr -----
"###
);
Ok(())
}
#[test]
#[cfg(feature = "pypi")]
fn show_found_multiple_packages() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
MarkupSafe==2.1.3
pip==21.3.1
"
})?;
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 2 packages in [TIME]
Installed 2 packages in [TIME]
+ markupsafe==2.1.3
+ pip==21.3.1
"###
);
context.assert_command("import markupsafe").success();
uv_snapshot!(context.filters(), context.pip_show()
.arg("markupsafe")
.arg("pip"), @r###"
success: true
exit_code: 0
----- stdout -----
Name: markupsafe
Version: 2.1.3
Location: [SITE_PACKAGES]/
Requires:
Required-by:
---
Name: pip
Version: 21.3.1
Location: [SITE_PACKAGES]/
Requires:
Required-by:
----- stderr -----
"###
);
Ok(())
}
#[test]
#[cfg(feature = "pypi")]
fn show_found_one_out_of_three() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
MarkupSafe==2.1.3
pip==21.3.1
"
})?;
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 2 packages in [TIME]
Installed 2 packages in [TIME]
+ markupsafe==2.1.3
+ pip==21.3.1
"###
);
context.assert_command("import markupsafe").success();
uv_snapshot!(context.filters(), context.pip_show()
.arg("markupsafe")
.arg("flask")
.arg("django"), @r###"
success: true
exit_code: 0
----- stdout -----
Name: markupsafe
Version: 2.1.3
Location: [SITE_PACKAGES]/
Requires:
Required-by:
----- stderr -----
warning: Package(s) not found for: django, flask
"###
);
Ok(())
}
#[test]
#[cfg(feature = "pypi")]
fn show_found_one_out_of_two_quiet() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
MarkupSafe==2.1.3
pip==21.3.1
"
})?;
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 2 packages in [TIME]
Installed 2 packages in [TIME]
+ markupsafe==2.1.3
+ pip==21.3.1
"###
);
context.assert_command("import markupsafe").success();
// Flask isn't installed, but markupsafe is, so the command should succeed.
uv_snapshot!(context.pip_show()
.arg("markupsafe")
.arg("flask")
.arg("--quiet"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
"###
);
Ok(())
}
#[test]
#[cfg(feature = "pypi")]
fn show_empty_quiet() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
MarkupSafe==2.1.3
pip==21.3.1
"
})?;
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 2 packages in [TIME]
Installed 2 packages in [TIME]
+ markupsafe==2.1.3
+ pip==21.3.1
"###
);
context.assert_command("import markupsafe").success();
// Flask isn't installed, so the command should fail.
uv_snapshot!(context.pip_show()
.arg("flask")
.arg("--quiet"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
"###
);
Ok(())
}
#[test]
#[cfg(feature = "pypi")]
fn show_editable() -> Result<()> {
let context = TestContext::new("3.12");
// Install the editable package.
context
.pip_install()
.arg("-e")
.arg("../../test/packages/poetry_editable")
.current_dir(current_dir()?)
.env(
EnvVars::CARGO_TARGET_DIR,
"../../../target/target_install_editable",
)
.assert()
.success();
uv_snapshot!(context.filters(), context.pip_show()
.arg("poetry-editable"), @r###"
success: true
exit_code: 0
----- stdout -----
Name: poetry-editable
Version: 0.1.0
Location: [SITE_PACKAGES]/
Editable project location: [WORKSPACE]/test/packages/poetry_editable
Requires: anyio
Required-by:
----- stderr -----
"###
);
Ok(())
}
#[test]
#[cfg(feature = "pypi")]
fn show_required_by_multiple() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str(indoc! {r"
anyio==4.0.0
requests==2.31.0
"
})?;
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Prepared 7 packages in [TIME]
Installed 7 packages in [TIME]
+ anyio==4.0.0
+ certifi==2024.2.2
+ charset-normalizer==3.3.2
+ idna==3.6
+ requests==2.31.0
+ sniffio==1.3.1
+ urllib3==2.2.1
"###
);
context.assert_command("import requests").success();
// idna is required by anyio and requests
uv_snapshot!(context.filters(), context.pip_show()
.arg("idna"), @r###"
success: true
exit_code: 0
----- stdout -----
Name: idna
Version: 3.6
Location: [SITE_PACKAGES]/
Requires:
Required-by: anyio, requests
----- stderr -----
"###
);
Ok(())
}
#[test]
#[cfg(feature = "pypi")]
fn show_files() {
let context = TestContext::new("3.12");
uv_snapshot!(context
.pip_install()
.arg("requests==2.31.0")
.arg("--strict"), @r#"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
Prepared 5 packages in [TIME]
Installed 5 packages in [TIME]
+ certifi==2024.2.2
+ charset-normalizer==3.3.2
+ idna==3.6
+ requests==2.31.0
+ urllib3==2.2.1
"#
);
// Windows has a different files order.
#[cfg(not(windows))]
uv_snapshot!(context.filters(), context.pip_show().arg("requests").arg("--files"), @r#"
success: true
exit_code: 0
----- stdout -----
Name: requests
Version: 2.31.0
Location: [SITE_PACKAGES]/
Requires: certifi, charset-normalizer, idna, urllib3
Required-by:
Files:
requests-2.31.0.dist-info/INSTALLER
requests-2.31.0.dist-info/LICENSE
requests-2.31.0.dist-info/METADATA
requests-2.31.0.dist-info/RECORD
requests-2.31.0.dist-info/REQUESTED
requests-2.31.0.dist-info/WHEEL
requests-2.31.0.dist-info/top_level.txt
requests/__init__.py
requests/__version__.py
requests/_internal_utils.py
requests/adapters.py
requests/api.py
requests/auth.py
requests/certs.py
requests/compat.py
requests/cookies.py
requests/exceptions.py
requests/help.py
requests/hooks.py
requests/models.py
requests/packages.py
requests/sessions.py
requests/status_codes.py
requests/structures.py
requests/utils.py
----- stderr -----
"#);
}
#[test]
#[cfg(feature = "pypi")]
fn show_target() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("MarkupSafe==2.1.3")?;
let target = context.temp_dir.child("target");
// Install packages to a target directory.
context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--target")
.arg(target.path())
.assert()
.success();
// Show package in the target directory.
uv_snapshot!(context.filters(), context.pip_show()
.arg("markupsafe")
.arg("--target")
.arg(target.path()), @r###"
success: true
exit_code: 0
----- stdout -----
Name: markupsafe
Version: 2.1.3
Location: [TEMP_DIR]/target
Requires:
Required-by:
----- stderr -----
"###
);
// Without --target, the package should not be found.
uv_snapshot!(context.pip_show().arg("markupsafe"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
warning: Package(s) not found for: markupsafe
"###
);
Ok(())
}
#[test]
#[cfg(feature = "pypi")]
fn show_prefix() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("MarkupSafe==2.1.3")?;
let prefix = context.temp_dir.child("prefix");
// Install packages to a prefix directory.
context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--prefix")
.arg(prefix.path())
.assert()
.success();
// Show package in the prefix directory.
uv_snapshot!(context.filters(), context.pip_show()
.arg("markupsafe")
.arg("--prefix")
.arg(prefix.path()), @r###"
success: true
exit_code: 0
----- stdout -----
Name: markupsafe
Version: 2.1.3
Location: [TEMP_DIR]/prefix/[PYTHON-LIB]/site-packages
Requires:
Required-by:
----- stderr -----
"###
);
// Without --prefix, the package should not be found.
uv_snapshot!(context.pip_show().arg("markupsafe"), @r###"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
warning: Package(s) not found for: markupsafe
"###
);
Ok(())
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/edit.rs | crates/uv/tests/it/edit.rs | #![allow(clippy::disallowed_types)]
#[cfg(feature = "git")]
mod conditional_imports {
pub(crate) use crate::common::{READ_ONLY_GITHUB_TOKEN, decode_token};
}
#[cfg(feature = "git")]
use conditional_imports::*;
use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::prelude::*;
use indoc::{formatdoc, indoc};
use insta::assert_snapshot;
use std::path::Path;
use url::Url;
use wiremock::{Mock, MockServer, ResponseTemplate, matchers::method, matchers::path};
#[cfg(feature = "git-lfs")]
use uv_cache_key::{RepositoryUrl, cache_digest};
use uv_fs::Simplified;
use uv_static::EnvVars;
use crate::common::{TestContext, packse_index_url, uv_snapshot, venv_bin_path};
/// Add a PyPI requirement.
#[test]
fn add_registry() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
"#})?;
uv_snapshot!(context.filters(), context.add().arg("anyio==3.7.0"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
Prepared 3 packages in [TIME]
Installed 3 packages in [TIME]
+ anyio==3.7.0
+ idna==3.6
+ sniffio==1.3.1
");
let pyproject_toml = context.read("pyproject.toml");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject_toml, @r###"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = [
"anyio==3.7.0",
]
"###
);
});
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "anyio"
version = "3.7.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
{ name = "sniffio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c6/b3/fefbf7e78ab3b805dec67d698dc18dd505af7a18a8dd08868c9b4fa736b5/anyio-3.7.0.tar.gz", hash = "sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce", size = 142737, upload-time = "2023-05-27T11:12:46.688Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/68/fe/7ce1926952c8a403b35029e194555558514b365ad77d75125f521a2bec62/anyio-3.7.0-py3-none-any.whl", hash = "sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0", size = 80873, upload-time = "2023-05-27T11:12:44.474Z" },
]
[[package]]
name = "idna"
version = "3.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" },
]
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "anyio" },
]
[package.metadata]
requires-dist = [{ name = "anyio", specifier = "==3.7.0" }]
[[package]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
]
"#
);
});
// Install from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 3 packages in [TIME]
");
Ok(())
}
/// Add a Git requirement.
#[test]
#[cfg(feature = "git")]
fn add_git() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["anyio==3.7.0"]
"#})?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
"###);
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Prepared 3 packages in [TIME]
Installed 3 packages in [TIME]
+ anyio==3.7.0
+ idna==3.6
+ sniffio==1.3.1
");
// Adding with an ambiguous Git reference should treat it as a revision.
uv_snapshot!(context.filters(), context.add().arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0.0.1"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979)
");
uv_snapshot!(context.filters(), context.add().arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage").arg("--tag=0.0.1"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
Audited 4 packages in [TIME]
");
let pyproject_toml = context.read("pyproject.toml");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject_toml, @r###"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = [
"anyio==3.7.0",
"uv-public-pypackage",
]
[tool.uv.sources]
uv-public-pypackage = { git = "https://github.com/astral-test/uv-public-pypackage", tag = "0.0.1" }
"###
);
});
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "anyio"
version = "3.7.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
{ name = "sniffio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c6/b3/fefbf7e78ab3b805dec67d698dc18dd505af7a18a8dd08868c9b4fa736b5/anyio-3.7.0.tar.gz", hash = "sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce", size = 142737, upload-time = "2023-05-27T11:12:46.688Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/68/fe/7ce1926952c8a403b35029e194555558514b365ad77d75125f521a2bec62/anyio-3.7.0-py3-none-any.whl", hash = "sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0", size = 80873, upload-time = "2023-05-27T11:12:44.474Z" },
]
[[package]]
name = "idna"
version = "3.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" },
]
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "anyio" },
{ name = "uv-public-pypackage" },
]
[package.metadata]
requires-dist = [
{ name = "anyio", specifier = "==3.7.0" },
{ name = "uv-public-pypackage", git = "https://github.com/astral-test/uv-public-pypackage?tag=0.0.1" },
]
[[package]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
]
[[package]]
name = "uv-public-pypackage"
version = "0.1.0"
source = { git = "https://github.com/astral-test/uv-public-pypackage?tag=0.0.1#0dacfd662c64cb4ceb16e6cf65a157a8b715b979" }
"#
);
});
// Install from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 4 packages in [TIME]
");
Ok(())
}
/// Add a Git requirement from a private repository, with credentials. The resolution should
/// succeed, but the `pyproject.toml` should omit the credentials.
#[test]
#[cfg(feature = "git")]
fn add_git_private_source() -> Result<()> {
let context = TestContext::new("3.12");
let token = decode_token(READ_ONLY_GITHUB_TOKEN);
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
"#})?;
uv_snapshot!(context.filters(), context.add().arg(format!("uv-private-pypackage @ git+https://{token}@github.com/astral-test/uv-private-pypackage")), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ uv-private-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-private-pypackage@d780faf0ac91257d4d5a4f0c5a0e4509608c0071)
");
let pyproject_toml = context.read("pyproject.toml");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject_toml, @r###"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = [
"uv-private-pypackage",
]
[tool.uv.sources]
uv-private-pypackage = { git = "https://github.com/astral-test/uv-private-pypackage" }
"###
);
});
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "uv-private-pypackage" },
]
[package.metadata]
requires-dist = [{ name = "uv-private-pypackage", git = "https://github.com/astral-test/uv-private-pypackage" }]
[[package]]
name = "uv-private-pypackage"
version = "0.1.0"
source = { git = "https://github.com/astral-test/uv-private-pypackage#d780faf0ac91257d4d5a4f0c5a0e4509608c0071" }
"#
);
});
// Install from the lockfile.
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 1 package in [TIME]
");
Ok(())
}
/// Add a Git requirement from a private repository, with credentials. Since `--raw-sources` is
/// specified, the `pyproject.toml` should retain the credentials.
#[test]
#[cfg(feature = "git")]
fn add_git_private_raw() -> Result<()> {
let context = TestContext::new("3.12");
let token = decode_token(READ_ONLY_GITHUB_TOKEN);
let mut filters = context.filters();
filters.push((&token, "***"));
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
"#})?;
uv_snapshot!(filters, context.add().arg(format!("uv-private-pypackage @ git+https://{token}@github.com/astral-test/uv-private-pypackage")).arg("--raw-sources"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ uv-private-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-private-pypackage@d780faf0ac91257d4d5a4f0c5a0e4509608c0071)
");
let pyproject_toml = context.read("pyproject.toml");
insta::with_settings!({
filters => filters.clone()
}, {
assert_snapshot!(
pyproject_toml, @r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = [
"uv-private-pypackage @ git+https://***@github.com/astral-test/uv-private-pypackage",
]
"#
);
});
let lock = context.read("uv.lock");
insta::with_settings!({
filters => filters.clone(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "project"
version = "0.1.0"
source = { virtual = "." }
dependencies = [
{ name = "uv-private-pypackage" },
]
[package.metadata]
requires-dist = [{ name = "uv-private-pypackage", git = "https://github.com/astral-test/uv-private-pypackage" }]
[[package]]
name = "uv-private-pypackage"
version = "0.1.0"
source = { git = "https://github.com/astral-test/uv-private-pypackage#d780faf0ac91257d4d5a4f0c5a0e4509608c0071" }
"#
);
});
// Install from the lockfile.
uv_snapshot!(filters, context.sync().arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Audited 1 package in [TIME]
");
Ok(())
}
#[tokio::test]
#[cfg(feature = "git")]
async fn add_git_private_rate_limited_by_github_rest_api_403_response() -> Result<()> {
let context = TestContext::new("3.12");
let token = decode_token(READ_ONLY_GITHUB_TOKEN);
let server = MockServer::start().await;
Mock::given(method("GET"))
.respond_with(ResponseTemplate::new(403))
.expect(1)
.mount(&server)
.await;
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
"#})?;
uv_snapshot!(context.filters(), context
.add()
.arg(format!("uv-private-pypackage @ git+https://{token}@github.com/astral-test/uv-private-pypackage"))
.env(EnvVars::UV_GITHUB_FAST_PATH_URL, server.uri()), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ uv-private-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-private-pypackage@d780faf0ac91257d4d5a4f0c5a0e4509608c0071)
");
Ok(())
}
#[tokio::test]
#[cfg(feature = "git")]
async fn add_git_private_rate_limited_by_github_rest_api_429_response() -> Result<()> {
use uv_client::DEFAULT_RETRIES;
let context = TestContext::new("3.12");
let token = decode_token(READ_ONLY_GITHUB_TOKEN);
let server = MockServer::start().await;
Mock::given(method("GET"))
.respond_with(ResponseTemplate::new(429))
.expect(1 + u64::from(DEFAULT_RETRIES)) // Middleware retries on 429 by default
.mount(&server)
.await;
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = []
"#})?;
uv_snapshot!(context.filters(), context
.add()
.arg(format!("uv-private-pypackage @ git+https://{token}@github.com/astral-test/uv-private-pypackage"))
.env(EnvVars::UV_GITHUB_FAST_PATH_URL, server.uri())
.env(EnvVars::UV_TEST_NO_HTTP_RETRY_DELAY, "true")
.env_remove(EnvVars::UV_HTTP_RETRIES), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ uv-private-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-private-pypackage@d780faf0ac91257d4d5a4f0c5a0e4509608c0071)
");
Ok(())
}
/// `uv add` should reject Git-only reference flags (`--tag`, `--branch`) when
/// the requirement does not resolve to a Git repository.
#[test]
#[cfg(feature = "git")]
fn add_git_error() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = []
    "#})?;

    // Lock and sync the (empty) project so `uv add` starts from a valid state.
    uv_snapshot!(context.filters(), context.lock(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 1 package in [TIME]
    "###);

    uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Audited in [TIME]
    ");

    // Provide a tag without a Git source.
    uv_snapshot!(context.filters(), context.add().arg("flask").arg("--tag").arg("0.0.1"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: `flask` did not resolve to a Git repository, but a Git reference (`--tag 0.0.1`) was provided.
    "###);

    // Provide a tag with a non-Git source.
    uv_snapshot!(context.filters(), context.add().arg("flask @ https://files.pythonhosted.org/packages/61/80/ffe1da13ad9300f87c93af113edd0638c75138c42a0994becfacac078c06/flask-3.0.3-py3-none-any.whl").arg("--branch").arg("0.0.1"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----

    ----- stderr -----
    error: `flask` did not resolve to a Git repository, but a Git reference (`--branch 0.0.1`) was provided.
    "###);

    Ok(())
}
/// `uv add` should resolve a `--branch` reference to a concrete commit and
/// record it in the install plan.
#[test]
#[cfg(feature = "git")]
fn add_git_branch() -> Result<()> {
    let context = TestContext::new("3.12");

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.12"
        dependencies = []
    "#})?;

    // The branch name is pinned to a commit SHA in the installed distribution.
    uv_snapshot!(context.filters(), context.add().arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage").arg("--branch").arg("test-branch"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 2 packages in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979)
    ");

    Ok(())
}
/// `uv add --lfs` should fetch Git LFS artifacts, record `lfs = true` in both
/// `pyproject.toml` and `uv.lock`, and recover from partially-populated
/// caches (missing `.ok` checkout marker, deleted LFS object database).
#[test]
#[cfg(feature = "git-lfs")]
fn add_git_lfs() -> Result<()> {
    let context = TestContext::new("3.13").with_git_lfs_config();

    let pyproject_toml = context.temp_dir.child("pyproject.toml");
    pyproject_toml.write_str(indoc! {r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.13"
        dependencies = []
    "#})?;

    // Gather cache locations
    let git_cache = context.cache_dir.child("git-v0");
    let git_checkouts = git_cache.child("checkouts");
    let git_db = git_cache.child("db");
    let repo_url = RepositoryUrl::parse("https://github.com/astral-sh/test-lfs-repo")?;
    // LFS objects live under the bare clone in the database bucket.
    let lfs_db_bucket_objects = git_db
        .child(cache_digest(&repo_url))
        .child(".git")
        .child("lfs");
    // A `.ok` marker records that a checkout completed successfully; note the
    // checkout digest incorporates the LFS flag.
    let ok_checkout_file = git_checkouts
        .child(cache_digest(&repo_url.with_lfs(Some(true))))
        .child("657500f")
        .child(".ok");

    uv_snapshot!(context.filters(), context.add()
        .arg("--no-cache")
        .arg("test-lfs-repo @ git+https://github.com/astral-sh/test-lfs-repo")
        .arg("--rev").arg("657500f0703dc173ac5d68dfa1d7e8c985c84424")
        .arg("--lfs"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 2 packages in [TIME]
    Prepared 1 package in [TIME]
    Installed 1 package in [TIME]
     + test-lfs-repo==0.1.0 (from git+https://github.com/astral-sh/test-lfs-repo@657500f0703dc173ac5d68dfa1d7e8c985c84424#lfs=true)
    ");

    // The source entry should carry `lfs = true`.
    let pyproject_toml = context.read("pyproject.toml");
    insta::with_settings!({
        filters => context.filters(),
    }, {
        assert_snapshot!(
            pyproject_toml, @r#"
        [project]
        name = "project"
        version = "0.1.0"
        requires-python = ">=3.13"
        dependencies = [
            "test-lfs-repo",
        ]

        [tool.uv.sources]
        test-lfs-repo = { git = "https://github.com/astral-sh/test-lfs-repo", rev = "657500f0703dc173ac5d68dfa1d7e8c985c84424", lfs = true }
        "#
        );
    });

    // The lockfile should encode the LFS flag in the source URL.
    let lock = context.read("uv.lock");
    insta::with_settings!({
        filters => context.filters(),
    }, {
        assert_snapshot!(
            lock, @r#"
        version = 1
        revision = 3
        requires-python = ">=3.13"

        [options]
        exclude-newer = "2024-03-25T00:00:00Z"

        [[package]]
        name = "project"
        version = "0.1.0"
        source = { virtual = "." }
        dependencies = [
            { name = "test-lfs-repo" },
        ]

        [package.metadata]
        requires-dist = [{ name = "test-lfs-repo", git = "https://github.com/astral-sh/test-lfs-repo?lfs=true&rev=657500f0703dc173ac5d68dfa1d7e8c985c84424" }]

        [[package]]
        name = "test-lfs-repo"
        version = "0.1.0"
        source = { git = "https://github.com/astral-sh/test-lfs-repo?lfs=true&rev=657500f0703dc173ac5d68dfa1d7e8c985c84424#657500f0703dc173ac5d68dfa1d7e8c985c84424" }
        "#
        );
    });

    // Change revision as an unnamed requirement
    uv_snapshot!(context.filters(), context.add()
        .arg("--no-cache")
        .arg("git+https://github.com/astral-sh/test-lfs-repo")
        .arg("--rev").arg("4e82e85f6a8b8825d614ea23c550af55b2b7738c")
        .arg("--lfs"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 2 packages in [TIME]
    Prepared 1 package in [TIME]
    Uninstalled 1 package in [TIME]
    Installed 1 package in [TIME]
     - test-lfs-repo==0.1.0 (from git+https://github.com/astral-sh/test-lfs-repo@657500f0703dc173ac5d68dfa1d7e8c985c84424#lfs=true)
     + test-lfs-repo==0.1.0 (from git+https://github.com/astral-sh/test-lfs-repo@4e82e85f6a8b8825d614ea23c550af55b2b7738c#lfs=true)
    ");

    // Test LFS not found scenario resulting in an incomplete fetch cache
    // The filters below will remove any boilerplate before what we actually want to match.
    // They help handle slightly different output in uv-distribution/src/source/mod.rs between
    // calls to `git` and `git_metadata` functions which don't have guaranteed execution order.
    // In addition, we can get different error codes depending on where the failure occurs,
    // although we know the error code cannot be 0.
    let mut filters = context.filters();
    filters.push((r"exit_code: -?[1-9]\d*", "exit_code: [ERROR_CODE]"));
    filters.push((
        "(?s)(----- stderr -----).*?The source distribution `[^`]+` is missing Git LFS artifacts.*",
        "$1\n[PREFIX]The source distribution `[DISTRIBUTION]` is missing Git LFS artifacts",
    ));
    uv_snapshot!(filters, context.add()
        .env(EnvVars::UV_INTERNAL__TEST_LFS_DISABLED, "1")
        .arg("git+https://github.com/astral-sh/test-lfs-repo")
        .arg("--rev").arg("657500f0703dc173ac5d68dfa1d7e8c985c84424")
        .arg("--lfs"), @r"
    success: false
    exit_code: [ERROR_CODE]
    ----- stdout -----

    ----- stderr -----
    [PREFIX]The source distribution `[DISTRIBUTION]` is missing Git LFS artifacts
    ");

    // There should be no .ok entry as LFS operations failed
    assert!(!ok_checkout_file.exists(), "Found unexpected .ok file.");

    // Test LFS recovery from an incomplete fetch cache
    uv_snapshot!(context.filters(), context.add()
        .arg("git+https://github.com/astral-sh/test-lfs-repo")
        .arg("--rev").arg("657500f0703dc173ac5d68dfa1d7e8c985c84424")
        .arg("--lfs"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 2 packages in [TIME]
    Prepared 1 package in [TIME]
    Uninstalled 1 package in [TIME]
    Installed 1 package in [TIME]
     - test-lfs-repo==0.1.0 (from git+https://github.com/astral-sh/test-lfs-repo@4e82e85f6a8b8825d614ea23c550af55b2b7738c#lfs=true)
     + test-lfs-repo==0.1.0 (from git+https://github.com/astral-sh/test-lfs-repo@657500f0703dc173ac5d68dfa1d7e8c985c84424#lfs=true)
    ");

    // Verify that we can import the module and access LFS content
    uv_snapshot!(context.filters(), context.python_command()
        .arg("-c")
        .arg("import test_lfs_repo.lfs_module"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    "#);

    // Now let's delete some of the LFS entries from our db...
    fs_err::remove_file(&ok_checkout_file)?;
    fs_err::remove_dir_all(&lfs_db_bucket_objects)?;

    // Test LFS recovery from an incomplete db and non-fresh checkout
    uv_snapshot!(context.filters(), context.add()
        .arg("git+https://github.com/astral-sh/test-lfs-repo")
        .arg("--rev").arg("657500f0703dc173ac5d68dfa1d7e8c985c84424")
        .arg("--reinstall")
        .arg("--lfs"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 2 packages in [TIME]
    Prepared 1 package in [TIME]
    Uninstalled 1 package in [TIME]
    Installed 1 package in [TIME]
     ~ test-lfs-repo==0.1.0 (from git+https://github.com/astral-sh/test-lfs-repo@657500f0703dc173ac5d68dfa1d7e8c985c84424#lfs=true)
    ");

    // Verify that we can import the module and access LFS content
    uv_snapshot!(context.filters(), context.python_command()
        .arg("-c")
        .arg("import test_lfs_repo.lfs_module"), @r#"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    "#);

    // Verify our db and checkout recovered
    assert!(ok_checkout_file.exists());
    assert!(lfs_db_bucket_objects.exists());

    // Exercise the sdist cache
    uv_snapshot!(context.filters(), context.add()
        .arg("git+https://github.com/astral-sh/test-lfs-repo")
        .arg("--rev").arg("657500f0703dc173ac5d68dfa1d7e8c985c84424")
        .arg("--lfs"), @r"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 2 packages in [TIME]
    Audited 1 package in [TIME]
    ");

    Ok(())
}
/// Add a Git requirement using the `--raw-sources` API.
#[test]
#[cfg(feature = "git")]
fn add_git_raw() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(indoc! {r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["anyio==3.7.0"]
"#})?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 4 packages in [TIME]
"###);
uv_snapshot!(context.filters(), context.sync().arg("--frozen"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Prepared 3 packages in [TIME]
Installed 3 packages in [TIME]
+ anyio==3.7.0
+ idna==3.6
+ sniffio==1.3.1
");
// Use an ambiguous tag reference, which would otherwise not resolve.
uv_snapshot!(context.filters(), context.add().arg("uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0.0.1").arg("--raw-sources"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
Prepared 1 package in [TIME]
Installed 1 package in [TIME]
+ uv-public-pypackage==0.1.0 (from git+https://github.com/astral-test/uv-public-pypackage@0dacfd662c64cb4ceb16e6cf65a157a8b715b979)
");
let pyproject_toml = context.read("pyproject.toml");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
pyproject_toml, @r###"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = [
"anyio==3.7.0",
"uv-public-pypackage @ git+https://github.com/astral-test/uv-public-pypackage@0.0.1",
]
"###
);
});
let lock = context.read("uv.lock");
insta::with_settings!({
filters => context.filters(),
}, {
assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "anyio"
version = "3.7.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "idna" },
{ name = "sniffio" },
]
sdist = { url = "https://files.pythonhosted.org/packages/c6/b3/fefbf7e78ab3b805dec67d698dc18dd505af7a18a8dd08868c9b4fa736b5/anyio-3.7.0.tar.gz", hash = "sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce", size = 142737, upload-time = "2023-05-27T11:12:46.688Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/68/fe/7ce1926952c8a403b35029e194555558514b365ad77d75125f521a2bec62/anyio-3.7.0-py3-none-any.whl", hash = "sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0", size = 80873, upload-time = "2023-05-27T11:12:44.474Z" },
]
[[package]]
name = "idna"
version = "3.6"
source = { registry = "https://pypi.org/simple" }
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/extract.rs | crates/uv/tests/it/extract.rs | #![cfg(feature = "r2")]
use backon::{BackoffBuilder, Retryable};
use futures::TryStreamExt;
use tokio_util::compat::FuturesAsyncReadCompatExt;
async fn unzip(url: &str) -> anyhow::Result<(), uv_extract::Error> {
let backoff = backon::ExponentialBuilder::default()
.with_min_delay(std::time::Duration::from_millis(500))
.with_max_times(5)
.build();
let download = || async {
let response = reqwest::get(url).await?;
Ok::<_, reqwest::Error>(response)
};
let response = download.retry(backoff).await.unwrap();
let reader = response
.bytes_stream()
.map_err(std::io::Error::other)
.into_async_read();
let target = tempfile::TempDir::new().map_err(uv_extract::Error::Io)?;
uv_extract::stream::unzip(reader.compat(), target.path()).await
}
// "accept" corpus: well-formed archives that must extract successfully. Each
// test streams a fixture from the R2 bucket and unwraps the result; the
// trailing `assert_debug_snapshot!((), @"()")` keeps an insta assertion in
// every test body.
#[tokio::test]
async fn malo_accept_comment() {
    unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/accept/comment.zip").await.unwrap();
    insta::assert_debug_snapshot!((), @"()");
}

#[tokio::test]
async fn malo_accept_data_descriptor_zip64() {
    unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/accept/data_descriptor_zip64.zip").await.unwrap();
    insta::assert_debug_snapshot!((), @"()");
}

#[tokio::test]
async fn malo_accept_data_descriptor() {
    unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/accept/data_descriptor.zip").await.unwrap();
    insta::assert_debug_snapshot!((), @"()");
}

#[tokio::test]
async fn malo_accept_deflate() {
    unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/accept/deflate.zip").await.unwrap();
    insta::assert_debug_snapshot!((), @"()");
}

#[tokio::test]
async fn malo_accept_normal_deflate_zip64_extra() {
    unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/accept/normal_deflate_zip64_extra.zip").await.unwrap();
    insta::assert_debug_snapshot!((), @"()");
}

#[tokio::test]
async fn malo_accept_normal_deflate() {
    unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/accept/normal_deflate.zip").await.unwrap();
    insta::assert_debug_snapshot!((), @"()");
}

#[tokio::test]
async fn malo_accept_store() {
    unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/accept/store.zip").await.unwrap();
    insta::assert_debug_snapshot!((), @"()");
}

#[tokio::test]
async fn malo_accept_subdir() {
    unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/accept/subdir.zip").await.unwrap();
    insta::assert_debug_snapshot!((), @"()");
}

#[tokio::test]
async fn malo_accept_zip64_eocd() {
    unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/accept/zip64_eocd.zip").await.unwrap();
    insta::assert_debug_snapshot!((), @"()");
}
// "iffy" corpus: archives that are unusual but not necessarily malicious.
// Some are tolerated (`Ok`), others rejected with a specific error; the
// inline snapshots pin down which outcome each fixture produces.
#[tokio::test]
async fn malo_iffy_8bitcomment() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/iffy/8bitcomment.zip").await;
    insta::assert_debug_snapshot!(result, @r"
    Err(
        ZipInZip,
    )
    ");
}

#[tokio::test]
async fn malo_iffy_extra3byte() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/iffy/extra3byte.zip").await;
    insta::assert_debug_snapshot!(result, @r"
    Ok(
        (),
    )
    ");
}

#[tokio::test]
async fn malo_iffy_non_ascii_original_name() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/iffy/non_ascii_original_name.zip").await;
    insta::assert_debug_snapshot!(result, @r"
    Err(
        LocalHeaderNotUtf8 {
            offset: 0,
        },
    )
    ");
}

#[tokio::test]
async fn malo_iffy_nosubdir() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/iffy/nosubdir.zip").await;
    insta::assert_debug_snapshot!(result, @r"
    Ok(
        (),
    )
    ");
}

#[tokio::test]
async fn malo_iffy_prefix() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/iffy/prefix.zip").await;
    insta::assert_debug_snapshot!(result, @r"
    Err(
        AsyncZip(
            UnexpectedHeaderError(
                1482184792,
                67324752,
            ),
        ),
    )
    ");
}

#[tokio::test]
async fn malo_iffy_suffix_not_comment() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/iffy/suffix_not_comment.zip").await;
    insta::assert_debug_snapshot!(result, @r"
    Err(
        TrailingContents,
    )
    ");
}

#[tokio::test]
async fn malo_iffy_zip64_eocd_extensible_data() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/iffy/zip64_eocd_extensible_data.zip").await;
    insta::assert_debug_snapshot!(result, @r"
    Err(
        ExtensibleData,
    )
    ");
}

#[tokio::test]
async fn malo_iffy_zip64_extra_too_long() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/iffy/zip64_extra_too_long.zip").await;
    insta::assert_debug_snapshot!(result, @r"
    Err(
        AsyncZip(
            Zip64ExtendedInformationFieldTooLong {
                expected: 16,
                actual: 8,
            },
        ),
    )
    ");
}

#[tokio::test]
async fn malo_iffy_zip64_extra_too_short() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/iffy/zip64_extra_too_short.zip").await;
    insta::assert_debug_snapshot!(result, @r#"
    Err(
        BadCompressedSize {
            path: "fixme",
            computed: 7,
            expected: 4294967295,
        },
    )
    "#);
}
// "reject" corpus: structurally inconsistent archives that must fail to
// extract. Each test unwraps the error and snapshots its exact shape.
#[tokio::test]
async fn malo_reject_cd_extra_entry() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/reject/cd_extra_entry.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r#"
    MissingLocalFileHeader {
        path: "fixme",
        offset: 0,
    }
    "#);
}

#[tokio::test]
async fn malo_reject_cd_missing_entry() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/reject/cd_missing_entry.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r#"
    MissingCentralDirectoryEntry {
        path: "two",
        offset: 42,
    }
    "#);
}

#[tokio::test]
async fn malo_reject_data_descriptor_bad_crc_0() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/reject/data_descriptor_bad_crc_0.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r#"
    BadCrc32 {
        path: "fixme",
        computed: 2183870971,
        expected: 0,
    }
    "#);
}

#[tokio::test]
async fn malo_reject_data_descriptor_bad_crc() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/reject/data_descriptor_bad_crc.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r#"
    BadCrc32 {
        path: "fixme",
        computed: 907060870,
        expected: 1,
    }
    "#);
}

#[tokio::test]
async fn malo_reject_data_descriptor_bad_csize() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/reject/data_descriptor_bad_csize.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r#"
    BadCompressedSize {
        path: "fixme",
        computed: 7,
        expected: 8,
    }
    "#);
}

#[tokio::test]
async fn malo_reject_data_descriptor_bad_usize_no_sig() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/reject/data_descriptor_bad_usize_no_sig.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r#"
    BadUncompressedSize {
        path: "fixme",
        computed: 5,
        expected: 6,
    }
    "#);
}

#[tokio::test]
async fn malo_reject_data_descriptor_bad_usize() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/reject/data_descriptor_bad_usize.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r#"
    BadUncompressedSize {
        path: "fixme",
        computed: 5,
        expected: 6,
    }
    "#);
}

#[tokio::test]
async fn malo_reject_data_descriptor_zip64_csize() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/reject/data_descriptor_zip64_csize.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r#"
    BadCompressedSize {
        path: "fixme",
        computed: 7,
        expected: 8,
    }
    "#);
}

#[tokio::test]
async fn malo_reject_data_descriptor_zip64_usize() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/reject/data_descriptor_zip64_usize.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r#"
    BadUncompressedSize {
        path: "fixme",
        computed: 5,
        expected: 6,
    }
    "#);
}

#[tokio::test]
async fn malo_reject_dupe_eocd() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/reject/dupe_eocd.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @"TrailingContents");
}

#[tokio::test]
async fn malo_reject_shortextra() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/reject/shortextra.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r"
    AsyncZip(
        InvalidExtraFieldHeader(
            9,
        ),
    )
    ");
}

#[tokio::test]
async fn malo_reject_zip64_extra_csize() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/reject/zip64_extra_csize.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r#"
    BadCompressedSize {
        path: "fixme",
        computed: 7,
        expected: 8,
    }
    "#);
}

#[tokio::test]
async fn malo_reject_zip64_extra_usize() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/reject/zip64_extra_usize.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r#"
    BadUncompressedSize {
        path: "fixme",
        computed: 5,
        expected: 6,
    }
    "#);
}
// "malicious" corpus: archives crafted to smuggle conflicting metadata (e.g.
// duplicate unicode-path extra fields, nested zips, confused EOCD records).
// All must be rejected with the snapshotted error.
#[tokio::test]
async fn malo_malicious_second_unicode_extra() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/malicious/second_unicode_extra.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r"
    AsyncZip(
        DuplicateExtraFieldHeader(
            28789,
        ),
    )
    ");
}

#[tokio::test]
async fn malo_malicious_short_usize_zip64() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/malicious/short_usize_zip64.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r"
    AsyncZip(
        Zip64ExtendedInformationFieldTooLong {
            expected: 16,
            actual: 0,
        },
    )
    ");
}

#[tokio::test]
async fn malo_malicious_short_usize() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/malicious/short_usize.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r#"
    BadUncompressedSize {
        path: "file",
        computed: 51,
        expected: 9,
    }
    "#);
}

#[tokio::test]
async fn malo_malicious_zip64_eocd_confusion() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/malicious/zip64_eocd_confusion.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @"ExtensibleData");
}

#[tokio::test]
async fn malo_malicious_unicode_extra_chain() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/malicious/unicode_extra_chain.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @r"
    AsyncZip(
        DuplicateExtraFieldHeader(
            28789,
        ),
    )
    ");
}

#[tokio::test]
async fn malo_malicious_zipinzip() {
    let result = unzip("https://pub-c6f28d316acd406eae43501e51ad30fa.r2.dev/0723f54ceb33a4fdc7f2eddc19635cd704d61c84/malicious/zipinzip.zip").await.unwrap_err();
    insta::assert_debug_snapshot!(result, @"ZipInZip");
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/main.rs | crates/uv/tests/it/main.rs | //! this is the single integration test, as documented by matklad
//! in <https://matklad.github.io/2021/02/27/delete-cargo-integration-tests.html>
pub(crate) mod common;

// Test modules, sorted alphabetically; `common` stays first as the shared
// test harness. Each `#[cfg]` gate stays attached to its module so
// feature-limited builds compile only the suites they can run.
mod auth;
mod branching_urls;
#[cfg(all(feature = "python", feature = "pypi"))]
mod build;
#[cfg(feature = "python")]
mod build_backend;
#[cfg(all(feature = "python", feature = "pypi"))]
mod cache_clean;
#[cfg(all(feature = "python", feature = "pypi"))]
mod cache_prune;
#[cfg(all(feature = "python", feature = "pypi"))]
mod cache_size;
#[cfg(all(feature = "python", feature = "pypi", feature = "test-ecosystem"))]
mod ecosystem;
#[cfg(all(feature = "python", feature = "pypi"))]
mod edit;
#[cfg(all(feature = "python", feature = "pypi"))]
mod export;
mod extract;
#[cfg(all(feature = "python", feature = "pypi"))]
mod format;
mod help;
#[cfg(all(feature = "python", feature = "pypi", feature = "git"))]
mod init;
#[cfg(all(feature = "python", feature = "pypi"))]
mod lock;
#[cfg(all(feature = "python", feature = "pypi"))]
mod lock_conflict;
#[cfg(all(feature = "python", feature = "pypi"))]
mod lock_exclude_newer_relative;
mod lock_scenarios;
mod network;
#[cfg(all(feature = "python", feature = "pypi"))]
mod pip_check;
#[cfg(all(feature = "python", feature = "pypi"))]
mod pip_compile;
mod pip_compile_scenarios;
mod pip_debug;
#[cfg(all(feature = "python", feature = "pypi"))]
mod pip_freeze;
#[cfg(all(feature = "python", feature = "pypi"))]
mod pip_install;
mod pip_install_scenarios;
mod pip_list;
mod pip_show;
#[cfg(all(feature = "python", feature = "pypi"))]
mod pip_sync;
mod pip_tree;
mod pip_uninstall;
#[cfg(feature = "pypi")]
mod publish;
mod python_dir;
#[cfg(feature = "python")]
mod python_find;
#[cfg(feature = "python-managed")]
mod python_install;
#[cfg(feature = "python")]
mod python_list;
#[cfg(all(feature = "python", feature = "pypi"))]
mod python_module;
#[cfg(feature = "python")]
mod python_pin;
#[cfg(feature = "python-managed")]
mod python_upgrade;
#[cfg(all(feature = "python", feature = "pypi"))]
mod run;
#[cfg(feature = "self-update")]
mod self_update;
#[cfg(all(feature = "python", feature = "pypi"))]
mod show_settings;
#[cfg(all(feature = "python", feature = "pypi"))]
mod sync;
#[cfg(all(feature = "python", feature = "pypi"))]
mod tool_dir;
#[cfg(all(feature = "python", feature = "pypi"))]
mod tool_install;
#[cfg(all(feature = "python", feature = "pypi"))]
mod tool_list;
#[cfg(all(feature = "python", feature = "pypi"))]
mod tool_run;
#[cfg(all(feature = "python", feature = "pypi"))]
mod tool_uninstall;
#[cfg(all(feature = "python", feature = "pypi"))]
mod tool_upgrade;
#[cfg(all(feature = "python", feature = "pypi"))]
mod tree;
#[cfg(feature = "python")]
mod venv;
mod version;
#[cfg(all(feature = "python", feature = "pypi"))]
mod workflow;
mod workspace;
mod workspace_dir;
mod workspace_list;
mod workspace_metadata;
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/pip_tree.rs | crates/uv/tests/it/pip_tree.rs | #![cfg(not(windows))]
use assert_cmd::assert::OutputAssertExt;
use std::process::Command;
use assert_fs::fixture::FileTouch;
use assert_fs::fixture::FileWriteStr;
use assert_fs::fixture::PathChild;
use assert_fs::fixture::PathCreateDir;
use indoc::indoc;
use uv_static::EnvVars;
use crate::common::get_bin;
use crate::common::{TestContext, uv_snapshot};
/// With nothing installed in the environment, `uv pip tree` prints nothing
/// and exits successfully.
#[test]
fn no_package() {
    let context = TestContext::new("3.12");
    uv_snapshot!(context.filters(), context.pip_tree(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    "###
    );
}
/// `--prune` should remove a package that happens to be the last entry in its
/// sibling group without corrupting the tree drawing.
#[test]
#[cfg(feature = "pypi")]
fn prune_last_in_the_subgroup() {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("requests==2.31.0").unwrap();

    uv_snapshot!(context
        .pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 5 packages in [TIME]
    Prepared 5 packages in [TIME]
    Installed 5 packages in [TIME]
     + certifi==2024.2.2
     + charset-normalizer==3.3.2
     + idna==3.6
     + requests==2.31.0
     + urllib3==2.2.1
    "###
    );

    context.assert_command("import requests").success();

    // `certifi` is pruned from requests' dependency group.
    uv_snapshot!(context.filters(), context.pip_tree().arg("--prune").arg("certifi"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    requests v2.31.0
    ├── charset-normalizer v3.3.2
    ├── idna v3.6
    └── urllib3 v2.2.1

    ----- stderr -----
    "###
    );
}

/// A single root package renders with all of its direct dependencies.
#[test]
#[cfg(feature = "pypi")]
fn single_package() {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("requests==2.31.0").unwrap();

    uv_snapshot!(context
        .pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 5 packages in [TIME]
    Prepared 5 packages in [TIME]
    Installed 5 packages in [TIME]
     + certifi==2024.2.2
     + charset-normalizer==3.3.2
     + idna==3.6
     + requests==2.31.0
     + urllib3==2.2.1
    "###
    );

    context.assert_command("import requests").success();

    uv_snapshot!(context.filters(), context.pip_tree(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    requests v2.31.0
    ├── certifi v2024.2.2
    ├── charset-normalizer v3.3.2
    ├── idna v3.6
    └── urllib3 v2.2.1

    ----- stderr -----
    "###
    );
}

/// Transitive dependencies render as nested subtrees (and shared dependencies
/// like `markupsafe` appear under each dependent).
#[test]
#[cfg(feature = "pypi")]
fn nested_dependencies() {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("flask").unwrap();

    uv_snapshot!(context
        .pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 7 packages in [TIME]
    Prepared 7 packages in [TIME]
    Installed 7 packages in [TIME]
     + blinker==1.7.0
     + click==8.1.7
     + flask==3.0.2
     + itsdangerous==2.1.2
     + jinja2==3.1.3
     + markupsafe==2.1.5
     + werkzeug==3.0.1
    "###
    );

    uv_snapshot!(context.filters(), context.pip_tree(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    flask v3.0.2
    ├── blinker v1.7.0
    ├── click v8.1.7
    ├── itsdangerous v2.1.2
    ├── jinja2 v3.1.3
    │   └── markupsafe v2.1.5
    └── werkzeug v3.0.1
        └── markupsafe v2.1.5

    ----- stderr -----
    "###
    );
}

/// Identical test as `invert` since `--reverse` is simply an alias for `--invert`.
#[test]
#[cfg(feature = "pypi")]
fn reverse() {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("flask").unwrap();

    uv_snapshot!(context
        .pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 7 packages in [TIME]
    Prepared 7 packages in [TIME]
    Installed 7 packages in [TIME]
     + blinker==1.7.0
     + click==8.1.7
     + flask==3.0.2
     + itsdangerous==2.1.2
     + jinja2==3.1.3
     + markupsafe==2.1.5
     + werkzeug==3.0.1
    "###
    );

    uv_snapshot!(context.filters(), context.pip_tree().arg("--reverse"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    blinker v1.7.0
    └── flask v3.0.2
    click v8.1.7
    └── flask v3.0.2
    itsdangerous v2.1.2
    └── flask v3.0.2
    markupsafe v2.1.5
    ├── jinja2 v3.1.3
    │   └── flask v3.0.2
    └── werkzeug v3.0.1
        └── flask v3.0.2

    ----- stderr -----
    "###
    );
}

/// `--invert` renders the tree bottom-up: leaves become roots, with their
/// dependents nested beneath them.
#[test]
#[cfg(feature = "pypi")]
fn invert() {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("flask").unwrap();

    uv_snapshot!(context
        .pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 7 packages in [TIME]
    Prepared 7 packages in [TIME]
    Installed 7 packages in [TIME]
     + blinker==1.7.0
     + click==8.1.7
     + flask==3.0.2
     + itsdangerous==2.1.2
     + jinja2==3.1.3
     + markupsafe==2.1.5
     + werkzeug==3.0.1
    "###
    );

    uv_snapshot!(context.filters(), context.pip_tree().arg("--invert"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    blinker v1.7.0
    └── flask v3.0.2
    click v8.1.7
    └── flask v3.0.2
    itsdangerous v2.1.2
    └── flask v3.0.2
    markupsafe v2.1.5
    ├── jinja2 v3.1.3
    │   └── flask v3.0.2
    └── werkzeug v3.0.1
        └── flask v3.0.2

    ----- stderr -----
    "###
    );
}
/// `--depth` limits how many levels of the dependency tree are rendered:
/// 0 shows roots only, 1 adds direct dependencies, 2 adds transitives.
///
/// NOTE(review): unlike the other tests in this file, these invocations build
/// the process by hand via `Command::new(get_bin())` rather than using
/// `context.pip_tree()` — presumably historic; confirm whether the helper sets
/// equivalent env (e.g. `UV_NO_WRAP`) before consolidating.
#[test]
#[cfg(feature = "pypi")]
fn depth() {
    let context = TestContext::new("3.12");

    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("flask").unwrap();

    uv_snapshot!(context.pip_install()
        .arg("-r")
        .arg("requirements.txt")
        .arg("--strict"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----

    ----- stderr -----
    Resolved 7 packages in [TIME]
    Prepared 7 packages in [TIME]
    Installed 7 packages in [TIME]
     + blinker==1.7.0
     + click==8.1.7
     + flask==3.0.2
     + itsdangerous==2.1.2
     + jinja2==3.1.3
     + markupsafe==2.1.5
     + werkzeug==3.0.1
    "###
    );

    // Depth 0: roots only.
    uv_snapshot!(context.filters(), Command::new(get_bin())
        .arg("pip")
        .arg("tree")
        .arg("--cache-dir")
        .arg(context.cache_dir.path())
        .arg("--depth")
        .arg("0")
        .env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str())
        .env(EnvVars::UV_NO_WRAP, "1")
        .current_dir(&context.temp_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    flask v3.0.2

    ----- stderr -----
    "###
    );

    // Depth 1: roots plus direct dependencies.
    uv_snapshot!(context.filters(), Command::new(get_bin())
        .arg("pip")
        .arg("tree")
        .arg("--cache-dir")
        .arg(context.cache_dir.path())
        .arg("--depth")
        .arg("1")
        .env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str())
        .env(EnvVars::UV_NO_WRAP, "1")
        .current_dir(&context.temp_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    flask v3.0.2
    ├── blinker v1.7.0
    ├── click v8.1.7
    ├── itsdangerous v2.1.2
    ├── jinja2 v3.1.3
    └── werkzeug v3.0.1

    ----- stderr -----
    "###
    );

    // Depth 2: transitive dependencies (markupsafe) appear.
    uv_snapshot!(context.filters(), Command::new(get_bin())
        .arg("pip")
        .arg("tree")
        .arg("--cache-dir")
        .arg(context.cache_dir.path())
        .arg("--depth")
        .arg("2")
        .env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str())
        .env(EnvVars::UV_NO_WRAP, "1")
        .current_dir(&context.temp_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    flask v3.0.2
    ├── blinker v1.7.0
    ├── click v8.1.7
    ├── itsdangerous v2.1.2
    ├── jinja2 v3.1.3
    │   └── markupsafe v2.1.5
    └── werkzeug v3.0.1
        └── markupsafe v2.1.5

    ----- stderr -----
    "###
    );
}
/// `--prune PKG` removes the given package (and its now-unreachable subtree)
/// from the display; repeating the flag prunes multiple packages. Packages
/// that become orphaned (e.g. `markupsafe`) are promoted to roots.
#[test]
#[cfg(feature = "pypi")]
fn prune() {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("flask").unwrap();
uv_snapshot!(context.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Prepared 7 packages in [TIME]
Installed 7 packages in [TIME]
 + blinker==1.7.0
 + click==8.1.7
 + flask==3.0.2
 + itsdangerous==2.1.2
 + jinja2==3.1.3
 + markupsafe==2.1.5
 + werkzeug==3.0.1
"###
);
// Prune a single package: `werkzeug` (and its edge to markupsafe) disappears.
uv_snapshot!(context.filters(), Command::new(get_bin())
.arg("pip")
.arg("tree")
.arg("--cache-dir")
.arg(context.cache_dir.path())
.arg("--prune")
.arg("werkzeug")
.env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str())
.env(EnvVars::UV_NO_WRAP, "1")
.current_dir(&context.temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
flask v3.0.2
├── blinker v1.7.0
├── click v8.1.7
├── itsdangerous v2.1.2
└── jinja2 v3.1.3
    └── markupsafe v2.1.5
----- stderr -----
"###
);
// Prune two packages: `markupsafe` loses all parents and becomes a root.
uv_snapshot!(context.filters(), Command::new(get_bin())
.arg("pip")
.arg("tree")
.arg("--cache-dir")
.arg(context.cache_dir.path())
.arg("--prune")
.arg("werkzeug")
.arg("--prune")
.arg("jinja2")
.env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str())
.env(EnvVars::UV_NO_WRAP, "1")
.current_dir(&context.temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
flask v3.0.2
├── blinker v1.7.0
├── click v8.1.7
└── itsdangerous v2.1.2
markupsafe v2.1.5
----- stderr -----
"###
);
// NOTE(review): this invocation and snapshot duplicate the first
// `--prune werkzeug` check above verbatim — likely redundant.
uv_snapshot!(context.filters(), Command::new(get_bin())
.arg("pip")
.arg("tree")
.arg("--cache-dir")
.arg(context.cache_dir.path())
.arg("--prune")
.arg("werkzeug")
.env(EnvVars::VIRTUAL_ENV, context.venv.as_os_str())
.env(EnvVars::UV_NO_WRAP, "1")
.current_dir(&context.temp_dir), @r###"
success: true
exit_code: 0
----- stdout -----
flask v3.0.2
├── blinker v1.7.0
├── click v8.1.7
├── itsdangerous v2.1.2
└── jinja2 v3.1.3
    └── markupsafe v2.1.5
----- stderr -----
"###
);
}
/// Ensure `pip tree` behaves correctly after a package has been removed.
#[test]
#[cfg(feature = "pypi")]
fn removed_dependency() {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("requests==2.31.0").unwrap();
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
Prepared 5 packages in [TIME]
Installed 5 packages in [TIME]
+ certifi==2024.2.2
+ charset-normalizer==3.3.2
+ idna==3.6
+ requests==2.31.0
+ urllib3==2.2.1
"###
);
uv_snapshot!(context.filters(), context
.pip_uninstall()
.arg("requests"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Uninstalled 1 package in [TIME]
- requests==2.31.0
"###
);
uv_snapshot!(context.filters(), context.pip_tree(), @r###"
success: true
exit_code: 0
----- stdout -----
certifi v2024.2.2
charset-normalizer v3.3.2
idna v3.6
urllib3 v2.2.1
----- stderr -----
"###
);
}
/// With multiple requested packages, each requirement becomes its own root in
/// the tree (roots are listed alphabetically).
#[test]
#[cfg(feature = "pypi")]
fn multiple_packages() {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt
.write_str(
r"
requests==2.31.0
click==8.1.7
",
)
.unwrap();
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 6 packages in [TIME]
Prepared 6 packages in [TIME]
Installed 6 packages in [TIME]
 + certifi==2024.2.2
 + charset-normalizer==3.3.2
 + click==8.1.7
 + idna==3.6
 + requests==2.31.0
 + urllib3==2.2.1
"###
);
context.assert_command("import requests").success();
uv_snapshot!(context.filters(), context.pip_tree(), @r###"
success: true
exit_code: 0
----- stdout -----
click v8.1.7
requests v2.31.0
├── certifi v2024.2.2
├── charset-normalizer v3.3.2
├── idna v3.6
└── urllib3 v2.2.1
----- stderr -----
"###
);
}
/// Show the installed tree in the presence of a cycle.
/// `testtools` and `fixtures` depend on each other; already-rendered subtrees
/// are elided with a `(*)` marker instead of recursing forever.
#[test]
#[cfg(feature = "pypi")]
fn cycle() {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt
.write_str(
r"
testtools==2.3.0
fixtures==3.0.0
",
)
.unwrap();
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 10 packages in [TIME]
Prepared 10 packages in [TIME]
Installed 10 packages in [TIME]
 + argparse==1.4.0
 + extras==1.0.0
 + fixtures==3.0.0
 + linecache2==1.0.0
 + pbr==6.0.0
 + python-mimeparse==1.6.0
 + six==1.16.0
 + testtools==2.3.0
 + traceback2==1.4.0
 + unittest2==1.1.0
"###
);
uv_snapshot!(context.filters(), context.pip_tree(), @r###"
success: true
exit_code: 0
----- stdout -----
testtools v2.3.0
├── extras v1.0.0
├── fixtures v3.0.0
│   ├── pbr v6.0.0
│   ├── six v1.16.0
│   └── testtools v2.3.0 (*)
├── pbr v6.0.0
├── python-mimeparse v1.6.0
├── six v1.16.0
├── traceback2 v1.4.0
│   └── linecache2 v1.0.0
└── unittest2 v1.1.0
    ├── argparse v1.4.0
    ├── six v1.16.0
    └── traceback2 v1.4.0 (*)
(*) Package tree already displayed
----- stderr -----
"###
);
}
/// Both `pendulum` and `time-machine` depend on `python-dateutil`; the shared
/// subtree is rendered once and deduplicated with a `(*)` marker thereafter.
// NOTE(review): the original comment mentioned `boto3`, but the test actually
// uses `time-machine` as the second dependent.
#[test]
#[cfg(feature = "pypi")]
fn multiple_packages_shared_descendant() {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt
.write_str(
r"
pendulum
time-machine
",
)
.unwrap();
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
Prepared 5 packages in [TIME]
Installed 5 packages in [TIME]
 + pendulum==3.0.0
 + python-dateutil==2.9.0.post0
 + six==1.16.0
 + time-machine==2.14.1
 + tzdata==2024.1
"###
);
uv_snapshot!(context.filters(), context.pip_tree(), @r###"
success: true
exit_code: 0
----- stdout -----
pendulum v3.0.0
├── python-dateutil v2.9.0.post0
│   └── six v1.16.0
├── time-machine v2.14.1
│   └── python-dateutil v2.9.0.post0 (*)
└── tzdata v2024.1
(*) Package tree already displayed
----- stderr -----
"###
);
}
/// Test the interaction between `--no-dedupe` and `--invert`: the inverted
/// tree repeats shared dependents in full instead of eliding them with `(*)`.
#[test]
#[cfg(feature = "pypi")]
fn no_dedupe_and_invert() {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt
.write_str(
r"
pendulum
time-machine
",
)
.unwrap();
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
Prepared 5 packages in [TIME]
Installed 5 packages in [TIME]
 + pendulum==3.0.0
 + python-dateutil==2.9.0.post0
 + six==1.16.0
 + time-machine==2.14.1
 + tzdata==2024.1
"###
);
uv_snapshot!(context.filters(), context.pip_tree().arg("--no-dedupe").arg("--invert"), @r###"
success: true
exit_code: 0
----- stdout -----
six v1.16.0
└── python-dateutil v2.9.0.post0
    ├── pendulum v3.0.0
    └── time-machine v2.14.1
        └── pendulum v3.0.0
tzdata v2024.1
└── pendulum v3.0.0
----- stderr -----
"###
);
}
/// `--no-dedupe` repeats shared subtrees in full (here `python-dateutil` →
/// `six` is printed twice) instead of collapsing them with a `(*)` marker.
#[test]
#[cfg(feature = "pypi")]
fn no_dedupe() {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt
.write_str(
r"
pendulum
time-machine
",
)
.unwrap();
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
Prepared 5 packages in [TIME]
Installed 5 packages in [TIME]
 + pendulum==3.0.0
 + python-dateutil==2.9.0.post0
 + six==1.16.0
 + time-machine==2.14.1
 + tzdata==2024.1
"###
);
uv_snapshot!(context.filters(), context.pip_tree()
.arg("--no-dedupe"), @r###"
success: true
exit_code: 0
----- stdout -----
pendulum v3.0.0
├── python-dateutil v2.9.0.post0
│   └── six v1.16.0
├── time-machine v2.14.1
│   └── python-dateutil v2.9.0.post0
│       └── six v1.16.0
└── tzdata v2024.1
----- stderr -----
"###
);
}
/// An editable install (with a Git-sourced dependency) appears in the tree
/// like any other package, rooted at the editable project.
#[test]
#[cfg(feature = "git")]
fn with_editable() {
let context = TestContext::new("3.12");
// Install the editable package.
uv_snapshot!(context.filters(), context
.pip_install()
.arg("-e")
.arg(context.workspace_root.join("test/packages/hatchling_editable")), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 2 packages in [TIME]
Prepared 2 packages in [TIME]
Installed 2 packages in [TIME]
 + hatchling-editable==0.1.0 (from file://[WORKSPACE]/test/packages/hatchling_editable)
 + iniconfig==2.0.1.dev6+g9cae431 (from git+https://github.com/pytest-dev/iniconfig@9cae43103df70bac6fde7b9f35ad11a9f1be0cb4)
"###
);
// Extra filters normalize underline rules and collapse runs of spaces so the
// snapshot is stable across output widths.
let filters = context
.filters()
.into_iter()
.chain(vec![(r"\-\-\-\-\-\-+.*", "[UNDERLINE]"), (" +", " ")])
.collect::<Vec<_>>();
uv_snapshot!(filters, context.pip_tree(), @r###"
success: true
exit_code: 0
----- stdout -----
hatchling-editable v0.1.0
└── iniconfig v2.0.1.dev6+g9cae431
----- stderr -----
"###
);
}
/// `--package PKG` restricts the tree to the subtree(s) rooted at the given
/// package(s); the flag may be repeated to select multiple roots.
#[test]
#[cfg(feature = "pypi")]
fn package_flag() {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("flask").unwrap();
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Prepared 7 packages in [TIME]
Installed 7 packages in [TIME]
 + blinker==1.7.0
 + click==8.1.7
 + flask==3.0.2
 + itsdangerous==2.1.2
 + jinja2==3.1.3
 + markupsafe==2.1.5
 + werkzeug==3.0.1
"###
);
uv_snapshot!(
context.filters(),
context.pip_tree()
.arg("--package")
.arg("werkzeug"),
@r###"
success: true
exit_code: 0
----- stdout -----
werkzeug v3.0.1
└── markupsafe v2.1.5
----- stderr -----
"###
);
uv_snapshot!(
context.filters(),
context.pip_tree()
.arg("--package")
.arg("werkzeug")
.arg("--package")
.arg("jinja2"),
@r###"
success: true
exit_code: 0
----- stdout -----
jinja2 v3.1.3
└── markupsafe v2.1.5
werkzeug v3.0.1
└── markupsafe v2.1.5
----- stderr -----
"###
);
}
/// `--show-version-specifiers` annotates each dependency edge with the version
/// requirement declared by its parent (`[required: ...]`).
#[test]
#[cfg(feature = "pypi")]
fn show_version_specifiers_simple() {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("requests==2.31.0").unwrap();
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
Prepared 5 packages in [TIME]
Installed 5 packages in [TIME]
 + certifi==2024.2.2
 + charset-normalizer==3.3.2
 + idna==3.6
 + requests==2.31.0
 + urllib3==2.2.1
"###
);
uv_snapshot!(context.filters(), context.pip_tree().arg("--show-version-specifiers"), @r###"
success: true
exit_code: 0
----- stdout -----
requests v2.31.0
├── certifi v2024.2.2 [required: >=2017.4.17]
├── charset-normalizer v3.3.2 [required: >=2, <4]
├── idna v3.6 [required: >=2.5, <4]
└── urllib3 v2.2.1 [required: >=1.21.1, <3]
----- stderr -----
"###
);
}
/// When combined with `--invert`, the specifier annotation flips direction:
/// each edge reads `[requires: <child> <spec>]` on the dependent instead of
/// `[required: ...]` on the dependency.
#[test]
#[cfg(feature = "pypi")]
fn show_version_specifiers_with_invert() {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("flask").unwrap();
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Prepared 7 packages in [TIME]
Installed 7 packages in [TIME]
 + blinker==1.7.0
 + click==8.1.7
 + flask==3.0.2
 + itsdangerous==2.1.2
 + jinja2==3.1.3
 + markupsafe==2.1.5
 + werkzeug==3.0.1
"###
);
uv_snapshot!(
context.filters(),
context.pip_tree()
.arg("--show-version-specifiers")
.arg("--invert"), @r###"
success: true
exit_code: 0
----- stdout -----
blinker v1.7.0
└── flask v3.0.2 [requires: blinker >=1.6.2]
click v8.1.7
└── flask v3.0.2 [requires: click >=8.1.3]
itsdangerous v2.1.2
└── flask v3.0.2 [requires: itsdangerous >=2.1.2]
markupsafe v2.1.5
├── jinja2 v3.1.3 [requires: markupsafe >=2.0]
│   └── flask v3.0.2 [requires: jinja2 >=3.1.2]
└── werkzeug v3.0.1 [requires: markupsafe >=2.1.1]
    └── flask v3.0.2 [requires: werkzeug >=3.0.0]
----- stderr -----
"###
);
}
/// `--show-version-specifiers` composes with `--package`: the filtered subtree
/// still carries `[required: ...]` annotations on its edges.
#[test]
#[cfg(feature = "pypi")]
fn show_version_specifiers_with_package() {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("flask").unwrap();
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 7 packages in [TIME]
Prepared 7 packages in [TIME]
Installed 7 packages in [TIME]
 + blinker==1.7.0
 + click==8.1.7
 + flask==3.0.2
 + itsdangerous==2.1.2
 + jinja2==3.1.3
 + markupsafe==2.1.5
 + werkzeug==3.0.1
"###
);
uv_snapshot!(
context.filters(),
context.pip_tree()
.arg("--show-version-specifiers")
.arg("--package")
.arg("werkzeug"), @r###"
success: true
exit_code: 0
----- stdout -----
werkzeug v3.0.1
└── markupsafe v2.1.5 [required: >=2.1.1]
----- stderr -----
"###
);
}
/// Behavior of `pip tree` under `--quiet`.
///
/// NOTE(review): renamed from `print_output_even_with_quite_flag` to fix the
/// "quite" → "quiet" typo. Also note that, despite the name, the recorded
/// snapshot shows an *empty* stdout under `--quiet`; the name likely predates
/// a behavior change — confirm the intended contract before relying on it.
#[test]
#[cfg(feature = "pypi")]
fn print_output_even_with_quiet_flag() {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("requests==2.31.0").unwrap();
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 5 packages in [TIME]
Prepared 5 packages in [TIME]
Installed 5 packages in [TIME]
 + certifi==2024.2.2
 + charset-normalizer==3.3.2
 + idna==3.6
 + requests==2.31.0
 + urllib3==2.2.1
"###
);
context.assert_command("import requests").success();
uv_snapshot!(context.filters(), context.pip_tree().arg("--quiet"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
"###
);
}
/// `--outdated` annotates packages with a newer available release using
/// `(latest: vX.Y.Z)`; up-to-date packages are shown without annotation.
#[test]
#[cfg(feature = "pypi")]
fn outdated() {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
// Pin an old flask so it is guaranteed to be outdated.
requirements_txt.write_str("flask==2.0.0").unwrap();
uv_snapshot!(context
.pip_install()
.arg("-r")
.arg("requirements.txt")
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 6 packages in [TIME]
Prepared 6 packages in [TIME]
Installed 6 packages in [TIME]
 + click==8.1.7
 + flask==2.0.0
 + itsdangerous==2.1.2
 + jinja2==3.1.3
 + markupsafe==2.1.5
 + werkzeug==3.0.1
"###
);
uv_snapshot!(
context.filters(),
context.pip_tree().arg("--outdated"), @r###"
success: true
exit_code: 0
----- stdout -----
flask v2.0.0 (latest: v3.0.2)
├── click v8.1.7
├── itsdangerous v2.1.2
├── jinja2 v3.1.3
│   └── markupsafe v2.1.5
└── werkzeug v3.0.1
    └── markupsafe v2.1.5
----- stderr -----
"###
);
}
/// Test that dependencies with multiple marker-specific requirements
/// are only displayed once in the tree, and that the version specifier shown
/// is the one whose marker matches the active interpreter (>=1.0.1 on 3.12,
/// >=1.0.2 on 3.13).
#[test]
#[cfg(feature = "pypi")]
fn no_duplicate_dependencies_with_markers() {
// Project declaring the same dependency three times with different
// `python_version` markers and specifiers.
const PY_PROJECT: &str = indoc! {r#"
[project]
name = "debug"
version = "0.1.0"
requires-python = ">=3.12.0"
dependencies = [
"sniffio>=1.0.0; python_version >= '3.11'",
"sniffio>=1.0.1; python_version >= '3.12'",
"sniffio>=1.0.2; python_version >= '3.13'",
]
[build-system]
requires = ["uv_build>=0.8.22,<10000"]
build-backend = "uv_build"
"#};
let context = TestContext::new_with_versions(&["3.12", "3.13"]).with_filtered_counts();
let project = context.temp_dir.child("debug");
project.create_dir_all().unwrap();
project.child("src/debug").create_dir_all().unwrap();
project.child("src/debug/__init__.py").touch().unwrap();
project
.child("pyproject.toml")
.write_str(PY_PROJECT)
.unwrap();
context.reset_venv();
uv_snapshot!(context.filters(), context
.pip_install()
.arg(project.path())
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved [N] packages in [TIME]
Prepared [N] packages in [TIME]
Installed [N] packages in [TIME]
 + debug==0.1.0 (from file://[TEMP_DIR]/debug)
 + sniffio==1.3.1
"###
);
// Ensure that the dependency is only listed once, even though `debug` declares multiple
// marker-specific requirements for the same dependency.
uv_snapshot!(context.filters(), context.pip_tree(), @r###"
success: true
exit_code: 0
----- stdout -----
debug v0.1.0
└── sniffio v1.3.1
----- stderr -----
"###
);
// On 3.12, only the markers for <=3.12 apply, so the tightest active
// specifier is >=1.0.1.
uv_snapshot!(
context.filters(),
context.pip_tree().arg("--show-version-specifiers"),
@r###"
success: true
exit_code: 0
----- stdout -----
debug v0.1.0
└── sniffio v1.3.1 [required: >=1.0.1]
----- stderr -----
"###
);
// Recreate the environment on Python 3.13 and repeat.
context
.venv()
.arg("--clear")
.arg("--python")
.arg("3.13")
.assert()
.success();
uv_snapshot!(context.filters(), context
.pip_install()
.arg(project.path())
.arg("--strict"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved [N] packages in [TIME]
Prepared [N] packages in [TIME]
Installed [N] packages in [TIME]
 + debug==0.1.0 (from file://[TEMP_DIR]/debug)
 + sniffio==1.3.1
"###
);
// On 3.13, the `python_version >= '3.13'` marker is also active, so the
// displayed specifier tightens to >=1.0.2.
uv_snapshot!(
context.filters(),
context.pip_tree().arg("--show-version-specifiers"),
@r###"
success: true
exit_code: 0
----- stdout -----
debug v0.1.0
└── sniffio v1.3.1 [required: >=1.0.2]
----- stderr -----
"###
);
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/export.rs | crates/uv/tests/it/export.rs | #![allow(clippy::disallowed_types)]
#[cfg(feature = "git")]
use crate::common::{READ_ONLY_GITHUB_SSH_DEPLOY_KEY, READ_ONLY_GITHUB_TOKEN, decode_token};
use crate::common::{TestContext, apply_filters, uv_snapshot};
use anyhow::{Ok, Result};
use assert_cmd::assert::OutputAssertExt;
use assert_fs::prelude::*;
use indoc::{formatdoc, indoc};
use insta::assert_snapshot;
#[cfg(feature = "git")]
use std::path::Path;
use std::process::Stdio;
#[cfg(feature = "git")]
use uv_fs::Simplified;
use uv_static::EnvVars;
/// `uv export` of a locked project emits requirements.txt output: the project
/// as an editable (`-e .`) plus each locked dependency with its hashes and a
/// `# via` provenance comment.
#[test]
fn requirements_txt_dependency() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["anyio==3.7.0"]
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().assert().success();
uv_snapshot!(context.filters(), context.export(), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
#    uv export --cache-dir [CACHE_DIR]
-e .
anyio==3.7.0 \
    --hash=sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce \
    --hash=sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0
    # via project
idna==3.6 \
    --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
    --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
    # via anyio
sniffio==1.3.1 \
    --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
    --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
    # via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###);
Ok(())
}
/// `--no-header` suppresses the autogenerated "This file was autogenerated by
/// uv" banner while leaving the rest of the export output unchanged.
#[test]
fn requirements_txt_export_no_header() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["anyio==3.7.0"]
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().assert().success();
uv_snapshot!(context.filters(), context.export().arg("--no-header"), @r###"
success: true
exit_code: 0
----- stdout -----
-e .
anyio==3.7.0 \
    --hash=sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce \
    --hash=sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0
    # via project
idna==3.6 \
    --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
    --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
    # via anyio
sniffio==1.3.1 \
    --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
    --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
    # via anyio
----- stderr -----
Resolved 4 packages in [TIME]
"###);
Ok(())
}
/// Exporting a project that depends on an extra of a third-party package
/// (`flask[dotenv]`) includes the extra's own dependencies (`python-dotenv`)
/// and marker-conditional transitive dependencies (`colorama` on win32).
#[test]
fn requirements_txt_dependency_extra() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["flask[dotenv]"]
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().assert().success();
uv_snapshot!(context.filters(), context.export(), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
#    uv export --cache-dir [CACHE_DIR]
-e .
blinker==1.7.0 \
    --hash=sha256:c3f865d4d54db7abc53758a01601cf343fe55b84c1de4e3fa910e420b438d5b9 \
    --hash=sha256:e6820ff6fa4e4d1d8e2747c2283749c3f547e4fee112b98555cdcdae32996182
    # via flask
click==8.1.7 \
    --hash=sha256:ae74fb96c20a0277a1d615f1e4d73c8414f5a98db8b799a7931d1582f3390c28 \
    --hash=sha256:ca9853ad459e787e2192211578cc907e7594e294c7ccc834310722b41b9ca6de
    # via flask
colorama==0.4.6 ; sys_platform == 'win32' \
    --hash=sha256:08695f5cb7ed6e0531a20572697297273c47b8cae5a63ffc6d6ed5c201be6e44 \
    --hash=sha256:4f1d9991f5acc0ca119f9d443620b77f9d6b33703e51011c16baf57afb285fc6
    # via click
flask==3.0.2 \
    --hash=sha256:3232e0e9c850d781933cf0207523d1ece087eb8d87b23777ae38456e2fbe7c6e \
    --hash=sha256:822c03f4b799204250a7ee84b1eddc40665395333973dfb9deebfe425fefcb7d
    # via project
itsdangerous==2.1.2 \
    --hash=sha256:2c2349112351b88699d8d4b6b075022c0808887cb7ad10069318a8b0bc88db44 \
    --hash=sha256:5dbbc68b317e5e42f327f9021763545dc3fc3bfe22e6deb96aaf1fc38874156a
    # via flask
jinja2==3.1.3 \
    --hash=sha256:7d6d50dd97d52cbc355597bd845fabfbac3f551e1f99619e39a35ce8c370b5fa \
    --hash=sha256:ac8bd6544d4bb2c9792bf3a159e80bba8fda7f07e81bc3aed565432d5925ba90
    # via flask
markupsafe==2.1.5 \
    --hash=sha256:3c6b973f22eb18a789b1460b4b91bf04ae3f0c4234a0a6aa6b0a92f6f7b951d4 \
    --hash=sha256:58c98fee265677f63a4385256a6d7683ab1832f3ddd1e66fe948d5880c21a169 \
    --hash=sha256:823b65d8706e32ad2df51ed89496147a42a2a6e01c13cfb6ffb8b1e92bc910bb \
    --hash=sha256:8590b4ae07a35970728874632fed7bd57b26b0102df2d2b233b6d9d82f6c62ad \
    --hash=sha256:8dec4936e9c3100156f8a2dc89c4b88d5c435175ff03413b443469c7c8c5f4d1 \
    --hash=sha256:ac07bad82163452a6884fe8fa0963fb98c2346ba78d779ec06bd7a6262132aee \
    --hash=sha256:bec0a414d016ac1a18862a519e54b2fd0fc8bbfd6890376898a6c0891dd82e9f \
    --hash=sha256:d050b3361367a06d752db6ead6e7edeb0009be66bc3bae0ee9d97fb326badc2a \
    --hash=sha256:d283d37a890ba4c1ae73ffadf8046435c76e7bc2247bbb63c00bd1a709c6544b \
    --hash=sha256:ea3d8a3d18833cf4304cd2fc9cbb1efe188ca9b5efef2bdac7adc20594a0e46b \
    --hash=sha256:f5dfb42c4604dddc8e4305050aa6deb084540643ed5804d7455b5df8fe16f5e5
    # via
    #   jinja2
    #   werkzeug
python-dotenv==1.0.1 \
    --hash=sha256:e324ee90a023d808f1959c46bcbc04446a10ced277783dc6ee09987c37ec10ca \
    --hash=sha256:f7b63ef50f1b690dddf550d03497b66d609393b40b564ed0d674909a68ebf16a
    # via flask
werkzeug==3.0.1 \
    --hash=sha256:507e811ecea72b18a404947aded4b3390e1db8f826b494d76550ef45bb3b1dcc \
    --hash=sha256:90a285dc0e42ad56b34e696398b8122ee4c681833fb35b8334a095d82c56da10
    # via flask
----- stderr -----
Resolved 10 packages in [TIME]
"###);
Ok(())
}
/// Exercises extra selection flags on `uv export`: default (no extras),
/// `--extra` (repeatable), `--no-extra` overriding a prior `--extra`,
/// `--all-extras`, and `--all-extras` combined with `--no-extra`.
#[test]
fn requirements_txt_project_extra() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["typing-extensions"]
[project.optional-dependencies]
async = ["anyio==3.7.0"]
pytest = ["iniconfig"]
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().assert().success();
// Default export: no extras included.
uv_snapshot!(context.filters(), context.export(), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
#    uv export --cache-dir [CACHE_DIR]
-e .
typing-extensions==4.10.0 \
    --hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475 \
    --hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb
    # via project
----- stderr -----
Resolved 6 packages in [TIME]
"###);
// `--no-extra pytest` cancels the earlier `--extra pytest`; only `async` remains.
uv_snapshot!(context.filters(), context.export().arg("--extra").arg("pytest").arg("--extra").arg("async").arg("--no-extra").arg("pytest"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
#    uv export --cache-dir [CACHE_DIR] --extra pytest --extra async --no-extra pytest
-e .
anyio==3.7.0 \
    --hash=sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce \
    --hash=sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0
    # via project
idna==3.6 \
    --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
    --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
    # via anyio
sniffio==1.3.1 \
    --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
    --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
    # via anyio
typing-extensions==4.10.0 \
    --hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475 \
    --hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb
    # via project
----- stderr -----
Resolved 6 packages in [TIME]
"###);
// A single `--extra pytest` adds only `iniconfig`.
uv_snapshot!(context.filters(), context.export().arg("--extra").arg("pytest"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
#    uv export --cache-dir [CACHE_DIR] --extra pytest
-e .
iniconfig==2.0.0 \
    --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \
    --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374
    # via project
typing-extensions==4.10.0 \
    --hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475 \
    --hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb
    # via project
----- stderr -----
Resolved 6 packages in [TIME]
"###);
// `--all-extras` includes both `async` and `pytest` groups.
uv_snapshot!(context.filters(), context.export().arg("--all-extras"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
#    uv export --cache-dir [CACHE_DIR] --all-extras
-e .
anyio==3.7.0 \
    --hash=sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce \
    --hash=sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0
    # via project
idna==3.6 \
    --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
    --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
    # via anyio
iniconfig==2.0.0 \
    --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \
    --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374
    # via project
sniffio==1.3.1 \
    --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
    --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
    # via anyio
typing-extensions==4.10.0 \
    --hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475 \
    --hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb
    # via project
----- stderr -----
Resolved 6 packages in [TIME]
"###);
// `--all-extras --no-extra pytest` subtracts the `pytest` group again.
uv_snapshot!(context.filters(), context.export().arg("--all-extras").arg("--no-extra").arg("pytest"), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
#    uv export --cache-dir [CACHE_DIR] --all-extras --no-extra pytest
-e .
anyio==3.7.0 \
    --hash=sha256:275d9973793619a5374e1c89a4f4ad3f4b0a5510a2b5b939444bee8f4c4d37ce \
    --hash=sha256:eddca883c4175f14df8aedce21054bfca3adb70ffe76a9f607aef9d7fa2ea7f0
    # via project
idna==3.6 \
    --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
    --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
    # via anyio
sniffio==1.3.1 \
    --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
    --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
    # via anyio
typing-extensions==4.10.0 \
    --hash=sha256:69b1a937c3a517342112fb4c6df7e72fc39a38e7891a5730ed4985b5214b5475 \
    --hash=sha256:b0abd7c89e8fb96f98db18d86106ff1d90ab692004eb746cf6eda2682f91b3cb
    # via project
----- stderr -----
Resolved 6 packages in [TIME]
"###);
Ok(())
}
#[test]
fn requirements_txt_prune() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = [
"jupyter-client"
]
"#,
)?;
// project v0.1.0
// └── jupyter-client v8.6.1
// ├── jupyter-core v5.7.2
// │ ├── platformdirs v4.2.0
// │ └── traitlets v5.14.2
// ├── python-dateutil v2.9.0.post0
// │ └── six v1.16.0
// ├── pyzmq v25.1.2
// ├── tornado v6.4
// └── traitlets v5.14.2
uv_snapshot!(
context.filters(),
context.export()
.arg("--no-hashes")
.arg("--prune")
.arg("jupyter-core"),
@r"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv export --cache-dir [CACHE_DIR] --no-hashes --prune jupyter-core
cffi==1.16.0 ; implementation_name == 'pypy'
# via pyzmq
jupyter-client==8.6.1
# via project
pycparser==2.21 ; implementation_name == 'pypy'
# via cffi
python-dateutil==2.9.0.post0
# via jupyter-client
pyzmq==25.1.2
# via jupyter-client
six==1.16.0
# via python-dateutil
tornado==6.4
# via jupyter-client
traitlets==5.14.2
# via jupyter-client
----- stderr -----
Resolved 12 packages in [TIME]
"
);
Ok(())
}
/// Marker-conditional dependencies are exported with their markers attached;
/// the marker propagates to transitive dependencies (`idna`, `sniffio` inherit
/// `sys_platform == 'darwin'` from `anyio`), while unconditional dependencies
/// (`iniconfig`) stay marker-free.
#[test]
fn requirements_txt_dependency_marker() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = ["anyio ; sys_platform == 'darwin'", "iniconfig"]
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().assert().success();
uv_snapshot!(context.filters(), context.export(), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
#    uv export --cache-dir [CACHE_DIR]
-e .
anyio==4.3.0 ; sys_platform == 'darwin' \
    --hash=sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8 \
    --hash=sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6
    # via project
idna==3.6 ; sys_platform == 'darwin' \
    --hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
    --hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
    # via anyio
iniconfig==2.0.0 \
    --hash=sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3 \
    --hash=sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374
    # via project
sniffio==1.3.1 ; sys_platform == 'darwin' \
    --hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
    --hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
    # via anyio
----- stderr -----
Resolved 5 packages in [TIME]
"###);
Ok(())
}
#[test]
fn requirements_txt_dependency_multiple_markers() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.10"
dependencies = [
"trio ; python_version > '3.11'",
"trio ; sys_platform == 'win32'",
]
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#,
)?;
context.lock().assert().success();
// Note that the `python_version > '3.11'` markers disappear due to `requires-python = ">=3.12"`
uv_snapshot!(context.filters(), context.export(), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv export --cache-dir [CACHE_DIR]
-e .
attrs==23.2.0 ; python_full_version >= '3.12' or sys_platform == 'win32' \
--hash=sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30 \
--hash=sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1
# via
# outcome
# trio
cffi==1.16.0 ; (python_full_version >= '3.12' and implementation_name != 'pypy' and os_name == 'nt') or (implementation_name != 'pypy' and os_name == 'nt' and sys_platform == 'win32') \
--hash=sha256:2c56b361916f390cd758a57f2e16233eb4f64bcbeee88a4881ea90fca14dc6ab \
--hash=sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235 \
--hash=sha256:9f90389693731ff1f659e55c7d1640e2ec43ff725cc61b04b2f9c6d8d017df6a \
--hash=sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b \
--hash=sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0 \
--hash=sha256:db8e577c19c0fda0beb7e0d4e09e0ba74b1e4c092e0e40bfa12fe05b6f6d75ba \
--hash=sha256:e6024675e67af929088fda399b2094574609396b1decb609c55fa58b028a32a1
# via trio
exceptiongroup==1.2.0 ; python_full_version < '3.11' and sys_platform == 'win32' \
--hash=sha256:4bfd3996ac73b41e9b9628b04e079f193850720ea5945fc96a08633c66912f14 \
--hash=sha256:91f5c769735f051a4290d52edd0858999b57e5876e9f85937691bd4c9fa3ed68
# via trio
idna==3.6 ; python_full_version >= '3.12' or sys_platform == 'win32' \
--hash=sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca \
--hash=sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f
# via trio
outcome==1.3.0.post0 ; python_full_version >= '3.12' or sys_platform == 'win32' \
--hash=sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8 \
--hash=sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b
# via trio
pycparser==2.21 ; (python_full_version >= '3.12' and implementation_name != 'pypy' and os_name == 'nt') or (implementation_name != 'pypy' and os_name == 'nt' and sys_platform == 'win32') \
--hash=sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9 \
--hash=sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206
# via cffi
sniffio==1.3.1 ; python_full_version >= '3.12' or sys_platform == 'win32' \
--hash=sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2 \
--hash=sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc
# via trio
sortedcontainers==2.4.0 ; python_full_version >= '3.12' or sys_platform == 'win32' \
--hash=sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88 \
--hash=sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0
# via trio
trio==0.25.0 ; python_full_version >= '3.12' or sys_platform == 'win32' \
--hash=sha256:9b41f5993ad2c0e5f62d0acca320ec657fdb6b2a2c22b8c7aed6caf154475c4e \
--hash=sha256:e6458efe29cc543e557a91e614e2b51710eba2961669329ce9c862d50c6e8e81
# via project
----- stderr -----
Resolved 10 packages in [TIME]
"###);
Ok(())
}
#[test]
fn requirements_txt_dependency_conflicting_markers() -> Result<()> {
let context = TestContext::new("3.12");
let pyproject_toml = context.temp_dir.child("pyproject.toml");
pyproject_toml.write_str(
r#"
[project]
name = "project"
version = "0.1.0"
requires-python = ">=3.12"
dependencies = [
"trio==0.25.0 ; sys_platform == 'darwin'",
"trio==0.10.0 ; sys_platform == 'win32'",
]
[build-system]
requires = ["setuptools>=42"]
build-backend = "setuptools.build_meta"
"#,
)?;
uv_snapshot!(context.filters(), context.lock(), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Resolved 11 packages in [TIME]
"###);
let lock = context.read("uv.lock");
insta::with_settings!(
{
filters => context.filters(),
},
{
insta::assert_snapshot!(
lock, @r#"
version = 1
revision = 3
requires-python = ">=3.12"
resolution-markers = [
"sys_platform == 'darwin'",
"sys_platform == 'win32'",
"sys_platform != 'darwin' and sys_platform != 'win32'",
]
[options]
exclude-newer = "2024-03-25T00:00:00Z"
[[package]]
name = "async-generator"
version = "1.10"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/ce/b6/6fa6b3b598a03cba5e80f829e0dadbb49d7645f523d209b2fb7ea0bbb02a/async_generator-1.10.tar.gz", hash = "sha256:6ebb3d106c12920aaae42ccb6f787ef5eefdcdd166ea3d628fa8476abe712144", size = 29870, upload-time = "2018-08-01T03:36:21.69Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/71/52/39d20e03abd0ac9159c162ec24b93fbcaa111e8400308f2465432495ca2b/async_generator-1.10-py3-none-any.whl", hash = "sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b", size = 18857, upload-time = "2018-08-01T03:36:20.029Z" },
]
[[package]]
name = "attrs"
version = "23.2.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e3/fc/f800d51204003fa8ae392c4e8278f256206e7a919b708eef054f5f4b650d/attrs-23.2.0.tar.gz", hash = "sha256:935dc3b529c262f6cf76e50877d35a4bd3c1de194fd41f47a2b7ae8f19971f30", size = 780820, upload-time = "2023-12-31T06:30:32.926Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e0/44/827b2a91a5816512fcaf3cc4ebc465ccd5d598c45cefa6703fcf4a79018f/attrs-23.2.0-py3-none-any.whl", hash = "sha256:99b87a485a5820b23b879f04c2305b44b951b502fd64be915879d77a7e8fc6f1", size = 60752, upload-time = "2023-12-31T06:30:30.772Z" },
]
[[package]]
name = "cffi"
version = "1.16.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "pycparser", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/68/ce/95b0bae7968c65473e1298efb042e10cafc7bafc14d9e4f154008241c91d/cffi-1.16.0.tar.gz", hash = "sha256:bcb3ef43e58665bbda2fb198698fcae6776483e0c4a631aa5647806c25e02cc0", size = 512873, upload-time = "2023-09-28T18:02:04.656Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c9/6e/751437067affe7ac0944b1ad4856ec11650da77f0dd8f305fae1117ef7bb/cffi-1.16.0-cp312-cp312-win32.whl", hash = "sha256:b2ca4e77f9f47c55c194982e10f058db063937845bb2b7a86c84a6cfe0aefa8b", size = 173564, upload-time = "2023-09-28T18:01:23.527Z" },
{ url = "https://files.pythonhosted.org/packages/e9/63/e285470a4880a4f36edabe4810057bd4b562c6ddcc165eacf9c3c7210b40/cffi-1.16.0-cp312-cp312-win_amd64.whl", hash = "sha256:68678abf380b42ce21a5f2abde8efee05c114c2fdb2e9eef2efdb0257fba1235", size = 181956, upload-time = "2023-09-28T18:01:24.971Z" },
]
[[package]]
name = "idna"
version = "3.6"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" },
]
[[package]]
name = "outcome"
version = "1.3.0.post0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "attrs", marker = "sys_platform == 'darwin' or sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/98/df/77698abfac98571e65ffeb0c1fba8ffd692ab8458d617a0eed7d9a8d38f2/outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8", size = 21060, upload-time = "2023-10-26T04:26:04.361Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/55/8b/5ab7257531a5d830fc8000c476e63c935488d74609b50f9384a643ec0a62/outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b", size = 10692, upload-time = "2023-10-26T04:26:02.532Z" },
]
[[package]]
name = "project"
version = "0.1.0"
source = { editable = "." }
dependencies = [
{ name = "trio", version = "0.10.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'win32'" },
{ name = "trio", version = "0.25.0", source = { registry = "https://pypi.org/simple" }, marker = "sys_platform == 'darwin'" },
]
[package.metadata]
requires-dist = [
{ name = "trio", marker = "sys_platform == 'darwin'", specifier = "==0.25.0" },
{ name = "trio", marker = "sys_platform == 'win32'", specifier = "==0.10.0" },
]
[[package]]
name = "pycparser"
version = "2.21"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/5e/0b/95d387f5f4433cb0f53ff7ad859bd2c6051051cebbb564f139a999ab46de/pycparser-2.21.tar.gz", hash = "sha256:e644fdec12f7872f86c58ff790da456218b10f863970249516d60a5eaca77206", size = 170877, upload-time = "2021-11-06T12:48:46.095Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/62/d5/5f610ebe421e85889f2e55e33b7f9a6795bd982198517d912eb1c76e1a53/pycparser-2.21-py2.py3-none-any.whl", hash = "sha256:8ee45429555515e1f6b185e78100aea234072576aa43ab53aefcae078162fca9", size = 118697, upload-time = "2021-11-06T12:50:13.61Z" },
]
[[package]]
name = "sniffio"
version = "1.3.1"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
]
[[package]]
name = "sortedcontainers"
version = "2.4.0"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e8/c4/ba2f8066cceb6f23394729afe52f3bf7adec04bf9ed2c820b39e19299111/sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88", size = 30594, upload-time = "2021-05-16T22:03:42.897Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/32/46/9cb0e58b2deb7f82b84065f37f3bffeb12413f947f9388e4cac22c4621ce/sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0", size = 29575, upload-time = "2021-05-16T22:03:41.177Z" },
]
[[package]]
name = "trio"
version = "0.10.0"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"sys_platform == 'win32'",
]
dependencies = [
{ name = "async-generator", marker = "sys_platform == 'win32'" },
{ name = "attrs", marker = "sys_platform == 'win32'" },
{ name = "cffi", marker = "os_name == 'nt' and sys_platform == 'win32'" },
{ name = "idna", marker = "sys_platform == 'win32'" },
{ name = "outcome", marker = "sys_platform == 'win32'" },
{ name = "sniffio", marker = "sys_platform == 'win32'" },
{ name = "sortedcontainers", marker = "sys_platform == 'win32'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/e6/20/37be7b5f47db6a9fbf905b5de5386e5b7193c45d07becb750db6f03cd117/trio-0.10.0.tar.gz", hash = "sha256:d323cc15f6406d15954af91e5e34af2001cc24163fdde29e3f88a227a1b53ab0", size = 402511, upload-time = "2019-01-08T09:59:04.649Z" }
[[package]]
name = "trio"
version = "0.25.0"
source = { registry = "https://pypi.org/simple" }
resolution-markers = [
"sys_platform == 'darwin'",
]
dependencies = [
{ name = "attrs", marker = "sys_platform == 'darwin'" },
{ name = "idna", marker = "sys_platform == 'darwin'" },
{ name = "outcome", marker = "sys_platform == 'darwin'" },
{ name = "sniffio", marker = "sys_platform == 'darwin'" },
{ name = "sortedcontainers", marker = "sys_platform == 'darwin'" },
]
sdist = { url = "https://files.pythonhosted.org/packages/b4/51/4f5ae37ec58768b9c30e5bc5b89431a7baf3fa9d0dda98983af6ef55eb47/trio-0.25.0.tar.gz", hash = "sha256:9b41f5993ad2c0e5f62d0acca320ec657fdb6b2a2c22b8c7aed6caf154475c4e", size = 551863, upload-time = "2024-03-17T02:53:47.736Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/17/c9/f86f89f14d52f9f2f652ce24cb2f60141a51d087db1563f3fba94ba07346/trio-0.25.0-py3-none-any.whl", hash = "sha256:e6458efe29cc543e557a91e614e2b51710eba2961669329ce9c862d50c6e8e81", size = 467161, upload-time = "2024-03-17T02:53:45.462Z" },
]
"#
);
}
);
uv_snapshot!(context.filters(), context.export(), @r###"
success: true
exit_code: 0
----- stdout -----
# This file was autogenerated by uv via the following command:
# uv export --cache-dir [CACHE_DIR]
-e .
async-generator==1.10 ; sys_platform == 'win32' \
--hash=sha256:01c7bf666359b4967d2cda0000cc2e4af16a0ae098cbffcb8472fb9e8ad6585b \
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/workspace_list.rs | crates/uv/tests/it/workspace_list.rs | use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::fixture::PathChild;
use crate::common::{TestContext, copy_dir_ignore, uv_snapshot};
/// Test basic list output for a simple workspace with one member.
#[test]
fn workspace_list_simple() {
let context = TestContext::new("3.12");
// Initialize a workspace with one member
context.init().arg("foo").assert().success();
let workspace = context.temp_dir.child("foo");
uv_snapshot!(context.filters(), context.workspace_list().current_dir(&workspace), @r"
success: true
exit_code: 0
----- stdout -----
foo
----- stderr -----
warning: The `uv workspace list` command is experimental and may change without warning. Pass `--preview-features workspace-list` to disable this warning.
"
);
uv_snapshot!(context.filters(), context.workspace_list().arg("--paths").current_dir(&workspace), @r"
success: true
exit_code: 0
----- stdout -----
[TEMP_DIR]/foo
----- stderr -----
warning: The `uv workspace list` command is experimental and may change without warning. Pass `--preview-features workspace-list` to disable this warning.
"
);
}
/// Test list output for a root workspace (workspace with a root package).
#[test]
fn workspace_list_root_workspace() -> Result<()> {
let context = TestContext::new("3.12");
let workspace = context.temp_dir.child("workspace");
copy_dir_ignore(
context
.workspace_root
.join("test/workspaces/albatross-root-workspace"),
&workspace,
)?;
uv_snapshot!(context.filters(), context.workspace_list().current_dir(&workspace), @r"
success: true
exit_code: 0
----- stdout -----
albatross
bird-feeder
seeds
----- stderr -----
warning: The `uv workspace list` command is experimental and may change without warning. Pass `--preview-features workspace-list` to disable this warning.
"
);
Ok(())
}
/// Test list output for a virtual workspace (no root package).
#[test]
fn workspace_list_virtual_workspace() -> Result<()> {
let context = TestContext::new("3.12");
let workspace = context.temp_dir.child("workspace");
copy_dir_ignore(
context
.workspace_root
.join("test/workspaces/albatross-virtual-workspace"),
&workspace,
)?;
uv_snapshot!(context.filters(), context.workspace_list().current_dir(&workspace), @r"
success: true
exit_code: 0
----- stdout -----
albatross
bird-feeder
seeds
----- stderr -----
warning: The `uv workspace list` command is experimental and may change without warning. Pass `--preview-features workspace-list` to disable this warning.
"
);
Ok(())
}
/// Test list output when run from a workspace member directory.
#[test]
fn workspace_list_from_member() -> Result<()> {
let context = TestContext::new("3.12");
let workspace = context.temp_dir.child("workspace");
copy_dir_ignore(
context
.workspace_root
.join("test/workspaces/albatross-root-workspace"),
&workspace,
)?;
let member_dir = workspace.join("packages").join("bird-feeder");
uv_snapshot!(context.filters(), context.workspace_list().current_dir(&member_dir), @r"
success: true
exit_code: 0
----- stdout -----
albatross
bird-feeder
seeds
----- stderr -----
warning: The `uv workspace list` command is experimental and may change without warning. Pass `--preview-features workspace-list` to disable this warning.
"
);
Ok(())
}
/// Test list output for a workspace with multiple packages.
#[test]
fn workspace_list_multiple_members() {
let context = TestContext::new("3.12");
// Initialize workspace root
context.init().arg("pkg-a").assert().success();
let workspace_root = context.temp_dir.child("pkg-a");
// Add more members
context
.init()
.arg("pkg-b")
.current_dir(&workspace_root)
.assert()
.success();
context
.init()
.arg("pkg-c")
.current_dir(&workspace_root)
.assert()
.success();
uv_snapshot!(context.filters(), context.workspace_list().current_dir(&workspace_root), @r"
success: true
exit_code: 0
----- stdout -----
pkg-a
pkg-b
pkg-c
----- stderr -----
warning: The `uv workspace list` command is experimental and may change without warning. Pass `--preview-features workspace-list` to disable this warning.
"
);
uv_snapshot!(context.filters(), context.workspace_list().arg("--paths").current_dir(&workspace_root), @r"
success: true
exit_code: 0
----- stdout -----
[TEMP_DIR]/pkg-a
[TEMP_DIR]/pkg-a/pkg-b
[TEMP_DIR]/pkg-a/pkg-c
----- stderr -----
warning: The `uv workspace list` command is experimental and may change without warning. Pass `--preview-features workspace-list` to disable this warning.
"
);
}
/// Test list output for a single project (not a workspace).
#[test]
fn workspace_list_single_project() {
let context = TestContext::new("3.12");
context.init().arg("my-project").assert().success();
let project = context.temp_dir.child("my-project");
uv_snapshot!(context.filters(), context.workspace_list().current_dir(&project), @r"
success: true
exit_code: 0
----- stdout -----
my-project
----- stderr -----
warning: The `uv workspace list` command is experimental and may change without warning. Pass `--preview-features workspace-list` to disable this warning.
"
);
}
/// Test list output with excluded packages.
#[test]
fn workspace_list_with_excluded() -> Result<()> {
let context = TestContext::new("3.12");
let workspace = context.temp_dir.child("workspace");
copy_dir_ignore(
context
.workspace_root
.join("test/workspaces/albatross-project-in-excluded"),
&workspace,
)?;
uv_snapshot!(context.filters(), context.workspace_list().current_dir(&workspace), @r"
success: true
exit_code: 0
----- stdout -----
albatross
----- stderr -----
warning: The `uv workspace list` command is experimental and may change without warning. Pass `--preview-features workspace-list` to disable this warning.
"
);
Ok(())
}
/// Test list error output when not in a project.
#[test]
fn workspace_list_no_project() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.workspace_list(), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
warning: The `uv workspace list` command is experimental and may change without warning. Pass `--preview-features workspace-list` to disable this warning.
error: No `pyproject.toml` found in current directory or any parent directory
"
);
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/workspace_metadata.rs | crates/uv/tests/it/workspace_metadata.rs | use anyhow::Result;
use assert_cmd::assert::OutputAssertExt;
use assert_fs::fixture::PathChild;
use crate::common::{TestContext, copy_dir_ignore, uv_snapshot};
/// Test basic metadata output for a simple workspace with one member.
#[test]
fn workspace_metadata_simple() {
let context = TestContext::new("3.12");
// Initialize a workspace with one member
context.init().arg("foo").assert().success();
let workspace = context.temp_dir.child("foo");
uv_snapshot!(context.filters(), context.workspace_metadata().current_dir(&workspace), @r#"
success: true
exit_code: 0
----- stdout -----
{
"schema": {
"version": "preview"
},
"workspace_root": "[TEMP_DIR]/foo",
"members": [
{
"name": "foo",
"path": "[TEMP_DIR]/foo"
}
]
}
----- stderr -----
warning: The `uv workspace metadata` command is experimental and may change without warning. Pass `--preview-features workspace-metadata` to disable this warning.
"#
);
}
/// Test metadata for a root workspace (workspace with a root package).
#[test]
fn workspace_metadata_root_workspace() -> Result<()> {
let context = TestContext::new("3.12");
let workspace = context.temp_dir.child("workspace");
copy_dir_ignore(
context
.workspace_root
.join("test/workspaces/albatross-root-workspace"),
&workspace,
)?;
uv_snapshot!(context.filters(), context.workspace_metadata().current_dir(&workspace), @r#"
success: true
exit_code: 0
----- stdout -----
{
"schema": {
"version": "preview"
},
"workspace_root": "[TEMP_DIR]/workspace",
"members": [
{
"name": "albatross",
"path": "[TEMP_DIR]/workspace"
},
{
"name": "bird-feeder",
"path": "[TEMP_DIR]/workspace/packages/bird-feeder"
},
{
"name": "seeds",
"path": "[TEMP_DIR]/workspace/packages/seeds"
}
]
}
----- stderr -----
warning: The `uv workspace metadata` command is experimental and may change without warning. Pass `--preview-features workspace-metadata` to disable this warning.
"#
);
Ok(())
}
/// Test metadata for a virtual workspace (no root package).
#[test]
fn workspace_metadata_virtual_workspace() -> Result<()> {
let context = TestContext::new("3.12");
let workspace = context.temp_dir.child("workspace");
copy_dir_ignore(
context
.workspace_root
.join("test/workspaces/albatross-virtual-workspace"),
&workspace,
)?;
uv_snapshot!(context.filters(), context.workspace_metadata().current_dir(&workspace), @r#"
success: true
exit_code: 0
----- stdout -----
{
"schema": {
"version": "preview"
},
"workspace_root": "[TEMP_DIR]/workspace",
"members": [
{
"name": "albatross",
"path": "[TEMP_DIR]/workspace/packages/albatross"
},
{
"name": "bird-feeder",
"path": "[TEMP_DIR]/workspace/packages/bird-feeder"
},
{
"name": "seeds",
"path": "[TEMP_DIR]/workspace/packages/seeds"
}
]
}
----- stderr -----
warning: The `uv workspace metadata` command is experimental and may change without warning. Pass `--preview-features workspace-metadata` to disable this warning.
"#
);
Ok(())
}
/// Test metadata when run from a workspace member directory.
#[test]
fn workspace_metadata_from_member() -> Result<()> {
let context = TestContext::new("3.12");
let workspace = context.temp_dir.child("workspace");
copy_dir_ignore(
context
.workspace_root
.join("test/workspaces/albatross-root-workspace"),
&workspace,
)?;
let member_dir = workspace.join("packages").join("bird-feeder");
uv_snapshot!(context.filters(), context.workspace_metadata().current_dir(&member_dir), @r#"
success: true
exit_code: 0
----- stdout -----
{
"schema": {
"version": "preview"
},
"workspace_root": "[TEMP_DIR]/workspace",
"members": [
{
"name": "albatross",
"path": "[TEMP_DIR]/workspace"
},
{
"name": "bird-feeder",
"path": "[TEMP_DIR]/workspace/packages/bird-feeder"
},
{
"name": "seeds",
"path": "[TEMP_DIR]/workspace/packages/seeds"
}
]
}
----- stderr -----
warning: The `uv workspace metadata` command is experimental and may change without warning. Pass `--preview-features workspace-metadata` to disable this warning.
"#
);
Ok(())
}
/// Test metadata for a workspace with multiple packages.
#[test]
fn workspace_metadata_multiple_members() {
let context = TestContext::new("3.12");
// Initialize workspace root
context.init().arg("pkg-a").assert().success();
let workspace_root = context.temp_dir.child("pkg-a");
// Add more members
context
.init()
.arg("pkg-b")
.current_dir(&workspace_root)
.assert()
.success();
context
.init()
.arg("pkg-c")
.current_dir(&workspace_root)
.assert()
.success();
uv_snapshot!(context.filters(), context.workspace_metadata().current_dir(&workspace_root), @r#"
success: true
exit_code: 0
----- stdout -----
{
"schema": {
"version": "preview"
},
"workspace_root": "[TEMP_DIR]/pkg-a",
"members": [
{
"name": "pkg-a",
"path": "[TEMP_DIR]/pkg-a"
},
{
"name": "pkg-b",
"path": "[TEMP_DIR]/pkg-a/pkg-b"
},
{
"name": "pkg-c",
"path": "[TEMP_DIR]/pkg-a/pkg-c"
}
]
}
----- stderr -----
warning: The `uv workspace metadata` command is experimental and may change without warning. Pass `--preview-features workspace-metadata` to disable this warning.
"#
);
}
/// Test metadata for a single project (not a workspace).
#[test]
fn workspace_metadata_single_project() {
let context = TestContext::new("3.12");
context.init().arg("my-project").assert().success();
let project = context.temp_dir.child("my-project");
uv_snapshot!(context.filters(), context.workspace_metadata().current_dir(&project), @r#"
success: true
exit_code: 0
----- stdout -----
{
"schema": {
"version": "preview"
},
"workspace_root": "[TEMP_DIR]/my-project",
"members": [
{
"name": "my-project",
"path": "[TEMP_DIR]/my-project"
}
]
}
----- stderr -----
warning: The `uv workspace metadata` command is experimental and may change without warning. Pass `--preview-features workspace-metadata` to disable this warning.
"#
);
}
/// Test metadata with excluded packages.
#[test]
fn workspace_metadata_with_excluded() -> Result<()> {
let context = TestContext::new("3.12");
let workspace = context.temp_dir.child("workspace");
copy_dir_ignore(
context
.workspace_root
.join("test/workspaces/albatross-project-in-excluded"),
&workspace,
)?;
uv_snapshot!(context.filters(), context.workspace_metadata().current_dir(&workspace), @r#"
success: true
exit_code: 0
----- stdout -----
{
"schema": {
"version": "preview"
},
"workspace_root": "[TEMP_DIR]/workspace",
"members": [
{
"name": "albatross",
"path": "[TEMP_DIR]/workspace"
}
]
}
----- stderr -----
warning: The `uv workspace metadata` command is experimental and may change without warning. Pass `--preview-features workspace-metadata` to disable this warning.
"#
);
Ok(())
}
/// Test metadata error when not in a project.
#[test]
fn workspace_metadata_no_project() {
let context = TestContext::new("3.12");
uv_snapshot!(context.filters(), context.workspace_metadata(), @r"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
warning: The `uv workspace metadata` command is experimental and may change without warning. Pass `--preview-features workspace-metadata` to disable this warning.
error: No `pyproject.toml` found in current directory or any parent directory
"
);
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/pip_uninstall.rs | crates/uv/tests/it/pip_uninstall.rs | use std::process::Command;
use anyhow::Result;
use assert_cmd::prelude::*;
use assert_fs::fixture::ChildPath;
use assert_fs::prelude::*;
use crate::common::{TestContext, get_bin, uv_snapshot};
#[test]
fn no_arguments() {
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("uninstall")
.env_clear(), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: the following required arguments were not provided:
<PACKAGE|--requirements <REQUIREMENTS>>
Usage: uv pip uninstall <PACKAGE|--requirements <REQUIREMENTS>>
For more information, try '--help'.
"###
);
}
#[test]
fn invalid_requirement() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("uninstall")
.arg("flask==1.0.x")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Failed to parse: `flask==1.0.x`
Caused by: after parsing `1.0`, found `.x`, which is not part of a valid version
flask==1.0.x
^^^^^^^
"###);
Ok(())
}
#[test]
fn missing_requirements_txt() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("uninstall")
.arg("-r")
.arg("requirements.txt")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: File not found: `requirements.txt`
"###
);
Ok(())
}
#[test]
fn invalid_requirements_txt_requirement() -> Result<()> {
let temp_dir = assert_fs::TempDir::new()?;
let requirements_txt = temp_dir.child("requirements.txt");
requirements_txt.write_str("flask==1.0.x")?;
uv_snapshot!(Command::new(get_bin())
.arg("pip")
.arg("uninstall")
.arg("-r")
.arg("requirements.txt")
.current_dir(&temp_dir), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: Couldn't parse requirement in `requirements.txt` at position 0
Caused by: after parsing `1.0`, found `.x`, which is not part of a valid version
flask==1.0.x
^^^^^^^
"###);
Ok(())
}
#[test]
#[cfg(feature = "pypi")]
fn uninstall() -> Result<()> {
let context = TestContext::new("3.12");
let requirements_txt = context.temp_dir.child("requirements.txt");
requirements_txt.write_str("MarkupSafe==2.1.3")?;
context
.pip_sync()
.arg("requirements.txt")
.assert()
.success();
context.assert_command("import markupsafe").success();
uv_snapshot!(context.pip_uninstall()
.arg("MarkupSafe"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Uninstalled 1 package in [TIME]
- markupsafe==2.1.3
"###
);
context.assert_command("import markupsafe").failure();
Ok(())
}
#[test]
#[cfg(feature = "pypi")]
fn missing_record() -> Result<()> {
    // Uninstalling a package whose `RECORD` file is missing must fail with a
    // clear error rather than removing files blindly: `RECORD` is the only
    // authoritative list of what the wheel installed.
    let context = TestContext::new("3.12");
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str("MarkupSafe==2.1.3")?;
    context
        .pip_sync()
        .arg("requirements.txt")
        .assert()
        .success();
    context.assert_command("import markupsafe").success();
    // Delete the RECORD file.
    let dist_info = context.site_packages().join("MarkupSafe-2.1.3.dist-info");
    fs_err::remove_file(dist_info.join("RECORD"))?;
    uv_snapshot!(context.filters(), context.pip_uninstall()
        .arg("MarkupSafe"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: Cannot uninstall package; `RECORD` file not found at: [SITE_PACKAGES]/MarkupSafe-2.1.3.dist-info/RECORD
    "
    );
    Ok(())
}
#[test]
#[cfg(feature = "pypi")]
fn uninstall_editable_by_name() -> Result<()> {
    // An editable install should be removable by its (normalized) package
    // name, just like a regular install.
    let context = TestContext::new("3.12");
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(&format!(
        "-e {}",
        context
            .workspace_root
            .join("test/packages/flit_editable")
            .as_os_str()
            .to_str()
            .expect("Path is valid unicode")
    ))?;
    context
        .pip_sync()
        .arg(requirements_txt.path())
        .assert()
        .success();
    context.assert_command("import flit_editable").success();
    // Uninstall the editable by name.
    uv_snapshot!(context.filters(), context.pip_uninstall()
        .arg("flit-editable"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Uninstalled 1 package in [TIME]
     - flit-editable==0.1.0 (from file://[WORKSPACE]/test/packages/flit_editable)
    "###
    );
    context.assert_command("import flit_editable").failure();
    Ok(())
}
#[test]
#[cfg(feature = "pypi")]
fn uninstall_by_path() -> Result<()> {
    // A local (directory) install should be removable by passing the source
    // directory path instead of the package name.
    let context = TestContext::new("3.12");
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(
        context
            .workspace_root
            .join("test/packages/flit_editable")
            .as_os_str()
            .to_str()
            .expect("Path is valid unicode"),
    )?;
    context
        .pip_sync()
        .arg(requirements_txt.path())
        .assert()
        .success();
    context.assert_command("import flit_editable").success();
    // Uninstall the editable by path.
    uv_snapshot!(context.filters(), context.pip_uninstall()
        .arg(context.workspace_root.join("test/packages/flit_editable")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Uninstalled 1 package in [TIME]
     - flit-editable==0.1.0 (from file://[WORKSPACE]/test/packages/flit_editable)
    "###
    );
    context.assert_command("import flit_editable").failure();
    Ok(())
}
#[test]
#[cfg(feature = "pypi")]
fn uninstall_duplicate_by_path() -> Result<()> {
    // Naming the same package twice — once by name and once by path — should
    // be deduplicated: the package is uninstalled exactly once.
    let context = TestContext::new("3.12");
    let requirements_txt = context.temp_dir.child("requirements.txt");
    requirements_txt.write_str(
        context
            .workspace_root
            .join("test/packages/flit_editable")
            .as_os_str()
            .to_str()
            .expect("Path is valid unicode"),
    )?;
    context
        .pip_sync()
        .arg(requirements_txt.path())
        .assert()
        .success();
    context.assert_command("import flit_editable").success();
    // Uninstall the editable by both path and name.
    uv_snapshot!(context.filters(), context.pip_uninstall()
        .arg("flit-editable")
        .arg(context.workspace_root.join("test/packages/flit_editable")), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Uninstalled 1 package in [TIME]
     - flit-editable==0.1.0 (from file://[WORKSPACE]/test/packages/flit_editable)
    "###
    );
    context.assert_command("import flit_editable").failure();
    Ok(())
}
/// Uninstall a duplicate package in a virtual environment.
///
/// Two `.dist-info` directories for the same package (different versions) can
/// coexist in a broken environment; `pip uninstall` should remove both.
#[test]
#[cfg(feature = "pypi")]
fn uninstall_duplicate() -> Result<()> {
    use uv_fs::copy_dir_all;
    // Sync a version of `pip` into a virtual environment.
    let context1 = TestContext::new("3.12");
    let requirements_txt = context1.temp_dir.child("requirements.txt");
    requirements_txt.write_str("pip==21.3.1")?;
    // Run `pip sync`.
    context1
        .pip_sync()
        .arg(requirements_txt.path())
        .assert()
        .success();
    // Sync a different version of `pip` into a virtual environment.
    let context2 = TestContext::new("3.12");
    let requirements_txt = context2.temp_dir.child("requirements.txt");
    requirements_txt.write_str("pip==22.1.1")?;
    // Run `pip sync`.
    context2
        .pip_sync()
        .arg(requirements_txt.path())
        .assert()
        .success();
    // Copy the second version's metadata into the first environment, leaving
    // two `pip` `.dist-info` directories side by side.
    copy_dir_all(
        context2.site_packages().join("pip-22.1.1.dist-info"),
        context1.site_packages().join("pip-22.1.1.dist-info"),
    )?;
    // Run `pip uninstall`: both versions must be removed.
    uv_snapshot!(context1.pip_uninstall()
        .arg("pip"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Uninstalled 2 packages in [TIME]
     - pip==21.3.1
     - pip==22.1.1
    "###
    );
    Ok(())
}
/// Uninstall a `.egg-info` package in a virtual environment.
#[test]
fn uninstall_egg_info() -> Result<()> {
    let context = TestContext::new("3.12");
    let site_packages = ChildPath::new(context.site_packages());
    // Fabricate a legacy `.egg-info` installation by hand: the metadata
    // directory, a `top_level.txt` naming the importable module, and the
    // remaining metadata files (empty contents suffice).
    let egg_info = site_packages.child("zstandard-0.22.0-py3.12.egg-info");
    egg_info.create_dir_all()?;
    egg_info.child("top_level.txt").write_str("zstd")?;
    for metadata_file in [
        "SOURCES.txt",
        "PKG-INFO",
        "dependency_links.txt",
        "entry_points.txt",
    ] {
        egg_info.child(metadata_file).write_str("")?;
    }
    // The package directory itself, as named in `top_level.txt`.
    let package = site_packages.child("zstd");
    package.create_dir_all()?;
    package.child("__init__.py").write_str("")?;
    // Run `pip uninstall`: the `.egg-info` install should be detected and
    // removed like any other distribution.
    uv_snapshot!(context.pip_uninstall()
        .arg("zstandard"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Uninstalled 1 package in [TIME]
     - zstandard==0.22.0
    "###);
    Ok(())
}
/// Normalize a path string the way Python's `os.path.normcase` does: on
/// Windows, forward slashes become backslashes and the result is lowercased;
/// on every other platform the string is returned unchanged.
fn normcase(s: &str) -> String {
    if cfg!(windows) {
        let backslashed = s.replace('/', "\\");
        backslashed.to_lowercase()
    } else {
        String::from(s)
    }
}
/// Uninstall a legacy editable package in a virtual environment.
///
/// Legacy (setuptools `develop`) editables are registered via an `.egg-link`
/// file plus an entry in `easy-install.pth`; uninstalling must remove both
/// while leaving the source tree's `.egg-info` intact.
#[test]
fn uninstall_legacy_editable() -> Result<()> {
    let context = TestContext::new("3.12");
    let site_packages = ChildPath::new(context.site_packages());
    // Build a fake source tree with a package and its `.egg-info` metadata.
    let target = context.temp_dir.child("zstandard_project");
    target.child("zstd").create_dir_all()?;
    target.child("zstd").child("__init__.py").write_str("")?;
    target.child("zstandard.egg-info").create_dir_all()?;
    target
        .child("zstandard.egg-info")
        .child("PKG-INFO")
        .write_str(
            "Metadata-Version: 2.1
Name: zstandard
Version: 0.22.0
",
        )?;
    // The `.egg-link` points at the source tree; `easy-install.pth` adds it
    // to `sys.path` (with unrelated entries before and after it).
    site_packages
        .child("zstandard.egg-link")
        .write_str(target.path().to_str().unwrap())?;
    site_packages.child("easy-install.pth").write_str(&format!(
        "something\n{}\nanother thing\n",
        normcase(target.path().to_str().unwrap())
    ))?;
    // Run `pip uninstall`.
    uv_snapshot!(context.pip_uninstall()
        .arg("zstandard"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Uninstalled 1 package in [TIME]
     - zstandard==0.22.0
    "###);
    // The entry in `easy-install.pth` should be removed; unrelated entries
    // must survive.
    assert_eq!(
        fs_err::read_to_string(site_packages.child("easy-install.pth"))?,
        "something\nanother thing\n",
        "easy-install.pth should not contain the path to the uninstalled package"
    );
    // The `.egg-link` file should be removed.
    assert!(!site_packages.child("zstandard.egg-link").exists());
    // The `.egg-info` directory should still exist: it belongs to the source
    // tree, not to the environment.
    assert!(target.child("zstandard.egg-info").exists());
    Ok(())
}
/// A `--dry-run` uninstall of a `.egg-info` package reports what would be
/// removed but leaves the environment untouched.
#[test]
fn dry_run_uninstall_egg_info() -> Result<()> {
    let context = TestContext::new("3.12");
    let site_packages = ChildPath::new(context.site_packages());
    // Fabricate a legacy `.egg-info` installation by hand (metadata directory
    // plus the importable package directory).
    let egg_info = site_packages.child("zstandard-0.22.0-py3.12.egg-info");
    egg_info.create_dir_all()?;
    egg_info.child("top_level.txt").write_str("zstd")?;
    for metadata_file in [
        "SOURCES.txt",
        "PKG-INFO",
        "dependency_links.txt",
        "entry_points.txt",
    ] {
        egg_info.child(metadata_file).write_str("")?;
    }
    let package = site_packages.child("zstd");
    package.create_dir_all()?;
    package.child("__init__.py").write_str("")?;
    // Run `pip uninstall --dry-run`: only a report, no removal.
    uv_snapshot!(context.pip_uninstall()
        .arg("--dry-run")
        .arg("zstandard"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Would uninstall 1 package
     - zstandard==0.22.0
    "###);
    // The `.egg-info` directory should still exist.
    assert!(
        site_packages
            .child("zstandard-0.22.0-py3.12.egg-info")
            .exists()
    );
    // The package directory should still exist.
    assert!(site_packages.child("zstd").child("__init__.py").exists());
    Ok(())
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/branching_urls.rs | crates/uv/tests/it/branching_urls.rs | use std::env;
use anyhow::Result;
use indoc::indoc;
use insta::assert_snapshot;
use crate::common::{TestContext, make_project, uv_snapshot};
/// The root package has diverging URLs for disjoint markers:
/// ```toml
/// dependencies = [
///   "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl ; python_version >= '3.12'",
///   "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'",
/// ]
/// ```
///
/// Because the markers partition the Python range, each fork sees exactly one
/// URL, so locking succeeds.
#[test]
#[cfg(feature = "pypi")]
fn branching_urls_disjoint() -> Result<()> {
    let context = TestContext::new("3.12");
    let deps = indoc! {r#"
        dependencies = [
          # Valid, disjoint split
          "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'",
          "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl ; python_version >= '3.12'",
        ]
    "# };
    make_project(context.temp_dir.path(), "a", deps)?;
    uv_snapshot!(context.filters(), context.lock().current_dir(&context.temp_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );
    Ok(())
}
/// The root package has diverging URLs, but their markers are not disjoint:
/// ```toml
/// dependencies = [
///   "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl ; python_version >= '3.11'",
///   "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'",
/// ]
/// ```
///
/// Both markers match `python_version == 3.11`, so the resolver must reject
/// the conflicting URLs for that overlap.
#[test]
#[cfg(feature = "pypi")]
fn branching_urls_overlapping() -> Result<()> {
    let context = TestContext::new("3.12");
    let deps = indoc! {r#"
        dependencies = [
          # Conflicting split
          "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'",
          "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl ; python_version >= '3.11'",
        ]
    "# };
    make_project(context.temp_dir.path(), "a", deps)?;
    uv_snapshot!(context.filters(), context.lock().current_dir(&context.temp_dir), @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × Failed to resolve dependencies for `a` (v0.1.0)
      ╰─▶ Requirements contain conflicting URLs for package `iniconfig` in split `python_full_version == '3.11.*'`:
          - https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl
          - https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl
    "
    );
    Ok(())
}
/// The root package has diverging URLs, but transitive dependencies have conflicting URLs.
///
/// Requirements:
/// ```text
/// a -> anyio (allowed forking urls to force a split)
/// a -> b -> b1 -> https://../iniconfig-1.1.1-py3-none-any.whl
/// a -> b -> b2 -> https://../iniconfig-2.0.0-py3-none-any.whl
/// ```
///
/// The root's fork does not help here: `b1` and `b2` are both unconditional,
/// so each fork still sees two URLs for `iniconfig` and resolution fails.
#[test]
#[cfg(feature = "pypi")]
fn root_package_splits_but_transitive_conflict() -> Result<()> {
    let context = TestContext::new("3.12");
    // Root project `a`: the conflicting `anyio` pins force a resolver fork.
    let deps = indoc! {r#"
        dependencies = [
          # Force a split
          "anyio==4.3.0 ; python_version >= '3.12'",
          "anyio==4.2.0 ; python_version < '3.12'",
          "b"
        ]
        [tool.uv.sources]
        b = { path = "b" }
    "# };
    make_project(context.temp_dir.path(), "a", deps)?;
    // `b` depends on both `b1` and `b2` without markers — no split here.
    let deps = indoc! {r#"
        dependencies = [
          "b1",
          "b2",
        ]
        [tool.uv.sources]
        b1 = { path = "../b1" }
        b2 = { path = "../b2" }
    "# };
    make_project(&context.temp_dir.path().join("b"), "b", deps)?;
    let deps = indoc! {r#"
        dependencies = [
          "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl",
        ]
    "# };
    make_project(&context.temp_dir.path().join("b1"), "b1", deps)?;
    let deps = indoc! {r#"
        dependencies = [
          "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl",
        ]
    "# };
    make_project(&context.temp_dir.path().join("b2"), "b2", deps)?;
    uv_snapshot!(context.filters(), context.lock().current_dir(&context.temp_dir), @r"
    success: false
    exit_code: 1
    ----- stdout -----
    ----- stderr -----
      × Failed to resolve dependencies for `b2` (v0.1.0)
      ╰─▶ Requirements contain conflicting URLs for package `iniconfig` in split `python_full_version >= '3.12'`:
          - https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl
          - https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl
      help: `b2` (v0.1.0) was included because `a` (v0.1.0) depends on `b` (v0.1.0) which depends on `b2`
    "
    );
    Ok(())
}
/// The root package has diverging URLs, and transitive dependencies through an intermediate
/// package have one URL for each side.
///
/// Requirements:
/// ```text
/// a -> anyio==4.3.0 ; python_version >= '3.12'
/// a -> anyio==4.2.0 ; python_version < '3.12'
/// a -> b -> b1 ; python_version < '3.12' -> https://../iniconfig-1.1.1-py3-none-any.whl
/// a -> b -> b2 ; python_version >= '3.12' -> https://../iniconfig-2.0.0-py3-none-any.whl
/// ```
///
/// Unlike the transitive-conflict case, `b` gates `b1`/`b2` with the same
/// markers that drive the fork, so each fork sees exactly one URL and locking
/// succeeds with both `iniconfig` variants recorded.
#[test]
#[cfg(feature = "pypi")]
fn root_package_splits_transitive_too() -> Result<()> {
    let context = TestContext::new("3.12");
    // Root project `a`: the conflicting `anyio` pins force the fork on 3.12.
    let deps = indoc! {r#"
        dependencies = [
          # Force a split
          "anyio==4.3.0 ; python_version >= '3.12'",
          "anyio==4.2.0 ; python_version < '3.12'",
          "b"
        ]
        [tool.uv.sources]
        b = { path = "b" }
    "# };
    make_project(context.temp_dir.path(), "a", deps)?;
    // Intermediate package `b`: marker-gated deps give each fork one URL.
    let deps = indoc! {r#"
        dependencies = [
          "b1 ; python_version < '3.12'",
          "b2 ; python_version >= '3.12'",
        ]
        [tool.uv.sources]
        b1 = { path = "../b1" }
        b2 = { path = "../b2" }
    "# };
    make_project(&context.temp_dir.path().join("b"), "b", deps)?;
    let deps = indoc! {r#"
        dependencies = [
          "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl",
        ]
    "# };
    make_project(&context.temp_dir.path().join("b1"), "b1", deps)?;
    let deps = indoc! {r#"
        dependencies = [
          "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl",
        ]
    "# };
    make_project(&context.temp_dir.path().join("b2"), "b2", deps)?;
    uv_snapshot!(context.filters(), context.lock().current_dir(&context.temp_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 10 packages in [TIME]
    "###
    );
    // The lockfile records both `anyio` and both `iniconfig` variants, each
    // scoped by its resolution markers.
    assert_snapshot!(context.read("uv.lock"), @r#"
    version = 1
    revision = 3
    requires-python = ">=3.11, <3.13"
    resolution-markers = [
        "python_full_version >= '3.12'",
        "python_full_version < '3.12'",
    ]
    [options]
    exclude-newer = "2024-03-25T00:00:00Z"
    [[package]]
    name = "a"
    version = "0.1.0"
    source = { editable = "." }
    dependencies = [
        { name = "anyio", version = "4.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
        { name = "anyio", version = "4.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
        { name = "b" },
    ]
    [package.metadata]
    requires-dist = [
        { name = "anyio", marker = "python_full_version < '3.12'", specifier = "==4.2.0" },
        { name = "anyio", marker = "python_full_version >= '3.12'", specifier = "==4.3.0" },
        { name = "b", directory = "b" },
    ]
    [[package]]
    name = "anyio"
    version = "4.2.0"
    source = { registry = "https://pypi.org/simple" }
    resolution-markers = [
        "python_full_version < '3.12'",
    ]
    dependencies = [
        { name = "idna", marker = "python_full_version < '3.12'" },
        { name = "sniffio", marker = "python_full_version < '3.12'" },
    ]
    sdist = { url = "https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f", size = 158770, upload-time = "2023-12-16T17:06:57.709Z" }
    wheels = [
        { url = "https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee", size = 85481, upload-time = "2023-12-16T17:06:55.989Z" },
    ]
    [[package]]
    name = "anyio"
    version = "4.3.0"
    source = { registry = "https://pypi.org/simple" }
    resolution-markers = [
        "python_full_version >= '3.12'",
    ]
    dependencies = [
        { name = "idna", marker = "python_full_version >= '3.12'" },
        { name = "sniffio", marker = "python_full_version >= '3.12'" },
    ]
    sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642, upload-time = "2024-02-19T08:36:28.641Z" }
    wheels = [
        { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584, upload-time = "2024-02-19T08:36:26.842Z" },
    ]
    [[package]]
    name = "b"
    version = "0.1.0"
    source = { directory = "b" }
    dependencies = [
        { name = "b1", marker = "python_full_version < '3.12'" },
        { name = "b2", marker = "python_full_version >= '3.12'" },
    ]
    [package.metadata]
    requires-dist = [
        { name = "b1", marker = "python_full_version < '3.12'", directory = "b1" },
        { name = "b2", marker = "python_full_version >= '3.12'", directory = "b2" },
    ]
    [[package]]
    name = "b1"
    version = "0.1.0"
    source = { directory = "b1" }
    dependencies = [
        { name = "iniconfig", version = "1.1.1", source = { url = "https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl" }, marker = "python_full_version < '3.12'" },
    ]
    [package.metadata]
    requires-dist = [{ name = "iniconfig", url = "https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl" }]
    [[package]]
    name = "b2"
    version = "0.1.0"
    source = { directory = "b2" }
    dependencies = [
        { name = "iniconfig", version = "2.0.0", source = { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl" }, marker = "python_full_version >= '3.12'" },
    ]
    [package.metadata]
    requires-dist = [{ name = "iniconfig", url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl" }]
    [[package]]
    name = "idna"
    version = "3.6"
    source = { registry = "https://pypi.org/simple" }
    sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" }
    wheels = [
        { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" },
    ]
    [[package]]
    name = "iniconfig"
    version = "1.1.1"
    source = { url = "https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl" }
    resolution-markers = [
        "python_full_version < '3.12'",
    ]
    wheels = [
        { url = "https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3" },
    ]
    [[package]]
    name = "iniconfig"
    version = "2.0.0"
    source = { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl" }
    resolution-markers = [
        "python_full_version >= '3.12'",
    ]
    wheels = [
        { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" },
    ]
    [[package]]
    name = "sniffio"
    version = "1.3.1"
    source = { registry = "https://pypi.org/simple" }
    sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
    wheels = [
        { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
    ]
    "#);
    Ok(())
}
/// The root package has diverging URLs on one package, and other dependencies have one URL
/// for each side.
///
/// Requirements:
/// ```
/// a -> anyio==4.3.0 ; python_version >= '3.12'
/// a -> anyio==4.2.0 ; python_version < '3.12'
/// a -> b1 ; python_version < '3.12' -> iniconfig==1.1.1
/// a -> b2 ; python_version >= '3.12' -> iniconfig==2.0.0
/// ```
///
/// Here the marker-gated siblings are registry requirements rather than URLs:
/// the fork still yields two `iniconfig` versions, one per side.
#[test]
#[cfg(feature = "pypi")]
fn root_package_splits_other_dependencies_too() -> Result<()> {
    let context = TestContext::new("3.12");
    // Root project `a`: conflicting `anyio` pins force the fork on 3.12.
    let deps = indoc! {r#"
        dependencies = [
          # Force a split
          "anyio==4.3.0 ; python_version >= '3.12'",
          "anyio==4.2.0 ; python_version < '3.12'",
          # These two are currently included in both parts of the split.
          "b1 ; python_version < '3.12'",
          "b2 ; python_version >= '3.12'",
        ]
        [tool.uv.sources]
        b1 = { path = "b1" }
        b2 = { path = "b2" }
    "# };
    make_project(context.temp_dir.path(), "a", deps)?;
    let deps = indoc! {r#"
        dependencies = [
          "iniconfig==1.1.1",
        ]
    "# };
    make_project(&context.temp_dir.path().join("b1"), "b1", deps)?;
    let deps = indoc! {r#"
        dependencies = [
          "iniconfig==2.0.0"
        ]
    "# };
    make_project(&context.temp_dir.path().join("b2"), "b2", deps)?;
    uv_snapshot!(context.filters(), context.lock().current_dir(&context.temp_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 9 packages in [TIME]
    "###
    );
    // Both `iniconfig` versions come from the registry (sdist + wheel), each
    // scoped to one side of the fork.
    assert_snapshot!(context.read("uv.lock"), @r#"
    version = 1
    revision = 3
    requires-python = ">=3.11, <3.13"
    resolution-markers = [
        "python_full_version >= '3.12'",
        "python_full_version < '3.12'",
    ]
    [options]
    exclude-newer = "2024-03-25T00:00:00Z"
    [[package]]
    name = "a"
    version = "0.1.0"
    source = { editable = "." }
    dependencies = [
        { name = "anyio", version = "4.2.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
        { name = "anyio", version = "4.3.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
        { name = "b1", marker = "python_full_version < '3.12'" },
        { name = "b2", marker = "python_full_version >= '3.12'" },
    ]
    [package.metadata]
    requires-dist = [
        { name = "anyio", marker = "python_full_version < '3.12'", specifier = "==4.2.0" },
        { name = "anyio", marker = "python_full_version >= '3.12'", specifier = "==4.3.0" },
        { name = "b1", marker = "python_full_version < '3.12'", directory = "b1" },
        { name = "b2", marker = "python_full_version >= '3.12'", directory = "b2" },
    ]
    [[package]]
    name = "anyio"
    version = "4.2.0"
    source = { registry = "https://pypi.org/simple" }
    resolution-markers = [
        "python_full_version < '3.12'",
    ]
    dependencies = [
        { name = "idna", marker = "python_full_version < '3.12'" },
        { name = "sniffio", marker = "python_full_version < '3.12'" },
    ]
    sdist = { url = "https://files.pythonhosted.org/packages/2d/b8/7333d87d5f03247215d86a86362fd3e324111788c6cdd8d2e6196a6ba833/anyio-4.2.0.tar.gz", hash = "sha256:e1875bb4b4e2de1669f4bc7869b6d3f54231cdced71605e6e64c9be77e3be50f", size = 158770, upload-time = "2023-12-16T17:06:57.709Z" }
    wheels = [
        { url = "https://files.pythonhosted.org/packages/bf/cd/d6d9bb1dadf73e7af02d18225cbd2c93f8552e13130484f1c8dcfece292b/anyio-4.2.0-py3-none-any.whl", hash = "sha256:745843b39e829e108e518c489b31dc757de7d2131d53fac32bd8df268227bfee", size = 85481, upload-time = "2023-12-16T17:06:55.989Z" },
    ]
    [[package]]
    name = "anyio"
    version = "4.3.0"
    source = { registry = "https://pypi.org/simple" }
    resolution-markers = [
        "python_full_version >= '3.12'",
    ]
    dependencies = [
        { name = "idna", marker = "python_full_version >= '3.12'" },
        { name = "sniffio", marker = "python_full_version >= '3.12'" },
    ]
    sdist = { url = "https://files.pythonhosted.org/packages/db/4d/3970183622f0330d3c23d9b8a5f52e365e50381fd484d08e3285104333d3/anyio-4.3.0.tar.gz", hash = "sha256:f75253795a87df48568485fd18cdd2a3fa5c4f7c5be8e5e36637733fce06fed6", size = 159642, upload-time = "2024-02-19T08:36:28.641Z" }
    wheels = [
        { url = "https://files.pythonhosted.org/packages/14/fd/2f20c40b45e4fb4324834aea24bd4afdf1143390242c0b33774da0e2e34f/anyio-4.3.0-py3-none-any.whl", hash = "sha256:048e05d0f6caeed70d731f3db756d35dcc1f35747c8c403364a8332c630441b8", size = 85584, upload-time = "2024-02-19T08:36:26.842Z" },
    ]
    [[package]]
    name = "b1"
    version = "0.1.0"
    source = { directory = "b1" }
    dependencies = [
        { name = "iniconfig", version = "1.1.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
    ]
    [package.metadata]
    requires-dist = [{ name = "iniconfig", specifier = "==1.1.1" }]
    [[package]]
    name = "b2"
    version = "0.1.0"
    source = { directory = "b2" }
    dependencies = [
        { name = "iniconfig", version = "2.0.0", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version >= '3.12'" },
    ]
    [package.metadata]
    requires-dist = [{ name = "iniconfig", specifier = "==2.0.0" }]
    [[package]]
    name = "idna"
    version = "3.6"
    source = { registry = "https://pypi.org/simple" }
    sdist = { url = "https://files.pythonhosted.org/packages/bf/3f/ea4b9117521a1e9c50344b909be7886dd00a519552724809bb1f486986c2/idna-3.6.tar.gz", hash = "sha256:9ecdbbd083b06798ae1e86adcbfe8ab1479cf864e4ee30fe4e46a003d12491ca", size = 175426, upload-time = "2023-11-25T15:40:54.902Z" }
    wheels = [
        { url = "https://files.pythonhosted.org/packages/c2/e7/a82b05cf63a603df6e68d59ae6a68bf5064484a0718ea5033660af4b54a9/idna-3.6-py3-none-any.whl", hash = "sha256:c05567e9c24a6b9faaa835c4821bad0590fbb9d5779e7caa6e1cc4978e7eb24f", size = 61567, upload-time = "2023-11-25T15:40:52.604Z" },
    ]
    [[package]]
    name = "iniconfig"
    version = "1.1.1"
    source = { registry = "https://pypi.org/simple" }
    resolution-markers = [
        "python_full_version < '3.12'",
    ]
    sdist = { url = "https://files.pythonhosted.org/packages/23/a2/97899f6bd0e873fed3a7e67ae8d3a08b21799430fb4da15cfedf10d6e2c2/iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32", size = 8104, upload-time = "2020-10-14T10:20:18.572Z" }
    wheels = [
        { url = "https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", size = 4990, upload-time = "2020-10-16T17:37:23.05Z" },
    ]
    [[package]]
    name = "iniconfig"
    version = "2.0.0"
    source = { registry = "https://pypi.org/simple" }
    resolution-markers = [
        "python_full_version >= '3.12'",
    ]
    sdist = { url = "https://files.pythonhosted.org/packages/d7/4b/cbd8e699e64a6f16ca3a8220661b5f83792b3017d0f79807cb8708d33913/iniconfig-2.0.0.tar.gz", hash = "sha256:2d91e135bf72d31a410b17c16da610a82cb55f6b0477d1a902134b24a455b8b3", size = 4646, upload-time = "2023-01-07T11:08:11.254Z" }
    wheels = [
        { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374", size = 5892, upload-time = "2023-01-07T11:08:09.864Z" },
    ]
    [[package]]
    name = "sniffio"
    version = "1.3.1"
    source = { registry = "https://pypi.org/simple" }
    sdist = { url = "https://files.pythonhosted.org/packages/a2/87/a6771e1546d97e7e041b6ae58d80074f81b7d5121207425c964ddf5cfdbd/sniffio-1.3.1.tar.gz", hash = "sha256:f4324edc670a0f49750a81b895f35c3adb843cca46f0530f79fc1babb23789dc", size = 20372, upload-time = "2024-02-25T23:20:04.057Z" }
    wheels = [
        { url = "https://files.pythonhosted.org/packages/e9/44/75a9c9421471a6c4805dbf2356f7c181a29c1879239abab1ea2cc8f38b40/sniffio-1.3.1-py3-none-any.whl", hash = "sha256:2f6da418d1f1e0fddd844478f41680e794e6051915791a034ff65e5f100525a2", size = 10235, upload-time = "2024-02-25T23:20:01.196Z" },
    ]
    "#);
    Ok(())
}
/// Whether the dependency comes from the registry or a direct URL depends on the branch.
///
/// The single `iniconfig` requirement forks on `python_version` at 3.12: below it, a pinned
/// registry release; at or above it, a direct wheel URL. The lockfile must capture both forks
/// under their respective resolution markers.
///
/// ```toml
/// dependencies = [
///   "iniconfig == 1.1.1 ; python_version < '3.12'",
///   "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl ; python_version >= '3.12'",
/// ]
/// ```
#[test]
#[cfg(feature = "pypi")]
fn branching_between_registry_and_direct_url() -> Result<()> {
    let context = TestContext::new("3.12");
    // Project `a` declares the forked requirement described in the doc comment above.
    let deps = indoc! {r#"
        dependencies = [
          "iniconfig == 1.1.1 ; python_version < '3.12'",
          "iniconfig @ https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl ; python_version >= '3.12'",
        ]
    "# };
    make_project(context.temp_dir.path(), "a", deps)?;
    // Locking should succeed, resolving both sides of the split.
    uv_snapshot!(context.filters(), context.lock().current_dir(&context.temp_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );
    // We have source dist and wheel for the registry, but only the wheel for the direct URL.
    assert_snapshot!(context.read("uv.lock"), @r#"
    version = 1
    revision = 3
    requires-python = ">=3.11, <3.13"
    resolution-markers = [
        "python_full_version >= '3.12'",
        "python_full_version < '3.12'",
    ]
    [options]
    exclude-newer = "2024-03-25T00:00:00Z"
    [[package]]
    name = "a"
    version = "0.1.0"
    source = { editable = "." }
    dependencies = [
        { name = "iniconfig", version = "1.1.1", source = { registry = "https://pypi.org/simple" }, marker = "python_full_version < '3.12'" },
        { name = "iniconfig", version = "2.0.0", source = { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl" }, marker = "python_full_version >= '3.12'" },
    ]
    [package.metadata]
    requires-dist = [
        { name = "iniconfig", marker = "python_full_version < '3.12'", specifier = "==1.1.1" },
        { name = "iniconfig", marker = "python_full_version >= '3.12'", url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl" },
    ]
    [[package]]
    name = "iniconfig"
    version = "1.1.1"
    source = { registry = "https://pypi.org/simple" }
    resolution-markers = [
        "python_full_version < '3.12'",
    ]
    sdist = { url = "https://files.pythonhosted.org/packages/23/a2/97899f6bd0e873fed3a7e67ae8d3a08b21799430fb4da15cfedf10d6e2c2/iniconfig-1.1.1.tar.gz", hash = "sha256:bc3af051d7d14b2ee5ef9969666def0cd1a000e121eaea580d4a313df4b37f32", size = 8104, upload-time = "2020-10-14T10:20:18.572Z" }
    wheels = [
        { url = "https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3", size = 4990, upload-time = "2020-10-16T17:37:23.05Z" },
    ]
    [[package]]
    name = "iniconfig"
    version = "2.0.0"
    source = { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl" }
    resolution-markers = [
        "python_full_version >= '3.12'",
    ]
    wheels = [
        { url = "https://files.pythonhosted.org/packages/ef/a6/62565a6e1cf69e10f5727360368e451d4b7f58beeac6173dc9db836a5b46/iniconfig-2.0.0-py3-none-any.whl", hash = "sha256:b6a85871a79d2e3b22d2d1b94ac2824226a63c6b741c88f7ae975f18b6778374" },
    ]
    "#);
    Ok(())
}
/// The root package has two different direct URLs for disjoint forks, but they are from different sources.
///
/// One fork is a direct wheel URL, the other a pinned Git commit. Because the markers are
/// disjoint (`< '3.12'` vs `>= '3.12'`), locking must succeed and record both sources.
///
/// ```toml
/// dependencies = [
///   "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'",
///   "iniconfig @ git+https://github.com/pytest-dev/iniconfig@93f5930e668c0d1ddf4597e38dd0dea4e2665e7a ; python_version >= '3.12'",
/// ]
/// ```
#[test]
#[cfg(all(feature = "git", feature = "pypi"))]
fn branching_urls_of_different_sources_disjoint() -> Result<()> {
    let context = TestContext::new("3.12");
    let deps = indoc! {r#"
        dependencies = [
          # Valid, disjoint split
          "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'",
          "iniconfig @ git+https://github.com/pytest-dev/iniconfig@93f5930e668c0d1ddf4597e38dd0dea4e2665e7a ; python_version >= '3.12'",
        ]
    "# };
    make_project(context.temp_dir.path(), "a", deps)?;
    // Locking should succeed despite the two distinct URL sources for one package name.
    uv_snapshot!(context.filters(), context.lock().current_dir(&context.temp_dir), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Resolved 3 packages in [TIME]
    "###
    );
    // We have source dist and wheel for the registry, but only the wheel for the direct URL.
    assert_snapshot!(context.read("uv.lock"), @r#"
    version = 1
    revision = 3
    requires-python = ">=3.11, <3.13"
    resolution-markers = [
        "python_full_version >= '3.12'",
        "python_full_version < '3.12'",
    ]
    [options]
    exclude-newer = "2024-03-25T00:00:00Z"
    [[package]]
    name = "a"
    version = "0.1.0"
    source = { editable = "." }
    dependencies = [
        { name = "iniconfig", version = "1.1.1", source = { url = "https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl" }, marker = "python_full_version < '3.12'" },
        { name = "iniconfig", version = "2.0.0", source = { git = "https://github.com/pytest-dev/iniconfig?rev=93f5930e668c0d1ddf4597e38dd0dea4e2665e7a#93f5930e668c0d1ddf4597e38dd0dea4e2665e7a" }, marker = "python_full_version >= '3.12'" },
    ]
    [package.metadata]
    requires-dist = [
        { name = "iniconfig", marker = "python_full_version < '3.12'", url = "https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl" },
        { name = "iniconfig", marker = "python_full_version >= '3.12'", git = "https://github.com/pytest-dev/iniconfig?rev=93f5930e668c0d1ddf4597e38dd0dea4e2665e7a" },
    ]
    [[package]]
    name = "iniconfig"
    version = "1.1.1"
    source = { url = "https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl" }
    resolution-markers = [
        "python_full_version < '3.12'",
    ]
    wheels = [
        { url = "https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl", hash = "sha256:011e24c64b7f47f6ebd835bb12a743f2fbe9a26d4cecaa7f53bc4f35ee9da8b3" },
    ]
    [[package]]
    name = "iniconfig"
    version = "2.0.0"
    source = { git = "https://github.com/pytest-dev/iniconfig?rev=93f5930e668c0d1ddf4597e38dd0dea4e2665e7a#93f5930e668c0d1ddf4597e38dd0dea4e2665e7a" }
    resolution-markers = [
        "python_full_version >= '3.12'",
    ]
    "#);
    Ok(())
}
/// The root package has two different direct URLs from different sources, but they are not
/// disjoint.
///
/// ```toml
/// dependencies = [
/// "iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'",
/// "iniconfig @ git+https://github.com/pytest-dev/iniconfig@93f5930e668c0d1ddf4597e38dd0dea4e2665e7a ; python_version >= '3.12'",
/// ]
/// ```
#[test]
#[cfg(all(feature = "git", feature = "pypi"))]
fn branching_urls_of_different_sources_conflict() -> Result<()> {
let context = TestContext::new("3.12");
let deps = indoc! {r#"
dependencies = [
# Conflicting split
"iniconfig @ https://files.pythonhosted.org/packages/9b/dd/b3c12c6d707058fa947864b67f0c4e0c39ef8610988d7baea9578f3c48f3/iniconfig-1.1.1-py2.py3-none-any.whl ; python_version < '3.12'",
"iniconfig @ git+https://github.com/pytest-dev/iniconfig@93f5930e668c0d1ddf4597e38dd0dea4e2665e7a ; python_version >= '3.11'",
]
"# };
make_project(context.temp_dir.path(), "a", deps)?;
uv_snapshot!(context.filters(), context.lock().current_dir(&context.temp_dir), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
× Failed to resolve dependencies for `a` (v0.1.0)
╰─▶ Requirements contain conflicting URLs for package `iniconfig` in split `python_full_version == '3.11.*'`:
- git+https://github.com/pytest-dev/iniconfig@93f5930e668c0d1ddf4597e38dd0dea4e2665e7a
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/python_install.rs | crates/uv/tests/it/python_install.rs | #[cfg(windows)]
use std::path::PathBuf;
use std::{env, path::Path, process::Command};
use crate::common::{TestContext, uv_snapshot};
use assert_cmd::assert::OutputAssertExt;
use assert_fs::{
assert::PathAssert,
prelude::{FileTouch, FileWriteStr, PathChild, PathCreateDir},
};
use indoc::indoc;
use predicates::prelude::predicate;
use tracing::debug;
use uv_fs::Simplified;
use uv_static::EnvVars;
/// End-to-end lifecycle of a managed Python install: install the latest version, verify the
/// `bin` executable (a symlink on Unix), re-run as a no-op, `--reinstall`, then uninstall and
/// verify the executable is removed.
#[test]
fn python_install() {
    let context: TestContext = TestContext::new_with_versions(&[])
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs()
        .with_empty_python_install_mirror()
        .with_python_download_cache();
    // Install the latest version
    uv_snapshot!(context.filters(), context.python_install(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.14.2 in [TIME]
     + cpython-3.14.2-[PLATFORM] (python3.14)
    ");
    let bin_python = context
        .bin_dir
        .child(format!("python3.14{}", std::env::consts::EXE_SUFFIX));
    // The executable should be installed in the bin directory
    bin_python.assert(predicate::path::exists());
    // On Unix, it should be a link
    #[cfg(unix)]
    bin_python.assert(predicate::path::is_symlink());
    // The link should be a path to the binary
    if cfg!(unix) {
        insta::with_settings!({
            filters => context.filters(),
        }, {
            insta::assert_snapshot!(
                read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.14.2-[PLATFORM]/bin/python3.14"
            );
        });
    } else if cfg!(windows) {
        insta::with_settings!({
            filters => context.filters(),
        }, {
            insta::assert_snapshot!(
                read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.14.2-[PLATFORM]/python"
            );
        });
    }
    // The executable should "work"
    uv_snapshot!(context.filters(), Command::new(bin_python.as_os_str())
        .arg("-c").arg("import subprocess; print('hello world')"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    hello world
    ----- stderr -----
    "###);
    // Should be a no-op when already installed
    uv_snapshot!(context.filters(), context.python_install(), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Python is already installed. Use `uv python install <request>` to install another version.
    "###);
    // Similarly, when a requested version is already installed
    uv_snapshot!(context.filters(), context.python_install().arg("3.14"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Python 3.14 is already installed
    ");
    // You can opt-in to a reinstall
    uv_snapshot!(context.filters(), context.python_install().arg("3.14").arg("--reinstall"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.14.2 in [TIME]
     ~ cpython-3.14.2-[PLATFORM] (python3.14)
    ");
    // The executable should still be present in the bin directory
    bin_python.assert(predicate::path::exists());
    // Uninstallation requires an argument
    uv_snapshot!(context.filters(), context.python_uninstall(), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: the following required arguments were not provided:
      <TARGETS>...
    Usage: uv python uninstall --install-dir <INSTALL_DIR> <TARGETS>...
    For more information, try '--help'.
    "###);
    uv_snapshot!(context.filters(), context.python_uninstall().arg("3.14"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Searching for Python versions matching: Python 3.14
    Uninstalled Python 3.14.2 in [TIME]
     - cpython-3.14.2-[PLATFORM] (python3.14)
    ");
    // The executable should be removed
    bin_python.assert(predicate::path::missing());
}
/// `uv python install --reinstall` re-installs a single version, all installed versions when no
/// request is given, and also works for a version that was never installed (reported with `+`
/// rather than `~`).
#[test]
fn python_reinstall() {
    let context: TestContext = TestContext::new_with_versions(&[])
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs()
        .with_python_download_cache();
    // Install a couple versions
    uv_snapshot!(context.filters(), context.python_install().arg("3.12").arg("3.13"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed 2 versions in [TIME]
     + cpython-3.12.12-[PLATFORM] (python3.12)
     + cpython-3.13.11-[PLATFORM] (python3.13)
    ");
    // Reinstall a single version
    uv_snapshot!(context.filters(), context.python_install().arg("3.13").arg("--reinstall"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.13.11 in [TIME]
     ~ cpython-3.13.11-[PLATFORM] (python3.13)
    ");
    // Reinstall multiple versions
    uv_snapshot!(context.filters(), context.python_install().arg("--reinstall"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed 2 versions in [TIME]
     ~ cpython-3.12.12-[PLATFORM] (python3.12)
     ~ cpython-3.13.11-[PLATFORM] (python3.13)
    ");
    // Reinstalling a version that is not installed should also work
    uv_snapshot!(context.filters(), context.python_install().arg("3.11").arg("--reinstall"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.11.14 in [TIME]
     + cpython-3.11.14-[PLATFORM] (python3.11)
    ");
}
/// `--reinstall` with a minor-version request (`3.12`) after installing specific patch versions:
/// currently this installs the latest patch rather than reinstalling the existing patches.
#[test]
fn python_reinstall_patch() {
    let context: TestContext = TestContext::new_with_versions(&[])
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs()
        .with_python_download_cache();
    // Install a couple patch versions
    uv_snapshot!(context.filters(), context.python_install().arg("3.12.6").arg("3.12.7"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed 2 versions in [TIME]
     + cpython-3.12.6-[PLATFORM]
     + cpython-3.12.7-[PLATFORM] (python3.12)
    ");
    // Reinstall all "3.12" versions
    // TODO(zanieb): This doesn't work today, because we need this to install the "latest" as there
    // is no workflow for `--upgrade` yet
    uv_snapshot!(context.filters(), context.python_install().arg("3.12").arg("--reinstall"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.12.12 in [TIME]
     + cpython-3.12.12-[PLATFORM] (python3.12)
    ");
}
/// Automatic (implicit) Python installation via `uv run`: fails when downloads are disabled,
/// fetches the latest version otherwise, reuses it afterwards, honors `-p` requests, rejects
/// unmappable requests, and (on Unix) ignores a broken `python3` executable on the test `PATH`.
#[test]
fn python_install_automatic() {
    let context: TestContext = TestContext::new_with_versions(&[])
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_filtered_python_sources()
        .with_managed_python_dirs()
        .with_python_download_cache();
    // With downloads disabled, the automatic install should fail
    uv_snapshot!(context.filters(), context.run()
        .env_remove(EnvVars::VIRTUAL_ENV)
        .arg("--no-python-downloads")
        .arg("python").arg("-c").arg("import sys; print(sys.version_info[:2])"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: No interpreter found in [PYTHON SOURCES]
    hint: A managed Python download is available, but Python downloads are set to 'never'
    ");
    // Otherwise, we should fetch the latest Python version
    uv_snapshot!(context.filters(), context.run()
        .env_remove(EnvVars::VIRTUAL_ENV)
        .arg("python").arg("-c").arg("import sys; print(sys.version_info[:2])"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    (3, 14)
    ----- stderr -----
    "###);
    // Subsequently, we can use the interpreter even with downloads disabled
    uv_snapshot!(context.filters(), context.run()
        .env_remove(EnvVars::VIRTUAL_ENV)
        .arg("--no-python-downloads")
        .arg("python").arg("-c").arg("import sys; print(sys.version_info[:2])"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    (3, 14)
    ----- stderr -----
    "###);
    // We should respect the Python request
    uv_snapshot!(context.filters(), context.run()
        .env_remove(EnvVars::VIRTUAL_ENV)
        .arg("-p").arg("3.12")
        .arg("python").arg("-c").arg("import sys; print(sys.version_info[:2])"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    (3, 12)
    ----- stderr -----
    "###);
    // But some requests cannot be mapped to a download
    uv_snapshot!(context.filters(), context.run()
        .env_remove(EnvVars::VIRTUAL_ENV)
        .arg("-p").arg("foobar")
        .arg("python").arg("-c").arg("import sys; print(sys.version_info[:2])"), @r###"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: No interpreter found for executable name `foobar` in [PYTHON SOURCES]
    "###);
    // Create a "broken" Python executable in the test context `bin`
    // (the snapshot is different on Windows so we just test on Unix)
    #[cfg(unix)]
    {
        use std::os::unix::fs::PermissionsExt;
        let contents = r"#!/bin/sh
        echo 'error: intentionally broken python executable' >&2
        exit 1";
        let python = context
            .bin_dir
            .join(format!("python3{}", std::env::consts::EXE_SUFFIX));
        fs_err::write(&python, contents).unwrap();
        // Mark the stub script as executable so discovery will consider it.
        let mut perms = fs_err::metadata(&python).unwrap().permissions();
        perms.set_mode(0o755);
        fs_err::set_permissions(&python, perms).unwrap();
        // We should ignore the broken executable and download a version still
        uv_snapshot!(context.filters(), context.run()
            .env_remove(EnvVars::VIRTUAL_ENV)
            // In tests, we ignore `PATH` during Python discovery so we need to add the context `bin`
            .env(EnvVars::UV_TEST_PYTHON_PATH, context.bin_dir.as_os_str())
            .arg("-p").arg("3.11")
            .arg("python").arg("-c").arg("import sys; print(sys.version_info[:2])"), @r###"
        success: true
        exit_code: 0
        ----- stdout -----
        (3, 11)
        ----- stderr -----
        "###);
    }
}
/// Regression test for a bad cpython runtime
/// <https://github.com/astral-sh/uv/issues/13610>
///
/// Runs a small script (subclassing `str` and using `+=` on a list) under a managed 3.12
/// interpreter; it must exit cleanly with no output.
#[test]
fn regression_cpython() {
    let context: TestContext = TestContext::new_with_versions(&[])
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_filtered_python_sources()
        .with_managed_python_dirs()
        .with_python_download_cache();
    // Minimal reproducer script from the linked issue.
    let init = context.temp_dir.child("mre.py");
    init.write_str(indoc! { r#"
        class Foo(str): ...
        a = []
        new_value = Foo("1")
        a += new_value
        "#
    })
    .unwrap();
    // We should respect the Python request
    uv_snapshot!(context.filters(), context.run()
        .env_remove(EnvVars::VIRTUAL_ENV)
        .arg("-p").arg("3.12")
        .arg("mre.py"), @r###"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    "###);
}
/// `uv python install --force`: force-replacing executables succeeds, and replacing an
/// unmanaged executable in `bin` requires `--force` (a plain install only warns).
#[test]
fn python_install_force() {
    let context: TestContext = TestContext::new_with_versions(&[])
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs();
    // Install the latest version
    uv_snapshot!(context.filters(), context.python_install(), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.14.2 in [TIME]
     + cpython-3.14.2-[PLATFORM] (python3.14)
    ");
    let bin_python = context
        .bin_dir
        .child(format!("python3.14{}", std::env::consts::EXE_SUFFIX));
    // You can force replacement of the executables
    uv_snapshot!(context.filters(), context.python_install().arg("--force"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.14.2 in [TIME]
     + cpython-3.14.2-[PLATFORM] (python3.14)
    ");
    // The executable should still be present in the bin directory
    bin_python.assert(predicate::path::exists());
    // If an unmanaged executable is present, `--force` is required
    // (simulate one by replacing the link with an empty file)
    fs_err::remove_file(bin_python.path()).unwrap();
    bin_python.touch().unwrap();
    uv_snapshot!(context.filters(), context.python_install().arg("3.14"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    warning: Failed to install executable for cpython-3.14.2-[PLATFORM]
      Caused by: Executable already exists at `[BIN]/python3.14` but is not managed by uv; use `--force` to replace it
    ");
    uv_snapshot!(context.filters(), context.python_install().arg("--force").arg("3.14"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.14.2 in [TIME]
     + cpython-3.14.2-[PLATFORM] (python3.14)
    ");
    bin_python.assert(predicate::path::exists());
}
/// Installing a minor version request (`3.11`) links `bin/python3.11` directly to the installed
/// patch release, and uninstalling removes the link.
#[test]
fn python_install_minor() {
    let context: TestContext = TestContext::new_with_versions(&[])
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs();
    // Install a minor version
    uv_snapshot!(context.filters(), context.python_install().arg("3.11"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.11.14 in [TIME]
     + cpython-3.11.14-[PLATFORM] (python3.11)
    ");
    let bin_python = context
        .bin_dir
        .child(format!("python3.11{}", std::env::consts::EXE_SUFFIX));
    // The executable should be installed in the bin directory
    bin_python.assert(predicate::path::exists());
    // It should be a link to the minor version
    if cfg!(unix) {
        insta::with_settings!({
            filters => context.filters(),
        }, {
            insta::assert_snapshot!(
                read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.11.14-[PLATFORM]/bin/python3.11"
            );
        });
    } else if cfg!(windows) {
        insta::with_settings!({
            filters => context.filters(),
        }, {
            insta::assert_snapshot!(
                read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.11.14-[PLATFORM]/python"
            );
        });
    }
    uv_snapshot!(context.filters(), context.python_uninstall().arg("3.11"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Searching for Python versions matching: Python 3.11
    Uninstalled Python 3.11.14 in [TIME]
     - cpython-3.11.14-[PLATFORM] (python3.11)
    ");
    // The executable should be removed
    bin_python.assert(predicate::path::missing());
}
/// Installing two patch releases of the same minor version: the `bin` link must resolve to the
/// newer patch. Relinking to the remaining patch after uninstalling the newer one is not
/// implemented yet (see the commented-out assertions at the bottom).
#[test]
fn python_install_multiple_patch() {
    let context: TestContext = TestContext::new_with_versions(&[])
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs();
    // Install multiple patch versions
    uv_snapshot!(context.filters(), context.python_install().arg("3.12.8").arg("3.12.6"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed 2 versions in [TIME]
     + cpython-3.12.6-[PLATFORM]
     + cpython-3.12.8-[PLATFORM] (python3.12)
    ");
    let bin_python = context
        .bin_dir
        .child(format!("python3.12{}", std::env::consts::EXE_SUFFIX));
    // The executable should be installed in the bin directory
    bin_python.assert(predicate::path::exists());
    // The link should resolve to the newer patch version
    if cfg!(unix) {
        insta::with_settings!({
            filters => context.filters(),
        }, {
            insta::assert_snapshot!(
                canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.8-[PLATFORM]/bin/python3.12"
            );
        });
    } else if cfg!(windows) {
        insta::with_settings!({
            filters => context.filters(),
        }, {
            insta::assert_snapshot!(
                canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.8-[PLATFORM]/python"
            );
        });
    }
    uv_snapshot!(context.filters(), context.python_uninstall().arg("3.12.8"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Searching for Python versions matching: Python 3.12.8
    Uninstalled Python 3.12.8 in [TIME]
     - cpython-3.12.8-[PLATFORM] (python3.12)
    ");
    // TODO(zanieb): This behavior is not implemented yet
    // // The executable should be installed in the bin directory
    // bin_python.assert(predicate::path::exists());
    // // When the version is removed, the link should point to the other patch version
    // if cfg!(unix) {
    //     insta::with_settings!({
    //         filters => context.filters(),
    //     }, {
    //         insta::assert_snapshot!(
    //             canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.6-[PLATFORM]/bin/python3.12"
    //         );
    //     });
    // } else if cfg!(windows) {
    //     insta::with_settings!({
    //         filters => context.filters(),
    //     }, {
    //         insta::assert_snapshot!(
    //             canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.6-[PLATFORM]/python"
    //         );
    //     });
    // }
}
#[test]
fn python_install_preview() {
let context: TestContext = TestContext::new_with_versions(&[])
.with_filtered_python_keys()
.with_filtered_exe_suffix()
.with_managed_python_dirs()
.with_python_download_cache();
// Install the latest version
uv_snapshot!(context.filters(), context.python_install().arg("--preview"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Installed Python 3.14.2 in [TIME]
+ cpython-3.14.2-[PLATFORM] (python, python3, python3.14)
");
let bin_python = context
.bin_dir
.child(format!("python3.14{}", std::env::consts::EXE_SUFFIX));
// The executable should be installed in the bin directory
bin_python.assert(predicate::path::exists());
// On Unix, it should be a link
#[cfg(unix)]
bin_python.assert(predicate::path::is_symlink());
// The link should be to a path containing a minor version symlink directory
if cfg!(unix) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.14-[PLATFORM]/bin/python3.14"
);
});
} else if cfg!(windows) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.14-[PLATFORM]/python"
);
});
}
// The executable should "work"
uv_snapshot!(context.filters(), Command::new(bin_python.as_os_str())
.arg("-c").arg("import subprocess; print('hello world')"), @r###"
success: true
exit_code: 0
----- stdout -----
hello world
----- stderr -----
"###);
// Should be a no-op when already installed
uv_snapshot!(context.filters(), context.python_install().arg("--preview"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Python is already installed. Use `uv python install <request>` to install another version.
");
// You can opt-in to a reinstall
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--reinstall"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Installed Python 3.14.2 in [TIME]
~ cpython-3.14.2-[PLATFORM] (python, python3, python3.14)
");
// The executable should still be present in the bin directory
bin_python.assert(predicate::path::exists());
// You can also force replacement of the executables
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--force"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Installed Python 3.14.2 in [TIME]
+ cpython-3.14.2-[PLATFORM] (python, python3, python3.14)
");
// The executable should still be present in the bin directory
bin_python.assert(predicate::path::exists());
// If an unmanaged executable is present, `--force` is required
fs_err::remove_file(bin_python.path()).unwrap();
bin_python.touch().unwrap();
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.14"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
warning: Failed to install executable for cpython-3.14.2-[PLATFORM]
Caused by: Executable already exists at `[BIN]/python3.14` but is not managed by uv; use `--force` to replace it
");
// With `--bin`, this should error instead of warn
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--bin").arg("3.14"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
error: Failed to install executable for cpython-3.14.2-[PLATFORM]
Caused by: Executable already exists at `[BIN]/python3.14` but is not managed by uv; use `--force` to replace it
");
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.14").env(EnvVars::UV_PYTHON_INSTALL_BIN, "1"), @r"
success: false
exit_code: 1
----- stdout -----
----- stderr -----
error: Failed to install executable for cpython-3.14.2-[PLATFORM]
Caused by: Executable already exists at `[BIN]/python3.14` but is not managed by uv; use `--force` to replace it
");
// With `--no-bin`, this should be silent
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--no-bin").arg("3.14"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Python 3.14 is already installed
");
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.14").env(EnvVars::UV_PYTHON_INSTALL_BIN, "0"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Python 3.14 is already installed
");
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--force").arg("3.14"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Installed Python 3.14.2 in [TIME]
+ cpython-3.14.2-[PLATFORM] (python3.14)
");
bin_python.assert(predicate::path::exists());
// On Unix, it should be a link
#[cfg(unix)]
bin_python.assert(predicate::path::is_symlink());
// Uninstallation requires an argument
uv_snapshot!(context.filters(), context.python_uninstall(), @r###"
success: false
exit_code: 2
----- stdout -----
----- stderr -----
error: the following required arguments were not provided:
<TARGETS>...
Usage: uv python uninstall --install-dir <INSTALL_DIR> <TARGETS>...
For more information, try '--help'.
"###);
uv_snapshot!(context.filters(), context.python_uninstall().arg("3.14"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Searching for Python versions matching: Python 3.14
Uninstalled Python 3.14.2 in [TIME]
- cpython-3.14.2-[PLATFORM] (python, python3, python3.14)
");
// The executable should be removed
bin_python.assert(predicate::path::missing());
// Install a minor version
uv_snapshot!(context.filters(), context.python_install().arg("3.11").arg("--preview"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Installed Python 3.11.14 in [TIME]
+ cpython-3.11.14-[PLATFORM] (python3.11)
");
let bin_python = context
.bin_dir
.child(format!("python3.11{}", std::env::consts::EXE_SUFFIX));
// The link should be to a path containing a minor version symlink directory
if cfg!(unix) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.11-[PLATFORM]/bin/python3.11"
);
});
} else if cfg!(windows) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.11-[PLATFORM]/python"
);
});
}
uv_snapshot!(context.filters(), context.python_uninstall().arg("3.11"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Searching for Python versions matching: Python 3.11
Uninstalled Python 3.11.14 in [TIME]
- cpython-3.11.14-[PLATFORM] (python3.11)
");
// Install multiple patch versions
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.8").arg("3.12.6"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Installed 2 versions in [TIME]
+ cpython-3.12.6-[PLATFORM]
+ cpython-3.12.8-[PLATFORM] (python3.12)
");
let bin_python = context
.bin_dir
.child(format!("python3.12{}", std::env::consts::EXE_SUFFIX));
// The link should resolve to the newer patch version
if cfg!(unix) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.8-[PLATFORM]/bin/python3.12"
);
});
} else if cfg!(windows) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.8-[PLATFORM]/python"
);
});
}
}
/// `--preview --no-bin` installs the interpreter without creating any `bin` executables, and
/// `--no-bin` conflicts with `--default` at argument-parsing time.
#[test]
fn python_install_preview_no_bin() {
    let context: TestContext = TestContext::new_with_versions(&[])
        .with_filtered_python_keys()
        .with_filtered_exe_suffix()
        .with_managed_python_dirs()
        .with_python_download_cache();
    // Install the latest version
    uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--no-bin"), @r"
    success: true
    exit_code: 0
    ----- stdout -----
    ----- stderr -----
    Installed Python 3.14.2 in [TIME]
     + cpython-3.14.2-[PLATFORM]
    ");
    let bin_python = context
        .bin_dir
        .child(format!("python3.14{}", std::env::consts::EXE_SUFFIX));
    // The executable should not be installed in the bin directory
    bin_python.assert(predicate::path::missing());
    // `--no-bin` and `--default` are mutually exclusive
    uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("--no-bin").arg("--default"), @r"
    success: false
    exit_code: 2
    ----- stdout -----
    ----- stderr -----
    error: the argument '--no-bin' cannot be used with '--default'
    Usage: uv python install --no-bin --install-dir <INSTALL_DIR> [TARGETS]...
    For more information, try '--help'.
    ");
    let bin_python = context
        .bin_dir
        .child(format!("python{}", std::env::consts::EXE_SUFFIX));
    // The executable should not be installed in the bin directory
    bin_python.assert(predicate::path::missing());
}
// Scenario test for how `uv python install --preview` manages the versioned
// `python3.12` executable in the bin directory across patch releases:
// installing a newer patch upgrades the link automatically, installing an
// older patch leaves it alone (even with `--reinstall`), and `--force`
// overrides the link unconditionally.
#[test]
fn python_install_preview_upgrade() {
let context = TestContext::new_with_versions(&[])
.with_filtered_python_keys()
.with_filtered_exe_suffix()
.with_managed_python_dirs()
.with_python_download_cache();
// The minor-versioned executable whose link target we track throughout.
let bin_python = context
.bin_dir
.child(format!("python3.12{}", std::env::consts::EXE_SUFFIX));
// Install 3.12.5
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.5"), @r"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Installed Python 3.12.5 in [TIME]
+ cpython-3.12.5-[PLATFORM] (python3.12)
");
// Installing with a patch version should cause the link to be to the patch installation.
if cfg!(unix) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/bin/python3.12"
);
});
} else if cfg!(windows) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
read_link(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/python"
);
});
}
// Installing 3.12.4 should not replace the executable, but also shouldn't fail
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.4"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Installed Python 3.12.4 in [TIME]
+ cpython-3.12.4-[PLATFORM]
"###);
if cfg!(unix) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/bin/python3.12"
);
});
} else if cfg!(windows) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/python"
);
});
}
// Using `--reinstall` is not sufficient to replace it either
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.4").arg("--reinstall"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Installed Python 3.12.4 in [TIME]
~ cpython-3.12.4-[PLATFORM]
"###);
if cfg!(unix) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/bin/python3.12"
);
});
} else if cfg!(windows) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.5-[PLATFORM]/python"
);
});
}
// But `--force` is
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.4").arg("--force"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Installed Python 3.12.4 in [TIME]
+ cpython-3.12.4-[PLATFORM] (python3.12)
"###);
if cfg!(unix) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.4-[PLATFORM]/bin/python3.12"
);
});
} else if cfg!(windows) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.4-[PLATFORM]/python"
);
});
}
// But installing 3.12.6 should upgrade automatically
uv_snapshot!(context.filters(), context.python_install().arg("--preview").arg("3.12.6"), @r###"
success: true
exit_code: 0
----- stdout -----
----- stderr -----
Installed Python 3.12.6 in [TIME]
+ cpython-3.12.6-[PLATFORM] (python3.12)
"###);
if cfg!(unix) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.6-[PLATFORM]/bin/python3.12"
);
});
} else if cfg!(windows) {
insta::with_settings!({
filters => context.filters(),
}, {
insta::assert_snapshot!(
canonicalize_link_path(&bin_python), @"[TEMP_DIR]/managed/cpython-3.12.6-[PLATFORM]/python"
);
});
}
}
#[test]
fn python_install_freethreaded() {
let context: TestContext = TestContext::new_with_versions(&[])
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv/tests/it/common/mod.rs | crates/uv/tests/it/common/mod.rs | // The `unreachable_pub` is to silence false positives in RustRover.
#![allow(dead_code, unreachable_pub)]
use std::borrow::BorrowMut;
use std::ffi::OsString;
use std::io::Write as _;
use std::iter::Iterator;
use std::path::{Path, PathBuf};
use std::process::{Command, ExitStatus, Output, Stdio};
use std::str::FromStr;
use std::{env, io};
use uv_python::downloads::ManagedPythonDownloadList;
use assert_cmd::assert::{Assert, OutputAssertExt};
use assert_fs::assert::PathAssert;
use assert_fs::fixture::{
ChildPath, FileWriteStr, PathChild, PathCopy, PathCreateDir, SymlinkToFile,
};
use base64::{Engine, prelude::BASE64_STANDARD as base64};
use futures::StreamExt;
use indoc::{formatdoc, indoc};
use itertools::Itertools;
use predicates::prelude::predicate;
use regex::Regex;
use tokio::io::AsyncWriteExt;
use uv_cache::{Cache, CacheBucket};
use uv_fs::Simplified;
use uv_preview::Preview;
use uv_python::managed::ManagedPythonInstallations;
use uv_python::{
EnvironmentPreference, PythonInstallation, PythonPreference, PythonRequest, PythonVersion,
};
use uv_static::EnvVars;
// Exclude any packages uploaded after this date.
static EXCLUDE_NEWER: &str = "2024-03-25T00:00:00Z";
pub const PACKSE_VERSION: &str = "0.3.53";
pub const DEFAULT_PYTHON_VERSION: &str = "3.12";
/// Returns the packse `vendor/` find-links URL.
///
/// Using a find links url allows using `--index-url` instead of `--extra-index-url` in tests
/// to prevent dependency confusion attacks against our test suite.
///
/// Respects the `UV_TEST_PACKSE_INDEX` override when set; otherwise falls back to the
/// published packse index for [`PACKSE_VERSION`].
pub fn build_vendor_links_url() -> String {
    env::var(EnvVars::UV_TEST_PACKSE_INDEX)
        .map(|url| format!("{}/vendor/", url.trim_end_matches('/')))
        // Build the fallback lazily so no allocation happens when the override is set.
        .unwrap_or_else(|_| format!("https://astral-sh.github.io/packse/{PACKSE_VERSION}/vendor/"))
}
/// Returns the packse `simple-html/` index URL.
///
/// Respects the `UV_TEST_PACKSE_INDEX` override when set; otherwise falls back to the
/// published packse index for [`PACKSE_VERSION`].
pub fn packse_index_url() -> String {
    env::var(EnvVars::UV_TEST_PACKSE_INDEX)
        .map(|url| format!("{}/simple-html/", url.trim_end_matches('/')))
        // Build the fallback lazily so no allocation happens when the override is set.
        .unwrap_or_else(|_| {
            format!("https://astral-sh.github.io/packse/{PACKSE_VERSION}/simple-html/")
        })
}
/// Baseline `(pattern, replacement)` snapshot filters applied to every test command.
#[doc(hidden)] // Macro and test context only, don't use directly.
pub const INSTA_FILTERS: &[(&str, &str)] = &[
// Cache directory arguments
(r"--cache-dir [^\s]+", "--cache-dir [CACHE_DIR]"),
// Operation times
(r"(\s|\()(\d+m )?(\d+\.)?\d+(ms|s)", "$1[TIME]"),
// File sizes
(r"(\s|\()(\d+\.)?\d+([KM]i)?B", "$1[SIZE]"),
// Timestamps
(r"tv_sec: \d+", "tv_sec: [TIME]"),
(r"tv_nsec: \d+", "tv_nsec: [TIME]"),
// Rewrite Windows output to Unix output
(r"\\([\w\d]|\.)", "/$1"),
(r"uv\.exe", "uv"),
// uv version display
(
r"uv(-.*)? \d+\.\d+\.\d+(-(alpha|beta|rc)\.\d+)?(\+\d+)?( \([^)]*\))?",
r"uv [VERSION] ([COMMIT] DATE)",
),
// Trim end-of-line whitespaces, to allow removing them on save.
(r"([^\s])[ \t]+(\r?\n)", "$1$2"),
];
/// Create a context for tests which simplifies shared behavior across tests.
///
/// * Set the current directory to a temporary directory (`temp_dir`).
/// * Set the cache dir to a different temporary directory (`cache_dir`).
/// * Set a cutoff for versions used in the resolution so the snapshots don't change after a new release.
/// * Set the venv to a fresh `.venv` in `temp_dir`
pub struct TestContext {
/// The root of the per-test directory tree.
pub root: ChildPath,
/// The working directory for commands run in this context.
pub temp_dir: ChildPath,
/// The uv cache directory for this context.
pub cache_dir: ChildPath,
/// Directory holding normalized `python3` symlinks for each test interpreter (Unix only).
pub python_dir: ChildPath,
/// A stand-in home directory, isolated from the host machine.
pub home_dir: ChildPath,
/// The user configuration directory (`home_dir` itself on Windows, `~/.config` elsewhere).
pub user_config_dir: ChildPath,
/// The executable (`bin`) directory for this context.
pub bin_dir: ChildPath,
/// The `.venv` path inside (the canonicalized) `temp_dir`.
pub venv: ChildPath,
/// The Cargo workspace root, discovered relative to `CARGO_MANIFEST_DIR`.
pub workspace_root: PathBuf,
/// The Python version used for the virtual environment, if any.
pub python_version: Option<PythonVersion>,
/// All the Python versions available during this test context.
pub python_versions: Vec<(PythonVersion, PathBuf)>,
/// Standard filters for this test context.
filters: Vec<(String, String)>,
/// Extra environment variables to apply to all commands.
extra_env: Vec<(OsString, OsString)>,
// Keeps the temporary root directory alive for the lifetime of the context.
#[allow(dead_code)]
_root: tempfile::TempDir,
}
impl TestContext {
/// Create a new test context with a virtual environment.
///
/// See [`TestContext::new_with_versions`] if multiple versions are needed or
/// if creation of the virtual environment should be deferred.
pub fn new(python_version: &str) -> Self {
    let context = Self::new_with_versions(&[python_version]);
    context.create_venv();
    context
}
/// Pin the "exclude newer" cutoff timestamp for every command run in this context.
pub fn with_exclude_newer(mut self, exclude_newer: &str) -> Self {
    let entry = (EnvVars::UV_EXCLUDE_NEWER.into(), exclude_newer.into());
    self.extra_env.push(entry);
    self
}
/// Set the HTTP timeout for every command run in this context.
pub fn with_http_timeout(mut self, http_timeout: &str) -> Self {
    let entry = (EnvVars::UV_HTTP_TIMEOUT.into(), http_timeout.into());
    self.extra_env.push(entry);
    self
}
/// Set the number of concurrent installs for every command run in this context.
pub fn with_concurrent_installs(mut self, concurrent_installs: &str) -> Self {
    let entry = (
        EnvVars::UV_CONCURRENT_INSTALLS.into(),
        concurrent_installs.into(),
    );
    self.extra_env.push(entry);
    self
}
/// Add extra standard filtering for messages like "Resolved 10 packages" which
/// can differ between platforms.
///
/// In some cases, these counts are helpful for the snapshot and should not be filtered.
#[must_use]
pub fn with_filtered_counts(mut self) -> Self {
    let verbs = [
        "Resolved",
        "Prepared",
        "Installed",
        "Uninstalled",
        "Audited",
    ];
    self.filters.extend(verbs.into_iter().map(|verb| {
        (
            format!("{verb} \\d+ packages?"),
            format!("{verb} [N] packages"),
        )
    }));
    self.filters.push((
        "Removed \\d+ files?".to_string(),
        "Removed [N] files".to_string(),
    ));
    self
}
/// Add extra filtering for cache size output.
#[must_use]
pub fn with_filtered_cache_size(mut self) -> Self {
    // Raw byte counts (numbers on their own line).
    let raw_bytes = (r"(?m)^\d+\n".to_string(), "[SIZE]\n".to_string());
    // Human-readable sizes (e.g., "384.2 KiB").
    let human_readable = (
        r"(?m)^\d+(\.\d+)? [KMGT]i?B\n".to_string(),
        "[SIZE]\n".to_string(),
    );
    self.filters.push(raw_bytes);
    self.filters.push(human_readable);
    self
}
/// Add extra standard filtering for Windows-compatible missing file errors.
pub fn with_filtered_missing_file_error(mut self) -> Self {
    // The exact message text depends on the system language, so mask everything
    // after a `Caused by:` up to the OS error code. Windows reports
    // "The system cannot find the path specified. (os error 3)" where Unix reports
    // "No such file or directory (os error 2)"; normalize both codes to the same
    // placeholder so snapshots are platform-agnostic.
    for code in [2, 3] {
        self.filters.push((
            format!(r"[^:\n]* \(os error {code}\)"),
            " [OS ERROR 2]".to_string(),
        ));
    }
    self
}
/// Add extra standard filtering for executable suffixes on the current platform,
/// e.g., drops `.exe` on Windows.
#[must_use]
pub fn with_filtered_exe_suffix(mut self) -> Self {
    let pattern = regex::escape(env::consts::EXE_SUFFIX);
    self.filters.push((pattern, String::new()));
    self
}
/// Add extra standard filtering for Python interpreter sources.
#[must_use]
pub fn with_filtered_python_sources(mut self) -> Self {
    // Every combination of interpreter discovery sources that can appear in
    // output, normalized to a single placeholder.
    let sources = [
        "virtual environments, managed installations, or search path",
        "virtual environments, managed installations, search path, or registry",
        "virtual environments, search path, or registry",
        "virtual environments, registry, or search path",
        "virtual environments or search path",
        "managed installations or search path",
        "managed installations, search path, or registry",
        "search path or registry",
        "registry or search path",
        "search path",
    ];
    self.filters.extend(
        sources
            .into_iter()
            .map(|source| (source.to_string(), "[PYTHON SOURCES]".to_string())),
    );
    self
}
/// Add extra standard filtering for Python executable names, e.g., stripping version number
/// and `.exe` suffixes.
#[must_use]
pub fn with_filtered_python_names(mut self) -> Self {
    // Strip version numbers from executable names so that, e.g., a Windows
    // `python.exe` and a Unix `python3.12` snapshot identically.
    for name in ["python", "pypy"] {
        let version_pattern = if cfg!(windows) {
            // On Windows, require the `.exe` suffix for disambiguation. A version
            // number is uncommon for `python.exe` but can occur, e.g., for
            // `pypy3.12.exe`, so strip it if present.
            format!(r"(\d\.\d+|\d)?{}", regex::escape(env::consts::EXE_SUFFIX))
        } else if name == "python" {
            // On Unix, the version is optional since a bare `/python` is common.
            r"(\d\.\d+|\d)?(t|d|td)?".to_string()
        } else {
            // Other names require a version to avoid over-matching.
            r"(\d\.\d+|\d)(t|d|td)?".to_string()
        };
        // A leading path separator helps disambiguate cases where the name is
        // not used in a path.
        self.filters.push((
            format!(r"[\\/]{name}{version_pattern}"),
            format!("/[{}]", name.to_uppercase()),
        ));
    }
    self
}
/// Add extra standard filtering for venv executable directories on the current platform,
/// e.g., `Scripts` on Windows and `bin` on Unix.
#[must_use]
pub fn with_filtered_virtualenv_bin(mut self) -> Self {
    let bin = venv_bin_path(PathBuf::new()).to_string_lossy().to_string();
    // Filter the directory both with and without a trailing separator.
    self.filters
        .push((format!(r"[\\/]{bin}[\\/]"), "/[BIN]/".to_string()));
    self.filters
        .push((format!(r"[\\/]{bin}"), "/[BIN]".to_string()));
    self
}
/// Add extra standard filtering for Python installation `bin/` directories, which are not
/// present on Windows but are on Unix. See [`TestContext::with_filtered_virtualenv_bin`] for
/// the virtual environment equivalent.
#[must_use]
pub fn with_filtered_python_install_bin(mut self) -> Self {
    // Avoid eagerly matching paths that aren't actually Python executables.
    let suffix = if cfg!(windows) {
        // On Windows, we usually don't have a version attached but we might,
        // e.g., for pypy3.12 — require the `.exe` suffix either way.
        format!(r"(\d\.\d+|\d)?{}", regex::escape(env::consts::EXE_SUFFIX))
    } else {
        // On Unix, require a version to be attached to avoid over-matching.
        r"\d\.\d+|\d".to_string()
    };
    // Unix installations nest executables under `bin/`; Windows does not.
    let prefix = if cfg!(unix) { r"[\\/]bin/" } else { r"[\\/]" };
    for name in ["python", "pypy"] {
        self.filters.push((
            format!("{prefix}{name}({suffix})"),
            format!("/[INSTALL-BIN]/{name}$1"),
        ));
    }
    self
}
/// Filtering for various keys in a `pyvenv.cfg` file that will vary
/// depending on the specific machine used:
/// - `home = foo/bar/baz/python3.X.X/bin`
/// - `uv = X.Y.Z`
/// - `extends-environment = <path/to/parent/venv>`
#[must_use]
pub fn with_pyvenv_cfg_filters(mut self) -> Self {
    let added_filters = [
        (r"home = .+".to_string(), "home = [PYTHON_HOME]".to_string()),
        (
            r"uv = \d+\.\d+\.\d+(-(alpha|beta|rc)\.\d+)?(\+\d+)?".to_string(),
            "uv = [UV_VERSION]".to_string(),
        ),
        (
            r"extends-environment = .+".to_string(),
            "extends-environment = [PARENT_VENV]".to_string(),
        ),
    ];
    // Prepend these so they apply before any existing filters; reversing the
    // iterator reproduces the ordering of repeated `insert(0, ...)` calls.
    self.filters.splice(0..0, added_filters.into_iter().rev());
    self
}
/// Add extra filtering for ` -> <PATH>` symlink display for Python versions in the test
/// context, e.g., for use in `uv python list`.
#[must_use]
pub fn with_filtered_python_symlinks(mut self) -> Self {
for (version, executable) in &self.python_versions {
// Only executables that are actually symlinks carry a ` -> <target>` suffix to strip.
if fs_err::symlink_metadata(executable).unwrap().is_symlink() {
self.filters.extend(
Self::path_patterns(executable.read_link().unwrap())
.into_iter()
.map(|pattern| (format! {" -> {pattern}"}, String::new())),
);
}
// Drop links that are byproducts of the test context too
self.filters.push((
regex::escape(&format!(" -> [PYTHON-{version}]")),
String::new(),
));
}
self
}
/// Add extra standard filtering for a given path.
#[must_use]
pub fn with_filtered_path(mut self, path: &Path, name: &str) -> Self {
    // Sloppy but necessary: prepend rather than append so these patterns apply
    // before other filters (e.g., `[TMP]`) that could transform the path first.
    let replacement = format!("[{name}]/");
    for pattern in Self::path_patterns(path) {
        self.filters.insert(0, (pattern, replacement.clone()));
    }
    self
}
/// Adds a filter that specifically ignores the link mode warning.
///
/// This occurs in some cases and can be used on an ad hoc basis to squash
/// the warning in the snapshots. This is useful because the warning does
/// not consistently appear. It is dependent on the environment. (For
/// example, sometimes it's dependent on whether `/tmp` and `~/.local` live
/// on the same file system.)
#[inline]
pub fn with_filtered_link_mode_warning(mut self) -> Self {
    // The pattern spans the warning line plus the two lines that follow it.
    self.filters.push((
        "warning: Failed to hardlink files; .*\n.*\n.*\n".to_string(),
        String::new(),
    ));
    self
}
/// Adds a filter for platform-specific errors when a file is not executable.
#[inline]
pub fn with_filtered_not_executable(mut self) -> Self {
    let pattern = if cfg!(unix) {
        r"Permission denied \(os error 13\)".to_string()
    } else {
        r"\%1 is not a valid Win32 application. \(os error 193\)".to_string()
    };
    self.filters.push((pattern, "[PERMISSION DENIED]".to_string()));
    self
}
/// Adds a filter that ignores platform information in a Python installation key.
///
/// For example, the trailing `<os>-<arch>-<libc>` of a key such as
/// `cpython-3.12.5-macos-aarch64-none` is collapsed to `[PLATFORM]`.
pub fn with_filtered_python_keys(mut self) -> Self {
// Filter platform keys
let platform_re = r"(?x)
( # We capture the group before the platform
(?:cpython|pypy|graalpy)# Python implementation
-
\d+\.\d+ # Major and minor version
(?: # The patch version is handled separately
\.
(?:
\[X\] # A previously filtered patch version [X]
| # OR
\d+ # An actual patch version
)
)? # (we allow the patch version to be missing entirely, e.g., in a request)
(?:(?:a|b|rc)[0-9]+)? # Pre-release version component, e.g., `a6` or `rc2`
(?:[td])? # A short variant, such as `t` (for freethreaded) or `d` (for debug)
(?:(\+[a-z]+)+)? # A long variant, such as `+freethreaded` or `+freethreaded+debug`
)
-
[a-z0-9]+ # Operating system (e.g., 'macos')
-
[a-z0-9_]+ # Architecture (e.g., 'aarch64')
-
[a-z]+ # Libc (e.g., 'none')
";
self.filters
.push((platform_re.to_string(), "$1-[PLATFORM]".to_string()));
self
}
/// Add a filter that masks the temporary directory when it appears with
/// backslash (Windows-style) separators.
pub fn with_filtered_windows_temp_dir(mut self) -> Self {
    let windows_path = self
        .temp_dir
        .simplified_display()
        .to_string()
        .replace('/', "\\");
    self.filters
        .push((regex::escape(&windows_path), "[TEMP_DIR]".to_string()));
    self
}
/// Adds filters for non-deterministic `CycloneDX` data.
pub fn with_cyclonedx_filters(mut self) -> Self {
// The document serial number is a random UUID URN.
self.filters.push((
r"urn:uuid:[0-9a-f]{8}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{4}-[0-9a-f]{12}".to_string(),
"[SERIAL_NUMBER]".to_string(),
));
// The generation timestamp varies per run.
self.filters.push((
r#""timestamp": "[0-9]{4}-[0-9]{2}-[0-9]{2}T[0-9]{2}:[0-9]{2}:[0-9]{2}\.[0-9]+Z""#
.to_string(),
r#""timestamp": "[TIMESTAMP]""#.to_string(),
));
// The tool entry embeds the current uv version.
self.filters.push((
r#""name": "uv",\s*"version": "\d+\.\d+\.\d+(-(alpha|beta|rc)\.\d+)?(\+\d+)?""#
.to_string(),
r#""name": "uv",
"version": "[VERSION]""#
.to_string(),
));
self
}
/// Add a filter that collapses runs of spaces/tabs into a single space.
#[must_use]
pub fn with_collapsed_whitespace(mut self) -> Self {
    let collapse = (r"[ \t]+".to_string(), " ".to_string());
    self.filters.push(collapse);
    self
}
/// Use a shared global cache for Python downloads.
#[must_use]
pub fn with_python_download_cache(mut self) -> Self {
    // Respect `UV_PYTHON_CACHE_DIR` if set, or use the default cache directory.
    let cache_dir = match env::var_os(EnvVars::UV_PYTHON_CACHE_DIR) {
        Some(dir) => dir,
        None => uv_cache::Cache::from_settings(false, None)
            .unwrap()
            .bucket(CacheBucket::Python)
            .into(),
    };
    self.extra_env
        .push((EnvVars::UV_PYTHON_CACHE_DIR.into(), cache_dir));
    self
}
#[must_use]
pub fn with_empty_python_install_mirror(mut self) -> Self {
self.extra_env.push((
EnvVars::UV_PYTHON_INSTALL_MIRROR.into(),
String::new().into(),
));
self
}
/// Add extra directories and configuration for managed Python installations.
#[must_use]
pub fn with_managed_python_dirs(mut self) -> Self {
    let install_dir = self.temp_dir.join("managed");
    self.extra_env.extend([
        (
            EnvVars::UV_PYTHON_BIN_DIR.into(),
            self.bin_dir.as_os_str().to_owned(),
        ),
        (EnvVars::UV_PYTHON_INSTALL_DIR.into(), install_dir.into()),
        (EnvVars::UV_PYTHON_DOWNLOADS.into(), "automatic".into()),
    ]);
    self
}
/// Declare the given Python versions as managed (via
/// `UV_INTERNAL__TEST_PYTHON_MANAGED`) for all commands in this context.
pub fn with_versions_as_managed(mut self, versions: &[&str]) -> Self {
    let value = versions.iter().join(" ");
    self.extra_env.push((
        EnvVars::UV_INTERNAL__TEST_PYTHON_MANAGED.into(),
        value.into(),
    ));
    self
}
/// Add a custom filter to the `TestContext`.
pub fn with_filter(mut self, filter: (impl Into<String>, impl Into<String>)) -> Self {
    let (pattern, replacement) = filter;
    self.filters.push((pattern.into(), replacement.into()));
    self
}
/// Unsets the git credential helper by writing a `.gitconfig` into the
/// temporary home directory, isolating tests from the host's helper.
pub fn with_unset_git_credential_helper(self) -> Self {
let git_config = self.home_dir.child(".gitconfig");
git_config
.write_str(indoc! {r"
[credential]
helper =
"})
.expect("Failed to unset git credential helper");
self
}
/// Clear all registered filters on this `TestContext`.
pub fn clear_filters(mut self) -> Self {
    self.filters = Vec::new();
    self
}
/// Default to the canonicalized path to the temp directory. We need to do this because on
/// macOS (and Windows on GitHub Actions) the standard temp dir is a symlink. (On macOS, the
/// temporary directory is, like `/var/...`, which resolves to `/private/var/...`.)
///
/// It turns out that, at least on macOS, if we pass a symlink as `current_dir`, it gets
/// _immediately_ resolved (such that if you call `current_dir` in the running `Command`, it
/// returns resolved symlink). This breaks some snapshot tests, since we _don't_ want to
/// resolve symlinks for user-provided paths.
pub fn test_bucket_dir() -> PathBuf {
    let temp = std::env::temp_dir()
        .simple_canonicalize()
        .expect("failed to canonicalize temp dir");
    temp.join("uv").join("tests")
}
/// Create a new test context with multiple Python versions.
///
/// Does not create a virtual environment by default, but the first Python version
/// can be used to create a virtual environment with [`TestContext::create_venv`].
///
/// See [`TestContext::new`] if only a single version is desired.
pub fn new_with_versions(python_versions: &[&str]) -> Self {
let bucket = Self::test_bucket_dir();
fs_err::create_dir_all(&bucket).expect("Failed to create test bucket");
let root = tempfile::TempDir::new_in(bucket).expect("Failed to create test root directory");
// Create a `.git` directory to isolate tests that search for git boundaries from the state
// of the file system
fs_err::create_dir_all(root.path().join(".git"))
.expect("Failed to create `.git` placeholder in test root directory");
let temp_dir = ChildPath::new(root.path()).child("temp");
fs_err::create_dir_all(&temp_dir).expect("Failed to create test working directory");
let cache_dir = ChildPath::new(root.path()).child("cache");
fs_err::create_dir_all(&cache_dir).expect("Failed to create test cache directory");
let python_dir = ChildPath::new(root.path()).child("python");
fs_err::create_dir_all(&python_dir).expect("Failed to create test Python directory");
let bin_dir = ChildPath::new(root.path()).child("bin");
fs_err::create_dir_all(&bin_dir).expect("Failed to create test bin directory");
// When the `git` feature is disabled, enforce that the test suite does not use `git`
if cfg!(not(feature = "git")) {
Self::disallow_git_cli(&bin_dir).expect("Failed to setup disallowed `git` command");
}
let home_dir = ChildPath::new(root.path()).child("home");
fs_err::create_dir_all(&home_dir).expect("Failed to create test home directory");
let user_config_dir = if cfg!(windows) {
ChildPath::new(home_dir.path())
} else {
ChildPath::new(home_dir.path()).child(".config")
};
// Canonicalize the temp dir for consistent snapshot behavior
let canonical_temp_dir = temp_dir.canonicalize().unwrap();
let venv = ChildPath::new(canonical_temp_dir.join(".venv"));
let python_version = python_versions
.first()
.map(|version| PythonVersion::from_str(version).unwrap());
let site_packages = python_version
.as_ref()
.map(|version| site_packages_path(&venv, &format!("python{version}")));
// The workspace root directory is not available without walking up the tree
// https://github.com/rust-lang/cargo/issues/3946
let workspace_root = Path::new(&env::var(EnvVars::CARGO_MANIFEST_DIR).unwrap())
.parent()
.expect("CARGO_MANIFEST_DIR should be nested in workspace")
.parent()
.expect("CARGO_MANIFEST_DIR should be doubly nested in workspace")
.to_path_buf();
let download_list = ManagedPythonDownloadList::new_only_embedded().unwrap();
let python_versions: Vec<_> = python_versions
.iter()
.map(|version| PythonVersion::from_str(version).unwrap())
.zip(
python_installations_for_versions(&temp_dir, python_versions, &download_list)
.expect("Failed to find test Python versions"),
)
.collect();
// Construct directories for each Python executable on Unix where the executable names
// need to be normalized
if cfg!(unix) {
for (version, executable) in &python_versions {
let parent = python_dir.child(version.to_string());
parent.create_dir_all().unwrap();
parent.child("python3").symlink_to_file(executable).unwrap();
}
}
let mut filters = Vec::new();
filters.extend(
Self::path_patterns(get_bin())
.into_iter()
.map(|pattern| (pattern, "[UV]".to_string())),
);
// Exclude `link-mode` on Windows since we set it in the remote test suite
if cfg!(windows) {
filters.push((" --link-mode <LINK_MODE>".to_string(), String::new()));
filters.push((r#"link-mode = "copy"\n"#.to_string(), String::new()));
// Unix uses "exit status", Windows uses "exit code"
filters.push((r"exit code: ".to_string(), "exit status: ".to_string()));
}
for (version, executable) in &python_versions {
// Add filtering for the interpreter path
filters.extend(
Self::path_patterns(executable)
.into_iter()
.map(|pattern| (pattern, format!("[PYTHON-{version}]"))),
);
// And for the symlink we created in the test the Python path
filters.extend(
Self::path_patterns(python_dir.join(version.to_string()))
.into_iter()
.map(|pattern| {
(
format!("{pattern}[a-zA-Z0-9]*"),
format!("[PYTHON-{version}]"),
)
}),
);
// Add Python patch version filtering unless explicitly requested to ensure
// snapshots are patch version agnostic when it is not a part of the test.
if version.patch().is_none() {
filters.push((
format!(r"({})\.\d+", regex::escape(version.to_string().as_str())),
"$1.[X]".to_string(),
));
}
}
filters.extend(
Self::path_patterns(&bin_dir)
.into_iter()
.map(|pattern| (pattern, "[BIN]/".to_string())),
);
filters.extend(
Self::path_patterns(&cache_dir)
.into_iter()
.map(|pattern| (pattern, "[CACHE_DIR]/".to_string())),
);
if let Some(ref site_packages) = site_packages {
filters.extend(
Self::path_patterns(site_packages)
.into_iter()
.map(|pattern| (pattern, "[SITE_PACKAGES]/".to_string())),
);
}
filters.extend(
Self::path_patterns(&venv)
.into_iter()
.map(|pattern| (pattern, "[VENV]/".to_string())),
);
// Account for [`Simplified::user_display`] which is relative to the command working directory
if let Some(site_packages) = site_packages {
filters.push((
Self::path_pattern(
site_packages
.strip_prefix(&canonical_temp_dir)
.expect("The test site-packages directory is always in the tempdir"),
),
"[SITE_PACKAGES]/".to_string(),
));
}
// Filter Python library path differences between Windows and Unix
filters.push((
r"[\\/]lib[\\/]python\d+\.\d+[\\/]".to_string(),
"/[PYTHON-LIB]/".to_string(),
));
filters.push((r"[\\/]Lib[\\/]".to_string(), "/[PYTHON-LIB]/".to_string()));
filters.extend(
Self::path_patterns(&temp_dir)
.into_iter()
.map(|pattern| (pattern, "[TEMP_DIR]/".to_string())),
);
filters.extend(
Self::path_patterns(&python_dir)
.into_iter()
.map(|pattern| (pattern, "[PYTHON_DIR]/".to_string())),
);
let mut uv_user_config_dir = PathBuf::from(user_config_dir.path());
uv_user_config_dir.push("uv");
filters.extend(
Self::path_patterns(&uv_user_config_dir)
.into_iter()
.map(|pattern| (pattern, "[UV_USER_CONFIG_DIR]/".to_string())),
);
filters.extend(
Self::path_patterns(&user_config_dir)
.into_iter()
.map(|pattern| (pattern, "[USER_CONFIG_DIR]/".to_string())),
);
filters.extend(
Self::path_patterns(&home_dir)
.into_iter()
.map(|pattern| (pattern, "[HOME]/".to_string())),
);
filters.extend(
Self::path_patterns(&workspace_root)
.into_iter()
.map(|pattern| (pattern, "[WORKSPACE]/".to_string())),
);
// Make virtual environment activation cross-platform and shell-agnostic
filters.push((
r"Activate with: (.*)\\Scripts\\activate".to_string(),
"Activate with: source $1/[BIN]/activate".to_string(),
));
filters.push((
r"Activate with: Scripts\\activate".to_string(),
"Activate with: source [BIN]/activate".to_string(),
));
filters.push((
r"Activate with: source (.*/|)bin/activate(?:\.\w+)?".to_string(),
"Activate with: source $1[BIN]/activate".to_string(),
));
// Filter non-deterministic temporary directory names
// Note we apply this _after_ all the full paths to avoid breaking their matching
filters.push((r"(\\|\/)\.tmp.*(\\|\/)".to_string(), "/[TMP]/".to_string()));
// Account for platform prefix differences `file://` (Unix) vs `file:///` (Windows)
filters.push((r"file:///".to_string(), "file://".to_string()));
// Destroy any remaining UNC prefixes (Windows only)
filters.push((r"\\\\\?\\".to_string(), String::new()));
// Remove the version from the packse url in lockfile snapshots. This avoids having a huge
// diff any time we upgrade packse
filters.push((
format!("https://astral-sh.github.io/packse/{PACKSE_VERSION}"),
"https://astral-sh.github.io/packse/PACKSE_VERSION".to_string(),
));
// Developer convenience
if let Ok(packse_test_index) = env::var(EnvVars::UV_TEST_PACKSE_INDEX) {
filters.push((
packse_test_index.trim_end_matches('/').to_string(),
"https://astral-sh.github.io/packse/PACKSE_VERSION".to_string(),
));
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | true |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv-distribution-filename/src/lib.rs | crates/uv-distribution-filename/src/lib.rs | use std::fmt::{Display, Formatter};
use std::str::FromStr;
use uv_normalize::PackageName;
use uv_pep440::Version;
pub use build_tag::{BuildTag, BuildTagError};
pub use egg::{EggInfoFilename, EggInfoFilenameError};
pub use expanded_tags::{ExpandedTagError, ExpandedTags};
pub use extension::{DistExtension, ExtensionError, SourceDistExtension};
pub use source_dist::{SourceDistFilename, SourceDistFilenameError};
pub use wheel::{WheelFilename, WheelFilenameError};
mod build_tag;
mod egg;
mod expanded_tags;
mod extension;
mod source_dist;
mod splitter;
mod wheel;
mod wheel_tag;
/// A distribution filename: either a source distribution or a wheel.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum DistFilename {
/// A parsed source distribution filename.
SourceDistFilename(SourceDistFilename),
/// A parsed wheel filename.
WheelFilename(WheelFilename),
}
impl DistFilename {
/// Parse a filename as wheel or source dist name.
///
/// Returns [`None`] if the filename has no recognized distribution extension,
/// or if it fails to parse as the corresponding filename type.
pub fn try_from_filename(filename: &str, package_name: &PackageName) -> Option<Self> {
match DistExtension::from_path(filename) {
Ok(DistExtension::Wheel) => {
if let Ok(filename) = WheelFilename::from_str(filename) {
return Some(Self::WheelFilename(filename));
}
}
Ok(DistExtension::Source(extension)) => {
if let Ok(filename) = SourceDistFilename::parse(filename, extension, package_name) {
return Some(Self::SourceDistFilename(filename));
}
}
Err(_) => {}
}
None
}
/// Like [`DistFilename::try_from_filename`], but without knowing the package name.
///
/// Source dist filenames can be ambiguous, e.g. `a-1-1.tar.gz`. Without knowing the package name, we assume that
/// source dist filename version doesn't contain minus (the version is normalized).
pub fn try_from_normalized_filename(filename: &str) -> Option<Self> {
if let Ok(filename) = WheelFilename::from_str(filename) {
Some(Self::WheelFilename(filename))
} else if let Ok(filename) = SourceDistFilename::parsed_normalized_filename(filename) {
Some(Self::SourceDistFilename(filename))
} else {
None
}
}
/// The package name of the distribution.
pub fn name(&self) -> &PackageName {
match self {
Self::SourceDistFilename(filename) => &filename.name,
Self::WheelFilename(filename) => &filename.name,
}
}
/// The version of the distribution.
pub fn version(&self) -> &Version {
match self {
Self::SourceDistFilename(filename) => &filename.version,
Self::WheelFilename(filename) => &filename.version,
}
}
/// Consume the filename, returning its version.
pub fn into_version(self) -> Version {
match self {
Self::SourceDistFilename(filename) => filename.version,
Self::WheelFilename(filename) => filename.version,
}
}
/// Whether the file is a `bdist_wheel` or an `sdist`.
pub fn filetype(&self) -> &'static str {
match self {
Self::SourceDistFilename(_) => "sdist",
Self::WheelFilename(_) => "bdist_wheel",
}
}
}
impl Display for DistFilename {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        // Delegate to the inner filename's `Display`, forwarding the formatter
        // so any width/precision flags are preserved.
        let inner: &dyn Display = match self {
            Self::SourceDistFilename(filename) => filename,
            Self::WheelFilename(filename) => filename,
        };
        inner.fmt(f)
    }
}
#[cfg(test)]
mod tests {
    use crate::WheelFilename;
    #[test]
    fn wheel_filename_size() {
        // Pin the in-memory size of `WheelFilename` (48 bytes at the time of writing) so that
        // accidental growth of the type is caught at test time.
        assert_eq!(size_of::<WheelFilename>(), 48);
    }
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv-distribution-filename/src/build_tag.rs | crates/uv-distribution-filename/src/build_tag.rs | use std::num::ParseIntError;
use std::str::FromStr;
use uv_small_str::SmallString;
/// An error that can occur when parsing a [`BuildTag`] from a string.
#[derive(thiserror::Error, Debug)]
pub enum BuildTagError {
    /// The build tag was the empty string.
    #[error("must not be empty")]
    Empty,
    /// The build tag did not begin with an ASCII digit, as the spec requires.
    #[error("must start with a digit")]
    NoLeadingDigit,
    /// The leading digits could not be parsed as a `u64`.
    #[error(transparent)]
    ParseInt(#[from] ParseIntError),
}
/// The optional build tag for a wheel:
///
/// > Must start with a digit. Acts as a tie-breaker if two wheel file names are the same in all
/// > other respects (i.e. name, version, and other tags). Sort as an empty tuple if unspecified,
/// > else sort as a two-item tuple with the first item being the initial digits as an int, and the
/// > second item being the remainder of the tag as a str.
///
/// See: <https://packaging.python.org/en/latest/specifications/binary-distribution-format/#file-name-convention>
#[derive(
    Debug,
    Clone,
    Eq,
    PartialEq,
    Hash,
    Ord,
    PartialOrd,
    rkyv::Archive,
    rkyv::Deserialize,
    rkyv::Serialize,
)]
#[rkyv(derive(Debug))]
// Field 0 holds the leading digits; field 1 holds the (optional) remainder of the tag.
// Invariant (upheld by `FromStr`): the suffix, when present, is non-empty and starts with a
// non-digit. The derived `Ord` compares the numeric part first, then the suffix, matching the
// spec's "two-item tuple" sort order.
pub struct BuildTag(u64, Option<SmallString>);
impl FromStr for BuildTag {
    type Err = BuildTagError;

    /// Parse a build tag: a leading run of ASCII digits, optionally followed by a suffix.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        // A build tag must not be empty.
        if s.is_empty() {
            return Err(BuildTagError::Empty);
        }
        // Find where the leading run of ASCII digits ends (e.g., 3 for `123abc`).
        let digits_end = s.find(|c: char| !c.is_ascii_digit()).unwrap_or(s.len());
        // A build tag must start with a digit (e.g., `abc` is rejected).
        if digits_end == 0 {
            return Err(BuildTagError::NoLeadingDigit);
        }
        let number = s[..digits_end].parse::<u64>()?;
        // Retain the remainder (if any) as the non-numeric suffix (e.g., `abc` for `123abc`).
        let suffix = (digits_end < s.len()).then(|| SmallString::from(&s[digits_end..]));
        Ok(Self(number, suffix))
    }
}
impl std::fmt::Display for BuildTag {
    /// Render as the numeric component immediately followed by the suffix, if any.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.0)?;
        if let Some(suffix) = &self.1 {
            write!(f, "{suffix}")?;
        }
        Ok(())
    }
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv-distribution-filename/src/expanded_tags.rs | crates/uv-distribution-filename/src/expanded_tags.rs | use std::str::FromStr;
use memchr::memchr;
use thiserror::Error;
use uv_platform_tags::{
AbiTag, LanguageTag, ParseAbiTagError, ParseLanguageTagError, ParsePlatformTagError,
PlatformTag, TagCompatibility, Tags,
};
use crate::splitter::MemchrSplitter;
use crate::wheel_tag::{WheelTag, WheelTagLarge, WheelTagSmall};
/// The expanded wheel tags as stored in a `WHEEL` file.
///
/// For example, if a wheel filename included `py2.py3-none-any`, the `WHEEL` file would include:
/// ```
/// Tag: py2-none-any
/// Tag: py3-none-any
/// ```
///
/// This type stores those expanded tags.
#[derive(Debug, Clone, Eq, PartialEq, Ord, PartialOrd)]
// Inline capacity of 1: presumably most `WHEEL` files declare a single tag triple, so the common
// case avoids a heap allocation — NOTE(review): assumption, confirm against real-world data.
pub struct ExpandedTags(smallvec::SmallVec<[WheelTag; 1]>);
impl ExpandedTags {
    /// Parse a list of expanded wheel tags (e.g., `py3-none-any`).
    ///
    /// Fails on the first tag that is structurally invalid (wrong number of `-` segments).
    pub fn parse<'a>(tags: impl IntoIterator<Item = &'a str>) -> Result<Self, ExpandedTagError> {
        let tags = tags
            .into_iter()
            .map(parse_expanded_tag)
            .collect::<Result<_, _>>()?;
        Ok(Self(tags))
    }
    /// Returns `true` if the wheel is compatible with the given tags.
    pub fn is_compatible(&self, compatible_tags: &Tags) -> bool {
        // The wheel is compatible if any one of its tag triples is compatible.
        self.0.iter().any(|tag| {
            compatible_tags.is_compatible(tag.python_tags(), tag.abi_tags(), tag.platform_tags())
        })
    }
    /// Return the Python tags in this expanded tag set.
    pub fn python_tags(&self) -> impl Iterator<Item = &LanguageTag> {
        self.0.iter().flat_map(WheelTag::python_tags)
    }
    /// Return the ABI tags in this expanded tag set.
    pub fn abi_tags(&self) -> impl Iterator<Item = &AbiTag> {
        self.0.iter().flat_map(WheelTag::abi_tags)
    }
    /// Return the platform tags in this expanded tag set.
    pub fn platform_tags(&self) -> impl Iterator<Item = &PlatformTag> {
        self.0.iter().flat_map(WheelTag::platform_tags)
    }
    /// Return the [`TagCompatibility`] of the wheel with the given tags
    pub fn compatibility(&self, compatible_tags: &Tags) -> TagCompatibility {
        // Flatten the per-triple tags into contiguous slices, as the `Tags` API requires.
        compatible_tags.compatibility(
            self.python_tags().copied().collect::<Vec<_>>().as_slice(),
            self.abi_tags().copied().collect::<Vec<_>>().as_slice(),
            self.platform_tags().cloned().collect::<Vec<_>>().as_slice(),
        )
    }
}
/// An error that can occur when parsing an expanded wheel tag (e.g., from a `WHEEL` file).
///
/// Note: the `Invalid*Tag` variants are currently not produced by [`parse_expanded_tag`], which
/// silently drops unparseable individual tags instead.
#[derive(Error, Debug)]
pub enum ExpandedTagError {
    #[error("The wheel tag \"{0}\" is missing a language tag")]
    MissingLanguageTag(String),
    #[error("The wheel tag \"{0}\" is missing an ABI tag")]
    MissingAbiTag(String),
    #[error("The wheel tag \"{0}\" is missing a platform tag")]
    MissingPlatformTag(String),
    #[error("The wheel tag \"{0}\" contains too many segments")]
    ExtraSegment(String),
    #[error("The wheel tag \"{0}\" contains an invalid language tag")]
    InvalidLanguageTag(String, #[source] ParseLanguageTagError),
    #[error("The wheel tag \"{0}\" contains an invalid ABI tag")]
    InvalidAbiTag(String, #[source] ParseAbiTagError),
    #[error("The wheel tag \"{0}\" contains an invalid platform tag")]
    InvalidPlatformTag(String, #[source] ParsePlatformTagError),
}
/// Parse an expanded (i.e., simplified) wheel tag, e.g. `py3-none-any`.
///
/// Unlike parsing tags in a wheel filename, each tag in this case is expected to contain exactly
/// three segments separated by `-`: a language tag, an ABI tag, and a platform tag; however,
/// empirically, some build backends do emit multipart tags (like `cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64`),
/// so we allow those too.
fn parse_expanded_tag(tag: &str) -> Result<WheelTag, ExpandedTagError> {
    let mut splitter = memchr::Memchr::new(b'-', tag.as_bytes());
    if tag.is_empty() {
        return Err(ExpandedTagError::MissingLanguageTag(tag.to_string()));
    }
    // The splitter yields the byte offsets of the `-` separators; exactly two are expected.
    let Some(python_tag_index) = splitter.next() else {
        return Err(ExpandedTagError::MissingAbiTag(tag.to_string()));
    };
    let Some(abi_tag_index) = splitter.next() else {
        return Err(ExpandedTagError::MissingPlatformTag(tag.to_string()));
    };
    if splitter.next().is_some() {
        return Err(ExpandedTagError::ExtraSegment(tag.to_string()));
    }
    let python_tag = &tag[..python_tag_index];
    let abi_tag = &tag[python_tag_index + 1..abi_tag_index];
    let platform_tag = &tag[abi_tag_index + 1..];
    // No `.` anywhere means each segment is a single tag, so the compact (single-triple)
    // representation can be used — provided every segment also parses (checked below).
    let is_small = memchr(b'.', tag.as_bytes()).is_none();
    if let Some(small) = is_small
        .then(|| {
            // If any individual tag fails to parse, fall through to the large representation.
            Some(WheelTagSmall {
                python_tag: LanguageTag::from_str(python_tag).ok()?,
                abi_tag: AbiTag::from_str(abi_tag).ok()?,
                platform_tag: PlatformTag::from_str(platform_tag).ok()?,
            })
        })
        .flatten()
    {
        Ok(WheelTag::Small { small })
    } else {
        Ok(WheelTag::Large {
            large: Box::new(WheelTagLarge {
                build_tag: None,
                // Unparseable individual tags are silently dropped here; the original string is
                // preserved verbatim in `repr`.
                python_tag: MemchrSplitter::split(python_tag, b'.')
                    .map(LanguageTag::from_str)
                    .filter_map(Result::ok)
                    .collect(),
                abi_tag: MemchrSplitter::split(abi_tag, b'.')
                    .map(AbiTag::from_str)
                    .filter_map(Result::ok)
                    .collect(),
                platform_tag: MemchrSplitter::split(platform_tag, b'.')
                    .map(PlatformTag::from_str)
                    .filter_map(Result::ok)
                    .collect(),
                repr: tag.into(),
            }),
        })
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_parse_simple_expanded_tag() {
let tags = ExpandedTags::parse(vec!["py3-none-any"]).unwrap();
insta::assert_debug_snapshot!(tags, @r"
ExpandedTags(
[
Small {
small: WheelTagSmall {
python_tag: Python {
major: 3,
minor: None,
},
abi_tag: None,
platform_tag: Any,
},
},
],
)
");
}
#[test]
fn test_parse_multiple_expanded_tags() {
let tags = ExpandedTags::parse(vec![
"py2-none-any",
"py3-none-any",
"cp39-cp39-linux_x86_64",
])
.unwrap();
insta::assert_debug_snapshot!(tags, @r"
ExpandedTags(
[
Small {
small: WheelTagSmall {
python_tag: Python {
major: 2,
minor: None,
},
abi_tag: None,
platform_tag: Any,
},
},
Small {
small: WheelTagSmall {
python_tag: Python {
major: 3,
minor: None,
},
abi_tag: None,
platform_tag: Any,
},
},
Small {
small: WheelTagSmall {
python_tag: CPython {
python_version: (
3,
9,
),
},
abi_tag: CPython {
gil_disabled: false,
python_version: (
3,
9,
),
},
platform_tag: Linux {
arch: X86_64,
},
},
},
],
)
");
}
#[test]
fn test_parse_complex_platform_tag() {
let tags = ExpandedTags::parse(vec![
"cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64",
])
.unwrap();
insta::assert_debug_snapshot!(tags, @r#"
ExpandedTags(
[
Large {
large: WheelTagLarge {
build_tag: None,
python_tag: [
CPython {
python_version: (
3,
12,
),
},
],
abi_tag: [
CPython {
gil_disabled: false,
python_version: (
3,
12,
),
},
],
platform_tag: [
Manylinux {
major: 2,
minor: 17,
arch: X86_64,
},
Manylinux2014 {
arch: X86_64,
},
],
repr: "cp312-cp312-manylinux_2_17_x86_64.manylinux2014_x86_64",
},
},
],
)
"#);
}
#[test]
fn test_parse_unknown_expanded_tag() {
let tags = ExpandedTags::parse(vec!["py3-foo-any"]).unwrap();
insta::assert_debug_snapshot!(tags, @r#"
ExpandedTags(
[
Large {
large: WheelTagLarge {
build_tag: None,
python_tag: [
Python {
major: 3,
minor: None,
},
],
abi_tag: [],
platform_tag: [
Any,
],
repr: "py3-foo-any",
},
},
],
)
"#);
}
#[test]
fn test_parse_expanded_tag_with_dots() {
let tags = ExpandedTags::parse(vec!["py2.py3-none-any"]).unwrap();
insta::assert_debug_snapshot!(tags, @r#"
ExpandedTags(
[
Large {
large: WheelTagLarge {
build_tag: None,
python_tag: [
Python {
major: 2,
minor: None,
},
Python {
major: 3,
minor: None,
},
],
abi_tag: [
None,
],
platform_tag: [
Any,
],
repr: "py2.py3-none-any",
},
},
],
)
"#);
}
#[test]
fn test_error_missing_language_tag() {
let err = ExpandedTags::parse(vec![""]).unwrap_err();
insta::assert_debug_snapshot!(err, @r#"
MissingLanguageTag(
"",
)
"#);
}
#[test]
fn test_error_missing_abi_tag() {
let err = ExpandedTags::parse(vec!["py3"]).unwrap_err();
insta::assert_debug_snapshot!(err, @r#"
MissingAbiTag(
"py3",
)
"#);
}
#[test]
fn test_error_missing_platform_tag() {
let err = ExpandedTags::parse(vec!["py3-none"]).unwrap_err();
insta::assert_debug_snapshot!(err, @r#"
MissingPlatformTag(
"py3-none",
)
"#);
}
#[test]
fn test_error_extra_segment() {
let err = ExpandedTags::parse(vec!["py3-none-any-extra"]).unwrap_err();
insta::assert_debug_snapshot!(err, @r#"
ExtraSegment(
"py3-none-any-extra",
)
"#);
}
#[test]
fn test_parse_expanded_tag_single_segment() {
let result = parse_expanded_tag("py3-none-any");
assert!(result.is_ok());
let tag = result.unwrap();
insta::assert_debug_snapshot!(tag, @r"
Small {
small: WheelTagSmall {
python_tag: Python {
major: 3,
minor: None,
},
abi_tag: None,
platform_tag: Any,
},
}
");
}
#[test]
fn test_parse_expanded_tag_multi_segment() {
let result = parse_expanded_tag("cp39.cp310-cp39.cp310-linux_x86_64.linux_i686");
assert!(result.is_ok());
let tag = result.unwrap();
insta::assert_debug_snapshot!(tag, @r#"
Large {
large: WheelTagLarge {
build_tag: None,
python_tag: [
CPython {
python_version: (
3,
9,
),
},
CPython {
python_version: (
3,
10,
),
},
],
abi_tag: [
CPython {
gil_disabled: false,
python_version: (
3,
9,
),
},
CPython {
gil_disabled: false,
python_version: (
3,
10,
),
},
],
platform_tag: [
Linux {
arch: X86_64,
},
Linux {
arch: X86,
},
],
repr: "cp39.cp310-cp39.cp310-linux_x86_64.linux_i686",
},
}
"#);
}
#[test]
fn test_parse_expanded_tag_empty() {
let result = parse_expanded_tag("");
assert!(result.is_err());
insta::assert_debug_snapshot!(result.unwrap_err(), @r#"
MissingLanguageTag(
"",
)
"#);
}
#[test]
fn test_parse_expanded_tag_one_segment() {
let result = parse_expanded_tag("python");
assert!(result.is_err());
insta::assert_debug_snapshot!(result.unwrap_err(), @r#"
MissingAbiTag(
"python",
)
"#);
}
#[test]
fn test_parse_expanded_tag_two_segments() {
let result = parse_expanded_tag("py3-none");
assert!(result.is_err());
insta::assert_debug_snapshot!(result.unwrap_err(), @r#"
MissingPlatformTag(
"py3-none",
)
"#);
}
#[test]
fn test_parse_expanded_tag_four_segments() {
let result = parse_expanded_tag("py3-none-any-extra");
assert!(result.is_err());
insta::assert_debug_snapshot!(result.unwrap_err(), @r#"
ExtraSegment(
"py3-none-any-extra",
)
"#);
}
#[test]
fn test_expanded_tags_ordering() {
let tags1 = ExpandedTags::parse(vec!["py3-none-any"]).unwrap();
let tags2 = ExpandedTags::parse(vec!["py3-none-any"]).unwrap();
let tags3 = ExpandedTags::parse(vec!["py2-none-any"]).unwrap();
assert_eq!(tags1, tags2);
assert_ne!(tags1, tags3);
}
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv-distribution-filename/src/source_dist.rs | crates/uv-distribution-filename/src/source_dist.rs | use std::fmt::{Display, Formatter};
use std::str::FromStr;
use crate::SourceDistExtension;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use uv_normalize::{InvalidNameError, PackageName};
use uv_pep440::{Version, VersionParseError};
/// Note that this is a normalized and not an exact representation, keep the original string if you
/// need the latter.
#[derive(
    Clone,
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Serialize,
    Deserialize,
    rkyv::Archive,
    rkyv::Deserialize,
    rkyv::Serialize,
)]
#[rkyv(derive(Debug))]
pub struct SourceDistFilename {
    /// The normalized package name.
    pub name: PackageName,
    /// The version of the distribution.
    pub version: Version,
    /// The archive extension (e.g., `tar.gz` or `zip`).
    pub extension: SourceDistExtension,
}
impl SourceDistFilename {
    /// No `FromStr` impl since we need to know the package name to be able to reasonable parse
    /// these (consider e.g. `a-1-1.zip`)
    ///
    /// Parses `{name}-{version}.{extension}`, verifying that the filename starts with the
    /// expected (normalized) package name.
    pub fn parse(
        filename: &str,
        extension: SourceDistExtension,
        package_name: &PackageName,
    ) -> Result<Self, SourceDistFilenameError> {
        // Drop the extension (e.g., given `tar.gz`, drop `.tar.gz`).
        if filename.len() <= extension.name().len() + 1 {
            return Err(SourceDistFilenameError {
                filename: filename.to_string(),
                kind: SourceDistFilenameErrorKind::Extension,
            });
        }
        let stem = &filename[..(filename.len() - (extension.name().len() + 1))];
        // The stem must be at least `{name}-` plus a non-empty version.
        if stem.len() <= package_name.as_ref().len() + "-".len() {
            return Err(SourceDistFilenameError {
                filename: filename.to_string(),
                kind: SourceDistFilenameErrorKind::Filename(package_name.clone()),
            });
        }
        // NOTE(review): byte slicing here assumes normalized package names are ASCII, so the
        // index always lands on a char boundary — confirm against `PackageName`'s invariants.
        let actual_package_name = PackageName::from_str(&stem[..package_name.as_ref().len()])
            .map_err(|err| SourceDistFilenameError {
                filename: filename.to_string(),
                kind: SourceDistFilenameErrorKind::PackageName(err),
            })?;
        if actual_package_name != *package_name {
            return Err(SourceDistFilenameError {
                filename: filename.to_string(),
                kind: SourceDistFilenameErrorKind::Filename(package_name.clone()),
            });
        }
        // We checked the length above
        let version =
            Version::from_str(&stem[package_name.as_ref().len() + "-".len()..]).map_err(|err| {
                SourceDistFilenameError {
                    filename: filename.to_string(),
                    kind: SourceDistFilenameErrorKind::Version(err),
                }
            })?;
        Ok(Self {
            name: package_name.clone(),
            version,
            extension,
        })
    }
    /// Like [`SourceDistFilename::parse`], but without knowing the package name.
    ///
    /// Source dist filenames can be ambiguous, e.g. `a-1-1.tar.gz`. Without knowing the package name, we assume that
    /// source dist filename version doesn't contain minus (the version is normalized).
    pub fn parsed_normalized_filename(filename: &str) -> Result<Self, SourceDistFilenameError> {
        let Ok(extension) = SourceDistExtension::from_path(filename) else {
            return Err(SourceDistFilenameError {
                filename: filename.to_string(),
                kind: SourceDistFilenameErrorKind::Extension,
            });
        };
        // Drop the extension (e.g., given `tar.gz`, drop `.tar.gz`).
        if filename.len() <= extension.name().len() + 1 {
            return Err(SourceDistFilenameError {
                filename: filename.to_string(),
                kind: SourceDistFilenameErrorKind::Extension,
            });
        }
        let stem = &filename[..(filename.len() - (extension.name().len() + 1))];
        // Split on the *last* `-`: a normalized version contains no `-`, so everything before it
        // is taken to be the package name.
        let Some((package_name, version)) = stem.rsplit_once('-') else {
            return Err(SourceDistFilenameError {
                filename: filename.to_string(),
                kind: SourceDistFilenameErrorKind::Minus,
            });
        };
        let package_name =
            PackageName::from_str(package_name).map_err(|err| SourceDistFilenameError {
                filename: filename.to_string(),
                kind: SourceDistFilenameErrorKind::PackageName(err),
            })?;
        // We checked the length above
        let version = Version::from_str(version).map_err(|err| SourceDistFilenameError {
            filename: filename.to_string(),
            kind: SourceDistFilenameErrorKind::Version(err),
        })?;
        Ok(Self {
            name: package_name,
            version,
            extension,
        })
    }
}
impl Display for SourceDistFilename {
    /// Render in normalized `{name}-{version}.{extension}` form, with the name escaped as in a
    /// `.dist-info` directory.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let name = self.name.as_dist_info_name();
        write!(
            f,
            "{name}-{version}.{extension}",
            version = self.version,
            extension = self.extension
        )
    }
}
/// An error that can occur when parsing a [`SourceDistFilename`].
#[derive(Error, Debug, Clone)]
pub struct SourceDistFilenameError {
    /// The filename that failed to parse.
    filename: String,
    /// The reason the filename failed to parse.
    kind: SourceDistFilenameErrorKind,
}
impl Display for SourceDistFilenameError {
    /// Prefix the underlying parse error with the offending filename.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "Failed to parse source distribution filename {filename}: {kind}",
            filename = self.filename,
            kind = self.kind
        )
    }
}
/// The reason a source distribution filename failed to parse.
#[derive(Error, Debug, Clone)]
enum SourceDistFilenameErrorKind {
    /// The filename does not start with the expected (normalized) package name.
    #[error("Name doesn't start with package name {0}")]
    Filename(PackageName),
    /// The filename does not end in a recognized source distribution extension.
    #[error("File extension is invalid")]
    Extension,
    /// The version segment could not be parsed.
    #[error("Version section is invalid")]
    Version(#[from] VersionParseError),
    /// The name segment is not a valid package name.
    #[error(transparent)]
    PackageName(#[from] InvalidNameError),
    /// The stem contains no `-` separating name and version.
    #[error("Missing name-version separator")]
    Minus,
}
#[cfg(test)]
mod tests {
    use std::str::FromStr;
    use uv_normalize::PackageName;
    use crate::{SourceDistExtension, SourceDistFilename};
    /// Only test already normalized names since the parsing is lossy
    ///
    /// <https://packaging.python.org/en/latest/specifications/source-distribution-format/#source-distribution-file-name>
    /// <https://packaging.python.org/en/latest/specifications/binary-distribution-format/#escaping-and-unicode>
    #[test]
    fn roundtrip() {
        // Every supported archive extension must parse and re-render byte-identically.
        for normalized in [
            "foo_lib-1.2.3.zip",
            "foo_lib-1.2.3a3.zip",
            "foo_lib-1.2.3.tar.gz",
            "foo_lib-1.2.3.tar.bz2",
            "foo_lib-1.2.3.tar.zst",
            "foo_lib-1.2.3.tar.xz",
            "foo_lib-1.2.3.tar.lz",
            "foo_lib-1.2.3.tar.lzma",
            "foo_lib-1.2.3.tgz",
            "foo_lib-1.2.3.tbz",
            "foo_lib-1.2.3.tlz",
            "foo_lib-1.2.3.txz",
        ] {
            let ext = SourceDistExtension::from_path(normalized).unwrap();
            assert_eq!(
                SourceDistFilename::parse(
                    normalized,
                    ext,
                    &PackageName::from_str("foo_lib").unwrap()
                )
                .unwrap()
                .to_string(),
                normalized
            );
        }
    }
    #[test]
    fn errors() {
        // `b-...` has the wrong package name; `a-1.2.3-gamma.3` has an unparseable version.
        for invalid in ["b-1.2.3.zip", "a-1.2.3-gamma.3.zip"] {
            let ext = SourceDistExtension::from_path(invalid).unwrap();
            assert!(
                SourceDistFilename::parse(invalid, ext, &PackageName::from_str("a").unwrap())
                    .is_err()
            );
        }
    }
    #[test]
    fn name_too_long() {
        // The expected package name is longer than the filename stem itself.
        assert!(
            SourceDistFilename::parse(
                "foo.zip",
                SourceDistExtension::Zip,
                &PackageName::from_str("foo-lib").unwrap()
            )
            .is_err()
        );
    }
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv-distribution-filename/src/extension.rs | crates/uv-distribution-filename/src/extension.rs | use std::fmt::{Display, Formatter};
use std::path::Path;
use serde::{Deserialize, Serialize};
use thiserror::Error;
/// The extension of a distribution artifact: either a wheel, or one of the supported source
/// distribution archive formats.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum DistExtension {
    /// A `.whl` file.
    Wheel,
    /// A source distribution archive (e.g., `.tar.gz` or `.zip`).
    Source(SourceDistExtension),
}
/// A supported source distribution archive extension (e.g., `.tar.gz` or `.zip`).
#[derive(
    Clone,
    Copy,
    Debug,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Hash,
    Serialize,
    Deserialize,
    rkyv::Archive,
    rkyv::Deserialize,
    rkyv::Serialize,
)]
#[rkyv(derive(Debug))]
pub enum SourceDistExtension {
    Tar,
    TarBz2,
    TarGz,
    TarLz,
    TarLzma,
    TarXz,
    TarZst,
    Tbz,
    Tgz,
    Tlz,
    Txz,
    Zip,
}
impl DistExtension {
    /// Extract the [`DistExtension`] from a path.
    ///
    /// Returns [`ExtensionError::Dist`] when the path has no UTF-8 extension, or an extension
    /// that is neither `whl` nor a recognized source distribution archive format.
    pub fn from_path(path: impl AsRef<Path>) -> Result<Self, ExtensionError> {
        let extension = path
            .as_ref()
            .extension()
            .and_then(|ext| ext.to_str())
            .ok_or(ExtensionError::Dist)?;
        if extension == "whl" {
            Ok(Self::Wheel)
        } else {
            match SourceDistExtension::from_path(path) {
                Ok(ext) => Ok(Self::Source(ext)),
                // Report the full set of supported distribution extensions, not just the
                // source distribution subset.
                Err(_) => Err(ExtensionError::Dist),
            }
        }
    }

    /// Return the name for the extension.
    pub fn name(&self) -> &'static str {
        match self {
            Self::Wheel => "whl",
            Self::Source(ext) => ext.name(),
        }
    }
}
impl SourceDistExtension {
    /// Extract the [`SourceDistExtension`] from a path.
    pub fn from_path(path: impl AsRef<Path>) -> Result<Self, ExtensionError> {
        /// Returns `true` when the file stem itself ends in `.tar` (e.g., `foo.tar.gz`).
        fn has_tar_stem(path: &Path) -> bool {
            let Some(stem) = path.file_stem() else {
                return false;
            };
            Path::new(stem)
                .extension()
                .is_some_and(|ext| ext.eq_ignore_ascii_case("tar"))
        }

        let path = path.as_ref();
        let extension = path
            .extension()
            .and_then(|ext| ext.to_str())
            .ok_or(ExtensionError::SourceDist)?;
        match extension {
            "zip" => Ok(Self::Zip),
            "tar" => Ok(Self::Tar),
            "tgz" => Ok(Self::Tgz),
            "tbz" => Ok(Self::Tbz),
            "txz" => Ok(Self::Txz),
            "tlz" => Ok(Self::Tlz),
            // Compression suffixes only count when stacked on a `.tar` stem.
            "gz" if has_tar_stem(path) => Ok(Self::TarGz),
            "bz2" if has_tar_stem(path) => Ok(Self::TarBz2),
            "xz" if has_tar_stem(path) => Ok(Self::TarXz),
            "lz" if has_tar_stem(path) => Ok(Self::TarLz),
            "lzma" if has_tar_stem(path) => Ok(Self::TarLzma),
            "zst" if has_tar_stem(path) => Ok(Self::TarZst),
            _ => Err(ExtensionError::SourceDist),
        }
    }

    /// Return the name for the extension.
    pub fn name(&self) -> &'static str {
        match self {
            Self::Tar => "tar",
            Self::TarBz2 => "tar.bz2",
            Self::TarGz => "tar.gz",
            Self::TarLz => "tar.lz",
            Self::TarLzma => "tar.lzma",
            Self::TarXz => "tar.xz",
            Self::TarZst => "tar.zst",
            Self::Tbz => "tbz",
            Self::Tgz => "tgz",
            Self::Tlz => "tlz",
            Self::Txz => "txz",
            Self::Zip => "zip",
        }
    }
}
impl Display for SourceDistExtension {
    /// Render as the canonical extension string (e.g., `tar.gz`).
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}", self.name())
    }
}
/// An error returned for unrecognized distribution file extensions.
///
/// The messages render as the list of supported extensions, so they can be embedded in
/// higher-level "expected one of ..." diagnostics.
#[derive(Error, Debug)]
pub enum ExtensionError {
    #[error(
        "`.whl`, `.tar.gz`, `.zip`, `.tar.bz2`, `.tar.lz`, `.tar.lzma`, `.tar.xz`, `.tar.zst`, `.tar`, `.tbz`, `.tgz`, `.tlz`, or `.txz`"
    )]
    Dist,
    #[error(
        "`.tar.gz`, `.zip`, `.tar.bz2`, `.tar.lz`, `.tar.lzma`, `.tar.xz`, `.tar.zst`, `.tar`, `.tbz`, `.tgz`, `.tlz`, or `.txz`"
    )]
    SourceDist,
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv-distribution-filename/src/wheel.rs | crates/uv-distribution-filename/src/wheel.rs | use std::fmt::{Display, Formatter};
use std::hash::Hash;
use std::str::FromStr;
use memchr::memchr;
use serde::{Deserialize, Deserializer, Serialize, Serializer, de};
use thiserror::Error;
use uv_cache_key::cache_digest;
use uv_normalize::{InvalidNameError, PackageName};
use uv_pep440::{Version, VersionParseError};
use uv_platform_tags::{
AbiTag, LanguageTag, ParseAbiTagError, ParseLanguageTagError, ParsePlatformTagError,
PlatformTag, TagCompatibility, Tags,
};
use crate::splitter::MemchrSplitter;
use crate::wheel_tag::{WheelTag, WheelTagLarge, WheelTagSmall};
use crate::{BuildTag, BuildTagError};
/// A parsed wheel filename, e.g., `foo-1.2.3-py3-none-any.whl`.
#[derive(
    Debug,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Hash,
    rkyv::Archive,
    rkyv::Deserialize,
    rkyv::Serialize,
)]
#[rkyv(derive(Debug))]
pub struct WheelFilename {
    /// The normalized package name.
    pub name: PackageName,
    /// The version of the distribution.
    pub version: Version,
    /// The Python, ABI, and platform tags (and optional build tag), stored compactly when the
    /// filename carries a single tag triple.
    tags: WheelTag,
}
impl FromStr for WheelFilename {
    type Err = WheelFilenameError;

    /// Parse a full wheel filename, which must carry the `.whl` extension.
    fn from_str(filename: &str) -> Result<Self, Self::Err> {
        match filename.strip_suffix(".whl") {
            Some(stem) => Self::parse(stem, filename),
            None => Err(WheelFilenameError::InvalidWheelFileName(
                filename.to_string(),
                "Must end with .whl".to_string(),
            )),
        }
    }
}
impl Display for WheelFilename {
    /// Render as `{name}-{version}-{tags}.whl`, with the name in `.dist-info` form.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let name = self.name.as_dist_info_name();
        write!(
            f,
            "{name}-{version}-{tags}.whl",
            version = self.version,
            tags = self.tags
        )
    }
}
impl WheelFilename {
    /// Create a [`WheelFilename`] from its components.
    ///
    /// The result uses the compact single-triple representation, with no build tag.
    pub fn new(
        name: PackageName,
        version: Version,
        python_tag: LanguageTag,
        abi_tag: AbiTag,
        platform_tag: PlatformTag,
    ) -> Self {
        Self {
            name,
            version,
            tags: WheelTag::Small {
                small: WheelTagSmall {
                    python_tag,
                    abi_tag,
                    platform_tag,
                },
            },
        }
    }
    /// Returns `true` if the wheel is compatible with the given tags.
    pub fn is_compatible(&self, compatible_tags: &Tags) -> bool {
        compatible_tags.is_compatible(self.python_tags(), self.abi_tags(), self.platform_tags())
    }
    /// Return the [`TagCompatibility`] of the wheel with the given tags
    pub fn compatibility(&self, compatible_tags: &Tags) -> TagCompatibility {
        compatible_tags.compatibility(self.python_tags(), self.abi_tags(), self.platform_tags())
    }
    /// The wheel filename without the extension.
    pub fn stem(&self) -> String {
        format!(
            "{}-{}-{}",
            self.name.as_dist_info_name(),
            self.version,
            self.tags
        )
    }
    /// Returns a consistent cache key with a maximum length of 64 characters.
    ///
    /// Prefers `{version}-{tags}` if such an identifier fits within the maximum allowed length;
    /// otherwise, uses a truncated version of the version and a digest of the tags.
    pub fn cache_key(&self) -> String {
        const CACHE_KEY_MAX_LEN: usize = 64;
        let full = format!("{}-{}", self.version, self.tags);
        if full.len() <= CACHE_KEY_MAX_LEN {
            return full;
        }
        // Create a digest of the tag string (instead of its individual fields) to retain
        // compatibility across platforms, Rust versions, etc.
        let digest = cache_digest(&format!("{}", self.tags));
        // Truncate the version, but avoid trailing dots, plus signs, etc. to avoid ambiguity.
        let version_width = CACHE_KEY_MAX_LEN - 1 /* dash */ - 16 /* digest */;
        let mut version = self.version.to_string();
        // PANIC SAFETY: version strings can only contain ASCII characters.
        version.truncate(version_width);
        let version = version.trim_end_matches(['.', '+']);
        format!("{version}-{digest}")
    }
    /// Return the wheel's Python tags.
    pub fn python_tags(&self) -> &[LanguageTag] {
        self.tags.python_tags()
    }
    /// Return the wheel's ABI tags.
    pub fn abi_tags(&self) -> &[AbiTag] {
        self.tags.abi_tags()
    }
    /// Return the wheel's platform tags.
    pub fn platform_tags(&self) -> &[PlatformTag] {
        self.tags.platform_tags()
    }
    /// Return the wheel's build tag, if present.
    pub fn build_tag(&self) -> Option<&BuildTag> {
        self.tags.build_tag()
    }
    /// Parse a wheel filename from the stem (e.g., `foo-1.2.3-py3-none-any`).
    ///
    /// Rejects stems that still carry a `.whl` extension; use `FromStr` for full filenames.
    pub fn from_stem(stem: &str) -> Result<Self, WheelFilenameError> {
        // The wheel stem should not contain the `.whl` extension.
        if std::path::Path::new(stem)
            .extension()
            .is_some_and(|ext| ext.eq_ignore_ascii_case("whl"))
        {
            return Err(WheelFilenameError::UnexpectedExtension(stem.to_string()));
        }
        Self::parse(stem, stem)
    }
    /// Parse a wheel filename from the stem (e.g., `foo-1.2.3-py3-none-any`).
    ///
    /// The originating `filename` is used for high-fidelity error messages.
    fn parse(stem: &str, filename: &str) -> Result<Self, WheelFilenameError> {
        // The wheel filename should contain either five or six entries. If six, then the third
        // entry is the build tag. If five, then the third entry is the Python tag.
        // https://www.python.org/dev/peps/pep-0427/#file-name-convention
        //
        // The splitter yields the byte offsets of the `-` separators in order.
        let mut splitter = memchr::Memchr::new(b'-', stem.as_bytes());
        let Some(version) = splitter.next() else {
            return Err(WheelFilenameError::InvalidWheelFileName(
                filename.to_string(),
                "Must have a version".to_string(),
            ));
        };
        let Some(build_tag_or_python_tag) = splitter.next() else {
            return Err(WheelFilenameError::InvalidWheelFileName(
                filename.to_string(),
                "Must have a Python tag".to_string(),
            ));
        };
        let Some(python_tag_or_abi_tag) = splitter.next() else {
            return Err(WheelFilenameError::InvalidWheelFileName(
                filename.to_string(),
                "Must have an ABI tag".to_string(),
            ));
        };
        let Some(abi_tag_or_platform_tag) = splitter.next() else {
            return Err(WheelFilenameError::InvalidWheelFileName(
                filename.to_string(),
                "Must have a platform tag".to_string(),
            ));
        };
        let (name, version, build_tag, python_tag, abi_tag, platform_tag, is_small) =
            if let Some(platform_tag) = splitter.next() {
                // Six components: the third is a build tag.
                if splitter.next().is_some() {
                    return Err(WheelFilenameError::InvalidWheelFileName(
                        filename.to_string(),
                        "Must have 5 or 6 components, but has more".to_string(),
                    ));
                }
                (
                    &stem[..version],
                    &stem[version + 1..build_tag_or_python_tag],
                    Some(&stem[build_tag_or_python_tag + 1..python_tag_or_abi_tag]),
                    &stem[python_tag_or_abi_tag + 1..abi_tag_or_platform_tag],
                    &stem[abi_tag_or_platform_tag + 1..platform_tag],
                    &stem[platform_tag + 1..],
                    // Always take the slow path if a build tag is present.
                    false,
                )
            } else {
                // Five components: the third is the Python tag.
                (
                    &stem[..version],
                    &stem[version + 1..build_tag_or_python_tag],
                    None,
                    &stem[build_tag_or_python_tag + 1..python_tag_or_abi_tag],
                    &stem[python_tag_or_abi_tag + 1..abi_tag_or_platform_tag],
                    &stem[abi_tag_or_platform_tag + 1..],
                    // Determine whether any of the tag types contain a period, which would indicate
                    // that at least one of the tag types includes multiple tags (which in turn
                    // necessitates taking the slow path). Only the tag region (after the second
                    // `-`) is scanned, since the version itself may contain periods.
                    memchr(b'.', &stem.as_bytes()[build_tag_or_python_tag..]).is_none(),
                )
            };
        let name = PackageName::from_str(name)
            .map_err(|err| WheelFilenameError::InvalidPackageName(filename.to_string(), err))?;
        let version = Version::from_str(version)
            .map_err(|err| WheelFilenameError::InvalidVersion(filename.to_string(), err))?;
        let build_tag = build_tag
            .map(|build_tag| {
                BuildTag::from_str(build_tag)
                    .map_err(|err| WheelFilenameError::InvalidBuildTag(filename.to_string(), err))
            })
            .transpose()?;
        // If any individual tag fails to parse, fall back to the large representation.
        let tags = if let Some(small) = is_small
            .then(|| {
                Some(WheelTagSmall {
                    python_tag: LanguageTag::from_str(python_tag).ok()?,
                    abi_tag: AbiTag::from_str(abi_tag).ok()?,
                    platform_tag: PlatformTag::from_str(platform_tag).ok()?,
                })
            })
            .flatten()
        {
            WheelTag::Small { small }
        } else {
            // Store the plaintext representation of the tags.
            let repr = &stem[build_tag_or_python_tag + 1..];
            WheelTag::Large {
                large: Box::new(WheelTagLarge {
                    build_tag,
                    python_tag: MemchrSplitter::split(python_tag, b'.')
                        .map(LanguageTag::from_str)
                        .filter_map(Result::ok)
                        .collect(),
                    abi_tag: MemchrSplitter::split(abi_tag, b'.')
                        .map(AbiTag::from_str)
                        .filter_map(Result::ok)
                        .collect(),
                    platform_tag: MemchrSplitter::split(platform_tag, b'.')
                        .map(PlatformTag::from_str)
                        .filter_map(Result::ok)
                        .collect(),
                    repr: repr.into(),
                }),
            }
        };
        Ok(Self {
            name,
            version,
            tags,
        })
    }
}
impl<'de> Deserialize<'de> for WheelFilename {
    /// Deserialize a [`WheelFilename`] from its string form, via `FromStr`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        // A visitor that parses the borrowed string directly, without an owned intermediate.
        struct Visitor;
        impl de::Visitor<'_> for Visitor {
            type Value = WheelFilename;
            fn expecting(&self, f: &mut Formatter) -> std::fmt::Result {
                f.write_str("a string")
            }
            fn visit_str<E: de::Error>(self, v: &str) -> Result<Self::Value, E> {
                WheelFilename::from_str(v).map_err(de::Error::custom)
            }
        }
        deserializer.deserialize_str(Visitor)
    }
}
impl Serialize for WheelFilename {
    /// Serialize as the canonical string form (e.g., `foo-1.2.3-py3-none-any.whl`), mirroring
    /// the `Deserialize` implementation, which parses via `FromStr`.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // `collect_str` serializes the `Display` output directly, producing the same serialized
        // value as `serialize_str(&self.to_string())` without the intermediate `String`
        // allocation.
        serializer.collect_str(self)
    }
}
/// An error that can occur when parsing a [`WheelFilename`] from a filename or stem.
#[derive(Error, Debug)]
pub enum WheelFilenameError {
    #[error("The wheel filename \"{0}\" is invalid: {1}")]
    InvalidWheelFileName(String, String),
    #[error("The wheel filename \"{0}\" has an invalid version: {1}")]
    InvalidVersion(String, VersionParseError),
    #[error("The wheel filename \"{0}\" has an invalid package name")]
    InvalidPackageName(String, InvalidNameError),
    #[error("The wheel filename \"{0}\" has an invalid build tag: {1}")]
    InvalidBuildTag(String, BuildTagError),
    #[error("The wheel filename \"{0}\" has an invalid language tag: {1}")]
    InvalidLanguageTag(String, ParseLanguageTagError),
    #[error("The wheel filename \"{0}\" has an invalid ABI tag: {1}")]
    InvalidAbiTag(String, ParseAbiTagError),
    #[error("The wheel filename \"{0}\" has an invalid platform tag: {1}")]
    InvalidPlatformTag(String, ParsePlatformTagError),
    #[error("The wheel filename \"{0}\" is missing a language tag")]
    MissingLanguageTag(String),
    #[error("The wheel filename \"{0}\" is missing an ABI tag")]
    MissingAbiTag(String),
    #[error("The wheel filename \"{0}\" is missing a platform tag")]
    MissingPlatformTag(String),
    #[error("The wheel stem \"{0}\" has an unexpected extension")]
    UnexpectedExtension(String),
}
// Unit tests for wheel filename parsing, round-tripping, and cache-key generation.
// Error-message tests pin the exact diagnostics via inline `insta` snapshots, so
// the snapshot strings must stay in sync with the `WheelFilenameError` messages.
#[cfg(test)]
mod tests {
    use super::*;
    // Failure cases: each exercises one structural validation of the filename.
    #[test]
    fn err_not_whl_extension() {
        let err = WheelFilename::from_str("foo.rs").unwrap_err();
        insta::assert_snapshot!(err, @r###"The wheel filename "foo.rs" is invalid: Must end with .whl"###);
    }
    #[test]
    fn err_1_part_empty() {
        let err = WheelFilename::from_str(".whl").unwrap_err();
        insta::assert_snapshot!(err, @r###"The wheel filename ".whl" is invalid: Must have a version"###);
    }
    #[test]
    fn err_1_part_no_version() {
        let err = WheelFilename::from_str("foo.whl").unwrap_err();
        insta::assert_snapshot!(err, @r###"The wheel filename "foo.whl" is invalid: Must have a version"###);
    }
    #[test]
    fn err_2_part_no_pythontag() {
        let err = WheelFilename::from_str("foo-1.2.3.whl").unwrap_err();
        insta::assert_snapshot!(err, @r###"The wheel filename "foo-1.2.3.whl" is invalid: Must have a Python tag"###);
    }
    #[test]
    fn err_3_part_no_abitag() {
        let err = WheelFilename::from_str("foo-1.2.3-py3.whl").unwrap_err();
        insta::assert_snapshot!(err, @r###"The wheel filename "foo-1.2.3-py3.whl" is invalid: Must have an ABI tag"###);
    }
    #[test]
    fn err_4_part_no_platformtag() {
        let err = WheelFilename::from_str("foo-1.2.3-py3-none.whl").unwrap_err();
        insta::assert_snapshot!(err, @r###"The wheel filename "foo-1.2.3-py3-none.whl" is invalid: Must have a platform tag"###);
    }
    #[test]
    fn err_too_many_parts() {
        let err =
            WheelFilename::from_str("foo-1.2.3-202206090410-py3-none-any-whoops.whl").unwrap_err();
        insta::assert_snapshot!(err, @r###"The wheel filename "foo-1.2.3-202206090410-py3-none-any-whoops.whl" is invalid: Must have 5 or 6 components, but has more"###);
    }
    #[test]
    fn err_invalid_package_name() {
        let err = WheelFilename::from_str("f!oo-1.2.3-py3-none-any.whl").unwrap_err();
        insta::assert_snapshot!(err, @r###"The wheel filename "f!oo-1.2.3-py3-none-any.whl" has an invalid package name"###);
    }
    #[test]
    fn err_invalid_version() {
        let err = WheelFilename::from_str("foo-x.y.z-py3-none-any.whl").unwrap_err();
        insta::assert_snapshot!(err, @r###"The wheel filename "foo-x.y.z-py3-none-any.whl" has an invalid version: expected version to start with a number, but no leading ASCII digits were found"###);
    }
    #[test]
    fn err_invalid_build_tag() {
        let err = WheelFilename::from_str("foo-1.2.3-tag-py3-none-any.whl").unwrap_err();
        insta::assert_snapshot!(err, @r###"The wheel filename "foo-1.2.3-tag-py3-none-any.whl" has an invalid build tag: must start with a digit"###);
    }
    // Success cases: debug snapshots of the parsed structure.
    #[test]
    fn ok_single_tags() {
        insta::assert_debug_snapshot!(WheelFilename::from_str("foo-1.2.3-py3-none-any.whl"));
    }
    #[test]
    fn ok_multiple_tags() {
        insta::assert_debug_snapshot!(WheelFilename::from_str(
            "foo-1.2.3-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
        ));
    }
    #[test]
    fn ok_build_tag() {
        insta::assert_debug_snapshot!(WheelFilename::from_str(
            "foo-1.2.3-202206090410-py3-none-any.whl"
        ));
    }
    // Parsing then formatting must reproduce the original filename exactly.
    #[test]
    fn from_and_to_string() {
        let wheel_names = &[
            "django_allauth-0.51.0-py3-none-any.whl",
            "osm2geojson-0.2.4-py3-none-any.whl",
            "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
        ];
        for wheel_name in wheel_names {
            assert_eq!(
                WheelFilename::from_str(wheel_name).unwrap().to_string(),
                *wheel_name
            );
        }
    }
    #[test]
    fn cache_key() {
        // Short names should use `version-tags` format.
        let filename = WheelFilename::from_str("django_allauth-0.51.0-py3-none-any.whl").unwrap();
        insta::assert_snapshot!(filename.cache_key(), @"0.51.0-py3-none-any");
        // Common `manylinux` names should use still use the `version-tags` format.
        let filename = WheelFilename::from_str(
            "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl",
        )
        .unwrap();
        insta::assert_snapshot!(filename.cache_key(), @"1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64");
        // But larger names should use the `truncated(version)-digest(tags)` format.
        let filename = WheelFilename::from_str(
            "numpy-1.26.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.musllinux_1_2.whl",
        )
        .unwrap();
        insta::assert_snapshot!(filename.cache_key(), @"1.26.2-5a2adc379b2dc214");
        // Larger versions should get truncated.
        let filename = WheelFilename::from_str(
            "example-1.2.3.4.5.6.7.8.9.0.1.2.3.4.5.6.7.8.9.0.1.2.1.2.3.4.5.6.7.8.9.0.1.1.2-cp311-cp311-manylinux_2_17_x86_64.manylinux2014_x86_64.whl"
        ).unwrap();
        insta::assert_snapshot!(filename.cache_key(), @"1.2.3.4.5.6.7.8.9.0.1.2.3.4.5.6.7.8.9.0.1.2.1.2-80bf8598e9647cf7");
    }
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv-distribution-filename/src/egg.rs | crates/uv-distribution-filename/src/egg.rs | use std::str::FromStr;
use thiserror::Error;
use uv_normalize::{InvalidNameError, PackageName};
use uv_pep440::{Version, VersionParseError};
/// Errors that can occur while parsing an [`EggInfoFilename`].
#[derive(Error, Debug)]
pub enum EggInfoFilenameError {
    #[error("The filename \"{0}\" does not end in `.egg-info`")]
    InvalidExtension(String),
    #[error("The `.egg-info` filename \"{0}\" is missing a package name")]
    MissingPackageName(String),
    #[error("The `.egg-info` filename \"{0}\" has an invalid package name")]
    InvalidPackageName(String, InvalidNameError),
    #[error("The `.egg-info` filename \"{0}\" has an invalid version: {1}")]
    InvalidVersion(String, VersionParseError),
}
/// A filename parsed from an `.egg-info` file or directory (e.g., `zstandard-0.22.0-py3.12.egg-info`).
///
/// An `.egg-info` filename can contain up to four components, as in:
///
/// ```text
/// name ["-" version ["-py" pyver ["-" required_platform]]] "." ext
/// ```
///
/// Only the name and (optional) version are retained; the Python-version and
/// platform components, when present, are parsed past but discarded.
///
/// See: <https://setuptools.pypa.io/en/latest/deprecated/python_eggs.html#filename-embedded-metadata>
#[derive(Debug, Clone)]
pub struct EggInfoFilename {
    /// The normalized package name (the segment before the first `-`).
    pub name: PackageName,
    /// The version, if the stem contained a second `-`-separated segment.
    pub version: Option<Version>,
}
impl EggInfoFilename {
    /// Parse an `.egg-info` filename stem, requiring at least a package name.
    ///
    /// pip's equivalent regex is:
    ///
    /// ```text
    /// (?P<name>[^-]+)(-(?P<ver>[^-]+)(-py(?P<pyver>[^-]+)(-(?P<plat>.+))?)?)?
    /// ```
    ///
    /// Only the `name` and `ver` components are retained here.
    pub fn parse(stem: &str) -> Result<Self, EggInfoFilenameError> {
        // Reconstruct the full filename for error messages.
        let filename = || format!("{stem}.egg-info");
        let mut components = stem.split('-');
        let Some(name_part) = components.next() else {
            return Err(EggInfoFilenameError::MissingPackageName(filename()));
        };
        let name = PackageName::from_str(name_part)
            .map_err(|err| EggInfoFilenameError::InvalidPackageName(filename(), err))?;
        let version = match components.next() {
            Some(version_part) => Some(
                Version::from_str(version_part)
                    .map_err(|err| EggInfoFilenameError::InvalidVersion(filename(), err))?,
            ),
            None => None,
        };
        Ok(Self { name, version })
    }
}
impl FromStr for EggInfoFilename {
    type Err = EggInfoFilenameError;

    /// Parse a full filename, which must carry the `.egg-info` suffix.
    fn from_str(filename: &str) -> Result<Self, Self::Err> {
        match filename.strip_suffix(".egg-info") {
            Some(stem) => Self::parse(stem),
            None => Err(EggInfoFilenameError::InvalidExtension(filename.to_string())),
        }
    }
}
// Tests covering stems with four, three, two, and one component(s): the
// Python-version and platform segments are dropped, the version is optional.
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn egg_info_filename() {
        let filename = "zstandard-0.22.0-py3.12-darwin.egg-info";
        let parsed = EggInfoFilename::from_str(filename).unwrap();
        assert_eq!(parsed.name.as_ref(), "zstandard");
        assert_eq!(
            parsed.version.map(|v| v.to_string()),
            Some("0.22.0".to_string())
        );
        let filename = "zstandard-0.22.0-py3.12.egg-info";
        let parsed = EggInfoFilename::from_str(filename).unwrap();
        assert_eq!(parsed.name.as_ref(), "zstandard");
        assert_eq!(
            parsed.version.map(|v| v.to_string()),
            Some("0.22.0".to_string())
        );
        let filename = "zstandard-0.22.0.egg-info";
        let parsed = EggInfoFilename::from_str(filename).unwrap();
        assert_eq!(parsed.name.as_ref(), "zstandard");
        assert_eq!(
            parsed.version.map(|v| v.to_string()),
            Some("0.22.0".to_string())
        );
        // Name-only stem: version is absent.
        let filename = "zstandard.egg-info";
        let parsed = EggInfoFilename::from_str(filename).unwrap();
        assert_eq!(parsed.name.as_ref(), "zstandard");
        assert!(parsed.version.is_none());
    }
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv-distribution-filename/src/wheel_tag.rs | crates/uv-distribution-filename/src/wheel_tag.rs | use std::fmt::{Display, Formatter};
use crate::BuildTag;
use uv_platform_tags::{AbiTag, LanguageTag, PlatformTag};
use uv_small_str::SmallString;
/// A [`SmallVec`] type for storing tags.
///
/// Wheels tend to include only a handful of language, ABI, and platform tags, so we use a
/// [`SmallVec`] with an inline capacity of 3 to avoid heap allocation in the common case.
pub(crate) type TagSet<T> = smallvec::SmallVec<[T; 3]>;
/// The portion of the wheel filename following the name and version: the optional build tag, along
/// with the Python tag(s), ABI tag(s), and platform tag(s).
///
/// Most wheels consist of a single Python, ABI, and platform tag (and no build tag). We represent
/// such wheels with [`WheelTagSmall`], a variant with a smaller memory footprint and (generally)
/// zero allocations. The [`WheelTagLarge`] variant is used for wheels with multiple tags, a build
/// tag, or an unsupported tag (i.e., a tag that can't be represented by [`LanguageTag`],
/// [`AbiTag`], or [`PlatformTag`]). (Unsupported tags are filtered out, but retained in the display
/// representation of [`WheelTagLarge`].)
#[derive(
    Debug,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Hash,
    rkyv::Archive,
    rkyv::Deserialize,
    rkyv::Serialize,
)]
#[rkyv(derive(Debug))]
pub(crate) enum WheelTag {
    /// The common case: exactly one Python, ABI, and platform tag, and no build tag.
    Small { small: WheelTagSmall },
    /// The general case; boxed to keep the enum itself small.
    Large { large: Box<WheelTagLarge> },
}
impl WheelTag {
    /// Return the Python (language) tags.
    pub(crate) fn python_tags(&self) -> &[LanguageTag] {
        match self {
            Self::Small { small } => std::slice::from_ref(&small.python_tag),
            Self::Large { large } => &large.python_tag,
        }
    }

    /// Return the ABI tags.
    pub(crate) fn abi_tags(&self) -> &[AbiTag] {
        match self {
            Self::Small { small } => std::slice::from_ref(&small.abi_tag),
            Self::Large { large } => &large.abi_tag,
        }
    }

    /// Return the platform tags.
    pub(crate) fn platform_tags(&self) -> &[PlatformTag] {
        match self {
            Self::Small { small } => std::slice::from_ref(&small.platform_tag),
            Self::Large { large } => &large.platform_tag,
        }
    }

    /// Return the build tag, if present; only the large variant can carry one.
    pub(crate) fn build_tag(&self) -> Option<&BuildTag> {
        if let Self::Large { large } = self {
            large.build_tag.as_ref()
        } else {
            None
        }
    }
}
impl Display for WheelTag {
    /// Delegate formatting to whichever variant is held.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        match self {
            Self::Small { small } => Display::fmt(small, f),
            Self::Large { large } => Display::fmt(large, f),
        }
    }
}
/// The compact tag representation: exactly one Python, ABI, and platform tag,
/// and no build tag.
#[derive(
    Debug,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Hash,
    rkyv::Archive,
    rkyv::Deserialize,
    rkyv::Serialize,
)]
#[rkyv(derive(Debug))]
#[allow(clippy::struct_field_names)]
pub(crate) struct WheelTagSmall {
    /// The Python tag, e.g., `py3` in `1.2.3-py3-none-any`.
    pub(crate) python_tag: LanguageTag,
    /// The ABI tag, e.g., `none` in `1.2.3-py3-none-any`.
    pub(crate) abi_tag: AbiTag,
    /// The platform tag, e.g., `any` in `1.2.3-py3-none-any`.
    pub(crate) platform_tag: PlatformTag,
}
impl Display for WheelTagSmall {
    /// Format as `python-abi-platform`, e.g., `py3-none-any`.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let Self {
            python_tag,
            abi_tag,
            platform_tag,
        } = self;
        write!(f, "{python_tag}-{abi_tag}-{platform_tag}")
    }
}
/// The general tag representation: multiple tags per category, an optional build
/// tag, and the verbatim tag string (which may include unsupported tags).
#[derive(
    Debug,
    Clone,
    Eq,
    PartialEq,
    Ord,
    PartialOrd,
    Hash,
    rkyv::Archive,
    rkyv::Deserialize,
    rkyv::Serialize,
)]
#[rkyv(derive(Debug))]
#[allow(clippy::struct_field_names)]
pub(crate) struct WheelTagLarge {
    /// The optional build tag, e.g., `73` in `1.2.3-73-py3-none-any`.
    pub(crate) build_tag: Option<BuildTag>,
    /// The Python tag(s), e.g., `py3` in `1.2.3-73-py3-none-any`.
    pub(crate) python_tag: TagSet<LanguageTag>,
    /// The ABI tag(s), e.g., `none` in `1.2.3-73-py3-none-any`.
    pub(crate) abi_tag: TagSet<AbiTag>,
    /// The platform tag(s), e.g., `any` in `1.2.3-73-py3-none-any`.
    pub(crate) platform_tag: TagSet<PlatformTag>,
    /// The string representation of the tag.
    ///
    /// Preserves any unsupported tags that were filtered out when parsing the wheel filename.
    pub(crate) repr: SmallString,
}
impl Display for WheelTagLarge {
    /// Emit the verbatim tag string, including any unsupported tags.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.write_str(&self.repr)
    }
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
astral-sh/uv | https://github.com/astral-sh/uv/blob/2318e48e819080f37a002551035c2b1880a81a70/crates/uv-distribution-filename/src/splitter.rs | crates/uv-distribution-filename/src/splitter.rs | /// A simple splitter that uses `memchr` to find the next delimiter.
/// A delimiter-based splitter backed by `memchr`, analogous to `str::split` on a
/// single byte, except that a trailing empty segment is not yielded.
pub(crate) struct MemchrSplitter<'a> {
    // Iterator over the byte offsets of the delimiter within `haystack`.
    memchr: memchr::Memchr<'a>,
    // The string being split.
    haystack: &'a str,
    // Byte offset of the start of the next segment to yield.
    offset: usize,
}
impl<'a> MemchrSplitter<'a> {
    /// Create a splitter over `haystack` that splits on the byte `delimiter`.
    #[inline]
    pub(crate) fn split(haystack: &'a str, delimiter: u8) -> Self {
        let memchr = memchr::Memchr::new(delimiter, haystack.as_bytes());
        Self {
            memchr,
            haystack,
            offset: 0,
        }
    }
}
impl<'a> Iterator for MemchrSplitter<'a> {
    type Item = &'a str;

    #[inline(always)]
    #[allow(clippy::inline_always)]
    fn next(&mut self) -> Option<Self::Item> {
        match self.memchr.next() {
            Some(index) => {
                // Yield the segment between the previous delimiter and this one
                // (possibly empty, for adjacent delimiters).
                let start = self.offset;
                self.offset = index + 1;
                Some(&self.haystack[start..index])
            }
            None if self.offset < self.haystack.len() => {
                // No more delimiters: yield the trailing segment, if non-empty.
                let start = self.offset;
                self.offset = self.haystack.len();
                Some(&self.haystack[start..])
            }
            None => None,
        }
    }

    #[inline]
    fn size_hint(&self) -> (usize, Option<usize>) {
        let remaining = self.haystack.len() - self.offset;
        // We know we'll return at least one item if there's remaining text.
        let min = usize::from(remaining > 0);
        // Every yielded item advances `offset` by at least one byte, so the number
        // of remaining items is bounded by the number of remaining bytes. (The
        // previous bound of `remaining / 2 + min` assumed at least one character
        // between delimiters, which under-counts when delimiters are adjacent and
        // the segments between them are empty — an upper bound smaller than the
        // true item count violates the `Iterator::size_hint` contract.)
        (min, Some(remaining))
    }
}
| rust | Apache-2.0 | 2318e48e819080f37a002551035c2b1880a81a70 | 2026-01-04T15:31:58.679374Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/parse_grammar.rs | crates/generate/src/parse_grammar.rs | use std::collections::HashSet;
use log::warn;
use regex::Regex;
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value};
use thiserror::Error;
use crate::{
grammars::{InputGrammar, PrecedenceEntry, ReservedWordContext, Variable, VariableType},
rules::{Precedence, Rule},
};
/// The JSON representation of a single grammar rule, mirroring the `grammar.json`
/// schema emitted by the tree-sitter JavaScript DSL. Variant names are uppercase
/// to match the `"type"` discriminator values in the JSON.
#[derive(Deserialize)]
#[serde(tag = "type")]
#[allow(non_camel_case_types)]
#[allow(clippy::upper_case_acronyms)]
enum RuleJSON {
    ALIAS {
        content: Box<Self>,
        named: bool,
        value: String,
    },
    BLANK,
    STRING {
        value: String,
    },
    PATTERN {
        value: String,
        // Regex flags, e.g. `i` for case-insensitivity.
        flags: Option<String>,
    },
    SYMBOL {
        name: String,
    },
    CHOICE {
        members: Vec<Self>,
    },
    FIELD {
        name: String,
        content: Box<Self>,
    },
    SEQ {
        members: Vec<Self>,
    },
    REPEAT {
        content: Box<Self>,
    },
    REPEAT1 {
        content: Box<Self>,
    },
    PREC_DYNAMIC {
        value: i32,
        content: Box<Self>,
    },
    PREC_LEFT {
        value: PrecedenceValueJSON,
        content: Box<Self>,
    },
    PREC_RIGHT {
        value: PrecedenceValueJSON,
        content: Box<Self>,
    },
    PREC {
        value: PrecedenceValueJSON,
        content: Box<Self>,
    },
    TOKEN {
        content: Box<Self>,
    },
    IMMEDIATE_TOKEN {
        content: Box<Self>,
    },
    RESERVED {
        context_name: String,
        content: Box<Self>,
    },
}
/// A precedence value in `grammar.json`: either a numeric level or a named
/// precedence declared in the grammar's `precedences` lists.
#[derive(Deserialize)]
#[serde(untagged)]
enum PrecedenceValueJSON {
    Integer(i32),
    Name(String),
}
/// The top-level `grammar.json` document. All sections other than `name` and
/// `rules` are optional and default to empty.
#[derive(Deserialize)]
pub struct GrammarJSON {
    pub name: String,
    // Rule bodies are kept as raw JSON and parsed lazily in `parse_grammar`.
    rules: Map<String, Value>,
    #[serde(default)]
    precedences: Vec<Vec<RuleJSON>>,
    #[serde(default)]
    conflicts: Vec<Vec<String>>,
    #[serde(default)]
    externals: Vec<RuleJSON>,
    #[serde(default)]
    extras: Vec<RuleJSON>,
    #[serde(default)]
    inline: Vec<String>,
    #[serde(default)]
    supertypes: Vec<String>,
    // The name of the word token, if any.
    #[serde(default)]
    word: Option<String>,
    // Named reserved-word sets; each value must be a JSON array of rules.
    #[serde(default)]
    reserved: Map<String, Value>,
}
/// Convenience alias for results produced while parsing `grammar.json`.
pub type ParseGrammarResult<T> = Result<T, ParseGrammarError>;
/// Errors that can occur while parsing a `grammar.json` document.
#[derive(Debug, Error, Serialize)]
pub enum ParseGrammarError {
    #[error("{0}")]
    Serialization(String),
    #[error("Rules in the `extras` array must not contain empty strings")]
    InvalidExtra,
    #[error("Invalid rule in precedences array. Only strings and symbols are allowed")]
    Unexpected,
    #[error("Reserved word sets must be arrays")]
    InvalidReservedWordSet,
    #[error("Grammar Error: Unexpected rule `{0}` in `token()` call")]
    UnexpectedRule(String),
}
impl From<serde_json::Error> for ParseGrammarError {
    // Fold serde_json failures into the `Serialization` variant.
    fn from(value: serde_json::Error) -> Self {
        Self::Serialization(value.to_string())
    }
}
/// Check if a rule is referenced by another rule.
///
/// This function is used to determine if a variable is used in a given rule,
/// and `is_external` indicates if the rule is an external, and if it is,
/// to not assume that a named symbol that is equal to itself means it's being referenced.
///
/// For example, if we have an external rule **and** a normal rule both called `foo`,
/// `foo` should not be thought of as directly used unless it's used within another rule.
fn rule_is_referenced(rule: &Rule, target: &str, is_external: bool) -> bool {
    match rule {
        // A direct name match only counts when we're not at an external's root.
        Rule::NamedSymbol(name) => name == target && !is_external,
        // Any nested reference counts, so recurse with `is_external` cleared.
        Rule::Choice(rules) | Rule::Seq(rules) => {
            rules.iter().any(|r| rule_is_referenced(r, target, false))
        }
        // Wrappers are transparent: keep `is_external` as-is for the inner rule.
        Rule::Metadata { rule, .. } | Rule::Reserved { rule, .. } => {
            rule_is_referenced(rule, target, is_external)
        }
        Rule::Repeat(inner) => rule_is_referenced(inner, target, false),
        // Leaves with no symbol references.
        Rule::Blank | Rule::String(_) | Rule::Pattern(_, _) | Rule::Symbol(_) => false,
    }
}
/// Determine whether the rule named `target_name` is (transitively) reachable: it is the
/// root rule, appears in `extras` or `externals`, or is referenced by some other rule
/// that is itself used. `in_progress` tracks the names currently on the recursion stack
/// to break cycles of mutually-referencing (but otherwise unused) rules.
///
/// Note: panics if `grammar_rules` is empty; the caller (`parse_grammar`) only invokes
/// this while iterating over a non-empty rule list.
fn variable_is_used(
    grammar_rules: &[(String, Rule)],
    extras: &[Rule],
    externals: &[Rule],
    target_name: &str,
    in_progress: &mut HashSet<String>,
) -> bool {
    // The first rule in the grammar is the root and is always considered used.
    let root = &grammar_rules.first().unwrap().0;
    if target_name == root {
        return true;
    }
    if extras
        .iter()
        .any(|rule| rule_is_referenced(rule, target_name, false))
    {
        return true;
    }
    // `is_external = true`: an external named the same as this rule doesn't count
    // as a reference by itself.
    if externals
        .iter()
        .any(|rule| rule_is_referenced(rule, target_name, true))
    {
        return true;
    }
    // Mark this name as on-stack so cyclic references don't recurse forever.
    in_progress.insert(target_name.to_string());
    let result = grammar_rules
        .iter()
        .filter(|(key, _)| *key != target_name)
        .any(|(name, rule)| {
            if !rule_is_referenced(rule, target_name, false) || in_progress.contains(name) {
                return false;
            }
            // A referencing rule only makes us "used" if it is itself used.
            variable_is_used(grammar_rules, extras, externals, name, in_progress)
        });
    in_progress.remove(target_name);
    result
}
/// Parse a `grammar.json` document into an [`InputGrammar`].
///
/// Beyond straightforward deserialization, this prunes rules that are unreachable
/// from the root (also scrubbing them from conflicts, supertypes, inline lists,
/// extras, externals, and precedence orderings), and warns about named extras
/// that can match the empty string.
pub(crate) fn parse_grammar(input: &str) -> ParseGrammarResult<InputGrammar> {
    let mut grammar_json = serde_json::from_str::<GrammarJSON>(input)?;
    // Parse `extras`, rejecting empty-string extras outright (they would never
    // consume input).
    let mut extra_symbols =
        grammar_json
            .extras
            .into_iter()
            .try_fold(Vec::<Rule>::new(), |mut acc, item| {
                let rule = parse_rule(item, false)?;
                if let Rule::String(ref value) = rule {
                    if value.is_empty() {
                        Err(ParseGrammarError::InvalidExtra)?;
                    }
                }
                acc.push(rule);
                ParseGrammarResult::Ok(acc)
            })?;
    let mut external_tokens = grammar_json
        .externals
        .into_iter()
        .map(|e| parse_rule(e, false))
        .collect::<ParseGrammarResult<Vec<_>>>()?;
    // Precedence orderings may only contain string names or symbols.
    let mut precedence_orderings = Vec::with_capacity(grammar_json.precedences.len());
    for list in grammar_json.precedences {
        let mut ordering = Vec::with_capacity(list.len());
        for entry in list {
            ordering.push(match entry {
                RuleJSON::STRING { value } => PrecedenceEntry::Name(value),
                RuleJSON::SYMBOL { name } => PrecedenceEntry::Symbol(name),
                _ => Err(ParseGrammarError::Unexpected)?,
            });
        }
        precedence_orderings.push(ordering);
    }
    let mut variables = Vec::with_capacity(grammar_json.rules.len());
    let rules = grammar_json
        .rules
        .into_iter()
        .map(|(n, r)| Ok((n, parse_rule(serde_json::from_value(r)?, false)?)))
        .collect::<ParseGrammarResult<Vec<_>>>()?;
    let mut in_progress = HashSet::new();
    for (name, rule) in &rules {
        // Drop rules that are unused (unless they're the word token), and scrub
        // every other section that mentions them.
        if grammar_json.word.as_ref().is_none_or(|w| w != name)
            && !variable_is_used(
                &rules,
                &extra_symbols,
                &external_tokens,
                name,
                &mut in_progress,
            )
        {
            grammar_json.conflicts.retain(|r| !r.contains(name));
            grammar_json.supertypes.retain(|r| r != name);
            grammar_json.inline.retain(|r| r != name);
            extra_symbols.retain(|r| !rule_is_referenced(r, name, true));
            external_tokens.retain(|r| !rule_is_referenced(r, name, true));
            precedence_orderings.retain(|r| {
                !r.iter().any(|e| {
                    let PrecedenceEntry::Symbol(s) = e else {
                        return false;
                    };
                    s == name
                })
            });
            continue;
        }
        // Warn when a named rule used as an extra can match the empty string:
        // such extras can cause infinite loops while parsing.
        if extra_symbols
            .iter()
            .any(|r| rule_is_referenced(r, name, false))
        {
            // Unwrap a Metadata wrapper (e.g. from token()/prec()) to inspect the content.
            let inner_rule = if let Rule::Metadata { rule, .. } = rule {
                rule
            } else {
                rule
            };
            let matches_empty = match inner_rule {
                Rule::String(rule_str) => rule_str.is_empty(),
                Rule::Pattern(ref value, _) => Regex::new(value)
                    .map(|reg| reg.is_match(""))
                    .unwrap_or(false),
                _ => false,
            };
            if matches_empty {
                warn!(
                    concat!(
                        "Named extra rule `{}` matches the empty string. ",
                        "Inline this to avoid infinite loops while parsing."
                    ),
                    name
                );
            }
        }
        variables.push(Variable {
            name: name.clone(),
            kind: VariableType::Named,
            rule: rule.clone(),
        });
    }
    // Each reserved-word context must be a JSON array of rules.
    let reserved_words = grammar_json
        .reserved
        .into_iter()
        .map(|(name, rule_values)| {
            let Value::Array(rule_values) = rule_values else {
                Err(ParseGrammarError::InvalidReservedWordSet)?
            };
            let mut reserved_words = Vec::with_capacity(rule_values.len());
            for value in rule_values {
                reserved_words.push(parse_rule(serde_json::from_value(value)?, false)?);
            }
            Ok(ReservedWordContext {
                name,
                reserved_words,
            })
        })
        .collect::<ParseGrammarResult<Vec<_>>>()?;
    Ok(InputGrammar {
        name: grammar_json.name,
        word_token: grammar_json.word,
        expected_conflicts: grammar_json.conflicts,
        supertype_symbols: grammar_json.supertypes,
        variables_to_inline: grammar_json.inline,
        precedence_orderings,
        variables,
        extra_symbols,
        external_tokens,
        reserved_words,
    })
}
/// Convert a deserialized [`RuleJSON`] tree into an internal [`Rule`].
///
/// `is_token` is true while inside a `token()` call, where symbol references are
/// not allowed (they would refer to non-terminal structure inside a single token).
fn parse_rule(json: RuleJSON, is_token: bool) -> ParseGrammarResult<Rule> {
    match json {
        RuleJSON::ALIAS {
            content,
            value,
            named,
        } => parse_rule(*content, is_token).map(|r| Rule::alias(r, value, named)),
        RuleJSON::BLANK => Ok(Rule::Blank),
        RuleJSON::STRING { value } => Ok(Rule::String(value)),
        RuleJSON::PATTERN { value, flags } => Ok(Rule::Pattern(
            value,
            // Keep only the `i` flag; other flags are dropped (with a warning,
            // except for the unicode flags `u`/`v`, which are silently ignored).
            flags.map_or(String::new(), |f| {
                f.matches(|c| {
                    if c == 'i' {
                        true
                    } else {
                        // silently ignore unicode flags
                        if c != 'u' && c != 'v' {
                            warn!("unsupported flag {c}");
                        }
                        false
                    }
                })
                .collect()
            }),
        )),
        RuleJSON::SYMBOL { name } => {
            if is_token {
                Err(ParseGrammarError::UnexpectedRule(name))?
            } else {
                Ok(Rule::NamedSymbol(name))
            }
        }
        RuleJSON::CHOICE { members } => members
            .into_iter()
            .map(|m| parse_rule(m, is_token))
            .collect::<ParseGrammarResult<Vec<_>>>()
            .map(Rule::choice),
        RuleJSON::FIELD { content, name } => {
            parse_rule(*content, is_token).map(|r| Rule::field(name, r))
        }
        RuleJSON::SEQ { members } => members
            .into_iter()
            .map(|m| parse_rule(m, is_token))
            .collect::<ParseGrammarResult<Vec<_>>>()
            .map(Rule::seq),
        RuleJSON::REPEAT1 { content } => parse_rule(*content, is_token).map(Rule::repeat),
        // `repeat(x)` desugars to `choice(repeat1(x), blank)`.
        RuleJSON::REPEAT { content } => {
            parse_rule(*content, is_token).map(|m| Rule::choice(vec![Rule::repeat(m), Rule::Blank]))
        }
        RuleJSON::PREC { value, content } => {
            parse_rule(*content, is_token).map(|r| Rule::prec(value.into(), r))
        }
        RuleJSON::PREC_LEFT { value, content } => {
            parse_rule(*content, is_token).map(|r| Rule::prec_left(value.into(), r))
        }
        RuleJSON::PREC_RIGHT { value, content } => {
            parse_rule(*content, is_token).map(|r| Rule::prec_right(value.into(), r))
        }
        RuleJSON::PREC_DYNAMIC { value, content } => {
            parse_rule(*content, is_token).map(|r| Rule::prec_dynamic(value, r))
        }
        RuleJSON::RESERVED {
            content,
            context_name,
        } => parse_rule(*content, is_token).map(|r| Rule::Reserved {
            rule: Box::new(r),
            context_name,
        }),
        // `token()` forbids symbols in its content (`is_token = true`).
        RuleJSON::TOKEN { content } => parse_rule(*content, true).map(Rule::token),
        // NOTE(review): unlike TOKEN, this propagates the caller's `is_token`
        // rather than forcing it to true — presumably intentional (symbols remain
        // allowed inside a bare `token.immediate()`); confirm against the DSL docs.
        RuleJSON::IMMEDIATE_TOKEN { content } => {
            parse_rule(*content, is_token).map(Rule::immediate_token)
        }
    }
}
impl From<PrecedenceValueJSON> for Precedence {
    /// Map the JSON precedence value onto the corresponding internal variant.
    fn from(val: PrecedenceValueJSON) -> Self {
        match val {
            PrecedenceValueJSON::Integer(level) => Self::Integer(level),
            PrecedenceValueJSON::Name(name) => Self::Name(name),
        }
    }
}
// Smoke test: a minimal two-rule grammar deserializes into the expected
// `InputGrammar` name and variables (REPEAT1 maps to `Rule::repeat`).
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_parse_grammar() {
        let grammar = parse_grammar(
            r#"{
            "name": "my_lang",
            "rules": {
                "file": {
                    "type": "REPEAT1",
                    "content": {
                        "type": "SYMBOL",
                        "name": "statement"
                    }
                },
                "statement": {
                    "type": "STRING",
                    "value": "foo"
                }
            }
        }"#,
        )
        .unwrap();
        assert_eq!(grammar.name, "my_lang");
        assert_eq!(
            grammar.variables,
            vec![
                Variable {
                    name: "file".to_string(),
                    kind: VariableType::Named,
                    rule: Rule::repeat(Rule::NamedSymbol("statement".to_string()))
                },
                Variable {
                    name: "statement".to_string(),
                    kind: VariableType::Named,
                    rule: Rule::String("foo".to_string())
                },
            ]
        );
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/build_tables.rs | crates/generate/src/build_tables.rs | mod build_lex_table;
mod build_parse_table;
mod coincident_tokens;
mod item;
mod item_set_builder;
mod minimize_parse_table;
mod token_conflicts;
use std::collections::{BTreeSet, HashMap};
pub use build_lex_table::LARGE_CHARACTER_RANGE_COUNT;
use build_parse_table::BuildTableResult;
pub use build_parse_table::ParseTableBuilderError;
use log::{debug, info};
use self::{
build_lex_table::build_lex_table,
build_parse_table::{build_parse_table, ParseStateInfo},
coincident_tokens::CoincidentTokenIndex,
item_set_builder::ParseItemSetBuilder,
minimize_parse_table::minimize_parse_table,
token_conflicts::TokenConflictMap,
};
use crate::{
grammars::{InlinedProductionMap, LexicalGrammar, SyntaxGrammar},
nfa::{CharacterSet, NfaCursor},
node_types::VariableInfo,
rules::{AliasMap, Symbol, SymbolType, TokenSet},
tables::{LexTable, ParseAction, ParseTable, ParseTableEntry},
OptLevel,
};
/// The complete set of tables produced by [`build_tables`] for code generation.
pub struct Tables {
    /// The LR parse table (states, actions, gotos).
    pub parse_table: ParseTable,
    /// The lexer used for ordinary token recognition.
    pub main_lex_table: LexTable,
    /// The lexer used to re-scan word tokens for keyword extraction.
    pub keyword_lex_table: LexTable,
    /// Character sets large enough to be emitted as shared lookup data,
    /// optionally associated with the symbol they belong to.
    pub large_character_sets: Vec<(Option<Symbol>, CharacterSet)>,
}
/// Build the parse and lex tables for a prepared grammar.
///
/// Pipeline: build the LR parse table, analyze token conflicts and co-occurrence,
/// identify keywords, populate the error-recovery state, record used symbols,
/// minimize the parse table, build the lexers, assign external lex states, and
/// mark fragile tokens. Optionally prints per-state debug info for
/// `report_symbol_name`, and fails if the state count exceeds `u16::MAX`.
pub fn build_tables(
    syntax_grammar: &SyntaxGrammar,
    lexical_grammar: &LexicalGrammar,
    simple_aliases: &AliasMap,
    variable_info: &[VariableInfo],
    inlines: &InlinedProductionMap,
    report_symbol_name: Option<&str>,
    optimizations: OptLevel,
) -> BuildTableResult<Tables> {
    let item_set_builder = ParseItemSetBuilder::new(syntax_grammar, lexical_grammar, inlines);
    let following_tokens =
        get_following_tokens(syntax_grammar, lexical_grammar, inlines, &item_set_builder);
    let (mut parse_table, parse_state_info) = build_parse_table(
        syntax_grammar,
        lexical_grammar,
        item_set_builder,
        variable_info,
    )?;
    let token_conflict_map = TokenConflictMap::new(lexical_grammar, following_tokens);
    let coincident_token_index = CoincidentTokenIndex::new(&parse_table, lexical_grammar);
    let keywords = identify_keywords(
        lexical_grammar,
        &parse_table,
        syntax_grammar.word_token,
        &token_conflict_map,
        &coincident_token_index,
    );
    // State 0 doubles as the error-recovery state.
    populate_error_state(
        &mut parse_table,
        syntax_grammar,
        lexical_grammar,
        &coincident_token_index,
        &token_conflict_map,
        &keywords,
    );
    populate_used_symbols(&mut parse_table, syntax_grammar, lexical_grammar);
    minimize_parse_table(
        &mut parse_table,
        syntax_grammar,
        lexical_grammar,
        simple_aliases,
        &token_conflict_map,
        &keywords,
        optimizations,
    );
    let lex_tables = build_lex_table(
        &mut parse_table,
        syntax_grammar,
        lexical_grammar,
        &keywords,
        &coincident_token_index,
        &token_conflict_map,
    );
    populate_external_lex_states(&mut parse_table, syntax_grammar);
    mark_fragile_tokens(&mut parse_table, lexical_grammar, &token_conflict_map);
    if let Some(report_symbol_name) = report_symbol_name {
        report_state_info(
            syntax_grammar,
            lexical_grammar,
            &parse_table,
            &parse_state_info,
            report_symbol_name,
        );
    }
    // Generated state ids are stored as u16, so more states than that is fatal.
    if parse_table.states.len() > u16::MAX as usize {
        Err(ParseTableBuilderError::StateCount(parse_table.states.len()))?;
    }
    Ok(Tables {
        parse_table,
        main_lex_table: lex_tables.main_lex_table,
        keyword_lex_table: lex_tables.keyword_lex_table,
        large_character_sets: lex_tables.large_character_sets,
    })
}
/// For each terminal, compute the set of terminals that can immediately follow it
/// in some production (including inlined productions). Used to detect token
/// conflicts that only matter for adjacent tokens.
fn get_following_tokens(
    syntax_grammar: &SyntaxGrammar,
    lexical_grammar: &LexicalGrammar,
    inlines: &InlinedProductionMap,
    builder: &ParseItemSetBuilder,
) -> Vec<TokenSet> {
    let mut result = vec![TokenSet::new(); lexical_grammar.variables.len()];
    let productions = syntax_grammar
        .variables
        .iter()
        .flat_map(|v| &v.productions)
        .chain(&inlines.productions);
    let all_tokens = (0..result.len())
        .map(Symbol::terminal)
        .collect::<TokenSet>();
    // For each adjacent pair of production steps, every terminal that can end the
    // left step may be followed by every terminal (incl. reserved ones) that can
    // begin the right step.
    for production in productions {
        for i in 1..production.steps.len() {
            let left_tokens = builder.last_set(&production.steps[i - 1].symbol);
            let right_tokens = builder.first_set(&production.steps[i].symbol);
            let right_reserved_tokens = builder.reserved_first_set(&production.steps[i].symbol);
            for left_token in left_tokens.iter() {
                if left_token.is_terminal() {
                    result[left_token.index].insert_all_terminals(right_tokens);
                    if let Some(reserved_tokens) = right_reserved_tokens {
                        result[left_token.index].insert_all_terminals(reserved_tokens);
                    }
                }
            }
        }
    }
    // Extras can appear between any two tokens: every token can be followed by an
    // extra, and an extra can be followed by any token at all.
    for extra in &syntax_grammar.extra_symbols {
        if extra.is_terminal() {
            for entry in &mut result {
                entry.insert(*extra);
            }
            result[extra.index] = all_tokens.clone();
        }
    }
    result
}
/// Fill in parse state 0 — the error-recovery state — with `Recover` actions for
/// every token that can safely be recognized during recovery, i.e. tokens that
/// don't conflict with any token that lexes unambiguously.
fn populate_error_state(
    parse_table: &mut ParseTable,
    syntax_grammar: &SyntaxGrammar,
    lexical_grammar: &LexicalGrammar,
    coincident_token_index: &CoincidentTokenIndex,
    token_conflict_map: &TokenConflictMap,
    keywords: &TokenSet,
) {
    let state = &mut parse_table.states[0];
    let n = lexical_grammar.variables.len();
    // First identify the *conflict-free tokens*: tokens that do not overlap with
    // any other token in any way, besides matching exactly the same string.
    let conflict_free_tokens = (0..n)
        .filter_map(|i| {
            let conflicts_with_other_tokens = (0..n).any(|j| {
                j != i
                    && !coincident_token_index.contains(Symbol::terminal(i), Symbol::terminal(j))
                    && token_conflict_map.does_match_shorter_or_longer(i, j)
            });
            if conflicts_with_other_tokens {
                None
            } else {
                debug!(
                    "error recovery - token {} has no conflicts",
                    lexical_grammar.variables[i].name
                );
                Some(Symbol::terminal(i))
            }
        })
        .collect::<TokenSet>();
    let recover_entry = ParseTableEntry {
        reusable: false,
        actions: vec![ParseAction::Recover],
    };
    // Exclude from the error-recovery state any token that conflicts with one of
    // the *conflict-free tokens* identified above.
    for i in 0..n {
        let symbol = Symbol::terminal(i);
        // Keywords and the word token are always eligible for recovery.
        if !conflict_free_tokens.contains(&symbol)
            && !keywords.contains(&symbol)
            && syntax_grammar.word_token != Some(symbol)
        {
            if let Some(t) = conflict_free_tokens.iter().find(|t| {
                !coincident_token_index.contains(symbol, *t)
                    && token_conflict_map.does_conflict(symbol.index, t.index)
            }) {
                debug!(
                    "error recovery - exclude token {} because of conflict with {}",
                    lexical_grammar.variables[i].name, lexical_grammar.variables[t.index].name
                );
                continue;
            }
        }
        debug!(
            "error recovery - include token {}",
            lexical_grammar.variables[i].name
        );
        state
            .terminal_entries
            .entry(symbol)
            .or_insert_with(|| recover_entry.clone());
    }
    // External tokens without an internal counterpart are always recoverable.
    for (i, external_token) in syntax_grammar.external_tokens.iter().enumerate() {
        if external_token.corresponding_internal_token.is_none() {
            state
                .terminal_entries
                .entry(Symbol::external(i))
                .or_insert_with(|| recover_entry.clone());
        }
    }
    state.terminal_entries.insert(Symbol::end(), recover_entry);
}
/// Record in `parse_table.symbols` every symbol that actually appears in some
/// parse state, ordered: end-of-input, then terminals, externals, non-terminals.
fn populate_used_symbols(
    parse_table: &mut ParseTable,
    syntax_grammar: &SyntaxGrammar,
    lexical_grammar: &LexicalGrammar,
) {
    // Usage bitmaps, indexed by symbol index within each kind.
    let mut terminal_usages = vec![false; lexical_grammar.variables.len()];
    let mut non_terminal_usages = vec![false; syntax_grammar.variables.len()];
    let mut external_usages = vec![false; syntax_grammar.external_tokens.len()];
    for state in &parse_table.states {
        for symbol in state.terminal_entries.keys() {
            match symbol.kind {
                SymbolType::Terminal => terminal_usages[symbol.index] = true,
                SymbolType::External => external_usages[symbol.index] = true,
                _ => {}
            }
        }
        for symbol in state.nonterminal_entries.keys() {
            non_terminal_usages[symbol.index] = true;
        }
    }
    parse_table.symbols.push(Symbol::end());
    for (i, value) in terminal_usages.into_iter().enumerate() {
        if value {
            // Assign the grammar's word token a low numerical index. This ensures that
            // it can be stored in a subtree with no heap allocations, even for grammars with
            // very large numbers of tokens. This is an optimization, but it's also important to
            // ensure that a subtree's symbol can be successfully reassigned to the word token
            // without having to move the subtree to the heap.
            // See https://github.com/tree-sitter/tree-sitter/issues/258
            if syntax_grammar.word_token.is_some_and(|t| t.index == i) {
                parse_table.symbols.insert(1, Symbol::terminal(i));
            } else {
                parse_table.symbols.push(Symbol::terminal(i));
            }
        }
    }
    for (i, value) in external_usages.into_iter().enumerate() {
        if value {
            parse_table.symbols.push(Symbol::external(i));
        }
    }
    for (i, value) in non_terminal_usages.into_iter().enumerate() {
        if value {
            parse_table.symbols.push(Symbol::non_terminal(i));
        }
    }
}
/// Assign each parse state an external-lex-state id: an index into a
/// deduplicated list of "which external tokens are valid here" sets.
fn populate_external_lex_states(parse_table: &mut ParseTable, syntax_grammar: &SyntaxGrammar) {
    // Map each internal token index to the index of its corresponding
    // external token, if one was declared.
    let mut internal_to_external = HashMap::new();
    for (external_index, external_token) in syntax_grammar.external_tokens.iter().enumerate() {
        if let Some(symbol) = external_token.corresponding_internal_token {
            internal_to_external.insert(symbol.index, external_index);
        }
    }

    // Ensure that external lex state 0 represents the absence of any
    // external tokens.
    parse_table.external_lex_states.push(TokenSet::new());

    for state_index in 0..parse_table.states.len() {
        // Collect the external tokens that are valid in this state, either
        // directly or via a corresponding internal token.
        let mut valid_externals = TokenSet::new();
        for token in parse_table.states[state_index].terminal_entries.keys() {
            if token.is_external() {
                valid_externals.insert(*token);
            } else if token.is_terminal() {
                if let Some(external_index) = internal_to_external.get(&token.index) {
                    valid_externals.insert(Symbol::external(*external_index));
                }
            }
        }
        // Reuse an existing identical set if there is one; otherwise add a new one.
        let id = parse_table
            .external_lex_states
            .iter()
            .position(|set| *set == valid_externals)
            .unwrap_or_else(|| {
                parse_table.external_lex_states.push(valid_externals);
                parse_table.external_lex_states.len() - 1
            });
        parse_table.states[state_index].external_lex_state_id = id;
    }
}
/// Determine which tokens can be treated as keywords: tokens made entirely of
/// letters/underscores that match only strings the word token also matches,
/// and whose substitution by the word token introduces no new lexical
/// conflicts. Returns the empty set when the grammar has no word token.
fn identify_keywords(
    lexical_grammar: &LexicalGrammar,
    parse_table: &ParseTable,
    word_token: Option<Symbol>,
    token_conflict_map: &TokenConflictMap,
    coincident_token_index: &CoincidentTokenIndex,
) -> TokenSet {
    // Without a word token, keyword extraction is disabled.
    let Some(word_token) = word_token else {
        return TokenSet::new();
    };

    let mut cursor = NfaCursor::new(&lexical_grammar.nfa, Vec::new());

    // First find all of the candidate keyword tokens: tokens that start with
    // letters or underscore and can match the same string as a word token.
    let keyword_candidates = lexical_grammar
        .variables
        .iter()
        .enumerate()
        .filter_map(|(i, variable)| {
            cursor.reset(vec![variable.start_state]);
            if all_chars_are_alphabetical(&cursor)
                && token_conflict_map.does_match_same_string(i, word_token.index)
                && !token_conflict_map.does_match_different_string(i, word_token.index)
            {
                debug!(
                    "Keywords - add candidate {}",
                    lexical_grammar.variables[i].name
                );
                Some(Symbol::terminal(i))
            } else {
                None
            }
        })
        .collect::<TokenSet>();

    // Exclude keyword candidates that shadow another keyword candidate.
    let keywords = keyword_candidates
        .iter()
        .filter(|token| {
            for other_token in keyword_candidates.iter() {
                if other_token != *token
                    && token_conflict_map.does_match_same_string(other_token.index, token.index)
                {
                    debug!(
                        "Keywords - exclude {} because it matches the same string as {}",
                        lexical_grammar.variables[token.index].name,
                        lexical_grammar.variables[other_token.index].name
                    );
                    return false;
                }
            }
            true
        })
        .collect::<TokenSet>();

    // Exclude keyword candidates for which substituting the keyword capture
    // token would introduce new lexical conflicts with other tokens.
    keywords
        .iter()
        .filter(|token| {
            for other_index in 0..lexical_grammar.variables.len() {
                if keyword_candidates.contains(&Symbol::terminal(other_index)) {
                    continue;
                }

                // If the word token was already valid in every state containing
                // this keyword candidate, then substituting the word token won't
                // introduce any new lexical conflicts.
                if coincident_token_index
                    .states_with(*token, Symbol::terminal(other_index))
                    .iter()
                    .all(|state_id| {
                        parse_table.states[*state_id]
                            .terminal_entries
                            .contains_key(&word_token)
                    })
                {
                    continue;
                }

                if !token_conflict_map.has_same_conflict_status(
                    token.index,
                    word_token.index,
                    other_index,
                ) {
                    debug!(
                        "Keywords - exclude {} because of conflict with {}",
                        lexical_grammar.variables[token.index].name,
                        lexical_grammar.variables[other_index].name
                    );
                    return false;
                }
            }
            debug!(
                "Keywords - include {}",
                lexical_grammar.variables[token.index].name,
            );
            true
        })
        .collect()
}
/// Mark parse-table entries as non-reusable ("fragile") when their token
/// overlaps with some other token that is valid in the same state.
fn mark_fragile_tokens(
    parse_table: &mut ParseTable,
    lexical_grammar: &LexicalGrammar,
    token_conflict_map: &TokenConflictMap,
) {
    let token_count = lexical_grammar.variables.len();
    let mut valid_token_mask = Vec::with_capacity(token_count);
    for state in &mut parse_table.states {
        // Recompute which terminals are valid in this state.
        valid_token_mask.clear();
        valid_token_mask.resize(token_count, false);
        for token in state.terminal_entries.keys() {
            if token.is_terminal() {
                valid_token_mask[token.index] = true;
            }
        }
        // An entry cannot be reused if its token overlaps any valid token.
        for (token, entry) in &mut state.terminal_entries {
            if !token.is_terminal() {
                continue;
            }
            let overlaps = valid_token_mask
                .iter()
                .enumerate()
                .any(|(other, &valid)| valid && token_conflict_map.does_overlap(other, token.index));
            if overlaps {
                entry.reusable = false;
            }
        }
    }
}
/// Log a per-symbol summary of the parse table, then dump full item-set
/// details for the states associated with `report_symbol_name`
/// ("*" reports every state).
fn report_state_info<'a>(
    syntax_grammar: &SyntaxGrammar,
    lexical_grammar: &LexicalGrammar,
    parse_table: &ParseTable,
    parse_state_info: &[ParseStateInfo<'a>],
    report_symbol_name: &'a str,
) {
    // For each non-terminal, collect the set of parse-state indices whose
    // item sets contain a (non-augmented) item belonging to it.
    let mut all_state_indices = BTreeSet::new();
    let mut symbols_with_state_indices = (0..syntax_grammar.variables.len())
        .map(|i| (Symbol::non_terminal(i), BTreeSet::new()))
        .collect::<Vec<_>>();

    for (i, state) in parse_table.states.iter().enumerate() {
        all_state_indices.insert(i);
        let item_set = &parse_state_info[state.id];
        for entry in &item_set.1.entries {
            if !entry.item.is_augmented() {
                symbols_with_state_indices[entry.item.variable_index as usize]
                    .1
                    .insert(i);
            }
        }
    }

    // Sort symbols by how many states they participate in, descending.
    symbols_with_state_indices.sort_unstable_by_key(|(_, states)| -(states.len() as i32));

    // NOTE(review): `.unwrap()` assumes the grammar has at least one variable.
    let max_symbol_name_length = syntax_grammar
        .variables
        .iter()
        .map(|v| v.name.len())
        .max()
        .unwrap();
    for (symbol, states) in &symbols_with_state_indices {
        info!(
            "{:width$}\t{}",
            syntax_grammar.variables[symbol.index].name,
            states.len(),
            width = max_symbol_name_length
        );
    }
    info!("");

    // Choose which states to report in detail: all of them for "*", otherwise
    // only the states recorded for the named symbol (None if no such symbol).
    let state_indices = if report_symbol_name == "*" {
        Some(&all_state_indices)
    } else {
        symbols_with_state_indices
            .iter()
            .find_map(|(symbol, state_indices)| {
                if syntax_grammar.variables[symbol.index].name == report_symbol_name {
                    Some(state_indices)
                } else {
                    None
                }
            })
    };

    if let Some(state_indices) = state_indices {
        let mut state_indices = state_indices.iter().copied().collect::<Vec<_>>();
        // Group related states together by sorting on their core id first.
        state_indices.sort_unstable_by_key(|i| (parse_table.states[*i].core_id, *i));

        for state_index in state_indices {
            let id = parse_table.states[state_index].id;
            let (preceding_symbols, item_set) = &parse_state_info[id];
            info!("state index: {state_index}");
            info!("state id: {id}");
            // Render the sequence of symbols that leads to this state.
            info!(
                "symbol sequence: {}",
                preceding_symbols
                    .iter()
                    .map(|symbol| {
                        if symbol.is_terminal() {
                            lexical_grammar.variables[symbol.index].name.clone()
                        } else if symbol.is_external() {
                            syntax_grammar.external_tokens[symbol.index].name.clone()
                        } else {
                            syntax_grammar.variables[symbol.index].name.clone()
                        }
                    })
                    .collect::<Vec<_>>()
                    .join(" ")
            );
            info!(
                "\nitems:\n{}",
                item::ParseItemSetDisplay(item_set, syntax_grammar, lexical_grammar),
            );
        }
    }
}
/// True if every non-separator transition reachable from the cursor's current
/// states consists only of letters or underscores.
fn all_chars_are_alphabetical(cursor: &NfaCursor) -> bool {
    cursor.transition_chars().all(|(chars, is_separator)| {
        is_separator || chars.chars().all(|c| c.is_alphabetic() || c == '_')
    })
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
/// Partition refinement over groups of state ids: starting from
/// `start_group_id`, repeatedly remove from each group every member that
/// `should_split` deems incompatible with an earlier remaining member, and
/// collect the removed members into a new group (whose id is recorded in
/// `group_ids_by_state_id`). Returns `true` if any group was split.
pub fn split_state_id_groups<S>(
    states: &[S],
    state_ids_by_group_id: &mut Vec<Vec<usize>>,
    group_ids_by_state_id: &mut [usize],
    start_group_id: usize,
    mut should_split: impl FnMut(&S, &S, &[usize]) -> bool,
) -> bool {
    let mut any_split = false;
    let mut current_group = start_group_id;
    // New groups are appended at the end, so this loop also re-examines them.
    while current_group < state_ids_by_group_id.len() {
        let member_ids = &state_ids_by_group_id[current_group];
        let mut removed_ids: Vec<usize> = Vec::new();
        for i in 0..member_ids.len() {
            let pivot_id = member_ids[i];
            // A member already slated for removal cannot act as a pivot.
            if removed_ids.contains(&pivot_id) {
                continue;
            }
            let pivot = &states[pivot_id];
            // Collect every later member that is incompatible with the pivot.
            for &candidate_id in &member_ids[i + 1..] {
                if removed_ids.contains(&candidate_id) {
                    continue;
                }
                if should_split(pivot, &states[candidate_id], group_ids_by_state_id) {
                    removed_ids.push(candidate_id);
                }
            }
        }
        // Move all removed members into a brand-new group.
        if !removed_ids.is_empty() {
            any_split = true;
            let new_group = state_ids_by_group_id.len();
            state_ids_by_group_id[current_group].retain(|id| !removed_ids.contains(id));
            for &id in &removed_ids {
                group_ids_by_state_id[id] = new_group;
            }
            state_ids_by_group_id.push(removed_ids);
        }
        current_group += 1;
    }
    any_split
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/node_types.rs | crates/generate/src/node_types.rs | use std::collections::{BTreeMap, BTreeSet, HashMap, HashSet};
use serde::Serialize;
use thiserror::Error;
use super::{
grammars::{LexicalGrammar, SyntaxGrammar, VariableType},
rules::{Alias, AliasMap, Symbol, SymbolType},
};
/// The type of a single child node: either a plain grammar symbol, or a
/// symbol rendered under an alias.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum ChildType {
    Normal(Symbol),
    Aliased(Alias),
}

/// Summary of the children that can occupy one field (or the generic
/// children list): the possible child types and their quantity.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct FieldInfo {
    pub quantity: ChildQuantity,
    pub types: Vec<ChildType>,
}

/// Public-facing structural summary of one grammar variable.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct VariableInfo {
    /// Fields this node can have, keyed by field name.
    pub fields: HashMap<String, FieldInfo>,
    /// All visible children, regardless of field association.
    pub children: FieldInfo,
    /// Named children that are not associated with any field.
    pub children_without_fields: FieldInfo,
    /// True if some production (possibly inherited from a hidden child)
    /// has more than one step.
    pub has_multi_step_production: bool,
}

/// One entry of the generated `node-types.json` document.
#[derive(Debug, Serialize, PartialEq, Eq, Default, PartialOrd, Ord)]
#[cfg(feature = "load")]
pub struct NodeInfoJSON {
    #[serde(rename = "type")]
    kind: String,
    named: bool,
    #[serde(skip_serializing_if = "std::ops::Not::not")]
    root: bool,
    #[serde(skip_serializing_if = "std::ops::Not::not")]
    extra: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    fields: Option<BTreeMap<String, FieldInfoJSON>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    children: Option<FieldInfoJSON>,
    #[serde(skip_serializing_if = "Option::is_none")]
    subtypes: Option<Vec<NodeTypeJSON>>,
}

/// A `{type, named}` reference to a node type, as serialized in the JSON.
#[derive(Clone, Debug, Serialize, PartialEq, Eq, PartialOrd, Ord, Hash)]
#[cfg(feature = "load")]
pub struct NodeTypeJSON {
    #[serde(rename = "type")]
    kind: String,
    named: bool,
}

/// JSON form of a field (or children) description: cardinality plus types.
#[derive(Debug, Serialize, PartialEq, Eq, PartialOrd, Ord)]
#[cfg(feature = "load")]
pub struct FieldInfoJSON {
    multiple: bool,
    required: bool,
    types: Vec<NodeTypeJSON>,
}

/// How many children of a given kind can appear: whether any can appear at
/// all, whether one is always present, and whether more than one is possible.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct ChildQuantity {
    exists: bool,
    required: bool,
    multiple: bool,
}
#[cfg(feature = "load")]
impl Default for FieldInfoJSON {
    /// A field starts out single and required; observed productions can only
    /// relax these constraints as they are merged in.
    fn default() -> Self {
        Self {
            multiple: false,
            required: true,
            types: Vec::new(),
        }
    }
}

impl Default for ChildQuantity {
    /// The default quantity is exactly one required child.
    fn default() -> Self {
        Self::one()
    }
}
impl ChildQuantity {
    /// A quantity representing the absence of any children.
    #[must_use]
    const fn zero() -> Self {
        Self {
            exists: false,
            required: false,
            multiple: false,
        }
    }

    /// A quantity representing exactly one required child.
    #[must_use]
    const fn one() -> Self {
        Self {
            exists: true,
            required: true,
            multiple: false,
        }
    }

    /// Accumulate another child occurring *within the same production*:
    /// a second child makes the quantity multiple, and a required child
    /// makes the whole quantity required.
    const fn append(&mut self, other: Self) {
        if other.exists {
            // Read `self.exists` before updating it: a child already present
            // plus this one means "multiple".
            self.multiple = self.multiple || self.exists || other.multiple;
            self.required = self.required || other.required;
            self.exists = true;
        }
    }

    /// Widen this quantity to also cover `other` (alternative productions).
    /// Returns `true` if anything changed.
    const fn union(&mut self, other: Self) -> bool {
        let gained_exists = !self.exists && other.exists;
        let lost_required = self.required && !other.required;
        let gained_multiple = !self.multiple && other.multiple;
        self.exists = self.exists || gained_exists;
        self.required = self.required && !lost_required;
        self.multiple = self.multiple || gained_multiple;
        gained_exists || lost_required || gained_multiple
    }
}
/// Result alias for the variable-info computation below.
pub type VariableInfoResult<T> = Result<T, VariableInfoError>;

/// Errors produced while summarizing grammar variables.
#[derive(Debug, Error, Serialize)]
pub enum VariableInfoError {
    #[error("Grammar error: Supertype symbols must always have a single visible child, but `{0}` can have multiple")]
    InvalidSupertype(String),
}
/// Compute a summary of the public-facing structure of each variable in the
/// grammar. Each variable in the grammar corresponds to a distinct public-facing
/// node type.
///
/// The information collected about each node type `N` is:
/// 1. `child_types` - The types of visible children that can appear within `N`.
/// 2. `fields` - The fields that `N` can have. Data regarding each field:
/// * `types` - The types of visible children the field can contain.
/// * `optional` - Do `N` nodes always have this field?
/// * `multiple` - Can `N` nodes have multiple children for this field?
/// 3. `children_without_fields` - The *other* named children of `N` that are not associated with
/// fields. Data regarding these children:
/// * `types` - The types of named children with no field.
/// * `optional` - Do `N` nodes always have at least one named child with no field?
/// * `multiple` - Can `N` nodes have multiple named children with no field?
///
/// Each summary must account for some indirect factors:
/// 1. hidden nodes. When a parent node `N` has a hidden child `C`, the visible children of `C`
/// *appear* to be direct children of `N`.
/// 2. aliases. If a parent node type `M` is aliased as some other type `N`, then nodes which
/// *appear* to have type `N` may have internal structure based on `M`.
pub fn get_variable_info(
    syntax_grammar: &SyntaxGrammar,
    lexical_grammar: &LexicalGrammar,
    default_aliases: &AliasMap,
) -> VariableInfoResult<Vec<VariableInfo>> {
    let child_type_is_visible = |t: &ChildType| {
        variable_type_for_child_type(t, syntax_grammar, lexical_grammar) >= VariableType::Anonymous
    };
    let child_type_is_named = |t: &ChildType| {
        variable_type_for_child_type(t, syntax_grammar, lexical_grammar) == VariableType::Named
    };

    // Each variable's summary can depend on the summaries of other hidden variables,
    // and variables can have mutually recursive structure. So we compute the summaries
    // iteratively, in a loop that terminates only when no more changes are possible.
    let mut did_change = true;
    let mut all_initialized = false;
    let mut result = vec![VariableInfo::default(); syntax_grammar.variables.len()];
    while did_change {
        did_change = false;
        for (i, variable) in syntax_grammar.variables.iter().enumerate() {
            let mut variable_info = result[i].clone();

            // Examine each of the variable's productions. The variable's child types can be
            // immediately combined across all productions, but the child quantities must be
            // recorded separately for each production.
            for production in &variable.productions {
                let mut production_field_quantities = HashMap::new();
                let mut production_children_quantity = ChildQuantity::zero();
                let mut production_children_without_fields_quantity = ChildQuantity::zero();
                let mut production_has_uninitialized_invisible_children = false;
                if production.steps.len() > 1 {
                    variable_info.has_multi_step_production = true;
                }
                for step in &production.steps {
                    let child_symbol = step.symbol;
                    // A step-level alias takes precedence over a default alias.
                    let child_type = if let Some(alias) = &step.alias {
                        ChildType::Aliased(alias.clone())
                    } else if let Some(alias) = default_aliases.get(&step.symbol) {
                        ChildType::Aliased(alias.clone())
                    } else {
                        ChildType::Normal(child_symbol)
                    };
                    let child_is_hidden = !child_type_is_visible(&child_type)
                        && !syntax_grammar.supertype_symbols.contains(&child_symbol);

                    // Maintain the set of all child types for this variable, and the quantity of
                    // visible children in this production.
                    did_change |=
                        extend_sorted(&mut variable_info.children.types, Some(&child_type));
                    if !child_is_hidden {
                        production_children_quantity.append(ChildQuantity::one());
                    }

                    // Maintain the set of child types associated with each field, and the quantity
                    // of children associated with each field in this production.
                    if let Some(field_name) = &step.field_name {
                        let field_info = variable_info
                            .fields
                            .entry(field_name.clone())
                            .or_default();
                        did_change |= extend_sorted(&mut field_info.types, Some(&child_type));

                        let production_field_quantity = production_field_quantities
                            .entry(field_name)
                            .or_insert_with(ChildQuantity::zero);

                        // Inherit the types and quantities of hidden children associated with
                        // fields.
                        if child_is_hidden && child_symbol.is_non_terminal() {
                            let child_variable_info = &result[child_symbol.index];
                            did_change |= extend_sorted(
                                &mut field_info.types,
                                &child_variable_info.children.types,
                            );
                            production_field_quantity.append(child_variable_info.children.quantity);
                        } else {
                            production_field_quantity.append(ChildQuantity::one());
                        }
                    }
                    // Maintain the set of named children without fields within this variable.
                    else if child_type_is_named(&child_type) {
                        production_children_without_fields_quantity.append(ChildQuantity::one());
                        did_change |= extend_sorted(
                            &mut variable_info.children_without_fields.types,
                            Some(&child_type),
                        );
                    }

                    // Inherit all child information from hidden children.
                    if child_is_hidden && child_symbol.is_non_terminal() {
                        let child_variable_info = &result[child_symbol.index];

                        // If a hidden child can have multiple children, then its parent node can
                        // appear to have multiple children.
                        if child_variable_info.has_multi_step_production {
                            variable_info.has_multi_step_production = true;
                        }

                        // If a hidden child has fields, then the parent node can appear to have
                        // those same fields.
                        for (field_name, child_field_info) in &child_variable_info.fields {
                            production_field_quantities
                                .entry(field_name)
                                .or_insert_with(ChildQuantity::zero)
                                .append(child_field_info.quantity);
                            did_change |= extend_sorted(
                                &mut variable_info
                                    .fields
                                    .entry(field_name.clone())
                                    .or_default()
                                    .types,
                                &child_field_info.types,
                            );
                        }

                        // If a hidden child has children, then the parent node can appear to have
                        // those same children.
                        production_children_quantity.append(child_variable_info.children.quantity);
                        did_change |= extend_sorted(
                            &mut variable_info.children.types,
                            &child_variable_info.children.types,
                        );

                        // If a hidden child can have named children without fields, then the parent
                        // node can appear to have those same children.
                        if step.field_name.is_none() {
                            let grandchildren_info = &child_variable_info.children_without_fields;
                            if !grandchildren_info.types.is_empty() {
                                production_children_without_fields_quantity
                                    .append(child_variable_info.children_without_fields.quantity);
                                did_change |= extend_sorted(
                                    &mut variable_info.children_without_fields.types,
                                    &child_variable_info.children_without_fields.types,
                                );
                            }
                        }
                    }

                    // Note whether or not this production contains children whose summaries
                    // have not yet been computed.
                    if child_symbol.index >= i && !all_initialized {
                        production_has_uninitialized_invisible_children = true;
                    }
                }

                // If this production's children all have had their summaries initialized,
                // then expand the quantity information with all of the possibilities introduced
                // by this production.
                if !production_has_uninitialized_invisible_children {
                    did_change |= variable_info
                        .children
                        .quantity
                        .union(production_children_quantity);
                    did_change |= variable_info
                        .children_without_fields
                        .quantity
                        .union(production_children_without_fields_quantity);
                    for (field_name, info) in &mut variable_info.fields {
                        did_change |= info.quantity.union(
                            production_field_quantities
                                .get(field_name)
                                .copied()
                                .unwrap_or_else(ChildQuantity::zero),
                        );
                    }
                }
            }

            result[i] = variable_info;
        }
        all_initialized = true;
    }

    // Supertype rules must always expand to exactly one visible node.
    for supertype_symbol in &syntax_grammar.supertype_symbols {
        if result[supertype_symbol.index].has_multi_step_production {
            let variable = &syntax_grammar.variables[supertype_symbol.index];
            return Err(VariableInfoError::InvalidSupertype(variable.name.clone()));
        }
    }

    // Update all of the node type lists to eliminate hidden nodes.
    for supertype_symbol in &syntax_grammar.supertype_symbols {
        result[supertype_symbol.index]
            .children
            .types
            .retain(child_type_is_visible);
    }
    for variable_info in &mut result {
        for field_info in variable_info.fields.values_mut() {
            field_info.types.retain(child_type_is_visible);
        }
        variable_info.fields.retain(|_, v| !v.types.is_empty());
        variable_info
            .children_without_fields
            .types
            .retain(child_type_is_visible);
    }

    Ok(result)
}
/// Collect, for every symbol, the set of aliases (or `None` for "no alias")
/// under which that symbol can appear anywhere in the grammar.
fn get_aliases_by_symbol(
    syntax_grammar: &SyntaxGrammar,
    default_aliases: &AliasMap,
) -> HashMap<Symbol, BTreeSet<Option<Alias>>> {
    let mut result: HashMap<Symbol, BTreeSet<Option<Alias>>> = HashMap::new();

    // Every default alias contributes itself as a possible rendering.
    for (symbol, alias) in default_aliases {
        result.insert(*symbol, BTreeSet::from([Some(alias.clone())]));
    }

    // Extra symbols without a default alias can appear un-aliased.
    for extra_symbol in &syntax_grammar.extra_symbols {
        if !default_aliases.contains_key(extra_symbol) {
            result.entry(*extra_symbol).or_default().insert(None);
        }
    }

    // Record the alias (or lack of one) used at every production step,
    // with step-level aliases taking precedence over default aliases.
    for variable in &syntax_grammar.variables {
        for production in &variable.productions {
            for step in &production.steps {
                let alias = step
                    .alias
                    .as_ref()
                    .or_else(|| default_aliases.get(&step.symbol))
                    .cloned();
                result.entry(step.symbol).or_default().insert(alias);
            }
        }
    }

    // The root symbol is always rendered under its own name.
    result.insert(Symbol::non_terminal(0), BTreeSet::from([None]));
    result
}
/// Map each supertype symbol to the list of child types it can expand to.
pub fn get_supertype_symbol_map(
    syntax_grammar: &SyntaxGrammar,
    default_aliases: &AliasMap,
    variable_info: &[VariableInfo],
) -> BTreeMap<Symbol, Vec<ChildType>> {
    // NOTE(review): `aliases_by_symbol` and `symbols_by_alias` are computed
    // here but never read below — this looks like dead code left from an
    // earlier revision. Confirm before removing (removal would also leave the
    // `default_aliases` parameter unused).
    let aliases_by_symbol = get_aliases_by_symbol(syntax_grammar, default_aliases);
    let mut supertype_symbol_map = BTreeMap::new();
    let mut symbols_by_alias = HashMap::new();
    for (symbol, aliases) in &aliases_by_symbol {
        for alias in aliases.iter().flatten() {
            symbols_by_alias
                .entry(alias)
                .or_insert_with(Vec::new)
                .push(*symbol);
        }
    }
    // A supertype's subtypes are exactly its recorded visible child types.
    for (i, info) in variable_info.iter().enumerate() {
        let symbol = Symbol::non_terminal(i);
        if syntax_grammar.supertype_symbols.contains(&symbol) {
            let subtypes = info.children.types.clone();
            supertype_symbol_map.insert(symbol, subtypes);
        }
    }
    supertype_symbol_map
}
/// Result alias for node-types JSON generation.
#[cfg(feature = "load")]
pub type SuperTypeCycleResult<T> = Result<T, SuperTypeCycleError>;

/// Error raised when supertype rules form a dependency cycle, which makes a
/// topological ordering of the `node-types.json` entries impossible.
#[derive(Debug, Error, Serialize)]
pub struct SuperTypeCycleError {
    /// Names of the node types participating in the cycle.
    items: Vec<String>,
}
impl std::fmt::Display for SuperTypeCycleError {
    /// Renders as `Dependency cycle detected in node types: a, b, c`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "Dependency cycle detected in node types:")?;
        let mut remaining = self.items.len();
        for item in &self.items {
            remaining -= 1;
            // Every item but the last is followed by a comma.
            if remaining > 0 {
                write!(f, " {item},")?;
            } else {
                write!(f, " {item}")?;
            }
        }
        Ok(())
    }
}
/// Build the contents of `node-types.json`: one entry per visible node type,
/// with fields, children, subtype lists for supertypes, and `extra`/`root`
/// flags. Fails if the supertype hierarchy contains a cycle.
#[cfg(feature = "load")]
pub fn generate_node_types_json(
    syntax_grammar: &SyntaxGrammar,
    lexical_grammar: &LexicalGrammar,
    default_aliases: &AliasMap,
    variable_info: &[VariableInfo],
) -> SuperTypeCycleResult<Vec<NodeInfoJSON>> {
    let mut node_types_json = BTreeMap::new();

    // Convert a child type into the `{type, named}` pair used in the JSON,
    // honoring any default alias that applies to the symbol.
    let child_type_to_node_type = |child_type: &ChildType| match child_type {
        ChildType::Aliased(alias) => NodeTypeJSON {
            kind: alias.value.clone(),
            named: alias.is_named,
        },
        ChildType::Normal(symbol) => {
            if let Some(alias) = default_aliases.get(symbol) {
                NodeTypeJSON {
                    kind: alias.value.clone(),
                    named: alias.is_named,
                }
            } else {
                match symbol.kind {
                    SymbolType::NonTerminal => {
                        let variable = &syntax_grammar.variables[symbol.index];
                        NodeTypeJSON {
                            kind: variable.name.clone(),
                            named: variable.kind != VariableType::Anonymous,
                        }
                    }
                    SymbolType::Terminal => {
                        let variable = &lexical_grammar.variables[symbol.index];
                        NodeTypeJSON {
                            kind: variable.name.clone(),
                            named: variable.kind != VariableType::Anonymous,
                        }
                    }
                    SymbolType::External => {
                        let variable = &syntax_grammar.external_tokens[symbol.index];
                        NodeTypeJSON {
                            kind: variable.name.clone(),
                            named: variable.kind != VariableType::Anonymous,
                        }
                    }
                    _ => panic!("Unexpected symbol type"),
                }
            }
        }
    };

    // Merge one field summary into its JSON form, deduplicating child types.
    let populate_field_info_json = |json: &mut FieldInfoJSON, info: &FieldInfo| {
        if info.types.is_empty() {
            json.required = false;
        } else {
            json.multiple |= info.quantity.multiple;
            json.required &= info.quantity.required;
            json.types
                .extend(info.types.iter().map(child_type_to_node_type));
            json.types.sort_unstable();
            json.types.dedup();
        }
    };

    let aliases_by_symbol = get_aliases_by_symbol(syntax_grammar, default_aliases);

    // Names under which any `extra` symbol can appear; used to set the
    // `extra` flag on the corresponding entries below.
    let empty = BTreeSet::new();
    let extra_names = syntax_grammar
        .extra_symbols
        .iter()
        .flat_map(|symbol| {
            aliases_by_symbol
                .get(symbol)
                .unwrap_or(&empty)
                .iter()
                .map(|alias| {
                    alias.as_ref().map_or(
                        match symbol.kind {
                            SymbolType::NonTerminal => &syntax_grammar.variables[symbol.index].name,
                            SymbolType::Terminal => &lexical_grammar.variables[symbol.index].name,
                            SymbolType::External => {
                                &syntax_grammar.external_tokens[symbol.index].name
                            }
                            _ => unreachable!(),
                        },
                        |alias| &alias.value,
                    )
                })
        })
        .collect::<HashSet<_>>();

    // Build the entries for non-terminals: supertypes get a `subtypes` list,
    // all other visible (or aliased) rules get fields and children.
    let mut subtype_map = Vec::new();
    for (i, info) in variable_info.iter().enumerate() {
        let symbol = Symbol::non_terminal(i);
        let variable = &syntax_grammar.variables[i];
        if syntax_grammar.supertype_symbols.contains(&symbol) {
            let node_type_json =
                node_types_json
                    .entry(variable.name.clone())
                    .or_insert_with(|| NodeInfoJSON {
                        kind: variable.name.clone(),
                        named: true,
                        root: false,
                        extra: extra_names.contains(&variable.name),
                        fields: None,
                        children: None,
                        subtypes: None,
                    });
            let mut subtypes = info
                .children
                .types
                .iter()
                .map(child_type_to_node_type)
                .collect::<Vec<_>>();
            subtypes.sort_unstable();
            subtypes.dedup();
            let supertype = NodeTypeJSON {
                kind: node_type_json.kind.clone(),
                named: true,
            };
            subtype_map.push((supertype, subtypes.clone()));
            node_type_json.subtypes = Some(subtypes);
        } else if !syntax_grammar.variables_to_inline.contains(&symbol) {
            // If a rule is aliased under multiple names, then its information
            // contributes to multiple entries in the final JSON.
            for alias in aliases_by_symbol.get(&symbol).unwrap_or(&BTreeSet::new()) {
                let kind;
                let is_named;
                if let Some(alias) = alias {
                    kind = &alias.value;
                    is_named = alias.is_named;
                } else if variable.kind.is_visible() {
                    kind = &variable.name;
                    is_named = variable.kind == VariableType::Named;
                } else {
                    continue;
                }

                // There may already be an entry with this name, because multiple
                // rules may be aliased with the same name.
                let mut node_type_existed = true;
                let node_type_json = node_types_json.entry(kind.clone()).or_insert_with(|| {
                    node_type_existed = false;
                    NodeInfoJSON {
                        kind: kind.clone(),
                        named: is_named,
                        root: i == 0,
                        extra: extra_names.contains(&kind),
                        fields: Some(BTreeMap::new()),
                        children: None,
                        subtypes: None,
                    }
                });

                let fields_json = node_type_json.fields.as_mut().unwrap();
                for (new_field, field_info) in &info.fields {
                    let field_json = fields_json.entry(new_field.clone()).or_insert_with(|| {
                        // If another rule is aliased with the same name, and does *not* have this
                        // field, then this field cannot be required.
                        let mut field_json = FieldInfoJSON::default();
                        if node_type_existed {
                            field_json.required = false;
                        }
                        field_json
                    });
                    populate_field_info_json(field_json, field_info);
                }

                // If another rule is aliased with the same name, any fields that aren't present in
                // this cannot be required.
                for (existing_field, field_json) in fields_json.iter_mut() {
                    if !info.fields.contains_key(existing_field) {
                        field_json.required = false;
                    }
                }

                populate_field_info_json(
                    node_type_json
                        .children
                        .get_or_insert(FieldInfoJSON::default()),
                    &info.children_without_fields,
                );
            }
        }
    }

    // Sort the subtype map topologically so that subtypes are listed before their supertypes.
    let mut sorted_kinds = Vec::with_capacity(subtype_map.len());
    let mut top_sort = topological_sort::TopologicalSort::<String>::new();
    for (supertype, subtypes) in &subtype_map {
        for subtype in subtypes {
            top_sort.add_dependency(subtype.kind.clone(), supertype.kind.clone());
        }
    }
    loop {
        let mut next_kinds = top_sort.pop_all();
        match (next_kinds.is_empty(), top_sort.is_empty()) {
            // Everything popped: the ordering is complete.
            (true, true) => break,
            // Nothing poppable but items remain: a dependency cycle.
            (true, false) => {
                let mut items = top_sort.collect::<Vec<String>>();
                items.sort();
                return Err(SuperTypeCycleError { items });
            }
            (false, _) => {
                next_kinds.sort();
                sorted_kinds.extend(next_kinds);
            }
        }
    }
    subtype_map.sort_by(|a, b| {
        let a_idx = sorted_kinds.iter().position(|n| n.eq(&a.0.kind)).unwrap();
        let b_idx = sorted_kinds.iter().position(|n| n.eq(&b.0.kind)).unwrap();
        a_idx.cmp(&b_idx)
    });

    // Drop empty children lists, then collapse subtypes into their supertype
    // wherever a supertype already covers them.
    for node_type_json in node_types_json.values_mut() {
        if node_type_json
            .children
            .as_ref()
            .is_some_and(|c| c.types.is_empty())
        {
            node_type_json.children = None;
        }
        if let Some(children) = &mut node_type_json.children {
            process_supertypes(children, &subtype_map);
        }
        if let Some(fields) = &mut node_type_json.fields {
            for field_info in fields.values_mut() {
                process_supertypes(field_info, &subtype_map);
            }
        }
    }

    // Add entries for the grammar's tokens (regular and external), under
    // every name each token can be rendered as.
    let mut anonymous_node_types = Vec::new();

    let regular_tokens = lexical_grammar
        .variables
        .iter()
        .enumerate()
        .flat_map(|(i, variable)| {
            aliases_by_symbol
                .get(&Symbol::terminal(i))
                .unwrap_or(&empty)
                .iter()
                .map(move |alias| {
                    alias
                        .as_ref()
                        .map_or((&variable.name, variable.kind), |alias| {
                            (&alias.value, alias.kind())
                        })
                })
        });
    let external_tokens =
        syntax_grammar
            .external_tokens
            .iter()
            .enumerate()
            .flat_map(|(i, token)| {
                aliases_by_symbol
                    .get(&Symbol::external(i))
                    .unwrap_or(&empty)
                    .iter()
                    .map(move |alias| {
                        alias.as_ref().map_or((&token.name, token.kind), |alias| {
                            (&alias.value, alias.kind())
                        })
                    })
            });

    for (name, kind) in regular_tokens.chain(external_tokens) {
        match kind {
            VariableType::Named => {
                let node_type_json =
                    node_types_json
                        .entry(name.clone())
                        .or_insert_with(|| NodeInfoJSON {
                            kind: name.clone(),
                            named: true,
                            root: false,
                            extra: extra_names.contains(&name),
                            fields: None,
                            children: None,
                            subtypes: None,
                        });
                // A token sharing a name with a rule means that rule's fields
                // and children cannot be guaranteed present.
                if let Some(children) = &mut node_type_json.children {
                    children.required = false;
                }
                if let Some(fields) = &mut node_type_json.fields {
                    for field in fields.values_mut() {
                        field.required = false;
                    }
                }
            }
            VariableType::Anonymous => anonymous_node_types.push(NodeInfoJSON {
                kind: name.clone(),
                named: false,
                root: false,
                extra: extra_names.contains(&name),
                fields: None,
                children: None,
                subtypes: None,
            }),
            _ => {}
        }
    }

    // Final ordering: supertypes first, leaf nodes last, then by kind/flags.
    let mut result = node_types_json.into_iter().map(|e| e.1).collect::<Vec<_>>();
    result.extend(anonymous_node_types);
    result.sort_unstable_by(|a, b| {
        b.subtypes
            .is_some()
            .cmp(&a.subtypes.is_some())
            .then_with(|| {
                let a_is_leaf = a.children.is_none() && a.fields.is_none();
                let b_is_leaf = b.children.is_none() && b.fields.is_none();
                a_is_leaf.cmp(&b_is_leaf)
            })
            .then_with(|| a.kind.cmp(&b.kind))
            .then_with(|| a.named.cmp(&b.named))
            .then_with(|| a.root.cmp(&b.root))
            .then_with(|| a.extra.cmp(&b.extra))
    });
    result.dedup();
    Ok(result)
}
/// Whenever a supertype is listed among a field's possible types, drop that
/// supertype's subtypes from the list — the supertype entry already covers them.
#[cfg(feature = "load")]
fn process_supertypes(info: &mut FieldInfoJSON, subtype_map: &[(NodeTypeJSON, Vec<NodeTypeJSON>)]) {
    for (parent, children) in subtype_map {
        if !info.types.contains(parent) {
            continue;
        }
        info.types.retain(|candidate| !children.contains(candidate));
    }
}
/// Resolve the visibility/naming class of a child type: aliases carry their
/// own kind, supertypes are always named, inlined rules are hidden, and any
/// other symbol uses the kind declared on its grammar variable.
fn variable_type_for_child_type(
    child_type: &ChildType,
    syntax_grammar: &SyntaxGrammar,
    lexical_grammar: &LexicalGrammar,
) -> VariableType {
    match child_type {
        ChildType::Aliased(alias) => alias.kind(),
        ChildType::Normal(symbol) if syntax_grammar.supertype_symbols.contains(symbol) => {
            VariableType::Named
        }
        ChildType::Normal(symbol) if syntax_grammar.variables_to_inline.contains(symbol) => {
            VariableType::Hidden
        }
        ChildType::Normal(symbol) => match symbol.kind {
            SymbolType::NonTerminal => syntax_grammar.variables[symbol.index].kind,
            SymbolType::Terminal => lexical_grammar.variables[symbol.index].kind,
            SymbolType::External => syntax_grammar.external_tokens[symbol.index].kind,
            _ => VariableType::Hidden,
        },
    }
}
/// Insert each of `values` into the sorted vector `vec`, skipping values that
/// are already present. Returns `true` if anything was inserted.
fn extend_sorted<'a, T>(vec: &mut Vec<T>, values: impl IntoIterator<Item = &'a T>) -> bool
where
    T: 'a + Clone + Eq + Ord,
{
    let mut changed = false;
    for value in values {
        // `binary_search` returns Err with the insertion point that keeps
        // the vector sorted.
        if let Err(insert_at) = vec.binary_search(value) {
            vec.insert(insert_at, value.clone());
            changed = true;
        }
    }
    changed
}
#[cfg(all(test, feature = "load"))]
mod tests {
use super::*;
use crate::{
grammars::{
InputGrammar, LexicalVariable, Production, ProductionStep, SyntaxVariable, Variable,
},
prepare_grammar::prepare_grammar,
rules::Rule,
};
#[test]
fn test_node_types_simple() {
let node_types = get_node_types(&InputGrammar {
variables: vec![
Variable {
name: "v1".to_string(),
kind: VariableType::Named,
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | true |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/render.rs | crates/generate/src/render.rs | use std::{
cmp,
collections::{BTreeMap, BTreeSet, HashMap, HashSet},
fmt::Write,
mem::swap,
};
use crate::LANGUAGE_VERSION;
use indoc::indoc;
use super::{
build_tables::Tables,
grammars::{ExternalToken, LexicalGrammar, SyntaxGrammar, VariableType},
nfa::CharacterSet,
node_types::ChildType,
rules::{Alias, AliasMap, Symbol, SymbolType, TokenSet},
tables::{
AdvanceAction, FieldLocation, GotoAction, LexState, LexTable, ParseAction, ParseTable,
ParseTableEntry,
},
};
// Parse states with at most this many actions may use the compact
// "small state" encoding (see `init`, which also caps it at half the
// symbol count).
const SMALL_STATE_THRESHOLD: usize = 64;
// Oldest parser ABI version that can be targeted.
pub const ABI_VERSION_MIN: usize = 14;
// Newest supported ABI version: the one this library itself speaks.
pub const ABI_VERSION_MAX: usize = LANGUAGE_VERSION;
// First ABI version whose layout includes reserved-word and supertype tables.
const ABI_VERSION_WITH_RESERVED_WORDS: usize = 15;
// Append formatted text to the generator's output buffer (no newline).
#[clippy::format_args]
macro_rules! add {
    ($this: tt, $($arg: tt)*) => {{
        $this.buffer.write_fmt(format_args!($($arg)*)).unwrap();
    }}
}
// Append the indentation whitespace for the current nesting level.
macro_rules! add_whitespace {
    ($this:tt) => {{
        // 4 bytes per char, 2 spaces per indent level
        $this.buffer.reserve(4 * 2 * $this.indent_level);
        for _ in 0..$this.indent_level {
            // NOTE(review): the comment above says 2 spaces per level but the
            // literal here appears to contain one — confirm against upstream.
            write!(&mut $this.buffer, " ").unwrap();
        }
    }};
}
// Append indentation, then formatted text, then a trailing newline.
#[clippy::format_args]
macro_rules! add_line {
    ($this: tt, $($arg: tt)*) => {
        add_whitespace!($this);
        $this.buffer.write_fmt(format_args!($($arg)*)).unwrap();
        $this.buffer += "\n";
    }
}
// Increase the indentation level for subsequent `add_line!` calls.
macro_rules! indent {
    ($this:tt) => {
        $this.indent_level += 1;
    };
}
// Decrease the indentation level; panics if already at zero.
macro_rules! dedent {
    ($this:tt) => {
        assert_ne!($this.indent_level, 0);
        $this.indent_level -= 1;
    };
}
/// Accumulates all of the state needed to render a parser's C source,
/// and owns the output buffer it is written into.
#[derive(Default)]
struct Generator {
    // The C source being built, appended to via the `add*` macros.
    buffer: String,
    // Current indentation depth used by `add_whitespace!`.
    indent_level: usize,
    language_name: String,
    parse_table: ParseTable,
    main_lex_table: LexTable,
    keyword_lex_table: LexTable,
    // Character sets rendered as shared C constants rather than inline,
    // paired with the symbol they belong to (`None` = the extras set).
    large_character_sets: Vec<(Option<Symbol>, CharacterSet)>,
    // One entry per `large_character_sets` element: constant name + usage flag.
    large_character_set_info: Vec<LargeCharacterSetInfo>,
    // Number of leading parse states that use the full-array representation.
    large_state_count: usize,
    syntax_grammar: SyntaxGrammar,
    lexical_grammar: LexicalGrammar,
    default_aliases: AliasMap,
    // Public numbering of symbols, assigned in `add_symbol_enum`.
    symbol_order: HashMap<Symbol, usize>,
    // C identifier chosen for each symbol.
    symbol_ids: HashMap<Symbol, String>,
    // C identifier chosen for each alias.
    alias_ids: HashMap<Alias, String>,
    // Aliases that match no existing symbol, kept sorted (see `init`).
    unique_aliases: Vec<Alias>,
    // Maps each symbol to the public-facing symbol it is presented as.
    symbol_map: HashMap<Symbol, Symbol>,
    // Deduplicated reserved-word sets; index 0 is always the empty set.
    reserved_word_sets: Vec<TokenSet>,
    reserved_word_set_ids_by_parse_state: Vec<usize>,
    // All field names, kept sorted (see `init`).
    field_names: Vec<String>,
    supertype_symbol_map: BTreeMap<Symbol, Vec<ChildType>>,
    // Supertype C identifier -> child types; built from `supertype_symbol_map`.
    supertype_map: BTreeMap<String, Vec<ChildType>>,
    abi_version: usize,
    metadata: Option<Metadata>,
}
/// Bookkeeping for one large character set's generated C constant.
struct LargeCharacterSetInfo {
    // Name of the C constant emitted for this set.
    constant_name: String,
    // Whether any rendered lex function actually referenced the constant.
    is_used: bool,
}
/// Semantic version triple attached to the generated parser.
struct Metadata {
    major_version: u8,
    minor_version: u8,
    patch_version: u8,
}
impl Generator {
    /// Render the complete C source for the parser and return it.
    ///
    /// Sections are emitted in the order the generated file expects them.
    /// The lex functions are rendered first into the buffer and then moved
    /// aside so the large character-set constants they turned out to use can
    /// be inserted ahead of them.
    fn generate(mut self) -> String {
        self.init();
        self.add_header();
        self.add_includes();
        self.add_pragmas();
        self.add_stats();
        self.add_symbol_enum();
        self.add_symbol_names_list();
        self.add_unique_symbol_map();
        self.add_symbol_metadata_list();
        if !self.field_names.is_empty() {
            self.add_field_name_enum();
            self.add_field_name_names_list();
            self.add_field_sequences();
        }
        if !self.parse_table.production_infos.is_empty() {
            self.add_alias_sequences();
        }
        self.add_non_terminal_alias_map();
        self.add_primary_state_id_list();
        if self.abi_version >= ABI_VERSION_WITH_RESERVED_WORDS && !self.supertype_map.is_empty() {
            self.add_supertype_map();
        }
        // Remember where the lex functions start so their text can be
        // extracted and re-appended after the character-set constants.
        let buffer_offset_before_lex_functions = self.buffer.len();
        let mut main_lex_table = LexTable::default();
        swap(&mut main_lex_table, &mut self.main_lex_table);
        self.add_lex_function("ts_lex", main_lex_table);
        if self.syntax_grammar.word_token.is_some() {
            let mut keyword_lex_table = LexTable::default();
            swap(&mut keyword_lex_table, &mut self.keyword_lex_table);
            self.add_lex_function("ts_lex_keywords", keyword_lex_table);
        }
        // Once the lex functions are generated, and we've determined which large
        // character sets are actually used, we can generate the large character set
        // constants. Insert them into the output buffer before the lex functions.
        let lex_functions = self.buffer[buffer_offset_before_lex_functions..].to_string();
        self.buffer.truncate(buffer_offset_before_lex_functions);
        for ix in 0..self.large_character_sets.len() {
            self.add_character_set(ix);
        }
        self.buffer.push_str(&lex_functions);
        self.add_lex_modes();
        if self.abi_version >= ABI_VERSION_WITH_RESERVED_WORDS && self.reserved_word_sets.len() > 1
        {
            self.add_reserved_word_sets();
        }
        self.add_parse_table();
        if !self.syntax_grammar.external_tokens.is_empty() {
            self.add_external_token_enum();
            self.add_external_scanner_symbol_map();
            self.add_external_scanner_states_list();
        }
        self.add_parser_export();
        self.buffer
    }
    /// Precompute everything the rendering passes need: symbol ids, the
    /// public symbol map, field names, alias ids, character-set constant
    /// names, reserved-word set ids, the supertype map, and the number of
    /// "large" parse states.
    fn init(&mut self) {
        let mut symbol_identifiers = HashSet::new();
        for i in 0..self.parse_table.symbols.len() {
            self.assign_symbol_id(self.parse_table.symbols[i], &mut symbol_identifiers);
        }
        // The end-of-nonterminal-extra marker shares the EOF symbol's id.
        self.symbol_ids.insert(
            Symbol::end_of_nonterminal_extra(),
            self.symbol_ids[&Symbol::end()].clone(),
        );
        self.symbol_map = HashMap::new();
        for symbol in &self.parse_table.symbols {
            let mut mapping = symbol;
            // There can be multiple symbols in the grammar that have the same name and kind,
            // due to simple aliases. When that happens, ensure that they map to the same
            // public-facing symbol. If one of the symbols is not aliased, choose that one
            // to be the public-facing symbol. Otherwise, pick the symbol with the lowest
            // numeric value.
            if let Some(alias) = self.default_aliases.get(symbol) {
                let kind = alias.kind();
                for other_symbol in &self.parse_table.symbols {
                    if let Some(other_alias) = self.default_aliases.get(other_symbol) {
                        if other_symbol < mapping && other_alias == alias {
                            mapping = other_symbol;
                        }
                    } else if self.metadata_for_symbol(*other_symbol) == (&alias.value, kind) {
                        mapping = other_symbol;
                        break;
                    }
                }
            }
            // Two anonymous tokens with different flags but the same string value
            // should be represented with the same symbol in the public API. Examples:
            // * "<" and token(prec(1, "<"))
            // * "(" and token.immediate("(")
            else if symbol.is_terminal() {
                let metadata = self.metadata_for_symbol(*symbol);
                for other_symbol in &self.parse_table.symbols {
                    let other_metadata = self.metadata_for_symbol(*other_symbol);
                    if other_metadata == metadata {
                        if let Some(mapped) = self.symbol_map.get(other_symbol) {
                            if mapped == symbol {
                                break;
                            }
                        }
                        mapping = other_symbol;
                        break;
                    }
                }
            }
            self.symbol_map.insert(*symbol, *mapping);
        }
        for production_info in &self.parse_table.production_infos {
            // Build a list of all field names
            for field_name in production_info.field_map.keys() {
                if let Err(i) = self.field_names.binary_search(field_name) {
                    self.field_names.insert(i, field_name.clone());
                }
            }
            for alias in &production_info.alias_sequence {
                // Generate a mapping from aliases to C identifiers.
                if let Some(alias) = &alias {
                    // Some aliases match an existing symbol in the grammar.
                    let alias_id =
                        if let Some(existing_symbol) = self.symbols_for_alias(alias).first() {
                            self.symbol_ids[&self.symbol_map[existing_symbol]].clone()
                        }
                        // Other aliases don't match any existing symbol, and need their own
                        // identifiers.
                        else {
                            if let Err(i) = self.unique_aliases.binary_search(alias) {
                                self.unique_aliases.insert(i, alias.clone());
                            }
                            if alias.is_named {
                                format!("alias_sym_{}", self.sanitize_identifier(&alias.value))
                            } else {
                                format!("anon_alias_sym_{}", self.sanitize_identifier(&alias.value))
                            }
                        };
                    self.alias_ids.entry(alias.clone()).or_insert(alias_id);
                }
            }
        }
        // Name the C constant for each large character set. A counter suffix
        // disambiguates multiple sets that belong to the same symbol.
        for (ix, (symbol, _)) in self.large_character_sets.iter().enumerate() {
            let count = self.large_character_sets[0..ix]
                .iter()
                .filter(|(sym, _)| sym == symbol)
                .count()
                + 1;
            let constant_name = if let Some(symbol) = symbol {
                format!("{}_character_set_{}", self.symbol_ids[symbol], count)
            } else {
                format!("extras_character_set_{count}")
            };
            self.large_character_set_info.push(LargeCharacterSetInfo {
                constant_name,
                is_used: false,
            });
        }
        // Assign an id to each unique reserved word set
        self.reserved_word_sets.push(TokenSet::new());
        for state in &self.parse_table.states {
            let id = if let Some(ix) = self
                .reserved_word_sets
                .iter()
                .position(|set| *set == state.reserved_words)
            {
                ix
            } else {
                self.reserved_word_sets.push(state.reserved_words.clone());
                self.reserved_word_sets.len() - 1
            };
            self.reserved_word_set_ids_by_parse_state.push(id);
        }
        if self.abi_version >= ABI_VERSION_WITH_RESERVED_WORDS {
            // Re-key the supertype map by C identifier; the symbol-keyed map
            // is no longer needed afterwards.
            for (supertype, subtypes) in &self.supertype_symbol_map {
                if let Some(supertype) = self.symbol_ids.get(supertype) {
                    self.supertype_map
                        .entry(supertype.clone())
                        .or_insert_with(|| subtypes.clone());
                }
            }
            self.supertype_symbol_map.clear();
        }
        // Determine which states should use the "small state" representation, and which should
        // use the normal array representation.
        let threshold = cmp::min(SMALL_STATE_THRESHOLD, self.parse_table.symbols.len() / 2);
        self.large_state_count = self
            .parse_table
            .states
            .iter()
            .enumerate()
            .take_while(|(i, s)| {
                *i <= 1 || s.terminal_entries.len() + s.nonterminal_entries.len() > threshold
            })
            .count();
    }
    /// Emit the "@generated" banner comment at the top of the file.
    fn add_header(&mut self) {
        add_line!(self, "/* Automatically @generated by tree-sitter */",);
        add_line!(self, "");
    }
    /// Emit the single `#include` the generated parser needs.
    fn add_includes(&mut self) {
        add_line!(self, "#include \"tree_sitter/parser.h\"");
        add_line!(self, "");
    }
    /// Emit compiler pragmas: silence a warning triggered by the generated
    /// initializers, and disable optimization for very large lexers.
    fn add_pragmas(&mut self) {
        add_line!(self, "#if defined(__GNUC__) || defined(__clang__)");
        add_line!(
            self,
            "#pragma GCC diagnostic ignored \"-Wmissing-field-initializers\""
        );
        add_line!(self, "#endif");
        add_line!(self, "");
        // Compiling large lexer functions can be very slow. Disabling optimizations
        // is not ideal, but only a very small fraction of overall parse time is
        // spent lexing, so the performance impact of this is negligible.
        if self.main_lex_table.states.len() > 300 {
            add_line!(self, "#ifdef _MSC_VER");
            add_line!(self, "#pragma optimize(\"\", off)");
            add_line!(self, "#elif defined(__clang__)");
            add_line!(self, "#pragma clang optimize off");
            add_line!(self, "#elif defined(__GNUC__)");
            add_line!(self, "#pragma GCC optimize (\"O0\")");
            add_line!(self, "#endif");
            add_line!(self, "");
        }
    }
    /// Emit the `#define` constants describing the parser's table sizes.
    fn add_stats(&mut self) {
        // Count tokens: terminals, EOF, and external tokens that have no
        // corresponding internal token (those are already counted).
        let token_count = self
            .parse_table
            .symbols
            .iter()
            .filter(|symbol| {
                if symbol.is_terminal() || symbol.is_eof() {
                    true
                } else if symbol.is_external() {
                    self.syntax_grammar.external_tokens[symbol.index]
                        .corresponding_internal_token
                        .is_none()
                } else {
                    false
                }
            })
            .count();
        add_line!(self, "#define LANGUAGE_VERSION {}", self.abi_version);
        add_line!(
            self,
            "#define STATE_COUNT {}",
            self.parse_table.states.len()
        );
        add_line!(self, "#define LARGE_STATE_COUNT {}", self.large_state_count);
        add_line!(
            self,
            "#define SYMBOL_COUNT {}",
            self.parse_table.symbols.len()
        );
        add_line!(self, "#define ALIAS_COUNT {}", self.unique_aliases.len());
        add_line!(self, "#define TOKEN_COUNT {token_count}");
        add_line!(
            self,
            "#define EXTERNAL_TOKEN_COUNT {}",
            self.syntax_grammar.external_tokens.len()
        );
        add_line!(self, "#define FIELD_COUNT {}", self.field_names.len());
        add_line!(
            self,
            "#define MAX_ALIAS_SEQUENCE_LENGTH {}",
            self.parse_table.max_aliased_production_length
        );
        // `init` always pushes at least the empty set, so `max()` is Some.
        add_line!(
            self,
            "#define MAX_RESERVED_WORD_SET_SIZE {}",
            self.reserved_word_sets
                .iter()
                .map(TokenSet::len)
                .max()
                .unwrap()
        );
        add_line!(
            self,
            "#define PRODUCTION_ID_COUNT {}",
            self.parse_table.production_infos.len()
        );
        add_line!(self, "#define SUPERTYPE_COUNT {}", self.supertype_map.len());
        add_line!(self, "");
    }
    /// Emit `enum ts_symbol_identifiers`, numbering every symbol starting at
    /// 1 (EOF is implicitly 0), followed by the unique aliases.
    fn add_symbol_enum(&mut self) {
        add_line!(self, "enum ts_symbol_identifiers {{");
        indent!(self);
        self.symbol_order.insert(Symbol::end(), 0);
        let mut i = 1;
        for symbol in &self.parse_table.symbols {
            if *symbol != Symbol::end() {
                self.symbol_order.insert(*symbol, i);
                add_line!(self, "{} = {i},", self.symbol_ids[symbol]);
                i += 1;
            }
        }
        for alias in &self.unique_aliases {
            add_line!(self, "{} = {i},", self.alias_ids[alias]);
            i += 1;
        }
        dedent!(self);
        add_line!(self, "}};");
        add_line!(self, "");
    }
    /// Emit `ts_symbol_names`: the user-visible name of every symbol and
    /// alias, preferring a symbol's default alias over its raw name.
    fn add_symbol_names_list(&mut self) {
        add_line!(self, "static const char * const ts_symbol_names[] = {{");
        indent!(self);
        for symbol in &self.parse_table.symbols {
            let name = self.sanitize_string(
                self.default_aliases
                    .get(symbol)
                    .map_or(self.metadata_for_symbol(*symbol).0, |alias| {
                        alias.value.as_str()
                    }),
            );
            add_line!(self, "[{}] = \"{name}\",", self.symbol_ids[symbol]);
        }
        for alias in &self.unique_aliases {
            add_line!(
                self,
                "[{}] = \"{}\",",
                self.alias_ids[alias],
                self.sanitize_string(&alias.value)
            );
        }
        dedent!(self);
        add_line!(self, "}};");
        add_line!(self, "");
    }
    /// Emit `ts_symbol_map`, mapping every symbol to its public-facing
    /// symbol (see `init`); aliases map to themselves.
    fn add_unique_symbol_map(&mut self) {
        add_line!(self, "static const TSSymbol ts_symbol_map[] = {{");
        indent!(self);
        for symbol in &self.parse_table.symbols {
            add_line!(
                self,
                "[{}] = {},",
                self.symbol_ids[symbol],
                self.symbol_ids[&self.symbol_map[symbol]],
            );
        }
        for alias in &self.unique_aliases {
            add_line!(
                self,
                "[{}] = {},",
                self.alias_ids[alias],
                self.alias_ids[alias],
            );
        }
        dedent!(self);
        add_line!(self, "}};");
        add_line!(self, "");
    }
    /// Emit `enum ts_field_identifiers`; field ids start at 1 because 0
    /// means "no field" (see `add_field_name_names_list`).
    fn add_field_name_enum(&mut self) {
        add_line!(self, "enum ts_field_identifiers {{");
        indent!(self);
        for (i, field_name) in self.field_names.iter().enumerate() {
            add_line!(self, "{} = {},", self.field_id(field_name), i + 1);
        }
        dedent!(self);
        add_line!(self, "}};");
        add_line!(self, "");
    }
    /// Emit `ts_field_names`: the string name for each field id, with the
    /// reserved id 0 mapped to NULL.
    fn add_field_name_names_list(&mut self) {
        add_line!(self, "static const char * const ts_field_names[] = {{");
        indent!(self);
        add_line!(self, "[0] = NULL,");
        for field_name in &self.field_names {
            add_line!(self, "[{}] = \"{field_name}\",", self.field_id(field_name));
        }
        dedent!(self);
        add_line!(self, "}};");
        add_line!(self, "");
    }
    /// Emit `ts_symbol_metadata`: visibility/namedness flags for every
    /// symbol and alias, derived from its variable type (or default alias).
    fn add_symbol_metadata_list(&mut self) {
        add_line!(
            self,
            "static const TSSymbolMetadata ts_symbol_metadata[] = {{"
        );
        indent!(self);
        for symbol in &self.parse_table.symbols {
            add_line!(self, "[{}] = {{", self.symbol_ids[symbol]);
            indent!(self);
            // A default alias overrides the symbol's own metadata.
            if let Some(Alias { is_named, .. }) = self.default_aliases.get(symbol) {
                add_line!(self, ".visible = true,");
                add_line!(self, ".named = {is_named},");
            } else {
                match self.metadata_for_symbol(*symbol).1 {
                    VariableType::Named => {
                        add_line!(self, ".visible = true,");
                        add_line!(self, ".named = true,");
                    }
                    VariableType::Anonymous => {
                        add_line!(self, ".visible = true,");
                        add_line!(self, ".named = false,");
                    }
                    VariableType::Hidden => {
                        add_line!(self, ".visible = false,");
                        add_line!(self, ".named = true,");
                        if self.syntax_grammar.supertype_symbols.contains(symbol) {
                            add_line!(self, ".supertype = true,");
                        }
                    }
                    VariableType::Auxiliary => {
                        add_line!(self, ".visible = false,");
                        add_line!(self, ".named = false,");
                    }
                }
            }
            dedent!(self);
            add_line!(self, "}},");
        }
        for alias in &self.unique_aliases {
            add_line!(self, "[{}] = {{", self.alias_ids[alias]);
            indent!(self);
            add_line!(self, ".visible = true,");
            add_line!(self, ".named = {},", alias.is_named);
            dedent!(self);
            add_line!(self, "}},");
        }
        dedent!(self);
        add_line!(self, "}};");
        add_line!(self, "");
    }
    /// Emit `ts_alias_sequences`, mapping each production id to the alias
    /// (if any) applied at each child position.
    fn add_alias_sequences(&mut self) {
        add_line!(
            self,
            "static const TSSymbol ts_alias_sequences[PRODUCTION_ID_COUNT][MAX_ALIAS_SEQUENCE_LENGTH] = {{",
        );
        indent!(self);
        for (i, production_info) in self.parse_table.production_infos.iter().enumerate() {
            if production_info.alias_sequence.is_empty() {
                // Work around MSVC's intolerance of empty array initializers by
                // explicitly zero-initializing the first element.
                if i == 0 {
                    add_line!(self, "[0] = {{0}},");
                }
                continue;
            }
            add_line!(self, "[{i}] = {{");
            indent!(self);
            for (j, alias) in production_info.alias_sequence.iter().enumerate() {
                if let Some(alias) = alias {
                    add_line!(self, "[{j}] = {},", self.alias_ids[alias]);
                }
            }
            dedent!(self);
            add_line!(self, "}},");
        }
        dedent!(self);
        add_line!(self, "}};");
        add_line!(self, "");
    }
fn add_non_terminal_alias_map(&mut self) {
let mut alias_ids_by_symbol = HashMap::new();
for variable in &self.syntax_grammar.variables {
for production in &variable.productions {
for step in &production.steps {
if let Some(alias) = &step.alias {
if step.symbol.is_non_terminal()
&& Some(alias) != self.default_aliases.get(&step.symbol)
&& self.symbol_ids.contains_key(&step.symbol)
{
if let Some(alias_id) = self.alias_ids.get(alias) {
let alias_ids =
alias_ids_by_symbol.entry(step.symbol).or_insert(Vec::new());
if let Err(i) = alias_ids.binary_search(&alias_id) {
alias_ids.insert(i, alias_id);
}
}
}
}
}
}
}
let mut alias_ids_by_symbol = alias_ids_by_symbol.iter().collect::<Vec<_>>();
alias_ids_by_symbol.sort_unstable_by_key(|e| e.0);
add_line!(
self,
"static const uint16_t ts_non_terminal_alias_map[] = {{"
);
indent!(self);
for (symbol, alias_ids) in alias_ids_by_symbol {
let symbol_id = &self.symbol_ids[symbol];
let public_symbol_id = &self.symbol_ids[&self.symbol_map[symbol]];
add_line!(self, "{symbol_id}, {},", 1 + alias_ids.len());
indent!(self);
add_line!(self, "{public_symbol_id},");
for alias_id in alias_ids {
add_line!(self, "{alias_id},");
}
dedent!(self);
}
add_line!(self, "0,");
dedent!(self);
add_line!(self, "}};");
add_line!(self, "");
}
    /// Produces a list of the "primary state" for every state in the grammar.
    ///
    /// The "primary state" for a given state is the first encountered state that behaves
    /// identically with respect to query analysis. We derive this by keeping track of the `core_id`
    /// for each state and treating the first state with a given `core_id` as primary.
    fn add_primary_state_id_list(&mut self) {
        add_line!(
            self,
            "static const TSStateId ts_primary_state_ids[STATE_COUNT] = {{"
        );
        indent!(self);
        let mut first_state_for_each_core_id = HashMap::new();
        for (idx, state) in self.parse_table.states.iter().enumerate() {
            // The first state seen with this `core_id` wins; later states
            // with the same core reuse its index.
            let primary_state = first_state_for_each_core_id
                .entry(state.core_id)
                .or_insert(idx);
            add_line!(self, "[{idx}] = {primary_state},");
        }
        dedent!(self);
        add_line!(self, "}};");
        add_line!(self, "");
    }
    /// Emit the field tables: `ts_field_map_slices` (production id ->
    /// index/length into the entries array) and `ts_field_map_entries`
    /// (field id, child index, inherited flag). Identical flattened field
    /// maps are deduplicated via `get_field_map_id`.
    fn add_field_sequences(&mut self) {
        let mut flat_field_maps = vec![];
        let mut next_flat_field_map_index = 0;
        // Reserve row 0 for the empty field map.
        self.get_field_map_id(
            Vec::new(),
            &mut flat_field_maps,
            &mut next_flat_field_map_index,
        );
        let mut field_map_ids = Vec::with_capacity(self.parse_table.production_infos.len());
        for production_info in &self.parse_table.production_infos {
            if production_info.field_map.is_empty() {
                field_map_ids.push((0, 0));
            } else {
                // Flatten (field name -> locations) into (name, location) pairs.
                let mut flat_field_map = Vec::with_capacity(production_info.field_map.len());
                for (field_name, locations) in &production_info.field_map {
                    for location in locations {
                        flat_field_map.push((field_name.clone(), *location));
                    }
                }
                let field_map_len = flat_field_map.len();
                field_map_ids.push((
                    self.get_field_map_id(
                        flat_field_map,
                        &mut flat_field_maps,
                        &mut next_flat_field_map_index,
                    ),
                    field_map_len,
                ));
            }
        }
        add_line!(
            self,
            "static const TSMapSlice ts_field_map_slices[PRODUCTION_ID_COUNT] = {{",
        );
        indent!(self);
        for (production_id, (row_id, length)) in field_map_ids.into_iter().enumerate() {
            if length > 0 {
                add_line!(
                    self,
                    "[{production_id}] = {{.index = {row_id}, .length = {length}}},",
                );
            }
        }
        dedent!(self);
        add_line!(self, "}};");
        add_line!(self, "");
        add_line!(
            self,
            "static const TSFieldMapEntry ts_field_map_entries[] = {{",
        );
        indent!(self);
        // Skip row 0 (the empty map reserved above).
        for (row_index, field_pairs) in flat_field_maps.into_iter().skip(1) {
            add_line!(self, "[{row_index}] =");
            indent!(self);
            for (field_name, location) in field_pairs {
                add_whitespace!(self);
                add!(self, "{{{}, {}", self.field_id(&field_name), location.index);
                if location.inherited {
                    add!(self, ", .inherited = true");
                }
                add!(self, "}},\n");
            }
            dedent!(self);
        }
        dedent!(self);
        add_line!(self, "}};");
        add_line!(self, "");
    }
    /// Emit the supertype tables: the list of supertype symbols, a slice
    /// table mapping each supertype to a range of entries, and the flat
    /// array of subtype symbol ids.
    fn add_supertype_map(&mut self) {
        add_line!(
            self,
            "static const TSSymbol ts_supertype_symbols[SUPERTYPE_COUNT] = {{"
        );
        indent!(self);
        for supertype in self.supertype_map.keys() {
            add_line!(self, "{supertype},");
        }
        dedent!(self);
        add_line!(self, "}};\n");
        add_line!(
            self,
            "static const TSMapSlice ts_supertype_map_slices[] = {{",
        );
        indent!(self);
        let mut row_id = 0;
        let mut supertype_ids = vec![0];
        // Resolve each supertype's child types to C symbol/alias identifiers,
        // deduplicated and ordered via a BTreeSet.
        let mut supertype_string_map = BTreeMap::new();
        for (supertype, subtypes) in &self.supertype_map {
            supertype_string_map.insert(
                supertype,
                subtypes
                    .iter()
                    .flat_map(|s| match s {
                        ChildType::Normal(symbol) => vec![self.symbol_ids.get(symbol).cloned()],
                        ChildType::Aliased(alias) => {
                            // Prefer the alias's own id; fall back to the ids of
                            // the symbols that the alias refers to.
                            self.alias_ids.get(alias).cloned().map_or_else(
                                || {
                                    self.symbols_for_alias(alias)
                                        .into_iter()
                                        .map(|s| self.symbol_ids.get(&s).cloned())
                                        .collect()
                                },
                                |a| vec![Some(a)],
                            )
                        }
                    })
                    .flatten()
                    .collect::<BTreeSet<String>>(),
            );
        }
        for (supertype, subtypes) in &supertype_string_map {
            let length = subtypes.len();
            add_line!(
                self,
                "[{supertype}] = {{.index = {row_id}, .length = {length}}},",
            );
            row_id += length;
            supertype_ids.push(row_id);
        }
        dedent!(self);
        add_line!(self, "}};");
        add_line!(self, "");
        add_line!(
            self,
            "static const TSSymbol ts_supertype_map_entries[] = {{",
        );
        indent!(self);
        for (i, (_, subtypes)) in supertype_string_map.iter().enumerate() {
            let row_index = supertype_ids[i];
            add_line!(self, "[{row_index}] =");
            indent!(self);
            for subtype in subtypes {
                add_whitespace!(self);
                add!(self, "{subtype},\n");
            }
            dedent!(self);
        }
        dedent!(self);
        add_line!(self, "}};");
        add_line!(self, "");
    }
    /// Emit one C lex function (`ts_lex` or `ts_lex_keywords`): a switch
    /// over lex states, one case per `LexState`; the generated function
    /// returns false for any state with no case.
    fn add_lex_function(&mut self, name: &str, lex_table: LexTable) {
        add_line!(
            self,
            "static bool {name}(TSLexer *lexer, TSStateId state) {{",
        );
        indent!(self);
        add_line!(self, "START_LEXER();");
        add_line!(self, "eof = lexer->eof(lexer);");
        add_line!(self, "switch (state) {{");
        indent!(self);
        for (i, state) in lex_table.states.into_iter().enumerate() {
            add_line!(self, "case {i}:");
            indent!(self);
            self.add_lex_state(i, state);
            dedent!(self);
        }
        add_line!(self, "default:");
        indent!(self);
        add_line!(self, "return false;");
        dedent!(self);
        dedent!(self);
        add_line!(self, "}}");
        dedent!(self);
        add_line!(self, "}}");
        add_line!(self, "");
    }
fn add_lex_state(&mut self, _state_ix: usize, state: LexState) {
if let Some(accept_action) = state.accept_action {
add_line!(self, "ACCEPT_TOKEN({});", self.symbol_ids[&accept_action]);
}
if let Some(eof_action) = state.eof_action {
add_line!(self, "if (eof) ADVANCE({});", eof_action.state);
}
let mut chars_copy = CharacterSet::empty();
let mut large_set = CharacterSet::empty();
let mut ruled_out_chars = CharacterSet::empty();
// The transitions in a lex state are sorted with the single-character
// transitions first. If there are many single-character transitions,
// then implement them using an array of (lookahead character, state)
// pairs, instead of individual if statements, in order to reduce compile
// time.
let mut leading_simple_transition_count = 0;
let mut leading_simple_transition_range_count = 0;
for (chars, action) in &state.advance_actions {
if action.in_main_token
&& chars.ranges().all(|r| {
let start = *r.start() as u32;
let end = *r.end() as u32;
end <= start + 1 && u16::try_from(end).is_ok()
})
{
leading_simple_transition_count += 1;
leading_simple_transition_range_count += chars.range_count();
} else {
break;
}
}
if leading_simple_transition_range_count >= 8 {
add_line!(self, "ADVANCE_MAP(");
indent!(self);
for (chars, action) in &state.advance_actions[0..leading_simple_transition_count] {
for range in chars.ranges() {
add_whitespace!(self);
self.add_character(*range.start());
add!(self, ", {},\n", action.state);
if range.end() > range.start() {
add_whitespace!(self);
self.add_character(*range.end());
add!(self, ", {},\n", action.state);
}
}
ruled_out_chars = ruled_out_chars.add(chars);
}
dedent!(self);
add_line!(self, ");");
} else {
leading_simple_transition_count = 0;
}
for (chars, action) in &state.advance_actions[leading_simple_transition_count..] {
add_whitespace!(self);
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | true |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/grammars.rs | crates/generate/src/grammars.rs | use std::{collections::HashMap, fmt};
use super::{
nfa::Nfa,
rules::{Alias, Associativity, Precedence, Rule, Symbol, TokenSet},
};
/// How a grammar variable appears in the public syntax tree. The derived
/// ordering follows declaration order (`Hidden` lowest, `Named` highest).
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub enum VariableType {
    Hidden,
    Auxiliary,
    Anonymous,
    Named,
}
// Input grammar
/// A named rule in the input grammar, before any grammar preparation.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Variable {
    pub name: String,
    pub kind: VariableType,
    pub rule: Rule,
}
/// One element of a precedence ordering: either a named precedence level
/// or a reference to a grammar symbol.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum PrecedenceEntry {
    Name(String),
    Symbol(String),
}
/// The grammar exactly as the user wrote it, with rules still referring to
/// each other by name rather than by symbol.
#[derive(Debug, Default, PartialEq, Eq)]
pub struct InputGrammar {
    pub name: String,
    pub variables: Vec<Variable>,
    pub extra_symbols: Vec<Rule>,
    pub expected_conflicts: Vec<Vec<String>>,
    pub precedence_orderings: Vec<Vec<PrecedenceEntry>>,
    pub external_tokens: Vec<Rule>,
    pub variables_to_inline: Vec<String>,
    pub supertype_symbols: Vec<String>,
    pub word_token: Option<String>,
    pub reserved_words: Vec<ReservedWordContext<Rule>>,
}
/// A named group of reserved words; generic over the word representation
/// (`Rule` in the input grammar, tokens after preparation).
#[derive(Debug, Default, PartialEq, Eq)]
pub struct ReservedWordContext<T> {
    pub name: String,
    pub reserved_words: Vec<T>,
}
// Extracted lexical grammar
/// A token-level variable extracted into the lexical grammar.
#[derive(Debug, PartialEq, Eq)]
pub struct LexicalVariable {
    pub name: String,
    pub kind: VariableType,
    pub implicit_precedence: i32,
    // First NFA state belonging to this variable — used by
    // `LexicalGrammar::variable_index_for_nfa_state` to map states back
    // to variables.
    pub start_state: u32,
}
/// The extracted lexical grammar: one shared NFA plus the variables whose
/// token rules were compiled into it.
#[derive(Debug, Default, PartialEq, Eq)]
pub struct LexicalGrammar {
    pub nfa: Nfa,
    pub variables: Vec<LexicalVariable>,
}
// Extracted syntax grammar
/// One symbol occurrence within a production, together with the
/// precedence, associativity, alias, field, and reserved-word set that
/// apply at that position.
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ProductionStep {
    pub symbol: Symbol,
    pub precedence: Precedence,
    pub associativity: Option<Associativity>,
    pub alias: Option<Alias>,
    pub field_name: Option<String>,
    pub reserved_word_set_id: ReservedWordSetId,
}
/// Index identifying a reserved-word set within `SyntaxGrammar::reserved_word_sets`.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq, Hash, PartialOrd, Ord)]
pub struct ReservedWordSetId(pub usize);
impl fmt::Display for ReservedWordSetId {
    /// Delegate to `usize`'s `Display`, preserving any formatter flags
    /// (width, fill, etc.) supplied by the caller.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Display::fmt(&self.0, f)
    }
}
/// Sentinel id (`usize::MAX`) meaning "no reserved-word set".
pub const NO_RESERVED_WORDS: ReservedWordSetId = ReservedWordSetId(usize::MAX);
/// A single right-hand side of a syntax variable: an ordered sequence of
/// steps plus a dynamic precedence used for conflict resolution.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct Production {
    pub steps: Vec<ProductionStep>,
    pub dynamic_precedence: i32,
}
/// The productions created by inlining variables, keyed by the *address*
/// of the original production plus the step index that was expanded.
/// Values are indices into `productions`.
#[derive(Default)]
pub struct InlinedProductionMap {
    pub productions: Vec<Production>,
    pub production_map: HashMap<(*const Production, u32), Vec<usize>>,
}
/// A non-terminal in the extracted syntax grammar, with its rule already
/// expanded into a flat list of productions.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SyntaxVariable {
    pub name: String,
    pub kind: VariableType,
    pub productions: Vec<Production>,
}
/// A token provided by an external scanner, optionally linked to an
/// internal token with the same name.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ExternalToken {
    pub name: String,
    pub kind: VariableType,
    pub corresponding_internal_token: Option<Symbol>,
}
/// The extracted syntax grammar: the symbol-resolved counterpart of
/// `InputGrammar`, with names replaced by `Symbol`s.
#[derive(Debug, Default)]
pub struct SyntaxGrammar {
    pub variables: Vec<SyntaxVariable>,
    pub extra_symbols: Vec<Symbol>,
    pub expected_conflicts: Vec<Vec<Symbol>>,
    pub external_tokens: Vec<ExternalToken>,
    pub supertype_symbols: Vec<Symbol>,
    pub variables_to_inline: Vec<Symbol>,
    pub word_token: Option<Symbol>,
    pub precedence_orderings: Vec<Vec<PrecedenceEntry>>,
    pub reserved_word_sets: Vec<TokenSet>,
}
// Test-only builder helpers for constructing production steps concisely.
#[cfg(test)]
impl ProductionStep {
    /// A step for `symbol` with no precedence, associativity, alias,
    /// field, or reserved words.
    #[must_use]
    pub fn new(symbol: Symbol) -> Self {
        Self {
            symbol,
            precedence: Precedence::None,
            associativity: None,
            alias: None,
            field_name: None,
            reserved_word_set_id: ReservedWordSetId::default(),
        }
    }
    /// Set this step's precedence and associativity.
    pub fn with_prec(
        mut self,
        precedence: Precedence,
        associativity: Option<Associativity>,
    ) -> Self {
        self.precedence = precedence;
        self.associativity = associativity;
        self
    }
    /// Attach an alias to this step.
    pub fn with_alias(mut self, value: &str, is_named: bool) -> Self {
        self.alias = Some(Alias {
            value: value.to_string(),
            is_named,
        });
        self
    }
    /// Attach a field name to this step.
    pub fn with_field_name(mut self, name: &str) -> Self {
        self.field_name = Some(name.to_string());
        self
    }
}
impl Production {
    /// The symbol of this production's first step, or `None` for an
    /// empty production.
    pub fn first_symbol(&self) -> Option<Symbol> {
        let first_step = self.steps.first()?;
        Some(first_step.symbol)
    }
}
// Test-only constructors, one per variable kind.
#[cfg(test)]
impl Variable {
    /// Shared constructor backing the kind-specific helpers below.
    fn of_kind(name: &str, kind: VariableType, rule: Rule) -> Self {
        Self {
            name: name.to_string(),
            kind,
            rule,
        }
    }
    pub fn named(name: &str, rule: Rule) -> Self {
        Self::of_kind(name, VariableType::Named, rule)
    }
    pub fn auxiliary(name: &str, rule: Rule) -> Self {
        Self::of_kind(name, VariableType::Auxiliary, rule)
    }
    pub fn hidden(name: &str, rule: Rule) -> Self {
        Self::of_kind(name, VariableType::Hidden, rule)
    }
    pub fn anonymous(name: &str, rule: Rule) -> Self {
        Self::of_kind(name, VariableType::Anonymous, rule)
    }
}
impl VariableType {
    /// Whether nodes of this kind appear in the visible syntax tree.
    pub fn is_visible(self) -> bool {
        matches!(self, Self::Named | Self::Anonymous)
    }
}
impl LexicalGrammar {
    /// Map each NFA state id to the index of the variable that owns it,
    /// collapsing consecutive runs of the same variable into one entry.
    pub fn variable_indices_for_nfa_states<'a>(
        &'a self,
        state_ids: &'a [u32],
    ) -> impl Iterator<Item = usize> + 'a {
        let mut last_index = None;
        state_ids.iter().filter_map(move |&state_id| {
            let index = self.variable_index_for_nfa_state(state_id);
            if last_index == Some(index) {
                None
            } else {
                last_index = Some(index);
                Some(index)
            }
        })
    }
    /// Index of the first variable whose `start_state` is at or past the
    /// given state id. Panics if no variable qualifies.
    pub fn variable_index_for_nfa_state(&self, state_id: u32) -> usize {
        self.variables
            .iter()
            .position(|variable| variable.start_state >= state_id)
            .unwrap()
    }
}
impl SyntaxVariable {
    /// Whether this variable's kind is `Auxiliary`.
    pub fn is_auxiliary(&self) -> bool {
        matches!(self.kind, VariableType::Auxiliary)
    }
    /// Whether this variable is absent from the visible syntax tree
    /// (`Hidden` or `Auxiliary`).
    pub fn is_hidden(&self) -> bool {
        matches!(self.kind, VariableType::Hidden | VariableType::Auxiliary)
    }
}
impl InlinedProductionMap {
    /// The expanded productions, if any, produced by inlining the variable
    /// at `step_index` of `production`.
    ///
    /// Lookup is keyed by the production's *address*, so it only succeeds
    /// for the exact production object the map was built from.
    pub fn inlined_productions<'a>(
        &'a self,
        production: &Production,
        step_index: u32,
    ) -> Option<impl Iterator<Item = &'a Production> + 'a> {
        self.production_map
            .get(&(std::ptr::from_ref::<Production>(production), step_index))
            .map(|production_indices| {
                production_indices
                    .iter()
                    .copied()
                    .map(move |index| &self.productions[index])
            })
    }
}
impl fmt::Display for PrecedenceEntry {
    // Render the entry the way it is written in a grammar: a quoted
    // precedence name, or a `$.`-prefixed symbol reference.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        match self {
            Self::Name(n) => write!(f, "'{n}'"),
            Self::Symbol(s) => write!(f, "$.{s}"),
        }
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/nfa.rs | crates/generate/src/nfa.rs | use std::{
char,
cmp::{max, Ordering},
fmt,
iter::ExactSizeIterator,
mem::{self, swap},
ops::{Range, RangeInclusive},
};
/// A set of characters represented as a vector of ranges.
///
/// Ranges are half-open (`u32` code points), kept sorted, and
/// non-overlapping.
#[derive(Clone, Default, PartialEq, Eq, Hash)]
pub struct CharacterSet {
    ranges: Vec<Range<u32>>,
}
/// A state in an NFA representing a regular grammar.
#[derive(Debug, PartialEq, Eq)]
pub enum NfaState {
    // Consume one character from `chars` and move to `state_id`.
    Advance {
        chars: CharacterSet,
        state_id: u32,
        // Presumably marks separator (e.g. whitespace) transitions as
        // opposed to token content — TODO confirm against NfaTransition's
        // `is_separator`.
        is_sep: bool,
        precedence: i32,
    },
    // Fork: continue from both of the given states.
    Split(u32, u32),
    // A complete match of the token belonging to `variable_index`.
    Accept {
        variable_index: usize,
        precedence: i32,
    },
}
/// An NFA: a flat arena of states referenced by index.
#[derive(PartialEq, Eq, Default)]
pub struct Nfa {
    pub states: Vec<NfaState>,
}
/// A cursor over a borrowed [`Nfa`], tracking the set of currently
/// active state ids.
#[derive(Debug)]
pub struct NfaCursor<'a> {
    pub(crate) state_ids: Vec<u32>,
    nfa: &'a Nfa,
}
/// A transition out of one or more NFA states.
#[derive(Debug, PartialEq, Eq)]
pub struct NfaTransition {
    // Characters that trigger this transition.
    pub characters: CharacterSet,
    pub is_separator: bool,
    pub precedence: i32,
    // States reached after taking the transition.
    pub states: Vec<u32>,
}
// Exclusive upper bound for character values: one past `char::MAX`.
const END: u32 = char::MAX as u32 + 1;
impl CharacterSet {
    /// Create an empty character set.
    /// (The previous comment claimed "a single character", which described
    /// `from_char`, not this constructor.)
    pub const fn empty() -> Self {
        Self { ranges: Vec::new() }
    }
/// Create a character set with a given *inclusive* range of characters.
#[allow(clippy::single_range_in_vec_init)]
#[cfg(test)]
fn from_range(mut first: char, mut last: char) -> Self {
if first > last {
swap(&mut first, &mut last);
}
Self {
ranges: vec![(first as u32)..(last as u32 + 1)],
}
}
    /// Create a character set with a single character.
    #[allow(clippy::single_range_in_vec_init)]
    pub fn from_char(c: char) -> Self {
        Self {
            ranges: vec![(c as u32)..(c as u32 + 1)],
        }
    }
    /// Create a character set containing all characters *not* present
    /// in this character set.
    pub fn negate(mut self) -> Self {
        // Walk the sorted ranges, replacing each with the gap that precedes
        // it; `previous_end` tracks where the last original range ended.
        let mut i = 0;
        let mut previous_end = 0;
        while i < self.ranges.len() {
            let range = &mut self.ranges[i];
            let start = previous_end;
            previous_end = range.end;
            if start < range.start {
                self.ranges[i] = start..range.start;
                i += 1;
            } else {
                // No gap before this range, so it contributes nothing to
                // the complement.
                self.ranges.remove(i);
            }
        }
        // Append the trailing gap up to the exclusive character maximum.
        if previous_end < END {
            self.ranges.push(previous_end..END);
        }
        self
    }
    /// Add a single character to the set.
    pub fn add_char(mut self, c: char) -> Self {
        self.add_int_range(0, c as u32, c as u32 + 1);
        self
    }
    /// Add an *inclusive* range of characters to the set.
    pub fn add_range(mut self, start: char, end: char) -> Self {
        self.add_int_range(0, start as u32, end as u32 + 1);
        self
    }
pub fn add(mut self, other: &Self) -> Self {
let mut index = 0;
for range in &other.ranges {
index = self.add_int_range(index, range.start, range.end);
}
self
}
pub fn assign(&mut self, other: &Self) {
self.ranges.clear();
self.ranges.extend_from_slice(&other.ranges);
}
    /// Insert the half-open range `start..end` into the sorted range list,
    /// merging any overlapping or adjacent ranges. Scanning starts at index
    /// `i` (a caller-provided hint); returns the index where the range
    /// ended up, so sequential sorted insertions can resume from there.
    fn add_int_range(&mut self, mut i: usize, start: u32, end: u32) -> usize {
        while i < self.ranges.len() {
            let range = &mut self.ranges[i];
            if range.start > end {
                // The new range fits entirely before this one.
                self.ranges.insert(i, start..end);
                return i;
            }
            if range.end >= start {
                // Overlapping or adjacent: grow this range to cover the input.
                range.end = range.end.max(end);
                range.start = range.start.min(start);
                // Join this range with the next range if needed.
                while i + 1 < self.ranges.len() && self.ranges[i + 1].start <= self.ranges[i].end {
                    self.ranges[i].end = self.ranges[i].end.max(self.ranges[i + 1].end);
                    self.ranges.remove(i + 1);
                }
                return i;
            }
            i += 1;
        }
        // Past every existing range: append at the end.
        self.ranges.push(start..end);
        i
    }
pub fn does_intersect(&self, other: &Self) -> bool {
let mut left_ranges = self.ranges.iter();
let mut right_ranges = other.ranges.iter();
let mut left_range = left_ranges.next();
let mut right_range = right_ranges.next();
while let (Some(left), Some(right)) = (&left_range, &right_range) {
if left.end <= right.start {
left_range = left_ranges.next();
} else if left.start >= right.end {
right_range = right_ranges.next();
} else {
return true;
}
}
false
}
/// Get the set of characters that are present in both this set
/// and the other set. Remove those common characters from both
/// of the operands.
pub fn remove_intersection(&mut self, other: &mut Self) -> Self {
let mut intersection = Vec::new();
let mut left_i = 0;
let mut right_i = 0;
while left_i < self.ranges.len() && right_i < other.ranges.len() {
let left = &mut self.ranges[left_i];
let right = &mut other.ranges[right_i];
match left.start.cmp(&right.start) {
Ordering::Less => {
// [ L ]
// [ R ]
if left.end <= right.start {
left_i += 1;
continue;
}
match left.end.cmp(&right.end) {
// [ L ]
// [ R ]
Ordering::Less => {
intersection.push(right.start..left.end);
swap(&mut left.end, &mut right.start);
left_i += 1;
}
// [ L ]
// [ R ]
Ordering::Equal => {
intersection.push(right.clone());
left.end = right.start;
other.ranges.remove(right_i);
}
// [ L ]
// [ R ]
Ordering::Greater => {
intersection.push(right.clone());
let new_range = left.start..right.start;
left.start = right.end;
self.ranges.insert(left_i, new_range);
other.ranges.remove(right_i);
left_i += 1;
}
}
}
// [ L ]
// [ R ]
Ordering::Equal if left.end < right.end => {
intersection.push(left.start..left.end);
right.start = left.end;
self.ranges.remove(left_i);
}
// [ L ]
// [ R ]
Ordering::Equal if left.end == right.end => {
intersection.push(left.clone());
self.ranges.remove(left_i);
other.ranges.remove(right_i);
}
// [ L ]
// [ R ]
Ordering::Equal if left.end > right.end => {
intersection.push(right.clone());
left.start = right.end;
other.ranges.remove(right_i);
}
Ordering::Equal => {}
Ordering::Greater => {
// [ L ]
// [ R ]
if left.start >= right.end {
right_i += 1;
continue;
}
match left.end.cmp(&right.end) {
// [ L ]
// [ R ]
Ordering::Less => {
intersection.push(left.clone());
let new_range = right.start..left.start;
right.start = left.end;
other.ranges.insert(right_i, new_range);
self.ranges.remove(left_i);
right_i += 1;
}
// [ L ]
// [ R ]
Ordering::Equal => {
intersection.push(left.clone());
right.end = left.start;
self.ranges.remove(left_i);
}
// [ L ]
// [ R ]
Ordering::Greater => {
intersection.push(left.start..right.end);
swap(&mut left.start, &mut right.end);
right_i += 1;
}
}
}
}
}
Self {
ranges: intersection,
}
}
/// Produces a `CharacterSet` containing every character in `self` that is not present in
/// `other`.
pub fn difference(mut self, mut other: Self) -> Self {
self.remove_intersection(&mut other);
self
}
/// Produces a `CharacterSet` containing every character that is in _exactly one_ of `self` or
/// `other`, but is not present in both sets.
#[cfg(test)]
fn symmetric_difference(mut self, mut other: Self) -> Self {
self.remove_intersection(&mut other);
self.add(&other)
}
pub fn char_codes(&self) -> impl Iterator<Item = u32> + '_ {
self.ranges.iter().flat_map(Clone::clone)
}
pub fn chars(&self) -> impl Iterator<Item = char> + '_ {
self.char_codes().filter_map(char::from_u32)
}
pub fn range_count(&self) -> usize {
self.ranges.len()
}
pub fn ranges(&self) -> impl Iterator<Item = RangeInclusive<char>> + '_ {
self.ranges.iter().filter_map(|range| {
let start = range.clone().find_map(char::from_u32)?;
let end = (range.start..range.end).rev().find_map(char::from_u32)?;
Some(start..=end)
})
}
pub fn is_empty(&self) -> bool {
self.ranges.is_empty()
}
/// Get a reduced list of character ranges, assuming that a given
/// set of characters can be safely ignored.
pub fn simplify_ignoring(&self, ruled_out_characters: &Self) -> Self {
let mut prev_range: Option<Range<u32>> = None;
Self {
ranges: self
.ranges
.iter()
.map(|range| Some(range.clone()))
.chain([None])
.filter_map(move |range| {
if let Some(range) = &range {
if ruled_out_characters.contains_codepoint_range(range.clone()) {
return None;
}
if let Some(prev_range) = &mut prev_range {
if ruled_out_characters
.contains_codepoint_range(prev_range.end..range.start)
{
prev_range.end = range.end;
return None;
}
}
}
let result = prev_range.clone();
prev_range = range;
result
})
.collect(),
}
}
pub fn contains_codepoint_range(&self, seek_range: Range<u32>) -> bool {
let ix = match self.ranges.binary_search_by(|probe| {
if probe.end <= seek_range.start {
Ordering::Less
} else if probe.start > seek_range.start {
Ordering::Greater
} else {
Ordering::Equal
}
}) {
Ok(ix) | Err(ix) => ix,
};
self.ranges
.get(ix)
.is_some_and(|range| range.start <= seek_range.start && range.end >= seek_range.end)
}
pub fn contains(&self, c: char) -> bool {
self.contains_codepoint_range(c as u32..c as u32 + 1)
}
}
impl Ord for CharacterSet {
    /// Order sets by total character count first, then range-by-range by
    /// length, then by the individual code points within each range.
    fn cmp(&self, other: &Self) -> Ordering {
        let total_size = |set: &Self| -> usize { set.ranges.iter().map(ExactSizeIterator::len).sum() };
        total_size(self).cmp(&total_size(other)).then_with(|| {
            self.ranges
                .iter()
                .zip(other.ranges.iter())
                .map(|(left_range, right_range)| {
                    left_range
                        .len()
                        .cmp(&right_range.len())
                        .then_with(|| left_range.clone().cmp(right_range.clone()))
                })
                .find(|&ordering| ordering != Ordering::Equal)
                .unwrap_or(Ordering::Equal)
        })
    }
}
impl PartialOrd for CharacterSet {
    /// Delegates to the total order defined by `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl fmt::Debug for CharacterSet {
    /// Render the set as a comma-separated list of ranges. A set containing
    /// `char::MAX` is printed in negated form, prefixed with `^`.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "CharacterSet [")?;
        let display_set = if self.contains(char::MAX) {
            write!(f, "^ ")?;
            self.clone().negate()
        } else {
            self.clone()
        };
        let mut is_first = true;
        for range in display_set.ranges() {
            if !is_first {
                write!(f, ", ")?;
            }
            write!(f, "{range:?}")?;
            is_first = false;
        }
        write!(f, "]")
    }
}
impl Nfa {
    /// Create an NFA with no states.
    #[must_use]
    pub const fn new() -> Self {
        Self { states: Vec::new() }
    }
    /// The id of the most recently added state.
    ///
    /// # Panics
    /// Panics if no states have been added yet.
    pub fn last_state_id(&self) -> u32 {
        assert!(!self.states.is_empty());
        self.states.len() as u32 - 1
    }
}
impl fmt::Debug for Nfa {
    /// Render the NFA as a numbered list of its states, one per line.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "Nfa {{ states: {{")?;
        self.states
            .iter()
            .enumerate()
            .try_for_each(|(index, state)| writeln!(f, "  {index}: {state:?},"))?;
        write!(f, "}} }}")
    }
}
impl<'a> NfaCursor<'a> {
    /// Create a cursor over `nfa`, positioned at the given start states
    /// (expanded through any `Split` states by `add_states`).
    pub fn new(nfa: &'a Nfa, mut states: Vec<u32>) -> Self {
        let mut result = Self {
            nfa,
            state_ids: Vec::new(),
        };
        result.add_states(&mut states);
        result
    }
    /// Reposition the cursor at the given states, expanding them through
    /// any `Split` states.
    pub fn reset(&mut self, mut states: Vec<u32>) {
        self.state_ids.clear();
        self.add_states(&mut states);
    }
    /// Replace the cursor's state list verbatim, without expanding `Split`
    /// states or sorting.
    pub fn force_reset(&mut self, states: Vec<u32>) {
        self.state_ids = states;
    }
    /// Iterate over the (character set, is-separator) pairs of the `Advance`
    /// transitions leaving the current states.
    pub fn transition_chars(&self) -> impl Iterator<Item = (&CharacterSet, bool)> {
        self.raw_transitions().map(|t| (t.0, t.1))
    }
    /// The transitions leaving the current states, grouped so that the
    /// resulting character sets are pairwise disjoint.
    pub fn transitions(&self) -> Vec<NfaTransition> {
        Self::group_transitions(self.raw_transitions())
    }
    // Iterate over the `Advance` transitions of the current states as
    // (characters, is_separator, precedence, successor state id) tuples.
    fn raw_transitions(&self) -> impl Iterator<Item = (&CharacterSet, bool, i32, u32)> {
        self.state_ids.iter().filter_map(move |id| {
            if let NfaState::Advance {
                chars,
                state_id,
                precedence,
                is_sep,
            } = &self.nfa.states[*id as usize]
            {
                Some((chars, *is_sep, *precedence, *state_id))
            } else {
                None
            }
        })
    }
    // Combine possibly-overlapping transitions into transitions over
    // *disjoint* character sets. Characters shared by several transitions
    // yield a transition to the union of their successor states, taking the
    // maximum precedence, and remaining a separator only if every
    // contributing transition is one.
    fn group_transitions<'b>(
        iter: impl Iterator<Item = (&'b CharacterSet, bool, i32, u32)>,
    ) -> Vec<NfaTransition> {
        let mut result = Vec::<NfaTransition>::new();
        for (chars, is_sep, prec, state) in iter {
            let mut chars = chars.clone();
            let mut i = 0;
            while i < result.len() && !chars.is_empty() {
                // Split the characters shared with an existing entry out of
                // both that entry and the incoming set.
                let intersection = result[i].characters.remove_intersection(&mut chars);
                if !intersection.is_empty() {
                    // Successor state lists are kept sorted and deduplicated.
                    let mut intersection_states = result[i].states.clone();
                    if let Err(j) = intersection_states.binary_search(&state) {
                        intersection_states.insert(j, state);
                    }
                    let intersection_transition = NfaTransition {
                        characters: intersection,
                        is_separator: result[i].is_separator && is_sep,
                        precedence: max(result[i].precedence, prec),
                        states: intersection_states,
                    };
                    if result[i].characters.is_empty() {
                        // The existing entry was entirely consumed; replace it.
                        result[i] = intersection_transition;
                    } else {
                        result.insert(i, intersection_transition);
                        i += 1;
                    }
                }
                i += 1;
            }
            // Any characters not shared with existing entries form a new
            // transition of their own.
            if !chars.is_empty() {
                result.push(NfaTransition {
                    characters: chars,
                    precedence: prec,
                    states: vec![state],
                    is_separator: is_sep,
                });
            }
        }
        // Merge entries that agree on states, separator status, and
        // precedence, unioning their character sets.
        let mut i = 0;
        while i < result.len() {
            for j in 0..i {
                if result[j].states == result[i].states
                    && result[j].is_separator == result[i].is_separator
                    && result[j].precedence == result[i].precedence
                {
                    let characters = mem::take(&mut result[j].characters);
                    result[j].characters = characters.add(&result[i].characters);
                    result.remove(i);
                    i -= 1;
                    break;
                }
            }
            i += 1;
        }
        result.sort_unstable_by(|a, b| a.characters.cmp(&b.characters));
        result
    }
    /// Iterate over the `(variable_index, precedence)` pairs of the `Accept`
    /// states among the current states.
    pub fn completions(&self) -> impl Iterator<Item = (usize, i32)> + '_ {
        self.state_ids.iter().filter_map(move |state_id| {
            if let NfaState::Accept {
                variable_index,
                precedence,
            } = self.nfa.states[*state_id as usize]
            {
                Some((variable_index, precedence))
            } else {
                None
            }
        })
    }
    /// Add the given states to the cursor's current states, following
    /// `Split` states transitively. `state_ids` is kept sorted and free of
    /// duplicates; `new_state_ids` is used as a work queue.
    pub fn add_states(&mut self, new_state_ids: &mut Vec<u32>) {
        let mut i = 0;
        while i < new_state_ids.len() {
            let state_id = new_state_ids[i];
            let state = &self.nfa.states[state_id as usize];
            if let NfaState::Split(left, right) = state {
                // Queue both branches of the split unless already queued.
                let mut has_left = false;
                let mut has_right = false;
                for new_state_id in new_state_ids.iter() {
                    if *new_state_id == *left {
                        has_left = true;
                    }
                    if *new_state_id == *right {
                        has_right = true;
                    }
                }
                if !has_left {
                    new_state_ids.push(*left);
                }
                if !has_right {
                    new_state_ids.push(*right);
                }
            } else if let Err(i) = self.state_ids.binary_search(&state_id) {
                // Note: this `i` shadows the loop counter — it is the sorted
                // insertion position for `state_id`.
                self.state_ids.insert(i, state_id);
            }
            i += 1;
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_adding_ranges() {
let mut set = CharacterSet::empty()
.add_range('c', 'm')
.add_range('q', 's');
// within existing range
set = set.add_char('d');
assert_eq!(
set,
CharacterSet::empty()
.add_range('c', 'm')
.add_range('q', 's')
);
// at end of existing range
set = set.add_char('m');
assert_eq!(
set,
CharacterSet::empty()
.add_range('c', 'm')
.add_range('q', 's')
);
// adjacent to end of existing range
set = set.add_char('n');
assert_eq!(
set,
CharacterSet::empty()
.add_range('c', 'n')
.add_range('q', 's')
);
// filling gap between existing ranges
set = set.add_range('o', 'p');
assert_eq!(set, CharacterSet::empty().add_range('c', 's'));
set = CharacterSet::empty()
.add_range('c', 'f')
.add_range('i', 'l')
.add_range('n', 'r');
set = set.add_range('d', 'o');
assert_eq!(set, CharacterSet::empty().add_range('c', 'r'));
}
#[test]
fn test_adding_sets() {
let set1 = CharacterSet::empty()
.add_range('c', 'f')
.add_range('i', 'l');
let set2 = CharacterSet::empty().add_range('b', 'g').add_char('h');
assert_eq!(
set1.add(&set2),
CharacterSet::empty()
.add_range('b', 'g')
.add_range('h', 'l')
);
}
#[test]
fn test_group_transitions() {
let table = [
// overlapping character classes
(
vec![
(CharacterSet::empty().add_range('a', 'f'), false, 0, 1),
(CharacterSet::empty().add_range('d', 'i'), false, 1, 2),
],
vec![
NfaTransition {
characters: CharacterSet::empty().add_range('a', 'c'),
is_separator: false,
precedence: 0,
states: vec![1],
},
NfaTransition {
characters: CharacterSet::empty().add_range('d', 'f'),
is_separator: false,
precedence: 1,
states: vec![1, 2],
},
NfaTransition {
characters: CharacterSet::empty().add_range('g', 'i'),
is_separator: false,
precedence: 1,
states: vec![2],
},
],
),
// large character class followed by many individual characters
(
vec![
(CharacterSet::empty().add_range('a', 'z'), false, 0, 1),
(CharacterSet::empty().add_char('d'), false, 0, 2),
(CharacterSet::empty().add_char('i'), false, 0, 3),
(CharacterSet::empty().add_char('f'), false, 0, 4),
],
vec![
NfaTransition {
characters: CharacterSet::empty().add_char('d'),
is_separator: false,
precedence: 0,
states: vec![1, 2],
},
NfaTransition {
characters: CharacterSet::empty().add_char('f'),
is_separator: false,
precedence: 0,
states: vec![1, 4],
},
NfaTransition {
characters: CharacterSet::empty().add_char('i'),
is_separator: false,
precedence: 0,
states: vec![1, 3],
},
NfaTransition {
characters: CharacterSet::empty()
.add_range('a', 'c')
.add_char('e')
.add_range('g', 'h')
.add_range('j', 'z'),
is_separator: false,
precedence: 0,
states: vec![1],
},
],
),
// negated character class followed by an individual character
(
vec![
(CharacterSet::empty().add_char('0'), false, 0, 1),
(CharacterSet::empty().add_char('b'), false, 0, 2),
(
CharacterSet::empty().add_range('a', 'f').negate(),
false,
0,
3,
),
(CharacterSet::empty().add_char('c'), false, 0, 4),
],
vec![
NfaTransition {
characters: CharacterSet::empty().add_char('0'),
precedence: 0,
states: vec![1, 3],
is_separator: false,
},
NfaTransition {
characters: CharacterSet::empty().add_char('b'),
precedence: 0,
states: vec![2],
is_separator: false,
},
NfaTransition {
characters: CharacterSet::empty().add_char('c'),
precedence: 0,
states: vec![4],
is_separator: false,
},
NfaTransition {
characters: CharacterSet::empty()
.add_range('a', 'f')
.add_char('0')
.negate(),
precedence: 0,
states: vec![3],
is_separator: false,
},
],
),
// multiple negated character classes
(
vec![
(CharacterSet::from_char('a'), false, 0, 1),
(CharacterSet::from_range('a', 'c').negate(), false, 0, 2),
(CharacterSet::from_char('g'), false, 0, 6),
(CharacterSet::from_range('d', 'f').negate(), false, 0, 3),
(CharacterSet::from_range('g', 'i').negate(), false, 0, 4),
(CharacterSet::from_char('g'), false, 0, 5),
],
vec![
NfaTransition {
characters: CharacterSet::from_char('a'),
precedence: 0,
states: vec![1, 3, 4],
is_separator: false,
},
NfaTransition {
characters: CharacterSet::from_char('g'),
precedence: 0,
states: vec![2, 3, 5, 6],
is_separator: false,
},
NfaTransition {
characters: CharacterSet::from_range('b', 'c'),
precedence: 0,
states: vec![3, 4],
is_separator: false,
},
NfaTransition {
characters: CharacterSet::from_range('h', 'i'),
precedence: 0,
states: vec![2, 3],
is_separator: false,
},
NfaTransition {
characters: CharacterSet::from_range('d', 'f'),
precedence: 0,
states: vec![2, 4],
is_separator: false,
},
NfaTransition {
characters: CharacterSet::from_range('a', 'i').negate(),
precedence: 0,
states: vec![2, 3, 4],
is_separator: false,
},
],
),
// disjoint characters with same state
(
vec![
(CharacterSet::from_char('a'), false, 0, 1),
(CharacterSet::from_char('b'), false, 0, 2),
(CharacterSet::from_char('c'), false, 0, 1),
(CharacterSet::from_char('d'), false, 0, 1),
(CharacterSet::from_char('e'), false, 0, 2),
],
vec![
NfaTransition {
characters: CharacterSet::empty().add_char('b').add_char('e'),
precedence: 0,
states: vec![2],
is_separator: false,
},
NfaTransition {
characters: CharacterSet::empty().add_char('a').add_range('c', 'd'),
precedence: 0,
states: vec![1],
is_separator: false,
},
],
),
];
for (i, row) in table.iter().enumerate() {
assert_eq!(
NfaCursor::group_transitions(
row.0
.iter()
.map(|(chars, is_sep, prec, state)| (chars, *is_sep, *prec, *state))
),
row.1,
"row {i}",
);
}
}
#[test]
fn test_character_set_intersection_difference_ops() {
struct Row {
left: CharacterSet,
right: CharacterSet,
left_only: CharacterSet,
right_only: CharacterSet,
intersection: CharacterSet,
}
let rows = [
// [ L ]
// [ R ]
Row {
left: CharacterSet::from_range('a', 'f'),
right: CharacterSet::from_range('g', 'm'),
left_only: CharacterSet::from_range('a', 'f'),
right_only: CharacterSet::from_range('g', 'm'),
intersection: CharacterSet::empty(),
},
// [ L ]
// [ R ]
Row {
left: CharacterSet::from_range('a', 'f'),
right: CharacterSet::from_range('c', 'i'),
left_only: CharacterSet::from_range('a', 'b'),
right_only: CharacterSet::from_range('g', 'i'),
intersection: CharacterSet::from_range('c', 'f'),
},
// [ L ]
// [ R ]
Row {
left: CharacterSet::from_range('a', 'f'),
right: CharacterSet::from_range('d', 'f'),
left_only: CharacterSet::from_range('a', 'c'),
right_only: CharacterSet::empty(),
intersection: CharacterSet::from_range('d', 'f'),
},
// [ L ]
// [ R ]
Row {
left: CharacterSet::from_range('a', 'm'),
right: CharacterSet::from_range('d', 'f'),
left_only: CharacterSet::empty()
.add_range('a', 'c')
.add_range('g', 'm'),
right_only: CharacterSet::empty(),
intersection: CharacterSet::from_range('d', 'f'),
},
// [ L ]
// [R]
Row {
left: CharacterSet::from_range(',', '/'),
right: CharacterSet::from_char('/'),
left_only: CharacterSet::from_range(',', '.'),
right_only: CharacterSet::empty(),
intersection: CharacterSet::from_char('/'),
},
// [ L ]
// [R]
Row {
left: CharacterSet::from_range(',', '/'),
right: CharacterSet::from_char('/'),
left_only: CharacterSet::from_range(',', '.'),
right_only: CharacterSet::empty(),
intersection: CharacterSet::from_char('/'),
},
// [ L1 ] [ L2 ]
// [ R ]
Row {
left: CharacterSet::empty()
.add_range('a', 'e')
.add_range('h', 'l'),
right: CharacterSet::from_range('c', 'i'),
left_only: CharacterSet::empty()
.add_range('a', 'b')
.add_range('j', 'l'),
right_only: CharacterSet::from_range('f', 'g'),
intersection: CharacterSet::empty()
.add_range('c', 'e')
.add_range('h', 'i'),
},
];
for (i, row) in rows.iter().enumerate() {
let mut left = row.left.clone();
let mut right = row.right.clone();
assert_eq!(
left.remove_intersection(&mut right),
row.intersection,
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | true |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/prepare_grammar.rs | crates/generate/src/prepare_grammar.rs | mod expand_repeats;
mod expand_tokens;
mod extract_default_aliases;
mod extract_tokens;
mod flatten_grammar;
mod intern_symbols;
mod process_inlines;
use std::{
cmp::Ordering,
collections::{hash_map, BTreeSet, HashMap, HashSet},
mem,
};
pub use expand_tokens::ExpandTokensError;
pub use extract_tokens::ExtractTokensError;
pub use flatten_grammar::FlattenGrammarError;
use indexmap::IndexMap;
pub use intern_symbols::InternSymbolsError;
pub use process_inlines::ProcessInlinesError;
use serde::Serialize;
use thiserror::Error;
pub use self::expand_tokens::expand_tokens;
use self::{
expand_repeats::expand_repeats, extract_default_aliases::extract_default_aliases,
extract_tokens::extract_tokens, flatten_grammar::flatten_grammar,
intern_symbols::intern_symbols, process_inlines::process_inlines,
};
use super::{
grammars::{
ExternalToken, InlinedProductionMap, InputGrammar, LexicalGrammar, PrecedenceEntry,
SyntaxGrammar, Variable,
},
rules::{AliasMap, Precedence, Rule, Symbol},
};
use crate::grammars::ReservedWordContext;
/// A grammar part-way through preparation. It is parameterized over the
/// representations of extra symbols (`T`) and external tokens (`U`), which
/// change as tokens are extracted: see the `InternedGrammar` and
/// `ExtractedSyntaxGrammar` aliases below.
pub struct IntermediateGrammar<T, U> {
    variables: Vec<Variable>,
    extra_symbols: Vec<T>,
    expected_conflicts: Vec<Vec<Symbol>>,
    precedence_orderings: Vec<Vec<PrecedenceEntry>>,
    external_tokens: Vec<U>,
    variables_to_inline: Vec<Symbol>,
    supertype_symbols: Vec<Symbol>,
    word_token: Option<Symbol>,
    reserved_word_sets: Vec<ReservedWordContext<T>>,
}
/// The grammar after symbol interning: rules and tokens are still `Rule`s
/// and `Variable`s.
pub type InternedGrammar = IntermediateGrammar<Rule, Variable>;
/// The grammar after token extraction: extras are `Symbol`s and external
/// tokens are `ExternalToken`s.
pub type ExtractedSyntaxGrammar = IntermediateGrammar<Symbol, ExternalToken>;
/// The lexical half of the grammar produced by token extraction: the token
/// variables plus the separator rules.
#[derive(Debug, PartialEq, Eq)]
pub struct ExtractedLexicalGrammar {
    pub variables: Vec<Variable>,
    pub separators: Vec<Rule>,
}
impl<T, U> Default for IntermediateGrammar<T, U> {
    /// An intermediate grammar with every component empty. This requires no
    /// `Default` bound on `T` or `U`, since all fields are empty collections
    /// or `None`.
    fn default() -> Self {
        Self {
            variables: Vec::new(),
            extra_symbols: Vec::new(),
            expected_conflicts: Vec::new(),
            precedence_orderings: Vec::new(),
            external_tokens: Vec::new(),
            variables_to_inline: Vec::new(),
            supertype_symbols: Vec::new(),
            word_token: None,
            reserved_word_sets: Vec::new(),
        }
    }
}
/// Result alias for the grammar-preparation pipeline.
pub type PrepareGrammarResult<T> = Result<T, PrepareGrammarError>;
/// The union of every error that a preparation stage can produce; each
/// variant wraps the error type of one stage.
#[derive(Debug, Error, Serialize)]
#[error(transparent)]
pub enum PrepareGrammarError {
    ValidatePrecedences(#[from] ValidatePrecedenceError),
    ValidateIndirectRecursion(#[from] IndirectRecursionError),
    InternSymbols(#[from] InternSymbolsError),
    ExtractTokens(#[from] ExtractTokensError),
    FlattenGrammar(#[from] FlattenGrammarError),
    ExpandTokens(#[from] ExpandTokensError),
    ProcessInlines(#[from] ProcessInlinesError),
}
/// Result alias for precedence validation.
pub type ValidatePrecedenceResult<T> = Result<T, ValidatePrecedenceError>;
/// Errors reported by `validate_precedences`.
#[derive(Debug, Error, Serialize)]
#[error(transparent)]
pub enum ValidatePrecedenceError {
    Undeclared(#[from] UndeclaredPrecedenceError),
    Ordering(#[from] ConflictingPrecedenceOrderingError),
}
/// An indirect-recursion cycle, stored as the chain of variable names that
/// closes the loop (first and last entries name the same variable).
#[derive(Debug, Error, Serialize)]
pub struct IndirectRecursionError(pub Vec<String>);
impl std::fmt::Display for IndirectRecursionError {
    /// Formats the cycle as `a -> b -> a`.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "Grammar contains an indirectly recursive rule: {}",
            self.0.join(" -> ")
        )
    }
}
/// A named precedence that was used in a rule but never declared in the
/// grammar's `precedences` lists.
#[derive(Debug, Error, Serialize)]
pub struct UndeclaredPrecedenceError {
    pub precedence: String,
    pub rule: String,
}
impl std::fmt::Display for UndeclaredPrecedenceError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "Undeclared precedence '{precedence}' in rule '{rule}'",
            precedence = self.precedence,
            rule = self.rule,
        )
    }
}
/// Two precedences that appear in opposite orders in different
/// `precedences` lists.
#[derive(Debug, Error, Serialize)]
pub struct ConflictingPrecedenceOrderingError {
    pub precedence_1: String,
    pub precedence_2: String,
}
impl std::fmt::Display for ConflictingPrecedenceOrderingError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self {
            precedence_1,
            precedence_2,
        } = self;
        write!(
            f,
            "Conflicting orderings for precedences {precedence_1} and {precedence_2}"
        )
    }
}
/// Transform an input grammar into separate components that are ready
/// for parse table construction.
pub fn prepare_grammar(
    input_grammar: &InputGrammar,
) -> PrepareGrammarResult<(
    SyntaxGrammar,
    LexicalGrammar,
    InlinedProductionMap,
    AliasMap,
)> {
    // Up-front validation: precedence declarations and recursion cycles.
    validate_precedences(input_grammar)?;
    validate_indirect_recursion(input_grammar)?;
    // Resolve rule names to symbols, then split the grammar into its
    // syntactic and lexical halves.
    let interned_grammar = intern_symbols(input_grammar)?;
    let (syntax_grammar, lexical_grammar) = extract_tokens(interned_grammar)?;
    // Normalize the syntax grammar: rewrite repeats, then flatten it.
    let syntax_grammar = expand_repeats(syntax_grammar);
    let mut syntax_grammar = flatten_grammar(syntax_grammar)?;
    // Expand the extracted token rules into the final lexical grammar.
    let lexical_grammar = expand_tokens(lexical_grammar)?;
    // Compute default aliases and the inlined-production map.
    let default_aliases = extract_default_aliases(&mut syntax_grammar, &lexical_grammar);
    let inlines = process_inlines(&syntax_grammar, &lexical_grammar)?;
    Ok((syntax_grammar, lexical_grammar, inlines, default_aliases))
}
/// Check for indirect recursion cycles in the grammar that can cause infinite loops while
/// parsing. An indirect recursion cycle occurs when a non-terminal can derive itself through
/// a chain of single-symbol productions (e.g., A -> B, B -> A).
fn validate_indirect_recursion(grammar: &InputGrammar) -> Result<(), IndirectRecursionError> {
    // Map each non-terminal to the other non-terminals it can derive via a
    // single-symbol production. Direct self-references are excluded, since
    // they don't cause a parsing loop.
    let epsilon_transitions = grammar
        .variables
        .iter()
        .map(|variable| {
            let targets = get_single_symbol_productions(&variable.rule)
                .into_iter()
                .filter(|symbol| symbol != &variable.name)
                .collect::<BTreeSet<String>>();
            (variable.name.as_str(), targets)
        })
        .collect::<IndexMap<&str, BTreeSet<String>>>();
    // Run a depth-first search from every symbol; report the first cycle found.
    for start_symbol in epsilon_transitions.keys() {
        let mut visited = BTreeSet::new();
        let mut path = Vec::new();
        if let Some((start_idx, end_idx)) =
            get_cycle(start_symbol, &epsilon_transitions, &mut visited, &mut path)
        {
            return Err(IndirectRecursionError(
                path[start_idx..=end_idx]
                    .iter()
                    .map(|symbol| (*symbol).to_string())
                    .collect(),
            ));
        }
    }
    Ok(())
}
/// The set of named symbols that `rule` can expand to as a *single-symbol*
/// production, looking through choices and metadata wrappers.
fn get_single_symbol_productions(rule: &Rule) -> BTreeSet<String> {
    let mut symbols = BTreeSet::new();
    match rule {
        Rule::NamedSymbol(name) => {
            symbols.insert(name.clone());
        }
        Rule::Choice(choices) => {
            for choice in choices {
                symbols.extend(get_single_symbol_productions(choice));
            }
        }
        Rule::Metadata { rule, .. } => {
            symbols = get_single_symbol_productions(rule);
        }
        _ => {}
    }
    symbols
}
/// Perform a depth-first search to detect cycles in single state transitions.
///
/// `path` holds the chain of symbols along the current DFS branch, and
/// `visited` records every symbol already entered (so finished subtrees are
/// not re-explored). On success, returns the inclusive `(start, end)`
/// indices into `path` that delimit the detected cycle.
fn get_cycle<'a>(
    current: &'a str,
    transitions: &'a IndexMap<&'a str, BTreeSet<String>>,
    visited: &mut BTreeSet<&'a str>,
    path: &mut Vec<&'a str>,
) -> Option<(usize, usize)> {
    // If the current symbol already appears on the path, we closed a loop.
    if let Some(first_idx) = path.iter().position(|s| *s == current) {
        path.push(current);
        return Some((first_idx, path.len() - 1));
    }
    // Already fully explored via another branch without finding a cycle.
    if visited.contains(current) {
        return None;
    }
    path.push(current);
    visited.insert(current);
    if let Some(next_symbols) = transitions.get(current) {
        for next in next_symbols {
            if let Some(cycle) = get_cycle(next, transitions, visited, path) {
                return Some(cycle);
            }
        }
    }
    // No cycle through this symbol; remove it from the active path.
    path.pop();
    None
}
/// Check that all of the named precedences used in the grammar are declared
/// within the `precedences` lists, and also that there are no conflicting
/// precedence orderings declared in those lists.
fn validate_precedences(grammar: &InputGrammar) -> ValidatePrecedenceResult<()> {
    // Check that no rule contains a named precedence that is not present in
    // any of the `precedences` lists. Recurses through the rule tree; only
    // `Metadata` nodes can carry a precedence.
    fn validate(
        rule_name: &str,
        rule: &Rule,
        names: &HashSet<&String>,
    ) -> ValidatePrecedenceResult<()> {
        match rule {
            Rule::Repeat(rule) => validate(rule_name, rule, names),
            Rule::Seq(elements) | Rule::Choice(elements) => elements
                .iter()
                .try_for_each(|e| validate(rule_name, e, names)),
            Rule::Metadata { rule, params } => {
                if let Precedence::Name(n) = &params.precedence {
                    if !names.contains(n) {
                        Err(UndeclaredPrecedenceError {
                            precedence: n.clone(),
                            rule: rule_name.to_string(),
                        })?;
                    }
                }
                validate(rule_name, rule, names)?;
                Ok(())
            }
            _ => Ok(()),
        }
    }
    // For any two precedence names `a` and `b`, if `a` comes before `b`
    // in some list, then it cannot come *after* `b` in any list.
    let mut pairs = HashMap::new();
    for list in &grammar.precedence_orderings {
        for (i, mut entry1) in list.iter().enumerate() {
            for mut entry2 in list.iter().skip(i + 1) {
                if entry2 == entry1 {
                    continue;
                }
                // Canonicalize the pair key so that (a, b) and (b, a) map to
                // the same map entry, recording the observed direction.
                let mut ordering = Ordering::Greater;
                if entry1 > entry2 {
                    ordering = Ordering::Less;
                    mem::swap(&mut entry1, &mut entry2);
                }
                match pairs.entry((entry1, entry2)) {
                    hash_map::Entry::Vacant(e) => {
                        e.insert(ordering);
                    }
                    hash_map::Entry::Occupied(e) => {
                        // The previously recorded direction must agree.
                        if e.get() != &ordering {
                            Err(ConflictingPrecedenceOrderingError {
                                precedence_1: entry1.to_string(),
                                precedence_2: entry2.to_string(),
                            })?;
                        }
                    }
                }
            }
        }
    }
    // Collect every name declared across all `precedences` lists.
    let precedence_names = grammar
        .precedence_orderings
        .iter()
        .flat_map(|l| l.iter())
        .filter_map(|p| {
            if let PrecedenceEntry::Name(n) = p {
                Some(n)
            } else {
                None
            }
        })
        .collect::<HashSet<&String>>();
    for variable in &grammar.variables {
        validate(&variable.name, &variable.rule, &precedence_names)?;
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::grammars::VariableType;
    // A named precedence ('omg') that appears in a rule but in no
    // `precedences` list must be reported as undeclared.
    #[test]
    fn test_validate_precedences_with_undeclared_precedence() {
        let grammar = InputGrammar {
            precedence_orderings: vec![
                vec![
                    PrecedenceEntry::Name("a".to_string()),
                    PrecedenceEntry::Name("b".to_string()),
                ],
                vec![
                    PrecedenceEntry::Name("b".to_string()),
                    PrecedenceEntry::Name("c".to_string()),
                    PrecedenceEntry::Name("d".to_string()),
                ],
            ],
            variables: vec![
                Variable {
                    name: "v1".to_string(),
                    kind: VariableType::Named,
                    rule: Rule::Seq(vec![
                        Rule::prec_left(Precedence::Name("b".to_string()), Rule::string("w")),
                        Rule::prec(Precedence::Name("c".to_string()), Rule::string("x")),
                    ]),
                },
                Variable {
                    name: "v2".to_string(),
                    kind: VariableType::Named,
                    rule: Rule::repeat(Rule::Choice(vec![
                        // 'omg' is not declared above.
                        Rule::prec_left(Precedence::Name("omg".to_string()), Rule::string("y")),
                        Rule::prec(Precedence::Name("c".to_string()), Rule::string("z")),
                    ])),
                },
            ],
            ..Default::default()
        };
        let result = validate_precedences(&grammar);
        assert_eq!(
            result.unwrap_err().to_string(),
            "Undeclared precedence 'omg' in rule 'v2'",
        );
    }
    // 'a' precedes 'b' in the first list but follows it in the second, so
    // validation must report a conflicting ordering.
    #[test]
    fn test_validate_precedences_with_conflicting_order() {
        let grammar = InputGrammar {
            precedence_orderings: vec![
                vec![
                    PrecedenceEntry::Name("a".to_string()),
                    PrecedenceEntry::Name("b".to_string()),
                ],
                vec![
                    PrecedenceEntry::Name("b".to_string()),
                    PrecedenceEntry::Name("c".to_string()),
                    PrecedenceEntry::Name("a".to_string()),
                ],
            ],
            variables: vec![
                Variable {
                    name: "v1".to_string(),
                    kind: VariableType::Named,
                    rule: Rule::Seq(vec![
                        Rule::prec_left(Precedence::Name("b".to_string()), Rule::string("w")),
                        Rule::prec(Precedence::Name("c".to_string()), Rule::string("x")),
                    ]),
                },
                Variable {
                    name: "v2".to_string(),
                    kind: VariableType::Named,
                    rule: Rule::repeat(Rule::Choice(vec![
                        Rule::prec_left(Precedence::Name("a".to_string()), Rule::string("y")),
                        Rule::prec(Precedence::Name("c".to_string()), Rule::string("z")),
                    ])),
                },
            ],
            ..Default::default()
        };
        let result = validate_precedences(&grammar);
        assert_eq!(
            result.unwrap_err().to_string(),
            "Conflicting orderings for precedences 'a' and 'b'",
        );
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/rules.rs | crates/generate/src/rules.rs | use std::{collections::BTreeMap, fmt};
use serde::Serialize;
use smallbitvec::SmallBitVec;
use super::grammars::VariableType;
/// The classes of symbol that can appear in a grammar. The derived ordering
/// follows declaration order: external tokens sort first, non-terminals last.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize)]
pub enum SymbolType {
    External,
    End,
    EndOfNonTerminalExtra,
    Terminal,
    NonTerminal,
}
/// Operator associativity attached to a rule via `prec_left` / `prec_right`.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize)]
pub enum Associativity {
    Left,
    Right,
}
/// An alternative name under which a symbol appears.
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize)]
pub struct Alias {
    // The name presented in place of the symbol's own name.
    pub value: String,
    // Whether the aliased node is named (as opposed to anonymous).
    pub is_named: bool,
}
/// A precedence value: unspecified, a numeric level, or a reference to a
/// named precedence declared in the grammar's `precedences` lists.
#[derive(Clone, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Default, Serialize)]
pub enum Precedence {
    #[default]
    None,
    Integer(i32),
    Name(String),
}
/// Maps symbols to the aliases applied to them.
pub type AliasMap = BTreeMap<Symbol, Alias>;
/// The metadata that rule wrappers (`prec*`, `token`, `alias`, `field`)
/// attach to a `Rule::Metadata` node.
#[derive(Clone, Debug, Default, PartialEq, Eq, Hash, Serialize)]
pub struct MetadataParams {
    pub precedence: Precedence,
    pub dynamic_precedence: i32,
    pub associativity: Option<Associativity>,
    pub is_token: bool,
    pub is_main_token: bool,
    pub alias: Option<Alias>,
    pub field_name: Option<String>,
}
/// A reference to a grammar symbol: its class plus an index within that class.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize)]
pub struct Symbol {
    pub kind: SymbolType,
    pub index: usize,
}
/// The tree describing what a grammar variable matches.
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize)]
pub enum Rule {
    /// The empty rule.
    Blank,
    /// A literal string.
    String(String),
    /// A regex pattern; the second string presumably holds the regex flags —
    /// NOTE(review): confirm against the pattern-expansion code.
    Pattern(String, String),
    /// A reference to another rule by name, not yet resolved to a `Symbol`.
    NamedSymbol(String),
    /// A reference to a resolved grammar symbol.
    Symbol(Symbol),
    /// Matches any one of the contained rules.
    Choice(Vec<Self>),
    /// A rule annotated with metadata (precedence, alias, field name, ...).
    Metadata {
        params: MetadataParams,
        rule: Box<Self>,
    },
    /// A repetition of the contained rule.
    Repeat(Box<Self>),
    /// Matches each of the contained rules in sequence.
    Seq(Vec<Self>),
    /// A rule evaluated under the reserved-word context named `context_name`.
    Reserved {
        rule: Box<Self>,
        context_name: String,
    },
}
// Because tokens are represented as small (~400 max) unsigned integers,
// sets of tokens can be efficiently represented as bit vectors with each
// index corresponding to a token, and each value representing whether or not
// the token is present in the set.
#[derive(Default, Clone, PartialEq, Eq, Hash)]
pub struct TokenSet {
    // Membership bits for terminal tokens, indexed by terminal index.
    terminal_bits: SmallBitVec,
    // Membership bits for external tokens, indexed by external token index.
    external_bits: SmallBitVec,
    // Whether the end-of-file token is in the set.
    eof: bool,
    // Whether the end-of-non-terminal-extra token is in the set.
    end_of_nonterminal_extra: bool,
}
impl fmt::Debug for TokenSet {
    /// Format the set as a list of its elements, as yielded by `iter`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_list().entries(self.iter()).finish()
    }
}
impl PartialOrd for TokenSet {
    /// Delegates to the total order defined by `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<std::cmp::Ordering> {
        Some(self.cmp(other))
    }
}
impl Ord for TokenSet {
    /// Lexicographic comparison: terminal bits, then external bits, then the
    /// eof flag, then the end-of-non-terminal-extra flag.
    fn cmp(&self, other: &Self) -> std::cmp::Ordering {
        let ordering = self.terminal_bits.iter().cmp(other.terminal_bits.iter());
        if ordering != std::cmp::Ordering::Equal {
            return ordering;
        }
        let ordering = self.external_bits.iter().cmp(other.external_bits.iter());
        if ordering != std::cmp::Ordering::Equal {
            return ordering;
        }
        let ordering = self.eof.cmp(&other.eof);
        if ordering != std::cmp::Ordering::Equal {
            return ordering;
        }
        self.end_of_nonterminal_extra
            .cmp(&other.end_of_nonterminal_extra)
    }
}
impl Rule {
    /// Exposes the nodes matched by `content` under the field `name`.
    pub fn field(name: String, content: Self) -> Self {
        add_metadata(content, move |params| {
            params.field_name = Some(name);
        })
    }

    /// Makes nodes matched by `content` appear in the tree as `value`,
    /// either as a named or an anonymous node.
    pub fn alias(content: Self, value: String, is_named: bool) -> Self {
        add_metadata(content, move |params| {
            params.alias = Some(Alias { value, is_named });
        })
    }

    /// Marks `content` as a single token.
    pub fn token(content: Self) -> Self {
        add_metadata(content, |params| {
            params.is_token = true;
        })
    }

    /// Marks `content` as a single token that is also flagged as a "main"
    /// token. NOTE(review): presumably corresponds to the DSL's
    /// `token.immediate` -- confirm against dsl.js.
    pub fn immediate_token(content: Self) -> Self {
        add_metadata(content, |params| {
            params.is_token = true;
            params.is_main_token = true;
        })
    }

    /// Assigns a precedence to `content`.
    pub fn prec(value: Precedence, content: Self) -> Self {
        add_metadata(content, |params| {
            params.precedence = value;
        })
    }

    /// Assigns a precedence and left associativity to `content`.
    pub fn prec_left(value: Precedence, content: Self) -> Self {
        add_metadata(content, |params| {
            params.associativity = Some(Associativity::Left);
            params.precedence = value;
        })
    }

    /// Assigns a precedence and right associativity to `content`.
    pub fn prec_right(value: Precedence, content: Self) -> Self {
        add_metadata(content, |params| {
            params.associativity = Some(Associativity::Right);
            params.precedence = value;
        })
    }

    /// Assigns a dynamic precedence value to `content`.
    pub fn prec_dynamic(value: i32, content: Self) -> Self {
        add_metadata(content, |params| {
            params.dynamic_precedence = value;
        })
    }

    /// Zero-or-more repetitions of `rule`.
    pub fn repeat(rule: Self) -> Self {
        Self::Repeat(Box::new(rule))
    }

    /// A choice between `rules`, with nested choices flattened and duplicate
    /// alternatives removed (see `choice_helper`).
    pub fn choice(rules: Vec<Self>) -> Self {
        let mut elements = Vec::with_capacity(rules.len());
        for rule in rules {
            choice_helper(&mut elements, rule);
        }
        Self::Choice(elements)
    }

    /// A sequence of `rules`.
    pub const fn seq(rules: Vec<Self>) -> Self {
        Self::Seq(rules)
    }

    /// Whether this rule consists entirely of empty string literals.
    /// Note that `Blank` is deliberately *not* considered empty here.
    pub fn is_empty(&self) -> bool {
        match self {
            Self::Blank | Self::Pattern(..) | Self::NamedSymbol(_) | Self::Symbol(_) => false,
            Self::String(string) => string.is_empty(),
            Self::Metadata { rule, .. } | Self::Repeat(rule) | Self::Reserved { rule, .. } => {
                rule.is_empty()
            }
            Self::Choice(rules) => rules.iter().any(Self::is_empty),
            Self::Seq(rules) => rules.iter().all(Self::is_empty),
        }
    }
}
impl Alias {
    /// The kind of node this alias produces: named or anonymous.
    #[must_use]
    pub const fn kind(&self) -> VariableType {
        match self.is_named {
            true => VariableType::Named,
            false => VariableType::Anonymous,
        }
    }
}
impl Precedence {
    /// Returns `true` if no precedence was specified.
    #[must_use]
    pub const fn is_none(&self) -> bool {
        match self {
            Self::None => true,
            Self::Integer(_) | Self::Name(_) => false,
        }
    }
}
#[cfg(test)]
impl Rule {
    // Test-only shorthand constructors for building rules in unit tests.
    #[must_use]
    pub const fn terminal(index: usize) -> Self {
        Self::Symbol(Symbol::terminal(index))
    }
    #[must_use]
    pub const fn non_terminal(index: usize) -> Self {
        Self::Symbol(Symbol::non_terminal(index))
    }
    #[must_use]
    pub const fn external(index: usize) -> Self {
        Self::Symbol(Symbol::external(index))
    }
    #[must_use]
    pub fn named(name: &'static str) -> Self {
        Self::NamedSymbol(name.to_string())
    }
    #[must_use]
    pub fn string(value: &'static str) -> Self {
        Self::String(value.to_string())
    }
    #[must_use]
    pub fn pattern(value: &'static str, flags: &'static str) -> Self {
        Self::Pattern(value.to_string(), flags.to_string())
    }
}
impl Symbol {
    /// True if this symbol is a terminal token.
    #[must_use]
    pub fn is_terminal(&self) -> bool {
        matches!(self.kind, SymbolType::Terminal)
    }

    /// True if this symbol is a non-terminal.
    #[must_use]
    pub fn is_non_terminal(&self) -> bool {
        matches!(self.kind, SymbolType::NonTerminal)
    }

    /// True if this symbol is an external token.
    #[must_use]
    pub fn is_external(&self) -> bool {
        matches!(self.kind, SymbolType::External)
    }

    /// True if this symbol is the end-of-file marker.
    #[must_use]
    pub fn is_eof(&self) -> bool {
        matches!(self.kind, SymbolType::End)
    }

    /// A non-terminal symbol with the given index.
    #[must_use]
    pub const fn non_terminal(index: usize) -> Self {
        Self {
            index,
            kind: SymbolType::NonTerminal,
        }
    }

    /// A terminal symbol with the given index.
    #[must_use]
    pub const fn terminal(index: usize) -> Self {
        Self {
            index,
            kind: SymbolType::Terminal,
        }
    }

    /// An external-token symbol with the given index.
    #[must_use]
    pub const fn external(index: usize) -> Self {
        Self {
            index,
            kind: SymbolType::External,
        }
    }

    /// The end-of-file marker symbol.
    #[must_use]
    pub const fn end() -> Self {
        Self {
            index: 0,
            kind: SymbolType::End,
        }
    }

    /// The end-of-nonterminal-extra marker symbol.
    #[must_use]
    pub const fn end_of_nonterminal_extra() -> Self {
        Self {
            index: 0,
            kind: SymbolType::EndOfNonTerminalExtra,
        }
    }
}
impl From<Symbol> for Rule {
    /// Wraps a resolved symbol as a rule.
    fn from(value: Symbol) -> Self {
        Self::Symbol(value)
    }
}
impl TokenSet {
    /// Creates an empty token set.
    #[must_use]
    pub const fn new() -> Self {
        Self {
            terminal_bits: SmallBitVec::new(),
            external_bits: SmallBitVec::new(),
            eof: false,
            end_of_nonterminal_extra: false,
        }
    }

    /// Iterates over every symbol in the set: all terminals, then all
    /// external tokens, then the EOF and end-of-nonterminal-extra
    /// pseudo-tokens, in that order.
    pub fn iter(&self) -> impl Iterator<Item = Symbol> + '_ {
        self.terminal_bits
            .iter()
            .enumerate()
            .filter_map(|(i, value)| {
                if value {
                    Some(Symbol::terminal(i))
                } else {
                    None
                }
            })
            .chain(
                self.external_bits
                    .iter()
                    .enumerate()
                    .filter_map(|(i, value)| {
                        if value {
                            Some(Symbol::external(i))
                        } else {
                            None
                        }
                    }),
            )
            .chain(if self.eof { Some(Symbol::end()) } else { None })
            .chain(if self.end_of_nonterminal_extra {
                Some(Symbol::end_of_nonterminal_extra())
            } else {
                None
            })
    }

    /// Iterates over only the terminal symbols in the set.
    pub fn terminals(&self) -> impl Iterator<Item = Symbol> + '_ {
        self.terminal_bits
            .iter()
            .enumerate()
            .filter_map(|(i, value)| {
                if value {
                    Some(Symbol::terminal(i))
                } else {
                    None
                }
            })
    }

    /// Returns whether `symbol` is in the set.
    ///
    /// # Panics
    /// Panics if `symbol` is a non-terminal; only tokens can be stored here.
    pub fn contains(&self, symbol: &Symbol) -> bool {
        match symbol.kind {
            SymbolType::NonTerminal => panic!("Cannot store non-terminals in a TokenSet"),
            SymbolType::Terminal => self.terminal_bits.get(symbol.index).unwrap_or(false),
            SymbolType::External => self.external_bits.get(symbol.index).unwrap_or(false),
            SymbolType::End => self.eof,
            SymbolType::EndOfNonTerminalExtra => self.end_of_nonterminal_extra,
        }
    }

    /// Returns whether the terminal with the given index is in the set.
    pub fn contains_terminal(&self, index: usize) -> bool {
        self.terminal_bits.get(index).unwrap_or(false)
    }

    /// Inserts a symbol, growing the backing bit vector as needed.
    ///
    /// # Panics
    /// Panics if `other` is a non-terminal.
    pub fn insert(&mut self, other: Symbol) {
        let vec = match other.kind {
            SymbolType::NonTerminal => panic!("Cannot store non-terminals in a TokenSet"),
            SymbolType::Terminal => &mut self.terminal_bits,
            SymbolType::External => &mut self.external_bits,
            // The pseudo-tokens are plain flags, not bits in a vector.
            SymbolType::End => {
                self.eof = true;
                return;
            }
            SymbolType::EndOfNonTerminalExtra => {
                self.end_of_nonterminal_extra = true;
                return;
            }
        };
        if other.index >= vec.len() {
            vec.resize(other.index + 1, false);
        }
        vec.set(other.index, true);
    }

    /// Removes a symbol, returning whether it was present. Trailing zero
    /// bits are popped afterwards to keep the representation compact.
    ///
    /// # Panics
    /// Panics if `other` is a non-terminal.
    pub fn remove(&mut self, other: &Symbol) -> bool {
        let vec = match other.kind {
            SymbolType::NonTerminal => panic!("Cannot store non-terminals in a TokenSet"),
            SymbolType::Terminal => &mut self.terminal_bits,
            SymbolType::External => &mut self.external_bits,
            SymbolType::End => {
                return if self.eof {
                    self.eof = false;
                    true
                } else {
                    false
                }
            }
            SymbolType::EndOfNonTerminalExtra => {
                return if self.end_of_nonterminal_extra {
                    self.end_of_nonterminal_extra = false;
                    true
                } else {
                    false
                };
            }
        };
        if other.index < vec.len() && vec[other.index] {
            vec.set(other.index, false);
            // Trim any trailing zeros left behind by the removal.
            while vec.last() == Some(false) {
                vec.pop();
            }
            return true;
        }
        false
    }

    /// Returns whether the set contains no symbols at all.
    pub fn is_empty(&self) -> bool {
        !self.eof
            && !self.end_of_nonterminal_extra
            && !self.terminal_bits.iter().any(|a| a)
            && !self.external_bits.iter().any(|a| a)
    }

    /// Returns the number of symbols in the set.
    pub fn len(&self) -> usize {
        self.eof as usize
            + self.end_of_nonterminal_extra as usize
            + self.terminal_bits.iter().filter(|b| *b).count()
            + self.external_bits.iter().filter(|b| *b).count()
    }

    /// Unions `other`'s terminals into `self`; returns whether any new
    /// terminal was added.
    pub fn insert_all_terminals(&mut self, other: &Self) -> bool {
        let mut result = false;
        if other.terminal_bits.len() > self.terminal_bits.len() {
            self.terminal_bits.resize(other.terminal_bits.len(), false);
        }
        for (i, element) in other.terminal_bits.iter().enumerate() {
            if element {
                result |= !self.terminal_bits[i];
                self.terminal_bits.set(i, element);
            }
        }
        result
    }

    /// Unions `other`'s external tokens into `self`; returns whether any new
    /// external token was added.
    fn insert_all_externals(&mut self, other: &Self) -> bool {
        let mut result = false;
        if other.external_bits.len() > self.external_bits.len() {
            self.external_bits.resize(other.external_bits.len(), false);
        }
        for (i, element) in other.external_bits.iter().enumerate() {
            if element {
                result |= !self.external_bits[i];
                self.external_bits.set(i, element);
            }
        }
        result
    }

    /// Unions the whole of `other` into `self`; returns whether anything new
    /// was added.
    pub fn insert_all(&mut self, other: &Self) -> bool {
        let mut result = false;
        if other.eof {
            result |= !self.eof;
            self.eof = true;
        }
        if other.end_of_nonterminal_extra {
            result |= !self.end_of_nonterminal_extra;
            self.end_of_nonterminal_extra = true;
        }
        result |= self.insert_all_terminals(other);
        result |= self.insert_all_externals(other);
        result
    }
}
impl FromIterator<Symbol> for TokenSet {
    /// Builds a set containing every symbol yielded by the iterator.
    fn from_iter<T: IntoIterator<Item = Symbol>>(iter: T) -> Self {
        let mut set = Self::new();
        iter.into_iter().for_each(|symbol| set.insert(symbol));
        set
    }
}
/// Applies a metadata mutation to a rule.
///
/// If the rule is already a non-token `Metadata` wrapper, the mutation is
/// applied to its existing params, so stacked wrappers collapse into one.
/// Token rules are never merged into: metadata applied around `token(...)`
/// gets its own fresh wrapper instead.
fn add_metadata<T: FnOnce(&mut MetadataParams)>(input: Rule, f: T) -> Rule {
    match input {
        Rule::Metadata { rule, mut params } if !params.is_token => {
            f(&mut params);
            Rule::Metadata { rule, params }
        }
        _ => {
            let mut params = MetadataParams::default();
            f(&mut params);
            Rule::Metadata {
                rule: Box::new(input),
                params,
            }
        }
    }
}
/// Flattens `rule` into `result`: nested `Choice` alternatives are spliced
/// inline, and duplicates are skipped (first occurrence wins).
fn choice_helper(result: &mut Vec<Rule>, rule: Rule) {
    if let Rule::Choice(elements) = rule {
        for element in elements {
            choice_helper(result, element);
        }
    } else if !result.contains(&rule) {
        result.push(rule);
    }
}
impl fmt::Display for Precedence {
    /// Renders a precedence for diagnostics: `none`, a bare integer, or a
    /// single-quoted name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::None => write!(f, "none"),
            Self::Integer(i) => write!(f, "{i}"),
            Self::Name(s) => write!(f, "'{s}'"),
        }
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/tables.rs | crates/generate/src/tables.rs | use std::collections::BTreeMap;
use super::{
nfa::CharacterSet,
rules::{Alias, Symbol, TokenSet},
};
pub type ProductionInfoId = usize;
pub type ParseStateId = usize;
pub type LexStateId = usize;
use std::hash::BuildHasherDefault;
use indexmap::IndexMap;
use rustc_hash::FxHasher;
/// A parse action stored in a state's terminal entries.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum ParseAction {
    /// Accept the input as a complete parse.
    Accept,
    /// Consume the lookahead and move to `state`.
    Shift {
        state: ParseStateId,
        is_repetition: bool,
    },
    /// Consume the lookahead as an "extra" token.
    ShiftExtra,
    /// Begin error recovery.
    Recover,
    /// Reduce the last `child_count` stack entries to `symbol`.
    Reduce {
        symbol: Symbol,
        child_count: usize,
        dynamic_precedence: i32,
        /// Index into `ParseTable::production_infos`.
        production_id: ProductionInfoId,
    },
}

/// The action taken for a non-terminal after a reduction.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum GotoAction {
    Goto(ParseStateId),
    ShiftExtra,
}

/// The list of actions associated with one lookahead token in one state.
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub struct ParseTableEntry {
    pub actions: Vec<ParseAction>,
    // NOTE(review): presumably controls whether an already-lexed token can
    // be reused for this entry -- confirm in build_tables/render.
    pub reusable: bool,
}

/// A single state in the parse table.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
pub struct ParseState {
    pub id: ParseStateId,
    /// Actions keyed by lookahead terminal.
    pub terminal_entries: IndexMap<Symbol, ParseTableEntry, BuildHasherDefault<FxHasher>>,
    /// Gotos keyed by non-terminal.
    pub nonterminal_entries: IndexMap<Symbol, GotoAction, BuildHasherDefault<FxHasher>>,
    pub reserved_words: TokenSet,
    pub lex_state_id: usize,
    pub external_lex_state_id: usize,
    pub core_id: usize,
}

/// Where a field's value occurs within a production.
#[derive(Clone, Copy, Debug, Default, PartialEq, Eq)]
pub struct FieldLocation {
    pub index: usize,
    pub inherited: bool,
}

/// Per-production metadata: the alias for each child plus field mappings.
#[derive(Debug, Default, PartialEq, Eq)]
pub struct ProductionInfo {
    pub alias_sequence: Vec<Option<Alias>>,
    pub field_map: BTreeMap<String, Vec<FieldLocation>>,
}

/// The complete LR parse table for a grammar.
#[derive(Debug, Default, PartialEq, Eq)]
pub struct ParseTable {
    pub states: Vec<ParseState>,
    pub symbols: Vec<Symbol>,
    pub production_infos: Vec<ProductionInfo>,
    pub max_aliased_production_length: usize,
    pub external_lex_states: Vec<TokenSet>,
}

/// A transition in the lexer's state machine.
#[derive(Clone, Debug, PartialEq, Eq, PartialOrd, Ord)]
pub struct AdvanceAction {
    pub state: LexStateId,
    pub in_main_token: bool,
}

/// A single lexer state: what it accepts and where each character leads.
#[derive(Clone, Debug, Default, PartialEq, Eq, PartialOrd, Ord)]
pub struct LexState {
    pub accept_action: Option<Symbol>,
    pub eof_action: Option<AdvanceAction>,
    pub advance_actions: Vec<(CharacterSet, AdvanceAction)>,
}

/// The complete lexer state machine.
#[derive(Debug, PartialEq, Eq, Default)]
pub struct LexTable {
    pub states: Vec<LexState>,
}
impl ParseTableEntry {
    /// Creates an empty entry with no actions, marked reusable.
    #[must_use]
    pub const fn new() -> Self {
        Self {
            actions: Vec::new(),
            reusable: true,
        }
    }
}
impl ParseState {
    /// True if this state has an action for the end-of-nonterminal-extra
    /// pseudo-token.
    pub fn is_end_of_non_terminal_extra(&self) -> bool {
        self.terminal_entries
            .contains_key(&Symbol::end_of_nonterminal_extra())
    }

    /// Iterates over the ids of all states reachable from this one via
    /// shift actions or gotos.
    pub fn referenced_states(&self) -> impl Iterator<Item = ParseStateId> + '_ {
        self.terminal_entries
            .iter()
            .flat_map(|(_, entry)| {
                entry.actions.iter().filter_map(|action| match action {
                    ParseAction::Shift { state, .. } => Some(*state),
                    _ => None,
                })
            })
            .chain(self.nonterminal_entries.iter().filter_map(|(_, action)| {
                if let GotoAction::Goto(state) = action {
                    Some(*state)
                } else {
                    None
                }
            }))
    }

    /// Rewrites every referenced state id through `f`.
    ///
    /// Updates are collected first and applied afterwards, because `f` also
    /// receives `&self` and so the entries cannot be mutated while iterating.
    pub fn update_referenced_states<F>(&mut self, mut f: F)
    where
        F: FnMut(usize, &Self) -> usize,
    {
        let mut updates = Vec::new();
        for (symbol, entry) in &self.terminal_entries {
            for (i, action) in entry.actions.iter().enumerate() {
                if let ParseAction::Shift { state, .. } = action {
                    let result = f(*state, self);
                    if result != *state {
                        updates.push((*symbol, i, result));
                    }
                }
            }
        }
        for (symbol, action) in &self.nonterminal_entries {
            if let GotoAction::Goto(other_state) = action {
                let result = f(*other_state, self);
                if result != *other_state {
                    // Goto entries hold a single action; index 0 is a placeholder.
                    updates.push((*symbol, 0, result));
                }
            }
        }
        for (symbol, action_index, new_state) in updates {
            if symbol.is_non_terminal() {
                self.nonterminal_entries
                    .insert(symbol, GotoAction::Goto(new_state));
            } else {
                let entry = self.terminal_entries.get_mut(&symbol).unwrap();
                // Preserve the repetition flag while swapping the target state.
                if let ParseAction::Shift { is_repetition, .. } = entry.actions[action_index] {
                    entry.actions[action_index] = ParseAction::Shift {
                        state: new_state,
                        is_repetition,
                    };
                }
            }
        }
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/generate.rs | crates/generate/src/generate.rs | use std::{collections::BTreeMap, sync::LazyLock};
#[cfg(feature = "load")]
use std::{
env, fs,
io::Write,
path::{Path, PathBuf},
process::{Command, Stdio},
};
use bitflags::bitflags;
use log::warn;
use node_types::VariableInfo;
use regex::{Regex, RegexBuilder};
use rules::{Alias, Symbol};
#[cfg(feature = "load")]
use semver::Version;
#[cfg(feature = "load")]
use serde::Deserialize;
use serde::Serialize;
use thiserror::Error;
mod build_tables;
mod dedup;
mod grammars;
mod nfa;
mod node_types;
pub mod parse_grammar;
mod prepare_grammar;
#[cfg(feature = "qjs-rt")]
mod quickjs;
mod render;
mod rules;
mod tables;
use build_tables::build_tables;
pub use build_tables::ParseTableBuilderError;
use grammars::{InlinedProductionMap, InputGrammar, LexicalGrammar, SyntaxGrammar};
pub use node_types::{SuperTypeCycleError, VariableInfoError};
use parse_grammar::parse_grammar;
pub use parse_grammar::ParseGrammarError;
use prepare_grammar::prepare_grammar;
pub use prepare_grammar::PrepareGrammarError;
use render::render_c_code;
pub use render::{ABI_VERSION_MAX, ABI_VERSION_MIN};
// Matches full-line `//` comments so they can be stripped from grammar JSON
// input before parsing (JSON itself does not allow comments).
static JSON_COMMENT_REGEX: LazyLock<Regex> = LazyLock::new(|| {
    RegexBuilder::new("^\\s*//.*")
        .multi_line(true)
        .build()
        .unwrap()
});

// Intermediate results shared between node-type generation and parser
// generation.
struct JSONOutput {
    #[cfg(feature = "load")]
    node_types_json: String,
    syntax_grammar: SyntaxGrammar,
    lexical_grammar: LexicalGrammar,
    inlines: InlinedProductionMap,
    simple_aliases: BTreeMap<Symbol, Alias>,
    variable_info: Vec<VariableInfo>,
}

// The rendered artifacts of parser generation.
struct GeneratedParser {
    c_code: String,
    #[cfg(feature = "load")]
    node_types_json: String,
}

// NOTE: This constant must be kept in sync with the definition of
// `TREE_SITTER_LANGUAGE_VERSION` in `lib/include/tree_sitter/api.h`.
const LANGUAGE_VERSION: usize = 15;

// Template headers shipped alongside each generated parser.
pub const ALLOC_HEADER: &str = include_str!("templates/alloc.h");
pub const ARRAY_HEADER: &str = include_str!("templates/array.h");
pub const PARSER_HEADER: &str = include_str!("parser.h.inc");

pub type GenerateResult<T> = Result<T, GenerateError>;
/// Top-level error type for parser generation.
#[derive(Debug, Error, Serialize)]
pub enum GenerateError {
    #[error("Error with specified path -- {0}")]
    GrammarPath(String),
    #[error(transparent)]
    IO(IoError),
    #[cfg(feature = "load")]
    #[error(transparent)]
    LoadGrammarFile(#[from] LoadGrammarError),
    #[error(transparent)]
    ParseGrammar(#[from] ParseGrammarError),
    #[error(transparent)]
    Prepare(#[from] PrepareGrammarError),
    #[error(transparent)]
    VariableInfo(#[from] VariableInfoError),
    #[error(transparent)]
    BuildTables(#[from] ParseTableBuilderError),
    #[cfg(feature = "load")]
    #[error(transparent)]
    ParseVersion(#[from] ParseVersionError),
    #[error(transparent)]
    SuperTypeCycle(#[from] SuperTypeCycleError),
}

/// A serializable I/O error: the message plus the path involved, if known.
#[derive(Debug, Error, Serialize)]
pub struct IoError {
    pub error: String,
    pub path: Option<String>,
}
impl IoError {
    /// Captures an I/O error message together with the path involved, when
    /// one is known.
    fn new(error: &std::io::Error, path: Option<&Path>) -> Self {
        let path = path.map(|p| p.to_string_lossy().to_string());
        Self {
            error: error.to_string(),
            path,
        }
    }
}

impl std::fmt::Display for IoError {
    /// Formats as `<error> (<path>)`, omitting the path when unknown.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.path {
            Some(path) => write!(f, "{} ({path})", self.error),
            None => write!(f, "{}", self.error),
        }
    }
}
#[cfg(feature = "load")]
pub type LoadGrammarFileResult<T> = Result<T, LoadGrammarError>;

/// Errors from locating and loading a grammar file (`.js` or `.json`).
#[cfg(feature = "load")]
#[derive(Debug, Error, Serialize)]
pub enum LoadGrammarError {
    #[error("Path to a grammar file with `.js` or `.json` extension is required")]
    InvalidPath,
    #[error("Failed to load grammar.js -- {0}")]
    LoadJSGrammarFile(#[from] JSError),
    #[error("Failed to load grammar.json -- {0}")]
    IO(IoError),
    #[error("Unknown grammar file extension: {0:?}")]
    FileExtension(PathBuf),
}

/// Errors from reading the grammar version out of `tree-sitter.json`.
#[cfg(feature = "load")]
#[derive(Debug, Error, Serialize)]
pub enum ParseVersionError {
    /// The version string was not valid semver.
    #[error("{0}")]
    Version(String),
    /// The config file was not valid JSON.
    #[error("{0}")]
    JSON(String),
    #[error(transparent)]
    IO(IoError),
}
#[cfg(feature = "load")]
pub type JSResult<T> = Result<T, JSError>;

/// Errors produced while evaluating a JavaScript grammar, either through an
/// external runtime process or the embedded QuickJS runtime.
#[cfg(feature = "load")]
#[derive(Debug, Error, Serialize)]
pub enum JSError {
    /// The JS runtime executable could not be spawned.
    #[error("Failed to run `{runtime}` -- {error}")]
    JSRuntimeSpawn { runtime: String, error: String },
    /// The runtime produced output that was not valid UTF-8.
    #[error("Got invalid UTF8 from `{runtime}` -- {error}")]
    JSRuntimeUtf8 { runtime: String, error: String },
    /// The runtime exited with a non-zero (or missing) status code.
    #[error("`{runtime}` process exited with status {code}")]
    JSRuntimeExit { runtime: String, code: i32 },
    /// The runtime's stdin handle could not be acquired.
    #[error("Failed to open stdin for `{runtime}`")]
    JSRuntimeStdin { runtime: String },
    /// Writing to the runtime's stdin failed.
    #[error("Failed to write {item} to `{runtime}`'s stdin -- {error}")]
    JSRuntimeWrite {
        runtime: String,
        item: String,
        error: String,
    },
    /// Reading the runtime's output failed.
    #[error("Failed to read output from `{runtime}` -- {error}")]
    JSRuntimeRead { runtime: String, error: String },
    #[error(transparent)]
    IO(IoError),
    #[cfg(feature = "qjs-rt")]
    #[error("Failed to get relative path")]
    RelativePath,
    /// This package's own version string is not valid semver.
    #[error("Could not parse this package's version as semver -- {0}")]
    Semver(String),
    /// Serializing the grammar to JSON failed.
    // The variant name is misspelled, but renaming it would break the public
    // API; only the user-facing message was corrected ("serialze" ->
    // "serialize").
    #[error("Failed to serialize grammar JSON -- {0}")]
    Serialzation(String),
    #[cfg(feature = "qjs-rt")]
    #[error("QuickJS error: {0}")]
    QuickJS(String),
}
#[cfg(feature = "load")]
impl From<serde_json::Error> for JSError {
    // JSON (de)serialization failures map to the serialization variant.
    fn from(value: serde_json::Error) -> Self {
        Self::Serialzation(value.to_string())
    }
}

#[cfg(feature = "load")]
impl From<semver::Error> for JSError {
    fn from(value: semver::Error) -> Self {
        Self::Semver(value.to_string())
    }
}

#[cfg(feature = "qjs-rt")]
impl From<rquickjs::Error> for JSError {
    fn from(value: rquickjs::Error) -> Self {
        Self::QuickJS(value.to_string())
    }
}
bitflags! {
    /// Optional optimization passes applied while building parse tables.
    #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
    pub struct OptLevel: u32 {
        /// Enable the parse-state merging optimization.
        const MergeStates = 1 << 0;
    }
}

impl Default for OptLevel {
    // State merging is enabled by default.
    fn default() -> Self {
        Self::MergeStates
    }
}
/// Generates a parser in `repo_path` (or `out_path`): evaluates the grammar
/// file, writes `grammar.json` and `node-types.json`, and -- when
/// `generate_parser` is true -- also `parser.c` plus the bundled headers
/// under `src/tree_sitter/`.
///
/// If no `tree-sitter.json` is found, a requested `abi_version` above
/// `ABI_VERSION_MIN` is downgraded with a warning.
#[cfg(feature = "load")]
#[allow(clippy::too_many_arguments)]
pub fn generate_parser_in_directory<T, U, V>(
    repo_path: T,
    out_path: Option<U>,
    grammar_path: Option<V>,
    mut abi_version: usize,
    report_symbol_name: Option<&str>,
    js_runtime: Option<&str>,
    generate_parser: bool,
    optimizations: OptLevel,
) -> GenerateResult<()>
where
    T: Into<PathBuf>,
    U: Into<PathBuf>,
    V: Into<PathBuf>,
{
    let mut repo_path: PathBuf = repo_path.into();

    // Populate a new empty grammar directory.
    let grammar_path = if let Some(path) = grammar_path {
        let path_buf: PathBuf = path.into();
        if !path_buf
            .try_exists()
            .map_err(|e| GenerateError::GrammarPath(e.to_string()))?
        {
            // The path doesn't exist yet: treat it as a new grammar directory
            // and expect a `grammar.js` inside it.
            fs::create_dir_all(&path_buf)
                .map_err(|e| GenerateError::IO(IoError::new(&e, Some(path_buf.as_path()))))?;
            repo_path = path_buf;
            repo_path.join("grammar.js")
        } else {
            path_buf
        }
    } else {
        repo_path.join("grammar.js")
    };

    // Read the grammar file.
    let grammar_json = load_grammar_file(&grammar_path, js_runtime)?;

    let src_path = out_path.map_or_else(|| repo_path.join("src"), |p| p.into());
    let header_path = src_path.join("tree_sitter");

    // Ensure that the output directory exists
    fs::create_dir_all(&src_path)
        .map_err(|e| GenerateError::IO(IoError::new(&e, Some(src_path.as_path()))))?;

    // Don't clobber the input when generating directly from grammar.json.
    if grammar_path.file_name().unwrap() != "grammar.json" {
        fs::write(src_path.join("grammar.json"), &grammar_json)
            .map_err(|e| GenerateError::IO(IoError::new(&e, Some(src_path.as_path()))))?;
    }

    // If our job is only to generate `grammar.json` and not `parser.c`, stop here.
    let input_grammar = parse_grammar(&grammar_json)?;
    if !generate_parser {
        let node_types_json = generate_node_types_from_grammar(&input_grammar)?.node_types_json;
        write_file(&src_path.join("node-types.json"), node_types_json)?;
        return Ok(());
    }

    let semantic_version = read_grammar_version(&repo_path)?;
    if semantic_version.is_none() && abi_version > ABI_VERSION_MIN {
        warn!(
            concat!(
                "No `tree-sitter.json` file found in your grammar, ",
                "this file is required to generate with ABI {}. ",
                "Using ABI version {} instead.\n",
                "This file can be set up with `tree-sitter init`. ",
                "For more information, see https://tree-sitter.github.io/tree-sitter/cli/init."
            ),
            abi_version, ABI_VERSION_MIN
        );
        abi_version = ABI_VERSION_MIN;
    }

    // Generate the parser and related files.
    let GeneratedParser {
        c_code,
        node_types_json,
    } = generate_parser_for_grammar_with_opts(
        &input_grammar,
        abi_version,
        semantic_version.map(|v| (v.major as u8, v.minor as u8, v.patch as u8)),
        report_symbol_name,
        optimizations,
    )?;

    write_file(&src_path.join("parser.c"), c_code)?;
    write_file(&src_path.join("node-types.json"), node_types_json)?;
    fs::create_dir_all(&header_path)
        .map_err(|e| GenerateError::IO(IoError::new(&e, Some(header_path.as_path()))))?;
    write_file(&header_path.join("alloc.h"), ALLOC_HEADER)?;
    write_file(&header_path.join("array.h"), ARRAY_HEADER)?;
    write_file(&header_path.join("parser.h"), PARSER_HEADER)?;
    Ok(())
}
/// Generates a parser from grammar JSON, returning `(grammar_name, c_code)`.
///
/// Full-line `//` comments are stripped from the JSON first; the parser is
/// rendered at the current `LANGUAGE_VERSION` with no extra optimizations.
pub fn generate_parser_for_grammar(
    grammar_json: &str,
    semantic_version: Option<(u8, u8, u8)>,
) -> GenerateResult<(String, String)> {
    let grammar_json = JSON_COMMENT_REGEX.replace_all(grammar_json, "\n");
    let input_grammar = parse_grammar(&grammar_json)?;
    let parser = generate_parser_for_grammar_with_opts(
        &input_grammar,
        LANGUAGE_VERSION,
        semantic_version,
        None,
        OptLevel::empty(),
    )?;
    Ok((input_grammar.name, parser.c_code))
}
/// Prepares the grammar and computes per-variable node-type information.
/// The serialized `node-types.json` text is produced only when the `load`
/// feature is enabled.
fn generate_node_types_from_grammar(input_grammar: &InputGrammar) -> GenerateResult<JSONOutput> {
    let (syntax_grammar, lexical_grammar, inlines, simple_aliases) =
        prepare_grammar(input_grammar)?;
    let variable_info =
        node_types::get_variable_info(&syntax_grammar, &lexical_grammar, &simple_aliases)?;
    #[cfg(feature = "load")]
    let node_types_json = node_types::generate_node_types_json(
        &syntax_grammar,
        &lexical_grammar,
        &simple_aliases,
        &variable_info,
    )?;
    Ok(JSONOutput {
        #[cfg(feature = "load")]
        node_types_json: serde_json::to_string_pretty(&node_types_json).unwrap(),
        syntax_grammar,
        lexical_grammar,
        inlines,
        simple_aliases,
        variable_info,
    })
}
/// Builds the parse and lex tables for `input_grammar` and renders them to
/// C source at the requested ABI version.
fn generate_parser_for_grammar_with_opts(
    input_grammar: &InputGrammar,
    abi_version: usize,
    semantic_version: Option<(u8, u8, u8)>,
    report_symbol_name: Option<&str>,
    optimizations: OptLevel,
) -> GenerateResult<GeneratedParser> {
    let JSONOutput {
        syntax_grammar,
        lexical_grammar,
        inlines,
        simple_aliases,
        variable_info,
        #[cfg(feature = "load")]
        node_types_json,
    } = generate_node_types_from_grammar(input_grammar)?;
    let supertype_symbol_map =
        node_types::get_supertype_symbol_map(&syntax_grammar, &simple_aliases, &variable_info);
    let tables = build_tables(
        &syntax_grammar,
        &lexical_grammar,
        &simple_aliases,
        &variable_info,
        &inlines,
        report_symbol_name,
        optimizations,
    )?;
    let c_code = render_c_code(
        &input_grammar.name,
        tables,
        syntax_grammar,
        lexical_grammar,
        simple_aliases,
        abi_version,
        semantic_version,
        supertype_symbol_map,
    );
    Ok(GeneratedParser {
        c_code,
        #[cfg(feature = "load")]
        node_types_json,
    })
}
/// This will read the `tree-sitter.json` config file and attempt to extract the version.
///
/// If the file is not found in the current directory or any of its parent directories, this will
/// return `None` to maintain backwards compatibility. If the file is found but the version cannot
/// be parsed as semver, this will return an error.
#[cfg(feature = "load")]
fn read_grammar_version(repo_path: &Path) -> Result<Option<Version>, ParseVersionError> {
    // Only the `metadata.version` field is needed from the config file.
    #[derive(Deserialize)]
    struct TreeSitterJson {
        metadata: Metadata,
    }
    #[derive(Deserialize)]
    struct Metadata {
        version: String,
    }
    let filename = "tree-sitter.json";
    let mut path = repo_path.join(filename);
    // Walk up the directory tree until a config file is found or the
    // filesystem root is reached.
    loop {
        let json = path
            .exists()
            .then(|| {
                let contents = fs::read_to_string(path.as_path())
                    .map_err(|e| ParseVersionError::IO(IoError::new(&e, Some(path.as_path()))))?;
                serde_json::from_str::<TreeSitterJson>(&contents).map_err(|e| {
                    ParseVersionError::JSON(format!("Failed to parse `{}` -- {e}", path.display()))
                })
            })
            .transpose()?;
        if let Some(json) = json {
            return Version::parse(&json.metadata.version)
                .map_err(|e| {
                    ParseVersionError::Version(format!(
                        "Failed to parse `{}` version as semver -- {e}",
                        path.display()
                    ))
                })
                .map(Some);
        }
        path.pop(); // filename
        if !path.pop() {
            // Reached the root without finding a config file.
            return Ok(None);
        }
        path.push(filename);
    }
}
/// Loads grammar JSON from a grammar file: `.js` files are evaluated with a
/// JS runtime, `.json` files are read directly. Directories and other
/// extensions are rejected.
#[cfg(feature = "load")]
pub fn load_grammar_file(
    grammar_path: &Path,
    js_runtime: Option<&str>,
) -> LoadGrammarFileResult<String> {
    if grammar_path.is_dir() {
        return Err(LoadGrammarError::InvalidPath);
    }
    let extension = grammar_path.extension().and_then(|e| e.to_str());
    if extension == Some("js") {
        Ok(load_js_grammar_file(grammar_path, js_runtime)?)
    } else if extension == Some("json") {
        fs::read_to_string(grammar_path)
            .map_err(|e| LoadGrammarError::IO(IoError::new(&e, Some(grammar_path))))
    } else {
        Err(LoadGrammarError::FileExtension(grammar_path.to_owned()))
    }
}
/// Evaluates a `grammar.js` file and returns the pretty-printed grammar JSON
/// it produces on stdout.
///
/// When the `qjs-rt` feature is enabled and `js_runtime` is `"native"`, the
/// embedded QuickJS runtime is used. Otherwise an external runtime (`node`
/// by default; `bun` and `deno` are also recognized) is spawned: the grammar
/// path is passed via the `TREE_SITTER_GRAMMAR_PATH` environment variable
/// and the bundled DSL shim is piped to the runtime's stdin. Everything the
/// runtime prints before its final output line is forwarded to our stdout;
/// the final line must be the grammar JSON.
#[cfg(feature = "load")]
fn load_js_grammar_file(grammar_path: &Path, js_runtime: Option<&str>) -> JSResult<String> {
    let grammar_path = dunce::canonicalize(grammar_path)
        .map_err(|e| JSError::IO(IoError::new(&e, Some(grammar_path))))?;

    #[cfg(feature = "qjs-rt")]
    if js_runtime == Some("native") {
        return quickjs::execute_native_runtime(&grammar_path);
    }

    // The "file:///" prefix is incompatible with the quickjs runtime, but is required
    // for node and bun
    #[cfg(windows)]
    let grammar_path = PathBuf::from(format!("file:///{}", grammar_path.display()));

    let js_runtime = js_runtime.unwrap_or("node");

    let mut js_command = Command::new(js_runtime);
    match js_runtime {
        // node reads an ES module from stdin.
        "node" => {
            js_command.args(["--input-type=module", "-"]);
        }
        "bun" => {
            js_command.arg("-");
        }
        "deno" => {
            js_command.args(["run", "--allow-all", "-"]);
        }
        _ => {}
    }

    let mut js_process = js_command
        .env("TREE_SITTER_GRAMMAR_PATH", grammar_path)
        .stdin(Stdio::piped())
        .stdout(Stdio::piped())
        .spawn()
        .map_err(|e| JSError::JSRuntimeSpawn {
            runtime: js_runtime.to_string(),
            error: e.to_string(),
        })?;

    let mut js_stdin = js_process
        .stdin
        .take()
        .ok_or_else(|| JSError::JSRuntimeStdin {
            runtime: js_runtime.to_string(),
        })?;

    // Expose the CLI version to the DSL so grammars can feature-detect it.
    let cli_version = Version::parse(env!("CARGO_PKG_VERSION"))?;
    write!(
        js_stdin,
        "globalThis.TREE_SITTER_CLI_VERSION_MAJOR = {};
        globalThis.TREE_SITTER_CLI_VERSION_MINOR = {};
        globalThis.TREE_SITTER_CLI_VERSION_PATCH = {};",
        cli_version.major, cli_version.minor, cli_version.patch,
    )
    .map_err(|e| JSError::JSRuntimeWrite {
        runtime: js_runtime.to_string(),
        item: "tree-sitter version".to_string(),
        error: e.to_string(),
    })?;

    // BUG FIX: `write` may perform a partial write, silently truncating the
    // DSL shim sent to the runtime; `write_all` guarantees the whole buffer
    // is written (or an error is returned).
    js_stdin
        .write_all(include_bytes!("./dsl.js"))
        .map_err(|e| JSError::JSRuntimeWrite {
            runtime: js_runtime.to_string(),
            item: "grammar dsl".to_string(),
            error: e.to_string(),
        })?;
    // Close stdin so the runtime sees EOF and starts executing.
    drop(js_stdin);

    let output = js_process
        .wait_with_output()
        .map_err(|e| JSError::JSRuntimeRead {
            runtime: js_runtime.to_string(),
            error: e.to_string(),
        })?;
    match output.status.code() {
        Some(0) => {
            let stdout = String::from_utf8(output.stdout).map_err(|e| JSError::JSRuntimeUtf8 {
                runtime: js_runtime.to_string(),
                error: e.to_string(),
            })?;

            // The grammar JSON is the last line of output; everything before
            // it (e.g. `console.log` from the grammar) is forwarded verbatim.
            let mut grammar_json = &stdout[..];
            if let Some(pos) = stdout.rfind('\n') {
                // If there's a newline, split the last line from the rest of the output
                let node_output = &stdout[..pos];
                grammar_json = &stdout[pos + 1..];

                let mut stdout = std::io::stdout().lock();
                stdout
                    .write_all(node_output.as_bytes())
                    .map_err(|e| JSError::IO(IoError::new(&e, None)))?;
                stdout
                    .write_all(b"\n")
                    .map_err(|e| JSError::IO(IoError::new(&e, None)))?;
                stdout
                    .flush()
                    .map_err(|e| JSError::IO(IoError::new(&e, None)))?;
            }

            // Round-trip through serde to validate and normalize the JSON.
            Ok(serde_json::to_string_pretty(&serde_json::from_str::<
                serde_json::Value,
            >(grammar_json)?)?)
        }
        Some(code) => Err(JSError::JSRuntimeExit {
            runtime: js_runtime.to_string(),
            code,
        }),
        // The process was terminated by a signal; report -1.
        None => Err(JSError::JSRuntimeExit {
            runtime: js_runtime.to_string(),
            code: -1,
        }),
    }
}
/// Writes `body` to `path`, wrapping any I/O failure with the path involved.
#[cfg(feature = "load")]
pub fn write_file(path: &Path, body: impl AsRef<[u8]>) -> GenerateResult<()> {
    fs::write(path, body).map_err(|e| GenerateError::IO(IoError::new(&e, Some(path))))
}
#[cfg(test)]
mod tests {
    use super::{LANGUAGE_VERSION, PARSER_HEADER};

    // Guards the sync requirement documented on `LANGUAGE_VERSION`.
    #[test]
    fn test_language_versions_are_in_sync() {
        let api_h = include_str!("../../../lib/include/tree_sitter/api.h");
        let api_language_version = api_h
            .lines()
            .find_map(|line| {
                line.trim()
                    .strip_prefix("#define TREE_SITTER_LANGUAGE_VERSION ")
                    .and_then(|v| v.parse::<usize>().ok())
            })
            .expect("Failed to find TREE_SITTER_LANGUAGE_VERSION definition in api.h");
        assert_eq!(LANGUAGE_VERSION, api_language_version);
    }

    // The vendored parser.h.inc must stay byte-identical to lib/src/parser.h.
    #[test]
    fn test_parser_header_in_sync() {
        let parser_h = include_str!("../../../lib/src/parser.h");
        assert!(
            parser_h == PARSER_HEADER,
            "parser.h.inc is out of sync with lib/src/parser.h. Run: cp lib/src/parser.h crates/generate/src/parser.h.inc"
        );
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/quickjs.rs | crates/generate/src/quickjs.rs | use std::{
collections::HashMap,
path::{Path, PathBuf},
sync::{LazyLock, Mutex},
};
use log::{error, info, warn};
use rquickjs::{
loader::{FileResolver, ScriptLoader},
Context, Ctx, Function, Module, Object, Runtime, Type, Value,
};
use super::{IoError, JSError, JSResult};
// The grammar DSL shim, bundled at compile time.
// NOTE(review): this constant is not referenced in the visible part of this
// module -- presumably consumed elsewhere in the file; confirm before removing.
const DSL: &[u8] = include_bytes!("dsl.js");

/// Converts `rquickjs` results into our `JSResult`, turning pending JS
/// exceptions into formatted error messages.
trait JSResultExt<T> {
    fn or_js_error(self, ctx: &Ctx) -> JSResult<T>;
}

impl<T> JSResultExt<T> for Result<T, rquickjs::Error> {
    fn or_js_error(self, ctx: &Ctx) -> JSResult<T> {
        match self {
            Ok(v) => Ok(v),
            // `Error::Exception` means a JS exception is pending on the
            // context; fetch it with `catch` and format it.
            Err(rquickjs::Error::Exception) => Err(format_js_exception(ctx.catch())),
            Err(e) => Err(JSError::QuickJS(e.to_string())),
        }
    }
}
/// Formats a caught JS value as a `JSError`, pulling out the exception's
/// message, stack, and name when they are available.
fn format_js_exception(v: Value) -> JSError {
    let Some(exception) = v.into_exception() else {
        return JSError::QuickJS("Expected a JS exception".to_string());
    };
    let error_obj = exception.as_object();
    let mut parts = Vec::new();
    for (key, label) in [("message", "Message"), ("stack", "Stack"), ("name", "Type")] {
        if let Ok(value) = error_obj.get::<_, String>(key) {
            parts.push(format!("{label}: {value}"));
        }
    }
    if parts.is_empty() {
        // Fall back to the exception's own string representation.
        JSError::QuickJS(exception.to_string())
    } else {
        JSError::QuickJS(parts.join("\n"))
    }
}
// Process-wide cache of file contents keyed by path, so repeated `load_file`
// calls from JS don't hit the disk again. Entries are never invalidated, so
// edits made while the runtime is alive won't be observed.
static FILE_CACHE: LazyLock<Mutex<HashMap<String, String>>> =
    LazyLock::new(|| Mutex::new(HashMap::new()));

/// JS-callable helper that reads a file as UTF-8, memoizing the contents.
#[rquickjs::function]
fn load_file(path: String) -> rquickjs::Result<String> {
    // Fast path: return cached contents, holding the lock only briefly.
    {
        let cache = FILE_CACHE.lock().unwrap();
        if let Some(cached) = cache.get(&path) {
            return Ok(cached.clone());
        }
    }
    let content = std::fs::read_to_string(&path).map_err(|e| {
        rquickjs::Error::new_from_js_message("IOError", "FileReadError", e.to_string())
    })?;
    {
        let mut cache = FILE_CACHE.lock().unwrap();
        cache.insert(path, content.clone());
    }
    Ok(content)
}
/// A minimal `console` implementation exposed to the QuickJS runtime,
/// forwarding output to the `log` crate.
#[rquickjs::class]
#[derive(rquickjs::class::Trace, rquickjs::JsLifetime, Default)]
pub struct Console {}

impl Console {
    /// Renders JS values to text: primitives print directly, arrays are
    /// rendered recursively with string elements quoted, and objects (and
    /// other object-like values) collapse to `[object Object]`.
    fn format_args(args: &[Value<'_>]) -> String {
        args.iter()
            .map(|v| match v.type_of() {
                Type::Bool => v.as_bool().unwrap().to_string(),
                Type::Int => v.as_int().unwrap().to_string(),
                Type::Float => v.as_float().unwrap().to_string(),
                Type::String => v
                    .as_string()
                    .unwrap()
                    .to_string()
                    .unwrap_or_else(|_| String::new()),
                Type::Null => "null".to_string(),
                Type::Undefined => "undefined".to_string(),
                Type::Uninitialized => "uninitialized".to_string(),
                Type::Module => "module".to_string(),
                Type::BigInt => v.get::<String>().unwrap_or_else(|_| "BigInt".to_string()),
                Type::Unknown => "unknown".to_string(),
                Type::Array => {
                    // Recursively format elements, quoting nested strings.
                    let js_vals = v
                        .as_array()
                        .unwrap()
                        .iter::<Value<'_>>()
                        .filter_map(|x| x.ok())
                        .map(|x| {
                            if x.is_string() {
                                format!("'{}'", Self::format_args(&[x]))
                            } else {
                                Self::format_args(&[x])
                            }
                        })
                        .collect::<Vec<_>>()
                        .join(", ");
                    format!("[ {js_vals} ]")
                }
                Type::Symbol
                | Type::Object
                | Type::Proxy
                | Type::Function
                | Type::Constructor
                | Type::Promise
                | Type::Exception => "[object Object]".to_string(),
            })
            .collect::<Vec<_>>()
            .join(" ")
    }
}
// JS-visible methods of `Console`. The attribute macro exposes these to the
// QuickJS runtime; the `rest` parameters receive the variadic JS arguments.
#[rquickjs::methods]
impl Console {
    #[qjs(constructor)]
    pub const fn new() -> Self {
        Console {}
    }
    /// `console.log(...)` — forwarded at `info` level.
    #[allow(clippy::needless_pass_by_value)]
    pub fn log(&self, args: rquickjs::function::Rest<Value<'_>>) -> rquickjs::Result<()> {
        info!("{}", Self::format_args(&args));
        Ok(())
    }
    /// `console.warn(...)` — forwarded at `warn` level.
    #[allow(clippy::needless_pass_by_value)]
    pub fn warn(&self, args: rquickjs::function::Rest<Value<'_>>) -> rquickjs::Result<()> {
        warn!("{}", Self::format_args(&args));
        Ok(())
    }
    /// `console.error(...)` — forwarded at `error` level with an "Error:" prefix.
    #[allow(clippy::needless_pass_by_value)]
    pub fn error(&self, args: rquickjs::function::Rest<Value<'_>>) -> rquickjs::Result<()> {
        error!("Error: {}", Self::format_args(&args));
        Ok(())
    }
}
fn resolve_module_path(base_path: &Path, module_path: &str) -> rquickjs::Result<PathBuf> {
let candidates = if module_path.starts_with("./") || module_path.starts_with("../") {
let target = base_path.join(module_path);
vec![
target.with_extension("js"),
target.with_extension("json"),
target.clone(),
]
} else {
let local_target = base_path.join(module_path);
let node_modules_target = Path::new("node_modules").join(module_path);
vec![
local_target.with_extension("js"),
local_target.with_extension("json"),
local_target.clone(),
node_modules_target.with_extension("js"),
node_modules_target.with_extension("json"),
node_modules_target,
]
};
for candidate in candidates {
if let Ok(resolved) = try_resolve_path(&candidate) {
return Ok(resolved);
}
}
Err(rquickjs::Error::new_from_js_message(
"Error",
"ModuleNotFound",
format!("Module not found: {module_path}"),
))
}
fn try_resolve_path(path: &Path) -> rquickjs::Result<PathBuf> {
let metadata = std::fs::metadata(path).map_err(|_| {
rquickjs::Error::new_from_js_message(
"Error",
"FileNotFound",
format!("Path not found: {}", path.display()),
)
})?;
if metadata.is_file() {
return Ok(path.to_path_buf());
}
if metadata.is_dir() {
let index_path = path.join("index.js");
if index_path.exists() {
return Ok(index_path);
}
}
Err(rquickjs::Error::new_from_js_message(
"Error",
"ResolutionFailed",
format!("Cannot resolve: {}", path.display()),
))
}
/// Resolve and load `module_path` relative to the module that invoked
/// `require()` (identified by `from_module`).
#[allow(clippy::needless_pass_by_value)]
fn require_from_module<'a>(
    ctx: Ctx<'a>,
    module_path: String,
    from_module: &str,
) -> rquickjs::Result<Value<'a>> {
    let caller = PathBuf::from(from_module);
    // Resolve relative to the caller's directory when the caller is a file;
    // otherwise treat the caller path itself as the base directory.
    let base_dir = if caller.is_file() {
        caller.parent().unwrap_or(Path::new("."))
    } else {
        caller.as_path()
    };
    let resolved = resolve_module_path(base_dir, &module_path)?;
    let source = load_file(resolved.to_string_lossy().to_string())?;
    load_module_from_content(&ctx, &resolved, &source)
}
/// Evaluate a CommonJS module's source text and return its `module.exports`.
///
/// `.json` files are parsed directly; JS files are wrapped in the standard
/// CommonJS function wrapper so that `exports`, `require`, `module`,
/// `__filename`, and `__dirname` are in scope.
fn load_module_from_content<'a>(
    ctx: &Ctx<'a>,
    path: &Path,
    contents: &str,
) -> rquickjs::Result<Value<'a>> {
    if path.extension().is_some_and(|ext| ext == "json") {
        // `{contents:?}` embeds the file as a quoted, escaped JS string literal.
        return ctx.eval::<Value, _>(format!("JSON.parse({contents:?})"));
    }
    let exports = Object::new(ctx.clone())?;
    let module_obj = Object::new(ctx.clone())?;
    module_obj.set("exports", exports.clone())?;
    let filename = path.to_string_lossy().to_string();
    let dirname = path
        .parent()
        .map_or_else(|| ".".to_string(), |p| p.to_string_lossy().to_string());
    // Require function specific to *this* module
    let module_path = filename.clone();
    let require = Function::new(
        ctx.clone(),
        move |ctx_inner: Ctx<'a>, target_path: String| -> rquickjs::Result<Value<'a>> {
            require_from_module(ctx_inner, target_path, &module_path)
        },
    )?;
    let wrapper =
        format!("(function(exports, require, module, __filename, __dirname) {{ {contents} }})");
    let module_func = ctx.eval::<Function, _>(wrapper)?;
    module_func.call::<_, Value>((exports, require, module_obj.clone(), filename, dirname))?;
    // Re-read `exports` from the module object: the script may have replaced
    // `module.exports` wholesale.
    module_obj.get("exports")
}
/// Execute the tree-sitter grammar DSL in an embedded QuickJS runtime against
/// the grammar file at `grammar_path`, returning the generated grammar JSON
/// (pretty-printed).
pub fn execute_native_runtime(grammar_path: &Path) -> JSResult<String> {
    let runtime = Runtime::new()?;
    runtime.set_memory_limit(64 * 1024 * 1024); // 64MB
    runtime.set_max_stack_size(256 * 1024); // 256KB
    let context = Context::full(&runtime)?;
    // ES-module resolution for scripts loaded by the runtime itself.
    let resolver = FileResolver::default()
        .with_path("./node_modules")
        .with_path("./")
        .with_pattern("{}.mjs");
    let loader = ScriptLoader::default().with_extension("mjs");
    runtime.set_loader(resolver, loader);
    // The grammar path is handed to the DSL relative to the working directory.
    let cwd = std::env::current_dir().map_err(|e| JSError::IO(IoError::new(&e, None)))?;
    let relative_path = pathdiff::diff_paths(grammar_path, &cwd)
        .map(|p| p.to_string_lossy().to_string())
        .ok_or(JSError::RelativePath)?;
    context.with(|ctx| -> JSResult<String> {
        let globals = ctx.globals();
        globals.set("native", true).or_js_error(&ctx)?;
        globals
            .set("__ts_grammar_path", relative_path)
            .or_js_error(&ctx)?;
        let console = rquickjs::Class::instance(ctx.clone(), Console::new()).or_js_error(&ctx)?;
        globals.set("console", console).or_js_error(&ctx)?;
        // Expose a minimal `process.env` mirroring the host environment.
        let process = Object::new(ctx.clone()).or_js_error(&ctx)?;
        let env = Object::new(ctx.clone()).or_js_error(&ctx)?;
        for (key, value) in std::env::vars() {
            env.set(key, value).or_js_error(&ctx)?;
        }
        process.set("env", env).or_js_error(&ctx)?;
        globals.set("process", process).or_js_error(&ctx)?;
        let module = Object::new(ctx.clone()).or_js_error(&ctx)?;
        module
            .set("exports", Object::new(ctx.clone()).or_js_error(&ctx)?)
            .or_js_error(&ctx)?;
        globals.set("module", module).or_js_error(&ctx)?;
        let grammar_path_string = grammar_path.to_string_lossy().to_string();
        // Top-level `require`, resolving specifiers relative to the grammar file.
        let main_require = Function::new(
            ctx.clone(),
            move |ctx_inner, target_path: String| -> rquickjs::Result<Value> {
                require_from_module(ctx_inner, target_path, &grammar_path_string)
            },
        )?;
        globals.set("require", main_require).or_js_error(&ctx)?;
        // Evaluate the bundled DSL module; given the globals set above, it is
        // expected to read `__ts_grammar_path` and store its JSON result in
        // `globalThis.output`.
        let promise = Module::evaluate(ctx.clone(), "dsl", DSL).or_js_error(&ctx)?;
        promise.finish::<()>().or_js_error(&ctx)?;
        let grammar_json = ctx
            .eval::<rquickjs::String, _>("globalThis.output")
            .map(|s| s.to_string())
            .or_js_error(&ctx)?
            .or_js_error(&ctx)?;
        // Round-trip through serde to validate and pretty-print the JSON.
        let parsed = serde_json::from_str::<serde_json::Value>(&grammar_json)?;
        Ok(serde_json::to_string_pretty(&parsed)?)
    })
}
#[cfg(test)]
mod tests {
    use std::{
        fs,
        sync::{Arc, Mutex, OnceLock},
    };
    use tempfile::TempDir;
    use super::*;
    // These tests mutate process-global state (the current working directory
    // and FILE_CACHE), so they are serialized behind a single mutex.
    static TEST_MUTEX: OnceLock<Arc<Mutex<()>>> = OnceLock::new();
    // Run `test` while holding the global lock, then reset shared state.
    fn with_test_lock<F, R>(test: F) -> R
    where
        F: FnOnce() -> R,
    {
        let _guard = TEST_MUTEX.get_or_init(|| Arc::new(Mutex::new(()))).lock();
        let result = test();
        cleanup_runtime_state();
        result
    }
    fn cleanup_runtime_state() {
        FILE_CACHE.lock().unwrap().clear();
    }
    #[test]
    fn test_basic_grammar_execution() {
        with_test_lock(|| {
            let temp_dir = TempDir::new().unwrap();
            std::env::set_current_dir(temp_dir.path()).unwrap();
            let grammar_path = temp_dir.path().join("grammar.js");
            fs::write(
                &grammar_path,
                r"
                module.exports = grammar({
                    name: 'test',
                    rules: { source_file: $ => 'hello' }
                });
            ",
            )
            .unwrap();
            let json = execute_native_runtime(&grammar_path).expect("Failed to execute grammar");
            assert!(json.contains("\"name\": \"test\""));
            assert!(json.contains("\"hello\""));
        });
    }
    // `require()` of a sibling JS module must resolve relative to the grammar.
    #[test]
    fn test_module_imports() {
        with_test_lock(|| {
            let temp_dir = TempDir::new().unwrap();
            std::env::set_current_dir(temp_dir.path()).unwrap();
            fs::write(
                temp_dir.path().join("common.js"),
                r"
                module.exports = { identifier: $ => /[a-zA-Z_][a-zA-Z0-9_]*/ };
            ",
            )
            .unwrap();
            fs::write(
                temp_dir.path().join("grammar.js"),
                r"
                const common = require('./common');
                module.exports = grammar({
                    name: 'test_import',
                    rules: { source_file: common.identifier }
                });
            ",
            )
            .unwrap();
            let json = execute_native_runtime(&temp_dir.path().join("grammar.js"))
                .expect("Failed to execute grammar with imports");
            assert!(json.contains("\"name\": \"test_import\""));
        });
    }
    // `require()` of a .json file must parse it instead of evaluating it as JS.
    #[test]
    fn test_json_module_loading() {
        with_test_lock(|| {
            let temp_dir = TempDir::new().unwrap();
            std::env::set_current_dir(temp_dir.path()).unwrap();
            fs::write(
                temp_dir.path().join("package.json"),
                r#"{"version": "1.0.0"}"#,
            )
            .unwrap();
            fs::write(
                temp_dir.path().join("grammar.js"),
                r"
                const pkg = require('./package.json');
                module.exports = grammar({
                    name: 'json_test',
                    rules: {
                        source_file: $ => 'version_' + pkg.version.replace(/\./g, '_')
                    }
                });
            ",
            )
            .unwrap();
            let json = execute_native_runtime(&temp_dir.path().join("grammar.js"))
                .expect("Failed to execute grammar with JSON import");
            assert!(json.contains("version_1_0_0"));
        });
    }
    // Exceeding the 64MB memory limit should surface as a QuickJS error
    // rather than aborting the process.
    #[test]
    fn test_resource_limits() {
        with_test_lock(|| {
            let temp_dir = TempDir::new().unwrap();
            std::env::set_current_dir(temp_dir.path()).unwrap();
            fs::write(
                temp_dir.path().join("grammar.js"),
                r"
                const huge = new Array(10000000).fill('x'.repeat(1000));
                module.exports = grammar({
                    name: 'resource_test',
                    rules: { source_file: $ => 'test' }
                });
            ",
            )
            .unwrap();
            let result = execute_native_runtime(&temp_dir.path().join("grammar.js"));
            assert!(result.is_err());
            assert!(matches!(result.unwrap_err(), JSError::QuickJS(_)));
        });
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/prepare_grammar/intern_symbols.rs | crates/generate/src/prepare_grammar/intern_symbols.rs | use log::warn;
use serde::Serialize;
use thiserror::Error;
use super::InternedGrammar;
use crate::{
grammars::{InputGrammar, ReservedWordContext, Variable, VariableType},
rules::{Rule, Symbol},
};
/// Result type used throughout symbol interning.
pub type InternSymbolsResult<T> = Result<T, InternSymbolsError>;
/// Errors raised while interning grammar symbol names. Each `Undefined*`
/// variant carries the offending name and identifies where it was referenced.
#[derive(Debug, Error, Serialize)]
pub enum InternSymbolsError {
    #[error("A grammar's start rule must be visible.")]
    HiddenStartRule,
    #[error("Undefined symbol `{0}`")]
    Undefined(String),
    #[error("Undefined symbol `{0}` in grammar's supertypes array")]
    UndefinedSupertype(String),
    #[error("Undefined symbol `{0}` in grammar's conflicts array")]
    UndefinedConflict(String),
    #[error("Undefined symbol `{0}` as grammar's word token")]
    UndefinedWordToken(String),
}
pub(super) fn intern_symbols(grammar: &InputGrammar) -> InternSymbolsResult<InternedGrammar> {
let interner = Interner { grammar };
if variable_type_for_name(&grammar.variables[0].name) == VariableType::Hidden {
Err(InternSymbolsError::HiddenStartRule)?;
}
let mut variables = Vec::with_capacity(grammar.variables.len());
for variable in &grammar.variables {
variables.push(Variable {
name: variable.name.clone(),
kind: variable_type_for_name(&variable.name),
rule: interner.intern_rule(&variable.rule, Some(&variable.name))?,
});
}
let mut external_tokens = Vec::with_capacity(grammar.external_tokens.len());
for external_token in &grammar.external_tokens {
let rule = interner.intern_rule(external_token, None)?;
let (name, kind) = if let Rule::NamedSymbol(name) = external_token {
(name.clone(), variable_type_for_name(name))
} else {
(String::new(), VariableType::Anonymous)
};
external_tokens.push(Variable { name, kind, rule });
}
let mut extra_symbols = Vec::with_capacity(grammar.extra_symbols.len());
for extra_token in &grammar.extra_symbols {
extra_symbols.push(interner.intern_rule(extra_token, None)?);
}
let mut supertype_symbols = Vec::with_capacity(grammar.supertype_symbols.len());
for supertype_symbol_name in &grammar.supertype_symbols {
supertype_symbols.push(interner.intern_name(supertype_symbol_name).ok_or_else(|| {
InternSymbolsError::UndefinedSupertype(supertype_symbol_name.clone())
})?);
}
let mut reserved_words = Vec::with_capacity(grammar.reserved_words.len());
for reserved_word_set in &grammar.reserved_words {
let mut interned_set = Vec::with_capacity(reserved_word_set.reserved_words.len());
for rule in &reserved_word_set.reserved_words {
interned_set.push(interner.intern_rule(rule, None)?);
}
reserved_words.push(ReservedWordContext {
name: reserved_word_set.name.clone(),
reserved_words: interned_set,
});
}
let mut expected_conflicts = Vec::with_capacity(grammar.expected_conflicts.len());
for conflict in &grammar.expected_conflicts {
let mut interned_conflict = Vec::with_capacity(conflict.len());
for name in conflict {
interned_conflict.push(
interner
.intern_name(name)
.ok_or_else(|| InternSymbolsError::UndefinedConflict(name.clone()))?,
);
}
expected_conflicts.push(interned_conflict);
}
let mut variables_to_inline = Vec::new();
for name in &grammar.variables_to_inline {
if let Some(symbol) = interner.intern_name(name) {
variables_to_inline.push(symbol);
}
}
let word_token = if let Some(name) = grammar.word_token.as_ref() {
Some(
interner
.intern_name(name)
.ok_or_else(|| InternSymbolsError::UndefinedWordToken(name.clone()))?,
)
} else {
None
};
for (i, variable) in variables.iter_mut().enumerate() {
if supertype_symbols.contains(&Symbol::non_terminal(i)) {
variable.kind = VariableType::Hidden;
}
}
Ok(InternedGrammar {
variables,
external_tokens,
extra_symbols,
expected_conflicts,
variables_to_inline,
supertype_symbols,
word_token,
precedence_orderings: grammar.precedence_orderings.clone(),
reserved_word_sets: reserved_words,
})
}
/// Helper that resolves rule names against a grammar's variables and
/// external tokens.
struct Interner<'a> {
    grammar: &'a InputGrammar,
}
impl Interner<'_> {
    /// Recursively replace each `Rule::NamedSymbol` in `rule` with its
    /// interned `Rule::Symbol`, erroring on undefined names. `name` is the
    /// enclosing variable's name, used only for diagnostics.
    fn intern_rule(&self, rule: &Rule, name: Option<&str>) -> InternSymbolsResult<Rule> {
        match rule {
            Rule::Choice(elements) => {
                Self::check_single(elements, name, "choice");
                let result = elements
                    .iter()
                    .map(|element| self.intern_rule(element, name))
                    .collect::<InternSymbolsResult<Vec<_>>>()?;
                Ok(Rule::Choice(result))
            }
            Rule::Seq(elements) => {
                Self::check_single(elements, name, "seq");
                let result = elements
                    .iter()
                    .map(|element| self.intern_rule(element, name))
                    .collect::<InternSymbolsResult<Vec<_>>>()?;
                Ok(Rule::Seq(result))
            }
            Rule::Repeat(content) => Ok(Rule::Repeat(Box::new(self.intern_rule(content, name)?))),
            Rule::Metadata { rule, params } => Ok(Rule::Metadata {
                rule: Box::new(self.intern_rule(rule, name)?),
                params: params.clone(),
            }),
            Rule::Reserved { rule, context_name } => Ok(Rule::Reserved {
                rule: Box::new(self.intern_rule(rule, name)?),
                context_name: context_name.clone(),
            }),
            Rule::NamedSymbol(name) => self.intern_name(name).map_or_else(
                || Err(InternSymbolsError::Undefined(name.clone())),
                |symbol| Ok(Rule::Symbol(symbol)),
            ),
            // Leaf rules (strings, patterns, already-interned symbols, …)
            // pass through unchanged.
            _ => Ok(rule.clone()),
        }
    }
    /// Resolve a name to a symbol. Grammar variables take precedence over
    /// external tokens; returns `None` if the name is defined by neither.
    fn intern_name(&self, symbol: &str) -> Option<Symbol> {
        if let Some(i) = self.grammar.variables.iter().position(|v| v.name == symbol) {
            return Some(Symbol::non_terminal(i));
        }
        self.grammar
            .external_tokens
            .iter()
            .enumerate()
            .find_map(|(i, token)| match token {
                Rule::NamedSymbol(name) if name == symbol => Some(Symbol::external(i)),
                _ => None,
            })
    }
    // In the case of a seq or choice rule of 1 element in a hidden rule, weird
    // inconsistent behavior with queries can occur. So we should warn the user
    // about it. An associated fn (no `self`): the check depends only on its
    // arguments (clippy::unused_self).
    fn check_single(elements: &[Rule], name: Option<&str>, kind: &str) {
        if elements.len() == 1 && matches!(elements[0], Rule::String(_) | Rule::Pattern(_, _)) {
            warn!(
                "rule {} contains a `{kind}` rule with a single element. This is unnecessary.",
                name.unwrap_or_default()
            );
        }
    }
}
/// Names beginning with an underscore denote hidden rules; all others are named.
fn variable_type_for_name(name: &str) -> VariableType {
    match name.strip_prefix('_') {
        Some(_) => VariableType::Hidden,
        None => VariableType::Named,
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // NOTE(review): despite the name, this exercises symbol interning (name
    // resolution and hidden-rule detection), not repeat expansion.
    #[test]
    fn test_basic_repeat_expansion() {
        let grammar = intern_symbols(&build_grammar(vec![
            Variable::named("x", Rule::choice(vec![Rule::named("y"), Rule::named("_z")])),
            Variable::named("y", Rule::named("_z")),
            Variable::named("_z", Rule::string("a")),
        ]))
        .unwrap();
        assert_eq!(
            grammar.variables,
            vec![
                Variable::named(
                    "x",
                    Rule::choice(vec![Rule::non_terminal(1), Rule::non_terminal(2),])
                ),
                Variable::named("y", Rule::non_terminal(2)),
                Variable::hidden("_z", Rule::string("a")),
            ]
        );
    }
    #[test]
    fn test_interning_external_token_names() {
        // Variable `y` is both an internal and an external token.
        // Variable `z` is just an external token.
        let mut input_grammar = build_grammar(vec![
            Variable::named(
                "w",
                Rule::choice(vec![Rule::named("x"), Rule::named("y"), Rule::named("z")]),
            ),
            Variable::named("x", Rule::string("a")),
            Variable::named("y", Rule::string("b")),
        ]);
        input_grammar
            .external_tokens
            .extend(vec![Rule::named("y"), Rule::named("z")]);
        let grammar = intern_symbols(&input_grammar).unwrap();
        // Variable `y` is referred to by its internal index.
        // Variable `z` is referred to by its external index.
        assert_eq!(
            grammar.variables,
            vec![
                Variable::named(
                    "w",
                    Rule::choice(vec![
                        Rule::non_terminal(1),
                        Rule::non_terminal(2),
                        Rule::external(1),
                    ])
                ),
                Variable::named("x", Rule::string("a")),
                Variable::named("y", Rule::string("b")),
            ]
        );
        // The external token for `y` refers back to its internal index.
        assert_eq!(
            grammar.external_tokens,
            vec![
                Variable::named("y", Rule::non_terminal(2)),
                Variable::named("z", Rule::external(1)),
            ]
        );
    }
    #[test]
    fn test_grammar_with_undefined_symbols() {
        let result = intern_symbols(&build_grammar(vec![Variable::named("x", Rule::named("y"))]));
        assert!(result.is_err(), "Expected an error but got none");
        let e = result.err().unwrap();
        assert_eq!(e.to_string(), "Undefined symbol `y`");
    }
    // Build a minimal InputGrammar containing only the given variables.
    fn build_grammar(variables: Vec<Variable>) -> InputGrammar {
        InputGrammar {
            variables,
            name: "the_language".to_string(),
            ..Default::default()
        }
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/prepare_grammar/extract_default_aliases.rs | crates/generate/src/prepare_grammar/extract_default_aliases.rs | use crate::{
grammars::{LexicalGrammar, SyntaxGrammar},
rules::{Alias, AliasMap, Symbol, SymbolType},
};
/// Per-symbol bookkeeping: every alias the symbol appears under (with a
/// usage count), and whether it ever appears without an alias.
#[derive(Clone, Default)]
struct SymbolStatus {
    // (alias, number of production steps using that alias) pairs.
    aliases: Vec<(Alias, usize)>,
    // True if any production step or extra uses the symbol unaliased.
    appears_unaliased: bool,
}
// Update the grammar by finding symbols that always are aliased, and for each such symbol,
// promoting one of its aliases to a "default alias", which is applied globally instead
// of in a context-specific way.
//
// This has two benefits:
// * It reduces the overhead of storing production-specific alias info in the parse table.
// * Within an `ERROR` node, no context-specific aliases will be applied. This transformation
// ensures that the children of an `ERROR` node have symbols that are consistent with the way that
// they would appear in a valid syntax tree.
/// Find symbols that are always aliased, promote each one's most common alias
/// to a grammar-wide default, and strip per-step aliases that the new
/// defaults make redundant. Returns the map of default aliases. (See the
/// comment above for the rationale.)
pub(super) fn extract_default_aliases(
    syntax_grammar: &mut SyntaxGrammar,
    lexical_grammar: &LexicalGrammar,
) -> AliasMap {
    // One status slot per terminal, non-terminal, and external token.
    let mut terminal_status_list = vec![SymbolStatus::default(); lexical_grammar.variables.len()];
    let mut non_terminal_status_list =
        vec![SymbolStatus::default(); syntax_grammar.variables.len()];
    let mut external_status_list =
        vec![SymbolStatus::default(); syntax_grammar.external_tokens.len()];
    // For each grammar symbol, find all of the aliases under which the symbol appears,
    // and determine whether or not the symbol ever appears *unaliased*.
    for variable in &syntax_grammar.variables {
        for production in &variable.productions {
            for step in &production.steps {
                let status = match step.symbol.kind {
                    SymbolType::External => &mut external_status_list[step.symbol.index],
                    SymbolType::NonTerminal => &mut non_terminal_status_list[step.symbol.index],
                    SymbolType::Terminal => &mut terminal_status_list[step.symbol.index],
                    SymbolType::End | SymbolType::EndOfNonTerminalExtra => {
                        panic!("Unexpected end token")
                    }
                };
                // Default aliases don't work for inlined variables.
                if syntax_grammar.variables_to_inline.contains(&step.symbol) {
                    continue;
                }
                if let Some(alias) = &step.alias {
                    // Bump the count for this alias, or record it as new.
                    if let Some(count_for_alias) = status
                        .aliases
                        .iter_mut()
                        .find_map(|(a, count)| if a == alias { Some(count) } else { None })
                    {
                        *count_for_alias += 1;
                    } else {
                        status.aliases.push((alias.clone(), 1));
                    }
                } else {
                    status.appears_unaliased = true;
                }
            }
        }
    }
    // Extra symbols always count as an unaliased appearance.
    for symbol in &syntax_grammar.extra_symbols {
        let status = match symbol.kind {
            SymbolType::External => &mut external_status_list[symbol.index],
            SymbolType::NonTerminal => &mut non_terminal_status_list[symbol.index],
            SymbolType::Terminal => &mut terminal_status_list[symbol.index],
            SymbolType::End | SymbolType::EndOfNonTerminalExtra => panic!("Unexpected end token"),
        };
        status.appears_unaliased = true;
    }
    let symbols_with_statuses = (terminal_status_list
        .iter_mut()
        .enumerate()
        .map(|(i, status)| (Symbol::terminal(i), status)))
    .chain(
        non_terminal_status_list
            .iter_mut()
            .enumerate()
            .map(|(i, status)| (Symbol::non_terminal(i), status)),
    )
    .chain(
        external_status_list
            .iter_mut()
            .enumerate()
            .map(|(i, status)| (Symbol::external(i), status)),
    );
    // For each symbol that always appears aliased, find the alias the occurs most often,
    // and designate that alias as the symbol's "default alias". Store all of these
    // default aliases in a map that will be returned.
    let mut result = AliasMap::new();
    for (symbol, status) in symbols_with_statuses {
        if status.appears_unaliased {
            status.aliases.clear();
        } else if let Some(default_entry) = status
            .aliases
            .iter()
            .enumerate()
            // Highest count wins; ties go to the earliest-recorded alias
            // (hence the negated index as a secondary key).
            .max_by_key(|(i, (_, count))| (count, -(*i as i64)))
            .map(|(_, entry)| entry.clone())
        {
            status.aliases.clear();
            status.aliases.push(default_entry.clone());
            result.insert(symbol, default_entry.0);
        }
    }
    // Wherever a symbol is aliased as its default alias, remove the usage of the alias,
    // because it will now be redundant.
    let mut alias_positions_to_clear = Vec::new();
    for variable in &mut syntax_grammar.variables {
        alias_positions_to_clear.clear();
        for (i, production) in variable.productions.iter().enumerate() {
            for (j, step) in production.steps.iter().enumerate() {
                let status = match step.symbol.kind {
                    SymbolType::External => &mut external_status_list[step.symbol.index],
                    SymbolType::NonTerminal => &mut non_terminal_status_list[step.symbol.index],
                    SymbolType::Terminal => &mut terminal_status_list[step.symbol.index],
                    SymbolType::End | SymbolType::EndOfNonTerminalExtra => {
                        panic!("Unexpected end token")
                    }
                };
                // If this step is aliased as the symbol's default alias, then remove that alias.
                if step.alias.is_some()
                    && step.alias.as_ref() == status.aliases.first().map(|t| &t.0)
                {
                    // Keep the explicit alias if a sibling production uses the
                    // same alias at this step index for a symbol whose default
                    // alias differs — that sibling needs its alias spelled out.
                    let mut other_productions_must_use_this_alias_at_this_index = false;
                    for (other_i, other_production) in variable.productions.iter().enumerate() {
                        if other_i != i
                            && other_production.steps.len() > j
                            && other_production.steps[j].alias == step.alias
                            && result.get(&other_production.steps[j].symbol) != step.alias.as_ref()
                        {
                            other_productions_must_use_this_alias_at_this_index = true;
                            break;
                        }
                    }
                    if !other_productions_must_use_this_alias_at_this_index {
                        alias_positions_to_clear.push((i, j));
                    }
                }
            }
        }
        for (production_index, step_index) in &alias_positions_to_clear {
            variable.productions[*production_index].steps[*step_index].alias = None;
        }
    }
    result
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        grammars::{LexicalVariable, Production, ProductionStep, SyntaxVariable, VariableType},
        nfa::Nfa,
    };
    // Covers default-alias promotion (always-aliased tokens), alias-count
    // tie-breaking, and removal of now-redundant per-step aliases.
    #[test]
    fn test_extract_simple_aliases() {
        let mut syntax_grammar = SyntaxGrammar {
            variables: vec![
                SyntaxVariable {
                    name: "v1".to_owned(),
                    kind: VariableType::Named,
                    productions: vec![Production {
                        dynamic_precedence: 0,
                        steps: vec![
                            ProductionStep::new(Symbol::terminal(0)).with_alias("a1", true),
                            ProductionStep::new(Symbol::terminal(1)).with_alias("a2", true),
                            ProductionStep::new(Symbol::terminal(2)).with_alias("a3", true),
                            ProductionStep::new(Symbol::terminal(3)).with_alias("a4", true),
                        ],
                    }],
                },
                SyntaxVariable {
                    name: "v2".to_owned(),
                    kind: VariableType::Named,
                    productions: vec![Production {
                        dynamic_precedence: 0,
                        steps: vec![
                            // Token 0 is always aliased as "a1".
                            ProductionStep::new(Symbol::terminal(0)).with_alias("a1", true),
                            // Token 1 is aliased within rule `v1` above, but not here.
                            ProductionStep::new(Symbol::terminal(1)),
                            // Token 2 is aliased differently here than in `v1`. The alias from
                            // `v1` should be promoted to the default alias, because `v1` appears
                            // first in the grammar.
                            ProductionStep::new(Symbol::terminal(2)).with_alias("a5", true),
                            // Token 3 is also aliased differently here than in `v1`. In this case,
                            // this alias should be promoted to the default alias, because it is
                            // used a greater number of times (twice).
                            ProductionStep::new(Symbol::terminal(3)).with_alias("a6", true),
                            ProductionStep::new(Symbol::terminal(3)).with_alias("a6", true),
                        ],
                    }],
                },
            ],
            ..Default::default()
        };
        let lexical_grammar = LexicalGrammar {
            nfa: Nfa::new(),
            variables: vec![
                LexicalVariable {
                    name: "t0".to_string(),
                    kind: VariableType::Anonymous,
                    implicit_precedence: 0,
                    start_state: 0,
                },
                LexicalVariable {
                    name: "t1".to_string(),
                    kind: VariableType::Anonymous,
                    implicit_precedence: 0,
                    start_state: 0,
                },
                LexicalVariable {
                    name: "t2".to_string(),
                    kind: VariableType::Anonymous,
                    implicit_precedence: 0,
                    start_state: 0,
                },
                LexicalVariable {
                    name: "t3".to_string(),
                    kind: VariableType::Anonymous,
                    implicit_precedence: 0,
                    start_state: 0,
                },
            ],
        };
        let default_aliases = extract_default_aliases(&mut syntax_grammar, &lexical_grammar);
        assert_eq!(default_aliases.len(), 3);
        assert_eq!(
            default_aliases.get(&Symbol::terminal(0)),
            Some(&Alias {
                value: "a1".to_string(),
                is_named: true,
            })
        );
        assert_eq!(
            default_aliases.get(&Symbol::terminal(2)),
            Some(&Alias {
                value: "a3".to_string(),
                is_named: true,
            })
        );
        assert_eq!(
            default_aliases.get(&Symbol::terminal(3)),
            Some(&Alias {
                value: "a6".to_string(),
                is_named: true,
            })
        );
        // Token 1 appeared unaliased once, so it gets no default alias.
        assert_eq!(default_aliases.get(&Symbol::terminal(1)), None);
        assert_eq!(
            syntax_grammar.variables,
            vec![
                SyntaxVariable {
                    name: "v1".to_owned(),
                    kind: VariableType::Named,
                    productions: vec![Production {
                        dynamic_precedence: 0,
                        steps: vec![
                            ProductionStep::new(Symbol::terminal(0)),
                            ProductionStep::new(Symbol::terminal(1)).with_alias("a2", true),
                            ProductionStep::new(Symbol::terminal(2)),
                            ProductionStep::new(Symbol::terminal(3)).with_alias("a4", true),
                        ],
                    },],
                },
                SyntaxVariable {
                    name: "v2".to_owned(),
                    kind: VariableType::Named,
                    productions: vec![Production {
                        dynamic_precedence: 0,
                        steps: vec![
                            ProductionStep::new(Symbol::terminal(0)),
                            ProductionStep::new(Symbol::terminal(1)),
                            ProductionStep::new(Symbol::terminal(2)).with_alias("a5", true),
                            ProductionStep::new(Symbol::terminal(3)),
                            ProductionStep::new(Symbol::terminal(3)),
                        ],
                    },],
                },
            ]
        );
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/prepare_grammar/expand_repeats.rs | crates/generate/src/prepare_grammar/expand_repeats.rs | use std::{collections::HashMap, mem};
use super::ExtractedSyntaxGrammar;
use crate::{
grammars::{Variable, VariableType},
rules::{Rule, Symbol},
};
/// Stateful walker that replaces `Rule::Repeat` nodes with references to
/// newly created auxiliary variables.
struct Expander {
    // Name of the variable currently being expanded (used to name aux rules).
    variable_name: String,
    // Number of repeats seen so far in the current variable, for unique names.
    repeat_count_in_variable: usize,
    // Count of original grammar variables; aux symbols are indexed after them.
    preceding_symbol_count: usize,
    // Auxiliary variables created so far; appended to the grammar at the end.
    auxiliary_variables: Vec<Variable>,
    // Dedup map from expanded repeat content to its auxiliary symbol.
    existing_repeats: HashMap<Rule, Symbol>,
}
impl Expander {
    /// Expand all repetitions inside `variable` (the grammar variable at
    /// `index`). Returns `true` if the variable itself was converted into a
    /// recursive auxiliary rule (hidden variable with a top-level repeat).
    fn expand_variable(&mut self, index: usize, variable: &mut Variable) -> bool {
        self.variable_name.clear();
        self.variable_name.push_str(&variable.name);
        self.repeat_count_in_variable = 0;
        // Take ownership of the rule, leaving a placeholder behind.
        let rule = mem::replace(&mut variable.rule, Rule::Blank);
        // In the special case of a hidden variable with a repetition at its top level,
        // convert that rule itself into a binary tree structure instead of introducing
        // another auxiliary rule.
        if let (VariableType::Hidden, Rule::Repeat(repeated_content)) = (variable.kind, &rule) {
            let inner_rule = self.expand_rule(repeated_content);
            variable.rule =
                Self::wrap_rule_in_binary_tree(Symbol::non_terminal(index), inner_rule);
            variable.kind = VariableType::Auxiliary;
            return true;
        }
        variable.rule = self.expand_rule(&rule);
        false
    }
    /// Recursively rewrite `rule`, replacing each `Repeat` with a symbol
    /// referring to a (possibly shared) auxiliary variable.
    fn expand_rule(&mut self, rule: &Rule) -> Rule {
        match rule {
            // For choices, sequences, and metadata, descend into the child rules,
            // replacing any nested repetitions.
            Rule::Choice(elements) => Rule::Choice(
                elements
                    .iter()
                    .map(|element| self.expand_rule(element))
                    .collect(),
            ),
            Rule::Seq(elements) => Rule::Seq(
                elements
                    .iter()
                    .map(|element| self.expand_rule(element))
                    .collect(),
            ),
            Rule::Metadata { rule, params } => Rule::Metadata {
                rule: Box::new(self.expand_rule(rule)),
                params: params.clone(),
            },
            // For repetitions, introduce an auxiliary rule that contains the
            // repeated content, but can also contain a recursive binary tree structure.
            Rule::Repeat(content) => {
                let inner_rule = self.expand_rule(content);
                // Reuse an existing auxiliary rule for identical repeated content.
                if let Some(existing_symbol) = self.existing_repeats.get(&inner_rule) {
                    return Rule::Symbol(*existing_symbol);
                }
                self.repeat_count_in_variable += 1;
                let rule_name = format!(
                    "{}_repeat{}",
                    self.variable_name, self.repeat_count_in_variable
                );
                let repeat_symbol = Symbol::non_terminal(
                    self.preceding_symbol_count + self.auxiliary_variables.len(),
                );
                self.existing_repeats
                    .insert(inner_rule.clone(), repeat_symbol);
                self.auxiliary_variables.push(Variable {
                    name: rule_name,
                    kind: VariableType::Auxiliary,
                    rule: Self::wrap_rule_in_binary_tree(repeat_symbol, inner_rule),
                });
                Rule::Symbol(repeat_symbol)
            }
            // For primitive rules, don't change anything.
            _ => rule.clone(),
        }
    }
    /// Build `choice(seq(symbol, symbol), rule)` — the recursive binary-tree
    /// shape used for repetitions. An associated fn: it depends only on its
    /// arguments (clippy::unused_self).
    fn wrap_rule_in_binary_tree(symbol: Symbol, rule: Rule) -> Rule {
        Rule::choice(vec![
            Rule::Seq(vec![Rule::Symbol(symbol), Rule::Symbol(symbol)]),
            rule,
        ])
    }
}
/// Replace every `Rule::Repeat` in the grammar with a dedicated auxiliary
/// variable whose rule is a recursive binary tree, appending the new
/// auxiliary variables after the original ones.
pub(super) fn expand_repeats(mut grammar: ExtractedSyntaxGrammar) -> ExtractedSyntaxGrammar {
    let mut expander = Expander {
        variable_name: String::new(),
        repeat_count_in_variable: 0,
        preceding_symbol_count: grammar.variables.len(),
        auxiliary_variables: Vec::new(),
        existing_repeats: HashMap::new(),
    };
    for (i, variable) in grammar.variables.iter_mut().enumerate() {
        // A hidden variable whose whole rule was a repetition is now a
        // recursive rule, which makes it ineligible for inlining.
        if expander.expand_variable(i, variable) {
            let symbol = Symbol::non_terminal(i);
            grammar.variables_to_inline.retain(|s| *s != symbol);
        }
    }
    grammar.variables.extend(expander.auxiliary_variables);
    grammar
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_basic_repeat_expansion() {
        // Repeats nested inside of sequences and choices are expanded.
        let grammar = expand_repeats(build_grammar(vec![Variable::named(
            "rule0",
            Rule::seq(vec![
                Rule::terminal(10),
                Rule::choice(vec![
                    Rule::repeat(Rule::terminal(11)),
                    Rule::repeat(Rule::terminal(12)),
                ]),
                Rule::terminal(13),
            ]),
        )]));
        assert_eq!(
            grammar.variables,
            vec![
                Variable::named(
                    "rule0",
                    Rule::seq(vec![
                        Rule::terminal(10),
                        Rule::choice(vec![Rule::non_terminal(1), Rule::non_terminal(2),]),
                        Rule::terminal(13),
                    ])
                ),
                Variable::auxiliary(
                    "rule0_repeat1",
                    Rule::choice(vec![
                        Rule::seq(vec![Rule::non_terminal(1), Rule::non_terminal(1),]),
                        Rule::terminal(11),
                    ])
                ),
                Variable::auxiliary(
                    "rule0_repeat2",
                    Rule::choice(vec![
                        Rule::seq(vec![Rule::non_terminal(2), Rule::non_terminal(2),]),
                        Rule::terminal(12),
                    ])
                ),
            ]
        );
    }
    #[test]
    fn test_repeat_deduplication() {
        // Terminal 4 appears inside of a repeat in three different places.
        let grammar = expand_repeats(build_grammar(vec![
            Variable::named(
                "rule0",
                Rule::choice(vec![
                    Rule::seq(vec![Rule::terminal(1), Rule::repeat(Rule::terminal(4))]),
                    Rule::seq(vec![Rule::terminal(2), Rule::repeat(Rule::terminal(4))]),
                ]),
            ),
            Variable::named(
                "rule1",
                Rule::seq(vec![Rule::terminal(3), Rule::repeat(Rule::terminal(4))]),
            ),
        ]));
        // Only one auxiliary rule is created for repeating terminal 4.
        assert_eq!(
            grammar.variables,
            vec![
                Variable::named(
                    "rule0",
                    Rule::choice(vec![
                        Rule::seq(vec![Rule::terminal(1), Rule::non_terminal(2)]),
                        Rule::seq(vec![Rule::terminal(2), Rule::non_terminal(2)]),
                    ])
                ),
                Variable::named(
                    "rule1",
                    Rule::seq(vec![Rule::terminal(3), Rule::non_terminal(2),])
                ),
                Variable::auxiliary(
                    "rule0_repeat1",
                    Rule::choice(vec![
                        Rule::seq(vec![Rule::non_terminal(2), Rule::non_terminal(2),]),
                        Rule::terminal(4),
                    ])
                )
            ]
        );
    }
    // A repeat nested within another repeat's content gets its own aux rule,
    // which the outer repeat's aux rule then references.
    #[test]
    fn test_expansion_of_nested_repeats() {
        let grammar = expand_repeats(build_grammar(vec![Variable::named(
            "rule0",
            Rule::seq(vec![
                Rule::terminal(10),
                Rule::repeat(Rule::seq(vec![
                    Rule::terminal(11),
                    Rule::repeat(Rule::terminal(12)),
                ])),
            ]),
        )]));
        assert_eq!(
            grammar.variables,
            vec![
                Variable::named(
                    "rule0",
                    Rule::seq(vec![Rule::terminal(10), Rule::non_terminal(2),])
                ),
                Variable::auxiliary(
                    "rule0_repeat1",
                    Rule::choice(vec![
                        Rule::seq(vec![Rule::non_terminal(1), Rule::non_terminal(1),]),
                        Rule::terminal(12),
                    ])
                ),
                Variable::auxiliary(
                    "rule0_repeat2",
                    Rule::choice(vec![
                        Rule::seq(vec![Rule::non_terminal(2), Rule::non_terminal(2),]),
                        Rule::seq(vec![Rule::terminal(11), Rule::non_terminal(1),]),
                    ])
                ),
            ]
        );
    }
    // A hidden variable whose entire rule is a repeat becomes the recursive
    // auxiliary rule itself (no extra variable is introduced).
    #[test]
    fn test_expansion_of_repeats_at_top_of_hidden_rules() {
        let grammar = expand_repeats(build_grammar(vec![
            Variable::named("rule0", Rule::non_terminal(1)),
            Variable::hidden(
                "_rule1",
                Rule::repeat(Rule::choice(vec![Rule::terminal(11), Rule::terminal(12)])),
            ),
        ]));
        assert_eq!(
            grammar.variables,
            vec![
                Variable::named("rule0", Rule::non_terminal(1),),
                Variable::auxiliary(
                    "_rule1",
                    Rule::choice(vec![
                        Rule::seq(vec![Rule::non_terminal(1), Rule::non_terminal(1)]),
                        Rule::terminal(11),
                        Rule::terminal(12),
                    ]),
                ),
            ]
        );
    }
    // Build a minimal grammar containing only the given variables.
    fn build_grammar(variables: Vec<Variable>) -> ExtractedSyntaxGrammar {
        ExtractedSyntaxGrammar {
            variables,
            ..Default::default()
        }
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/prepare_grammar/flatten_grammar.rs | crates/generate/src/prepare_grammar/flatten_grammar.rs | use std::collections::HashMap;
use serde::Serialize;
use thiserror::Error;
use super::ExtractedSyntaxGrammar;
use crate::{
grammars::{
Production, ProductionStep, ReservedWordSetId, SyntaxGrammar, SyntaxVariable, Variable,
},
rules::{Alias, Associativity, Precedence, Rule, Symbol, TokenSet},
};
/// Result type for the grammar-flattening phase.
pub type FlattenGrammarResult<T> = Result<T, FlattenGrammarError>;

/// Errors that can occur while flattening a grammar's rules into linear
/// productions.
#[derive(Debug, Error, Serialize)]
pub enum FlattenGrammarError {
    /// A `reserved(...)` rule referenced a reserved word set name that was
    /// never declared.
    #[error("No such reserved word set: {0}")]
    NoReservedWordSet(String),
    /// A syntactic rule (other than the start rule) produced a production
    /// with no steps.
    #[error(
        "The rule `{0}` matches the empty string.
Tree-sitter does not support syntactic rules that match the empty string
unless they are used only as the grammar's start rule.
"
    )]
    EmptyString(String),
    /// A rule marked for inlining references itself, so inlining would
    /// never terminate.
    #[error("Rule `{0}` cannot be inlined because it contains a reference to itself")]
    RecursiveInline(String),
}
/// Walks a single choice-free rule and flattens it into a linear
/// `Production`, tracking the metadata (precedence, associativity, reserved
/// word sets, aliases, field names) in effect for each step.
struct RuleFlattener {
    // The production currently being built.
    production: Production,
    // Maps declared reserved word set names to their numeric ids.
    reserved_word_set_ids: HashMap<String, ReservedWordSetId>,
    // Stacks of the metadata currently in scope for the subtree being
    // visited; entries are pushed/popped as `Metadata`/`Reserved` wrappers
    // are entered and exited.
    precedence_stack: Vec<Precedence>,
    associativity_stack: Vec<Associativity>,
    reserved_word_stack: Vec<ReservedWordSetId>,
    alias_stack: Vec<Alias>,
    field_name_stack: Vec<String>,
}
impl RuleFlattener {
    /// Creates a flattener with an empty production and empty metadata
    /// stacks.
    const fn new(reserved_word_set_ids: HashMap<String, ReservedWordSetId>) -> Self {
        Self {
            production: Production {
                steps: Vec::new(),
                dynamic_precedence: 0,
            },
            reserved_word_set_ids,
            precedence_stack: Vec::new(),
            associativity_stack: Vec::new(),
            reserved_word_stack: Vec::new(),
            alias_stack: Vec::new(),
            field_name_stack: Vec::new(),
        }
    }

    /// Flattens a variable's rule into a `SyntaxVariable` containing one
    /// production per distinct alternative of the rule.
    fn flatten_variable(&mut self, variable: Variable) -> FlattenGrammarResult<SyntaxVariable> {
        let choices = extract_choices(variable.rule);
        let mut productions = Vec::with_capacity(choices.len());
        for rule in choices {
            let production = self.flatten_rule(rule)?;
            // Different alternatives can flatten to identical productions;
            // keep only one copy of each.
            if !productions.contains(&production) {
                productions.push(production);
            }
        }
        Ok(SyntaxVariable {
            name: variable.name,
            kind: variable.kind,
            productions,
        })
    }

    /// Flattens one choice-free rule into a single production, resetting
    /// all per-rule state first.
    fn flatten_rule(&mut self, rule: Rule) -> FlattenGrammarResult<Production> {
        self.production = Production::default();
        self.alias_stack.clear();
        self.reserved_word_stack.clear();
        self.precedence_stack.clear();
        self.associativity_stack.clear();
        self.field_name_stack.clear();
        self.apply(rule, true)?;
        Ok(self.production.clone())
    }

    /// Recursively appends the steps of `rule` to the current production.
    ///
    /// `at_end` is true when this subtree ends at the final position of the
    /// whole production. Returns `Ok(true)` if at least one step was pushed
    /// while visiting this subtree.
    fn apply(&mut self, rule: Rule, at_end: bool) -> FlattenGrammarResult<bool> {
        match rule {
            Rule::Seq(members) => {
                let mut result = false;
                let last_index = members.len() - 1;
                for (i, member) in members.into_iter().enumerate() {
                    // Only the last member of the sequence can be "at the
                    // end" of the production (and only if the sequence
                    // itself is).
                    result |= self.apply(member, i == last_index && at_end)?;
                }
                Ok(result)
            }
            Rule::Metadata { rule, params } => {
                // Push whichever metadata this wrapper carries, visit the
                // inner rule, then pop the metadata again afterwards.
                let mut has_precedence = false;
                if !params.precedence.is_none() {
                    has_precedence = true;
                    self.precedence_stack.push(params.precedence);
                }
                let mut has_associativity = false;
                if let Some(associativity) = params.associativity {
                    has_associativity = true;
                    self.associativity_stack.push(associativity);
                }
                let mut has_alias = false;
                if let Some(alias) = params.alias {
                    has_alias = true;
                    self.alias_stack.push(alias);
                }
                let mut has_field_name = false;
                if let Some(field_name) = params.field_name {
                    has_field_name = true;
                    self.field_name_stack.push(field_name);
                }
                // Keep the dynamic precedence with the largest magnitude.
                if params.dynamic_precedence.abs() > self.production.dynamic_precedence.abs() {
                    self.production.dynamic_precedence = params.dynamic_precedence;
                }
                let did_push = self.apply(*rule, at_end)?;
                if has_precedence {
                    self.precedence_stack.pop();
                    // If this subtree's final step is not the production's
                    // final step, reset that step's precedence to the
                    // enclosing value.
                    if did_push && !at_end {
                        self.production.steps.last_mut().unwrap().precedence = self
                            .precedence_stack
                            .last()
                            .cloned()
                            .unwrap_or(Precedence::None);
                    }
                }
                if has_associativity {
                    self.associativity_stack.pop();
                    // Same restoration as for precedence above.
                    if did_push && !at_end {
                        self.production.steps.last_mut().unwrap().associativity =
                            self.associativity_stack.last().copied();
                    }
                }
                if has_alias {
                    self.alias_stack.pop();
                }
                if has_field_name {
                    self.field_name_stack.pop();
                }
                Ok(did_push)
            }
            Rule::Reserved { rule, context_name } => {
                // Resolve the named reserved word set; an unknown name is a
                // grammar error.
                self.reserved_word_stack.push(
                    self.reserved_word_set_ids
                        .get(&context_name)
                        .copied()
                        .ok_or_else(|| {
                            FlattenGrammarError::NoReservedWordSet(context_name.clone())
                        })?,
                );
                let did_push = self.apply(*rule, at_end)?;
                self.reserved_word_stack.pop();
                Ok(did_push)
            }
            Rule::Symbol(symbol) => {
                // A symbol becomes one production step, tagged with whatever
                // metadata is currently on the stacks.
                self.production.steps.push(ProductionStep {
                    symbol,
                    precedence: self
                        .precedence_stack
                        .last()
                        .cloned()
                        .unwrap_or(Precedence::None),
                    associativity: self.associativity_stack.last().copied(),
                    reserved_word_set_id: self
                        .reserved_word_stack
                        .last()
                        .copied()
                        .unwrap_or(ReservedWordSetId::default()),
                    alias: self.alias_stack.last().cloned(),
                    field_name: self.field_name_stack.last().cloned(),
                });
                Ok(true)
            }
            // Other rule kinds (e.g. Blank) contribute no steps.
            _ => Ok(false),
        }
    }
}
/// Expands a rule into the distinct choice-free alternatives it denotes,
/// by distributing `Choice` rules over sequences (a cartesian product).
///
/// `Metadata` and `Reserved` wrappers are re-applied to each extracted
/// alternative so no annotation is lost; all other rules are returned
/// unchanged as a single alternative.
fn extract_choices(rule: Rule) -> Vec<Rule> {
    match rule {
        Rule::Seq(elements) => {
            // Cartesian product: extend each partial sequence built so far
            // with every alternative of the next element.
            let mut result = vec![Rule::Blank];
            for element in elements {
                let extraction = extract_choices(element);
                // The product has `result.len() * extraction.len()` entries;
                // reserve the full amount up front.
                let mut next_result = Vec::with_capacity(result.len() * extraction.len());
                for entry in result {
                    for extraction_entry in &extraction {
                        next_result.push(Rule::Seq(vec![entry.clone(), extraction_entry.clone()]));
                    }
                }
                result = next_result;
            }
            result
        }
        Rule::Choice(elements) => {
            // A choice contributes the union of its members' alternatives.
            let mut result = Vec::with_capacity(elements.len());
            for element in elements {
                result.extend(extract_choices(element));
            }
            result
        }
        Rule::Metadata { rule, params } => extract_choices(*rule)
            .into_iter()
            .map(|rule| Rule::Metadata {
                rule: Box::new(rule),
                params: params.clone(),
            })
            .collect(),
        Rule::Reserved { rule, context_name } => extract_choices(*rule)
            .into_iter()
            .map(|rule| Rule::Reserved {
                rule: Box::new(rule),
                context_name: context_name.clone(),
            })
            .collect(),
        _ => vec![rule],
    }
}
/// Reports whether `symbol` appears as a step in any production of any of
/// the given variables.
fn symbol_is_used(variables: &[SyntaxVariable], symbol: Symbol) -> bool {
    variables.iter().any(|variable| {
        variable
            .productions
            .iter()
            .any(|production| production.steps.iter().any(|step| step.symbol == symbol))
    })
}
/// Flattens every variable in the extracted grammar into linear
/// productions, validates the result, and assembles the final
/// `SyntaxGrammar`.
///
/// # Errors
/// Fails if a non-start rule matches the empty string, if a rule marked
/// for inlining references itself, or if a rule names an undeclared
/// reserved word set.
pub(super) fn flatten_grammar(
    grammar: ExtractedSyntaxGrammar,
) -> FlattenGrammarResult<SyntaxGrammar> {
    // Assign each named reserved word set an id based on its position.
    let reserved_word_set_ids_by_name = grammar
        .reserved_word_sets
        .iter()
        .enumerate()
        .map(|(ix, set)| (set.name.clone(), ReservedWordSetId(ix)))
        .collect::<HashMap<_, _>>();

    let mut flattener = RuleFlattener::new(reserved_word_set_ids_by_name);
    let variables = grammar
        .variables
        .into_iter()
        .map(|variable| flattener.flatten_variable(variable))
        .collect::<FlattenGrammarResult<Vec<_>>>()?;

    // Validate the flattened productions.
    for (i, variable) in variables.iter().enumerate() {
        let symbol = Symbol::non_terminal(i);
        let used = symbol_is_used(&variables, symbol);
        for production in &variable.productions {
            // An empty production is only permitted for the (unused) start
            // rule.
            if used && production.steps.is_empty() {
                return Err(FlattenGrammarError::EmptyString(variable.name.clone()));
            }
            // An inlined rule must not reference itself.
            if grammar.variables_to_inline.contains(&symbol)
                && production.steps.iter().any(|step| step.symbol == symbol)
            {
                return Err(FlattenGrammarError::RecursiveInline(variable.name.clone()));
            }
        }
    }

    let mut reserved_word_sets = grammar
        .reserved_word_sets
        .into_iter()
        .map(|set| set.reserved_words.into_iter().collect())
        .collect::<Vec<_>>();
    // If no default reserved word set is specified, there are no reserved words.
    if reserved_word_sets.is_empty() {
        reserved_word_sets.push(TokenSet::default());
    }

    Ok(SyntaxGrammar {
        extra_symbols: grammar.extra_symbols,
        expected_conflicts: grammar.expected_conflicts,
        variables_to_inline: grammar.variables_to_inline,
        precedence_orderings: grammar.precedence_orderings,
        external_tokens: grammar.external_tokens,
        supertype_symbols: grammar.supertype_symbols,
        word_token: grammar.word_token,
        reserved_word_sets,
        variables,
    })
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::grammars::VariableType;

    // Precedence/associativity annotations apply to the steps they cover;
    // the last step inside an annotation that is not at the end of the
    // production falls back to the enclosing annotation.
    #[test]
    fn test_flatten_grammar() {
        let mut flattener = RuleFlattener::new(HashMap::default());
        let result = flattener
            .flatten_variable(Variable {
                name: "test".to_string(),
                kind: VariableType::Named,
                rule: Rule::seq(vec![
                    Rule::non_terminal(1),
                    Rule::prec_left(
                        Precedence::Integer(101),
                        Rule::seq(vec![
                            Rule::non_terminal(2),
                            Rule::choice(vec![
                                Rule::prec_right(
                                    Precedence::Integer(102),
                                    Rule::seq(vec![Rule::non_terminal(3), Rule::non_terminal(4)]),
                                ),
                                Rule::non_terminal(5),
                            ]),
                            Rule::non_terminal(6),
                        ]),
                    ),
                    Rule::non_terminal(7),
                ]),
            })
            .unwrap();
        assert_eq!(
            result.productions,
            vec![
                Production {
                    dynamic_precedence: 0,
                    steps: vec![
                        ProductionStep::new(Symbol::non_terminal(1)),
                        ProductionStep::new(Symbol::non_terminal(2))
                            .with_prec(Precedence::Integer(101), Some(Associativity::Left)),
                        ProductionStep::new(Symbol::non_terminal(3))
                            .with_prec(Precedence::Integer(102), Some(Associativity::Right)),
                        ProductionStep::new(Symbol::non_terminal(4))
                            .with_prec(Precedence::Integer(101), Some(Associativity::Left)),
                        ProductionStep::new(Symbol::non_terminal(6)),
                        ProductionStep::new(Symbol::non_terminal(7)),
                    ]
                },
                Production {
                    dynamic_precedence: 0,
                    steps: vec![
                        ProductionStep::new(Symbol::non_terminal(1)),
                        ProductionStep::new(Symbol::non_terminal(2))
                            .with_prec(Precedence::Integer(101), Some(Associativity::Left)),
                        ProductionStep::new(Symbol::non_terminal(5))
                            .with_prec(Precedence::Integer(101), Some(Associativity::Left)),
                        ProductionStep::new(Symbol::non_terminal(6)),
                        ProductionStep::new(Symbol::non_terminal(7)),
                    ]
                },
            ]
        );
    }

    // When dynamic precedences are nested, each production keeps the one
    // with the largest absolute value among those covering it.
    #[test]
    fn test_flatten_grammar_with_maximum_dynamic_precedence() {
        let mut flattener = RuleFlattener::new(HashMap::default());
        let result = flattener
            .flatten_variable(Variable {
                name: "test".to_string(),
                kind: VariableType::Named,
                rule: Rule::seq(vec![
                    Rule::non_terminal(1),
                    Rule::prec_dynamic(
                        101,
                        Rule::seq(vec![
                            Rule::non_terminal(2),
                            Rule::choice(vec![
                                Rule::prec_dynamic(
                                    102,
                                    Rule::seq(vec![Rule::non_terminal(3), Rule::non_terminal(4)]),
                                ),
                                Rule::non_terminal(5),
                            ]),
                            Rule::non_terminal(6),
                        ]),
                    ),
                    Rule::non_terminal(7),
                ]),
            })
            .unwrap();
        assert_eq!(
            result.productions,
            vec![
                Production {
                    dynamic_precedence: 102,
                    steps: vec![
                        ProductionStep::new(Symbol::non_terminal(1)),
                        ProductionStep::new(Symbol::non_terminal(2)),
                        ProductionStep::new(Symbol::non_terminal(3)),
                        ProductionStep::new(Symbol::non_terminal(4)),
                        ProductionStep::new(Symbol::non_terminal(6)),
                        ProductionStep::new(Symbol::non_terminal(7)),
                    ],
                },
                Production {
                    dynamic_precedence: 101,
                    steps: vec![
                        ProductionStep::new(Symbol::non_terminal(1)),
                        ProductionStep::new(Symbol::non_terminal(2)),
                        ProductionStep::new(Symbol::non_terminal(5)),
                        ProductionStep::new(Symbol::non_terminal(6)),
                        ProductionStep::new(Symbol::non_terminal(7)),
                    ],
                },
            ]
        );
    }

    // A precedence annotation that extends to the end of the production is
    // retained on the final step.
    #[test]
    fn test_flatten_grammar_with_final_precedence() {
        let mut flattener = RuleFlattener::new(HashMap::default());
        let result = flattener
            .flatten_variable(Variable {
                name: "test".to_string(),
                kind: VariableType::Named,
                rule: Rule::prec_left(
                    Precedence::Integer(101),
                    Rule::seq(vec![Rule::non_terminal(1), Rule::non_terminal(2)]),
                ),
            })
            .unwrap();
        assert_eq!(
            result.productions,
            vec![Production {
                dynamic_precedence: 0,
                steps: vec![
                    ProductionStep::new(Symbol::non_terminal(1))
                        .with_prec(Precedence::Integer(101), Some(Associativity::Left)),
                    ProductionStep::new(Symbol::non_terminal(2))
                        .with_prec(Precedence::Integer(101), Some(Associativity::Left)),
                ]
            }]
        );
        // A single-step production behaves the same way.
        let result = flattener
            .flatten_variable(Variable {
                name: "test".to_string(),
                kind: VariableType::Named,
                rule: Rule::prec_left(
                    Precedence::Integer(101),
                    Rule::seq(vec![Rule::non_terminal(1)]),
                ),
            })
            .unwrap();
        assert_eq!(
            result.productions,
            vec![Production {
                dynamic_precedence: 0,
                steps: vec![ProductionStep::new(Symbol::non_terminal(1))
                    .with_prec(Precedence::Integer(101), Some(Associativity::Left)),]
            }]
        );
    }

    // Field names propagate to the steps they cover; an optional branch
    // produces one production with the field and one without.
    #[test]
    fn test_flatten_grammar_with_field_names() {
        let mut flattener = RuleFlattener::new(HashMap::default());
        let result = flattener
            .flatten_variable(Variable {
                name: "test".to_string(),
                kind: VariableType::Named,
                rule: Rule::seq(vec![
                    Rule::field("first-thing".to_string(), Rule::terminal(1)),
                    Rule::terminal(2),
                    Rule::choice(vec![
                        Rule::Blank,
                        Rule::field("second-thing".to_string(), Rule::terminal(3)),
                    ]),
                ]),
            })
            .unwrap();
        assert_eq!(
            result.productions,
            vec![
                Production {
                    dynamic_precedence: 0,
                    steps: vec![
                        ProductionStep::new(Symbol::terminal(1)).with_field_name("first-thing"),
                        ProductionStep::new(Symbol::terminal(2))
                    ]
                },
                Production {
                    dynamic_precedence: 0,
                    steps: vec![
                        ProductionStep::new(Symbol::terminal(1)).with_field_name("first-thing"),
                        ProductionStep::new(Symbol::terminal(2)),
                        ProductionStep::new(Symbol::terminal(3)).with_field_name("second-thing"),
                    ]
                },
            ]
        );
    }

    // A rule marked for inlining may not reference itself.
    #[test]
    fn test_flatten_grammar_with_recursive_inline_variable() {
        let result = flatten_grammar(ExtractedSyntaxGrammar {
            extra_symbols: Vec::new(),
            expected_conflicts: Vec::new(),
            variables_to_inline: vec![Symbol::non_terminal(0)],
            precedence_orderings: Vec::new(),
            external_tokens: Vec::new(),
            supertype_symbols: Vec::new(),
            word_token: None,
            reserved_word_sets: Vec::new(),
            variables: vec![Variable {
                name: "test".to_string(),
                kind: VariableType::Named,
                rule: Rule::seq(vec![
                    Rule::non_terminal(0),
                    Rule::non_terminal(1),
                    Rule::non_terminal(2),
                ]),
            }],
        });
        assert_eq!(
            result.unwrap_err().to_string(),
            "Rule `test` cannot be inlined because it contains a reference to itself",
        );
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/prepare_grammar/process_inlines.rs | crates/generate/src/prepare_grammar/process_inlines.rs | use std::collections::HashMap;
use serde::Serialize;
use thiserror::Error;
use crate::{
grammars::{InlinedProductionMap, LexicalGrammar, Production, ProductionStep, SyntaxGrammar},
rules::SymbolType,
};
/// Identifies one step of one production, either a production belonging to
/// a grammar variable or one synthesized by inlining.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
struct ProductionStepId {
    // A `None` value here means that the production itself was produced via inlining,
    // and is stored in the builder's `productions` vector, as opposed to being
    // stored in one of the grammar's variables.
    variable_index: Option<usize>,
    production_index: usize,
    step_index: usize,
}
/// Accumulates the productions created by inlining, together with a map
/// from each inlinable production step to the indices of its expansions.
struct InlinedProductionMapBuilder {
    // For each step whose symbol was inlined, the indices (into
    // `productions`) of the productions that replace it.
    production_indices_by_step_id: HashMap<ProductionStepId, Vec<usize>>,
    // All productions synthesized by inlining, deduplicated.
    productions: Vec<Production>,
}
impl InlinedProductionMapBuilder {
    /// Walks every production step in the grammar, expanding each step whose
    /// symbol is marked for inlining, and returns the finished map.
    fn build(mut self, grammar: &SyntaxGrammar) -> InlinedProductionMap {
        // Worklist of steps still to examine. Inlining a step replaces its
        // entry with entries for the derived productions, whose steps are
        // then examined in turn.
        let mut step_ids_to_process = Vec::new();
        for (variable_index, variable) in grammar.variables.iter().enumerate() {
            for production_index in 0..variable.productions.len() {
                step_ids_to_process.push(ProductionStepId {
                    variable_index: Some(variable_index),
                    production_index,
                    step_index: 0,
                });
                while !step_ids_to_process.is_empty() {
                    let mut i = 0;
                    while i < step_ids_to_process.len() {
                        let step_id = step_ids_to_process[i];
                        if let Some(step) = self.production_step_for_id(step_id, grammar) {
                            if grammar.variables_to_inline.contains(&step.symbol) {
                                // Replace this entry with ids pointing at the
                                // same step index within each production
                                // produced by the inlining.
                                let inlined_step_ids = self
                                    .inline_production_at_step(step_id, grammar)
                                    .iter()
                                    .copied()
                                    .map(|production_index| ProductionStepId {
                                        variable_index: None,
                                        production_index,
                                        step_index: step_id.step_index,
                                    });
                                step_ids_to_process.splice(i..=i, inlined_step_ids);
                            } else {
                                // Not inlinable: advance to the next step of
                                // the same production.
                                step_ids_to_process[i] = ProductionStepId {
                                    variable_index: step_id.variable_index,
                                    production_index: step_id.production_index,
                                    step_index: step_id.step_index + 1,
                                };
                                i += 1;
                            }
                        } else {
                            // Past the end of this production; drop the entry.
                            step_ids_to_process.remove(i);
                        }
                    }
                }
            }
        }

        let productions = self.productions;
        let production_indices_by_step_id = self.production_indices_by_step_id;
        // Re-key the map by each production's address plus the step index,
        // which is the form used for later lookups.
        let production_map = production_indices_by_step_id
            .into_iter()
            .map(|(step_id, production_indices)| {
                let production =
                    core::ptr::from_ref::<Production>(step_id.variable_index.map_or_else(
                        || &productions[step_id.production_index],
                        |variable_index| {
                            &grammar.variables[variable_index].productions[step_id.production_index]
                        },
                    ));
                ((production, step_id.step_index as u32), production_indices)
            })
            .collect();
        InlinedProductionMap {
            productions,
            production_map,
        }
    }

    /// Computes (and caches) the productions produced by inlining the symbol
    /// at `step_id`, returning their indices into `self.productions`.
    fn inline_production_at_step<'a>(
        &'a mut self,
        step_id: ProductionStepId,
        grammar: &'a SyntaxGrammar,
    ) -> &'a [usize] {
        // Build a list of productions produced by inlining rules.
        let mut i = 0;
        let step_index = step_id.step_index;
        let mut productions_to_add = vec![self.production_for_id(step_id, grammar).clone()];
        while i < productions_to_add.len() {
            if let Some(step) = productions_to_add[i].steps.get(step_index) {
                let symbol = step.symbol;
                if grammar.variables_to_inline.contains(&symbol) {
                    // Remove the production from the vector, replacing it with a placeholder.
                    let production = productions_to_add
                        .splice(i..=i, std::iter::once(&Production::default()).cloned())
                        .next()
                        .unwrap();
                    // Replace the placeholder with the inlined productions.
                    productions_to_add.splice(
                        i..=i,
                        grammar.variables[symbol.index].productions.iter().map(|p| {
                            let mut production = production.clone();
                            // Splice the inlined production's steps in place
                            // of the step being inlined, keeping the step
                            // that was removed.
                            let removed_step = production
                                .steps
                                .splice(step_index..=step_index, p.steps.iter().cloned())
                                .next()
                                .unwrap();
                            let inserted_steps =
                                &mut production.steps[step_index..(step_index + p.steps.len())];
                            // The removed step's alias and field name apply
                            // to every inserted step.
                            if let Some(alias) = removed_step.alias {
                                for inserted_step in inserted_steps.iter_mut() {
                                    inserted_step.alias = Some(alias.clone());
                                }
                            }
                            if let Some(field_name) = removed_step.field_name {
                                for inserted_step in inserted_steps.iter_mut() {
                                    inserted_step.field_name = Some(field_name.clone());
                                }
                            }
                            // The last inserted step inherits the removed
                            // step's precedence/associativity when it has
                            // none of its own.
                            if let Some(last_inserted_step) = inserted_steps.last_mut() {
                                if last_inserted_step.precedence.is_none() {
                                    last_inserted_step.precedence = removed_step.precedence;
                                }
                                if last_inserted_step.associativity.is_none() {
                                    last_inserted_step.associativity = removed_step.associativity;
                                }
                            }
                            // Keep the dynamic precedence with the largest
                            // magnitude.
                            if p.dynamic_precedence.abs() > production.dynamic_precedence.abs() {
                                production.dynamic_precedence = p.dynamic_precedence;
                            }
                            production
                        }),
                    );
                    // Re-examine the same index: the inlined steps may
                    // themselves require inlining.
                    continue;
                }
            }
            i += 1;
        }

        // Store all the computed productions.
        let result = productions_to_add
            .into_iter()
            .map(|production| {
                self.productions
                    .iter()
                    .position(|p| *p == production)
                    .unwrap_or_else(|| {
                        self.productions.push(production);
                        self.productions.len() - 1
                    })
            })
            .collect();

        // Cache these productions based on the original production step.
        self.production_indices_by_step_id
            .entry(step_id)
            .or_insert(result)
    }

    /// Resolves a step id to its production: in the grammar for original
    /// productions, in `self.productions` for synthesized ones.
    fn production_for_id<'a>(
        &'a self,
        id: ProductionStepId,
        grammar: &'a SyntaxGrammar,
    ) -> &'a Production {
        id.variable_index.map_or_else(
            || &self.productions[id.production_index],
            |variable_index| &grammar.variables[variable_index].productions[id.production_index],
        )
    }

    /// Resolves a step id to the step itself, or `None` if the step index
    /// is past the end of the production.
    fn production_step_for_id<'a>(
        &'a self,
        id: ProductionStepId,
        grammar: &'a SyntaxGrammar,
    ) -> Option<&'a ProductionStep> {
        self.production_for_id(id, grammar).steps.get(id.step_index)
    }
}
/// Result type for the inlining phase.
pub type ProcessInlinesResult<T> = Result<T, ProcessInlinesError>;

/// Errors raised when the grammar marks a symbol for inlining that can
/// never be inlined.
#[derive(Debug, Error, Serialize)]
pub enum ProcessInlinesError {
    /// The symbol is an external token.
    #[error("External token `{0}` cannot be inlined")]
    ExternalToken(String),
    /// The symbol is a lexical token.
    #[error("Token `{0}` cannot be inlined")]
    Token(String),
    /// The symbol is the grammar's start rule (non-terminal 0).
    #[error("Rule `{0}` cannot be inlined because it is the first rule")]
    FirstRule(String),
}
/// Validates the grammar's inlined symbols and precomputes the map of
/// productions that result from inlining them.
///
/// # Errors
/// Returns an error if an external token, a lexical token, or the
/// grammar's first rule (non-terminal 0) is marked for inlining.
pub(super) fn process_inlines(
    grammar: &SyntaxGrammar,
    lexical_grammar: &LexicalGrammar,
) -> ProcessInlinesResult<InlinedProductionMap> {
    // Reject symbol kinds that can never be inlined before doing any work.
    for symbol in &grammar.variables_to_inline {
        match symbol.kind {
            SymbolType::External => {
                return Err(ProcessInlinesError::ExternalToken(
                    grammar.external_tokens[symbol.index].name.clone(),
                ));
            }
            SymbolType::Terminal => {
                return Err(ProcessInlinesError::Token(
                    lexical_grammar.variables[symbol.index].name.clone(),
                ));
            }
            SymbolType::NonTerminal if symbol.index == 0 => {
                return Err(ProcessInlinesError::FirstRule(
                    grammar.variables[symbol.index].name.clone(),
                ));
            }
            _ => {}
        }
    }

    let builder = InlinedProductionMapBuilder {
        productions: Vec::new(),
        production_indices_by_step_id: HashMap::new(),
    };
    Ok(builder.build(grammar))
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        grammars::{LexicalVariable, SyntaxVariable, VariableType},
        rules::{Associativity, Precedence, Symbol},
    };

    // Inlining a non-terminal with two productions yields two expansions of
    // the containing production.
    #[test]
    fn test_basic_inlining() {
        let grammar = SyntaxGrammar {
            variables_to_inline: vec![Symbol::non_terminal(1)],
            variables: vec![
                SyntaxVariable {
                    name: "non-terminal-0".to_string(),
                    kind: VariableType::Named,
                    productions: vec![Production {
                        dynamic_precedence: 0,
                        steps: vec![
                            ProductionStep::new(Symbol::terminal(10)),
                            ProductionStep::new(Symbol::non_terminal(1)), // inlined
                            ProductionStep::new(Symbol::terminal(11)),
                        ],
                    }],
                },
                SyntaxVariable {
                    name: "non-terminal-1".to_string(),
                    kind: VariableType::Named,
                    productions: vec![
                        Production {
                            dynamic_precedence: 0,
                            steps: vec![
                                ProductionStep::new(Symbol::terminal(12)),
                                ProductionStep::new(Symbol::terminal(13)),
                            ],
                        },
                        Production {
                            dynamic_precedence: -2,
                            steps: vec![ProductionStep::new(Symbol::terminal(14))],
                        },
                    ],
                },
            ],
            ..Default::default()
        };
        let inline_map = process_inlines(&grammar, &LexicalGrammar::default()).unwrap();
        // Nothing to inline at step 0.
        assert!(inline_map
            .inlined_productions(&grammar.variables[0].productions[0], 0)
            .is_none());
        // Inlining variable 1 yields two productions.
        assert_eq!(
            inline_map
                .inlined_productions(&grammar.variables[0].productions[0], 1)
                .unwrap()
                .cloned()
                .collect::<Vec<_>>(),
            vec![
                Production {
                    dynamic_precedence: 0,
                    steps: vec![
                        ProductionStep::new(Symbol::terminal(10)),
                        ProductionStep::new(Symbol::terminal(12)),
                        ProductionStep::new(Symbol::terminal(13)),
                        ProductionStep::new(Symbol::terminal(11)),
                    ],
                },
                Production {
                    dynamic_precedence: -2,
                    steps: vec![
                        ProductionStep::new(Symbol::terminal(10)),
                        ProductionStep::new(Symbol::terminal(14)),
                        ProductionStep::new(Symbol::terminal(11)),
                    ],
                },
            ]
        );
    }

    // Productions that result from inlining can themselves contain steps
    // that require further inlining.
    #[test]
    fn test_nested_inlining() {
        let grammar = SyntaxGrammar {
            variables: vec![
                SyntaxVariable {
                    name: "non-terminal-0".to_string(),
                    kind: VariableType::Named,
                    productions: vec![Production {
                        dynamic_precedence: 0,
                        steps: vec![
                            ProductionStep::new(Symbol::terminal(10)),
                            ProductionStep::new(Symbol::non_terminal(1)), // inlined
                            ProductionStep::new(Symbol::terminal(11)),
                            ProductionStep::new(Symbol::non_terminal(2)), // inlined
                            ProductionStep::new(Symbol::terminal(12)),
                        ],
                    }],
                },
                SyntaxVariable {
                    name: "non-terminal-1".to_string(),
                    kind: VariableType::Named,
                    productions: vec![
                        Production {
                            dynamic_precedence: 0,
                            steps: vec![ProductionStep::new(Symbol::terminal(13))],
                        },
                        Production {
                            dynamic_precedence: 0,
                            steps: vec![
                                ProductionStep::new(Symbol::non_terminal(3)), // inlined
                                ProductionStep::new(Symbol::terminal(14)),
                            ],
                        },
                    ],
                },
                SyntaxVariable {
                    name: "non-terminal-2".to_string(),
                    kind: VariableType::Named,
                    productions: vec![Production {
                        dynamic_precedence: 0,
                        steps: vec![ProductionStep::new(Symbol::terminal(15))],
                    }],
                },
                SyntaxVariable {
                    name: "non-terminal-3".to_string(),
                    kind: VariableType::Named,
                    productions: vec![Production {
                        dynamic_precedence: 0,
                        steps: vec![ProductionStep::new(Symbol::terminal(16))],
                    }],
                },
            ],
            variables_to_inline: vec![
                Symbol::non_terminal(1),
                Symbol::non_terminal(2),
                Symbol::non_terminal(3),
            ],
            ..Default::default()
        };
        let inline_map = process_inlines(&grammar, &LexicalGrammar::default()).unwrap();
        let productions = inline_map
            .inlined_productions(&grammar.variables[0].productions[0], 1)
            .unwrap()
            .collect::<Vec<_>>();
        assert_eq!(
            productions.iter().copied().cloned().collect::<Vec<_>>(),
            vec![
                Production {
                    dynamic_precedence: 0,
                    steps: vec![
                        ProductionStep::new(Symbol::terminal(10)),
                        ProductionStep::new(Symbol::terminal(13)),
                        ProductionStep::new(Symbol::terminal(11)),
                        ProductionStep::new(Symbol::non_terminal(2)),
                        ProductionStep::new(Symbol::terminal(12)),
                    ],
                },
                Production {
                    dynamic_precedence: 0,
                    steps: vec![
                        ProductionStep::new(Symbol::terminal(10)),
                        ProductionStep::new(Symbol::terminal(16)),
                        ProductionStep::new(Symbol::terminal(14)),
                        ProductionStep::new(Symbol::terminal(11)),
                        ProductionStep::new(Symbol::non_terminal(2)),
                        ProductionStep::new(Symbol::terminal(12)),
                    ],
                },
            ]
        );
        // The derived production can itself be looked up for the remaining
        // inlinable step.
        assert_eq!(
            inline_map
                .inlined_productions(productions[0], 3)
                .unwrap()
                .cloned()
                .collect::<Vec<_>>(),
            vec![Production {
                dynamic_precedence: 0,
                steps: vec![
                    ProductionStep::new(Symbol::terminal(10)),
                    ProductionStep::new(Symbol::terminal(13)),
                    ProductionStep::new(Symbol::terminal(11)),
                    ProductionStep::new(Symbol::terminal(15)),
                    ProductionStep::new(Symbol::terminal(12)),
                ],
            },]
        );
    }

    // Aliases apply to every inlined step; precedence/associativity from the
    // inlined step transfer to the final inlined step only.
    #[test]
    fn test_inlining_with_precedence_and_alias() {
        let grammar = SyntaxGrammar {
            variables_to_inline: vec![Symbol::non_terminal(1), Symbol::non_terminal(2)],
            variables: vec![
                SyntaxVariable {
                    name: "non-terminal-0".to_string(),
                    kind: VariableType::Named,
                    productions: vec![Production {
                        dynamic_precedence: 0,
                        steps: vec![
                            // inlined
                            ProductionStep::new(Symbol::non_terminal(1))
                                .with_prec(Precedence::Integer(1), Some(Associativity::Left)),
                            ProductionStep::new(Symbol::terminal(10)),
                            // inlined
                            ProductionStep::new(Symbol::non_terminal(2))
                                .with_alias("outer_alias", true),
                        ],
                    }],
                },
                SyntaxVariable {
                    name: "non-terminal-1".to_string(),
                    kind: VariableType::Named,
                    productions: vec![Production {
                        dynamic_precedence: 0,
                        steps: vec![
                            ProductionStep::new(Symbol::terminal(11))
                                .with_prec(Precedence::Integer(2), None)
                                .with_alias("inner_alias", true),
                            ProductionStep::new(Symbol::terminal(12)),
                        ],
                    }],
                },
                SyntaxVariable {
                    name: "non-terminal-2".to_string(),
                    kind: VariableType::Named,
                    productions: vec![Production {
                        dynamic_precedence: 0,
                        steps: vec![ProductionStep::new(Symbol::terminal(13))],
                    }],
                },
            ],
            ..Default::default()
        };
        let inline_map = process_inlines(&grammar, &LexicalGrammar::default()).unwrap();
        let productions = inline_map
            .inlined_productions(&grammar.variables[0].productions[0], 0)
            .unwrap()
            .collect::<Vec<_>>();
        assert_eq!(
            productions.iter().copied().cloned().collect::<Vec<_>>(),
            vec![Production {
                dynamic_precedence: 0,
                steps: vec![
                    // The first step in the inlined production retains its precedence
                    // and alias.
                    ProductionStep::new(Symbol::terminal(11))
                        .with_prec(Precedence::Integer(2), None)
                        .with_alias("inner_alias", true),
                    // The final step of the inlined production inherits the precedence of
                    // the inlined step.
                    ProductionStep::new(Symbol::terminal(12))
                        .with_prec(Precedence::Integer(1), Some(Associativity::Left)),
                    ProductionStep::new(Symbol::terminal(10)),
                    ProductionStep::new(Symbol::non_terminal(2)).with_alias("outer_alias", true),
                ]
            }],
        );
        assert_eq!(
            inline_map
                .inlined_productions(productions[0], 3)
                .unwrap()
                .cloned()
                .collect::<Vec<_>>(),
            vec![Production {
                dynamic_precedence: 0,
                steps: vec![
                    ProductionStep::new(Symbol::terminal(11))
                        .with_prec(Precedence::Integer(2), None)
                        .with_alias("inner_alias", true),
                    ProductionStep::new(Symbol::terminal(12))
                        .with_prec(Precedence::Integer(1), Some(Associativity::Left)),
                    ProductionStep::new(Symbol::terminal(10)),
                    // All steps of the inlined production inherit their alias from the
                    // inlined step.
                    ProductionStep::new(Symbol::terminal(13)).with_alias("outer_alias", true),
                ]
            }],
        );
    }

    // Marking a lexical token for inlining is an error.
    #[test]
    fn test_error_when_inlining_tokens() {
        let lexical_grammar = LexicalGrammar {
            variables: vec![LexicalVariable {
                name: "something".to_string(),
                kind: VariableType::Named,
                implicit_precedence: 0,
                start_state: 0,
            }],
            ..Default::default()
        };
        let grammar = SyntaxGrammar {
            variables_to_inline: vec![Symbol::terminal(0)],
            variables: vec![SyntaxVariable {
                name: "non-terminal-0".to_string(),
                kind: VariableType::Named,
                productions: vec![Production {
                    dynamic_precedence: 0,
                    steps: vec![ProductionStep::new(Symbol::terminal(0))],
                }],
            }],
            ..Default::default()
        };
        let result = process_inlines(&grammar, &lexical_grammar);
        assert!(result.is_err(), "expected an error, but got none");
        let err = result.err().unwrap();
        assert_eq!(err.to_string(), "Token `something` cannot be inlined",);
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/prepare_grammar/expand_tokens.rs | crates/generate/src/prepare_grammar/expand_tokens.rs | use regex_syntax::{
hir::{Class, Hir, HirKind},
ParserBuilder,
};
use serde::Serialize;
use thiserror::Error;
use super::ExtractedLexicalGrammar;
use crate::{
grammars::{LexicalGrammar, LexicalVariable},
nfa::{CharacterSet, Nfa, NfaState},
rules::{Precedence, Rule},
};
/// Incrementally builds the NFA covering all of a grammar's tokens.
struct NfaBuilder {
    // The NFA under construction; new states are appended as rules expand.
    nfa: Nfa,
    // True while expanding separator rules (set around the separator
    // expansion in `expand_tokens`).
    is_sep: bool,
    // Stack of numeric precedences in effect. NOTE(review): consumed by
    // `expand_rule`/`expand_regex`, parts of which are outside this view —
    // confirm exact semantics there.
    precedence_stack: Vec<i32>,
}
/// Result type for expanding a lexical grammar's tokens into an NFA.
pub type ExpandTokensResult<T> = Result<T, ExpandTokensError>;

/// Errors raised while expanding token rules into NFA states.
#[derive(Debug, Error, Serialize)]
pub enum ExpandTokensError {
    /// A token rule matches the empty string.
    #[error(
        "The rule `{0}` matches the empty string.
Tree-sitter does not support syntactic rules that match the empty string
unless they are used only as the grammar's start rule.
"
    )]
    EmptyString(String),
    /// A rule failed to expand; carries the name of the rule being
    /// processed.
    #[error(transparent)]
    Processing(ExpandTokensProcessingError),
    /// A separator rule failed to expand.
    #[error(transparent)]
    ExpandRule(ExpandRuleError),
}
/// An `ExpandRuleError` paired with the name of the rule that was being
/// processed when it occurred.
#[derive(Debug, Error, Serialize)]
pub struct ExpandTokensProcessingError {
    // Name of the token rule being expanded.
    rule: String,
    // The underlying expansion failure.
    error: ExpandRuleError,
}
impl std::fmt::Display for ExpandTokensProcessingError {
    /// Formats the error as the underlying failure prefixed with the name
    /// of the rule that was being processed.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // `writeln!` already yields the `fmt::Result` we need to return.
        writeln!(
            f,
            "Error processing rule {}: Grammar error: Unexpected rule {:?}",
            self.rule, self.error
        )
    }
}
/// Returns the implicit lexical precedence of a token rule: string
/// literals rank at 2, other leaves at 0, and an immediate token
/// (`is_main_token`) adds one on top of its inner rule's value.
fn get_implicit_precedence(rule: &Rule) -> i32 {
    match rule {
        Rule::String(_) => 2,
        Rule::Metadata { rule, params } => {
            // `is_main_token` contributes exactly one extra point.
            get_implicit_precedence(rule) + i32::from(params.is_main_token)
        }
        _ => 0,
    }
}
/// Returns the explicit integer precedence attached to a token rule's
/// outermost metadata, or 0 when there is none.
const fn get_completion_precedence(rule: &Rule) -> i32 {
    match rule {
        Rule::Metadata { params, .. } => match params.precedence {
            Precedence::Integer(p) => p,
            _ => 0,
        },
        _ => 0,
    }
}
/// Expands every token rule in the extracted lexical grammar into NFA
/// states, producing one `LexicalVariable` per token that records its
/// start state in the shared NFA.
///
/// # Errors
/// Fails if a token rule matches the empty string or if a rule (or a
/// separator rule) cannot be expanded.
pub fn expand_tokens(mut grammar: ExtractedLexicalGrammar) -> ExpandTokensResult<LexicalGrammar> {
    let mut builder = NfaBuilder {
        nfa: Nfa::new(),
        is_sep: true,
        precedence_stack: vec![0],
    };

    // Combine all separators into a single optional, repeatable rule.
    let separator_rule = if grammar.separators.is_empty() {
        Rule::Blank
    } else {
        grammar.separators.push(Rule::Blank);
        Rule::repeat(Rule::choice(grammar.separators))
    };

    let mut variables = Vec::with_capacity(grammar.variables.len());
    for (i, variable) in grammar.variables.into_iter().enumerate() {
        if variable.rule.is_empty() {
            Err(ExpandTokensError::EmptyString(variable.name.clone()))?;
        }

        // Immediate tokens skip the separator expansion below.
        let is_immediate_token = match &variable.rule {
            Rule::Metadata { params, .. } => params.is_main_token,
            _ => false,
        };

        builder.is_sep = false;
        // States are built back-to-front: push this token's accept state
        // first, then expand the rule in front of it.
        builder.nfa.states.push(NfaState::Accept {
            variable_index: i,
            precedence: get_completion_precedence(&variable.rule),
        });
        let last_state_id = builder.nfa.last_state_id();
        builder
            .expand_rule(&variable.rule, last_state_id)
            .map_err(|e| {
                ExpandTokensError::Processing(ExpandTokensProcessingError {
                    rule: variable.name.clone(),
                    error: e,
                })
            })?;

        if !is_immediate_token {
            // Expand the separator rule in front of the token (back-to-front
            // construction), so separators are consumed before it.
            builder.is_sep = true;
            let last_state_id = builder.nfa.last_state_id();
            builder
                .expand_rule(&separator_rule, last_state_id)
                .map_err(ExpandTokensError::ExpandRule)?;
        }

        variables.push(LexicalVariable {
            name: variable.name,
            kind: variable.kind,
            implicit_precedence: get_implicit_precedence(&variable.rule),
            // The most recently created state is this token's entry point.
            start_state: builder.nfa.last_state_id(),
        });
    }

    Ok(LexicalGrammar {
        nfa: builder.nfa,
        variables,
    })
}
/// Result alias for rule-expansion operations.
pub type ExpandRuleResult<T> = Result<T, ExpandRuleError>;
/// Errors that can occur while expanding a single rule into NFA states.
#[derive(Debug, Error, Serialize)]
pub enum ExpandRuleError {
    // A rule kind that is not valid inside a token definition.
    #[error("Grammar error: Unexpected rule {0:?}")]
    UnexpectedRule(Rule),
    // A regex pattern failed to parse.
    #[error("{0}")]
    Parse(String),
    #[error(transparent)]
    ExpandRegex(ExpandRegexError),
}
/// Result alias for regex-expansion operations.
pub type ExpandRegexResult<T> = Result<T, ExpandRegexError>;
/// Errors that can occur while expanding a parsed regex into NFA states.
#[derive(Debug, Error, Serialize)]
pub enum ExpandRegexError {
    // A regex literal contained invalid UTF-8.
    #[error("{0}")]
    Utf8(String),
    // Look-around assertions (e.g. `^`, `$`, `\b`) cannot be compiled.
    #[error("Regex error: Assertions are not supported")]
    Assertion,
}
impl NfaBuilder {
    /// Compiles `rule` into NFA states that lead to `next_state_id`.
    ///
    /// States are built in reverse: the caller passes the state reached
    /// *after* the rule matches, and the rule's entry point is
    /// `self.nfa.last_state_id()` once this returns. Returns `Ok(true)` if any
    /// states were added (the rule can consume input), `Ok(false)` if the rule
    /// matches only the empty string.
    fn expand_rule(&mut self, rule: &Rule, mut next_state_id: u32) -> ExpandRuleResult<bool> {
        match rule {
            Rule::Pattern(s, f) => {
                // With unicode enabled, `\w`, `\s` and `\d` expand to character sets that are much
                // larger than intended, so we replace them with the actual
                // character sets they should represent. If the full unicode range
                // of `\w`, `\s` or `\d` are needed then `\p{L}`, `\p{Z}` and `\p{N}` should be
                // used.
                let s = s
                    .replace(r"\w", r"[0-9A-Za-z_]")
                    .replace(r"\s", r"[\t-\r ]")
                    .replace(r"\d", r"[0-9]")
                    .replace(r"\W", r"[^0-9A-Za-z_]")
                    .replace(r"\S", r"[^\t-\r ]")
                    .replace(r"\D", r"[^0-9]");
                let mut parser = ParserBuilder::new()
                    .case_insensitive(f.contains('i'))
                    .unicode(true)
                    .utf8(false)
                    .build();
                let hir = parser
                    .parse(&s)
                    .map_err(|e| ExpandRuleError::Parse(e.to_string()))?;
                self.expand_regex(&hir, next_state_id)
                    .map_err(ExpandRuleError::ExpandRegex)
            }
            Rule::String(s) => {
                // Push one Advance state per character, last character first.
                for c in s.chars().rev() {
                    self.push_advance(CharacterSet::from_char(c), next_state_id);
                    next_state_id = self.nfa.last_state_id();
                }
                Ok(!s.is_empty())
            }
            Rule::Choice(elements) => {
                // Expand each alternative, then chain Split states so every
                // alternative's entry point is reachable.
                let mut alternative_state_ids = Vec::with_capacity(elements.len());
                for element in elements {
                    if self.expand_rule(element, next_state_id)? {
                        alternative_state_ids.push(self.nfa.last_state_id());
                    } else {
                        alternative_state_ids.push(next_state_id);
                    }
                }
                alternative_state_ids.sort_unstable();
                alternative_state_ids.dedup();
                // The most recent entry is already the fall-through target of
                // the split chain, so it needs no explicit Split state.
                alternative_state_ids.retain(|i| *i != self.nfa.last_state_id());
                for alternative_state_id in alternative_state_ids {
                    self.push_split(alternative_state_id);
                }
                Ok(true)
            }
            Rule::Seq(elements) => {
                // Expand in reverse so each element leads into the next.
                let mut result = false;
                for element in elements.iter().rev() {
                    if self.expand_rule(element, next_state_id)? {
                        result = true;
                    }
                    next_state_id = self.nfa.last_state_id();
                }
                Ok(result)
            }
            Rule::Repeat(rule) => {
                self.nfa.states.push(NfaState::Accept {
                    variable_index: 0,
                    precedence: 0,
                }); // Placeholder for split
                let split_state_id = self.nfa.last_state_id();
                if self.expand_rule(rule, split_state_id)? {
                    self.nfa.states[split_state_id as usize] =
                        NfaState::Split(self.nfa.last_state_id(), next_state_id);
                    Ok(true)
                } else {
                    // NOTE(review): unlike `expand_one_or_more`, the placeholder
                    // Accept state is NOT popped here when the body is blank —
                    // presumably a blank repeat body can't reach this point;
                    // confirm.
                    Ok(false)
                }
            }
            Rule::Metadata { rule, params } => {
                // An explicit integer precedence applies to all states created
                // while expanding the wrapped rule.
                let has_precedence = if let Precedence::Integer(precedence) = &params.precedence {
                    self.precedence_stack.push(*precedence);
                    true
                } else {
                    false
                };
                let result = self.expand_rule(rule, next_state_id);
                if has_precedence {
                    self.precedence_stack.pop();
                }
                result
            }
            Rule::Blank => Ok(false),
            _ => Err(ExpandRuleError::UnexpectedRule(rule.clone()))?,
        }
    }
    /// Compiles a parsed regex `hir` into NFA states leading to
    /// `next_state_id`. Same reverse-construction contract and return value as
    /// [`Self::expand_rule`].
    fn expand_regex(&mut self, hir: &Hir, mut next_state_id: u32) -> ExpandRegexResult<bool> {
        match hir.kind() {
            HirKind::Empty => Ok(false),
            HirKind::Literal(literal) => {
                // Literal bytes must be valid UTF-8; push one Advance state per
                // character, last character first.
                for character in std::str::from_utf8(&literal.0)
                    .map_err(|e| ExpandRegexError::Utf8(e.to_string()))?
                    .chars()
                    .rev()
                {
                    let char_set = CharacterSet::from_char(character);
                    self.push_advance(char_set, next_state_id);
                    next_state_id = self.nfa.last_state_id();
                }
                Ok(true)
            }
            HirKind::Class(class) => match class {
                Class::Unicode(class) => {
                    let mut chars = CharacterSet::default();
                    for c in class.ranges() {
                        chars = chars.add_range(c.start(), c.end());
                    }
                    // For some reason, the long s `ſ` is included if the letter `s` is in a
                    // pattern, so we remove it.
                    if chars.range_count() == 3
                        && chars
                            .ranges()
                            // exact check to ensure that `ſ` wasn't intentionally added.
                            .all(|r| ['s'..='s', 'S'..='S', 'ſ'..='ſ'].contains(&r))
                    {
                        chars = chars.difference(CharacterSet::from_char('ſ'));
                    }
                    self.push_advance(chars, next_state_id);
                    Ok(true)
                }
                Class::Bytes(bytes_class) => {
                    let mut chars = CharacterSet::default();
                    for c in bytes_class.ranges() {
                        chars = chars.add_range(c.start().into(), c.end().into());
                    }
                    self.push_advance(chars, next_state_id);
                    Ok(true)
                }
            },
            HirKind::Look(_) => Err(ExpandRegexError::Assertion)?,
            HirKind::Repetition(repetition) => match (repetition.min, repetition.max) {
                (0, Some(1)) => self.expand_zero_or_one(&repetition.sub, next_state_id),
                (1, None) => self.expand_one_or_more(&repetition.sub, next_state_id),
                (0, None) => self.expand_zero_or_more(&repetition.sub, next_state_id),
                (min, Some(max)) if min == max => {
                    self.expand_count(&repetition.sub, min, next_state_id)
                }
                (min, None) => {
                    // `{min,}`: a zero-or-more loop followed by `min` required
                    // copies.
                    if self.expand_zero_or_more(&repetition.sub, next_state_id)? {
                        self.expand_count(&repetition.sub, min, next_state_id)
                    } else {
                        Ok(false)
                    }
                }
                (min, Some(max)) => {
                    // `{min,max}`: `min` required copies plus `max - min`
                    // optional ones.
                    let mut result = self.expand_count(&repetition.sub, min, next_state_id)?;
                    for _ in min..max {
                        if result {
                            next_state_id = self.nfa.last_state_id();
                        }
                        if self.expand_zero_or_one(&repetition.sub, next_state_id)? {
                            result = true;
                        }
                    }
                    Ok(result)
                }
            },
            HirKind::Capture(capture) => self.expand_regex(&capture.sub, next_state_id),
            HirKind::Concat(concat) => {
                // Expand in reverse so each sub-expression leads into the next.
                let mut result = false;
                for hir in concat.iter().rev() {
                    if self.expand_regex(hir, next_state_id)? {
                        result = true;
                        next_state_id = self.nfa.last_state_id();
                    }
                }
                Ok(result)
            }
            HirKind::Alternation(alternations) => {
                // Same split-chaining strategy as `Rule::Choice` above.
                let mut alternative_state_ids = Vec::with_capacity(alternations.len());
                for hir in alternations {
                    if self.expand_regex(hir, next_state_id)? {
                        alternative_state_ids.push(self.nfa.last_state_id());
                    } else {
                        alternative_state_ids.push(next_state_id);
                    }
                }
                alternative_state_ids.sort_unstable();
                alternative_state_ids.dedup();
                alternative_state_ids.retain(|i| *i != self.nfa.last_state_id());
                for alternative_state_id in alternative_state_ids {
                    self.push_split(alternative_state_id);
                }
                Ok(true)
            }
        }
    }
    /// Expands `hir+`: one required match that can loop back on itself before
    /// proceeding to `next_state_id`.
    fn expand_one_or_more(&mut self, hir: &Hir, next_state_id: u32) -> ExpandRegexResult<bool> {
        self.nfa.states.push(NfaState::Accept {
            variable_index: 0,
            precedence: 0,
        }); // Placeholder for split
        let split_state_id = self.nfa.last_state_id();
        if self.expand_regex(hir, split_state_id)? {
            self.nfa.states[split_state_id as usize] =
                NfaState::Split(self.nfa.last_state_id(), next_state_id);
            Ok(true)
        } else {
            // The body was blank, so discard the unused placeholder state.
            self.nfa.states.pop();
            Ok(false)
        }
    }
    /// Expands `hir?`: a split between matching `hir` and skipping straight to
    /// `next_state_id`.
    fn expand_zero_or_one(&mut self, hir: &Hir, next_state_id: u32) -> ExpandRegexResult<bool> {
        if self.expand_regex(hir, next_state_id)? {
            self.push_split(next_state_id);
            Ok(true)
        } else {
            Ok(false)
        }
    }
    /// Expands `hir*` as an optional `hir+`.
    fn expand_zero_or_more(&mut self, hir: &Hir, next_state_id: u32) -> ExpandRegexResult<bool> {
        if self.expand_one_or_more(hir, next_state_id)? {
            self.push_split(next_state_id);
            Ok(true)
        } else {
            Ok(false)
        }
    }
    /// Expands `hir{count}` by chaining `count` copies of `hir` in sequence.
    fn expand_count(
        &mut self,
        hir: &Hir,
        count: u32,
        mut next_state_id: u32,
    ) -> ExpandRegexResult<bool> {
        let mut result = false;
        for _ in 0..count {
            if self.expand_regex(hir, next_state_id)? {
                result = true;
                next_state_id = self.nfa.last_state_id();
            }
        }
        Ok(result)
    }
    /// Pushes an Advance state consuming `chars` and leading to `state_id`,
    /// tagged with the current precedence and separator status.
    fn push_advance(&mut self, chars: CharacterSet, state_id: u32) {
        let precedence = *self.precedence_stack.last().unwrap();
        self.nfa.states.push(NfaState::Advance {
            chars,
            state_id,
            precedence,
            is_sep: self.is_sep,
        });
    }
    /// Pushes a Split state branching between `state_id` and the current last
    /// state.
    fn push_split(&mut self, state_id: u32) {
        let last_state_id = self.nfa.last_state_id();
        self.nfa
            .states
            .push(NfaState::Split(state_id, last_state_id));
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        grammars::Variable,
        nfa::{NfaCursor, NfaTransition},
    };
    /// Runs `s` through the grammar's NFA character by character, returning
    /// the index of the winning token and the matched slice (with leading
    /// separators stripped), or `None` if nothing matches.
    fn simulate_nfa<'a>(grammar: &'a LexicalGrammar, s: &'a str) -> Option<(usize, &'a str)> {
        let start_states = grammar.variables.iter().map(|v| v.start_state).collect();
        let mut cursor = NfaCursor::new(&grammar.nfa, start_states);
        let mut result = None;
        let mut result_precedence = i32::MIN;
        let mut start_char = 0;
        let mut end_char = 0;
        for c in s.chars() {
            // Record any completions available before consuming `c`; later or
            // equal-precedence completions win (longest match).
            for (id, precedence) in cursor.completions() {
                if result.is_none() || result_precedence <= precedence {
                    result = Some((id, &s[start_char..end_char]));
                    result_precedence = precedence;
                }
            }
            if let Some(NfaTransition {
                states,
                is_separator,
                ..
            }) = cursor
                .transitions()
                .into_iter()
                .find(|t| t.characters.contains(c) && t.precedence >= result_precedence)
            {
                cursor.reset(states);
                end_char += c.len_utf8();
                if is_separator {
                    start_char = end_char;
                }
            } else {
                break;
            }
        }
        // Check completions one final time after the last consumed character.
        for (id, precedence) in cursor.completions() {
            if result.is_none() || result_precedence <= precedence {
                result = Some((id, &s[start_char..end_char]));
                result_precedence = precedence;
            }
        }
        result
    }
    /// Table-driven test: each row expands a set of rules into a lexical
    /// grammar and checks simulated matches against expected results.
    #[test]
    fn test_rule_expansion() {
        struct Row {
            rules: Vec<Rule>,
            separators: Vec<Rule>,
            examples: Vec<(&'static str, Option<(usize, &'static str)>)>,
        }
        let table = [
            // regex with sequences and alternatives
            Row {
                rules: vec![Rule::pattern("(a|b|c)d(e|f|g)h?", "")],
                separators: vec![],
                examples: vec![
                    ("ade1", Some((0, "ade"))),
                    ("bdf1", Some((0, "bdf"))),
                    ("bdfh1", Some((0, "bdfh"))),
                    ("ad1", None),
                ],
            },
            // regex with repeats
            Row {
                rules: vec![Rule::pattern("a*", "")],
                separators: vec![],
                examples: vec![("aaa1", Some((0, "aaa"))), ("b", Some((0, "")))],
            },
            // regex with repeats in sequences
            Row {
                rules: vec![Rule::pattern("a((bc)+|(de)*)f", "")],
                separators: vec![],
                examples: vec![
                    ("af1", Some((0, "af"))),
                    ("adedef1", Some((0, "adedef"))),
                    ("abcbcbcf1", Some((0, "abcbcbcf"))),
                    ("a", None),
                ],
            },
            // regex with character ranges
            Row {
                rules: vec![Rule::pattern("[a-fA-F0-9]+", "")],
                separators: vec![],
                examples: vec![("A1ff0.", Some((0, "A1ff0")))],
            },
            // regex with perl character classes
            Row {
                rules: vec![Rule::pattern("\\w\\d\\s", "")],
                separators: vec![],
                examples: vec![("_0 ", Some((0, "_0 ")))],
            },
            // string
            Row {
                rules: vec![Rule::string("abc")],
                separators: vec![],
                examples: vec![("abcd", Some((0, "abc"))), ("ab", None)],
            },
            // complex rule containing strings and regexes
            Row {
                rules: vec![Rule::repeat(Rule::seq(vec![
                    Rule::string("{"),
                    Rule::pattern("[a-f]+", ""),
                    Rule::string("}"),
                ]))],
                separators: vec![],
                examples: vec![
                    ("{a}{", Some((0, "{a}"))),
                    ("{a}{d", Some((0, "{a}"))),
                    ("ab", None),
                ],
            },
            // longest match rule
            Row {
                rules: vec![
                    Rule::pattern("a|bc", ""),
                    Rule::pattern("aa", ""),
                    Rule::pattern("bcd", ""),
                ],
                separators: vec![],
                examples: vec![
                    ("a.", Some((0, "a"))),
                    ("bc.", Some((0, "bc"))),
                    ("aa.", Some((1, "aa"))),
                    ("bcd?", Some((2, "bcd"))),
                    ("b.", None),
                    ("c.", None),
                ],
            },
            // regex with an alternative including the empty string
            Row {
                rules: vec![Rule::pattern("a(b|)+c", "")],
                separators: vec![],
                examples: vec![
                    ("ac.", Some((0, "ac"))),
                    ("abc.", Some((0, "abc"))),
                    ("abbc.", Some((0, "abbc"))),
                ],
            },
            // separators
            Row {
                rules: vec![Rule::pattern("[a-f]+", "")],
                separators: vec![Rule::string("\\\n"), Rule::pattern("\\s", "")],
                examples: vec![
                    ("  a", Some((0, "a"))),
                    ("  \nb", Some((0, "b"))),
                    ("  \\a", None),
                    ("  \\\na", Some((0, "a"))),
                ],
            },
            // shorter tokens with higher precedence
            Row {
                rules: vec![
                    Rule::prec(Precedence::Integer(2), Rule::pattern("abc", "")),
                    Rule::prec(Precedence::Integer(1), Rule::pattern("ab[cd]e", "")),
                    Rule::pattern("[a-e]+", ""),
                ],
                separators: vec![Rule::string("\\\n"), Rule::pattern("\\s", "")],
                examples: vec![
                    ("abceef", Some((0, "abc"))),
                    ("abdeef", Some((1, "abde"))),
                    ("aeeeef", Some((2, "aeeee"))),
                ],
            },
            // immediate tokens with higher precedence
            Row {
                rules: vec![
                    Rule::prec(Precedence::Integer(1), Rule::pattern("[^a]+", "")),
                    Rule::immediate_token(Rule::prec(
                        Precedence::Integer(2),
                        Rule::pattern("[^ab]+", ""),
                    )),
                ],
                separators: vec![Rule::pattern("\\s", "")],
                examples: vec![("cccb", Some((1, "ccc")))],
            },
            Row {
                rules: vec![Rule::seq(vec![
                    Rule::string("a"),
                    Rule::choice(vec![Rule::string("b"), Rule::string("c")]),
                    Rule::string("d"),
                ])],
                separators: vec![],
                examples: vec![
                    ("abd", Some((0, "abd"))),
                    ("acd", Some((0, "acd"))),
                    ("abc", None),
                    ("ad", None),
                    ("d", None),
                    ("a", None),
                ],
            },
            // nested choices within sequences
            Row {
                rules: vec![Rule::seq(vec![
                    Rule::pattern("[0-9]+", ""),
                    Rule::choice(vec![
                        Rule::Blank,
                        Rule::choice(vec![Rule::seq(vec![
                            Rule::choice(vec![Rule::string("e"), Rule::string("E")]),
                            Rule::choice(vec![
                                Rule::Blank,
                                Rule::choice(vec![Rule::string("+"), Rule::string("-")]),
                            ]),
                            Rule::pattern("[0-9]+", ""),
                        ])]),
                    ]),
                ])],
                separators: vec![],
                examples: vec![
                    ("12", Some((0, "12"))),
                    ("12e", Some((0, "12"))),
                    ("12g", Some((0, "12"))),
                    ("12e3", Some((0, "12e3"))),
                    ("12e+", Some((0, "12"))),
                    ("12E+34 +", Some((0, "12E+34"))),
                    ("12e34", Some((0, "12e34"))),
                ],
            },
            // nested groups
            Row {
                rules: vec![Rule::seq(vec![Rule::pattern(r"([^x\\]|\\(.|\n))+", "")])],
                separators: vec![],
                examples: vec![("abcx", Some((0, "abc"))), ("abc\\0x", Some((0, "abc\\0")))],
            },
            // allowing unrecognized escape sequences
            Row {
                rules: vec![
                    // Escaped forward slash (used in JS because '/' is the regex delimiter)
                    Rule::pattern(r"\/", ""),
                    // Escaped quotes
                    Rule::pattern(r#"\"\'"#, ""),
                    // Quote preceded by a literal backslash
                    Rule::pattern(r"[\\']+", ""),
                ],
                separators: vec![],
                examples: vec![
                    ("/", Some((0, "/"))),
                    ("\"\'", Some((1, "\"\'"))),
                    (r"'\'a", Some((2, r"'\'"))),
                ],
            },
            // unicode property escapes
            Row {
                rules: vec![
                    Rule::pattern(r"\p{L}+\P{L}+", ""),
                    Rule::pattern(r"\p{White_Space}+\P{White_Space}+[\p{White_Space}]*", ""),
                ],
                separators: vec![],
                examples: vec![
                    ("  123   abc", Some((1, "  123   "))),
                    ("ბΨƁ___ƀƔ", Some((0, "ბΨƁ___"))),
                ],
            },
            // unicode property escapes in bracketed sets
            Row {
                rules: vec![Rule::pattern(r"[\p{L}\p{Nd}]+", "")],
                separators: vec![],
                examples: vec![("abΨ12٣٣, ok", Some((0, "abΨ12٣٣")))],
            },
            // unicode character escapes
            Row {
                rules: vec![
                    Rule::pattern(r"\u{00dc}", ""),
                    Rule::pattern(r"\U{000000dd}", ""),
                    Rule::pattern(r"\u00de", ""),
                    Rule::pattern(r"\U000000df", ""),
                ],
                separators: vec![],
                examples: vec![
                    ("\u{00dc}", Some((0, "\u{00dc}"))),
                    ("\u{00dd}", Some((1, "\u{00dd}"))),
                    ("\u{00de}", Some((2, "\u{00de}"))),
                    ("\u{00df}", Some((3, "\u{00df}"))),
                ],
            },
            Row {
                rules: vec![
                    Rule::pattern(r"u\{[0-9a-fA-F]+\}", ""),
                    // Already-escaped curly braces
                    Rule::pattern(r"\{[ab]{3}\}", ""),
                    // Unicode codepoints
                    Rule::pattern(r"\u{1000A}", ""),
                    // Unicode codepoints (lowercase)
                    Rule::pattern(r"\u{1000b}", ""),
                ],
                separators: vec![],
                examples: vec![
                    ("u{1234} ok", Some((0, "u{1234}"))),
                    ("{aba}}", Some((1, "{aba}"))),
                    ("\u{1000A}", Some((2, "\u{1000A}"))),
                    ("\u{1000b}", Some((3, "\u{1000b}"))),
                ],
            },
            // Emojis
            Row {
                rules: vec![Rule::pattern(r"\p{Emoji}+", "")],
                separators: vec![],
                examples: vec![
                    ("🐎", Some((0, "🐎"))),
                    ("🐴🐴", Some((0, "🐴🐴"))),
                    ("#0", Some((0, "#0"))), // These chars are technically emojis!
                    ("⻢", None),
                    ("♞", None),
                    ("horse", None),
                ],
            },
            // Intersection
            Row {
                rules: vec![Rule::pattern(r"[[0-7]&&[4-9]]+", "")],
                separators: vec![],
                examples: vec![
                    ("456", Some((0, "456"))),
                    ("64", Some((0, "64"))),
                    ("452", Some((0, "45"))),
                    ("91", None),
                    ("8", None),
                    ("3", None),
                ],
            },
            // Difference
            Row {
                rules: vec![Rule::pattern(r"[[0-9]--[4-7]]+", "")],
                separators: vec![],
                examples: vec![
                    ("123", Some((0, "123"))),
                    ("83", Some((0, "83"))),
                    ("9", Some((0, "9"))),
                    ("124", Some((0, "12"))),
                    ("67", None),
                    ("4", None),
                ],
            },
            // Symmetric difference
            Row {
                rules: vec![Rule::pattern(r"[[0-7]~~[4-9]]+", "")],
                separators: vec![],
                examples: vec![
                    ("123", Some((0, "123"))),
                    ("83", Some((0, "83"))),
                    ("9", Some((0, "9"))),
                    ("124", Some((0, "12"))),
                    ("67", None),
                    ("4", None),
                ],
            },
            // Nested set operations
            Row {
                //               0 1 2 3 4 5 6 7 8 9
                // [0-5]:        y y y y y y
                // [2-4]:            y y y
                // [0-5]--[2-4]: y y           y
                // [3-9]:              y y y y y y y
                // [6-7]:                    y y
                // [3-9]--[5-7]:       y y y     y y
                // final regex:  y y   y y       y y
                rules: vec![Rule::pattern(r"[[[0-5]--[2-4]]~~[[3-9]--[6-7]]]+", "")],
                separators: vec![],
                examples: vec![
                    ("01", Some((0, "01"))),
                    ("432", Some((0, "43"))),
                    ("8", Some((0, "8"))),
                    ("9", Some((0, "9"))),
                    ("2", None),
                    ("567", None),
                ],
            },
        ];
        for Row {
            rules,
            separators,
            examples,
        } in &table
        {
            let grammar = expand_tokens(ExtractedLexicalGrammar {
                separators: separators.clone(),
                variables: rules
                    .iter()
                    .map(|rule| Variable::named("", rule.clone()))
                    .collect(),
            })
            .unwrap();
            for (haystack, needle) in examples {
                assert_eq!(simulate_nfa(&grammar, haystack), *needle);
            }
        }
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/prepare_grammar/extract_tokens.rs | crates/generate/src/prepare_grammar/extract_tokens.rs | use std::collections::HashMap;
use serde::Serialize;
use thiserror::Error;
use super::{ExtractedLexicalGrammar, ExtractedSyntaxGrammar, InternedGrammar};
use crate::{
grammars::{ExternalToken, ReservedWordContext, Variable, VariableType},
rules::{MetadataParams, Rule, Symbol, SymbolType},
};
/// Result alias for token-extraction operations.
pub type ExtractTokensResult<T> = Result<T, ExtractTokensError>;
/// Errors that can occur while splitting a grammar into syntactic and lexical
/// parts.
#[derive(Debug, Error, Serialize)]
pub enum ExtractTokensError {
    #[error(
        "The rule `{0}` contains an empty string.
Tree-sitter does not support syntactic rules that contain an empty string
unless they are used only as the grammar's start rule.
"
    )]
    EmptyString(String),
    #[error("Rule '{0}' cannot be used as both an external token and a non-terminal rule")]
    ExternalTokenNonTerminal(String),
    #[error("Non-symbol rules cannot be used as external tokens")]
    NonSymbolExternalToken,
    #[error(transparent)]
    WordToken(NonTerminalWordTokenError),
    #[error("Reserved word '{0}' must be a token")]
    NonTokenReservedWord(String),
}
/// Details for the error raised when the grammar's word token resolves to a
/// non-terminal symbol.
#[derive(Debug, Error, Serialize)]
pub struct NonTerminalWordTokenError {
    // Name of the symbol that was designated as the word token.
    pub symbol_name: String,
    // Name of another variable with an identical rule, if one exists; its
    // presence explains why the word token stayed non-terminal.
    pub conflicting_symbol_name: Option<String>,
}
impl std::fmt::Display for NonTerminalWordTokenError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "Non-terminal symbol '{}' cannot be used as the word token",
            self.symbol_name
        )?;
        // Append the duplicate-rule explanation only when one is known.
        if let Some(conflicting_name) = &self.conflicting_symbol_name {
            writeln!(
                f,
                ", because its rule is duplicated in '{conflicting_name}'",
            )
        } else {
            writeln!(f)
        }
    }
}
/// Splits an interned grammar into a syntax grammar and a lexical grammar.
///
/// Terminal sub-rules (strings, patterns, and rules wrapped in `token(...)`)
/// are pulled out of the syntactic rules and replaced with symbols that point
/// into the resulting lexical grammar. Variables whose entire rule became a
/// single, uniquely-used token are removed from the syntax grammar, giving
/// their names to the corresponding lexical variables, and all remaining
/// symbol indices are adjusted accordingly.
///
/// # Errors
/// Fails when a non-start rule contains an empty string, when an external
/// token refers to a non-terminal or is not a symbol, when the word token is
/// a non-terminal, or when a reserved word is not a token.
pub(super) fn extract_tokens(
    mut grammar: InternedGrammar,
) -> ExtractTokensResult<(ExtractedSyntaxGrammar, ExtractedLexicalGrammar)> {
    let mut extractor = TokenExtractor {
        current_variable_name: String::new(),
        current_variable_token_count: 0,
        is_first_rule: false,
        extracted_variables: Vec::new(),
        extracted_usage_counts: Vec::new(),
    };
    for (i, variable) in grammar.variables.iter_mut().enumerate() {
        extractor.extract_tokens_in_variable(i == 0, variable)?;
    }
    for variable in &mut grammar.external_tokens {
        extractor.extract_tokens_in_variable(false, variable)?;
    }
    // The extracted tokens become the lexical grammar's variables as-is.
    let mut lexical_variables = extractor.extracted_variables;
    // If a variable's entire rule was extracted as a token and that token didn't
    // appear within any other rule, then remove that variable from the syntax
    // grammar, giving its name to the token in the lexical grammar. Any symbols
    // that pointed to that variable will need to be updated to point to the
    // variable in the lexical grammar. Symbols that pointed to later variables
    // will need to have their indices decremented.
    let mut variables = Vec::with_capacity(grammar.variables.len());
    let mut symbol_replacer = SymbolReplacer {
        replacements: HashMap::new(),
    };
    for (i, variable) in grammar.variables.into_iter().enumerate() {
        if let Rule::Symbol(Symbol {
            kind: SymbolType::Terminal,
            index,
        }) = variable.rule
        {
            // Never remove the start rule (index 0), and only remove variables
            // whose token is referenced in exactly one place.
            if i > 0 && extractor.extracted_usage_counts[index] == 1 {
                let lexical_variable = &mut lexical_variables[index];
                if lexical_variable.kind == VariableType::Auxiliary
                    || variable.kind != VariableType::Hidden
                {
                    lexical_variable.kind = variable.kind;
                    lexical_variable.name = variable.name;
                    symbol_replacer.replacements.insert(i, index);
                    continue;
                }
            }
        }
        variables.push(variable);
    }
    for variable in &mut variables {
        variable.rule = symbol_replacer.replace_symbols_in_rule(&variable.rule);
    }
    let expected_conflicts = grammar
        .expected_conflicts
        .into_iter()
        .map(|conflict| {
            let mut result = conflict
                .iter()
                .map(|symbol| symbol_replacer.replace_symbol(*symbol))
                .collect::<Vec<_>>();
            result.sort_unstable();
            result.dedup();
            result
        })
        .collect();
    let supertype_symbols = grammar
        .supertype_symbols
        .into_iter()
        .map(|symbol| symbol_replacer.replace_symbol(symbol))
        .collect();
    let variables_to_inline = grammar
        .variables_to_inline
        .into_iter()
        .map(|symbol| symbol_replacer.replace_symbol(symbol))
        .collect();
    // Extras that are symbols stay in the syntax grammar; extras whose rule
    // matches an extracted token become that token; anything else is treated
    // as a separator for the lexical grammar.
    let mut separators = Vec::new();
    let mut extra_symbols = Vec::new();
    for rule in grammar.extra_symbols {
        if let Rule::Symbol(symbol) = rule {
            extra_symbols.push(symbol_replacer.replace_symbol(symbol));
        } else if let Some(index) = lexical_variables.iter().position(|v| v.rule == rule) {
            extra_symbols.push(Symbol::terminal(index));
        } else {
            separators.push(rule);
        }
    }
    let mut external_tokens = Vec::with_capacity(grammar.external_tokens.len());
    for external_token in grammar.external_tokens {
        let rule = symbol_replacer.replace_symbols_in_rule(&external_token.rule);
        if let Rule::Symbol(symbol) = rule {
            if symbol.is_non_terminal() {
                Err(ExtractTokensError::ExternalTokenNonTerminal(
                    variables[symbol.index].name.clone(),
                ))?;
            }
            if symbol.is_external() {
                external_tokens.push(ExternalToken {
                    name: external_token.name,
                    kind: external_token.kind,
                    corresponding_internal_token: None,
                });
            } else {
                external_tokens.push(ExternalToken {
                    name: lexical_variables[symbol.index].name.clone(),
                    kind: external_token.kind,
                    corresponding_internal_token: Some(symbol),
                });
            }
        } else {
            Err(ExtractTokensError::NonSymbolExternalToken)?;
        }
    }
    let word_token = if let Some(token) = grammar.word_token {
        let token = symbol_replacer.replace_symbol(token);
        if token.is_non_terminal() {
            let word_token_variable = &variables[token.index];
            // Look for another variable with an identical rule, which would
            // explain why this token could not be extracted as a unique
            // terminal.
            let conflicting_symbol_name = variables
                .iter()
                .enumerate()
                .find(|(i, v)| *i != token.index && v.rule == word_token_variable.rule)
                .map(|(_, v)| v.name.clone());
            Err(ExtractTokensError::WordToken(NonTerminalWordTokenError {
                symbol_name: word_token_variable.name.clone(),
                conflicting_symbol_name,
            }))?;
        }
        Some(token)
    } else {
        None
    };
    let mut reserved_word_contexts = Vec::with_capacity(grammar.reserved_word_sets.len());
    for reserved_word_context in grammar.reserved_word_sets {
        // Size by this context's word count (previously sized by the number of
        // contexts processed so far, which is 0 on the first iteration).
        let mut reserved_words = Vec::with_capacity(reserved_word_context.reserved_words.len());
        for reserved_rule in reserved_word_context.reserved_words {
            if let Rule::Symbol(symbol) = reserved_rule {
                reserved_words.push(symbol_replacer.replace_symbol(symbol));
            } else if let Some(index) = lexical_variables
                .iter()
                .position(|v| v.rule == reserved_rule)
            {
                reserved_words.push(Symbol::terminal(index));
            } else {
                // Unwrap metadata so the error names the underlying string or
                // pattern rather than the wrapper.
                let rule = if let Rule::Metadata { rule, .. } = &reserved_rule {
                    rule.as_ref()
                } else {
                    &reserved_rule
                };
                let token_name = match rule {
                    Rule::String(s) => s.clone(),
                    Rule::Pattern(p, _) => p.clone(),
                    _ => "unknown".to_string(),
                };
                Err(ExtractTokensError::NonTokenReservedWord(token_name))?;
            }
        }
        reserved_word_contexts.push(ReservedWordContext {
            name: reserved_word_context.name,
            reserved_words,
        });
    }
    Ok((
        ExtractedSyntaxGrammar {
            variables,
            expected_conflicts,
            extra_symbols,
            variables_to_inline,
            supertype_symbols,
            external_tokens,
            word_token,
            precedence_orderings: grammar.precedence_orderings,
            reserved_word_sets: reserved_word_contexts,
        },
        ExtractedLexicalGrammar {
            variables: lexical_variables,
            separators,
        },
    ))
}
/// Walks grammar rules, pulling terminal sub-rules out into a list of lexical
/// variables and replacing them in place with terminal symbols.
struct TokenExtractor {
    // Name of the variable currently being processed; used to name auxiliary
    // tokens and to report empty-string errors.
    current_variable_name: String,
    // Number of anonymous (pattern/token) tokens extracted so far from the
    // current variable; used to number auxiliary token names.
    current_variable_token_count: usize,
    // Whether the current variable is the grammar's start rule, which is the
    // only rule allowed to contain an empty string.
    is_first_rule: bool,
    // Tokens pulled out so far; these become the lexical grammar's variables.
    extracted_variables: Vec<Variable>,
    // For each extracted token, how many places in the grammar reference it.
    extracted_usage_counts: Vec<usize>,
}
/// Rewrites symbols after some variables have been moved from the syntax
/// grammar into the lexical grammar.
struct SymbolReplacer {
    // Maps removed non-terminal indices to the terminal indices that replace
    // them.
    replacements: HashMap<usize, usize>,
}
impl TokenExtractor {
    /// Extracts all tokens from one variable's rule, rewriting the rule in
    /// place so extracted pieces become terminal symbols.
    fn extract_tokens_in_variable(
        &mut self,
        is_first: bool,
        variable: &mut Variable,
    ) -> ExtractTokensResult<()> {
        self.current_variable_name.clear();
        self.current_variable_name.push_str(&variable.name);
        self.current_variable_token_count = 0;
        self.is_first_rule = is_first;
        variable.rule = self.extract_tokens_in_rule(&variable.rule)?;
        Ok(())
    }
    /// Recursively rewrites `input`, replacing every string, pattern, or
    /// `token(...)`-wrapped sub-rule with a terminal symbol.
    fn extract_tokens_in_rule(&mut self, input: &Rule) -> ExtractTokensResult<Rule> {
        match input {
            Rule::String(name) => Ok(self.extract_token(input, Some(name))?.into()),
            Rule::Pattern(..) => Ok(self.extract_token(input, None)?.into()),
            Rule::Metadata { params, rule } => {
                if params.is_token {
                    // Strip the `is_token` flag; if nothing else remains in
                    // the metadata, extract the bare inner rule, otherwise
                    // keep the wrapper so the remaining params survive.
                    let mut params = params.clone();
                    params.is_token = false;
                    let string_value = if let Rule::String(value) = rule.as_ref() {
                        Some(value)
                    } else {
                        None
                    };
                    let rule_to_extract = if params == MetadataParams::default() {
                        rule.as_ref()
                    } else {
                        input
                    };
                    Ok(self.extract_token(rule_to_extract, string_value)?.into())
                } else {
                    Ok(Rule::Metadata {
                        params: params.clone(),
                        rule: Box::new(self.extract_tokens_in_rule(rule)?),
                    })
                }
            }
            Rule::Repeat(content) => Ok(Rule::Repeat(Box::new(
                self.extract_tokens_in_rule(content)?,
            ))),
            Rule::Seq(elements) => Ok(Rule::Seq(
                elements
                    .iter()
                    .map(|e| self.extract_tokens_in_rule(e))
                    .collect::<ExtractTokensResult<Vec<_>>>()?,
            )),
            Rule::Choice(elements) => Ok(Rule::Choice(
                elements
                    .iter()
                    .map(|e| self.extract_tokens_in_rule(e))
                    .collect::<ExtractTokensResult<Vec<_>>>()?,
            )),
            Rule::Reserved { rule, context_name } => Ok(Rule::Reserved {
                rule: Box::new(self.extract_tokens_in_rule(rule)?),
                context_name: context_name.clone(),
            }),
            _ => Ok(input.clone()),
        }
    }
    /// Records `rule` as an extracted token (deduplicating against previously
    /// extracted tokens) and returns the terminal symbol that refers to it.
    ///
    /// `string_value` names anonymous string tokens; other tokens get an
    /// auxiliary `<variable>_token<N>` name.
    fn extract_token(
        &mut self,
        rule: &Rule,
        string_value: Option<&String>,
    ) -> ExtractTokensResult<Symbol> {
        // Reuse an existing token with an identical rule, bumping its usage
        // count.
        for (i, variable) in self.extracted_variables.iter_mut().enumerate() {
            if variable.rule == *rule {
                self.extracted_usage_counts[i] += 1;
                return Ok(Symbol::terminal(i));
            }
        }
        let index = self.extracted_variables.len();
        let variable = if let Some(string_value) = string_value {
            // Empty strings are only legal in the grammar's start rule.
            if string_value.is_empty() && !self.is_first_rule {
                Err(ExtractTokensError::EmptyString(
                    self.current_variable_name.clone(),
                ))?;
            }
            Variable {
                name: string_value.clone(),
                kind: VariableType::Anonymous,
                rule: rule.clone(),
            }
        } else {
            self.current_variable_token_count += 1;
            Variable {
                name: format!(
                    "{}_token{}",
                    self.current_variable_name, self.current_variable_token_count
                ),
                kind: VariableType::Auxiliary,
                rule: rule.clone(),
            }
        };
        self.extracted_variables.push(variable);
        self.extracted_usage_counts.push(1);
        Ok(Symbol::terminal(index))
    }
}
impl SymbolReplacer {
    /// Rebuilds `rule`, running every symbol it contains through
    /// [`Self::replace_symbol`].
    fn replace_symbols_in_rule(&mut self, rule: &Rule) -> Rule {
        match rule {
            Rule::Symbol(symbol) => self.replace_symbol(*symbol).into(),
            Rule::Choice(items) => {
                let rewritten = items
                    .iter()
                    .map(|item| self.replace_symbols_in_rule(item))
                    .collect();
                Rule::Choice(rewritten)
            }
            Rule::Seq(items) => {
                let rewritten = items
                    .iter()
                    .map(|item| self.replace_symbols_in_rule(item))
                    .collect();
                Rule::Seq(rewritten)
            }
            Rule::Repeat(inner) => {
                let rewritten = self.replace_symbols_in_rule(inner);
                Rule::Repeat(Box::new(rewritten))
            }
            Rule::Metadata { rule, params } => {
                let rewritten = self.replace_symbols_in_rule(rule);
                Rule::Metadata {
                    params: params.clone(),
                    rule: Box::new(rewritten),
                }
            }
            Rule::Reserved { rule, context_name } => {
                let rewritten = self.replace_symbols_in_rule(rule);
                Rule::Reserved {
                    rule: Box::new(rewritten),
                    context_name: context_name.clone(),
                }
            }
            other => other.clone(),
        }
    }
    /// Maps a symbol to its post-extraction identity: replaced non-terminals
    /// become terminals, and surviving non-terminals have their indices
    /// shifted down by the number of earlier variables that were removed.
    fn replace_symbol(&self, symbol: Symbol) -> Symbol {
        if !symbol.is_non_terminal() {
            return symbol;
        }
        match self.replacements.get(&symbol.index) {
            Some(&target) => Symbol::terminal(target),
            None => {
                let shift = self
                    .replacements
                    .keys()
                    .filter(|&&replaced| replaced < symbol.index)
                    .count();
                Symbol::non_terminal(symbol.index - shift)
            }
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;
    /// End-to-end extraction: tokens are pulled out, uniquely-used token
    /// variables are absorbed into the lexical grammar, and remaining symbol
    /// indices are adjusted.
    #[test]
    fn test_extraction() {
        let (syntax_grammar, lexical_grammar) = extract_tokens(build_grammar(vec![
            Variable::named(
                "rule_0",
                Rule::repeat(Rule::seq(vec![
                    Rule::string("a"),
                    Rule::pattern("b", ""),
                    Rule::choice(vec![
                        Rule::non_terminal(1),
                        Rule::non_terminal(2),
                        Rule::token(Rule::repeat(Rule::choice(vec![
                            Rule::string("c"),
                            Rule::string("d"),
                        ]))),
                    ]),
                ])),
            ),
            Variable::named("rule_1", Rule::pattern("e", "")),
            Variable::named("rule_2", Rule::pattern("b", "")),
            Variable::named(
                "rule_3",
                Rule::seq(vec![Rule::non_terminal(2), Rule::Blank]),
            ),
        ]))
        .unwrap();
        assert_eq!(
            syntax_grammar.variables,
            vec![
                Variable::named(
                    "rule_0",
                    Rule::repeat(Rule::seq(vec![
                        // The string "a" was replaced by a symbol referencing the lexical grammar
                        Rule::terminal(0),
                        // The pattern "b" was replaced by a symbol referencing the lexical grammar
                        Rule::terminal(1),
                        Rule::choice(vec![
                            // The symbol referencing `rule_1` was replaced by a symbol referencing
                            // the lexical grammar.
                            Rule::terminal(3),
                            // The symbol referencing `rule_2` had its index decremented because
                            // `rule_1` was moved to the lexical grammar.
                            Rule::non_terminal(1),
                            // The rule wrapped in `token` was replaced by a symbol referencing
                            // the lexical grammar.
                            Rule::terminal(2),
                        ])
                    ]))
                ),
                // The pattern "e" was only used in once place: as the definition of `rule_1`,
                // so that rule was moved to the lexical grammar. The pattern "b" appeared in
                // two places, so it was not moved into the lexical grammar.
                Variable::named("rule_2", Rule::terminal(1)),
                Variable::named(
                    "rule_3",
                    Rule::seq(vec![Rule::non_terminal(1), Rule::Blank,])
                ),
            ]
        );
        assert_eq!(
            lexical_grammar.variables,
            vec![
                Variable::anonymous("a", Rule::string("a")),
                Variable::auxiliary("rule_0_token1", Rule::pattern("b", "")),
                Variable::auxiliary(
                    "rule_0_token2",
                    Rule::repeat(Rule::choice(vec![Rule::string("c"), Rule::string("d"),]))
                ),
                Variable::named("rule_1", Rule::pattern("e", "")),
            ]
        );
    }
    /// The start rule may consist entirely of a single token.
    #[test]
    fn test_start_rule_is_token() {
        let (syntax_grammar, lexical_grammar) =
            extract_tokens(build_grammar(vec![Variable::named(
                "rule_0",
                Rule::string("hello"),
            )]))
            .unwrap();
        assert_eq!(
            syntax_grammar.variables,
            vec![Variable::named("rule_0", Rule::terminal(0)),]
        );
        assert_eq!(
            lexical_grammar.variables,
            vec![Variable::anonymous("hello", Rule::string("hello")),]
        );
    }
    /// Symbol extras stay in the syntax grammar; non-symbol, non-token extras
    /// become lexical separators.
    #[test]
    fn test_extracting_extra_symbols() {
        let mut grammar = build_grammar(vec![
            Variable::named("rule_0", Rule::string("x")),
            Variable::named("comment", Rule::pattern("//.*", "")),
        ]);
        grammar.extra_symbols = vec![Rule::string(" "), Rule::non_terminal(1)];
        let (syntax_grammar, lexical_grammar) = extract_tokens(grammar).unwrap();
        assert_eq!(syntax_grammar.extra_symbols, vec![Symbol::terminal(1),]);
        assert_eq!(lexical_grammar.separators, vec![Rule::string(" "),]);
    }
    /// External tokens keep or acquire their corresponding internal token
    /// symbols after extraction.
    #[test]
    fn test_extract_externals() {
        let mut grammar = build_grammar(vec![
            Variable::named(
                "rule_0",
                Rule::seq(vec![
                    Rule::external(0),
                    Rule::string("a"),
                    Rule::non_terminal(1),
                    Rule::non_terminal(2),
                ]),
            ),
            Variable::named("rule_1", Rule::string("b")),
            Variable::named("rule_2", Rule::string("c")),
        ]);
        grammar.external_tokens = vec![
            Variable::named("external_0", Rule::external(0)),
            Variable::anonymous("a", Rule::string("a")),
            Variable::named("rule_2", Rule::non_terminal(2)),
        ];
        let (syntax_grammar, _) = extract_tokens(grammar).unwrap();
        assert_eq!(
            syntax_grammar.external_tokens,
            vec![
                ExternalToken {
                    name: "external_0".to_string(),
                    kind: VariableType::Named,
                    corresponding_internal_token: None,
                },
                ExternalToken {
                    name: "a".to_string(),
                    kind: VariableType::Anonymous,
                    corresponding_internal_token: Some(Symbol::terminal(0)),
                },
                ExternalToken {
                    name: "rule_2".to_string(),
                    kind: VariableType::Named,
                    corresponding_internal_token: Some(Symbol::terminal(2)),
                },
            ]
        );
    }
    /// An external token may not share a name with a non-terminal rule.
    #[test]
    fn test_error_on_external_with_same_name_as_non_terminal() {
        let mut grammar = build_grammar(vec![
            Variable::named(
                "rule_0",
                Rule::seq(vec![Rule::non_terminal(1), Rule::non_terminal(2)]),
            ),
            Variable::named(
                "rule_1",
                Rule::seq(vec![Rule::non_terminal(2), Rule::non_terminal(2)]),
            ),
            Variable::named("rule_2", Rule::string("a")),
        ]);
        grammar.external_tokens = vec![Variable::named("rule_1", Rule::non_terminal(1))];
        let result = extract_tokens(grammar);
        assert!(result.is_err(), "Expected an error but got no error");
        let err = result.err().unwrap();
        assert_eq!(
            err.to_string(),
            "Rule 'rule_1' cannot be used as both an external token and a non-terminal rule"
        );
    }
    #[test]
    fn test_extraction_on_hidden_terminal() {
        let (syntax_grammar, lexical_grammar) = extract_tokens(build_grammar(vec![
            Variable::named("rule_0", Rule::non_terminal(1)),
            Variable::hidden("_rule_1", Rule::string("a")),
        ]))
        .unwrap();
        // The rule `_rule_1` should not "absorb" the
        // terminal "a", since it is hidden,
        // so we expect two variables still
        assert_eq!(
            syntax_grammar.variables,
            vec![
                Variable::named("rule_0", Rule::non_terminal(1)),
                Variable::hidden("_rule_1", Rule::terminal(0)),
            ]
        );
        // We should not have a hidden rule in our lexical grammar, only the terminal "a"
        assert_eq!(
            lexical_grammar.variables,
            vec![Variable::anonymous("a", Rule::string("a"))]
        );
    }
    /// Empty strings outside the start rule are rejected.
    #[test]
    fn test_extraction_with_empty_string() {
        assert!(extract_tokens(build_grammar(vec![
            Variable::named("rule_0", Rule::non_terminal(1)),
            Variable::hidden("_rule_1", Rule::string("")),
        ]))
        .is_err());
    }
    /// Builds an [`InternedGrammar`] with the given variables and all other
    /// fields defaulted.
    fn build_grammar(variables: Vec<Variable>) -> InternedGrammar {
        InternedGrammar {
            variables,
            ..Default::default()
        }
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/build_tables/token_conflicts.rs | crates/generate/src/build_tables/token_conflicts.rs | use std::{cmp::Ordering, collections::HashSet, fmt};
use crate::{
build_tables::item::TokenSetDisplay,
grammars::{LexicalGrammar, SyntaxGrammar},
nfa::{CharacterSet, NfaCursor, NfaTransition},
rules::TokenSet,
};
/// The kinds of lexical overlap detected between one token (the "subject")
/// and another token; one entry per ordered pair is stored in
/// `TokenConflictMap::status_matrix`.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
struct TokenConflictStatus {
    // The subject token matches a string that is a prefix of a string
    // matched by the other token.
    matches_prefix: bool,
    // After the other token has completed, the lexer could keep advancing
    // and eventually match the subject token instead.
    does_match_continuation: bool,
    // As above, and the continuation characters can actually follow the
    // other token in some parse state.
    does_match_valid_continuation: bool,
    // The subject token completes within characters that are otherwise
    // treated as separators.
    does_match_separators: bool,
    // Both tokens match the same string, and the subject token wins.
    matches_same_string: bool,
    // The subject token matches some string the other token cannot match.
    matches_different_string: bool,
}
/// A precomputed matrix describing, for every ordered pair of tokens in a
/// lexical grammar, how those tokens can overlap during lexing.
pub struct TokenConflictMap<'a> {
    // Number of tokens; the status matrix is `n * n`, row-major.
    n: usize,
    status_matrix: Vec<TokenConflictStatus>,
    // For each token, the set of tokens that may appear immediately after it.
    following_tokens: Vec<TokenSet>,
    // For each token, the characters its NFA can begin with.
    starting_chars_by_index: Vec<CharacterSet>,
    // For each token, the starting characters of the tokens that may follow it.
    following_chars_by_index: Vec<CharacterSet>,
    grammar: &'a LexicalGrammar,
}
impl<'a> TokenConflictMap<'a> {
    /// Create a token conflict map based on a lexical grammar, which describes the structure
    /// of each token, and a `following_token` map, which indicates which tokens may appear
    /// immediately after each other token.
    ///
    /// This analyzes the possible kinds of overlap between each pair of tokens and stores
    /// them in a matrix.
    pub fn new(grammar: &'a LexicalGrammar, following_tokens: Vec<TokenSet>) -> Self {
        let mut cursor = NfaCursor::new(&grammar.nfa, Vec::new());
        let starting_chars = get_starting_chars(&mut cursor, grammar);
        let following_chars = get_following_chars(&starting_chars, &following_tokens);
        let n = grammar.variables.len();
        let mut status_matrix = vec![TokenConflictStatus::default(); n * n];
        // Each unordered pair is analyzed once; the analysis returns the
        // status for both directions at the same time.
        for i in 0..grammar.variables.len() {
            for j in 0..i {
                let status = compute_conflict_status(&mut cursor, grammar, &following_chars, i, j);
                status_matrix[matrix_index(n, i, j)] = status.0;
                status_matrix[matrix_index(n, j, i)] = status.1;
            }
        }
        TokenConflictMap {
            n,
            status_matrix,
            following_tokens,
            starting_chars_by_index: starting_chars,
            following_chars_by_index: following_chars,
            grammar,
        }
    }

    /// Do tokens `a` and `b` have exactly the same conflict status with
    /// respect to the token `other`?
    pub fn has_same_conflict_status(&self, a: usize, b: usize, other: usize) -> bool {
        let left = &self.status_matrix[matrix_index(self.n, a, other)];
        let right = &self.status_matrix[matrix_index(self.n, b, other)];
        left == right
    }

    /// Does token `i` match any strings that token `j` does *not* match?
    pub fn does_match_different_string(&self, i: usize, j: usize) -> bool {
        self.status_matrix[matrix_index(self.n, i, j)].matches_different_string
    }

    /// Does token `i` match any strings that token `j` also matches, where
    /// token `i` is preferred over token `j`?
    pub fn does_match_same_string(&self, i: usize, j: usize) -> bool {
        self.status_matrix[matrix_index(self.n, i, j)].matches_same_string
    }

    /// Can tokens `i` and `j` genuinely conflict: `i` continues past `j` with
    /// valid following characters, completes within separators, or wins on
    /// the same string?
    pub fn does_conflict(&self, i: usize, j: usize) -> bool {
        let entry = &self.status_matrix[matrix_index(self.n, i, j)];
        entry.does_match_valid_continuation
            || entry.does_match_separators
            || entry.matches_same_string
    }

    /// Does token `i` match any strings that are *prefixes* of strings matched by `j`?
    pub fn does_match_prefix(&self, i: usize, j: usize) -> bool {
        self.status_matrix[matrix_index(self.n, i, j)].matches_prefix
    }

    /// Can token `i` swallow strings around token `j` (via continuation or
    /// separators) without `j` doing the same via separators in return?
    pub fn does_match_shorter_or_longer(&self, i: usize, j: usize) -> bool {
        let entry = &self.status_matrix[matrix_index(self.n, i, j)];
        let reverse_entry = &self.status_matrix[matrix_index(self.n, j, i)];
        (entry.does_match_valid_continuation || entry.does_match_separators)
            && !reverse_entry.does_match_separators
    }

    /// Do tokens `i` and `j` overlap in any way at all?
    pub fn does_overlap(&self, i: usize, j: usize) -> bool {
        let status = &self.status_matrix[matrix_index(self.n, i, j)];
        status.does_match_separators
            || status.matches_prefix
            || status.matches_same_string
            || status.does_match_continuation
    }

    /// Decide which of two completed tokens wins. Each side is an
    /// `(explicit precedence, token index)` pair; ties on explicit precedence
    /// fall back to implicit precedence, then to the lower token index.
    pub fn prefer_token(grammar: &LexicalGrammar, left: (i32, usize), right: (i32, usize)) -> bool {
        match left.0.cmp(&right.0) {
            Ordering::Less => false,
            Ordering::Greater => true,
            Ordering::Equal => match grammar.variables[left.1]
                .implicit_precedence
                .cmp(&grammar.variables[right.1].implicit_precedence)
            {
                Ordering::Less => false,
                Ordering::Greater => true,
                Ordering::Equal => left.1 < right.1,
            },
        }
    }

    /// Decide whether to keep advancing via transition `t` rather than
    /// accepting the already-completed token `completed_id`.
    pub fn prefer_transition(
        grammar: &LexicalGrammar,
        t: &NfaTransition,
        completed_id: usize,
        completed_precedence: i32,
        has_separator_transitions: bool,
    ) -> bool {
        // Lower-precedence transitions never beat a completed token.
        if t.precedence < completed_precedence {
            return false;
        }
        if t.precedence == completed_precedence {
            // On equal precedence, never advance into a separator, ...
            if t.is_separator {
                return false;
            }
            // ... and when separators are possible here, only advance if the
            // completed token itself could still be extended.
            if has_separator_transitions
                && !grammar
                    .variable_indices_for_nfa_states(&t.states)
                    .any(|i| i == completed_id)
            {
                return false;
            }
        }
        true
    }
}
// Multi-line, human-readable dump of the whole map (follow sets, starting
// and following characters, and the full status matrix); intended for
// debugging and test output.
impl fmt::Debug for TokenConflictMap<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "TokenConflictMap {{")?;
        // TokenSetDisplay needs a syntax grammar; an empty default suffices
        // here because only terminals are being displayed.
        let syntax_grammar = SyntaxGrammar::default();
        writeln!(f, " following_tokens: {{")?;
        for (i, following_tokens) in self.following_tokens.iter().enumerate() {
            writeln!(
                f,
                " follow({:?}): {},",
                self.grammar.variables[i].name,
                TokenSetDisplay(following_tokens, &syntax_grammar, self.grammar)
            )?;
        }
        writeln!(f, " }},")?;
        writeln!(f, " starting_characters: {{")?;
        for i in 0..self.n {
            writeln!(
                f,
                " {:?}: {:?},",
                self.grammar.variables[i].name, self.starting_chars_by_index[i]
            )?;
        }
        writeln!(f, " }},")?;
        writeln!(f, " following_characters: {{")?;
        for i in 0..self.n {
            writeln!(
                f,
                " {:?}: {:?},",
                self.grammar.variables[i].name, self.following_chars_by_index[i]
            )?;
        }
        writeln!(f, " }},")?;
        writeln!(f, " status_matrix: {{")?;
        for i in 0..self.n {
            writeln!(f, " {:?}: {{", self.grammar.variables[i].name)?;
            for j in 0..self.n {
                writeln!(
                    f,
                    " {:?}: {:?},",
                    self.grammar.variables[j].name,
                    self.status_matrix[matrix_index(self.n, i, j)]
                )?;
            }
            writeln!(f, " }},")?;
        }
        write!(f, " }},")?;
        write!(f, "}}")?;
        Ok(())
    }
}
/// Row-major index of cell `(i, j)` in a square `variable_count` ×
/// `variable_count` matrix.
const fn matrix_index(variable_count: usize, i: usize, j: usize) -> usize {
    i * variable_count + j
}
/// For each token in the grammar, compute the set of characters that can
/// begin that token: reset the NFA cursor to the token's start state and
/// union the characters of all outgoing transitions.
fn get_starting_chars(cursor: &mut NfaCursor, grammar: &LexicalGrammar) -> Vec<CharacterSet> {
    let mut result = Vec::with_capacity(grammar.variables.len());
    for variable in &grammar.variables {
        cursor.reset(vec![variable.start_state]);
        let mut all_chars = CharacterSet::empty();
        for (chars, _) in cursor.transition_chars() {
            all_chars = all_chars.add(chars);
        }
        result.push(all_chars);
    }
    result
}
/// For each token, union the starting characters of every terminal that may
/// follow it, producing one `CharacterSet` per entry in `following_tokens`.
fn get_following_chars(
    starting_chars: &[CharacterSet],
    following_tokens: &[TokenSet],
) -> Vec<CharacterSet> {
    let mut result = Vec::with_capacity(following_tokens.len());
    for token_set in following_tokens {
        let mut chars = CharacterSet::empty();
        for token in token_set.iter() {
            // Only terminals have starting characters; other symbol kinds
            // contribute nothing.
            if token.is_terminal() {
                chars = chars.add(&starting_chars[token.index]);
            }
        }
        result.push(chars);
    }
    result
}
/// Analyze tokens `i` and `j` by simultaneously simulating both of their
/// NFAs (a product construction over sets of NFA states) and recording every
/// kind of overlap encountered. Returns the status for `(i vs j, j vs i)`.
fn compute_conflict_status(
    cursor: &mut NfaCursor,
    grammar: &LexicalGrammar,
    following_chars: &[CharacterSet],
    i: usize,
    j: usize,
) -> (TokenConflictStatus, TokenConflictStatus) {
    let mut visited_state_sets = HashSet::new();
    // Breadth of the search: each entry is a combined set of NFA states
    // reachable by some common input prefix.
    let mut state_set_queue = vec![vec![
        grammar.variables[i].start_state,
        grammar.variables[j].start_state,
    ]];
    let mut result = (
        TokenConflictStatus::default(),
        TokenConflictStatus::default(),
    );
    while let Some(state_set) = state_set_queue.pop() {
        let mut live_variable_indices = grammar.variable_indices_for_nfa_states(&state_set);
        // If only one of the two tokens could possibly match from this state, then
        // there is no reason to analyze any of its successors. Just record the fact
        // that the token matches a string that the other token does not match.
        let first_live_variable_index = live_variable_indices.next().unwrap();
        if live_variable_indices.count() == 0 {
            if first_live_variable_index == i {
                result.0.matches_different_string = true;
            } else {
                result.1.matches_different_string = true;
            }
            continue;
        }
        // Don't pursue states where there's no potential for conflict.
        cursor.reset(state_set);
        let within_separator = cursor.transition_chars().any(|(_, sep)| sep);
        // Examine each possible completed token in this state.
        let mut completion = None;
        for (id, precedence) in cursor.completions() {
            if within_separator {
                if id == i {
                    result.0.does_match_separators = true;
                } else {
                    result.1.does_match_separators = true;
                }
            }
            // If the other token has already completed, then this is
            // a same-string conflict.
            if let Some((prev_id, prev_precedence)) = completion {
                if id == prev_id {
                    continue;
                }
                // Determine which of the two tokens is preferred.
                let preferred_id;
                if TokenConflictMap::prefer_token(
                    grammar,
                    (prev_precedence, prev_id),
                    (precedence, id),
                ) {
                    preferred_id = prev_id;
                } else {
                    preferred_id = id;
                    completion = Some((id, precedence));
                }
                if preferred_id == i {
                    result.0.matches_same_string = true;
                } else {
                    result.1.matches_same_string = true;
                }
            } else {
                completion = Some((id, precedence));
            }
        }
        // Examine each possible transition from this state to detect substring conflicts.
        for transition in cursor.transitions() {
            // NOTE(review): `can_advance` is never assigned `false` anywhere
            // below, so the `if can_advance && ...` guard at the bottom is
            // currently always true — possibly vestigial; confirm intent
            // before removing or changing it.
            let mut can_advance = true;
            // If there is already a completed token in this state, then determine
            // if the next state can also match the completed token. If so, then
            // this is *not* a conflict.
            if let Some((completed_id, completed_precedence)) = completion {
                let mut advanced_id = None;
                let mut successor_contains_completed_id = false;
                for variable_id in grammar.variable_indices_for_nfa_states(&transition.states) {
                    if variable_id == completed_id {
                        successor_contains_completed_id = true;
                        break;
                    }
                    advanced_id = Some(variable_id);
                }
                // Determine which action is preferred: matching the already complete
                // token, or continuing on to try and match the other longer token.
                if let (Some(advanced_id), false) = (advanced_id, successor_contains_completed_id) {
                    if TokenConflictMap::prefer_transition(
                        grammar,
                        &transition,
                        completed_id,
                        completed_precedence,
                        within_separator,
                    ) {
                        can_advance = true;
                        // The continuation is only a *valid* continuation if
                        // its characters can actually follow the other token.
                        if advanced_id == i {
                            result.0.does_match_continuation = true;
                            if transition.characters.does_intersect(&following_chars[j]) {
                                result.0.does_match_valid_continuation = true;
                            }
                        } else {
                            result.1.does_match_continuation = true;
                            if transition.characters.does_intersect(&following_chars[i]) {
                                result.1.does_match_valid_continuation = true;
                            }
                        }
                    } else if completed_id == i {
                        result.0.matches_prefix = true;
                    } else {
                        result.1.matches_prefix = true;
                    }
                }
            }
            if can_advance && visited_state_sets.insert(transition.states.clone()) {
                state_set_queue.push(transition.states);
            }
        }
    }
    result
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{
grammars::{Variable, VariableType},
prepare_grammar::{expand_tokens, ExtractedLexicalGrammar},
rules::{Precedence, Rule, Symbol},
};
#[test]
fn test_starting_characters() {
let grammar = expand_tokens(ExtractedLexicalGrammar {
separators: Vec::new(),
variables: vec![
Variable {
name: "token_0".to_string(),
kind: VariableType::Named,
rule: Rule::pattern("[a-f]1|0x\\d", ""),
},
Variable {
name: "token_1".to_string(),
kind: VariableType::Named,
rule: Rule::pattern("d*ef", ""),
},
],
})
.unwrap();
let token_map = TokenConflictMap::new(&grammar, Vec::new());
assert_eq!(
token_map.starting_chars_by_index[0],
CharacterSet::empty().add_range('a', 'f').add_char('0')
);
assert_eq!(
token_map.starting_chars_by_index[1],
CharacterSet::empty().add_range('d', 'e')
);
}
#[test]
fn test_token_conflicts() {
let grammar = expand_tokens(ExtractedLexicalGrammar {
separators: Vec::new(),
variables: vec![
Variable {
name: "in".to_string(),
kind: VariableType::Named,
rule: Rule::string("in"),
},
Variable {
name: "identifier".to_string(),
kind: VariableType::Named,
rule: Rule::pattern("\\w+", ""),
},
Variable {
name: "instanceof".to_string(),
kind: VariableType::Named,
rule: Rule::string("instanceof"),
},
],
})
.unwrap();
let var = |name| index_of_var(&grammar, name);
let token_map = TokenConflictMap::new(
&grammar,
vec![
std::iter::once(&Symbol::terminal(var("identifier")))
.copied()
.collect(),
std::iter::once(&Symbol::terminal(var("in")))
.copied()
.collect(),
std::iter::once(&Symbol::terminal(var("identifier")))
.copied()
.collect(),
],
);
// Given the string "in", the `in` token is preferred over the `identifier` token
assert!(token_map.does_match_same_string(var("in"), var("identifier")));
assert!(!token_map.does_match_same_string(var("identifier"), var("in")));
// Depending on what character follows, the string "in" may be treated as part of an
// `identifier` token.
assert!(token_map.does_conflict(var("identifier"), var("in")));
// Depending on what character follows, the string "instanceof" may be treated as part of
// an `identifier` token.
assert!(token_map.does_conflict(var("identifier"), var("instanceof")));
assert!(token_map.does_conflict(var("instanceof"), var("in")));
}
#[test]
fn test_token_conflicts_with_separators() {
let grammar = expand_tokens(ExtractedLexicalGrammar {
separators: vec![Rule::pattern("\\s", "")],
variables: vec![
Variable {
name: "x".to_string(),
kind: VariableType::Named,
rule: Rule::string("x"),
},
Variable {
name: "newline".to_string(),
kind: VariableType::Named,
rule: Rule::string("\n"),
},
],
})
.unwrap();
let var = |name| index_of_var(&grammar, name);
let token_map = TokenConflictMap::new(&grammar, vec![TokenSet::new(); 4]);
assert!(token_map.does_conflict(var("newline"), var("x")));
assert!(!token_map.does_conflict(var("x"), var("newline")));
}
#[test]
fn test_token_conflicts_with_open_ended_tokens() {
let grammar = expand_tokens(ExtractedLexicalGrammar {
separators: vec![Rule::pattern("\\s", "")],
variables: vec![
Variable {
name: "x".to_string(),
kind: VariableType::Named,
rule: Rule::string("x"),
},
Variable {
name: "anything".to_string(),
kind: VariableType::Named,
rule: Rule::prec(Precedence::Integer(-1), Rule::pattern(".*", "")),
},
],
})
.unwrap();
let var = |name| index_of_var(&grammar, name);
let token_map = TokenConflictMap::new(&grammar, vec![TokenSet::new(); 4]);
assert!(token_map.does_match_shorter_or_longer(var("anything"), var("x")));
assert!(!token_map.does_match_shorter_or_longer(var("x"), var("anything")));
}
fn index_of_var(grammar: &LexicalGrammar, name: &str) -> usize {
grammar
.variables
.iter()
.position(|v| v.name == name)
.unwrap()
}
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/build_tables/coincident_tokens.rs | crates/generate/src/build_tables/coincident_tokens.rs | use std::fmt;
use crate::{
grammars::LexicalGrammar,
rules::Symbol,
tables::{ParseStateId, ParseTable},
};
/// For every pair of terminals, records the parse states in which both
/// tokens are valid lookaheads ("coincident" tokens).
pub struct CoincidentTokenIndex<'a> {
    // Symmetric n x n matrix of parse-state-id lists; see `index`.
    entries: Vec<Vec<ParseStateId>>,
    grammar: &'a LexicalGrammar,
    // Number of terminals in the lexical grammar.
    n: usize,
}
impl<'a> CoincidentTokenIndex<'a> {
    /// Build the index by scanning every parse state's terminal entries and
    /// recording, for each pair of valid terminals, the state's id.
    pub fn new(table: &ParseTable, lexical_grammar: &'a LexicalGrammar) -> Self {
        let n = lexical_grammar.variables.len();
        let mut result = Self {
            n,
            grammar: lexical_grammar,
            entries: vec![Vec::new(); n * n],
        };
        for (i, state) in table.states.iter().enumerate() {
            for symbol in state.terminal_entries.keys() {
                if symbol.is_terminal() {
                    for other_symbol in state.terminal_entries.keys() {
                        if other_symbol.is_terminal() {
                            let index = result.index(symbol.index, other_symbol.index);
                            // States are visited in order, so checking the last
                            // entry is enough to avoid duplicate ids.
                            if result.entries[index].last().copied() != Some(i) {
                                result.entries[index].push(i);
                            }
                        }
                    }
                }
            }
        }
        result
    }

    /// The parse states in which both `a` and `b` are valid lookaheads.
    pub fn states_with(&self, a: Symbol, b: Symbol) -> &[ParseStateId] {
        &self.entries[self.index(a.index, b.index)]
    }

    /// Whether `a` and `b` are ever valid lookaheads in the same parse state.
    pub fn contains(&self, a: Symbol, b: Symbol) -> bool {
        !self.entries[self.index(a.index, b.index)].is_empty()
    }

    // The relation is symmetric, so each pair is stored once at the cell
    // addressed by (min, max).
    #[must_use]
    const fn index(&self, a: usize, b: usize) -> usize {
        if a < b {
            a * self.n + b
        } else {
            b * self.n + a
        }
    }
}
// Human-readable dump: for each pair of tokens, the *count* of parse states
// in which they coincide.
impl fmt::Debug for CoincidentTokenIndex<'_> {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "CoincidentTokenIndex {{")?;
        writeln!(f, " entries: {{")?;
        for i in 0..self.n {
            writeln!(f, " {}: {{", self.grammar.variables[i].name)?;
            for j in 0..self.n {
                writeln!(
                    f,
                    " {}: {:?},",
                    self.grammar.variables[j].name,
                    self.entries[self.index(i, j)].len()
                )?;
            }
            writeln!(f, " }},")?;
        }
        write!(f, " }},")?;
        write!(f, "}}")?;
        Ok(())
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/build_tables/build_lex_table.rs | crates/generate/src/build_tables/build_lex_table.rs | use std::{
collections::{hash_map::Entry, HashMap, VecDeque},
mem,
};
use log::debug;
use super::{coincident_tokens::CoincidentTokenIndex, token_conflicts::TokenConflictMap};
use crate::{
dedup::split_state_id_groups,
grammars::{LexicalGrammar, SyntaxGrammar},
nfa::{CharacterSet, NfaCursor},
rules::{Symbol, TokenSet},
tables::{AdvanceAction, LexState, LexTable, ParseStateId, ParseTable},
};
/// Character sets with more than this many ranges are collected into
/// `LexTables::large_character_sets` so they can be shared rather than
/// repeated inline.
pub const LARGE_CHARACTER_RANGE_COUNT: usize = 8;

/// The result of lex-table construction.
pub struct LexTables {
    // The DFA used for ordinary lexing.
    pub main_lex_table: LexTable,
    // A separate DFA recognizing only keywords (empty when the grammar has
    // no word token).
    pub keyword_lex_table: LexTable,
    // Large character sets, tagged with a token symbol when the set is that
    // token's main-token characters, `None` for separator sets.
    pub large_character_sets: Vec<(Option<Symbol>, CharacterSet)>,
}
/// Build the lexer tables for a finished parse table: a keyword table (when
/// the grammar has a word token), the minimized and sorted main lex table,
/// and the list of shared large character sets. Each parse state's
/// `lex_state_id` is updated to point into the main table.
pub fn build_lex_table(
    parse_table: &mut ParseTable,
    syntax_grammar: &SyntaxGrammar,
    lexical_grammar: &LexicalGrammar,
    keywords: &TokenSet,
    coincident_token_index: &CoincidentTokenIndex,
    token_conflict_map: &TokenConflictMap,
) -> LexTables {
    // With keyword extraction, keywords get their own small lex table and the
    // main table only needs to recognize the word token.
    let keyword_lex_table = if syntax_grammar.word_token.is_some() {
        let mut builder = LexTableBuilder::new(lexical_grammar);
        builder.add_state_for_tokens(keywords);
        builder.table
    } else {
        LexTable::default()
    };
    // Group parse states by their sets of valid lookahead tokens, merging
    // sets that can safely share one lex entry state (see `merge_token_set`).
    let mut parse_state_ids_by_token_set = Vec::<(TokenSet, Vec<ParseStateId>)>::new();
    for (i, state) in parse_table.states.iter().enumerate() {
        let tokens = state
            .terminal_entries
            .keys()
            .copied()
            .chain(state.reserved_words.iter())
            .filter_map(|token| {
                if token.is_terminal() {
                    // Keywords are recognized via the word token here; the
                    // keyword table distinguishes them afterwards.
                    if keywords.contains(&token) {
                        syntax_grammar.word_token
                    } else {
                        Some(token)
                    }
                } else if token.is_eof() {
                    Some(token)
                } else {
                    None
                }
            })
            .collect();
        let mut did_merge = false;
        for entry in &mut parse_state_ids_by_token_set {
            if merge_token_set(
                &mut entry.0,
                &tokens,
                lexical_grammar,
                token_conflict_map,
                coincident_token_index,
            ) {
                did_merge = true;
                entry.1.push(i);
                break;
            }
        }
        if !did_merge {
            parse_state_ids_by_token_set.push((tokens, vec![i]));
        }
    }
    // Build one lex entry state per merged token set, pointing each of its
    // parse states at that state.
    let mut builder = LexTableBuilder::new(lexical_grammar);
    for (tokens, parse_state_ids) in parse_state_ids_by_token_set {
        let lex_state_id = builder.add_state_for_tokens(&tokens);
        for id in parse_state_ids {
            parse_table.states[id].lex_state_id = lex_state_id;
        }
    }
    let mut main_lex_table = mem::take(&mut builder.table);
    minimize_lex_table(&mut main_lex_table, parse_table);
    sort_states(&mut main_lex_table, parse_table);
    // Re-run the builder per token to find character sets worth sharing:
    // the union of a token's main-token characters (tagged with its symbol)
    // and any individual non-main (separator) sets, when they exceed the
    // range-count threshold.
    let mut large_character_sets = Vec::new();
    for (variable_ix, _variable) in lexical_grammar.variables.iter().enumerate() {
        let symbol = Symbol::terminal(variable_ix);
        builder.reset();
        builder.add_state_for_tokens(&TokenSet::from_iter([symbol]));
        for state in &builder.table.states {
            let mut characters = CharacterSet::empty();
            for (chars, action) in &state.advance_actions {
                if action.in_main_token {
                    characters = characters.add(chars);
                    continue;
                }
                if chars.range_count() > LARGE_CHARACTER_RANGE_COUNT
                    && !large_character_sets.iter().any(|(_, set)| set == chars)
                {
                    large_character_sets.push((None, chars.clone()));
                }
            }
            if characters.range_count() > LARGE_CHARACTER_RANGE_COUNT
                && !large_character_sets
                    .iter()
                    .any(|(_, set)| *set == characters)
            {
                large_character_sets.push((Some(symbol), characters));
            }
        }
    }
    LexTables {
        main_lex_table,
        keyword_lex_table,
        large_character_sets,
    }
}
/// A lex state that has been allocated but whose transitions have not yet
/// been populated.
struct QueueEntry {
    state_id: usize,
    // The NFA state set this lex state represents.
    nfa_states: Vec<u32>,
    // Whether end-of-file is a valid lookahead in this state.
    eof_valid: bool,
}

/// Builds a `LexTable` from the lexical grammar's NFA via subset
/// construction, deduplicating states by their NFA state set.
struct LexTableBuilder<'a> {
    lexical_grammar: &'a LexicalGrammar,
    cursor: NfaCursor<'a>,
    table: LexTable,
    // States awaiting `populate_state`.
    state_queue: VecDeque<QueueEntry>,
    // Deduplication key: (sorted NFA state ids, eof_valid).
    state_ids_by_nfa_state_set: HashMap<(Vec<u32>, bool), usize>,
}
impl<'a> LexTableBuilder<'a> {
    fn new(lexical_grammar: &'a LexicalGrammar) -> Self {
        Self {
            lexical_grammar,
            cursor: NfaCursor::new(&lexical_grammar.nfa, vec![]),
            table: LexTable::default(),
            state_queue: VecDeque::new(),
            state_ids_by_nfa_state_set: HashMap::new(),
        }
    }

    // Discard all built state so the builder can be reused for another
    // token set (used when scanning for large character sets).
    fn reset(&mut self) {
        self.table = LexTable::default();
        self.state_queue.clear();
        self.state_ids_by_nfa_state_set.clear();
    }

    /// Create (or find) the lex entry state for the given token set and
    /// fully populate everything reachable from it. Returns the state id.
    fn add_state_for_tokens(&mut self, tokens: &TokenSet) -> usize {
        let mut eof_valid = false;
        // Seed the NFA state set with each terminal's start state; EOF is
        // tracked separately since it has no NFA states.
        let nfa_states = tokens
            .iter()
            .filter_map(|token| {
                if token.is_terminal() {
                    Some(self.lexical_grammar.variables[token.index].start_state)
                } else {
                    eof_valid = true;
                    None
                }
            })
            .collect();
        let (state_id, is_new) = self.add_state(nfa_states, eof_valid);
        if is_new {
            debug!(
                "entry point state: {state_id}, tokens: {:?}",
                tokens
                    .iter()
                    .map(|t| &self.lexical_grammar.variables[t.index].name)
                    .collect::<Vec<_>>()
            );
        }
        // Drain the work queue so the table is complete before returning.
        while let Some(QueueEntry {
            state_id,
            nfa_states,
            eof_valid,
        }) = self.state_queue.pop_front()
        {
            self.populate_state(state_id, nfa_states, eof_valid);
        }
        state_id
    }

    /// Intern a lex state for the given NFA state set, allocating and
    /// enqueueing it when unseen. Returns `(state_id, newly_created)`.
    fn add_state(&mut self, nfa_states: Vec<u32>, eof_valid: bool) -> (usize, bool) {
        // Resetting the cursor canonicalizes the state set (epsilon closure /
        // ordering) before it is used as a deduplication key.
        self.cursor.reset(nfa_states);
        match self
            .state_ids_by_nfa_state_set
            .entry((self.cursor.state_ids.clone(), eof_valid))
        {
            Entry::Occupied(o) => (*o.get(), false),
            Entry::Vacant(v) => {
                let state_id = self.table.states.len();
                self.table.states.push(LexState::default());
                self.state_queue.push_back(QueueEntry {
                    state_id,
                    nfa_states: v.key().0.clone(),
                    eof_valid,
                });
                v.insert(state_id);
                (state_id, true)
            }
        }
    }

    /// Compute the accept action, EOF action, and advance actions for one
    /// lex state.
    fn populate_state(&mut self, state_id: usize, nfa_states: Vec<u32>, eof_valid: bool) {
        self.cursor.force_reset(nfa_states);

        // Of all the tokens that can complete in this state, keep only the
        // preferred one (by precedence, then tie-breaking rules).
        let mut completion = None;
        for (id, prec) in self.cursor.completions() {
            if let Some((prev_id, prev_precedence)) = completion {
                if TokenConflictMap::prefer_token(
                    self.lexical_grammar,
                    (prev_precedence, prev_id),
                    (prec, id),
                ) {
                    continue;
                }
            }
            completion = Some((id, prec));
        }
        let transitions = self.cursor.transitions();
        let has_sep = self.cursor.transition_chars().any(|(_, sep)| sep);
        // If EOF is a valid lookahead token, add a transition predicated on the null
        // character that leads to the empty set of NFA states.
        // (The EOF state is represented as an empty list of NFA states.)
        if eof_valid {
            let (next_state_id, _) = self.add_state(Vec::new(), false);
            self.table.states[state_id].eof_action = Some(AdvanceAction {
                state: next_state_id,
                in_main_token: true,
            });
        }
        for transition in transitions {
            // Only advance past a completed token when the transition is
            // preferred over accepting that completion.
            if let Some((completed_id, completed_precedence)) = completion {
                if !TokenConflictMap::prefer_transition(
                    self.lexical_grammar,
                    &transition,
                    completed_id,
                    completed_precedence,
                    has_sep,
                ) {
                    continue;
                }
            }
            // EOF stays valid only while consuming separator characters.
            let (next_state_id, _) =
                self.add_state(transition.states, eof_valid && transition.is_separator);
            self.table.states[state_id].advance_actions.push((
                transition.characters,
                AdvanceAction {
                    state: next_state_id,
                    in_main_token: !transition.is_separator,
                },
            ));
        }
        if let Some((complete_id, _)) = completion {
            self.table.states[state_id].accept_action = Some(Symbol::terminal(complete_id));
        } else if self.cursor.state_ids.is_empty() {
            // An empty NFA state set is the EOF state.
            self.table.states[state_id].accept_action = Some(Symbol::end());
        }
    }
}
/// Try to merge `other` into `tokens` so both token sets can share one lex
/// state. Succeeds (returning `true` with `tokens` updated in place) only if
/// every terminal exclusive to one set is conflict-free against the
/// terminals of the other set; otherwise `tokens` is left unchanged.
fn merge_token_set(
    tokens: &mut TokenSet,
    other: &TokenSet,
    lexical_grammar: &LexicalGrammar,
    token_conflict_map: &TokenConflictMap,
    coincident_token_index: &CoincidentTokenIndex,
) -> bool {
    for i in 0..lexical_grammar.variables.len() {
        let symbol = Symbol::terminal(i);
        // Only terminals present in exactly one of the two sets can
        // introduce new conflicts; check them against the set lacking them.
        let set_without_terminal = match (tokens.contains_terminal(i), other.contains_terminal(i)) {
            (true, false) => other,
            (false, true) => tokens,
            _ => continue,
        };
        for existing_token in set_without_terminal.terminals() {
            if token_conflict_map.does_conflict(i, existing_token.index)
                || token_conflict_map.does_match_prefix(i, existing_token.index)
            {
                return false;
            }
            // Tokens that never share a parse state must not overlap at all
            // if they are to share a lex state.
            if !coincident_token_index.contains(symbol, existing_token)
                && (token_conflict_map.does_overlap(existing_token.index, i)
                    || token_conflict_map.does_overlap(i, existing_token.index))
            {
                return false;
            }
        }
    }
    tokens.insert_all(other);
    true
}
/// DFA minimization by partition refinement: start from coarse groups based
/// on each state's observable "signature", split groups until stable, then
/// rebuild the table with one state per group and remap all references
/// (including the parse table's `lex_state_id`s).
fn minimize_lex_table(table: &mut LexTable, parse_table: &mut ParseTable) {
    // Initially group the states by their accept action and their
    // valid lookahead characters.
    let mut state_ids_by_signature = HashMap::new();
    for (i, state) in table.states.iter().enumerate() {
        // State 0 (the error state) is kept in its own group via `i == 0`.
        let signature = (
            i == 0,
            state.accept_action,
            state.eof_action.is_some(),
            state
                .advance_actions
                .iter()
                .map(|(characters, action)| (characters.clone(), action.in_main_token))
                .collect::<Vec<_>>(),
        );
        state_ids_by_signature
            .entry(signature)
            .or_insert(Vec::new())
            .push(i);
    }
    let mut state_ids_by_group_id = state_ids_by_signature
        .into_iter()
        .map(|e| e.1)
        .collect::<Vec<_>>();
    // Sorting makes group numbering deterministic across runs.
    state_ids_by_group_id.sort();
    // The group containing state 0 must become group 0.
    let error_group_index = state_ids_by_group_id
        .iter()
        .position(|g| g.contains(&0))
        .unwrap();
    state_ids_by_group_id.swap(error_group_index, 0);
    let mut group_ids_by_state_id = vec![0; table.states.len()];
    for (group_id, state_ids) in state_ids_by_group_id.iter().enumerate() {
        for state_id in state_ids {
            group_ids_by_state_id[*state_id] = group_id;
        }
    }
    // Refine: split any group whose members transition to different groups,
    // until a fixed point is reached.
    while split_state_id_groups(
        &table.states,
        &mut state_ids_by_group_id,
        &mut group_ids_by_state_id,
        1,
        lex_states_differ,
    ) {}
    // Materialize one representative state per group, remapping its
    // transition targets onto group ids.
    let mut new_states = Vec::with_capacity(state_ids_by_group_id.len());
    for state_ids in &state_ids_by_group_id {
        let mut new_state = LexState::default();
        mem::swap(&mut new_state, &mut table.states[state_ids[0]]);
        for (_, advance_action) in &mut new_state.advance_actions {
            advance_action.state = group_ids_by_state_id[advance_action.state];
        }
        if let Some(eof_action) = &mut new_state.eof_action {
            eof_action.state = group_ids_by_state_id[eof_action.state];
        }
        new_states.push(new_state);
    }
    for state in &mut parse_table.states {
        state.lex_state_id = group_ids_by_state_id[state.lex_state_id];
    }
    table.states = new_states;
}
// Refinement predicate for `minimize_lex_table`: two states in the same
// group (which already share accept action and advance characters, so their
// advance-action lists align pairwise) differ iff any corresponding advance
// actions lead to states in different groups.
// NOTE(review): eof_action target groups are not compared here — presumably
// safe given how EOF states are constructed, but worth confirming.
fn lex_states_differ(left: &LexState, right: &LexState, group_ids_by_state_id: &[usize]) -> bool {
    left.advance_actions
        .iter()
        .zip(right.advance_actions.iter())
        .any(|(left, right)| {
            group_ids_by_state_id[left.1.state] != group_ids_by_state_id[right.1.state]
        })
}
/// Sort the lex states (keeping state 0 fixed) into a canonical order and
/// remap every state reference, including the parse table's lex state ids.
fn sort_states(table: &mut LexTable, parse_table: &mut ParseTable) {
    // Get a mapping of old state index -> new_state_index
    let mut old_ids_by_new_id = (0..table.states.len()).collect::<Vec<_>>();
    // State 0 (the error state) must remain first, hence `[1..]`.
    old_ids_by_new_id[1..].sort_by_key(|id| &table.states[*id]);
    // Get the inverse mapping
    let mut new_ids_by_old_id = vec![0; old_ids_by_new_id.len()];
    for (id, old_id) in old_ids_by_new_id.iter().enumerate() {
        new_ids_by_old_id[*old_id] = id;
    }
    // Reorder the parse states and update their references to reflect
    // the new ordering.
    table.states = old_ids_by_new_id
        .iter()
        .map(|old_id| {
            let mut state = LexState::default();
            mem::swap(&mut state, &mut table.states[*old_id]);
            for (_, advance_action) in &mut state.advance_actions {
                advance_action.state = new_ids_by_old_id[advance_action.state];
            }
            if let Some(eof_action) = &mut state.eof_action {
                eof_action.state = new_ids_by_old_id[eof_action.state];
            }
            state
        })
        .collect();
    // Update the parse table's lex state references
    for state in &mut parse_table.states {
        state.lex_state_id = new_ids_by_old_id[state.lex_state_id];
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/build_tables/build_parse_table.rs | crates/generate/src/build_tables/build_parse_table.rs | use std::{
cmp::Ordering,
collections::{BTreeMap, BTreeSet, HashMap, HashSet, VecDeque},
hash::BuildHasherDefault,
};
use indexmap::{map::Entry, IndexMap};
use log::warn;
use rustc_hash::FxHasher;
use serde::Serialize;
use thiserror::Error;
use super::{
item::{ParseItem, ParseItemSet, ParseItemSetCore, ParseItemSetEntry},
item_set_builder::ParseItemSetBuilder,
};
use crate::{
grammars::{LexicalGrammar, PrecedenceEntry, ReservedWordSetId, SyntaxGrammar, VariableType},
node_types::VariableInfo,
rules::{Associativity, Precedence, Symbol, SymbolType, TokenSet},
tables::{
FieldLocation, GotoAction, ParseAction, ParseState, ParseStateId, ParseTable,
ParseTableEntry, ProductionInfo, ProductionInfoId,
},
};
// For conflict reporting, each parse state is associated with an example
// sequence of symbols that could lead to that parse state.
type SymbolSequence = Vec<Symbol>;
type AuxiliarySymbolSequence = Vec<AuxiliarySymbolInfo>;
pub type ParseStateInfo<'a> = (SymbolSequence, ParseItemSet<'a>);

/// An auxiliary symbol together with the parent symbols it occurs in
/// (presumably generated helper symbols such as repeats — confirm against
/// the grammar-preparation code).
#[derive(Clone, PartialEq)]
struct AuxiliarySymbolInfo {
    auxiliary_symbol: Symbol,
    parent_symbols: Vec<Symbol>,
}

/// Aggregated precedence/associativity information for a set of competing
/// reductions (usage is outside this excerpt).
#[derive(Debug, Default)]
struct ReductionInfo {
    precedence: Precedence,
    symbols: Vec<Symbol>,
    has_left_assoc: bool,
    has_right_assoc: bool,
    has_non_assoc: bool,
}

/// A parse state whose successor states still need to be computed.
struct ParseStateQueueEntry {
    state_id: ParseStateId,
    preceding_auxiliary_symbols: AuxiliarySymbolSequence,
}
/// Incrementally builds the parse table by expanding parse item sets into
/// states, deduplicating states, and tracking the information needed for
/// conflict reporting.
struct ParseTableBuilder<'a> {
    item_set_builder: ParseItemSetBuilder<'a>,
    syntax_grammar: &'a SyntaxGrammar,
    lexical_grammar: &'a LexicalGrammar,
    variable_info: &'a [VariableInfo],
    // Interns item-set "cores", assigning each distinct core a small id.
    core_ids_by_core: HashMap<ParseItemSetCore<'a>, usize>,
    // Deduplicates parse states by their full item set.
    state_ids_by_item_set: IndexMap<ParseItemSet<'a>, ParseStateId, BuildHasherDefault<FxHasher>>,
    // Per state: an example symbol sequence leading to it, plus its item set.
    parse_state_info_by_id: Vec<ParseStateInfo<'a>>,
    // States whose successors have not yet been computed.
    parse_state_queue: VecDeque<ParseStateQueueEntry>,
    non_terminal_extra_states: Vec<(Symbol, usize)>,
    // Conflicts actually encountered while building, keyed by the symbols
    // involved.
    actual_conflicts: HashSet<Vec<Symbol>>,
    parse_table: ParseTable,
}
/// Result alias for every fallible step of parse-table construction.
pub type BuildTableResult<T> = Result<T, ParseTableBuilderError>;

/// The ways parse-table construction can fail; each variant carries its own
/// user-facing message via `thiserror`.
#[derive(Debug, Error, Serialize)]
pub enum ParseTableBuilderError {
    #[error("Unresolved conflict for symbol sequence:\n\n{0}")]
    Conflict(#[from] ConflictError),
    #[error("Extra rules must have unambiguous endings. Conflicting rules: {0}")]
    AmbiguousExtra(#[from] AmbiguousExtraError),
    #[error(
        "The non-terminal rule `{0}` is used in a non-terminal `extra` rule, which is not allowed."
    )]
    ImproperNonTerminalExtra(String),
    // Generated parsers store state ids in 16 bits, hence the u16 ceiling.
    #[error("State count `{0}` exceeds the max value {max}.", max=u16::MAX)]
    StateCount(usize),
}
/// A fully rendered description of one unresolved parse conflict; its
/// `Display` impl produces the message shown to grammar authors.
#[derive(Default, Debug, Serialize, Error)]
pub struct ConflictError {
    pub symbol_sequence: Vec<String>,
    pub conflicting_lookahead: String,
    pub possible_interpretations: Vec<Interpretation>,
    pub possible_resolutions: Vec<Resolution>,
}

/// One way of interpreting the ambiguous symbol sequence: a production with
/// a dot at `step_index` showing how far it has progressed.
#[derive(Default, Debug, Serialize, Error)]
pub struct Interpretation {
    pub preceding_symbols: Vec<String>,
    pub variable_name: String,
    pub production_step_symbols: Vec<String>,
    pub step_index: u32,
    // When true, the rendered interpretation also shows the conflicting
    // lookahead after the production (see Display).
    pub done: bool,
    pub conflicting_lookahead: String,
    pub precedence: Option<String>,
    pub associativity: Option<String>,
}

/// A suggested way of resolving a conflict in the grammar.
#[derive(Debug, Serialize)]
pub enum Resolution {
    Precedence { symbols: Vec<String> },
    Associativity { symbols: Vec<String> },
    AddConflict { symbols: Vec<String> },
}

/// Raised when `extra` rules have ambiguous endings.
#[derive(Debug, Serialize, Error)]
pub struct AmbiguousExtraError {
    pub parent_symbols: Vec<String>,
}
impl std::fmt::Display for ConflictError {
    /// Render the conflict message: the symbol sequence with the conflicting
    /// lookahead, a sorted, numbered list of possible interpretations (with
    /// precedence/associativity annotations aligned past the longest line),
    /// and a numbered list of possible resolutions.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        for symbol in &self.symbol_sequence {
            write!(f, " {symbol}")?;
        }
        writeln!(f, " • {} …\n", self.conflicting_lookahead)?;
        writeln!(f, "Possible interpretations:\n")?;
        let mut interpretations = self
            .possible_interpretations
            .iter()
            .map(|i| {
                let line = i.to_string();
                // Precedence/associativity are rendered separately so they
                // can be right-aligned after the interpretation text.
                let prec_line = if let (Some(precedence), Some(associativity)) =
                    (&i.precedence, &i.associativity)
                {
                    Some(format!(
                        "(precedence: {precedence}, associativity: {associativity})",
                    ))
                } else {
                    i.precedence
                        .as_ref()
                        .map(|precedence| format!("(precedence: {precedence})"))
                };
                (line, prec_line)
            })
            .collect::<Vec<_>>();
        // Width of the longest interpretation line, used for alignment.
        // Defaults to 0 when there are no interpretations instead of
        // panicking as `.max().unwrap()` would.
        let max_interpretation_length = interpretations
            .iter()
            .map(|i| i.0.chars().count())
            .max()
            .unwrap_or(0);
        interpretations.sort_unstable();
        for (i, (line, prec_suffix)) in interpretations.into_iter().enumerate() {
            // Propagate formatter errors with `?` rather than panicking.
            write!(f, " {}:", i + 1)?;
            write!(f, "{line}")?;
            if let Some(prec_suffix) = prec_suffix {
                write!(
                    f,
                    "{:1$}",
                    "",
                    max_interpretation_length.saturating_sub(line.chars().count()) + 2
                )?;
                write!(f, "{prec_suffix}")?;
            }
            writeln!(f)?;
        }
        writeln!(f, "\nPossible resolutions:\n")?;
        for (i, resolution) in self.possible_resolutions.iter().enumerate() {
            writeln!(f, " {}: {resolution}", i + 1)?;
        }
        Ok(())
    }
}
impl std::fmt::Display for Interpretation {
    /// Renders the interpretation as the preceding symbols, then the rule
    /// name with its production steps in parentheses (a bullet marks the
    /// current step), and — for finished items — the lookahead token.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        self.preceding_symbols
            .iter()
            .try_for_each(|symbol| write!(f, " {symbol}"))?;
        write!(f, " ({}", self.variable_name)?;
        let bullet_position = self.step_index as usize;
        for (position, symbol) in self.production_step_symbols.iter().enumerate() {
            if position == bullet_position {
                write!(f, " •")?;
            }
            write!(f, " {symbol}")?;
        }
        write!(f, ")")?;
        if self.done {
            write!(f, " • {} …", self.conflicting_lookahead)?;
        }
        Ok(())
    }
}
impl std::fmt::Display for Resolution {
    /// Renders the suggestion as a prose sentence listing the affected
    /// rules in backticks. The three variants differ only in their lead-in
    /// text, list separator, and trailing text, so the rendering is
    /// table-driven.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        let (prefix, separator, suffix, symbols) = match self {
            Self::Precedence { symbols } => (
                "Specify a higher precedence in ",
                " and ",
                " than in the other rules.",
                symbols,
            ),
            Self::Associativity { symbols } => (
                "Specify a left or right associativity in ",
                ", ",
                "",
                symbols,
            ),
            Self::AddConflict { symbols } => {
                ("Add a conflict for these rules: ", ", ", "", symbols)
            }
        };
        write!(f, "{prefix}")?;
        for (index, symbol) in symbols.iter().enumerate() {
            if index > 0 {
                write!(f, "{separator}")?;
            }
            write!(f, "`{symbol}`")?;
        }
        write!(f, "{suffix}")?;
        Ok(())
    }
}
impl std::fmt::Display for AmbiguousExtraError {
    /// Writes the parent rule names as a comma-separated list.
    fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result {
        write!(f, "{}", self.parent_symbols.join(", "))
    }
}
impl<'a> ParseTableBuilder<'a> {
/// Creates a builder with an empty parse table and empty bookkeeping
/// collections, ready for `build` to populate.
fn new(
    syntax_grammar: &'a SyntaxGrammar,
    lexical_grammar: &'a LexicalGrammar,
    item_set_builder: ParseItemSetBuilder<'a>,
    variable_info: &'a [VariableInfo],
) -> Self {
    // Seed the running conflict set with every conflict the grammar author
    // declared as expected; entries are removed as they are actually used.
    let actual_conflicts = syntax_grammar.expected_conflicts.iter().cloned().collect();
    // The table starts empty except for the aliased-production length,
    // which always accounts for at least one symbol.
    let parse_table = ParseTable {
        states: Vec::new(),
        symbols: Vec::new(),
        external_lex_states: Vec::new(),
        production_infos: Vec::new(),
        max_aliased_production_length: 1,
    };
    Self {
        syntax_grammar,
        lexical_grammar,
        item_set_builder,
        variable_info,
        core_ids_by_core: HashMap::new(),
        state_ids_by_item_set: IndexMap::default(),
        parse_state_info_by_id: Vec::new(),
        parse_state_queue: VecDeque::new(),
        non_terminal_extra_states: Vec::new(),
        actual_conflicts,
        parse_table,
    }
}
/// Drives table construction: seeds the error state (index 0), the start
/// state (index 1), and one state per starting terminal of each
/// non-terminal `extra` rule, then processes the state queue until every
/// reachable item set has had its actions computed.
///
/// Returns the finished table together with the per-state info used for
/// debugging output, or fails on an unresolvable conflict, an ambiguous
/// non-terminal extra, or an improper non-terminal extra.
fn build(mut self) -> BuildTableResult<(ParseTable, Vec<ParseStateInfo<'a>>)> {
    // Ensure that the empty alias sequence has index 0.
    self.parse_table
        .production_infos
        .push(ProductionInfo::default());

    // Add the error state at index 0.
    self.add_parse_state(&Vec::new(), &Vec::new(), ParseItemSet::default());

    // Add the starting state at index 1.
    self.add_parse_state(
        &Vec::new(),
        &Vec::new(),
        ParseItemSet {
            entries: vec![ParseItemSetEntry {
                item: ParseItem::start(),
                lookaheads: std::iter::once(Symbol::end()).collect(),
                following_reserved_word_set: ReservedWordSetId::default(),
            }],
        },
    );

    // Compute the possible item sets for non-terminal extras.
    let mut non_terminal_extra_item_sets_by_first_terminal = BTreeMap::new();
    for extra_non_terminal in self
        .syntax_grammar
        .extra_symbols
        .iter()
        .filter(|s| s.is_non_terminal())
    {
        let variable = &self.syntax_grammar.variables[extra_non_terminal.index];
        for production in &variable.productions {
            non_terminal_extra_item_sets_by_first_terminal
                .entry(production.first_symbol().unwrap())
                .or_insert_with(ParseItemSet::default)
                .insert(ParseItem {
                    variable_index: extra_non_terminal.index as u32,
                    production,
                    step_index: 1,
                    has_preceding_inherited_fields: false,
                })
                .lookaheads
                .insert(Symbol::end_of_nonterminal_extra());
        }
    }

    // Pre-reserve capacity for the states added in the loop below.
    let non_terminal_sets_len = non_terminal_extra_item_sets_by_first_terminal.len();
    self.non_terminal_extra_states
        .reserve(non_terminal_sets_len);
    self.parse_state_info_by_id.reserve(non_terminal_sets_len);
    self.parse_table.states.reserve(non_terminal_sets_len);
    self.parse_state_queue.reserve(non_terminal_sets_len);

    // Add a state for each starting terminal of a non-terminal extra rule.
    for (terminal, item_set) in non_terminal_extra_item_sets_by_first_terminal {
        // Non-terminal extras must start with a terminal; anything else is
        // a grammar error.
        if terminal.is_non_terminal() {
            Err(ParseTableBuilderError::ImproperNonTerminalExtra(
                self.symbol_name(&terminal),
            ))?;
        }

        // Add the parse state, and *then* push the terminal and the state id into the
        // list of nonterminal extra states
        let state_id = self.add_parse_state(&Vec::new(), &Vec::new(), item_set);
        self.non_terminal_extra_states.push((terminal, state_id));
    }

    // Process states breadth-first: expand each queued state's item set to
    // its transitive closure and populate the state's actions.
    // `add_actions` may enqueue further states.
    while let Some(entry) = self.parse_state_queue.pop_front() {
        let item_set = self
            .item_set_builder
            .transitive_closure(&self.parse_state_info_by_id[entry.state_id].1);

        self.add_actions(
            self.parse_state_info_by_id[entry.state_id].0.clone(),
            entry.preceding_auxiliary_symbols,
            entry.state_id,
            &item_set,
        )?;
    }

    // Declared conflicts that were never consumed during conflict
    // resolution are reported as unnecessary.
    if !self.actual_conflicts.is_empty() {
        warn!(
            "unnecessary conflicts:\n {}",
            &self
                .actual_conflicts
                .iter()
                .map(|conflict| {
                    conflict
                        .iter()
                        .map(|symbol| format!("`{}`", self.symbol_name(symbol)))
                        .collect::<Vec<_>>()
                        .join(", ")
                })
                .collect::<Vec<_>>()
                .join("\n ")
        );
    }

    Ok((self.parse_table, self.parse_state_info_by_id))
}
/// Returns the id of the parse state for `item_set`, creating a new state
/// (and queueing it for action computation) if no equivalent item set has
/// been seen before.
///
/// `preceding_symbols` and `preceding_auxiliary_symbols` record how the
/// state was reached; the symbol sequence is stored alongside the item set
/// for diagnostics, and the auxiliary sequence travels with the queue
/// entry for later conflict resolution.
fn add_parse_state(
    &mut self,
    preceding_symbols: &SymbolSequence,
    preceding_auxiliary_symbols: &AuxiliarySymbolSequence,
    item_set: ParseItemSet<'a>,
) -> ParseStateId {
    match self.state_ids_by_item_set.entry(item_set) {
        // If an equivalent item set has already been processed, then return
        // the existing parse state index.
        Entry::Occupied(o) => *o.get(),

        // Otherwise, insert a new parse state and add it to the queue of
        // parse states to populate.
        Entry::Vacant(v) => {
            // Assign a stable id to this item set's core, reusing the id
            // if the same core has been seen before.
            let core = v.key().core();
            let core_count = self.core_ids_by_core.len();
            let core_id = *self.core_ids_by_core.entry(core).or_insert(core_count);

            let state_id = self.parse_table.states.len();
            self.parse_state_info_by_id
                .push((preceding_symbols.clone(), v.key().clone()));

            // The lex-state ids start at 0 here; they are assigned later in
            // the pipeline, outside this builder.
            self.parse_table.states.push(ParseState {
                id: state_id,
                lex_state_id: 0,
                external_lex_state_id: 0,
                terminal_entries: IndexMap::default(),
                nonterminal_entries: IndexMap::default(),
                reserved_words: TokenSet::default(),
                core_id,
            });
            self.parse_state_queue.push_back(ParseStateQueueEntry {
                state_id,
                preceding_auxiliary_symbols: preceding_auxiliary_symbols.clone(),
            });
            v.insert(state_id);
            state_id
        }
    }
}
/// Populates all of the actions for the parse state `state_id`, whose
/// (already closed) item set is `item_set`.
///
/// Shift and Goto successors are created via `add_parse_state`, Reduce and
/// Accept actions are inserted directly (with eager precedence-based
/// pruning), and any lookahead that still has multiple actions afterwards
/// is handed to `handle_conflict`. Also installs the `extra`-symbol
/// actions and the state's reserved-word set.
fn add_actions(
    &mut self,
    mut preceding_symbols: SymbolSequence,
    mut preceding_auxiliary_symbols: AuxiliarySymbolSequence,
    state_id: ParseStateId,
    item_set: &ParseItemSet<'a>,
) -> BuildTableResult<()> {
    let mut terminal_successors = BTreeMap::new();
    let mut non_terminal_successors = BTreeMap::new();
    let mut lookaheads_with_conflicts = TokenSet::new();
    let mut reduction_infos = HashMap::<Symbol, ReductionInfo>::new();

    // Each item in the item set contributes to either a Shift action or a
    // Reduce action in this state.
    for ParseItemSetEntry {
        item,
        lookaheads,
        following_reserved_word_set: reserved_lookaheads,
    } in &item_set.entries
    {
        // If the item is unfinished, then this state has a transition for the item's
        // next symbol. Advance the item to its next step and insert the resulting
        // item into the successor item set.
        if let Some(next_symbol) = item.symbol() {
            let mut successor = item.successor();
            let successor_set = if next_symbol.is_non_terminal() {
                let variable = &self.syntax_grammar.variables[next_symbol.index];

                // Keep track of where auxiliary non-terminals (repeat symbols) are
                // used within visible symbols. This information may be needed later
                // for conflict resolution.
                if variable.is_auxiliary() {
                    preceding_auxiliary_symbols
                        .push(self.get_auxiliary_node_info(item_set, next_symbol));
                }

                // For most parse items, the symbols associated with the preceding children
                // don't matter: they have no effect on the REDUCE action that would be
                // performed at the end of the item. But the symbols *do* matter for
                // children that are hidden and have fields, because those fields are
                // "inherited" by the parent node.
                //
                // If this item has consumed a hidden child with fields, then the symbols
                // of its preceding children need to be taken into account when comparing
                // it with other items.
                if variable.is_hidden()
                    && !self.variable_info[next_symbol.index].fields.is_empty()
                {
                    successor.has_preceding_inherited_fields = true;
                }

                non_terminal_successors
                    .entry(next_symbol)
                    .or_insert_with(ParseItemSet::default)
            } else {
                terminal_successors
                    .entry(next_symbol)
                    .or_insert_with(ParseItemSet::default)
            };
            // Merge this item's lookaheads into the successor entry, and
            // keep the larger of the two reserved word sets.
            let successor_entry = successor_set.insert(successor);
            successor_entry.lookaheads.insert_all(lookaheads);
            successor_entry.following_reserved_word_set = successor_entry
                .following_reserved_word_set
                .max(*reserved_lookaheads);
        }
        // If the item is finished, then add a Reduce action to this state based
        // on this item.
        else {
            let symbol = Symbol::non_terminal(item.variable_index as usize);
            let action = if item.is_augmented() {
                ParseAction::Accept
            } else {
                ParseAction::Reduce {
                    symbol,
                    child_count: item.step_index as usize,
                    dynamic_precedence: item.production.dynamic_precedence,
                    production_id: self.get_production_id(item),
                }
            };
            let precedence = item.precedence();
            let associativity = item.associativity();
            for lookahead in lookaheads.iter() {
                let table_entry = self.parse_table.states[state_id]
                    .terminal_entries
                    .entry(lookahead)
                    .or_insert_with(ParseTableEntry::new);
                let reduction_info = reduction_infos.entry(lookahead).or_default();

                // While inserting Reduce actions, eagerly resolve conflicts related
                // to precedence: avoid inserting lower-precedence reductions, and
                // clear the action list when inserting higher-precedence reductions.
                if table_entry.actions.is_empty() {
                    table_entry.actions.push(action);
                } else {
                    match Self::compare_precedence(
                        self.syntax_grammar,
                        precedence,
                        &[symbol],
                        &reduction_info.precedence,
                        &reduction_info.symbols,
                    ) {
                        Ordering::Greater => {
                            // Higher precedence wins outright: discard the
                            // previous actions and any recorded conflict.
                            table_entry.actions.clear();
                            table_entry.actions.push(action);
                            lookaheads_with_conflicts.remove(&lookahead);
                            *reduction_info = ReductionInfo::default();
                        }
                        Ordering::Equal => {
                            // Equal precedence: keep both actions and mark
                            // this lookahead for conflict resolution.
                            table_entry.actions.push(action);
                            lookaheads_with_conflicts.insert(lookahead);
                        }
                        Ordering::Less => continue,
                    }
                }

                reduction_info.precedence.clone_from(precedence);
                if let Err(i) = reduction_info.symbols.binary_search(&symbol) {
                    reduction_info.symbols.insert(i, symbol);
                }
                match associativity {
                    Some(Associativity::Left) => reduction_info.has_left_assoc = true,
                    Some(Associativity::Right) => reduction_info.has_right_assoc = true,
                    None => reduction_info.has_non_assoc = true,
                }
            }
        }
    }

    preceding_auxiliary_symbols.dedup();

    // Having computed the successor item sets for each symbol, add a new
    // parse state for each of these item sets, and add a corresponding Shift
    // action to this state.
    for (symbol, next_item_set) in terminal_successors {
        preceding_symbols.push(symbol);
        let next_state_id = self.add_parse_state(
            &preceding_symbols,
            &preceding_auxiliary_symbols,
            next_item_set,
        );
        preceding_symbols.pop();
        let entry = self.parse_table.states[state_id]
            .terminal_entries
            .entry(symbol);
        // A Shift on a lookahead that already has Reduce actions is a
        // shift/reduce conflict; record it for resolution below.
        if let Entry::Occupied(e) = &entry {
            if !e.get().actions.is_empty() {
                lookaheads_with_conflicts.insert(symbol);
            }
        }
        entry
            .or_insert_with(ParseTableEntry::new)
            .actions
            .push(ParseAction::Shift {
                state: next_state_id,
                is_repetition: false,
            });
    }

    for (symbol, next_item_set) in non_terminal_successors {
        preceding_symbols.push(symbol);
        let next_state_id = self.add_parse_state(
            &preceding_symbols,
            &preceding_auxiliary_symbols,
            next_item_set,
        );
        preceding_symbols.pop();
        self.parse_table.states[state_id]
            .nonterminal_entries
            .insert(symbol, GotoAction::Goto(next_state_id));
    }

    // For any symbol with multiple actions, perform conflict resolution.
    // This will either
    // * choose one action over the others using precedence or associativity
    // * keep multiple actions if this conflict has been whitelisted in the grammar
    // * fail, terminating the parser generation process
    for symbol in lookaheads_with_conflicts.iter() {
        self.handle_conflict(
            item_set,
            state_id,
            &preceding_symbols,
            &preceding_auxiliary_symbols,
            symbol,
            reduction_infos.get(&symbol).unwrap(),
        )?;
    }

    // Add actions for the grammar's `extra` symbols.
    let state = &mut self.parse_table.states[state_id];
    let is_end_of_non_terminal_extra = state.is_end_of_non_terminal_extra();

    // If this state represents the end of a non-terminal extra rule, then make sure that
    // it doesn't have other successor states. Non-terminal extra rules must have
    // unambiguous endings.
    if is_end_of_non_terminal_extra {
        if state.terminal_entries.len() > 1 {
            // Collect the names of the rules involved in the ambiguity for
            // the error message.
            let parent_symbols = item_set
                .entries
                .iter()
                .filter_map(|ParseItemSetEntry { item, .. }| {
                    if !item.is_augmented() && item.step_index > 0 {
                        Some(item.variable_index)
                    } else {
                        None
                    }
                })
                .collect::<HashSet<_>>();
            let parent_symbol_names = parent_symbols
                .iter()
                .map(|&variable_index| {
                    self.syntax_grammar.variables[variable_index as usize]
                        .name
                        .clone()
                })
                .collect::<Vec<_>>();
            Err(AmbiguousExtraError {
                parent_symbols: parent_symbol_names,
            })?;
        }
    }
    // Add actions for the start tokens of each non-terminal extra rule.
    else {
        for (terminal, state_id) in &self.non_terminal_extra_states {
            state
                .terminal_entries
                .entry(*terminal)
                .or_insert(ParseTableEntry {
                    reusable: true,
                    actions: vec![ParseAction::Shift {
                        state: *state_id,
                        is_repetition: false,
                    }],
                });
        }

        // Add ShiftExtra actions for the terminal extra tokens. These actions
        // are added to every state except for those at the ends of non-terminal
        // extras.
        for extra_token in &self.syntax_grammar.extra_symbols {
            if extra_token.is_non_terminal() {
                state
                    .nonterminal_entries
                    .insert(*extra_token, GotoAction::ShiftExtra);
            } else {
                state
                    .terminal_entries
                    .entry(*extra_token)
                    .or_insert(ParseTableEntry {
                        reusable: true,
                        actions: vec![ParseAction::ShiftExtra],
                    });
            }
        }
    }

    // If the grammar has a word token, compute the reserved-word set active
    // in this state: the maximum set id over every item about to consume
    // the word token, plus every finished item whose lookaheads include it.
    if let Some(keyword_capture_token) = self.syntax_grammar.word_token {
        let reserved_word_set_id = item_set
            .entries
            .iter()
            .filter_map(|entry| {
                if let Some(next_step) = entry.item.step() {
                    if next_step.symbol == keyword_capture_token {
                        Some(next_step.reserved_word_set_id)
                    } else {
                        None
                    }
                } else if entry.lookaheads.contains(&keyword_capture_token) {
                    Some(entry.following_reserved_word_set)
                } else {
                    None
                }
            })
            .max();
        if let Some(reserved_word_set_id) = reserved_word_set_id {
            state.reserved_words =
                self.syntax_grammar.reserved_word_sets[reserved_word_set_id.0].clone();
        }
    }
    Ok(())
}
fn handle_conflict(
&mut self,
item_set: &ParseItemSet,
state_id: ParseStateId,
preceding_symbols: &SymbolSequence,
preceding_auxiliary_symbols: &[AuxiliarySymbolInfo],
conflicting_lookahead: Symbol,
reduction_info: &ReductionInfo,
) -> BuildTableResult<()> {
let entry = self.parse_table.states[state_id]
.terminal_entries
.get_mut(&conflicting_lookahead)
.unwrap();
// Determine which items in the set conflict with each other, and the
// precedences associated with SHIFT vs REDUCE actions. There won't
// be multiple REDUCE actions with different precedences; that is
// sorted out ahead of time in `add_actions`. But there can still be
// REDUCE-REDUCE conflicts where all actions have the *same*
// precedence, and there can still be SHIFT/REDUCE conflicts.
let mut considered_associativity = false;
let mut shift_precedence = Vec::<(&Precedence, Symbol)>::new();
let mut conflicting_items = BTreeSet::new();
for ParseItemSetEntry {
item, lookaheads, ..
} in &item_set.entries
{
if let Some(step) = item.step() {
if item.step_index > 0
&& self
.item_set_builder
.first_set(&step.symbol)
.contains(&conflicting_lookahead)
{
if item.variable_index != u32::MAX {
conflicting_items.insert(item);
}
let p = (
item.precedence(),
Symbol::non_terminal(item.variable_index as usize),
);
if let Err(i) = shift_precedence.binary_search(&p) {
shift_precedence.insert(i, p);
}
}
} else if lookaheads.contains(&conflicting_lookahead) && item.variable_index != u32::MAX
{
conflicting_items.insert(item);
}
}
if let ParseAction::Shift { is_repetition, .. } = entry.actions.last_mut().unwrap() {
// If all of the items in the conflict have the same parent symbol,
// and that parent symbols is auxiliary, then this is just the intentional
// ambiguity associated with a repeat rule. Resolve that class of ambiguity
// by leaving it in the parse table, but marking the SHIFT action with
// an `is_repetition` flag.
let conflicting_variable_index =
conflicting_items.iter().next().unwrap().variable_index;
if self.syntax_grammar.variables[conflicting_variable_index as usize].is_auxiliary()
&& conflicting_items
.iter()
.all(|item| item.variable_index == conflicting_variable_index)
{
*is_repetition = true;
return Ok(());
}
// If the SHIFT action has higher precedence, remove all the REDUCE actions.
let mut shift_is_less = false;
let mut shift_is_more = false;
for p in shift_precedence {
match Self::compare_precedence(
self.syntax_grammar,
p.0,
&[p.1],
&reduction_info.precedence,
&reduction_info.symbols,
) {
Ordering::Greater => shift_is_more = true,
Ordering::Less => shift_is_less = true,
Ordering::Equal => {}
}
}
if shift_is_more && !shift_is_less {
entry.actions.drain(0..entry.actions.len() - 1);
}
// If the REDUCE actions have higher precedence, remove the SHIFT action.
else if shift_is_less && !shift_is_more {
entry.actions.pop();
conflicting_items.retain(|item| item.is_done());
}
// If the SHIFT and REDUCE actions have the same predence, consider
// the REDUCE actions' associativity.
else if !shift_is_less && !shift_is_more {
considered_associativity = true;
// If all Reduce actions are left associative, remove the SHIFT action.
// If all Reduce actions are right associative, remove the REDUCE actions.
match (
reduction_info.has_left_assoc,
reduction_info.has_non_assoc,
reduction_info.has_right_assoc,
) {
(true, false, false) => {
entry.actions.pop();
conflicting_items.retain(|item| item.is_done());
}
(false, false, true) => {
entry.actions.drain(0..entry.actions.len() - 1);
}
_ => {}
}
}
}
// If all of the actions but one have been eliminated, then there's no problem.
let entry = self.parse_table.states[state_id]
.terminal_entries
.get_mut(&conflicting_lookahead)
.unwrap();
if entry.actions.len() == 1 {
return Ok(());
}
// Determine the set of parent symbols involved in this conflict.
let mut actual_conflict = Vec::new();
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | true |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/build_tables/minimize_parse_table.rs | crates/generate/src/build_tables/minimize_parse_table.rs | use std::{
collections::{HashMap, HashSet},
mem,
};
use log::debug;
use super::token_conflicts::TokenConflictMap;
use crate::{
dedup::split_state_id_groups,
grammars::{LexicalGrammar, SyntaxGrammar, VariableType},
rules::{AliasMap, Symbol, TokenSet},
tables::{GotoAction, ParseAction, ParseState, ParseStateId, ParseTable, ParseTableEntry},
OptLevel,
};
/// Runs the parse-table minimization passes in order: an optional
/// state-merging pass (gated by `OptLevel::MergeStates`), then
/// unit-reduction removal, unused-state removal, and a final reordering of
/// states by descending size.
pub fn minimize_parse_table(
    parse_table: &mut ParseTable,
    syntax_grammar: &SyntaxGrammar,
    lexical_grammar: &LexicalGrammar,
    simple_aliases: &AliasMap,
    token_conflict_map: &TokenConflictMap,
    keywords: &TokenSet,
    optimizations: OptLevel,
) {
    // Bundle everything the individual passes share into one helper.
    let mut pass = Minimizer {
        parse_table,
        syntax_grammar,
        lexical_grammar,
        token_conflict_map,
        keywords,
        simple_aliases,
    };
    // State merging is the only optional pass; the remaining passes always
    // run, in this order.
    if optimizations.contains(OptLevel::MergeStates) {
        pass.merge_compatible_states();
    }
    pass.remove_unit_reductions();
    pass.remove_unused_states();
    pass.reorder_states_by_descending_size();
}
/// Helper that carries the table and grammar data shared by all of the
/// minimization passes in this module.
struct Minimizer<'a> {
    parse_table: &'a mut ParseTable,
    syntax_grammar: &'a SyntaxGrammar,
    lexical_grammar: &'a LexicalGrammar,
    // Precomputed lexical conflict/overlap data between token pairs.
    token_conflict_map: &'a TokenConflictMap<'a>,
    // Tokens that the grammar's word token can capture.
    keywords: &'a TokenSet,
    // Non-terminals that always carry a simple alias.
    simple_aliases: &'a AliasMap,
}
impl Minimizer<'_> {
/// Eliminates states whose only purpose is a single "unit reduction"
/// (a Reduce of exactly one child using the default production of a
/// hidden, unaliased, non-supertype, non-extra symbol), rerouting all
/// references to such states directly to the state reached after the
/// reduction.
fn remove_unit_reductions(&mut self) {
    // Symbols that are aliased anywhere in the grammar must be preserved,
    // since the alias is applied when the symbol is reduced.
    let mut aliased_symbols = HashSet::new();
    for variable in &self.syntax_grammar.variables {
        for production in &variable.productions {
            for step in &production.steps {
                if step.alias.is_some() {
                    aliased_symbols.insert(step.symbol);
                }
            }
        }
    }

    // Find every state whose terminal actions (ignoring ShiftExtra) are
    // all unit reductions of one and the same eligible symbol.
    let mut unit_reduction_symbols_by_state = HashMap::new();
    for (i, state) in self.parse_table.states.iter().enumerate() {
        let mut only_unit_reductions = true;
        let mut unit_reduction_symbol = None;
        for (_, entry) in &state.terminal_entries {
            for action in &entry.actions {
                match action {
                    ParseAction::ShiftExtra => continue,
                    ParseAction::Reduce {
                        child_count: 1,
                        production_id: 0,
                        symbol,
                        ..
                    } if !self.simple_aliases.contains_key(symbol)
                        && !self.syntax_grammar.supertype_symbols.contains(symbol)
                        && !self.syntax_grammar.extra_symbols.contains(symbol)
                        && !aliased_symbols.contains(symbol)
                        && self.syntax_grammar.variables[symbol.index].kind
                            != VariableType::Named
                        && (unit_reduction_symbol.is_none()
                            || unit_reduction_symbol == Some(symbol)) =>
                    {
                        unit_reduction_symbol = Some(symbol);
                        continue;
                    }
                    _ => {}
                }
                only_unit_reductions = false;
                break;
            }

            if !only_unit_reductions {
                break;
            }
        }

        if let Some(symbol) = unit_reduction_symbol {
            if only_unit_reductions {
                unit_reduction_symbols_by_state.insert(i, *symbol);
            }
        }
    }

    // Bypass the unit-reduction states: follow each reference through the
    // reducing state's Goto entry, repeating until a fixed point so that
    // chains of unit reductions are fully collapsed.
    for state in &mut self.parse_table.states {
        let mut done = false;
        while !done {
            done = true;
            state.update_referenced_states(|other_state_id, state| {
                unit_reduction_symbols_by_state.get(&other_state_id).map_or(
                    other_state_id,
                    |symbol| {
                        done = false;
                        match state.nonterminal_entries.get(symbol) {
                            Some(GotoAction::Goto(state_id)) => *state_id,
                            _ => other_state_id,
                        }
                    },
                )
            });
        }
    }
}
/// Merges parse states that are provably interchangeable.
///
/// States are first grouped by their item-set core id, then the groups
/// are split by direct incompatibilities (`states_conflict`) and then
/// repeatedly by successors that land in different groups
/// (`state_successors_differ`) until a fixed point. Each remaining group
/// becomes one merged state.
fn merge_compatible_states(&mut self) {
    let core_count = 1 + self
        .parse_table
        .states
        .iter()
        .map(|state| state.core_id)
        .max()
        .unwrap();

    // Initially group the states by their parse item set core.
    let mut group_ids_by_state_id = Vec::with_capacity(self.parse_table.states.len());
    let mut state_ids_by_group_id = vec![Vec::<ParseStateId>::new(); core_count];
    for (i, state) in self.parse_table.states.iter().enumerate() {
        state_ids_by_group_id[state.core_id].push(i);
        group_ids_by_state_id.push(state.core_id);
    }

    split_state_id_groups(
        &self.parse_table.states,
        &mut state_ids_by_group_id,
        &mut group_ids_by_state_id,
        0,
        |left, right, groups| self.states_conflict(left, right, groups),
    );

    while split_state_id_groups(
        &self.parse_table.states,
        &mut state_ids_by_group_id,
        &mut group_ids_by_state_id,
        0,
        |left, right, groups| self.state_successors_differ(left, right, groups),
    ) {}

    // Keep the error state's group at index 0 and the start state's group
    // at index 1, matching the table's fixed layout.
    let error_group_index = state_ids_by_group_id
        .iter()
        .position(|g| g.contains(&0))
        .unwrap();
    let start_group_index = state_ids_by_group_id
        .iter()
        .position(|g| g.contains(&1))
        .unwrap();
    state_ids_by_group_id.swap(error_group_index, 0);
    state_ids_by_group_id.swap(start_group_index, 1);

    // Create a list of new parse states: one state for each group of old states.
    let mut new_states = Vec::with_capacity(state_ids_by_group_id.len());
    for state_ids in &state_ids_by_group_id {
        // Initialize the new state based on the first old state in the group.
        let mut parse_state = mem::take(&mut self.parse_table.states[state_ids[0]]);

        // Extend the new state with all of the actions from the other old states
        // in the group.
        for state_id in &state_ids[1..] {
            let other_parse_state = mem::take(&mut self.parse_table.states[*state_id]);

            parse_state
                .terminal_entries
                .extend(other_parse_state.terminal_entries);
            parse_state
                .nonterminal_entries
                .extend(other_parse_state.nonterminal_entries);
            parse_state
                .reserved_words
                .insert_all(&other_parse_state.reserved_words);
            // A token that now has a real action must not also remain a
            // reserved word.
            for symbol in parse_state.terminal_entries.keys() {
                parse_state.reserved_words.remove(symbol);
            }
        }

        // Update the new state's outgoing references using the new grouping.
        parse_state.update_referenced_states(|state_id, _| group_ids_by_state_id[state_id]);

        new_states.push(parse_state);
    }

    self.parse_table.states = new_states;
}
/// Reports whether two states in the same group cannot be merged: either
/// a token that both states know has incompatible entries, or a token
/// that only one state knows would conflict lexically with the other
/// state's tokens.
fn states_conflict(
    &self,
    left_state: &ParseState,
    right_state: &ParseState,
    group_ids_by_state_id: &[ParseStateId],
) -> bool {
    // Tokens known to the left state: compare entries when the right
    // state also has the token, otherwise check the token for lexical
    // conflicts against the right state.
    let left_side_conflicts =
        left_state
            .terminal_entries
            .iter()
            .any(|(token, left_entry)| match right_state.terminal_entries.get(token) {
                Some(right_entry) => self.entries_conflict(
                    left_state.id,
                    right_state.id,
                    token,
                    left_entry,
                    right_entry,
                    group_ids_by_state_id,
                ),
                None => {
                    self.token_conflicts(left_state.id, right_state.id, right_state, *token)
                }
            });
    if left_side_conflicts {
        return true;
    }

    // Tokens that only the right state knows, checked against the left
    // state for lexical conflicts.
    right_state.terminal_entries.keys().any(|token| {
        !left_state.terminal_entries.contains_key(token)
            && self.token_conflicts(left_state.id, right_state.id, left_state, *token)
    })
}
/// Reports whether two candidate-merge states have successors (via a
/// final Shift action or a Goto entry) that currently belong to different
/// groups; such states must be split apart, since merging them would
/// conflate distinct successor states.
fn state_successors_differ(
    &self,
    state1: &ParseState,
    state2: &ParseState,
    group_ids_by_state_id: &[ParseStateId],
) -> bool {
    // Compare the Shift destinations for every token both states know.
    for (token, entry1) in &state1.terminal_entries {
        if let ParseAction::Shift { state: s1, .. } = entry1.actions.last().unwrap() {
            if let Some(entry2) = state2.terminal_entries.get(token) {
                if let ParseAction::Shift { state: s2, .. } = entry2.actions.last().unwrap() {
                    let group1 = group_ids_by_state_id[*s1];
                    let group2 = group_ids_by_state_id[*s2];
                    if group1 != group2 {
                        debug!(
                            "split states {} {} - successors for {} are split: {s1} {s2}",
                            state1.id,
                            state2.id,
                            self.symbol_name(token),
                        );
                        return true;
                    }
                }
            }
        }
    }

    // Compare the Goto destinations for every non-terminal both states
    // know; a Goto paired with a ShiftExtra is always a mismatch.
    for (symbol, s1) in &state1.nonterminal_entries {
        if let Some(s2) = state2.nonterminal_entries.get(symbol) {
            match (s1, s2) {
                (GotoAction::ShiftExtra, GotoAction::ShiftExtra) => {}
                (GotoAction::Goto(s1), GotoAction::Goto(s2)) => {
                    let group1 = group_ids_by_state_id[*s1];
                    let group2 = group_ids_by_state_id[*s2];
                    if group1 != group2 {
                        debug!(
                            "split states {} {} - successors for {} are split: {s1} {s2}",
                            state1.id,
                            state2.id,
                            self.symbol_name(symbol),
                        );
                        return true;
                    }
                }
                _ => return true,
            }
        }
    }
    false
}
/// Reports whether two table entries for the same token are incompatible.
/// Compatible entries must have the same number of actions, with each
/// pair of actions either equal, or — for Shift actions — leading into
/// the same group with the same repetition flag.
fn entries_conflict(
    &self,
    state_id1: ParseStateId,
    state_id2: ParseStateId,
    token: &Symbol,
    entry1: &ParseTableEntry,
    entry2: &ParseTableEntry,
    group_ids_by_state_id: &[ParseStateId],
) -> bool {
    // To be compatible, entries need to have the same actions.
    let actions1 = &entry1.actions;
    let actions2 = &entry2.actions;
    if actions1.len() != actions2.len() {
        debug!(
            "split states {state_id1} {state_id2} - differing action counts for token {}",
            self.symbol_name(token)
        );
        return true;
    }

    for (action1, action2) in actions1.iter().zip(actions2.iter()) {
        // Two shift actions are equivalent if their destinations are in the same group.
        if let (
            ParseAction::Shift {
                state: s1,
                is_repetition: is_repetition1,
            },
            ParseAction::Shift {
                state: s2,
                is_repetition: is_repetition2,
            },
        ) = (action1, action2)
        {
            let group1 = group_ids_by_state_id[*s1];
            let group2 = group_ids_by_state_id[*s2];
            if group1 == group2 && is_repetition1 == is_repetition2 {
                continue;
            }
            debug!(
                "split states {state_id1} {state_id2} - successors for {} are split: {s1} {s2}",
                self.symbol_name(token),
            );
            return true;
        } else if action1 != action2 {
            debug!(
                "split states {state_id1} {state_id2} - unequal actions for {}",
                self.symbol_name(token),
            );
            return true;
        }
    }
    false
}
/// Reports whether introducing `new_token` as a lookahead of
/// `right_state` (which currently does not have it) could change lexical
/// behavior, which would make merging the two states unsafe.
fn token_conflicts(
    &self,
    left_id: ParseStateId,
    right_id: ParseStateId,
    right_state: &ParseState,
    new_token: Symbol,
) -> bool {
    // The end-of-nonterminal-extra marker is never safe to introduce.
    if new_token == Symbol::end_of_nonterminal_extra() {
        debug!("split states {left_id} {right_id} - end of non-terminal extra",);
        return true;
    }

    // Do not add external tokens; they could conflict lexically with any of the state's
    // existing lookahead tokens.
    if new_token.is_external() {
        debug!(
            "split states {left_id} {right_id} - external token {}",
            self.symbol_name(&new_token),
        );
        return true;
    }

    // A token the state already treats as a reserved word is safe to add.
    if right_state.reserved_words.contains(&new_token) {
        return false;
    }

    // Do not add tokens which are both internal and external. Their validity could
    // influence the behavior of the external scanner.
    if self
        .syntax_grammar
        .external_tokens
        .iter()
        .any(|external| external.corresponding_internal_token == Some(new_token))
    {
        debug!(
            "split states {left_id} {right_id} - internal/external token {}",
            self.symbol_name(&new_token),
        );
        return true;
    }

    // Do not add a token if it conflicts with an existing token.
    for token in right_state.terminal_entries.keys().copied() {
        if !token.is_terminal() {
            continue;
        }
        // Keywords never conflict with the word token that captures them,
        // in either direction.
        if self.syntax_grammar.word_token == Some(token) && self.keywords.contains(&new_token) {
            continue;
        }
        if self.syntax_grammar.word_token == Some(new_token) && self.keywords.contains(&token) {
            continue;
        }
        if self
            .token_conflict_map
            .does_conflict(new_token.index, token.index)
            || self
                .token_conflict_map
                .does_match_same_string(new_token.index, token.index)
        {
            debug!(
                "split states {} {} - token {} conflicts with {}",
                left_id,
                right_id,
                self.symbol_name(&new_token),
                self.symbol_name(&token),
            );
            return true;
        }
    }
    false
}
/// Looks up the display name of a symbol in whichever grammar owns it
/// (syntax variables, external tokens, or lexical variables).
fn symbol_name(&self, symbol: &Symbol) -> &String {
    if symbol.is_non_terminal() {
        return &self.syntax_grammar.variables[symbol.index].name;
    }
    if symbol.is_external() {
        return &self.syntax_grammar.external_tokens[symbol.index].name;
    }
    &self.lexical_grammar.variables[symbol.index].name
}
/// Removes states that no remaining state references (the error state 0
/// and the start state 1 are always kept), compacting the state ids and
/// rewriting every surviving reference to the new numbering.
fn remove_unused_states(&mut self) {
    // Mark states that are referenced. NOTE(review): this is a single
    // pass over direct references with states 0 and 1 pinned, not a full
    // reachability traversal from the roots.
    let mut state_usage_map = vec![false; self.parse_table.states.len()];
    state_usage_map[0] = true;
    state_usage_map[1] = true;

    for state in &self.parse_table.states {
        for referenced_state in state.referenced_states() {
            state_usage_map[referenced_state] = true;
        }
    }

    // For each old id, precompute the id it will have once all unused
    // predecessors are removed.
    let mut removed_predecessor_count = 0;
    let mut state_replacement_map = vec![0; self.parse_table.states.len()];
    for state_id in 0..self.parse_table.states.len() {
        state_replacement_map[state_id] = state_id - removed_predecessor_count;
        if !state_usage_map[state_id] {
            removed_predecessor_count += 1;
        }
    }

    // Walk with two cursors: `original_state_id` indexes the usage map
    // (pre-removal numbering) while `state_id` tracks the position in the
    // shrinking vector.
    let mut state_id = 0;
    let mut original_state_id = 0;
    while state_id < self.parse_table.states.len() {
        if state_usage_map[original_state_id] {
            self.parse_table.states[state_id].update_referenced_states(|other_state_id, _| {
                state_replacement_map[other_state_id]
            });
            state_id += 1;
        } else {
            self.parse_table.states.remove(state_id);
        }
        original_state_id += 1;
    }
}
/// Renumbers the parse states so that, after the fixed error state (0)
/// and start state (1), states with more symbol entries come first, then
/// rewrites every state reference to the new numbering.
fn reorder_states_by_descending_size(&mut self) {
    // Build the permutation: old_ids_by_new_id[new] == old. States 0 and
    // 1 stay in place because their sort keys are far below anything a
    // negated entry count can produce.
    let mut old_ids_by_new_id = (0..self.parse_table.states.len()).collect::<Vec<_>>();
    old_ids_by_new_id.sort_unstable_by_key(|&old_id| {
        if old_id <= 1 {
            return old_id as i64 - 1_000_000;
        }
        let state = &self.parse_table.states[old_id];
        let entry_count = state.terminal_entries.len() + state.nonterminal_entries.len();
        -(entry_count as i64)
    });

    // Invert the permutation: new_ids_by_old_id[old] == new.
    let mut new_ids_by_old_id = vec![0; old_ids_by_new_id.len()];
    for (new_id, &old_id) in old_ids_by_new_id.iter().enumerate() {
        new_ids_by_old_id[old_id] = new_id;
    }

    // Move each state into its new slot and remap its outgoing references
    // to the new ids.
    self.parse_table.states = old_ids_by_new_id
        .iter()
        .map(|&old_id| {
            let mut state = mem::take(&mut self.parse_table.states[old_id]);
            state.update_referenced_states(|id, _| new_ids_by_old_id[id]);
            state
        })
        .collect();
}
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/build_tables/item_set_builder.rs | crates/generate/src/build_tables/item_set_builder.rs | use std::{
collections::{HashMap, HashSet},
fmt,
};
use super::item::{ParseItem, ParseItemDisplay, ParseItemSet, ParseItemSetEntry, TokenSetDisplay};
use crate::{
grammars::{InlinedProductionMap, LexicalGrammar, ReservedWordSetId, SyntaxGrammar},
rules::{Symbol, SymbolType, TokenSet},
};
/// A single precomputed item that must be added to a parse item set when a
/// particular non-terminal appears as the next symbol of a core item, paired
/// with the lookahead information that applies to it.
#[derive(Clone, Debug, PartialEq, Eq)]
struct TransitiveClosureAddition<'a> {
    /// The item to add during transitive-closure expansion.
    item: ParseItem<'a>,
    /// The lookahead/reserved-word info that accompanies the added item.
    info: FollowSetInfo,
}
/// Lookahead information attached to a transitive-closure addition.
#[derive(Clone, Debug, Default, PartialEq, Eq)]
struct FollowSetInfo {
    /// Tokens that can always follow the added item.
    lookaheads: TokenSet,
    /// Reserved-word set id that can follow the added item.
    reserved_lookaheads: ReservedWordSetId,
    /// Whether the item can occur at the *end* of the expansion, so the
    /// expanding item's own lookaheads also apply to it.
    propagates_lookaheads: bool,
}
/// Precomputes per-symbol information (FIRST/LAST sets, reserved-word FIRST
/// sets, and transitive-closure additions) so that parse item sets can be
/// expanded to their transitive closures without repeated recursive work.
pub struct ParseItemSetBuilder<'a> {
    syntax_grammar: &'a SyntaxGrammar,
    lexical_grammar: &'a LexicalGrammar,
    /// FIRST set for every symbol: terminals that can begin it.
    first_sets: HashMap<Symbol, TokenSet>,
    /// Strongest reserved-word set id reachable at the start of each symbol.
    reserved_first_sets: HashMap<Symbol, ReservedWordSetId>,
    /// LAST set for every symbol: terminals that can end it.
    last_sets: HashMap<Symbol, TokenSet>,
    inlines: &'a InlinedProductionMap,
    /// For each non-terminal index, the precomputed additions used to expand
    /// item sets (see the long comment in `new`).
    transitive_closure_additions: Vec<Vec<TransitiveClosureAddition<'a>>>,
}
/// Appends `value` to `vector` only if an equal element is not already present,
/// preserving the existing order.
fn find_or_push<T: Eq>(vector: &mut Vec<T>, value: T) {
    let already_present = vector.iter().any(|existing| *existing == value);
    if !already_present {
        vector.push(value);
    }
}
impl<'a> ParseItemSetBuilder<'a> {
    /// Constructs a builder for the given grammars, precomputing:
    /// * the FIRST and LAST sets for every grammar symbol, and
    /// * for every non-terminal, the list of *additions* needed to expand an
    ///   item set to its transitive closure (see the long comment below).
    pub fn new(
        syntax_grammar: &'a SyntaxGrammar,
        lexical_grammar: &'a LexicalGrammar,
        inlines: &'a InlinedProductionMap,
    ) -> Self {
        let mut result = Self {
            syntax_grammar,
            lexical_grammar,
            first_sets: HashMap::new(),
            reserved_first_sets: HashMap::new(),
            last_sets: HashMap::new(),
            inlines,
            transitive_closure_additions: vec![Vec::new(); syntax_grammar.variables.len()],
        };
        // For each grammar symbol, populate the FIRST and LAST sets: the set of
        // terminals that appear at the beginning and end that symbol's productions,
        // respectively.
        // For a terminal symbol, the FIRST and LAST sets just consist of the
        // terminal itself.
        for i in 0..lexical_grammar.variables.len() {
            let symbol = Symbol::terminal(i);
            let mut set = TokenSet::new();
            set.insert(symbol);
            result.first_sets.insert(symbol, set.clone());
            result.last_sets.insert(symbol, set);
            result
                .reserved_first_sets
                .insert(symbol, ReservedWordSetId::default());
        }
        // External tokens behave like terminals for FIRST/LAST purposes.
        for i in 0..syntax_grammar.external_tokens.len() {
            let symbol = Symbol::external(i);
            let mut set = TokenSet::new();
            set.insert(symbol);
            result.first_sets.insert(symbol, set.clone());
            result.last_sets.insert(symbol, set);
            result
                .reserved_first_sets
                .insert(symbol, ReservedWordSetId::default());
        }
        // The FIRST set of a non-terminal `i` is the union of the FIRST sets
        // of all the symbols that appear at the beginnings of i's productions. Some
        // of these symbols may themselves be non-terminals, so this is a recursive
        // definition.
        //
        // Rather than computing these sets using recursion, we use an explicit stack
        // called `symbols_to_process`.
        let mut symbols_to_process = Vec::new();
        let mut processed_non_terminals = HashSet::new();
        for i in 0..syntax_grammar.variables.len() {
            let symbol = Symbol::non_terminal(i);
            let first_set = result.first_sets.entry(symbol).or_default();
            let reserved_first_set = result.reserved_first_sets.entry(symbol).or_default();
            processed_non_terminals.clear();
            symbols_to_process.clear();
            symbols_to_process.push(symbol);
            while let Some(sym) = symbols_to_process.pop() {
                for production in &syntax_grammar.variables[sym.index].productions {
                    if let Some(step) = production.steps.first() {
                        if step.symbol.is_terminal() || step.symbol.is_external() {
                            first_set.insert(step.symbol);
                        } else if processed_non_terminals.insert(step.symbol) {
                            // Only enqueue non-terminals we haven't visited,
                            // so cyclic grammars don't loop forever.
                            symbols_to_process.push(step.symbol);
                        }
                        // Keep the strongest reserved-word set id seen on any
                        // reachable first step.
                        *reserved_first_set = (*reserved_first_set).max(step.reserved_word_set_id);
                    }
                }
            }
            // The LAST set is defined in a similar way to the FIRST set.
            let last_set = result.last_sets.entry(symbol).or_default();
            processed_non_terminals.clear();
            symbols_to_process.clear();
            symbols_to_process.push(symbol);
            while let Some(sym) = symbols_to_process.pop() {
                for production in &syntax_grammar.variables[sym.index].productions {
                    if let Some(step) = production.steps.last() {
                        if step.symbol.is_terminal() || step.symbol.is_external() {
                            last_set.insert(step.symbol);
                        } else if processed_non_terminals.insert(step.symbol) {
                            symbols_to_process.push(step.symbol);
                        }
                    }
                }
            }
        }
        // To compute an item set's transitive closure, we find each item in the set
        // whose next symbol is a non-terminal, and we add new items to the set for
        // each of that symbol's productions. These productions might themselves begin
        // with non-terminals, so the process continues recursively. In this process,
        // the total set of entries that get added depends only on two things:
        //
        // * the non-terminal symbol that occurs next in each item
        //
        // * the set of terminals that can follow that non-terminal symbol in the item
        //
        // So we can avoid a lot of duplicated recursive work by precomputing, for each
        // non-terminal symbol `i`, a final list of *additions* that must be made to an
        // item set when symbol `i` occurs as the next symbol in one of its core items.
        // The structure of a precomputed *addition* is as follows:
        //
        // * `item` - the new item that must be added as part of the expansion of the symbol `i`.
        //
        // * `lookaheads` - the set of possible lookahead tokens that can always come after `item`
        //   in an expansion of symbol `i`.
        //
        // * `reserved_lookaheads` - the set of reserved lookahead tokens that can
        //   always come after `item` in the expansion of symbol `i`.
        //
        // * `propagates_lookaheads` - a boolean indicating whether or not `item` can occur at the
        //   *end* of the expansion of symbol `i`, so that i's own current lookahead tokens can
        //   occur after `item`.
        //
        // Rather than computing these additions recursively, we use an explicit stack.
        let empty_lookaheads = TokenSet::new();
        let mut stack = Vec::new();
        let mut follow_set_info_by_non_terminal = HashMap::<usize, FollowSetInfo>::new();
        for i in 0..syntax_grammar.variables.len() {
            // First, build up a map whose keys are all of the non-terminals that can
            // appear at the beginning of non-terminal `i`, and whose values store
            // information about the tokens that can follow those non-terminals.
            stack.clear();
            stack.push((i, &empty_lookaheads, ReservedWordSetId::default(), true));
            follow_set_info_by_non_terminal.clear();
            while let Some((sym_ix, lookaheads, reserved_word_set_id, propagates_lookaheads)) =
                stack.pop()
            {
                let mut did_add = false;
                let info = follow_set_info_by_non_terminal.entry(sym_ix).or_default();
                did_add |= info.lookaheads.insert_all(lookaheads);
                if reserved_word_set_id > info.reserved_lookaheads {
                    info.reserved_lookaheads = reserved_word_set_id;
                    did_add = true;
                }
                did_add |= propagates_lookaheads && !info.propagates_lookaheads;
                info.propagates_lookaheads |= propagates_lookaheads;
                // Only keep expanding when this visit contributed new
                // information; this is what makes the fixed-point terminate.
                if !did_add {
                    continue;
                }
                for production in &syntax_grammar.variables[sym_ix].productions {
                    if let Some(symbol) = production.first_symbol() {
                        if symbol.is_non_terminal() {
                            if let Some(next_step) = production.steps.get(1) {
                                // The first symbol is followed by more steps, so
                                // its follow set is the next step's FIRST set.
                                stack.push((
                                    symbol.index,
                                    &result.first_sets[&next_step.symbol],
                                    result.reserved_first_sets[&next_step.symbol],
                                    false,
                                ));
                            } else {
                                // The first symbol is the whole production, so it
                                // inherits the current lookaheads and propagation.
                                stack.push((
                                    symbol.index,
                                    lookaheads,
                                    reserved_word_set_id,
                                    propagates_lookaheads,
                                ));
                            }
                        }
                    }
                }
            }
            // Store all of those non-terminals' productions, along with their associated
            // lookahead info, as *additions* associated with non-terminal `i`.
            let additions_for_non_terminal = &mut result.transitive_closure_additions[i];
            for (&variable_index, follow_set_info) in &follow_set_info_by_non_terminal {
                let variable = &syntax_grammar.variables[variable_index];
                let non_terminal = Symbol::non_terminal(variable_index);
                let variable_index = variable_index as u32;
                // Variables marked for inlining never appear as items themselves.
                if syntax_grammar.variables_to_inline.contains(&non_terminal) {
                    continue;
                }
                for production in &variable.productions {
                    let item = ParseItem {
                        variable_index,
                        production,
                        step_index: 0,
                        has_preceding_inherited_fields: false,
                    };
                    if let Some(inlined_productions) =
                        inlines.inlined_productions(item.production, item.step_index)
                    {
                        for production in inlined_productions {
                            find_or_push(
                                additions_for_non_terminal,
                                TransitiveClosureAddition {
                                    item: item.substitute_production(production),
                                    info: follow_set_info.clone(),
                                },
                            );
                        }
                    } else {
                        find_or_push(
                            additions_for_non_terminal,
                            TransitiveClosureAddition {
                                item,
                                info: follow_set_info.clone(),
                            },
                        );
                    }
                }
            }
        }
        result
    }
    /// Expands `item_set` to its transitive closure using the precomputed
    /// additions. Items whose current step has inlined productions are
    /// substituted with those productions first.
    pub fn transitive_closure(&self, item_set: &ParseItemSet<'a>) -> ParseItemSet<'a> {
        let mut result = ParseItemSet::default();
        for entry in &item_set.entries {
            if let Some(productions) = self
                .inlines
                .inlined_productions(entry.item.production, entry.item.step_index)
            {
                for production in productions {
                    self.add_item(
                        &mut result,
                        &ParseItemSetEntry {
                            item: entry.item.substitute_production(production),
                            lookaheads: entry.lookaheads.clone(),
                            following_reserved_word_set: entry.following_reserved_word_set,
                        },
                    );
                }
            } else {
                self.add_item(&mut result, entry);
            }
        }
        result
    }
    /// The set of terminals that can begin `symbol`.
    pub fn first_set(&self, symbol: &Symbol) -> &TokenSet {
        &self.first_sets[symbol]
    }
    /// The reserved word set reachable at the start of `symbol`, resolved to
    /// its token set, or `None` if the symbol has no recorded entry.
    pub fn reserved_first_set(&self, symbol: &Symbol) -> Option<&TokenSet> {
        let id = *self.reserved_first_sets.get(symbol)?;
        Some(&self.syntax_grammar.reserved_word_sets[id.0])
    }
    /// The set of terminals that can end `symbol`.
    pub fn last_set(&self, symbol: &Symbol) -> &TokenSet {
        &self.last_sets[symbol]
    }
    /// Inserts `entry` into `set`; if the item's next symbol is a
    /// non-terminal, also merges in that non-terminal's precomputed additions,
    /// combining lookaheads and reserved-word sets as appropriate.
    fn add_item(&self, set: &mut ParseItemSet<'a>, entry: &ParseItemSetEntry<'a>) {
        if let Some(step) = entry.item.step() {
            if step.symbol.is_non_terminal() {
                let next_step = entry.item.successor().step();
                // Determine which tokens can follow this non-terminal.
                let (following_tokens, following_reserved_tokens) =
                    if let Some(next_step) = next_step {
                        (
                            self.first_sets.get(&next_step.symbol).unwrap(),
                            *self.reserved_first_sets.get(&next_step.symbol).unwrap(),
                        )
                    } else {
                        (&entry.lookaheads, entry.following_reserved_word_set)
                    };
                // Use the pre-computed *additions* to expand the non-terminal.
                for addition in &self.transitive_closure_additions[step.symbol.index] {
                    let entry = set.insert(addition.item);
                    entry.lookaheads.insert_all(&addition.info.lookaheads);
                    // Reserved-word sets only matter when the word token can
                    // appear as a lookahead.
                    if let Some(word_token) = self.syntax_grammar.word_token {
                        if addition.info.lookaheads.contains(&word_token) {
                            entry.following_reserved_word_set = entry
                                .following_reserved_word_set
                                .max(addition.info.reserved_lookaheads);
                        }
                    }
                    if addition.info.propagates_lookaheads {
                        entry.lookaheads.insert_all(following_tokens);
                        if let Some(word_token) = self.syntax_grammar.word_token {
                            if following_tokens.contains(&word_token) {
                                entry.following_reserved_word_set = entry
                                    .following_reserved_word_set
                                    .max(following_reserved_tokens);
                            }
                        }
                    }
                }
            }
        }
        let e = set.insert(entry.item);
        e.lookaheads.insert_all(&entry.lookaheads);
        e.following_reserved_word_set = e
            .following_reserved_word_set
            .max(entry.following_reserved_word_set);
    }
}
impl fmt::Debug for ParseItemSetBuilder<'_> {
    /// Dumps the precomputed FIRST sets, LAST sets, and transitive-closure
    /// additions in a brace-delimited, human-readable form (debug output
    /// only; not machine-parsed).
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        writeln!(f, "ParseItemSetBuilder {{")?;
        writeln!(f, " first_sets: {{")?;
        for (symbol, first_set) in &self.first_sets {
            // Resolve the symbol's display name from the grammar it belongs to.
            let name = match symbol.kind {
                SymbolType::NonTerminal => &self.syntax_grammar.variables[symbol.index].name,
                SymbolType::External => &self.syntax_grammar.external_tokens[symbol.index].name,
                SymbolType::Terminal => &self.lexical_grammar.variables[symbol.index].name,
                SymbolType::End | SymbolType::EndOfNonTerminalExtra => "END",
            };
            writeln!(
                f,
                " first({name:?}): {}",
                TokenSetDisplay(first_set, self.syntax_grammar, self.lexical_grammar)
            )?;
        }
        writeln!(f, " }}")?;
        writeln!(f, " last_sets: {{")?;
        for (symbol, last_set) in &self.last_sets {
            // Same name resolution as above, for the LAST sets.
            let name = match symbol.kind {
                SymbolType::NonTerminal => &self.syntax_grammar.variables[symbol.index].name,
                SymbolType::External => &self.syntax_grammar.external_tokens[symbol.index].name,
                SymbolType::Terminal => &self.lexical_grammar.variables[symbol.index].name,
                SymbolType::End | SymbolType::EndOfNonTerminalExtra => "END",
            };
            writeln!(
                f,
                " last({name:?}): {}",
                TokenSetDisplay(last_set, self.syntax_grammar, self.lexical_grammar)
            )?;
        }
        writeln!(f, " }}")?;
        writeln!(f, " additions: {{")?;
        for (i, variable) in self.syntax_grammar.variables.iter().enumerate() {
            writeln!(f, " {}: {{", variable.name)?;
            for addition in &self.transitive_closure_additions[i] {
                writeln!(
                    f,
                    " {}",
                    ParseItemDisplay(&addition.item, self.syntax_grammar, self.lexical_grammar)
                )?;
            }
            writeln!(f, " }},")?;
        }
        write!(f, " }},")?;
        write!(f, "}}")?;
        Ok(())
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/generate/src/build_tables/item.rs | crates/generate/src/build_tables/item.rs | use std::{
cmp::Ordering,
fmt,
hash::{Hash, Hasher},
sync::LazyLock,
};
use crate::{
grammars::{
LexicalGrammar, Production, ProductionStep, ReservedWordSetId, SyntaxGrammar,
NO_RESERVED_WORDS,
},
rules::{Associativity, Precedence, Symbol, SymbolType, TokenSet},
};
/// The implicit production for the augmented start rule: a single step that
/// matches non-terminal 0, with no precedence, associativity, alias, field,
/// or reserved-word set.
static START_PRODUCTION: LazyLock<Production> = LazyLock::new(|| Production {
    dynamic_precedence: 0,
    steps: vec![ProductionStep {
        symbol: Symbol {
            index: 0,
            kind: SymbolType::NonTerminal,
        },
        precedence: Precedence::None,
        associativity: None,
        alias: None,
        field_name: None,
        reserved_word_set_id: NO_RESERVED_WORDS,
    }],
});
/// A [`ParseItem`] represents an in-progress match of a single production in a grammar.
#[derive(Clone, Copy, Debug)]
pub struct ParseItem<'a> {
    /// The index of the parent rule within the grammar.
    /// (`u32::MAX` marks the augmented start item; see `ParseItem::start`
    /// and `ParseItem::is_augmented`.)
    pub variable_index: u32,
    /// The number of symbols that have already been matched.
    pub step_index: u32,
    /// The production being matched.
    pub production: &'a Production,
    /// A boolean indicating whether any of the already-matched children were
    /// hidden nodes and had fields. Ordinarily, a parse item's behavior is not
    /// affected by the symbols of its preceding children; it only needs to
    /// keep track of their fields and aliases.
    ///
    /// Take for example these two items:
    /// X -> a b • c
    /// X -> a g • c
    ///
    /// They can be considered equivalent, for the purposes of parse table
    /// generation, because they entail the same actions. But if this flag is
    /// true, then the item's set of inherited fields may depend on the specific
    /// symbols of its preceding children.
    pub has_preceding_inherited_fields: bool,
}
/// A [`ParseItemSet`] represents a set of in-progress matches of productions in a
/// grammar, and for each in-progress match, a set of "lookaheads" - tokens that
/// are allowed to *follow* the in-progress rule. This object corresponds directly
/// to a state in the final parse table.
#[derive(Clone, Debug, PartialEq, Eq, Default)]
pub struct ParseItemSet<'a> {
    /// Entries, kept sorted by item (see `ParseItemSet::insert`).
    pub entries: Vec<ParseItemSetEntry<'a>>,
}
/// One element of a [`ParseItemSet`]: an item plus its lookahead information.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ParseItemSetEntry<'a> {
    /// The in-progress production match.
    pub item: ParseItem<'a>,
    /// Tokens allowed to follow this item.
    pub lookaheads: TokenSet,
    /// Reserved word set that applies after this item.
    /// NOTE(review): semantics inferred from its use in item-set expansion —
    /// confirm against the builder code.
    pub following_reserved_word_set: ReservedWordSetId,
}
/// A [`ParseItemSetCore`] is like a [`ParseItemSet`], but without the lookahead
/// information. Parse states with the same core are candidates for merging.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ParseItemSetCore<'a> {
    /// The items alone, stripped of their lookahead sets.
    pub entries: Vec<ParseItem<'a>>,
}
/// Display adapter for a [`ParseItem`]: resolves symbol indices to names
/// using the given syntax and lexical grammars.
pub struct ParseItemDisplay<'a>(
    pub &'a ParseItem<'a>,
    pub &'a SyntaxGrammar,
    pub &'a LexicalGrammar,
);
/// Display adapter for a [`TokenSet`], rendered as a bracketed list of names.
pub struct TokenSetDisplay<'a>(
    pub &'a TokenSet,
    pub &'a SyntaxGrammar,
    pub &'a LexicalGrammar,
);
/// Display adapter for a whole [`ParseItemSet`], one entry per line.
pub struct ParseItemSetDisplay<'a>(
    pub &'a ParseItemSet<'a>,
    pub &'a SyntaxGrammar,
    pub &'a LexicalGrammar,
);
impl<'a> ParseItem<'a> {
    /// The item representing the beginning of the augmented start rule.
    /// (`u32::MAX` is the sentinel checked by `is_augmented`.)
    pub fn start() -> Self {
        ParseItem {
            production: &START_PRODUCTION,
            variable_index: u32::MAX,
            step_index: 0,
            has_preceding_inherited_fields: false,
        }
    }
    /// The production step at the current position, or `None` if every step
    /// has already been matched.
    pub fn step(&self) -> Option<&'a ProductionStep> {
        let index = self.step_index as usize;
        self.production.steps.get(index)
    }
    /// The next symbol to be matched, if any.
    pub fn symbol(&self) -> Option<Symbol> {
        Some(self.step()?.symbol)
    }
    /// The associativity attached to the most recently completed step.
    pub fn associativity(&self) -> Option<Associativity> {
        match self.prev_step() {
            Some(step) => step.associativity,
            None => None,
        }
    }
    /// The precedence attached to the most recently completed step, or
    /// `Precedence::None` when no step has been completed yet.
    pub fn precedence(&self) -> &Precedence {
        match self.prev_step() {
            Some(step) => &step.precedence,
            None => &Precedence::None,
        }
    }
    /// The most recently completed production step, if any.
    pub fn prev_step(&self) -> Option<&'a ProductionStep> {
        if self.step_index == 0 {
            None
        } else {
            Some(&self.production.steps[(self.step_index - 1) as usize])
        }
    }
    /// Whether every step of the production has been matched.
    #[must_use]
    pub fn is_done(&self) -> bool {
        self.production.steps.len() == self.step_index as usize
    }
    /// Whether this is the augmented start item (see `ParseItem::start`).
    #[must_use]
    pub const fn is_augmented(&self) -> bool {
        self.variable_index == u32::MAX
    }
    /// Create an item like this one, but advanced by one step.
    #[must_use]
    pub const fn successor(&self) -> Self {
        ParseItem {
            variable_index: self.variable_index,
            production: self.production,
            step_index: self.step_index + 1,
            has_preceding_inherited_fields: self.has_preceding_inherited_fields,
        }
    }
    /// Create an item identical to this one, but with a different production.
    /// This is used when dynamically "inlining" certain symbols in a production.
    pub const fn substitute_production(&self, production: &'a Production) -> Self {
        let mut result = *self;
        result.production = production;
        result
    }
}
impl<'a> ParseItemSet<'a> {
    /// Returns the entry for `item`, inserting a fresh one (empty lookaheads,
    /// default reserved-word set) if the item is not present yet. Entries are
    /// kept sorted by item so lookup can use binary search.
    pub fn insert(&mut self, item: ParseItem<'a>) -> &mut ParseItemSetEntry<'a> {
        let search = self.entries.binary_search_by(|entry| entry.item.cmp(&item));
        let index = match search {
            Ok(existing) => existing,
            Err(insertion_point) => {
                let new_entry = ParseItemSetEntry {
                    item,
                    lookaheads: TokenSet::new(),
                    following_reserved_word_set: ReservedWordSetId::default(),
                };
                self.entries.insert(insertion_point, new_entry);
                insertion_point
            }
        };
        &mut self.entries[index]
    }
    /// Strips the lookahead information, leaving just the items.
    pub fn core(&self) -> ParseItemSetCore<'a> {
        let entries = self.entries.iter().map(|entry| entry.item).collect();
        ParseItemSetCore { entries }
    }
}
impl fmt::Display for ParseItemDisplay<'_> {
    /// Renders an item as `NAME → sym1 • sym2 …`, with the dot marking the
    /// current step and precedence/associativity/reserved-set annotations in
    /// parentheses where present.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        if self.0.is_augmented() {
            write!(f, "START →")?;
        } else {
            write!(
                f,
                "{} →",
                self.1.variables[self.0.variable_index as usize].name
            )?;
        }
        for (i, step) in self.0.production.steps.iter().enumerate() {
            if i == self.0.step_index as usize {
                // Emit the dot, plus any annotations on the upcoming step.
                write!(f, " •")?;
                if !step.precedence.is_none()
                    || step.associativity.is_some()
                    || step.reserved_word_set_id != ReservedWordSetId::default()
                {
                    write!(f, " (")?;
                    if !step.precedence.is_none() {
                        write!(f, " {}", step.precedence)?;
                    }
                    if let Some(associativity) = step.associativity {
                        write!(f, " {associativity:?}")?;
                    }
                    if step.reserved_word_set_id != ReservedWordSetId::default() {
                        // NOTE(review): unlike the precedence/associativity
                        // writes above, this format string has no leading
                        // space, so it runs together with a preceding
                        // annotation — possibly unintended; confirm against
                        // expected debug output before changing.
                        write!(f, "reserved: {}", step.reserved_word_set_id)?;
                    }
                    write!(f, " )")?;
                }
            }
            write!(f, " ")?;
            if step.symbol.is_terminal() {
                if let Some(variable) = self.2.variables.get(step.symbol.index) {
                    write!(f, "{}", variable.name)?;
                } else {
                    write!(f, "terminal-{}", step.symbol.index)?;
                }
            } else if step.symbol.is_external() {
                write!(f, "{}", self.1.external_tokens[step.symbol.index].name)?;
            } else {
                write!(f, "{}", self.1.variables[step.symbol.index].name)?;
            }
            if let Some(alias) = &step.alias {
                write!(f, "@{}", alias.value)?;
            }
        }
        if self.0.is_done() {
            // Item is complete: trailing dot, plus the last step's
            // precedence/associativity if any.
            write!(f, " •")?;
            if let Some(step) = self.0.production.steps.last() {
                if let Some(associativity) = step.associativity {
                    if step.precedence.is_none() {
                        write!(f, " ({associativity:?})")?;
                    } else {
                        write!(f, " ({} {associativity:?})", step.precedence)?;
                    }
                } else if !step.precedence.is_none() {
                    write!(f, " ({})", step.precedence)?;
                }
            }
        }
        Ok(())
    }
}
/// Maps an invisible/special character to its escaped textual form
/// (e.g. a newline becomes the two characters `\n`), or `None` when the
/// character needs no escaping.
const fn escape_invisible(c: char) -> Option<&'static str> {
    match c {
        '\n' => Some("\\n"),
        '\r' => Some("\\r"),
        '\t' => Some("\\t"),
        '\0' => Some("\\0"),
        '\\' => Some("\\\\"),
        '\x0b' => Some("\\v"),
        '\x0c' => Some("\\f"),
        _ => None,
    }
}
/// Copies `source`, replacing invisible characters with their escaped forms
/// so variable names display unambiguously on one line.
fn display_variable_name(source: &str) -> String {
    let mut result = String::with_capacity(source.len());
    for c in source.chars() {
        match escape_invisible(c) {
            Some(escaped) => result.push_str(escaped),
            None => result.push(c),
        }
    }
    result
}
impl fmt::Display for TokenSetDisplay<'_> {
    /// Renders the token set as a bracketed, comma-separated list of
    /// human-readable token names.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        let Self(tokens, syntax_grammar, lexical_grammar) = self;
        f.write_str("[")?;
        for (position, symbol) in tokens.iter().enumerate() {
            if position != 0 {
                f.write_str(", ")?;
            }
            if symbol.is_terminal() {
                match lexical_grammar.variables.get(symbol.index) {
                    Some(variable) => write!(f, "{}", display_variable_name(&variable.name))?,
                    None => write!(f, "terminal-{}", symbol.index)?,
                }
            } else if symbol.is_external() {
                write!(f, "{}", syntax_grammar.external_tokens[symbol.index].name)?;
            } else {
                write!(f, "{}", syntax_grammar.variables[symbol.index].name)?;
            }
        }
        f.write_str("]")
    }
}
impl fmt::Display for ParseItemSetDisplay<'_> {
    /// Renders one entry per line: the item, a tab, its lookahead set, and —
    /// when non-default — the following reserved word set.
    fn fmt(&self, f: &mut fmt::Formatter) -> Result<(), fmt::Error> {
        for entry in &self.0.entries {
            let item = ParseItemDisplay(&entry.item, self.1, self.2);
            let lookaheads = TokenSetDisplay(&entry.lookaheads, self.1, self.2);
            write!(f, "{item}\t{lookaheads}")?;
            if entry.following_reserved_word_set != ReservedWordSetId::default() {
                write!(
                    f,
                    "\treserved word set: {}",
                    entry.following_reserved_word_set
                )?;
            }
            writeln!(f)?;
        }
        Ok(())
    }
}
// This hand-written Hash must stay consistent with the manual `PartialEq`
// and `Ord` impls below: all three compare/hash exactly the same fields.
impl Hash for ParseItem<'_> {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        hasher.write_u32(self.variable_index);
        hasher.write_u32(self.step_index);
        hasher.write_i32(self.production.dynamic_precedence);
        hasher.write_usize(self.production.steps.len());
        hasher.write_i32(i32::from(self.has_preceding_inherited_fields));
        self.precedence().hash(hasher);
        self.associativity().hash(hasher);
        // The already-matched children don't play any role in the parse state for
        // this item, unless any of the following are true:
        // * the children have fields
        // * the children have aliases
        // * the children are hidden and represent rules that have fields.
        // See the docs for `has_preceding_inherited_fields`.
        for step in &self.production.steps[0..self.step_index as usize] {
            step.alias.hash(hasher);
            step.field_name.hash(hasher);
            if self.has_preceding_inherited_fields {
                step.symbol.hash(hasher);
            }
        }
        // Remaining (unmatched) steps are hashed in full.
        for step in &self.production.steps[self.step_index as usize..] {
            step.hash(hasher);
        }
    }
}
// Must remain consistent with the manual `Hash` impl above: equal items must
// hash identically, so both look at the same subset of step data.
impl PartialEq for ParseItem<'_> {
    fn eq(&self, other: &Self) -> bool {
        if self.variable_index != other.variable_index
            || self.step_index != other.step_index
            || self.production.dynamic_precedence != other.production.dynamic_precedence
            || self.production.steps.len() != other.production.steps.len()
            || self.precedence() != other.precedence()
            || self.associativity() != other.associativity()
            || self.has_preceding_inherited_fields != other.has_preceding_inherited_fields
        {
            return false;
        }
        for (i, step) in self.production.steps.iter().enumerate() {
            // See the previous comment (in the `Hash::hash` impl) regarding comparisons
            // of parse items' already-completed steps.
            if i < self.step_index as usize {
                // Completed steps: only aliases, field names, and (when
                // inherited fields matter) symbols are significant.
                if step.alias != other.production.steps[i].alias {
                    return false;
                }
                if step.field_name != other.production.steps[i].field_name {
                    return false;
                }
                if self.has_preceding_inherited_fields
                    && step.symbol != other.production.steps[i].symbol
                {
                    return false;
                }
            } else if *step != other.production.steps[i] {
                return false;
            }
        }
        true
    }
}
// Total order used to keep `ParseItemSet::entries` sorted for binary search.
// Must be consistent with `PartialEq`: it compares the same fields, in a
// fixed priority order.
impl Ord for ParseItem<'_> {
    fn cmp(&self, other: &Self) -> Ordering {
        self.step_index
            .cmp(&other.step_index)
            .then_with(|| self.variable_index.cmp(&other.variable_index))
            .then_with(|| {
                self.production
                    .dynamic_precedence
                    .cmp(&other.production.dynamic_precedence)
            })
            .then_with(|| {
                self.production
                    .steps
                    .len()
                    .cmp(&other.production.steps.len())
            })
            .then_with(|| self.precedence().cmp(other.precedence()))
            .then_with(|| self.associativity().cmp(&other.associativity()))
            .then_with(|| {
                for (i, step) in self.production.steps.iter().enumerate() {
                    // See the previous comment (in the `Hash::hash` impl) regarding comparisons
                    // of parse items' already-completed steps.
                    let o = if i < self.step_index as usize {
                        step.alias
                            .cmp(&other.production.steps[i].alias)
                            .then_with(|| {
                                step.field_name.cmp(&other.production.steps[i].field_name)
                            })
                    } else {
                        step.cmp(&other.production.steps[i])
                    };
                    if o != Ordering::Equal {
                        return o;
                    }
                }
                Ordering::Equal
            })
    }
}
// Delegates to the total order above, as required for `Ord` types.
impl PartialOrd for ParseItem<'_> {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
// `PartialEq` above is reflexive/total over the compared fields, so `Eq` holds.
impl Eq for ParseItem<'_> {}
// Hashes entries in order; entries are kept sorted by `insert`, so equal
// sets hash identically.
impl Hash for ParseItemSet<'_> {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        hasher.write_usize(self.entries.len());
        for entry in &self.entries {
            entry.item.hash(hasher);
            entry.lookaheads.hash(hasher);
            entry.following_reserved_word_set.hash(hasher);
        }
    }
}
// Core hash ignores lookaheads entirely — states with equal cores are
// candidates for merging (see the `ParseItemSetCore` docs).
impl Hash for ParseItemSetCore<'_> {
    fn hash<H: Hasher>(&self, hasher: &mut H) {
        hasher.write_usize(self.entries.len());
        for item in &self.entries {
            item.hash(hasher);
        }
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
/// Build script: forwards the `TARGET` and `HOST` triples that cargo sets for
/// build scripts into compile-time env vars (`BUILD_TARGET` / `BUILD_HOST`)
/// so the crate can read them via `env!` at compile time.
fn main() {
    // `expect` instead of `unwrap`: if either variable is ever missing
    // (e.g. the script is run outside cargo), the panic message says why.
    println!(
        "cargo:rustc-env=BUILD_TARGET={}",
        std::env::var("TARGET").expect("cargo always sets TARGET for build scripts")
    );
    println!(
        "cargo:rustc-env=BUILD_HOST={}",
        std::env::var("HOST").expect("cargo always sets HOST for build scripts")
    );
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/loader/src/loader.rs | crates/loader/src/loader.rs | #![cfg_attr(not(any(test, doctest)), doc = include_str!("../README.md"))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))]
use std::ops::Range;
#[cfg(feature = "tree-sitter-highlight")]
use std::sync::Mutex;
use std::{
collections::HashMap,
env, fs,
hash::{Hash as _, Hasher as _},
io::{BufRead, BufReader},
marker::PhantomData,
mem,
path::{Path, PathBuf},
process::Command,
sync::LazyLock,
time::{SystemTime, SystemTimeError},
};
use etcetera::BaseStrategy as _;
use fs4::fs_std::FileExt;
use libloading::{Library, Symbol};
use log::{error, info, warn};
use once_cell::unsync::OnceCell;
use regex::{Regex, RegexBuilder};
use semver::Version;
use serde::{Deserialize, Deserializer, Serialize};
use thiserror::Error;
use tree_sitter::Language;
#[cfg(any(feature = "tree-sitter-highlight", feature = "tree-sitter-tags"))]
use tree_sitter::QueryError;
#[cfg(feature = "tree-sitter-highlight")]
use tree_sitter::QueryErrorKind;
#[cfg(feature = "wasm")]
use tree_sitter::WasmError;
#[cfg(feature = "tree-sitter-highlight")]
use tree_sitter_highlight::HighlightConfiguration;
#[cfg(feature = "tree-sitter-tags")]
use tree_sitter_tags::{Error as TagsError, TagsConfiguration};
/// Extracts the value of the `"name"` key from a grammar.json body
/// (non-greedy match on the quoted value).
static GRAMMAR_NAME_REGEX: LazyLock<Regex> =
    LazyLock::new(|| Regex::new(r#""name":\s*"(.*?)""#).unwrap());
/// Pinned wasi-sdk version, embedded at compile time from the
/// `wasi-sdk-version` file next to this crate.
const WASI_SDK_VERSION: &str = include_str!("../wasi-sdk-version").trim_ascii();
/// Result alias for fallible loader operations.
pub type LoaderResult<T> = Result<T, LoaderError>;
/// Every failure mode of the loader: locating grammars, compiling parsers,
/// loading dynamic libraries, reading queries, and fetching/using wasi-sdk.
/// Display text comes from the `#[error]` attributes (via `thiserror`).
#[derive(Debug, Error)]
pub enum LoaderError {
    #[error(transparent)]
    Compiler(CompilerError),
    #[error("Parser compilation failed.\nStdout: {0}\nStderr: {1}")]
    Compilation(String, String),
    #[error("Failed to execute curl for {0} -- {1}")]
    Curl(String, std::io::Error),
    #[error("Failed to load language in current directory:\n{0}")]
    CurrentDirectoryLoad(Box<Self>),
    #[error("External file path {0} is outside of parser directory {1}")]
    ExternalFile(String, String),
    #[error("Failed to extract archive {0} to {1}")]
    Extraction(String, String),
    #[error("Failed to load language for file name {0}:\n{1}")]
    FileNameLoad(String, Box<Self>),
    #[error("Failed to parse the language name from grammar.json at {0}")]
    GrammarJSON(String),
    #[error(transparent)]
    HomeDir(#[from] etcetera::HomeDirError),
    #[error(transparent)]
    IO(IoError),
    #[error(transparent)]
    Library(LibraryError),
    #[error("Failed to compare binary and source timestamps:\n{0}")]
    ModifiedTime(Box<Self>),
    #[error("No language found")]
    NoLanguage,
    #[error(transparent)]
    Query(LoaderQueryError),
    #[error(transparent)]
    ScannerSymbols(ScannerSymbolError),
    #[error("Failed to load language for scope '{0}':\n{1}")]
    ScopeLoad(String, Box<Self>),
    #[error(transparent)]
    Serialization(#[from] serde_json::Error),
    #[error(transparent)]
    Symbol(SymbolError),
    #[error(transparent)]
    Tags(#[from] TagsError),
    #[error("Failed to execute tar for {0} -- {1}")]
    Tar(String, std::io::Error),
    #[error(transparent)]
    Time(#[from] SystemTimeError),
    #[error("Unknown scope '{0}'")]
    UnknownScope(String),
    #[error("Failed to download wasi-sdk from {0}")]
    WasiSDKDownload(String),
    #[error(transparent)]
    WasiSDKClang(#[from] WasiSDKClangError),
    #[error("Unsupported platform for wasi-sdk")]
    WasiSDKPlatform,
    #[cfg(feature = "wasm")]
    #[error(transparent)]
    Wasm(#[from] WasmError),
    #[error("Failed to run wasi-sdk clang -- {0}")]
    WasmCompiler(std::io::Error),
    #[error("wasi-sdk clang command failed: {0}")]
    WasmCompilation(String),
}
/// A failure to *launch* the C compiler (as opposed to a compile failure),
/// keeping the exact command for diagnostics.
#[derive(Debug, Error)]
pub struct CompilerError {
    pub error: std::io::Error,
    pub command: Box<Command>,
}

impl std::fmt::Display for CompilerError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "Failed to execute the C compiler with the following command:\n{:?}\nError: {}",
            *self.command, self.error
        )
    }
}
/// An I/O error, optionally annotated with the path it occurred on.
#[derive(Debug, Error)]
pub struct IoError {
    pub error: std::io::Error,
    pub path: Option<String>,
}

impl IoError {
    /// Wraps `error`, recording `path` (lossily stringified) when provided.
    fn new(error: std::io::Error, path: Option<&Path>) -> Self {
        let path = path.map(|p| p.to_string_lossy().into_owned());
        Self { error, path }
    }
}

impl std::fmt::Display for IoError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        match &self.path {
            Some(path) => write!(f, "{} ({path})", self.error),
            None => write!(f, "{}", self.error),
        }
    }
}
/// A failure to open a compiled parser as a dynamic library.
#[derive(Debug, Error)]
pub struct LibraryError {
    pub error: libloading::Error,
    pub path: String,
}

impl std::fmt::Display for LibraryError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self { error, path } = self;
        write!(f, "Error opening dynamic library {path} -- {error}")
    }
}
/// A query-compilation error, optionally tagged with the query file it
/// came from.
#[derive(Debug, Error)]
pub struct LoaderQueryError {
    pub error: QueryError,
    pub file: Option<String>,
}

impl std::fmt::Display for LoaderQueryError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if let Some(path) = &self.file {
            writeln!(f, "Error in query file {path}:")?;
        }
        write!(f, "{}", self.error)
    }
}
/// A failure to resolve a named symbol from a loaded dynamic library.
#[derive(Debug, Error)]
pub struct SymbolError {
    pub error: libloading::Error,
    pub symbol_name: String,
    pub path: String,
}

impl std::fmt::Display for SymbolError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        let Self {
            error,
            symbol_name,
            path,
        } = self;
        write!(f, "Failed to load symbol {symbol_name} from {path} -- {error}")
    }
}
/// Reports external-scanner functions that a parser library fails to export.
#[derive(Debug, Error)]
pub struct ScannerSymbolError {
    pub missing: Vec<String>,
}

impl std::fmt::Display for ScannerSymbolError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        writeln!(
            f,
            "Missing required functions in the external scanner, parsing won't work without these!\n"
        )?;
        self.missing
            .iter()
            .try_for_each(|symbol| writeln!(f, " `{symbol}`"))?;
        writeln!(
            f,
            "You can read more about this at https://tree-sitter.github.io/tree-sitter/creating-parsers/4-external-scanners\n"
        )
    }
}
/// No usable clang executable was found inside a wasi-sdk installation,
/// whether downloaded by us or pointed to via `TREE_SITTER_WASI_SDK_PATH`.
#[derive(Debug, Error)]
pub struct WasiSDKClangError {
    pub wasi_sdk_dir: String,
    pub possible_executables: Vec<&'static str>,
    /// True when the SDK was downloaded by the loader (vs. user-supplied).
    pub download: bool,
}

impl std::fmt::Display for WasiSDKClangError {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        if self.download {
            write!(
                f,
                "Failed to find clang executable in downloaded wasi-sdk at '{}'.",
                self.wasi_sdk_dir
            )?;
        } else {
            write!(f, "TREE_SITTER_WASI_SDK_PATH is set to '{}', but no clang executable found in 'bin/' directory.", self.wasi_sdk_dir)?;
        }
        write!(f, " Looked for: {}.", self.possible_executables.join(", "))
    }
}
/// Conventional file names for a grammar's query files, used when a language
/// configuration doesn't specify explicit paths.
pub const DEFAULT_HIGHLIGHTS_QUERY_FILE_NAME: &str = "highlights.scm";
pub const DEFAULT_INJECTIONS_QUERY_FILE_NAME: &str = "injections.scm";
pub const DEFAULT_LOCALS_QUERY_FILE_NAME: &str = "locals.scm";
pub const DEFAULT_TAGS_QUERY_FILE_NAME: &str = "tags.scm";
/// Loader configuration as (de)serialized from the user's config file.
#[derive(Default, Deserialize, Serialize)]
pub struct Config {
    /// Directories listed under the "parser-directories" key.
    /// Parsed leniently via `deserialize_parser_directories` (defined
    /// elsewhere in this file).
    #[serde(default)]
    #[serde(
        rename = "parser-directories",
        deserialize_with = "deserialize_parser_directories"
    )]
    pub parser_directories: Vec<PathBuf>,
}
/// A JSON field that may be absent, a single path, or a list of paths
/// (deserialized untagged, so all three shapes are accepted).
#[derive(Serialize, Deserialize, Clone, Default)]
#[serde(untagged)]
pub enum PathsJSON {
    #[default]
    Empty,
    Single(PathBuf),
    Multiple(Vec<PathBuf>),
}

impl PathsJSON {
    /// Normalizes to a list of paths; `None` when no paths were given.
    fn into_vec(self) -> Option<Vec<PathBuf>> {
        match self {
            Self::Empty => None,
            Self::Single(path) => Some(vec![path]),
            Self::Multiple(paths) => Some(paths),
        }
    }

    const fn is_empty(&self) -> bool {
        matches!(self, Self::Empty)
    }

    /// Represent this set of paths as a string that can be included in templates
    #[must_use]
    pub fn to_variable_value<'a>(&'a self, default: &'a PathBuf) -> &'a str {
        let selected = match self {
            Self::Empty => Some(default),
            Self::Single(path_buf) => Some(path_buf),
            Self::Multiple(paths) => paths.first(),
        };
        selected
            .and_then(|path| path.as_os_str().to_str())
            .unwrap_or("")
    }
}
/// `package.json` author field: either a plain string or a structured object.
#[derive(Serialize, Deserialize, Clone)]
#[serde(untagged)]
pub enum PackageJSONAuthor {
    String(String),
    Object {
        name: String,
        email: Option<String>,
        url: Option<String>,
    },
}
/// `package.json` repository field: either a plain URL string or an object
/// with a `url` key.
#[derive(Serialize, Deserialize, Clone)]
#[serde(untagged)]
pub enum PackageJSONRepository {
    String(String),
    Object { url: String },
}
/// The subset of `package.json` that the loader reads, including the legacy
/// `tree-sitter` language-configuration section.
#[derive(Serialize, Deserialize)]
pub struct PackageJSON {
    pub name: String,
    pub version: Version,
    pub description: Option<String>,
    pub author: Option<PackageJSONAuthor>,
    pub maintainers: Option<Vec<PackageJSONAuthor>>,
    pub license: Option<String>,
    pub repository: Option<PackageJSONRepository>,
    /// Legacy per-language configuration array.
    #[serde(default)]
    #[serde(rename = "tree-sitter", skip_serializing_if = "Option::is_none")]
    pub tree_sitter: Option<Vec<LanguageConfigurationJSON>>,
}
/// Serde default for configuration paths: the current directory.
fn default_path() -> PathBuf {
    ".".into()
}
/// One entry of the legacy `tree-sitter` array in `package.json`, describing
/// how a single language is detected and where its queries live.
#[derive(Serialize, Deserialize, Clone)]
#[serde(rename_all = "kebab-case")]
pub struct LanguageConfigurationJSON {
    /// Directory of the grammar, relative to the package root.
    #[serde(default = "default_path")]
    pub path: PathBuf,
    pub scope: Option<String>,
    pub file_types: Option<Vec<String>>,
    pub content_regex: Option<String>,
    pub first_line_regex: Option<String>,
    pub injection_regex: Option<String>,
    #[serde(default, skip_serializing_if = "PathsJSON::is_empty")]
    pub highlights: PathsJSON,
    #[serde(default, skip_serializing_if = "PathsJSON::is_empty")]
    pub injections: PathsJSON,
    #[serde(default, skip_serializing_if = "PathsJSON::is_empty")]
    pub locals: PathsJSON,
    #[serde(default, skip_serializing_if = "PathsJSON::is_empty")]
    pub tags: PathsJSON,
    #[serde(default, skip_serializing_if = "PathsJSON::is_empty")]
    pub external_files: PathsJSON,
}
/// Top-level structure of a grammar repository's `tree-sitter.json` file.
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct TreeSitterJSON {
    #[serde(rename = "$schema")]
    pub schema: Option<String>,
    pub grammars: Vec<Grammar>,
    pub metadata: Metadata,
    #[serde(default)]
    pub bindings: Bindings,
}
impl TreeSitterJSON {
    /// Parse the `tree-sitter.json` file found directly inside `path`.
    ///
    /// # Errors
    ///
    /// Returns an error if the file cannot be read or contains invalid JSON.
    pub fn from_file(path: &Path) -> LoaderResult<Self> {
        let path = path.join("tree-sitter.json");
        let contents = fs::read_to_string(&path)
            .map_err(|e| LoaderError::IO(IoError::new(e, Some(path.as_path()))))?;
        Ok(serde_json::from_str(&contents)?)
    }

    /// Whether this repository declares more than one grammar.
    #[must_use]
    pub fn has_multiple_language_configs(&self) -> bool {
        self.grammars.len() > 1
    }
}
/// A single grammar entry in `tree-sitter.json`, describing one language and
/// where to find its queries.
#[derive(Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct Grammar {
    pub name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub camelcase: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub title: Option<String>,
    pub scope: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub path: Option<PathBuf>,
    #[serde(default, skip_serializing_if = "PathsJSON::is_empty")]
    pub external_files: PathsJSON,
    pub file_types: Option<Vec<String>>,
    #[serde(default, skip_serializing_if = "PathsJSON::is_empty")]
    pub highlights: PathsJSON,
    #[serde(default, skip_serializing_if = "PathsJSON::is_empty")]
    pub injections: PathsJSON,
    #[serde(default, skip_serializing_if = "PathsJSON::is_empty")]
    pub locals: PathsJSON,
    #[serde(default, skip_serializing_if = "PathsJSON::is_empty")]
    pub tags: PathsJSON,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub injection_regex: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub first_line_regex: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub content_regex: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub class_name: Option<String>,
}
/// Repository-wide metadata section of `tree-sitter.json`.
#[derive(Serialize, Deserialize)]
pub struct Metadata {
    pub version: Version,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub license: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub authors: Option<Vec<Author>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub links: Option<Links>,
    /// Skipped during (de)serialization; not part of the JSON file.
    #[serde(skip)]
    pub namespace: Option<String>,
}
/// One author entry in the `tree-sitter.json` metadata.
#[derive(Serialize, Deserialize)]
pub struct Author {
    pub name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub email: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub url: Option<String>,
}
/// Project links in the `tree-sitter.json` metadata.
#[derive(Serialize, Deserialize)]
pub struct Links {
    pub repository: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub funding: Option<String>,
}
/// Which language bindings a grammar repository generates. Deserialized with
/// `#[serde(default)]`, so omitted fields fall back to [`Bindings::default`].
#[derive(Serialize, Deserialize, Clone)]
#[serde(default)]
pub struct Bindings {
    pub c: bool,
    pub go: bool,
    pub java: bool,
    /// Hidden from (de)serialization until Kotlin bindings are available.
    #[serde(skip)]
    pub kotlin: bool,
    pub node: bool,
    pub python: bool,
    pub rust: bool,
    pub swift: bool,
    pub zig: bool,
}
impl Bindings {
    /// return available languages and its default enabled state.
    #[must_use]
    pub const fn languages(&self) -> [(&'static str, bool); 8] {
        [
            ("c", true),
            ("go", true),
            ("java", false),
            // Comment out Kotlin until the bindings are actually available.
            // ("kotlin", false),
            ("node", true),
            ("python", true),
            ("rust", true),
            ("swift", true),
            ("zig", false),
        ]
    }

    /// construct Bindings from a language list. If a language isn't supported, its name will be put on the error part.
    pub fn with_enabled_languages<'a, I>(languages: I) -> Result<Self, &'a str>
    where
        I: Iterator<Item = &'a str>,
    {
        // Start with every binding disabled, then flip on each requested one.
        let mut bindings = Self {
            c: false,
            go: false,
            java: false,
            kotlin: false,
            node: false,
            python: false,
            rust: false,
            swift: false,
            zig: false,
        };
        for language in languages {
            let flag = match language {
                "c" => &mut bindings.c,
                "go" => &mut bindings.go,
                "java" => &mut bindings.java,
                // Comment out Kotlin until the bindings are actually available.
                // "kotlin" => &mut bindings.kotlin,
                "node" => &mut bindings.node,
                "python" => &mut bindings.python,
                "rust" => &mut bindings.rust,
                "swift" => &mut bindings.swift,
                "zig" => &mut bindings.zig,
                unsupported => return Err(unsupported),
            };
            *flag = true;
        }
        Ok(bindings)
    }
}
impl Default for Bindings {
    /// The default binding set: C, Go, Node, Python, Rust, and Swift enabled;
    /// Java, Kotlin, and Zig disabled.
    fn default() -> Self {
        Self {
            c: true,
            go: true,
            java: false,
            kotlin: false,
            node: true,
            python: true,
            rust: true,
            swift: true,
            zig: false,
        }
    }
}
// Replace a leading `~` or `$HOME` with the home path string.
// (While paths like "~/.tree-sitter/config.json" can be deserialized,
// they're not valid path for I/O modules.)
fn deserialize_parser_directories<'de, D>(deserializer: D) -> Result<Vec<PathBuf>, D::Error>
where
    D: Deserializer<'de>,
{
    let paths = Vec::<PathBuf>::deserialize(deserializer)?;
    // If the home directory cannot be determined, leave the paths untouched.
    match etcetera::home_dir() {
        Ok(home) => Ok(paths
            .into_iter()
            .map(|path| standardize_path(path, &home))
            .collect()),
        Err(_) => Ok(paths),
    }
}
/// Expand a leading `~` or `$HOME` path component into `home`; other paths
/// are returned unchanged.
fn standardize_path(path: PathBuf, home: &Path) -> PathBuf {
    for prefix in ["~", "$HOME"] {
        if let Ok(rest) = path.strip_prefix(prefix) {
            return home.join(rest);
        }
    }
    path
}
impl Config {
    /// The initial configuration: a set of conventional source-code
    /// directories under the user's home directory.
    ///
    /// # Panics
    ///
    /// Panics if the home directory cannot be determined.
    #[must_use]
    pub fn initial() -> Self {
        let home_dir = etcetera::home_dir().expect("Cannot determine home directory");
        Self {
            parser_directories: vec![
                home_dir.join("github"),
                home_dir.join("src"),
                home_dir.join("source"),
                home_dir.join("projects"),
                home_dir.join("dev"),
                home_dir.join("git"),
            ],
        }
    }
}
const BUILD_TARGET: &str = env!("BUILD_TARGET");
/// Everything known about one language in a grammar repository: how to
/// detect files of that language, where its query files live, and lazily
/// initialized highlight/tags configurations.
pub struct LanguageConfiguration<'a> {
    pub scope: Option<String>,
    pub content_regex: Option<Regex>,
    pub first_line_regex: Option<Regex>,
    pub injection_regex: Option<Regex>,
    pub file_types: Vec<String>,
    pub root_path: PathBuf,
    pub highlights_filenames: Option<Vec<PathBuf>>,
    pub injections_filenames: Option<Vec<PathBuf>>,
    pub locals_filenames: Option<Vec<PathBuf>>,
    pub tags_filenames: Option<Vec<PathBuf>>,
    pub language_name: String,
    /// Index into the loader's `languages_by_id` table.
    language_id: usize,
    #[cfg(feature = "tree-sitter-highlight")]
    highlight_config: OnceCell<Option<HighlightConfiguration>>,
    #[cfg(feature = "tree-sitter-tags")]
    tags_config: OnceCell<Option<TagsConfiguration>>,
    #[cfg(feature = "tree-sitter-highlight")]
    highlight_names: &'a Mutex<Vec<String>>,
    #[cfg(feature = "tree-sitter-highlight")]
    use_all_highlight_names: bool,
    _phantom: PhantomData<&'a ()>,
}
/// Discovers, compiles, caches, and loads tree-sitter grammars from the
/// user's configured parser directories.
pub struct Loader {
    /// Directory where compiled parser shared libraries are cached.
    pub parser_lib_path: PathBuf,
    /// Per-language (repo path, lazily compiled language, external files).
    languages_by_id: Vec<(PathBuf, OnceCell<Language>, Option<Vec<PathBuf>>)>,
    language_configurations: Vec<LanguageConfiguration<'static>>,
    language_configuration_ids_by_file_type: HashMap<String, Vec<usize>>,
    language_configuration_in_current_path: Option<usize>,
    language_configuration_ids_by_first_line_regex: HashMap<String, Vec<usize>>,
    #[cfg(feature = "tree-sitter-highlight")]
    highlight_names: Box<Mutex<Vec<String>>>,
    #[cfg(feature = "tree-sitter-highlight")]
    use_all_highlight_names: bool,
    debug_build: bool,
    sanitize_build: bool,
    force_rebuild: bool,
    #[cfg(feature = "wasm")]
    wasm_store: Mutex<Option<tree_sitter::WasmStore>>,
}
/// Inputs for compiling a single parser (and optional scanner) into a
/// shared library.
pub struct CompileConfig<'a> {
    pub src_path: &'a Path,
    pub header_paths: Vec<&'a Path>,
    pub parser_path: PathBuf,
    pub scanner_path: Option<PathBuf>,
    pub external_files: Option<&'a [PathBuf]>,
    /// Explicit output location; when `None`, the loader's cache dir is used.
    pub output_path: Option<PathBuf>,
    pub flags: &'a [&'a str],
    pub sanitize: bool,
    pub name: String,
}
impl<'a> CompileConfig<'a> {
    /// A compile configuration for the sources in `src_path`: `src_path` is
    /// the sole header search path, `parser.c` the parser source, with no
    /// scanner, no extra flags, and sanitizers disabled. The language name
    /// is filled in later.
    #[must_use]
    pub fn new(
        src_path: &'a Path,
        externals: Option<&'a [PathBuf]>,
        output_path: Option<PathBuf>,
    ) -> Self {
        let parser_path = src_path.join("parser.c");
        Self {
            src_path,
            header_paths: vec![src_path],
            parser_path,
            scanner_path: None,
            external_files: externals,
            output_path,
            flags: &[],
            sanitize: false,
            name: String::new(),
        }
    }
}
unsafe impl Sync for Loader {}
impl Loader {
pub fn new() -> LoaderResult<Self> {
let parser_lib_path = if let Ok(path) = env::var("TREE_SITTER_LIBDIR") {
PathBuf::from(path)
} else {
if cfg!(target_os = "macos") {
let legacy_apple_path = etcetera::base_strategy::Apple::new()?
.cache_dir() // `$HOME/Library/Caches/`
.join("tree-sitter");
if legacy_apple_path.exists() && legacy_apple_path.is_dir() {
std::fs::remove_dir_all(&legacy_apple_path).map_err(|e| {
LoaderError::IO(IoError::new(e, Some(legacy_apple_path.as_path())))
})?;
}
}
etcetera::choose_base_strategy()?
.cache_dir()
.join("tree-sitter")
.join("lib")
};
Ok(Self::with_parser_lib_path(parser_lib_path))
}
    /// Create a loader that caches compiled parsers at the given directory.
    #[must_use]
    pub fn with_parser_lib_path(parser_lib_path: PathBuf) -> Self {
        Self {
            parser_lib_path,
            languages_by_id: Vec::new(),
            language_configurations: Vec::new(),
            language_configuration_ids_by_file_type: HashMap::new(),
            language_configuration_in_current_path: None,
            language_configuration_ids_by_first_line_regex: HashMap::new(),
            #[cfg(feature = "tree-sitter-highlight")]
            highlight_names: Box::new(Mutex::new(Vec::new())),
            #[cfg(feature = "tree-sitter-highlight")]
            use_all_highlight_names: true,
            debug_build: false,
            sanitize_build: false,
            force_rebuild: false,
            #[cfg(feature = "wasm")]
            wasm_store: Mutex::default(),
        }
    }
#[cfg(feature = "tree-sitter-highlight")]
#[cfg_attr(docsrs, doc(cfg(feature = "tree-sitter-highlight")))]
pub fn configure_highlights(&mut self, names: &[String]) {
self.use_all_highlight_names = false;
let mut highlights = self.highlight_names.lock().unwrap();
highlights.clear();
highlights.extend(names.iter().cloned());
}
#[must_use]
#[cfg(feature = "tree-sitter-highlight")]
#[cfg_attr(docsrs, doc(cfg(feature = "tree-sitter-highlight")))]
pub fn highlight_names(&self) -> Vec<String> {
self.highlight_names.lock().unwrap().clone()
}
pub fn find_all_languages(&mut self, config: &Config) -> LoaderResult<()> {
if config.parser_directories.is_empty() {
warn!(concat!(
"You have not configured any parser directories!\n",
"Please run `tree-sitter init-config` and edit the resulting\n",
"configuration file to indicate where we should look for\n",
"language grammars.\n"
));
}
for parser_container_dir in &config.parser_directories {
if let Ok(entries) = fs::read_dir(parser_container_dir) {
for entry in entries {
let entry = entry.map_err(|e| LoaderError::IO(IoError::new(e, None)))?;
if let Some(parser_dir_name) = entry.file_name().to_str() {
if parser_dir_name.starts_with("tree-sitter-") {
self.find_language_configurations_at_path(
&parser_container_dir.join(parser_dir_name),
false,
)
.ok();
}
}
}
}
}
Ok(())
}
pub fn languages_at_path(&mut self, path: &Path) -> LoaderResult<Vec<(Language, String)>> {
if let Ok(configurations) = self.find_language_configurations_at_path(path, true) {
let mut language_ids = configurations
.iter()
.map(|c| (c.language_id, c.language_name.clone()))
.collect::<Vec<_>>();
language_ids.sort_unstable();
language_ids.dedup();
language_ids
.into_iter()
.map(|(id, name)| Ok((self.language_for_id(id)?, name)))
.collect::<LoaderResult<Vec<_>>>()
} else {
Ok(Vec::new())
}
}
#[must_use]
pub fn get_all_language_configurations(&self) -> Vec<(&LanguageConfiguration, &Path)> {
self.language_configurations
.iter()
.map(|c| (c, self.languages_by_id[c.language_id].0.as_ref()))
.collect()
}
pub fn language_configuration_for_scope(
&self,
scope: &str,
) -> LoaderResult<Option<(Language, &LanguageConfiguration)>> {
for configuration in &self.language_configurations {
if configuration.scope.as_ref().is_some_and(|s| s == scope) {
let language = self.language_for_id(configuration.language_id)?;
return Ok(Some((language, configuration)));
}
}
Ok(None)
}
pub fn language_configuration_for_first_line_regex(
&self,
path: &Path,
) -> LoaderResult<Option<(Language, &LanguageConfiguration)>> {
self.language_configuration_ids_by_first_line_regex
.iter()
.try_fold(None, |_, (regex, ids)| {
if let Some(regex) = Self::regex(Some(regex)) {
let file = fs::File::open(path)
.map_err(|e| LoaderError::IO(IoError::new(e, Some(path))))?;
let reader = BufReader::new(file);
let first_line = reader
.lines()
.next()
.transpose()
.map_err(|e| LoaderError::IO(IoError::new(e, Some(path))))?;
if let Some(first_line) = first_line {
if regex.is_match(&first_line) && !ids.is_empty() {
let configuration = &self.language_configurations[ids[0]];
let language = self.language_for_id(configuration.language_id)?;
return Ok(Some((language, configuration)));
}
}
}
Ok(None)
})
}
    /// Find the language configuration that should handle the file at
    /// `path`, matching by exact file name or by (possibly compound) file
    /// extension, and disambiguating between multiple candidates by applying
    /// their content regexes to the file's contents.
    ///
    /// # Errors
    ///
    /// Returns an error if the file must be read for disambiguation but
    /// cannot be, or if the chosen language fails to compile or load.
    pub fn language_configuration_for_file_name(
        &self,
        path: &Path,
    ) -> LoaderResult<Option<(Language, &LanguageConfiguration)>> {
        // Find all the language configurations that match this file name
        // or a suffix of the file name.
        let configuration_ids = path
            .file_name()
            .and_then(|n| n.to_str())
            .and_then(|file_name| self.language_configuration_ids_by_file_type.get(file_name))
            .or_else(|| {
                // Collect every extension segment so that compound
                // extensions (e.g. "d.ts") can match as a whole.
                let mut path = path.to_owned();
                let mut extensions = Vec::with_capacity(2);
                while let Some(extension) = path.extension() {
                    extensions.push(extension.to_str()?.to_string());
                    path = PathBuf::from(path.file_stem()?.to_os_string());
                }
                extensions.reverse();
                self.language_configuration_ids_by_file_type
                    .get(&extensions.join("."))
            });
        if let Some(configuration_ids) = configuration_ids {
            if !configuration_ids.is_empty() {
                let configuration = if configuration_ids.len() == 1 {
                    &self.language_configurations[configuration_ids[0]]
                }
                // If multiple language configurations match, then determine which
                // one to use by applying the configurations' content regexes.
                else {
                    let file_contents =
                        fs::read(path).map_err(|e| LoaderError::IO(IoError::new(e, Some(path))))?;
                    let file_contents = String::from_utf8_lossy(&file_contents);
                    // -2 so that even a penalized (-1) regex mismatch can win
                    // over "no candidate found yet".
                    let mut best_score = -2isize;
                    let mut best_configuration_id = None;
                    for configuration_id in configuration_ids {
                        let config = &self.language_configurations[*configuration_id];
                        // If the language configuration has a content regex, assign
                        // a score based on the length of the first match.
                        let score;
                        if let Some(content_regex) = &config.content_regex {
                            if let Some(mat) = content_regex.find(&file_contents) {
                                score = (mat.end() - mat.start()) as isize;
                            }
                            // If the content regex does not match, then *penalize* this
                            // language configuration, so that language configurations
                            // without content regexes are preferred over those with
                            // non-matching content regexes.
                            else {
                                score = -1;
                            }
                        } else {
                            score = 0;
                        }
                        if score > best_score {
                            best_configuration_id = Some(*configuration_id);
                            best_score = score;
                        }
                    }
                    &self.language_configurations[best_configuration_id.unwrap()]
                };
                let language = self.language_for_id(configuration.language_id)?;
                return Ok(Some((language, configuration)));
            }
        }
        Ok(None)
    }
pub fn language_configuration_for_injection_string(
&self,
string: &str,
) -> LoaderResult<Option<(Language, &LanguageConfiguration)>> {
let mut best_match_length = 0;
let mut best_match_position = None;
for (i, configuration) in self.language_configurations.iter().enumerate() {
if let Some(injection_regex) = &configuration.injection_regex {
if let Some(mat) = injection_regex.find(string) {
let length = mat.end() - mat.start();
if length > best_match_length {
best_match_position = Some(i);
best_match_length = length;
}
}
}
}
if let Some(i) = best_match_position {
let configuration = &self.language_configurations[i];
let language = self.language_for_id(configuration.language_id)?;
Ok(Some((language, configuration)))
} else {
Ok(None)
}
}
    /// Compile (if necessary) and return the [`Language`] for a previously
    /// discovered configuration.
    ///
    /// # Errors
    ///
    /// Returns an error if the language fails to compile or load.
    pub fn language_for_configuration(
        &self,
        configuration: &LanguageConfiguration,
    ) -> LoaderResult<Language> {
        self.language_for_id(configuration.language_id)
    }
    /// Compile and load the language with the given internal id, caching the
    /// result so each language is only built once.
    fn language_for_id(&self, id: usize) -> LoaderResult<Language> {
        let (path, language, externals) = &self.languages_by_id[id];
        language
            .get_or_try_init(|| {
                // Lazily compile from the grammar's `src` directory on first use.
                let src_path = path.join("src");
                self.load_language_at_path(CompileConfig::new(
                    &src_path,
                    externals.as_deref(),
                    None,
                ))
            })
            .cloned()
    }
pub fn compile_parser_at_path(
&self,
grammar_path: &Path,
output_path: PathBuf,
flags: &[&str],
) -> LoaderResult<()> {
let src_path = grammar_path.join("src");
let mut config = CompileConfig::new(&src_path, None, Some(output_path));
config.flags = flags;
self.load_language_at_path(config).map(|_| ())
}
    /// Load a language from a compile configuration, deriving the language
    /// name from the `grammar.json` next to the sources.
    ///
    /// # Errors
    ///
    /// Returns an error if `grammar.json` cannot be read or the language
    /// fails to compile or load.
    pub fn load_language_at_path(&self, mut config: CompileConfig) -> LoaderResult<Language> {
        let grammar_path = config.src_path.join("grammar.json");
        config.name = Self::grammar_json_name(&grammar_path)?;
        self.load_language_at_path_with_name(config)
    }
pub fn load_language_at_path_with_name(
&self,
mut config: CompileConfig,
) -> LoaderResult<Language> {
let mut lib_name = config.name.clone();
let language_fn_name = format!("tree_sitter_{}", config.name.replace('-', "_"));
if self.debug_build {
lib_name.push_str(".debug._");
}
if self.sanitize_build {
lib_name.push_str(".sanitize._");
config.sanitize = true;
}
if config.output_path.is_none() {
fs::create_dir_all(&self.parser_lib_path).map_err(|e| {
LoaderError::IO(IoError::new(e, Some(self.parser_lib_path.as_path())))
})?;
}
let mut recompile = self.force_rebuild || config.output_path.is_some(); // if specified, always recompile
let output_path = config.output_path.unwrap_or_else(|| {
let mut path = self.parser_lib_path.join(lib_name);
path.set_extension(env::consts::DLL_EXTENSION);
#[cfg(feature = "wasm")]
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | true |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/tags/src/tags.rs | crates/tags/src/tags.rs | #![cfg_attr(not(any(test, doctest)), doc = include_str!("../README.md"))]
pub mod c_lib;
use std::{
char,
collections::HashMap,
ffi::{CStr, CString},
mem,
ops::{ControlFlow, Range},
os::raw::c_char,
str,
sync::atomic::{AtomicUsize, Ordering},
};
use memchr::memchr;
use regex::Regex;
use streaming_iterator::StreamingIterator;
use thiserror::Error;
use tree_sitter::{
Language, LossyUtf8, ParseOptions, Parser, Point, Query, QueryCursor, QueryError,
QueryPredicateArg, Tree,
};
/// Maximum number of bytes of a line included in a tag's `line_range`.
const MAX_LINE_LEN: usize = 180;
/// Number of iterator steps between checks of the cancellation flag.
const CANCELLATION_CHECK_INTERVAL: usize = 100;
/// Contains the data needed to compute tags for code written in a
/// particular language.
#[derive(Debug)]
pub struct TagsConfiguration {
    pub language: Language,
    /// The combined locals + tags query.
    pub query: Query,
    /// Interned, NUL-terminated syntax type names.
    syntax_type_names: Vec<Box<[u8]>>,
    /// Raw pointers into `syntax_type_names` (presumably consumed by the C
    /// bindings in `c_lib` — confirm before changing ownership).
    c_syntax_type_names: Vec<*const u8>,
    /// Capture index -> syntax type / definition-vs-reference info.
    capture_map: HashMap<u32, NamedCapture>,
    doc_capture_index: Option<u32>,
    name_capture_index: Option<u32>,
    ignore_capture_index: Option<u32>,
    local_scope_capture_index: Option<u32>,
    local_definition_capture_index: Option<u32>,
    /// Index of the first pattern that belongs to the tags query; patterns
    /// before it come from the locals query.
    tags_pattern_index: usize,
    pattern_info: Vec<PatternInfo>,
}
// NOTE(review): required because of the raw pointers in
// `c_syntax_type_names`; they point into the adjacent owned
// `syntax_type_names` boxes — confirm those are never mutated after
// construction when changing this type.
unsafe impl Send for TagsConfiguration {}
unsafe impl Sync for TagsConfiguration {}
/// What a `@definition.*` / `@reference.*` capture maps to.
#[derive(Debug)]
pub struct NamedCapture {
    /// Index into the configuration's interned syntax type names.
    pub syntax_type_id: u32,
    /// `true` for `@definition.*` captures, `false` for `@reference.*`.
    pub is_definition: bool,
}
/// Reusable parsing state (parser and query cursor) for generating tags.
pub struct TagsContext {
    pub parser: Parser,
    cursor: QueryCursor,
}
/// One tag (definition or reference) extracted from a source file.
#[derive(Debug, Clone)]
pub struct Tag {
    /// Byte range of the whole tagged node, widened to include the name.
    pub range: Range<usize>,
    /// Byte range of the tag's name.
    pub name_range: Range<usize>,
    /// Byte range of the (length-limited) line containing the name.
    pub line_range: Range<usize>,
    /// Row/column span of the name.
    pub span: Range<Point>,
    /// UTF-16 column range of the name within its line.
    pub utf16_column_range: Range<usize>,
    /// Concatenated documentation text from `@doc` captures, if any.
    pub docs: Option<String>,
    pub is_definition: bool,
    pub syntax_type_id: u32,
}
/// Errors produced while building a tags configuration or generating tags.
#[derive(Debug, Error, PartialEq)]
pub enum Error {
    #[error(transparent)]
    Query(#[from] QueryError),
    #[error(transparent)]
    Regex(#[from] regex::Error),
    #[error("Cancelled")]
    Cancelled,
    #[error("Invalid language")]
    InvalidLanguage,
    #[error("Invalid capture @{0}. Expected one of: @definition.*, @reference.*, @doc, @name, @local.(scope|definition|reference).")]
    InvalidCapture(String),
}
/// Per-pattern metadata derived from query predicates and properties.
#[derive(Debug, Default)]
struct PatternInfo {
    /// Capture named by a `#select-adjacent!` predicate, if any.
    docs_adjacent_capture: Option<u32>,
    local_scope_inherits: bool,
    /// Set when the pattern has a negated `local` property predicate.
    name_must_be_non_local: bool,
    /// Regex from a `#strip!` predicate, applied to doc text.
    doc_strip_regex: Option<Regex>,
}
/// A local variable definition captured by the locals query.
#[derive(Debug)]
struct LocalDef<'a> {
    name: &'a [u8],
}
/// A lexical scope captured by the locals query, with the local definitions
/// it contains.
#[derive(Debug)]
struct LocalScope<'a> {
    /// Whether names from enclosing scopes are visible inside this scope.
    inherits: bool,
    range: Range<usize>,
    local_defs: Vec<LocalDef<'a>>,
}
/// Iterator state that converts streaming query matches into [`Tag`]s.
struct TagsIter<'a, I>
where
    I: StreamingIterator<Item = tree_sitter::QueryMatch<'a, 'a>>,
{
    matches: I,
    /// Keeps the parsed tree alive while `matches` borrows it.
    _tree: Tree,
    source: &'a [u8],
    /// Cached measurements of the previous tag's line.
    prev_line_info: Option<LineInfo>,
    config: &'a TagsConfiguration,
    cancellation_flag: Option<&'a AtomicUsize>,
    /// Counts iterations between cancellation checks.
    iter_count: usize,
    /// Buffered tags (with their pattern index), kept sorted by the name's
    /// byte range.
    tag_queue: Vec<(Tag, usize)>,
    scopes: Vec<LocalScope<'a>>,
}
/// Measurements of the most recently processed line, reused when consecutive
/// tags occur on the same line.
struct LineInfo {
    utf8_position: Point,
    utf8_byte: usize,
    utf16_column: usize,
    line_range: Range<usize>,
}
impl TagsConfiguration {
    /// Compile the locals and tags queries for `language` into a single
    /// [`Query`] and index its captures for tag generation.
    ///
    /// # Errors
    ///
    /// Returns [`Error::Query`] when a query fails to compile,
    /// [`Error::Regex`] for an invalid `#strip!` pattern, and
    /// [`Error::InvalidCapture`] for capture names outside the supported set.
    pub fn new(language: Language, tags_query: &str, locals_query: &str) -> Result<Self, Error> {
        let query = Query::new(&language, &format!("{locals_query}{tags_query}"))?;
        // The locals query is prepended; patterns whose source offset falls
        // inside the locals text are counted so that `tags_pattern_index`
        // marks where the tag patterns begin.
        let tags_query_offset = locals_query.len();
        let mut tags_pattern_index = 0;
        for i in 0..(query.pattern_count()) {
            let pattern_offset = query.start_byte_for_pattern(i);
            if pattern_offset < tags_query_offset {
                tags_pattern_index += 1;
            }
        }
        // Classify each capture name: special captures (@name, @doc, @ignore,
        // @local.*) get dedicated indices, while @definition.* and
        // @reference.* kinds are interned into `syntax_type_names`.
        let mut capture_map = HashMap::new();
        let mut syntax_type_names = Vec::new();
        let mut doc_capture_index = None;
        let mut name_capture_index = None;
        let mut ignore_capture_index = None;
        let mut local_scope_capture_index = None;
        let mut local_definition_capture_index = None;
        for (i, name) in query.capture_names().iter().enumerate() {
            match *name {
                "name" => name_capture_index = Some(i as u32),
                "ignore" => ignore_capture_index = Some(i as u32),
                "doc" => doc_capture_index = Some(i as u32),
                "local.scope" => local_scope_capture_index = Some(i as u32),
                "local.definition" => local_definition_capture_index = Some(i as u32),
                "local.reference" | "" => {}
                _ => {
                    let mut is_definition = false;
                    let kind = if name.starts_with("definition.") {
                        is_definition = true;
                        name.trim_start_matches("definition.")
                    } else if name.starts_with("reference.") {
                        name.trim_start_matches("reference.")
                    } else {
                        return Err(Error::InvalidCapture((*name).to_string()));
                    };
                    if let Ok(cstr) = CString::new(kind) {
                        // Store the kind NUL-terminated so it can be handed
                        // out through the C API without copying; reuse an
                        // existing entry when the same kind repeats.
                        let c_kind = cstr.to_bytes_with_nul().to_vec().into_boxed_slice();
                        let syntax_type_id = syntax_type_names
                            .iter()
                            .position(|n| n == &c_kind)
                            .unwrap_or_else(|| {
                                syntax_type_names.push(c_kind);
                                syntax_type_names.len() - 1
                            }) as u32;
                        capture_map.insert(
                            i as u32,
                            NamedCapture {
                                syntax_type_id,
                                is_definition,
                            },
                        );
                    }
                }
            }
        }
        let c_syntax_type_names = syntax_type_names.iter().map(|s| s.as_ptr()).collect();
        // Record, per pattern, the predicates and properties that influence
        // tag generation: `#select-adjacent!`, `#strip!`, scope-inheritance
        // settings, and negated `local` predicates.
        let pattern_info = (0..query.pattern_count())
            .map(|pattern_index| {
                let mut info = PatternInfo::default();
                for (property, is_positive) in query.property_predicates(pattern_index) {
                    if !is_positive && property.key.as_ref() == "local" {
                        info.name_must_be_non_local = true;
                    }
                }
                info.local_scope_inherits = true;
                for property in query.property_settings(pattern_index) {
                    if property.key.as_ref() == "local.scope-inherits"
                        && property
                            .value
                            .as_ref()
                            .is_some_and(|v| v.as_ref() == "false")
                    {
                        info.local_scope_inherits = false;
                    }
                }
                if let Some(doc_capture_index) = doc_capture_index {
                    for predicate in query.general_predicates(pattern_index) {
                        if predicate.args.first()
                            == Some(&QueryPredicateArg::Capture(doc_capture_index))
                        {
                            match (predicate.operator.as_ref(), predicate.args.get(1)) {
                                ("select-adjacent!", Some(QueryPredicateArg::Capture(index))) => {
                                    info.docs_adjacent_capture = Some(*index);
                                }
                                ("strip!", Some(QueryPredicateArg::String(pattern))) => {
                                    let regex = Regex::new(pattern.as_ref())?;
                                    info.doc_strip_regex = Some(regex);
                                }
                                _ => {}
                            }
                        }
                    }
                }
                Ok(info)
            })
            .collect::<Result<Vec<_>, Error>>()?;
        Ok(Self {
            language,
            query,
            syntax_type_names,
            c_syntax_type_names,
            capture_map,
            doc_capture_index,
            name_capture_index,
            ignore_capture_index,
            local_scope_capture_index,
            local_definition_capture_index,
            tags_pattern_index,
            pattern_info,
        })
    }

    /// The human-readable name of a syntax type id produced by this
    /// configuration.
    #[must_use]
    pub fn syntax_type_name(&self, id: u32) -> &str {
        // SAFETY: every entry of `syntax_type_names` was created in `new` via
        // `to_bytes_with_nul`, so each is a valid NUL-terminated C string.
        unsafe {
            let cstr = CStr::from_ptr(
                self.syntax_type_names[id as usize]
                    .as_ptr()
                    .cast::<c_char>(),
            )
            .to_bytes();
            str::from_utf8(cstr).expect("syntax type name was not valid utf-8")
        }
    }
}
impl Default for TagsContext {
    /// Equivalent to [`TagsContext::new`].
    fn default() -> Self {
        Self::new()
    }
}
impl TagsContext {
    /// Create a context with a fresh parser and query cursor.
    #[must_use]
    pub fn new() -> Self {
        Self {
            parser: Parser::new(),
            cursor: QueryCursor::new(),
        }
    }

    /// Mutable access to the underlying parser.
    pub const fn parser(&mut self) -> &mut Parser {
        &mut self.parser
    }

    /// Parse `source` and return an iterator over its tags, together with a
    /// flag that is `true` when the parsed tree contains syntax errors.
    ///
    /// Setting `cancellation_flag` to a non-zero value from another thread
    /// aborts parsing and tag iteration with [`Error::Cancelled`].
    ///
    /// # Errors
    ///
    /// Returns [`Error::InvalidLanguage`] if the configuration's language
    /// cannot be assigned to the parser, or [`Error::Cancelled`] if parsing
    /// was cancelled.
    pub fn generate_tags<'a>(
        &'a mut self,
        config: &'a TagsConfiguration,
        source: &'a [u8],
        cancellation_flag: Option<&'a AtomicUsize>,
    ) -> Result<(impl Iterator<Item = Result<Tag, Error>> + 'a, bool), Error> {
        self.parser
            .set_language(&config.language)
            .map_err(|_| Error::InvalidLanguage)?;
        self.parser.reset();
        let tree = self
            .parser
            .parse_with_options(
                &mut |i, _| {
                    if i < source.len() {
                        &source[i..]
                    } else {
                        &[]
                    }
                },
                None,
                Some(ParseOptions::new().progress_callback(&mut |_| {
                    // Abort parsing as soon as the cancellation flag flips.
                    if let Some(cancellation_flag) = cancellation_flag {
                        if cancellation_flag.load(Ordering::SeqCst) != 0 {
                            ControlFlow::Break(())
                        } else {
                            ControlFlow::Continue(())
                        }
                    } else {
                        ControlFlow::Continue(())
                    }
                })),
            )
            .ok_or(Error::Cancelled)?;
        // The `matches` iterator borrows the `Tree`, which prevents it from being
        // moved. But the tree is really just a pointer, so it's actually ok to
        // move it.
        let tree_ref = unsafe { mem::transmute::<&Tree, &'static Tree>(&tree) };
        let matches = self
            .cursor
            .matches(&config.query, tree_ref.root_node(), source);
        Ok((
            TagsIter {
                _tree: tree,
                matches,
                source,
                config,
                cancellation_flag,
                prev_line_info: None,
                tag_queue: Vec::new(),
                iter_count: 0,
                // A root scope covering the whole file, so every local
                // definition has at least one enclosing scope.
                scopes: vec![LocalScope {
                    range: 0..source.len(),
                    inherits: false,
                    local_defs: Vec::new(),
                }],
            },
            tree_ref.root_node().has_error(),
        ))
    }
}
impl<'a, I> Iterator for TagsIter<'a, I>
where
    I: StreamingIterator<Item = tree_sitter::QueryMatch<'a, 'a>>,
{
    type Item = Result<Tag, Error>;

    /// Produce the next tag. Tags are buffered in `tag_queue` (sorted by the
    /// name's byte range) and only emitted once a later match confirms that
    /// no earlier tag can still arrive.
    fn next(&mut self) -> Option<Self::Item> {
        loop {
            // Periodically check for cancellation, returning `Cancelled` error if the
            // cancellation flag was flipped.
            if let Some(cancellation_flag) = self.cancellation_flag {
                self.iter_count += 1;
                if self.iter_count >= CANCELLATION_CHECK_INTERVAL {
                    self.iter_count = 0;
                    if cancellation_flag.load(Ordering::Relaxed) != 0 {
                        return Some(Err(Error::Cancelled));
                    }
                }
            }
            // If there is a queued tag for an earlier node in the syntax tree, then pop
            // it off of the queue and return it.
            if let Some(last_entry) = self.tag_queue.last() {
                if self.tag_queue.len() > 1
                    && self.tag_queue[0].0.name_range.end < last_entry.0.name_range.start
                {
                    let tag = self.tag_queue.remove(0).0;
                    if tag.is_ignored() {
                        continue;
                    }
                    return Some(Ok(tag));
                }
            }
            // If there is another match, then compute its tag and add it to the
            // tag queue.
            if let Some(mat) = self.matches.next() {
                let pattern_info = &self.config.pattern_info[mat.pattern_index];
                // Patterns before `tags_pattern_index` come from the locals
                // query: they only maintain the scope/definition tracking used
                // for non-local name filtering, and never produce tags.
                if mat.pattern_index < self.config.tags_pattern_index {
                    for capture in mat.captures {
                        let index = Some(capture.index);
                        let range = capture.node.byte_range();
                        if index == self.config.local_scope_capture_index {
                            self.scopes.push(LocalScope {
                                range,
                                inherits: pattern_info.local_scope_inherits,
                                local_defs: Vec::new(),
                            });
                        } else if index == self.config.local_definition_capture_index {
                            // Attach the definition to the innermost enclosing scope.
                            if let Some(scope) = self.scopes.iter_mut().rev().find(|scope| {
                                scope.range.start <= range.start && scope.range.end >= range.end
                            }) {
                                scope.local_defs.push(LocalDef {
                                    name: &self.source[range.clone()],
                                });
                            }
                        }
                    }
                    continue;
                }
                // Collect the captures that contribute to this tag.
                let mut name_node = None;
                let mut doc_nodes = Vec::new();
                let mut tag_node = None;
                let mut syntax_type_id = 0;
                let mut is_definition = false;
                let mut docs_adjacent_node = None;
                let mut is_ignored = false;
                for capture in mat.captures {
                    let index = Some(capture.index);
                    if index == self.config.ignore_capture_index {
                        is_ignored = true;
                        name_node = Some(capture.node);
                    }
                    if index == self.config.pattern_info[mat.pattern_index].docs_adjacent_capture {
                        docs_adjacent_node = Some(capture.node);
                    }
                    if index == self.config.name_capture_index {
                        name_node = Some(capture.node);
                    } else if index == self.config.doc_capture_index {
                        doc_nodes.push(capture.node);
                    }
                    if let Some(named_capture) = self.config.capture_map.get(&capture.index) {
                        tag_node = Some(capture.node);
                        syntax_type_id = named_capture.syntax_type_id;
                        is_definition = named_capture.is_definition;
                    }
                }
                if let Some(name_node) = name_node {
                    let name_range = name_node.byte_range();
                    let tag;
                    if let Some(tag_node) = tag_node {
                        if name_node.has_error() {
                            continue;
                        }
                        // Skip names that resolve to a local definition when
                        // the pattern requires a non-local name.
                        if pattern_info.name_must_be_non_local {
                            let mut is_local = false;
                            for scope in self.scopes.iter().rev() {
                                if scope.range.start <= name_range.start
                                    && scope.range.end >= name_range.end
                                {
                                    if scope
                                        .local_defs
                                        .iter()
                                        .any(|d| d.name == &self.source[name_range.clone()])
                                    {
                                        is_local = true;
                                        break;
                                    }
                                    if !scope.inherits {
                                        break;
                                    }
                                }
                            }
                            if is_local {
                                continue;
                            }
                        }
                        // If needed, filter the doc nodes based on their ranges, selecting
                        // only the slice that are adjacent to some specified node.
                        let mut docs_start_index = 0;
                        if let (Some(docs_adjacent_node), false) =
                            (docs_adjacent_node, doc_nodes.is_empty())
                        {
                            docs_start_index = doc_nodes.len();
                            let mut start_row = docs_adjacent_node.start_position().row;
                            while docs_start_index > 0 {
                                let doc_node = &doc_nodes[docs_start_index - 1];
                                let prev_doc_end_row = doc_node.end_position().row;
                                if prev_doc_end_row + 1 >= start_row {
                                    docs_start_index -= 1;
                                    start_row = doc_node.start_position().row;
                                } else {
                                    break;
                                }
                            }
                        }
                        // Generate a doc string from all of the doc nodes, applying any strip
                        // regexes.
                        let mut docs = None;
                        for doc_node in &doc_nodes[docs_start_index..] {
                            if let Ok(content) = str::from_utf8(&self.source[doc_node.byte_range()])
                            {
                                let content = pattern_info.doc_strip_regex.as_ref().map_or_else(
                                    || content.to_string(),
                                    |regex| regex.replace_all(content, "").to_string(),
                                );
                                match &mut docs {
                                    None => docs = Some(content),
                                    Some(d) => {
                                        d.push('\n');
                                        d.push_str(&content);
                                    }
                                }
                            }
                        }
                        let rng = tag_node.byte_range();
                        let range = rng.start.min(name_range.start)..rng.end.max(name_range.end);
                        let span = name_node.start_position()..name_node.end_position();
                        // Compute tag properties that depend on the text of the containing line. If
                        // the previous tag occurred on the same line, then
                        // reuse results from the previous tag.
                        let mut prev_utf16_column = 0;
                        let mut prev_utf8_byte = name_range.start - span.start.column;
                        let line_info = self.prev_line_info.as_ref().and_then(|info| {
                            if info.utf8_position.row == span.start.row {
                                Some(info)
                            } else {
                                None
                            }
                        });
                        let line_range = if let Some(line_info) = line_info {
                            if line_info.utf8_position.column <= span.start.column {
                                prev_utf8_byte = line_info.utf8_byte;
                                prev_utf16_column = line_info.utf16_column;
                            }
                            line_info.line_range.clone()
                        } else {
                            self::line_range(
                                self.source,
                                name_range.start,
                                span.start,
                                MAX_LINE_LEN,
                            )
                        };
                        let utf16_start_column = prev_utf16_column
                            + utf16_len(&self.source[prev_utf8_byte..name_range.start]);
                        let utf16_end_column =
                            utf16_start_column + utf16_len(&self.source[name_range.clone()]);
                        let utf16_column_range = utf16_start_column..utf16_end_column;
                        self.prev_line_info = Some(LineInfo {
                            utf8_position: span.end,
                            utf8_byte: name_range.end,
                            utf16_column: utf16_end_column,
                            line_range: line_range.clone(),
                        });
                        tag = Tag {
                            range,
                            name_range,
                            line_range,
                            span,
                            utf16_column_range,
                            docs,
                            is_definition,
                            syntax_type_id,
                        };
                    } else if is_ignored {
                        tag = Tag::ignored(name_range);
                    } else {
                        continue;
                    }
                    // Only create one tag per node. The tag queue is sorted by node position
                    // to allow for fast lookup.
                    match self.tag_queue.binary_search_by_key(
                        &(tag.name_range.end, tag.name_range.start),
                        |(tag, _)| (tag.name_range.end, tag.name_range.start),
                    ) {
                        Ok(i) => {
                            let (existing_tag, pattern_index) = &mut self.tag_queue[i];
                            // Earlier patterns take precedence over later ones.
                            if *pattern_index > mat.pattern_index {
                                *pattern_index = mat.pattern_index;
                                *existing_tag = tag;
                            }
                        }
                        Err(i) => self.tag_queue.insert(i, (tag, mat.pattern_index)),
                    }
                }
            }
            // If there are no more matches, then drain the queue.
            else if !self.tag_queue.is_empty() {
                return Some(Ok(self.tag_queue.remove(0).0));
            } else {
                return None;
            }
        }
    }
}
impl Tag {
    /// Builds a sentinel placeholder tag. Only `name_range` is meaningful;
    /// `range` is set to `usize::MAX..usize::MAX` so that [`Tag::is_ignored`]
    /// can recognize the sentinel later.
    #[must_use]
    const fn ignored(name_range: Range<usize>) -> Self {
        Self {
            range: usize::MAX..usize::MAX,
            name_range,
            line_range: 0..0,
            span: Point::new(0, 0)..Point::new(0, 0),
            utf16_column_range: 0..0,
            docs: None,
            is_definition: false,
            syntax_type_id: 0,
        }
    }

    /// Whether this tag is the sentinel produced by [`Tag::ignored`].
    #[must_use]
    const fn is_ignored(&self) -> bool {
        self.range.start == usize::MAX
    }
}
/// Returns the byte range of the (whitespace-trimmed) line that starts at
/// column 0 of `start_point`'s row, capped at `max_line_len` bytes and cut at
/// the first newline or the last valid UTF-8 boundary.
fn line_range(
    text: &[u8],
    start_byte: usize,
    start_point: Point,
    max_line_len: usize,
) -> Range<usize> {
    // Byte offset of column 0 on the tag's line, then skip leading whitespace.
    let mut start = start_byte - start_point.column;
    while text.get(start).is_some_and(|b| b.is_ascii_whitespace()) {
        start += 1;
    }
    let budget = max_line_len.min(text.len() - start);
    let rest = &text[start..(start + budget)];
    // Stop at the first newline; otherwise, if the budget cut through a
    // multi-byte character, back up to the last valid UTF-8 boundary.
    let len = match memchr(b'\n', rest) {
        Some(newline_index) => newline_index,
        None => match str::from_utf8(rest) {
            Ok(_) => budget,
            Err(e) => e.valid_up_to(),
        },
    };
    // Drop trailing whitespace.
    let mut end = start + len;
    while end > start && text[end - 1].is_ascii_whitespace() {
        end -= 1;
    }
    start..end
}
/// Number of UTF-16 code units needed to encode `bytes`, decoding it lossily
/// chunk by chunk.
fn utf16_len(bytes: &[u8]) -> usize {
    LossyUtf8::new(bytes)
        .map(|chunk| chunk.chars().map(char::len_utf16).sum::<usize>())
        .sum()
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_get_line() {
        // "❤" is a three-byte UTF-8 character, so a small byte budget can
        // land in the middle of it; `line_range` must then cut at the last
        // valid UTF-8 boundary rather than splitting the character.
        let text = "abc\ndefg❤hij\nklmno".as_bytes();
        // Whole second line with a generous budget.
        assert_eq!(line_range(text, 5, Point::new(1, 1), 30), 4..14);
        // Budget of 6 would split "❤" — range stops before it.
        assert_eq!(line_range(text, 5, Point::new(1, 1), 6), 4..8);
        // Final line has no trailing newline.
        assert_eq!(line_range(text, 17, Point::new(2, 2), 30), 15..20);
        // Budget smaller than the line truncates it.
        assert_eq!(line_range(text, 17, Point::new(2, 2), 4), 15..19);
    }
    #[test]
    fn test_get_line_trims() {
        // Leading and trailing whitespace are excluded from the returned range.
        let text = b" foo\nbar\n";
        assert_eq!(line_range(text, 0, Point::new(0, 0), 10), 3..6);
        let text = b"\t func foo \nbar\n";
        assert_eq!(line_range(text, 0, Point::new(0, 0), 10), 2..10);
        // A larger budget still trims the trailing space before the newline.
        let r = line_range(text, 0, Point::new(0, 0), 14);
        assert_eq!(r, 2..10);
        assert_eq!(str::from_utf8(&text[r]).unwrap_or(""), "func foo");
        let r = line_range(text, 12, Point::new(1, 0), 14);
        assert_eq!(r, 12..15);
        assert_eq!(str::from_utf8(&text[r]).unwrap_or(""), "bar");
    }
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/tags/src/c_lib.rs | crates/tags/src/c_lib.rs | use std::{
collections::HashMap, ffi::CStr, fmt, os::raw::c_char, process::abort, slice, str,
sync::atomic::AtomicUsize,
};
use tree_sitter::Language;
use super::{Error, TagsConfiguration, TagsContext};
// Capacities that a `TSTagsBuffer`'s vectors are trimmed back to before each
// run (see `shrink_and_clear` in `ts_tagger_tag`), so one very large input
// does not permanently pin a large allocation.
const BUFFER_TAGS_RESERVE_CAPACITY: usize = 100;
const BUFFER_DOCS_RESERVE_CAPACITY: usize = 1024;
/// Status codes returned across the C FFI boundary by the `ts_tagger_*`
/// functions. `Ok` is the success value; the rest describe why an operation
/// failed (see `ts_tagger_add_language` / `ts_tagger_tag` for the mapping
/// from internal errors).
#[repr(C)]
#[derive(Debug, PartialEq, Eq)]
pub enum TSTagsError {
    Ok,
    UnknownScope,
    Timeout,
    InvalidLanguage,
    InvalidUtf8,
    InvalidRegex,
    InvalidQuery,
    InvalidCapture,
    Unknown,
}
/// A zero-based row/column position, laid out as a plain C struct.
#[repr(C)]
pub struct TSPoint {
    row: u32,
    column: u32,
}
/// C-compatible view of a single tag produced by `ts_tagger_tag`.
///
/// Byte offsets (`*_byte`) index into the source text that was passed to
/// `ts_tagger_tag`, except `docs_start_byte`/`docs_end_byte`, which index
/// into the buffer's shared docs storage (see `ts_tags_buffer_docs`).
#[repr(C)]
pub struct TSTag {
    pub start_byte: u32,
    pub end_byte: u32,
    pub name_start_byte: u32,
    pub name_end_byte: u32,
    pub line_start_byte: u32,
    pub line_end_byte: u32,
    pub start_point: TSPoint,
    pub end_point: TSPoint,
    // Columns of the name in UTF-16 code units, for editors that use UTF-16.
    pub utf16_start_column: u32,
    pub utf16_end_column: u32,
    pub docs_start_byte: u32,
    pub docs_end_byte: u32,
    pub syntax_type_id: u32,
    pub is_definition: bool,
}
/// Registry mapping scope names to their tagging configurations, populated
/// via `ts_tagger_add_language` and consulted by `ts_tagger_tag`.
pub struct TSTagger {
    languages: HashMap<String, TagsConfiguration>,
}
/// Reusable output buffer for `ts_tagger_tag`: the generated tags, their
/// concatenated doc strings, and whether a parse error was encountered.
pub struct TSTagsBuffer {
    // Reused tagging context (parser state) across calls.
    context: TagsContext,
    tags: Vec<TSTag>,
    // All doc strings back to back; each TSTag slices into this via
    // `docs_start_byte..docs_end_byte`.
    docs: Vec<u8>,
    errors_present: bool,
}
/// Allocate a new, empty [`TSTagger`]. The caller owns the returned pointer
/// and must release it with [`ts_tagger_delete`].
#[no_mangle]
pub extern "C" fn ts_tagger_new() -> *mut TSTagger {
    let tagger = TSTagger {
        languages: HashMap::new(),
    };
    Box::into_raw(Box::new(tagger))
}
/// Delete a [`TSTagger`], reclaiming its memory.
///
/// # Safety
///
/// `this` must be non-null and a valid pointer to a [`TSTagger`] instance.
#[no_mangle]
pub unsafe extern "C" fn ts_tagger_delete(this: *mut TSTagger) {
    // Reconstitute the Box so its destructor runs when it goes out of scope.
    let tagger = Box::from_raw(this);
    drop(tagger);
}
/// Add a language to a [`TSTagger`].
///
/// Returns a [`TSTagsError`] indicating whether the operation was successful or not.
///
/// # Safety
///
/// `this` must be non-null and a valid pointer to a [`TSTagger`] instance.
/// `scope_name` must be non-null and a valid pointer to a null-terminated string.
/// `tags_query` must be non-null and a valid pointer to a string; `locals_query`
/// may be null, in which case it is treated as an empty query.
///
/// The caller must ensure that the lengths of `tags_query` and `locals_query` are correct.
#[no_mangle]
pub unsafe extern "C" fn ts_tagger_add_language(
    this: *mut TSTagger,
    scope_name: *const c_char,
    language: Language,
    tags_query: *const u8,
    locals_query: *const u8,
    tags_query_len: u32,
    locals_query_len: u32,
) -> TSTagsError {
    let tagger = unwrap_mut_ptr(this);
    let scope_name = unwrap(CStr::from_ptr(scope_name).to_str());
    let tags_query = slice::from_raw_parts(tags_query, tags_query_len as usize);
    // A null locals query is allowed and treated as empty.
    let locals_query = if !locals_query.is_null() {
        slice::from_raw_parts(locals_query, locals_query_len as usize)
    } else {
        &[]
    };
    let Ok(tags_query) = str::from_utf8(tags_query) else {
        return TSTagsError::InvalidUtf8;
    };
    let Ok(locals_query) = str::from_utf8(locals_query) else {
        return TSTagsError::InvalidUtf8;
    };
    // Map internal configuration errors to the C error codes.
    match TagsConfiguration::new(language, tags_query, locals_query) {
        Ok(c) => {
            tagger.languages.insert(scope_name.to_string(), c);
            TSTagsError::Ok
        }
        Err(Error::Query(_)) => TSTagsError::InvalidQuery,
        Err(Error::Regex(_)) => TSTagsError::InvalidRegex,
        Err(Error::Cancelled) => TSTagsError::Timeout,
        Err(Error::InvalidLanguage) => TSTagsError::InvalidLanguage,
        Err(Error::InvalidCapture(_)) => TSTagsError::InvalidCapture,
    }
}
/// Tags some source code.
///
/// Returns a [`TSTagsError`] indicating whether the operation was successful or not.
/// On success, the results are stored in `output`; on failure `output`'s
/// contents are cleared.
///
/// # Safety
///
/// `this` must be a non-null valid pointer to a [`TSTagger`] instance.
/// `scope_name` must be a non-null valid pointer to a null-terminated string.
/// `source_code` must be a non-null valid pointer to a slice of bytes.
/// `output` must be a non-null valid pointer to a [`TSTagsBuffer`] instance.
/// `cancellation_flag` must be a non-null valid pointer to an [`AtomicUsize`] instance.
#[no_mangle]
pub unsafe extern "C" fn ts_tagger_tag(
    this: *mut TSTagger,
    scope_name: *const c_char,
    source_code: *const u8,
    source_code_len: u32,
    output: *mut TSTagsBuffer,
    cancellation_flag: *const AtomicUsize,
) -> TSTagsError {
    let tagger = unwrap_mut_ptr(this);
    let buffer = unwrap_mut_ptr(output);
    let scope_name = unwrap(CStr::from_ptr(scope_name).to_str());
    if let Some(config) = tagger.languages.get(scope_name) {
        // Reuse the output buffer, but first cap how much memory is retained
        // from earlier runs.
        shrink_and_clear(&mut buffer.tags, BUFFER_TAGS_RESERVE_CAPACITY);
        shrink_and_clear(&mut buffer.docs, BUFFER_DOCS_RESERVE_CAPACITY);
        let source_code = slice::from_raw_parts(source_code, source_code_len as usize);
        let cancellation_flag = cancellation_flag.as_ref();
        let tags = match buffer
            .context
            .generate_tags(config, source_code, cancellation_flag)
        {
            Ok((tags, found_error)) => {
                buffer.errors_present = found_error;
                tags
            }
            Err(e) => {
                return match e {
                    Error::InvalidLanguage => TSTagsError::InvalidLanguage,
                    // All other generation failures are reported as timeouts.
                    _ => TSTagsError::Timeout,
                }
            }
        };
        for tag in tags {
            // A mid-iteration error (e.g. cancellation) invalidates any
            // partial results, so clear the buffer before bailing out.
            let Ok(tag) = tag else {
                buffer.tags.clear();
                buffer.docs.clear();
                return TSTagsError::Timeout;
            };
            // Append this tag's docs to the shared docs storage; the TSTag
            // records the slice it occupies.
            let prev_docs_len = buffer.docs.len();
            if let Some(docs) = tag.docs {
                buffer.docs.extend_from_slice(docs.as_bytes());
            }
            buffer.tags.push(TSTag {
                start_byte: tag.range.start as u32,
                end_byte: tag.range.end as u32,
                name_start_byte: tag.name_range.start as u32,
                name_end_byte: tag.name_range.end as u32,
                line_start_byte: tag.line_range.start as u32,
                line_end_byte: tag.line_range.end as u32,
                start_point: TSPoint {
                    row: tag.span.start.row as u32,
                    column: tag.span.start.column as u32,
                },
                end_point: TSPoint {
                    row: tag.span.end.row as u32,
                    column: tag.span.end.column as u32,
                },
                utf16_start_column: tag.utf16_column_range.start as u32,
                utf16_end_column: tag.utf16_column_range.end as u32,
                docs_start_byte: prev_docs_len as u32,
                docs_end_byte: buffer.docs.len() as u32,
                syntax_type_id: tag.syntax_type_id,
                is_definition: tag.is_definition,
            });
        }
        TSTagsError::Ok
    } else {
        TSTagsError::UnknownScope
    }
}
/// Allocate a new [`TSTagsBuffer`] with the default reserve capacities. The
/// caller owns the returned pointer and must release it with
/// [`ts_tags_buffer_delete`].
#[no_mangle]
pub extern "C" fn ts_tags_buffer_new() -> *mut TSTagsBuffer {
    let buffer = TSTagsBuffer {
        context: TagsContext::new(),
        tags: Vec::with_capacity(BUFFER_TAGS_RESERVE_CAPACITY),
        docs: Vec::with_capacity(BUFFER_DOCS_RESERVE_CAPACITY),
        errors_present: false,
    };
    Box::into_raw(Box::new(buffer))
}
/// Delete a [`TSTagsBuffer`], reclaiming its memory.
///
/// # Safety
///
/// `this` must be non-null and a valid pointer to a [`TSTagsBuffer`] instance created by
/// [`ts_tags_buffer_new`].
#[no_mangle]
pub unsafe extern "C" fn ts_tags_buffer_delete(this: *mut TSTagsBuffer) {
    // Reconstitute the Box so its destructor runs when it goes out of scope.
    let buffer = Box::from_raw(this);
    drop(buffer);
}
/// Get the tags from a [`TSTagsBuffer`].
///
/// The number of elements behind the returned pointer is given by
/// [`ts_tags_buffer_tags_len`].
///
/// # Safety
///
/// `this` must be non-null and a valid pointer to a [`TSTagsBuffer`] instance created by
/// [`ts_tags_buffer_new`].
///
/// The caller must ensure that the returned pointer is not used after the [`TSTagsBuffer`]
/// is deleted with [`ts_tags_buffer_delete`], else the data will point to garbage.
#[no_mangle]
pub unsafe extern "C" fn ts_tags_buffer_tags(this: *const TSTagsBuffer) -> *const TSTag {
    unwrap_ptr(this).tags.as_ptr()
}
/// Get the number of tags in a [`TSTagsBuffer`].
///
/// Pairs with [`ts_tags_buffer_tags`] to describe the tags array.
///
/// # Safety
///
/// `this` must be non-null and a valid pointer to a [`TSTagsBuffer`] instance.
#[no_mangle]
pub unsafe extern "C" fn ts_tags_buffer_tags_len(this: *const TSTagsBuffer) -> u32 {
    unwrap_ptr(this).tags.len() as u32
}
/// Get the documentation strings from a [`TSTagsBuffer`].
///
/// All doc strings are stored back to back; each `TSTag` identifies its own
/// slice via `docs_start_byte`/`docs_end_byte`.
///
/// # Safety
///
/// `this` must be non-null and a valid pointer to a [`TSTagsBuffer`] instance created by
/// [`ts_tags_buffer_new`].
///
/// The caller must ensure that the returned pointer is not used after the [`TSTagsBuffer`]
/// is deleted with [`ts_tags_buffer_delete`], else the data will point to garbage.
///
/// The returned pointer points to a C-style string.
/// To get the length of the string, use [`ts_tags_buffer_docs_len`].
#[no_mangle]
pub unsafe extern "C" fn ts_tags_buffer_docs(this: *const TSTagsBuffer) -> *const c_char {
    unwrap_ptr(this).docs.as_ptr().cast::<c_char>()
}
/// Get the length of the documentation strings in a [`TSTagsBuffer`].
///
/// Pairs with [`ts_tags_buffer_docs`] to describe the docs storage.
///
/// # Safety
///
/// `this` must be non-null and a valid pointer to a [`TSTagsBuffer`] instance created by
/// [`ts_tags_buffer_new`].
#[no_mangle]
pub unsafe extern "C" fn ts_tags_buffer_docs_len(this: *const TSTagsBuffer) -> u32 {
    unwrap_ptr(this).docs.len() as u32
}
/// Get whether or not a [`TSTagsBuffer`] contains any parse errors.
///
/// Reflects the result of the most recent [`ts_tagger_tag`] call that used
/// this buffer.
///
/// # Safety
///
/// `this` must be non-null and a valid pointer to a [`TSTagsBuffer`] instance created by
/// [`ts_tags_buffer_new`].
#[no_mangle]
pub unsafe extern "C" fn ts_tags_buffer_found_parse_error(this: *const TSTagsBuffer) -> bool {
    unwrap_ptr(this).errors_present
}
/// Get the syntax kinds for a given scope name.
///
/// Returns a pointer to an array of null-terminated strings and writes the
/// array length through `len`. If the scope name is unknown, `*len` is set to
/// zero and a null pointer is returned.
///
/// # Safety
///
/// `this` must be non-null and a valid pointer to a [`TSTagger`] instance created by
/// [`ts_tagger_new`].
/// `scope_name` must be non-null and a valid pointer to a null-terminated string.
/// `len` must be non-null and a valid pointer to a `u32`.
///
/// The caller must ensure that the returned pointer is not used after the [`TSTagger`]
/// is deleted with [`ts_tagger_delete`], else the data will point to garbage.
///
/// The returned pointer points to a C-style string array.
#[no_mangle]
pub unsafe extern "C" fn ts_tagger_syntax_kinds_for_scope_name(
    this: *mut TSTagger,
    scope_name: *const c_char,
    len: *mut u32,
) -> *const *const c_char {
    let tagger = unwrap_mut_ptr(this);
    let scope_name = unwrap(CStr::from_ptr(scope_name).to_str());
    let len = unwrap_mut_ptr(len);
    // Default to an empty result; overwritten when the scope is known.
    *len = 0;
    match tagger.languages.get(scope_name) {
        Some(config) => {
            *len = config.c_syntax_type_names.len() as u32;
            config.c_syntax_type_names.as_ptr().cast::<*const c_char>()
        }
        None => std::ptr::null(),
    }
}
/// Converts a raw const pointer into a reference, aborting the process with a
/// diagnostic if the pointer is null.
unsafe fn unwrap_ptr<'a, T>(result: *const T) -> &'a T {
    match result.as_ref() {
        Some(reference) => reference,
        None => {
            eprintln!("{}:{} - pointer must not be null", file!(), line!());
            abort();
        }
    }
}
/// Converts a raw mutable pointer into a mutable reference, aborting the
/// process with a diagnostic if the pointer is null.
unsafe fn unwrap_mut_ptr<'a, T>(result: *mut T) -> &'a mut T {
    match result.as_mut() {
        Some(reference) => reference,
        None => {
            eprintln!("{}:{} - pointer must not be null", file!(), line!());
            abort();
        }
    }
}
/// Unwraps a `Result`, aborting the process with a printed error message on
/// failure (used at FFI boundaries where panicking would be unsound).
fn unwrap<T, E: fmt::Display>(result: Result<T, E>) -> T {
    match result {
        Ok(value) => value,
        Err(error) => {
            eprintln!("tree-sitter tag error: {error}");
            abort();
        }
    }
}
/// Empties `buffer` for reuse. When the buffer had grown beyond
/// `max_retained` elements, its allocation is shrunk first so a single large
/// input does not pin a large allocation indefinitely.
fn shrink_and_clear<T>(buffer: &mut Vec<T>, max_retained: usize) {
    let oversized = buffer.len() > max_retained;
    if oversized {
        buffer.truncate(max_retained);
        buffer.shrink_to_fit();
    }
    buffer.clear();
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/xtask/src/test.rs | crates/xtask/src/test.rs | use std::{
env,
path::Path,
process::{Command, Stdio},
};
use anyhow::{anyhow, Result};
use regex::Regex;
use crate::{bail_on_err, Test};
/// Runs the workspace test suite, translating the xtask CLI flags into
/// environment variables and `cargo test` arguments.
///
/// With `--g`, the test binary is built but not run, located via `jq` over
/// cargo's JSON build messages, and launched under `lldb` instead.
///
/// # Errors
///
/// Returns an error if any invoked tool (`cc`, `rustup`, `cargo`, `jq`,
/// `lldb`) fails to run or exits unsuccessfully, or if the active toolchain
/// string cannot be parsed.
pub fn run(args: &Test) -> Result<()> {
    let test_flags = if args.address_sanitizer {
        env::set_var("CFLAGS", "-fsanitize=undefined,address");
        // When the Tree-sitter C library is compiled with the address sanitizer, the address
        // sanitizer runtime library needs to be linked into the final test executable. When
        // using Xcode clang, the Rust linker doesn't know where to find that library, so we
        // need to specify linker flags directly.
        let output = Command::new("cc").arg("-print-runtime-dir").output()?;
        bail_on_err(&output, "Failed to get clang runtime dir")?;
        let runtime_dir = String::from_utf8(output.stdout)?;
        if runtime_dir.contains("/Xcode.app/") {
            env::set_var(
                "RUSTFLAGS",
                format!(
                    "-C link-arg=-L{runtime_dir} -C link-arg=-lclang_rt.asan_osx_dynamic -C link-arg=-Wl,-rpath,{runtime_dir}"
                ),
            );
        }
        // Specify a `--target` explicitly. This is required for address sanitizer support.
        let output = Command::new("rustup")
            .arg("show")
            .arg("active-toolchain")
            .output()?;
        bail_on_err(&output, "Failed to get active Rust toolchain")?;
        let toolchain = String::from_utf8(output.stdout)?;
        // Extract the target triple from e.g. "stable-x86_64-apple-darwin".
        let re = Regex::new(r"(stable|beta|nightly)-([_a-z0-9-]+).*")?;
        let captures = re
            .captures(&toolchain)
            .ok_or_else(|| anyhow!("Failed to parse toolchain '{toolchain}'"))?;
        let current_target = captures.get(2).unwrap().as_str();
        format!("--target={current_target}")
    } else {
        String::new()
    };
    // Forward the filter/debug options to the test harness via env vars.
    if let Some(language) = &args.language {
        env::set_var("TREE_SITTER_LANGUAGE", language);
    }
    if let Some(example) = &args.example {
        env::set_var("TREE_SITTER_EXAMPLE_INCLUDE", example);
    }
    if let Some(seed) = args.seed {
        env::set_var("TREE_SITTER_SEED", seed.to_string());
    }
    if let Some(iterations) = args.iterations {
        env::set_var("TREE_SITTER_ITERATIONS", iterations.to_string());
    }
    if args.debug {
        env::set_var("TREE_SITTER_LOG", "1");
    }
    if args.debug_graph {
        env::set_var("TREE_SITTER_LOG_GRAPHS", "1");
    }
    if args.g {
        // Build the test binary without running it, then find its path in the
        // JSON build messages with `jq` and launch it under lldb.
        let mut cargo_cmd = Command::new("cargo");
        cargo_cmd
            .arg("test")
            .arg("--all")
            .arg(&test_flags)
            .arg("--no-run")
            .arg("--message-format=json");
        let cargo_cmd = cargo_cmd.stdout(Stdio::piped()).spawn()?;
        let jq_cmd = Command::new("jq")
            .arg("-rs")
            .arg(r#"map(select(.target.name == "tree_sitter_cli" and .executable))[0].executable"#)
            .stdin(cargo_cmd.stdout.unwrap())
            .output()?;
        let test_binary = String::from_utf8(jq_cmd.stdout)?;
        let mut lldb_cmd = Command::new("lldb");
        lldb_cmd.arg(test_binary.trim()).arg("--").args(&args.args);
        bail_on_err(
            &lldb_cmd.spawn()?.wait_with_output()?,
            &format!("Failed to run {lldb_cmd:?}"),
        )?;
    } else {
        let mut cargo_cmd = Command::new("cargo");
        cargo_cmd.arg("test").arg("--all");
        if args.wasm {
            cargo_cmd.arg("--features").arg("wasm");
        }
        if !test_flags.is_empty() {
            cargo_cmd.arg(&test_flags);
        }
        cargo_cmd.args(&args.args);
        if args.nocapture {
            // On non-Windows the harness needs the `--` separator before
            // harness-only flags.
            #[cfg(not(target_os = "windows"))]
            cargo_cmd.arg("--");
            cargo_cmd.arg("--nocapture");
        }
        bail_on_err(
            &cargo_cmd.spawn()?.wait_with_output()?,
            &format!("Failed to run {cargo_cmd:?}"),
        )?;
    }
    Ok(())
}
/// Runs the web binding's test suite with npm, installing the test
/// dependencies first if they are missing.
///
/// # Errors
///
/// Returns an error if changing directory fails or if `npm install` /
/// `npm test` fails to run or exits unsuccessfully.
pub fn run_wasm() -> Result<()> {
    std::env::set_current_dir("lib/binding_web")?;
    let node_modules_dir = Path::new("node_modules");
    // npm is a batch script on Windows.
    let npm = if cfg!(target_os = "windows") {
        "npm.cmd"
    } else {
        "npm"
    };
    if !node_modules_dir.join("chai").exists() || !node_modules_dir.join("mocha").exists() {
        println!("Installing test dependencies...");
        let output = Command::new(npm).arg("install").output()?;
        bail_on_err(&output, "Failed to install test dependencies")?;
    }
    // NOTE(review): `spawn()` inherits stdio, so the child's output likely
    // goes straight to the terminal and `output.stdout` below is empty —
    // confirm whether the echo loop further down actually prints anything.
    let child = Command::new(npm).arg("test").spawn()?;
    let output = child.wait_with_output()?;
    bail_on_err(&output, &format!("Failed to run `{npm} test`"))?;
    // Display test results
    let output = String::from_utf8_lossy(&output.stdout);
    for line in output.lines() {
        println!("{line}");
    }
    Ok(())
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/xtask/src/benchmark.rs | crates/xtask/src/benchmark.rs | use anyhow::Result;
use crate::{bail_on_err, Benchmark};
/// Runs the CLI's `benchmark` bench target, forwarding the xtask CLI options
/// to the benchmark binary via `TREE_SITTER_BENCHMARK_*` environment
/// variables.
///
/// With `--debug`, the bench binary is only built (`--no-run`) and its path
/// is printed so it can be launched under a debugger.
///
/// # Errors
///
/// Returns an error if `cargo bench` fails to run or exits unsuccessfully,
/// if its JSON build output cannot be parsed, or if no benchmark executable
/// is found in that output.
pub fn run(args: &Benchmark) -> Result<()> {
    if let Some(ref example) = args.example_file_name {
        std::env::set_var("TREE_SITTER_BENCHMARK_EXAMPLE_FILTER", example);
    }
    if let Some(ref language) = args.language {
        std::env::set_var("TREE_SITTER_BENCHMARK_LANGUAGE_FILTER", language);
    }
    // 5 is the benchmark's built-in repetition count, so only override the
    // environment when the caller asked for something different.
    if args.repetition_count != 5 {
        std::env::set_var(
            "TREE_SITTER_BENCHMARK_REPETITION_COUNT",
            args.repetition_count.to_string(),
        );
    }
    if args.debug {
        // Build the bench binary without running it, emitting JSON build
        // messages so the executable path can be extracted below.
        let output = std::process::Command::new("cargo")
            .arg("bench")
            .arg("benchmark")
            .arg("-p")
            .arg("tree-sitter-cli")
            .arg("--no-run")
            .arg("--message-format=json")
            .spawn()?
            .wait_with_output()?;
        bail_on_err(&output, "Failed to run `cargo bench`")?;
        let json_output = serde_json::from_slice::<serde_json::Value>(&output.stdout)?;
        // Find the first build message for the `benchmark` target that
        // carries an executable path. (Looking the field up once avoids the
        // previous redundant double lookup of `executable`.)
        let test_binary = json_output
            .as_array()
            .ok_or_else(|| anyhow::anyhow!("Invalid JSON output"))?
            .iter()
            .find_map(|message| {
                let is_benchmark_target = message
                    .get("target")
                    .and_then(|target| target.get("name"))
                    .and_then(|name| name.as_str())
                    .is_some_and(|name| name == "benchmark");
                if is_benchmark_target {
                    message
                        .get("executable")
                        .and_then(|executable| executable.as_str())
                } else {
                    None
                }
            })
            .ok_or_else(|| anyhow::anyhow!("Failed to find benchmark executable"))?;
        println!("{test_binary}");
    } else {
        let status = std::process::Command::new("cargo")
            .arg("bench")
            .arg("benchmark")
            .arg("-p")
            .arg("tree-sitter-cli")
            .status()?;
        if !status.success() {
            anyhow::bail!("Failed to run `cargo bench`");
        }
    }
    Ok(())
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/xtask/src/build_wasm.rs | crates/xtask/src/build_wasm.rs | use std::{
collections::HashSet,
ffi::{OsStr, OsString},
fmt::Write,
fs,
path::{Path, PathBuf},
process::Command,
time::Duration,
};
use anyhow::{anyhow, Result};
use etcetera::BaseStrategy as _;
use indoc::indoc;
use notify::{
event::{AccessKind, AccessMode},
EventKind, RecursiveMode,
};
use notify_debouncer_full::new_debouncer;
use tree_sitter_loader::{IoError, LoaderError, WasiSDKClangError};
use crate::{
bail_on_err, embed_sources::embed_sources_in_map, watch_wasm, BuildWasm, EMSCRIPTEN_TAG,
};
/// How the `emcc` compiler will be invoked: directly on the host, or inside
/// a Docker/Podman container running the Emscripten image.
#[derive(PartialEq, Eq)]
enum EmccSource {
    Native,
    Docker,
    Podman,
}
// Emscripten runtime helpers that must remain reachable from JavaScript
// (passed to emcc as EXPORTED_RUNTIME_METHODS in `run_wasm`).
const EXPORTED_RUNTIME_METHODS: [&str; 20] = [
    "AsciiToString",
    "stringToUTF8",
    "UTF8ToString",
    "lengthBytesUTF8",
    "stringToUTF16",
    "loadWebAssemblyModule",
    "getValue",
    "setValue",
    "HEAPF32",
    "HEAPF64",
    "HEAP_DATA_VIEW",
    "HEAP8",
    "HEAPU8",
    "HEAP16",
    "HEAPU16",
    "HEAP32",
    "HEAPU32",
    "HEAP64",
    "HEAPU64",
    "LE_HEAP_STORE_I64",
];
// Pinned wasi-sdk release, shared with the loader crate via an included file.
const WASI_SDK_VERSION: &str = include_str!("../../loader/wasi-sdk-version").trim_ascii();
/// Compiles the web `tree-sitter` library to Wasm with Emscripten, invoking
/// `emcc` either natively or inside a Docker/Podman container.
///
/// # Errors
///
/// Returns an error if none of emcc/docker/podman is available, if reading
/// the export lists fails, or if the compilation itself fails.
pub fn run_wasm(args: &BuildWasm) -> Result<()> {
    let mut emscripten_flags = if args.debug {
        vec!["-O0", "--minify", "0"]
    } else {
        vec!["-O3", "--minify", "0"]
    };
    if args.debug {
        emscripten_flags.extend(["-s", "ASSERTIONS=1", "-s", "SAFE_HEAP=1", "-g"]);
    }
    if args.verbose {
        emscripten_flags.extend(["-s", "VERBOSE=1", "-v"]);
    }
    let emcc_name = if cfg!(windows) { "emcc.bat" } else { "emcc" };
    // Order of preference: emscripten > docker > podman > error
    let source = if !args.docker && Command::new(emcc_name).output().is_ok() {
        EmccSource::Native
    } else if Command::new("docker")
        .output()
        .is_ok_and(|out| out.status.success())
    {
        EmccSource::Docker
    } else if Command::new("podman")
        .arg("--version")
        .output()
        .is_ok_and(|out| out.status.success())
    {
        EmccSource::Podman
    } else {
        return Err(anyhow!(
            "You must have either emcc, docker, or podman on your PATH to run this command"
        ));
    };
    let mut command = match source {
        EmccSource::Native => Command::new(emcc_name),
        EmccSource::Docker | EmccSource::Podman => {
            let mut command = match source {
                EmccSource::Docker => Command::new("docker"),
                EmccSource::Podman => Command::new("podman"),
                _ => unreachable!(),
            };
            command.args(["run", "--rm"]);
            // Mount the root directory as a volume, which is the repo root
            let mut volume_string = OsString::from(std::env::current_dir().unwrap());
            volume_string.push(":/src:Z");
            command.args([OsStr::new("--volume"), &volume_string]);
            // In case `docker` is an alias to `podman`, ensure that podman
            // mounts the current directory as writable by the container
            // user which has the same uid as the host user. Setting the
            // podman-specific variable is more reliable than attempting to
            // detect whether `docker` is an alias for `podman`.
            // see https://docs.podman.io/en/latest/markdown/podman-run.1.html#userns-mode
            command.env("PODMAN_USERNS", "keep-id");
            // Get the current user id so that files created in the docker container will have
            // the same owner.
            #[cfg(unix)]
            {
                #[link(name = "c")]
                extern "C" {
                    fn getuid() -> u32;
                }
                // don't need to set user for podman since PODMAN_USERNS=keep-id is already set
                if source == EmccSource::Docker {
                    let user_id = unsafe { getuid() };
                    command.args(["--user", &user_id.to_string()]);
                }
            };
            // Run `emcc` in a container using the `emscripten-slim` image
            command.args([EMSCRIPTEN_TAG, "emcc"]);
            command
        }
    };
    fs::create_dir_all("target/scratch").unwrap();
    // Build the EXPORTED_FUNCTIONS list: the C stdlib symbols plus the web
    // binding's own export list, each prefixed with `_` as emcc expects.
    let exported_functions = format!(
        "{}{}",
        fs::read_to_string("lib/src/wasm/stdlib-symbols.txt")?,
        fs::read_to_string("lib/binding_web/lib/exports.txt")?
    )
    .replace('"', "")
    .lines()
    .fold(String::new(), |mut output, line| {
        let _ = write!(output, "_{line}");
        output
    })
    .trim_end_matches(',')
    .to_string();
    let exported_functions = format!("EXPORTED_FUNCTIONS={exported_functions}");
    let exported_runtime_methods = format!(
        "EXPORTED_RUNTIME_METHODS={}",
        EXPORTED_RUNTIME_METHODS.join(",")
    );
    // Clean up old files from prior runs
    for file in [
        "web-tree-sitter.mjs",
        "web-tree-sitter.cjs",
        "web-tree-sitter.wasm",
        "web-tree-sitter.wasm.map",
    ] {
        fs::remove_file(PathBuf::from("lib/binding_web/lib").join(file)).ok();
    }
    if !args.cjs {
        emscripten_flags.extend(["-s", "EXPORT_ES6=1"]);
    }
    // Expands to the output path with the requested extension (.mjs or .cjs).
    macro_rules! binding_file {
        ($ext:literal) => {
            concat!("lib/binding_web/lib/web-tree-sitter", $ext)
        };
    }
    #[rustfmt::skip]
    emscripten_flags.extend([
        "-gsource-map=inline",
        "-fno-exceptions",
        "-std=c11",
        "-s", "WASM=1",
        "-s", "MODULARIZE=1",
        "-s", "INITIAL_MEMORY=33554432",
        "-s", "ALLOW_MEMORY_GROWTH=1",
        "-s", "SUPPORT_BIG_ENDIAN=1",
        "-s", "WASM_BIGINT=1",
        "-s", "MAIN_MODULE=2",
        "-s", "FILESYSTEM=0",
        "-s", "NODEJS_CATCH_EXIT=0",
        "-s", "NODEJS_CATCH_REJECTION=0",
        "-s", &exported_functions,
        "-s", &exported_runtime_methods,
        "-D", "fprintf(...)=",
        "-D", "printf(...)=",
        "-D", "NDEBUG=",
        "-D", "_POSIX_C_SOURCE=200112L",
        "-D", "_DEFAULT_SOURCE=",
        "-D", "_DARWIN_C_SOURCE=",
        "-I", "lib/src",
        "-I", "lib/include",
        "--js-library", "lib/binding_web/lib/imports.js",
        "--pre-js", "lib/binding_web/lib/prefix.js",
        "-o", if args.cjs { binding_file!(".cjs") } else { binding_file!(".mjs") },
        "lib/src/lib.c",
        "lib/binding_web/lib/tree-sitter.c",
    ]);
    if args.emit_tsd {
        emscripten_flags.extend(["--emit-tsd", "web-tree-sitter.d.ts"]);
    }
    let command = command.args(&emscripten_flags);
    // Either rebuild on every source change, or build once.
    if args.watch {
        watch_wasm!(|| build_wasm(command, args.emit_tsd));
    } else {
        build_wasm(command, args.emit_tsd)?;
    }
    Ok(())
}
/// Runs the prepared `emcc` command and post-processes its outputs:
/// optionally tightening the generated TypeScript declaration file, and
/// embedding source contents into the emitted source map.
fn build_wasm(cmd: &mut Command, edit_tsd: bool) -> Result<()> {
    bail_on_err(
        &cmd.spawn()?.wait_with_output()?,
        "Failed to compile the Tree-sitter Wasm library",
    )?;
    if edit_tsd {
        let file = "lib/binding_web/lib/web-tree-sitter.d.ts";
        // Emscripten's --emit-tsd output uses `any` in many signatures;
        // rewrite the known ones to precise types.
        let content = fs::read_to_string(file)?
            .replace("Automatically generated", "Automatically @generated")
            .replace(
                "AsciiToString(ptr: any): string",
                "AsciiToString(ptr: number): string",
            )
            .replace(
                "stringToUTF8(str: any, outPtr: any, maxBytesToWrite: any): any",
                "stringToUTF8(str: string, outPtr: number, maxBytesToWrite: number): number",
            )
            .replace(
                "UTF8ToString(ptr: number, maxBytesToRead?: number | undefined): string",
                "UTF8ToString(ptr: number, maxBytesToRead?: number): string",
            )
            .replace(
                "lengthBytesUTF8(str: any): number",
                "lengthBytesUTF8(str: string): number",
            )
            .replace(
                "stringToUTF16(str: any, outPtr: any, maxBytesToWrite: any): number",
                "stringToUTF16(str: string, outPtr: number, maxBytesToWrite: number): number",
            )
            .replace(
                concat!(
                    "loadWebAssemblyModule(binary: any, flags: any, libName?: string | ",
                    "undefined, localScope?: any | undefined, handle?: number | undefined): any"
                ),
                concat!(
                    "loadWebAssemblyModule(binary: Uint8Array | WebAssembly.Module, flags: Record<string, boolean>,",
                    " libName?: string, localScope?: Record<string, unknown>, handle?: number):",
                    " Promise<Record<string, () => number>>"
                ),
            )
            .replace(
                "getValue(ptr: number, type?: string): any",
                "getValue(ptr: number, type?: string): number",
            )
            .replace("HEAPF32: any", "HEAPF32: Float32Array")
            .replace("HEAPF64: any", "HEAPF64: Float64Array")
            .replace("HEAP_DATA_VIEW: any", "HEAP_DATA_VIEW: DataView")
            .replace("HEAP8: any", "HEAP8: Int8Array")
            .replace("HEAPU8: any", "HEAPU8: Uint8Array")
            .replace("HEAP16: any", "HEAP16: Int16Array")
            .replace("HEAPU16: any", "HEAPU16: Uint16Array")
            .replace("HEAP32: any", "HEAP32: Int32Array")
            .replace("HEAPU32: any", "HEAPU32: Uint32Array")
            .replace("HEAP64: any", "HEAP64: BigInt64Array")
            .replace("HEAPU64: any", "HEAPU64: BigUint64Array")
            .replace("BigInt;", "bigint;")
            .replace("BigInt)", "bigint)")
            .replace(
                "WasmModule & typeof RuntimeExports;",
                indoc! {"
                WasmModule & typeof RuntimeExports & {
                    currentParseCallback: ((index: number, position: {row: number, column: number}) => string | undefined) | null;
                    currentLogCallback: ((message: string, isLex: boolean) => void) | null;
                    currentProgressCallback: ((state: {currentOffset: number, hasError: boolean}) => void) | null;
                    currentQueryProgressCallback: ((state: {currentOffset: number}) => void) | null;
                };
                "},
            )
            .replace(
                "MainModuleFactory (options?: unknown): Promise<MainModule>",
                "MainModuleFactory(options?: Partial<EmscriptenModule>): Promise<MainModule>",
            );
        fs::write(file, content)?;
    }
    // Post-process the source map to embed source content for optimized builds
    let map_path = Path::new("lib")
        .join("binding_web")
        .join("lib")
        .join("web-tree-sitter.wasm.map");
    if map_path.exists() {
        // Embedding failures are non-fatal: the build artifact is still usable.
        if let Err(e) = embed_sources_in_map(&map_path) {
            eprintln!("Warning: Failed to embed sources in source map: {e}");
        }
    }
    Ok(())
}
/// This ensures that the wasi-sdk is available, downloading and extracting it if necessary,
/// and returns the path to the `clang` executable.
///
/// If `TREE_SITTER_WASI_SDK_PATH` is set, it will use that path to look for the clang executable.
///
/// Note that this is just a minimially modified version of
/// `tree_sitter_loader::ensure_wasi_sdk_exists`. In the loader, this functionality is implemented
/// as a private method of `Loader`. Rather than add this to the public API, we just
/// re-implement it. Any fixes and/or modifications made to the loader's copy should be reflected
/// here.
pub fn ensure_wasi_sdk_exists() -> Result<PathBuf> {
    // Clang may be installed under several names depending on the SDK build.
    let possible_executables = if cfg!(windows) {
        vec![
            "clang.exe",
            "wasm32-unknown-wasi-clang.exe",
            "wasm32-wasi-clang.exe",
        ]
    } else {
        vec!["clang", "wasm32-unknown-wasi-clang", "wasm32-wasi-clang"]
    };
    // 1. Honor an explicit SDK path from the environment; never download then.
    if let Ok(wasi_sdk_path) = std::env::var("TREE_SITTER_WASI_SDK_PATH") {
        let wasi_sdk_dir = PathBuf::from(wasi_sdk_path);
        for exe in &possible_executables {
            let clang_exe = wasi_sdk_dir.join("bin").join(exe);
            if clang_exe.exists() {
                return Ok(clang_exe);
            }
        }
        Err(LoaderError::WasiSDKClang(WasiSDKClangError {
            wasi_sdk_dir: wasi_sdk_dir.to_string_lossy().to_string(),
            possible_executables: possible_executables.clone(),
            download: false,
        }))?;
    }
    // 2. Look for a previously downloaded SDK in the user cache directory.
    let cache_dir = etcetera::choose_base_strategy()?
        .cache_dir()
        .join("tree-sitter");
    fs::create_dir_all(&cache_dir).map_err(|error| {
        LoaderError::IO(IoError {
            error,
            path: Some(cache_dir.to_string_lossy().to_string()),
        })
    })?;
    let wasi_sdk_dir = cache_dir.join("wasi-sdk");
    for exe in &possible_executables {
        let clang_exe = wasi_sdk_dir.join("bin").join(exe);
        if clang_exe.exists() {
            return Ok(clang_exe);
        }
    }
    // 3. Not cached: download the release tarball for this platform.
    fs::create_dir_all(&wasi_sdk_dir).map_err(|error| {
        LoaderError::IO(IoError {
            error,
            path: Some(wasi_sdk_dir.to_string_lossy().to_string()),
        })
    })?;
    let arch_os = if cfg!(target_os = "macos") {
        if cfg!(target_arch = "aarch64") {
            "arm64-macos"
        } else {
            "x86_64-macos"
        }
    } else if cfg!(target_os = "windows") {
        if cfg!(target_arch = "aarch64") {
            "arm64-windows"
        } else {
            "x86_64-windows"
        }
    } else if cfg!(target_os = "linux") {
        if cfg!(target_arch = "aarch64") {
            "arm64-linux"
        } else {
            "x86_64-linux"
        }
    } else {
        Err(LoaderError::WasiSDKPlatform)?
    };
    let sdk_filename = format!("wasi-sdk-{WASI_SDK_VERSION}-{arch_os}.tar.gz");
    // Release tags only carry the major version, e.g. "wasi-sdk-25" for 25.0.
    let wasi_sdk_major_version = WASI_SDK_VERSION
        .trim_end_matches(char::is_numeric) // trim minor version...
        .trim_end_matches('.'); // ...and '.' separator
    let sdk_url = format!(
        "https://github.com/WebAssembly/wasi-sdk/releases/download/wasi-sdk-{wasi_sdk_major_version}/{sdk_filename}",
    );
    eprintln!("Downloading wasi-sdk from {sdk_url}...");
    let temp_tar_path = cache_dir.join(sdk_filename);
    let status = Command::new("curl")
        .arg("-f")
        .arg("-L")
        .arg("-o")
        .arg(&temp_tar_path)
        .arg(&sdk_url)
        .status()
        .map_err(|e| LoaderError::Curl(sdk_url.clone(), e))?;
    if !status.success() {
        Err(LoaderError::WasiSDKDownload(sdk_url))?;
    }
    eprintln!("Extracting wasi-sdk to {}...", wasi_sdk_dir.display());
    extract_tar_gz_with_strip(&temp_tar_path, &wasi_sdk_dir)?;
    // The tarball is no longer needed once extracted; ignore removal errors.
    fs::remove_file(temp_tar_path).ok();
    // 4. Verify that the freshly extracted SDK actually contains clang.
    for exe in &possible_executables {
        let clang_exe = wasi_sdk_dir.join("bin").join(exe);
        if clang_exe.exists() {
            return Ok(clang_exe);
        }
    }
    Err(LoaderError::WasiSDKClang(WasiSDKClangError {
        wasi_sdk_dir: wasi_sdk_dir.to_string_lossy().to_string(),
        possible_executables,
        download: true,
    }))?
}
/// Extracts a tar.gz archive with the system `tar`, stripping the first path
/// component so the archive's single top-level directory is flattened into
/// `destination`.
fn extract_tar_gz_with_strip(archive_path: &Path, destination: &Path) -> Result<()> {
    let mut tar_cmd = Command::new("tar");
    tar_cmd
        .arg("-xzf")
        .arg(archive_path)
        .arg("--strip-components=1")
        .arg("-C")
        .arg(destination);
    // Failing to launch `tar` and `tar` exiting non-zero are distinct errors.
    let status = tar_cmd
        .status()
        .map_err(|e| LoaderError::Tar(archive_path.to_string_lossy().to_string(), e))?;
    if !status.success() {
        Err(LoaderError::Extraction(
            archive_path.to_string_lossy().to_string(),
            destination.to_string_lossy().to_string(),
        ))?;
    }
    Ok(())
}
pub fn run_wasm_stdlib() -> Result<()> {
let export_flags = include_str!("../../../lib/src/wasm/stdlib-symbols.txt")
.lines()
.map(|line| format!("-Wl,--export={}", &line[1..line.len() - 2]))
.collect::<Vec<String>>();
let clang_exe = ensure_wasi_sdk_exists()?;
let output = Command::new(&clang_exe)
.args([
"-o",
"stdlib.wasm",
"-Os",
"-fPIC",
"-DTREE_SITTER_FEATURE_WASM",
"-Wl,--no-entry",
"-Wl,--stack-first",
"-Wl,-z",
"-Wl,stack-size=65536",
"-Wl,--import-undefined",
"-Wl,--import-memory",
"-Wl,--import-table",
"-Wl,--strip-debug",
"-Wl,--export=__wasm_call_ctors",
"-Wl,--export=__stack_pointer",
"-Wl,--export=reset_heap",
])
.args(&export_flags)
.arg("crates/language/wasm/src/stdlib.c")
.output()?;
bail_on_err(&output, "Failed to compile the Tree-sitter Wasm stdlib")?;
let xxd = Command::new("xxd")
.args(["-C", "-i", "stdlib.wasm"])
.output()?;
bail_on_err(
&xxd,
"Failed to run xxd on the compiled Tree-sitter Wasm stdlib",
)?;
fs::write("lib/src/wasm/wasm-stdlib.h", xxd.stdout)?;
fs::rename("stdlib.wasm", "target/stdlib.wasm")?;
Ok(())
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/xtask/src/embed_sources.rs | crates/xtask/src/embed_sources.rs | use anyhow::Result;
use std::fs;
use std::path::Path;
/// Restores sourcesContent if it was stripped by Binaryen.
///
/// This is a workaround for Binaryen where `wasm-opt -O2` and higher
/// optimization levels strip the `sourcesContent` field from source maps,
/// even when the source map was generated with `--sources` flag.
///
/// This is fixed upstream in Binaryen as of Apr 9, 2025, but there hasn't been a release with the fix yet.
/// See: <https://github.com/WebAssembly/binaryen/issues/6805>
///
/// This reads the original source files and embeds them in the
/// source map's `sourcesContent` field, making debugging possible even
/// with optimized builds.
///
/// TODO: Once Binaryen releases a version with the fix, and emscripten updates to that
/// version, and we update our emscripten version, this function can be removed.
pub fn embed_sources_in_map(map_path: &Path) -> Result<()> {
let map_content = fs::read_to_string(map_path)?;
let mut map: serde_json::Value = serde_json::from_str(&map_content)?;
if let Some(sources_content) = map.get("sourcesContent") {
if let Some(arr) = sources_content.as_array() {
if !arr.is_empty() && arr.iter().any(|v| !v.is_null()) {
return Ok(());
}
}
}
let sources = map["sources"]
.as_array()
.ok_or_else(|| anyhow::anyhow!("No sources array in source map"))?;
let map_dir = map_path.parent().unwrap_or(Path::new("."));
let mut sources_content = Vec::new();
for source in sources {
let source_path = source.as_str().unwrap_or("");
let full_path = map_dir.join(source_path);
let content = if full_path.exists() {
match fs::read_to_string(&full_path) {
Ok(content) => serde_json::Value::String(content),
Err(_) => serde_json::Value::Null,
}
} else {
serde_json::Value::Null
};
sources_content.push(content);
}
map["sourcesContent"] = serde_json::Value::Array(sources_content);
let output = serde_json::to_string(&map)?;
fs::write(map_path, output)?;
Ok(())
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/xtask/src/check_wasm_exports.rs | crates/xtask/src/check_wasm_exports.rs | use std::{
collections::HashSet,
env,
io::BufRead,
path::PathBuf,
process::{Command, Stdio},
time::Duration,
};
use anyhow::{anyhow, Result};
use notify::{
event::{AccessKind, AccessMode},
EventKind, RecursiveMode,
};
use notify_debouncer_full::new_debouncer;
use crate::{bail_on_err, watch_wasm, CheckWasmExports};
const EXCLUDES: [&str; 25] = [
// Unneeded because the JS side has its own way of implementing it
"ts_node_child_by_field_name",
"ts_node_edit",
// Precomputed and stored in the JS side
"ts_node_type",
"ts_node_grammar_type",
"ts_node_eq",
"ts_tree_cursor_current_field_name",
"ts_lookahead_iterator_current_symbol_name",
// Not used in Wasm
"ts_init",
"ts_set_allocator",
"ts_parser_print_dot_graphs",
"ts_tree_print_dot_graph",
"ts_parser_set_wasm_store",
"ts_parser_take_wasm_store",
"ts_parser_language",
"ts_node_language",
"ts_tree_language",
"ts_lookahead_iterator_language",
"ts_parser_logger",
"ts_parser_parse_string",
"ts_parser_parse_string_encoding",
// Query cursor is not managed by user in web bindings
"ts_query_cursor_delete",
"ts_query_cursor_match_limit",
"ts_query_cursor_remove_match",
"ts_query_cursor_set_point_range",
"ts_query_cursor_set_containing_byte_range",
];
pub fn run(args: &CheckWasmExports) -> Result<()> {
if args.watch {
watch_wasm!(check_wasm_exports);
} else {
check_wasm_exports()?;
}
Ok(())
}
fn check_wasm_exports() -> Result<()> {
let mut wasm_exports = std::fs::read_to_string("lib/binding_web/lib/exports.txt")?
.lines()
.map(|s| s.replace("_wasm", "").replace("byte", "index"))
// remove leading and trailing quotes, trailing comma
.map(|s| s[1..s.len() - 2].to_string())
.collect::<HashSet<_>>();
// Run wasm-objdump to see symbols used internally in binding.c but not exposed in any way.
let wasm_objdump = Command::new("wasm-objdump")
.args([
"--details",
"lib/binding_web/debug/web-tree-sitter.wasm",
"--section",
"Name",
])
.output()
.expect("Failed to run wasm-objdump");
bail_on_err(&wasm_objdump, "Failed to run wasm-objdump")?;
wasm_exports.extend(
wasm_objdump
.stdout
.lines()
.map_while(Result::ok)
.skip_while(|line| !line.contains("- func"))
.filter_map(|line| {
if line.contains("func") {
if let Some(function) = line.split_whitespace().nth(2).map(String::from) {
let trimmed = function.trim_start_matches('<').trim_end_matches('>');
if trimmed.starts_with("ts") && !trimmed.contains("__") {
return Some(trimmed.to_string());
}
}
}
None
}),
);
let nm_cmd = env::var("NM").unwrap_or_else(|_| "nm".to_owned());
let nm_child = Command::new(nm_cmd)
.arg("-W")
.arg("-U")
.arg("libtree-sitter.so")
.stdout(Stdio::piped())
.output()
.expect("Failed to run nm");
bail_on_err(&nm_child, "Failed to run nm")?;
let export_reader = nm_child
.stdout
.lines()
.map_while(Result::ok)
.filter(|line| line.contains(" T "));
let exports = export_reader
.filter_map(|line| line.split_whitespace().nth(2).map(String::from))
.filter(|symbol| !EXCLUDES.contains(&symbol.as_str()))
.collect::<HashSet<_>>();
let mut missing = exports
.iter()
.filter(|&symbol| !wasm_exports.contains(symbol))
.map(String::as_str)
.collect::<Vec<_>>();
missing.sort_unstable();
if !missing.is_empty() {
Err(anyhow!(format!(
"Unmatched Wasm exports:\n{}",
missing.join("\n")
)))?;
}
Ok(())
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/xtask/src/fetch.rs | crates/xtask/src/fetch.rs | use crate::{bail_on_err, root_dir, FetchFixtures, EMSCRIPTEN_VERSION};
use anyhow::Result;
use std::{fs, process::Command};
pub fn run_fixtures(args: &FetchFixtures) -> Result<()> {
let fixtures_dir = root_dir().join("test").join("fixtures");
let grammars_dir = fixtures_dir.join("grammars");
let fixtures_path = fixtures_dir.join("fixtures.json");
// grammar name, tag
let mut fixtures: Vec<(String, String)> =
serde_json::from_str(&fs::read_to_string(&fixtures_path)?)?;
for (grammar, tag) in &mut fixtures {
let grammar_dir = grammars_dir.join(&grammar);
let grammar_url = format!("https://github.com/tree-sitter/tree-sitter-{grammar}");
println!("Fetching the {grammar} grammar...");
if !grammar_dir.exists() {
let mut command = Command::new("git");
command.args([
"clone",
"--depth",
"1",
"--branch",
tag,
&grammar_url,
&grammar_dir.to_string_lossy(),
]);
bail_on_err(
&command.spawn()?.wait_with_output()?,
&format!("Failed to clone the {grammar} grammar"),
)?;
} else {
let mut describe_command = Command::new("git");
describe_command.current_dir(&grammar_dir).args([
"describe",
"--tags",
"--exact-match",
"HEAD",
]);
let output = describe_command.output()?;
let current_tag = String::from_utf8_lossy(&output.stdout);
let current_tag = current_tag.trim();
if current_tag != tag {
println!("Updating {grammar} grammar from {current_tag} to {tag}...");
let mut fetch_command = Command::new("git");
fetch_command.current_dir(&grammar_dir).args([
"fetch",
"origin",
&format!("refs/tags/{tag}:refs/tags/{tag}"),
]);
bail_on_err(
&fetch_command.spawn()?.wait_with_output()?,
&format!("Failed to fetch tag {tag} for {grammar} grammar"),
)?;
let mut reset_command = Command::new("git");
reset_command
.current_dir(&grammar_dir)
.args(["reset", "--hard", "HEAD"]);
bail_on_err(
&reset_command.spawn()?.wait_with_output()?,
&format!("Failed to reset {grammar} grammar working tree"),
)?;
let mut checkout_command = Command::new("git");
checkout_command
.current_dir(&grammar_dir)
.args(["checkout", tag]);
bail_on_err(
&checkout_command.spawn()?.wait_with_output()?,
&format!("Failed to checkout tag {tag} for {grammar} grammar"),
)?;
} else {
println!("{grammar} grammar is already at tag {tag}");
}
}
}
if args.update {
println!("Updating the fixtures lock file");
fs::write(
&fixtures_path,
// format the JSON without extra newlines
serde_json::to_string(&fixtures)?
.replace("[[", "[\n [")
.replace("],", "],\n ")
.replace("]]", "]\n]"),
)?;
}
Ok(())
}
pub fn run_emscripten() -> Result<()> {
let emscripten_dir = root_dir().join("target").join("emsdk");
if emscripten_dir.exists() {
println!("Emscripten SDK already exists");
return Ok(());
}
println!("Cloning the Emscripten SDK...");
let mut command = Command::new("git");
command.args([
"clone",
"https://github.com/emscripten-core/emsdk.git",
&emscripten_dir.to_string_lossy(),
]);
bail_on_err(
&command.spawn()?.wait_with_output()?,
"Failed to clone the Emscripten SDK",
)?;
std::env::set_current_dir(&emscripten_dir)?;
let emsdk = if cfg!(windows) {
"emsdk.bat"
} else {
"./emsdk"
};
let mut command = Command::new(emsdk);
command.args(["install", EMSCRIPTEN_VERSION]);
bail_on_err(
&command.spawn()?.wait_with_output()?,
"Failed to install Emscripten",
)?;
let mut command = Command::new(emsdk);
command.args(["activate", EMSCRIPTEN_VERSION]);
bail_on_err(
&command.spawn()?.wait_with_output()?,
"Failed to activate Emscripten",
)
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/xtask/src/main.rs | crates/xtask/src/main.rs | mod benchmark;
mod build_wasm;
mod bump;
mod check_wasm_exports;
mod clippy;
mod embed_sources;
mod fetch;
mod generate;
mod test;
mod test_schema;
mod upgrade_wasmtime;
use std::{path::Path, process::Command};
use anstyle::{AnsiColor, Color, Style};
use anyhow::Result;
use clap::{crate_authors, Args, FromArgMatches as _, Subcommand};
use semver::Version;
#[derive(Subcommand)]
#[command(about="Run various tasks", author=crate_authors!("\n"), styles=get_styles())]
enum Commands {
/// Runs `cargo benchmark` with some optional environment variables set.
Benchmark(Benchmark),
/// Compile the Tree-sitter Wasm library. This will create two files in the
/// `lib/binding_web` directory: `web-tree-sitter.js` and `web-tree-sitter.wasm`.
BuildWasm(BuildWasm),
/// Compile the Tree-sitter Wasm standard library.
BuildWasmStdlib,
/// Bumps the version of the workspace.
BumpVersion(BumpVersion),
/// Checks that Wasm exports are synced.
CheckWasmExports(CheckWasmExports),
/// Runs `cargo clippy`.
Clippy(Clippy),
/// Fetches emscripten.
FetchEmscripten,
/// Fetches the fixtures for testing tree-sitter.
FetchFixtures(FetchFixtures),
/// Generate the Rust bindings from the C library.
GenerateBindings,
/// Generates the fixtures for testing tree-sitter.
GenerateFixtures(GenerateFixtures),
/// Generates the JSON schema for the test runner summary.
GenerateTestSchema,
/// Generate the list of exports from Tree-sitter Wasm files.
GenerateWasmExports,
/// Run the test suite
Test(Test),
/// Run the Wasm test suite
TestWasm,
/// Upgrade the wasmtime dependency.
UpgradeWasmtime(UpgradeWasmtime),
}
#[derive(Args)]
struct Benchmark {
/// The language to run the benchmarks for.
#[arg(long, short)]
language: Option<String>,
/// The example file to run the benchmarks for.
#[arg(long, short)]
example_file_name: Option<String>,
/// The number of times to parse each sample (default is 5).
#[arg(long, short, default_value = "5")]
repetition_count: u32,
/// Whether to run the benchmarks in debug mode.
#[arg(long, short = 'g')]
debug: bool,
}
#[derive(Args)]
struct BuildWasm {
/// Compile the library more quickly, with fewer optimizations
/// and more runtime assertions.
#[arg(long, short = '0')]
debug: bool,
/// Run emscripten using docker, even if \`emcc\` is installed.
/// By default, \`emcc\` will be run directly when available.
#[arg(long, short)]
docker: bool,
/// Run emscripten with verbose output.
#[arg(long, short)]
verbose: bool,
/// Rebuild when relevant files are changed.
#[arg(long, short)]
watch: bool,
/// Emit TypeScript type definitions for the generated bindings,
/// requires `tsc` to be available.
#[arg(long, short)]
emit_tsd: bool,
/// Generate `CommonJS` modules instead of ES modules.
#[arg(long, short, env = "CJS")]
cjs: bool,
}
#[derive(Args)]
struct BumpVersion {
/// The version to bump to.
#[arg(index = 1, required = true)]
version: Version,
}
#[derive(Args)]
struct CheckWasmExports {
/// Recheck when relevant files are changed.
#[arg(long, short)]
watch: bool,
}
#[derive(Args)]
struct Clippy {
/// Automatically apply lint suggestions (`clippy --fix`).
#[arg(long, short)]
fix: bool,
/// The package to run Clippy against (`cargo -p <PACKAGE> clippy`).
#[arg(long, short)]
package: Option<String>,
}
#[derive(Args)]
struct FetchFixtures {
/// Update all fixtures to the latest tag
#[arg(long, short)]
update: bool,
}
#[derive(Args)]
struct GenerateFixtures {
/// Generates the parser to Wasm
#[arg(long, short)]
wasm: bool,
}
#[derive(Args)]
struct Test {
/// Compile C code with the Clang address sanitizer.
#[arg(long, short)]
address_sanitizer: bool,
/// Run only the corpus tests for the given language.
#[arg(long, short)]
language: Option<String>,
/// Run only the corpus tests whose name contain the given string.
#[arg(long, short)]
example: Option<String>,
/// Run the given number of iterations of randomized tests (default 10).
#[arg(long, short)]
iterations: Option<u32>,
/// Set the seed used to control random behavior.
#[arg(long, short)]
seed: Option<usize>,
/// Print parsing log to stderr.
#[arg(long, short)]
debug: bool,
/// Generate an SVG graph of parsing logs.
#[arg(long, short = 'D')]
debug_graph: bool,
/// Run the tests with a debugger.
#[arg(short)]
g: bool,
#[arg(trailing_var_arg = true)]
args: Vec<String>,
/// Don't capture the output
#[arg(long)]
nocapture: bool,
/// Enable the Wasm tests.
#[arg(long, short)]
wasm: bool,
}
#[derive(Args)]
struct UpgradeWasmtime {
/// The version to upgrade to.
#[arg(long, short)]
version: Version,
}
const BUILD_VERSION: &str = env!("CARGO_PKG_VERSION");
const BUILD_SHA: Option<&str> = option_env!("BUILD_SHA");
const EMSCRIPTEN_VERSION: &str = include_str!("../../loader/emscripten-version").trim_ascii();
const EMSCRIPTEN_TAG: &str = concat!(
"docker.io/emscripten/emsdk:",
include_str!("../../loader/emscripten-version")
)
.trim_ascii();
fn main() {
let result = run();
if let Err(err) = &result {
// Ignore BrokenPipe errors
if let Some(error) = err.downcast_ref::<std::io::Error>() {
if error.kind() == std::io::ErrorKind::BrokenPipe {
return;
}
}
if !err.to_string().is_empty() {
eprintln!("{err:?}");
}
std::process::exit(1);
}
}
fn run() -> Result<()> {
let version = BUILD_SHA.map_or_else(
|| BUILD_VERSION.to_string(),
|build_sha| format!("{BUILD_VERSION} ({build_sha})"),
);
let version: &'static str = Box::leak(version.into_boxed_str());
let cli = clap::Command::new("xtask")
.help_template(
"\
{before-help}{name} {version}
{author-with-newline}{about-with-newline}
{usage-heading} {usage}
{all-args}{after-help}
",
)
.version(version)
.subcommand_required(true)
.arg_required_else_help(true)
.disable_help_subcommand(true)
.disable_colored_help(false);
let command = Commands::from_arg_matches(&Commands::augment_subcommands(cli).get_matches())?;
match command {
Commands::Benchmark(benchmark_options) => benchmark::run(&benchmark_options)?,
Commands::BuildWasm(build_wasm_options) => build_wasm::run_wasm(&build_wasm_options)?,
Commands::BuildWasmStdlib => build_wasm::run_wasm_stdlib()?,
Commands::BumpVersion(bump_options) => bump::run(bump_options)?,
Commands::CheckWasmExports(check_options) => check_wasm_exports::run(&check_options)?,
Commands::Clippy(clippy_options) => clippy::run(&clippy_options)?,
Commands::FetchEmscripten => fetch::run_emscripten()?,
Commands::FetchFixtures(fetch_fixture_options) => {
fetch::run_fixtures(&fetch_fixture_options)?;
}
Commands::GenerateBindings => generate::run_bindings()?,
Commands::GenerateFixtures(generate_fixtures_options) => {
generate::run_fixtures(&generate_fixtures_options)?;
}
Commands::GenerateTestSchema => test_schema::run_test_schema()?,
Commands::GenerateWasmExports => generate::run_wasm_exports()?,
Commands::Test(test_options) => test::run(&test_options)?,
Commands::TestWasm => test::run_wasm()?,
Commands::UpgradeWasmtime(upgrade_wasmtime_options) => {
upgrade_wasmtime::run(&upgrade_wasmtime_options)?;
}
}
Ok(())
}
fn root_dir() -> &'static Path {
Path::new(env!("CARGO_MANIFEST_DIR"))
.parent()
.unwrap()
.parent()
.unwrap()
}
fn bail_on_err(output: &std::process::Output, prefix: &str) -> Result<()> {
if !output.status.success() {
let stderr = String::from_utf8_lossy(&output.stderr);
anyhow::bail!("{prefix}:\n{stderr}");
}
Ok(())
}
#[must_use]
const fn get_styles() -> clap::builder::Styles {
clap::builder::Styles::styled()
.usage(
Style::new()
.bold()
.fg_color(Some(Color::Ansi(AnsiColor::Yellow))),
)
.header(
Style::new()
.bold()
.fg_color(Some(Color::Ansi(AnsiColor::Yellow))),
)
.literal(Style::new().fg_color(Some(Color::Ansi(AnsiColor::Green))))
.invalid(
Style::new()
.bold()
.fg_color(Some(Color::Ansi(AnsiColor::Red))),
)
.error(
Style::new()
.bold()
.fg_color(Some(Color::Ansi(AnsiColor::Red))),
)
.valid(
Style::new()
.bold()
.fg_color(Some(Color::Ansi(AnsiColor::Green))),
)
.placeholder(Style::new().fg_color(Some(Color::Ansi(AnsiColor::White))))
}
pub fn create_commit(msg: &str, paths: &[&str]) -> Result<String> {
for path in paths {
let output = Command::new("git").args(["add", path]).output()?;
if !output.status.success() {
anyhow::bail!(
"Failed to add {path}: {}",
String::from_utf8_lossy(&output.stderr)
);
}
}
let output = Command::new("git").args(["commit", "-m", msg]).output()?;
if !output.status.success() {
anyhow::bail!(
"Failed to commit: {}",
String::from_utf8_lossy(&output.stderr)
);
}
let output = Command::new("git").args(["rev-parse", "HEAD"]).output()?;
if !output.status.success() {
anyhow::bail!(
"Failed to get commit SHA: {}",
String::from_utf8_lossy(&output.stderr)
);
}
Ok(String::from_utf8(output.stdout)?.trim().to_string())
}
#[macro_export]
macro_rules! watch_wasm {
($watch_fn:expr) => {
if let Err(e) = $watch_fn() {
eprintln!("{e}");
} else {
println!("Build succeeded");
}
let watch_files = [
"lib/tree-sitter.c",
"lib/exports.txt",
"lib/imports.js",
"lib/prefix.js",
]
.iter()
.map(PathBuf::from)
.collect::<HashSet<PathBuf>>();
let (tx, rx) = std::sync::mpsc::channel();
let mut debouncer = new_debouncer(Duration::from_secs(1), None, tx)?;
debouncer.watch("lib/binding_web", RecursiveMode::NonRecursive)?;
for result in rx {
match result {
Ok(events) => {
for event in events {
if event.kind == EventKind::Access(AccessKind::Close(AccessMode::Write))
&& event
.paths
.iter()
.filter_map(|p| p.file_name())
.any(|p| watch_files.contains(&PathBuf::from(p)))
{
if let Err(e) = $watch_fn() {
eprintln!("{e}");
} else {
println!("Build succeeded");
}
}
}
}
Err(errors) => {
return Err(anyhow!(
"{}",
errors
.into_iter()
.map(|e| e.to_string())
.collect::<Vec<_>>()
.join("\n")
));
}
}
}
};
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
tree-sitter/tree-sitter | https://github.com/tree-sitter/tree-sitter/blob/dd60d5cff079dbae8db798ce7272879dbd2ac9e8/crates/xtask/src/test_schema.rs | crates/xtask/src/test_schema.rs | use std::path::PathBuf;
use anyhow::Result;
use serde_json::to_writer_pretty;
use tree_sitter_cli::test::TestSummary;
pub fn run_test_schema() -> Result<()> {
let schema = schemars::schema_for!(TestSummary);
let xtask_path: PathBuf = env!("CARGO_MANIFEST_DIR").into();
let schema_path = xtask_path
.parent()
.unwrap()
.parent()
.unwrap()
.join("docs")
.join("src")
.join("assets")
.join("schemas")
.join("test-summary.schema.json");
let mut file = std::fs::File::create(schema_path)?;
Ok(to_writer_pretty(&mut file, &schema)?)
}
| rust | MIT | dd60d5cff079dbae8db798ce7272879dbd2ac9e8 | 2026-01-04T15:38:34.599794Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.