repo
stringlengths
6
65
file_url
stringlengths
81
311
file_path
stringlengths
6
227
content
stringlengths
0
32.8k
language
stringclasses
1 value
license
stringclasses
7 values
commit_sha
stringlengths
40
40
retrieved_at
stringdate
2026-01-04 15:31:58
2026-01-04 20:25:31
truncated
bool
2 classes
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/lib/testutils/src/lib.rs
lib/testutils/src/lib.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashMap; use std::collections::HashSet; use std::env; use std::ffi::OsStr; use std::fs; use std::fs::OpenOptions; use std::io::Write as _; use std::path::Path; use std::path::PathBuf; use std::process::Command; use std::process::Stdio; use std::sync::Arc; use itertools::Itertools as _; use jj_lib::backend; use jj_lib::backend::Backend; use jj_lib::backend::BackendInitError; use jj_lib::backend::ChangeId; use jj_lib::backend::CommitId; use jj_lib::backend::CopyId; use jj_lib::backend::FileId; use jj_lib::backend::MillisSinceEpoch; use jj_lib::backend::Signature; use jj_lib::backend::Timestamp; use jj_lib::backend::TreeValue; use jj_lib::commit::Commit; use jj_lib::commit_builder::CommitBuilder; use jj_lib::config::ConfigLayer; use jj_lib::config::ConfigSource; use jj_lib::config::StackedConfig; use jj_lib::git_backend::GitBackend; use jj_lib::gitignore::GitIgnoreFile; use jj_lib::matchers::EverythingMatcher; use jj_lib::matchers::NothingMatcher; use jj_lib::merged_tree::MergedTree; use jj_lib::object_id::ObjectId as _; use jj_lib::repo::MutableRepo; use jj_lib::repo::ReadonlyRepo; use jj_lib::repo::Repo; use jj_lib::repo::RepoLoader; use jj_lib::repo::StoreFactories; use jj_lib::repo_path::RepoPath; use jj_lib::repo_path::RepoPathBuf; use jj_lib::repo_path::RepoPathComponent; use jj_lib::rewrite::RebaseOptions; use jj_lib::rewrite::RebasedCommit; use 
jj_lib::secret_backend::SecretBackend; use jj_lib::settings::UserSettings; use jj_lib::signing::Signer; use jj_lib::simple_backend::SimpleBackend; use jj_lib::store::Store; use jj_lib::transaction::Transaction; use jj_lib::tree::Tree; use jj_lib::tree_builder::TreeBuilder; use jj_lib::working_copy::SnapshotError; use jj_lib::working_copy::SnapshotOptions; use jj_lib::working_copy::SnapshotStats; use jj_lib::workspace::Workspace; use pollster::FutureExt as _; use tempfile::TempDir; use tokio::io::AsyncReadExt as _; use crate::test_backend::TestBackendFactory; pub mod git; pub mod proptest; pub mod test_backend; pub const HERMETIC_GIT_CONFIGS: &[(&str, &str)] = &[ // gitoxide uses "main" as the default branch name, whereas git uses "master". This also // prevents git CLI from issuing the initial branch name advice. ("init.defaultBranch", "master"), ]; // TODO: Consider figuring out a way to make `GitBackend` and `git(1)` calls in // tests ignore external configuration and removing this function. This is // somewhat tricky because `gix` looks at system and user configuration, and // `GitBackend` also calls into `git(1)` for things like garbage collection. pub fn hermetic_git() { let mut envs = [ // Prevent GitBackend from loading user and system configurations. For // gitoxide API use in tests, Config::isolated() is probably better. ("GIT_CONFIG_SYSTEM", "/dev/null"), ("GIT_CONFIG_GLOBAL", "/dev/null"), ] .map(|(key, value)| (key.to_string(), value.to_string())) .to_vec(); for (i, (key, value)) in HERMETIC_GIT_CONFIGS.iter().enumerate() { envs.push((format!("GIT_CONFIG_KEY_{i}"), key.to_string())); envs.push((format!("GIT_CONFIG_VALUE_{i}"), value.to_string())); } envs.push(( "GIT_CONFIG_COUNT".to_string(), HERMETIC_GIT_CONFIGS.len().to_string(), )); for (key, value) in envs { // SAFETY: This is actually not safe. `getenv` and `setenv` are not thread safe, // and we can't guarantee that the following call won't have race conditions. 
unsafe { env::set_var(key, value) }; } } pub fn new_temp_dir() -> TempDir { hermetic_git(); tempfile::Builder::new() .prefix("jj-test-") .tempdir() .unwrap() } /// Returns new low-level config object that includes fake user configuration /// needed to run basic operations. pub fn base_user_config() -> StackedConfig { let config_text = r#" user.name = "Test User" user.email = "test.user@example.com" operation.username = "test-username" operation.hostname = "host.example.com" debug.randomness-seed = 42 "#; let mut config = StackedConfig::with_defaults(); config.add_layer(ConfigLayer::parse(ConfigSource::User, config_text).unwrap()); config } /// Returns new immutable settings object that includes fake user configuration /// needed to run basic operations. pub fn user_settings() -> UserSettings { UserSettings::from_config(base_user_config()).unwrap() } /// Creates [`SnapshotOptions`] for use in tests. pub fn empty_snapshot_options() -> SnapshotOptions<'static> { SnapshotOptions { base_ignores: GitIgnoreFile::empty(), progress: None, start_tracking_matcher: &EverythingMatcher, force_tracking_matcher: &NothingMatcher, max_new_file_size: u64::MAX, } } /// Panic if `CI` environment variable is set to a non-empty value /// /// Most CI environments set this variable automatically. See e.g. 
/// <https://docs.github.com/en/actions/writing-workflows/choosing-what-your-workflow-does/store-information-in-variables#default-environment-variables> #[track_caller] pub fn ensure_running_outside_ci(reason: &str) { let running_in_ci = std::env::var("CI").is_ok_and(|value| !value.is_empty()); assert!(!running_in_ci, "Running in CI, {reason}."); } /// Tests if an external tool is installed and in the PATH pub fn is_external_tool_installed(program_name: impl AsRef<OsStr>) -> bool { Command::new(program_name) .arg("--version") .stdout(Stdio::null()) .status() .is_ok() } #[derive(Debug)] pub struct TestEnvironment { temp_dir: TempDir, test_backend_factory: TestBackendFactory, } impl TestEnvironment { pub fn init() -> Self { Self { temp_dir: new_temp_dir(), test_backend_factory: TestBackendFactory::default(), } } pub fn root(&self) -> &Path { self.temp_dir.path() } pub fn default_store_factories(&self) -> StoreFactories { let mut factories = StoreFactories::default(); factories.add_backend("test", { let factory = self.test_backend_factory.clone(); Box::new(move |_settings, store_path| Ok(Box::new(factory.load(store_path)))) }); factories.add_backend( SecretBackend::name(), Box::new(|settings, store_path| { Ok(Box::new(SecretBackend::load(settings, store_path)?)) }), ); factories } pub fn load_repo_at_head( &self, settings: &UserSettings, repo_path: &Path, ) -> Arc<ReadonlyRepo> { RepoLoader::init_from_file_system(settings, repo_path, &self.default_store_factories()) .unwrap() .load_at_head() .unwrap() } } pub struct TestRepo { pub env: TestEnvironment, pub repo: Arc<ReadonlyRepo>, repo_path: PathBuf, } #[derive(PartialEq, Eq, Copy, Clone)] pub enum TestRepoBackend { Git, Simple, Test, } impl TestRepoBackend { fn init_backend( &self, env: &TestEnvironment, settings: &UserSettings, store_path: &Path, ) -> Result<Box<dyn Backend>, BackendInitError> { match self { Self::Git => Ok(Box::new(GitBackend::init_internal(settings, store_path)?)), Self::Simple => 
Ok(Box::new(SimpleBackend::init(store_path))), Self::Test => Ok(Box::new(env.test_backend_factory.init(store_path))), } } } impl TestRepo { pub fn init() -> Self { Self::init_with_backend(TestRepoBackend::Test) } pub fn init_with_backend(backend: TestRepoBackend) -> Self { Self::init_with_backend_and_settings(backend, &user_settings()) } pub fn init_with_settings(settings: &UserSettings) -> Self { Self::init_with_backend_and_settings(TestRepoBackend::Test, settings) } pub fn init_with_backend_and_settings( backend: TestRepoBackend, settings: &UserSettings, ) -> Self { let env = TestEnvironment::init(); let repo_dir = env.root().join("repo"); fs::create_dir(&repo_dir).unwrap(); let repo = ReadonlyRepo::init( settings, &repo_dir, &|settings, store_path| backend.init_backend(&env, settings, store_path), Signer::from_settings(settings).unwrap(), ReadonlyRepo::default_op_store_initializer(), ReadonlyRepo::default_op_heads_store_initializer(), ReadonlyRepo::default_index_store_initializer(), ReadonlyRepo::default_submodule_store_initializer(), ) .unwrap(); Self { env, repo, repo_path: repo_dir, } } pub fn repo_path(&self) -> &Path { &self.repo_path } } pub struct TestWorkspace { pub env: TestEnvironment, pub workspace: Workspace, pub repo: Arc<ReadonlyRepo>, } impl TestWorkspace { pub fn init() -> Self { Self::init_with_backend(TestRepoBackend::Test) } pub fn init_with_backend(backend: TestRepoBackend) -> Self { Self::init_with_backend_and_settings(backend, &user_settings()) } pub fn init_with_settings(settings: &UserSettings) -> Self { Self::init_with_backend_and_settings(TestRepoBackend::Test, settings) } pub fn init_with_backend_and_settings( backend: TestRepoBackend, settings: &UserSettings, ) -> Self { let signer = Signer::from_settings(settings).unwrap(); Self::init_with_backend_and_signer(backend, signer, settings) } pub fn init_with_backend_and_signer( backend: TestRepoBackend, signer: Signer, settings: &UserSettings, ) -> Self { let env = 
TestEnvironment::init(); let workspace_root = env.root().join("repo"); fs::create_dir(&workspace_root).unwrap(); let (workspace, repo) = Workspace::init_with_backend( settings, &workspace_root, &|settings, store_path| backend.init_backend(&env, settings, store_path), signer, ) .unwrap(); Self { env, workspace, repo, } } pub fn root_dir(&self) -> PathBuf { self.env.root().join("repo").join("..") } pub fn repo_path(&self) -> &Path { self.workspace.repo_path() } /// Snapshots the working copy and returns the tree. Updates the working /// copy state on disk, but does not update the working-copy commit (no /// new operation). pub fn snapshot_with_options( &mut self, options: &SnapshotOptions, ) -> Result<(MergedTree, SnapshotStats), SnapshotError> { let mut locked_ws = self.workspace.start_working_copy_mutation().unwrap(); let (tree, stats) = locked_ws.locked_wc().snapshot(options).block_on()?; // arbitrary operation id locked_ws.finish(self.repo.op_id().clone()).unwrap(); Ok((tree, stats)) } /// Like `snapshot_with_option()` but with default options pub fn snapshot(&mut self) -> Result<MergedTree, SnapshotError> { let (tree, _stats) = self.snapshot_with_options(&empty_snapshot_options())?; Ok(tree) } } pub fn commit_transactions(txs: Vec<Transaction>) -> Arc<ReadonlyRepo> { let repo_loader = txs[0].base_repo().loader().clone(); let mut op_ids = vec![]; for tx in txs { op_ids.push(tx.commit("test").unwrap().op_id().clone()); std::thread::sleep(std::time::Duration::from_millis(1)); } let repo = repo_loader.load_at_head().unwrap(); // Test the setup. The assumption here is that the parent order matches the // order in which they were merged (which currently matches the transaction // commit order), so we want to know make sure they appear in a certain // order, so the caller can decide the order by passing them to this // function in a certain order. 
assert_eq!(*repo.operation().parent_ids(), op_ids); repo } pub fn repo_path_component(value: &str) -> &RepoPathComponent { RepoPathComponent::new(value).unwrap() } pub fn repo_path(value: &str) -> &RepoPath { RepoPath::from_internal_string(value).unwrap() } pub fn repo_path_buf(value: impl Into<String>) -> RepoPathBuf { RepoPathBuf::from_internal_string(value).unwrap() } pub fn read_file(store: &Store, path: &RepoPath, id: &FileId) -> Vec<u8> { let mut reader = store.read_file(path, id).block_on().unwrap(); let mut content = vec![]; reader.read_to_end(&mut content).block_on().unwrap(); content } pub fn write_file(store: &Store, path: &RepoPath, contents: &str) -> FileId { store .write_file(path, &mut contents.as_bytes()) .block_on() .unwrap() } pub struct TestTreeBuilder { store: Arc<Store>, tree_builder: TreeBuilder, } impl TestTreeBuilder { pub fn new(store: Arc<Store>) -> Self { let tree_builder = TreeBuilder::new(store.clone(), store.empty_tree_id().clone()); Self { store, tree_builder, } } pub fn file( &mut self, path: &RepoPath, contents: impl AsRef<[u8]>, ) -> TestTreeFileEntryBuilder<'_> { TestTreeFileEntryBuilder { tree_builder: &mut self.tree_builder, path: path.to_owned(), contents: contents.as_ref().to_vec(), executable: false, } } pub fn symlink(&mut self, path: &RepoPath, target: &str) { let id = self.store.write_symlink(path, target).block_on().unwrap(); self.tree_builder .set(path.to_owned(), TreeValue::Symlink(id)); } pub fn submodule(&mut self, path: &RepoPath, commit: CommitId) { self.tree_builder .set(path.to_owned(), TreeValue::GitSubmodule(commit)); } pub fn write_single_tree(self) -> Tree { let id = self.tree_builder.write_tree().unwrap(); self.store.get_tree(RepoPathBuf::root(), &id).unwrap() } pub fn write_merged_tree(self) -> MergedTree { let id = self.tree_builder.write_tree().unwrap(); MergedTree::resolved(self.store, id) } } pub struct TestTreeFileEntryBuilder<'a> { tree_builder: &'a mut TreeBuilder, path: RepoPathBuf, contents: 
Vec<u8>, executable: bool, } impl TestTreeFileEntryBuilder<'_> { pub fn executable(mut self, executable: bool) -> Self { self.executable = executable; self } } impl Drop for TestTreeFileEntryBuilder<'_> { fn drop(&mut self) { let id = self .tree_builder .store() .write_file(&self.path, &mut self.contents.as_slice()) .block_on() .unwrap(); let path = std::mem::replace(&mut self.path, RepoPathBuf::root()); self.tree_builder.set( path, TreeValue::File { id, executable: self.executable, copy_id: CopyId::placeholder(), }, ); } } pub fn create_single_tree_with( repo: &Arc<ReadonlyRepo>, build: impl FnOnce(&mut TestTreeBuilder), ) -> Tree { let mut builder = TestTreeBuilder::new(repo.store().clone()); build(&mut builder); builder.write_single_tree() } pub fn create_single_tree(repo: &Arc<ReadonlyRepo>, path_contents: &[(&RepoPath, &str)]) -> Tree { create_single_tree_with(repo, |builder| { for (path, contents) in path_contents { builder.file(path, contents); } }) } pub fn create_tree_with( repo: &Arc<ReadonlyRepo>, build: impl FnOnce(&mut TestTreeBuilder), ) -> MergedTree { let mut builder = TestTreeBuilder::new(repo.store().clone()); build(&mut builder); builder.write_merged_tree() } pub fn create_tree(repo: &Arc<ReadonlyRepo>, path_contents: &[(&RepoPath, &str)]) -> MergedTree { create_tree_with(repo, |builder| { for (path, contents) in path_contents { builder.file(path, contents); } }) } #[must_use] pub fn create_random_tree(repo: &Arc<ReadonlyRepo>) -> MergedTree { let number = rand::random::<u32>(); let path = repo_path_buf(format!("file{number}")); create_tree_with(repo, |builder| { builder.file(&path, "contents"); }) } pub fn create_random_commit(mut_repo: &mut MutableRepo) -> CommitBuilder<'_> { let tree = create_random_tree(mut_repo.base_repo()); let number = rand::random::<u32>(); mut_repo .new_commit(vec![mut_repo.store().root_commit_id().clone()], tree) .set_description(format!("random commit {number}")) } pub fn commit_with_tree(store: &Arc<Store>, tree: 
MergedTree) -> Commit { let signature = Signature { name: "Some One".to_string(), email: "someone@example.com".to_string(), timestamp: Timestamp { timestamp: MillisSinceEpoch(0), tz_offset: 0, }, }; let (root_tree, conflict_labels) = tree.into_tree_ids_and_labels(); let commit = backend::Commit { parents: vec![store.root_commit_id().clone()], predecessors: vec![], root_tree, conflict_labels: conflict_labels.into_merge(), change_id: ChangeId::from_hex("abcd"), description: "description".to_string(), author: signature.clone(), committer: signature, secure_sig: None, }; store.write_commit(commit, None).block_on().unwrap() } pub fn dump_tree(merged_tree: &MergedTree) -> String { use std::fmt::Write as _; let store = merged_tree.store(); let mut buf = String::new(); let trees = merged_tree.trees().block_on().unwrap(); writeln!(&mut buf, "merged tree (sides: {})", trees.num_sides()).unwrap(); for tree in &trees { writeln!(&mut buf, " tree {}", tree.id()).unwrap(); for (path, entry) in tree.entries_matching(&EverythingMatcher) { match entry { TreeValue::File { id, executable: _, copy_id: _, } => { let file_buf = read_file(store, &path, &id); let file_contents = String::from_utf8_lossy(&file_buf); writeln!(&mut buf, " file {path:?} ({id}): {file_contents:?}").unwrap(); } TreeValue::Symlink(id) => { writeln!(&mut buf, " symlink {path:?} ({id})").unwrap(); } TreeValue::GitSubmodule(id) => { writeln!(&mut buf, " submodule {path:?} ({id})").unwrap(); } _ => { writeln!(&mut buf, " entry {path:?}: {entry:?}").unwrap(); } } } } buf } #[macro_export] macro_rules! assert_tree_eq { ($left_tree:expr, $right_tree:expr $(,)?) 
=> { assert_tree_eq!($left_tree, $right_tree, "trees are different") }; ($left_tree:expr, $right_tree:expr, $($args:tt)+) => {{ let left_tree: &::jj_lib::merged_tree::MergedTree = &$left_tree; let right_tree: &::jj_lib::merged_tree::MergedTree = &$right_tree; assert_eq!( left_tree.tree_ids_and_labels(), right_tree.tree_ids_and_labels(), "{}:\n left: {}\nright: {}", format_args!($($args)*), $crate::dump_tree(left_tree), $crate::dump_tree(right_tree), ) }}; } pub fn write_random_commit(mut_repo: &mut MutableRepo) -> Commit { write_random_commit_with_parents(mut_repo, &[]) } pub fn write_random_commit_with_parents(mut_repo: &mut MutableRepo, parents: &[&Commit]) -> Commit { let parents = if parents.is_empty() { &[&mut_repo.store().root_commit()] } else { parents }; create_random_commit(mut_repo) .set_parents(parents.iter().map(|commit| commit.id().clone()).collect()) .write() .unwrap() } pub fn write_working_copy_file(workspace_root: &Path, path: &RepoPath, contents: impl AsRef<[u8]>) { let path = path.to_fs_path(workspace_root).unwrap(); if let Some(parent) = path.parent() { fs::create_dir_all(parent).unwrap(); } let mut file = OpenOptions::new() .write(true) .create(true) .truncate(true) .open(path) .unwrap(); file.write_all(contents.as_ref()).unwrap(); } /// Rebase descendants of the rewritten commits. Returns map of original commit /// ID to rebased (or abandoned parent) commit ID. 
pub fn rebase_descendants_with_options_return_map( repo: &mut MutableRepo, options: &RebaseOptions, ) -> HashMap<CommitId, CommitId> { let mut rebased: HashMap<CommitId, CommitId> = HashMap::new(); repo.rebase_descendants_with_options(options, |old_commit, rebased_commit| { let old_commit_id = old_commit.id().clone(); let new_commit_id = match rebased_commit { RebasedCommit::Rewritten(new_commit) => new_commit.id().clone(), RebasedCommit::Abandoned { parent_id } => parent_id, }; rebased.insert(old_commit_id, new_commit_id); }) .unwrap(); rebased } fn assert_in_rebased_map( repo: &impl Repo, rebased: &HashMap<CommitId, CommitId>, expected_old_commit: &Commit, ) -> Commit { let new_commit_id = rebased.get(expected_old_commit.id()).unwrap_or_else(|| { panic!( "Expected commit to have been rebased: {}", expected_old_commit.id().hex() ) }); repo.store().get_commit(new_commit_id).unwrap().clone() } pub fn assert_rebased_onto( repo: &impl Repo, rebased: &HashMap<CommitId, CommitId>, expected_old_commit: &Commit, expected_new_parent_ids: &[&CommitId], ) -> Commit { let new_commit = assert_in_rebased_map(repo, rebased, expected_old_commit); assert_eq!( new_commit.parent_ids().to_vec(), expected_new_parent_ids .iter() .map(|x| (*x).clone()) .collect_vec() ); assert_eq!(new_commit.change_id(), expected_old_commit.change_id()); new_commit } /// Maps children of an abandoned commit to a new rebase target. /// /// If `expected_old_commit` was abandoned, the `rebased` map indicates the /// commit the children of `expected_old_commit` should be rebased to, which /// would have a different change id. This happens when the EmptyBehavior in /// RebaseOptions is not the default; because of the details of the /// implementation this returned parent commit is always singular. 
pub fn assert_abandoned_with_parent( repo: &impl Repo, rebased: &HashMap<CommitId, CommitId>, expected_old_commit: &Commit, expected_new_parent_id: &CommitId, ) -> Commit { let new_parent_commit = assert_in_rebased_map(repo, rebased, expected_old_commit); assert_eq!(new_parent_commit.id(), expected_new_parent_id); assert_ne!( new_parent_commit.change_id(), expected_old_commit.change_id() ); new_parent_commit } pub fn assert_no_forgotten_test_files(test_dir: &Path) { // Parse the integration tests' main modules from the Cargo manifest. let manifest = { let file_path = test_dir.parent().unwrap().join("Cargo.toml"); let text = fs::read_to_string(&file_path).unwrap(); toml_edit::Document::parse(text).unwrap() }; let test_bin_mods = if let Some(item) = manifest.get("test") { let tables = item.as_array_of_tables().unwrap(); tables .iter() .map(|test| test.get("name").unwrap().as_str().unwrap().to_owned()) .collect() } else { vec![] }; // Add to that all submodules which are declared in the main test modules via // `mod`. let mut test_mods: HashSet<_> = test_bin_mods .iter() .flat_map(|test_mod| { let test_mod_path = test_dir.join(test_mod).with_extension("rs"); let test_mod_contents = fs::read_to_string(&test_mod_path).unwrap(); test_mod_contents .lines() .map(|line| line.trim_start_matches("pub ")) .filter_map(|line| line.strip_prefix("mod")) .filter_map(|line| line.strip_suffix(";")) .map(|line| line.trim().to_string()) .collect_vec() }) .collect(); test_mods.extend(test_bin_mods); // Gather list of Rust source files in test directory for comparison. 
let test_mod_files: HashSet<_> = fs::read_dir(test_dir) .unwrap() .map(|entry| entry.unwrap().path()) .filter(|path| path.extension().is_some_and(|ext| ext == "rs")) .filter_map(|path| { path.file_stem() .and_then(|stem| stem.to_os_string().into_string().ok()) }) .collect(); assert!( test_mod_files.is_subset(&test_mods), "the following test source files are not declared as integration tests nor included as \ submodules of one: {}", test_mod_files .difference(&test_mods) .map(|mod_stem| format!("{mod_stem}.rs")) .join(", "), ); } /// Returns true if the directory appears to be on a filesystem with strict /// UTF-8 validation, as on ZFS with the `utf8only=on` property set. #[cfg(unix)] pub fn check_strict_utf8_fs(dir: &Path) -> bool { use std::os::unix::ffi::OsStrExt as _; let test_file_normal = tempfile::Builder::new() .prefix(OsStr::from_bytes(b"strict-utf8-normal-")) .tempfile_in(dir); assert!(test_file_normal.is_ok()); let test_file_invalid = tempfile::Builder::new() .prefix(OsStr::from_bytes(b"strict-utf8-\xe0-")) .tempfile_in(dir); test_file_invalid.is_err() }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/lib/testutils/src/test_backend.rs
lib/testutils/src/test_backend.rs
// Copyright 2023 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::collections::HashMap;
use std::fmt::Debug;
use std::fmt::Error;
use std::fmt::Formatter;
use std::io::Cursor;
use std::path::Path;
use std::path::PathBuf;
use std::pin::Pin;
use std::sync::Arc;
use std::sync::Mutex;
use std::sync::MutexGuard;
use std::time::SystemTime;

use async_trait::async_trait;
use futures::stream;
use futures::stream::BoxStream;
use jj_lib::backend::Backend;
use jj_lib::backend::BackendError;
use jj_lib::backend::BackendResult;
use jj_lib::backend::ChangeId;
use jj_lib::backend::Commit;
use jj_lib::backend::CommitId;
use jj_lib::backend::CopyHistory;
use jj_lib::backend::CopyId;
use jj_lib::backend::CopyRecord;
use jj_lib::backend::FileId;
use jj_lib::backend::SecureSig;
use jj_lib::backend::SigningFn;
use jj_lib::backend::SymlinkId;
use jj_lib::backend::Tree;
use jj_lib::backend::TreeId;
use jj_lib::backend::make_root_commit;
use jj_lib::dag_walk::topo_order_reverse;
use jj_lib::index::Index;
use jj_lib::object_id::ObjectId as _;
use jj_lib::repo_path::RepoPath;
use jj_lib::repo_path::RepoPathBuf;
use tokio::io::AsyncRead;
use tokio::io::AsyncReadExt as _;
use tokio::runtime::Runtime;

// Short hashes keep test output readable; these are not real crypto lengths.
const HASH_LENGTH: usize = 10;
const CHANGE_ID_LENGTH: usize = 16;

// Keyed by canonical store path. Since we just use the path as a key, we can't
// rely on the file system to resolve two different uncanonicalized paths to
// the same real path (as we would if we just used the path with `std::fs`
// functions).
type TestBackendDataMap = HashMap<PathBuf, Arc<Mutex<TestBackendData>>>;

/// In-memory object store shared by all `TestBackend` instances opened on the
/// same store path.
#[derive(Default)]
pub struct TestBackendData {
    commits: HashMap<CommitId, Commit>,
    // Trees/files/symlinks are keyed by path first: objects written at one
    // path are deliberately NOT readable from another path (see `TestBackend`
    // docs below).
    trees: HashMap<RepoPathBuf, HashMap<TreeId, Tree>>,
    files: HashMap<RepoPathBuf, HashMap<FileId, Vec<u8>>>,
    symlinks: HashMap<RepoPathBuf, HashMap<SymlinkId, String>>,
    copies: HashMap<CopyId, CopyHistory>,
}

/// Creates and re-opens `TestBackend` instances, keyed by canonicalized store
/// path, so a "loaded" backend sees the data written by the one that
/// initialized the same path.
#[derive(Clone, Default)]
pub struct TestBackendFactory {
    backend_data: Arc<Mutex<TestBackendDataMap>>,
}

impl TestBackendFactory {
    /// Creates a fresh, empty backend for `store_path` and registers it.
    pub fn init(&self, store_path: &Path) -> TestBackend {
        let data = Arc::new(Mutex::new(TestBackendData::default()));
        self.backend_data
            .lock()
            .unwrap()
            .insert(store_path.canonicalize().unwrap(), data.clone());
        TestBackend::with_data(data)
    }

    /// Re-opens the backend previously created with [`Self::init`] for the
    /// same (canonicalized) path. Panics if the path was never initialized.
    pub fn load(&self, store_path: &Path) -> TestBackend {
        let data = self
            .backend_data
            .lock()
            .unwrap()
            .get(&store_path.canonicalize().unwrap())
            .unwrap()
            .clone();
        TestBackend::with_data(data)
    }
}

impl Debug for TestBackendFactory {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        f.debug_struct("TestBackendFactory").finish_non_exhaustive()
    }
}

// Truncated blake2b content hash used for all object ids in this backend.
fn get_hash(content: &(impl jj_lib::content_hash::ContentHash + ?Sized)) -> Vec<u8> {
    jj_lib::content_hash::blake2b_hash(content).as_slice()[..HASH_LENGTH].to_vec()
}

/// A commit backend for use in tests.
///
/// It's meant to be strict, in order to catch bugs where we make the
/// wrong assumptions. For example, unlike both `GitBackend` and
/// `SimpleBackend`, this backend doesn't share objects written to
/// different paths (writing a file with contents X to path A will not
/// make it possible to read that contents from path B given the same
/// `FileId`).
pub struct TestBackend {
    root_commit_id: CommitId,
    root_change_id: ChangeId,
    empty_tree_id: TreeId,
    data: Arc<Mutex<TestBackendData>>,
    // Dedicated runtime used to run each operation on another thread; see
    // `run_async`.
    runtime: Runtime,
}

impl TestBackend {
    /// Wraps an (optionally shared) data store in a backend instance.
    pub fn with_data(data: Arc<Mutex<TestBackendData>>) -> Self {
        // All-zero ids for the virtual root commit/change, matching the fixed
        // id lengths advertised by this backend.
        let root_commit_id = CommitId::from_bytes(&[0; HASH_LENGTH]);
        let root_change_id = ChangeId::from_bytes(&[0; CHANGE_ID_LENGTH]);
        let empty_tree_id = TreeId::new(get_hash(&Tree::default()));
        let runtime = Runtime::new().unwrap();
        Self {
            root_commit_id,
            root_change_id,
            empty_tree_id,
            data,
            runtime,
        }
    }

    fn locked_data(&self) -> MutexGuard<'_, TestBackendData> {
        self.data.lock().unwrap()
    }

    /// Deletes a commit object without any referential-integrity checks.
    /// Useful for tests that need to simulate a corrupted/incomplete store.
    pub fn remove_commit_unchecked(&self, id: &CommitId) {
        self.locked_data().commits.remove(id);
    }

    // Runs `process` with the locked data on the backend's own runtime. This
    // makes every backend operation hop to another thread, exercising the
    // async code paths of callers more than an inline implementation would.
    async fn run_async<R: Send + 'static>(
        &self,
        process: impl FnOnce(MutexGuard<TestBackendData>) -> R + Send + 'static,
    ) -> R {
        let data = self.data.clone();
        self.runtime
            .spawn(async move { process(data.lock().unwrap()) })
            .await
            .unwrap()
    }
}

impl Debug for TestBackend {
    fn fmt(&self, f: &mut Formatter<'_>) -> Result<(), Error> {
        f.debug_struct("TestBackend").finish_non_exhaustive()
    }
}

#[async_trait]
impl Backend for TestBackend {
    fn name(&self) -> &'static str {
        "test"
    }

    fn commit_id_length(&self) -> usize {
        HASH_LENGTH
    }

    fn change_id_length(&self) -> usize {
        CHANGE_ID_LENGTH
    }

    fn root_commit_id(&self) -> &CommitId {
        &self.root_commit_id
    }

    fn root_change_id(&self) -> &ChangeId {
        &self.root_change_id
    }

    fn empty_tree_id(&self) -> &TreeId {
        &self.empty_tree_id
    }

    fn concurrency(&self) -> usize {
        // Not optimal, just for testing the async code more
        10
    }

    async fn read_file(
        &self,
        path: &RepoPath,
        id: &FileId,
    ) -> BackendResult<Pin<Box<dyn AsyncRead + Send>>> {
        let path = path.to_owned();
        let id = id.clone();
        self.run_async(move |data| {
            // Lookup is path-first: the same FileId under a different path is
            // intentionally not found (strictness, see type-level docs).
            match data
                .files
                .get(&path)
                .and_then(|items| items.get(&id))
                .cloned()
            {
                None => Err(BackendError::ObjectNotFound {
                    object_type: "file".to_string(),
                    hash: id.hex(),
                    source: format!("at path {path:?}").into(),
                }),
                Some(contents) => {
                    let reader: Pin<Box<dyn AsyncRead + Send>> = Box::pin(Cursor::new(contents));
                    Ok(reader)
                }
            }
        })
        .await
    }

    async fn write_file(
        &self,
        path: &RepoPath,
        contents: &mut (dyn AsyncRead + Send + Unpin),
    ) -> BackendResult<FileId> {
        let path = path.to_owned();
        let mut bytes = Vec::new();
        contents.read_to_end(&mut bytes).await.unwrap();
        self.run_async(move |mut data| {
            let id = FileId::new(get_hash(&bytes));
            data.files
                .entry(path.clone())
                .or_default()
                .insert(id.clone(), bytes);
            Ok(id)
        })
        .await
    }

    async fn read_symlink(&self, path: &RepoPath, id: &SymlinkId) -> BackendResult<String> {
        let path = path.to_owned();
        let id = id.clone();
        self.run_async(move |data| {
            match data
                .symlinks
                .get(&path)
                .and_then(|items| items.get(&id))
                .cloned()
            {
                None => Err(BackendError::ObjectNotFound {
                    object_type: "symlink".to_string(),
                    hash: id.hex(),
                    source: format!("at path {path:?}").into(),
                }),
                Some(target) => Ok(target),
            }
        })
        .await
    }

    async fn write_symlink(&self, path: &RepoPath, target: &str) -> BackendResult<SymlinkId> {
        let id = SymlinkId::new(get_hash(target.as_bytes()));
        let path = path.to_owned();
        let target = target.to_owned();
        self.run_async(move |mut data| {
            data.symlinks
                .entry(path)
                .or_default()
                .insert(id.clone(), target);
            Ok(id)
        })
        .await
    }

    async fn read_copy(&self, id: &CopyId) -> BackendResult<CopyHistory> {
        let id = id.clone();
        self.run_async(move |data| {
            let copy = data
                .copies
                .get(&id)
                .cloned()
                .ok_or_else(|| BackendError::ObjectNotFound {
                    object_type: "copy".to_string(),
                    hash: id.hex(),
                    source: "".into(),
                })?;
            Ok(copy)
        })
        .await
    }

    async fn write_copy(&self, contents: &CopyHistory) -> BackendResult<CopyId> {
        let contents = contents.clone();
        self.run_async(move |mut data| {
            let id = CopyId::new(get_hash(&contents));
            data.copies.insert(id.clone(), contents);
            Ok(id)
        })
        .await
    }

    async fn get_related_copies(&self, copy_id: &CopyId) -> BackendResult<Vec<CopyHistory>> {
        let copy_id = copy_id.clone();
        self.run_async(move |data| {
            let copies = &data.copies;
            if !copies.contains_key(&copy_id) {
                return Err(BackendError::ObjectNotFound {
                    object_type: "copy history".to_string(),
                    hash: copy_id.hex(),
                    source: "".into(),
                });
            }
            // Return all copy histories to test that the caller correctly ignores histories
            // that are not relevant to the trees they're working with.
            let mut histories = vec![];
            for id in topo_order_reverse(
                copies.keys(),
                |id| *id,
                |id| copies.get(*id).unwrap().parents.iter(),
                |_| panic!("graph has cycle"),
            )
            .unwrap()
            {
                histories.push(copies.get(id).unwrap().clone());
            }
            Ok(histories)
        })
        .await
    }

    async fn read_tree(&self, path: &RepoPath, id: &TreeId) -> BackendResult<Tree> {
        // The empty tree is virtual: readable regardless of path or store
        // contents.
        if id == &self.empty_tree_id {
            return Ok(Tree::default());
        }
        let path = path.to_owned();
        let id = id.clone();
        self.run_async(move |data| {
            match data
                .trees
                .get(&path)
                .and_then(|items| items.get(&id))
                .cloned()
            {
                None => Err(BackendError::ObjectNotFound {
                    object_type: "tree".to_string(),
                    hash: id.hex(),
                    source: format!("at path {path:?}").into(),
                }),
                Some(tree) => Ok(tree),
            }
        })
        .await
    }

    async fn write_tree(&self, path: &RepoPath, contents: &Tree) -> BackendResult<TreeId> {
        let path = path.to_owned();
        let contents = contents.clone();
        self.run_async(move |mut data| {
            let id = TreeId::new(get_hash(&contents));
            data.trees
                .entry(path.clone())
                .or_default()
                .insert(id.clone(), contents.clone());
            Ok(id)
        })
        .await
    }

    async fn read_commit(&self, id: &CommitId) -> BackendResult<Commit> {
        // The root commit is virtual and never stored.
        if id == &self.root_commit_id {
            return Ok(make_root_commit(
                self.root_change_id.clone(),
                self.empty_tree_id.clone(),
            ));
        }
        let id = id.clone();
        self.run_async(move |data| match data.commits.get(&id).cloned() {
            None => Err(BackendError::ObjectNotFound {
                object_type: "commit".to_string(),
                hash: id.hex(),
                source: "".into(),
            }),
            Some(commit) => Ok(commit),
        })
        .await
    }

    async fn write_commit(
        &self,
        mut contents: Commit,
        mut sign_with: Option<&mut SigningFn>,
    ) -> BackendResult<(CommitId, Commit)> {
        assert!(contents.secure_sig.is_none(), "commit.secure_sig was set");
        if let Some(sign) = &mut sign_with {
            // Sign the Debug representation of the commit; good enough for
            // tests since it changes whenever the commit does.
            let data = format!("{contents:?}").into_bytes();
            let sig = sign(&data).map_err(|err| BackendError::Other(Box::new(err)))?;
            contents.secure_sig = Some(SecureSig { data, sig });
        }
        self.run_async(move |mut data| {
            let id = CommitId::new(get_hash(&contents));
            data.commits.insert(id.clone(), contents.clone());
            Ok((id, contents))
        })
        .await
    }

    fn get_copy_records(
        &self,
        _paths: Option<&[RepoPathBuf]>,
        _root: &CommitId,
        _head: &CommitId,
    ) -> BackendResult<BoxStream<'_, BackendResult<CopyRecord>>> {
        // Copy records are not modeled by this backend; report none.
        Ok(Box::pin(stream::empty()))
    }

    fn gc(&self, _index: &dyn Index, _keep_newer: SystemTime) -> BackendResult<()> {
        // GC is a no-op for the in-memory store.
        Ok(())
    }
}

#[cfg(test)]
mod tests {
    use pollster::FutureExt as _;

    use super::*;
    use crate::repo_path_buf;

    fn copy_history(path: &str, parents: &[CopyId]) -> CopyHistory {
        CopyHistory {
            current_path: repo_path_buf(path),
            parents: parents.to_vec(),
            salt: vec![],
        }
    }

    #[test]
    fn get_related_copies() {
        let backend = TestBackend::with_data(Arc::new(Mutex::new(TestBackendData::default())));

        // Test with a single chain so the resulting order is deterministic
        let copy1 = copy_history("foo1", &[]);
        let copy1_id = backend.write_copy(&copy1).block_on().unwrap();
        let copy2 = copy_history("foo2", std::slice::from_ref(&copy1_id));
        let copy2_id = backend.write_copy(&copy2).block_on().unwrap();
        let copy3 = copy_history("foo3", std::slice::from_ref(&copy2_id));
        let copy3_id = backend.write_copy(&copy3).block_on().unwrap();

        // Error when looking up by non-existent id
        assert!(
            backend
                .get_related_copies(&CopyId::from_hex("abcd"))
                .block_on()
                .is_err()
        );

        // Looking up by any id returns the related copies in the same order (children
        // before parents)
        let related = backend.get_related_copies(&copy1_id).block_on().unwrap();
        assert_eq!(related, vec![copy3.clone(), copy2.clone(), copy1.clone()]);
        let related: Vec<CopyHistory> = backend.get_related_copies(&copy3_id).block_on().unwrap();
        assert_eq!(related, vec![copy3.clone(), copy2.clone(), copy1.clone()]);
    }
}
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/lib/testutils/src/git.rs
lib/testutils/src/git.rs
// Copyright 2025 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::path::Path;
use std::path::PathBuf;

use gix::date::parse::TimeBuf;

// Fixed identity used for all commits created by these helpers, so tests get
// deterministic author/committer data.
pub const GIT_USER: &str = "Someone";
pub const GIT_EMAIL: &str = "someone@example.org";

// Minimal git config overrides applied to every repository opened/created by
// this module (identity + a fixed default branch name).
fn git_config() -> Vec<bstr::BString> {
    vec![
        format!("user.name = {GIT_USER}").into(),
        format!("user.email = {GIT_EMAIL}").into(),
        "init.defaultBranch = master".into(),
    ]
}

// Isolated open options: ignores the user's real git configuration and
// applies only the overrides above.
fn open_options() -> gix::open::Options {
    gix::open::Options::isolated()
        .config_overrides(git_config())
        .strict_config(true)
}

/// Opens an existing git repository with the test configuration.
pub fn open(directory: impl Into<PathBuf>) -> gix::Repository {
    gix::open_opts(directory, open_options()).unwrap()
}

/// Initializes a new non-bare git repository at `directory`.
pub fn init(directory: impl AsRef<Path>) -> gix::Repository {
    gix::ThreadSafeRepository::init_opts(
        directory,
        gix::create::Kind::WithWorktree,
        gix::create::Options::default(),
        open_options(),
    )
    .unwrap()
    .to_thread_local()
}

/// Initializes a new bare git repository at `directory`.
pub fn init_bare(directory: impl AsRef<Path>) -> gix::Repository {
    gix::ThreadSafeRepository::init_opts(
        directory,
        gix::create::Kind::Bare,
        gix::create::Options::default(),
        open_options(),
    )
    .unwrap()
    .to_thread_local()
}

/// Clones `repo_url` into `dest_path` using the `git` CLI and returns the
/// opened repository. `remote_name` defaults to `"origin"`.
pub fn clone(dest_path: &Path, repo_url: &str, remote_name: Option<&str>) -> gix::Repository {
    let remote_name = remote_name.unwrap_or("origin");
    // gitoxide doesn't write the remote HEAD as a symbolic link, which prevents
    // `jj` from getting it.
    //
    // This, plus the fact that the code to clone a repo in gitoxide is non-trivial,
    // makes it appealing to just spawn a git subprocess
    let output = std::process::Command::new("git")
        .args(["clone", repo_url, "--origin", remote_name])
        .arg(dest_path)
        .output()
        .unwrap();
    assert!(
        output.status.success(),
        "git cloning failed with {}:\n{}\n----- stderr -----\n{}",
        output.status,
        bstr::BString::from(output.stdout),
        bstr::BString::from(output.stderr),
    );
    open(dest_path)
}

/// Writes out gitlink entry pointing to the `target_repo`.
pub fn create_gitlink(src_repo: impl AsRef<Path>, target_repo: impl AsRef<Path>) {
    let git_link_path = src_repo.as_ref().join(".git");
    std::fs::write(
        git_link_path,
        format!("gitdir: {}\n", target_repo.as_ref().display()),
    )
    .unwrap();
}

/// Removes `key` from `section` of the repository's configuration file and
/// rewrites the file. Does nothing if the section doesn't exist.
pub fn remove_config_value(mut repo: gix::Repository, section: &str, key: &str) {
    let mut config = repo.config_snapshot_mut();
    let Ok(mut section) = config.section_mut(section, None) else {
        return;
    };
    section.remove(key);
    // Write back only the sections belonging to this config file (filter by
    // matching metadata), not values inherited from other scopes.
    let mut file = std::fs::File::create(config.meta().path.as_ref().unwrap()).unwrap();
    config
        .write_to_filter(&mut file, |section| section.meta() == config.meta())
        .unwrap();
}

/// Ids produced by [`add_commit`].
pub struct CommitResult {
    pub tree_id: gix::ObjectId,
    pub commit_id: gix::ObjectId,
}

/// Creates a commit on `reference` that adds/updates a single file.
///
/// The new tree is the first parent's tree with `filename` upserted (or a
/// fresh tree if there are no parents).
pub fn add_commit(
    repo: &gix::Repository,
    reference: &str,
    filename: &str,
    content: &[u8],
    message: &str,
    parents: &[gix::ObjectId],
) -> CommitResult {
    let blob_oid = repo.write_blob(content).unwrap();
    let parent_tree_editor = parents.first().map(|commit_id| {
        repo.find_commit(*commit_id)
            .unwrap()
            .tree()
            .unwrap()
            .edit()
            .unwrap()
    });
    let empty_tree_editor_fn = || {
        repo.edit_tree(gix::ObjectId::empty_tree(repo.object_hash()))
            .unwrap()
    };
    let mut tree_editor = parent_tree_editor.unwrap_or_else(empty_tree_editor_fn);
    tree_editor
        .upsert(filename, gix::object::tree::EntryKind::Blob, blob_oid)
        .unwrap();
    let tree_id = tree_editor.write().unwrap().detach();
    let commit_id = write_commit(repo, reference, tree_id, message, parents);
    CommitResult { tree_id, commit_id }
}

/// Writes a commit object pointing at `tree_id` and updates `reference` to it,
/// using the fixed test signature for author and committer.
pub fn write_commit(
    repo: &gix::Repository,
    reference: &str,
    tree_id: gix::ObjectId,
    message: &str,
    parents: &[gix::ObjectId],
) -> gix::ObjectId {
    let signature = signature();
    repo.commit_as(
        signature.to_ref(&mut TimeBuf::default()),
        signature.to_ref(&mut TimeBuf::default()),
        reference,
        message,
        tree_id,
        parents.iter().copied(),
    )
    .unwrap()
    .detach()
}

/// Points HEAD directly at a commit id (detached HEAD).
pub fn set_head_to_id(repo: &gix::Repository, target: gix::ObjectId) {
    repo.edit_reference(gix::refs::transaction::RefEdit {
        change: gix::refs::transaction::Change::Update {
            log: gix::refs::transaction::LogChange::default(),
            expected: gix::refs::transaction::PreviousValue::Any,
            new: gix::refs::Target::Object(target),
        },
        name: "HEAD".try_into().unwrap(),
        deref: false,
    })
    .unwrap();
}

/// Makes `reference` a symbolic ref pointing at `target` (e.g. making HEAD
/// point at a branch), creating a reflog entry.
pub fn set_symbolic_reference(repo: &gix::Repository, reference: &str, target: &str) {
    use gix::refs::transaction;
    let change = transaction::Change::Update {
        log: transaction::LogChange {
            mode: transaction::RefLog::AndReference,
            force_create_reflog: true,
            message: "create symbolic reference".into(),
        },
        expected: transaction::PreviousValue::Any,
        new: gix::refs::Target::Symbolic(target.try_into().unwrap()),
    };
    let ref_edit = transaction::RefEdit {
        change,
        name: reference.try_into().unwrap(),
        deref: false,
    };
    repo.edit_reference(ref_edit).unwrap();
}

/// Checks out `tree_id` into the repository's working directory via an index
/// built from that tree.
pub fn checkout_tree_index(repo: &gix::Repository, tree_id: gix::ObjectId) {
    let objects = repo.objects.clone();
    let mut index = repo.index_from_tree(&tree_id).unwrap();
    gix::worktree::state::checkout(
        &mut index,
        repo.workdir().unwrap(),
        objects,
        &gix::progress::Discard,
        &gix::progress::Discard,
        &gix::interrupt::IS_INTERRUPTED,
        gix::worktree::state::checkout::Options::default(),
    )
    .unwrap();
}

// Fixed signature (epoch timestamp 0) for deterministic commits.
fn signature() -> gix::actor::Signature {
    gix::actor::Signature {
        name: bstr::BString::from(GIT_USER),
        email: bstr::BString::from(GIT_EMAIL),
        time: gix::date::Time::new(0, 0),
    }
}

/// Where a status entry comes from: the HEAD↔index diff or the
/// index↔worktree diff.
#[derive(Debug, PartialEq, Eq)]
pub enum GitStatusInfo {
    Index(IndexStatus),
    Worktree(WorktreeStatus),
}

#[derive(Debug, PartialEq, Eq)]
pub enum IndexStatus {
    Addition,
    Deletion,
    Rename,
    Modification,
}

#[derive(Debug, PartialEq, Eq)]
pub enum WorktreeStatus {
    Removed,
    Added,
    Modified,
    TypeChange,
    Renamed,
    Copied,
    IntentToAdd,
    Conflict,
    Ignored,
}

impl<'lhs, 'rhs> From<gix::diff::index::ChangeRef<'lhs, 'rhs>> for IndexStatus {
    fn from(value: gix::diff::index::ChangeRef<'lhs, 'rhs>) -> Self {
        match value {
            gix::diff::index::ChangeRef::Addition { .. } => Self::Addition,
            gix::diff::index::ChangeRef::Deletion { .. } => Self::Deletion,
            gix::diff::index::ChangeRef::Rewrite { .. } => Self::Rename,
            gix::diff::index::ChangeRef::Modification { .. } => Self::Modification,
        }
    }
}

impl From<Option<gix::status::index_worktree::iter::Summary>> for WorktreeStatus {
    fn from(value: Option<gix::status::index_worktree::iter::Summary>) -> Self {
        match value {
            Some(gix::status::index_worktree::iter::Summary::Removed) => Self::Removed,
            Some(gix::status::index_worktree::iter::Summary::Added) => Self::Added,
            Some(gix::status::index_worktree::iter::Summary::Modified) => Self::Modified,
            Some(gix::status::index_worktree::iter::Summary::TypeChange) => Self::TypeChange,
            Some(gix::status::index_worktree::iter::Summary::Renamed) => Self::Renamed,
            Some(gix::status::index_worktree::iter::Summary::Copied) => Self::Copied,
            Some(gix::status::index_worktree::iter::Summary::IntentToAdd) => Self::IntentToAdd,
            Some(gix::status::index_worktree::iter::Summary::Conflict) => Self::Conflict,
            // A worktree item with no summary is reported for ignored files.
            None => Self::Ignored,
        }
    }
}

impl From<gix::status::Item> for GitStatusInfo {
    fn from(value: gix::status::Item) -> Self {
        match value {
            gix::status::Item::TreeIndex(change) => Self::Index(change.into()),
            gix::status::Item::IndexWorktree(item) => Self::Worktree(item.summary().into()),
        }
    }
}

/// One entry of [`status`]: a repo-relative path plus its status kind.
#[derive(Debug, PartialEq, Eq)]
pub struct GitStatus {
    path: String,
    status: GitStatusInfo,
}

impl From<gix::status::Item> for GitStatus {
    fn from(value: gix::status::Item) -> Self {
        let path = value.location().to_string();
        let status = value.into();
        Self { path, status }
    }
}

/// Computes `git status`-like information (including untracked and ignored
/// files), sorted by path for deterministic comparison in tests.
pub fn status(repo: &gix::Repository) -> Vec<GitStatus> {
    let mut status: Vec<GitStatus> = repo
        .status(gix::progress::Discard)
        .unwrap()
        .untracked_files(gix::status::UntrackedFiles::Files)
        .dirwalk_options(|options| {
            options.emit_ignored(Some(gix::dir::walk::EmissionMode::Matching))
        })
        .into_iter(None)
        .unwrap()
        .map(Result::unwrap)
        .map(|x| x.into())
        .collect();
    status.sort_by(|a, b| a.path.cmp(&b.path));
    status
}

/// Helper for building a git index (and matching worktree files) entry by
/// entry, then writing it to disk.
pub struct IndexManager<'a> {
    index: gix::index::File,
    repo: &'a gix::Repository,
}

impl<'a> IndexManager<'a> {
    pub fn new(repo: &'a gix::Repository) -> Self {
        // This would be equivalent to repo.open_index_or_empty() if such
        // function existed.
        let index = repo.index_or_empty().unwrap();
        let index = gix::index::File::clone(&index); // unshare
        Self { index, repo }
    }

    /// Writes `data` to `name` in the worktree and stages it in the in-memory
    /// index. Entries are pushed unsorted; call [`Self::sync_index`] to fix up
    /// ordering and persist.
    pub fn add_file(&mut self, name: &str, data: &[u8]) {
        std::fs::write(self.repo.workdir().unwrap().join(name), data).unwrap();
        let blob_oid = self.repo.write_blob(data).unwrap().detach();
        self.index.dangerously_push_entry(
            gix::index::entry::Stat::default(),
            blob_oid,
            gix::index::entry::Flags::from_stage(gix::index::entry::Stage::Unconflicted),
            gix::index::entry::Mode::FILE,
            name.as_bytes().into(),
        );
    }

    /// Sorts and verifies the accumulated entries, then writes the index file.
    pub fn sync_index(&mut self) {
        self.index.sort_entries();
        self.index.verify_entries().unwrap();
        self.index
            .write(gix::index::write::Options::default())
            .unwrap();
    }
}

/// Adds a named remote via the `git` CLI.
pub fn add_remote(repo_dir: impl AsRef<Path>, remote_name: &str, url: &str) {
    let output = std::process::Command::new("git")
        .current_dir(repo_dir)
        .args(["remote", "add", remote_name, url])
        .output()
        .unwrap();
    assert!(
        output.status.success(),
        "git remote add {remote_name} {url} failed with {}:\n{}\n----- stderr -----\n{}",
        output.status,
        bstr::BString::from(output.stdout),
        bstr::BString::from(output.stderr),
    );
}

/// Renames a remote via the `git` CLI.
pub fn rename_remote(repo_dir: impl AsRef<Path>, original: &str, new: &str) {
    let output = std::process::Command::new("git")
        .current_dir(repo_dir)
        .args(["remote", "rename", original, new])
        .output()
        .unwrap();
    assert!(
        output.status.success(),
        "git remote rename failed with {}:\n{}\n----- stderr -----\n{}",
        output.status,
        bstr::BString::from(output.stdout),
        bstr::BString::from(output.stderr),
    );
}

/// Fetches from `remote` via the `git` CLI.
pub fn fetch(repo_dir: impl AsRef<Path>, remote: &str) {
    let output = std::process::Command::new("git")
        .current_dir(repo_dir)
        .args(["fetch", remote])
        .output()
        .unwrap();
    assert!(
        output.status.success(),
        "git fetch {remote} failed with {}:\n{}\n----- stderr -----\n{}",
        output.status,
        bstr::BString::from(output.stdout),
        bstr::BString::from(output.stderr),
    );
}
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/lib/testutils/src/proptest.rs
lib/testutils/src/proptest.rs
// Copyright 2025 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::collections::BTreeMap;
use std::fmt::Debug;
use std::sync::Arc;

use itertools::Itertools as _;
use jj_lib::backend::CommitId;
use jj_lib::merged_tree::MergedTree;
use jj_lib::repo::ReadonlyRepo;
use jj_lib::repo_path::RepoPath;
use jj_lib::repo_path::RepoPathBuf;
use jj_lib::repo_path::RepoPathComponent;
use proptest::prelude::*;
use proptest_derive::Arbitrary;
use proptest_state_machine::ReferenceStateMachine;

use crate::create_tree_with;

// Strategy for generating file contents, biased towards interesting edge
// cases (empty, single-zero-byte, newline-heavy text) plus arbitrary bytes.
fn arb_file_contents() -> impl Strategy<Value = Vec<u8>> {
    prop_oneof![
        // Empty files represent a significant edge case, so we want to increase the likelihood of
        // empty file contents in subsequent transitions.
        Just(vec![]),
        // [0] is the simplest "binary" file and it's included here to increase the likelihood of
        // identical binary file contents in subsequent transition.
        Just(vec![0]),
        // Diffing is line-oriented, so try to generate files with relatively
        // many newlines.
        "(\n|[a-z]|\\PC)*".prop_map(|s| s.into_bytes()),
        // Arbitrary binary contents, not limited to valid UTF-8.
        proptest::collection::vec(any::<u8>(), 0..32),
    ]
}

/// A single working-copy entry in the reference model.
#[derive(Arbitrary, Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum DirEntry {
    File {
        #[proptest(strategy = "arb_file_contents()")]
        contents: Vec<u8>,
        executable: bool,
    },
    // TODO: Only files are created for now; extend test to include symlinks.
    #[proptest(skip)]
    Symlink { target: String },
    // TODO: Only files are created for now; extend test to include submodules.
    #[proptest(skip)]
    GitSubmodule { commit: CommitId },
}

// Strategy for a single path component: a few fixed short names (to
// encourage collisions between transitions) or arbitrary safe characters.
fn arb_path_component() -> impl Strategy<Value = String> {
    // HACK: Forbidding `.` here to avoid `.`/`..` in the path components, which
    // causes downstream errors.
    "(a|b|c|d|[\\PC&&[^/.]]+)"
}

#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub enum Transition {
    /// Create a new [`DirEntry`] at [`path`](Self::SetDirEntry::path).
    /// - If there is already a file or directory at `path`, it is first
    ///   deleted. (Directories will be recursively deleted.)
    /// - If [`dir_entry`](Self::SetDirEntry::path) is [`None`], the entry at
    ///   `path` is deleted.
    SetDirEntry {
        path: RepoPathBuf,
        dir_entry: Option<DirEntry>,
    },
    /// Commit the current working copy. Used by the system under test.
    Commit,
}

/// Reference model of a working copy: a flat map from file path to entry.
/// Directories exist only implicitly as path prefixes of the entries.
#[derive(Clone, Debug, Default)]
pub struct WorkingCopyReferenceStateMachine {
    entries: BTreeMap<RepoPathBuf, DirEntry>,
}

impl WorkingCopyReferenceStateMachine {
    /// Check invariants that should be maintained by the test code itself
    /// (rather than the library code). If these fail, then the test harness is
    /// buggy.
    fn check_invariants(&self) {
        for file_path in self.entries.keys() {
            // No entry may also exist at a proper ancestor of another entry's
            // path (that would mean a file and a directory share a path).
            for ancestor in file_path.ancestors().skip(1) {
                assert!(
                    !self.entries.contains_key(ancestor),
                    "file {file_path:?} exists, but {ancestor:?} is not a directory"
                );
            }
        }
    }

    /// Iterates over the paths of all modeled entries.
    pub fn paths(&self) -> impl Iterator<Item = &RepoPath> {
        self.entries.keys().map(AsRef::as_ref)
    }

    /// Materializes the modeled state as a tree in `repo`'s store.
    pub fn create_tree(&self, repo: &Arc<ReadonlyRepo>) -> MergedTree {
        create_tree_with(repo, |builder| {
            for (path, dir_entry) in &self.entries {
                match dir_entry.clone() {
                    DirEntry::File {
                        contents,
                        executable,
                    } => {
                        builder.file(path, contents).executable(executable);
                    }
                    DirEntry::Symlink { target } => builder.symlink(path, &target),
                    DirEntry::GitSubmodule { commit } => builder.submodule(path, commit),
                }
            }
        })
    }
}

impl WorkingCopyReferenceStateMachine {
    // Strategy selecting an existing (implicit) directory, including the root.
    fn arb_extant_directory(&self) -> impl Strategy<Value = RepoPathBuf> + use<> {
        let extant_directories = if self.entries.is_empty() {
            vec![RepoPathBuf::root()]
        } else {
            self.entries
                .keys()
                .flat_map(|file_path| file_path.ancestors().skip(1))
                .map(|path| path.to_owned())
                .unique()
                .collect_vec()
        };
        proptest::sample::select(extant_directories)
    }

    // Strategy selecting any existing non-root path (a file or an implicit
    // directory). Must not be called on an empty state (select panics on an
    // empty list).
    fn arb_extant_path(&self) -> impl Strategy<Value = RepoPathBuf> + use<> {
        proptest::sample::select(
            self.entries
                .keys()
                .flat_map(|file_path| file_path.ancestors())
                .filter(|path| !path.is_root())
                .map(|path| path.to_owned())
                .unique()
                .collect_vec(),
        )
    }

    // Strategy producing a SetDirEntry at a brand-new path (1-2 new components
    // appended to some existing directory).
    fn arb_transition_create(&self) -> impl Strategy<Value = Transition> + use<> {
        (
            self.arb_extant_directory(),
            proptest::collection::vec(arb_path_component(), 1..3),
            any::<DirEntry>(),
        )
            .prop_map(|(extant_dir_path, new_path_components, dir_entry)| {
                let mut path = extant_dir_path;
                path.extend(
                    new_path_components
                        .iter()
                        .map(|c| RepoPathComponent::new(c).unwrap()),
                );
                Transition::SetDirEntry {
                    path,
                    dir_entry: Some(dir_entry),
                }
            })
    }

    // Strategy producing a SetDirEntry that overwrites or deletes an existing
    // path (dir_entry of None means deletion).
    fn arb_transition_modify(&self) -> impl Strategy<Value = Transition> + use<> {
        (self.arb_extant_path(), any::<Option<DirEntry>>()).prop_map(|(path, new_dir_entry)| {
            Transition::SetDirEntry {
                path,
                dir_entry: new_dir_entry,
            }
        })
    }
}

impl ReferenceStateMachine for WorkingCopyReferenceStateMachine {
    type State = Self;
    type Transition = Transition;

    fn init_state() -> BoxedStrategy<Self::State> {
        Just(Self::State::default()).boxed()
    }

    fn transitions(state: &Self::State) -> BoxedStrategy<Self::Transition> {
        // NOTE: Using `prop_oneof` here instead of `proptest::sample::select`
        // since it seems to minimize better?
        if !state.entries.is_empty() {
            prop_oneof![
                Just(Transition::Commit),
                state.arb_transition_create(),
                state.arb_transition_modify(),
            ]
            .boxed()
        } else {
            // Modify is only offered once at least one entry exists.
            prop_oneof![Just(Transition::Commit), state.arb_transition_create()].boxed()
        }
    }

    fn apply(mut state: Self::State, transition: &Self::Transition) -> Self::State {
        match transition {
            Transition::Commit => {
                // Do nothing; this is handled by the system under test.
            }
            Transition::SetDirEntry { path, dir_entry } => {
                assert_ne!(path.as_ref(), RepoPath::root());
                let entries = &mut state.entries;
                // Remove all entries which are contained within `path` (in case it is a
                // pre-existing directory).
                entries.retain(|extant_path, _| !extant_path.starts_with(path));
                // Any file sitting at an ancestor of `path` must give way to
                // the implicit directories the new entry needs.
                for new_dir in path.ancestors().skip(1) {
                    entries.remove(new_dir);
                }
                match dir_entry {
                    Some(dir_entry) => {
                        entries.insert(path.to_owned(), dir_entry.to_owned());
                    }
                    None => {
                        // Deletion: the retain() above already removed the
                        // entry; just double-check it's gone.
                        assert!(!entries.contains_key(path));
                    }
                }
            }
        }
        state.check_invariants();
        state
    }
}
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/lib/proc-macros/src/lib.rs
lib/proc-macros/src/lib.rs
mod content_hash; extern crate proc_macro; use quote::quote; use syn::DeriveInput; use syn::parse_macro_input; /// Derive macro generating an impl of the trait `ContentHash`. /// /// Derives the `ContentHash` trait for a struct by calling `ContentHash::hash` /// on each of the struct members in the order that they're declared. All /// members of the struct must implement the `ContentHash` trait. #[proc_macro_derive(ContentHash)] pub fn derive_content_hash(input: proc_macro::TokenStream) -> proc_macro::TokenStream { let input = parse_macro_input!(input as DeriveInput); // The name of the struct. let name = &input.ident; // Generate an expression to hash each of the fields in the struct. let hash_impl = content_hash::generate_hash_impl(&input.data); // Handle structs and enums with generics. let generics = content_hash::add_trait_bounds(input.generics); let (impl_generics, ty_generics, where_clause) = generics.split_for_impl(); let expanded = quote! { #[automatically_derived] impl #impl_generics ::jj_lib::content_hash::ContentHash for #name #ty_generics #where_clause { fn hash(&self, state: &mut impl digest::Update) { #hash_impl } } }; expanded.into() }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/lib/proc-macros/src/content_hash.rs
lib/proc-macros/src/content_hash.rs
use proc_macro2::Ident; use proc_macro2::TokenStream; use quote::format_ident; use quote::quote; use quote::quote_spanned; use syn::Data; use syn::Field; use syn::Fields; use syn::GenericParam; use syn::Generics; use syn::Index; use syn::Type; use syn::parse_quote; use syn::spanned::Spanned as _; pub fn add_trait_bounds(mut generics: Generics) -> Generics { for param in &mut generics.params { if let GenericParam::Type(ref mut type_param) = *param { type_param .bounds .push(parse_quote!(::jj_lib::content_hash::ContentHash)); } } generics } pub fn generate_hash_impl(data: &Data) -> TokenStream { match *data { Data::Struct(ref data) => match data.fields { Fields::Named(ref fields) => { let hash_statements = fields.named.iter().map(|f| { let field_name = &f.ident; let ty = &f.ty; quote_spanned! {ty.span()=> <#ty as ::jj_lib::content_hash::ContentHash>::hash( &self.#field_name, state); } }); quote! { #(#hash_statements)* } } Fields::Unnamed(ref fields) => { let hash_statements = fields.unnamed.iter().enumerate().map(|(i, f)| { let index = Index::from(i); let ty = &f.ty; quote_spanned! {ty.span() => <#ty as ::jj_lib::content_hash::ContentHash>::hash(&self.#index, state); } }); quote! { #(#hash_statements)* } } Fields::Unit => { quote! {} } }, // Generates a match statement with a match arm and hash implementation // for each of the variants in the enum. Data::Enum(ref data) => { let match_hash_statements = data.variants.iter().enumerate().map(|(i, v)| { let variant_id = &v.ident; match &v.fields { Fields::Named(fields) => { let bindings = enum_bindings(fields.named.iter()); let hash_statements = hash_statements_for_enum_fields(i, fields.named.iter()); quote_spanned! {v.span() => Self::#variant_id{ #(#bindings),* } => { #(#hash_statements)* } } } Fields::Unnamed(fields) => { let bindings = enum_bindings(fields.unnamed.iter()); let hash_statements = hash_statements_for_enum_fields(i, fields.unnamed.iter()); quote_spanned! 
{v.span() => Self::#variant_id( #(#bindings),* ) => { #(#hash_statements)* } } } Fields::Unit => { let ix = index_to_ordinal(i); quote_spanned! {v.span() => Self::#variant_id => { ::jj_lib::content_hash::ContentHash::hash(&#ix, state); } } } } }); quote! { match self { #(#match_hash_statements)* } } } Data::Union(_) => unimplemented!("ContentHash cannot be derived for unions."), } } // The documentation for `ContentHash` specifies that the hash impl for each // enum variant should hash the ordinal number of the enum variant as a little // endian u32 before hashing the variant's fields, if any. fn index_to_ordinal(ix: usize) -> u32 { u32::try_from(ix).expect("The number of enum variants overflows a u32.") } fn enum_bindings_with_type<'a>(fields: impl IntoIterator<Item = &'a Field>) -> Vec<(Type, Ident)> { fields .into_iter() .enumerate() .map(|(i, f)| { // If the field is named, use the name, otherwise generate a placeholder name. ( f.ty.clone(), f.ident.clone().unwrap_or(format_ident!("field_{}", i)), ) }) .collect::<Vec<_>>() } fn enum_bindings<'a>(fields: impl IntoIterator<Item = &'a Field>) -> Vec<Ident> { enum_bindings_with_type(fields) .into_iter() .map(|(_, b)| b) .collect() } fn hash_statements_for_enum_fields<'a>( index: usize, fields: impl IntoIterator<Item = &'a Field>, ) -> Vec<TokenStream> { let ix = index_to_ordinal(index); let typed_bindings = enum_bindings_with_type(fields); let mut hash_statements = Vec::with_capacity(typed_bindings.len() + 1); hash_statements.push(quote! {::jj_lib::content_hash::ContentHash::hash(&#ix, state);}); for (ty, b) in &typed_bindings { hash_statements.push(quote_spanned! {b.span() => <#ty as ::jj_lib::content_hash::ContentHash>::hash(#b, state); }); } hash_statements }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/build.rs
cli/build.rs
// Copyright 2023 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::path::Path;
use std::process::Command;

/// git HEAD file of the enclosing repo, relative to this crate.
const GIT_HEAD_PATH: &str = "../.git/HEAD";
/// jj operation heads of the enclosing repo, relative to this crate.
const JJ_OP_HEADS_PATH: &str = "../.jj/repo/op_heads/heads";

/// Build script: emits `cargo:rustc-env` directives for `JJ_VERSION` (the
/// package version, suffixed with a commit hash when one can be determined)
/// and `JJ_DOCS_DIR` (the docs directory relative to the crate).
fn main() {
    let pkg_version = std::env::var("CARGO_PKG_VERSION").unwrap();
    println!("cargo:rerun-if-env-changed=NIX_JJ_GIT_HASH");
    let commit_hash = get_git_hash_from_nix().or_else(|| {
        if Path::new(GIT_HEAD_PATH).exists() {
            // In a colocated workspace, .git/HEAD should reflect the
            // working-copy parent.
            println!("cargo:rerun-if-changed={GIT_HEAD_PATH}");
        } else if Path::new(JJ_OP_HEADS_PATH).exists() {
            // op_heads changes when working-copy files are mutated, which is
            // way more frequent than .git/HEAD.
            println!("cargo:rerun-if-changed={JJ_OP_HEADS_PATH}");
        }
        get_git_hash_from_jj().or_else(get_git_hash_from_git)
    });
    match commit_hash {
        Some(hash) => println!("cargo:rustc-env=JJ_VERSION={pkg_version}-{hash}"),
        None => println!("cargo:rustc-env=JJ_VERSION={pkg_version}"),
    }

    let docs_symlink_path = Path::new("docs");
    println!("cargo:rerun-if-changed={}", docs_symlink_path.display());
    let docs_dir = if docs_symlink_path.join("index.md").exists() {
        "docs/"
    } else {
        "../docs/"
    };
    println!("cargo:rustc-env=JJ_DOCS_DIR={docs_dir}");
}

/// Commit hash injected by the Nix build via `NIX_JJ_GIT_HASH`, if non-empty.
fn get_git_hash_from_nix() -> Option<String> {
    match std::env::var("NIX_JJ_GIT_HASH") {
        Ok(hash) if !hash.is_empty() => Some(hash),
        _ => None,
    }
}

/// Asks `jj` for the commit id(s) of the working-copy parent(s).
fn get_git_hash_from_jj() -> Option<String> {
    let output = Command::new("jj")
        .args([
            "--ignore-working-copy",
            "--color=never",
            "log",
            "--no-graph",
            "-r=@-",
            "-T=commit_id ++ '-'",
        ])
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    let mut parent_commits = String::from_utf8(output.stdout).unwrap();
    // If a development version of `jj` is compiled at a merge commit, the
    // output contains several commit ids, each followed by a `-` separator;
    // strip only the trailing separator(s).
    let keep_len = parent_commits.trim_end_matches('-').len();
    parent_commits.truncate(keep_len);
    Some(parent_commits)
}

/// Falls back to `git rev-parse HEAD` for the current commit hash.
fn get_git_hash_from_git() -> Option<String> {
    let output = Command::new("git")
        .args(["rev-parse", "HEAD"])
        .output()
        .ok()?;
    if !output.status.success() {
        return None;
    }
    Some(str::from_utf8(&output.stdout).unwrap().trim_end().to_owned())
}
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/complete.rs
cli/src/complete.rs
// Copyright 2024 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashSet; use std::io::BufRead as _; use std::path::Path; use clap::FromArgMatches as _; use clap::builder::StyledStr; use clap_complete::CompletionCandidate; use indoc::indoc; use itertools::Itertools as _; use jj_lib::config::ConfigNamePathBuf; use jj_lib::file_util::normalize_path; use jj_lib::file_util::slash_path; use jj_lib::settings::UserSettings; use jj_lib::workspace::DefaultWorkspaceLoaderFactory; use jj_lib::workspace::WorkspaceLoaderFactory as _; use crate::cli_util::GlobalArgs; use crate::cli_util::expand_args; use crate::cli_util::find_workspace_dir; use crate::cli_util::load_template_aliases; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::config::CONFIG_SCHEMA; use crate::config::ConfigArgKind; use crate::config::ConfigEnv; use crate::config::config_from_environment; use crate::config::default_config_layers; use crate::merge_tools::ExternalMergeTool; use crate::merge_tools::configured_merge_tools; use crate::merge_tools::get_external_tool_config; use crate::revset_util::load_revset_aliases; use crate::ui::Ui; const BOOKMARK_HELP_TEMPLATE: &str = r#"template-aliases.'bookmark_help()'=''' " " ++ if(normal_target, if(normal_target.description(), normal_target.description().first_line(), "(no description set)", ), "(conflicted bookmark)", ) '''"#; const TAG_HELP_TEMPLATE: &str = 
r#"template-aliases.'tag_help()'=''' " " ++ if(normal_target, if(normal_target.description(), normal_target.description().first_line(), "(no description set)", ), "(conflicted tag)", ) '''"#; /// A helper function for various completer functions. It returns /// (candidate, help) assuming they are separated by a space. fn split_help_text(line: &str) -> (&str, Option<StyledStr>) { match line.split_once(' ') { Some((name, help)) => (name, Some(help.to_string().into())), None => (line, None), } } pub fn local_bookmarks() -> Vec<CompletionCandidate> { with_jj(|jj, _| { let output = jj .build() .arg("bookmark") .arg("list") .arg("--config") .arg(BOOKMARK_HELP_TEMPLATE) .arg("--template") .arg(r#"if(!remote, name ++ bookmark_help()) ++ "\n""#) .output() .map_err(user_error)?; Ok(String::from_utf8_lossy(&output.stdout) .lines() .map(split_help_text) .map(|(name, help)| CompletionCandidate::new(name).help(help)) .collect()) }) } pub fn tracked_bookmarks() -> Vec<CompletionCandidate> { with_jj(|jj, _| { let output = jj .build() .arg("bookmark") .arg("list") .arg("--tracked") .arg("--config") .arg(BOOKMARK_HELP_TEMPLATE) .arg("--template") .arg(r#"if(remote, name ++ '@' ++ remote ++ bookmark_help() ++ "\n")"#) .output() .map_err(user_error)?; Ok(String::from_utf8_lossy(&output.stdout) .lines() .map(split_help_text) .filter_map(|(symbol, help)| Some((symbol.split_once('@')?, help))) // There may be multiple remote bookmarks to untrack. Just pick the // first one for help text. 
.dedup_by(|((name1, _), _), ((name2, _), _)| name1 == name2) .map(|((name, _remote), help)| CompletionCandidate::new(name).help(help)) .collect()) }) } pub fn untracked_bookmarks() -> Vec<CompletionCandidate> { with_jj(|jj, _settings| { let remotes = jj .build() .arg("git") .arg("remote") .arg("list") .output() .map_err(user_error)?; let remotes = String::from_utf8_lossy(&remotes.stdout); let remotes = remotes .lines() .filter_map(|l| l.split_whitespace().next()) .collect_vec(); let bookmark_table = jj .build() .arg("bookmark") .arg("list") .arg("--all-remotes") .arg("--config") .arg(BOOKMARK_HELP_TEMPLATE) .arg("--template") .arg( r#" if(remote != "git", if(!remote, name) ++ "\t" ++ if(remote, name ++ "@" ++ remote) ++ "\t" ++ if(tracked, "tracked") ++ "\t" ++ bookmark_help() ++ "\n" )"#, ) .output() .map_err(user_error)?; let bookmark_table = String::from_utf8_lossy(&bookmark_table.stdout); let mut possible_bookmarks_to_track = Vec::new(); let mut already_tracked_bookmarks = HashSet::new(); for line in bookmark_table.lines() { let [local, remote, tracked, help] = line.split('\t').collect_array().unwrap_or_default(); if !local.is_empty() { possible_bookmarks_to_track.extend( remotes .iter() .map(|remote| (format!("{local}@{remote}"), help)), ); } else if tracked.is_empty() { possible_bookmarks_to_track.push((remote.to_owned(), help)); } else { already_tracked_bookmarks.insert(remote); } } possible_bookmarks_to_track .retain(|(bookmark, _help)| !already_tracked_bookmarks.contains(&bookmark.as_str())); Ok(possible_bookmarks_to_track .iter() .filter_map(|(symbol, help)| Some((symbol.split_once('@')?, help))) // There may be multiple remote bookmarks to track. Just pick the // first one for help text. 
.dedup_by(|((name1, _), _), ((name2, _), _)| name1 == name2) .map(|((name, _remote), help)| { CompletionCandidate::new(name).help(Some(help.to_string().into())) }) .collect()) }) } pub fn bookmarks() -> Vec<CompletionCandidate> { with_jj(|jj, _settings| { let output = jj .build() .arg("bookmark") .arg("list") .arg("--all-remotes") .arg("--config") .arg(BOOKMARK_HELP_TEMPLATE) .arg("--template") .arg( // only provide help for local refs, remote could be ambiguous r#"name ++ if(remote, "@" ++ remote, bookmark_help()) ++ "\n""#, ) .output() .map_err(user_error)?; let stdout = String::from_utf8_lossy(&output.stdout); Ok((&stdout .lines() .map(split_help_text) .chunk_by(|(name, _)| name.split_once('@').map(|t| t.0).unwrap_or(name))) .into_iter() .map(|(bookmark, mut refs)| { let help = refs.find_map(|(_, help)| help); let local = help.is_some(); let display_order = match local { true => 0, false => 1, }; CompletionCandidate::new(bookmark) .help(help) .display_order(Some(display_order)) }) .collect()) }) } pub fn local_tags() -> Vec<CompletionCandidate> { with_jj(|jj, _| { let output = jj .build() .arg("tag") .arg("list") .arg("--config") .arg(TAG_HELP_TEMPLATE) .arg("--template") .arg(r#"if(!remote, name ++ tag_help()) ++ "\n""#) .output() .map_err(user_error)?; Ok(String::from_utf8_lossy(&output.stdout) .lines() .map(split_help_text) .map(|(name, help)| CompletionCandidate::new(name).help(help)) .collect()) }) } pub fn git_remotes() -> Vec<CompletionCandidate> { with_jj(|jj, _| { let output = jj .build() .arg("git") .arg("remote") .arg("list") .output() .map_err(user_error)?; let stdout = String::from_utf8_lossy(&output.stdout); Ok(stdout .lines() .filter_map(|line| line.split_once(' ').map(|(name, _url)| name)) .map(CompletionCandidate::new) .collect()) }) } pub fn template_aliases() -> Vec<CompletionCandidate> { with_jj(|_, settings| { let Ok(template_aliases) = load_template_aliases(&Ui::null(), settings.config()) else { return Ok(Vec::new()); }; Ok(template_aliases 
.symbol_names() .map(CompletionCandidate::new) .sorted() .collect()) }) } pub fn aliases() -> Vec<CompletionCandidate> { with_jj(|_, settings| { Ok(settings .table_keys("aliases") // This is opinionated, but many people probably have several // single- or two-letter aliases they use all the time. These // aliases don't need to be completed and they would only clutter // the output of `jj <TAB>`. .filter(|alias| alias.len() > 2) .map(CompletionCandidate::new) .collect()) }) } fn revisions(match_prefix: &str, revset_filter: Option<&str>) -> Vec<CompletionCandidate> { with_jj(|jj, settings| { // display order const LOCAL_BOOKMARK: usize = 0; const TAG: usize = 1; const CHANGE_ID: usize = 2; const REMOTE_BOOKMARK: usize = 3; const REVSET_ALIAS: usize = 4; let mut candidates = Vec::new(); // bookmarks let mut cmd = jj.build(); cmd.arg("bookmark") .arg("list") .arg("--all-remotes") .arg("--config") .arg(BOOKMARK_HELP_TEMPLATE) .arg("--template") .arg( r#"if(remote != "git", name ++ if(remote, "@" ++ remote) ++ bookmark_help() ++ "\n")"#, ); if let Some(revs) = revset_filter { cmd.arg("--revisions").arg(revs); } let output = cmd.output().map_err(user_error)?; let stdout = String::from_utf8_lossy(&output.stdout); candidates.extend( stdout .lines() .map(split_help_text) .filter(|(bookmark, _)| bookmark.starts_with(match_prefix)) .map(|(bookmark, help)| { let local = !bookmark.contains('@'); let display_order = match local { true => LOCAL_BOOKMARK, false => REMOTE_BOOKMARK, }; CompletionCandidate::new(bookmark) .help(help) .display_order(Some(display_order)) }), ); // tags // Tags cannot be filtered by revisions. In order to avoid suggesting // immutable tags for mutable revision args, we skip tags entirely if // revset_filter is set. This is not a big loss, since tags usually point // to immutable revisions anyway. 
if revset_filter.is_none() { let output = jj .build() .arg("tag") .arg("list") .arg("--config") .arg(BOOKMARK_HELP_TEMPLATE) .arg("--template") .arg(r#"name ++ bookmark_help() ++ "\n""#) .arg(format!("glob:{}*", globset::escape(match_prefix))) .output() .map_err(user_error)?; let stdout = String::from_utf8_lossy(&output.stdout); candidates.extend(stdout.lines().map(|line| { let (name, desc) = split_help_text(line); CompletionCandidate::new(name) .help(desc) .display_order(Some(TAG)) })); } // change IDs let revisions = revset_filter .map(String::from) .or_else(|| settings.get_string("revsets.short-prefixes").ok()) .or_else(|| settings.get_string("revsets.log").ok()) .unwrap_or_default(); let output = jj .build() .arg("log") .arg("--no-graph") .arg("--limit") .arg("100") .arg("--revisions") .arg(revisions) .arg("--template") .arg( r#" join(" ", separate("/", change_id.shortest(), if(hidden || divergent, change_offset), ), if(description, description.first_line(), "(no description set)"), ) ++ "\n""#, ) .output() .map_err(user_error)?; let stdout = String::from_utf8_lossy(&output.stdout); candidates.extend( stdout .lines() .map(split_help_text) .filter(|(id, _)| id.starts_with(match_prefix)) .map(|(id, desc)| { CompletionCandidate::new(id) .help(desc) .display_order(Some(CHANGE_ID)) }), ); // revset aliases let revset_aliases = load_revset_aliases(&Ui::null(), settings.config())?; let mut symbol_names: Vec<_> = revset_aliases.symbol_names().collect(); symbol_names.sort(); candidates.extend( symbol_names .into_iter() .filter(|symbol| symbol.starts_with(match_prefix)) .map(|symbol| { let (_, defn) = revset_aliases.get_symbol(symbol).unwrap(); CompletionCandidate::new(symbol) .help(Some(defn.into())) .display_order(Some(REVSET_ALIAS)) }), ); Ok(candidates) }) } fn revset_expression( current: &std::ffi::OsStr, revset_filter: Option<&str>, ) -> Vec<CompletionCandidate> { let Some(current) = current.to_str() else { return Vec::new(); }; let (prepend, match_prefix) = 
split_revset_trailing_name(current).unwrap_or(("", current)); let candidates = revisions(match_prefix, revset_filter); if prepend.is_empty() { candidates } else { candidates .into_iter() .map(|candidate| candidate.add_prefix(prepend)) .collect() } } pub fn revset_expression_all(current: &std::ffi::OsStr) -> Vec<CompletionCandidate> { revset_expression(current, None) } pub fn revset_expression_mutable(current: &std::ffi::OsStr) -> Vec<CompletionCandidate> { revset_expression(current, Some("mutable()")) } pub fn revset_expression_mutable_conflicts(current: &std::ffi::OsStr) -> Vec<CompletionCandidate> { revset_expression(current, Some("mutable() & conflicts()")) } /// Identifies if an incomplete expression ends with a name, or may be continued /// with a name. /// /// If the expression ends with an name or a partial name, returns a tuple that /// splits the string at the point the name starts. /// If the expression is empty or ends with a prefix or infix operator that /// could plausibly be followed by a name, returns a tuple where the first /// item is the entire input string, and the second item is empty. /// Otherwise, returns `None`. /// /// The input expression may be incomplete (e.g. missing closing parentheses), /// and the ability to reject invalid expressions is limited. 
fn split_revset_trailing_name(incomplete_revset_str: &str) -> Option<(&str, &str)> {
    // A (possibly empty) trailing name: symbol-ish runs optionally separated
    // by `@`, `.`, `+`, or `-`. Compiled once and cached — this function runs
    // on every completion request, so recompiling the regex per call (as
    // before) was wasted work.
    static TRAILING_NAME_RE: std::sync::LazyLock<regex::Regex> =
        std::sync::LazyLock::new(|| {
            regex::Regex::new(r"^(?:[\p{XID_CONTINUE}_/]+[@.+-])*[\p{XID_CONTINUE}_/]*$").unwrap()
        });
    // Keep only what follows the last prefix/infix operator character...
    let final_part = incomplete_revset_str
        .rsplit_once([':', '~', '|', '&', '(', ','])
        .map(|(_, rest)| rest)
        .unwrap_or(incomplete_revset_str);
    // ...and the last `..` range operator, then drop leading ASCII whitespace.
    let final_part = final_part
        .rsplit_once("..")
        .map(|(_, rest)| rest)
        .unwrap_or(final_part)
        .trim_ascii_start();
    TRAILING_NAME_RE
        .is_match(final_part)
        .then(|| incomplete_revset_str.split_at(incomplete_revset_str.len() - final_part.len()))
}

/// Completes operation ids from the 100 most recent operations; help text is
/// the operation's timestamp and first description line.
pub fn operations() -> Vec<CompletionCandidate> {
    with_jj(|jj, _| {
        let output = jj
            .build()
            .arg("operation")
            .arg("log")
            .arg("--no-graph")
            .arg("--limit")
            .arg("100")
            .arg("--template")
            .arg(
                r#" separate(" ", id.short(), "(" ++ format_timestamp(time.end()) ++ ")", description.first_line(), ) ++ "\n""#,
            )
            .output()
            .map_err(user_error)?;
        Ok(String::from_utf8_lossy(&output.stdout)
            .lines()
            .map(|line| {
                let (id, help) = split_help_text(line);
                CompletionCandidate::new(id).help(help)
            })
            .collect())
    })
}

pub fn workspaces() -> Vec<CompletionCandidate> {
    let template = indoc!
{r#" name ++ "\t" ++ if( target.description(), target.description().first_line(), "(no description set)" ) ++ "\n" "#}; with_jj(|jj, _| { let output = jj .build() .arg("workspace") .arg("list") .arg("--template") .arg(template) .output() .map_err(user_error)?; let stdout = String::from_utf8_lossy(&output.stdout); Ok(stdout .lines() .filter_map(|line| { let res = line.split_once("\t").map(|(name, desc)| { CompletionCandidate::new(name).help(Some(desc.to_string().into())) }); if res.is_none() { eprintln!("Error parsing line {line}"); } res }) .collect()) }) } fn merge_tools_filtered_by( settings: &UserSettings, condition: impl Fn(ExternalMergeTool) -> bool, ) -> impl Iterator<Item = &str> { configured_merge_tools(settings).filter(move |name| { let Ok(Some(tool)) = get_external_tool_config(settings, name) else { return false; }; condition(tool) }) } pub fn merge_editors() -> Vec<CompletionCandidate> { with_jj(|_, settings| { Ok([":builtin", ":ours", ":theirs"] .into_iter() .chain(merge_tools_filtered_by(settings, |tool| { !tool.merge_args.is_empty() })) .map(CompletionCandidate::new) .collect()) }) } /// Approximate list of known diff editors pub fn diff_editors() -> Vec<CompletionCandidate> { with_jj(|_, settings| { Ok(std::iter::once(":builtin") .chain(merge_tools_filtered_by( settings, // The args are empty only if `edit-args` are explicitly set to // `[]` in TOML. If they are not specified, the default // `["$left", "$right"]` value would be used. |tool| !tool.edit_args.is_empty(), )) .map(CompletionCandidate::new) .collect()) }) } /// Approximate list of known diff tools pub fn diff_formatters() -> Vec<CompletionCandidate> { let builtin_format_kinds = crate::diff_util::all_builtin_diff_format_names(); with_jj(|_, settings| { Ok(builtin_format_kinds .iter() .map(|s| s.as_str()) .chain(merge_tools_filtered_by( settings, // The args are empty only if `diff-args` are explicitly set to // `[]` in TOML. 
If they are not specified, the default // `["$left", "$right"]` value would be used. |tool| !tool.diff_args.is_empty(), )) .map(CompletionCandidate::new) .collect()) }) } fn config_keys_rec( prefix: ConfigNamePathBuf, properties: &serde_json::Map<String, serde_json::Value>, acc: &mut Vec<CompletionCandidate>, only_leaves: bool, suffix: &str, ) { for (key, value) in properties { let mut prefix = prefix.clone(); prefix.push(key); let value = value.as_object().unwrap(); match value.get("type").and_then(|v| v.as_str()) { Some("object") => { if !only_leaves { let help = value .get("description") .map(|desc| desc.as_str().unwrap().to_string().into()); let escaped_key = prefix.to_string(); acc.push(CompletionCandidate::new(escaped_key).help(help)); } let Some(properties) = value.get("properties") else { continue; }; let properties = properties.as_object().unwrap(); config_keys_rec(prefix, properties, acc, only_leaves, suffix); } _ => { let help = value .get("description") .map(|desc| desc.as_str().unwrap().to_string().into()); let escaped_key = format!("{prefix}{suffix}"); acc.push(CompletionCandidate::new(escaped_key).help(help)); } } } } fn json_keypath<'a>( schema: &'a serde_json::Value, keypath: &str, separator: &str, ) -> Option<&'a serde_json::Value> { keypath .split(separator) .try_fold(schema, |value, step| value.get(step)) } fn jsonschema_keypath<'a>( schema: &'a serde_json::Value, keypath: &ConfigNamePathBuf, ) -> Option<&'a serde_json::Value> { keypath.components().try_fold(schema, |value, step| { let value = value.as_object()?; if value.get("type")?.as_str()? 
!= "object" { return None; } let properties = value.get("properties")?.as_object()?; properties.get(step.get()) }) } fn config_values(path: &ConfigNamePathBuf) -> Option<Vec<String>> { let schema: serde_json::Value = serde_json::from_str(CONFIG_SCHEMA).unwrap(); let mut config_entry = jsonschema_keypath(&schema, path)?; if let Some(reference) = config_entry.get("$ref") { let reference = reference.as_str()?.strip_prefix("#/")?; config_entry = json_keypath(&schema, reference, "/")?; }; if let Some(possible_values) = config_entry.get("enum") { return Some( possible_values .as_array()? .iter() .filter_map(|val| val.as_str()) .map(ToOwned::to_owned) .collect(), ); } Some(match config_entry.get("type")?.as_str()? { "boolean" => vec!["false".into(), "true".into()], _ => vec![], }) } fn config_keys_impl(only_leaves: bool, suffix: &str) -> Vec<CompletionCandidate> { let schema: serde_json::Value = serde_json::from_str(CONFIG_SCHEMA).unwrap(); let schema = schema.as_object().unwrap(); let properties = schema["properties"].as_object().unwrap(); let mut candidates = Vec::new(); config_keys_rec( ConfigNamePathBuf::root(), properties, &mut candidates, only_leaves, suffix, ); candidates } pub fn config_keys() -> Vec<CompletionCandidate> { config_keys_impl(false, "") } pub fn leaf_config_keys() -> Vec<CompletionCandidate> { config_keys_impl(true, "") } pub fn leaf_config_key_value(current: &std::ffi::OsStr) -> Vec<CompletionCandidate> { let Some(current) = current.to_str() else { return Vec::new(); }; if let Some((key, current_val)) = current.split_once('=') { let Ok(key) = key.parse() else { return Vec::new(); }; let possible_values = config_values(&key).unwrap_or_default(); possible_values .into_iter() .filter(|x| x.starts_with(current_val)) .map(|x| CompletionCandidate::new(format!("{key}={x}"))) .collect() } else { config_keys_impl(true, "=") .into_iter() .filter(|candidate| candidate.get_value().to_str().unwrap().starts_with(current)) .collect() } } pub fn 
branch_name_equals_any_revision(current: &std::ffi::OsStr) -> Vec<CompletionCandidate> { let Some(current) = current.to_str() else { return Vec::new(); }; let Some((branch_name, revision)) = current.split_once('=') else { // Don't complete branch names since we want to create a new branch return Vec::new(); }; revset_expression(revision.as_ref(), None) .into_iter() .map(|rev| rev.add_prefix(format!("{branch_name}="))) .collect() } fn path_completion_candidate_from( current_prefix: &str, normalized_prefix_path: &Path, path: &Path, mode: Option<clap::builder::StyledStr>, ) -> Option<CompletionCandidate> { let normalized_prefix = match normalized_prefix_path.to_str()? { "." => "", // `.` cannot be normalized further, but doesn't prefix `path`. normalized_prefix => normalized_prefix, }; let path = slash_path(path); let mut remainder = path.to_str()?.strip_prefix(normalized_prefix)?; // Trailing slash might have been normalized away in which case we need to strip // the leading slash in the remainder away, or else the slash would appear // twice. if current_prefix.ends_with(std::path::is_separator) { remainder = remainder.strip_prefix('/').unwrap_or(remainder); } match remainder.split_inclusive('/').at_most_one() { // Completed component is the final component in `path`, so we're completing the file to // which `mode` refers. Ok(file_completion) => Some( CompletionCandidate::new(format!( "{current_prefix}{}", file_completion.unwrap_or_default() )) .help(mode), ), // Omit `mode` when completing only up to the next directory. 
Err(mut components) => Some(CompletionCandidate::new(format!( "{current_prefix}{}", components.next().unwrap() ))), } } fn current_prefix_to_fileset(current: &str) -> String { let cur_esc = globset::escape(current); let dir_pat = format!("{cur_esc}*/**"); let path_pat = format!("{cur_esc}*"); format!("glob:{dir_pat:?} | glob:{path_pat:?}") } fn all_files_from_rev(rev: String, current: &std::ffi::OsStr) -> Vec<CompletionCandidate> { let Some(current) = current.to_str() else { return Vec::new(); }; let normalized_prefix = normalize_path(Path::new(current)); let normalized_prefix = slash_path(&normalized_prefix); with_jj(|jj, _| { let mut child = jj .build() .arg("file") .arg("list") .arg("--revision") .arg(rev) .arg("--template") .arg(r#"path.display() ++ "\n""#) .arg(current_prefix_to_fileset(current)) .stdout(std::process::Stdio::piped()) .stderr(std::process::Stdio::null()) .spawn() .map_err(user_error)?; let stdout = child.stdout.take().unwrap(); Ok(std::io::BufReader::new(stdout) .lines() .take(1_000) .map_while(Result::ok) .filter_map(|path| { path_completion_candidate_from(current, &normalized_prefix, Path::new(&path), None) }) .dedup() // directories may occur multiple times .collect()) }) } fn modified_files_from_rev_with_jj_cmd( rev: (String, Option<String>), mut cmd: std::process::Command, current: &std::ffi::OsStr, ) -> Result<Vec<CompletionCandidate>, CommandError> { let Some(current) = current.to_str() else { return Ok(Vec::new()); }; let normalized_prefix = normalize_path(Path::new(current)); let normalized_prefix = slash_path(&normalized_prefix); // In case of a rename, one entry of `diff` results in two suggestions. let template = indoc! 
{r#" concat( status ++ ' ' ++ path.display() ++ "\n", if(status == 'renamed', 'renamed.source ' ++ source.path().display() ++ "\n"), ) "#}; cmd.arg("diff") .args(["--template", template]) .arg(current_prefix_to_fileset(current)); match rev { (rev, None) => cmd.arg("--revisions").arg(rev), (from, Some(to)) => cmd.arg("--from").arg(from).arg("--to").arg(to), }; let output = cmd.output().map_err(user_error)?; let stdout = String::from_utf8_lossy(&output.stdout); let mut include_renames = false; let mut candidates: Vec<_> = stdout .lines() .filter_map(|line| line.split_once(' ')) .filter_map(|(mode, path)| { let mode = match mode { "modified" => "Modified".into(), "removed" => "Deleted".into(), "added" => "Added".into(), "renamed" => "Renamed".into(), "renamed.source" => { include_renames = true; "Renamed".into() } "copied" => "Copied".into(), _ => format!("unknown mode: '{mode}'").into(), }; path_completion_candidate_from(current, &normalized_prefix, Path::new(path), Some(mode)) }) .collect(); if include_renames { candidates.sort_unstable_by(|a, b| Path::new(a.get_value()).cmp(Path::new(b.get_value()))); } candidates.dedup(); Ok(candidates) } fn modified_files_from_rev( rev: (String, Option<String>), current: &std::ffi::OsStr, ) -> Vec<CompletionCandidate> { with_jj(|jj, _| modified_files_from_rev_with_jj_cmd(rev, jj.build(), current)) } fn conflicted_files_from_rev(rev: &str, current: &std::ffi::OsStr) -> Vec<CompletionCandidate> { let Some(current) = current.to_str() else { return Vec::new(); }; let normalized_prefix = normalize_path(Path::new(current)); let normalized_prefix = slash_path(&normalized_prefix); with_jj(|jj, _| { let output = jj .build() .arg("resolve") .arg("--list") .arg("--revision") .arg(rev) .arg(current_prefix_to_fileset(current)) .output() .map_err(user_error)?; let stdout = String::from_utf8_lossy(&output.stdout); Ok(stdout .lines() .filter_map(|line| { let path = line .split_whitespace() .next() .expect("resolve --list should contain 
whitespace after path");
                path_completion_candidate_from(current, &normalized_prefix, Path::new(path), None)
            })
            .dedup() // directories may occur multiple times
            .collect())
    })
}

/// Completes files modified in the working-copy commit (`@`).
pub fn modified_files(current: &std::ffi::OsStr) -> Vec<CompletionCandidate> {
    modified_files_from_rev(("@".into(), None), current)
}

/// Completes all files listed in a revision.
// NOTE(review): `parse::revision_or_wc()` is defined elsewhere in this file;
// presumably it yields the revision argument from the command line, defaulting
// to the working copy — confirm against the `parse` module.
pub fn all_revision_files(current: &std::ffi::OsStr) -> Vec<CompletionCandidate> {
    all_files_from_rev(parse::revision_or_wc(), current)
}

/// Completes files modified in a revision (see the note on
/// `parse::revision_or_wc()` above).
pub fn modified_revision_files(current: &std::ffi::OsStr) -> Vec<CompletionCandidate> {
    modified_files_from_rev((parse::revision_or_wc(), None), current)
}

/// Completes files modified within a revision range when `parse::range()`
/// yields one, falling back to the working-copy commit (`@`) otherwise.
pub fn modified_range_files(current: &std::ffi::OsStr) -> Vec<CompletionCandidate> {
    match parse::range() {
        Some((from, to)) => modified_files_from_rev((from, Some(to)), current),
        None => modified_files_from_rev(("@".into(), None), current),
    }
}

/// Completes files in `@` *or* the `--from` revision (not the diff between
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
true
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/config.rs
cli/src/config.rs
// Copyright 2022 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::borrow::Cow; use std::collections::BTreeSet; use std::collections::HashMap; use std::env; use std::env::split_paths; use std::fmt; use std::path::Path; use std::path::PathBuf; use std::process::Command; use std::sync::LazyLock; use etcetera::BaseStrategy as _; use itertools::Itertools as _; use jj_lib::config::ConfigFile; use jj_lib::config::ConfigGetError; use jj_lib::config::ConfigLayer; use jj_lib::config::ConfigLoadError; use jj_lib::config::ConfigMigrationRule; use jj_lib::config::ConfigNamePathBuf; use jj_lib::config::ConfigResolutionContext; use jj_lib::config::ConfigSource; use jj_lib::config::ConfigValue; use jj_lib::config::StackedConfig; use regex::Captures; use regex::Regex; use serde::Serialize as _; use tracing::instrument; use crate::command_error::CommandError; use crate::command_error::config_error; use crate::command_error::config_error_with_message; // TODO(#879): Consider generating entire schema dynamically vs. static file. pub const CONFIG_SCHEMA: &str = include_str!("config-schema.json"); /// Parses a TOML value expression. Interprets the given value as string if it /// can't be parsed and doesn't look like a TOML expression. 
pub fn parse_value_or_bare_string(value_str: &str) -> Result<ConfigValue, toml_edit::TomlError> { match value_str.parse() { Ok(value) => Ok(value), Err(_) if is_bare_string(value_str) => Ok(value_str.into()), Err(err) => Err(err), } } fn is_bare_string(value_str: &str) -> bool { // leading whitespace isn't ignored when parsing TOML value expression, but // "\n[]" doesn't look like a bare string. let trimmed = value_str.trim_ascii().as_bytes(); if let (Some(&first), Some(&last)) = (trimmed.first(), trimmed.last()) { // string, array, or table constructs? !matches!(first, b'"' | b'\'' | b'[' | b'{') && !matches!(last, b'"' | b'\'' | b']' | b'}') } else { true // empty or whitespace only } } /// Converts [`ConfigValue`] (or [`toml_edit::Value`]) to [`toml::Value`] which /// implements [`serde::Serialize`]. pub fn to_serializable_value(value: ConfigValue) -> toml::Value { match value { ConfigValue::String(v) => toml::Value::String(v.into_value()), ConfigValue::Integer(v) => toml::Value::Integer(v.into_value()), ConfigValue::Float(v) => toml::Value::Float(v.into_value()), ConfigValue::Boolean(v) => toml::Value::Boolean(v.into_value()), ConfigValue::Datetime(v) => toml::Value::Datetime(v.into_value()), ConfigValue::Array(array) => { let array = array.into_iter().map(to_serializable_value).collect(); toml::Value::Array(array) } ConfigValue::InlineTable(table) => { let table = table .into_iter() .map(|(k, v)| (k, to_serializable_value(v))) .collect(); toml::Value::Table(table) } } } /// Configuration variable with its source information. #[derive(Clone, Debug, serde::Serialize)] pub struct AnnotatedValue { /// Dotted name path to the configuration variable. #[serde(serialize_with = "serialize_name")] pub name: ConfigNamePathBuf, /// Configuration value. #[serde(serialize_with = "serialize_value")] pub value: ConfigValue, /// Source of the configuration value. #[serde(serialize_with = "serialize_source")] pub source: ConfigSource, /// Path to the source file, if available. 
pub path: Option<PathBuf>, /// True if this value is overridden in higher precedence layers. pub is_overridden: bool, } fn serialize_name<S>(name: &ConfigNamePathBuf, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { name.to_string().serialize(serializer) } fn serialize_value<S>(value: &ConfigValue, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { to_serializable_value(value.clone()).serialize(serializer) } fn serialize_source<S>(source: &ConfigSource, serializer: S) -> Result<S::Ok, S::Error> where S: serde::Serializer, { source.to_string().serialize(serializer) } /// Collects values under the given `filter_prefix` name recursively, from all /// layers. pub fn resolved_config_values( stacked_config: &StackedConfig, filter_prefix: &ConfigNamePathBuf, ) -> Vec<AnnotatedValue> { // Collect annotated values in reverse order and mark each value shadowed by // value or table in upper layers. let mut config_vals = vec![]; let mut upper_value_names = BTreeSet::new(); for layer in stacked_config.layers().iter().rev() { let top_item = match layer.look_up_item(filter_prefix) { Ok(Some(item)) => item, Ok(None) => continue, // parent is a table, but no value found Err(_) => { // parent is not a table, shadows lower layers upper_value_names.insert(filter_prefix.clone()); continue; } }; let mut config_stack = vec![(filter_prefix.clone(), top_item, false)]; while let Some((name, item, is_parent_overridden)) = config_stack.pop() { // Cannot retain inline table formatting because inner values may be // overridden independently. 
if let Some(table) = item.as_table_like() { // current table and children may be shadowed by value in upper layer let is_overridden = is_parent_overridden || upper_value_names.contains(&name); for (k, v) in table.iter() { let mut sub_name = name.clone(); sub_name.push(k); config_stack.push((sub_name, v, is_overridden)); // in reverse order } } else { // current value may be shadowed by value or table in upper layer let maybe_child = upper_value_names .range(&name..) .next() .filter(|next| next.starts_with(&name)); let is_overridden = is_parent_overridden || maybe_child.is_some(); if maybe_child != Some(&name) { upper_value_names.insert(name.clone()); } let value = item .clone() .into_value() .expect("Item::None should not exist in table"); config_vals.push(AnnotatedValue { name, value, source: layer.source, path: layer.path.clone(), is_overridden, }); } } } config_vals.reverse(); config_vals } /// Newtype for unprocessed (or unresolved) [`StackedConfig`]. /// /// This doesn't provide any strict guarantee about the underlying config /// object. It just requires an explicit cast to access to the config object. 
#[derive(Clone, Debug)] pub struct RawConfig(StackedConfig); impl AsRef<StackedConfig> for RawConfig { fn as_ref(&self) -> &StackedConfig { &self.0 } } impl AsMut<StackedConfig> for RawConfig { fn as_mut(&mut self) -> &mut StackedConfig { &mut self.0 } } #[derive(Clone, Debug)] enum ConfigPathState { New, Exists, } /// A ConfigPath can be in one of two states: /// /// - exists(): a config file exists at the path /// - !exists(): a config file doesn't exist here, but a new file _can_ be /// created at this path #[derive(Clone, Debug)] struct ConfigPath { path: PathBuf, state: ConfigPathState, } impl ConfigPath { fn new(path: PathBuf) -> Self { use ConfigPathState::*; Self { state: if path.exists() { Exists } else { New }, path, } } fn as_path(&self) -> &Path { &self.path } fn exists(&self) -> bool { match self.state { ConfigPathState::Exists => true, ConfigPathState::New => false, } } } /// Like std::fs::create_dir_all but creates new directories to be accessible to /// the user only on Unix (chmod 700). fn create_dir_all(path: &Path) -> std::io::Result<()> { let mut dir = std::fs::DirBuilder::new(); dir.recursive(true); #[cfg(unix)] { use std::os::unix::fs::DirBuilderExt as _; dir.mode(0o700); } dir.create(path) } // The struct exists so that we can mock certain global values in unit tests. 
#[derive(Clone, Default, Debug)] struct UnresolvedConfigEnv { config_dir: Option<PathBuf>, home_dir: Option<PathBuf>, jj_config: Option<String>, } impl UnresolvedConfigEnv { fn resolve(self) -> Vec<ConfigPath> { if let Some(paths) = self.jj_config { return split_paths(&paths) .filter(|path| !path.as_os_str().is_empty()) .map(ConfigPath::new) .collect(); } let mut paths = vec![]; let home_config_path = self.home_dir.map(|mut home_dir| { home_dir.push(".jjconfig.toml"); ConfigPath::new(home_dir) }); let platform_config_path = self.config_dir.clone().map(|mut config_dir| { config_dir.push("jj"); config_dir.push("config.toml"); ConfigPath::new(config_dir) }); let platform_config_dir = self.config_dir.map(|mut config_dir| { config_dir.push("jj"); config_dir.push("conf.d"); ConfigPath::new(config_dir) }); if let Some(path) = home_config_path && (path.exists() || platform_config_path.is_none()) { paths.push(path); } // This should be the default config created if there's // no user config and `jj config edit` is executed. if let Some(path) = platform_config_path { paths.push(path); } if let Some(path) = platform_config_dir && path.exists() { paths.push(path); } paths } } #[derive(Clone, Debug)] pub struct ConfigEnv { home_dir: Option<PathBuf>, repo_path: Option<PathBuf>, workspace_path: Option<PathBuf>, user_config_paths: Vec<ConfigPath>, repo_config_path: Option<ConfigPath>, workspace_config_path: Option<ConfigPath>, command: Option<String>, hostname: Option<String>, } impl ConfigEnv { /// Initializes configuration loader based on environment variables. pub fn from_environment() -> Self { let config_dir = etcetera::choose_base_strategy() .ok() .map(|s| s.config_dir()); // Canonicalize home as we do canonicalize cwd in CliRunner. $HOME might // point to symlink. 
let home_dir = etcetera::home_dir() .ok() .map(|d| dunce::canonicalize(&d).unwrap_or(d)); let env = UnresolvedConfigEnv { config_dir, home_dir: home_dir.clone(), jj_config: env::var("JJ_CONFIG").ok(), }; Self { home_dir, repo_path: None, workspace_path: None, user_config_paths: env.resolve(), repo_config_path: None, workspace_config_path: None, command: None, hostname: whoami::fallible::hostname().ok(), } } pub fn set_command_name(&mut self, command: String) { self.command = Some(command); } /// Returns the paths to the user-specific config files or directories. pub fn user_config_paths(&self) -> impl Iterator<Item = &Path> { self.user_config_paths.iter().map(ConfigPath::as_path) } /// Returns the paths to the existing user-specific config files or /// directories. pub fn existing_user_config_paths(&self) -> impl Iterator<Item = &Path> { self.user_config_paths .iter() .filter(|p| p.exists()) .map(ConfigPath::as_path) } /// Returns user configuration files for modification. Instantiates one if /// `config` has no user configuration layers. /// /// The parent directory for the new file may be created by this function. /// If the user configuration path is unknown, this function returns an /// empty `Vec`. pub fn user_config_files( &self, config: &RawConfig, ) -> Result<Vec<ConfigFile>, ConfigLoadError> { config_files_for(config, ConfigSource::User, || self.new_user_config_file()) } fn new_user_config_file(&self) -> Result<Option<ConfigFile>, ConfigLoadError> { self.user_config_paths() .next() .map(|path| { // No need to propagate io::Error here. If the directory // couldn't be created, file.save() would fail later. if let Some(dir) = path.parent() { create_dir_all(dir).ok(); } // The path doesn't usually exist, but we shouldn't overwrite it // with an empty config if it did exist. ConfigFile::load_or_empty(ConfigSource::User, path) }) .transpose() } /// Loads user-specific config files into the given `config`. The old /// user-config layers will be replaced if any. 
#[instrument] pub fn reload_user_config(&self, config: &mut RawConfig) -> Result<(), ConfigLoadError> { config.as_mut().remove_layers(ConfigSource::User); for path in self.existing_user_config_paths() { if path.is_dir() { config.as_mut().load_dir(ConfigSource::User, path)?; } else { config.as_mut().load_file(ConfigSource::User, path)?; } } Ok(()) } /// Sets the directory where repo-specific config file is stored. The path /// is usually `.jj/repo`. pub fn reset_repo_path(&mut self, path: &Path) { self.repo_path = Some(path.to_owned()); self.repo_config_path = Some(ConfigPath::new(path.join("config.toml"))); } /// Returns a path to the repo-specific config file. pub fn repo_config_path(&self) -> Option<&Path> { self.repo_config_path.as_ref().map(|p| p.as_path()) } /// Returns a path to the existing repo-specific config file. fn existing_repo_config_path(&self) -> Option<&Path> { match self.repo_config_path { Some(ref path) if path.exists() => Some(path.as_path()), _ => None, } } /// Returns repo configuration files for modification. Instantiates one if /// `config` has no repo configuration layers. /// /// If the repo path is unknown, this function returns an empty `Vec`. Since /// the repo config path cannot be a directory, the returned `Vec` should /// have at most one config file. pub fn repo_config_files( &self, config: &RawConfig, ) -> Result<Vec<ConfigFile>, ConfigLoadError> { config_files_for(config, ConfigSource::Repo, || self.new_repo_config_file()) } fn new_repo_config_file(&self) -> Result<Option<ConfigFile>, ConfigLoadError> { self.repo_config_path() // The path doesn't usually exist, but we shouldn't overwrite it // with an empty config if it did exist. .map(|path| ConfigFile::load_or_empty(ConfigSource::Repo, path)) .transpose() } /// Loads repo-specific config file into the given `config`. The old /// repo-config layer will be replaced if any. 
#[instrument] pub fn reload_repo_config(&self, config: &mut RawConfig) -> Result<(), ConfigLoadError> { config.as_mut().remove_layers(ConfigSource::Repo); if let Some(path) = self.existing_repo_config_path() { config.as_mut().load_file(ConfigSource::Repo, path)?; } Ok(()) } /// Sets the directory for the workspace and the workspace-specific config /// file. pub fn reset_workspace_path(&mut self, path: &Path) { self.workspace_path = Some(path.to_owned()); self.workspace_config_path = Some(ConfigPath::new( path.join(".jj").join("workspace-config.toml"), )); } /// Returns a path to the workspace-specific config file. pub fn workspace_config_path(&self) -> Option<&Path> { self.workspace_config_path.as_ref().map(|p| p.as_path()) } /// Returns a path to the existing workspace-specific config file. fn existing_workspace_config_path(&self) -> Option<&Path> { match self.workspace_config_path { Some(ref path) if path.exists() => Some(path.as_path()), _ => None, } } /// Returns workspace configuration files for modification. Instantiates one /// if `config` has no workspace configuration layers. /// /// If the workspace path is unknown, this function returns an empty `Vec`. /// Since the workspace config path cannot be a directory, the returned /// `Vec` should have at most one config file. pub fn workspace_config_files( &self, config: &RawConfig, ) -> Result<Vec<ConfigFile>, ConfigLoadError> { config_files_for(config, ConfigSource::Workspace, || { self.new_workspace_config_file() }) } fn new_workspace_config_file(&self) -> Result<Option<ConfigFile>, ConfigLoadError> { self.workspace_config_path() .map(|path| ConfigFile::load_or_empty(ConfigSource::Workspace, path)) .transpose() } /// Loads workspace-specific config file into the given `config`. The old /// workspace-config layer will be replaced if any. 
#[instrument] pub fn reload_workspace_config(&self, config: &mut RawConfig) -> Result<(), ConfigLoadError> { config.as_mut().remove_layers(ConfigSource::Workspace); if let Some(path) = self.existing_workspace_config_path() { config.as_mut().load_file(ConfigSource::Workspace, path)?; } Ok(()) } /// Resolves conditional scopes within the current environment. Returns new /// resolved config. pub fn resolve_config(&self, config: &RawConfig) -> Result<StackedConfig, ConfigGetError> { let context = ConfigResolutionContext { home_dir: self.home_dir.as_deref(), repo_path: self.repo_path.as_deref(), workspace_path: self.workspace_path.as_deref(), command: self.command.as_deref(), hostname: self.hostname.as_deref().unwrap_or(""), }; jj_lib::config::resolve(config.as_ref(), &context) } } fn config_files_for( config: &RawConfig, source: ConfigSource, new_file: impl FnOnce() -> Result<Option<ConfigFile>, ConfigLoadError>, ) -> Result<Vec<ConfigFile>, ConfigLoadError> { let mut files = config .as_ref() .layers_for(source) .iter() .filter_map(|layer| ConfigFile::from_layer(layer.clone()).ok()) .collect_vec(); if files.is_empty() { files.extend(new_file()?); } Ok(files) } /// Initializes stacked config with the given `default_layers` and infallible /// sources. /// /// Sources from the lowest precedence: /// 1. Default /// 2. Base environment variables /// 3. [User configs](https://docs.jj-vcs.dev/latest/config/) /// 4. Repo config `.jj/repo/config.toml` /// 5. Workspace config `.jj/workspace-config.toml` /// 6. Override environment variables /// 7. Command-line arguments `--config` and `--config-file` /// /// This function sets up 1, 2, and 6. 
pub fn config_from_environment(default_layers: impl IntoIterator<Item = ConfigLayer>) -> RawConfig { let mut config = StackedConfig::with_defaults(); config.extend_layers(default_layers); config.add_layer(env_base_layer()); config.add_layer(env_overrides_layer()); RawConfig(config) } const OP_HOSTNAME: &str = "operation.hostname"; const OP_USERNAME: &str = "operation.username"; /// Environment variables that should be overridden by config values fn env_base_layer() -> ConfigLayer { let mut layer = ConfigLayer::empty(ConfigSource::EnvBase); if let Ok(value) = whoami::fallible::hostname() .inspect_err(|err| tracing::warn!(?err, "failed to get hostname")) { layer.set_value(OP_HOSTNAME, value).unwrap(); } if let Ok(value) = whoami::fallible::username() .inspect_err(|err| tracing::warn!(?err, "failed to get username")) { layer.set_value(OP_USERNAME, value).unwrap(); } else if let Ok(value) = env::var("USER") { // On Unix, $USER is set by login(1). Use it as a fallback because // getpwuid() of musl libc appears not (fully?) supporting nsswitch. layer.set_value(OP_USERNAME, value).unwrap(); } if !env::var("NO_COLOR").unwrap_or_default().is_empty() { // "User-level configuration files and per-instance command-line arguments // should override $NO_COLOR." https://no-color.org/ layer.set_value("ui.color", "never").unwrap(); } if let Ok(value) = env::var("VISUAL") { layer.set_value("ui.editor", value).unwrap(); } else if let Ok(value) = env::var("EDITOR") { layer.set_value("ui.editor", value).unwrap(); } // Intentionally NOT respecting $PAGER here as it often creates a bad // out-of-the-box experience for users, see http://github.com/jj-vcs/jj/issues/3502. layer } pub fn default_config_layers() -> Vec<ConfigLayer> { // Syntax error in default config isn't a user error. That's why defaults are // loaded by separate builder. 
let parse = |text: &'static str| ConfigLayer::parse(ConfigSource::Default, text).unwrap(); let mut layers = vec![ parse(include_str!("config/colors.toml")), parse(include_str!("config/hints.toml")), parse(include_str!("config/merge_tools.toml")), parse(include_str!("config/misc.toml")), parse(include_str!("config/revsets.toml")), parse(include_str!("config/templates.toml")), ]; if cfg!(unix) { layers.push(parse(include_str!("config/unix.toml"))); } if cfg!(windows) { layers.push(parse(include_str!("config/windows.toml"))); } layers } /// Environment variables that override config values fn env_overrides_layer() -> ConfigLayer { let mut layer = ConfigLayer::empty(ConfigSource::EnvOverrides); if let Ok(value) = env::var("JJ_USER") { layer.set_value("user.name", value).unwrap(); } if let Ok(value) = env::var("JJ_EMAIL") { layer.set_value("user.email", value).unwrap(); } if let Ok(value) = env::var("JJ_TIMESTAMP") { layer.set_value("debug.commit-timestamp", value).unwrap(); } if let Ok(Ok(value)) = env::var("JJ_RANDOMNESS_SEED").map(|s| s.parse::<i64>()) { layer.set_value("debug.randomness-seed", value).unwrap(); } if let Ok(value) = env::var("JJ_OP_TIMESTAMP") { layer.set_value("debug.operation-timestamp", value).unwrap(); } if let Ok(value) = env::var("JJ_OP_HOSTNAME") { layer.set_value(OP_HOSTNAME, value).unwrap(); } if let Ok(value) = env::var("JJ_OP_USERNAME") { layer.set_value(OP_USERNAME, value).unwrap(); } if let Ok(value) = env::var("JJ_EDITOR") { layer.set_value("ui.editor", value).unwrap(); } layer } /// Configuration source/data type provided as command-line argument. #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum ConfigArgKind { /// `--config=NAME=VALUE` Item, /// `--config-file=PATH` File, } /// Parses `--config*` arguments. 
pub fn parse_config_args( toml_strs: &[(ConfigArgKind, &str)], ) -> Result<Vec<ConfigLayer>, CommandError> { let source = ConfigSource::CommandArg; let mut layers = Vec::new(); for (kind, chunk) in &toml_strs.iter().chunk_by(|&(kind, _)| kind) { match kind { ConfigArgKind::Item => { let mut layer = ConfigLayer::empty(source); for (_, item) in chunk { let (name, value) = parse_config_arg_item(item)?; // Can fail depending on the argument order, but that // wouldn't matter in practice. layer.set_value(name, value).map_err(|err| { config_error_with_message("--config argument cannot be set", err) })?; } layers.push(layer); } ConfigArgKind::File => { for (_, path) in chunk { layers.push(ConfigLayer::load_from_file(source, path.into())?); } } } } Ok(layers) } /// Parses `NAME=VALUE` string. fn parse_config_arg_item(item_str: &str) -> Result<(ConfigNamePathBuf, ConfigValue), CommandError> { // split NAME=VALUE at the first parsable position let split_candidates = item_str.as_bytes().iter().positions(|&b| b == b'='); let Some((name, value_str)) = split_candidates .map(|p| (&item_str[..p], &item_str[p + 1..])) .map(|(name, value)| name.parse().map(|name| (name, value))) .find_or_last(Result::is_ok) .transpose() .map_err(|err| config_error_with_message("--config name cannot be parsed", err))? else { return Err(config_error("--config must be specified as NAME=VALUE")); }; let value = parse_value_or_bare_string(value_str) .map_err(|err| config_error_with_message("--config value cannot be parsed", err))?; Ok((name, value)) } /// List of rules to migrate deprecated config variables. 
pub fn default_config_migrations() -> Vec<ConfigMigrationRule> { vec![ // TODO: Delete in jj 0.38.0+ ConfigMigrationRule::rename_value("core.fsmonitor", "fsmonitor.backend"), // TODO: Delete in jj 0.38.0+ ConfigMigrationRule::rename_value( "core.watchman.register-snapshot-trigger", "fsmonitor.watchman.register-snapshot-trigger", ), // TODO: Delete in jj 0.42.0+ ConfigMigrationRule::custom( |layer| { let Ok(Some(val)) = layer.look_up_item("git.auto-local-bookmark") else { return false; }; val.as_bool().is_some_and(|b| b) }, |_| { Ok("`git.auto-local-bookmark` is deprecated; use \ `remotes.<name>.auto-track-bookmarks` instead. Example: jj config set --user remotes.origin.auto-track-bookmarks '*' For details, see: https://docs.jj-vcs.dev/latest/config/#automatic-tracking-of-bookmarks" .into()) }, ), // TODO: Delete in jj 0.42.0+ ConfigMigrationRule::custom( |layer| { let Ok(Some(val)) = layer.look_up_item("git.push-new-bookmarks") else { return false; }; val.as_bool().is_some_and(|b| b) }, |_| { Ok("`git.push-new-bookmarks` is deprecated; use \ `remotes.<name>.auto-track-bookmarks` instead. Example: jj config set --user remotes.origin.auto-track-bookmarks '*' For details, see: https://docs.jj-vcs.dev/latest/config/#automatic-tracking-of-bookmarks" .into()) }, ), ] } /// Command name and arguments specified by config. #[derive(Clone, Debug, Eq, PartialEq, serde::Deserialize)] #[serde(untagged)] pub enum CommandNameAndArgs { String(String), Vec(NonEmptyCommandArgsVec), Structured { env: HashMap<String, String>, command: NonEmptyCommandArgsVec, }, } impl CommandNameAndArgs { /// Returns command name without arguments. pub fn split_name(&self) -> Cow<'_, str> { let (name, _) = self.split_name_and_args(); name } /// Returns command name and arguments. /// /// The command name may be an empty string (as well as each argument.) 
pub fn split_name_and_args(&self) -> (Cow<'_, str>, Cow<'_, [String]>) { match self { Self::String(s) => { // Handle things like `EDITOR=emacs -nw` (TODO: parse shell escapes) let mut args = s.split(' ').map(|s| s.to_owned()); (args.next().unwrap().into(), args.collect()) } Self::Vec(NonEmptyCommandArgsVec(a)) => (Cow::Borrowed(&a[0]), Cow::Borrowed(&a[1..])), Self::Structured { env: _, command: cmd, } => (Cow::Borrowed(&cmd.0[0]), Cow::Borrowed(&cmd.0[1..])), } } /// Returns command string only if the underlying type is a string. /// /// Use this to parse enum strings such as `":builtin"`, which can be /// escaped as `[":builtin"]`. pub fn as_str(&self) -> Option<&str> { match self { Self::String(s) => Some(s), Self::Vec(_) | Self::Structured { .. } => None, } } /// Returns process builder configured with this. pub fn to_command(&self) -> Command { let empty: HashMap<&str, &str> = HashMap::new(); self.to_command_with_variables(&empty) } /// Returns process builder configured with this after interpolating /// variables into the arguments. pub fn to_command_with_variables<V: AsRef<str>>( &self, variables: &HashMap<&str, V>, ) -> Command { let (name, args) = self.split_name_and_args(); let mut cmd = Command::new(interpolate_variables_single(name.as_ref(), variables)); if let Self::Structured { env, .. 
} = self { cmd.envs(env); } cmd.args(interpolate_variables(&args, variables)); cmd } } impl<T: AsRef<str> + ?Sized> From<&T> for CommandNameAndArgs { fn from(s: &T) -> Self { Self::String(s.as_ref().to_owned()) } } impl fmt::Display for CommandNameAndArgs { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { Self::String(s) => write!(f, "{s}"), // TODO: format with shell escapes Self::Vec(a) => write!(f, "{}", a.0.join(" ")), Self::Structured { env, command } => { for (k, v) in env { write!(f, "{k}={v} ")?; } write!(f, "{}", command.0.join(" ")) } } } } // Not interested in $UPPER_CASE_VARIABLES static VARIABLE_REGEX: LazyLock<Regex> = LazyLock::new(|| Regex::new(r"\$([a-z0-9_]+)\b").unwrap()); pub fn interpolate_variables<V: AsRef<str>>( args: &[String], variables: &HashMap<&str, V>, ) -> Vec<String> { args.iter() .map(|arg| interpolate_variables_single(arg, variables)) .collect() } fn interpolate_variables_single<V: AsRef<str>>(arg: &str, variables: &HashMap<&str, V>) -> String { VARIABLE_REGEX .replace_all(arg, |caps: &Captures| { let name = &caps[1]; if let Some(subst) = variables.get(name) { subst.as_ref().to_owned() } else { caps[0].to_owned() } }) .into_owned() } /// Return all variable names found in the args, without the dollar sign pub fn find_all_variables(args: &[String]) -> impl Iterator<Item = &str> { let regex = &*VARIABLE_REGEX; args.iter() .flat_map(|arg| regex.find_iter(arg)) .map(|single_match| { let s = single_match.as_str(); &s[1..] }) } /// Wrapper to reject an array without command name. // Based on https://github.com/serde-rs/serde/issues/939 #[derive(Clone, Debug, Eq, Hash, PartialEq, serde::Deserialize)] #[serde(try_from = "Vec<String>")] pub struct NonEmptyCommandArgsVec(Vec<String>); impl TryFrom<Vec<String>> for NonEmptyCommandArgsVec { type Error = &'static str; fn try_from(args: Vec<String>) -> Result<Self, Self::Error> { if args.is_empty() { Err("command arguments should not be empty") } else { Ok(Self(args)) } } }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
true
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/diff_util.rs
cli/src/diff_util.rs
// Copyright 2020-2022 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::borrow::Cow; use std::cmp::max; use std::io; use std::iter; use std::ops::Range; use std::path::Path; use std::path::PathBuf; use bstr::BStr; use bstr::BString; use clap_complete::ArgValueCandidates; use futures::StreamExt as _; use futures::TryStreamExt as _; use futures::executor::block_on_stream; use futures::stream::BoxStream; use itertools::Itertools as _; use jj_lib::backend::BackendError; use jj_lib::backend::BackendResult; use jj_lib::backend::CommitId; use jj_lib::backend::CopyRecord; use jj_lib::backend::TreeValue; use jj_lib::commit::Commit; use jj_lib::config::ConfigGetError; use jj_lib::conflict_labels::ConflictLabels; use jj_lib::conflicts::ConflictMarkerStyle; use jj_lib::conflicts::ConflictMaterializeOptions; use jj_lib::conflicts::MaterializedTreeDiffEntry; use jj_lib::conflicts::MaterializedTreeValue; use jj_lib::conflicts::materialize_merge_result_to_bytes; use jj_lib::conflicts::materialized_diff_stream; use jj_lib::copies::CopiesTreeDiffEntry; use jj_lib::copies::CopiesTreeDiffEntryPath; use jj_lib::copies::CopyOperation; use jj_lib::copies::CopyRecords; use jj_lib::diff::ContentDiff; use jj_lib::diff::DiffHunk; use jj_lib::diff::DiffHunkKind; use jj_lib::diff_presentation::DiffTokenType; use jj_lib::diff_presentation::FileContent; use jj_lib::diff_presentation::LineCompareMode; use jj_lib::diff_presentation::diff_by_line; use 
jj_lib::diff_presentation::file_content_for_diff; use jj_lib::diff_presentation::unified::DiffLineType; use jj_lib::diff_presentation::unified::UnifiedDiffError; use jj_lib::diff_presentation::unified::git_diff_part; use jj_lib::diff_presentation::unified::unified_diff_hunks; use jj_lib::diff_presentation::unzip_diff_hunks_to_lines; use jj_lib::files; use jj_lib::files::ConflictDiffHunk; use jj_lib::files::DiffLineHunkSide; use jj_lib::files::DiffLineIterator; use jj_lib::files::DiffLineNumber; use jj_lib::matchers::Matcher; use jj_lib::merge::Diff; use jj_lib::merge::Merge; use jj_lib::merge::MergeBuilder; use jj_lib::merge::MergedTreeValue; use jj_lib::merged_tree::MergedTree; use jj_lib::repo::Repo; use jj_lib::repo_path::InvalidRepoPathError; use jj_lib::repo_path::RepoPath; use jj_lib::repo_path::RepoPathUiConverter; use jj_lib::rewrite::rebase_to_dest_parent; use jj_lib::settings::UserSettings; use jj_lib::store::Store; use pollster::FutureExt as _; use thiserror::Error; use tracing::instrument; use unicode_width::UnicodeWidthStr as _; use crate::command_error::CommandError; use crate::command_error::cli_error; use crate::commit_templater; use crate::config::CommandNameAndArgs; use crate::formatter::Formatter; use crate::formatter::FormatterExt as _; use crate::merge_tools; use crate::merge_tools::DiffGenerateError; use crate::merge_tools::DiffToolMode; use crate::merge_tools::ExternalMergeTool; use crate::merge_tools::generate_diff; use crate::merge_tools::invoke_external_diff; use crate::merge_tools::new_utf8_temp_dir; use crate::templater::TemplateRenderer; use crate::text_util; use crate::ui::Ui; #[derive(clap::Args, Clone, Debug)] #[command(next_help_heading = "Diff Formatting Options")] #[command(group(clap::ArgGroup::new("short-format").args(&["summary", "stat", "types", "name_only"])))] #[command(group(clap::ArgGroup::new("long-format").args(&["git", "color_words"])))] pub struct DiffFormatArgs { /// For each path, show only whether it was modified, 
added, or deleted #[arg(long, short)] pub summary: bool, /// Show a histogram of the changes #[arg(long)] pub stat: bool, /// For each path, show only its type before and after /// /// The diff is shown as two letters. The first letter indicates the type /// before and the second letter indicates the type after. '-' indicates /// that the path was not present, 'F' represents a regular file, `L' /// represents a symlink, 'C' represents a conflict, and 'G' represents a /// Git submodule. #[arg(long)] pub types: bool, /// For each path, show only its path /// /// Typically useful for shell commands like: /// `jj diff -r @- --name-only | xargs perl -pi -e's/OLD/NEW/g` #[arg(long)] pub name_only: bool, /// Show a Git-format diff #[arg(long)] pub git: bool, /// Show a word-level diff with changes indicated only by color #[arg(long)] pub color_words: bool, /// Generate diff by external command /// /// A builtin format can also be specified as `:<name>`. For example, /// `--tool=:git` is equivalent to `--git`. #[arg(long)] #[arg(add = ArgValueCandidates::new(crate::complete::diff_formatters))] pub tool: Option<String>, /// Number of lines of context to show #[arg(long)] context: Option<usize>, // Short flags are set by command to avoid future conflicts. /// Ignore whitespace when comparing lines. #[arg(long)] // short = 'w' ignore_all_space: bool, /// Ignore changes in amount of whitespace when comparing lines. 
#[arg(long, conflicts_with = "ignore_all_space")] // short = 'b' ignore_space_change: bool, } #[derive(Clone, Debug, Eq, PartialEq)] pub enum DiffFormat { // Non-trivial parameters are boxed in order to keep the variants small Summary, Stat(Box<DiffStatOptions>), Types, NameOnly, Git(Box<UnifiedDiffOptions>), ColorWords(Box<ColorWordsDiffOptions>), Tool(Box<ExternalMergeTool>), } #[derive(Clone, Copy, Debug, Eq, PartialEq)] enum BuiltinFormatKind { Summary, Stat, Types, NameOnly, Git, ColorWords, } impl BuiltinFormatKind { // Alternatively, we could use or vendor one of the crates `strum`, // `enum-iterator`, or `variant_count` (for a check that the length of the array // is correct). The latter is very simple and is also a nightly feature. const ALL_VARIANTS: &[Self] = &[ Self::Summary, Self::Stat, Self::Types, Self::NameOnly, Self::Git, Self::ColorWords, ]; fn from_name(name: &str) -> Result<Self, String> { match name { "summary" => Ok(Self::Summary), "stat" => Ok(Self::Stat), "types" => Ok(Self::Types), "name-only" => Ok(Self::NameOnly), "git" => Ok(Self::Git), "color-words" => Ok(Self::ColorWords), _ => Err(format!("Invalid builtin diff format: {name}")), } } fn short_from_args(args: &DiffFormatArgs) -> Option<Self> { if args.summary { Some(Self::Summary) } else if args.stat { Some(Self::Stat) } else if args.types { Some(Self::Types) } else if args.name_only { Some(Self::NameOnly) } else { None } } fn long_from_args(args: &DiffFormatArgs) -> Option<Self> { if args.git { Some(Self::Git) } else if args.color_words { Some(Self::ColorWords) } else { None } } fn is_short(self) -> bool { match self { Self::Summary | Self::Stat | Self::Types | Self::NameOnly => true, Self::Git | Self::ColorWords => false, } } fn to_arg_name(self) -> &'static str { match self { Self::Summary => "summary", Self::Stat => "stat", Self::Types => "types", Self::NameOnly => "name-only", Self::Git => "git", Self::ColorWords => "color-words", } } fn to_format( self, settings: &UserSettings, 
args: &DiffFormatArgs, ) -> Result<DiffFormat, ConfigGetError> { match self { Self::Summary => Ok(DiffFormat::Summary), Self::Stat => { let mut options = DiffStatOptions::default(); options.merge_args(args); Ok(DiffFormat::Stat(Box::new(options))) } Self::Types => Ok(DiffFormat::Types), Self::NameOnly => Ok(DiffFormat::NameOnly), Self::Git => { let mut options = UnifiedDiffOptions::from_settings(settings)?; options.merge_args(args); Ok(DiffFormat::Git(Box::new(options))) } Self::ColorWords => { let mut options = ColorWordsDiffOptions::from_settings(settings)?; options.merge_args(args); Ok(DiffFormat::ColorWords(Box::new(options))) } } } } /// Returns the list of builtin diff format names such as `:git` pub fn all_builtin_diff_format_names() -> Vec<String> { BuiltinFormatKind::ALL_VARIANTS .iter() .map(|kind| format!(":{}", kind.to_arg_name())) .collect() } fn diff_formatter_tool( settings: &UserSettings, name: &str, ) -> Result<Option<ExternalMergeTool>, CommandError> { let maybe_tool = merge_tools::get_external_tool_config(settings, name)?; if let Some(tool) = &maybe_tool && tool.diff_args.is_empty() { return Err(cli_error(format!( "The tool `{name}` cannot be used for diff formatting" ))); }; Ok(maybe_tool) } /// Returns a list of requested diff formats, which will never be empty. pub fn diff_formats_for( settings: &UserSettings, args: &DiffFormatArgs, ) -> Result<Vec<DiffFormat>, CommandError> { let formats = diff_formats_from_args(settings, args)?; if formats.iter().all(|f| f.is_none()) { Ok(vec![default_diff_format(settings, args)?]) } else { Ok(formats.into_iter().flatten().collect()) } } /// Returns a list of requested diff formats for log-like commands, which may be /// empty. 
pub fn diff_formats_for_log( settings: &UserSettings, args: &DiffFormatArgs, patch: bool, ) -> Result<Vec<DiffFormat>, CommandError> { let [short_format, mut long_format] = diff_formats_from_args(settings, args)?; // --patch implies default if no "long" format is specified if patch && long_format.is_none() { // TODO: maybe better to error out if the configured default isn't a // "long" format? let default_format = default_diff_format(settings, args)?; if short_format.as_ref() != Some(&default_format) { long_format = Some(default_format); } } Ok([short_format, long_format].into_iter().flatten().collect()) } fn diff_formats_from_args( settings: &UserSettings, args: &DiffFormatArgs, ) -> Result<[Option<DiffFormat>; 2], CommandError> { let short_kind = BuiltinFormatKind::short_from_args(args); let long_kind = BuiltinFormatKind::long_from_args(args); let mut short_format = short_kind .map(|kind| kind.to_format(settings, args)) .transpose()?; let mut long_format = long_kind .map(|kind| kind.to_format(settings, args)) .transpose()?; if let Some(name) = &args.tool { let ensure_new = |old_kind: Option<BuiltinFormatKind>| match old_kind { Some(old) => Err(cli_error(format!( "--tool={name} cannot be used with --{old}", old = old.to_arg_name() ))), None => Ok(()), }; if let Some(name) = name.strip_prefix(':') { let kind = BuiltinFormatKind::from_name(name).map_err(cli_error)?; let format = kind.to_format(settings, args)?; if kind.is_short() { ensure_new(short_kind)?; short_format = Some(format); } else { ensure_new(long_kind)?; long_format = Some(format); } } else { ensure_new(long_kind)?; let tool = diff_formatter_tool(settings, name)? 
.unwrap_or_else(|| ExternalMergeTool::with_program(name)); long_format = Some(DiffFormat::Tool(Box::new(tool))); } } Ok([short_format, long_format]) } fn default_diff_format( settings: &UserSettings, args: &DiffFormatArgs, ) -> Result<DiffFormat, CommandError> { let tool_args: CommandNameAndArgs = settings.get("ui.diff-formatter")?; if let Some(name) = tool_args.as_str().and_then(|s| s.strip_prefix(':')) { Ok(BuiltinFormatKind::from_name(name) .map_err(|err| ConfigGetError::Type { name: "ui.diff-formatter".to_owned(), error: err.into(), source_path: None, })? .to_format(settings, args)?) } else { let tool = if let Some(name) = tool_args.as_str() { diff_formatter_tool(settings, name)? } else { None } .unwrap_or_else(|| ExternalMergeTool::with_diff_args(&tool_args)); Ok(DiffFormat::Tool(Box::new(tool))) } } #[derive(Debug, Error)] pub enum DiffRenderError { #[error("Failed to generate diff")] DiffGenerate(#[source] DiffGenerateError), #[error(transparent)] Backend(#[from] BackendError), #[error("Access denied to {path}")] AccessDenied { path: String, source: Box<dyn std::error::Error + Send + Sync>, }, #[error(transparent)] InvalidRepoPath(#[from] InvalidRepoPathError), #[error(transparent)] Io(#[from] io::Error), } impl From<UnifiedDiffError> for DiffRenderError { fn from(value: UnifiedDiffError) -> Self { match value { UnifiedDiffError::Backend(error) => Self::Backend(error), UnifiedDiffError::AccessDenied { path, source } => Self::AccessDenied { path, source }, } } } /// Configuration and environment to render textual diff. pub struct DiffRenderer<'a> { repo: &'a dyn Repo, path_converter: &'a RepoPathUiConverter, conflict_marker_style: ConflictMarkerStyle, formats: Vec<DiffFormat>, } impl<'a> DiffRenderer<'a> { pub fn new( repo: &'a dyn Repo, path_converter: &'a RepoPathUiConverter, conflict_marker_style: ConflictMarkerStyle, formats: Vec<DiffFormat>, ) -> Self { Self { repo, path_converter, conflict_marker_style, formats, } } /// Generates diff between `trees`. 
pub async fn show_diff( &self, ui: &Ui, // TODO: remove Ui dependency if possible formatter: &mut dyn Formatter, trees: Diff<&MergedTree>, matcher: &dyn Matcher, copy_records: &CopyRecords, width: usize, ) -> Result<(), DiffRenderError> { let mut formatter = formatter.labeled("diff"); self.show_diff_trees(ui, *formatter, trees, matcher, copy_records, width) .await } async fn show_diff_trees( &self, ui: &Ui, formatter: &mut dyn Formatter, trees: Diff<&MergedTree>, matcher: &dyn Matcher, copy_records: &CopyRecords, width: usize, ) -> Result<(), DiffRenderError> { let diff_stream = || { trees .before .diff_stream_with_copies(trees.after, matcher, copy_records) }; let conflict_labels = trees.map(|tree| tree.labels()); let store = self.repo.store(); let path_converter = self.path_converter; for format in &self.formats { match format { DiffFormat::Summary => { let tree_diff = diff_stream(); show_diff_summary(formatter, tree_diff, path_converter).await?; } DiffFormat::Stat(options) => { let tree_diff = diff_stream(); let stats = DiffStats::calculate(store, tree_diff, options, self.conflict_marker_style) .block_on()?; show_diff_stats(formatter, &stats, path_converter, width)?; } DiffFormat::Types => { let tree_diff = diff_stream(); show_types(formatter, tree_diff, path_converter).await?; } DiffFormat::NameOnly => { let tree_diff = diff_stream(); show_names(formatter, tree_diff, path_converter).await?; } DiffFormat::Git(options) => { let tree_diff = diff_stream(); show_git_diff( formatter, store, tree_diff, conflict_labels, options, self.conflict_marker_style, ) .await?; } DiffFormat::ColorWords(options) => { let tree_diff = diff_stream(); show_color_words_diff( formatter, store, tree_diff, conflict_labels, path_converter, options, self.conflict_marker_style, ) .await?; } DiffFormat::Tool(tool) => { match tool.diff_invocation_mode { DiffToolMode::FileByFile => { let tree_diff = diff_stream(); show_file_by_file_diff( ui, formatter, store, tree_diff, conflict_labels, 
path_converter, tool, self.conflict_marker_style, width, ) .await } DiffToolMode::Dir => { let mut writer = formatter.raw()?; generate_diff( ui, writer.as_mut(), trees, matcher, tool, self.conflict_marker_style, width, ) .map_err(DiffRenderError::DiffGenerate) } }?; } } } Ok(()) } fn show_diff_commit_descriptions( &self, formatter: &mut dyn Formatter, descriptions: Diff<&Merge<&str>>, ) -> Result<(), DiffRenderError> { if !descriptions.is_changed() { return Ok(()); } const DUMMY_PATH: &str = "JJ-COMMIT-DESCRIPTION"; let materialize_options = ConflictMaterializeOptions { marker_style: self.conflict_marker_style, marker_len: None, merge: self.repo.store().merge_options().clone(), }; for format in &self.formats { match format { // Omit diff from "short" formats. Printing dummy file path // wouldn't be useful. DiffFormat::Summary | DiffFormat::Stat(_) | DiffFormat::Types | DiffFormat::NameOnly => {} DiffFormat::Git(options) => { // Git format must be parsable, so use dummy file path. show_git_diff_texts( formatter, Diff::new(DUMMY_PATH, DUMMY_PATH), descriptions, options, &materialize_options, )?; } DiffFormat::ColorWords(options) => { writeln!(formatter.labeled("header"), "Modified commit description:")?; show_color_words_diff_hunks( formatter, descriptions, Diff::new(&ConflictLabels::unlabeled(), &ConflictLabels::unlabeled()), options, &materialize_options, )?; } DiffFormat::Tool(_) => { // TODO: materialize commit description as file? } } } Ok(()) } /// Generates diff between `from_commits` and `to_commit` based off their /// parents. The `from_commits` will temporarily be rebased onto the /// `to_commit` parents to exclude unrelated changes. 
pub async fn show_inter_diff( &self, ui: &Ui, formatter: &mut dyn Formatter, from_commits: &[Commit], to_commit: &Commit, matcher: &dyn Matcher, width: usize, ) -> Result<(), DiffRenderError> { let mut formatter = formatter.labeled("diff"); let from_description = if from_commits.is_empty() { Merge::resolved("") } else { // TODO: use common predecessor as the base description? MergeBuilder::from_iter(itertools::intersperse( from_commits.iter().map(|c| c.description()), "", )) .build() .simplify() }; let to_description = Merge::resolved(to_commit.description()); let from_tree = rebase_to_dest_parent(self.repo, from_commits, to_commit)?; let to_tree = to_commit.tree(); let copy_records = CopyRecords::default(); // TODO self.show_diff_commit_descriptions( *formatter, Diff::new(&from_description, &to_description), )?; self.show_diff_trees( ui, *formatter, Diff::new(&from_tree, &to_tree), matcher, &copy_records, width, ) .await } /// Generates diff of the given `commit` compared to its parents. pub async fn show_patch( &self, ui: &Ui, formatter: &mut dyn Formatter, commit: &Commit, matcher: &dyn Matcher, width: usize, ) -> Result<(), DiffRenderError> { let from_tree = commit.parent_tree_async(self.repo).await?; let to_tree = commit.tree(); let mut copy_records = CopyRecords::default(); for parent_id in commit.parent_ids() { let records = get_copy_records(self.repo.store(), parent_id, commit.id(), matcher)?; copy_records.add_records(records)?; } self.show_diff( ui, formatter, Diff::new(&from_tree, &to_tree), matcher, &copy_records, width, ) .await } } pub fn get_copy_records<'a>( store: &'a Store, root: &CommitId, head: &CommitId, matcher: &'a dyn Matcher, ) -> BackendResult<impl Iterator<Item = BackendResult<CopyRecord>> + use<'a>> { // TODO: teach backend about matching path prefixes? let stream = store.get_copy_records(None, root, head)?; // TODO: test record.source as well? should be AND-ed or OR-ed? 
Ok(block_on_stream(stream).filter_ok(|record| matcher.matches(&record.target))) } /// How conflicts are processed and rendered in diffs. #[derive(Clone, Copy, Debug, Default, Eq, PartialEq, serde::Deserialize)] #[serde(rename_all = "kebab-case")] pub enum ConflictDiffMethod { /// Compares materialized contents. #[default] Materialize, /// Compares individual pairs of left and right contents. Pair, } #[derive(Clone, Debug, Default, Eq, PartialEq)] pub struct LineDiffOptions { /// How equivalence of lines is tested. pub compare_mode: LineCompareMode, // TODO: add --ignore-blank-lines, etc. which aren't mutually exclusive. } impl LineDiffOptions { fn merge_args(&mut self, args: &DiffFormatArgs) { self.compare_mode = if args.ignore_all_space { LineCompareMode::IgnoreAllSpace } else if args.ignore_space_change { LineCompareMode::IgnoreSpaceChange } else { LineCompareMode::Exact }; } } #[derive(Clone, Debug, Eq, PartialEq)] pub struct ColorWordsDiffOptions { /// How conflicts are processed and rendered. pub conflict: ConflictDiffMethod, /// Number of context lines to show. pub context: usize, /// How lines are tokenized and compared. pub line_diff: LineDiffOptions, /// Maximum number of removed/added word alternation to inline. pub max_inline_alternation: Option<usize>, } impl ColorWordsDiffOptions { pub fn from_settings(settings: &UserSettings) -> Result<Self, ConfigGetError> { let max_inline_alternation = { let name = "diff.color-words.max-inline-alternation"; match settings.get_int(name)? 
{ -1 => None, // unlimited n => Some(usize::try_from(n).map_err(|err| ConfigGetError::Type { name: name.to_owned(), error: err.into(), source_path: None, })?), } }; Ok(Self { conflict: settings.get("diff.color-words.conflict")?, context: settings.get("diff.color-words.context")?, line_diff: LineDiffOptions::default(), max_inline_alternation, }) } fn merge_args(&mut self, args: &DiffFormatArgs) { if let Some(context) = args.context { self.context = context; } self.line_diff.merge_args(args); } } fn show_color_words_diff_hunks<T: AsRef<[u8]>>( formatter: &mut dyn Formatter, contents: Diff<&Merge<T>>, conflict_labels: Diff<&ConflictLabels>, options: &ColorWordsDiffOptions, materialize_options: &ConflictMaterializeOptions, ) -> io::Result<()> { let line_number = DiffLineNumber { left: 1, right: 1 }; let labels = Diff::new("removed", "added"); if let (Some(left), Some(right)) = (contents.before.as_resolved(), contents.after.as_resolved()) { let contents = Diff::new(left, right).map(BStr::new); show_color_words_resolved_hunks(formatter, contents, line_number, labels, options)?; return Ok(()); } match options.conflict { ConflictDiffMethod::Materialize => { let contents = contents.zip(conflict_labels).map(|(side, labels)| { materialize_merge_result_to_bytes(side, labels, materialize_options) }); show_color_words_resolved_hunks( formatter, contents.as_ref().map(BStr::new), line_number, labels, options, )?; } ConflictDiffMethod::Pair => { let contents = contents.map(|side| files::merge(side, &materialize_options.merge)); show_color_words_conflict_hunks( formatter, contents.as_ref(), line_number, labels, options, )?; } } Ok(()) } fn show_color_words_conflict_hunks( formatter: &mut dyn Formatter, contents: Diff<&Merge<BString>>, mut line_number: DiffLineNumber, labels: Diff<&str>, options: &ColorWordsDiffOptions, ) -> io::Result<DiffLineNumber> { let num_lefts = contents.before.as_slice().len(); let line_diff = diff_by_line( itertools::chain(contents.before, contents.after), 
&options.line_diff.compare_mode, ); // Matching entries shouldn't appear consecutively in diff of two inputs. // However, if the inputs have conflicts, there may be a hunk that can be // resolved, resulting [matching, resolved, matching] sequence. let mut contexts: Vec<Diff<&BStr>> = Vec::new(); let mut emitted = false; for hunk in files::conflict_diff_hunks(line_diff.hunks(), num_lefts) { match hunk.kind { // There may be conflicts in matching hunk, but just pick one. It // would be too verbose to show all conflict pairs as context. DiffHunkKind::Matching => { contexts.push(Diff::new(hunk.lefts.first(), hunk.rights.first())); } DiffHunkKind::Different => { let num_after = if emitted { options.context } else { 0 }; let num_before = options.context; line_number = show_color_words_context_lines( formatter, &contexts, line_number, labels, options, num_after, num_before, )?; contexts.clear(); emitted = true; line_number = if let (Some(&left), Some(&right)) = (hunk.lefts.as_resolved(), hunk.rights.as_resolved()) { show_color_words_diff_lines( formatter, Diff::new(left, right), line_number, labels, options, )? } else { show_color_words_unresolved_hunk( formatter, &hunk, line_number, labels, options, )? } } } } let num_after = if emitted { options.context } else { 0 }; let num_before = 0; show_color_words_context_lines( formatter, &contexts, line_number, labels, options, num_after, num_before, ) } fn show_color_words_unresolved_hunk( formatter: &mut dyn Formatter, hunk: &ConflictDiffHunk, line_number: DiffLineNumber, labels: Diff<&str>, options: &ColorWordsDiffOptions, ) -> io::Result<DiffLineNumber> { let hunk_desc = if hunk.lefts.is_resolved() { "Created conflict" } else if hunk.rights.is_resolved() { "Resolved conflict" } else { "Modified conflict" }; writeln!(formatter.labeled("hunk_header"), "<<<<<<< {hunk_desc}")?; // Pad with identical (negative, positive) terms. It's common that one of // the sides is resolved, or both sides have the same numbers of terms. 
If // both sides are conflicts, and the numbers of terms are different, the // choice of padding terms is arbitrary. let num_terms = max(hunk.lefts.as_slice().len(), hunk.rights.as_slice().len()); let lefts = hunk.lefts.iter().enumerate(); let rights = hunk.rights.iter().enumerate(); let padded = iter::zip( lefts.chain(iter::repeat((0, hunk.lefts.first()))), rights.chain(iter::repeat((0, hunk.rights.first()))), ) .take(num_terms); let mut max_line_number = line_number; for (i, ((left_index, &left_content), (right_index, &right_content))) in padded.enumerate() { let positive = i % 2 == 0; writeln!( formatter.labeled("hunk_header"), "{sep} left {left_name} #{left_index} to right {right_name} #{right_index}", sep = if positive { "+++++++" } else { "-------" }, // these numbers should be compatible with the "tree-set" language #5307 left_name = if left_index % 2 == 0 { "side" } else { "base" }, left_index = left_index / 2 + 1, right_name = if right_index % 2 == 0 { "side" } else { "base" }, right_index = right_index / 2 + 1, )?; let contents = Diff::new(left_content, right_content); let labels = match positive { true => labels, false => labels.invert(), }; // Individual hunk pair may be largely the same, so diff it again. let new_line_number = show_color_words_resolved_hunks(formatter, contents, line_number, labels, options)?;
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
true
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/command_error.rs
cli/src/command_error.rs
// Copyright 2022-2024 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::error; use std::error::Error as _; use std::io; use std::io::Write as _; use std::iter; use std::sync::Arc; use itertools::Itertools as _; use jj_lib::absorb::AbsorbError; use jj_lib::backend::BackendError; use jj_lib::backend::CommitId; use jj_lib::bisect::BisectionError; use jj_lib::config::ConfigFileSaveError; use jj_lib::config::ConfigGetError; use jj_lib::config::ConfigLoadError; use jj_lib::config::ConfigMigrateError; use jj_lib::dsl_util::Diagnostics; use jj_lib::evolution::WalkPredecessorsError; use jj_lib::fileset::FilePatternParseError; use jj_lib::fileset::FilesetParseError; use jj_lib::fileset::FilesetParseErrorKind; use jj_lib::fix::FixError; use jj_lib::gitignore::GitIgnoreError; use jj_lib::index::IndexError; use jj_lib::op_heads_store::OpHeadResolutionError; use jj_lib::op_heads_store::OpHeadsStoreError; use jj_lib::op_store::OpStoreError; use jj_lib::op_walk::OpsetEvaluationError; use jj_lib::op_walk::OpsetResolutionError; use jj_lib::repo::CheckOutCommitError; use jj_lib::repo::EditCommitError; use jj_lib::repo::RepoLoaderError; use jj_lib::repo::RewriteRootCommit; use jj_lib::repo_path::RepoPathBuf; use jj_lib::repo_path::UiPathParseError; use jj_lib::revset; use jj_lib::revset::RevsetEvaluationError; use jj_lib::revset::RevsetParseError; use jj_lib::revset::RevsetParseErrorKind; use jj_lib::revset::RevsetResolutionError; use 
jj_lib::str_util::StringPatternParseError; use jj_lib::trailer::TrailerParseError; use jj_lib::transaction::TransactionCommitError; use jj_lib::view::RenameWorkspaceError; use jj_lib::working_copy::RecoverWorkspaceError; use jj_lib::working_copy::ResetError; use jj_lib::working_copy::SnapshotError; use jj_lib::working_copy::WorkingCopyStateError; use jj_lib::workspace::WorkspaceInitError; use thiserror::Error; use crate::cli_util::short_operation_hash; use crate::description_util::ParseBulkEditMessageError; use crate::description_util::TempTextEditError; use crate::description_util::TextEditError; use crate::diff_util::DiffRenderError; use crate::formatter::FormatRecorder; use crate::formatter::Formatter; use crate::formatter::FormatterExt as _; use crate::merge_tools::ConflictResolveError; use crate::merge_tools::DiffEditError; use crate::merge_tools::MergeToolConfigError; use crate::merge_tools::MergeToolPartialResolutionError; use crate::revset_util::BookmarkNameParseError; use crate::revset_util::TagNameParseError; use crate::revset_util::UserRevsetEvaluationError; use crate::template_parser::TemplateParseError; use crate::template_parser::TemplateParseErrorKind; use crate::ui::Ui; #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub enum CommandErrorKind { User, Config, /// Invalid command line. The inner error type may be `clap::Error`. Cli, BrokenPipe, Internal, } #[derive(Clone, Debug)] pub struct CommandError { pub kind: CommandErrorKind, pub error: Arc<dyn error::Error + Send + Sync>, pub hints: Vec<ErrorHint>, } impl CommandError { pub fn new( kind: CommandErrorKind, err: impl Into<Box<dyn error::Error + Send + Sync>>, ) -> Self { Self { kind, error: Arc::from(err.into()), hints: vec![], } } pub fn with_message( kind: CommandErrorKind, message: impl Into<String>, source: impl Into<Box<dyn error::Error + Send + Sync>>, ) -> Self { Self::new(kind, ErrorWithMessage::new(message, source)) } /// Returns error with the given plain-text `hint` attached. 
pub fn hinted(mut self, hint: impl Into<String>) -> Self { self.add_hint(hint); self } /// Appends plain-text `hint` to the error. pub fn add_hint(&mut self, hint: impl Into<String>) { self.hints.push(ErrorHint::PlainText(hint.into())); } /// Appends formatted `hint` to the error. pub fn add_formatted_hint(&mut self, hint: FormatRecorder) { self.hints.push(ErrorHint::Formatted(hint)); } /// Constructs formatted hint and appends it to the error. pub fn add_formatted_hint_with( &mut self, write: impl FnOnce(&mut dyn Formatter) -> io::Result<()>, ) { let mut formatter = FormatRecorder::new(); write(&mut formatter).expect("write() to FormatRecorder should never fail"); self.add_formatted_hint(formatter); } /// Appends 0 or more plain-text `hints` to the error. pub fn extend_hints(&mut self, hints: impl IntoIterator<Item = String>) { self.hints .extend(hints.into_iter().map(ErrorHint::PlainText)); } } #[derive(Clone, Debug)] pub enum ErrorHint { PlainText(String), Formatted(FormatRecorder), } /// Wraps error with user-visible message. 
#[derive(Debug, Error)] #[error("{message}")] struct ErrorWithMessage { message: String, source: Box<dyn error::Error + Send + Sync>, } impl ErrorWithMessage { fn new( message: impl Into<String>, source: impl Into<Box<dyn error::Error + Send + Sync>>, ) -> Self { Self { message: message.into(), source: source.into(), } } } pub fn user_error(err: impl Into<Box<dyn error::Error + Send + Sync>>) -> CommandError { CommandError::new(CommandErrorKind::User, err) } pub fn user_error_with_hint( err: impl Into<Box<dyn error::Error + Send + Sync>>, hint: impl Into<String>, ) -> CommandError { user_error(err).hinted(hint) } pub fn user_error_with_message( message: impl Into<String>, source: impl Into<Box<dyn error::Error + Send + Sync>>, ) -> CommandError { CommandError::with_message(CommandErrorKind::User, message, source) } pub fn config_error(err: impl Into<Box<dyn error::Error + Send + Sync>>) -> CommandError { CommandError::new(CommandErrorKind::Config, err) } pub fn config_error_with_message( message: impl Into<String>, source: impl Into<Box<dyn error::Error + Send + Sync>>, ) -> CommandError { CommandError::with_message(CommandErrorKind::Config, message, source) } pub fn cli_error(err: impl Into<Box<dyn error::Error + Send + Sync>>) -> CommandError { CommandError::new(CommandErrorKind::Cli, err) } pub fn cli_error_with_message( message: impl Into<String>, source: impl Into<Box<dyn error::Error + Send + Sync>>, ) -> CommandError { CommandError::with_message(CommandErrorKind::Cli, message, source) } pub fn internal_error(err: impl Into<Box<dyn error::Error + Send + Sync>>) -> CommandError { CommandError::new(CommandErrorKind::Internal, err) } pub fn internal_error_with_message( message: impl Into<String>, source: impl Into<Box<dyn error::Error + Send + Sync>>, ) -> CommandError { CommandError::with_message(CommandErrorKind::Internal, message, source) } fn format_similarity_hint<S: AsRef<str>>(candidates: &[S]) -> Option<String> { match candidates { [] => None, names => { 
let quoted_names = names.iter().map(|s| format!("`{}`", s.as_ref())).join(", "); Some(format!("Did you mean {quoted_names}?")) } } } impl From<io::Error> for CommandError { fn from(err: io::Error) -> Self { let kind = match err.kind() { io::ErrorKind::BrokenPipe => CommandErrorKind::BrokenPipe, _ => CommandErrorKind::User, }; Self::new(kind, err) } } impl From<jj_lib::file_util::PathError> for CommandError { fn from(err: jj_lib::file_util::PathError) -> Self { user_error(err) } } impl From<ConfigFileSaveError> for CommandError { fn from(err: ConfigFileSaveError) -> Self { user_error(err) } } impl From<ConfigGetError> for CommandError { fn from(err: ConfigGetError) -> Self { let hint = config_get_error_hint(&err); let mut cmd_err = config_error(err); cmd_err.extend_hints(hint); cmd_err } } impl From<ConfigLoadError> for CommandError { fn from(err: ConfigLoadError) -> Self { let hint = match &err { ConfigLoadError::Read(_) => None, ConfigLoadError::Parse { source_path, .. } => source_path .as_ref() .map(|path| format!("Check the config file: {}", path.display())), }; let mut cmd_err = config_error(err); cmd_err.extend_hints(hint); cmd_err } } impl From<ConfigMigrateError> for CommandError { fn from(err: ConfigMigrateError) -> Self { let hint = err .source_path .as_ref() .map(|path| format!("Check the config file: {}", path.display())); let mut cmd_err = config_error(err); cmd_err.extend_hints(hint); cmd_err } } impl From<RewriteRootCommit> for CommandError { fn from(err: RewriteRootCommit) -> Self { internal_error_with_message("Attempted to rewrite the root commit", err) } } impl From<EditCommitError> for CommandError { fn from(err: EditCommitError) -> Self { internal_error_with_message("Failed to edit a commit", err) } } impl From<CheckOutCommitError> for CommandError { fn from(err: CheckOutCommitError) -> Self { internal_error_with_message("Failed to check out a commit", err) } } impl From<RenameWorkspaceError> for CommandError { fn from(err: RenameWorkspaceError) 
-> Self { user_error_with_message("Failed to rename a workspace", err) } } impl From<BackendError> for CommandError { fn from(err: BackendError) -> Self { match &err { BackendError::Unsupported(_) => user_error(err), _ => internal_error_with_message("Unexpected error from backend", err), } } } impl From<IndexError> for CommandError { fn from(err: IndexError) -> Self { internal_error_with_message("Unexpected error from index", err) } } impl From<OpHeadsStoreError> for CommandError { fn from(err: OpHeadsStoreError) -> Self { internal_error_with_message("Unexpected error from operation heads store", err) } } impl From<WorkspaceInitError> for CommandError { fn from(err: WorkspaceInitError) -> Self { match err { WorkspaceInitError::DestinationExists(_) => { user_error("The target repo already exists") } WorkspaceInitError::EncodeRepoPath(_) => user_error(err), WorkspaceInitError::CheckOutCommit(err) => { internal_error_with_message("Failed to check out the initial commit", err) } WorkspaceInitError::Path(err) => { internal_error_with_message("Failed to access the repository", err) } WorkspaceInitError::OpHeadsStore(err) => { user_error_with_message("Failed to record initial operation", err) } WorkspaceInitError::Backend(err) => { user_error_with_message("Failed to access the repository", err) } WorkspaceInitError::WorkingCopyState(err) => { internal_error_with_message("Failed to access the repository", err) } WorkspaceInitError::SignInit(err) => user_error(err), WorkspaceInitError::TransactionCommit(err) => err.into(), } } } impl From<OpHeadResolutionError> for CommandError { fn from(err: OpHeadResolutionError) -> Self { match err { OpHeadResolutionError::NoHeads => { internal_error_with_message("Corrupt repository", err) } } } } impl From<OpsetEvaluationError> for CommandError { fn from(err: OpsetEvaluationError) -> Self { match err { OpsetEvaluationError::OpsetResolution(err) => { let hint = opset_resolution_error_hint(&err); let mut cmd_err = user_error(err); 
cmd_err.extend_hints(hint); cmd_err } OpsetEvaluationError::OpHeadResolution(err) => err.into(), OpsetEvaluationError::OpHeadsStore(err) => err.into(), OpsetEvaluationError::OpStore(err) => err.into(), } } } impl From<SnapshotError> for CommandError { fn from(err: SnapshotError) -> Self { internal_error_with_message("Failed to snapshot the working copy", err) } } impl From<OpStoreError> for CommandError { fn from(err: OpStoreError) -> Self { internal_error_with_message("Failed to load an operation", err) } } impl From<RepoLoaderError> for CommandError { fn from(err: RepoLoaderError) -> Self { internal_error_with_message("Failed to load the repo", err) } } impl From<ResetError> for CommandError { fn from(err: ResetError) -> Self { internal_error_with_message("Failed to reset the working copy", err) } } impl From<TransactionCommitError> for CommandError { fn from(err: TransactionCommitError) -> Self { internal_error(err) } } impl From<WalkPredecessorsError> for CommandError { fn from(err: WalkPredecessorsError) -> Self { match err { WalkPredecessorsError::Backend(err) => err.into(), WalkPredecessorsError::Index(err) => err.into(), WalkPredecessorsError::OpStore(err) => err.into(), WalkPredecessorsError::CycleDetected(_) => internal_error(err), } } } impl From<DiffEditError> for CommandError { fn from(err: DiffEditError) -> Self { user_error_with_message("Failed to edit diff", err) } } impl From<DiffRenderError> for CommandError { fn from(err: DiffRenderError) -> Self { match err { DiffRenderError::DiffGenerate(_) => user_error(err), DiffRenderError::Backend(err) => err.into(), DiffRenderError::AccessDenied { .. 
} => user_error(err), DiffRenderError::InvalidRepoPath(_) => user_error(err), DiffRenderError::Io(err) => err.into(), } } } impl From<ConflictResolveError> for CommandError { fn from(err: ConflictResolveError) -> Self { match err { ConflictResolveError::Backend(err) => err.into(), ConflictResolveError::Io(err) => err.into(), _ => { let hint = match &err { ConflictResolveError::ConflictTooComplicated { .. } => { Some("Edit the conflict markers manually to resolve this.".to_owned()) } ConflictResolveError::ExecutableConflict { .. } => { Some("Use `jj file chmod` to update the executable bit.".to_owned()) } _ => None, }; let mut cmd_err = user_error_with_message("Failed to resolve conflicts", err); cmd_err.extend_hints(hint); cmd_err } } } } impl From<MergeToolPartialResolutionError> for CommandError { fn from(err: MergeToolPartialResolutionError) -> Self { user_error(err) } } impl From<MergeToolConfigError> for CommandError { fn from(err: MergeToolConfigError) -> Self { match &err { MergeToolConfigError::MergeArgsNotConfigured { tool_name } => { let tool_name = tool_name.clone(); user_error_with_hint( err, format!( "To use `{tool_name}` as a merge tool, the config \ `merge-tools.{tool_name}.merge-args` must be defined (see docs for \ details)" ), ) } _ => user_error_with_message("Failed to load tool configuration", err), } } } impl From<TextEditError> for CommandError { fn from(err: TextEditError) -> Self { user_error(err) } } impl From<TempTextEditError> for CommandError { fn from(err: TempTextEditError) -> Self { let hint = err.path.as_ref().map(|path| { let name = err.name.as_deref().unwrap_or("file"); format!("Edited {name} is left in {path}", path = path.display()) }); let mut cmd_err = user_error(err); cmd_err.extend_hints(hint); cmd_err } } impl From<TrailerParseError> for CommandError { fn from(err: TrailerParseError) -> Self { user_error(err) } } #[cfg(feature = "git")] mod git { use jj_lib::git::GitDefaultRefspecError; use jj_lib::git::GitExportError; use 
jj_lib::git::GitFetchError; use jj_lib::git::GitImportError; use jj_lib::git::GitPushError; use jj_lib::git::GitRefExpansionError; use jj_lib::git::GitRemoteManagementError; use jj_lib::git::GitResetHeadError; use jj_lib::git::UnexpectedGitBackendError; use super::*; impl From<GitImportError> for CommandError { fn from(err: GitImportError) -> Self { let hint = match &err { GitImportError::MissingHeadTarget { .. } | GitImportError::MissingRefAncestor { .. } => Some( "\ Is this Git repository a partial clone (cloned with the --filter argument)? jj currently does not support partial clones. To use jj with this repository, try re-cloning with \ the full repository contents." .to_string(), ), GitImportError::Backend(_) => None, GitImportError::Index(_) => None, GitImportError::Git(_) => None, GitImportError::UnexpectedBackend(_) => None, }; let mut cmd_err = user_error_with_message("Failed to import refs from underlying Git repo", err); cmd_err.extend_hints(hint); cmd_err } } impl From<GitExportError> for CommandError { fn from(err: GitExportError) -> Self { user_error_with_message("Failed to export refs to underlying Git repo", err) } } impl From<GitFetchError> for CommandError { fn from(err: GitFetchError) -> Self { match err { GitFetchError::NoSuchRemote(_) => user_error(err), GitFetchError::RemoteName(_) => user_error_with_hint( err, "Run `jj git remote rename` to give a different name.", ), GitFetchError::Subprocess(_) => user_error(err), } } } impl From<GitDefaultRefspecError> for CommandError { fn from(err: GitDefaultRefspecError) -> Self { match err { GitDefaultRefspecError::NoSuchRemote(_) => user_error(err), GitDefaultRefspecError::InvalidRemoteConfiguration(_, _) => user_error(err), } } } impl From<GitRefExpansionError> for CommandError { fn from(err: GitRefExpansionError) -> Self { match &err { GitRefExpansionError::Expression(_) => user_error_with_hint( err, "Specify patterns in `(positive | ...) 
& ~(negative | ...)` form.", ), GitRefExpansionError::InvalidBranchPattern(_) => user_error(err), } } } impl From<GitPushError> for CommandError { fn from(err: GitPushError) -> Self { match err { GitPushError::NoSuchRemote(_) => user_error(err), GitPushError::RemoteName(_) => user_error_with_hint( err, "Run `jj git remote rename` to give a different name.", ), GitPushError::Subprocess(_) => user_error(err), GitPushError::UnexpectedBackend(_) => user_error(err), } } } impl From<GitRemoteManagementError> for CommandError { fn from(err: GitRemoteManagementError) -> Self { user_error(err) } } impl From<GitResetHeadError> for CommandError { fn from(err: GitResetHeadError) -> Self { user_error_with_message("Failed to reset Git HEAD state", err) } } impl From<UnexpectedGitBackendError> for CommandError { fn from(err: UnexpectedGitBackendError) -> Self { user_error(err) } } } impl From<RevsetEvaluationError> for CommandError { fn from(err: RevsetEvaluationError) -> Self { user_error(err) } } impl From<FilesetParseError> for CommandError { fn from(err: FilesetParseError) -> Self { let hint = fileset_parse_error_hint(&err); let mut cmd_err = user_error_with_message(format!("Failed to parse fileset: {}", err.kind()), err); cmd_err.extend_hints(hint); cmd_err } } impl From<RecoverWorkspaceError> for CommandError { fn from(err: RecoverWorkspaceError) -> Self { match err { RecoverWorkspaceError::Backend(err) => err.into(), RecoverWorkspaceError::Reset(err) => err.into(), RecoverWorkspaceError::RewriteRootCommit(err) => err.into(), RecoverWorkspaceError::TransactionCommit(err) => err.into(), err @ RecoverWorkspaceError::WorkspaceMissingWorkingCopy(_) => user_error(err), } } } impl From<RevsetParseError> for CommandError { fn from(err: RevsetParseError) -> Self { let hint = revset_parse_error_hint(&err); let mut cmd_err = user_error_with_message(format!("Failed to parse revset: {}", err.kind()), err); cmd_err.extend_hints(hint); cmd_err } } impl From<RevsetResolutionError> for 
CommandError { fn from(err: RevsetResolutionError) -> Self { let hints = revset_resolution_error_hints(&err); let mut cmd_err = user_error(err); cmd_err.extend_hints(hints); cmd_err } } impl From<UserRevsetEvaluationError> for CommandError { fn from(err: UserRevsetEvaluationError) -> Self { match err { UserRevsetEvaluationError::Resolution(err) => err.into(), UserRevsetEvaluationError::Evaluation(err) => err.into(), } } } impl From<TemplateParseError> for CommandError { fn from(err: TemplateParseError) -> Self { let hint = template_parse_error_hint(&err); let mut cmd_err = user_error_with_message(format!("Failed to parse template: {}", err.kind()), err); cmd_err.extend_hints(hint); cmd_err } } impl From<UiPathParseError> for CommandError { fn from(err: UiPathParseError) -> Self { user_error(err) } } impl From<clap::Error> for CommandError { fn from(err: clap::Error) -> Self { let hint = find_source_parse_error_hint(&err); let mut cmd_err = cli_error(err); cmd_err.extend_hints(hint); cmd_err } } impl From<WorkingCopyStateError> for CommandError { fn from(err: WorkingCopyStateError) -> Self { internal_error_with_message("Failed to access working copy state", err) } } impl From<GitIgnoreError> for CommandError { fn from(err: GitIgnoreError) -> Self { user_error_with_message("Failed to process .gitignore.", err) } } impl From<ParseBulkEditMessageError> for CommandError { fn from(err: ParseBulkEditMessageError) -> Self { user_error(err) } } impl From<AbsorbError> for CommandError { fn from(err: AbsorbError) -> Self { match err { AbsorbError::Backend(err) => err.into(), AbsorbError::RevsetEvaluation(err) => err.into(), } } } impl From<FixError> for CommandError { fn from(err: FixError) -> Self { match err { FixError::Backend(err) => err.into(), FixError::RevsetEvaluation(err) => err.into(), FixError::IO(err) => err.into(), FixError::FixContent(err) => internal_error_with_message( "An error occurred while attempting to fix file content", err, ), } } } impl 
From<BisectionError> for CommandError { fn from(err: BisectionError) -> Self { match err { BisectionError::RevsetEvaluationError(_) => user_error(err), } } } fn find_source_parse_error_hint(err: &dyn error::Error) -> Option<String> { let source = err.source()?; if let Some(source) = source.downcast_ref() { bookmark_name_parse_error_hint(source) } else if let Some(source) = source.downcast_ref() { config_get_error_hint(source) } else if let Some(source) = source.downcast_ref() { file_pattern_parse_error_hint(source) } else if let Some(source) = source.downcast_ref() { fileset_parse_error_hint(source) } else if let Some(source) = source.downcast_ref() { revset_parse_error_hint(source) } else if let Some(source) = source.downcast_ref() { // TODO: propagate all hints? revset_resolution_error_hints(source).into_iter().next() } else if let Some(UserRevsetEvaluationError::Resolution(source)) = source.downcast_ref() { // TODO: propagate all hints? revset_resolution_error_hints(source).into_iter().next() } else if let Some(source) = source.downcast_ref() { string_pattern_parse_error_hint(source) } else if let Some(source) = source.downcast_ref() { tag_name_parse_error_hint(source) } else if let Some(source) = source.downcast_ref() { template_parse_error_hint(source) } else { None } } const REVSET_SYMBOL_HINT: &str = "See https://docs.jj-vcs.dev/latest/revsets/ or use `jj help -k \ revsets` for how to quote symbols."; fn bookmark_name_parse_error_hint(err: &BookmarkNameParseError) -> Option<String> { use revset::ExpressionKind; match revset::parse_program(&err.input).map(|node| node.kind) { Ok(ExpressionKind::RemoteSymbol(symbol)) => Some(format!( "Looks like remote bookmark. Run `jj bookmark track {name} --remote={remote}` to \ track it.", name = symbol.name.as_symbol(), remote = symbol.remote.as_symbol() )), _ => Some(REVSET_SYMBOL_HINT.to_owned()), } } fn config_get_error_hint(err: &ConfigGetError) -> Option<String> { match &err { ConfigGetError::NotFound { .. 
} => None, ConfigGetError::Type { source_path, .. } => source_path .as_ref() .map(|path| format!("Check the config file: {}", path.display())), } } fn file_pattern_parse_error_hint(err: &FilePatternParseError) -> Option<String> { match err { FilePatternParseError::InvalidKind(_) => Some(String::from( "See https://docs.jj-vcs.dev/latest/filesets/#file-patterns or `jj help -k filesets` \ for valid prefixes.", )), // Suggest root:"<path>" if input can be parsed as repo-relative path FilePatternParseError::UiPath(UiPathParseError::Fs(e)) => { RepoPathBuf::from_relative_path(&e.input).ok().map(|path| { format!(r#"Consider using root:{path:?} to specify repo-relative path"#) }) } FilePatternParseError::RelativePath(_) => None, FilePatternParseError::GlobPattern(_) => None, } } fn fileset_parse_error_hint(err: &FilesetParseError) -> Option<String> { match err.kind() { FilesetParseErrorKind::SyntaxError => Some(String::from( "See https://docs.jj-vcs.dev/latest/filesets/ or use `jj help -k filesets` for \ filesets syntax and how to match file paths.", )), FilesetParseErrorKind::NoSuchFunction { name: _, candidates, } => format_similarity_hint(candidates), FilesetParseErrorKind::InvalidArguments { .. 
} | FilesetParseErrorKind::Expression(_) => { find_source_parse_error_hint(&err) } } } fn opset_resolution_error_hint(err: &OpsetResolutionError) -> Option<String> { match err { OpsetResolutionError::MultipleOperations { expr: _, candidates, } => Some(format!( "Try specifying one of the operations by ID: {}", candidates.iter().map(short_operation_hash).join(", ") )), OpsetResolutionError::EmptyOperations(_) | OpsetResolutionError::InvalidIdPrefix(_) | OpsetResolutionError::NoSuchOperation(_) | OpsetResolutionError::AmbiguousIdPrefix(_) => None, } } pub(crate) fn revset_parse_error_hint(err: &RevsetParseError) -> Option<String> { // Only for the bottom error, which is usually the root cause let bottom_err = iter::successors(Some(err), |e| e.origin()).last().unwrap(); match bottom_err.kind() { RevsetParseErrorKind::SyntaxError => Some( "See https://docs.jj-vcs.dev/latest/revsets/ or use `jj help -k revsets` for revsets \ syntax and how to quote symbols." .into(), ), RevsetParseErrorKind::NotPrefixOperator { op: _, similar_op, description, } | RevsetParseErrorKind::NotPostfixOperator { op: _, similar_op, description, } | RevsetParseErrorKind::NotInfixOperator { op: _, similar_op, description, } => Some(format!("Did you mean `{similar_op}` for {description}?")), RevsetParseErrorKind::NoSuchFunction { name: _, candidates, } => format_similarity_hint(candidates), RevsetParseErrorKind::InvalidFunctionArguments { .. 
} | RevsetParseErrorKind::Expression(_) => find_source_parse_error_hint(bottom_err), _ => None, } } fn revset_resolution_error_hints(err: &RevsetResolutionError) -> Vec<String> { let multiple_targets_hint = |targets: &[CommitId]| { format!( "Use commit ID to select single revision from: {}", targets.iter().map(|id| format!("{id:.12}")).join(", ") ) }; match err { RevsetResolutionError::NoSuchRevision { name: _, candidates, } => format_similarity_hint(candidates).into_iter().collect(), RevsetResolutionError::DivergentChangeId { symbol, visible_targets, } => vec![ format!( "Use change offset to select single revision: {}", visible_targets .iter() .map(|(offset, _)| format!("{symbol}/{offset}")) .join(", ") ), format!("Use `change_id({symbol})` to select all revisions"), "To abandon unneeded revisions, run `jj abandon <commit_id>`".to_owned(), ], RevsetResolutionError::ConflictedRef { kind: "bookmark", symbol, targets, } => vec![ multiple_targets_hint(targets), format!("Use `bookmarks({symbol})` to select all revisions"), format!( "To set which revision the bookmark points to, run `jj bookmark set {symbol} -r \ <REVISION>`" ), ], RevsetResolutionError::ConflictedRef { kind: _, symbol: _, targets, } => vec![multiple_targets_hint(targets)], RevsetResolutionError::EmptyString | RevsetResolutionError::WorkspaceMissingWorkingCopy { .. } | RevsetResolutionError::AmbiguousCommitIdPrefix(_) | RevsetResolutionError::AmbiguousChangeIdPrefix(_) | RevsetResolutionError::Backend(_) | RevsetResolutionError::Other(_) => vec![], } } fn string_pattern_parse_error_hint(err: &StringPatternParseError) -> Option<String> { match err { StringPatternParseError::InvalidKind(_) => Some( "Try prefixing with one of `exact:`, `glob:`, `regex:`, `substring:`, or one of these \ with `-i` suffix added (e.g. `glob-i:`) for case-insensitive matching" .into(), ),
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
true
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/templater.rs
cli/src/templater.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Tools for lazily evaluating templates that produce text in a fallible //! manner. use std::cell::RefCell; use std::error; use std::fmt; use std::io; use std::io::Write; use std::iter; use std::rc::Rc; use bstr::BStr; use bstr::BString; use jj_lib::backend::Signature; use jj_lib::backend::Timestamp; use jj_lib::config::ConfigValue; use jj_lib::op_store::TimestampRange; use crate::formatter::FormatRecorder; use crate::formatter::Formatter; use crate::formatter::FormatterExt as _; use crate::formatter::LabeledScope; use crate::formatter::PlainTextFormatter; use crate::text_util; use crate::time_util; /// Represents a printable type or a compiled template containing a placeholder /// value. /// /// This is analogous to [`std::fmt::Display`], but with customized error /// handling. pub trait Template { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()>; } /// Template that supports list-like behavior. pub trait ListTemplate: Template { /// Concatenates items with the given separator. 
fn join<'a>(self: Box<Self>, separator: Box<dyn Template + 'a>) -> Box<dyn Template + 'a> where Self: 'a; } impl<T: Template + ?Sized> Template for &T { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { <T as Template>::format(self, formatter) } } impl<T: Template + ?Sized> Template for Box<T> { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { <T as Template>::format(self, formatter) } } // All optional printable types should be printable, and it's unlikely to // implement different formatting per type. impl<T: Template> Template for Option<T> { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { self.as_ref().map_or(Ok(()), |t| t.format(formatter)) } } impl Template for BString { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { formatter.as_mut().write_all(self) } } impl Template for &BStr { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { formatter.as_mut().write_all(self) } } impl Template for ConfigValue { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { write!(formatter, "{self}") } } impl Template for Signature { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { write!(formatter.labeled("name"), "{}", self.name)?; if !self.name.is_empty() && !self.email.is_empty() { write!(formatter, " ")?; } if !self.email.is_empty() { write!(formatter, "<")?; let email = Email(self.email.clone()); email.format(formatter)?; write!(formatter, ">")?; } Ok(()) } } #[derive(Clone, Debug, Eq, PartialEq, serde::Serialize)] #[serde(transparent)] pub struct Email(pub String); impl Template for Email { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { let (local, domain) = text_util::split_email(&self.0); write!(formatter.labeled("local"), "{local}")?; if let Some(domain) = domain { write!(formatter, "@")?; write!(formatter.labeled("domain"), "{domain}")?; } Ok(()) } } // In template language, an integer 
value is represented as i64. However, we use // usize here because it's more convenient to guarantee that the lower value is // bounded to 0. pub type SizeHint = (usize, Option<usize>); impl Template for String { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { write!(formatter, "{self}") } } impl Template for &str { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { write!(formatter, "{self}") } } impl Template for Timestamp { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { match time_util::format_absolute_timestamp(self) { Ok(formatted) => write!(formatter, "{formatted}"), Err(err) => formatter.handle_error(err.into()), } } } impl Template for TimestampRange { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { self.start.format(formatter)?; write!(formatter, " - ")?; self.end.format(formatter)?; Ok(()) } } impl Template for Vec<String> { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { format_joined(formatter, self, " ") } } impl Template for bool { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { let repr = if *self { "true" } else { "false" }; write!(formatter, "{repr}") } } impl Template for i64 { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { write!(formatter, "{self}") } } pub struct LabelTemplate<T, L> { content: T, labels: L, } impl<T, L> LabelTemplate<T, L> { pub fn new(content: T, labels: L) -> Self where T: Template, L: TemplateProperty<Output = Vec<String>>, { Self { content, labels } } } impl<T, L> Template for LabelTemplate<T, L> where T: Template, L: TemplateProperty<Output = Vec<String>>, { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { match self.labels.extract() { Ok(labels) => format_labeled(formatter, &self.content, &labels), Err(err) => formatter.handle_error(err), } } } pub struct RawEscapeSequenceTemplate<T>(pub T); impl<T: Template> Template for 
RawEscapeSequenceTemplate<T> { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { let rewrap = formatter.rewrap_fn(); let mut raw_formatter = PlainTextFormatter::new(formatter.raw()?); self.0.format(&mut rewrap(&mut raw_formatter)) } } /// Renders contents in order, and returns the first non-empty output. pub struct CoalesceTemplate<T>(pub Vec<T>); impl<T: Template> Template for CoalesceTemplate<T> { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { let Some((last, contents)) = self.0.split_last() else { return Ok(()); }; let record_non_empty = record_non_empty_fn(formatter); if let Some(recorder) = contents.iter().find_map(record_non_empty) { recorder?.replay(formatter.as_mut()) } else { last.format(formatter) // no need to capture the last content } } } pub struct ConcatTemplate<T>(pub Vec<T>); impl<T: Template> Template for ConcatTemplate<T> { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { for template in &self.0 { template.format(formatter)?; } Ok(()) } } /// Renders the content to buffer, and transforms it without losing labels. pub struct ReformatTemplate<T, F> { content: T, reformat: F, } impl<T, F> ReformatTemplate<T, F> { pub fn new(content: T, reformat: F) -> Self where T: Template, F: Fn(&mut TemplateFormatter, &FormatRecorder) -> io::Result<()>, { Self { content, reformat } } } impl<T, F> Template for ReformatTemplate<T, F> where T: Template, F: Fn(&mut TemplateFormatter, &FormatRecorder) -> io::Result<()>, { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { let rewrap = formatter.rewrap_fn(); let mut recorder = FormatRecorder::new(); self.content.format(&mut rewrap(&mut recorder))?; (self.reformat)(formatter, &recorder) } } /// Like `ConcatTemplate`, but inserts a separator between contents. 
pub struct JoinTemplate<S, T> { separator: S, contents: Vec<T>, } impl<S, T> JoinTemplate<S, T> { pub fn new(separator: S, contents: Vec<T>) -> Self where S: Template, T: Template, { Self { separator, contents, } } } impl<S, T> Template for JoinTemplate<S, T> where S: Template, T: Template, { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { format_joined(formatter, &self.contents, &self.separator) } } /// Like `JoinTemplate`, but ignores empty contents. pub struct SeparateTemplate<S, T> { separator: S, contents: Vec<T>, } impl<S, T> SeparateTemplate<S, T> { pub fn new(separator: S, contents: Vec<T>) -> Self where S: Template, T: Template, { Self { separator, contents, } } } impl<S, T> Template for SeparateTemplate<S, T> where S: Template, T: Template, { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { let record_non_empty = record_non_empty_fn(formatter); let content_recorders = self.contents.iter().filter_map(record_non_empty); format_joined_with( formatter, content_recorders, &self.separator, |formatter, recorder| recorder?.replay(formatter.as_mut()), ) } } /// Wrapper around an error occurred during template evaluation. #[derive(Debug)] pub struct TemplatePropertyError(pub Box<dyn error::Error + Send + Sync>); // Implements conversion from any error type to support `expr?` in function // binding. This type doesn't implement `std::error::Error` instead. // <https://github.com/dtolnay/anyhow/issues/25#issuecomment-544140480> impl<E> From<E> for TemplatePropertyError where E: error::Error + Send + Sync + 'static, { fn from(err: E) -> Self { Self(err.into()) } } /// Lazily evaluated value which can fail to evaluate. 
pub trait TemplateProperty { type Output; fn extract(&self) -> Result<Self::Output, TemplatePropertyError>; } impl<P: TemplateProperty + ?Sized> TemplateProperty for Box<P> { type Output = <P as TemplateProperty>::Output; fn extract(&self) -> Result<Self::Output, TemplatePropertyError> { <P as TemplateProperty>::extract(self) } } impl<P: TemplateProperty> TemplateProperty for Option<P> { type Output = Option<P::Output>; fn extract(&self) -> Result<Self::Output, TemplatePropertyError> { self.as_ref().map(|property| property.extract()).transpose() } } // Implement TemplateProperty for tuples macro_rules! tuple_impls { ($( ( $($n:tt $T:ident),+ ) )+) => { $( impl<$($T: TemplateProperty,)+> TemplateProperty for ($($T,)+) { type Output = ($($T::Output,)+); fn extract(&self) -> Result<Self::Output, TemplatePropertyError> { Ok(($(self.$n.extract()?,)+)) } } )+ } } tuple_impls! { (0 T0) (0 T0, 1 T1) (0 T0, 1 T1, 2 T2) (0 T0, 1 T1, 2 T2, 3 T3) } /// Type-erased [`TemplateProperty`]. pub type BoxedTemplateProperty<'a, O> = Box<dyn TemplateProperty<Output = O> + 'a>; pub type BoxedSerializeProperty<'a> = BoxedTemplateProperty<'a, Box<dyn erased_serde::Serialize + 'a>>; /// [`TemplateProperty`] adapters that are useful when implementing methods. pub trait TemplatePropertyExt: TemplateProperty { /// Translates to a property that will apply fallible `function` to an /// extracted value. fn and_then<O, F>(self, function: F) -> TemplateFunction<Self, F> where Self: Sized, F: Fn(Self::Output) -> Result<O, TemplatePropertyError>, { TemplateFunction::new(self, function) } /// Translates to a property that will apply `function` to an extracted /// value, leaving `Err` untouched. 
fn map<O, F>(self, function: F) -> impl TemplateProperty<Output = O> where Self: Sized, F: Fn(Self::Output) -> O, { TemplateFunction::new(self, move |value| Ok(function(value))) } /// Translates to a property that will unwrap an extracted `Option` value /// of the specified `type_name`, mapping `None` to `Err`. fn try_unwrap<O>(self, type_name: &str) -> impl TemplateProperty<Output = O> where Self: TemplateProperty<Output = Option<O>> + Sized, { self.and_then(move |opt| { opt.ok_or_else(|| TemplatePropertyError(format!("No {type_name} available").into())) }) } /// Converts this property into boxed serialize property. fn into_serialize<'a>(self) -> BoxedSerializeProperty<'a> where Self: Sized + 'a, Self::Output: serde::Serialize, { Box::new(self.map(|value| Box::new(value) as Box<dyn erased_serde::Serialize>)) } /// Converts this property into `Template`. fn into_template<'a>(self) -> Box<dyn Template + 'a> where Self: Sized + 'a, Self::Output: Template, { Box::new(FormattablePropertyTemplate::new(self)) } /// Converts this property into boxed trait object. fn into_dyn<'a>(self) -> BoxedTemplateProperty<'a, Self::Output> where Self: Sized + 'a, { Box::new(self) } /// Converts this property into wrapped (or tagged) type. /// /// Use `W::wrap_property()` if the self type is known to be boxed. fn into_dyn_wrapped<'a, W>(self) -> W where Self: Sized + 'a, W: WrapTemplateProperty<'a, Self::Output>, { W::wrap_property(self.into_dyn()) } } impl<P: TemplateProperty + ?Sized> TemplatePropertyExt for P {} /// Wraps template property of type `O` in tagged type. /// /// This is basically [`From<BoxedTemplateProperty<'a, O>>`], but is restricted /// to property types. #[diagnostic::on_unimplemented( message = "the template property of type `{O}` cannot be wrapped in `{Self}`" )] pub trait WrapTemplateProperty<'a, O>: Sized { fn wrap_property(property: BoxedTemplateProperty<'a, O>) -> Self; } /// Adapter that wraps literal value in `TemplateProperty`. 
pub struct Literal<O>(pub O); impl<O: Template> Template for Literal<O> { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { self.0.format(formatter) } } impl<O: Clone> TemplateProperty for Literal<O> { type Output = O; fn extract(&self) -> Result<Self::Output, TemplatePropertyError> { Ok(self.0.clone()) } } /// Adapter to extract template value from property for displaying. pub struct FormattablePropertyTemplate<P> { property: P, } impl<P> FormattablePropertyTemplate<P> { pub fn new(property: P) -> Self where P: TemplateProperty, P::Output: Template, { Self { property } } } impl<P> Template for FormattablePropertyTemplate<P> where P: TemplateProperty, P::Output: Template, { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { match self.property.extract() { Ok(template) => template.format(formatter), Err(err) => formatter.handle_error(err), } } } /// Adapter to turn template back to string property. pub struct PlainTextFormattedProperty<T> { template: T, } impl<T> PlainTextFormattedProperty<T> { pub fn new(template: T) -> Self { Self { template } } } impl<T: Template> TemplateProperty for PlainTextFormattedProperty<T> { type Output = String; fn extract(&self) -> Result<Self::Output, TemplatePropertyError> { let mut output = vec![]; let mut formatter = PlainTextFormatter::new(&mut output); let mut wrapper = TemplateFormatter::new(&mut formatter, propagate_property_error); self.template.format(&mut wrapper)?; Ok(String::from_utf8(output).map_err(|err| err.utf8_error())?) } } /// Renders template property of list type with the given separator. /// /// Each list item will be formatted by the given `format_item()` function. 
pub struct ListPropertyTemplate<P, S, F> { property: P, separator: S, format_item: F, } impl<P, S, F> ListPropertyTemplate<P, S, F> { pub fn new<O>(property: P, separator: S, format_item: F) -> Self where P: TemplateProperty, P::Output: IntoIterator<Item = O>, S: Template, F: Fn(&mut TemplateFormatter, O) -> io::Result<()>, { Self { property, separator, format_item, } } } impl<O, P, S, F> Template for ListPropertyTemplate<P, S, F> where P: TemplateProperty, P::Output: IntoIterator<Item = O>, S: Template, F: Fn(&mut TemplateFormatter, O) -> io::Result<()>, { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { let contents = match self.property.extract() { Ok(contents) => contents, Err(err) => return formatter.handle_error(err), }; format_joined_with(formatter, contents, &self.separator, &self.format_item) } } impl<O, P, S, F> ListTemplate for ListPropertyTemplate<P, S, F> where P: TemplateProperty, P::Output: IntoIterator<Item = O>, S: Template, F: Fn(&mut TemplateFormatter, O) -> io::Result<()>, { fn join<'a>(self: Box<Self>, separator: Box<dyn Template + 'a>) -> Box<dyn Template + 'a> where Self: 'a, { // Once join()-ed, list-like API should be dropped. This is guaranteed by // the return type. Box::new(ListPropertyTemplate::new( self.property, separator, self.format_item, )) } } /// Template which selects an output based on a boolean condition. /// /// When `None` is specified for the false template and the condition is false, /// this writes nothing. 
pub struct ConditionalTemplate<P, T, U> { pub condition: P, pub true_template: T, pub false_template: Option<U>, } impl<P, T, U> ConditionalTemplate<P, T, U> { pub fn new(condition: P, true_template: T, false_template: Option<U>) -> Self where P: TemplateProperty<Output = bool>, T: Template, U: Template, { Self { condition, true_template, false_template, } } } impl<P, T, U> Template for ConditionalTemplate<P, T, U> where P: TemplateProperty<Output = bool>, T: Template, U: Template, { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { let condition = match self.condition.extract() { Ok(condition) => condition, Err(err) => return formatter.handle_error(err), }; if condition { self.true_template.format(formatter)?; } else if let Some(false_template) = &self.false_template { false_template.format(formatter)?; } Ok(()) } } /// Adapter to apply fallible `function` to the `property`. /// /// This is usually created by `TemplatePropertyExt::and_then()`/`map()`. pub struct TemplateFunction<P, F> { pub property: P, pub function: F, } impl<P, F> TemplateFunction<P, F> { pub fn new<O>(property: P, function: F) -> Self where P: TemplateProperty, F: Fn(P::Output) -> Result<O, TemplatePropertyError>, { Self { property, function } } } impl<O, P, F> TemplateProperty for TemplateFunction<P, F> where P: TemplateProperty, F: Fn(P::Output) -> Result<O, TemplatePropertyError>, { type Output = O; fn extract(&self) -> Result<Self::Output, TemplatePropertyError> { (self.function)(self.property.extract()?) } } /// Property which will be compiled into template once, and substituted later. 
#[derive(Clone, Debug)] pub struct PropertyPlaceholder<O> { value: Rc<RefCell<Option<O>>>, } impl<O> PropertyPlaceholder<O> { pub fn new() -> Self { Self { value: Rc::new(RefCell::new(None)), } } pub fn set(&self, value: O) { *self.value.borrow_mut() = Some(value); } pub fn take(&self) -> Option<O> { self.value.borrow_mut().take() } pub fn with_value<R>(&self, value: O, f: impl FnOnce() -> R) -> R { self.set(value); let result = f(); self.take(); result } } impl<O> Default for PropertyPlaceholder<O> { fn default() -> Self { Self::new() } } impl<O: Clone> TemplateProperty for PropertyPlaceholder<O> { type Output = O; fn extract(&self) -> Result<Self::Output, TemplatePropertyError> { if let Some(value) = self.value.borrow().as_ref() { Ok(value.clone()) } else { Err(TemplatePropertyError("Placeholder value is not set".into())) } } } /// Adapter that renders compiled `template` with the `placeholder` value set. pub struct TemplateRenderer<'a, C> { template: Box<dyn Template + 'a>, placeholder: PropertyPlaceholder<C>, labels: Vec<String>, } impl<'a, C: Clone> TemplateRenderer<'a, C> { pub fn new(template: Box<dyn Template + 'a>, placeholder: PropertyPlaceholder<C>) -> Self { Self { template, placeholder, labels: Vec::new(), } } /// Returns renderer that will format template with the given `labels`. /// /// This is equivalent to wrapping the content template with `label()` /// function. For example, /// `content.labeled(["foo", "bar"]).labeled(["baz"])` can be expressed as /// `label("baz", label("foo bar", content))` in template. 
pub fn labeled<S: Into<String>>(mut self, labels: impl IntoIterator<Item = S>) -> Self { self.labels.splice(0..0, labels.into_iter().map(Into::into)); self } pub fn format(&self, context: &C, formatter: &mut dyn Formatter) -> io::Result<()> { let mut wrapper = TemplateFormatter::new(formatter, format_property_error_inline); self.placeholder.with_value(context.clone(), || { format_labeled(&mut wrapper, &self.template, &self.labels) }) } /// Renders template into buffer ignoring any color labels. /// /// The output is usually UTF-8, but it can contain arbitrary bytes such as /// file content. pub fn format_plain_text(&self, context: &C) -> Vec<u8> { let mut output = Vec::new(); self.format(context, &mut PlainTextFormatter::new(&mut output)) .expect("write() to vec backed formatter should never fail"); output } } /// Wrapper to pass around `Formatter` and error handler. pub struct TemplateFormatter<'a> { formatter: &'a mut dyn Formatter, error_handler: PropertyErrorHandler, } impl<'a> TemplateFormatter<'a> { fn new(formatter: &'a mut dyn Formatter, error_handler: PropertyErrorHandler) -> Self { Self { formatter, error_handler, } } /// Returns function that wraps another `Formatter` with the current error /// handling strategy. /// /// This does not borrow `self` so the underlying formatter can be mutably /// borrowed. 
pub fn rewrap_fn(&self) -> impl Fn(&mut dyn Formatter) -> TemplateFormatter<'_> + use<> { let error_handler = self.error_handler; move |formatter| TemplateFormatter::new(formatter, error_handler) } pub fn raw(&mut self) -> io::Result<Box<dyn Write + '_>> { self.formatter.raw() } pub fn labeled(&mut self, label: &str) -> LabeledScope<&mut (dyn Formatter + 'a)> { self.formatter.labeled(label) } pub fn push_label(&mut self, label: &str) { self.formatter.push_label(label); } pub fn pop_label(&mut self) { self.formatter.pop_label(); } pub fn write_fmt(&mut self, args: fmt::Arguments<'_>) -> io::Result<()> { self.formatter.write_fmt(args) } /// Handles the given template property evaluation error. /// /// This usually prints the given error inline, and returns `Ok`. It's up to /// caller to decide whether or not to continue template processing on `Ok`. /// For example, `if(cond, ..)` expression will terminate if the `cond` /// failed to evaluate, whereas `concat(x, y, ..)` will continue processing. /// /// If `Err` is returned, the error should be propagated. 
pub fn handle_error(&mut self, err: TemplatePropertyError) -> io::Result<()> { (self.error_handler)(self.formatter, err) } } impl<'a> AsMut<dyn Formatter + 'a> for TemplateFormatter<'a> { fn as_mut(&mut self) -> &mut (dyn Formatter + 'a) { self.formatter } } pub fn format_joined<I, S>( formatter: &mut TemplateFormatter, contents: I, separator: S, ) -> io::Result<()> where I: IntoIterator, I::Item: Template, S: Template, { format_joined_with(formatter, contents, separator, |formatter, item| { item.format(formatter) }) } fn format_joined_with<I, S, F>( formatter: &mut TemplateFormatter, contents: I, separator: S, mut format_item: F, ) -> io::Result<()> where I: IntoIterator, S: Template, F: FnMut(&mut TemplateFormatter, I::Item) -> io::Result<()>, { let mut contents_iter = contents.into_iter().fuse(); if let Some(item) = contents_iter.next() { format_item(formatter, item)?; } for item in contents_iter { separator.format(formatter)?; format_item(formatter, item)?; } Ok(()) } fn format_labeled<T: Template + ?Sized>( formatter: &mut TemplateFormatter, content: &T, labels: &[String], ) -> io::Result<()> { for label in labels { formatter.push_label(label); } content.format(formatter)?; for _label in labels { formatter.pop_label(); } Ok(()) } type PropertyErrorHandler = fn(&mut dyn Formatter, TemplatePropertyError) -> io::Result<()>; /// Prints property evaluation error as inline template output. 
fn format_property_error_inline( formatter: &mut dyn Formatter, err: TemplatePropertyError, ) -> io::Result<()> { let TemplatePropertyError(err) = &err; let mut formatter = formatter.labeled("error"); write!(formatter, "<")?; write!(formatter.labeled("heading"), "Error: ")?; write!(formatter, "{err}")?; for err in iter::successors(err.source(), |err| err.source()) { write!(formatter, ": {err}")?; } write!(formatter, ">")?; Ok(()) } fn propagate_property_error( _formatter: &mut dyn Formatter, err: TemplatePropertyError, ) -> io::Result<()> { Err(io::Error::other(err.0)) } /// Creates function that renders a template to buffer and returns the buffer /// only if it isn't empty. /// /// This inherits the error handling strategy from the given `formatter`. fn record_non_empty_fn<T: Template + ?Sized>( formatter: &TemplateFormatter, // TODO: T doesn't have to be captured, but "currently, all type parameters // are required to be mentioned in the precise captures list" as of rustc // 1.85.0. ) -> impl Fn(&T) -> Option<io::Result<FormatRecorder>> + use<T> { let rewrap = formatter.rewrap_fn(); move |template| { let mut recorder = FormatRecorder::new(); match template.format(&mut rewrap(&mut recorder)) { Ok(()) if recorder.data().is_empty() => None, // omit empty content Ok(()) => Some(Ok(recorder)), Err(e) => Some(Err(e)), } } }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/lib.rs
cli/src/lib.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. #![deny(unused_must_use)] pub mod cleanup_guard; pub mod cli_util; pub mod command_error; pub mod commands; pub mod commit_templater; pub mod complete; pub mod config; pub mod description_util; pub mod diff_util; pub mod formatter; pub mod generic_templater; #[cfg(feature = "git")] pub mod git_util; #[cfg(not(feature = "git"))] /// A stub module that provides a no-op implementation of some of the functions /// in the `git` module. pub mod git_util { use jj_lib::repo::ReadonlyRepo; use jj_lib::workspace::Workspace; pub fn is_colocated_git_workspace(_workspace: &Workspace, _repo: &ReadonlyRepo) -> bool { false } } pub mod graphlog; pub mod merge_tools; pub mod movement_util; pub mod operation_templater; mod progress; pub mod revset_util; pub mod template_builder; pub mod template_parser; pub mod templater; pub mod text_util; pub mod time_util; pub mod ui;
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/revset_util.rs
cli/src/revset_util.rs
// Copyright 2022-2024 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Utility for parsing and evaluating user-provided revset expressions. use std::collections::HashMap; use std::io; use std::sync::Arc; use itertools::Itertools as _; use jj_lib::backend::CommitId; use jj_lib::commit::Commit; use jj_lib::config::ConfigGetError; use jj_lib::config::ConfigNamePathBuf; use jj_lib::config::ConfigSource; use jj_lib::config::StackedConfig; use jj_lib::id_prefix::IdPrefixContext; use jj_lib::ref_name::RefNameBuf; use jj_lib::ref_name::RemoteNameBuf; use jj_lib::repo::Repo; use jj_lib::revset; use jj_lib::revset::ResolvedRevsetExpression; use jj_lib::revset::Revset; use jj_lib::revset::RevsetAliasesMap; use jj_lib::revset::RevsetDiagnostics; use jj_lib::revset::RevsetEvaluationError; use jj_lib::revset::RevsetExpression; use jj_lib::revset::RevsetExtensions; use jj_lib::revset::RevsetIteratorExt as _; use jj_lib::revset::RevsetParseContext; use jj_lib::revset::RevsetParseError; use jj_lib::revset::RevsetResolutionError; use jj_lib::revset::SymbolResolver; use jj_lib::revset::SymbolResolverExtension; use jj_lib::revset::UserRevsetExpression; use jj_lib::settings::RemoteSettingsMap; use jj_lib::str_util::StringExpression; use jj_lib::str_util::StringMatcher; use thiserror::Error; use crate::command_error::CommandError; use crate::command_error::config_error_with_message; use crate::command_error::print_parse_diagnostics; use 
crate::command_error::revset_parse_error_hint; use crate::command_error::user_error; use crate::command_error::user_error_with_message; use crate::formatter::Formatter; use crate::templater::TemplateRenderer; use crate::ui::Ui; const USER_IMMUTABLE_HEADS: &str = "immutable_heads"; #[derive(Debug, Error)] pub enum UserRevsetEvaluationError { #[error(transparent)] Resolution(RevsetResolutionError), #[error(transparent)] Evaluation(RevsetEvaluationError), } /// Wrapper around `UserRevsetExpression` to provide convenient methods. pub struct RevsetExpressionEvaluator<'repo> { repo: &'repo dyn Repo, extensions: Arc<RevsetExtensions>, id_prefix_context: &'repo IdPrefixContext, expression: Arc<UserRevsetExpression>, } impl<'repo> RevsetExpressionEvaluator<'repo> { pub fn new( repo: &'repo dyn Repo, extensions: Arc<RevsetExtensions>, id_prefix_context: &'repo IdPrefixContext, expression: Arc<UserRevsetExpression>, ) -> Self { Self { repo, extensions, id_prefix_context, expression, } } /// Returns the underlying expression. pub fn expression(&self) -> &Arc<UserRevsetExpression> { &self.expression } /// Intersects the underlying expression with the `other` expression. pub fn intersect_with(&mut self, other: &Arc<UserRevsetExpression>) { self.expression = self.expression.intersection(other); } /// Resolves user symbols in the expression, returns new expression. pub fn resolve(&self) -> Result<Arc<ResolvedRevsetExpression>, RevsetResolutionError> { let symbol_resolver = default_symbol_resolver( self.repo, self.extensions.symbol_resolvers(), self.id_prefix_context, ); self.expression .resolve_user_expression(self.repo, &symbol_resolver) } /// Evaluates the expression. pub fn evaluate(&self) -> Result<Box<dyn Revset + 'repo>, UserRevsetEvaluationError> { self.resolve() .map_err(UserRevsetEvaluationError::Resolution)? .evaluate(self.repo) .map_err(UserRevsetEvaluationError::Evaluation) } /// Evaluates the expression to an iterator over commit ids. 
Entries are /// sorted in reverse topological order. pub fn evaluate_to_commit_ids( &self, ) -> Result< Box<dyn Iterator<Item = Result<CommitId, RevsetEvaluationError>> + 'repo>, UserRevsetEvaluationError, > { Ok(self.evaluate()?.iter()) } /// Evaluates the expression to an iterator over commit objects. Entries are /// sorted in reverse topological order. pub fn evaluate_to_commits( &self, ) -> Result< impl Iterator<Item = Result<Commit, RevsetEvaluationError>> + use<'repo>, UserRevsetEvaluationError, > { Ok(self.evaluate()?.iter().commits(self.repo.store())) } } fn warn_user_redefined_builtin( ui: &Ui, source: ConfigSource, name: &str, ) -> Result<(), CommandError> { match source { ConfigSource::Default => (), ConfigSource::EnvBase | ConfigSource::User | ConfigSource::Repo | ConfigSource::Workspace | ConfigSource::EnvOverrides | ConfigSource::CommandArg => { let checked_mutability_builtins = ["mutable()", "immutable()", "builtin_immutable_heads()"]; if checked_mutability_builtins.contains(&name) { writeln!( ui.warning_default(), "Redefining `revset-aliases.{name}` is not recommended; redefine \ `immutable_heads()` instead", )?; } } } Ok(()) } pub fn load_revset_aliases( ui: &Ui, stacked_config: &StackedConfig, ) -> Result<RevsetAliasesMap, CommandError> { let table_name = ConfigNamePathBuf::from_iter(["revset-aliases"]); let mut aliases_map = RevsetAliasesMap::new(); // Load from all config layers in order. 'f(x)' in default layer should be // overridden by 'f(a)' in user. 
for layer in stacked_config.layers() { let table = match layer.look_up_table(&table_name) { Ok(Some(table)) => table, Ok(None) => continue, Err(item) => { return Err(ConfigGetError::Type { name: table_name.to_string(), error: format!("Expected a table, but is {}", item.type_name()).into(), source_path: layer.path.clone(), } .into()); } }; for (decl, item) in table.iter() { warn_user_redefined_builtin(ui, layer.source, decl)?; let r = item .as_str() .ok_or_else(|| format!("Expected a string, but is {}", item.type_name())) .and_then(|v| aliases_map.insert(decl, v).map_err(|e| e.to_string())); if let Err(s) = r { writeln!( ui.warning_default(), "Failed to load `{table_name}.{decl}`: {s}" )?; } } } Ok(aliases_map) } /// Wraps the given `IdPrefixContext` in `SymbolResolver` to be passed in to /// `evaluate()`. pub fn default_symbol_resolver<'a>( repo: &'a dyn Repo, extensions: &[impl AsRef<dyn SymbolResolverExtension>], id_prefix_context: &'a IdPrefixContext, ) -> SymbolResolver<'a> { SymbolResolver::new(repo, extensions).with_id_prefix_context(id_prefix_context) } /// Parses user-configured expression defining the heads of the immutable set. /// Includes the root commit. pub fn parse_immutable_heads_expression( diagnostics: &mut RevsetDiagnostics, context: &RevsetParseContext, ) -> Result<Arc<UserRevsetExpression>, RevsetParseError> { let (_, _, immutable_heads_str) = context .aliases_map .get_function(USER_IMMUTABLE_HEADS, 0) .unwrap(); let heads = revset::parse(diagnostics, immutable_heads_str, context)?; Ok(heads.union(&RevsetExpression::root())) } /// Prints warning if `trunk()` alias cannot be resolved. This alias could be /// generated by `jj git init`/`clone`. 
pub(super) fn warn_unresolvable_trunk( ui: &Ui, repo: &dyn Repo, context: &RevsetParseContext, ) -> io::Result<()> { let (_, _, revset_str) = context .aliases_map .get_function("trunk", 0) .expect("trunk() should be defined by default"); let Ok(expression) = revset::parse(&mut RevsetDiagnostics::new(), revset_str, context) else { // Parse error would have been reported. return Ok(()); }; // Not using IdPrefixContext since trunk() revset shouldn't contain short // prefixes. let symbol_resolver = SymbolResolver::new(repo, context.extensions.symbol_resolvers()); if let Err(err) = expression.resolve_user_expression(repo, &symbol_resolver) { writeln!( ui.warning_default(), "Failed to resolve `revset-aliases.trunk()`: {err}" )?; writeln!( ui.hint_default(), "Use `jj config edit --repo` to adjust the `trunk()` alias." )?; } Ok(()) } pub(super) fn evaluate_revset_to_single_commit<'a>( revision_str: &str, expression: &RevsetExpressionEvaluator<'_>, commit_summary_template: impl FnOnce() -> TemplateRenderer<'a, Commit>, ) -> Result<Commit, CommandError> { let mut iter = expression.evaluate_to_commits()?.fuse(); match (iter.next(), iter.next()) { (Some(commit), None) => Ok(commit?), (None, _) => Err(user_error(format!( "Revset `{revision_str}` didn't resolve to any revisions" ))), (Some(commit0), Some(commit1)) => { let mut iter = [commit0, commit1].into_iter().chain(iter); let commits: Vec<_> = iter.by_ref().take(5).try_collect()?; let elided = iter.next().is_some(); Err(format_multiple_revisions_error( revision_str, &commits, elided, &commit_summary_template(), )) } } } fn format_multiple_revisions_error( revision_str: &str, commits: &[Commit], elided: bool, template: &TemplateRenderer<'_, Commit>, ) -> CommandError { assert!(commits.len() >= 2); let mut cmd_err = user_error(format!( "Revset `{revision_str}` resolved to more than one revision" )); let write_commits_summary = |formatter: &mut dyn Formatter| { for commit in commits { write!(formatter, " ")?; 
template.format(commit, formatter)?; writeln!(formatter)?; } if elided { writeln!(formatter, " ...")?; } Ok(()) }; cmd_err.add_formatted_hint_with(|formatter| { writeln!( formatter, "The revset `{revision_str}` resolved to these revisions:" )?; write_commits_summary(formatter) }); cmd_err } #[derive(Debug, Error)] #[error("Failed to parse bookmark name: {}", source.kind())] pub struct BookmarkNameParseError { pub input: String, pub source: RevsetParseError, } /// Parses bookmark name specified in revset syntax. pub fn parse_bookmark_name(text: &str) -> Result<RefNameBuf, BookmarkNameParseError> { revset::parse_symbol(text) .map(Into::into) .map_err(|source| BookmarkNameParseError { input: text.to_owned(), source, }) } #[derive(Debug, Error)] #[error("Failed to parse tag name: {}", source.kind())] pub struct TagNameParseError { pub source: RevsetParseError, } /// Parses tag name specified in revset syntax. pub fn parse_tag_name(text: &str) -> Result<RefNameBuf, TagNameParseError> { revset::parse_symbol(text) .map(Into::into) .map_err(|source| TagNameParseError { source }) } /// Parses bookmark/tag/remote name patterns and unions them all. pub fn parse_union_name_patterns<I>(ui: &Ui, texts: I) -> Result<StringExpression, CommandError> where I: IntoIterator, I::Item: AsRef<str>, { let mut diagnostics = RevsetDiagnostics::new(); let expressions = texts .into_iter() .map(|text| revset::parse_string_expression(&mut diagnostics, text.as_ref())) .try_collect() .map_err(|err| { // From<RevsetParseError>, but with different message let hint = revset_parse_error_hint(&err); let message = format!("Failed to parse name pattern: {}", err.kind()); let mut cmd_err = user_error_with_message(message, err); cmd_err.extend_hints(hint); cmd_err })?; print_parse_diagnostics(ui, "In name pattern", &diagnostics)?; Ok(StringExpression::union_all(expressions)) } /// Parses the given `remotes.<name>.auto-track-bookmarks` settings into a map /// of string matchers. 
pub fn parse_remote_auto_track_bookmarks_map( ui: &Ui, remote_settings: &RemoteSettingsMap, ) -> Result<HashMap<RemoteNameBuf, StringMatcher>, CommandError> { let mut matchers = HashMap::new(); for (name, settings) in remote_settings { let Some(text) = &settings.auto_track_bookmarks else { continue; }; let mut diagnostics = RevsetDiagnostics::new(); let expr = revset::parse_string_expression(&mut diagnostics, text).map_err(|err| { // From<RevsetParseError>, but with different message and error kind let hint = revset_parse_error_hint(&err); let message = format!( "Invalid `remotes.{}.auto-track-bookmarks`: {}", name.as_symbol(), err.kind() ); let mut cmd_err = config_error_with_message(message, err); cmd_err.extend_hints(hint); cmd_err })?; print_parse_diagnostics( ui, &format!("In `remotes.{}.auto-track-bookmarks`", name.as_symbol()), &diagnostics, )?; matchers.insert(name.clone(), expr.to_matcher()); } Ok(matchers) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/generic_templater.rs
cli/src/generic_templater.rs
// Copyright 2024 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::cmp::Ordering; use std::collections::HashMap; use jj_lib::settings::UserSettings; use crate::template_builder; use crate::template_builder::BuildContext; use crate::template_builder::CoreTemplateBuildFnTable; use crate::template_builder::CoreTemplatePropertyKind; use crate::template_builder::CoreTemplatePropertyVar; use crate::template_builder::TemplateLanguage; use crate::template_parser; use crate::template_parser::FunctionCallNode; use crate::template_parser::TemplateDiagnostics; use crate::template_parser::TemplateParseResult; use crate::templater::BoxedSerializeProperty; use crate::templater::BoxedTemplateProperty; use crate::templater::ListTemplate; use crate::templater::Template; use crate::templater::TemplatePropertyExt as _; /// General-purpose template language for basic value types. /// /// This template language only supports the core template property types (plus /// the self type `C`.) The self type `C` is usually a tuple or struct of value /// types. It's cloned several times internally. Keyword functions need to be /// registered to extract properties from the self object. pub struct GenericTemplateLanguage<'a, C> { settings: UserSettings, build_fn_table: GenericTemplateBuildFnTable<'a, C>, } impl<'a, C> GenericTemplateLanguage<'a, C> where C: serde::Serialize + 'a, { /// Sets up environment with no keywords. 
/// /// New keyword functions can be registered by `add_keyword()`. pub fn new(settings: &UserSettings) -> Self { Self::with_keywords(HashMap::new(), settings) } /// Sets up environment with the given `keywords` table. pub fn with_keywords( keywords: GenericTemplateBuildKeywordFnMap<'a, C>, settings: &UserSettings, ) -> Self { Self { // Clone settings to keep lifetime simple. It's cheap. settings: settings.clone(), build_fn_table: GenericTemplateBuildFnTable { core: CoreTemplateBuildFnTable::builtin(), keywords, }, } } /// Registers new function that translates keyword to property. /// /// A keyword function returns `Self::Property`, which is basically a /// closure tagged by its return type. The inner closure is usually wrapped /// by `TemplateFunction`. /// /// ```ignore /// language.add_keyword("name", |self_property| { /// let out_property = self_property.map(|v| v.to_string()); /// Ok(out_property.into_dyn_wrapped()) /// }); /// ``` pub fn add_keyword<F>(&mut self, name: &'static str, build: F) where F: Fn( BoxedTemplateProperty<'a, C>, ) -> TemplateParseResult<GenericTemplatePropertyKind<'a, C>> + 'a, { self.build_fn_table.keywords.insert(name, Box::new(build)); } } impl<'a, C> TemplateLanguage<'a> for GenericTemplateLanguage<'a, C> where C: serde::Serialize + 'a, { type Property = GenericTemplatePropertyKind<'a, C>; fn settings(&self) -> &UserSettings { &self.settings } fn build_function( &self, diagnostics: &mut TemplateDiagnostics, build_ctx: &BuildContext<Self::Property>, function: &FunctionCallNode, ) -> TemplateParseResult<Self::Property> { let table = &self.build_fn_table.core; table.build_function(self, diagnostics, build_ctx, function) } fn build_method( &self, diagnostics: &mut TemplateDiagnostics, build_ctx: &BuildContext<Self::Property>, property: Self::Property, function: &FunctionCallNode, ) -> TemplateParseResult<Self::Property> { let type_name = property.type_name(); match property { GenericTemplatePropertyKind::Core(property) => { let table = 
&self.build_fn_table.core; table.build_method(self, diagnostics, build_ctx, property, function) } GenericTemplatePropertyKind::Self_(property) => { let table = &self.build_fn_table.keywords; let build = template_parser::lookup_method(type_name, table, function)?; // For simplicity, only 0-ary method is supported. function.expect_no_arguments()?; build(property) } } } } pub enum GenericTemplatePropertyKind<'a, C> { Core(CoreTemplatePropertyKind<'a>), Self_(BoxedTemplateProperty<'a, C>), } template_builder::impl_core_property_wrappers!(<'a, C> GenericTemplatePropertyKind<'a, C> => Core); /// Implements conversion trait for the self property type. /// /// Since we cannot guarantee that the generic type `C` does not conflict with /// the core template types, the conversion trait has to be implemented for each /// concrete type. macro_rules! impl_self_property_wrapper { ($context:path) => { $crate::template_builder::impl_property_wrappers!( $crate::generic_templater::GenericTemplatePropertyKind<'static, $context> { Self_($context), } ); }; (<$a:lifetime> $context:path) => { $crate::template_builder::impl_property_wrappers!( <$a> $crate::generic_templater::GenericTemplatePropertyKind<$a, $context> { Self_($context), } ); }; } pub(crate) use impl_self_property_wrapper; impl<'a, C> CoreTemplatePropertyVar<'a> for GenericTemplatePropertyKind<'a, C> where C: serde::Serialize + 'a, { fn wrap_template(template: Box<dyn Template + 'a>) -> Self { Self::Core(CoreTemplatePropertyKind::wrap_template(template)) } fn wrap_list_template(template: Box<dyn ListTemplate + 'a>) -> Self { Self::Core(CoreTemplatePropertyKind::wrap_list_template(template)) } fn type_name(&self) -> &'static str { match self { Self::Core(property) => property.type_name(), Self::Self_(_) => "Self", } } fn try_into_boolean(self) -> Option<BoxedTemplateProperty<'a, bool>> { match self { Self::Core(property) => property.try_into_boolean(), Self::Self_(_) => None, } } fn try_into_integer(self) -> 
Option<BoxedTemplateProperty<'a, i64>> { match self { Self::Core(property) => property.try_into_integer(), Self::Self_(_) => None, } } fn try_into_stringify(self) -> Option<BoxedTemplateProperty<'a, String>> { match self { Self::Core(property) => property.try_into_stringify(), Self::Self_(_) => None, } } fn try_into_serialize(self) -> Option<BoxedSerializeProperty<'a>> { match self { Self::Core(property) => property.try_into_serialize(), Self::Self_(property) => Some(property.into_serialize()), } } fn try_into_template(self) -> Option<Box<dyn Template + 'a>> { match self { Self::Core(property) => property.try_into_template(), Self::Self_(_) => None, } } fn try_into_eq(self, other: Self) -> Option<BoxedTemplateProperty<'a, bool>> { match (self, other) { (Self::Core(lhs), Self::Core(rhs)) => lhs.try_into_eq(rhs), (Self::Core(_), _) => None, (Self::Self_(_), _) => None, } } fn try_into_cmp(self, other: Self) -> Option<BoxedTemplateProperty<'a, Ordering>> { match (self, other) { (Self::Core(lhs), Self::Core(rhs)) => lhs.try_into_cmp(rhs), (Self::Core(_), _) => None, (Self::Self_(_), _) => None, } } } /// Function that translates keyword (or 0-ary method call node of the self type /// `C`.) /// /// Because the `GenericTemplateLanguage` doesn't provide a way to pass around /// global resources, the keyword function is allowed to capture resources. pub type GenericTemplateBuildKeywordFn<'a, C> = Box< dyn Fn(BoxedTemplateProperty<'a, C>) -> TemplateParseResult<GenericTemplatePropertyKind<'a, C>> + 'a, >; /// Table of functions that translate keyword node. pub type GenericTemplateBuildKeywordFnMap<'a, C> = HashMap<&'static str, GenericTemplateBuildKeywordFn<'a, C>>; /// Symbol table of methods available in the general-purpose template. struct GenericTemplateBuildFnTable<'a, C> { core: CoreTemplateBuildFnTable< 'a, GenericTemplateLanguage<'a, C>, GenericTemplatePropertyKind<'a, C>, >, keywords: GenericTemplateBuildKeywordFnMap<'a, C>, }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/cli_util.rs
cli/src/cli_util.rs
// Copyright 2022 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::borrow::Cow; use std::cell::OnceCell; use std::collections::BTreeMap; use std::collections::HashMap; use std::collections::HashSet; use std::env; use std::ffi::OsString; use std::fmt; use std::fmt::Debug; use std::io; use std::io::Write as _; use std::mem; use std::path::Path; use std::path::PathBuf; use std::rc::Rc; use std::str::FromStr; use std::sync::Arc; use std::time::SystemTime; use bstr::ByteVec as _; use chrono::TimeZone as _; use clap::ArgAction; use clap::ArgMatches; use clap::Command; use clap::FromArgMatches as _; use clap::builder::MapValueParser; use clap::builder::NonEmptyStringValueParser; use clap::builder::TypedValueParser as _; use clap::builder::ValueParserFactory; use clap::error::ContextKind; use clap::error::ContextValue; use clap_complete::ArgValueCandidates; use clap_complete::ArgValueCompleter; use indexmap::IndexMap; use indexmap::IndexSet; use indoc::indoc; use indoc::writedoc; use itertools::Itertools as _; use jj_lib::backend::BackendResult; use jj_lib::backend::ChangeId; use jj_lib::backend::CommitId; use jj_lib::backend::TreeValue; use jj_lib::commit::Commit; use jj_lib::config::ConfigGetError; use jj_lib::config::ConfigGetResultExt as _; use jj_lib::config::ConfigLayer; use jj_lib::config::ConfigMigrationRule; use jj_lib::config::ConfigNamePathBuf; use jj_lib::config::ConfigSource; use jj_lib::config::StackedConfig; use 
jj_lib::conflicts::ConflictMarkerStyle; use jj_lib::fileset; use jj_lib::fileset::FilesetDiagnostics; use jj_lib::fileset::FilesetExpression; use jj_lib::gitignore::GitIgnoreError; use jj_lib::gitignore::GitIgnoreFile; use jj_lib::id_prefix::IdPrefixContext; use jj_lib::lock::FileLock; use jj_lib::matchers::Matcher; use jj_lib::matchers::NothingMatcher; use jj_lib::merge::Diff; use jj_lib::merge::MergedTreeValue; use jj_lib::merged_tree::MergedTree; use jj_lib::object_id::ObjectId as _; use jj_lib::op_heads_store; use jj_lib::op_store::OpStoreError; use jj_lib::op_store::OperationId; use jj_lib::op_store::RefTarget; use jj_lib::op_walk; use jj_lib::op_walk::OpsetEvaluationError; use jj_lib::operation::Operation; use jj_lib::ref_name::RefName; use jj_lib::ref_name::RefNameBuf; use jj_lib::ref_name::RemoteName; use jj_lib::ref_name::RemoteRefSymbol; use jj_lib::ref_name::WorkspaceName; use jj_lib::ref_name::WorkspaceNameBuf; use jj_lib::repo::CheckOutCommitError; use jj_lib::repo::EditCommitError; use jj_lib::repo::MutableRepo; use jj_lib::repo::ReadonlyRepo; use jj_lib::repo::Repo; use jj_lib::repo::RepoLoader; use jj_lib::repo::StoreFactories; use jj_lib::repo::StoreLoadError; use jj_lib::repo::merge_factories_map; use jj_lib::repo_path::RepoPath; use jj_lib::repo_path::RepoPathBuf; use jj_lib::repo_path::RepoPathUiConverter; use jj_lib::repo_path::UiPathParseError; use jj_lib::revset; use jj_lib::revset::ResolvedRevsetExpression; use jj_lib::revset::RevsetAliasesMap; use jj_lib::revset::RevsetDiagnostics; use jj_lib::revset::RevsetExpression; use jj_lib::revset::RevsetExtensions; use jj_lib::revset::RevsetFilterPredicate; use jj_lib::revset::RevsetFunction; use jj_lib::revset::RevsetIteratorExt as _; use jj_lib::revset::RevsetModifier; use jj_lib::revset::RevsetParseContext; use jj_lib::revset::RevsetWorkspaceContext; use jj_lib::revset::SymbolResolverExtension; use jj_lib::revset::UserRevsetExpression; use jj_lib::rewrite::restore_tree; use 
jj_lib::settings::HumanByteSize; use jj_lib::settings::UserSettings; use jj_lib::store::Store; use jj_lib::str_util::StringExpression; use jj_lib::str_util::StringMatcher; use jj_lib::str_util::StringPattern; use jj_lib::transaction::Transaction; use jj_lib::working_copy; use jj_lib::working_copy::CheckoutStats; use jj_lib::working_copy::LockedWorkingCopy; use jj_lib::working_copy::SnapshotOptions; use jj_lib::working_copy::SnapshotStats; use jj_lib::working_copy::UntrackedReason; use jj_lib::working_copy::WorkingCopy; use jj_lib::working_copy::WorkingCopyFactory; use jj_lib::working_copy::WorkingCopyFreshness; use jj_lib::workspace::DefaultWorkspaceLoaderFactory; use jj_lib::workspace::LockedWorkspace; use jj_lib::workspace::WorkingCopyFactories; use jj_lib::workspace::Workspace; use jj_lib::workspace::WorkspaceLoadError; use jj_lib::workspace::WorkspaceLoader; use jj_lib::workspace::WorkspaceLoaderFactory; use jj_lib::workspace::default_working_copy_factories; use jj_lib::workspace::get_working_copy_factory; use pollster::FutureExt as _; use tracing::instrument; use tracing_chrome::ChromeLayerBuilder; use tracing_subscriber::prelude::*; use crate::command_error::CommandError; use crate::command_error::cli_error; use crate::command_error::config_error_with_message; use crate::command_error::handle_command_result; use crate::command_error::internal_error; use crate::command_error::internal_error_with_message; use crate::command_error::print_parse_diagnostics; use crate::command_error::user_error; use crate::command_error::user_error_with_hint; use crate::command_error::user_error_with_message; use crate::commit_templater::CommitTemplateLanguage; use crate::commit_templater::CommitTemplateLanguageExtension; use crate::complete; use crate::config::ConfigArgKind; use crate::config::ConfigEnv; use crate::config::RawConfig; use crate::config::config_from_environment; use crate::config::parse_config_args; use crate::description_util::TextEditor; use crate::diff_util; use 
crate::diff_util::DiffFormat; use crate::diff_util::DiffFormatArgs; use crate::diff_util::DiffRenderer; use crate::formatter::FormatRecorder; use crate::formatter::Formatter; use crate::formatter::FormatterExt as _; use crate::merge_tools::DiffEditor; use crate::merge_tools::MergeEditor; use crate::merge_tools::MergeToolConfigError; use crate::operation_templater::OperationTemplateLanguage; use crate::operation_templater::OperationTemplateLanguageExtension; use crate::revset_util; use crate::revset_util::RevsetExpressionEvaluator; use crate::revset_util::parse_union_name_patterns; use crate::template_builder; use crate::template_builder::TemplateLanguage; use crate::template_parser::TemplateAliasesMap; use crate::template_parser::TemplateDiagnostics; use crate::templater::TemplateRenderer; use crate::templater::WrapTemplateProperty; use crate::text_util; use crate::ui::ColorChoice; use crate::ui::Ui; const SHORT_CHANGE_ID_TEMPLATE_TEXT: &str = "format_short_change_id_with_change_offset(self)"; #[derive(Clone)] struct ChromeTracingFlushGuard { _inner: Option<Rc<tracing_chrome::FlushGuard>>, } impl Debug for ChromeTracingFlushGuard { fn fmt(&self, f: &mut std::fmt::Formatter) -> std::fmt::Result { let Self { _inner } = self; f.debug_struct("ChromeTracingFlushGuard") .finish_non_exhaustive() } } /// Handle to initialize or change tracing subscription. #[derive(Clone, Debug)] pub struct TracingSubscription { reload_log_filter: tracing_subscriber::reload::Handle< tracing_subscriber::EnvFilter, tracing_subscriber::Registry, >, _chrome_tracing_flush_guard: ChromeTracingFlushGuard, } impl TracingSubscription { const ENV_VAR_NAME: &str = "JJ_LOG"; /// Initializes tracing with the default configuration. This should be /// called as early as possible. 
pub fn init() -> Self { let filter = tracing_subscriber::EnvFilter::builder() .with_default_directive(tracing::metadata::LevelFilter::ERROR.into()) .with_env_var(Self::ENV_VAR_NAME) .from_env_lossy(); let (filter, reload_log_filter) = tracing_subscriber::reload::Layer::new(filter); let (chrome_tracing_layer, chrome_tracing_flush_guard) = match std::env::var("JJ_TRACE") { Ok(filename) => { let filename = if filename.is_empty() { format!( "jj-trace-{}.json", SystemTime::now() .duration_since(SystemTime::UNIX_EPOCH) .unwrap() .as_secs(), ) } else { filename }; let include_args = std::env::var("JJ_TRACE_INCLUDE_ARGS").is_ok(); let (layer, guard) = ChromeLayerBuilder::new() .file(filename) .include_args(include_args) .build(); ( Some(layer), ChromeTracingFlushGuard { _inner: Some(Rc::new(guard)), }, ) } Err(_) => (None, ChromeTracingFlushGuard { _inner: None }), }; tracing_subscriber::registry() .with( tracing_subscriber::fmt::Layer::default() .with_writer(std::io::stderr) .with_filter(filter), ) .with(chrome_tracing_layer) .init(); Self { reload_log_filter, _chrome_tracing_flush_guard: chrome_tracing_flush_guard, } } pub fn enable_debug_logging(&self) -> Result<(), CommandError> { self.reload_log_filter .modify(|filter| { // The default is INFO. // jj-lib and jj-cli are whitelisted for DEBUG logging. // This ensures that other crates' logging doesn't show up by default. 
*filter = tracing_subscriber::EnvFilter::builder() .with_default_directive(tracing::metadata::LevelFilter::INFO.into()) .with_env_var(Self::ENV_VAR_NAME) .from_env_lossy() .add_directive("jj_lib=debug".parse().unwrap()) .add_directive("jj_cli=debug".parse().unwrap()); }) .map_err(|err| internal_error_with_message("failed to enable debug logging", err))?; tracing::info!("debug logging enabled"); Ok(()) } } #[derive(Clone)] pub struct CommandHelper { data: Rc<CommandHelperData>, } struct CommandHelperData { app: Command, cwd: PathBuf, string_args: Vec<String>, matches: ArgMatches, global_args: GlobalArgs, config_env: ConfigEnv, config_migrations: Vec<ConfigMigrationRule>, raw_config: RawConfig, settings: UserSettings, revset_extensions: Arc<RevsetExtensions>, commit_template_extensions: Vec<Arc<dyn CommitTemplateLanguageExtension>>, operation_template_extensions: Vec<Arc<dyn OperationTemplateLanguageExtension>>, maybe_workspace_loader: Result<Box<dyn WorkspaceLoader>, CommandError>, store_factories: StoreFactories, working_copy_factories: WorkingCopyFactories, workspace_loader_factory: Box<dyn WorkspaceLoaderFactory>, } impl CommandHelper { pub fn app(&self) -> &Command { &self.data.app } /// Canonical form of the current working directory path. /// /// A loaded `Workspace::workspace_root()` also returns a canonical path, so /// relative paths can be easily computed from these paths. pub fn cwd(&self) -> &Path { &self.data.cwd } pub fn string_args(&self) -> &Vec<String> { &self.data.string_args } pub fn matches(&self) -> &ArgMatches { &self.data.matches } pub fn global_args(&self) -> &GlobalArgs { &self.data.global_args } pub fn config_env(&self) -> &ConfigEnv { &self.data.config_env } /// Unprocessed (or unresolved) configuration data. /// /// Use this only if the unmodified config data is needed. For example, `jj /// config set` should use this to write updated data back to file. 
pub fn raw_config(&self) -> &RawConfig { &self.data.raw_config } /// Settings for the current command and workspace. /// /// This may be different from the settings for new workspace created by /// e.g. `jj git init`. There may be conditional variables and repo config /// `.jj/repo/config.toml` loaded for the cwd workspace. pub fn settings(&self) -> &UserSettings { &self.data.settings } /// Resolves configuration for new workspace located at the specified path. pub fn settings_for_new_workspace( &self, workspace_root: &Path, ) -> Result<UserSettings, CommandError> { let mut config_env = self.data.config_env.clone(); let mut raw_config = self.data.raw_config.clone(); let repo_path = workspace_root.join(".jj").join("repo"); config_env.reset_repo_path(&repo_path); config_env.reload_repo_config(&mut raw_config)?; config_env.reset_workspace_path(workspace_root); config_env.reload_workspace_config(&mut raw_config)?; let mut config = config_env.resolve_config(&raw_config)?; // No migration messages here, which would usually be emitted before. jj_lib::config::migrate(&mut config, &self.data.config_migrations)?; Ok(self.data.settings.with_new_config(config)?) } /// Loads text editor from the settings. pub fn text_editor(&self) -> Result<TextEditor, ConfigGetError> { TextEditor::from_settings(self.settings()) } pub fn revset_extensions(&self) -> &Arc<RevsetExtensions> { &self.data.revset_extensions } /// Parses template of the given language into evaluation tree. /// /// This function also loads template aliases from the settings. Use /// `WorkspaceCommandHelper::parse_template()` if you've already /// instantiated the workspace helper. 
pub fn parse_template<'a, C, L>( &self, ui: &Ui, language: &L, template_text: &str, ) -> Result<TemplateRenderer<'a, C>, CommandError> where C: Clone + 'a, L: TemplateLanguage<'a> + ?Sized, L::Property: WrapTemplateProperty<'a, C>, { let mut diagnostics = TemplateDiagnostics::new(); let aliases = load_template_aliases(ui, self.settings().config())?; let template = template_builder::parse(language, &mut diagnostics, template_text, &aliases)?; print_parse_diagnostics(ui, "In template expression", &diagnostics)?; Ok(template) } pub fn workspace_loader(&self) -> Result<&dyn WorkspaceLoader, CommandError> { self.data .maybe_workspace_loader .as_deref() .map_err(Clone::clone) } fn new_workspace_loader_at( &self, workspace_root: &Path, ) -> Result<Box<dyn WorkspaceLoader>, CommandError> { self.data .workspace_loader_factory .create(workspace_root) .map_err(|err| map_workspace_load_error(err, None)) } /// Loads workspace and repo, then snapshots the working copy if allowed. #[instrument(skip(self, ui))] pub fn workspace_helper(&self, ui: &Ui) -> Result<WorkspaceCommandHelper, CommandError> { let (workspace_command, stats) = self.workspace_helper_with_stats(ui)?; print_snapshot_stats(ui, &stats, workspace_command.env().path_converter())?; Ok(workspace_command) } /// Loads workspace and repo, then snapshots the working copy if allowed and /// returns the SnapshotStats. /// /// Note that unless you have a good reason not to do so, you should always /// call [`print_snapshot_stats`] with the [`SnapshotStats`] returned by /// this function to present possible untracked files to the user. 
#[instrument(skip(self, ui))] pub fn workspace_helper_with_stats( &self, ui: &Ui, ) -> Result<(WorkspaceCommandHelper, SnapshotStats), CommandError> { let mut workspace_command = self.workspace_helper_no_snapshot(ui)?; let (workspace_command, stats) = match workspace_command.maybe_snapshot_impl(ui) { Ok(stats) => (workspace_command, stats), Err(SnapshotWorkingCopyError::Command(err)) => return Err(err), Err(SnapshotWorkingCopyError::StaleWorkingCopy(err)) => { let auto_update_stale = self.settings().get_bool("snapshot.auto-update-stale")?; if !auto_update_stale { return Err(err); } // We detected the working copy was stale and the client is configured to // auto-update-stale, so let's do that now. We need to do it up here, not at a // lower level (e.g. inside snapshot_working_copy()) to avoid recursive locking // of the working copy. self.recover_stale_working_copy(ui)? } }; Ok((workspace_command, stats)) } /// Loads workspace and repo, but never snapshots the working copy. Most /// commands should use `workspace_helper()` instead. 
#[instrument(skip(self, ui))] pub fn workspace_helper_no_snapshot( &self, ui: &Ui, ) -> Result<WorkspaceCommandHelper, CommandError> { let workspace = self.load_workspace()?; let op_head = self.resolve_operation(ui, workspace.repo_loader())?; let repo = workspace.repo_loader().load_at(&op_head)?; let env = self.workspace_environment(ui, &workspace)?; revset_util::warn_unresolvable_trunk(ui, repo.as_ref(), &env.revset_parse_context())?; WorkspaceCommandHelper::new(ui, workspace, repo, env, self.is_at_head_operation()) } pub fn get_working_copy_factory(&self) -> Result<&dyn WorkingCopyFactory, CommandError> { let loader = self.workspace_loader()?; // We convert StoreLoadError -> WorkspaceLoadError -> CommandError let factory: Result<_, WorkspaceLoadError> = get_working_copy_factory(loader, &self.data.working_copy_factories) .map_err(|e| e.into()); let factory = factory.map_err(|err| { map_workspace_load_error(err, self.data.global_args.repository.as_deref()) })?; Ok(factory) } /// Loads workspace for the current command. #[instrument(skip_all)] pub fn load_workspace(&self) -> Result<Workspace, CommandError> { let loader = self.workspace_loader()?; loader .load( &self.data.settings, &self.data.store_factories, &self.data.working_copy_factories, ) .map_err(|err| { map_workspace_load_error(err, self.data.global_args.repository.as_deref()) }) } /// Loads workspace located at the specified path. #[instrument(skip(self, settings))] pub fn load_workspace_at( &self, workspace_root: &Path, settings: &UserSettings, ) -> Result<Workspace, CommandError> { let loader = self.new_workspace_loader_at(workspace_root)?; loader .load( settings, &self.data.store_factories, &self.data.working_copy_factories, ) .map_err(|err| map_workspace_load_error(err, None)) } /// Note that unless you have a good reason not to do so, you should always /// call [`print_snapshot_stats`] with the [`SnapshotStats`] returned by /// this function to present possible untracked files to the user. 
pub fn recover_stale_working_copy( &self, ui: &Ui, ) -> Result<(WorkspaceCommandHelper, SnapshotStats), CommandError> { let workspace = self.load_workspace()?; let op_id = workspace.working_copy().operation_id(); match workspace.repo_loader().load_operation(op_id) { Ok(op) => { let repo = workspace.repo_loader().load_at(&op)?; let mut workspace_command = self.for_workable_repo(ui, workspace, repo)?; workspace_command.check_working_copy_writable()?; // Snapshot the current working copy on top of the last known working-copy // operation, then merge the divergent operations. The wc_commit_id of the // merged repo wouldn't change because the old one wins, but it's probably // fine if we picked the new wc_commit_id. let stale_stats = workspace_command .snapshot_working_copy(ui) .map_err(|err| err.into_command_error())?; let wc_commit_id = workspace_command.get_wc_commit_id().unwrap(); let repo = workspace_command.repo().clone(); let stale_wc_commit = repo.store().get_commit(wc_commit_id)?; let mut workspace_command = self.workspace_helper_no_snapshot(ui)?; let repo = workspace_command.repo().clone(); let (mut locked_ws, desired_wc_commit) = workspace_command.unchecked_start_working_copy_mutation()?; match WorkingCopyFreshness::check_stale( locked_ws.locked_wc(), &desired_wc_commit, &repo, )? { WorkingCopyFreshness::Fresh | WorkingCopyFreshness::Updated(_) => { drop(locked_ws); writeln!( ui.status(), "Attempted recovery, but the working copy is not stale" )?; } WorkingCopyFreshness::WorkingCopyStale | WorkingCopyFreshness::SiblingOperation => { let stats = update_stale_working_copy( locked_ws, repo.op_id().clone(), &stale_wc_commit, &desired_wc_commit, )?; workspace_command.print_updated_working_copy_stats( ui, Some(&stale_wc_commit), &desired_wc_commit, &stats, )?; writeln!( ui.status(), "Updated working copy to fresh commit {}", short_commit_hash(desired_wc_commit.id()) )?; } } // There may be Git refs to import, so snapshot again. 
Git HEAD // will also be imported if it was updated after the working // copy became stale. The result wouldn't be ideal, but there // should be no data loss at least. let fresh_stats = workspace_command .maybe_snapshot_impl(ui) .map_err(|err| err.into_command_error())?; let merged_stats = { let SnapshotStats { mut untracked_paths, } = stale_stats; untracked_paths.extend(fresh_stats.untracked_paths); SnapshotStats { untracked_paths } }; Ok((workspace_command, merged_stats)) } Err(e @ OpStoreError::ObjectNotFound { .. }) => { writeln!( ui.status(), "Failed to read working copy's current operation; attempting recovery. Error \ message from read attempt: {e}" )?; let mut workspace_command = self.workspace_helper_no_snapshot(ui)?; let stats = workspace_command.create_and_check_out_recovery_commit(ui)?; Ok((workspace_command, stats)) } Err(e) => Err(e.into()), } } /// Loads command environment for the given `workspace`. pub fn workspace_environment( &self, ui: &Ui, workspace: &Workspace, ) -> Result<WorkspaceCommandEnvironment, CommandError> { WorkspaceCommandEnvironment::new(ui, self, workspace) } /// Returns true if the working copy to be loaded is writable, and therefore /// should usually be snapshotted. pub fn is_working_copy_writable(&self) -> bool { self.is_at_head_operation() && !self.data.global_args.ignore_working_copy } /// Returns true if the current operation is considered to be the head. pub fn is_at_head_operation(&self) -> bool { // TODO: should we accept --at-op=<head_id> as the head op? or should we // make --at-op=@ imply --ignore-working-copy (i.e. not at the head.) matches!( self.data.global_args.at_operation.as_deref(), None | Some("@") ) } /// Resolves the current operation from the command-line argument. /// /// If no `--at-operation` is specified, the head operations will be /// loaded. If there are multiple heads, they'll be merged. 
#[instrument(skip_all)] pub fn resolve_operation( &self, ui: &Ui, repo_loader: &RepoLoader, ) -> Result<Operation, CommandError> { if let Some(op_str) = &self.data.global_args.at_operation { Ok(op_walk::resolve_op_for_load(repo_loader, op_str)?) } else { op_heads_store::resolve_op_heads( repo_loader.op_heads_store().as_ref(), repo_loader.op_store(), |op_heads| { writeln!( ui.status(), "Concurrent modification detected, resolving automatically.", )?; let base_repo = repo_loader.load_at(&op_heads[0])?; // TODO: It may be helpful to print each operation we're merging here let mut tx = start_repo_transaction(&base_repo, &self.data.string_args); for other_op_head in op_heads.into_iter().skip(1) { tx.merge_operation(other_op_head)?; let num_rebased = tx.repo_mut().rebase_descendants()?; if num_rebased > 0 { writeln!( ui.status(), "Rebased {num_rebased} descendant commits onto commits rewritten \ by other operation" )?; } } Ok(tx .write("reconcile divergent operations")? .leave_unpublished() .operation() .clone()) }, ) } } /// Creates helper for the repo whose view is supposed to be in sync with /// the working copy. If `--ignore-working-copy` is not specified, the /// returned helper will attempt to update the working copy. #[instrument(skip_all)] pub fn for_workable_repo( &self, ui: &Ui, workspace: Workspace, repo: Arc<ReadonlyRepo>, ) -> Result<WorkspaceCommandHelper, CommandError> { let env = self.workspace_environment(ui, &workspace)?; let loaded_at_head = true; WorkspaceCommandHelper::new(ui, workspace, repo, env, loaded_at_head) } } /// A ReadonlyRepo along with user-config-dependent derived data. The derived /// data is lazily loaded. struct ReadonlyUserRepo { repo: Arc<ReadonlyRepo>, id_prefix_context: OnceCell<IdPrefixContext>, } impl ReadonlyUserRepo { fn new(repo: Arc<ReadonlyRepo>) -> Self { Self { repo, id_prefix_context: OnceCell::new(), } } } /// A advanceable bookmark to satisfy the "advance-bookmarks" feature. 
/// /// This is a helper for `WorkspaceCommandTransaction`. It provides a /// type-safe way to separate the work of checking whether a bookmark /// can be advanced and actually advancing it. Advancing the bookmark /// never fails, but can't be done until the new `CommitId` is /// available. Splitting the work in this way also allows us to /// identify eligible bookmarks without actually moving them and /// return config errors to the user early. pub struct AdvanceableBookmark { name: RefNameBuf, old_commit_id: CommitId, } /// Parses advance-bookmarks settings into matcher. /// /// Settings are configured in the jj config.toml as lists of string matcher /// expressions for enabled and disabled bookmarks. Example: /// ```toml /// [experimental-advance-branches] /// # Enable the feature for all branches except "main". /// enabled-branches = ["*"] /// disabled-branches = ["main"] /// ``` fn load_advance_bookmarks_matcher( ui: &Ui, settings: &UserSettings, ) -> Result<Option<StringMatcher>, CommandError> { let get_setting = |setting_key: &str| -> Result<Vec<String>, _> { let name = ConfigNamePathBuf::from_iter(["experimental-advance-branches", setting_key]); settings.get(&name) }; // TODO: When we stabilize this feature, enabled/disabled patterns can be // combined into a single matcher expression. let enabled_names = get_setting("enabled-branches")?; let disabled_names = get_setting("disabled-branches")?; let enabled_expr = parse_union_name_patterns(ui, &enabled_names)?; let disabled_expr = parse_union_name_patterns(ui, &disabled_names)?; if enabled_names.is_empty() { Ok(None) } else { let expr = enabled_expr.intersection(disabled_expr.negated()); Ok(Some(expr.to_matcher())) } } /// Metadata and configuration loaded for a specific workspace. 
pub struct WorkspaceCommandEnvironment { command: CommandHelper, settings: UserSettings, revset_aliases_map: RevsetAliasesMap, template_aliases_map: TemplateAliasesMap, default_ignored_remote: Option<&'static RemoteName>, revsets_use_glob_by_default: bool, path_converter: RepoPathUiConverter, workspace_name: WorkspaceNameBuf, immutable_heads_expression: Arc<UserRevsetExpression>, short_prefixes_expression: Option<Arc<UserRevsetExpression>>, conflict_marker_style: ConflictMarkerStyle, } impl WorkspaceCommandEnvironment { #[instrument(skip_all)] fn new(ui: &Ui, command: &CommandHelper, workspace: &Workspace) -> Result<Self, CommandError> { let settings = workspace.settings(); let revset_aliases_map = revset_util::load_revset_aliases(ui, settings.config())?; let template_aliases_map = load_template_aliases(ui, settings.config())?; let default_ignored_remote = default_ignored_remote_name(workspace.repo_loader().store()); let path_converter = RepoPathUiConverter::Fs { cwd: command.cwd().to_owned(), base: workspace.workspace_root().to_owned(), }; let mut env = Self { command: command.clone(), settings: settings.clone(), revset_aliases_map, template_aliases_map, default_ignored_remote, revsets_use_glob_by_default: settings.get("ui.revsets-use-glob-by-default")?, path_converter, workspace_name: workspace.workspace_name().to_owned(), immutable_heads_expression: RevsetExpression::root(), short_prefixes_expression: None, conflict_marker_style: settings.get("ui.conflict-marker-style")?, }; env.immutable_heads_expression = env.load_immutable_heads_expression(ui)?; env.short_prefixes_expression = env.load_short_prefixes_expression(ui)?; Ok(env) } pub(crate) fn path_converter(&self) -> &RepoPathUiConverter { &self.path_converter } pub fn workspace_name(&self) -> &WorkspaceName { &self.workspace_name } pub(crate) fn revset_parse_context(&self) -> RevsetParseContext<'_> { let workspace_context = RevsetWorkspaceContext { path_converter: &self.path_converter, workspace_name: 
&self.workspace_name, }; let now = if let Some(timestamp) = self.settings.commit_timestamp() { chrono::Local .timestamp_millis_opt(timestamp.timestamp.0) .unwrap()
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
true
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/graphlog.rs
cli/src/graphlog.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::hash::Hash; use std::io; use std::io::Write; use jj_lib::config::ConfigGetError; use jj_lib::graph::GraphEdge; use jj_lib::graph::GraphEdgeType; use jj_lib::settings::UserSettings; use renderdag::Ancestor; use renderdag::GraphRowRenderer; use renderdag::Renderer; pub trait GraphLog<K: Clone + Eq + Hash> { fn add_node( &mut self, id: &K, edges: &[GraphEdge<K>], node_symbol: &str, text: &str, ) -> io::Result<()>; fn width(&self, id: &K, edges: &[GraphEdge<K>]) -> usize; } pub struct SaplingGraphLog<'writer, R> { renderer: R, writer: &'writer mut dyn Write, } fn convert_graph_edge_into_ancestor<K: Clone>(e: &GraphEdge<K>) -> Ancestor<K> { match e.edge_type { GraphEdgeType::Direct => Ancestor::Parent(e.target.clone()), GraphEdgeType::Indirect => Ancestor::Ancestor(e.target.clone()), GraphEdgeType::Missing => Ancestor::Anonymous, } } impl<K, R> GraphLog<K> for SaplingGraphLog<'_, R> where K: Clone + Eq + Hash, R: Renderer<K, Output = String>, { fn add_node( &mut self, id: &K, edges: &[GraphEdge<K>], node_symbol: &str, text: &str, ) -> io::Result<()> { let row = self.renderer.next_row( id.clone(), edges.iter().map(convert_graph_edge_into_ancestor).collect(), node_symbol.into(), text.into(), ); write!(self.writer, "{row}") } fn width(&self, id: &K, edges: &[GraphEdge<K>]) -> usize { let parents = edges.iter().map(convert_graph_edge_into_ancestor).collect(); let w: u64 = 
self.renderer.width(Some(id), Some(&parents)); w.try_into().unwrap() } } impl<'writer, R> SaplingGraphLog<'writer, R> { pub fn create<K>( renderer: R, formatter: &'writer mut dyn Write, ) -> Box<dyn GraphLog<K> + 'writer> where K: Clone + Eq + Hash + 'writer, R: Renderer<K, Output = String> + 'writer, { Box::new(SaplingGraphLog { renderer, writer: formatter, }) } } #[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Deserialize)] #[serde(rename_all(deserialize = "kebab-case"))] pub enum GraphStyle { Ascii, AsciiLarge, Curved, Square, } impl GraphStyle { pub fn from_settings(settings: &UserSettings) -> Result<Self, ConfigGetError> { settings.get("ui.graph.style") } } pub fn get_graphlog<'a, K: Clone + Eq + Hash + 'a>( style: GraphStyle, formatter: &'a mut dyn Write, ) -> Box<dyn GraphLog<K> + 'a> { let builder = GraphRowRenderer::new().output().with_min_row_height(0); match style { GraphStyle::Ascii => SaplingGraphLog::create(builder.build_ascii(), formatter), GraphStyle::AsciiLarge => SaplingGraphLog::create(builder.build_ascii_large(), formatter), GraphStyle::Curved => SaplingGraphLog::create(builder.build_box_drawing(), formatter), GraphStyle::Square => { SaplingGraphLog::create(builder.build_box_drawing().with_square_glyphs(), formatter) } } }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/description_util.rs
cli/src/description_util.rs
use std::collections::HashMap; use std::fs; use std::io; use std::io::Write as _; use std::path::Path; use std::path::PathBuf; use std::process::ExitStatus; use bstr::ByteVec as _; use indexmap::IndexMap; use indoc::indoc; use itertools::FoldWhile; use itertools::Itertools as _; use jj_lib::backend::CommitId; use jj_lib::commit::Commit; use jj_lib::commit_builder::DetachedCommitBuilder; use jj_lib::config::ConfigGetError; use jj_lib::file_util::IoResultExt as _; use jj_lib::file_util::PathError; use jj_lib::settings::UserSettings; use jj_lib::trailer::parse_description_trailers; use jj_lib::trailer::parse_trailers; use thiserror::Error; use crate::cli_util::WorkspaceCommandTransaction; use crate::cli_util::short_commit_hash; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::config::CommandNameAndArgs; use crate::formatter::PlainTextFormatter; use crate::templater::TemplateRenderer; use crate::text_util; use crate::ui::Ui; #[derive(Debug, Error)] pub enum TextEditError { #[error("Failed to run editor '{name}'")] FailedToRun { name: String, source: io::Error }, #[error("Editor '{command}' exited with {status}")] ExitStatus { command: String, status: ExitStatus }, } #[derive(Debug, Error)] #[error("Failed to edit {name}", name = name.as_deref().unwrap_or("file"))] pub struct TempTextEditError { #[source] pub error: Box<dyn std::error::Error + Send + Sync>, /// Short description of the edited content. pub name: Option<String>, /// Path to the temporary file. pub path: Option<PathBuf>, } impl TempTextEditError { fn new(error: Box<dyn std::error::Error + Send + Sync>, path: Option<PathBuf>) -> Self { Self { error, name: None, path, } } /// Adds short description of the edited content. pub fn with_name(mut self, name: impl Into<String>) -> Self { self.name = Some(name.into()); self } } /// Configured text editor. 
#[derive(Clone, Debug)] pub struct TextEditor { editor: CommandNameAndArgs, dir: Option<PathBuf>, } impl TextEditor { pub fn from_settings(settings: &UserSettings) -> Result<Self, ConfigGetError> { let editor = settings.get("ui.editor")?; Ok(Self { editor, dir: None }) } pub fn with_temp_dir(mut self, dir: impl Into<PathBuf>) -> Self { self.dir = Some(dir.into()); self } /// Opens the given `path` in editor. pub fn edit_file(&self, path: impl AsRef<Path>) -> Result<(), TextEditError> { let mut cmd = self.editor.to_command(); cmd.arg(path.as_ref()); tracing::info!(?cmd, "running editor"); let status = cmd.status().map_err(|source| TextEditError::FailedToRun { name: self.editor.split_name().into_owned(), source, })?; if status.success() { Ok(()) } else { let command = self.editor.to_string(); Err(TextEditError::ExitStatus { command, status }) } } /// Writes the given `content` to temporary file and opens it in editor. pub fn edit_str( &self, content: impl AsRef<[u8]>, suffix: Option<&str>, ) -> Result<String, TempTextEditError> { let path = self .write_temp_file(content.as_ref(), suffix) .map_err(|err| TempTextEditError::new(err.into(), None))?; self.edit_file(&path) .map_err(|err| TempTextEditError::new(err.into(), Some(path.clone())))?; let edited = fs::read_to_string(&path) .context(&path) .map_err(|err| TempTextEditError::new(err.into(), Some(path.clone())))?; // Delete the file only if everything went well. 
fs::remove_file(path).ok(); Ok(edited) } fn write_temp_file(&self, content: &[u8], suffix: Option<&str>) -> Result<PathBuf, PathError> { let dir = self.dir.clone().unwrap_or_else(tempfile::env::temp_dir); let mut file = tempfile::Builder::new() .prefix("editor-") .suffix(suffix.unwrap_or("")) .tempfile_in(&dir) .context(&dir)?; file.write_all(content).context(file.path())?; let (_, path) = file .keep() .or_else(|err| Err(err.error).context(err.file.path()))?; Ok(path) } } fn append_blank_line(text: &mut String) { if !text.is_empty() && !text.ends_with('\n') { text.push('\n'); } let last_line = text.lines().next_back(); if last_line.is_some_and(|line| line.starts_with("JJ:")) { text.push_str("JJ:\n"); } else { text.push('\n'); } } /// Cleanup a description by normalizing line endings, and removing leading and /// trailing blank lines. fn cleanup_description_lines<I>(lines: I) -> String where I: IntoIterator, I::Item: AsRef<str>, { let description = lines .into_iter() .fold_while(String::new(), |acc, line| { let line = line.as_ref(); if line.strip_prefix("JJ: ignore-rest").is_some() { FoldWhile::Done(acc) } else if line.starts_with("JJ:") { FoldWhile::Continue(acc) } else { FoldWhile::Continue(acc + line + "\n") } }) .into_inner(); text_util::complete_newline(description.trim_matches('\n')) } pub fn edit_description(editor: &TextEditor, description: &str) -> Result<String, CommandError> { let mut description = description.to_owned(); append_blank_line(&mut description); description.push_str("JJ: Lines starting with \"JJ:\" (like this one) will be removed.\n"); let description = editor .edit_str(description, Some(".jjdescription")) .map_err(|err| err.with_name("description"))?; Ok(cleanup_description_lines(description.lines())) } /// Edits the descriptions of the given commits in a single editor session. 
pub fn edit_multiple_descriptions( ui: &Ui, editor: &TextEditor, tx: &WorkspaceCommandTransaction, commits: &[(&CommitId, Commit)], ) -> Result<ParsedBulkEditMessage<CommitId>, CommandError> { let mut commits_map = IndexMap::new(); let mut bulk_message = String::new(); bulk_message.push_str(indoc! {r#" JJ: Enter or edit commit descriptions after the `JJ: describe` lines. JJ: Warning: JJ: - The text you enter will be lost on a syntax error. JJ: - The syntax of the separator lines may change in the future. JJ: "#}); for (commit_id, temp_commit) in commits { let commit_hash = short_commit_hash(commit_id); bulk_message.push_str("JJ: describe "); bulk_message.push_str(&commit_hash); bulk_message.push_str(" -------\n"); commits_map.insert(commit_hash, *commit_id); let intro = ""; let template = description_template(ui, tx, intro, temp_commit)?; bulk_message.push_str(&template); append_blank_line(&mut bulk_message); } bulk_message.push_str("JJ: Lines starting with \"JJ:\" (like this one) will be removed.\n"); let bulk_message = editor .edit_str(bulk_message, Some(".jjdescription")) .map_err(|err| err.with_name("description"))?; Ok(parse_bulk_edit_message(&bulk_message, &commits_map)?) } #[derive(Debug)] pub struct ParsedBulkEditMessage<T> { /// The parsed, formatted descriptions. pub descriptions: HashMap<T, String>, /// Commit IDs that were expected while parsing the edited messages, but /// which were not found. pub missing: Vec<String>, /// Commit IDs that were found multiple times while parsing the edited /// messages. pub duplicates: Vec<String>, /// Commit IDs that were found while parsing the edited messages, but which /// were not originally being edited. pub unexpected: Vec<String>, } #[derive(Debug, Error, PartialEq)] pub enum ParseBulkEditMessageError { #[error(r#"Found the following line without a commit header: "{0}""#)] LineWithoutCommitHeader(String), } /// Parse the bulk message of edited commit descriptions. 
fn parse_bulk_edit_message<T>( message: &str, commit_ids_map: &IndexMap<String, &T>, ) -> Result<ParsedBulkEditMessage<T>, ParseBulkEditMessageError> where T: Eq + std::hash::Hash + Clone, { let mut descriptions = HashMap::new(); let mut duplicates = Vec::new(); let mut unexpected = Vec::new(); let mut messages: Vec<(&str, Vec<&str>)> = vec![]; for line in message.lines() { if let Some(commit_id_prefix) = line.strip_prefix("JJ: describe ") { let commit_id_prefix = commit_id_prefix.trim_end_matches(|c: char| c.is_ascii_whitespace() || c == '-'); messages.push((commit_id_prefix, vec![])); } else if let Some((_, lines)) = messages.last_mut() { lines.push(line); } // Do not allow lines without a commit header, except for empty lines or comments. else if !line.trim().is_empty() && !line.starts_with("JJ:") { return Err(ParseBulkEditMessageError::LineWithoutCommitHeader( line.to_owned(), )); }; } for (commit_id_prefix, description_lines) in messages { let Some(&commit_id) = commit_ids_map.get(commit_id_prefix) else { unexpected.push(commit_id_prefix.to_string()); continue; }; if descriptions.contains_key(commit_id) { duplicates.push(commit_id_prefix.to_string()); continue; } descriptions.insert( commit_id.clone(), cleanup_description_lines(&description_lines), ); } let missing: Vec<_> = commit_ids_map .iter() .filter(|(_, commit_id)| !descriptions.contains_key(*commit_id)) .map(|(commit_id_prefix, _)| commit_id_prefix.clone()) .collect(); Ok(ParsedBulkEditMessage { descriptions, missing, duplicates, unexpected, }) } /// Combines the descriptions from the input commits. If only one is non-empty, /// then that one is used. pub fn try_combine_messages(sources: &[Commit], destination: &Commit) -> Option<String> { let non_empty = sources .iter() .chain(std::iter::once(destination)) .filter(|c| !c.description().is_empty()) .take(2) .collect_vec(); match *non_empty.as_slice() { [] => Some(String::new()), [commit] => Some(commit.description().to_owned()), [_, _, ..] 
=> None, } } /// Produces a combined description with "JJ: " comment lines. /// /// This includes empty descriptins too, so the user doesn't have to wonder why /// they only see 2 descriptions when they combined 3 commits. pub fn combine_messages_for_editing( ui: &Ui, tx: &WorkspaceCommandTransaction, sources: &[Commit], destination: Option<&Commit>, commit_builder: &DetachedCommitBuilder, ) -> Result<String, CommandError> { let mut combined = String::new(); if let Some(destination) = destination { combined.push_str("JJ: Description from the destination commit:\n"); combined.push_str(destination.description()); } for commit in sources { combined.push_str("\nJJ: Description from source commit:\n"); combined.push_str(commit.description()); } if let Some(template) = parse_trailers_template(ui, tx)? { // show the user only trailers that were not in one of the squashed commits let old_trailers: Vec<_> = sources .iter() .chain(destination) .flat_map(|commit| parse_description_trailers(commit.description())) .collect(); let commit = commit_builder.write_hidden()?; let trailer_lines = template .format_plain_text(&commit) .into_string() .map_err(|_| user_error("Trailers should be valid utf-8"))?; let new_trailers = parse_trailers(&trailer_lines)?; let trailers: String = new_trailers .iter() .filter(|trailer| !old_trailers.contains(trailer)) .map(|trailer| format!("{}: {}\n", trailer.key, trailer.value)) .collect(); if !trailers.is_empty() { combined.push_str("\nJJ: Trailers not found in the squashed commits:\n"); combined.push_str(&trailers); } } Ok(combined) } /// Create a description from a list of paragraphs. /// /// Based on the Git CLI behavior. See `opt_parse_m()` and `cleanup_mode` in /// `git/builtin/commit.c`. pub fn join_message_paragraphs(paragraphs: &[String]) -> String { // Ensure each paragraph ends with a newline, then add another newline between // paragraphs. 
paragraphs .iter() .map(|p| text_util::complete_newline(p.as_str())) .join("\n") } /// Parse the commit trailers template from the configuration /// /// Returns None if the commit trailers template is empty. pub fn parse_trailers_template<'a>( ui: &Ui, tx: &'a WorkspaceCommandTransaction, ) -> Result<Option<TemplateRenderer<'a, Commit>>, CommandError> { let trailer_template = tx.settings().get_string("templates.commit_trailers")?; if trailer_template.is_empty() { Ok(None) } else { tx.parse_commit_template(ui, &trailer_template).map(Some) } } /// Add the trailers from the given `template` in the last paragraph of /// the description /// /// It just lets the description untouched if the trailers are already there. pub fn add_trailers_with_template( template: &TemplateRenderer<'_, Commit>, commit: &Commit, ) -> Result<String, CommandError> { let trailers = parse_description_trailers(commit.description()); let trailer_lines = template .format_plain_text(commit) .into_string() .map_err(|_| user_error("Trailers should be valid utf-8"))?; let new_trailers = parse_trailers(&trailer_lines)?; let mut description = commit.description().to_owned(); if trailers.is_empty() && !new_trailers.is_empty() { if description.is_empty() { // a first empty line where the user will edit the commit summary description.push('\n'); } // create a new paragraph for the trailer description.push('\n'); } for new_trailer in new_trailers { if !trailers.contains(&new_trailer) { description.push_str(&format!("{}: {}\n", new_trailer.key, new_trailer.value)); } } Ok(description) } /// Add the trailers from `templates.commit_trailers` in the last paragraph of /// the description /// /// It just lets the description untouched if the trailers are already there. pub fn add_trailers( ui: &Ui, tx: &WorkspaceCommandTransaction, commit_builder: &DetachedCommitBuilder, ) -> Result<String, CommandError> { if let Some(renderer) = parse_trailers_template(ui, tx)? 
{ let commit = commit_builder.write_hidden()?; add_trailers_with_template(&renderer, &commit) } else { Ok(commit_builder.description().to_owned()) } } /// Renders commit description template, which will be edited by user. pub fn description_template( ui: &Ui, tx: &WorkspaceCommandTransaction, intro: &str, commit: &Commit, ) -> Result<String, CommandError> { // Named as "draft" because the output can contain "JJ:" comment lines. let template_key = "templates.draft_commit_description"; let template_text = tx.settings().get_string(template_key)?; let template = tx.parse_commit_template(ui, &template_text)?; let mut output = Vec::new(); if !intro.is_empty() { writeln!(output, "JJ: {intro}").unwrap(); } template .format(commit, &mut PlainTextFormatter::new(&mut output)) .expect("write() to vec backed formatter should never fail"); // Template output is usually UTF-8, but it can contain file content. Ok(output.into_string_lossy()) } #[cfg(test)] mod tests { use indexmap::indexmap; use indoc::indoc; use maplit::hashmap; use super::parse_bulk_edit_message; use crate::description_util::ParseBulkEditMessageError; #[test] fn test_parse_complete_bulk_edit_message() { let result = parse_bulk_edit_message( indoc! {" JJ: describe 1 ------- Description 1 JJ: describe 2 Description 2 JJ: describe 3 -- Description 3 "}, &indexmap! { "1".to_string() => &1, "2".to_string() => &2, "3".to_string() => &3, }, ) .unwrap(); assert_eq!( result.descriptions, hashmap! { 1 => "Description 1\n".to_string(), 2 => "Description 2\n".to_string(), 3 => "Description 3\n".to_string(), } ); assert!(result.missing.is_empty()); assert!(result.duplicates.is_empty()); assert!(result.unexpected.is_empty()); } #[test] fn test_parse_bulk_edit_message_with_missing_descriptions() { let result = parse_bulk_edit_message( indoc! {" JJ: describe 1 ------- Description 1 "}, &indexmap! { "1".to_string() => &1, "2".to_string() => &2, }, ) .unwrap(); assert_eq!( result.descriptions, hashmap! 
{ 1 => "Description 1\n".to_string(), } ); assert_eq!(result.missing, vec!["2".to_string()]); assert!(result.duplicates.is_empty()); assert!(result.unexpected.is_empty()); } #[test] fn test_parse_bulk_edit_message_with_duplicate_descriptions() { let result = parse_bulk_edit_message( indoc! {" JJ: describe 1 ------- Description 1 JJ: describe 1 ------- Description 1 (repeated) "}, &indexmap! { "1".to_string() => &1, }, ) .unwrap(); assert_eq!( result.descriptions, hashmap! { 1 => "Description 1\n".to_string(), } ); assert!(result.missing.is_empty()); assert_eq!(result.duplicates, vec!["1".to_string()]); assert!(result.unexpected.is_empty()); } #[test] fn test_parse_bulk_edit_message_with_unexpected_descriptions() { let result = parse_bulk_edit_message( indoc! {" JJ: describe 1 ------- Description 1 JJ: describe 3 ------- Description 3 (unexpected) "}, &indexmap! { "1".to_string() => &1, }, ) .unwrap(); assert_eq!( result.descriptions, hashmap! { 1 => "Description 1\n".to_string(), } ); assert!(result.missing.is_empty()); assert!(result.duplicates.is_empty()); assert_eq!(result.unexpected, vec!["3".to_string()]); } #[test] fn test_parse_bulk_edit_message_with_no_header() { let result = parse_bulk_edit_message( indoc! {" Description 1 "}, &indexmap! { "1".to_string() => &1, }, ); assert_eq!( result.unwrap_err(), ParseBulkEditMessageError::LineWithoutCommitHeader("Description 1".to_string()) ); } #[test] fn test_parse_bulk_edit_message_with_comment_before_header() { let result = parse_bulk_edit_message( indoc! {" JJ: Custom comment and empty lines below should be accepted JJ: describe 1 ------- Description 1 "}, &indexmap! { "1".to_string() => &1, }, ) .unwrap(); assert_eq!( result.descriptions, hashmap! { 1 => "Description 1\n".to_string(), } ); assert!(result.missing.is_empty()); assert!(result.duplicates.is_empty()); assert!(result.unexpected.is_empty()); } }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commit_templater.rs
cli/src/commit_templater.rs
// Copyright 2020-2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Template environment for `jj log`, `jj evolog` and similar. use std::any::Any; use std::cmp::Ordering; use std::cmp::max; use std::collections::HashMap; use std::fmt; use std::fmt::Display; use std::io; use std::rc::Rc; use std::sync::Arc; use bstr::BString; use futures::StreamExt as _; use futures::TryStreamExt as _; use futures::stream::BoxStream; use itertools::Itertools as _; use jj_lib::backend::BackendResult; use jj_lib::backend::ChangeId; use jj_lib::backend::CommitId; use jj_lib::backend::TreeValue; use jj_lib::commit::Commit; use jj_lib::conflict_labels::ConflictLabels; use jj_lib::conflicts; use jj_lib::conflicts::ConflictMarkerStyle; use jj_lib::copies::CopiesTreeDiffEntry; use jj_lib::copies::CopiesTreeDiffEntryPath; use jj_lib::copies::CopyRecords; use jj_lib::evolution::CommitEvolutionEntry; use jj_lib::extensions_map::ExtensionsMap; use jj_lib::fileset; use jj_lib::fileset::FilesetDiagnostics; use jj_lib::fileset::FilesetExpression; use jj_lib::id_prefix::IdPrefixContext; use jj_lib::id_prefix::IdPrefixIndex; use jj_lib::index::IndexResult; use jj_lib::matchers::Matcher; use jj_lib::merge::Diff; use jj_lib::merge::MergedTreeValue; use jj_lib::merged_tree::MergedTree; use jj_lib::object_id::ObjectId as _; use jj_lib::op_store::LocalRemoteRefTarget; use jj_lib::op_store::OperationId; use jj_lib::op_store::RefTarget; use jj_lib::op_store::RemoteRef; use 
jj_lib::ref_name::RefName; use jj_lib::ref_name::WorkspaceName; use jj_lib::ref_name::WorkspaceNameBuf; use jj_lib::repo::Repo; use jj_lib::repo::RepoLoader; use jj_lib::repo_path::RepoPathBuf; use jj_lib::repo_path::RepoPathUiConverter; use jj_lib::revset; use jj_lib::revset::Revset; use jj_lib::revset::RevsetContainingFn; use jj_lib::revset::RevsetDiagnostics; use jj_lib::revset::RevsetModifier; use jj_lib::revset::RevsetParseContext; use jj_lib::revset::UserRevsetExpression; use jj_lib::rewrite::rebase_to_dest_parent; use jj_lib::settings::UserSettings; use jj_lib::signing::SigStatus; use jj_lib::signing::SignError; use jj_lib::signing::SignResult; use jj_lib::signing::Verification; use jj_lib::store::Store; use jj_lib::trailer; use jj_lib::trailer::Trailer; use once_cell::unsync::OnceCell; use pollster::FutureExt as _; use serde::Serialize as _; use crate::diff_util; use crate::diff_util::DiffStatEntry; use crate::diff_util::DiffStats; use crate::formatter::Formatter; use crate::operation_templater; use crate::operation_templater::OperationTemplateBuildFnTable; use crate::operation_templater::OperationTemplateEnvironment; use crate::operation_templater::OperationTemplatePropertyKind; use crate::operation_templater::OperationTemplatePropertyVar; use crate::revset_util; use crate::template_builder; use crate::template_builder::BuildContext; use crate::template_builder::CoreTemplateBuildFnTable; use crate::template_builder::CoreTemplatePropertyKind; use crate::template_builder::CoreTemplatePropertyVar; use crate::template_builder::TemplateBuildMethodFnMap; use crate::template_builder::TemplateLanguage; use crate::template_builder::expect_stringify_expression; use crate::template_builder::merge_fn_map; use crate::template_parser; use crate::template_parser::ExpressionNode; use crate::template_parser::FunctionCallNode; use crate::template_parser::TemplateDiagnostics; use crate::template_parser::TemplateParseError; use crate::template_parser::TemplateParseResult; use 
crate::templater; use crate::templater::BoxedSerializeProperty; use crate::templater::BoxedTemplateProperty; use crate::templater::ListTemplate; use crate::templater::PlainTextFormattedProperty; use crate::templater::SizeHint; use crate::templater::Template; use crate::templater::TemplateFormatter; use crate::templater::TemplatePropertyError; use crate::templater::TemplatePropertyExt as _; pub trait CommitTemplateLanguageExtension { fn build_fn_table<'repo>(&self) -> CommitTemplateBuildFnTable<'repo>; fn build_cache_extensions(&self, extensions: &mut ExtensionsMap); } /// Template environment for `jj log` and `jj evolog`. pub struct CommitTemplateLanguage<'repo> { repo: &'repo dyn Repo, path_converter: &'repo RepoPathUiConverter, workspace_name: WorkspaceNameBuf, // RevsetParseContext doesn't borrow a repo, but we'll need 'repo lifetime // anyway to capture it to evaluate dynamically-constructed user expression // such as `revset("ancestors(" ++ commit_id ++ ")")`. // TODO: Maybe refactor context structs? RepoPathUiConverter and // WorkspaceName are contained in RevsetParseContext for example. revset_parse_context: RevsetParseContext<'repo>, id_prefix_context: &'repo IdPrefixContext, immutable_expression: Arc<UserRevsetExpression>, conflict_marker_style: ConflictMarkerStyle, build_fn_table: CommitTemplateBuildFnTable<'repo>, keyword_cache: CommitKeywordCache<'repo>, cache_extensions: ExtensionsMap, } impl<'repo> CommitTemplateLanguage<'repo> { /// Sets up environment where commit template will be transformed to /// evaluation tree. 
#[expect(clippy::too_many_arguments)] pub fn new( repo: &'repo dyn Repo, path_converter: &'repo RepoPathUiConverter, workspace_name: &WorkspaceName, revset_parse_context: RevsetParseContext<'repo>, id_prefix_context: &'repo IdPrefixContext, immutable_expression: Arc<UserRevsetExpression>, conflict_marker_style: ConflictMarkerStyle, extensions: &[impl AsRef<dyn CommitTemplateLanguageExtension>], ) -> Self { let mut build_fn_table = CommitTemplateBuildFnTable::builtin(); let mut cache_extensions = ExtensionsMap::empty(); for extension in extensions { build_fn_table.merge(extension.as_ref().build_fn_table()); extension .as_ref() .build_cache_extensions(&mut cache_extensions); } CommitTemplateLanguage { repo, path_converter, workspace_name: workspace_name.to_owned(), revset_parse_context, id_prefix_context, immutable_expression, conflict_marker_style, build_fn_table, keyword_cache: CommitKeywordCache::default(), cache_extensions, } } } impl<'repo> TemplateLanguage<'repo> for CommitTemplateLanguage<'repo> { type Property = CommitTemplatePropertyKind<'repo>; fn settings(&self) -> &UserSettings { self.repo.base_repo().settings() } fn build_function( &self, diagnostics: &mut TemplateDiagnostics, build_ctx: &BuildContext<Self::Property>, function: &FunctionCallNode, ) -> TemplateParseResult<Self::Property> { let table = &self.build_fn_table.core; table.build_function(self, diagnostics, build_ctx, function) } fn build_method( &self, diagnostics: &mut TemplateDiagnostics, build_ctx: &BuildContext<Self::Property>, property: Self::Property, function: &FunctionCallNode, ) -> TemplateParseResult<Self::Property> { let type_name = property.type_name(); match property { CommitTemplatePropertyKind::Core(property) => { let table = &self.build_fn_table.core; table.build_method(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::Operation(property) => { let table = &self.build_fn_table.operation; table.build_method(self, diagnostics, build_ctx, property, 
function) } CommitTemplatePropertyKind::Commit(property) => { let table = &self.build_fn_table.commit_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::CommitOpt(property) => { let type_name = "Commit"; let table = &self.build_fn_table.commit_methods; let build = template_parser::lookup_method(type_name, table, function)?; let inner_property = property.try_unwrap(type_name).into_dyn(); build(self, diagnostics, build_ctx, inner_property, function) } CommitTemplatePropertyKind::CommitList(property) => { let table = &self.build_fn_table.commit_list_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::CommitEvolutionEntry(property) => { let table = &self.build_fn_table.commit_evolution_entry_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::CommitRef(property) => { let table = &self.build_fn_table.commit_ref_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::CommitRefOpt(property) => { let type_name = "CommitRef"; let table = &self.build_fn_table.commit_ref_methods; let build = template_parser::lookup_method(type_name, table, function)?; let inner_property = property.try_unwrap(type_name).into_dyn(); build(self, diagnostics, build_ctx, inner_property, function) } CommitTemplatePropertyKind::CommitRefList(property) => { let table = &self.build_fn_table.commit_ref_list_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::WorkspaceRef(property) => { let table = 
&self.build_fn_table.workspace_ref_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::WorkspaceRefOpt(property) => { let type_name = "WorkspaceRef"; let table = &self.build_fn_table.workspace_ref_methods; let build = template_parser::lookup_method(type_name, table, function)?; let inner_property = property.try_unwrap(type_name).into_dyn(); build(self, diagnostics, build_ctx, inner_property, function) } CommitTemplatePropertyKind::WorkspaceRefList(property) => { let table = &self.build_fn_table.workspace_ref_list_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::RefSymbol(property) => { let table = &self.build_fn_table.core.string_methods; let build = template_parser::lookup_method(type_name, table, function)?; let inner_property = property.map(|RefSymbolBuf(s)| s).into_dyn(); build(self, diagnostics, build_ctx, inner_property, function) } CommitTemplatePropertyKind::RefSymbolOpt(property) => { let type_name = "RefSymbol"; let table = &self.build_fn_table.core.string_methods; let build = template_parser::lookup_method(type_name, table, function)?; let inner_property = property .try_unwrap(type_name) .map(|RefSymbolBuf(s)| s) .into_dyn(); build(self, diagnostics, build_ctx, inner_property, function) } CommitTemplatePropertyKind::RepoPath(property) => { let table = &self.build_fn_table.repo_path_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::RepoPathOpt(property) => { let type_name = "RepoPath"; let table = &self.build_fn_table.repo_path_methods; let build = template_parser::lookup_method(type_name, table, function)?; let inner_property = property.try_unwrap(type_name).into_dyn(); build(self, diagnostics, build_ctx, 
inner_property, function) } CommitTemplatePropertyKind::ChangeId(property) => { let table = &self.build_fn_table.change_id_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::CommitId(property) => { let table = &self.build_fn_table.commit_id_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::ShortestIdPrefix(property) => { let table = &self.build_fn_table.shortest_id_prefix_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::TreeDiff(property) => { let table = &self.build_fn_table.tree_diff_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::TreeDiffEntry(property) => { let table = &self.build_fn_table.tree_diff_entry_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::TreeDiffEntryList(property) => { let table = &self.build_fn_table.tree_diff_entry_list_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::TreeEntry(property) => { let table = &self.build_fn_table.tree_entry_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::TreeEntryList(property) => { let table = &self.build_fn_table.tree_entry_list_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } 
CommitTemplatePropertyKind::DiffStats(property) => { let table = &self.build_fn_table.diff_stats_methods; let build = template_parser::lookup_method(type_name, table, function)?; // Strip off formatting parameters which are needed only for the // default template output. let property = property.map(|formatted| formatted.stats).into_dyn(); build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::DiffStatEntry(property) => { let table = &self.build_fn_table.diff_stat_entry_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::DiffStatEntryList(property) => { let table = &self.build_fn_table.diff_stat_entry_list_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::CryptographicSignatureOpt(property) => { let type_name = "CryptographicSignature"; let table = &self.build_fn_table.cryptographic_signature_methods; let build = template_parser::lookup_method(type_name, table, function)?; let inner_property = property.try_unwrap(type_name).into_dyn(); build(self, diagnostics, build_ctx, inner_property, function) } CommitTemplatePropertyKind::AnnotationLine(property) => { let type_name = "AnnotationLine"; let table = &self.build_fn_table.annotation_line_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::Trailer(property) => { let table = &self.build_fn_table.trailer_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, build_ctx, property, function) } CommitTemplatePropertyKind::TrailerList(property) => { let table = &self.build_fn_table.trailer_list_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(self, diagnostics, 
build_ctx, property, function) } } } } // If we need to add multiple languages that support Commit types, this can be // turned into a trait which extends TemplateLanguage. impl<'repo> CommitTemplateLanguage<'repo> { pub fn repo(&self) -> &'repo dyn Repo { self.repo } pub fn workspace_name(&self) -> &WorkspaceName { &self.workspace_name } pub fn keyword_cache(&self) -> &CommitKeywordCache<'repo> { &self.keyword_cache } pub fn cache_extension<T: Any>(&self) -> Option<&T> { self.cache_extensions.get::<T>() } } impl OperationTemplateEnvironment for CommitTemplateLanguage<'_> { fn repo_loader(&self) -> &RepoLoader { self.repo.base_repo().loader() } fn current_op_id(&self) -> Option<&OperationId> { // TODO: Maybe return None if the repo is a MutableRepo? Some(self.repo.base_repo().op_id()) } } pub enum CommitTemplatePropertyKind<'repo> { Core(CoreTemplatePropertyKind<'repo>), Operation(OperationTemplatePropertyKind<'repo>), Commit(BoxedTemplateProperty<'repo, Commit>), CommitOpt(BoxedTemplateProperty<'repo, Option<Commit>>), CommitList(BoxedTemplateProperty<'repo, Vec<Commit>>), CommitEvolutionEntry(BoxedTemplateProperty<'repo, CommitEvolutionEntry>), CommitRef(BoxedTemplateProperty<'repo, Rc<CommitRef>>), CommitRefOpt(BoxedTemplateProperty<'repo, Option<Rc<CommitRef>>>), CommitRefList(BoxedTemplateProperty<'repo, Vec<Rc<CommitRef>>>), WorkspaceRef(BoxedTemplateProperty<'repo, WorkspaceRef>), WorkspaceRefOpt(BoxedTemplateProperty<'repo, Option<WorkspaceRef>>), WorkspaceRefList(BoxedTemplateProperty<'repo, Vec<WorkspaceRef>>), RefSymbol(BoxedTemplateProperty<'repo, RefSymbolBuf>), RefSymbolOpt(BoxedTemplateProperty<'repo, Option<RefSymbolBuf>>), RepoPath(BoxedTemplateProperty<'repo, RepoPathBuf>), RepoPathOpt(BoxedTemplateProperty<'repo, Option<RepoPathBuf>>), ChangeId(BoxedTemplateProperty<'repo, ChangeId>), CommitId(BoxedTemplateProperty<'repo, CommitId>), ShortestIdPrefix(BoxedTemplateProperty<'repo, ShortestIdPrefix>), TreeDiff(BoxedTemplateProperty<'repo, 
TreeDiff>), TreeDiffEntry(BoxedTemplateProperty<'repo, TreeDiffEntry>), TreeDiffEntryList(BoxedTemplateProperty<'repo, Vec<TreeDiffEntry>>), TreeEntry(BoxedTemplateProperty<'repo, TreeEntry>), TreeEntryList(BoxedTemplateProperty<'repo, Vec<TreeEntry>>), DiffStats(BoxedTemplateProperty<'repo, DiffStatsFormatted<'repo>>), DiffStatEntry(BoxedTemplateProperty<'repo, DiffStatEntry>), DiffStatEntryList(BoxedTemplateProperty<'repo, Vec<DiffStatEntry>>), CryptographicSignatureOpt(BoxedTemplateProperty<'repo, Option<CryptographicSignature>>), AnnotationLine(BoxedTemplateProperty<'repo, AnnotationLine>), Trailer(BoxedTemplateProperty<'repo, Trailer>), TrailerList(BoxedTemplateProperty<'repo, Vec<Trailer>>), } template_builder::impl_core_property_wrappers!(<'repo> CommitTemplatePropertyKind<'repo> => Core); operation_templater::impl_operation_property_wrappers!(<'repo> CommitTemplatePropertyKind<'repo> => Operation); template_builder::impl_property_wrappers!(<'repo> CommitTemplatePropertyKind<'repo> { Commit(Commit), CommitOpt(Option<Commit>), CommitList(Vec<Commit>), CommitEvolutionEntry(CommitEvolutionEntry), CommitRef(Rc<CommitRef>), CommitRefOpt(Option<Rc<CommitRef>>), CommitRefList(Vec<Rc<CommitRef>>), WorkspaceRef(WorkspaceRef), WorkspaceRefOpt(Option<WorkspaceRef>), WorkspaceRefList(Vec<WorkspaceRef>), RefSymbol(RefSymbolBuf), RefSymbolOpt(Option<RefSymbolBuf>), RepoPath(RepoPathBuf), RepoPathOpt(Option<RepoPathBuf>), ChangeId(ChangeId), CommitId(CommitId), ShortestIdPrefix(ShortestIdPrefix), TreeDiff(TreeDiff), TreeDiffEntry(TreeDiffEntry), TreeDiffEntryList(Vec<TreeDiffEntry>), TreeEntry(TreeEntry), TreeEntryList(Vec<TreeEntry>), DiffStats(DiffStatsFormatted<'repo>), DiffStatEntry(DiffStatEntry), DiffStatEntryList(Vec<DiffStatEntry>), CryptographicSignatureOpt(Option<CryptographicSignature>), AnnotationLine(AnnotationLine), Trailer(Trailer), TrailerList(Vec<Trailer>), }); impl<'repo> CoreTemplatePropertyVar<'repo> for CommitTemplatePropertyKind<'repo> { fn 
wrap_template(template: Box<dyn Template + 'repo>) -> Self { Self::Core(CoreTemplatePropertyKind::wrap_template(template)) } fn wrap_list_template(template: Box<dyn ListTemplate + 'repo>) -> Self { Self::Core(CoreTemplatePropertyKind::wrap_list_template(template)) } fn type_name(&self) -> &'static str { match self { Self::Core(property) => property.type_name(), Self::Operation(property) => property.type_name(), Self::Commit(_) => "Commit", Self::CommitOpt(_) => "Option<Commit>", Self::CommitList(_) => "List<Commit>", Self::CommitEvolutionEntry(_) => "CommitEvolutionEntry", Self::CommitRef(_) => "CommitRef", Self::CommitRefOpt(_) => "Option<CommitRef>", Self::CommitRefList(_) => "List<CommitRef>", Self::WorkspaceRef(_) => "WorkspaceRef", Self::WorkspaceRefOpt(_) => "Option<WorkspaceRef>", Self::WorkspaceRefList(_) => "List<WorkspaceRef>", Self::RefSymbol(_) => "RefSymbol", Self::RefSymbolOpt(_) => "Option<RefSymbol>", Self::RepoPath(_) => "RepoPath", Self::RepoPathOpt(_) => "Option<RepoPath>", Self::ChangeId(_) => "ChangeId", Self::CommitId(_) => "CommitId", Self::ShortestIdPrefix(_) => "ShortestIdPrefix", Self::TreeDiff(_) => "TreeDiff", Self::TreeDiffEntry(_) => "TreeDiffEntry", Self::TreeDiffEntryList(_) => "List<TreeDiffEntry>", Self::TreeEntry(_) => "TreeEntry", Self::TreeEntryList(_) => "List<TreeEntry>", Self::DiffStats(_) => "DiffStats", Self::DiffStatEntry(_) => "DiffStatEntry", Self::DiffStatEntryList(_) => "List<DiffStatEntry>", Self::CryptographicSignatureOpt(_) => "Option<CryptographicSignature>", Self::AnnotationLine(_) => "AnnotationLine", Self::Trailer(_) => "Trailer", Self::TrailerList(_) => "List<Trailer>", } } fn try_into_boolean(self) -> Option<BoxedTemplateProperty<'repo, bool>> { match self { Self::Core(property) => property.try_into_boolean(), Self::Operation(property) => property.try_into_boolean(), Self::Commit(_) => None, Self::CommitOpt(property) => Some(property.map(|opt| opt.is_some()).into_dyn()), Self::CommitList(property) => 
Some(property.map(|l| !l.is_empty()).into_dyn()), Self::CommitEvolutionEntry(_) => None, Self::CommitRef(_) => None, Self::CommitRefOpt(property) => Some(property.map(|opt| opt.is_some()).into_dyn()), Self::CommitRefList(property) => Some(property.map(|l| !l.is_empty()).into_dyn()), Self::WorkspaceRef(_) => None, Self::WorkspaceRefOpt(property) => Some(property.map(|opt| opt.is_some()).into_dyn()), Self::WorkspaceRefList(property) => Some(property.map(|l| !l.is_empty()).into_dyn()), Self::RefSymbol(_) => None, Self::RefSymbolOpt(property) => Some(property.map(|opt| opt.is_some()).into_dyn()), Self::RepoPath(_) => None, Self::RepoPathOpt(property) => Some(property.map(|opt| opt.is_some()).into_dyn()), Self::ChangeId(_) => None, Self::CommitId(_) => None, Self::ShortestIdPrefix(_) => None, // TODO: boolean cast could be implemented, but explicit // diff.empty() method might be better. Self::TreeDiff(_) => None, Self::TreeDiffEntry(_) => None, Self::TreeDiffEntryList(property) => Some(property.map(|l| !l.is_empty()).into_dyn()), Self::TreeEntry(_) => None, Self::TreeEntryList(property) => Some(property.map(|l| !l.is_empty()).into_dyn()), Self::DiffStats(_) => None, Self::DiffStatEntry(_) => None, Self::DiffStatEntryList(property) => Some(property.map(|l| !l.is_empty()).into_dyn()), Self::CryptographicSignatureOpt(property) => { Some(property.map(|sig| sig.is_some()).into_dyn()) } Self::AnnotationLine(_) => None, Self::Trailer(_) => None, Self::TrailerList(property) => Some(property.map(|l| !l.is_empty()).into_dyn()), } } fn try_into_integer(self) -> Option<BoxedTemplateProperty<'repo, i64>> { match self { Self::Core(property) => property.try_into_integer(), Self::Operation(property) => property.try_into_integer(), _ => None, } } fn try_into_stringify(self) -> Option<BoxedTemplateProperty<'repo, String>> { match self { Self::Core(property) => property.try_into_stringify(), Self::Operation(property) => property.try_into_stringify(), Self::RefSymbol(property) => 
Some(property.map(|RefSymbolBuf(s)| s).into_dyn()), Self::RefSymbolOpt(property) => Some( property .map(|opt| opt.map_or_else(String::new, |RefSymbolBuf(s)| s)) .into_dyn(), ), _ => { let template = self.try_into_template()?; Some(PlainTextFormattedProperty::new(template).into_dyn()) } } } fn try_into_serialize(self) -> Option<BoxedSerializeProperty<'repo>> { match self { Self::Core(property) => property.try_into_serialize(), Self::Operation(property) => property.try_into_serialize(), Self::Commit(property) => Some(property.into_serialize()), Self::CommitOpt(property) => Some(property.into_serialize()), Self::CommitList(property) => Some(property.into_serialize()), Self::CommitEvolutionEntry(property) => Some(property.into_serialize()), Self::CommitRef(property) => Some(property.into_serialize()), Self::CommitRefOpt(property) => Some(property.into_serialize()), Self::CommitRefList(property) => Some(property.into_serialize()), Self::WorkspaceRef(property) => Some(property.into_serialize()), Self::WorkspaceRefOpt(property) => Some(property.into_serialize()), Self::WorkspaceRefList(property) => Some(property.into_serialize()), Self::RefSymbol(property) => Some(property.into_serialize()), Self::RefSymbolOpt(property) => Some(property.into_serialize()), Self::RepoPath(property) => Some(property.into_serialize()), Self::RepoPathOpt(property) => Some(property.into_serialize()), Self::ChangeId(property) => Some(property.into_serialize()), Self::CommitId(property) => Some(property.into_serialize()), Self::ShortestIdPrefix(property) => Some(property.into_serialize()), Self::TreeDiff(_) => None, Self::TreeDiffEntry(_) => None, Self::TreeDiffEntryList(_) => None, Self::TreeEntry(_) => None, Self::TreeEntryList(_) => None, Self::DiffStats(_) => None, Self::DiffStatEntry(_) => None, Self::DiffStatEntryList(_) => None, Self::CryptographicSignatureOpt(_) => None, Self::AnnotationLine(_) => None, Self::Trailer(_) => None, Self::TrailerList(_) => None, } } fn try_into_template(self) 
-> Option<Box<dyn Template + 'repo>> { match self { Self::Core(property) => property.try_into_template(), Self::Operation(property) => property.try_into_template(), Self::Commit(_) => None, Self::CommitOpt(_) => None, Self::CommitList(_) => None, Self::CommitEvolutionEntry(_) => None, Self::CommitRef(property) => Some(property.into_template()), Self::CommitRefOpt(property) => Some(property.into_template()), Self::CommitRefList(property) => Some(property.into_template()), Self::WorkspaceRef(property) => Some(property.into_template()), Self::WorkspaceRefOpt(property) => Some(property.into_template()), Self::WorkspaceRefList(property) => Some(property.into_template()), Self::RefSymbol(property) => Some(property.into_template()), Self::RefSymbolOpt(property) => Some(property.into_template()), Self::RepoPath(property) => Some(property.into_template()), Self::RepoPathOpt(property) => Some(property.into_template()), Self::ChangeId(property) => Some(property.into_template()), Self::CommitId(property) => Some(property.into_template()), Self::ShortestIdPrefix(property) => Some(property.into_template()), Self::TreeDiff(_) => None, Self::TreeDiffEntry(_) => None, Self::TreeDiffEntryList(_) => None, Self::TreeEntry(_) => None, Self::TreeEntryList(_) => None, Self::DiffStats(property) => Some(property.into_template()), Self::DiffStatEntry(_) => None, Self::DiffStatEntryList(_) => None, Self::CryptographicSignatureOpt(_) => None, Self::AnnotationLine(_) => None, Self::Trailer(property) => Some(property.into_template()), Self::TrailerList(property) => Some(property.into_template()), } } fn try_into_eq(self, other: Self) -> Option<BoxedTemplateProperty<'repo, bool>> { type Core<'repo> = CoreTemplatePropertyKind<'repo>; match (self, other) { (Self::Core(lhs), Self::Core(rhs)) => lhs.try_into_eq(rhs), (Self::Core(lhs), Self::Operation(rhs)) => rhs.try_into_eq_core(lhs),
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
true
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/operation_templater.rs
cli/src/operation_templater.rs
// Copyright 2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! Template environment for `jj op log`. use std::any::Any; use std::cmp::Ordering; use std::collections::HashMap; use std::io; use itertools::Itertools as _; use jj_lib::extensions_map::ExtensionsMap; use jj_lib::object_id::ObjectId as _; use jj_lib::op_store::OperationId; use jj_lib::operation::Operation; use jj_lib::repo::RepoLoader; use jj_lib::settings::UserSettings; use crate::template_builder; use crate::template_builder::BuildContext; use crate::template_builder::CoreTemplateBuildFnTable; use crate::template_builder::CoreTemplatePropertyKind; use crate::template_builder::CoreTemplatePropertyVar; use crate::template_builder::TemplateBuildMethodFnMap; use crate::template_builder::TemplateLanguage; use crate::template_builder::merge_fn_map; use crate::template_parser; use crate::template_parser::FunctionCallNode; use crate::template_parser::TemplateDiagnostics; use crate::template_parser::TemplateParseResult; use crate::templater::BoxedSerializeProperty; use crate::templater::BoxedTemplateProperty; use crate::templater::ListTemplate; use crate::templater::PlainTextFormattedProperty; use crate::templater::Template; use crate::templater::TemplateFormatter; use crate::templater::TemplatePropertyExt as _; use crate::templater::WrapTemplateProperty; pub trait OperationTemplateLanguageExtension { fn build_fn_table(&self) -> OperationTemplateLanguageBuildFnTable; fn 
build_cache_extensions(&self, extensions: &mut ExtensionsMap); } /// Global resources needed by [`OperationTemplatePropertyKind`] methods. pub trait OperationTemplateEnvironment { fn repo_loader(&self) -> &RepoLoader; fn current_op_id(&self) -> Option<&OperationId>; } /// Template environment for `jj op log`. pub struct OperationTemplateLanguage { repo_loader: RepoLoader, current_op_id: Option<OperationId>, build_fn_table: OperationTemplateLanguageBuildFnTable, cache_extensions: ExtensionsMap, } impl OperationTemplateLanguage { /// Sets up environment where operation template will be transformed to /// evaluation tree. pub fn new( repo_loader: &RepoLoader, current_op_id: Option<&OperationId>, extensions: &[impl AsRef<dyn OperationTemplateLanguageExtension>], ) -> Self { let mut build_fn_table = OperationTemplateLanguageBuildFnTable::builtin(); let mut cache_extensions = ExtensionsMap::empty(); for extension in extensions { build_fn_table.merge(extension.as_ref().build_fn_table()); extension .as_ref() .build_cache_extensions(&mut cache_extensions); } Self { // Clone these to keep lifetime simple repo_loader: repo_loader.clone(), current_op_id: current_op_id.cloned(), build_fn_table, cache_extensions, } } } impl TemplateLanguage<'static> for OperationTemplateLanguage { type Property = OperationTemplateLanguagePropertyKind; fn settings(&self) -> &UserSettings { self.repo_loader.settings() } fn build_function( &self, diagnostics: &mut TemplateDiagnostics, build_ctx: &BuildContext<Self::Property>, function: &FunctionCallNode, ) -> TemplateParseResult<Self::Property> { let table = &self.build_fn_table.core; table.build_function(self, diagnostics, build_ctx, function) } fn build_method( &self, diagnostics: &mut TemplateDiagnostics, build_ctx: &BuildContext<Self::Property>, property: Self::Property, function: &FunctionCallNode, ) -> TemplateParseResult<Self::Property> { match property { OperationTemplateLanguagePropertyKind::Core(property) => { let table = 
&self.build_fn_table.core; table.build_method(self, diagnostics, build_ctx, property, function) } OperationTemplateLanguagePropertyKind::Operation(property) => { let table = &self.build_fn_table.operation; table.build_method(self, diagnostics, build_ctx, property, function) } } } } impl OperationTemplateEnvironment for OperationTemplateLanguage { fn repo_loader(&self) -> &RepoLoader { &self.repo_loader } fn current_op_id(&self) -> Option<&OperationId> { self.current_op_id.as_ref() } } impl OperationTemplateLanguage { pub fn cache_extension<T: Any>(&self) -> Option<&T> { self.cache_extensions.get::<T>() } } /// Wrapper for the operation template property types. pub trait OperationTemplatePropertyVar<'a> where Self: WrapTemplateProperty<'a, Operation>, Self: WrapTemplateProperty<'a, Option<Operation>>, Self: WrapTemplateProperty<'a, Vec<Operation>>, Self: WrapTemplateProperty<'a, OperationId>, { } /// Tagged union of the operation template property types. pub enum OperationTemplatePropertyKind<'a> { Operation(BoxedTemplateProperty<'a, Operation>), OperationOpt(BoxedTemplateProperty<'a, Option<Operation>>), OperationList(BoxedTemplateProperty<'a, Vec<Operation>>), OperationId(BoxedTemplateProperty<'a, OperationId>), } /// Implements `WrapTemplateProperty<type>` for operation property types. /// /// Use `impl_operation_property_wrappers!(<'a> Kind<'a> => Operation);` to /// implement forwarding conversion. macro_rules! 
impl_operation_property_wrappers { ($($head:tt)+) => { $crate::template_builder::impl_property_wrappers!($($head)+ { Operation(jj_lib::operation::Operation), OperationOpt(Option<jj_lib::operation::Operation>), OperationList(Vec<jj_lib::operation::Operation>), OperationId(jj_lib::op_store::OperationId), }); }; } pub(crate) use impl_operation_property_wrappers; impl_operation_property_wrappers!(<'a> OperationTemplatePropertyKind<'a>); impl<'a> OperationTemplatePropertyKind<'a> { pub fn type_name(&self) -> &'static str { match self { Self::Operation(_) => "Operation", Self::OperationOpt(_) => "Option<Operation>", Self::OperationList(_) => "List<Operation>", Self::OperationId(_) => "OperationId", } } pub fn try_into_boolean(self) -> Option<BoxedTemplateProperty<'a, bool>> { match self { Self::Operation(_) => None, Self::OperationOpt(property) => Some(property.map(|opt| opt.is_some()).into_dyn()), Self::OperationList(property) => Some(property.map(|l| !l.is_empty()).into_dyn()), Self::OperationId(_) => None, } } pub fn try_into_integer(self) -> Option<BoxedTemplateProperty<'a, i64>> { None } pub fn try_into_stringify(self) -> Option<BoxedTemplateProperty<'a, String>> { let template = self.try_into_template()?; Some(PlainTextFormattedProperty::new(template).into_dyn()) } pub fn try_into_serialize(self) -> Option<BoxedSerializeProperty<'a>> { match self { Self::Operation(property) => Some(property.into_serialize()), Self::OperationOpt(property) => Some(property.into_serialize()), Self::OperationList(property) => Some(property.into_serialize()), Self::OperationId(property) => Some(property.into_serialize()), } } pub fn try_into_template(self) -> Option<Box<dyn Template + 'a>> { match self { Self::Operation(_) => None, Self::OperationOpt(_) => None, Self::OperationList(_) => None, Self::OperationId(property) => Some(property.into_template()), } } pub fn try_into_eq(self, other: Self) -> Option<BoxedTemplateProperty<'a, bool>> { match (self, other) { (Self::Operation(_), _) 
=> None, (Self::OperationOpt(_), _) => None, (Self::OperationList(_), _) => None, (Self::OperationId(_), _) => None, } } pub fn try_into_eq_core( self, other: CoreTemplatePropertyKind<'a>, ) -> Option<BoxedTemplateProperty<'a, bool>> { match (self, other) { (Self::Operation(_), _) => None, (Self::OperationOpt(_), _) => None, (Self::OperationList(_), _) => None, (Self::OperationId(_), _) => None, } } pub fn try_into_cmp(self, other: Self) -> Option<BoxedTemplateProperty<'a, Ordering>> { match (self, other) { (Self::Operation(_), _) => None, (Self::OperationOpt(_), _) => None, (Self::OperationList(_), _) => None, (Self::OperationId(_), _) => None, } } pub fn try_into_cmp_core( self, other: CoreTemplatePropertyKind<'a>, ) -> Option<BoxedTemplateProperty<'a, Ordering>> { match (self, other) { (Self::Operation(_), _) => None, (Self::OperationOpt(_), _) => None, (Self::OperationList(_), _) => None, (Self::OperationId(_), _) => None, } } } /// Tagged property types available in [`OperationTemplateLanguage`]. 
pub enum OperationTemplateLanguagePropertyKind { Core(CoreTemplatePropertyKind<'static>), Operation(OperationTemplatePropertyKind<'static>), } template_builder::impl_core_property_wrappers!(OperationTemplateLanguagePropertyKind => Core); impl_operation_property_wrappers!(OperationTemplateLanguagePropertyKind => Operation); impl CoreTemplatePropertyVar<'static> for OperationTemplateLanguagePropertyKind { fn wrap_template(template: Box<dyn Template>) -> Self { Self::Core(CoreTemplatePropertyKind::wrap_template(template)) } fn wrap_list_template(template: Box<dyn ListTemplate>) -> Self { Self::Core(CoreTemplatePropertyKind::wrap_list_template(template)) } fn type_name(&self) -> &'static str { match self { Self::Core(property) => property.type_name(), Self::Operation(property) => property.type_name(), } } fn try_into_boolean(self) -> Option<BoxedTemplateProperty<'static, bool>> { match self { Self::Core(property) => property.try_into_boolean(), Self::Operation(property) => property.try_into_boolean(), } } fn try_into_integer(self) -> Option<BoxedTemplateProperty<'static, i64>> { match self { Self::Core(property) => property.try_into_integer(), Self::Operation(property) => property.try_into_integer(), } } fn try_into_stringify(self) -> Option<BoxedTemplateProperty<'static, String>> { match self { Self::Core(property) => property.try_into_stringify(), Self::Operation(property) => property.try_into_stringify(), } } fn try_into_serialize(self) -> Option<BoxedSerializeProperty<'static>> { match self { Self::Core(property) => property.try_into_serialize(), Self::Operation(property) => property.try_into_serialize(), } } fn try_into_template(self) -> Option<Box<dyn Template>> { match self { Self::Core(property) => property.try_into_template(), Self::Operation(property) => property.try_into_template(), } } fn try_into_eq(self, other: Self) -> Option<BoxedTemplateProperty<'static, bool>> { match (self, other) { (Self::Core(lhs), Self::Core(rhs)) => lhs.try_into_eq(rhs), 
(Self::Core(lhs), Self::Operation(rhs)) => rhs.try_into_eq_core(lhs), (Self::Operation(lhs), Self::Core(rhs)) => lhs.try_into_eq_core(rhs), (Self::Operation(lhs), Self::Operation(rhs)) => lhs.try_into_eq(rhs), } } fn try_into_cmp(self, other: Self) -> Option<BoxedTemplateProperty<'static, Ordering>> { match (self, other) { (Self::Core(lhs), Self::Core(rhs)) => lhs.try_into_cmp(rhs), (Self::Core(lhs), Self::Operation(rhs)) => rhs .try_into_cmp_core(lhs) .map(|property| property.map(Ordering::reverse).into_dyn()), (Self::Operation(lhs), Self::Core(rhs)) => lhs.try_into_cmp_core(rhs), (Self::Operation(lhs), Self::Operation(rhs)) => lhs.try_into_cmp(rhs), } } } impl OperationTemplatePropertyVar<'static> for OperationTemplateLanguagePropertyKind {} /// Symbol table for the operation template property types. pub struct OperationTemplateBuildFnTable<'a, L: ?Sized, P = <L as TemplateLanguage<'a>>::Property> { pub operation_methods: TemplateBuildMethodFnMap<'a, L, Operation, P>, pub operation_list_methods: TemplateBuildMethodFnMap<'a, L, Vec<Operation>, P>, pub operation_id_methods: TemplateBuildMethodFnMap<'a, L, OperationId, P>, } impl<L: ?Sized, P> OperationTemplateBuildFnTable<'_, L, P> { pub fn empty() -> Self { Self { operation_methods: HashMap::new(), operation_list_methods: HashMap::new(), operation_id_methods: HashMap::new(), } } pub fn merge(&mut self, other: Self) { let Self { operation_methods, operation_list_methods, operation_id_methods, } = other; merge_fn_map(&mut self.operation_methods, operation_methods); merge_fn_map(&mut self.operation_list_methods, operation_list_methods); merge_fn_map(&mut self.operation_id_methods, operation_id_methods); } } impl<'a, L> OperationTemplateBuildFnTable<'a, L, L::Property> where L: TemplateLanguage<'a> + OperationTemplateEnvironment + ?Sized, L::Property: OperationTemplatePropertyVar<'a>, { /// Creates new symbol table containing the builtin methods. 
pub fn builtin() -> Self { Self { operation_methods: builtin_operation_methods(), operation_list_methods: template_builder::builtin_unformattable_list_methods(), operation_id_methods: builtin_operation_id_methods(), } } /// Applies the method call node `function` to the given `property` by using /// this symbol table. pub fn build_method( &self, language: &L, diagnostics: &mut TemplateDiagnostics, build_ctx: &BuildContext<L::Property>, property: OperationTemplatePropertyKind<'a>, function: &FunctionCallNode, ) -> TemplateParseResult<L::Property> { let type_name = property.type_name(); match property { OperationTemplatePropertyKind::Operation(property) => { let table = &self.operation_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, property, function) } OperationTemplatePropertyKind::OperationOpt(property) => { let type_name = "Operation"; let table = &self.operation_methods; let build = template_parser::lookup_method(type_name, table, function)?; let inner_property = property.try_unwrap(type_name).into_dyn(); build(language, diagnostics, build_ctx, inner_property, function) } OperationTemplatePropertyKind::OperationList(property) => { let table = &self.operation_list_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, property, function) } OperationTemplatePropertyKind::OperationId(property) => { let table = &self.operation_id_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, property, function) } } } } /// Symbol table of methods available in [`OperationTemplateLanguage`]. 
pub struct OperationTemplateLanguageBuildFnTable { pub core: CoreTemplateBuildFnTable<'static, OperationTemplateLanguage>, pub operation: OperationTemplateBuildFnTable<'static, OperationTemplateLanguage>, } impl OperationTemplateLanguageBuildFnTable { pub fn empty() -> Self { Self { core: CoreTemplateBuildFnTable::empty(), operation: OperationTemplateBuildFnTable::empty(), } } fn merge(&mut self, other: Self) { let Self { core, operation } = other; self.core.merge(core); self.operation.merge(operation); } /// Creates new symbol table containing the builtin methods. fn builtin() -> Self { Self { core: CoreTemplateBuildFnTable::builtin(), operation: OperationTemplateBuildFnTable::builtin(), } } } fn builtin_operation_methods<'a, L>() -> TemplateBuildMethodFnMap<'a, L, Operation> where L: TemplateLanguage<'a> + OperationTemplateEnvironment + ?Sized, L::Property: OperationTemplatePropertyVar<'a>, { // Not using maplit::hashmap!{} or custom declarative macro here because // code completion inside macro is quite restricted. 
let mut map = TemplateBuildMethodFnMap::<L, Operation>::new(); map.insert( "current_operation", |language, _diagnostics, _build_ctx, self_property, function| { function.expect_no_arguments()?; let current_op_id = language.current_op_id().cloned(); let out_property = self_property.map(move |op| Some(op.id()) == current_op_id.as_ref()); Ok(out_property.into_dyn_wrapped()) }, ); map.insert( "description", |_language, _diagnostics, _build_ctx, self_property, function| { function.expect_no_arguments()?; let out_property = self_property.map(|op| op.metadata().description.clone()); Ok(out_property.into_dyn_wrapped()) }, ); map.insert( "id", |_language, _diagnostics, _build_ctx, self_property, function| { function.expect_no_arguments()?; let out_property = self_property.map(|op| op.id().clone()); Ok(out_property.into_dyn_wrapped()) }, ); map.insert( "tags", |_language, _diagnostics, _build_ctx, self_property, function| { function.expect_no_arguments()?; let out_property = self_property.map(|op| { // TODO: introduce map type op.metadata() .tags .iter() .map(|(key, value)| format!("{key}: {value}")) .join("\n") }); Ok(out_property.into_dyn_wrapped()) }, ); map.insert( "snapshot", |_language, _diagnostics, _build_ctx, self_property, function| { function.expect_no_arguments()?; let out_property = self_property.map(|op| op.metadata().is_snapshot); Ok(out_property.into_dyn_wrapped()) }, ); map.insert( "time", |_language, _diagnostics, _build_ctx, self_property, function| { function.expect_no_arguments()?; let out_property = self_property.map(|op| op.metadata().time.clone()); Ok(out_property.into_dyn_wrapped()) }, ); map.insert( "user", |_language, _diagnostics, _build_ctx, self_property, function| { function.expect_no_arguments()?; let out_property = self_property.map(|op| { // TODO: introduce dedicated type and provide accessors? 
format!("{}@{}", op.metadata().username, op.metadata().hostname) }); Ok(out_property.into_dyn_wrapped()) }, ); map.insert( "root", |language, _diagnostics, _build_ctx, self_property, function| { function.expect_no_arguments()?; let op_store = language.repo_loader().op_store(); let root_op_id = op_store.root_operation_id().clone(); let out_property = self_property.map(move |op| op.id() == &root_op_id); Ok(out_property.into_dyn_wrapped()) }, ); map.insert( "parents", |_language, _diagnostics, _build_ctx, self_property, function| { function.expect_no_arguments()?; let out_property = self_property.and_then(|op| { let ops: Vec<_> = op.parents().try_collect()?; Ok(ops) }); Ok(out_property.into_dyn_wrapped()) }, ); map } impl Template for OperationId { fn format(&self, formatter: &mut TemplateFormatter) -> io::Result<()> { write!(formatter, "{}", self.hex()) } } fn builtin_operation_id_methods<'a, L>() -> TemplateBuildMethodFnMap<'a, L, OperationId> where L: TemplateLanguage<'a> + OperationTemplateEnvironment + ?Sized, L::Property: OperationTemplatePropertyVar<'a>, { // Not using maplit::hashmap!{} or custom declarative macro here because // code completion inside macro is quite restricted. let mut map = TemplateBuildMethodFnMap::<L, OperationId>::new(); map.insert( "short", |language, diagnostics, build_ctx, self_property, function| { let ([], [len_node]) = function.expect_arguments()?; let len_property = len_node .map(|node| { template_builder::expect_usize_expression( language, diagnostics, build_ctx, node, ) }) .transpose()?; let out_property = (self_property, len_property).map(|(id, len)| { let mut hex = id.hex(); hex.truncate(len.unwrap_or(12)); hex }); Ok(out_property.into_dyn_wrapped()) }, ); map }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/progress.rs
cli/src/progress.rs
use std::path::Path; use std::sync::Mutex; use std::time::Duration; use std::time::Instant; use crossterm::terminal::Clear; use crossterm::terminal::ClearType; use jj_lib::repo_path::RepoPath; use crate::text_util; use crate::ui::OutputGuard; use crate::ui::ProgressOutput; use crate::ui::Ui; pub const UPDATE_HZ: u32 = 30; pub const INITIAL_DELAY: Duration = Duration::from_millis(250); pub fn snapshot_progress(ui: &Ui) -> Option<impl Fn(&RepoPath) + use<>> { struct State { guard: Option<OutputGuard>, output: ProgressOutput<std::io::Stderr>, next_display_time: Instant, } let output = ui.progress_output()?; // Don't clutter the output during fast operations. let next_display_time = Instant::now() + INITIAL_DELAY; let state = Mutex::new(State { guard: None, output, next_display_time, }); Some(move |path: &RepoPath| { let mut state = state.lock().unwrap(); let now = Instant::now(); if now < state.next_display_time { // Future work: Display current path after exactly, say, 250ms has elapsed, to // better handle large single files return; } state.next_display_time = now + Duration::from_secs(1) / UPDATE_HZ; if state.guard.is_none() { state.guard = Some( state .output .output_guard(format!("\r{}", Clear(ClearType::CurrentLine))), ); } let line_width = state.output.term_width().map(usize::from).unwrap_or(80); let max_path_width = line_width.saturating_sub(13); // Account for "Snapshotting " let fs_path = path.to_fs_path_unchecked(Path::new("")); let (display_path, _) = text_util::elide_start(fs_path.to_str().unwrap(), "...", max_path_width); write!( state.output, "\r{}Snapshotting {display_path}", Clear(ClearType::CurrentLine), ) .ok(); state.output.flush().ok(); }) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/cleanup_guard.rs
cli/src/cleanup_guard.rs
use std::io; use std::sync::Mutex; use std::sync::Once; use slab::Slab; use tracing::instrument; /// Contains the callbacks passed to currently-live [`CleanupGuard`]s static LIVE_GUARDS: Mutex<GuardTable> = Mutex::new(Slab::new()); type GuardTable = Slab<Box<dyn FnOnce() + Send>>; /// Prepare to run [`CleanupGuard`]s on `SIGINT`/`SIGTERM` pub fn init() { // Safety: `` ensures at most one call static CALLED: Once = Once::new(); CALLED.call_once(|| { if let Err(ref e) = unsafe { platform::init() } { eprintln!("couldn't register signal handler: {e}"); } }); } /// A drop guard that also runs on `SIGINT`/`SIGTERM` pub struct CleanupGuard { slot: usize, } impl CleanupGuard { /// Invoke `f` when dropped or killed by `SIGINT`/`SIGTERM` pub fn new<F: FnOnce() + Send + 'static>(f: F) -> Self { let guards = &mut *LIVE_GUARDS.lock().unwrap(); Self { slot: guards.insert(Box::new(f)), } } } impl Drop for CleanupGuard { #[instrument(skip_all)] fn drop(&mut self) { let guards = &mut *LIVE_GUARDS.lock().unwrap(); let f = guards.remove(self.slot); f(); } } #[cfg(unix)] mod platform { use std::os::unix::io::IntoRawFd as _; use std::os::unix::io::RawFd; use std::os::unix::net::UnixDatagram; use std::panic::AssertUnwindSafe; use std::sync::atomic::AtomicBool; use std::sync::atomic::Ordering; use std::thread; use libc::SIGINT; use libc::SIGTERM; use libc::c_int; use super::*; /// Safety: Must be called at most once pub unsafe fn init() -> io::Result<()> { unsafe { let (send, recv) = UnixDatagram::pair()?; // Spawn a background thread that waits for the signal handler to write a signal // into it thread::spawn(move || { let mut buf = [0]; let signal = match recv.recv(&mut buf) { Ok(1) => c_int::from(buf[0]), _ => unreachable!(), }; // We must hold the lock for the remainder of the process's lifetime to avoid a // race where a guard is created between `on_signal` and `raise`. 
let guards = &mut *LIVE_GUARDS.lock().unwrap(); if let Err(e) = std::panic::catch_unwind(AssertUnwindSafe(|| on_signal(guards))) { match e.downcast::<String>() { Ok(s) => eprintln!("signal handler panicked: {s}"), Err(_) => eprintln!("signal handler panicked"), } } libc::signal(signal, libc::SIG_DFL); libc::raise(signal); }); SIGNAL_SEND = send.into_raw_fd(); libc::signal(SIGINT, handler as *const () as libc::sighandler_t); libc::signal(SIGTERM, handler as *const () as libc::sighandler_t); Ok(()) } } // Invoked on a background thread. Process exits after this returns. fn on_signal(guards: &mut GuardTable) { for guard in guards.drain() { guard(); } } unsafe extern "C" fn handler(signal: c_int) { unsafe { // Treat the second signal as instantly fatal. static SIGNALED: AtomicBool = AtomicBool::new(false); if SIGNALED.swap(true, Ordering::Relaxed) { libc::signal(signal, libc::SIG_DFL); libc::raise(signal); } let buf = [signal as u8]; libc::write(SIGNAL_SEND, buf.as_ptr().cast(), buf.len()); } } static mut SIGNAL_SEND: RawFd = 0; } #[cfg(not(unix))] mod platform { use super::*; /// Safety: this function is safe to call, but is marked as unsafe to have /// the same signature as other `init` functions in other platforms. pub unsafe fn init() -> io::Result<()> { Ok(()) } }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/time_util.rs
cli/src/time_util.rs
use std::sync::LazyLock; use chrono::format::StrftimeItems; use jj_lib::backend::Timestamp; use jj_lib::backend::TimestampOutOfRange; /// Parsed formatting items which should never contain an error. #[derive(Clone, Debug, Eq, PartialEq)] pub struct FormattingItems<'a> { items: Vec<chrono::format::Item<'a>>, } impl<'a> FormattingItems<'a> { /// Parses strftime-like format string. pub fn parse(format: &'a str) -> Option<Self> { // If the parsed format contained an error, format().to_string() would panic. let items = StrftimeItems::new(format) .map(|item| match item { chrono::format::Item::Error => None, _ => Some(item), }) .collect::<Option<_>>()?; Some(FormattingItems { items }) } pub fn into_owned(self) -> FormattingItems<'static> { use chrono::format::Item; let items = self .items .into_iter() .map(|item| match item { Item::Literal(s) => Item::OwnedLiteral(s.into()), Item::OwnedLiteral(s) => Item::OwnedLiteral(s), Item::Space(s) => Item::OwnedSpace(s.into()), Item::OwnedSpace(s) => Item::OwnedSpace(s), Item::Numeric(spec, pad) => Item::Numeric(spec, pad), Item::Fixed(spec) => Item::Fixed(spec), Item::Error => Item::Error, // shouldn't exist, but just copy }) .collect(); FormattingItems { items } } } pub fn format_absolute_timestamp(timestamp: &Timestamp) -> Result<String, TimestampOutOfRange> { static DEFAULT_FORMAT: LazyLock<FormattingItems> = LazyLock::new(|| FormattingItems::parse("%Y-%m-%d %H:%M:%S.%3f %:z").unwrap()); format_absolute_timestamp_with(timestamp, &DEFAULT_FORMAT) } pub fn format_absolute_timestamp_with( timestamp: &Timestamp, format: &FormattingItems, ) -> Result<String, TimestampOutOfRange> { let datetime = timestamp.to_datetime()?; Ok(datetime.format_with_items(format.items.iter()).to_string()) } pub fn format_duration( from: &Timestamp, to: &Timestamp, format: &timeago::Formatter, ) -> Result<String, TimestampOutOfRange> { let duration = to .to_datetime()? .signed_duration_since(from.to_datetime()?) 
.to_std() .map_err(|_: chrono::OutOfRangeError| TimestampOutOfRange)?; Ok(format.convert(duration)) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/git_util.rs
cli/src/git_util.rs
// Copyright 2024 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

//! Git utilities shared by various commands.

use std::error;
use std::io;
use std::io::Write as _;
use std::iter;
use std::mem;
use std::path::Path;
use std::time::Duration;
use std::time::Instant;

use crossterm::terminal::Clear;
use crossterm::terminal::ClearType;
use indoc::writedoc;
use itertools::Itertools as _;
use jj_lib::fmt_util::binary_prefix;
use jj_lib::git;
use jj_lib::git::FailedRefExportReason;
use jj_lib::git::GitExportStats;
use jj_lib::git::GitImportOptions;
use jj_lib::git::GitImportStats;
use jj_lib::git::GitRefKind;
use jj_lib::git::GitSettings;
use jj_lib::op_store::RefTarget;
use jj_lib::op_store::RemoteRef;
use jj_lib::ref_name::RemoteRefSymbol;
use jj_lib::repo::ReadonlyRepo;
use jj_lib::repo::Repo;
use jj_lib::settings::RemoteSettingsMap;
use jj_lib::workspace::Workspace;
use unicode_width::UnicodeWidthStr as _;

use crate::cleanup_guard::CleanupGuard;
use crate::command_error::CommandError;
use crate::command_error::cli_error;
use crate::command_error::user_error;
use crate::formatter::Formatter;
use crate::formatter::FormatterExt as _;
use crate::revset_util::parse_remote_auto_track_bookmarks_map;
use crate::ui::ProgressOutput;
use crate::ui::Ui;

/// Returns true if the workspace shares its working directory with the
/// backing Git repository (either the Git workdir IS the workspace root, or
/// the workspace's ".git" resolves to the backing repository).
pub fn is_colocated_git_workspace(workspace: &Workspace, repo: &ReadonlyRepo) -> bool {
    let Ok(git_backend) = git::get_git_backend(repo.store()) else {
        return false;
    };
    let Some(git_workdir) = git_backend.git_workdir() else {
        return false; // Bare repository
    };
    if git_workdir == workspace.workspace_root() {
        return true;
    }
    // Colocated workspace should have ".git" directory, file, or symlink. Compare
    // its parent as the git_workdir might be resolved from the real ".git" path.
    let Ok(dot_git_path) = dunce::canonicalize(workspace.workspace_root().join(".git")) else {
        return false;
    };
    dunce::canonicalize(git_workdir).ok().as_deref() == dot_git_path.parent()
}

/// Parses user-specified remote URL or path to absolute form.
pub fn absolute_git_url(cwd: &Path, source: &str) -> Result<String, CommandError> {
    // Git appears to turn URL-like source to absolute path if local git directory
    // exits, and fails because '$PWD/https' is unsupported protocol. Since it would
    // be tedious to copy the exact git (or libgit2) behavior, we simply let gix
    // parse the input as URL, rcp-like, or local path.
    let mut url = gix::url::parse(source.as_ref()).map_err(cli_error)?;
    url.canonicalize(cwd).map_err(user_error)?;
    // As of gix 0.68.0, the canonicalized path uses platform-native directory
    // separator, which isn't compatible with libgit2 on Windows.
    if url.scheme == gix::url::Scheme::File {
        url.path = gix::path::to_unix_separators_on_windows(mem::take(&mut url.path)).into_owned();
    }
    // It's less likely that cwd isn't utf-8, so just fall back to original source.
    Ok(String::from_utf8(url.to_bstring().into()).unwrap_or_else(|_| source.to_owned()))
}

// Based on Git's implementation: https://github.com/git/git/blob/43072b4ca132437f21975ac6acc6b72dc22fd398/sideband.c#L178
/// Buffers and re-emits Git sideband progress messages, prefixing each line
/// with "remote: " and appending a clear-to-eol (or padding) suffix so that
/// successive '\r'-terminated updates overwrite each other cleanly.
pub struct GitSidebandProgressMessageWriter {
    // Byte prefix written at the start of every output line ("remote: ").
    display_prefix: &'static [u8],
    // Appended to nonempty lines: ANSI clear-to-eol on a terminal, spaces otherwise.
    suffix: &'static [u8],
    // Holds a partially received line across write() calls.
    scratch: Vec<u8>,
}

impl GitSidebandProgressMessageWriter {
    /// Creates a writer; the suffix depends on whether progress indication
    /// (i.e. a terminal) is in use.
    pub fn new(ui: &Ui) -> Self {
        let is_terminal = ui.use_progress_indicator();
        Self {
            display_prefix: "remote: ".as_bytes(),
            // NOTE(review): Git's sideband.c uses an 8-space "dumb" suffix for
            // non-terminals; confirm the intended literal width here, as the
            // source this was transcribed from had whitespace collapsed.
            suffix: if is_terminal { "\x1B[K" } else { " " }.as_bytes(),
            scratch: Vec::new(),
        }
    }

    /// Splits `progress_message` on CR/LF boundaries, emitting each complete
    /// line (prefixed and suffixed) to `ui.status()`, and buffering any
    /// trailing partial line until the next call or `flush()`.
    pub fn write(&mut self, ui: &Ui, progress_message: &[u8]) -> std::io::Result<()> {
        let mut index = 0;
        // Append a suffix to each nonempty line to clear the end of the screen line.
        loop {
            let Some(i) = progress_message[index..]
                .iter()
                .position(|&c| c == b'\r' || c == b'\n')
                .map(|i| index + i)
            else {
                break;
            };
            let line_length = i - index;
            // For messages sent across the packet boundary, there would be a nonempty
            // "scratch" buffer from last call of this function, and there may be a leading
            // CR/LF in this message. For this case we should add a clear-to-eol suffix to
            // clean leftover letters we previously have written on the same line.
            if !self.scratch.is_empty() && line_length == 0 {
                self.scratch.extend_from_slice(self.suffix);
            }
            if self.scratch.is_empty() {
                self.scratch.extend_from_slice(self.display_prefix);
            }
            // Do not add the clear-to-eol suffix to empty lines:
            // For progress reporting we may receive a bunch of percentage updates
            // followed by '\r' to remain on the same line, and at the end receive a single
            // '\n' to move to the next line. We should preserve the final
            // status report line by not appending clear-to-eol suffix to this single line
            // break.
            if line_length > 0 {
                self.scratch.extend_from_slice(&progress_message[index..i]);
                self.scratch.extend_from_slice(self.suffix);
            }
            // Emit the line including its terminating CR or LF byte.
            self.scratch.extend_from_slice(&progress_message[i..i + 1]);
            ui.status().write_all(&self.scratch)?;
            self.scratch.clear();
            index = i + 1;
        }

        // Add leftover message to "scratch" buffer to be printed in next call.
        if index < progress_message.len() {
            if self.scratch.is_empty() {
                self.scratch.extend_from_slice(self.display_prefix);
            }
            self.scratch.extend_from_slice(&progress_message[index..]);
        }

        Ok(())
    }

    /// Emits any buffered partial line, terminated with a newline.
    pub fn flush(&mut self, ui: &Ui) -> std::io::Result<()> {
        if !self.scratch.is_empty() {
            self.scratch.push(b'\n');
            ui.status().write_all(&self.scratch)?;
            self.scratch.clear();
        }
        Ok(())
    }
}

/// Runs `f` with remote callbacks wired up for progress display: a local
/// progress-bar callback (if the UI has progress output) and a sideband
/// message writer, which is flushed after `f` returns.
pub fn with_remote_git_callbacks<T>(ui: &Ui, f: impl FnOnce(git::RemoteCallbacks<'_>) -> T) -> T {
    let mut callbacks = git::RemoteCallbacks::default();
    let mut progress_callback;
    if let Some(mut output) = ui.progress_output() {
        let mut progress = Progress::new(Instant::now());
        progress_callback = move |x: &git::Progress| {
            // Ignore I/O errors from progress rendering; it's cosmetic only.
            progress.update(Instant::now(), x, &mut output).ok();
        };
        callbacks.progress = Some(&mut progress_callback);
    }
    let mut sideband_progress_writer = GitSidebandProgressMessageWriter::new(ui);
    let mut sideband_progress_callback = |progress_message: &[u8]| {
        sideband_progress_writer.write(ui, progress_message).ok();
    };
    callbacks.sideband_progress = Some(&mut sideband_progress_callback);
    let result = f(callbacks);
    sideband_progress_writer.flush(ui).ok();
    result
}

/// Builds `GitImportOptions` from Git and per-remote settings.
pub fn load_git_import_options(
    ui: &Ui,
    git_settings: &GitSettings,
    remote_settings: &RemoteSettingsMap,
) -> Result<GitImportOptions, CommandError> {
    Ok(GitImportOptions {
        auto_local_bookmark: git_settings.auto_local_bookmark,
        abandon_unreachable_commits: git_settings.abandon_unreachable_commits,
        remote_auto_track_bookmarks: parse_remote_auto_track_bookmarks_map(ui, remote_settings)?,
    })
}

/// Prints a summary of a Git import: optional per-ref status lines, a count
/// of abandoned commits, failed ref names, and a hint when a reserved remote
/// name caused failures.
pub fn print_git_import_stats(
    ui: &Ui,
    repo: &dyn Repo,
    stats: &GitImportStats,
    show_ref_stats: bool,
) -> Result<(), CommandError> {
    let Some(mut formatter) = ui.status_formatter() else {
        return Ok(());
    };
    if show_ref_stats {
        for (kind, changes) in [
            (GitRefKind::Bookmark, &stats.changed_remote_bookmarks),
            (GitRefKind::Tag, &stats.changed_remote_tags),
        ] {
            let refs_stats = changes
                .iter()
                .map(|(symbol, (remote_ref, ref_target))| {
                    RefStatus::new(kind, symbol.as_ref(), remote_ref, ref_target, repo)
                })
                .collect_vec();

            // Align the status columns on the widest symbol; skip empty groups.
            let Some(max_width) = refs_stats.iter().map(|x| x.symbol.width()).max() else {
                continue;
            };

            for status in refs_stats {
                status.output(max_width, &mut *formatter)?;
            }
        }
    }

    if !stats.abandoned_commits.is_empty() {
        writeln!(
            formatter,
            "Abandoned {} commits that are no longer reachable.",
            stats.abandoned_commits.len()
        )?;
    }

    if !stats.failed_ref_names.is_empty() {
        writeln!(ui.warning_default(), "Failed to import some Git refs:")?;
        let mut formatter = ui.stderr_formatter();
        for name in &stats.failed_ref_names {
            // NOTE(review): list-indent literal; source had whitespace
            // collapsed, so the exact number of spaces is unconfirmed.
            write!(formatter, " ")?;
            write!(formatter.labeled("git_ref"), "{name}")?;
            writeln!(formatter)?;
        }
    }

    if stats
        .failed_ref_names
        .iter()
        .any(|name| name.starts_with(git::RESERVED_REMOTE_REF_NAMESPACE.as_bytes()))
    {
        writedoc!(
            ui.hint_default(),
            "
            Git remote named '{name}' is reserved for local Git repository.
            Use `jj git remote rename` to give a different name.
            ",
            name = git::REMOTE_NAME_FOR_LOCAL_GIT_REPO.as_symbol(),
        )?;
    }

    Ok(())
}

/// Renders a single-line terminal progress bar (percentage, bytes, rate, bar)
/// for Git data transfer, rate-limited to the configured update frequency.
pub struct Progress {
    // Earliest time at which the next repaint is allowed.
    next_print: Instant,
    // Smoothed download-rate estimator.
    rate: RateEstimate,
    // Reusable line buffer to avoid per-update allocation.
    buffer: String,
    // Restores the cursor (Show) on drop/cleanup once we've hidden it.
    guard: Option<CleanupGuard>,
}

impl Progress {
    pub fn new(now: Instant) -> Self {
        Self {
            next_print: now + crate::progress::INITIAL_DELAY,
            rate: RateEstimate::new(),
            buffer: String::new(),
            guard: None,
        }
    }

    /// Repaints the progress line if enough time has elapsed; clears the line
    /// when overall progress reaches 1.0.
    pub fn update<W: std::io::Write>(
        &mut self,
        now: Instant,
        progress: &git::Progress,
        output: &mut ProgressOutput<W>,
    ) -> io::Result<()> {
        use std::fmt::Write as _;

        if progress.overall == 1.0 {
            write!(output, "\r{}", Clear(ClearType::CurrentLine))?;
            output.flush()?;
            return Ok(());
        }

        // Feed the rate estimator on every call, even when we skip painting.
        let rate = progress
            .bytes_downloaded
            .and_then(|x| self.rate.update(now, x));
        if now < self.next_print {
            return Ok(());
        }
        self.next_print = now + Duration::from_secs(1) / crate::progress::UPDATE_HZ;

        // On first paint, hide the cursor and register a cleanup that shows it
        // again on exit.
        if self.guard.is_none() {
            let guard = output.output_guard(crossterm::cursor::Show.to_string());
            let guard = CleanupGuard::new(move || {
                drop(guard);
            });
            write!(output, "{}", crossterm::cursor::Hide).ok();
            self.guard = Some(guard);
        }

        self.buffer.clear();
        // Overwrite the current local or sideband progress line if any.
        self.buffer.push('\r');
        let control_chars = self.buffer.len();
        write!(self.buffer, "{: >3.0}% ", 100.0 * progress.overall).unwrap();
        if let Some(total) = progress.bytes_downloaded {
            let (scaled, prefix) = binary_prefix(total as f32);
            write!(self.buffer, "{scaled: >5.1} {prefix}B ").unwrap();
        }
        if let Some(estimate) = rate {
            let (scaled, prefix) = binary_prefix(estimate);
            write!(self.buffer, "at {scaled: >5.1} {prefix}B/s ").unwrap();
        }
        // Remaining terminal width (minus the brackets) goes to the bar;
        // control characters don't occupy columns, so exclude them.
        let bar_width = output
            .term_width()
            .map(usize::from)
            .unwrap_or(0)
            .saturating_sub(self.buffer.len() - control_chars + 2);
        self.buffer.push('[');
        draw_progress(progress.overall, &mut self.buffer, bar_width);
        self.buffer.push(']');

        write!(self.buffer, "{}", Clear(ClearType::UntilNewLine)).unwrap();
        // Move cursor back to the first column so the next sideband message
        // will overwrite the current progress.
        self.buffer.push('\r');
        write!(output, "{}", self.buffer)?;
        output.flush()?;
        Ok(())
    }
}

/// Appends a `width`-column bar for `progress` (0.0..=1.0) to `buffer`,
/// using eighth-block characters for sub-cell resolution.
fn draw_progress(progress: f32, buffer: &mut String, width: usize) {
    const CHARS: [char; 9] = [' ', '▏', '▎', '▍', '▌', '▋', '▊', '▉', '█'];
    const RESOLUTION: usize = CHARS.len() - 1;
    let ticks = (width as f32 * progress.clamp(0.0, 1.0) * RESOLUTION as f32).round() as usize;
    let whole = ticks / RESOLUTION;
    for _ in 0..whole {
        buffer.push(CHARS[CHARS.len() - 1]);
    }
    if whole < width {
        let fraction = ticks % RESOLUTION;
        buffer.push(CHARS[fraction]);
    }
    for _ in (whole + 1)..width {
        buffer.push(CHARS[0]);
    }
}

/// Exponentially smoothed download-rate estimator; `None` state until the
/// first sample arrives.
struct RateEstimate {
    state: Option<RateEstimateState>,
}

impl RateEstimate {
    pub fn new() -> Self {
        Self { state: None }
    }

    /// Compute smoothed rate from an update
    pub fn update(&mut self, now: Instant, total: u64) -> Option<f32> {
        if let Some(ref mut state) = self.state {
            return Some(state.update(now, total));
        }
        // First sample only establishes a baseline; no rate can be computed yet.
        self.state = Some(RateEstimateState {
            total,
            avg_rate: None,
            last_sample: now,
        });
        None
    }
}

struct RateEstimateState {
    // Cumulative byte count at the last sample.
    total: u64,
    // Exponentially weighted average rate in bytes/sec, once established.
    avg_rate: Option<f32>,
    last_sample: Instant,
}

impl RateEstimateState {
    fn update(&mut self, now: Instant, total: u64) -> f32 {
        let delta = total - self.total;
        self.total = total;
        let dt = now - self.last_sample;
        self.last_sample = now;
        let sample = delta as f32 / dt.as_secs_f32();
        match self.avg_rate {
            None => *self.avg_rate.insert(sample),
            Some(ref mut avg_rate) => {
                // From Algorithms for Unevenly Spaced Time Series: Moving
                // Averages and Other Rolling Operators (Andreas Eckner, 2019)
                const TIME_WINDOW: f32 = 2.0;
                let alpha = 1.0 - (-dt.as_secs_f32() / TIME_WINDOW).exp();
                *avg_rate += alpha * (sample - *avg_rate);
                *avg_rate
            }
        }
    }
}

/// Per-ref import status line: symbol, tracked/untracked, new/deleted/updated.
struct RefStatus {
    ref_kind: GitRefKind,
    symbol: String,
    tracking_status: TrackingStatus,
    import_status: ImportStatus,
}

impl RefStatus {
    fn new(
        ref_kind: GitRefKind,
        symbol: RemoteRefSymbol<'_>,
        remote_ref: &RemoteRef,
        ref_target: &RefTarget,
        repo: &dyn Repo,
    ) -> Self {
        let tracking_status = match ref_kind {
            GitRefKind::Bookmark => {
                if repo.view().get_remote_bookmark(symbol).is_tracked() {
                    TrackingStatus::Tracked
                } else {
                    TrackingStatus::Untracked
                }
            }
            // Tags have no tracking concept.
            GitRefKind::Tag => TrackingStatus::NotApplicable,
        };

        // Absent-before/present-after => new; present-before/absent-after =>
        // deleted; anything else is an update.
        let import_status = match (remote_ref.target.is_absent(), ref_target.is_absent()) {
            (true, false) => ImportStatus::New,
            (false, true) => ImportStatus::Deleted,
            _ => ImportStatus::Updated,
        };

        Self {
            symbol: symbol.to_string(),
            tracking_status,
            import_status,
            ref_kind,
        }
    }

    /// Writes one aligned status line; `max_symbol_width` is the display width
    /// to pad the symbol column to.
    fn output(&self, max_symbol_width: usize, out: &mut dyn Formatter) -> std::io::Result<()> {
        let tracking_status = match self.tracking_status {
            TrackingStatus::Tracked => "tracked",
            TrackingStatus::Untracked => "untracked",
            TrackingStatus::NotApplicable => "",
        };

        let import_status = match self.import_status {
            ImportStatus::New => "new",
            ImportStatus::Deleted => "deleted",
            ImportStatus::Updated => "updated",
        };

        let symbol_width = self.symbol.width();
        let pad_width = max_symbol_width.saturating_sub(symbol_width);
        let padded_symbol = format!("{}{:>pad_width$}", self.symbol, "", pad_width = pad_width);

        let label = match self.ref_kind {
            GitRefKind::Bookmark => "bookmark",
            GitRefKind::Tag => "tag",
        };

        write!(out, "{label}: ")?;
        write!(out.labeled(label), "{padded_symbol}")?;
        writeln!(out, " [{import_status}] {tracking_status}")
    }
}

enum TrackingStatus {
    Tracked,
    Untracked,
    NotApplicable, // for tags
}

enum ImportStatus {
    New,
    Deleted,
    Updated,
}

/// Prints bookmarks/tags that failed to export, with their error chains, and
/// a hint when the failure looks like Git's nested-ref-name restriction.
pub fn print_git_export_stats(ui: &Ui, stats: &GitExportStats) -> Result<(), std::io::Error> {
    if !stats.failed_bookmarks.is_empty() {
        writeln!(ui.warning_default(), "Failed to export some bookmarks:")?;
        let mut formatter = ui.stderr_formatter();
        for (symbol, reason) in &stats.failed_bookmarks {
            // NOTE(review): list-indent literal; exact space count unconfirmed
            // (whitespace was collapsed in the transcription source).
            write!(formatter, " ")?;
            write!(formatter.labeled("bookmark"), "{symbol}")?;
            // Walk the error source chain so the full cause is shown inline.
            for err in iter::successors(Some(reason as &dyn error::Error), |err| err.source()) {
                write!(formatter, ": {err}")?;
            }
            writeln!(formatter)?;
        }
    }
    if !stats.failed_tags.is_empty() {
        writeln!(ui.warning_default(), "Failed to export some tags:")?;
        let mut formatter = ui.stderr_formatter();
        for (symbol, reason) in &stats.failed_tags {
            write!(formatter, " ")?;
            write!(formatter.labeled("tag"), "{symbol}")?;
            for err in iter::successors(Some(reason as &dyn error::Error), |err| err.source()) {
                write!(formatter, ": {err}")?;
            }
            writeln!(formatter)?;
        }
    }
    if itertools::chain(&stats.failed_bookmarks, &stats.failed_tags)
        .any(|(_, reason)| matches!(reason, FailedRefExportReason::FailedToSet(_)))
    {
        writedoc!(
            ui.hint_default(),
            r#"
            Git doesn't allow a branch/tag name that looks like a parent directory of
            another (e.g. `foo` and `foo/bar`). Try to rename the bookmarks/tags that
            failed to export or their "parent" bookmarks/tags.
            "#,
        )?;
    }
    Ok(())
}

#[cfg(test)]
mod tests {
    use std::path::MAIN_SEPARATOR;

    use insta::assert_snapshot;

    use super::*;

    #[test]
    fn test_absolute_git_url() {
        // gix::Url::canonicalize() works even if the path doesn't exist.
        // However, we need to ensure that no symlinks exist at the test paths.
        let temp_dir = testutils::new_temp_dir();
        let cwd = dunce::canonicalize(temp_dir.path()).unwrap();
        let cwd_slash = cwd.to_str().unwrap().replace(MAIN_SEPARATOR, "/");

        // Local path
        assert_eq!(
            absolute_git_url(&cwd, "foo").unwrap(),
            format!("{cwd_slash}/foo")
        );
        assert_eq!(
            absolute_git_url(&cwd, r"foo\bar").unwrap(),
            if cfg!(windows) {
                format!("{cwd_slash}/foo/bar")
            } else {
                format!(r"{cwd_slash}/foo\bar")
            }
        );
        assert_eq!(
            absolute_git_url(&cwd.join("bar"), &format!("{cwd_slash}/foo")).unwrap(),
            format!("{cwd_slash}/foo")
        );

        // rcp-like
        assert_eq!(
            absolute_git_url(&cwd, "git@example.org:foo/bar.git").unwrap(),
            "git@example.org:foo/bar.git"
        );

        // URL
        assert_eq!(
            absolute_git_url(&cwd, "https://example.org/foo.git").unwrap(),
            "https://example.org/foo.git"
        );

        // Custom scheme isn't an error
        assert_eq!(
            absolute_git_url(&cwd, "custom://example.org/foo.git").unwrap(),
            "custom://example.org/foo.git"
        );

        // Password shouldn't be redacted (gix::Url::to_string() would do)
        assert_eq!(
            absolute_git_url(&cwd, "https://user:pass@example.org/").unwrap(),
            "https://user:pass@example.org/"
        );
    }

    #[test]
    fn test_bar() {
        // Space runs below are reconstructed from draw_progress()'s output
        // widths (bar width 10): 0.0 => 10 spaces, 0.5 => 5 blocks + 5 spaces,
        // 0.54 => 5 blocks + one 3/8 cell + 4 spaces.
        let mut buf = String::new();
        draw_progress(0.0, &mut buf, 10);
        assert_eq!(buf, "          ");
        buf.clear();

        draw_progress(1.0, &mut buf, 10);
        assert_eq!(buf, "██████████");
        buf.clear();

        draw_progress(0.5, &mut buf, 10);
        assert_eq!(buf, "█████     ");
        buf.clear();

        draw_progress(0.54, &mut buf, 10);
        assert_eq!(buf, "█████▍    ");
        buf.clear();
    }

    #[test]
    fn test_update() {
        let start = Instant::now();
        let mut progress = Progress::new(start);
        let mut current_time = start;
        let mut update = |duration, overall| -> String {
            current_time += duration;
            let mut buf = vec![];
            let mut output = ProgressOutput::for_test(&mut buf, 25);
            progress
                .update(
                    current_time,
                    &jj_lib::git::Progress {
                        bytes_downloaded: None,
                        overall,
                    },
                    &mut output,
                )
                .unwrap();
            String::from_utf8(buf).unwrap()
        };

        // First output is after the initial delay
        assert_snapshot!(update(crate::progress::INITIAL_DELAY - Duration::from_millis(1), 0.1), @"");
        // Snapshot bar interiors reconstructed to the computed 18-column width
        // (terminal width 25 minus " 10% [" and "]").
        assert_snapshot!(update(Duration::from_millis(1), 0.10), @"\u{1b}[?25l\r 10% [█▊                ]\u{1b}[K");
        // No updates for the next 30 milliseconds
        assert_snapshot!(update(Duration::from_millis(10), 0.11), @"");
        assert_snapshot!(update(Duration::from_millis(10), 0.12), @"");
        assert_snapshot!(update(Duration::from_millis(10), 0.13), @"");
        // We get an update now that we go over the threshold
        assert_snapshot!(update(Duration::from_millis(100), 0.30), @"\r 30% [█████▍            ]\u{1b}[K");
        // Even though we went over by quite a bit, the new threshold is relative to the
        // previous output, so we don't get an update here
        assert_snapshot!(update(Duration::from_millis(30), 0.40), @"");
    }
}
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/template_builder.rs
cli/src/template_builder.rs
// Copyright 2020-2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::cmp::Ordering; use std::collections::HashMap; use std::io; use std::iter; use itertools::Itertools as _; use jj_lib::backend::Signature; use jj_lib::backend::Timestamp; use jj_lib::config::ConfigNamePathBuf; use jj_lib::config::ConfigValue; use jj_lib::content_hash::blake2b_hash; use jj_lib::hex_util; use jj_lib::op_store::TimestampRange; use jj_lib::settings::UserSettings; use jj_lib::time_util::DatePattern; use serde::Deserialize; use serde::de::IntoDeserializer as _; use crate::config; use crate::formatter::FormatRecorder; use crate::formatter::Formatter; use crate::template_parser; use crate::template_parser::BinaryOp; use crate::template_parser::ExpressionKind; use crate::template_parser::ExpressionNode; use crate::template_parser::FunctionCallNode; use crate::template_parser::LambdaNode; use crate::template_parser::TemplateAliasesMap; use crate::template_parser::TemplateDiagnostics; use crate::template_parser::TemplateParseError; use crate::template_parser::TemplateParseErrorKind; use crate::template_parser::TemplateParseResult; use crate::template_parser::UnaryOp; use crate::templater::BoxedSerializeProperty; use crate::templater::BoxedTemplateProperty; use crate::templater::CoalesceTemplate; use crate::templater::ConcatTemplate; use crate::templater::ConditionalTemplate; use crate::templater::Email; use crate::templater::JoinTemplate; use 
crate::templater::LabelTemplate; use crate::templater::ListPropertyTemplate; use crate::templater::ListTemplate; use crate::templater::Literal; use crate::templater::PlainTextFormattedProperty; use crate::templater::PropertyPlaceholder; use crate::templater::RawEscapeSequenceTemplate; use crate::templater::ReformatTemplate; use crate::templater::SeparateTemplate; use crate::templater::SizeHint; use crate::templater::Template; use crate::templater::TemplateProperty; use crate::templater::TemplatePropertyError; use crate::templater::TemplatePropertyExt as _; use crate::templater::TemplateRenderer; use crate::templater::WrapTemplateProperty; use crate::text_util; use crate::time_util; /// Callbacks to build usage-context-specific evaluation objects from AST nodes. /// /// This is used to implement different meanings of `self` or different /// globally available functions in the template language depending on the /// context in which it is invoked. pub trait TemplateLanguage<'a> { type Property: CoreTemplatePropertyVar<'a>; fn settings(&self) -> &UserSettings; /// Translates the given global `function` call to a property. /// /// This should be delegated to /// `CoreTemplateBuildFnTable::build_function()`. fn build_function( &self, diagnostics: &mut TemplateDiagnostics, build_ctx: &BuildContext<Self::Property>, function: &FunctionCallNode, ) -> TemplateParseResult<Self::Property>; /// Creates a method call thunk for the given `function` of the given /// `property`. fn build_method( &self, diagnostics: &mut TemplateDiagnostics, build_ctx: &BuildContext<Self::Property>, property: Self::Property, function: &FunctionCallNode, ) -> TemplateParseResult<Self::Property>; } /// Implements [`WrapTemplateProperty<'a, O>`] for property types. /// /// - `impl_property_wrappers!(Kind { Foo(Foo), FooList(Vec<Foo>), .. });` to /// implement conversion from types `Foo`, `Vec<Foo>`, ... /// - `impl_property_wrappers!(<'a> Kind<'a> { .. });` for types with lifetime. 
/// - `impl_property_wrappers!(Kind => Core { .. });` to forward conversion to /// `Kind::Core(_)`. macro_rules! impl_property_wrappers { ($kind:path $(=> $var:ident)? { $($body:tt)* }) => { $crate::template_builder::_impl_property_wrappers_many!( [], 'static, $kind $(=> $var)?, { $($body)* }); }; // capture the first lifetime as the lifetime of template objects. (<$a:lifetime $(, $p:lifetime)* $(, $q:ident)*> $kind:path $(=> $var:ident)? { $($body:tt)* }) => { $crate::template_builder::_impl_property_wrappers_many!( [$a, $($p,)* $($q,)*], $a, $kind $(=> $var)?, { $($body)* }); }; } macro_rules! _impl_property_wrappers_many { // lifetime/type parameters are packed in order to disable zipping. // https://github.com/rust-lang/rust/issues/96184#issuecomment-1294999418 ($ps:tt, $a:lifetime, $kind:path, { $( $var:ident($ty:ty), )* }) => { $( $crate::template_builder::_impl_property_wrappers_one!( $ps, $a, $kind, $var, $ty, std::convert::identity); )* }; // variant part in body is ignored so the same body can be reused for // implementing forwarding conversion. ($ps:tt, $a:lifetime, $kind:path => $var:ident, { $( $ignored_var:ident($ty:ty), )* }) => { $( $crate::template_builder::_impl_property_wrappers_one!( $ps, $a, $kind, $var, $ty, $crate::templater::WrapTemplateProperty::wrap_property); )* }; } macro_rules! _impl_property_wrappers_one { ([$($p:tt)*], $a:lifetime, $kind:path, $var:ident, $ty:ty, $inner:path) => { impl<$($p)*> $crate::templater::WrapTemplateProperty<$a, $ty> for $kind { fn wrap_property(property: $crate::templater::BoxedTemplateProperty<$a, $ty>) -> Self { Self::$var($inner(property)) } } }; } pub(crate) use _impl_property_wrappers_many; pub(crate) use _impl_property_wrappers_one; pub(crate) use impl_property_wrappers; /// Wrapper for the core template property types. 
pub trait CoreTemplatePropertyVar<'a> where Self: WrapTemplateProperty<'a, String>, Self: WrapTemplateProperty<'a, Vec<String>>, Self: WrapTemplateProperty<'a, bool>, Self: WrapTemplateProperty<'a, i64>, Self: WrapTemplateProperty<'a, Option<i64>>, Self: WrapTemplateProperty<'a, ConfigValue>, Self: WrapTemplateProperty<'a, Signature>, Self: WrapTemplateProperty<'a, Email>, Self: WrapTemplateProperty<'a, SizeHint>, Self: WrapTemplateProperty<'a, Timestamp>, Self: WrapTemplateProperty<'a, TimestampRange>, { fn wrap_template(template: Box<dyn Template + 'a>) -> Self; fn wrap_list_template(template: Box<dyn ListTemplate + 'a>) -> Self; /// Type name of the property output. fn type_name(&self) -> &'static str; fn try_into_boolean(self) -> Option<BoxedTemplateProperty<'a, bool>>; fn try_into_integer(self) -> Option<BoxedTemplateProperty<'a, i64>>; /// Transforms into a string property by formatting the value if needed. fn try_into_stringify(self) -> Option<BoxedTemplateProperty<'a, String>>; fn try_into_serialize(self) -> Option<BoxedSerializeProperty<'a>>; fn try_into_template(self) -> Option<Box<dyn Template + 'a>>; /// Transforms into a property that will evaluate to `self == other`. fn try_into_eq(self, other: Self) -> Option<BoxedTemplateProperty<'a, bool>>; /// Transforms into a property that will evaluate to an [`Ordering`]. 
fn try_into_cmp(self, other: Self) -> Option<BoxedTemplateProperty<'a, Ordering>>; } pub enum CoreTemplatePropertyKind<'a> { String(BoxedTemplateProperty<'a, String>), StringList(BoxedTemplateProperty<'a, Vec<String>>), Boolean(BoxedTemplateProperty<'a, bool>), Integer(BoxedTemplateProperty<'a, i64>), IntegerOpt(BoxedTemplateProperty<'a, Option<i64>>), ConfigValue(BoxedTemplateProperty<'a, ConfigValue>), Signature(BoxedTemplateProperty<'a, Signature>), Email(BoxedTemplateProperty<'a, Email>), SizeHint(BoxedTemplateProperty<'a, SizeHint>), Timestamp(BoxedTemplateProperty<'a, Timestamp>), TimestampRange(BoxedTemplateProperty<'a, TimestampRange>), // Both TemplateProperty and Template can represent a value to be evaluated // dynamically, which suggests that `Box<dyn Template + 'a>` could be // composed as `Box<dyn TemplateProperty<Output = Box<dyn Template ..`. // However, there's a subtle difference: TemplateProperty is strict on // error, whereas Template is usually lax and prints an error inline. If // `concat(x, y)` were a property returning Template, and if `y` failed to // evaluate, the whole expression would fail. In this example, a partial // evaluation output is more useful. That's one reason why Template isn't // wrapped in a TemplateProperty. Another reason is that the outermost // caller expects a Template, not a TemplateProperty of Template output. Template(Box<dyn Template + 'a>), ListTemplate(Box<dyn ListTemplate + 'a>), } /// Implements `WrapTemplateProperty<type>` for core property types. /// /// Use `impl_core_property_wrappers!(<'a> Kind<'a> => Core);` to implement /// forwarding conversion. macro_rules! 
impl_core_property_wrappers { ($($head:tt)+) => { $crate::template_builder::impl_property_wrappers!($($head)+ { String(String), StringList(Vec<String>), Boolean(bool), Integer(i64), IntegerOpt(Option<i64>), ConfigValue(jj_lib::config::ConfigValue), Signature(jj_lib::backend::Signature), Email($crate::templater::Email), SizeHint($crate::templater::SizeHint), Timestamp(jj_lib::backend::Timestamp), TimestampRange(jj_lib::op_store::TimestampRange), }); }; } pub(crate) use impl_core_property_wrappers; impl_core_property_wrappers!(<'a> CoreTemplatePropertyKind<'a>); impl<'a> CoreTemplatePropertyVar<'a> for CoreTemplatePropertyKind<'a> { fn wrap_template(template: Box<dyn Template + 'a>) -> Self { Self::Template(template) } fn wrap_list_template(template: Box<dyn ListTemplate + 'a>) -> Self { Self::ListTemplate(template) } fn type_name(&self) -> &'static str { match self { Self::String(_) => "String", Self::StringList(_) => "List<String>", Self::Boolean(_) => "Boolean", Self::Integer(_) => "Integer", Self::IntegerOpt(_) => "Option<Integer>", Self::ConfigValue(_) => "ConfigValue", Self::Signature(_) => "Signature", Self::Email(_) => "Email", Self::SizeHint(_) => "SizeHint", Self::Timestamp(_) => "Timestamp", Self::TimestampRange(_) => "TimestampRange", Self::Template(_) => "Template", Self::ListTemplate(_) => "ListTemplate", } } fn try_into_boolean(self) -> Option<BoxedTemplateProperty<'a, bool>> { match self { Self::String(property) => Some(property.map(|s| !s.is_empty()).into_dyn()), Self::StringList(property) => Some(property.map(|l| !l.is_empty()).into_dyn()), Self::Boolean(property) => Some(property), Self::Integer(_) => None, Self::IntegerOpt(property) => Some(property.map(|opt| opt.is_some()).into_dyn()), Self::ConfigValue(_) => None, Self::Signature(_) => None, Self::Email(property) => Some(property.map(|e| !e.0.is_empty()).into_dyn()), Self::SizeHint(_) => None, Self::Timestamp(_) => None, Self::TimestampRange(_) => None, // Template types could also be evaluated 
to boolean, but it's less likely // to apply label() or .map() and use the result as conditional. It's also // unclear whether ListTemplate should behave as a "list" or a "template". Self::Template(_) => None, Self::ListTemplate(_) => None, } } fn try_into_integer(self) -> Option<BoxedTemplateProperty<'a, i64>> { match self { Self::Integer(property) => Some(property), Self::IntegerOpt(property) => Some(property.try_unwrap("Integer").into_dyn()), _ => None, } } fn try_into_stringify(self) -> Option<BoxedTemplateProperty<'a, String>> { match self { Self::String(property) => Some(property), _ => { let template = self.try_into_template()?; Some(PlainTextFormattedProperty::new(template).into_dyn()) } } } fn try_into_serialize(self) -> Option<BoxedSerializeProperty<'a>> { match self { Self::String(property) => Some(property.into_serialize()), Self::StringList(property) => Some(property.into_serialize()), Self::Boolean(property) => Some(property.into_serialize()), Self::Integer(property) => Some(property.into_serialize()), Self::IntegerOpt(property) => Some(property.into_serialize()), Self::ConfigValue(property) => { Some(property.map(config::to_serializable_value).into_serialize()) } Self::Signature(property) => Some(property.into_serialize()), Self::Email(property) => Some(property.into_serialize()), Self::SizeHint(property) => Some(property.into_serialize()), Self::Timestamp(property) => Some(property.into_serialize()), Self::TimestampRange(property) => Some(property.into_serialize()), Self::Template(_) => None, Self::ListTemplate(_) => None, } } fn try_into_template(self) -> Option<Box<dyn Template + 'a>> { match self { Self::String(property) => Some(property.into_template()), Self::StringList(property) => Some(property.into_template()), Self::Boolean(property) => Some(property.into_template()), Self::Integer(property) => Some(property.into_template()), Self::IntegerOpt(property) => Some(property.into_template()), Self::ConfigValue(property) => 
Some(property.into_template()), Self::Signature(property) => Some(property.into_template()), Self::Email(property) => Some(property.into_template()), Self::SizeHint(_) => None, Self::Timestamp(property) => Some(property.into_template()), Self::TimestampRange(property) => Some(property.into_template()), Self::Template(template) => Some(template), Self::ListTemplate(template) => Some(template), } } fn try_into_eq(self, other: Self) -> Option<BoxedTemplateProperty<'a, bool>> { match (self, other) { (Self::String(lhs), Self::String(rhs)) => { Some((lhs, rhs).map(|(l, r)| l == r).into_dyn()) } (Self::String(lhs), Self::Email(rhs)) => { Some((lhs, rhs).map(|(l, r)| l == r.0).into_dyn()) } (Self::Boolean(lhs), Self::Boolean(rhs)) => { Some((lhs, rhs).map(|(l, r)| l == r).into_dyn()) } (Self::Integer(lhs), Self::Integer(rhs)) => { Some((lhs, rhs).map(|(l, r)| l == r).into_dyn()) } (Self::Integer(lhs), Self::IntegerOpt(rhs)) => { Some((lhs, rhs).map(|(l, r)| Some(l) == r).into_dyn()) } (Self::IntegerOpt(lhs), Self::Integer(rhs)) => { Some((lhs, rhs).map(|(l, r)| l == Some(r)).into_dyn()) } (Self::IntegerOpt(lhs), Self::IntegerOpt(rhs)) => { Some((lhs, rhs).map(|(l, r)| l == r).into_dyn()) } (Self::Email(lhs), Self::Email(rhs)) => { Some((lhs, rhs).map(|(l, r)| l == r).into_dyn()) } (Self::Email(lhs), Self::String(rhs)) => { Some((lhs, rhs).map(|(l, r)| l.0 == r).into_dyn()) } (Self::String(_), _) => None, (Self::StringList(_), _) => None, (Self::Boolean(_), _) => None, (Self::Integer(_), _) => None, (Self::IntegerOpt(_), _) => None, (Self::ConfigValue(_), _) => None, (Self::Signature(_), _) => None, (Self::Email(_), _) => None, (Self::SizeHint(_), _) => None, (Self::Timestamp(_), _) => None, (Self::TimestampRange(_), _) => None, (Self::Template(_), _) => None, (Self::ListTemplate(_), _) => None, } } fn try_into_cmp(self, other: Self) -> Option<BoxedTemplateProperty<'a, Ordering>> { match (self, other) { (Self::Integer(lhs), Self::Integer(rhs)) => { Some((lhs, rhs).map(|(l, 
r)| l.cmp(&r)).into_dyn()) } (Self::Integer(lhs), Self::IntegerOpt(rhs)) => { Some((lhs, rhs).map(|(l, r)| Some(l).cmp(&r)).into_dyn()) } (Self::IntegerOpt(lhs), Self::Integer(rhs)) => { Some((lhs, rhs).map(|(l, r)| l.cmp(&Some(r))).into_dyn()) } (Self::IntegerOpt(lhs), Self::IntegerOpt(rhs)) => { Some((lhs, rhs).map(|(l, r)| l.cmp(&r)).into_dyn()) } (Self::String(_), _) => None, (Self::StringList(_), _) => None, (Self::Boolean(_), _) => None, (Self::Integer(_), _) => None, (Self::IntegerOpt(_), _) => None, (Self::ConfigValue(_), _) => None, (Self::Signature(_), _) => None, (Self::Email(_), _) => None, (Self::SizeHint(_), _) => None, (Self::Timestamp(_), _) => None, (Self::TimestampRange(_), _) => None, (Self::Template(_), _) => None, (Self::ListTemplate(_), _) => None, } } } /// Function that translates global function call node. // The lifetime parameter 'a could be replaced with for<'a> to keep the method // table away from a certain lifetime. That's technically more correct, but I // couldn't find an easy way to expand that to the core template methods, which // are defined for L: TemplateLanguage<'a>. That's why the build fn table is // bound to a named lifetime, and therefore can't be cached statically. pub type TemplateBuildFunctionFn<'a, L, P> = fn(&L, &mut TemplateDiagnostics, &BuildContext<P>, &FunctionCallNode) -> TemplateParseResult<P>; type BuildMethodFn<'a, L, T, P> = fn( &L, &mut TemplateDiagnostics, &BuildContext<P>, T, &FunctionCallNode, ) -> TemplateParseResult<P>; /// Function that translates method call node of self type `T`. pub type TemplateBuildMethodFn<'a, L, T, P> = BuildMethodFn<'a, L, BoxedTemplateProperty<'a, T>, P>; /// Function that translates method call node of `Template`. pub type BuildTemplateMethodFn<'a, L, P> = BuildMethodFn<'a, L, Box<dyn Template + 'a>, P>; /// Function that translates method call node of `ListTemplate`. 
pub type BuildListTemplateMethodFn<'a, L, P> = BuildMethodFn<'a, L, Box<dyn ListTemplate + 'a>, P>; /// Table of functions that translate global function call node. pub type TemplateBuildFunctionFnMap<'a, L, P = <L as TemplateLanguage<'a>>::Property> = HashMap<&'static str, TemplateBuildFunctionFn<'a, L, P>>; /// Table of functions that translate method call node of self type `T`. pub type TemplateBuildMethodFnMap<'a, L, T, P = <L as TemplateLanguage<'a>>::Property> = HashMap<&'static str, TemplateBuildMethodFn<'a, L, T, P>>; /// Table of functions that translate method call node of `Template`. pub type BuildTemplateMethodFnMap<'a, L, P = <L as TemplateLanguage<'a>>::Property> = HashMap<&'static str, BuildTemplateMethodFn<'a, L, P>>; /// Table of functions that translate method call node of `ListTemplate`. pub type BuildListTemplateMethodFnMap<'a, L, P = <L as TemplateLanguage<'a>>::Property> = HashMap<&'static str, BuildListTemplateMethodFn<'a, L, P>>; /// Symbol table of functions and methods available in the core template. 
pub struct CoreTemplateBuildFnTable<'a, L: ?Sized, P = <L as TemplateLanguage<'a>>::Property> { pub functions: TemplateBuildFunctionFnMap<'a, L, P>, pub string_methods: TemplateBuildMethodFnMap<'a, L, String, P>, pub string_list_methods: TemplateBuildMethodFnMap<'a, L, Vec<String>, P>, pub boolean_methods: TemplateBuildMethodFnMap<'a, L, bool, P>, pub integer_methods: TemplateBuildMethodFnMap<'a, L, i64, P>, pub config_value_methods: TemplateBuildMethodFnMap<'a, L, ConfigValue, P>, pub email_methods: TemplateBuildMethodFnMap<'a, L, Email, P>, pub signature_methods: TemplateBuildMethodFnMap<'a, L, Signature, P>, pub size_hint_methods: TemplateBuildMethodFnMap<'a, L, SizeHint, P>, pub timestamp_methods: TemplateBuildMethodFnMap<'a, L, Timestamp, P>, pub timestamp_range_methods: TemplateBuildMethodFnMap<'a, L, TimestampRange, P>, pub template_methods: BuildTemplateMethodFnMap<'a, L, P>, pub list_template_methods: BuildListTemplateMethodFnMap<'a, L, P>, } pub fn merge_fn_map<'s, F>(base: &mut HashMap<&'s str, F>, extension: HashMap<&'s str, F>) { for (name, function) in extension { if base.insert(name, function).is_some() { panic!("Conflicting template definitions for '{name}' function"); } } } impl<L: ?Sized, P> CoreTemplateBuildFnTable<'_, L, P> { pub fn empty() -> Self { Self { functions: HashMap::new(), string_methods: HashMap::new(), string_list_methods: HashMap::new(), boolean_methods: HashMap::new(), integer_methods: HashMap::new(), config_value_methods: HashMap::new(), signature_methods: HashMap::new(), email_methods: HashMap::new(), size_hint_methods: HashMap::new(), timestamp_methods: HashMap::new(), timestamp_range_methods: HashMap::new(), template_methods: HashMap::new(), list_template_methods: HashMap::new(), } } pub fn merge(&mut self, other: Self) { let Self { functions, string_methods, string_list_methods, boolean_methods, integer_methods, config_value_methods, signature_methods, email_methods, size_hint_methods, timestamp_methods, 
timestamp_range_methods, template_methods, list_template_methods, } = other; merge_fn_map(&mut self.functions, functions); merge_fn_map(&mut self.string_methods, string_methods); merge_fn_map(&mut self.string_list_methods, string_list_methods); merge_fn_map(&mut self.boolean_methods, boolean_methods); merge_fn_map(&mut self.integer_methods, integer_methods); merge_fn_map(&mut self.config_value_methods, config_value_methods); merge_fn_map(&mut self.signature_methods, signature_methods); merge_fn_map(&mut self.email_methods, email_methods); merge_fn_map(&mut self.size_hint_methods, size_hint_methods); merge_fn_map(&mut self.timestamp_methods, timestamp_methods); merge_fn_map(&mut self.timestamp_range_methods, timestamp_range_methods); merge_fn_map(&mut self.template_methods, template_methods); merge_fn_map(&mut self.list_template_methods, list_template_methods); } } impl<'a, L> CoreTemplateBuildFnTable<'a, L, L::Property> where L: TemplateLanguage<'a> + ?Sized, { /// Creates new symbol table containing the builtin functions and methods. pub fn builtin() -> Self { Self { functions: builtin_functions(), string_methods: builtin_string_methods(), string_list_methods: builtin_formattable_list_methods(), boolean_methods: HashMap::new(), integer_methods: HashMap::new(), config_value_methods: builtin_config_value_methods(), signature_methods: builtin_signature_methods(), email_methods: builtin_email_methods(), size_hint_methods: builtin_size_hint_methods(), timestamp_methods: builtin_timestamp_methods(), timestamp_range_methods: builtin_timestamp_range_methods(), template_methods: HashMap::new(), list_template_methods: builtin_list_template_methods(), } } /// Translates the function call node `function` by using this symbol table. 
pub fn build_function( &self, language: &L, diagnostics: &mut TemplateDiagnostics, build_ctx: &BuildContext<L::Property>, function: &FunctionCallNode, ) -> TemplateParseResult<L::Property> { let table = &self.functions; let build = template_parser::lookup_function(table, function)?; build(language, diagnostics, build_ctx, function) } /// Applies the method call node `function` to the given `property` by using /// this symbol table. pub fn build_method( &self, language: &L, diagnostics: &mut TemplateDiagnostics, build_ctx: &BuildContext<L::Property>, property: CoreTemplatePropertyKind<'a>, function: &FunctionCallNode, ) -> TemplateParseResult<L::Property> { let type_name = property.type_name(); match property { CoreTemplatePropertyKind::String(property) => { let table = &self.string_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, property, function) } CoreTemplatePropertyKind::StringList(property) => { let table = &self.string_list_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, property, function) } CoreTemplatePropertyKind::Boolean(property) => { let table = &self.boolean_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, property, function) } CoreTemplatePropertyKind::Integer(property) => { let table = &self.integer_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, property, function) } CoreTemplatePropertyKind::IntegerOpt(property) => { let type_name = "Integer"; let table = &self.integer_methods; let build = template_parser::lookup_method(type_name, table, function)?; let inner_property = property.try_unwrap(type_name).into_dyn(); build(language, diagnostics, build_ctx, inner_property, function) } CoreTemplatePropertyKind::ConfigValue(property) => { let table = 
&self.config_value_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, property, function) } CoreTemplatePropertyKind::Signature(property) => { let table = &self.signature_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, property, function) } CoreTemplatePropertyKind::Email(property) => { let table = &self.email_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, property, function) } CoreTemplatePropertyKind::SizeHint(property) => { let table = &self.size_hint_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, property, function) } CoreTemplatePropertyKind::Timestamp(property) => { let table = &self.timestamp_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, property, function) } CoreTemplatePropertyKind::TimestampRange(property) => { let table = &self.timestamp_range_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, property, function) } CoreTemplatePropertyKind::Template(template) => { let table = &self.template_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, template, function) } CoreTemplatePropertyKind::ListTemplate(template) => { let table = &self.list_template_methods; let build = template_parser::lookup_method(type_name, table, function)?; build(language, diagnostics, build_ctx, template, function) } } } } /// Opaque struct that represents a template value. 
pub struct Expression<P> { property: P, labels: Vec<String>, } impl<P> Expression<P> { fn unlabeled(property: P) -> Self { let labels = vec![]; Self { property, labels } } fn with_label(property: P, label: impl Into<String>) -> Self { let labels = vec![label.into()]; Self { property, labels } } } impl<'a, P: CoreTemplatePropertyVar<'a>> Expression<P> { pub fn type_name(&self) -> &'static str { self.property.type_name() } pub fn try_into_boolean(self) -> Option<BoxedTemplateProperty<'a, bool>> { self.property.try_into_boolean() } pub fn try_into_integer(self) -> Option<BoxedTemplateProperty<'a, i64>> { self.property.try_into_integer() } pub fn try_into_stringify(self) -> Option<BoxedTemplateProperty<'a, String>> { self.property.try_into_stringify() } pub fn try_into_serialize(self) -> Option<BoxedSerializeProperty<'a>> { self.property.try_into_serialize() } pub fn try_into_template(self) -> Option<Box<dyn Template + 'a>> { let template = self.property.try_into_template()?; if self.labels.is_empty() { Some(template) } else { Some(Box::new(LabelTemplate::new(template, Literal(self.labels)))) } } pub fn try_into_eq(self, other: Self) -> Option<BoxedTemplateProperty<'a, bool>> { self.property.try_into_eq(other.property) } pub fn try_into_cmp(self, other: Self) -> Option<BoxedTemplateProperty<'a, Ordering>> { self.property.try_into_cmp(other.property) } } /// Environment (locals and self) in a stack frame. pub struct BuildContext<'i, P> { /// Map of functions to create `L::Property`. local_variables: HashMap<&'i str, &'i dyn Fn() -> P>, /// Function to create `L::Property` representing `self`. /// /// This could be `local_variables["self"]`, but keyword lookup shouldn't be /// overridden by a user-defined `self` variable. 
self_variable: &'i dyn Fn() -> P, } fn build_keyword<'a, L: TemplateLanguage<'a> + ?Sized>( language: &L, diagnostics: &mut TemplateDiagnostics, build_ctx: &BuildContext<L::Property>, name: &str, name_span: pest::Span<'_>, ) -> TemplateParseResult<L::Property> { // Keyword is a 0-ary method on the "self" property let self_property = (build_ctx.self_variable)(); let function = FunctionCallNode { name, name_span, args: vec![], keyword_args: vec![], args_span: name_span.end_pos().span(&name_span.end_pos()), }; language .build_method(diagnostics, build_ctx, self_property, &function) .map_err(|err| match err.kind() { TemplateParseErrorKind::NoSuchMethod { candidates, .. } => { let kind = TemplateParseErrorKind::NoSuchKeyword { name: name.to_owned(), // TODO: filter methods by arity? candidates: candidates.clone(), };
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
true
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/ui.rs
cli/src/ui.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::env; use std::error; use std::fmt; use std::io; use std::io::IsTerminal as _; use std::io::PipeWriter; use std::io::Stderr; use std::io::StderrLock; use std::io::Stdout; use std::io::StdoutLock; use std::io::Write; use std::iter; use std::mem; use std::process::Child; use std::process::ChildStdin; use std::process::Stdio; use std::thread; use std::thread::JoinHandle; use itertools::Itertools as _; use jj_lib::config::ConfigGetError; use jj_lib::config::StackedConfig; use tracing::instrument; use crate::command_error::CommandError; use crate::config::CommandNameAndArgs; use crate::formatter::Formatter; use crate::formatter::FormatterExt as _; use crate::formatter::FormatterFactory; use crate::formatter::HeadingLabeledWriter; use crate::formatter::LabeledScope; use crate::formatter::PlainTextFormatter; const BUILTIN_PAGER_NAME: &str = ":builtin"; enum UiOutput { Terminal { stdout: Stdout, stderr: Stderr, }, Paged { child: Child, child_stdin: ChildStdin, }, BuiltinPaged { out_wr: PipeWriter, err_wr: PipeWriter, pager_thread: JoinHandle<streampager::Result<()>>, }, Null, } impl UiOutput { fn new_terminal() -> Self { Self::Terminal { stdout: io::stdout(), stderr: io::stderr(), } } fn new_paged(pager_cmd: &CommandNameAndArgs) -> io::Result<Self> { let mut cmd = pager_cmd.to_command(); tracing::info!(?cmd, "spawning pager"); let mut child = cmd.stdin(Stdio::piped()).spawn()?; let 
child_stdin = child.stdin.take().unwrap(); Ok(Self::Paged { child, child_stdin }) } fn new_builtin_paged(config: &StreampagerConfig) -> streampager::Result<Self> { let streampager_config = streampager::config::Config { wrapping_mode: config.wrapping.into(), interface_mode: config.streampager_interface_mode(), show_ruler: config.show_ruler, // We could make scroll-past-eof configurable, but I'm guessing people // will not miss it. If we do make it configurable, we should mention // that it's a bad idea to turn this on if `interface=quit-if-one-page`, // as it can leave a lot of empty lines on the screen after exiting. scroll_past_eof: false, ..Default::default() }; // Initialize with tty instead of stdin/stdout. We spawn pager so long // as stdout is a tty, which means stdin may be redirected. let mut pager = streampager::Pager::new_using_system_terminal_with_config(streampager_config)?; // Use native pipe, which can be attached to child process. The stdout // stream could be an in-process channel, but the cost of extra syscalls // wouldn't matter. let (out_rd, out_wr) = io::pipe()?; let (err_rd, err_wr) = io::pipe()?; pager.add_stream(out_rd, "")?; pager.add_error_stream(err_rd, "stderr")?; Ok(Self::BuiltinPaged { out_wr, err_wr, pager_thread: thread::spawn(|| pager.run()), }) } fn finalize(self, ui: &Ui) { match self { Self::Terminal { .. } => { /* no-op */ } Self::Paged { mut child, child_stdin, } => { drop(child_stdin); if let Err(err) = child.wait() { // It's possible (though unlikely) that this write fails, but // this function gets called so late that there's not much we // can do about it. 
writeln!( ui.warning_default(), "Failed to wait on pager: {err}", err = format_error_with_sources(&err), ) .ok(); } } Self::BuiltinPaged { out_wr, err_wr, pager_thread, } => { drop(out_wr); drop(err_wr); match pager_thread.join() { Ok(Ok(())) => {} Ok(Err(err)) => { writeln!( ui.warning_default(), "Failed to run builtin pager: {err}", err = format_error_with_sources(&err), ) .ok(); } Err(_) => { writeln!(ui.warning_default(), "Builtin pager crashed.").ok(); } } } Self::Null => {} } } } pub enum UiStdout<'a> { Terminal(StdoutLock<'static>), Paged(&'a ChildStdin), Builtin(&'a PipeWriter), Null(io::Sink), } pub enum UiStderr<'a> { Terminal(StderrLock<'static>), Paged(&'a ChildStdin), Builtin(&'a PipeWriter), Null(io::Sink), } macro_rules! for_outputs { ($ty:ident, $output:expr, $pat:pat => $expr:expr) => { match $output { $ty::Terminal($pat) => $expr, $ty::Paged($pat) => $expr, $ty::Builtin($pat) => $expr, $ty::Null($pat) => $expr, } }; } impl Write for UiStdout<'_> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { for_outputs!(Self, self, w => w.write(buf)) } fn write_all(&mut self, buf: &[u8]) -> io::Result<()> { for_outputs!(Self, self, w => w.write_all(buf)) } fn flush(&mut self) -> io::Result<()> { for_outputs!(Self, self, w => w.flush()) } } impl Write for UiStderr<'_> { fn write(&mut self, buf: &[u8]) -> io::Result<usize> { for_outputs!(Self, self, w => w.write(buf)) } fn write_all(&mut self, buf: &[u8]) -> io::Result<()> { for_outputs!(Self, self, w => w.write_all(buf)) } fn flush(&mut self) -> io::Result<()> { for_outputs!(Self, self, w => w.flush()) } } pub struct Ui { quiet: bool, pager: PagerConfig, progress_indicator: bool, formatter_factory: FormatterFactory, output: UiOutput, } #[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Deserialize, clap::ValueEnum)] #[serde(rename_all = "kebab-case")] pub enum ColorChoice { Always, Never, Debug, Auto, } impl fmt::Display for ColorChoice { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { let s = 
match self { Self::Always => "always", Self::Never => "never", Self::Debug => "debug", Self::Auto => "auto", }; write!(f, "{s}") } } fn prepare_formatter_factory( config: &StackedConfig, stdout: &Stdout, ) -> Result<FormatterFactory, ConfigGetError> { let terminal = stdout.is_terminal(); let (color, debug) = match config.get("ui.color")? { ColorChoice::Always => (true, false), ColorChoice::Never => (false, false), ColorChoice::Debug => (true, true), ColorChoice::Auto => (terminal, false), }; if color { FormatterFactory::color(config, debug) } else if terminal { // Sanitize ANSI escape codes if we're printing to a terminal. Doesn't // affect ANSI escape codes that originate from the formatter itself. Ok(FormatterFactory::sanitized()) } else { Ok(FormatterFactory::plain_text()) } } #[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Deserialize)] #[serde(rename_all(deserialize = "kebab-case"))] pub enum PaginationChoice { Never, Auto, } #[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Deserialize)] #[serde(rename_all(deserialize = "kebab-case"))] pub enum StreampagerAlternateScreenMode { QuitIfOnePage, FullScreenClearOutput, QuitQuicklyOrClearOutput, } #[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Deserialize)] #[serde(rename_all(deserialize = "kebab-case"))] enum StreampagerWrappingMode { None, Word, Anywhere, } impl From<StreampagerWrappingMode> for streampager::config::WrappingMode { fn from(val: StreampagerWrappingMode) -> Self { match val { StreampagerWrappingMode::None => Self::Unwrapped, StreampagerWrappingMode::Word => Self::WordBoundary, StreampagerWrappingMode::Anywhere => Self::GraphemeBoundary, } } } #[derive(Clone, Copy, Debug, Eq, PartialEq, serde::Deserialize)] #[serde(rename_all(deserialize = "kebab-case"))] struct StreampagerConfig { interface: StreampagerAlternateScreenMode, wrapping: StreampagerWrappingMode, show_ruler: bool, // TODO: Add an `quit-quickly-delay-seconds` floating point option or a // `quit-quickly-delay` option that takes a 
's' or 'ms' suffix. Note that as // of this writing, floating point numbers do not work with `--config` } impl StreampagerConfig { fn streampager_interface_mode(&self) -> streampager::config::InterfaceMode { use StreampagerAlternateScreenMode::*; use streampager::config::InterfaceMode; match self.interface { // InterfaceMode::Direct not implemented FullScreenClearOutput => InterfaceMode::FullScreen, QuitIfOnePage => InterfaceMode::Hybrid, QuitQuicklyOrClearOutput => InterfaceMode::Delayed(std::time::Duration::from_secs(2)), } } } enum PagerConfig { Disabled, Builtin(StreampagerConfig), External(CommandNameAndArgs), } impl PagerConfig { fn from_config(config: &StackedConfig) -> Result<Self, ConfigGetError> { if matches!(config.get("ui.paginate")?, PaginationChoice::Never) { return Ok(Self::Disabled); }; let args: CommandNameAndArgs = config.get("ui.pager")?; if args.as_str() == Some(BUILTIN_PAGER_NAME) { Ok(Self::Builtin(config.get("ui.streampager")?)) } else { Ok(Self::External(args)) } } } impl Ui { pub fn null() -> Self { Self { quiet: true, pager: PagerConfig::Disabled, progress_indicator: false, formatter_factory: FormatterFactory::plain_text(), output: UiOutput::Null, } } pub fn with_config(config: &StackedConfig) -> Result<Self, CommandError> { let formatter_factory = prepare_formatter_factory(config, &io::stdout())?; Ok(Self { quiet: config.get("ui.quiet")?, formatter_factory, pager: PagerConfig::from_config(config)?, progress_indicator: config.get("ui.progress-indicator")?, output: UiOutput::new_terminal(), }) } pub fn reset(&mut self, config: &StackedConfig) -> Result<(), CommandError> { self.quiet = config.get("ui.quiet")?; self.pager = PagerConfig::from_config(config)?; self.progress_indicator = config.get("ui.progress-indicator")?; self.formatter_factory = prepare_formatter_factory(config, &io::stdout())?; Ok(()) } /// Switches the output to use the pager, if allowed. 
#[instrument(skip_all)] pub fn request_pager(&mut self) { if !matches!(&self.output, UiOutput::Terminal { stdout, .. } if stdout.is_terminal()) { return; } let new_output = match &self.pager { PagerConfig::Disabled => { return; } PagerConfig::Builtin(streampager_config) => { UiOutput::new_builtin_paged(streampager_config) .inspect_err(|err| { writeln!( self.warning_default(), "Failed to set up builtin pager: {err}", err = format_error_with_sources(err), ) .ok(); }) .ok() } PagerConfig::External(command_name_and_args) => { UiOutput::new_paged(command_name_and_args) .inspect_err(|err| { // The pager executable couldn't be found or couldn't be run writeln!( self.warning_default(), "Failed to spawn pager '{name}': {err}", name = command_name_and_args.split_name(), err = format_error_with_sources(err), ) .ok(); writeln!(self.hint_default(), "Consider using the `:builtin` pager.").ok(); }) .ok() } }; if let Some(output) = new_output { self.output = output; } } pub fn color(&self) -> bool { self.formatter_factory.is_color() } pub fn new_formatter<'output, W: Write + 'output>( &self, output: W, ) -> Box<dyn Formatter + 'output> { self.formatter_factory.new_formatter(output) } /// Locked stdout stream. pub fn stdout(&self) -> UiStdout<'_> { match &self.output { UiOutput::Terminal { stdout, .. } => UiStdout::Terminal(stdout.lock()), UiOutput::Paged { child_stdin, .. } => UiStdout::Paged(child_stdin), UiOutput::BuiltinPaged { out_wr, .. } => UiStdout::Builtin(out_wr), UiOutput::Null => UiStdout::Null(io::sink()), } } /// Creates a formatter for the locked stdout stream. /// /// Labels added to the returned formatter should be removed by caller. /// Otherwise the last color would persist. pub fn stdout_formatter(&self) -> Box<dyn Formatter + '_> { for_outputs!(UiStdout, self.stdout(), w => self.new_formatter(w)) } /// Locked stderr stream. pub fn stderr(&self) -> UiStderr<'_> { match &self.output { UiOutput::Terminal { stderr, .. 
} => UiStderr::Terminal(stderr.lock()), UiOutput::Paged { child_stdin, .. } => UiStderr::Paged(child_stdin), UiOutput::BuiltinPaged { err_wr, .. } => UiStderr::Builtin(err_wr), UiOutput::Null => UiStderr::Null(io::sink()), } } /// Creates a formatter for the locked stderr stream. pub fn stderr_formatter(&self) -> Box<dyn Formatter + '_> { for_outputs!(UiStderr, self.stderr(), w => self.new_formatter(w)) } /// Stderr stream to be attached to a child process. pub fn stderr_for_child(&self) -> io::Result<Stdio> { match &self.output { UiOutput::Terminal { .. } => Ok(Stdio::inherit()), UiOutput::Paged { child_stdin, .. } => Ok(duplicate_child_stdin(child_stdin)?.into()), UiOutput::BuiltinPaged { err_wr, .. } => Ok(err_wr.try_clone()?.into()), UiOutput::Null => Ok(Stdio::null()), } } /// Whether continuous feedback should be displayed for long-running /// operations pub fn use_progress_indicator(&self) -> bool { match &self.output { UiOutput::Terminal { stderr, .. } => self.progress_indicator && stderr.is_terminal(), UiOutput::Paged { .. } => false, UiOutput::BuiltinPaged { .. } => false, UiOutput::Null => false, } } pub fn progress_output(&self) -> Option<ProgressOutput<std::io::Stderr>> { self.use_progress_indicator() .then(ProgressOutput::for_stderr) } /// Writer to print an update that's not part of the command's main output. pub fn status(&self) -> Box<dyn Write + '_> { if self.quiet { Box::new(io::sink()) } else { Box::new(self.stderr()) } } /// A formatter to print an update that's not part of the command's main /// output. Returns `None` if `--quiet` was requested. pub fn status_formatter(&self) -> Option<Box<dyn Formatter + '_>> { (!self.quiet).then(|| self.stderr_formatter()) } /// Writer to print hint with the default "Hint: " heading. pub fn hint_default(&self) -> HeadingLabeledWriter<Box<dyn Formatter + '_>, &'static str> { self.hint_with_heading("Hint: ") } /// Writer to print hint without the "Hint: " heading. 
pub fn hint_no_heading(&self) -> LabeledScope<Box<dyn Formatter + '_>> { let formatter = self .status_formatter() .unwrap_or_else(|| Box::new(PlainTextFormatter::new(io::sink()))); formatter.into_labeled("hint") } /// Writer to print hint with the given heading. pub fn hint_with_heading<H: fmt::Display>( &self, heading: H, ) -> HeadingLabeledWriter<Box<dyn Formatter + '_>, H> { self.hint_no_heading().with_heading(heading) } /// Writer to print warning with the default "Warning: " heading. pub fn warning_default(&self) -> HeadingLabeledWriter<Box<dyn Formatter + '_>, &'static str> { self.warning_with_heading("Warning: ") } /// Writer to print warning without the "Warning: " heading. pub fn warning_no_heading(&self) -> LabeledScope<Box<dyn Formatter + '_>> { self.stderr_formatter().into_labeled("warning") } /// Writer to print warning with the given heading. pub fn warning_with_heading<H: fmt::Display>( &self, heading: H, ) -> HeadingLabeledWriter<Box<dyn Formatter + '_>, H> { self.warning_no_heading().with_heading(heading) } /// Writer to print error without the "Error: " heading. pub fn error_no_heading(&self) -> LabeledScope<Box<dyn Formatter + '_>> { self.stderr_formatter().into_labeled("error") } /// Writer to print error with the given heading. pub fn error_with_heading<H: fmt::Display>( &self, heading: H, ) -> HeadingLabeledWriter<Box<dyn Formatter + '_>, H> { self.error_no_heading().with_heading(heading) } /// Waits for the pager exits. 
#[instrument(skip_all)] pub fn finalize_pager(&mut self) { let old_output = mem::replace(&mut self.output, UiOutput::new_terminal()); old_output.finalize(self); } pub fn can_prompt() -> bool { io::stderr().is_terminal() || env::var("JJ_INTERACTIVE") .map(|v| v == "1") .unwrap_or(false) } pub fn prompt(&self, prompt: &str) -> io::Result<String> { if !Self::can_prompt() { return Err(io::Error::new( io::ErrorKind::Unsupported, "Cannot prompt for input since the output is not connected to a terminal", )); } write!(self.stderr(), "{prompt}: ")?; self.stderr().flush()?; let mut buf = String::new(); io::stdin().read_line(&mut buf)?; if buf.is_empty() { return Err(io::Error::new( io::ErrorKind::UnexpectedEof, "Prompt canceled by EOF", )); } if let Some(trimmed) = buf.strip_suffix('\n') { buf.truncate(trimmed.len()); } Ok(buf) } /// Repeat the given prompt until the input is one of the specified choices. /// Returns the index of the choice. pub fn prompt_choice( &self, prompt: &str, choices: &[impl AsRef<str>], default_index: Option<usize>, ) -> io::Result<usize> { self.prompt_choice_with( prompt, default_index.map(|index| { choices .get(index) .expect("default_index should be within range") .as_ref() }), |input| { choices .iter() .position(|c| input == c.as_ref()) .ok_or("unrecognized response") }, ) } /// Prompts for a yes-or-no response, with yes = true and no = false. pub fn prompt_yes_no(&self, prompt: &str, default: Option<bool>) -> io::Result<bool> { let default_str = match &default { Some(true) => "(Yn)", Some(false) => "(yN)", None => "(yn)", }; self.prompt_choice_with( &format!("{prompt} {default_str}"), default.map(|v| if v { "y" } else { "n" }), |input| { if input.eq_ignore_ascii_case("y") || input.eq_ignore_ascii_case("yes") { Ok(true) } else if input.eq_ignore_ascii_case("n") || input.eq_ignore_ascii_case("no") { Ok(false) } else { Err("unrecognized response") } }, ) } /// Repeats the given prompt until `parse(input)` returns a value. 
/// /// If the default `text` is given, an empty input will be mapped to it. It /// will also be used in non-interactive session. The default `text` must /// be parsable. If no default is given, this function will fail in /// non-interactive session. pub fn prompt_choice_with<T, E: fmt::Debug + fmt::Display>( &self, prompt: &str, default: Option<&str>, mut parse: impl FnMut(&str) -> Result<T, E>, ) -> io::Result<T> { // Parse the default to ensure that the text is valid. let default = default.map(|text| (parse(text).expect("default should be valid"), text)); if !Self::can_prompt() && let Some((value, text)) = default { // Choose the default automatically without waiting. writeln!(self.stderr(), "{prompt}: {text}")?; return Ok(value); } loop { let input = self.prompt(prompt)?; let input = input.trim(); if input.is_empty() { if let Some((value, _)) = default { return Ok(value); } else { continue; } } match parse(input) { Ok(value) => return Ok(value), Err(err) => writeln!(self.warning_no_heading(), "{err}")?, } } } pub fn prompt_password(&self, prompt: &str) -> io::Result<String> { if !io::stdout().is_terminal() { return Err(io::Error::new( io::ErrorKind::Unsupported, "Cannot prompt for input since the output is not connected to a terminal", )); } rpassword::prompt_password(format!("{prompt}: ")) } pub fn term_width(&self) -> usize { term_width().unwrap_or(80).into() } } #[derive(Debug)] pub struct ProgressOutput<W> { output: W, term_width: Option<u16>, } impl ProgressOutput<io::Stderr> { pub fn for_stderr() -> Self { Self { output: io::stderr(), term_width: None, } } } impl<W> ProgressOutput<W> { pub fn for_test(output: W, term_width: u16) -> Self { Self { output, term_width: Some(term_width), } } pub fn term_width(&self) -> Option<u16> { // Terminal can be resized while progress is displayed, so don't cache it. self.term_width.or_else(term_width) } /// Construct a guard object which writes `text` when dropped. Useful for /// restoring terminal state. 
pub fn output_guard(&self, text: String) -> OutputGuard { OutputGuard { text, output: io::stderr(), } } } impl<W: Write> ProgressOutput<W> { pub fn write_fmt(&mut self, fmt: fmt::Arguments<'_>) -> io::Result<()> { self.output.write_fmt(fmt) } pub fn flush(&mut self) -> io::Result<()> { self.output.flush() } } pub struct OutputGuard { text: String, output: Stderr, } impl Drop for OutputGuard { #[instrument(skip_all)] fn drop(&mut self) { self.output.write_all(self.text.as_bytes()).ok(); self.output.flush().ok(); } } #[cfg(unix)] fn duplicate_child_stdin(stdin: &ChildStdin) -> io::Result<std::os::fd::OwnedFd> { use std::os::fd::AsFd as _; stdin.as_fd().try_clone_to_owned() } #[cfg(windows)] fn duplicate_child_stdin(stdin: &ChildStdin) -> io::Result<std::os::windows::io::OwnedHandle> { use std::os::windows::io::AsHandle as _; stdin.as_handle().try_clone_to_owned() } fn format_error_with_sources(err: &dyn error::Error) -> impl fmt::Display { iter::successors(Some(err), |&err| err.source()).format(": ") } fn term_width() -> Option<u16> { if let Some(cols) = env::var("COLUMNS").ok().and_then(|s| s.parse().ok()) { Some(cols) } else { crossterm::terminal::size().ok().map(|(cols, _)| cols) } }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/template_parser.rs
cli/src/template_parser.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashMap; use std::error; use std::mem; use std::sync::LazyLock; use itertools::Itertools as _; use jj_lib::dsl_util; use jj_lib::dsl_util::AliasDeclaration; use jj_lib::dsl_util::AliasDeclarationParser; use jj_lib::dsl_util::AliasDefinitionParser; use jj_lib::dsl_util::AliasExpandError; use jj_lib::dsl_util::AliasExpandableExpression; use jj_lib::dsl_util::AliasId; use jj_lib::dsl_util::AliasesMap; use jj_lib::dsl_util::Diagnostics; use jj_lib::dsl_util::ExpressionFolder; use jj_lib::dsl_util::FoldableExpression; use jj_lib::dsl_util::FunctionCallParser; use jj_lib::dsl_util::InvalidArguments; use jj_lib::dsl_util::StringLiteralParser; use jj_lib::dsl_util::collect_similar; use jj_lib::str_util::StringPattern; use pest::Parser as _; use pest::iterators::Pair; use pest::iterators::Pairs; use pest::pratt_parser::Assoc; use pest::pratt_parser::Op; use pest::pratt_parser::PrattParser; use pest_derive::Parser; use thiserror::Error; #[derive(Parser)] #[grammar = "template.pest"] struct TemplateParser; const STRING_LITERAL_PARSER: StringLiteralParser<Rule> = StringLiteralParser { content_rule: Rule::string_content, escape_rule: Rule::string_escape, }; const FUNCTION_CALL_PARSER: FunctionCallParser<Rule> = FunctionCallParser { function_name_rule: Rule::identifier, function_arguments_rule: Rule::function_arguments, keyword_argument_rule: Rule::keyword_argument, 
argument_name_rule: Rule::identifier, argument_value_rule: Rule::template, }; impl Rule { fn to_symbol(self) -> Option<&'static str> { match self { Self::EOI => None, Self::WHITESPACE => None, Self::string_escape => None, Self::string_content_char => None, Self::string_content => None, Self::string_literal => None, Self::raw_string_content => None, Self::raw_string_literal => None, Self::any_string_literal => None, Self::integer_literal => None, Self::identifier => None, Self::concat_op => Some("++"), Self::logical_or_op => Some("||"), Self::logical_and_op => Some("&&"), Self::eq_op => Some("=="), Self::ne_op => Some("!="), Self::ge_op => Some(">="), Self::gt_op => Some(">"), Self::le_op => Some("<="), Self::lt_op => Some("<"), Self::add_op => Some("+"), Self::sub_op => Some("-"), Self::mul_op => Some("*"), Self::div_op => Some("/"), Self::rem_op => Some("%"), Self::logical_not_op => Some("!"), Self::negate_op => Some("-"), Self::pattern_kind_op => Some(":"), Self::prefix_ops => None, Self::infix_ops => None, Self::function => None, Self::keyword_argument => None, Self::argument => None, Self::function_arguments => None, Self::lambda => None, Self::formal_parameters => None, Self::string_pattern_identifier => None, Self::string_pattern => None, Self::primary => None, Self::term => None, Self::expression => None, Self::template => None, Self::program => None, Self::function_alias_declaration => None, Self::alias_declaration => None, } } } /// Manages diagnostic messages emitted during template parsing and building. 
pub type TemplateDiagnostics = Diagnostics<TemplateParseError>; pub type TemplateParseResult<T> = Result<T, TemplateParseError>; #[derive(Debug, Error)] #[error("{pest_error}")] pub struct TemplateParseError { kind: TemplateParseErrorKind, pest_error: Box<pest::error::Error<Rule>>, source: Option<Box<dyn error::Error + Send + Sync>>, } #[derive(Clone, Debug, Eq, Error, PartialEq)] pub enum TemplateParseErrorKind { #[error("Syntax error")] SyntaxError, #[error("Keyword `{name}` doesn't exist")] NoSuchKeyword { name: String, candidates: Vec<String>, }, #[error("Function `{name}` doesn't exist")] NoSuchFunction { name: String, candidates: Vec<String>, }, #[error("Method `{name}` doesn't exist for type `{type_name}`")] NoSuchMethod { type_name: String, name: String, candidates: Vec<String>, }, #[error("Function `{name}`: {message}")] InvalidArguments { name: String, message: String }, #[error("Redefinition of function parameter")] RedefinedFunctionParameter, #[error("{0}")] Expression(String), #[error("In alias `{0}`")] InAliasExpansion(String), #[error("In function parameter `{0}`")] InParameterExpansion(String), #[error("Alias `{0}` expanded recursively")] RecursiveAlias(String), } impl TemplateParseError { pub fn with_span(kind: TemplateParseErrorKind, span: pest::Span<'_>) -> Self { let message = kind.to_string(); let pest_error = Box::new(pest::error::Error::new_from_span( pest::error::ErrorVariant::CustomError { message }, span, )); Self { kind, pest_error, source: None, } } pub fn with_source(mut self, source: impl Into<Box<dyn error::Error + Send + Sync>>) -> Self { self.source = Some(source.into()); self } pub fn expected_type(expected: &str, actual: &str, span: pest::Span<'_>) -> Self { let message = format!("Expected expression of type `{expected}`, but actual type is `{actual}`"); Self::expression(message, span) } /// Some other expression error. 
pub fn expression(message: impl Into<String>, span: pest::Span<'_>) -> Self { Self::with_span(TemplateParseErrorKind::Expression(message.into()), span) } /// If this is a `NoSuchKeyword` error, expands the candidates list with the /// given `other_keywords`. pub fn extend_keyword_candidates<I>(mut self, other_keywords: I) -> Self where I: IntoIterator, I::Item: AsRef<str>, { if let TemplateParseErrorKind::NoSuchKeyword { name, candidates } = &mut self.kind { let other_candidates = collect_similar(name, other_keywords); *candidates = itertools::merge(mem::take(candidates), other_candidates) .dedup() .collect(); } self } /// If this is a `NoSuchFunction` error, expands the candidates list with /// the given `other_functions`. pub fn extend_function_candidates<I>(mut self, other_functions: I) -> Self where I: IntoIterator, I::Item: AsRef<str>, { if let TemplateParseErrorKind::NoSuchFunction { name, candidates } = &mut self.kind { let other_candidates = collect_similar(name, other_functions); *candidates = itertools::merge(mem::take(candidates), other_candidates) .dedup() .collect(); } self } /// Expands keyword/function candidates with the given aliases. pub fn extend_alias_candidates(self, aliases_map: &TemplateAliasesMap) -> Self { self.extend_keyword_candidates(aliases_map.symbol_names()) .extend_function_candidates(aliases_map.function_names()) } pub fn kind(&self) -> &TemplateParseErrorKind { &self.kind } /// Original parsing error which typically occurred in an alias expression. 
pub fn origin(&self) -> Option<&Self> { self.source.as_ref().and_then(|e| e.downcast_ref()) } } impl AliasExpandError for TemplateParseError { fn invalid_arguments(err: InvalidArguments<'_>) -> Self { err.into() } fn recursive_expansion(id: AliasId<'_>, span: pest::Span<'_>) -> Self { Self::with_span(TemplateParseErrorKind::RecursiveAlias(id.to_string()), span) } fn within_alias_expansion(self, id: AliasId<'_>, span: pest::Span<'_>) -> Self { let kind = match id { AliasId::Symbol(_) | AliasId::Function(..) => { TemplateParseErrorKind::InAliasExpansion(id.to_string()) } AliasId::Parameter(_) => TemplateParseErrorKind::InParameterExpansion(id.to_string()), }; Self::with_span(kind, span).with_source(self) } } impl From<pest::error::Error<Rule>> for TemplateParseError { fn from(err: pest::error::Error<Rule>) -> Self { Self { kind: TemplateParseErrorKind::SyntaxError, pest_error: Box::new(rename_rules_in_pest_error(err)), source: None, } } } impl From<InvalidArguments<'_>> for TemplateParseError { fn from(err: InvalidArguments<'_>) -> Self { let kind = TemplateParseErrorKind::InvalidArguments { name: err.name.to_owned(), message: err.message, }; Self::with_span(kind, err.span) } } fn rename_rules_in_pest_error(err: pest::error::Error<Rule>) -> pest::error::Error<Rule> { err.renamed_rules(|rule| { rule.to_symbol() .map(|sym| format!("`{sym}`")) .unwrap_or_else(|| format!("<{rule:?}>")) }) } #[derive(Clone, Debug, PartialEq)] pub enum ExpressionKind<'i> { Identifier(&'i str), Boolean(bool), Integer(i64), String(String), /// `<kind>:"<value>"` StringPattern { kind: &'i str, value: String, }, Unary(UnaryOp, Box<ExpressionNode<'i>>), Binary(BinaryOp, Box<ExpressionNode<'i>>, Box<ExpressionNode<'i>>), Concat(Vec<ExpressionNode<'i>>), FunctionCall(Box<FunctionCallNode<'i>>), MethodCall(Box<MethodCallNode<'i>>), Lambda(Box<LambdaNode<'i>>), /// Identity node to preserve the span in the source template text. 
AliasExpanded(AliasId<'i>, Box<ExpressionNode<'i>>), } impl<'i> FoldableExpression<'i> for ExpressionKind<'i> { fn fold<F>(self, folder: &mut F, span: pest::Span<'i>) -> Result<Self, F::Error> where F: ExpressionFolder<'i, Self> + ?Sized, { match self { Self::Identifier(name) => folder.fold_identifier(name, span), ExpressionKind::Boolean(_) | ExpressionKind::Integer(_) | ExpressionKind::String(_) | ExpressionKind::StringPattern { .. } => Ok(self), Self::Unary(op, arg) => { let arg = Box::new(folder.fold_expression(*arg)?); Ok(Self::Unary(op, arg)) } Self::Binary(op, lhs, rhs) => { let lhs = Box::new(folder.fold_expression(*lhs)?); let rhs = Box::new(folder.fold_expression(*rhs)?); Ok(Self::Binary(op, lhs, rhs)) } Self::Concat(nodes) => Ok(Self::Concat(dsl_util::fold_expression_nodes( folder, nodes, )?)), Self::FunctionCall(function) => folder.fold_function_call(function, span), Self::MethodCall(method) => { // Method call is syntactically different from function call. let method = Box::new(MethodCallNode { object: folder.fold_expression(method.object)?, function: dsl_util::fold_function_call_args(folder, method.function)?, }); Ok(Self::MethodCall(method)) } Self::Lambda(lambda) => { let lambda = Box::new(LambdaNode { params: lambda.params, params_span: lambda.params_span, body: folder.fold_expression(lambda.body)?, }); Ok(Self::Lambda(lambda)) } Self::AliasExpanded(id, subst) => { let subst = Box::new(folder.fold_expression(*subst)?); Ok(Self::AliasExpanded(id, subst)) } } } } impl<'i> AliasExpandableExpression<'i> for ExpressionKind<'i> { fn identifier(name: &'i str) -> Self { ExpressionKind::Identifier(name) } fn function_call(function: Box<FunctionCallNode<'i>>) -> Self { ExpressionKind::FunctionCall(function) } fn alias_expanded(id: AliasId<'i>, subst: Box<ExpressionNode<'i>>) -> Self { ExpressionKind::AliasExpanded(id, subst) } } #[derive(Clone, Copy, Debug, Eq, Hash, PartialEq)] pub enum UnaryOp { /// `!` LogicalNot, /// `-` Negate, } #[derive(Clone, Copy, 
Debug, Eq, Hash, PartialEq)] pub enum BinaryOp { /// `||` LogicalOr, /// `&&` LogicalAnd, /// `==` Eq, /// `!=` Ne, /// `>=` Ge, /// `>` Gt, /// `<=` Le, /// `<` Lt, /// `+` Add, /// `-` Sub, /// `*` Mul, /// `/` Div, /// `%` Rem, } pub type ExpressionNode<'i> = dsl_util::ExpressionNode<'i, ExpressionKind<'i>>; pub type FunctionCallNode<'i> = dsl_util::FunctionCallNode<'i, ExpressionKind<'i>>; #[derive(Clone, Debug, PartialEq)] pub struct MethodCallNode<'i> { pub object: ExpressionNode<'i>, pub function: FunctionCallNode<'i>, } #[derive(Clone, Debug, PartialEq)] pub struct LambdaNode<'i> { pub params: Vec<&'i str>, pub params_span: pest::Span<'i>, pub body: ExpressionNode<'i>, } fn parse_identifier_or_literal(pair: Pair<Rule>) -> ExpressionKind { assert_eq!(pair.as_rule(), Rule::identifier); match pair.as_str() { "false" => ExpressionKind::Boolean(false), "true" => ExpressionKind::Boolean(true), name => ExpressionKind::Identifier(name), } } fn parse_identifier_name(pair: Pair<'_, Rule>) -> TemplateParseResult<&str> { let span = pair.as_span(); if let ExpressionKind::Identifier(name) = parse_identifier_or_literal(pair) { Ok(name) } else { Err(TemplateParseError::expression("Expected identifier", span)) } } fn parse_formal_parameters(params_pair: Pair<'_, Rule>) -> TemplateParseResult<Vec<&str>> { assert_eq!(params_pair.as_rule(), Rule::formal_parameters); let params_span = params_pair.as_span(); let params: Vec<_> = params_pair .into_inner() .map(parse_identifier_name) .try_collect()?; if params.iter().all_unique() { Ok(params) } else { Err(TemplateParseError::with_span( TemplateParseErrorKind::RedefinedFunctionParameter, params_span, )) } } fn parse_lambda_node(pair: Pair<Rule>) -> TemplateParseResult<LambdaNode> { assert_eq!(pair.as_rule(), Rule::lambda); let mut inner = pair.into_inner(); let params_pair = inner.next().unwrap(); let params_span = params_pair.as_span(); let body_pair = inner.next().unwrap(); let params = parse_formal_parameters(params_pair)?; let 
body = parse_template_node(body_pair)?; Ok(LambdaNode { params, params_span, body, }) } fn parse_raw_string_literal(pair: Pair<Rule>) -> String { let [content] = pair.into_inner().collect_array().unwrap(); assert_eq!(content.as_rule(), Rule::raw_string_content); content.as_str().to_owned() } fn parse_term_node(pair: Pair<Rule>) -> TemplateParseResult<ExpressionNode> { assert_eq!(pair.as_rule(), Rule::term); let mut inner = pair.into_inner(); let primary = inner.next().unwrap(); assert_eq!(primary.as_rule(), Rule::primary); let primary_span = primary.as_span(); let expr = primary.into_inner().next().unwrap(); let primary_kind = match expr.as_rule() { Rule::string_literal => { let text = STRING_LITERAL_PARSER.parse(expr.into_inner()); ExpressionKind::String(text) } Rule::raw_string_literal => { let text = parse_raw_string_literal(expr); ExpressionKind::String(text) } Rule::integer_literal => { let value = expr.as_str().parse().map_err(|err| { TemplateParseError::expression("Invalid integer literal", expr.as_span()) .with_source(err) })?; ExpressionKind::Integer(value) } Rule::string_pattern => { let [kind, op, literal] = expr.into_inner().collect_array().unwrap(); assert_eq!(kind.as_rule(), Rule::string_pattern_identifier); assert_eq!(op.as_rule(), Rule::pattern_kind_op); let kind = kind.as_str(); let text = match literal.as_rule() { Rule::string_literal => STRING_LITERAL_PARSER.parse(literal.into_inner()), Rule::raw_string_literal => parse_raw_string_literal(literal), other => { panic!("Unexpected literal rule in string pattern: {other:?}") } }; // The actual parsing and construction of the pattern is deferred to later. 
ExpressionKind::StringPattern { kind, value: text } } Rule::identifier => parse_identifier_or_literal(expr), Rule::function => { let function = Box::new(FUNCTION_CALL_PARSER.parse( expr, parse_identifier_name, parse_template_node, )?); ExpressionKind::FunctionCall(function) } Rule::lambda => { let lambda = Box::new(parse_lambda_node(expr)?); ExpressionKind::Lambda(lambda) } // Ignore inner span to preserve parenthesized expression as such. Rule::template => parse_template_node(expr)?.kind, other => panic!("unexpected term: {other:?}"), }; let primary_node = ExpressionNode::new(primary_kind, primary_span); inner.try_fold(primary_node, |object, chain| { assert_eq!(chain.as_rule(), Rule::function); let span = object.span.start_pos().span(&chain.as_span().end_pos()); let method = Box::new(MethodCallNode { object, function: FUNCTION_CALL_PARSER.parse( chain, parse_identifier_name, parse_template_node, )?, }); Ok(ExpressionNode::new( ExpressionKind::MethodCall(method), span, )) }) } fn parse_expression_node(pair: Pair<Rule>) -> TemplateParseResult<ExpressionNode> { assert_eq!(pair.as_rule(), Rule::expression); static PRATT: LazyLock<PrattParser<Rule>> = LazyLock::new(|| { PrattParser::new() .op(Op::infix(Rule::logical_or_op, Assoc::Left)) .op(Op::infix(Rule::logical_and_op, Assoc::Left)) .op(Op::infix(Rule::eq_op, Assoc::Left) | Op::infix(Rule::ne_op, Assoc::Left)) .op(Op::infix(Rule::ge_op, Assoc::Left) | Op::infix(Rule::gt_op, Assoc::Left) | Op::infix(Rule::le_op, Assoc::Left) | Op::infix(Rule::lt_op, Assoc::Left)) .op(Op::infix(Rule::add_op, Assoc::Left) | Op::infix(Rule::sub_op, Assoc::Left)) .op(Op::infix(Rule::mul_op, Assoc::Left) | Op::infix(Rule::div_op, Assoc::Left) | Op::infix(Rule::rem_op, Assoc::Left)) .op(Op::prefix(Rule::logical_not_op) | Op::prefix(Rule::negate_op)) }); PRATT .map_primary(parse_term_node) .map_prefix(|op, rhs| { let op_kind = match op.as_rule() { Rule::logical_not_op => UnaryOp::LogicalNot, Rule::negate_op => UnaryOp::Negate, r => 
panic!("unexpected prefix operator rule {r:?}"), }; let rhs = Box::new(rhs?); let span = op.as_span().start_pos().span(&rhs.span.end_pos()); let expr = ExpressionKind::Unary(op_kind, rhs); Ok(ExpressionNode::new(expr, span)) }) .map_infix(|lhs, op, rhs| { let op_kind = match op.as_rule() { Rule::logical_or_op => BinaryOp::LogicalOr, Rule::logical_and_op => BinaryOp::LogicalAnd, Rule::eq_op => BinaryOp::Eq, Rule::ne_op => BinaryOp::Ne, Rule::ge_op => BinaryOp::Ge, Rule::gt_op => BinaryOp::Gt, Rule::le_op => BinaryOp::Le, Rule::lt_op => BinaryOp::Lt, Rule::add_op => BinaryOp::Add, Rule::sub_op => BinaryOp::Sub, Rule::mul_op => BinaryOp::Mul, Rule::div_op => BinaryOp::Div, Rule::rem_op => BinaryOp::Rem, r => panic!("unexpected infix operator rule {r:?}"), }; let lhs = Box::new(lhs?); let rhs = Box::new(rhs?); let span = lhs.span.start_pos().span(&rhs.span.end_pos()); let expr = ExpressionKind::Binary(op_kind, lhs, rhs); Ok(ExpressionNode::new(expr, span)) }) .parse(pair.into_inner()) } fn parse_template_node(pair: Pair<Rule>) -> TemplateParseResult<ExpressionNode> { assert_eq!(pair.as_rule(), Rule::template); let span = pair.as_span(); let inner = pair.into_inner(); let mut nodes: Vec<_> = inner .filter_map(|pair| match pair.as_rule() { Rule::concat_op => None, Rule::expression => Some(parse_expression_node(pair)), r => panic!("unexpected template item rule {r:?}"), }) .try_collect()?; if nodes.len() == 1 { Ok(nodes.pop().unwrap()) } else { Ok(ExpressionNode::new(ExpressionKind::Concat(nodes), span)) } } /// Parses text into AST nodes. No type/name checking is made at this stage. 
pub fn parse_template(template_text: &str) -> TemplateParseResult<ExpressionNode<'_>> { let mut pairs: Pairs<Rule> = TemplateParser::parse(Rule::program, template_text)?; let first_pair = pairs.next().unwrap(); if first_pair.as_rule() == Rule::EOI { let span = first_pair.as_span(); Ok(ExpressionNode::new(ExpressionKind::Concat(vec![]), span)) } else { parse_template_node(first_pair) } } pub type TemplateAliasesMap = AliasesMap<TemplateAliasParser, String>; #[derive(Clone, Debug, Default)] pub struct TemplateAliasParser; impl AliasDeclarationParser for TemplateAliasParser { type Error = TemplateParseError; fn parse_declaration(&self, source: &str) -> Result<AliasDeclaration, Self::Error> { let mut pairs = TemplateParser::parse(Rule::alias_declaration, source)?; let first = pairs.next().unwrap(); match first.as_rule() { Rule::identifier => { let name = parse_identifier_name(first)?.to_owned(); Ok(AliasDeclaration::Symbol(name)) } Rule::function_alias_declaration => { let mut inner = first.into_inner(); let name_pair = inner.next().unwrap(); let params_pair = inner.next().unwrap(); let name = parse_identifier_name(name_pair)?.to_owned(); let params = parse_formal_parameters(params_pair)? .into_iter() .map(|s| s.to_owned()) .collect(); Ok(AliasDeclaration::Function(name, params)) } r => panic!("unexpected alias declaration rule {r:?}"), } } } impl AliasDefinitionParser for TemplateAliasParser { type Output<'i> = ExpressionKind<'i>; type Error = TemplateParseError; fn parse_definition<'i>(&self, source: &'i str) -> Result<ExpressionNode<'i>, Self::Error> { parse_template(source) } } /// Parses text into AST nodes, and expands aliases. /// /// No type/name checking is made at this stage. pub fn parse<'i>( template_text: &'i str, aliases_map: &'i TemplateAliasesMap, ) -> TemplateParseResult<ExpressionNode<'i>> { let node = parse_template(template_text)?; dsl_util::expand_aliases(node, aliases_map) } /// Unwraps inner value if the given `node` is a string literal. 
pub fn expect_string_literal<'a>(node: &'a ExpressionNode<'_>) -> TemplateParseResult<&'a str> { catch_aliases_no_diagnostics(node, |node| match &node.kind { ExpressionKind::String(s) => Ok(s.as_str()), _ => Err(TemplateParseError::expression( "Expected string literal", node.span, )), }) } /// Unwraps inner value if the given `node` is a string pattern /// /// This forces it to be static so that it need not be part of the type system. pub fn expect_string_pattern(node: &ExpressionNode<'_>) -> TemplateParseResult<StringPattern> { catch_aliases_no_diagnostics(node, |node| match &node.kind { ExpressionKind::StringPattern { kind, value } => StringPattern::from_str_kind(value, kind) .map_err(|err| { TemplateParseError::expression("Bad string pattern", node.span).with_source(err) }), ExpressionKind::String(string) => Ok(StringPattern::Substring(string.clone())), _ => Err(TemplateParseError::expression( "Expected string pattern", node.span, )), }) } /// Unwraps inner node if the given `node` is a lambda. pub fn expect_lambda<'a, 'i>( node: &'a ExpressionNode<'i>, ) -> TemplateParseResult<&'a LambdaNode<'i>> { catch_aliases_no_diagnostics(node, |node| match &node.kind { ExpressionKind::Lambda(lambda) => Ok(lambda.as_ref()), _ => Err(TemplateParseError::expression( "Expected lambda expression", node.span, )), }) } /// Applies the given function to the innermost `node` by unwrapping alias /// expansion nodes. Appends alias expansion stack to error and diagnostics. 
pub fn catch_aliases<'a, 'i, T>( diagnostics: &mut TemplateDiagnostics, node: &'a ExpressionNode<'i>, f: impl FnOnce(&mut TemplateDiagnostics, &'a ExpressionNode<'i>) -> TemplateParseResult<T>, ) -> TemplateParseResult<T> { let (node, stack) = skip_aliases(node); if stack.is_empty() { f(diagnostics, node) } else { let mut inner_diagnostics = TemplateDiagnostics::new(); let result = f(&mut inner_diagnostics, node); diagnostics.extend_with(inner_diagnostics, |diag| attach_aliases_err(diag, &stack)); result.map_err(|err| attach_aliases_err(err, &stack)) } } fn catch_aliases_no_diagnostics<'a, 'i, T>( node: &'a ExpressionNode<'i>, f: impl FnOnce(&'a ExpressionNode<'i>) -> TemplateParseResult<T>, ) -> TemplateParseResult<T> { let (node, stack) = skip_aliases(node); f(node).map_err(|err| attach_aliases_err(err, &stack)) } fn skip_aliases<'a, 'i>( mut node: &'a ExpressionNode<'i>, ) -> (&'a ExpressionNode<'i>, Vec<(AliasId<'i>, pest::Span<'i>)>) { let mut stack = Vec::new(); while let ExpressionKind::AliasExpanded(id, subst) = &node.kind { stack.push((*id, node.span)); node = subst; } (node, stack) } fn attach_aliases_err( err: TemplateParseError, stack: &[(AliasId<'_>, pest::Span<'_>)], ) -> TemplateParseError { stack .iter() .rfold(err, |err, &(id, span)| err.within_alias_expansion(id, span)) } /// Looks up `table` by the given function name. pub fn lookup_function<'a, V>( table: &'a HashMap<&str, V>, function: &FunctionCallNode, ) -> TemplateParseResult<&'a V> { if let Some(value) = table.get(function.name) { Ok(value) } else { let candidates = collect_similar(function.name, table.keys()); Err(TemplateParseError::with_span( TemplateParseErrorKind::NoSuchFunction { name: function.name.to_owned(), candidates, }, function.name_span, )) } } /// Looks up `table` by the given method name. 
pub fn lookup_method<'a, V>( type_name: impl Into<String>, table: &'a HashMap<&str, V>, function: &FunctionCallNode, ) -> TemplateParseResult<&'a V> { if let Some(value) = table.get(function.name) { Ok(value) } else { let candidates = collect_similar(function.name, table.keys()); Err(TemplateParseError::with_span( TemplateParseErrorKind::NoSuchMethod { type_name: type_name.into(), name: function.name.to_owned(), candidates, }, function.name_span, )) } } #[cfg(test)] mod tests { use assert_matches::assert_matches; use jj_lib::dsl_util::KeywordArgument; use super::*; #[derive(Debug)] struct WithTemplateAliasesMap(TemplateAliasesMap); impl WithTemplateAliasesMap { fn parse<'i>(&'i self, template_text: &'i str) -> TemplateParseResult<ExpressionNode<'i>> { parse(template_text, &self.0) } fn parse_normalized<'i>(&'i self, template_text: &'i str) -> ExpressionNode<'i> { normalize_tree(self.parse(template_text).unwrap()) } } fn with_aliases( aliases: impl IntoIterator<Item = (impl AsRef<str>, impl Into<String>)>, ) -> WithTemplateAliasesMap { let mut aliases_map = TemplateAliasesMap::new(); for (decl, defn) in aliases { aliases_map.insert(decl, defn).unwrap(); } WithTemplateAliasesMap(aliases_map) } fn parse_into_kind(template_text: &str) -> Result<ExpressionKind<'_>, TemplateParseErrorKind> { parse_template(template_text) .map(|node| node.kind) .map_err(|err| err.kind) } fn parse_normalized(template_text: &str) -> ExpressionNode<'_> { normalize_tree(parse_template(template_text).unwrap()) } /// Drops auxiliary data of AST so it can be compared with other node. 
fn normalize_tree(node: ExpressionNode) -> ExpressionNode { fn empty_span() -> pest::Span<'static> { pest::Span::new("", 0, 0).unwrap() } fn normalize_list(nodes: Vec<ExpressionNode>) -> Vec<ExpressionNode> { nodes.into_iter().map(normalize_tree).collect() } fn normalize_function_call(function: FunctionCallNode) -> FunctionCallNode { FunctionCallNode { name: function.name, name_span: empty_span(), args: normalize_list(function.args), keyword_args: function .keyword_args .into_iter() .map(|arg| KeywordArgument { name: arg.name, name_span: empty_span(), value: normalize_tree(arg.value), }) .collect(), args_span: empty_span(), } } let normalized_kind = match node.kind { ExpressionKind::Identifier(_) | ExpressionKind::Boolean(_) | ExpressionKind::Integer(_) | ExpressionKind::String(_) => node.kind, ExpressionKind::StringPattern { .. } => node.kind, ExpressionKind::Unary(op, arg) => { let arg = Box::new(normalize_tree(*arg)); ExpressionKind::Unary(op, arg) } ExpressionKind::Binary(op, lhs, rhs) => { let lhs = Box::new(normalize_tree(*lhs)); let rhs = Box::new(normalize_tree(*rhs)); ExpressionKind::Binary(op, lhs, rhs) } ExpressionKind::Concat(nodes) => ExpressionKind::Concat(normalize_list(nodes)), ExpressionKind::FunctionCall(function) => { let function = Box::new(normalize_function_call(*function)); ExpressionKind::FunctionCall(function) } ExpressionKind::MethodCall(method) => { let method = Box::new(MethodCallNode { object: normalize_tree(method.object), function: normalize_function_call(method.function), }); ExpressionKind::MethodCall(method) } ExpressionKind::Lambda(lambda) => { let lambda = Box::new(LambdaNode {
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
true
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/movement_util.rs
cli/src/movement_util.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write as _; use std::sync::Arc; use itertools::Itertools as _; use jj_lib::backend::CommitId; use jj_lib::commit::Commit; use jj_lib::repo::Repo as _; use jj_lib::revset::ResolvedRevsetExpression; use jj_lib::revset::RevsetExpression; use jj_lib::revset::RevsetFilterPredicate; use jj_lib::revset::RevsetIteratorExt as _; use crate::cli_util::CommandHelper; use crate::cli_util::WorkspaceCommandHelper; use crate::cli_util::short_commit_hash; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::command_error::user_error_with_hint; use crate::ui::Ui; #[derive(Clone, Debug, Eq, PartialEq)] pub(crate) struct MovementArgs { pub offset: u64, pub edit: bool, pub no_edit: bool, pub conflict: bool, } #[derive(Clone, Debug, Eq, PartialEq)] struct MovementArgsInternal { offset: u64, should_edit: bool, conflict: bool, } #[derive(Clone, Copy, Debug, Eq, PartialEq)] pub(crate) enum Direction { Next, Prev, } impl Direction { fn cmd(&self) -> &'static str { match self { Self::Next => "next", Self::Prev => "prev", } } fn target_not_found_error( &self, workspace_command: &WorkspaceCommandHelper, args: &MovementArgsInternal, commits: &[Commit], ) -> CommandError { let offset = args.offset; let err_msg = match (self, args.should_edit, args.conflict) { // in edit mode, start_revset is the WC, so we only look for direct descendants. 
(Self::Next, true, true) => { String::from("The working copy has no descendants with conflicts") } (Self::Next, true, false) => { format!("No descendant found {offset} commit(s) forward from the working copy",) } // in non-edit mode, start_revset is the parent of WC, so we look for other descendants // of start_revset. (Self::Next, false, true) => { String::from("The working copy parent(s) have no other descendants with conflicts") } (Self::Next, false, false) => format!( "No other descendant found {offset} commit(s) forward from the working copy \ parent(s)", ), // The WC can never be an ancestor of the start_revset since start_revset is either // itself or it's parent. (Self::Prev, true, true) => { String::from("The working copy has no ancestors with conflicts") } (Self::Prev, true, false) => { format!("No ancestor found {offset} commit(s) back from the working copy",) } (Self::Prev, false, true) => { String::from("The working copy parent(s) have no ancestors with conflicts") } (Self::Prev, false, false) => format!( "No ancestor found {offset} commit(s) back from the working copy parents(s)", ), }; let template = workspace_command.commit_summary_template(); let mut cmd_err = user_error(err_msg); for commit in commits { cmd_err.add_formatted_hint_with(|formatter| { if args.should_edit { write!(formatter, "Working copy: ")?; } else { write!(formatter, "Working copy parent: ")?; } template.format(commit, formatter) }); } cmd_err } fn build_target_revset( &self, working_revset: &Arc<ResolvedRevsetExpression>, start_revset: &Arc<ResolvedRevsetExpression>, args: &MovementArgsInternal, ) -> Result<Arc<ResolvedRevsetExpression>, CommandError> { let nth = match (self, args.should_edit) { (Self::Next, true) => start_revset.descendants_at(args.offset), (Self::Next, false) => start_revset .children() .minus(working_revset) .descendants_at(args.offset - 1), (Self::Prev, _) => start_revset.ancestors_at(args.offset), }; let target_revset = match (self, args.conflict) { (_, 
false) => nth, (Self::Next, true) => nth .descendants() .filtered(RevsetFilterPredicate::HasConflict) .roots(), // If people desire to move to the root conflict, replace the `heads()` below // with `roots(). But let's wait for feedback. (Self::Prev, true) => nth .ancestors() .filtered(RevsetFilterPredicate::HasConflict) .heads(), }; Ok(target_revset) } } fn get_target_commit( ui: &mut Ui, workspace_command: &WorkspaceCommandHelper, direction: Direction, working_commit_id: &CommitId, args: &MovementArgsInternal, ) -> Result<Commit, CommandError> { let wc_revset = RevsetExpression::commit(working_commit_id.clone()); // If we're not editing, the working-copy shouldn't have any children if !args.should_edit && !workspace_command .repo() .view() .heads() .contains(working_commit_id) { return Err(user_error_with_hint( "The working copy must not have any children", "Create a new commit on top of this one or use `--edit`", )); } // If we're editing, start at the working-copy commit. Otherwise, start from // its direct parent(s). let start_revset = if args.should_edit { wc_revset.clone() } else { wc_revset.parents() }; let target_revset = direction.build_target_revset(&wc_revset, &start_revset, args)?; let targets: Vec<Commit> = target_revset .evaluate(workspace_command.repo().as_ref())? .iter() .commits(workspace_command.repo().store()) .try_collect()?; let target = match targets.as_slice() { [target] => target, [] => { // We found no ancestor/descendant. let start_commits: Vec<Commit> = start_revset .evaluate(workspace_command.repo().as_ref())? 
.iter() .commits(workspace_command.repo().store()) .try_collect()?; return Err(direction.target_not_found_error(workspace_command, args, &start_commits)); } commits => choose_commit(ui, workspace_command, direction, commits)?, }; Ok(target.clone()) } fn choose_commit<'a>( ui: &Ui, workspace_command: &WorkspaceCommandHelper, direction: Direction, commits: &'a [Commit], ) -> Result<&'a Commit, CommandError> { writeln!( ui.stderr(), "ambiguous {} commit, choose one to target:", direction.cmd() )?; let mut formatter = ui.stderr_formatter(); let template = workspace_command.commit_summary_template(); let mut choices: Vec<String> = Default::default(); for (i, commit) in commits.iter().enumerate() { write!(formatter, "{}: ", i + 1)?; template.format(commit, formatter.as_mut())?; writeln!(formatter)?; choices.push(format!("{}", i + 1)); } writeln!(formatter, "q: quit the prompt")?; choices.push("q".to_string()); drop(formatter); let index = ui.prompt_choice( "enter the index of the commit you want to target", &choices, None, )?; commits .get(index) .ok_or_else(|| user_error("ambiguous target commit")) } pub(crate) fn move_to_commit( ui: &mut Ui, command: &CommandHelper, direction: Direction, args: &MovementArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; let current_wc_id = workspace_command .get_wc_commit_id() .ok_or_else(|| user_error("This command requires a working copy"))?; let config_edit_flag = workspace_command.settings().get_bool("ui.movement.edit")?; let args = MovementArgsInternal { should_edit: args.edit || (!args.no_edit && config_edit_flag), offset: args.offset, conflict: args.conflict, }; let target = get_target_commit(ui, &workspace_command, direction, current_wc_id, &args)?; let current_short = short_commit_hash(current_wc_id); let target_short = short_commit_hash(target.id()); let cmd = direction.cmd(); // We're editing, just move to the target commit. 
if args.should_edit { // We're editing, the target must be rewritable. workspace_command.check_rewritable([target.id()])?; let mut tx = workspace_command.start_transaction(); tx.edit(&target)?; tx.finish( ui, format!("{cmd}: {current_short} -> editing {target_short}"), )?; return Ok(()); } let mut tx = workspace_command.start_transaction(); // Move the working-copy commit to the new parent. tx.check_out(&target)?; tx.finish(ui, format!("{cmd}: {current_short} -> {target_short}"))?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/main.rs
cli/src/main.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use jj_cli::cli_util::CliRunner; fn main() -> std::process::ExitCode { CliRunner::init().version(env!("JJ_VERSION")).run().into() }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/formatter.rs
cli/src/formatter.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashMap; use std::fmt; use std::io; use std::io::Error; use std::io::Write; use std::mem; use std::ops::Deref; use std::ops::DerefMut; use std::ops::Range; use std::sync::Arc; use crossterm::queue; use crossterm::style::Attribute; use crossterm::style::Color; use crossterm::style::SetAttribute; use crossterm::style::SetBackgroundColor; use crossterm::style::SetForegroundColor; use itertools::Itertools as _; use jj_lib::config::ConfigGetError; use jj_lib::config::StackedConfig; use serde::de::Deserialize as _; use serde::de::Error as _; use serde::de::IntoDeserializer as _; // Lets the caller label strings and translates the labels to colors pub trait Formatter: Write { /// Returns the backing `Write`. This is useful for writing data that is /// already formatted, such as in the graphical log. 
fn raw(&mut self) -> io::Result<Box<dyn Write + '_>>; fn push_label(&mut self, label: &str); fn pop_label(&mut self); } impl<T: Formatter + ?Sized> Formatter for &mut T { fn raw(&mut self) -> io::Result<Box<dyn Write + '_>> { <T as Formatter>::raw(self) } fn push_label(&mut self, label: &str) { <T as Formatter>::push_label(self, label); } fn pop_label(&mut self) { <T as Formatter>::pop_label(self); } } impl<T: Formatter + ?Sized> Formatter for Box<T> { fn raw(&mut self) -> io::Result<Box<dyn Write + '_>> { <T as Formatter>::raw(self) } fn push_label(&mut self, label: &str) { <T as Formatter>::push_label(self, label); } fn pop_label(&mut self) { <T as Formatter>::pop_label(self); } } /// [`Formatter`] adapters. pub trait FormatterExt: Formatter { fn labeled(&mut self, label: &str) -> LabeledScope<&mut Self> { LabeledScope::new(self, label) } fn into_labeled(self, label: &str) -> LabeledScope<Self> where Self: Sized, { LabeledScope::new(self, label) } } impl<T: Formatter + ?Sized> FormatterExt for T {} /// [`Formatter`] wrapper to apply a label within a lexical scope. #[must_use] pub struct LabeledScope<T: Formatter> { formatter: T, } impl<T: Formatter> LabeledScope<T> { pub fn new(mut formatter: T, label: &str) -> Self { formatter.push_label(label); Self { formatter } } // TODO: move to FormatterExt? /// Turns into writer that prints labeled message with the `heading`. pub fn with_heading<H>(self, heading: H) -> HeadingLabeledWriter<T, H> { HeadingLabeledWriter::new(self, heading) } } impl<T: Formatter> Drop for LabeledScope<T> { fn drop(&mut self) { self.formatter.pop_label(); } } impl<T: Formatter> Deref for LabeledScope<T> { type Target = T; fn deref(&self) -> &Self::Target { &self.formatter } } impl<T: Formatter> DerefMut for LabeledScope<T> { fn deref_mut(&mut self) -> &mut Self::Target { &mut self.formatter } } // There's no `impl Formatter for LabeledScope<T>` so nested .labeled() calls // wouldn't construct `LabeledScope<LabeledScope<T>>`. 
/// [`Formatter`] wrapper that prints the `heading` once. /// /// The `heading` will be printed within the first `write!()` or `writeln!()` /// invocation, which is handy because `io::Error` can be handled there. pub struct HeadingLabeledWriter<T: Formatter, H> { formatter: LabeledScope<T>, heading: Option<H>, } impl<T: Formatter, H> HeadingLabeledWriter<T, H> { pub fn new(formatter: LabeledScope<T>, heading: H) -> Self { Self { formatter, heading: Some(heading), } } } impl<T: Formatter, H: fmt::Display> HeadingLabeledWriter<T, H> { pub fn write_fmt(&mut self, args: fmt::Arguments<'_>) -> io::Result<()> { if let Some(heading) = self.heading.take() { write!(self.formatter.labeled("heading"), "{heading}")?; } self.formatter.write_fmt(args) } } type Rules = Vec<(Vec<String>, Style)>; /// Creates `Formatter` instances with preconfigured parameters. #[derive(Clone, Debug)] pub struct FormatterFactory { kind: FormatterFactoryKind, } #[derive(Clone, Debug)] enum FormatterFactoryKind { PlainText, Sanitized, Color { rules: Arc<Rules>, debug: bool }, } impl FormatterFactory { pub fn plain_text() -> Self { let kind = FormatterFactoryKind::PlainText; Self { kind } } pub fn sanitized() -> Self { let kind = FormatterFactoryKind::Sanitized; Self { kind } } pub fn color(config: &StackedConfig, debug: bool) -> Result<Self, ConfigGetError> { let rules = Arc::new(rules_from_config(config)?); let kind = FormatterFactoryKind::Color { rules, debug }; Ok(Self { kind }) } pub fn new_formatter<'output, W: Write + 'output>( &self, output: W, ) -> Box<dyn Formatter + 'output> { match &self.kind { FormatterFactoryKind::PlainText => Box::new(PlainTextFormatter::new(output)), FormatterFactoryKind::Sanitized => Box::new(SanitizingFormatter::new(output)), FormatterFactoryKind::Color { rules, debug } => { Box::new(ColorFormatter::new(output, rules.clone(), *debug)) } } } pub fn is_color(&self) -> bool { matches!(self.kind, FormatterFactoryKind::Color { .. 
}) } } pub struct PlainTextFormatter<W> { output: W, } impl<W> PlainTextFormatter<W> { pub fn new(output: W) -> Self { Self { output } } } impl<W: Write> Write for PlainTextFormatter<W> { fn write(&mut self, data: &[u8]) -> Result<usize, Error> { self.output.write(data) } fn flush(&mut self) -> Result<(), Error> { self.output.flush() } } impl<W: Write> Formatter for PlainTextFormatter<W> { fn raw(&mut self) -> io::Result<Box<dyn Write + '_>> { Ok(Box::new(self.output.by_ref())) } fn push_label(&mut self, _label: &str) {} fn pop_label(&mut self) {} } pub struct SanitizingFormatter<W> { output: W, } impl<W> SanitizingFormatter<W> { pub fn new(output: W) -> Self { Self { output } } } impl<W: Write> Write for SanitizingFormatter<W> { fn write(&mut self, data: &[u8]) -> Result<usize, Error> { write_sanitized(&mut self.output, data)?; Ok(data.len()) } fn flush(&mut self) -> Result<(), Error> { self.output.flush() } } impl<W: Write> Formatter for SanitizingFormatter<W> { fn raw(&mut self) -> io::Result<Box<dyn Write + '_>> { Ok(Box::new(self.output.by_ref())) } fn push_label(&mut self, _label: &str) {} fn pop_label(&mut self) {} } #[derive(Clone, Debug, Default, PartialEq, Eq, serde::Deserialize)] #[serde(default, rename_all = "kebab-case")] pub struct Style { #[serde(deserialize_with = "deserialize_color_opt")] pub fg: Option<Color>, #[serde(deserialize_with = "deserialize_color_opt")] pub bg: Option<Color>, pub bold: Option<bool>, pub dim: Option<bool>, pub italic: Option<bool>, pub underline: Option<bool>, pub reverse: Option<bool>, } impl Style { fn merge(&mut self, other: &Self) { self.fg = other.fg.or(self.fg); self.bg = other.bg.or(self.bg); self.bold = other.bold.or(self.bold); self.dim = other.dim.or(self.dim); self.italic = other.italic.or(self.italic); self.underline = other.underline.or(self.underline); self.reverse = other.reverse.or(self.reverse); } } #[derive(Clone, Debug)] pub struct ColorFormatter<W: Write> { output: W, rules: Arc<Rules>, /// The stack of 
currently applied labels. These determine the desired /// style. labels: Vec<String>, cached_styles: HashMap<Vec<String>, Style>, /// The style we last wrote to the output. current_style: Style, /// The debug string (space-separated labels) we last wrote to the output. /// Initialize to None to turn debug strings off. current_debug: Option<String>, } impl<W: Write> ColorFormatter<W> { pub fn new(output: W, rules: Arc<Rules>, debug: bool) -> Self { Self { output, rules, labels: vec![], cached_styles: HashMap::new(), current_style: Style::default(), current_debug: debug.then(String::new), } } pub fn for_config( output: W, config: &StackedConfig, debug: bool, ) -> Result<Self, ConfigGetError> { let rules = rules_from_config(config)?; Ok(Self::new(output, Arc::new(rules), debug)) } fn requested_style(&mut self) -> Style { if let Some(cached) = self.cached_styles.get(&self.labels) { cached.clone() } else { // We use the reverse list of matched indices as a measure of how well the rule // matches the actual labels. For example, for rule "a d" and the actual labels // "a b c d", we'll get [3,0]. We compare them by Rust's default Vec comparison. // That means "a d" will trump both rule "d" (priority [3]) and rule // "a b c" (priority [2,1,0]). let mut matched_styles = vec![]; for (labels, style) in self.rules.as_ref() { let mut labels_iter = self.labels.iter().enumerate(); // The indexes in the current label stack that match the required label. 
let mut matched_indices = vec![]; for required_label in labels { for (label_index, label) in &mut labels_iter { if label == required_label { matched_indices.push(label_index); break; } } } if matched_indices.len() == labels.len() { matched_indices.reverse(); matched_styles.push((style, matched_indices)); } } matched_styles.sort_by_key(|(_, indices)| indices.clone()); let mut style = Style::default(); for (matched_style, _) in matched_styles { style.merge(matched_style); } self.cached_styles .insert(self.labels.clone(), style.clone()); style } } fn write_new_style(&mut self) -> io::Result<()> { let new_debug = match &self.current_debug { Some(current) => { let joined = self.labels.join(" "); if joined == *current { None } else { if !current.is_empty() { write!(self.output, ">>")?; } Some(joined) } } None => None, }; let new_style = self.requested_style(); if new_style != self.current_style { // Bold and Dim change intensity, and NormalIntensity would reset // both. Also, NoBold results in double underlining on some // terminals. Therefore, we use Reset instead. However, that resets // other attributes as well, so we reset our record of the current // style so we re-apply the other attributes below. Maybe we can use // NormalIntensity instead of Reset, but let's simply reset all // attributes to work around potential terminal incompatibility. 
let new_bold = new_style.bold.unwrap_or_default(); let new_dim = new_style.dim.unwrap_or_default(); if (new_style.bold != self.current_style.bold && !new_bold) || (new_style.dim != self.current_style.dim && !new_dim) { queue!(self.output, SetAttribute(Attribute::Reset))?; self.current_style = Style::default(); }; if new_style.bold != self.current_style.bold && new_bold { queue!(self.output, SetAttribute(Attribute::Bold))?; } if new_style.dim != self.current_style.dim && new_dim { queue!(self.output, SetAttribute(Attribute::Dim))?; } if new_style.italic != self.current_style.italic { if new_style.italic.unwrap_or_default() { queue!(self.output, SetAttribute(Attribute::Italic))?; } else { queue!(self.output, SetAttribute(Attribute::NoItalic))?; } } if new_style.underline != self.current_style.underline { if new_style.underline.unwrap_or_default() { queue!(self.output, SetAttribute(Attribute::Underlined))?; } else { queue!(self.output, SetAttribute(Attribute::NoUnderline))?; } } if new_style.reverse != self.current_style.reverse { if new_style.reverse.unwrap_or_default() { queue!(self.output, SetAttribute(Attribute::Reverse))?; } else { queue!(self.output, SetAttribute(Attribute::NoReverse))?; } } if new_style.fg != self.current_style.fg { queue!( self.output, SetForegroundColor(new_style.fg.unwrap_or(Color::Reset)) )?; } if new_style.bg != self.current_style.bg { queue!( self.output, SetBackgroundColor(new_style.bg.unwrap_or(Color::Reset)) )?; } self.current_style = new_style; } if let Some(d) = new_debug { if !d.is_empty() { write!(self.output, "<<{d}::")?; } self.current_debug = Some(d); } Ok(()) } } fn rules_from_config(config: &StackedConfig) -> Result<Rules, ConfigGetError> { config .table_keys("colors") .map(|key| { let labels = key .split_whitespace() .map(ToString::to_string) .collect_vec(); let style = config.get_value_with(["colors", key], |value| { if value.is_str() { Ok(Style { fg: Some(deserialize_color(value.into_deserializer())?), bg: None, bold: None, 
dim: None, italic: None, underline: None, reverse: None, }) } else if value.is_inline_table() { Style::deserialize(value.into_deserializer()) } else { Err(toml_edit::de::Error::custom(format!( "invalid type: {}, expected a color name or a table of styles", value.type_name() ))) } })?; Ok((labels, style)) }) .collect() } fn deserialize_color<'de, D>(deserializer: D) -> Result<Color, D::Error> where D: serde::Deserializer<'de>, { let color_str = String::deserialize(deserializer)?; color_for_string(&color_str).map_err(D::Error::custom) } fn deserialize_color_opt<'de, D>(deserializer: D) -> Result<Option<Color>, D::Error> where D: serde::Deserializer<'de>, { deserialize_color(deserializer).map(Some) } fn color_for_string(color_str: &str) -> Result<Color, String> { match color_str { "default" => Ok(Color::Reset), "black" => Ok(Color::Black), "red" => Ok(Color::DarkRed), "green" => Ok(Color::DarkGreen), "yellow" => Ok(Color::DarkYellow), "blue" => Ok(Color::DarkBlue), "magenta" => Ok(Color::DarkMagenta), "cyan" => Ok(Color::DarkCyan), "white" => Ok(Color::Grey), "bright black" => Ok(Color::DarkGrey), "bright red" => Ok(Color::Red), "bright green" => Ok(Color::Green), "bright yellow" => Ok(Color::Yellow), "bright blue" => Ok(Color::Blue), "bright magenta" => Ok(Color::Magenta), "bright cyan" => Ok(Color::Cyan), "bright white" => Ok(Color::White), _ => color_for_ansi256_index(color_str) .or_else(|| color_for_hex(color_str)) .ok_or_else(|| format!("Invalid color: {color_str}")), } } fn color_for_ansi256_index(color: &str) -> Option<Color> { color .strip_prefix("ansi-color-") .filter(|s| *s == "0" || !s.starts_with('0')) .and_then(|n| n.parse::<u8>().ok()) .map(Color::AnsiValue) } fn color_for_hex(color: &str) -> Option<Color> { if color.len() == 7 && color.starts_with('#') && color[1..].chars().all(|c| c.is_ascii_hexdigit()) { let r = u8::from_str_radix(&color[1..3], 16); let g = u8::from_str_radix(&color[3..5], 16); let b = u8::from_str_radix(&color[5..7], 16); match (r, 
g, b) { (Ok(r), Ok(g), Ok(b)) => Some(Color::Rgb { r, g, b }), _ => None, } } else { None } } impl<W: Write> Write for ColorFormatter<W> { fn write(&mut self, data: &[u8]) -> Result<usize, Error> { /* We clear the current style at the end of each line, and then we re-apply the style after the newline. There are several reasons for this: * We can more easily skip styling a trailing blank line, which other internal code then can correctly detect as having a trailing newline. * Some tools (like `less -R`) add an extra newline if the final character is not a newline (e.g. if there's a color reset after it), which led to an annoying blank line after the diff summary in e.g. `jj status`. * Since each line is styled independently, you get all the necessary escapes even when grepping through the output. * Some terminals extend background color to the end of the terminal (i.e. past the newline character), which is probably not what the user wanted. * Some tools (like `less -R`) get confused and lose coloring of lines after a newline. */ for line in data.split_inclusive(|b| *b == b'\n') { if line.ends_with(b"\n") { self.write_new_style()?; write_sanitized(&mut self.output, &line[..line.len() - 1])?; let labels = mem::take(&mut self.labels); self.write_new_style()?; self.output.write_all(b"\n")?; self.labels = labels; } else { self.write_new_style()?; write_sanitized(&mut self.output, line)?; } } Ok(data.len()) } fn flush(&mut self) -> Result<(), Error> { self.write_new_style()?; self.output.flush() } } impl<W: Write> Formatter for ColorFormatter<W> { fn raw(&mut self) -> io::Result<Box<dyn Write + '_>> { self.write_new_style()?; Ok(Box::new(self.output.by_ref())) } fn push_label(&mut self, label: &str) { self.labels.push(label.to_owned()); } fn pop_label(&mut self) { self.labels.pop(); } } impl<W: Write> Drop for ColorFormatter<W> { fn drop(&mut self) { // If a `ColorFormatter` was dropped without flushing, let's try to // reset any currently active style. 
self.labels.clear(); self.write_new_style().ok(); } } /// Like buffered formatter, but records `push`/`pop_label()` calls. /// /// This allows you to manipulate the recorded data without losing labels. /// The recorded data and labels can be written to another formatter. If /// the destination formatter has already been labeled, the recorded labels /// will be stacked on top of the existing labels, and the subsequent data /// may be colorized differently. #[derive(Clone, Debug, Default)] pub struct FormatRecorder { data: Vec<u8>, ops: Vec<(usize, FormatOp)>, } #[derive(Clone, Debug, Eq, PartialEq)] enum FormatOp { PushLabel(String), PopLabel, RawEscapeSequence(Vec<u8>), } impl FormatRecorder { pub fn new() -> Self { Self::default() } /// Creates new buffer containing the given `data`. pub fn with_data(data: impl Into<Vec<u8>>) -> Self { Self { data: data.into(), ops: vec![], } } pub fn data(&self) -> &[u8] { &self.data } fn push_op(&mut self, op: FormatOp) { self.ops.push((self.data.len(), op)); } pub fn replay(&self, formatter: &mut dyn Formatter) -> io::Result<()> { self.replay_with(formatter, |formatter, range| { formatter.write_all(&self.data[range]) }) } pub fn replay_with( &self, formatter: &mut dyn Formatter, mut write_data: impl FnMut(&mut dyn Formatter, Range<usize>) -> io::Result<()>, ) -> io::Result<()> { let mut last_pos = 0; let mut flush_data = |formatter: &mut dyn Formatter, pos| -> io::Result<()> { if last_pos != pos { write_data(formatter, last_pos..pos)?; last_pos = pos; } Ok(()) }; for (pos, op) in &self.ops { flush_data(formatter, *pos)?; match op { FormatOp::PushLabel(label) => formatter.push_label(label), FormatOp::PopLabel => formatter.pop_label(), FormatOp::RawEscapeSequence(raw_escape_sequence) => { formatter.raw()?.write_all(raw_escape_sequence)?; } } } flush_data(formatter, self.data.len()) } } impl Write for FormatRecorder { fn write(&mut self, data: &[u8]) -> io::Result<usize> { self.data.extend_from_slice(data); Ok(data.len()) } fn 
flush(&mut self) -> io::Result<()> { Ok(()) } } struct RawEscapeSequenceRecorder<'a>(&'a mut FormatRecorder); impl Write for RawEscapeSequenceRecorder<'_> { fn write(&mut self, data: &[u8]) -> io::Result<usize> { self.0.push_op(FormatOp::RawEscapeSequence(data.to_vec())); Ok(data.len()) } fn flush(&mut self) -> io::Result<()> { self.0.flush() } } impl Formatter for FormatRecorder { fn raw(&mut self) -> io::Result<Box<dyn Write + '_>> { Ok(Box::new(RawEscapeSequenceRecorder(self))) } fn push_label(&mut self, label: &str) { self.push_op(FormatOp::PushLabel(label.to_owned())); } fn pop_label(&mut self) { self.push_op(FormatOp::PopLabel); } } fn write_sanitized(output: &mut impl Write, buf: &[u8]) -> Result<(), Error> { if buf.contains(&b'\x1b') { let mut sanitized = Vec::with_capacity(buf.len()); for b in buf { if *b == b'\x1b' { sanitized.extend_from_slice("␛".as_bytes()); } else { sanitized.push(*b); } } output.write_all(&sanitized) } else { output.write_all(buf) } } #[cfg(test)] mod tests { use std::error::Error as _; use bstr::BString; use indexmap::IndexMap; use indoc::indoc; use jj_lib::config::ConfigLayer; use jj_lib::config::ConfigSource; use super::*; fn config_from_string(text: &str) -> StackedConfig { let mut config = StackedConfig::empty(); config.add_layer(ConfigLayer::parse(ConfigSource::User, text).unwrap()); config } /// Appends "[EOF]" marker to the output text. /// /// This is a workaround for https://github.com/mitsuhiko/insta/issues/384. fn to_snapshot_string(output: impl Into<Vec<u8>>) -> BString { let mut output = output.into(); output.extend_from_slice(b"[EOF]\n"); BString::new(output) } #[test] fn test_plaintext_formatter() { // Test that PlainTextFormatter ignores labels. 
let mut output: Vec<u8> = vec![]; let mut formatter = PlainTextFormatter::new(&mut output); formatter.push_label("warning"); write!(formatter, "hello").unwrap(); formatter.pop_label(); insta::assert_snapshot!(to_snapshot_string(output), @"hello[EOF]"); } #[test] fn test_plaintext_formatter_ansi_codes_in_text() { // Test that ANSI codes in the input text are NOT escaped. let mut output: Vec<u8> = vec![]; let mut formatter = PlainTextFormatter::new(&mut output); write!(formatter, "\x1b[1mactually bold\x1b[0m").unwrap(); insta::assert_snapshot!(to_snapshot_string(output), @"actually bold[EOF]"); } #[test] fn test_sanitizing_formatter_ansi_codes_in_text() { // Test that ANSI codes in the input text are escaped. let mut output: Vec<u8> = vec![]; let mut formatter = SanitizingFormatter::new(&mut output); write!(formatter, "\x1b[1mnot actually bold\x1b[0m").unwrap(); insta::assert_snapshot!(to_snapshot_string(output), @"␛[1mnot actually bold␛[0m[EOF]"); } #[test] fn test_color_formatter_color_codes() { // Test the color code for each color. // Use the color name as the label. let config = config_from_string(indoc! 
{" [colors] black = 'black' red = 'red' green = 'green' yellow = 'yellow' blue = 'blue' magenta = 'magenta' cyan = 'cyan' white = 'white' bright-black = 'bright black' bright-red = 'bright red' bright-green = 'bright green' bright-yellow = 'bright yellow' bright-blue = 'bright blue' bright-magenta = 'bright magenta' bright-cyan = 'bright cyan' bright-white = 'bright white' "}); let colors: IndexMap<String, String> = config.get("colors").unwrap(); let mut output: Vec<u8> = vec![]; let mut formatter = ColorFormatter::for_config(&mut output, &config, false).unwrap(); for (label, color) in &colors { formatter.push_label(label); write!(formatter, " {color} ").unwrap(); formatter.pop_label(); writeln!(formatter).unwrap(); } drop(formatter); insta::assert_snapshot!(to_snapshot_string(output), @r"  black   red   green   yellow   blue   magenta   cyan   white   bright black   bright red   bright green   bright yellow   bright blue   bright magenta   bright cyan   bright white  [EOF] "); } #[test] fn test_color_for_ansi256_index() { assert_eq!( color_for_ansi256_index("ansi-color-0"), Some(Color::AnsiValue(0)) ); assert_eq!( color_for_ansi256_index("ansi-color-10"), Some(Color::AnsiValue(10)) ); assert_eq!( color_for_ansi256_index("ansi-color-255"), Some(Color::AnsiValue(255)) ); assert_eq!(color_for_ansi256_index("ansi-color-256"), None); assert_eq!(color_for_ansi256_index("ansi-color-00"), None); assert_eq!(color_for_ansi256_index("ansi-color-010"), None); assert_eq!(color_for_ansi256_index("ansi-color-0255"), None); } #[test] fn test_color_formatter_ansi256() { let config = config_from_string( r#" [colors] purple-bg = { fg = "ansi-color-15", bg = "ansi-color-93" } gray = "ansi-color-244" "#, ); let mut output: Vec<u8> = vec![]; let mut formatter = ColorFormatter::for_config(&mut output, &config, false).unwrap(); formatter.push_label("purple-bg"); write!(formatter, " purple background ").unwrap(); formatter.pop_label(); writeln!(formatter).unwrap(); 
formatter.push_label("gray"); write!(formatter, " gray ").unwrap(); formatter.pop_label(); writeln!(formatter).unwrap(); drop(formatter); insta::assert_snapshot!(to_snapshot_string(output), @r"  purple background   gray  [EOF] "); } #[test] fn test_color_formatter_hex_colors() { // Test the color code for each color. let config = config_from_string(indoc! {" [colors] black = '#000000' white = '#ffffff' pastel-blue = '#AFE0D9' "}); let colors: IndexMap<String, String> = config.get("colors").unwrap(); let mut output: Vec<u8> = vec![]; let mut formatter = ColorFormatter::for_config(&mut output, &config, false).unwrap(); for label in colors.keys() { formatter.push_label(&label.replace(' ', "-")); write!(formatter, " {label} ").unwrap(); formatter.pop_label(); writeln!(formatter).unwrap(); } drop(formatter); insta::assert_snapshot!(to_snapshot_string(output), @r"  black   white   pastel-blue  [EOF] "); } #[test] fn test_color_formatter_single_label() { // Test that a single label can be colored and that the color is reset // afterwards. let config = config_from_string( r#" colors.inside = "green" "#, ); let mut output: Vec<u8> = vec![]; let mut formatter = ColorFormatter::for_config(&mut output, &config, false).unwrap(); write!(formatter, " before ").unwrap(); formatter.push_label("inside"); write!(formatter, " inside ").unwrap(); formatter.pop_label(); write!(formatter, " after ").unwrap(); drop(formatter); insta::assert_snapshot!( to_snapshot_string(output), @" before  inside  after [EOF]"); } #[test] fn test_color_formatter_attributes() { // Test that each attribute of the style can be set and that they can be // combined in a single rule or by using multiple rules. 
let config = config_from_string( r#" colors.red_fg = { fg = "red" } colors.blue_bg = { bg = "blue" } colors.bold_font = { bold = true } colors.dim_font = { dim = true } colors.italic_text = { italic = true } colors.underlined_text = { underline = true } colors.reversed_colors = { reverse = true } colors.multiple = { fg = "green", bg = "yellow", bold = true, italic = true, underline = true, reverse = true } "#, ); let mut output: Vec<u8> = vec![]; let mut formatter = ColorFormatter::for_config(&mut output, &config, false).unwrap(); formatter.push_label("red_fg"); write!(formatter, " fg only ").unwrap(); formatter.pop_label(); writeln!(formatter).unwrap(); formatter.push_label("blue_bg"); write!(formatter, " bg only ").unwrap(); formatter.pop_label(); writeln!(formatter).unwrap(); formatter.push_label("bold_font"); write!(formatter, " bold only ").unwrap(); formatter.pop_label(); writeln!(formatter).unwrap(); formatter.push_label("dim_font"); write!(formatter, " dim only ").unwrap(); formatter.pop_label(); writeln!(formatter).unwrap();
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
true
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/text_util.rs
cli/src/text_util.rs
// Copyright 2022-2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::borrow::Cow; use std::cmp; use std::io; use bstr::ByteSlice as _; use unicode_width::UnicodeWidthChar as _; use unicode_width::UnicodeWidthStr as _; use crate::formatter::FormatRecorder; use crate::formatter::Formatter; pub fn complete_newline(s: impl Into<String>) -> String { let mut s = s.into(); if !s.is_empty() && !s.ends_with('\n') { s.push('\n'); } s } pub fn split_email(email: &str) -> (&str, Option<&str>) { if let Some((username, rest)) = email.split_once('@') { (username, Some(rest)) } else { (email, None) } } /// Shortens `text` to `max_width` by removing leading characters. `ellipsis` is /// added if the `text` gets truncated. /// /// The returned string (including `ellipsis`) never exceeds the `max_width`. 
pub fn elide_start<'a>( text: &'a str, ellipsis: &'a str, max_width: usize, ) -> (Cow<'a, str>, usize) { let (text_start, text_width) = truncate_start_pos(text, max_width); if text_start == 0 { return (Cow::Borrowed(text), text_width); } let (ellipsis_start, ellipsis_width) = truncate_start_pos(ellipsis, max_width); if ellipsis_start != 0 { let ellipsis = trim_start_zero_width_chars(&ellipsis[ellipsis_start..]); return (Cow::Borrowed(ellipsis), ellipsis_width); } let text = &text[text_start..]; let max_text_width = max_width - ellipsis_width; let (skip, skipped_width) = skip_start_pos(text, text_width.saturating_sub(max_text_width)); let text = trim_start_zero_width_chars(&text[skip..]); let concat_width = ellipsis_width + (text_width - skipped_width); assert!(concat_width <= max_width); (Cow::Owned([ellipsis, text].concat()), concat_width) } /// Shortens `text` to `max_width` by removing trailing characters. `ellipsis` /// is added if the `text` gets truncated. /// /// The returned string (including `ellipsis`) never exceeds the `max_width`. pub fn elide_end<'a>(text: &'a str, ellipsis: &'a str, max_width: usize) -> (Cow<'a, str>, usize) { let (text_end, text_width) = truncate_end_pos(text, max_width); if text_end == text.len() { return (Cow::Borrowed(text), text_width); } let (ellipsis_end, ellipsis_width) = truncate_end_pos(ellipsis, max_width); if ellipsis_end != ellipsis.len() { let ellipsis = &ellipsis[..ellipsis_end]; return (Cow::Borrowed(ellipsis), ellipsis_width); } let text = &text[..text_end]; let max_text_width = max_width - ellipsis_width; let (skip, skipped_width) = skip_end_pos(text, text_width.saturating_sub(max_text_width)); let text = &text[..skip]; let concat_width = (text_width - skipped_width) + ellipsis_width; assert!(concat_width <= max_width); (Cow::Owned([text, ellipsis].concat()), concat_width) } /// Shortens `text` to `max_width` by removing leading characters, returning /// `(start_index, width)`. 
/// /// The truncated string may have 0-width decomposed characters at start. fn truncate_start_pos(text: &str, max_width: usize) -> (usize, usize) { truncate_start_pos_with_indices( text.char_indices() .rev() .map(|(start, c)| (start + c.len_utf8(), c)), max_width, ) } fn truncate_start_pos_bytes(text: &[u8], max_width: usize) -> (usize, usize) { truncate_start_pos_with_indices( text.char_indices().rev().map(|(_, end, c)| (end, c)), max_width, ) } fn truncate_start_pos_with_indices( char_indices_rev: impl Iterator<Item = (usize, char)>, max_width: usize, ) -> (usize, usize) { let mut acc_width = 0; for (end, c) in char_indices_rev { let new_width = acc_width + c.width().unwrap_or(0); if new_width > max_width { return (end, acc_width); } acc_width = new_width; } (0, acc_width) } /// Shortens `text` to `max_width` by removing trailing characters, returning /// `(end_index, width)`. fn truncate_end_pos(text: &str, max_width: usize) -> (usize, usize) { truncate_end_pos_with_indices(text.char_indices(), text.len(), max_width) } fn truncate_end_pos_bytes(text: &[u8], max_width: usize) -> (usize, usize) { truncate_end_pos_with_indices( text.char_indices().map(|(start, _, c)| (start, c)), text.len(), max_width, ) } fn truncate_end_pos_with_indices( char_indices_fwd: impl Iterator<Item = (usize, char)>, text_len: usize, max_width: usize, ) -> (usize, usize) { let mut acc_width = 0; for (start, c) in char_indices_fwd { let new_width = acc_width + c.width().unwrap_or(0); if new_width > max_width { return (start, acc_width); } acc_width = new_width; } (text_len, acc_width) } /// Skips `width` leading characters, returning `(start_index, skipped_width)`. /// /// The `skipped_width` may exceed the given `width` if `width` is not at /// character boundary. /// /// The truncated string may have 0-width decomposed characters at start. 
fn skip_start_pos(text: &str, width: usize) -> (usize, usize) { skip_start_pos_with_indices(text.char_indices(), text.len(), width) } fn skip_start_pos_with_indices( char_indices_fwd: impl Iterator<Item = (usize, char)>, text_len: usize, width: usize, ) -> (usize, usize) { let mut acc_width = 0; for (start, c) in char_indices_fwd { if acc_width >= width { return (start, acc_width); } acc_width += c.width().unwrap_or(0); } (text_len, acc_width) } /// Skips `width` trailing characters, returning `(end_index, skipped_width)`. /// /// The `skipped_width` may exceed the given `width` if `width` is not at /// character boundary. fn skip_end_pos(text: &str, width: usize) -> (usize, usize) { skip_end_pos_with_indices( text.char_indices() .rev() .map(|(start, c)| (start + c.len_utf8(), c)), width, ) } fn skip_end_pos_with_indices( char_indices_rev: impl Iterator<Item = (usize, char)>, width: usize, ) -> (usize, usize) { let mut acc_width = 0; for (end, c) in char_indices_rev { if acc_width >= width { return (end, acc_width); } acc_width += c.width().unwrap_or(0); } (0, acc_width) } /// Removes leading 0-width characters. fn trim_start_zero_width_chars(text: &str) -> &str { text.trim_start_matches(|c: char| c.width().unwrap_or(0) == 0) } /// Returns bytes length of leading 0-width characters. fn count_start_zero_width_chars_bytes(text: &[u8]) -> usize { text.char_indices() .find(|(_, _, c)| c.width().unwrap_or(0) != 0) .map(|(start, _, _)| start) .unwrap_or(text.len()) } /// Writes text truncated to `max_width` by removing leading characters. Returns /// width of the truncated text, which may be shorter than `max_width`. /// /// The input `recorded_content` should be a single-line text. 
pub fn write_truncated_start( formatter: &mut dyn Formatter, recorded_content: &FormatRecorder, recorded_ellipsis: &FormatRecorder, max_width: usize, ) -> io::Result<usize> { let data = recorded_content.data(); let data_width = String::from_utf8_lossy(data).width(); let ellipsis_data = recorded_ellipsis.data(); let ellipsis_width = String::from_utf8_lossy(ellipsis_data).width(); let (start, mut truncated_width) = if data_width > max_width { truncate_start_pos_bytes(data, max_width.saturating_sub(ellipsis_width)) } else { (0, data_width) }; let mut replay_truncated = |recorded: &FormatRecorder, truncated_start: usize| { recorded.replay_with(formatter, |formatter, range| { let start = cmp::max(range.start, truncated_start); if start < range.end { formatter.write_all(&recorded.data()[start..range.end])?; } Ok(()) }) }; if data_width > max_width { // The ellipsis itself may be larger than max_width, so maybe truncate it too. let (start, ellipsis_width) = truncate_start_pos_bytes(ellipsis_data, max_width); let truncated_start = start + count_start_zero_width_chars_bytes(&ellipsis_data[start..]); truncated_width += ellipsis_width; replay_truncated(recorded_ellipsis, truncated_start)?; } let truncated_start = start + count_start_zero_width_chars_bytes(&data[start..]); replay_truncated(recorded_content, truncated_start)?; Ok(truncated_width) } /// Writes text truncated to `max_width` by removing trailing characters. /// Returns width of the truncated text, which may be shorter than `max_width`. /// /// The input `recorded_content` should be a single-line text. 
pub fn write_truncated_end( formatter: &mut dyn Formatter, recorded_content: &FormatRecorder, recorded_ellipsis: &FormatRecorder, max_width: usize, ) -> io::Result<usize> { let data = recorded_content.data(); let data_width = String::from_utf8_lossy(data).width(); let ellipsis_data = recorded_ellipsis.data(); let ellipsis_width = String::from_utf8_lossy(ellipsis_data).width(); let (truncated_end, mut truncated_width) = if data_width > max_width { truncate_end_pos_bytes(data, max_width.saturating_sub(ellipsis_width)) } else { (data.len(), data_width) }; let mut replay_truncated = |recorded: &FormatRecorder, truncated_end: usize| { recorded.replay_with(formatter, |formatter, range| { let end = cmp::min(range.end, truncated_end); if range.start < end { formatter.write_all(&recorded.data()[range.start..end])?; } Ok(()) }) }; replay_truncated(recorded_content, truncated_end)?; if data_width > max_width { // The ellipsis itself may be larger than max_width, so maybe truncate it too. let (truncated_end, ellipsis_width) = truncate_end_pos_bytes(ellipsis_data, max_width); truncated_width += ellipsis_width; replay_truncated(recorded_ellipsis, truncated_end)?; } Ok(truncated_width) } /// Writes text padded to `min_width` by adding leading fill characters. /// /// The input `recorded_content` should be a single-line text. The /// `recorded_fill_char` should be bytes of 1-width character. pub fn write_padded_start( formatter: &mut dyn Formatter, recorded_content: &FormatRecorder, recorded_fill_char: &FormatRecorder, min_width: usize, ) -> io::Result<()> { // We don't care about the width of non-UTF-8 bytes, but should not panic. let width = String::from_utf8_lossy(recorded_content.data()).width(); let fill_width = min_width.saturating_sub(width); write_padding(formatter, recorded_fill_char, fill_width)?; recorded_content.replay(formatter)?; Ok(()) } /// Writes text padded to `min_width` by adding leading fill characters. 
/// /// The input `recorded_content` should be a single-line text. The /// `recorded_fill_char` should be bytes of 1-width character. pub fn write_padded_end( formatter: &mut dyn Formatter, recorded_content: &FormatRecorder, recorded_fill_char: &FormatRecorder, min_width: usize, ) -> io::Result<()> { // We don't care about the width of non-UTF-8 bytes, but should not panic. let width = String::from_utf8_lossy(recorded_content.data()).width(); let fill_width = min_width.saturating_sub(width); recorded_content.replay(formatter)?; write_padding(formatter, recorded_fill_char, fill_width)?; Ok(()) } /// Writes text padded to `min_width` by adding leading and trailing fill /// characters. /// /// The input `recorded_content` should be a single-line text. The /// `recorded_fill_char` should be bytes of a 1-width character. pub fn write_padded_centered( formatter: &mut dyn Formatter, recorded_content: &FormatRecorder, recorded_fill_char: &FormatRecorder, min_width: usize, ) -> io::Result<()> { // We don't care about the width of non-UTF-8 bytes, but should not panic. let width = String::from_utf8_lossy(recorded_content.data()).width(); let fill_width = min_width.saturating_sub(width); let fill_left = fill_width / 2; let fill_right = fill_width - fill_left; write_padding(formatter, recorded_fill_char, fill_left)?; recorded_content.replay(formatter)?; write_padding(formatter, recorded_fill_char, fill_right)?; Ok(()) } fn write_padding( formatter: &mut dyn Formatter, recorded_fill_char: &FormatRecorder, fill_width: usize, ) -> io::Result<()> { if fill_width == 0 { return Ok(()); } let data = recorded_fill_char.data(); recorded_fill_char.replay_with(formatter, |formatter, range| { // Don't emit labels repeatedly, just repeat content. Suppose fill char // is a single character, the byte sequence shouldn't be broken up to // multiple labeled regions. 
for _ in 0..fill_width { formatter.write_all(&data[range.clone()])?; } Ok(()) }) } /// Indents each line by the given prefix preserving labels. pub fn write_indented( formatter: &mut dyn Formatter, recorded_content: &FormatRecorder, mut write_prefix: impl FnMut(&mut dyn Formatter) -> io::Result<()>, ) -> io::Result<()> { let data = recorded_content.data(); let mut new_line = true; recorded_content.replay_with(formatter, |formatter, range| { for line in data[range].split_inclusive(|&c| c == b'\n') { if new_line && line != b"\n" { // Prefix inherits the current labels. This is implementation detail // and may be fixed later. write_prefix(formatter)?; } formatter.write_all(line)?; new_line = line.ends_with(b"\n"); } Ok(()) }) } /// Word with trailing whitespace. #[derive(Clone, Copy, Debug, Eq, PartialEq)] struct ByteFragment<'a> { word: &'a [u8], whitespace_len: usize, word_width: usize, } impl<'a> ByteFragment<'a> { fn new(word: &'a [u8], whitespace_len: usize) -> Self { // We don't care about the width of non-UTF-8 bytes, but should not panic. let word_width = textwrap::core::display_width(&String::from_utf8_lossy(word)); Self { word, whitespace_len, word_width, } } fn offset_in(&self, text: &[u8]) -> usize { byte_offset_from(text, self.word) } } impl textwrap::core::Fragment for ByteFragment<'_> { fn width(&self) -> f64 { self.word_width as f64 } fn whitespace_width(&self) -> f64 { self.whitespace_len as f64 } fn penalty_width(&self) -> f64 { 0.0 } } fn byte_offset_from(outer: &[u8], inner: &[u8]) -> usize { let outer_start = outer.as_ptr() as usize; let inner_start = inner.as_ptr() as usize; assert!(outer_start <= inner_start); assert!(inner_start + inner.len() <= outer_start + outer.len()); inner_start - outer_start } fn split_byte_line_to_words(line: &[u8]) -> Vec<ByteFragment<'_>> { let mut words = Vec::new(); let mut tail = line; while let Some(word_end) = tail.iter().position(|&c| c == b' ') { let word = &tail[..word_end]; let ws_end = tail[word_end + 1..] 
.iter() .position(|&c| c != b' ') .map(|p| p + word_end + 1) .unwrap_or(tail.len()); words.push(ByteFragment::new(word, ws_end - word_end)); tail = &tail[ws_end..]; } if !tail.is_empty() { words.push(ByteFragment::new(tail, 0)); } words } /// Wraps lines at the given width, returns a vector of lines (excluding "\n".) /// /// Existing newline characters will never be removed. For `str` content, you /// can use `textwrap::refill()` to refill a pre-formatted text. /// /// Each line is a sub-slice of the given text, even if the line is empty. /// /// The wrapping logic is more restricted than the default of the `textwrap`. /// Notably, this doesn't support hyphenation nor unicode line break. The /// display width is calculated based on unicode property in the same manner /// as `textwrap::wrap()`. pub fn wrap_bytes(text: &[u8], width: usize) -> Vec<&[u8]> { let mut split_lines = Vec::new(); for line in text.split(|&c| c == b'\n') { let words = split_byte_line_to_words(line); let split = textwrap::wrap_algorithms::wrap_first_fit(&words, &[width as f64]); split_lines.extend(split.iter().map(|words| match words { [] => &line[..0], // Empty line [a] => a.word, [a, .., b] => { let start = a.offset_in(line); let end = b.offset_in(line) + b.word.len(); &line[start..end] } })); } split_lines } /// Wraps lines at the given width preserving labels. /// /// `textwrap::wrap()` can also process text containing ANSI escape sequences. /// The main difference is that this function will reset the style for each line /// and recreate it on the following line if the output `formatter` is /// a `ColorFormatter`. 
pub fn write_wrapped( formatter: &mut dyn Formatter, recorded_content: &FormatRecorder, width: usize, ) -> io::Result<()> { let data = recorded_content.data(); let mut line_ranges = wrap_bytes(data, width) .into_iter() .map(|line| { let start = byte_offset_from(data, line); start..start + line.len() }) .peekable(); // The recorded data ranges are contiguous, and the line ranges are increasing // sequence (with some holes.) Both ranges should start from data[0]. recorded_content.replay_with(formatter, |formatter, data_range| { while let Some(line_range) = line_ranges.peek() { let start = cmp::max(data_range.start, line_range.start); let end = cmp::min(data_range.end, line_range.end); if start < end { formatter.write_all(&data[start..end])?; } if data_range.end <= line_range.end { break; // No more lines in this data range } line_ranges.next().unwrap(); if line_ranges.peek().is_some() { writeln!(formatter)?; // Not the last line } } Ok(()) }) } pub fn parse_author(author: &str) -> Result<(String, String), &'static str> { let re = regex::Regex::new(r"(?<name>.*?)\s*<(?<email>.+)>$").unwrap(); let captures = re.captures(author).ok_or("Invalid author string")?; Ok((captures["name"].to_string(), captures["email"].to_string())) } #[cfg(test)] mod tests { use std::io::Write as _; use indoc::indoc; use jj_lib::config::ConfigLayer; use jj_lib::config::ConfigSource; use jj_lib::config::StackedConfig; use super::*; use crate::formatter::ColorFormatter; use crate::formatter::PlainTextFormatter; fn format_colored(write: impl FnOnce(&mut dyn Formatter) -> io::Result<()>) -> String { let mut config = StackedConfig::empty(); config.add_layer( ConfigLayer::parse( ConfigSource::Default, indoc! 
{" colors.cyan = 'cyan' colors.red = 'red' "}, ) .unwrap(), ); let mut output = Vec::new(); let mut formatter = ColorFormatter::for_config(&mut output, &config, false).unwrap(); write(&mut formatter).unwrap(); drop(formatter); String::from_utf8(output).unwrap() } fn format_plain_text(write: impl FnOnce(&mut dyn Formatter) -> io::Result<()>) -> String { let mut output = Vec::new(); let mut formatter = PlainTextFormatter::new(&mut output); write(&mut formatter).unwrap(); String::from_utf8(output).unwrap() } #[test] fn test_elide_start() { // Empty string assert_eq!(elide_start("", "", 1), ("".into(), 0)); // Basic truncation assert_eq!(elide_start("abcdef", "", 6), ("abcdef".into(), 6)); assert_eq!(elide_start("abcdef", "", 5), ("bcdef".into(), 5)); assert_eq!(elide_start("abcdef", "", 1), ("f".into(), 1)); assert_eq!(elide_start("abcdef", "", 0), ("".into(), 0)); assert_eq!(elide_start("abcdef", "-=~", 6), ("abcdef".into(), 6)); assert_eq!(elide_start("abcdef", "-=~", 5), ("-=~ef".into(), 5)); assert_eq!(elide_start("abcdef", "-=~", 4), ("-=~f".into(), 4)); assert_eq!(elide_start("abcdef", "-=~", 3), ("-=~".into(), 3)); assert_eq!(elide_start("abcdef", "-=~", 2), ("=~".into(), 2)); assert_eq!(elide_start("abcdef", "-=~", 1), ("~".into(), 1)); assert_eq!(elide_start("abcdef", "-=~", 0), ("".into(), 0)); // East Asian characters (char.width() == 2) assert_eq!(elide_start("一二三", "", 6), ("一二三".into(), 6)); assert_eq!(elide_start("一二三", "", 5), ("二三".into(), 4)); assert_eq!(elide_start("一二三", "", 4), ("二三".into(), 4)); assert_eq!(elide_start("一二三", "", 1), ("".into(), 0)); assert_eq!(elide_start("一二三", "-=~", 6), ("一二三".into(), 6)); assert_eq!(elide_start("一二三", "-=~", 5), ("-=~三".into(), 5)); assert_eq!(elide_start("一二三", "-=~", 4), ("-=~".into(), 3)); assert_eq!(elide_start("一二三", "略", 6), ("一二三".into(), 6)); assert_eq!(elide_start("一二三", "略", 5), ("略三".into(), 4)); assert_eq!(elide_start("一二三", "略", 4), ("略三".into(), 4)); assert_eq!(elide_start("一二三", "略", 2), 
("略".into(), 2)); assert_eq!(elide_start("一二三", "略", 1), ("".into(), 0)); assert_eq!(elide_start("一二三", ".", 5), (".二三".into(), 5)); assert_eq!(elide_start("一二三", ".", 4), (".三".into(), 3)); assert_eq!(elide_start("一二三", "略.", 5), ("略.三".into(), 5)); assert_eq!(elide_start("一二三", "略.", 4), ("略.".into(), 3)); // Multi-byte character at boundary assert_eq!(elide_start("àbcdè", "", 5), ("àbcdè".into(), 5)); assert_eq!(elide_start("àbcdè", "", 4), ("bcdè".into(), 4)); assert_eq!(elide_start("àbcdè", "", 1), ("è".into(), 1)); assert_eq!(elide_start("àbcdè", "", 0), ("".into(), 0)); assert_eq!(elide_start("àbcdè", "ÀÇÈ", 4), ("ÀÇÈè".into(), 4)); assert_eq!(elide_start("àbcdè", "ÀÇÈ", 3), ("ÀÇÈ".into(), 3)); assert_eq!(elide_start("àbcdè", "ÀÇÈ", 2), ("ÇÈ".into(), 2)); // Decomposed character at boundary assert_eq!( elide_start("a\u{300}bcde\u{300}", "", 5), ("a\u{300}bcde\u{300}".into(), 5) ); assert_eq!( elide_start("a\u{300}bcde\u{300}", "", 4), ("bcde\u{300}".into(), 4) ); assert_eq!( elide_start("a\u{300}bcde\u{300}", "", 1), ("e\u{300}".into(), 1) ); assert_eq!(elide_start("a\u{300}bcde\u{300}", "", 0), ("".into(), 0)); assert_eq!( elide_start("a\u{300}bcde\u{300}", "A\u{300}CE\u{300}", 4), ("A\u{300}CE\u{300}e\u{300}".into(), 4) ); assert_eq!( elide_start("a\u{300}bcde\u{300}", "A\u{300}CE\u{300}", 3), ("A\u{300}CE\u{300}".into(), 3) ); assert_eq!( elide_start("a\u{300}bcde\u{300}", "A\u{300}CE\u{300}", 2), ("CE\u{300}".into(), 2) ); } #[test] fn test_elide_end() { // Empty string assert_eq!(elide_end("", "", 1), ("".into(), 0)); // Basic truncation assert_eq!(elide_end("abcdef", "", 6), ("abcdef".into(), 6)); assert_eq!(elide_end("abcdef", "", 5), ("abcde".into(), 5)); assert_eq!(elide_end("abcdef", "", 1), ("a".into(), 1)); assert_eq!(elide_end("abcdef", "", 0), ("".into(), 0)); assert_eq!(elide_end("abcdef", "-=~", 6), ("abcdef".into(), 6)); assert_eq!(elide_end("abcdef", "-=~", 5), ("ab-=~".into(), 5)); assert_eq!(elide_end("abcdef", "-=~", 4), ("a-=~".into(), 
4)); assert_eq!(elide_end("abcdef", "-=~", 3), ("-=~".into(), 3)); assert_eq!(elide_end("abcdef", "-=~", 2), ("-=".into(), 2)); assert_eq!(elide_end("abcdef", "-=~", 1), ("-".into(), 1)); assert_eq!(elide_end("abcdef", "-=~", 0), ("".into(), 0)); // East Asian characters (char.width() == 2) assert_eq!(elide_end("一二三", "", 6), ("一二三".into(), 6)); assert_eq!(elide_end("一二三", "", 5), ("一二".into(), 4)); assert_eq!(elide_end("一二三", "", 4), ("一二".into(), 4)); assert_eq!(elide_end("一二三", "", 1), ("".into(), 0)); assert_eq!(elide_end("一二三", "-=~", 6), ("一二三".into(), 6)); assert_eq!(elide_end("一二三", "-=~", 5), ("一-=~".into(), 5)); assert_eq!(elide_end("一二三", "-=~", 4), ("-=~".into(), 3)); assert_eq!(elide_end("一二三", "略", 6), ("一二三".into(), 6)); assert_eq!(elide_end("一二三", "略", 5), ("一略".into(), 4)); assert_eq!(elide_end("一二三", "略", 4), ("一略".into(), 4)); assert_eq!(elide_end("一二三", "略", 2), ("略".into(), 2)); assert_eq!(elide_end("一二三", "略", 1), ("".into(), 0)); assert_eq!(elide_end("一二三", ".", 5), ("一二.".into(), 5)); assert_eq!(elide_end("一二三", ".", 4), ("一.".into(), 3)); assert_eq!(elide_end("一二三", "略.", 5), ("一略.".into(), 5)); assert_eq!(elide_end("一二三", "略.", 4), ("略.".into(), 3)); // Multi-byte character at boundary assert_eq!(elide_end("àbcdè", "", 5), ("àbcdè".into(), 5)); assert_eq!(elide_end("àbcdè", "", 4), ("àbcd".into(), 4)); assert_eq!(elide_end("àbcdè", "", 1), ("à".into(), 1)); assert_eq!(elide_end("àbcdè", "", 0), ("".into(), 0)); assert_eq!(elide_end("àbcdè", "ÀÇÈ", 4), ("àÀÇÈ".into(), 4)); assert_eq!(elide_end("àbcdè", "ÀÇÈ", 3), ("ÀÇÈ".into(), 3)); assert_eq!(elide_end("àbcdè", "ÀÇÈ", 2), ("ÀÇ".into(), 2)); // Decomposed character at boundary assert_eq!( elide_end("a\u{300}bcde\u{300}", "", 5), ("a\u{300}bcde\u{300}".into(), 5) ); assert_eq!( elide_end("a\u{300}bcde\u{300}", "", 4), ("a\u{300}bcd".into(), 4) ); assert_eq!( elide_end("a\u{300}bcde\u{300}", "", 1), ("a\u{300}".into(), 1) ); assert_eq!(elide_end("a\u{300}bcde\u{300}", "", 0), ("".into(), 0)); 
assert_eq!( elide_end("a\u{300}bcde\u{300}", "A\u{300}CE\u{300}", 4), ("a\u{300}A\u{300}CE\u{300}".into(), 4) ); assert_eq!( elide_end("a\u{300}bcde\u{300}", "A\u{300}CE\u{300}", 3), ("A\u{300}CE\u{300}".into(), 3) ); assert_eq!( elide_end("a\u{300}bcde\u{300}", "A\u{300}CE\u{300}", 2), ("A\u{300}C".into(), 2) ); } #[test] fn test_write_truncated_labeled() { let ellipsis_recorder = FormatRecorder::new(); let mut recorder = FormatRecorder::new(); for (label, word) in [("red", "foo"), ("cyan", "bar")] { recorder.push_label(label); write!(recorder, "{word}").unwrap(); recorder.pop_label(); } // Truncate start insta::assert_snapshot!( format_colored(|formatter| { write_truncated_start(formatter, &recorder, &ellipsis_recorder, 6).map(|_| ()) }), @"foobar" ); insta::assert_snapshot!( format_colored(|formatter| { write_truncated_start(formatter, &recorder, &ellipsis_recorder, 5).map(|_| ()) }), @"oobar" ); insta::assert_snapshot!( format_colored(|formatter| { write_truncated_start(formatter, &recorder, &ellipsis_recorder, 3).map(|_| ()) }), @"bar" ); insta::assert_snapshot!( format_colored(|formatter| { write_truncated_start(formatter, &recorder, &ellipsis_recorder, 2).map(|_| ()) }), @"ar" ); insta::assert_snapshot!( format_colored(|formatter| { write_truncated_start(formatter, &recorder, &ellipsis_recorder, 0).map(|_| ()) }), @"" ); // Truncate end insta::assert_snapshot!( format_colored(|formatter| { write_truncated_end(formatter, &recorder, &ellipsis_recorder, 6).map(|_| ()) }), @"foobar" ); insta::assert_snapshot!( format_colored(|formatter| { write_truncated_end(formatter, &recorder, &ellipsis_recorder, 5).map(|_| ()) }), @"fooba" ); insta::assert_snapshot!( format_colored(|formatter| { write_truncated_end(formatter, &recorder, &ellipsis_recorder, 3).map(|_| ()) }), @"foo" ); insta::assert_snapshot!( format_colored(|formatter| { write_truncated_end(formatter, &recorder, &ellipsis_recorder, 2).map(|_| ()) }), @"fo" ); insta::assert_snapshot!( 
format_colored(|formatter| { write_truncated_end(formatter, &recorder, &ellipsis_recorder, 0).map(|_| ()) }), @"" ); } #[test] fn test_write_truncated_non_ascii_chars() { let ellipsis_recorder = FormatRecorder::new(); let mut recorder = FormatRecorder::new(); write!(recorder, "a\u{300}bc\u{300}一二三").unwrap(); // Truncate start insta::assert_snapshot!( format_colored(|formatter| { write_truncated_start(formatter, &recorder, &ellipsis_recorder, 1).map(|_| ()) }), @"" ); insta::assert_snapshot!( format_colored(|formatter| { write_truncated_start(formatter, &recorder, &ellipsis_recorder, 2).map(|_| ()) }), @"三" ); insta::assert_snapshot!( format_colored(|formatter| { write_truncated_start(formatter, &recorder, &ellipsis_recorder, 3).map(|_| ()) }), @"三" ); insta::assert_snapshot!( format_colored(|formatter| { write_truncated_start(formatter, &recorder, &ellipsis_recorder, 6).map(|_| ()) }), @"一二三" ); insta::assert_snapshot!( format_colored(|formatter| { write_truncated_start(formatter, &recorder, &ellipsis_recorder, 7).map(|_| ()) }), @"c̀一二三" ); insta::assert_snapshot!( format_colored(|formatter| { write_truncated_start(formatter, &recorder, &ellipsis_recorder, 9).map(|_| ()) }), @"àbc̀一二三" ); insta::assert_snapshot!( format_colored(|formatter| { write_truncated_start(formatter, &recorder, &ellipsis_recorder, 10).map(|_| ()) }), @"àbc̀一二三" ); // Truncate end insta::assert_snapshot!( format_colored(|formatter| { write_truncated_end(formatter, &recorder, &ellipsis_recorder, 1).map(|_| ()) }), @"à" ); insta::assert_snapshot!( format_colored(|formatter| {
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
true
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/interdiff.rs
cli/src/commands/interdiff.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::slice; use clap::ArgGroup; use clap_complete::ArgValueCompleter; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::print_unmatched_explicit_paths; use crate::command_error::CommandError; use crate::complete; use crate::diff_util::DiffFormatArgs; use crate::ui::Ui; /// Show differences between the diffs of two revisions /// /// This is like running `jj diff -r` on each change, then comparing those /// results. It answers: "How do the modifications introduced by revision A /// differ from the modifications introduced by revision B?" /// /// For example, if two changes both add a feature but implement it /// differently, `jj interdiff --from @- --to other` shows what one /// implementation adds or removes that the other doesn't. /// /// A common use of this command is to compare how a change has changed /// since the last push to a remote: /// /// ```sh /// $ jj interdiff --from push-xyz@origin --to push-xyz /// ``` /// /// This command is different from `jj diff --from A --to B`, which compares /// file contents directly. `interdiff` compares what the changes do in terms of /// their patches, rather than their file contents. 
This makes a difference when /// the two revisions have different parents: `jj diff --from A --to B` will /// include the changes between their parents while `jj interdiff --from A --to /// B` will not. /// /// Technically, this works by rebasing `--from` onto `--to`'s parents and /// comparing the result to `--to`. /// /// To see the changes throughout the whole evolution of a change instead of /// between just two revisions, use `jj evolog -p instead`. #[derive(clap::Args, Clone, Debug)] #[command(group(ArgGroup::new("to_diff").args(&["from", "to"]).multiple(true).required(true)))] #[command(mut_arg("ignore_all_space", |a| a.short('w')))] #[command(mut_arg("ignore_space_change", |a| a.short('b')))] pub(crate) struct InterdiffArgs { /// The first revision to compare (default: @) #[arg(long, short, value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] from: Option<RevisionArg>, /// The second revision to compare (default: @) #[arg(long, short, value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] to: Option<RevisionArg>, /// Restrict the diff to these paths #[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] #[arg(add = ArgValueCompleter::new(complete::interdiff_files))] paths: Vec<String>, #[command(flatten)] format: DiffFormatArgs, } #[instrument(skip_all)] pub(crate) fn cmd_interdiff( ui: &mut Ui, command: &CommandHelper, args: &InterdiffArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let from = workspace_command.resolve_single_rev(ui, args.from.as_ref().unwrap_or(&RevisionArg::AT))?; let to = workspace_command.resolve_single_rev(ui, args.to.as_ref().unwrap_or(&RevisionArg::AT))?; let repo = workspace_command.repo(); let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?; let matcher = fileset_expression.to_matcher(); print_unmatched_explicit_paths( ui, &workspace_command, &fileset_expression, 
// We check the parent commits to account for deleted files. [ &from.parent_tree(repo.as_ref())?, &from.tree(), &to.parent_tree(repo.as_ref())?, &to.tree(), ], )?; let diff_renderer = workspace_command.diff_renderer_for(&args.format)?; ui.request_pager(); diff_renderer .show_inter_diff( ui, ui.stdout_formatter().as_mut(), slice::from_ref(&from), &to, matcher.as_ref(), ui.term_width(), ) .block_on()?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/evolog.rs
cli/src/commands/evolog.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCandidates; use clap_complete::ArgValueCompleter; use itertools::Itertools as _; use jj_lib::commit::Commit; use jj_lib::evolution::CommitEvolutionEntry; use jj_lib::evolution::walk_predecessors; use jj_lib::graph::GraphEdge; use jj_lib::graph::TopoGroupedGraphIterator; use jj_lib::graph::reverse_graph; use jj_lib::matchers::EverythingMatcher; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::LogContentFormat; use crate::cli_util::RevisionArg; use crate::cli_util::format_template; use crate::command_error::CommandError; use crate::complete; use crate::diff_util::DiffFormatArgs; use crate::graphlog::GraphStyle; use crate::graphlog::get_graphlog; use crate::templater::TemplateRenderer; use crate::ui::Ui; /// Show how a change has evolved over time /// /// Lists the previous commits which a change has pointed to. The current commit /// of a change evolves when the change is updated, rebased, etc. 
#[derive(clap::Args, Clone, Debug)] pub(crate) struct EvologArgs { /// Follow changes from these revisions #[arg( long, short, default_value = "@", value_name = "REVSETS", alias = "revision" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] revisions: Vec<RevisionArg>, /// Limit number of revisions to show /// /// Applied after revisions are reordered topologically, but before being /// reversed. #[arg(long, short = 'n')] limit: Option<usize>, /// Show revisions in the opposite order (older revisions first) #[arg(long)] reversed: bool, /// Don't show the graph, show a flat list of revisions #[arg(long, short = 'G')] no_graph: bool, /// Render each revision using the given template /// /// All 0-argument methods of the [`CommitEvolutionEntry` type] are /// available as keywords in the template expression. See [`jj help -k /// templates`] for more information. /// /// If not specified, this defaults to the `templates.evolog` setting. /// /// [`CommitEvolutionEntry` type]: /// https://docs.jj-vcs.dev/latest/templates/#commitevolutionentry-type /// /// [`jj help -k templates`]: /// https://docs.jj-vcs.dev/latest/templates/ #[arg(long, short = 'T')] #[arg(add = ArgValueCandidates::new(complete::template_aliases))] template: Option<String>, /// Show patch compared to the previous version of this change /// /// If the previous version has different parents, it will be temporarily /// rebased to the parents of the new version, so the diff is not /// contaminated by unrelated changes. #[arg(long, short = 'p')] patch: bool, #[command(flatten)] diff_format: DiffFormatArgs, } #[instrument(skip_all)] pub(crate) fn cmd_evolog( ui: &mut Ui, command: &CommandHelper, args: &EvologArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let start_commit_ids: Vec<_> = workspace_command .parse_union_revsets(ui, &args.revisions)? .evaluate_to_commit_ids()? 
.try_collect()?; let diff_renderer = workspace_command.diff_renderer_for_log(&args.diff_format, args.patch)?; let graph_style = GraphStyle::from_settings(workspace_command.settings())?; let with_content_format = LogContentFormat::new(ui, workspace_command.settings())?; let template: TemplateRenderer<CommitEvolutionEntry>; let node_template: TemplateRenderer<Option<Commit>>; { let language = workspace_command.commit_template_language(); let template_string = match &args.template { Some(value) => value.clone(), None => workspace_command.settings().get("templates.evolog")?, }; template = workspace_command .parse_template(ui, &language, &template_string)? .labeled(["evolog"]); // TODO: add label for the context type? node_template = workspace_command .parse_template( ui, &language, // TODO: should we add templates.evolog_node? &workspace_command .settings() .get_string("templates.log_node")?, )? .labeled(["evolog", "commit", "node"]); } ui.request_pager(); let mut formatter = ui.stdout_formatter(); let formatter = formatter.as_mut(); let repo = workspace_command.repo(); let evolution_entries = walk_predecessors(repo, &start_commit_ids); if !args.no_graph { let mut raw_output = formatter.raw()?; let mut graph = get_graphlog(graph_style, raw_output.as_mut()); let evolution_nodes = evolution_entries.map_ok(|entry| { let ids = entry.predecessor_ids(); let edges = ids.iter().cloned().map(GraphEdge::direct).collect_vec(); (entry, edges) }); // TopoGroupedGraphIterator also helps emit squashed commits in reverse // chronological order. Predecessors don't need to follow any defined // order. However in practice, if there are multiple predecessors, then // usually the first predecessor is the previous version of the same // change, and the other predecessors are commits that were squashed // into it. If multiple commits are squashed at once, then they are // usually recorded in chronological order. 
We want to show squashed // commits in reverse chronological order, and we also want to show // squashed commits before the squash destination (since the // destination's subgraph may contain earlier squashed commits as well. let evolution_nodes = TopoGroupedGraphIterator::new(evolution_nodes, |node| node.commit.id()); let evolution_nodes = evolution_nodes.take(args.limit.unwrap_or(usize::MAX)); let evolution_nodes: Box<dyn Iterator<Item = _>> = if args.reversed { let nodes = reverse_graph(evolution_nodes, |entry| entry.commit.id())?; Box::new(nodes.into_iter().map(Ok)) } else { Box::new(evolution_nodes) }; for node in evolution_nodes { let (entry, edges) = node?; let mut buffer = vec![]; let within_graph = with_content_format.sub_width(graph.width(entry.commit.id(), &edges)); within_graph.write(ui.new_formatter(&mut buffer).as_mut(), |formatter| { template.format(&entry, formatter) })?; if let Some(renderer) = &diff_renderer { let predecessors: Vec<_> = entry.predecessors().try_collect()?; let mut formatter = ui.new_formatter(&mut buffer); renderer .show_inter_diff( ui, formatter.as_mut(), &predecessors, &entry.commit, &EverythingMatcher, within_graph.width(), ) .block_on()?; } let node_symbol = format_template(ui, &Some(entry.commit.clone()), &node_template); graph.add_node( entry.commit.id(), &edges, &node_symbol, &String::from_utf8_lossy(&buffer), )?; } } else { let evolution_entries = evolution_entries.take(args.limit.unwrap_or(usize::MAX)); let evolution_entries: Box<dyn Iterator<Item = _>> = if args.reversed { let entries: Vec<_> = evolution_entries.try_collect()?; Box::new(entries.into_iter().rev().map(Ok)) } else { Box::new(evolution_entries) }; for entry in evolution_entries { let entry = entry?; with_content_format.write(formatter, |formatter| template.format(&entry, formatter))?; if let Some(renderer) = &diff_renderer { let predecessors: Vec<_> = entry.predecessors().try_collect()?; let width = ui.term_width(); renderer .show_inter_diff( ui, formatter, 
&predecessors, &entry.commit, &EverythingMatcher, width, ) .block_on()?; } } } Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/abandon.rs
cli/src/commands/abandon.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashMap; use std::io::Write as _; use clap_complete::ArgValueCompleter; use indexmap::IndexSet; use itertools::Itertools as _; use jj_lib::refs::diff_named_ref_targets; use jj_lib::repo::Repo as _; use jj_lib::revset::RevsetExpression; use jj_lib::rewrite::RewriteRefsOptions; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; #[cfg(feature = "git")] use crate::cli_util::has_tracked_remote_bookmarks; use crate::cli_util::print_updated_commits; use crate::command_error::CommandError; use crate::complete; use crate::ui::Ui; /// Abandon a revision /// /// Abandon a revision, rebasing descendants onto its parent(s). The behavior is /// similar to `jj restore --changes-in`; the difference is that `jj abandon` /// gives you a new change, while `jj restore` updates the existing change. /// /// If a working-copy commit gets abandoned, it will be given a new, empty /// commit. This is true in general; it is not specific to this command. 
#[derive(clap::Args, Clone, Debug)] pub(crate) struct AbandonArgs { /// The revision(s) to abandon (default: @) [aliases: -r] #[arg(value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revisions_pos: Vec<RevisionArg>, #[arg(short = 'r', hide = true, value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revisions_opt: Vec<RevisionArg>, /// Do not delete bookmarks pointing to the revisions to abandon /// /// Bookmarks will be moved to the parent revisions instead. #[arg(long)] retain_bookmarks: bool, /// Do not modify the content of the children of the abandoned commits #[arg(long)] restore_descendants: bool, } #[instrument(skip_all)] pub(crate) fn cmd_abandon( ui: &mut Ui, command: &CommandHelper, args: &AbandonArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; let to_abandon = { let target_expr = if !args.revisions_pos.is_empty() || !args.revisions_opt.is_empty() { workspace_command .parse_union_revsets(ui, &[&*args.revisions_pos, &*args.revisions_opt].concat())? } else { workspace_command.parse_revset(ui, &RevisionArg::AT)? } .resolve()?; let visible_expr = target_expr.intersection(&RevsetExpression::visible_heads().ancestors()); workspace_command.check_rewritable_expr(&visible_expr)?; let visible: IndexSet<_> = visible_expr .evaluate(workspace_command.repo().as_ref())? .iter() .try_collect()?; let targets: Vec<_> = target_expr .evaluate(workspace_command.repo().as_ref())? 
.iter() .try_collect()?; if visible.len() < targets.len() { writeln!( ui.status(), "Skipping {n} revisions that are already hidden.", n = targets.len() - visible.len() )?; } visible }; if to_abandon.is_empty() { writeln!(ui.status(), "No revisions to abandon.")?; return Ok(()); } let mut tx = workspace_command.start_transaction(); let options = RewriteRefsOptions { delete_abandoned_bookmarks: !args.retain_bookmarks, }; let mut num_rebased = 0; tx.repo_mut().transform_descendants_with_options( to_abandon.iter().cloned().collect(), &HashMap::new(), &options, async |rewriter| { if to_abandon.contains(rewriter.old_commit().id()) { rewriter.abandon(); } else if args.restore_descendants { rewriter.reparent().write()?; num_rebased += 1; } else { rewriter.rebase().await?.write()?; num_rebased += 1; } Ok(()) }, )?; let deleted_bookmarks = diff_named_ref_targets( tx.base_repo().view().local_bookmarks(), tx.repo().view().local_bookmarks(), ) .filter(|(_, (_old, new))| new.is_absent()) .map(|(name, _)| name.to_owned()) .collect_vec(); if let Some(mut formatter) = ui.status_formatter() { writeln!(formatter, "Abandoned {} commits:", to_abandon.len())?; let abandoned_commits: Vec<_> = to_abandon .iter() .map(|id| tx.base_repo().store().get_commit(id)) .try_collect()?; print_updated_commits( formatter.as_mut(), &tx.base_workspace_helper().commit_summary_template(), &abandoned_commits, )?; if !deleted_bookmarks.is_empty() { writeln!( formatter, "Deleted bookmarks: {}", deleted_bookmarks.iter().map(|n| n.as_symbol()).join(", ") )?; } if num_rebased > 0 { if args.restore_descendants { writeln!( formatter, "Rebased {num_rebased} descendant commits (while preserving their content) \ onto parents of abandoned commits", )?; } else { writeln!( formatter, "Rebased {num_rebased} descendant commits onto parents of abandoned commits", )?; } } } let transaction_description = if to_abandon.len() == 1 { format!("abandon commit {}", to_abandon[0]) } else { format!( "abandon commit {} and {} 
more", to_abandon[0], to_abandon.len() - 1 ) }; tx.finish(ui, transaction_description)?; #[cfg(feature = "git")] if jj_lib::git::get_git_backend(workspace_command.repo().store()).is_ok() { let repo = workspace_command.repo().as_ref(); if deleted_bookmarks .iter() .any(|name| has_tracked_remote_bookmarks(repo, name)) { writeln!( ui.hint_default(), "Deleted bookmarks can be pushed by name or all at once with `jj git push \ --deleted`." )?; } } Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/squash.rs
cli/src/commands/squash.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashMap; use std::iter::once; use clap_complete::ArgValueCandidates; use clap_complete::ArgValueCompleter; use indoc::formatdoc; use itertools::Itertools as _; use jj_lib::commit::Commit; use jj_lib::commit::CommitIteratorExt as _; use jj_lib::matchers::Matcher; use jj_lib::merge::Diff; use jj_lib::object_id::ObjectId as _; use jj_lib::repo::Repo as _; use jj_lib::rewrite; use jj_lib::rewrite::CommitWithSelection; use jj_lib::rewrite::merge_commit_trees; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::DiffSelector; use crate::cli_util::RevisionArg; use crate::cli_util::WorkspaceCommandTransaction; use crate::cli_util::compute_commit_location; use crate::cli_util::print_unmatched_explicit_paths; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::command_error::user_error_with_hint; use crate::complete; use crate::description_util::add_trailers; use crate::description_util::combine_messages_for_editing; use crate::description_util::description_template; use crate::description_util::edit_description; use crate::description_util::join_message_paragraphs; use crate::description_util::try_combine_messages; use crate::ui::Ui; /// Move changes from a revision into another revision /// /// Without any options, moves the changes from the working-copy revision to the /// 
parent revision. /// /// With the `-r` option, moves the changes from the specified revision to the /// parent revision. Fails if there are several parent revisions (i.e., the /// given revision is a merge). /// /// With the `--from` and/or `--into` options, moves changes from/to the given /// revisions. If either is left out, it defaults to the working-copy commit. /// For example, `jj squash --into @--` moves changes from the working-copy /// commit to the grandparent. /// /// If, after moving changes out, the source revision is empty compared to its /// parent(s), and `--keep-emptied` is not set, it will be abandoned. Without /// `--interactive` or paths, the source revision will always be empty. /// /// If the source was abandoned and both the source and destination had a /// non-empty description, you will be asked for the combined description. If /// either was empty, then the other one will be used. /// /// If a working-copy commit gets abandoned, it will be given a new, empty /// commit. This is true in general; it is not specific to this command. /// /// The name "squash" comes from the idea of combining (squashing) the changes /// from multiple revisions together. /// /// EXPERIMENTAL FEATURES /// /// An alternative squashing UI is available via the `-o`, `-A`, and `-B` /// options. Using any of these options creates a new commit. They can be used /// together with one or more `--from` options (if no `--from` is specified, /// `--from @` is assumed). #[derive(clap::Args, Clone, Debug)] pub(crate) struct SquashArgs { /// Revision to squash into its parent (default: @). Incompatible with the /// experimental `-o`/`-A`/`-B` options. 
#[arg(long, short, value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revision: Option<RevisionArg>, /// Revision(s) to squash from (default: @) #[arg(long, short, conflicts_with = "revision", value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] from: Vec<RevisionArg>, /// Revision to squash into (default: @) #[arg( long, short = 't', conflicts_with = "revision", visible_alias = "to", value_name = "REVSET" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] into: Option<RevisionArg>, /// (Experimental) The revision(s) to use as parent for the new commit (can /// be repeated to create a merge commit) #[arg( long, visible_alias = "destination", short, visible_short_alias = 'd', conflicts_with = "into", conflicts_with = "revision", value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] onto: Option<Vec<RevisionArg>>, /// (Experimental) The revision(s) to insert the new commit after (can be /// repeated to create a merge commit) #[arg( long, short = 'A', visible_alias = "after", conflicts_with = "onto", conflicts_with = "into", conflicts_with = "revision", value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] insert_after: Option<Vec<RevisionArg>>, /// (Experimental) The revision(s) to insert the new commit before (can be /// repeated to create a merge commit) #[arg( long, short = 'B', visible_alias = "before", conflicts_with = "onto", conflicts_with = "into", conflicts_with = "revision", value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] insert_before: Option<Vec<RevisionArg>>, /// The description to use for squashed revision (don't open editor) #[arg(long = "message", short, value_name = "MESSAGE")] message_paragraphs: Vec<String>, /// Use the description of the destination revision and discard the /// description(s) of 
the source revision(s) #[arg(long, short, conflicts_with = "message_paragraphs")] use_destination_message: bool, /// Open an editor to edit the change description /// /// Forces an editor to open when using `--message` to allow the /// message to be edited afterwards. #[arg(long)] editor: bool, /// Interactively choose which parts to squash #[arg(long, short)] interactive: bool, /// Specify diff editor to be used (implies --interactive) #[arg(long, value_name = "NAME")] #[arg(add = ArgValueCandidates::new(complete::diff_editors))] tool: Option<String>, /// Move only changes to these paths (instead of all paths) #[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] #[arg(add = ArgValueCompleter::new(complete::squash_revision_files))] paths: Vec<String>, /// The source revision will not be abandoned #[arg(long, short)] keep_emptied: bool, } #[instrument(skip_all)] pub(crate) fn cmd_squash( ui: &mut Ui, command: &CommandHelper, args: &SquashArgs, ) -> Result<(), CommandError> { let insert_destination_commit = args.onto.is_some() || args.insert_after.is_some() || args.insert_before.is_some(); let mut workspace_command = command.workspace_helper(ui)?; let mut sources: Vec<Commit>; let pre_existing_destination; if !args.from.is_empty() || args.into.is_some() || insert_destination_commit { sources = if args.from.is_empty() { workspace_command.parse_revset(ui, &RevisionArg::AT)? } else { workspace_command.parse_union_revsets(ui, &args.from)? } .evaluate_to_commits()? .try_collect()?; if insert_destination_commit { pre_existing_destination = None; } else { let destination = workspace_command .resolve_single_rev(ui, args.into.as_ref().unwrap_or(&RevisionArg::AT))?; // remove the destination from the sources sources.retain(|source| source.id() != destination.id()); pre_existing_destination = Some(destination); } // Reverse the set so we apply the oldest commits first. 
It shouldn't affect the // result, but it avoids creating transient conflicts and is therefore probably // a little faster. sources.reverse(); } else { let source = workspace_command .resolve_single_rev(ui, args.revision.as_ref().unwrap_or(&RevisionArg::AT))?; let mut parents: Vec<_> = source.parents().try_collect()?; if parents.len() != 1 { return Err(user_error_with_hint( "Cannot squash merge commits without a specified destination", "Use `--into` to specify which parent to squash into", )); } sources = vec![source]; pre_existing_destination = Some(parents.pop().unwrap()); }; workspace_command.check_rewritable(sources.iter().chain(&pre_existing_destination).ids())?; // prepare the tx description before possibly rebasing the source commits let source_ids: Vec<_> = sources.iter().ids().collect(); let tx_description = if let Some(destination) = &pre_existing_destination { format!("squash commits into {}", destination.id().hex()) } else { match &source_ids[..] { [] => format!("squash {} commits", source_ids.len()), [id] => format!("squash commit {}", id.hex()), [first, others @ ..] 
=> { format!("squash commit {} and {} more", first.hex(), others.len()) } } }; let mut tx = workspace_command.start_transaction(); let mut num_rebased = 0; let destination = if let Some(commit) = pre_existing_destination { commit } else { // create the new destination commit let (parent_ids, child_ids) = compute_commit_location( ui, tx.base_workspace_helper(), args.onto.as_deref(), args.insert_after.as_deref(), args.insert_before.as_deref(), "squashed commit", )?; let parent_commits: Vec<_> = parent_ids .iter() .map(|commit_id| { tx.base_workspace_helper() .repo() .store() .get_commit(commit_id) }) .try_collect()?; let merged_tree = merge_commit_trees(tx.repo(), &parent_commits).block_on()?; let commit = tx .repo_mut() .new_commit(parent_ids.clone(), merged_tree) .write()?; let mut rewritten = HashMap::new(); tx.repo_mut() .transform_descendants(child_ids.clone(), async |mut rewriter| { let old_commit_id = rewriter.old_commit().id().clone(); for parent_id in &parent_ids { rewriter.replace_parent(parent_id, [commit.id()]); } let new_parents = rewriter.new_parents(); if child_ids.contains(&old_commit_id) && !new_parents.contains(commit.id()) { rewriter.set_new_parents( new_parents .iter() .cloned() .chain(once(commit.id().clone())) .collect(), ); } let new_commit = rewriter.rebase().await?.write()?; rewritten.insert(old_commit_id, new_commit); num_rebased += 1; Ok(()) })?; for source in &mut *sources { if let Some(rewritten_source) = rewritten.remove(source.id()) { *source = rewritten_source; } } commit }; let fileset_expression = tx .base_workspace_helper() .parse_file_patterns(ui, &args.paths)?; let matcher = fileset_expression.to_matcher(); let diff_selector = tx.base_workspace_helper() .diff_selector(ui, args.tool.as_deref(), args.interactive)?; let text_editor = tx.base_workspace_helper().text_editor()?; let squashed_description = SquashedDescription::from_args(args); let source_commits = select_diff(&tx, &sources, &destination, &matcher, &diff_selector)?; 
print_unmatched_explicit_paths( ui, tx.base_workspace_helper(), &fileset_expression, source_commits.iter().map(|commit| &commit.selected_tree), )?; if let Some(squashed) = rewrite::squash_commits( tx.repo_mut(), &source_commits, &destination, args.keep_emptied, )? { let mut commit_builder = squashed.commit_builder.detach(); let single_description = match squashed_description { SquashedDescription::Exact(description) => Some(description), SquashedDescription::UseDestination => Some(destination.description().to_owned()), SquashedDescription::Combine => { let abandoned_commits = &squashed.abandoned_commits; try_combine_messages(abandoned_commits, &destination) } }; let description = if let Some(description) = single_description { if description.is_empty() && !args.editor { description } else { commit_builder.set_description(&description); let description_with_trailers = add_trailers(ui, &tx, &commit_builder)?; if args.editor { commit_builder.set_description(&description_with_trailers); let temp_commit = commit_builder.write_hidden()?; let intro = ""; let template = description_template(ui, &tx, intro, &temp_commit)?; edit_description(&text_editor, &template)? } else { description_with_trailers } } } else { // edit combined let abandoned_commits = &squashed.abandoned_commits; let combined = combine_messages_for_editing( ui, &tx, abandoned_commits, (!insert_destination_commit).then_some(&destination), &commit_builder, )?; // It's weird that commit.description() contains "JJ: " lines, but works. commit_builder.set_description(combined); let temp_commit = commit_builder.write_hidden()?; let intro = "Enter a description for the combined commit."; let template = description_template(ui, &tx, intro, &temp_commit)?; edit_description(&text_editor, &template)? 
}; commit_builder.set_description(description); if insert_destination_commit { // forget about the intermediate commit commit_builder.set_predecessors( commit_builder .predecessors() .iter() .filter(|p| p != &destination.id()) .cloned() .collect(), ); } let commit = commit_builder.write(tx.repo_mut())?; let num_rebased = tx.repo_mut().rebase_descendants()?; if let Some(mut formatter) = ui.status_formatter() { if insert_destination_commit { write!(formatter, "Created new commit ")?; tx.write_commit_summary(formatter.as_mut(), &commit)?; writeln!(formatter)?; } if num_rebased > 0 { writeln!(formatter, "Rebased {num_rebased} descendant commits")?; } } } else { if diff_selector.is_interactive() { return Err(user_error("No changes selected")); } if let Some(mut formatter) = ui.status_formatter() { if insert_destination_commit { write!(formatter, "Created new commit ")?; tx.write_commit_summary(formatter.as_mut(), &destination)?; writeln!(formatter)?; } if num_rebased > 0 { writeln!(formatter, "Rebased {num_rebased} descendant commits")?; } } if let [only_path] = &*args.paths { let no_rev_arg = args.revision.is_none() && args.from.is_empty() && args.into.is_none(); if no_rev_arg && tx .base_workspace_helper() .parse_revset(ui, &RevisionArg::from(only_path.to_owned())) .is_ok() { writeln!( ui.warning_default(), "The argument {only_path:?} is being interpreted as a fileset expression. To \ specify a revset, pass -r {only_path:?} instead." )?; } } } tx.finish(ui, tx_description)?; Ok(()) } enum SquashedDescription { // Use this exact description. Exact(String), // Use the destination's description and discard the descriptions of the // source revisions. UseDestination, // Combine the descriptions of the source and destination revisions. Combine, } impl SquashedDescription { fn from_args(args: &SquashArgs) -> Self { // These options are incompatible and Clap is configured to prevent this. 
assert!(args.message_paragraphs.is_empty() || !args.use_destination_message); if !args.message_paragraphs.is_empty() { let desc = join_message_paragraphs(&args.message_paragraphs); Self::Exact(desc) } else if args.use_destination_message { Self::UseDestination } else { Self::Combine } } } fn select_diff( tx: &WorkspaceCommandTransaction, sources: &[Commit], destination: &Commit, matcher: &dyn Matcher, diff_selector: &DiffSelector, ) -> Result<Vec<CommitWithSelection>, CommandError> { let mut source_commits = vec![]; for source in sources { let parent_tree = source.parent_tree(tx.repo())?; let source_tree = source.tree(); let format_instructions = || { formatdoc! {" You are moving changes from: {source} into commit: {destination} The left side of the diff shows the contents of the parent commit. The right side initially shows the contents of the commit you're moving changes from. Adjust the right side until the diff shows the changes you want to move to the destination. If you don't make any changes, then all the changes from the source will be moved into the destination. ", source = tx.format_commit_summary(source), destination = tx.format_commit_summary(destination), } }; let selected_tree = diff_selector.select( Diff::new(&parent_tree, &source_tree), matcher, format_instructions, )?; source_commits.push(CommitWithSelection { commit: source.clone(), selected_tree, parent_tree, }); } Ok(source_commits) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/revert.rs
cli/src/commands/revert.rs
// Copyright 2025 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashSet; use bstr::ByteVec as _; use clap::ArgGroup; use clap_complete::ArgValueCompleter; use indexmap::IndexSet; use itertools::Itertools as _; use jj_lib::backend::CommitId; use jj_lib::commit::conflict_label_for_commits; use jj_lib::merge::Merge; use jj_lib::merged_tree::MergedTree; use jj_lib::object_id::ObjectId as _; use jj_lib::repo::Repo as _; use jj_lib::rewrite::merge_commit_trees; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::compute_commit_location; use crate::cli_util::print_updated_commits; use crate::command_error::CommandError; use crate::complete; use crate::ui::Ui; /// Apply the reverse of the given revision(s) /// /// The reverse of each of the given revisions is applied sequentially in /// reverse topological order at the given location. /// /// The description of the new revisions can be customized with the /// `templates.revert_description` config variable. 
#[derive(clap::Args, Clone, Debug)] #[command(group(ArgGroup::new("location").args(&["onto", "insert_after", "insert_before"]).required(true).multiple(true)))] pub(crate) struct RevertArgs { /// The revision(s) to apply the reverse of #[arg(long, short, value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] revisions: Vec<RevisionArg>, /// The revision(s) to apply the reverse changes on top of #[arg( long, visible_alias = "destination", short, visible_short_alias = 'd', value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] onto: Option<Vec<RevisionArg>>, /// The revision(s) to insert the reverse changes after (can be repeated to /// create a merge commit) #[arg( long, short = 'A', visible_alias = "after", conflicts_with = "onto", value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] insert_after: Option<Vec<RevisionArg>>, /// The revision(s) to insert the reverse changes before (can be repeated to /// create a merge commit) #[arg( long, short = 'B', visible_alias = "before", conflicts_with = "onto", value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] insert_before: Option<Vec<RevisionArg>>, } #[instrument(skip_all)] pub(crate) fn cmd_revert( ui: &mut Ui, command: &CommandHelper, args: &RevertArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; let to_revert: Vec<_> = workspace_command .parse_union_revsets(ui, &args.revisions)? .evaluate_to_commits()? 
.try_collect()?; // in reverse topological order if to_revert.is_empty() { writeln!(ui.status(), "No revisions to revert.")?; return Ok(()); } let (new_parent_ids, new_child_ids) = compute_commit_location( ui, &workspace_command, args.onto.as_deref(), args.insert_after.as_deref(), args.insert_before.as_deref(), "reverted commits", )?; let transaction_description = if to_revert.len() == 1 { format!("revert commit {}", to_revert[0].id().hex()) } else { format!( "revert commit {} and {} more", to_revert[0].id().hex(), to_revert.len() - 1 ) }; let commits_to_revert_with_new_commit_descriptions = { let template_text = command .settings() .get_string("templates.revert_description")?; let template = workspace_command.parse_commit_template(ui, &template_text)?; to_revert .into_iter() .map(|commit| { let commit_description = template.format_plain_text(&commit).into_string_lossy(); (commit, commit_description) }) .collect_vec() }; let mut tx = workspace_command.start_transaction(); let original_parent_commit_ids: HashSet<_> = new_parent_ids.iter().cloned().collect(); let new_parents: Vec<_> = new_parent_ids .iter() .map(|id| tx.repo().store().get_commit(id)) .try_collect()?; let mut new_base_tree = merge_commit_trees(tx.repo(), &new_parents).block_on()?; let mut parent_ids = new_parent_ids; let mut parent_labels = conflict_label_for_commits(&new_parents); let mut reverted_commits = vec![]; for (commit_to_revert, new_commit_description) in &commits_to_revert_with_new_commit_descriptions { let old_parents: Vec<_> = commit_to_revert.parents().try_collect()?; let old_base_tree = commit_to_revert.parent_tree(tx.repo())?; let old_tree = commit_to_revert.tree(); let new_tree = MergedTree::merge(Merge::from_vec(vec![ ( new_base_tree, format!("{parent_labels} (revert destination)"), ), ( old_tree, format!("{} (reverted revision)", commit_to_revert.conflict_label()), ), ( old_base_tree, format!( "{} (parents of reverted revision)", conflict_label_for_commits(&old_parents) ), ), ])) 
.block_on()?; let new_commit = tx .repo_mut() .new_commit(parent_ids, new_tree.clone()) .set_description(new_commit_description) .write()?; parent_ids = vec![new_commit.id().clone()]; parent_labels = new_commit.conflict_label(); reverted_commits.push(new_commit); new_base_tree = new_tree; } // Rebase new children onto the reverted commit. let new_head_ids: Vec<_> = parent_ids; let children_commit_ids_set: HashSet<CommitId> = new_child_ids.iter().cloned().collect(); let mut num_rebased = 0; tx.repo_mut() .transform_descendants(new_child_ids, async |mut rewriter| { if children_commit_ids_set.contains(rewriter.old_commit().id()) { let mut child_new_parent_ids = IndexSet::new(); for old_parent_id in rewriter.old_commit().parent_ids() { // If the original parents of the new children are the new parents of // `target_head_ids`, replace them with `new_head_ids` since we are // "inserting" the new commits in between the new parents and the new // children. if original_parent_commit_ids.contains(old_parent_id) { child_new_parent_ids.extend(new_head_ids.clone()); } else { child_new_parent_ids.insert(old_parent_id.clone()); } } // If not already present, add `new_head_ids` as parents of the new child // commit. child_new_parent_ids.extend(new_head_ids.clone()); rewriter.set_new_parents(child_new_parent_ids.into_iter().collect()); } num_rebased += 1; rewriter.rebase().await?.write()?; Ok(()) })?; if let Some(mut formatter) = ui.status_formatter() { writeln!( formatter, "Reverted {} commits as follows:", reverted_commits.len() )?; print_updated_commits( formatter.as_mut(), &tx.commit_summary_template(), &reverted_commits, )?; if num_rebased > 0 { writeln!(formatter, "Rebased {num_rebased} descendant commits")?; } } tx.finish(ui, transaction_description)?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/version.rs
cli/src/commands/version.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::ui::Ui; /// Display version information #[derive(clap::Args, Clone, Debug)] pub(crate) struct VersionArgs {} #[instrument(skip_all)] pub(crate) fn cmd_version( ui: &mut Ui, command: &CommandHelper, _args: &VersionArgs, ) -> Result<(), CommandError> { write!(ui.stdout(), "{}", command.app().render_version())?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/rebase.rs
cli/src/commands/rebase.rs
// Copyright 2020-2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write as _; use std::sync::Arc; use clap::ArgGroup; use clap_complete::ArgValueCompleter; use itertools::Itertools as _; use jj_lib::backend::CommitId; use jj_lib::commit::Commit; use jj_lib::object_id::ObjectId as _; use jj_lib::repo::ReadonlyRepo; use jj_lib::repo::Repo as _; use jj_lib::revset::RevsetExpression; use jj_lib::rewrite::EmptyBehavior; use jj_lib::rewrite::MoveCommitsLocation; use jj_lib::rewrite::MoveCommitsStats; use jj_lib::rewrite::MoveCommitsTarget; use jj_lib::rewrite::RebaseOptions; use jj_lib::rewrite::RewriteRefsOptions; use jj_lib::rewrite::compute_move_commits; use jj_lib::rewrite::find_duplicate_divergent_commits; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::WorkspaceCommandHelper; use crate::cli_util::compute_commit_location; use crate::cli_util::print_updated_commits; use crate::cli_util::short_commit_hash; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::complete; use crate::ui::Ui; /// Move revisions to different parent(s) /// /// This command moves revisions to different parent(s) while preserving the /// changes (diff) in the revisions. 
/// /// There are three different ways of specifying which revisions to rebase: /// /// * `--source/-s` to rebase a revision and its descendants /// * `--branch/-b` to rebase a whole branch, relative to the destination /// * `--revisions/-r` to rebase the specified revisions without their /// descendants /// /// If no option is specified, it defaults to `-b @`. /// /// There are three different ways of specifying where the revisions should be /// rebased to: /// /// * `--onto/-o` to rebase the revisions onto the specified targets /// * `--insert-after/-A` to rebase the revisions onto the specified targets and /// to rebase the targets' descendants onto the rebased revisions /// * `--insert-before/-B` to rebase the revisions onto the specified targets' /// parents and to rebase the targets and their descendants onto the rebased /// revisions /// /// See the sections below for details about the different ways of specifying /// which revisions to rebase where. /// /// If a working-copy revision gets abandoned, it will be given a new, empty /// revision. This is true in general; it is not specific to this command. /// /// ### Specifying which revisions to rebase /// /// With `--source/-s`, the command rebases the specified revision and its /// descendants to the destination. For example, `jj rebase -s M -o O` would /// transform your history like this (letters followed by an apostrophe are /// post-rebase versions): /// /// ```text /// O N' /// | | /// | N M' /// | | | /// | M O /// | | => | /// | | L | L /// | |/ | | /// | K | K /// |/ |/ /// J J /// ``` /// /// Each revision passed to `-s` will become a direct child of the destination, /// so if you instead run `jj rebase -s M -s N -o O` (or `jj rebase -s 'M|N' -o /// O`) in the example above, then N' would instead be a direct child of O. /// /// With `--branch/-b`, the command rebases the whole "branch" containing the /// specified revision. 
A "branch" is the set of revisions that includes: /// /// * the specified revision and ancestors that are not also ancestors of the /// destination /// * all descendants of those revisions /// /// In other words, `jj rebase -b X -o Y` rebases revisions in the revset /// `(Y..X)::` (which is equivalent to `jj rebase -s 'roots(Y..X)' -o Y` for a /// single root). For example, either `jj rebase -b L -o O` or `jj rebase -b M /// -o O` would transform your history like this (because `L` and `M` are on the /// same "branch", relative to the destination): /// /// ```text /// O N' /// | | /// | N M' /// | | | /// | M | L' /// | | => |/ /// | | L K' /// | |/ | /// | K O /// |/ | /// J J /// ``` /// /// With `--revisions/-r`, the command rebases only the specified revisions to /// the destination. Any "hole" left behind will be filled by rebasing /// descendants onto the specified revisions' parent(s). For example, /// `jj rebase -r K -o M` would transform your history like this: /// /// ```text /// M K' /// | | /// | L M /// | | => | /// | K | L' /// |/ |/ /// J J /// ``` /// /// Multiple revisions can be specified, and any dependencies (graph edges) /// within the set will be preserved. For example, `jj rebase -r 'K|N' -o O` /// would transform your history like this: /// /// ```text /// O N' /// | | /// | N K' /// | | | /// | M O /// | | => | /// | | L | M' /// | |/ |/ /// | K | L' /// |/ |/ /// J J /// ``` /// /// `jj rebase -s X` is similar to `jj rebase -r X::` and will behave the same /// if X is a single revision. However, if X is a set of multiple revisions, /// or if you passed multiple `-s` arguments, then `jj rebase -s` will make each /// of the specified revisions an immediate child of the destination, while /// `jj rebase -r` will preserve dependencies within the set. /// /// Note that you can create a merge revision by repeating the `-o` argument. 
/// For example, if you realize that revision L actually depends on revision M /// in order to work (in addition to its current parent K), you can run `jj /// rebase -s L -o K -o M`: /// /// ```text /// M L' /// | |\ /// | L M | /// | | => | | /// | K | K /// |/ |/ /// J J /// ``` /// /// ### Specifying where to rebase the revisions /// /// With `--onto/-o`, the command rebases the selected revisions onto the /// targets. Existing descendants of the targets will not be affected. See /// the section above for examples. /// /// With `--insert-after/-A`, the selected revisions will be inserted after the /// targets. This is similar to `-o`, but if the targets have any existing /// descendants, then those will be rebased onto the rebased selected revisions. /// /// For example, `jj rebase -r K -A L` will rewrite history like this: /// ```text /// N N' /// | | /// | M | M' /// |/ |/ /// L => K' /// | | /// | K L /// |/ | /// J J /// ``` /// /// The `-A` (and `-B`) argument can also be used for reordering revisions. For /// example, `jj rebase -r M -A J` will rewrite history like this: /// ```text /// M L' /// | | /// L K' /// | => | /// K M' /// | | /// J J /// ``` /// /// With `--insert-before/-B`, the selected revisions will be inserted before /// the targets. This is achieved by rebasing the selected revisions onto the /// target revisions' parents, and then rebasing the target revisions and their /// descendants onto the rebased revisions. /// /// For example, `jj rebase -r K -B L` will rewrite history like this: /// ```text /// N N' /// | | /// | M | M' /// |/ |/ /// L => L' /// | | /// | K K' /// |/ | /// J J /// ``` /// /// The `-A` and `-B` arguments can also be combined, which can be useful around /// merges. 
For example, you can use `jj rebase -r K -A J -B M` to create a new /// merge (but `jj rebase -r M -o L -o K` might be simpler in this particular /// case): /// ```text /// M M' /// | |\ /// L L | /// | => | | /// | K | K' /// |/ |/ /// J J /// ``` /// /// To insert a commit inside an existing merge with `jj rebase -r O -A K -B M`: /// ```text /// O N' /// | |\ /// N | M' /// |\ | |\ /// | M | O'| /// | | => |/ / /// | L | L /// | | | | /// K | K | /// |/ |/ /// J J /// ``` #[derive(clap::Args, Clone, Debug)] #[command(verbatim_doc_comment)] #[command(group(ArgGroup::new("to_rebase").args(&["branch", "source", "revisions"])))] pub(crate) struct RebaseArgs { /// Rebase the whole branch relative to destination's ancestors (can be /// repeated) /// /// `jj rebase -b=br -o=dst` is equivalent to `jj rebase '-s=roots(dst..br)' /// -o=dst`. /// /// If none of `-b`, `-s`, or `-r` is provided, then the default is `-b @`. #[arg(long, short, value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] branch: Vec<RevisionArg>, /// Rebase specified revision(s) together with their trees of descendants /// (can be repeated) /// /// Each specified revision will become a direct child of the destination /// revision(s), even if some of the source revisions are descendants /// of others. /// /// If none of `-b`, `-s`, or `-r` is provided, then the default is `-b @`. #[arg(long, short, value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] source: Vec<RevisionArg>, /// Rebase the given revisions, rebasing descendants onto this revision's /// parent(s) /// /// Unlike `-s` or `-b`, you may `jj rebase -r` a revision `A` onto a /// descendant of `A`. /// /// If none of `-b`, `-s`, or `-r` is provided, then the default is `-b @`. 
#[arg(long, short, value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revisions: Vec<RevisionArg>, #[command(flatten)] destination: RebaseDestinationArgs, /// If true, when rebasing would produce an empty commit, the commit is /// abandoned. It will not be abandoned if it was already empty before the /// rebase. Will never skip merge commits with multiple non-empty /// parents. #[arg(long)] skip_emptied: bool, /// Keep divergent commits while rebasing /// /// Without this flag, divergent commits are abandoned while rebasing if /// another commit with the same change ID is already present in the /// destination with identical changes. #[arg(long)] keep_divergent: bool, } #[derive(clap::Args, Clone, Debug)] #[group(required = true)] pub struct RebaseDestinationArgs { /// The revision(s) to rebase onto (can be repeated to create a merge /// commit) #[arg( long, alias = "destination", short, visible_short_alias = 'd', value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] onto: Option<Vec<RevisionArg>>, /// The revision(s) to insert after (can be repeated to create a merge /// commit) #[arg( long, short = 'A', visible_alias = "after", conflicts_with = "onto", value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] insert_after: Option<Vec<RevisionArg>>, /// The revision(s) to insert before (can be repeated to create a merge /// commit) #[arg( long, short = 'B', visible_alias = "before", conflicts_with = "onto", value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] insert_before: Option<Vec<RevisionArg>>, } #[instrument(skip_all)] pub(crate) fn cmd_rebase( ui: &mut Ui, command: &CommandHelper, args: &RebaseArgs, ) -> Result<(), CommandError> { let rebase_options = RebaseOptions { empty: match args.skip_emptied { true => EmptyBehavior::AbandonNewlyEmpty, false => EmptyBehavior::Keep, }, 
rewrite_refs: RewriteRefsOptions { delete_abandoned_bookmarks: false, }, simplify_ancestor_merge: false, }; let mut workspace_command = command.workspace_helper(ui)?; let loc = if !args.revisions.is_empty() { plan_rebase_revisions(ui, &workspace_command, &args.revisions, &args.destination)? } else if !args.source.is_empty() { plan_rebase_source(ui, &workspace_command, &args.source, &args.destination)? } else { plan_rebase_branch(ui, &workspace_command, &args.branch, &args.destination)? }; let mut tx = workspace_command.start_transaction(); let mut computed_move = compute_move_commits(tx.repo(), &loc)?; if !args.keep_divergent { let abandoned_divergent = find_duplicate_divergent_commits(tx.repo(), &loc.new_parent_ids, &loc.target)?; computed_move.record_to_abandon(abandoned_divergent.iter().map(Commit::id).cloned()); if !abandoned_divergent.is_empty() && let Some(mut formatter) = ui.status_formatter() { writeln!( formatter, "Abandoned {} divergent commits that were already present in the destination:", abandoned_divergent.len(), )?; print_updated_commits( formatter.as_mut(), &tx.base_workspace_helper().commit_summary_template(), &abandoned_divergent, )?; } }; let stats = computed_move.apply(tx.repo_mut(), &rebase_options)?; print_move_commits_stats(ui, &stats)?; tx.finish(ui, tx_description(&loc.target))?; Ok(()) } fn plan_rebase_revisions( ui: &Ui, workspace_command: &WorkspaceCommandHelper, revisions: &[RevisionArg], rebase_destination: &RebaseDestinationArgs, ) -> Result<MoveCommitsLocation, CommandError> { let target_expr = workspace_command .parse_union_revsets(ui, revisions)? .resolve()?; workspace_command.check_rewritable_expr(&target_expr)?; let target_commit_ids: Vec<_> = target_expr .evaluate(workspace_command.repo().as_ref())? 
.iter() .try_collect()?; // in reverse topological order let (new_parent_ids, new_child_ids) = compute_commit_location( ui, workspace_command, rebase_destination.onto.as_deref(), rebase_destination.insert_after.as_deref(), rebase_destination.insert_before.as_deref(), "rebased commits", )?; if rebase_destination.onto.is_some() { for id in &target_commit_ids { if new_parent_ids.contains(id) { return Err(user_error(format!( "Cannot rebase {} onto itself", short_commit_hash(id), ))); } } } Ok(MoveCommitsLocation { new_parent_ids, new_child_ids, target: MoveCommitsTarget::Commits(target_commit_ids), }) } fn plan_rebase_source( ui: &Ui, workspace_command: &WorkspaceCommandHelper, source: &[RevisionArg], rebase_destination: &RebaseDestinationArgs, ) -> Result<MoveCommitsLocation, CommandError> { let source_commit_ids = Vec::from_iter(workspace_command.resolve_some_revsets(ui, source)?); workspace_command.check_rewritable(&source_commit_ids)?; let (new_parent_ids, new_child_ids) = compute_commit_location( ui, workspace_command, rebase_destination.onto.as_deref(), rebase_destination.insert_after.as_deref(), rebase_destination.insert_before.as_deref(), "rebased commits", )?; if rebase_destination.onto.is_some() { for id in &source_commit_ids { let commit = workspace_command.repo().store().get_commit(id)?; check_rebase_destinations(workspace_command.repo(), &new_parent_ids, &commit)?; } } Ok(MoveCommitsLocation { new_parent_ids, new_child_ids, target: MoveCommitsTarget::Roots(source_commit_ids), }) } fn plan_rebase_branch( ui: &Ui, workspace_command: &WorkspaceCommandHelper, branch: &[RevisionArg], rebase_destination: &RebaseDestinationArgs, ) -> Result<MoveCommitsLocation, CommandError> { let branch_commit_ids: Vec<_> = if branch.is_empty() { vec![ workspace_command .resolve_single_rev(ui, &RevisionArg::AT)? .id() .clone(), ] } else { workspace_command .resolve_some_revsets(ui, branch)? 
.into_iter() .collect() }; let (new_parent_ids, new_child_ids) = compute_commit_location( ui, workspace_command, rebase_destination.onto.as_deref(), rebase_destination.insert_after.as_deref(), rebase_destination.insert_before.as_deref(), "rebased commits", )?; let roots_expression = RevsetExpression::commits(new_parent_ids.clone()) .range(&RevsetExpression::commits(branch_commit_ids)) .roots(); workspace_command.check_rewritable_expr(&roots_expression)?; let root_commit_ids: Vec<_> = roots_expression .evaluate(workspace_command.repo().as_ref()) .unwrap() .iter() .try_collect()?; if rebase_destination.onto.is_some() { for id in &root_commit_ids { let commit = workspace_command.repo().store().get_commit(id)?; check_rebase_destinations(workspace_command.repo(), &new_parent_ids, &commit)?; } } Ok(MoveCommitsLocation { new_parent_ids, new_child_ids, target: MoveCommitsTarget::Roots(root_commit_ids), }) } fn check_rebase_destinations( repo: &Arc<ReadonlyRepo>, new_parents: &[CommitId], commit: &Commit, ) -> Result<(), CommandError> { for parent_id in new_parents { if parent_id == commit.id() { return Err(user_error(format!( "Cannot rebase {} onto itself", short_commit_hash(commit.id()), ))); } if repo.index().is_ancestor(commit.id(), parent_id)? { return Err(user_error(format!( "Cannot rebase {} onto descendant {}", short_commit_hash(commit.id()), short_commit_hash(parent_id) ))); } } Ok(()) } fn tx_description(target: &MoveCommitsTarget) -> String { match &target { MoveCommitsTarget::Commits(ids) => match &ids[..] { [] => format!("rebase {} commits", ids.len()), [id] => format!("rebase commit {}", id.hex()), [first, others @ ..] => { format!("rebase commit {} and {} more", first.hex(), others.len()) } }, MoveCommitsTarget::Roots(ids) => match &ids[..] { [id] => format!("rebase commit {} and descendants", id.hex()), _ => format!("rebase {} commits and their descendants", ids.len()), }, } } /// Print details about the provided [`MoveCommitsStats`]. 
fn print_move_commits_stats(ui: &Ui, stats: &MoveCommitsStats) -> std::io::Result<()> { let Some(mut formatter) = ui.status_formatter() else { return Ok(()); }; let &MoveCommitsStats { num_rebased_targets, num_rebased_descendants, num_skipped_rebases, num_abandoned_empty, rebased_commits: _, } = stats; if num_skipped_rebases > 0 { writeln!( formatter, "Skipped rebase of {num_skipped_rebases} commits that were already in place" )?; } if num_rebased_targets > 0 { writeln!( formatter, "Rebased {num_rebased_targets} commits to destination" )?; } if num_rebased_descendants > 0 { writeln!( formatter, "Rebased {num_rebased_descendants} descendant commits" )?; } if num_abandoned_empty > 0 { writeln!( formatter, "Abandoned {num_abandoned_empty} newly emptied commits" )?; } Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/parallelize.rs
cli/src/commands/parallelize.rs
// Copyright 2024 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashMap; use clap_complete::ArgValueCompleter; use indexmap::IndexSet; use itertools::Itertools as _; use jj_lib::backend::CommitId; use jj_lib::commit::Commit; use jj_lib::commit::CommitIteratorExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::command_error::CommandError; use crate::complete; use crate::ui::Ui; /// Parallelize revisions by making them siblings /// /// Running `jj parallelize 1::2` will transform the history like this: /// ```text /// 3 /// | 3 /// 2 / \ /// | -> 1 2 /// 1 \ / /// | 0 /// 0 /// ``` /// /// The command effectively says "these revisions are actually independent", /// meaning that they should no longer be ancestors/descendants of each other. /// However, revisions outside the set that were previously ancestors of a /// revision in the set will remain ancestors of it. For example, revision 0 /// above remains an ancestor of both 1 and 2. Similarly, /// revisions outside the set that were previously descendants of a revision /// in the set will remain descendants of it. For example, revision 3 above /// remains a descendant of both 1 and 2. /// /// Therefore, `jj parallelize '1 | 3'` is a no-op. 
That's because 2, which is /// not in the target set, was a descendant of 1 before, so it remains a /// descendant, and it was an ancestor of 3 before, so it remains an ancestor. #[derive(clap::Args, Clone, Debug)] #[command(verbatim_doc_comment)] pub(crate) struct ParallelizeArgs { /// Revisions to parallelize #[arg(value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revisions: Vec<RevisionArg>, } #[instrument(skip_all)] pub(crate) fn cmd_parallelize( ui: &mut Ui, command: &CommandHelper, args: &ParallelizeArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; // The target commits are the commits being parallelized. They are ordered // here with children before parents. let target_commits: Vec<Commit> = workspace_command .parse_union_revsets(ui, &args.revisions)? .evaluate_to_commits()? .try_collect()?; // New parents for commits in the target set. Since commits in the set are now // supposed to be independent, they inherit the parent's non-target parents, // recursively. let mut new_target_parents: HashMap<CommitId, Vec<CommitId>> = HashMap::new(); let mut needs_rewrite = Vec::new(); for commit in target_commits.iter().rev() { let mut new_parents = vec![]; for old_parent in commit.parent_ids() { if let Some(grand_parents) = new_target_parents.get(old_parent) { new_parents.extend_from_slice(grand_parents); needs_rewrite.push(commit.id()); } else { new_parents.push(old_parent.clone()); } } new_target_parents.insert(commit.id().clone(), new_parents); } workspace_command.check_rewritable(needs_rewrite)?; let mut tx = workspace_command.start_transaction(); // If a commit outside the target set has a commit in the target set as parent, // then - after the transformation - it should also have that commit's // parents as direct parents, if those commits are also in the target set. 
let mut new_child_parents: HashMap<CommitId, IndexSet<CommitId>> = HashMap::new(); for commit in target_commits.iter().rev() { let mut new_parents = IndexSet::new(); for old_parent in commit.parent_ids() { if let Some(parents) = new_child_parents.get(old_parent) { new_parents.extend(parents.iter().cloned()); } } new_parents.insert(commit.id().clone()); new_child_parents.insert(commit.id().clone(), new_parents); } tx.repo_mut().transform_descendants( target_commits.iter().ids().cloned().collect_vec(), async |mut rewriter| { // Commits in the target set do not depend on each other but they still depend // on other parents if let Some(new_parents) = new_target_parents.get(rewriter.old_commit().id()) { rewriter.set_new_rewritten_parents(new_parents); } else if rewriter .old_commit() .parent_ids() .iter() .any(|id| new_child_parents.contains_key(id)) { let mut new_parents = vec![]; for parent in rewriter.old_commit().parent_ids() { if let Some(parents) = new_child_parents.get(parent) { new_parents.extend(parents.iter().cloned()); } else { new_parents.push(parent.clone()); } } rewriter.set_new_rewritten_parents(&new_parents); } if rewriter.parents_changed() { let builder = rewriter.rebase().await?; builder.write()?; } Ok(()) }, )?; tx.finish(ui, format!("parallelize {} commits", target_commits.len())) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/fix.rs
cli/src/commands/fix.rs
// Copyright 2024 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashMap; use std::io::Write as _; use std::path::Path; use std::process::Stdio; use clap_complete::ArgValueCompleter; use itertools::Itertools as _; use jj_lib::backend::FileId; use jj_lib::commit::Commit; use jj_lib::fileset; use jj_lib::fileset::FilesetDiagnostics; use jj_lib::fileset::FilesetExpression; use jj_lib::fix::FileToFix; use jj_lib::fix::FixError; use jj_lib::fix::ParallelFileFixer; use jj_lib::fix::fix_files; use jj_lib::matchers::Matcher; use jj_lib::repo::Repo as _; use jj_lib::repo_path::RepoPathUiConverter; use jj_lib::revset::RevsetIteratorExt as _; use jj_lib::settings::UserSettings; use jj_lib::store::Store; use pollster::FutureExt as _; use tokio::io::AsyncReadExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::print_unmatched_explicit_paths; use crate::command_error::CommandError; use crate::command_error::config_error; use crate::command_error::print_parse_diagnostics; use crate::complete; use crate::config::CommandNameAndArgs; use crate::ui::Ui; /// Update files with formatting fixes or other changes /// /// The primary use case for this command is to apply the results of automatic /// code formatting tools to revisions that may not be properly formatted yet. /// It can also be used to modify files with other tools like `sed` or `sort`. 
/// /// The modification made by `jj fix` can be reviewed by `jj op show -p`. /// /// ### How it works /// /// The changed files in the given revisions will be updated with any fixes /// determined by passing their file content through any external tools the user /// has configured for those files. Descendants will also be updated by passing /// their versions of the same files through the same tools, which will ensure /// that the fixes are not lost. This will never result in new conflicts. Files /// with existing conflicts will be updated on all sides of the conflict, which /// can potentially increase or decrease the number of conflict markers. /// /// ### Configuration /// /// See `jj help -k config` chapter `Code formatting and other file content /// transformations` to understand how to configure your tools. /// /// ### Execution Example /// /// Let's consider the following configuration is set. We have two code /// formatters (`clang-format` and `black`), which apply to three different /// file extensions (`.cc`, `.h`, and `.py`): /// /// ```toml /// [fix.tools.clang-format] /// command = ["/usr/bin/clang-format", "--assume-filename=$path"] /// patterns = ["glob:'**/*.cc'", /// "glob:'**/*.h'"] /// /// [fix.tools.black] /// command = ["/usr/bin/black", "-", "--stdin-filename=$path"] /// patterns = ["glob:'**/*.py'"] /// ``` /// /// Now, let's see what would happen to the following history, when executing /// `jj fix`. /// /// ```text /// C (mutable) /// | Modifies file: foo.py /// | /// B @ (working copy - mutable) /// | Modifies file: README.md /// | /// A (mutable) /// | Modifies files: src/bar.cc and src/bar.h /// | /// X (immutable) /// ``` /// /// By default, `jj fix` will modify revisions that matches the revset /// `reachable(@, mutable())` (see `jj help -k revsets`) which corresponds to /// the revisions `A`, `B` and `C` here. 
/// /// The following operations will then happen: /// /// - For revision `A`, content from this revision for files `src/bar.cc` and /// `src/bar.h` will each be provided to `clang-format` and the result output /// will be used to recreate revision `A` which we will call `A'`. All other /// files are untouched. /// - For revision `B`, same thing happen for files `src/bar.cc` and `src/bar.h` /// Their content from revision `B` will go through `clang-format`. The file /// `README.md` as any other files, are untouched as no pattern matches it. We /// obtain revision `B'`. /// - For revision `C`, `src/bar.cc` and `src/bar.h` goes through `clang-format` /// and file `foo.py` is fixed using `black`. Any other file is untouched. We /// obtain revision `C'`. /// /// ```text /// C (mutable) /-> C' /// | src/bar.cc -> clang-format -| | /// | src/bar.h --> clang-format -| | /// | foo.py -----> black --------| | /// | * --------------------------/ | /// | | /// B @ (working copy - mutable) /-> B' @ /// | src/bar.cc -> clang-format -| | /// | src/bar.h --> clang-format -| | /// | * --------------------------| | /// | | /// A (mutable) /-> A' /// | src/bar.cc -> clang-format -| | /// | src/bar.h --> clang-format -| | /// | * --------------------------/ | /// | | /// X (immutable) X /// ``` /// /// The revisions are now all correctly formatted according to the /// configuration. #[derive(clap::Args, Clone, Debug)] #[command(verbatim_doc_comment)] pub(crate) struct FixArgs { /// Fix files in the specified revision(s) and their descendants. If no /// revisions are specified, this defaults to the `revsets.fix` setting, or /// `reachable(@, mutable())` if it is not set. #[arg(long, short, value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] source: Vec<RevisionArg>, /// Fix only these paths #[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] paths: Vec<String>, /// Fix unchanged files in addition to changed ones. 
If no paths are /// specified, all files in the repo will be fixed. #[arg(long)] include_unchanged_files: bool, } #[instrument(skip_all)] pub(crate) fn cmd_fix( ui: &mut Ui, command: &CommandHelper, args: &FixArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; let workspace_root = workspace_command.workspace_root().to_owned(); let path_converter = workspace_command.path_converter().to_owned(); let tools_config = get_tools_config(ui, workspace_command.settings())?; let target_expr = if args.source.is_empty() { let revs = workspace_command.settings().get_string("revsets.fix")?; workspace_command.parse_revset(ui, &RevisionArg::from(revs))? } else { workspace_command.parse_union_revsets(ui, &args.source)? } .resolve()?; workspace_command.check_rewritable_expr(&target_expr)?; let repo = workspace_command.repo(); let commits: Vec<Commit> = target_expr .descendants() .evaluate(repo.as_ref())? .iter() .commits(repo.store()) .try_collect()?; let commit_ids = commits .iter() .map(|commit| commit.id().clone()) .collect_vec(); let trees: Vec<_> = commits.iter().map(|commit| commit.tree()).collect(); let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?; let matcher = fileset_expression.to_matcher(); let mut tx = workspace_command.start_transaction(); let mut parallel_fixer = ParallelFileFixer::new(|store, file_to_fix| { fix_one_file( ui, &workspace_root, &path_converter, &tools_config, store, file_to_fix, ) .block_on() }); print_unmatched_explicit_paths(ui, tx.base_workspace_helper(), &fileset_expression, &trees)?; let summary = fix_files( commit_ids, &matcher, args.include_unchanged_files, tx.repo_mut(), &mut parallel_fixer, ) .block_on()?; writeln!( ui.status(), "Fixed {} commits of {} checked.", summary.num_fixed_commits, summary.num_checked_commits )?; tx.finish(ui, format!("fixed {} commits", summary.num_fixed_commits)) } /// Invokes all matching tools (if any) to file_to_fix. 
If the content is /// successfully transformed the new content is written and the new FileId is /// returned. Returns None if the content is unchanged. /// /// The matching tools are invoked in order, with the result of one tool feeding /// into the next tool. Returns FixError if there is an error reading or writing /// the file. However, if a tool invocation fails for whatever reason, the tool /// is simply skipped and we proceed to invoke the next tool (this is /// indistinguishable from succeeding with no changes). /// /// TODO: Better error handling so we can tell the user what went wrong with /// each failed input. async fn fix_one_file( ui: &Ui, workspace_root: &Path, path_converter: &RepoPathUiConverter, tools_config: &ToolsConfig, store: &Store, file_to_fix: &FileToFix, ) -> Result<Option<FileId>, FixError> { let mut matching_tools = tools_config .tools .iter() .filter(|tool_config| tool_config.matcher.matches(&file_to_fix.repo_path)) .peekable(); if matching_tools.peek().is_some() { // The first matching tool gets its input from the committed file, and any // subsequent matching tool gets its input from the previous matching tool's // output. let mut old_content = vec![]; let mut read = store .read_file(&file_to_fix.repo_path, &file_to_fix.file_id) .await?; read.read_to_end(&mut old_content).await?; let new_content = matching_tools.fold(old_content.clone(), |prev_content, tool_config| { match run_tool( ui, workspace_root, path_converter, &tool_config.command, file_to_fix, &prev_content, ) { Ok(next_content) => next_content, // TODO: Because the stderr is passed through, this isn't always failing // silently, but it should do something better will the exit code, tool // name, etc. 
Err(()) => prev_content, } }); if new_content != old_content { // TODO: send futures back over channel let new_file_id = store .write_file(&file_to_fix.repo_path, &mut new_content.as_slice()) .await?; return Ok(Some(new_file_id)); } } Ok(None) } /// Runs the `tool_command` to fix the given file content. /// /// The `old_content` is assumed to be that of the `file_to_fix`'s `FileId`, but /// this is not verified. /// /// Returns the new file content, whose value will be the same as `old_content` /// unless the command introduced changes. Returns `None` if there were any /// failures when starting, stopping, or communicating with the subprocess. fn run_tool( ui: &Ui, workspace_root: &Path, path_converter: &RepoPathUiConverter, tool_command: &CommandNameAndArgs, file_to_fix: &FileToFix, old_content: &[u8], ) -> Result<Vec<u8>, ()> { let mut vars: HashMap<&str, &str> = HashMap::new(); vars.insert("path", file_to_fix.repo_path.as_internal_file_string()); // TODO: workspace_root.to_str() returns None if the workspace path is not // UTF-8, but we ignore that failure so `jj fix` still runs in that // situation. Maybe we should do something like substituting bytes instead // of strings so we can handle any Path here. 
if let Some(root) = workspace_root.to_str() { vars.insert("root", root); } let mut command = tool_command.to_command_with_variables(&vars); tracing::debug!(?command, ?file_to_fix.repo_path, "spawning fix tool"); let mut child = match command .current_dir(workspace_root) .stdin(Stdio::piped()) .stdout(Stdio::piped()) .stderr(Stdio::piped()) .spawn() { Ok(child) => child, Err(_) => { writeln!( ui.warning_default(), "Failed to start `{}`", tool_command.split_name(), ) .ok(); return Err(()); } }; let mut stdin = child.stdin.take().expect( "The child process is created with piped stdin, and it's our first access to stdin.", ); let output = std::thread::scope(|s| { s.spawn(move || { stdin.write_all(old_content).ok(); }); child.wait_with_output().or(Err(())) })?; tracing::debug!(?command, ?output.status, "fix tool exited:"); if !output.stderr.is_empty() { let mut stderr = ui.stderr(); writeln!( stderr, "{}:", path_converter.format_file_path(&file_to_fix.repo_path) ) .ok(); stderr.write_all(&output.stderr).ok(); writeln!(stderr).ok(); } if output.status.success() { Ok(output.stdout) } else { writeln!( ui.warning_default(), "Fix tool `{}` exited with non-zero exit code for `{}`", tool_command.split_name(), path_converter.format_file_path(&file_to_fix.repo_path) ) .ok(); Err(()) } } /// Represents an entry in the `fix.tools` config table. struct ToolConfig { /// The command that will be run to fix a matching file. command: CommandNameAndArgs, /// The matcher that determines if this tool matches a file. matcher: Box<dyn Matcher>, /// Whether the tool is enabled enabled: bool, // TODO: Store the `name` field here and print it with the command's stderr, to clearly // associate any errors/warnings with the tool and its configuration entry. } /// Represents the `fix.tools` config table. struct ToolsConfig { /// Some tools, stored in the order they will be executed if more than one /// of them matches the same file. 
tools: Vec<ToolConfig>, } /// Simplifies deserialization of the config values while building a ToolConfig. #[derive(Clone, Debug, Eq, PartialEq, serde::Deserialize)] #[serde(rename_all = "kebab-case")] struct RawToolConfig { command: CommandNameAndArgs, patterns: Vec<String>, #[serde(default = "default_tool_enabled")] enabled: bool, } fn default_tool_enabled() -> bool { true } /// Parses the `fix.tools` config table. /// /// Fails if any of the commands or patterns are obviously unusable, but does /// not check for issues that might still occur later like missing executables. /// This is a place where we could fail earlier in some cases, though. fn get_tools_config(ui: &mut Ui, settings: &UserSettings) -> Result<ToolsConfig, CommandError> { let mut tools: Vec<ToolConfig> = settings .table_keys("fix.tools") // Sort keys early so errors are deterministic. .sorted() .map(|name| -> Result<ToolConfig, CommandError> { let mut diagnostics = FilesetDiagnostics::new(); let tool: RawToolConfig = settings.get(["fix", "tools", name])?; let expression = FilesetExpression::union_all( tool.patterns .iter() .map(|arg| { fileset::parse( &mut diagnostics, arg, &RepoPathUiConverter::Fs { cwd: "".into(), base: "".into(), }, ) }) .try_collect()?, ); print_parse_diagnostics(ui, &format!("In `fix.tools.{name}`"), &diagnostics)?; Ok(ToolConfig { command: tool.command, matcher: expression.to_matcher(), enabled: tool.enabled, }) }) .try_collect()?; if tools.is_empty() { return Err(config_error("No `fix.tools` are configured")); } tools.retain(|t| t.enabled); if tools.is_empty() { Err(config_error( "At least one entry of `fix.tools` must be enabled.".to_string(), )) } else { Ok(ToolsConfig { tools }) } }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/diff.rs
cli/src/commands/diff.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCandidates; use clap_complete::ArgValueCompleter; use indexmap::IndexSet; use itertools::Itertools as _; use jj_lib::copies::CopyRecords; use jj_lib::merge::Diff; use jj_lib::repo::Repo as _; use jj_lib::rewrite::merge_commit_trees; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::print_unmatched_explicit_paths; use crate::cli_util::short_commit_hash; use crate::command_error::CommandError; use crate::command_error::user_error_with_hint; use crate::complete; use crate::diff_util::DiffFormatArgs; use crate::diff_util::get_copy_records; use crate::diff_util::show_templated; use crate::ui::Ui; /// Compare file contents between two revisions /// /// With the `-r` option, shows the changes compared to the parent revision. /// If there are several parent revisions (i.e., the given revision is a /// merge), then they will be merged and the changes from the result to the /// given revision will be shown. /// /// With the `--from` and/or `--to` options, shows the difference from/to the /// given revisions. If either is left out, it defaults to the working-copy /// commit. For example, `jj diff --from main` shows the changes from "main" /// (perhaps a bookmark name) to the working-copy commit. /// /// If no option is specified, it defaults to `-r @`. 
#[derive(clap::Args, Clone, Debug)] #[command(mut_arg("ignore_all_space", |a| a.short('w')))] #[command(mut_arg("ignore_space_change", |a| a.short('b')))] pub(crate) struct DiffArgs { /// Show changes in these revisions /// /// If there are multiple revisions, then the total diff for all of them /// will be shown. For example, if you have a linear chain of revisions /// A..D, then `jj diff -r B::D` equals `jj diff --from A --to D`. Multiple /// heads and/or roots are supported, but gaps in the revset are not /// supported (e.g. `jj diff -r 'A|C'` in a linear chain A..C). /// /// If a revision is a merge commit, this shows changes *from* the /// automatic merge of the contents of all of its parents *to* the contents /// of the revision itself. /// /// If none of `-r`, `-f`, or `-t` is provided, then the default is `-r @`. #[arg(long, short, value_name = "REVSETS", alias = "revision")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] revisions: Option<Vec<RevisionArg>>, /// Show changes from this revision /// /// If none of `-r`, `-f`, or `-t` is provided, then the default is `-r @`. #[arg(long, short, conflicts_with = "revisions", value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] from: Option<RevisionArg>, /// Show changes to this revision /// /// If none of `-r`, `-f`, or `-t` is provided, then the default is `-r @`. #[arg(long, short, conflicts_with = "revisions", value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] to: Option<RevisionArg>, /// Restrict the diff to these paths #[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] #[arg(add = ArgValueCompleter::new(complete::modified_revision_or_range_files))] paths: Vec<String>, /// Render each file diff entry using the given template /// /// All 0-argument methods of the [`TreeDiffEntry` type] are available as /// keywords in the template expression. 
See [`jj help -k templates`] for /// more information. /// /// [`TreeDiffEntry` type]: /// https://docs.jj-vcs.dev/latest/templates/#treediffentry-type /// /// [`jj help -k templates`]: /// https://docs.jj-vcs.dev/latest/templates/ #[arg( long, short = 'T', conflicts_with_all = ["short-format", "long-format", "tool"], help_heading = "Diff Formatting Options", )] #[arg(add = ArgValueCandidates::new(complete::template_aliases))] template: Option<String>, #[command(flatten)] format: DiffFormatArgs, } #[instrument(skip_all)] pub(crate) fn cmd_diff( ui: &mut Ui, command: &CommandHelper, args: &DiffArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let repo = workspace_command.repo(); let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?; let matcher = fileset_expression.to_matcher(); let from_tree; let to_tree; let mut copy_records = CopyRecords::default(); if args.from.is_some() || args.to.is_some() { let resolve_revision = |r: &Option<RevisionArg>| { workspace_command.resolve_single_rev(ui, r.as_ref().unwrap_or(&RevisionArg::AT)) }; let from = resolve_revision(&args.from)?; let to = resolve_revision(&args.to)?; from_tree = from.tree(); to_tree = to.tree(); let records = get_copy_records(repo.store(), from.id(), to.id(), &matcher)?; copy_records.add_records(records)?; } else { let revision_args = args .revisions .as_deref() .unwrap_or(std::slice::from_ref(&RevisionArg::AT)); let revisions_evaluator = workspace_command.parse_union_revsets(ui, revision_args)?; let target_expression = revisions_evaluator.expression(); let mut gaps_revset = workspace_command .attach_revset_evaluator( target_expression .roots() .range(&target_expression.heads()) .minus(target_expression), ) .evaluate_to_commit_ids()?; if let Some(commit_id) = gaps_revset.next() { return Err(user_error_with_hint( "Cannot diff revsets with gaps in.", format!( "Revision {} would need to be in the set.", short_commit_hash(&commit_id?) 
), )); } let heads: Vec<_> = workspace_command .attach_revset_evaluator(target_expression.heads()) .evaluate_to_commits()? .try_collect()?; let roots: Vec<_> = workspace_command .attach_revset_evaluator(target_expression.roots()) .evaluate_to_commits()? .try_collect()?; // Collect parents outside of revset to preserve parent order let parents: IndexSet<_> = roots.iter().flat_map(|c| c.parents()).try_collect()?; let parents = parents.into_iter().collect_vec(); from_tree = merge_commit_trees(repo.as_ref(), &parents).block_on()?; to_tree = merge_commit_trees(repo.as_ref(), &heads).block_on()?; for p in &parents { for to in &heads { let records = get_copy_records(repo.store(), p.id(), to.id(), &matcher)?; copy_records.add_records(records)?; } } } // -T disables both short/long rendering formats, but it might be okay to // enable long format if explicitly specified (assuming -T is for short or // summary output.) let maybe_template; let diff_renderer; if let Some(text) = &args.template { let language = workspace_command.commit_template_language(); let template = workspace_command .parse_template(ui, &language, text)? .labeled(["diff"]); maybe_template = Some(template); diff_renderer = workspace_command.diff_renderer(vec![]); } else { maybe_template = None; diff_renderer = workspace_command.diff_renderer_for(&args.format)?; } ui.request_pager(); if let Some(template) = &maybe_template { let tree_diff = from_tree.diff_stream_with_copies(&to_tree, &matcher, &copy_records); show_templated(ui.stdout_formatter().as_mut(), tree_diff, template).block_on()?; } diff_renderer .show_diff( ui, ui.stdout_formatter().as_mut(), Diff::new(&from_tree, &to_tree), &matcher, &copy_records, ui.term_width(), ) .block_on()?; print_unmatched_explicit_paths( ui, &workspace_command, &fileset_expression, [&from_tree, &to_tree], )?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/next.rs
cli/src/commands/next.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::movement_util::Direction; use crate::movement_util::MovementArgs; use crate::movement_util::move_to_commit; use crate::ui::Ui; /// Move the working-copy commit to the child revision /// /// The command creates a new empty working copy revision that is the child of a /// descendant `offset` revisions ahead of the parent of the current working /// copy. /// /// For example, when the offset is 1: /// /// ```text /// D D @ /// | |/ /// C @ => C /// |/ | /// B B /// ``` /// /// If `--edit` is passed, the working copy revision is changed to the child of /// the current working copy revision. /// /// ```text /// D D /// | | /// C C /// | | /// B => @ /// | | /// @ A /// ``` #[derive(clap::Args, Clone, Debug)] #[command(verbatim_doc_comment)] pub(crate) struct NextArgs { /// How many revisions to move forward. Advances to the next child by /// default #[arg(default_value = "1")] offset: u64, /// Instead of creating a new working-copy commit on top of the target /// commit (like `jj new`), edit the target commit directly (like `jj /// edit`) /// /// Takes precedence over config in `ui.movement.edit`; i.e. /// will negate `ui.movement.edit = false` #[arg(long, short)] edit: bool, /// The inverse of `--edit` /// /// Takes precedence over config in `ui.movement.edit`; i.e. 
/// will negate `ui.movement.edit = true` #[arg(long, short, conflicts_with = "edit")] no_edit: bool, /// Jump to the next conflicted descendant #[arg(long, conflicts_with = "offset")] conflict: bool, } impl From<&NextArgs> for MovementArgs { fn from(val: &NextArgs) -> Self { Self { offset: val.offset, edit: val.edit, no_edit: val.no_edit, conflict: val.conflict, } } } pub(crate) fn cmd_next( ui: &mut Ui, command: &CommandHelper, args: &NextArgs, ) -> Result<(), CommandError> { move_to_commit(ui, command, Direction::Next, &MovementArgs::from(args)) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/sign.rs
cli/src/commands/sign.rs
// Copyright 2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCompleter; use indexmap::IndexSet; use itertools::Itertools as _; use jj_lib::commit::Commit; use jj_lib::commit::CommitIteratorExt as _; use jj_lib::repo::Repo as _; use jj_lib::revset::RevsetIteratorExt as _; use jj_lib::signing::SignBehavior; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::print_updated_commits; use crate::command_error::CommandError; use crate::command_error::user_error_with_hint; use crate::complete; use crate::ui::Ui; /// Cryptographically sign a revision /// /// This command requires configuring a [commit signing] backend. /// /// [commit signing]: /// https://docs.jj-vcs.dev/latest/config/#commit-signing #[derive(clap::Args, Clone, Debug)] pub struct SignArgs { /// What revision(s) to sign /// /// If no revisions are specified, this defaults to the `revsets.sign` /// setting. /// /// Note that revisions are always re-signed. /// /// While that leads to discomfort for users, which sign with hardware /// devices, as of now we cannot reliably check if a commit is already /// signed by the user without creating a signature (see [#5786]). 
/// /// [#5786]: /// https://github.com/jj-vcs/jj/issues/5786 #[arg(long, short, value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revisions: Vec<RevisionArg>, /// The key used for signing #[arg(long)] key: Option<String>, } pub fn cmd_sign(ui: &mut Ui, command: &CommandHelper, args: &SignArgs) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; if !workspace_command.repo().store().signer().can_sign() { return Err(user_error_with_hint( "No signing backend configured", "For configuring a signing backend, see https://docs.jj-vcs.dev/latest/config/#commit-signing", )); } let revset_expression = if args.revisions.is_empty() { let revset_string = workspace_command.settings().get_string("revsets.sign")?; workspace_command.parse_revset(ui, &RevisionArg::from(revset_string))? } else { workspace_command.parse_union_revsets(ui, &args.revisions)? } .resolve()?; workspace_command.check_rewritable_expr(&revset_expression)?; let to_sign: IndexSet<Commit> = revset_expression .evaluate(workspace_command.repo().as_ref())? 
.iter() .commits(workspace_command.repo().store()) .try_collect()?; let mut tx = workspace_command.start_transaction(); let mut signed_commits = vec![]; let mut num_reparented = 0; tx.repo_mut().transform_descendants( to_sign.iter().ids().cloned().collect_vec(), async |rewriter| { let old_commit = rewriter.old_commit().clone(); let mut commit_builder = rewriter.reparent(); if to_sign.contains(&old_commit) { if let Some(key) = &args.key { commit_builder = commit_builder.set_sign_key(key.clone()); } let new_commit = commit_builder .set_sign_behavior(SignBehavior::Force) .write()?; signed_commits.push(new_commit); } else { commit_builder.write()?; num_reparented += 1; } Ok(()) }, )?; if let Some(mut formatter) = ui.status_formatter() && !signed_commits.is_empty() { writeln!(formatter, "Signed {} commits:", signed_commits.len())?; print_updated_commits( formatter.as_mut(), &tx.commit_summary_template(), &signed_commits, )?; } let num_not_authored_by_me = signed_commits .iter() .filter(|commit| commit.author().email != tx.settings().user_email()) .count(); if num_not_authored_by_me > 0 { writeln!( ui.warning_default(), "{num_not_authored_by_me} of these commits are not authored by you", )?; } if num_reparented > 0 { writeln!(ui.status(), "Rebased {num_reparented} descendant commits")?; } let transaction_description = match &*signed_commits { [] => "".to_string(), [commit] => format!("sign commit {}", commit.id()), commits => format!( "sign commit {} and {} more", commits[0].id(), commits.len() - 1 ), }; tx.finish(ui, transaction_description)?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/status.rs
cli/src/commands/status.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use itertools::Itertools as _; use jj_lib::copies::CopyRecords; use jj_lib::merge::Diff; use jj_lib::merged_tree::MergedTree; use jj_lib::repo::Repo as _; use jj_lib::repo_path::RepoPath; use jj_lib::repo_path::RepoPathBuf; use jj_lib::revset::RevsetExpression; use jj_lib::revset::RevsetFilterPredicate; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::print_conflicted_paths; use crate::cli_util::print_snapshot_stats; use crate::cli_util::print_unmatched_explicit_paths; use crate::command_error::CommandError; use crate::diff_util::DiffFormat; use crate::diff_util::get_copy_records; use crate::formatter::FormatterExt as _; use crate::ui::Ui; /// Show high-level repo status [default alias: st] /// /// This includes: /// /// * The working copy commit and its parents, and a summary of the changes in /// the working copy (compared to the merged parents) /// /// * Conflicts in the working copy /// /// * [Conflicted bookmarks] /// /// [Conflicted bookmarks]: /// https://docs.jj-vcs.dev/latest/bookmarks/#conflicts #[derive(clap::Args, Clone, Debug)] pub(crate) struct StatusArgs { /// Restrict the status display to these paths #[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] paths: Vec<String>, } #[instrument(skip_all)] pub(crate) fn cmd_status( ui: &mut Ui, command: &CommandHelper, args: &StatusArgs, ) -> 
Result<(), CommandError> { let (workspace_command, snapshot_stats) = command.workspace_helper_with_stats(ui)?; print_snapshot_stats( ui, &snapshot_stats, workspace_command.env().path_converter(), )?; let repo = workspace_command.repo(); let maybe_wc_commit = workspace_command .get_wc_commit_id() .map(|id| repo.store().get_commit(id)) .transpose()?; let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?; let matcher = fileset_expression.to_matcher(); ui.request_pager(); let mut formatter = ui.stdout_formatter(); let formatter = formatter.as_mut(); if let Some(wc_commit) = &maybe_wc_commit { let parent_tree = wc_commit.parent_tree(repo.as_ref())?; let tree = wc_commit.tree(); print_unmatched_explicit_paths(ui, &workspace_command, &fileset_expression, [&tree])?; let wc_has_changes = tree.tree_ids() != parent_tree.tree_ids(); let wc_has_untracked = !snapshot_stats.untracked_paths.is_empty(); if !wc_has_changes && !wc_has_untracked { writeln!(formatter, "The working copy has no changes.")?; } else { if wc_has_changes { writeln!(formatter, "Working copy changes:")?; let mut copy_records = CopyRecords::default(); for parent in wc_commit.parent_ids() { let records = get_copy_records(repo.store(), parent, wc_commit.id(), &matcher)?; copy_records.add_records(records)?; } let diff_renderer = workspace_command.diff_renderer(vec![DiffFormat::Summary]); let width = ui.term_width(); diff_renderer .show_diff( ui, formatter, Diff::new(&parent_tree, &tree), &matcher, &copy_records, width, ) .block_on()?; } if wc_has_untracked { writeln!(formatter, "Untracked paths:")?; visit_collapsed_untracked_files( snapshot_stats.untracked_paths.keys(), tree.clone(), |path, is_dir| { let ui_path = workspace_command.path_converter().format_file_path(path); writeln!( formatter.labeled("diff").labeled("untracked"), "? 
{ui_path}{}", if is_dir { std::path::MAIN_SEPARATOR_STR } else { "" } )?; Ok(()) }, ) .block_on()?; } } let template = workspace_command.commit_summary_template(); write!(formatter, "Working copy (@) : ")?; template.format(wc_commit, formatter)?; writeln!(formatter)?; for parent in wc_commit.parents() { let parent = parent?; // "Working copy (@) : " write!(formatter, "Parent commit (@-): ")?; template.format(&parent, formatter)?; writeln!(formatter)?; } if wc_commit.has_conflict() { let conflicts = wc_commit.tree().conflicts_matching(&matcher).collect_vec(); writeln!( formatter.labeled("warning").with_heading("Warning: "), "There are unresolved conflicts at these paths:" )?; print_conflicted_paths(conflicts, formatter, &workspace_command)?; let wc_revset = RevsetExpression::commit(wc_commit.id().clone()); // Ancestors with conflicts, excluding the current working copy commit. let ancestors_conflicts: Vec<_> = workspace_command .attach_revset_evaluator( wc_revset .parents() .ancestors() .filtered(RevsetFilterPredicate::HasConflict) .minus(&workspace_command.env().immutable_expression()), ) .evaluate_to_commit_ids()? 
.try_collect()?; workspace_command.report_repo_conflicts(formatter, repo, ancestors_conflicts)?; } else { for parent in wc_commit.parents() { let parent = parent?; if parent.has_conflict() { writeln!( formatter.labeled("hint").with_heading("Hint: "), "Conflict in parent commit has been resolved in working copy" )?; break; } } } } else { writeln!(formatter, "No working copy")?; } let conflicted_local_bookmarks = repo .view() .local_bookmarks() .filter(|(_, target)| target.has_conflict()) .map(|(bookmark_name, _)| bookmark_name) .collect_vec(); let conflicted_remote_bookmarks = repo .view() .all_remote_bookmarks() .filter(|(_, remote_ref)| remote_ref.target.has_conflict()) .map(|(symbol, _)| symbol) .collect_vec(); if !conflicted_local_bookmarks.is_empty() { writeln!( formatter.labeled("warning").with_heading("Warning: "), "These bookmarks have conflicts:" )?; for name in conflicted_local_bookmarks { write!(formatter, " ")?; write!(formatter.labeled("bookmark"), "{}", name.as_symbol())?; writeln!(formatter)?; } writeln!( formatter.labeled("hint").with_heading("Hint: "), "Use `jj bookmark list` to see details. Use `jj bookmark set <name> -r <rev>` to \ resolve." )?; } if !conflicted_remote_bookmarks.is_empty() { writeln!( formatter.labeled("warning").with_heading("Warning: "), "These remote bookmarks have conflicts:" )?; for symbol in conflicted_remote_bookmarks { write!(formatter, " ")?; write!(formatter.labeled("bookmark"), "{symbol}")?; writeln!(formatter)?; } writeln!( formatter.labeled("hint").with_heading("Hint: "), "Use `jj bookmark list` to see details. Use `jj git fetch` to resolve." 
)?; } Ok(()) } async fn visit_collapsed_untracked_files( untracked_paths: impl IntoIterator<Item = impl AsRef<RepoPath>>, tree: MergedTree, mut on_path: impl FnMut(&RepoPath, bool) -> Result<(), CommandError>, ) -> Result<(), CommandError> { let trees = tree.trees().await?; let mut stack = vec![trees]; // TODO: This loop can be improved with BTreeMap cursors once that's stable, // would remove the need for the whole `skip_prefixed_by` thing and turn it // into a B-tree lookup. let mut skip_prefixed_by_dir: Option<RepoPathBuf> = None; 'untracked: for path in untracked_paths { let path = path.as_ref(); if skip_prefixed_by_dir .as_ref() .is_some_and(|p| path.starts_with(p)) { continue; } else { skip_prefixed_by_dir = None; } let mut it = path.components().dropping_back(1); let first_mismatch = it.by_ref().enumerate().find(|(i, component)| { stack.get(i + 1).is_none_or(|tree| { tree.dir() .components() .next_back() .expect("should always have at least one element (the root)") != *component }) }); if let Some((i, component)) = first_mismatch { stack.truncate(i + 1); for component in std::iter::once(component).chain(it) { let parent = stack .last() .expect("should always have at least one element (the root)"); if let Some(subtree) = parent.sub_tree(component).await? { stack.push(subtree); } else { let dir = parent.dir().join(component); on_path(&dir, true)?; skip_prefixed_by_dir = Some(dir); continue 'untracked; } } } on_path(path, false)?; } Ok(()) } #[cfg(test)] mod test { use testutils::TestRepo; use testutils::TestTreeBuilder; use testutils::repo_path; use super::*; fn collect_collapsed_untracked_files_string( untracked_paths: &[&RepoPath], tree: MergedTree, ) -> String { let mut result = String::new(); visit_collapsed_untracked_files(untracked_paths, tree, |path, is_dir| { result.push_str("? 
"); if is_dir { result.push_str(&path.to_internal_dir_string()); } else { result.push_str(path.as_internal_file_string()); } result.push('\n'); Ok(()) }) .block_on() .unwrap(); result } #[test] fn test_collapsed_untracked_files() { let repo = TestRepo::init(); let tracked = { let mut builder = TestTreeBuilder::new(repo.repo.store().clone()); builder.file(repo_path("top_level_file"), ""); // ? "untracked_top_level_file" // ? "dir" // ? "dir2/c" builder.file(repo_path("dir2/d"), ""); // ? "dir3/partially_tracked/e" builder.file(repo_path("dir3/partially_tracked/f"), ""); // ? "dir3/fully_untracked/" builder.file(repo_path("dir3/j"), ""); // ? "dir3/k" builder.write_merged_tree() }; let untracked = &[ repo_path("untracked_top_level_file"), repo_path("dir/a"), repo_path("dir/b"), repo_path("dir2/c"), repo_path("dir3/partially_tracked/e"), repo_path("dir3/fully_untracked/g"), repo_path("dir3/fully_untracked/h"), repo_path("dir3/k"), ]; insta::assert_snapshot!( collect_collapsed_untracked_files_string(untracked, tracked), @r" ? untracked_top_level_file ? dir/ ? dir2/c ? dir3/partially_tracked/e ? dir3/fully_untracked/ ? dir3/k " ); } }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/help.rs
cli/src/commands/help.rs
// Copyright 2024 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::fmt::Write as _; use std::io::Write as _; use clap::builder::PossibleValue; use clap::builder::StyledStr; use clap::error::ContextKind; use crossterm::style::Stylize as _; use itertools::Itertools as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::command_error::cli_error; use crate::ui::Ui; /// Print this message or the help of the given subcommand(s) #[derive(clap::Args, Clone, Debug)] pub(crate) struct HelpArgs { /// Print help for the subcommand(s) pub(crate) command: Vec<String>, /// Show help for keywords instead of commands #[arg( long, short = 'k', conflicts_with = "command", value_parser = KEYWORDS .iter() .map(|k| PossibleValue::new(k.name).help(k.description)) .collect_vec() )] pub(crate) keyword: Option<String>, } #[instrument(skip_all)] pub(crate) fn cmd_help( ui: &mut Ui, command: &CommandHelper, args: &HelpArgs, ) -> Result<(), CommandError> { if let Some(name) = &args.keyword { let keyword = find_keyword(name).expect("clap should check this with `value_parser`"); ui.request_pager(); write!(ui.stdout(), "{}", keyword.content)?; return Ok(()); } let bin_name = command .string_args() .first() .map_or(command.app().get_name(), |name| name.as_ref()); let mut args_to_get_command = vec![bin_name]; args_to_get_command.extend(args.command.iter().map(|s| s.as_str())); let mut app = 
command.app().clone(); // This propagates global arguments to subcommand, and generates error if // the subcommand doesn't exist. if let Err(err) = app.try_get_matches_from_mut(args_to_get_command) { if err.get(ContextKind::InvalidSubcommand).is_some() { return Err(err.into()); } else { // `help log -- -r`, etc. shouldn't generate an argument error. } } let command = args .command .iter() .try_fold(&mut app, |cmd, name| cmd.find_subcommand_mut(name)) .ok_or_else(|| cli_error(format!("Unknown command: {}", args.command.join(" "))))?; ui.request_pager(); let help_text = command.render_long_help(); if ui.color() { write!(ui.stdout(), "{}", help_text.ansi())?; } else { write!(ui.stdout(), "{help_text}")?; } Ok(()) } #[derive(Clone)] struct Keyword { name: &'static str, description: &'static str, content: &'static str, } // TODO: Add all documentation to keywords // // Maybe adding some code to build.rs to find all the docs files and build the // `KEYWORDS` at compile time. // // It would be cool to follow the docs hierarchy somehow. // // One of the problems would be `config.md`, as it has the same name as a // subcommand. // // TODO: Find a way to render markdown using ANSI escape codes. 
// // Maybe we can steal some ideas from https://github.com/jj-vcs/jj/pull/3130 const KEYWORDS: &[Keyword] = &[ Keyword { name: "bookmarks", description: "Named pointers to revisions (similar to Git's branches)", content: include_str!(concat!("../../", env!("JJ_DOCS_DIR"), "bookmarks.md")), }, Keyword { name: "config", description: "How and where to set configuration options", content: include_str!(concat!("../../", env!("JJ_DOCS_DIR"), "config.md")), }, Keyword { name: "filesets", description: "A functional language for selecting a set of files", content: include_str!(concat!("../../", env!("JJ_DOCS_DIR"), "filesets.md")), }, Keyword { name: "glossary", description: "Definitions of various terms", content: include_str!(concat!("../../", env!("JJ_DOCS_DIR"), "glossary.md")), }, Keyword { name: "revsets", description: "A functional language for selecting a set of revision", content: include_str!(concat!("../../", env!("JJ_DOCS_DIR"), "revsets.md")), }, Keyword { name: "templates", description: "A functional language to customize command output", content: include_str!(concat!("../../", env!("JJ_DOCS_DIR"), "templates.md")), }, Keyword { name: "tutorial", description: "Show a tutorial to get started with jj", content: include_str!(concat!("../../", env!("JJ_DOCS_DIR"), "tutorial.md")), }, ]; fn find_keyword(name: &str) -> Option<&Keyword> { KEYWORDS.iter().find(|keyword| keyword.name == name) } pub fn show_keyword_hint_after_help() -> StyledStr { let mut ret = StyledStr::new(); writeln!( ret, "{} lists available keywords. Use {} to show help for one of these keywords.", "'jj help --help'".bold(), "'jj help -k'".bold(), ) .unwrap(); ret }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/log.rs
cli/src/commands/log.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::cmp::min; use clap_complete::ArgValueCandidates; use clap_complete::ArgValueCompleter; use itertools::Itertools as _; use jj_lib::backend::CommitId; use jj_lib::commit::Commit; use jj_lib::graph::GraphEdge; use jj_lib::graph::GraphEdgeType; use jj_lib::graph::TopoGroupedGraphIterator; use jj_lib::graph::reverse_graph; use jj_lib::repo::Repo as _; use jj_lib::revset::RevsetEvaluationError; use jj_lib::revset::RevsetExpression; use jj_lib::revset::RevsetFilterPredicate; use jj_lib::revset::RevsetIteratorExt as _; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::LogContentFormat; use crate::cli_util::RevisionArg; use crate::cli_util::format_template; use crate::command_error::CommandError; use crate::complete; use crate::diff_util::DiffFormatArgs; use crate::formatter::FormatterExt as _; use crate::graphlog::GraphStyle; use crate::graphlog::get_graphlog; use crate::templater::TemplateRenderer; use crate::ui::Ui; /// Show revision history /// /// Renders a graphical view of the project's history, ordered with children /// before parents. By default, the output only includes mutable revisions, /// along with some additional revisions for context. Use `jj log -r ::` to see /// all revisions. See [`jj help -k revsets`] for information about the syntax. 
/// /// [`jj help -k revsets`]: /// https://docs.jj-vcs.dev/latest/revsets/ /// /// Spans of revisions that are not included in the graph per `--revisions` are /// rendered as a synthetic node labeled "(elided revisions)". /// /// The working-copy commit is indicated by a `@` symbol in the graph. /// [Immutable revisions] have a `◆` symbol. Other commits have a `○` symbol. /// All of these symbols can be [customized]. /// /// [Immutable revisions]: /// https://docs.jj-vcs.dev/latest/config/#set-of-immutable-commits /// /// [customized]: /// https://docs.jj-vcs.dev/latest/config/#node-style #[derive(clap::Args, Clone, Debug)] pub(crate) struct LogArgs { /// Which revisions to show /// /// If no paths nor revisions are specified, this defaults to the /// `revsets.log` setting. #[arg(long, short, value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] revisions: Vec<RevisionArg>, /// Show revisions modifying the given paths #[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] #[arg(add = ArgValueCompleter::new(complete::log_files))] paths: Vec<String>, /// Limit number of revisions to show /// /// Applied after revisions are filtered and reordered topologically, but /// before being reversed. #[arg(long, short = 'n')] limit: Option<usize>, /// Show revisions in the opposite order (older revisions first) #[arg(long)] reversed: bool, /// Don't show the graph, show a flat list of revisions #[arg(long, short = 'G')] no_graph: bool, /// Render each revision using the given template /// /// Run `jj log -T` to list the built-in templates. /// /// You can also specify arbitrary template expressions using the /// [built-in keywords]. See [`jj help -k templates`] for more /// information. /// /// If not specified, this defaults to the `templates.log` setting. 
/// /// [built-in keywords]: /// https://docs.jj-vcs.dev/latest/templates/#commit-keywords /// /// [`jj help -k templates`]: /// https://docs.jj-vcs.dev/latest/templates/ #[arg(long, short = 'T')] #[arg(add = ArgValueCandidates::new(complete::template_aliases))] template: Option<String>, /// Show patch #[arg(long, short = 'p')] patch: bool, /// Print the number of commits instead of showing them #[arg(long, conflicts_with_all = ["DiffFormatArgs", "no_graph", "patch", "reversed", "template"])] count: bool, #[command(flatten)] diff_format: DiffFormatArgs, } #[instrument(skip_all)] pub(crate) fn cmd_log( ui: &mut Ui, command: &CommandHelper, args: &LogArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let settings = workspace_command.settings(); let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?; let mut explicit_paths = fileset_expression.explicit_paths().collect_vec(); let revset_expression = { // only use default revset if neither revset nor path are specified let mut expression = if args.revisions.is_empty() && args.paths.is_empty() { let revset_string = settings.get_string("revsets.log")?; workspace_command.parse_revset(ui, &RevisionArg::from(revset_string))? } else if !args.revisions.is_empty() { workspace_command.parse_union_revsets(ui, &args.revisions)? } else { // a path was specified so we use all() and add path filter later workspace_command.attach_revset_evaluator(RevsetExpression::all()) }; if !args.paths.is_empty() { // Beware that args.paths = ["root:."] is not identical to []. The // former will filter out empty commits. 
let predicate = RevsetFilterPredicate::File(fileset_expression.clone()); expression.intersect_with(&RevsetExpression::filter(predicate)); } expression }; let revset = revset_expression.evaluate()?; if args.count { let (lower, upper) = revset.count_estimate()?; let limit = args.limit.unwrap_or(usize::MAX); let count = if limit <= lower { limit } else if upper == Some(lower) { min(lower, limit) } else { revset .iter() .take(limit) .process_results(|iter| iter.count())? }; let mut formatter = ui.stdout_formatter(); writeln!(formatter, "{count}")?; return Ok(()); } let prio_revset = settings.get_string("revsets.log-graph-prioritize")?; let prio_revset = workspace_command.parse_revset(ui, &RevisionArg::from(prio_revset))?; let repo = workspace_command.repo(); let matcher = fileset_expression.to_matcher(); let store = repo.store(); let diff_renderer = workspace_command.diff_renderer_for_log(&args.diff_format, args.patch)?; let graph_style = GraphStyle::from_settings(settings)?; let use_elided_nodes = settings.get_bool("ui.log-synthetic-elided-nodes")?; let with_content_format = LogContentFormat::new(ui, settings)?; let template: TemplateRenderer<Commit>; let node_template: TemplateRenderer<Option<Commit>>; { let language = workspace_command.commit_template_language(); let template_string = match &args.template { Some(value) => value.clone(), None => settings.get_string("templates.log")?, }; template = workspace_command .parse_template(ui, &language, &template_string)? .labeled(["log", "commit"]); node_template = workspace_command .parse_template(ui, &language, &settings.get_string("templates.log_node")?)? 
.labeled(["log", "commit", "node"]); } { ui.request_pager(); let mut formatter = ui.stdout_formatter(); let formatter = formatter.as_mut(); if !args.no_graph { let mut raw_output = formatter.raw()?; let mut graph = get_graphlog(graph_style, raw_output.as_mut()); let iter: Box<dyn Iterator<Item = _>> = { let mut forward_iter = TopoGroupedGraphIterator::new(revset.iter_graph(), |id| id); let has_commit = revset.containing_fn(); for prio in prio_revset.evaluate_to_commit_ids()? { let prio = prio?; if has_commit(&prio)? { forward_iter.prioritize_branch(prio); } } // The input to TopoGroupedGraphIterator shouldn't be truncated // because the prioritized commit must exist in the input set. let forward_iter = forward_iter.take(args.limit.unwrap_or(usize::MAX)); if args.reversed { Box::new(reverse_graph(forward_iter, |id| id)?.into_iter().map(Ok)) } else { Box::new(forward_iter) } }; for node in iter { let (commit_id, edges) = node?; // The graph is keyed by (CommitId, is_synthetic) let mut graphlog_edges = vec![]; // TODO: Should we update revset.iter_graph() to yield a `has_missing` flag // instead of all the missing edges since we don't care about // where they point here anyway? 
let mut missing_edge_id = None; let mut elided_targets = vec![]; for edge in edges { match edge.edge_type { GraphEdgeType::Missing => { missing_edge_id = Some(edge.target); } GraphEdgeType::Direct => { graphlog_edges.push(GraphEdge::direct((edge.target, false))); } GraphEdgeType::Indirect => { if use_elided_nodes { elided_targets.push(edge.target.clone()); graphlog_edges.push(GraphEdge::direct((edge.target, true))); } else { graphlog_edges.push(GraphEdge::indirect((edge.target, false))); } } } } if let Some(missing_edge_id) = missing_edge_id { graphlog_edges.push(GraphEdge::missing((missing_edge_id, false))); } let mut buffer = vec![]; let key = (commit_id, false); let commit = store.get_commit(&key.0)?; let within_graph = with_content_format.sub_width(graph.width(&key, &graphlog_edges)); within_graph.write(ui.new_formatter(&mut buffer).as_mut(), |formatter| { template.format(&commit, formatter) })?; if let Some(renderer) = &diff_renderer { let mut formatter = ui.new_formatter(&mut buffer); renderer .show_patch( ui, formatter.as_mut(), &commit, matcher.as_ref(), within_graph.width(), ) .block_on()?; } let commit = Some(commit); let node_symbol = format_template(ui, &commit, &node_template); graph.add_node( &key, &graphlog_edges, &node_symbol, &String::from_utf8_lossy(&buffer), )?; let tree = commit.map(|c| c.tree()).unwrap(); // TODO: propagate errors explicit_paths.retain(|&path| tree.path_value(path).unwrap().is_absent()); for elided_target in elided_targets { let elided_key = (elided_target, true); let real_key = (elided_key.0.clone(), false); let edges = [GraphEdge::direct(real_key)]; let mut buffer = vec![]; let within_graph = with_content_format.sub_width(graph.width(&elided_key, &edges)); within_graph.write(ui.new_formatter(&mut buffer).as_mut(), |formatter| { writeln!(formatter.labeled("elided"), "(elided revisions)") })?; let node_symbol = format_template(ui, &None, &node_template); graph.add_node( &elided_key, &edges, &node_symbol, 
&String::from_utf8_lossy(&buffer), )?; } } } else { let iter: Box<dyn Iterator<Item = Result<CommitId, RevsetEvaluationError>>> = { let forward_iter = revset.iter().take(args.limit.unwrap_or(usize::MAX)); if args.reversed { let entries: Vec<_> = forward_iter.try_collect()?; Box::new(entries.into_iter().rev().map(Ok)) } else { Box::new(forward_iter) } }; for commit_or_error in iter.commits(store) { let commit = commit_or_error?; with_content_format .write(formatter, |formatter| template.format(&commit, formatter))?; if let Some(renderer) = &diff_renderer { let width = ui.term_width(); renderer .show_patch(ui, formatter, &commit, matcher.as_ref(), width) .block_on()?; } let tree = commit.tree(); // TODO: propagate errors explicit_paths.retain(|&path| tree.path_value(path).unwrap().is_absent()); } } if !explicit_paths.is_empty() { let ui_paths = explicit_paths .iter() .map(|&path| workspace_command.format_file_path(path)) .join(", "); writeln!( ui.warning_default(), "No matching entries for paths: {ui_paths}" )?; } } // Check to see if the user might have specified a path when they intended // to specify a revset. if let ([], [only_path]) = (args.revisions.as_slice(), args.paths.as_slice()) { if only_path == "." && workspace_command.parse_file_path(only_path)?.is_root() { // For users of e.g. Mercurial, where `.` indicates the current commit. writeln!( ui.warning_default(), "The argument {only_path:?} is being interpreted as a fileset expression, but \ this is often not useful because all non-empty commits touch '.'. If you meant \ to show the working copy commit, pass -r '@' instead." )?; } else if revset.is_empty() && workspace_command .parse_revset(ui, &RevisionArg::from(only_path.to_owned())) .is_ok() { writeln!( ui.warning_default(), "The argument {only_path:?} is being interpreted as a fileset expression. To \ specify a revset, pass -r {only_path:?} instead." )?; } } Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/root.rs
cli/src/commands/root.rs
// Copyright 2024 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write as _; use jj_lib::file_util; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::ui::Ui; /// Show the current workspace root directory (shortcut for `jj workspace root`) #[derive(clap::Args, Clone, Debug)] pub(crate) struct RootArgs {} #[instrument(skip_all)] pub(crate) fn cmd_root( ui: &mut Ui, command: &CommandHelper, RootArgs {}: &RootArgs, ) -> Result<(), CommandError> { let loader = command.workspace_loader()?; let path_bytes = file_util::path_to_bytes(loader.workspace_root()).map_err(user_error)?; ui.stdout().write_all(path_bytes)?; writeln!(ui.stdout())?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/commit.rs
cli/src/commands/commit.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCandidates; use clap_complete::ArgValueCompleter; use indoc::writedoc; use jj_lib::backend::Signature; use jj_lib::merge::Diff; use jj_lib::object_id::ObjectId as _; use jj_lib::repo::Repo as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::complete; use crate::description_util::add_trailers; use crate::description_util::description_template; use crate::description_util::edit_description; use crate::description_util::join_message_paragraphs; use crate::text_util::parse_author; use crate::ui::Ui; /// Update the description and create a new change on top [default alias: ci] /// /// When called without path arguments or `--interactive`, `jj commit` is /// equivalent to `jj describe` followed by `jj new`. /// /// When using `--interactive` or path arguments, the selected changes stay in /// the current commit while the remaining changes are moved to a new /// working-copy commit on top. This is very similar to `jj split`. Differences /// include: /// /// * `jj commit` is not interactive by default (it selects all changes). /// /// * `jj commit` doesn't have a `-r` option. It always acts on the working-copy /// commit (@). /// /// * `jj split` (without `-o`/`-A`/`-B`) will move bookmarks forward from the /// old change to the child change. 
`jj commit` doesn't move bookmarks /// forward. /// /// * `jj split` allows you to move the selected changes to a different /// destination with `-o`/`-A`/`-B`. #[derive(clap::Args, Clone, Debug)] pub(crate) struct CommitArgs { /// Interactively choose which changes to include in the current commit #[arg(short, long)] interactive: bool, /// Specify diff editor to be used (implies --interactive) #[arg(long, value_name = "NAME")] #[arg(add = ArgValueCandidates::new(complete::diff_editors))] tool: Option<String>, /// The change description to use (don't open editor) #[arg(long = "message", short, value_name = "MESSAGE")] message_paragraphs: Vec<String>, /// Open an editor to edit the change description /// /// Forces an editor to open when using `--message` to allow the /// message to be edited afterwards. #[arg(long)] editor: bool, /// Put these paths in the current commit #[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] #[arg(add = ArgValueCompleter::new(complete::modified_files))] paths: Vec<String>, // TODO: Delete in jj 0.40.0+ /// Reset the author to the configured user /// /// This resets the author name, email, and timestamp. /// /// You can use it in combination with the JJ_USER and JJ_EMAIL /// environment variables to set a different author: /// /// $ JJ_USER='Foo Bar' JJ_EMAIL=foo@bar.com jj commit --reset-author #[arg(long, hide = true)] reset_author: bool, // TODO: Delete in jj 0.40.0+ /// Set author to the provided string /// /// This changes author name and email while retaining author /// timestamp for non-discardable commits. 
#[arg( long, hide = true, conflicts_with = "reset_author", value_parser = parse_author )] author: Option<(String, String)>, } #[instrument(skip_all)] pub(crate) fn cmd_commit( ui: &mut Ui, command: &CommandHelper, args: &CommitArgs, ) -> Result<(), CommandError> { if args.reset_author { writeln!( ui.warning_default(), "`jj commit --reset-author` is deprecated; use `jj metaedit --update-author` instead" )?; } if args.author.is_some() { writeln!( ui.warning_default(), "`jj commit --author` is deprecated; use `jj metaedit --author` instead" )?; } let mut workspace_command = command.workspace_helper(ui)?; let commit_id = workspace_command .get_wc_commit_id() .ok_or_else(|| user_error("This command requires a working copy"))?; let commit = workspace_command.repo().store().get_commit(commit_id)?; let matcher = workspace_command .parse_file_patterns(ui, &args.paths)? .to_matcher(); let advanceable_bookmarks = workspace_command.get_advanceable_bookmarks(ui, commit.parent_ids())?; let diff_selector = workspace_command.diff_selector(ui, args.tool.as_deref(), args.interactive)?; let text_editor = workspace_command.text_editor()?; let mut tx = workspace_command.start_transaction(); let base_tree = commit.parent_tree(tx.repo())?; let format_instructions = || { format!( "\ You are splitting the working-copy commit: {} The diff initially shows all changes. Adjust the right side until it shows the contents you want for the first commit. The remainder will be included in the new working-copy commit. 
", tx.format_commit_summary(&commit) ) }; let tree = diff_selector.select( Diff::new(&base_tree, &commit.tree()), matcher.as_ref(), format_instructions, )?; if !args.paths.is_empty() && tree.tree_ids() == base_tree.tree_ids() { writeln!( ui.warning_default(), "The given paths do not match any file: {}", args.paths.join(" ") )?; } let mut commit_builder = tx.repo_mut().rewrite_commit(&commit).detach(); commit_builder.set_tree(tree); if args.reset_author { commit_builder.set_author(commit_builder.committer().clone()); } if let Some((name, email)) = args.author.clone() { let new_author = Signature { name, email, timestamp: commit_builder.author().timestamp, }; commit_builder.set_author(new_author); } let description = if !args.message_paragraphs.is_empty() { let mut description = join_message_paragraphs(&args.message_paragraphs); if !description.is_empty() || args.editor { // The first trailer would become the first line of the description. // Also, a commit with no description is treated in a special way in jujutsu: it // can be discarded as soon as it's no longer the working copy. Adding a // trailer to an empty description would break that logic. commit_builder.set_description(description); description = add_trailers(ui, &tx, &commit_builder)?; } description } else { add_trailers(ui, &tx, &commit_builder)? }; let description = if args.message_paragraphs.is_empty() || args.editor { commit_builder.set_description(description); let temp_commit = commit_builder.write_hidden()?; let intro = ""; let description = description_template(ui, &tx, intro, &temp_commit)?; let description = edit_description(&text_editor, &description)?; if description.is_empty() { writedoc!( ui.hint_default(), " The commit message was left empty. If this was not intentional, run `jj undo` to restore the previous state. Or run `jj desc @-` to add a description to the parent commit. 
" )?; } description } else { description }; commit_builder.set_description(description); let new_commit = commit_builder.write(tx.repo_mut())?; let workspace_names = tx.repo().view().workspaces_for_wc_commit_id(commit.id()); if !workspace_names.is_empty() { let new_wc_commit = tx .repo_mut() .new_commit(vec![new_commit.id().clone()], commit.tree()) .write()?; // Does nothing if there's no bookmarks to advance. tx.advance_bookmarks(advanceable_bookmarks, new_commit.id())?; for name in workspace_names { tx.repo_mut().edit(name, &new_wc_commit).unwrap(); } } tx.finish(ui, format!("commit {}", commit.id().hex()))?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/simplify_parents.rs
cli/src/commands/simplify_parents.rs
use std::collections::HashSet; use clap_complete::ArgValueCompleter; use itertools::Itertools as _; use jj_lib::backend::BackendError; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::command_error::CommandError; use crate::complete; use crate::ui::Ui; /// Simplify parent edges for the specified revision(s). /// /// Removes all parents of each of the specified revisions that are also /// indirect ancestors of the same revisions through other parents. This has no /// effect on any revision's contents, including the working copy. /// /// In other words, for all (A, B, C) where A has (B, C) as parents and C is an /// ancestor of B, A will be rewritten to have only B as a parent instead of /// B+C. #[derive(clap::Args, Clone, Debug)] pub(crate) struct SimplifyParentsArgs { /// Simplify specified revision(s) together with their trees of descendants /// (can be repeated) #[arg(long, short, value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] source: Vec<RevisionArg>, /// Simplify specified revision(s) (can be repeated) /// /// If both `--source` and `--revisions` are not provided, this defaults to /// the `revsets.simplify-parents` setting, or `reachable(@, mutable())` /// if it is not set. #[arg(long, short, value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revisions: Vec<RevisionArg>, } pub(crate) fn cmd_simplify_parents( ui: &mut Ui, command: &CommandHelper, args: &SimplifyParentsArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; let revs = if args.source.is_empty() && args.revisions.is_empty() { let revs = workspace_command .settings() .get_string("revsets.simplify-parents")?; workspace_command .parse_revset(ui, &RevisionArg::from(revs))? .resolve()? } else { workspace_command .parse_union_revsets(ui, &args.source)? .resolve()? 
.descendants() .union( &workspace_command .parse_union_revsets(ui, &args.revisions)? .resolve()?, ) }; workspace_command.check_rewritable_expr(&revs)?; let commit_ids: Vec<_> = revs .evaluate(workspace_command.repo().as_ref())? .iter() .try_collect()?; let commit_ids_set: HashSet<_> = commit_ids.iter().cloned().collect(); let num_orig_commits = commit_ids.len(); let mut tx = workspace_command.start_transaction(); let mut simplified_commits = 0; let mut edges = 0; let mut reparented_descendants = 0; tx.repo_mut() .transform_descendants(commit_ids, async |mut rewriter| { let num_old_heads = rewriter.new_parents().len(); if commit_ids_set.contains(rewriter.old_commit().id()) && num_old_heads > 1 { // TODO: BackendError is not the right error here because // the error does not come from `Backend`, but `Index`. rewriter .simplify_ancestor_merge() .map_err(|err| BackendError::Other(err.into()))?; } let num_new_heads = rewriter.new_parents().len(); if rewriter.parents_changed() { rewriter.reparent().write()?; if num_new_heads < num_old_heads { simplified_commits += 1; edges += num_old_heads - num_new_heads; } else { reparented_descendants += 1; } } Ok(()) })?; if let Some(mut formatter) = ui.status_formatter() && simplified_commits > 0 { writeln!( formatter, "Removed {edges} edges from {simplified_commits} out of {num_orig_commits} commits.", )?; if reparented_descendants > 0 { writeln!( formatter, "Rebased {reparented_descendants} descendant commits", )?; } } tx.finish(ui, format!("simplify {num_orig_commits} commits"))?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/describe.rs
cli/src/commands/describe.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashMap; use std::io; use std::io::Read as _; use std::iter; use clap_complete::ArgValueCompleter; use itertools::Itertools as _; use jj_lib::backend::Signature; use jj_lib::object_id::ObjectId as _; use jj_lib::repo::Repo as _; use jj_lib::revset::RevsetIteratorExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::complete; use crate::description_util::ParsedBulkEditMessage; use crate::description_util::add_trailers_with_template; use crate::description_util::description_template; use crate::description_util::edit_description; use crate::description_util::edit_multiple_descriptions; use crate::description_util::join_message_paragraphs; use crate::description_util::parse_trailers_template; use crate::text_util::complete_newline; use crate::text_util::parse_author; use crate::ui::Ui; /// Update the change description or other metadata [default alias: desc] /// /// Starts an editor to let you edit the description of changes. The editor /// will be $EDITOR, or `nano` if that's not defined (`Notepad` on Windows). 
#[derive(clap::Args, Clone, Debug)] pub(crate) struct DescribeArgs { /// The revision(s) whose description to edit (default: @) [aliases: -r] #[arg(value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revisions_pos: Vec<RevisionArg>, #[arg(short = 'r', hide = true, value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revisions_opt: Vec<RevisionArg>, /// The change description to use (don't open editor) /// /// If multiple revisions are specified, the same description will be used /// for all of them. #[arg( long = "message", short, value_name = "MESSAGE", conflicts_with = "stdin" )] message_paragraphs: Vec<String>, /// Read the change description from stdin /// /// If multiple revisions are specified, the same description will be used /// for all of them. #[arg(long)] stdin: bool, // TODO: Delete in jj 0.40.0+ /// Don't open an editor /// /// This is mainly useful in combination with e.g. `--reset-author`. #[arg(long, hide = true, conflicts_with_all = ["edit", "editor"])] no_edit: bool, /// Open an editor to edit the change description /// /// Forces an editor to open when using `--stdin` or `--message` to /// allow the message to be edited afterwards. #[arg(long)] editor: bool, // TODO: Delete in jj 0.42.0+ /// Open an editor to edit the change description /// /// Forces an editor to open when using `--stdin` or `--message` to /// allow the message to be edited afterwards. #[arg(long, hide = true, conflicts_with = "editor")] edit: bool, // TODO: Delete in jj 0.40.0+ /// Reset the author name, email, and timestamp /// /// This resets the author name and email to the configured user and sets /// the author timestamp to the current time. 
/// /// You can use it in combination with the JJ_USER and JJ_EMAIL /// environment variables to set a different author: /// /// $ JJ_USER='Foo Bar' JJ_EMAIL=foo@bar.com jj describe --reset-author #[arg(long, hide = true)] reset_author: bool, // TODO: Delete in jj 0.40.0+ /// Set author to the provided string /// /// This changes author name and email while retaining author /// timestamp for non-discardable commits. #[arg( long, hide = true, conflicts_with = "reset_author", value_parser = parse_author )] author: Option<(String, String)>, } #[instrument(skip_all)] pub(crate) fn cmd_describe( ui: &mut Ui, command: &CommandHelper, args: &DescribeArgs, ) -> Result<(), CommandError> { if args.no_edit { writeln!( ui.warning_default(), "`jj describe --no-edit` is deprecated; use `jj metaedit` instead" )?; } if args.edit { writeln!( ui.warning_default(), "`jj describe --edit` is deprecated; use `jj describe --editor` instead" )?; } if args.reset_author { writeln!( ui.warning_default(), "`jj describe --reset-author` is deprecated; use `jj metaedit --update-author` instead" )?; } if args.author.is_some() { writeln!( ui.warning_default(), "`jj describe --author` is deprecated; use `jj metaedit --author` instead" )?; } let mut workspace_command = command.workspace_helper(ui)?; let target_expr = if !args.revisions_pos.is_empty() || !args.revisions_opt.is_empty() { workspace_command .parse_union_revsets(ui, &[&*args.revisions_pos, &*args.revisions_opt].concat())? } else { workspace_command.parse_revset(ui, &RevisionArg::AT)? } .resolve()?; workspace_command.check_rewritable_expr(&target_expr)?; let commits: Vec<_> = target_expr .evaluate(workspace_command.repo().as_ref())? 
.iter() .commits(workspace_command.repo().store()) // in reverse topological order .try_collect()?; if commits.is_empty() { writeln!(ui.status(), "No revisions to describe.")?; return Ok(()); } let text_editor = workspace_command.text_editor()?; let mut tx = workspace_command.start_transaction(); let tx_description = match commits.as_slice() { [] => unreachable!(), [commit] => format!("describe commit {}", commit.id().hex()), [first_commit, remaining_commits @ ..] => { format!( "describe commit {} and {} more", first_commit.id().hex(), remaining_commits.len() ) } }; let shared_description = if args.stdin { let mut buffer = String::new(); io::stdin().read_to_string(&mut buffer)?; Some(complete_newline(buffer)) } else if !args.message_paragraphs.is_empty() { Some(join_message_paragraphs(&args.message_paragraphs)) } else { None }; let mut commit_builders = commits .iter() .map(|commit| { let mut commit_builder = tx.repo_mut().rewrite_commit(commit).detach(); if let Some(description) = &shared_description { commit_builder.set_description(description); } if args.reset_author { let new_author = commit_builder.committer().clone(); commit_builder.set_author(new_author); } if let Some((name, email)) = args.author.clone() { let new_author = Signature { name, email, timestamp: commit_builder.author().timestamp, }; commit_builder.set_author(new_author); } commit_builder }) .collect_vec(); let use_editor = args.editor || args.edit || (shared_description.is_none() && !args.no_edit); if let Some(trailer_template) = parse_trailers_template(ui, &tx)? { for commit_builder in &mut commit_builders { // The first trailer would become the first line of the description. // Also, a commit with no description is treated in a special way in jujutsu: it // can be discarded as soon as it's no longer the working copy. Adding a // trailer to an empty description would break that logic. 
if use_editor || !commit_builder.description().is_empty() { let temp_commit = commit_builder.write_hidden()?; let new_description = add_trailers_with_template(&trailer_template, &temp_commit)?; commit_builder.set_description(new_description); } } } if use_editor { let temp_commits: Vec<_> = iter::zip(&commits, &commit_builders) // Edit descriptions in topological order .rev() .map(|(commit, commit_builder)| { commit_builder .write_hidden() .map(|temp_commit| (commit.id(), temp_commit)) }) .try_collect()?; if let [(_, temp_commit)] = &*temp_commits { let intro = ""; let template = description_template(ui, &tx, intro, temp_commit)?; let description = edit_description(&text_editor, &template)?; commit_builders[0].set_description(description); } else { let ParsedBulkEditMessage { descriptions, missing, duplicates, unexpected, } = edit_multiple_descriptions(ui, &text_editor, &tx, &temp_commits)?; if !missing.is_empty() { return Err(user_error(format!( "The description for the following commits were not found in the edited \ message: {}", missing.join(", ") ))); } if !duplicates.is_empty() { return Err(user_error(format!( "The following commits were found in the edited message multiple times: {}", duplicates.join(", ") ))); } if !unexpected.is_empty() { return Err(user_error(format!( "The following commits were not being edited, but were found in the edited \ message: {}", unexpected.join(", ") ))); } for (commit, commit_builder) in iter::zip(&commits, &mut commit_builders) { let description = descriptions.get(commit.id()).unwrap(); commit_builder.set_description(description); } } }; // Filter out unchanged commits to avoid rebasing descendants in // `transform_descendants` below unnecessarily. let commit_builders: HashMap<_, _> = iter::zip(&commits, commit_builders) .filter(|(old_commit, commit_builder)| { old_commit.description() != commit_builder.description() || args.reset_author // Ignore author timestamp which could be updated if the old // commit was discardable. 
|| old_commit.author().name != commit_builder.author().name || old_commit.author().email != commit_builder.author().email }) .map(|(old_commit, commit_builder)| (old_commit.id(), commit_builder)) .collect(); let mut num_described = 0; let mut num_reparented = 0; // Even though `MutableRepo::rewrite_commit` and // `MutableRepo::rebase_descendants` can handle rewriting of a commit even // if it is a descendant of another commit being rewritten, using // `MutableRepo::transform_descendants` prevents us from rewriting the same // commit multiple times, and adding additional entries in the predecessor // chain. tx.repo_mut().transform_descendants( commit_builders.keys().map(|&id| id.clone()).collect(), async |rewriter| { let old_commit_id = rewriter.old_commit().id().clone(); let commit_builder = rewriter.reparent(); if let Some(temp_builder) = commit_builders.get(&old_commit_id) { commit_builder .set_description(temp_builder.description()) .set_author(temp_builder.author().clone()) // Copy back committer for consistency with author timestamp .set_committer(temp_builder.committer().clone()) .write()?; num_described += 1; } else { commit_builder.write()?; num_reparented += 1; } Ok(()) }, )?; if num_described > 1 { writeln!(ui.status(), "Updated {num_described} commits")?; } if num_reparented > 0 { writeln!(ui.status(), "Rebased {num_reparented} descendant commits")?; } tx.finish(ui, tx_description)?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/undo.rs
cli/src/commands/undo.rs
// Copyright 2025 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCandidates; use itertools::Itertools as _; use jj_lib::object_id::ObjectId as _; use jj_lib::op_store::OperationId; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::command_error::internal_error; use crate::command_error::user_error; use crate::command_error::user_error_with_hint; #[cfg(feature = "git")] use crate::commands::git::is_push_operation; use crate::commands::operation::DEFAULT_REVERT_WHAT; use crate::commands::operation::RevertWhatToRestore; use crate::commands::operation::revert::OperationRevertArgs; use crate::commands::operation::revert::cmd_op_revert; use crate::commands::operation::view_with_desired_portions_restored; use crate::complete; use crate::ui::Ui; /// Undo the last operation /// /// If used once after a normal (non-`undo`) operation, this will undo that last /// operation by restoring its parent. If `jj undo` is used repeatedly, it will /// restore increasingly older operations, going further back into the past. /// /// There is also a complementary `jj redo` command that would instead move in /// the direction of the future after one or more `jj undo`s. /// /// Use `jj op log` to visualize the log of past operations, including a /// detailed description of any past undo/redo operations. 
See also `jj op /// restore` to explicitly restore an older operation by its id (available in /// the operation log). #[derive(clap::Args, Clone, Debug)] pub struct UndoArgs { /// (deprecated, use `jj op revert <operation>`) /// /// The operation to undo /// /// Use `jj op log` to find an operation to undo. // TODO: Delete in jj 0.39+ #[arg(default_value = "@")] #[arg(add = ArgValueCandidates::new(complete::operations))] operation: String, /// (deprecated, use `jj op revert --what`) /// /// What portions of the local state to restore (can be repeated) /// /// This option is EXPERIMENTAL. #[arg(long, value_enum, hide = true, default_values_t = DEFAULT_REVERT_WHAT)] what: Vec<RevertWhatToRestore>, } pub(crate) const UNDO_OP_DESC_PREFIX: &str = "undo: restore to operation "; pub fn cmd_undo(ui: &mut Ui, command: &CommandHelper, args: &UndoArgs) -> Result<(), CommandError> { if args.operation != "@" { writeln!( ui.warning_default(), "`jj undo <operation>` is deprecated; use `jj op revert <operation>` instead" )?; let args = OperationRevertArgs { operation: args.operation.clone(), what: args.what.clone(), }; return cmd_op_revert(ui, command, &args); } if args.what != DEFAULT_REVERT_WHAT { writeln!( ui.warning_default(), "`jj undo --what` is deprecated; use `jj op revert --what` instead" )?; let args = OperationRevertArgs { operation: args.operation.clone(), what: args.what.clone(), }; return cmd_op_revert(ui, command, &args); } let mut workspace_command = command.workspace_helper(ui)?; let mut op_to_undo = workspace_command.resolve_single_op(&args.operation)?; // Growing the "undo-stack" works as follows. See also the // [redo-stack](./redo.rs), which works in a similar way. // // - If the operation to undo is a regular one (not an undo-operation), simply // undo it (== restore its parent). // - If the operation to undo is an undo-operation itself, undo that operation // to which the previous undo-operation restored the repo. 
// - If the operation to restore to is an undo-operation, restore directly to // the original operation. This avoids creating a linked list of // undo-operations, which subsequently may have to be walked with an // inefficient loop. // // This described behavior leads to "jumping over" old undo-stacks if the // current one grows into it. Consider this op-log example: // // * G "undo: restore A" -------+ // | | // * F "undo: restore B" -----+ | // | | | // * E | | // | | | // * D "undo: restore B" -+ | | // | | | | // * C | | | // | | | | // * B <----------------+ <-+ | // | | // * A <----------------------+ // // It was produced by the following sequence of events: // - do normal operations A, B and C // - undo C, restoring to B // - do normal operation E // - undo E, restoring to B again (NOT to D) // - undo F, restoring to A // // Notice that running `undo` after having undone E leads to A being // restored (as opposed to C). The undo-stack spanning from F to B was // "jumped over". // if let Some(id_of_restored_op) = op_to_undo .metadata() .description .strip_prefix(UNDO_OP_DESC_PREFIX) { let Some(id_of_restored_op) = OperationId::try_from_hex(id_of_restored_op) else { return Err(internal_error( "Failed to parse ID of restored operation in undo-stack", )); }; op_to_undo = workspace_command .repo() .loader() .load_operation(&id_of_restored_op)?; } #[cfg(feature = "git")] if is_push_operation(&op_to_undo) { writeln!( ui.warning_default(), "Undoing a push operation often leads to conflicted bookmarks." 
)?; writeln!(ui.hint_default(), "To avoid this, run `jj redo` now.")?; }; let mut op_to_restore = match op_to_undo.parents().at_most_one() { Ok(Some(parent_of_op_to_undo)) => parent_of_op_to_undo?, Ok(None) => return Err(user_error("Cannot undo root operation")), Err(_) => { return Err(user_error_with_hint( "Cannot undo a merge operation", "Consider using `jj op restore` instead", )); } }; // Avoid the creation of a linked list by restoring to the original // operation directly, if we're about to restore an undo-operation. If we // didn't to this, repeated calls of `jj new ; jj undo` would create an // ever-growing linked list of undo-operations that restore each other. // Calling `jj undo` one more time would have to restore to the operation // at the very beginning of the linked list, which would require walking the // entire thing unnecessarily. if let Some(original_op) = op_to_restore .metadata() .description .strip_prefix(UNDO_OP_DESC_PREFIX) { let Some(id_of_original_op) = OperationId::try_from_hex(original_op) else { return Err(internal_error( "Failed to parse ID of restored operation in undo-stack", )); }; op_to_restore = workspace_command .repo() .loader() .load_operation(&id_of_original_op)?; } let mut tx = workspace_command.start_transaction(); let new_view = view_with_desired_portions_restored( op_to_restore.view()?.store_view(), tx.base_repo().view().store_view(), &DEFAULT_REVERT_WHAT, ); tx.repo_mut().set_view(new_view); if let Some(mut formatter) = ui.status_formatter() { write!(formatter, "Restored to operation: ")?; let template = tx.base_workspace_helper().operation_summary_template(); template.format(&op_to_restore, formatter.as_mut())?; writeln!(formatter)?; } tx.finish( ui, format!("{UNDO_OP_DESC_PREFIX}{}", op_to_restore.id().hex()), )?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/mod.rs
cli/src/commands/mod.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. mod abandon; mod absorb; #[cfg(feature = "bench")] mod bench; mod bisect; mod bookmark; mod commit; mod config; mod debug; mod describe; mod diff; mod diffedit; mod duplicate; mod edit; mod evolog; mod file; mod fix; #[cfg(feature = "git")] mod gerrit; #[cfg(feature = "git")] mod git; mod help; mod interdiff; mod log; mod metaedit; mod new; mod next; mod operation; mod parallelize; mod prev; mod rebase; mod redo; mod resolve; mod restore; mod revert; mod root; mod run; mod show; mod sign; mod simplify_parents; mod sparse; mod split; mod squash; mod status; mod tag; mod undo; mod unsign; mod util; mod version; mod workspace; use std::fmt::Debug; use clap::CommandFactory as _; use clap::FromArgMatches as _; use clap::Subcommand as _; use clap::builder::Styles; use clap::builder::styling::AnsiColor; use clap_complete::engine::SubcommandCandidates; use tracing::instrument; use crate::cli_util::Args; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::complete; use crate::ui::Ui; const STYLES: Styles = Styles::styled() .header(AnsiColor::Yellow.on_default().bold()) .usage(AnsiColor::Yellow.on_default().bold()) .literal(AnsiColor::Green.on_default().bold()) .placeholder(AnsiColor::Green.on_default()); #[derive(clap::Parser, Clone, Debug)] #[command(styles = STYLES)] #[command(disable_help_subcommand = true)] #[command(after_long_help = 
help::show_keyword_hint_after_help())] #[command(add = SubcommandCandidates::new(complete::aliases))] enum Command { Abandon(abandon::AbandonArgs), Absorb(absorb::AbsorbArgs), #[cfg(feature = "bench")] #[command(subcommand)] Bench(bench::BenchCommand), #[command(subcommand)] Bisect(bisect::BisectCommand), #[command(subcommand)] Bookmark(bookmark::BookmarkCommand), Commit(commit::CommitArgs), #[command(subcommand)] Config(config::ConfigCommand), #[command(subcommand)] Debug(debug::DebugCommand), Describe(describe::DescribeArgs), Diff(diff::DiffArgs), Diffedit(diffedit::DiffeditArgs), Duplicate(duplicate::DuplicateArgs), Edit(edit::EditArgs), #[command(alias = "obslog", visible_alias = "evolution-log")] Evolog(evolog::EvologArgs), #[command(subcommand)] File(file::FileCommand), Fix(fix::FixArgs), #[cfg(feature = "git")] #[command(subcommand)] Gerrit(gerrit::GerritCommand), #[cfg(feature = "git")] #[command(subcommand)] Git(git::GitCommand), Help(help::HelpArgs), Interdiff(interdiff::InterdiffArgs), Log(log::LogArgs), Metaedit(metaedit::MetaeditArgs), New(new::NewArgs), Next(next::NextArgs), #[command(subcommand)] #[command(visible_alias = "op")] Operation(operation::OperationCommand), Parallelize(parallelize::ParallelizeArgs), Prev(prev::PrevArgs), Rebase(rebase::RebaseArgs), Redo(redo::RedoArgs), Resolve(resolve::ResolveArgs), Restore(restore::RestoreArgs), Revert(revert::RevertArgs), Root(root::RootArgs), #[command(hide = true)] // TODO: Flesh out. 
Run(run::RunArgs), Show(show::ShowArgs), Sign(sign::SignArgs), SimplifyParents(simplify_parents::SimplifyParentsArgs), #[command(subcommand)] Sparse(sparse::SparseCommand), Split(split::SplitArgs), Squash(squash::SquashArgs), Status(status::StatusArgs), #[command(subcommand)] Tag(tag::TagCommand), Undo(undo::UndoArgs), Unsign(unsign::UnsignArgs), #[command(subcommand)] Util(util::UtilCommand), Version(version::VersionArgs), #[command(subcommand)] Workspace(workspace::WorkspaceCommand), } pub fn default_app() -> clap::Command { Command::augment_subcommands(Args::command()) } #[instrument(skip_all)] pub fn run_command(ui: &mut Ui, command_helper: &CommandHelper) -> Result<(), CommandError> { let subcommand = Command::from_arg_matches(command_helper.matches()).unwrap(); match &subcommand { Command::Abandon(args) => abandon::cmd_abandon(ui, command_helper, args), Command::Absorb(args) => absorb::cmd_absorb(ui, command_helper, args), #[cfg(feature = "bench")] Command::Bench(args) => bench::cmd_bench(ui, command_helper, args), Command::Bisect(args) => bisect::cmd_bisect(ui, command_helper, args), Command::Bookmark(args) => bookmark::cmd_bookmark(ui, command_helper, args), Command::Commit(args) => commit::cmd_commit(ui, command_helper, args), Command::Config(args) => config::cmd_config(ui, command_helper, args), Command::Debug(args) => debug::cmd_debug(ui, command_helper, args), Command::Describe(args) => describe::cmd_describe(ui, command_helper, args), Command::Diff(args) => diff::cmd_diff(ui, command_helper, args), Command::Diffedit(args) => diffedit::cmd_diffedit(ui, command_helper, args), Command::Duplicate(args) => duplicate::cmd_duplicate(ui, command_helper, args), Command::Edit(args) => edit::cmd_edit(ui, command_helper, args), Command::Evolog(args) => evolog::cmd_evolog(ui, command_helper, args), Command::File(args) => file::cmd_file(ui, command_helper, args), Command::Fix(args) => fix::cmd_fix(ui, command_helper, args), #[cfg(feature = "git")] 
Command::Gerrit(sub_args) => gerrit::cmd_gerrit(ui, command_helper, sub_args), #[cfg(feature = "git")] Command::Git(args) => git::cmd_git(ui, command_helper, args), Command::Help(args) => help::cmd_help(ui, command_helper, args), Command::Interdiff(args) => interdiff::cmd_interdiff(ui, command_helper, args), Command::Log(args) => log::cmd_log(ui, command_helper, args), Command::Metaedit(args) => metaedit::cmd_metaedit(ui, command_helper, args), Command::New(args) => new::cmd_new(ui, command_helper, args), Command::Next(args) => next::cmd_next(ui, command_helper, args), Command::Operation(args) => operation::cmd_operation(ui, command_helper, args), Command::Parallelize(args) => parallelize::cmd_parallelize(ui, command_helper, args), Command::Prev(args) => prev::cmd_prev(ui, command_helper, args), Command::Rebase(args) => rebase::cmd_rebase(ui, command_helper, args), Command::Redo(args) => redo::cmd_redo(ui, command_helper, args), Command::Resolve(args) => resolve::cmd_resolve(ui, command_helper, args), Command::Restore(args) => restore::cmd_restore(ui, command_helper, args), Command::Revert(args) => revert::cmd_revert(ui, command_helper, args), Command::Root(args) => root::cmd_root(ui, command_helper, args), Command::Run(args) => run::cmd_run(ui, command_helper, args), Command::SimplifyParents(args) => { simplify_parents::cmd_simplify_parents(ui, command_helper, args) } Command::Show(args) => show::cmd_show(ui, command_helper, args), Command::Sign(args) => sign::cmd_sign(ui, command_helper, args), Command::Sparse(args) => sparse::cmd_sparse(ui, command_helper, args), Command::Split(args) => split::cmd_split(ui, command_helper, args), Command::Squash(args) => squash::cmd_squash(ui, command_helper, args), Command::Status(args) => status::cmd_status(ui, command_helper, args), Command::Tag(args) => tag::cmd_tag(ui, command_helper, args), Command::Undo(args) => undo::cmd_undo(ui, command_helper, args), Command::Unsign(args) => unsign::cmd_unsign(ui, command_helper, 
args), Command::Util(args) => util::cmd_util(ui, command_helper, args), Command::Version(args) => version::cmd_version(ui, command_helper, args), Command::Workspace(args) => workspace::cmd_workspace(ui, command_helper, args), } } /// Wraps deprecated command of `old_name` which has been renamed to `new_name`. pub(crate) fn renamed_cmd<Args>( old_name: &'static str, new_name: &'static str, cmd: impl Fn(&mut Ui, &CommandHelper, &Args) -> Result<(), CommandError>, ) -> impl Fn(&mut Ui, &CommandHelper, &Args) -> Result<(), CommandError> { move |ui: &mut Ui, command: &CommandHelper, args: &Args| -> Result<(), CommandError> { writeln!( ui.warning_default(), "`jj {old_name}` is deprecated; use `jj {new_name}` instead, which is equivalent" )?; writeln!( ui.warning_default(), "`jj {old_name}` will be removed in a future version, and this will be a hard error" )?; cmd(ui, command, args) } } #[cfg(test)] mod tests { use super::*; #[test] fn verify_app() { default_app().debug_assert(); } }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/resolve.rs
cli/src/commands/resolve.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCandidates; use clap_complete::ArgValueCompleter; use itertools::Itertools as _; use jj_lib::object_id::ObjectId as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::print_conflicted_paths; use crate::cli_util::print_unmatched_explicit_paths; use crate::command_error::CommandError; use crate::command_error::cli_error; use crate::complete; use crate::formatter::FormatterExt as _; use crate::ui::Ui; /// Resolve conflicted files with an external merge tool /// /// Only conflicts that can be resolved with a 3-way merge are supported. See /// docs for merge tool configuration instructions. External merge tools will be /// invoked for each conflicted file one-by-one until all conflicts are /// resolved. To stop resolving conflicts, exit the merge tool without making /// any changes. /// /// Note that conflicts can also be resolved without using this command. You may /// edit the conflict markers in the conflicted file directly with a text /// editor. // TODOs: // - `jj resolve --editor` to resolve a conflict in the default text editor. Should work for // conflicts with 3+ adds. Useful to resolve conflicts in a commit other than the current one. // - A way to help split commits with conflicts that are too complicated (more than two sides) // into commits with simpler conflicts. 
In case of a tree with many merges, we could for example // point to existing commits with simpler conflicts where resolving those conflicts would help // simplify the present one. #[derive(clap::Args, Clone, Debug)] pub(crate) struct ResolveArgs { #[arg(long, short, default_value = "@", value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable_conflicts))] revision: RevisionArg, /// Instead of resolving conflicts, list all the conflicts // TODO: Also have a `--summary` option. `--list` currently acts like // `diff --summary`, but should be more verbose. #[arg(long, short)] list: bool, /// Specify 3-way merge tool to be used /// /// The built-in merge tools `:ours` and `:theirs` can be used to choose /// side #1 and side #2 of the conflict respectively. #[arg(long, conflicts_with = "list", value_name = "NAME")] #[arg(add = ArgValueCandidates::new(complete::merge_editors))] tool: Option<String>, /// Only resolve conflicts in these paths. You can use the `--list` argument /// to find paths to use here. 
#[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] #[arg(add = ArgValueCompleter::new(complete::revision_conflicted_files))] paths: Vec<String>, } #[instrument(skip_all)] pub(crate) fn cmd_resolve( ui: &mut Ui, command: &CommandHelper, args: &ResolveArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?; let matcher = fileset_expression.to_matcher(); let commit = workspace_command.resolve_single_rev(ui, &args.revision)?; let tree = commit.tree(); let conflicts = tree.conflicts_matching(&matcher).collect_vec(); print_unmatched_explicit_paths(ui, &workspace_command, &fileset_expression, [&tree])?; if conflicts.is_empty() { return Err(cli_error(if args.paths.is_empty() { "No conflicts found at this revision" } else { "No conflicts found at the given path(s)" })); } if args.list { return print_conflicted_paths( conflicts, ui.stdout_formatter().as_mut(), &workspace_command, ); }; let repo_paths = conflicts .iter() .map(|(path, _)| path.as_ref()) .collect_vec(); workspace_command.check_rewritable([commit.id()])?; let merge_editor = workspace_command.merge_editor(ui, args.tool.as_deref())?; let mut tx = workspace_command.start_transaction(); let (new_tree, partial_resolution_error) = merge_editor.edit_files(ui, &tree, &repo_paths)?; let new_commit = tx .repo_mut() .rewrite_commit(&commit) .set_tree(new_tree) .write()?; tx.finish( ui, format!("Resolve conflicts in commit {}", commit.id().hex()), )?; // Print conflicts that are still present after resolution if the workspace // working copy is not at the commit. Otherwise, the conflicting paths will // be printed by the `tx.finish()` instead. 
if workspace_command.get_wc_commit_id() != Some(new_commit.id()) && let Some(mut formatter) = ui.status_formatter() && new_commit.has_conflict() { let new_tree = new_commit.tree(); let new_conflicts = new_tree.conflicts().collect_vec(); writeln!( formatter.labeled("warning").with_heading("Warning: "), "After this operation, some files at this revision still have conflicts:" )?; print_conflicted_paths(new_conflicts, formatter.as_mut(), &workspace_command)?; } if let Some(err) = partial_resolution_error { return Err(err.into()); } Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/run.rs
cli/src/commands/run.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. //! This file contains the internal implementation of `run`. use itertools::Itertools as _; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::ui::Ui; /// (**Stub**, does not work yet) Run a command across a set of revisions. /// /// /// All recorded state will be persisted in the `.jj` directory, so occasionally /// a `jj run --clean` is needed to clean up disk space. /// /// # Example /// /// # Run pre-commit on your local work /// $ jj run 'pre-commit run .github/pre-commit.yaml' -r (trunk()..@) -j 4 /// /// This allows pre-commit integration and other funny stuff. #[derive(clap::Args, Clone, Debug)] #[command(verbatim_doc_comment)] pub struct RunArgs { /// The command to run across all selected revisions. shell_command: String, /// The revisions to change. #[arg(long, short, default_value = "@", value_name = "REVSETS")] revisions: Vec<RevisionArg>, /// A no-op option to match the interface of `git rebase -x`. #[arg(short = 'x', hide = true)] unused_command: bool, /// How many processes should run in parallel, uses by default all cores. 
#[arg(long, short)] jobs: Option<usize>, } pub fn cmd_run(ui: &mut Ui, command: &CommandHelper, args: &RunArgs) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let _resolved_commits: Vec<_> = workspace_command .parse_union_revsets(ui, &args.revisions)? .evaluate_to_commits()? .try_collect()?; // Jobs are resolved in this order: // 1. Commandline argument iff > 0. // 2. the amount of cores available. // 3. a single job, if all of the above fails. let _jobs = match args.jobs { Some(0) | None => std::thread::available_parallelism().map(|t| t.into()).ok(), Some(jobs) => Some(jobs), } // Fallback to a single user-visible job. .unwrap_or(1usize); Err(user_error("This is a stub, do not use")) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/unsign.rs
cli/src/commands/unsign.rs
// Copyright 2023 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use clap_complete::ArgValueCompleter;
use indexmap::IndexSet;
use itertools::Itertools as _;
use jj_lib::commit::Commit;
use jj_lib::commit::CommitIteratorExt as _;
use jj_lib::repo::Repo as _;
use jj_lib::revset::RevsetIteratorExt as _;
use jj_lib::signing::SignBehavior;

use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::cli_util::print_updated_commits;
use crate::command_error::CommandError;
use crate::complete;
use crate::ui::Ui;

/// Drop a cryptographic signature
///
/// See also [commit signing] docs.
///
/// [commit signing]:
/// https://docs.jj-vcs.dev/latest/config/#commit-signing
#[derive(clap::Args, Clone, Debug)]
pub struct UnsignArgs {
    /// What revision(s) to unsign
    #[arg(long, short, value_name = "REVSETS")]
    #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))]
    revisions: Vec<RevisionArg>,
}

/// Rewrites the selected commits with their signatures dropped, reparenting
/// any descendants onto the rewritten commits.
pub fn cmd_unsign(
    ui: &mut Ui,
    command: &CommandHelper,
    args: &UnsignArgs,
) -> Result<(), CommandError> {
    let mut workspace_command = command.workspace_helper(ui)?;

    // Resolve the requested revsets and refuse to proceed if any target is
    // immutable (checked before evaluation, on the expression itself).
    let target_expr = workspace_command
        .parse_union_revsets(ui, &args.revisions)?
        .resolve()?;
    workspace_command.check_rewritable_expr(&target_expr)?;
    let commits: IndexSet<Commit> = target_expr
        .evaluate(workspace_command.repo().as_ref())?
        .iter()
        .commits(workspace_command.repo().store())
        .try_collect()?;

    // Commits without a signature are silently skipped; only currently-signed
    // commits need rewriting.
    let to_unsign: IndexSet<Commit> = commits
        .into_iter()
        .filter(|commit| commit.is_signed())
        .collect();

    let mut tx = workspace_command.start_transaction();

    // Rewrite the targeted commits (dropping their signatures) and reparent
    // everything else the traversal visits. NOTE(review): the exact visit
    // order/scope is defined by `transform_descendants` in jj_lib — presumably
    // the targets plus their descendants; confirm there before relying on it.
    let mut unsigned_commits = vec![];
    let mut num_reparented = 0;
    tx.repo_mut().transform_descendants(
        to_unsign.iter().ids().cloned().collect_vec(),
        async |rewriter| {
            let old_commit = rewriter.old_commit().clone();
            let commit_builder = rewriter.reparent();
            if to_unsign.contains(&old_commit) {
                // A target commit: rewrite it with the signature dropped.
                let new_commit = commit_builder
                    .set_sign_behavior(SignBehavior::Drop)
                    .write()?;
                unsigned_commits.push(new_commit);
            } else {
                // A descendant that was only visited because an ancestor
                // changed: rewrite it unchanged apart from its parents.
                commit_builder.write()?;
                num_reparented += 1;
            }
            Ok(())
        },
    )?;

    // User feedback: list the unsigned commits (only when a status formatter
    // is available, i.e. not in quiet mode).
    if let Some(mut formatter) = ui.status_formatter()
        && !unsigned_commits.is_empty()
    {
        writeln!(formatter, "Unsigned {} commits:", unsigned_commits.len())?;
        print_updated_commits(
            formatter.as_mut(),
            &tx.commit_summary_template(),
            &unsigned_commits,
        )?;
    }
    // Warn when dropping signatures from other people's commits.
    let num_not_authored_by_me = unsigned_commits
        .iter()
        .filter(|commit| commit.author().email != tx.settings().user_email())
        .count();
    if num_not_authored_by_me > 0 {
        writeln!(
            ui.warning_default(),
            "{num_not_authored_by_me} of these commits are not authored by you",
        )?;
    }
    if num_reparented > 0 {
        writeln!(ui.status(), "Rebased {num_reparented} descendant commits")?;
    }
    // Operation-log description: empty when nothing was unsigned, the single
    // commit id for one, or "<first> and N more" for several.
    let transaction_description = match &*unsigned_commits {
        [] => "".to_string(),
        [commit] => format!("unsign commit {}", commit.id()),
        commits => format!(
            "unsign commit {} and {} more",
            commits[0].id(),
            commits.len() - 1
        ),
    };
    tx.finish(ui, transaction_description)?;
    Ok(())
}
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/prev.rs
cli/src/commands/prev.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::movement_util::Direction; use crate::movement_util::MovementArgs; use crate::movement_util::move_to_commit; use crate::ui::Ui; /// Change the working copy revision relative to the parent revision /// /// The command creates a new empty working copy revision that is the child of /// an ancestor `offset` revisions behind the parent of the current working /// copy. /// /// For example, when the offset is 1: /// /// ```text /// D @ D /// |/ | /// A => A @ /// | |/ /// B B /// ``` /// /// If `--edit` is passed, the working copy revision is changed to the parent of /// the current working copy revision. /// /// ```text /// D @ D /// |/ | /// C => @ /// | | /// B B /// | | /// A A /// ``` #[derive(clap::Args, Clone, Debug)] #[command(verbatim_doc_comment)] pub(crate) struct PrevArgs { /// How many revisions to move backward. Moves to the parent by default #[arg(default_value = "1")] offset: u64, /// Edit the parent directly, instead of moving the working-copy commit /// /// Takes precedence over config in `ui.movement.edit`; i.e. /// will negate `ui.movement.edit = false` #[arg(long, short)] edit: bool, /// The inverse of `--edit` /// /// Takes precedence over config in `ui.movement.edit`; i.e. 
/// will negate `ui.movement.edit = true` #[arg(long, short, conflicts_with = "edit")] no_edit: bool, /// Jump to the previous conflicted ancestor #[arg(long, conflicts_with = "offset")] conflict: bool, } impl From<&PrevArgs> for MovementArgs { fn from(val: &PrevArgs) -> Self { Self { offset: val.offset, edit: val.edit, no_edit: val.no_edit, conflict: val.conflict, } } } pub(crate) fn cmd_prev( ui: &mut Ui, command: &CommandHelper, args: &PrevArgs, ) -> Result<(), CommandError> { move_to_commit(ui, command, Direction::Prev, &MovementArgs::from(args)) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/redo.rs
cli/src/commands/redo.rs
// Copyright 2025 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use itertools::Itertools as _;
use jj_lib::object_id::ObjectId as _;
use jj_lib::op_store::OperationId;

use crate::cli_util::CommandHelper;
use crate::command_error::CommandError;
use crate::command_error::internal_error;
use crate::command_error::user_error;
use crate::commands::operation::DEFAULT_REVERT_WHAT;
use crate::commands::operation::view_with_desired_portions_restored;
use crate::commands::undo::UNDO_OP_DESC_PREFIX;
use crate::ui::Ui;

/// Redo the most recently undone operation
///
/// This is the natural counterpart of `jj undo`. Repeated invocations of `jj
/// undo` and `jj redo` act similarly to Undo/Redo commands in a text editor.
///
/// Use `jj op log` to visualize the log of past operations, including a
/// detailed description of any past undo/redo operations. See also `jj op
/// restore` to explicitly restore an older operation by its id (available in
/// the operation log).
#[derive(clap::Args, Clone, Debug)]
pub struct RedoArgs {}

/// Description prefix written by `cmd_redo` into the operation log; it is
/// matched (via `strip_prefix`) to recognize earlier redo-operations.
const REDO_OP_DESC_PREFIX: &str = "redo: restore to operation ";

pub fn cmd_redo(ui: &mut Ui, command: &CommandHelper, _: &RedoArgs) -> Result<(), CommandError> {
    let mut workspace_command = command.workspace_helper(ui)?;
    // Start from the current operation; the walk below may replace this with
    // an earlier operation when the current one is itself a redo-operation.
    let mut op_to_redo = workspace_command.repo().operation().clone();

    // Growing the "redo-stack" works very similar to the
    // [undo-stack](./undo.rs). `jj redo` and `jj undo` track their stacks
    // separately.
    //
    // - If the operation to redo is a regular one (neither an undo- or
    //   redo-operation): Fail, because there is nothing to redo.
    // - If the operation to redo is an undo-operation, try to redo it (by restoring
    //   its parent operation).
    // - If the operation to redo is a redo-operation itself, redo the operation the
    //   early redo-operation restored to.
    // - If the operation to restore to is a redo-operation itself, restore directly
    //   to the original operation. This avoids creating a linked list of
    //   redo-operations, which subsequently may have to be walked with an
    //   inefficient loop.
    //
    // This described behavior leads to "jumping over" old redo-stacks if the
    // current one grows into it. Consider the following op-log example, where
    // redo-stacks are shown on the left and undo-stacks on the right:
    //
    //   +------- "redo: restore C"  * I
    //   |                           |
    //   |  +----- "redo: restore D" * H
    //   |  |                        |
    //   |  |                        * G "undo: restore A" -------+
    //   |  |                        |                            |
    //   |  |  +-- "redo: restore D" * F                          |
    //   |  |  |                     |                            |
    //   |  |  |                     * E "undo: restore A" ---+   |
    //   |  |  |                     |                        |   |
    //   |  +--+-------------------> * D "undo: restore B" -+ |   |
    //   |                           |                      | |   |
    //   +-------------------------> * C                    | |   |
    //                               |                      | |   |
    //                               * B <------------------+ |   |
    //                               |                        |   |
    //                               * A <--------------------+ <-+
    //
    // The first interesting operation here is I:
    // - Attempt to redo G.
    // - G is an undo-operation, attempt to restore its parent F.
    // - F is a redo-operation. Restore its original operation D, instead of F.
    //
    // The operation H is also noteworthy:
    // - Attempt to redo H.
    // - H is a redo-operation restoring to D, so attempt to redo D.
    // - D is an undo-operation. Redo it by restoring its parent C.
    //
    // Step 1: if the current operation is a redo-operation, follow its
    // recorded target and redo that instead.
    if let Some(id_of_restored_op) = op_to_redo
        .metadata()
        .description
        .strip_prefix(REDO_OP_DESC_PREFIX)
    {
        let Some(id_of_restored_op) = OperationId::try_from_hex(id_of_restored_op) else {
            return Err(internal_error(
                "Failed to parse ID of restored operation in redo-stack",
            ));
        };
        op_to_redo = workspace_command
            .repo()
            .loader()
            .load_operation(&id_of_restored_op)?;
    }
    // Step 2: only undo-operations can be redone.
    if !op_to_redo
        .metadata()
        .description
        .starts_with(UNDO_OP_DESC_PREFIX)
    {
        // cannot redo a non-undo-operation
        return Err(user_error("Nothing to redo"));
    }
    // Redoing an undo means restoring the undo-operation's (single) parent.
    // `None` here covers both zero and multiple parents.
    let mut op_to_restore = match op_to_redo.parents().at_most_one().ok().flatten() {
        Some(parent_of_op_to_redo) => parent_of_op_to_redo?,
        None => {
            return Err(internal_error("Undo operation should have a single parent"));
        }
    };
    // Avoid the creation of a linked list by restoring to the original
    // operation directly, if we're about to restore a redo-operation. If
    // we didn't to this, repeated calls of `jj undo ; jj redo` would create
    // an ever-growing linked list of redo-operations that restore each
    // other. Calling `jj redo` one more time would have to redo a potential
    // undo-operation at the very beginning of the linked list, which would
    // require walking the entire thing unnecessarily.
    if let Some(original_op) = op_to_restore
        .metadata()
        .description
        .strip_prefix(REDO_OP_DESC_PREFIX)
    {
        let Some(id_of_original_op) = OperationId::try_from_hex(original_op) else {
            return Err(internal_error(
                "Failed to parse ID of restored operation in redo-stack",
            ));
        };
        op_to_restore = workspace_command
            .repo()
            .loader()
            .load_operation(&id_of_original_op)?;
    }

    // Apply the restore: rebuild the view from the target operation, keeping
    // the portions selected by DEFAULT_REVERT_WHAT from the current view.
    let mut tx = workspace_command.start_transaction();
    let new_view = view_with_desired_portions_restored(
        op_to_restore.view()?.store_view(),
        tx.base_repo().view().store_view(),
        &DEFAULT_REVERT_WHAT,
    );
    tx.repo_mut().set_view(new_view);
    if let Some(mut formatter) = ui.status_formatter() {
        write!(formatter, "Restored to operation: ")?;
        let template = tx.base_workspace_helper().operation_summary_template();
        template.format(&op_to_restore, formatter.as_mut())?;
        writeln!(formatter)?;
    }
    // The finished operation's description carries the redo marker prefix so
    // a later `jj redo`/`jj undo` can recognize it (see REDO_OP_DESC_PREFIX).
    tx.finish(
        ui,
        format!("{REDO_OP_DESC_PREFIX}{}", op_to_restore.id().hex()),
    )?;
    Ok(())
}
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/show.rs
cli/src/commands/show.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCandidates; use clap_complete::ArgValueCompleter; use jj_lib::matchers::EverythingMatcher; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::command_error::CommandError; use crate::complete; use crate::diff_util::DiffFormatArgs; use crate::ui::Ui; /// Show commit description and changes in a revision #[derive(clap::Args, Clone, Debug)] #[command(group(clap::ArgGroup::new("revision")))] #[command(mut_arg("ignore_all_space", |a| a.short('w')))] #[command(mut_arg("ignore_space_change", |a| a.short('b')))] pub(crate) struct ShowArgs { /// Show changes in this revision, compared to its parent(s) [default: @] /// [aliases: -r] #[arg(group = "revision", value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] revision_pos: Option<RevisionArg>, #[arg(short = 'r', group = "revision", hide = true, value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] revision_opt: Option<RevisionArg>, /// Render a revision using the given template /// /// You can specify arbitrary template expressions using the /// [built-in keywords]. See [`jj help -k templates`] for more information. 
/// /// [built-in keywords]: /// https://docs.jj-vcs.dev/latest/templates/#commit-keywords /// /// [`jj help -k templates`]: /// https://docs.jj-vcs.dev/latest/templates/ #[arg(long, short = 'T')] #[arg(add = ArgValueCandidates::new(complete::template_aliases))] template: Option<String>, #[command(flatten)] format: DiffFormatArgs, /// Do not show the patch #[arg(long, conflicts_with = "DiffFormatArgs")] no_patch: bool, } #[instrument(skip_all)] pub(crate) fn cmd_show( ui: &mut Ui, command: &CommandHelper, args: &ShowArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let revision_arg = args .revision_pos .as_ref() .or(args.revision_opt.as_ref()) .unwrap_or(&RevisionArg::AT); let commit = workspace_command.resolve_single_rev(ui, revision_arg)?; let template_string = match &args.template { Some(value) => value.clone(), None => workspace_command.settings().get_string("templates.show")?, }; let template = workspace_command .parse_commit_template(ui, &template_string)? .labeled(["show", "commit"]); let diff_renderer = workspace_command.diff_renderer_for(&args.format)?; ui.request_pager(); let mut formatter = ui.stdout_formatter(); let formatter = formatter.as_mut(); template.format(&commit, formatter)?; if !args.no_patch { diff_renderer .show_patch(ui, formatter, &commit, &EverythingMatcher, ui.term_width()) .block_on()?; } Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/edit.rs
cli/src/commands/edit.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write as _; use clap_complete::ArgValueCompleter; use jj_lib::object_id::ObjectId as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::command_error::CommandError; use crate::complete; use crate::ui::Ui; /// Sets the specified revision as the working-copy revision /// /// Note: it is [generally recommended] to instead use `jj new` and `jj /// squash`. 
/// /// [generally recommended]: /// https://docs.jj-vcs.dev/latest/FAQ#how-do-i-resume-working-on-an-existing-change #[derive(clap::Args, Clone, Debug)] #[command(group(clap::ArgGroup::new("revision").required(true)))] pub(crate) struct EditArgs { /// The commit to edit [aliases: -r] #[arg(group = "revision", value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revision_pos: Option<RevisionArg>, #[arg(short = 'r', group = "revision", hide = true, value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revision_opt: Option<RevisionArg>, } #[instrument(skip_all)] pub(crate) fn cmd_edit( ui: &mut Ui, command: &CommandHelper, args: &EditArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; let revision_arg = args .revision_pos .as_ref() .or(args.revision_opt.as_ref()) .expect("either positional or -r arg should be provided"); let new_commit = workspace_command.resolve_single_rev(ui, revision_arg)?; workspace_command.check_rewritable([new_commit.id()])?; if workspace_command.get_wc_commit_id() == Some(new_commit.id()) { writeln!(ui.status(), "Already editing that commit")?; } else { let mut tx = workspace_command.start_transaction(); tx.edit(&new_commit)?; tx.finish(ui, format!("edit commit {}", new_commit.id().hex()))?; } Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/diffedit.rs
cli/src/commands/diffedit.rs
// Copyright 2020 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::io::Write as _;

use clap_complete::ArgValueCandidates;
use clap_complete::ArgValueCompleter;
use itertools::Itertools as _;
use jj_lib::merge::Diff;
use jj_lib::object_id::ObjectId as _;
use jj_lib::rewrite::merge_commit_trees;
use pollster::FutureExt as _;
use tracing::instrument;

use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::cli_util::print_unmatched_explicit_paths;
use crate::command_error::CommandError;
use crate::complete;
use crate::ui::Ui;

/// Touch up the content changes in a revision with a diff editor
///
/// With the `-r` option, starts a [diff editor] on the changes in the revision.
///
/// With the `--from` and/or `--to` options, starts a [diff editor] comparing
/// the "from" revision to the "to" revision.
///
/// [diff editor]:
/// https://docs.jj-vcs.dev/latest/config/#editing-diffs
///
/// Edit the right side of the diff until it looks the way you want. Once you
/// close the editor, the revision specified with `-r` or `--to` will be
/// updated. Unless `--restore-descendants` is used, descendants will be
/// rebased on top as usual, which may result in conflicts.
///
/// See `jj restore` if you want to move entire files from one revision to
/// another. For moving changes between revisions, see `jj squash -i`.
#[derive(clap::Args, Clone, Debug)]
pub(crate) struct DiffeditArgs {
    /// The revision to touch up
    ///
    /// Defaults to @ if neither --to nor --from are specified.
    #[arg(long, short, value_name = "REVSET")]
    #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))]
    revision: Option<RevisionArg>,
    /// Show changes from this revision
    ///
    /// Defaults to @ if --to is specified.
    #[arg(long, short, conflicts_with = "revision", value_name = "REVSET")]
    #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))]
    from: Option<RevisionArg>,
    /// Edit changes in this revision
    ///
    /// Defaults to @ if --from is specified.
    #[arg(long, short, conflicts_with = "revision", value_name = "REVSET")]
    #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))]
    to: Option<RevisionArg>,
    /// Edit only these paths (unmatched paths will remain unchanged)
    #[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)]
    #[arg(add = ArgValueCompleter::new(complete::modified_revision_or_range_files))]
    paths: Vec<String>,
    /// Specify diff editor to be used
    #[arg(long, value_name = "NAME")]
    #[arg(add = ArgValueCandidates::new(complete::diff_editors))]
    tool: Option<String>,
    /// Preserve the content (not the diff) when rebasing descendants
    ///
    /// When rebasing a descendant on top of the rewritten revision, its diff
    /// compared to its parent(s) is normally preserved, i.e. the same way that
    /// descendants are always rebased. This flag makes it so the content/state
    /// is preserved instead of preserving the diff.
    #[arg(long)]
    restore_descendants: bool,
}

/// Opens a diff editor on the selected range and rewrites the target commit
/// with the edited tree, rebasing (or reparenting) descendants.
#[instrument(skip_all)]
pub(crate) fn cmd_diffedit(
    ui: &mut Ui,
    command: &CommandHelper,
    args: &DiffeditArgs,
) -> Result<(), CommandError> {
    let mut workspace_command = command.workspace_helper(ui)?;

    let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?;
    let matcher = fileset_expression.to_matcher();
    // Deferred initialization: both branches below assign all three bindings.
    let (target_commit, base_commits, diff_description);
    if args.from.is_some() || args.to.is_some() {
        // Explicit range mode: edit `--to` (default @) against `--from`
        // (default @) as the single base.
        target_commit = workspace_command
            .resolve_single_rev(ui, args.to.as_ref().unwrap_or(&RevisionArg::AT))?;
        base_commits = vec![
            workspace_command
                .resolve_single_rev(ui, args.from.as_ref().unwrap_or(&RevisionArg::AT))?,
        ];
        diff_description = format!(
            "The diff initially shows the commit's changes relative to:\n{}",
            workspace_command.format_commit_summary(&base_commits[0])
        );
    } else {
        // Single-revision mode: edit `-r` (default @) against its parent(s).
        target_commit = workspace_command
            .resolve_single_rev(ui, args.revision.as_ref().unwrap_or(&RevisionArg::AT))?;
        base_commits = target_commit.parents().try_collect()?;
        diff_description = "The diff initially shows the commit's changes.".to_string();
    };
    workspace_command.check_rewritable([target_commit.id()])?;

    let diff_editor = workspace_command.diff_editor(ui, args.tool.as_deref())?;
    let mut tx = workspace_command.start_transaction();
    // Instructions shown to the user inside the diff editor.
    // NOTE(review): line breaks reconstructed from upstream layout — confirm
    // against the original file before relying on exact formatting.
    let format_instructions = || {
        format!(
            "\
You are editing changes in: {}

{diff_description}

Adjust the right side until it shows the contents you want.
If you don't make any changes, then the operation will be aborted.",
            tx.format_commit_summary(&target_commit),
        )
    };
    // The base is the merge of all base commits (handles merge commits).
    let base_tree = merge_commit_trees(tx.repo(), base_commits.as_slice()).block_on()?;
    let tree = target_commit.tree();
    let edited_tree =
        diff_editor.edit(Diff::new(&base_tree, &tree), &matcher, format_instructions)?;
    if edited_tree.tree_ids() == target_commit.tree_ids() {
        // The user made no edits; avoid creating an empty operation.
        writeln!(ui.status(), "Nothing changed.")?;
    } else {
        tx.repo_mut()
            .rewrite_commit(&target_commit)
            .set_tree(edited_tree)
            .write()?;
        // rebase_descendants early; otherwise `new_commit` would always have
        // a conflicted change id at this point.
        let (num_rebased, extra_msg) = if args.restore_descendants {
            (
                tx.repo_mut().reparent_descendants()?,
                " (while preserving their content)",
            )
        } else {
            (tx.repo_mut().rebase_descendants()?, "")
        };
        if let Some(mut formatter) = ui.status_formatter()
            && num_rebased > 0
        {
            writeln!(
                formatter,
                "Rebased {num_rebased} descendant commits{extra_msg}"
            )?;
        }
        tx.finish(ui, format!("edit commit {}", target_commit.id().hex()))?;
    }
    // Warn about explicitly-named paths that matched nothing in either tree.
    print_unmatched_explicit_paths(
        ui,
        &workspace_command,
        &fileset_expression,
        [&base_tree, &tree],
    )?;
    Ok(())
}
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/restore.rs
cli/src/commands/restore.rs
// Copyright 2020 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::io::Write as _;

use clap_complete::ArgValueCandidates;
use clap_complete::ArgValueCompleter;
use indoc::formatdoc;
use itertools::Itertools as _;
use jj_lib::merge::Diff;
use jj_lib::object_id::ObjectId as _;
use tracing::instrument;

use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::cli_util::print_unmatched_explicit_paths;
use crate::command_error::CommandError;
use crate::command_error::user_error;
use crate::complete;
use crate::ui::Ui;

/// Restore paths from another revision
///
/// That means that the paths get the same content in the destination (`--into`)
/// as they had in the source (`--from`). This is typically used for undoing
/// changes to some paths in the working copy (`jj restore <paths>`).
///
/// If only one of `--from` or `--into` is specified, the other one defaults to
/// the working copy.
///
/// When neither `--from` nor `--into` is specified, the command restores into
/// the working copy from its parent(s). `jj restore` without arguments is
/// similar to `jj abandon`, except that it leaves an empty revision with its
/// description and other metadata preserved.
///
/// See `jj diffedit` if you'd like to restore portions of files rather than
/// entire files.
#[derive(clap::Args, Clone, Debug)]
pub(crate) struct RestoreArgs {
    /// Restore only these paths (instead of all paths)
    #[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)]
    #[arg(add = ArgValueCompleter::new(complete::modified_changes_in_or_range_files))]
    paths: Vec<String>,
    /// Revision to restore from (source)
    #[arg(long, short, value_name = "REVSET")]
    #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))]
    from: Option<RevisionArg>,
    /// Revision to restore into (destination)
    #[arg(long, short = 't', visible_alias = "to", value_name = "REVSET")]
    #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))]
    into: Option<RevisionArg>,
    /// Undo the changes in a revision as compared to the merge of its parents.
    ///
    /// This undoes the changes that can be seen with `jj diff -r REVSET`. If
    /// `REVSET` only has a single parent, this option is equivalent to `jj
    /// restore --into REVSET --from REVSET-`.
    ///
    /// The default behavior of `jj restore` is equivalent to `jj restore
    /// --changes-in @`.
    #[arg(long, short, value_name = "REVSET", conflicts_with_all = ["into", "from"])]
    #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))]
    changes_in: Option<RevisionArg>,
    /// Prints an error. DO NOT USE.
    ///
    /// If we followed the pattern of `jj diff` and `jj diffedit`, we would use
    /// `--revision` instead of `--changes-in` However, that would make it
    /// likely that someone unfamiliar with this pattern would use `-r` when
    /// they wanted `--from`. This would make a different revision empty, and
    /// the user might not even realize something went wrong.
    #[arg(long, short, hide = true)]
    revision: Option<RevisionArg>,
    /// Interactively choose which parts to restore
    #[arg(long, short)]
    interactive: bool,
    /// Specify diff editor to be used (implies --interactive)
    #[arg(long, value_name = "NAME")]
    #[arg(add = ArgValueCandidates::new(complete::diff_editors))]
    tool: Option<String>,
    /// Preserve the content (not the diff) when rebasing descendants
    #[arg(long)]
    restore_descendants: bool,
}

/// Rewrites the destination commit so that the selected paths match the
/// source tree, rebasing (or reparenting) descendants.
#[instrument(skip_all)]
pub(crate) fn cmd_restore(
    ui: &mut Ui,
    command: &CommandHelper,
    args: &RestoreArgs,
) -> Result<(), CommandError> {
    let mut workspace_command = command.workspace_helper(ui)?;
    // Deferred initialization: both branches of the if/else below assign all
    // three bindings.
    let (from_commits, from_tree, to_commit);
    // `-r` is a hidden trap option that always errors (see the struct docs).
    if args.revision.is_some() {
        return Err(
            user_error("`jj restore` does not have a `--revision`/`-r` option.")
                .hinted("To modify the current revision, use `--from`.")
                .hinted(
                    "To undo changes in a revision compared to its parents, use `--changes-in`.",
                ),
        );
    }
    if args.from.is_some() || args.into.is_some() {
        // Explicit source/destination mode; either side defaults to @.
        to_commit = workspace_command
            .resolve_single_rev(ui, args.into.as_ref().unwrap_or(&RevisionArg::AT))?;
        let from_commit = workspace_command
            .resolve_single_rev(ui, args.from.as_ref().unwrap_or(&RevisionArg::AT))?;
        from_tree = from_commit.tree();
        from_commits = vec![from_commit];
    } else {
        // `--changes-in` mode (default REVSET is @): restore the revision's
        // content back to the merged tree of its parents.
        to_commit = workspace_command
            .resolve_single_rev(ui, args.changes_in.as_ref().unwrap_or(&RevisionArg::AT))?;
        from_tree = to_commit.parent_tree(workspace_command.repo().as_ref())?;
        from_commits = to_commit.parents().try_collect()?;
    }
    workspace_command.check_rewritable([to_commit.id()])?;

    let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?;
    let matcher = fileset_expression.to_matcher();
    let diff_selector =
        workspace_command.diff_selector(ui, args.tool.as_deref(), args.interactive)?;
    let to_tree = to_commit.tree();
    // Instructions shown in the interactive diff editor.
    // NOTE(review): line breaks and the join indentation were reconstructed
    // from upstream layout — confirm against the original file.
    let format_instructions = || {
        formatdoc! {"
            You are restoring changes from:
            {from_commits}
            to commit:
            {to_commit}

            The diff initially shows all changes restored. Adjust the right side until it
            shows the contents you want for the destination commit.
            ",
            from_commits = from_commits
                .iter()
                .map(|commit| workspace_command.format_commit_summary(commit))
                // "You are restoring changes from: "
                .join("\n                                "),
            to_commit = workspace_command.format_commit_summary(&to_commit),
        }
    };
    // Select which hunks to restore; non-interactively this takes everything
    // matched by `matcher`.
    let new_tree = diff_selector.select(
        Diff::new(&to_tree, &from_tree),
        &matcher,
        format_instructions,
    )?;
    // Warn about explicitly-named paths that matched nothing in either tree.
    print_unmatched_explicit_paths(
        ui,
        &workspace_command,
        &fileset_expression,
        [&to_tree, &from_tree],
    )?;
    if new_tree.tree_ids() == to_commit.tree_ids() {
        // Restoring produced an identical tree; avoid an empty operation.
        writeln!(ui.status(), "Nothing changed.")?;
    } else {
        let mut tx = workspace_command.start_transaction();
        tx.repo_mut()
            .rewrite_commit(&to_commit)
            .set_tree(new_tree)
            .write()?;
        // rebase_descendants early; otherwise the new commit would always have
        // a conflicted change id at this point.
        let (num_rebased, extra_msg) = if args.restore_descendants {
            (
                tx.repo_mut().reparent_descendants()?,
                " (while preserving their content)",
            )
        } else {
            (tx.repo_mut().rebase_descendants()?, "")
        };
        if let Some(mut formatter) = ui.status_formatter()
            && num_rebased > 0
        {
            writeln!(
                formatter,
                "Rebased {num_rebased} descendant commits{extra_msg}"
            )?;
        }
        tx.finish(ui, format!("restore into commit {}", to_commit.id().hex()))?;
    }
    Ok(())
}
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/absorb.rs
cli/src/commands/absorb.rs
// Copyright 2024 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCompleter; use jj_lib::absorb::AbsorbSource; use jj_lib::absorb::absorb_hunks; use jj_lib::absorb::split_hunks_to_trees; use jj_lib::matchers::EverythingMatcher; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::print_unmatched_explicit_paths; use crate::cli_util::print_updated_commits; use crate::command_error::CommandError; use crate::complete; use crate::diff_util::DiffFormat; use crate::ui::Ui; /// Move changes from a revision into the stack of mutable revisions /// /// This command splits changes in the source revision and moves each change to /// the closest mutable ancestor where the corresponding lines were modified /// last. If the destination revision cannot be determined unambiguously, the /// change will be left in the source revision. /// /// The source revision will be abandoned if all changes are absorbed into the /// destination revisions, and if the source revision has no description. /// /// The modification made by `jj absorb` can be reviewed by `jj op show -p`. 
#[derive(clap::Args, Clone, Debug)] pub(crate) struct AbsorbArgs { /// Source revision to absorb from #[arg(long, short, default_value = "@", value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] from: RevisionArg, /// Destination revisions to absorb into /// /// Only ancestors of the source revision will be considered. #[arg( long, short = 't', visible_alias = "to", default_value = "mutable()", value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] into: Vec<RevisionArg>, /// Move only changes to these paths (instead of all paths) #[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] #[arg(add = ArgValueCompleter::new(complete::modified_from_files))] paths: Vec<String>, } #[instrument(skip_all)] pub(crate) fn cmd_absorb( ui: &mut Ui, command: &CommandHelper, args: &AbsorbArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; let source_commit = workspace_command.resolve_single_rev(ui, &args.from)?; let destinations = workspace_command .parse_union_revsets(ui, &args.into)? 
.resolve()?; let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?; let matcher = fileset_expression.to_matcher(); let repo = workspace_command.repo().as_ref(); let source = AbsorbSource::from_commit(repo, source_commit.clone())?; let selected_trees = split_hunks_to_trees(repo, &source, &destinations, &matcher).block_on()?; print_unmatched_explicit_paths( ui, &workspace_command, &fileset_expression, [&source_commit.tree()], )?; let path_converter = workspace_command.path_converter(); for (path, reason) in selected_trees.skipped_paths { let ui_path = path_converter.format_file_path(&path); writeln!(ui.warning_default(), "Skipping {ui_path}: {reason}")?; } workspace_command.check_rewritable(selected_trees.target_commits.keys())?; let mut tx = workspace_command.start_transaction(); let stats = absorb_hunks(tx.repo_mut(), &source, selected_trees.target_commits)?; if let Some(mut formatter) = ui.status_formatter() { if !stats.rewritten_destinations.is_empty() { writeln!( formatter, "Absorbed changes into {} revisions:", stats.rewritten_destinations.len() )?; print_updated_commits( formatter.as_mut(), &tx.commit_summary_template(), stats.rewritten_destinations.iter().rev(), )?; } if stats.num_rebased > 0 { writeln!( formatter, "Rebased {} descendant commits.", stats.num_rebased )?; } } tx.finish( ui, format!( "absorb changes into {} commits", stats.rewritten_destinations.len() ), )?; if let Some(mut formatter) = ui.status_formatter() && let Some(commit) = &stats.rewritten_source { let repo = workspace_command.repo().as_ref(); if !commit.is_empty(repo)? { writeln!(formatter, "Remaining changes:")?; let diff_renderer = workspace_command.diff_renderer(vec![DiffFormat::Summary]); let matcher = &EverythingMatcher; // also print excluded paths let width = ui.term_width(); diff_renderer .show_patch(ui, formatter.as_mut(), commit, matcher, width) .block_on()?; } } Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/metaedit.rs
cli/src/commands/metaedit.rs
// Copyright 2025 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashSet; use clap_complete::ArgValueCompleter; use itertools::Itertools as _; use jj_lib::backend::Timestamp; use jj_lib::commit::Commit; use jj_lib::object_id::ObjectId as _; use jj_lib::time_util::parse_datetime; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::print_updated_commits; use crate::command_error::CommandError; use crate::complete; use crate::description_util::join_message_paragraphs; use crate::text_util::parse_author; use crate::ui::Ui; /// Modify the metadata of a revision without changing its content /// /// Whenever any metadata is updated, the committer name, email, and timestamp /// are also updated for all rebased commits. The name and email may come from /// the `JJ_USER` and `JJ_EMAIL` environment variables, as well as by passing /// `--config user.name` and `--config user.email`. 
#[derive(clap::Args, Clone, Debug)] pub(crate) struct MetaeditArgs { /// The revision(s) to modify (default: @) [aliases: -r] #[arg(value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revisions_pos: Vec<RevisionArg>, #[arg(short = 'r', hide = true, value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revisions_opt: Vec<RevisionArg>, /// Generate a new change-id /// /// This generates a new change-id for the revision. #[arg(long)] update_change_id: bool, /// Update the change description /// /// This updates the change description, without opening the editor. /// /// Use `jj describe` if you want to use an editor. #[arg(long = "message", short, value_name = "MESSAGE")] message_paragraphs: Vec<String>, /// Update the author timestamp /// /// This updates the author date to the current time, without modifying the /// author. #[arg(long)] update_author_timestamp: bool, /// Update the author to the configured user /// /// This updates the author name and email. The author timestamp is /// not modified – use --update-author-timestamp to update the author /// timestamp. /// /// You can use it in combination with the JJ_USER and JJ_EMAIL /// environment variables to set a different author: /// /// $ JJ_USER='Foo Bar' JJ_EMAIL=foo@bar.com jj metaedit --update-author #[arg(long)] update_author: bool, /// Set author to the provided string /// /// This changes author name and email while retaining author /// timestamp for non-discardable commits. #[arg( long, conflicts_with = "update_author", value_parser = parse_author )] author: Option<(String, String)>, /// Set the author date to the given date /// /// The date can either be human readable /// ([RFC2822], eg 'Sun, 23 Jan 2000 01:23:45 PST') /// or a time stamp ([RFC3339], eg '2000-01-23T01:23:45-08:00'). 
/// /// [RFC2822]: https://datatracker.ietf.org/doc/html/rfc2822 /// /// [RFC3339]: https://datatracker.ietf.org/doc/html/rfc3339 #[arg( long, conflicts_with = "update_author_timestamp", value_parser = parse_datetime )] author_timestamp: Option<Timestamp>, /// Rewrite the commit, even if no other metadata changed /// /// This updates the committer timestamp to the current time, as well as the /// committer name and email. /// /// Even if this option is not passed, the committer name, email, and /// timestamp will be updated if other metadata is updated. This option /// just forces every commit to be rewritten whether or not there are other /// changes. /// /// You can use it in combination with the `JJ_USER` and `JJ_EMAIL` /// environment variables to set a different committer: /// /// $ JJ_USER='Foo Bar' JJ_EMAIL=foo@bar.com jj metaedit --force-rewrite #[arg(long)] force_rewrite: bool, // TODO: remove in jj 0.41.0+ /// Deprecated. Use `--force-rewrite` instead. #[arg( long = "update-committer-timestamp", hide = true, conflicts_with = "force_rewrite" )] legacy_update_committer_timestamp: bool, } #[instrument(skip_all)] pub(crate) fn cmd_metaedit( ui: &mut Ui, command: &CommandHelper, args: &MetaeditArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; if args.legacy_update_committer_timestamp { writeln!( ui.warning_default(), "`--update-committer-timestamp` is deprecated; use `--force-rewrite` instead" )?; } let target_expr = if !args.revisions_pos.is_empty() || !args.revisions_opt.is_empty() { workspace_command .parse_union_revsets(ui, &[&*args.revisions_pos, &*args.revisions_opt].concat())? } else { workspace_command.parse_revset(ui, &RevisionArg::AT)? } .resolve()?; workspace_command.check_rewritable_expr(&target_expr)?; let commit_ids: Vec<_> = target_expr .evaluate(workspace_command.repo().as_ref())? 
.iter() .try_collect()?; if commit_ids.is_empty() { writeln!(ui.status(), "No revisions to modify.")?; return Ok(()); } let mut tx = workspace_command.start_transaction(); let tx_description = match commit_ids.as_slice() { [] => unreachable!(), [commit] => format!("edit commit metadata for commit {}", commit.hex()), [first_commit, remaining_commits @ ..] => { format!( "edit commit metadata for commit {} and {} more", first_commit.hex(), remaining_commits.len() ) } }; let new_description = if !args.message_paragraphs.is_empty() { Some(join_message_paragraphs(&args.message_paragraphs)) } else { None }; let mut num_reparented = 0; let commit_ids_set: HashSet<_> = commit_ids.iter().cloned().collect(); let mut modified: Vec<Commit> = Vec::new(); // Even though `MutableRepo::rewrite_commit` and // `MutableRepo::rebase_descendants` can handle rewriting of a commit even // if it is a descendant of another commit being rewritten, using // `MutableRepo::transform_descendants` prevents us from rewriting the same // commit multiple times, and adding additional entries in the predecessor // chain. 
tx.repo_mut() .transform_descendants(commit_ids, async |rewriter| { if commit_ids_set.contains(rewriter.old_commit().id()) { let mut rewrite = args.force_rewrite || args.legacy_update_committer_timestamp || rewriter.parents_changed(); let old_author = rewriter.old_commit().author().clone(); let mut commit_builder = rewriter.reparent(); let mut new_author = commit_builder.author().clone(); if let Some((name, email)) = args.author.clone() { new_author.name = name; new_author.email = email; } else if args.update_author { new_author.name = commit_builder.committer().name.clone(); new_author.email = commit_builder.committer().email.clone(); } if args.update_author_timestamp { new_author.timestamp = commit_builder.committer().timestamp; } if let Some(author_date) = args.author_timestamp { new_author.timestamp = author_date; } // If the old commit had an unset author, the commit builder // may already have the author updated from the current config. // Thus, compare to the actual old_author to correctly detect // changes. 
if new_author.name != old_author.name || new_author.email != old_author.email || (new_author.timestamp != commit_builder.author().timestamp && new_author.timestamp != old_author.timestamp) { commit_builder = commit_builder.set_author(new_author); rewrite = true; } if let Some(description) = &new_description && description != commit_builder.description() { commit_builder = commit_builder.set_description(description); rewrite = true; } if args.update_change_id { commit_builder = commit_builder.generate_new_change_id(); rewrite = true; } if rewrite { let new_commit = commit_builder.write()?; modified.push(new_commit); } } else if rewriter.parents_changed() { rewriter.reparent().write()?; num_reparented += 1; } Ok(()) })?; if !modified.is_empty() { writeln!(ui.status(), "Modified {} commits:", modified.len())?; if let Some(mut formatter) = ui.status_formatter() { print_updated_commits(formatter.as_mut(), &tx.commit_summary_template(), &modified)?; } } if num_reparented > 0 { writeln!(ui.status(), "Rebased {num_reparented} descendant commits")?; } tx.finish(ui, tx_description)?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/duplicate.rs
cli/src/commands/duplicate.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write as _; use bstr::ByteVec as _; use clap_complete::ArgValueCompleter; use itertools::Itertools as _; use jj_lib::backend::BackendResult; use jj_lib::backend::CommitId; use jj_lib::repo::Repo as _; use jj_lib::rewrite::DuplicateCommitsStats; use jj_lib::rewrite::duplicate_commits; use jj_lib::rewrite::duplicate_commits_onto_parents; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::compute_commit_location; use crate::cli_util::short_commit_hash; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::complete; use crate::ui::Ui; /// Create new changes with the same content as existing ones /// /// When none of the `--onto`, `--insert-after`, or `--insert-before` arguments /// are provided, commits will be duplicated onto their existing parents or onto /// other newly duplicated commits. /// /// When any of the `--onto`, `--insert-after`, or `--insert-before` arguments /// are provided, the roots of the specified commits will be duplicated onto the /// destination indicated by the arguments. Other specified commits will be /// duplicated onto these newly duplicated commits. 
If the `--insert-after` or /// `--insert-before` arguments are provided, the new children indicated by the /// arguments will be rebased onto the heads of the specified commits. /// /// By default, the duplicated commits retain the descriptions of the originals. /// This can be customized with the `templates.duplicate_description` setting. #[derive(clap::Args, Clone, Debug)] pub(crate) struct DuplicateArgs { /// The revision(s) to duplicate (default: @) [aliases: -r] #[arg(value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] revisions_pos: Vec<RevisionArg>, #[arg(short = 'r', hide = true, value_name = "REVSETS")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] revisions_opt: Vec<RevisionArg>, /// The revision(s) to duplicate onto (can be repeated to create a merge /// commit) #[arg( long, visible_alias = "destination", short, visible_short_alias = 'd', value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] onto: Option<Vec<RevisionArg>>, /// The revision(s) to insert after (can be repeated to create a merge /// commit) #[arg( long, short = 'A', visible_alias = "after", conflicts_with = "onto", value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] insert_after: Option<Vec<RevisionArg>>, /// The revision(s) to insert before (can be repeated to create a merge /// commit) #[arg( long, short = 'B', visible_alias = "before", conflicts_with = "onto", value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] insert_before: Option<Vec<RevisionArg>>, } #[instrument(skip_all)] pub(crate) fn cmd_duplicate( ui: &mut Ui, command: &CommandHelper, args: &DuplicateArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; let to_duplicate: Vec<CommitId> = if !args.revisions_pos.is_empty() || !args.revisions_opt.is_empty() { workspace_command 
.parse_union_revsets(ui, &[&*args.revisions_pos, &*args.revisions_opt].concat())? } else { workspace_command.parse_revset(ui, &RevisionArg::AT)? } .evaluate_to_commit_ids()? .try_collect()?; // in reverse topological order if to_duplicate.is_empty() { writeln!(ui.status(), "No revisions to duplicate.")?; return Ok(()); } if to_duplicate.last() == Some(workspace_command.repo().store().root_commit_id()) { return Err(user_error("Cannot duplicate the root commit")); } let location = if args.onto.is_none() && args.insert_after.is_none() && args.insert_before.is_none() { None } else { Some(compute_commit_location( ui, &workspace_command, args.onto.as_deref(), args.insert_after.as_deref(), args.insert_before.as_deref(), "duplicated commits", )?) }; let mut tx = workspace_command.start_transaction(); if let Some((parent_commit_ids, children_commit_ids)) = &location && !parent_commit_ids.is_empty() { for commit_id in &to_duplicate { for parent_commit_id in parent_commit_ids { if tx.repo().index().is_ancestor(commit_id, parent_commit_id)? { writeln!( ui.warning_default(), "Duplicating commit {} as a descendant of itself", short_commit_hash(commit_id) )?; break; } } } for commit_id in &to_duplicate { for child_commit_id in children_commit_ids { if tx.repo().index().is_ancestor(child_commit_id, commit_id)? { writeln!( ui.warning_default(), "Duplicating commit {} as an ancestor of itself", short_commit_hash(commit_id) )?; break; } } } } let new_descs = { let template = tx .settings() .get_string("templates.duplicate_description")?; let parsed = tx.parse_commit_template(ui, &template)?; to_duplicate .iter() .map(|commit_id| -> BackendResult<_> { let commit = tx.repo().store().get_commit(commit_id)?; let output = parsed.format_plain_text(&commit); Ok((commit_id.clone(), output.into_string_lossy())) }) .try_collect()? 
}; let num_to_duplicate = to_duplicate.len(); let DuplicateCommitsStats { duplicated_commits, num_rebased, } = if let Some((parent_commit_ids, children_commit_ids)) = location { duplicate_commits( tx.repo_mut(), &to_duplicate, &new_descs, &parent_commit_ids, &children_commit_ids, ) .block_on()? } else { duplicate_commits_onto_parents(tx.repo_mut(), &to_duplicate, &new_descs)? }; if let Some(mut formatter) = ui.status_formatter() { for (old_id, new_commit) in &duplicated_commits { write!(formatter, "Duplicated {} as ", short_commit_hash(old_id))?; tx.write_commit_summary(formatter.as_mut(), new_commit)?; writeln!(formatter)?; } if num_rebased > 0 { writeln!( ui.status(), "Rebased {num_rebased} commits onto duplicated commits" )?; } } tx.finish(ui, format!("duplicate {num_to_duplicate} commit(s)"))?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/split.rs
cli/src/commands/split.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashMap; use std::io::Write as _; use clap_complete::ArgValueCandidates; use clap_complete::ArgValueCompleter; use jj_lib::backend::CommitId; use jj_lib::commit::Commit; use jj_lib::matchers::Matcher; use jj_lib::merge::Diff; use jj_lib::merge::Merge; use jj_lib::merged_tree::MergedTree; use jj_lib::object_id::ObjectId as _; use jj_lib::rewrite::CommitWithSelection; use jj_lib::rewrite::EmptyBehavior; use jj_lib::rewrite::MoveCommitsLocation; use jj_lib::rewrite::MoveCommitsTarget; use jj_lib::rewrite::RebaseOptions; use jj_lib::rewrite::RebasedCommit; use jj_lib::rewrite::RewriteRefsOptions; use jj_lib::rewrite::move_commits; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::DiffSelector; use crate::cli_util::RevisionArg; use crate::cli_util::WorkspaceCommandHelper; use crate::cli_util::WorkspaceCommandTransaction; use crate::cli_util::compute_commit_location; use crate::cli_util::print_unmatched_explicit_paths; use crate::command_error::CommandError; use crate::command_error::user_error_with_hint; use crate::complete; use crate::description_util::add_trailers; use crate::description_util::description_template; use crate::description_util::edit_description; use crate::description_util::join_message_paragraphs; use crate::ui::Ui; /// Split a revision in two /// /// Starts a [diff editor] on the 
changes in the revision. Edit the right side /// of the diff until it has the content you want in the first commit. Once you /// close the editor, your revision will be split into two commits. /// /// [diff editor]: /// https://docs.jj-vcs.dev/latest/config/#editing-diffs /// /// By default, the selected changes stay in the original commit, and the /// remaining changes go into a new child commit: /// /// ```text /// L L' /// | | /// K (split) => K" (remaining) /// | | /// J K' (selected) /// | /// J /// ``` /// /// With `--parallel/-p`, the two parts become sibling commits instead of /// parent and child: /// /// ```text /// L' /// L / \ /// | K' | (selected) /// K (split) => | K" (remaining) /// | \ / /// J J /// ``` /// /// With `-o`, `-A`, or `-B`, the selected changes are extracted into a new /// commit at the specified location, while the remaining changes stay in place: /// /// ```text /// M M' /// | | /// L L' /// | | /// K (split) => K' (remaining, stays here) /// | | /// J J' /// | /// K" (selected, inserted before J with -B J) /// ``` /// /// If the change you split had a description, you will be asked to enter a /// change description for each commit. If the change did not have a /// description, the second commit will not get a description, and you will be /// asked for a description only for the first commit. /// /// Splitting an empty commit is not supported because the same effect can be /// achieved with `jj new`. #[derive(clap::Args, Clone, Debug)] #[command(verbatim_doc_comment)] pub(crate) struct SplitArgs { /// Interactively choose which parts to split /// /// This is the default if no filesets are provided. 
#[arg(long, short)] interactive: bool, /// Specify diff editor to be used (implies --interactive) #[arg(long, value_name = "NAME")] #[arg(add = ArgValueCandidates::new(complete::diff_editors))] tool: Option<String>, /// The revision to split #[arg(long, short, default_value = "@", value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revision: RevisionArg, /// The revision(s) to rebase the selected changes onto (can be repeated to /// create a merge commit) /// /// Extracts the selected changes into a new commit based on the given /// revision(s). The remaining changes stay in the original commit's /// location. #[arg( long, visible_alias = "destination", short, visible_short_alias = 'd', conflicts_with = "parallel", value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] onto: Option<Vec<RevisionArg>>, /// The revision(s) to insert after (can be repeated to create a merge /// commit) /// /// Extracts the selected changes into a new commit inserted after the /// given revision(s). The remaining changes stay in the original commit's /// location. #[arg( long, short = 'A', visible_alias = "after", conflicts_with = "onto", conflicts_with = "parallel", value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] insert_after: Option<Vec<RevisionArg>>, /// The revision(s) to insert before (can be repeated to create a merge /// commit) /// /// Extracts the selected changes into a new commit inserted before the /// given revision(s). The remaining changes stay in the original commit's /// location. 
#[arg( long, short = 'B', visible_alias = "before", conflicts_with = "onto", conflicts_with = "parallel", value_name = "REVSETS" )] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] insert_before: Option<Vec<RevisionArg>>, /// The change description to use (don't open editor) /// /// Sets the description for the first commit (the one containing the /// selected changes). The second commit keeps the original description. #[arg(long = "message", short, value_name = "MESSAGE")] message_paragraphs: Vec<String>, /// Open an editor to edit the change description /// /// Forces an editor to open when using `--message` to allow the /// message to be edited afterwards. #[arg(long)] editor: bool, /// Split the revision into two parallel revisions instead of a parent and /// child #[arg(long, short)] parallel: bool, /// Files matching any of these filesets are put in the selected changes #[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] #[arg(add = ArgValueCompleter::new(complete::modified_revision_files))] paths: Vec<String>, } impl SplitArgs { /// Resolves the raw SplitArgs into the components necessary to run the /// command. Returns an error if the command cannot proceed. fn resolve( &self, ui: &Ui, workspace_command: &WorkspaceCommandHelper, ) -> Result<ResolvedSplitArgs, CommandError> { let target_commit = workspace_command.resolve_single_rev(ui, &self.revision)?; if target_commit.is_empty(workspace_command.repo().as_ref())? 
{ return Err(user_error_with_hint( format!( "Refusing to split empty commit {}.", target_commit.id().hex() ), "Use `jj new` if you want to create another empty commit.", )); } workspace_command.check_rewritable([target_commit.id()])?; let repo = workspace_command.repo(); let fileset_expression = workspace_command.parse_file_patterns(ui, &self.paths)?; let matcher = fileset_expression.to_matcher(); let diff_selector = workspace_command.diff_selector( ui, self.tool.as_deref(), self.interactive || self.paths.is_empty(), )?; let use_move_flags = self.onto.is_some() || self.insert_after.is_some() || self.insert_before.is_some(); let (new_parent_ids, new_child_ids) = if use_move_flags { compute_commit_location( ui, workspace_command, self.onto.as_deref(), self.insert_after.as_deref(), self.insert_before.as_deref(), "split-out commit", )? } else { Default::default() }; print_unmatched_explicit_paths( ui, workspace_command, &fileset_expression, [ // We check the parent commit to account for deleted files. &target_commit.parent_tree(repo.as_ref())?, &target_commit.tree(), ], )?; Ok(ResolvedSplitArgs { target_commit, matcher, diff_selector, parallel: self.parallel, use_move_flags, new_parent_ids, new_child_ids, }) } } struct ResolvedSplitArgs { target_commit: Commit, matcher: Box<dyn Matcher>, diff_selector: DiffSelector, parallel: bool, use_move_flags: bool, new_parent_ids: Vec<CommitId>, new_child_ids: Vec<CommitId>, } #[instrument(skip_all)] pub(crate) fn cmd_split( ui: &mut Ui, command: &CommandHelper, args: &SplitArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; let ResolvedSplitArgs { target_commit, matcher, diff_selector, parallel, use_move_flags, new_parent_ids, new_child_ids, } = args.resolve(ui, &workspace_command)?; let text_editor = workspace_command.text_editor()?; let mut tx = workspace_command.start_transaction(); // Prompt the user to select the changes they want for the first commit. 
let target = select_diff(ui, &tx, &target_commit, &matcher, &diff_selector)?; // Create the first commit, which includes the changes selected by the user. let first_commit = { let mut commit_builder = tx.repo_mut().rewrite_commit(&target.commit).detach(); commit_builder.set_tree(target.selected_tree.clone()); if use_move_flags { commit_builder.clear_rewrite_source(); // Generate a new change id so that the commit being split doesn't // become divergent. commit_builder.generate_new_change_id(); } let description = if args.message_paragraphs.is_empty() { commit_builder.description().to_owned() } else { join_message_paragraphs(&args.message_paragraphs) }; let description = if !description.is_empty() || args.editor { commit_builder.set_description(description); add_trailers(ui, &tx, &commit_builder)? } else { description }; let description = if args.editor || args.message_paragraphs.is_empty() { commit_builder.set_description(description); let temp_commit = commit_builder.write_hidden()?; let intro = "Enter a description for the selected changes."; let template = description_template(ui, &tx, intro, &temp_commit)?; edit_description(&text_editor, &template)? } else { description }; commit_builder.set_description(description); commit_builder.write(tx.repo_mut())? }; // Create the second commit, which includes everything the user didn't // select. let second_commit = { let target_tree = target.commit.tree(); let new_tree = if parallel { // Merge the original commit tree with its parent using the tree // containing the user selected changes as the base for the merge. // This results in a tree with the changes the user didn't select. let selected_diff = target.diff_with_labels( "parents of split revision", "selected changes for split", "split revision", )?; MergedTree::merge(Merge::from_diffs( ( target_tree, format!("split revision ({})", target.commit.conflict_label()), ), [selected_diff.invert()], )) .block_on()? 
} else { target_tree }; let parents = if parallel { target.commit.parent_ids().to_vec() } else { vec![first_commit.id().clone()] }; let mut commit_builder = tx.repo_mut().rewrite_commit(&target.commit).detach(); commit_builder.set_parents(parents).set_tree(new_tree); let mut show_editor = args.editor; if !use_move_flags { commit_builder.clear_rewrite_source(); // Generate a new change id so that the commit being split doesn't // become divergent. commit_builder.generate_new_change_id(); } let description = if target.commit.description().is_empty() { // If there was no description before, don't ask for one for the // second commit. "".to_string() } else { show_editor = show_editor || args.message_paragraphs.is_empty(); // Just keep the original message unchanged commit_builder.description().to_owned() }; let description = if show_editor { let new_description = add_trailers(ui, &tx, &commit_builder)?; commit_builder.set_description(new_description); let temp_commit = commit_builder.write_hidden()?; let intro = "Enter a description for the remaining changes."; let template = description_template(ui, &tx, intro, &temp_commit)?; edit_description(&text_editor, &template)? } else { description }; commit_builder.set_description(description); commit_builder.write(tx.repo_mut())? }; let (first_commit, second_commit, num_rebased) = if use_move_flags { move_first_commit( &mut tx, &target, first_commit, second_commit, new_parent_ids, new_child_ids, )? } else { rewrite_descendants(&mut tx, &target, first_commit, second_commit, parallel)? 
}; if let Some(mut formatter) = ui.status_formatter() { if num_rebased > 0 { writeln!(formatter, "Rebased {num_rebased} descendant commits")?; } write!(formatter, "Selected changes : ")?; tx.write_commit_summary(formatter.as_mut(), &first_commit)?; write!(formatter, "\nRemaining changes: ")?; tx.write_commit_summary(formatter.as_mut(), &second_commit)?; writeln!(formatter)?; } tx.finish(ui, format!("split commit {}", target.commit.id().hex()))?; Ok(()) } fn move_first_commit( tx: &mut WorkspaceCommandTransaction, target: &CommitWithSelection, mut first_commit: Commit, mut second_commit: Commit, new_parent_ids: Vec<CommitId>, new_child_ids: Vec<CommitId>, ) -> Result<(Commit, Commit, usize), CommandError> { let mut rewritten_commits: HashMap<CommitId, CommitId> = HashMap::new(); rewritten_commits.insert(target.commit.id().clone(), second_commit.id().clone()); tx.repo_mut() .transform_descendants(vec![target.commit.id().clone()], async |rewriter| { let old_commit_id = rewriter.old_commit().id().clone(); let new_commit = rewriter.rebase().await?.write()?; rewritten_commits.insert(old_commit_id, new_commit.id().clone()); Ok(()) })?; let new_parent_ids: Vec<_> = new_parent_ids .iter() .map(|commit_id| rewritten_commits.get(commit_id).unwrap_or(commit_id)) .cloned() .collect(); let new_child_ids: Vec<_> = new_child_ids .iter() .map(|commit_id| rewritten_commits.get(commit_id).unwrap_or(commit_id)) .cloned() .collect(); let stats = move_commits( tx.repo_mut(), &MoveCommitsLocation { new_parent_ids, new_child_ids, target: MoveCommitsTarget::Commits(vec![first_commit.id().clone()]), }, &RebaseOptions { empty: EmptyBehavior::Keep, rewrite_refs: RewriteRefsOptions { delete_abandoned_bookmarks: false, }, simplify_ancestor_merge: false, }, )?; // 1 for the transformation of the original commit to the second commit // that was inserted in rewritten_commits let mut num_new_rebased = 1; if let Some(RebasedCommit::Rewritten(commit)) = stats.rebased_commits.get(first_commit.id()) { 
first_commit = commit.clone(); num_new_rebased += 1; } if let Some(RebasedCommit::Rewritten(commit)) = stats.rebased_commits.get(second_commit.id()) { second_commit = commit.clone(); } let num_rebased = rewritten_commits.len() + stats.rebased_commits.len() // don't count the commit generated by the split in the rebased commits - num_new_rebased // only count once a commit that may have been rewritten twice in the process - rewritten_commits .iter() .filter(|(_, rewritten)| stats.rebased_commits.contains_key(rewritten)) .count(); Ok((first_commit, second_commit, num_rebased)) } fn rewrite_descendants( tx: &mut WorkspaceCommandTransaction, target: &CommitWithSelection, first_commit: Commit, second_commit: Commit, parallel: bool, ) -> Result<(Commit, Commit, usize), CommandError> { let legacy_bookmark_behavior = tx.settings().get_bool("split.legacy-bookmark-behavior")?; if legacy_bookmark_behavior { // Mark the commit being split as rewritten to the second commit. This // moves any bookmarks pointing to the target commit to the second // commit. tx.repo_mut() .set_rewritten_commit(target.commit.id().clone(), second_commit.id().clone()); } let mut num_rebased = 0; tx.repo_mut().transform_descendants( vec![target.commit.id().clone()], async |mut rewriter| { num_rebased += 1; if parallel && legacy_bookmark_behavior { // The old_parent is the second commit due to the rewrite above. rewriter .replace_parent(second_commit.id(), [first_commit.id(), second_commit.id()]); } else if parallel { rewriter.replace_parent(first_commit.id(), [first_commit.id(), second_commit.id()]); } else { rewriter.replace_parent(first_commit.id(), [second_commit.id()]); } rewriter.rebase().await?.write()?; Ok(()) }, )?; // Move the working copy commit (@) to the second commit for any workspaces // where the target commit is the working copy commit. 
for (name, working_copy_commit) in tx.base_repo().clone().view().wc_commit_ids() { if working_copy_commit == target.commit.id() { tx.repo_mut().edit(name.clone(), &second_commit)?; } } Ok((first_commit, second_commit, num_rebased)) } /// Prompts the user to select the content they want in the first commit and /// returns the target commit and the tree corresponding to the selection. fn select_diff( ui: &Ui, tx: &WorkspaceCommandTransaction, target_commit: &Commit, matcher: &dyn Matcher, diff_selector: &DiffSelector, ) -> Result<CommitWithSelection, CommandError> { let format_instructions = || { format!( "\ You are splitting a commit into two: {} The diff initially shows the changes in the commit you're splitting. Adjust the right side until it shows the contents you want to split into the new commit. The changes that are not selected will replace the original commit. ", tx.format_commit_summary(target_commit) ) }; let parent_tree = target_commit.parent_tree(tx.repo())?; let selected_tree = diff_selector.select( Diff::new(&parent_tree, &target_commit.tree()), matcher, format_instructions, )?; let selection = CommitWithSelection { commit: target_commit.clone(), selected_tree, parent_tree, }; if selection.is_full_selection() { writeln!( ui.warning_default(), "All changes have been selected, so the original revision will become empty" )?; } else if selection.is_empty_selection() { writeln!( ui.warning_default(), "No changes have been selected, so the new revision will be empty" )?; } Ok(selection) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/new.rs
cli/src/commands/new.rs
// Copyright 2020 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::collections::HashSet;
use std::io::Write as _;

use clap_complete::ArgValueCompleter;
use itertools::Itertools as _;
use jj_lib::backend::CommitId;
use jj_lib::repo::Repo as _;
use jj_lib::rewrite::merge_commit_trees;
use jj_lib::rewrite::rebase_commit;
use pollster::FutureExt as _;
use tracing::instrument;

use crate::cli_util::CommandHelper;
use crate::cli_util::RevisionArg;
use crate::cli_util::compute_commit_location;
use crate::cli_util::merge_args_with;
use crate::command_error::CommandError;
use crate::complete;
use crate::description_util::add_trailers;
use crate::description_util::join_message_paragraphs;
use crate::ui::Ui;

/// Create a new, empty change and (by default) edit it in the working copy
///
/// By default, `jj` will edit the new change, making the [working copy]
/// represent the new commit. This can be avoided with `--no-edit`.
///
/// Note that you can create a merge commit by specifying multiple revisions as
/// argument. For example, `jj new @ main` will create a new commit with the
/// working copy and the `main` bookmark as parents.
///
/// [working copy]:
/// https://docs.jj-vcs.dev/latest/working-copy/
#[derive(clap::Args, Clone, Debug)]
#[command(group(clap::ArgGroup::new("revisions").multiple(true)))]
pub(crate) struct NewArgs {
    /// Parent(s) of the new change [default: @] [aliases: -o, -r]
    #[arg(group = "revisions", value_name = "REVSETS")]
    #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))]
    revisions_pos: Option<Vec<RevisionArg>>,
    // Hidden option-style spelling of the same `revisions` group, so that
    // e.g. `jj new -r foo` still parses. NOTE(review): inferred from
    // `hide = true` plus the shared clap group — confirm with CLI tests.
    #[arg(
        short = 'o',
        group = "revisions",
        hide = true,
        short_aliases = ['d', 'r'],
        value_name = "REVSETS",
    )]
    #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))]
    revisions_opt: Option<Vec<RevisionArg>>,
    /// The change description to use
    #[arg(long = "message", short, value_name = "MESSAGE")]
    message_paragraphs: Vec<String>,
    /// Do not edit the newly created change
    #[arg(long, conflicts_with = "_edit")]
    no_edit: bool,
    /// No-op flag to pair with --no-edit
    #[arg(long, hide = true)]
    _edit: bool,
    /// Insert the new change after the given commit(s)
    ///
    /// Example: `jj new --insert-after A` creates a new change between `A` and
    /// its children:
    ///
    /// ```text
    /// B C
    /// \ /
    /// B C => @
    /// \ / |
    /// A A
    /// ```
    ///
    /// Specifying `--insert-after` multiple times will relocate all children of
    /// the given commits.
    ///
    /// Example: `jj new --insert-after A --insert-after X` creates a change
    /// with `A` and `X` as parents, and rebases all children on top of the new
    /// change:
    ///
    /// ```text
    /// B Y
    /// \ /
    /// B Y => @
    /// | | / \
    /// A X A X
    /// ```
    #[arg(
        long,
        short = 'A',
        visible_alias = "after",
        conflicts_with = "revisions",
        value_name = "REVSETS",
        verbatim_doc_comment
    )]
    #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))]
    insert_after: Option<Vec<RevisionArg>>,
    /// Insert the new change before the given commit(s)
    ///
    /// Example: `jj new --insert-before C` creates a new change between `C` and
    /// its parents:
    ///
    /// ```text
    /// C
    /// |
    /// C => @
    /// / \ / \
    /// A B A B
    /// ```
    ///
    /// `--insert-after` and `--insert-before` can be combined.
    ///
    /// Example: `jj new --insert-after A --insert-before D`:
    ///
    /// ```text
    ///
    /// D D
    /// | / \
    /// C | C
    /// | => @ |
    /// B | B
    /// | \ /
    /// A A
    /// ```
    ///
    /// Similar to `--insert-after`, you can specify `--insert-before` multiple
    /// times.
    #[arg(
        long,
        short = 'B',
        visible_alias = "before",
        conflicts_with = "revisions",
        value_name = "REVSETS",
        verbatim_doc_comment
    )]
    #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))]
    insert_before: Option<Vec<RevisionArg>>,
}

// Entry point for `jj new`: creates an empty commit on the requested
// parents, optionally splicing it between existing commits via
// --insert-after/--insert-before, then (unless --no-edit) edits it.
#[instrument(skip_all)]
pub(crate) fn cmd_new(
    ui: &mut Ui,
    command: &CommandHelper,
    args: &NewArgs,
) -> Result<(), CommandError> {
    let mut workspace_command = command.workspace_helper(ui)?;
    // Reconcile the positional and hidden option forms of the `revisions`
    // group. With neither form and no --insert-* flags, default to "@".
    let revision_args = match (&args.revisions_pos, &args.revisions_opt) {
        (None, None) => (args.insert_before.is_none() && args.insert_after.is_none())
            .then(|| vec![RevisionArg::AT]),
        (None, Some(args)) | (Some(args), None) => Some(args.clone()),
        // Both spellings were used; combine them based on the parsed matches.
        (Some(pos), Some(opt)) => Some(merge_args_with(
            command.matches().subcommand_matches("new").unwrap(),
            &[("revisions_pos", pos), ("revisions_opt", opt)],
            |_id, value| value.clone(),
        )),
    };
    // Resolve where the new commit goes: its parents, and any existing
    // commits that must become its children (from --insert-*).
    let (parent_commit_ids, child_commit_ids) = compute_commit_location(
        ui,
        &workspace_command,
        revision_args.as_deref(),
        args.insert_after.as_deref(),
        args.insert_before.as_deref(),
        "new commit",
    )?;
    let parent_commits: Vec<_> = parent_commit_ids
        .iter()
        .map(|commit_id| workspace_command.repo().store().get_commit(commit_id))
        .try_collect()?;
    // Bookmark advancement only applies to the plain `jj new PARENT` form:
    // a single parent and no --insert-* flags.
    let mut advance_bookmarks_target = None;
    let mut advanceable_bookmarks = vec![];
    if args.insert_before.is_none() && args.insert_after.is_none() {
        let should_advance_bookmarks = parent_commits.len() == 1;
        if should_advance_bookmarks {
            advance_bookmarks_target = Some(parent_commit_ids[0].clone());
            advanceable_bookmarks =
                workspace_command.get_advanceable_bookmarks(ui, parent_commits[0].parent_ids())?;
        }
    };
    let parent_commit_ids_set: HashSet<CommitId> = parent_commit_ids.iter().cloned().collect();
    let mut tx = workspace_command.start_transaction();
    // The new commit's tree is the merge of all its parents' trees.
    let merged_tree = merge_commit_trees(tx.repo(), &parent_commits).block_on()?;
    let mut commit_builder = tx
        .repo_mut()
        .new_commit(parent_commit_ids, merged_tree)
        .detach();
    let mut description = join_message_paragraphs(&args.message_paragraphs);
    if !description.is_empty() {
        // The first trailer would become the first line of the description.
        // Also, a commit with no description is treated in a special way in jujutsu: it
        // can be discarded as soon as it's no longer the working copy. Adding a
        // trailer to an empty description would break that logic.
        commit_builder.set_description(description);
        description = add_trailers(ui, &tx, &commit_builder)?;
    }
    commit_builder.set_description(&description);
    let new_commit = commit_builder.write(tx.repo_mut())?;
    let child_commits: Vec<_> = child_commit_ids
        .iter()
        .map(|commit_id| tx.repo().store().get_commit(commit_id))
        .try_collect()?;
    let mut num_rebased = 0;
    // Rebase each requested child onto the new commit, dropping its direct
    // edges to the new commit's parents (the new commit now sits between).
    for child_commit in child_commits {
        let new_parent_ids = child_commit
            .parent_ids()
            .iter()
            .filter(|id| !parent_commit_ids_set.contains(id))
            .cloned()
            .chain(std::iter::once(new_commit.id().clone()))
            .collect_vec();
        rebase_commit(tx.repo_mut(), child_commit, new_parent_ids).block_on()?;
        num_rebased += 1;
    }
    num_rebased += tx.repo_mut().rebase_descendants()?;
    if args.no_edit {
        if let Some(mut formatter) = ui.status_formatter() {
            write!(formatter, "Created new commit ")?;
            tx.write_commit_summary(formatter.as_mut(), &new_commit)?;
            writeln!(formatter)?;
        }
    } else {
        tx.edit(&new_commit)?;
        // The description of the new commit will be printed by tx.finish()
    }
    if num_rebased > 0 {
        writeln!(ui.status(), "Rebased {num_rebased} descendant commits")?;
    }
    // Does nothing if there's no bookmarks to advance.
    if let Some(target) = advance_bookmarks_target {
        tx.advance_bookmarks(advanceable_bookmarks, &target)?;
    }
    tx.finish(ui, "new empty commit")?;
    Ok(())
}
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/util/gc.rs
cli/src/commands/util/gc.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::slice; use std::time::Duration; use std::time::SystemTime; use jj_lib::repo::Repo as _; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::ui::Ui; /// Run backend-dependent garbage collection. /// /// To garbage-collect old operations and the commits/objects referenced by /// them, run `jj op abandon ..<some old operation>` before `jj util gc`. #[derive(clap::Args, Clone, Debug)] pub struct UtilGcArgs { /// Time threshold /// /// By default, only obsolete objects and operations older than 2 weeks are /// pruned. /// /// Only the string "now" can be passed to this parameter. Support for /// arbitrary absolute and relative timestamps will come in a subsequent /// release. 
#[arg(long)] expire: Option<String>, } pub fn cmd_util_gc( ui: &mut Ui, command: &CommandHelper, args: &UtilGcArgs, ) -> Result<(), CommandError> { if !command.is_at_head_operation() { return Err(user_error( "Cannot garbage collect from a non-head operation", )); } let keep_newer = match args.expire.as_deref() { None => SystemTime::now() - Duration::from_secs(14 * 86400), Some("now") => SystemTime::now() - Duration::ZERO, _ => return Err(user_error("--expire only accepts 'now'")), }; let workspace_command = command.workspace_helper(ui)?; let repo = workspace_command.repo(); repo.op_store() .gc(slice::from_ref(repo.op_id()), keep_newer)?; repo.store().gc(repo.index(), keep_newer)?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/util/completion.rs
cli/src/commands/util/completion.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write as _; use clap::Command; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::ui::Ui; // Using an explicit `doc` attribute prevents rustfmt from mangling the list // formatting without disabling rustfmt for the entire struct. #[doc = r#"Print a command-line-completion script Apply it by running one of these: - Bash: `source <(jj util completion bash)` - Fish: `jj util completion fish | source` - Nushell: ```nu jj util completion nushell | save -f "completions-jj.nu" use "completions-jj.nu" * # Or `source "completions-jj.nu"` ``` - Zsh: ```shell autoload -U compinit compinit source <(jj util completion zsh) ``` See the docs on [command-line completion] for more details. 
[command-line completion]: https://docs.jj-vcs.dev/latest/install-and-setup/#command-line-completion "#] #[derive(clap::Args, Clone, Debug)] #[command(verbatim_doc_comment)] pub struct UtilCompletionArgs { shell: ShellCompletion, } pub fn cmd_util_completion( ui: &mut Ui, command: &CommandHelper, args: &UtilCompletionArgs, ) -> Result<(), CommandError> { let mut app = command.app().clone(); let buf = args.shell.generate(&mut app); ui.stdout().write_all(&buf)?; Ok(()) } /// Available shell completions #[derive(clap::ValueEnum, Clone, Copy, Debug, Eq, Hash, PartialEq)] enum ShellCompletion { Bash, Elvish, Fish, Nushell, PowerShell, Zsh, } impl ShellCompletion { fn generate(&self, cmd: &mut Command) -> Vec<u8> { use clap_complete::Shell; use clap_complete::generate; use clap_complete_nushell::Nushell; let mut buf = Vec::new(); let bin_name = "jj"; match self { Self::Bash => generate(Shell::Bash, cmd, bin_name, &mut buf), Self::Elvish => generate(Shell::Elvish, cmd, bin_name, &mut buf), Self::Fish => generate(Shell::Fish, cmd, bin_name, &mut buf), Self::Nushell => generate(Nushell, cmd, bin_name, &mut buf), Self::PowerShell => generate(Shell::PowerShell, cmd, bin_name, &mut buf), Self::Zsh => generate(Shell::Zsh, cmd, bin_name, &mut buf), } buf } }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/util/install_man_pages.rs
cli/src/commands/util/install_man_pages.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::path::PathBuf; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::ui::Ui; /// Install Jujutsu's manpages to the provided path #[derive(clap::Args, Clone, Debug)] pub struct UtilInstallManPagesArgs { /// The path where manpages will installed. An example path might be /// `/usr/share/man`. The provided path will be appended with `man1`, /// etc., as appropriate path: PathBuf, } pub fn cmd_util_install_man_pages( _ui: &mut Ui, command: &CommandHelper, args: &UtilInstallManPagesArgs, ) -> Result<(), CommandError> { let man1_dir = args.path.join("man1"); std::fs::create_dir_all(&man1_dir)?; let app = command.app().clone(); clap_mangen::generate_to(app, man1_dir)?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/util/markdown_help.rs
cli/src/commands/util/markdown_help.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write as _; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::ui::Ui; /// Print the CLI help for all subcommands in Markdown #[derive(clap::Args, Clone, Debug)] pub struct UtilMarkdownHelp {} pub fn cmd_util_markdown_help( ui: &mut Ui, command: &CommandHelper, _args: &UtilMarkdownHelp, ) -> Result<(), CommandError> { // If we ever need more flexibility, the code of `clap_markdown` is simple and // readable. We could reimplement the parts we need without trouble. let markdown = clap_markdown::help_markdown_command(command.app()).into_bytes(); ui.stdout().write_all(&markdown)?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/util/mod.rs
cli/src/commands/util/mod.rs
// Copyright 2020 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// One module per `jj util` subcommand; each exposes an `…Args` struct and a
// matching `cmd_util_…` entry point.
mod completion;
mod config_schema;
mod exec;
mod gc;
mod install_man_pages;
mod markdown_help;

use clap::Subcommand;
use tracing::instrument;

use self::completion::UtilCompletionArgs;
use self::completion::cmd_util_completion;
use self::config_schema::UtilConfigSchemaArgs;
use self::config_schema::cmd_util_config_schema;
use self::exec::UtilExecArgs;
use self::exec::cmd_util_exec;
use self::gc::UtilGcArgs;
use self::gc::cmd_util_gc;
use self::install_man_pages::UtilInstallManPagesArgs;
use self::install_man_pages::cmd_util_install_man_pages;
use self::markdown_help::UtilMarkdownHelp;
use self::markdown_help::cmd_util_markdown_help;
use crate::cli_util::CommandHelper;
use crate::command_error::CommandError;
use crate::ui::Ui;

/// Infrequently used commands such as for generating shell completions
#[derive(Subcommand, Clone, Debug)]
pub(crate) enum UtilCommand {
    Completion(UtilCompletionArgs),
    ConfigSchema(UtilConfigSchemaArgs),
    Exec(UtilExecArgs),
    Gc(UtilGcArgs),
    InstallManPages(UtilInstallManPagesArgs),
    MarkdownHelp(UtilMarkdownHelp),
}

// Dispatches to the handler for the selected `jj util` subcommand.
#[instrument(skip_all)]
pub(crate) fn cmd_util(
    ui: &mut Ui,
    command: &CommandHelper,
    subcommand: &UtilCommand,
) -> Result<(), CommandError> {
    match subcommand {
        UtilCommand::Completion(args) => cmd_util_completion(ui, command, args),
        UtilCommand::ConfigSchema(args) => cmd_util_config_schema(ui, command, args),
        UtilCommand::Exec(args) => cmd_util_exec(ui, command, args),
        UtilCommand::Gc(args) => cmd_util_gc(ui, command, args),
        UtilCommand::InstallManPages(args) => cmd_util_install_man_pages(ui, command, args),
        UtilCommand::MarkdownHelp(args) => cmd_util_markdown_help(ui, command, args),
    }
}
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/util/config_schema.rs
cli/src/commands/util/config_schema.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write as _; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::config::CONFIG_SCHEMA; use crate::ui::Ui; /// Print the JSON schema for the jj TOML config format. #[derive(clap::Args, Clone, Debug)] pub struct UtilConfigSchemaArgs {} pub fn cmd_util_config_schema( ui: &mut Ui, _command: &CommandHelper, _args: &UtilConfigSchemaArgs, ) -> Result<(), CommandError> { ui.stdout().write_all(CONFIG_SCHEMA.as_bytes())?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/util/exec.rs
cli/src/commands/util/exec.rs
// Copyright 2024 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::command_error::user_error_with_message; use crate::ui::Ui; /// Execute an external command via jj /// /// This command will have access to the environment variable JJ_WORKSPACE_ROOT. /// /// This is useful for arbitrary aliases. /// /// !! WARNING !! /// /// The following technique just provides a convenient syntax for running /// arbitrary code on your system. Using it irresponsibly may cause damage /// ranging from breaking the behavior of `jj undo` to wiping your file system. /// Exercise the same amount of caution while writing these aliases as you would /// when typing commands into the terminal! /// /// This feature may be removed or replaced by an embedded scripting language in /// the future. /// /// Let's assume you have a script called "my-jj-script" in you $PATH and you /// would like to execute it as "jj my-script". You would add the following line /// to your configuration file to achieve that: /// /// ```toml /// [aliases] /// my-script = ["util", "exec", "--", "my-jj-script"] /// # ^^^^ /// # This makes sure that flags are passed to your script instead of parsed by jj. 
/// ``` /// /// If you don't want to manage your script as a separate file, you can even /// inline it into your config file: /// /// ```toml /// [aliases] /// my-inline-script = ["util", "exec", "--", "bash", "-c", """ /// set -euo pipefail /// echo "Look Ma, everything in one file!" /// echo "args: $@" /// """, ""] /// # ^^ /// # This last empty string will become "$0" in bash, so your actual arguments /// # are all included in "$@" and start at "$1" as expected. /// ``` /// /// > Note: Shebangs (e.g. `#!/usr/bin/env`) aren't necessary since you're /// > already explicitly passing your script into the right shell. #[derive(clap::Args, Clone, Debug)] #[command(verbatim_doc_comment)] pub(crate) struct UtilExecArgs { /// External command to execute command: String, /// Arguments to pass to the external command #[arg(value_hint = clap::ValueHint::FilePath)] args: Vec<String>, } pub fn cmd_util_exec( _: &mut Ui, command: &CommandHelper, args: &UtilExecArgs, ) -> Result<(), CommandError> { let workspace_root = command .workspace_loader() .ok() .map(|loader| loader.workspace_root()); let mut cmd = std::process::Command::new(&args.command); cmd.args(&args.args); if let Some(workspace_root) = workspace_root { cmd.env("JJ_WORKSPACE_ROOT", workspace_root); } let status = cmd.status().map_err(|err| { user_error_with_message( format!("Failed to execute external command '{}'", &args.command), err, ) })?; // Try to match the exit status of the executed process. if let Some(exit_code) = status.code() { std::process::exit(exit_code); } if !status.success() { // signal return Err(user_error(format!( "External command was terminated by {status}" ))); } Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/file/untrack.rs
cli/src/commands/file/untrack.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write as _; use clap_complete::ArgValueCompleter; use itertools::Itertools as _; use jj_lib::merge::Merge; use jj_lib::merged_tree::MergedTreeBuilder; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::export_working_copy_changes_to_git; use crate::cli_util::print_snapshot_stats; use crate::cli_util::print_unmatched_explicit_paths; use crate::command_error::CommandError; use crate::command_error::user_error_with_hint; use crate::complete; use crate::ui::Ui; /// Stop tracking specified paths in the working copy #[derive(clap::Args, Clone, Debug)] pub(crate) struct FileUntrackArgs { /// Paths to untrack. They must already be ignored. /// /// The paths could be ignored via a .gitignore or .git/info/exclude (in /// colocated workspaces). 
#[arg(required = true, value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] #[arg(add = ArgValueCompleter::new(complete::all_revision_files))] paths: Vec<String>, } #[instrument(skip_all)] pub(crate) fn cmd_file_untrack( ui: &mut Ui, command: &CommandHelper, args: &FileUntrackArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?; let matcher = fileset_expression.to_matcher(); let auto_tracking_matcher = workspace_command.auto_tracking_matcher(ui)?; let options = workspace_command.snapshot_options_with_start_tracking_matcher(&auto_tracking_matcher)?; let working_copy_shared_with_git = workspace_command.working_copy_shared_with_git(); let mut tx = workspace_command.start_transaction().into_inner(); let (mut locked_ws, wc_commit) = workspace_command.start_working_copy_mutation()?; // Create a new tree without the unwanted files let mut tree_builder = MergedTreeBuilder::new(wc_commit.tree()); let wc_tree = wc_commit.tree(); for (path, _value) in wc_tree.entries_matching(matcher.as_ref()) { tree_builder.set_or_remove(path, Merge::absent()); } let new_tree = tree_builder.write_tree()?; let new_commit = tx .repo_mut() .rewrite_commit(&wc_commit) .set_tree(new_tree) .write()?; // Reset the working copy to the new commit locked_ws.locked_wc().reset(&new_commit).block_on()?; // Commit the working copy again so we can inform the user if paths couldn't be // untracked because they're not ignored. 
let (new_wc_tree, stats) = locked_ws.locked_wc().snapshot(&options).block_on()?; if new_wc_tree.tree_ids() != new_commit.tree_ids() { let added_back = new_wc_tree.entries_matching(matcher.as_ref()).collect_vec(); if !added_back.is_empty() { drop(locked_ws); let path = &added_back[0].0; let ui_path = workspace_command.format_file_path(path); let message = if added_back.len() > 1 { format!( "'{}' and {} other files are not ignored.", ui_path, added_back.len() - 1 ) } else { format!("'{ui_path}' is not ignored.") }; return Err(user_error_with_hint( message, "Files that are not ignored will be added back by the next command. Make sure they're ignored, then try again.", )); } else { // This means there were some concurrent changes made in the working copy. We // don't want to mix those in, so reset the working copy again. locked_ws.locked_wc().reset(&new_commit).block_on()?; } } let num_rebased = tx.repo_mut().rebase_descendants()?; if num_rebased > 0 { writeln!(ui.status(), "Rebased {num_rebased} descendant commits")?; } if working_copy_shared_with_git { export_working_copy_changes_to_git(ui, tx.repo_mut(), &wc_tree, &new_commit.tree())?; } let repo = tx.commit("untrack paths")?; locked_ws.finish(repo.op_id().clone())?; print_unmatched_explicit_paths(ui, &workspace_command, &fileset_expression, [&wc_tree])?; print_snapshot_stats(ui, &stats, workspace_command.env().path_converter())?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/file/track.rs
cli/src/commands/file/track.rs
// Copyright 2024 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io; use std::io::Write as _; use indoc::writedoc; use itertools::Itertools as _; use jj_lib::repo_path::RepoPathUiConverter; use jj_lib::working_copy::SnapshotStats; use jj_lib::working_copy::UntrackedReason; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::print_untracked_files; use crate::command_error::CommandError; use crate::ui::Ui; /// Start tracking specified paths in the working copy /// /// Without arguments, all paths that are not ignored will be tracked. /// /// By default, new files in the working copy are automatically tracked, so /// this command has no effect. /// You can configure which paths to automatically track by setting /// `snapshot.auto-track` (e.g. to `"none()"` or `"glob:**/*.rs"`). Files that /// don't match the pattern can be manually tracked using this command. The /// default pattern is `all()`. #[derive(clap::Args, Clone, Debug)] pub(crate) struct FileTrackArgs { /// Paths to track #[arg(required = true, value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] paths: Vec<String>, /// Track paths even if they're ignored or too large /// /// By default, `jj file track` will not track files that are ignored by /// .gitignore or exceed the maximum file size. This flag overrides those /// restrictions, explicitly tracking the specified paths. 
#[arg(long)] include_ignored: bool, } #[instrument(skip_all)] pub(crate) fn cmd_file_track( ui: &mut Ui, command: &CommandHelper, args: &FileTrackArgs, ) -> Result<(), CommandError> { let (mut workspace_command, auto_stats) = command.workspace_helper_with_stats(ui)?; let matcher = workspace_command .parse_file_patterns(ui, &args.paths)? .to_matcher(); let mut options = workspace_command.snapshot_options_with_start_tracking_matcher(&matcher)?; if args.include_ignored { options.force_tracking_matcher = &matcher; } let mut tx = workspace_command.start_transaction().into_inner(); let (mut locked_ws, _wc_commit) = workspace_command.start_working_copy_mutation()?; let (_tree, track_stats) = locked_ws.locked_wc().snapshot(&options).block_on()?; let num_rebased = tx.repo_mut().rebase_descendants()?; if num_rebased > 0 { writeln!(ui.status(), "Rebased {num_rebased} descendant commits")?; } let repo = tx.commit("track paths")?; locked_ws.finish(repo.op_id().clone())?; print_track_snapshot_stats( ui, auto_stats, track_stats, workspace_command.env().path_converter(), )?; Ok(()) } pub fn print_track_snapshot_stats( ui: &Ui, auto_stats: SnapshotStats, track_stats: SnapshotStats, path_converter: &RepoPathUiConverter, ) -> io::Result<()> { let mut merged_untracked_paths = auto_stats.untracked_paths; for (path, reason) in track_stats .untracked_paths .into_iter() // focus on files that are now tracked with `file track` .filter(|(_, reason)| !matches!(reason, UntrackedReason::FileNotAutoTracked)) { // if the path was previously rejected because it wasn't tracked, update its // reason merged_untracked_paths.insert(path, reason); } print_untracked_files(ui, &merged_untracked_paths, path_converter)?; let (large_files, sizes): (Vec<_>, Vec<_>) = merged_untracked_paths .iter() .filter_map(|(path, reason)| match reason { UntrackedReason::FileTooLarge { size, .. 
} => Some((path, *size)), UntrackedReason::FileNotAutoTracked => None, }) .unzip(); if let Some(size) = sizes.iter().max() { let large_files_list = large_files .iter() .map(|path| path_converter.format_file_path(path)) .join(" "); writedoc!( ui.hint_default(), r" This is to prevent large files from being added by accident. You can fix this by: - Adding the file to `.gitignore` - Run `jj config set --repo snapshot.max-new-file-size {size}` This will increase the maximum file size allowed for new files, in this repository only. - Run `jj --config snapshot.max-new-file-size={size} file track {large_files_list}` This will increase the maximum file size allowed for new files, for this command only. - Run `jj file track --include-ignored {large_files_list}` This will track the files even though they exceed the size limit. " )?; } Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/file/list.rs
cli/src/commands/file/list.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCandidates; use clap_complete::ArgValueCompleter; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::print_unmatched_explicit_paths; use crate::command_error::CommandError; use crate::commit_templater::TreeEntry; use crate::complete; use crate::templater::TemplateRenderer; use crate::ui::Ui; /// List files in a revision #[derive(clap::Args, Clone, Debug)] pub(crate) struct FileListArgs { /// The revision to list files in #[arg(long, short, default_value = "@", value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] revision: RevisionArg, /// Render each file entry using the given template /// /// All 0-argument methods of the [`TreeEntry` type] are available as /// keywords in the template expression. See [`jj help -k templates`] for /// more information. 
/// /// [`TreeEntry` type]: /// https://docs.jj-vcs.dev/latest/templates/#treeentry-type /// /// [`jj help -k templates`]: /// https://docs.jj-vcs.dev/latest/templates/ #[arg(long, short = 'T')] #[arg(add = ArgValueCandidates::new(complete::template_aliases))] template: Option<String>, /// Only list files matching these prefixes (instead of all files) #[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] #[arg(add = ArgValueCompleter::new(complete::all_revision_files))] paths: Vec<String>, } #[instrument(skip_all)] pub(crate) fn cmd_file_list( ui: &mut Ui, command: &CommandHelper, args: &FileListArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let commit = workspace_command.resolve_single_rev(ui, &args.revision)?; let tree = commit.tree(); let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?; let matcher = fileset_expression.to_matcher(); let template: TemplateRenderer<TreeEntry> = { let language = workspace_command.commit_template_language(); let text = match &args.template { Some(value) => value.to_owned(), None => workspace_command.settings().get("templates.file_list")?, }; workspace_command .parse_template(ui, &language, &text)? .labeled(["file_list"]) }; ui.request_pager(); let mut formatter = ui.stdout_formatter(); for (path, value) in tree.entries_matching(matcher.as_ref()) { let entry = TreeEntry { path, value: value?, }; template.format(&entry, formatter.as_mut())?; } print_unmatched_explicit_paths(ui, &workspace_command, &fileset_expression, [&tree])?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/file/search.rs
cli/src/commands/file/search.rs
// Copyright 2025 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCompleter; use jj_lib::conflicts::MaterializedTreeValue; use jj_lib::conflicts::materialize_tree_value; use jj_lib::repo::Repo as _; use jj_lib::str_util::StringPattern; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::print_unmatched_explicit_paths; use crate::command_error::CommandError; use crate::command_error::cli_error; use crate::complete; use crate::ui::Ui; /// Search for content in files /// /// Lists files containing the specified pattern. /// /// This is an early version of the command. It only supports glob matching for /// now, it doesn't search files concurrently, and it doesn't indicate where in /// the file the match was found. #[derive(clap::Args, Clone, Debug)] pub(crate) struct FileSearchArgs { /// The revision to search files in #[arg(long, short, default_value = "@", value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] revision: RevisionArg, /// The glob pattern to search for /// /// The whole line must match the pattern, so you may want to pass something /// like `--pattern '*foo*'`. 
#[arg(long, short, value_name = "PATTERN")] pattern: String, /// Only search files matching these prefixes (instead of all files) #[arg(value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] #[arg(add = ArgValueCompleter::new(complete::all_revision_files))] paths: Vec<String>, } #[instrument(skip_all)] pub(crate) fn cmd_file_search( ui: &mut Ui, command: &CommandHelper, args: &FileSearchArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let commit = workspace_command.resolve_single_rev(ui, &args.revision)?; let tree = commit.tree(); let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?; let file_matcher = fileset_expression.to_matcher(); ui.request_pager(); let mut formatter = ui.stdout_formatter(); let store = workspace_command.repo().store().clone(); // TODO: Support other patterns than glob let pattern = StringPattern::glob(&args.pattern).map_err(|err| cli_error(err.to_string()))?; let pattern_matcher = pattern.to_matcher(); // TODO: Read files concurrently (depending on backend) for (path, value) in tree.entries_matching(file_matcher.as_ref()) { let value = value?; let materialized = materialize_tree_value(store.as_ref(), &path, value, tree.labels()).block_on()?; match materialized { MaterializedTreeValue::Absent => panic!("Entry for absent path in file listing"), MaterializedTreeValue::AccessDenied(error) => { let ui_path = workspace_command.format_file_path(&path); writeln!( ui.warning_default(), "Skipping '{ui_path}' due to permission error: {error}" )?; } MaterializedTreeValue::File(mut materialized_file_value) => { let content = materialized_file_value.read_all(&path).block_on()?; // TODO: Make output templated let ui_path = workspace_command.format_file_path(&path); if let Some(_line) = pattern_matcher.match_lines(&content).next() { // TODO: Optionally also print the line and line number writeln!(formatter, "{ui_path}")?; } } MaterializedTreeValue::Symlink { .. 
} => {} MaterializedTreeValue::FileConflict(materialized_file_value) => { let ui_path = workspace_command.format_file_path(&path); for content in materialized_file_value.contents.adds() { if let Some(_line) = pattern_matcher.match_lines(content).next() { // TODO: Optionally also print the conflict side, line and line number writeln!(formatter, "{ui_path}")?; break; } } } MaterializedTreeValue::OtherConflict { .. } => {} MaterializedTreeValue::GitSubmodule(_) => {} MaterializedTreeValue::Tree(_) => panic!("Entry for tree in file listing"), } } print_unmatched_explicit_paths(ui, &workspace_command, &fileset_expression, [&tree])?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/file/mod.rs
cli/src/commands/file/mod.rs
// Copyright 2020-2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. mod annotate; mod chmod; mod list; mod search; mod show; mod track; mod untrack; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::ui::Ui; /// File operations. #[derive(clap::Subcommand, Clone, Debug)] pub enum FileCommand { Annotate(annotate::FileAnnotateArgs), Chmod(chmod::FileChmodArgs), List(list::FileListArgs), Search(search::FileSearchArgs), Show(show::FileShowArgs), Track(track::FileTrackArgs), Untrack(untrack::FileUntrackArgs), } pub fn cmd_file( ui: &mut Ui, command: &CommandHelper, subcommand: &FileCommand, ) -> Result<(), CommandError> { match subcommand { FileCommand::Annotate(args) => annotate::cmd_file_annotate(ui, command, args), FileCommand::Chmod(args) => chmod::cmd_file_chmod(ui, command, args), FileCommand::List(args) => list::cmd_file_list(ui, command, args), FileCommand::Search(args) => search::cmd_file_search(ui, command, args), FileCommand::Show(args) => show::cmd_file_show(ui, command, args), FileCommand::Track(args) => track::cmd_file_track(ui, command, args), FileCommand::Untrack(args) => untrack::cmd_file_untrack(ui, command, args), } }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/file/chmod.rs
cli/src/commands/file/chmod.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCompleter; use jj_lib::backend::TreeValue; use jj_lib::merged_tree::MergedTreeBuilder; use jj_lib::object_id::ObjectId as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::print_unmatched_explicit_paths; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::complete; use crate::ui::Ui; #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, clap::ValueEnum)] enum ChmodMode { /// Make a path non-executable (alias: normal) // We use short names for enum values so that errors say that the possible values are `n, x`. #[value(name = "n", alias("normal"))] Normal, /// Make a path executable (alias: executable) #[value(name = "x", alias("executable"))] Executable, } /// Sets or removes the executable bit for paths in the repo /// /// Unlike the POSIX `chmod`, `jj file chmod` also works on Windows, on /// conflicted files, and on arbitrary revisions. 
#[derive(clap::Args, Clone, Debug)] pub(crate) struct FileChmodArgs { mode: ChmodMode, /// The revision to update #[arg(long, short, default_value = "@", value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_mutable))] revision: RevisionArg, /// Paths to change the executable bit for #[arg(required = true, value_name = "FILESETS", value_hint = clap::ValueHint::AnyPath)] #[arg(add = ArgValueCompleter::new(complete::all_revision_files))] paths: Vec<String>, } #[instrument(skip_all)] pub(crate) fn cmd_file_chmod( ui: &mut Ui, command: &CommandHelper, args: &FileChmodArgs, ) -> Result<(), CommandError> { let executable_bit = match args.mode { ChmodMode::Executable => true, ChmodMode::Normal => false, }; let mut workspace_command = command.workspace_helper(ui)?; let commit = workspace_command.resolve_single_rev(ui, &args.revision)?; workspace_command.check_rewritable([commit.id()])?; let tree = commit.tree(); // TODO: No need to add special case for empty paths when switching to // parse_union_filesets(). paths = [] should be "none()" if supported. let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?; let matcher = fileset_expression.to_matcher(); print_unmatched_explicit_paths(ui, &workspace_command, &fileset_expression, [&tree])?; let mut tx = workspace_command.start_transaction(); let mut tree_builder = MergedTreeBuilder::new(commit.tree()); for (repo_path, result) in tree.entries_matching(matcher.as_ref()) { let mut tree_value = result?; let user_error_with_path = |msg: &str| { user_error(format!( "{msg} at '{}'.", tx.base_workspace_helper().format_file_path(&repo_path) )) }; let all_files = tree_value .adds() .flatten() .all(|tree_value| matches!(tree_value, TreeValue::File { .. 
})); if !all_files { let message = if tree_value.is_resolved() { "Found neither a file nor a conflict" } else { "Some of the sides of the conflict are not files" }; return Err(user_error_with_path(message)); } for value in tree_value.iter_mut().flatten() { if let TreeValue::File { id: _, executable, copy_id: _, } = value { *executable = executable_bit; } } tree_builder.set_or_remove(repo_path, tree_value); } let new_tree = tree_builder.write_tree()?; tx.repo_mut() .rewrite_commit(&commit) .set_tree(new_tree) .write()?; tx.finish( ui, format!( "make paths {} in commit {}", if executable_bit { "executable" } else { "non-executable" }, commit.id().hex(), ), ) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/file/annotate.rs
cli/src/commands/file/annotate.rs
// Copyright 2024 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCandidates; use clap_complete::ArgValueCompleter; use jj_lib::annotate::FileAnnotation; use jj_lib::annotate::FileAnnotator; use jj_lib::annotate::LineOrigin; use jj_lib::repo::Repo; use jj_lib::revset::RevsetExpression; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::commit_templater::AnnotationLine; use crate::complete; use crate::templater::TemplateRenderer; use crate::ui::Ui; /// Show the source change for each line of the target file. /// /// Annotates a revision line by line. Each line includes the source change that /// introduced the associated line. A path to the desired file must be provided. #[derive(clap::Args, Clone, Debug)] pub(crate) struct FileAnnotateArgs { /// the file to annotate #[arg(value_hint = clap::ValueHint::AnyPath)] #[arg(add = ArgValueCompleter::new(complete::all_revision_files))] path: String, /// an optional revision to start at #[arg(long, short, value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] revision: Option<RevisionArg>, /// Render each line using the given template /// /// All 0-argument methods of the [`AnnotationLine` type] are available as /// keywords in the template expression. See [`jj help -k templates`] for /// more information. 
/// /// If not specified, this defaults to the `templates.file_annotate` /// setting. /// /// [`AnnotationLine` type]: /// https://docs.jj-vcs.dev/latest/templates/#annotationline-type /// /// [`jj help -k templates`]: /// https://docs.jj-vcs.dev/latest/templates/ #[arg(long, short = 'T')] #[arg(add = ArgValueCandidates::new(complete::template_aliases))] template: Option<String>, } #[instrument(skip_all)] pub(crate) fn cmd_file_annotate( ui: &mut Ui, command: &CommandHelper, args: &FileAnnotateArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let repo = workspace_command.repo(); let starting_commit = workspace_command .resolve_single_rev(ui, args.revision.as_ref().unwrap_or(&RevisionArg::AT))?; let file_path = workspace_command.parse_file_path(&args.path)?; let file_value = starting_commit.tree().path_value(&file_path)?; let ui_path = workspace_command.format_file_path(&file_path); if file_value.is_absent() { return Err(user_error(format!("No such path: {ui_path}"))); } if file_value.to_file_merge().is_none() { return Err(user_error(format!( "Path exists but is not a regular file: {ui_path}" ))); } let template_text = match &args.template { Some(value) => value.clone(), None => workspace_command .settings() .get_string("templates.file_annotate")?, }; let language = workspace_command.commit_template_language(); let template = workspace_command.parse_template(ui, &language, &template_text)?; // TODO: Should we add an option to limit the domain to e.g. recent commits? // Note that this is probably different from "--skip REVS", which won't // exclude the revisions, but will ignore diffs in those revisions as if // ancestor revisions had new content. 
let mut annotator = FileAnnotator::from_commit(&starting_commit, &file_path)?; annotator.compute(repo.as_ref(), &RevsetExpression::all())?; let annotation = annotator.to_annotation(); render_file_annotation(repo.as_ref(), ui, &template, &annotation)?; Ok(()) } fn render_file_annotation( repo: &dyn Repo, ui: &mut Ui, template_render: &TemplateRenderer<AnnotationLine>, annotation: &FileAnnotation, ) -> Result<(), CommandError> { ui.request_pager(); let mut formatter = ui.stdout_formatter(); let mut last_id = None; // At least in cases where the repository was jj-initialized shallowly, // then unshallow'd with git, some changes will not have a commit id // because jj does not import the unshallow'd commits. So we default // to the root commit id for now. let default_line_origin = LineOrigin { commit_id: repo.store().root_commit_id().clone(), line_number: 0, }; for (line_number, (line_origin, content)) in annotation.line_origins().enumerate() { let line_origin = line_origin.unwrap_or(&default_line_origin); let commit = repo.store().get_commit(&line_origin.commit_id)?; let first_line_in_hunk = last_id != Some(&line_origin.commit_id); let annotation_line = AnnotationLine { commit, content: content.to_owned(), line_number: line_number + 1, original_line_number: line_origin.line_number + 1, first_line_in_hunk, }; template_render.format(&annotation_line, formatter.as_mut())?; last_id = Some(&line_origin.commit_id); } Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/file/show.rs
cli/src/commands/file/show.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write as _; use clap_complete::ArgValueCandidates; use clap_complete::ArgValueCompleter; use itertools::Itertools as _; use jj_lib::backend::BackendResult; use jj_lib::conflicts::ConflictMaterializeOptions; use jj_lib::conflicts::MaterializedTreeValue; use jj_lib::conflicts::materialize_merge_result; use jj_lib::conflicts::materialize_tree_value; use jj_lib::file_util::copy_async_to_sync; use jj_lib::fileset::FilePattern; use jj_lib::fileset::FilesetExpression; use jj_lib::merged_tree::MergedTree; use jj_lib::repo::Repo as _; use jj_lib::repo_path::RepoPath; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::WorkspaceCommandHelper; use crate::cli_util::print_unmatched_explicit_paths; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::commit_templater::TreeEntry; use crate::complete; use crate::templater::TemplateRenderer; use crate::ui::Ui; /// Print contents of files in a revision /// /// If the given path is a directory, files in the directory will be visited /// recursively. 
#[derive(clap::Args, Clone, Debug)] pub(crate) struct FileShowArgs { /// The revision to get the file contents from #[arg(long, short, default_value = "@", value_name = "REVSET")] #[arg(add = ArgValueCompleter::new(complete::revset_expression_all))] revision: RevisionArg, /// Render each file metadata using the given template /// /// All 0-argument methods of the [`TreeEntry` type] are available as /// keywords in the template expression. See [`jj help -k templates`] for /// more information. /// /// If not specified, this defaults to the `templates.file_show` setting. /// /// [`TreeEntry` type]: /// https://docs.jj-vcs.dev/latest/templates/#treeentry-type /// /// [`jj help -k templates`]: /// https://docs.jj-vcs.dev/latest/templates/ #[arg(long, short = 'T')] #[arg(add = ArgValueCandidates::new(complete::template_aliases))] template: Option<String>, /// Paths to print #[arg(required = true, value_name = "FILESETS", value_hint = clap::ValueHint::FilePath)] #[arg(add = ArgValueCompleter::new(complete::all_revision_files))] paths: Vec<String>, } #[instrument(skip_all)] pub(crate) fn cmd_file_show( ui: &mut Ui, command: &CommandHelper, args: &FileShowArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let commit = workspace_command.resolve_single_rev(ui, &args.revision)?; let tree = commit.tree(); // TODO: No need to add special case for empty paths when switching to // parse_union_filesets(). paths = [] should be "none()" if supported. let fileset_expression = workspace_command.parse_file_patterns(ui, &args.paths)?; let template = { let language = workspace_command.commit_template_language(); let text = match &args.template { Some(value) => value.to_owned(), None => workspace_command.settings().get("templates.file_show")?, }; workspace_command .parse_template(ui, &language, &text)? 
.labeled(["file_show"]) }; // Try fast path for single file entry if let Some(path) = get_single_path(&fileset_expression) { let value = tree.path_value(path)?; if value.is_absent() { let ui_path = workspace_command.format_file_path(path); return Err(user_error(format!("No such path: {ui_path}"))); } if !value.is_tree() { ui.request_pager(); let entry = TreeEntry { path: path.to_owned(), value, }; write_tree_entries(ui, &workspace_command, &template, &tree, [Ok(entry)])?; return Ok(()); } } let matcher = fileset_expression.to_matcher(); ui.request_pager(); write_tree_entries( ui, &workspace_command, &template, &tree, tree.entries_matching(matcher.as_ref()) .map(|(path, value)| Ok((path, value?))) .map_ok(|(path, value)| TreeEntry { path, value }), )?; print_unmatched_explicit_paths(ui, &workspace_command, &fileset_expression, [&tree])?; Ok(()) } fn get_single_path(expression: &FilesetExpression) -> Option<&RepoPath> { match &expression { FilesetExpression::Pattern(pattern) => match pattern { // Not using pattern.as_path() because files-in:<path> shouldn't // select the literal <path> itself. FilePattern::FilePath(path) | FilePattern::PrefixPath(path) => Some(path), FilePattern::FileGlob { .. } | FilePattern::PrefixGlob { .. 
} => None, }, _ => None, } } fn write_tree_entries( ui: &Ui, workspace_command: &WorkspaceCommandHelper, template: &TemplateRenderer<TreeEntry>, tree: &MergedTree, entries: impl IntoIterator<Item = BackendResult<TreeEntry>>, ) -> Result<(), CommandError> { let repo = workspace_command.repo(); for entry in entries { let entry = entry?; template.format(&entry, ui.stdout_formatter().as_mut())?; let materialized = materialize_tree_value(repo.store(), &entry.path, entry.value, tree.labels()) .block_on()?; match materialized { MaterializedTreeValue::Absent => panic!("absent values should be excluded"), MaterializedTreeValue::AccessDenied(err) => { let ui_path = workspace_command.format_file_path(&entry.path); writeln!( ui.warning_default(), "Path '{ui_path}' exists but access is denied: {err}" )?; } MaterializedTreeValue::File(file) => { copy_async_to_sync(file.reader, ui.stdout_formatter().as_mut()).block_on()?; } MaterializedTreeValue::FileConflict(file) => { let options = ConflictMaterializeOptions { marker_style: workspace_command.env().conflict_marker_style(), marker_len: None, merge: repo.store().merge_options().clone(), }; materialize_merge_result( &file.contents, &file.labels, &mut ui.stdout_formatter(), &options, )?; } MaterializedTreeValue::OtherConflict { id, labels } => { ui.stdout_formatter() .write_all(id.describe(&labels).as_bytes())?; } MaterializedTreeValue::Symlink { .. } | MaterializedTreeValue::GitSubmodule(_) => { let ui_path = workspace_command.format_file_path(&entry.path); writeln!( ui.warning_default(), "Path '{ui_path}' exists but is not a file" )?; } MaterializedTreeValue::Tree(_) => panic!("entries should not contain trees"), } } Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/bench/is_ancestor.rs
cli/src/commands/bench/is_ancestor.rs
// Copyright 2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use jj_lib::repo::Repo as _; use super::CriterionArgs; use super::run_bench; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::command_error::CommandError; use crate::ui::Ui; /// Checks if the first commit is an ancestor of the second commit #[derive(clap::Args, Clone, Debug)] pub struct BenchIsAncestorArgs { ancestor: RevisionArg, descendant: RevisionArg, #[command(flatten)] criterion: CriterionArgs, } pub fn cmd_bench_is_ancestor( ui: &mut Ui, command: &CommandHelper, args: &BenchIsAncestorArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let ancestor_commit = workspace_command.resolve_single_rev(ui, &args.ancestor)?; let descendant_commit = workspace_command.resolve_single_rev(ui, &args.descendant)?; let index = workspace_command.repo().index(); let routine = || index.is_ancestor(ancestor_commit.id(), descendant_commit.id()); run_bench( ui, &format!("is-ancestor-{}-{}", args.ancestor, args.descendant), &args.criterion, routine, )?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/bench/common_ancestors.rs
cli/src/commands/bench/common_ancestors.rs
// Copyright 2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::slice; use jj_lib::repo::Repo as _; use super::CriterionArgs; use super::run_bench; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::command_error::CommandError; use crate::ui::Ui; /// Find the common ancestor(s) of a set of commits #[derive(clap::Args, Clone, Debug)] pub struct BenchCommonAncestorsArgs { revision1: RevisionArg, revision2: RevisionArg, #[command(flatten)] criterion: CriterionArgs, } pub fn cmd_bench_common_ancestors( ui: &mut Ui, command: &CommandHelper, args: &BenchCommonAncestorsArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let commit1 = workspace_command.resolve_single_rev(ui, &args.revision1)?; let commit2 = workspace_command.resolve_single_rev(ui, &args.revision2)?; let index = workspace_command.repo().index(); let routine = || index.common_ancestors(slice::from_ref(commit1.id()), slice::from_ref(commit2.id())); run_bench( ui, &format!("common-ancestors-{}-{}", args.revision1, args.revision2), &args.criterion, routine, )?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/bench/resolve_prefix.rs
cli/src/commands/bench/resolve_prefix.rs
// Copyright 2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use jj_lib::object_id::HexPrefix; use jj_lib::repo::Repo as _; use super::CriterionArgs; use super::run_bench; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::ui::Ui; /// Resolve a commit ID prefix #[derive(clap::Args, Clone, Debug)] pub struct BenchResolvePrefixArgs { prefix: String, #[command(flatten)] criterion: CriterionArgs, } pub fn cmd_bench_resolve_prefix( ui: &mut Ui, command: &CommandHelper, args: &BenchResolvePrefixArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let prefix = HexPrefix::try_from_hex(&args.prefix).unwrap(); let index = workspace_command.repo().index(); let routine = || index.resolve_commit_id_prefix(&prefix); run_bench( ui, &format!("resolve-prefix-{}", prefix.hex()), &args.criterion, routine, )?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/bench/revset.rs
cli/src/commands/bench/revset.rs
// Copyright 2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::sync::Arc; use std::time::Instant; use criterion::BatchSize; use criterion::BenchmarkGroup; use criterion::BenchmarkId; use criterion::measurement::Measurement; use jj_lib::revset::SymbolResolver; use jj_lib::revset::SymbolResolverExtension; use jj_lib::revset::UserRevsetExpression; use super::CriterionArgs; use super::new_criterion; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::cli_util::WorkspaceCommandHelper; use crate::command_error::CommandError; use crate::ui::Ui; /// Walk the revisions in the revset #[derive(clap::Args, Clone, Debug)] #[command(group(clap::ArgGroup::new("revset_source").required(true)))] pub struct BenchRevsetArgs { #[arg(group = "revset_source")] revisions: Vec<RevisionArg>, /// Read revsets from file #[arg(long, short = 'f', group = "revset_source", value_hint = clap::ValueHint::FilePath)] file: Option<String>, #[command(flatten)] criterion: CriterionArgs, } pub fn cmd_bench_revset( ui: &mut Ui, command: &CommandHelper, args: &BenchRevsetArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let revsets = if let Some(file_path) = &args.file { std::fs::read_to_string(command.cwd().join(file_path))? 
.lines() .map(|line| line.trim().to_owned()) .filter(|line| !line.is_empty() && !line.starts_with('#')) .map(RevisionArg::from) .collect() } else { args.revisions.clone() }; let mut criterion = new_criterion(ui, &args.criterion); let mut group = criterion.benchmark_group("revsets"); for revset in &revsets { bench_revset(ui, command, &workspace_command, &mut group, revset)?; } // Neither of these seem to report anything... group.finish(); criterion.final_summary(); Ok(()) } fn bench_revset<M: Measurement>( ui: &mut Ui, command: &CommandHelper, workspace_command: &WorkspaceCommandHelper, group: &mut BenchmarkGroup<M>, revset: &RevisionArg, ) -> Result<(), CommandError> { writeln!(ui.status(), "----------Testing revset: {revset}----------")?; let expression = workspace_command .parse_revset(ui, revset)? .expression() .clone(); // Time both evaluation and iteration. let routine = |workspace_command: &WorkspaceCommandHelper, expression: Arc<UserRevsetExpression>| { // Evaluate the expression without parsing/evaluating short-prefixes. let repo = workspace_command.repo().as_ref(); let symbol_resolver = SymbolResolver::new(repo, &([] as [Box<dyn SymbolResolverExtension>; 0])); let resolved = expression .resolve_user_expression(repo, &symbol_resolver) .unwrap(); let revset = resolved.evaluate(repo).unwrap(); revset.iter().count() }; let before = Instant::now(); let result = routine(workspace_command, expression.clone()); let after = Instant::now(); writeln!( ui.status(), "First run took {:?} and produced {result} commits", after.duration_since(before), )?; group.bench_with_input( BenchmarkId::from_parameter(revset), &expression, |bencher, expression| { bencher.iter_batched( // Reload repo and backend store to clear caches (such as commit objects // in `Store`), but preload index since it's more likely to be loaded // by preceding operation. `repo.reload_at()` isn't enough to clear // store cache. 
|| { let workspace_command = command.workspace_helper_no_snapshot(ui).unwrap(); workspace_command.repo().readonly_index(); workspace_command }, |workspace_command| routine(&workspace_command, expression.clone()), // Index-preloaded repo may consume a fair amount of memory BatchSize::LargeInput, ); }, ); Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/bench/mod.rs
cli/src/commands/bench/mod.rs
// Copyright 2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. mod common_ancestors; mod is_ancestor; mod resolve_prefix; mod revset; use std::fmt::Debug; use std::io; use std::time::Instant; use clap::Subcommand; use criterion::Criterion; use self::common_ancestors::BenchCommonAncestorsArgs; use self::common_ancestors::cmd_bench_common_ancestors; use self::is_ancestor::BenchIsAncestorArgs; use self::is_ancestor::cmd_bench_is_ancestor; use self::resolve_prefix::BenchResolvePrefixArgs; use self::resolve_prefix::cmd_bench_resolve_prefix; use self::revset::BenchRevsetArgs; use self::revset::cmd_bench_revset; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::ui::Ui; /// Commands for benchmarking internal operations #[derive(Subcommand, Clone, Debug)] #[command(hide = true)] pub enum BenchCommand { CommonAncestors(BenchCommonAncestorsArgs), IsAncestor(BenchIsAncestorArgs), ResolvePrefix(BenchResolvePrefixArgs), Revset(BenchRevsetArgs), } pub(crate) fn cmd_bench( ui: &mut Ui, command: &CommandHelper, subcommand: &BenchCommand, ) -> Result<(), CommandError> { match subcommand { BenchCommand::CommonAncestors(args) => cmd_bench_common_ancestors(ui, command, args), BenchCommand::IsAncestor(args) => cmd_bench_is_ancestor(ui, command, args), BenchCommand::ResolvePrefix(args) => cmd_bench_resolve_prefix(ui, command, args), BenchCommand::Revset(args) => cmd_bench_revset(ui, command, args), } } #[derive(clap::Args, Clone, 
Debug)] struct CriterionArgs { /// Name of baseline to save results #[arg(long, short = 's', group = "baseline_mode", default_value = "base")] save_baseline: String, /// Name of baseline to compare with #[arg(long, short = 'b', group = "baseline_mode")] baseline: Option<String>, /// Sample size for the benchmarks, which must be at least 10 #[arg(long, default_value_t = 100, value_parser = clap::value_parser!(u32).range(10..))] sample_size: u32, // not usize because https://github.com/clap-rs/clap/issues/4253 } fn new_criterion(ui: &Ui, args: &CriterionArgs) -> Criterion { let criterion = Criterion::default().with_output_color(ui.color()); let criterion = if let Some(name) = &args.baseline { let strict = false; // Do not panic if previous baseline doesn't exist. criterion.retain_baseline(name.clone(), strict) } else { criterion.save_baseline(args.save_baseline.clone()) }; criterion.sample_size(args.sample_size as usize) } fn run_bench<R, O>(ui: &mut Ui, id: &str, args: &CriterionArgs, mut routine: R) -> io::Result<()> where R: (FnMut() -> O) + Copy, O: Debug, { let mut criterion = new_criterion(ui, args); let before = Instant::now(); let result = routine(); let after = Instant::now(); writeln!( ui.status(), "First run took {:?} and produced: {:?}", after.duration_since(before), result )?; criterion.bench_function(id, |bencher: &mut criterion::Bencher| { bencher.iter(routine); }); Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/workspace/rename.rs
cli/src/commands/workspace/rename.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use jj_lib::ref_name::WorkspaceNameBuf; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::ui::Ui; /// Renames the current workspace #[derive(clap::Args, Clone, Debug)] pub struct WorkspaceRenameArgs { /// The name of the workspace to update to. new_workspace_name: WorkspaceNameBuf, } #[instrument(skip_all)] pub fn cmd_workspace_rename( ui: &mut Ui, command: &CommandHelper, args: &WorkspaceRenameArgs, ) -> Result<(), CommandError> { if args.new_workspace_name.as_str().is_empty() { return Err(user_error("New workspace name cannot be empty")); } let mut workspace_command = command.workspace_helper(ui)?; let old_name = workspace_command.working_copy().workspace_name().to_owned(); let new_name = &*args.new_workspace_name; if new_name == old_name { writeln!(ui.status(), "Nothing changed.")?; return Ok(()); } if workspace_command .repo() .view() .get_wc_commit_id(&old_name) .is_none() { return Err(user_error(format!( "The current workspace '{}' is not tracked in the repo.", old_name.as_symbol() ))); } let mut tx = workspace_command.start_transaction().into_inner(); let (mut locked_ws, _wc_commit) = workspace_command.start_working_copy_mutation()?; locked_ws.locked_wc().rename_workspace(new_name.to_owned()); tx.repo_mut() .rename_workspace(&old_name, new_name.to_owned())?; let repo = 
tx.commit(format!( "Renamed workspace '{old}' to '{new}'", old = old_name.as_symbol(), new = new_name.as_symbol() ))?; locked_ws.finish(repo.op_id().clone())?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/workspace/list.rs
cli/src/commands/workspace/list.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCandidates; use jj_lib::repo::Repo as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::commit_templater::WorkspaceRef; use crate::complete; use crate::templater::TemplateRenderer; use crate::ui::Ui; /// List workspaces #[derive(clap::Args, Clone, Debug)] pub struct WorkspaceListArgs { /// Render each workspace using the given template /// /// All 0-argument methods of the [`WorkspaceRef` type] are available as /// keywords in the template expression. See [`jj help -k templates`] for /// more information. 
/// /// [`WorkspaceRef` type]: /// https://docs.jj-vcs.dev/latest/templates/#workspaceref-type /// /// [`jj help -k templates`]: /// https://docs.jj-vcs.dev/latest/templates/ #[arg(long, short = 'T')] #[arg(add = ArgValueCandidates::new(complete::template_aliases))] template: Option<String>, } #[instrument(skip_all)] pub fn cmd_workspace_list( ui: &mut Ui, command: &CommandHelper, args: &WorkspaceListArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let template: TemplateRenderer<WorkspaceRef> = { let language = workspace_command.commit_template_language(); let text = match &args.template { Some(value) => value.to_owned(), None => workspace_command .settings() .get("templates.workspace_list")?, }; workspace_command .parse_template(ui, &language, &text)? .labeled(["workspace_list"]) }; let repo = workspace_command.repo(); let mut formatter = ui.stdout_formatter(); for (name, wc_commit_id) in repo.view().wc_commit_ids() { let commit = repo.store().get_commit(wc_commit_id)?; let ws_ref = WorkspaceRef::new(name.clone(), commit); template.format(&ws_ref, formatter.as_mut())?; } Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/workspace/root.rs
cli/src/commands/workspace/root.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write as _; use jj_lib::file_util; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::ui::Ui; /// Show the current workspace root directory #[derive(clap::Args, Clone, Debug)] pub struct WorkspaceRootArgs {} #[instrument(skip_all)] pub fn cmd_workspace_root( ui: &mut Ui, command: &CommandHelper, _args: &WorkspaceRootArgs, ) -> Result<(), CommandError> { let loader = command.workspace_loader()?; let path_bytes = file_util::path_to_bytes(loader.workspace_root()).map_err(user_error)?; ui.stdout().write_all(path_bytes)?; writeln!(ui.stdout())?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/workspace/update_stale.rs
cli/src/commands/workspace/update_stale.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::print_snapshot_stats; use crate::command_error::CommandError; use crate::ui::Ui; /// Update a workspace that has become stale /// /// See the [stale working copy documentation] for more information. /// /// [stale working copy documentation]: /// https://docs.jj-vcs.dev/latest/working-copy/#stale-working-copy #[derive(clap::Args, Clone, Debug)] pub struct WorkspaceUpdateStaleArgs {} #[instrument(skip_all)] pub fn cmd_workspace_update_stale( ui: &mut Ui, command: &CommandHelper, _args: &WorkspaceUpdateStaleArgs, ) -> Result<(), CommandError> { let (workspace_command, stats) = command.recover_stale_working_copy(ui)?; print_snapshot_stats(ui, &stats, workspace_command.env().path_converter())?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/workspace/forget.rs
cli/src/commands/workspace/forget.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCandidates; use itertools::Itertools as _; use jj_lib::ref_name::WorkspaceNameBuf; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::complete; use crate::ui::Ui; /// Stop tracking a workspace's working-copy commit in the repo /// /// The workspace will not be touched on disk. It can be deleted from disk /// before or after running this command. #[derive(clap::Args, Clone, Debug)] pub struct WorkspaceForgetArgs { /// Names of the workspaces to forget. By default, forgets only the current /// workspace. 
#[arg(add = ArgValueCandidates::new(complete::workspaces))] workspaces: Vec<WorkspaceNameBuf>, } #[instrument(skip_all)] pub fn cmd_workspace_forget( ui: &mut Ui, command: &CommandHelper, args: &WorkspaceForgetArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; let wss = if args.workspaces.is_empty() { vec![workspace_command.workspace_name().to_owned()] } else { args.workspaces.clone() }; let mut forget_ws = Vec::new(); for ws in &wss { if workspace_command .repo() .view() .get_wc_commit_id(ws) .is_none() { writeln!( ui.warning_default(), "No such workspace: {}", ws.as_symbol(), )?; } else { forget_ws.push(ws); } } if forget_ws.is_empty() { writeln!(ui.status(), "Nothing changed.")?; return Ok(()); } // bundle every workspace forget into a single transaction, so that e.g. // undo correctly restores all of them at once. let mut tx = workspace_command.start_transaction(); forget_ws .iter() .try_for_each(|ws| tx.repo_mut().remove_wc_commit(ws))?; let description = if let [ws] = forget_ws.as_slice() { format!("forget workspace {}", ws.as_symbol()) } else { format!( "forget workspaces {}", forget_ws.iter().map(|ws| ws.as_symbol()).join(", ") ) }; tx.finish(ui, description)?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/workspace/mod.rs
cli/src/commands/workspace/mod.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. mod add; mod forget; mod list; mod rename; mod root; mod update_stale; use clap::Subcommand; use tracing::instrument; use self::add::WorkspaceAddArgs; use self::add::cmd_workspace_add; use self::forget::WorkspaceForgetArgs; use self::forget::cmd_workspace_forget; use self::list::WorkspaceListArgs; use self::list::cmd_workspace_list; use self::rename::WorkspaceRenameArgs; use self::rename::cmd_workspace_rename; use self::root::WorkspaceRootArgs; use self::root::cmd_workspace_root; use self::update_stale::WorkspaceUpdateStaleArgs; use self::update_stale::cmd_workspace_update_stale; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::ui::Ui; /// Commands for working with workspaces /// /// Workspaces let you add additional working copies attached to the same repo. /// A common use case is so you can run a slow build or test in one workspace /// while you're continuing to write code in another workspace. /// /// Each workspace has its own working-copy commit. When you have more than one /// workspace attached to a repo, they are indicated by `<workspace name>@` in /// `jj log`. /// /// Each workspace also has own sparse patterns. 
#[derive(Subcommand, Clone, Debug)] pub(crate) enum WorkspaceCommand { Add(WorkspaceAddArgs), Forget(WorkspaceForgetArgs), List(WorkspaceListArgs), Rename(WorkspaceRenameArgs), Root(WorkspaceRootArgs), UpdateStale(WorkspaceUpdateStaleArgs), } #[instrument(skip_all)] pub(crate) fn cmd_workspace( ui: &mut Ui, command: &CommandHelper, subcommand: &WorkspaceCommand, ) -> Result<(), CommandError> { match subcommand { WorkspaceCommand::Add(args) => cmd_workspace_add(ui, command, args), WorkspaceCommand::Forget(args) => cmd_workspace_forget(ui, command, args), WorkspaceCommand::List(args) => cmd_workspace_list(ui, command, args), WorkspaceCommand::Rename(args) => cmd_workspace_rename(ui, command, args), WorkspaceCommand::Root(args) => cmd_workspace_root(ui, command, args), WorkspaceCommand::UpdateStale(args) => cmd_workspace_update_stale(ui, command, args), } }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/workspace/add.rs
cli/src/commands/workspace/add.rs
// Copyright 2020 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::fs; use itertools::Itertools as _; use jj_lib::commit::CommitIteratorExt as _; use jj_lib::file_util; use jj_lib::file_util::IoResultExt as _; use jj_lib::ref_name::WorkspaceNameBuf; use jj_lib::repo::Repo as _; use jj_lib::rewrite::merge_commit_trees; use jj_lib::workspace::Workspace; use pollster::FutureExt as _; use tracing::instrument; use crate::cli_util::CommandHelper; use crate::cli_util::RevisionArg; use crate::command_error::CommandError; use crate::command_error::internal_error_with_message; use crate::command_error::user_error; use crate::ui::Ui; /// How to handle sparse patterns when creating a new workspace. #[derive(clap::ValueEnum, Clone, Debug, Eq, PartialEq)] enum SparseInheritance { /// Copy all sparse patterns from the current workspace. Copy, /// Include all files in the new workspace. Full, /// Clear all files from the workspace (it will be empty). Empty, } /// Add a workspace /// /// By default, the new workspace inherits the sparse patterns of the current /// workspace. You can override this with the `--sparse-patterns` option. #[derive(clap::Args, Clone, Debug)] pub struct WorkspaceAddArgs { /// Where to create the new workspace #[arg(value_hint = clap::ValueHint::DirPath)] destination: String, /// A name for the workspace /// /// To override the default, which is the basename of the destination /// directory. 
#[arg(long)] name: Option<WorkspaceNameBuf>, /// A list of parent revisions for the working-copy commit of the newly /// created workspace. You may specify nothing, or any number of parents. /// /// If no revisions are specified, the new workspace will be created, and /// its working-copy commit will exist on top of the parent(s) of the /// working-copy commit in the current workspace, i.e. they will share the /// same parent(s). /// /// If any revisions are specified, the new workspace will be created, and /// the new working-copy commit will be created with all these revisions as /// parents, i.e. the working-copy commit will exist as if you had run `jj /// new r1 r2 r3 ...`. #[arg(long, short, value_name = "REVSETS")] revision: Vec<RevisionArg>, /// How to handle sparse patterns when creating a new workspace. #[arg(long, value_enum, default_value_t = SparseInheritance::Copy)] sparse_patterns: SparseInheritance, } #[instrument(skip_all)] pub fn cmd_workspace_add( ui: &mut Ui, command: &CommandHelper, args: &WorkspaceAddArgs, ) -> Result<(), CommandError> { let old_workspace_command = command.workspace_helper(ui)?; let destination_path = command.cwd().join(&args.destination); let workspace_name = if let Some(name) = &args.name { name.to_owned() } else { let file_name = destination_path.file_name().unwrap(); file_name .to_str() .ok_or_else(|| user_error("Destination path is not valid UTF-8"))? .into() }; if workspace_name.as_str().is_empty() { return Err(user_error("New workspace name cannot be empty")); } let repo = old_workspace_command.repo(); if repo.view().get_wc_commit_id(&workspace_name).is_some() { return Err(user_error(format!( "Workspace named '{name}' already exists", name = workspace_name.as_symbol() ))); } if !destination_path.exists() { fs::create_dir(&destination_path).context(&destination_path)?; } else if !file_util::is_empty_dir(&destination_path)? 
{ return Err(user_error( "Destination path exists and is not an empty directory", )); } let working_copy_factory = command.get_working_copy_factory()?; let repo_path = old_workspace_command.repo_path(); // If we add per-workspace configuration, we'll need to reload settings for // the new workspace. let (new_workspace, repo) = Workspace::init_workspace_with_existing_repo( &destination_path, repo_path, repo, working_copy_factory, workspace_name.clone(), )?; writeln!( ui.status(), "Created workspace in \"{}\"", file_util::relative_path(command.cwd(), &destination_path).display() )?; // Show a warning if the user passed a path without a separator, since they // may have intended the argument to only be the name for the workspace. if !args.destination.contains(std::path::is_separator) { writeln!( ui.warning_default(), r#"Workspace created inside current directory. If this was unintentional, delete the "{}" directory and run `jj workspace forget {name}` to remove it."#, args.destination, name = workspace_name.as_symbol() )?; } let mut new_workspace_command = command.for_workable_repo(ui, new_workspace, repo)?; let sparsity = match args.sparse_patterns { SparseInheritance::Full => None, SparseInheritance::Empty => Some(vec![]), SparseInheritance::Copy => { let sparse_patterns = old_workspace_command .working_copy() .sparse_patterns()? .to_vec(); Some(sparse_patterns) } }; if let Some(sparse_patterns) = sparsity { let (mut locked_ws, _wc_commit) = new_workspace_command.start_working_copy_mutation()?; locked_ws .locked_wc() .set_sparse_patterns(sparse_patterns) .block_on() .map_err(|err| internal_error_with_message("Failed to set sparse patterns", err))?; let operation_id = locked_ws.locked_wc().old_operation_id().clone(); locked_ws.finish(operation_id)?; } let mut tx = new_workspace_command.start_transaction(); // If no parent revisions are specified, create a working-copy commit based // on the parent of the current working-copy commit. 
let parents = if args.revision.is_empty() { // Check out parents of the current workspace's working-copy commit, or the // root if there is no working-copy commit in the current workspace. if let Some(old_wc_commit_id) = tx .base_repo() .view() .get_wc_commit_id(old_workspace_command.workspace_name()) { tx.repo() .store() .get_commit(old_wc_commit_id)? .parents() .try_collect()? } else { vec![tx.repo().store().root_commit()] } } else { old_workspace_command .resolve_some_revsets(ui, &args.revision)? .iter() .map(|id| tx.repo().store().get_commit(id)) .try_collect()? }; let tree = merge_commit_trees(tx.repo(), &parents).block_on()?; let parent_ids = parents.iter().ids().cloned().collect_vec(); let new_wc_commit = tx.repo_mut().new_commit(parent_ids, tree).write()?; tx.edit(&new_wc_commit)?; tx.finish( ui, format!( "create initial working-copy commit in workspace {name}", name = workspace_name.as_symbol() ), )?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/operation/abandon.rs
cli/src/commands/operation/abandon.rs
// Copyright 2020-2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::io::Write as _; use std::iter; use std::slice; use clap_complete::ArgValueCandidates; use itertools::Itertools as _; use jj_lib::op_walk; use pollster::FutureExt as _; use crate::cli_util::CommandHelper; use crate::cli_util::short_operation_hash; use crate::command_error::CommandError; use crate::command_error::cli_error; use crate::command_error::user_error; use crate::complete; use crate::ui::Ui; /// Abandon operation history /// /// To discard old operation history, use `jj op abandon ..<operation ID>`. It /// will abandon the specified operation and all its ancestors. The descendants /// will be reparented onto the root operation. /// /// To discard recent operations, use `jj op restore <operation ID>` followed /// by `jj op abandon <operation ID>..@-`. /// /// Previous versions of a change (or predecessors) are also discarded if they /// become unreachable from the operation history. The abandoned operations, /// commits, and other unreachable objects can later be garbage collected by /// using `jj util gc` command. 
#[derive(clap::Args, Clone, Debug)] pub struct OperationAbandonArgs { /// The operation or operation range to abandon #[arg(add = ArgValueCandidates::new(complete::operations))] operation: String, } pub fn cmd_op_abandon( ui: &mut Ui, command: &CommandHelper, args: &OperationAbandonArgs, ) -> Result<(), CommandError> { // Don't load the repo so that this command can be used to recover from // corrupted repo state. let mut workspace = command.load_workspace()?; let repo_loader = workspace.repo_loader(); let op_store = repo_loader.op_store(); let op_heads_store = repo_loader.op_heads_store(); // It doesn't make sense to create divergent operations that will be merged // with the current head. if command.global_args().at_operation.is_some() { return Err(cli_error("--at-op is not respected")); } let current_head_ops = op_walk::get_current_head_ops(op_store, op_heads_store.as_ref())?; let resolve_op = |op_str| op_walk::resolve_op_at(op_store, &current_head_ops, op_str); let (abandon_root_op, abandon_head_ops) = if let Some((root_op_str, head_op_str)) = args.operation.split_once("..") { let root_op = if root_op_str.is_empty() { repo_loader.root_operation() } else { resolve_op(root_op_str)? }; let head_ops = if head_op_str.is_empty() { current_head_ops.clone() } else { vec![resolve_op(head_op_str)?] 
}; (root_op, head_ops) } else { let op = resolve_op(&args.operation)?; let parent_ops: Vec<_> = op.parents().try_collect()?; let parent_op = match parent_ops.len() { 0 => return Err(user_error("Cannot abandon the root operation")), 1 => parent_ops.into_iter().next().unwrap(), _ => return Err(user_error("Cannot abandon a merge operation")), }; (parent_op, vec![op]) }; if let Some(op) = abandon_head_ops .iter() .find(|op| current_head_ops.contains(op)) { let mut err = user_error(format!( "Cannot abandon the current operation {}", short_operation_hash(op.id()) )); if current_head_ops.len() == 1 { err.add_hint("Run `jj undo` to revert the current operation, then use `jj op abandon`"); } return Err(err); } // Reparent descendants, count the number of abandoned operations. let stats = op_walk::reparent_range( op_store.as_ref(), &abandon_head_ops, &current_head_ops, &abandon_root_op, )?; assert_eq!( current_head_ops.len(), stats.new_head_ids.len(), "all current_head_ops should be reparented as they aren't included in abandon_head_ops" ); let reparented_head_ops = || iter::zip(&current_head_ops, &stats.new_head_ids); if reparented_head_ops().all(|(old, new_id)| old.id() == new_id) { writeln!(ui.status(), "Nothing changed.")?; return Ok(()); } writeln!( ui.status(), "Abandoned {} operations and reparented {} descendant operations.", stats.unreachable_count, stats.rewritten_count, )?; for (old, new_id) in reparented_head_ops().filter(|&(old, new_id)| old.id() != new_id) { op_heads_store .update_op_heads(slice::from_ref(old.id()), new_id) .block_on()?; } // Remap the operation id of the current workspace. If there were any // divergent operations, user will need to re-abandon their ancestors. 
if !command.global_args().ignore_working_copy { let mut locked_ws = workspace.start_working_copy_mutation()?; let old_op_id = locked_ws.locked_wc().old_operation_id(); if let Some((_, new_id)) = reparented_head_ops().find(|(old, _)| old.id() == old_op_id) { locked_ws.finish(new_id.clone())?; } else { writeln!( ui.warning_default(), "The working copy operation {} is not updated because it differs from the repo {}.", short_operation_hash(old_op_id), current_head_ops .iter() .map(|op| short_operation_hash(op.id())) .join(", "), )?; } } Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/operation/revert.rs
cli/src/commands/operation/revert.rs
// Copyright 2025 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCandidates; use itertools::Itertools as _; use jj_lib::object_id::ObjectId as _; use jj_lib::operation::Operation; use jj_lib::repo::Repo as _; use super::DEFAULT_REVERT_WHAT; use super::RevertWhatToRestore; use super::view_with_desired_portions_restored; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::command_error::user_error; use crate::complete; use crate::ui::Ui; /// Create a new operation that reverts an earlier operation /// /// This reverts an individual operation by applying the inverse of the /// operation. #[derive(clap::Args, Clone, Debug)] pub struct OperationRevertArgs { /// The operation to revert /// /// Use `jj op log` to find an operation to revert. #[arg(default_value = "@")] #[arg(add = ArgValueCandidates::new(complete::operations))] pub(crate) operation: String, // pub for `jj undo` /// What portions of the local state to restore (can be repeated) /// /// This option is EXPERIMENTAL. 
#[arg(long, value_enum, default_values_t = DEFAULT_REVERT_WHAT)] pub(crate) what: Vec<RevertWhatToRestore>, // pub for `jj undo` } fn tx_description(op: &Operation) -> String { format!("revert operation {}", op.id().hex()) } pub fn cmd_op_revert( ui: &mut Ui, command: &CommandHelper, args: &OperationRevertArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; let bad_op = workspace_command.resolve_single_op(&args.operation)?; let parent_of_bad_op = match bad_op.parents().at_most_one() { Ok(Some(parent_of_bad_op)) => parent_of_bad_op?, Ok(None) => return Err(user_error("Cannot revert root operation")), Err(_) => return Err(user_error("Cannot revert a merge operation")), }; let mut tx = workspace_command.start_transaction(); let repo_loader = tx.base_repo().loader(); let bad_repo = repo_loader.load_at(&bad_op)?; let parent_repo = repo_loader.load_at(&parent_of_bad_op)?; tx.repo_mut().merge(&bad_repo, &parent_repo)?; let new_view = view_with_desired_portions_restored( tx.repo().view().store_view(), tx.base_repo().view().store_view(), &args.what, ); tx.repo_mut().set_view(new_view); if let Some(mut formatter) = ui.status_formatter() { write!(formatter, "Reverted operation: ")?; let template = tx.base_workspace_helper().operation_summary_template(); template.format(&bad_op, formatter.as_mut())?; writeln!(formatter)?; } tx.finish(ui, tx_description(&bad_op))?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/operation/diff.rs
cli/src/commands/operation/diff.rs
// Copyright 2024 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use std::collections::HashMap; use std::collections::HashSet; use std::slice; use std::sync::Arc; use clap_complete::ArgValueCandidates; use itertools::Itertools as _; use jj_lib::backend::ChangeId; use jj_lib::backend::CommitId; use jj_lib::commit::Commit; use jj_lib::evolution::accumulate_predecessors; use jj_lib::graph::TopoGroupedGraphIterator; use jj_lib::matchers::EverythingMatcher; use jj_lib::op_store::RefTarget; use jj_lib::op_store::RemoteRef; use jj_lib::op_store::RemoteRefState; use jj_lib::refs::diff_named_commit_ids; use jj_lib::refs::diff_named_ref_targets; use jj_lib::refs::diff_named_remote_refs; use jj_lib::repo::ReadonlyRepo; use jj_lib::repo::Repo; use jj_lib::revset::RevsetExpression; use pollster::FutureExt as _; use crate::cli_util::CommandHelper; use crate::cli_util::LogContentFormat; use crate::cli_util::default_ignored_remote_name; use crate::command_error::CommandError; use crate::complete; use crate::diff_util::DiffFormatArgs; use crate::diff_util::DiffRenderer; use crate::diff_util::diff_formats_for_log; use crate::formatter::Formatter; use crate::formatter::FormatterExt as _; use crate::graphlog::GraphStyle; use crate::graphlog::get_graphlog; use crate::templater::TemplateRenderer; use crate::ui::Ui; /// Compare changes to the repository between two operations #[derive(clap::Args, Clone, Debug)] pub struct OperationDiffArgs { /// Show repository changes 
in this operation, compared to its parent #[arg(long, visible_alias = "op")] #[arg(add = ArgValueCandidates::new(complete::operations))] operation: Option<String>, /// Show repository changes from this operation #[arg(long, short, conflicts_with = "operation")] #[arg(add = ArgValueCandidates::new(complete::operations))] from: Option<String>, /// Show repository changes to this operation #[arg(long, short, conflicts_with = "operation")] #[arg(add = ArgValueCandidates::new(complete::operations))] to: Option<String>, /// Don't show the graph, show a flat list of modified changes #[arg(long, short = 'G')] no_graph: bool, /// Show patch of modifications to changes /// /// If the previous version has different parents, it will be temporarily /// rebased to the parents of the new version, so the diff is not /// contaminated by unrelated changes. #[arg(long, short = 'p')] patch: bool, #[command(flatten)] diff_format: DiffFormatArgs, } pub fn cmd_op_diff( ui: &mut Ui, command: &CommandHelper, args: &OperationDiffArgs, ) -> Result<(), CommandError> { let workspace_command = command.workspace_helper(ui)?; let workspace_env = workspace_command.env(); let repo_loader = workspace_command.workspace().repo_loader(); let settings = workspace_command.settings(); let from_ops; let to_op; if args.from.is_some() || args.to.is_some() { from_ops = vec![workspace_command.resolve_single_op(args.from.as_deref().unwrap_or("@"))?]; to_op = workspace_command.resolve_single_op(args.to.as_deref().unwrap_or("@"))?; } else { to_op = workspace_command.resolve_single_op(args.operation.as_deref().unwrap_or("@"))?; from_ops = to_op.parents().try_collect()?; } let graph_style = GraphStyle::from_settings(settings)?; let with_content_format = LogContentFormat::new(ui, settings)?; let merged_from_op = repo_loader.merge_operations(from_ops.clone(), None)?; let from_repo = repo_loader.load_at(&merged_from_op)?; let to_repo = repo_loader.load_at(&to_op)?; // Create a new transaction starting from `to_repo`. 
let mut tx = to_repo.start_transaction(); // Merge index from `from_repo` to `to_repo`, so commits in `from_repo` are // accessible. tx.repo_mut().merge_index(&from_repo)?; let merged_repo = tx.repo(); let diff_renderer = { let formats = diff_formats_for_log(settings, &args.diff_format, args.patch)?; let path_converter = workspace_env.path_converter(); let conflict_marker_style = workspace_env.conflict_marker_style(); (!formats.is_empty()) .then(|| DiffRenderer::new(merged_repo, path_converter, conflict_marker_style, formats)) }; let id_prefix_context = workspace_env.new_id_prefix_context(); let commit_summary_template = { let language = workspace_env.commit_template_language(merged_repo, &id_prefix_context); let text = settings.get_string("templates.commit_summary")?; workspace_env .parse_template(ui, &language, &text)? .labeled(["op_diff", "commit"]) }; let op_summary_template = workspace_command .operation_summary_template() .labeled(["op_diff"]); ui.request_pager(); let mut formatter = ui.stdout_formatter(); for op in &from_ops { write!(formatter, "From operation: ")?; op_summary_template.format(op, &mut *formatter)?; writeln!(formatter)?; } // "From operation: " write!(formatter, " To operation: ")?; op_summary_template.format(&to_op, &mut *formatter)?; writeln!(formatter)?; show_op_diff( ui, formatter.as_mut(), merged_repo, &from_repo, &to_repo, &commit_summary_template, (!args.no_graph).then_some(graph_style), &with_content_format, diff_renderer.as_ref(), ) } /// Computes and shows the differences between two operations, using the given /// `ReadonlyRepo`s for the operations. /// `current_repo` should contain a `Repo` with the indices of both repos merged /// into it. 
#[expect(clippy::too_many_arguments)] pub fn show_op_diff( ui: &Ui, formatter: &mut dyn Formatter, current_repo: &dyn Repo, from_repo: &Arc<ReadonlyRepo>, to_repo: &Arc<ReadonlyRepo>, commit_summary_template: &TemplateRenderer<Commit>, graph_style: Option<GraphStyle>, with_content_format: &LogContentFormat, diff_renderer: Option<&DiffRenderer>, ) -> Result<(), CommandError> { let changes = compute_operation_commits_diff(current_repo, from_repo, to_repo)?; if !changes.is_empty() { let revset = RevsetExpression::commits(changes.keys().cloned().collect()).evaluate(current_repo)?; writeln!(formatter)?; with_content_format.write(formatter, |formatter| { writeln!(formatter, "Changed commits:") })?; if let Some(graph_style) = graph_style { let mut raw_output = formatter.raw()?; let mut graph = get_graphlog(graph_style, raw_output.as_mut()); let graph_iter = TopoGroupedGraphIterator::new(revset.iter_graph(), |id| id); for node in graph_iter { let (commit_id, mut edges) = node?; let modified_change = changes.get(&commit_id).unwrap(); // Omit "missing" edge to keep the graph concise. edges.retain(|edge| !edge.is_missing()); let mut buffer = vec![]; let within_graph = with_content_format.sub_width(graph.width(&commit_id, &edges)); within_graph.write(ui.new_formatter(&mut buffer).as_mut(), |formatter| { write_modified_change_summary( formatter, commit_summary_template, modified_change, ) })?; if let Some(diff_renderer) = diff_renderer { let mut formatter = ui.new_formatter(&mut buffer); show_change_diff( ui, formatter.as_mut(), diff_renderer, modified_change, within_graph.width(), ) .block_on()?; } // TODO: customize node symbol? 
let node_symbol = "○"; graph.add_node( &commit_id, &edges, node_symbol, &String::from_utf8_lossy(&buffer), )?; } } else { for commit_id in revset.iter() { let commit_id = commit_id?; let modified_change = changes.get(&commit_id).unwrap(); with_content_format.write(formatter, |formatter| { write_modified_change_summary( formatter, commit_summary_template, modified_change, ) })?; if let Some(diff_renderer) = diff_renderer { let width = with_content_format.width(); show_change_diff(ui, formatter, diff_renderer, modified_change, width) .block_on()?; } } } } let changed_working_copies = diff_named_commit_ids( from_repo.view().wc_commit_ids(), to_repo.view().wc_commit_ids(), ) .collect_vec(); if !changed_working_copies.is_empty() { writeln!(formatter)?; for (name, (from_commit, to_commit)) in changed_working_copies { with_content_format.write(formatter, |formatter| { // Usually, there is at most one working copy changed per operation, so we put // the working copy name in the heading. write!(formatter, "Changed working copy ")?; write!(formatter.labeled("working_copies"), "{}@", name.as_symbol())?; writeln!(formatter, ":")?; write_ref_target_summary( formatter, current_repo, commit_summary_template, &RefTarget::resolved(to_commit.cloned()), true, None, )?; write_ref_target_summary( formatter, current_repo, commit_summary_template, &RefTarget::resolved(from_commit.cloned()), false, None, ) })?; } } let changed_local_bookmarks = diff_named_ref_targets( from_repo.view().local_bookmarks(), to_repo.view().local_bookmarks(), ) .collect_vec(); if !changed_local_bookmarks.is_empty() { writeln!(formatter)?; with_content_format.write(formatter, |formatter| { writeln!(formatter, "Changed local bookmarks:") })?; for (name, (from_target, to_target)) in changed_local_bookmarks { with_content_format.write(formatter, |formatter| { writeln!(formatter, "{name}:", name = name.as_symbol())?; write_ref_target_summary( formatter, current_repo, commit_summary_template, to_target, true, None, 
)?; write_ref_target_summary( formatter, current_repo, commit_summary_template, from_target, false, None, ) })?; } } let changed_local_tags = diff_named_ref_targets(from_repo.view().local_tags(), to_repo.view().local_tags()) .collect_vec(); if !changed_local_tags.is_empty() { writeln!(formatter)?; with_content_format.write(formatter, |formatter| writeln!(formatter, "Changed tags:"))?; for (name, (from_target, to_target)) in changed_local_tags { with_content_format.write(formatter, |formatter| { writeln!(formatter, "{name}:", name = name.as_symbol())?; write_ref_target_summary( formatter, current_repo, commit_summary_template, to_target, true, None, )?; write_ref_target_summary( formatter, current_repo, commit_summary_template, from_target, false, None, ) })?; } } let ignored_remote = default_ignored_remote_name(current_repo.store()); let changed_remote_bookmarks = diff_named_remote_refs( from_repo.view().all_remote_bookmarks(), to_repo.view().all_remote_bookmarks(), ) // Skip updates to the local git repo, since they should typically be covered in // local branches. 
.filter(|(symbol, _)| ignored_remote.is_none_or(|ignored| symbol.remote != ignored)) .collect_vec(); if !changed_remote_bookmarks.is_empty() { writeln!(formatter)?; with_content_format.write(formatter, |formatter| { writeln!(formatter, "Changed remote bookmarks:") })?; let get_remote_ref_prefix = |remote_ref: &RemoteRef| match remote_ref.state { RemoteRefState::New => "untracked", RemoteRefState::Tracked => "tracked", }; for (symbol, (from_ref, to_ref)) in changed_remote_bookmarks { with_content_format.write(formatter, |formatter| { writeln!(formatter, "{symbol}:")?; write_ref_target_summary( formatter, current_repo, commit_summary_template, &to_ref.target, true, Some(get_remote_ref_prefix(to_ref)), )?; write_ref_target_summary( formatter, current_repo, commit_summary_template, &from_ref.target, false, Some(get_remote_ref_prefix(from_ref)), ) })?; } } Ok(()) } /// Writes a summary for the given `ModifiedChange`. fn write_modified_change_summary( formatter: &mut dyn Formatter, commit_summary_template: &TemplateRenderer<Commit>, modified_change: &ModifiedChange, ) -> Result<(), std::io::Error> { for commit in modified_change.added_commits() { write!(formatter.labeled("diff").labeled("added"), "+")?; write!(formatter, " ")?; commit_summary_template.format(commit, formatter)?; writeln!(formatter)?; } for commit in modified_change.removed_commits() { write!(formatter.labeled("diff").labeled("removed"), "-")?; write!(formatter, " ")?; commit_summary_template.format(commit, formatter)?; writeln!(formatter)?; } Ok(()) } /// Writes a summary for the given `RefTarget`. 
fn write_ref_target_summary(
    formatter: &mut dyn Formatter,
    repo: &dyn Repo,
    commit_summary_template: &TemplateRenderer<Commit>,
    ref_target: &RefTarget,
    added: bool,
    prefix: Option<&str>,
) -> Result<(), CommandError> {
    // Emits the leading "+"/"-" diff marker (labeled for coloring), a space,
    // and an optional prefix word (e.g. "tracked"/"untracked" for remote refs).
    let write_prefix = |formatter: &mut dyn Formatter,
                        added: bool,
                        prefix: Option<&str>|
     -> Result<(), CommandError> {
        if added {
            write!(formatter.labeled("diff").labeled("added"), "+")?;
        } else {
            write!(formatter.labeled("diff").labeled("removed"), "-")?;
        }
        write!(formatter, " ")?;
        if let Some(prefix) = prefix {
            write!(formatter, "{prefix} ")?;
        }
        Ok(())
    };
    if ref_target.is_absent() {
        write_prefix(formatter, added, prefix)?;
        writeln!(formatter, "(absent)")?;
    } else if ref_target.has_conflict() {
        // A conflicted target has multiple added/removed commits; print one
        // summary line per side of the conflict.
        for commit_id in ref_target.added_ids() {
            write_prefix(formatter, added, prefix)?;
            write!(formatter, "(added) ")?;
            let commit = repo.store().get_commit(commit_id)?;
            commit_summary_template.format(&commit, formatter)?;
            writeln!(formatter)?;
        }
        for commit_id in ref_target.removed_ids() {
            write_prefix(formatter, added, prefix)?;
            write!(formatter, "(removed) ")?;
            let commit = repo.store().get_commit(commit_id)?;
            commit_summary_template.format(&commit, formatter)?;
            writeln!(formatter)?;
        }
    } else {
        // Normal (single-commit) target.
        write_prefix(formatter, added, prefix)?;
        let commit_id = ref_target.as_normal().unwrap();
        let commit = repo.store().get_commit(commit_id)?;
        commit_summary_template.format(&commit, formatter)?;
        writeln!(formatter)?;
    }
    Ok(())
}

#[derive(Clone, Debug, PartialEq, Eq)]
enum ModifiedChange {
    /// Created or rewritten commit.
    Existing {
        commit: Commit,
        predecessors: Vec<Commit>,
    },
    /// Abandoned commit.
    Abandoned { commit: Commit },
}

impl ModifiedChange {
    /// Old commits of this change: the predecessors of a created/rewritten
    /// commit, or the abandoned commit itself.
    fn removed_commits(&self) -> &[Commit] {
        match self {
            Self::Existing { predecessors, .. } => predecessors,
            Self::Abandoned { commit } => slice::from_ref(commit),
        }
    }

    /// New commits of this change: the created/rewritten commit, or nothing
    /// for an abandoned change.
    fn added_commits(&self) -> &[Commit] {
        match self {
            Self::Existing { commit, .. } => slice::from_ref(commit),
            Self::Abandoned { .. } => &[],
        }
    }
}

/// Computes created/rewritten/abandoned commits between two operations.
///
/// Returns a map of [`ModifiedChange`]s containing the new and old commits. For
/// created/rewritten commits, the map entries are indexed by new ids. For
/// abandoned commits, the entries are indexed by old ids.
fn compute_operation_commits_diff(
    repo: &dyn Repo,
    from_repo: &ReadonlyRepo,
    to_repo: &ReadonlyRepo,
) -> Result<HashMap<CommitId, ModifiedChange>, CommandError> {
    let store = repo.store();
    let from_heads = from_repo.view().heads().iter().cloned().collect_vec();
    let to_heads = to_repo.view().heads().iter().cloned().collect_vec();
    let from_expr = RevsetExpression::commits(from_heads);
    let to_expr = RevsetExpression::commits(to_heads);
    // Predecessors recorded between the two operations, keyed by the new
    // commit id.
    let predecessor_commits = accumulate_predecessors(
        slice::from_ref(to_repo.operation()),
        slice::from_ref(from_repo.operation()),
    )?;
    // Collect hidden commits to find abandoned/rewritten changes.
    let mut hidden_commits_by_change: HashMap<ChangeId, CommitId> = HashMap::new();
    let mut abandoned_commits: HashSet<CommitId> = HashSet::new();
    let newly_hidden = to_expr.range(&from_expr).evaluate(repo)?;
    for item in newly_hidden.commit_change_ids() {
        let (commit_id, change_id) = item?;
        // Just pick one if diverged. Divergent commits shouldn't be considered
        // "squashed" into the new commit.
        hidden_commits_by_change
            .entry(change_id)
            .or_insert_with(|| commit_id.clone());
        abandoned_commits.insert(commit_id);
    }
    // For each new commit, copy/deduce predecessors based on change id.
    let mut changes: HashMap<CommitId, ModifiedChange> = HashMap::new();
    let newly_visible = from_expr.range(&to_expr).evaluate(repo)?;
    for item in newly_visible.commit_change_ids() {
        let (commit_id, change_id) = item?;
        let predecessor_ids = if let Some(ids) = predecessor_commits.get(&commit_id) {
            ids // including visible predecessors
        } else if let Some(id) = hidden_commits_by_change.get(&change_id) {
            // No recorded predecessors; fall back to the hidden commit that
            // shares this change id.
            slice::from_ref(id)
        } else {
            &[]
        };
        // A hidden commit that turned out to be a predecessor was rewritten,
        // not abandoned.
        for id in predecessor_ids {
            abandoned_commits.remove(id);
        }
        let change = ModifiedChange::Existing {
            commit: store.get_commit(&commit_id)?,
            predecessors: predecessor_ids
                .iter()
                .map(|id| store.get_commit(id))
                .try_collect()?,
        };
        changes.insert(commit_id, change);
    }
    // Record remainders as abandoned.
    for commit_id in abandoned_commits {
        let change = ModifiedChange::Abandoned {
            commit: store.get_commit(&commit_id)?,
        };
        changes.insert(commit_id, change);
    }
    Ok(changes)
}

/// Displays the diffs of a modified change.
///
/// For created/rewritten commits, the diff is shown between the old (or
/// predecessor) commits and the new commit. The old commits are temporarily
/// rebased onto the new commit's parents. For abandoned commits, the diff is
/// shown of that commit's contents.
async fn show_change_diff(
    ui: &Ui,
    formatter: &mut dyn Formatter,
    diff_renderer: &DiffRenderer<'_>,
    change: &ModifiedChange,
    width: usize,
) -> Result<(), CommandError> {
    match change {
        ModifiedChange::Existing {
            commit,
            predecessors,
        } => {
            diff_renderer
                .show_inter_diff(
                    ui,
                    formatter,
                    // TODO: It's technically wrong to show diffs from the first
                    // predecessor, but diff of partial "squash" operation would be
                    // unreadable otherwise. We have the same problem in "evolog",
                    // but it's less of an issue there because "evolog" shows the
                    // predecessors recursively.
                    predecessors.get(..1).unwrap_or(&[]),
                    commit,
                    &EverythingMatcher,
                    width,
                )
                .await?;
        }
        ModifiedChange::Abandoned { commit } => {
            // TODO: Should we show a reverse diff?
            diff_renderer
                .show_patch(ui, formatter, commit, &EverythingMatcher, width)
                .await?;
        }
    }
    Ok(())
}
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/operation/log.rs
cli/src/commands/operation/log.rs
// Copyright 2020-2023 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use std::slice;

use clap_complete::ArgValueCandidates;
use itertools::Itertools as _;
use jj_lib::graph::GraphEdge;
use jj_lib::graph::reverse_graph;
use jj_lib::op_store::OpStoreError;
use jj_lib::op_walk;
use jj_lib::operation::Operation;
use jj_lib::repo::RepoLoader;

use super::diff::show_op_diff;
use crate::cli_util::CommandHelper;
use crate::cli_util::LogContentFormat;
use crate::cli_util::WorkspaceCommandEnvironment;
use crate::cli_util::format_template;
use crate::command_error::CommandError;
use crate::complete;
use crate::diff_util::DiffFormatArgs;
use crate::diff_util::DiffRenderer;
use crate::diff_util::diff_formats_for_log;
use crate::formatter::Formatter;
use crate::graphlog::GraphStyle;
use crate::graphlog::get_graphlog;
use crate::operation_templater::OperationTemplateLanguage;
use crate::templater::TemplateRenderer;
use crate::ui::Ui;

/// Show the operation log
///
/// Like other commands, `jj op log` snapshots the current working-copy changes
/// and reconciles divergent operations. Use `--at-op=@ --ignore-working-copy`
/// to inspect the current state without mutation.
#[derive(clap::Args, Clone, Debug)]
pub struct OperationLogArgs {
    /// Limit number of operations to show
    ///
    /// Applied after operations are reordered topologically, but before being
    /// reversed.
    #[arg(long, short = 'n')]
    limit: Option<usize>,
    /// Show operations in the opposite order (older operations first)
    #[arg(long)]
    reversed: bool,
    /// Don't show the graph, show a flat list of operations
    #[arg(long, short = 'G')]
    no_graph: bool,
    /// Render each operation using the given template
    ///
    /// You can specify arbitrary template expressions using the
    /// [built-in keywords]. See [`jj help -k templates`] for more
    /// information.
    ///
    /// [built-in keywords]:
    /// https://docs.jj-vcs.dev/latest/templates/#operation-keywords
    ///
    /// [`jj help -k templates`]:
    /// https://docs.jj-vcs.dev/latest/templates/
    #[arg(long, short = 'T')]
    #[arg(add = ArgValueCandidates::new(complete::template_aliases))]
    template: Option<String>,
    /// Show changes to the repository at each operation
    #[arg(long, short = 'd')]
    op_diff: bool,
    /// Show patch of modifications to changes (implies --op-diff)
    ///
    /// If the previous version has different parents, it will be temporarily
    /// rebased to the parents of the new version, so the diff is not
    /// contaminated by unrelated changes.
    #[arg(long, short = 'p')]
    patch: bool,
    #[command(flatten)]
    diff_format: DiffFormatArgs,
}

pub fn cmd_op_log(
    ui: &mut Ui,
    command: &CommandHelper,
    args: &OperationLogArgs,
) -> Result<(), CommandError> {
    if command.is_working_copy_writable() {
        let workspace_command = command.workspace_helper(ui)?;
        let current_op = workspace_command.repo().operation();
        let repo_loader = workspace_command.workspace().repo_loader();
        do_op_log(ui, workspace_command.env(), repo_loader, current_op, args)
    } else {
        // Don't load the repo so that the operation history can be inspected
        // even with a corrupted repo state. For example, you can find the first
        // bad operation id to be abandoned.
        let workspace = command.load_workspace()?;
        let workspace_env = command.workspace_environment(ui, &workspace)?;
        let repo_loader = workspace.repo_loader();
        let current_op = command.resolve_operation(ui, workspace.repo_loader())?;
        do_op_log(ui, &workspace_env, repo_loader, &current_op, args)
    }
}

// Shared implementation for both the writable and read-only entry paths above.
fn do_op_log(
    ui: &mut Ui,
    workspace_env: &WorkspaceCommandEnvironment,
    repo_loader: &RepoLoader,
    current_op: &Operation,
    args: &OperationLogArgs,
) -> Result<(), CommandError> {
    let settings = repo_loader.settings();
    let graph_style = GraphStyle::from_settings(settings)?;
    let with_content_format = LogContentFormat::new(ui, settings)?;
    // Parse the per-row and graph-node templates before any output is written.
    let template: TemplateRenderer<Operation>;
    let op_node_template: TemplateRenderer<Operation>;
    {
        let language = OperationTemplateLanguage::new(
            repo_loader,
            Some(current_op.id()),
            workspace_env.operation_template_extensions(),
        );
        let text = match &args.template {
            Some(value) => value.to_owned(),
            None => settings.get_string("templates.op_log")?,
        };
        template = workspace_env
            .parse_template(ui, &language, &text)?
            .labeled(["op_log", "operation"]);
        op_node_template = workspace_env
            .parse_template(
                ui,
                &language,
                &settings.get_string("templates.op_log_node")?,
            )?
            .labeled(["op_log", "operation", "node"]);
    }
    let diff_formats = diff_formats_for_log(settings, &args.diff_format, args.patch)?;
    // With --op-diff or any diff format, build a closure that renders the
    // repository changes made by a single operation (vs. its merged parents).
    let maybe_show_op_diff = if args.op_diff || !diff_formats.is_empty() {
        let template_text = settings.get_string("templates.commit_summary")?;
        let show = move |ui: &Ui,
                         formatter: &mut dyn Formatter,
                         op: &Operation,
                         with_content_format: &LogContentFormat| {
            let parent_ops: Vec<_> = op.parents().try_collect()?;
            let merged_parent_op = repo_loader.merge_operations(parent_ops.clone(), None)?;
            let parent_repo = repo_loader.load_at(&merged_parent_op)?;
            let repo = repo_loader.load_at(op)?;
            let id_prefix_context = workspace_env.new_id_prefix_context();
            let commit_summary_template = {
                let language =
                    workspace_env.commit_template_language(repo.as_ref(), &id_prefix_context);
                workspace_env
                    .parse_template(ui, &language, &template_text)?
                    .labeled(["op_log", "commit"])
            };
            let path_converter = workspace_env.path_converter();
            let conflict_marker_style = workspace_env.conflict_marker_style();
            let diff_renderer = (!diff_formats.is_empty()).then(|| {
                DiffRenderer::new(
                    repo.as_ref(),
                    path_converter,
                    conflict_marker_style,
                    diff_formats.clone(),
                )
            });
            // TODO: Merged repo may have newly rebased commits, which wouldn't
            // exist in the index. (#4465)
            if parent_ops.len() > 1 {
                return Ok(());
            }
            show_op_diff(
                ui,
                formatter,
                repo.as_ref(),
                &parent_repo,
                &repo,
                &commit_summary_template,
                (!args.no_graph).then_some(graph_style),
                with_content_format,
                diff_renderer.as_ref(),
            )
        };
        Some(show)
    } else {
        None
    };
    ui.request_pager();
    let mut formatter = ui.stdout_formatter();
    let formatter = formatter.as_mut();
    // The limit applies to the ancestor walk (newest first), before any
    // reversal below.
    let iter = op_walk::walk_ancestors(slice::from_ref(current_op))
        .take(args.limit.unwrap_or(usize::MAX));
    if !args.no_graph {
        // Graph output: render each operation into a buffer, then attach the
        // buffered text to its graph node.
        let mut raw_output = formatter.raw()?;
        let mut graph = get_graphlog(graph_style, raw_output.as_mut());
        let iter = iter.map(|op| -> Result<_, OpStoreError> {
            let op = op?;
            let ids = op.parent_ids();
            let edges = ids.iter().cloned().map(GraphEdge::direct).collect();
            Ok((op, edges))
        });
        // Reversed output requires buffering the whole graph first.
        let iter_nodes: Box<dyn Iterator<Item = _>> = if args.reversed {
            Box::new(reverse_graph(iter, Operation::id)?.into_iter().map(Ok))
        } else {
            Box::new(iter)
        };
        for node in iter_nodes {
            let (op, edges) = node?;
            let mut buffer = vec![];
            let within_graph = with_content_format.sub_width(graph.width(op.id(), &edges));
            within_graph.write(ui.new_formatter(&mut buffer).as_mut(), |formatter| {
                template.format(&op, formatter)
            })?;
            if let Some(show) = &maybe_show_op_diff {
                let mut formatter = ui.new_formatter(&mut buffer);
                show(ui, formatter.as_mut(), &op, &within_graph)?;
            }
            let node_symbol = format_template(ui, &op, &op_node_template);
            graph.add_node(
                op.id(),
                &edges,
                &node_symbol,
                &String::from_utf8_lossy(&buffer),
            )?;
        }
    } else {
        // Flat output: --reversed buffers the walk and iterates it backwards.
        let iter: Box<dyn Iterator<Item = _>> = if args.reversed {
            Box::new(iter.collect_vec().into_iter().rev())
        } else {
            Box::new(iter)
        };
        for op in iter {
            let op = op?;
            with_content_format.write(formatter, |formatter| template.format(&op, formatter))?;
            if let Some(show) = &maybe_show_op_diff {
                show(ui, formatter, &op, &with_content_format)?;
            }
        }
    }
    Ok(())
}
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/operation/mod.rs
cli/src/commands/operation/mod.rs
// Copyright 2020-2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. mod abandon; mod diff; mod log; mod restore; pub mod revert; mod show; use abandon::OperationAbandonArgs; use abandon::cmd_op_abandon; use clap::Subcommand; use diff::OperationDiffArgs; use diff::cmd_op_diff; use log::OperationLogArgs; use log::cmd_op_log; use restore::OperationRestoreArgs; use restore::cmd_op_restore; use revert::OperationRevertArgs; use revert::cmd_op_revert; use show::OperationShowArgs; use show::cmd_op_show; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::commands::renamed_cmd; use crate::ui::Ui; /// Commands for working with the operation log /// /// See the [operation log documentation] for more information. 
/// /// [operation log documentation]: /// https://docs.jj-vcs.dev/latest/operation-log/ #[derive(Subcommand, Clone, Debug)] pub enum OperationCommand { Abandon(OperationAbandonArgs), Diff(OperationDiffArgs), Log(OperationLogArgs), Restore(OperationRestoreArgs), Revert(OperationRevertArgs), Show(OperationShowArgs), // TODO: Delete in jj 0.39.0+ #[command(hide = true)] Undo(OperationRevertArgs), } pub fn cmd_operation( ui: &mut Ui, command: &CommandHelper, subcommand: &OperationCommand, ) -> Result<(), CommandError> { match subcommand { OperationCommand::Abandon(args) => cmd_op_abandon(ui, command, args), OperationCommand::Diff(args) => cmd_op_diff(ui, command, args), OperationCommand::Log(args) => cmd_op_log(ui, command, args), OperationCommand::Restore(args) => cmd_op_restore(ui, command, args), OperationCommand::Revert(args) => cmd_op_revert(ui, command, args), OperationCommand::Show(args) => cmd_op_show(ui, command, args), OperationCommand::Undo(args) => { let cmd = renamed_cmd("op undo", "op revert", cmd_op_revert); cmd(ui, command, args) } } } // pub for `jj undo` #[derive(Copy, Clone, Debug, PartialEq, Eq, PartialOrd, Ord, clap::ValueEnum)] pub(crate) enum RevertWhatToRestore { /// The jj repo state and local bookmarks Repo, /// The remote-tracking bookmarks. 
Do not restore these if you'd like to /// push after the undo RemoteTracking, } // pub for `jj undo` pub(crate) const DEFAULT_REVERT_WHAT: [RevertWhatToRestore; 2] = [ RevertWhatToRestore::Repo, RevertWhatToRestore::RemoteTracking, ]; /// Restore only the portions of the view specified by the `what` argument pub(crate) fn view_with_desired_portions_restored( view_being_restored: &jj_lib::op_store::View, current_view: &jj_lib::op_store::View, what: &[RevertWhatToRestore], ) -> jj_lib::op_store::View { let repo_source = if what.contains(&RevertWhatToRestore::Repo) { view_being_restored } else { current_view }; let remote_source = if what.contains(&RevertWhatToRestore::RemoteTracking) { view_being_restored } else { current_view }; jj_lib::op_store::View { head_ids: repo_source.head_ids.clone(), local_bookmarks: repo_source.local_bookmarks.clone(), local_tags: repo_source.local_tags.clone(), remote_views: remote_source.remote_views.clone(), git_refs: current_view.git_refs.clone(), git_head: current_view.git_head.clone(), wc_commit_ids: repo_source.wc_commit_ids.clone(), } }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/operation/show.rs
cli/src/commands/operation/show.rs
// Copyright 2024 The Jujutsu Authors
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// https://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

use clap_complete::ArgValueCandidates;
use itertools::Itertools as _;
use jj_lib::operation::Operation;

use super::diff::show_op_diff;
use crate::cli_util::CommandHelper;
use crate::cli_util::LogContentFormat;
use crate::command_error::CommandError;
use crate::complete;
use crate::diff_util::DiffFormatArgs;
use crate::diff_util::DiffRenderer;
use crate::diff_util::diff_formats_for_log;
use crate::graphlog::GraphStyle;
use crate::templater::TemplateRenderer;
use crate::ui::Ui;

/// Show changes to the repository in an operation
#[derive(clap::Args, Clone, Debug)]
pub struct OperationShowArgs {
    /// Show repository changes in this operation, compared to its parent(s)
    #[arg(default_value = "@")]
    #[arg(add = ArgValueCandidates::new(complete::operations))]
    operation: String,
    /// Don't show the graph, show a flat list of modified changes
    #[arg(long, short = 'G')]
    no_graph: bool,
    /// Render the operation using the given template
    ///
    /// You can specify arbitrary template expressions using the
    /// [built-in keywords]. See [`jj help -k templates`] for more
    /// information.
    ///
    /// [built-in keywords]:
    /// https://docs.jj-vcs.dev/latest/templates/#operation-keywords
    ///
    /// [`jj help -k templates`]:
    /// https://docs.jj-vcs.dev/latest/templates/
    #[arg(long, short = 'T')]
    #[arg(add = ArgValueCandidates::new(complete::template_aliases))]
    template: Option<String>,
    /// Show patch of modifications to changes
    ///
    /// If the previous version has different parents, it will be temporarily
    /// rebased to the parents of the new version, so the diff is not
    /// contaminated by unrelated changes.
    #[arg(long, short = 'p')]
    patch: bool,
    /// Do not show operation diff
    #[arg(long, conflicts_with_all = ["patch", "DiffFormatArgs"])]
    no_op_diff: bool,
    #[command(flatten)]
    diff_format: DiffFormatArgs,
}

pub fn cmd_op_show(
    ui: &mut Ui,
    command: &CommandHelper,
    args: &OperationShowArgs,
) -> Result<(), CommandError> {
    let workspace_command = command.workspace_helper(ui)?;
    let workspace_env = workspace_command.env();
    let repo_loader = workspace_command.workspace().repo_loader();
    let settings = workspace_command.settings();
    let op = workspace_command.resolve_single_op(&args.operation)?;
    // Changes are computed against the merge of the operation's parents.
    let parent_ops: Vec<_> = op.parents().try_collect()?;
    let merged_parent_op = repo_loader.merge_operations(parent_ops.clone(), None)?;
    let parent_repo = repo_loader.load_at(&merged_parent_op)?;
    let repo = repo_loader.load_at(&op)?;
    let id_prefix_context = workspace_env.new_id_prefix_context();
    let commit_summary_template = {
        let language = workspace_env.commit_template_language(repo.as_ref(), &id_prefix_context);
        let text = settings.get_string("templates.commit_summary")?;
        workspace_env
            .parse_template(ui, &language, &text)?
            .labeled(["op_show", "commit"])
    };
    let graph_style = GraphStyle::from_settings(settings)?;
    let with_content_format = LogContentFormat::new(ui, settings)?;
    // Only build a diff renderer when some diff format (e.g. --patch) was
    // requested.
    let diff_renderer = {
        let formats = diff_formats_for_log(settings, &args.diff_format, args.patch)?;
        let path_converter = workspace_env.path_converter();
        let conflict_marker_style = workspace_env.conflict_marker_style();
        (!formats.is_empty()).then(|| {
            DiffRenderer::new(
                repo.as_ref(),
                path_converter,
                conflict_marker_style,
                formats,
            )
        })
    };
    let template: TemplateRenderer<Operation> = {
        let text = match &args.template {
            Some(value) => value.to_owned(),
            None => settings.get_string("templates.op_show")?,
        };
        workspace_command
            .parse_operation_template(ui, &text)?
            .labeled(["op_show", "operation"])
    };
    ui.request_pager();
    let mut formatter = ui.stdout_formatter();
    template.format(&op, formatter.as_mut())?;
    if !args.no_op_diff {
        // TODO: Merged repo may have newly rebased commits, which wouldn't exist in
        // the index. (#4465)
        if parent_ops.len() > 1 {
            return Ok(());
        }
        show_op_diff(
            ui,
            formatter.as_mut(),
            repo.as_ref(),
            &parent_repo,
            &repo,
            &commit_summary_template,
            (!args.no_graph).then_some(graph_style),
            &with_content_format,
            diff_renderer.as_ref(),
        )?;
    }
    Ok(())
}
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false
jj-vcs/jj
https://github.com/jj-vcs/jj/blob/10efcf35613c9c2076278f1721b5e6826e77c144/cli/src/commands/operation/restore.rs
cli/src/commands/operation/restore.rs
// Copyright 2020-2023 The Jujutsu Authors // // Licensed under the Apache License, Version 2.0 (the "License"); // you may not use this file except in compliance with the License. // You may obtain a copy of the License at // // https://www.apache.org/licenses/LICENSE-2.0 // // Unless required by applicable law or agreed to in writing, software // distributed under the License is distributed on an "AS IS" BASIS, // WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. // See the License for the specific language governing permissions and // limitations under the License. use clap_complete::ArgValueCandidates; use jj_lib::object_id::ObjectId as _; use super::DEFAULT_REVERT_WHAT; use super::RevertWhatToRestore; use super::view_with_desired_portions_restored; use crate::cli_util::CommandHelper; use crate::command_error::CommandError; use crate::complete; use crate::ui::Ui; /// Create a new operation that restores the repo to an earlier state /// /// This restores the repo to the state at the specified operation, effectively /// undoing all later operations. It does so by creating a new operation. #[derive(clap::Args, Clone, Debug)] pub struct OperationRestoreArgs { /// The operation to restore to /// /// Use `jj op log` to find an operation to restore to. Use e.g. `jj /// --at-op=<operation ID> log` before restoring to an operation to see the /// state of the repo at that operation. #[arg(add = ArgValueCandidates::new(complete::operations))] operation: String, /// What portions of the local state to restore (can be repeated) /// /// This option is EXPERIMENTAL. 
#[arg(long, value_enum, default_values_t = DEFAULT_REVERT_WHAT)] what: Vec<RevertWhatToRestore>, } pub fn cmd_op_restore( ui: &mut Ui, command: &CommandHelper, args: &OperationRestoreArgs, ) -> Result<(), CommandError> { let mut workspace_command = command.workspace_helper(ui)?; let target_op = workspace_command.resolve_single_op(&args.operation)?; let mut tx = workspace_command.start_transaction(); let new_view = view_with_desired_portions_restored( target_op.view()?.store_view(), tx.base_repo().view().store_view(), &args.what, ); tx.repo_mut().set_view(new_view); if let Some(mut formatter) = ui.status_formatter() { write!(formatter, "Restored to operation: ")?; let template = tx.base_workspace_helper().operation_summary_template(); template.format(&target_op, formatter.as_mut())?; writeln!(formatter)?; } tx.finish(ui, format!("restore to operation {}", target_op.id().hex()))?; Ok(()) }
rust
Apache-2.0
10efcf35613c9c2076278f1721b5e6826e77c144
2026-01-04T15:37:48.912814Z
false