repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/src/sync/reusable_box.rs | tokio-util/src/sync/reusable_box.rs | use std::alloc::Layout;
use std::fmt;
use std::future::{self, Future};
use std::mem::{self, ManuallyDrop};
use std::pin::Pin;
use std::ptr;
use std::task::{Context, Poll};
/// A reusable `Pin<Box<dyn Future<Output = T> + Send + 'a>>`.
///
/// This type lets you replace the future stored in the box without
/// reallocating when the size and alignment permits this.
pub struct ReusableBoxFuture<'a, T> {
    /// The currently stored future. `set`/`try_set` reuse this box's
    /// allocation whenever the replacement future has the same layout.
    boxed: Pin<Box<dyn Future<Output = T> + Send + 'a>>,
}
impl<'a, T> ReusableBoxFuture<'a, T> {
    /// Create a new `ReusableBoxFuture<T>` containing the provided future.
    pub fn new<F>(future: F) -> Self
    where
        F: Future<Output = T> + Send + 'a,
    {
        Self {
            boxed: Box::pin(future),
        }
    }

    /// Replace the future currently stored in this box.
    ///
    /// This reallocates if and only if the layout of the provided future is
    /// different from the layout of the currently stored future.
    pub fn set<F>(&mut self, future: F)
    where
        F: Future<Output = T> + Send + 'a,
    {
        // Fast path: reuse the current allocation via `try_set`. On a layout
        // mismatch the future is handed back and we fall back to a fresh box.
        if let Err(future) = self.try_set(future) {
            *self = Self::new(future);
        }
    }

    /// Replace the future currently stored in this box.
    ///
    /// This function never reallocates, but returns an error if the provided
    /// future has a different size or alignment from the currently stored
    /// future.
    pub fn try_set<F>(&mut self, future: F) -> Result<(), F>
    where
        F: Future<Output = T> + Send + 'a,
    {
        // If we try to inline the contents of this function, the type checker complains because
        // the bound `T: 'a` is not satisfied in the call to `pending()`. But by putting it in an
        // inner function that doesn't have `T` as a generic parameter, we implicitly get the bound
        // `F::Output: 'a` transitively through `F: 'a`, allowing us to call `pending()`.
        #[inline(always)]
        fn real_try_set<'a, F>(
            this: &mut ReusableBoxFuture<'a, F::Output>,
            future: F,
        ) -> Result<(), F>
        where
            F: Future + Send + 'a,
        {
            // future::Pending<T> is a ZST so this never allocates.
            // Temporarily park a pending future in `this.boxed` so we own
            // the old box and can try to reuse its allocation.
            let boxed = mem::replace(&mut this.boxed, Box::pin(future::pending()));
            reuse_pin_box(boxed, future, |boxed| this.boxed = Pin::from(boxed))
        }

        real_try_set(self, future)
    }

    /// Get a pinned reference to the underlying future.
    pub fn get_pin(&mut self) -> Pin<&mut (dyn Future<Output = T> + Send)> {
        self.boxed.as_mut()
    }

    /// Poll the future stored inside this box.
    pub fn poll(&mut self, cx: &mut Context<'_>) -> Poll<T> {
        self.get_pin().poll(cx)
    }
}
impl<T> Future for ReusableBoxFuture<'_, T> {
    type Output = T;

    /// Poll the future stored inside this box.
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<T> {
        // `ReusableBoxFuture` only holds a `Pin<Box<_>>`, so it is `Unpin`
        // and we may freely take a mutable reference out of the pin.
        let this = self.get_mut();
        this.get_pin().poll(cx)
    }
}
// The only method called on self.boxed is poll, which takes &mut self, so this
// struct being Sync does not permit any invalid access to the Future, even if
// the future is not Sync.
//
// SAFETY: a shared `&ReusableBoxFuture` exposes no way to reach the inner
// future (every accessor above takes `&mut self`), so sharing references
// across threads cannot cause concurrent access to a non-`Sync` future.
unsafe impl<T> Sync for ReusableBoxFuture<'_, T> {}
impl<T> fmt::Debug for ReusableBoxFuture<'_, T> {
    /// Format as an opaque `ReusableBoxFuture` record; the boxed
    /// `dyn Future` has no `Debug` bound, so no fields are shown.
    fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
        formatter.debug_struct("ReusableBoxFuture").finish()
    }
}
/// Attempts to store `new_value` in the allocation currently held by `boxed`.
///
/// On success, `callback` receives a box holding `new_value` inside the
/// original allocation and its result is returned as `Ok`. If the layouts of
/// `T` and `U` differ, `new_value` is returned unchanged as `Err`.
fn reuse_pin_box<T: ?Sized, U, O, F>(boxed: Pin<Box<T>>, new_value: U, callback: F) -> Result<O, U>
where
    F: FnOnce(Box<U>) -> O,
{
    // Reuse is only sound when size and alignment match exactly.
    let layout = Layout::for_value::<T>(&*boxed);
    if layout != Layout::new::<U>() {
        return Err(new_value);
    }

    // SAFETY: We don't ever construct a non-pinned reference to the old `T` from now on, and we
    // always drop the `T`.
    let raw: *mut T = Box::into_raw(unsafe { Pin::into_inner_unchecked(boxed) });

    // When dropping the old value panics, we still want to call `callback` — so move the rest of
    // the code into a guard type.
    let guard = CallOnDrop::new(|| {
        let raw: *mut U = raw.cast::<U>();
        unsafe { raw.write(new_value) };

        // SAFETY:
        // - `T` and `U` have the same layout.
        // - `raw` comes from a `Box` that uses the same allocator as this one.
        // - `raw` points to a valid instance of `U` (we just wrote it in).
        let boxed = unsafe { Box::from_raw(raw) };

        callback(boxed)
    });

    // Drop the old value. If this panics, `guard`'s destructor still writes
    // `new_value` into the allocation and runs `callback`.
    unsafe { ptr::drop_in_place(raw) };

    // Run the rest of the code.
    Ok(guard.call())
}
/// A guard that runs a closure exactly once: either explicitly through
/// [`CallOnDrop::call`] (returning its result) or implicitly when dropped
/// (e.g. while unwinding from a panic).
struct CallOnDrop<O, F: FnOnce() -> O> {
    /// The pending closure; `None` once it has been executed.
    f: Option<F>,
}

impl<O, F: FnOnce() -> O> CallOnDrop<O, F> {
    /// Creates a guard that will run `f` at the latest when dropped.
    fn new(f: F) -> Self {
        Self { f: Some(f) }
    }

    /// Runs the closure now and returns its result.
    fn call(mut self) -> O {
        // Taking the closure leaves `None` behind, so the `Drop` impl below
        // becomes a no-op when `self` is dropped at the end of this function
        // (or during unwinding if the closure panics).
        let f = self.f.take().expect("closure already taken");
        f()
    }
}

impl<O, F: FnOnce() -> O> Drop for CallOnDrop<O, F> {
    fn drop(&mut self) {
        // Runs the closure only if `call` was never invoked.
        if let Some(f) = self.f.take() {
            f();
        }
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/src/sync/tests/loom_cancellation_token.rs | tokio-util/src/sync/tests/loom_cancellation_token.rs | use crate::sync::CancellationToken;
use loom::{future::block_on, thread};
use tokio_test::assert_ok;
#[test]
fn cancel_token() {
    // Loom exhaustively explores thread interleavings: a waiter on
    // `cancelled()` must always observe the cancellation.
    loom::model(|| {
        let token = CancellationToken::new();
        let token1 = token.clone();

        let th1 = thread::spawn(move || {
            block_on(async {
                token1.cancelled().await;
            });
        });

        let th2 = thread::spawn(move || {
            token.cancel();
        });

        assert_ok!(th1.join());
        assert_ok!(th2.join());
    });
}

#[test]
fn cancel_token_owned() {
    // Same as `cancel_token`, but waiting through the owned future variant,
    // which consumes its token.
    loom::model(|| {
        let token = CancellationToken::new();
        let token1 = token.clone();

        let th1 = thread::spawn(move || {
            block_on(async {
                token1.cancelled_owned().await;
            });
        });

        let th2 = thread::spawn(move || {
            token.cancel();
        });

        assert_ok!(th1.join());
        assert_ok!(th2.join());
    });
}
#[test]
fn cancel_with_child() {
    // Cancelling a token must wake waiters on both the token itself and a
    // derived child token, under every interleaving.
    loom::model(|| {
        let token = CancellationToken::new();
        let token1 = token.clone();
        let token2 = token.clone();
        let child_token = token.child_token();

        let th1 = thread::spawn(move || {
            block_on(async {
                token1.cancelled().await;
            });
        });

        let th2 = thread::spawn(move || {
            token2.cancel();
        });

        let th3 = thread::spawn(move || {
            block_on(async {
                child_token.cancelled().await;
            });
        });

        assert_ok!(th1.join());
        assert_ok!(th2.join());
        assert_ok!(th3.join());
    });
}

#[test]
fn drop_token_no_child() {
    // Concurrently dropping all clones of a token must be race-free.
    loom::model(|| {
        let token = CancellationToken::new();
        let token1 = token.clone();
        let token2 = token.clone();

        let th1 = thread::spawn(move || {
            drop(token1);
        });

        let th2 = thread::spawn(move || {
            drop(token2);
        });

        let th3 = thread::spawn(move || {
            drop(token);
        });

        assert_ok!(th1.join());
        assert_ok!(th2.join());
        assert_ok!(th3.join());
    });
}
// Temporarily disabled due to a false positive in loom -
// see https://github.com/tokio-rs/tokio/pull/7644#issuecomment-3328381344
#[ignore]
#[test]
fn drop_token_with_children() {
    // Dropping a parent concurrently with its children exercises the
    // re-parenting/detach logic of the cancellation tree.
    loom::model(|| {
        let token1 = CancellationToken::new();
        let child_token1 = token1.child_token();
        let child_token2 = token1.child_token();

        let th1 = thread::spawn(move || {
            drop(token1);
        });

        let th2 = thread::spawn(move || {
            drop(child_token1);
        });

        let th3 = thread::spawn(move || {
            drop(child_token2);
        });

        assert_ok!(th1.join());
        assert_ok!(th2.join());
        assert_ok!(th3.join());
    });
}

// Temporarily disabled due to a false positive in loom -
// see https://github.com/tokio-rs/tokio/pull/7644#issuecomment-3328381344
#[ignore]
#[test]
fn drop_and_cancel_token() {
    // Races a drop of the original handle against a cancellation through a
    // clone and a drop of a child token.
    loom::model(|| {
        let token1 = CancellationToken::new();
        let token2 = token1.clone();
        let child_token = token1.child_token();

        let th1 = thread::spawn(move || {
            drop(token1);
        });

        let th2 = thread::spawn(move || {
            token2.cancel();
        });

        let th3 = thread::spawn(move || {
            drop(child_token);
        });

        assert_ok!(th1.join());
        assert_ok!(th2.join());
        assert_ok!(th3.join());
    });
}

// Temporarily disabled due to a false positive in loom -
// see https://github.com/tokio-rs/tokio/pull/7644#issuecomment-3328381344
#[ignore]
#[test]
fn cancel_parent_and_child() {
    // Races parent-drop, parent-cancel, and child-cancel against each other.
    loom::model(|| {
        let token1 = CancellationToken::new();
        let token2 = token1.clone();
        let child_token = token1.child_token();

        let th1 = thread::spawn(move || {
            drop(token1);
        });

        let th2 = thread::spawn(move || {
            token2.cancel();
        });

        let th3 = thread::spawn(move || {
            child_token.cancel();
        });

        assert_ok!(th1.join());
        assert_ok!(th2.join());
        assert_ok!(th3.join());
    });
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/src/sync/tests/mod.rs | tokio-util/src/sync/tests/mod.rs | #[cfg(loom)]
mod loom_cancellation_token;
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/src/sync/cancellation_token/guard_ref.rs | tokio-util/src/sync/cancellation_token/guard_ref.rs | use crate::sync::CancellationToken;
/// A wrapper for cancellation token which automatically cancels
/// it on drop. It is created using [`drop_guard_ref`] method on the [`CancellationToken`].
///
/// This is a borrowed version of [`DropGuard`].
///
/// [`drop_guard_ref`]: CancellationToken::drop_guard_ref
/// [`DropGuard`]: super::DropGuard
#[derive(Debug)]
pub struct DropGuardRef<'a> {
    /// The guarded token. `None` only while `disarm` consumes the guard,
    /// which tells the `Drop` impl not to cancel.
    pub(super) inner: Option<&'a CancellationToken>,
}
impl<'a> DropGuardRef<'a> {
    /// Returns stored cancellation token and removes this drop guard instance
    /// (i.e. it will no longer cancel token). Other guards for this token
    /// are not affected.
    pub fn disarm(mut self) -> &'a CancellationToken {
        // Emptying `inner` disables this guard's cancel-on-drop behavior.
        let token = self.inner.take();
        token.expect("`inner` can be only None in a destructor")
    }
}
impl Drop for DropGuardRef<'_> {
    fn drop(&mut self) {
        // A disarmed guard holds `None` here and must not cancel.
        if let Some(token) = self.inner.take() {
            token.cancel();
        }
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/src/sync/cancellation_token/guard.rs | tokio-util/src/sync/cancellation_token/guard.rs | use crate::sync::CancellationToken;
/// A wrapper for cancellation token which automatically cancels
/// it on drop. It is created using [`drop_guard`] method on the [`CancellationToken`].
///
/// [`drop_guard`]: CancellationToken::drop_guard
#[derive(Debug)]
pub struct DropGuard {
    /// The guarded token. `None` only while `disarm` consumes the guard,
    /// which tells the `Drop` impl not to cancel.
    pub(super) inner: Option<CancellationToken>,
}
impl DropGuard {
    /// Returns stored cancellation token and removes this drop guard instance
    /// (i.e. it will no longer cancel token). Other guards for this token
    /// are not affected.
    pub fn disarm(mut self) -> CancellationToken {
        // Emptying `inner` disables this guard's cancel-on-drop behavior.
        let token = self.inner.take();
        token.expect("`inner` can be only None in a destructor")
    }
}
impl Drop for DropGuard {
    fn drop(&mut self) {
        // A disarmed guard holds `None` here and must not cancel.
        if let Some(token) = self.inner.take() {
            token.cancel();
        }
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/src/sync/cancellation_token/tree_node.rs | tokio-util/src/sync/cancellation_token/tree_node.rs | //! This mod provides the logic for the inner tree structure of the `CancellationToken`.
//!
//! `CancellationTokens` are only light handles with references to [`TreeNode`].
//! All the logic is actually implemented in the [`TreeNode`].
//!
//! A [`TreeNode`] is part of the cancellation tree and may have one parent and an arbitrary number of
//! children.
//!
//! A [`TreeNode`] can receive the request to perform a cancellation through a `CancellationToken`.
//! This cancellation request will cancel the node and all of its descendants.
//!
//! As soon as a node cannot get cancelled any more (because it was already cancelled or it has no
//! more `CancellationTokens` pointing to it any more), it gets removed from the tree, to keep the
//! tree as small as possible.
//!
//! # Invariants
//!
//! Those invariants shall be true at any time.
//!
//! 1. A node that has no parents and no handles can no longer be cancelled.
//! This is important during both cancellation and refcounting.
//!
//! 2. If node B *is* or *was* a child of node A, then node B was created *after* node A.
//! This is important for deadlock safety, as it is used for lock order.
//! Node B can only become the child of node A in two ways:
//! - being created with `child_node()`, in which case it is trivially true that
//! node A already existed when node B was created
//! - being moved A->C->B to A->B because node C was removed in `decrease_handle_refcount()`
//! or `cancel()`. In this case the invariant still holds, as B was younger than C, and C
//! was younger than A, therefore B is also younger than A.
//!
//! 3. If two nodes are both unlocked and node A is the parent of node B, then node B is a child of
//! node A. It is important to always restore that invariant before dropping the lock of a node.
//!
//! # Deadlock safety
//!
//! We always lock in the order of creation time. We can prove this through invariant #2.
//! Specifically, through invariant #2, we know that we always have to lock a parent
//! before its child.
//!
use crate::loom::sync::{Arc, Mutex, MutexGuard};
/// A node of the cancellation tree structure
///
/// The actual data it holds is wrapped inside a mutex for synchronization.
pub(crate) struct TreeNode {
    /// Tree links and cancellation state, guarded by a mutex.
    inner: Mutex<Inner>,
    /// Wakes tasks waiting for this node's cancellation.
    waker: tokio::sync::Notify,
}
impl TreeNode {
pub(crate) fn new() -> Self {
Self {
inner: Mutex::new(Inner {
parent: None,
parent_idx: 0,
children: vec![],
is_cancelled: false,
num_handles: 1,
}),
waker: tokio::sync::Notify::new(),
}
}
pub(crate) fn notified(&self) -> tokio::sync::futures::Notified<'_> {
self.waker.notified()
}
}
/// The data contained inside a `TreeNode`.
///
/// This struct exists so that the data of the node can be wrapped
/// in a Mutex.
struct Inner {
    /// The parent node; `None` for roots and for detached/cancelled nodes.
    parent: Option<Arc<TreeNode>>,
    /// This node's index within `parent.children`; 0 when there is no parent.
    parent_idx: usize,
    /// Children to cancel when this node gets cancelled.
    children: Vec<Arc<TreeNode>>,
    /// Whether this node has been cancelled. Never reset once set.
    is_cancelled: bool,
    /// Number of `CancellationToken` handles pointing at this node.
    num_handles: usize,
}
/// Returns whether or not the node is cancelled
pub(crate) fn is_cancelled(node: &Arc<TreeNode>) -> bool {
    let locked = node.inner.lock().unwrap();
    locked.is_cancelled
}
/// Creates a child node
pub(crate) fn child_node(parent: &Arc<TreeNode>) -> Arc<TreeNode> {
    // Local constructor shared by both branches below.
    fn make_node(parent: Option<Arc<TreeNode>>, parent_idx: usize, is_cancelled: bool) -> Arc<TreeNode> {
        Arc::new(TreeNode {
            inner: Mutex::new(Inner {
                parent,
                parent_idx,
                children: Vec::new(),
                is_cancelled,
                num_handles: 1,
            }),
            waker: tokio::sync::Notify::new(),
        })
    }

    let mut locked_parent = parent.inner.lock().unwrap();

    // Do not register as child if we are already cancelled.
    // Cancelled trees can never be uncancelled and therefore
    // need no connection to parents or children any more.
    if locked_parent.is_cancelled {
        return make_node(None, 0, true);
    }

    // Link the child into the parent's children list while the parent is
    // still locked, keeping invariant #3 intact.
    let child = make_node(Some(parent.clone()), locked_parent.children.len(), false);
    locked_parent.children.push(child.clone());
    child
}
/// Disconnects the given parent from all of its children.
///
/// Takes a reference to [Inner] to make sure the parent is already locked.
fn disconnect_children(node: &mut Inner) {
    // Take the whole list so the parent no longer references any child.
    let children = std::mem::take(&mut node.children);
    for child in children {
        let mut locked_child = child.inner.lock().unwrap();
        locked_child.parent = None;
        locked_child.parent_idx = 0;
    }
}
/// Figures out the parent of the node and locks the node and its parent atomically.
///
/// The basic principle of preventing deadlocks in the tree is
/// that we always lock the parent first, and then the child.
/// For more info look at *deadlock safety* and *invariant #2*.
///
/// Sadly, it's impossible to figure out the parent of a node without
/// locking it. To then achieve locking order consistency, the node
/// has to be unlocked before the parent gets locked.
/// This leaves a small window where we already assume that we know the parent,
/// but neither the parent nor the node is locked. Therefore, the parent could change.
///
/// To prevent that this problem leaks into the rest of the code, it is abstracted
/// in this function.
///
/// The locked child and optionally its locked parent, if a parent exists, get passed
/// to the `func` argument via (node, None) or (node, Some(parent)).
fn with_locked_node_and_parent<F, Ret>(node: &Arc<TreeNode>, func: F) -> Ret
where
    F: FnOnce(MutexGuard<'_, Inner>, Option<MutexGuard<'_, Inner>>) -> Ret,
{
    use std::sync::TryLockError;

    let mut locked_node = node.inner.lock().unwrap();

    // Every time this fails, the number of ancestors of the node decreases,
    // so the loop must succeed after a finite number of iterations.
    loop {
        // Look up the parent of the currently locked node.
        let potential_parent = match locked_node.parent.as_ref() {
            Some(potential_parent) => potential_parent.clone(),
            None => return func(locked_node, None),
        };

        // Lock the parent. This may require unlocking the child first.
        let locked_parent = match potential_parent.inner.try_lock() {
            Ok(locked_parent) => locked_parent,
            Err(TryLockError::WouldBlock) => {
                drop(locked_node);
                // Deadlock safety:
                //
                // Due to invariant #2, the potential parent must come before
                // the child in the creation order. Therefore, we can safely
                // lock the child while holding the parent lock.
                let locked_parent = potential_parent.inner.lock().unwrap();
                locked_node = node.inner.lock().unwrap();
                locked_parent
            }
            // https://github.com/tokio-rs/tokio/pull/6273#discussion_r1443752911
            #[allow(clippy::unnecessary_literal_unwrap)]
            Err(TryLockError::Poisoned(err)) => Err(err).unwrap(),
        };

        // If we unlocked the child, then the parent may have changed. Check
        // that we still have the right parent.
        if let Some(actual_parent) = locked_node.parent.as_ref() {
            if Arc::ptr_eq(actual_parent, &potential_parent) {
                return func(locked_node, Some(locked_parent));
            }
        }
        // Otherwise the node got re-parented while it was unlocked; retry
        // with the new (strictly older) parent on the next iteration.
    }
}
/// Moves all children from `node` to `parent`.
///
/// `parent` MUST have been a parent of the node when they both got locked,
/// otherwise there is a potential for a deadlock as invariant #2 would be violated.
///
/// To acquire the locks for node and parent, use [`with_locked_node_and_parent`].
fn move_children_to_parent(node: &mut Inner, parent: &mut Inner) {
    // Pre-allocate in the parent, for performance
    parent.children.reserve(node.children.len());

    for child in std::mem::take(&mut node.children) {
        {
            let mut child_locked = child.inner.lock().unwrap();
            // `node.parent` refers to the `parent` argument, so this
            // re-points the child at its grandparent.
            child_locked.parent.clone_from(&node.parent);
            child_locked.parent_idx = parent.children.len();
        }
        parent.children.push(child);
    }
}
/// Removes a child from the parent.
///
/// `parent` MUST be the parent of `node`.
/// To acquire the locks for node and parent, use [`with_locked_node_and_parent`].
fn remove_child(parent: &mut Inner, mut node: MutexGuard<'_, Inner>) {
    // Query the position from where to remove a node
    let pos = node.parent_idx;
    node.parent = None;
    node.parent_idx = 0;

    // Unlock node, so that only one child at a time is locked.
    // Otherwise we would violate the lock order (see 'deadlock safety') as we
    // don't know the creation order of the child nodes
    drop(node);

    // If `node` is the last element in the list, we don't need any swapping
    if parent.children.len() == pos + 1 {
        parent.children.pop().unwrap();
    } else {
        // If `node` is not the last element in the list, we need to
        // replace it with the last element
        let replacement_child = parent.children.pop().unwrap();
        replacement_child.inner.lock().unwrap().parent_idx = pos;
        parent.children[pos] = replacement_child;
    }

    // Shrink the list when it is at most a quarter full, so long-lived
    // parents don't keep large buffers alive for children that are gone.
    let len = parent.children.len();
    if 4 * len <= parent.children.capacity() {
        parent.children.shrink_to(2 * len);
    }
}
/// Increases the reference count of handles.
///
/// Callers must already hold at least one handle — new handles are only
/// created by cloning existing ones.
pub(crate) fn increase_handle_refcount(node: &Arc<TreeNode>) {
    let mut locked_node = node.inner.lock().unwrap();

    // Once no handles are left over, the node gets detached from the tree.
    // There should never be a new handle once all handles are dropped.
    assert!(locked_node.num_handles > 0);

    locked_node.num_handles += 1;
}
/// Decreases the reference count of handles.
///
/// Once no handle is left, we can remove the node from the
/// tree and connect its parent directly to its children.
pub(crate) fn decrease_handle_refcount(node: &Arc<TreeNode>) {
    let num_handles = {
        let mut locked_node = node.inner.lock().unwrap();
        locked_node.num_handles -= 1;
        locked_node.num_handles
    };

    if num_handles == 0 {
        // The lock was released above; per invariant #1 no new handle can
        // appear once the count reached zero, so detaching here is safe.
        with_locked_node_and_parent(node, |mut node, parent| {
            // Remove the node from the tree
            match parent {
                Some(mut parent) => {
                    // As we want to remove ourselves from the tree,
                    // we have to move the children to the parent, so that
                    // they still receive the cancellation event without us.
                    // Moving them does not violate invariant #1.
                    move_children_to_parent(&mut node, &mut parent);

                    // Remove the node from the parent
                    remove_child(&mut parent, node);
                }
                None => {
                    // Due to invariant #1, we can assume that our
                    // children can no longer be cancelled through us.
                    // (as we now have neither a parent nor handles)
                    // Therefore we can disconnect them.
                    disconnect_children(&mut node);
                }
            }
        });
    }
}
/// Cancels a node and its children.
///
/// Works iteratively: grandchildren with children of their own are adopted
/// by `node` and handled on a later loop iteration, so at most three locks
/// (node, child, grandchild) are ever held at once and no recursion occurs.
pub(crate) fn cancel(node: &Arc<TreeNode>) {
    let mut locked_node = node.inner.lock().unwrap();

    // Cancellation is idempotent; a cancelled node is already detached.
    if locked_node.is_cancelled {
        return;
    }

    // One by one, adopt grandchildren and then cancel and detach the child
    while let Some(child) = locked_node.children.pop() {
        // This can't deadlock because the mutex we are already
        // holding is the parent of child.
        let mut locked_child = child.inner.lock().unwrap();

        // Detach the child from node
        // No need to modify node.children, as the child already got removed with `.pop`
        locked_child.parent = None;
        locked_child.parent_idx = 0;

        // If child is already cancelled, detaching is enough
        if locked_child.is_cancelled {
            continue;
        }

        // Cancel or adopt grandchildren
        while let Some(grandchild) = locked_child.children.pop() {
            // This can't deadlock because the two mutexes we are already
            // holding is the parent and grandparent of grandchild.
            let mut locked_grandchild = grandchild.inner.lock().unwrap();

            // Detach the grandchild
            locked_grandchild.parent = None;
            locked_grandchild.parent_idx = 0;

            // If grandchild is already cancelled, detaching is enough
            if locked_grandchild.is_cancelled {
                continue;
            }

            // For performance reasons, only adopt grandchildren that have children.
            // Otherwise, just cancel them right away, no need for another iteration.
            if locked_grandchild.children.is_empty() {
                // Cancel the grandchild
                locked_grandchild.is_cancelled = true;
                locked_grandchild.children = Vec::new();
                drop(locked_grandchild);
                grandchild.waker.notify_waiters();
            } else {
                // Otherwise, adopt grandchild
                locked_grandchild.parent = Some(node.clone());
                locked_grandchild.parent_idx = locked_node.children.len();
                drop(locked_grandchild);
                locked_node.children.push(grandchild);
            }
        }

        // Cancel the child
        locked_child.is_cancelled = true;
        locked_child.children = Vec::new();
        drop(locked_child);
        child.waker.notify_waiters();

        // Now the child is cancelled and detached and all its children are adopted.
        // Just continue until all (including adopted) children are cancelled and detached.
    }

    // Cancel the node itself.
    locked_node.is_cancelled = true;
    locked_node.children = Vec::new();
    drop(locked_node);
    node.waker.notify_waiters();
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/_require_full.rs | tokio-util/tests/_require_full.rs | #![cfg(not(feature = "full"))]
compile_error!("run tokio-util tests with `--features full`");
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/framed_stream.rs | tokio-util/tests/framed_stream.rs | use futures_core::stream::Stream;
use std::{io, pin::Pin};
use tokio_test::{assert_ready, io::Builder, task};
use tokio_util::codec::{BytesCodec, FramedRead};
// Shorthand for `Pin::new(&mut $id)` so the poll calls below stay readable.
macro_rules! pin {
    ($id:ident) => {
        Pin::new(&mut $id)
    };
}

// Asserts that polling `$e` is ready with `Some(Ok(bytes))` equal to `$n`.
macro_rules! assert_read {
    ($e:expr, $n:expr) => {{
        let val = assert_ready!($e);
        assert_eq!(val.unwrap().unwrap(), $n);
    }};
}

#[tokio::test]
async fn return_none_after_error() {
    let mut io = FramedRead::new(
        Builder::new()
            .read(b"abcdef")
            .read_error(io::Error::new(io::ErrorKind::Other, "Resource errored out"))
            .read(b"more data")
            .build(),
        BytesCodec::new(),
    );

    let mut task = task::spawn(());

    task.enter(|cx, _| {
        assert_read!(pin!(io).poll_next(cx), b"abcdef".to_vec());
        // The injected read error is yielded once as an `Err` item ...
        assert!(assert_ready!(pin!(io).poll_next(cx)).unwrap().is_err());
        // ... followed by a `None` ...
        assert!(assert_ready!(pin!(io).poll_next(cx)).is_none());
        // ... and polling again resumes reading the remaining mock data.
        assert_read!(pin!(io).poll_next(cx), b"more data".to_vec());
    })
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/framed_write.rs | tokio-util/tests/framed_write.rs | #![warn(rust_2018_idioms)]
use tokio::io::AsyncWrite;
use tokio_test::{assert_ready, task};
use tokio_util::codec::{Encoder, FramedWrite};
use bytes::{BufMut, BytesMut};
use futures_sink::Sink;
use std::collections::VecDeque;
use std::io::{self, Write};
use std::pin::Pin;
use std::task::Poll::{Pending, Ready};
use std::task::{Context, Poll};
// Builds a `Mock` whose `calls` queue contains the given results in order.
macro_rules! mock {
    ($($x:expr,)*) => {{
        let mut v = VecDeque::new();
        v.extend(vec![$($x),*]);
        Mock { calls: v }
    }};
}

// Shorthand for `Pin::new(&mut $id)` so the poll calls below stay readable.
macro_rules! pin {
    ($id:ident) => {
        Pin::new(&mut $id)
    };
}
/// Test encoder that frames each `u32` as 4 big-endian bytes.
struct U32Encoder;

impl Encoder<u32> for U32Encoder {
    type Error = io::Error;

    fn encode(&mut self, item: u32, dst: &mut BytesMut) -> io::Result<()> {
        // `to_be_bytes` matches the big-endian framing of `put_u32`;
        // `extend_from_slice` grows the buffer as needed.
        dst.extend_from_slice(&item.to_be_bytes());
        Ok(())
    }
}
/// Test encoder that frames each `u64` as 8 big-endian bytes.
struct U64Encoder;

impl Encoder<u64> for U64Encoder {
    type Error = io::Error;

    fn encode(&mut self, item: u64, dst: &mut BytesMut) -> io::Result<()> {
        // `to_be_bytes` matches the big-endian framing of `put_u64`;
        // `extend_from_slice` grows the buffer as needed.
        dst.extend_from_slice(&item.to_be_bytes());
        Ok(())
    }
}
#[test]
fn write_multi_frame_in_packet() {
    // Multiple frames sent before a flush must be buffered and written to the
    // underlying I/O as a single packet.
    let mut task = task::spawn(());
    let mock = mock! {
        Ok(b"\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02".to_vec()),
    };
    let mut framed = FramedWrite::new(mock, U32Encoder);

    task.enter(|cx, _| {
        assert!(assert_ready!(pin!(framed).poll_ready(cx)).is_ok());
        assert!(pin!(framed).start_send(0).is_ok());
        assert!(assert_ready!(pin!(framed).poll_ready(cx)).is_ok());
        assert!(pin!(framed).start_send(1).is_ok());
        assert!(assert_ready!(pin!(framed).poll_ready(cx)).is_ok());
        assert!(pin!(framed).start_send(2).is_ok());

        // Nothing written yet
        assert_eq!(1, framed.get_ref().calls.len());

        // Flush the writes
        assert!(assert_ready!(pin!(framed).poll_flush(cx)).is_ok());

        assert_eq!(0, framed.get_ref().calls.len());
    });
}

#[test]
fn write_multi_frame_after_codec_changed() {
    // Swapping the encoder mid-stream must keep the already-buffered 4-byte
    // frame and append the new 8-byte frame after it.
    let mut task = task::spawn(());
    let mock = mock! {
        Ok(b"\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x08".to_vec()),
    };
    let mut framed = FramedWrite::new(mock, U32Encoder);

    task.enter(|cx, _| {
        assert!(assert_ready!(pin!(framed).poll_ready(cx)).is_ok());
        assert!(pin!(framed).start_send(0x04).is_ok());

        let mut framed = framed.map_encoder(|_| U64Encoder);
        assert!(assert_ready!(pin!(framed).poll_ready(cx)).is_ok());
        assert!(pin!(framed).start_send(0x08).is_ok());

        // Nothing written yet
        assert_eq!(1, framed.get_ref().calls.len());

        // Flush the writes
        assert!(assert_ready!(pin!(framed).poll_flush(cx)).is_ok());

        assert_eq!(0, framed.get_ref().calls.len());
    });
}
#[test]
fn write_hits_backpressure() {
    // Fills the FramedWrite buffer past its backpressure boundary and checks
    // that poll_ready turns Pending while the underlying writer blocks.
    const ITER: usize = 2 * 1024;

    let mut mock = mock! {
        // Block the `ITER*2`th write
        Err(io::Error::new(io::ErrorKind::WouldBlock, "not ready")),
        Ok(b"".to_vec()),
    };

    // Build the expected write pattern: 2KB chunks of consecutive
    // big-endian u32 values.
    for i in 0..=ITER * 2 {
        let mut b = BytesMut::with_capacity(4);
        b.put_u32(i as u32);

        // Append to the end
        match mock.calls.back_mut().unwrap() {
            Ok(ref mut data) => {
                // Write in 2kb chunks
                if data.len() < ITER {
                    data.extend_from_slice(&b[..]);
                    continue;
                } // else fall through and create a new buffer
            }
            _ => unreachable!(),
        }

        // Push a new chunk
        mock.calls.push_back(Ok(b[..].to_vec()));
    }

    // 1 'wouldblock', 8 * 2KB buffers, 1 b-byte buffer
    assert_eq!(mock.calls.len(), 10);

    let mut task = task::spawn(());
    let mut framed = FramedWrite::new(mock, U32Encoder);
    framed.set_backpressure_boundary(ITER * 8);
    task.enter(|cx, _| {
        // Send 16KB. This fills up FramedWrite buffer
        for i in 0..ITER * 2 {
            assert!(assert_ready!(pin!(framed).poll_ready(cx)).is_ok());
            assert!(pin!(framed).start_send(i as u32).is_ok());
        }

        // Now we poll_ready which forces a flush. The mock pops the front message
        // and decides to block.
        assert!(pin!(framed).poll_ready(cx).is_pending());

        // We poll again, forcing another flush, which this time succeeds
        // The whole 16KB buffer is flushed
        assert!(assert_ready!(pin!(framed).poll_ready(cx)).is_ok());

        // Send more data. This matches the final message expected by the mock
        assert!(pin!(framed).start_send((ITER * 2) as u32).is_ok());

        // Flush the rest of the buffer
        assert!(assert_ready!(pin!(framed).poll_flush(cx)).is_ok());

        // Ensure the mock is empty
        assert_eq!(0, framed.get_ref().calls.len());
    })
}
// // ===== Mock ======

/// Scripted writer: each queued entry is either the exact bytes expected by
/// the next `write` call or an error to return from it.
struct Mock {
    calls: VecDeque<io::Result<Vec<u8>>>,
}
impl Write for Mock {
    /// Pops the next scripted call; an `Ok` entry asserts that `src` starts
    /// with the expected bytes and reports a partial write of that length.
    fn write(&mut self, src: &[u8]) -> io::Result<usize> {
        let next_call = self.calls.pop_front();
        match next_call {
            None => panic!("unexpected write; {src:?}"),
            Some(Err(e)) => Err(e),
            Some(Ok(expected)) => {
                assert!(src.len() >= expected.len());
                assert_eq!(&expected[..], &src[..expected.len()]);
                Ok(expected.len())
            }
        }
    }

    fn flush(&mut self) -> io::Result<()> {
        Ok(())
    }
}
impl AsyncWrite for Mock {
    /// Delegates to the blocking `Write` impl (`Mock` is `Unpin`), mapping a
    /// `WouldBlock` error to `Pending`.
    fn poll_write(
        self: Pin<&mut Self>,
        _cx: &mut Context<'_>,
        buf: &[u8],
    ) -> Poll<Result<usize, io::Error>> {
        let result = self.get_mut().write(buf);
        match result {
            Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => Pending,
            other => Ready(other),
        }
    }

    fn poll_flush(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
        let result = self.get_mut().flush();
        match result {
            Err(ref e) if e.kind() == io::ErrorKind::WouldBlock => Pending,
            other => Ready(other),
        }
    }

    fn poll_shutdown(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
        unimplemented!()
    }
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/mpsc.rs | tokio-util/tests/mpsc.rs | use futures::sink::SinkExt;
use std::future::poll_fn;
use tokio::sync::mpsc::channel;
use tokio_test::task::spawn;
use tokio_test::{
assert_ok, assert_pending, assert_ready, assert_ready_eq, assert_ready_err, assert_ready_ok,
};
use tokio_util::sync::PollSender;
#[tokio::test]
async fn simple() {
    let (send, mut recv) = channel(3);
    let mut send = PollSender::new(send);

    // Fill the channel to capacity.
    for i in 1..=3i32 {
        let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
        assert_ready_ok!(reserve.poll());
        send.send_item(i).unwrap();
    }

    // Channel is full, so reserving is now pending.
    let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
    assert_pending!(reserve.poll());

    assert_eq!(recv.recv().await.unwrap(), 1);
    // Receiving frees a slot and wakes the pending reserve.
    assert!(reserve.is_woken());
    assert_ready_ok!(reserve.poll());

    drop(recv);

    // The slot was already reserved, so sending succeeds even though the
    // receiver is gone.
    send.send_item(42).unwrap();
}

#[tokio::test]
async fn simple_ref() {
    // Same flow as `simple`, but sending references instead of owned values.
    let v = [1, 2, 3i32];

    let (send, mut recv) = channel(3);
    let mut send = PollSender::new(send);

    for vi in v.iter() {
        let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
        assert_ready_ok!(reserve.poll());
        send.send_item(vi).unwrap();
    }

    let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
    assert_pending!(reserve.poll());

    assert_eq!(*recv.recv().await.unwrap(), 1);
    assert!(reserve.is_woken());
    assert_ready_ok!(reserve.poll());
    drop(recv);

    send.send_item(&42).unwrap();
}

#[tokio::test]
async fn repeated_poll_reserve() {
    let (send, mut recv) = channel::<i32>(1);
    let mut send = PollSender::new(send);

    // Polling again after readiness must stay ready rather than reserving a
    // second slot or panicking.
    let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
    assert_ready_ok!(reserve.poll());
    assert_ready_ok!(reserve.poll());
    send.send_item(1).unwrap();

    assert_eq!(recv.recv().await.unwrap(), 1);
}
#[tokio::test]
async fn abort_send() {
    let (send, mut recv) = channel(3);
    let mut send = PollSender::new(send);
    let send2 = send.get_ref().cloned().unwrap();

    // Fill the channel to capacity.
    for i in 1..=3i32 {
        let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
        assert_ready_ok!(reserve.poll());
        send.send_item(i).unwrap();
    }

    let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
    assert_pending!(reserve.poll());
    assert_eq!(recv.recv().await.unwrap(), 1);
    assert!(reserve.is_woken());
    assert_ready_ok!(reserve.poll());

    let mut send2_send = spawn(send2.send(5));
    assert_pending!(send2_send.poll());
    // Releasing a reserved-but-unused permit must wake other blocked senders.
    assert!(send.abort_send());
    assert!(send2_send.is_woken());
    assert_ready_ok!(send2_send.poll());

    assert_eq!(recv.recv().await.unwrap(), 2);
    assert_eq!(recv.recv().await.unwrap(), 3);
    assert_eq!(recv.recv().await.unwrap(), 5);
}

#[tokio::test]
async fn close_sender_last() {
    let (send, mut recv) = channel::<i32>(3);
    let mut send = PollSender::new(send);

    let mut recv_task = spawn(recv.recv());
    assert_pending!(recv_task.poll());

    // Closing the only sender must wake the receiver with `None`.
    send.close();

    assert!(recv_task.is_woken());
    assert!(assert_ready!(recv_task.poll()).is_none());
}

#[tokio::test]
async fn close_sender_not_last() {
    let (send, mut recv) = channel::<i32>(3);
    let mut send = PollSender::new(send);
    let send2 = send.get_ref().cloned().unwrap();

    let mut recv_task = spawn(recv.recv());
    assert_pending!(recv_task.poll());

    // Closing one of two senders must NOT complete the receiver ...
    send.close();

    assert!(!recv_task.is_woken());
    assert_pending!(recv_task.poll());

    // ... only dropping the last remaining sender does.
    drop(send2);

    assert!(recv_task.is_woken());
    assert!(assert_ready!(recv_task.poll()).is_none());
}

#[tokio::test]
async fn close_sender_before_reserve() {
    let (send, mut recv) = channel::<i32>(3);
    let mut send = PollSender::new(send);

    let mut recv_task = spawn(recv.recv());
    assert_pending!(recv_task.poll());

    send.close();

    assert!(recv_task.is_woken());
    assert!(assert_ready!(recv_task.poll()).is_none());

    // Reserving on a closed sender must fail immediately.
    let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
    assert_ready_err!(reserve.poll());
}
#[tokio::test]
async fn close_sender_after_pending_reserve() {
let (send, mut recv) = channel::<i32>(1);
let mut send = PollSender::new(send);
let mut recv_task = spawn(recv.recv());
assert_pending!(recv_task.poll());
let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
assert_ready_ok!(reserve.poll());
send.send_item(1).unwrap();
assert!(recv_task.is_woken());
let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
assert_pending!(reserve.poll());
drop(reserve);
send.close();
assert!(send.is_closed());
let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
assert_ready_err!(reserve.poll());
}
#[tokio::test]
async fn close_sender_after_successful_reserve() {
let (send, mut recv) = channel::<i32>(3);
let mut send = PollSender::new(send);
let mut recv_task = spawn(recv.recv());
assert_pending!(recv_task.poll());
let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
assert_ready_ok!(reserve.poll());
drop(reserve);
send.close();
assert!(send.is_closed());
assert!(!recv_task.is_woken());
assert_pending!(recv_task.poll());
let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
assert_ready_ok!(reserve.poll());
}
#[tokio::test]
async fn abort_send_after_pending_reserve() {
let (send, mut recv) = channel::<i32>(1);
let mut send = PollSender::new(send);
let mut recv_task = spawn(recv.recv());
assert_pending!(recv_task.poll());
let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
assert_ready_ok!(reserve.poll());
send.send_item(1).unwrap();
assert_eq!(send.get_ref().unwrap().capacity(), 0);
assert!(!send.abort_send());
let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
assert_pending!(reserve.poll());
assert!(send.abort_send());
assert_eq!(send.get_ref().unwrap().capacity(), 0);
}
#[tokio::test]
async fn abort_send_after_successful_reserve() {
let (send, mut recv) = channel::<i32>(1);
let mut send = PollSender::new(send);
let mut recv_task = spawn(recv.recv());
assert_pending!(recv_task.poll());
assert_eq!(send.get_ref().unwrap().capacity(), 1);
let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
assert_ready_ok!(reserve.poll());
assert_eq!(send.get_ref().unwrap().capacity(), 0);
assert!(send.abort_send());
assert_eq!(send.get_ref().unwrap().capacity(), 1);
}
#[tokio::test]
async fn closed_when_receiver_drops() {
let (send, _) = channel::<i32>(1);
let mut send = PollSender::new(send);
let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
assert_ready_err!(reserve.poll());
}
#[should_panic]
#[test]
fn start_send_panics_when_idle() {
let (send, _) = channel::<i32>(3);
let mut send = PollSender::new(send);
send.send_item(1).unwrap();
}
#[should_panic]
#[test]
fn start_send_panics_when_acquiring() {
let (send, _) = channel::<i32>(1);
let mut send = PollSender::new(send);
let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
assert_ready_ok!(reserve.poll());
send.send_item(1).unwrap();
let mut reserve = spawn(poll_fn(|cx| send.poll_reserve(cx)));
assert_pending!(reserve.poll());
send.send_item(2).unwrap();
}
#[test]
fn sink_send_then_flush() {
let (send, mut recv) = channel(1);
let mut send = PollSender::new(send);
let mut recv_task = spawn(recv.recv());
assert_pending!(recv_task.poll());
let mut ready = spawn(poll_fn(|cx| send.poll_ready_unpin(cx)));
assert_ready_ok!(ready.poll());
assert_ok!(send.start_send_unpin(()));
let mut ready = spawn(poll_fn(|cx| send.poll_ready_unpin(cx)));
assert_pending!(ready.poll());
let mut flush = spawn(poll_fn(|cx| send.poll_flush_unpin(cx)));
assert_ready_ok!(flush.poll());
// Flushing does not mean that the sender becomes ready.
let mut ready = spawn(poll_fn(|cx| send.poll_ready_unpin(cx)));
assert_pending!(ready.poll());
assert_ready_eq!(recv_task.poll(), Some(()));
assert!(ready.is_woken());
assert_ready_ok!(ready.poll());
}
#[test]
fn sink_send_then_close() {
let (send, mut recv) = channel(1);
let mut send = PollSender::new(send);
let mut recv_task = spawn(recv.recv());
assert_pending!(recv_task.poll());
let mut ready = spawn(poll_fn(|cx| send.poll_ready_unpin(cx)));
assert_ready_ok!(ready.poll());
assert_ok!(send.start_send_unpin(1));
let mut ready = spawn(poll_fn(|cx| send.poll_ready_unpin(cx)));
assert_pending!(ready.poll());
assert!(recv_task.is_woken());
assert_ready_eq!(recv_task.poll(), Some(1));
assert!(ready.is_woken());
assert_ready_ok!(ready.poll());
drop(recv_task);
let mut recv_task = spawn(recv.recv());
assert_pending!(recv_task.poll());
assert_ok!(send.start_send_unpin(2));
let mut close = spawn(poll_fn(|cx| send.poll_close_unpin(cx)));
assert_ready_ok!(close.poll());
assert!(recv_task.is_woken());
assert_ready_eq!(recv_task.poll(), Some(2));
drop(recv_task);
let mut recv_task = spawn(recv.recv());
assert_ready_eq!(recv_task.poll(), None);
}
#[test]
fn sink_send_ref() {
let data = "data".to_owned();
let (send, mut recv) = channel(1);
let mut send = PollSender::new(send);
let mut recv_task = spawn(recv.recv());
assert_pending!(recv_task.poll());
let mut ready = spawn(poll_fn(|cx| send.poll_ready_unpin(cx)));
assert_ready_ok!(ready.poll());
assert_ok!(send.start_send_unpin(data.as_str()));
let mut flush = spawn(poll_fn(|cx| send.poll_flush_unpin(cx)));
assert_ready_ok!(flush.poll());
assert_ready_eq!(recv_task.poll(), Some("data"));
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/io_stream_reader.rs | tokio-util/tests/io_stream_reader.rs | #![warn(rust_2018_idioms)]
use bytes::Bytes;
use tokio::io::AsyncReadExt;
use tokio_stream::iter;
use tokio_util::io::StreamReader;
#[tokio::test]
async fn test_stream_reader() -> std::io::Result<()> {
let stream = iter(vec![
std::io::Result::Ok(Bytes::from_static(&[])),
Ok(Bytes::from_static(&[0, 1, 2, 3])),
Ok(Bytes::from_static(&[])),
Ok(Bytes::from_static(&[4, 5, 6, 7])),
Ok(Bytes::from_static(&[])),
Ok(Bytes::from_static(&[8, 9, 10, 11])),
Ok(Bytes::from_static(&[])),
]);
let mut read = StreamReader::new(stream);
let mut buf = [0; 5];
read.read_exact(&mut buf).await?;
assert_eq!(buf, [0, 1, 2, 3, 4]);
assert_eq!(read.read(&mut buf).await?, 3);
assert_eq!(&buf[..3], [5, 6, 7]);
assert_eq!(read.read(&mut buf).await?, 4);
assert_eq!(&buf[..4], [8, 9, 10, 11]);
assert_eq!(read.read(&mut buf).await?, 0);
Ok(())
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/io_reader_stream.rs | tokio-util/tests/io_reader_stream.rs | #![warn(rust_2018_idioms)]
use std::pin::Pin;
use std::task::{Context, Poll};
use tokio::io::{AsyncRead, ReadBuf};
use tokio_stream::StreamExt;
/// produces at most `remaining` zeros, that returns error.
/// each time it reads at most 31 byte.
struct Reader {
remaining: usize,
}
impl AsyncRead for Reader {
fn poll_read(
self: Pin<&mut Self>,
_cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<std::io::Result<()>> {
let this = Pin::into_inner(self);
assert_ne!(buf.remaining(), 0);
if this.remaining > 0 {
let n = std::cmp::min(this.remaining, buf.remaining());
let n = std::cmp::min(n, 31);
for x in &mut buf.initialize_unfilled_to(n)[..n] {
*x = 0;
}
buf.advance(n);
this.remaining -= n;
Poll::Ready(Ok(()))
} else {
Poll::Ready(Err(std::io::Error::from_raw_os_error(22)))
}
}
}
#[tokio::test]
async fn correct_behavior_on_errors() {
let reader = Reader { remaining: 8000 };
let mut stream = tokio_util::io::ReaderStream::new(reader);
let mut zeros_received = 0;
let mut had_error = false;
loop {
let item = stream.next().await.unwrap();
println!("{item:?}");
match item {
Ok(bytes) => {
let bytes = &*bytes;
for byte in bytes {
assert_eq!(*byte, 0);
zeros_received += 1;
}
}
Err(_) => {
assert!(!had_error);
had_error = true;
break;
}
}
}
assert!(had_error);
assert_eq!(zeros_received, 8000);
assert!(stream.next().await.is_none());
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/spawn_pinned.rs | tokio-util/tests/spawn_pinned.rs | #![warn(rust_2018_idioms)]
#![cfg(not(target_os = "wasi"))] // Wasi doesn't support threads
use std::rc::Rc;
use std::sync::Arc;
use tokio::sync::Barrier;
use tokio_util::task;
/// Simple test of running a !Send future via spawn_pinned
#[tokio::test]
async fn can_spawn_not_send_future() {
let pool = task::LocalPoolHandle::new(1);
let output = pool
.spawn_pinned(|| {
// Rc is !Send + !Sync
let local_data = Rc::new("test");
// This future holds an Rc, so it is !Send
async move { local_data.to_string() }
})
.await
.unwrap();
assert_eq!(output, "test");
}
/// Dropping the join handle still lets the task execute
#[test]
fn can_drop_future_and_still_get_output() {
let pool = task::LocalPoolHandle::new(1);
let (sender, receiver) = std::sync::mpsc::channel();
pool.spawn_pinned(move || {
// Rc is !Send + !Sync
let local_data = Rc::new("test");
// This future holds an Rc, so it is !Send
async move {
let _ = sender.send(local_data.to_string());
}
});
assert_eq!(receiver.recv(), Ok("test".to_string()));
}
#[test]
#[should_panic(expected = "assertion failed: pool_size > 0")]
fn cannot_create_zero_sized_pool() {
let _pool = task::LocalPoolHandle::new(0);
}
/// We should be able to spawn multiple futures onto the pool at the same time.
#[tokio::test]
async fn can_spawn_multiple_futures() {
let pool = task::LocalPoolHandle::new(2);
let join_handle1 = pool.spawn_pinned(|| {
let local_data = Rc::new("test1");
async move { local_data.to_string() }
});
let join_handle2 = pool.spawn_pinned(|| {
let local_data = Rc::new("test2");
async move { local_data.to_string() }
});
assert_eq!(join_handle1.await.unwrap(), "test1");
assert_eq!(join_handle2.await.unwrap(), "test2");
}
/// A panic in the spawned task causes the join handle to return an error.
/// But, you can continue to spawn tasks.
#[tokio::test]
#[cfg(panic = "unwind")]
async fn task_panic_propagates() {
let pool = task::LocalPoolHandle::new(1);
let join_handle = pool.spawn_pinned(|| async {
panic!("Test panic");
});
let result = join_handle.await;
assert!(result.is_err());
let error = result.unwrap_err();
assert!(error.is_panic());
let panic_str = error.into_panic().downcast::<&'static str>().unwrap();
assert_eq!(*panic_str, "Test panic");
// Trying again with a "safe" task still works
let join_handle = pool.spawn_pinned(|| async { "test" });
let result = join_handle.await;
assert!(result.is_ok());
assert_eq!(result.unwrap(), "test");
}
/// A panic during task creation causes the join handle to return an error.
/// But, you can continue to spawn tasks.
#[tokio::test]
#[cfg(panic = "unwind")]
async fn callback_panic_does_not_kill_worker() {
let pool = task::LocalPoolHandle::new(1);
let join_handle = pool.spawn_pinned(|| {
panic!("Test panic");
#[allow(unreachable_code)]
async {}
});
let result = join_handle.await;
assert!(result.is_err());
let error = result.unwrap_err();
assert!(error.is_panic());
let panic_str = error.into_panic().downcast::<&'static str>().unwrap();
assert_eq!(*panic_str, "Test panic");
// Trying again with a "safe" callback works
let join_handle = pool.spawn_pinned(|| async { "test" });
let result = join_handle.await;
assert!(result.is_ok());
assert_eq!(result.unwrap(), "test");
}
/// Canceling the task via the returned join handle cancels the spawned task
/// (which has a different, internal join handle).
#[tokio::test]
async fn task_cancellation_propagates() {
let pool = task::LocalPoolHandle::new(1);
let notify_dropped = Arc::new(());
let weak_notify_dropped = Arc::downgrade(¬ify_dropped);
let (start_sender, start_receiver) = tokio::sync::oneshot::channel();
let (drop_sender, drop_receiver) = tokio::sync::oneshot::channel::<()>();
let join_handle = pool.spawn_pinned(|| async move {
let _drop_sender = drop_sender;
// Move the Arc into the task
let _notify_dropped = notify_dropped;
let _ = start_sender.send(());
// Keep the task running until it gets aborted
futures::future::pending::<()>().await;
});
// Wait for the task to start
let _ = start_receiver.await;
join_handle.abort();
// Wait for the inner task to abort, dropping the sender.
// The top level join handle aborts quicker than the inner task (the abort
// needs to propagate and get processed on the worker thread), so we can't
// just await the top level join handle.
let _ = drop_receiver.await;
// Check that the Arc has been dropped. This verifies that the inner task
// was canceled as well.
assert!(weak_notify_dropped.upgrade().is_none());
}
/// Tasks should be given to the least burdened worker. When spawning two tasks
/// on a pool with two empty workers the tasks should be spawned on separate
/// workers.
#[tokio::test]
async fn tasks_are_balanced() {
let pool = task::LocalPoolHandle::new(2);
// Spawn a task so one thread has a task count of 1
let (start_sender1, start_receiver1) = tokio::sync::oneshot::channel();
let (end_sender1, end_receiver1) = tokio::sync::oneshot::channel();
let join_handle1 = pool.spawn_pinned(|| async move {
let _ = start_sender1.send(());
let _ = end_receiver1.await;
std::thread::current().id()
});
// Wait for the first task to start up
let _ = start_receiver1.await;
// This task should be spawned on the other thread
let (start_sender2, start_receiver2) = tokio::sync::oneshot::channel();
let join_handle2 = pool.spawn_pinned(|| async move {
let _ = start_sender2.send(());
std::thread::current().id()
});
// Wait for the second task to start up
let _ = start_receiver2.await;
// Allow the first task to end
let _ = end_sender1.send(());
let thread_id1 = join_handle1.await.unwrap();
let thread_id2 = join_handle2.await.unwrap();
// Since the first task was active when the second task spawned, they should
// be on separate workers/threads.
assert_ne!(thread_id1, thread_id2);
}
#[tokio::test]
async fn spawn_by_idx() {
let pool = task::LocalPoolHandle::new(3);
let barrier = Arc::new(Barrier::new(4));
let barrier1 = barrier.clone();
let barrier2 = barrier.clone();
let barrier3 = barrier.clone();
let handle1 = pool.spawn_pinned_by_idx(
|| async move {
barrier1.wait().await;
std::thread::current().id()
},
0,
);
pool.spawn_pinned_by_idx(
|| async move {
barrier2.wait().await;
std::thread::current().id()
},
0,
);
let handle2 = pool.spawn_pinned_by_idx(
|| async move {
barrier3.wait().await;
std::thread::current().id()
},
1,
);
let loads = pool.get_task_loads_for_each_worker();
barrier.wait().await;
assert_eq!(loads[0], 2);
assert_eq!(loads[1], 1);
assert_eq!(loads[2], 0);
let thread_id1 = handle1.await.unwrap();
let thread_id2 = handle2.await.unwrap();
assert_ne!(thread_id1, thread_id2);
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/future.rs | tokio-util/tests/future.rs | use std::{
future::{pending, ready, Future},
task::{Context, Poll},
};
use futures_test::task::new_count_waker;
use tokio::pin;
use tokio_test::{assert_pending, assert_ready_eq};
use tokio_util::{future::FutureExt, sync::CancellationToken};
#[derive(Default)]
struct ReadyOnTheSecondPollFuture {
polled: bool,
}
impl Future for ReadyOnTheSecondPollFuture {
type Output = ();
fn poll(mut self: std::pin::Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Self::Output> {
if !self.polled {
self.polled = true;
return Poll::Pending;
}
Poll::Ready(())
}
}
#[test]
fn ready_fut_with_cancellation_token_test() {
let (waker, _) = new_count_waker();
let token = CancellationToken::new();
let ready_fut = ready(());
let ready_with_token_fut = ready_fut.with_cancellation_token(&token);
pin!(ready_with_token_fut);
let res = ready_with_token_fut
.as_mut()
.poll(&mut Context::from_waker(&waker));
assert_ready_eq!(res, Some(()));
}
#[test]
fn pending_fut_with_cancellation_token_test() {
let (waker, _) = new_count_waker();
let token = CancellationToken::new();
let pending_fut = pending::<()>();
let pending_with_token_fut = pending_fut.with_cancellation_token(&token);
pin!(pending_with_token_fut);
let res = pending_with_token_fut
.as_mut()
.poll(&mut Context::from_waker(&waker));
assert_pending!(res);
}
#[test]
fn ready_fut_with_already_cancelled_token_test() {
let (waker, _) = new_count_waker();
let token = CancellationToken::new();
token.cancel();
let ready_fut = ready(());
let ready_fut_with_token_fut = ready_fut.with_cancellation_token(&token);
pin!(ready_fut_with_token_fut);
let res = ready_fut_with_token_fut
.as_mut()
.poll(&mut Context::from_waker(&waker));
assert_ready_eq!(res, None);
}
#[test]
fn pending_fut_with_already_cancelled_token_test() {
let (waker, wake_count) = new_count_waker();
let token = CancellationToken::new();
token.cancel();
let pending_fut = pending::<()>();
let pending_with_token_fut = pending_fut.with_cancellation_token(&token);
pin!(pending_with_token_fut);
let res = pending_with_token_fut
.as_mut()
.poll(&mut Context::from_waker(&waker));
assert_ready_eq!(res, None);
assert_eq!(wake_count, 0);
}
#[test]
fn pending_fut_with_token_cancelled_test() {
let (waker, wake_count) = new_count_waker();
let token = CancellationToken::new();
let pending_fut = pending::<()>();
let pending_with_token_fut = pending_fut.with_cancellation_token(&token);
pin!(pending_with_token_fut);
let res = pending_with_token_fut
.as_mut()
.poll(&mut Context::from_waker(&waker));
assert_pending!(res);
token.cancel();
let res = pending_with_token_fut
.as_mut()
.poll(&mut Context::from_waker(&waker));
assert_ready_eq!(res, None);
assert_eq!(wake_count, 1);
}
#[test]
fn pending_only_on_first_poll_with_cancellation_token_test() {
let (waker, wake_count) = new_count_waker();
let token = CancellationToken::new();
let fut = ReadyOnTheSecondPollFuture::default().with_cancellation_token(&token);
pin!(fut);
// first poll, ReadyOnTheSecondPollFuture returned Pending
let res = fut.as_mut().poll(&mut Context::from_waker(&waker));
assert_pending!(res);
token.cancel();
assert_eq!(wake_count, 1);
// due to the polling fairness (biased behavior) of `WithCancellationToken` Future,
// subsequent polls are biased toward polling ReadyOnTheSecondPollFuture,
// which results in always returning Ready.
let res = fut.as_mut().poll(&mut Context::from_waker(&waker));
assert_ready_eq!(res, Some(()));
}
#[test]
fn ready_fut_with_cancellation_owned_token_test() {
let (waker, _) = new_count_waker();
let token = CancellationToken::new();
let ready_fut = ready(());
let ready_with_token_fut = ready_fut.with_cancellation_token_owned(token);
pin!(ready_with_token_fut);
let res = ready_with_token_fut
.as_mut()
.poll(&mut Context::from_waker(&waker));
assert_ready_eq!(res, Some(()));
}
#[test]
fn pending_fut_with_cancellation_token_owned_test() {
let (waker, _) = new_count_waker();
let token = CancellationToken::new();
let pending_fut = pending::<()>();
let pending_with_token_fut = pending_fut.with_cancellation_token_owned(token);
pin!(pending_with_token_fut);
let res = pending_with_token_fut
.as_mut()
.poll(&mut Context::from_waker(&waker));
assert_pending!(res);
}
#[test]
fn ready_fut_with_already_cancelled_token_owned_test() {
let (waker, _) = new_count_waker();
let token = CancellationToken::new();
token.cancel();
let ready_fut = ready(());
let ready_fut_with_token_fut = ready_fut.with_cancellation_token_owned(token);
pin!(ready_fut_with_token_fut);
let res = ready_fut_with_token_fut
.as_mut()
.poll(&mut Context::from_waker(&waker));
assert_ready_eq!(res, None);
}
#[test]
fn pending_fut_with_already_cancelled_token_owned_test() {
let (waker, wake_count) = new_count_waker();
let token = CancellationToken::new();
token.cancel();
let pending_fut = pending::<()>();
let pending_with_token_fut = pending_fut.with_cancellation_token_owned(token);
pin!(pending_with_token_fut);
let res = pending_with_token_fut
.as_mut()
.poll(&mut Context::from_waker(&waker));
assert_ready_eq!(res, None);
assert_eq!(wake_count, 0);
}
#[test]
fn pending_fut_with_owned_token_cancelled_test() {
let (waker, wake_count) = new_count_waker();
let token = CancellationToken::new();
let pending_fut = pending::<()>();
let pending_with_token_fut = pending_fut.with_cancellation_token_owned(token.clone());
pin!(pending_with_token_fut);
let res = pending_with_token_fut
.as_mut()
.poll(&mut Context::from_waker(&waker));
assert_pending!(res);
token.cancel();
let res = pending_with_token_fut
.as_mut()
.poll(&mut Context::from_waker(&waker));
assert_ready_eq!(res, None);
assert_eq!(wake_count, 1);
}
#[test]
fn pending_only_on_first_poll_with_cancellation_token_owned_test() {
let (waker, wake_count) = new_count_waker();
let token = CancellationToken::new();
let fut = ReadyOnTheSecondPollFuture::default().with_cancellation_token_owned(token.clone());
pin!(fut);
// first poll, ReadyOnTheSecondPollFuture returned Pending
let res = fut.as_mut().poll(&mut Context::from_waker(&waker));
assert_pending!(res);
token.cancel();
assert_eq!(wake_count, 1);
// due to the polling fairness (biased behavior) of `WithCancellationToken` Future,
// subsequent polls are biased toward polling ReadyOnTheSecondPollFuture,
// which results in always returning Ready.
let res = fut.as_mut().poll(&mut Context::from_waker(&waker));
assert_ready_eq!(res, Some(()));
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/task_tracker.rs | tokio-util/tests/task_tracker.rs | #![warn(rust_2018_idioms)]
use futures::future::pending;
#[cfg(tokio_unstable)]
use std::rc::Rc;
use tokio::sync::mpsc;
use tokio::task::LocalSet;
use tokio_test::{assert_pending, assert_ready, task};
use tokio_util::task::TaskTracker;
#[test]
fn open_close() {
let tracker = TaskTracker::new();
assert!(!tracker.is_closed());
assert!(tracker.is_empty());
assert_eq!(tracker.len(), 0);
tracker.close();
assert!(tracker.is_closed());
assert!(tracker.is_empty());
assert_eq!(tracker.len(), 0);
tracker.reopen();
assert!(!tracker.is_closed());
tracker.reopen();
assert!(!tracker.is_closed());
assert!(tracker.is_empty());
assert_eq!(tracker.len(), 0);
tracker.close();
assert!(tracker.is_closed());
tracker.close();
assert!(tracker.is_closed());
assert!(tracker.is_empty());
assert_eq!(tracker.len(), 0);
}
#[test]
fn token_len() {
let tracker = TaskTracker::new();
let mut tokens = Vec::new();
for i in 0..10 {
assert_eq!(tracker.len(), i);
tokens.push(tracker.token());
}
assert!(!tracker.is_empty());
assert_eq!(tracker.len(), 10);
for (i, token) in tokens.into_iter().enumerate() {
drop(token);
assert_eq!(tracker.len(), 9 - i);
}
}
#[test]
fn notify_immediately() {
let tracker = TaskTracker::new();
tracker.close();
let mut wait = task::spawn(tracker.wait());
assert_ready!(wait.poll());
}
#[test]
fn notify_immediately_on_reopen() {
let tracker = TaskTracker::new();
tracker.close();
let mut wait = task::spawn(tracker.wait());
tracker.reopen();
assert_ready!(wait.poll());
}
#[test]
fn notify_on_close() {
let tracker = TaskTracker::new();
let mut wait = task::spawn(tracker.wait());
assert_pending!(wait.poll());
tracker.close();
assert_ready!(wait.poll());
}
#[test]
fn notify_on_close_reopen() {
let tracker = TaskTracker::new();
let mut wait = task::spawn(tracker.wait());
assert_pending!(wait.poll());
tracker.close();
tracker.reopen();
assert_ready!(wait.poll());
}
#[test]
fn notify_on_last_task() {
let tracker = TaskTracker::new();
tracker.close();
let token = tracker.token();
let mut wait = task::spawn(tracker.wait());
assert_pending!(wait.poll());
drop(token);
assert_ready!(wait.poll());
}
#[test]
fn notify_on_last_task_respawn() {
let tracker = TaskTracker::new();
tracker.close();
let token = tracker.token();
let mut wait = task::spawn(tracker.wait());
assert_pending!(wait.poll());
drop(token);
let token2 = tracker.token();
assert_ready!(wait.poll());
drop(token2);
}
#[test]
fn no_notify_on_respawn_if_open() {
let tracker = TaskTracker::new();
let token = tracker.token();
let mut wait = task::spawn(tracker.wait());
assert_pending!(wait.poll());
drop(token);
let token2 = tracker.token();
assert_pending!(wait.poll());
drop(token2);
}
#[test]
fn close_during_exit() {
const ITERS: usize = 5;
for close_spot in 0..=ITERS {
let tracker = TaskTracker::new();
let tokens: Vec<_> = (0..ITERS).map(|_| tracker.token()).collect();
let mut wait = task::spawn(tracker.wait());
for (i, token) in tokens.into_iter().enumerate() {
assert_pending!(wait.poll());
if i == close_spot {
tracker.close();
assert_pending!(wait.poll());
}
drop(token);
}
if close_spot == ITERS {
assert_pending!(wait.poll());
tracker.close();
}
assert_ready!(wait.poll());
}
}
#[test]
fn notify_many() {
let tracker = TaskTracker::new();
let mut waits: Vec<_> = (0..10).map(|_| task::spawn(tracker.wait())).collect();
for wait in &mut waits {
assert_pending!(wait.poll());
}
tracker.close();
for wait in &mut waits {
assert_ready!(wait.poll());
}
}
#[cfg(tokio_unstable)]
mod spawn {
use super::*;
/// Spawn several tasks, and then close the [`TaskTracker`].
#[tokio::test(flavor = "local")]
async fn spawn_then_close() {
const N: usize = 8;
let tracker = TaskTracker::new();
for _ in 0..N {
tracker.spawn(async {});
}
for _ in 0..N {
tracker.spawn_on(async {}, &tokio::runtime::Handle::current());
}
tracker.close();
tracker.wait().await;
assert!(tracker.is_empty());
assert!(tracker.is_closed());
}
}
#[cfg(tokio_unstable)]
mod spawn_local {
use super::*;
#[test]
#[should_panic(
expected = "`spawn_local` called from outside of a `task::LocalSet` or `runtime::LocalRuntime`"
)]
fn panic_outside_any_runtime() {
let tracker = TaskTracker::new();
tracker.spawn_local(async {});
}
#[tokio::test(flavor = "multi_thread")]
#[should_panic(
expected = "`spawn_local` called from outside of a `task::LocalSet` or `runtime::LocalRuntime`"
)]
async fn panic_in_multi_thread_runtime() {
let tracker = TaskTracker::new();
tracker.spawn_local(async {});
}
/// Spawn several tasks, and then close the [`TaskTracker`].
#[tokio::test(flavor = "local")]
async fn spawn_then_close() {
const N: usize = 8;
let tracker = TaskTracker::new();
for _ in 0..N {
let rc = Rc::new(());
tracker.spawn_local(async move {
drop(rc);
});
}
tracker.close();
tracker.wait().await;
assert!(tracker.is_empty());
assert!(tracker.is_closed());
}
/// Close the [`TaskTracker`], and then spawn several tasks
#[tokio::test(flavor = "local")]
async fn spawn_after_close() {
const N: usize = 8;
let tracker = TaskTracker::new();
tracker.close();
for _ in 0..N {
let rc = Rc::new(());
tracker.spawn_local(async move {
drop(rc);
});
}
tracker.wait().await;
assert!(tracker.is_closed());
assert!(tracker.is_empty());
}
}
mod spawn_local_on {
use super::*;
#[cfg(tokio_unstable)]
mod local_runtime {
use super::*;
/// Spawn several tasks, and then close the [`TaskTracker`].
#[tokio::test(flavor = "local")]
async fn spawn_then_close() {
const N: usize = 8;
let local_set = LocalSet::new();
let tracker = TaskTracker::new();
for _ in 0..N {
let rc = Rc::new(());
tracker.spawn_local_on(
async move {
drop(rc);
},
&local_set,
);
}
local_set
.run_until(async {
tracker.close();
tracker.wait().await;
assert!(tracker.is_empty());
assert!(tracker.is_closed());
})
.await;
}
}
mod local_set {
use super::*;
/// Spawn several pending-forever tasks, and then drop the [`TaskTracker`]
/// while the `LocalSet` is already driven.
#[tokio::test(flavor = "current_thread")]
async fn spawn_then_drop() {
const N: usize = 8;
let local = LocalSet::new();
let tracker = TaskTracker::new();
let (tx, mut rx) = mpsc::unbounded_channel::<()>();
for _i in 0..N {
let tx = tx.clone();
tracker.spawn_local_on(
async move {
pending::<()>().await;
drop(tx);
},
&local,
);
}
drop(tx);
local
.run_until(async move {
drop(tracker);
tokio::task::yield_now().await;
use tokio::sync::mpsc::error::TryRecvError;
assert!(matches!(rx.try_recv(), Err(TryRecvError::Empty)));
})
.await;
}
/// Close the tracker first, spawn several pending-forever tasks,
/// then wait while the`LocalSet` is already driven.
#[tokio::test(flavor = "current_thread")]
async fn close_then_spawn() {
const N: usize = 8;
let local = LocalSet::new();
let tracker = TaskTracker::new();
tracker.close();
for _ in 0..N {
let rc = std::rc::Rc::new(());
tracker.spawn_local_on(
async move {
drop(rc);
},
&local,
);
}
local
.run_until(async move {
tracker.wait().await;
assert!(tracker.is_closed());
assert!(tracker.is_empty());
})
.await;
}
}
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/length_delimited.rs | tokio-util/tests/length_delimited.rs | #![warn(rust_2018_idioms)]
use tokio::io::{AsyncRead, AsyncWrite, ReadBuf};
use tokio_test::task;
use tokio_test::{
assert_err, assert_ok, assert_pending, assert_ready, assert_ready_err, assert_ready_ok,
};
use tokio_util::codec::*;
use bytes::{BufMut, Bytes, BytesMut};
use futures::{pin_mut, Sink, Stream};
use std::collections::VecDeque;
use std::io;
use std::pin::Pin;
use std::task::{Context, Poll};
// Build a `Mock` I/O object from a sequence of scripted calls
// (`data(..)`, `flush()`, `Poll::Pending`, or errors), consumed in order.
macro_rules! mock {
    ($($x:expr,)*) => {{
        let mut v = VecDeque::new();
        v.extend(vec![$($x),*]);
        Mock { calls: v }
    }};
}

// Assert that the next frame yielded by the stream equals `$expect`.
// Panics if the stream errored or already finished.
macro_rules! assert_next_eq {
    ($io:ident, $expect:expr) => {{
        task::spawn(()).enter(|cx, _| {
            let res = assert_ready!($io.as_mut().poll_next(cx));
            match res {
                Some(Ok(v)) => assert_eq!(v, $expect.as_ref()),
                Some(Err(e)) => panic!("error = {:?}", e),
                None => panic!("none"),
            }
        });
    }};
}

// Assert that polling the stream returns `Poll::Pending`.
macro_rules! assert_next_pending {
    ($io:ident) => {{
        task::spawn(()).enter(|cx, _| match $io.as_mut().poll_next(cx) {
            Poll::Ready(Some(Ok(v))) => panic!("value = {:?}", v),
            Poll::Ready(Some(Err(e))) => panic!("error = {:?}", e),
            Poll::Ready(None) => panic!("done"),
            Poll::Pending => {}
        });
    }};
}

// Assert that the next poll yields a decode/IO error.
macro_rules! assert_next_err {
    ($io:ident) => {{
        task::spawn(()).enter(|cx, _| match $io.as_mut().poll_next(cx) {
            Poll::Ready(Some(Ok(v))) => panic!("value = {:?}", v),
            Poll::Ready(Some(Err(_))) => {}
            Poll::Ready(None) => panic!("done"),
            Poll::Pending => panic!("pending"),
        });
    }};
}

// Assert that the stream has terminated (yields `None`).
macro_rules! assert_done {
    ($io:ident) => {{
        task::spawn(()).enter(|cx, _| {
            let res = assert_ready!($io.as_mut().poll_next(cx));
            match res {
                Some(Ok(v)) => panic!("value = {:?}", v),
                Some(Err(e)) => panic!("error = {:?}", e),
                None => {}
            }
        });
    }};
}
#[test]
fn read_empty_io_yields_nothing() {
    // An empty underlying reader terminates the stream immediately.
    // No `Box::pin` is needed: `pin_mut!` pins the stream on the stack,
    // exactly like every other test in this file — the previous
    // `Box::pin` + `pin_mut!` combination pinned twice and heap-allocated
    // for nothing.
    let io = FramedRead::new(mock!(), LengthDelimitedCodec::new());
    pin_mut!(io);

    assert_done!(io);
}
#[test]
fn read_single_frame_one_packet() {
let io = FramedRead::new(
mock! {
data(b"\x00\x00\x00\x09abcdefghi"),
},
LengthDelimitedCodec::new(),
);
pin_mut!(io);
assert_next_eq!(io, b"abcdefghi");
assert_done!(io);
}
#[test]
fn read_single_frame_one_packet_little_endian() {
let io = length_delimited::Builder::new()
.little_endian()
.new_read(mock! {
data(b"\x09\x00\x00\x00abcdefghi"),
});
pin_mut!(io);
assert_next_eq!(io, b"abcdefghi");
assert_done!(io);
}
#[test]
fn read_single_frame_one_packet_native_endian() {
let d = if cfg!(target_endian = "big") {
b"\x00\x00\x00\x09abcdefghi"
} else {
b"\x09\x00\x00\x00abcdefghi"
};
let io = length_delimited::Builder::new()
.native_endian()
.new_read(mock! {
data(d),
});
pin_mut!(io);
assert_next_eq!(io, b"abcdefghi");
assert_done!(io);
}
#[test]
fn read_single_multi_frame_one_packet() {
let mut d: Vec<u8> = vec![];
d.extend_from_slice(b"\x00\x00\x00\x09abcdefghi");
d.extend_from_slice(b"\x00\x00\x00\x03123");
d.extend_from_slice(b"\x00\x00\x00\x0bhello world");
let io = FramedRead::new(
mock! {
data(&d),
},
LengthDelimitedCodec::new(),
);
pin_mut!(io);
assert_next_eq!(io, b"abcdefghi");
assert_next_eq!(io, b"123");
assert_next_eq!(io, b"hello world");
assert_done!(io);
}
#[test]
fn read_single_frame_multi_packet() {
let io = FramedRead::new(
mock! {
data(b"\x00\x00"),
data(b"\x00\x09abc"),
data(b"defghi"),
},
LengthDelimitedCodec::new(),
);
pin_mut!(io);
assert_next_eq!(io, b"abcdefghi");
assert_done!(io);
}
#[test]
fn read_multi_frame_multi_packet() {
let io = FramedRead::new(
mock! {
data(b"\x00\x00"),
data(b"\x00\x09abc"),
data(b"defghi"),
data(b"\x00\x00\x00\x0312"),
data(b"3\x00\x00\x00\x0bhello world"),
},
LengthDelimitedCodec::new(),
);
pin_mut!(io);
assert_next_eq!(io, b"abcdefghi");
assert_next_eq!(io, b"123");
assert_next_eq!(io, b"hello world");
assert_done!(io);
}
#[test]
fn read_single_frame_multi_packet_wait() {
let io = FramedRead::new(
mock! {
data(b"\x00\x00"),
Poll::Pending,
data(b"\x00\x09abc"),
Poll::Pending,
data(b"defghi"),
Poll::Pending,
},
LengthDelimitedCodec::new(),
);
pin_mut!(io);
assert_next_pending!(io);
assert_next_pending!(io);
assert_next_eq!(io, b"abcdefghi");
assert_next_pending!(io);
assert_done!(io);
}
#[test]
fn read_multi_frame_multi_packet_wait() {
let io = FramedRead::new(
mock! {
data(b"\x00\x00"),
Poll::Pending,
data(b"\x00\x09abc"),
Poll::Pending,
data(b"defghi"),
Poll::Pending,
data(b"\x00\x00\x00\x0312"),
Poll::Pending,
data(b"3\x00\x00\x00\x0bhello world"),
Poll::Pending,
},
LengthDelimitedCodec::new(),
);
pin_mut!(io);
assert_next_pending!(io);
assert_next_pending!(io);
assert_next_eq!(io, b"abcdefghi");
assert_next_pending!(io);
assert_next_pending!(io);
assert_next_eq!(io, b"123");
assert_next_eq!(io, b"hello world");
assert_next_pending!(io);
assert_done!(io);
}
#[test]
fn read_incomplete_head() {
    // EOF in the middle of the 4-byte length header is an error,
    // not a clean end-of-stream.
    let io = FramedRead::new(
        mock! {
            data(b"\x00\x00"),
        },
        LengthDelimitedCodec::new(),
    );
    pin_mut!(io);

    assert_next_err!(io);
}

#[test]
fn read_incomplete_head_multi() {
    // Same as above, but interleaved with pending polls before EOF.
    let io = FramedRead::new(
        mock! {
            Poll::Pending,
            data(b"\x00"),
            Poll::Pending,
        },
        LengthDelimitedCodec::new(),
    );
    pin_mut!(io);

    assert_next_pending!(io);
    assert_next_pending!(io);
    assert_next_err!(io);
}

#[test]
fn read_incomplete_payload() {
    // EOF after the header but before the full 9-byte payload arrives
    // must surface as an error.
    let io = FramedRead::new(
        mock! {
            data(b"\x00\x00\x00\x09ab"),
            Poll::Pending,
            data(b"cd"),
            Poll::Pending,
        },
        LengthDelimitedCodec::new(),
    );
    pin_mut!(io);

    assert_next_pending!(io);
    assert_next_pending!(io);
    assert_next_err!(io);
}
#[test]
fn read_max_frame_len() {
let io = length_delimited::Builder::new()
.max_frame_length(5)
.new_read(mock! {
data(b"\x00\x00\x00\x09abcdefghi"),
});
pin_mut!(io);
assert_next_err!(io);
}
#[test]
fn read_update_max_frame_len_at_rest() {
let io = length_delimited::Builder::new().new_read(mock! {
data(b"\x00\x00\x00\x09abcdefghi"),
data(b"\x00\x00\x00\x09abcdefghi"),
});
pin_mut!(io);
assert_next_eq!(io, b"abcdefghi");
io.decoder_mut().set_max_frame_length(5);
assert_next_err!(io);
}
#[test]
fn read_update_max_frame_len_in_flight() {
let io = length_delimited::Builder::new().new_read(mock! {
data(b"\x00\x00\x00\x09abcd"),
Poll::Pending,
data(b"efghi"),
data(b"\x00\x00\x00\x09abcdefghi"),
});
pin_mut!(io);
assert_next_pending!(io);
io.decoder_mut().set_max_frame_length(5);
assert_next_eq!(io, b"abcdefghi");
assert_next_err!(io);
}
#[test]
fn read_one_byte_length_field() {
let io = length_delimited::Builder::new()
.length_field_length(1)
.new_read(mock! {
data(b"\x09abcdefghi"),
});
pin_mut!(io);
assert_next_eq!(io, b"abcdefghi");
assert_done!(io);
}
#[test]
fn read_header_offset() {
let io = length_delimited::Builder::new()
.length_field_length(2)
.length_field_offset(4)
.new_read(mock! {
data(b"zzzz\x00\x09abcdefghi"),
});
pin_mut!(io);
assert_next_eq!(io, b"abcdefghi");
assert_done!(io);
}
#[test]
fn read_single_multi_frame_one_packet_skip_none_adjusted() {
let mut d: Vec<u8> = vec![];
d.extend_from_slice(b"xx\x00\x09abcdefghi");
d.extend_from_slice(b"yy\x00\x03123");
d.extend_from_slice(b"zz\x00\x0bhello world");
let io = length_delimited::Builder::new()
.length_field_length(2)
.length_field_offset(2)
.num_skip(0)
.length_adjustment(4)
.new_read(mock! {
data(&d),
});
pin_mut!(io);
assert_next_eq!(io, b"xx\x00\x09abcdefghi");
assert_next_eq!(io, b"yy\x00\x03123");
assert_next_eq!(io, b"zz\x00\x0bhello world");
assert_done!(io);
}
#[test]
fn read_single_frame_length_adjusted() {
let mut d: Vec<u8> = vec![];
d.extend_from_slice(b"\x00\x00\x0b\x0cHello world");
let io = length_delimited::Builder::new()
.length_field_offset(0)
.length_field_length(3)
.length_adjustment(0)
.num_skip(4)
.new_read(mock! {
data(&d),
});
pin_mut!(io);
assert_next_eq!(io, b"Hello world");
assert_done!(io);
}
#[test]
fn read_single_multi_frame_one_packet_length_includes_head() {
let mut d: Vec<u8> = vec![];
d.extend_from_slice(b"\x00\x0babcdefghi");
d.extend_from_slice(b"\x00\x05123");
d.extend_from_slice(b"\x00\x0dhello world");
let io = length_delimited::Builder::new()
.length_field_length(2)
.length_adjustment(-2)
.new_read(mock! {
data(&d),
});
pin_mut!(io);
assert_next_eq!(io, b"abcdefghi");
assert_next_eq!(io, b"123");
assert_next_eq!(io, b"hello world");
assert_done!(io);
}
#[test]
fn write_single_frame_length_adjusted() {
let io = length_delimited::Builder::new()
.length_adjustment(-2)
.new_write(mock! {
data(b"\x00\x00\x00\x0b"),
data(b"abcdefghi"),
flush(),
});
pin_mut!(io);
task::spawn(()).enter(|cx, _| {
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_ok!(io.as_mut().start_send(Bytes::from("abcdefghi")));
assert_ready_ok!(io.as_mut().poll_flush(cx));
assert!(io.get_ref().calls.is_empty());
});
}
#[test]
fn write_nothing_yields_nothing() {
let io = FramedWrite::new(mock!(), LengthDelimitedCodec::new());
pin_mut!(io);
task::spawn(()).enter(|cx, _| {
assert_ready_ok!(io.poll_flush(cx));
});
}
#[test]
fn write_single_frame_one_packet() {
let io = FramedWrite::new(
mock! {
data(b"\x00\x00\x00\x09"),
data(b"abcdefghi"),
flush(),
},
LengthDelimitedCodec::new(),
);
pin_mut!(io);
task::spawn(()).enter(|cx, _| {
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_ok!(io.as_mut().start_send(Bytes::from("abcdefghi")));
assert_ready_ok!(io.as_mut().poll_flush(cx));
assert!(io.get_ref().calls.is_empty());
});
}
#[test]
fn write_single_multi_frame_one_packet() {
let io = FramedWrite::new(
mock! {
data(b"\x00\x00\x00\x09"),
data(b"abcdefghi"),
data(b"\x00\x00\x00\x03"),
data(b"123"),
data(b"\x00\x00\x00\x0b"),
data(b"hello world"),
flush(),
},
LengthDelimitedCodec::new(),
);
pin_mut!(io);
task::spawn(()).enter(|cx, _| {
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_ok!(io.as_mut().start_send(Bytes::from("abcdefghi")));
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_ok!(io.as_mut().start_send(Bytes::from("123")));
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_ok!(io.as_mut().start_send(Bytes::from("hello world")));
assert_ready_ok!(io.as_mut().poll_flush(cx));
assert!(io.get_ref().calls.is_empty());
});
}
#[test]
fn write_single_multi_frame_multi_packet() {
let io = FramedWrite::new(
mock! {
data(b"\x00\x00\x00\x09"),
data(b"abcdefghi"),
flush(),
data(b"\x00\x00\x00\x03"),
data(b"123"),
flush(),
data(b"\x00\x00\x00\x0b"),
data(b"hello world"),
flush(),
},
LengthDelimitedCodec::new(),
);
pin_mut!(io);
task::spawn(()).enter(|cx, _| {
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_ok!(io.as_mut().start_send(Bytes::from("abcdefghi")));
assert_ready_ok!(io.as_mut().poll_flush(cx));
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_ok!(io.as_mut().start_send(Bytes::from("123")));
assert_ready_ok!(io.as_mut().poll_flush(cx));
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_ok!(io.as_mut().start_send(Bytes::from("hello world")));
assert_ready_ok!(io.as_mut().poll_flush(cx));
assert!(io.get_ref().calls.is_empty());
});
}
#[test]
fn write_single_frame_would_block() {
let io = FramedWrite::new(
mock! {
Poll::Pending,
data(b"\x00\x00"),
Poll::Pending,
data(b"\x00\x09"),
data(b"abcdefghi"),
flush(),
},
LengthDelimitedCodec::new(),
);
pin_mut!(io);
task::spawn(()).enter(|cx, _| {
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_ok!(io.as_mut().start_send(Bytes::from("abcdefghi")));
assert_pending!(io.as_mut().poll_flush(cx));
assert_pending!(io.as_mut().poll_flush(cx));
assert_ready_ok!(io.as_mut().poll_flush(cx));
assert!(io.get_ref().calls.is_empty());
});
}
#[test]
fn write_single_frame_little_endian() {
let io = length_delimited::Builder::new()
.little_endian()
.new_write(mock! {
data(b"\x09\x00\x00\x00"),
data(b"abcdefghi"),
flush(),
});
pin_mut!(io);
task::spawn(()).enter(|cx, _| {
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_ok!(io.as_mut().start_send(Bytes::from("abcdefghi")));
assert_ready_ok!(io.as_mut().poll_flush(cx));
assert!(io.get_ref().calls.is_empty());
});
}
#[test]
fn write_single_frame_with_short_length_field() {
let io = length_delimited::Builder::new()
.length_field_length(1)
.new_write(mock! {
data(b"\x09"),
data(b"abcdefghi"),
flush(),
});
pin_mut!(io);
task::spawn(()).enter(|cx, _| {
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_ok!(io.as_mut().start_send(Bytes::from("abcdefghi")));
assert_ready_ok!(io.as_mut().poll_flush(cx));
assert!(io.get_ref().calls.is_empty());
});
}
#[test]
fn write_max_frame_len() {
let io = length_delimited::Builder::new()
.max_frame_length(5)
.new_write(mock! {});
pin_mut!(io);
task::spawn(()).enter(|cx, _| {
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_err!(io.as_mut().start_send(Bytes::from("abcdef")));
assert!(io.get_ref().calls.is_empty());
});
}
#[test]
fn write_update_max_frame_len_at_rest() {
let io = length_delimited::Builder::new().new_write(mock! {
data(b"\x00\x00\x00\x06"),
data(b"abcdef"),
flush(),
});
pin_mut!(io);
task::spawn(()).enter(|cx, _| {
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_ok!(io.as_mut().start_send(Bytes::from("abcdef")));
assert_ready_ok!(io.as_mut().poll_flush(cx));
io.encoder_mut().set_max_frame_length(5);
assert_err!(io.as_mut().start_send(Bytes::from("abcdef")));
assert!(io.get_ref().calls.is_empty());
});
}
#[test]
fn write_update_max_frame_len_in_flight() {
let io = length_delimited::Builder::new().new_write(mock! {
data(b"\x00\x00\x00\x06"),
data(b"ab"),
Poll::Pending,
data(b"cdef"),
flush(),
});
pin_mut!(io);
task::spawn(()).enter(|cx, _| {
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_ok!(io.as_mut().start_send(Bytes::from("abcdef")));
assert_pending!(io.as_mut().poll_flush(cx));
io.encoder_mut().set_max_frame_length(5);
assert_ready_ok!(io.as_mut().poll_flush(cx));
assert_err!(io.as_mut().start_send(Bytes::from("abcdef")));
assert!(io.get_ref().calls.is_empty());
});
}
#[test]
fn write_zero() {
let io = length_delimited::Builder::new().new_write(mock! {});
pin_mut!(io);
task::spawn(()).enter(|cx, _| {
assert_ready_ok!(io.as_mut().poll_ready(cx));
assert_ok!(io.as_mut().start_send(Bytes::from("abcdef")));
assert_ready_err!(io.as_mut().poll_flush(cx));
assert!(io.get_ref().calls.is_empty());
});
}
#[test]
fn encode_overflow() {
// Test reproducing tokio-rs/tokio#681.
let mut codec = length_delimited::Builder::new().new_codec();
let mut buf = BytesMut::with_capacity(1024);
// Put some data into the buffer without resizing it to hold more.
let some_as = std::iter::repeat(b'a').take(1024).collect::<Vec<_>>();
buf.put_slice(&some_as[..]);
// Trying to encode the length header should resize the buffer if it won't fit.
codec.encode(Bytes::from("hello"), &mut buf).unwrap();
}
#[test]
fn frame_does_not_fit() {
    // A 1-byte length field can express at most 255, so the requested
    // max of 256 is clamped down.
    let codec = LengthDelimitedCodec::builder()
        .length_field_length(1)
        .max_frame_length(256)
        .new_codec();

    assert_eq!(codec.max_frame_length(), 255);
}

#[test]
fn neg_adjusted_frame_does_not_fit() {
    // With adjustment -1 the largest representable frame shrinks by one.
    let codec = LengthDelimitedCodec::builder()
        .length_field_length(1)
        .length_adjustment(-1)
        .new_codec();

    assert_eq!(codec.max_frame_length(), 254);
}

#[test]
fn pos_adjusted_frame_does_not_fit() {
    // A positive adjustment grows the representable frame size by one.
    let codec = LengthDelimitedCodec::builder()
        .length_field_length(1)
        .length_adjustment(1)
        .new_codec();

    assert_eq!(codec.max_frame_length(), 256);
}

#[test]
fn max_allowed_frame_fits() {
    // A pointer-sized length field can express `usize::MAX`.
    let codec = LengthDelimitedCodec::builder()
        .length_field_length(std::mem::size_of::<usize>())
        .max_frame_length(usize::MAX)
        .new_codec();

    assert_eq!(codec.max_frame_length(), usize::MAX);
}

#[test]
fn smaller_frame_len_not_adjusted() {
    // An explicit max below the field's capacity is kept as-is.
    let codec = LengthDelimitedCodec::builder()
        .max_frame_length(10)
        .length_field_length(std::mem::size_of::<usize>())
        .new_codec();

    assert_eq!(codec.max_frame_length(), 10);
}

#[test]
fn max_allowed_length_field() {
    // An explicit 8-byte length field likewise allows `usize::MAX`.
    let codec = LengthDelimitedCodec::builder()
        .length_field_length(8)
        .max_frame_length(usize::MAX)
        .new_codec();

    assert_eq!(codec.max_frame_length(), usize::MAX);
}
// ===== Test utils =====
// Scripted mock I/O object: each read/write/flush pops the next
// expected call off the front of `calls`.
struct Mock {
    calls: VecDeque<Poll<io::Result<Op>>>,
}

// One scripted operation: either a chunk of bytes or a flush.
enum Op {
    Data(Vec<u8>),
    Flush,
}
impl AsyncRead for Mock {
    fn poll_read(
        mut self: Pin<&mut Self>,
        _cx: &mut Context<'_>,
        dst: &mut ReadBuf<'_>,
    ) -> Poll<io::Result<()>> {
        // Pop the next scripted call; an exhausted script acts as EOF
        // (Ok with nothing written into `dst`).
        match self.calls.pop_front() {
            Some(Poll::Ready(Ok(Op::Data(data)))) => {
                debug_assert!(dst.remaining() >= data.len());
                dst.put_slice(&data);
                Poll::Ready(Ok(()))
            }
            // A scripted `Flush` on the read side is a test-script bug.
            Some(Poll::Ready(Ok(_))) => panic!(),
            Some(Poll::Ready(Err(e))) => Poll::Ready(Err(e)),
            Some(Poll::Pending) => Poll::Pending,
            None => Poll::Ready(Ok(())),
        }
    }
}
impl AsyncWrite for Mock {
    fn poll_write(
        mut self: Pin<&mut Self>,
        _cx: &mut Context<'_>,
        src: &[u8],
    ) -> Poll<Result<usize, io::Error>> {
        // Each scripted `Data` op must be a prefix of what the caller is
        // writing; only that prefix is accepted, letting the tests
        // exercise partial writes.
        match self.calls.pop_front() {
            Some(Poll::Ready(Ok(Op::Data(data)))) => {
                let len = data.len();
                assert!(src.len() >= len, "expect={data:?}; actual={src:?}");
                assert_eq!(&data[..], &src[..len]);
                Poll::Ready(Ok(len))
            }
            // A scripted `Flush` where data was expected is a script bug.
            Some(Poll::Ready(Ok(_))) => panic!(),
            Some(Poll::Ready(Err(e))) => Poll::Ready(Err(e)),
            Some(Poll::Pending) => Poll::Pending,
            // Script exhausted: report a zero-length write.
            None => Poll::Ready(Ok(0)),
        }
    }

    fn poll_flush(mut self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
        // A flush must line up with a scripted `Flush`; with no script
        // left it trivially succeeds.
        match self.calls.pop_front() {
            Some(Poll::Ready(Ok(Op::Flush))) => Poll::Ready(Ok(())),
            Some(Poll::Ready(Ok(_))) => panic!(),
            Some(Poll::Ready(Err(e))) => Poll::Ready(Err(e)),
            Some(Poll::Pending) => Poll::Pending,
            None => Poll::Ready(Ok(())),
        }
    }

    fn poll_shutdown(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Result<(), io::Error>> {
        // Shutdown is not scripted; it always succeeds.
        Poll::Ready(Ok(()))
    }
}
/// Convert a borrowed byte slice into a scripted `Data` op.
impl<'a> From<&'a [u8]> for Op {
    fn from(src: &'a [u8]) -> Op {
        Op::Data(src.to_vec())
    }
}
/// Wrap an owned byte buffer in a scripted `Data` op.
impl From<Vec<u8>> for Op {
    fn from(src: Vec<u8>) -> Op {
        Op::Data(src)
    }
}
fn data(bytes: &[u8]) -> Poll<io::Result<Op>> {
Poll::Ready(Ok(bytes.into()))
}
/// Script entry: the mock expects a flush at this point.
fn flush() -> Poll<io::Result<Op>> {
    Poll::Ready(Ok(Op::Flush))
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/io_sink_writer.rs | tokio-util/tests/io_sink_writer.rs | #![warn(rust_2018_idioms)]
use bytes::Bytes;
use futures_util::SinkExt;
use std::io::{self, Error, ErrorKind};
use tokio::io::AsyncWriteExt;
use tokio_util::codec::{Encoder, FramedWrite};
use tokio_util::io::{CopyToBytes, SinkWriter};
use tokio_util::sync::PollSender;
#[tokio::test]
async fn test_copied_sink_writer() -> Result<(), Error> {
    // Construct a channel pair to send data across and wrap a pollable sink.
    // Note that the sink must mimic a writable object, e.g. have `std::io::Error`
    // as its error type.
    // As `PollSender` requires an owned copy of the buffer, we wrap it additionally
    // with a `CopyToBytes` helper.
    let (tx, mut rx) = tokio::sync::mpsc::channel::<Bytes>(1);
    let mut writer = SinkWriter::new(CopyToBytes::new(
        PollSender::new(tx).sink_map_err(|_| io::Error::from(ErrorKind::BrokenPipe)),
    ));

    // Write data to our interface...
    // `write_all` + `?` instead of discarding `write`'s result: a bare
    // `write` may legally perform a short write, and ignoring its
    // `Result` could mask a failure before the assertion below.
    let data: [u8; 4] = [1, 2, 3, 4];
    writer.write_all(&data).await?;

    // ... and receive it.
    assert_eq!(data.to_vec(), rx.recv().await.unwrap().to_vec());

    Ok(())
}
/// A trivial encoder that copies input slices verbatim.
struct SliceEncoder;

impl SliceEncoder {
    /// Create a new trivial encoder.
    fn new() -> Self {
        Self {}
    }
}
impl<'a> Encoder<&'a [u8]> for SliceEncoder {
    type Error = Error;

    /// Encode a slice by appending it verbatim to the output buffer.
    fn encode(&mut self, item: &'a [u8], dst: &mut bytes::BytesMut) -> Result<(), Self::Error> {
        // This is where we'd write packet headers, lengths, etc. in a real encoder.
        // For simplicity and demonstration purposes, we just pack a copy of
        // the slice at the end of a buffer.
        dst.extend_from_slice(item);
        Ok(())
    }
}
#[tokio::test]
async fn test_direct_sink_writer() -> Result<(), Error> {
    // We define a framed writer which accepts byte slices
    // and 'reverse' this construction immediately.
    let framed_byte_lc = FramedWrite::new(Vec::new(), SliceEncoder::new());
    let mut writer = SinkWriter::new(framed_byte_lc);

    // Write multiple slices to the sink...
    // `write_all` + `?` instead of ignoring `write`'s result: a short or
    // failed write would otherwise go unnoticed until the final assertion.
    writer.write_all(&[1, 2, 3]).await?;
    writer.write_all(&[4, 5, 6]).await?;

    // ... and compare it with the buffer.
    assert_eq!(
        writer.into_inner().write_buffer().to_vec().as_slice(),
        &[1, 2, 3, 4, 5, 6]
    );

    Ok(())
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/framed.rs | tokio-util/tests/framed.rs | #![warn(rust_2018_idioms)]
use tokio_stream::StreamExt;
use tokio_test::assert_ok;
use tokio_util::codec::{Decoder, Encoder, Framed, FramedParts};
use bytes::{Buf, BufMut, BytesMut};
use std::io::{self, Read};
use std::pin::Pin;
use std::task::{Context, Poll};
const INITIAL_CAPACITY: usize = 8 * 1024;
/// Encode and decode u32 values.
#[derive(Default)]
struct U32Codec {
    // Total number of payload bytes decoded so far (4 per frame).
    read_bytes: usize,
}
impl Decoder for U32Codec {
    type Item = u32;
    type Error = io::Error;

    /// Pop one 4-byte big-endian frame, or `None` until enough data arrives.
    fn decode(&mut self, buf: &mut BytesMut) -> io::Result<Option<u32>> {
        match buf.len() {
            0..=3 => Ok(None),
            _ => {
                let value = buf.split_to(4).get_u32();
                self.read_bytes += 4;
                Ok(Some(value))
            }
        }
    }
}
impl Encoder<u32> for U32Codec {
    type Error = io::Error;

    /// Append `item` to `dst` as a 4-byte big-endian frame.
    fn encode(&mut self, item: u32, dst: &mut BytesMut) -> io::Result<()> {
        // `to_be_bytes` produces the same big-endian layout as `put_u32`;
        // `extend_from_slice` reserves as needed.
        dst.extend_from_slice(&item.to_be_bytes());
        Ok(())
    }
}
/// Encode and decode u64 values.
#[derive(Default)]
struct U64Codec {
    // Total number of payload bytes decoded so far (8 per frame).
    read_bytes: usize,
}
impl Decoder for U64Codec {
    type Item = u64;
    type Error = io::Error;

    /// Pop one 8-byte big-endian frame, or `None` until enough data arrives.
    fn decode(&mut self, buf: &mut BytesMut) -> io::Result<Option<u64>> {
        if buf.len() >= 8 {
            let value = buf.split_to(8).get_u64();
            self.read_bytes += 8;
            Ok(Some(value))
        } else {
            Ok(None)
        }
    }
}
impl Encoder<u64> for U64Codec {
    type Error = io::Error;

    /// Append `item` to `dst` as an 8-byte big-endian frame.
    fn encode(&mut self, item: u64, dst: &mut BytesMut) -> io::Result<()> {
        // `to_be_bytes` produces the same big-endian layout as `put_u64`;
        // `extend_from_slice` reserves as needed.
        dst.extend_from_slice(&item.to_be_bytes());
        Ok(())
    }
}
/// An I/O object whose read paths must never be reached: the tests
/// using it are expected to be satisfied entirely from a pre-filled
/// read buffer.
struct DontReadIntoThis;

impl Read for DontReadIntoThis {
    // Fail loudly if a blocking read is ever attempted.
    fn read(&mut self, _: &mut [u8]) -> io::Result<usize> {
        Err(io::Error::new(
            io::ErrorKind::Other,
            "Read into something you weren't supposed to.",
        ))
    }
}
impl tokio::io::AsyncRead for DontReadIntoThis {
    // Reaching the underlying reader is a test failure, so this aborts.
    fn poll_read(
        self: Pin<&mut Self>,
        _cx: &mut Context<'_>,
        _buf: &mut tokio::io::ReadBuf<'_>,
    ) -> Poll<io::Result<()>> {
        unreachable!()
    }
}
#[tokio::test]
async fn can_read_from_existing_buf() {
    // A frame already present in `read_buf` is decoded without ever
    // touching the underlying (panicking) reader.
    let mut parts = FramedParts::new(DontReadIntoThis, U32Codec::default());
    parts.read_buf = BytesMut::from(&[0, 0, 0, 42][..]);

    let mut framed = Framed::from_parts(parts);
    let num = assert_ok!(framed.next().await.unwrap());

    assert_eq!(num, 42);
    assert_eq!(framed.codec().read_bytes, 4);
}
#[tokio::test]
async fn can_read_from_existing_buf_after_codec_changed() {
    // Swapping the codec with `map_codec` must keep both the leftover
    // read buffer and the carried-over codec state (`read_bytes`).
    let mut parts = FramedParts::new(DontReadIntoThis, U32Codec::default());
    parts.read_buf = BytesMut::from(&[0, 0, 0, 42, 0, 0, 0, 0, 0, 0, 0, 84][..]);

    let mut framed = Framed::from_parts(parts);
    let num = assert_ok!(framed.next().await.unwrap());

    assert_eq!(num, 42);
    assert_eq!(framed.codec().read_bytes, 4);

    // Switch to a u64 codec; the remaining 8 buffered bytes form one u64 frame.
    let mut framed = framed.map_codec(|codec| U64Codec {
        read_bytes: codec.read_bytes,
    });
    let num = assert_ok!(framed.next().await.unwrap());

    assert_eq!(num, 84);
    assert_eq!(framed.codec().read_bytes, 12);
}
#[test]
fn external_buf_grows_to_init() {
    // A small caller-supplied buffer is grown to the framed reader's
    // standard initial capacity.
    let mut parts = FramedParts::new(DontReadIntoThis, U32Codec::default());
    parts.read_buf = BytesMut::from(&[0, 0, 0, 42][..]);

    let framed = Framed::from_parts(parts);
    let FramedParts { read_buf, .. } = framed.into_parts();

    assert_eq!(read_buf.capacity(), INITIAL_CAPACITY);
}
#[test]
fn external_buf_does_not_shrink() {
    // A caller-supplied buffer larger than the default capacity is
    // left alone rather than shrunk.
    let mut parts = FramedParts::new(DontReadIntoThis, U32Codec::default());
    parts.read_buf = BytesMut::from(&vec![0; INITIAL_CAPACITY * 2][..]);

    let framed = Framed::from_parts(parts);
    let FramedParts { read_buf, .. } = framed.into_parts();

    assert_eq!(read_buf.capacity(), INITIAL_CAPACITY * 2);
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/panic.rs | tokio-util/tests/panic.rs | #![warn(rust_2018_idioms)]
#![cfg(all(feature = "full", not(target_os = "wasi")))] // Wasi doesn't support panic recovery
#![cfg(panic = "unwind")]
use parking_lot::{const_mutex, Mutex};
use std::error::Error;
use std::panic;
use std::sync::Arc;
use tokio::runtime::Runtime;
use tokio::sync::mpsc::channel;
use tokio::time::{Duration, Instant};
use tokio_test::task;
use tokio_util::io::SyncIoBridge;
use tokio_util::sync::PollSender;
use tokio_util::task::LocalPoolHandle;
use tokio_util::time::DelayQueue;
// Taken from tokio-util::time::wheel, if that changes then
const MAX_DURATION_MS: u64 = (1 << (36)) - 1;
// Run `func` under `catch_unwind` and, if it panicked, return the file
// name recorded by a temporary panic hook. A global mutex serialises
// callers because the panic hook is process-wide state.
fn test_panic<Func: FnOnce() + panic::UnwindSafe>(func: Func) -> Option<String> {
    static PANIC_MUTEX: Mutex<()> = const_mutex(());

    {
        let _guard = PANIC_MUTEX.lock();
        // Shared slot the hook writes the panic location's file into.
        let panic_file: Arc<Mutex<Option<String>>> = Arc::new(Mutex::new(None));

        let prev_hook = panic::take_hook();
        {
            let panic_file = panic_file.clone();
            panic::set_hook(Box::new(move |panic_info| {
                let panic_location = panic_info.location().unwrap();
                panic_file
                    .lock()
                    .clone_from(&Some(panic_location.file().to_string()));
            }));
        }

        let result = panic::catch_unwind(func);
        // Return to the previously set panic hook (maybe default) so that we get nice error
        // messages in the tests.
        panic::set_hook(prev_hook);

        // Only report a file when `func` actually panicked.
        if result.is_err() {
            panic_file.lock().clone()
        } else {
            None
        }
    }
}
#[test]
fn sync_bridge_new_panic_caller() -> Result<(), Box<dyn Error>> {
let panic_location_file = test_panic(|| {
let _ = SyncIoBridge::new(tokio::io::empty());
});
// The panic location should be in this file
assert_eq!(&panic_location_file.unwrap(), file!());
Ok(())
}
#[test]
fn poll_sender_send_item_panic_caller() -> Result<(), Box<dyn Error>> {
let panic_location_file = test_panic(|| {
let (send, _) = channel::<u32>(3);
let mut send = PollSender::new(send);
let _ = send.send_item(42);
});
// The panic location should be in this file
assert_eq!(&panic_location_file.unwrap(), file!());
Ok(())
}
#[test]
fn local_pool_handle_new_panic_caller() -> Result<(), Box<dyn Error>> {
let panic_location_file = test_panic(|| {
let _ = LocalPoolHandle::new(0);
});
// The panic location should be in this file
assert_eq!(&panic_location_file.unwrap(), file!());
Ok(())
}
#[test]
fn local_pool_handle_spawn_pinned_by_idx_panic_caller() -> Result<(), Box<dyn Error>> {
let panic_location_file = test_panic(|| {
let rt = basic();
rt.block_on(async {
let handle = LocalPoolHandle::new(2);
handle.spawn_pinned_by_idx(|| async { "test" }, 3);
});
});
// The panic location should be in this file
assert_eq!(&panic_location_file.unwrap(), file!());
Ok(())
}
#[test]
fn delay_queue_insert_at_panic_caller() -> Result<(), Box<dyn Error>> {
let panic_location_file = test_panic(|| {
let rt = basic();
rt.block_on(async {
let mut queue = task::spawn(DelayQueue::with_capacity(3));
//let st = std::time::Instant::from(SystemTime::UNIX_EPOCH);
let _k = queue.insert_at(
"1",
Instant::now() + Duration::from_millis(MAX_DURATION_MS + 1),
);
});
});
// The panic location should be in this file
assert_eq!(&panic_location_file.unwrap(), file!());
Ok(())
}
#[test]
fn delay_queue_insert_panic_caller() -> Result<(), Box<dyn Error>> {
let panic_location_file = test_panic(|| {
let rt = basic();
rt.block_on(async {
let mut queue = task::spawn(DelayQueue::with_capacity(3));
let _k = queue.insert("1", Duration::from_millis(MAX_DURATION_MS + 1));
});
});
// The panic location should be in this file
assert_eq!(&panic_location_file.unwrap(), file!());
Ok(())
}
#[test]
fn delay_queue_remove_panic_caller() -> Result<(), Box<dyn Error>> {
let panic_location_file = test_panic(|| {
let rt = basic();
rt.block_on(async {
let mut queue = task::spawn(DelayQueue::with_capacity(3));
let key = queue.insert_at("1", Instant::now());
queue.remove(&key);
queue.remove(&key);
});
});
// The panic location should be in this file
assert_eq!(&panic_location_file.unwrap(), file!());
Ok(())
}
#[test]
fn delay_queue_reset_at_panic_caller() -> Result<(), Box<dyn Error>> {
let panic_location_file = test_panic(|| {
let rt = basic();
rt.block_on(async {
let mut queue = task::spawn(DelayQueue::with_capacity(3));
let key = queue.insert_at("1", Instant::now());
queue.reset_at(
&key,
Instant::now() + Duration::from_millis(MAX_DURATION_MS + 1),
);
});
});
// The panic location should be in this file
assert_eq!(&panic_location_file.unwrap(), file!());
Ok(())
}
#[test]
fn delay_queue_reset_panic_caller() -> Result<(), Box<dyn Error>> {
let panic_location_file = test_panic(|| {
let rt = basic();
rt.block_on(async {
let mut queue = task::spawn(DelayQueue::with_capacity(3));
let key = queue.insert_at("1", Instant::now());
queue.reset(&key, Duration::from_millis(MAX_DURATION_MS + 1));
});
});
// The panic location should be in this file
assert_eq!(&panic_location_file.unwrap(), file!());
Ok(())
}
#[test]
fn delay_queue_reserve_panic_caller() -> Result<(), Box<dyn Error>> {
let panic_location_file = test_panic(|| {
let rt = basic();
rt.block_on(async {
let mut queue = task::spawn(DelayQueue::<u32>::with_capacity(3));
queue.reserve((1 << 30) as usize);
});
});
// The panic location should be in this file
assert_eq!(&panic_location_file.unwrap(), file!());
Ok(())
}
#[test]
fn future_ext_to_panic_caller() -> Result<(), Box<dyn Error>> {
use tokio::{sync::oneshot, time::Duration};
use tokio_util::future::FutureExt;
let panic_location_file = test_panic(|| {
let (_tx, rx) = oneshot::channel::<()>();
// this panics because there is no runtime available
let _res = rx.timeout(Duration::from_millis(10));
});
// The panic location should be in this file
assert_eq!(&panic_location_file.unwrap(), file!());
Ok(())
}
/// Build a current-thread runtime with all drivers enabled, panicking on failure.
fn basic() -> Runtime {
    let built = tokio::runtime::Builder::new_current_thread()
        .enable_all()
        .build();
    built.unwrap()
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/io_sync_bridge.rs | tokio-util/tests/io_sync_bridge.rs | #![cfg(feature = "io-util")]
#![cfg(not(target_os = "wasi"))] // Wasi doesn't support threads
use std::error::Error;
use std::io::{Cursor, Read, Result as IoResult, Write};
use tokio::io::{AsyncRead, AsyncReadExt};
use tokio_util::io::SyncIoBridge;
// Wrap `r` in a `SyncIoBridge`, read it to the end on a blocking
// thread, and assert the total number of bytes read.
async fn test_reader_len(
    r: impl AsyncRead + Unpin + Send + 'static,
    expected_len: usize,
) -> IoResult<()> {
    let mut r = SyncIoBridge::new(r);
    let res = tokio::task::spawn_blocking(move || {
        // Synchronous `Read` usage must happen off the async runtime thread.
        let mut buf = Vec::new();
        r.read_to_end(&mut buf)?;
        Ok::<_, std::io::Error>(buf)
    })
    .await?;
    assert_eq!(res?.len(), expected_len);
    Ok(())
}
#[tokio::test]
async fn test_async_read_to_sync() -> Result<(), Box<dyn Error>> {
    // Empty and non-empty readers both round-trip through the bridge.
    test_reader_len(tokio::io::empty(), 0).await?;
    let buf = b"hello world";
    test_reader_len(Cursor::new(buf), buf.len()).await?;
    Ok(())
}
#[tokio::test]
async fn test_async_write_to_sync() -> Result<(), Box<dyn Error>> {
    // Blocking `std::io::copy` into the bridged writer should deliver
    // every byte to the wrapped destination.
    let mut dest = Vec::new();
    let src = b"hello world";
    let dest = tokio::task::spawn_blocking(move || -> Result<_, String> {
        let mut w = SyncIoBridge::new(Cursor::new(&mut dest));
        std::io::copy(&mut Cursor::new(src), &mut w).map_err(|e| e.to_string())?;
        Ok(dest)
    })
    .await??;
    assert_eq!(dest.as_slice(), src);
    Ok(())
}
#[tokio::test]
async fn test_into_inner() -> Result<(), Box<dyn Error>> {
    // `into_inner` hands back the original async reader, still usable.
    let mut buf = Vec::new();
    SyncIoBridge::new(tokio::io::empty())
        .into_inner()
        .read_to_end(&mut buf)
        .await
        .unwrap();
    assert_eq!(buf.len(), 0);
    Ok(())
}
#[tokio::test]
async fn test_shutdown() -> Result<(), Box<dyn Error>> {
    // After `shutdown`, further writes through the bridge must fail,
    // while the peer still receives everything written beforehand.
    let (s1, mut s2) = tokio::io::duplex(1024);
    let (_rh, wh) = tokio::io::split(s1);
    tokio::task::spawn_blocking(move || -> std::io::Result<_> {
        let mut wh = SyncIoBridge::new(wh);
        wh.write_all(b"hello")?;
        wh.shutdown()?;
        assert!(wh.write_all(b" world").is_err());
        Ok(())
    })
    .await??;
    let mut buf = vec![];
    s2.read_to_end(&mut buf).await?;
    assert_eq!(buf, b"hello");
    Ok(())
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/abort_on_drop.rs | tokio-util/tests/abort_on_drop.rs | use tokio::{sync::oneshot, task::yield_now};
use tokio_util::task::AbortOnDropHandle;
#[tokio::test]
async fn aborts_task_on_drop() {
    // Dropping the wrapper must abort the spawned task, which we observe
    // through the receiver half being dropped (sender becomes closed).
    let (mut sender, receiver) = oneshot::channel::<bool>();
    let task = AbortOnDropHandle::new(tokio::spawn(async move {
        let _ = receiver.await;
    }));
    drop(task);
    sender.closed().await;
    assert!(sender.is_closed());
}
#[tokio::test]
async fn aborts_task_directly() {
    // Calling `abort` through the wrapper stops the task just like
    // `JoinHandle::abort` would.
    let (mut sender, receiver) = oneshot::channel::<bool>();
    let task = AbortOnDropHandle::new(tokio::spawn(async move {
        let _ = receiver.await;
    }));
    task.abort();
    sender.closed().await;
    assert!(sender.is_closed());
    assert!(task.is_finished());
}
#[tokio::test]
async fn does_not_abort_after_detach() {
    // Detaching surrenders abort-on-drop: the task keeps running even
    // though the wrapper (and the inner join handle) is gone.
    let (tx, rx) = oneshot::channel::<bool>();
    let handle = tokio::spawn(async move {
        let _ = rx.await;
    });
    let handle = AbortOnDropHandle::new(handle);
    handle.detach(); // returns and drops the original join handle
    yield_now().await;
    assert!(!tx.is_closed()); // task is still live
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/udp.rs | tokio-util/tests/udp.rs | #![warn(rust_2018_idioms)]
#![cfg(not(target_os = "wasi"))] // Wasi doesn't support UDP
#![cfg(not(miri))] // No `socket` in Miri.
#![cfg(not(loom))] // No udp / UdpFramed in loom
use tokio::net::UdpSocket;
use tokio_stream::StreamExt;
use tokio_util::codec::{Decoder, Encoder, LinesCodec};
use tokio_util::udp::UdpFramed;
use bytes::{BufMut, BytesMut};
use futures::future::try_join;
use futures::future::FutureExt;
use futures::sink::SinkExt;
use std::io;
use std::sync::Arc;
// On Apple targets the empty-datagram section below is compiled out, which
// leaves the sockets re-extracted into `a_soc`/`b_soc` unused; silence the
// resulting `unused_assignments` lint there.
#[cfg_attr(
    any(
        target_os = "macos",
        target_os = "ios",
        target_os = "tvos",
        target_os = "watchos",
        target_os = "visionos"
    ),
    allow(unused_assignments)
)]
#[tokio::test]
async fn send_framed_byte_codec() -> std::io::Result<()> {
    let mut a_soc = UdpSocket::bind("127.0.0.1:0").await?;
    let mut b_soc = UdpSocket::bind("127.0.0.1:0").await?;
    let a_addr = a_soc.local_addr()?;
    let b_addr = b_soc.local_addr()?;
    // test sending & receiving bytes
    {
        let mut a = UdpFramed::new(a_soc, ByteCodec);
        let mut b = UdpFramed::new(b_soc, ByteCodec);
        let msg = b"4567";
        // Drive send and receive concurrently; a UDP send can otherwise
        // complete before the receiver is polled.
        let send = a.send((msg, b_addr));
        let recv = b.next().map(|e| e.unwrap());
        let (_, received) = try_join(send, recv).await.unwrap();
        let (data, addr) = received;
        assert_eq!(msg, &*data);
        assert_eq!(a_addr, addr);
        // Recover the raw sockets so the second section can reuse them.
        a_soc = a.into_inner();
        b_soc = b.into_inner();
    }
    // NOTE(review): skipped on Apple platforms — presumably zero-length
    // datagrams behave differently there; confirm before relying on it.
    #[cfg(not(any(
        target_os = "macos",
        target_os = "ios",
        target_os = "tvos",
        target_os = "watchos",
        target_os = "visionos"
    )))]
    // test sending & receiving an empty message
    {
        let mut a = UdpFramed::new(a_soc, ByteCodec);
        let mut b = UdpFramed::new(b_soc, ByteCodec);
        let msg = b"";
        let send = a.send((msg, b_addr));
        let recv = b.next().map(|e| e.unwrap());
        let (_, received) = try_join(send, recv).await.unwrap();
        let (data, addr) = received;
        assert_eq!(msg, &*data);
        assert_eq!(a_addr, addr);
    }
    Ok(())
}
/// Trivial test codec: decodes a whole datagram into a `Vec<u8>` and
/// encodes a byte slice verbatim.
pub struct ByteCodec;
impl Decoder for ByteCodec {
    type Item = Vec<u8>;
    type Error = io::Error;

    /// Consume the entire buffer and hand it back as an owned byte vector.
    ///
    /// NOTE: always yields `Some`, even for an empty buffer — with
    /// `UdpFramed`, each decode call corresponds to one datagram.
    fn decode(&mut self, buf: &mut BytesMut) -> Result<Option<Vec<u8>>, io::Error> {
        // `split()` detaches the full contents, equivalent to
        // `split_to(buf.len())`.
        let taken = buf.split();
        Ok(Some(taken.to_vec()))
    }
}
impl Encoder<&[u8]> for ByteCodec {
    type Error = io::Error;

    /// Append `data` verbatim to the output buffer; never fails.
    fn encode(&mut self, data: &[u8], buf: &mut BytesMut) -> Result<(), io::Error> {
        // Reserve the exact amount up front, then copy.
        buf.reserve(data.len());
        buf.put_slice(data);
        Ok(())
    }
}
#[tokio::test]
async fn send_framed_lines_codec() -> std::io::Result<()> {
    // Raw-byte sender on one socket, line-decoding receiver on another.
    let sender_sock = UdpSocket::bind("127.0.0.1:0").await?;
    let receiver_sock = UdpSocket::bind("127.0.0.1:0").await?;
    let sender_addr = sender_sock.local_addr()?;
    let receiver_addr = receiver_sock.local_addr()?;
    let mut sender = UdpFramed::new(sender_sock, ByteCodec);
    let mut receiver = UdpFramed::new(receiver_sock, LinesCodec::new());
    // One datagram carrying three CRLF-terminated lines.
    let payload = b"1\r\n2\r\n3\r\n".to_vec();
    sender.send((&payload, receiver_addr)).await?;
    // The receiver splits the single datagram into individual lines.
    for expected in ["1", "2", "3"] {
        assert_eq!(
            receiver.next().await.unwrap().unwrap(),
            (expected.to_string(), sender_addr)
        );
    }
    Ok(())
}
#[tokio::test]
async fn framed_half() -> std::io::Result<()> {
    // Both framed halves share one socket through `Arc`, so the sender's
    // and receiver's addresses are the same.
    let send_half = Arc::new(UdpSocket::bind("127.0.0.1:0").await?);
    let recv_half = send_half.clone();
    let local = send_half.local_addr()?;
    let dest = recv_half.local_addr()?;
    let mut writer = UdpFramed::new(send_half, ByteCodec);
    let mut reader = UdpFramed::new(recv_half, LinesCodec::new());
    // Two datagrams of three CRLF-terminated lines each.
    let first = b"1\r\n2\r\n3\r\n".to_vec();
    writer.send((&first, dest)).await?;
    let second = b"4\r\n5\r\n6\r\n".to_vec();
    writer.send((&second, dest)).await?;
    // All six lines arrive in order, attributed to the shared address.
    for expected in ["1", "2", "3", "4", "5", "6"] {
        assert_eq!(
            reader.next().await.unwrap().unwrap(),
            (expected.to_string(), local)
        );
    }
    Ok(())
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/codecs.rs | tokio-util/tests/codecs.rs | #![warn(rust_2018_idioms)]
use tokio_util::codec::{AnyDelimiterCodec, BytesCodec, Decoder, Encoder, LinesCodec};
use bytes::{BufMut, Bytes, BytesMut};
#[test]
fn bytes_decoder() {
    // BytesCodec hands back whatever is buffered and drains it.
    let mut codec = BytesCodec::new();
    let buf = &mut BytesMut::new();
    buf.put_slice(b"abc");
    assert_eq!("abc", codec.decode(buf).unwrap().unwrap());
    // A drained buffer decodes to `None`, repeatedly.
    assert_eq!(None, codec.decode(buf).unwrap());
    assert_eq!(None, codec.decode(buf).unwrap());
    // Fresh data makes it produce frames again.
    buf.put_slice(b"a");
    assert_eq!("a", codec.decode(buf).unwrap().unwrap());
}
#[test]
fn bytes_encoder() {
    let mut codec = BytesCodec::new();
    // Default capacity of BytesMut
    #[cfg(target_pointer_width = "64")]
    const INLINE_CAP: usize = 4 * 8 - 1;
    #[cfg(target_pointer_width = "32")]
    const INLINE_CAP: usize = 4 * 4 - 1;
    // Encode a payload one byte past `INLINE_CAP`; must succeed without
    // panicking.
    let mut buf = BytesMut::new();
    codec
        .encode(Bytes::from_static(&[0; INLINE_CAP + 1]), &mut buf)
        .unwrap();
    // Default capacity of Framed Read
    const INITIAL_CAPACITY: usize = 8 * 1024;
    // Likewise, exceed the framed-read initial capacity by one byte.
    let mut buf = BytesMut::with_capacity(INITIAL_CAPACITY);
    codec
        .encode(Bytes::from_static(&[0; INITIAL_CAPACITY + 1]), &mut buf)
        .unwrap();
    // BytesCodec also accepts `BytesMut` as the encoder item type.
    codec
        .encode(BytesMut::from(&b"hello"[..]), &mut buf)
        .unwrap();
}
#[test]
fn lines_decoder() {
    let mut codec = LinesCodec::new();
    let buf = &mut BytesMut::new();
    buf.reserve(200);
    // Mixed `\n` and `\r\n` terminators; the trailing bare `\r` is not a
    // complete line yet.
    buf.put_slice(b"line 1\nline 2\r\nline 3\n\r\n\r");
    assert_eq!("line 1", codec.decode(buf).unwrap().unwrap());
    assert_eq!("line 2", codec.decode(buf).unwrap().unwrap());
    assert_eq!("line 3", codec.decode(buf).unwrap().unwrap());
    // The lone `\r\n` pair decodes as an empty line.
    assert_eq!("", codec.decode(buf).unwrap().unwrap());
    assert_eq!(None, codec.decode(buf).unwrap());
    assert_eq!(None, codec.decode_eof(buf).unwrap());
    buf.put_slice(b"k");
    // Still no newline, so a regular decode yields nothing...
    assert_eq!(None, codec.decode(buf).unwrap());
    // ...but at EOF the leftover "\rk" is flushed as a final line.
    assert_eq!("\rk", codec.decode_eof(buf).unwrap().unwrap());
    assert_eq!(None, codec.decode(buf).unwrap());
    assert_eq!(None, codec.decode_eof(buf).unwrap());
}
#[test]
fn lines_decoder_invalid_utf8() {
    let mut codec = LinesCodec::new();
    let buf = &mut BytesMut::new();
    buf.reserve(200);
    // `\xc3\x28` is an invalid UTF-8 sequence.
    buf.put_slice(b"line 1\xc3\x28");
    // No newline yet, so a regular decode produces nothing...
    assert_eq!(None, codec.decode(buf).unwrap());
    // ...but flushing at EOF hits the invalid bytes and errors.
    assert!(codec.decode_eof(buf).is_err());
    // The failed decode consumed the bad data; the codec has recovered.
    assert_eq!(None, codec.decode_eof(buf).unwrap());
    buf.put_slice(b"line 22222222222222\n");
    assert_eq!("line 22222222222222", codec.decode(buf).unwrap().unwrap());
}
#[test]
fn lines_decoder_max_length() {
    const MAX_LENGTH: usize = 6;
    let mut codec = LinesCodec::new_with_max_length(MAX_LENGTH);
    let buf = &mut BytesMut::new();
    buf.reserve(200);
    buf.put_slice(b"line 1 is too long\nline 2\nline 3\r\nline 4\n\r\n\r");
    // The over-long first line is rejected...
    assert!(codec.decode(buf).is_err());
    // ...and the following in-limit line still decodes normally.
    let line = codec.decode(buf).unwrap().unwrap();
    assert!(line.len() <= MAX_LENGTH, "{line:?}.len() <= {MAX_LENGTH:?}");
    assert_eq!("line 2", line);
    // NOTE(review): "line 3\r\n" is also rejected — "line 3" plus the `\r`
    // appears to exceed the limit before the `\n` is seen; confirm against
    // LinesCodec's length accounting.
    assert!(codec.decode(buf).is_err());
    let line = codec.decode(buf).unwrap().unwrap();
    assert!(line.len() <= MAX_LENGTH, "{line:?}.len() <= {MAX_LENGTH:?}");
    assert_eq!("line 4", line);
    // The lone `\r\n` pair decodes as an empty line.
    let line = codec.decode(buf).unwrap().unwrap();
    assert!(line.len() <= MAX_LENGTH, "{line:?}.len() <= {MAX_LENGTH:?}");
    assert_eq!("", line);
    assert_eq!(None, codec.decode(buf).unwrap());
    assert_eq!(None, codec.decode_eof(buf).unwrap());
    buf.put_slice(b"k");
    assert_eq!(None, codec.decode(buf).unwrap());
    // At EOF the leftover "\rk" (the `\r` carried over from the first
    // `put_slice`) is flushed as a final line.
    let line = codec.decode_eof(buf).unwrap().unwrap();
    assert!(line.len() <= MAX_LENGTH, "{line:?}.len() <= {MAX_LENGTH:?}");
    assert_eq!("\rk", line);
    assert_eq!(None, codec.decode(buf).unwrap());
    assert_eq!(None, codec.decode_eof(buf).unwrap());
    // Line that's one character too long. This could cause an out of bounds
    // error if we peek at the next characters using slice indexing.
    buf.put_slice(b"aaabbbc");
    assert!(codec.decode(buf).is_err());
}
#[test]
fn lines_decoder_max_length_underrun() {
const MAX_LENGTH: usize = 6;
let mut codec = LinesCodec::new_with_max_length(MAX_LENGTH);
let buf = &mut BytesMut::new();
buf.reserve(200);
buf.put_slice(b"line ");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"too l");
assert!(codec.decode(buf).is_err());
buf.put_slice(b"ong\n");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"line 2");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"\n");
assert_eq!("line 2", codec.decode(buf).unwrap().unwrap());
}
#[test]
fn lines_decoder_max_length_bursts() {
const MAX_LENGTH: usize = 10;
let mut codec = LinesCodec::new_with_max_length(MAX_LENGTH);
let buf = &mut BytesMut::new();
buf.reserve(200);
buf.put_slice(b"line ");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"too l");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"ong\n");
assert!(codec.decode(buf).is_err());
}
#[test]
fn lines_decoder_max_length_big_burst() {
const MAX_LENGTH: usize = 10;
let mut codec = LinesCodec::new_with_max_length(MAX_LENGTH);
let buf = &mut BytesMut::new();
buf.reserve(200);
buf.put_slice(b"line ");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"too long!\n");
assert!(codec.decode(buf).is_err());
}
#[test]
fn lines_decoder_max_length_newline_between_decodes() {
const MAX_LENGTH: usize = 5;
let mut codec = LinesCodec::new_with_max_length(MAX_LENGTH);
let buf = &mut BytesMut::new();
buf.reserve(200);
buf.put_slice(b"hello");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"\nworld");
assert_eq!("hello", codec.decode(buf).unwrap().unwrap());
}
// Regression test for [infinite loop bug](https://github.com/tokio-rs/tokio/issues/1483)
#[test]
fn lines_decoder_discard_repeat() {
const MAX_LENGTH: usize = 1;
let mut codec = LinesCodec::new_with_max_length(MAX_LENGTH);
let buf = &mut BytesMut::new();
buf.reserve(200);
buf.put_slice(b"aa");
assert!(codec.decode(buf).is_err());
buf.put_slice(b"a");
assert_eq!(None, codec.decode(buf).unwrap());
}
// Regression test for [subsequent calls to LinesCodec decode does not return the desired results bug](https://github.com/tokio-rs/tokio/issues/3555)
#[test]
fn lines_decoder_max_length_underrun_twice() {
const MAX_LENGTH: usize = 11;
let mut codec = LinesCodec::new_with_max_length(MAX_LENGTH);
let buf = &mut BytesMut::new();
buf.reserve(200);
buf.put_slice(b"line ");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"too very l");
assert!(codec.decode(buf).is_err());
buf.put_slice(b"aaaaaaaaaaaaaaaaaaaaaaa");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"ong\nshort\n");
assert_eq!("short", codec.decode(buf).unwrap().unwrap());
}
#[test]
fn lines_encoder() {
    // Every encoded line is terminated with a single '\n' and appended to
    // the same output buffer.
    let mut codec = LinesCodec::new();
    let mut out = BytesMut::new();
    codec.encode("line 1", &mut out).unwrap();
    assert_eq!("line 1\n", out);
    codec.encode("line 2", &mut out).unwrap();
    assert_eq!("line 1\nline 2\n", out);
}
#[test]
fn any_delimiters_decoder_any_character() {
let mut codec = AnyDelimiterCodec::new(b",;\n\r".to_vec(), b",".to_vec());
let buf = &mut BytesMut::new();
buf.reserve(200);
buf.put_slice(b"chunk 1,chunk 2;chunk 3\n\r");
assert_eq!("chunk 1", codec.decode(buf).unwrap().unwrap());
assert_eq!("chunk 2", codec.decode(buf).unwrap().unwrap());
assert_eq!("chunk 3", codec.decode(buf).unwrap().unwrap());
assert_eq!("", codec.decode(buf).unwrap().unwrap());
assert_eq!(None, codec.decode(buf).unwrap());
assert_eq!(None, codec.decode_eof(buf).unwrap());
buf.put_slice(b"k");
assert_eq!(None, codec.decode(buf).unwrap());
assert_eq!("k", codec.decode_eof(buf).unwrap().unwrap());
assert_eq!(None, codec.decode(buf).unwrap());
assert_eq!(None, codec.decode_eof(buf).unwrap());
}
#[test]
fn any_delimiters_decoder_max_length() {
const MAX_LENGTH: usize = 7;
let mut codec =
AnyDelimiterCodec::new_with_max_length(b",;\n\r".to_vec(), b",".to_vec(), MAX_LENGTH);
let buf = &mut BytesMut::new();
buf.reserve(200);
buf.put_slice(b"chunk 1 is too long\nchunk 2\nchunk 3\r\nchunk 4\n\r\n");
assert!(codec.decode(buf).is_err());
let chunk = codec.decode(buf).unwrap().unwrap();
assert!(
chunk.len() <= MAX_LENGTH,
"{chunk:?}.len() <= {MAX_LENGTH:?}"
);
assert_eq!("chunk 2", chunk);
let chunk = codec.decode(buf).unwrap().unwrap();
assert!(
chunk.len() <= MAX_LENGTH,
"{chunk:?}.len() <= {MAX_LENGTH:?}"
);
assert_eq!("chunk 3", chunk);
// \r\n cause empty chunk
let chunk = codec.decode(buf).unwrap().unwrap();
assert!(
chunk.len() <= MAX_LENGTH,
"{chunk:?}.len() <= {MAX_LENGTH:?}"
);
assert_eq!("", chunk);
let chunk = codec.decode(buf).unwrap().unwrap();
assert!(
chunk.len() <= MAX_LENGTH,
"{chunk:?}.len() <= {MAX_LENGTH:?}"
);
assert_eq!("chunk 4", chunk);
let chunk = codec.decode(buf).unwrap().unwrap();
assert!(
chunk.len() <= MAX_LENGTH,
"{chunk:?}.len() <= {MAX_LENGTH:?}"
);
assert_eq!("", chunk);
let chunk = codec.decode(buf).unwrap().unwrap();
assert!(
chunk.len() <= MAX_LENGTH,
"{chunk:?}.len() <= {MAX_LENGTH:?}"
);
assert_eq!("", chunk);
assert_eq!(None, codec.decode(buf).unwrap());
assert_eq!(None, codec.decode_eof(buf).unwrap());
buf.put_slice(b"k");
assert_eq!(None, codec.decode(buf).unwrap());
let chunk = codec.decode_eof(buf).unwrap().unwrap();
assert!(
chunk.len() <= MAX_LENGTH,
"{chunk:?}.len() <= {MAX_LENGTH:?}"
);
assert_eq!("k", chunk);
assert_eq!(None, codec.decode(buf).unwrap());
assert_eq!(None, codec.decode_eof(buf).unwrap());
// Delimiter that's one character too long. This could cause an out of bounds
// error if we peek at the next characters using slice indexing.
buf.put_slice(b"aaabbbcc");
assert!(codec.decode(buf).is_err());
}
#[test]
fn any_delimiter_decoder_max_length_underrun() {
const MAX_LENGTH: usize = 7;
let mut codec =
AnyDelimiterCodec::new_with_max_length(b",;\n\r".to_vec(), b",".to_vec(), MAX_LENGTH);
let buf = &mut BytesMut::new();
buf.reserve(200);
buf.put_slice(b"chunk ");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"too l");
assert!(codec.decode(buf).is_err());
buf.put_slice(b"ong\n");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"chunk 2");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b",");
assert_eq!("chunk 2", codec.decode(buf).unwrap().unwrap());
}
#[test]
fn any_delimiter_decoder_max_length_underrun_twice() {
const MAX_LENGTH: usize = 11;
let mut codec =
AnyDelimiterCodec::new_with_max_length(b",;\n\r".to_vec(), b",".to_vec(), MAX_LENGTH);
let buf = &mut BytesMut::new();
buf.reserve(200);
buf.put_slice(b"chunk ");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"too very l");
assert!(codec.decode(buf).is_err());
buf.put_slice(b"aaaaaaaaaaaaaaaaaaaaaaa");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"ong\nshort\n");
assert_eq!("short", codec.decode(buf).unwrap().unwrap());
}
#[test]
fn any_delimiter_decoder_max_length_bursts() {
const MAX_LENGTH: usize = 11;
let mut codec =
AnyDelimiterCodec::new_with_max_length(b",;\n\r".to_vec(), b",".to_vec(), MAX_LENGTH);
let buf = &mut BytesMut::new();
buf.reserve(200);
buf.put_slice(b"chunk ");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"too l");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"ong\n");
assert!(codec.decode(buf).is_err());
}
#[test]
fn any_delimiter_decoder_max_length_big_burst() {
const MAX_LENGTH: usize = 11;
let mut codec =
AnyDelimiterCodec::new_with_max_length(b",;\n\r".to_vec(), b",".to_vec(), MAX_LENGTH);
let buf = &mut BytesMut::new();
buf.reserve(200);
buf.put_slice(b"chunk ");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b"too long!\n");
assert!(codec.decode(buf).is_err());
}
#[test]
fn any_delimiter_decoder_max_length_delimiter_between_decodes() {
const MAX_LENGTH: usize = 5;
let mut codec =
AnyDelimiterCodec::new_with_max_length(b",;\n\r".to_vec(), b",".to_vec(), MAX_LENGTH);
let buf = &mut BytesMut::new();
buf.reserve(200);
buf.put_slice(b"hello");
assert_eq!(None, codec.decode(buf).unwrap());
buf.put_slice(b",world");
assert_eq!("hello", codec.decode(buf).unwrap().unwrap());
}
#[test]
fn any_delimiter_decoder_discard_repeat() {
const MAX_LENGTH: usize = 1;
let mut codec =
AnyDelimiterCodec::new_with_max_length(b",;\n\r".to_vec(), b",".to_vec(), MAX_LENGTH);
let buf = &mut BytesMut::new();
buf.reserve(200);
buf.put_slice(b"aa");
assert!(codec.decode(buf).is_err());
buf.put_slice(b"a");
assert_eq!(None, codec.decode(buf).unwrap());
}
#[test]
fn any_delimiter_encoder() {
    // The configured sequence (";--;") is appended after every chunk.
    let mut codec = AnyDelimiterCodec::new(b",".to_vec(), b";--;".to_vec());
    let mut out = BytesMut::new();
    codec.encode("chunk 1", &mut out).unwrap();
    assert_eq!("chunk 1;--;", out);
    codec.encode("chunk 2", &mut out).unwrap();
    assert_eq!("chunk 1;--;chunk 2;--;", out);
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/time_delay_queue.rs | tokio-util/tests/time_delay_queue.rs | #![allow(clippy::disallowed_names)]
#![warn(rust_2018_idioms)]
#![cfg(feature = "full")]
use futures::StreamExt;
use tokio::time::{self, sleep, sleep_until, Duration, Instant};
use tokio_test::{assert_pending, assert_ready, task};
use tokio_util::time::DelayQueue;
// Polls the mock-task-wrapped `DelayQueue` once inside its task context so
// subsequent wakeups can be observed via `is_woken`.
macro_rules! poll {
    ($queue:ident) => {
        $queue.enter(|cx, mut queue| queue.poll_expired(cx))
    };
}
// Asserts the poll result is `Ready(Some(entry))` and evaluates to the
// entry; panics on `Ready(None)` (and, via `assert_ready!`, on `Pending`).
macro_rules! assert_ready_some {
    ($e:expr) => {{
        match assert_ready!($e) {
            Some(v) => v,
            None => panic!("None"),
        }
    }};
}
#[tokio::test]
async fn single_immediate_delay() {
    time::pause();
    let mut queue = task::spawn(DelayQueue::new());
    // Deadline of "now": the entry is due as soon as it is inserted.
    let _key = queue.insert_at("foo", Instant::now());
    // Advance time by 1ms to handle the rounding
    sleep(ms(1)).await;
    assert_ready_some!(poll!(queue));
    // With the only entry expired, the queue completes with `None`.
    let entry = assert_ready!(poll!(queue));
    assert!(entry.is_none())
}
#[tokio::test]
async fn multi_immediate_delays() {
    time::pause();
    let mut queue = task::spawn(DelayQueue::new());
    // Three entries, all due immediately.
    for name in ["1", "2", "3"] {
        let _ = queue.insert_at(name, Instant::now());
    }
    sleep(ms(1)).await;
    // Expiry order among same-deadline entries is unspecified, so drain
    // everything first and sort before comparing.
    let mut res = vec![];
    while res.len() < 3 {
        res.push(assert_ready_some!(poll!(queue)).into_inner());
    }
    assert!(assert_ready!(poll!(queue)).is_none());
    res.sort_unstable();
    assert_eq!(res, ["1", "2", "3"]);
}
#[tokio::test]
async fn single_short_delay() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let _key = queue.insert_at("foo", Instant::now() + ms(5));
assert_pending!(poll!(queue));
sleep(ms(1)).await;
assert!(!queue.is_woken());
sleep(ms(5)).await;
assert!(queue.is_woken());
let entry = assert_ready_some!(poll!(queue));
assert_eq!(*entry.get_ref(), "foo");
let entry = assert_ready!(poll!(queue));
assert!(entry.is_none());
}
#[tokio::test]
#[cfg_attr(miri, ignore)] // Too slow on miri.
async fn multi_delay_at_start() {
time::pause();
let long = 262_144 + 9 * 4096;
let delays = &[1000, 2, 234, long, 60, 10];
let mut queue = task::spawn(DelayQueue::new());
// Setup the delays
for &i in delays {
let _key = queue.insert_at(i, Instant::now() + ms(i));
}
assert_pending!(poll!(queue));
assert!(!queue.is_woken());
let start = Instant::now();
for elapsed in 0..1200 {
println!("elapsed: {elapsed:?}");
let elapsed = elapsed + 1;
tokio::time::sleep_until(start + ms(elapsed)).await;
if delays.contains(&elapsed) {
assert!(queue.is_woken());
assert_ready!(poll!(queue));
assert_pending!(poll!(queue));
} else if queue.is_woken() {
let cascade = &[192, 960];
assert!(
cascade.contains(&elapsed),
"elapsed={} dt={:?}",
elapsed,
Instant::now() - start
);
assert_pending!(poll!(queue));
}
}
println!("finished multi_delay_start");
}
#[tokio::test]
async fn insert_in_past_fires_immediately() {
    println!("running insert_in_past_fires_immediately");
    time::pause();
    let mut queue = task::spawn(DelayQueue::new());
    // Capture "now", then let the paused clock move past it.
    let deadline = Instant::now();
    sleep(ms(10)).await;
    // The deadline is already in the past, so the entry is ready at once.
    queue.insert_at("foo", deadline);
    assert_ready!(poll!(queue));
    println!("finished insert_in_past_fires_immediately");
}
#[tokio::test]
async fn remove_entry() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let key = queue.insert_at("foo", Instant::now() + ms(5));
assert_pending!(poll!(queue));
let entry = queue.remove(&key);
assert_eq!(entry.into_inner(), "foo");
sleep(ms(10)).await;
let entry = assert_ready!(poll!(queue));
assert!(entry.is_none());
}
#[tokio::test]
async fn reset_entry() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
let key = queue.insert_at("foo", now + ms(5));
assert_pending!(poll!(queue));
sleep(ms(1)).await;
queue.reset_at(&key, now + ms(10));
assert_pending!(poll!(queue));
sleep(ms(7)).await;
assert!(!queue.is_woken());
assert_pending!(poll!(queue));
sleep(ms(3)).await;
assert!(queue.is_woken());
let entry = assert_ready_some!(poll!(queue));
assert_eq!(*entry.get_ref(), "foo");
let entry = assert_ready!(poll!(queue));
assert!(entry.is_none())
}
// Reproduces tokio-rs/tokio#849.
#[tokio::test]
async fn reset_much_later() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
sleep(ms(1)).await;
let key = queue.insert_at("foo", now + ms(200));
assert_pending!(poll!(queue));
sleep(ms(3)).await;
queue.reset_at(&key, now + ms(10));
sleep(ms(20)).await;
assert!(queue.is_woken());
}
// Reproduces tokio-rs/tokio#849.
#[tokio::test]
async fn reset_twice() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
sleep(ms(1)).await;
let key = queue.insert_at("foo", now + ms(200));
assert_pending!(poll!(queue));
sleep(ms(3)).await;
queue.reset_at(&key, now + ms(50));
sleep(ms(20)).await;
queue.reset_at(&key, now + ms(40));
sleep(ms(20)).await;
assert!(queue.is_woken());
}
/// Regression test: Given an entry inserted with a deadline in the past, so
/// that it is placed directly on the expired queue, reset the entry to a
/// deadline in the future. Validate that this leaves the entry and queue in an
/// internally consistent state by running an additional reset on the entry
/// before polling it to completion.
#[tokio::test]
async fn repeatedly_reset_entry_inserted_as_expired() {
time::pause();
// Instants before the start of the test seem to break in wasm.
time::sleep(ms(1000)).await;
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
let key = queue.insert_at("foo", now - ms(100));
queue.reset_at(&key, now + ms(100));
queue.reset_at(&key, now + ms(50));
assert_pending!(poll!(queue));
time::sleep_until(now + ms(60)).await;
assert!(queue.is_woken());
let entry = assert_ready_some!(poll!(queue)).into_inner();
assert_eq!(entry, "foo");
let entry = assert_ready!(poll!(queue));
assert!(entry.is_none());
}
#[tokio::test]
async fn remove_expired_item() {
    time::pause();
    let mut queue = DelayQueue::new();
    // Insert with a deadline that has already passed, then remove by key:
    // removal must still return the stored value.
    let deadline = Instant::now();
    sleep(ms(10)).await;
    let key = queue.insert_at("foo", deadline);
    let removed = queue.remove(&key);
    assert_eq!(removed.into_inner(), "foo");
}
/// Regression test: it should be possible to remove entries which fall in the
/// 0th slot of the internal timer wheel — that is, entries whose expiration
/// (a) falls at the beginning of one of the wheel's hierarchical levels and (b)
/// is equal to the wheel's current elapsed time.
#[tokio::test]
async fn remove_at_timer_wheel_threshold() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
let key1 = queue.insert_at("foo", now + ms(64));
let key2 = queue.insert_at("bar", now + ms(64));
sleep(ms(80)).await;
let entry = assert_ready_some!(poll!(queue)).into_inner();
match entry {
"foo" => {
let entry = queue.remove(&key2).into_inner();
assert_eq!(entry, "bar");
}
"bar" => {
let entry = queue.remove(&key1).into_inner();
assert_eq!(entry, "foo");
}
other => panic!("other: {other:?}"),
}
}
#[tokio::test]
async fn expires_before_last_insert() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
queue.insert_at("foo", now + ms(10_000));
// Delay should be set to 8.192s here.
assert_pending!(poll!(queue));
// Delay should be set to the delay of the new item here
queue.insert_at("bar", now + ms(600));
assert_pending!(poll!(queue));
sleep(ms(600)).await;
assert!(queue.is_woken());
let entry = assert_ready_some!(poll!(queue)).into_inner();
assert_eq!(entry, "bar");
}
#[tokio::test]
async fn multi_reset() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
let one = queue.insert_at("one", now + ms(200));
let two = queue.insert_at("two", now + ms(250));
assert_pending!(poll!(queue));
queue.reset_at(&one, now + ms(300));
queue.reset_at(&two, now + ms(350));
queue.reset_at(&one, now + ms(400));
sleep(ms(310)).await;
assert_pending!(poll!(queue));
sleep(ms(50)).await;
let entry = assert_ready_some!(poll!(queue));
assert_eq!(*entry.get_ref(), "two");
assert_pending!(poll!(queue));
sleep(ms(50)).await;
let entry = assert_ready_some!(poll!(queue));
assert_eq!(*entry.get_ref(), "one");
let entry = assert_ready!(poll!(queue));
assert!(entry.is_none())
}
#[tokio::test]
async fn expire_first_key_when_reset_to_expire_earlier() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
let one = queue.insert_at("one", now + ms(200));
queue.insert_at("two", now + ms(250));
assert_pending!(poll!(queue));
queue.reset_at(&one, now + ms(100));
sleep(ms(100)).await;
assert!(queue.is_woken());
let entry = assert_ready_some!(poll!(queue)).into_inner();
assert_eq!(entry, "one");
}
#[tokio::test]
async fn expire_second_key_when_reset_to_expire_earlier() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
queue.insert_at("one", now + ms(200));
let two = queue.insert_at("two", now + ms(250));
assert_pending!(poll!(queue));
queue.reset_at(&two, now + ms(100));
sleep(ms(100)).await;
assert!(queue.is_woken());
let entry = assert_ready_some!(poll!(queue)).into_inner();
assert_eq!(entry, "two");
}
#[tokio::test]
async fn reset_first_expiring_item_to_expire_later() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
let one = queue.insert_at("one", now + ms(200));
let _two = queue.insert_at("two", now + ms(250));
assert_pending!(poll!(queue));
queue.reset_at(&one, now + ms(300));
sleep(ms(250)).await;
assert!(queue.is_woken());
let entry = assert_ready_some!(poll!(queue)).into_inner();
assert_eq!(entry, "two");
}
#[tokio::test]
async fn insert_before_first_after_poll() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
let _one = queue.insert_at("one", now + ms(200));
assert_pending!(poll!(queue));
let _two = queue.insert_at("two", now + ms(100));
sleep(ms(99)).await;
assert_pending!(poll!(queue));
sleep(ms(1)).await;
assert!(queue.is_woken());
let entry = assert_ready_some!(poll!(queue)).into_inner();
assert_eq!(entry, "two");
}
#[tokio::test]
async fn insert_after_ready_poll() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
queue.insert_at("1", now + ms(100));
queue.insert_at("2", now + ms(100));
queue.insert_at("3", now + ms(100));
assert_pending!(poll!(queue));
sleep(ms(100)).await;
assert!(queue.is_woken());
let mut res = vec![];
while res.len() < 3 {
let entry = assert_ready_some!(poll!(queue));
res.push(entry.into_inner());
queue.insert_at("foo", now + ms(500));
}
res.sort_unstable();
assert_eq!("1", res[0]);
assert_eq!("2", res[1]);
assert_eq!("3", res[2]);
}
#[tokio::test]
async fn reset_later_after_slot_starts() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
let foo = queue.insert_at("foo", now + ms(100));
assert_pending!(poll!(queue));
sleep_until(now + Duration::from_millis(80)).await;
assert!(!queue.is_woken());
// At this point the queue hasn't been polled, so `elapsed` on the wheel
// for the queue is still at 0 and hence the 1ms resolution slots cover
// [0-64). Resetting the time on the entry to 120 causes it to get put in
// the [64-128) slot. As the queue knows that the first entry is within
// that slot, but doesn't know when, it must wake immediately to advance
// the wheel.
queue.reset_at(&foo, now + ms(120));
assert!(queue.is_woken());
assert_pending!(poll!(queue));
sleep_until(now + Duration::from_millis(119)).await;
assert!(!queue.is_woken());
sleep(ms(1)).await;
assert!(queue.is_woken());
let entry = assert_ready_some!(poll!(queue)).into_inner();
assert_eq!(entry, "foo");
}
#[tokio::test]
async fn reset_inserted_expired() {
time::pause();
// Instants before the start of the test seem to break in wasm.
time::sleep(ms(1000)).await;
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
let key = queue.insert_at("foo", now - ms(100));
// this causes the panic described in #2473
queue.reset_at(&key, now + ms(100));
assert_eq!(1, queue.len());
sleep(ms(200)).await;
let entry = assert_ready_some!(poll!(queue)).into_inner();
assert_eq!(entry, "foo");
assert_eq!(queue.len(), 0);
}
#[tokio::test]
async fn reset_earlier_after_slot_starts() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
let foo = queue.insert_at("foo", now + ms(200));
assert_pending!(poll!(queue));
sleep_until(now + Duration::from_millis(80)).await;
assert!(!queue.is_woken());
// At this point the queue hasn't been polled, so `elapsed` on the wheel
// for the queue is still at 0 and hence the 1ms resolution slots cover
// [0-64). Resetting the time on the entry to 120 causes it to get put in
// the [64-128) slot. As the queue knows that the first entry is within
// that slot, but doesn't know when, it must wake immediately to advance
// the wheel.
queue.reset_at(&foo, now + ms(120));
assert!(queue.is_woken());
assert_pending!(poll!(queue));
sleep_until(now + Duration::from_millis(119)).await;
assert!(!queue.is_woken());
sleep(ms(1)).await;
assert!(queue.is_woken());
let entry = assert_ready_some!(poll!(queue)).into_inner();
assert_eq!(entry, "foo");
}
#[tokio::test]
async fn insert_in_past_after_poll_fires_immediately() {
time::pause();
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
queue.insert_at("foo", now + ms(200));
assert_pending!(poll!(queue));
sleep(ms(80)).await;
assert!(!queue.is_woken());
queue.insert_at("bar", now + ms(40));
assert!(queue.is_woken());
let entry = assert_ready_some!(poll!(queue)).into_inner();
assert_eq!(entry, "bar");
}
#[tokio::test]
async fn delay_queue_poll_expired_when_empty() {
    let mut delay_queue = task::spawn(DelayQueue::new());
    let key = delay_queue.insert(0, std::time::Duration::from_secs(10));
    assert_pending!(poll!(delay_queue));
    // Removing the only entry empties the queue; polling an empty queue
    // must complete with `None` rather than stay pending.
    delay_queue.remove(&key);
    let expired = assert_ready!(poll!(delay_queue));
    assert!(expired.is_none());
}
#[tokio::test(start_paused = true)]
async fn compact_expire_empty() {
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
queue.insert_at("foo1", now + ms(10));
queue.insert_at("foo2", now + ms(10));
sleep(ms(10)).await;
let mut res = vec![];
while res.len() < 2 {
let entry = assert_ready_some!(poll!(queue));
res.push(entry.into_inner());
}
queue.compact();
assert_eq!(queue.len(), 0);
assert_eq!(queue.capacity(), 0);
}
#[tokio::test(start_paused = true)]
async fn compact_remove_empty() {
    let mut queue = task::spawn(DelayQueue::new());
    let now = Instant::now();
    // Insert two entries and remove both before they expire.
    let first = queue.insert_at("foo1", now + ms(10));
    let second = queue.insert_at("foo2", now + ms(10));
    queue.remove(&first);
    queue.remove(&second);
    // Compacting the now-empty queue should release all storage.
    queue.compact();
    assert_eq!(queue.len(), 0);
    assert_eq!(queue.capacity(), 0);
}
#[tokio::test(start_paused = true)]
// Trigger a re-mapping of keys in the slab due to a `compact` call and
// test removal of re-mapped keys
async fn compact_remove_remapped_keys() {
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
queue.insert_at("foo1", now + ms(10));
queue.insert_at("foo2", now + ms(10));
// should be assigned indices 3 and 4
let key3 = queue.insert_at("foo3", now + ms(20));
let key4 = queue.insert_at("foo4", now + ms(20));
sleep(ms(10)).await;
let mut res = vec![];
while res.len() < 2 {
let entry = assert_ready_some!(poll!(queue));
res.push(entry.into_inner());
}
// items corresponding to `foo3` and `foo4` will be assigned
// new indices here
queue.compact();
queue.insert_at("foo5", now + ms(10));
// test removal of re-mapped keys
let expired3 = queue.remove(&key3);
let expired4 = queue.remove(&key4);
assert_eq!(expired3.into_inner(), "foo3");
assert_eq!(expired4.into_inner(), "foo4");
queue.compact();
assert_eq!(queue.len(), 1);
assert_eq!(queue.capacity(), 1);
}
#[tokio::test(start_paused = true)]
async fn compact_change_deadline() {
let mut queue = task::spawn(DelayQueue::new());
let mut now = Instant::now();
queue.insert_at("foo1", now + ms(10));
queue.insert_at("foo2", now + ms(10));
// should be assigned indices 3 and 4
queue.insert_at("foo3", now + ms(20));
let key4 = queue.insert_at("foo4", now + ms(20));
sleep(ms(10)).await;
let mut res = vec![];
while res.len() < 2 {
let entry = assert_ready_some!(poll!(queue));
res.push(entry.into_inner());
}
// items corresponding to `foo3` and `foo4` should be assigned
// new indices
queue.compact();
now = Instant::now();
queue.insert_at("foo5", now + ms(10));
let key6 = queue.insert_at("foo6", now + ms(10));
queue.reset_at(&key4, now + ms(20));
queue.reset_at(&key6, now + ms(20));
// foo3 and foo5 will expire
sleep(ms(10)).await;
while res.len() < 4 {
let entry = assert_ready_some!(poll!(queue));
res.push(entry.into_inner());
}
sleep(ms(10)).await;
while res.len() < 6 {
let entry = assert_ready_some!(poll!(queue));
res.push(entry.into_inner());
}
let entry = assert_ready!(poll!(queue));
assert!(entry.is_none());
}
#[tokio::test(start_paused = true)]
async fn item_expiry_greater_than_wheel() {
// This function tests that a delay queue that has existed for at least 2^36 milliseconds won't panic when a new item is inserted.
let mut queue = DelayQueue::new();
for _ in 0..2 {
tokio::time::advance(Duration::from_millis(1 << 35)).await;
queue.insert(0, Duration::from_millis(0));
queue.next().await;
}
// This should not panic
let no_panic = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
queue.insert(1, Duration::from_millis(1));
}));
assert!(no_panic.is_ok());
}
#[cfg_attr(target_os = "wasi", ignore = "FIXME: Does not seem to work with WASI")]
#[tokio::test(start_paused = true)]
#[cfg(panic = "unwind")]
async fn remove_after_compact() {
let now = Instant::now();
let mut queue = DelayQueue::new();
let foo_key = queue.insert_at("foo", now + ms(10));
queue.insert_at("bar", now + ms(20));
queue.remove(&foo_key);
queue.compact();
let panic = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
queue.remove(&foo_key);
}));
assert!(panic.is_err());
}
#[cfg_attr(target_os = "wasi", ignore = "FIXME: Does not seem to work with WASI")]
#[tokio::test(start_paused = true)]
#[cfg(panic = "unwind")]
async fn remove_after_compact_poll() {
let now = Instant::now();
let mut queue = task::spawn(DelayQueue::new());
let foo_key = queue.insert_at("foo", now + ms(10));
queue.insert_at("bar", now + ms(20));
sleep(ms(10)).await;
assert_eq!(assert_ready_some!(poll!(queue)).key(), foo_key);
queue.compact();
let panic = std::panic::catch_unwind(std::panic::AssertUnwindSafe(|| {
queue.remove(&foo_key);
}));
assert!(panic.is_err());
}
#[tokio::test(start_paused = true)]
async fn peek() {
let mut queue = task::spawn(DelayQueue::new());
let now = Instant::now();
let key = queue.insert_at("foo", now + ms(5));
let key2 = queue.insert_at("bar", now);
let key3 = queue.insert_at("baz", now + ms(10));
assert_eq!(queue.peek(), Some(key2));
sleep(ms(6)).await;
assert_eq!(queue.peek(), Some(key2));
let entry = assert_ready_some!(poll!(queue));
assert_eq!(entry.get_ref(), &"bar");
assert_eq!(queue.peek(), Some(key));
let entry = assert_ready_some!(poll!(queue));
assert_eq!(entry.get_ref(), &"foo");
assert_eq!(queue.peek(), Some(key3));
assert_pending!(poll!(queue));
sleep(ms(5)).await;
assert_eq!(queue.peek(), Some(key3));
let entry = assert_ready_some!(poll!(queue));
assert_eq!(entry.get_ref(), &"baz");
assert!(queue.peek().is_none());
}
#[tokio::test(start_paused = true)]
async fn wake_after_remove_last() {
let mut queue = task::spawn(DelayQueue::new());
let key = queue.insert("foo", ms(1000));
assert_pending!(poll!(queue));
queue.remove(&key);
assert!(queue.is_woken());
assert!(assert_ready!(poll!(queue)).is_none());
}
fn ms(n: u64) -> Duration {
Duration::from_millis(n)
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/framed_read.rs | tokio-util/tests/framed_read.rs | #![warn(rust_2018_idioms)]
use tokio::io::{AsyncRead, ReadBuf};
use tokio_test::assert_ready;
use tokio_test::task;
use tokio_util::codec::{Decoder, FramedRead};
use bytes::{Buf, BytesMut};
use futures::Stream;
use std::collections::VecDeque;
use std::io;
use std::pin::Pin;
use std::task::Poll::{Pending, Ready};
use std::task::{Context, Poll};
macro_rules! mock {
($($x:expr,)*) => {{
let mut v = VecDeque::new();
v.extend(vec![$($x),*]);
Mock { calls: v }
}};
}
macro_rules! assert_read {
($e:expr, $n:expr) => {{
let val = assert_ready!($e);
assert_eq!(val.unwrap().unwrap(), $n);
}};
}
macro_rules! pin {
($id:ident) => {
Pin::new(&mut $id)
};
}
struct U32Decoder;
impl Decoder for U32Decoder {
type Item = u32;
type Error = io::Error;
fn decode(&mut self, buf: &mut BytesMut) -> io::Result<Option<u32>> {
if buf.len() < 4 {
return Ok(None);
}
let n = buf.split_to(4).get_u32();
Ok(Some(n))
}
}
struct U64Decoder;
impl Decoder for U64Decoder {
type Item = u64;
type Error = io::Error;
fn decode(&mut self, buf: &mut BytesMut) -> io::Result<Option<u64>> {
if buf.len() < 8 {
return Ok(None);
}
let n = buf.split_to(8).get_u64();
Ok(Some(n))
}
}
#[test]
fn read_multi_frame_in_packet() {
let mut task = task::spawn(());
let mock = mock! {
Ok(b"\x00\x00\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02".to_vec()),
};
let mut framed = FramedRead::new(mock, U32Decoder);
task.enter(|cx, _| {
assert_read!(pin!(framed).poll_next(cx), 0);
assert_read!(pin!(framed).poll_next(cx), 1);
assert_read!(pin!(framed).poll_next(cx), 2);
assert!(assert_ready!(pin!(framed).poll_next(cx)).is_none());
});
}
#[test]
fn read_multi_frame_across_packets() {
let mut task = task::spawn(());
let mock = mock! {
Ok(b"\x00\x00\x00\x00".to_vec()),
Ok(b"\x00\x00\x00\x01".to_vec()),
Ok(b"\x00\x00\x00\x02".to_vec()),
};
let mut framed = FramedRead::new(mock, U32Decoder);
task.enter(|cx, _| {
assert_read!(pin!(framed).poll_next(cx), 0);
assert_read!(pin!(framed).poll_next(cx), 1);
assert_read!(pin!(framed).poll_next(cx), 2);
assert!(assert_ready!(pin!(framed).poll_next(cx)).is_none());
});
}
#[test]
fn read_multi_frame_in_packet_after_codec_changed() {
let mut task = task::spawn(());
let mock = mock! {
Ok(b"\x00\x00\x00\x04\x00\x00\x00\x00\x00\x00\x00\x08".to_vec()),
};
let mut framed = FramedRead::new(mock, U32Decoder);
task.enter(|cx, _| {
assert_read!(pin!(framed).poll_next(cx), 0x04);
let mut framed = framed.map_decoder(|_| U64Decoder);
assert_read!(pin!(framed).poll_next(cx), 0x08);
assert!(assert_ready!(pin!(framed).poll_next(cx)).is_none());
});
}
#[test]
fn read_not_ready() {
let mut task = task::spawn(());
let mock = mock! {
Err(io::Error::new(io::ErrorKind::WouldBlock, "")),
Ok(b"\x00\x00\x00\x00".to_vec()),
Ok(b"\x00\x00\x00\x01".to_vec()),
};
let mut framed = FramedRead::new(mock, U32Decoder);
task.enter(|cx, _| {
assert!(pin!(framed).poll_next(cx).is_pending());
assert_read!(pin!(framed).poll_next(cx), 0);
assert_read!(pin!(framed).poll_next(cx), 1);
assert!(assert_ready!(pin!(framed).poll_next(cx)).is_none());
});
}
#[test]
fn read_partial_then_not_ready() {
let mut task = task::spawn(());
let mock = mock! {
Ok(b"\x00\x00".to_vec()),
Err(io::Error::new(io::ErrorKind::WouldBlock, "")),
Ok(b"\x00\x00\x00\x00\x00\x01\x00\x00\x00\x02".to_vec()),
};
let mut framed = FramedRead::new(mock, U32Decoder);
task.enter(|cx, _| {
assert!(pin!(framed).poll_next(cx).is_pending());
assert_read!(pin!(framed).poll_next(cx), 0);
assert_read!(pin!(framed).poll_next(cx), 1);
assert_read!(pin!(framed).poll_next(cx), 2);
assert!(assert_ready!(pin!(framed).poll_next(cx)).is_none());
});
}
#[test]
fn read_err() {
let mut task = task::spawn(());
let mock = mock! {
Err(io::Error::new(io::ErrorKind::Other, "")),
};
let mut framed = FramedRead::new(mock, U32Decoder);
task.enter(|cx, _| {
assert_eq!(
io::ErrorKind::Other,
assert_ready!(pin!(framed).poll_next(cx))
.unwrap()
.unwrap_err()
.kind()
)
});
}
#[test]
fn read_partial_then_err() {
let mut task = task::spawn(());
let mock = mock! {
Ok(b"\x00\x00".to_vec()),
Err(io::Error::new(io::ErrorKind::Other, "")),
};
let mut framed = FramedRead::new(mock, U32Decoder);
task.enter(|cx, _| {
assert_eq!(
io::ErrorKind::Other,
assert_ready!(pin!(framed).poll_next(cx))
.unwrap()
.unwrap_err()
.kind()
)
});
}
#[test]
fn read_partial_would_block_then_err() {
let mut task = task::spawn(());
let mock = mock! {
Ok(b"\x00\x00".to_vec()),
Err(io::Error::new(io::ErrorKind::WouldBlock, "")),
Err(io::Error::new(io::ErrorKind::Other, "")),
};
let mut framed = FramedRead::new(mock, U32Decoder);
task.enter(|cx, _| {
assert!(pin!(framed).poll_next(cx).is_pending());
assert_eq!(
io::ErrorKind::Other,
assert_ready!(pin!(framed).poll_next(cx))
.unwrap()
.unwrap_err()
.kind()
)
});
}
#[test]
fn huge_size() {
let mut task = task::spawn(());
let data = &[0; 32 * 1024][..];
let mut framed = FramedRead::new(data, BigDecoder);
task.enter(|cx, _| {
assert_read!(pin!(framed).poll_next(cx), 0);
assert!(assert_ready!(pin!(framed).poll_next(cx)).is_none());
});
struct BigDecoder;
impl Decoder for BigDecoder {
type Item = u32;
type Error = io::Error;
fn decode(&mut self, buf: &mut BytesMut) -> io::Result<Option<u32>> {
if buf.len() < 32 * 1024 {
return Ok(None);
}
buf.advance(32 * 1024);
Ok(Some(0))
}
}
}
#[test]
fn data_remaining_is_error() {
let mut task = task::spawn(());
let slice = &[0; 5][..];
let mut framed = FramedRead::new(slice, U32Decoder);
task.enter(|cx, _| {
assert_read!(pin!(framed).poll_next(cx), 0);
assert!(assert_ready!(pin!(framed).poll_next(cx)).unwrap().is_err());
});
}
#[test]
fn multi_frames_on_eof() {
let mut task = task::spawn(());
struct MyDecoder(Vec<u32>);
impl Decoder for MyDecoder {
type Item = u32;
type Error = io::Error;
fn decode(&mut self, _buf: &mut BytesMut) -> io::Result<Option<u32>> {
unreachable!();
}
fn decode_eof(&mut self, _buf: &mut BytesMut) -> io::Result<Option<u32>> {
if self.0.is_empty() {
return Ok(None);
}
Ok(Some(self.0.remove(0)))
}
}
let mut framed = FramedRead::new(mock!(), MyDecoder(vec![0, 1, 2, 3]));
task.enter(|cx, _| {
assert_read!(pin!(framed).poll_next(cx), 0);
assert_read!(pin!(framed).poll_next(cx), 1);
assert_read!(pin!(framed).poll_next(cx), 2);
assert_read!(pin!(framed).poll_next(cx), 3);
assert!(assert_ready!(pin!(framed).poll_next(cx)).is_none());
});
}
#[test]
fn read_eof_then_resume() {
let mut task = task::spawn(());
let mock = mock! {
Ok(b"\x00\x00\x00\x01".to_vec()),
Ok(b"".to_vec()),
Ok(b"\x00\x00\x00\x02".to_vec()),
Ok(b"".to_vec()),
Ok(b"\x00\x00\x00\x03".to_vec()),
};
let mut framed = FramedRead::new(mock, U32Decoder);
task.enter(|cx, _| {
assert_read!(pin!(framed).poll_next(cx), 1);
assert!(assert_ready!(pin!(framed).poll_next(cx)).is_none());
assert_read!(pin!(framed).poll_next(cx), 2);
assert!(assert_ready!(pin!(framed).poll_next(cx)).is_none());
assert_read!(pin!(framed).poll_next(cx), 3);
assert!(assert_ready!(pin!(framed).poll_next(cx)).is_none());
assert!(assert_ready!(pin!(framed).poll_next(cx)).is_none());
});
}
// ===== Mock ======
struct Mock {
calls: VecDeque<io::Result<Vec<u8>>>,
}
impl AsyncRead for Mock {
fn poll_read(
mut self: Pin<&mut Self>,
_cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<io::Result<()>> {
use io::ErrorKind::WouldBlock;
match self.calls.pop_front() {
Some(Ok(data)) => {
debug_assert!(buf.remaining() >= data.len());
buf.put_slice(&data);
Ready(Ok(()))
}
Some(Err(ref e)) if e.kind() == WouldBlock => Pending,
Some(Err(e)) => Ready(Err(e)),
None => Ready(Ok(())),
}
}
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/io_simplex.rs | tokio-util/tests/io_simplex.rs | use futures::pin_mut;
use futures_test::task::noop_context;
use std::io::IoSlice;
use std::task::Poll;
use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt, ReadBuf};
use tokio_test::task::spawn;
use tokio_test::{assert_pending, assert_ready};
use tokio_util::io::simplex;
/// Sanity check for single-threaded operation.
#[tokio::test]
async fn single_thread() {
const N: usize = 64;
const MSG: &[u8] = b"Hello, world!";
const CAPS: &[usize] = &[1, MSG.len() / 2, MSG.len() - 1, MSG.len(), MSG.len() + 1];
// test different buffer capacities to cover edge cases
for &capacity in CAPS {
let (mut tx, mut rx) = simplex::new(capacity);
for _ in 0..N {
let mut read = 0;
let mut write = 0;
let mut buf = [0; MSG.len()];
while read < MSG.len() || write < MSG.len() {
if write < MSG.len() {
let n = tx.write(&MSG[write..]).await.unwrap();
write += n;
}
if read < MSG.len() {
let n = rx.read(&mut buf[read..]).await.unwrap();
read += n;
}
}
assert_eq!(&buf[..], MSG);
}
}
}
/// Sanity check for multi-threaded operation.
#[test]
#[cfg(not(target_os = "wasi"))] // No thread on wasi.
fn multi_thread() {
use futures::executor::block_on;
use std::thread;
const N: usize = 64;
const MSG: &[u8] = b"Hello, world!";
const CAPS: &[usize] = &[1, MSG.len() / 2, MSG.len() - 1, MSG.len(), MSG.len() + 1];
// test different buffer capacities to cover edge cases
for &capacity in CAPS {
let (mut tx, mut rx) = simplex::new(capacity);
let jh0 = thread::spawn(move || {
block_on(async {
let mut buf = vec![0; MSG.len()];
for _ in 0..N {
rx.read_exact(&mut buf).await.unwrap();
assert_eq!(&buf[..], MSG);
buf.clear();
buf.resize(MSG.len(), 0);
}
});
});
let jh1 = thread::spawn(move || {
block_on(async {
for _ in 0..N {
tx.write_all(MSG).await.unwrap();
}
});
});
jh0.join().unwrap();
jh1.join().unwrap();
}
}
#[test]
#[should_panic(expected = "capacity must be greater than zero")]
fn zero_capacity() {
let _ = simplex::new(0);
}
/// The `Receiver::poll_read` should return `Poll::Ready(Ok(()))`
/// if the `ReadBuf` has zero remaining capacity.
#[tokio::test]
async fn read_buf_is_full() {
let (_tx, rx) = simplex::new(32);
let mut buf = ReadBuf::new(&mut []);
tokio::pin!(rx);
assert_ready!(rx.as_mut().poll_read(&mut noop_context(), &mut buf)).unwrap();
assert_eq!(buf.filled().len(), 0);
}
/// The `Sender::poll_write` should return `Poll::Ready(Ok(0))`
/// if the input buffer has zero length.
#[tokio::test]
async fn write_buf_is_empty() {
let (tx, _rx) = simplex::new(32);
tokio::pin!(tx);
let n = assert_ready!(tx.as_mut().poll_write(&mut noop_context(), &[])).unwrap();
assert_eq!(n, 0);
}
/// The `Sender` should returns error if the `Receiver` has been dropped.
#[tokio::test]
async fn drop_receiver_0() {
let (mut tx, rx) = simplex::new(32);
drop(rx);
tx.write_u8(1).await.unwrap_err();
}
/// The `Sender` should be woken up if the `Receiver` has been dropped.
#[tokio::test]
async fn drop_receiver_1() {
let (mut tx, rx) = simplex::new(1);
let mut write_task = spawn(tx.write_u16(1));
assert_pending!(write_task.poll());
assert!(!write_task.is_woken());
drop(rx);
assert!(write_task.is_woken());
}
/// The `Receiver` should return error if:
///
/// - The `Sender` has been dropped.
/// - AND there is no remaining data in the buffer.
#[tokio::test]
async fn drop_sender_0() {
const MSG: &[u8] = b"Hello, world!";
let (tx, mut rx) = simplex::new(32);
drop(tx);
let mut buf = vec![0; MSG.len()];
rx.read_exact(&mut buf).await.unwrap_err();
}
/// The `Receiver` should be woken up if:
///
/// - The `Sender` has been dropped.
/// - AND there is still remaining data in the buffer.
#[tokio::test]
async fn drop_sender_1() {
let (mut tx, mut rx) = simplex::new(2);
let mut buf = vec![];
let mut read_task = spawn(rx.read_to_end(&mut buf));
assert_pending!(read_task.poll());
tx.write_u8(1).await.unwrap();
assert_pending!(read_task.poll());
assert!(!read_task.is_woken());
drop(tx);
assert!(read_task.is_woken());
read_task.await.unwrap();
assert_eq!(buf, vec![1]);
}
/// All following calls to `Sender::poll_write` and `Sender::poll_flush`
/// should return error after `shutdown` has been called.
#[tokio::test]
async fn shutdown_sender_0() {
const MSG: &[u8] = b"Hello, world!";
let (mut tx, _rx) = simplex::new(32);
tx.shutdown().await.unwrap();
tx.write_all(MSG).await.unwrap_err();
tx.flush().await.unwrap_err();
}
/// The `Sender::poll_shutdown` should be called multiple times
/// without error.
#[tokio::test]
async fn shutdown_sender_1() {
let (mut tx, _rx) = simplex::new(32);
tx.shutdown().await.unwrap();
tx.shutdown().await.unwrap();
}
/// The `Sender::poll_shutdown` should wake up the `Receiver`
#[tokio::test]
async fn shutdown_sender_2() {
let (mut tx, mut rx) = simplex::new(32);
let mut buf = vec![];
let mut read_task = spawn(rx.read_to_end(&mut buf));
assert_pending!(read_task.poll());
tx.write_u8(1).await.unwrap();
assert_pending!(read_task.poll());
assert!(!read_task.is_woken());
tx.shutdown().await.unwrap();
assert!(read_task.is_woken());
read_task.await.unwrap();
assert_eq!(buf, vec![1]);
}
/// Both `Sender` and `Receiver` should yield periodically
/// in a tight-loop.
#[tokio::test]
#[cfg(feature = "rt")]
async fn cooperative_scheduling() {
// this magic number is copied from
// https://github.com/tokio-rs/tokio/blob/925c614c89d0a26777a334612e2ed6ad0e7935c3/tokio/src/task/coop/mod.rs#L116
const INITIAL_BUDGET: usize = 128;
let (tx, _rx) = simplex::new(INITIAL_BUDGET * 2);
pin_mut!(tx);
let mut is_pending = false;
for _ in 0..INITIAL_BUDGET + 1 {
match tx.as_mut().poll_write(&mut noop_context(), &[0u8; 1]) {
Poll::Pending => {
is_pending = true;
break;
}
Poll::Ready(Ok(1)) => {}
Poll::Ready(Ok(n)) => panic!("wrote too many bytes: {n}"),
Poll::Ready(Err(e)) => panic!("{e}"),
}
}
assert!(is_pending);
let (tx, _rx) = simplex::new(INITIAL_BUDGET * 2);
pin_mut!(tx);
let mut is_pending = false;
let io_slices = &[IoSlice::new(&[0u8; 1])];
for _ in 0..INITIAL_BUDGET + 1 {
match tx
.as_mut()
.poll_write_vectored(&mut noop_context(), io_slices)
{
Poll::Pending => {
is_pending = true;
break;
}
Poll::Ready(Ok(1)) => {}
Poll::Ready(Ok(n)) => panic!("wrote too many bytes: {n}"),
Poll::Ready(Err(e)) => panic!("{e}"),
}
}
assert!(is_pending);
let (mut tx, rx) = simplex::new(INITIAL_BUDGET * 2);
tx.write_all(&[0u8; INITIAL_BUDGET + 2]).await.unwrap();
pin_mut!(rx);
let mut is_pending = false;
for _ in 0..INITIAL_BUDGET + 1 {
let mut buf = [0u8; 1];
let mut buf = ReadBuf::new(&mut buf);
match rx.as_mut().poll_read(&mut noop_context(), &mut buf) {
Poll::Pending => {
is_pending = true;
break;
}
Poll::Ready(Ok(())) => assert_eq!(buf.filled().len(), 1),
Poll::Ready(Err(e)) => panic!("{e}"),
}
}
assert!(is_pending);
}
/// The capacity is exactly same as the total length of the vectored buffers.
#[tokio::test]
async fn poll_write_vectored_0() {
const MSG1: &[u8] = b"1";
const MSG2: &[u8] = b"22";
const MSG3: &[u8] = b"333";
const MSG_LEN: usize = MSG1.len() + MSG2.len() + MSG3.len();
let io_slices = &[IoSlice::new(MSG1), IoSlice::new(MSG2), IoSlice::new(MSG3)];
let (tx, mut rx) = simplex::new(MSG_LEN);
tokio::pin!(tx);
let res = tx.poll_write_vectored(&mut noop_context(), io_slices);
let n = assert_ready!(res).unwrap();
assert_eq!(n, MSG_LEN);
let mut buf = [0; MSG_LEN];
let n = rx.read_exact(&mut buf).await.unwrap();
assert_eq!(n, MSG_LEN);
assert_eq!(&buf, b"122333");
}
/// The capacity is smaller than the total length of the vectored buffers.
#[tokio::test]
async fn poll_write_vectored_1() {
const MSG1: &[u8] = b"1";
const MSG2: &[u8] = b"22";
const MSG3: &[u8] = b"333";
const CAPACITY: usize = MSG1.len() + MSG2.len() + 1;
let io_slices = &[IoSlice::new(MSG1), IoSlice::new(MSG2), IoSlice::new(MSG3)];
let (tx, mut rx) = simplex::new(CAPACITY);
tokio::pin!(tx);
// ==== The poll_write_vectored should write MSG1 and MSG2 fully, and MSG3 partially. ====
let res = tx.poll_write_vectored(&mut noop_context(), io_slices);
let n = assert_ready!(res).unwrap();
assert_eq!(n, CAPACITY);
let mut buf = [0; CAPACITY];
let n = rx.read_exact(&mut buf).await.unwrap();
assert_eq!(n, CAPACITY);
assert_eq!(&buf, b"1223");
}
/// There are two empty buffers in the vectored buffers.
#[tokio::test]
async fn poll_write_vectored_2() {
const MSG1: &[u8] = b"1";
const MSG2: &[u8] = b"";
const MSG3: &[u8] = b"22";
const MSG4: &[u8] = b"";
const MSG5: &[u8] = b"333";
const MSG_LEN: usize = MSG1.len() + MSG2.len() + MSG3.len() + MSG4.len() + MSG5.len();
let io_slices = &[
IoSlice::new(MSG1),
IoSlice::new(MSG2),
IoSlice::new(MSG3),
IoSlice::new(MSG4),
IoSlice::new(MSG5),
];
let (tx, mut rx) = simplex::new(MSG_LEN);
tokio::pin!(tx);
let res = tx.poll_write_vectored(&mut noop_context(), io_slices);
let n = assert_ready!(res).unwrap();
assert_eq!(n, MSG_LEN);
let mut buf = [0; MSG_LEN];
let n = rx.read_exact(&mut buf).await.unwrap();
assert_eq!(n, MSG_LEN);
assert_eq!(&buf, b"122333");
}
/// The `Sender::poll_write_vectored` should return `Poll::Ready(Ok(0))`
/// if all the input buffers have zero length.
#[tokio::test]
async fn poll_write_vectored_3() {
let io_slices = &[IoSlice::new(&[]), IoSlice::new(&[]), IoSlice::new(&[])];
let (tx, _rx) = simplex::new(32);
tokio::pin!(tx);
let n = assert_ready!(tx.poll_write_vectored(&mut noop_context(), io_slices)).unwrap();
assert_eq!(n, 0);
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/sync_cancellation_token.rs | tokio-util/tests/sync_cancellation_token.rs | #![warn(rust_2018_idioms)]
use tokio::pin;
use tokio::sync::oneshot;
use tokio_util::sync::{CancellationToken, WaitForCancellationFuture};
use core::future::Future;
use core::task::{Context, Poll};
use futures_test::task::new_count_waker;
#[test]
fn cancel_token() {
let (waker, wake_counter) = new_count_waker();
let token = CancellationToken::new();
assert!(!token.is_cancelled());
let wait_fut = token.cancelled();
pin!(wait_fut);
assert_eq!(
Poll::Pending,
wait_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(wake_counter, 0);
let wait_fut_2 = token.cancelled();
pin!(wait_fut_2);
token.cancel();
assert_eq!(wake_counter, 1);
assert!(token.is_cancelled());
assert_eq!(
Poll::Ready(()),
wait_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Ready(()),
wait_fut_2.as_mut().poll(&mut Context::from_waker(&waker))
);
}
#[test]
fn cancel_token_owned() {
let (waker, wake_counter) = new_count_waker();
let token = CancellationToken::new();
assert!(!token.is_cancelled());
let wait_fut = token.clone().cancelled_owned();
pin!(wait_fut);
assert_eq!(
Poll::Pending,
wait_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(wake_counter, 0);
let wait_fut_2 = token.clone().cancelled_owned();
pin!(wait_fut_2);
token.cancel();
assert_eq!(wake_counter, 1);
assert!(token.is_cancelled());
assert_eq!(
Poll::Ready(()),
wait_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Ready(()),
wait_fut_2.as_mut().poll(&mut Context::from_waker(&waker))
);
}
#[test]
fn cancel_token_owned_drop_test() {
let (waker, wake_counter) = new_count_waker();
let token = CancellationToken::new();
let future = token.cancelled_owned();
pin!(future);
assert_eq!(
Poll::Pending,
future.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(wake_counter, 0);
// let future be dropped while pinned and under pending state to
// find potential memory related bugs.
}
#[test]
fn cancel_child_token_through_parent() {
let (waker, wake_counter) = new_count_waker();
let token = CancellationToken::new();
let child_token = token.child_token();
assert!(!child_token.is_cancelled());
let child_fut = child_token.cancelled();
pin!(child_fut);
let parent_fut = token.cancelled();
pin!(parent_fut);
assert_eq!(
Poll::Pending,
child_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Pending,
parent_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(wake_counter, 0);
token.cancel();
assert_eq!(wake_counter, 2);
assert!(token.is_cancelled());
assert!(child_token.is_cancelled());
assert_eq!(
Poll::Ready(()),
child_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Ready(()),
parent_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
}
#[test]
fn cancel_grandchild_token_through_parent_if_child_was_dropped() {
let (waker, wake_counter) = new_count_waker();
let token = CancellationToken::new();
let intermediate_token = token.child_token();
let child_token = intermediate_token.child_token();
drop(intermediate_token);
assert!(!child_token.is_cancelled());
let child_fut = child_token.cancelled();
pin!(child_fut);
let parent_fut = token.cancelled();
pin!(parent_fut);
assert_eq!(
Poll::Pending,
child_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Pending,
parent_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(wake_counter, 0);
token.cancel();
assert_eq!(wake_counter, 2);
assert!(token.is_cancelled());
assert!(child_token.is_cancelled());
assert_eq!(
Poll::Ready(()),
child_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Ready(()),
parent_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
}
#[test]
fn cancel_child_token_without_parent() {
let (waker, wake_counter) = new_count_waker();
let token = CancellationToken::new();
let child_token_1 = token.child_token();
let child_fut = child_token_1.cancelled();
pin!(child_fut);
let parent_fut = token.cancelled();
pin!(parent_fut);
assert_eq!(
Poll::Pending,
child_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Pending,
parent_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(wake_counter, 0);
child_token_1.cancel();
assert_eq!(wake_counter, 1);
assert!(!token.is_cancelled());
assert!(child_token_1.is_cancelled());
assert_eq!(
Poll::Ready(()),
child_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Pending,
parent_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
let child_token_2 = token.child_token();
let child_fut_2 = child_token_2.cancelled();
pin!(child_fut_2);
assert_eq!(
Poll::Pending,
child_fut_2.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Pending,
parent_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
token.cancel();
assert_eq!(wake_counter, 3);
assert!(token.is_cancelled());
assert!(child_token_2.is_cancelled());
assert_eq!(
Poll::Ready(()),
child_fut_2.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Ready(()),
parent_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
}
#[test]
fn create_child_token_after_parent_was_cancelled() {
for drop_child_first in [true, false].iter().cloned() {
let (waker, wake_counter) = new_count_waker();
let token = CancellationToken::new();
token.cancel();
let child_token = token.child_token();
assert!(child_token.is_cancelled());
{
let child_fut = child_token.cancelled();
pin!(child_fut);
let parent_fut = token.cancelled();
pin!(parent_fut);
assert_eq!(
Poll::Ready(()),
child_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Ready(()),
parent_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(wake_counter, 0);
}
if drop_child_first {
drop(child_token);
drop(token);
} else {
drop(token);
drop(child_token);
}
}
}
#[test]
fn drop_multiple_child_tokens() {
for drop_first_child_first in &[true, false] {
let token = CancellationToken::new();
let mut child_tokens = [None, None, None];
for child in &mut child_tokens {
*child = Some(token.child_token());
}
assert!(!token.is_cancelled());
assert!(!child_tokens[0].as_ref().unwrap().is_cancelled());
for i in 0..child_tokens.len() {
if *drop_first_child_first {
child_tokens[i] = None;
} else {
child_tokens[child_tokens.len() - 1 - i] = None;
}
assert!(!token.is_cancelled());
}
drop(token);
}
}
#[test]
fn cancel_only_all_descendants() {
// ARRANGE
let (waker, wake_counter) = new_count_waker();
let parent_token = CancellationToken::new();
let token = parent_token.child_token();
let sibling_token = parent_token.child_token();
let child1_token = token.child_token();
let child2_token = token.child_token();
let grandchild_token = child1_token.child_token();
let grandchild2_token = child1_token.child_token();
let great_grandchild_token = grandchild_token.child_token();
assert!(!parent_token.is_cancelled());
assert!(!token.is_cancelled());
assert!(!sibling_token.is_cancelled());
assert!(!child1_token.is_cancelled());
assert!(!child2_token.is_cancelled());
assert!(!grandchild_token.is_cancelled());
assert!(!grandchild2_token.is_cancelled());
assert!(!great_grandchild_token.is_cancelled());
let parent_fut = parent_token.cancelled();
let fut = token.cancelled();
let sibling_fut = sibling_token.cancelled();
let child1_fut = child1_token.cancelled();
let child2_fut = child2_token.cancelled();
let grandchild_fut = grandchild_token.cancelled();
let grandchild2_fut = grandchild2_token.cancelled();
let great_grandchild_fut = great_grandchild_token.cancelled();
pin!(parent_fut);
pin!(fut);
pin!(sibling_fut);
pin!(child1_fut);
pin!(child2_fut);
pin!(grandchild_fut);
pin!(grandchild2_fut);
pin!(great_grandchild_fut);
assert_eq!(
Poll::Pending,
parent_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Pending,
fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Pending,
sibling_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Pending,
child1_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Pending,
child2_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Pending,
grandchild_fut
.as_mut()
.poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Pending,
grandchild2_fut
.as_mut()
.poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Pending,
great_grandchild_fut
.as_mut()
.poll(&mut Context::from_waker(&waker))
);
assert_eq!(wake_counter, 0);
// ACT
token.cancel();
// ASSERT
assert_eq!(wake_counter, 6);
assert!(!parent_token.is_cancelled());
assert!(token.is_cancelled());
assert!(!sibling_token.is_cancelled());
assert!(child1_token.is_cancelled());
assert!(child2_token.is_cancelled());
assert!(grandchild_token.is_cancelled());
assert!(grandchild2_token.is_cancelled());
assert!(great_grandchild_token.is_cancelled());
assert_eq!(
Poll::Ready(()),
fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Ready(()),
child1_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Ready(()),
child2_fut.as_mut().poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Ready(()),
grandchild_fut
.as_mut()
.poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Ready(()),
grandchild2_fut
.as_mut()
.poll(&mut Context::from_waker(&waker))
);
assert_eq!(
Poll::Ready(()),
great_grandchild_fut
.as_mut()
.poll(&mut Context::from_waker(&waker))
);
assert_eq!(wake_counter, 6);
}
#[test]
fn drop_parent_before_child_tokens() {
let token = CancellationToken::new();
let child1 = token.child_token();
let child2 = token.child_token();
drop(token);
assert!(!child1.is_cancelled());
drop(child1);
drop(child2);
}
#[test]
fn derives_send_sync() {
fn assert_send<T: Send>() {}
fn assert_sync<T: Sync>() {}
assert_send::<CancellationToken>();
assert_sync::<CancellationToken>();
assert_send::<WaitForCancellationFuture<'static>>();
assert_sync::<WaitForCancellationFuture<'static>>();
}
#[test]
fn run_until_cancelled_test() {
let (waker, _) = new_count_waker();
{
let token = CancellationToken::new();
let fut = token.run_until_cancelled(std::future::pending::<()>());
pin!(fut);
assert_eq!(
Poll::Pending,
fut.as_mut().poll(&mut Context::from_waker(&waker))
);
token.cancel();
assert_eq!(
Poll::Ready(None),
fut.as_mut().poll(&mut Context::from_waker(&waker))
);
}
{
let (tx, rx) = oneshot::channel::<()>();
let token = CancellationToken::new();
let fut = token.run_until_cancelled(async move {
rx.await.unwrap();
42
});
pin!(fut);
assert_eq!(
Poll::Pending,
fut.as_mut().poll(&mut Context::from_waker(&waker))
);
tx.send(()).unwrap();
assert_eq!(
Poll::Ready(Some(42)),
fut.as_mut().poll(&mut Context::from_waker(&waker))
);
}
// Do not poll the future when token is already cancelled.
{
let token = CancellationToken::new();
let fut = token.run_until_cancelled(async { panic!("fut polled after cancellation") });
pin!(fut);
token.cancel();
assert_eq!(
Poll::Ready(None),
fut.as_mut().poll(&mut Context::from_waker(&waker))
);
}
}
#[test]
fn run_until_cancelled_owned_test() {
let (waker, _) = new_count_waker();
{
let token = CancellationToken::new();
let to_cancel = token.clone();
let takes_ownership = move |token: CancellationToken| {
token.run_until_cancelled_owned(std::future::pending::<()>())
};
let fut = takes_ownership(token);
pin!(fut);
assert_eq!(
Poll::Pending,
fut.as_mut().poll(&mut Context::from_waker(&waker))
);
to_cancel.cancel();
assert_eq!(
Poll::Ready(None),
fut.as_mut().poll(&mut Context::from_waker(&waker))
);
}
{
let (tx, rx) = oneshot::channel::<()>();
let token = CancellationToken::new();
let takes_ownership = move |token: CancellationToken, rx: oneshot::Receiver<()>| {
token.run_until_cancelled_owned(async move {
rx.await.unwrap();
42
})
};
let fut = takes_ownership(token, rx);
pin!(fut);
assert_eq!(
Poll::Pending,
fut.as_mut().poll(&mut Context::from_waker(&waker))
);
tx.send(()).unwrap();
assert_eq!(
Poll::Ready(Some(42)),
fut.as_mut().poll(&mut Context::from_waker(&waker))
);
}
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/io_inspect.rs | tokio-util/tests/io_inspect.rs | use std::{
future::poll_fn,
io::IoSlice,
pin::Pin,
task::{Context, Poll},
};
use tokio::io::{AsyncRead, AsyncReadExt, AsyncWrite, AsyncWriteExt, ReadBuf};
use tokio_util::io::{InspectReader, InspectWriter};
/// An AsyncRead implementation that works byte-by-byte, to catch out callers
/// who don't allow for `buf` being part-filled before the call
struct SmallReader {
contents: Vec<u8>,
}
impl Unpin for SmallReader {}
impl AsyncRead for SmallReader {
fn poll_read(
mut self: Pin<&mut Self>,
_cx: &mut Context<'_>,
buf: &mut ReadBuf<'_>,
) -> Poll<std::io::Result<()>> {
if let Some(byte) = self.contents.pop() {
buf.put_slice(&[byte])
}
Poll::Ready(Ok(()))
}
}
#[tokio::test]
async fn read_tee() {
let contents = b"This could be really long, you know".to_vec();
let reader = SmallReader {
contents: contents.clone(),
};
let mut altout: Vec<u8> = Vec::new();
let mut teeout = Vec::new();
{
let mut tee = InspectReader::new(reader, |bytes| altout.extend(bytes));
tee.read_to_end(&mut teeout).await.unwrap();
}
assert_eq!(teeout, altout);
assert_eq!(altout.len(), contents.len());
}
/// An AsyncWrite implementation that works byte-by-byte for poll_write, and
/// that reads the whole of the first buffer plus one byte from the second in
/// poll_write_vectored.
///
/// This is designed to catch bugs in handling partially written buffers
#[derive(Debug)]
struct SmallWriter {
contents: Vec<u8>,
}
impl Unpin for SmallWriter {}
impl AsyncWrite for SmallWriter {
fn poll_write(
mut self: Pin<&mut Self>,
_cx: &mut Context<'_>,
buf: &[u8],
) -> Poll<Result<usize, std::io::Error>> {
// Just write one byte at a time
if buf.is_empty() {
return Poll::Ready(Ok(0));
}
self.contents.push(buf[0]);
Poll::Ready(Ok(1))
}
fn poll_flush(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<Result<(), std::io::Error>> {
Poll::Ready(Ok(()))
}
fn poll_shutdown(
self: Pin<&mut Self>,
_cx: &mut Context<'_>,
) -> Poll<Result<(), std::io::Error>> {
Poll::Ready(Ok(()))
}
fn poll_write_vectored(
mut self: Pin<&mut Self>,
_cx: &mut Context<'_>,
bufs: &[IoSlice<'_>],
) -> Poll<Result<usize, std::io::Error>> {
// Write all of the first buffer, then one byte from the second buffer
// This should trip up anything that doesn't correctly handle multiple
// buffers.
if bufs.is_empty() {
return Poll::Ready(Ok(0));
}
let mut written_len = bufs[0].len();
self.contents.extend_from_slice(&bufs[0]);
if bufs.len() > 1 {
let buf = bufs[1];
if !buf.is_empty() {
written_len += 1;
self.contents.push(buf[0]);
}
}
Poll::Ready(Ok(written_len))
}
fn is_write_vectored(&self) -> bool {
true
}
}
#[tokio::test]
async fn write_tee() {
let mut altout: Vec<u8> = Vec::new();
let mut writeout = SmallWriter {
contents: Vec::new(),
};
{
let mut tee = InspectWriter::new(&mut writeout, |bytes| altout.extend(bytes));
tee.write_all(b"A testing string, very testing")
.await
.unwrap();
}
assert_eq!(altout, writeout.contents);
}
// This is inefficient, but works well enough for test use.
// If you want something similar for real code, you'll want to avoid all the
// fun of manipulating `bufs` - ideally, by the time you read this,
// IoSlice::advance_slices will be stable, and you can use that.
async fn write_all_vectored<W: AsyncWrite + Unpin>(
mut writer: W,
mut bufs: Vec<Vec<u8>>,
) -> Result<usize, std::io::Error> {
let mut res = 0;
while !bufs.is_empty() {
let mut written = poll_fn(|cx| {
let bufs: Vec<IoSlice> = bufs.iter().map(|v| IoSlice::new(v)).collect();
Pin::new(&mut writer).poll_write_vectored(cx, &bufs)
})
.await?;
res += written;
while written > 0 {
let buf_len = bufs[0].len();
if buf_len <= written {
bufs.remove(0);
written -= buf_len;
} else {
let buf = &mut bufs[0];
let drain_len = written.min(buf.len());
buf.drain(..drain_len);
written -= drain_len;
}
}
}
Ok(res)
}
#[tokio::test]
async fn write_tee_vectored() {
let mut altout: Vec<u8> = Vec::new();
let mut writeout = SmallWriter {
contents: Vec::new(),
};
let original = b"A very long string split up";
let bufs: Vec<Vec<u8>> = original
.split(|b| b.is_ascii_whitespace())
.map(Vec::from)
.collect();
assert!(bufs.len() > 1);
let expected: Vec<u8> = {
let mut out = Vec::new();
for item in &bufs {
out.extend_from_slice(item)
}
out
};
{
let mut bufcount = 0;
let tee = InspectWriter::new(&mut writeout, |bytes| {
bufcount += 1;
altout.extend(bytes)
});
assert!(tee.is_write_vectored());
write_all_vectored(tee, bufs.clone()).await.unwrap();
assert!(bufcount >= bufs.len());
}
assert_eq!(altout, writeout.contents);
assert_eq!(writeout.contents, expected);
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/context.rs | tokio-util/tests/context.rs | #![cfg(feature = "rt")]
#![cfg(not(target_os = "wasi"))] // Wasi doesn't support threads
#![warn(rust_2018_idioms)]
use tokio::runtime::Builder;
use tokio::time::*;
use tokio_util::context::RuntimeExt;
#[test]
fn tokio_context_with_another_runtime() {
let rt1 = Builder::new_multi_thread()
.worker_threads(1)
// no timer!
.build()
.unwrap();
let rt2 = Builder::new_multi_thread()
.worker_threads(1)
.enable_all()
.build()
.unwrap();
// Without the `HandleExt.wrap()` there would be a panic because there is
// no timer running, since it would be referencing runtime r1.
rt1.block_on(rt2.wrap(async move { sleep(Duration::from_millis(2)).await }));
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/task_join_queue.rs | tokio-util/tests/task_join_queue.rs | #![warn(rust_2018_idioms)]
use tokio::sync::oneshot;
use tokio::task::yield_now;
use tokio::time::Duration;
use tokio_test::{assert_pending, assert_ready, task};
use tokio_util::task::JoinQueue;
#[tokio::test]
async fn test_join_queue_no_spurious_wakeups() {
let (tx, rx) = oneshot::channel::<()>();
let mut join_queue = JoinQueue::new();
join_queue.spawn(async move {
let _ = rx.await;
42
});
let mut join_next = task::spawn(join_queue.join_next());
assert_pending!(join_next.poll());
assert!(!join_next.is_woken());
let _ = tx.send(());
yield_now().await;
assert!(join_next.is_woken());
let output = assert_ready!(join_next.poll());
assert_eq!(output.unwrap().unwrap(), 42);
}
#[tokio::test]
async fn test_join_queue_abort_on_drop() {
let mut queue = JoinQueue::new();
let mut recvs = Vec::new();
for _ in 0..16 {
let (send, recv) = oneshot::channel::<()>();
recvs.push(recv);
queue.spawn(async move {
// This task will never complete on its own.
futures::future::pending::<()>().await;
drop(send);
});
}
drop(queue);
for recv in recvs {
// The task is aborted soon and we will receive an error.
assert!(recv.await.is_err());
}
}
#[tokio::test]
async fn test_join_queue_alternating() {
let mut queue = JoinQueue::new();
assert_eq!(queue.len(), 0);
queue.spawn(async {});
assert_eq!(queue.len(), 1);
queue.spawn(async {});
assert_eq!(queue.len(), 2);
for _ in 0..16 {
let res = queue.join_next().await.unwrap();
assert!(res.is_ok());
assert_eq!(queue.len(), 1);
queue.spawn(async {});
assert_eq!(queue.len(), 2);
}
}
#[tokio::test(start_paused = true)]
async fn test_join_queue_abort_all() {
let mut queue: JoinQueue<()> = JoinQueue::new();
for _ in 0..5 {
queue.spawn(futures::future::pending());
}
for _ in 0..5 {
queue.spawn(async {
tokio::time::sleep(Duration::from_secs(1)).await;
});
}
// The join queue will now have 5 pending tasks and 5 ready tasks.
tokio::time::sleep(Duration::from_secs(2)).await;
queue.abort_all();
assert_eq!(queue.len(), 10);
let mut count = 0;
while let Some(res) = queue.join_next().await {
if count < 5 {
assert!(res.unwrap_err().is_cancelled());
} else {
assert!(res.is_ok());
}
count += 1;
}
assert_eq!(count, 10);
assert!(queue.is_empty());
}
#[tokio::test]
async fn test_join_queue_join_all() {
let mut queue = JoinQueue::new();
let mut senders = Vec::new();
for i in 0..5 {
let (tx, rx) = oneshot::channel::<()>();
senders.push(tx);
queue.spawn(async move {
let _ = rx.await;
i
});
}
// Complete all tasks in reverse order
while let Some(tx) = senders.pop() {
let _ = tx.send(());
}
let results = queue.join_all().await;
assert_eq!(results, vec![0, 1, 2, 3, 4]);
}
#[tokio::test]
async fn test_join_queue_shutdown() {
let mut queue = JoinQueue::new();
let mut senders = Vec::new();
for _ in 0..5 {
let (tx, rx) = oneshot::channel::<()>();
senders.push(tx);
queue.spawn(async move {
let _ = rx.await;
});
}
queue.shutdown().await;
assert!(queue.is_empty());
while let Some(tx) = senders.pop() {
assert!(tx.is_closed());
}
}
#[tokio::test]
async fn test_join_queue_with_manual_abort() {
let mut queue = JoinQueue::new();
let mut num_canceled = 0;
let mut num_completed = 0;
let mut senders = Vec::new();
for i in 0..16 {
let (tx, rx) = oneshot::channel::<()>();
senders.push(tx);
let abort = queue.spawn(async move {
let _ = rx.await;
i
});
if i % 2 != 0 {
// abort odd-numbered tasks.
abort.abort();
}
}
// Complete all tasks in reverse order
while let Some(tx) = senders.pop() {
let _ = tx.send(());
}
while let Some(res) = queue.join_next().await {
match res {
Ok(res) => {
assert_eq!(res, num_completed * 2);
num_completed += 1;
}
Err(e) => {
assert!(e.is_cancelled());
num_canceled += 1;
}
}
}
assert_eq!(num_canceled, 8);
assert_eq!(num_completed, 8);
}
#[tokio::test]
async fn test_join_queue_join_next_with_id() {
const TASK_NUM: u32 = 1000;
let (send, recv) = tokio::sync::watch::channel(());
let mut queue = JoinQueue::new();
let mut spawned = Vec::with_capacity(TASK_NUM as usize);
for _ in 0..TASK_NUM {
let mut recv = recv.clone();
let handle = queue.spawn(async move { recv.changed().await.unwrap() });
spawned.push(handle.id());
}
drop(recv);
send.send_replace(());
send.closed().await;
let mut count = 0;
let mut joined = Vec::with_capacity(TASK_NUM as usize);
while let Some(res) = queue.join_next_with_id().await {
match res {
Ok((id, ())) => {
count += 1;
joined.push(id);
}
Err(err) => panic!("failed: {err}"),
}
}
assert_eq!(count, TASK_NUM);
assert_eq!(joined, spawned);
}
#[tokio::test]
async fn test_join_queue_try_join_next() {
let mut queue = JoinQueue::new();
let (tx1, rx1) = oneshot::channel::<()>();
queue.spawn(async {
let _ = rx1.await;
});
let (tx2, rx2) = oneshot::channel::<()>();
queue.spawn(async {
let _ = rx2.await;
});
let (tx3, rx3) = oneshot::channel::<()>();
queue.spawn(async {
let _ = rx3.await;
});
// This function also checks that calling `queue.try_join_next()` repeatedly when
// no task is ready is idempotent, i.e. that it does not change the queue state.
fn check_try_join_next_is_noop(queue: &mut JoinQueue<()>) {
let len = queue.len();
for _ in 0..5 {
assert!(queue.try_join_next().is_none());
assert_eq!(queue.len(), len);
}
}
assert_eq!(queue.len(), 3);
check_try_join_next_is_noop(&mut queue);
tx1.send(()).unwrap();
tokio::task::yield_now().await;
assert_eq!(queue.len(), 3);
assert!(queue.try_join_next().is_some());
assert_eq!(queue.len(), 2);
check_try_join_next_is_noop(&mut queue);
tx3.send(()).unwrap();
tokio::task::yield_now().await;
assert_eq!(queue.len(), 2);
check_try_join_next_is_noop(&mut queue);
tx2.send(()).unwrap();
tokio::task::yield_now().await;
assert_eq!(queue.len(), 2);
assert!(queue.try_join_next().is_some());
assert_eq!(queue.len(), 1);
assert!(queue.try_join_next().is_some());
assert!(queue.is_empty());
check_try_join_next_is_noop(&mut queue);
}
#[tokio::test]
async fn test_join_queue_try_join_next_disabled_coop() {
// This number is large enough to trigger coop. Without using `tokio::task::coop::unconstrained`
// inside `try_join_next` this test fails on `assert!(coop_count == 0)`.
const TASK_NUM: u32 = 1000;
let sem: std::sync::Arc<tokio::sync::Semaphore> =
std::sync::Arc::new(tokio::sync::Semaphore::new(0));
let mut queue = JoinQueue::new();
for _ in 0..TASK_NUM {
let sem = sem.clone();
queue.spawn(async move {
sem.add_permits(1);
});
}
let _ = sem.acquire_many(TASK_NUM).await.unwrap();
let mut count = 0;
let mut coop_count = 0;
while !queue.is_empty() {
match queue.try_join_next() {
Some(Ok(())) => count += 1,
Some(Err(err)) => panic!("failed: {err}"),
None => {
coop_count += 1;
tokio::task::yield_now().await;
}
}
}
assert_eq!(coop_count, 0);
assert_eq!(count, TASK_NUM);
}
#[tokio::test]
async fn test_join_queue_try_join_next_with_id_disabled_coop() {
// Note that this number is large enough to trigger coop as in
// `test_join_queue_try_join_next_coop` test. Without using
// `tokio::task::coop::unconstrained` inside `try_join_next_with_id`
// this test fails on `assert_eq!(count, TASK_NUM)`.
const TASK_NUM: u32 = 1000;
let (send, recv) = tokio::sync::watch::channel(());
let mut queue = JoinQueue::new();
let mut spawned = Vec::with_capacity(TASK_NUM as usize);
for _ in 0..TASK_NUM {
let mut recv = recv.clone();
let handle = queue.spawn(async move { recv.changed().await.unwrap() });
spawned.push(handle.id());
}
drop(recv);
assert!(queue.try_join_next_with_id().is_none());
send.send_replace(());
send.closed().await;
let mut count = 0;
let mut coop_count = 0;
let mut joined = Vec::with_capacity(TASK_NUM as usize);
while !queue.is_empty() {
match queue.try_join_next_with_id() {
Some(Ok((id, ()))) => {
count += 1;
joined.push(id);
}
Some(Err(err)) => panic!("failed: {err}"),
None => {
coop_count += 1;
tokio::task::yield_now().await;
}
}
}
assert_eq!(coop_count, 0);
assert_eq!(count, TASK_NUM);
assert_eq!(joined, spawned);
}
#[test]
#[should_panic(
expected = "`spawn_local` called from outside of a `task::LocalSet` or `runtime::LocalRuntime`"
)]
fn spawn_local_panic_outside_any_runtime() {
let mut queue = JoinQueue::new();
queue.spawn_local(async {});
}
#[tokio::test(flavor = "multi_thread")]
#[should_panic(
expected = "`spawn_local` called from outside of a `task::LocalSet` or `runtime::LocalRuntime`"
)]
async fn spawn_local_panic_in_multi_thread_runtime() {
let mut queue = JoinQueue::new();
queue.spawn_local(async {});
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/task_join_map.rs | tokio-util/tests/task_join_map.rs | #![warn(rust_2018_idioms)]
#![cfg(feature = "join-map")]
use std::panic::AssertUnwindSafe;
use futures::future::{pending, FutureExt};
use tokio::sync::oneshot;
use tokio::task::LocalSet;
use tokio::time::Duration;
use tokio_util::task::JoinMap;
fn rt() -> tokio::runtime::Runtime {
tokio::runtime::Builder::new_current_thread()
.build()
.unwrap()
}
// Spawn `N` tasks that return their index (`i`).
fn spawn_index_tasks(map: &mut JoinMap<usize, usize>, n: usize, on: Option<&LocalSet>) {
for i in 0..n {
let rc = std::rc::Rc::new(i);
match on {
None => map.spawn_local(i, async move { *rc }),
Some(local) => map.spawn_local_on(i, async move { *rc }, local),
};
}
}
// Spawn `N` “pending” tasks that own a `oneshot::Sender`.
// When the task is aborted the sender is dropped, which is observed
// via the returned `Receiver`s.
fn spawn_pending_tasks(
map: &mut JoinMap<usize, ()>,
receivers: &mut Vec<oneshot::Receiver<()>>,
n: usize,
on: Option<&LocalSet>,
) {
for i in 0..n {
let (tx, rx) = oneshot::channel::<()>();
receivers.push(rx);
let fut = async move {
pending::<()>().await;
drop(tx);
};
match on {
None => map.spawn_local(i, fut),
Some(local) => map.spawn_local_on(i, fut, local),
};
}
}
/// Await every task in JoinMap and assert every task returns its own key.
async fn drain_joinmap_and_assert(mut map: JoinMap<usize, usize>, n: usize) {
let mut seen = vec![false; n];
while let Some((k, res)) = map.join_next().await {
let v = res.expect("task panicked");
assert_eq!(k, v);
seen[v] = true;
}
assert!(seen.into_iter().all(|b| b));
assert!(map.is_empty());
}
// Await every receiver and assert they all return `Err` because the
// corresponding sender (inside an aborted task) was dropped.
async fn await_receivers_and_assert(receivers: Vec<oneshot::Receiver<()>>) {
for rx in receivers {
assert!(
rx.await.is_err(),
"task should have been aborted and sender dropped"
);
}
}
#[tokio::test(start_paused = true)]
async fn test_with_sleep() {
let mut map = JoinMap::new();
for i in 0..10 {
map.spawn(i, async move { i });
assert_eq!(map.len(), 1 + i);
}
map.detach_all();
assert_eq!(map.len(), 0);
assert!(map.join_next().await.is_none());
for i in 0..10 {
map.spawn(i, async move {
tokio::time::sleep(Duration::from_secs(i as u64)).await;
i
});
assert_eq!(map.len(), 1 + i);
}
let mut seen = [false; 10];
while let Some((k, res)) = map.join_next().await {
seen[k] = true;
assert_eq!(res.expect("task should have completed successfully"), k);
}
for was_seen in &seen {
assert!(was_seen);
}
assert!(map.join_next().await.is_none());
// Do it again.
for i in 0..10 {
map.spawn(i, async move {
tokio::time::sleep(Duration::from_secs(i as u64)).await;
i
});
}
let mut seen = [false; 10];
while let Some((k, res)) = map.join_next().await {
seen[k] = true;
assert_eq!(res.expect("task should have completed successfully"), k);
}
for was_seen in &seen {
assert!(was_seen);
}
assert!(map.join_next().await.is_none());
}
#[tokio::test]
async fn test_abort_on_drop() {
let mut map = JoinMap::new();
let mut recvs = Vec::new();
for i in 0..16 {
let (send, recv) = oneshot::channel::<()>();
recvs.push(recv);
map.spawn(i, async {
// This task will never complete on its own.
futures::future::pending::<()>().await;
drop(send);
});
}
drop(map);
for recv in recvs {
// The task is aborted soon and we will receive an error.
assert!(recv.await.is_err());
}
}
#[tokio::test]
async fn alternating() {
let mut map = JoinMap::new();
assert_eq!(map.len(), 0);
map.spawn(1, async {});
assert_eq!(map.len(), 1);
map.spawn(2, async {});
assert_eq!(map.len(), 2);
for i in 0..16 {
let (_, res) = map.join_next().await.unwrap();
assert!(res.is_ok());
assert_eq!(map.len(), 1);
map.spawn(i, async {});
assert_eq!(map.len(), 2);
}
}
#[tokio::test]
async fn test_keys() {
use std::collections::HashSet;
let mut map = JoinMap::new();
assert_eq!(map.len(), 0);
map.spawn(1, async {});
assert_eq!(map.len(), 1);
map.spawn(2, async {});
assert_eq!(map.len(), 2);
let keys = map.keys().collect::<HashSet<&u32>>();
assert!(keys.contains(&1));
assert!(keys.contains(&2));
let _ = map.join_next().await.unwrap();
let _ = map.join_next().await.unwrap();
assert_eq!(map.len(), 0);
let keys = map.keys().collect::<HashSet<&u32>>();
assert!(keys.is_empty());
}
#[tokio::test(start_paused = true)]
async fn abort_by_key() {
let mut map = JoinMap::new();
let mut num_canceled = 0;
let mut num_completed = 0;
for i in 0..16 {
map.spawn(i, async move {
tokio::time::sleep(Duration::from_secs(i as u64)).await;
});
}
for i in 0..16 {
if i % 2 != 0 {
// abort odd-numbered tasks.
map.abort(&i);
}
}
while let Some((key, res)) = map.join_next().await {
match res {
Ok(()) => {
num_completed += 1;
assert_eq!(key % 2, 0);
assert!(!map.contains_key(&key));
}
Err(e) => {
num_canceled += 1;
assert!(e.is_cancelled());
assert_ne!(key % 2, 0);
assert!(!map.contains_key(&key));
}
}
}
assert_eq!(num_canceled, 8);
assert_eq!(num_completed, 8);
}
#[tokio::test(start_paused = true)]
async fn abort_by_predicate() {
let mut map = JoinMap::new();
let mut num_canceled = 0;
let mut num_completed = 0;
for i in 0..16 {
map.spawn(i, async move {
tokio::time::sleep(Duration::from_secs(i as u64)).await;
});
}
// abort odd-numbered tasks.
map.abort_matching(|key| key % 2 != 0);
while let Some((key, res)) = map.join_next().await {
match res {
Ok(()) => {
num_completed += 1;
assert_eq!(key % 2, 0);
assert!(!map.contains_key(&key));
}
Err(e) => {
num_canceled += 1;
assert!(e.is_cancelled());
assert_ne!(key % 2, 0);
assert!(!map.contains_key(&key));
}
}
}
assert_eq!(num_canceled, 8);
assert_eq!(num_completed, 8);
}
#[test]
fn runtime_gone() {
let mut map = JoinMap::new();
{
let rt = rt();
map.spawn_on("key", async { 1 }, rt.handle());
drop(rt);
}
let (key, res) = rt().block_on(map.join_next()).unwrap();
assert_eq!(key, "key");
assert!(res.unwrap_err().is_cancelled());
}
// This ensures that `join_next` works correctly when the coop budget is
// exhausted.
#[tokio::test(flavor = "current_thread")]
async fn join_map_coop() {
// Large enough to trigger coop.
const TASK_NUM: u32 = 1000;
static SEM: tokio::sync::Semaphore = tokio::sync::Semaphore::const_new(0);
let mut map = JoinMap::new();
for i in 0..TASK_NUM {
map.spawn(i, async move {
SEM.add_permits(1);
i
});
}
// Wait for all tasks to complete.
//
// Since this is a `current_thread` runtime, there's no race condition
// between the last permit being added and the task completing.
let _ = SEM.acquire_many(TASK_NUM).await.unwrap();
let mut count = 0;
let mut coop_count = 0;
loop {
match map.join_next().now_or_never() {
Some(Some((key, Ok(i)))) => assert_eq!(key, i),
Some(Some((key, Err(err)))) => panic!("failed[{key}]: {err}"),
None => {
coop_count += 1;
tokio::task::yield_now().await;
continue;
}
Some(None) => break,
}
count += 1;
}
assert!(coop_count >= 1);
assert_eq!(count, TASK_NUM);
}
#[tokio::test(start_paused = true)]
async fn abort_all() {
let mut map: JoinMap<usize, ()> = JoinMap::new();
for i in 0..5 {
map.spawn(i, futures::future::pending());
}
for i in 5..10 {
map.spawn(i, async {
tokio::time::sleep(Duration::from_secs(1)).await;
});
}
// The join map will now have 5 pending tasks and 5 ready tasks.
tokio::time::sleep(Duration::from_secs(2)).await;
map.abort_all();
assert_eq!(map.len(), 10);
let mut count = 0;
let mut seen = [false; 10];
while let Some((k, res)) = map.join_next().await {
seen[k] = true;
if let Err(err) = res {
assert!(err.is_cancelled());
}
count += 1;
}
assert_eq!(count, 10);
assert_eq!(map.len(), 0);
for was_seen in &seen {
assert!(was_seen);
}
}
#[tokio::test]
async fn duplicate_keys() {
let mut map = JoinMap::new();
map.spawn(1, async { 1 });
map.spawn(1, async { 2 });
assert_eq!(map.len(), 1);
let (key, res) = map.join_next().await.unwrap();
assert_eq!(key, 1);
assert_eq!(res.unwrap(), 2);
assert!(map.join_next().await.is_none());
}
#[tokio::test]
async fn duplicate_keys2() {
let (send, recv) = oneshot::channel::<()>();
let mut map = JoinMap::new();
map.spawn(1, async { 1 });
map.spawn(1, async {
recv.await.unwrap();
2
});
assert_eq!(map.len(), 1);
tokio::select! {
biased;
res = map.join_next() => match res {
Some((_key, res)) => panic!("Task {res:?} exited."),
None => panic!("Phantom task completion."),
},
() = tokio::task::yield_now() => {},
}
send.send(()).unwrap();
let (key, res) = map.join_next().await.unwrap();
assert_eq!(key, 1);
assert_eq!(res.unwrap(), 2);
assert!(map.join_next().await.is_none());
}
#[cfg_attr(not(panic = "unwind"), ignore)]
#[tokio::test]
async fn duplicate_keys_drop() {
#[derive(Hash, Debug, PartialEq, Eq)]
struct Key;
impl Drop for Key {
fn drop(&mut self) {
panic!("drop called for key");
}
}
let (send, recv) = oneshot::channel::<()>();
let mut map = JoinMap::new();
map.spawn(Key, async { recv.await.unwrap() });
// replace the task, force it to drop the key and abort the task
// we should expect it to panic when dropping the key.
let _ = std::panic::catch_unwind(AssertUnwindSafe(|| map.spawn(Key, async {}))).unwrap_err();
// don't panic when this key drops.
let (key, _) = map.join_next().await.unwrap();
std::mem::forget(key);
// original task should have been aborted, so the sender should be dangling.
assert!(send.is_closed());
assert!(map.join_next().await.is_none());
}
mod spawn_local {
use super::*;
#[test]
#[should_panic(
expected = "`spawn_local` called from outside of a `task::LocalSet` or `runtime::LocalRuntime`"
)]
fn panic_outside_any_runtime() {
let mut map = JoinMap::new();
map.spawn_local((), async {});
}
#[tokio::test(flavor = "multi_thread")]
#[should_panic(
expected = "`spawn_local` called from outside of a `task::LocalSet` or `runtime::LocalRuntime`"
)]
async fn panic_in_multi_thread_runtime() {
let mut map = JoinMap::new();
map.spawn_local((), async {});
}
#[cfg(tokio_unstable)]
mod local_runtime {
use super::*;
/// Spawn several tasks, and then join all tasks.
#[tokio::test(flavor = "local")]
async fn spawn_then_join_next() {
const N: usize = 8;
let mut map = JoinMap::new();
spawn_index_tasks(&mut map, N, None);
assert!(map.join_next().now_or_never().is_none());
drain_joinmap_and_assert(map, N).await;
}
/// Spawn several pending-forever tasks, and then shutdown the [`JoinMap`].
#[tokio::test(flavor = "local")]
async fn spawn_then_shutdown() {
const N: usize = 8;
let mut map = JoinMap::new();
let mut receivers = Vec::new();
spawn_pending_tasks(&mut map, &mut receivers, N, None);
assert!(map.join_next().now_or_never().is_none());
map.shutdown().await;
assert!(map.is_empty());
await_receivers_and_assert(receivers).await;
}
/// Spawn several pending-forever tasks, and then drop the [`JoinMap`].
#[tokio::test(flavor = "local")]
async fn spawn_then_drop() {
const N: usize = 8;
let mut map = JoinMap::new();
let mut receivers = Vec::new();
spawn_pending_tasks(&mut map, &mut receivers, N, None);
assert!(map.join_next().now_or_never().is_none());
drop(map);
await_receivers_and_assert(receivers).await;
}
}
mod local_set {
use super::*;
/// Spawn several tasks, and then join all tasks.
#[tokio::test(flavor = "current_thread")]
async fn spawn_then_join_next() {
const N: usize = 8;
let local = LocalSet::new();
local
.run_until(async move {
let mut map = JoinMap::new();
spawn_index_tasks(&mut map, N, None);
drain_joinmap_and_assert(map, N).await;
})
.await;
}
/// Spawn several pending-forever tasks, and then shutdown the [`JoinMap`].
#[tokio::test(flavor = "current_thread")]
async fn spawn_then_shutdown() {
const N: usize = 8;
let local = LocalSet::new();
local
.run_until(async {
let mut map = JoinMap::new();
let mut receivers = Vec::new();
spawn_pending_tasks(&mut map, &mut receivers, N, None);
assert!(map.join_next().now_or_never().is_none());
map.shutdown().await;
assert!(map.is_empty());
await_receivers_and_assert(receivers).await;
})
.await;
}
/// Spawn several pending-forever tasks, and then drop the [`JoinMap`].
#[tokio::test(flavor = "current_thread")]
async fn spawn_then_drop() {
const N: usize = 8;
let local = LocalSet::new();
local
.run_until(async {
let mut map = JoinMap::new();
let mut receivers = Vec::new();
spawn_pending_tasks(&mut map, &mut receivers, N, None);
assert!(map.join_next().now_or_never().is_none());
drop(map);
await_receivers_and_assert(receivers).await;
})
.await;
}
}
}
mod spawn_local_on {
use super::*;
#[cfg(tokio_unstable)]
mod local_runtime {
use super::*;
/// Spawn several tasks, and then join all tasks.
#[tokio::test(flavor = "local")]
async fn spawn_then_join_next() {
const N: usize = 8;
let local = LocalSet::new();
let mut map = JoinMap::new();
spawn_index_tasks(&mut map, N, Some(&local));
assert!(map.join_next().now_or_never().is_none());
local
.run_until(async move {
drain_joinmap_and_assert(map, N).await;
})
.await;
}
}
mod local_set {
use super::*;
/// Spawn several tasks, and then join all tasks.
#[tokio::test(flavor = "current_thread")]
async fn spawn_then_join_next() {
const N: usize = 8;
let local = LocalSet::new();
let mut pending_map = JoinMap::new();
spawn_index_tasks(&mut pending_map, N, Some(&local));
assert!(pending_map.join_next().now_or_never().is_none());
local
.run_until(async move {
drain_joinmap_and_assert(pending_map, N).await;
})
.await;
}
/// Spawn several pending-forever tasks, and then shutdown the [`JoinMap`].
#[tokio::test(flavor = "current_thread")]
async fn spawn_then_shutdown() {
const N: usize = 8;
let local = LocalSet::new();
let mut map = JoinMap::new();
let mut receivers = Vec::new();
spawn_pending_tasks(&mut map, &mut receivers, N, Some(&local));
assert!(map.join_next().now_or_never().is_none());
local
.run_until(async move {
map.shutdown().await;
assert!(map.is_empty());
await_receivers_and_assert(receivers).await;
})
.await;
}
/// Spawn several pending-forever tasks and then drop the [`JoinMap`]
/// before the `LocalSet` is driven and while the `LocalSet` is already driven.
#[tokio::test(flavor = "current_thread")]
async fn spawn_then_drop() {
const N: usize = 8;
{
let local = LocalSet::new();
let mut map = JoinMap::new();
let mut receivers = Vec::new();
spawn_pending_tasks(&mut map, &mut receivers, N, Some(&local));
assert!(map.join_next().now_or_never().is_none());
drop(map);
local
.run_until(async move { await_receivers_and_assert(receivers).await })
.await;
}
{
let local = LocalSet::new();
let mut map = JoinMap::new();
let mut receivers = Vec::new();
spawn_pending_tasks(&mut map, &mut receivers, N, Some(&local));
assert!(map.join_next().now_or_never().is_none());
local
.run_until(async move {
drop(map);
await_receivers_and_assert(receivers).await;
})
.await;
}
}
}
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/poll_semaphore.rs | tokio-util/tests/poll_semaphore.rs | use std::future::Future;
use std::sync::Arc;
use std::task::Poll;
use tokio::sync::{OwnedSemaphorePermit, Semaphore};
use tokio_util::sync::PollSemaphore;
type SemRet = Option<OwnedSemaphorePermit>;
fn semaphore_poll(
sem: &mut PollSemaphore,
) -> tokio_test::task::Spawn<impl Future<Output = SemRet> + '_> {
let fut = std::future::poll_fn(move |cx| sem.poll_acquire(cx));
tokio_test::task::spawn(fut)
}
fn semaphore_poll_many(
sem: &mut PollSemaphore,
permits: u32,
) -> tokio_test::task::Spawn<impl Future<Output = SemRet> + '_> {
let fut = std::future::poll_fn(move |cx| sem.poll_acquire_many(cx, permits));
tokio_test::task::spawn(fut)
}
#[tokio::test]
async fn it_works() {
let sem = Arc::new(Semaphore::new(1));
let mut poll_sem = PollSemaphore::new(sem.clone());
let permit = sem.acquire().await.unwrap();
let mut poll = semaphore_poll(&mut poll_sem);
assert!(poll.poll().is_pending());
drop(permit);
assert!(matches!(poll.poll(), Poll::Ready(Some(_))));
drop(poll);
sem.close();
assert!(semaphore_poll(&mut poll_sem).await.is_none());
// Check that it is fused.
assert!(semaphore_poll(&mut poll_sem).await.is_none());
assert!(semaphore_poll(&mut poll_sem).await.is_none());
}
#[tokio::test]
async fn can_acquire_many_permits() {
let sem = Arc::new(Semaphore::new(4));
let mut poll_sem = PollSemaphore::new(sem.clone());
let permit1 = semaphore_poll(&mut poll_sem).poll();
assert!(matches!(permit1, Poll::Ready(Some(_))));
let permit2 = semaphore_poll_many(&mut poll_sem, 2).poll();
assert!(matches!(permit2, Poll::Ready(Some(_))));
assert_eq!(sem.available_permits(), 1);
drop(permit2);
let mut permit4 = semaphore_poll_many(&mut poll_sem, 4);
assert!(permit4.poll().is_pending());
drop(permit1);
let permit4 = permit4.poll();
assert!(matches!(permit4, Poll::Ready(Some(_))));
assert_eq!(sem.available_permits(), 0);
}
#[tokio::test]
async fn can_poll_different_amounts_of_permits() {
let sem = Arc::new(Semaphore::new(4));
let mut poll_sem = PollSemaphore::new(sem.clone());
assert!(semaphore_poll_many(&mut poll_sem, 5).poll().is_pending());
assert!(semaphore_poll_many(&mut poll_sem, 4).poll().is_ready());
let permit = sem.acquire_many(4).await.unwrap();
assert!(semaphore_poll_many(&mut poll_sem, 5).poll().is_pending());
assert!(semaphore_poll_many(&mut poll_sem, 4).poll().is_pending());
drop(permit);
assert!(semaphore_poll_many(&mut poll_sem, 5).poll().is_pending());
assert!(semaphore_poll_many(&mut poll_sem, 4).poll().is_ready());
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/reusable_box.rs | tokio-util/tests/reusable_box.rs | use futures::future::FutureExt;
use std::alloc::Layout;
use std::future::Future;
use std::marker::PhantomPinned;
use std::pin::Pin;
use std::rc::Rc;
use std::task::{Context, Poll};
use tokio_util::sync::ReusableBoxFuture;
#[test]
// Clippy false positive; it's useful to be able to test the trait impls for any lifetime
#[allow(clippy::extra_unused_lifetimes)]
fn traits<'a>() {
fn assert_traits<T: Send + Sync + Unpin>() {}
// Use a type that is !Unpin
assert_traits::<ReusableBoxFuture<'a, PhantomPinned>>();
// Use a type that is !Send + !Sync
assert_traits::<ReusableBoxFuture<'a, Rc<()>>>();
}
#[test]
fn test_different_futures() {
let fut = async move { 10 };
// Not zero sized!
assert_eq!(Layout::for_value(&fut).size(), 1);
let mut b = ReusableBoxFuture::new(fut);
assert_eq!(b.get_pin().now_or_never(), Some(10));
b.try_set(async move { 20 })
.unwrap_or_else(|_| panic!("incorrect size"));
assert_eq!(b.get_pin().now_or_never(), Some(20));
b.try_set(async move { 30 })
.unwrap_or_else(|_| panic!("incorrect size"));
assert_eq!(b.get_pin().now_or_never(), Some(30));
}
#[test]
fn test_different_sizes() {
let fut1 = async move { 10 };
let val = [0u32; 1000];
let fut2 = async move { val[0] };
let fut3 = ZeroSizedFuture {};
assert_eq!(Layout::for_value(&fut1).size(), 1);
assert_eq!(Layout::for_value(&fut2).size(), 4004);
assert_eq!(Layout::for_value(&fut3).size(), 0);
let mut b = ReusableBoxFuture::new(fut1);
assert_eq!(b.get_pin().now_or_never(), Some(10));
b.set(fut2);
assert_eq!(b.get_pin().now_or_never(), Some(0));
b.set(fut3);
assert_eq!(b.get_pin().now_or_never(), Some(5));
}
struct ZeroSizedFuture {}
impl Future for ZeroSizedFuture {
type Output = u32;
fn poll(self: Pin<&mut Self>, _cx: &mut Context<'_>) -> Poll<u32> {
Poll::Ready(5)
}
}
#[test]
fn test_zero_sized() {
let fut = ZeroSizedFuture {};
// Zero sized!
assert_eq!(Layout::for_value(&fut).size(), 0);
let mut b = ReusableBoxFuture::new(fut);
assert_eq!(b.get_pin().now_or_never(), Some(5));
assert_eq!(b.get_pin().now_or_never(), Some(5));
b.try_set(ZeroSizedFuture {})
.unwrap_or_else(|_| panic!("incorrect size"));
assert_eq!(b.get_pin().now_or_never(), Some(5));
assert_eq!(b.get_pin().now_or_never(), Some(5));
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
tokio-rs/tokio | https://github.com/tokio-rs/tokio/blob/41d1877689f8669902b003a6affce60bdfeb3025/tokio-util/tests/compat.rs | tokio-util/tests/compat.rs | #![cfg(feature = "compat")]
#![cfg(not(target_os = "wasi"))] // WASI does not support all fs operations
#![warn(rust_2018_idioms)]
use futures_io::SeekFrom;
use futures_util::{AsyncReadExt, AsyncSeekExt, AsyncWriteExt};
use tempfile::NamedTempFile;
use tokio::fs::OpenOptions;
use tokio_util::compat::TokioAsyncWriteCompatExt;
#[tokio::test]
async fn compat_file_seek() -> futures_util::io::Result<()> {
let temp_file = NamedTempFile::new()?;
let mut file = OpenOptions::new()
.read(true)
.write(true)
.create(true)
.truncate(true)
.open(temp_file)
.await?
.compat_write();
file.write_all(&[0, 1, 2, 3, 4, 5]).await?;
file.write_all(&[6, 7]).await?;
assert_eq!(file.stream_position().await?, 8);
// Modify elements at position 2.
assert_eq!(file.seek(SeekFrom::Start(2)).await?, 2);
file.write_all(&[8, 9]).await?;
file.flush().await?;
// Verify we still have 8 elements.
assert_eq!(file.seek(SeekFrom::End(0)).await?, 8);
// Seek back to the start of the file to read and verify contents.
file.seek(SeekFrom::Start(0)).await?;
let mut buf = Vec::new();
let num_bytes = file.read_to_end(&mut buf).await?;
assert_eq!(&buf[..num_bytes], &[0, 1, 8, 9, 4, 5, 6, 7]);
Ok(())
}
| rust | MIT | 41d1877689f8669902b003a6affce60bdfeb3025 | 2026-01-04T15:33:40.250594Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/attribute.rs | src/attribute.rs | use super::*;
#[allow(clippy::large_enum_variant)]
#[derive(
EnumDiscriminants, PartialEq, Debug, Clone, Serialize, Ord, PartialOrd, Eq, IntoStaticStr,
)]
#[strum(serialize_all = "kebab-case")]
#[serde(rename_all = "kebab-case")]
#[strum_discriminants(name(AttributeDiscriminant))]
#[strum_discriminants(derive(EnumString, Ord, PartialOrd))]
#[strum_discriminants(strum(serialize_all = "kebab-case"))]
pub(crate) enum Attribute<'src> {
Arg {
help: Option<StringLiteral<'src>>,
long: Option<StringLiteral<'src>>,
#[serde(skip)]
long_key: Option<Token<'src>>,
name: StringLiteral<'src>,
pattern: Option<Pattern<'src>>,
short: Option<StringLiteral<'src>>,
value: Option<StringLiteral<'src>>,
},
Confirm(Option<StringLiteral<'src>>),
Default,
Doc(Option<StringLiteral<'src>>),
ExitMessage,
Extension(StringLiteral<'src>),
Group(StringLiteral<'src>),
Linux,
Macos,
Metadata(Vec<StringLiteral<'src>>),
NoCd,
NoExitMessage,
NoQuiet,
Openbsd,
Parallel,
PositionalArguments,
Private,
Script(Option<Interpreter<StringLiteral<'src>>>),
Unix,
Windows,
WorkingDirectory(StringLiteral<'src>),
}
impl AttributeDiscriminant {
fn argument_range(self) -> RangeInclusive<usize> {
match self {
Self::Default
| Self::ExitMessage
| Self::Linux
| Self::Macos
| Self::NoCd
| Self::NoExitMessage
| Self::NoQuiet
| Self::Openbsd
| Self::Parallel
| Self::PositionalArguments
| Self::Private
| Self::Unix
| Self::Windows => 0..=0,
Self::Confirm | Self::Doc => 0..=1,
Self::Script => 0..=usize::MAX,
Self::Arg | Self::Extension | Self::Group | Self::WorkingDirectory => 1..=1,
Self::Metadata => 1..=usize::MAX,
}
}
}
impl<'src> Attribute<'src> {
fn check_option_name(
parameter: &StringLiteral<'src>,
literal: &StringLiteral<'src>,
) -> CompileResult<'src> {
if literal.cooked.contains('=') {
return Err(
literal
.token
.error(CompileErrorKind::OptionNameContainsEqualSign {
parameter: parameter.cooked.clone(),
}),
);
}
if literal.cooked.is_empty() {
return Err(literal.token.error(CompileErrorKind::OptionNameEmpty {
parameter: parameter.cooked.clone(),
}));
}
Ok(())
}
pub(crate) fn new(
name: Name<'src>,
arguments: Vec<StringLiteral<'src>>,
mut keyword_arguments: BTreeMap<&'src str, (Name<'src>, Option<StringLiteral<'src>>)>,
) -> CompileResult<'src, Self> {
let discriminant = name
.lexeme()
.parse::<AttributeDiscriminant>()
.map_err(|_| {
name.error(CompileErrorKind::UnknownAttribute {
attribute: name.lexeme(),
})
})?;
let found = arguments.len();
let range = discriminant.argument_range();
if !range.contains(&found) {
return Err(
name.error(CompileErrorKind::AttributeArgumentCountMismatch {
attribute: name,
found,
min: *range.start(),
max: *range.end(),
}),
);
}
let attribute = match discriminant {
AttributeDiscriminant::Arg => {
let arg = arguments.into_iter().next().unwrap();
let (long, long_key) = keyword_arguments
.remove("long")
.map(|(name, literal)| {
if let Some(literal) = literal {
Self::check_option_name(&arg, &literal)?;
Ok((Some(literal), None))
} else {
Ok((Some(arg.clone()), Some(*name)))
}
})
.transpose()?
.unwrap_or((None, None));
let short = Self::remove_required(&mut keyword_arguments, "short")?
.map(|(_key, literal)| {
Self::check_option_name(&arg, &literal)?;
if literal.cooked.chars().count() != 1 {
return Err(literal.token.error(
CompileErrorKind::ShortOptionWithMultipleCharacters {
parameter: arg.cooked.clone(),
},
));
}
Ok(literal)
})
.transpose()?;
let pattern = Self::remove_required(&mut keyword_arguments, "pattern")?
.map(|(_key, literal)| Pattern::new(&literal))
.transpose()?;
let value = Self::remove_required(&mut keyword_arguments, "value")?
.map(|(key, literal)| {
if long.is_none() && short.is_none() {
return Err(key.error(CompileErrorKind::ArgAttributeValueRequiresOption));
}
Ok(literal)
})
.transpose()?;
let help =
Self::remove_required(&mut keyword_arguments, "help")?.map(|(_key, literal)| literal);
Self::Arg {
help,
long,
long_key,
name: arg,
pattern,
short,
value,
}
}
AttributeDiscriminant::Confirm => Self::Confirm(arguments.into_iter().next()),
AttributeDiscriminant::Default => Self::Default,
AttributeDiscriminant::Doc => Self::Doc(arguments.into_iter().next()),
AttributeDiscriminant::ExitMessage => Self::ExitMessage,
AttributeDiscriminant::Extension => Self::Extension(arguments.into_iter().next().unwrap()),
AttributeDiscriminant::Group => Self::Group(arguments.into_iter().next().unwrap()),
AttributeDiscriminant::Linux => Self::Linux,
AttributeDiscriminant::Macos => Self::Macos,
AttributeDiscriminant::Metadata => Self::Metadata(arguments),
AttributeDiscriminant::NoCd => Self::NoCd,
AttributeDiscriminant::NoExitMessage => Self::NoExitMessage,
AttributeDiscriminant::NoQuiet => Self::NoQuiet,
AttributeDiscriminant::Openbsd => Self::Openbsd,
AttributeDiscriminant::Parallel => Self::Parallel,
AttributeDiscriminant::PositionalArguments => Self::PositionalArguments,
AttributeDiscriminant::Private => Self::Private,
AttributeDiscriminant::Script => Self::Script({
let mut arguments = arguments.into_iter();
arguments.next().map(|command| Interpreter {
command,
arguments: arguments.collect(),
})
}),
AttributeDiscriminant::Unix => Self::Unix,
AttributeDiscriminant::Windows => Self::Windows,
AttributeDiscriminant::WorkingDirectory => {
Self::WorkingDirectory(arguments.into_iter().next().unwrap())
}
};
if let Some((_name, (keyword_name, _literal))) = keyword_arguments.into_iter().next() {
return Err(
keyword_name.error(CompileErrorKind::UnknownAttributeKeyword {
attribute: name.lexeme(),
keyword: keyword_name.lexeme(),
}),
);
}
Ok(attribute)
}
fn remove_required(
keyword_arguments: &mut BTreeMap<&'src str, (Name<'src>, Option<StringLiteral<'src>>)>,
key: &'src str,
) -> CompileResult<'src, Option<(Name<'src>, StringLiteral<'src>)>> {
let Some((key, literal)) = keyword_arguments.remove(key) else {
return Ok(None);
};
let literal =
literal.ok_or_else(|| key.error(CompileErrorKind::AttributeKeyMissingValue { key }))?;
Ok(Some((key, literal)))
}
pub(crate) fn discriminant(&self) -> AttributeDiscriminant {
self.into()
}
pub(crate) fn name(&self) -> &'static str {
self.into()
}
pub(crate) fn repeatable(&self) -> bool {
matches!(
self,
Attribute::Arg { .. } | Attribute::Group(_) | Attribute::Metadata(_),
)
}
}
impl Display for Attribute<'_> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
write!(f, "{}", self.name())?;
match self {
Self::Arg {
help,
long,
long_key: _,
name,
pattern,
short,
value,
} => {
write!(f, "({name}")?;
if let Some(long) = long {
write!(f, ", long={long}")?;
}
if let Some(short) = short {
write!(f, ", short={short}")?;
}
if let Some(pattern) = pattern {
write!(f, ", pattern={}", pattern.token.lexeme())?;
}
if let Some(value) = value {
write!(f, ", value={value}")?;
}
if let Some(help) = help {
write!(f, ", help={help}")?;
}
write!(f, ")")?;
}
Self::Confirm(None)
| Self::Default
| Self::Doc(None)
| Self::ExitMessage
| Self::Linux
| Self::Macos
| Self::NoCd
| Self::NoExitMessage
| Self::NoQuiet
| Self::Openbsd
| Self::Parallel
| Self::PositionalArguments
| Self::Private
| Self::Script(None)
| Self::Unix
| Self::Windows => {}
Self::Confirm(Some(argument))
| Self::Doc(Some(argument))
| Self::Extension(argument)
| Self::Group(argument)
| Self::WorkingDirectory(argument) => write!(f, "({argument})")?,
Self::Metadata(arguments) => {
write!(f, "(")?;
for (i, argument) in arguments.iter().enumerate() {
if i > 0 {
write!(f, ", ")?;
}
write!(f, "{argument}")?;
}
write!(f, ")")?;
}
Self::Script(Some(shell)) => write!(f, "({shell})")?,
}
Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn name() {
assert_eq!(Attribute::NoExitMessage.name(), "no-exit-message");
}
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/recipe_resolver.rs | src/recipe_resolver.rs | use {super::*, CompileErrorKind::*};
pub(crate) struct RecipeResolver<'src: 'run, 'run> {
assignments: &'run Table<'src, Assignment<'src>>,
module_path: &'run str,
modules: &'run Table<'src, Justfile<'src>>,
resolved_recipes: Table<'src, Arc<Recipe<'src>>>,
unresolved_recipes: Table<'src, UnresolvedRecipe<'src>>,
}
impl<'src: 'run, 'run> RecipeResolver<'src, 'run> {
pub(crate) fn resolve_recipes(
assignments: &'run Table<'src, Assignment<'src>>,
module_path: &'run str,
modules: &'run Table<'src, Justfile<'src>>,
settings: &Settings,
unresolved_recipes: Table<'src, UnresolvedRecipe<'src>>,
) -> CompileResult<'src, Table<'src, Arc<Recipe<'src>>>> {
let mut resolver = Self {
assignments,
module_path,
modules,
resolved_recipes: Table::new(),
unresolved_recipes,
};
while let Some(unresolved) = resolver.unresolved_recipes.pop() {
resolver.resolve_recipe(&mut Vec::new(), unresolved)?;
}
for recipe in resolver.resolved_recipes.values() {
for (i, parameter) in recipe.parameters.iter().enumerate() {
if let Some(expression) = ¶meter.default {
for variable in expression.variables() {
resolver.resolve_variable(&variable, &recipe.parameters[..i])?;
}
}
}
for dependency in &recipe.dependencies {
for group in &dependency.arguments {
for argument in group {
for variable in argument.variables() {
resolver.resolve_variable(&variable, &recipe.parameters)?;
}
}
}
}
for line in &recipe.body {
if line.is_comment() && settings.ignore_comments {
continue;
}
for fragment in &line.fragments {
if let Fragment::Interpolation { expression, .. } = fragment {
for variable in expression.variables() {
resolver.resolve_variable(&variable, &recipe.parameters)?;
}
}
}
}
}
Ok(resolver.resolved_recipes)
}
fn resolve_variable(
&self,
variable: &Token<'src>,
parameters: &[Parameter],
) -> CompileResult<'src> {
let name = variable.lexeme();
let defined = self.assignments.contains_key(name)
|| parameters.iter().any(|p| p.name.lexeme() == name)
|| constants().contains_key(name);
if !defined {
return Err(variable.error(UndefinedVariable { variable: name }));
}
Ok(())
}
fn resolve_recipe(
&mut self,
stack: &mut Vec<&'src str>,
recipe: UnresolvedRecipe<'src>,
) -> CompileResult<'src, Arc<Recipe<'src>>> {
if let Some(resolved) = self.resolved_recipes.get(recipe.name()) {
return Ok(Arc::clone(resolved));
}
stack.push(recipe.name());
let dependencies = recipe
.dependencies
.iter()
.map(|dependency| {
self
.resolve_dependency(dependency, &recipe, stack)?
.ok_or_else(|| {
dependency.recipe.last().error(UnknownDependency {
recipe: recipe.name(),
unknown: dependency.recipe.clone(),
})
})
})
.collect::<CompileResult<Vec<Arc<Recipe>>>>()?;
stack.pop();
let resolved = Arc::new(recipe.resolve(self.module_path, dependencies)?);
self.resolved_recipes.insert(Arc::clone(&resolved));
Ok(resolved)
}
fn resolve_dependency(
&mut self,
dependency: &UnresolvedDependency<'src>,
recipe: &UnresolvedRecipe<'src>,
stack: &mut Vec<&'src str>,
) -> CompileResult<'src, Option<Arc<Recipe<'src>>>> {
let name = dependency.recipe.last().lexeme();
if dependency.recipe.components() > 1 {
// recipe is in a submodule and is thus already resovled
Ok(Analyzer::resolve_recipe(
&dependency.recipe,
self.modules,
&self.resolved_recipes,
))
} else if let Some(resolved) = self.resolved_recipes.get(name) {
// recipe is the current module and has already been resolved
Ok(Some(Arc::clone(resolved)))
} else if stack.contains(&name) {
// recipe depends on itself
let first = stack[0];
stack.push(first);
Err(
dependency.recipe.last().error(CircularRecipeDependency {
recipe: recipe.name(),
circle: stack
.iter()
.skip_while(|name| **name != dependency.recipe.last().lexeme())
.copied()
.collect(),
}),
)
} else if let Some(unresolved) = self.unresolved_recipes.remove(name) {
// recipe is as of yet unresolved
Ok(Some(self.resolve_recipe(stack, unresolved)?))
} else {
// recipe is unknown
Ok(None)
}
}
}
#[cfg(test)]
mod tests {
use super::*;
analysis_error! {
name: circular_recipe_dependency,
input: "a: b\nb: a",
offset: 8,
line: 1,
column: 3,
width: 1,
kind: CircularRecipeDependency{recipe: "b", circle: vec!["a", "b", "a"]},
}
analysis_error! {
name: self_recipe_dependency,
input: "a: a",
offset: 3,
line: 0,
column: 3,
width: 1,
kind: CircularRecipeDependency{recipe: "a", circle: vec!["a", "a"]},
}
analysis_error! {
name: unknown_dependency,
input: "a: b",
offset: 3,
line: 0,
column: 3,
width: 1,
kind: UnknownDependency{
recipe: "a",
unknown: Namepath::from(Name::from_identifier(
Token{
column: 3,
kind: TokenKind::Identifier,
length: 1,
line: 0,
offset: 3,
path: Path::new("justfile"),
src: "a: b" }))
},
}
analysis_error! {
name: unknown_interpolation_variable,
input: "x:\n {{ hello}}",
offset: 9,
line: 1,
column: 6,
width: 5,
kind: UndefinedVariable{variable: "hello"},
}
analysis_error! {
name: unknown_second_interpolation_variable,
input: "wtf:=\"x\"\nx:\n echo\n foo {{wtf}} {{ lol }}",
offset: 34,
line: 3,
column: 16,
width: 3,
kind: UndefinedVariable{variable: "lol"},
}
analysis_error! {
name: unknown_variable_in_default,
input: "a f=foo:",
offset: 4,
line: 0,
column: 4,
width: 3,
kind: UndefinedVariable{variable: "foo"},
}
analysis_error! {
name: unknown_variable_in_dependency_argument,
input: "bar x:\nfoo: (bar baz)",
offset: 17,
line: 1,
column: 10,
width: 3,
kind: UndefinedVariable{variable: "baz"},
}
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/config.rs | src/config.rs | use {
super::*,
clap::{
builder::{
styling::{AnsiColor, Effects},
FalseyValueParser, Styles,
},
parser::ValuesRef,
value_parser, Arg, ArgAction, ArgGroup, ArgMatches, Command,
},
};
#[derive(Debug, Default, PartialEq)]
pub(crate) struct Config {
pub(crate) alias_style: AliasStyle,
pub(crate) allow_missing: bool,
pub(crate) ceiling: Option<PathBuf>,
pub(crate) check: bool,
pub(crate) color: Color,
pub(crate) command_color: Option<ansi_term::Color>,
pub(crate) cygpath: PathBuf,
pub(crate) dotenv_filename: Option<String>,
pub(crate) dotenv_path: Option<PathBuf>,
pub(crate) dry_run: bool,
pub(crate) dump_format: DumpFormat,
pub(crate) explain: bool,
pub(crate) highlight: bool,
pub(crate) invocation_directory: PathBuf,
pub(crate) list_heading: String,
pub(crate) list_prefix: String,
pub(crate) list_submodules: bool,
pub(crate) load_dotenv: bool,
pub(crate) no_aliases: bool,
pub(crate) no_dependencies: bool,
pub(crate) one: bool,
pub(crate) overrides: BTreeMap<String, String>,
pub(crate) search_config: SearchConfig,
pub(crate) shell: Option<String>,
pub(crate) shell_args: Option<Vec<String>>,
pub(crate) shell_command: bool,
pub(crate) subcommand: Subcommand,
pub(crate) tempdir: Option<PathBuf>,
pub(crate) timestamp: bool,
pub(crate) timestamp_format: String,
pub(crate) unsorted: bool,
pub(crate) unstable: bool,
pub(crate) verbosity: Verbosity,
pub(crate) yes: bool,
}
mod cmd {
pub(crate) const CHANGELOG: &str = "CHANGELOG";
pub(crate) const CHOOSE: &str = "CHOOSE";
pub(crate) const COMMAND: &str = "COMMAND";
pub(crate) const COMPLETIONS: &str = "COMPLETIONS";
pub(crate) const DUMP: &str = "DUMP";
pub(crate) const EDIT: &str = "EDIT";
pub(crate) const EVALUATE: &str = "EVALUATE";
pub(crate) const FORMAT: &str = "FORMAT";
pub(crate) const GROUPS: &str = "GROUPS";
pub(crate) const INIT: &str = "INIT";
pub(crate) const LIST: &str = "LIST";
pub(crate) const MAN: &str = "MAN";
pub(crate) const REQUEST: &str = "REQUEST";
pub(crate) const SHOW: &str = "SHOW";
pub(crate) const SUMMARY: &str = "SUMMARY";
pub(crate) const USAGE: &str = "USAGE";
pub(crate) const VARIABLES: &str = "VARIABLES";
pub(crate) const ALL: &[&str] = &[
CHANGELOG,
CHOOSE,
COMMAND,
COMPLETIONS,
DUMP,
EDIT,
EVALUATE,
FORMAT,
INIT,
LIST,
MAN,
REQUEST,
SHOW,
SUMMARY,
VARIABLES,
];
pub(crate) const ARGLESS: &[&str] =
&[CHANGELOG, DUMP, EDIT, FORMAT, INIT, MAN, SUMMARY, VARIABLES];
pub(crate) const HEADING: &str = "Commands";
}
mod arg {
pub(crate) const ALIAS_STYLE: &str = "ALIAS_STYLE";
pub(crate) const ALLOW_MISSING: &str = "ALLOW-MISSING";
pub(crate) const ARGUMENTS: &str = "ARGUMENTS";
pub(crate) const CEILING: &str = "CEILING";
pub(crate) const CHECK: &str = "CHECK";
pub(crate) const CHOOSER: &str = "CHOOSER";
pub(crate) const CLEAR_SHELL_ARGS: &str = "CLEAR-SHELL-ARGS";
pub(crate) const COLOR: &str = "COLOR";
pub(crate) const COMMAND_COLOR: &str = "COMMAND-COLOR";
pub(crate) const CYGPATH: &str = "CYGPATH";
pub(crate) const DOTENV_FILENAME: &str = "DOTENV-FILENAME";
pub(crate) const DOTENV_PATH: &str = "DOTENV-PATH";
pub(crate) const DRY_RUN: &str = "DRY-RUN";
pub(crate) const DUMP_FORMAT: &str = "DUMP-FORMAT";
pub(crate) const EXPLAIN: &str = "EXPLAIN";
pub(crate) const GLOBAL_JUSTFILE: &str = "GLOBAL-JUSTFILE";
pub(crate) const HIGHLIGHT: &str = "HIGHLIGHT";
pub(crate) const JUSTFILE: &str = "JUSTFILE";
pub(crate) const LIST_HEADING: &str = "LIST-HEADING";
pub(crate) const LIST_PREFIX: &str = "LIST-PREFIX";
pub(crate) const LIST_SUBMODULES: &str = "LIST-SUBMODULES";
pub(crate) const NO_ALIASES: &str = "NO-ALIASES";
pub(crate) const NO_DEPS: &str = "NO-DEPS";
pub(crate) const NO_DOTENV: &str = "NO-DOTENV";
pub(crate) const NO_HIGHLIGHT: &str = "NO-HIGHLIGHT";
pub(crate) const ONE: &str = "ONE";
pub(crate) const QUIET: &str = "QUIET";
pub(crate) const SET: &str = "SET";
pub(crate) const SHELL: &str = "SHELL";
pub(crate) const SHELL_ARG: &str = "SHELL-ARG";
pub(crate) const SHELL_COMMAND: &str = "SHELL-COMMAND";
pub(crate) const TEMPDIR: &str = "TEMPDIR";
pub(crate) const TIMESTAMP: &str = "TIMESTAMP";
pub(crate) const TIMESTAMP_FORMAT: &str = "TIMESTAMP-FORMAT";
pub(crate) const UNSORTED: &str = "UNSORTED";
pub(crate) const UNSTABLE: &str = "UNSTABLE";
pub(crate) const VERBOSE: &str = "VERBOSE";
pub(crate) const WORKING_DIRECTORY: &str = "WORKING-DIRECTORY";
pub(crate) const YES: &str = "YES";
}
impl Config {
pub(crate) fn app() -> Command {
Command::new(env!("CARGO_PKG_NAME"))
.bin_name(env!("CARGO_PKG_NAME"))
.version(env!("CARGO_PKG_VERSION"))
.author(env!("CARGO_PKG_AUTHORS"))
.about(concat!(
env!("CARGO_PKG_DESCRIPTION"),
" - ",
env!("CARGO_PKG_HOMEPAGE")
))
.trailing_var_arg(true)
.styles(
Styles::styled()
.error(AnsiColor::Red.on_default() | Effects::BOLD)
.header(AnsiColor::Yellow.on_default() | Effects::BOLD)
.invalid(AnsiColor::Red.on_default())
.literal(AnsiColor::Green.on_default())
.placeholder(AnsiColor::Cyan.on_default())
.usage(AnsiColor::Yellow.on_default() | Effects::BOLD)
.valid(AnsiColor::Green.on_default()),
)
.arg(
Arg::new(arg::ALIAS_STYLE)
.long("alias-style")
.env("JUST_ALIAS_STYLE")
.action(ArgAction::Set)
.value_parser(clap::value_parser!(AliasStyle))
.default_value("right")
.help("Set list command alias display style")
.conflicts_with(arg::NO_ALIASES),
)
.arg(
Arg::new(arg::CEILING)
.long("ceiling")
.env("JUST_CEILING")
.action(ArgAction::Set)
.value_parser(value_parser!(PathBuf))
.help("Do not ascend above <CEILING> directory when searching for a justfile."),
)
.arg(
Arg::new(arg::CHECK)
.long("check")
.action(ArgAction::SetTrue)
.requires(cmd::FORMAT)
.help(
"Run `--fmt` in 'check' mode. Exits with 0 if justfile is formatted correctly. \
Exits with 1 and prints a diff if formatting is required.",
),
)
.arg(
Arg::new(arg::CHOOSER)
.long("chooser")
.env("JUST_CHOOSER")
.action(ArgAction::Set)
.help("Override binary invoked by `--choose`"),
)
.arg(
Arg::new(arg::CLEAR_SHELL_ARGS)
.long("clear-shell-args")
.action(ArgAction::SetTrue)
.overrides_with(arg::SHELL_ARG)
.help("Clear shell arguments"),
)
.arg(
Arg::new(arg::COLOR)
.long("color")
.env("JUST_COLOR")
.action(ArgAction::Set)
.value_parser(clap::value_parser!(UseColor))
.default_value("auto")
.help("Print colorful output"),
)
.arg(
Arg::new(arg::COMMAND_COLOR)
.long("command-color")
.env("JUST_COMMAND_COLOR")
.action(ArgAction::Set)
.value_parser(clap::value_parser!(CommandColor))
.help("Echo recipe lines in <COMMAND-COLOR>"),
)
.arg(
Arg::new(arg::CYGPATH)
.long("cygpath")
.env("JUST_CYGPATH")
.action(ArgAction::Set)
.value_parser(value_parser!(PathBuf))
.default_value("cygpath")
.help("Use binary at <CYGPATH> to convert between unix and Windows paths."),
)
.arg(
Arg::new(arg::DOTENV_FILENAME)
.long("dotenv-filename")
.action(ArgAction::Set)
.help("Search for environment file named <DOTENV-FILENAME> instead of `.env`")
.conflicts_with(arg::DOTENV_PATH),
)
.arg(
Arg::new(arg::DOTENV_PATH)
.short('E')
.long("dotenv-path")
.action(ArgAction::Set)
.value_parser(value_parser!(PathBuf))
.help("Load <DOTENV-PATH> as environment file instead of searching for one"),
)
.arg(
Arg::new(arg::DRY_RUN)
.short('n')
.long("dry-run")
.env("JUST_DRY_RUN")
.action(ArgAction::SetTrue)
.help("Print what just would do without doing it")
.conflicts_with(arg::QUIET),
)
.arg(
Arg::new(arg::DUMP_FORMAT)
.long("dump-format")
.env("JUST_DUMP_FORMAT")
.action(ArgAction::Set)
.value_parser(clap::value_parser!(DumpFormat))
.default_value("just")
.value_name("FORMAT")
.help("Dump justfile as <FORMAT>"),
)
.arg(
Arg::new(arg::EXPLAIN)
.action(ArgAction::SetTrue)
.long("explain")
.env("JUST_EXPLAIN")
.help("Print recipe doc comment before running it"),
)
.arg(
Arg::new(arg::GLOBAL_JUSTFILE)
.action(ArgAction::SetTrue)
.long("global-justfile")
.short('g')
.conflicts_with(arg::JUSTFILE)
.conflicts_with(arg::WORKING_DIRECTORY)
.help("Use global justfile"),
)
.arg(
Arg::new(arg::HIGHLIGHT)
.long("highlight")
.env("JUST_HIGHLIGHT")
.action(ArgAction::SetTrue)
.help("Highlight echoed recipe lines in bold")
.overrides_with(arg::NO_HIGHLIGHT),
)
.arg(
Arg::new(arg::JUSTFILE)
.short('f')
.long("justfile")
.env("JUST_JUSTFILE")
.action(ArgAction::Set)
.value_parser(value_parser!(PathBuf))
.help("Use <JUSTFILE> as justfile"),
)
.arg(
Arg::new(arg::LIST_HEADING)
.long("list-heading")
.env("JUST_LIST_HEADING")
.help("Print <TEXT> before list")
.value_name("TEXT")
.default_value("Available recipes:\n")
.action(ArgAction::Set),
)
.arg(
Arg::new(arg::LIST_PREFIX)
.long("list-prefix")
.env("JUST_LIST_PREFIX")
.help("Print <TEXT> before each list item")
.value_name("TEXT")
.default_value(" ")
.action(ArgAction::Set),
)
.arg(
Arg::new(arg::LIST_SUBMODULES)
.long("list-submodules")
.env("JUST_LIST_SUBMODULES")
.help("List recipes in submodules")
.action(ArgAction::SetTrue)
.requires(cmd::LIST),
)
.arg(
Arg::new(arg::NO_ALIASES)
.long("no-aliases")
.env("JUST_NO_ALIASES")
.action(ArgAction::SetTrue)
.help("Don't show aliases in list"),
)
.arg(
Arg::new(arg::NO_DEPS)
.long("no-deps")
.env("JUST_NO_DEPS")
.alias("no-dependencies")
.action(ArgAction::SetTrue)
.help("Don't run recipe dependencies"),
)
.arg(
Arg::new(arg::NO_DOTENV)
.long("no-dotenv")
.env("JUST_NO_DOTENV")
.action(ArgAction::SetTrue)
.help("Don't load `.env` file"),
)
.arg(
Arg::new(arg::NO_HIGHLIGHT)
.long("no-highlight")
.env("JUST_NO_HIGHLIGHT")
.action(ArgAction::SetTrue)
.help("Don't highlight echoed recipe lines in bold")
.overrides_with(arg::HIGHLIGHT),
)
.arg(
Arg::new(arg::ONE)
.long("one")
.env("JUST_ONE")
.action(ArgAction::SetTrue)
.help("Forbid multiple recipes from being invoked on the command line"),
)
.arg(
Arg::new(arg::QUIET)
.short('q')
.long("quiet")
.env("JUST_QUIET")
.action(ArgAction::SetTrue)
.help("Suppress all output")
.conflicts_with(arg::DRY_RUN),
)
.arg(
Arg::new(arg::ALLOW_MISSING)
.long("allow-missing")
.env("JUST_ALLOW_MISSING")
.action(ArgAction::SetTrue)
.help("Ignore missing recipe and module errors"),
)
.arg(
Arg::new(arg::SET)
.long("set")
.action(ArgAction::Append)
.number_of_values(2)
.value_names(["VARIABLE", "VALUE"])
.help("Override <VARIABLE> with <VALUE>"),
)
.arg(
Arg::new(arg::SHELL)
.long("shell")
.action(ArgAction::Set)
.help("Invoke <SHELL> to run recipes"),
)
.arg(
Arg::new(arg::SHELL_ARG)
.long("shell-arg")
.action(ArgAction::Append)
.allow_hyphen_values(true)
.overrides_with(arg::CLEAR_SHELL_ARGS)
.help("Invoke shell with <SHELL-ARG> as an argument"),
)
.arg(
Arg::new(arg::SHELL_COMMAND)
.long("shell-command")
.requires(cmd::COMMAND)
.action(ArgAction::SetTrue)
.help("Invoke <COMMAND> with the shell used to run recipe lines and backticks"),
)
.arg(
Arg::new(arg::TEMPDIR)
.action(ArgAction::Set)
.env("JUST_TEMPDIR")
.long("tempdir")
.value_parser(value_parser!(PathBuf))
.help("Save temporary files to <TEMPDIR>."),
)
.arg(
Arg::new(arg::TIMESTAMP)
.action(ArgAction::SetTrue)
.long("timestamp")
.env("JUST_TIMESTAMP")
.help("Print recipe command timestamps"),
)
.arg(
Arg::new(arg::TIMESTAMP_FORMAT)
.action(ArgAction::Set)
.long("timestamp-format")
.env("JUST_TIMESTAMP_FORMAT")
.default_value("%H:%M:%S")
.help("Timestamp format string"),
)
.arg(
Arg::new(arg::UNSORTED)
.long("unsorted")
.env("JUST_UNSORTED")
.short('u')
.action(ArgAction::SetTrue)
.help("Return list and summary entries in source order"),
)
.arg(
Arg::new(arg::UNSTABLE)
.long("unstable")
.env("JUST_UNSTABLE")
.action(ArgAction::SetTrue)
.value_parser(FalseyValueParser::new())
.help("Enable unstable features"),
)
.arg(
Arg::new(arg::VERBOSE)
.short('v')
.long("verbose")
.env("JUST_VERBOSE")
.action(ArgAction::Count)
.help("Use verbose output"),
)
.arg(
Arg::new(arg::WORKING_DIRECTORY)
.short('d')
.long("working-directory")
.env("JUST_WORKING_DIRECTORY")
.action(ArgAction::Set)
.value_parser(value_parser!(PathBuf))
.help("Use <WORKING-DIRECTORY> as working directory. --justfile must also be set")
.requires(arg::JUSTFILE),
)
.arg(
Arg::new(arg::YES)
.long("yes")
.env("JUST_YES")
.action(ArgAction::SetTrue)
.help("Automatically confirm all recipes."),
)
.arg(
Arg::new(cmd::CHANGELOG)
.long("changelog")
.action(ArgAction::SetTrue)
.help("Print changelog")
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::CHOOSE)
.long("choose")
.action(ArgAction::SetTrue)
.help(
"Select one or more recipes to run using a binary chooser. If `--chooser` is not \
passed the chooser defaults to the value of $JUST_CHOOSER, falling back to `fzf`",
)
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::COMMAND)
.long("command")
.short('c')
.num_args(1..)
.allow_hyphen_values(true)
.action(ArgAction::Append)
.value_parser(value_parser!(std::ffi::OsString))
.help(
"Run an arbitrary command with the working directory, `.env`, overrides, and exports \
set",
)
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::COMPLETIONS)
.long("completions")
.action(ArgAction::Set)
.value_name("SHELL")
.value_parser(value_parser!(completions::Shell))
.ignore_case(true)
.help("Print shell completion script for <SHELL>")
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::DUMP)
.long("dump")
.action(ArgAction::SetTrue)
.help("Print justfile")
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::EDIT)
.short('e')
.long("edit")
.action(ArgAction::SetTrue)
.help("Edit justfile with editor given by $VISUAL or $EDITOR, falling back to `vim`")
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::EVALUATE)
.long("evaluate")
.alias("eval")
.action(ArgAction::SetTrue)
.help(
"Evaluate and print all variables. If a variable name is given as an argument, only \
print that variable's value.",
)
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::FORMAT)
.long("fmt")
.alias("format")
.action(ArgAction::SetTrue)
.help("Format and overwrite justfile")
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::GROUPS)
.long("groups")
.action(ArgAction::SetTrue)
.help("List recipe groups")
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::INIT)
.long("init")
.alias("initialize")
.action(ArgAction::SetTrue)
.help("Initialize new justfile in project root")
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::LIST)
.short('l')
.long("list")
.num_args(0..)
.value_name("MODULE")
.action(ArgAction::Set)
.conflicts_with(arg::ARGUMENTS)
.help("List available recipes in <MODULE> or root if omitted")
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::MAN)
.long("man")
.action(ArgAction::SetTrue)
.help("Print man page")
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::REQUEST)
.long("request")
.action(ArgAction::Set)
.hide(true)
.help(
"Execute <REQUEST>. For internal testing purposes only. May be changed or removed at \
any time.",
)
.help_heading(cmd::REQUEST),
)
.arg(
Arg::new(cmd::SHOW)
.short('s')
.long("show")
.num_args(1..)
.action(ArgAction::Set)
.value_name("PATH")
.conflicts_with(arg::ARGUMENTS)
.help("Show recipe at <PATH>")
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::SUMMARY)
.long("summary")
.action(ArgAction::SetTrue)
.help("List names of available recipes")
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::USAGE)
.long("usage")
.num_args(1..)
.value_name("PATH")
.action(ArgAction::Set)
.conflicts_with(arg::ARGUMENTS)
.help("Print recipe usage information")
.help_heading(cmd::HEADING),
)
.arg(
Arg::new(cmd::VARIABLES)
.long("variables")
.action(ArgAction::SetTrue)
.help("List names of variables")
.help_heading(cmd::HEADING),
)
.group(ArgGroup::new("SUBCOMMAND").args(cmd::ALL))
.arg(
Arg::new(arg::ARGUMENTS)
.num_args(1..)
.action(ArgAction::Append)
.help("Overrides and recipe(s) to run, defaulting to the first recipe in the justfile"),
)
}
/// Parse a module path from `--list`/`--show`/`--usage` values.
///
/// A single value containing spaces, e.g. `just --show 'foo bar'`, is split
/// on whitespace into path components; otherwise each value is a component.
fn parse_module_path(values: ValuesRef<String>) -> ConfigResult<ModulePath> {
  let components: Vec<&str> = values.clone().map(String::as_str).collect();

  let components = match components.as_slice() {
    [single] if single.contains(' ') => single.split_whitespace().collect(),
    _ => components,
  };

  components
    .as_slice()
    .try_into()
    .map_err(|()| ConfigError::ModulePath {
      path: values.cloned().collect(),
    })
}
/// Determine how to locate the justfile and working directory.
///
/// Precedence: `--global-justfile`, then a positional `path/to/dir` search
/// directory (which conflicts with `--justfile`/`--working-directory`), then
/// the explicit flags, falling back to searching from the invocation
/// directory. `--working-directory` without `--justfile` is rejected by clap,
/// so reaching that arm is an internal error.
fn search_config(matches: &ArgMatches, positional: &Positional) -> ConfigResult<SearchConfig> {
  if matches.get_flag(arg::GLOBAL_JUSTFILE) {
    return Ok(SearchConfig::GlobalJustfile);
  }

  let justfile = matches.get_one::<PathBuf>(arg::JUSTFILE).map(Into::into);

  let working_directory = matches
    .get_one::<PathBuf>(arg::WORKING_DIRECTORY)
    .map(Into::into);

  if let Some(search_directory) = positional.search_directory.as_ref().map(PathBuf::from) {
    if justfile.is_some() || working_directory.is_some() {
      return Err(ConfigError::SearchDirConflict);
    }
    return Ok(SearchConfig::FromSearchDirectory { search_directory });
  }

  match (justfile, working_directory) {
    (None, None) => Ok(SearchConfig::FromInvocationDirectory),
    (Some(justfile), None) => Ok(SearchConfig::WithJustfile { justfile }),
    (Some(justfile), Some(working_directory)) => {
      Ok(SearchConfig::WithJustfileAndWorkingDirectory {
        justfile,
        working_directory,
      })
    }
    (None, Some(_)) => Err(ConfigError::internal(
      "--working-directory set without --justfile",
    )),
  }
}
/// Construct a `Config` from clap's parsed `matches`.
///
/// Collects `--set` and positional overrides, determines the search
/// configuration, validates argument-less subcommands, selects the requested
/// subcommand, and finally assembles the full `Config`.
pub(crate) fn from_matches(matches: &ArgMatches) -> ConfigResult<Self> {
  // `--set NAME VALUE` values arrive as a flat list; consume them pairwise.
  let mut overrides = BTreeMap::new();
  if let Some(mut values) = matches.get_many::<String>(arg::SET) {
    while let (Some(k), Some(v)) = (values.next(), values.next()) {
      overrides.insert(k.into(), v.into());
    }
  }

  let positional = Positional::from_values(
    matches
      .get_many::<String>(arg::ARGUMENTS)
      .map(|s| s.map(String::as_str)),
  );

  // Overrides given positionally (`NAME=VALUE`) win over `--set` overrides,
  // since they are inserted afterwards.
  for (name, value) in &positional.overrides {
    overrides.insert(name.clone(), value.clone());
  }

  let search_config = Self::search_config(matches, &positional)?;

  // Subcommands that take no arguments reject stray overrides/arguments with
  // a specific error for each combination.
  for subcommand in cmd::ARGLESS {
    if matches.get_flag(subcommand) {
      match (!overrides.is_empty(), !positional.arguments.is_empty()) {
        (false, false) => {}
        (true, false) => {
          return Err(ConfigError::SubcommandOverrides {
            subcommand,
            overrides,
          });
        }
        (false, true) => {
          return Err(ConfigError::SubcommandArguments {
            arguments: positional.arguments,
            subcommand,
          });
        }
        (true, true) => {
          return Err(ConfigError::SubcommandOverridesAndArguments {
            arguments: positional.arguments,
            subcommand,
            overrides,
          });
        }
      }
    }
  }

  // Select the subcommand; falls through to `Run` when no subcommand flag is
  // present. The `SUBCOMMAND` ArgGroup ensures at most one is set.
  let subcommand = if matches.get_flag(cmd::CHANGELOG) {
    Subcommand::Changelog
  } else if matches.get_flag(cmd::CHOOSE) {
    Subcommand::Choose {
      chooser: matches.get_one::<String>(arg::CHOOSER).map(Into::into),
    }
  } else if let Some(values) = matches.get_many::<OsString>(cmd::COMMAND) {
    // First value is the binary, the rest are its arguments.
    let mut arguments = values.map(Into::into).collect::<Vec<OsString>>();
    Subcommand::Command {
      binary: arguments.remove(0),
      arguments,
    }
  } else if let Some(&shell) = matches.get_one::<completions::Shell>(cmd::COMPLETIONS) {
    Subcommand::Completions { shell }
  } else if matches.get_flag(cmd::DUMP) {
    Subcommand::Dump
  } else if matches.get_flag(cmd::EDIT) {
    Subcommand::Edit
  } else if matches.get_flag(cmd::EVALUATE) {
    // `--evaluate` accepts at most one positional argument: the variable to
    // print. Extra arguments are reported (excluding the first).
    if positional.arguments.len() > 1 {
      return Err(ConfigError::SubcommandArguments {
        subcommand: cmd::EVALUATE,
        arguments: positional
          .arguments
          .into_iter()
          .skip(1)
          .collect::<Vec<String>>(),
      });
    }
    Subcommand::Evaluate {
      variable: positional.arguments.into_iter().next(),
    }
  } else if matches.get_flag(cmd::FORMAT) {
    Subcommand::Format
  } else if matches.get_flag(cmd::GROUPS) {
    Subcommand::Groups
  } else if matches.get_flag(cmd::INIT) {
    Subcommand::Init
  } else if let Some(path) = matches.get_many::<String>(cmd::LIST) {
    Subcommand::List {
      path: Self::parse_module_path(path)?,
    }
  } else if matches.get_flag(cmd::MAN) {
    Subcommand::Man
  } else if let Some(request) = matches.get_one::<String>(cmd::REQUEST) {
    // Hidden internal testing subcommand; the request is JSON-encoded.
    Subcommand::Request {
      request: serde_json::from_str(request)
        .map_err(|source| ConfigError::RequestParse { source })?,
    }
  } else if let Some(path) = matches.get_many::<String>(cmd::SHOW) {
    Subcommand::Show {
      path: Self::parse_module_path(path)?,
    }
  } else if matches.get_flag(cmd::SUMMARY) {
    Subcommand::Summary
  } else if let Some(path) = matches.get_many::<String>(cmd::USAGE) {
    Subcommand::Usage {
      path: Self::parse_module_path(path)?,
    }
  } else if matches.get_flag(cmd::VARIABLES) {
    Subcommand::Variables
  } else {
    Subcommand::Run {
      arguments: positional.arguments,
    }
  };

  // `--summary` implies unstable.
  let unstable = matches.get_flag(arg::UNSTABLE) || subcommand == Subcommand::Summary;
  let explain = matches.get_flag(arg::EXPLAIN);

  Ok(Self {
    alias_style: matches
      .get_one::<AliasStyle>(arg::ALIAS_STYLE)
      .unwrap()
      .clone(),
    allow_missing: matches.get_flag(arg::ALLOW_MISSING),
    ceiling: matches.get_one::<PathBuf>(arg::CEILING).cloned(),
    check: matches.get_flag(arg::CHECK),
    color: (*matches.get_one::<UseColor>(arg::COLOR).unwrap()).into(),
    command_color: matches
      .get_one::<CommandColor>(arg::COMMAND_COLOR)
      .copied()
      .map(CommandColor::into),
    cygpath: matches.get_one::<PathBuf>(arg::CYGPATH).unwrap().clone(),
    dotenv_filename: matches
      .get_one::<String>(arg::DOTENV_FILENAME)
      .map(Into::into),
    dotenv_path: matches.get_one::<PathBuf>(arg::DOTENV_PATH).map(Into::into),
    dry_run: matches.get_flag(arg::DRY_RUN),
    dump_format: matches
      .get_one::<DumpFormat>(arg::DUMP_FORMAT)
      .unwrap()
      .clone(),
    explain,
    highlight: !matches.get_flag(arg::NO_HIGHLIGHT),
    invocation_directory: env::current_dir().context(config_error::CurrentDirContext)?,
    list_heading: matches.get_one::<String>(arg::LIST_HEADING).unwrap().into(),
    list_prefix: matches.get_one::<String>(arg::LIST_PREFIX).unwrap().into(),
    list_submodules: matches.get_flag(arg::LIST_SUBMODULES),
    load_dotenv: !matches.get_flag(arg::NO_DOTENV),
    no_aliases: matches.get_flag(arg::NO_ALIASES),
    no_dependencies: matches.get_flag(arg::NO_DEPS),
    one: matches.get_flag(arg::ONE),
    overrides,
    search_config,
    shell: matches.get_one::<String>(arg::SHELL).map(Into::into),
    // `--clear-shell-args` yields an empty list, overriding any `--shell-arg`s.
    shell_args: if matches.get_flag(arg::CLEAR_SHELL_ARGS) {
      Some(Vec::new())
    } else {
      matches
        .get_many::<String>(arg::SHELL_ARG)
        .map(|s| s.map(Into::into).collect())
    },
    shell_command: matches.get_flag(arg::SHELL_COMMAND),
    subcommand,
    tempdir: matches.get_one::<PathBuf>(arg::TEMPDIR).map(Into::into),
    timestamp: matches.get_flag(arg::TIMESTAMP),
    timestamp_format: matches
      .get_one::<String>(arg::TIMESTAMP_FORMAT)
      .unwrap()
      .into(),
    unsorted: matches.get_flag(arg::UNSORTED),
    unstable,
    // `--quiet` wins over any number of `--verbose` flags.
    verbosity: if matches.get_flag(arg::QUIET) {
      Verbosity::Quiet
    } else {
      Verbosity::from_flag_occurrences(matches.get_count(arg::VERBOSE))
    },
    yes: matches.get_flag(arg::YES),
  })
}
/// Succeed only when unstable features are enabled, either globally via
/// `--unstable`/`JUST_UNSTABLE` or by the justfile's `set unstable` setting;
/// otherwise report which unstable feature was required.
pub(crate) fn require_unstable(
  &self,
  justfile: &Justfile,
  unstable_feature: UnstableFeature,
) -> RunResult<'static> {
  if self.unstable || justfile.settings.unstable {
    return Ok(());
  }
  Err(Error::UnstableFeature { unstable_feature })
}
}
#[cfg(test)]
mod tests {
use {
super::*,
clap::error::{ContextKind, ContextValue},
pretty_assertions::assert_eq,
};
/// Declare a `#[test]` that parses `args` through the just CLI and asserts
/// that the resulting `Config` has the given field values, with every field
/// not listed taking its value from `testing::config(&[])`.
macro_rules! test {
  {
    name: $name:ident,
    args: [$($arg:expr),*],
    $(color: $color:expr,)?
    $(dry_run: $dry_run:expr,)?
    $(dump_format: $dump_format:expr,)?
    $(highlight: $highlight:expr,)?
    $(no_dependencies: $no_dependencies:expr,)?
    $(overrides: $overrides:expr,)?
    $(search_config: $search_config:expr,)?
    $(shell: $shell:expr,)?
    $(shell_args: $shell_args:expr,)?
    $(subcommand: $subcommand:expr,)?
    $(unsorted: $unsorted:expr,)?
    $(unstable: $unstable:expr,)?
    $(verbosity: $verbosity:expr,)?
  } => {
    #[test]
    fn $name() {
      // Prepend the binary name, as clap expects it as argv[0].
      let arguments = &[
        "just",
        $($arg,)*
      ];

      let want = Config {
        $(color: $color,)?
        $(dry_run: $dry_run,)?
        $(dump_format: $dump_format,)?
        $(highlight: $highlight,)?
        $(no_dependencies: $no_dependencies,)?
        $(overrides: $overrides,)?
        $(search_config: $search_config,)?
        $(shell: $shell,)?
        $(shell_args: $shell_args,)?
        $(subcommand: $subcommand,)?
        $(unsorted: $unsorted,)?
        $(unstable: $unstable,)?
        $(verbosity: $verbosity,)?
        ..testing::config(&[])
      };

      test(arguments, want);
    }
  }
}
/// Parse `arguments` with the just CLI and assert that the resulting `Config`
/// equals `want`. Panics (with the caller's location, via `#[track_caller]`)
/// if either parsing step fails or the configs differ.
#[track_caller]
fn test(arguments: &[&str], want: Config) {
  let app = Config::app();
  let matches = app
    .try_get_matches_from(arguments)
    .expect("argument parsing failed");
  let have = Config::from_matches(&matches).expect("config parsing failed");
  assert_eq!(have, want);
}
/// Declare a `#[test]` asserting that `args` produce an error.
///
/// The first form expects clap itself to reject the arguments; the second
/// form expects clap to accept them and `Config::from_matches` to fail with
/// an error matching `$error`, optionally running extra `$check` assertions
/// against the matched bindings.
macro_rules! error {
  {
    name: $name:ident,
    args: [$($arg:expr),*],
  } => {
    #[test]
    fn $name() {
      let arguments = &[
        "just",
        $($arg,)*
      ];

      let app = Config::app();

      app.try_get_matches_from(arguments).expect_err("Expected clap error");
    }
  };
  {
    name: $name:ident,
    args: [$($arg:expr),*],
    error: $error:pat,
    $(check: $check:block,)?
  } => {
    #[test]
    fn $name() {
      let arguments = &[
        "just",
        $($arg,)*
      ];

      let app = Config::app();

      let matches = app.try_get_matches_from(arguments).expect("Matching fails");

      match Config::from_matches(&matches).expect_err("config parsing succeeded") {
        $error => { $($check)? }
        other => panic!("Unexpected config error: {other}"),
      }
    }
  }
}
/// Declare a `#[test]` asserting that clap rejects `args` with an error
/// matching `$error`, optionally running extra `$check` assertions against
/// the matched bindings.
macro_rules! error_matches {
  (
    name: $name:ident,
    args: [$($arg:expr),*],
    error: $error:pat,
    $(check: $check:block,)?
  ) => {
    #[test]
    fn $name() {
      let arguments = &[
        "just",
        $($arg,)*
      ];

      let app = Config::app();

      match app.try_get_matches_from(arguments) {
        Err($error) => { $($check)? }
        other => panic!("Unexpected result from get matches: {other:?}")
      }
    }
  };
}
/// Build a `BTreeMap<String, String>` from `"key": "value"` literal pairs;
/// with no pairs, expands to an empty map.
macro_rules! map {
  {} => {
    BTreeMap::new()
  };
  {
    $($key:literal : $value:literal),* $(,)?
  } => {
    {
      let mut map: BTreeMap<String, String> = BTreeMap::new();
      $(
        map.insert($key.to_owned(), $value.to_owned());
      )*
      map
    }
  }
}
// Config-parsing test cases. Each `test!` invocation expands to a `#[test]`
// that parses `args` and compares the resulting `Config` against the listed
// field values; each `error!` invocation expects parsing to fail.
test! {
  name: default_config,
  args: [],
}

test! {
  name: color_default,
  args: [],
  color: Color::auto(),
}

test! {
  name: color_never,
  args: ["--color", "never"],
  color: Color::never(),
}

test! {
  name: color_always,
  args: ["--color", "always"],
  color: Color::always(),
}

test! {
  name: color_auto,
  args: ["--color", "auto"],
  color: Color::auto(),
}

error! {
  name: color_bad_value,
  args: ["--color", "foo"],
}

test! {
  name: dry_run_default,
  args: [],
  dry_run: false,
}

test! {
  name: dry_run_long,
  args: ["--dry-run"],
  dry_run: true,
}

test! {
  name: dry_run_short,
  args: ["-n"],
  dry_run: true,
}

// `--dry-run` and `--quiet` conflict at the clap level.
error! {
  name: dry_run_quiet,
  args: ["--dry-run", "--quiet"],
}

test! {
  name: highlight_default,
  args: [],
  highlight: true,
}
test! {
name: highlight_yes,
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | true |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/parameter_kind.rs | src/parameter_kind.rs | use super::*;
/// Parameters can either be…
///
/// Serialized in snake_case by variant name, e.g. `"plus"`.
#[derive(Debug, Copy, Clone, Eq, PartialEq, Serialize)]
#[serde(rename_all = "snake_case")]
pub(crate) enum ParameterKind {
  /// …variadic, accepting one or more arguments
  Plus,
  /// …singular, accepting a single argument
  Singular,
  /// …variadic, accepting zero or more arguments
  Star,
}
impl ParameterKind {
  /// Sigil written before a parameter of this kind in recipe signatures, if
  /// any: `+` for `Plus`, `*` for `Star`, none for `Singular`.
  pub(crate) fn prefix(self) -> Option<&'static str> {
    match self {
      Self::Plus => Some("+"),
      Self::Star => Some("*"),
      Self::Singular => None,
    }
  }

  /// Whether parameters of this kind accept a variable number of arguments.
  pub(crate) fn is_variadic(self) -> bool {
    matches!(self, Self::Plus | Self::Star)
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/dump_format.rs | src/dump_format.rs | use super::*;
// Output format for `just --dump`, selected via `--dump-format`.
//
// NOTE: variant comments are deliberately `//` rather than `///` — this type
// derives `ValueEnum`, and clap turns doc comments into user-visible help.
#[derive(Debug, Default, PartialEq, Clone, ValueEnum)]
pub(crate) enum DumpFormat {
  // Serialize the justfile as JSON.
  Json,
  // Print the justfile as just source (the default).
  #[default]
  Just,
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/color_display.rs | src/color_display.rs | use super::*;
/// Trait for values whose textual rendering depends on an active `Color`.
pub(crate) trait ColorDisplay {
  /// Pair `self` with `color` in a [`Wrapper`], which implements `Display` by
  /// delegating to [`ColorDisplay::fmt`].
  fn color_display(&self, color: Color) -> Wrapper
  where
    Self: Sized,
  {
    Wrapper(self, color)
  }

  /// Write `self` to `f`, using `color` for any highlighting.
  fn fmt(&self, f: &mut Formatter, color: Color) -> fmt::Result;
}
/// A `ColorDisplay` value paired with the `Color` to render it with; created
/// by `ColorDisplay::color_display`.
pub(crate) struct Wrapper<'a>(&'a dyn ColorDisplay, Color);

impl Display for Wrapper<'_> {
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    // Delegate to `ColorDisplay::fmt` with the stored color.
    self.0.fmt(f, self.1)
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/recipe.rs | src/recipe.rs | use super::*;
/// Return a `Error::Signal` if the process was terminated by a signal,
/// otherwise return an `Error::UnknownFailure`
fn error_from_signal(recipe: &str, line_number: Option<usize>, exit_status: ExitStatus) -> Error {
match Platform::signal_from_exit_status(exit_status) {
Some(signal) => Error::Signal {
recipe,
line_number,
signal,
},
None => Error::Unknown {
recipe,
line_number,
},
}
}
/// A recipe, e.g. `foo: bar baz`
///
/// `D` is the dependency type: unresolved recipes use a placeholder,
/// resolved recipes use `Dependency<'src>` (the default).
#[derive(PartialEq, Debug, Clone, Serialize)]
pub(crate) struct Recipe<'src, D = Dependency<'src>> {
  /// Attributes preceding the recipe, e.g. `[private]`
  pub(crate) attributes: AttributeSet<'src>,
  /// Lines of the recipe body
  pub(crate) body: Vec<Line<'src>>,
  /// All dependencies; the first `priors` run before the recipe, the rest after
  pub(crate) dependencies: Vec<D>,
  /// Doc comment preceding the recipe, if any
  pub(crate) doc: Option<String>,
  #[serde(skip)]
  pub(crate) file_depth: u32,
  #[serde(skip)]
  pub(crate) import_offsets: Vec<usize>,
  /// Name token of the recipe
  pub(crate) name: Name<'src>,
  /// Full `::`-separated path — presumably populated during analysis, since
  /// `namepath()` unwraps it; confirm against the analyzer
  pub(crate) namepath: Option<String>,
  /// Parameters, in declaration order
  pub(crate) parameters: Vec<Parameter<'src>>,
  /// Number of leading entries of `dependencies` that are prior dependencies
  pub(crate) priors: usize,
  pub(crate) private: bool,
  /// Rendered with a leading `@`; suppresses echoing of body lines
  pub(crate) quiet: bool,
  /// True when the recipe runs as a script (see `is_script`)
  pub(crate) shebang: bool,
}
impl Recipe<'_> {
  /// Path of the module containing this recipe: the namepath with the final
  /// `::`-separated component (the recipe name) removed. Empty for recipes in
  /// the root module.
  pub(crate) fn module_path(&self) -> &str {
    let namepath = self.namepath();
    let end = namepath.rfind("::").unwrap_or_default();
    &namepath[..end]
  }

  /// Full `::`-separated path of this recipe. Panics if unset.
  pub(crate) fn namepath(&self) -> &str {
    self.namepath.as_ref().unwrap()
  }

  /// Namepath with `::` separators replaced by single spaces.
  pub(crate) fn spaced_namepath(&self) -> String {
    self.namepath().replace("::", " ")
  }
}
impl<'src, D> Recipe<'src, D> {
  /// Inclusive range of argument counts this recipe accepts.
  pub(crate) fn argument_range(&self) -> RangeInclusive<usize> {
    self.min_arguments()..=self.max_arguments()
  }

  /// Partition `arguments` into one group per parameter, in declaration
  /// order. A variadic parameter consumes all remaining arguments; a singular
  /// parameter takes one if available, otherwise its group is empty, which is
  /// only valid when the parameter has a default.
  pub(crate) fn group_arguments(
    &self,
    arguments: &[Expression<'src>],
  ) -> Vec<Vec<Expression<'src>>> {
    let mut groups = Vec::new();
    let mut rest = arguments;
    for parameter in &self.parameters {
      let group = if parameter.kind.is_variadic() {
        // Take everything that's left, leaving `rest` empty.
        mem::take(&mut rest).into()
      } else if let Some(argument) = rest.first() {
        rest = &rest[1..];
        vec![argument.clone()]
      } else {
        debug_assert!(parameter.default.is_some());
        Vec::new()
      };
      groups.push(group);
    }
    groups
  }

  /// Minimum number of arguments: the count of required parameters.
  pub(crate) fn min_arguments(&self) -> usize {
    self.parameters.iter().filter(|p| p.is_required()).count()
  }

  /// Maximum number of arguments; effectively unbounded when any parameter is
  /// variadic.
  pub(crate) fn max_arguments(&self) -> usize {
    if self.parameters.iter().any(|p| p.kind.is_variadic()) {
      usize::MAX - 1
    } else {
      self.parameters.len()
    }
  }

  /// Recipe name as written in the source.
  pub(crate) fn name(&self) -> &'src str {
    self.name.lexeme()
  }

  /// Source line on which the recipe's name appears.
  pub(crate) fn line_number(&self) -> usize {
    self.name.line
  }

  /// If the recipe carries a `[confirm]` attribute, prompt on stderr and read
  /// the answer from stdin; returns `Ok(true)` for `y`/`yes`
  /// (case-insensitive) or when no confirmation is required.
  pub(crate) fn confirm(&self) -> RunResult<'src, bool> {
    if let Some(Attribute::Confirm(prompt)) = self.attributes.get(AttributeDiscriminant::Confirm) {
      if let Some(prompt) = prompt {
        eprint!("{} ", prompt.cooked);
      } else {
        eprint!("Run recipe `{}`? ", self.name);
      }
      let mut line = String::new();
      std::io::stdin()
        .read_line(&mut line)
        .map_err(|io_error| Error::GetConfirmation { io_error })?;
      let line = line.trim().to_lowercase();
      Ok(line == "y" || line == "yes")
    } else {
      Ok(true)
    }
  }

  /// A default recipe must be runnable with no arguments.
  pub(crate) fn check_can_be_default_recipe(&self) -> RunResult<'src, ()> {
    let min_arguments = self.min_arguments();
    if min_arguments > 0 {
      return Err(Error::DefaultRecipeRequiresArguments {
        recipe: self.name.lexeme(),
        min_arguments,
      });
    }
    Ok(())
  }

  /// True if the recipe has the `[parallel]` attribute.
  pub(crate) fn is_parallel(&self) -> bool {
    self.attributes.contains(AttributeDiscriminant::Parallel)
  }

  /// Public unless marked private, either by the `private` flag or the
  /// `[private]` attribute.
  pub(crate) fn is_public(&self) -> bool {
    !self.private && !self.attributes.contains(AttributeDiscriminant::Private)
  }

  /// True when the recipe runs as a script rather than line-by-line.
  pub(crate) fn is_script(&self) -> bool {
    self.shebang
  }

  /// Positional arguments are passed when enabled globally by the
  /// `positional-arguments` setting or per-recipe by the attribute.
  pub(crate) fn takes_positional_arguments(&self, settings: &Settings) -> bool {
    settings.positional_arguments
      || self
        .attributes
        .contains(AttributeDiscriminant::PositionalArguments)
  }

  /// Recipes change to the working directory unless `[no-cd]` is present.
  pub(crate) fn change_directory(&self) -> bool {
    !self.attributes.contains(AttributeDiscriminant::NoCd)
  }

  /// Whether the recipe is enabled on the current platform, based on
  /// OS-restriction attributes; a recipe with no such attributes is always
  /// enabled.
  pub(crate) fn enabled(&self) -> bool {
    let linux = self.attributes.contains(AttributeDiscriminant::Linux);
    let macos = self.attributes.contains(AttributeDiscriminant::Macos);
    let openbsd = self.attributes.contains(AttributeDiscriminant::Openbsd);
    let unix = self.attributes.contains(AttributeDiscriminant::Unix);
    let windows = self.attributes.contains(AttributeDiscriminant::Windows);
    (!windows && !linux && !macos && !openbsd && !unix)
      || (cfg!(target_os = "linux") && (linux || unix))
      || (cfg!(target_os = "macos") && (macos || unix))
      || (cfg!(target_os = "openbsd") && (openbsd || unix))
      || (cfg!(target_os = "windows") && windows)
      || (cfg!(unix) && unix)
      || (cfg!(windows) && windows)
  }

  /// Whether a failure exit message should be printed: `[exit-message]`
  /// forces it on, the `no-exit-message` setting turns it off, and
  /// `[no-exit-message]` suppresses it per recipe.
  fn print_exit_message(&self, settings: &Settings) -> bool {
    if self.attributes.contains(AttributeDiscriminant::ExitMessage) {
      true
    } else if settings.no_exit_message {
      false
    } else {
      !self
        .attributes
        .contains(AttributeDiscriminant::NoExitMessage)
    }
  }

  /// Directory to run the recipe in: `None` under `[no-cd]`, otherwise the
  /// context's working directory, optionally joined with a
  /// `[working-directory: …]` attribute value.
  fn working_directory<'a>(&'a self, context: &'a ExecutionContext) -> Option<PathBuf> {
    if !self.change_directory() {
      return None;
    }
    let working_directory = context.working_directory();
    for attribute in &self.attributes {
      if let Attribute::WorkingDirectory(dir) = attribute {
        return Some(working_directory.join(&dir.cooked));
      }
    }
    Some(working_directory)
  }

  /// True if the recipe opts out of the `quiet` setting via `[no-quiet]`.
  fn no_quiet(&self) -> bool {
    self.attributes.contains(AttributeDiscriminant::NoQuiet)
  }

  /// Run the recipe: print banners per verbosity/`--explain`, then dispatch
  /// to script or linewise execution.
  pub(crate) fn run<'run>(
    &self,
    context: &ExecutionContext<'src, 'run>,
    scope: &Scope<'src, 'run>,
    positional: &[String],
    is_dependency: bool,
  ) -> RunResult<'src, ()> {
    let color = context.config.color.stderr().banner();
    let prefix = color.prefix();
    let suffix = color.suffix();

    if context.config.verbosity.loquacious() {
      eprintln!("{prefix}===> Running recipe `{}`...{suffix}", self.name);
    }

    if context.config.explain {
      if let Some(doc) = self.doc() {
        eprintln!("{prefix}#### {doc}{suffix}");
      }
    }

    let evaluator = Evaluator::new(context, is_dependency, scope);

    if self.is_script() {
      self.run_script(context, scope, positional, evaluator)
    } else {
      self.run_linewise(context, scope, positional, evaluator)
    }
  }

  /// Execute the recipe body one logical line at a time, each in a fresh
  /// shell invocation. Continuation lines (trailing `\`) are joined before
  /// execution; `@` (quiet) and `-` (infallible) sigils are honored per line.
  fn run_linewise<'run>(
    &self,
    context: &ExecutionContext<'src, 'run>,
    scope: &Scope<'src, 'run>,
    positional: &[String],
    mut evaluator: Evaluator<'src, 'run>,
  ) -> RunResult<'src, ()> {
    let config = &context.config;

    let mut lines = self.body.iter().peekable();
    // Body starts on the line after the recipe header.
    let mut line_number = self.line_number() + 1;
    loop {
      if lines.peek().is_none() {
        return Ok(());
      }
      let mut evaluated = String::new();
      let mut continued = false;
      // Sigils are taken from the first physical line of the logical line.
      let quiet_line = lines.peek().is_some_and(|line| line.is_quiet());
      let infallible_line = lines.peek().is_some_and(|line| line.is_infallible());
      let comment_line = context.module.settings.ignore_comments
        && lines.peek().is_some_and(|line| line.is_comment());
      // Join continuation lines into one command string.
      loop {
        if lines.peek().is_none() {
          break;
        }
        let line = lines.next().unwrap();
        line_number += 1;
        if !comment_line {
          evaluated += &evaluator.evaluate_line(line, continued)?;
        }
        if line.is_continuation() && !comment_line {
          continued = true;
          // Drop the trailing `\`.
          evaluated.pop();
        } else {
          break;
        }
      }

      if comment_line {
        continue;
      }

      let mut command = evaluated.as_str();

      // Strip the leading `@`/`-` sigils from the command text.
      let sigils = usize::from(infallible_line) + usize::from(quiet_line);

      command = &command[sigils..];

      if command.is_empty() {
        continue;
      }

      // Echo the command unless quieted; dry-run and loquacious always echo.
      if config.dry_run
        || config.verbosity.loquacious()
        || !((quiet_line ^ self.quiet)
          || (context.module.settings.quiet && !self.no_quiet())
          || config.verbosity.quiet())
      {
        let color = if config.highlight {
          config.color.command(config.command_color)
        } else {
          config.color
        }
        .stderr();

        if config.timestamp {
          eprint!(
            "[{}] ",
            color.paint(
              &chrono::Local::now()
                .format(&config.timestamp_format)
                .to_string()
            ),
          );
        }

        eprintln!("{}", color.paint(command));
      }

      if config.dry_run {
        continue;
      }

      let mut cmd = context.module.settings.shell_command(config);

      if let Some(working_directory) = self.working_directory(context) {
        cmd.current_dir(working_directory);
      }

      cmd.arg(command);

      if self.takes_positional_arguments(&context.module.settings) {
        cmd.arg(self.name.lexeme());
        cmd.args(positional);
      }

      if config.verbosity.quiet() {
        cmd.stderr(Stdio::null());
        cmd.stdout(Stdio::null());
      }

      cmd.export(
        &context.module.settings,
        context.dotenv,
        scope,
        &context.module.unexports,
      );

      let (result, caught) = cmd.status_guard();

      match result {
        Ok(exit_status) => {
          if let Some(code) = exit_status.code() {
            if code != 0 && !infallible_line {
              return Err(Error::Code {
                recipe: self.name(),
                line_number: Some(line_number),
                code,
                print_message: self.print_exit_message(&context.module.settings),
              });
            }
          } else if !infallible_line {
            // No exit code: the child was terminated by a signal.
            return Err(error_from_signal(
              self.name(),
              Some(line_number),
              exit_status,
            ));
          }
        }
        Err(io_error) => {
          return Err(Error::Io {
            recipe: self.name(),
            io_error,
          });
        }
      }

      // A signal caught while the line ran aborts the recipe, unless the line
      // was marked infallible.
      if !infallible_line {
        if let Some(signal) = caught {
          return Err(Error::Interrupted { signal });
        }
      }
    }
  }

  /// Execute the recipe as a script: evaluate the whole body, write it to a
  /// file in a tempdir, and run it via a `[script]` interpreter or its
  /// shebang line.
  pub(crate) fn run_script<'run>(
    &self,
    context: &ExecutionContext<'src, 'run>,
    scope: &Scope<'src, 'run>,
    positional: &[String],
    mut evaluator: Evaluator<'src, 'run>,
  ) -> RunResult<'src, ()> {
    let config = &context.config;

    let mut evaluated_lines = Vec::new();
    for line in &self.body {
      evaluated_lines.push(evaluator.evaluate_line(line, false)?);
    }

    // Echo the script body when dry-running or when the recipe is quiet.
    if config.verbosity.loud() && (config.dry_run || self.quiet) {
      for line in &evaluated_lines {
        eprintln!(
          "{}",
          config
            .color
            .command(config.command_color)
            .stderr()
            .paint(line)
        );
      }
    }

    if config.dry_run {
      return Ok(());
    }

    // `[script(...)]` attribute > `script-interpreter` setting > default
    // interpreter; otherwise the first body line must be a shebang.
    let executor = if let Some(Attribute::Script(interpreter)) =
      self.attributes.get(AttributeDiscriminant::Script)
    {
      Executor::Command(
        interpreter
          .as_ref()
          .map(|interpreter| Interpreter {
            command: interpreter.command.cooked.clone(),
            arguments: interpreter
              .arguments
              .iter()
              .map(|argument| argument.cooked.clone())
              .collect(),
          })
          .or_else(|| context.module.settings.script_interpreter.clone())
          .unwrap_or_else(|| Interpreter::default_script_interpreter().clone()),
      )
    } else {
      let line = evaluated_lines
        .first()
        .ok_or_else(|| Error::internal("evaluated_lines was empty"))?;
      let shebang =
        Shebang::new(line).ok_or_else(|| Error::internal(format!("bad shebang line: {line}")))?;
      Executor::Shebang(shebang)
    };

    let tempdir = context.tempdir(self)?;
    let mut path = tempdir.path().to_path_buf();

    // `[extension: …]` overrides the script file's extension.
    let extension = self.attributes.iter().find_map(|attribute| {
      if let Attribute::Extension(extension) = attribute {
        Some(extension.cooked.as_str())
      } else {
        None
      }
    });

    path.push(executor.script_filename(self.name(), extension));

    let script = executor.script(self, &evaluated_lines);

    if config.verbosity.grandiloquent() {
      eprintln!("{}", config.color.doc().stderr().paint(&script));
    }

    fs::write(&path, script).map_err(|error| Error::TempdirIo {
      recipe: self.name(),
      io_error: error,
    })?;

    let mut command = executor.command(
      config,
      &path,
      self.name(),
      self.working_directory(context).as_deref(),
    )?;

    if self.takes_positional_arguments(&context.module.settings) {
      command.args(positional);
    }

    command.export(
      &context.module.settings,
      context.dotenv,
      scope,
      &context.module.unexports,
    );

    // run it!
    let (result, caught) = command.status_guard();

    match result {
      Ok(exit_status) => exit_status.code().map_or_else(
        || Err(error_from_signal(self.name(), None, exit_status)),
        |code| {
          if code == 0 {
            Ok(())
          } else {
            Err(Error::Code {
              recipe: self.name(),
              line_number: None,
              code,
              print_message: self.print_exit_message(&context.module.settings),
            })
          }
        },
      )?,
      Err(io_error) => return Err(executor.error(io_error, self.name())),
    }

    if let Some(signal) = caught {
      return Err(Error::Interrupted { signal });
    }

    Ok(())
  }

  /// Names of all groups this recipe belongs to, from `[group: …]` attributes.
  pub(crate) fn groups(&self) -> BTreeSet<String> {
    self
      .attributes
      .iter()
      .filter_map(|attribute| {
        if let Attribute::Group(group) = attribute {
          Some(group.cooked.clone())
        } else {
          None
        }
      })
      .collect()
  }

  /// Documentation string: a `[doc(…)]` attribute takes precedence over the
  /// recipe's doc comment; `[doc]` with no argument suppresses it.
  pub(crate) fn doc(&self) -> Option<&str> {
    for attribute in &self.attributes {
      if let Attribute::Doc(doc) = attribute {
        return doc.as_ref().map(|s| s.cooked.as_ref());
      }
    }

    self.doc.as_deref()
  }

  /// Dependencies that run before the recipe.
  pub(crate) fn priors(&self) -> &[D] {
    &self.dependencies[..self.priors]
  }

  /// Dependencies that run after the recipe (those following `&&`).
  pub(crate) fn subsequents(&self) -> &[D] {
    &self.dependencies[self.priors..]
  }
}
impl<D: Display> ColorDisplay for Recipe<'_, D> {
  /// Render the recipe as just source: doc comment, attributes, optional `@`,
  /// name, parameters, dependencies (with `&&` before subsequents), and the
  /// indented body.
  fn fmt(&self, f: &mut Formatter, color: Color) -> fmt::Result {
    // Only print the plain doc comment when no `[doc(…)]` attribute exists;
    // attributes (including `[doc(…)]`) are printed below.
    if !self
      .attributes
      .iter()
      .any(|attribute| matches!(attribute, Attribute::Doc(_)))
    {
      if let Some(doc) = &self.doc {
        writeln!(f, "# {doc}")?;
      }
    }

    for attribute in &self.attributes {
      writeln!(f, "[{attribute}]")?;
    }

    if self.quiet {
      write!(f, "@{}", self.name)?;
    } else {
      write!(f, "{}", self.name)?;
    }

    for parameter in &self.parameters {
      write!(f, " {}", parameter.color_display(color))?;
    }
    write!(f, ":")?;

    for (i, dependency) in self.dependencies.iter().enumerate() {
      // Subsequent dependencies are separated from priors by `&&`.
      if i == self.priors {
        write!(f, " &&")?;
      }

      write!(f, " {dependency}")?;
    }

    for (i, line) in self.body.iter().enumerate() {
      if i == 0 {
        writeln!(f)?;
      }
      for (j, fragment) in line.fragments.iter().enumerate() {
        if j == 0 {
          // Body lines are indented four spaces.
          write!(f, "    ")?;
        }
        match fragment {
          Fragment::Text { token } => write!(f, "{}", token.lexeme())?,
          Fragment::Interpolation { expression, .. } => write!(f, "{{{{ {expression} }}}}")?,
        }
      }
      // No trailing newline after the final body line.
      if i + 1 < self.body.len() {
        writeln!(f)?;
      }
    }
    Ok(())
  }
}
impl<'src, D> Keyed<'src> for Recipe<'src, D> {
  /// Recipes are keyed by name for storage in keyed collections.
  fn key(&self) -> &'src str {
    self.name.lexeme()
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/load_dotenv.rs | src/load_dotenv.rs | use super::*;
/// Load environment variables from a dotenv file, if dotenv loading is
/// enabled.
///
/// Command-line flags take precedence over justfile settings for the filename
/// and path. When no explicit path matches, ancestors of `working_directory`
/// are searched for the (possibly overridden) filename. Returns an empty map
/// when loading is disabled or no file is found, unless `dotenv-required` is
/// set, in which case a missing file is an error.
pub(crate) fn load_dotenv(
  config: &Config,
  settings: &Settings,
  working_directory: &Path,
) -> RunResult<'static, BTreeMap<String, String>> {
  let filename_override = config
    .dotenv_filename
    .as_ref()
    .or(settings.dotenv_filename.as_ref());

  let path_override = config
    .dotenv_path
    .as_ref()
    .or(settings.dotenv_path.as_ref());

  // Dotenv loading is active if any related setting or override is present.
  let enabled = settings.dotenv_load
    || settings.dotenv_override
    || settings.dotenv_required
    || filename_override.is_some()
    || path_override.is_some();

  if !enabled {
    return Ok(BTreeMap::new());
  }

  // An explicit path is tried first, relative to the working directory.
  if let Some(path) = path_override {
    let candidate = working_directory.join(path);
    if candidate.is_file() {
      return load_from_file(&candidate, settings);
    }
  }

  let filename = filename_override.map_or(".env", String::as_str);

  // Walk up from the working directory looking for the dotenv file.
  for directory in working_directory.ancestors() {
    let candidate = directory.join(filename);
    if candidate.is_file() {
      return load_from_file(&candidate, settings);
    }
  }

  if settings.dotenv_required {
    Err(Error::DotenvRequired)
  } else {
    Ok(BTreeMap::new())
  }
}
/// Parse the dotenv file at `path` into a map.
///
/// Unless `dotenv-override` is set, entries whose key already exists in the
/// process environment are skipped.
fn load_from_file(
  path: &Path,
  settings: &Settings,
) -> RunResult<'static, BTreeMap<String, String>> {
  let mut variables = BTreeMap::new();
  for entry in dotenvy::from_path_iter(path)? {
    let (key, value) = entry?;
    let shadowed = !settings.dotenv_override && env::var_os(&key).is_some();
    if !shadowed {
      variables.insert(key, value);
    }
  }
  Ok(variables)
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/lexer.rs | src/lexer.rs | use {super::*, CompileErrorKind::*, TokenKind::*};
/// Just language lexer
///
/// The lexer proceeds character-by-character, as opposed to using regular
/// expressions to lex tokens or semi-tokens at a time. As a result, it is
/// verbose and straightforward. Just used to have a regex-based lexer, which
/// was slower and generally godawful. However, this should not be taken as a
/// slight against regular expressions, the lexer was just idiosyncratically
/// bad.
pub(crate) struct Lexer<'src> {
  /// Char iterator over the remaining source
  chars: Chars<'src>,
  /// Indentation stack; the bottom entry is always the empty string
  indentation: Vec<&'src str>,
  /// Interpolation token start stack, for reporting unterminated `{{`
  interpolation_stack: Vec<Token<'src>>,
  /// Next character to be lexed (one character of lookahead)
  next: Option<char>,
  /// Current open delimiters, with the line each was opened on
  open_delimiters: Vec<(Delimiter, usize)>,
  /// Path to source file
  path: &'src Path,
  /// Inside recipe body
  recipe_body: bool,
  /// Next indent will start a recipe body
  recipe_body_pending: bool,
  /// Source text
  src: &'src str,
  /// Current token end
  token_end: Position,
  /// Current token start
  token_start: Position,
  /// Tokens lexed so far
  tokens: Vec<Token<'src>>,
}
impl<'src> Lexer<'src> {
pub(crate) const INTERPOLATION_END: &'static str = "}}";
pub(crate) const INTERPOLATION_ESCAPE: &'static str = "{{{{";
pub(crate) const INTERPOLATION_START: &'static str = "{{";
/// Lex `src`
pub(crate) fn lex(path: &'src Path, src: &'src str) -> CompileResult<'src, Vec<Token<'src>>> {
  Self::new(path, src).tokenize()
}

/// Lex `src` using a dummy `justfile` path; test helper.
#[cfg(test)]
pub(crate) fn test_lex(src: &'src str) -> CompileResult<'src, Vec<Token<'src>>> {
  Self::new("justfile".as_ref(), src).tokenize()
}

/// Create a new Lexer to lex `src`
fn new(path: &'src Path, src: &'src str) -> Self {
  let mut chars = src.chars();
  // Prime the one-character lookahead.
  let next = chars.next();
  let start = Position {
    offset: 0,
    column: 0,
    line: 0,
  };

  Self {
    indentation: vec![""],
    tokens: Vec::new(),
    token_start: start,
    token_end: start,
    recipe_body_pending: false,
    recipe_body: false,
    interpolation_stack: Vec::new(),
    open_delimiters: Vec::new(),
    chars,
    next,
    src,
    path,
  }
}
/// Advance over the character in `self.next`, updating `self.token_end`
/// accordingly.
fn advance(&mut self) -> CompileResult<'src> {
  match self.next {
    Some(c) => {
      let len_utf8 = c.len_utf8();

      self.token_end.offset += len_utf8;
      // NOTE(review): column advances by UTF-8 byte length, not by one per
      // character — columns appear to be byte-based; confirm against how
      // error locations are rendered.
      self.token_end.column += len_utf8;

      if c == '\n' {
        self.token_end.column = 0;
        self.token_end.line += 1;
      }

      self.next = self.chars.next();

      Ok(())
    }
    None => Err(self.internal_error("Lexer advanced past end of text")),
  }
}

/// Lexeme of in-progress token
fn lexeme(&self) -> &'src str {
  &self.src[self.token_start.offset..self.token_end.offset]
}

/// Length of current token, in bytes
fn current_token_length(&self) -> usize {
  self.token_end.offset - self.token_start.offset
}
/// If the next character is `c`, advance over it and return true; otherwise
/// leave the position unchanged and return false.
fn accepted(&mut self, c: char) -> CompileResult<'src, bool> {
  let found = self.next_is(c);
  if found {
    self.advance()?;
  }
  Ok(found)
}
/// Advance over `c`, returning an internal error if the next character is
/// anything else — callers use this when `c` has already been established by
/// lookahead.
fn presume(&mut self, c: char) -> CompileResult<'src> {
  if !self.next_is(c) {
    return Err(self.internal_error(format!("Lexer presumed character `{c}`")));
  }

  self.advance()?;

  Ok(())
}
/// Advance over every character of `s`, erroring on the first that is not
/// next in the input.
fn presume_str(&mut self, s: &str) -> CompileResult<'src> {
  s.chars().try_for_each(|c| self.presume(c))
}
/// Is next character c?
fn next_is(&self, c: char) -> bool {
  self.next == Some(c)
}

/// Is next character ' ' or '\t'?
fn next_is_whitespace(&self) -> bool {
  self.next_is(' ') || self.next_is('\t')
}

/// Un-lexed text, starting at the current token end
fn rest(&self) -> &'src str {
  &self.src[self.token_end.offset..]
}

/// Check if unlexed text begins with prefix
fn rest_starts_with(&self, prefix: &str) -> bool {
  self.rest().starts_with(prefix)
}

/// Does rest start with "\n" or "\r\n"?
fn at_eol(&self) -> bool {
  self.next_is('\n') || self.rest_starts_with("\r\n")
}

/// Are we at end-of-file?
fn at_eof(&self) -> bool {
  self.rest().is_empty()
}

/// Are we at end-of-line or end-of-file?
fn at_eol_or_eof(&self) -> bool {
  self.at_eol() || self.at_eof()
}

/// Get current indentation (top of the indentation stack)
fn indentation(&self) -> &'src str {
  self.indentation.last().unwrap()
}

/// Are we currently indented
fn indented(&self) -> bool {
  !self.indentation().is_empty()
}
/// Create a new token with `kind` whose lexeme is between `self.token_start`
/// and `self.token_end`
fn token(&mut self, kind: TokenKind) {
  self.tokens.push(Token {
    column: self.token_start.column,
    kind,
    length: self.token_end.offset - self.token_start.offset,
    line: self.token_start.line,
    offset: self.token_start.offset,
    path: self.path,
    src: self.src,
  });

  // Set `token_start` to point after the lexed token
  self.token_start = self.token_end;
}
/// Create an internal error with `message`
fn internal_error(&self, message: impl Into<String>) -> CompileError<'src> {
  // Use `self.token_end` as the location of the error
  let token = Token {
    src: self.src,
    offset: self.token_end.offset,
    line: self.token_end.line,
    column: self.token_end.column,
    length: 0,
    kind: Unspecified,
    path: self.path,
  };
  CompileError::new(
    token,
    Internal {
      message: message.into(),
    },
  )
}

/// Create a compilation error with `kind`
fn error(&self, kind: CompileErrorKind<'src>) -> CompileError<'src> {
  // Use the in-progress token span as the location of the error.

  // The width of the error site to highlight depends on the kind of error:
  let length = match kind {
    // highlight only the opening delimiter of the unterminated string/backtick
    UnterminatedString | UnterminatedBacktick => {
      let Some(kind) = StringKind::from_token_start(self.lexeme()) else {
        return self.internal_error("Lexer::error: expected string or backtick token start");
      };
      kind.delimiter().len()
    }
    // highlight the full token
    _ => self.lexeme().len(),
  };

  let token = Token {
    kind: Unspecified,
    src: self.src,
    offset: self.token_start.offset,
    line: self.token_start.line,
    column: self.token_start.column,
    length,
    path: self.path,
  };

  CompileError::new(token, kind)
}
/// Create an error for an interpolation that was opened but never closed,
/// located at the `{{` token that opened it
fn unterminated_interpolation_error(interpolation_start: Token<'src>) -> CompileError<'src> {
  CompileError::new(interpolation_start, UnterminatedInterpolation)
}
/// True if `text` could be an identifier
///
/// Returns false for the empty string.
pub(crate) fn is_identifier(text: &str) -> bool {
  // Single pass over the characters: the original created a second
  // iterator with `chars().skip(1)`, re-scanning the first character.
  let mut chars = text.chars();
  chars.next().is_some_and(Self::is_identifier_start)
    && chars.all(Self::is_identifier_continue)
}
/// True if `c` can be the first character of an identifier: an ASCII
/// letter or an underscore
pub(crate) fn is_identifier_start(c: char) -> bool {
  c.is_ascii_alphabetic() || c == '_'
}
/// True if `c` can be a continuation character of an identifier: a valid
/// start character, an ASCII digit, or a hyphen
pub(crate) fn is_identifier_continue(c: char) -> bool {
  Self::is_identifier_start(c) || c.is_ascii_digit() || c == '-'
}
/// Consume the text and produce a series of tokens
fn tokenize(mut self) -> CompileResult<'src, Vec<Token<'src>>> {
  loop {
    // At the start of a line, handle blank lines and indentation first
    if self.token_start.column == 0 {
      self.lex_line_start()?;
    }
    match self.next {
      Some(first) => {
        // Dispatch on lexing context: innermost interpolation, recipe
        // body, or normal text
        if let Some(&interpolation_start) = self.interpolation_stack.last() {
          self.lex_interpolation(interpolation_start, first)?;
        } else if self.recipe_body {
          self.lex_body()?;
        } else {
          self.lex_normal(first)?;
        }
      }
      None => break,
    }
  }
  // An interpolation still open at end-of-file is an error
  if let Some(&interpolation_start) = self.interpolation_stack.last() {
    return Err(Self::unterminated_interpolation_error(interpolation_start));
  }
  // Close any indented blocks still open at end-of-file
  while self.indented() {
    self.lex_dedent();
  }
  self.token(Eof);
  // Invariants: all input consumed, no partial token pending, and the
  // indentation stack is back to a single entry
  assert_eq!(self.token_start.offset, self.token_end.offset);
  assert_eq!(self.token_start.offset, self.src.len());
  assert_eq!(self.indentation.len(), 1);
  Ok(self.tokens)
}
/// Handle blank lines and indentation
fn lex_line_start(&mut self) -> CompileResult<'src> {
  // Classification of this line's leading whitespace relative to the
  // current indentation
  enum Indentation<'src> {
    // Line only contains whitespace
    Blank,
    // Indentation continues
    Continue,
    // Indentation decreases
    Decrease,
    // Indentation isn't consistent
    Inconsistent,
    // Indentation increases
    Increase,
    // Indentation mixes spaces and tabs
    Mixed { whitespace: &'src str },
  }
  use Indentation::*;
  // Byte index of the first non-blank character on this line
  let nonblank_index = self
    .rest()
    .char_indices()
    .skip_while(|&(_, c)| c == ' ' || c == '\t')
    .map(|(i, _)| i)
    .next()
    .unwrap_or_else(|| self.rest().len());
  let rest = &self.rest()[nonblank_index..];
  let whitespace = &self.rest()[..nonblank_index];
  // Inside open delimiters or an interpolation, indentation is not
  // significant: just consume the leading whitespace
  if self.open_delimiters_or_interpolation() {
    if !whitespace.is_empty() {
      while self.next_is_whitespace() {
        self.advance()?;
      }
      self.token(Whitespace);
    }
    return Ok(());
  }
  // NOTE(review): `.take(n).map(…).next()` always yields the FIRST index
  // (0) whenever `n >= 1` and `whitespace` is non-empty, so
  // `body_whitespace` appears to always be the empty string, which would
  // make the recipe-body `Mixed`/`Inconsistent` branches below dead code.
  // If the intent was "the prefix of `whitespace` with as many characters
  // as the current indentation", this looks like it should use `.nth(…)` —
  // confirm against upstream before relying on those branches.
  let body_whitespace = &whitespace[..whitespace
    .char_indices()
    .take(self.indentation().chars().count())
    .map(|(i, _c)| i)
    .next()
    .unwrap_or(0)];
  let spaces = whitespace.chars().any(|c| c == ' ');
  let tabs = whitespace.chars().any(|c| c == '\t');
  let body_spaces = body_whitespace.chars().any(|c| c == ' ');
  let body_tabs = body_whitespace.chars().any(|c| c == '\t');
  #[allow(clippy::if_same_then_else)]
  let indentation = if rest.starts_with('\n') || rest.starts_with("\r\n") || rest.is_empty() {
    Blank
  } else if whitespace == self.indentation() {
    Continue
  } else if self.indentation.contains(&whitespace) {
    // Exactly matches some outer indentation level
    Decrease
  } else if self.recipe_body && whitespace.starts_with(self.indentation()) {
    // Extra indentation inside a recipe body: only the common prefix is
    // consumed as indentation (see the `Continue` arm below)
    Continue
  } else if self.recipe_body && body_spaces && body_tabs {
    Mixed {
      whitespace: body_whitespace,
    }
  } else if !self.recipe_body && spaces && tabs {
    Mixed { whitespace }
  } else if whitespace.len() < self.indentation().len() {
    Inconsistent
  } else if self.recipe_body
    && body_whitespace.len() >= self.indentation().len()
    && !body_whitespace.starts_with(self.indentation())
  {
    Inconsistent
  } else if whitespace.len() >= self.indentation().len()
    && !whitespace.starts_with(self.indentation())
  {
    Inconsistent
  } else {
    Increase
  };
  match indentation {
    Blank => {
      // Emit the whitespace of a blank line, if any, without touching the
      // indentation stack
      if !whitespace.is_empty() {
        while self.next_is_whitespace() {
          self.advance()?;
        }
        self.token(Whitespace);
      }
      Ok(())
    }
    Continue => {
      // Consume only the current indentation's worth of characters
      if !self.indentation().is_empty() {
        for _ in self.indentation().chars() {
          self.advance()?;
        }
        self.token(Whitespace);
      }
      Ok(())
    }
    Decrease => {
      // Pop levels until the stack top matches this line's whitespace
      while self.indentation() != whitespace {
        self.lex_dedent();
      }
      if !whitespace.is_empty() {
        while self.next_is_whitespace() {
          self.advance()?;
        }
        self.token(Whitespace);
      }
      Ok(())
    }
    Mixed { whitespace } => {
      // Consume the offending whitespace so the error highlights it
      for _ in whitespace.chars() {
        self.advance()?;
      }
      Err(self.error(MixedLeadingWhitespace { whitespace }))
    }
    Inconsistent => {
      // Consume the offending whitespace so the error highlights it
      for _ in whitespace.chars() {
        self.advance()?;
      }
      Err(self.error(InconsistentLeadingWhitespace {
        expected: self.indentation(),
        found: whitespace,
      }))
    }
    Increase => {
      while self.next_is_whitespace() {
        self.advance()?;
      }
      let indentation = self.lexeme();
      self.indentation.push(indentation);
      self.token(Indent);
      // An `Indent` right after a recipe's header line begins the body
      if self.recipe_body_pending {
        self.recipe_body = true;
      }
      Ok(())
    }
  }
}
/// Lex token beginning with `start` outside of a recipe body
fn lex_normal(&mut self, start: char) -> CompileResult<'src> {
  match start {
    ' ' | '\t' => self.lex_whitespace(),
    // `!include` gets a dedicated error rather than a generic one
    '!' if self.rest().starts_with("!include") => Err(self.error(Include)),
    '!' => self.lex_choices('!', &[('=', BangEquals), ('~', BangTilde)], None),
    '#' => self.lex_comment(),
    '$' => self.lex_single(Dollar),
    '&' => self.lex_digraph('&', '&', AmpersandAmpersand),
    '(' => self.lex_delimiter(ParenL),
    ')' => self.lex_delimiter(ParenR),
    '*' => self.lex_single(Asterisk),
    '+' => self.lex_single(Plus),
    ',' => self.lex_single(Comma),
    '/' => self.lex_single(Slash),
    ':' => self.lex_colon(),
    '=' => self.lex_choices(
      '=',
      &[('=', EqualsEquals), ('~', EqualsTilde)],
      Some(Equals),
    ),
    '?' => self.lex_single(QuestionMark),
    '@' => self.lex_single(At),
    '[' => self.lex_delimiter(BracketL),
    '\\' => self.lex_escape(),
    '\n' | '\r' => self.lex_eol(),
    '\u{feff}' => self.lex_single(ByteOrderMark),
    ']' => self.lex_delimiter(BracketR),
    '`' | '"' | '\'' => self.lex_string(None),
    '{' => self.lex_delimiter(BraceL),
    '|' => self.lex_digraph('|', '|', BarBar),
    '}' => {
      // If this `}` begins a `}}` and the innermost open delimiter is a
      // format string, the `}}` ends an interpolation inside that format
      // string, so resume lexing the string; otherwise it's an ordinary
      // closing brace
      let format_string_kind = self.open_delimiters.last().and_then(|(delimiter, _line)| {
        if !self.rest().starts_with(Self::INTERPOLATION_END) {
          None
        } else if let Delimiter::FormatString(kind) = delimiter {
          Some(kind)
        } else {
          None
        }
      });
      if let Some(format_string_kind) = format_string_kind {
        self.lex_string(Some(*format_string_kind))
      } else {
        self.lex_delimiter(BraceR)
      }
    }
    _ if Self::is_identifier_start(start) => self.lex_identifier(),
    _ => {
      // Consume the unknown character so the error highlights it
      self.advance()?;
      Err(self.error(UnknownStartOfToken { start }))
    }
  }
}
/// Lex token beginning with `start` inside an interpolation
fn lex_interpolation(
  &mut self,
  interpolation_start: Token<'src>,
  start: char,
) -> CompileResult<'src> {
  // `}}` only closes the interpolation when no delimiters opened inside
  // the interpolation are still unclosed
  if self.rest_starts_with(Self::INTERPOLATION_END) && self.open_delimiters.is_empty() {
    // end current interpolation
    if self.interpolation_stack.pop().is_none() {
      self.presume_str(Self::INTERPOLATION_END)?;
      return Err(self.internal_error(
        "Lexer::lex_interpolation found `}}` but was called with empty interpolation stack.",
      ));
    }
    // Emit interpolation end token
    self.lex_double(InterpolationEnd)
  } else if self.at_eof() && self.open_delimiters.is_empty() {
    // Return unterminated interpolation error that highlights the opening
    // {{
    Err(Self::unterminated_interpolation_error(interpolation_start))
  } else {
    // Otherwise lex as per normal
    self.lex_normal(start)
  }
}
/// Lex token while in recipe body
fn lex_body(&mut self) -> CompileResult<'src> {
  // What ended the current run of raw text
  enum Terminator {
    EndOfFile,
    Interpolation,
    Newline,
    NewlineCarriageReturn,
  }
  use Terminator::*;
  let terminator = loop {
    // An escaped interpolation is passed through as text: skip past the
    // escape so the interpolation-start check below doesn't fire on it
    if self.rest_starts_with(Self::INTERPOLATION_ESCAPE) {
      self.presume_str(Self::INTERPOLATION_ESCAPE)?;
      continue;
    }
    if self.rest_starts_with("\n") {
      break Newline;
    }
    if self.rest_starts_with("\r\n") {
      break NewlineCarriageReturn;
    }
    if self.rest_starts_with(Self::INTERPOLATION_START) {
      break Interpolation;
    }
    if self.at_eof() {
      break EndOfFile;
    }
    self.advance()?;
  };
  // emit text token containing text so far
  if self.current_token_length() > 0 {
    self.token(Text);
  }
  match terminator {
    Newline => self.lex_single(Eol),
    NewlineCarriageReturn => self.lex_double(Eol),
    Interpolation => {
      self.lex_double(InterpolationStart)?;
      // Remember the `{{` token so an unterminated-interpolation error
      // can point back at it
      self
        .interpolation_stack
        .push(self.tokens[self.tokens.len() - 1]);
      Ok(())
    }
    EndOfFile => Ok(()),
  }
}
/// Emit a zero-width `Dedent` token, pop one level off the indentation
/// stack, and leave any in-progress recipe body
fn lex_dedent(&mut self) {
  // Dedent tokens are zero-width, so no characters may be pending
  assert_eq!(self.current_token_length(), 0);
  self.token(Dedent);
  self.indentation.pop();
  self.recipe_body_pending = false;
  self.recipe_body = false;
}
/// Lex a single-character token of kind `kind`
fn lex_single(&mut self, kind: TokenKind) -> CompileResult<'src> {
  self.advance()?;
  self.token(kind);
  Ok(())
}
/// Lex a two-character token of kind `kind`
fn lex_double(&mut self, kind: TokenKind) -> CompileResult<'src> {
  for _ in 0..2 {
    self.advance()?;
  }
  self.token(kind);
  Ok(())
}
/// Lex a two-character token if the character following `first` matches
/// one of `choices`, emitting the corresponding kind. Otherwise, emit a
/// single-character token of kind `otherwise`, or an error if `otherwise`
/// is `None`.
fn lex_choices(
  &mut self,
  first: char,
  choices: &[(char, TokenKind)],
  otherwise: Option<TokenKind>,
) -> CompileResult<'src> {
  self.presume(first)?;
  for (second, then) in choices {
    if self.accepted(*second)? {
      self.token(*then);
      return Ok(());
    }
  }
  if let Some(token) = otherwise {
    self.token(token);
  } else {
    // Emit an unspecified token to consume the current character,
    self.token(Unspecified);
    let expected = choices.iter().map(|choice| choice.0).collect();
    if self.at_eof() {
      return Err(self.error(UnexpectedEndOfToken { expected }));
    }
    // …and advance past another character,
    self.advance()?;
    // …so that the error we produce highlights the unexpected character.
    return Err(self.error(UnexpectedCharacter { expected }));
  }
  Ok(())
}
/// Lex an opening or closing delimiter, maintaining the open-delimiter
/// stack along the way
fn lex_delimiter(&mut self, kind: TokenKind) -> CompileResult<'src> {
  match kind {
    BraceL => self.open_delimiter(Delimiter::Brace),
    BraceR => self.close_delimiter(Delimiter::Brace)?,
    BracketL => self.open_delimiter(Delimiter::Bracket),
    BracketR => self.close_delimiter(Delimiter::Bracket)?,
    ParenL => self.open_delimiter(Delimiter::Paren),
    ParenR => self.close_delimiter(Delimiter::Paren)?,
    _ => {
      return Err(self.internal_error(format!(
        "Lexer::lex_delimiter called with non-delimiter token: `{kind}`",
      )));
    }
  }
  // Emit the delimiter token
  self.lex_single(kind)?;
  Ok(())
}
/// Record `delimiter` as open, remembering the line it was opened on for
/// later error reporting
fn open_delimiter(&mut self, delimiter: Delimiter) {
  let line = self.token_start.line;
  self.open_delimiters.push((delimiter, line));
}
/// Pop a delimiter from the open delimiter stack and error if incorrect type
fn close_delimiter(&mut self, close: Delimiter) -> CompileResult<'src> {
  match self.open_delimiters.pop() {
    // Matching kinds: the delimiter closes cleanly
    Some((open, _)) if open == close => Ok(()),
    // Mismatched: report the open delimiter and the line it opened on
    Some((open, open_line)) => Err(self.error(MismatchedClosingDelimiter {
      open,
      close,
      open_line,
    })),
    // Nothing open: a stray closing delimiter
    None => Err(self.error(UnexpectedClosingDelimiter { close })),
  }
}
/// Return true if there are any unclosed delimiters or open interpolations
fn open_delimiters_or_interpolation(&self) -> bool {
  !self.open_delimiters.is_empty() || !self.interpolation_stack.is_empty()
}
/// Lex the two-character digraph `left` `right` of kind `token`, erroring
/// if the character after `left` is not `right`
fn lex_digraph(&mut self, left: char, right: char, token: TokenKind) -> CompileResult<'src> {
  self.presume(left)?;
  if self.accepted(right)? {
    self.token(token);
    Ok(())
  } else {
    // Emit an unspecified token to consume the current character,
    self.token(Unspecified);
    if self.at_eof() {
      return Err(self.error(UnexpectedEndOfToken {
        expected: vec![right],
      }));
    }
    // …and advance past another character,
    self.advance()?;
    // …so that the error we produce highlights the unexpected character.
    Err(self.error(UnexpectedCharacter {
      expected: vec![right],
    }))
  }
}
/// Lex a token starting with ':': `:=`, `::`, or a bare `:`
fn lex_colon(&mut self) -> CompileResult<'src> {
  self.presume(':')?;
  if self.accepted('=')? {
    self.token(ColonEquals);
  } else if self.accepted(':')? {
    self.token(ColonColon);
  } else {
    self.token(Colon);
    // A bare `:` may introduce a recipe body: the next `Indent` will set
    // `recipe_body` (see `lex_line_start`)
    self.recipe_body_pending = true;
  }
  Ok(())
}
/// Lex a token starting with a '\' escape
fn lex_escape(&mut self) -> CompileResult<'src> {
  self.presume('\\')?;
  // Treat newline escaped with \ as whitespace
  if self.accepted('\n')? {
    while self.next_is_whitespace() {
      self.advance()?;
    }
    self.token(Whitespace);
  } else if self.accepted('\r')? {
    // A carriage return must be paired with a linefeed
    if !self.accepted('\n')? {
      return Err(self.error(UnpairedCarriageReturn));
    }
    while self.next_is_whitespace() {
      self.advance()?;
    }
    self.token(Whitespace);
  } else if let Some(character) = self.next {
    return Err(self.error(InvalidEscapeSequence { character }));
  }
  // NOTE(review): a trailing `\` at end-of-file falls through every branch
  // above without emitting a token for the consumed backslash — confirm
  // this is intentional
  Ok(())
}
/// Lex a line ending: a linefeed or a carriage return + linefeed
fn lex_eol(&mut self) -> CompileResult<'src> {
  if self.accepted('\r')? {
    // A bare carriage return is an error
    if !self.accepted('\n')? {
      return Err(self.error(UnpairedCarriageReturn));
    }
  } else {
    self.presume('\n')?;
  }
  // Emit eol if there are no open delimiters, otherwise emit whitespace.
  if self.open_delimiters_or_interpolation() {
    self.token(Whitespace);
  } else {
    self.token(Eol);
  }
  Ok(())
}
/// Lex name: [a-zA-Z_][a-zA-Z0-9_-]*
fn lex_identifier(&mut self) -> CompileResult<'src> {
  // The first character was already validated by the caller
  self.advance()?;
  while let Some(c) = self.next {
    if !Self::is_identifier_continue(c) {
      break;
    }
    self.advance()?;
  }
  self.token(Identifier);
  Ok(())
}
/// Lex comment: #[^\r\n]*
fn lex_comment(&mut self) -> CompileResult<'src> {
  self.presume('#')?;
  // Consume everything up to, but not including, the line ending
  while !self.at_eol_or_eof() {
    self.advance()?;
  }
  self.token(Comment);
  Ok(())
}
/// Lex whitespace: [ \t]+
fn lex_whitespace(&mut self) -> CompileResult<'src> {
  loop {
    if !self.next_is_whitespace() {
      break;
    }
    self.advance()?;
  }
  self.token(Whitespace);
  Ok(())
}
/// Lex a backtick, cooked string, raw string, or format-string segment.
///
/// Backtick: ``[^`]*``
/// Cooked string: "[^"]*" # also processes escape sequences
/// Raw string: '[^']*'
///
/// When `format_string_kind` is `Some`, we are re-entering a format string
/// after the `}}` that closed an interpolation inside it.
fn lex_string(&mut self, format_string_kind: Option<StringKind>) -> CompileResult<'src> {
  // A string is a format string if we are resuming one, or if the
  // immediately preceding token is the identifier `f`
  let format = format_string_kind.is_some()
    || self.tokens.last().is_some_and(|token| {
      token.kind == TokenKind::Identifier && token.lexeme() == Keyword::F.lexeme()
    });
  let kind = if let Some(kind) = format_string_kind {
    // Resuming: consume the `}}` that ended the interpolation
    self.presume_str(Self::INTERPOLATION_END)?;
    kind
  } else {
    let Some(kind) = StringKind::from_token_start(self.rest()) else {
      self.advance()?;
      return Err(self.internal_error("Lexer::lex_string: invalid string start"));
    };
    self.presume_str(kind.delimiter())?;
    kind
  };
  // Scan the contents, tracking backslash escapes so an escaped delimiter
  // doesn't terminate the string
  let mut escape = false;
  loop {
    if self.next.is_none() {
      return Err(self.error(kind.unterminated_error_kind()));
    } else if !escape && kind.processes_escape_sequences() && self.next_is('\\') {
      escape = true;
    } else if escape && kind.processes_escape_sequences() && self.next_is('u') {
      // `\u`: clear the escape flag and keep scanning (presumably the
      // start of a unicode escape — confirm against StringKind docs)
      escape = false;
    } else if format && self.rest_starts_with(Self::INTERPOLATION_ESCAPE) {
      // Skip an escaped interpolation: three advances here plus the
      // shared advance at the bottom of the loop
      escape = false;
      self.advance()?;
      self.advance()?;
      self.advance()?;
    } else if !escape
      && (self.rest_starts_with(kind.delimiter())
        || format && self.rest_starts_with(Self::INTERPOLATION_START))
    {
      break;
    } else {
      escape = false;
    }
    self.advance()?;
  }
  if format && self.rest_starts_with(Self::INTERPOLATION_START) {
    // The string is interrupted by an interpolation
    self.presume_str(Self::INTERPOLATION_START)?;
    if format_string_kind.is_some() {
      self.token(FormatStringContinue);
    } else {
      self.token(FormatStringStart);
      self.open_delimiter(Delimiter::FormatString(kind));
    }
  } else {
    // The string ends at its closing delimiter
    self.presume_str(kind.delimiter())?;
    if let Some(format_string_kind) = format_string_kind {
      self.close_delimiter(Delimiter::FormatString(format_string_kind))?;
      self.token(FormatStringEnd);
    } else {
      self.token(kind.token_kind());
    }
  }
  Ok(())
}
}
#[cfg(test)]
mod tests {
use super::*;
use pretty_assertions::assert_eq;
macro_rules! test {
{
name: $name:ident,
text: $text:expr,
tokens: ($($kind:ident $(: $lexeme:literal)?),* $(,)?)$(,)?
} => {
#[test]
fn $name() {
let kinds: &[TokenKind] = &[$($kind,)* Eof];
let lexemes: &[&str] = &[$(lexeme!($kind $(, $lexeme)?),)* ""];
test($text, true, kinds, lexemes);
}
};
{
name: $name:ident,
text: $text:expr,
tokens: ($($kind:ident $(: $lexeme:literal)?),* $(,)?)$(,)?
unindent: $unindent:expr,
} => {
#[test]
fn $name() {
let kinds: &[TokenKind] = &[$($kind,)* Eof];
let lexemes: &[&str] = &[$(lexeme!($kind $(, $lexeme)?),)* ""];
test($text, $unindent, kinds, lexemes);
}
}
}
macro_rules! lexeme {
{
$kind:ident, $lexeme:literal
} => {
$lexeme
};
{
$kind:ident
} => {
default_lexeme($kind)
}
}
/// Lex `text` (unindenting it first when `unindent_text` is set) and check
/// that the token kinds and lexemes match, that the lexemes round-trip to
/// the input text, and that each token's position metadata is consistent
#[track_caller]
fn test(text: &str, unindent_text: bool, want_kinds: &[TokenKind], want_lexemes: &[&str]) {
  let text = if unindent_text {
    unindent(text)
  } else {
    text.to_owned()
  };
  let have = Lexer::test_lex(&text).unwrap();
  let have_kinds = have
    .iter()
    .map(|token| token.kind)
    .collect::<Vec<TokenKind>>();
  let have_lexemes = have.iter().map(Token::lexeme).collect::<Vec<&str>>();
  assert_eq!(have_kinds, want_kinds, "Token kind mismatch");
  assert_eq!(have_lexemes, want_lexemes, "Token lexeme mismatch");
  // Concatenated lexemes must reproduce the input exactly
  let mut roundtrip = String::new();
  for lexeme in have_lexemes {
    roundtrip.push_str(lexeme);
  }
  assert_eq!(roundtrip, text, "Roundtrip mismatch");
  // Walk the tokens checking offset/line/column bookkeeping; columns are
  // measured in bytes (`len_utf8`), not characters
  let mut offset = 0;
  let mut line = 0;
  let mut column = 0;
  for token in have {
    assert_eq!(token.offset, offset);
    assert_eq!(token.line, line);
    assert_eq!(token.lexeme().len(), token.length);
    assert_eq!(token.column, column);
    for c in token.lexeme().chars() {
      if c == '\n' {
        line += 1;
        column = 0;
      } else {
        column += c.len_utf8();
      }
    }
    offset += token.length;
  }
}
fn default_lexeme(kind: TokenKind) -> &'static str {
match kind {
// Fixed lexemes
AmpersandAmpersand => "&&",
Asterisk => "*",
At => "@",
BangEquals => "!=",
BangTilde => "!~",
BarBar => "||",
BraceL => "{",
BraceR => "}",
BracketL => "[",
BracketR => "]",
ByteOrderMark => "\u{feff}",
Colon => ":",
ColonColon => "::",
ColonEquals => ":=",
Comma => ",",
Dollar => "$",
Eol => "\n",
Equals => "=",
EqualsEquals => "==",
EqualsTilde => "=~",
Indent => " ",
InterpolationEnd => "}}",
InterpolationStart => "{{",
ParenL => "(",
ParenR => ")",
Plus => "+",
QuestionMark => "?",
Slash => "/",
Whitespace => " ",
// Empty lexemes
Dedent | Eof => "",
// Variable lexemes
Backtick | Comment | FormatStringContinue | FormatStringEnd | FormatStringStart
| Identifier | StringToken | Text | Unspecified => {
panic!("Token {kind:?} has no default lexeme")
}
}
}
macro_rules! error {
(
name: $name:ident,
input: $input:expr,
offset: $offset:expr,
line: $line:expr,
column: $column:expr,
width: $width:expr,
kind: $kind:expr,
) => {
#[test]
fn $name() {
error($input, $offset, $line, $column, $width, $kind);
}
};
}
/// Lex `src`, expecting a compile error, and check that the error's
/// location, width, and kind match the expected values
#[track_caller]
fn error(
  src: &str,
  offset: usize,
  line: usize,
  column: usize,
  length: usize,
  kind: CompileErrorKind,
) {
  match Lexer::test_lex(src) {
    // Fix: the previous panic message read "Lexing succeeded but
    // expected", which was missing its object
    Ok(_) => panic!("lexing succeeded, but an error was expected"),
    Err(have) => {
      let want = CompileError {
        token: Token {
          // The token kind is not part of the comparison, so copy it
          // from the actual error
          kind: have.token.kind,
          src,
          offset,
          line,
          column,
          length,
          path: "justfile".as_ref(),
        },
        kind: kind.into(),
      };
      assert_eq!(have, want);
    }
  }
}
test! {
name: name_new,
text: "foo",
tokens: (Identifier:"foo"),
}
test! {
name: comment,
text: "# hello",
tokens: (Comment:"# hello"),
}
test! {
name: backtick,
text: "`echo`",
tokens: (Backtick:"`echo`"),
}
test! {
name: backtick_multi_line,
text: "`echo\necho`",
tokens: (Backtick:"`echo\necho`"),
}
test! {
name: raw_string,
text: "'hello'",
tokens: (StringToken:"'hello'"),
}
test! {
name: raw_string_multi_line,
text: "'hello\ngoodbye'",
tokens: (StringToken:"'hello\ngoodbye'"),
}
test! {
name: cooked_string,
text: "\"hello\"",
tokens: (StringToken:"\"hello\""),
}
test! {
name: cooked_string_multi_line,
text: "\"hello\ngoodbye\"",
tokens: (StringToken:"\"hello\ngoodbye\""),
}
test! {
name: cooked_multiline_string,
text: "\"\"\"hello\ngoodbye\"\"\"",
tokens: (StringToken:"\"\"\"hello\ngoodbye\"\"\""),
}
test! {
name: ampersand_ampersand,
text: "&&",
tokens: (AmpersandAmpersand),
}
test! {
name: equals,
text: "=",
tokens: (Equals),
}
test! {
name: equals_equals,
text: "==",
tokens: (EqualsEquals),
}
test! {
name: bang_equals,
text: "!=",
tokens: (BangEquals),
}
test! {
name: brace_l,
text: "{",
tokens: (BraceL),
}
test! {
name: brace_r,
text: "{}",
tokens: (BraceL, BraceR),
}
test! {
name: brace_lll,
text: "{{{",
tokens: (BraceL, BraceL, BraceL),
}
test! {
name: brace_rrr,
text: "{{{}}}",
tokens: (BraceL, BraceL, BraceL, BraceR, BraceR, BraceR),
}
test! {
name: dollar,
text: "$",
tokens: (Dollar),
}
test! {
name: export_concatenation,
text: "export foo = 'foo' + 'bar'",
tokens: (
Identifier:"export",
Whitespace,
Identifier:"foo",
Whitespace,
Equals,
Whitespace,
StringToken:"'foo'",
Whitespace,
Plus,
Whitespace,
StringToken:"'bar'",
)
}
test! {
name: export_complex,
text: "export foo = ('foo' + 'bar') + `baz`",
tokens: (
Identifier:"export",
Whitespace,
Identifier:"foo",
Whitespace,
Equals,
Whitespace,
ParenL,
StringToken:"'foo'",
Whitespace,
Plus,
Whitespace,
StringToken:"'bar'",
ParenR,
Whitespace,
Plus,
Whitespace,
Backtick:"`baz`",
),
}
test! {
name: eol_linefeed,
text: "\n",
tokens: (Eol),
unindent: false,
}
test! {
name: eol_carriage_return_linefeed,
text: "\r\n",
tokens: (Eol:"\r\n"),
unindent: false,
}
test! {
name: indented_line,
text: "foo:\n a",
tokens: (Identifier:"foo", Colon, Eol, Indent:" ", Text:"a", Dedent),
}
test! {
name: indented_normal,
text: "
a
b
c
",
tokens: (
Identifier:"a",
Eol,
Indent:" ",
Identifier:"b",
Eol,
Whitespace:" ",
Identifier:"c",
Eol,
Dedent,
),
}
test! {
name: indented_normal_nonempty_blank,
text: "a\n b\n\t\t\n c\n",
tokens: (
Identifier:"a",
Eol,
Indent:" ",
Identifier:"b",
Eol,
Whitespace:"\t\t",
Eol,
Whitespace:" ",
Identifier:"c",
Eol,
Dedent,
),
unindent: false,
}
test! {
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | true |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/variables.rs | src/variables.rs | use super::*;
/// Iterator over the variables referenced by an expression, produced by
/// walking the expression tree (see the `Iterator` impl below)
pub(crate) struct Variables<'expression, 'src> {
  /// Stack of subexpressions still to be visited
  stack: Vec<&'expression Expression<'src>>,
}
impl<'expression, 'src> Variables<'expression, 'src> {
  /// Create an iterator over the variables referenced by `root`
  pub(crate) fn new(root: &'expression Expression<'src>) -> Self {
    let stack = vec![root];
    Self { stack }
  }
}
impl<'src> Iterator for Variables<'_, 'src> {
  type Item = Name<'src>;

  /// Iterative depth-first traversal: pop a node, push its children, and
  /// yield when the node is a variable reference. Most variants push
  /// children in reverse so they are popped left-to-right.
  fn next(&mut self) -> Option<Name<'src>> {
    loop {
      match self.stack.pop()? {
        // NOTE(review): `lhs` is pushed first here, so `rhs` is visited
        // first — the opposite of `Concatenation`/`Conditional` below,
        // which push in reverse. Confirm whether iteration order matters
        // to callers before changing either.
        Expression::And { lhs, rhs } | Expression::Or { lhs, rhs } => {
          self.stack.push(lhs);
          self.stack.push(rhs);
        }
        Expression::Assert {
          condition:
            Condition {
              lhs,
              rhs,
              operator: _,
            },
          error,
          ..
        } => {
          self.stack.push(error);
          self.stack.push(rhs);
          self.stack.push(lhs);
        }
        // Leaves that reference no variables
        Expression::Backtick { .. } | Expression::StringLiteral { .. } => {}
        Expression::Call { thunk } => match thunk {
          Thunk::Nullary { .. } => {}
          Thunk::Unary { arg, .. } => self.stack.push(arg),
          Thunk::UnaryOpt {
            args: (a, opt_b), ..
          } => {
            self.stack.push(a);
            if let Some(b) = opt_b.as_ref() {
              self.stack.push(b);
            }
          }
          Thunk::UnaryPlus {
            args: (a, rest), ..
          } => {
            // Chain the fixed argument with the variadic tail, reversed
            // so arguments are visited in call order
            let first: &[&Expression] = &[a];
            for arg in first.iter().copied().chain(rest).rev() {
              self.stack.push(arg);
            }
          }
          Thunk::Binary { args, .. } => {
            for arg in args.iter().rev() {
              self.stack.push(arg);
            }
          }
          Thunk::BinaryPlus {
            args: ([a, b], rest),
            ..
          } => {
            let first: &[&Expression] = &[a, b];
            for arg in first.iter().copied().chain(rest).rev() {
              self.stack.push(arg);
            }
          }
          Thunk::Ternary { args, .. } => {
            for arg in args.iter().rev() {
              self.stack.push(arg);
            }
          }
        },
        Expression::Concatenation { lhs, rhs } => {
          self.stack.push(rhs);
          self.stack.push(lhs);
        }
        Expression::Conditional {
          condition:
            Condition {
              lhs,
              rhs,
              operator: _,
            },
          then,
          otherwise,
        } => {
          self.stack.push(otherwise);
          self.stack.push(then);
          self.stack.push(rhs);
          self.stack.push(lhs);
        }
        Expression::FormatString { expressions, .. } => {
          for (expression, _string) in expressions {
            self.stack.push(expression);
          }
        }
        Expression::Group { contents } => {
          self.stack.push(contents);
        }
        Expression::Join { lhs, rhs } => {
          self.stack.push(rhs);
          if let Some(lhs) = lhs {
            self.stack.push(lhs);
          }
        }
        // A variable reference: yield its name
        Expression::Variable { name, .. } => return Some(*name),
      }
    }
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/recipe_signature.rs | src/recipe_signature.rs | use super::*;
/// A recipe's name followed by its space-separated parameters, rendered
/// with color support
pub(crate) struct RecipeSignature<'a> {
  pub(crate) name: &'a str,
  pub(crate) recipe: &'a Recipe<'a>,
}

impl ColorDisplay for RecipeSignature<'_> {
  fn fmt(&self, f: &mut Formatter, color: Color) -> fmt::Result {
    // Write the name, then each parameter preceded by a single space
    write!(f, "{}", self.name)?;
    for parameter in &self.recipe.parameters {
      write!(f, " {}", parameter.color_display(color))?;
    }
    Ok(())
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/count.rs | src/count.rs | use super::*;
/// A quantity of `T`s: displays the item followed by a trailing `s`
/// unless the count is exactly one, e.g. `Count("dog", 2)` → `"dogs"`
pub struct Count<T: Display>(pub T, pub usize);

impl<T: Display> Display for Count<T> {
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    // Pluralize for zero and for anything greater than one
    let suffix = if self.1 == 1 { "" } else { "s" };
    write!(f, "{}{}", self.0, suffix)
  }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn count() {
assert_eq!(Count("dog", 0).to_string(), "dogs");
assert_eq!(Count("dog", 1).to_string(), "dog");
assert_eq!(Count("dog", 2).to_string(), "dogs");
}
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/settings.rs | src/settings.rs | use super::*;
// Default shell invocation, `sh -cu` (see `Settings::shell` for precedence)
pub(crate) const DEFAULT_SHELL: &str = "sh";
pub(crate) const DEFAULT_SHELL_ARGS: &[&str] = &["-cu"];
// PowerShell invocation used on Windows when `Settings::windows_powershell`
// is set (see `Settings::shell`)
pub(crate) const WINDOWS_POWERSHELL_SHELL: &str = "powershell.exe";
pub(crate) const WINDOWS_POWERSHELL_ARGS: &[&str] = &["-NoLogo", "-Command"];
/// Justfile settings; every field has a default, so `Settings::default()`
/// represents a justfile with no settings set
#[derive(Debug, PartialEq, Serialize, Default)]
pub(crate) struct Settings {
  pub(crate) allow_duplicate_recipes: bool,
  pub(crate) allow_duplicate_variables: bool,
  pub(crate) dotenv_filename: Option<String>,
  pub(crate) dotenv_load: bool,
  pub(crate) dotenv_override: bool,
  pub(crate) dotenv_path: Option<PathBuf>,
  pub(crate) dotenv_required: bool,
  pub(crate) export: bool,
  pub(crate) fallback: bool,
  pub(crate) ignore_comments: bool,
  pub(crate) no_exit_message: bool,
  pub(crate) positional_arguments: bool,
  pub(crate) quiet: bool,
  /// Excluded from serialization, unlike every other setting
  #[serde(skip)]
  pub(crate) script_interpreter: Option<Interpreter<String>>,
  pub(crate) shell: Option<Interpreter<String>>,
  pub(crate) tempdir: Option<String>,
  pub(crate) unstable: bool,
  /// On Windows, use PowerShell when no explicit shell is configured;
  /// see `Settings::shell` for precedence
  pub(crate) windows_powershell: bool,
  /// Windows-specific shell override; takes precedence over
  /// `windows_powershell` (see `Settings::shell`)
  pub(crate) windows_shell: Option<Interpreter<String>>,
  pub(crate) working_directory: Option<PathBuf>,
}
impl Settings {
  /// Build a `Command` for the resolved shell with its arguments applied
  pub(crate) fn shell_command(&self, config: &Config) -> Command {
    let (command, args) = self.shell(config);
    let mut cmd = Command::new(command);
    cmd.args(args);
    cmd
  }

  /// Resolve the shell command and its arguments.
  ///
  /// Precedence: `config`'s shell/shell_args (presumably command-line
  /// overrides — confirm in `Config`) win first; otherwise, on Windows,
  /// `windows_shell`, then `windows_powershell`; then the `shell`
  /// setting; and finally the default `sh -cu`.
  pub(crate) fn shell<'a>(&'a self, config: &'a Config) -> (&'a str, Vec<&'a str>) {
    match (&config.shell, &config.shell_args) {
      // Both overridden by config
      (Some(shell), Some(shell_args)) => (shell, shell_args.iter().map(String::as_ref).collect()),
      // Only the shell overridden: pair it with the default arguments
      (Some(shell), None) => (shell, DEFAULT_SHELL_ARGS.to_vec()),
      // Only the arguments overridden: pair them with the default shell
      (None, Some(shell_args)) => (
        DEFAULT_SHELL,
        shell_args.iter().map(String::as_ref).collect(),
      ),
      (None, None) => {
        if let (true, Some(shell)) = (cfg!(windows), &self.windows_shell) {
          (
            shell.command.as_ref(),
            shell.arguments.iter().map(AsRef::as_ref).collect(),
          )
        } else if cfg!(windows) && self.windows_powershell {
          (WINDOWS_POWERSHELL_SHELL, WINDOWS_POWERSHELL_ARGS.to_vec())
        } else if let Some(shell) = &self.shell {
          (
            shell.command.as_ref(),
            shell.arguments.iter().map(AsRef::as_ref).collect(),
          )
        } else {
          (DEFAULT_SHELL, DEFAULT_SHELL_ARGS.to_vec())
        }
      }
    }
  }
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn default_shell() {
let settings = Settings::default();
let config = Config {
shell_command: false,
..testing::config(&[])
};
assert_eq!(settings.shell(&config), ("sh", vec!["-cu"]));
}
#[test]
fn default_shell_powershell() {
let settings = Settings {
windows_powershell: true,
..Default::default()
};
let config = Config {
shell_command: false,
..testing::config(&[])
};
if cfg!(windows) {
assert_eq!(
settings.shell(&config),
("powershell.exe", vec!["-NoLogo", "-Command"])
);
} else {
assert_eq!(settings.shell(&config), ("sh", vec!["-cu"]));
}
}
#[test]
fn overwrite_shell() {
let settings = Settings::default();
let config = Config {
shell_command: true,
shell: Some("lol".to_string()),
shell_args: Some(vec!["-nice".to_string()]),
..testing::config(&[])
};
assert_eq!(settings.shell(&config), ("lol", vec!["-nice"]));
}
#[test]
fn overwrite_shell_powershell() {
let settings = Settings {
windows_powershell: true,
..Default::default()
};
let config = Config {
shell_command: true,
shell: Some("lol".to_string()),
shell_args: Some(vec!["-nice".to_string()]),
..testing::config(&[])
};
assert_eq!(settings.shell(&config), ("lol", vec!["-nice"]));
}
#[test]
fn shell_cooked() {
let settings = Settings {
shell: Some(Interpreter {
command: "asdf.exe".into(),
arguments: vec!["-nope".into()],
}),
..Default::default()
};
let config = Config {
shell_command: false,
..testing::config(&[])
};
assert_eq!(settings.shell(&config), ("asdf.exe", vec!["-nope"]));
}
#[test]
fn shell_present_but_not_shell_args() {
let settings = Settings {
windows_powershell: true,
..Default::default()
};
let config = Config {
shell: Some("lol".to_string()),
..testing::config(&[])
};
assert_eq!(settings.shell(&config).0, "lol");
}
#[test]
fn shell_args_present_but_not_shell() {
let settings = Settings {
windows_powershell: true,
..Default::default()
};
let config = Config {
shell_command: false,
shell_args: Some(vec!["-nice".to_string()]),
..testing::config(&[])
};
assert_eq!(settings.shell(&config), ("sh", vec!["-nice"]));
}
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/ast.rs | src/ast.rs | use super::*;
/// The top-level type produced by the parser. Not all successful parses result
/// in valid justfiles, so additional consistency checks and name resolution
/// are performed by the `Analyzer`, which produces a `Justfile` from an `Ast`.
#[derive(Debug, Clone)]
pub(crate) struct Ast<'src> {
pub(crate) items: Vec<Item<'src>>,
pub(crate) module_path: String,
pub(crate) unstable_features: BTreeSet<UnstableFeature>,
pub(crate) warnings: Vec<Warning>,
pub(crate) working_directory: PathBuf,
}
impl Display for Ast<'_> {
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    // Peek ahead so separator blank lines are only written between items,
    // never after the last one
    let mut iter = self.items.iter().peekable();
    while let Some(item) = iter.next() {
      writeln!(f, "{item}")?;
      if let Some(next_item) = iter.peek() {
        // Insert a blank line after every recipe, and between runs of
        // differently-kinded items
        if matches!(item, Item::Recipe(_))
          || mem::discriminant(item) != mem::discriminant(next_item)
        {
          writeln!(f)?;
        }
      }
    }
    Ok(())
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/node.rs | src/node.rs | use super::*;
/// Methods common to all AST nodes. Currently only used in parser unit tests.
pub(crate) trait Node<'src> {
/// Construct an untyped tree of atoms representing this Node. This function,
/// and `Tree` type, are only used in parser unit tests.
fn tree(&self) -> Tree<'src>;
}
impl<'src> Node<'src> for Ast<'src> {
  /// Render the justfile as a `justfile` atom containing all items
  /// followed by all warnings
  fn tree(&self) -> Tree<'src> {
    let items = self.items.iter().map(Node::tree);
    let warnings = self.warnings.iter().map(Node::tree);
    Tree::atom("justfile").extend(items).extend(warnings)
  }
}
impl<'src> Node<'src> for Item<'src> {
/// Render a top-level item as an untyped tree; most variants delegate to
/// the contained node's own `tree` implementation.
fn tree(&self) -> Tree<'src> {
match self {
Self::Alias(alias) => alias.tree(),
Self::Assignment(assignment) => assignment.tree(),
Self::Comment(comment) => comment.tree(),
Self::Import {
relative, optional, ..
} => {
// `import` / `import?` followed by the relative path literal.
let mut tree = Tree::atom("import");
if *optional {
tree = tree.push("?");
}
tree.push(format!("{relative}"))
}
Self::Module {
name,
optional,
relative,
..
} => {
// `mod` / `mod?` NAME, with an optional explicit path.
let mut tree = Tree::atom("mod");
if *optional {
tree = tree.push("?");
}
tree = tree.push(name.lexeme());
if let Some(relative) = relative {
tree = tree.push(format!("{relative}"));
}
tree
}
Self::Recipe(recipe) => recipe.tree(),
Self::Set(set) => set.tree(),
Self::Unexport { name } => {
let mut unexport = Tree::atom(Keyword::Unexport.lexeme());
// Normalize kebab-case names to snake_case, matching `Set`'s rendering.
unexport.push_mut(name.lexeme().replace('-', "_"));
unexport
}
}
}
}
impl<'src> Node<'src> for Namepath<'src> {
/// A single-component path renders as a bare atom; multi-component paths
/// render as a list with one atom per component.
fn tree(&self) -> Tree<'src> {
match self.components() {
1 => Tree::atom(self.last().lexeme()),
_ => Tree::list(
self
.iter()
.map(|name| Tree::atom(Cow::Borrowed(name.lexeme()))),
),
}
}
}
impl<'src> Node<'src> for Alias<'src, Namepath<'src>> {
  /// An alias renders as `(alias NAME TARGET)`.
  fn tree(&self) -> Tree<'src> {
    Tree::atom(Keyword::Alias.lexeme())
      .push(self.name.lexeme())
      .push(self.target.tree())
  }
}
impl<'src> Node<'src> for Assignment<'src> {
  /// An assignment renders as `(assignment [# export] NAME VALUE)`; exported
  /// assignments carry a `#`/`export` marker pair before the name.
  fn tree(&self) -> Tree<'src> {
    let tree = Tree::atom("assignment");
    let tree = if self.export {
      tree.push("#").push(Keyword::Export.lexeme())
    } else {
      tree
    };
    tree.push(self.name.lexeme()).push(self.value.tree())
  }
}
impl<'src> Node<'src> for Expression<'src> {
/// Render an expression as an untyped tree: each operator or construct
/// becomes a head atom followed by the trees of its operands.
fn tree(&self) -> Tree<'src> {
match self {
Self::And { lhs, rhs } => Tree::atom("&&").push(lhs.tree()).push(rhs.tree()),
Self::Assert {
condition: Condition { lhs, rhs, operator },
error,
..
} => Tree::atom(Keyword::Assert.lexeme())
.push(lhs.tree())
.push(operator.to_string())
.push(rhs.tree())
.push(error.tree()),
Self::Backtick { contents, .. } => Tree::atom("backtick").push(Tree::string(contents)),
Self::Call { thunk } => {
use Thunk::*;
// A function call is a "call" atom followed by the function name and
// the tree of each argument; the arity variants only differ in how
// the arguments are stored.
let mut tree = Tree::atom("call");
match thunk {
Nullary { name, .. } => tree.push_mut(name.lexeme()),
Unary { name, arg, .. } => {
tree.push_mut(name.lexeme());
tree.push_mut(arg.tree());
}
UnaryOpt {
name, args: (a, b), ..
} => {
tree.push_mut(name.lexeme());
tree.push_mut(a.tree());
// The optional second argument only appears when present.
if let Some(b) = b.as_ref() {
tree.push_mut(b.tree());
}
}
UnaryPlus {
name,
args: (a, rest),
..
} => {
tree.push_mut(name.lexeme());
tree.push_mut(a.tree());
for arg in rest {
tree.push_mut(arg.tree());
}
}
Binary {
name, args: [a, b], ..
} => {
tree.push_mut(name.lexeme());
tree.push_mut(a.tree());
tree.push_mut(b.tree());
}
BinaryPlus {
name,
args: ([a, b], rest),
..
} => {
tree.push_mut(name.lexeme());
tree.push_mut(a.tree());
tree.push_mut(b.tree());
for arg in rest {
tree.push_mut(arg.tree());
}
}
Ternary {
name,
args: [a, b, c],
..
} => {
tree.push_mut(name.lexeme());
tree.push_mut(a.tree());
tree.push_mut(b.tree());
tree.push_mut(c.tree());
}
}
tree
}
Self::Concatenation { lhs, rhs } => Tree::atom("+").push(lhs.tree()).push(rhs.tree()),
Self::Conditional {
condition: Condition { lhs, rhs, operator },
then,
otherwise,
} => {
// `if LHS OP RHS THEN OTHERWISE`
let mut tree = Tree::atom(Keyword::If.lexeme());
tree.push_mut(lhs.tree());
tree.push_mut(operator.to_string());
tree.push_mut(rhs.tree());
tree.push_mut(then.tree());
tree.push_mut(otherwise.tree());
tree
}
Self::FormatString { start, expressions } => {
// Format strings alternate literal text and interpolated expressions,
// starting and ending with (possibly empty) literal segments.
let mut tree = Tree::atom("format");
tree.push_mut(Tree::string(&start.cooked));
for (expression, string) in expressions {
tree.push_mut(expression.tree());
tree.push_mut(Tree::string(&string.cooked));
}
tree
}
// A parenthesized group is a single-element list.
Self::Group { contents } => Tree::List(vec![contents.tree()]),
Self::Join { lhs: None, rhs } => Tree::atom("/").push(rhs.tree()),
Self::Join {
lhs: Some(lhs),
rhs,
} => Tree::atom("/").push(lhs.tree()).push(rhs.tree()),
Self::Or { lhs, rhs } => Tree::atom("||").push(lhs.tree()).push(rhs.tree()),
Self::StringLiteral {
string_literal: StringLiteral { cooked, .. },
} => Tree::string(cooked),
Self::Variable { name } => Tree::atom(name.lexeme()),
}
}
}
impl<'src> Node<'src> for UnresolvedRecipe<'src> {
/// Render a recipe as a tree: quiet marker, doc comment, name, parameters,
/// dependencies (split into priors and subsequents), and body lines.
fn tree(&self) -> Tree<'src> {
let mut t = Tree::atom("recipe");
if self.quiet {
t.push_mut("#");
t.push_mut("quiet");
}
if let Some(doc) = &self.doc {
t.push_mut(Tree::string(doc));
}
t.push_mut(self.name.lexeme());
if !self.parameters.is_empty() {
let mut params = Tree::atom("params");
for parameter in &self.parameters {
// Variadic markers (`+`, `*`) precede the parameter itself.
if let Some(prefix) = parameter.kind.prefix() {
params.push_mut(prefix);
}
params.push_mut(parameter.tree());
}
t.push_mut(params);
}
if !self.dependencies.is_empty() {
let mut dependencies = Tree::atom("deps");
let mut subsequents = Tree::atom("sups");
for (i, dependency) in self.dependencies.iter().enumerate() {
let mut d = dependency.recipe.tree();
for argument in &dependency.arguments {
d.push_mut(argument.tree());
}
// The first `priors` dependencies run before the recipe; the rest
// are subsequents that run after it.
if i < self.priors {
dependencies.push_mut(d);
} else {
subsequents.push_mut(d);
}
}
// Only emit each group if it actually received entries (pushing onto
// an atom turns it into a list — TODO confirm against `Tree`).
if let Tree::List(_) = dependencies {
t.push_mut(dependencies);
}
if let Tree::List(_) = subsequents {
t.push_mut(subsequents);
}
}
if !self.body.is_empty() {
t.push_mut(Tree::atom("body").extend(self.body.iter().map(Node::tree)));
}
t
}
}
impl<'src> Node<'src> for Parameter<'src> {
  /// A parameter is a list of its name atom, followed by the tree of its
  /// default value when one is present.
  fn tree(&self) -> Tree<'src> {
    let name = Tree::atom(self.name.lexeme());
    match &self.default {
      Some(default) => Tree::List(vec![name, default.tree()]),
      None => Tree::List(vec![name]),
    }
  }
}
impl<'src> Node<'src> for Line<'src> {
/// A recipe body line is a list of its fragments' trees.
fn tree(&self) -> Tree<'src> {
Tree::list(self.fragments.iter().map(Node::tree))
}
}
impl<'src> Node<'src> for Fragment<'src> {
/// Literal text becomes a string atom; a `{{ … }}` interpolation becomes a
/// single-element list wrapping the inner expression's tree.
fn tree(&self) -> Tree<'src> {
match self {
Self::Text { token } => Tree::string(token.lexeme()),
Self::Interpolation { expression } => Tree::List(vec![expression.tree()]),
}
}
}
impl<'src> Node<'src> for Set<'src> {
/// A `set` statement renders as `(set NAME VALUE…)`, with the value's shape
/// depending on the setting's type.
fn tree(&self) -> Tree<'src> {
let mut set = Tree::atom(Keyword::Set.lexeme());
// Normalize kebab-case setting names to snake_case for the tree.
set.push_mut(self.name.lexeme().replace('-', "_"));
match &self.value {
// Boolean settings render as "true"/"false".
Setting::AllowDuplicateRecipes(value)
| Setting::AllowDuplicateVariables(value)
| Setting::DotenvLoad(value)
| Setting::DotenvOverride(value)
| Setting::DotenvRequired(value)
| Setting::Export(value)
| Setting::Fallback(value)
| Setting::NoExitMessage(value)
| Setting::PositionalArguments(value)
| Setting::Quiet(value)
| Setting::Unstable(value)
| Setting::WindowsPowerShell(value)
| Setting::IgnoreComments(value) => {
set.push_mut(value.to_string());
}
// String-literal settings render as the literal's tree.
Setting::DotenvFilename(value)
| Setting::DotenvPath(value)
| Setting::Tempdir(value)
| Setting::WorkingDirectory(value) => {
set.push_mut(value.tree());
}
// Interpreter settings render as the command followed by its arguments.
Setting::ScriptInterpreter(Interpreter { command, arguments })
| Setting::Shell(Interpreter { command, arguments })
| Setting::WindowsShell(Interpreter { command, arguments }) => {
set.push_mut(command.tree());
for argument in arguments {
set.push_mut(argument.tree());
}
}
}
set
}
}
impl<'src> Node<'src> for Warning {
// NOTE(review): `tree` is only called from parser unit tests, which
// presumably never produce warnings, so this is deliberately unreachable —
// confirm if a test ever exercises a warning-producing construct.
fn tree(&self) -> Tree<'src> {
unreachable!()
}
}
impl<'src> Node<'src> for str {
  /// Comments render as a "comment" atom holding the text wrapped in
  /// double quotes.
  fn tree(&self) -> Tree<'src> {
    Tree::atom("comment").push(format!("\"{self}\""))
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/const_error.rs | src/const_error.rs | use super::*;
/// Error raised when an expression that must be evaluated in a const context
/// uses a construct that is not allowed there.
#[derive(Clone, Copy, Debug)]
pub(crate) enum ConstError<'src> {
// A backtick command substitution.
Backtick(Token<'src>),
// A call to a function, identified by its name.
FunctionCall(Name<'src>),
// A reference to a variable that is not itself const.
Variable(Name<'src>),
}
impl<'src> ConstError<'src> {
  /// Token identifying where in the source this error occurred, for use as
  /// error context.
  pub(crate) fn context(self) -> Token<'src> {
    match self {
      Self::FunctionCall(name) | Self::Variable(name) => name.token,
      Self::Backtick(token) => token,
    }
  }
}
impl Display for ConstError<'_> {
  /// Human-readable description of why const evaluation failed.
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    match self {
      Self::Variable(name) => {
        write!(
          f,
          "Cannot access non-const variable `{name}` in const context"
        )
      }
      Self::Backtick(_) => f.write_str("Cannot call backticks in const context"),
      Self::FunctionCall(_) => f.write_str("Cannot call functions in const context"),
    }
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/use_color.rs | src/use_color.rs | use super::*;
/// Value of the `--color` command-line option.
#[derive(Copy, Clone, Debug, PartialEq, ValueEnum)]
pub(crate) enum UseColor {
// Always colorize output.
Always,
// Decide automatically (presumably based on terminal detection — the
// logic lives in `Color`, not here).
Auto,
// Never colorize output.
Never,
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/string_state.rs | src/string_state.rs | use super::*;
/// Lexer state tracking whether the current position is inside a format
/// string — NOTE(review): semantics inferred from variant names and the
/// lexer's use; confirm against `Lexer`.
pub(crate) enum StringState {
// Resuming a format string of the given kind after an interpolation.
FormatContinue(StringKind),
// At the start of a format string.
FormatStart,
// Not inside a format string.
Normal,
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/justfile.rs | src/justfile.rs | use {super::*, serde::Serialize};
/// A fully compiled justfile module: recipes, assignments, aliases, settings,
/// and submodules, ready to be run or serialized with `--dump`.
#[derive(Debug, PartialEq, Serialize)]
pub(crate) struct Justfile<'src> {
pub(crate) aliases: Table<'src, Alias<'src>>,
pub(crate) assignments: Table<'src, Assignment<'src>>,
// Default recipe, serialized as "first" (presumably the first recipe
// defined — TODO confirm against the analyzer).
#[serde(rename = "first", serialize_with = "keyed::serialize_option")]
pub(crate) default: Option<Arc<Recipe<'src>>>,
pub(crate) doc: Option<String>,
pub(crate) groups: Vec<StringLiteral<'src>>,
// All source files loaded while compiling this module.
#[serde(skip)]
pub(crate) loaded: Vec<PathBuf>,
// Dotted path of this module; used as the key into the per-module scope map.
#[serde(skip)]
pub(crate) module_path: String,
pub(crate) modules: Table<'src, Justfile<'src>>,
// Module name; `None` for the root justfile (see `is_submodule`).
#[serde(skip)]
pub(crate) name: Option<Name<'src>>,
#[serde(skip)]
pub(crate) private: bool,
pub(crate) recipes: Table<'src, Arc<Recipe<'src>>>,
pub(crate) settings: Settings,
pub(crate) source: PathBuf,
// Variable names excluded from the environment via `unexport`.
pub(crate) unexports: HashSet<String>,
#[serde(skip)]
pub(crate) unstable_features: BTreeSet<UnstableFeature>,
pub(crate) warnings: Vec<Warning>,
#[serde(skip)]
pub(crate) working_directory: PathBuf,
}
impl<'src> Justfile<'src> {
/// Return the candidate whose name is closest to `input` by edit distance,
/// provided that distance is less than 3; ties go to the first minimum.
fn find_suggestion(
input: &str,
candidates: impl Iterator<Item = Suggestion<'src>>,
) -> Option<Suggestion<'src>> {
candidates
.map(|suggestion| (edit_distance(input, suggestion.name), suggestion))
.filter(|(distance, _suggestion)| *distance < 3)
.min_by_key(|(distance, _suggestion)| *distance)
.map(|(_distance, suggestion)| suggestion)
}
/// Suggest a public recipe or alias whose name is similar to `input`, for
/// "unknown recipe" error messages. Alias suggestions carry their target.
pub(crate) fn suggest_recipe(&self, input: &str) -> Option<Suggestion<'src>> {
Self::find_suggestion(
input,
self
.recipes
.values()
.filter(|recipe| recipe.is_public())
.map(|recipe| Suggestion {
name: recipe.name(),
target: None,
})
.chain(
self
.aliases
.values()
.filter(|alias| alias.is_public())
.map(|alias| Suggestion {
name: alias.name.lexeme(),
target: Some(alias.target.name.lexeme()),
}),
),
)
}
/// Suggest an assignment whose name is similar to `input`, for "unknown
/// variable" error messages.
pub(crate) fn suggest_variable(&self, input: &str) -> Option<Suggestion<'src>> {
Self::find_suggestion(
input,
self
.assignments
.keys()
.map(|name| Suggestion { name, target: None }),
)
}
/// Evaluate this module's assignments into a new scope (child of `root`),
/// record it in `scopes` under the module path, then recurse into
/// submodules with this module's scope as their parent. Scopes are
/// allocated in `arena` so references to them live for `'run`.
fn evaluate_scopes<'run>(
&'run self,
arena: &'run Arena<Scope<'src, 'run>>,
config: &'run Config,
dotenv: &'run BTreeMap<String, String>,
root: &'run Scope<'src, 'run>,
scopes: &mut BTreeMap<String, (&'run Justfile<'src>, &'run Scope<'src, 'run>)>,
search: &'run Search,
) -> RunResult<'src> {
let scope = Evaluator::evaluate_assignments(config, dotenv, self, root, search)?;
let scope = arena.alloc(scope);
scopes.insert(self.module_path.clone(), (self, scope));
for module in self.modules.values() {
module.evaluate_scopes(arena, config, dotenv, scope, scopes, search)?;
}
Ok(())
}
/// Top-level entry point for executing this justfile: validates overrides,
/// loads dotenv, evaluates all module scopes, handles the `--command` and
/// `--evaluate` subcommands, then parses `arguments` into recipe
/// invocations and runs each one.
pub(crate) fn run(
&self,
config: &Config,
search: &Search,
arguments: &[String],
) -> RunResult<'src> {
// Reject `NAME=VALUE` overrides that don't correspond to any assignment.
let unknown_overrides = config
.overrides
.keys()
.filter(|name| !self.assignments.contains_key(name.as_str()))
.cloned()
.collect::<Vec<String>>();
if !unknown_overrides.is_empty() {
return Err(Error::UnknownOverrides {
overrides: unknown_overrides,
});
}
let dotenv = if config.load_dotenv {
load_dotenv(config, &self.settings, &search.working_directory)?
} else {
BTreeMap::new()
};
// Evaluate every module's assignments up front; `scopes` maps module
// path -> (module, evaluated scope).
let root = Scope::root();
let arena = Arena::new();
let mut scopes = BTreeMap::new();
self.evaluate_scopes(&arena, config, &dotenv, &root, &mut scopes, search)?;
let scope = scopes.get(&self.module_path).unwrap().1;
match &config.subcommand {
// `just --command BINARY ARGS…`: run an arbitrary command with the
// justfile's environment, optionally through the configured shell.
Subcommand::Command {
binary, arguments, ..
} => {
let mut command = if config.shell_command {
let mut command = self.settings.shell_command(config);
command.arg(binary);
command
} else {
Command::new(binary)
};
command
.args(arguments)
.current_dir(&search.working_directory);
let scope = scope.child();
command.export(&self.settings, &dotenv, &scope, &self.unexports);
let (result, caught) = command.status_guard();
let status = result.map_err(|io_error| Error::CommandInvoke {
binary: binary.clone(),
arguments: arguments.clone(),
io_error,
})?;
if !status.success() {
return Err(Error::CommandStatus {
binary: binary.clone(),
arguments: arguments.clone(),
status,
});
}
if let Some(signal) = caught {
return Err(Error::Interrupted { signal });
}
return Ok(());
}
// `just --evaluate [VARIABLE]`: print one variable, or all public
// bindings aligned in a column.
Subcommand::Evaluate { variable, .. } => {
if let Some(variable) = variable {
if let Some(value) = scope.value(variable) {
print!("{value}");
} else {
return Err(Error::EvalUnknownVariable {
suggestion: self.suggest_variable(variable),
variable: variable.clone(),
});
}
} else {
let width = scope.names().fold(0, |max, name| name.len().max(max));
for binding in scope.bindings() {
if !binding.private {
println!(
"{0:1$} := \"{2}\"",
binding.name.lexeme(),
width,
binding.value
);
}
}
}
return Ok(());
}
_ => {}
}
// Normal execution: split arguments into recipe invocations and run them.
let arguments = arguments.iter().map(String::as_str).collect::<Vec<&str>>();
let invocations = InvocationParser::parse_invocations(self, &arguments)?;
if config.one && invocations.len() > 1 {
return Err(Error::ExcessInvocations {
invocations: invocations.len(),
});
}
// `ran` memoizes (recipe, arguments) pairs so shared dependencies only
// run once across invocations.
let ran = Ran::default();
for invocation in invocations {
Self::run_recipe(
&invocation.arguments,
config,
&dotenv,
false,
&ran,
invocation.recipe,
&scopes,
search,
)?;
}
Ok(())
}
/// Ensure `--unstable` is in effect if this module or any submodule uses an
/// unstable feature; reports the first such feature found.
pub(crate) fn check_unstable(&self, config: &Config) -> RunResult<'src> {
  if let Some(&feature) = self.unstable_features.first() {
    config.require_unstable(self, feature)?;
  }
  for module in self.modules.values() {
    module.check_unstable(config)?;
  }
  Ok(())
}
/// Look up an alias by name.
pub(crate) fn get_alias(&self, name: &str) -> Option<&Alias<'src>> {
self.aliases.get(name)
}
/// Look up a recipe by name, following aliases.
pub(crate) fn get_recipe(&self, name: &str) -> Option<&Recipe<'src>> {
self
.recipes
.get(name)
.map(Arc::as_ref)
.or_else(|| self.aliases.get(name).map(|alias| alias.target.as_ref()))
}
/// True for submodules; the root justfile has no name.
pub(crate) fn is_submodule(&self) -> bool {
self.name.is_some()
}
/// Module name, or the empty string for the root justfile.
pub(crate) fn name(&self) -> &'src str {
self.name.map(|name| name.lexeme()).unwrap_or_default()
}
/// Run a single recipe: confirm if required, evaluate its parameters in its
/// module's scope, run prior dependencies, the recipe itself, then
/// subsequent dependencies. `ran` memoizes (recipe, arguments) pairs so a
/// recipe invoked twice with the same arguments only runs once.
fn run_recipe(
arguments: &[Vec<String>],
config: &Config,
dotenv: &BTreeMap<String, String>,
is_dependency: bool,
ran: &Ran,
recipe: &Recipe<'src>,
scopes: &BTreeMap<String, (&Justfile<'src>, &Scope<'src, '_>)>,
search: &Search,
) -> RunResult<'src> {
// Hold the per-(recipe, arguments) mutex for the whole run so concurrent
// parallel dependencies on the same recipe wait rather than duplicate it.
let mutex = ran.mutex(recipe, arguments);
let mut guard = mutex.lock().unwrap();
if *guard {
return Ok(());
}
// `[confirm]` recipes prompt unless `--yes` was passed.
if !config.yes && !recipe.confirm()? {
return Err(Error::NotConfirmed {
recipe: recipe.name(),
});
}
let (module, scope) = scopes
.get(recipe.module_path())
.expect("failed to retrieve scope for module");
let context = ExecutionContext {
config,
dotenv,
module,
search,
};
let (outer, positional) = Evaluator::evaluate_parameters(
arguments,
&context,
is_dependency,
&recipe.parameters,
recipe,
scope,
)?;
let scope = outer.child();
let mut evaluator = Evaluator::new(&context, true, &scope);
Self::run_dependencies(
config,
&context,
recipe.priors(),
dotenv,
&mut evaluator,
ran,
recipe,
scopes,
search,
)?;
recipe.run(&context, &scope, &positional, is_dependency)?;
// Subsequents get a fresh `Ran` so they always run, even if an earlier
// invocation already ran them as dependencies.
Self::run_dependencies(
config,
&context,
recipe.subsequents(),
dotenv,
&mut evaluator,
&Ran::default(),
recipe,
scopes,
search,
)?;
// Mark complete only after success, while still holding the lock.
*guard = true;
Ok(())
}
/// Run a set of dependencies of `recipe`: evaluate all dependency argument
/// expressions first, then run each dependency, in parallel scoped threads
/// if the recipe is marked `[parallel]`, serially otherwise. No-op when
/// `--no-dependencies` is set.
fn run_dependencies<'run>(
config: &Config,
context: &ExecutionContext<'src, 'run>,
dependencies: &[Dependency<'src>],
dotenv: &BTreeMap<String, String>,
evaluator: &mut Evaluator<'src, 'run>,
ran: &Ran,
recipe: &Recipe<'src>,
scopes: &BTreeMap<String, (&Justfile<'src>, &Scope<'src, 'run>)>,
search: &Search,
) -> RunResult<'src> {
if context.config.no_dependencies {
return Ok(());
}
// Evaluate every argument expression up front, before any dependency
// runs; `evaluator` is &mut and can't be shared with spawned threads.
let mut evaluated = Vec::new();
for Dependency { recipe, arguments } in dependencies {
let mut grouped = Vec::new();
for group in arguments {
let evaluated_group = group
.iter()
.map(|argument| evaluator.evaluate_expression(argument))
.collect::<RunResult<Vec<String>>>()?;
grouped.push(evaluated_group);
}
evaluated.push((recipe, grouped));
}
if recipe.is_parallel() {
// Scoped threads let the spawned closures borrow local state without
// 'static bounds; `scope` joins all threads before returning.
thread::scope::<_, RunResult>(|thread_scope| {
let mut handles = Vec::new();
for (recipe, arguments) in evaluated {
handles.push(thread_scope.spawn(move || {
Self::run_recipe(
&arguments, config, dotenv, true, ran, recipe, scopes, search,
)
}));
}
for handle in handles {
handle
.join()
.map_err(|_| Error::internal("parallel dependency thread panicked"))??;
}
Ok(())
})?;
} else {
for (recipe, arguments) in evaluated {
Self::run_recipe(
&arguments, config, dotenv, true, ran, recipe, scopes, search,
)?;
}
}
Ok(())
}
/// Non-private submodules, in source order when `--unsorted` is set and in
/// the table's (name) order otherwise.
pub(crate) fn public_modules(&self, config: &Config) -> Vec<&Justfile> {
let mut modules = self
.modules
.values()
.filter(|module| !module.private)
.collect::<Vec<&Justfile>>();
if config.unsorted {
modules.sort_by_key(|module| {
module
.name
.map(|name| name.token.offset)
.unwrap_or_default()
});
}
modules
}
/// Public recipes, in source order (by import chain, then offset) when
/// `--unsorted` is set and in the table's (name) order otherwise.
pub(crate) fn public_recipes(&self, config: &Config) -> Vec<&Recipe> {
let mut recipes = self
.recipes
.values()
.map(AsRef::as_ref)
.filter(|recipe| recipe.is_public())
.collect::<Vec<&Recipe>>();
if config.unsorted {
recipes.sort_by_key(|recipe| (&recipe.import_offsets, recipe.name.offset));
}
recipes
}
/// Group names this module was declared with, as plain strings.
pub(crate) fn groups(&self) -> Vec<&str> {
self
.groups
.iter()
.map(|group| group.cooked.as_str())
.collect()
}
/// Deduplicated group names used by public recipes and submodules, ordered
/// by source position when `--unsorted` is set and alphabetically otherwise.
pub(crate) fn public_groups(&self, config: &Config) -> Vec<String> {
// Collect (import chain, source offset, group) triples so source-order
// sorting can use the position of the item that declared the group.
let mut groups = Vec::new();
for recipe in self.recipes.values() {
if recipe.is_public() {
for group in recipe.groups() {
groups.push((recipe.import_offsets.as_slice(), recipe.name.offset, group));
}
}
}
for submodule in self.public_modules(config) {
for group in submodule.groups() {
groups.push((&[], submodule.name.unwrap().offset, group.to_string()));
}
}
if config.unsorted {
groups.sort();
} else {
groups.sort_by(|(_, _, a), (_, _, b)| a.cmp(b));
}
// Keep only the first occurrence of each group name.
let mut seen = HashSet::new();
groups.retain(|(_, _, group)| seen.insert(group.clone()));
groups.into_iter().map(|(_, _, group)| group).collect()
}
}
impl ColorDisplay for Justfile<'_> {
/// Dump the justfile as source text: assignments, then aliases, then
/// recipes, separated by blank lines.
fn fmt(&self, f: &mut Formatter, color: Color) -> fmt::Result {
// Countdown of entries remaining, so a blank-line separator is written
// after every entry except the last.
let mut items = self.recipes.len() + self.assignments.len() + self.aliases.len();
for (name, assignment) in &self.assignments {
if assignment.export {
write!(f, "export ")?;
}
write!(f, "{name} := {}", assignment.value)?;
items -= 1;
if items != 0 {
write!(f, "\n\n")?;
}
}
for alias in self.aliases.values() {
write!(f, "{alias}")?;
items -= 1;
if items != 0 {
write!(f, "\n\n")?;
}
}
for recipe in self.recipes.values() {
write!(f, "{}", recipe.color_display(color))?;
items -= 1;
if items != 0 {
write!(f, "\n\n")?;
}
}
Ok(())
}
}
impl<'src> Keyed<'src> for Justfile<'src> {
/// Modules are keyed by name in `Table`s; the root's key is "".
fn key(&self) -> &'src str {
self.name()
}
}
#[cfg(test)]
mod tests {
use super::*;
use testing::compile;
use Error::*;
run_error! {
name: unknown_recipe_no_suggestion,
src: "a:\nb:\nc:",
args: ["a", "xyz", "y", "z"],
error: UnknownRecipe {
recipe,
suggestion,
},
check: {
assert_eq!(recipe, "xyz");
assert_eq!(suggestion, None);
}
}
run_error! {
name: unknown_recipe_with_suggestion,
src: "a:\nb:\nc:",
args: ["a", "x", "y", "z"],
error: UnknownRecipe {
recipe,
suggestion,
},
check: {
assert_eq!(recipe, "x");
assert_eq!(suggestion, Some(Suggestion {
name: "a",
target: None,
}));
}
}
run_error! {
name: unknown_recipe_show_alias_suggestion,
src: "
foo:
echo foo
alias z := foo
",
args: ["zz"],
error: UnknownRecipe {
recipe,
suggestion,
},
check: {
assert_eq!(recipe, "zz");
assert_eq!(suggestion, Some(Suggestion {
name: "z",
target: Some("foo"),
}
));
}
}
run_error! {
name: code_error,
src: "
fail:
@exit 100
",
args: ["fail"],
error: Code {
recipe,
line_number,
code,
print_message,
},
check: {
assert_eq!(recipe, "fail");
assert_eq!(code, 100);
assert_eq!(line_number, Some(2));
assert!(print_message);
}
}
run_error! {
name: run_args,
src: r#"
a return code:
@x() { {{return}} {{code + "0"}}; }; x
"#,
args: ["a", "return", "15"],
error: Code {
recipe,
line_number,
code,
print_message,
},
check: {
assert_eq!(recipe, "a");
assert_eq!(code, 150);
assert_eq!(line_number, Some(2));
assert!(print_message);
}
}
run_error! {
name: missing_some_arguments,
src: "a b c d:",
args: ["a", "b", "c"],
error: PositionalArgumentCountMismatch {
recipe,
found,
min,
max,
},
check: {
assert_eq!(recipe.name(), "a");
assert_eq!(found, 2);
assert_eq!(min, 3);
assert_eq!(max, 3);
}
}
run_error! {
name: missing_some_arguments_variadic,
src: "a b c +d:",
args: ["a", "B", "C"],
error: PositionalArgumentCountMismatch {
recipe,
found,
min,
max,
},
check: {
assert_eq!(recipe.name(), "a");
assert_eq!(found, 2);
assert_eq!(min, 3);
assert_eq!(max, usize::MAX - 1);
}
}
run_error! {
name: missing_all_arguments,
src: "a b c d:\n echo {{b}}{{c}}{{d}}",
args: ["a"],
error: PositionalArgumentCountMismatch {
recipe,
found,
min,
max,
},
check: {
assert_eq!(recipe.name(), "a");
assert_eq!(found, 0);
assert_eq!(min, 3);
assert_eq!(max, 3);
}
}
run_error! {
name: missing_some_defaults,
src: "a b c d='hello':",
args: ["a", "b"],
error: PositionalArgumentCountMismatch {
recipe,
found,
min,
max,
},
check: {
assert_eq!(recipe.name(), "a");
assert_eq!(found, 1);
assert_eq!(min, 2);
assert_eq!(max, 3);
}
}
run_error! {
name: missing_all_defaults,
src: "a b c='r' d='h':",
args: ["a"],
error: PositionalArgumentCountMismatch {
recipe,
found,
min,
max,
},
check: {
assert_eq!(recipe.name(), "a");
assert_eq!(found, 0);
assert_eq!(min, 1);
assert_eq!(max, 3);
}
}
run_error! {
name: unknown_overrides,
src: "
a:
echo {{`f() { return 100; }; f`}}
",
args: ["foo=bar", "baz=bob", "a"],
error: UnknownOverrides { overrides },
check: {
assert_eq!(overrides, &["baz", "foo"]);
}
}
run_error! {
name: export_failure,
src: r#"
export foo := "a"
baz := "c"
export bar := "b"
export abc := foo + bar + baz
wut:
echo $foo $bar $baz
"#,
args: ["--quiet", "wut"],
error: Code {
recipe,
line_number,
print_message,
..
},
check: {
assert_eq!(recipe, "wut");
assert_eq!(line_number, Some(7));
assert!(print_message);
}
}
/// Assert that `input` compiles and pretty-prints to `expected`, and that
/// the output round-trips: re-compiling and re-dumping it is a fixed point.
fn case(input: &str, expected: &str) {
let justfile = compile(input);
let actual = format!("{}", justfile.color_display(Color::never()));
assert_eq!(actual, expected);
let reparsed = compile(&actual);
let redumped = format!("{}", reparsed.color_display(Color::never()));
assert_eq!(redumped, actual);
}
#[test]
fn parse_empty() {
case(
"
# hello
",
"",
);
}
#[test]
fn parse_string_default() {
case(
r#"
foo a="b\t":
"#,
r#"foo a="b\t":"#,
);
}
#[test]
fn parse_multiple() {
case(
r"
a:
b:
", r"a:
b:",
);
}
#[test]
fn parse_variadic() {
case(
r"
foo +a:
",
r"foo +a:",
);
}
#[test]
fn parse_variadic_string_default() {
case(
r#"
foo +a="Hello":
"#,
r#"foo +a="Hello":"#,
);
}
#[test]
fn parse_raw_string_default() {
case(
r"
foo a='b\t':
",
r"foo a='b\t':",
);
}
#[test]
fn parse_export() {
case(
r#"
export a := "hello"
"#,
r#"export a := "hello""#,
);
}
#[test]
fn parse_alias_after_target() {
case(
r"
foo:
echo a
alias f := foo
",
r"alias f := foo
foo:
echo a",
);
}
#[test]
fn parse_alias_before_target() {
case(
r"
alias f := foo
foo:
echo a
",
r"alias f := foo
foo:
echo a",
);
}
#[test]
fn parse_alias_with_comment() {
case(
r"
alias f := foo #comment
foo:
echo a
",
r"alias f := foo
foo:
echo a",
);
}
#[test]
fn parse_complex() {
case(
"
x:
y:
z:
foo := \"xx\"
bar := foo
goodbye := \"y\"
hello a b c : x y z #hello
#! blah
#blarg
{{ foo + bar}}abc{{ goodbye\t + \"x\" }}xyz
1
2
3
",
"bar := foo
foo := \"xx\"
goodbye := \"y\"
hello a b c: x y z
#! blah
#blarg
{{ foo + bar }}abc{{ goodbye + \"x\" }}xyz
1
2
3
x:
y:
z:",
);
}
#[test]
fn parse_shebang() {
case(
"
practicum := 'hello'
install:
\t#!/bin/sh
\tif [[ -f {{practicum}} ]]; then
\t\treturn
\tfi
",
"practicum := 'hello'
install:
#!/bin/sh
if [[ -f {{ practicum }} ]]; then
\treturn
fi",
);
}
#[test]
fn parse_simple_shebang() {
case("a:\n #!\n print(1)", "a:\n #!\n print(1)");
}
#[test]
fn parse_assignments() {
case(
r#"a := "0"
c := a + b + a + b
b := "1"
"#,
r#"a := "0"
b := "1"
c := a + b + a + b"#,
);
}
#[test]
fn parse_assignment_backticks() {
case(
"a := `echo hello`
c := a + b + a + b
b := `echo goodbye`",
"a := `echo hello`
b := `echo goodbye`
c := a + b + a + b",
);
}
#[test]
fn parse_interpolation_backticks() {
case(
r#"a:
echo {{ `echo hello` + "blarg" }} {{ `echo bob` }}"#,
r#"a:
echo {{ `echo hello` + "blarg" }} {{ `echo bob` }}"#,
);
}
#[test]
fn eof_test() {
case("x:\ny:\nz:\na b c: x y z", "a b c: x y z\n\nx:\n\ny:\n\nz:");
}
#[test]
fn string_quote_escape() {
case(r#"a := "hello\"""#, r#"a := "hello\"""#);
}
#[test]
fn string_escapes() {
case(r#"a := "\n\t\r\"\\""#, r#"a := "\n\t\r\"\\""#);
}
#[test]
fn parameters() {
case(
"a b c:
{{b}} {{c}}",
"a b c:
{{ b }} {{ c }}",
);
}
#[test]
fn unary_functions() {
case(
"
x := arch()
a:
{{os()}} {{os_family()}} {{num_cpus()}}",
"x := arch()
a:
{{ os() }} {{ os_family() }} {{ num_cpus() }}",
);
}
#[test]
fn env_functions() {
case(
r#"
x := env_var('foo',)
a:
{{env_var_or_default('foo' + 'bar', 'baz',)}} {{env_var(env_var("baz"))}}"#,
r#"x := env_var('foo')
a:
{{ env_var_or_default('foo' + 'bar', 'baz') }} {{ env_var(env_var("baz")) }}"#,
);
}
#[test]
fn parameter_default_string() {
case(
r#"
f x="abc":
"#,
r#"f x="abc":"#,
);
}
#[test]
fn parameter_default_raw_string() {
case(
r"
f x='abc':
",
r"f x='abc':",
);
}
#[test]
fn parameter_default_backtick() {
case(
r"
f x=`echo hello`:
",
r"f x=`echo hello`:",
);
}
#[test]
fn parameter_default_concatenation_string() {
case(
r#"
f x=(`echo hello` + "foo"):
"#,
r#"f x=(`echo hello` + "foo"):"#,
);
}
#[test]
fn parameter_default_concatenation_variable() {
case(
r#"
x := "10"
f y=(`echo hello` + x) +z="foo":
"#,
r#"x := "10"
f y=(`echo hello` + x) +z="foo":"#,
);
}
#[test]
fn parameter_default_multiple() {
case(
r#"
x := "10"
f y=(`echo hello` + x) +z=("foo" + "bar"):
"#,
r#"x := "10"
f y=(`echo hello` + x) +z=("foo" + "bar"):"#,
);
}
#[test]
fn concatenation_in_group() {
case("x := ('0' + '1')", "x := ('0' + '1')");
}
#[test]
fn string_in_group() {
case("x := ('0' )", "x := ('0')");
}
#[rustfmt::skip]
#[test]
fn escaped_dos_newlines() {
case("@spam:\r
\t{ \\\r
\t\tfiglet test; \\\r
\t\tcargo build --color always 2>&1; \\\r
\t\tcargo test --color always -- --color always 2>&1; \\\r
\t} | less\r
",
"@spam:
{ \\
\tfiglet test; \\
\tcargo build --color always 2>&1; \\
\tcargo test --color always -- --color always 2>&1; \\
} | less");
}
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/source.rs | src/source.rs | use super::*;
/// A source file being compiled, along with how it was reached from the
/// root justfile (import chain or module path).
#[derive(Debug)]
pub(crate) struct Source<'src> {
// How many imports/modules deep this source is; the root is 0.
pub(crate) file_depth: u32,
// Chain of file paths from the root to this source, inclusive.
pub(crate) file_path: Vec<PathBuf>,
// Offsets of the `import` statements that led here, used for
// source-order sorting of imported recipes.
pub(crate) import_offsets: Vec<usize>,
// Module path from the root; `None` for the root itself and for imports
// into the root.
pub(crate) namepath: Option<Namepath<'src>>,
pub(crate) path: PathBuf,
pub(crate) working_directory: PathBuf,
}
impl<'src> Source<'src> {
/// Root source: depth 0, no imports or namepath, working directory set to
/// the justfile's parent directory.
pub(crate) fn root(path: &Path) -> Self {
Self {
file_depth: 0,
file_path: vec![path.into()],
import_offsets: Vec::new(),
namepath: None,
path: path.into(),
working_directory: path.parent().unwrap().into(),
}
}
/// Source reached via an `import` statement at `import_offset`: depth and
/// the import chain grow, but the namepath and working directory are
/// inherited — imports execute in the importer's module and directory.
pub(crate) fn import(&self, path: PathBuf, import_offset: usize) -> Self {
Self {
file_depth: self.file_depth + 1,
file_path: self
.file_path
.clone()
.into_iter()
.chain(iter::once(path.clone()))
.collect(),
import_offsets: self
.import_offsets
.iter()
.copied()
.chain(iter::once(import_offset))
.collect(),
namepath: self.namepath.clone(),
path,
working_directory: self.working_directory.clone(),
}
}
/// Source reached via a `mod` statement: the namepath gains `name`,
/// import offsets reset, and the working directory becomes the module
/// file's own parent directory.
pub(crate) fn module(&self, name: Name<'src>, path: PathBuf) -> Self {
Self {
file_depth: self.file_depth + 1,
file_path: self
.file_path
.clone()
.into_iter()
.chain(iter::once(path.clone()))
.collect(),
import_offsets: Vec::new(),
namepath: Some(
self
.namepath
.as_ref()
.map_or_else(|| name.into(), |namepath| namepath.join(name)),
),
path: path.clone(),
working_directory: path.parent().unwrap().into(),
}
}
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/invocation.rs | src/invocation.rs | use super::*;
/// One recipe invocation parsed from the command line: the recipe to run and
/// its arguments, grouped per parameter.
#[derive(Debug, PartialEq)]
pub(crate) struct Invocation<'src, 'run> {
pub(crate) arguments: Vec<Vec<String>>,
pub(crate) recipe: &'run Recipe<'src>,
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/delimiter.rs | src/delimiter.rs | use super::*;
/// A paired delimiter tracked by the lexer for balance checking.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(crate) enum Delimiter {
Brace,
Bracket,
// The `{ … }` interpolation delimiters inside a format string of the
// given kind.
FormatString(StringKind),
Paren,
}
impl Delimiter {
  /// Opening character of this delimiter.
  pub(crate) fn open(self) -> char {
    match self {
      Self::Bracket => '[',
      Self::Paren => '(',
      Self::Brace | Self::FormatString(_) => '{',
    }
  }
  /// Closing character matching `open`.
  pub(crate) fn close(self) -> char {
    match self {
      Self::Bracket => ']',
      Self::Paren => ')',
      Self::Brace | Self::FormatString(_) => '}',
    }
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/search_config.rs | src/search_config.rs | use super::*;
/// Controls how `just` will search for the justfile.
#[derive(Debug, Default, PartialEq)]
pub(crate) enum SearchConfig {
/// Recursively search for the justfile upwards from the invocation directory
/// to the root, setting the working directory to the directory in which the
/// justfile is found.
#[default]
FromInvocationDirectory,
/// As in `Invocation`, but start from `search_directory`.
FromSearchDirectory { search_directory: PathBuf },
/// Search for global justfile
GlobalJustfile,
/// Use user-specified justfile, with the working directory set to the
/// directory that contains it.
WithJustfile { justfile: PathBuf },
/// Use user-specified justfile and working directory.
WithJustfileAndWorkingDirectory {
justfile: PathBuf,
working_directory: PathBuf,
},
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/lib.rs | src/lib.rs | //! `just` is primarily used as a command-line binary, but does provide a
//! limited public library interface.
//!
//! Please keep in mind that there are no semantic version guarantees for the
//! library interface. It may break or change at any time.
pub(crate) use {
crate::{
alias::Alias,
alias_style::AliasStyle,
analyzer::Analyzer,
arg_attribute::ArgAttribute,
assignment::Assignment,
assignment_resolver::AssignmentResolver,
ast::Ast,
attribute::{Attribute, AttributeDiscriminant},
attribute_set::AttributeSet,
binding::Binding,
color::Color,
color_display::ColorDisplay,
command_color::CommandColor,
command_ext::CommandExt,
compilation::Compilation,
compile_error::CompileError,
compile_error_kind::CompileErrorKind,
compiler::Compiler,
condition::Condition,
conditional_operator::ConditionalOperator,
config::Config,
config_error::ConfigError,
const_error::ConstError,
constants::constants,
count::Count,
delimiter::Delimiter,
dependency::Dependency,
dump_format::DumpFormat,
enclosure::Enclosure,
error::Error,
evaluator::Evaluator,
execution_context::ExecutionContext,
executor::Executor,
expression::Expression,
format_string_part::FormatStringPart,
fragment::Fragment,
function::Function,
interpreter::Interpreter,
invocation::Invocation,
invocation_parser::InvocationParser,
item::Item,
justfile::Justfile,
keyed::Keyed,
keyword::Keyword,
lexer::Lexer,
line::Line,
list::List,
load_dotenv::load_dotenv,
loader::Loader,
module_path::ModulePath,
name::Name,
namepath::Namepath,
ordinal::Ordinal,
output_error::OutputError,
parameter::Parameter,
parameter_kind::ParameterKind,
parser::Parser,
pattern::Pattern,
platform::Platform,
platform_interface::PlatformInterface,
position::Position,
positional::Positional,
ran::Ran,
range_ext::RangeExt,
recipe::Recipe,
recipe_resolver::RecipeResolver,
recipe_signature::RecipeSignature,
scope::Scope,
search::Search,
search_config::SearchConfig,
search_error::SearchError,
set::Set,
setting::Setting,
settings::Settings,
shebang::Shebang,
show_whitespace::ShowWhitespace,
signal::Signal,
signal_handler::SignalHandler,
source::Source,
string_delimiter::StringDelimiter,
string_kind::StringKind,
string_literal::StringLiteral,
string_state::StringState,
subcommand::Subcommand,
suggestion::Suggestion,
switch::Switch,
table::Table,
thunk::Thunk,
token::Token,
token_kind::TokenKind,
unresolved_dependency::UnresolvedDependency,
unresolved_recipe::UnresolvedRecipe,
unstable_feature::UnstableFeature,
usage::Usage,
use_color::UseColor,
variables::Variables,
verbosity::Verbosity,
warning::Warning,
which::which,
},
camino::Utf8Path,
clap::ValueEnum,
derive_where::derive_where,
edit_distance::edit_distance,
lexiclean::Lexiclean,
libc::EXIT_FAILURE,
rand::seq::IndexedRandom,
regex::Regex,
serde::{
ser::{SerializeMap, SerializeSeq},
Deserialize, Serialize, Serializer,
},
snafu::{ResultExt, Snafu},
std::{
borrow::Cow,
cmp::Ordering,
collections::{BTreeMap, BTreeSet, HashMap, HashSet},
env,
ffi::OsString,
fmt::{self, Debug, Display, Formatter},
fs,
io::{self, Write},
iter::{self, FromIterator},
mem,
ops::Deref,
ops::{Index, RangeInclusive},
path::{self, Path, PathBuf},
process::{self, Command, ExitStatus, Stdio},
str::{self, Chars},
sync::{Arc, LazyLock, Mutex, MutexGuard},
thread, vec,
},
strum::{Display, EnumDiscriminants, EnumString, IntoStaticStr},
tempfile::TempDir,
typed_arena::Arena,
unicode_width::{UnicodeWidthChar, UnicodeWidthStr},
};
#[cfg(test)]
pub(crate) use {
crate::{node::Node, tree::Tree},
std::slice,
};
pub use crate::run::run;
#[doc(hidden)]
use request::Request;
// Used in integration tests.
#[doc(hidden)]
pub use {request::Response, subcommand::INIT_JUSTFILE, unindent::unindent};
// Crate-wide result-type aliases.

/// Result of compilation, with a `CompileError` payload on failure.
type CompileResult<'a, T = ()> = Result<T, CompileError<'a>>;
/// Result of configuration processing, with a `ConfigError` payload.
type ConfigResult<T> = Result<T, ConfigError>;
/// Result of a function call: `Ok` output string or `Err` error string.
type FunctionResult = Result<String, String>;
/// Result of a run, with an `Error` payload on failure.
type RunResult<'a, T = ()> = Result<T, Error<'a>>;
/// Result of a justfile search, with a `SearchError` payload.
type SearchResult<T> = Result<T, SearchError>;
#[cfg(test)]
#[macro_use]
pub mod testing;
#[cfg(test)]
#[macro_use]
pub mod tree;
#[cfg(test)]
pub mod node;
#[cfg(fuzzing)]
pub mod fuzzing;
// Used by Janus, https://github.com/casey/janus, a tool
// that analyses all public justfiles on GitHub to avoid
// breaking changes.
#[doc(hidden)]
pub mod summary;
// Used for testing with the `--request` subcommand.
#[doc(hidden)]
pub mod request;
mod alias;
mod alias_style;
mod analyzer;
mod arg_attribute;
mod assignment;
mod assignment_resolver;
mod ast;
mod attribute;
mod attribute_set;
mod binding;
mod color;
mod color_display;
mod command_color;
mod command_ext;
mod compilation;
mod compile_error;
mod compile_error_kind;
mod compiler;
mod completions;
mod condition;
mod conditional_operator;
mod config;
mod config_error;
mod const_error;
mod constants;
mod count;
mod delimiter;
mod dependency;
mod dump_format;
mod enclosure;
mod error;
mod evaluator;
mod execution_context;
mod executor;
mod expression;
mod format_string_part;
mod fragment;
mod function;
mod interpreter;
mod invocation;
mod invocation_parser;
mod item;
mod justfile;
mod keyed;
mod keyword;
mod lexer;
mod line;
mod list;
mod load_dotenv;
mod loader;
mod module_path;
mod name;
mod namepath;
mod ordinal;
mod output_error;
mod parameter;
mod parameter_kind;
mod parser;
mod pattern;
mod platform;
mod platform_interface;
mod position;
mod positional;
mod ran;
mod range_ext;
mod recipe;
mod recipe_resolver;
mod recipe_signature;
mod run;
mod scope;
mod search;
mod search_config;
mod search_error;
mod set;
mod setting;
mod settings;
mod shebang;
mod show_whitespace;
mod signal;
mod signal_handler;
#[cfg(unix)]
mod signals;
mod source;
mod string_delimiter;
mod string_kind;
mod string_literal;
mod string_state;
mod subcommand;
mod suggestion;
mod switch;
mod table;
mod thunk;
mod token;
mod token_kind;
mod unindent;
mod unresolved_dependency;
mod unresolved_recipe;
mod unstable_feature;
mod usage;
mod use_color;
mod variables;
mod verbosity;
mod warning;
mod which;
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/analyzer.rs | src/analyzer.rs | use {super::*, CompileErrorKind::*};
/// Accumulates the items of a module's ASTs and assembles them into a
/// `Justfile`.
#[derive(Default)]
pub(crate) struct Analyzer<'run, 'src> {
  /// Aliases, with targets not yet resolved to recipes
  aliases: Table<'src, Alias<'src, Namepath<'src>>>,
  /// Assignments, in the order encountered
  assignments: Vec<&'run Binding<'src, Expression<'src>>>,
  /// Fully analyzed submodules
  modules: Table<'src, Justfile<'src>>,
  /// Recipes with unresolved dependencies, in the order encountered
  recipes: Vec<&'run Recipe<'src, UnresolvedDependency<'src>>>,
  /// `set` statements, keyed by setting name
  sets: Table<'src, Set<'src>>,
  /// Variable names that have been unexported
  unexports: HashSet<String>,
  /// Warnings collected from all analyzed ASTs
  warnings: Vec<Warning>,
}
impl<'run, 'src> Analyzer<'run, 'src> {
  /// Analyze the module rooted at `root`, along with everything it imports
  /// and its submodules, producing a `Justfile`.
  pub(crate) fn analyze(
    asts: &'run HashMap<PathBuf, Ast<'src>>,
    config: &Config,
    doc: Option<String>,
    groups: &[StringLiteral<'src>],
    loaded: &[PathBuf],
    name: Option<Name<'src>>,
    paths: &HashMap<PathBuf, PathBuf>,
    private: bool,
    root: &Path,
  ) -> RunResult<'src, Justfile<'src>> {
    Self::default().justfile(
      asts, config, doc, groups, loaded, name, paths, root, private,
    )
  }

  /// Collect items from the root AST and all imported ASTs, then check and
  /// assemble them into a `Justfile`. Consumes the analyzer.
  fn justfile(
    mut self,
    asts: &'run HashMap<PathBuf, Ast<'src>>,
    config: &Config,
    doc: Option<String>,
    groups: &[StringLiteral<'src>],
    loaded: &[PathBuf],
    name: Option<Name<'src>>,
    paths: &HashMap<PathBuf, PathBuf>,
    root: &Path,
    private: bool,
  ) -> RunResult<'src, Justfile<'src>> {
    // Maps item name -> (item type, defining name), used to detect clashes.
    let mut definitions = HashMap::new();

    let mut imports = HashSet::new();

    let mut unstable_features = BTreeSet::new();

    // Depth-first traversal over the root AST and every AST it imports.
    let mut stack = Vec::new();
    let ast = asts.get(root).unwrap();
    stack.push(ast);

    while let Some(ast) = stack.pop() {
      unstable_features.extend(&ast.unstable_features);

      for item in &ast.items {
        match item {
          Item::Alias(alias) => {
            Self::define(&mut definitions, alias.name, "alias", false)?;
            self.aliases.insert(alias.clone());
          }
          Item::Assignment(assignment) => {
            self.assignments.push(assignment);
          }
          Item::Comment(_) => (),
          Item::Import { absolute, .. } => {
            if let Some(absolute) = absolute {
              // Visit each imported AST only once, even if imported twice.
              if imports.insert(absolute) {
                stack.push(asts.get(absolute).unwrap());
              }
            }
          }
          Item::Module {
            absolute,
            doc,
            groups,
            name,
            private,
            ..
          } => {
            if let Some(absolute) = absolute {
              Self::define(&mut definitions, *name, "module", false)?;
              // Submodules are analyzed recursively into nested `Justfile`s.
              self.modules.insert(Self::analyze(
                asts,
                config,
                doc.clone(),
                groups.as_slice(),
                loaded,
                Some(*name),
                paths,
                *private,
                absolute,
              )?);
            }
          }
          Item::Recipe(recipe) => {
            if recipe.enabled() {
              Self::analyze_recipe(recipe)?;
              self.recipes.push(recipe);
            }
          }
          Item::Set(set) => {
            self.analyze_set(set)?;
            self.sets.insert(set.clone());
          }
          Item::Unexport { name } => {
            if !self.unexports.insert(name.lexeme().to_string()) {
              return Err(
                name
                  .error(DuplicateUnexport {
                    variable: name.lexeme(),
                  })
                  .into(),
              );
            }
          }
        }
      }

      self.warnings.extend(ast.warnings.iter().cloned());
    }

    // The last `allow-duplicate-variables` setting encountered wins.
    let mut allow_duplicate_variables = false;
    for (_name, set) in &self.sets {
      if let Setting::AllowDuplicateVariables(value) = set.value {
        allow_duplicate_variables = value;
      }
    }

    let mut assignments: Table<'src, Assignment<'src>> = Table::default();
    for assignment in self.assignments {
      let variable = assignment.name.lexeme();

      if !allow_duplicate_variables && assignments.contains_key(variable) {
        return Err(assignment.name.error(DuplicateVariable { variable }).into());
      }

      // Keep the definition from the lesser (or equal) import depth.
      if assignments
        .get(variable)
        .is_none_or(|original| assignment.file_depth <= original.file_depth)
      {
        assignments.insert(assignment.clone());
      }

      if self.unexports.contains(variable) {
        return Err(assignment.name.error(ExportUnexported { variable }).into());
      }
    }

    AssignmentResolver::resolve_assignments(&assignments)?;

    // Settings may reference variables; verify that they are all defined.
    for set in self.sets.values() {
      for expression in set.value.expressions() {
        for variable in expression.variables() {
          let name = variable.lexeme();
          if !assignments.contains_key(name) && !constants().contains_key(name) {
            return Err(variable.error(UndefinedVariable { variable: name }).into());
          }
        }
      }
    }

    let settings =
      Evaluator::evaluate_settings(&assignments, config, name, self.sets, &Scope::root())?;

    let mut deduplicated_recipes = Table::<'src, UnresolvedRecipe<'src>>::default();
    for recipe in self.recipes {
      Self::define(
        &mut definitions,
        recipe.name,
        "recipe",
        settings.allow_duplicate_recipes,
      )?;
      // As with assignments, keep the lesser-or-equal-depth definition.
      if deduplicated_recipes
        .get(recipe.name.lexeme())
        .is_none_or(|original| recipe.file_depth <= original.file_depth)
      {
        deduplicated_recipes.insert(recipe.clone());
      }
    }

    let recipes = RecipeResolver::resolve_recipes(
      &assignments,
      &ast.module_path,
      &self.modules,
      &settings,
      deduplicated_recipes,
    )?;

    let mut aliases = Table::new();
    while let Some(alias) = self.aliases.pop() {
      aliases.insert(Self::resolve_alias(&self.modules, &recipes, alias)?);
    }

    let source = root.to_owned();
    let root = paths.get(root).unwrap();

    // At most one recipe may carry the default attribute.
    let mut default = None;
    for recipe in recipes.values() {
      if recipe.attributes.contains(AttributeDiscriminant::Default) {
        if default.is_some() {
          return Err(
            recipe
              .name
              .error(CompileErrorKind::DuplicateDefault {
                recipe: recipe.name.lexeme(),
              })
              .into(),
          );
        }
        default = Some(Arc::clone(recipe));
      }
    }

    // Otherwise fall back to the recipe with the lowest line number that was
    // defined in the root file.
    let default = default.or_else(|| {
      recipes
        .values()
        .filter(|recipe| recipe.name.path == root)
        .fold(None, |accumulator, next| match accumulator {
          None => Some(Arc::clone(next)),
          Some(previous) => Some(if previous.line_number() < next.line_number() {
            previous
          } else {
            Arc::clone(next)
          }),
        })
    });

    Ok(Justfile {
      aliases,
      assignments,
      default,
      doc: doc.filter(|doc| !doc.is_empty()),
      groups: groups.into(),
      loaded: loaded.into(),
      module_path: ast.module_path.clone(),
      modules: self.modules,
      name,
      private,
      recipes,
      settings,
      source,
      unexports: self.unexports,
      unstable_features,
      warnings: self.warnings,
      working_directory: ast.working_directory.clone(),
    })
  }

  /// Record that `name` has been defined as a `second_type` ("alias",
  /// "module", or "recipe"), returning a `Redefinition` error if it clashes
  /// with an earlier definition and duplicates are not allowed.
  fn define(
    definitions: &mut HashMap<&'src str, (&'static str, Name<'src>)>,
    name: Name<'src>,
    second_type: &'static str,
    duplicates_allowed: bool,
  ) -> CompileResult<'src> {
    if let Some((first_type, original)) = definitions.get(name.lexeme()) {
      if !(*first_type == second_type && duplicates_allowed) {
        // Always report the later definition as the redefinition, regardless
        // of the order in which the two definitions were encountered.
        let ((first_type, second_type), (original, redefinition)) = if name.line < original.line {
          ((second_type, *first_type), (name, *original))
        } else {
          ((*first_type, second_type), (*original, name))
        };

        return Err(redefinition.token.error(Redefinition {
          first_type,
          second_type,
          name: name.lexeme(),
          first: original.line,
        }));
      }
    }

    definitions.insert(name.lexeme(), (second_type, name));

    Ok(())
  }

  /// Check recipe-local invariants: parameter uniqueness and ordering, body
  /// indentation, and attribute validity.
  fn analyze_recipe(recipe: &UnresolvedRecipe<'src>) -> CompileResult<'src> {
    let mut parameters = BTreeSet::new();
    let mut passed_default = false;

    for parameter in &recipe.parameters {
      if parameters.contains(parameter.name.lexeme()) {
        return Err(parameter.name.error(DuplicateParameter {
          recipe: recipe.name.lexeme(),
          parameter: parameter.name.lexeme(),
        }));
      }
      parameters.insert(parameter.name.lexeme());

      if parameter.default.is_some() {
        passed_default = true;
      } else if passed_default && parameter.is_required() && !parameter.is_option() {
        return Err(
          parameter
            .name
            .token
            .error(RequiredParameterFollowsDefaultParameter {
              parameter: parameter.name.lexeme(),
            }),
        );
      }
    }

    // Non-script recipes may not have extra leading whitespace on body lines,
    // except on continuations of the previous line.
    let mut continued = false;
    for line in &recipe.body {
      if !recipe.is_script() && !continued {
        if let Some(Fragment::Text { token }) = line.fragments.first() {
          let text = token.lexeme();

          if text.starts_with(' ') || text.starts_with('\t') {
            return Err(token.error(ExtraLeadingWhitespace));
          }
        }
      }

      continued = line.is_continuation();
    }

    // The extension attribute is only meaningful on script recipes.
    if !recipe.is_script() {
      if let Some(attribute) = recipe.attributes.get(AttributeDiscriminant::Extension) {
        return Err(recipe.name.error(InvalidAttribute {
          item_kind: "Recipe",
          item_name: recipe.name.lexeme(),
          attribute: Box::new(attribute.clone()),
        }));
      }
    }

    Ok(())
  }

  /// Reject duplicate `set` statements for the same setting.
  fn analyze_set(&self, set: &Set<'src>) -> CompileResult<'src> {
    if let Some(original) = self.sets.get(set.name.lexeme()) {
      return Err(set.name.error(DuplicateSet {
        setting: original.name.lexeme(),
        first: original.name.line,
      }));
    }

    Ok(())
  }

  /// Resolve an alias's target namepath to a concrete recipe.
  fn resolve_alias<'a>(
    modules: &'a Table<'src, Justfile<'src>>,
    recipes: &'a Table<'src, Arc<Recipe<'src>>>,
    alias: Alias<'src, Namepath<'src>>,
  ) -> CompileResult<'src, Alias<'src>> {
    match Self::resolve_recipe(&alias.target, modules, recipes) {
      Some(target) => Ok(alias.resolve(target)),
      None => Err(alias.name.error(UnknownAliasTarget {
        alias: alias.name.lexeme(),
        target: alias.target,
      })),
    }
  }

  /// Look up a recipe by namepath, descending into a submodule for each
  /// leading path component and looking up the final component as a recipe.
  pub(crate) fn resolve_recipe<'a>(
    path: &Namepath<'src>,
    mut modules: &'a Table<'src, Justfile<'src>>,
    mut recipes: &'a Table<'src, Arc<Recipe<'src>>>,
  ) -> Option<Arc<Recipe<'src>>> {
    let (name, path) = path.split_last();
    for name in path {
      let module = modules.get(name.lexeme())?;
      modules = &module.modules;
      recipes = &module.recipes;
    }
    recipes.get(name.lexeme()).cloned()
  }
}
#[cfg(test)]
mod tests {
  use super::*;

  // Each `analysis_error!` invocation asserts that analyzing `input` produces
  // the given error kind at the given source location.

  analysis_error! {
    name: duplicate_alias,
    input: "alias foo := bar\nalias foo := baz",
    offset: 23,
    line: 1,
    column: 6,
    width: 3,
    kind: Redefinition {
      first_type: "alias",
      second_type: "alias",
      name: "foo",
      first: 0,
    },
  }

  analysis_error! {
    name: unknown_alias_target,
    input: "alias foo := bar\n",
    offset: 6,
    line: 0,
    column: 6,
    width: 3,
    kind: UnknownAliasTarget {
      alias: "foo",
      target: Namepath::from(Name::from_identifier(
        Token{
          column: 13,
          kind: TokenKind::Identifier,
          length: 3,
          line: 0,
          offset: 13,
          path: Path::new("justfile"),
          src: "alias foo := bar\n",
        }
      ))
    },
  }

  analysis_error! {
    name: alias_shadows_recipe_before,
    input: "bar: \n echo bar\nalias foo := bar\nfoo:\n echo foo",
    offset: 34,
    line: 3,
    column: 0,
    width: 3,
    kind: Redefinition {
      first_type: "alias",
      second_type: "recipe",
      name: "foo",
      first: 2,
    },
  }

  analysis_error! {
    name: alias_shadows_recipe_after,
    input: "foo:\n echo foo\nalias foo := bar\nbar:\n echo bar",
    offset: 22,
    line: 2,
    column: 6,
    width: 3,
    kind: Redefinition {
      first_type: "recipe",
      second_type: "alias",
      name: "foo",
      first: 0,
    },
  }

  analysis_error! {
    name: required_after_default,
    input: "hello arg='foo' bar:",
    offset: 16,
    line: 0,
    column: 16,
    width: 3,
    kind: RequiredParameterFollowsDefaultParameter { parameter: "bar" },
  }

  analysis_error! {
    name: duplicate_parameter,
    input: "a b b:",
    offset: 4,
    line: 0,
    column: 4,
    width: 1,
    kind: DuplicateParameter{ recipe: "a", parameter: "b" },
  }

  analysis_error! {
    name: duplicate_variadic_parameter,
    input: "a b +b:",
    offset: 5,
    line: 0,
    column: 5,
    width: 1,
    kind: DuplicateParameter{ recipe: "a", parameter: "b" },
  }

  analysis_error! {
    name: duplicate_recipe,
    input: "a:\nb:\na:",
    offset: 6,
    line: 2,
    column: 0,
    width: 1,
    kind: Redefinition { first_type: "recipe", second_type: "recipe", name: "a", first: 0 },
  }

  analysis_error! {
    name: duplicate_variable,
    input: "a := \"0\"\na := \"0\"",
    offset: 9,
    line: 1,
    column: 0,
    width: 1,
    kind: DuplicateVariable{variable: "a"},
  }

  analysis_error! {
    name: extra_whitespace,
    input: "a:\n blah\n  blarg",
    offset: 10,
    line: 2,
    column: 1,
    width: 6,
    kind: ExtraLeadingWhitespace,
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/platform_interface.rs | src/platform_interface.rs | use super::*;
/// Platform-specific operations, implemented separately per target platform.
pub(crate) trait PlatformInterface {
  /// Translate `path` from a "native" path to the path the command
  /// interpreter expects, relative to `working_directory`.
  fn convert_native_path(config: &Config, working_directory: &Path, path: &Path) -> FunctionResult;

  /// Install the signal handler; may only be called once.
  fn install_signal_handler<T: Fn(Signal) + Send + 'static>(handler: T) -> RunResult<'static>;

  /// Construct a command equivalent to running the script at `path` with
  /// shebang line `shebang`.
  fn make_shebang_command(
    config: &Config,
    path: &Path,
    shebang: Shebang,
    working_directory: Option<&Path>,
  ) -> Result<Command, OutputError>;

  /// Set the execute permission on the file pointed to by `path`.
  fn set_execute_permission(path: &Path) -> io::Result<()>;

  /// Extract the signal, if any, from a process exit status.
  fn signal_from_exit_status(exit_status: ExitStatus) -> Option<i32>;
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/pattern.rs | src/pattern.rs | use super::*;
/// A user-supplied regular expression pattern, stored in anchored form
/// (`^(?:…)$`) so that it must match an entire string.
#[derive(Debug, Clone)]
pub(crate) struct Pattern<'src> {
  /// Compiled, anchored form of the pattern
  pub(crate) regex: Regex,
  /// Token of the string literal the pattern came from, for error reporting
  pub(crate) token: Token<'src>,
}
impl<'src> Pattern<'src> {
pub(crate) fn is_match(&self, haystack: &str) -> bool {
self.regex.is_match(haystack)
}
pub(crate) fn new(literal: &StringLiteral<'src>) -> Result<Self, CompileError<'src>> {
literal.cooked.parse::<Regex>().map_err(|source| {
literal
.token
.error(CompileErrorKind::ArgumentPatternRegex { source })
})?;
Ok(Self {
regex: format!("^(?:{})$", literal.cooked)
.parse::<Regex>()
.map_err(|source| {
literal
.token
.error(CompileErrorKind::ArgumentPatternRegex { source })
})?,
token: literal.token,
})
}
pub(crate) fn original(&self) -> &str {
&self.regex.as_str()[4..self.regex.as_str().len() - 2]
}
}
impl Eq for Pattern<'_> {}
impl Ord for Pattern<'_> {
fn cmp(&self, other: &pattern::Pattern) -> Ordering {
self.regex.as_str().cmp(other.regex.as_str())
}
}
impl PartialEq for Pattern<'_> {
fn eq(&self, other: &pattern::Pattern) -> bool {
self.regex.as_str() == other.regex.as_str()
}
}
impl PartialOrd for Pattern<'_> {
fn partial_cmp(&self, other: &pattern::Pattern) -> Option<Ordering> {
Some(self.cmp(other))
}
}
impl Serialize for Pattern<'_> {
  /// Serialize as the original, un-anchored pattern text the user wrote.
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: Serializer,
  {
    serializer.serialize_str(self.original())
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/assignment_resolver.rs | src/assignment_resolver.rs | use {super::*, CompileErrorKind::*};
/// Verifies that assignments reference only defined variables and contain no
/// circular dependencies.
pub(crate) struct AssignmentResolver<'src: 'run, 'run> {
  /// All assignments being resolved
  assignments: &'run Table<'src, Assignment<'src>>,
  /// Names whose assignments have been fully resolved
  evaluated: BTreeSet<&'src str>,
  /// Chain of names currently being resolved, used to detect cycles
  stack: Vec<&'src str>,
}
impl<'src: 'run, 'run> AssignmentResolver<'src, 'run> {
  /// Check every assignment in `assignments` for undefined variables and
  /// circular dependencies.
  pub(crate) fn resolve_assignments(
    assignments: &'run Table<'src, Assignment<'src>>,
  ) -> CompileResult<'src> {
    let mut resolver = Self {
      stack: Vec::new(),
      evaluated: BTreeSet::new(),
      assignments,
    };

    for assignment in assignments.values() {
      resolver.resolve_assignment(assignment)?;
    }

    Ok(())
  }

  /// Depth-first resolution of a single assignment: every variable it
  /// references must be a constant or an already-resolvable assignment.
  fn resolve_assignment(&mut self, assignment: &Assignment<'src>) -> CompileResult<'src> {
    let name = assignment.name.lexeme();

    if self.evaluated.contains(name) {
      return Ok(());
    }

    // Track the current dependency chain so cycles can be reported.
    self.stack.push(name);

    for variable in assignment.value.variables() {
      let name = variable.lexeme();
      if self.evaluated.contains(name) || constants().contains_key(name) {
        continue;
      }
      if self.stack.contains(&name) {
        // Push the repeated name so the reported circle ends where it began.
        self.stack.push(name);
        return Err(
          self.assignments[name]
            .name
            .error(CircularVariableDependency {
              variable: name,
              circle: self.stack.clone(),
            }),
        );
      } else if let Some(assignment) = self.assignments.get(name) {
        self.resolve_assignment(assignment)?;
      } else {
        return Err(variable.error(UndefinedVariable { variable: name }));
      }
    }

    self.evaluated.insert(name);
    self.stack.pop();

    Ok(())
  }
}
#[cfg(test)]
mod tests {
  use super::*;

  // Each `analysis_error!` invocation asserts that analyzing `input` produces
  // the given error kind at the given source location.

  analysis_error! {
    name: circular_variable_dependency,
    input: "a := b\nb := a",
    offset: 0,
    line: 0,
    column: 0,
    width: 1,
    kind: CircularVariableDependency{variable: "a", circle: vec!["a", "b", "a"]},
  }

  analysis_error! {
    name: self_variable_dependency,
    input: "a := a",
    offset: 0,
    line: 0,
    column: 0,
    width: 1,
    kind: CircularVariableDependency{variable: "a", circle: vec!["a", "a"]},
  }

  analysis_error! {
    name: unknown_expression_variable,
    input: "x := yy",
    offset: 5,
    line: 0,
    column: 5,
    width: 2,
    kind: UndefinedVariable{variable: "yy"},
  }

  analysis_error! {
    name: unknown_function_parameter,
    input: "x := env_var(yy)",
    offset: 13,
    line: 0,
    column: 13,
    width: 2,
    kind: UndefinedVariable{variable: "yy"},
  }

  analysis_error! {
    name: unknown_function_parameter_binary_first,
    input: "x := env_var_or_default(yy, 'foo')",
    offset: 24,
    line: 0,
    column: 24,
    width: 2,
    kind: UndefinedVariable{variable: "yy"},
  }

  analysis_error! {
    name: unknown_function_parameter_binary_second,
    input: "x := env_var_or_default('foo', yy)",
    offset: 31,
    line: 0,
    column: 31,
    width: 2,
    kind: UndefinedVariable{variable: "yy"},
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/show_whitespace.rs | src/show_whitespace.rs | use super::*;
/// String wrapper whose `Display` implementation renders spaces and tabs as
/// the visible characters `␠` and `␉` respectively, leaving all other
/// characters unchanged.
pub struct ShowWhitespace<'str>(pub &'str str);

impl Display for ShowWhitespace<'_> {
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    self.0.chars().try_for_each(|c| match c {
      '\t' => write!(f, "␉"),
      ' ' => write!(f, "␠"),
      other => write!(f, "{other}"),
    })
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/dependency.rs | src/dependency.rs | use super::*;
/// A resolved recipe dependency, together with the argument expressions
/// passed to it.
#[derive(Clone, PartialEq, Debug, Serialize)]
pub(crate) struct Dependency<'src> {
  /// Groups of argument expressions; serialized flattened into one sequence
  #[serde(serialize_with = "flatten_arguments")]
  pub(crate) arguments: Vec<Vec<Expression<'src>>>,
  /// The depended-upon recipe; serialized via `keyed::serialize`
  #[serde(serialize_with = "keyed::serialize")]
  pub(crate) recipe: Arc<Recipe<'src>>,
}
/// Serialize nested groups of argument expressions as a single flat sequence.
fn flatten_arguments<S: Serializer>(
  arguments: &[Vec<Expression<'_>>],
  serializer: S,
) -> Result<S::Ok, S::Error> {
  // Total element count across all groups, so the sequence length is known
  // up front.
  let total: usize = arguments.iter().map(Vec::len).sum();

  let mut seq = serializer.serialize_seq(Some(total))?;

  for argument in arguments.iter().flatten() {
    seq.serialize_element(argument)?;
  }

  seq.end()
}
impl Display for Dependency<'_> {
  /// Format as `name` when there are no arguments, or `(name arg…)` when
  /// there are.
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    if self.arguments.is_empty() {
      return write!(f, "{}", self.recipe.name());
    }

    write!(f, "({}", self.recipe.name())?;

    for argument in self.arguments.iter().flatten() {
      write!(f, " {argument}")?;
    }

    write!(f, ")")
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/parser.rs | src/parser.rs | use {super::*, TokenKind::*};
/// Just language parser
///
/// The parser is a (hopefully) straightforward recursive descent parser.
///
/// It uses a few tokens of lookahead to disambiguate different constructs.
///
/// The `expect_*` and `presume_`* methods are similar in that they assert the
/// type of unparsed tokens and consume them. However, upon encountering an
/// unexpected token, the `expect_*` methods return an unexpected token error,
/// whereas the `presume_*` tokens return an internal error.
///
/// The `presume_*` methods are used when the token stream has been inspected in
/// some other way, and thus encountering an unexpected token is a bug in Just,
/// and not a syntax error.
///
/// All methods starting with `parse_*` parse and return a language construct.
///
/// The parser tracks an expected set of tokens as it parses. This set contains
/// all tokens which would have been accepted at the current point in the
/// parse. Whenever the parser tests for a token that would be accepted, but
/// does not find it, it adds that token to the set. When the parser accepts a
/// token, the set is cleared. If the parser finds a token which is unexpected,
/// the elements of the set are printed in the resultant error message.
pub(crate) struct Parser<'run, 'src> {
  /// Tokens which would have been accepted at the current point in the parse
  expected_tokens: BTreeSet<TokenKind>,
  file_depth: u32,
  import_offsets: Vec<usize>,
  module_namepath: Option<&'run Namepath<'src>>,
  /// Index into `tokens` of the next unparsed token
  next_token: usize,
  recursion_depth: usize,
  /// Token stream being parsed
  tokens: &'run [Token<'src>],
  /// Unstable features encountered during the parse
  unstable_features: BTreeSet<UnstableFeature>,
  working_directory: &'run Path,
}
impl<'run, 'src> Parser<'run, 'src> {
/// Parse `tokens` into an `Ast`
pub(crate) fn parse(
file_depth: u32,
import_offsets: &[usize],
module_namepath: Option<&'run Namepath<'src>>,
tokens: &'run [Token<'src>],
working_directory: &'run Path,
) -> CompileResult<'src, Ast<'src>> {
Self {
expected_tokens: BTreeSet::new(),
file_depth,
import_offsets: import_offsets.to_vec(),
module_namepath,
next_token: 0,
recursion_depth: 0,
tokens,
unstable_features: BTreeSet::new(),
working_directory,
}
.parse_ast()
}
  /// Construct a compile error of `kind` located at the next significant
  /// token
  fn error(&self, kind: CompileErrorKind<'src>) -> CompileResult<'src, CompileError<'src>> {
    Ok(self.next()?.error(kind))
  }

  /// Construct an unexpected token error with the token returned by
  /// `Parser::next`
  fn unexpected_token(&self) -> CompileResult<'src, CompileError<'src>> {
    self.error(CompileErrorKind::UnexpectedToken {
      // The byte order mark is never useful in an error message, so it is
      // filtered out of the expected token set.
      expected: self
        .expected_tokens
        .iter()
        .copied()
        .filter(|kind| *kind != ByteOrderMark)
        .collect::<Vec<TokenKind>>(),
      found: self.next()?.kind,
    })
  }

  /// Construct an internal error, indicating a bug in just rather than a
  /// syntax error in the input
  fn internal_error(&self, message: impl Into<String>) -> CompileResult<'src, CompileError<'src>> {
    self.error(CompileErrorKind::Internal {
      message: message.into(),
    })
  }

  /// An iterator over the remaining significant tokens
  fn rest(&self) -> impl Iterator<Item = Token<'src>> + 'run {
    self.tokens[self.next_token..]
      .iter()
      .copied()
      .filter(|token| token.kind != Whitespace)
  }

  /// The next significant token
  fn next(&self) -> CompileResult<'src, Token<'src>> {
    if let Some(token) = self.rest().next() {
      Ok(token)
    } else {
      Err(self.internal_error("`Parser::next()` called after end of token stream")?)
    }
  }

  /// Check if the next significant token is of kind `kind`
  fn next_is(&mut self, kind: TokenKind) -> bool {
    self.next_are(&[kind])
  }

  /// Check if the next significant tokens are of kinds `kinds`
  ///
  /// The first token in `kinds` will be added to the expected token set.
  fn next_are(&mut self, kinds: &[TokenKind]) -> bool {
    if let Some(&kind) = kinds.first() {
      self.expected_tokens.insert(kind);
    }
    let mut rest = self.rest();
    for kind in kinds {
      match rest.next() {
        Some(token) => {
          if token.kind != *kind {
            return false;
          }
        }
        None => return false,
      }
    }
    true
  }

  /// Advance past one significant token, clearing the expected token set.
  fn advance(&mut self) -> CompileResult<'src, Token<'src>> {
    self.expected_tokens.clear();

    // Skip whitespace tokens, returning the first significant one.
    for skipped in &self.tokens[self.next_token..] {
      self.next_token += 1;
      if skipped.kind != Whitespace {
        return Ok(*skipped);
      }
    }

    Err(self.internal_error("`Parser::advance()` advanced past end of token stream")?)
  }
  /// Return next token if it is of kind `expected`, otherwise, return an
  /// unexpected token error
  fn expect(&mut self, expected: TokenKind) -> CompileResult<'src, Token<'src>> {
    if let Some(token) = self.accept(expected)? {
      Ok(token)
    } else {
      Err(self.unexpected_token()?)
    }
  }

  /// Return the next token if it is any of kinds in `expected`, otherwise,
  /// return an unexpected token error
  fn expect_any(&mut self, expected: &[TokenKind]) -> CompileResult<'src, Token<'src>> {
    for &kind in expected {
      if let Some(token) = self.accept(kind)? {
        return Ok(token);
      }
    }
    Err(self.unexpected_token()?)
  }

  /// Return an unexpected token error if the next token is not an EOL
  fn expect_eol(&mut self) -> CompileResult<'src> {
    // A trailing comment is allowed before the end of the line.
    self.accept(Comment)?;
    if self.next_is(Eof) {
      return Ok(());
    }
    self.expect(Eol).map(|_| ())
  }

  /// Return an `ExpectedKeyword` error if the next token is not the keyword
  /// `expected`
  fn expect_keyword(&mut self, expected: Keyword) -> CompileResult<'src> {
    let found = self.advance()?;

    if found.kind == Identifier && expected == found.lexeme() {
      Ok(())
    } else {
      Err(found.error(CompileErrorKind::ExpectedKeyword {
        expected: vec![expected],
        found,
      }))
    }
  }

  /// Return an internal error if the next token is not of kind `Identifier`
  /// with lexeme `lexeme`.
  fn presume_keyword(&mut self, keyword: Keyword) -> CompileResult<'src> {
    let next = self.advance()?;

    if next.kind != Identifier {
      Err(self.internal_error(format!(
        "Presumed next token would have kind {Identifier}, but found {}",
        next.kind
      ))?)
    } else if keyword == next.lexeme() {
      Ok(())
    } else {
      Err(self.internal_error(format!(
        "Presumed next token would have lexeme \"{keyword}\", but found \"{}\"",
        next.lexeme(),
      ))?)
    }
  }

  /// Return an internal error if the next token is not of kind `kind`.
  fn presume(&mut self, kind: TokenKind) -> CompileResult<'src, Token<'src>> {
    let next = self.advance()?;

    if next.kind == kind {
      Ok(next)
    } else {
      Err(self.internal_error(format!(
        "Presumed next token would have kind {kind:?}, but found {:?}",
        next.kind
      ))?)
    }
  }

  /// Return an internal error if the next token is not one of kinds `kinds`.
  fn presume_any(&mut self, kinds: &[TokenKind]) -> CompileResult<'src, Token<'src>> {
    let next = self.advance()?;
    if kinds.contains(&next.kind) {
      Ok(next)
    } else {
      Err(self.internal_error(format!(
        "Presumed next token would be {}, but found {}",
        List::or(kinds),
        next.kind
      ))?)
    }
  }
  /// Accept and return a token of kind `kind`
  fn accept(&mut self, kind: TokenKind) -> CompileResult<'src, Option<Token<'src>>> {
    if self.next_is(kind) {
      Ok(Some(self.advance()?))
    } else {
      Ok(None)
    }
  }

  /// Return an error if the next token is of kind `forbidden`
  fn forbid<F>(&self, forbidden: TokenKind, error: F) -> CompileResult<'src>
  where
    F: FnOnce(Token) -> CompileError,
  {
    let next = self.next()?;

    if next.kind == forbidden {
      Err(error(next))
    } else {
      Ok(())
    }
  }

  /// Accept a double-colon separated sequence of identifiers
  fn accept_namepath(&mut self) -> CompileResult<'src, Option<Namepath<'src>>> {
    if self.next_is(Identifier) {
      Ok(Some(self.parse_namepath()?))
    } else {
      Ok(None)
    }
  }

  /// Accept and return the name of keyword `keyword`, if it is next
  fn accept_keyword(&mut self, keyword: Keyword) -> CompileResult<'src, Option<Name<'src>>> {
    let next = self.next()?;
    if next.kind == Identifier && next.lexeme() == keyword.lexeme() {
      self.advance()?;
      Ok(Some(Name::from_identifier(next)))
    } else {
      Ok(None)
    }
  }

  /// Accept keyword `keyword`, returning whether it was present
  fn accepted_keyword(&mut self, keyword: Keyword) -> CompileResult<'src, bool> {
    Ok(self.accept_keyword(keyword)?.is_some())
  }

  /// Accept a dependency
  fn accept_dependency(&mut self) -> CompileResult<'src, Option<UnresolvedDependency<'src>>> {
    if let Some(recipe) = self.accept_namepath()? {
      // Bare dependency with no arguments
      Ok(Some(UnresolvedDependency {
        arguments: Vec::new(),
        recipe,
      }))
    } else if self.accepted(ParenL)? {
      // Parenthesized dependency with argument expressions
      let recipe = self.parse_namepath()?;

      let mut arguments = Vec::new();

      while !self.accepted(ParenR)? {
        arguments.push(self.parse_expression()?);
      }

      Ok(Some(UnresolvedDependency { arguments, recipe }))
    } else {
      Ok(None)
    }
  }

  /// Accept and return `true` if next token is of kind `kind`
  fn accepted(&mut self, kind: TokenKind) -> CompileResult<'src, bool> {
    Ok(self.accept(kind)?.is_some())
  }
  /// Parse a justfile, consumes self
  fn parse_ast(mut self) -> CompileResult<'src, Ast<'src>> {
    // If the last parsed item is a comment and no blank line has intervened,
    // pop it and return its contents so it can serve as the next item's doc
    // comment.
    fn pop_doc_comment<'src>(
      items: &mut Vec<Item<'src>>,
      eol_since_last_comment: bool,
    ) -> Option<&'src str> {
      if !eol_since_last_comment {
        if let Some(Item::Comment(contents)) = items.last() {
          let doc = Some(contents[1..].trim_start());
          items.pop();
          return doc;
        }
      }

      None
    }

    let mut items = Vec::new();

    let mut eol_since_last_comment = false;

    self.accept(ByteOrderMark)?;

    loop {
      let mut attributes = self.parse_attributes()?;
      let mut take_attributes = || {
        attributes
          .take()
          .map(|(_token, attributes)| attributes)
          .unwrap_or_default()
      };

      let next = self.next()?;

      if let Some(comment) = self.accept(Comment)? {
        items.push(Item::Comment(comment.lexeme().trim_end()));
        self.expect_eol()?;
        eol_since_last_comment = false;
      } else if self.accepted(Eol)? {
        eol_since_last_comment = true;
      } else if self.accepted(Eof)? {
        break;
      } else if self.next_is(Identifier) {
        // Keywords are disambiguated from identifiers by lookahead on the
        // following tokens; on no match, fall through to the `_` arm.
        match Keyword::from_lexeme(next.lexeme()) {
          Some(Keyword::Alias) if self.next_are(&[Identifier, Identifier, ColonEquals]) => {
            items.push(Item::Alias(self.parse_alias(take_attributes())?));
          }
          Some(Keyword::Export) if self.next_are(&[Identifier, Identifier, ColonEquals]) => {
            self.presume_keyword(Keyword::Export)?;
            items.push(Item::Assignment(
              self.parse_assignment(true, take_attributes())?,
            ));
          }
          Some(Keyword::Unexport)
            if self.next_are(&[Identifier, Identifier, Eof])
              || self.next_are(&[Identifier, Identifier, Eol]) =>
          {
            self.presume_keyword(Keyword::Unexport)?;
            let name = self.parse_name()?;
            self.expect_eol()?;
            items.push(Item::Unexport { name });
          }
          Some(Keyword::Import)
            if self.next_are(&[Identifier, StringToken])
              || self.next_are(&[Identifier, Identifier, StringToken])
              || self.next_are(&[Identifier, QuestionMark]) =>
          {
            self.presume_keyword(Keyword::Import)?;
            let optional = self.accepted(QuestionMark)?;
            let relative = self.parse_string_literal()?;
            // `absolute` is filled in later, when imports are resolved.
            items.push(Item::Import {
              absolute: None,
              optional,
              relative,
            });
          }
          Some(Keyword::Mod)
            if self.next_are(&[Identifier, Identifier, Comment])
              || self.next_are(&[Identifier, Identifier, Eof])
              || self.next_are(&[Identifier, Identifier, Eol])
              || self.next_are(&[Identifier, Identifier, Identifier, StringToken])
              || self.next_are(&[Identifier, Identifier, StringToken])
              || self.next_are(&[Identifier, QuestionMark]) =>
          {
            let doc = pop_doc_comment(&mut items, eol_since_last_comment);

            self.presume_keyword(Keyword::Mod)?;

            let optional = self.accepted(QuestionMark)?;

            let name = self.parse_name()?;

            // Optional path to the module's source file.
            let relative = if self.next_is(StringToken) || self.next_are(&[Identifier, StringToken])
            {
              Some(self.parse_string_literal()?)
            } else {
              None
            };

            let attributes = take_attributes();

            attributes.ensure_valid_attributes(
              "Module",
              *name,
              &[
                AttributeDiscriminant::Doc,
                AttributeDiscriminant::Group,
                AttributeDiscriminant::Private,
              ],
            )?;

            // A `[doc: …]` attribute overrides a preceding doc comment, and
            // a bare `[doc]` attribute suppresses it.
            let doc = match attributes.get(AttributeDiscriminant::Doc) {
              Some(Attribute::Doc(Some(doc))) => Some(doc.cooked.clone()),
              Some(Attribute::Doc(None)) => None,
              None => doc.map(ToOwned::to_owned),
              _ => unreachable!(),
            };

            let private = attributes.contains(AttributeDiscriminant::Private);

            let mut groups = Vec::new();
            for attribute in attributes {
              if let Attribute::Group(group) = attribute {
                groups.push(group);
              }
            }

            items.push(Item::Module {
              absolute: None,
              doc,
              groups,
              name,
              optional,
              private,
              relative,
            });
          }
          Some(Keyword::Set)
            if self.next_are(&[Identifier, Identifier, ColonEquals])
              || self.next_are(&[Identifier, Identifier, Comment, Eof])
              || self.next_are(&[Identifier, Identifier, Comment, Eol])
              || self.next_are(&[Identifier, Identifier, Eof])
              || self.next_are(&[Identifier, Identifier, Eol]) =>
          {
            items.push(Item::Set(self.parse_set()?));
          }
          _ => {
            // Not a keyword construct: either an assignment or a recipe.
            if self.next_are(&[Identifier, ColonEquals]) {
              items.push(Item::Assignment(
                self.parse_assignment(false, take_attributes())?,
              ));
            } else {
              let doc = pop_doc_comment(&mut items, eol_since_last_comment);
              items.push(Item::Recipe(self.parse_recipe(
                take_attributes(),
                doc,
                false,
              )?));
            }
          }
        }
      } else if self.accepted(At)? {
        // Recipe name prefixed with `@`; the flag is passed to `parse_recipe`.
        let doc = pop_doc_comment(&mut items, eol_since_last_comment);
        items.push(Item::Recipe(self.parse_recipe(
          take_attributes(),
          doc,
          true,
        )?));
      } else {
        return Err(self.unexpected_token()?);
      }

      // Attributes must have been consumed by the item they preceded.
      if let Some((token, attributes)) = attributes {
        return Err(token.error(CompileErrorKind::ExtraneousAttributes {
          count: attributes.len(),
        }));
      }
    }

    if self.next_token != self.tokens.len() {
      return Err(self.internal_error(format!(
        "Parse completed with {} unparsed tokens",
        self.tokens.len() - self.next_token,
      ))?);
    }

    Ok(Ast {
      items,
      module_path: self
        .module_namepath
        .map(ToString::to_string)
        .unwrap_or_default(),
      unstable_features: self.unstable_features,
      warnings: Vec::new(),
      working_directory: self.working_directory.into(),
    })
  }
/// Parse an alias, e.g `alias name := target`
///
/// The `alias` keyword and the `=`/`:=` have already been seen by the
/// caller's lookahead, so they are presumed rather than expected here.
/// Only the `[private]` attribute is legal on aliases.
fn parse_alias(
  &mut self,
  attributes: AttributeSet<'src>,
) -> CompileResult<'src, Alias<'src, Namepath<'src>>> {
  self.presume_keyword(Keyword::Alias)?;
  let name = self.parse_name()?;
  self.presume_any(&[Equals, ColonEquals])?;
  // the target may be a `::`-separated path into a submodule
  let target = self.parse_namepath()?;
  self.expect_eol()?;
  attributes.ensure_valid_attributes("Alias", *name, &[AttributeDiscriminant::Private])?;
  Ok(Alias {
    attributes,
    name,
    target,
  })
}
/// Parse an assignment, e.g. `foo := bar`
///
/// `export` is set by the caller when the assignment was prefixed with the
/// `export` keyword. Only the `[private]` attribute is legal on assignments.
fn parse_assignment(
  &mut self,
  export: bool,
  attributes: AttributeSet<'src>,
) -> CompileResult<'src, Assignment<'src>> {
  let name = self.parse_name()?;
  self.presume(ColonEquals)?;
  let value = self.parse_expression()?;
  self.expect_eol()?;
  let private = attributes.contains(AttributeDiscriminant::Private);
  attributes.ensure_valid_attributes("Assignment", *name, &[AttributeDiscriminant::Private])?;
  Ok(Assignment {
    export,
    file_depth: self.file_depth,
    name,
    prelude: false,
    // names beginning with `_` are implicitly private
    private: private || name.lexeme().starts_with('_'),
    value,
  })
}
/// Parse an expression, e.g. `1 + 2`
///
/// Top of the expression grammar: an expression is a disjunct, optionally
/// followed by `||` and another expression (so `||` is right-associative).
fn parse_expression(&mut self) -> CompileResult<'src, Expression<'src>> {
  // Guard against stack overflow on deeply nested expressions. The limit is
  // lower on Windows — presumably due to a smaller default stack; the exact
  // values look empirically chosen. TODO confirm rationale.
  if self.recursion_depth == if cfg!(windows) { 48 } else { 256 } {
    let token = self.next()?;
    return Err(CompileError::new(
      token,
      CompileErrorKind::ParsingRecursionDepthExceeded,
    ));
  }
  self.recursion_depth += 1;
  let disjunct = self.parse_disjunct()?;
  let expression = if self.accepted(BarBar)? {
    // logical operators are gated behind an unstable feature
    self
      .unstable_features
      .insert(UnstableFeature::LogicalOperators);
    let lhs = disjunct.into();
    let rhs = self.parse_expression()?.into();
    Expression::Or { lhs, rhs }
  } else {
    disjunct
  };
  self.recursion_depth -= 1;
  Ok(expression)
}
/// Parse a disjunct: a conjunct, optionally followed by `&&` and another
/// disjunct, making `&&` right-associative.
fn parse_disjunct(&mut self) -> CompileResult<'src, Expression<'src>> {
  let lhs = self.parse_conjunct()?;
  if !self.accepted(AmpersandAmpersand)? {
    return Ok(lhs);
  }
  // `&&` is gated behind the logical-operators unstable feature
  self
    .unstable_features
    .insert(UnstableFeature::LogicalOperators);
  let rhs = self.parse_disjunct()?;
  Ok(Expression::And {
    lhs: lhs.into(),
    rhs: rhs.into(),
  })
}
/// Parse a conjunct: a conditional, a `/`-join, a `+`-concatenation, or a
/// plain value. Both `/` and `+` are right-associative.
fn parse_conjunct(&mut self) -> CompileResult<'src, Expression<'src>> {
  if self.accepted_keyword(Keyword::If)? {
    self.parse_conditional()
  } else if self.accepted(Slash)? {
    // a leading `/` produces a join with no left-hand side
    let lhs = None;
    let rhs = self.parse_conjunct()?.into();
    Ok(Expression::Join { lhs, rhs })
  } else {
    let value = self.parse_value()?;
    if self.accepted(Slash)? {
      let lhs = Some(Box::new(value));
      let rhs = self.parse_conjunct()?.into();
      Ok(Expression::Join { lhs, rhs })
    } else if self.accepted(Plus)? {
      let lhs = value.into();
      let rhs = self.parse_conjunct()?.into();
      Ok(Expression::Concatenation { lhs, rhs })
    } else {
      Ok(value)
    }
  }
}
/// Parse a conditional, e.g. `if a == b { "foo" } else { "bar" }`
///
/// The `if` keyword has already been consumed by the caller. The `else`
/// branch is mandatory and may chain into another conditional via `else if`.
fn parse_conditional(&mut self) -> CompileResult<'src, Expression<'src>> {
  let condition = self.parse_condition()?;
  self.expect(BraceL)?;
  let then = self.parse_expression()?;
  self.expect(BraceR)?;
  self.expect_keyword(Keyword::Else)?;
  let otherwise = if self.accepted_keyword(Keyword::If)? {
    // `else if …` — recurse without requiring braces around the chain
    self.parse_conditional()?
  } else {
    self.expect(BraceL)?;
    let otherwise = self.parse_expression()?;
    self.expect(BraceR)?;
    otherwise
  };
  Ok(Expression::Conditional {
    condition,
    then: then.into(),
    otherwise: otherwise.into(),
  })
}
/// Parse a binary condition, e.g. `lhs == rhs`, as used by conditionals
/// and `assert()`.
fn parse_condition(&mut self) -> CompileResult<'src, Condition<'src>> {
  let lhs = self.parse_expression()?;
  // try each operator token in turn; if none matched, `==` is required
  let operator = if self.accepted(BangEquals)? {
    ConditionalOperator::Inequality
  } else if self.accepted(EqualsTilde)? {
    ConditionalOperator::RegexMatch
  } else if self.accepted(BangTilde)? {
    ConditionalOperator::RegexMismatch
  } else {
    self.expect(EqualsEquals)?;
    ConditionalOperator::Equality
  };
  let rhs = self.parse_expression()?;
  Ok(Condition {
    lhs: lhs.into(),
    rhs: rhs.into(),
    operator,
  })
}
/// Parse a format string, e.g. `f"foo{{ expr }}bar"`.
///
/// The string is a `start` segment followed by zero or more
/// (expression, continuation-segment) pairs.
fn parse_format_string(&mut self) -> CompileResult<'src, Expression<'src>> {
  self.expect_keyword(Keyword::F)?;
  let start = self.parse_string_literal_in_state(StringState::FormatStart)?;
  let kind = StringKind::from_string_or_backtick(start.token)?;
  // a plain StringToken means the format string contains no interpolations
  let mut more = start.token.kind == FormatStringStart;
  let mut expressions = Vec::new();
  // each iteration parses one interpolated expression plus the string
  // segment following it; `more` stays true while the next segment
  // continues (rather than ends) the format string
  while more {
    let expression = self.parse_expression()?;
    more = self.next_is(FormatStringContinue);
    expressions.push((
      expression,
      self.parse_string_literal_in_state(StringState::FormatContinue(kind))?,
    ));
  }
  Ok(Expression::FormatString { start, expressions })
}
// Check if the next tokens are a shell-expanded string, i.e., `x"foo"`.
//
// This function skips initial whitespace tokens, but thereafter is
// whitespace-sensitive, so `x"foo"` is a shell-expanded string, whereas `x
// "foo"` is not.
fn next_is_shell_expanded_string(&self) -> bool {
  let mut rest = self
    .tokens
    .iter()
    .skip(self.next_token)
    .skip_while(|token| token.kind == Whitespace);
  // first non-whitespace token must be the identifier `x`
  let Some(first) = rest.next() else {
    return false;
  };
  if first.kind != Identifier || first.lexeme() != Keyword::X.lexeme() {
    return false;
  }
  // and it must be immediately followed by a string token
  matches!(rest.next(), Some(token) if token.kind == StringToken)
}
// Check if the next tokens are a format string, i.e., `f"foo"`.
//
// This function skips initial whitespace tokens, but thereafter is
// whitespace-sensitive, so `f"foo"` is a format string, whereas `f
// "foo"` is not.
fn next_is_format_string(&self) -> bool {
  let mut rest = self
    .tokens
    .iter()
    .skip(self.next_token)
    .skip_while(|token| token.kind == Whitespace);
  // first non-whitespace token must be the identifier `f`
  match rest.next() {
    Some(token) if token.kind == Identifier && token.lexeme() == Keyword::F.lexeme() => {}
    _ => return false,
  }
  // and it must be immediately followed by the start of a string
  rest
    .next()
    .is_some_and(|token| matches!(token.kind, StringToken | FormatStringStart))
}
/// Parse a value, e.g. `(bar)`
///
/// Values are the leaves of the expression grammar: string literals, format
/// strings, backticks, `assert(...)`, function calls, variables, and
/// parenthesized groups.
fn parse_value(&mut self) -> CompileResult<'src, Expression<'src>> {
  if self.next_is(StringToken) || self.next_is_shell_expanded_string() {
    Ok(Expression::StringLiteral {
      string_literal: self.parse_string_literal()?,
    })
  } else if self.next_is_format_string() {
    self.parse_format_string()
  } else if self.next_is(Backtick) {
    let next = self.next()?;
    let kind = StringKind::from_string_or_backtick(next)?;
    // strip the backtick delimiters from both ends of the lexeme
    let contents =
      &next.lexeme()[kind.delimiter_len()..next.lexeme().len() - kind.delimiter_len()];
    let token = self.advance()?;
    let contents = if kind.indented() {
      unindent(contents)
    } else {
      contents.to_owned()
    };
    // shebangs are forbidden in backticks; they suggest the user wanted a
    // shebang recipe instead
    if contents.starts_with("#!") {
      return Err(next.error(CompileErrorKind::BacktickShebang));
    }
    Ok(Expression::Backtick { contents, token })
  } else if self.next_is(Identifier) {
    if let Some(name) = self.accept_keyword(Keyword::Assert)? {
      // `assert(condition, error)`
      self.expect(ParenL)?;
      let condition = self.parse_condition()?;
      self.expect(Comma)?;
      let error = Box::new(self.parse_expression()?);
      self.expect(ParenR)?;
      Ok(Expression::Assert {
        condition,
        error,
        name,
      })
    } else {
      let name = self.parse_name()?;
      if self.next_is(ParenL) {
        // an identifier followed by `(` is a function call
        let arguments = self.parse_sequence()?;
        // the `which` function is gated behind an unstable feature
        if name.lexeme() == "which" {
          self
            .unstable_features
            .insert(UnstableFeature::WhichFunction);
        }
        Ok(Expression::Call {
          thunk: Thunk::resolve(name, arguments)?,
        })
      } else {
        Ok(Expression::Variable { name })
      }
    }
  } else if self.next_is(ParenL) {
    self.presume(ParenL)?;
    let contents = self.parse_expression()?.into();
    self.expect(ParenR)?;
    Ok(Expression::Group { contents })
  } else {
    Err(self.unexpected_token()?)
  }
}
/// Parse a string literal, e.g. `"FOO"`, in the default state, i.e. not as
/// part of a format string.
fn parse_string_literal(&mut self) -> CompileResult<'src, StringLiteral<'src>> {
  self.parse_string_literal_in_state(StringState::Normal)
}
/// Parse a string literal, e.g. `"FOO"`, in format-string state `state`,
/// which determines which token kinds are acceptable and how the segment's
/// delimiters are stripped.
fn parse_string_literal_in_state(
  &mut self,
  state: StringState,
) -> CompileResult<'src, StringLiteral<'src>> {
  // a leading identifier here must be the `x` prefix requesting shell expansion
  let expand = if self.next_is(Identifier) {
    self.expect_keyword(Keyword::X)?;
    true
  } else {
    false
  };
  let token = match state {
    StringState::Normal => self.expect(StringToken)?,
    StringState::FormatStart => self.expect_any(&[StringToken, FormatStringStart])?,
    StringState::FormatContinue(_) => {
      self.expect_any(&[FormatStringContinue, FormatStringEnd])?
    }
  };
  // continuation segments inherit the string kind from the opening segment
  let kind = match state {
    StringState::Normal | StringState::FormatStart => StringKind::from_string_or_backtick(token)?,
    StringState::FormatContinue(kind) => kind,
  };
  // bytes to strip from the front: the `}}` that closed the previous
  // interpolation, or else the opening quote delimiter
  let open = if matches!(token.kind, FormatStringContinue | FormatStringEnd) {
    Lexer::INTERPOLATION_END.len()
  } else {
    kind.delimiter_len()
  };
  // bytes to strip from the back: the `{{` that opens the next
  // interpolation, or else the closing quote delimiter
  let close = if matches!(token.kind, FormatStringStart | FormatStringContinue) {
    Lexer::INTERPOLATION_START.len()
  } else {
    kind.delimiter_len()
  };
  let raw = &token.lexeme()[open..token.lexeme().len() - close];
  // only whole indented strings are unindented, not format-string segments
  let unindented = if kind.indented() && matches!(token.kind, StringToken) {
    unindent(raw)
  } else {
    raw.to_owned()
  };
  // inside format strings, unescape escaped interpolation delimiters
  let undelimited = if matches!(state, StringState::Normal) {
    unindented
  } else {
    unindented.replace(Lexer::INTERPOLATION_ESCAPE, Lexer::INTERPOLATION_START)
  };
  let cooked = if kind.processes_escape_sequences() {
    Self::cook_string(token, &undelimited)?
  } else {
    undelimited
  };
  // shell expansion is applied last, to the fully cooked text
  let cooked = if expand {
    shellexpand::full(&cooked)
      .map_err(|err| token.error(CompileErrorKind::ShellExpansion { err }))?
      .into_owned()
  } else {
    cooked
  };
  Ok(StringLiteral {
    token,
    cooked,
    expand,
    kind,
    part: match token.kind {
      FormatStringStart => Some(FormatStringPart::Start),
      FormatStringContinue => Some(FormatStringPart::Continue),
      FormatStringEnd => Some(FormatStringPart::End),
      StringToken => {
        if matches!(state, StringState::Normal) {
          None
        } else {
          // an interpolation-free string inside a format string
          Some(FormatStringPart::Single)
        }
      }
      _ => {
        // unreachable given the expect/expect_any calls above, but reported
        // as an internal error rather than panicking
        return Err(token.error(CompileErrorKind::Internal {
          message: "unexpected token kind while parsing string literal".into(),
        }));
      }
    },
  })
}
// Transform escape sequences in from string literal `token` with content `text`
//
// Handles `\n`, `\r`, `\t`, `\\`, `\"`, line continuations (`\` before a
// newline), and unicode escapes `\u{XXXXXX}` of one to six hex digits.
fn cook_string(token: Token<'src>, text: &str) -> CompileResult<'src, String> {
  // character-at-a-time state machine over `text`
  #[derive(PartialEq, Eq)]
  enum State {
    // just saw `\`
    Backslash,
    // ordinary text
    Initial,
    // saw `\u`, expecting `{`
    Unicode,
    // inside `\u{…}`, accumulating hex digits
    UnicodeValue { hex: String },
  }
  let mut cooked = String::new();
  let mut state = State::Initial;
  for c in text.chars() {
    match state {
      State::Initial => {
        if c == '\\' {
          state = State::Backslash;
        } else {
          cooked.push(c);
        }
      }
      State::Backslash if c == 'u' => {
        state = State::Unicode;
      }
      State::Backslash => {
        match c {
          'n' => cooked.push('\n'),
          'r' => cooked.push('\r'),
          't' => cooked.push('\t'),
          '\\' => cooked.push('\\'),
          // backslash-newline is a line continuation: both characters dropped
          '\n' => {}
          '"' => cooked.push('"'),
          character => {
            return Err(token.error(CompileErrorKind::InvalidEscapeSequence { character }));
          }
        }
        state = State::Initial;
      }
      State::Unicode => match c {
        '{' => {
          state = State::UnicodeValue { hex: String::new() };
        }
        character => {
          return Err(token.error(CompileErrorKind::UnicodeEscapeDelimiter { character }));
        }
      },
      State::UnicodeValue { ref mut hex } => match c {
        '}' => {
          if hex.is_empty() {
            return Err(token.error(CompileErrorKind::UnicodeEscapeEmpty));
          }
          // cannot fail: `hex` is 1–6 hex digits, which always fits in a u32
          let codepoint = u32::from_str_radix(hex, 16).unwrap();
          cooked.push(char::from_u32(codepoint).ok_or_else(|| {
            token.error(CompileErrorKind::UnicodeEscapeRange { hex: hex.clone() })
          })?);
          state = State::Initial;
        }
        '0'..='9' | 'A'..='F' | 'a'..='f' => {
          hex.push(c);
          // enforce the six-digit limit as we go, so the error points at the
          // offending escape rather than surfacing at the closing brace
          if hex.len() > 6 {
            return Err(token.error(CompileErrorKind::UnicodeEscapeLength { hex: hex.clone() }));
          }
        }
        _ => {
          return Err(token.error(CompileErrorKind::UnicodeEscapeCharacter { character: c }));
        }
      },
    }
  }
  // NOTE(review): any non-Initial end state is reported as an unterminated
  // unicode escape — including a trailing lone `\` (State::Backslash);
  // presumably the lexer prevents that case from reaching here. Confirm.
  if state != State::Initial {
    return Err(token.error(CompileErrorKind::UnicodeEscapeUnterminated));
  }
  Ok(cooked)
}
/// Parse a name from an identifier token
fn parse_name(&mut self) -> CompileResult<'src, Name<'src>> {
  let token = self.expect(Identifier)?;
  Ok(Name::from_identifier(token))
}
/// Parse a path of `::` separated names
fn parse_namepath(&mut self) -> CompileResult<'src, Namepath<'src>> {
  let mut path = Namepath::from(self.parse_name()?);
  // keep consuming `::name` segments for as long as they appear
  while self.accepted(ColonColon)? {
    path.push(self.parse_name()?);
  }
  Ok(path)
}
/// Parse sequence of comma-separated expressions
///
/// The opening paren has already been seen by the caller; a trailing comma
/// before the closing paren is permitted.
fn parse_sequence(&mut self) -> CompileResult<'src, Vec<Expression<'src>>> {
  self.presume(ParenL)?;
  let mut expressions = Vec::new();
  loop {
    if self.next_is(ParenR) {
      break;
    }
    expressions.push(self.parse_expression()?);
    if !self.accepted(Comma)? {
      break;
    }
  }
  self.expect(ParenR)?;
  Ok(expressions)
}
/// Parse a recipe
fn parse_recipe(
&mut self,
attributes: AttributeSet<'src>,
doc: Option<&'src str>,
quiet: bool,
) -> CompileResult<'src, UnresolvedRecipe<'src>> {
let name = self.parse_name()?;
let mut positional = Vec::new();
let mut longs = HashSet::new();
let mut shorts = HashSet::new();
let mut arg_attributes = BTreeMap::new();
for attribute in &attributes {
let Attribute::Arg {
help,
long,
long_key,
name: arg,
pattern,
short,
value,
..
} = attribute
else {
continue;
};
if let Some(option) = long {
if !longs.insert(&option.cooked) {
return Err(
long_key
.unwrap_or(option.token)
.error(CompileErrorKind::DuplicateOption {
option: Switch::Long(option.cooked.clone()),
recipe: name.lexeme(),
}),
);
}
}
if let Some(option) = short {
if !shorts.insert(&option.cooked) {
return Err(option.token.error(CompileErrorKind::DuplicateOption {
option: Switch::Short(option.cooked.chars().next().unwrap()),
recipe: name.lexeme(),
}));
}
}
arg_attributes.insert(
arg.cooked.clone(),
ArgAttribute {
help: help.as_ref().map(|literal| literal.cooked.clone()),
name: arg.token,
pattern: pattern.clone(),
long: long.as_ref().map(|long| long.cooked.clone()),
short: short
.as_ref()
.map(|short| short.cooked.chars().next().unwrap()),
value: value.as_ref().map(|value| value.cooked.clone()),
},
);
}
while self.next_is(Identifier) || self.next_is(Dollar) {
positional.push(self.parse_parameter(&mut arg_attributes, ParameterKind::Singular)?);
}
let kind = if self.accepted(Plus)? {
ParameterKind::Plus
} else if self.accepted(Asterisk)? {
ParameterKind::Star
} else {
ParameterKind::Singular
};
let variadic = if kind.is_variadic() {
let variadic = self.parse_parameter(&mut arg_attributes, kind)?;
self.forbid(Identifier, |token| {
token.error(CompileErrorKind::ParameterFollowsVariadicParameter {
parameter: token.lexeme(),
})
})?;
Some(variadic)
} else {
None
};
self.expect(Colon)?;
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | true |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/signal_handler.rs | src/signal_handler.rs | use super::*;
/// Global handler that forwards signals to child processes and records
/// fatal signals caught while children are running.
pub(crate) struct SignalHandler {
  /// first fatal signal caught while children were running, if any
  caught: Option<Signal>,
  /// currently running child processes, keyed by PID
  children: BTreeMap<i32, Command>,
  /// whether the platform signal handler has been installed
  initialized: bool,
  verbosity: Verbosity,
}
impl SignalHandler {
  /// Install the process-wide signal handler, routing caught signals to the
  /// singleton instance. Idempotent: the platform handler is installed only
  /// once, though `verbosity` is updated on every call.
  pub(crate) fn install(verbosity: Verbosity) -> RunResult<'static> {
    let mut instance = Self::instance();
    instance.verbosity = verbosity;
    if !instance.initialized {
      Platform::install_signal_handler(|signal| Self::instance().handle(signal))?;
      instance.initialized = true;
    }
    Ok(())
  }

  /// Lock and return the global singleton. If the mutex is poisoned (a
  /// thread panicked while holding it), print an internal error and exit,
  /// since signal-handling state can no longer be trusted.
  pub(crate) fn instance() -> MutexGuard<'static, Self> {
    static INSTANCE: Mutex<SignalHandler> = Mutex::new(SignalHandler::new());
    match INSTANCE.lock() {
      Ok(guard) => guard,
      Err(poison_error) => {
        eprintln!(
          "{}",
          Error::internal(format!("signal handler mutex poisoned: {poison_error}"),)
            .color_display(Color::auto().stderr())
        );
        process::exit(EXIT_FAILURE);
      }
    }
  }

  // `const` so it can initialize the static mutex above
  const fn new() -> Self {
    Self {
      caught: None,
      children: BTreeMap::new(),
      initialized: false,
      verbosity: Verbosity::default(),
    }
  }

  /// React to `signal`. With no children, a fatal signal exits immediately;
  /// otherwise the first fatal signal is recorded so `spawn` can report it
  /// after its child exits.
  fn handle(&mut self, signal: Signal) {
    if signal.is_fatal() {
      if self.children.is_empty() {
        process::exit(signal.code());
      }
      if self.caught.is_none() {
        self.caught = Some(signal);
      }
    }
    match signal {
      // SIGHUP, SIGINT, and SIGQUIT are normally sent on terminal close,
      // ctrl-c, and ctrl-\, respectively, and are sent to all processes in the
      // foreground process group. this includes child processes, so we ignore
      // the signal and wait for them to exit
      Signal::Hangup | Signal::Interrupt | Signal::Quit => {}
      #[cfg(any(
        target_os = "dragonfly",
        target_os = "freebsd",
        target_os = "ios",
        target_os = "macos",
        target_os = "netbsd",
        target_os = "openbsd",
      ))]
      // SIGINFO (BSD-family only): report which children we're waiting on
      Signal::Info => {
        let id = process::id();
        if self.children.is_empty() {
          eprintln!("just {id}: no child processes");
        } else {
          let n = self.children.len();
          let mut message = format!(
            "just {id}: {n} child {}:\n",
            if n == 1 { "process" } else { "processes" }
          );
          for (&child, command) in &self.children {
            use std::fmt::Write;
            writeln!(message, "{child}: {command:?}").unwrap();
          }
          // build the whole report first and emit it with a single `eprint!`
          // so it isn't interleaved with child output
          eprint!("{message}");
        }
      }
      // SIGTERM is the default signal sent by kill. forward it to child
      // processes and wait for them to exit
      Signal::Terminate =>
      {
        #[cfg(not(windows))]
        for &child in self.children.keys() {
          if self.verbosity.loquacious() {
            eprintln!("just: sending SIGTERM to child process {child}");
          }
          // best-effort: the child may already have exited
          nix::sys::signal::kill(
            nix::unistd::Pid::from_raw(child),
            Some(Signal::Terminate.into()),
          )
          .ok();
        }
      }
    }
  }

  /// Spawn `command`, register its PID so signals can be forwarded to it,
  /// run `f` on the child, then deregister. Returns `f`'s result along with
  /// any fatal signal caught while the child was registered.
  pub(crate) fn spawn<T>(
    mut command: Command,
    f: impl Fn(process::Child) -> io::Result<T>,
  ) -> (io::Result<T>, Option<Signal>) {
    let mut instance = Self::instance();
    let child = match command.spawn() {
      Err(err) => return (Err(err), None),
      Ok(child) => child,
    };
    let pid = match child.id().try_into() {
      Err(err) => {
        return (
          Err(io::Error::other(format!("invalid child PID: {err}"))),
          None,
        );
      }
      Ok(pid) => pid,
    };
    instance.children.insert(pid, command);
    // release the lock while `f` runs so the signal handler itself can
    // acquire it without deadlocking
    drop(instance);
    let result = f(child);
    let mut instance = Self::instance();
    instance.children.remove(&pid);
    (result, instance.caught)
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/string_literal.rs | src/string_literal.rs | use super::*;
/// A string literal as written in the source, together with its processed
/// ("cooked") value.
#[derive(PartialEq, Debug, Clone, Ord, Eq, PartialOrd)]
pub(crate) struct StringLiteral<'src> {
  /// contents after delimiter stripping, unindenting, escape processing,
  /// and (if requested) shell expansion
  pub(crate) cooked: String,
  /// whether the literal carried an `x` prefix requesting shell expansion
  pub(crate) expand: bool,
  pub(crate) kind: StringKind,
  /// which segment of a format string this literal is, if any
  pub(crate) part: Option<FormatStringPart>,
  /// the original source token
  pub(crate) token: Token<'src>,
}
impl Display for StringLiteral<'_> {
  /// Reproduce the literal as written: an optional `x` prefix for
  /// shell-expanded strings, an optional `f` prefix when this token opens
  /// a format string, then the raw source lexeme.
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    if self.expand {
      write!(f, "x")?;
    }
    if matches!(
      self.part,
      Some(FormatStringPart::Start | FormatStringPart::Single)
    ) {
      write!(f, "f")?;
    }
    write!(f, "{}", self.token.lexeme())
  }
}
impl Serialize for StringLiteral<'_> {
  /// Serialize as just the cooked string value, discarding source details.
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: Serializer,
  {
    serializer.serialize_str(&self.cooked)
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/alias_style.rs | src/alias_style.rs | use super::*;
// How aliases are displayed in recipe listings.
//
// NOTE(review): variant semantics inferred from names — confirm at use
// sites. Plain `//` comments are used here rather than doc comments so
// clap's `ValueEnum` derive help output is unaffected.
#[derive(Debug, Default, PartialEq, Clone, ValueEnum)]
pub(crate) enum AliasStyle {
  Left,
  #[default]
  Right,
  Separate,
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/execution_context.rs | src/execution_context.rs | use super::*;
/// Borrowed state shared by everything involved in executing one module's
/// recipes. `Copy` because it is just a bundle of references.
#[derive(Copy, Clone)]
pub(crate) struct ExecutionContext<'src: 'run, 'run> {
  pub(crate) config: &'run Config,
  // presumably variables loaded from a `.env` file — confirm at call sites
  pub(crate) dotenv: &'run BTreeMap<String, String>,
  /// the module whose recipes are being executed
  pub(crate) module: &'run Justfile<'src>,
  pub(crate) search: &'run Search,
}
impl<'src: 'run, 'run> ExecutionContext<'src, 'run> {
  /// Create a temporary directory (prefixed `just-`) for `recipe`, honoring
  /// in order of precedence: the `--tempdir` config option, the module's
  /// `tempdir` setting (both resolved against the search working
  /// directory), the user's runtime directory under a `just` subdirectory,
  /// and finally the system default temporary directory.
  pub(crate) fn tempdir<D>(&self, recipe: &Recipe<'src, D>) -> RunResult<'src, TempDir> {
    let mut tempdir_builder = tempfile::Builder::new();
    tempdir_builder.prefix("just-");
    if let Some(tempdir) = &self.config.tempdir {
      tempdir_builder.tempdir_in(self.search.working_directory.join(tempdir))
    } else {
      match &self.module.settings.tempdir {
        Some(tempdir) => tempdir_builder.tempdir_in(self.search.working_directory.join(tempdir)),
        None => {
          if let Some(runtime_dir) = dirs::runtime_dir() {
            let path = runtime_dir.join("just");
            fs::create_dir_all(&path).map_err(|io_error| Error::RuntimeDirIo {
              io_error,
              path: path.clone(),
            })?;
            tempdir_builder.tempdir_in(path)
          } else {
            tempdir_builder.tempdir()
          }
        }
      }
    }
    // a single error mapping covers all four creation paths above
    .map_err(|error| Error::TempdirIo {
      recipe: recipe.name(),
      io_error: error,
    })
  }

  /// Resolve this module's working directory: submodules use their own
  /// working directory, the root module uses the search working directory;
  /// either base may be extended by the module's working-directory setting.
  pub(crate) fn working_directory(&self) -> PathBuf {
    let base = if self.module.is_submodule() {
      &self.module.working_directory
    } else {
      &self.search.working_directory
    };
    if let Some(setting) = &self.module.settings.working_directory {
      base.join(setting)
    } else {
      base.into()
    }
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/fuzzing.rs | src/fuzzing.rs | use super::*;
/// Fuzzing entry point: compile `text` and discard the result. The goal is
/// only to surface panics or crashes in the compiler, not to check output.
pub fn compile(text: &str) {
  let _ = testing::compile(text);
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/list.rs | src/list.rs | use super::*;
/// Displays a sequence as an English list, e.g. `1, 2, or 3`.
pub struct List<T: Display, I: Iterator<Item = T> + Clone> {
  /// word placed before the final element: "or" or "and"
  conjunction: &'static str,
  // the iterator is stored directly and cloned on each Display call
  values: I,
}
impl<T: Display, I: Iterator<Item = T> + Clone> List<T, I> {
  /// List joined with "or", e.g. `1, 2, or 3`
  pub fn or<II: IntoIterator<Item = T, IntoIter = I>>(values: II) -> Self {
    Self {
      conjunction: "or",
      values: values.into_iter(),
    }
  }

  /// List joined with "and", e.g. `1, 2, and 3`
  pub fn and<II: IntoIterator<Item = T, IntoIter = I>>(values: II) -> Self {
    Self {
      conjunction: "and",
      values: values.into_iter(),
    }
  }

  /// Like `or`, but with each value wrapped in backticks
  pub fn or_ticked<II: IntoIterator<Item = T, IntoIter = I>>(
    values: II,
  ) -> List<Enclosure<T>, impl Iterator<Item = Enclosure<T>> + Clone> {
    List::or(values.into_iter().map(Enclosure::tick))
  }

  /// Like `and`, but with each value wrapped in backticks
  pub fn and_ticked<II: IntoIterator<Item = T, IntoIter = I>>(
    values: II,
  ) -> List<Enclosure<T>, impl Iterator<Item = Enclosure<T>> + Clone> {
    List::and(values.into_iter().map(Enclosure::tick))
  }
}
impl<T: Display, I: Iterator<Item = T> + Clone> Display for List<T, I> {
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    // clone so Display can be invoked repeatedly; fuse so the
    // `unreachable!` below is justified
    let mut values = self.values.clone().fuse();
    // zero values: print nothing
    if let Some(first) = values.next() {
      write!(f, "{first}")?;
    } else {
      return Ok(());
    }
    let second = values.next();
    // one value: the value alone, no conjunction
    if second.is_none() {
      return Ok(());
    }
    let third = values.next();
    // exactly two values: "a or b" — no comma
    if let (Some(second), None) = (second.as_ref(), third.as_ref()) {
      write!(f, " {} {second}", self.conjunction)?;
      return Ok(());
    }
    // three or more: comma-separated with an Oxford comma before the
    // conjunction, e.g. "a, b, or c"
    let mut current = second;
    let mut next = third;
    loop {
      match (current, next) {
        (Some(c), Some(n)) => {
          write!(f, ", {c}")?;
          current = Some(n);
          next = values.next();
        }
        (Some(c), None) => {
          write!(f, ", {} {c}", self.conjunction)?;
          return Ok(());
        }
        _ => unreachable!("Iterator was fused, but returned Some after None"),
      }
    }
  }
}
#[cfg(test)]
mod tests {
  use super::*;

  // each case covers lengths 1–4, exercising the single-element,
  // two-element, and Oxford-comma formatting paths

  #[test]
  fn or() {
    assert_eq!("1", List::or(&[1]).to_string());
    assert_eq!("1 or 2", List::or(&[1, 2]).to_string());
    assert_eq!("1, 2, or 3", List::or(&[1, 2, 3]).to_string());
    assert_eq!("1, 2, 3, or 4", List::or(&[1, 2, 3, 4]).to_string());
  }

  #[test]
  fn and() {
    assert_eq!("1", List::and(&[1]).to_string());
    assert_eq!("1 and 2", List::and(&[1, 2]).to_string());
    assert_eq!("1, 2, and 3", List::and(&[1, 2, 3]).to_string());
    assert_eq!("1, 2, 3, and 4", List::and(&[1, 2, 3, 4]).to_string());
  }

  #[test]
  fn or_ticked() {
    assert_eq!("`1`", List::or_ticked(&[1]).to_string());
    assert_eq!("`1` or `2`", List::or_ticked(&[1, 2]).to_string());
    assert_eq!("`1`, `2`, or `3`", List::or_ticked(&[1, 2, 3]).to_string());
    assert_eq!(
      "`1`, `2`, `3`, or `4`",
      List::or_ticked(&[1, 2, 3, 4]).to_string()
    );
  }

  #[test]
  fn and_ticked() {
    assert_eq!("`1`", List::and_ticked(&[1]).to_string());
    assert_eq!("`1` and `2`", List::and_ticked(&[1, 2]).to_string());
    assert_eq!(
      "`1`, `2`, and `3`",
      List::and_ticked(&[1, 2, 3]).to_string()
    );
    assert_eq!(
      "`1`, `2`, `3`, and `4`",
      List::and_ticked(&[1, 2, 3, 4]).to_string()
    );
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/warning.rs | src/warning.rs | use super::*;
/// A compile-time warning. Currently uninhabited — there are no active
/// warning variants — but the type and its display/serialization plumbing
/// are kept so future warnings can be added without rewiring callers.
#[derive(Clone, Debug, PartialEq)]
pub(crate) enum Warning {}
impl Warning {
  /// Token to underline when displaying this warning, if any.
  // `self` is unused because the enum currently has no variants
  #[allow(clippy::unused_self)]
  fn context(&self) -> Option<&Token> {
    None
  }
}
impl ColorDisplay for Warning {
  fn fmt(&self, f: &mut Formatter, color: Color) -> fmt::Result {
    let warning = color.warning();
    let message = color.message();
    // "warning:" label in the warning color, then the (currently empty)
    // message styling
    write!(f, "{} {}", warning.paint("warning:"), message.prefix())?;
    write!(f, "{}", message.suffix())?;
    // if the warning has an associated token, show its source context
    if let Some(token) = self.context() {
      writeln!(f)?;
      write!(f, "{}", token.color_display(color))?;
    }
    Ok(())
  }
}
impl Serialize for Warning {
  /// Serialize as a map with a single `message` entry containing the
  /// uncolored display text.
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: Serializer,
  {
    let mut map = serializer.serialize_map(None)?;
    map.serialize_entry("message", &self.color_display(Color::never()).to_string())?;
    map.end()
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/fragment.rs | src/fragment.rs | use super::*;
/// A line fragment consisting either of…
// see the `Serialize` impl: text serializes as a bare string, while an
// interpolation serializes as a one-element sequence
#[derive(PartialEq, Debug, Clone)]
pub(crate) enum Fragment<'src> {
  /// …an interpolation containing `expression`.
  Interpolation { expression: Expression<'src> },
  /// …raw text…
  Text { token: Token<'src> },
}
impl Serialize for Fragment<'_> {
  /// Text serializes as a bare string; an interpolation serializes as a
  /// single-element sequence containing its expression, keeping the two
  /// cases distinguishable in the serialized form.
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: Serializer,
  {
    match self {
      Self::Text { token } => serializer.serialize_str(token.lexeme()),
      Self::Interpolation { expression } => {
        let mut seq = serializer.serialize_seq(None)?;
        seq.serialize_element(expression)?;
        seq.end()
      }
    }
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/token_kind.rs | src/token_kind.rs | use super::*;
/// Kinds of tokens produced by the lexer. Variants are kept in alphabetical
/// order.
#[derive(Debug, PartialEq, Clone, Copy, Ord, PartialOrd, Eq)]
pub(crate) enum TokenKind {
  AmpersandAmpersand,
  Asterisk,
  At,
  Backtick,
  BangEquals,
  BangTilde,
  BarBar,
  BraceL,
  BraceR,
  BracketL,
  BracketR,
  ByteOrderMark,
  Colon,
  ColonColon,
  ColonEquals,
  Comma,
  Comment,
  Dedent,
  Dollar,
  Eof,
  Eol,
  Equals,
  EqualsEquals,
  EqualsTilde,
  FormatStringContinue,
  FormatStringEnd,
  FormatStringStart,
  Identifier,
  Indent,
  InterpolationEnd,
  InterpolationStart,
  ParenL,
  ParenR,
  Plus,
  QuestionMark,
  Slash,
  StringToken,
  Text,
  Unspecified,
  Whitespace,
}
impl Display for TokenKind {
  /// Human-readable token description used in error messages. Punctuation
  /// tokens are shown quoted (e.g. `':='`); structural or textual tokens
  /// get a descriptive name.
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    use TokenKind::*;
    write!(
      f,
      "{}",
      match *self {
        AmpersandAmpersand => "'&&'",
        Asterisk => "'*'",
        At => "'@'",
        Backtick => "backtick",
        BangEquals => "'!='",
        BangTilde => "'!~'",
        BarBar => "'||'",
        BraceL => "'{'",
        BraceR => "'}'",
        BracketL => "'['",
        BracketR => "']'",
        ByteOrderMark => "byte order mark",
        Colon => "':'",
        ColonColon => "'::'",
        ColonEquals => "':='",
        Comma => "','",
        Comment => "comment",
        Dedent => "dedent",
        Dollar => "'$'",
        Eof => "end of file",
        Eol => "end of line",
        Equals => "'='",
        EqualsEquals => "'=='",
        EqualsTilde => "'=~'",
        FormatStringContinue | FormatStringEnd | FormatStringStart => "format string",
        Identifier => "identifier",
        Indent => "indent",
        InterpolationEnd => "'}}'",
        InterpolationStart => "'{{'",
        ParenL => "'('",
        ParenR => "')'",
        Plus => "'+'",
        // quoted for consistency with every other punctuation token above;
        // previously rendered as a bare `?`
        QuestionMark => "'?'",
        Slash => "'/'",
        StringToken => "string",
        Text => "command text",
        Unspecified => "unspecified",
        Whitespace => "whitespace",
      }
    )
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/subcommand.rs | src/subcommand.rs | use {super::*, clap_mangen::Man};
/// Contents written by `just --init`: a minimal justfile with a single
/// default recipe. The recipe body line MUST be indented — an unindented
/// line after `default:` would not be part of the recipe, making the
/// generated justfile invalid.
pub const INIT_JUSTFILE: &str = "\
# https://just.systems

default:
    echo 'Hello, world!'
";
static BACKTICK_RE: LazyLock<Regex> = LazyLock::new(|| Regex::new("(`.*?`)|(`[^`]*$)").unwrap());
/// Subcommand selected on the command line; `Run` (execute recipes) is the
/// default — see the `Default` impl below.
#[derive(PartialEq, Clone, Debug)]
pub(crate) enum Subcommand {
  Changelog,
  Choose {
    chooser: Option<String>,
  },
  Command {
    arguments: Vec<OsString>,
    binary: OsString,
  },
  Completions {
    shell: completions::Shell,
  },
  Dump,
  Edit,
  Evaluate {
    variable: Option<String>,
  },
  Format,
  Groups,
  Init,
  List {
    path: ModulePath,
  },
  Man,
  Request {
    request: Request,
  },
  Run {
    arguments: Vec<String>,
  },
  Show {
    path: ModulePath,
  },
  Summary,
  Usage {
    path: ModulePath,
  },
  Variables,
}
impl Default for Subcommand {
  /// Default to running recipes with no explicit arguments.
  fn default() -> Self {
    Self::Run {
      arguments: Vec::new(),
    }
  }
}
impl Subcommand {
/// Run this subcommand. Subcommands that need no justfile are dispatched
/// first; the rest search for, compile, and operate on the justfile.
pub(crate) fn execute<'src>(&self, config: &Config, loader: &'src Loader) -> RunResult<'src> {
  use Subcommand::*;
  // subcommands that need no justfile at all
  match self {
    Changelog => {
      Self::changelog();
      return Ok(());
    }
    Completions { shell } => {
      Self::completions(*shell);
      return Ok(());
    }
    Init => return Self::init(config),
    Man => return Self::man(),
    Request { request } => return Self::request(request),
    _ => {}
  }
  let search = Search::find(
    config.ceiling.as_deref(),
    &config.invocation_directory,
    &config.search_config,
  )?;
  // `edit` needs the justfile's location but not its contents
  if let Edit = self {
    return Self::edit(&search);
  }
  let compilation = Self::compile(config, loader, &search)?;
  let justfile = &compilation.justfile;
  match self {
    Choose { chooser } => {
      Self::choose(config, justfile, &search, chooser.as_deref())?;
    }
    // Command and Evaluate are handled inside `Justfile::run`
    Command { .. } | Evaluate { .. } => {
      justfile.run(config, &search, &[])?;
    }
    Dump => Self::dump(config, compilation)?,
    Format => Self::format(config, &search, compilation)?,
    Groups => Self::groups(config, justfile),
    List { path } => Self::list(config, justfile, path)?,
    Run { arguments } => Self::run(config, loader, search, compilation, arguments)?,
    Show { path } => Self::show(config, justfile, path)?,
    Summary => Self::summary(config, justfile),
    Usage { path } => Self::usage(config, justfile, path)?,
    Variables => Self::variables(justfile),
    // all handled by the early-return match above
    Changelog | Completions { .. } | Edit | Init | Man | Request { .. } => unreachable!(),
  }
  Ok(())
}
/// Print the module's public recipe groups, one per line, each prefixed
/// with the configured list prefix.
fn groups(config: &Config, justfile: &Justfile) {
  println!("Recipe groups:");
  justfile
    .public_groups(config)
    .into_iter()
    .for_each(|group| println!("{}{group}", config.list_prefix));
}
/// Run recipes, retrying with the justfile in successive parent
/// directories when the `fallback` setting is enabled and the recipe or
/// submodule isn't found.
fn run<'src>(
  config: &Config,
  loader: &'src Loader,
  mut search: Search,
  mut compilation: Compilation<'src>,
  arguments: &[String],
) -> RunResult<'src> {
  // remembered so the verbose fallback message can show a relative path
  let starting_parent = search.justfile.parent().as_ref().unwrap().lexiclean();
  loop {
    let justfile = &compilation.justfile;
    // fallback only applies when the justfile was found by searching, not
    // when its path was given explicitly
    let fallback = justfile.settings.fallback
      && matches!(
        config.search_config,
        SearchConfig::FromInvocationDirectory | SearchConfig::FromSearchDirectory { .. }
      );
    let result = justfile.run(config, &search, arguments);
    if fallback {
      if let Err(err @ (Error::UnknownRecipe { .. } | Error::UnknownSubmodule { .. })) = result {
        // retry with a justfile in the next parent directory; if there is
        // none, surface the original unknown-recipe error
        search = search
          .search_parent_directory(config.ceiling.as_deref())
          .map_err(|_| err)?;
        if config.verbosity.loquacious() {
          eprintln!(
            "Trying {}",
            // render the new justfile path as `../…/justfile` relative to
            // where the search started
            starting_parent
              .strip_prefix(search.justfile.parent().unwrap())
              .unwrap()
              .components()
              .map(|_| path::Component::ParentDir)
              .collect::<PathBuf>()
              .join(search.justfile.file_name().unwrap())
              .display()
          );
        }
        compilation = Self::compile(config, loader, &search)?;
        continue;
      }
    }
    // with --allow-missing, unknown recipes are silently ignored
    if config.allow_missing
      && matches!(
        result,
        Err(Error::UnknownRecipe { .. } | Error::UnknownSubmodule { .. })
      )
    {
      return Ok(());
    }
    return result;
  }
}
/// Compile the justfile found by `search`, check unstable-feature usage,
/// and print any compilation warnings to stderr (unless quiet).
fn compile<'src>(
  config: &Config,
  loader: &'src Loader,
  search: &Search,
) -> RunResult<'src, Compilation<'src>> {
  let compilation = Compiler::compile(config, loader, &search.justfile)?;
  compilation.justfile.check_unstable(config)?;
  if config.verbosity.loud() {
    for warning in &compilation.justfile.warnings {
      eprintln!("{}", warning.color_display(config.color.stderr()));
    }
  }
  Ok(compilation)
}
/// Print the changelog, bundled into the binary at compile time, to
/// stdout. Write errors (e.g. a closed pipe) are deliberately ignored.
fn changelog() {
  write!(io::stdout(), "{}", include_str!("../CHANGELOG.md")).ok();
}
/// Present zero-argument public recipes — from this module and all
/// submodules — in a chooser program (`fzf` by default), then run whatever
/// the user picked.
fn choose<'src>(
  config: &Config,
  justfile: &Justfile<'src>,
  search: &Search,
  chooser: Option<&str>,
) -> RunResult<'src> {
  // walk the module tree collecting choosable recipes; only recipes
  // callable with zero arguments may be chosen
  let mut recipes = Vec::<&Recipe>::new();
  let mut stack = vec![justfile];
  while let Some(module) = stack.pop() {
    recipes.extend(
      module
        .public_recipes(config)
        .iter()
        .filter(|recipe| recipe.min_arguments() == 0),
    );
    stack.extend(module.modules.values());
  }
  if recipes.is_empty() {
    return Err(Error::NoChoosableRecipes);
  }
  let chooser = if let Some(chooser) = chooser {
    OsString::from(chooser)
  } else {
    // default chooser: fzf with a preview pane showing each recipe's source
    let mut chooser = OsString::new();
    chooser.push("fzf --multi --preview 'just --unstable --color always --justfile \"");
    chooser.push(&search.justfile);
    chooser.push("\" --show {}'");
    chooser
  };
  // run the chooser through the configured shell, piping recipe names in
  // and reading selections back out
  let result = justfile
    .settings
    .shell_command(config)
    .arg(&chooser)
    .current_dir(&search.working_directory)
    .stdin(Stdio::piped())
    .stdout(Stdio::piped())
    .spawn();
  let mut child = match result {
    Ok(child) => child,
    Err(io_error) => {
      let (shell_binary, shell_arguments) = justfile.settings.shell(config);
      return Err(Error::ChooserInvoke {
        shell_binary: shell_binary.to_owned(),
        shell_arguments: shell_arguments.join(" "),
        chooser,
        io_error,
      });
    }
  };
  // feed recipe names to the chooser, one per line; a broken pipe just
  // means the chooser exited early and is not treated as an error
  let stdin = child.stdin.as_mut().unwrap();
  for recipe in recipes {
    if let Err(io_error) = writeln!(stdin, "{}", recipe.spaced_namepath()) {
      if io_error.kind() != std::io::ErrorKind::BrokenPipe {
        return Err(Error::ChooserWrite { io_error, chooser });
      }
    }
  }
  let output = match child.wait_with_output() {
    Ok(output) => output,
    Err(io_error) => {
      return Err(Error::ChooserRead { io_error, chooser });
    }
  };
  if !output.status.success() {
    return Err(Error::ChooserStatus {
      status: output.status,
      chooser,
    });
  }
  // run whatever the chooser printed (whitespace-separated recipe names)
  let stdout = String::from_utf8_lossy(&output.stdout);
  let recipes = stdout
    .split_whitespace()
    .map(str::to_owned)
    .collect::<Vec<String>>();
  justfile.run(config, search, &recipes)
}
/// Print the shell completion script for `shell` to stdout.
fn completions(shell: completions::Shell) {
  let script = shell.script();
  print!("{script}");
}
/// Dump the justfile to stdout, either as JSON or as just source, per
/// `config.dump_format`.
fn dump(config: &Config, compilation: Compilation) -> RunResult<'static> {
  match config.dump_format {
    DumpFormat::Json => {
      serde_json::to_writer(io::stdout(), &compilation.justfile)
        .map_err(|source| Error::DumpJson { source })?;
      // Terminate the JSON document with a newline.
      println!();
    }
    DumpFormat::Just => print!("{}", compilation.root_ast()),
  }
  Ok(())
}
/// Open the justfile in the user's editor: `$VISUAL`, falling back to
/// `$EDITOR`, then to `vim`.
fn edit(search: &Search) -> RunResult<'static> {
  let editor = env::var_os("VISUAL")
    .or_else(|| env::var_os("EDITOR"))
    .unwrap_or_else(|| "vim".into());
  let error = Command::new(&editor)
    .current_dir(&search.working_directory)
    .arg(&search.justfile)
    .status();
  // Distinguish failure to launch the editor from the editor itself
  // exiting unsuccessfully.
  let status = match error {
    Err(io_error) => return Err(Error::EditorInvoke { editor, io_error }),
    Ok(status) => status,
  };
  if !status.success() {
    return Err(Error::EditorStatus { editor, status });
  }
  Ok(())
}
/// Format the justfile in place, or, with `--check`, print a diff and fail
/// if the formatted output differs from the source.
fn format(config: &Config, search: &Search, compilation: Compilation) -> RunResult<'static> {
  let justfile = &compilation.justfile;
  let src = compilation.root_src();
  let ast = compilation.root_ast();
  config.require_unstable(justfile, UnstableFeature::FormatSubcommand)?;
  let formatted = ast.to_string();
  // Already formatted: nothing to do in either mode.
  if formatted == src {
    return Ok(());
  }
  if config.check {
    if !config.verbosity.quiet() {
      use similar::{ChangeTag, TextDiff};
      let diff = TextDiff::configure()
        .algorithm(similar::Algorithm::Patience)
        .diff_lines(src, &formatted);
      for op in diff.ops() {
        for change in diff.iter_changes(op) {
          // One line per change, unified-diff style, colored per tag.
          let (symbol, color) = match change.tag() {
            ChangeTag::Delete => ("-", config.color.stdout().diff_deleted()),
            ChangeTag::Equal => (" ", config.color.stdout()),
            ChangeTag::Insert => ("+", config.color.stdout().diff_added()),
          };
          print!("{}{symbol}{change}{}", color.prefix(), color.suffix());
        }
      }
    }
    Err(Error::FormatCheckFoundDiff)
  } else {
    fs::write(&search.justfile, formatted).map_err(|io_error| Error::WriteJustfile {
      justfile: search.justfile.clone(),
      io_error,
    })?;
    if config.verbosity.loud() {
      eprintln!("Wrote justfile to `{}`", search.justfile.display());
    }
    Ok(())
  }
}
fn init(config: &Config) -> RunResult<'static> {
let search = Search::init(
&config.search_config,
&config.invocation_directory,
config.ceiling.as_deref(),
)?;
if search.justfile.is_file() {
return Err(Error::InitExists {
justfile: search.justfile,
});
}
if let Err(io_error) = fs::write(&search.justfile, INIT_JUSTFILE) {
return Err(Error::WriteJustfile {
justfile: search.justfile,
io_error,
});
}
if config.verbosity.loud() {
eprintln!("Wrote justfile to `{}`", search.justfile.display());
}
Ok(())
}
fn man() -> RunResult<'static> {
let mut buffer = Vec::<u8>::new();
Man::new(Config::app())
.render(&mut buffer)
.expect("writing to buffer cannot fail");
let mut stdout = io::stdout().lock();
stdout
.write_all(&buffer)
.map_err(|io_error| Error::StdoutIo { io_error })?;
stdout
.flush()
.map_err(|io_error| Error::StdoutIo { io_error })?;
Ok(())
}
/// Handle a `Request` by probing the environment and writing a JSON
/// `Response` to stdout. NOTE(review): appears to exist for just's own
/// integration tests — confirm against callers.
fn request(request: &Request) -> RunResult<'static> {
  let response = match request {
    Request::EnvironmentVariable(key) => Response::EnvironmentVariable(env::var_os(key)),
    #[cfg(not(windows))]
    Request::Signal => {
      // Block every signal on this thread, then wait synchronously for
      // one to arrive and report its name.
      let sigset = nix::sys::signal::SigSet::all();
      sigset.thread_block().unwrap();
      let received = sigset.wait().unwrap();
      Response::Signal(received.as_str().into())
    }
  };
  serde_json::to_writer(io::stdout(), &response).map_err(|source| Error::DumpJson { source })?;
  Ok(())
}
fn list(config: &Config, mut module: &Justfile, path: &ModulePath) -> RunResult<'static> {
for name in &path.path {
module = module
.modules
.get(name)
.ok_or_else(|| Error::UnknownSubmodule {
path: path.to_string(),
})?;
}
Self::list_module(config, module, 0);
Ok(())
}
/// Print a recipe listing for `module` at the given nesting `depth`,
/// grouping recipes and submodules under their `[group]` headings and
/// aligning trailing doc comments and alias annotations.
fn list_module(config: &Config, module: &Justfile, depth: usize) {
  /// Print the doc comment and alias annotation that follow a recipe or
  /// submodule signature, padded so comments line up across the listing.
  fn print_doc_and_aliases(
    config: &Config,
    name: &str,
    doc: Option<&str>,
    aliases: &[&str],
    max_signature_width: usize,
    signature_widths: &BTreeMap<&str, usize>,
  ) {
    let color = config.color.stdout();
    let inline_aliases = config.alias_style != AliasStyle::Separate && !aliases.is_empty();
    if inline_aliases || doc.is_some() {
      // Pad out to the widest signature, then print the `#` marker.
      print!(
        "{:padding$}{}",
        "",
        color.doc().paint("#"),
        padding = max_signature_width.saturating_sub(signature_widths[name]) + 1,
      );
    }
    let print_aliases = || {
      print!(
        " {}",
        color.alias().paint(&format!(
          "[alias{}: {}]",
          if aliases.len() == 1 { "" } else { "es" },
          aliases.join(", ")
        ))
      );
    };
    if inline_aliases && config.alias_style == AliasStyle::Left {
      print_aliases();
    }
    if let Some(doc) = doc {
      print!(" ");
      // Highlight backtick-quoted spans inside the doc comment.
      let mut end = 0;
      for backtick in BACKTICK_RE.find_iter(doc) {
        let prefix = &doc[end..backtick.start()];
        if !prefix.is_empty() {
          print!("{}", color.doc().paint(prefix));
        }
        print!("{}", color.doc_backtick().paint(backtick.as_str()));
        end = backtick.end();
      }
      let suffix = &doc[end..];
      if !suffix.is_empty() {
        print!("{}", color.doc().paint(suffix));
      }
    }
    if inline_aliases && config.alias_style == AliasStyle::Right {
      print_aliases();
    }
    println!();
  }
  // Map from recipe name to its public aliases, unless suppressed.
  let aliases = if config.no_aliases {
    BTreeMap::new()
  } else {
    let mut aliases = BTreeMap::<&str, Vec<&str>>::new();
    for alias in module.aliases.values().filter(|alias| alias.is_public()) {
      aliases
        .entry(alias.target.name.lexeme())
        .or_default()
        .push(alias.name.lexeme());
    }
    aliases
  };
  // Display width of every signature — recipes, their aliases, and
  // submodule stubs — used to align trailing comments.
  let signature_widths = {
    let mut signature_widths: BTreeMap<&str, usize> = BTreeMap::new();
    for (name, recipe) in &module.recipes {
      if !recipe.is_public() {
        continue;
      }
      for name in iter::once(name).chain(aliases.get(name).unwrap_or(&Vec::new())) {
        signature_widths.insert(
          name,
          UnicodeWidthStr::width(
            RecipeSignature { name, recipe }
              .color_display(Color::never())
              .to_string()
              .as_str(),
          ),
        );
      }
    }
    if !config.list_submodules {
      for submodule in module.public_modules(config) {
        let name = submodule.name();
        signature_widths.insert(name, UnicodeWidthStr::width(format!("{name} ...").as_str()));
      }
    }
    signature_widths
  };
  // Signatures wider than 50 columns are excluded from alignment.
  let max_signature_width = signature_widths
    .values()
    .copied()
    .filter(|width| *width <= 50)
    .max()
    .unwrap_or(0);
  let list_prefix = config.list_prefix.repeat(depth + 1);
  if depth == 0 {
    print!("{}", config.list_heading);
  }
  // Recipes bucketed by group; ungrouped recipes go under `None`.
  let recipe_groups = {
    let mut groups = BTreeMap::<Option<String>, Vec<&Recipe>>::new();
    for recipe in module.public_recipes(config) {
      let recipe_groups = recipe.groups();
      if recipe_groups.is_empty() {
        groups.entry(None).or_default().push(recipe);
      } else {
        for group in recipe_groups {
          groups.entry(Some(group)).or_default().push(recipe);
        }
      }
    }
    groups
  };
  // Submodules bucketed by group, same scheme as recipes.
  let submodule_groups = {
    let mut groups = BTreeMap::<Option<String>, Vec<&Justfile>>::new();
    for submodule in module.public_modules(config) {
      let submodule_groups = submodule.groups();
      if submodule_groups.is_empty() {
        groups.entry(None).or_default().push(submodule);
      } else {
        for group in submodule_groups {
          groups
            .entry(Some(group.to_string()))
            .or_default()
            .push(submodule);
        }
      }
    }
    groups
  };
  // Ungrouped items print first, then groups in `public_groups` order.
  let mut ordered_groups = module
    .public_groups(config)
    .into_iter()
    .map(Some)
    .collect::<Vec<Option<String>>>();
  if recipe_groups.contains_key(&None) || submodule_groups.contains_key(&None) {
    ordered_groups.insert(0, None);
  }
  let no_groups = ordered_groups.len() == 1 && ordered_groups.first() == Some(&None);
  let mut groups_count = 0;
  if !no_groups {
    groups_count = ordered_groups.len();
  }
  for (i, group) in ordered_groups.into_iter().enumerate() {
    if i > 0 {
      println!();
    }
    if !no_groups {
      if let Some(group) = &group {
        println!(
          "{list_prefix}{}",
          config.color.stdout().group().paint(&format!("[{group}]"))
        );
      }
    }
    if let Some(recipes) = recipe_groups.get(&group) {
      for recipe in recipes {
        // With the separate alias style, each alias prints as its own
        // listing entry after the recipe itself.
        let recipe_alias_entries = if config.alias_style == AliasStyle::Separate {
          aliases.get(recipe.name())
        } else {
          None
        };
        for (i, name) in iter::once(&recipe.name())
          .chain(recipe_alias_entries.unwrap_or(&Vec::new()))
          .enumerate()
        {
          let doc = if i == 0 {
            recipe.doc().map(Cow::Borrowed)
          } else {
            Some(Cow::Owned(format!("alias for `{}`", recipe.name)))
          };
          // Multi-line docs print above the signature instead of after it.
          if let Some(doc) = &doc {
            if doc.lines().count() > 1 {
              for line in doc.lines() {
                println!(
                  "{list_prefix}{} {}",
                  config.color.stdout().doc().paint("#"),
                  config.color.stdout().doc().paint(line),
                );
              }
            }
          }
          print!(
            "{list_prefix}{}",
            RecipeSignature { name, recipe }.color_display(config.color.stdout())
          );
          print_doc_and_aliases(
            config,
            name,
            doc.filter(|doc| doc.lines().count() <= 1).as_deref(),
            aliases
              .get(recipe.name())
              .map(Vec::as_slice)
              .unwrap_or_default(),
            max_signature_width,
            &signature_widths,
          );
        }
      }
    }
    if let Some(submodules) = submodule_groups.get(&group) {
      for (i, submodule) in submodules.iter().enumerate() {
        if config.list_submodules {
          if no_groups && (i + groups_count > 0) {
            println!();
          }
          println!("{list_prefix}{}:", submodule.name());
          // Recurse, indenting one level deeper.
          Self::list_module(config, submodule, depth + 1);
        } else {
          print!("{list_prefix}{} ...", submodule.name());
          print_doc_and_aliases(
            config,
            submodule.name(),
            submodule.doc.as_deref(),
            &[],
            max_signature_width,
            &signature_widths,
          );
        }
      }
    }
  }
}
/// Print the source of the recipe at `path`, preceded by the alias through
/// which it was resolved, if any.
fn show<'src>(config: &Config, module: &Justfile<'src>, path: &ModulePath) -> RunResult<'src> {
  let (alias, recipe) = Self::resolve_path(module, path)?;
  // `Option` iterates zero or one times.
  for alias in alias {
    println!("{alias}");
  }
  let display = recipe.color_display(config.color.stdout());
  println!("{display}");
  Ok(())
}
/// Print a space-separated summary of all public recipe names, warning on
/// stderr when there are none.
fn summary(config: &Config, justfile: &Justfile) {
  let mut count = 0;
  Self::summary_recursive(config, &mut Vec::new(), &mut count, justfile);
  println!();
  if config.verbosity.loud() && count == 0 {
    eprintln!("Justfile contains no recipes.");
  }
}
/// Print the names of all public recipes in `justfile` and its public
/// submodules, space-separated, prefixing nested recipes with their
/// `module::` path. `printed` counts recipes emitted so far, so separators
/// only appear between names.
fn summary_recursive<'a>(
  config: &Config,
  components: &mut Vec<&'a str>,
  printed: &mut usize,
  justfile: &'a Justfile,
) {
  let path = components.join("::");
  for recipe in justfile.public_recipes(config) {
    if *printed > 0 {
      print!(" ");
    }
    if path.is_empty() {
      print!("{}", recipe.name());
    } else {
      print!("{}::{}", path, recipe.name());
    }
    *printed += 1;
  }
  for module in justfile.public_modules(config) {
    // Push this module's name, recurse, then restore the path.
    let name = module.name();
    components.push(name);
    Self::summary_recursive(config, components, printed, module);
    components.pop();
  }
}
/// Print the long usage string for the recipe at `path`, preceded by the
/// alias through which it was resolved, if any.
fn usage<'src>(config: &Config, module: &Justfile<'src>, path: &ModulePath) -> RunResult<'src> {
  let (alias, recipe) = Self::resolve_path(module, path)?;
  if let Some(alias) = alias {
    println!("{alias}");
  }
  println!(
    "{}",
    Usage {
      long: true,
      path,
      recipe,
    }
    .color_display(config.color.stdout()),
  );
  Ok(())
}
/// Resolve a `foo::bar::baz` module path to a recipe, descending into
/// submodules for every component but the last. The final component may
/// name an alias — returned along with its target recipe — or a recipe.
fn resolve_path<'src, 'run>(
  mut module: &'run Justfile<'src>,
  path: &ModulePath,
) -> RunResult<'src, (Option<&'run Alias<'src>>, &'run Recipe<'src>)> {
  for name in &path.path[0..path.path.len() - 1] {
    module = module
      .modules
      .get(name)
      .ok_or_else(|| Error::UnknownSubmodule {
        path: path.to_string(),
      })?;
  }
  let name = path.path.last().unwrap();
  if let Some(alias) = module.get_alias(name) {
    Ok((Some(alias), &alias.target))
  } else if let Some(recipe) = module.get_recipe(name) {
    Ok((None, recipe))
  } else {
    Err(Error::UnknownRecipe {
      recipe: name.to_owned(),
      suggestion: module.suggest_recipe(name),
    })
  }
}
/// Print the names of all non-private top-level variable assignments,
/// space-separated on a single line.
fn variables(justfile: &Justfile) {
  let mut first = true;
  for (_, assignment) in justfile
    .assignments
    .iter()
    .filter(|(_, binding)| !binding.private)
  {
    if !first {
      print!(" ");
    }
    print!("{}", assignment.name);
    first = false;
  }
  println!();
}
}
#[cfg(test)]
mod tests {
  use super::*;

  // The starter justfile written by `just --init` must itself compile.
  #[test]
  fn init_justfile() {
    testing::compile(INIT_JUSTFILE);
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/signal.rs | src/signal.rs | use super::*;
/// The signals just models, with their conventional signal numbers as
/// discriminants; the test module below verifies they match the platform's
/// `libc` constants. `SIGINFO` exists only on the BSD-derived targets
/// listed in its cfg.
#[derive(Clone, Copy, Debug, PartialEq)]
#[repr(i32)]
pub(crate) enum Signal {
  Hangup = 1,
  #[cfg(any(
    target_os = "dragonfly",
    target_os = "freebsd",
    target_os = "ios",
    target_os = "macos",
    target_os = "netbsd",
    target_os = "openbsd",
  ))]
  Info = 29,
  Interrupt = 2,
  Quit = 3,
  Terminate = 15,
}
impl Signal {
  /// Every `Signal` variant available on the current platform.
  #[cfg(not(windows))]
  pub(crate) const ALL: &'static [Signal] = &[
    Signal::Hangup,
    #[cfg(any(
      target_os = "dragonfly",
      target_os = "freebsd",
      target_os = "ios",
      target_os = "macos",
      target_os = "netbsd",
      target_os = "openbsd",
    ))]
    Signal::Info,
    Signal::Interrupt,
    Signal::Quit,
    Signal::Terminate,
  ];

  /// Conventional exit code for a process terminated by this signal:
  /// 128 + signal number.
  pub(crate) fn code(self) -> i32 {
    self.number().checked_add(128).unwrap()
  }

  /// Whether this signal is fatal — all of them except `SIGINFO`.
  pub(crate) fn is_fatal(self) -> bool {
    match self {
      #[cfg(any(
        target_os = "dragonfly",
        target_os = "freebsd",
        target_os = "ios",
        target_os = "macos",
        target_os = "netbsd",
        target_os = "openbsd",
      ))]
      Self::Info => false,
      Self::Hangup | Self::Interrupt | Self::Quit | Self::Terminate => true,
    }
  }

  /// The signal's numeric value, i.e. its enum discriminant.
  pub(crate) fn number(self) -> i32 {
    self as i32
  }
}
impl Display for Signal {
  /// Format as the signal's conventional name, e.g. `SIGINT`.
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    let name = match self {
      Signal::Hangup => "SIGHUP",
      #[cfg(any(
        target_os = "dragonfly",
        target_os = "freebsd",
        target_os = "ios",
        target_os = "macos",
        target_os = "netbsd",
        target_os = "openbsd",
      ))]
      Signal::Info => "SIGINFO",
      Signal::Interrupt => "SIGINT",
      Signal::Quit => "SIGQUIT",
      Signal::Terminate => "SIGTERM",
    };
    f.write_str(name)
  }
}
/// Convert to the corresponding `nix` signal constant for use with `nix`'s
/// signal-handling APIs.
#[cfg(not(windows))]
impl From<Signal> for nix::sys::signal::Signal {
  fn from(signal: Signal) -> Self {
    match signal {
      Signal::Hangup => Self::SIGHUP,
      #[cfg(any(
        target_os = "dragonfly",
        target_os = "freebsd",
        target_os = "ios",
        target_os = "macos",
        target_os = "netbsd",
        target_os = "openbsd",
      ))]
      Signal::Info => Self::SIGINFO,
      Signal::Interrupt => Self::SIGINT,
      Signal::Quit => Self::SIGQUIT,
      Signal::Terminate => Self::SIGTERM,
    }
  }
}
impl TryFrom<u8> for Signal {
  type Error = io::Error;

  /// Parse a raw signal number, failing with an `io::Error` for numbers
  /// just doesn't model.
  fn try_from(n: u8) -> Result<Signal, Self::Error> {
    match n {
      1 => Ok(Self::Hangup),
      2 => Ok(Self::Interrupt),
      3 => Ok(Self::Quit),
      15 => Ok(Self::Terminate),
      #[cfg(any(
        target_os = "dragonfly",
        target_os = "freebsd",
        target_os = "ios",
        target_os = "macos",
        target_os = "netbsd",
        target_os = "openbsd",
      ))]
      29 => Ok(Self::Info),
      _ => Err(io::Error::other(format!("unexpected signal: {n}"))),
    }
  }
}
#[cfg(test)]
#[cfg(not(windows))]
mod tests {
  use super::*;

  // `TryFrom<u8>` must be able to represent every signal's number.
  #[test]
  fn signals_fit_in_u8() {
    for signal in Signal::ALL {
      assert!(signal.number() <= i32::from(u8::MAX));
    }
  }

  // `code` unwraps a checked add, so exercise it for every signal.
  #[test]
  fn signals_have_valid_exit_codes() {
    for signal in Signal::ALL {
      signal.code();
    }
  }

  // Discriminants must agree with the platform's libc signal numbers, and
  // round-trip through `TryFrom<u8>`.
  #[test]
  fn signal_numbers_are_correct() {
    for &signal in Signal::ALL {
      let n = match signal {
        Signal::Hangup => libc::SIGHUP,
        #[cfg(any(
          target_os = "dragonfly",
          target_os = "freebsd",
          target_os = "ios",
          target_os = "macos",
          target_os = "netbsd",
          target_os = "openbsd",
        ))]
        Signal::Info => libc::SIGINFO,
        Signal::Interrupt => libc::SIGINT,
        Signal::Quit => libc::SIGQUIT,
        Signal::Terminate => libc::SIGTERM,
      };
      assert_eq!(signal as i32, n);
      assert_eq!(Signal::try_from(u8::try_from(n).unwrap()).unwrap(), signal);
    }
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/ran.rs | src/ran.rs | use super::*;
/// Thread-safe map from recipe namepath, to argument lists, to a shared
/// boolean flag — one flag per distinct (recipe, arguments) invocation.
/// NOTE(review): flag semantics ("has this invocation run?") inferred from
/// the type name; confirm against callers.
#[derive(Default)]
pub(crate) struct Ran(Mutex<BTreeMap<String, BTreeMap<Vec<Vec<String>>, Arc<Mutex<bool>>>>>);
impl Ran {
  /// Return the flag mutex for `recipe` invoked with `arguments`, creating
  /// a fresh `false` flag on first request for that combination.
  pub(crate) fn mutex(&self, recipe: &Recipe, arguments: &[Vec<String>]) -> Arc<Mutex<bool>> {
    let mut ran = self.0.lock().unwrap();
    let by_arguments = ran.entry(recipe.namepath().into()).or_default();
    by_arguments.entry(arguments.into()).or_default().clone()
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/search_error.rs | src/search_error.rs | use super::*;
/// Errors that can occur while searching for a justfile; display strings
/// are generated by `snafu` from the attributes below.
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)))]
pub(crate) enum SearchError {
  #[snafu(display("Cannot initialize global justfile"))]
  GlobalJustfileInit,
  #[snafu(display("Global justfile not found"))]
  GlobalJustfileNotFound,
  #[snafu(display(
    "I/O error reading directory `{}`: {}",
    directory.display(),
    io_error
  ))]
  Io {
    directory: PathBuf,
    io_error: io::Error,
  },
  #[snafu(display("Justfile path had no parent: {}", path.display()))]
  JustfileHadNoParent { path: PathBuf },
  // Candidates are a `BTreeSet`, so the message lists them sorted.
  #[snafu(display(
    "Multiple candidate justfiles found in `{}`: {}",
    candidates.iter().next().unwrap().parent().unwrap().display(),
    List::and_ticked(
      candidates
        .iter()
        .map(|candidate| candidate.file_name().unwrap().to_string_lossy())
    ),
  ))]
  MultipleCandidates { candidates: BTreeSet<PathBuf> },
  #[snafu(display("No justfile found"))]
  NotFound,
}
#[cfg(test)]
mod tests {
  use super::*;

  // The candidate set is ordered, so the error message is deterministic:
  // `JUSTFILE` sorts before `justfile`.
  #[test]
  fn multiple_candidates_formatting() {
    let error = SearchError::MultipleCandidates {
      candidates: [Path::new("/foo/justfile"), Path::new("/foo/JUSTFILE")]
        .iter()
        .map(|path| path.to_path_buf())
        .collect(),
    };
    assert_eq!(
      error.to_string(),
      "Multiple candidate justfiles found in `/foo`: `JUSTFILE` and `justfile`"
    );
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/conditional_operator.rs | src/conditional_operator.rs | use super::*;
/// A conditional expression operator.
///
/// `Display` renders each operator as its source lexeme.
#[derive(PartialEq, Debug, Copy, Clone)]
pub(crate) enum ConditionalOperator {
  /// `==`
  Equality,
  /// `!=`
  Inequality,
  /// `=~`
  RegexMatch,
  /// `!~`
  RegexMismatch,
}
impl Display for ConditionalOperator {
  /// Format as the operator's source lexeme.
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    let symbol = match self {
      Self::Equality => "==",
      Self::Inequality => "!=",
      Self::RegexMatch => "=~",
      Self::RegexMismatch => "!~",
    };
    f.write_str(symbol)
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/platform.rs | src/platform.rs | use super::*;
/// Unit struct carrying platform-specific functionality; the actual impls
/// live in the cfg-gated `unix` and `windows` submodules below.
pub(crate) struct Platform;

#[cfg(unix)]
mod unix;

#[cfg(windows)]
mod windows;
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/command_ext.rs | src/command_ext.rs | use super::*;
/// Extension methods on `Command` for exporting just variables into a
/// child's environment and for running children under the signal handler.
pub(crate) trait CommandExt {
  /// Set `dotenv` variables and exported scope bindings on the command's
  /// environment, honoring `unexports`. Returns `self` for chaining.
  fn export(
    &mut self,
    settings: &Settings,
    dotenv: &BTreeMap<String, String>,
    scope: &Scope,
    unexports: &HashSet<String>,
  ) -> &mut Command;
  /// Recursively export bindings from `scope` and its ancestors.
  fn export_scope(&mut self, settings: &Settings, scope: &Scope, unexports: &HashSet<String>);
  /// Run via `SignalHandler::spawn`, collecting output and any signal
  /// caught while the child ran.
  fn output_guard(self) -> (io::Result<process::Output>, Option<Signal>);
  /// Like `output_guard`, but returns stdout as a `String` with a single
  /// trailing newline stripped.
  fn output_guard_stdout(self) -> Result<String, OutputError>;
  /// Run via `SignalHandler::spawn`, waiting for the exit status.
  fn status_guard(self) -> (io::Result<ExitStatus>, Option<Signal>);
}
impl CommandExt for Command {
  fn export(
    &mut self,
    settings: &Settings,
    dotenv: &BTreeMap<String, String>,
    scope: &Scope,
    unexports: &HashSet<String>,
  ) -> &mut Command {
    // Dotenv variables first, so scope bindings set below override them.
    for (name, value) in dotenv {
      self.env(name, value);
    }
    // NOTE(review): export starts at `scope.parent()`, skipping the
    // innermost scope's own bindings — presumably deliberate; confirm
    // against callers.
    if let Some(parent) = scope.parent() {
      self.export_scope(settings, parent, unexports);
    }
    self
  }

  fn export_scope(&mut self, settings: &Settings, scope: &Scope, unexports: &HashSet<String>) {
    // Ancestors first, so bindings in inner scopes override outer ones.
    if let Some(parent) = scope.parent() {
      self.export_scope(settings, parent, unexports);
    }
    for unexport in unexports {
      self.env_remove(unexport);
    }
    for binding in scope.bindings() {
      // Export when marked `export`, or when `set export` is on and the
      // binding isn't part of the prelude.
      if binding.export || (settings.export && !binding.prelude) {
        self.env(binding.name.lexeme(), &binding.value);
      }
    }
  }

  fn output_guard(self) -> (io::Result<process::Output>, Option<Signal>) {
    SignalHandler::spawn(self, process::Child::wait_with_output)
  }

  fn output_guard_stdout(self) -> Result<String, OutputError> {
    let (result, caught) = self.output_guard();
    let output = result.map_err(OutputError::Io)?;
    OutputError::result_from_exit_status(output.status)?;
    let output = str::from_utf8(&output.stdout).map_err(OutputError::Utf8)?;
    // A signal caught while the child ran trumps its output.
    if let Some(signal) = caught {
      return Err(OutputError::Interrupted(signal));
    }
    // Strip at most one trailing line terminator, CRLF or LF.
    Ok(
      output
        .strip_suffix("\r\n")
        .or_else(|| output.strip_suffix("\n"))
        .unwrap_or(output)
        .into(),
    )
  }

  fn status_guard(self) -> (io::Result<ExitStatus>, Option<Signal>) {
    SignalHandler::spawn(self, |mut child| child.wait())
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/attribute_set.rs | src/attribute_set.rs | use {super::*, std::collections};
/// The attributes attached to an item, kept ordered and deduplicated in a
/// `BTreeSet`; serializes as the underlying set.
#[derive(Default, Debug, Clone, PartialEq, Serialize)]
pub(crate) struct AttributeSet<'src>(BTreeSet<Attribute<'src>>);
impl<'src> AttributeSet<'src> {
pub(crate) fn len(&self) -> usize {
self.0.len()
}
pub(crate) fn contains(&self, target: AttributeDiscriminant) -> bool {
self.0.iter().any(|attr| attr.discriminant() == target)
}
pub(crate) fn get(&self, discriminant: AttributeDiscriminant) -> Option<&Attribute<'src>> {
self
.0
.iter()
.find(|attr| discriminant == attr.discriminant())
}
pub(crate) fn iter<'a>(&'a self) -> collections::btree_set::Iter<'a, Attribute<'src>> {
self.0.iter()
}
pub(crate) fn ensure_valid_attributes(
&self,
item_kind: &'static str,
item_token: Token<'src>,
valid: &[AttributeDiscriminant],
) -> Result<(), CompileError<'src>> {
for attribute in &self.0 {
let discriminant = attribute.discriminant();
if !valid.contains(&discriminant) {
return Err(item_token.error(CompileErrorKind::InvalidAttribute {
item_kind,
item_name: item_token.lexeme(),
attribute: Box::new(attribute.clone()),
}));
}
}
Ok(())
}
}
// Collect attributes into a set, ordering and deduplicating them.
impl<'src> FromIterator<Attribute<'src>> for AttributeSet<'src> {
  fn from_iter<T: IntoIterator<Item = attribute::Attribute<'src>>>(iter: T) -> Self {
    Self(iter.into_iter().collect())
  }
}
// Borrowed iteration, yielding attributes in sorted order.
impl<'src, 'a> IntoIterator for &'a AttributeSet<'src> {
  type Item = &'a Attribute<'src>;
  type IntoIter = collections::btree_set::Iter<'a, Attribute<'src>>;
  fn into_iter(self) -> Self::IntoIter {
    self.0.iter()
  }
}
// Consuming iteration, yielding owned attributes in sorted order.
impl<'src> IntoIterator for AttributeSet<'src> {
  type Item = Attribute<'src>;
  type IntoIter = collections::btree_set::IntoIter<Attribute<'src>>;
  fn into_iter(self) -> Self::IntoIter {
    self.0.into_iter()
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/table.rs | src/table.rs | use {super::*, std::collections::btree_map};
/// An ordered map from `&str` keys to values that know their own key via
/// the `Keyed` trait; serializes transparently as the inner map.
#[derive(Debug, PartialEq, Serialize)]
#[serde(transparent)]
pub(crate) struct Table<'key, V: Keyed<'key>> {
  map: BTreeMap<&'key str, V>,
}
impl<'key, V: Keyed<'key>> Table<'key, V> {
  /// Create an empty table.
  pub(crate) fn new() -> Self {
    Self {
      map: BTreeMap::new(),
    }
  }

  /// Insert `value` under the key it reports via `Keyed::key`.
  pub(crate) fn insert(&mut self, value: V) {
    let key = value.key();
    self.map.insert(key, value);
  }

  pub(crate) fn len(&self) -> usize {
    self.map.len()
  }

  pub(crate) fn get(&self, key: &str) -> Option<&V> {
    self.map.get(key)
  }

  pub(crate) fn is_empty(&self) -> bool {
    self.len() == 0
  }

  pub(crate) fn values(&self) -> btree_map::Values<&'key str, V> {
    self.map.values()
  }

  pub(crate) fn contains_key(&self, key: &str) -> bool {
    self.get(key).is_some()
  }

  pub(crate) fn keys(&self) -> btree_map::Keys<&'key str, V> {
    self.map.keys()
  }

  pub(crate) fn iter(&self) -> btree_map::Iter<&'key str, V> {
    self.map.iter()
  }

  /// Remove and return the value with the smallest key, if any.
  pub(crate) fn pop(&mut self) -> Option<V> {
    self.map.pop_first().map(|(_, value)| value)
  }

  pub(crate) fn remove(&mut self, key: &str) -> Option<V> {
    self.map.remove(key)
  }
}
// An empty table, equivalent to `Table::new`.
impl<'key, V: Keyed<'key>> Default for Table<'key, V> {
  fn default() -> Self {
    Self::new()
  }
}
// Collect values keyed by what each reports via `Keyed::key`; later values
// with duplicate keys overwrite earlier ones, as with `BTreeMap`.
impl<'key, V: Keyed<'key>> FromIterator<V> for Table<'key, V> {
  fn from_iter<I: IntoIterator<Item = V>>(iter: I) -> Self {
    Self {
      map: iter.into_iter().map(|value| (value.key(), value)).collect(),
    }
  }
}
impl<'key, V: Keyed<'key>> Index<&'key str> for Table<'key, V> {
  type Output = V;

  /// Look up `key`, panicking if absent; use `get` for fallible lookup.
  #[inline]
  fn index(&self, key: &str) -> &V {
    self.map.get(key).expect("no entry found for key")
  }
}
// Consuming iteration over (key, value) pairs in key order.
impl<'key, V: Keyed<'key>> IntoIterator for Table<'key, V> {
  type IntoIter = btree_map::IntoIter<&'key str, V>;
  type Item = (&'key str, V);
  fn into_iter(self) -> btree_map::IntoIter<&'key str, V> {
    self.map.into_iter()
  }
}
// Borrowed iteration over (key, value) pairs in key order.
impl<'table, V: Keyed<'table> + 'table> IntoIterator for &'table Table<'table, V> {
  type IntoIter = btree_map::Iter<'table, &'table str, V>;
  type Item = (&'table &'table str, &'table V);
  fn into_iter(self) -> btree_map::Iter<'table, &'table str, V> {
    self.map.iter()
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/unindent.rs | src/unindent.rs | #[must_use]
pub fn unindent(text: &str) -> String {
// find line start and end indices
let mut lines = Vec::new();
let mut start = 0;
for (i, c) in text.char_indices() {
if c == '\n' || i == text.len() - c.len_utf8() {
let end = i + c.len_utf8();
lines.push(&text[start..end]);
start = end;
}
}
let common_indentation = lines
.iter()
.filter(|line| !blank(line))
.copied()
.map(indentation)
.fold(
None,
|common_indentation, line_indentation| match common_indentation {
Some(common_indentation) => Some(common(common_indentation, line_indentation)),
None => Some(line_indentation),
},
)
.unwrap_or("");
let mut replacements = Vec::with_capacity(lines.len());
for (i, line) in lines.iter().enumerate() {
let blank = blank(line);
let first = i == 0;
let last = i == lines.len() - 1;
let replacement = match (blank, first, last) {
(true, false, false) => "\n",
(true, _, _) => "",
(false, _, _) => &line[common_indentation.len()..],
};
replacements.push(replacement);
}
replacements.into_iter().collect()
}
/// The leading run of spaces and tabs at the start of `line`.
fn indentation(line: &str) -> &str {
  // Byte index of the first character that is neither a space nor a tab,
  // or the whole line if every character is indentation.
  let end = line
    .char_indices()
    .find(|&(_, c)| !matches!(c, ' ' | '\t'))
    .map_or(line.len(), |(i, _)| i);
  &line[..end]
}
/// Whether `line` consists solely of ASCII whitespace; the empty string
/// counts as blank.
fn blank(line: &str) -> bool {
  line
    .bytes()
    .all(|byte| matches!(byte, b' ' | b'\t' | b'\r' | b'\n'))
}
/// The longest common prefix of `a` and `b`, compared character by
/// character, returned as a slice of `a`.
fn common<'s>(a: &'s str, b: &'s str) -> &'s str {
  let mut end = 0;
  for ((i, x), y) in a.char_indices().zip(b.chars()) {
    if x != y {
      break;
    }
    end = i + x.len_utf8();
  }
  &a[0..end]
}
#[cfg(test)]
mod tests {
  use super::*;

  #[test]
  fn unindents() {
    assert_eq!(unindent("foo"), "foo");
    assert_eq!(unindent("foo\nbar\nbaz\n"), "foo\nbar\nbaz\n");
    assert_eq!(unindent(""), "");
    assert_eq!(unindent("  foo\n  bar"), "foo\nbar");
    assert_eq!(unindent("  foo\n  bar\n\n"), "foo\nbar\n");
    // Leading and trailing blank lines are dropped entirely.
    assert_eq!(
      unindent(
        "
      hello
      bar
    "
      ),
      "hello\nbar\n"
    );
    assert_eq!(unindent("hello\n  bar\n  foo"), "hello\n  bar\n  foo");
    // Interior blank lines collapse to bare newlines.
    assert_eq!(
      unindent(
        "

      hello
      bar

      "
      ),
      "\nhello\nbar\n\n"
    );
  }

  #[test]
  fn indentations() {
    assert_eq!(indentation(""), "");
    assert_eq!(indentation("foo"), "");
    assert_eq!(indentation("  foo"), "  ");
    assert_eq!(indentation("\t\tfoo"), "\t\t");
    assert_eq!(indentation("\t \t foo"), "\t \t ");
  }

  #[test]
  fn blanks() {
    assert!(blank("   \n"));
    assert!(!blank("   foo\n"));
    assert!(blank("\t\t\n"));
  }

  #[test]
  fn commons() {
    assert_eq!(common("foo", "foobar"), "foo");
    assert_eq!(common("foo", "bar"), "");
    assert_eq!(common("", ""), "");
    assert_eq!(common("", "bar"), "");
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/compile_error.rs | src/compile_error.rs | use super::*;
/// An error produced while compiling a justfile: an error kind paired with
/// the token at which it occurred. The kind is boxed — presumably to keep
/// `Result`s containing this error small.
#[derive(Debug, PartialEq)]
pub(crate) struct CompileError<'src> {
  pub(crate) kind: Box<CompileErrorKind<'src>>,
  pub(crate) token: Token<'src>,
}
impl<'src> CompileError<'src> {
  /// The token at which the error occurred.
  pub(crate) fn context(&self) -> Token<'src> {
    self.token
  }

  /// Construct a `CompileError` at `token` with the given kind.
  pub(crate) fn new(token: Token<'src>, kind: CompileErrorKind<'src>) -> CompileError<'src> {
    Self {
      kind: Box::new(kind),
      token,
    }
  }

  /// The underlying error source, for kinds that wrap another error.
  pub(crate) fn source(&self) -> Option<&dyn std::error::Error> {
    if let CompileErrorKind::ArgumentPatternRegex { source } = &*self.kind {
      Some(source)
    } else {
      None
    }
  }
}
/// Return `s` with its first character uppercased; the uppercase mapping
/// may expand to multiple characters, e.g. `ß` → `SS`.
fn capitalize(s: &str) -> String {
  let mut chars = s.chars();
  let Some(first) = chars.next() else {
    return String::new();
  };
  let mut capitalized: String = first.to_uppercase().collect();
  capitalized.push_str(chars.as_str());
  capitalized
}
impl Display for CompileError<'_> {
fn fmt(&self, f: &mut Formatter) -> fmt::Result {
use CompileErrorKind::*;
match &*self.kind {
ArgAttributeValueRequiresOption => {
write!(
f,
"Argument attribute `value` only valid with `long` or `short`"
)
}
ArgumentPatternRegex { .. } => {
write!(f, "Failed to parse argument pattern")
}
AttributeArgumentCountMismatch {
attribute,
found,
min,
max,
} => {
write!(
f,
"Attribute `{attribute}` got {found} {} but takes ",
Count("argument", *found),
)?;
if min == max {
let expected = min;
write!(f, "{expected} {}", Count("argument", *expected))
} else if found < min {
write!(f, "at least {min} {}", Count("argument", *min))
} else {
write!(f, "at most {max} {}", Count("argument", *max))
}
}
AttributePositionalFollowsKeyword => {
write!(
f,
"Positional attribute arguments cannot follow keyword attribute arguments"
)
}
BacktickShebang => write!(f, "Backticks may not start with `#!`"),
CircularRecipeDependency { recipe, circle } => {
if circle.len() == 2 {
write!(f, "Recipe `{recipe}` depends on itself")
} else {
write!(
f,
"Recipe `{recipe}` has circular dependency `{}`",
circle.join(" -> ")
)
}
}
CircularVariableDependency { variable, circle } => {
if circle.len() == 2 {
write!(f, "Variable `{variable}` is defined in terms of itself")
} else {
write!(
f,
"Variable `{variable}` depends on its own value: `{}`",
circle.join(" -> "),
)
}
}
DependencyArgumentCountMismatch {
dependency,
found,
min,
max,
} => {
write!(
f,
"Dependency `{dependency}` got {found} {} but takes ",
Count("argument", *found),
)?;
if min == max {
let expected = min;
write!(f, "{expected} {}", Count("argument", *expected))
} else if found < min {
write!(f, "at least {min} {}", Count("argument", *min))
} else {
write!(f, "at most {max} {}", Count("argument", *max))
}
}
DuplicateArgAttribute { arg, first } => write!(
f,
"Recipe attribute for argument `{arg}` first used on line {} is duplicated on line {}",
first.ordinal(),
self.token.line.ordinal(),
),
DuplicateAttribute { attribute, first } => write!(
f,
"Recipe attribute `{attribute}` first used on line {} is duplicated on line {}",
first.ordinal(),
self.token.line.ordinal(),
),
DuplicateDefault { recipe } => write!(
f,
"Recipe `{recipe}` has duplicate `[default]` attribute, which may only appear once per module",
),
DuplicateOption { recipe, option } => {
write!(
f,
"Recipe `{recipe}` defines option `{option}` multiple times"
)
}
DuplicateParameter { recipe, parameter } => {
write!(f, "Recipe `{recipe}` has duplicate parameter `{parameter}`")
}
DuplicateSet { setting, first } => write!(
f,
"Setting `{setting}` first set on line {} is redefined on line {}",
first.ordinal(),
self.token.line.ordinal(),
),
DuplicateVariable { variable } => {
write!(f, "Variable `{variable}` has multiple definitions")
}
DuplicateUnexport { variable } => {
write!(f, "Variable `{variable}` is unexported multiple times")
}
ExitMessageAndNoExitMessageAttribute { recipe } => write!(
f,
"Recipe `{recipe}` has both `[exit-message]` and `[no-exit-message]` attributes"
),
ExpectedKeyword { expected, found } => {
let expected = List::or_ticked(expected);
if found.kind == TokenKind::Identifier {
write!(
f,
"Expected keyword {expected} but found identifier `{}`",
found.lexeme()
)
} else {
write!(f, "Expected keyword {expected} but found `{}`", found.kind)
}
}
ExportUnexported { variable } => {
write!(f, "Variable {variable} is both exported and unexported")
}
ExtraLeadingWhitespace => write!(f, "Recipe line has extra leading whitespace"),
ExtraneousAttributes { count } => {
write!(f, "Extraneous {}", Count("attribute", *count))
}
FunctionArgumentCountMismatch {
function,
found,
expected,
} => write!(
f,
"Function `{function}` called with {found} {} but takes {}",
Count("argument", *found),
expected.display(),
),
Include => write!(
f,
"The `!include` directive has been stabilized as `import`"
),
InconsistentLeadingWhitespace { expected, found } => write!(
f,
"Recipe line has inconsistent leading whitespace. Recipe started with `{}` but found \
line with `{}`",
ShowWhitespace(expected),
ShowWhitespace(found)
),
Internal { message } => write!(
f,
"Internal error, this may indicate a bug in just: {message}\n\
consider filing an issue: https://github.com/casey/just/issues/new"
),
InvalidAttribute {
item_name,
item_kind,
attribute,
} => write!(
f,
"{item_kind} `{item_name}` has invalid attribute `{}`",
attribute.name(),
),
InvalidEscapeSequence { character } => write!(
f,
"`\\{}` is not a valid escape sequence",
match character {
'`' => r"\`".to_owned(),
'\\' => r"\".to_owned(),
'\'' => r"'".to_owned(),
'"' => r#"""#.to_owned(),
_ => character.escape_default().collect(),
}
),
MismatchedClosingDelimiter {
open,
open_line,
close,
} => write!(
f,
"Mismatched closing delimiter `{}`. (Did you mean to close the `{}` on line {}?)",
close.close(),
open.open(),
open_line.ordinal(),
),
MixedLeadingWhitespace { whitespace } => write!(
f,
"Found a mix of tabs and spaces in leading whitespace: `{}`\nLeading whitespace may \
consist of tabs or spaces, but not both",
ShowWhitespace(whitespace)
),
NoCdAndWorkingDirectoryAttribute { recipe } => write!(
f,
"Recipe `{recipe}` has both `[no-cd]` and `[working-directory]` attributes"
),
OptionNameContainsEqualSign { parameter } => {
write!(
f,
"Option name for parameter `{parameter}` contains equal sign"
)
}
OptionNameEmpty { parameter } => {
write!(f, "Option name for parameter `{parameter}` is empty")
}
ParameterFollowsVariadicParameter { parameter } => {
write!(f, "Parameter `{parameter}` follows variadic parameter")
}
ParsingRecursionDepthExceeded => write!(f, "Parsing recursion depth exceeded"),
Redefinition {
first,
first_type,
name,
second_type,
} => {
if first_type == second_type {
write!(
f,
"{} `{name}` first defined on line {} is redefined on line {}",
capitalize(first_type),
first.ordinal(),
self.token.line.ordinal(),
)
} else {
write!(
f,
"{} `{name}` defined on line {} is redefined as {} {second_type} on line {}",
capitalize(first_type),
first.ordinal(),
if *second_type == "alias" { "an" } else { "a" },
self.token.line.ordinal(),
)
}
}
ShellExpansion { err } => write!(f, "Shell expansion failed: {err}"),
ShortOptionWithMultipleCharacters { parameter } => {
write!(
f,
"Short option name for parameter `{parameter}` contains multiple characters"
)
}
RequiredParameterFollowsDefaultParameter { parameter } => write!(
f,
"Non-default parameter `{parameter}` follows default parameter"
),
UndefinedArgAttribute { argument } => {
write!(f, "Argument attribute for undefined argument `{argument}`")
}
UndefinedVariable { variable } => write!(f, "Variable `{variable}` not defined"),
UnexpectedCharacter { expected } => {
write!(f, "Expected character {}", List::or_ticked(expected))
}
UnexpectedClosingDelimiter { close } => {
write!(f, "Unexpected closing delimiter `{}`", close.close())
}
UnexpectedEndOfToken { expected } => {
write!(
f,
"Expected character {} but found end-of-file",
List::or_ticked(expected),
)
}
UnexpectedToken { expected, found } => {
write!(f, "Expected {}, but found {found}", List::or(expected))
}
UnicodeEscapeCharacter { character } => {
write!(f, "expected hex digit [0-9A-Fa-f] but found `{character}`")
}
UnicodeEscapeDelimiter { character } => write!(
f,
"expected unicode escape sequence delimiter `{{` but found `{character}`"
),
UnicodeEscapeEmpty => write!(f, "unicode escape sequences must not be empty"),
UnicodeEscapeLength { hex } => write!(
f,
"unicode escape sequence starting with `\\u{{{hex}` longer than six hex digits"
),
UnicodeEscapeRange { hex } => {
write!(
f,
"unicode escape sequence value `{hex}` greater than maximum valid code point `10FFFF`",
)
}
UnicodeEscapeUnterminated => write!(f, "unterminated unicode escape sequence"),
UnknownAliasTarget { alias, target } => {
write!(f, "Alias `{alias}` has an unknown target `{target}`")
}
AttributeKeyMissingValue { key } => {
write!(
f,
"Attribute key `{key}` requires value",
)
}
UnknownAttributeKeyword { attribute, keyword } => {
write!(f, "Unknown keyword `{keyword}` for `{attribute}` attribute")
}
UnknownAttribute { attribute } => write!(f, "Unknown attribute `{attribute}`"),
UnknownDependency { recipe, unknown } => {
write!(f, "Recipe `{recipe}` has unknown dependency `{unknown}`")
}
UnknownFunction { function } => write!(f, "Call to unknown function `{function}`"),
UnknownSetting { setting } => write!(f, "Unknown setting `{setting}`"),
UnknownStartOfToken { start } => {
write!(f, "Unknown start of token '{start}'")?;
if !start.is_ascii_graphic() {
write!(f, " (U+{:04X})", *start as u32)?;
}
Ok(())
}
UnpairedCarriageReturn => write!(f, "Unpaired carriage return"),
UnterminatedBacktick => write!(f, "Unterminated backtick"),
UnterminatedInterpolation => write!(f, "Unterminated interpolation"),
UnterminatedString => write!(f, "Unterminated string"),
VariadicParameterWithOption => write!(f, "Variadic parameters may not be options"),
}
}
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/output_error.rs | src/output_error.rs | use super::*;
/// Error capturing the output of a child process, e.g. when evaluating an
/// expression that runs a command and reads its stdout.
#[derive(Debug)]
pub(crate) enum OutputError {
  /// Non-zero exit code
  Code(i32),
  /// Interrupted by signal
  Interrupted(Signal),
  /// IO error
  Io(io::Error),
  /// Terminated by signal
  Signal(i32),
  /// Unknown failure
  Unknown,
  /// Stdout not UTF-8
  Utf8(str::Utf8Error),
}
impl OutputError {
  /// Convert a process `ExitStatus` into `Ok(())` on success, or the
  /// appropriate `OutputError` on failure: `Code` for a non-zero exit
  /// code, `Signal` when the platform reports termination by signal, and
  /// `Unknown` otherwise.
  pub(crate) fn result_from_exit_status(exit_status: ExitStatus) -> Result<(), OutputError> {
    if let Some(code) = exit_status.code() {
      if code == 0 {
        Ok(())
      } else {
        Err(Self::Code(code))
      }
    } else {
      // No exit code was reported, so check whether the platform can
      // extract a terminating signal from the status.
      Platform::signal_from_exit_status(exit_status)
        .map_or(Err(Self::Unknown), |signal| Err(Self::Signal(signal)))
    }
  }
}
impl Display for OutputError {
  /// Human-readable description of the output-capture failure.
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    match *self {
      Self::Code(code) => write!(f, "Process exited with status code {code}"),
      // Fixed typo in user-facing message: "succeded" -> "succeeded".
      Self::Interrupted(signal) => write!(
        f,
        "Process succeeded but `just` was interrupted by signal {signal}"
      ),
      Self::Io(ref io_error) => write!(f, "Error executing process: {io_error}"),
      Self::Signal(signal) => write!(f, "Process terminated by signal {signal}"),
      Self::Unknown => write!(f, "Process experienced an unknown failure"),
      Self::Utf8(ref err) => write!(f, "Could not convert process stdout to UTF-8: {err}"),
    }
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/search.rs | src/search.rs | use {super::*, std::path::Component};
/// Name used when creating a new justfile: the first entry of `JUSTFILE_NAMES`.
const DEFAULT_JUSTFILE_NAME: &str = JUSTFILE_NAMES[0];
/// Filenames recognized as justfiles; matched case-insensitively during search.
pub(crate) const JUSTFILE_NAMES: [&str; 2] = ["justfile", ".justfile"];
/// Directory entries whose presence marks a directory as a project root
/// (version-control metadata directories).
const PROJECT_ROOT_CHILDREN: &[&str] = &[".bzr", ".git", ".hg", ".svn", "_darcs"];
/// The result of searching for a justfile: the justfile's path together with
/// the working directory to associate with it.
#[derive(Debug)]
pub(crate) struct Search {
  /// Path to the justfile that was found or specified.
  pub(crate) justfile: PathBuf,
  /// Directory associated with the justfile, typically its parent directory
  /// (see `working_directory_from_justfile`) unless specified explicitly.
  pub(crate) working_directory: PathBuf,
}
impl Search {
  /// Candidate `(directory, filename)` pairs to search for a global
  /// justfile, in priority order: the platform config directory, then
  /// `~/.config/just`, then the home directory itself (where both
  /// `justfile` and `.justfile` are accepted).
  fn global_justfile_paths() -> Vec<(PathBuf, &'static str)> {
    let mut paths = Vec::new();
    if let Some(config_dir) = dirs::config_dir() {
      paths.push((config_dir.join("just"), DEFAULT_JUSTFILE_NAME));
    }
    if let Some(home_dir) = dirs::home_dir() {
      // Push `~/.config/just` explicitly so it is searched even on
      // platforms where `dirs::config_dir()` resolves elsewhere.
      paths.push((home_dir.join(".config").join("just"), DEFAULT_JUSTFILE_NAME));
      for justfile_name in JUSTFILE_NAMES {
        paths.push((home_dir.clone(), justfile_name));
      }
    }
    paths
  }

  /// Find justfile given search configuration and invocation directory
  pub(crate) fn find(
    ceiling: Option<&Path>,
    invocation_directory: &Path,
    search_config: &SearchConfig,
  ) -> SearchResult<Self> {
    match search_config {
      // No explicit path: search upwards from the invocation directory.
      SearchConfig::FromInvocationDirectory => {
        Self::find_in_directory(ceiling, invocation_directory)
      }
      // Search upwards from a user-supplied directory instead.
      SearchConfig::FromSearchDirectory { search_directory } => {
        let search_directory = Self::clean(invocation_directory, search_directory);
        let justfile = Self::justfile(ceiling, &search_directory)?;
        let working_directory = Self::working_directory_from_justfile(&justfile)?;
        Ok(Self {
          justfile,
          working_directory,
        })
      }
      // Use the global justfile; the working directory is the project root
      // above the invocation directory.
      SearchConfig::GlobalJustfile => Ok(Self {
        justfile: Self::find_global_justfile()?,
        working_directory: Self::project_root(ceiling, invocation_directory)?,
      }),
      // Explicit justfile path: working directory is its parent.
      SearchConfig::WithJustfile { justfile } => {
        let justfile = Self::clean(invocation_directory, justfile);
        let working_directory = Self::working_directory_from_justfile(&justfile)?;
        Ok(Self {
          justfile,
          working_directory,
        })
      }
      // Both paths given explicitly: just clean them, no searching.
      SearchConfig::WithJustfileAndWorkingDirectory {
        justfile,
        working_directory,
      } => Ok(Self {
        justfile: Self::clean(invocation_directory, justfile),
        working_directory: Self::clean(invocation_directory, working_directory),
      }),
    }
  }

  /// Return the first global justfile candidate that exists, matching
  /// filenames case-insensitively. Candidate directories that cannot be
  /// read are silently skipped; errors reading individual entries of a
  /// readable directory are reported.
  fn find_global_justfile() -> SearchResult<PathBuf> {
    for (directory, filename) in Self::global_justfile_paths() {
      if let Ok(read_dir) = fs::read_dir(&directory) {
        for entry in read_dir {
          let entry = entry.map_err(|io_error| SearchError::Io {
            io_error,
            directory: directory.clone(),
          })?;
          if let Some(candidate) = entry.file_name().to_str() {
            if candidate.eq_ignore_ascii_case(filename) {
              return Ok(entry.path());
            }
          }
        }
      }
    }
    Err(SearchError::GlobalJustfileNotFound)
  }

  /// Find justfile starting from parent directory of current justfile
  pub(crate) fn search_parent_directory(&self, ceiling: Option<&Path>) -> SearchResult<Self> {
    // Two `parent()` calls: the first strips the justfile's filename, the
    // second moves above the directory that contains it, so the new search
    // cannot rediscover the current justfile.
    let parent = self
      .justfile
      .parent()
      .and_then(|path| path.parent())
      .ok_or_else(|| SearchError::JustfileHadNoParent {
        path: self.justfile.clone(),
      })?;
    Self::find_in_directory(ceiling, parent)
  }

  /// Find justfile starting in given directory searching upwards in directory tree
  fn find_in_directory(ceiling: Option<&Path>, starting_dir: &Path) -> SearchResult<Self> {
    let justfile = Self::justfile(ceiling, starting_dir)?;
    let working_directory = Self::working_directory_from_justfile(&justfile)?;
    Ok(Self {
      justfile,
      working_directory,
    })
  }

  /// Get working directory and justfile path for newly-initialized justfile
  pub(crate) fn init(
    search_config: &SearchConfig,
    invocation_directory: &Path,
    ceiling: Option<&Path>,
  ) -> SearchResult<Self> {
    match search_config {
      // Create the new justfile at the project root above the invocation
      // directory.
      SearchConfig::FromInvocationDirectory => {
        let working_directory = Self::project_root(ceiling, invocation_directory)?;
        let justfile = working_directory.join(DEFAULT_JUSTFILE_NAME);
        Ok(Self {
          justfile,
          working_directory,
        })
      }
      // Same, but root-finding starts from a user-supplied directory.
      SearchConfig::FromSearchDirectory { search_directory } => {
        let search_directory = Self::clean(invocation_directory, search_directory);
        let working_directory = Self::project_root(ceiling, &search_directory)?;
        let justfile = working_directory.join(DEFAULT_JUSTFILE_NAME);
        Ok(Self {
          justfile,
          working_directory,
        })
      }
      // There is no sensible location at which to initialize a global
      // justfile.
      SearchConfig::GlobalJustfile => Err(SearchError::GlobalJustfileInit),
      SearchConfig::WithJustfile { justfile } => {
        let justfile = Self::clean(invocation_directory, justfile);
        let working_directory = Self::working_directory_from_justfile(&justfile)?;
        Ok(Self {
          justfile,
          working_directory,
        })
      }
      SearchConfig::WithJustfileAndWorkingDirectory {
        justfile,
        working_directory,
      } => Ok(Self {
        justfile: Self::clean(invocation_directory, justfile),
        working_directory: Self::clean(invocation_directory, working_directory),
      }),
    }
  }

  /// Search upwards from `directory` for a file whose name matches one of
  /// `JUSTFILE_NAMES`
  fn justfile(ceiling: Option<&Path>, directory: &Path) -> SearchResult<PathBuf> {
    for directory in directory.ancestors() {
      // `BTreeSet` deduplicates candidates and keeps them sorted for
      // stable error reporting.
      let mut candidates = BTreeSet::new();
      let entries = fs::read_dir(directory).map_err(|io_error| SearchError::Io {
        io_error,
        directory: directory.to_owned(),
      })?;
      for entry in entries {
        let entry = entry.map_err(|io_error| SearchError::Io {
          io_error,
          directory: directory.to_owned(),
        })?;
        if let Some(name) = entry.file_name().to_str() {
          // Justfile names are matched case-insensitively.
          for justfile_name in JUSTFILE_NAMES {
            if name.eq_ignore_ascii_case(justfile_name) {
              candidates.insert(entry.path());
            }
          }
        }
      }
      match candidates.len() {
        // Nothing in this directory; keep ascending.
        0 => {}
        1 => return Ok(candidates.into_iter().next().unwrap()),
        // Multiple matches in the same directory are ambiguous.
        _ => return Err(SearchError::MultipleCandidates { candidates }),
      }
      // The ceiling directory itself is searched, but the ascent stops
      // there.
      if let Some(ceiling) = ceiling {
        if directory == ceiling {
          break;
        }
      }
    }
    Err(SearchError::NotFound)
  }

  /// Join `path` onto `invocation_directory` and normalize the result
  /// lexically, without touching the file system: a `..` component pops a
  /// preceding `Normal` component, and is dropped otherwise (so paths never
  /// ascend above the root).
  fn clean(invocation_directory: &Path, path: &Path) -> PathBuf {
    let path = invocation_directory.join(path);
    let mut clean = Vec::new();
    for component in path.components() {
      if component == Component::ParentDir {
        if let Some(Component::Normal(_)) = clean.last() {
          clean.pop();
        }
      } else {
        clean.push(component);
      }
    }
    clean.into_iter().collect()
  }

  /// Search upwards from `directory` for the root directory of a software
  /// project, as determined by the presence of one of the version control
  /// system directories given in `PROJECT_ROOT_CHILDREN`
  fn project_root(ceiling: Option<&Path>, directory: &Path) -> SearchResult<PathBuf> {
    for directory in directory.ancestors() {
      let entries = fs::read_dir(directory).map_err(|io_error| SearchError::Io {
        io_error,
        directory: directory.to_owned(),
      })?;
      for entry in entries {
        let entry = entry.map_err(|io_error| SearchError::Io {
          io_error,
          directory: directory.to_owned(),
        })?;
        for project_root_child in PROJECT_ROOT_CHILDREN.iter().copied() {
          if entry.file_name() == project_root_child {
            return Ok(directory.to_owned());
          }
        }
      }
      if let Some(ceiling) = ceiling {
        if directory == ceiling {
          break;
        }
      }
    }
    // No VCS marker found: fall back to the starting directory. (The loop
    // variable only shadowed the parameter, which is unchanged here.)
    Ok(directory.to_owned())
  }

  /// The working directory associated with a justfile is the directory that
  /// contains it.
  fn working_directory_from_justfile(justfile: &Path) -> SearchResult<PathBuf> {
    Ok(
      justfile
        .parent()
        .ok_or_else(|| SearchError::JustfileHadNoParent {
          path: justfile.to_path_buf(),
        })?
        .to_owned(),
    )
  }
}
#[cfg(test)]
mod tests {
  use super::*;
  use temptree::temptree;

  #[test]
  fn not_found() {
    // An empty temporary directory contains no justfile.
    let tmp = testing::tempdir();
    match Search::justfile(None, tmp.path()) {
      Err(SearchError::NotFound) => {}
      _ => panic!("No justfile found error was expected"),
    }
  }

  #[test]
  fn multiple_candidates() {
    // Two differently-cased justfiles in one directory are ambiguous,
    // since matching is case-insensitive.
    let tmp = testing::tempdir();
    let mut path = tmp.path().to_path_buf();
    path.push(DEFAULT_JUSTFILE_NAME);
    fs::write(&path, "default:\n\techo ok").unwrap();
    path.pop();
    path.push(DEFAULT_JUSTFILE_NAME.to_uppercase());
    if fs::File::open(path.as_path()).is_ok() {
      // We are in case-insensitive file system
      return;
    }
    fs::write(&path, "default:\n\techo ok").unwrap();
    path.pop();
    match Search::justfile(None, path.as_path()) {
      Err(SearchError::MultipleCandidates { .. }) => {}
      _ => panic!("Multiple candidates error was expected"),
    }
  }

  #[test]
  fn found() {
    // A justfile in the searched directory itself is found.
    let tmp = testing::tempdir();
    let mut path = tmp.path().to_path_buf();
    path.push(DEFAULT_JUSTFILE_NAME);
    fs::write(&path, "default:\n\techo ok").unwrap();
    path.pop();
    if let Err(err) = Search::justfile(None, path.as_path()) {
      panic!("No errors were expected: {err}");
    }
  }

  #[test]
  fn found_spongebob_case() {
    // The filename match is case-insensitive, so `JuStFiLe` is found.
    let tmp = testing::tempdir();
    let mut path = tmp.path().to_path_buf();
    let spongebob_case = DEFAULT_JUSTFILE_NAME
      .chars()
      .enumerate()
      .map(|(i, c)| {
        if i % 2 == 0 {
          c.to_ascii_uppercase()
        } else {
          c
        }
      })
      .collect::<String>();
    path.push(spongebob_case);
    fs::write(&path, "default:\n\techo ok").unwrap();
    path.pop();
    if let Err(err) = Search::justfile(None, path.as_path()) {
      panic!("No errors were expected: {err}");
    }
  }

  #[test]
  fn found_from_inner_dir() {
    // The search ascends out of nested subdirectories.
    let tmp = testing::tempdir();
    let mut path = tmp.path().to_path_buf();
    path.push(DEFAULT_JUSTFILE_NAME);
    fs::write(&path, "default:\n\techo ok").unwrap();
    path.pop();
    path.push("a");
    fs::create_dir(&path).expect("test justfile search: failed to create intermediary directory");
    path.push("b");
    fs::create_dir(&path).expect("test justfile search: failed to create intermediary directory");
    if let Err(err) = Search::justfile(None, path.as_path()) {
      panic!("No errors were expected: {err}");
    }
  }

  #[test]
  fn found_and_stopped_at_first_justfile() {
    // When justfiles exist at multiple levels, the nearest one wins.
    let tmp = testing::tempdir();
    let mut path = tmp.path().to_path_buf();
    path.push(DEFAULT_JUSTFILE_NAME);
    fs::write(&path, "default:\n\techo ok").unwrap();
    path.pop();
    path.push("a");
    fs::create_dir(&path).expect("test justfile search: failed to create intermediary directory");
    path.push(DEFAULT_JUSTFILE_NAME);
    fs::write(&path, "default:\n\techo ok").unwrap();
    path.pop();
    path.push("b");
    fs::create_dir(&path).expect("test justfile search: failed to create intermediary directory");
    match Search::justfile(None, path.as_path()) {
      Ok(found_path) => {
        path.pop();
        path.push(DEFAULT_JUSTFILE_NAME);
        assert_eq!(found_path, path);
      }
      Err(err) => panic!("No errors were expected: {err}"),
    }
  }

  #[test]
  fn justfile_symlink_parent() {
    // A justfile that is a symlink is returned as-is; the working directory
    // is the symlink's parent, not the target's.
    let tmp = temptree! {
      src: "",
      sub: {},
    };
    let src = tmp.path().join("src");
    let sub = tmp.path().join("sub");
    let justfile = sub.join("justfile");
    #[cfg(unix)]
    std::os::unix::fs::symlink(src, &justfile).unwrap();
    #[cfg(windows)]
    std::os::windows::fs::symlink_file(&src, &justfile).unwrap();
    let search_config = SearchConfig::FromInvocationDirectory;
    let search = Search::find(None, &sub, &search_config).unwrap();
    assert_eq!(search.justfile, justfile);
    assert_eq!(search.working_directory, sub);
  }

  #[test]
  fn clean() {
    // Each case is (prefix, suffix, expected): `..` pops a normal component
    // and never ascends above the root.
    let cases = &[
      ("/", "foo", "/foo"),
      ("/bar", "/foo", "/foo"),
      #[cfg(windows)]
      ("//foo", "bar//baz", "//foo\\bar\\baz"),
      #[cfg(not(windows))]
      ("/", "..", "/"),
      ("/", "/..", "/"),
      ("/..", "", "/"),
      ("/../../../..", "../../../", "/"),
      ("/.", "./", "/"),
      ("/foo/../", "bar", "/bar"),
      ("/foo/bar", "..", "/foo"),
      ("/foo/bar/", "..", "/foo"),
    ];
    for (prefix, suffix, want) in cases {
      let have = Search::clean(Path::new(prefix), Path::new(suffix));
      assert_eq!(have, Path::new(want));
    }
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/tree.rs | src/tree.rs | use {super::*, std::borrow::Cow};
/// Construct a `Tree` from a symbolic expression literal. This macro, and the
/// Tree type, are only used in the Parser unit tests, providing a concise
/// notation for representing the expected results of parsing a given string.
macro_rules! tree {
  // A parenthesized group of token trees becomes a `Tree::List` of its
  // recursively-converted children.
  { ($($child:tt)*) } => {
    $crate::tree::Tree::List(vec![$(tree!($child),)*])
  };
  // A bare identifier becomes an atom containing its name.
  { $atom:ident } => {
    $crate::tree::Tree::atom(stringify!($atom))
  };
  // A literal becomes an atom wrapped in double quotes, mirroring
  // `Tree::string`.
  { $atom:literal } => {
    $crate::tree::Tree::atom(format!("\"{}\"", $atom))
  };
  // The remaining rules map operator tokens — which cannot be captured by
  // the `ident` or `literal` fragment specifiers — to atoms of their
  // source text.
  { # } => {
    $crate::tree::Tree::atom("#")
  };
  { ? } => {
    $crate::tree::Tree::atom("?")
  };
  { + } => {
    $crate::tree::Tree::atom("+")
  };
  { * } => {
    $crate::tree::Tree::atom("*")
  };
  { && } => {
    $crate::tree::Tree::atom("&&")
  };
  { == } => {
    $crate::tree::Tree::atom("==")
  };
  { != } => {
    $crate::tree::Tree::atom("!=")
  };
}
/// A `Tree` is either…
#[derive(Debug, PartialEq)]
pub(crate) enum Tree<'text> {
  /// …an atom containing text, or…
  Atom(Cow<'text, str>),
  /// …a list containing zero or more `Tree`s.
  List(Vec<Self>),
}

impl<'text> Tree<'text> {
  /// Construct an Atom from a text scalar
  pub(crate) fn atom(text: impl Into<Cow<'text, str>>) -> Self {
    Self::Atom(text.into())
  }

  /// Construct a List from an iterable of trees
  pub(crate) fn list(children: impl IntoIterator<Item = Self>) -> Self {
    Self::List(children.into_iter().collect())
  }

  /// Convenience function to create an atom containing quoted text
  pub(crate) fn string(contents: impl AsRef<str>) -> Self {
    Self::atom(format!("\"{}\"", contents.as_ref()))
  }

  /// Push a child node into self, turning it into a List if it was an Atom
  pub(crate) fn push(self, tree: impl Into<Self>) -> Self {
    match self {
      Self::List(mut children) => {
        children.push(tree.into());
        Self::List(children)
      }
      // An atom becomes a two-element list: itself, then the new child.
      Self::Atom(text) => Self::List(vec![Self::Atom(text), tree.into()]),
    }
  }

  /// Extend a self with a tail of Trees, turning self into a List if it was
  /// an Atom
  pub(crate) fn extend<I, T>(self, tail: I) -> Self
  where
    I: IntoIterator<Item = T>,
    T: Into<Self>,
  {
    let mut head = match self {
      Self::List(children) => children,
      Self::Atom(text) => vec![Self::Atom(text)],
    };
    // `Vec::extend` reserves capacity from the iterator's size hint,
    // instead of pushing one element at a time.
    head.extend(tail.into_iter().map(T::into));
    Self::List(head)
  }

  /// Like `push`, but modify self in-place
  pub(crate) fn push_mut(&mut self, tree: impl Into<Self>) {
    // Temporarily swap in an empty list so we can move out of `self` and
    // reuse the by-value `push`.
    *self = mem::replace(self, Self::List(Vec::new())).push(tree.into());
  }
}

impl Display for Tree<'_> {
  /// Render the tree as a symbolic expression: atoms verbatim, lists as
  /// space-separated children in parentheses.
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    match self {
      Self::List(children) => {
        write!(f, "(")?;
        for (i, child) in children.iter().enumerate() {
          if i > 0 {
            write!(f, " ")?;
          }
          write!(f, "{child}")?;
        }
        write!(f, ")")
      }
      Self::Atom(text) => write!(f, "{text}"),
    }
  }
}

/// Anything convertible to text converts to an `Atom`, so `&str` and
/// `String` may be passed wherever `impl Into<Tree>` is accepted.
impl<'text, T> From<T> for Tree<'text>
where
  T: Into<Cow<'text, str>>,
{
  fn from(text: T) -> Self {
    Self::Atom(text.into())
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/which.rs | src/which.rs | use super::*;
pub(crate) fn which(context: function::Context, name: &str) -> Result<Option<String>, String> {
let name = Path::new(name);
let candidates = match name.components().count() {
0 => return Err("empty command".into()),
1 => {
// cmd is a regular command
env::split_paths(&env::var_os("PATH").ok_or("`PATH` environment variable not set")?)
.map(|path| path.join(name))
.collect()
}
_ => {
// cmd contains a path separator, treat it as a path
vec![name.into()]
}
};
for mut candidate in candidates {
if candidate.is_relative() {
// This candidate is a relative path, either because the user invoked `which("rel/path")`,
// or because there was a relative path in `PATH`. Resolve it to an absolute path,
// relative to the working directory of the just invocation.
candidate = context
.execution_context
.working_directory()
.join(candidate);
}
candidate = candidate.lexiclean();
if is_executable::is_executable(&candidate) {
return candidate
.to_str()
.map(|candidate| Some(candidate.into()))
.ok_or_else(|| {
format!(
"Executable path is not valid unicode: {}",
candidate.display()
)
});
}
}
Ok(None)
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/keyword.rs | src/keyword.rs | use super::*;
/// Keywords recognized by just. `strum`'s kebab-case serialization means a
/// multi-word variant like `IgnoreComments` corresponds to the source
/// lexeme `ignore-comments`.
#[derive(Debug, Eq, PartialEq, IntoStaticStr, Display, Copy, Clone, EnumString)]
#[strum(serialize_all = "kebab_case")]
pub(crate) enum Keyword {
  Alias,
  AllowDuplicateRecipes,
  AllowDuplicateVariables,
  Assert,
  DotenvFilename,
  DotenvLoad,
  DotenvOverride,
  DotenvPath,
  DotenvRequired,
  Else,
  Export,
  F,
  Fallback,
  False,
  If,
  IgnoreComments,
  Import,
  Mod,
  NoExitMessage,
  PositionalArguments,
  Quiet,
  ScriptInterpreter,
  Set,
  Shell,
  Tempdir,
  True,
  Unexport,
  Unstable,
  WindowsPowershell,
  WindowsShell,
  WorkingDirectory,
  X,
}
impl Keyword {
  /// Parse a keyword from its source text, returning `None` when the
  /// lexeme is not a recognized keyword.
  pub(crate) fn from_lexeme(lexeme: &str) -> Option<Keyword> {
    match lexeme.parse() {
      Ok(keyword) => Some(keyword),
      Err(_) => None,
    }
  }

  /// The keyword's source text, e.g. `ignore-comments` for
  /// `Keyword::IgnoreComments`.
  pub(crate) fn lexeme(self) -> &'static str {
    <&'static str>::from(self)
  }
}
impl<'a> PartialEq<&'a str> for Keyword {
  /// A keyword equals a string slice exactly when the slice is its lexeme,
  /// allowing comparisons like `keyword == "alias"`.
  fn eq(&self, other: &&'a str) -> bool {
    *other == self.lexeme()
  }
}
#[cfg(test)]
mod tests {
  use super::*;

  // Verify strum's kebab-case serialization for both a single-letter and a
  // multi-word variant.
  #[test]
  fn keyword_case() {
    assert_eq!(Keyword::X.lexeme(), "x");
    assert_eq!(Keyword::IgnoreComments.lexeme(), "ignore-comments");
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/function.rs | src/function.rs | use {
super::*,
heck::{
ToKebabCase, ToLowerCamelCase, ToShoutyKebabCase, ToShoutySnakeCase, ToSnakeCase, ToTitleCase,
ToUpperCamelCase,
},
semver::{Version, VersionReq},
std::collections::HashSet,
Function::*,
};
#[allow(clippy::arbitrary_source_item_ordering)]
pub(crate) enum Function {
Nullary(fn(Context) -> FunctionResult),
Unary(fn(Context, &str) -> FunctionResult),
UnaryOpt(fn(Context, &str, Option<&str>) -> FunctionResult),
UnaryPlus(fn(Context, &str, &[String]) -> FunctionResult),
Binary(fn(Context, &str, &str) -> FunctionResult),
BinaryPlus(fn(Context, &str, &str, &[String]) -> FunctionResult),
Ternary(fn(Context, &str, &str, &str) -> FunctionResult),
}
pub(crate) struct Context<'src: 'run, 'run> {
pub(crate) execution_context: &'run ExecutionContext<'src, 'run>,
pub(crate) is_dependency: bool,
pub(crate) name: Name<'src>,
pub(crate) scope: &'run Scope<'src, 'run>,
}
pub(crate) fn get(name: &str) -> Option<Function> {
let name = if let Some(prefix) = name.strip_suffix("_dir") {
format!("{prefix}_directory")
} else if let Some(prefix) = name.strip_suffix("_dir_native") {
format!("{prefix}_directory_native")
} else {
name.into()
};
let function = match name.as_str() {
"absolute_path" => Unary(absolute_path),
"append" => Binary(append),
"arch" => Nullary(arch),
"blake3" => Unary(blake3),
"blake3_file" => Unary(blake3_file),
"cache_directory" => Nullary(|_| dir("cache", dirs::cache_dir)),
"canonicalize" => Unary(canonicalize),
"capitalize" => Unary(capitalize),
"choose" => Binary(choose),
"clean" => Unary(clean),
"config_directory" => Nullary(|_| dir("config", dirs::config_dir)),
"config_local_directory" => Nullary(|_| dir("local config", dirs::config_local_dir)),
"data_directory" => Nullary(|_| dir("data", dirs::data_dir)),
"data_local_directory" => Nullary(|_| dir("local data", dirs::data_local_dir)),
"datetime" => Unary(datetime),
"datetime_utc" => Unary(datetime_utc),
"encode_uri_component" => Unary(encode_uri_component),
"env" => UnaryOpt(env),
"env_var" => Unary(env_var),
"env_var_or_default" => Binary(env_var_or_default),
"error" => Unary(error),
"executable_directory" => Nullary(|_| dir("executable", dirs::executable_dir)),
"extension" => Unary(extension),
"file_name" => Unary(file_name),
"file_stem" => Unary(file_stem),
"home_directory" => Nullary(|_| dir("home", dirs::home_dir)),
"invocation_directory" => Nullary(invocation_directory),
"invocation_directory_native" => Nullary(invocation_directory_native),
"is_dependency" => Nullary(is_dependency),
"join" => BinaryPlus(join),
"just_executable" => Nullary(just_executable),
"just_pid" => Nullary(just_pid),
"justfile" => Nullary(justfile),
"justfile_directory" => Nullary(justfile_directory),
"kebabcase" => Unary(kebabcase),
"lowercamelcase" => Unary(lowercamelcase),
"lowercase" => Unary(lowercase),
"module_directory" => Nullary(module_directory),
"module_file" => Nullary(module_file),
"num_cpus" => Nullary(num_cpus),
"os" => Nullary(os),
"os_family" => Nullary(os_family),
"parent_directory" => Unary(parent_directory),
"path_exists" => Unary(path_exists),
"prepend" => Binary(prepend),
"quote" => Unary(quote),
"read" => Unary(read),
"replace" => Ternary(replace),
"replace_regex" => Ternary(replace_regex),
"require" => Unary(require),
"semver_matches" => Binary(semver_matches),
"sha256" => Unary(sha256),
"sha256_file" => Unary(sha256_file),
"shell" => UnaryPlus(shell),
"shoutykebabcase" => Unary(shoutykebabcase),
"shoutysnakecase" => Unary(shoutysnakecase),
"snakecase" => Unary(snakecase),
"source_directory" => Nullary(source_directory),
"source_file" => Nullary(source_file),
"style" => Unary(style),
"titlecase" => Unary(titlecase),
"trim" => Unary(trim),
"trim_end" => Unary(trim_end),
"trim_end_match" => Binary(trim_end_match),
"trim_end_matches" => Binary(trim_end_matches),
"trim_start" => Unary(trim_start),
"trim_start_match" => Binary(trim_start_match),
"trim_start_matches" => Binary(trim_start_matches),
"uppercamelcase" => Unary(uppercamelcase),
"uppercase" => Unary(uppercase),
"uuid" => Nullary(uuid),
"which" => Unary(which),
"without_extension" => Unary(without_extension),
_ => return None,
};
Some(function)
}
impl Function {
pub(crate) fn argc(&self) -> RangeInclusive<usize> {
match *self {
Nullary(_) => 0..=0,
Unary(_) => 1..=1,
UnaryOpt(_) => 1..=2,
UnaryPlus(_) => 1..=usize::MAX,
Binary(_) => 2..=2,
BinaryPlus(_) => 2..=usize::MAX,
Ternary(_) => 3..=3,
}
}
}
fn absolute_path(context: Context, path: &str) -> FunctionResult {
let abs_path_unchecked = context
.execution_context
.working_directory()
.join(path)
.lexiclean();
match abs_path_unchecked.to_str() {
Some(absolute_path) => Ok(absolute_path.to_owned()),
None => Err(format!(
"Working directory is not valid unicode: {}",
context.execution_context.search.working_directory.display()
)),
}
}
fn append(_context: Context, suffix: &str, s: &str) -> FunctionResult {
Ok(
s.split_whitespace()
.map(|s| format!("{s}{suffix}"))
.collect::<Vec<String>>()
.join(" "),
)
}
fn arch(_context: Context) -> FunctionResult {
Ok(target::arch().to_owned())
}
fn blake3(_context: Context, s: &str) -> FunctionResult {
Ok(blake3::hash(s.as_bytes()).to_string())
}
fn blake3_file(context: Context, path: &str) -> FunctionResult {
let path = context.execution_context.working_directory().join(path);
let mut hasher = blake3::Hasher::new();
hasher
.update_mmap_rayon(&path)
.map_err(|err| format!("Failed to hash `{}`: {err}", path.display()))?;
Ok(hasher.finalize().to_string())
}
fn canonicalize(context: Context, path: &str) -> FunctionResult {
let canonical = std::fs::canonicalize(context.execution_context.working_directory().join(path))
.map_err(|err| format!("I/O error canonicalizing path: {err}"))?;
canonical.to_str().map(str::to_string).ok_or_else(|| {
format!(
"Canonical path is not valid unicode: {}",
canonical.display(),
)
})
}
fn capitalize(_context: Context, s: &str) -> FunctionResult {
let mut capitalized = String::new();
for (i, c) in s.chars().enumerate() {
if i == 0 {
capitalized.extend(c.to_uppercase());
} else {
capitalized.extend(c.to_lowercase());
}
}
Ok(capitalized)
}
fn choose(_context: Context, n: &str, alphabet: &str) -> FunctionResult {
let mut chars = HashSet::<char>::with_capacity(alphabet.len());
for c in alphabet.chars() {
if !chars.insert(c) {
return Err(format!("alphabet contains repeated character `{c}`"));
}
}
let alphabet = alphabet.chars().collect::<Vec<char>>();
let n = n
.parse::<usize>()
.map_err(|err| format!("failed to parse `{n}` as positive integer: {err}"))?;
let mut rng = rand::rng();
(0..n)
.map(|_| {
alphabet
.choose(&mut rng)
.ok_or_else(|| "empty alphabet".to_string())
})
.collect()
}
fn clean(_context: Context, path: &str) -> FunctionResult {
Ok(Path::new(path).lexiclean().to_str().unwrap().to_owned())
}
fn dir(name: &'static str, f: fn() -> Option<PathBuf>) -> FunctionResult {
match f() {
Some(path) => path
.as_os_str()
.to_str()
.map(str::to_string)
.ok_or_else(|| {
format!(
"unable to convert {name} directory path to string: {}",
path.display(),
)
}),
None => Err(format!("{name} directory not found")),
}
}
fn datetime(_context: Context, format: &str) -> FunctionResult {
Ok(chrono::Local::now().format(format).to_string())
}
fn datetime_utc(_context: Context, format: &str) -> FunctionResult {
Ok(chrono::Utc::now().format(format).to_string())
}
fn encode_uri_component(_context: Context, s: &str) -> FunctionResult {
static PERCENT_ENCODE: percent_encoding::AsciiSet = percent_encoding::NON_ALPHANUMERIC
.remove(b'-')
.remove(b'_')
.remove(b'.')
.remove(b'!')
.remove(b'~')
.remove(b'*')
.remove(b'\'')
.remove(b'(')
.remove(b')');
Ok(percent_encoding::utf8_percent_encode(s, &PERCENT_ENCODE).to_string())
}
fn env(context: Context, key: &str, default: Option<&str>) -> FunctionResult {
match default {
Some(val) => env_var_or_default(context, key, val),
None => env_var(context, key),
}
}
/// Look up environment variable `key`, erroring if it is unset or not
/// unicode. Values loaded from the dotenv file take precedence over the
/// process environment.
fn env_var(context: Context, key: &str) -> FunctionResult {
  use std::env::VarError::*;
  // Dotenv entries shadow the real environment.
  if let Some(value) = context.execution_context.dotenv.get(key) {
    return Ok(value.clone());
  }
  match env::var(key) {
    Err(NotPresent) => Err(format!("environment variable `{key}` not present")),
    Err(NotUnicode(os_string)) => Err(format!(
      "environment variable `{key}` not unicode: {os_string:?}"
    )),
    Ok(value) => Ok(value),
  }
}
/// Look up environment variable `key`, returning `default` when it is unset.
/// Values loaded from the dotenv file take precedence over the process
/// environment.
fn env_var_or_default(context: Context, key: &str, default: &str) -> FunctionResult {
  use std::env::VarError::*;

  // Dotenv entries shadow the real environment.
  if let Some(value) = context.execution_context.dotenv.get(key) {
    return Ok(value.clone());
  }

  match env::var(key) {
    Ok(value) => Ok(value),
    Err(NotPresent) => Ok(default.to_owned()),
    Err(NotUnicode(os_string)) => Err(format!(
      "environment variable `{key}` not unicode: {os_string:?}"
    )),
  }
}
/// Fail unconditionally with `message` as the error text.
fn error(_context: Context, message: &str) -> FunctionResult {
  Err(message.to_owned())
}
/// Return the extension of `path`, without the leading `.`.
fn extension(_context: Context, path: &str) -> FunctionResult {
  Utf8Path::new(path)
    .extension()
    .map(str::to_owned)
    .ok_or_else(|| format!("Could not extract extension from `{path}`"))
}
/// Return the final component of `path`.
fn file_name(_context: Context, path: &str) -> FunctionResult {
  Utf8Path::new(path)
    .file_name()
    .map(str::to_owned)
    .ok_or_else(|| format!("Could not extract file name from `{path}`"))
}
/// Return the final component of `path` with its extension removed.
fn file_stem(_context: Context, path: &str) -> FunctionResult {
  Utf8Path::new(path)
    .file_stem()
    .map(str::to_owned)
    .ok_or_else(|| format!("Could not extract file stem from `{path}`"))
}
/// Directory from which `just` was invoked, converted for use in the recipe
/// shell via `Platform::convert_native_path`.
fn invocation_directory(context: Context) -> FunctionResult {
  Platform::convert_native_path(
    context.execution_context.config,
    &context.execution_context.search.working_directory,
    &context.execution_context.config.invocation_directory,
  )
  .map_err(|e| format!("Error getting shell path: {e}"))
}
/// Directory from which `just` was invoked, as a native path, erroring if it
/// is not valid unicode.
fn invocation_directory_native(context: Context) -> FunctionResult {
  context
    .execution_context
    .config
    .invocation_directory
    .to_str()
    .map(str::to_owned)
    .ok_or_else(|| {
      format!(
        "Invocation directory is not valid unicode: {}",
        context
          .execution_context
          .config
          .invocation_directory
          .display()
      )
    })
}
/// Report whether the current recipe is running as a dependency of another
/// recipe, rendered as a string.
fn is_dependency(context: Context) -> FunctionResult {
  let is_dependency = context.is_dependency;
  Ok(format!("{is_dependency}"))
}
/// Prefix every whitespace-separated word of `s` with `prefix`, joining the
/// results with single spaces.
fn prepend(_context: Context, prefix: &str, s: &str) -> FunctionResult {
  let mut prefixed = Vec::new();
  for word in s.split_whitespace() {
    prefixed.push(format!("{prefix}{word}"));
  }
  Ok(prefixed.join(" "))
}
/// Join path `base` with `with` and any further components in `and`.
fn join(_context: Context, base: &str, with: &str, and: &[String]) -> FunctionResult {
  let mut joined = Utf8Path::new(base).join(with);
  for component in and {
    joined.push(component);
  }
  Ok(joined.to_string())
}
/// Path to the currently running `just` binary, erroring if it is not valid
/// unicode.
fn just_executable(_context: Context) -> FunctionResult {
  let exe_path =
    env::current_exe().map_err(|e| format!("Error getting current executable: {e}"))?;
  exe_path.to_str().map(str::to_owned).ok_or_else(|| {
    format!(
      "Executable path is not valid unicode: {}",
      exe_path.display()
    )
  })
}
/// Process ID of the running `just` process, as a string.
fn just_pid(_context: Context) -> FunctionResult {
  Ok(format!("{}", std::process::id()))
}
/// Path of the current justfile, erroring if it is not valid unicode.
fn justfile(context: Context) -> FunctionResult {
  context
    .execution_context
    .search
    .justfile
    .to_str()
    .map(str::to_owned)
    .ok_or_else(|| {
      format!(
        "Justfile path is not valid unicode: {}",
        context.execution_context.search.justfile.display()
      )
    })
}
/// Directory containing the current justfile, erroring if the justfile path
/// has no parent or the directory is not valid unicode.
fn justfile_directory(context: Context) -> FunctionResult {
  let justfile_directory = context
    .execution_context
    .search
    .justfile
    .parent()
    .ok_or_else(|| {
      format!(
        "Could not resolve justfile directory. Justfile `{}` had no parent.",
        context.execution_context.search.justfile.display()
      )
    })?;
  justfile_directory
    .to_str()
    .map(str::to_owned)
    .ok_or_else(|| {
      format!(
        "Justfile directory is not valid unicode: {}",
        justfile_directory.display()
      )
    })
}
/// Convert `s` to `kebab-case`.
fn kebabcase(_context: Context, s: &str) -> FunctionResult {
  Ok(s.to_kebab_case())
}
/// Convert `s` to `lowerCamelCase`.
fn lowercamelcase(_context: Context, s: &str) -> FunctionResult {
  Ok(s.to_lower_camel_case())
}
/// Convert `s` to lowercase.
fn lowercase(_context: Context, s: &str) -> FunctionResult {
  Ok(s.to_lowercase())
}
/// Directory containing the current module's source file, erroring if it is
/// not valid unicode.
fn module_directory(context: Context) -> FunctionResult {
  // NOTE(review): `unwrap` assumes a module source path always has a parent —
  // presumably guaranteed by how module sources are discovered; confirm.
  let module_directory = context.execution_context.module.source.parent().unwrap();
  module_directory.to_str().map(str::to_owned).ok_or_else(|| {
    format!(
      "Module directory is not valid unicode: {}",
      module_directory.display(),
    )
  })
}
/// Path of the current module's source file, erroring if it is not valid
/// unicode.
fn module_file(context: Context) -> FunctionResult {
  let module_file = &context.execution_context.module.source;
  module_file.to_str().map(str::to_owned).ok_or_else(|| {
    format!(
      "Module file path is not valid unicode: {}",
      module_file.display(),
    )
  })
}
/// Number of logical CPUs, as reported by the `num_cpus` crate.
fn num_cpus(_context: Context) -> FunctionResult {
  let num = num_cpus::get();
  Ok(num.to_string())
}
/// Target operating system name (e.g. as reported by the `target` module).
fn os(_context: Context) -> FunctionResult {
  Ok(target::os().to_owned())
}
/// Target operating system family.
fn os_family(_context: Context) -> FunctionResult {
  Ok(target::family().to_owned())
}
/// Parent directory of `path`, erroring if `path` has no parent.
fn parent_directory(_context: Context, path: &str) -> FunctionResult {
  Utf8Path::new(path)
    .parent()
    .map(Utf8Path::to_string)
    .ok_or_else(|| format!("Could not extract parent directory from `{path}`"))
}
/// Return `"true"` if `path`, resolved relative to the working directory,
/// exists, and `"false"` otherwise.
fn path_exists(context: Context, path: &str) -> FunctionResult {
  let resolved = context.execution_context.working_directory().join(path);
  Ok(resolved.exists().to_string())
}
/// Quote `s` for POSIX-style shells: wrap it in single quotes and replace
/// each embedded single quote with the `'\''` escape sequence.
fn quote(_context: Context, s: &str) -> FunctionResult {
  Ok(format!("'{}'", s.replace('\'', "'\\''")))
}
/// Return the contents of `filename`, resolved relative to the working
/// directory, as a string.
///
/// Fix: the I/O error message previously contained the literal placeholder
/// `(unknown)` instead of the name of the file that failed to read.
fn read(context: Context, filename: &str) -> FunctionResult {
  fs::read_to_string(context.execution_context.working_directory().join(filename))
    .map_err(|err| format!("I/O error reading `{filename}`: {err}"))
}
/// Replace all occurrences of `from` in `s` with `to`.
fn replace(_context: Context, s: &str, from: &str, to: &str) -> FunctionResult {
  Ok(s.replace(from, to))
}
/// Like `which`, but fail if no executable named `name` can be found.
fn require(context: Context, name: &str) -> FunctionResult {
  crate::which(context, name)?.ok_or_else(|| format!("could not find executable `{name}`"))
}
/// Replace all matches of `regex` in `s` with `replacement`, erroring if the
/// regular expression fails to compile.
fn replace_regex(_context: Context, s: &str, regex: &str, replacement: &str) -> FunctionResult {
  let regex = Regex::new(regex).map_err(|err| err.to_string())?;
  let replaced = regex.replace_all(s, replacement);
  Ok(replaced.into_owned())
}
/// Return the hex-encoded SHA-256 digest of `s`.
fn sha256(_context: Context, s: &str) -> FunctionResult {
  use sha2::{Digest, Sha256};
  // `Sha256::digest` is the one-shot equivalent of new/update/finalize.
  let digest = Sha256::digest(s);
  Ok(format!("{digest:x}"))
}
/// Return the hex-encoded SHA-256 digest of the file at `path`, resolved
/// relative to the working directory.
fn sha256_file(context: Context, path: &str) -> FunctionResult {
  use sha2::{Digest, Sha256};
  let path = context.execution_context.working_directory().join(path);
  let mut hasher = Sha256::new();
  let mut file =
    fs::File::open(&path).map_err(|err| format!("Failed to open `{}`: {err}", path.display()))?;
  // Stream the file into the hasher rather than reading it all into memory.
  std::io::copy(&mut file, &mut hasher)
    .map_err(|err| format!("Failed to read `{}`: {err}", path.display()))?;
  let hash = hasher.finalize();
  Ok(format!("{hash:x}"))
}
/// Run `command` with `args` through the evaluator's shell machinery,
/// returning the command's output.
fn shell(context: Context, command: &str, args: &[String]) -> FunctionResult {
  // The command itself is passed as argv[0] followed by the user arguments.
  let args = iter::once(command)
    .chain(args.iter().map(String::as_str))
    .collect::<Vec<&str>>();
  Evaluator::run_command(context.execution_context, context.scope, command, &args)
    .map_err(|output_error| output_error.to_string())
}
/// Convert `s` to `SHOUTY-KEBAB-CASE`.
fn shoutykebabcase(_context: Context, s: &str) -> FunctionResult {
  Ok(s.to_shouty_kebab_case())
}
/// Convert `s` to `SHOUTY_SNAKE_CASE`.
fn shoutysnakecase(_context: Context, s: &str) -> FunctionResult {
  Ok(s.to_shouty_snake_case())
}
/// Convert `s` to `snake_case`.
fn snakecase(_context: Context, s: &str) -> FunctionResult {
  Ok(s.to_snake_case())
}
/// Directory containing the source file of the current recipe, erroring if
/// the path is not valid unicode.
fn source_directory(context: Context) -> FunctionResult {
  // NOTE(review): both `unwrap`s assume the justfile path and the joined
  // source path always have parents — presumably guaranteed upstream by
  // search/parse; confirm before relying on this elsewhere.
  context
    .execution_context
    .search
    .justfile
    .parent()
    .unwrap()
    .join(context.name.token.path)
    .parent()
    .unwrap()
    .to_str()
    .map(str::to_owned)
    .ok_or_else(|| {
      format!(
        "Source file path not valid unicode: {}",
        context.name.token.path.display(),
      )
    })
}
/// Path of the source file of the current recipe, erroring if the path is
/// not valid unicode.
fn source_file(context: Context) -> FunctionResult {
  // NOTE(review): `unwrap` assumes the justfile path has a parent — confirm.
  context
    .execution_context
    .search
    .justfile
    .parent()
    .unwrap()
    .join(context.name.token.path)
    .to_str()
    .map(str::to_owned)
    .ok_or_else(|| {
      format!(
        "Source file path not valid unicode: {}",
        context.name.token.path.display(),
      )
    })
}
/// Return the ANSI escape-sequence prefix for the named style: `command`,
/// `error`, or `warning`.
fn style(context: Context, s: &str) -> FunctionResult {
  match s {
    // The command style is user-configurable.
    "command" => Ok(
      Color::always()
        .command(context.execution_context.config.command_color)
        .prefix()
        .to_string(),
    ),
    "error" => Ok(Color::always().error().prefix().to_string()),
    "warning" => Ok(Color::always().warning().prefix().to_string()),
    _ => Err(format!("unknown style: `{s}`")),
  }
}
/// Convert `s` to `Title Case`.
fn titlecase(_context: Context, s: &str) -> FunctionResult {
  Ok(s.to_title_case())
}
/// Remove leading and trailing whitespace from `s`.
fn trim(_context: Context, s: &str) -> FunctionResult {
  Ok(s.trim().to_owned())
}
/// Remove trailing whitespace from `s`.
fn trim_end(_context: Context, s: &str) -> FunctionResult {
  Ok(s.trim_end().to_owned())
}
/// Remove at most one trailing occurrence of `pat` from `s`.
fn trim_end_match(_context: Context, s: &str, pat: &str) -> FunctionResult {
  let trimmed = match s.strip_suffix(pat) {
    Some(rest) => rest,
    None => s,
  };
  Ok(trimmed.to_owned())
}
/// Repeatedly remove trailing occurrences of `pat` from `s`.
fn trim_end_matches(_context: Context, s: &str, pat: &str) -> FunctionResult {
  Ok(s.trim_end_matches(pat).to_owned())
}
/// Remove leading whitespace from `s`.
fn trim_start(_context: Context, s: &str) -> FunctionResult {
  Ok(s.trim_start().to_owned())
}
/// Remove at most one leading occurrence of `pat` from `s`.
fn trim_start_match(_context: Context, s: &str, pat: &str) -> FunctionResult {
  let trimmed = match s.strip_prefix(pat) {
    Some(rest) => rest,
    None => s,
  };
  Ok(trimmed.to_owned())
}
/// Repeatedly remove leading occurrences of `pat` from `s`.
fn trim_start_matches(_context: Context, s: &str, pat: &str) -> FunctionResult {
  Ok(s.trim_start_matches(pat).to_owned())
}
/// Convert `s` to `UpperCamelCase`.
fn uppercamelcase(_context: Context, s: &str) -> FunctionResult {
  Ok(s.to_upper_camel_case())
}
/// Convert `s` to uppercase.
fn uppercase(_context: Context, s: &str) -> FunctionResult {
  Ok(s.to_uppercase())
}
/// Generate a random version 4 UUID.
fn uuid(_context: Context) -> FunctionResult {
  Ok(uuid::Uuid::new_v4().to_string())
}
/// Path to executable `name`, or the empty string if it cannot be found.
fn which(context: Context, name: &str) -> FunctionResult {
  Ok(crate::which(context, name)?.unwrap_or_default())
}
/// Return `path` with its extension removed, preserving the parent
/// directory.
fn without_extension(_context: Context, path: &str) -> FunctionResult {
  let path = Utf8Path::new(path);

  let parent = path
    .parent()
    .ok_or_else(|| format!("Could not extract parent from `{path}`"))?;

  let stem = path
    .file_stem()
    .ok_or_else(|| format!("Could not extract file stem from `{path}`"))?;

  Ok(parent.join(stem).to_string())
}
/// Check whether a string processes properly as semver (e.x. "0.1.0")
/// and matches a given semver requirement (e.x. ">=0.1.0")
fn semver_matches(_context: Context, version: &str, requirement: &str) -> FunctionResult {
  // Parse the requirement first so its error takes precedence, matching the
  // original evaluation order.
  let requirement = requirement
    .parse::<VersionReq>()
    .map_err(|err| format!("invalid semver requirement: {err}"))?;

  let version = version
    .parse::<Version>()
    .map_err(|err| format!("invalid semver version: {err}"))?;

  Ok(requirement.matches(&version).to_string())
}
#[cfg(test)]
mod tests {
  use super::*;
  // `dir` reports a missing directory using the supplied name.
  #[test]
  fn dir_not_found() {
    assert_eq!(dir("foo", || None).unwrap_err(), "foo directory not found");
  }
  // `dir` reports paths that cannot be converted to unicode.
  #[cfg(unix)]
  #[test]
  fn dir_not_unicode() {
    use std::os::unix::ffi::OsStrExt;
    assert_eq!(
      dir("foo", || Some(
        // An invalid UTF-8 byte sequence, displayed as replacement chars.
        std::ffi::OsStr::from_bytes(b"\xe0\x80\x80").into()
      ))
      .unwrap_err(),
      "unable to convert foo directory path to string: ���",
    );
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/error.rs | src/error.rs | use super::*;
/// Runtime error: everything that can go wrong after argument parsing —
/// loading, compiling, selecting, and executing recipes. Rendering is
/// handled by this type's `ColorDisplay` implementation.
#[derive(Debug)]
pub(crate) enum Error<'src> {
  AmbiguousModuleFile {
    module: Name<'src>,
    found: Vec<PathBuf>,
  },
  ArgumentPatternMismatch {
    argument: String,
    parameter: &'src str,
    pattern: Box<Pattern<'src>>,
    recipe: &'src str,
  },
  Assert {
    message: String,
    name: Name<'src>,
  },
  // A backtick expression's command failed.
  Backtick {
    token: Token<'src>,
    output_error: OutputError,
  },
  ChooserInvoke {
    shell_binary: String,
    shell_arguments: String,
    chooser: OsString,
    io_error: io::Error,
  },
  ChooserRead {
    chooser: OsString,
    io_error: io::Error,
  },
  ChooserStatus {
    chooser: OsString,
    status: ExitStatus,
  },
  ChooserWrite {
    chooser: OsString,
    io_error: io::Error,
  },
  CircularImport {
    current: PathBuf,
    import: PathBuf,
  },
  // A recipe line exited with a non-zero code.
  Code {
    recipe: &'src str,
    line_number: Option<usize>,
    code: i32,
    print_message: bool,
  },
  CommandInvoke {
    binary: OsString,
    arguments: Vec<OsString>,
    io_error: io::Error,
  },
  CommandStatus {
    binary: OsString,
    arguments: Vec<OsString>,
    status: ExitStatus,
  },
  // Wrapper around a compile-time error.
  Compile {
    compile_error: CompileError<'src>,
  },
  Config {
    config_error: ConfigError,
  },
  Const {
    const_error: ConstError<'src>,
  },
  Cygpath {
    recipe: &'src str,
    output_error: OutputError,
  },
  DefaultRecipeRequiresArguments {
    recipe: &'src str,
    min_arguments: usize,
  },
  Dotenv {
    dotenv_error: dotenvy::Error,
  },
  DotenvRequired,
  DumpJson {
    source: serde_json::Error,
  },
  DuplicateOption {
    recipe: &'src str,
    option: Switch,
  },
  EditorInvoke {
    editor: OsString,
    io_error: io::Error,
  },
  EditorStatus {
    editor: OsString,
    status: ExitStatus,
  },
  EvalUnknownVariable {
    variable: String,
    suggestion: Option<Suggestion<'src>>,
  },
  ExcessInvocations {
    invocations: usize,
  },
  ExpectedSubmoduleButFoundRecipe {
    path: String,
  },
  FlagWithValue {
    recipe: &'src str,
    option: Switch,
  },
  FormatCheckFoundDiff,
  FunctionCall {
    function: Name<'src>,
    message: String,
  },
  GetConfirmation {
    io_error: io::Error,
  },
  Homedir,
  InitExists {
    justfile: PathBuf,
  },
  // Indicates a bug in just itself.
  Internal {
    message: String,
  },
  Interrupted {
    signal: Signal,
  },
  Io {
    recipe: &'src str,
    io_error: io::Error,
  },
  Load {
    path: PathBuf,
    io_error: io::Error,
  },
  MissingImportFile {
    path: Token<'src>,
  },
  MissingModuleFile {
    module: Name<'src>,
  },
  MissingOption {
    recipe: &'src str,
    option: Switch,
  },
  MultipleShortOptions {
    options: String,
  },
  NoChoosableRecipes,
  NoDefaultRecipe,
  NoRecipes,
  NotConfirmed {
    recipe: &'src str,
  },
  OptionMissingValue {
    recipe: &'src str,
    option: Switch,
  },
  PositionalArgumentCountMismatch {
    recipe: Box<Recipe<'src>>,
    found: usize,
    min: usize,
    max: usize,
  },
  RegexCompile {
    source: regex::Error,
  },
  RuntimeDirIo {
    io_error: io::Error,
    path: PathBuf,
  },
  Script {
    command: String,
    io_error: io::Error,
    recipe: &'src str,
  },
  Search {
    search_error: SearchError,
  },
  Shebang {
    argument: Option<String>,
    command: String,
    io_error: io::Error,
    recipe: &'src str,
  },
  // Recipe process was killed by a signal.
  Signal {
    recipe: &'src str,
    line_number: Option<usize>,
    signal: i32,
  },
  #[cfg(windows)]
  SignalHandlerInstall {
    source: ctrlc::Error,
  },
  #[cfg(unix)]
  SignalHandlerPipeCloexec {
    io_error: io::Error,
  },
  #[cfg(unix)]
  SignalHandlerPipeOpen {
    io_error: io::Error,
  },
  #[cfg(unix)]
  SignalHandlerSigaction {
    signal: Signal,
    io_error: io::Error,
  },
  #[cfg(unix)]
  SignalHandlerSpawnThread {
    io_error: io::Error,
  },
  StdoutIo {
    io_error: io::Error,
  },
  TempdirIo {
    recipe: &'src str,
    io_error: io::Error,
  },
  Unknown {
    recipe: &'src str,
    line_number: Option<usize>,
  },
  UnknownOption {
    recipe: &'src str,
    option: Switch,
  },
  UnknownOverrides {
    overrides: Vec<String>,
  },
  UnknownRecipe {
    recipe: String,
    suggestion: Option<Suggestion<'src>>,
  },
  UnknownSubmodule {
    path: String,
  },
  UnstableFeature {
    unstable_feature: UnstableFeature,
  },
  WriteJustfile {
    justfile: PathBuf,
    io_error: io::Error,
  },
}
impl<'src> Error<'src> {
  /// Exit code that `just` should exit with as a result of this error, if
  /// any. Signal-related errors use the shell convention of `128 + signal`.
  pub(crate) fn code(&self) -> Option<i32> {
    match self {
      Self::Backtick {
        output_error: OutputError::Code(code),
        ..
      }
      | Self::Code { code, .. } => Some(*code),
      Self::ChooserStatus { status, .. } | Self::EditorStatus { status, .. } => status.code(),
      Self::Backtick {
        output_error: OutputError::Signal(signal),
        ..
      }
      | Self::Signal { signal, .. } => 128i32.checked_add(*signal),
      Self::Backtick {
        output_error: OutputError::Interrupted(signal),
        ..
      }
      | Self::Interrupted { signal } => Some(signal.code()),
      _ => None,
    }
  }
  /// Source token to display alongside the error message, when the error is
  /// associated with a location in the justfile.
  fn context(&self) -> Option<Token<'src>> {
    match self {
      Self::AmbiguousModuleFile { module, .. } | Self::MissingModuleFile { module, .. } => {
        Some(module.token)
      }
      Self::Assert { name, .. } => Some(**name),
      Self::Backtick { token, .. } => Some(*token),
      Self::Compile { compile_error } => Some(compile_error.context()),
      Self::Const { const_error } => Some(const_error.context()),
      Self::FunctionCall { function, .. } => Some(function.token),
      Self::MissingImportFile { path } => Some(*path),
      _ => None,
    }
  }
  /// Construct an internal error, indicating a bug in just.
  pub(crate) fn internal(message: impl Into<String>) -> Self {
    Self::Internal {
      message: message.into(),
    }
  }
  /// Whether the error message should be printed; suppressed only for
  /// `Code` errors explicitly marked quiet.
  pub(crate) fn print_message(&self) -> bool {
    !matches!(
      self,
      Error::Code {
        print_message: false,
        ..
      }
    )
  }
  /// Underlying error to display as a `caused by:` line, if any.
  fn source(&self) -> Option<&dyn std::error::Error> {
    match self {
      Self::Compile { compile_error } => compile_error.source(),
      _ => None,
    }
  }
}
// Conversions allowing `?` to lift lower-level errors into `Error`.
impl<'src> From<CompileError<'src>> for Error<'src> {
  fn from(compile_error: CompileError<'src>) -> Self {
    Self::Compile { compile_error }
  }
}
impl From<ConfigError> for Error<'_> {
  fn from(config_error: ConfigError) -> Self {
    Self::Config { config_error }
  }
}
impl<'src> From<ConstError<'src>> for Error<'src> {
  fn from(const_error: ConstError<'src>) -> Self {
    Self::Const { const_error }
  }
}
impl<'src> From<dotenvy::Error> for Error<'src> {
  fn from(dotenv_error: dotenvy::Error) -> Error<'src> {
    Self::Dotenv { dotenv_error }
  }
}
impl From<SearchError> for Error<'_> {
  fn from(search_error: SearchError) -> Self {
    Self::Search { search_error }
  }
}
impl ColorDisplay for Error<'_> {
  /// Render the error, colored per `color`: an `error:` prefix, a
  /// variant-specific message, then optional usage, source-token context,
  /// and `caused by:` trailers.
  fn fmt(&self, f: &mut Formatter, color: Color) -> fmt::Result {
    use Error::*;
    let error = color.error().paint("error");
    let message = color.message().prefix();
    write!(f, "{error}: {message}")?;
    // One arm per variant; messages are written between the color prefix
    // and the suffix emitted after the match.
    match self {
      Const { const_error } => write!(
        f,
        "{const_error}",
      )?,
      AmbiguousModuleFile { module, found } => write!(
        f,
        "Found multiple source files for module `{module}`: {}",
        List::and_ticked(found.iter().map(|path| path.display())),
      )?,
      ArgumentPatternMismatch {
        argument,
        parameter,
        pattern,
        recipe,
      } => {
        write!(
          f,
          "Argument `{argument}` passed to recipe `{recipe}` parameter `{parameter}` does not match pattern '{}'",
          pattern.original(),
        )?;
      }
      Assert { message, .. } => {
        write!(f, "Assert failed: {message}")?;
      }
      Backtick { output_error, .. } => match output_error {
        OutputError::Code(code) => write!(f, "Backtick failed with exit code {code}")?,
        OutputError::Signal(signal) => write!(f, "Backtick was terminated by signal {signal}")?,
        OutputError::Unknown => write!(f, "Backtick failed for an unknown reason")?,
        OutputError::Interrupted(signal) => write!(
          f,
          "Backtick succeeded but `just` was interrupted by signal {signal}",
        )?,
        // I/O errors are distinguished by kind to give actionable messages.
        OutputError::Io(io_error) => match io_error.kind() {
          io::ErrorKind::NotFound => write!(
            f,
            "Backtick could not be run because just could not find the shell:\n{io_error}",
          ),
          io::ErrorKind::PermissionDenied => write!(
            f,
            "Backtick could not be run because just could not run the shell:\n{io_error}",
          ),
          _ => write!(
            f,
            "Backtick could not be run because of an IO error while launching the shell:\n{io_error}",
          ),
        }?,
        OutputError::Utf8(utf8_error) => write!(
          f,
          "Backtick succeeded but stdout was not utf8: {utf8_error}",
        )?,
      },
      ChooserInvoke {
        shell_binary,
        shell_arguments,
        chooser,
        io_error,
      } => {
        let chooser = chooser.to_string_lossy();
        write!(
          f,
          "Chooser `{shell_binary} {shell_arguments} {chooser}` invocation failed: {io_error}",
        )?;
      }
      ChooserRead { chooser, io_error } => {
        let chooser = chooser.to_string_lossy();
        write!(
          f,
          "Failed to read output from chooser `{chooser}`: {io_error}",
        )?;
      }
      ChooserStatus { chooser, status } => {
        let chooser = chooser.to_string_lossy();
        write!(f, "Chooser `{chooser}` failed: {status}")?;
      }
      ChooserWrite { chooser, io_error } => {
        let chooser = chooser.to_string_lossy();
        write!(f, "Failed to write to chooser `{chooser}`: {io_error}")?;
      }
      CircularImport { current, import } => {
        let import = import.display();
        let current = current.display();
        write!(f, "Import `{import}` in `{current}` is circular")?;
      }
      Code {
        recipe,
        line_number,
        code,
        ..
      } => {
        if let Some(n) = line_number {
          write!(
            f,
            "Recipe `{recipe}` failed on line {n} with exit code {code}",
          )?;
        } else {
          write!(f, "Recipe `{recipe}` failed with exit code {code}")?;
        }
      }
      CommandInvoke {
        binary,
        arguments,
        io_error,
      } => {
        let cmd = format_cmd(binary, arguments);
        write!(f, "Failed to invoke {cmd}: {io_error}")?;
      }
      CommandStatus {
        binary,
        arguments,
        status,
      } => {
        let cmd = format_cmd(binary, arguments);
        write!(f, "Command {cmd} failed: {status}")?;
      }
      Compile { compile_error } => Display::fmt(compile_error, f)?,
      Config { config_error } => Display::fmt(config_error, f)?,
      Cygpath {
        recipe,
        output_error,
      } => match output_error {
        OutputError::Code(code) => write!(
          f,
          "Cygpath failed with exit code {code} while translating recipe `{recipe}` shebang interpreter path",
        )?,
        OutputError::Signal(signal) => write!(
          f,
          "Cygpath terminated by signal {signal} while translating recipe `{recipe}` shebang interpreter path",
        )?,
        OutputError::Unknown => write!(
          f,
          "Cygpath experienced an unknown failure while translating recipe `{recipe}` shebang interpreter path",
        )?,
        OutputError::Interrupted(signal) => write!(
          f,
          "Cygpath succeeded but `just` was interrupted by {signal}",
        )?,
        OutputError::Io(io_error) => {
          match io_error.kind() {
            io::ErrorKind::NotFound => write!(
              f,
              "Could not find `cygpath` executable to translate recipe `{recipe}` shebang interpreter path:\n{io_error}",
            ),
            io::ErrorKind::PermissionDenied => write!(
              f,
              "Could not run `cygpath` executable to translate recipe `{recipe}` shebang interpreter path:\n{io_error}",
            ),
            _ => write!(f, "Could not run `cygpath` executable:\n{io_error}"),
          }?;
        }
        OutputError::Utf8(utf8_error) => write!(
          f,
          "Cygpath successfully translated recipe `{recipe}` shebang interpreter path, but output was not utf8: {utf8_error}",
        )?,
      },
      DefaultRecipeRequiresArguments {
        recipe,
        min_arguments,
      } => {
        let count = Count("argument", *min_arguments);
        write!(
          f,
          "Recipe `{recipe}` cannot be used as default recipe since it requires at least {min_arguments} {count}.",
        )?;
      }
      Dotenv { dotenv_error } => {
        write!(f, "Failed to load environment file: {dotenv_error}")?;
      }
      DotenvRequired => {
        write!(f, "Dotenv file not found")?;
      }
      DumpJson { source } => {
        write!(f, "Failed to dump JSON to stdout: {source}")?;
      }
      DuplicateOption { recipe, option } => {
        write!(
          f,
          "Recipe `{recipe}` option `{option}` cannot be passed more than once",
        )?;
      }
      EditorInvoke { editor, io_error } => {
        let editor = editor.to_string_lossy();
        write!(f, "Editor `{editor}` invocation failed: {io_error}")?;
      }
      EditorStatus { editor, status } => {
        let editor = editor.to_string_lossy();
        write!(f, "Editor `{editor}` failed: {status}")?;
      }
      EvalUnknownVariable {
        variable,
        suggestion,
      } => {
        write!(f, "Justfile does not contain variable `{variable}`.")?;
        if let Some(suggestion) = suggestion {
          write!(f, "\n{suggestion}")?;
        }
      }
      ExcessInvocations { invocations } => {
        write!(
          f,
          "Expected 1 command-line recipe invocation but found {invocations}.",
        )?;
      }
      ExpectedSubmoduleButFoundRecipe { path } => {
        write!(f, "Expected submodule at `{path}` but found recipe.")?;
      }
      FlagWithValue { recipe, option } => {
        write!(f, "Recipe `{recipe}` flag `{option}` does not take value",)?;
      }
      FormatCheckFoundDiff => {
        write!(f, "Formatted justfile differs from original.")?;
      }
      FunctionCall { function, message } => {
        let function = function.lexeme();
        write!(f, "Call to function `{function}` failed: {message}")?;
      }
      GetConfirmation { io_error } => {
        write!(f, "Failed to read confirmation from stdin: {io_error}")?;
      }
      Homedir => {
        write!(f, "Failed to get homedir")?;
      }
      InitExists { justfile } => {
        write!(f, "Justfile `{}` already exists", justfile.display())?;
      }
      Internal { message } => {
        write!(
          f,
          "Internal runtime error, this may indicate a bug in just: {message} \
           consider filing an issue: https://github.com/casey/just/issues/new",
        )?;
      }
      Interrupted { signal } => {
        write!(f, "Interrupted by {signal}")?;
      }
      Io { recipe, io_error } => {
        match io_error.kind() {
          io::ErrorKind::NotFound => write!(
            f,
            "Recipe `{recipe}` could not be run because just could not find the shell: {io_error}",
          ),
          io::ErrorKind::PermissionDenied => write!(
            f,
            "Recipe `{recipe}` could not be run because just could not run the shell: {io_error}",
          ),
          _ => write!(
            f,
            "Recipe `{recipe}` could not be run because of an IO error while launching the shell: {io_error}",
          ),
        }?;
      }
      Load { io_error, path } => {
        write!(
          f,
          "Failed to read justfile at `{}`: {io_error}",
          path.display()
        )?;
      }
      MissingImportFile { .. } => write!(f, "Could not find source file for import.")?,
      MissingModuleFile { module } => {
        write!(f, "Could not find source file for module `{module}`.")?;
      }
      MissingOption { recipe, option } => {
        write!(f, "Recipe `{recipe}` requires option `{option}`")?;
      }
      MultipleShortOptions { options } => {
        write!(
          f,
          "Passing multiple short options (`-{options}`) in one argument is not supported"
        )?;
      }
      NoChoosableRecipes => write!(f, "Justfile contains no choosable recipes.")?,
      NoDefaultRecipe => write!(f, "Justfile contains no default recipe.")?,
      NoRecipes => write!(f, "Justfile contains no recipes.")?,
      NotConfirmed { recipe } => {
        write!(f, "Recipe `{recipe}` was not confirmed")?;
      }
      OptionMissingValue { recipe, option } => {
        write!(f, "Recipe `{recipe}` option `{option}` missing value")?;
      }
      PositionalArgumentCountMismatch {
        recipe,
        found,
        min,
        max,
        ..
      } => {
        let count = Count("argument", *found);
        if min == max {
          let expected = min;
          let only = if expected < found { "only " } else { "" };
          write!(
            f,
            "Recipe `{}` got {found} positional {count} but {only}takes {expected}",
            recipe.name(),
          )?;
        } else if found < min {
          write!(
            f,
            "Recipe `{}` got {found} positional {count} but takes at least {min}",
            recipe.name(),
          )?;
        } else if found > max {
          write!(
            f,
            "Recipe `{}` got {found} positional {count} but takes at most {max}",
            recipe.name(),
          )?;
        }
      }
      RegexCompile { source } => write!(f, "{source}")?,
      RuntimeDirIo { io_error, path } => {
        write!(
          f,
          "I/O error in runtime dir `{}`: {io_error}",
          path.display(),
        )?;
      }
      Script {
        command,
        io_error,
        recipe,
      } => {
        write!(
          f,
          "Recipe `{recipe}` with command `{command}` execution error: {io_error}",
        )?;
      }
      Search { search_error } => Display::fmt(search_error, f)?,
      Shebang {
        recipe,
        command,
        argument,
        io_error,
      } => {
        if let Some(argument) = argument {
          write!(
            f,
            "Recipe `{recipe}` with shebang `#!{command} {argument}` execution error: {io_error}",
          )?;
        } else {
          write!(
            f,
            "Recipe `{recipe}` with shebang `#!{command}` execution error: {io_error}",
          )?;
        }
      }
      Signal {
        recipe,
        line_number,
        signal,
      } => {
        if let Some(n) = line_number {
          write!(
            f,
            "Recipe `{recipe}` was terminated on line {n} by signal {signal}",
          )?;
        } else {
          write!(f, "Recipe `{recipe}` was terminated by signal {signal}")?;
        }
      }
      #[cfg(windows)]
      SignalHandlerInstall { source } => {
        write!(f, "Could not install signal handler: {source}")?;
      }
      #[cfg(unix)]
      SignalHandlerPipeCloexec { io_error } => {
        write!(f, "I/O error setting O_CLOEXEC on signal handler pipe: {io_error}")?;
      }
      #[cfg(unix)]
      SignalHandlerPipeOpen { io_error } => {
        write!(f, "I/O error opening signal handler pipe: {io_error}")?;
      }
      #[cfg(unix)]
      SignalHandlerSigaction { io_error, signal } => {
        write!(f, "I/O error setting sigaction for {signal}: {io_error}")?;
      }
      #[cfg(unix)]
      SignalHandlerSpawnThread { io_error } => {
        write!(
          f,
          "I/O error spawning thread for signal handler: {io_error}",
        )?;
      }
      StdoutIo { io_error } => {
        write!(f, "I/O error writing to stdout: {io_error}")?;
      }
      TempdirIo { recipe, io_error } => {
        write!(
          f,
          "Recipe `{recipe}` could not be run because of an IO error while trying to create a temporary \
           directory or write a file to that directory: {io_error}",
        )?;
      }
      Unknown {
        recipe,
        line_number,
      } => {
        if let Some(n) = line_number {
          write!(
            f,
            "Recipe `{recipe}` failed on line {n} for an unknown reason",
          )?;
        } else {
          write!(f, "Recipe `{recipe}` failed for an unknown reason")?;
        }
      }
      UnknownOption { recipe, option } => {
        write!(f, "Recipe `{recipe}` does not have option `{option}`")?;
      }
      UnknownOverrides { overrides } => {
        let count = Count("Variable", overrides.len());
        let overrides = List::and_ticked(overrides);
        write!(
          f,
          "{count} {overrides} overridden on the command line but not present in justfile",
        )?;
      }
      UnknownRecipe { recipe, suggestion } => {
        write!(f, "Justfile does not contain recipe `{recipe}`")?;
        if let Some(suggestion) = suggestion {
          write!(f, "\n{suggestion}")?;
        }
      }
      UnknownSubmodule { path } => {
        write!(f, "Justfile does not contain submodule `{path}`")?;
      }
      UnstableFeature { unstable_feature } => {
        write!(
          f,
          "{unstable_feature} Invoke `just` with `--unstable`, set the `JUST_UNSTABLE` environment variable, or add `set unstable` to your `justfile` to enable unstable features.",
        )?;
      }
      WriteJustfile { justfile, io_error } => {
        let justfile = justfile.display();
        write!(f, "Failed to write justfile to `{justfile}`: {io_error}")?;
      }
    }
    write!(f, "{}", color.message().suffix())?;
    // Argument-count mismatches additionally print the recipe's usage.
    if let PositionalArgumentCountMismatch { recipe, .. } = self {
      writeln!(f)?;
      let path = ModulePath::try_from([recipe.name()].as_slice()).unwrap();
      write!(
        f,
        "{}",
        Usage {
          long: false,
          path: &path,
          recipe,
        }
        .color_display(color)
      )?;
    }
    // Source-token context, when the error points at a justfile location.
    if let Some(token) = self.context() {
      writeln!(f)?;
      write!(f, "{}", token.color_display(color.error()))?;
    }
    if let Some(source) = self.source() {
      writeln!(f)?;
      write!(f, "caused by: {source}")?;
    }
    Ok(())
  }
}
/// Render a binary and its arguments as a human-readable string, with each
/// component backtick-quoted and joined by spaces.
///
/// Fix: take `&[OsString]` instead of the `&Vec<OsString>` anti-pattern;
/// existing `&Vec` call sites coerce via deref, so callers are unaffected.
fn format_cmd(binary: &OsString, arguments: &[OsString]) -> String {
  iter::once(binary)
    .chain(arguments)
    .map(|value| Enclosure::tick(value.to_string_lossy()).to_string())
    .collect::<Vec<String>>()
    .join(" ")
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/config_error.rs | src/config_error.rs | use super::*;
/// Error produced while parsing command-line configuration. Display
/// messages are generated by snafu from the `#[snafu(display(...))]`
/// attributes.
#[derive(Debug, Snafu)]
#[snafu(visibility(pub(crate)), context(suffix(Context)))]
pub(crate) enum ConfigError {
  #[snafu(display("Failed to get current directory: {}", source))]
  CurrentDir { source: io::Error },
  // Indicates a bug in just; see `ConfigError::internal`.
  #[snafu(display(
    "Internal config error, this may indicate a bug in just: {message} \
     consider filing an issue: https://github.com/casey/just/issues/new",
  ))]
  Internal { message: String },
  #[snafu(display("Invalid module path `{}`", path.join(" ")))]
  ModulePath { path: Vec<String> },
  #[snafu(display("Failed to parse request: {source}"))]
  RequestParse { source: serde_json::Error },
  #[snafu(display(
    "Path-prefixed recipes may not be used with `--working-directory` or `--justfile`."
  ))]
  SearchDirConflict,
  // A subcommand received positional arguments it does not accept.
  #[snafu(display(
    "`--{}` used with unexpected {}: {}",
    subcommand.to_lowercase(),
    Count("argument", arguments.len()),
    List::and_ticked(arguments)
  ))]
  SubcommandArguments {
    subcommand: &'static str,
    arguments: Vec<String>,
  },
  // A subcommand received `key=value` overrides it does not accept.
  #[snafu(display(
    "`--{}` used with unexpected overrides: {}",
    subcommand.to_lowercase(),
    List::and_ticked(overrides.iter().map(|(key, value)| format!("{key}={value}"))),
  ))]
  SubcommandOverrides {
    subcommand: &'static str,
    overrides: BTreeMap<String, String>,
  },
  #[snafu(display(
    "`--{}` used with unexpected overrides: {}; and arguments: {}",
    subcommand.to_lowercase(),
    List::and_ticked(overrides.iter().map(|(key, value)| format!("{key}={value}"))),
    List::and_ticked(arguments)))
  ]
  SubcommandOverridesAndArguments {
    subcommand: &'static str,
    overrides: BTreeMap<String, String>,
    arguments: Vec<String>,
  },
}
impl ConfigError {
  /// Construct an internal config error from `message`, indicating a bug in
  /// just.
  pub(crate) fn internal(message: impl Into<String>) -> Self {
    let message = message.into();
    Self::Internal { message }
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/unstable_feature.rs | src/unstable_feature.rs | use super::*;
/// Features gated behind `--unstable` / `JUST_UNSTABLE` / `set unstable`.
/// `Display` renders the message shown when a feature is used without
/// unstable features enabled.
#[derive(Copy, Clone, Debug, PartialEq, Ord, Eq, PartialOrd)]
pub(crate) enum UnstableFeature {
  FormatSubcommand,
  LogicalOperators,
  WhichFunction,
}
impl Display for UnstableFeature {
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    let message = match self {
      Self::FormatSubcommand => "The `--fmt` command is currently unstable.",
      Self::LogicalOperators => "The logical operators `&&` and `||` are currently unstable.",
      Self::WhichFunction => "The `which()` function is currently unstable.",
    };
    write!(f, "{message}")
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/string_kind.rs | src/string_kind.rs | use super::*;
/// The kind of a string or backtick literal: which delimiter character it
/// uses, and whether it is an indented (triple-delimited) literal.
#[derive(Debug, PartialEq, Clone, Copy, Ord, PartialOrd, Eq)]
pub(crate) struct StringKind {
  pub(crate) delimiter: StringDelimiter,
  pub(crate) indented: bool,
}
impl StringKind {
  // Indented values must come before un-indented values, or else
  // `Self::from_token_start` will incorrectly return indented = false
  // for indented strings.
  const ALL: &'static [Self] = &[
    Self::new(StringDelimiter::Backtick, true),
    Self::new(StringDelimiter::Backtick, false),
    Self::new(StringDelimiter::QuoteDouble, true),
    Self::new(StringDelimiter::QuoteDouble, false),
    Self::new(StringDelimiter::QuoteSingle, true),
    Self::new(StringDelimiter::QuoteSingle, false),
  ];
  const fn new(delimiter: StringDelimiter, indented: bool) -> Self {
    Self {
      delimiter,
      indented,
    }
  }
  /// The literal delimiter text: one delimiter character, or three for
  /// indented literals.
  pub(crate) fn delimiter(self) -> &'static str {
    match (self.delimiter, self.indented) {
      (StringDelimiter::Backtick, false) => "`",
      (StringDelimiter::Backtick, true) => "```",
      (StringDelimiter::QuoteDouble, false) => "\"",
      (StringDelimiter::QuoteDouble, true) => "\"\"\"",
      (StringDelimiter::QuoteSingle, false) => "'",
      (StringDelimiter::QuoteSingle, true) => "'''",
    }
  }
  /// Length of the delimiter in bytes.
  pub(crate) fn delimiter_len(self) -> usize {
    self.delimiter().len()
  }
  /// Token kind produced by the lexer for this string kind.
  pub(crate) fn token_kind(self) -> TokenKind {
    match self.delimiter {
      StringDelimiter::QuoteDouble | StringDelimiter::QuoteSingle => TokenKind::StringToken,
      StringDelimiter::Backtick => TokenKind::Backtick,
    }
  }
  /// Compile error to report when a literal of this kind is unterminated.
  pub(crate) fn unterminated_error_kind(self) -> CompileErrorKind<'static> {
    match self.delimiter {
      StringDelimiter::QuoteDouble | StringDelimiter::QuoteSingle => {
        CompileErrorKind::UnterminatedString
      }
      StringDelimiter::Backtick => CompileErrorKind::UnterminatedBacktick,
    }
  }
  /// Whether escape sequences are processed: only double-quoted strings.
  pub(crate) fn processes_escape_sequences(self) -> bool {
    match self.delimiter {
      StringDelimiter::QuoteDouble => true,
      StringDelimiter::Backtick | StringDelimiter::QuoteSingle => false,
    }
  }
  pub(crate) fn indented(self) -> bool {
    self.indented
  }
  /// Determine the kind of a string or backtick token, erroring on any
  /// other token kind (which would indicate an internal bug).
  pub(crate) fn from_string_or_backtick(token: Token) -> CompileResult<Self> {
    Self::from_token_start(token.lexeme()).ok_or_else(|| {
      token.error(CompileErrorKind::Internal {
        message: "StringKind::from_token: expected string or backtick".to_owned(),
      })
    })
  }
  /// Determine the kind from the start of a token's text, if it begins with
  /// a known delimiter. Relies on `ALL` listing indented kinds first.
  pub(crate) fn from_token_start(token_start: &str) -> Option<Self> {
    Self::ALL
      .iter()
      .find(|&&kind| token_start.starts_with(kind.delimiter()))
      .copied()
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/expression.rs | src/expression.rs | use super::*;
/// An expression. Note that the Just language grammar has both an `expression`
/// production of additions (`a + b`) and values, and a `value` production of
/// all other value types (for example strings, function calls, and
/// parenthetical groups).
///
/// The parser parses both values and expressions into `Expression`s.
#[derive(PartialEq, Debug, Clone)]
pub(crate) enum Expression<'src> {
  /// `lhs && rhs`
  And {
    lhs: Box<Expression<'src>>,
    rhs: Box<Expression<'src>>,
  },
  /// `assert(condition, error)`
  Assert {
    name: Name<'src>,
    condition: Condition<'src>,
    error: Box<Expression<'src>>,
  },
  /// `contents`
  Backtick {
    contents: String,
    token: Token<'src>,
  },
  /// `name(arguments)`
  Call { thunk: Thunk<'src> },
  /// `lhs + rhs`
  Concatenation {
    lhs: Box<Expression<'src>>,
    rhs: Box<Expression<'src>>,
  },
  /// `if condition { then } else { otherwise }`
  Conditional {
    condition: Condition<'src>,
    then: Box<Expression<'src>>,
    otherwise: Box<Expression<'src>>,
  },
  /// `f"format string"`
  FormatString {
    start: StringLiteral<'src>,
    expressions: Vec<(Expression<'src>, StringLiteral<'src>)>,
  },
  /// `(contents)`
  Group { contents: Box<Expression<'src>> },
  /// `lhs / rhs`
  Join {
    lhs: Option<Box<Expression<'src>>>,
    rhs: Box<Expression<'src>>,
  },
  /// `lhs || rhs`
  Or {
    lhs: Box<Expression<'src>>,
    rhs: Box<Expression<'src>>,
  },
  /// `"string_literal"` or `'string_literal'`
  StringLiteral { string_literal: StringLiteral<'src> },
  /// `variable`
  Variable { name: Name<'src> },
}

impl<'src> Expression<'src> {
  /// Iterator over the variables referenced within this expression.
  pub(crate) fn variables<'expression>(&'expression self) -> Variables<'expression, 'src> {
    Variables::new(self)
  }
}
impl Display for Expression<'_> {
  /// Formats the expression back as justfile source text.
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    match self {
      Self::And { lhs, rhs } => write!(f, "{lhs} && {rhs}"),
      Self::Assert {
        condition, error, ..
      } => write!(f, "assert({condition}, {error})"),
      // Backticks print their original lexeme, preserving delimiters.
      Self::Backtick { token, .. } => write!(f, "{}", token.lexeme()),
      Self::Call { thunk } => write!(f, "{thunk}"),
      Self::Concatenation { lhs, rhs } => write!(f, "{lhs} + {rhs}"),
      Self::Conditional {
        condition,
        then,
        otherwise,
      } => {
        // When the `else` branch is itself a conditional, omit the braces so
        // an `else if` chain renders as written in source.
        if let Self::Conditional { .. } = **otherwise {
          write!(f, "if {condition} {{ {then} }} else {otherwise}")
        } else {
          write!(f, "if {condition} {{ {then} }} else {{ {otherwise} }}")
        }
      }
      Self::FormatString { start, expressions } => {
        write!(f, "{start}")?;
        for (expression, string) in expressions {
          write!(f, "{expression}{string}")?;
        }
        Ok(())
      }
      Self::Group { contents } => write!(f, "({contents})"),
      Self::Join { lhs: None, rhs } => write!(f, "/ {rhs}"),
      Self::Join {
        lhs: Some(lhs),
        rhs,
      } => write!(f, "{lhs} / {rhs}"),
      Self::Or { lhs, rhs } => write!(f, "{lhs} || {rhs}"),
      Self::StringLiteral { string_literal } => write!(f, "{string_literal}"),
      Self::Variable { name } => write!(f, "{}", name.lexeme()),
    }
  }
}
impl Serialize for Expression<'_> {
  /// Serializes the expression as a tagged sequence, e.g. `["and", lhs, rhs]`.
  /// Groups serialize transparently as their contents; calls and string
  /// literals delegate to their own `Serialize` impls.
  fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
  where
    S: Serializer,
  {
    match self {
      Self::And { lhs, rhs } => {
        let mut seq = serializer.serialize_seq(None)?;
        seq.serialize_element("and")?;
        seq.serialize_element(lhs)?;
        seq.serialize_element(rhs)?;
        seq.end()
      }
      Self::Assert {
        condition, error, ..
      } => {
        // Consistency: dropped the redundant `<S as Serializer>::SerializeSeq`
        // annotation this arm alone carried; inference handles it, exactly as
        // in every other arm.
        let mut seq = serializer.serialize_seq(None)?;
        seq.serialize_element("assert")?;
        seq.serialize_element(condition)?;
        seq.serialize_element(error)?;
        seq.end()
      }
      Self::Backtick { contents, .. } => {
        let mut seq = serializer.serialize_seq(None)?;
        seq.serialize_element("evaluate")?;
        seq.serialize_element(contents)?;
        seq.end()
      }
      Self::Call { thunk } => thunk.serialize(serializer),
      Self::Concatenation { lhs, rhs } => {
        let mut seq = serializer.serialize_seq(None)?;
        seq.serialize_element("concatenate")?;
        seq.serialize_element(lhs)?;
        seq.serialize_element(rhs)?;
        seq.end()
      }
      Self::Conditional {
        condition,
        then,
        otherwise,
      } => {
        let mut seq = serializer.serialize_seq(None)?;
        seq.serialize_element("if")?;
        seq.serialize_element(condition)?;
        seq.serialize_element(then)?;
        seq.serialize_element(otherwise)?;
        seq.end()
      }
      Self::FormatString { start, expressions } => {
        let mut seq = serializer.serialize_seq(None)?;
        seq.serialize_element("format")?;
        seq.serialize_element(start)?;
        for (expression, string) in expressions {
          seq.serialize_element(expression)?;
          seq.serialize_element(string)?;
        }
        seq.end()
      }
      // Parenthesized groups are invisible in the serialized form.
      Self::Group { contents } => contents.serialize(serializer),
      Self::Join { lhs, rhs } => {
        let mut seq = serializer.serialize_seq(None)?;
        seq.serialize_element("join")?;
        seq.serialize_element(lhs)?;
        seq.serialize_element(rhs)?;
        seq.end()
      }
      Self::Or { lhs, rhs } => {
        let mut seq = serializer.serialize_seq(None)?;
        seq.serialize_element("or")?;
        seq.serialize_element(lhs)?;
        seq.serialize_element(rhs)?;
        seq.end()
      }
      Self::StringLiteral { string_literal } => string_literal.serialize(serializer),
      Self::Variable { name } => {
        let mut seq = serializer.serialize_seq(None)?;
        seq.serialize_element("variable")?;
        seq.serialize_element(name)?;
        seq.end()
      }
    }
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/unresolved_dependency.rs | src/unresolved_dependency.rs | use super::*;
/// A recipe dependency whose target has not yet been resolved to a recipe.
#[derive(PartialEq, Debug, Clone)]
pub(crate) struct UnresolvedDependency<'src> {
  pub(crate) arguments: Vec<Expression<'src>>,
  pub(crate) recipe: Namepath<'src>,
}

impl Display for UnresolvedDependency<'_> {
  /// Renders as `recipe` when argument-free, otherwise as `(recipe arg…)`,
  /// matching justfile dependency syntax.
  fn fmt(&self, f: &mut Formatter) -> fmt::Result {
    if self.arguments.is_empty() {
      return write!(f, "{}", self.recipe);
    }
    write!(f, "({}", self.recipe)?;
    for argument in &self.arguments {
      write!(f, " {argument}")?;
    }
    write!(f, ")")
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/scope.rs | src/scope.rs | use super::*;
/// A lexical scope: a table of bindings plus an optional parent scope that
/// lookups fall back to.
#[derive(Debug)]
pub(crate) struct Scope<'src: 'run, 'run> {
  bindings: Table<'src, Binding<'src, String>>,
  parent: Option<&'run Self>,
}

impl<'src, 'run> Scope<'src, 'run> {
  /// Creates an empty child scope whose `value` lookups fall back to `self`.
  pub(crate) fn child(&'run self) -> Self {
    Self {
      parent: Some(self),
      bindings: Table::new(),
    }
  }

  /// Creates the root scope, pre-populated with prelude constants.
  pub(crate) fn root() -> Self {
    let mut root = Self {
      parent: None,
      bindings: Table::new(),
    };

    for (key, value) in constants() {
      root.bind(Binding {
        export: false,
        file_depth: 0,
        // Constants get a synthetic zero-position token with path "PRELUDE"
        // since they originate from no real source file.
        name: Name {
          token: Token {
            column: 0,
            kind: TokenKind::Identifier,
            length: key.len(),
            line: 0,
            offset: 0,
            path: Path::new("PRELUDE"),
            src: key,
          },
        },
        prelude: true,
        private: false,
        value: (*value).into(),
      });
    }

    root
  }

  /// Inserts `binding` into this scope's table.
  pub(crate) fn bind(&mut self, binding: Binding<'src>) {
    self.bindings.insert(binding);
  }

  /// Whether `name` is bound in this scope itself — parent scopes are NOT
  /// consulted, unlike `value`.
  pub(crate) fn bound(&self, name: &str) -> bool {
    self.bindings.contains_key(name)
  }

  /// Looks up `name`, checking this scope first and then walking up through
  /// ancestors; `None` if unbound everywhere.
  pub(crate) fn value(&self, name: &str) -> Option<&str> {
    if let Some(binding) = self.bindings.get(name) {
      Some(binding.value.as_ref())
    } else {
      self.parent?.value(name)
    }
  }

  /// Bindings of this scope only; ancestors are excluded.
  pub(crate) fn bindings(&self) -> impl Iterator<Item = &Binding<String>> {
    self.bindings.values()
  }

  /// Names bound in this scope only; ancestors are excluded.
  pub(crate) fn names(&self) -> impl Iterator<Item = &str> {
    self.bindings.keys().copied()
  }

  pub(crate) fn parent(&self) -> Option<&'run Self> {
    self.parent
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/keyed.rs | src/keyed.rs | use super::*;
/// Types identified by a string key borrowed from source text.
pub(crate) trait Keyed<'key> {
  fn key(&self) -> &'key str;
}

// An `Arc<T>` is keyed by the key of its referent.
impl<'key, T: Keyed<'key>> Keyed<'key> for Arc<T> {
  fn key(&self) -> &'key str {
    self.as_ref().key()
  }
}

/// Serializes a keyed value as just its key string.
pub(crate) fn serialize<'src, S, K>(keyed: &K, serializer: S) -> Result<S::Ok, S::Error>
where
  S: Serializer,
  K: Keyed<'src>,
{
  serializer.serialize_str(keyed.key())
}

/// Serializes an optional keyed value as its key string, or as none/null
/// when absent.
#[rustversion::attr(since(1.83), allow(clippy::ref_option))]
pub(crate) fn serialize_option<'src, S, K>(
  recipe: &Option<K>,
  serializer: S,
) -> Result<S::Ok, S::Error>
where
  S: Serializer,
  K: Keyed<'src>,
{
  match recipe {
    None => serializer.serialize_none(),
    Some(keyed) => serialize(keyed, serializer),
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
casey/just | https://github.com/casey/just/blob/5732ee083a58c534804d184acbdede11d1bbaac5/src/summary.rs | src/summary.rs | //! Justfile summary creation, for testing purposes only.
//!
//! The contents of this module are not bound by any stability guarantees.
//! Breaking changes may be introduced at any time.
//!
//! The main entry point into this module is the `summary` function, which
//! parses a justfile at a given path and produces a `Summary` object, which
//! broadly captures the functionality of the parsed justfile, or an error
//! message.
//!
//! This functionality is intended to be used with `janus`, a tool for ensuring
//! that changes to just do not inadvertently break or change the interpretation
//! of existing justfiles.
use {
crate::{compiler::Compiler, config::Config, error::Error, loader::Loader},
std::{collections::BTreeMap, io, path::Path},
};
// Aliases for the full (non-summary) AST types, so the summary-building code
// below can refer to them as `full::Recipe`, `full::Expression`, etc.,
// distinct from the summary types of the same names.
mod full {
  pub(crate) use crate::{
    assignment::Assignment, condition::Condition, conditional_operator::ConditionalOperator,
    dependency::Dependency, expression::Expression, fragment::Fragment, justfile::Justfile,
    line::Line, parameter::Parameter, parameter_kind::ParameterKind, recipe::Recipe, thunk::Thunk,
  };
}
pub fn summary(path: &Path) -> io::Result<Result<Summary, String>> {
let loader = Loader::new();
match Compiler::compile(&Config::default(), &loader, path) {
Ok(compilation) => Ok(Ok(Summary::new(&compilation.justfile))),
Err(error) => Ok(Err(if let Error::Compile { compile_error } = error {
compile_error.to_string()
} else {
format!("{error:?}")
})),
}
}
/// Top-level summary of a justfile: its assignments and recipes by name.
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub struct Summary {
  pub assignments: BTreeMap<String, Assignment>,
  pub recipes: BTreeMap<String, Recipe>,
}

impl Summary {
  fn new(justfile: &full::Justfile) -> Self {
    // Invert the alias table: map each target recipe name to the list of
    // aliases that point at it.
    let mut aliases = BTreeMap::new();

    for alias in justfile.aliases.values() {
      aliases
        .entry(alias.target.name())
        .or_insert_with(Vec::new)
        .push(alias.name.to_string());
    }

    Self {
      recipes: justfile
        .recipes
        .iter()
        .map(|(name, recipe)| {
          (
            (*name).to_string(),
            // `remove` transfers ownership of the alias list; recipes with no
            // aliases get an empty vector.
            Recipe::new(recipe, aliases.remove(name).unwrap_or_default()),
          )
        })
        .collect(),
      assignments: justfile
        .assignments
        .iter()
        .map(|(name, assignment)| ((*name).to_owned(), Assignment::new(assignment)))
        .collect(),
    }
  }
}
/// Summary of a single recipe, including the aliases that target it.
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub struct Recipe {
  pub aliases: Vec<String>,
  pub dependencies: Vec<Dependency>,
  pub lines: Vec<Line>,
  pub parameters: Vec<Parameter>,
  pub private: bool,
  pub quiet: bool,
  pub shebang: bool,
}

impl Recipe {
  /// Summarizes `recipe`, pairing it with the pre-computed list of aliases
  /// that point at it (supplied by `Summary::new`).
  fn new(recipe: &full::Recipe, aliases: Vec<String>) -> Self {
    Self {
      aliases,
      dependencies: recipe.dependencies.iter().map(Dependency::new).collect(),
      lines: recipe.body.iter().map(Line::new).collect(),
      parameters: recipe.parameters.iter().map(Parameter::new).collect(),
      private: recipe.private,
      quiet: recipe.quiet,
      shebang: recipe.shebang,
    }
  }
}
/// Summary of a recipe parameter: its name, kind, and optional default.
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub struct Parameter {
  pub default: Option<Expression>,
  pub kind: ParameterKind,
  pub name: String,
}

impl Parameter {
  fn new(parameter: &full::Parameter) -> Self {
    Self {
      kind: ParameterKind::new(parameter.kind),
      name: parameter.name.lexeme().to_owned(),
      default: parameter.default.as_ref().map(Expression::new),
    }
  }
}

/// Summary form of a parameter's arity: plain (`x`), one-or-more (`+x`), or
/// zero-or-more (`*x`).
#[derive(Debug, Copy, Clone, Eq, PartialEq, Hash, Ord, PartialOrd)]
pub enum ParameterKind {
  Plus,
  Singular,
  Star,
}

impl ParameterKind {
  fn new(parameter_kind: full::ParameterKind) -> Self {
    match parameter_kind {
      full::ParameterKind::Singular => Self::Singular,
      full::ParameterKind::Plus => Self::Plus,
      full::ParameterKind::Star => Self::Star,
    }
  }
}
/// Summary of one recipe body line: a sequence of text and interpolation
/// fragments.
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub struct Line {
  pub fragments: Vec<Fragment>,
}

impl Line {
  fn new(line: &full::Line) -> Self {
    Self {
      fragments: line.fragments.iter().map(Fragment::new).collect(),
    }
  }
}

/// Summary of a line fragment: literal text or an interpolated expression.
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub enum Fragment {
  Expression { expression: Expression },
  Text { text: String },
}

impl Fragment {
  fn new(fragment: &full::Fragment) -> Self {
    match fragment {
      full::Fragment::Text { token } => Self::Text {
        text: token.lexeme().to_owned(),
      },
      full::Fragment::Interpolation { expression } => Self::Expression {
        expression: Expression::new(expression),
      },
    }
  }
}
/// Summary of a variable assignment: its export flag and value expression.
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub struct Assignment {
  pub exported: bool,
  pub expression: Expression,
}

impl Assignment {
  fn new(assignment: &full::Assignment) -> Self {
    Self {
      exported: assignment.export,
      expression: Expression::new(&assignment.value),
    }
  }
}
/// Summary form of a justfile expression, mirroring `full::Expression` but
/// with source-location information stripped and function calls flattened
/// into a uniform name-plus-arguments shape.
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub enum Expression {
  And {
    lhs: Box<Expression>,
    rhs: Box<Expression>,
  },
  Assert {
    condition: Condition,
    error: Box<Expression>,
  },
  Backtick {
    command: String,
  },
  Call {
    name: String,
    arguments: Vec<Expression>,
  },
  Concatenation {
    lhs: Box<Expression>,
    rhs: Box<Expression>,
  },
  // Conditions are stored inline (lhs/rhs/operator) rather than as a nested
  // `Condition` struct, unlike the `Assert` variant above.
  Conditional {
    lhs: Box<Expression>,
    rhs: Box<Expression>,
    then: Box<Expression>,
    otherwise: Box<Expression>,
    operator: ConditionalOperator,
  },
  FormatString {
    start: String,
    expressions: Vec<(Expression, String)>,
  },
  Join {
    lhs: Option<Box<Expression>>,
    rhs: Box<Expression>,
  },
  Or {
    lhs: Box<Expression>,
    rhs: Box<Expression>,
  },
  String {
    text: String,
  },
  Variable {
    name: String,
  },
}
impl Expression {
  /// Recursively converts a full AST expression into its summary form,
  /// dropping tokens/positions and flattening every thunk arity into a
  /// uniform `Call { name, arguments }`.
  fn new(expression: &full::Expression) -> Self {
    use full::Expression::*;
    match expression {
      And { lhs, rhs } => Self::And {
        lhs: Self::new(lhs).into(),
        rhs: Self::new(rhs).into(),
      },
      Assert {
        condition: full::Condition { lhs, rhs, operator },
        error,
        ..
      } => Expression::Assert {
        condition: Condition {
          lhs: Box::new(Expression::new(lhs)),
          rhs: Box::new(Expression::new(rhs)),
          operator: ConditionalOperator::new(*operator),
        },
        error: Box::new(Expression::new(error)),
      },
      Backtick { contents, .. } => Self::Backtick {
        command: (*contents).clone(),
      },
      Call { thunk } => match thunk {
        full::Thunk::Nullary { name, .. } => Self::Call {
          name: name.lexeme().to_owned(),
          arguments: Vec::new(),
        },
        full::Thunk::Unary { name, arg, .. } => Self::Call {
          name: name.lexeme().to_owned(),
          arguments: vec![Self::new(arg)],
        },
        full::Thunk::UnaryOpt {
          name,
          args: (a, opt_b),
          ..
        } => {
          let mut arguments = Vec::new();
          // NOTE(review): the optional argument `b` is pushed *before* the
          // required argument `a`, so the summarized argument list is
          // reversed relative to source order — presumably deliberate for
          // janus compatibility, but worth confirming.
          if let Some(b) = opt_b.as_ref() {
            arguments.push(Self::new(b));
          }
          arguments.push(Self::new(a));
          Self::Call {
            name: name.lexeme().to_owned(),
            arguments,
          }
        }
        full::Thunk::UnaryPlus {
          name,
          args: (a, rest),
          ..
        } => {
          let mut arguments = vec![Expression::new(a)];
          for arg in rest {
            arguments.push(Expression::new(arg));
          }
          Expression::Call {
            name: name.lexeme().to_owned(),
            arguments,
          }
        }
        full::Thunk::Binary {
          name, args: [a, b], ..
        } => Self::Call {
          name: name.lexeme().to_owned(),
          arguments: vec![Self::new(a), Self::new(b)],
        },
        full::Thunk::BinaryPlus {
          name,
          args: ([a, b], rest),
          ..
        } => {
          let mut arguments = vec![Self::new(a), Self::new(b)];
          for arg in rest {
            arguments.push(Self::new(arg));
          }
          Self::Call {
            name: name.lexeme().to_owned(),
            arguments,
          }
        }
        full::Thunk::Ternary {
          name,
          args: [a, b, c],
          ..
        } => Self::Call {
          name: name.lexeme().to_owned(),
          arguments: vec![Self::new(a), Self::new(b), Self::new(c)],
        },
      },
      Concatenation { lhs, rhs } => Self::Concatenation {
        lhs: Self::new(lhs).into(),
        rhs: Self::new(rhs).into(),
      },
      Conditional {
        condition: full::Condition { lhs, rhs, operator },
        otherwise,
        then,
      } => Self::Conditional {
        lhs: Self::new(lhs).into(),
        operator: ConditionalOperator::new(*operator),
        otherwise: Self::new(otherwise).into(),
        rhs: Self::new(rhs).into(),
        then: Self::new(then).into(),
      },
      FormatString { start, expressions } => Self::FormatString {
        start: start.cooked.clone(),
        expressions: expressions
          .iter()
          .map(|(expression, string)| (Self::new(expression), string.cooked.clone()))
          .collect(),
      },
      // Parenthesized groups are flattened away in the summary.
      Group { contents } => Self::new(contents),
      Join { lhs, rhs } => Self::Join {
        lhs: lhs.as_ref().map(|lhs| Self::new(lhs).into()),
        rhs: Self::new(rhs).into(),
      },
      Or { lhs, rhs } => Self::Or {
        lhs: Self::new(lhs).into(),
        rhs: Self::new(rhs).into(),
      },
      StringLiteral { string_literal } => Self::String {
        text: string_literal.cooked.clone(),
      },
      Variable { name, .. } => Self::Variable {
        name: name.lexeme().to_owned(),
      },
    }
  }
}
/// Summary of a condition, e.g. the `a == b` in `assert(a == b, "msg")`.
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub struct Condition {
  lhs: Box<Expression>,
  operator: ConditionalOperator,
  rhs: Box<Expression>,
}

/// Summary form of a condition's comparison operator.
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub enum ConditionalOperator {
  Equality,
  Inequality,
  RegexMatch,
  RegexMismatch,
}

impl ConditionalOperator {
  fn new(operator: full::ConditionalOperator) -> Self {
    match operator {
      full::ConditionalOperator::Equality => Self::Equality,
      full::ConditionalOperator::Inequality => Self::Inequality,
      full::ConditionalOperator::RegexMatch => Self::RegexMatch,
      full::ConditionalOperator::RegexMismatch => Self::RegexMismatch,
    }
  }
}
/// Summary of a recipe dependency: the target recipe's name and its
/// argument expressions, flattened across argument groups.
#[derive(Eq, PartialEq, Hash, Ord, PartialOrd, Debug, Clone)]
pub struct Dependency {
  pub arguments: Vec<Expression>,
  pub recipe: String,
}

impl Dependency {
  fn new(dependency: &full::Dependency) -> Self {
    Self {
      recipe: dependency.recipe.name().to_owned(),
      // Flatten the grouped argument lists into one vector, preserving
      // group order and within-group order.
      arguments: dependency
        .arguments
        .iter()
        .flatten()
        .map(Expression::new)
        .collect(),
    }
  }
}
| rust | CC0-1.0 | 5732ee083a58c534804d184acbdede11d1bbaac5 | 2026-01-04T15:34:07.853244Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.