repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/build.rs | build.rs | /// The version string isn’t the simplest: we want to show the version,
/// current Git hash, and compilation date when building *debug* versions, but
/// just the version for *release* versions so the builds are reproducible.
///
/// This script generates the string from the environment variables that Cargo
/// adds (http://doc.crates.io/environment-variables.html) and runs `git` to
/// get the SHA1 hash. It then writes the string into a file, which exa then
/// includes at build-time.
///
/// - https://stackoverflow.com/q/43753491/3484614
/// - https://crates.io/crates/vergen
use std::env;
use std::fs::File;
use std::io::{self, Write};
use std::path::PathBuf;
use datetime::{LocalDateTime, ISO};
/// The build script entry point.
///
/// Composes the version string from Cargo's environment variables plus a
/// `git` invocation, strips the colour-code placeholders out of it, and
/// writes the result to `$OUT_DIR/version_string.txt` for `include_str!`
/// to pick up. Propagates any I/O error from the final write.
fn main() -> io::Result<()> {
    #![allow(clippy::write_with_newline)]

    let tagline = "exa - list files on the command-line";
    let url = "https://the.exa.website/";

    // Debug builds get a loud warning; development (“-pre”) builds add the
    // Git hash and build date; release builds show only the version so the
    // binaries stay reproducible.
    let ver =
        if is_debug_build() {
            format!("{}\nv{} \\1;31m(pre-release debug build!)\\0m\n\\1;4;34m{}\\0m", tagline, version_string(), url)
        }
        else if is_development_version() {
            format!("{}\nv{} [{}] built on {} \\1;31m(pre-release!)\\0m\n\\1;4;34m{}\\0m", tagline, version_string(), git_hash(), build_date(), url)
        }
        else {
            format!("{}\nv{}\n\\1;4;34m{}\\0m", tagline, version_string(), url)
        };

    // We need to create these files in the Cargo output directory.
    let out = PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR is always set by Cargo for build scripts"));
    let path = out.join("version_string.txt");

    // Bland version text.
    // Include the underlying I/O error in the panic message instead of
    // discarding it (previously only the path was printed, which made
    // failures such as permission errors hard to diagnose).
    let mut f = File::create(&path)
        .unwrap_or_else(|e| panic!("{}: {}", path.display(), e));
    writeln!(f, "{}", strip_codes(&ver))?;

    Ok(())
}
/// Removes the colour-code placeholders (`\1;31m`, `\1;4;34m`, and the
/// closing `\0m`) from a string, leaving just the plain text.
fn strip_codes(input: &str) -> String {
    ["\\0m", "\\1;31m", "\\1;4;34m"]
        .iter()
        .fold(input.to_string(), |text, code| text.replace(code, ""))
}
/// Retrieve the project’s current Git hash, as a string.
///
/// Runs `git rev-parse --short HEAD` and returns its trimmed standard
/// output. Panics if the `git` process cannot be spawned.
fn git_hash() -> String {
    use std::process::Command;

    let output = Command::new("git")
        .args(&["rev-parse", "--short", "HEAD"])
        .output()
        .unwrap();

    String::from_utf8_lossy(&output.stdout).trim().to_string()
}
/// Whether we should show pre-release info in the version string.
///
/// Both weekly releases and actual releases are --release releases,
/// but actual releases will have a proper version number.
fn is_development_version() -> bool {
    let pre_release = cargo_version().ends_with("-pre");
    pre_release || env::var("PROFILE").unwrap() == "debug"
}
/// Whether we are building in debug mode, according to the `PROFILE`
/// variable that Cargo sets for build scripts.
fn is_debug_build() -> bool {
    matches!(env::var("PROFILE").unwrap().as_str(), "debug")
}
/// Retrieves the `[package]` version in Cargo.toml as a string, via the
/// `CARGO_PKG_VERSION` variable that Cargo sets for build scripts.
fn cargo_version() -> String {
    let version = env::var("CARGO_PKG_VERSION");
    version.unwrap()
}
/// Returns the version, followed (in brackets) by any non-standard
/// feature selection this binary was compiled with.
fn version_string() -> String {
    let feats = nonstandard_features_string();

    if feats.is_empty() {
        cargo_version()
    }
    else {
        format!("{} [{}]", cargo_version(), feats)
    }
}
/// Finds whether a Cargo feature is enabled by checking for the
/// `CARGO_FEATURE_<NAME>` variable that Cargo sets for build scripts.
/// Missing, empty, or non-Unicode values all count as disabled.
fn feature_enabled(name: &str) -> bool {
    match env::var(&format!("CARGO_FEATURE_{}", name)) {
        Ok(value) => ! value.is_empty(),
        Err(_)    => false,
    }
}
/// A comma-separated list of non-standard feature choices.
/// The Git feature is always reported, as either `+git` or `-git`.
fn nonstandard_features_string() -> String {
    let git_flag = if feature_enabled("GIT") { "+git" } else { "-git" };
    vec![ git_flag ].join(", ")
}
/// Formats the current local date as an ISO 8601 string.
fn build_date() -> String {
    LocalDateTime::now().date().iso().to_string()
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/logger.rs | src/logger.rs | //! Debug error logging.
use std::ffi::OsStr;
use ansi_term::{Colour, ANSIString};
/// Sets the internal logger, changing the log level based on the value of an
/// environment variable.
pub fn configure<T: AsRef<OsStr>>(ev: Option<T>) {
    let ev = match ev {
        Some(v) => v,
        None    => return,
    };

    let env_var = ev.as_ref();
    if env_var.is_empty() {
        return;
    }

    // “trace” selects the most verbose level; any other non-empty value
    // turns on debug-level logging.
    let filter = if env_var == "trace" { log::LevelFilter::Trace }
                 else                  { log::LevelFilter::Debug };
    log::set_max_level(filter);

    if let Err(e) = log::set_logger(GLOBAL_LOGGER) {
        eprintln!("Failed to initialise logger: {}", e);
    }
}
/// Zero-sized logger that writes coloured records to standard error.
#[derive(Debug)]
struct Logger;

/// The single logger instance handed to `log::set_logger`; the logger has
/// to live for the rest of the program, which a `&'static` guarantees.
const GLOBAL_LOGGER: &Logger = &Logger;
impl log::Log for Logger {

    /// Every record is considered enabled; filtering already happened via
    /// ‘set_max_level’, so there is nothing further to reject here.
    fn enabled(&self, _: &log::Metadata<'_>) -> bool {
        true
    }

    /// Writes one record to standard error as `[LEVEL target] message`,
    /// with the brackets painted grey and the level in its own colour.
    fn log(&self, record: &log::Record<'_>) {
        let grey = Colour::Fixed(243);
        let left = grey.paint("[");
        let right = grey.paint("]");
        let painted_level = level(record.level());

        eprintln!("{}{} {}{} {}", left, painted_level, record.target(), right, record.args());
    }

    /// Nothing to do: ‘eprintln!’ already flushes on every call.
    fn flush(&self) {
    }
}
fn level(level: log::Level) -> ANSIString<'static> {
match level {
log::Level::Error => Colour::Red.paint("ERROR"),
log::Level::Warn => Colour::Yellow.paint("WARN"),
log::Level::Info => Colour::Cyan.paint("INFO"),
log::Level::Debug => Colour::Blue.paint("DEBUG"),
log::Level::Trace => Colour::Fixed(245).paint("TRACE"),
}
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/main.rs | src/main.rs | #![warn(deprecated_in_future)]
#![warn(future_incompatible)]
#![warn(nonstandard_style)]
#![warn(rust_2018_compatibility)]
#![warn(rust_2018_idioms)]
#![warn(trivial_casts, trivial_numeric_casts)]
#![warn(unused)]
#![warn(clippy::all, clippy::pedantic)]
#![allow(clippy::cast_precision_loss)]
#![allow(clippy::cast_possible_truncation)]
#![allow(clippy::cast_possible_wrap)]
#![allow(clippy::cast_sign_loss)]
#![allow(clippy::enum_glob_use)]
#![allow(clippy::map_unwrap_or)]
#![allow(clippy::match_same_arms)]
#![allow(clippy::module_name_repetitions)]
#![allow(clippy::non_ascii_literal)]
#![allow(clippy::option_if_let_else)]
#![allow(clippy::too_many_lines)]
#![allow(clippy::unused_self)]
#![allow(clippy::upper_case_acronyms)]
#![allow(clippy::wildcard_imports)]
use std::env;
use std::ffi::{OsStr, OsString};
use std::io::{self, Write, ErrorKind};
use std::path::{Component, PathBuf};
use ansi_term::{ANSIStrings, Style};
use log::*;
use crate::fs::{Dir, File};
use crate::fs::feature::git::GitCache;
use crate::fs::filter::GitIgnore;
use crate::options::{Options, Vars, vars, OptionsResult};
use crate::output::{escape, lines, grid, grid_details, details, View, Mode};
use crate::theme::Theme;
mod fs;
mod info;
mod logger;
mod options;
mod output;
mod theme;
/// Entry point: sets up signal handling, logging, and ANSI support, then
/// parses the command-line arguments and dispatches on the result.
fn main() {
    use std::process::exit;

    // Restore the default SIGPIPE behaviour on Unix so that exa terminates
    // when its output pipe is closed (e.g. `exa | head`), instead of the
    // Rust runtime’s default of ignoring the signal.
    #[cfg(unix)]
    unsafe {
        libc::signal(libc::SIGPIPE, libc::SIG_DFL);
    }

    // Configure logging first, so that everything after it can log.
    logger::configure(env::var_os(vars::EXA_DEBUG));

    // On Windows, ANSI colour support has to be switched on explicitly.
    #[cfg(windows)]
    if let Err(e) = ansi_term::enable_ansi_support() {
        warn!("Failed to enable ANSI support: {}", e);
    }

    // skip(1) drops the program name itself.
    let args: Vec<_> = env::args_os().skip(1).collect();
    match Options::parse(args.iter().map(std::convert::AsRef::as_ref), &LiveVars) {
        OptionsResult::Ok(options, mut input_paths) => {

            // List the current directory by default.
            // (This has to be done here, otherwise git_options won’t see it.)
            if input_paths.is_empty() {
                input_paths = vec![ OsStr::new(".") ];
            }

            let git = git_options(&options, &input_paths);
            let writer = io::stdout();
            let console_width = options.view.width.actual_terminal_width();
            let theme = options.theme.to_theme(console_width.is_some());
            let exa = Exa { options, writer, input_paths, theme, console_width, git };

            match exa.run() {
                Ok(exit_status) => {
                    exit(exit_status);
                }

                // A BrokenPipe error that survives the SIG_DFL handling
                // above is treated as a normal, successful termination.
                Err(e) if e.kind() == ErrorKind::BrokenPipe => {
                    warn!("Broken pipe error: {}", e);
                    exit(exits::SUCCESS);
                }

                Err(e) => {
                    eprintln!("{}", e);
                    exit(exits::RUNTIME_ERROR);
                }
            }
        }

        OptionsResult::Help(help_text) => {
            print!("{}", help_text);
        }

        OptionsResult::Version(version_str) => {
            print!("{}", version_str);
        }

        OptionsResult::InvalidOptions(error) => {
            eprintln!("exa: {}", error);

            if let Some(s) = error.suggestion() {
                eprintln!("{}", s);
            }

            exit(exits::OPTIONS_ERROR);
        }
    }
}
/// The main program wrapper.
/// (The `'args` lifetime ties the borrowed input paths to the parsed
/// command-line arguments they point into.)
pub struct Exa<'args> {

    /// List of command-line options, having been successfully parsed.
    pub options: Options,

    /// The output handle that we write to.
    pub writer: io::Stdout,

    /// List of the free command-line arguments that should correspond to file
    /// names (anything that isn’t an option).
    pub input_paths: Vec<&'args OsStr>,

    /// The theme that has been configured from the command-line options and
    /// environment variables. If colours are disabled, this is a theme with
    /// every style set to the default.
    pub theme: Theme,

    /// The detected width of the console. This is used to determine which
    /// view to use.
    pub console_width: Option<usize>,

    /// A global Git cache, if the option was passed in.
    /// This has to last the lifetime of the program, because the user might
    /// want to list several directories in the same repository.
    pub git: Option<GitCache>,
}
/// The “real” environment variables type.
/// Instead of just calling `var_os` from within the options module,
/// the method of looking up environment variables has to be passed in.
struct LiveVars;

impl Vars for LiveVars {
    /// Looks the variable up in the actual process environment.
    fn get(&self, name: &'static str) -> Option<OsString> {
        env::var_os(name)
    }
}
/// Create a Git cache populated with the arguments that are going to be
/// listed before they’re actually listed, if the options demand it.
fn git_options(options: &Options, args: &[&OsStr]) -> Option<GitCache> {
    match options.should_scan_for_git() {
        true  => Some(args.iter().map(PathBuf::from).collect()),
        false => None,
    }
}
impl<'args> Exa<'args> {

    /// Runs the listing: classifies each input path as a file or a
    /// directory to descend into, prints the plain files first, then
    /// prints each directory. Returns the process exit status.
    ///
    /// # Errors
    ///
    /// Will return `Err` if printing to stderr fails.
    pub fn run(mut self) -> io::Result<i32> {
        debug!("Running with options: {:#?}", self.options);

        let mut files = Vec::new();
        let mut dirs = Vec::new();
        let mut exit_status = 0;

        // Sort the input paths into plain files and directories; paths that
        // cannot be statted are reported and flip the exit status to 2.
        for file_path in &self.input_paths {
            match File::from_args(PathBuf::from(file_path), None, None) {
                Err(e) => {
                    exit_status = 2;
                    writeln!(io::stderr(), "{:?}: {}", file_path, e)?;
                }
                Ok(f) => {
                    if f.points_to_directory() && ! self.options.dir_action.treat_dirs_as_files() {
                        match f.to_dir() {
                            Ok(d) => dirs.push(d),
                            // NOTE(review): unlike the branch above, this
                            // error path does not set exit_status = 2 —
                            // confirm whether that is intentional.
                            Err(e) => writeln!(io::stderr(), "{:?}: {}", file_path, e)?,
                        }
                    }
                    else {
                        files.push(f);
                    }
                }
            }
        }

        // We want to print a directory’s name before we list it, *except* in
        // the case where it’s the only directory, *except* if there are any
        // files to print as well. (It’s a double negative)
        let no_files = files.is_empty();
        let is_only_dir = dirs.len() == 1 && no_files;

        self.options.filter.filter_argument_files(&mut files);
        self.print_files(None, files)?;

        self.print_dirs(dirs, no_files, is_only_dir, exit_status)
    }

    /// Prints the given directories in order, recursing into child
    /// directories when the recurse options (but not tree mode) say so.
    ///
    /// `first` suppresses the blank separator before the first listing;
    /// `is_only_dir` suppresses the `name:` header for a lone directory.
    /// `exit_status` is threaded through and returned unchanged.
    fn print_dirs(&mut self, dir_files: Vec<Dir>, mut first: bool, is_only_dir: bool, exit_status: i32) -> io::Result<i32> {
        for dir in dir_files {

            // Put a gap between directories, or between the list of files and
            // the first directory.
            if first {
                first = false;
            }
            else {
                writeln!(&mut self.writer)?;
            }

            // Print the “directory:” header, escaping control characters in
            // the path first.
            if ! is_only_dir {
                let mut bits = Vec::new();
                escape(dir.path.display().to_string(), &mut bits, Style::default(), Style::default());
                writeln!(&mut self.writer, "{}:", ANSIStrings(&bits))?;
            }

            let mut children = Vec::new();
            let git_ignore = self.options.filter.git_ignore == GitIgnore::CheckAndIgnore;
            // Errors reading individual entries are reported to stderr but
            // do not abort listing the rest of the directory.
            for file in dir.files(self.options.filter.dot_filter, self.git.as_ref(), git_ignore) {
                match file {
                    Ok(file) => children.push(file),
                    Err((path, e)) => writeln!(io::stderr(), "[{}: {}]", path.display(), e)?,
                }
            };

            self.options.filter.filter_child_files(&mut children);
            self.options.filter.sort_files(&mut children);

            if let Some(recurse_opts) = self.options.dir_action.recurse_options() {
                // Depth is the number of path components, ignoring ‘.’.
                let depth = dir.path.components().filter(|&c| c != Component::CurDir).count() + 1;
                if ! recurse_opts.tree && ! recurse_opts.is_too_deep(depth) {

                    // Gather child directories to list after this one.
                    // (Tree mode recursion is handled by the details view.)
                    let mut child_dirs = Vec::new();
                    for child_dir in children.iter().filter(|f| f.is_directory() && ! f.is_all_all) {
                        match child_dir.to_dir() {
                            Ok(d) => child_dirs.push(d),
                            Err(e) => writeln!(io::stderr(), "{}: {}", child_dir.path.display(), e)?,
                        }
                    }

                    self.print_files(Some(&dir), children)?;
                    // NOTE(review): the exit status returned by the recursive
                    // call is discarded here (only I/O errors propagate) —
                    // confirm child-dir failures shouldn’t affect the status.
                    match self.print_dirs(child_dirs, false, false, exit_status) {
                        Ok(_) => (),
                        Err(e) => return Err(e),
                    }
                    continue;
                }
            }

            self.print_files(Some(&dir), children)?;
        }

        Ok(exit_status)
    }

    /// Prints the list of files using whichever view is selected.
    fn print_files(&mut self, dir: Option<&Dir>, files: Vec<File<'_>>) -> io::Result<()> {
        if files.is_empty() {
            return Ok(());
        }

        let theme = &self.theme;
        let View { ref mode, ref file_style, .. } = self.options.view;

        // The grid views need a known console width; without one (e.g. when
        // output is piped) they fall back to lines or details respectively.
        match (mode, self.console_width) {
            (Mode::Grid(ref opts), Some(console_width)) => {
                let filter = &self.options.filter;
                let r = grid::Render { files, theme, file_style, opts, console_width, filter };
                r.render(&mut self.writer)
            }

            (Mode::Grid(_), None) |
            (Mode::Lines, _) => {
                let filter = &self.options.filter;
                let r = lines::Render { files, theme, file_style, filter };
                r.render(&mut self.writer)
            }

            (Mode::Details(ref opts), _) => {
                let filter = &self.options.filter;
                let recurse = self.options.dir_action.recurse_options();
                let git_ignoring = self.options.filter.git_ignore == GitIgnore::CheckAndIgnore;
                let git = self.git.as_ref();
                let r = details::Render { dir, files, theme, file_style, opts, recurse, filter, git_ignoring, git };
                r.render(&mut self.writer)
            }

            (Mode::GridDetails(ref opts), Some(console_width)) => {
                let grid = &opts.grid;
                let details = &opts.details;
                let row_threshold = opts.row_threshold;
                let filter = &self.options.filter;
                let git_ignoring = self.options.filter.git_ignore == GitIgnore::CheckAndIgnore;
                let git = self.git.as_ref();
                let r = grid_details::Render { dir, files, theme, file_style, grid, details, filter, row_threshold, git_ignoring, git, console_width };
                r.render(&mut self.writer)
            }

            (Mode::GridDetails(ref opts), None) => {
                let opts = &opts.to_details_options();
                let filter = &self.options.filter;
                let recurse = self.options.dir_action.recurse_options();
                let git_ignoring = self.options.filter.git_ignore == GitIgnore::CheckAndIgnore;
                let git = self.git.as_ref();
                let r = details::Render { dir, files, theme, file_style, opts, recurse, filter, git_ignoring, git };
                r.render(&mut self.writer)
            }
        }
    }
}
/// The exit statuses that exa reports back to the shell via `process::exit`.
mod exits {

    /// Exit code for when exa runs OK.
    pub const SUCCESS: i32 = 0;

    /// Exit code for when there was at least one I/O error during execution.
    pub const RUNTIME_ERROR: i32 = 1;

    /// Exit code for when the command-line options are invalid.
    pub const OPTIONS_ERROR: i32 = 3;
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/options/dir_action.rs | src/options/dir_action.rs | //! Parsing the options for `DirAction`.
use crate::options::parser::MatchedFlags;
use crate::options::{flags, OptionsError, NumberSource};
use crate::fs::dir_action::{DirAction, RecurseOptions};
impl DirAction {

    /// Determine which action to perform when trying to list a directory.
    /// There are three possible actions, and they overlap somewhat: the
    /// `--tree` flag is another form of recursion, so those two are allowed
    /// to both be present, but the `--list-dirs` flag is used separately.
    pub fn deduce(matches: &MatchedFlags<'_>, can_tree: bool) -> Result<Self, OptionsError> {
        let recurse = matches.has(&flags::RECURSE)?;
        let as_file = matches.has(&flags::LIST_DIRS)?;
        let tree    = matches.has(&flags::TREE)?;

        if matches.is_strict() {
            // Early check for --level when it wouldn’t do anything.
            if ! recurse && ! tree && matches.count(&flags::LEVEL) > 0 {
                return Err(OptionsError::Useless2(&flags::LEVEL, &flags::RECURSE, &flags::TREE));
            }

            // Recursing and treating directories as plain files conflict.
            if recurse && as_file {
                return Err(OptionsError::Conflict(&flags::RECURSE, &flags::LIST_DIRS));
            }

            if tree && as_file {
                return Err(OptionsError::Conflict(&flags::TREE, &flags::LIST_DIRS));
            }
        }

        // Tree is only appropriate in details mode, so this has to
        // examine the View, which should have already been deduced by now.
        let action = if tree && can_tree {
            Self::Recurse(RecurseOptions::deduce(matches, true)?)
        }
        else if recurse {
            Self::Recurse(RecurseOptions::deduce(matches, false)?)
        }
        else if as_file {
            Self::AsFile
        }
        else {
            Self::List
        };

        Ok(action)
    }
}
impl RecurseOptions {

    /// Determine which files should be recursed into, based on the `--level`
    /// flag’s value, and whether the `--tree` flag was passed, which was
    /// determined earlier. The maximum level should be a number, and this
    /// will fail with an `Err` if it isn’t.
    pub fn deduce(matches: &MatchedFlags<'_>, tree: bool) -> Result<Self, OptionsError> {
        // No --level flag means no depth limit.
        let level = match matches.get(&flags::LEVEL)? {
            Some(l) => l,
            None    => return Ok(Self { tree, max_depth: None }),
        };

        let arg_str = level.to_string_lossy();
        match arg_str.parse() {
            Ok(depth) => {
                Ok(Self { tree, max_depth: Some(depth) })
            }
            Err(e) => {
                let source = NumberSource::Arg(&flags::LEVEL);
                Err(OptionsError::FailedParse(arg_str.to_string(), source, e))
            }
        }
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::options::flags;
    use crate::options::parser::Flag;

    // Each test parses the given arguments under the given strictness
    // mode(s) and checks the deduced DirAction. `can_tree` is always true
    // here, as if the details view had been selected.
    macro_rules! test {
        ($name:ident: $type:ident <- $inputs:expr; $stricts:expr => $result:expr) => {
            #[test]
            fn $name() {
                use crate::options::parser::Arg;
                use crate::options::test::parse_for_test;
                use crate::options::test::Strictnesses::*;

                static TEST_ARGS: &[&Arg] = &[&flags::RECURSE, &flags::LIST_DIRS, &flags::TREE, &flags::LEVEL ];
                for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf, true)) {
                    assert_eq!(result, $result);
                }
            }
        };
    }

    // Default behaviour
    test!(empty: DirAction <- []; Both => Ok(DirAction::List));

    // Listing files as directories
    test!(dirs_short: DirAction <- ["-d"]; Both => Ok(DirAction::AsFile));
    test!(dirs_long: DirAction <- ["--list-dirs"]; Both => Ok(DirAction::AsFile));

    // Recursing
    use self::DirAction::Recurse;
    test!(rec_short: DirAction <- ["-R"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
    test!(rec_long: DirAction <- ["--recurse"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
    test!(rec_lim_short: DirAction <- ["-RL4"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(4) })));
    test!(rec_lim_short_2: DirAction <- ["-RL=5"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(5) })));
    test!(rec_lim_long: DirAction <- ["--recurse", "--level", "666"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(666) })));
    test!(rec_lim_long_2: DirAction <- ["--recurse", "--level=0118"]; Both => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(118) })));
    test!(tree: DirAction <- ["--tree"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
    test!(rec_tree: DirAction <- ["--recurse", "--tree"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
    test!(rec_short_tree: DirAction <- ["-TR"]; Both => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));

    // Overriding --list-dirs, --recurse, and --tree
    test!(dirs_recurse: DirAction <- ["--list-dirs", "--recurse"]; Last => Ok(Recurse(RecurseOptions { tree: false, max_depth: None })));
    test!(dirs_tree: DirAction <- ["--list-dirs", "--tree"]; Last => Ok(Recurse(RecurseOptions { tree: true, max_depth: None })));
    test!(just_level: DirAction <- ["--level=4"]; Last => Ok(DirAction::List));

    test!(dirs_recurse_2: DirAction <- ["--list-dirs", "--recurse"]; Complain => Err(OptionsError::Conflict(&flags::RECURSE, &flags::LIST_DIRS)));
    test!(dirs_tree_2: DirAction <- ["--list-dirs", "--tree"]; Complain => Err(OptionsError::Conflict(&flags::TREE, &flags::LIST_DIRS)));
    test!(just_level_2: DirAction <- ["--level=4"]; Complain => Err(OptionsError::Useless2(&flags::LEVEL, &flags::RECURSE, &flags::TREE)));

    // Overriding levels
    test!(overriding_1: DirAction <- ["-RL=6", "-L=7"]; Last => Ok(Recurse(RecurseOptions { tree: false, max_depth: Some(7) })));
    test!(overriding_2: DirAction <- ["-RL=6", "-L=7"]; Complain => Err(OptionsError::Duplicate(Flag::Short(b'L'), Flag::Short(b'L'))));
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/options/vars.rs | src/options/vars.rs | use std::ffi::OsString;
// General variables

/// Environment variable used to colour files, both by their filesystem type
/// (symlink, socket, directory) and their file name or extension (image,
/// video, archive);
pub static LS_COLORS: &str = "LS_COLORS";

/// Environment variable used to override the width of the terminal, in
/// characters.
pub static COLUMNS: &str = "COLUMNS";

/// Environment variable used to set the datetime format.
pub static TIME_STYLE: &str = "TIME_STYLE";

/// Environment variable used to disable colors.
/// See: <https://no-color.org/>
pub static NO_COLOR: &str = "NO_COLOR";

// exa-specific variables

/// Environment variable used to colour exa’s interface when colours are
/// enabled. This includes all the colours that `LS_COLORS` would recognise,
/// overriding them if necessary. It can also contain exa-specific codes.
pub static EXA_COLORS: &str = "EXA_COLORS";

/// Environment variable used to switch on strict argument checking, such as
/// complaining if an argument was specified twice, or if two conflict.
/// This is meant to be so you don’t accidentally introduce the wrong
/// behaviour in a script, rather than for general command-line use.
/// Any non-empty value will turn strict mode on.
pub static EXA_STRICT: &str = "EXA_STRICT";

/// Environment variable used to make exa print out debugging information as
/// it runs. Any non-empty value will turn debug mode on.
pub static EXA_DEBUG: &str = "EXA_DEBUG";

/// Environment variable used to limit the grid-details view
/// (`--grid --long`) so it’s only activated if there’s at least the given
/// number of rows of output.
pub static EXA_GRID_ROWS: &str = "EXA_GRID_ROWS";

/// Environment variable used to specify how many spaces to print between an
/// icon and its file name. Different terminals display icons differently,
/// with 1 space bringing them too close together or 2 spaces putting them too
/// far apart, so this may be necessary depending on how they are shown.
pub static EXA_ICON_SPACING: &str = "EXA_ICON_SPACING";
/// Mockable wrapper for `std::env::var_os`.
pub trait Vars {

    /// Looks up the environment variable with the given name, returning
    /// `None` when it is unset.
    fn get(&self, name: &'static str) -> Option<OsString>;
}
// Test impl that just returns the value it has, whatever name is asked
// for — handy for tests that only care about a single variable.
#[cfg(test)]
impl Vars for Option<OsString> {
    fn get(&self, _name: &'static str) -> Option<OsString> {
        self.clone()
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/options/theme.rs | src/options/theme.rs | use crate::options::{flags, vars, Vars, OptionsError};
use crate::options::parser::MatchedFlags;
use crate::theme::{Options, UseColours, ColourScale, Definitions};
impl Options {

    /// Deduce the full theme configuration: whether colours are used at
    /// all, whether they are scaled, and the colour definitions themselves
    /// (which are only looked up when colours are actually in use).
    pub fn deduce<V: Vars>(matches: &MatchedFlags<'_>, vars: &V) -> Result<Self, OptionsError> {
        let use_colours = UseColours::deduce(matches, vars)?;
        let colour_scale = ColourScale::deduce(matches)?;

        let definitions = match use_colours {
            UseColours::Never => Definitions::default(),
            _                 => Definitions::deduce(vars),
        };

        Ok(Self { use_colours, colour_scale, definitions })
    }
}
impl UseColours {

    /// Works out whether colours should be used, from the `--color` and
    /// `--colour` flags and the `NO_COLOR` environment variable. An
    /// explicit flag always beats the environment.
    fn deduce<V: Vars>(matches: &MatchedFlags<'_>, vars: &V) -> Result<Self, OptionsError> {
        // Any value at all in NO_COLOR flips the default to Never.
        let default_value = if vars.get(vars::NO_COLOR).is_some() { Self::Never }
                            else                                  { Self::Automatic };

        let word = match matches.get_where(|f| f.matches(&flags::COLOR) || f.matches(&flags::COLOUR))? {
            Some(w) => w,
            None    => return Ok(default_value),
        };

        if word == "always" {
            Ok(Self::Always)
        }
        else if word == "auto" || word == "automatic" {
            Ok(Self::Automatic)
        }
        else if word == "never" {
            Ok(Self::Never)
        }
        else {
            // Both spellings report the error against --color.
            Err(OptionsError::BadArgument(&flags::COLOR, word.into()))
        }
    }
}
impl ColourScale {

    /// Use a gradient colour scale when either spelling of the
    /// `--colour-scale` flag is present; otherwise keep colours fixed.
    fn deduce(matches: &MatchedFlags<'_>) -> Result<Self, OptionsError> {
        let flagged = matches.has_where(|f| f.matches(&flags::COLOR_SCALE) || f.matches(&flags::COLOUR_SCALE))?;

        match flagged {
            Some(_) => Ok(Self::Gradient),
            None    => Ok(Self::Fixed),
        }
    }
}
impl Definitions {

    /// Reads the colour definitions from the `LS_COLORS` and `EXA_COLORS`
    /// environment variables, converting each (lossily) to a `String`.
    fn deduce<V: Vars>(vars: &V) -> Self {
        let ls  = vars.get(vars::LS_COLORS) .map(|e| e.to_string_lossy().into_owned());
        let exa = vars.get(vars::EXA_COLORS).map(|e| e.to_string_lossy().into_owned());
        Self { ls, exa }
    }
}
#[cfg(test)]
mod terminal_test {
    use super::*;
    use std::ffi::OsString;
    use crate::options::flags;
    use crate::options::parser::{Flag, Arg};
    use crate::options::test::parse_for_test;
    use crate::options::test::Strictnesses::*;

    static TEST_ARGS: &[&Arg] = &[ &flags::COLOR, &flags::COLOUR,
                                   &flags::COLOR_SCALE, &flags::COLOUR_SCALE, ];

    // Four macro arms: with/without an environment value, and expecting
    // Ok(...) or (the `err` arms) a specific error after unwrap_err.
    macro_rules! test {
        ($name:ident: $type:ident <- $inputs:expr; $stricts:expr => $result:expr) => {
            #[test]
            fn $name() {
                for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf)) {
                    assert_eq!(result, $result);
                }
            }
        };
        ($name:ident: $type:ident <- $inputs:expr, $env:expr; $stricts:expr => $result:expr) => {
            #[test]
            fn $name() {
                let env = $env;
                for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf, &env)) {
                    assert_eq!(result, $result);
                }
            }
        };
        ($name:ident: $type:ident <- $inputs:expr; $stricts:expr => err $result:expr) => {
            #[test]
            fn $name() {
                for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf)) {
                    assert_eq!(result.unwrap_err(), $result);
                }
            }
        };
        ($name:ident: $type:ident <- $inputs:expr, $env:expr; $stricts:expr => err $result:expr) => {
            #[test]
            fn $name() {
                let env = $env;
                for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf, &env)) {
                    assert_eq!(result.unwrap_err(), $result);
                }
            }
        };
    }

    // A fixed set of environment variable values for the tests; an empty
    // string stands for “unset”.
    struct MockVars {
        ls: &'static str,
        exa: &'static str,
        no_color: &'static str,
    }

    impl MockVars {
        fn empty() -> MockVars {
            MockVars {
                ls: "",
                exa: "",
                no_color: "",
            }
        }
        fn with_no_color() -> MockVars {
            MockVars {
                ls: "",
                exa: "",
                no_color: "true",
            }
        }
    }

    // Test impl that just returns the value it has.
    impl Vars for MockVars {
        fn get(&self, name: &'static str) -> Option<OsString> {
            if name == vars::LS_COLORS && ! self.ls.is_empty() {
                Some(OsString::from(self.ls.clone()))
            }
            else if name == vars::EXA_COLORS && ! self.exa.is_empty() {
                Some(OsString::from(self.exa.clone()))
            }
            else if name == vars::NO_COLOR && ! self.no_color.is_empty() {
                Some(OsString::from(self.no_color.clone()))
            }
            else {
                None
            }
        }
    }

    // Default
    test!(empty: UseColours <- [], MockVars::empty(); Both => Ok(UseColours::Automatic));
    test!(empty_with_no_color: UseColours <- [], MockVars::with_no_color(); Both => Ok(UseColours::Never));

    // --colour
    test!(u_always: UseColours <- ["--colour=always"], MockVars::empty(); Both => Ok(UseColours::Always));
    test!(u_auto: UseColours <- ["--colour", "auto"], MockVars::empty(); Both => Ok(UseColours::Automatic));
    test!(u_never: UseColours <- ["--colour=never"], MockVars::empty(); Both => Ok(UseColours::Never));

    // --color
    test!(no_u_always: UseColours <- ["--color", "always"], MockVars::empty(); Both => Ok(UseColours::Always));
    test!(no_u_auto: UseColours <- ["--color=auto"], MockVars::empty(); Both => Ok(UseColours::Automatic));
    test!(no_u_never: UseColours <- ["--color", "never"], MockVars::empty(); Both => Ok(UseColours::Never));

    // Errors
    test!(no_u_error: UseColours <- ["--color=upstream"], MockVars::empty(); Both => err OptionsError::BadArgument(&flags::COLOR, OsString::from("upstream"))); // the error is for --color
    test!(u_error: UseColours <- ["--colour=lovers"], MockVars::empty(); Both => err OptionsError::BadArgument(&flags::COLOR, OsString::from("lovers"))); // and so is this one!

    // Overriding
    test!(overridden_1: UseColours <- ["--colour=auto", "--colour=never"], MockVars::empty(); Last => Ok(UseColours::Never));
    test!(overridden_2: UseColours <- ["--color=auto", "--colour=never"], MockVars::empty(); Last => Ok(UseColours::Never));
    test!(overridden_3: UseColours <- ["--colour=auto", "--color=never"], MockVars::empty(); Last => Ok(UseColours::Never));
    test!(overridden_4: UseColours <- ["--color=auto", "--color=never"], MockVars::empty(); Last => Ok(UseColours::Never));

    test!(overridden_5: UseColours <- ["--colour=auto", "--colour=never"], MockVars::empty(); Complain => err OptionsError::Duplicate(Flag::Long("colour"), Flag::Long("colour")));
    test!(overridden_6: UseColours <- ["--color=auto", "--colour=never"], MockVars::empty(); Complain => err OptionsError::Duplicate(Flag::Long("color"), Flag::Long("colour")));
    test!(overridden_7: UseColours <- ["--colour=auto", "--color=never"], MockVars::empty(); Complain => err OptionsError::Duplicate(Flag::Long("colour"), Flag::Long("color")));
    test!(overridden_8: UseColours <- ["--color=auto", "--color=never"], MockVars::empty(); Complain => err OptionsError::Duplicate(Flag::Long("color"), Flag::Long("color")));

    test!(scale_1: ColourScale <- ["--color-scale", "--colour-scale"]; Last => Ok(ColourScale::Gradient));
    test!(scale_2: ColourScale <- ["--color-scale", ]; Last => Ok(ColourScale::Gradient));
    test!(scale_3: ColourScale <- [ "--colour-scale"]; Last => Ok(ColourScale::Gradient));
    test!(scale_4: ColourScale <- [ ]; Last => Ok(ColourScale::Fixed));

    test!(scale_5: ColourScale <- ["--color-scale", "--colour-scale"]; Complain => err OptionsError::Duplicate(Flag::Long("color-scale"), Flag::Long("colour-scale")));
    test!(scale_6: ColourScale <- ["--color-scale", ]; Complain => Ok(ColourScale::Gradient));
    test!(scale_7: ColourScale <- [ "--colour-scale"]; Complain => Ok(ColourScale::Gradient));
    test!(scale_8: ColourScale <- [ ]; Complain => Ok(ColourScale::Fixed));
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/options/version.rs | src/options/version.rs | //! Printing the version string.
//!
//! The code that works out which string to print is done in `build.rs`.
use std::fmt;
use crate::options::flags;
use crate::options::parser::MatchedFlags;
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub struct VersionString;
// There were options here once, but there aren’t anymore!

impl VersionString {

    /// Determines how to show the version, if at all, based on the user’s
    /// command-line arguments. This one works backwards from the other
    /// ‘deduce’ functions, returning Err if help needs to be shown.
    ///
    /// Like --help, this doesn’t check for errors.
    pub fn deduce(matches: &MatchedFlags<'_>) -> Option<Self> {
        match matches.count(&flags::VERSION) {
            0 => None,
            _ => Some(Self),
        }
    }
}
impl fmt::Display for VersionString {

    /// Writes the version text that `build.rs` generated at compile time
    /// and that was baked into the binary with `include_str!`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        f.write_str(include_str!(concat!(env!("OUT_DIR"), "/version_string.txt")))
    }
}
#[cfg(test)]
mod test {
    use crate::options::{Options, OptionsResult};
    use std::ffi::OsStr;

    /// `--version` on its own produces a Version result.
    #[test]
    fn version() {
        let args = vec![ OsStr::new("--version") ];
        let opts = Options::parse(args, &None);
        assert!(matches!(opts, OptionsResult::Version(_)));
    }

    /// `--version` still wins when file arguments are also present.
    #[test]
    fn version_with_file() {
        let args = vec![ OsStr::new("--version"), OsStr::new("me") ];
        let opts = Options::parse(args, &None);
        assert!(matches!(opts, OptionsResult::Version(_)));
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/options/file_name.rs | src/options/file_name.rs | use crate::options::{flags, OptionsError, NumberSource};
use crate::options::parser::MatchedFlags;
use crate::options::vars::{self, Vars};
use crate::output::file_name::{Options, Classify, ShowIcons};
impl Options {

    /// Assembles the file-name display options — classification suffixes
    /// and icons — from the parsed flags and environment variables.
    pub fn deduce<V: Vars>(matches: &MatchedFlags<'_>, vars: &V) -> Result<Self, OptionsError> {
        Ok(Self {
            classify: Classify::deduce(matches)?,
            show_icons: ShowIcons::deduce(matches, vars)?,
        })
    }
}
impl Classify {

    /// Adds file-type indicator characters when `--classify` was given.
    fn deduce(matches: &MatchedFlags<'_>) -> Result<Self, OptionsError> {
        match matches.has(&flags::CLASSIFY)? {
            true  => Ok(Self::AddFileIndicators),
            false => Ok(Self::JustFilenames),
        }
    }
}
impl ShowIcons {

    /// Works out whether icons should be shown, and how many columns of
    /// spacing the icon gets.
    ///
    /// Icons are off unless `--icons` is given without `--no-icons`. The
    /// spacing defaults to one column, overridable via the
    /// `EXA_ICON_SPACING` environment variable.
    pub fn deduce<V: Vars>(matches: &MatchedFlags<'_>, vars: &V) -> Result<Self, OptionsError> {
        // Keep the flag checks in this order so strict-mode duplicate
        // errors surface the same way they always have.
        let icons_wanted = ! matches.has(&flags::NO_ICONS)? && matches.has(&flags::ICONS)?;
        if ! icons_wanted {
            return Ok(Self::Off);
        }

        match vars.get(vars::EXA_ICON_SPACING).and_then(|s| s.into_string().ok()) {
            None => Ok(Self::On(1)),
            Some(columns) => {
                match columns.parse() {
                    Ok(width) => Ok(Self::On(width)),
                    Err(e) => {
                        let source = NumberSource::Env(vars::EXA_ICON_SPACING);
                        Err(OptionsError::FailedParse(columns, source, e))
                    }
                }
            }
        }
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/options/parser.rs | src/options/parser.rs | //! A general parser for command-line options.
//!
//! exa uses its own hand-rolled parser for command-line options. It supports
//! the following syntax:
//!
//! - Long options: `--inode`, `--grid`
//! - Long options with values: `--sort size`, `--level=4`
//! - Short options: `-i`, `-G`
//! - Short options with values: `-ssize`, `-L=4`
//!
//! These values can be mixed and matched: `exa -lssize --grid`. If you’ve used
//! other command-line programs, then hopefully it’ll work much like them.
//!
//! Because exa already has its own files for the help text, shell completions,
//! man page, and readme, it can get away with having the options parser do
//! very little: all it really needs to do is parse a slice of strings.
//!
//!
//! ## UTF-8 and `OsStr`
//!
//! The parser uses `OsStr` as its string type. This is necessary for exa to
//! list files that have invalid UTF-8 in their names: by treating file paths
//! as bytes with no encoding, a file can be specified on the command-line and
//! be looked up without having to be encoded into a `str` first.
//!
//! It also avoids the overhead of checking for invalid UTF-8 when parsing
//! command-line options, as all the options and their values (such as
//! `--sort size`) are guaranteed to just be 8-bit ASCII.
use std::ffi::{OsStr, OsString};
use std::fmt;
use crate::options::error::{OptionsError, Choices};
/// A **short argument** is a single ASCII character.
/// (The parser works on bytes, so multi-byte short flags aren’t possible.)
pub type ShortArg = u8;

/// A **long argument** is a string. This can be a UTF-8 string, even though
/// the arguments will all be unchecked `OsString` values, because we don’t
/// actually store the user’s input after it’s been matched to a flag, we just
/// store which flag it was.
pub type LongArg = &'static str;

/// A **list of values** that an option can have, to be displayed when the
/// user enters an invalid one or skips it.
///
/// This is literally just help text, and won’t be used to validate a value to
/// see if it’s correct.
pub type Values = &'static [&'static str];
/// A **flag** is either of the two argument types, because they have to
/// be in the same array together.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum Flag {
    Short(ShortArg),
    Long(LongArg),
}

impl Flag {

    /// Whether this flag is the short or the long form of the given
    /// argument definition.
    pub fn matches(&self, arg: &Arg) -> bool {
        match self {
            Self::Short(short) => Some(*short) == arg.short,
            Self::Long(long)   => *long == arg.long,
        }
    }
}

impl fmt::Display for Flag {

    /// Shows the flag the way the user would type it: `-x` or `--example`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Short(short) => write!(f, "-{}", char::from(*short)),
            Self::Long(long)   => write!(f, "--{}", long),
        }
    }
}
/// Whether redundant arguments should be considered a problem.
/// Picked once per parse and consulted by `MatchedFlags` lookups.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum Strictness {

    /// Throw an error when an argument doesn’t do anything, either because
    /// it requires another argument to be specified, or because two conflict.
    ComplainAboutRedundantArguments,

    /// Search the arguments list back-to-front, giving ones specified later
    /// in the list priority over earlier ones.
    UseLastArguments,
}
/// Whether a flag takes a value. This is applicable to both long and short
/// arguments.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum TakesValue {

    /// This flag has to be followed by a value.
    /// If there’s a fixed set of possible values, they can be printed out
    /// with the error text.
    Necessary(Option<Values>),

    /// This flag will throw an error if there’s a value after it.
    Forbidden,

    /// This flag may be followed by a value to override its defaults
    Optional(Option<Values>),
}
/// An **argument** can be matched by one of the user’s input strings.
/// These are the static definitions the parser looks input up against.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub struct Arg {

    /// The short argument that matches it, if any.
    pub short: Option<ShortArg>,

    /// The long argument that matches it. This is non-optional; all flags
    /// should at least have a descriptive long name.
    pub long: LongArg,

    /// Whether this flag takes a value or not.
    pub takes_value: TakesValue,
}
impl fmt::Display for Arg {

    /// Shows the argument as “--long (-s)”, or just “--long” when it has
    /// no short form.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.short {
            Some(short) => write!(f, "--{} (-{})", self.long, short as char),
            None        => write!(f, "--{}", self.long),
        }
    }
}
/// Literally just several args.
/// The complete table of arguments the program understands; the parser
/// looks every piece of user input up in here.
#[derive(PartialEq, Eq, Debug)]
pub struct Args(pub &'static [&'static Arg]);
impl Args {

    /// Iterates over the given list of command-line arguments and parses
    /// them into a list of matched flags and free strings.
    ///
    /// Returns a `ParseError` as soon as an unknown flag is encountered,
    /// or when a flag is given a value it can’t take (or denied one it
    /// needs). The `strictness` is only stored in the result here — the
    /// duplicate checks happen later, in `MatchedFlags`.
    pub fn parse<'args, I>(&self, inputs: I, strictness: Strictness) -> Result<Matches<'args>, ParseError>
    where I: IntoIterator<Item = &'args OsStr>
    {
        let mut parsing = true;

        // The results that get built up.
        let mut result_flags = Vec::new();
        let mut frees: Vec<&OsStr> = Vec::new();

        // Iterate over the inputs with “while let” because we need to advance
        // the iterator manually whenever an argument that takes a value
        // doesn’t have one in its string so it needs the next one.
        let mut inputs = inputs.into_iter();
        while let Some(arg) = inputs.next() {
            let bytes = os_str_to_bytes(arg);

            // Stop parsing if one of the arguments is the literal string “--”.
            // This allows a file named “--arg” to be specified by passing in
            // the pair “-- --arg”, without it getting matched as a flag that
            // doesn’t exist.
            if ! parsing {
                frees.push(arg)
            }
            else if arg == "--" {
                parsing = false;
            }

            // If the string starts with *two* dashes then it’s a long argument.
            else if bytes.starts_with(b"--") {
                let long_arg_name = bytes_to_os_str(&bytes[2..]);

                // If there’s an equals in it, then the string before the
                // equals will be the flag’s name, and the string after it
                // will be its value.
                if let Some((before, after)) = split_on_equals(long_arg_name) {
                    let arg = self.lookup_long(before)?;
                    let flag = Flag::Long(arg.long);
                    match arg.takes_value {
                        TakesValue::Necessary(_) |
                        TakesValue::Optional(_)  => result_flags.push((flag, Some(after))),
                        TakesValue::Forbidden    => return Err(ParseError::ForbiddenValue { flag }),
                    }
                }

                // If there’s no equals, then the entire string (apart from
                // the dashes) is the argument name.
                else {
                    let arg = self.lookup_long(long_arg_name)?;
                    let flag = Flag::Long(arg.long);
                    match arg.takes_value {
                        TakesValue::Forbidden => {
                            result_flags.push((flag, None))
                        }
                        TakesValue::Necessary(values) => {
                            if let Some(next_arg) = inputs.next() {
                                result_flags.push((flag, Some(next_arg)));
                            }
                            else {
                                return Err(ParseError::NeedsValue { flag, values })
                            }
                        }
                        // NOTE(review): an Optional flag greedily consumes the
                        // *next* input as its value, even if that input was
                        // meant as a file name — confirm this is intended.
                        TakesValue::Optional(_) => {
                            if let Some(next_arg) = inputs.next() {
                                result_flags.push((flag, Some(next_arg)));
                            }
                            else {
                                result_flags.push((flag, None));
                            }
                        }
                    }
                }
            }

            // If the string starts with *one* dash then it’s one or more
            // short arguments.
            else if bytes.starts_with(b"-") && arg != "-" {
                let short_arg = bytes_to_os_str(&bytes[1..]);

                // If there’s an equals in it, then the argument immediately
                // before the equals was the one that has the value, with the
                // others (if any) as value-less short ones.
                //
                //   -x=abc         =>  ‘x=abc’
                //   -abcdx=fgh     =>  ‘a’, ‘b’, ‘c’, ‘d’, ‘x=fgh’
                //   -x=            =>  error
                //   -abcdx=        =>  error
                //
                // There’s no way to give two values in a cluster like this:
                // it’s an error if any of the first set of arguments actually
                // takes a value.
                if let Some((before, after)) = split_on_equals(short_arg) {
                    // `before` is non-empty (split_on_equals guarantees it),
                    // so split_last can’t fail.
                    let (arg_with_value, other_args) = os_str_to_bytes(before).split_last().unwrap();

                    // Process the characters immediately following the dash...
                    for byte in other_args {
                        let arg = self.lookup_short(*byte)?;
                        let flag = Flag::Short(*byte);
                        match arg.takes_value {
                            TakesValue::Forbidden |
                            TakesValue::Optional(_) => {
                                result_flags.push((flag, None));
                            }
                            TakesValue::Necessary(values) => {
                                return Err(ParseError::NeedsValue { flag, values });
                            }
                        }
                    }

                    // ...then the last one and the value after the equals.
                    let arg = self.lookup_short(*arg_with_value)?;
                    let flag = Flag::Short(arg.short.unwrap());
                    match arg.takes_value {
                        TakesValue::Necessary(_) |
                        TakesValue::Optional(_) => {
                            result_flags.push((flag, Some(after)));
                        }
                        TakesValue::Forbidden => {
                            return Err(ParseError::ForbiddenValue { flag });
                        }
                    }
                }

                // If there’s no equals, then every character is parsed as
                // its own short argument. However, if any of the arguments
                // takes a value, then the *rest* of the string is used as
                // its value, and if there’s no rest of the string, then it
                // uses the next one in the iterator.
                //
                //   -a        =>  ‘a’
                //   -abc      =>  ‘a’, ‘b’, ‘c’
                //   -abxdef   =>  ‘a’, ‘b’, ‘x=def’
                //   -abx def  =>  ‘a’, ‘b’, ‘x=def’
                //   -abx      =>  error
                //
                else {
                    for (index, byte) in bytes.iter().enumerate().skip(1) {
                        let arg = self.lookup_short(*byte)?;
                        let flag = Flag::Short(*byte);
                        match arg.takes_value {
                            TakesValue::Forbidden => {
                                result_flags.push((flag, None))
                            }
                            TakesValue::Necessary(values) |
                            TakesValue::Optional(values) => {
                                if index < bytes.len() - 1 {
                                    // Take the rest of the cluster as the
                                    // value and stop processing it.
                                    let remnants = &bytes[index+1 ..];
                                    result_flags.push((flag, Some(bytes_to_os_str(remnants))));
                                    break;
                                }
                                else if let Some(next_arg) = inputs.next() {
                                    result_flags.push((flag, Some(next_arg)));
                                }
                                else {
                                    // Inner match distinguishes Necessary
                                    // (error) from Optional (no value).
                                    match arg.takes_value {
                                        TakesValue::Forbidden => {
                                            unreachable!()
                                        }
                                        TakesValue::Necessary(_) => {
                                            return Err(ParseError::NeedsValue { flag, values });
                                        }
                                        TakesValue::Optional(_) => {
                                            result_flags.push((flag, None));
                                        }
                                    }
                                }
                            }
                        }
                    }
                }
            }

            // Otherwise, it’s a free string, usually a file name.
            else {
                frees.push(arg)
            }
        }

        Ok(Matches { frees, flags: MatchedFlags { flags: result_flags, strictness } })
    }

    /// Looks up the definition for a single short flag byte.
    fn lookup_short(&self, short: ShortArg) -> Result<&Arg, ParseError> {
        match self.0.iter().find(|arg| arg.short == Some(short)) {
            Some(arg) => Ok(arg),
            None => Err(ParseError::UnknownShortArgument { attempt: short })
        }
    }

    /// Looks up the definition for a long flag name.
    fn lookup_long<'b>(&self, long: &'b OsStr) -> Result<&Arg, ParseError> {
        match self.0.iter().find(|arg| arg.long == long) {
            Some(arg) => Ok(arg),
            None => Err(ParseError::UnknownArgument { attempt: long.to_os_string() })
        }
    }
}
/// The **matches** are the result of parsing the user’s command-line strings.
#[derive(PartialEq, Eq, Debug)]
pub struct Matches<'args> {

    /// The flags that were parsed from the user’s input.
    pub flags: MatchedFlags<'args>,

    /// All the strings that weren’t matched as arguments, as well as anything
    /// after the special “--” string.
    pub frees: Vec<&'args OsStr>,
}
/// The flags that were given on the command line, plus the strictness
/// policy that decides how duplicates are treated when they’re queried.
#[derive(PartialEq, Eq, Debug)]
pub struct MatchedFlags<'args> {

    /// The individual flags from the user’s input, in the order they were
    /// originally given.
    ///
    /// Long and short arguments need to be kept in the same vector because
    /// we usually want the one nearest the end to count, and to know this,
    /// we need to know where they are in relation to one another.
    flags: Vec<(Flag, Option<&'args OsStr>)>,

    /// Whether to check for duplicate or redundant arguments.
    strictness: Strictness,
}
impl<'a> MatchedFlags<'a> {

    /// Whether the given argument was specified.
    /// Returns `true` if it was, `false` if it wasn’t, and an error in
    /// strict mode if it was specified more than once.
    pub fn has(&self, arg: &'static Arg) -> Result<bool, OptionsError> {
        Ok(self.has_where(|flag| flag.matches(arg))?.is_some())
    }

    /// Returns the first found argument that satisfies the predicate, or
    /// nothing if none is found, or an error in strict mode if multiple
    /// arguments satisfy the predicate.
    ///
    /// You’ll have to test the resulting flag to see which argument it was.
    pub fn has_where<P>(&self, predicate: P) -> Result<Option<&Flag>, OptionsError>
    where P: Fn(&Flag) -> bool {
        if ! self.is_strict() {
            return Ok(self.has_where_any(predicate));
        }

        // In strict mode, gather every matching value-less flag so that
        // duplicates can be reported as an error.
        let matched = self.flags.iter()
            .filter(|(flag, value)| value.is_none() && predicate(flag))
            .map(|(flag, _)| flag)
            .collect::<Vec<_>>();

        match matched.as_slice() {
            []                    => Ok(None),
            [only]                => Ok(Some(*only)),
            [first, second, ..]   => Err(OptionsError::Duplicate(**first, **second)),
        }
    }

    /// Returns the first found argument that satisfies the predicate, or
    /// nothing if none is found, with strict mode having no effect.
    ///
    /// You’ll have to test the resulting flag to see which argument it was.
    pub fn has_where_any<P>(&self, predicate: P) -> Option<&Flag>
    where P: Fn(&Flag) -> bool {
        // Walk backwards so the most recently-given flag wins.
        for (flag, value) in self.flags.iter().rev() {
            if value.is_none() && predicate(flag) {
                return Some(flag);
            }
        }
        None
    }

    // This code could probably be better.
    // Both ‘has’ and ‘get’ immediately begin with a conditional, which makes
    // me think the functionality could be moved to inside Strictness.

    /// Returns the value of the given argument if it was specified, nothing
    /// if it wasn’t, and an error in strict mode if it was specified more
    /// than once.
    pub fn get(&self, arg: &'static Arg) -> Result<Option<&OsStr>, OptionsError> {
        self.get_where(|flag| flag.matches(arg))
    }

    /// Returns the value of the argument that matches the predicate if it
    /// was specified, nothing if it wasn’t, and an error in strict mode if
    /// multiple arguments matched the predicate.
    ///
    /// It’s not possible to tell which flag the value belonged to from this.
    pub fn get_where<P>(&self, predicate: P) -> Result<Option<&OsStr>, OptionsError>
    where P: Fn(&Flag) -> bool {
        if ! self.is_strict() {
            // Last one given wins.
            return Ok(self.flags.iter().rev()
                .filter(|(flag, value)| value.is_some() && predicate(flag))
                .find_map(|(_, value)| *value));
        }

        let matched = self.flags.iter()
            .filter(|(flag, value)| value.is_some() && predicate(flag))
            .collect::<Vec<_>>();

        match matched.as_slice() {
            []                          => Ok(None),
            [(_, value)]                => Ok(*value),
            [(f1, _), (f2, _), ..]      => Err(OptionsError::Duplicate(*f1, *f2)),
        }
    }

    // It’s annoying that ‘has’ and ‘get’ won’t work when accidentally given
    // flags that do/don’t take values, but this should be caught by tests.

    /// Counts the number of occurrences of the given argument, even in
    /// strict mode.
    pub fn count(&self, arg: &Arg) -> usize {
        self.flags.iter().filter(|(flag, _)| flag.matches(arg)).count()
    }

    /// Checks whether strict mode is on. This is usually done from within
    /// ‘has’ and ‘get’, but it’s available in an emergency.
    pub fn is_strict(&self) -> bool {
        matches!(self.strictness, Strictness::ComplainAboutRedundantArguments)
    }
}
/// A problem with the user’s input that meant it couldn’t be parsed into a
/// coherent list of arguments.
#[derive(PartialEq, Eq, Debug)]
pub enum ParseError {

    /// A flag that has to take a value was not given one.
    NeedsValue { flag: Flag, values: Option<Values> },

    /// A flag that can’t take a value *was* given one.
    ForbiddenValue { flag: Flag },

    /// A short argument, either alone or in a cluster, was not
    /// recognised by the program.
    UnknownShortArgument { attempt: ShortArg },

    /// A long argument was not recognised by the program.
    /// We don’t have a known &str version of the flag, so
    /// this may not be valid UTF-8.
    UnknownArgument { attempt: OsString },
}
impl fmt::Display for ParseError {

    /// Writes the user-facing message for each kind of parse failure,
    /// always naming the offending flag.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::ForbiddenValue { flag }               => write!(f, "Flag {} cannot take a value", flag),
            Self::NeedsValue { flag, values: None }     => write!(f, "Flag {} needs a value", flag),
            Self::NeedsValue { flag, values: Some(cs) } => write!(f, "Flag {} needs a value ({})", flag, Choices(cs)),
            Self::UnknownShortArgument { attempt }      => write!(f, "Unknown argument -{}", char::from(*attempt)),
            Self::UnknownArgument { attempt }           => write!(f, "Unknown argument --{}", attempt.to_string_lossy()),
        }
    }
}
/// Views an `OsStr` as its raw bytes.
///
/// On Unix, paths and arguments are byte strings with no enforced
/// encoding, so this is a direct, infallible view.
#[cfg(unix)]
fn os_str_to_bytes(s: &OsStr) -> &[u8] {
    use std::os::unix::ffi::OsStrExt;
    s.as_bytes()
}

/// Converts raw bytes back into an `OsStr` (infallible on Unix).
#[cfg(unix)]
fn bytes_to_os_str(b: &[u8]) -> &OsStr {
    use std::os::unix::ffi::OsStrExt;
    OsStr::from_bytes(b)
}

/// Views an `OsStr` as bytes on Windows.
///
/// NOTE(review): this panics on arguments that aren’t valid UTF-8. The
/// module docs say flags are guaranteed ASCII, but free arguments also
/// pass through here — confirm that’s acceptable on Windows.
#[cfg(windows)]
fn os_str_to_bytes(s: &OsStr) -> &[u8] {
    s.to_str().unwrap().as_bytes()
}

/// Converts UTF-8 bytes back into an `OsStr` on Windows (panics on
/// invalid UTF-8; see the note on `os_str_to_bytes`).
#[cfg(windows)]
fn bytes_to_os_str(b: &[u8]) -> &OsStr {
    use std::str;
    OsStr::new(str::from_utf8(b).unwrap())
}

/// Splits a string on its first `=` character, returning the two
/// substrings on either side. Returns `None` if there’s no equals sign,
/// or if the name or the value would be empty.
fn split_on_equals(input: &OsStr) -> Option<(&OsStr, &OsStr)> {
    let bytes = os_str_to_bytes(input);
    let index = bytes.iter().position(|elem| *elem == b'=')?;
    let (before, after) = bytes.split_at(index);

    // `after` still begins with the `=` itself, hence the length-2 check
    // and the `[1..]` slice below.
    if before.is_empty() || after.len() < 2 {
        return None;
    }
    Some((bytes_to_os_str(before), bytes_to_os_str(&after[1..])))
}
#[cfg(test)]
mod split_test {
    use super::split_on_equals;
    use std::ffi::{OsStr, OsString};

    // Generates one test per case: either `input => None`, or
    // `input => before, after` for a successful split.
    macro_rules! test_split {
        ($name:ident: $input:expr => None) => {
            #[test]
            fn $name() {
                assert_eq!(split_on_equals(&OsString::from($input)),
                           None);
            }
        };

        ($name:ident: $input:expr => $before:expr, $after:expr) => {
            #[test]
            fn $name() {
                assert_eq!(split_on_equals(&OsString::from($input)),
                           Some((OsStr::new($before), OsStr::new($after))));
            }
        };
    }

    test_split!(empty:   ""        => None);
    test_split!(letter:  "a"       => None);

    test_split!(just:    "="       => None);
    test_split!(intro:   "=bbb"    => None);
    test_split!(denou:   "aaa="    => None);
    test_split!(equals:  "aaa=bbb" => "aaa", "bbb");

    test_split!(sort:    "--sort=size"     => "--sort", "size");
    test_split!(more:    "this=that=other" => "this",   "that=other");
}
#[cfg(test)]
mod parse_test {
    use super::*;

    // Generates one test per case. The first form expects a successful
    // parse into frees and flags; the second expects a `ParseError`.
    macro_rules! test {
        ($name:ident: $inputs:expr => frees: $frees:expr, flags: $flags:expr) => {
            #[test]
            fn $name() {
                let inputs: &[&'static str] = $inputs.as_ref();
                let inputs = inputs.iter().map(OsStr::new);

                let frees: &[&'static str] = $frees.as_ref();
                let frees = frees.iter().map(OsStr::new).collect();

                let flags = <[_]>::into_vec(Box::new($flags));
                let strictness = Strictness::UseLastArguments;  // this isn’t even used

                let got = Args(TEST_ARGS).parse(inputs, strictness);
                let flags = MatchedFlags { flags, strictness };
                let expected = Ok(Matches { frees, flags });
                assert_eq!(got, expected);
            }
        };

        ($name:ident: $inputs:expr => error $error:expr) => {
            #[test]
            fn $name() {
                use self::ParseError::*;

                let inputs = $inputs.iter().map(OsStr::new);
                let strictness = Strictness::UseLastArguments;  // this isn’t even used

                let got = Args(TEST_ARGS).parse(inputs, strictness);
                assert_eq!(got, Err($error));
            }
        };
    }

    const SUGGESTIONS: Values = &[ "example" ];

    // A miniature argument table covering every `TakesValue` variant.
    static TEST_ARGS: &[&Arg] = &[
        &Arg { short: Some(b'l'), long: "long",    takes_value: TakesValue::Forbidden },
        &Arg { short: Some(b'v'), long: "verbose", takes_value: TakesValue::Forbidden },
        &Arg { short: Some(b'c'), long: "count",   takes_value: TakesValue::Necessary(None) },
        &Arg { short: Some(b't'), long: "type",    takes_value: TakesValue::Necessary(Some(SUGGESTIONS)) }
    ];

    // Just filenames
    test!(empty:       []       => frees: [],        flags: []);
    test!(one_arg:     ["exa"]  => frees: [ "exa" ], flags: []);

    // Dashes and double dashes
    test!(one_dash:    ["-"]             => frees: [ "-" ],      flags: []);
    test!(two_dashes:  ["--"]            => frees: [],           flags: []);
    test!(two_file:    ["--", "file"]    => frees: [ "file" ],   flags: []);
    test!(two_arg_l:   ["--", "--long"]  => frees: [ "--long" ], flags: []);
    test!(two_arg_s:   ["--", "-l"]      => frees: [ "-l" ],     flags: []);

    // Long args
    test!(long:        ["--long"]              => frees: [],      flags: [ (Flag::Long("long"), None) ]);
    test!(long_then:   ["--long", "4"]         => frees: [ "4" ], flags: [ (Flag::Long("long"), None) ]);
    test!(long_two:    ["--long", "--verbose"] => frees: [],      flags: [ (Flag::Long("long"), None), (Flag::Long("verbose"), None) ]);

    // Long args with values
    test!(bad_equals:  ["--long=equals"]  => error ForbiddenValue { flag: Flag::Long("long") });
    test!(no_arg:      ["--count"]        => error NeedsValue { flag: Flag::Long("count"), values: None });
    test!(arg_equals:  ["--count=4"]      => frees: [], flags: [ (Flag::Long("count"), Some(OsStr::new("4"))) ]);
    test!(arg_then:    ["--count", "4"]   => frees: [], flags: [ (Flag::Long("count"), Some(OsStr::new("4"))) ]);

    // Long args with values and suggestions
    test!(no_arg_s:     ["--type"]        => error NeedsValue { flag: Flag::Long("type"), values: Some(SUGGESTIONS) });
    test!(arg_equals_s: ["--type=exa"]    => frees: [], flags: [ (Flag::Long("type"), Some(OsStr::new("exa"))) ]);
    test!(arg_then_s:   ["--type", "exa"] => frees: [], flags: [ (Flag::Long("type"), Some(OsStr::new("exa"))) ]);

    // Short args
    test!(short:       ["-l"]            => frees: [],      flags: [ (Flag::Short(b'l'), None) ]);
    test!(short_then:  ["-l", "4"]       => frees: [ "4" ], flags: [ (Flag::Short(b'l'), None) ]);
    test!(short_two:   ["-lv"]           => frees: [],      flags: [ (Flag::Short(b'l'), None), (Flag::Short(b'v'), None) ]);
    test!(mixed:       ["-v", "--long"]  => frees: [],      flags: [ (Flag::Short(b'v'), None), (Flag::Long("long"), None) ]);

    // Short args with values
    test!(bad_short:          ["-l=equals"]   => error ForbiddenValue { flag: Flag::Short(b'l') });
    test!(short_none:         ["-c"]          => error NeedsValue { flag: Flag::Short(b'c'), values: None });
    test!(short_arg_eq:       ["-c=4"]        => frees: [], flags: [(Flag::Short(b'c'), Some(OsStr::new("4"))) ]);
    test!(short_arg_then:     ["-c", "4"]     => frees: [], flags: [(Flag::Short(b'c'), Some(OsStr::new("4"))) ]);
    test!(short_two_together: ["-lctwo"]      => frees: [], flags: [(Flag::Short(b'l'), None), (Flag::Short(b'c'), Some(OsStr::new("two"))) ]);
    test!(short_two_equals:   ["-lc=two"]     => frees: [], flags: [(Flag::Short(b'l'), None), (Flag::Short(b'c'), Some(OsStr::new("two"))) ]);
    test!(short_two_next:     ["-lc", "two"]  => frees: [], flags: [(Flag::Short(b'l'), None), (Flag::Short(b'c'), Some(OsStr::new("two"))) ]);

    // Short args with values and suggestions
    test!(short_none_s:         ["-t"]         => error NeedsValue { flag: Flag::Short(b't'), values: Some(SUGGESTIONS) });
    test!(short_two_together_s: ["-texa"]      => frees: [], flags: [(Flag::Short(b't'), Some(OsStr::new("exa"))) ]);
    test!(short_two_equals_s:   ["-t=exa"]     => frees: [], flags: [(Flag::Short(b't'), Some(OsStr::new("exa"))) ]);
    test!(short_two_next_s:     ["-t", "exa"]  => frees: [], flags: [(Flag::Short(b't'), Some(OsStr::new("exa"))) ]);

    // Unknown args
    test!(unknown_long:          ["--quiet"]      => error UnknownArgument { attempt: OsString::from("quiet") });
    test!(unknown_long_eq:       ["--quiet=shhh"] => error UnknownArgument { attempt: OsString::from("quiet") });
    test!(unknown_short:         ["-q"]           => error UnknownShortArgument { attempt: b'q' });
    test!(unknown_short_2nd:     ["-lq"]          => error UnknownShortArgument { attempt: b'q' });
    test!(unknown_short_eq:      ["-q=shhh"]      => error UnknownShortArgument { attempt: b'q' });
    test!(unknown_short_2nd_eq:  ["-lq=shhh"]     => error UnknownShortArgument { attempt: b'q' });
}
#[cfg(test)]
mod matches_test {
    use super::*;

    // Generates a test asserting what `MatchedFlags::has` returns for a
    // given set of pre-matched flags (always in non-strict mode).
    macro_rules! test {
        ($name:ident: $input:expr, has $param:expr => $result:expr) => {
            #[test]
            fn $name() {
                let flags = MatchedFlags {
                    flags: $input.to_vec(),
                    strictness: Strictness::UseLastArguments,
                };

                assert_eq!(flags.has(&$param), Ok($result));
            }
        };
    }

    static VERBOSE: Arg = Arg { short: Some(b'v'), long: "verbose", takes_value: TakesValue::Forbidden };
    static COUNT:   Arg = Arg { short: Some(b'c'), long: "count",   takes_value: TakesValue::Necessary(None) };

    test!(short_never:  [],                                                     has VERBOSE => false);
    test!(short_once:   [(Flag::Short(b'v'), None)],                            has VERBOSE => true);
    test!(short_twice:  [(Flag::Short(b'v'), None), (Flag::Short(b'v'), None)], has VERBOSE => true);
    test!(long_once:    [(Flag::Long("verbose"), None)],                        has VERBOSE => true);
    test!(long_twice:   [(Flag::Long("verbose"), None), (Flag::Long("verbose"), None)], has VERBOSE => true);
    test!(long_mixed:   [(Flag::Long("verbose"), None), (Flag::Short(b'v'), None)],     has VERBOSE => true);

    // A lone flag with a value should be returned by `get`.
    #[test]
    fn only_count() {
        let everything = OsString::from("everything");

        let flags = MatchedFlags {
            flags: vec![ (Flag::Short(b'c'), Some(&*everything)) ],
            strictness: Strictness::UseLastArguments,
        };

        assert_eq!(flags.get(&COUNT), Ok(Some(&*everything)));
    }

    // With `UseLastArguments`, the value given last should win.
    #[test]
    fn rightmost_count() {
        let everything = OsString::from("everything");
        let nothing    = OsString::from("nothing");

        let flags = MatchedFlags {
            flags: vec![ (Flag::Short(b'c'), Some(&*everything)),
                         (Flag::Short(b'c'), Some(&*nothing)) ],
            strictness: Strictness::UseLastArguments,
        };

        assert_eq!(flags.get(&COUNT), Ok(Some(&*nothing)));
    }

    // An absent flag is simply reported as not present.
    #[test]
    fn no_count() {
        let flags = MatchedFlags { flags: Vec::new(), strictness: Strictness::UseLastArguments };

        assert!(!flags.has(&COUNT).unwrap());
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/options/view.rs | src/options/view.rs | use crate::fs::feature::xattr;
use crate::options::{flags, OptionsError, NumberSource, Vars};
use crate::options::parser::MatchedFlags;
use crate::output::{View, Mode, TerminalWidth, grid, details};
use crate::output::grid_details::{self, RowThreshold};
use crate::output::file_name::Options as FileStyle;
use crate::output::table::{TimeTypes, SizeFormat, UserFormat, Columns, Options as TableOptions};
use crate::output::time::TimeFormat;
impl View {

    /// Assembles the complete view: which display mode to use, the
    /// terminal width to fit it into, and how file names are styled.
    pub fn deduce<V: Vars>(matches: &MatchedFlags<'_>, vars: &V) -> Result<Self, OptionsError> {
        Ok(Self {
            mode: Mode::deduce(matches, vars)?,
            width: TerminalWidth::deduce(vars)?,
            file_style: FileStyle::deduce(matches, vars)?,
        })
    }
}
impl Mode {

    /// Determine which viewing mode to use based on the user’s options.
    ///
    /// As with the other options, arguments are scanned right-to-left and the
    /// first flag found is matched, so `exa --oneline --long` will pick a
    /// details view, and `exa --long --oneline` will pick the lines view.
    ///
    /// This is complicated a little by the fact that `--grid` and `--tree`
    /// can also combine with `--long`, so care has to be taken to use the
    /// long variant of the view when either appears alongside `--long`.
    pub fn deduce<V: Vars>(matches: &MatchedFlags<'_>, vars: &V) -> Result<Self, OptionsError> {
        let flag = matches.has_where_any(|f| f.matches(&flags::LONG) || f.matches(&flags::ONE_LINE)
                                          || f.matches(&flags::GRID) || f.matches(&flags::TREE));

        // No view-selecting flag at all: fall back to the plain grid view.
        let flag = if let Some(f) = flag { f } else {
            Self::strict_check_long_flags(matches)?;
            let grid = grid::Options::deduce(matches)?;
            return Ok(Self::Grid(grid));
        };

        if flag.matches(&flags::LONG)
            || (flag.matches(&flags::TREE) && matches.has(&flags::LONG)?)
            || (flag.matches(&flags::GRID) && matches.has(&flags::LONG)?)
        {
            // The discarded `has` call lets strict mode check --long for
            // duplicates even when another flag was matched first.
            let _ = matches.has(&flags::LONG)?;
            let details = details::Options::deduce_long(matches, vars)?;

            let flag = matches.has_where_any(|f| f.matches(&flags::GRID) || f.matches(&flags::TREE));

            if flag.is_some() && flag.unwrap().matches(&flags::GRID) {
                let _ = matches.has(&flags::GRID)?;
                let grid = grid::Options::deduce(matches)?;
                let row_threshold = RowThreshold::deduce(vars)?;

                let grid_details = grid_details::Options { grid, details, row_threshold };
                return Ok(Self::GridDetails(grid_details));
            }

            // the --tree case is handled by the DirAction parser later
            return Ok(Self::Details(details));
        }

        Self::strict_check_long_flags(matches)?;

        if flag.matches(&flags::TREE) {
            let _ = matches.has(&flags::TREE)?;
            let details = details::Options::deduce_tree(matches)?;
            return Ok(Self::Details(details));
        }

        if flag.matches(&flags::ONE_LINE) {
            let _ = matches.has(&flags::ONE_LINE)?;
            return Ok(Self::Lines);
        }

        let grid = grid::Options::deduce(matches)?;
        Ok(Self::Grid(grid))
    }

    /// In strict mode, reject flags that only do something when `--long`
    /// is also present, since on their own they’d silently do nothing.
    fn strict_check_long_flags(matches: &MatchedFlags<'_>) -> Result<(), OptionsError> {
        // If --long hasn’t been passed, then check if we need to warn the
        // user about flags that won’t have any effect.
        if matches.is_strict() {
            for option in &[ &flags::BINARY, &flags::BYTES, &flags::INODE, &flags::LINKS,
                             &flags::HEADER, &flags::BLOCKS, &flags::TIME, &flags::GROUP, &flags::NUMERIC ] {
                if matches.has(option)? {
                    return Err(OptionsError::Useless(*option, false, &flags::LONG));
                }
            }

            if matches.has(&flags::GIT)? {
                return Err(OptionsError::Useless(&flags::GIT, false, &flags::LONG));
            }
            else if matches.has(&flags::LEVEL)? && ! matches.has(&flags::RECURSE)? && ! matches.has(&flags::TREE)? {
                return Err(OptionsError::Useless2(&flags::LEVEL, &flags::RECURSE, &flags::TREE));
            }
        }

        Ok(())
    }
}
impl grid::Options {

    /// The only grid-specific option is whether rows fill across or down.
    fn deduce(matches: &MatchedFlags<'_>) -> Result<Self, OptionsError> {
        let across = matches.has(&flags::ACROSS)?;
        Ok(grid::Options { across })
    }
}
impl details::Options {

    /// The details view used for `--tree`: no table and no header, just
    /// the file names (plus extended attributes if they were asked for).
    fn deduce_tree(matches: &MatchedFlags<'_>) -> Result<Self, OptionsError> {
        Ok(details::Options {
            table: None,
            header: false,
            xattr: xattr::ENABLED && matches.has(&flags::EXTENDED)?,
        })
    }

    /// The details view used for `--long`. In strict mode, flags that
    /// contradict a long view are rejected up front.
    fn deduce_long<V: Vars>(matches: &MatchedFlags<'_>, vars: &V) -> Result<Self, OptionsError> {
        if matches.is_strict() {
            if matches.has(&flags::ACROSS)? && ! matches.has(&flags::GRID)? {
                return Err(OptionsError::Useless(&flags::ACROSS, true, &flags::LONG));
            }
            if matches.has(&flags::ONE_LINE)? {
                return Err(OptionsError::Useless(&flags::ONE_LINE, true, &flags::LONG));
            }
        }

        let table = Some(TableOptions::deduce(matches, vars)?);
        let header = matches.has(&flags::HEADER)?;
        let xattr = xattr::ENABLED && matches.has(&flags::EXTENDED)?;
        Ok(details::Options { table, header, xattr })
    }
}
impl TerminalWidth {

    /// Determine the width to use from the `COLUMNS` environment
    /// variable if it holds a number, falling back to automatic
    /// detection otherwise.
    fn deduce<V: Vars>(vars: &V) -> Result<Self, OptionsError> {
        use crate::options::vars;

        let columns = match vars.get(vars::COLUMNS).and_then(|s| s.into_string().ok()) {
            Some(c) => c,
            None => return Ok(Self::Automatic),
        };

        match columns.parse() {
            Ok(width) => Ok(Self::Set(width)),
            Err(e) => {
                let source = NumberSource::Env(vars::COLUMNS);
                Err(OptionsError::FailedParse(columns, source, e))
            }
        }
    }
}
impl RowThreshold {

    /// Determine how many rows a grid-details view must have before it’s
    /// used, from the `EXA_GRID_ROWS` environment variable; with no (or an
    /// unusable) value, the grid is always used.
    fn deduce<V: Vars>(vars: &V) -> Result<Self, OptionsError> {
        use crate::options::vars;

        let rows = match vars.get(vars::EXA_GRID_ROWS).and_then(|s| s.into_string().ok()) {
            Some(r) => r,
            None => return Ok(Self::AlwaysGrid),
        };

        match rows.parse() {
            Ok(minimum) => Ok(Self::MinimumRows(minimum)),
            Err(e) => {
                let source = NumberSource::Env(vars::EXA_GRID_ROWS);
                Err(OptionsError::FailedParse(rows, source, e))
            }
        }
    }
}
impl TableOptions {

    /// Gathers the four per-column option groups that make up the
    /// details-view table. (Each is deduced in turn so strict-mode errors
    /// keep their original precedence.)
    fn deduce<V: Vars>(matches: &MatchedFlags<'_>, vars: &V) -> Result<Self, OptionsError> {
        let time_format = TimeFormat::deduce(matches, vars)?;
        let size_format = SizeFormat::deduce(matches)?;
        let user_format = UserFormat::deduce(matches)?;
        let columns = Columns::deduce(matches)?;
        Ok(Self { columns, time_format, size_format, user_format })
    }
}
impl Columns {

    /// Works out which columns should appear in the details table: most
    /// are opt-in flags, while permissions, file size, and user are shown
    /// unless explicitly switched off.
    fn deduce(matches: &MatchedFlags<'_>) -> Result<Self, OptionsError> {
        // Fields are written in the original evaluation order so that
        // strict-mode errors surface identically.
        Ok(Self {
            time_types: TimeTypes::deduce(matches)?,
            git: matches.has(&flags::GIT)?,
            blocks: matches.has(&flags::BLOCKS)?,
            group: matches.has(&flags::GROUP)?,
            inode: matches.has(&flags::INODE)?,
            links: matches.has(&flags::LINKS)?,
            octal: matches.has(&flags::OCTAL)?,
            permissions: ! matches.has(&flags::NO_PERMISSIONS)?,
            filesize: ! matches.has(&flags::NO_FILESIZE)?,
            user: ! matches.has(&flags::NO_USER)?,
        })
    }
}
impl SizeFormat {

    /// Determine which file size to use in the file size column based on
    /// the user’s options.
    ///
    /// The default mode is to use the decimal prefixes, as they are the
    /// most commonly-understood, and don’t involve trying to parse large
    /// strings of digits in your head. Changing the format to anything else
    /// involves the `--binary` or `--bytes` flags, and these conflict with
    /// each other.
    fn deduce(matches: &MatchedFlags<'_>) -> Result<Self, OptionsError> {
        let flag = matches.has_where(|f| f.matches(&flags::BINARY) || f.matches(&flags::BYTES))?;

        if let Some(f) = flag {
            if f.matches(&flags::BINARY) {
                return Ok(Self::BinaryBytes);
            }
            if f.matches(&flags::BYTES) {
                return Ok(Self::JustBytes);
            }
        }

        Ok(Self::DecimalBytes)
    }
}
impl TimeFormat {
    /// Determine how timestamps should be formatted, from the
    /// `--time-style` option or, failing that, the `TIME_STYLE`
    /// environment variable. An unrecognised style name is an error.
    fn deduce<V: Vars>(matches: &MatchedFlags<'_>, vars: &V) -> Result<Self, OptionsError> {
        use crate::options::vars;

        // Prefer the command-line argument; fall back to a non-empty
        // environment variable; otherwise use the default style.
        let word = match matches.get(&flags::TIME_STYLE)? {
            Some(w) => w.to_os_string(),
            None => {
                match vars.get(vars::TIME_STYLE) {
                    Some(ref t) if ! t.is_empty() => t.clone(),
                    _ => return Ok(Self::DefaultFormat),
                }
            }
        };

        // A non-UTF-8 value can’t match any known style, so it falls
        // through to the error arm, just like an unknown name.
        match word.to_str() {
            Some("default")  => Ok(Self::DefaultFormat),
            Some("iso")      => Ok(Self::ISOFormat),
            Some("long-iso") => Ok(Self::LongISO),
            Some("full-iso") => Ok(Self::FullISO),
            _ => Err(OptionsError::BadArgument(&flags::TIME_STYLE, word)),
        }
    }
}
impl UserFormat {
    /// Determine whether the user and group columns show names or
    /// numeric IDs, based on the `--numeric` flag.
    fn deduce(matches: &MatchedFlags<'_>) -> Result<Self, OptionsError> {
        match matches.has(&flags::NUMERIC)? {
            true  => Ok(Self::Numeric),
            false => Ok(Self::Name),
        }
    }
}
impl TimeTypes {
    /// Determine which of a file’s time fields should be displayed for it
    /// based on the user’s options.
    ///
    /// There are two separate ways to pick which fields to show: with a
    /// flag (such as `--modified`) or with a parameter (such as
    /// `--time=modified`). An error is signaled if both ways are used.
    ///
    /// It’s valid to show more than one column by passing in more than one
    /// option, but passing *no* options means that the user just wants to
    /// see the default set.
    fn deduce(matches: &MatchedFlags<'_>) -> Result<Self, OptionsError> {
        let possible_word = matches.get(&flags::TIME)?;
        let modified = matches.has(&flags::MODIFIED)?;
        let changed  = matches.has(&flags::CHANGED)?;
        let accessed = matches.has(&flags::ACCESSED)?;
        let created  = matches.has(&flags::CREATED)?;
        let no_time  = matches.has(&flags::NO_TIME)?;

        // `--no-time` beats everything else.
        if no_time {
            return Ok(Self { modified: false, changed: false, accessed: false, created: false });
        }

        let word = match possible_word {
            Some(w) => w,
            // No `--time` parameter: use whichever individual flags were
            // given, or the default set when there were none at all.
            None if modified || changed || accessed || created => {
                return Ok(Self { modified, changed, accessed, created });
            }
            None => return Ok(Self::default()),
        };

        // The `--time` parameter conflicts with every individual flag.
        if modified {
            return Err(OptionsError::Useless(&flags::MODIFIED, true, &flags::TIME));
        }
        if changed {
            return Err(OptionsError::Useless(&flags::CHANGED, true, &flags::TIME));
        }
        if accessed {
            return Err(OptionsError::Useless(&flags::ACCESSED, true, &flags::TIME));
        }
        if created {
            return Err(OptionsError::Useless(&flags::CREATED, true, &flags::TIME));
        }

        // A non-UTF-8 word can’t match any field name, so it falls into
        // the error arm along with unknown names.
        match word.to_str() {
            Some("mod") | Some("modified") => Ok(Self { modified: true,  changed: false, accessed: false, created: false }),
            Some("ch")  | Some("changed")  => Ok(Self { modified: false, changed: true,  accessed: false, created: false }),
            Some("acc") | Some("accessed") => Ok(Self { modified: false, changed: false, accessed: true,  created: false }),
            Some("cr")  | Some("created")  => Ok(Self { modified: false, changed: false, accessed: false, created: true  }),
            _ => Err(OptionsError::BadArgument(&flags::TIME, word.into())),
        }
    }
}
#[cfg(test)]
mod test {
use super::*;
use std::ffi::OsString;
use crate::options::flags;
use crate::options::parser::{Flag, Arg};
use crate::options::test::parse_for_test;
use crate::options::test::Strictnesses::*;
    // The subset of exa’s flags that these tests are parsed against.
    static TEST_ARGS: &[&Arg] = &[ &flags::BINARY, &flags::BYTES, &flags::TIME_STYLE,
                                   &flags::TIME, &flags::MODIFIED, &flags::CHANGED,
                                   &flags::CREATED, &flags::ACCESSED,
                                   &flags::HEADER, &flags::GROUP, &flags::INODE, &flags::GIT,
                                   &flags::LINKS, &flags::BLOCKS, &flags::LONG, &flags::LEVEL,
                                   &flags::GRID, &flags::ACROSS, &flags::ONE_LINE, &flags::TREE,
                                   &flags::NUMERIC ];
macro_rules! test {
($name:ident: $type:ident <- $inputs:expr; $stricts:expr => $result:expr) => {
/// Macro that writes a test.
/// If testing both strictnesses, they’ll both be done in the same function.
#[test]
fn $name() {
for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf)) {
assert_eq!(result, $result);
}
}
};
($name:ident: $type:ident <- $inputs:expr; $stricts:expr => err $result:expr) => {
/// Special macro for testing Err results.
/// This is needed because sometimes the Ok type doesn’t implement `PartialEq`.
#[test]
fn $name() {
for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf)) {
assert_eq!(result.unwrap_err(), $result);
}
}
};
($name:ident: $type:ident <- $inputs:expr; $stricts:expr => like $pat:pat) => {
/// More general macro for testing against a pattern.
/// Instead of using `PartialEq`, this just tests if it matches a pat.
#[test]
fn $name() {
for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf)) {
println!("Testing {:?}", result);
match result {
$pat => assert!(true),
_ => assert!(false),
}
}
}
};
($name:ident: $type:ident <- $inputs:expr, $vars:expr; $stricts:expr => err $result:expr) => {
/// Like above, but with $vars.
#[test]
fn $name() {
for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf, &$vars)) {
assert_eq!(result.unwrap_err(), $result);
}
}
};
($name:ident: $type:ident <- $inputs:expr, $vars:expr; $stricts:expr => like $pat:pat) => {
/// Like further above, but with $vars.
#[test]
fn $name() {
for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf, &$vars)) {
println!("Testing {:?}", result);
match result {
$pat => assert!(true),
_ => assert!(false),
}
}
}
};
}
    // Tests for SizeFormat::deduce: default, individual flags, and the
    // interaction between --binary and --bytes under both strictnesses.
    mod size_formats {
        use super::*;

        // Default behaviour
        test!(empty:   SizeFormat <- [];  Both => Ok(SizeFormat::DecimalBytes));

        // Individual flags
        test!(binary:  SizeFormat <- ["--binary"];  Both => Ok(SizeFormat::BinaryBytes));
        test!(bytes:   SizeFormat <- ["--bytes"];   Both => Ok(SizeFormat::JustBytes));

        // Overriding: in Last mode the later flag wins; in Complain mode
        // any repetition of the pair is a Duplicate error.
        test!(both_1:  SizeFormat <- ["--binary", "--binary"];  Last => Ok(SizeFormat::BinaryBytes));
        test!(both_2:  SizeFormat <- ["--bytes",  "--binary"];  Last => Ok(SizeFormat::BinaryBytes));
        test!(both_3:  SizeFormat <- ["--binary", "--bytes"];   Last => Ok(SizeFormat::JustBytes));
        test!(both_4:  SizeFormat <- ["--bytes",  "--bytes"];   Last => Ok(SizeFormat::JustBytes));

        test!(both_5:  SizeFormat <- ["--binary", "--binary"];  Complain => err OptionsError::Duplicate(Flag::Long("binary"), Flag::Long("binary")));
        test!(both_6:  SizeFormat <- ["--bytes",  "--binary"];  Complain => err OptionsError::Duplicate(Flag::Long("bytes"),  Flag::Long("binary")));
        test!(both_7:  SizeFormat <- ["--binary", "--bytes"];   Complain => err OptionsError::Duplicate(Flag::Long("binary"), Flag::Long("bytes")));
        test!(both_8:  SizeFormat <- ["--bytes",  "--bytes"];   Complain => err OptionsError::Duplicate(Flag::Long("bytes"),  Flag::Long("bytes")));
    }
    // Tests for TimeFormat::deduce: argument values, overriding, errors,
    // and the TIME_STYLE environment-variable fallback.
    mod time_formats {
        use super::*;

        // These tests use pattern matching because TimeFormat doesn’t
        // implement PartialEq.

        // Default behaviour
        test!(empty:     TimeFormat <- [], None;  Both => like Ok(TimeFormat::DefaultFormat));

        // Individual settings
        test!(default:   TimeFormat <- ["--time-style=default"], None;      Both => like Ok(TimeFormat::DefaultFormat));
        test!(iso:       TimeFormat <- ["--time-style", "iso"], None;       Both => like Ok(TimeFormat::ISOFormat));
        test!(long_iso:  TimeFormat <- ["--time-style=long-iso"], None;     Both => like Ok(TimeFormat::LongISO));
        test!(full_iso:  TimeFormat <- ["--time-style", "full-iso"], None;  Both => like Ok(TimeFormat::FullISO));

        // Overriding
        test!(actually:  TimeFormat <- ["--time-style=default", "--time-style", "iso"], None;  Last => like Ok(TimeFormat::ISOFormat));
        test!(actual_2:  TimeFormat <- ["--time-style=default", "--time-style", "iso"], None;  Complain => err OptionsError::Duplicate(Flag::Long("time-style"), Flag::Long("time-style")));
        test!(nevermind: TimeFormat <- ["--time-style", "long-iso", "--time-style=full-iso"], None;  Last => like Ok(TimeFormat::FullISO));
        test!(nevermore: TimeFormat <- ["--time-style", "long-iso", "--time-style=full-iso"], None;  Complain => err OptionsError::Duplicate(Flag::Long("time-style"), Flag::Long("time-style")));

        // Errors
        test!(daily:     TimeFormat <- ["--time-style=24-hour"], None;  Both => err OptionsError::BadArgument(&flags::TIME_STYLE, OsString::from("24-hour")));

        // `TIME_STYLE` environment variable is defined.
        // If the time-style argument is not given, `TIME_STYLE` is used.
        test!(use_env:      TimeFormat <- [], Some("long-iso".into());  Both => like Ok(TimeFormat::LongISO));

        // If the time-style argument is given, `TIME_STYLE` is overriding.
        test!(override_env: TimeFormat <- ["--time-style=full-iso"], Some("long-iso".into());  Both => like Ok(TimeFormat::FullISO));
    }
    // Tests for TimeTypes::deduce: each field via flag, short flag, and
    // `--time=WORD` spelling, plus combinations, errors, and overriding.
    mod time_types {
        use super::*;

        // Default behaviour
        test!(empty:     TimeTypes <- [];  Both => Ok(TimeTypes::default()));

        // Modified
        test!(modified:  TimeTypes <- ["--modified"];       Both => Ok(TimeTypes { modified: true, changed: false, accessed: false, created: false }));
        test!(m:         TimeTypes <- ["-m"];               Both => Ok(TimeTypes { modified: true, changed: false, accessed: false, created: false }));
        test!(time_mod:  TimeTypes <- ["--time=modified"];  Both => Ok(TimeTypes { modified: true, changed: false, accessed: false, created: false }));
        test!(t_m:       TimeTypes <- ["-tmod"];            Both => Ok(TimeTypes { modified: true, changed: false, accessed: false, created: false }));

        // Changed (ctime is only meaningful on Unix)
        #[cfg(target_family = "unix")]
        test!(changed:   TimeTypes <- ["--changed"];        Both => Ok(TimeTypes { modified: false, changed: true, accessed: false, created: false }));
        #[cfg(target_family = "unix")]
        test!(time_ch:   TimeTypes <- ["--time=changed"];   Both => Ok(TimeTypes { modified: false, changed: true, accessed: false, created: false }));
        #[cfg(target_family = "unix")]
        test!(t_ch:      TimeTypes <- ["-t", "ch"];         Both => Ok(TimeTypes { modified: false, changed: true, accessed: false, created: false }));

        // Accessed
        test!(acc:       TimeTypes <- ["--accessed"];          Both => Ok(TimeTypes { modified: false, changed: false, accessed: true, created: false }));
        test!(a:         TimeTypes <- ["-u"];                  Both => Ok(TimeTypes { modified: false, changed: false, accessed: true, created: false }));
        test!(time_acc:  TimeTypes <- ["--time", "accessed"];  Both => Ok(TimeTypes { modified: false, changed: false, accessed: true, created: false }));
        test!(time_a:    TimeTypes <- ["-t", "acc"];           Both => Ok(TimeTypes { modified: false, changed: false, accessed: true, created: false }));

        // Created
        test!(cr:        TimeTypes <- ["--created"];        Both => Ok(TimeTypes { modified: false, changed: false, accessed: false, created: true }));
        test!(c:         TimeTypes <- ["-U"];               Both => Ok(TimeTypes { modified: false, changed: false, accessed: false, created: true }));
        test!(time_cr:   TimeTypes <- ["--time=created"];   Both => Ok(TimeTypes { modified: false, changed: false, accessed: false, created: true }));
        test!(t_cr:      TimeTypes <- ["-tcr"];             Both => Ok(TimeTypes { modified: false, changed: false, accessed: false, created: true }));

        // Multiples
        test!(time_uu:   TimeTypes <- ["-u", "--modified"];  Both => Ok(TimeTypes { modified: true, changed: false, accessed: true, created: false }));

        // Errors
        test!(time_tea:  TimeTypes <- ["--time=tea"];  Both => err OptionsError::BadArgument(&flags::TIME, OsString::from("tea")));
        test!(t_ea:      TimeTypes <- ["-tea"];        Both => err OptionsError::BadArgument(&flags::TIME, OsString::from("ea")));

        // Overriding
        test!(overridden:   TimeTypes <- ["-tcr", "-tmod"];  Last => Ok(TimeTypes { modified: true, changed: false, accessed: false, created: false }));
        test!(overridden_2: TimeTypes <- ["-tcr", "-tmod"];  Complain => err OptionsError::Duplicate(Flag::Short(b't'), Flag::Short(b't')));
    }
    // Tests for Mode::deduce: which view (grid, lines, details,
    // grid-details) each flag combination produces, which long-view
    // flags are useless without --long, and contradiction resolution.
    mod views {
        use super::*;
        use crate::output::grid::Options as GridOptions;

        // Default
        test!(empty:         Mode <- [], None;            Both => like Ok(Mode::Grid(_)));

        // Grid views
        test!(original_g:    Mode <- ["-G"], None;        Both => like Ok(Mode::Grid(GridOptions { across: false, .. })));
        test!(grid:          Mode <- ["--grid"], None;    Both => like Ok(Mode::Grid(GridOptions { across: false, .. })));
        test!(across:        Mode <- ["--across"], None;  Both => like Ok(Mode::Grid(GridOptions { across: true, .. })));
        test!(gracross:      Mode <- ["-xG"], None;       Both => like Ok(Mode::Grid(GridOptions { across: true, .. })));

        // Lines views
        test!(lines:         Mode <- ["--oneline"], None;  Both => like Ok(Mode::Lines));
        test!(prima:         Mode <- ["-1"], None;         Both => like Ok(Mode::Lines));

        // Details views
        test!(long:          Mode <- ["--long"], None;  Both => like Ok(Mode::Details(_)));
        test!(ell:           Mode <- ["-l"], None;      Both => like Ok(Mode::Details(_)));

        // Grid-details views
        test!(lid:           Mode <- ["--long", "--grid"], None;  Both => like Ok(Mode::GridDetails(_)));
        test!(leg:           Mode <- ["-lG"], None;               Both => like Ok(Mode::GridDetails(_)));

        // Options that do nothing with --long
        test!(long_across:   Mode <- ["--long", "--across"], None;  Last => like Ok(Mode::Details(_)));

        // Options that do nothing without --long: ignored in Last mode,
        // a Useless error in Complain mode.
        test!(just_header:   Mode <- ["--header"], None;   Last => like Ok(Mode::Grid(_)));
        test!(just_group:    Mode <- ["--group"], None;    Last => like Ok(Mode::Grid(_)));
        test!(just_inode:    Mode <- ["--inode"], None;    Last => like Ok(Mode::Grid(_)));
        test!(just_links:    Mode <- ["--links"], None;    Last => like Ok(Mode::Grid(_)));
        test!(just_blocks:   Mode <- ["--blocks"], None;   Last => like Ok(Mode::Grid(_)));
        test!(just_binary:   Mode <- ["--binary"], None;   Last => like Ok(Mode::Grid(_)));
        test!(just_bytes:    Mode <- ["--bytes"], None;    Last => like Ok(Mode::Grid(_)));
        test!(just_numeric:  Mode <- ["--numeric"], None;  Last => like Ok(Mode::Grid(_)));

        #[cfg(feature = "git")]
        test!(just_git:      Mode <- ["--git"], None;  Last => like Ok(Mode::Grid(_)));

        test!(just_header_2: Mode <- ["--header"], None;   Complain => err OptionsError::Useless(&flags::HEADER, false, &flags::LONG));
        test!(just_group_2:  Mode <- ["--group"], None;    Complain => err OptionsError::Useless(&flags::GROUP, false, &flags::LONG));
        test!(just_inode_2:  Mode <- ["--inode"], None;    Complain => err OptionsError::Useless(&flags::INODE, false, &flags::LONG));
        test!(just_links_2:  Mode <- ["--links"], None;    Complain => err OptionsError::Useless(&flags::LINKS, false, &flags::LONG));
        test!(just_blocks_2: Mode <- ["--blocks"], None;   Complain => err OptionsError::Useless(&flags::BLOCKS, false, &flags::LONG));
        test!(just_binary_2: Mode <- ["--binary"], None;   Complain => err OptionsError::Useless(&flags::BINARY, false, &flags::LONG));
        test!(just_bytes_2:  Mode <- ["--bytes"], None;    Complain => err OptionsError::Useless(&flags::BYTES, false, &flags::LONG));
        test!(just_numeric2: Mode <- ["--numeric"], None;  Complain => err OptionsError::Useless(&flags::NUMERIC, false, &flags::LONG));

        #[cfg(feature = "git")]
        test!(just_git_2:    Mode <- ["--git"], None;  Complain => err OptionsError::Useless(&flags::GIT, false, &flags::LONG));

        // Contradictions and combinations
        test!(lgo: Mode <- ["--long", "--grid", "--oneline"], None;  Both => like Ok(Mode::Lines));
        test!(lgt: Mode <- ["--long", "--grid", "--tree"], None;     Both => like Ok(Mode::Details(_)));
        test!(tgl: Mode <- ["--tree", "--grid", "--long"], None;     Both => like Ok(Mode::GridDetails(_)));
        test!(tlg: Mode <- ["--tree", "--long", "--grid"], None;     Both => like Ok(Mode::GridDetails(_)));
        test!(ot:  Mode <- ["--oneline", "--tree"], None;            Both => like Ok(Mode::Details(_)));
        test!(og:  Mode <- ["--oneline", "--grid"], None;            Both => like Ok(Mode::Grid(_)));
        test!(tg:  Mode <- ["--tree", "--grid"], None;               Both => like Ok(Mode::Grid(_)));
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/options/filter.rs | src/options/filter.rs | //! Parsing the options for `FileFilter`.
use crate::fs::DotFilter;
use crate::fs::filter::{FileFilter, SortField, SortCase, IgnorePatterns, GitIgnore};
use crate::options::{flags, OptionsError};
use crate::options::parser::MatchedFlags;
impl FileFilter {
    /// Determines which of all the file filter options to use, by
    /// deducing each sub-option in turn and assembling the result.
    pub fn deduce(matches: &MatchedFlags<'_>) -> Result<Self, OptionsError> {
        // Deduced in the same order as the struct fields, so the first
        // failing sub-option is the one reported.
        let list_dirs_first = matches.has(&flags::DIRS_FIRST)?;
        let reverse         = matches.has(&flags::REVERSE)?;
        let only_dirs       = matches.has(&flags::ONLY_DIRS)?;
        let sort_field      = SortField::deduce(matches)?;
        let dot_filter      = DotFilter::deduce(matches)?;
        let ignore_patterns = IgnorePatterns::deduce(matches)?;
        let git_ignore      = GitIgnore::deduce(matches)?;

        Ok(Self { list_dirs_first, reverse, only_dirs, sort_field, dot_filter, ignore_patterns, git_ignore })
    }
}
impl SortField {
    /// Determines which sort field to use based on the `--sort` argument.
    /// This argument’s value can be one of several flags, listed above.
    /// Returns the default sort field if none is given, or `Err` if the
    /// value doesn’t correspond to a sort field we know about.
    fn deduce(matches: &MatchedFlags<'_>) -> Result<Self, OptionsError> {
        let word = match matches.get(&flags::SORT)? {
            Some(w) => w,
            None => return Ok(Self::default()),
        };

        // Get String because we can’t match an OsStr
        let word = match word.to_str() {
            Some(w) => w,
            None => return Err(OptionsError::BadArgument(&flags::SORT, word.into()))
        };

        let field = match word {
            // Capitalised spellings select case-sensitive (ASCII-order)
            // sorting; lowercase ones are case-insensitive.
            "name" | "filename" => {
                Self::Name(SortCase::AaBbCc)
            }
            "Name" | "Filename" => {
                Self::Name(SortCase::ABCabc)
            }
            // The leading dot mixes hidden files in with the rest
            // instead of grouping them first.
            ".name" | ".filename" => {
                Self::NameMixHidden(SortCase::AaBbCc)
            }
            ".Name" | ".Filename" => {
                Self::NameMixHidden(SortCase::ABCabc)
            }
            "size" | "filesize" => {
                Self::Size
            }
            "ext" | "extension" => {
                Self::Extension(SortCase::AaBbCc)
            }
            "Ext" | "Extension" => {
                Self::Extension(SortCase::ABCabc)
            }

            // “new” sorts oldest at the top and newest at the bottom; “old”
            // sorts newest at the top and oldest at the bottom. I think this
            // is the right way round to do this: “size” puts the smallest at
            // the top and the largest at the bottom, doesn’t it?
            "date" | "time" | "mod" | "modified" | "new" | "newest" => {
                Self::ModifiedDate
            }

            // Similarly, “age” means that files with the least age (the
            // newest files) get sorted at the top, and files with the most
            // age (the oldest) at the bottom.
            "age" | "old" | "oldest" => {
                Self::ModifiedAge
            }

            "ch" | "changed" => {
                Self::ChangedDate
            }
            "acc" | "accessed" => {
                Self::AccessedDate
            }
            "cr" | "created" => {
                Self::CreatedDate
            }
            // Inode numbers only exist on Unix filesystems.
            #[cfg(unix)]
            "inode" => {
                Self::FileInode
            }
            "type" => {
                Self::FileType
            }
            "none" => {
                Self::Unsorted
            }
            _ => {
                return Err(OptionsError::BadArgument(&flags::SORT, word.into()));
            }
        };

        Ok(field)
    }
}
// I’ve gone back and forth between whether to sort case-sensitively or
// insensitively by default. The default string sort in most programming
// languages takes each character’s ASCII value into account, sorting
// “Documents” before “apps”, but there’s usually an option to ignore
// characters’ case, putting “apps” before “Documents”.
//
// The argument for following case is that it’s easy to forget whether an item
// begins with an uppercase or lowercase letter and end up having to scan both
// the uppercase and lowercase sub-lists to find the item you want. If you
// happen to pick the sublist it’s not in, it looks like it’s missing, which
// is worse than if you just take longer to find it.
// (https://ux.stackexchange.com/a/79266)
//
// The argument for ignoring case is that it makes exa sort files differently
// from shells. A user would expect a directory’s files to be in the same
// order if they used “exa ~/directory” or “exa ~/directory/*”, but exa sorts
// them in the first case, and the shell in the second case, so they wouldn’t
// be exactly the same if exa does something non-conventional.
//
// However, exa already sorts files differently: it uses natural sorting from
// the natord crate, sorting the string “2” before “10” because the number’s
// smaller, because that’s usually what the user expects to happen. Users will
// name their files with numbers expecting them to be treated like numbers,
// rather than lists of numeric characters.
//
// In the same way, users will name their files with letters expecting the
// order of the letters to matter, rather than each letter’s character’s ASCII
// value. So exa breaks from tradition and ignores case while sorting:
// “apps” first, then “Documents”.
//
// You can get the old behaviour back by sorting with `--sort=Name`.
impl Default for SortField {
    /// The default sort field: by name, ignoring case.
    /// (See the long comment above for the reasoning.)
    fn default() -> Self {
        Self::Name(SortCase::AaBbCc)
    }
}
impl DotFilter {
    /// Determines the dot filter based on how many `--all` options were
    /// given: one will show dotfiles, but two will show `.` and `..` too.
    ///
    /// There is a special case for the `--tree` option: `--tree --all
    /// --all` is forbidden, because listing the parent directory in tree
    /// mode would loop onto itself! Three or more `--all`s are also
    /// rejected, but only in strict mode.
    pub fn deduce(matches: &MatchedFlags<'_>) -> Result<Self, OptionsError> {
        match matches.count(&flags::ALL) {
            0 => Ok(Self::JustFiles),
            1 => Ok(Self::Dotfiles),
            _ if matches.count(&flags::TREE) > 0        => Err(OptionsError::TreeAllAll),
            n if n >= 3 && matches.is_strict()          => Err(OptionsError::Conflict(&flags::ALL, &flags::ALL)),
            _ => Ok(Self::DotfilesAndDots),
        }
    }
}
impl IgnorePatterns {
    /// Determines the set of glob patterns to use based on the
    /// `--ignore-glob` argument’s value: a list of patterns separated by
    /// pipe (`|`) characters, given in any order. No argument at all
    /// produces a set that matches nothing, rather than, say, `None`.
    pub fn deduce(matches: &MatchedFlags<'_>) -> Result<Self, OptionsError> {
        let inputs = match matches.get(&flags::IGNORE_GLOB)? {
            None => return Ok(Self::empty()),
            Some(is) => is,
        };

        // Awkwardly, a glob pattern can be invalid, and invalid patterns
        // need to be reported somehow. Parsing can actually produce more
        // than one error, but only one is surfaced. (TODO)
        let (patterns, mut errors) = Self::parse_from_iter(inputs.to_string_lossy().split('|'));

        if let Some(e) = errors.pop() {
            Err(e.into())
        }
        else {
            Ok(patterns)
        }
    }
}
impl GitIgnore {
    /// Determines whether files should be checked against `.gitignore`,
    /// based on the presence of the `--git-ignore` flag.
    pub fn deduce(matches: &MatchedFlags<'_>) -> Result<Self, OptionsError> {
        let check = matches.has(&flags::GIT_IGNORE)?;
        Ok(if check { Self::CheckAndIgnore } else { Self::Off })
    }
}
#[cfg(test)]
mod test {
use super::*;
use std::ffi::OsString;
use crate::options::flags;
use crate::options::parser::Flag;
macro_rules! test {
($name:ident: $type:ident <- $inputs:expr; $stricts:expr => $result:expr) => {
#[test]
fn $name() {
use crate::options::parser::Arg;
use crate::options::test::parse_for_test;
use crate::options::test::Strictnesses::*;
static TEST_ARGS: &[&Arg] = &[ &flags::SORT, &flags::ALL, &flags::TREE, &flags::IGNORE_GLOB, &flags::GIT_IGNORE ];
for result in parse_for_test($inputs.as_ref(), TEST_ARGS, $stricts, |mf| $type::deduce(mf)) {
assert_eq!(result, $result);
}
}
};
}
    // Tests for SortField::deduce: defaults, each spelling, errors, and
    // duplicate-flag behaviour under both strictness levels.
    mod sort_fields {
        use super::*;

        // Default behaviour
        test!(empty:     SortField <- [];  Both => Ok(SortField::default()));

        // Sort field arguments
        test!(one_arg:   SortField <- ["--sort=mod"];      Both => Ok(SortField::ModifiedDate));
        test!(one_long:  SortField <- ["--sort=size"];     Both => Ok(SortField::Size));
        test!(one_short: SortField <- ["-saccessed"];      Both => Ok(SortField::AccessedDate));
        test!(lowercase: SortField <- ["--sort", "name"];  Both => Ok(SortField::Name(SortCase::AaBbCc)));
        test!(uppercase: SortField <- ["--sort", "Name"];  Both => Ok(SortField::Name(SortCase::ABCabc)));
        test!(old:       SortField <- ["--sort", "new"];   Both => Ok(SortField::ModifiedDate));
        test!(oldest:    SortField <- ["--sort=newest"];   Both => Ok(SortField::ModifiedDate));
        test!(new:       SortField <- ["--sort", "old"];   Both => Ok(SortField::ModifiedAge));
        test!(newest:    SortField <- ["--sort=oldest"];   Both => Ok(SortField::ModifiedAge));
        test!(age:       SortField <- ["-sage"];           Both => Ok(SortField::ModifiedAge));

        test!(mix_hidden_lowercase: SortField <- ["--sort", ".name"];  Both => Ok(SortField::NameMixHidden(SortCase::AaBbCc)));
        test!(mix_hidden_uppercase: SortField <- ["--sort", ".Name"];  Both => Ok(SortField::NameMixHidden(SortCase::ABCabc)));

        // Errors
        test!(error:     SortField <- ["--sort=colour"];  Both => Err(OptionsError::BadArgument(&flags::SORT, OsString::from("colour"))));

        // Overriding
        test!(overridden:   SortField <- ["--sort=cr", "--sort", "mod"];          Last => Ok(SortField::ModifiedDate));
        test!(overridden_2: SortField <- ["--sort", "none", "--sort=Extension"];  Last => Ok(SortField::Extension(SortCase::ABCabc)));
        test!(overridden_3: SortField <- ["--sort=cr", "--sort", "mod"];          Complain => Err(OptionsError::Duplicate(Flag::Long("sort"), Flag::Long("sort"))));
        test!(overridden_4: SortField <- ["--sort", "none", "--sort=Extension"];  Complain => Err(OptionsError::Duplicate(Flag::Long("sort"), Flag::Long("sort"))));
    }
    // Tests for DotFilter::deduce: the --all count ladder, strict-mode
    // rejection of three or more, and the --tree special case.
    mod dot_filters {
        use super::*;

        // Default behaviour
        test!(empty:      DotFilter <- [];  Both => Ok(DotFilter::JustFiles));

        // --all
        test!(all:        DotFilter <- ["--all"];        Both => Ok(DotFilter::Dotfiles));
        test!(all_all:    DotFilter <- ["--all", "-a"];  Both => Ok(DotFilter::DotfilesAndDots));
        test!(all_all_2:  DotFilter <- ["-aa"];          Both => Ok(DotFilter::DotfilesAndDots));

        test!(all_all_3:  DotFilter <- ["-aaa"];  Last => Ok(DotFilter::DotfilesAndDots));
        test!(all_all_4:  DotFilter <- ["-aaa"];  Complain => Err(OptionsError::Conflict(&flags::ALL, &flags::ALL)));

        // --all and --tree
        test!(tree_a:     DotFilter <- ["-Ta"];    Both => Ok(DotFilter::Dotfiles));
        test!(tree_aa:    DotFilter <- ["-Taa"];   Both => Err(OptionsError::TreeAllAll));
        test!(tree_aaa:   DotFilter <- ["-Taaa"];  Both => Err(OptionsError::TreeAllAll));
    }
    // Tests for IgnorePatterns::deduce: pipe-separated glob lists and
    // duplicate-flag behaviour.
    mod ignore_patterns {
        use super::*;
        use std::iter::FromIterator;

        // Helper: build a glob pattern from a literal known to be valid.
        fn pat(string: &'static str) -> glob::Pattern {
            glob::Pattern::new(string).unwrap()
        }

        // Various numbers of globs
        test!(none:   IgnorePatterns <- [];                             Both => Ok(IgnorePatterns::empty()));
        test!(one:    IgnorePatterns <- ["--ignore-glob", "*.ogg"];     Both => Ok(IgnorePatterns::from_iter(vec![ pat("*.ogg") ])));
        test!(two:    IgnorePatterns <- ["--ignore-glob=*.ogg|*.MP3"];  Both => Ok(IgnorePatterns::from_iter(vec![ pat("*.ogg"), pat("*.MP3") ])));
        test!(loads:  IgnorePatterns <- ["-I*|?|.|*"];                  Both => Ok(IgnorePatterns::from_iter(vec![ pat("*"), pat("?"), pat("."), pat("*") ])));

        // Overriding
        test!(overridden:   IgnorePatterns <- ["-I=*.ogg", "-I", "*.mp3"];   Last => Ok(IgnorePatterns::from_iter(vec![ pat("*.mp3") ])));
        test!(overridden_2: IgnorePatterns <- ["-I", "*.OGG", "-I*.MP3"];    Last => Ok(IgnorePatterns::from_iter(vec![ pat("*.MP3") ])));
        test!(overridden_3: IgnorePatterns <- ["-I=*.ogg", "-I", "*.mp3"];   Complain => Err(OptionsError::Duplicate(Flag::Short(b'I'), Flag::Short(b'I'))));
        test!(overridden_4: IgnorePatterns <- ["-I", "*.OGG", "-I*.MP3"];    Complain => Err(OptionsError::Duplicate(Flag::Short(b'I'), Flag::Short(b'I'))));
    }
    // Tests for GitIgnore::deduce: off by default, on with --git-ignore.
    mod git_ignores {
        use super::*;

        test!(off: GitIgnore <- [];                 Both => Ok(GitIgnore::Off));
        test!(on:  GitIgnore <- ["--git-ignore"];   Both => Ok(GitIgnore::CheckAndIgnore));
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/options/help.rs | src/options/help.rs | use std::fmt;
use crate::fs::feature::xattr;
use crate::options::flags;
use crate::options::parser::MatchedFlags;
static USAGE_PART1: &str = "Usage:
exa [options] [files...]
META OPTIONS
-?, --help show list of command-line options
-v, --version show version of exa
DISPLAY OPTIONS
-1, --oneline display one entry per line
-l, --long display extended file metadata as a table
-G, --grid display entries as a grid (default)
-x, --across sort the grid across, rather than downwards
-R, --recurse recurse into directories
-T, --tree recurse into directories as a tree
-F, --classify display type indicator by file names
--colo[u]r=WHEN when to use terminal colours (always, auto, never)
--colo[u]r-scale highlight levels of file sizes distinctly
--icons display icons
--no-icons don't display icons (always overrides --icons)
FILTERING AND SORTING OPTIONS
-a, --all show hidden and 'dot' files
-d, --list-dirs list directories as files; don't list their contents
-L, --level DEPTH limit the depth of recursion
-r, --reverse reverse the sort order
-s, --sort SORT_FIELD which field to sort by
--group-directories-first list directories before other files
-D, --only-dirs list only directories
-I, --ignore-glob GLOBS glob patterns (pipe-separated) of files to ignore";
static USAGE_PART2: &str = " \
Valid sort fields: name, Name, extension, Extension, size, type,
modified, accessed, created, inode, and none.
date, time, old, and new all refer to modified.
LONG VIEW OPTIONS
-b, --binary list file sizes with binary prefixes
-B, --bytes list file sizes in bytes, without any prefixes
-g, --group list each file's group
-h, --header add a header row to each column
-H, --links list each file's number of hard links
-i, --inode list each file's inode number
-m, --modified use the modified timestamp field
-n, --numeric list numeric user and group IDs
-S, --blocks show number of file system blocks
-t, --time FIELD which timestamp field to list (modified, accessed, created)
-u, --accessed use the accessed timestamp field
-U, --created use the created timestamp field
--changed use the changed timestamp field
--time-style how to format timestamps (default, iso, long-iso, full-iso)
--no-permissions suppress the permissions field
--octal-permissions list each file's permission in octal format
--no-filesize suppress the filesize field
--no-user suppress the user field
--no-time suppress the time field";
static GIT_FILTER_HELP: &str = " --git-ignore ignore files mentioned in '.gitignore'";
static GIT_VIEW_HELP: &str = " --git list each file's Git status, if tracked or ignored";
static EXTENDED_HELP: &str = " -@, --extended list each file's extended attributes and sizes";
/// All the information needed to display the help text, which depends
/// on which features are enabled and whether the user only wants to
/// see one section’s help.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub struct HelpString;

impl HelpString {
    /// Determines how to show help, if at all, based on the user’s
    /// command-line arguments. This works backwards from the other
    /// ‘deduce’ functions, returning `Some` when help needs to be shown.
    ///
    /// We don’t do any strict-mode error checking here: it’s OK to give
    /// the --help or --long flags more than once. Actually checking for
    /// errors when the user wants help is kind of petty!
    pub fn deduce(matches: &MatchedFlags<'_>) -> Option<Self> {
        match matches.count(&flags::HELP) {
            0 => None,
            _ => Some(Self),
        }
    }
}
impl fmt::Display for HelpString {
    /// Format this help options into an actual string of help text to be
    /// displayed to the user: the two static usage sections, with the
    /// feature-dependent extras spliced in between, and a trailing newline.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> Result<(), fmt::Error> {
        let mut sections = vec![ USAGE_PART1 ];

        if cfg!(feature = "git") {
            sections.push(GIT_FILTER_HELP);
        }

        sections.push(USAGE_PART2);

        if cfg!(feature = "git") {
            sections.push(GIT_VIEW_HELP);
        }

        if xattr::ENABLED {
            sections.push(EXTENDED_HELP);
        }

        // Sections are newline-separated, and `writeln!` supplies the
        // final trailing newline, matching the piece-by-piece original.
        writeln!(f, "{}", sections.join("\n"))
    }
}
#[cfg(test)]
mod test {
    use crate::options::{Options, OptionsResult};
    use std::ffi::OsStr;

    // `--help` on its own produces the help result.
    #[test]
    fn help() {
        let args = vec![ OsStr::new("--help") ];
        let opts = Options::parse(args, &None);
        assert!(matches!(opts, OptionsResult::Help(_)));
    }

    // `--help` still wins when file arguments are also given.
    #[test]
    fn help_with_file() {
        let args = vec![ OsStr::new("--help"), OsStr::new("me") ];
        let opts = Options::parse(args, &None);
        assert!(matches!(opts, OptionsResult::Help(_)));
    }

    // No arguments at all must not trigger the help text.
    #[test]
    fn unhelpful() {
        let args = vec![];
        let opts = Options::parse(args, &None);
        assert!(! matches!(opts, OptionsResult::Help(_))) // no help when --help isn’t passed
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/options/flags.rs | src/options/flags.rs | use crate::options::parser::{Arg, Args, TakesValue, Values};
// exa options
pub static VERSION: Arg = Arg { short: Some(b'v'), long: "version", takes_value: TakesValue::Forbidden };
pub static HELP: Arg = Arg { short: Some(b'?'), long: "help", takes_value: TakesValue::Forbidden };

// display options
pub static ONE_LINE: Arg = Arg { short: Some(b'1'), long: "oneline", takes_value: TakesValue::Forbidden };
pub static LONG: Arg = Arg { short: Some(b'l'), long: "long", takes_value: TakesValue::Forbidden };
pub static GRID: Arg = Arg { short: Some(b'G'), long: "grid", takes_value: TakesValue::Forbidden };
pub static ACROSS: Arg = Arg { short: Some(b'x'), long: "across", takes_value: TakesValue::Forbidden };
pub static RECURSE: Arg = Arg { short: Some(b'R'), long: "recurse", takes_value: TakesValue::Forbidden };
pub static TREE: Arg = Arg { short: Some(b'T'), long: "tree", takes_value: TakesValue::Forbidden };
pub static CLASSIFY: Arg = Arg { short: Some(b'F'), long: "classify", takes_value: TakesValue::Forbidden };

// `--color` and `--colour` are aliases; both require one of the values
// listed in COLOURS below.
pub static COLOR: Arg = Arg { short: None, long: "color", takes_value: TakesValue::Necessary(Some(COLOURS)) };
pub static COLOUR: Arg = Arg { short: None, long: "colour", takes_value: TakesValue::Necessary(Some(COLOURS)) };
const COLOURS: &[&str] = &["always", "auto", "never"];

pub static COLOR_SCALE: Arg = Arg { short: None, long: "color-scale", takes_value: TakesValue::Forbidden };
pub static COLOUR_SCALE: Arg = Arg { short: None, long: "colour-scale", takes_value: TakesValue::Forbidden };

// filtering and sorting options
pub static ALL: Arg = Arg { short: Some(b'a'), long: "all", takes_value: TakesValue::Forbidden };
pub static LIST_DIRS: Arg = Arg { short: Some(b'd'), long: "list-dirs", takes_value: TakesValue::Forbidden };
// `Necessary(None)` means a value is required but any string is accepted.
pub static LEVEL: Arg = Arg { short: Some(b'L'), long: "level", takes_value: TakesValue::Necessary(None) };
pub static REVERSE: Arg = Arg { short: Some(b'r'), long: "reverse", takes_value: TakesValue::Forbidden };
pub static SORT: Arg = Arg { short: Some(b's'), long: "sort", takes_value: TakesValue::Necessary(Some(SORTS)) };
pub static IGNORE_GLOB: Arg = Arg { short: Some(b'I'), long: "ignore-glob", takes_value: TakesValue::Necessary(None) };
pub static GIT_IGNORE: Arg = Arg { short: None, long: "git-ignore", takes_value: TakesValue::Forbidden };
pub static DIRS_FIRST: Arg = Arg { short: None, long: "group-directories-first", takes_value: TakesValue::Forbidden };
pub static ONLY_DIRS: Arg = Arg { short: Some(b'D'), long: "only-dirs", takes_value: TakesValue::Forbidden };
const SORTS: Values = &[ "name", "Name", "size", "extension",
"Extension", "modified", "changed", "accessed",
"created", "inode", "type", "none" ];
// display options
pub static BINARY: Arg = Arg { short: Some(b'b'), long: "binary", takes_value: TakesValue::Forbidden };
pub static BYTES: Arg = Arg { short: Some(b'B'), long: "bytes", takes_value: TakesValue::Forbidden };
pub static GROUP: Arg = Arg { short: Some(b'g'), long: "group", takes_value: TakesValue::Forbidden };
pub static NUMERIC: Arg = Arg { short: Some(b'n'), long: "numeric", takes_value: TakesValue::Forbidden };
pub static HEADER: Arg = Arg { short: Some(b'h'), long: "header", takes_value: TakesValue::Forbidden };
pub static ICONS: Arg = Arg { short: None, long: "icons", takes_value: TakesValue::Forbidden };
pub static INODE: Arg = Arg { short: Some(b'i'), long: "inode", takes_value: TakesValue::Forbidden };
pub static LINKS: Arg = Arg { short: Some(b'H'), long: "links", takes_value: TakesValue::Forbidden };
pub static MODIFIED: Arg = Arg { short: Some(b'm'), long: "modified", takes_value: TakesValue::Forbidden };
pub static CHANGED: Arg = Arg { short: None, long: "changed", takes_value: TakesValue::Forbidden };
pub static BLOCKS: Arg = Arg { short: Some(b'S'), long: "blocks", takes_value: TakesValue::Forbidden };
pub static TIME: Arg = Arg { short: Some(b't'), long: "time", takes_value: TakesValue::Necessary(Some(TIMES)) };
pub static ACCESSED: Arg = Arg { short: Some(b'u'), long: "accessed", takes_value: TakesValue::Forbidden };
pub static CREATED: Arg = Arg { short: Some(b'U'), long: "created", takes_value: TakesValue::Forbidden };
pub static TIME_STYLE: Arg = Arg { short: None, long: "time-style", takes_value: TakesValue::Necessary(Some(TIME_STYLES)) };
const TIMES: Values = &["modified", "changed", "accessed", "created"];
const TIME_STYLES: Values = &["default", "long-iso", "full-iso", "iso"];
// suppressing columns
pub static NO_PERMISSIONS: Arg = Arg { short: None, long: "no-permissions", takes_value: TakesValue::Forbidden };
pub static NO_FILESIZE: Arg = Arg { short: None, long: "no-filesize", takes_value: TakesValue::Forbidden };
pub static NO_USER: Arg = Arg { short: None, long: "no-user", takes_value: TakesValue::Forbidden };
pub static NO_TIME: Arg = Arg { short: None, long: "no-time", takes_value: TakesValue::Forbidden };
pub static NO_ICONS: Arg = Arg { short: None, long: "no-icons", takes_value: TakesValue::Forbidden };
// optional feature options
pub static GIT: Arg = Arg { short: None, long: "git", takes_value: TakesValue::Forbidden };
pub static EXTENDED: Arg = Arg { short: Some(b'@'), long: "extended", takes_value: TakesValue::Forbidden };
pub static OCTAL: Arg = Arg { short: None, long: "octal-permissions", takes_value: TakesValue::Forbidden };
pub static ALL_ARGS: Args = Args(&[
&VERSION, &HELP,
&ONE_LINE, &LONG, &GRID, &ACROSS, &RECURSE, &TREE, &CLASSIFY,
&COLOR, &COLOUR, &COLOR_SCALE, &COLOUR_SCALE,
&ALL, &LIST_DIRS, &LEVEL, &REVERSE, &SORT, &DIRS_FIRST,
&IGNORE_GLOB, &GIT_IGNORE, &ONLY_DIRS,
&BINARY, &BYTES, &GROUP, &NUMERIC, &HEADER, &ICONS, &INODE, &LINKS, &MODIFIED, &CHANGED,
&BLOCKS, &TIME, &ACCESSED, &CREATED, &TIME_STYLE,
&NO_PERMISSIONS, &NO_FILESIZE, &NO_USER, &NO_TIME, &NO_ICONS,
&GIT, &EXTENDED, &OCTAL
]);
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/options/error.rs | src/options/error.rs | use std::ffi::OsString;
use std::fmt;
use std::num::ParseIntError;
use crate::options::flags;
use crate::options::parser::{Arg, Flag, ParseError};
/// Something wrong with the combination of options the user has picked.
#[derive(PartialEq, Eq, Debug)]
pub enum OptionsError {

    /// There was an error (from `getopts`) parsing the arguments.
    Parse(ParseError),

    /// The user supplied an illegal choice to an Argument.
    BadArgument(&'static Arg, OsString),

    /// The user supplied a set of options that are unsupported.
    Unsupported(String),

    /// An option was given twice or more in strict mode.
    Duplicate(Flag, Flag),

    /// Two options were given that conflict with one another.
    Conflict(&'static Arg, &'static Arg),

    /// An option was given that does nothing when another one either is or
    /// isn’t present.
    Useless(&'static Arg, bool, &'static Arg),

    /// An option was given that does nothing when either of two other options
    /// are not present.
    Useless2(&'static Arg, &'static Arg, &'static Arg),

    /// A very specific edge case where --tree can’t be used with --all twice.
    TreeAllAll,

    /// A numeric option was given that failed to be parsed as a number.
    FailedParse(String, NumberSource, ParseIntError),

    /// A glob ignore was given that failed to be parsed as a pattern.
    FailedGlobPattern(String),
}

/// The source of a string that failed to be parsed as a number.
#[derive(PartialEq, Eq, Debug)]
pub enum NumberSource {

    /// It came... from a command-line argument!
    Arg(&'static Arg),

    /// It came... from the environment!
    Env(&'static str),
}

impl From<glob::PatternError> for OptionsError {
    // Only the error’s message is kept, not the error value itself.
    fn from(error: glob::PatternError) -> Self {
        Self::FailedGlobPattern(error.to_string())
    }
}
impl fmt::Display for NumberSource {
    /// Describe where the string that failed to parse came from.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Arg(option)   => write!(f, "option {}", option),
            Self::Env(variable) => write!(f, "environment variable {}", variable),
        }
    }
}
impl fmt::Display for OptionsError {
    /// Render a human-readable message for each kind of options error.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        use crate::options::parser::TakesValue;

        match self {
            Self::BadArgument(arg, attempt) => {
                // If the option has a fixed set of values, list them to
                // help the user pick a valid one.
                if let TakesValue::Necessary(Some(values)) = arg.takes_value {
                    write!(f, "Option {} has no {:?} setting ({})", arg, attempt, Choices(values))
                }
                else {
                    write!(f, "Option {} has no {:?} setting", arg, attempt)
                }
            }
            Self::Parse(e) => write!(f, "{}", e),
            Self::Unsupported(e) => write!(f, "{}", e),
            Self::Conflict(a, b) => write!(f, "Option {} conflicts with option {}", a, b),
            // The guarded arm must come before the unguarded one, so that a
            // flag duplicated with itself reads “given twice”.
            Self::Duplicate(a, b) if a == b => write!(f, "Flag {} was given twice", a),
            Self::Duplicate(a, b) => write!(f, "Flag {} conflicts with flag {}", a, b),
            Self::Useless(a, false, b) => write!(f, "Option {} is useless without option {}", a, b),
            Self::Useless(a, true, b) => write!(f, "Option {} is useless given option {}", a, b),
            Self::Useless2(a, b1, b2) => write!(f, "Option {} is useless without options {} or {}", a, b1, b2),
            Self::TreeAllAll => write!(f, "Option --tree is useless given --all --all"),
            Self::FailedParse(s, n, e) => write!(f, "Value {:?} not valid for {}: {}", s, n, e),
            Self::FailedGlobPattern(ref e) => write!(f, "Failed to parse glob pattern: {}", e),
        }
    }
}
impl OptionsError {
    /// Try to second-guess what the user was trying to do, depending on what
    /// went wrong, and offer a tip where one exists.
    pub fn suggestion(&self) -> Option<&'static str> {
        // ‘ls -lt’ and ‘ls -ltr’ are common combinations, so catch the two
        // ways they go wrong under exa’s flag scheme.
        match self {
            Self::BadArgument(arg, value) if *arg == &flags::TIME && value == "r" => {
                Some("To sort oldest files last, try \"--sort oldest\", or just \"-sold\"")
            }
            Self::Parse(ParseError::NeedsValue { flag, .. }) if *flag == Flag::Short(b't') => {
                Some("To sort newest files last, try \"--sort newest\", or just \"-snew\"")
            }
            _ => None,
        }
    }
}
/// A list of legal choices for an argument-taking option.
#[derive(PartialEq, Eq, Debug)]
pub struct Choices(pub &'static [&'static str]);

impl Choices {
    /// Separator printed between adjacent choices.
    const SEPARATOR: &'static str = ", ";
}

impl fmt::Display for Choices {
    /// Write the choices as a comma-separated list after a “choices:” label.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("choices: ")?;
        for (index, choice) in self.0.iter().enumerate() {
            if index > 0 {
                f.write_str(Self::SEPARATOR)?;
            }
            f.write_str(choice)?;
        }
        Ok(())
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/options/mod.rs | src/options/mod.rs | //! Parsing command-line strings into exa options.
//!
//! This module imports exa’s configuration types, such as `View` (the details
//! of displaying multiple files) and `DirAction` (what to do when encountering
//! a directory), and implements `deduce` methods on them so they can be
//! configured using command-line options.
//!
//!
//! ## Useless and overridden options
//!
//! Let’s say exa was invoked with just one argument: `exa --inode`. The
//! `--inode` option is used in the details view, where it adds the inode
//! column to the output. But because the details view is *only* activated with
//! the `--long` argument, adding `--inode` without it would not have any
//! effect.
//!
//! For a long time, exa’s philosophy was that the user should be warned
//! whenever they could be mistaken like this. If you tell exa to display the
//! inode, and it *doesn’t* display the inode, isn’t that more annoying than
//! having it throw an error back at you?
//!
//! However, this doesn’t take into account *configuration*. Say a user wants
//! to configure exa so that it lists inodes in the details view, but otherwise
//! functions normally. A common way to do this for command-line programs is to
//! define a shell alias that specifies the details they want to use every
//! time. For the inode column, the alias would be:
//!
//! `alias exa="exa --inode"`
//!
//! Using this alias means that although the inode column will be shown in the
//! details view, you’re now *only* allowed to use the details view, as any
//! other view type will result in an error. Oops!
//!
//! Another example is when an option is specified twice, such as `exa
//! --sort=Name --sort=size`. Did the user change their mind about sorting, and
//! accidentally specify the option twice?
//!
//! Again, exa rejected this case, throwing an error back to the user instead
//! of trying to guess how they want their output sorted. And again, this
//! doesn’t take into account aliases being used to set defaults. A user who
//! wants their files to be sorted case-insensitively may configure their shell
//! with the following:
//!
//! `alias exa="exa --sort=Name"`
//!
//! Just like the earlier example, the user now can’t use any other sort order,
//! because exa refuses to guess which one they meant. It’s *more* annoying to
//! have to go back and edit the command than if there were no error.
//!
//! Fortunately, there’s a heuristic for telling which options came from an
//! alias and which came from the actual command-line: aliased options are
//! nearer the beginning of the options array, and command-line options are
//! nearer the end. This means that after the options have been parsed, exa
//! needs to traverse them *backwards* to find the last-most-specified one.
//!
//! For example, invoking exa with `exa --sort=size` when that alias is present
//! would result in a full command-line of:
//!
//! `exa --sort=Name --sort=size`
//!
//! `--sort=size` should override `--sort=Name` because it’s closer to the end
//! of the arguments array. In fact, because there’s no way to tell where the
//! arguments came from — it’s just a heuristic — this will still work even
//! if no aliases are being used!
//!
//! Finally, this isn’t just useful when options could override each other.
//! Creating an alias `exal="exa --long --inode --header"` then invoking `exal
//! --grid --long` shouldn’t complain about `--long` being given twice when
//! it’s clear what the user wants.
use std::ffi::OsStr;
use crate::fs::dir_action::DirAction;
use crate::fs::filter::{FileFilter, GitIgnore};
use crate::output::{View, Mode, details, grid_details};
use crate::theme::Options as ThemeOptions;
mod dir_action;
mod file_name;
mod filter;
mod flags;
mod theme;
mod view;
mod error;
pub use self::error::{OptionsError, NumberSource};
mod help;
use self::help::HelpString;
mod parser;
use self::parser::MatchedFlags;
pub mod vars;
pub use self::vars::Vars;
mod version;
use self::version::VersionString;
/// These **options** represent a parsed, error-checked version of the
/// user’s command-line options.
#[derive(Debug)]
pub struct Options {

    /// The action to perform when encountering a directory rather than a
    /// regular file.
    pub dir_action: DirAction,

    /// How to sort and filter files before outputting them.
    pub filter: FileFilter,

    /// The user’s preference of view to use (lines, grid, details, or
    /// grid-details) along with the options on how to render file names.
    /// If the view requires the terminal to have a width, and there is no
    /// width, then the view will be downgraded.
    pub view: View,

    /// The options to make up the styles of the UI and file names.
    pub theme: ThemeOptions,
}
impl Options {

    /// Parse the given iterator of command-line strings into an Options
    /// struct and a list of free filenames, using the environment variables
    /// for extra options.
    #[allow(unused_results)]
    pub fn parse<'args, I, V>(args: I, vars: &V) -> OptionsResult<'args>
    where I: IntoIterator<Item = &'args OsStr>,
          V: Vars,
    {
        use crate::options::parser::{Matches, Strictness};

        // An unset or empty EXA_STRICT means the lenient “last one wins”
        // behaviour described in the module docs; any other value enables
        // complaints about redundant arguments.
        let strictness = match vars.get(vars::EXA_STRICT) {
            None => Strictness::UseLastArguments,
            Some(ref t) if t.is_empty() => Strictness::UseLastArguments,
            Some(_) => Strictness::ComplainAboutRedundantArguments,
        };

        let Matches { flags, frees } = match flags::ALL_ARGS.parse(args, strictness) {
            Ok(m) => m,
            Err(pe) => return OptionsResult::InvalidOptions(OptionsError::Parse(pe)),
        };

        // --help and --version short-circuit before any other deduction.
        if let Some(help) = HelpString::deduce(&flags) {
            return OptionsResult::Help(help);
        }

        if let Some(version) = VersionString::deduce(&flags) {
            return OptionsResult::Version(version);
        }

        match Self::deduce(&flags, vars) {
            Ok(options) => OptionsResult::Ok(options, frees),
            Err(oe) => OptionsResult::InvalidOptions(oe),
        }
    }

    /// Whether the View specified in this set of options includes a Git
    /// status column. It’s only worth trying to discover a repository if the
    /// results will end up being displayed.
    pub fn should_scan_for_git(&self) -> bool {
        // --git-ignore needs repository information even without the column.
        if self.filter.git_ignore == GitIgnore::CheckAndIgnore {
            return true;
        }

        // Otherwise, only the details and grid-details views can show the
        // Git column, and only when their table is actually present.
        match self.view.mode {
            Mode::Details(details::Options { table: Some(ref table), .. }) |
            Mode::GridDetails(grid_details::Options { details: details::Options { table: Some(ref table), .. }, .. }) => table.columns.git,
            _ => false,
        }
    }

    /// Determines the complete set of options based on the given command-line
    /// arguments, after they’ve been parsed.
    fn deduce<V: Vars>(matches: &MatchedFlags<'_>, vars: &V) -> Result<Self, OptionsError> {
        // Fail early if Git flags were given to a build compiled without
        // the `git` feature, rather than silently ignoring them.
        if cfg!(not(feature = "git")) &&
            matches.has_where_any(|f| f.matches(&flags::GIT) || f.matches(&flags::GIT_IGNORE)).is_some() {
            return Err(OptionsError::Unsupported(String::from(
                "Options --git and --git-ignore can't be used because `git` feature was disabled in this build of exa"
            )));
        }

        let view = View::deduce(matches, vars)?;
        let dir_action = DirAction::deduce(matches, matches!(view.mode, Mode::Details(_)))?;
        let filter = FileFilter::deduce(matches)?;
        let theme = ThemeOptions::deduce(matches, vars)?;

        Ok(Self { dir_action, filter, view, theme })
    }
}
/// The result of the `Options::parse` function.
#[derive(Debug)]
pub enum OptionsResult<'args> {

    /// The options were parsed successfully.
    Ok(Options, Vec<&'args OsStr>),

    /// There was an error parsing the arguments.
    InvalidOptions(OptionsError),

    /// One of the arguments was `--help`, so display help.
    Help(HelpString),

    /// One of the arguments was `--version`, so display the version number.
    Version(VersionString),
}
#[cfg(test)]
pub mod test {
    use crate::options::parser::{Arg, MatchedFlags};
    use std::ffi::OsStr;

    /// Which strictness mode(s) a test should be run under.
    #[derive(PartialEq, Eq, Debug)]
    pub enum Strictnesses {
        Last,
        Complain,
        Both,
    }

    /// This function gets used by the other testing modules.
    /// It can run with one or both strictness values: if told to run with
    /// both, then both should resolve to the same result.
    ///
    /// It returns a vector with one or two elements in.
    /// These elements can then be tested with `assert_eq` or what have you.
    pub fn parse_for_test<T, F>(inputs: &[&str], args: &'static [&'static Arg], strictnesses: Strictnesses, get: F) -> Vec<T>
    where F: Fn(&MatchedFlags<'_>) -> T
    {
        use self::Strictnesses::*;
        use crate::options::parser::{Args, Strictness};

        let bits = inputs.iter().map(OsStr::new).collect::<Vec<_>>();
        let mut result = Vec::new();

        if strictnesses == Last || strictnesses == Both {
            let results = Args(args).parse(bits.clone(), Strictness::UseLastArguments);
            result.push(get(&results.unwrap().flags));
        }

        if strictnesses == Complain || strictnesses == Both {
            let results = Args(args).parse(bits, Strictness::ComplainAboutRedundantArguments);
            result.push(get(&results.unwrap().flags));
        }

        result
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/theme/ui_styles.rs | src/theme/ui_styles.rs | use ansi_term::Style;
use crate::theme::lsc::Pair;
/// Every style used by exa’s UI: file kinds, permission bits, sizes,
/// users, link counts, Git statuses, and assorted single-purpose fields.
#[derive(Debug, Default, PartialEq)]
pub struct UiStyles {
    // Whether any colouring is applied at all (false for the plain theme).
    pub colourful: bool,

    pub filekinds: FileKinds,
    pub perms: Permissions,
    pub size: Size,
    pub users: Users,
    pub links: Links,
    pub git: Git,

    pub punctuation: Style,
    pub date: Style,
    pub inode: Style,
    pub blocks: Style,
    pub header: Style,
    pub octal: Style,
    pub symlink_path: Style,
    pub control_char: Style,
    pub broken_symlink: Style,
    pub broken_path_overlay: Style,
}
/// Styles for each kind of file.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
pub struct FileKinds {
    pub normal: Style,
    pub directory: Style,
    pub symlink: Style,
    pub pipe: Style,
    pub block_device: Style,
    pub char_device: Style,
    pub socket: Style,
    pub special: Style,
    pub executable: Style,
}

/// Styles for the individual bits of the permissions column.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
pub struct Permissions {
    pub user_read: Style,
    pub user_write: Style,
    pub user_execute_file: Style,
    pub user_execute_other: Style,

    pub group_read: Style,
    pub group_write: Style,
    pub group_execute: Style,

    pub other_read: Style,
    pub other_write: Style,
    pub other_execute: Style,

    pub special_user_file: Style,
    pub special_other: Style,

    pub attribute: Style,
}

/// Styles for file sizes: one number style and one unit style per size
/// bracket, plus `major`/`minor` styles.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
pub struct Size {
    pub major: Style,
    pub minor: Style,

    pub number_byte: Style,
    pub number_kilo: Style,
    pub number_mega: Style,
    pub number_giga: Style,
    pub number_huge: Style,

    pub unit_byte: Style,
    pub unit_kilo: Style,
    pub unit_mega: Style,
    pub unit_giga: Style,
    pub unit_huge: Style,
}

/// Styles for users and groups, depending on whether they are the
/// current user’s.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
pub struct Users {
    pub user_you: Style,
    pub user_someone_else: Style,
    pub group_yours: Style,
    pub group_not_yours: Style,
}

/// Styles for the links column.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
pub struct Links {
    pub normal: Style,
    pub multi_link_file: Style,
}

/// Styles for each Git status marker.
#[derive(Clone, Copy, Debug, Default, PartialEq)]
pub struct Git {
    pub new: Style,
    pub modified: Style,
    pub deleted: Style,
    pub renamed: Style,
    pub typechange: Style,
    pub ignored: Style,
    pub conflicted: Style,
}
impl UiStyles {
pub fn plain() -> Self {
Self::default()
}
}
impl UiStyles {

    /// Sets a value on this set of colours using one of the keys understood
    /// by the `LS_COLORS` environment variable. Invalid keys set nothing, but
    /// return false.
    pub fn set_ls(&mut self, pair: &Pair<'_>) -> bool {
        match pair.key {
            "di" => self.filekinds.directory = pair.to_style(), // DIR
            "ex" => self.filekinds.executable = pair.to_style(), // EXEC
            "fi" => self.filekinds.normal = pair.to_style(), // FILE
            "pi" => self.filekinds.pipe = pair.to_style(), // FIFO
            "so" => self.filekinds.socket = pair.to_style(), // SOCK
            "bd" => self.filekinds.block_device = pair.to_style(), // BLK
            "cd" => self.filekinds.char_device = pair.to_style(), // CHR
            "ln" => self.filekinds.symlink = pair.to_style(), // LINK
            "or" => self.broken_symlink = pair.to_style(), // ORPHAN
            _ => return false,
            // Codes we don’t do anything with:
            // MULTIHARDLINK, DOOR, SETUID, SETGID, CAPABILITY,
            // STICKY_OTHER_WRITABLE, OTHER_WRITABLE, STICKY, MISSING
        }
        true
    }

    /// Sets a value on this set of colours using one of the keys understood
    /// by the `EXA_COLORS` environment variable. Invalid keys set nothing,
    /// but return false. This doesn’t take the `LS_COLORS` keys into account,
    /// so `set_ls` should have been run first.
    pub fn set_exa(&mut self, pair: &Pair<'_>) -> bool {
        match pair.key {
            // permission bits
            "ur" => self.perms.user_read = pair.to_style(),
            "uw" => self.perms.user_write = pair.to_style(),
            "ux" => self.perms.user_execute_file = pair.to_style(),
            "ue" => self.perms.user_execute_other = pair.to_style(),
            "gr" => self.perms.group_read = pair.to_style(),
            "gw" => self.perms.group_write = pair.to_style(),
            "gx" => self.perms.group_execute = pair.to_style(),
            "tr" => self.perms.other_read = pair.to_style(),
            "tw" => self.perms.other_write = pair.to_style(),
            "tx" => self.perms.other_execute = pair.to_style(),
            "su" => self.perms.special_user_file = pair.to_style(),
            "sf" => self.perms.special_other = pair.to_style(),
            "xa" => self.perms.attribute = pair.to_style(),

            // "sn" and "sb" fan out to every size bracket at once; the
            // per-bracket keys below can then override individual ones.
            "sn" => self.set_number_style(pair.to_style()),
            "sb" => self.set_unit_style(pair.to_style()),
            "nb" => self.size.number_byte = pair.to_style(),
            "nk" => self.size.number_kilo = pair.to_style(),
            "nm" => self.size.number_mega = pair.to_style(),
            "ng" => self.size.number_giga = pair.to_style(),
            "nh" => self.size.number_huge = pair.to_style(),
            "ub" => self.size.unit_byte = pair.to_style(),
            "uk" => self.size.unit_kilo = pair.to_style(),
            "um" => self.size.unit_mega = pair.to_style(),
            "ug" => self.size.unit_giga = pair.to_style(),
            "uh" => self.size.unit_huge = pair.to_style(),
            "df" => self.size.major = pair.to_style(),
            "ds" => self.size.minor = pair.to_style(),

            // users and groups
            "uu" => self.users.user_you = pair.to_style(),
            "un" => self.users.user_someone_else = pair.to_style(),
            "gu" => self.users.group_yours = pair.to_style(),
            "gn" => self.users.group_not_yours = pair.to_style(),

            // links and Git statuses
            "lc" => self.links.normal = pair.to_style(),
            "lm" => self.links.multi_link_file = pair.to_style(),
            "ga" => self.git.new = pair.to_style(),
            "gm" => self.git.modified = pair.to_style(),
            "gd" => self.git.deleted = pair.to_style(),
            "gv" => self.git.renamed = pair.to_style(),
            "gt" => self.git.typechange = pair.to_style(),

            // miscellaneous UI fields
            // NOTE(review): `octal` has no key here — presumably intentional
            // or added in a later release; confirm against the man page.
            "xx" => self.punctuation = pair.to_style(),
            "da" => self.date = pair.to_style(),
            "in" => self.inode = pair.to_style(),
            "bl" => self.blocks = pair.to_style(),
            "hd" => self.header = pair.to_style(),
            "lp" => self.symlink_path = pair.to_style(),
            "cc" => self.control_char = pair.to_style(),
            "bO" => self.broken_path_overlay = pair.to_style(),

            _ => return false,
        }
        true
    }

    /// Apply one style to every size-number bracket (the "sn" key).
    pub fn set_number_style(&mut self, style: Style) {
        self.size.number_byte = style;
        self.size.number_kilo = style;
        self.size.number_mega = style;
        self.size.number_giga = style;
        self.size.number_huge = style;
    }

    /// Apply one style to every size-unit bracket (the "sb" key).
    pub fn set_unit_style(&mut self, style: Style) {
        self.size.unit_byte = style;
        self.size.unit_kilo = style;
        self.size.unit_mega = style;
        self.size.unit_giga = style;
        self.size.unit_huge = style;
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/theme/default_theme.rs | src/theme/default_theme.rs | use ansi_term::Style;
use ansi_term::Colour::*;
use crate::theme::ColourScale;
use crate::theme::ui_styles::*;
impl UiStyles {
    /// The built-in colour scheme, used as the starting point before any
    /// environment-variable overrides are applied.
    pub fn default_theme(scale: ColourScale) -> Self {
        Self {
            colourful: true,

            filekinds: FileKinds {
                normal: Style::default(),
                directory: Blue.bold(),
                symlink: Cyan.normal(),
                pipe: Yellow.normal(),
                block_device: Yellow.bold(),
                char_device: Yellow.bold(),
                socket: Red.bold(),
                special: Yellow.normal(),
                executable: Green.bold(),
            },

            perms: Permissions {
                user_read: Yellow.bold(),
                user_write: Red.bold(),
                user_execute_file: Green.bold().underline(),
                user_execute_other: Green.bold(),

                group_read: Yellow.normal(),
                group_write: Red.normal(),
                group_execute: Green.normal(),

                other_read: Yellow.normal(),
                other_write: Red.normal(),
                other_execute: Green.normal(),

                special_user_file: Purple.normal(),
                special_other: Purple.normal(),

                attribute: Style::default(),
            },

            // Size styles vary with the user’s colour-scale preference.
            size: Size::colourful(scale),

            users: Users {
                user_you: Yellow.bold(),
                user_someone_else: Style::default(),
                group_yours: Yellow.bold(),
                group_not_yours: Style::default(),
            },

            links: Links {
                normal: Red.bold(),
                multi_link_file: Red.on(Yellow),
            },

            git: Git {
                new: Green.normal(),
                modified: Blue.normal(),
                deleted: Red.normal(),
                renamed: Yellow.normal(),
                typechange: Purple.normal(),
                ignored: Style::default().dimmed(),
                conflicted: Red.normal(),
            },

            punctuation: Fixed(244).normal(),
            date: Blue.normal(),
            inode: Purple.normal(),
            blocks: Cyan.normal(),
            octal: Purple.normal(),
            header: Style::default().underline(),
            symlink_path: Cyan.normal(),
            control_char: Red.normal(),
            broken_symlink: Red.normal(),
            broken_path_overlay: Style::default().underline(),
        }
    }
}
impl Size {
    /// Pick the size palette matching the user’s colour-scale setting.
    pub fn colourful(scale: ColourScale) -> Self {
        match scale {
            ColourScale::Gradient => Self::colourful_gradient(),
            ColourScale::Fixed => Self::colourful_fixed(),
        }
    }

    // One green for every size bracket, regardless of magnitude.
    fn colourful_fixed() -> Self {
        Self {
            major: Green.bold(),
            minor: Green.normal(),

            number_byte: Green.bold(),
            number_kilo: Green.bold(),
            number_mega: Green.bold(),
            number_giga: Green.bold(),
            number_huge: Green.bold(),

            unit_byte: Green.normal(),
            unit_kilo: Green.normal(),
            unit_mega: Green.normal(),
            unit_giga: Green.normal(),
            unit_huge: Green.normal(),
        }
    }

    // A different fixed xterm-256 colour per size bracket, so bigger
    // files stand out more.
    fn colourful_gradient() -> Self {
        Self {
            major: Green.bold(),
            minor: Green.normal(),

            number_byte: Fixed(118).normal(),
            number_kilo: Fixed(190).normal(),
            number_mega: Fixed(226).normal(),
            number_giga: Fixed(220).normal(),
            number_huge: Fixed(214).normal(),

            unit_byte: Green.normal(),
            unit_kilo: Green.normal(),
            unit_mega: Green.normal(),
            unit_giga: Green.normal(),
            unit_huge: Green.normal(),
        }
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/theme/mod.rs | src/theme/mod.rs | use ansi_term::Style;
use crate::fs::File;
use crate::output::file_name::Colours as FileNameColours;
use crate::output::render;
mod ui_styles;
pub use self::ui_styles::UiStyles;
pub use self::ui_styles::Size as SizeColours;
mod lsc;
pub use self::lsc::LSColors;
mod default_theme;
/// The theme-related options parsed from the command line and environment:
/// whether to colour at all, how to scale size colours, and the raw
/// colour-definition strings.
#[derive(PartialEq, Eq, Debug)]
pub struct Options {
    pub use_colours: UseColours,
    pub colour_scale: ColourScale,
    pub definitions: Definitions,
}
/// Under what circumstances we should display coloured, rather than plain,
/// output to the terminal.
///
/// By default, we want to display the colours when stdout can display them.
/// Turning them on when output is going to, say, a pipe, would make programs
/// such as `grep` or `more` not work properly. So the `Automatic` mode does
/// this check and only displays colours when they can be truly appreciated.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum UseColours {

    /// Display them even when output isn’t going to a terminal.
    Always,

    /// Display them when output is going to a terminal, but not otherwise.
    Automatic,

    /// Never display them, even when output is going to a terminal.
    Never,
}

/// Whether file-size colours vary by magnitude (`Gradient`) or use one
/// colour for every size (`Fixed`).
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum ColourScale {
    Fixed,
    Gradient,
}
/// The raw colour-definition strings taken from the `LS_COLORS` and
/// `EXA_COLORS` environment variables, if set.
#[derive(PartialEq, Eq, Debug, Default)]
pub struct Definitions {
    pub ls: Option<String>,
    pub exa: Option<String>,
}

/// A fully-assembled theme: the UI styles plus a file-name colouriser.
pub struct Theme {
    pub ui: UiStyles,
    pub exts: Box<dyn FileColours>,
}
impl Options {
    /// Build a usable `Theme` from these options, given whether output is
    /// going to a terminal.
    #[allow(trivial_casts)] // the `as Box<_>` stuff below warns about this for some reason
    pub fn to_theme(&self, isatty: bool) -> Theme {
        use crate::info::filetype::FileExtensions;

        // Colours disabled (explicitly, or automatically because output is
        // not a TTY): plain styles, and no file-name highlighting at all.
        if self.use_colours == UseColours::Never || (self.use_colours == UseColours::Automatic && ! isatty) {
            let ui = UiStyles::plain();
            let exts = Box::new(NoFileColours);
            return Theme { ui, exts };
        }

        // Parse the environment variables into colours and extension mappings
        let mut ui = UiStyles::default_theme(self.colour_scale);
        let (exts, use_default_filetypes) = self.definitions.parse_color_vars(&mut ui);

        // Use between 0 and 2 file name highlighters
        let exts = match (exts.is_non_empty(), use_default_filetypes) {
            (false, false) => Box::new(NoFileColours) as Box<_>,
            (false, true) => Box::new(FileExtensions) as Box<_>,
            ( true, false) => Box::new(exts) as Box<_>,
            ( true, true) => Box::new((exts, FileExtensions)) as Box<_>,
        };

        Theme { ui, exts }
    }
}
impl Definitions {

    /// Parse the environment variables into `LS_COLORS` pairs, putting file glob
    /// colours into the `ExtensionMappings` that gets returned, and using the
    /// two-character UI codes to modify the mutable `Colours`.
    ///
    /// Also returns if the `EXA_COLORS` variable should reset the existing file
    /// type mappings or not. The `reset` code needs to be the first one.
    fn parse_color_vars(&self, colours: &mut UiStyles) -> (ExtensionMappings, bool) {
        use log::*;

        let mut exts = ExtensionMappings::default();

        // LS_COLORS: recognised two-letter keys set file-kind styles; any
        // other key is treated as a file-name glob pattern.
        if let Some(lsc) = &self.ls {
            LSColors(lsc).each_pair(|pair| {
                if ! colours.set_ls(&pair) {
                    match glob::Pattern::new(pair.key) {
                        Ok(pat) => {
                            exts.add(pat, pair.to_style());
                        }
                        Err(e) => {
                            warn!("Couldn't parse glob pattern {:?}: {}", pair.key, e);
                        }
                    }
                }
            });
        }

        let mut use_default_filetypes = true;

        if let Some(exa) = &self.exa {
            // Is this hacky? Yes.
            if exa == "reset" || exa.starts_with("reset:") {
                use_default_filetypes = false;
            }

            // EXA_COLORS understands both the LS_COLORS keys and exa’s own
            // extra UI keys; anything else becomes a glob pattern.
            LSColors(exa).each_pair(|pair| {
                if ! colours.set_ls(&pair) && ! colours.set_exa(&pair) {
                    match glob::Pattern::new(pair.key) {
                        Ok(pat) => {
                            exts.add(pat, pair.to_style());
                        }
                        Err(e) => {
                            warn!("Couldn't parse glob pattern {:?}: {}", pair.key, e);
                        }
                    }
                };
            });
        }

        (exts, use_default_filetypes)
    }
}
/// Anything that can pick a style for a file’s name.
pub trait FileColours: std::marker::Sync {
    fn colour_file(&self, file: &File<'_>) -> Option<Style>;
}

/// A colouriser that never styles anything.
#[derive(PartialEq, Debug)]
struct NoFileColours;

impl FileColours for NoFileColours {
    fn colour_file(&self, _file: &File<'_>) -> Option<Style> {
        None
    }
}

// When getting the colour of a file from a *pair* of colourisers, try the
// first one then try the second one. This lets the user provide their own
// file type associations, while falling back to the default set if not set
// explicitly.
impl<A, B> FileColours for (A, B)
where A: FileColours,
      B: FileColours,
{
    fn colour_file(&self, file: &File<'_>) -> Option<Style> {
        self.0.colour_file(file)
            .or_else(|| self.1.colour_file(file))
    }
}
/// Glob-pattern-to-style mappings parsed from the environment variables.
#[derive(PartialEq, Debug, Default)]
struct ExtensionMappings {
    // Patterns are stored in definition order; lookups walk this list
    // backwards so entries defined later take precedence.
    mappings: Vec<(glob::Pattern, Style)>,
}
// Loop through backwards so that colours specified later in the list override
// colours specified earlier, like we do with options and strict mode
impl FileColours for ExtensionMappings {
    fn colour_file(&self, file: &File<'_>) -> Option<Style> {
        for (pattern, style) in self.mappings.iter().rev() {
            if pattern.matches(&file.name) {
                return Some(*style);
            }
        }

        None
    }
}
impl ExtensionMappings {

    /// Whether any mappings have been defined at all.
    fn is_non_empty(&self) -> bool {
        ! self.mappings.is_empty()
    }

    /// Adds a new glob-to-style mapping. Later additions override earlier
    /// ones, because lookups search the list in reverse.
    fn add(&mut self, pattern: glob::Pattern, style: Style) {
        self.mappings.push((pattern, style))
    }
}
// The impls below adapt the theme to the render module’s colour traits,
// each one simply delegating to a stored style.

impl render::BlocksColours for Theme {
    fn block_count(&self) -> Style { self.ui.blocks }
    fn no_blocks(&self) -> Style { self.ui.punctuation }
}

impl render::FiletypeColours for Theme {
    fn normal(&self) -> Style { self.ui.filekinds.normal }
    fn directory(&self) -> Style { self.ui.filekinds.directory }
    fn pipe(&self) -> Style { self.ui.filekinds.pipe }
    fn symlink(&self) -> Style { self.ui.filekinds.symlink }
    fn block_device(&self) -> Style { self.ui.filekinds.block_device }
    fn char_device(&self) -> Style { self.ui.filekinds.char_device }
    fn socket(&self) -> Style { self.ui.filekinds.socket }
    fn special(&self) -> Style { self.ui.filekinds.special }
}

impl render::GitColours for Theme {
    fn not_modified(&self) -> Style { self.ui.punctuation }
    // `new` here is the Git “new file” status, not a constructor.
    #[allow(clippy::new_ret_no_self)]
    fn new(&self) -> Style { self.ui.git.new }
    fn modified(&self) -> Style { self.ui.git.modified }
    fn deleted(&self) -> Style { self.ui.git.deleted }
    fn renamed(&self) -> Style { self.ui.git.renamed }
    fn type_change(&self) -> Style { self.ui.git.typechange }
    fn ignored(&self) -> Style { self.ui.git.ignored }
    fn conflicted(&self) -> Style { self.ui.git.conflicted }
}
// Group ownership is only meaningful on Unix.
#[cfg(unix)]
impl render::GroupColours for Theme {
    fn yours(&self) -> Style { self.ui.users.group_yours }
    fn not_yours(&self) -> Style { self.ui.users.group_not_yours }
}

impl render::LinksColours for Theme {
    fn normal(&self) -> Style { self.ui.links.normal }
    fn multi_link_file(&self) -> Style { self.ui.links.multi_link_file }
}

impl render::PermissionsColours for Theme {
    fn dash(&self) -> Style { self.ui.punctuation }
    fn user_read(&self) -> Style { self.ui.perms.user_read }
    fn user_write(&self) -> Style { self.ui.perms.user_write }
    fn user_execute_file(&self) -> Style { self.ui.perms.user_execute_file }
    fn user_execute_other(&self) -> Style { self.ui.perms.user_execute_other }
    fn group_read(&self) -> Style { self.ui.perms.group_read }
    fn group_write(&self) -> Style { self.ui.perms.group_write }
    fn group_execute(&self) -> Style { self.ui.perms.group_execute }
    fn other_read(&self) -> Style { self.ui.perms.other_read }
    fn other_write(&self) -> Style { self.ui.perms.other_write }
    fn other_execute(&self) -> Style { self.ui.perms.other_execute }
    fn special_user_file(&self) -> Style { self.ui.perms.special_user_file }
    fn special_other(&self) -> Style { self.ui.perms.special_other }
    fn attribute(&self) -> Style { self.ui.perms.attribute }
}
impl render::SizeColours for Theme {

    /// Picks the colour for the numeric part of a size, based on its
    /// metric or binary prefix (no prefix means plain bytes).
    fn size(&self, prefix: Option<number_prefix::Prefix>) -> Style {
        use number_prefix::Prefix::*;

        match prefix {
            None                    => self.ui.size.number_byte,
            Some(Kilo) | Some(Kibi) => self.ui.size.number_kilo,
            Some(Mega) | Some(Mebi) => self.ui.size.number_mega,
            Some(Giga) | Some(Gibi) => self.ui.size.number_giga,
            Some(_)                 => self.ui.size.number_huge,
        }
    }

    /// Picks the colour for the unit part of a size, mirroring `size`.
    fn unit(&self, prefix: Option<number_prefix::Prefix>) -> Style {
        use number_prefix::Prefix::*;

        match prefix {
            None                    => self.ui.size.unit_byte,
            Some(Kilo) | Some(Kibi) => self.ui.size.unit_kilo,
            Some(Mega) | Some(Mebi) => self.ui.size.unit_mega,
            Some(Giga) | Some(Gibi) => self.ui.size.unit_giga,
            Some(_)                 => self.ui.size.unit_huge,
        }
    }

    fn no_size(&self) -> Style { self.ui.punctuation }
    fn major(&self) -> Style { self.ui.size.major }
    fn comma(&self) -> Style { self.ui.punctuation }
    fn minor(&self) -> Style { self.ui.size.minor }
}
// User ownership is only meaningful on Unix.
#[cfg(unix)]
impl render::UserColours for Theme {
    fn you(&self) -> Style { self.ui.users.user_you }
    fn someone_else(&self) -> Style { self.ui.users.user_someone_else }
}

impl FileNameColours for Theme {
    fn normal_arrow(&self) -> Style { self.ui.punctuation }
    fn broken_symlink(&self) -> Style { self.ui.broken_symlink }
    // Broken-path styles get the overlay applied on top of the base style.
    fn broken_filename(&self) -> Style { apply_overlay(self.ui.broken_symlink, self.ui.broken_path_overlay) }
    fn broken_control_char(&self) -> Style { apply_overlay(self.ui.control_char, self.ui.broken_path_overlay) }
    fn control_char(&self) -> Style { self.ui.control_char }
    fn symlink_path(&self) -> Style { self.ui.symlink_path }
    fn executable_file(&self) -> Style { self.ui.filekinds.executable }

    // Looks the file up in the extension mappings, falling back to the
    // normal file-kind colour when no glob matches.
    fn colour_file(&self, file: &File<'_>) -> Style {
        self.exts.colour_file(file).unwrap_or(self.ui.filekinds.normal)
    }
}
/// Some of the styles are **overlays**: although they have the same attribute
/// set as regular styles (foreground and background colours, bold, underline,
/// etc), they’re intended to be used to *amend* existing styles.
///
/// For example, the target path of a broken symlink is displayed in a red,
/// underlined style by default. Paths can contain control characters, so
/// these control characters need to be underlined too, otherwise it looks
/// weird. So instead of having four separate configurable styles for “link
/// path”, “broken link path”, “control character” and “broken control
/// character”, there are styles for “link path”, “control character”, and
/// “broken link overlay”, the latter of which is just set to override the
/// underline attribute on the other two.
fn apply_overlay(mut base: Style, overlay: Style) -> Style {
    // The overlay’s colours win whenever they are set at all.
    base.foreground = overlay.foreground.or(base.foreground);
    base.background = overlay.background.or(base.background);

    // Boolean attributes can only be switched *on* by an overlay, never off.
    base.is_bold          |= overlay.is_bold;
    base.is_dimmed        |= overlay.is_dimmed;
    base.is_italic        |= overlay.is_italic;
    base.is_underline     |= overlay.is_underline;
    base.is_blink         |= overlay.is_blink;
    base.is_reverse       |= overlay.is_reverse;
    base.is_hidden        |= overlay.is_hidden;
    base.is_strikethrough |= overlay.is_strikethrough;

    base
}
// TODO: move this function to the ansi_term crate
#[cfg(test)]
mod customs_test {
    use super::*;
    use crate::theme::ui_styles::UiStyles;
    use ansi_term::Colour::*;

    macro_rules! test {

        // Asserts that parsing the two variables produces the expected
        // set of UI styles.
        ($name:ident: ls $ls:expr, exa $exa:expr => colours $expected:ident -> $process_expected:expr) => {
            #[test]
            fn $name() {
                let mut $expected = UiStyles::default();
                $process_expected();

                let definitions = Definitions {
                    ls: Some($ls.into()),
                    exa: Some($exa.into()),
                };

                let mut result = UiStyles::default();
                let (_exts, _reset) = definitions.parse_color_vars(&mut result);
                assert_eq!($expected, result);
            }
        };

        // Asserts that parsing the two variables produces the expected
        // glob-to-style mappings.
        ($name:ident: ls $ls:expr, exa $exa:expr => exts $mappings:expr) => {
            #[test]
            fn $name() {
                let mappings: Vec<(glob::Pattern, Style)>
                    = $mappings.iter()
                               .map(|t| (glob::Pattern::new(t.0).unwrap(), t.1))
                               .collect();

                let definitions = Definitions {
                    ls: Some($ls.into()),
                    exa: Some($exa.into()),
                };

                let (result, _reset) = definitions.parse_color_vars(&mut UiStyles::default());
                assert_eq!(ExtensionMappings { mappings }, result);
            }
        };

        // Asserts on both the UI styles and the glob mappings at once.
        ($name:ident: ls $ls:expr, exa $exa:expr => colours $expected:ident -> $process_expected:expr, exts $mappings:expr) => {
            #[test]
            fn $name() {
                let mut $expected = UiStyles::colourful(false);
                $process_expected();

                let mappings: Vec<(glob::Pattern, Style)>
                    = $mappings.into_iter()
                               .map(|t| (glob::Pattern::new(t.0).unwrap(), t.1))
                               .collect();

                let definitions = Definitions {
                    ls: Some($ls.into()),
                    exa: Some($exa.into()),
                };

                let mut meh = UiStyles::colourful(false);
                // This arm used to pass a stray `&vars` argument left over
                // from an older `parse_color_vars` signature; the current
                // method reads the variables from `self`.
                let (result, _reset) = definitions.parse_color_vars(&mut meh);
                assert_eq!(ExtensionMappings { mappings }, result);
                assert_eq!($expected, meh);
            }
        };
    }

    // LS_COLORS can affect all of these colours:
    test!(ls_di: ls "di=31", exa "" => colours c -> { c.filekinds.directory = Red.normal(); });
    test!(ls_ex: ls "ex=32", exa "" => colours c -> { c.filekinds.executable = Green.normal(); });
    test!(ls_fi: ls "fi=33", exa "" => colours c -> { c.filekinds.normal = Yellow.normal(); });
    test!(ls_pi: ls "pi=34", exa "" => colours c -> { c.filekinds.pipe = Blue.normal(); });
    test!(ls_so: ls "so=35", exa "" => colours c -> { c.filekinds.socket = Purple.normal(); });
    test!(ls_bd: ls "bd=36", exa "" => colours c -> { c.filekinds.block_device = Cyan.normal(); });
    test!(ls_cd: ls "cd=35", exa "" => colours c -> { c.filekinds.char_device = Purple.normal(); });
    test!(ls_ln: ls "ln=34", exa "" => colours c -> { c.filekinds.symlink = Blue.normal(); });
    test!(ls_or: ls "or=33", exa "" => colours c -> { c.broken_symlink = Yellow.normal(); });

    // EXA_COLORS can affect all those colours too:
    test!(exa_di: ls "", exa "di=32" => colours c -> { c.filekinds.directory = Green.normal(); });
    test!(exa_ex: ls "", exa "ex=33" => colours c -> { c.filekinds.executable = Yellow.normal(); });
    test!(exa_fi: ls "", exa "fi=34" => colours c -> { c.filekinds.normal = Blue.normal(); });
    test!(exa_pi: ls "", exa "pi=35" => colours c -> { c.filekinds.pipe = Purple.normal(); });
    test!(exa_so: ls "", exa "so=36" => colours c -> { c.filekinds.socket = Cyan.normal(); });
    test!(exa_bd: ls "", exa "bd=35" => colours c -> { c.filekinds.block_device = Purple.normal(); });
    test!(exa_cd: ls "", exa "cd=34" => colours c -> { c.filekinds.char_device = Blue.normal(); });
    test!(exa_ln: ls "", exa "ln=33" => colours c -> { c.filekinds.symlink = Yellow.normal(); });
    test!(exa_or: ls "", exa "or=32" => colours c -> { c.broken_symlink = Green.normal(); });

    // EXA_COLORS will even override options from LS_COLORS:
    test!(ls_exa_di: ls "di=31", exa "di=32" => colours c -> { c.filekinds.directory = Green.normal(); });
    test!(ls_exa_ex: ls "ex=32", exa "ex=33" => colours c -> { c.filekinds.executable = Yellow.normal(); });
    test!(ls_exa_fi: ls "fi=33", exa "fi=34" => colours c -> { c.filekinds.normal = Blue.normal(); });

    // But more importantly, EXA_COLORS has its own, special list of colours:
    test!(exa_ur: ls "", exa "ur=38;5;100" => colours c -> { c.perms.user_read = Fixed(100).normal(); });
    test!(exa_uw: ls "", exa "uw=38;5;101" => colours c -> { c.perms.user_write = Fixed(101).normal(); });
    test!(exa_ux: ls "", exa "ux=38;5;102" => colours c -> { c.perms.user_execute_file = Fixed(102).normal(); });
    test!(exa_ue: ls "", exa "ue=38;5;103" => colours c -> { c.perms.user_execute_other = Fixed(103).normal(); });
    test!(exa_gr: ls "", exa "gr=38;5;104" => colours c -> { c.perms.group_read = Fixed(104).normal(); });
    test!(exa_gw: ls "", exa "gw=38;5;105" => colours c -> { c.perms.group_write = Fixed(105).normal(); });
    test!(exa_gx: ls "", exa "gx=38;5;106" => colours c -> { c.perms.group_execute = Fixed(106).normal(); });
    test!(exa_tr: ls "", exa "tr=38;5;107" => colours c -> { c.perms.other_read = Fixed(107).normal(); });
    test!(exa_tw: ls "", exa "tw=38;5;108" => colours c -> { c.perms.other_write = Fixed(108).normal(); });
    test!(exa_tx: ls "", exa "tx=38;5;109" => colours c -> { c.perms.other_execute = Fixed(109).normal(); });
    test!(exa_su: ls "", exa "su=38;5;110" => colours c -> { c.perms.special_user_file = Fixed(110).normal(); });
    test!(exa_sf: ls "", exa "sf=38;5;111" => colours c -> { c.perms.special_other = Fixed(111).normal(); });
    test!(exa_xa: ls "", exa "xa=38;5;112" => colours c -> { c.perms.attribute = Fixed(112).normal(); });

    test!(exa_sn: ls "", exa "sn=38;5;113" => colours c -> {
        c.size.number_byte = Fixed(113).normal();
        c.size.number_kilo = Fixed(113).normal();
        c.size.number_mega = Fixed(113).normal();
        c.size.number_giga = Fixed(113).normal();
        c.size.number_huge = Fixed(113).normal();
    });

    test!(exa_sb: ls "", exa "sb=38;5;114" => colours c -> {
        c.size.unit_byte = Fixed(114).normal();
        c.size.unit_kilo = Fixed(114).normal();
        c.size.unit_mega = Fixed(114).normal();
        c.size.unit_giga = Fixed(114).normal();
        c.size.unit_huge = Fixed(114).normal();
    });

    test!(exa_nb: ls "", exa "nb=38;5;115" => colours c -> { c.size.number_byte = Fixed(115).normal(); });
    test!(exa_nk: ls "", exa "nk=38;5;116" => colours c -> { c.size.number_kilo = Fixed(116).normal(); });
    test!(exa_nm: ls "", exa "nm=38;5;117" => colours c -> { c.size.number_mega = Fixed(117).normal(); });
    test!(exa_ng: ls "", exa "ng=38;5;118" => colours c -> { c.size.number_giga = Fixed(118).normal(); });
    test!(exa_nh: ls "", exa "nh=38;5;119" => colours c -> { c.size.number_huge = Fixed(119).normal(); });

    test!(exa_ub: ls "", exa "ub=38;5;115" => colours c -> { c.size.unit_byte = Fixed(115).normal(); });
    test!(exa_uk: ls "", exa "uk=38;5;116" => colours c -> { c.size.unit_kilo = Fixed(116).normal(); });
    test!(exa_um: ls "", exa "um=38;5;117" => colours c -> { c.size.unit_mega = Fixed(117).normal(); });
    test!(exa_ug: ls "", exa "ug=38;5;118" => colours c -> { c.size.unit_giga = Fixed(118).normal(); });
    test!(exa_uh: ls "", exa "uh=38;5;119" => colours c -> { c.size.unit_huge = Fixed(119).normal(); });

    test!(exa_df: ls "", exa "df=38;5;115" => colours c -> { c.size.major = Fixed(115).normal(); });
    test!(exa_ds: ls "", exa "ds=38;5;116" => colours c -> { c.size.minor = Fixed(116).normal(); });

    test!(exa_uu: ls "", exa "uu=38;5;117" => colours c -> { c.users.user_you = Fixed(117).normal(); });
    test!(exa_un: ls "", exa "un=38;5;118" => colours c -> { c.users.user_someone_else = Fixed(118).normal(); });
    test!(exa_gu: ls "", exa "gu=38;5;119" => colours c -> { c.users.group_yours = Fixed(119).normal(); });
    test!(exa_gn: ls "", exa "gn=38;5;120" => colours c -> { c.users.group_not_yours = Fixed(120).normal(); });

    test!(exa_lc: ls "", exa "lc=38;5;121" => colours c -> { c.links.normal = Fixed(121).normal(); });
    test!(exa_lm: ls "", exa "lm=38;5;122" => colours c -> { c.links.multi_link_file = Fixed(122).normal(); });

    test!(exa_ga: ls "", exa "ga=38;5;123" => colours c -> { c.git.new = Fixed(123).normal(); });
    test!(exa_gm: ls "", exa "gm=38;5;124" => colours c -> { c.git.modified = Fixed(124).normal(); });
    test!(exa_gd: ls "", exa "gd=38;5;125" => colours c -> { c.git.deleted = Fixed(125).normal(); });
    test!(exa_gv: ls "", exa "gv=38;5;126" => colours c -> { c.git.renamed = Fixed(126).normal(); });
    test!(exa_gt: ls "", exa "gt=38;5;127" => colours c -> { c.git.typechange = Fixed(127).normal(); });

    test!(exa_xx: ls "", exa "xx=38;5;128" => colours c -> { c.punctuation = Fixed(128).normal(); });
    test!(exa_da: ls "", exa "da=38;5;129" => colours c -> { c.date = Fixed(129).normal(); });
    test!(exa_in: ls "", exa "in=38;5;130" => colours c -> { c.inode = Fixed(130).normal(); });
    test!(exa_bl: ls "", exa "bl=38;5;131" => colours c -> { c.blocks = Fixed(131).normal(); });
    test!(exa_hd: ls "", exa "hd=38;5;132" => colours c -> { c.header = Fixed(132).normal(); });
    test!(exa_lp: ls "", exa "lp=38;5;133" => colours c -> { c.symlink_path = Fixed(133).normal(); });
    test!(exa_cc: ls "", exa "cc=38;5;134" => colours c -> { c.control_char = Fixed(134).normal(); });
    test!(exa_bo: ls "", exa "bO=4" => colours c -> { c.broken_path_overlay = Style::default().underline(); });

    // All the while, LS_COLORS treats them as filenames:
    test!(ls_uu: ls "uu=38;5;117", exa "" => exts [ ("uu", Fixed(117).normal()) ]);
    test!(ls_un: ls "un=38;5;118", exa "" => exts [ ("un", Fixed(118).normal()) ]);
    test!(ls_gu: ls "gu=38;5;119", exa "" => exts [ ("gu", Fixed(119).normal()) ]);
    test!(ls_gn: ls "gn=38;5;120", exa "" => exts [ ("gn", Fixed(120).normal()) ]);

    // Just like all other keys:
    test!(ls_txt: ls "*.txt=31", exa "" => exts [ ("*.txt", Red.normal()) ]);
    test!(ls_mp3: ls "*.mp3=38;5;135", exa "" => exts [ ("*.mp3", Fixed(135).normal()) ]);
    test!(ls_mak: ls "Makefile=1;32;4", exa "" => exts [ ("Makefile", Green.bold().underline()) ]);
    test!(exa_txt: ls "", exa "*.zip=31" => exts [ ("*.zip", Red.normal()) ]);
    test!(exa_mp3: ls "", exa "lev.*=38;5;153" => exts [ ("lev.*", Fixed(153).normal()) ]);
    test!(exa_mak: ls "", exa "Cargo.toml=4;32;1" => exts [ ("Cargo.toml", Green.bold().underline()) ]);

    // Testing whether a glob from EXA_COLORS overrides a glob from LS_COLORS
    // can’t be tested here, because they’ll both be added to the same vec

    // Values get separated by colons:
    test!(ls_multi: ls "*.txt=31:*.rtf=32", exa "" => exts [ ("*.txt", Red.normal()), ("*.rtf", Green.normal()) ]);
    test!(exa_multi: ls "", exa "*.tmp=37:*.log=37" => exts [ ("*.tmp", White.normal()), ("*.log", White.normal()) ]);

    test!(ls_five: ls "1*1=31:2*2=32:3*3=1;33:4*4=34;1:5*5=35;4", exa "" => exts [
        ("1*1", Red.normal()), ("2*2", Green.normal()), ("3*3", Yellow.bold()), ("4*4", Blue.bold()), ("5*5", Purple.underline())
    ]);

    // Finally, colours get applied right-to-left:
    test!(ls_overwrite: ls "pi=31:pi=32:pi=33", exa "" => colours c -> { c.filekinds.pipe = Yellow.normal(); });
    test!(exa_overwrite: ls "", exa "da=36:da=35:da=34" => colours c -> { c.date = Blue.normal(); });
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/theme/lsc.rs | src/theme/lsc.rs | use std::iter::Peekable;
use std::ops::FnMut;
use ansi_term::{Colour, Style};
use ansi_term::Colour::*;
// Parsing the LS_COLORS environment variable into a map of names to Style values.
//
// This is sitting around undocumented at the moment because it’s a feature
// that should really be unnecessary! exa highlights its output by creating a
// theme of one Style value per part of the interface that can be coloured,
// then reading styles from that theme. The LS_COLORS variable, on the other
// hand, can contain arbitrary characters that ls is supposed to add to the
// output, without needing to know what they actually do. This puts exa in the
// annoying position of having to parse the ANSI escape codes _back_ into
// Style values before it’s able to use them. Doing this has a lot of
// downsides: if a new terminal feature is added with its own code, exa won’t
// be able to use this without explicit support for parsing the feature, while
// ls would not even need to know it existed. And there are some edge cases in
// ANSI codes, where terminals would accept codes exa is strict about it. It’s
// just not worth doing, and there should really be a way to just use slices
// of the LS_COLORS string without having to parse them.
/// A wrapper around the raw value of an `LS_COLORS`-style environment
/// variable, kept as a borrowed string so parsed keys and values can
/// borrow straight from it.
pub struct LSColors<'var>(pub &'var str);
impl<'var> LSColors<'var> {

    /// Walks over the colon-separated entries of the variable, running the
    /// callback for every one that looks like a `key=value` pair. Entries
    /// with no `=`, more than one `=`, or an empty key or value are skipped.
    pub fn each_pair<C>(&mut self, mut callback: C)
    where C: FnMut(Pair<'var>)
    {
        for entry in self.0.split(':') {
            let mut halves = entry.split('=');

            // Accept the entry only when it splits into exactly two
            // non-empty pieces — the third `next()` must come up empty.
            if let (Some(key), Some(value), None) = (halves.next(), halves.next(), halves.next()) {
                if ! key.is_empty() && ! value.is_empty() {
                    callback(Pair { key, value });
                }
            }
        }
    }
}
/// Parses the arguments that follow a “38” or “48” code: a “5” introduces
/// an 8-bit palette index, and a “2” introduces an R;G;B triple. Returns
/// `None` (after consuming the arguments) when they don’t parse.
fn parse_into_high_colour<'a, I>(iter: &mut Peekable<I>) -> Option<Colour>
where I: Iterator<Item = &'a str>
{
    match iter.peek() {
        Some(&"5") => {
            let _ = iter.next();  // skip the “5”
            if let Some(num) = iter.next().and_then(|byte| byte.parse().ok()) {
                return Some(Fixed(num));
            }
        }

        Some(&"2") => {
            let _ = iter.next();  // skip the “2”

            // Some terminals support R:G:B instead of R;G;B
            // but this clashes with splitting on ‘:’ in each_pair above.
            /*if hexes.contains(':') {
                let rgb = hexes.splitn(3, ':').collect::<Vec<_>>();
                if rgb.len() != 3 {
                    return None;
                }
                else if let (Ok(r), Ok(g), Ok(b)) = (rgb[0].parse(), rgb[1].parse(), rgb[2].parse()) {
                    return Some(RGB(r, g, b));
                }
            }*/

            // All three arguments are pulled off the iterator before any
            // parsing happens, so a malformed triple still consumes them.
            let triple = (iter.next(), iter.next(), iter.next());
            if let (Some(r), Some(g), Some(b)) = triple {
                if let (Ok(r), Ok(g), Ok(b)) = (r.parse(), g.parse(), b.parse()) {
                    return Some(RGB(r, g, b));
                }
            }
        }

        _ => {},
    }

    None
}
/// One `key=value` entry from an `LS_COLORS`-style variable, borrowing
/// both halves from the original string.
pub struct Pair<'var> {
    pub key: &'var str,
    pub value: &'var str,
}
impl<'var> Pair<'var> {

    /// Interprets this pair’s value as a semicolon-separated list of ANSI
    /// numeric codes, building up the equivalent `Style`. Codes that aren’t
    /// recognised are silently ignored.
    pub fn to_style(&self) -> Style {
        let mut style = Style::default();
        let mut iter = self.value.split(';').peekable();

        while let Some(num) = iter.next() {
            // Leading zeroes are stripped so that e.g. “01” matches “1”.
            match num.trim_start_matches('0') {

                // Bold and italic
                "1" => style = style.bold(),
                "2" => style = style.dimmed(),
                "3" => style = style.italic(),
                "4" => style = style.underline(),
                "5" => style = style.blink(),
                // 6 is supposedly a faster blink

                "7" => style = style.reverse(),
                "8" => style = style.hidden(),
                "9" => style = style.strikethrough(),

                // Foreground colours
                "30" => style = style.fg(Black),
                "31" => style = style.fg(Red),
                "32" => style = style.fg(Green),
                "33" => style = style.fg(Yellow),
                "34" => style = style.fg(Blue),
                "35" => style = style.fg(Purple),
                "36" => style = style.fg(Cyan),
                "37" => style = style.fg(White),
                // “38” takes its arguments from the same iterator.
                "38" => if let Some(c) = parse_into_high_colour(&mut iter) { style = style.fg(c) },

                // Background colours
                "40" => style = style.on(Black),
                "41" => style = style.on(Red),
                "42" => style = style.on(Green),
                "43" => style = style.on(Yellow),
                "44" => style = style.on(Blue),
                "45" => style = style.on(Purple),
                "46" => style = style.on(Cyan),
                "47" => style = style.on(White),
                "48" => if let Some(c) = parse_into_high_colour(&mut iter) { style = style.on(c) },

                _ => {/* ignore the error and do nothing */},
            }
        }

        style
    }
}
#[cfg(test)]
mod ansi_test {
    use super::*;
    use ansi_term::Style;

    // Asserts that parsing the given code list produces the given style.
    macro_rules! test {
        ($name:ident: $input:expr => $result:expr) => {
            #[test]
            fn $name() {
                assert_eq!(Pair { key: "", value: $input }.to_style(), $result);
            }
        };
    }

    // Styles
    test!(bold:  "1"         => Style::default().bold());
    test!(bold2: "01"        => Style::default().bold());
    test!(under: "4"         => Style::default().underline());
    test!(unde2: "04"        => Style::default().underline());
    test!(both:  "1;4"       => Style::default().bold().underline());
    test!(both2: "01;04"     => Style::default().bold().underline());
    test!(fg:    "31"        => Red.normal());
    test!(bg:    "43"        => Style::default().on(Yellow));
    test!(bfg:   "31;43"     => Red.on(Yellow));
    test!(bfg2:  "0031;0043" => Red.on(Yellow));
    test!(all:   "43;31;1;4" => Red.on(Yellow).bold().underline());
    test!(again: "1;1;1;1;1" => Style::default().bold());

    // Failure cases
    test!(empty: ""         => Style::default());
    test!(semis: ";;;;;;"   => Style::default());
    test!(nines: "99999999" => Style::default());
    test!(word:  "GREEN"    => Style::default());

    // Higher colours
    test!(hifg:  "38;5;149" => Fixed(149).normal());
    test!(hibg:  "48;5;1"   => Style::default().on(Fixed(1)));
    test!(hibo:  "48;5;1;1" => Style::default().on(Fixed(1)).bold());
    test!(hiund: "4;48;5;1" => Style::default().on(Fixed(1)).underline());

    test!(rgb:   "38;2;255;100;0"   => Style::default().fg(RGB(255, 100, 0)));
    test!(rgbi:  "38;2;255;100;0;3" => Style::default().fg(RGB(255, 100, 0)).italic());
    test!(rgbbg: "48;2;255;100;0"   => Style::default().on(RGB(255, 100, 0)));
    test!(rgbbi: "48;2;255;100;0;3" => Style::default().on(RGB(255, 100, 0)).italic());

    test!(fgbg:  "38;5;121;48;5;212" => Fixed(121).on(Fixed(212)));
    test!(bgfg:  "48;5;121;38;5;212" => Fixed(212).on(Fixed(121)));
    // 999 does not fit in a u8, so the whole code is ignored.
    test!(toohi: "48;5;999" => Style::default());
}
#[cfg(test)]
mod test {
    use super::*;

    // Collects every parsed (key, style) pair and compares the lot.
    macro_rules! test {
        ($name:ident: $input:expr => $result:expr) => {
            #[test]
            fn $name() {
                let mut lscs = Vec::new();
                // `p.key` is already a `&str`; the former `.clone()` here
                // only copied the reference, so it has been removed.
                LSColors($input).each_pair(|p| lscs.push( (p.key, p.to_style()) ));
                assert_eq!(lscs, $result.to_vec());
            }
        };
    }

    // Bad parses
    test!(empty:  ""     => []);
    test!(jibber: "blah" => []);

    test!(equals: "="    => []);
    test!(starts: "=di"  => []);
    test!(ends:   "id="  => []);

    // Foreground colours
    test!(green:  "cb=32" => [ ("cb", Green.normal()) ]);
    test!(red:    "di=31" => [ ("di", Red.normal()) ]);
    test!(blue:   "la=34" => [ ("la", Blue.normal()) ]);

    // Background colours
    test!(yellow: "do=43" => [ ("do", Style::default().on(Yellow)) ]);
    test!(purple: "re=45" => [ ("re", Style::default().on(Purple)) ]);
    test!(cyan:   "mi=46" => [ ("mi", Style::default().on(Cyan)) ]);

    // Bold and underline
    test!(bold:   "fa=1"   => [ ("fa", Style::default().bold()) ]);
    test!(under:  "so=4"   => [ ("so", Style::default().underline()) ]);
    test!(both:   "la=1;4" => [ ("la", Style::default().bold().underline()) ]);

    // More and many
    test!(more: "me=43;21;55;34:yu=1;4;1" => [ ("me", Blue.on(Yellow)), ("yu", Style::default().bold().underline()) ]);
    test!(many: "red=31:green=32:blue=34" => [ ("red", Red.normal()), ("green", Green.normal()), ("blue", Blue.normal()) ]);
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/fs/dir_action.rs | src/fs/dir_action.rs | //! What to do when encountering a directory?
/// The action to take when trying to list a file that turns out to be a
/// directory.
///
/// By default, exa will display the information about files passed in as
/// command-line arguments, with one file per entry. However, if a directory
/// is passed in, exa assumes that the user wants to see its contents, rather
/// than the directory itself.
///
/// This can get annoying sometimes: if a user does `exa ~/Downloads/img-*`
/// to see the details of every file starting with `img-`, any directories
/// that happen to start with the same will be listed after the files at
/// the end in a separate block. By listing directories as files, their
/// directory status will be ignored, and both will be listed side-by-side.
///
/// These two modes have recursive analogues in the “recurse” and “tree”
/// modes. Here, instead of just listing the directories, exa will descend
/// into them and print out their contents. The recurse mode does this by
/// having extra output blocks at the end, while the tree mode will show
/// directories inline, with their contents immediately underneath.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum DirAction {

    /// This directory should be listed along with the regular files, instead
    /// of having its contents queried.
    AsFile,

    /// This directory should not be listed, and should instead be opened and
    /// *its* files listed separately. This is the default behaviour.
    List,

    /// This directory should be listed along with the regular files, and then
    /// its contents should be listed afterward. The recursive contents of
    /// *those* contents are dictated by the options argument.
    Recurse(RecurseOptions),
}
impl DirAction {

    /// Gets the recurse options, if this dir action has any.
    pub fn recurse_options(self) -> Option<RecurseOptions> {
        if let Self::Recurse(opts) = self {
            Some(opts)
        }
        else {
            None
        }
    }

    /// Whether to treat directories as regular files or not.
    pub fn treat_dirs_as_files(self) -> bool {
        match self {
            Self::List        => false,
            Self::AsFile      => true,
            // In tree mode the directory itself is shown inline too.
            Self::Recurse(o)  => o.tree,
        }
    }
}
/// The options that determine how to recurse into a directory.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub struct RecurseOptions {

    /// Whether recursion should be done as a tree or as multiple individual
    /// views of files.
    pub tree: bool,

    /// The maximum number of times that recursion should descend to, if one
    /// is specified. `None` means unlimited depth.
    pub max_depth: Option<usize>,
}
impl RecurseOptions {

    /// Returns whether a directory of the given depth would be too deep.
    /// With no configured maximum, no depth is ever too deep.
    pub fn is_too_deep(self, depth: usize) -> bool {
        self.max_depth.map_or(false, |maximum| maximum <= depth)
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/fs/filter.rs | src/fs/filter.rs | //! Filtering and sorting the list of files before displaying them.
use std::cmp::Ordering;
use std::iter::FromIterator;
#[cfg(unix)]
use std::os::unix::fs::MetadataExt;
use crate::fs::DotFilter;
use crate::fs::File;
/// The **file filter** processes a list of files before displaying them to
/// the user, by removing files they don’t want to see, and putting the list
/// in the desired order.
///
/// Usually a user does not want to see *every* file in the list. The most
/// common case is to remove files starting with `.`, which are designated
/// as ‘hidden’ files.
///
/// The special files `.` and `..` files are not actually filtered out, but
/// need to be inserted into the list, in a special case.
///
/// The filter also governs sorting the list. After being filtered, pairs of
/// files are compared and sorted based on the result, with the sort field
/// performing the comparison.
#[derive(PartialEq, Eq, Debug, Clone)]
pub struct FileFilter {

    /// Whether directories should be listed first, and other types of file
    /// second. Some users prefer it like this.
    pub list_dirs_first: bool,

    /// The metadata field to sort by.
    pub sort_field: SortField,

    /// Whether to reverse the sorting order. This would sort the largest
    /// files first, or files starting with Z, or the most-recently-changed
    /// ones, depending on the sort field.
    pub reverse: bool,

    /// Whether to only show directories.
    pub only_dirs: bool,

    /// Which invisible “dot” files to include when listing a directory.
    ///
    /// Files starting with a single “.” are used to determine “system” or
    /// “configuration” files that should not be displayed in a regular
    /// directory listing, and the directory entries “.” and “..” are
    /// considered extra-special.
    ///
    /// This came about more or less by a complete historical accident,
    /// when the original `ls` tried to hide `.` and `..`:
    ///
    /// [Linux History: How Dot Files Became Hidden Files](https://linux-audit.com/linux-history-how-dot-files-became-hidden-files/)
    pub dot_filter: DotFilter,

    /// Glob patterns to ignore. Any file name that matches *any* of these
    /// patterns won’t be displayed in the list.
    pub ignore_patterns: IgnorePatterns,

    /// Whether to ignore Git-ignored patterns.
    pub git_ignore: GitIgnore,
}
impl FileFilter {

    /// Remove every file in the given vector that does *not* pass the
    /// filter predicate for files found inside a directory.
    pub fn filter_child_files(&self, files: &mut Vec<File<'_>>) {
        files.retain(|f| ! self.ignore_patterns.is_ignored(&f.name));

        // The directories-only filter applies on top of the ignore patterns.
        if self.only_dirs {
            files.retain(File::is_directory);
        }
    }

    /// Remove every file in the given vector that does *not* pass the
    /// filter predicate for file names specified on the command-line.
    ///
    /// The rules are different for these types of files than the other
    /// type because the ignore rules can be used with globbing. For
    /// example, running `exa -I='*.tmp' .vimrc` shouldn’t filter out the
    /// dotfile, because it’s been directly specified. But running
    /// `exa -I='*.ogg' music/*` should filter out the ogg files obtained
    /// from the glob, even though the globbing is done by the shell!
    pub fn filter_argument_files(&self, files: &mut Vec<File<'_>>) {
        files.retain(|f| ! self.ignore_patterns.is_ignored(&f.name));
    }

    /// Sort the files in the given vector based on the sort field option.
    pub fn sort_files<'a, F>(&self, files: &mut [F])
    where F: AsRef<File<'a>>
    {
        files.sort_by(|a, b| self.sort_field.compare_files(a.as_ref(), b.as_ref()));

        if self.reverse {
            files.reverse();
        }

        if self.list_dirs_first {
            // This relies on the sort being *stable*, so the ordering from
            // above is kept within each group. Directories get the key
            // `false` (by negating the predicate), which sorts them first.
            files.sort_by_key(|f| ! f.as_ref().points_to_directory());
        }
    }
}
/// User-supplied field to sort by.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum SortField {

    /// Don’t apply any sorting. This is usually used as an optimisation in
    /// scripts, where the order doesn’t matter.
    Unsorted,

    /// The file name. This is the default sorting.
    Name(SortCase),

    /// The file’s extension, with extensionless files being listed first.
    Extension(SortCase),

    /// The file’s size, in bytes.
    Size,

    /// The file’s inode, which usually corresponds to the order in which
    /// files were created on the filesystem, more or less.
    #[cfg(unix)]
    FileInode,

    /// The time the file was modified (the “mtime”).
    ///
    /// As this is stored as a Unix timestamp, rather than a local time
    /// instance, the time zone does not matter and will only be used to
    /// display the timestamps, not compare them.
    ModifiedDate,

    /// The time the file was accessed (the “atime”).
    ///
    /// Oddly enough, this field rarely holds the *actual* accessed time.
    /// Recording a read time means writing to the file each time it’s read
    /// slows the whole operation down, so many systems will only update the
    /// timestamp in certain circumstances. This has become common enough that
    /// it’s now expected behaviour!
    /// <https://unix.stackexchange.com/a/8842>
    AccessedDate,

    /// The time the file was changed (the “ctime”).
    ///
    /// This field is used to mark the time when a file’s metadata
    /// changed — its permissions, owners, or link count.
    ///
    /// In original Unix, this was, however, meant as creation time.
    /// <https://www.bell-labs.com/usr/dmr/www/cacm.html>
    ChangedDate,

    /// The time the file was created (the “btime” or “birthtime”).
    CreatedDate,

    /// The type of the file: directories, links, pipes, regular, files, etc.
    ///
    /// Files are ordered according to the `PartialOrd` implementation of
    /// `fs::fields::Type`, so changing that will change this.
    FileType,

    /// The “age” of the file, which is the time it was modified sorted
    /// backwards. The reverse of the `ModifiedDate` ordering!
    ///
    /// It turns out that listing the most-recently-modified files first is a
    /// common-enough use case that it deserves its own variant. This would be
    /// implemented by just using the modified date and setting the reverse
    /// flag, but this would make reversing *that* output not work, which is
    /// bad, even though that’s kind of nonsensical. So it’s its own variant
    /// that can be reversed like usual.
    ModifiedAge,

    /// The file's name, however if the name of the file begins with `.`
    /// ignore the leading `.` and then sort as Name
    NameMixHidden(SortCase),
}
/// Whether a field should be sorted case-sensitively or case-insensitively.
/// This determines which of the `natord` functions to use.
///
/// I kept on forgetting which one was sensitive and which one was
/// insensitive. Would a case-sensitive sort put capital letters first because
/// it takes the case of the letters into account, or intermingle them with
/// lowercase letters because it takes the difference between the two cases
/// into account? I gave up and just named these two variants after the
/// effects they have.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum SortCase {

    /// Sort files case-sensitively with uppercase first, with ‘A’ coming
    /// before ‘a’.
    ABCabc,

    /// Sort files case-insensitively, with ‘A’ being equal to ‘a’.
    AaBbCc,
}
impl SortField {

    /// Compares two files to determine the order they should be listed in,
    /// depending on the search field.
    ///
    /// The `natord` crate is used here to provide a more *natural* sorting
    /// order than just sorting character-by-character. This splits filenames
    /// into groups between letters and numbers, and then sorts those blocks
    /// together, so `file10` will sort after `file9`, instead of before it
    /// because of the `1`.
    pub fn compare_files(self, a: &File<'_>, b: &File<'_>) -> Ordering {
        use self::SortCase::{ABCabc, AaBbCc};

        match self {
            Self::Unsorted => Ordering::Equal,

            Self::Name(ABCabc) => natord::compare(&a.name, &b.name),
            Self::Name(AaBbCc) => natord::compare_ignore_case(&a.name, &b.name),

            Self::Size => a.metadata.len().cmp(&b.metadata.len()),

            #[cfg(unix)]
            Self::FileInode => a.metadata.ino().cmp(&b.metadata.ino()),

            Self::ModifiedDate => a.modified_time().cmp(&b.modified_time()),
            Self::AccessedDate => a.accessed_time().cmp(&b.accessed_time()),
            Self::ChangedDate => a.changed_time().cmp(&b.changed_time()),
            Self::CreatedDate => a.created_time().cmp(&b.created_time()),

            // Most-recently-modified first: compare with the operands
            // swapped rather than using the reverse flag (see the variant
            // docs on `SortField::ModifiedAge`).
            Self::ModifiedAge => b.modified_time().cmp(&a.modified_time()),

            // Ties on type fall back to a case-sensitive name comparison.
            Self::FileType => match a.type_char().cmp(&b.type_char()) { // todo: this recomputes the type char for every comparison
                Ordering::Equal => natord::compare(&*a.name, &*b.name),
                order => order,
            },

            // Ties on extension fall back to a name comparison with the
            // matching case behaviour.
            Self::Extension(ABCabc) => match a.ext.cmp(&b.ext) {
                Ordering::Equal => natord::compare(&*a.name, &*b.name),
                order => order,
            },

            Self::Extension(AaBbCc) => match a.ext.cmp(&b.ext) {
                Ordering::Equal => natord::compare_ignore_case(&*a.name, &*b.name),
                order => order,
            },

            Self::NameMixHidden(ABCabc) => natord::compare(
                Self::strip_dot(&a.name),
                Self::strip_dot(&b.name)
            ),
            Self::NameMixHidden(AaBbCc) => natord::compare_ignore_case(
                Self::strip_dot(&a.name),
                Self::strip_dot(&b.name)
            )
        }
    }

    /// Removes a single leading dot from a filename, if one is present, so
    /// that dotfiles sort among their non-hidden neighbours.
    fn strip_dot(n: &str) -> &str {
        n.strip_prefix('.').unwrap_or(n)
    }
}
/// The **ignore patterns** are a list of globs that are tested against
/// each filename, and if any of them match, that file isn’t displayed.
/// This lets a user hide, say, text files by ignoring `*.txt`.
#[derive(PartialEq, Eq, Default, Debug, Clone)]
pub struct IgnorePatterns {
    // The compiled glob patterns; a name is hidden if *any* of them match.
    patterns: Vec<glob::Pattern>,
}
impl FromIterator<glob::Pattern> for IgnorePatterns {

    /// Collect an iterator of already-compiled glob patterns into a set of
    /// ignore patterns.
    fn from_iter<I>(iter: I) -> Self
    where I: IntoIterator<Item = glob::Pattern>
    {
        Self { patterns: iter.into_iter().collect() }
    }
}
impl IgnorePatterns {

    /// Create a new list from the input glob strings, turning the inputs that
    /// are valid glob patterns into an `IgnorePatterns`. The inputs that
    /// don’t parse correctly are returned separately.
    pub fn parse_from_iter<'a, I: IntoIterator<Item = &'a str>>(iter: I) -> (Self, Vec<glob::PatternError>) {
        let iter = iter.into_iter();

        // Pre-size the pattern vector from the iterator’s upper bound when
        // one is reported — almost every input is a valid glob, so this is
        // usually exactly right. (`with_capacity(0)` allocates nothing.)
        let mut patterns = Vec::with_capacity(iter.size_hint().1.unwrap_or(0));

        // Similarly, assume there won’t be any errors.
        let mut errors = Vec::new();

        for input in iter {
            match glob::Pattern::new(input) {
                Ok(pattern) => patterns.push(pattern),
                Err(error) => errors.push(error),
            }
        }

        (Self { patterns }, errors)
    }

    /// Create a new empty set of patterns that matches nothing.
    pub fn empty() -> Self {
        Self { patterns: Vec::new() }
    }

    /// Test whether the given file should be hidden from the results.
    fn is_ignored(&self, file: &str) -> bool {
        self.patterns.iter().any(|pattern| pattern.matches(file))
    }
}
/// Whether to ignore or display files that Git would ignore.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum GitIgnore {
    /// Ignore files that Git would ignore.
    CheckAndIgnore,

    /// Display files, even if Git would ignore them.
    Off,
}
#[cfg(test)]
mod test_ignores {
    use super::*;

    // An empty pattern list must never hide anything.
    #[test]
    fn empty_matches_nothing() {
        let pats = IgnorePatterns::empty();
        assert!(!pats.is_ignored("nothing"));
        assert!(!pats.is_ignored("test.mp3"));
    }

    // A wildcard glob hides matching names only.
    #[test]
    fn ignores_a_glob() {
        let (pats, fails) = IgnorePatterns::parse_from_iter(vec![ "*.mp3" ]);
        assert!(fails.is_empty());
        assert!(!pats.is_ignored("nothing"));
        assert!(pats.is_ignored("test.mp3"));
    }

    // A glob with no wildcards acts as an exact-name match.
    #[test]
    fn ignores_an_exact_filename() {
        let (pats, fails) = IgnorePatterns::parse_from_iter(vec![ "nothing" ]);
        assert!(fails.is_empty());
        assert!(pats.is_ignored("nothing"));
        assert!(!pats.is_ignored("test.mp3"));
    }

    // Multiple patterns are OR-ed together.
    #[test]
    fn ignores_both() {
        let (pats, fails) = IgnorePatterns::parse_from_iter(vec![ "nothing", "*.mp3" ]);
        assert!(fails.is_empty());
        assert!(pats.is_ignored("nothing"));
        assert!(pats.is_ignored("test.mp3"));
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/fs/fields.rs | src/fs/fields.rs | //! Wrapper types for the values returned from `File`s.
//!
//! The methods of `File` that return information about the entry on the
//! filesystem -- size, modification date, block count, or Git status -- used
//! to just return these as formatted strings, but this became inflexible once
//! customisable output styles landed.
//!
//! Instead, they will return a wrapper type from this module, which tags the
//! type with what field it is while containing the actual raw value.
//!
//! The `output::details` module, among others, uses these types to render and
//! display the information as formatted strings.
// C-style `blkcnt_t` types don’t follow Rust’s rules!
#![allow(non_camel_case_types)]
#![allow(clippy::struct_excessive_bools)]
// Fixed-width stand-ins named after the corresponding C types (hence the
// `non_camel_case_types` allowance above).

/// The type of a file’s block count.
pub type blkcnt_t = u64;

/// The type of a file’s group ID.
pub type gid_t = u32;

/// The type of a file’s inode.
pub type ino_t = u64;

/// The type of a file’s number of links.
pub type nlink_t = u64;

/// The type of a file’s timestamp (creation, modification, access, etc).
/// Signed, so it can represent times before the Unix epoch.
pub type time_t = i64;

/// The type of a file’s user ID.
pub type uid_t = u32;
/// The file’s base type, which gets displayed in the very first column of the
/// details output.
///
/// This type is set entirely by the filesystem, rather than relying on a
/// file’s contents. So “link” is a type, but “image” is just a type of
/// regular file. (See the `filetype` module for those checks.)
///
/// Its ordering is used when sorting by type.
// NOTE: the derived `Ord` follows declaration order, so reordering these
// variants changes the type-sort order.
#[derive(PartialEq, Eq, PartialOrd, Ord, Copy, Clone)]
pub enum Type {
    /// A directory.
    Directory,
    /// A regular file.
    File,
    /// A symbolic link.
    Link,
    /// A named pipe (FIFO).
    Pipe,
    /// A socket.
    Socket,
    /// A character device.
    CharDevice,
    /// A block device.
    BlockDevice,
    /// Anything that doesn’t fit one of the categories above.
    Special,
}
impl Type {
    /// Whether this type represents a plain regular file, as opposed to a
    /// directory, link, or any of the special filesystem kinds.
    pub fn is_regular_file(self) -> bool {
        self == Self::File
    }
}
/// The file’s Unix permission bitfield, with one entry per bit.
#[derive(Copy, Clone)]
pub struct Permissions {
    // Read/write/execute for the owning user…
    pub user_read: bool,
    pub user_write: bool,
    pub user_execute: bool,

    // …for the owning group…
    pub group_read: bool,
    pub group_write: bool,
    pub group_execute: bool,

    // …and for everyone else.
    pub other_read: bool,
    pub other_write: bool,
    pub other_execute: bool,

    // The three special mode bits.
    pub sticky: bool,
    pub setgid: bool,
    pub setuid: bool,
}

/// The file's FileAttributes field, available only on Windows.
#[derive(Copy, Clone)]
pub struct Attributes {
    pub archive: bool,
    pub directory: bool,
    pub readonly: bool,
    pub hidden: bool,
    pub system: bool,
    pub reparse_point: bool,
}

/// The three pieces of information that are displayed as a single column in
/// the details view. These values are fused together to make the output a
/// little more compressed.
#[derive(Copy, Clone)]
pub struct PermissionsPlus {
    pub file_type: Type,
    #[cfg(unix)]
    pub permissions: Permissions,
    #[cfg(windows)]
    pub attributes: Attributes,
    // Whether the file has any extended attributes.
    pub xattrs: bool,
}

/// The permissions encoded as octal values
#[derive(Copy, Clone)]
pub struct OctalPermissions {
    pub permissions: Permissions,
}
/// A file’s number of hard links on the filesystem.
///
/// Under Unix, a file can exist on the filesystem only once but appear in
/// multiple directories. However, it’s rare (but occasionally useful!) for a
/// regular file to have a link count greater than 1, so we highlight the
/// block count specifically for this case.
#[derive(Copy, Clone)]
pub struct Links {

    /// The actual link count.
    pub count: nlink_t,

    /// Whether this file is a regular file with more than one hard link.
    pub multiple: bool,
}

/// A file’s inode. Every directory entry on a Unix filesystem has an inode,
/// including directories and links, so this is applicable to everything exa
/// can deal with.
#[derive(Copy, Clone)]
pub struct Inode(pub ino_t);

/// The number of blocks that a file takes up on the filesystem, if any.
#[derive(Copy, Clone)]
pub enum Blocks {

    /// This file has the given number of blocks.
    Some(blkcnt_t),

    /// This file isn’t of a type that can take up blocks.
    None,
}

/// The ID of the user that owns a file. This will only ever be a number;
/// looking up the username is done in the `display` module.
#[derive(Copy, Clone)]
pub struct User(pub uid_t);

/// The ID of the group that a file belongs to.
#[derive(Copy, Clone)]
pub struct Group(pub gid_t);

/// A file’s size, in bytes. This is usually formatted by the `number_prefix`
/// crate into something human-readable.
#[derive(Copy, Clone)]
pub enum Size {

    /// This file has a defined size.
    Some(u64),

    /// This file has no size, or has a size but we aren’t interested in it.
    ///
    /// Under Unix, directory entries that aren’t regular files will still
    /// have a file size. For example, a directory will just contain a list of
    /// its files as its “contents” and will be specially flagged as being a
    /// directory, rather than a file. However, seeing the “file size” of this
    /// data is rarely useful — I can’t think of a time when I’ve seen it and
    /// learnt something. So we discard it and just output “-” instead.
    ///
    /// See this answer for more: <https://unix.stackexchange.com/a/68266>
    None,

    /// This file is a block or character device, so instead of a size, print
    /// out the file’s major and minor device IDs.
    ///
    /// This is what ls does as well. Without it, the devices will just have
    /// file sizes of zero.
    DeviceIDs(DeviceIDs),
}

/// The major and minor device IDs that gets displayed for device files.
///
/// You can see what these device numbers mean:
/// - <http://www.lanana.org/docs/device-list/>
/// - <http://www.lanana.org/docs/device-list/devices-2.6+.txt>
#[derive(Copy, Clone)]
pub struct DeviceIDs {
    pub major: u8,
    pub minor: u8,
}

/// One of a file’s timestamps (created, accessed, or modified).
///
/// The derived `Ord` compares the `seconds` field first and `nanoseconds`
/// second (field order), which is the correct chronological ordering for
/// this layout.
#[derive(Copy, Clone, PartialEq, Eq, PartialOrd, Ord)]
pub struct Time {
    pub seconds: time_t,
    pub nanoseconds: time_t,
}
/// A file’s status in a Git repository. Whether a file is in a repository or
/// not is handled by the Git module, rather than having a “null” variant in
/// this enum.
#[derive(PartialEq, Eq, Copy, Clone)]
pub enum GitStatus {

    /// This file hasn’t changed since the last commit.
    NotModified,

    /// This file didn’t exist for the last commit, and is not specified in
    /// the ignored files list.
    New,

    /// A file that’s been modified since the last commit.
    Modified,

    /// A deleted file. This can’t ever be shown, but it’s here anyway!
    Deleted,

    /// A file that Git has tracked a rename for.
    Renamed,

    /// A file that’s had its type (such as the file permissions) changed.
    TypeChange,

    /// A file that’s ignored (that matches a line in .gitignore)
    Ignored,

    /// A file that’s updated but unmerged.
    Conflicted,
}

/// A file’s complete Git status. It’s possible to make changes to a file, add
/// it to the staging area, then make *more* changes, so we need to list each
/// file’s status for both of these.
#[derive(Copy, Clone)]
pub struct Git {
    // Status of the version in the index (staging area).
    pub staged: GitStatus,
    // Status of the version in the working tree.
    pub unstaged: GitStatus,
}
impl Default for Git {

    /// Create a Git status for a file with nothing done to it: clean in
    /// both the index and the working tree.
    fn default() -> Self {
        let clean = GitStatus::NotModified;
        Self { staged: clean, unstaged: clean }
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/fs/file.rs | src/fs/file.rs | //! Files, and methods and fields to access their metadata.
use std::io;
#[cfg(unix)]
use std::os::unix::fs::{FileTypeExt, MetadataExt, PermissionsExt};
#[cfg(windows)]
use std::os::windows::fs::MetadataExt;
use std::path::{Path, PathBuf};
use std::time::{Duration, SystemTime, UNIX_EPOCH};
use log::*;
use crate::fs::dir::Dir;
use crate::fs::fields as f;
/// A **File** is a wrapper around one of Rust’s `PathBuf` values, along with
/// associated data about the file.
///
/// Each file is definitely going to have its filename displayed at least
/// once, have its file extension extracted at least once, and have its metadata
/// information queried at least once, so it makes sense to do all this at the
/// start and hold on to all the information.
///
/// The `'dir` lifetime ties a `File` to the `Dir` it was read from, if any,
/// via the `parent_dir` field below.
pub struct File<'dir> {

    /// The filename portion of this file’s path, including the extension.
    ///
    /// This is used to compare against certain filenames (such as checking if
    /// it’s “Makefile” or something) and to highlight only the filename in
    /// colour when displaying the path.
    pub name: String,

    /// The file’s name’s extension, if present, extracted from the name.
    ///
    /// This is queried many times over, so it’s worth caching it.
    pub ext: Option<String>,

    /// The path that begat this file.
    ///
    /// Even though the file’s name is extracted, the path needs to be kept
    /// around, as certain operations involve looking up the file’s absolute
    /// location (such as searching for compiled files) or using its original
    /// path (following a symlink).
    pub path: PathBuf,

    /// A cached `metadata` (`stat`) call for this file.
    ///
    /// This too is queried multiple times, and is *not* cached by the OS, as
    /// it could easily change between invocations — but exa is so short-lived
    /// it’s better to just cache it.
    pub metadata: std::fs::Metadata,

    /// A reference to the directory that contains this file, if any.
    ///
    /// Filenames that get passed in on the command-line directly will have no
    /// parent directory reference — although they technically have one on the
    /// filesystem, we’ll never need to look at it, so it’ll be `None`.
    /// However, *directories* that get passed in will produce files that
    /// contain a reference to it, which is used in certain operations (such
    /// as looking up compiled files).
    pub parent_dir: Option<&'dir Dir>,

    /// Whether this is one of the two `--all all` directories, `.` and `..`.
    ///
    /// Unlike all other entries, these are not returned as part of the
    /// directory’s children, and are in fact added specifically by exa; this
    /// means that they should be skipped when recursing.
    pub is_all_all: bool,
}
impl<'dir> File<'dir> {
/// Builds a `File` for an entry named on the command line (or produced by
/// a directory listing), statting it in the process. A caller-supplied
/// `filename` overrides the name derived from the path.
pub fn from_args<PD, FN>(path: PathBuf, parent_dir: PD, filename: FN) -> io::Result<File<'dir>>
where PD: Into<Option<&'dir Dir>>,
      FN: Into<Option<String>>
{
    let parent_dir = parent_dir.into();
    let name = filename.into().unwrap_or_else(|| File::filename(&path));
    let ext = File::ext(&path);

    debug!("Statting file {:?}", &path);
    let metadata = std::fs::symlink_metadata(&path)?;

    Ok(File { name, ext, path, metadata, parent_dir, is_all_all: false })
}
/// Builds the synthetic `.` entry for `parent_dir` itself, for use with
/// `--all --all`. It is statted like any other file, but always named
/// `"."` and flagged `is_all_all` so recursion skips it.
pub fn new_aa_current(parent_dir: &'dir Dir) -> io::Result<File<'dir>> {
    let path = parent_dir.path.clone();
    let ext = File::ext(&path);

    debug!("Statting file {:?}", &path);
    let metadata = std::fs::symlink_metadata(&path)?;
    let is_all_all = true;
    let parent_dir = Some(parent_dir);

    Ok(File { path, parent_dir, metadata, ext, name: ".".into(), is_all_all })
}

/// Builds the synthetic `..` entry for `--all --all`: the same as
/// `new_aa_current`, except the caller supplies the parent’s `path` and
/// the entry is named `".."`.
pub fn new_aa_parent(path: PathBuf, parent_dir: &'dir Dir) -> io::Result<File<'dir>> {
    let ext = File::ext(&path);

    debug!("Statting file {:?}", &path);
    let metadata = std::fs::symlink_metadata(&path)?;
    let is_all_all = true;
    let parent_dir = Some(parent_dir);

    Ok(File { path, parent_dir, metadata, ext, name: "..".into(), is_all_all })
}
/// A file’s name is derived from its string. This needs to handle directories
/// such as `/` or `..`, which have no `file_name` component. So instead, just
/// use the last component as the name.
pub fn filename(path: &Path) -> String {
    match path.components().next_back() {
        Some(component) => component.as_os_str().to_string_lossy().into_owned(),
        None => {
            // use the path as fallback
            error!("Path {:?} has no last component", path);
            path.display().to_string()
        }
    }
}
/// Extract an extension from a file path, if one is present, in lowercase.
///
/// The extension is the series of characters after the last dot. This
/// deliberately counts dotfiles, so the “.git” folder has the extension “git”.
///
/// ASCII lowercasing is used because these extensions are only compared
/// against a pre-compiled list of extensions which are known to only exist
/// within ASCII, so it’s alright.
fn ext(path: &Path) -> Option<String> {
    let name = path.file_name()?.to_string_lossy().into_owned();
    let dot = name.rfind('.')?;
    Some(name[dot + 1 ..].to_ascii_lowercase())
}
/// Whether this file is a directory on the filesystem.
pub fn is_directory(&self) -> bool {
    self.metadata.is_dir()
}

/// Whether this file is a directory, or a symlink pointing to a directory.
pub fn points_to_directory(&self) -> bool {
    if self.is_directory() {
        return true;
    }

    if self.is_link() {
        // Follow the link; the target is itself a `File`, so this chases
        // chains of links until a non-link (or a broken link) is reached.
        let target = self.link_target();
        if let FileTarget::Ok(target) = target {
            return target.points_to_directory();
        }
    }

    // Not a directory, and either not a link or a link that doesn’t
    // resolve to one.
    false
}
/// If this file is a directory on the filesystem, then clone its
/// `PathBuf` for use in one of our own `Dir` values, and read a list of
/// its contents.
///
/// Returns an IO error upon failure, but this shouldn’t be used to check
/// if a `File` is a directory or not! For that, just use `is_directory()`.
pub fn to_dir(&self) -> io::Result<Dir> {
    Dir::read_dir(self.path.clone())
}

/// Whether this file is a regular file on the filesystem — that is, not a
/// directory, a link, or anything else treated specially.
pub fn is_file(&self) -> bool {
    self.metadata.is_file()
}

/// Whether this file is both a regular file *and* executable for the
/// current user. An executable file has a different purpose from an
/// executable directory, so they should be highlighted differently.
#[cfg(unix)]
pub fn is_executable_file(&self) -> bool {
    let bit = modes::USER_EXECUTE;
    // Both conditions must hold: directories are usually executable too,
    // but get styled as directories, not as executables.
    self.is_file() && (self.metadata.permissions().mode() & bit) == bit
}

/// Whether this file is a symlink on the filesystem.
pub fn is_link(&self) -> bool {
    self.metadata.file_type().is_symlink()
}

/// Whether this file is a named pipe (FIFO) on the filesystem.
#[cfg(unix)]
pub fn is_pipe(&self) -> bool {
    self.metadata.file_type().is_fifo()
}

/// Whether this file is a char device on the filesystem.
#[cfg(unix)]
pub fn is_char_device(&self) -> bool {
    self.metadata.file_type().is_char_device()
}

/// Whether this file is a block device on the filesystem.
#[cfg(unix)]
pub fn is_block_device(&self) -> bool {
    self.metadata.file_type().is_block_device()
}

/// Whether this file is a socket on the filesystem.
#[cfg(unix)]
pub fn is_socket(&self) -> bool {
    self.metadata.file_type().is_socket()
}
/// Re-prefixes the path pointed to by this file, if it’s a symlink, to
/// make it an absolute path that can be accessed from whichever
/// directory exa is being run from.
fn reorient_target_path(&self, path: &Path) -> PathBuf {
    if path.is_absolute() {
        return path.to_path_buf();
    }

    // A relative target gets resolved against the directory the link
    // lives in: its `Dir` if we have one, otherwise its path’s parent,
    // and as a last resort its own path.
    match self.parent_dir {
        Some(dir) => dir.join(path),
        None => match self.path.parent() {
            Some(parent) => parent.join(path),
            None => self.path.join(path),
        },
    }
}
/// Again assuming this file is a symlink, follows that link and returns
/// the result of following it.
///
/// For a working symlink that the user is allowed to follow,
/// this will be the `File` object at the other end, which can then have
/// its name, colour, and other details read.
///
/// For a broken symlink, returns where the file *would* be, if it
/// existed. If this file cannot be read at all, returns the error that
/// we got when we tried to read it.
pub fn link_target(&self) -> FileTarget<'dir> {

    // We need to be careful to treat the path actually pointed to by
    // this file — which could be absolute or relative — to the path
    // we actually look up and turn into a `File` — which needs to be
    // absolute to be accessible from any directory.
    debug!("Reading link {:?}", &self.path);
    let path = match std::fs::read_link(&self.path) {
        Ok(p) => p,
        Err(e) => return FileTarget::Err(e),
    };

    let absolute_path = self.reorient_target_path(&path);

    // Use plain `metadata` instead of `symlink_metadata` - we *want* to
    // follow links.
    match std::fs::metadata(&absolute_path) {
        Ok(metadata) => {
            // The target `File` keeps the path as written in the link
            // (not the reoriented one), and has no parent directory.
            let ext = File::ext(&path);
            let name = File::filename(&path);
            let file = File { parent_dir: None, path, ext, metadata, name, is_all_all: false };
            FileTarget::Ok(Box::new(file))
        }

        // The target can’t be statted: report the path where it would
        // have been, as a broken link.
        Err(e) => {
            error!("Error following link {:?}: {:#?}", &path, e);
            FileTarget::Broken(path)
        }
    }
}
/// This file’s number of hard links.
///
/// It also reports whether this is both a regular file, and a file with
/// multiple links. This is important, because a file with multiple links
/// is uncommon, while you come across directories and other types
/// with multiple links much more often. Thus, it should get highlighted
/// more attentively.
#[cfg(unix)]
pub fn links(&self) -> f::Links {
    let count = self.metadata.nlink();
    let multiple = self.is_file() && count > 1;
    f::Links { count, multiple }
}
/// This file’s inode.
#[cfg(unix)]
pub fn inode(&self) -> f::Inode {
    f::Inode(self.metadata.ino())
}

/// This file’s number of filesystem blocks.
///
/// (Not the size of each block, which we don’t actually report on)
#[cfg(unix)]
pub fn blocks(&self) -> f::Blocks {
    // Only regular files and links get a block count; every other kind
    // of entry reports `None`.
    if self.is_file() || self.is_link() {
        f::Blocks::Some(self.metadata.blocks())
    }
    else {
        f::Blocks::None
    }
}

/// The ID of the user that own this file.
#[cfg(unix)]
pub fn user(&self) -> f::User {
    f::User(self.metadata.uid())
}

/// The ID of the group that owns this file.
#[cfg(unix)]
pub fn group(&self) -> f::Group {
    f::Group(self.metadata.gid())
}
/// This file’s size, if it’s a regular file.
///
/// For directories, no size is given. Although they do have a size on
/// some filesystems, I’ve never looked at one of those numbers and gained
/// any information from it. So it’s going to be hidden instead.
///
/// Block and character devices return their device IDs, because they
/// usually just have a file size of zero.
#[cfg(unix)]
pub fn size(&self) -> f::Size {
    if self.is_directory() {
        f::Size::None
    }
    else if self.is_char_device() || self.is_block_device() {
        let device_ids = self.metadata.rdev().to_be_bytes();

        // In C-land, getting the major and minor device IDs is done with
        // preprocessor macros called `major` and `minor` that depend on
        // the size of `dev_t`, but we just take the second-to-last and
        // last bytes.
        //
        // NOTE(review): this assumes each ID fits in a single byte;
        // larger device numbers would be truncated here — confirm against
        // the platform’s `dev_t` encoding.
        f::Size::DeviceIDs(f::DeviceIDs {
            major: device_ids[6],
            minor: device_ids[7],
        })
    }
    else {
        f::Size::Some(self.metadata.len())
    }
}

/// This file’s size, if it’s a regular file (Windows version: there are
/// no device files to special-case, so only directories are hidden).
#[cfg(windows)]
pub fn size(&self) -> f::Size {
    if self.is_directory() {
        f::Size::None
    }
    else {
        f::Size::Some(self.metadata.len())
    }
}
/// This file’s last modified timestamp, if available on this platform.
pub fn modified_time(&self) -> Option<SystemTime> {
    self.metadata.modified().ok()
}

/// This file’s last changed timestamp, if available on this platform.
///
/// Unlike the other timestamps, the ctime is read as a raw
/// seconds-plus-nanoseconds pair, so a pre-epoch (negative) value has to
/// be converted to a `SystemTime` by hand.
#[cfg(unix)]
pub fn changed_time(&self) -> Option<SystemTime> {
    let (mut sec, mut nanosec) = (self.metadata.ctime(), self.metadata.ctime_nsec());

    if sec < 0 {
        // The nanosecond part always counts *forwards*; borrow one second
        // from it so that both parts describe a span pointing the same
        // way (backwards), which is then subtracted from the epoch.
        if nanosec > 0 {
            sec += 1;
            nanosec -= 1_000_000_000;
        }

        let duration = Duration::new(sec.unsigned_abs(), nanosec.unsigned_abs() as u32);
        Some(UNIX_EPOCH - duration)
    }
    else {
        let duration = Duration::new(sec as u64, nanosec as u32);
        Some(UNIX_EPOCH + duration)
    }
}

/// This file’s last changed timestamp (Windows version: falls back to the
/// modified time).
#[cfg(windows)]
pub fn changed_time(&self) -> Option<SystemTime> {
    return self.modified_time()
}

/// This file’s last accessed timestamp, if available on this platform.
pub fn accessed_time(&self) -> Option<SystemTime> {
    self.metadata.accessed().ok()
}

/// This file’s created timestamp, if available on this platform.
pub fn created_time(&self) -> Option<SystemTime> {
    self.metadata.created().ok()
}
/// This file’s ‘type’.
///
/// This is used as the leftmost character of the permissions column.
/// The file type can usually be guessed from the colour of the file, but
/// ls puts this character there.
#[cfg(unix)]
pub fn type_char(&self) -> f::Type {
    if self.is_file() {
        f::Type::File
    }
    else if self.is_directory() {
        f::Type::Directory
    }
    else if self.is_pipe() {
        f::Type::Pipe
    }
    else if self.is_link() {
        f::Type::Link
    }
    else if self.is_char_device() {
        f::Type::CharDevice
    }
    else if self.is_block_device() {
        f::Type::BlockDevice
    }
    else if self.is_socket() {
        f::Type::Socket
    }
    else {
        f::Type::Special
    }
}

/// This file’s ‘type’ (Windows version: only files and directories are
/// distinguished; anything else is ‘special’).
#[cfg(windows)]
pub fn type_char(&self) -> f::Type {
    if self.is_file() {
        f::Type::File
    }
    else if self.is_directory() {
        f::Type::Directory
    }
    else {
        f::Type::Special
    }
}
/// This file’s permissions, with flags for each bit.
#[cfg(unix)]
pub fn permissions(&self) -> f::Permissions {
    let bits = self.metadata.mode();
    // A flag is set when *all* of the bit’s 1-bits are present in the mode.
    let has_bit = |bit| bits & bit == bit;

    f::Permissions {
        user_read: has_bit(modes::USER_READ),
        user_write: has_bit(modes::USER_WRITE),
        user_execute: has_bit(modes::USER_EXECUTE),

        group_read: has_bit(modes::GROUP_READ),
        group_write: has_bit(modes::GROUP_WRITE),
        group_execute: has_bit(modes::GROUP_EXECUTE),

        other_read: has_bit(modes::OTHER_READ),
        other_write: has_bit(modes::OTHER_WRITE),
        other_execute: has_bit(modes::OTHER_EXECUTE),

        sticky: has_bit(modes::STICKY),
        setgid: has_bit(modes::SETGID),
        setuid: has_bit(modes::SETUID),
    }
}

/// This file’s Windows attribute flags, decoded from the raw
/// FILE_ATTRIBUTE bitfield.
#[cfg(windows)]
pub fn attributes(&self) -> f::Attributes {
    let bits = self.metadata.file_attributes();
    let has_bit = |bit| bits & bit == bit;

    // https://docs.microsoft.com/en-us/windows/win32/fileio/file-attribute-constants
    f::Attributes {
        directory: has_bit(0x10),
        archive: has_bit(0x20),
        readonly: has_bit(0x1),
        hidden: has_bit(0x2),
        system: has_bit(0x4),
        reparse_point: has_bit(0x400),
    }
}
/// Whether this file’s extension is any of the strings that get passed in.
///
/// This will always return `false` if the file has no extension.
pub fn extension_is_one_of(&self, choices: &[&str]) -> bool {
    self.ext.as_deref().map_or(false, |ext| choices.contains(&ext))
}

/// Whether this file’s name, including extension, is any of the strings
/// that get passed in.
pub fn name_is_one_of(&self, choices: &[&str]) -> bool {
    choices.contains(&self.name.as_str())
}
}
// A `File` trivially borrows itself, so generic code that takes
// `AsRef<File>` (such as `sort_files`) works on plain files as well as on
// wrapper types.
impl<'a> AsRef<File<'a>> for File<'a> {
    fn as_ref(&self) -> &File<'a> {
        self
    }
}
/// The result of following a symlink.
pub enum FileTarget<'dir> {

    /// The symlink pointed at a file that exists.
    Ok(Box<File<'dir>>),

    /// The symlink pointed at a file that does not exist. Holds the path
    /// where the file would be, if it existed.
    Broken(PathBuf),

    /// There was an IO error when following the link. This can happen if the
    /// file isn’t a link to begin with, but also if, say, we don’t have
    /// permission to follow it.
    Err(io::Error),

    // Err is its own variant, instead of having the whole thing be inside an
    // `io::Result`, because being unable to follow a symlink is not a serious
    // error — we just display the error message and move on.
}
impl<'dir> FileTarget<'dir> {

    /// Whether this link doesn’t lead to a file, for whatever reason. This
    /// gets used to determine how to highlight the link in grid views.
    pub fn is_broken(&self) -> bool {
        !matches!(self, Self::Ok(_))
    }
}
/// More readable aliases for the permission bits exposed by libc.
#[allow(trivial_numeric_casts)]
#[cfg(unix)]
mod modes {

    // The `libc::mode_t` type’s actual type varies, but the value returned
    // from `metadata.permissions().mode()` is always `u32`, so every libc
    // constant is cast up to that width here.
    pub type Mode = u32;

    pub const USER_READ: Mode = libc::S_IRUSR as Mode;
    pub const USER_WRITE: Mode = libc::S_IWUSR as Mode;
    pub const USER_EXECUTE: Mode = libc::S_IXUSR as Mode;

    pub const GROUP_READ: Mode = libc::S_IRGRP as Mode;
    pub const GROUP_WRITE: Mode = libc::S_IWGRP as Mode;
    pub const GROUP_EXECUTE: Mode = libc::S_IXGRP as Mode;

    pub const OTHER_READ: Mode = libc::S_IROTH as Mode;
    pub const OTHER_WRITE: Mode = libc::S_IWOTH as Mode;
    pub const OTHER_EXECUTE: Mode = libc::S_IXOTH as Mode;

    pub const STICKY: Mode = libc::S_ISVTX as Mode;
    pub const SETGID: Mode = libc::S_ISGID as Mode;
    pub const SETUID: Mode = libc::S_ISUID as Mode;
}
// Unit tests for extension extraction.
#[cfg(test)]
mod ext_test {
    use super::File;
    use std::path::Path;

    #[test]
    fn extension() {
        assert_eq!(Some("dat".to_string()), File::ext(Path::new("fester.dat")))
    }

    // Dotfiles deliberately count: “.vimrc” has the extension “vimrc”.
    #[test]
    fn dotfile() {
        assert_eq!(Some("vimrc".to_string()), File::ext(Path::new(".vimrc")))
    }

    #[test]
    fn no_extension() {
        assert_eq!(None, File::ext(Path::new("jarlsberg")))
    }
}

// Unit tests for filename derivation, including the `.`/`..`/`/` cases
// that have no `file_name` component.
#[cfg(test)]
mod filename_test {
    use super::File;
    use std::path::Path;

    #[test]
    fn file() {
        assert_eq!("fester.dat", File::filename(Path::new("fester.dat")))
    }

    #[test]
    fn no_path() {
        assert_eq!("foo.wha", File::filename(Path::new("/var/cache/foo.wha")))
    }

    #[test]
    fn here() {
        assert_eq!(".", File::filename(Path::new(".")))
    }

    #[test]
    fn there() {
        assert_eq!("..", File::filename(Path::new("..")))
    }

    #[test]
    fn everywhere() {
        assert_eq!("..", File::filename(Path::new("./..")))
    }

    #[test]
    #[cfg(unix)]
    fn topmost() {
        assert_eq!("/", File::filename(Path::new("/")))
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/fs/mod.rs | src/fs/mod.rs | mod dir;
pub use self::dir::{Dir, DotFilter};
mod file;
pub use self::file::{File, FileTarget};
pub mod dir_action;
pub mod feature;
pub mod fields;
pub mod filter;
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/fs/dir.rs | src/fs/dir.rs | use crate::fs::feature::git::GitCache;
use crate::fs::fields::GitStatus;
use std::io;
use std::fs;
use std::path::{Path, PathBuf};
use std::slice::Iter as SliceIter;
use log::*;
use crate::fs::File;
/// A **Dir** provides a cached list of the file paths in a directory that’s
/// being listed.
///
/// This object gets passed to the Files themselves, in order for them to
/// check the existence of surrounding files, then highlight themselves
/// accordingly. (See `File#get_source_files`)
pub struct Dir {

    /// A vector of the files that have been read from this directory.
    contents: Vec<PathBuf>,

    /// The path that was read.
    pub path: PathBuf,
}
impl Dir {
/// Create a new Dir object filled with all the files in the directory
/// pointed to by the given path. Fails if the directory can’t be read, or
/// isn’t actually a directory, or if there’s an IO error that occurs at
/// any point.
///
/// The `read_dir` iterator doesn’t actually yield the `.` and `..`
/// entries, so if the user wants to see them, we’ll have to add them
/// ourselves after the files have been read.
pub fn read_dir(path: PathBuf) -> io::Result<Self> {
info!("Reading directory {:?}", &path);
let contents = fs::read_dir(&path)?
.map(|result| result.map(|entry| entry.path()))
.collect::<Result<_, _>>()?;
Ok(Self { contents, path })
}
/// Produce an iterator of IO results of trying to read all the files in
/// this directory.
pub fn files<'dir, 'ig>(&'dir self, dots: DotFilter, git: Option<&'ig GitCache>, git_ignoring: bool) -> Files<'dir, 'ig> {
Files {
inner: self.contents.iter(),
dir: self,
dotfiles: dots.shows_dotfiles(),
dots: dots.dots(),
git,
git_ignoring,
}
}
/// Whether this directory contains a file with the given path.
pub fn contains(&self, path: &Path) -> bool {
self.contents.iter().any(|p| p.as_path() == path)
}
/// Append a path onto the path specified by this directory.
pub fn join(&self, child: &Path) -> PathBuf {
self.path.join(child)
}
}
/// Iterator over reading the contents of a directory as `File` objects.
pub struct Files<'dir, 'ig> {
/// The internal iterator over the paths that have been read already.
inner: SliceIter<'dir, PathBuf>,
/// The directory that begat those paths.
dir: &'dir Dir,
/// Whether to include dotfiles in the list.
dotfiles: bool,
/// Whether the `.` or `..` directories should be produced first, before
/// any files have been listed.
dots: DotsNext,
git: Option<&'ig GitCache>,
git_ignoring: bool,
}
impl<'dir, 'ig> Files<'dir, 'ig> {
fn parent(&self) -> PathBuf {
// We can’t use `Path#parent` here because all it does is remove the
// last path component, which is no good for us if the path is
// relative. For example, while the parent of `/testcases/files` is
// `/testcases`, the parent of `.` is an empty path. Adding `..` on
// the end is the only way to get to the *actual* parent directory.
self.dir.path.join("..")
}
/// Go through the directory until we encounter a file we can list (which
/// varies depending on the dotfile visibility flag)
fn next_visible_file(&mut self) -> Option<Result<File<'dir>, (PathBuf, io::Error)>> {
loop {
if let Some(path) = self.inner.next() {
let filename = File::filename(path);
if ! self.dotfiles && filename.starts_with('.') {
continue;
}
// Also hide _prefix files on Windows because it's used by old applications
// as an alternative to dot-prefix files.
#[cfg(windows)]
if ! self.dotfiles && filename.starts_with('_') {
continue;
}
if self.git_ignoring {
let git_status = self.git.map(|g| g.get(path, false)).unwrap_or_default();
if git_status.unstaged == GitStatus::Ignored {
continue;
}
}
return Some(File::from_args(path.clone(), self.dir, filename)
.map_err(|e| (path.clone(), e)))
}
return None
}
}
}
/// The dot directories that need to be listed before actual files, if any.
/// If these aren’t being printed, then `FilesNext` is used to skip them.
enum DotsNext {
/// List the `.` directory next.
Dot,
/// List the `..` directory next.
DotDot,
/// Forget about the dot directories and just list files.
Files,
}
impl<'dir, 'ig> Iterator for Files<'dir, 'ig> {
type Item = Result<File<'dir>, (PathBuf, io::Error)>;
fn next(&mut self) -> Option<Self::Item> {
match self.dots {
DotsNext::Dot => {
self.dots = DotsNext::DotDot;
Some(File::new_aa_current(self.dir)
.map_err(|e| (Path::new(".").to_path_buf(), e)))
}
DotsNext::DotDot => {
self.dots = DotsNext::Files;
Some(File::new_aa_parent(self.parent(), self.dir)
.map_err(|e| (self.parent(), e)))
}
DotsNext::Files => {
self.next_visible_file()
}
}
}
}
/// Usually files in Unix use a leading dot to be hidden or visible, but two
/// entries in particular are “extra-hidden”: `.` and `..`, which only become
/// visible after an extra `-a` option.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum DotFilter {
/// Shows files, dotfiles, and `.` and `..`.
DotfilesAndDots,
/// Show files and dotfiles, but hide `.` and `..`.
Dotfiles,
/// Just show files, hiding anything beginning with a dot.
JustFiles,
}
impl Default for DotFilter {
fn default() -> Self {
Self::JustFiles
}
}
impl DotFilter {
/// Whether this filter should show dotfiles in a listing.
fn shows_dotfiles(self) -> bool {
match self {
Self::JustFiles => false,
Self::Dotfiles => true,
Self::DotfilesAndDots => true,
}
}
/// Whether this filter should add dot directories to a listing.
fn dots(self) -> DotsNext {
match self {
Self::JustFiles => DotsNext::Files,
Self::Dotfiles => DotsNext::Files,
Self::DotfilesAndDots => DotsNext::Dot,
}
}
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/fs/feature/xattr.rs | src/fs/feature/xattr.rs | //! Extended attribute support for Darwin and Linux systems.
#![allow(trivial_casts)] // for ARM
use std::cmp::Ordering;
use std::io;
use std::path::Path;
pub const ENABLED: bool = cfg!(any(target_os = "macos", target_os = "linux"));
pub trait FileAttributes {
fn attributes(&self) -> io::Result<Vec<Attribute>>;
fn symlink_attributes(&self) -> io::Result<Vec<Attribute>>;
}
#[cfg(any(target_os = "macos", target_os = "linux"))]
impl FileAttributes for Path {
fn attributes(&self) -> io::Result<Vec<Attribute>> {
list_attrs(&lister::Lister::new(FollowSymlinks::Yes), self)
}
fn symlink_attributes(&self) -> io::Result<Vec<Attribute>> {
list_attrs(&lister::Lister::new(FollowSymlinks::No), self)
}
}
#[cfg(not(any(target_os = "macos", target_os = "linux")))]
impl FileAttributes for Path {
fn attributes(&self) -> io::Result<Vec<Attribute>> {
Ok(Vec::new())
}
fn symlink_attributes(&self) -> io::Result<Vec<Attribute>> {
Ok(Vec::new())
}
}
/// Attributes which can be passed to `Attribute::list_with_flags`
#[cfg(any(target_os = "macos", target_os = "linux"))]
#[derive(Copy, Clone)]
pub enum FollowSymlinks {
Yes,
No,
}
/// Extended attribute
#[derive(Debug, Clone)]
pub struct Attribute {
pub name: String,
pub size: usize,
}
#[cfg(any(target_os = "macos", target_os = "linux"))]
pub fn list_attrs(lister: &lister::Lister, path: &Path) -> io::Result<Vec<Attribute>> {
use std::ffi::CString;
let c_path = match path.to_str().and_then(|s| CString::new(s).ok()) {
Some(cstring) => cstring,
None => {
return Err(io::Error::new(io::ErrorKind::Other, "Error: path somehow contained a NUL?"));
}
};
let bufsize = lister.listxattr_first(&c_path);
match bufsize.cmp(&0) {
Ordering::Less => return Err(io::Error::last_os_error()),
Ordering::Equal => return Ok(Vec::new()),
Ordering::Greater => {},
}
let mut buf = vec![0_u8; bufsize as usize];
let err = lister.listxattr_second(&c_path, &mut buf, bufsize);
match err.cmp(&0) {
Ordering::Less => return Err(io::Error::last_os_error()),
Ordering::Equal => return Ok(Vec::new()),
Ordering::Greater => {},
}
let mut names = Vec::new();
if err > 0 {
// End indices of the attribute names
// the buffer contains 0-terminated c-strings
let idx = buf.iter().enumerate().filter_map(|(i, v)|
if *v == 0 { Some(i) } else { None }
);
let mut start = 0;
for end in idx {
let c_end = end + 1; // end of the c-string (including 0)
let size = lister.getxattr(&c_path, &buf[start..c_end]);
if size > 0 {
names.push(Attribute {
name: lister.translate_attribute_name(&buf[start..end]),
size: size as usize,
});
}
start = c_end;
}
}
Ok(names)
}
#[cfg(target_os = "macos")]
mod lister {
use super::FollowSymlinks;
use libc::{c_int, size_t, ssize_t, c_char, c_void};
use std::ffi::CString;
use std::ptr;
extern "C" {
fn listxattr(
path: *const c_char,
namebuf: *mut c_char,
size: size_t,
options: c_int,
) -> ssize_t;
fn getxattr(
path: *const c_char,
name: *const c_char,
value: *mut c_void,
size: size_t,
position: u32,
options: c_int,
) -> ssize_t;
}
pub struct Lister {
c_flags: c_int,
}
impl Lister {
pub fn new(do_follow: FollowSymlinks) -> Self {
let c_flags: c_int = match do_follow {
FollowSymlinks::Yes => 0x0001,
FollowSymlinks::No => 0x0000,
};
Self { c_flags }
}
pub fn translate_attribute_name(&self, input: &[u8]) -> String {
unsafe { std::str::from_utf8_unchecked(input).into() }
}
pub fn listxattr_first(&self, c_path: &CString) -> ssize_t {
unsafe {
listxattr(
c_path.as_ptr(),
ptr::null_mut(),
0,
self.c_flags,
)
}
}
pub fn listxattr_second(&self, c_path: &CString, buf: &mut Vec<u8>, bufsize: ssize_t) -> ssize_t {
unsafe {
listxattr(
c_path.as_ptr(),
buf.as_mut_ptr().cast::<c_char>(),
bufsize as size_t,
self.c_flags,
)
}
}
pub fn getxattr(&self, c_path: &CString, buf: &[u8]) -> ssize_t {
unsafe {
getxattr(
c_path.as_ptr(),
buf.as_ptr().cast::<c_char>(),
ptr::null_mut(),
0,
0,
self.c_flags,
)
}
}
}
}
#[cfg(target_os = "linux")]
mod lister {
use std::ffi::CString;
use libc::{size_t, ssize_t, c_char, c_void};
use super::FollowSymlinks;
use std::ptr;
extern "C" {
fn listxattr(
path: *const c_char,
list: *mut c_char,
size: size_t,
) -> ssize_t;
fn llistxattr(
path: *const c_char,
list: *mut c_char,
size: size_t,
) -> ssize_t;
fn getxattr(
path: *const c_char,
name: *const c_char,
value: *mut c_void,
size: size_t,
) -> ssize_t;
fn lgetxattr(
path: *const c_char,
name: *const c_char,
value: *mut c_void,
size: size_t,
) -> ssize_t;
}
pub struct Lister {
follow_symlinks: FollowSymlinks,
}
impl Lister {
pub fn new(follow_symlinks: FollowSymlinks) -> Lister {
Lister { follow_symlinks }
}
pub fn translate_attribute_name(&self, input: &[u8]) -> String {
String::from_utf8_lossy(input).into_owned()
}
pub fn listxattr_first(&self, c_path: &CString) -> ssize_t {
let listxattr = match self.follow_symlinks {
FollowSymlinks::Yes => listxattr,
FollowSymlinks::No => llistxattr,
};
unsafe {
listxattr(
c_path.as_ptr().cast(),
ptr::null_mut(),
0,
)
}
}
pub fn listxattr_second(&self, c_path: &CString, buf: &mut Vec<u8>, bufsize: ssize_t) -> ssize_t {
let listxattr = match self.follow_symlinks {
FollowSymlinks::Yes => listxattr,
FollowSymlinks::No => llistxattr,
};
unsafe {
listxattr(
c_path.as_ptr().cast(),
buf.as_mut_ptr().cast(),
bufsize as size_t,
)
}
}
pub fn getxattr(&self, c_path: &CString, buf: &[u8]) -> ssize_t {
let getxattr = match self.follow_symlinks {
FollowSymlinks::Yes => getxattr,
FollowSymlinks::No => lgetxattr,
};
unsafe {
getxattr(
c_path.as_ptr().cast(),
buf.as_ptr().cast(),
ptr::null_mut(),
0,
)
}
}
}
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/fs/feature/git.rs | src/fs/feature/git.rs | //! Getting the Git status of files and directories.
use std::ffi::OsStr;
#[cfg(target_family = "unix")]
use std::os::unix::ffi::OsStrExt;
use std::path::{Path, PathBuf};
use std::sync::Mutex;
use log::*;
use crate::fs::fields as f;
/// A **Git cache** is assembled based on the user’s input arguments.
///
/// This uses vectors to avoid the overhead of hashing: it’s not worth it when the
/// expected number of Git repositories per exa invocation is 0 or 1...
pub struct GitCache {
/// A list of discovered Git repositories and their paths.
repos: Vec<GitRepo>,
/// Paths that we’ve confirmed do not have Git repositories underneath them.
misses: Vec<PathBuf>,
}
impl GitCache {
pub fn has_anything_for(&self, index: &Path) -> bool {
self.repos.iter().any(|e| e.has_path(index))
}
pub fn get(&self, index: &Path, prefix_lookup: bool) -> f::Git {
self.repos.iter()
.find(|e| e.has_path(index))
.map(|repo| repo.search(index, prefix_lookup))
.unwrap_or_default()
}
}
use std::iter::FromIterator;
impl FromIterator<PathBuf> for GitCache {
fn from_iter<I>(iter: I) -> Self
where I: IntoIterator<Item=PathBuf>
{
let iter = iter.into_iter();
let mut git = Self {
repos: Vec::with_capacity(iter.size_hint().0),
misses: Vec::new(),
};
for path in iter {
if git.misses.contains(&path) {
debug!("Skipping {:?} because it already came back Gitless", path);
}
else if git.repos.iter().any(|e| e.has_path(&path)) {
debug!("Skipping {:?} because we already queried it", path);
}
else {
match GitRepo::discover(path) {
Ok(r) => {
if let Some(r2) = git.repos.iter_mut().find(|e| e.has_workdir(&r.workdir)) {
debug!("Adding to existing repo (workdir matches with {:?})", r2.workdir);
r2.extra_paths.push(r.original_path);
continue;
}
debug!("Discovered new Git repo");
git.repos.push(r);
}
Err(miss) => {
git.misses.push(miss)
}
}
}
}
git
}
}
/// A **Git repository** is one we’ve discovered somewhere on the filesystem.
pub struct GitRepo {
/// The queryable contents of the repository: either a `git2` repo, or the
/// cached results from when we queried it last time.
contents: Mutex<GitContents>,
/// The working directory of this repository.
/// This is used to check whether two repositories are the same.
workdir: PathBuf,
/// The path that was originally checked to discover this repository.
/// This is as important as the extra_paths (it gets checked first), but
/// is separate to avoid having to deal with a non-empty Vec.
original_path: PathBuf,
/// Any other paths that were checked only to result in this same
/// repository.
extra_paths: Vec<PathBuf>,
}
/// A repository’s queried state.
enum GitContents {
/// All the interesting Git stuff goes through this.
Before {
repo: git2::Repository,
},
/// Temporary value used in `repo_to_statuses` so we can move the
/// repository out of the `Before` variant.
Processing,
/// The data we’ve extracted from the repository, but only after we’ve
/// actually done so.
After {
statuses: Git,
},
}
impl GitRepo {
/// Searches through this repository for a path (to a file or directory,
/// depending on the prefix-lookup flag) and returns its Git status.
///
/// Actually querying the `git2` repository for the mapping of paths to
/// Git statuses is only done once, and gets cached so we don’t need to
/// re-query the entire repository the times after that.
///
/// The temporary `Processing` enum variant is used after the `git2`
/// repository is moved out, but before the results have been moved in!
/// See <https://stackoverflow.com/q/45985827/3484614>
fn search(&self, index: &Path, prefix_lookup: bool) -> f::Git {
use std::mem::replace;
let mut contents = self.contents.lock().unwrap();
if let GitContents::After { ref statuses } = *contents {
debug!("Git repo {:?} has been found in cache", &self.workdir);
return statuses.status(index, prefix_lookup);
}
debug!("Querying Git repo {:?} for the first time", &self.workdir);
let repo = replace(&mut *contents, GitContents::Processing).inner_repo();
let statuses = repo_to_statuses(&repo, &self.workdir);
let result = statuses.status(index, prefix_lookup);
let _processing = replace(&mut *contents, GitContents::After { statuses });
result
}
/// Whether this repository has the given working directory.
fn has_workdir(&self, path: &Path) -> bool {
self.workdir == path
}
/// Whether this repository cares about the given path at all.
fn has_path(&self, path: &Path) -> bool {
path.starts_with(&self.original_path) || self.extra_paths.iter().any(|e| path.starts_with(e))
}
/// Searches for a Git repository at any point above the given path.
/// Returns the original buffer if none is found.
fn discover(path: PathBuf) -> Result<Self, PathBuf> {
info!("Searching for Git repository above {:?}", path);
let repo = match git2::Repository::discover(&path) {
Ok(r) => r,
Err(e) => {
error!("Error discovering Git repositories: {:?}", e);
return Err(path);
}
};
if let Some(workdir) = repo.workdir() {
let workdir = workdir.to_path_buf();
let contents = Mutex::new(GitContents::Before { repo });
Ok(Self { contents, workdir, original_path: path, extra_paths: Vec::new() })
}
else {
warn!("Repository has no workdir?");
Err(path)
}
}
}
impl GitContents {
/// Assumes that the repository hasn’t been queried, and extracts it
/// (consuming the value) if it has. This is needed because the entire
/// enum variant gets replaced when a repo is queried (see above).
fn inner_repo(self) -> git2::Repository {
if let Self::Before { repo } = self {
repo
}
else {
unreachable!("Tried to extract a non-Repository")
}
}
}
/// Iterates through a repository’s statuses, consuming it and returning the
/// mapping of files to their Git status.
/// We will have already used the working directory at this point, so it gets
/// passed in rather than deriving it from the `Repository` again.
fn repo_to_statuses(repo: &git2::Repository, workdir: &Path) -> Git {
let mut statuses = Vec::new();
info!("Getting Git statuses for repo with workdir {:?}", workdir);
match repo.statuses(None) {
Ok(es) => {
for e in es.iter() {
#[cfg(target_family = "unix")]
let path = workdir.join(Path::new(OsStr::from_bytes(e.path_bytes())));
// TODO: handle non Unix systems better:
// https://github.com/ogham/exa/issues/698
#[cfg(not(target_family = "unix"))]
let path = workdir.join(Path::new(e.path().unwrap()));
let elem = (path, e.status());
statuses.push(elem);
}
}
Err(e) => {
error!("Error looking up Git statuses: {:?}", e);
}
}
Git { statuses }
}
// The `repo.statuses` call above takes a long time. exa debug output:
//
// 20.311276 INFO:exa::fs::feature::git: Getting Git statuses for repo with workdir "/vagrant/"
// 20.799610 DEBUG:exa::output::table: Getting Git status for file "./Cargo.toml"
//
// Even inserting another logging line immediately afterwards doesn’t make it
// look any faster.
/// Container of Git statuses for all the files in this folder’s Git repository.
struct Git {
statuses: Vec<(PathBuf, git2::Status)>,
}
impl Git {
/// Get either the file or directory status for the given path.
/// “Prefix lookup” means that it should report an aggregate status of all
/// paths starting with the given prefix (in other words, a directory).
fn status(&self, index: &Path, prefix_lookup: bool) -> f::Git {
if prefix_lookup { self.dir_status(index) }
else { self.file_status(index) }
}
/// Get the user-facing status of a file.
/// We check the statuses directly applying to a file, and for the ignored
/// status we check if any of its parents directories is ignored by git.
fn file_status(&self, file: &Path) -> f::Git {
let path = reorient(file);
let s = self.statuses.iter()
.filter(|p| if p.1 == git2::Status::IGNORED {
path.starts_with(&p.0)
} else {
p.0 == path
})
.fold(git2::Status::empty(), |a, b| a | b.1);
let staged = index_status(s);
let unstaged = working_tree_status(s);
f::Git { staged, unstaged }
}
/// Get the combined, user-facing status of a directory.
/// Statuses are aggregating (for example, a directory is considered
/// modified if any file under it has the status modified), except for
/// ignored status which applies to files under (for example, a directory
/// is considered ignored if one of its parent directories is ignored).
fn dir_status(&self, dir: &Path) -> f::Git {
let path = reorient(dir);
let s = self.statuses.iter()
.filter(|p| if p.1 == git2::Status::IGNORED {
path.starts_with(&p.0)
} else {
p.0.starts_with(&path)
})
.fold(git2::Status::empty(), |a, b| a | b.1);
let staged = index_status(s);
let unstaged = working_tree_status(s);
f::Git { staged, unstaged }
}
}
/// Converts a path to an absolute path based on the current directory.
/// Paths need to be absolute for them to be compared properly, otherwise
/// you’d ask a repo about “./README.md” but it only knows about
/// “/vagrant/README.md”, prefixed by the workdir.
#[cfg(unix)]
fn reorient(path: &Path) -> PathBuf {
use std::env::current_dir;
// TODO: I’m not 100% on this func tbh
let path = match current_dir() {
Err(_) => Path::new(".").join(&path),
Ok(dir) => dir.join(&path),
};
path.canonicalize().unwrap_or(path)
}
#[cfg(windows)]
fn reorient(path: &Path) -> PathBuf {
let unc_path = path.canonicalize().unwrap();
// On Windows UNC path is returned. We need to strip the prefix for it to work.
let normal_path = unc_path.as_os_str().to_str().unwrap().trim_left_matches("\\\\?\\");
return PathBuf::from(normal_path);
}
/// The character to display if the file has been modified, but not staged.
fn working_tree_status(status: git2::Status) -> f::GitStatus {
match status {
s if s.contains(git2::Status::WT_NEW) => f::GitStatus::New,
s if s.contains(git2::Status::WT_MODIFIED) => f::GitStatus::Modified,
s if s.contains(git2::Status::WT_DELETED) => f::GitStatus::Deleted,
s if s.contains(git2::Status::WT_RENAMED) => f::GitStatus::Renamed,
s if s.contains(git2::Status::WT_TYPECHANGE) => f::GitStatus::TypeChange,
s if s.contains(git2::Status::IGNORED) => f::GitStatus::Ignored,
s if s.contains(git2::Status::CONFLICTED) => f::GitStatus::Conflicted,
_ => f::GitStatus::NotModified,
}
}
/// The character to display if the file has been modified and the change
/// has been staged.
fn index_status(status: git2::Status) -> f::GitStatus {
match status {
s if s.contains(git2::Status::INDEX_NEW) => f::GitStatus::New,
s if s.contains(git2::Status::INDEX_MODIFIED) => f::GitStatus::Modified,
s if s.contains(git2::Status::INDEX_DELETED) => f::GitStatus::Deleted,
s if s.contains(git2::Status::INDEX_RENAMED) => f::GitStatus::Renamed,
s if s.contains(git2::Status::INDEX_TYPECHANGE) => f::GitStatus::TypeChange,
_ => f::GitStatus::NotModified,
}
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/fs/feature/mod.rs | src/fs/feature/mod.rs | pub mod xattr;
#[cfg(feature = "git")]
pub mod git;
#[cfg(not(feature = "git"))]
pub mod git {
use std::iter::FromIterator;
use std::path::{Path, PathBuf};
use crate::fs::fields as f;
pub struct GitCache;
impl FromIterator<PathBuf> for GitCache {
fn from_iter<I>(_iter: I) -> Self
where I: IntoIterator<Item=PathBuf>
{
Self
}
}
impl GitCache {
pub fn has_anything_for(&self, _index: &Path) -> bool {
false
}
pub fn get(&self, _index: &Path, _prefix_lookup: bool) -> f::Git {
unreachable!();
}
}
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/info/filetype.rs | src/info/filetype.rs | //! Tests for various types of file (video, image, compressed, etc).
//!
//! Currently this is dependent on the file’s name and extension, because
//! those are the only metadata that we have access to without reading the
//! file’s contents.
use ansi_term::Style;
use crate::fs::File;
use crate::output::icons::FileIcon;
use crate::theme::FileColours;
#[derive(Debug, Default, PartialEq, Eq)]
pub struct FileExtensions;
impl FileExtensions {
/// An “immediate” file is something that can be run or activated somehow
/// in order to kick off the build of a project. It’s usually only present
/// in directories full of source code.
#[allow(clippy::case_sensitive_file_extension_comparisons)]
fn is_immediate(&self, file: &File<'_>) -> bool {
file.name.to_lowercase().starts_with("readme") ||
file.name.ends_with(".ninja") ||
file.name_is_one_of( &[
"Makefile", "Cargo.toml", "SConstruct", "CMakeLists.txt",
"build.gradle", "pom.xml", "Rakefile", "package.json", "Gruntfile.js",
"Gruntfile.coffee", "BUILD", "BUILD.bazel", "WORKSPACE", "build.xml", "Podfile",
"webpack.config.js", "meson.build", "composer.json", "RoboFile.php", "PKGBUILD",
"Justfile", "Procfile", "Dockerfile", "Containerfile", "Vagrantfile", "Brewfile",
"Gemfile", "Pipfile", "build.sbt", "mix.exs", "bsconfig.json", "tsconfig.json",
])
}
fn is_image(&self, file: &File<'_>) -> bool {
file.extension_is_one_of( &[
"png", "jfi", "jfif", "jif", "jpe", "jpeg", "jpg", "gif", "bmp",
"tiff", "tif", "ppm", "pgm", "pbm", "pnm", "webp", "raw", "arw",
"svg", "stl", "eps", "dvi", "ps", "cbr", "jpf", "cbz", "xpm",
"ico", "cr2", "orf", "nef", "heif", "avif", "jxl", "j2k", "jp2",
"j2c", "jpx",
])
}
fn is_video(&self, file: &File<'_>) -> bool {
file.extension_is_one_of( &[
"avi", "flv", "m2v", "m4v", "mkv", "mov", "mp4", "mpeg",
"mpg", "ogm", "ogv", "vob", "wmv", "webm", "m2ts", "heic",
])
}
fn is_music(&self, file: &File<'_>) -> bool {
file.extension_is_one_of( &[
"aac", "m4a", "mp3", "ogg", "wma", "mka", "opus",
])
}
// Lossless music, rather than any other kind of data...
fn is_lossless(&self, file: &File<'_>) -> bool {
file.extension_is_one_of( &[
"alac", "ape", "flac", "wav",
])
}
fn is_crypto(&self, file: &File<'_>) -> bool {
file.extension_is_one_of( &[
"asc", "enc", "gpg", "pgp", "sig", "signature", "pfx", "p12",
])
}
fn is_document(&self, file: &File<'_>) -> bool {
file.extension_is_one_of( &[
"djvu", "doc", "docx", "dvi", "eml", "eps", "fotd", "key",
"keynote", "numbers", "odp", "odt", "pages", "pdf", "ppt",
"pptx", "rtf", "xls", "xlsx",
])
}
fn is_compressed(&self, file: &File<'_>) -> bool {
file.extension_is_one_of( &[
"zip", "tar", "Z", "z", "gz", "bz2", "a", "ar", "7z",
"iso", "dmg", "tc", "rar", "par", "tgz", "xz", "txz",
"lz", "tlz", "lzma", "deb", "rpm", "zst", "lz4", "cpio",
])
}
fn is_temp(&self, file: &File<'_>) -> bool {
file.name.ends_with('~')
|| (file.name.starts_with('#') && file.name.ends_with('#'))
|| file.extension_is_one_of( &[ "tmp", "swp", "swo", "swn", "bak", "bkp", "bk" ])
}
fn is_compiled(&self, file: &File<'_>) -> bool {
if file.extension_is_one_of( &[ "class", "elc", "hi", "o", "pyc", "zwc", "ko" ]) {
true
}
else if let Some(dir) = file.parent_dir {
file.get_source_files().iter().any(|path| dir.contains(path))
}
else {
false
}
}
}
impl FileColours for FileExtensions {
fn colour_file(&self, file: &File<'_>) -> Option<Style> {
use ansi_term::Colour::*;
Some(match file {
f if self.is_temp(f) => Fixed(244).normal(),
f if self.is_immediate(f) => Yellow.bold().underline(),
f if self.is_image(f) => Fixed(133).normal(),
f if self.is_video(f) => Fixed(135).normal(),
f if self.is_music(f) => Fixed(92).normal(),
f if self.is_lossless(f) => Fixed(93).normal(),
f if self.is_crypto(f) => Fixed(109).normal(),
f if self.is_document(f) => Fixed(105).normal(),
f if self.is_compressed(f) => Red.normal(),
f if self.is_compiled(f) => Fixed(137).normal(),
_ => return None,
})
}
}
impl FileIcon for FileExtensions {
fn icon_file(&self, file: &File<'_>) -> Option<char> {
use crate::output::icons::Icons;
if self.is_music(file) || self.is_lossless(file) {
Some(Icons::Audio.value())
}
else if self.is_image(file) {
Some(Icons::Image.value())
}
else if self.is_video(file) {
Some(Icons::Video.value())
}
else {
None
}
}
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/info/mod.rs | src/info/mod.rs | //! The “info” module contains routines that aren’t about probing the
//! filesystem nor displaying output to the user, but are internal “business
//! logic” routines that are performed on a file’s already-read metadata.
//! (This counts the file name as metadata.)
pub mod filetype;
mod sources;
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/info/sources.rs | src/info/sources.rs | use std::path::PathBuf;
use crate::fs::File;
impl<'a> File<'a> {
/// For this file, return a vector of alternate file paths that, if any of
/// them exist, mean that *this* file should be coloured as “compiled”.
///
/// The point of this is to highlight compiled files such as `foo.js` when
/// their source file `foo.coffee` exists in the same directory.
/// For example, `foo.js` is perfectly valid without `foo.coffee`, so we
/// don’t want to always blindly highlight `*.js` as compiled.
/// (See also `FileExtensions#is_compiled`)
pub fn get_source_files(&self) -> Vec<PathBuf> {
if let Some(ext) = &self.ext {
match &ext[..] {
"css" => vec![self.path.with_extension("sass"), self.path.with_extension("scss"), // SASS, SCSS
self.path.with_extension("styl"), self.path.with_extension("less")], // Stylus, Less
"js" => vec![self.path.with_extension("coffee"), self.path.with_extension("ts")], // CoffeeScript, TypeScript
"aux" | // TeX: auxiliary file
"bbl" | // BibTeX bibliography file
"bcf" | // biblatex control file
"blg" | // BibTeX log file
"fdb_latexmk" | // TeX latexmk file
"fls" | // TeX -recorder file
"lof" | // TeX list of figures
"log" | // TeX log file
"lot" | // TeX list of tables
"toc" => vec![self.path.with_extension("tex")], // TeX table of contents
_ => vec![], // No source files if none of the above
}
}
else {
vec![] // No source files if there’s no extension, either!
}
}
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/file_name.rs | src/output/file_name.rs | use std::fmt::Debug;
use std::path::Path;
use ansi_term::{ANSIString, Style};
use crate::fs::{File, FileTarget};
use crate::output::cell::TextCellContents;
use crate::output::escape;
use crate::output::icons::{icon_for_file, iconify_style};
use crate::output::render::FiletypeColours;
/// Basically a file name factory.
#[derive(Debug, Copy, Clone)]
pub struct Options {
/// Whether to append file class characters to file names.
pub classify: Classify,
/// Whether to prepend icon characters before file names.
pub show_icons: ShowIcons,
}
impl Options {
/// Create a new `FileName` that prints the given file’s name, painting it
/// with the remaining arguments.
pub fn for_file<'a, 'dir, C>(self, file: &'a File<'dir>, colours: &'a C) -> FileName<'a, 'dir, C> {
FileName {
file,
colours,
link_style: LinkStyle::JustFilenames,
options: self,
target: if file.is_link() { Some(file.link_target()) }
else { None }
}
}
}
/// When displaying a file name, there needs to be some way to handle broken
/// links, depending on how long the resulting Cell can be.
#[derive(PartialEq, Debug, Copy, Clone)]
enum LinkStyle {
/// Just display the file names, but colour them differently if they’re
/// a broken link or can’t be followed.
JustFilenames,
/// Display all files in their usual style, but follow each link with an
/// arrow pointing to their path, colouring the path differently if it’s
/// a broken link, and doing nothing if it can’t be followed.
FullLinkPaths,
}
/// Whether to append file class characters to the file names.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum Classify {
/// Just display the file names, without any characters.
JustFilenames,
/// Add a character after the file name depending on what class of file
/// it is.
AddFileIndicators,
}
impl Default for Classify {
fn default() -> Self {
Self::JustFilenames
}
}
/// Whether and how to show icons.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum ShowIcons {
/// Don’t show icons at all.
Off,
/// Show icons next to file names, with the given number of spaces between
/// the icon and the file name.
On(u32),
}
/// A **file name** holds all the information necessary to display the name
/// of the given file. This is used in all of the views.
pub struct FileName<'a, 'dir, C> {
/// A reference to the file that we’re getting the name of.
file: &'a File<'dir>,
/// The colours used to paint the file name and its surrounding text.
colours: &'a C,
/// The file that this file points to if it’s a link.
target: Option<FileTarget<'dir>>, // todo: remove?
/// How to handle displaying links.
link_style: LinkStyle,
options: Options,
}
impl<'a, 'dir, C> FileName<'a, 'dir, C> {

    /// Sets the flag on this file name to display link targets with an
    /// arrow followed by their path. Used by views that have room to show
    /// the target inline (e.g. the details view).
    pub fn with_link_paths(mut self) -> Self {
        self.link_style = LinkStyle::FullLinkPaths;
        self
    }
}
impl<'a, 'dir, C: Colours> FileName<'a, 'dir, C> {

    /// Paints the name of the file using the colours, resulting in a vector
    /// of coloured cells that can be printed to the terminal.
    ///
    /// This method returns some `TextCellContents`, rather than a `TextCell`,
    /// because for the last cell in a table, it doesn’t need to have its
    /// width calculated.
    pub fn paint(&self) -> TextCellContents {
        let mut bits = Vec::new();

        // An icon, if enabled, goes before everything else, followed by the
        // requested number of separator spaces.
        if let ShowIcons::On(spaces_count) = self.options.show_icons {
            let style = iconify_style(self.style());
            let file_icon = icon_for_file(self.file).to_string();
            bits.push(style.paint(file_icon));

            // Common space counts use static string slices; anything else
            // allocates a string of the right length.
            match spaces_count {
                1 => bits.push(style.paint(" ")),
                2 => bits.push(style.paint("  ")),
                n => bits.push(style.paint(spaces(n))),
            }
        }

        // Files with no parent Dir were named directly on the command line,
        // so their parent path is printed in front of their name.
        if self.file.parent_dir.is_none() {
            if let Some(parent) = self.file.path.parent() {
                self.add_parent_bits(&mut bits, parent);
            }
        }

        if ! self.file.name.is_empty() {
            // The “missing file” colour seems like it should be used here,
            // but it’s not! In a grid view, where there’s no space to display
            // link targets, the filename has to have a different style to
            // indicate this fact. But when showing targets, we can just
            // colour the path instead (see below), and leave the broken
            // link’s filename as the link colour.
            for bit in self.coloured_file_name() {
                bits.push(bit);
            }
        }

        // When showing link targets, render “ -> ” followed by the target’s
        // (possibly broken) path after the file name.
        if let (LinkStyle::FullLinkPaths, Some(target)) = (self.link_style, self.target.as_ref()) {
            match target {
                FileTarget::Ok(target) => {
                    bits.push(Style::default().paint(" "));
                    bits.push(self.colours.normal_arrow().paint("->"));
                    bits.push(Style::default().paint(" "));

                    if let Some(parent) = target.path.parent() {
                        self.add_parent_bits(&mut bits, parent);
                    }

                    if ! target.name.is_empty() {
                        // The target is rendered as a bare file name: no
                        // icons, and no indicator unless we add one below.
                        let target_options = Options {
                            classify: Classify::JustFilenames,
                            show_icons: ShowIcons::Off,
                        };

                        let target_name = FileName {
                            file: target,
                            colours: self.colours,
                            target: None,
                            link_style: LinkStyle::FullLinkPaths,
                            options: target_options,
                        };

                        for bit in target_name.coloured_file_name() {
                            bits.push(bit);
                        }

                        if let Classify::AddFileIndicators = self.options.classify {
                            if let Some(class) = self.classify_char(target) {
                                bits.push(Style::default().paint(class));
                            }
                        }
                    }
                }

                FileTarget::Broken(broken_path) => {
                    bits.push(Style::default().paint(" "));
                    bits.push(self.colours.broken_symlink().paint("->"));
                    bits.push(Style::default().paint(" "));
                    escape(
                        broken_path.display().to_string(),
                        &mut bits,
                        self.colours.broken_filename(),
                        self.colours.broken_control_char(),
                    );
                }

                FileTarget::Err(_) => {
                    // Do nothing — the error gets displayed on the next line
                }
            }
        }
        else if let Classify::AddFileIndicators = self.options.classify {
            if let Some(class) = self.classify_char(self.file) {
                bits.push(Style::default().paint(class));
            }
        }

        bits.into()
    }

    /// Adds the bits of the parent path to the given bits vector.
    /// The path gets its characters escaped based on the colours.
    fn add_parent_bits(&self, bits: &mut Vec<ANSIString<'_>>, parent: &Path) {
        let coconut = parent.components().count();

        if coconut == 1 && parent.has_root() {
            // The parent is the filesystem root: just print the separator.
            bits.push(self.colours.symlink_path().paint(std::path::MAIN_SEPARATOR.to_string()));
        }
        else if coconut >= 1 {
            escape(
                parent.to_string_lossy().to_string(),
                bits,
                self.colours.symlink_path(),
                self.colours.control_char(),
            );
            bits.push(self.colours.symlink_path().paint(std::path::MAIN_SEPARATOR.to_string()));
        }
    }

    /// The character to be displayed after a file when classifying is on, if
    /// the file’s type has one associated with it.
    #[cfg(unix)]
    fn classify_char(&self, file: &File<'_>) -> Option<&'static str> {
        if file.is_executable_file() {
            Some("*")
        }
        else if file.is_directory() {
            Some("/")
        }
        else if file.is_pipe() {
            Some("|")
        }
        else if file.is_link() {
            Some("@")
        }
        else if file.is_socket() {
            Some("=")
        }
        else {
            None
        }
    }

    /// The character to be displayed after a file when classifying is on.
    /// (Windows has no executable bit, pipes, or sockets to indicate.)
    #[cfg(windows)]
    fn classify_char(&self, file: &File<'_>) -> Option<&'static str> {
        if file.is_directory() {
            Some("/")
        }
        else if file.is_link() {
            Some("@")
        }
        else {
            None
        }
    }

    /// Returns at least one ANSI-highlighted string representing this file’s
    /// name using the given set of colours.
    ///
    /// Ordinarily, this will be just one string: the file’s complete name,
    /// coloured according to its file type. If the name contains control
    /// characters such as newlines or escapes, though, we can’t just print them
    /// to the screen directly, because then there’ll be newlines in weird places.
    ///
    /// So in that situation, those characters will be escaped and highlighted in
    /// a different colour.
    fn coloured_file_name<'unused>(&self) -> Vec<ANSIString<'unused>> {
        let file_style = self.style();
        let mut bits = Vec::new();

        escape(
            self.file.name.clone(),
            &mut bits,
            file_style,
            self.colours.control_char(),
        );

        bits
    }

    /// Figures out which colour to paint the filename part of the output,
    /// depending on which “type” of file it appears to be — either from the
    /// class on the filesystem or from its name. (Or the broken link colour,
    /// if there’s nowhere else for that fact to be shown.)
    pub fn style(&self) -> Style {
        // In views without room for a link target, the breakage is signalled
        // via the file name’s own colour instead.
        if let LinkStyle::JustFilenames = self.link_style {
            if let Some(ref target) = self.target {
                if target.is_broken() {
                    return self.colours.broken_symlink();
                }
            }
        }

        match self.file {
            f if f.is_directory() => self.colours.directory(),
            #[cfg(unix)]
            f if f.is_executable_file() => self.colours.executable_file(),
            f if f.is_link() => self.colours.symlink(),
            #[cfg(unix)]
            f if f.is_pipe() => self.colours.pipe(),
            #[cfg(unix)]
            f if f.is_block_device() => self.colours.block_device(),
            #[cfg(unix)]
            f if f.is_char_device() => self.colours.char_device(),
            #[cfg(unix)]
            f if f.is_socket() => self.colours.socket(),
            f if ! f.is_file() => self.colours.special(),
            _ => self.colours.colour_file(self.file),
        }
    }
}
/// The set of colours that are needed to paint a file name.
pub trait Colours: FiletypeColours {

    /// The style to paint the path of a symlink’s target, up to but not
    /// including the file’s name.
    fn symlink_path(&self) -> Style;

    /// The style to paint the arrow between a link and its target.
    fn normal_arrow(&self) -> Style;

    /// The style to paint the filenames of broken links in views that don’t
    /// show link targets, and the style to paint the *arrow* between the link
    /// and its target in views that *do* show link targets.
    fn broken_symlink(&self) -> Style;

    /// The style to paint the entire filename of a broken link.
    fn broken_filename(&self) -> Style;

    /// The style to paint a non-displayable control character in a filename.
    fn control_char(&self) -> Style;

    /// The style to paint a non-displayable control character in a filename,
    /// when the filename is being displayed as a broken link target.
    fn broken_control_char(&self) -> Style;

    /// The style to paint a file that has its executable bit set.
    fn executable_file(&self) -> Style;

    /// The style for a regular file, chosen from the file’s own properties
    /// (extension, name, and so on).
    fn colour_file(&self, file: &File<'_>) -> Style;
}
/// Generate a string made of `width` spaces.
fn spaces(width: u32) -> String {
    // `str::repeat` allocates once up-front, rather than collecting the
    // string one character at a time.
    " ".repeat(width as usize)
}
//! The grid-details view lists several details views side-by-side.
use std::io::{self, Write};
use ansi_term::ANSIStrings;
use term_grid as grid;
use crate::fs::{Dir, File};
use crate::fs::feature::git::GitCache;
use crate::fs::feature::xattr::FileAttributes;
use crate::fs::filter::FileFilter;
use crate::output::cell::TextCell;
use crate::output::details::{Options as DetailsOptions, Row as DetailsRow, Render as DetailsRender};
use crate::output::file_name::Options as FileStyle;
use crate::output::grid::Options as GridOptions;
use crate::output::table::{Table, Row as TableRow, Options as TableOptions};
use crate::output::tree::{TreeParams, TreeDepth};
use crate::theme::Theme;
/// Options for the grid-details view as a whole.
#[derive(PartialEq, Eq, Debug)]
pub struct Options {
    /// Options for the grid part of the layout.
    pub grid: GridOptions,
    /// Options for each per-column details table.
    pub details: DetailsOptions,
    /// Minimum number of rows before grid-details mode is used at all.
    pub row_threshold: RowThreshold,
}
impl Options {
    /// Borrows the details part of these options, for when the view
    /// degrades to a plain details view.
    pub fn to_details_options(&self) -> &DetailsOptions {
        &self.details
    }
}
/// The grid-details view can be configured to revert to just a details view
/// (with one column) if it wouldn’t produce enough rows of output.
///
/// Doing this makes the resulting output look a bit better: when listing a
/// small directory of four files in four columns, the files just look spaced
/// out and it’s harder to see what’s going on. So it can be enabled just for
/// larger directory listings.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum RowThreshold {
    /// Only use grid-details view if it would result in at least this many
    /// rows of output.
    MinimumRows(usize),

    /// Use the grid-details view no matter what.
    AlwaysGrid,
}
pub struct Render<'a> {

    /// The directory that’s being rendered here.
    /// We need this to know which columns to put in the output.
    pub dir: Option<&'a Dir>,

    /// The files that have been read from the directory. They should all
    /// hold a reference to it.
    pub files: Vec<File<'a>>,

    /// How to colour various pieces of text.
    pub theme: &'a Theme,

    /// How to format filenames.
    pub file_style: &'a FileStyle,

    /// The grid part of the grid-details view.
    pub grid: &'a GridOptions,

    /// The details part of the grid-details view.
    pub details: &'a DetailsOptions,

    /// How to filter files after listing a directory. The files in this
    /// render will already have been filtered and sorted, but any directories
    /// that we recurse into will have to have this applied.
    pub filter: &'a FileFilter,

    /// The minimum number of rows that there need to be before grid-details
    /// mode is activated.
    pub row_threshold: RowThreshold,

    /// Whether we are skipping Git-ignored files.
    pub git_ignoring: bool,

    /// The Git cache, if `--git` was asked for and a repository was found.
    pub git: Option<&'a GitCache>,

    /// The width of the terminal, which limits how many columns can fit.
    pub console_width: usize,
}
impl<'a> Render<'a> {

    /// Create a temporary Details render that gets used for the columns of
    /// the grid-details render that’s being generated.
    ///
    /// This includes an empty files vector because the files get added to
    /// the table in *this* file, not in details: we only want to insert every
    /// *n* files into each column’s table, not all of them.
    fn details_for_column(&self) -> DetailsRender<'a> {
        DetailsRender {
            dir: self.dir,
            files: Vec::new(),
            theme: self.theme,
            file_style: self.file_style,
            opts: self.details,
            recurse: None,
            filter: self.filter,
            git_ignoring: self.git_ignoring,
            git: self.git,
        }
    }

    /// Create a Details render for when this grid-details render doesn’t fit
    /// in the terminal (or something has gone wrong) and we have given up, or
    /// when the user asked for a grid-details view but the terminal width is
    /// not available, so we downgrade.
    pub fn give_up(self) -> DetailsRender<'a> {
        DetailsRender {
            dir: self.dir,
            files: self.files,
            theme: self.theme,
            file_style: self.file_style,
            opts: self.details,
            recurse: None,
            filter: self.filter,
            git_ignoring: self.git_ignoring,
            git: self.git,
        }
    }

    // This doesn’t take an IgnoreCache even though the details one does
    // because grid-details has no tree view.

    /// Render the files to the given writer, falling back to the plain
    /// details view when no multi-column layout fits.
    pub fn render<W: Write>(mut self, w: &mut W) -> io::Result<()> {
        if let Some((grid, width)) = self.find_fitting_grid() {
            write!(w, "{}", grid.fit_into_columns(width))
        }
        else {
            self.give_up().render(w)
        }
    }

    /// Try increasing numbers of columns, returning the widest grid that
    /// still fits in the terminal (along with its column count), or `None`
    /// if grid-details mode shouldn’t be used at all.
    pub fn find_fitting_grid(&mut self) -> Option<(grid::Grid, grid::Width)> {
        let options = self.details.table.as_ref().expect("Details table options not given!");

        let drender = self.details_for_column();

        // A throwaway table used only to pre-render every file’s row once;
        // the rows then get cloned into each candidate grid.
        let (first_table, _) = self.make_table(options, &drender);

        let rows = self.files.iter()
            .map(|file| first_table.row_for_file(file, file_has_xattrs(file)))
            .collect::<Vec<_>>();

        let file_names = self.files.iter()
            .map(|file| self.file_style.for_file(file, self.theme).paint().promote())
            .collect::<Vec<_>>();

        let mut last_working_grid = self.make_grid(1, options, &file_names, rows.clone(), &drender);

        if file_names.len() == 1 {
            return Some((last_working_grid, 1));
        }

        // If we can’t fit everything in a grid 100 columns wide, then
        // something has gone seriously awry
        for column_count in 2..100 {
            let grid = self.make_grid(column_count, options, &file_names, rows.clone(), &drender);

            let the_grid_fits = {
                let d = grid.fit_into_columns(column_count);
                d.width() <= self.console_width
            };

            if the_grid_fits {
                last_working_grid = grid;
            }

            if !the_grid_fits || column_count == file_names.len() {
                let last_column_count = if the_grid_fits { column_count } else { column_count - 1 };
                // If we’ve figured out how many columns can fit in the user’s terminal,
                // and it turns out there aren’t enough rows to make it worthwhile
                // (according to EXA_GRID_ROWS), then just resort to the lines view.
                if let RowThreshold::MinimumRows(thresh) = self.row_threshold {
                    if last_working_grid.fit_into_columns(last_column_count).row_count() < thresh {
                        return None;
                    }
                }
                return Some((last_working_grid, last_column_count));
            }
        }

        None
    }

    /// Build a fresh table (plus its header row, if headers are enabled)
    /// for one column of the view.
    fn make_table(&mut self, options: &'a TableOptions, drender: &DetailsRender<'_>) -> (Table<'a>, Vec<DetailsRow>) {
        // Drop the Git column entirely when nothing being listed has any
        // Git status to show.
        match (self.git, self.dir) {
            (Some(g), Some(d)) => if ! g.has_anything_for(&d.path) { self.git = None },
            (Some(g), None) => if ! self.files.iter().any(|f| g.has_anything_for(&f.path)) { self.git = None },
            (None, _) => {/* Keep Git how it is */},
        }

        let mut table = Table::new(options, self.git, self.theme);
        let mut rows = Vec::new();

        if self.details.header {
            let row = table.header_row();
            table.add_widths(&row);
            rows.push(drender.render_header(row));
        }

        (table, rows)
    }

    /// Distribute the pre-rendered rows over `column_count` columns and lay
    /// the resulting cells out in a `Grid`, respecting the across/down
    /// direction option.
    fn make_grid(&mut self, column_count: usize, options: &'a TableOptions, file_names: &[TextCell], rows: Vec<TableRow>, drender: &DetailsRender<'_>) -> grid::Grid {
        let mut tables = Vec::new();
        for _ in 0 .. column_count {
            tables.push(self.make_table(options, drender));
        }

        let mut num_cells = rows.len();
        if self.details.header {
            num_cells += column_count;
        }

        // `original_height` ignores headers and is used to decide which
        // column a file goes in; `height` includes headers and bounds the
        // row loop below.
        let original_height = divide_rounding_up(rows.len(), column_count);
        let height = divide_rounding_up(num_cells, column_count);

        for (i, (file_name, row)) in file_names.iter().zip(rows.into_iter()).enumerate() {
            let index = if self.grid.across {
                i % column_count
            }
            else {
                i / original_height
            };

            let (ref mut table, ref mut rows) = tables[index];
            table.add_widths(&row);
            let details_row = drender.render_file(row, file_name.clone(), TreeParams::new(TreeDepth::root(), false));
            rows.push(details_row);
        }

        let columns = tables
            .into_iter()
            .map(|(table, details_rows)| {
                drender.iterate_with_table(table, details_rows)
                    .collect::<Vec<_>>()
            })
            .collect::<Vec<_>>();

        let direction = if self.grid.across { grid::Direction::LeftToRight }
                        else { grid::Direction::TopToBottom };

        let filling = grid::Filling::Spaces(4);
        let mut grid = grid::Grid::new(grid::GridOptions { direction, filling });

        if self.grid.across {
            // Feed cells row-by-row so the grid fills left-to-right.
            for row in 0 .. height {
                for column in &columns {
                    if row < column.len() {
                        let cell = grid::Cell {
                            contents: ANSIStrings(&column[row].contents).to_string(),
                            width: *column[row].width,
                            alignment: grid::Alignment::Left,
                        };

                        grid.add(cell);
                    }
                }
            }
        }
        else {
            // Feed cells column-by-column so the grid fills top-to-bottom.
            for column in &columns {
                for cell in column.iter() {
                    let cell = grid::Cell {
                        contents: ANSIStrings(&cell.contents).to_string(),
                        width: *cell.width,
                        alignment: grid::Alignment::Left,
                    };

                    grid.add(cell);
                }
            }
        }

        grid
    }
}
/// Integer division of `a` by `b`, rounding any remainder upwards.
fn divide_rounding_up(a: usize, b: usize) -> usize {
    let quotient = a / b;
    if a % b == 0 { quotient } else { quotient + 1 }
}
/// Whether the file has any extended attributes; attribute-listing
/// failures are treated the same as having none.
fn file_has_xattrs(file: &File<'_>) -> bool {
    file.path.attributes().map_or(false, |attrs| ! attrs.is_empty())
}
use ansi_term::Style;
use crate::fs::File;
use crate::info::filetype::FileExtensions;
use lazy_static::lazy_static;
use std::collections::HashMap;
/// A source of icon characters: implementors pick an icon for a file,
/// or return `None` when they have no specific icon for it.
pub trait FileIcon {
    /// The icon character for the given file, if there is one.
    fn icon_file(&self, file: &File<'_>) -> Option<char>;
}
/// Broad media categories that share a single icon each.
#[derive(Copy, Clone)]
pub enum Icons {
    Audio,
    Image,
    Video,
}

impl Icons {
    /// The Nerd Font codepoint used to draw this category’s icon.
    pub fn value(self) -> char {
        match self {
            Self::Image => '\u{f1c5}',
            Self::Video => '\u{f03d}',
            Self::Audio => '\u{f001}',
        }
    }
}
/// Converts the style used to paint a file name into the style that should be
/// used to paint an icon.
///
/// - The background colour should be preferred to the foreground colour, as
///   if one is set, it’s the more “obvious” colour choice.
/// - If neither is set, just use the default style.
/// - Attributes such as bold or underline should not be used to paint the
///   icon, as they can make it look weird.
pub fn iconify_style(style: Style) -> Style {
    match style.background.or(style.foreground) {
        Some(colour) => Style::from(colour),
        None => Style::default(),
    }
}
lazy_static! {
    // Icons chosen by a file’s exact name rather than by its extension.
    static ref MAP_BY_NAME: HashMap<&'static str, char> = {
        let entries: &[(&'static str, char)] = &[
            (".Trash", '\u{f1f8}'),
            (".atom", '\u{e764}'),
            (".bashprofile", '\u{e615}'),
            (".bashrc", '\u{f489}'),
            (".git", '\u{f1d3}'),
            (".gitattributes", '\u{f1d3}'),
            (".gitconfig", '\u{f1d3}'),
            (".github", '\u{f408}'),
            (".gitignore", '\u{f1d3}'),
            (".gitmodules", '\u{f1d3}'),
            (".rvm", '\u{e21e}'),
            (".vimrc", '\u{e62b}'),
            (".vscode", '\u{e70c}'),
            (".zshrc", '\u{f489}'),
            ("Cargo.lock", '\u{e7a8}'),
            ("bin", '\u{e5fc}'),
            ("config", '\u{e5fc}'),
            ("docker-compose.yml", '\u{f308}'),
            ("Dockerfile", '\u{f308}'),
            ("ds_store", '\u{f179}'),
            ("gitignore_global", '\u{f1d3}'),
            ("go.mod", '\u{e626}'),
            ("go.sum", '\u{e626}'),
            ("gradle", '\u{e256}'),
            ("gruntfile.coffee", '\u{e611}'),
            ("gruntfile.js", '\u{e611}'),
            ("gruntfile.ls", '\u{e611}'),
            ("gulpfile.coffee", '\u{e610}'),
            ("gulpfile.js", '\u{e610}'),
            ("gulpfile.ls", '\u{e610}'),
            ("hidden", '\u{f023}'),
            ("include", '\u{e5fc}'),
            ("lib", '\u{f121}'),
            ("localized", '\u{f179}'),
            ("Makefile", '\u{f489}'),
            ("node_modules", '\u{e718}'),
            ("npmignore", '\u{e71e}'),
            ("PKGBUILD", '\u{f303}'),
            ("rubydoc", '\u{e73b}'),
            ("yarn.lock", '\u{e718}'),
        ];
        entries.iter().copied().collect()
    };
}
/// Picks the icon character for the given file.
///
/// Lookup order: exact file name (`MAP_BY_NAME`), then special directory
/// names, then the `FileExtensions` matcher, then the file extension, and
/// finally a generic fallback icon.
pub fn icon_for_file(file: &File<'_>) -> char {
    if let Some(icon) = MAP_BY_NAME.get(file.name.as_str()) { *icon }
    else if file.points_to_directory() {
        match file.name.as_str() {
            "bin" => '\u{e5fc}',
            ".git" => '\u{f1d3}',
            ".idea" => '\u{e7b5}',
            _ => '\u{f115}' // default folder icon
        }
    }
    // `FileExtensions` is a zero-sized unit struct, so the trait method can
    // be called on it directly — no need to allocate it in a `Box` first.
    else if let Some(icon) = FileExtensions.icon_file(file) { icon }
    else if let Some(ext) = file.ext.as_ref() {
        match ext.as_str() {
            "ai" => '\u{e7b4}',
            "android" => '\u{e70e}',
            "apk" => '\u{e70e}',
            "apple" => '\u{f179}',
            "avi" => '\u{f03d}',
            "avif" => '\u{f1c5}',
            "avro" => '\u{e60b}',
            "awk" => '\u{f489}',
            "bash" => '\u{f489}',
            "bash_history" => '\u{f489}',
            "bash_profile" => '\u{f489}',
            "bashrc" => '\u{f489}',
            "bat" => '\u{f17a}',
            "bats" => '\u{f489}',
            "bmp" => '\u{f1c5}',
            "bz" => '\u{f410}',
            "bz2" => '\u{f410}',
            "c" => '\u{e61e}',
            "c++" => '\u{e61d}',
            "cab" => '\u{e70f}',
            "cc" => '\u{e61d}',
            "cfg" => '\u{e615}',
            "class" => '\u{e256}',
            "clj" => '\u{e768}',
            "cljs" => '\u{e76a}',
            "cls" => '\u{f034}',
            "cmd" => '\u{e70f}',
            "coffee" => '\u{f0f4}',
            "conf" => '\u{e615}',
            "cp" => '\u{e61d}',
            "cpio" => '\u{f410}',
            "cpp" => '\u{e61d}',
            "cs" => '\u{f031b}',
            "csh" => '\u{f489}',
            "cshtml" => '\u{f1fa}',
            "csproj" => '\u{f031b}',
            "css" => '\u{e749}',
            "csv" => '\u{f1c3}',
            "csx" => '\u{f031b}',
            "cxx" => '\u{e61d}',
            "d" => '\u{e7af}',
            "dart" => '\u{e798}',
            "db" => '\u{f1c0}',
            "deb" => '\u{e77d}',
            "diff" => '\u{f440}',
            "djvu" => '\u{f02d}',
            "dll" => '\u{e70f}',
            "doc" => '\u{f1c2}',
            "docx" => '\u{f1c2}',
            "ds_store" => '\u{f179}',
            "DS_store" => '\u{f179}',
            "dump" => '\u{f1c0}',
            "ebook" => '\u{e28b}',
            "ebuild" => '\u{f30d}',
            "editorconfig" => '\u{e615}',
            "ejs" => '\u{e618}',
            "elm" => '\u{e62c}',
            "env" => '\u{f462}',
            "eot" => '\u{f031}',
            "epub" => '\u{e28a}',
            "erb" => '\u{e73b}',
            "erl" => '\u{e7b1}',
            "ex" => '\u{e62d}',
            "exe" => '\u{f17a}',
            "exs" => '\u{e62d}',
            "fish" => '\u{f489}',
            "flac" => '\u{f001}',
            "flv" => '\u{f03d}',
            "font" => '\u{f031}',
            "fs" => '\u{e7a7}',
            "fsi" => '\u{e7a7}',
            "fsx" => '\u{e7a7}',
            "gdoc" => '\u{f1c2}',
            "gem" => '\u{e21e}',
            "gemfile" => '\u{e21e}',
            "gemspec" => '\u{e21e}',
            "gform" => '\u{f298}',
            "gif" => '\u{f1c5}',
            "git" => '\u{f1d3}',
            "gitattributes" => '\u{f1d3}',
            "gitignore" => '\u{f1d3}',
            "gitmodules" => '\u{f1d3}',
            "go" => '\u{e626}',
            "gradle" => '\u{e256}',
            "groovy" => '\u{e775}',
            "gsheet" => '\u{f1c3}',
            "gslides" => '\u{f1c4}',
            "guardfile" => '\u{e21e}',
            "gz" => '\u{f410}',
            "h" => '\u{f0fd}',
            "hbs" => '\u{e60f}',
            "hpp" => '\u{f0fd}',
            "hs" => '\u{e777}',
            "htm" => '\u{f13b}',
            "html" => '\u{f13b}',
            "hxx" => '\u{f0fd}',
            "ico" => '\u{f1c5}',
            "image" => '\u{f1c5}',
            "img" => '\u{e271}',
            "iml" => '\u{e7b5}',
            "ini" => '\u{f17a}',
            "ipynb" => '\u{e678}',
            "iso" => '\u{e271}',
            "j2c" => '\u{f1c5}',
            "j2k" => '\u{f1c5}',
            "jad" => '\u{e256}',
            "jar" => '\u{e256}',
            "java" => '\u{e256}',
            "jfi" => '\u{f1c5}',
            "jfif" => '\u{f1c5}',
            "jif" => '\u{f1c5}',
            "jl" => '\u{e624}',
            "jmd" => '\u{f48a}',
            "jp2" => '\u{f1c5}',
            "jpe" => '\u{f1c5}',
            "jpeg" => '\u{f1c5}',
            "jpg" => '\u{f1c5}',
            "jpx" => '\u{f1c5}',
            "js" => '\u{e74e}',
            "json" => '\u{e60b}',
            "jsx" => '\u{e7ba}',
            "jxl" => '\u{f1c5}',
            "ksh" => '\u{f489}',
            "latex" => '\u{f034}',
            "less" => '\u{e758}',
            "lhs" => '\u{e777}',
            "license" => '\u{f0219}',
            "localized" => '\u{f179}',
            "lock" => '\u{f023}',
            "log" => '\u{f18d}',
            "lua" => '\u{e620}',
            "lz" => '\u{f410}',
            "lz4" => '\u{f410}',
            "lzh" => '\u{f410}',
            "lzma" => '\u{f410}',
            "lzo" => '\u{f410}',
            "m" => '\u{e61e}',
            "mm" => '\u{e61d}',
            "m4a" => '\u{f001}',
            "markdown" => '\u{f48a}',
            "md" => '\u{f48a}',
            "mjs" => '\u{e74e}',
            "mk" => '\u{f489}',
            "mkd" => '\u{f48a}',
            "mkv" => '\u{f03d}',
            "mobi" => '\u{e28b}',
            "mov" => '\u{f03d}',
            "mp3" => '\u{f001}',
            "mp4" => '\u{f03d}',
            "msi" => '\u{e70f}',
            "mustache" => '\u{e60f}',
            "nix" => '\u{f313}',
            "node" => '\u{f0399}',
            "npmignore" => '\u{e71e}',
            "odp" => '\u{f1c4}',
            "ods" => '\u{f1c3}',
            "odt" => '\u{f1c2}',
            "ogg" => '\u{f001}',
            "ogv" => '\u{f03d}',
            "otf" => '\u{f031}',
            "part" => '\u{f43a}',
            "patch" => '\u{f440}',
            "pdf" => '\u{f1c1}',
            "php" => '\u{e73d}',
            "pl" => '\u{e769}',
            "plx" => '\u{e769}',
            "pm" => '\u{e769}',
            "png" => '\u{f1c5}',
            "pod" => '\u{e769}',
            "ppt" => '\u{f1c4}',
            "pptx" => '\u{f1c4}',
            "procfile" => '\u{e21e}',
            "properties" => '\u{e60b}',
            "ps1" => '\u{f489}',
            "psd" => '\u{e7b8}',
            "pxm" => '\u{f1c5}',
            "py" => '\u{e606}',
            "pyc" => '\u{e606}',
            "r" => '\u{f25d}',
            "rakefile" => '\u{e21e}',
            "rar" => '\u{f410}',
            "razor" => '\u{f1fa}',
            "rb" => '\u{e21e}',
            "rdata" => '\u{f25d}',
            "rdb" => '\u{e76d}',
            "rdoc" => '\u{f48a}',
            "rds" => '\u{f25d}',
            "readme" => '\u{f48a}',
            "rlib" => '\u{e7a8}',
            "rmd" => '\u{f48a}',
            "rpm" => '\u{e7bb}',
            "rs" => '\u{e7a8}',
            "rspec" => '\u{e21e}',
            "rspec_parallel"=> '\u{e21e}',
            "rspec_status" => '\u{e21e}',
            "rss" => '\u{f09e}',
            "rtf" => '\u{f0219}',
            "ru" => '\u{e21e}',
            "rubydoc" => '\u{e73b}',
            "sass" => '\u{e603}',
            "scala" => '\u{e737}',
            "scss" => '\u{e749}',
            "sh" => '\u{f489}',
            "shell" => '\u{f489}',
            "slim" => '\u{e73b}',
            "sln" => '\u{e70c}',
            "so" => '\u{f17c}',
            "sql" => '\u{f1c0}',
            "sqlite3" => '\u{e7c4}',
            "sty" => '\u{f034}',
            "styl" => '\u{e600}',
            "stylus" => '\u{e600}',
            "svg" => '\u{f1c5}',
            "swift" => '\u{e755}',
            "t" => '\u{e769}',
            "tar" => '\u{f410}',
            "taz" => '\u{f410}',
            "tbz" => '\u{f410}',
            "tbz2" => '\u{f410}',
            "tex" => '\u{f034}',
            "tgz" => '\u{f410}',
            "tiff" => '\u{f1c5}',
            "tlz" => '\u{f410}',
            "toml" => '\u{e615}',
            "torrent" => '\u{e275}',
            "ts" => '\u{e628}',
            "tsv" => '\u{f1c3}',
            "tsx" => '\u{e7ba}',
            "ttf" => '\u{f031}',
            "twig" => '\u{e61c}',
            "txt" => '\u{f15c}',
            "txz" => '\u{f410}',
            "tz" => '\u{f410}',
            "tzo" => '\u{f410}',
            "video" => '\u{f03d}',
            "vim" => '\u{e62b}',
            "vue" => '\u{f0844}',
            "war" => '\u{e256}',
            "wav" => '\u{f001}',
            "webm" => '\u{f03d}',
            "webp" => '\u{f1c5}',
            "windows" => '\u{f17a}',
            "woff" => '\u{f031}',
            "woff2" => '\u{f031}',
            "xhtml" => '\u{f13b}',
            "xls" => '\u{f1c3}',
            "xlsx" => '\u{f1c3}',
            "xml" => '\u{f05c0}',
            "xul" => '\u{f05c0}',
            "xz" => '\u{f410}',
            "yaml" => '\u{f481}',
            "yml" => '\u{f481}',
            "zip" => '\u{f410}',
            "zsh" => '\u{f489}',
            "zsh-theme" => '\u{f489}',
            "zshrc" => '\u{f489}',
            "zst" => '\u{f410}',
            _ => '\u{f15b}' // generic “document” icon
        }
    }
    else {
        // No name match and no extension at all: plain file icon.
        '\u{f016}'
    }
}
//! Timestamp formatting.
use std::time::{SystemTime, UNIX_EPOCH};
use datetime::{LocalDateTime, TimeZone, DatePiece, TimePiece};
use datetime::fmt::DateFormat;
use lazy_static::lazy_static;
use unicode_width::UnicodeWidthStr;
/// Every timestamp in exa needs to be rendered by a **time format**.
/// Formatting times is tricky, because how a timestamp is rendered can
/// depend on one or more of the following:
///
/// - The user’s locale, for printing the month name as “Feb”, or as “fév”,
/// or as “2月”;
/// - The current year, because certain formats will be less precise when
/// dealing with dates far in the past;
/// - The formatting style that the user asked for on the command-line.
///
/// Because not all formatting styles need the same data, they all have their
/// own enum variants. It’s not worth looking the locale up if the formatter
/// prints month names as numbers.
///
/// Currently exa does not support *custom* styles, where the user enters a
/// format string in an environment variable or something. Just these four.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum TimeFormat {

    /// The **default format** uses the user’s locale to print month names,
    /// and specifies the timestamp down to the minute for recent times, and
    /// day for older times.
    DefaultFormat,

    /// Use the **ISO format**, which specifies the timestamp down to the
    /// minute for recent times, and day for older times. It uses a number
    /// for the month so it doesn’t use the locale.
    ISOFormat,

    /// Use the **long ISO format**, which specifies the timestamp down to the
    /// minute using only numbers, without needing the locale or year.
    LongISO,

    /// Use the **full ISO format**, which specifies the timestamp down to the
    /// millisecond and includes its offset down to the minute. This too uses
    /// only numbers so doesn’t require any special consideration.
    FullISO,
}
// There are two different formatting functions because local and zoned
// timestamps are separate types.
impl TimeFormat {

    /// Format the timestamp in the machine’s local time.
    pub fn format_local(self, time: SystemTime) -> String {
        match self {
            Self::DefaultFormat => default_local(time),
            Self::ISOFormat => iso_local(time),
            Self::LongISO => long_local(time),
            Self::FullISO => full_local(time),
        }
    }

    /// Format the timestamp converted into the given time zone.
    pub fn format_zoned(self, time: SystemTime, zone: &TimeZone) -> String {
        match self {
            Self::DefaultFormat => default_zoned(time, zone),
            Self::ISOFormat => iso_zoned(time, zone),
            Self::LongISO => long_zoned(time, zone),
            Self::FullISO => full_zoned(time, zone),
        }
    }
}
/// Default (locale-aware) format, in local time.
#[allow(trivial_numeric_casts)]
fn default_local(time: SystemTime) -> String {
    let date = LocalDateTime::at(systemtime_epoch(time));
    get_dateformat(&date).format(&date, &*LOCALE)
}
/// Default (locale-aware) format, converted into the given time zone.
#[allow(trivial_numeric_casts)]
fn default_zoned(time: SystemTime, zone: &TimeZone) -> String {
    let date = zone.to_zoned(LocalDateTime::at(systemtime_epoch(time)));
    get_dateformat(&date).format(&date, &*LOCALE)
}
/// Picks the pre-parsed date format matching the date’s recency (time shown
/// for this year, year shown otherwise) and the locale’s widest month name.
fn get_dateformat(date: &LocalDateTime) -> &'static DateFormat<'static> {
    match (is_recent(date), *MAXIMUM_MONTH_WIDTH) {
        (true, 4) => &FOUR_WIDE_DATE_TIME,
        (true, 5) => &FIVE_WIDE_DATE_TIME,
        (true, _) => &OTHER_WIDE_DATE_TIME,
        (false, 4) => &FOUR_WIDE_DATE_YEAR,
        (false, 5) => &FIVE_WIDE_DATE_YEAR,
        (false, _) => &OTHER_WIDE_DATE_YEAR,
    }
}
/// Long ISO format (`YYYY-MM-DD HH:MM`), in local time.
#[allow(trivial_numeric_casts)]
fn long_local(time: SystemTime) -> String {
    let date = LocalDateTime::at(systemtime_epoch(time));
    format!("{:04}-{:02}-{:02} {:02}:{:02}",
            date.year(), date.month() as usize, date.day(),
            date.hour(), date.minute())
}
/// Long ISO format (`YYYY-MM-DD HH:MM`), converted into the given time zone.
#[allow(trivial_numeric_casts)]
fn long_zoned(time: SystemTime, zone: &TimeZone) -> String {
    let date = zone.to_zoned(LocalDateTime::at(systemtime_epoch(time)));
    format!("{:04}-{:02}-{:02} {:02}:{:02}",
            date.year(), date.month() as usize, date.day(),
            date.hour(), date.minute())
}
/// Full ISO format with nanosecond precision, in local time.
#[allow(trivial_numeric_casts)]
fn full_local(time: SystemTime) -> String {
    let date = LocalDateTime::at(systemtime_epoch(time));
    format!("{:04}-{:02}-{:02} {:02}:{:02}:{:02}.{:09}",
            date.year(), date.month() as usize, date.day(),
            date.hour(), date.minute(), date.second(), systemtime_nanos(time))
}
/// Full ISO format with nanosecond precision and a `±HHMM` UTC offset,
/// converted into the given time zone.
#[allow(trivial_numeric_casts)]
fn full_zoned(time: SystemTime, zone: &TimeZone) -> String {
    use datetime::Offset;

    let local = LocalDateTime::at(systemtime_epoch(time));
    let date = zone.to_zoned(local);
    let offset = Offset::of_seconds(zone.offset(local) as i32).expect("Offset out of range");
    format!("{:04}-{:02}-{:02} {:02}:{:02}:{:02}.{:09} {:+03}{:02}",
            date.year(), date.month() as usize, date.day(),
            date.hour(), date.minute(), date.second(), systemtime_nanos(time),
            offset.hours(), offset.minutes().abs())
}
/// ISO format in local time: `MM-DD HH:MM` for dates in the current year,
/// `YYYY-MM-DD` for older dates.
#[allow(trivial_numeric_casts)]
fn iso_local(time: SystemTime) -> String {
    let date = LocalDateTime::at(systemtime_epoch(time));
    if is_recent(&date) {
        format!("{:02}-{:02} {:02}:{:02}",
                date.month() as usize, date.day(),
                date.hour(), date.minute())
    }
    else {
        format!("{:04}-{:02}-{:02}",
                date.year(), date.month() as usize, date.day())
    }
}
/// ISO format converted into the given time zone: `MM-DD HH:MM` for dates
/// in the current year, `YYYY-MM-DD` for older dates.
#[allow(trivial_numeric_casts)]
fn iso_zoned(time: SystemTime, zone: &TimeZone) -> String {
    let date = zone.to_zoned(LocalDateTime::at(systemtime_epoch(time)));
    if is_recent(&date) {
        format!("{:02}-{:02} {:02}:{:02}",
                date.month() as usize, date.day(),
                date.hour(), date.minute())
    }
    else {
        format!("{:04}-{:02}-{:02}",
                date.year(), date.month() as usize, date.day())
    }
}
/// Number of whole seconds between the Unix epoch and `time`, negative
/// (rounding towards minus infinity) for times before the epoch.
fn systemtime_epoch(time: SystemTime) -> i64 {
    match time.duration_since(UNIX_EPOCH) {
        Ok(duration) => duration.as_secs() as i64,
        Err(error) => {
            // The time is before the epoch: negate the difference, rounding
            // any fractional second downwards (so -1.5s becomes -2).
            let diff = error.duration();
            let whole = diff.as_secs() as i64;
            if diff.subsec_nanos() == 0 { -whole } else { -(whole + 1) }
        }
    }
}
/// The sub-second (nanoseconds) component of `time`, measured forwards from
/// the whole second chosen by `systemtime_epoch` — also correct for times
/// before the epoch.
fn systemtime_nanos(time: SystemTime) -> u32 {
    match time.duration_since(UNIX_EPOCH) {
        Ok(duration) => duration.subsec_nanos(),
        Err(error) => {
            // Mirror the pre-epoch fraction: 0.25s before the epoch is
            // 750,000,000ns past the previous whole second.
            let nanos = error.duration().subsec_nanos();
            if nanos == 0 { 0 } else { 1_000_000_000 - nanos }
        }
    }
}
/// Whether the date falls within the current year, in which case the
/// time-of-day is shown in place of the year.
fn is_recent(date: &LocalDateTime) -> bool {
    *CURRENT_YEAR == date.year()
}
lazy_static! {
    static ref CURRENT_YEAR: i64 = LocalDateTime::now().year();

    static ref LOCALE: locale::Time = {
        locale::Time::load_user_locale()
               .unwrap_or_else(|_| locale::Time::english())
    };

    static ref MAXIMUM_MONTH_WIDTH: usize = {
        // Some locales use a three-character wide month name (Jan to Dec);
        // others vary between three to four (1月 to 12月, juil.). We check each month width
        // to detect the longest and set the output format accordingly.
        //
        // Months are indexed 0 (January) through 11 (December), so the loop
        // must cover `0..12` — the previous `0..11` never measured December.
        let mut maximum_month_width = 0;
        for i in 0..12 {
            let current_month_width = UnicodeWidthStr::width(&*LOCALE.short_month_name(i));
            maximum_month_width = std::cmp::max(maximum_month_width, current_month_width);
        }
        maximum_month_width
    };

    static ref FOUR_WIDE_DATE_TIME: DateFormat<'static> = DateFormat::parse(
        "{2>:D} {4<:M} {02>:h}:{02>:m}"
    ).unwrap();

    static ref FIVE_WIDE_DATE_TIME: DateFormat<'static> = DateFormat::parse(
        "{2>:D} {5<:M} {02>:h}:{02>:m}"
    ).unwrap();

    static ref OTHER_WIDE_DATE_TIME: DateFormat<'static> = DateFormat::parse(
        "{2>:D} {:M} {02>:h}:{02>:m}"
    ).unwrap();

    static ref FOUR_WIDE_DATE_YEAR: DateFormat<'static> = DateFormat::parse(
        "{2>:D} {4<:M} {5>:Y}"
    ).unwrap();

    static ref FIVE_WIDE_DATE_YEAR: DateFormat<'static> = DateFormat::parse(
        "{2>:D} {5<:M} {5>:Y}"
    ).unwrap();

    static ref OTHER_WIDE_DATE_YEAR: DateFormat<'static> = DateFormat::parse(
        "{2>:D} {:M} {5>:Y}"
    ).unwrap();
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/table.rs | src/output/table.rs | use std::cmp::max;
use std::env;
use std::ops::Deref;
#[cfg(unix)]
use std::sync::{Mutex, MutexGuard};
use datetime::TimeZone;
use zoneinfo_compiled::{CompiledData, Result as TZResult};
use lazy_static::lazy_static;
use log::*;
#[cfg(unix)]
use users::UsersCache;
use crate::fs::{File, fields as f};
use crate::fs::feature::git::GitCache;
use crate::output::cell::TextCell;
use crate::output::render::TimeRender;
use crate::output::time::TimeFormat;
use crate::theme::Theme;
/// Options for displaying a table.
#[derive(PartialEq, Eq, Debug)]
pub struct Options {
    /// How file sizes should be rendered (binary, decimal, raw bytes).
    pub size_format: SizeFormat,
    /// How timestamps should be rendered.
    pub time_format: TimeFormat,
    /// Whether users/groups appear as names or numeric IDs.
    pub user_format: UserFormat,
    /// Which columns are switched on.
    pub columns: Columns,
}
/// Extra columns to display in the table.
#[allow(clippy::struct_excessive_bools)]
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub struct Columns {
    /// At least one of these timestamps will be shown.
    pub time_types: TimeTypes,

    // The rest are just on/off
    pub inode: bool,
    pub links: bool,
    pub blocks: bool,
    pub group: bool,
    pub git: bool,
    pub octal: bool,

    // Defaults to true:
    pub permissions: bool,
    pub filesize: bool,
    pub user: bool,
}
impl Columns {
    /// Turns the on/off switches into the ordered list of `Column`s to
    /// render. Variants gated behind `#[cfg(unix)]` are compiled out on
    /// other platforms, so enabling those switches there is a no-op.
    ///
    /// The Git column only appears when both the user asked for it
    /// (`self.git`) *and* the caller confirmed Git data is available
    /// (`actually_enable_git`).
    pub fn collect(&self, actually_enable_git: bool) -> Vec<Column> {
        let mut columns = Vec::with_capacity(4);

        if self.inode {
            #[cfg(unix)]
            columns.push(Column::Inode);
        }

        if self.octal {
            #[cfg(unix)]
            columns.push(Column::Octal);
        }

        if self.permissions {
            columns.push(Column::Permissions);
        }

        if self.links {
            #[cfg(unix)]
            columns.push(Column::HardLinks);
        }

        if self.filesize {
            columns.push(Column::FileSize);
        }

        if self.blocks {
            #[cfg(unix)]
            columns.push(Column::Blocks);
        }

        if self.user {
            #[cfg(unix)]
            columns.push(Column::User);
        }

        if self.group {
            #[cfg(unix)]
            columns.push(Column::Group);
        }

        if self.time_types.modified {
            columns.push(Column::Timestamp(TimeType::Modified));
        }

        if self.time_types.changed {
            columns.push(Column::Timestamp(TimeType::Changed));
        }

        if self.time_types.created {
            columns.push(Column::Timestamp(TimeType::Created));
        }

        if self.time_types.accessed {
            columns.push(Column::Timestamp(TimeType::Accessed));
        }

        if self.git && actually_enable_git {
            columns.push(Column::GitStatus);
        }

        columns
    }
}
/// A table contains these.
///
/// One variant per possible column; platform-specific columns only
/// exist on the platforms that can populate them.
#[derive(Debug, Copy, Clone)]
pub enum Column {
    Permissions,
    FileSize,
    Timestamp(TimeType),
    #[cfg(unix)]
    Blocks,
    #[cfg(unix)]
    User,
    #[cfg(unix)]
    Group,
    #[cfg(unix)]
    HardLinks,
    #[cfg(unix)]
    Inode,
    GitStatus,
    #[cfg(unix)]
    Octal,
}
/// Each column can pick its own **Alignment**. Usually, numbers are
/// right-aligned, and text is left-aligned.
#[derive(Copy, Clone)]
pub enum Alignment {
    Left,
    Right,
}
impl Column {
    /// Get the alignment this column should use.
    #[cfg(unix)]
    pub fn alignment(self) -> Alignment {
        match self {
            Self::FileSize |
            Self::HardLinks |
            Self::Inode |
            Self::Blocks |
            Self::GitStatus => Alignment::Right,
            _ => Alignment::Left,
        }
    }

    /// Get the alignment this column should use.
    ///
    /// Takes `self` by value for consistency with the Unix version:
    /// `Column` is `Copy`, so a reference adds nothing, and callers use
    /// method syntax either way.
    #[cfg(windows)]
    pub fn alignment(self) -> Alignment {
        match self {
            Self::FileSize |
            Self::GitStatus => Alignment::Right,
            _ => Alignment::Left,
        }
    }

    /// Get the text that should be printed at the top, when the user elects
    /// to have a header row printed.
    pub fn header(self) -> &'static str {
        match self {
            #[cfg(unix)]
            Self::Permissions => "Permissions",
            #[cfg(windows)]
            Self::Permissions => "Mode",
            Self::FileSize => "Size",
            Self::Timestamp(t) => t.header(),
            #[cfg(unix)]
            Self::Blocks => "Blocks",
            #[cfg(unix)]
            Self::User => "User",
            #[cfg(unix)]
            Self::Group => "Group",
            #[cfg(unix)]
            Self::HardLinks => "Links",
            #[cfg(unix)]
            Self::Inode => "inode",
            Self::GitStatus => "Git",
            #[cfg(unix)]
            Self::Octal => "Octal",
        }
    }
}
/// Formatting options for file sizes.
#[allow(clippy::enum_variant_names)]
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum SizeFormat {

    /// Format the file size using **decimal** prefixes, such as “kilo”,
    /// “mega”, or “giga”.
    DecimalBytes,

    /// Format the file size using **binary** prefixes, such as “kibi”,
    /// “mebi”, or “gibi”.
    BinaryBytes,

    /// Do no formatting and just display the size as a number of bytes.
    JustBytes,
}
/// Formatting options for user and group.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum UserFormat {
    /// The UID / GID
    Numeric,
    /// Show the name
    Name,
}
impl Default for SizeFormat {
    /// Decimal (power-of-ten) prefixes are the default size format.
    fn default() -> Self {
        Self::DecimalBytes
    }
}
/// The types of a file’s time fields. These three fields are standard
/// across most (all?) operating systems.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum TimeType {

    /// The file’s modified time (`st_mtime`).
    Modified,

    /// The file’s changed time (`st_ctime`)
    Changed,

    /// The file’s accessed time (`st_atime`).
    Accessed,

    /// The file’s creation time (`btime` or `birthtime`).
    Created,
}
impl TimeType {
    /// Returns the text to use for a column’s heading in the columns output.
    pub fn header(self) -> &'static str {
        match self {
            Self::Accessed => "Date Accessed",
            Self::Changed  => "Date Changed",
            Self::Created  => "Date Created",
            Self::Modified => "Date Modified",
        }
    }
}
/// Fields for which of a file’s time fields should be displayed in the
/// columns output.
///
/// There should always be at least one of these — there’s no way to disable
/// the time columns entirely (yet).
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
#[allow(clippy::struct_excessive_bools)]
pub struct TimeTypes {
    pub modified: bool,
    pub changed: bool,
    pub accessed: bool,
    pub created: bool,
}
impl Default for TimeTypes {
    /// By default, display just the ‘modified’ time. This is the most
    /// common option, which is why it has this shorthand.
    fn default() -> Self {
        Self {
            modified: true,
            changed: false,
            accessed: false,
            created: false,
        }
    }
}
/// The **environment** struct contains any data that could change between
/// running instances of exa, depending on the user’s computer’s configuration.
///
/// Any environment field should be able to be mocked up for test runs.
pub struct Environment {

    /// Localisation rules for formatting numbers.
    numeric: locale::Numeric,

    /// The computer’s current time zone. This gets used to determine how to
    /// offset files’ timestamps.
    tz: Option<TimeZone>,

    /// Mapping cache of user IDs to usernames.
    /// Behind a `Mutex` so the shared `ENVIRONMENT` static stays `Sync`.
    #[cfg(unix)]
    users: Mutex<UsersCache>,
}
impl Environment {
    /// Locks and returns the user/group lookup cache for exclusive use.
    #[cfg(unix)]
    pub fn lock_users(&self) -> MutexGuard<'_, UsersCache> {
        self.users.lock().unwrap()
    }

    /// Gathers all environment data: time zone (optional) and number
    /// locale, plus the users cache on Unix.
    fn load_all() -> Self {
        let tz = match determine_time_zone() {
            Ok(t) => {
                Some(t)
            }
            Err(ref e) => {
                // This is a diagnostic, not listing output: send it to
                // stderr so it doesn’t pollute piped stdout.
                eprintln!("Unable to determine time zone: {}", e);
                None
            }
        };

        let numeric = locale::Numeric::load_user_locale()
            .unwrap_or_else(|_| locale::Numeric::english());

        #[cfg(unix)]
        let users = Mutex::new(UsersCache::new());

        Self { numeric, tz, #[cfg(unix)] users }
    }
}
/// Works out which time zone to use: the `TZ` environment variable when
/// set (absolute paths used verbatim, other names looked up under
/// `/usr/share/zoneinfo`, with a POSIX-style leading ‘:’ stripped),
/// falling back to the system-wide `/etc/localtime`.
#[cfg(unix)]
fn determine_time_zone() -> TZResult<TimeZone> {
    match env::var("TZ") {
        Err(_) => TimeZone::from_file("/etc/localtime"),
        Ok(tz_var) => {
            let path = if tz_var.starts_with('/') {
                tz_var
            }
            else {
                let name = if tz_var.starts_with(':') {
                    // Drop the leading ‘:’ before the lookup.
                    tz_var.replacen(':', "", 1)
                }
                else {
                    tz_var
                };
                format!("/usr/share/zoneinfo/{}", name)
            };
            TimeZone::from_file(path)
        }
    }
}
/// Windows stand-in for time zone detection: zoneinfo files don’t
/// exist there, so this returns a fixed placeholder zone rather than
/// reading the real system setting.
#[cfg(windows)]
fn determine_time_zone() -> TZResult<TimeZone> {
    use datetime::zone::{FixedTimespan, FixedTimespanSet, StaticTimeZone, TimeZoneSource};
    use std::borrow::Cow;

    Ok(TimeZone(TimeZoneSource::Static(&StaticTimeZone {
        name: "Unsupported",
        fixed_timespans: FixedTimespanSet {
            first: FixedTimespan {
                offset: 0,
                is_dst: false,
                name: Cow::Borrowed("ZONE_A"),
            },
            rest: &[(
                1206838800,
                FixedTimespan {
                    offset: 3600,
                    is_dst: false,
                    name: Cow::Borrowed("ZONE_B"),
                },
            )],
        },
    })))
}
lazy_static! {
    /// Shared, lazily-initialised environment data for the whole run.
    static ref ENVIRONMENT: Environment = Environment::load_all();
}
/// Everything needed to render table rows: the active columns, their
/// running maximum widths, formatting options, and shared environment.
pub struct Table<'a> {
    columns: Vec<Column>,
    theme: &'a Theme,
    env: &'a Environment,
    widths: TableWidths,
    time_format: TimeFormat,
    size_format: SizeFormat,
    user_format: UserFormat,
    git: Option<&'a GitCache>,
}
/// One table row: a rendered cell per active column.
#[derive(Clone)]
pub struct Row {
    cells: Vec<TextCell>,
}
impl<'a, 'f> Table<'a> {
    /// Builds a table from the user’s options, with all column widths
    /// starting at zero. The Git column can only appear when a Git
    /// cache has been supplied.
    pub fn new(options: &'a Options, git: Option<&'a GitCache>, theme: &'a Theme) -> Table<'a> {
        let columns = options.columns.collect(git.is_some());
        let widths = TableWidths::zero(columns.len());
        let env = &*ENVIRONMENT;

        Table {
            theme,
            widths,
            columns,
            git,
            env,
            time_format: options.time_format,
            size_format: options.size_format,
            user_format: options.user_format,
        }
    }

    /// The per-column maximum widths accumulated so far.
    pub fn widths(&self) -> &TableWidths {
        &self.widths
    }

    /// A row of column titles, painted in the header style.
    pub fn header_row(&self) -> Row {
        let cells = self.columns.iter()
            .map(|c| TextCell::paint_str(self.theme.ui.header, c.header()))
            .collect();

        Row { cells }
    }

    /// Renders one cell per active column for the given file.
    pub fn row_for_file(&self, file: &File<'_>, xattrs: bool) -> Row {
        let cells = self.columns.iter()
            .map(|c| self.display(file, *c, xattrs))
            .collect();

        Row { cells }
    }

    /// Widens the tracked column widths to fit this row’s cells.
    pub fn add_widths(&mut self, row: &Row) {
        self.widths.add_widths(row)
    }

    /// Gathers the fields for the Permissions column. `xattrs` records
    /// whether the file has extended attributes.
    fn permissions_plus(&self, file: &File<'_>, xattrs: bool) -> f::PermissionsPlus {
        f::PermissionsPlus {
            file_type: file.type_char(),
            #[cfg(unix)]
            permissions: file.permissions(),
            #[cfg(windows)]
            attributes: file.attributes(),
            xattrs,
        }
    }

    /// Gathers the fields for the Octal permissions column.
    #[cfg(unix)]
    fn octal_permissions(&self, file: &File<'_>) -> f::OctalPermissions {
        f::OctalPermissions {
            permissions: file.permissions(),
        }
    }

    /// Produces the text cell for one column of one file, dispatching
    /// to the relevant field’s renderer with the table’s formatting
    /// options and environment data.
    fn display(&self, file: &File<'_>, column: Column, xattrs: bool) -> TextCell {
        match column {
            Column::Permissions => {
                self.permissions_plus(file, xattrs).render(self.theme)
            }
            Column::FileSize => {
                file.size().render(self.theme, self.size_format, &self.env.numeric)
            }
            #[cfg(unix)]
            Column::HardLinks => {
                file.links().render(self.theme, &self.env.numeric)
            }
            #[cfg(unix)]
            Column::Inode => {
                file.inode().render(self.theme.ui.inode)
            }
            #[cfg(unix)]
            Column::Blocks => {
                file.blocks().render(self.theme)
            }
            #[cfg(unix)]
            Column::User => {
                file.user().render(self.theme, &*self.env.lock_users(), self.user_format)
            }
            #[cfg(unix)]
            Column::Group => {
                file.group().render(self.theme, &*self.env.lock_users(), self.user_format)
            }
            Column::GitStatus => {
                self.git_status(file).render(self.theme)
            }
            #[cfg(unix)]
            Column::Octal => {
                self.octal_permissions(file).render(self.theme.ui.octal)
            }

            Column::Timestamp(TimeType::Modified) => {
                file.modified_time().render(self.theme.ui.date, &self.env.tz, self.time_format)
            }
            Column::Timestamp(TimeType::Changed) => {
                file.changed_time().render(self.theme.ui.date, &self.env.tz, self.time_format)
            }
            Column::Timestamp(TimeType::Created) => {
                file.created_time().render(self.theme.ui.date, &self.env.tz, self.time_format)
            }
            Column::Timestamp(TimeType::Accessed) => {
                file.accessed_time().render(self.theme.ui.date, &self.env.tz, self.time_format)
            }
        }
    }

    /// Looks up the file’s Git status in the cache, or the default
    /// status when no cache is attached.
    fn git_status(&self, file: &File<'_>) -> f::Git {
        debug!("Getting Git status for file {:?}", file.path);

        self.git
            .map(|g| g.get(&file.path, file.is_directory()))
            .unwrap_or_default()
    }

    /// Pads and concatenates a row’s cells into one cell, aligning each
    /// to its column’s tracked width with a single space after every
    /// column.
    pub fn render(&self, row: Row) -> TextCell {
        let mut cell = TextCell::default();

        let iter = row.cells.into_iter()
            .zip(self.widths.iter())
            .enumerate();

        for (n, (this_cell, width)) in iter {
            // Safe: `add_widths` has made each width at least as large
            // as every cell in its column.
            let padding = width - *this_cell.width;

            match self.columns[n].alignment() {
                Alignment::Left => {
                    cell.append(this_cell);
                    cell.add_spaces(padding);
                }
                Alignment::Right => {
                    cell.add_spaces(padding);
                    cell.append(this_cell);
                }
            }

            cell.add_spaces(1);
        }

        cell
    }
}
/// The maximum width seen so far for each column, in column order.
pub struct TableWidths(Vec<usize>);

impl Deref for TableWidths {
    type Target = [usize];

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
impl TableWidths {
    /// A fresh set of widths, one zeroed entry per column.
    pub fn zero(count: usize) -> Self {
        Self(vec![0; count])
    }

    /// Widens each column so it can fit the corresponding cell of `row`.
    pub fn add_widths(&mut self, row: &Row) {
        for (cell, width) in row.cells.iter().zip(self.0.iter_mut()) {
            *width = max(*width, *cell.width);
        }
    }

    /// The total rendered width: every column plus the single space
    /// that follows each one.
    pub fn total(&self) -> usize {
        self.0.iter().fold(self.0.len(), |sum, width| sum + width)
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/tree.rs | src/output/tree.rs | //! Tree structures, such as `├──` or `└──`, used in a tree view.
//!
//! ## Constructing Tree Views
//!
//! When using the `--tree` argument, instead of a vector of cells, each row
//! has a `depth` field that indicates how far deep in the tree it is: the top
//! level has depth 0, its children have depth 1, and *their* children have
//! depth 2, and so on.
//!
//! On top of this, it also has a `last` field that specifies whether this is
//! the last row of this particular consecutive set of rows. This doesn’t
//! affect the file’s information; it’s just used to display a different set of
//! Unicode tree characters! The resulting table looks like this:
//!
//! ```text
//! ┌───────┬───────┬───────────────────────┐
//! │ Depth │ Last │ Output │
//! ├───────┼───────┼───────────────────────┤
//! │ 0 │ │ documents │
//! │ 1 │ false │ ├── this_file.txt │
//! │ 1 │ false │ ├── that_file.txt │
//! │ 1 │ false │ ├── features │
//! │ 2 │ false │ │ ├── feature_1.rs │
//! │ 2 │ false │ │ ├── feature_2.rs │
//! │ 2 │ true │ │ └── feature_3.rs │
//! │ 1 │ true │ └── pictures │
//! │ 2 │ false │ ├── garden.jpg │
//! │ 2 │ false │ ├── flowers.jpg │
//! │ 2 │ false │ ├── library.png │
//! │ 2 │ true │ └── space.tiff │
//! └───────┴───────┴───────────────────────┘
//! ```
//!
//! Creating the table like this means that each file has to be tested to see
//! if it’s the last one in the group. This is usually done by putting all the
//! files in a vector beforehand, getting its length, then comparing the index
//! of each file to see if it’s the last one. (As some files may not be
//! successfully `stat`ted, we don’t know how many files are going to exist in
//! each directory)
/// One drawable segment of the tree prefix for a row.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum TreePart {

    /// Rightmost column, *not* the last in the directory.
    Edge,

    /// Not the rightmost column, and the directory has not finished yet.
    Line,

    /// Rightmost column, and the last in the directory.
    Corner,

    /// Not the rightmost column, and the directory *has* finished.
    Blank,
}
impl TreePart {
    /// Turn this tree part into ASCII-licious box drawing characters!
    /// (Warning: not actually ASCII)
    ///
    /// The widths of the four strings line up, so prefixes at the same
    /// depth stay vertically aligned.
    pub fn ascii_art(self) -> &'static str {
        match self {
            Self::Edge => "├──",
            Self::Line => "│ ",
            Self::Corner => "└──",
            Self::Blank => " ",
        }
    }
}
/// A **tree trunk** builds up arrays of tree parts over multiple depths.
#[derive(Debug, Default)]
pub struct TreeTrunk {

    /// A stack tracks which tree characters should be printed. It’s
    /// necessary to maintain information about the previously-printed
    /// lines, as the output will change based on any previous entries.
    stack: Vec<TreePart>,

    /// A tuple for the last ‘depth’ and ‘last’ parameters that are passed in.
    last_params: Option<TreeParams>,
}
/// The tree position of one row: how deep it is and whether it ends
/// its group.
#[derive(Debug, Copy, Clone)]
pub struct TreeParams {

    /// How many directories deep into the tree structure this is. Directories
    /// on top have depth 0.
    depth: TreeDepth,

    /// Whether this is the last entry in the directory.
    last: bool,
}
/// A depth in the tree: 0 for the root, increasing per nesting level.
#[derive(Debug, Copy, Clone)]
pub struct TreeDepth(pub usize);
impl TreeTrunk {
    /// Calculates the tree parts for an entry at the given depth and
    /// last-ness. The depth is used to determine where in the stack the tree
    /// part should be inserted, and the last-ness is used to determine which
    /// type of tree part to insert.
    ///
    /// This takes a `&mut self` because the results of each file are stored
    /// and used in future rows.
    pub fn new_row(&mut self, params: TreeParams) -> &[TreePart] {

        // If this isn’t our first iteration, then update the tree parts thus
        // far to account for there being another row after it.
        if let Some(last) = self.last_params {
            self.stack[last.depth.0] = if last.last { TreePart::Blank }
                                                      else { TreePart::Line };
        }

        // Make sure the stack has enough space, then add or modify another
        // part into it.
        self.stack.resize(params.depth.0 + 1, TreePart::Edge);
        self.stack[params.depth.0] = if params.last { TreePart::Corner }
                                                      else { TreePart::Edge };

        self.last_params = Some(params);

        // Return the tree parts as a slice of the stack.
        //
        // Ignore the first element here to prevent a ‘zeroth level’ from
        // appearing before the very first directory. This level would
        // join unrelated directories without connecting to anything:
        //
        //     with [0..]           with [1..]
        //     ==========           ==========
        //      ├── folder           folder
        //      │   └── file         └── file
        //      └── folder           folder
        //          └── file         └── file
        //
        &self.stack[1..]
    }
}
impl TreeParams {
pub fn new(depth: TreeDepth, last: bool) -> Self {
Self { depth, last }
}
pub fn is_at_root(&self) -> bool {
self.depth.0 == 0
}
}
impl TreeDepth {
    /// The top of the tree: depth zero.
    pub fn root() -> Self {
        Self(0)
    }

    /// One level further down from this depth.
    pub fn deeper(self) -> Self {
        Self(self.0 + 1)
    }

    /// Creates an iterator that, as well as yielding each value, yields a
    /// `TreeParams` with the current depth and last flag filled in.
    pub fn iterate_over<I, T>(self, inner: I) -> Iter<I>
    where I: ExactSizeIterator + Iterator<Item = T>
    {
        Iter { current_depth: self, inner }
    }
}
/// Iterator adaptor produced by `TreeDepth::iterate_over`: pairs each
/// inner item with its `TreeParams`.
pub struct Iter<I> {
    current_depth: TreeDepth,
    inner: I,
}
impl<I, T> Iterator for Iter<I>
where I: ExactSizeIterator + Iterator<Item = T>
{
    type Item = (TreeParams, T);

    fn next(&mut self) -> Option<Self::Item> {
        match self.inner.next() {
            None => None,
            Some(item) => {
                // After pulling `item`, an empty remainder means it was
                // the final element of this group.
                // TODO: use exact_size_is_empty API soon
                let is_last = self.inner.len() == 0;
                Some((TreeParams::new(self.current_depth, is_last), item))
            }
        }
    }
}
#[cfg(test)]
mod trunk_test {
    use super::*;

    /// Shorthand for building the `TreeParams` used by every test here.
    fn params(depth: usize, last: bool) -> TreeParams {
        TreeParams::new(TreeDepth(depth), last)
    }

    // Depth 0 never yields parts: `new_row` slices the stack from 1.
    #[test]
    fn empty_at_first() {
        let mut tt = TreeTrunk::default();
        assert_eq!(tt.new_row(params(0, true)), &[ ]);
    }

    #[test]
    fn one_child() {
        let mut tt = TreeTrunk::default();
        assert_eq!(tt.new_row(params(0, true)), &[ ]);
        assert_eq!(tt.new_row(params(1, true)), &[ TreePart::Corner ]);
    }

    #[test]
    fn two_children() {
        let mut tt = TreeTrunk::default();
        assert_eq!(tt.new_row(params(0, true)), &[ ]);
        assert_eq!(tt.new_row(params(1, false)), &[ TreePart::Edge ]);
        assert_eq!(tt.new_row(params(1, true)), &[ TreePart::Corner ]);
    }

    #[test]
    fn two_times_two_children() {
        let mut tt = TreeTrunk::default();
        assert_eq!(tt.new_row(params(0, false)), &[ ]);
        assert_eq!(tt.new_row(params(1, false)), &[ TreePart::Edge ]);
        assert_eq!(tt.new_row(params(1, true)), &[ TreePart::Corner ]);

        assert_eq!(tt.new_row(params(0, true)), &[ ]);
        assert_eq!(tt.new_row(params(1, false)), &[ TreePart::Edge ]);
        assert_eq!(tt.new_row(params(1, true)), &[ TreePart::Corner ]);
    }

    // Inner levels switch between Line (parent continues) and Blank
    // (parent finished) once deeper rows follow.
    #[test]
    fn two_times_two_nested_children() {
        let mut tt = TreeTrunk::default();
        assert_eq!(tt.new_row(params(0, true)), &[ ]);

        assert_eq!(tt.new_row(params(1, false)), &[ TreePart::Edge ]);
        assert_eq!(tt.new_row(params(2, false)), &[ TreePart::Line, TreePart::Edge ]);
        assert_eq!(tt.new_row(params(2, true)), &[ TreePart::Line, TreePart::Corner ]);

        assert_eq!(tt.new_row(params(1, true)), &[ TreePart::Corner ]);
        assert_eq!(tt.new_row(params(2, false)), &[ TreePart::Blank, TreePart::Edge ]);
        assert_eq!(tt.new_row(params(2, true)), &[ TreePart::Blank, TreePart::Corner ]);
    }
}
#[cfg(test)]
mod iter_test {
    use super::*;

    // Only the final element should come back with `last == true`.
    #[test]
    fn test_iteration() {
        let foos = &[ "first", "middle", "last" ];
        let mut iter = TreeDepth::root().iterate_over(foos.iter());

        let next = iter.next().unwrap();
        assert_eq!(&"first", next.1);
        assert!(!next.0.last);

        let next = iter.next().unwrap();
        assert_eq!(&"middle", next.1);
        assert!(!next.0.last);

        let next = iter.next().unwrap();
        assert_eq!(&"last", next.1);
        assert!(next.0.last);

        assert!(iter.next().is_none());
    }

    #[test]
    fn test_empty() {
        let nothing: &[usize] = &[];
        let mut iter = TreeDepth::root().iterate_over(nothing.iter());
        assert!(iter.next().is_none());
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/grid.rs | src/output/grid.rs | use std::io::{self, Write};
use term_grid as tg;
use crate::fs::File;
use crate::fs::filter::FileFilter;
use crate::output::file_name::Options as FileStyle;
use crate::theme::Theme;
/// Options for the grid view.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub struct Options {
    /// Whether to fill the grid across (row-by-row) rather than
    /// downwards (column-by-column).
    pub across: bool,
}
impl Options {
    /// The direction cells flow in: left-to-right when `--across` was
    /// given, otherwise top-to-bottom.
    pub fn direction(self) -> tg::Direction {
        if !self.across {
            tg::Direction::TopToBottom
        }
        else {
            tg::Direction::LeftToRight
        }
    }
}
/// The parameters needed to render the grid view.
pub struct Render<'a> {
    /// The files to lay out, as yet unsorted.
    pub files: Vec<File<'a>>,
    /// Colours and styles for painting file names.
    pub theme: &'a Theme,
    /// How to format each file’s name.
    pub file_style: &'a FileStyle,
    /// Grid-specific options (flow direction).
    pub opts: &'a Options,
    /// The width the grid must fit into.
    pub console_width: usize,
    /// How to sort and filter the files before display.
    pub filter: &'a FileFilter,
}
impl<'a> Render<'a> {
    /// Sorts the files, lays their painted names out in a grid sized to
    /// `console_width`, and writes it to `w`. When even one column
    /// can’t fit, falls back to listing one name per line.
    pub fn render<W: Write>(mut self, w: &mut W) -> io::Result<()> {
        let mut grid = tg::Grid::new(tg::GridOptions {
            direction: self.opts.direction(),
            filling: tg::Filling::Spaces(2),
        });

        grid.reserve(self.files.len());
        self.filter.sort_files(&mut self.files);
        for file in &self.files {
            let filename = self.file_style.for_file(file, self.theme).paint();

            grid.add(tg::Cell {
                contents: filename.strings().to_string(),
                // Width comes from the painted name’s own measurement,
                // not from the length of `contents`.
                width: *filename.width(),
                alignment: tg::Alignment::Left,
            });
        }

        if let Some(display) = grid.fit_into_width(self.console_width) {
            write!(w, "{}", display)
        }
        else {
            // File names too long for a grid - drop down to just listing them!
            // This isn’t *quite* the same as the lines view, which also
            // displays full link paths.
            for file in &self.files {
                let name_cell = self.file_style.for_file(file, self.theme).paint();
                writeln!(w, "{}", name_cell.strings())?;
            }
            Ok(())
        }
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/mod.rs | src/output/mod.rs | pub use self::cell::{TextCell, TextCellContents, DisplayWidth};
pub use self::escape::escape;
pub mod details;
pub mod file_name;
pub mod grid;
pub mod grid_details;
pub mod icons;
pub mod lines;
pub mod render;
pub mod table;
pub mod time;
mod cell;
mod escape;
mod tree;
/// The **view** contains all information about how to format output.
#[derive(Debug)]
pub struct View {
    /// Which layout (grid, details, …) to use.
    pub mode: Mode,
    /// The terminal width to lay output out within.
    pub width: TerminalWidth,
    /// How to format each file’s name.
    pub file_style: file_name::Options,
}
/// The **mode** is the “type” of output.
#[derive(PartialEq, Eq, Debug)]
#[allow(clippy::large_enum_variant)]
pub enum Mode {
    Grid(grid::Options),
    Details(details::Options),
    GridDetails(grid_details::Options),
    Lines,
}
/// The width of the terminal requested by the user.
#[derive(PartialEq, Eq, Debug, Copy, Clone)]
pub enum TerminalWidth {

    /// The user requested this specific number of columns.
    Set(usize),

    /// Look up the terminal size at runtime.
    Automatic,
}
impl TerminalWidth {
    /// Resolves this into a concrete column count: the user-requested
    /// value if one was set, or the terminal’s reported width (`None`
    /// when no size can be determined).
    pub fn actual_terminal_width(self) -> Option<usize> {
        // All of stdin, stdout, and stderr could not be connected to a
        // terminal, but we’re only interested in stdout because it’s
        // where the output goes.
        if let Self::Set(width) = self {
            return Some(width);
        }

        let (w, _h) = terminal_size::terminal_size()?;
        Some(w.0.into())
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/lines.rs | src/output/lines.rs | use std::io::{self, Write};
use ansi_term::ANSIStrings;
use crate::fs::File;
use crate::fs::filter::FileFilter;
use crate::output::cell::TextCellContents;
use crate::output::file_name::{Options as FileStyle};
use crate::theme::Theme;
/// The lines view literally just displays each file, line-by-line.
pub struct Render<'a> {
    /// The files to list, as yet unsorted.
    pub files: Vec<File<'a>>,
    /// Colours and styles for painting file names.
    pub theme: &'a Theme,
    /// How to format each file’s name.
    pub file_style: &'a FileStyle,
    /// How to sort and filter the files before display.
    pub filter: &'a FileFilter,
}
impl<'a> Render<'a> {
    /// Sorts the files, then writes each one’s painted name (including
    /// link targets) on its own line.
    pub fn render<W: Write>(mut self, w: &mut W) -> io::Result<()> {
        self.filter.sort_files(&mut self.files);

        self.files.iter().try_for_each(|file| {
            let name_cell = self.render_file(file);
            writeln!(w, "{}", ANSIStrings(&name_cell))
        })
    }

    /// Paints one file’s name, with the paths of any symlink targets.
    fn render_file<'f>(&self, file: &'f File<'a>) -> TextCellContents {
        self.file_style
            .for_file(file, self.theme)
            .with_link_paths()
            .paint()
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/details.rs | src/output/details.rs | //! The **Details** output view displays each file as a row in a table.
//!
//! It’s used in the following situations:
//!
//! - Most commonly, when using the `--long` command-line argument to display the
//! details of each file, which requires using a table view to hold all the data;
//! - When using the `--tree` argument, which uses the same table view to display
//! each file on its own line, with the table providing the tree characters;
//! - When using both the `--long` and `--grid` arguments, which constructs a
//! series of tables to fit all the data on the screen.
//!
//! You will probably recognise it from the `ls --long` command. It looks like
//! this:
//!
//! ```text
//! .rw-r--r-- 9.6k ben 29 Jun 16:16 Cargo.lock
//! .rw-r--r-- 547 ben 23 Jun 10:54 Cargo.toml
//! .rw-r--r-- 1.1k ben 23 Nov 2014 LICENCE
//! .rw-r--r-- 2.5k ben 21 May 14:38 README.md
//! .rw-r--r-- 382k ben 8 Jun 21:00 screenshot.png
//! drwxr-xr-x - ben 29 Jun 14:50 src
//! drwxr-xr-x - ben 28 Jun 19:53 target
//! ```
//!
//! The table is constructed by creating a `Table` value, which produces a `Row`
//! value for each file. These rows can contain a vector of `Cell`s, or they can
//! contain depth information for the tree view, or both. These are described
//! below.
//!
//!
//! ## Constructing Detail Views
//!
//! When using the `--long` command-line argument, the details of each file are
//! displayed next to its name.
//!
//! The table holds a vector of all the column types. For each file and column, a
//! `Cell` value containing the ANSI-coloured text and Unicode width of each cell
//! is generated, with the row and column determined by indexing into both arrays.
//!
//! The column types vector does not actually include the filename. This is
//! because the filename is always the rightmost field, and as such, it does not
//! need to have its width queried or be padded with spaces.
//!
//! To illustrate the above:
//!
//! ```text
//! ┌─────────────────────────────────────────────────────────────────────────┐
//! │ columns: [ Permissions, Size, User, Date(Modified) ] │
//! ├─────────────────────────────────────────────────────────────────────────┤
//! │ rows: cells: filename: │
//! │ row 1: [ ".rw-r--r--", "9.6k", "ben", "29 Jun 16:16" ] Cargo.lock │
//! │ row 2: [ ".rw-r--r--", "547", "ben", "23 Jun 10:54" ] Cargo.toml │
//! │ row 3: [ "drwxr-xr-x", "-", "ben", "29 Jun 14:50" ] src │
//! │ row 4: [ "drwxr-xr-x", "-", "ben", "28 Jun 19:53" ] target │
//! └─────────────────────────────────────────────────────────────────────────┘
//! ```
//!
//! Each column in the table needs to be resized to fit its widest argument. This
//! means that we must wait until every row has been added to the table before it
//! can be displayed, in order to make sure that every column is wide enough.
use std::io::{self, Write};
use std::mem::MaybeUninit;
use std::path::PathBuf;
use std::vec::IntoIter as VecIntoIter;
use ansi_term::Style;
use scoped_threadpool::Pool;
use crate::fs::{Dir, File};
use crate::fs::dir_action::RecurseOptions;
use crate::fs::feature::git::GitCache;
use crate::fs::feature::xattr::{Attribute, FileAttributes};
use crate::fs::filter::FileFilter;
use crate::output::cell::TextCell;
use crate::output::file_name::Options as FileStyle;
use crate::output::table::{Table, Options as TableOptions, Row as TableRow};
use crate::output::tree::{TreeTrunk, TreeParams, TreeDepth};
use crate::theme::Theme;
/// With the **Details** view, the output gets formatted into columns, with
/// each `Column` object showing some piece of information about the file,
/// such as its size, or its permissions.
///
/// To do this, the results have to be written to a table, instead of
/// displaying each file immediately. Then, the width of each column can be
/// calculated based on the individual results, and the fields are padded
/// during output.
///
/// Almost all the heavy lifting is done in a Table object, which handles the
/// columns for each row.
#[derive(PartialEq, Eq, Debug)]
pub struct Options {

    /// Options specific to drawing a table.
    ///
    /// Directories themselves can pick which columns are *added* to this
    /// list, such as the Git column.
    pub table: Option<TableOptions>,

    /// Whether to show a header line or not.
    pub header: bool,

    /// Whether to show each file’s extended attributes.
    pub xattr: bool,
}
/// The parameters needed to render the details view.
pub struct Render<'a> {
    /// The directory being listed, if any.
    pub dir: Option<&'a Dir>,
    /// The files to list, as yet unsorted.
    pub files: Vec<File<'a>>,
    /// Colours and styles for painting the output.
    pub theme: &'a Theme,
    /// How to format each file’s name.
    pub file_style: &'a FileStyle,
    /// The table, header, and xattr options for this view.
    pub opts: &'a Options,

    /// Whether to recurse through directories with a tree view, and if so,
    /// which options to use. This field is only relevant here if the `tree`
    /// field of the RecurseOptions is `true`.
    pub recurse: Option<RecurseOptions>,

    /// How to sort and filter the files after getting their details.
    pub filter: &'a FileFilter,

    /// Whether we are skipping Git-ignored files.
    pub git_ignoring: bool,

    /// The Git status cache, when Git support is enabled.
    pub git: Option<&'a GitCache>,
}
/// Everything gathered about one file before its row is emitted: the
/// rendered table cells, extended attributes, any errors hit along the
/// way, and the opened sub-directory when recursing.
struct Egg<'a> {
    /// The rendered table cells, when a table is being shown at all.
    table_row: Option<TableRow>,
    /// Extended attributes found on the file (cleared unless --extended).
    xattrs: Vec<Attribute>,
    /// Errors encountered for this file, each with an optional path.
    errors: Vec<(io::Error, Option<PathBuf>)>,
    /// The opened directory, when this file is one and we’re recursing.
    dir: Option<Dir>,
    /// The file itself.
    file: &'a File<'a>,
}
impl<'a> AsRef<File<'a>> for Egg<'a> {
    /// Lets an egg stand in for its file when sorting and filtering.
    fn as_ref(&self) -> &File<'a> {
        self.file
    }
}
impl<'a> Render<'a> {
pub fn render<W: Write>(mut self, w: &mut W) -> io::Result<()> {
let n_cpus = match num_cpus::get() as u32 {
0 => 1,
n => n,
};
let mut pool = Pool::new(n_cpus);
let mut rows = Vec::new();
if let Some(ref table) = self.opts.table {
match (self.git, self.dir) {
(Some(g), Some(d)) => if ! g.has_anything_for(&d.path) { self.git = None },
(Some(g), None) => if ! self.files.iter().any(|f| g.has_anything_for(&f.path)) { self.git = None },
(None, _) => {/* Keep Git how it is */},
}
let mut table = Table::new(table, self.git, self.theme);
if self.opts.header {
let header = table.header_row();
table.add_widths(&header);
rows.push(self.render_header(header));
}
// This is weird, but I can’t find a way around it:
// https://internals.rust-lang.org/t/should-option-mut-t-implement-copy/3715/6
let mut table = Some(table);
self.add_files_to_table(&mut pool, &mut table, &mut rows, &self.files, TreeDepth::root());
for row in self.iterate_with_table(table.unwrap(), rows) {
writeln!(w, "{}", row.strings())?
}
}
else {
self.add_files_to_table(&mut pool, &mut None, &mut rows, &self.files, TreeDepth::root());
for row in self.iterate(rows) {
writeln!(w, "{}", row.strings())?
}
}
Ok(())
}
/// Adds files to the table, possibly recursively. This is easily
/// parallelisable, and uses a pool of threads.
fn add_files_to_table<'dir>(&self, pool: &mut Pool, table: &mut Option<Table<'a>>, rows: &mut Vec<Row>, src: &[File<'dir>], depth: TreeDepth) {
use std::sync::{Arc, Mutex};
use log::*;
use crate::fs::feature::xattr;
let mut file_eggs = (0..src.len()).map(|_| MaybeUninit::uninit()).collect::<Vec<_>>();
pool.scoped(|scoped| {
let file_eggs = Arc::new(Mutex::new(&mut file_eggs));
let table = table.as_ref();
for (idx, file) in src.iter().enumerate() {
let file_eggs = Arc::clone(&file_eggs);
scoped.execute(move || {
let mut errors = Vec::new();
let mut xattrs = Vec::new();
// There are three “levels” of extended attribute support:
//
// 1. If we’re compiling without that feature, then
// exa pretends all files have no attributes.
// 2. If the feature is enabled and the --extended flag
// has been specified, then display an @ in the
// permissions column for files with attributes, the
// names of all attributes and their lengths, and any
// errors encountered when getting them.
// 3. If the --extended flag *hasn’t* been specified, then
// display the @, but don’t display anything else.
//
// For a while, exa took a stricter approach to (3):
// if an error occurred while checking a file’s xattrs to
// see if it should display the @, exa would display that
// error even though the attributes weren’t actually being
// shown! This was confusing, as users were being shown
// errors for something they didn’t explicitly ask for,
// and just cluttered up the output. So now errors aren’t
// printed unless the user passes --extended to signify
// that they want to see them.
if xattr::ENABLED {
match file.path.attributes() {
Ok(xs) => {
xattrs.extend(xs);
}
Err(e) => {
if self.opts.xattr {
errors.push((e, None));
}
else {
error!("Error looking up xattr for {:?}: {:#?}", file.path, e);
}
}
}
}
let table_row = table.as_ref()
.map(|t| t.row_for_file(file, ! xattrs.is_empty()));
if ! self.opts.xattr {
xattrs.clear();
}
let mut dir = None;
if let Some(r) = self.recurse {
if file.is_directory() && r.tree && ! r.is_too_deep(depth.0) {
match file.to_dir() {
Ok(d) => {
dir = Some(d);
}
Err(e) => {
errors.push((e, None));
}
}
}
};
let egg = Egg { table_row, xattrs, errors, dir, file };
unsafe { std::ptr::write(file_eggs.lock().unwrap()[idx].as_mut_ptr(), egg) }
});
}
});
// this is safe because all entries have been initialized above
let mut file_eggs = unsafe { std::mem::transmute::<_, Vec<Egg<'_>>>(file_eggs) };
self.filter.sort_files(&mut file_eggs);
for (tree_params, egg) in depth.iterate_over(file_eggs.into_iter()) {
let mut files = Vec::new();
let mut errors = egg.errors;
if let (Some(ref mut t), Some(row)) = (table.as_mut(), egg.table_row.as_ref()) {
t.add_widths(row);
}
let file_name = self.file_style.for_file(egg.file, self.theme)
.with_link_paths()
.paint()
.promote();
let row = Row {
tree: tree_params,
cells: egg.table_row,
name: file_name,
};
rows.push(row);
if let Some(ref dir) = egg.dir {
for file_to_add in dir.files(self.filter.dot_filter, self.git, self.git_ignoring) {
match file_to_add {
Ok(f) => {
files.push(f);
}
Err((path, e)) => {
errors.push((e, Some(path)));
}
}
}
self.filter.filter_child_files(&mut files);
if ! files.is_empty() {
for xattr in egg.xattrs {
rows.push(self.render_xattr(&xattr, TreeParams::new(depth.deeper(), false)));
}
for (error, path) in errors {
rows.push(self.render_error(&error, TreeParams::new(depth.deeper(), false), path));
}
self.add_files_to_table(pool, table, rows, &files, depth.deeper());
continue;
}
}
let count = egg.xattrs.len();
for (index, xattr) in egg.xattrs.into_iter().enumerate() {
let params = TreeParams::new(depth.deeper(), errors.is_empty() && index == count - 1);
let r = self.render_xattr(&xattr, params);
rows.push(r);
}
let count = errors.len();
for (index, (error, path)) in errors.into_iter().enumerate() {
let params = TreeParams::new(depth.deeper(), index == count - 1);
let r = self.render_error(&error, params, path);
rows.push(r);
}
}
}
/// Builds the row that holds the column titles, shown at the top of the
/// table when the --header option is active.
pub fn render_header(&self, header: TableRow) -> Row {
    let name = TextCell::paint_str(self.theme.ui.header, "Name");
    let tree = TreeParams::new(TreeDepth::root(), false);
    Row { cells: Some(header), name, tree }
}
/// Builds a cell-less row describing an I/O error, prefixed with the
/// offending path when one is known.
fn render_error(&self, error: &io::Error, tree: TreeParams, path: Option<PathBuf>) -> Row {
    use crate::output::file_name::Colours;

    let error_message = match path {
        Some(path) => format!("<{}: {}>", path.display(), error),
        None       => format!("<{}>", error),
    };

    // TODO: broken_symlink() doesn’t quite seem like the right name for
    // the style that’s being used here. Maybe split it in two?
    let name = TextCell::paint(self.theme.broken_symlink(), error_message);
    Row { cells: None, name, tree }
}
/// Builds a cell-less row describing one extended attribute: its name
/// followed by its length in bytes.
fn render_xattr(&self, xattr: &Attribute, tree: TreeParams) -> Row {
    let text = format!("{} (len {})", xattr.name, xattr.size);
    let name = TextCell::paint(self.theme.ui.perms.attribute, text);
    Row { cells: None, name, tree }
}
/// Wraps an already-rendered set of cells and a file name into a `Row`.
pub fn render_file(&self, cells: TableRow, name: TextCell, tree: TreeParams) -> Row {
    Row { tree, name, cells: Some(cells) }
}
/// Consumes the table and the rendered rows, producing an iterator that
/// yields one finished `TextCell` per row, padded to the table’s width.
pub fn iterate_with_table(&'a self, table: Table<'a>, rows: Vec<Row>) -> TableIter<'a> {
    // Measure the table before moving it into the iterator.
    let total_width = table.widths().total();

    TableIter {
        table,
        total_width,
        inner: rows.into_iter(),
        tree_trunk: TreeTrunk::default(),
        tree_style: self.theme.ui.punctuation,
    }
}
/// Consumes the rendered rows, producing an iterator over their finished
/// `TextCell`s for the table-less (no columns) case.
pub fn iterate(&'a self, rows: Vec<Row>) -> Iter {
    Iter {
        inner: rows.into_iter(),
        tree_trunk: TreeTrunk::default(),
        tree_style: self.theme.ui.punctuation,
    }
}
}
/// One row of details-view output: the optional table cells, the file
/// name, and the tree position used to draw branch characters.
pub struct Row {

    /// Vector of cells to display.
    ///
    /// Most of the rows will be used to display files’ metadata, so this will
    /// almost always be `Some`, containing a vector of cells. It will only be
    /// `None` for a row displaying an attribute or error, neither of which
    /// have cells.
    pub cells: Option<TableRow>,

    /// This file’s name, in coloured output. The name is treated separately
    /// from the other cells, as it never requires padding.
    pub name: TextCell,

    /// Information used to determine which symbols to display in a tree.
    pub tree: TreeParams,
}
/// Iterator over details rows when a table is present: each row’s cells
/// are rendered through the table (padded to its column widths) before
/// the tree decoration and file name are appended.
pub struct TableIter<'a> {
    /// The remaining rows to render.
    inner: VecIntoIter<Row>,
    /// The table used to render each row’s cells.
    table: Table<'a>,
    /// Combined width of all columns, used to pad rows that have no cells.
    total_width: usize,
    /// The style used to paint the tree branch characters.
    tree_style: Style,
    /// Tracks tree state between rows so the branches connect correctly.
    tree_trunk: TreeTrunk,
}
impl<'a> Iterator for TableIter<'a> {
    type Item = TextCell;

    fn next(&mut self) -> Option<Self::Item> {
        let row = self.inner.next()?;

        // Rows with cells get rendered through the table; rows without
        // cells (attributes and errors) get a blank cell padded to the
        // full table width so the name still lines up.
        let mut cell = match row.cells {
            Some(cells) => self.table.render(cells),
            None => {
                let mut blank = TextCell::default();
                blank.add_spaces(self.total_width);
                blank
            }
        };

        for tree_part in self.tree_trunk.new_row(row.tree) {
            cell.push(self.tree_style.paint(tree_part.ascii_art()), 4);
        }

        // If any tree characters have been printed, then add an extra
        // space, which makes the output look much better.
        if ! row.tree.is_at_root() {
            cell.add_spaces(1);
        }

        cell.append(row.name);
        Some(cell)
    }
}
/// Iterator over details rows when no table is present: only the tree
/// decoration and the file name are rendered, with no cell columns.
pub struct Iter {
    /// Tracks tree state between rows so the branches connect correctly.
    tree_trunk: TreeTrunk,
    /// The style used to paint the tree branch characters.
    tree_style: Style,
    /// The remaining rows to render.
    inner: VecIntoIter<Row>,
}
impl Iterator for Iter {
    type Item = TextCell;

    fn next(&mut self) -> Option<Self::Item> {
        let row = self.inner.next()?;

        let mut cell = TextCell::default();
        for tree_part in self.tree_trunk.new_row(row.tree) {
            cell.push(self.tree_style.paint(tree_part.ascii_art()), 4);
        }

        // If any tree characters have been printed, then add an extra
        // space, which makes the output look much better.
        if ! row.tree.is_at_root() {
            cell.add_spaces(1);
        }

        cell.append(row.name);
        Some(cell)
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/escape.rs | src/output/escape.rs | use ansi_term::{ANSIString, Style};
/// Pushes the given string onto `bits`, painting printable text with the
/// `good` style and escaped control characters with the `bad` style.
///
/// Anything below U+0020 plus the DEL character (U+007F) counts as a
/// control character and is rendered via `char::escape_default`; every
/// other character — including non-ASCII UTF-8 — is shown as-is.
pub fn escape(string: String, bits: &mut Vec<ANSIString<'_>>, good: Style, bad: Style) {
    // Fast path: if nothing needs escaping, the whole string becomes one
    // ANSI fragment with no per-character work at all.
    if string.chars().all(|c| c >= 0x20 as char && c != 0x7f as char) {
        bits.push(good.paint(string));
        return;
    }

    // Slow path: batch consecutive printable characters into a single
    // fragment, instead of allocating one string per character.
    //
    // The `escape_default` method on `char` is *almost* what we want here,
    // but it still escapes non-ASCII UTF-8 characters, which are still
    // printable, so it is only applied to the control characters.
    let mut run = String::new();
    for c in string.chars() {
        if c >= 0x20 as char && c != 0x7f as char {
            run.push(c);
        }
        else {
            // Flush the pending printable run before the escaped character.
            if ! run.is_empty() {
                bits.push(good.paint(std::mem::take(&mut run)));
            }
            bits.push(bad.paint(c.escape_default().collect::<String>()));
        }
    }

    if ! run.is_empty() {
        bits.push(good.paint(run));
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/cell.rs | src/output/cell.rs | //! The `TextCell` type for the details and lines views.
use std::iter::Sum;
use std::ops::{Add, Deref, DerefMut};
use ansi_term::{Style, ANSIString, ANSIStrings};
use unicode_width::UnicodeWidthStr;
/// An individual cell that holds text in a table, used in the details and
/// lines views to store ANSI-terminal-formatted data before it is printed.
///
/// A text cell is made up of zero or more strings coupled with the
/// pre-computed length of all the strings combined. When constructing details
/// or grid-details tables, the length will have to be queried multiple times,
/// so it makes sense to cache it.
///
/// (This used to be called `Cell`, but was renamed because there’s a Rust
/// type by that name too.)
#[derive(PartialEq, Debug, Clone, Default)]
pub struct TextCell {

    /// The contents of this cell, as a vector of ANSI-styled strings.
    pub contents: TextCellContents,

    /// The Unicode “display width” of this cell.
    ///
    /// Cached here so layout code doesn’t have to re-measure the contents
    /// every time a width is queried.
    pub width: DisplayWidth,
}
// Dereferencing a cell exposes its contents, so the slice-like methods
// on `TextCellContents` can be called on a `TextCell` directly.
impl Deref for TextCell {
    type Target = TextCellContents;

    fn deref(&self) -> &Self::Target {
        &self.contents
    }
}
impl TextCell {

    /// Creates a new text cell holding the given text in the given style,
    /// measuring the text’s Unicode display width.
    pub fn paint(style: Style, text: String) -> Self {
        let width = DisplayWidth::from(&*text);
        let contents = vec![ style.paint(text) ].into();
        Self { contents, width }
    }

    /// As `paint`, but for static strings. (The two could be merged, but
    /// the borrow vs. owned input keeps them separate.)
    pub fn paint_str(style: Style, text: &'static str) -> Self {
        let width = DisplayWidth::from(text);
        let contents = vec![ style.paint(text) ].into();
        Self { contents, width }
    }

    /// Creates a new “blank” text cell containing a single hyphen in the
    /// given style, which should be the “punctuation” style from a
    /// `Colours` value.
    ///
    /// This is used in place of empty table cells, as it is easier to
    /// read tabular data when there is *something* in each cell.
    pub fn blank(style: Style) -> Self {
        Self {
            width: DisplayWidth::from(1),
            contents: vec![ style.paint("-") ].into(),
        }
    }

    /// Appends the given number of unstyled spaces to this cell.
    ///
    /// This method allocates a `String` to hold the spaces.
    pub fn add_spaces(&mut self, count: usize) {
        *self.width += count;
        self.contents.0.push(Style::default().paint(" ".repeat(count)));
    }

    /// Appends another `ANSIString` to this cell, adding the given number
    /// of columns to the cached width.
    pub fn push(&mut self, string: ANSIString<'static>, extra_width: usize) {
        *self.width += extra_width;
        self.contents.0.push(string);
    }

    /// Moves all the contents of another `TextCell` onto the end of this
    /// one, combining the widths.
    pub fn append(&mut self, other: Self) {
        *self.width += *other.width;
        self.contents.0.extend(other.contents.0);
    }
}
// I’d like to eventually abstract cells so that instead of *every* cell
// storing a vector, only variable-length cells would, and individual cells
// would just store an array of a fixed length (which would usually be just 1
// or 2), which wouldn’t require a heap allocation.
//
// For examples, look at the `render_*` methods in the `Table` object in the
// details view:
//
// - `render_blocks`, `inode`, and `links` will always return a
// one-string-long TextCell;
// - `render_size` will return one or two strings in a TextCell, depending on
// the size and whether one is present;
// - `render_permissions` will return ten or eleven strings;
// - `filename` and `symlink_filename` in the output module root return six or
// five strings.
//
// In none of these cases are we dealing with a *truly variable* number of
// strings: it is only when the strings are concatenated together do we need a
// growable, heap-allocated buffer.
//
// So it would be nice to abstract the `TextCell` type so instead of a `Vec`,
// it can use anything of type `T: IntoIterator<Item=ANSIString<’static>>`.
// This would allow us to still hold all the data, but allocate less.
//
// But exa still has bugs and I need to fix those first :(
/// The contents of a text cell, as a vector of ANSI-styled strings.
///
/// It’s possible to use this type directly in the case where you want a
/// `TextCell` but aren’t concerned with tracking its width, because it occurs
/// in the final cell of a table or grid and there’s no point padding it. This
/// happens when dealing with file names.
#[derive(PartialEq, Debug, Clone, Default)]
pub struct TextCellContents(Vec<ANSIString<'static>>);

// Lets a plain vector of styled strings be used anywhere contents are
// expected (see `TextCell::paint` and friends).
impl From<Vec<ANSIString<'static>>> for TextCellContents {
    fn from(strings: Vec<ANSIString<'static>>) -> Self {
        Self(strings)
    }
}
// Read-only slice access to the underlying styled strings.
impl Deref for TextCellContents {
    type Target = [ANSIString<'static>];

    fn deref(&self) -> &Self::Target {
        &*self.0
    }
}

// No DerefMut implementation here — it would be publicly accessible, and as
// the contents only get changed in this module, the mutators in the struct
// above can just access the value directly.
impl TextCellContents {

    /// Produces an `ANSIStrings` value that can be used to print the
    /// styled values of this cell as one ANSI-terminal-formatted string.
    pub fn strings(&self) -> ANSIStrings<'_> {
        ANSIStrings(self.0.as_slice())
    }

    /// Calculates the width a cell with these contents would take up, by
    /// measuring each unformatted fragment and summing the results.
    pub fn width(&self) -> DisplayWidth {
        self.0.iter()
              .map(|fragment| DisplayWidth::from(&**fragment))
              .sum()
    }

    /// Promotes these contents to a full cell containing them alongside
    /// their calculated width.
    pub fn promote(self) -> TextCell {
        let width = self.width();
        TextCell { contents: self, width }
    }
}
/// The Unicode “display width” of a string.
///
/// This is related to the number of *graphemes* of a string, rather than the
/// number of *characters*, or *bytes*: although most characters are one
/// column wide, a few can be two columns wide, and this is important to note
/// when calculating widths for displaying tables in a terminal.
///
/// This type is used to ensure that the width, rather than the length, is
/// used when constructing a `TextCell` — it’s too easy to write something
/// like `file_name.len()` and assume it will work!
///
/// It has `From` impls that convert an input string or fixed width to values
/// of this type, and will `Deref` to the contained `usize` value.
#[derive(PartialEq, Eq, Debug, Clone, Copy, Default)]
pub struct DisplayWidth(usize);
// Construct from a string by measuring its Unicode display width —
// not its byte length or its character count.
impl<'a> From<&'a str> for DisplayWidth {
    fn from(input: &'a str) -> Self {
        Self(UnicodeWidthStr::width(input))
    }
}

// Construct from an already-known number of columns.
impl From<usize> for DisplayWidth {
    fn from(width: usize) -> Self {
        Self(width)
    }
}

// Deref and DerefMut expose the wrapped usize, so widths can be read and
// adjusted without unwrapping the newtype.
impl Deref for DisplayWidth {
    type Target = usize;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl DerefMut for DisplayWidth {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}

// Widths add together when cells are concatenated.
impl Add for DisplayWidth {
    type Output = Self;

    fn add(self, rhs: Self) -> Self::Output {
        Self(self.0 + rhs.0)
    }
}

impl Add<usize> for DisplayWidth {
    type Output = Self;

    fn add(self, rhs: usize) -> Self::Output {
        Self(self.0 + rhs)
    }
}

// Allows summing an iterator of widths (used by `TextCellContents::width`).
impl Sum for DisplayWidth {
    fn sum<I>(iter: I) -> Self
    where I: Iterator<Item = Self>
    {
        iter.fold(Self(0), Add::add)
    }
}
// Unit tests for DisplayWidth construction and arithmetic.
#[cfg(test)]
mod width_unit_test {
    use super::DisplayWidth;

    #[test]
    fn empty_string() {
        let cell = DisplayWidth::from("");
        assert_eq!(*cell, 0);
    }

    #[test]
    fn test_string() {
        let cell = DisplayWidth::from("Diss Playwidth");
        assert_eq!(*cell, 14);
    }

    // Widths of concatenated strings add together (9 + 8 columns).
    #[test]
    fn addition() {
        let cell_one = DisplayWidth::from("/usr/bin/");
        let cell_two = DisplayWidth::from("drinking");
        assert_eq!(*(cell_one + cell_two), 17);
    }

    #[test]
    fn addition_usize() {
        let cell = DisplayWidth::from("/usr/bin/");
        assert_eq!(*(cell + 8), 17);
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/render/permissions.rs | src/output/render/permissions.rs | use ansi_term::{ANSIString, Style};
use crate::fs::fields as f;
use crate::output::cell::{TextCell, DisplayWidth};
use crate::output::render::FiletypeColours;
impl f::PermissionsPlus {
    /// Renders the full permissions column on Unix: one file-type
    /// character, nine permission characters, and a trailing `@` when the
    /// file has extended attributes.
    #[cfg(unix)]
    pub fn render<C: Colours+FiletypeColours>(&self, colours: &C) -> TextCell {
        let mut chars = vec![ self.file_type.render(colours) ];
        chars.extend(self.permissions.render(colours, self.file_type.is_regular_file()));

        if self.xattrs {
            chars.push(colours.attribute().paint("@"));
        }

        // As these are all ASCII characters, we can guarantee that they’re
        // all going to be one character wide, and don’t need to compute the
        // cell’s display width.
        TextCell {
            width: DisplayWidth::from(chars.len()),
            contents: chars.into(),
        }
    }

    /// Renders the attributes column on Windows: one type character
    /// followed by the four attribute flag characters.
    #[cfg(windows)]
    pub fn render<C: Colours+FiletypeColours>(&self, colours: &C) -> TextCell {
        let mut chars = vec![ self.attributes.render_type(colours) ];
        chars.extend(self.attributes.render(colours));

        // All characters are ASCII, so each is exactly one column wide.
        TextCell {
            width: DisplayWidth::from(chars.len()),
            contents: chars.into(),
        }
    }
}
impl f::Permissions {
    /// Renders the nine permission characters — three rwx triplets for
    /// user, group, and other — taking the setuid, setgid, and sticky
    /// bits into account for the three execute positions.
    pub fn render<C: Colours>(&self, colours: &C, is_regular_file: bool) -> Vec<ANSIString<'static>> {
        let paint_bit = |is_set: bool, chr: &'static str, style: Style| {
            if is_set { style.paint(chr) }
            else { colours.dash().paint("-") }
        };

        vec![
            paint_bit(self.user_read, "r", colours.user_read()),
            paint_bit(self.user_write, "w", colours.user_write()),
            self.user_execute_bit(colours, is_regular_file),
            paint_bit(self.group_read, "r", colours.group_read()),
            paint_bit(self.group_write, "w", colours.group_write()),
            self.group_execute_bit(colours),
            paint_bit(self.other_read, "r", colours.other_read()),
            paint_bit(self.other_write, "w", colours.other_write()),
            self.other_execute_bit(colours)
        ]
    }

    /// The user execute character: `x`, `s`, `S`, or `-` depending on the
    /// execute and setuid bits, styled differently for regular files.
    fn user_execute_bit<C: Colours>(&self, colours: &C, is_regular_file: bool) -> ANSIString<'static> {
        if self.setuid {
            if ! self.user_execute {
                colours.special_other().paint("S")
            }
            else if is_regular_file {
                colours.special_user_file().paint("s")
            }
            else {
                colours.special_other().paint("s")
            }
        }
        else if ! self.user_execute {
            colours.dash().paint("-")
        }
        else if is_regular_file {
            colours.user_execute_file().paint("x")
        }
        else {
            colours.user_execute_other().paint("x")
        }
    }

    /// The group execute character: `x`, `s`, `S`, or `-` from the
    /// execute and setgid bits.
    fn group_execute_bit<C: Colours>(&self, colours: &C) -> ANSIString<'static> {
        if self.setgid {
            let chr = if self.group_execute { "s" } else { "S" };
            colours.special_other().paint(chr)
        }
        else if self.group_execute {
            colours.group_execute().paint("x")
        }
        else {
            colours.dash().paint("-")
        }
    }

    /// The other execute character: `x`, `t`, `T`, or `-` from the
    /// execute and sticky bits.
    fn other_execute_bit<C: Colours>(&self, colours: &C) -> ANSIString<'static> {
        if self.sticky {
            let chr = if self.other_execute { "t" } else { "T" };
            colours.special_other().paint(chr)
        }
        else if self.other_execute {
            colours.other_execute().paint("x")
        }
        else {
            colours.dash().paint("-")
        }
    }
}
impl f::Attributes {
    /// Renders the four Windows attribute flags (archive, read-only,
    /// hidden, system), painting a dash for every flag that is unset.
    pub fn render<C: Colours+FiletypeColours>(&self, colours: &C) -> Vec<ANSIString<'static>> {
        let bit = |bit, chr: &'static str, style: Style| {
            if bit { style.paint(chr) }
            else { colours.dash().paint("-") }
        };

        vec![
            bit(self.archive, "a", colours.normal()),
            bit(self.readonly, "r", colours.user_read()),
            bit(self.hidden, "h", colours.special_user_file()),
            bit(self.system, "s", colours.special_other()),
        ]
    }

    /// Renders the leading type character: `l` for reparse points
    /// (links), `d` for directories, and `-` for everything else.
    ///
    /// Written as an expression rather than a chain of `return`s — every
    /// branch was already the function’s tail, so the explicit returns
    /// were redundant (clippy’s `needless_return`).
    pub fn render_type<C: Colours+FiletypeColours>(&self, colours: &C) -> ANSIString<'static> {
        if self.reparse_point {
            colours.pipe().paint("l")
        }
        else if self.directory {
            colours.directory().paint("d")
        }
        else {
            colours.dash().paint("-")
        }
    }
}
/// The set of styles used to paint each character of the permissions
/// column. Implemented by the theme, keeping the renderers above
/// colour-scheme-agnostic.
pub trait Colours {
    fn dash(&self) -> Style;
    fn user_read(&self) -> Style;
    fn user_write(&self) -> Style;
    fn user_execute_file(&self) -> Style;
    fn user_execute_other(&self) -> Style;
    fn group_read(&self) -> Style;
    fn group_write(&self) -> Style;
    fn group_execute(&self) -> Style;
    fn other_read(&self) -> Style;
    fn other_write(&self) -> Style;
    fn other_execute(&self) -> Style;
    fn special_user_file(&self) -> Style;
    fn special_other(&self) -> Style;
    fn attribute(&self) -> Style;
}
#[cfg(test)]
#[allow(unused_results)]
pub mod test {
    use super::Colours;
    use crate::output::cell::TextCellContents;
    use crate::fs::fields as f;

    use ansi_term::Colour::*;
    use ansi_term::Style;

    // A colour scheme that gives every slot its own distinct fixed
    // colour, so each test can assert exactly which style painted which
    // character.
    struct TestColours;
    impl Colours for TestColours {
        fn dash(&self) -> Style { Fixed(11).normal() }
        fn user_read(&self) -> Style { Fixed(101).normal() }
        fn user_write(&self) -> Style { Fixed(102).normal() }
        fn user_execute_file(&self) -> Style { Fixed(103).normal() }
        fn user_execute_other(&self) -> Style { Fixed(113).normal() }
        fn group_read(&self) -> Style { Fixed(104).normal() }
        fn group_write(&self) -> Style { Fixed(105).normal() }
        fn group_execute(&self) -> Style { Fixed(106).normal() }
        fn other_read(&self) -> Style { Fixed(107).normal() }
        fn other_write(&self) -> Style { Fixed(108).normal() }
        fn other_execute(&self) -> Style { Fixed(109).normal() }
        fn special_user_file(&self) -> Style { Fixed(110).normal() }
        fn special_other(&self) -> Style { Fixed(111).normal() }
        fn attribute(&self) -> Style { Fixed(112).normal() }
    }

    // All bits cleared: nine dashes in the dash colour.
    #[test]
    fn negate() {
        let bits = f::Permissions {
            user_read: false, user_write: false, user_execute: false, setuid: false,
            group_read: false, group_write: false, group_execute: false, setgid: false,
            other_read: false, other_write: false, other_execute: false, sticky: false,
        };
        let expected = TextCellContents::from(vec![
            Fixed(11).paint("-"), Fixed(11).paint("-"), Fixed(11).paint("-"),
            Fixed(11).paint("-"), Fixed(11).paint("-"), Fixed(11).paint("-"),
            Fixed(11).paint("-"), Fixed(11).paint("-"), Fixed(11).paint("-"),
        ]);
        assert_eq!(expected, bits.render(&TestColours, false).into())
    }

    // All rwx bits set on a regular file: "rwxrwxrwx".
    #[test]
    fn affirm() {
        let bits = f::Permissions {
            user_read: true, user_write: true, user_execute: true, setuid: false,
            group_read: true, group_write: true, group_execute: true, setgid: false,
            other_read: true, other_write: true, other_execute: true, sticky: false,
        };
        let expected = TextCellContents::from(vec![
            Fixed(101).paint("r"), Fixed(102).paint("w"), Fixed(103).paint("x"),
            Fixed(104).paint("r"), Fixed(105).paint("w"), Fixed(106).paint("x"),
            Fixed(107).paint("r"), Fixed(108).paint("w"), Fixed(109).paint("x"),
        ]);
        assert_eq!(expected, bits.render(&TestColours, true).into())
    }

    // Execute plus setuid/setgid/sticky bits: lowercase s/s/t.
    #[test]
    fn specials() {
        let bits = f::Permissions {
            user_read: false, user_write: false, user_execute: true, setuid: true,
            group_read: false, group_write: false, group_execute: true, setgid: true,
            other_read: false, other_write: false, other_execute: true, sticky: true,
        };
        let expected = TextCellContents::from(vec![
            Fixed(11).paint("-"), Fixed(11).paint("-"), Fixed(110).paint("s"),
            Fixed(11).paint("-"), Fixed(11).paint("-"), Fixed(111).paint("s"),
            Fixed(11).paint("-"), Fixed(11).paint("-"), Fixed(111).paint("t"),
        ]);
        assert_eq!(expected, bits.render(&TestColours, true).into())
    }

    // Special bits without the matching execute bit: uppercase S/S/T.
    #[test]
    fn extra_specials() {
        let bits = f::Permissions {
            user_read: false, user_write: false, user_execute: false, setuid: true,
            group_read: false, group_write: false, group_execute: false, setgid: true,
            other_read: false, other_write: false, other_execute: false, sticky: true,
        };
        let expected = TextCellContents::from(vec![
            Fixed(11).paint("-"), Fixed(11).paint("-"), Fixed(111).paint("S"),
            Fixed(11).paint("-"), Fixed(11).paint("-"), Fixed(111).paint("S"),
            Fixed(11).paint("-"), Fixed(11).paint("-"), Fixed(111).paint("T"),
        ]);
        assert_eq!(expected, bits.render(&TestColours, true).into())
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/render/times.rs | src/output/render/times.rs | use std::time::SystemTime;
use datetime::TimeZone;
use ansi_term::Style;
use crate::output::cell::TextCell;
use crate::output::time::TimeFormat;
/// Renders a (possibly missing) timestamp as a table cell.
pub trait Render {
    fn render(self, style: Style, tz: &Option<TimeZone>, format: TimeFormat) -> TextCell;
}
impl Render for Option<SystemTime> {
    fn render(self, style: Style, tz: &Option<TimeZone>, format: TimeFormat) -> TextCell {
        // A missing timestamp renders as a dash. Otherwise, format the
        // time in the given zone, falling back to local time when no
        // zone has been configured.
        let datestamp = match (self, tz) {
            (Some(time), Some(tz)) => format.format_zoned(time, tz),
            (Some(time), None)     => format.format_local(time),
            (None, _)              => String::from("-"),
        };

        TextCell::paint(style, datestamp)
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/render/filetype.rs | src/output/render/filetype.rs | use ansi_term::{ANSIString, Style};
use crate::fs::fields as f;
impl f::Type {
    /// Renders the single file-type character shown at the start of the
    /// permissions column, painted with the matching theme style.
    pub fn render<C: Colours>(self, colours: &C) -> ANSIString<'static> {
        match self {
            Self::File => colours.normal().paint("."),
            Self::Directory => colours.directory().paint("d"),
            Self::Pipe => colours.pipe().paint("|"),
            Self::Link => colours.symlink().paint("l"),
            Self::BlockDevice => colours.block_device().paint("b"),
            Self::CharDevice => colours.char_device().paint("c"),
            Self::Socket => colours.socket().paint("s"),
            Self::Special => colours.special().paint("?"),
        }
    }
}
/// The styles used to paint each kind of file-type character.
pub trait Colours {
    fn normal(&self) -> Style;
    fn directory(&self) -> Style;
    fn pipe(&self) -> Style;
    fn symlink(&self) -> Style;
    fn block_device(&self) -> Style;
    fn char_device(&self) -> Style;
    fn socket(&self) -> Style;
    fn special(&self) -> Style;
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/render/git.rs | src/output/render/git.rs | use ansi_term::{ANSIString, Style};
use crate::output::cell::{TextCell, DisplayWidth};
use crate::fs::fields as f;
impl f::Git {
    /// Renders the two-character Git column: the staged status followed
    /// by the unstaged status.
    pub fn render(self, colours: &dyn Colours) -> TextCell {
        let contents = vec![
            self.staged.render(colours),
            self.unstaged.render(colours),
        ];

        // Both characters are ASCII, so the width is always two columns.
        TextCell {
            width: DisplayWidth::from(2),
            contents: contents.into(),
        }
    }
}
impl f::GitStatus {
    /// Renders one status character: `-` for files with no changes, or a
    /// letter describing how the file has changed.
    fn render(self, colours: &dyn Colours) -> ANSIString<'static> {
        match self {
            Self::NotModified => colours.not_modified().paint("-"),
            Self::New => colours.new().paint("N"),
            Self::Modified => colours.modified().paint("M"),
            Self::Deleted => colours.deleted().paint("D"),
            Self::Renamed => colours.renamed().paint("R"),
            Self::TypeChange => colours.type_change().paint("T"),
            Self::Ignored => colours.ignored().paint("I"),
            Self::Conflicted => colours.conflicted().paint("U"),
        }
    }
}
/// The styles used to paint each kind of Git status character.
pub trait Colours {
    fn not_modified(&self) -> Style;

    // `new` here names the “newly added” status colour, not a
    // constructor, so the usual `new() -> Self` lint doesn’t apply.
    #[allow(clippy::new_ret_no_self)]
    fn new(&self) -> Style;

    fn modified(&self) -> Style;
    fn deleted(&self) -> Style;
    fn renamed(&self) -> Style;
    fn type_change(&self) -> Style;
    fn ignored(&self) -> Style;
    fn conflicted(&self) -> Style;
}
#[cfg(test)]
pub mod test {
    use super::Colours;
    use crate::output::cell::{TextCell, DisplayWidth};
    use crate::fs::fields as f;

    use ansi_term::Colour::*;
    use ansi_term::Style;

    // A colour scheme with a distinct fixed colour per status, so the
    // tests can assert exactly which style painted which character.
    struct TestColours;
    impl Colours for TestColours {
        fn not_modified(&self) -> Style { Fixed(90).normal() }
        fn new(&self) -> Style { Fixed(91).normal() }
        fn modified(&self) -> Style { Fixed(92).normal() }
        fn deleted(&self) -> Style { Fixed(93).normal() }
        fn renamed(&self) -> Style { Fixed(94).normal() }
        fn type_change(&self) -> Style { Fixed(95).normal() }
        fn ignored(&self) -> Style { Fixed(96).normal() }
        fn conflicted(&self) -> Style { Fixed(97).normal() }
    }

    // Two unmodified statuses render as two dashes.
    #[test]
    fn git_blank() {
        let stati = f::Git {
            staged: f::GitStatus::NotModified,
            unstaged: f::GitStatus::NotModified,
        };
        let expected = TextCell {
            width: DisplayWidth::from(2),
            contents: vec![
                Fixed(90).paint("-"),
                Fixed(90).paint("-"),
            ].into(),
        };
        assert_eq!(expected, stati.render(&TestColours))
    }

    // Staged-new plus unstaged-modified renders as “NM”.
    #[test]
    fn git_new_changed() {
        let stati = f::Git {
            staged: f::GitStatus::New,
            unstaged: f::GitStatus::Modified,
        };
        let expected = TextCell {
            width: DisplayWidth::from(2),
            contents: vec![
                Fixed(91).paint("N"),
                Fixed(92).paint("M"),
            ].into(),
        };
        assert_eq!(expected, stati.render(&TestColours))
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/render/inode.rs | src/output/render/inode.rs | use ansi_term::Style;
use crate::fs::fields as f;
use crate::output::cell::TextCell;
impl f::Inode {
    /// Renders the inode number as its decimal digits in the given style.
    pub fn render(self, style: Style) -> TextCell {
        let number = self.0.to_string();
        TextCell::paint(style, number)
    }
}
// Unit test for inode rendering.
#[cfg(test)]
pub mod test {
    use crate::output::cell::TextCell;
    use crate::fs::fields as f;

    use ansi_term::Colour::*;

    // An inode number renders as its plain decimal digits, painted with
    // whatever style the caller supplies.
    #[test]
    fn blocklessness() {
        let io = f::Inode(1_414_213);
        let expected = TextCell::paint_str(Cyan.underline(), "1414213");
        assert_eq!(expected, io.render(Cyan.underline()));
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/render/users.rs | src/output/render/users.rs | use ansi_term::Style;
use users::Users;
use crate::fs::fields as f;
use crate::output::cell::TextCell;
use crate::output::table::UserFormat;
impl f::User {
    /// Renders this user ID as a cell: the user’s name when it can be
    /// looked up and names were requested, or the numeric ID otherwise.
    /// The cell is highlighted when the ID belongs to the current user.
    pub fn render<C: Colours, U: Users>(self, colours: &C, users: &U, format: UserFormat) -> TextCell {
        // A name is shown only when the lookup succeeds *and* the
        // name format was requested; every other case falls back to the
        // numeric ID.
        let user_name = match (users.get_user_by_uid(self.0), format) {
            (Some(user), UserFormat::Name) => user.name().to_string_lossy().into(),
            _ => self.0.to_string(),
        };

        let style = if users.get_current_uid() == self.0 { colours.you() }
                    else { colours.someone_else() };
        TextCell::paint(style, user_name)
    }
}
/// The styles used to paint user names: one for the user running exa,
/// and one for everybody else.
pub trait Colours {
    fn you(&self) -> Style;
    fn someone_else(&self) -> Style;
}
#[cfg(test)]
#[allow(unused_results)]
pub mod test {
    use super::Colours;
    use crate::fs::fields as f;
    use crate::output::cell::TextCell;
    use crate::output::table::UserFormat;

    use users::User;
    use users::mock::MockUsers;
    use ansi_term::Colour::*;
    use ansi_term::Style;

    // Two distinct styles so tests can tell “you” from “someone else”.
    struct TestColours;
    impl Colours for TestColours {
        fn you(&self) -> Style { Red.bold() }
        fn someone_else(&self) -> Style { Blue.underline() }
    }

    // The current user with a known name: name format shows the name,
    // numeric format shows the ID — both in the “you” style.
    #[test]
    fn named() {
        let mut users = MockUsers::with_current_uid(1000);
        users.add_user(User::new(1000, "enoch", 100));

        let user = f::User(1000);
        let expected = TextCell::paint_str(Red.bold(), "enoch");
        assert_eq!(expected, user.render(&TestColours, &users, UserFormat::Name));

        let expected = TextCell::paint_str(Red.bold(), "1000");
        assert_eq!(expected, user.render(&TestColours, &users, UserFormat::Numeric));
    }

    // The current user with no name record falls back to the numeric ID.
    #[test]
    fn unnamed() {
        let users = MockUsers::with_current_uid(1000);

        let user = f::User(1000);
        let expected = TextCell::paint_str(Red.bold(), "1000");
        assert_eq!(expected, user.render(&TestColours, &users, UserFormat::Name));
        assert_eq!(expected, user.render(&TestColours, &users, UserFormat::Numeric));
    }

    // A different user gets the “someone else” style.
    #[test]
    fn different_named() {
        let mut users = MockUsers::with_current_uid(0);
        users.add_user(User::new(1000, "enoch", 100));

        let user = f::User(1000);
        let expected = TextCell::paint_str(Blue.underline(), "enoch");
        assert_eq!(expected, user.render(&TestColours, &users, UserFormat::Name));
    }

    #[test]
    fn different_unnamed() {
        let user = f::User(1000);
        let expected = TextCell::paint_str(Blue.underline(), "1000");
        assert_eq!(expected, user.render(&TestColours, &MockUsers::with_current_uid(0), UserFormat::Numeric));
    }

    // IDs above i32::MAX must still render correctly.
    #[test]
    fn overflow() {
        let user = f::User(2_147_483_648);
        let expected = TextCell::paint_str(Blue.underline(), "2147483648");
        assert_eq!(expected, user.render(&TestColours, &MockUsers::with_current_uid(0), UserFormat::Numeric));
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/render/octal.rs | src/output/render/octal.rs | use ansi_term::Style;
use crate::fs::fields as f;
use crate::output::cell::TextCell;
impl f::OctalPermissions {
    /// Folds three permission bits into one octal digit (r=4, w=2, x=1).
    fn bits_to_octal(r: bool, w: bool, x: bool) -> u8 {
        (u8::from(r) << 2) | (u8::from(w) << 1) | u8::from(x)
    }

    /// Renders the permissions as a four-digit octal string: the special
    /// bits (setuid/setgid/sticky) first, then user, group, and other.
    pub fn render(&self, style: Style) -> TextCell {
        let p = &self.permissions;

        let digits = format!(
            "{}{}{}{}",
            Self::bits_to_octal(p.setuid, p.setgid, p.sticky),
            Self::bits_to_octal(p.user_read, p.user_write, p.user_execute),
            Self::bits_to_octal(p.group_read, p.group_write, p.group_execute),
            Self::bits_to_octal(p.other_read, p.other_write, p.other_execute),
        );

        TextCell::paint(style, digits)
    }
}
#[cfg(test)]
pub mod test {
    use crate::output::cell::TextCell;
    use crate::fs::fields as f;
    use ansi_term::Colour::*;

    /// Every test here renders in the same style, so wrap the boilerplate
    /// of building an `OctalPermissions` and painting it in bold purple.
    fn render_octal(bits: f::Permissions) -> TextCell {
        f::OctalPermissions { permissions: bits }.render(Purple.bold())
    }

    #[test]
    fn normal_folder() {
        // rwxr-xr-x with no special bits → 0755.
        let bits = f::Permissions {
            user_read: true, user_write: true, user_execute: true, setuid: false,
            group_read: true, group_write: false, group_execute: true, setgid: false,
            other_read: true, other_write: false, other_execute: true, sticky: false,
        };
        assert_eq!(TextCell::paint_str(Purple.bold(), "0755"), render_octal(bits));
    }

    #[test]
    fn normal_file() {
        // rw-r--r-- → 0644.
        let bits = f::Permissions {
            user_read: true, user_write: true, user_execute: false, setuid: false,
            group_read: true, group_write: false, group_execute: false, setgid: false,
            other_read: true, other_write: false, other_execute: false, sticky: false,
        };
        assert_eq!(TextCell::paint_str(Purple.bold(), "0644"), render_octal(bits));
    }

    #[test]
    fn secret_file() {
        // rw------- → 0600.
        let bits = f::Permissions {
            user_read: true, user_write: true, user_execute: false, setuid: false,
            group_read: false, group_write: false, group_execute: false, setgid: false,
            other_read: false, other_write: false, other_execute: false, sticky: false,
        };
        assert_eq!(TextCell::paint_str(Purple.bold(), "0600"), render_octal(bits));
    }

    #[test]
    fn sticky1() {
        // Despite the name, this exercises the setuid bit (octal 4000).
        let bits = f::Permissions {
            user_read: true, user_write: true, user_execute: true, setuid: true,
            group_read: true, group_write: true, group_execute: true, setgid: false,
            other_read: true, other_write: true, other_execute: true, sticky: false,
        };
        assert_eq!(TextCell::paint_str(Purple.bold(), "4777"), render_octal(bits));
    }

    #[test]
    fn sticky2() {
        // Despite the name, this exercises the setgid bit (octal 2000).
        let bits = f::Permissions {
            user_read: true, user_write: true, user_execute: true, setuid: false,
            group_read: true, group_write: true, group_execute: true, setgid: true,
            other_read: true, other_write: true, other_execute: true, sticky: false,
        };
        assert_eq!(TextCell::paint_str(Purple.bold(), "2777"), render_octal(bits));
    }

    #[test]
    fn sticky3() {
        // The actual sticky bit (octal 1000).
        let bits = f::Permissions {
            user_read: true, user_write: true, user_execute: true, setuid: false,
            group_read: true, group_write: true, group_execute: true, setgid: false,
            other_read: true, other_write: true, other_execute: true, sticky: true,
        };
        assert_eq!(TextCell::paint_str(Purple.bold(), "1777"), render_octal(bits));
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/render/mod.rs | src/output/render/mod.rs | mod blocks;
pub use self::blocks::Colours as BlocksColours;
mod filetype;
pub use self::filetype::Colours as FiletypeColours;
mod git;
pub use self::git::Colours as GitColours;
#[cfg(unix)]
mod groups;
#[cfg(unix)]
pub use self::groups::Colours as GroupColours;
mod inode;
// inode uses just one colour
mod links;
pub use self::links::Colours as LinksColours;
mod permissions;
pub use self::permissions::Colours as PermissionsColours;
mod size;
pub use self::size::Colours as SizeColours;
mod times;
pub use self::times::Render as TimeRender;
// times does too
#[cfg(unix)]
mod users;
#[cfg(unix)]
pub use self::users::Colours as UserColours;
mod octal;
// octal uses just one colour
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/render/links.rs | src/output/render/links.rs | use ansi_term::Style;
use locale::Numeric as NumericLocale;
use crate::fs::fields as f;
use crate::output::cell::TextCell;
impl f::Links {
    /// Formats the hard-link count with the given locale, using the
    /// highlight style when `multiple` is set.
    pub fn render<C: Colours>(&self, colours: &C, numeric: &NumericLocale) -> TextCell {
        let contents = numeric.format_int(self.count);

        let style = if self.multiple {
            colours.multi_link_file()
        }
        else {
            colours.normal()
        };

        TextCell::paint(style, contents)
    }
}
/// The styles used to render a hard-link count.
pub trait Colours {
    /// The style used for an ordinary link count (when `multiple` is false).
    fn normal(&self) -> Style;
    /// The style used to highlight a regular file that has more than one
    /// hard link (when `multiple` is true).
    fn multi_link_file(&self) -> Style;
}
#[cfg(test)]
pub mod test {
    use super::Colours;
    use crate::output::cell::{TextCell, DisplayWidth};
    use crate::fs::fields as f;

    use ansi_term::Colour::*;
    use ansi_term::Style;
    use locale;

    struct TestColours;

    impl Colours for TestColours {
        fn normal(&self)          -> Style { Blue.normal() }
        fn multi_link_file(&self) -> Style { Blue.on(Red) }
    }

    #[test]
    fn regular_file() {
        // A single link is painted with the plain style.
        let links = f::Links { count: 1, multiple: false };

        let expected = TextCell {
            width:    DisplayWidth::from(1),
            contents: vec![ Blue.paint("1") ].into(),
        };

        assert_eq!(expected, links.render(&TestColours, &locale::Numeric::english()));
    }

    #[test]
    fn regular_directory() {
        // Not flagged as `multiple`, so no highlight; the count is still
        // grouped by the locale ("3,005").
        let links = f::Links { count: 3005, multiple: false };

        let expected = TextCell {
            width:    DisplayWidth::from(5),
            contents: vec![ Blue.paint("3,005") ].into(),
        };

        assert_eq!(expected, links.render(&TestColours, &locale::Numeric::english()));
    }

    #[test]
    fn popular_file() {
        // A file flagged with multiple links gets the highlight style.
        let links = f::Links { count: 3005, multiple: true };

        let expected = TextCell {
            width:    DisplayWidth::from(5),
            contents: vec![ Blue.on(Red).paint("3,005") ].into(),
        };

        assert_eq!(expected, links.render(&TestColours, &locale::Numeric::english()));
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/render/groups.rs | src/output/render/groups.rs | use ansi_term::Style;
use users::{Users, Groups};
use crate::fs::fields as f;
use crate::output::cell::TextCell;
use crate::output::table::UserFormat;
impl f::Group {
    /// Renders this group as a name or a raw gid (depending on `format`),
    /// styled as "yours" when the current user belongs to the group.
    pub fn render<C: Colours, U: Users+Groups>(self, colours: &C, users: &U, format: UserFormat) -> TextCell {
        use users::os::unix::GroupExt;

        // An unknown gid always falls back to its numeric form, styled as
        // not belonging to the current user.
        let group = match users.get_group_by_gid(self.0) {
            Some(g) => (*g).clone(),
            None    => return TextCell::paint(colours.not_yours(), self.0.to_string()),
        };

        // The group counts as "yours" if it's the current user's primary
        // group, or if the current user appears in its member list.
        let yours = users
            .get_user_by_uid(users.get_current_uid())
            .map_or(false, |me| {
                me.primary_group_id() == group.gid()
                    || group.members().iter().any(|u| u == me.name())
            });

        let style = if yours { colours.yours() } else { colours.not_yours() };

        let group_name = match format {
            UserFormat::Name    => group.name().to_string_lossy().into(),
            UserFormat::Numeric => group.gid().to_string(),
        };

        TextCell::paint(style, group_name)
    }
}
/// The styles used to render a group name or ID.
pub trait Colours {
    /// The style used when the current user belongs to the group (it's
    /// their primary group, or they are listed as a member).
    fn yours(&self) -> Style;
    /// The style used for every other group, and for unknown gids.
    fn not_yours(&self) -> Style;
}
#[cfg(test)]
#[allow(unused_results)]
pub mod test {
    use super::Colours;
    use crate::fs::fields as f;
    use crate::output::cell::TextCell;
    use crate::output::table::UserFormat;
    use users::{User, Group};
    use users::mock::MockUsers;
    use users::os::unix::GroupExt;
    use ansi_term::Colour::*;
    use ansi_term::Style;
    struct TestColours;
    impl Colours for TestColours {
        // Distinct fixed colours make it easy to assert which style was used.
        fn yours(&self) -> Style { Fixed(80).normal() }
        fn not_yours(&self) -> Style { Fixed(81).normal() }
    }
    // A known group that isn't ours renders by name or numerically,
    // depending on the requested format.
    #[test]
    fn named() {
        let mut users = MockUsers::with_current_uid(1000);
        users.add_group(Group::new(100, "folk"));
        let group = f::Group(100);
        let expected = TextCell::paint_str(Fixed(81).normal(), "folk");
        assert_eq!(expected, group.render(&TestColours, &users, UserFormat::Name));
        let expected = TextCell::paint_str(Fixed(81).normal(), "100");
        assert_eq!(expected, group.render(&TestColours, &users, UserFormat::Numeric));
    }
    // An unknown gid falls back to the numeric form even when a name was
    // asked for.
    #[test]
    fn unnamed() {
        let users = MockUsers::with_current_uid(1000);
        let group = f::Group(100);
        let expected = TextCell::paint_str(Fixed(81).normal(), "100");
        assert_eq!(expected, group.render(&TestColours, &users, UserFormat::Name));
        assert_eq!(expected, group.render(&TestColours, &users, UserFormat::Numeric));
    }
    // The current user's primary group gets the "yours" style.
    #[test]
    fn primary() {
        let mut users = MockUsers::with_current_uid(2);
        users.add_user(User::new(2, "eve", 100));
        users.add_group(Group::new(100, "folk"));
        let group = f::Group(100);
        let expected = TextCell::paint_str(Fixed(80).normal(), "folk");
        assert_eq!(expected, group.render(&TestColours, &users, UserFormat::Name))
    }
    // Being in the group's member list also counts as "yours", even when
    // the user's primary group is a different one.
    #[test]
    fn secondary() {
        let mut users = MockUsers::with_current_uid(2);
        users.add_user(User::new(2, "eve", 666));
        let test_group = Group::new(100, "folk").add_member("eve");
        users.add_group(test_group);
        let group = f::Group(100);
        let expected = TextCell::paint_str(Fixed(80).normal(), "folk");
        assert_eq!(expected, group.render(&TestColours, &users, UserFormat::Name))
    }
    // A gid above i32::MAX must still render as its full value.
    #[test]
    fn overflow() {
        let group = f::Group(2_147_483_648);
        let expected = TextCell::paint_str(Fixed(81).normal(), "2147483648");
        assert_eq!(expected, group.render(&TestColours, &MockUsers::with_current_uid(0), UserFormat::Numeric));
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/render/blocks.rs | src/output/render/blocks.rs | use ansi_term::Style;
use crate::fs::fields as f;
use crate::output::cell::TextCell;
impl f::Blocks {
    /// Renders the block count as a painted cell, or a blank cell when
    /// there is no block count for this file.
    pub fn render<C: Colours>(&self, colours: &C) -> TextCell {
        if let Self::Some(count) = self {
            TextCell::paint(colours.block_count(), count.to_string())
        }
        else {
            TextCell::blank(colours.no_blocks())
        }
    }
}
/// The styles used to render a file's block count.
pub trait Colours {
    /// The style for a present block count.
    fn block_count(&self) -> Style;
    /// The style for the blank cell shown when there is no block count.
    fn no_blocks(&self) -> Style;
}
#[cfg(test)]
pub mod test {
    use ansi_term::Style;
    use ansi_term::Colour::*;

    use super::Colours;
    use crate::output::cell::TextCell;
    use crate::fs::fields as f;

    struct TestColours;

    impl Colours for TestColours {
        fn block_count(&self) -> Style { Red.blink() }
        fn no_blocks(&self)   -> Style { Green.italic() }
    }

    #[test]
    fn blocklessness() {
        // A missing block count renders as a blank cell in the
        // "no blocks" style.
        assert_eq!(TextCell::blank(Green.italic()),
                   f::Blocks::None.render(&TestColours));
    }

    #[test]
    fn blockfulity() {
        // A known count is painted as its plain decimal string.
        assert_eq!(TextCell::paint_str(Red.blink(), "3005"),
                   f::Blocks::Some(3005).render(&TestColours));
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
ogham/exa | https://github.com/ogham/exa/blob/3d1edbb47052cb416ef9478106c3907586da5150/src/output/render/size.rs | src/output/render/size.rs | use ansi_term::Style;
use locale::Numeric as NumericLocale;
use number_prefix::Prefix;
use crate::fs::fields as f;
use crate::output::cell::{TextCell, DisplayWidth};
use crate::output::table::SizeFormat;
impl f::Size {
    /// Renders a file size cell: blank when there is no size, a
    /// "major,minor" pair for device files, and otherwise the byte count
    /// formatted according to `size_format` and the locale.
    pub fn render<C: Colours>(self, colours: &C, size_format: SizeFormat, numerics: &NumericLocale) -> TextCell {
        use number_prefix::NumberPrefix;
        let size = match self {
            Self::Some(s) => s,
            Self::None => return TextCell::blank(colours.no_size()),
            Self::DeviceIDs(ref ids) => return ids.render(colours),
        };
        let result = match size_format {
            SizeFormat::DecimalBytes => NumberPrefix::decimal(size as f64),
            SizeFormat::BinaryBytes => NumberPrefix::binary(size as f64),
            SizeFormat::JustBytes => {
                // Use the binary prefix to select a style.
                let prefix = match NumberPrefix::binary(size as f64) {
                    NumberPrefix::Standalone(_) => None,
                    NumberPrefix::Prefixed(p, _) => Some(p),
                };
                // But format the number directly using the locale.
                let string = numerics.format_int(size);
                return TextCell::paint(colours.size(prefix), string);
            }
        };
        // A Standalone result means the size is below the first prefix
        // threshold, so it gets no unit suffix at all.
        let (prefix, n) = match result {
            NumberPrefix::Standalone(b) => return TextCell::paint(colours.size(None), numerics.format_int(b)),
            NumberPrefix::Prefixed(p, n) => (p, n),
        };
        let symbol = prefix.symbol();
        // One decimal place for scaled values below 10 ("2.1"); whole
        // numbers from 10 upwards.
        let number = if n < 10_f64 {
            numerics.format_float(n, 1)
        } else {
            numerics.format_int(n.round() as isize)
        };
        TextCell {
            // symbol is guaranteed to be ASCII since unit prefixes are hardcoded.
            width: DisplayWidth::from(&*number) + symbol.len(),
            contents: vec![
                colours.size(Some(prefix)).paint(number),
                colours.unit(Some(prefix)).paint(symbol),
            ].into(),
        }
    }
}
impl f::DeviceIDs {
    /// Renders a device's major and minor IDs as "major,minor", each piece
    /// in its own style.
    fn render<C: Colours>(self, colours: &C) -> TextCell {
        let major = self.major.to_string();
        let minor = self.minor.to_string();

        // Both IDs are ASCII digit strings, so the display width is just
        // their combined length plus one for the comma.
        let width = DisplayWidth::from(major.len() + 1 + minor.len());

        TextCell {
            width,
            contents: vec![
                colours.major().paint(major),
                colours.comma().paint(","),
                colours.minor().paint(minor),
            ].into(),
        }
    }
}
/// The styles used to render file sizes and device IDs.
pub trait Colours {
    /// Style for the numeric part of a size; the prefix (if any) is passed
    /// so implementations can vary the style by magnitude.
    fn size(&self, prefix: Option<Prefix>) -> Style;
    /// Style for the unit suffix ("M", "Gi", …).
    fn unit(&self, prefix: Option<Prefix>) -> Style;
    /// Style for the blank cell shown when a file has no size.
    fn no_size(&self) -> Style;
    /// Style for a device's major ID.
    fn major(&self) -> Style;
    /// Style for the comma between the major and minor IDs.
    fn comma(&self) -> Style;
    /// Style for a device's minor ID.
    fn minor(&self) -> Style;
}
#[cfg(test)]
pub mod test {
    use super::Colours;
    use crate::output::cell::{TextCell, DisplayWidth};
    use crate::output::table::SizeFormat;
    use crate::fs::fields as f;
    use locale::Numeric as NumericLocale;
    use ansi_term::Colour::*;
    use ansi_term::Style;
    use number_prefix::Prefix;
    struct TestColours;
    impl Colours for TestColours {
        // The size/unit styles ignore the prefix so expected cells stay
        // predictable regardless of magnitude.
        fn size(&self, _prefix: Option<Prefix>) -> Style { Fixed(66).normal() }
        fn unit(&self, _prefix: Option<Prefix>) -> Style { Fixed(77).bold() }
        fn no_size(&self) -> Style { Black.italic() }
        fn major(&self) -> Style { Blue.on(Red) }
        fn comma(&self) -> Style { Green.italic() }
        fn minor(&self) -> Style { Cyan.on(Yellow) }
    }
    // A size of None (e.g. a directory) renders as a blank cell.
    #[test]
    fn directory() {
        let directory = f::Size::None;
        let expected = TextCell::blank(Black.italic());
        assert_eq!(expected, directory.render(&TestColours, SizeFormat::JustBytes, &NumericLocale::english()))
    }
    // 2,100,000 bytes with decimal (powers-of-1000) prefixes is "2.1" + "M".
    #[test]
    fn file_decimal() {
        let directory = f::Size::Some(2_100_000);
        let expected = TextCell {
            width: DisplayWidth::from(4),
            contents: vec![
                Fixed(66).paint("2.1"),
                Fixed(77).bold().paint("M"),
            ].into(),
        };
        assert_eq!(expected, directory.render(&TestColours, SizeFormat::DecimalBytes, &NumericLocale::english()))
    }
    // 1,048,576 bytes with binary (powers-of-1024) prefixes is exactly "1.0" + "Mi".
    #[test]
    fn file_binary() {
        let directory = f::Size::Some(1_048_576);
        let expected = TextCell {
            width: DisplayWidth::from(5),
            contents: vec![
                Fixed(66).paint("1.0"),
                Fixed(77).bold().paint("Mi"),
            ].into(),
        };
        assert_eq!(expected, directory.render(&TestColours, SizeFormat::BinaryBytes, &NumericLocale::english()))
    }
    // JustBytes prints the raw count, grouped by the locale, with no unit.
    #[test]
    fn file_bytes() {
        let directory = f::Size::Some(1_048_576);
        let expected = TextCell {
            width: DisplayWidth::from(9),
            contents: vec![
                Fixed(66).paint("1,048,576"),
            ].into(),
        };
        assert_eq!(expected, directory.render(&TestColours, SizeFormat::JustBytes, &NumericLocale::english()))
    }
    // Device files render as "major,minor" in three separately styled pieces.
    #[test]
    fn device_ids() {
        let directory = f::Size::DeviceIDs(f::DeviceIDs { major: 10, minor: 80 });
        let expected = TextCell {
            width: DisplayWidth::from(5),
            contents: vec![
                Blue.on(Red).paint("10"),
                Green.italic().paint(","),
                Cyan.on(Yellow).paint("80"),
            ].into(),
        };
        assert_eq!(expected, directory.render(&TestColours, SizeFormat::JustBytes, &NumericLocale::english()))
    }
}
| rust | MIT | 3d1edbb47052cb416ef9478106c3907586da5150 | 2026-01-04T15:37:45.366656Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/lib.rs | axum/src/lib.rs | //! axum is a web application framework that focuses on ergonomics and modularity.
//!
//! # High-level features
//!
//! - Route requests to handlers with a macro-free API.
//! - Declaratively parse requests using extractors.
//! - Simple and predictable error handling model.
//! - Generate responses with minimal boilerplate.
//! - Take full advantage of the [`tower`] and [`tower-http`] ecosystem of
//! middleware, services, and utilities.
//!
//! In particular, the last point is what sets `axum` apart from other frameworks.
//! `axum` doesn't have its own middleware system but instead uses
//! [`tower::Service`]. This means `axum` gets timeouts, tracing, compression,
//! authorization, and more, for free. It also enables you to share middleware with
//! applications written using [`hyper`] or [`tonic`].
//!
//! # Compatibility
//!
//! axum is designed to work with [tokio] and [hyper]. Runtime and
//! transport layer independence is not a goal, at least for the time being.
//!
//! # Example
//!
//! The "Hello, World!" of axum is:
//!
//! ```rust,no_run
//! use axum::{
//! routing::get,
//! Router,
//! };
//!
//! #[tokio::main]
//! async fn main() {
//! // build our application with a single route
//! let app = Router::new().route("/", get(|| async { "Hello, World!" }));
//!
//! // run our app with hyper, listening globally on port 3000
//! let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await.unwrap();
//! axum::serve(listener, app).await;
//! }
//! ```
//!
//! Note using `#[tokio::main]` requires you enable tokio's `macros` and `rt-multi-thread` features
//! or just `full` to enable all features (`cargo add tokio --features macros,rt-multi-thread`).
//!
//! # Routing
//!
//! [`Router`] is used to set up which paths go to which services:
//!
//! ```rust
//! use axum::{Router, routing::get};
//!
//! // our router
//! let app = Router::new()
//! .route("/", get(root))
//! .route("/foo", get(get_foo).post(post_foo))
//! .route("/foo/bar", get(foo_bar));
//!
//! // which calls one of these handlers
//! async fn root() {}
//! async fn get_foo() {}
//! async fn post_foo() {}
//! async fn foo_bar() {}
//! # let _: Router = app;
//! ```
//!
//! See [`Router`] for more details on routing.
//!
//! # Handlers
//!
#![doc = include_str!("docs/handlers_intro.md")]
//!
//! See [`handler`](crate::handler) for more details on handlers.
//!
//! # Extractors
//!
//! An extractor is a type that implements [`FromRequest`] or [`FromRequestParts`]. Extractors are
//! how you pick apart the incoming request to get the parts your handler needs.
//!
//! ```rust
//! use axum::extract::{Path, Query, Json};
//! use std::collections::HashMap;
//!
//! // `Path` gives you the path parameters and deserializes them.
//! async fn path(Path(user_id): Path<u32>) {}
//!
//! // `Query` gives you the query parameters and deserializes them.
//! async fn query(Query(params): Query<HashMap<String, String>>) {}
//!
//! // Buffer the request body and deserialize it as JSON into a
//! // `serde_json::Value`. `Json` supports any type that implements
//! // `serde::Deserialize`.
//! async fn json(Json(payload): Json<serde_json::Value>) {}
//! ```
//!
//! See [`extract`](crate::extract) for more details on extractors.
//!
//! # Responses
//!
//! Anything that implements [`IntoResponse`] can be returned from handlers.
//!
//! ```rust,no_run
//! use axum::{
//! body::Body,
//! routing::get,
//! response::Json,
//! Router,
//! };
//! use serde_json::{Value, json};
//!
//! // `&'static str` becomes a `200 OK` with `content-type: text/plain; charset=utf-8`
//! async fn plain_text() -> &'static str {
//! "foo"
//! }
//!
//! // `Json` gives a content-type of `application/json` and works with any type
//! // that implements `serde::Serialize`
//! async fn json() -> Json<Value> {
//! Json(json!({ "data": 42 }))
//! }
//!
//! let app = Router::new()
//! .route("/plain_text", get(plain_text))
//! .route("/json", get(json));
//! # let _: Router = app;
//! ```
//!
//! See [`response`](crate::response) for more details on building responses.
//!
//! # Error handling
//!
//! axum aims to have a simple and predictable error handling model. That means
//! it is simple to convert errors into responses and you are guaranteed that
//! all errors are handled.
//!
//! See [`error_handling`] for more details on axum's
//! error handling model and how to handle errors gracefully.
//!
//! # Middleware
//!
//! There are several different ways to write middleware for axum. See
//! [`middleware`] for more details.
//!
//! # Sharing state with handlers
//!
//! It is common to share some state between handlers. For example, a
//! pool of database connections or clients to other services may need to
//! be shared.
//!
//! The four most common ways of doing that are:
//!
//! - Using the [`State`] extractor
//! - Using request extensions
//! - Using closure captures
//! - Using task-local variables
//!
//! ## Using the [`State`] extractor
//!
//! ```rust,no_run
//! use axum::{
//! extract::State,
//! routing::get,
//! Router,
//! };
//! use std::sync::Arc;
//!
//! struct AppState {
//! // ...
//! }
//!
//! let shared_state = Arc::new(AppState { /* ... */ });
//!
//! let app = Router::new()
//! .route("/", get(handler))
//! .with_state(shared_state);
//!
//! async fn handler(
//! State(state): State<Arc<AppState>>,
//! ) {
//! // ...
//! }
//! # let _: Router = app;
//! ```
//!
//! You should prefer using [`State`] if possible since it's more type safe. The downside is that
//! it's less dynamic than task-local variables and request extensions.
//!
//! See [`State`] for more details about accessing state.
//!
//! ## Using request extensions
//!
//! Another way to share state with handlers is using [`Extension`] as
//! layer and extractor:
//!
//! ```rust,no_run
//! use axum::{
//! extract::Extension,
//! routing::get,
//! Router,
//! };
//! use std::sync::Arc;
//!
//! struct AppState {
//! // ...
//! }
//!
//! let shared_state = Arc::new(AppState { /* ... */ });
//!
//! let app = Router::new()
//! .route("/", get(handler))
//! .layer(Extension(shared_state));
//!
//! async fn handler(
//! Extension(state): Extension<Arc<AppState>>,
//! ) {
//! // ...
//! }
//! # let _: Router = app;
//! ```
//!
//! The downside to this approach is that you'll get runtime errors
//! (specifically a `500 Internal Server Error` response) if you try and extract
//! an extension that doesn't exist, perhaps because you forgot to add the
//! middleware or because you're extracting the wrong type.
//!
//! ## Using closure captures
//!
//! State can also be passed directly to handlers using closure captures:
//!
//! ```rust,no_run
//! use axum::{
//! Json,
//! extract::{Extension, Path},
//! routing::{get, post},
//! Router,
//! };
//! use std::sync::Arc;
//! use serde::Deserialize;
//!
//! struct AppState {
//! // ...
//! }
//!
//! let shared_state = Arc::new(AppState { /* ... */ });
//!
//! let app = Router::new()
//! .route(
//! "/users",
//! post({
//! let shared_state = Arc::clone(&shared_state);
//! move |body| create_user(body, shared_state)
//! }),
//! )
//! .route(
//! "/users/{id}",
//! get({
//! let shared_state = Arc::clone(&shared_state);
//! move |path| get_user(path, shared_state)
//! }),
//! );
//!
//! async fn get_user(Path(user_id): Path<String>, state: Arc<AppState>) {
//! // ...
//! }
//!
//! async fn create_user(Json(payload): Json<CreateUserPayload>, state: Arc<AppState>) {
//! // ...
//! }
//!
//! #[derive(Deserialize)]
//! struct CreateUserPayload {
//! // ...
//! }
//! # let _: Router = app;
//! ```
//!
//! The downside to this approach is that it's the most verbose approach.
//!
//! ## Using task-local variables
//!
//! This also allows to share state with `IntoResponse` implementations:
//!
//! ```rust,no_run
//! use axum::{
//! extract::Request,
//! http::{header, StatusCode},
//! middleware::{self, Next},
//! response::{IntoResponse, Response},
//! routing::get,
//! Router,
//! };
//! use tokio::task_local;
//!
//! #[derive(Clone)]
//! struct CurrentUser {
//! name: String,
//! }
//! task_local! {
//! pub static USER: CurrentUser;
//! }
//!
//! async fn auth(req: Request, next: Next) -> Result<Response, StatusCode> {
//! let auth_header = req
//! .headers()
//! .get(header::AUTHORIZATION)
//! .and_then(|header| header.to_str().ok())
//! .ok_or(StatusCode::UNAUTHORIZED)?;
//! if let Some(current_user) = authorize_current_user(auth_header).await {
//! // State is setup here in the middleware
//! Ok(USER.scope(current_user, next.run(req)).await)
//! } else {
//! Err(StatusCode::UNAUTHORIZED)
//! }
//! }
//! async fn authorize_current_user(auth_token: &str) -> Option<CurrentUser> {
//! Some(CurrentUser {
//! name: auth_token.to_string(),
//! })
//! }
//!
//! struct UserResponse;
//!
//! impl IntoResponse for UserResponse {
//! fn into_response(self) -> Response {
//! // State is accessed here in the IntoResponse implementation
//! let current_user = USER.with(|u| u.clone());
//! (StatusCode::OK, current_user.name).into_response()
//! }
//! }
//!
//! async fn handler() -> UserResponse {
//! UserResponse
//! }
//!
//! let app: Router = Router::new()
//! .route("/", get(handler))
//! .route_layer(middleware::from_fn(auth));
//! ```
//!
//! The main downside to this approach is that it only works when the async executor being used
//! has the concept of task-local variables. The example above uses
//! [tokio's `task_local` macro](https://docs.rs/tokio/1/tokio/macro.task_local.html).
//! smol does not yet offer equivalent functionality at the time of writing (see
//! [this GitHub issue](https://github.com/smol-rs/async-executor/issues/139)).
//!
//! # Building integrations for axum
//!
//! Libraries authors that want to provide [`FromRequest`], [`FromRequestParts`], or
//! [`IntoResponse`] implementations should depend on the [`axum-core`] crate, instead of `axum` if
//! possible. [`axum-core`] contains core types and traits and is less likely to receive breaking
//! changes.
//!
//! # Required dependencies
//!
//! To use axum there are a few dependencies you have to pull in as well:
//!
//! ```toml
//! [dependencies]
//! axum = "<latest-version>"
//! tokio = { version = "<latest-version>", features = ["full"] }
//! tower = "<latest-version>"
//! ```
//!
//! The `"full"` feature for tokio isn't necessary but it's the easiest way to get started.
//!
//! Tower isn't strictly necessary either but helpful for testing. See the
//! testing example in the repo to learn more about testing axum apps.
//!
//! # Examples
//!
//! The axum repo contains [a number of examples][examples] that show how to put all the
//! pieces together.
//!
//! # Feature flags
//!
//! axum uses a set of [feature flags] to reduce the amount of compiled and
//! optional dependencies.
//!
//! The following optional features are available:
//!
//! Name | Description | Default?
//! ---|---|---
//! `http1` | Enables hyper's `http1` feature | <span role="img" aria-label="Default feature">✔</span>
//! `http2` | Enables hyper's `http2` feature |
//! `json` | Enables the [`Json`] type and some similar convenience functionality | <span role="img" aria-label="Default feature">✔</span>
//! `macros` | Enables optional utility macros |
//! `matched-path` | Enables capturing of every request's router path and the [`MatchedPath`] extractor | <span role="img" aria-label="Default feature">✔</span>
//! `multipart` | Enables parsing `multipart/form-data` requests with [`Multipart`] |
//! `original-uri` | Enables capturing of every request's original URI and the [`OriginalUri`] extractor | <span role="img" aria-label="Default feature">✔</span>
//! `tokio` | Enables `tokio` as a dependency and `axum::serve`, `SSE` and `extract::connect_info` types. | <span role="img" aria-label="Default feature">✔</span>
//! `tower-log` | Enables `tower`'s `log` feature | <span role="img" aria-label="Default feature">✔</span>
//! `tracing` | Log rejections from built-in extractors | <span role="img" aria-label="Default feature">✔</span>
//! `ws` | Enables WebSockets support via [`extract::ws`] |
//! `form` | Enables the `Form` extractor | <span role="img" aria-label="Default feature">✔</span>
//! `query` | Enables the `Query` extractor | <span role="img" aria-label="Default feature">✔</span>
//!
//! [`MatchedPath`]: crate::extract::MatchedPath
//! [`Multipart`]: crate::extract::Multipart
//! [`OriginalUri`]: crate::extract::OriginalUri
//! [`tower`]: https://crates.io/crates/tower
//! [`tower-http`]: https://crates.io/crates/tower-http
//! [`tokio`]: https://crates.io/crates/tokio
//! [`hyper`]: https://crates.io/crates/hyper
//! [`tonic`]: https://crates.io/crates/tonic
//! [feature flags]: https://doc.rust-lang.org/cargo/reference/features.html#the-features-section
//! [`IntoResponse`]: crate::response::IntoResponse
//! [`Timeout`]: tower::timeout::Timeout
//! [examples]: https://github.com/tokio-rs/axum/tree/main/examples
//! [`Router::merge`]: crate::routing::Router::merge
//! [`Service`]: tower::Service
//! [`Service::poll_ready`]: tower::Service::poll_ready
//! [`Service`'s]: tower::Service
//! [`tower::Service`]: tower::Service
//! [tower-guides]: https://github.com/tower-rs/tower/tree/master/guides
//! [`Uuid`]: https://docs.rs/uuid/latest/uuid/
//! [`FromRequest`]: crate::extract::FromRequest
//! [`FromRequestParts`]: crate::extract::FromRequestParts
//! [`HeaderMap`]: http::header::HeaderMap
//! [`Request`]: http::Request
//! [customize-extractor-error]: https://github.com/tokio-rs/axum/blob/main/examples/customize-extractor-error/src/main.rs
//! [axum-macros]: https://docs.rs/axum-macros
//! [`debug_handler`]: https://docs.rs/axum-macros/latest/axum_macros/attr.debug_handler.html
//! [`Handler`]: crate::handler::Handler
//! [`Infallible`]: std::convert::Infallible
//! [load shed]: tower::load_shed
//! [`axum-core`]: https://crates.io/crates/axum-core
//! [`State`]: crate::extract::State
#![cfg_attr(docsrs, feature(doc_cfg))]
#![cfg_attr(test, allow(clippy::float_cmp))]
#![cfg_attr(not(test), warn(clippy::print_stdout, clippy::dbg_macro))]
#[macro_use]
pub(crate) mod macros;
mod boxed;
mod extension;
#[cfg(feature = "form")]
mod form;
#[cfg(feature = "json")]
mod json;
mod service_ext;
mod util;
pub mod body;
pub mod error_handling;
pub mod extract;
pub mod handler;
pub mod middleware;
pub mod response;
pub mod routing;
#[cfg(all(feature = "tokio", any(feature = "http1", feature = "http2")))]
pub mod serve;
#[cfg(any(test, feature = "__private"))]
#[allow(missing_docs, missing_debug_implementations, clippy::print_stdout)]
#[doc(hidden)]
pub mod test_helpers;
#[doc(no_inline)]
pub use http;
#[doc(inline)]
pub use self::extension::Extension;
#[doc(inline)]
#[cfg(feature = "json")]
pub use self::json::Json;
#[doc(inline)]
pub use self::routing::Router;
#[doc(inline)]
#[cfg(feature = "form")]
pub use self::form::Form;
#[doc(inline)]
pub use axum_core::{BoxError, Error, RequestExt, RequestPartsExt};
#[cfg(feature = "macros")]
pub use axum_macros::{debug_handler, debug_middleware};
#[cfg(all(feature = "tokio", any(feature = "http1", feature = "http2")))]
#[doc(inline)]
pub use self::serve::serve;
pub use self::service_ext::ServiceExt;
#[cfg(test)]
use axum_macros::__private_axum_test as test;
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/form.rs | axum/src/form.rs | use crate::extract::Request;
use crate::extract::{rejection::*, FromRequest, RawForm};
use axum_core::response::{IntoResponse, Response};
use axum_core::RequestExt;
use http::header::CONTENT_TYPE;
use http::StatusCode;
use serde_core::{de::DeserializeOwned, Serialize};
/// URL encoded extractor and response.
///
/// # As extractor
///
/// If used as an extractor, `Form` will deserialize form data from the request,
/// specifically:
///
/// - If the request has a method of `GET` or `HEAD`, the form data will be read
/// from the query string (same as with [`Query`])
/// - If the request has a different method, the form will be read from the body
/// of the request. It must have a `content-type` of
/// `application/x-www-form-urlencoded` for this to work. If you want to parse
/// `multipart/form-data` request bodies, use [`Multipart`] instead.
///
/// This matches how HTML forms are sent by browsers by default.
/// In both cases, the inner type `T` must implement [`serde::Deserialize`].
///
/// ⚠️ Since parsing form data might require consuming the request body, the `Form` extractor must be
/// *last* if there are multiple extractors in a handler. See ["the order of
/// extractors"][order-of-extractors]
///
/// [order-of-extractors]: crate::extract#the-order-of-extractors
///
/// ```rust
/// use axum::Form;
/// use serde::Deserialize;
///
/// #[derive(Deserialize)]
/// struct SignUp {
/// username: String,
/// password: String,
/// }
///
/// async fn accept_form(Form(sign_up): Form<SignUp>) {
/// // ...
/// }
/// ```
///
/// # As response
///
/// `Form` can also be used to encode any type that implements
/// [`serde::Serialize`] as `application/x-www-form-urlencoded`
///
/// ```rust
/// use axum::Form;
/// use serde::Serialize;
///
/// #[derive(Serialize)]
/// struct Payload {
/// value: String,
/// }
///
/// async fn handler() -> Form<Payload> {
/// Form(Payload { value: "foo".to_owned() })
/// }
/// ```
///
/// [`Query`]: crate::extract::Query
/// [`Multipart`]: crate::extract::Multipart
#[cfg_attr(docsrs, doc(cfg(feature = "form")))]
#[derive(Debug, Clone, Copy, Default)]
#[must_use]
pub struct Form<T>(pub T);
impl<T, S> FromRequest<S> for Form<T>
where
    T: DeserializeOwned,
    S: Send + Sync,
{
    type Rejection = FormRejection;
    async fn from_request(req: Request, _state: &S) -> Result<Self, Self::Rejection> {
        // Capture the method *before* `req` is consumed by `extract()`.
        // For GET/HEAD the data comes from the query string; otherwise from
        // the body (see the type-level docs above) — the distinction only
        // affects which rejection variant we produce below.
        let is_get_or_head =
            req.method() == http::Method::GET || req.method() == http::Method::HEAD;
        match req.extract().await {
            Ok(RawForm(bytes)) => {
                let deserializer =
                    serde_html_form::Deserializer::new(form_urlencoded::parse(&bytes));
                // `serde_path_to_error` decorates deserialization errors with
                // the path of the offending field (e.g. "a: invalid digit").
                let value = serde_path_to_error::deserialize(deserializer).map_err(
                    |err| -> FormRejection {
                        if is_get_or_head {
                            // Query-string form data → `FailedToDeserializeForm`.
                            FailedToDeserializeForm::from_err(err).into()
                        } else {
                            // Body form data → `FailedToDeserializeFormBody`.
                            FailedToDeserializeFormBody::from_err(err).into()
                        }
                    },
                )?;
                Ok(Self(value))
            }
            // Re-wrap `RawForm` rejections in this extractor's rejection type.
            Err(RawFormRejection::BytesRejection(r)) => Err(FormRejection::BytesRejection(r)),
            Err(RawFormRejection::InvalidFormContentType(r)) => {
                Err(FormRejection::InvalidFormContentType(r))
            }
        }
    }
}
impl<T> IntoResponse for Form<T>
where
    T: Serialize,
{
    fn into_response(self) -> Response {
        // Non-generic inner fn: compiled a single time regardless of how many
        // `T`s this impl is instantiated with.
        fn make_response(ser_result: Result<String, serde_html_form::ser::Error>) -> Response {
            match ser_result {
                Ok(body) => {
                    let headers =
                        [(CONTENT_TYPE, mime::APPLICATION_WWW_FORM_URLENCODED.as_ref())];
                    (headers, body).into_response()
                }
                // Serialization failure becomes a plain-text 500 response.
                Err(err) => (StatusCode::INTERNAL_SERVER_ERROR, err.to_string()).into_response(),
            }
        }
        make_response(serde_html_form::to_string(&self.0))
    }
}
axum_core::__impl_deref!(Form);
#[cfg(test)]
mod tests {
use crate::{
routing::{on, MethodFilter},
test_helpers::TestClient,
Router,
};
use super::*;
use axum_core::body::Body;
use http::{Method, Request};
use mime::APPLICATION_WWW_FORM_URLENCODED;
use serde::{Deserialize, Serialize};
use std::fmt::Debug;
#[derive(Debug, PartialEq, Serialize, Deserialize)]
struct Pagination {
size: Option<u64>,
page: Option<u64>,
}
async fn check_query<T: DeserializeOwned + PartialEq + Debug>(uri: impl AsRef<str>, value: T) {
let req = Request::builder()
.uri(uri.as_ref())
.body(Body::empty())
.unwrap();
assert_eq!(Form::<T>::from_request(req, &()).await.unwrap().0, value);
}
async fn check_body<T: Serialize + DeserializeOwned + PartialEq + Debug>(value: T) {
let req = Request::builder()
.uri("http://example.com/test")
.method(Method::POST)
.header(CONTENT_TYPE, APPLICATION_WWW_FORM_URLENCODED.as_ref())
.body(Body::from(serde_html_form::to_string(&value).unwrap()))
.unwrap();
assert_eq!(Form::<T>::from_request(req, &()).await.unwrap().0, value);
}
#[crate::test]
async fn test_form_query() {
check_query(
"http://example.com/test",
Pagination {
size: None,
page: None,
},
)
.await;
check_query(
"http://example.com/test?size=10",
Pagination {
size: Some(10),
page: None,
},
)
.await;
check_query(
"http://example.com/test?size=10&page=20",
Pagination {
size: Some(10),
page: Some(20),
},
)
.await;
}
#[crate::test]
async fn test_form_body() {
check_body(Pagination {
size: None,
page: None,
})
.await;
check_body(Pagination {
size: Some(10),
page: None,
})
.await;
check_body(Pagination {
size: Some(10),
page: Some(20),
})
.await;
}
#[crate::test]
async fn test_incorrect_content_type() {
let req = Request::builder()
.uri("http://example.com/test")
.method(Method::POST)
.header(CONTENT_TYPE, mime::APPLICATION_JSON.as_ref())
.body(Body::from(
serde_html_form::to_string(&Pagination {
size: Some(10),
page: None,
})
.unwrap(),
))
.unwrap();
assert!(matches!(
Form::<Pagination>::from_request(req, &())
.await
.unwrap_err(),
FormRejection::InvalidFormContentType(InvalidFormContentType)
));
}
#[tokio::test]
async fn deserialize_error_status_codes() {
#[allow(dead_code)]
#[derive(Deserialize)]
struct Payload {
a: i32,
}
let app = Router::new().route(
"/",
on(
MethodFilter::GET.or(MethodFilter::POST),
|_: Form<Payload>| async {},
),
);
let client = TestClient::new(app);
let res = client.get("/?a=false").await;
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
assert_eq!(
res.text().await,
"Failed to deserialize form: a: invalid digit found in string"
);
let res = client
.post("/")
.header(CONTENT_TYPE, APPLICATION_WWW_FORM_URLENCODED.as_ref())
.body("a=false")
.await;
assert_eq!(res.status(), StatusCode::UNPROCESSABLE_ENTITY);
assert_eq!(
res.text().await,
"Failed to deserialize form body: a: invalid digit found in string"
);
}
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/extension.rs | axum/src/extension.rs | use crate::{extract::rejection::*, response::IntoResponseParts};
use axum_core::extract::OptionalFromRequestParts;
use axum_core::{
extract::FromRequestParts,
response::{IntoResponse, Response, ResponseParts},
};
use http::{request::Parts, Extensions, Request};
use std::{
convert::Infallible,
task::{Context, Poll},
};
use tower_service::Service;
/// Extractor and response for extensions.
///
/// # As extractor
///
/// This is commonly used to share state across handlers.
///
/// ```rust,no_run
/// use axum::{
/// Router,
/// Extension,
/// routing::get,
/// };
/// use std::sync::Arc;
///
/// // Some shared state used throughout our application
/// struct State {
/// // ...
/// }
///
/// async fn handler(state: Extension<Arc<State>>) {
/// // ...
/// }
///
/// let state = Arc::new(State { /* ... */ });
///
/// let app = Router::new().route("/", get(handler))
/// // Add middleware that inserts the state into all incoming request's
/// // extensions.
/// .layer(Extension(state));
/// # let _: Router = app;
/// ```
///
/// If the extension is missing it will reject the request with a `500 Internal
/// Server Error` response. Alternatively, you can use `Option<Extension<T>>` to
/// make the extension extractor optional.
///
/// # As response
///
/// Response extensions can be used to share state with middleware.
///
/// ```rust
/// use axum::{
/// Extension,
/// response::IntoResponse,
/// };
///
/// async fn handler() -> (Extension<Foo>, &'static str) {
/// (
/// Extension(Foo("foo")),
/// "Hello, World!"
/// )
/// }
///
/// #[derive(Clone)]
/// struct Foo(&'static str);
/// ```
#[derive(Debug, Clone, Copy, Default)]
#[must_use]
pub struct Extension<T>(pub T);
impl<T> Extension<T>
where
    T: Clone + Send + Sync + 'static,
{
    /// Look a `T` up in the given extensions map, clone it out, and wrap it
    /// in `Extension`. Returns `None` when no `T` was inserted.
    fn from_extensions(extensions: &Extensions) -> Option<Self> {
        extensions.get::<T>().cloned().map(Extension)
    }
}
impl<T, S> FromRequestParts<S> for Extension<T>
where
    T: Clone + Send + Sync + 'static,
    S: Send + Sync,
{
    type Rejection = ExtensionRejection;
    /// Pull a `T` out of the request extensions, rejecting (which renders as
    /// a 500 response, per the type-level docs) when no `T` was inserted —
    /// usually a forgotten `.layer(Extension(...))`.
    async fn from_request_parts(req: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        // Return the `ok_or_else` result directly instead of the previous
        // `Ok(expr?)` round-trip (clippy::needless_question_mark); the
        // rejection message is unchanged.
        Self::from_extensions(&req.extensions).ok_or_else(|| {
            MissingExtension::from_err(format!(
                "Extension of type `{}` was not found. Perhaps you forgot to add it? See `axum::Extension`.",
                std::any::type_name::<T>()
            ))
            .into()
        })
    }
}
impl<T, S> OptionalFromRequestParts<S> for Extension<T>
where
    T: Clone + Send + Sync + 'static,
    S: Send + Sync,
{
    type Rejection = Infallible;
    async fn from_request_parts(
        req: &mut Parts,
        _state: &S,
    ) -> Result<Option<Self>, Self::Rejection> {
        // A missing extension is simply `None` here — this extractor never
        // rejects the request.
        let maybe_extension = Self::from_extensions(&req.extensions);
        Ok(maybe_extension)
    }
}
axum_core::__impl_deref!(Extension);
impl<T> IntoResponseParts for Extension<T>
where
T: Clone + Send + Sync + 'static,
{
type Error = Infallible;
fn into_response_parts(self, mut res: ResponseParts) -> Result<ResponseParts, Self::Error> {
res.extensions_mut().insert(self.0);
Ok(res)
}
}
impl<T> IntoResponse for Extension<T>
where
    T: Clone + Send + Sync + 'static,
{
    fn into_response(self) -> Response {
        // Start from an empty (unit) response and attach the value as a
        // response extension.
        let mut response = ().into_response();
        response.extensions_mut().insert(self.0);
        response
    }
}
impl<S, T> tower_layer::Layer<S> for Extension<T>
where
    T: Clone + Send + Sync + 'static,
{
    type Service = AddExtension<S, T>;
    fn layer(&self, inner: S) -> Self::Service {
        // Each wrapped service gets its own clone of the value; `AddExtension`
        // clones it again per request.
        let value = self.0.clone();
        AddExtension { inner, value }
    }
}
/// Middleware for adding some shareable value to [request extensions].
///
/// See [Passing state from middleware to handlers](index.html#passing-state-from-middleware-to-handlers)
/// for more details.
///
/// [request extensions]: https://docs.rs/http/latest/http/struct.Extensions.html
///
/// If you need a layer to add an extension to every request,
/// use the [Layer](tower::Layer) implementation of [Extension].
#[derive(Clone, Copy, Debug)]
pub struct AddExtension<S, T> {
pub(crate) inner: S,
pub(crate) value: T,
}
impl<ResBody, S, T> Service<Request<ResBody>> for AddExtension<S, T>
where
    S: Service<Request<ResBody>>,
    T: Clone + Send + Sync + 'static,
{
    // Pass-through service: same response/error/future as the inner service.
    type Response = S::Response;
    type Error = S::Error;
    type Future = S::Future;
    #[inline]
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        self.inner.poll_ready(cx)
    }
    fn call(&mut self, mut req: Request<ResBody>) -> Self::Future {
        // Insert a clone of the value into this request's extensions before
        // delegating, so downstream extractors can retrieve it.
        req.extensions_mut().insert(self.value.clone());
        self.inner.call(req)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::routing::get;
    use crate::test_helpers::TestClient;
    use crate::Router;
    use http::StatusCode;
    // Two distinct marker types: `Foo` is inserted via the layer below,
    // `Bar` deliberately never is.
    #[derive(Clone)]
    struct Foo(String);
    #[derive(Clone)]
    struct Bar(String);
    #[crate::test]
    async fn extension_extractor() {
        async fn requires_foo(Extension(foo): Extension<Foo>) -> String {
            foo.0
        }
        async fn optional_foo(extension: Option<Extension<Foo>>) -> String {
            extension.map(|foo| foo.0 .0).unwrap_or("none".to_owned())
        }
        async fn requires_bar(Extension(bar): Extension<Bar>) -> String {
            bar.0
        }
        async fn optional_bar(extension: Option<Extension<Bar>>) -> String {
            extension.map(|bar| bar.0 .0).unwrap_or("none".to_owned())
        }
        let app = Router::new()
            .route("/requires_foo", get(requires_foo))
            .route("/optional_foo", get(optional_foo))
            .route("/requires_bar", get(requires_bar))
            .route("/optional_bar", get(optional_bar))
            .layer(Extension(Foo("foo".to_owned())));
        let client = TestClient::new(app);
        // Present extension: both required and optional forms succeed.
        let response = client.get("/requires_foo").await;
        assert_eq!(response.status(), StatusCode::OK);
        assert_eq!(response.text().await, "foo");
        let response = client.get("/optional_foo").await;
        assert_eq!(response.status(), StatusCode::OK);
        assert_eq!(response.text().await, "foo");
        // Missing extension: the required form rejects with 500 ...
        let response = client.get("/requires_bar").await;
        assert_eq!(response.status(), StatusCode::INTERNAL_SERVER_ERROR);
        assert_eq!(response.text().await, "Missing request extension: Extension of type `axum::extension::tests::Bar` was not found. Perhaps you forgot to add it? See `axum::Extension`.");
        // ... while the optional form yields `None` and still returns 200.
        let response = client.get("/optional_bar").await;
        assert_eq!(response.status(), StatusCode::OK);
        assert_eq!(response.text().await, "none");
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/json.rs | axum/src/json.rs | use crate::extract::Request;
use crate::extract::{rejection::*, FromRequest};
use axum_core::extract::OptionalFromRequest;
use axum_core::response::{IntoResponse, Response};
use bytes::{BufMut, Bytes, BytesMut};
use http::{
header::{self, HeaderMap, HeaderValue},
StatusCode,
};
use serde_core::{de::DeserializeOwned, Serialize};
/// JSON Extractor / Response.
///
/// When used as an extractor, it can deserialize request bodies into some type that
/// implements [`serde::de::DeserializeOwned`]. The request will be rejected (and a [`JsonRejection`] will
/// be returned) if:
///
/// - The request doesn't have a `Content-Type: application/json` (or similar) header.
/// - The body doesn't contain syntactically valid JSON.
/// - The body contains syntactically valid JSON, but it couldn't be deserialized into the target type.
/// - Buffering the request body fails.
///
/// ⚠️ Since parsing JSON requires consuming the request body, the `Json` extractor must be
/// *last* if there are multiple extractors in a handler.
/// See ["the order of extractors"][order-of-extractors]
///
/// [order-of-extractors]: crate::extract#the-order-of-extractors
///
/// See [`JsonRejection`] for more details.
///
/// # Extractor example
///
/// ```rust,no_run
/// use axum::{
/// extract,
/// routing::post,
/// Router,
/// };
/// use serde::Deserialize;
///
/// #[derive(Deserialize)]
/// struct CreateUser {
/// email: String,
/// password: String,
/// }
///
/// async fn create_user(extract::Json(payload): extract::Json<CreateUser>) {
/// // payload is a `CreateUser`
/// }
///
/// let app = Router::new().route("/users", post(create_user));
/// # let _: Router = app;
/// ```
///
/// When used as a response, it can serialize any type that implements [`serde::Serialize`] to
/// `JSON`, and will automatically set `Content-Type: application/json` header.
///
/// If the [`Serialize`] implementation decides to fail
/// or if a map with non-string keys is used,
/// a 500 response will be issued
/// whose body is the error message in UTF-8.
///
/// # Response example
///
/// ```
/// use axum::{
/// extract::Path,
/// routing::get,
/// Router,
/// Json,
/// };
/// use serde::Serialize;
/// use uuid::Uuid;
///
/// #[derive(Serialize)]
/// struct User {
/// id: Uuid,
/// username: String,
/// }
///
/// async fn get_user(Path(user_id) : Path<Uuid>) -> Json<User> {
/// let user = find_user(user_id).await;
/// Json(user)
/// }
///
/// async fn find_user(user_id: Uuid) -> User {
/// // ...
/// # unimplemented!()
/// }
///
/// let app = Router::new().route("/users/{id}", get(get_user));
/// # let _: Router = app;
/// ```
#[derive(Debug, Clone, Copy, Default)]
#[cfg_attr(docsrs, doc(cfg(feature = "json")))]
#[must_use]
pub struct Json<T>(pub T);
impl<T, S> FromRequest<S> for Json<T>
where
    T: DeserializeOwned,
    S: Send + Sync,
{
    type Rejection = JsonRejection;
    async fn from_request(req: Request, state: &S) -> Result<Self, Self::Rejection> {
        // Reject before buffering the body when the content-type isn't a
        // JSON-ish `application/*json*` (see `json_content_type` below).
        if !json_content_type(req.headers()) {
            return Err(MissingJsonContentType.into());
        }
        // Buffer the full body, then deserialize via `from_bytes`.
        let bytes = Bytes::from_request(req, state).await?;
        Self::from_bytes(&bytes)
    }
}
impl<T, S> OptionalFromRequest<S> for Json<T>
where
    T: DeserializeOwned,
    S: Send + Sync,
{
    type Rejection = JsonRejection;
    async fn from_request(req: Request, state: &S) -> Result<Option<Self>, Self::Rejection> {
        // No content-type header at all means "no JSON body": yield `None`.
        if req.headers().get(header::CONTENT_TYPE).is_none() {
            return Ok(None);
        }
        // A header is present but isn't JSON-ish: that's still a rejection.
        if !json_content_type(req.headers()) {
            return Err(MissingJsonContentType.into());
        }
        let bytes = Bytes::from_request(req, state).await?;
        Self::from_bytes(&bytes).map(Some)
    }
}
/// Whether the request's `content-type` header denotes JSON:
/// `application/json` or any `application/*+json` media type, ignoring
/// parameters such as `charset`.
fn json_content_type(headers: &HeaderMap) -> bool {
    let Some(header_value) = headers.get(header::CONTENT_TYPE) else {
        return false;
    };
    let Ok(raw) = header_value.to_str() else {
        return false;
    };
    let Ok(mime) = raw.parse::<mime::Mime>() else {
        return false;
    };
    let json_subtype =
        mime.subtype() == "json" || mime.suffix().is_some_and(|suffix| suffix == "json");
    mime.type_() == "application" && json_subtype
}
axum_core::__impl_deref!(Json);
impl<T> From<T> for Json<T> {
fn from(inner: T) -> Self {
Self(inner)
}
}
impl<T> Json<T>
where
    T: DeserializeOwned,
{
    /// Construct a `Json<T>` from a byte slice. Most users should prefer to use the `FromRequest` impl
    /// but special cases may require first extracting a `Request` into `Bytes` then optionally
    /// constructing a `Json<T>`.
    pub fn from_bytes(bytes: &[u8]) -> Result<Self, JsonRejection> {
        // Extracted into separate fn so it's only compiled once for all T.
        // Maps serde_json's error category onto the matching rejection:
        // well-formed-but-wrong-shape data vs. malformed syntax.
        fn make_rejection(err: serde_path_to_error::Error<serde_json::Error>) -> JsonRejection {
            match err.inner().classify() {
                serde_json::error::Category::Data => JsonDataError::from_err(err).into(),
                serde_json::error::Category::Syntax | serde_json::error::Category::Eof => {
                    JsonSyntaxError::from_err(err).into()
                }
                serde_json::error::Category::Io => {
                    if cfg!(debug_assertions) {
                        // we don't use `serde_json::from_reader` and instead always buffer
                        // bodies first, so we shouldn't encounter any IO errors
                        unreachable!()
                    } else {
                        JsonSyntaxError::from_err(err).into()
                    }
                }
            }
        }
        let mut deserializer = serde_json::Deserializer::from_slice(bytes);
        // `serde_path_to_error` decorates errors with the path of the
        // offending field (e.g. "b[0]: missing field `y`").
        serde_path_to_error::deserialize(&mut deserializer)
            .map_err(make_rejection)
            .and_then(|value| {
                // `end()` rejects trailing characters after a valid JSON
                // document (see the `extra_chars_after_valid_json_syntax` test).
                deserializer
                    .end()
                    .map(|()| Self(value))
                    .map_err(|err| JsonSyntaxError::from_err(err).into())
            })
    }
}
impl<T> IntoResponse for Json<T>
where
    T: Serialize,
{
    fn into_response(self) -> Response {
        // Extracted into separate fn so it's only compiled once for all T.
        fn make_response(buf: BytesMut, ser_result: serde_json::Result<()>) -> Response {
            match ser_result {
                // Success: the serialized bytes with `content-type: application/json`.
                Ok(()) => (
                    [(
                        header::CONTENT_TYPE,
                        HeaderValue::from_static(mime::APPLICATION_JSON.as_ref()),
                    )],
                    buf.freeze(),
                )
                    .into_response(),
                // Serialization failure: plain-text 500 carrying the error message.
                Err(err) => (
                    StatusCode::INTERNAL_SERVER_ERROR,
                    [(
                        header::CONTENT_TYPE,
                        HeaderValue::from_static(mime::TEXT_PLAIN_UTF_8.as_ref()),
                    )],
                    err.to_string(),
                )
                    .into_response(),
            }
        }
        // Use a small initial capacity of 128 bytes like serde_json::to_vec
        // https://docs.rs/serde_json/1.0.82/src/serde_json/ser.rs.html#2189
        let mut buf = BytesMut::with_capacity(128).writer();
        let res = serde_json::to_writer(&mut buf, &self.0);
        make_response(buf.into_inner(), res)
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{routing::post, test_helpers::*, Router};
use serde::Deserialize;
use serde_json::{json, Value};
#[crate::test]
async fn deserialize_body() {
#[derive(Debug, Deserialize)]
struct Input {
foo: String,
}
let app = Router::new().route("/", post(|input: Json<Input>| async { input.0.foo }));
let client = TestClient::new(app);
let res = client.post("/").json(&json!({ "foo": "bar" })).await;
let body = res.text().await;
assert_eq!(body, "bar");
}
#[crate::test]
async fn consume_body_to_json_requires_json_content_type() {
#[derive(Debug, Deserialize)]
struct Input {
foo: String,
}
let app = Router::new().route("/", post(|input: Json<Input>| async { input.0.foo }));
let client = TestClient::new(app);
let res = client.post("/").body(r#"{ "foo": "bar" }"#).await;
let status = res.status();
assert_eq!(status, StatusCode::UNSUPPORTED_MEDIA_TYPE);
}
#[crate::test]
async fn json_content_types() {
async fn valid_json_content_type(content_type: &str) -> bool {
println!("testing {content_type:?}");
let app = Router::new().route("/", post(|Json(_): Json<Value>| async {}));
let res = TestClient::new(app)
.post("/")
.header("content-type", content_type)
.body("{}")
.await;
res.status() == StatusCode::OK
}
assert!(valid_json_content_type("application/json").await);
assert!(valid_json_content_type("application/json; charset=utf-8").await);
assert!(valid_json_content_type("application/json;charset=utf-8").await);
assert!(valid_json_content_type("application/cloudevents+json").await);
assert!(!valid_json_content_type("text/json").await);
}
#[crate::test]
async fn invalid_json_syntax() {
let app = Router::new().route("/", post(|_: Json<serde_json::Value>| async {}));
let client = TestClient::new(app);
let res = client
.post("/")
.body("{")
.header("content-type", "application/json")
.await;
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
}
#[crate::test]
async fn extra_chars_after_valid_json_syntax() {
#[derive(Debug, Deserialize)]
struct Input {
foo: String,
}
let app = Router::new().route("/", post(|input: Json<Input>| async { input.0.foo }));
let client = TestClient::new(app);
let res = client
.post("/")
.body(r#"{ "foo": "bar" } baz "#)
.header("content-type", "application/json")
.await;
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
let body_text = res.text().await;
assert_eq!(
body_text,
"Failed to parse the request body as JSON: trailing characters at line 1 column 18"
);
}
#[derive(Deserialize)]
struct Foo {
#[allow(dead_code)]
a: i32,
#[allow(dead_code)]
b: Vec<Bar>,
}
#[derive(Deserialize)]
struct Bar {
#[allow(dead_code)]
x: i32,
#[allow(dead_code)]
y: i32,
}
#[crate::test]
async fn invalid_json_data() {
let app = Router::new().route("/", post(|_: Json<Foo>| async {}));
let client = TestClient::new(app);
let res = client
.post("/")
.body("{\"a\": 1, \"b\": [{\"x\": 2}]}")
.header("content-type", "application/json")
.await;
assert_eq!(res.status(), StatusCode::UNPROCESSABLE_ENTITY);
let body_text = res.text().await;
assert_eq!(
body_text,
"Failed to deserialize the JSON body into the target type: b[0]: missing field `y` at line 1 column 23"
);
}
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/util.rs | axum/src/util.rs | use axum_core::response::{IntoResponse, Response};
use pin_project_lite::pin_project;
use std::{
future::Future,
ops::Deref,
pin::Pin,
sync::Arc,
task::{ready, Context, Poll},
};
use tower::Service;
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub(crate) struct PercentDecodedStr(Arc<str>);
impl PercentDecodedStr {
pub(crate) fn new<S>(s: S) -> Option<Self>
where
S: AsRef<str>,
{
percent_encoding::percent_decode(s.as_ref().as_bytes())
.decode_utf8()
.ok()
.map(|decoded| Self(decoded.as_ref().into()))
}
pub(crate) fn as_str(&self) -> &str {
&self.0
}
}
impl Deref for PercentDecodedStr {
type Target = str;
#[inline]
fn deref(&self) -> &Self::Target {
self.as_str()
}
}
pin_project! {
#[project = EitherProj]
pub(crate) enum Either<A, B> {
A { #[pin] inner: A },
B { #[pin] inner: B },
}
}
// Service adapter that converts the inner service's response into an axum
// `Response` via `IntoResponse` (see the `Service` impl below).
#[derive(Clone)]
pub(crate) struct MapIntoResponse<S> {
    inner: S,
}
impl<S> MapIntoResponse<S> {
    /// Wrap `inner`, leaving it untouched until it is called.
    pub(crate) fn new(inner: S) -> Self {
        Self { inner }
    }
}
impl<B, S> Service<http::Request<B>> for MapIntoResponse<S>
where
    S: Service<http::Request<B>>,
    S::Response: IntoResponse,
{
    // Always produces an axum `Response`, regardless of the inner service's
    // response type; errors are passed through unchanged.
    type Response = Response;
    type Error = S::Error;
    type Future = MapIntoResponseFuture<S::Future>;
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        self.inner.poll_ready(cx)
    }
    fn call(&mut self, req: http::Request<B>) -> Self::Future {
        // Wrap the inner future; the conversion happens lazily in its `poll`.
        MapIntoResponseFuture {
            inner: self.inner.call(req),
        }
    }
}
pin_project! {
pub(crate) struct MapIntoResponseFuture<F> {
#[pin]
inner: F,
}
}
impl<F, T, E> Future for MapIntoResponseFuture<F>
where
    F: Future<Output = Result<T, E>>,
    T: IntoResponse,
{
    type Output = Result<Response, E>;
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // Wait for the inner future, then convert a successful value into a
        // `Response`; errors pass through untouched.
        match ready!(self.project().inner.poll(cx)) {
            Ok(value) => Poll::Ready(Ok(value.into_response())),
            Err(err) => Poll::Ready(Err(err)),
        }
    }
}
/// Attempt to move `k` into a `T`. Succeeds exactly when `K` and `T` are the
/// same type; otherwise the original value is handed back in `Err`.
pub(crate) fn try_downcast<T, K>(k: K) -> Result<T, K>
where
    T: 'static,
    K: Send + 'static,
{
    // Stash the value in an `Option` so it can be moved out through a
    // `&mut dyn Any` view: `downcast_mut::<Option<T>>` succeeds iff the
    // erased type is `Option<T>`, i.e. iff `K == T`.
    let mut slot = Some(k);
    match <dyn std::any::Any>::downcast_mut::<Option<T>>(&mut slot) {
        Some(same_type) => Ok(same_type.take().unwrap()),
        None => Err(slot.unwrap()),
    }
}
#[test]
fn test_try_downcast() {
    // Type mismatch hands the value back in `Err`; exact match moves it into `Ok`.
    assert_eq!(try_downcast::<i32, _>(5_u32), Err(5_u32));
    assert_eq!(try_downcast::<i32, _>(5_i32), Ok(5_i32));
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/macros.rs | axum/src/macros.rs | //! Internal macros
// Generates an opaque, named wrapper around a concrete future type:
// a pin-projected struct, a `new` constructor, a non-exhaustive `Debug`,
// and a delegating `Future` impl.
macro_rules! opaque_future {
    // Non-generic form: forwards to the generic arm with an empty parameter list.
    ($(#[$m:meta])* pub type $name:ident = $actual:ty;) => {
        opaque_future! {
            $(#[$m])*
            pub type $name<> = $actual;
        }
    };
    // Generic form: does the actual code generation.
    ($(#[$m:meta])* pub type $name:ident<$($param:ident),*> = $actual:ty;) => {
        pin_project_lite::pin_project! {
            $(#[$m])*
            pub struct $name<$($param),*> {
                #[pin] future: $actual,
            }
        }
        impl<$($param),*> $name<$($param),*> {
            pub(crate) fn new(future: $actual) -> Self {
                Self { future }
            }
        }
        // Opaque `Debug`: only the type name, never the wrapped future.
        impl<$($param),*> std::fmt::Debug for $name<$($param),*> {
            fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
                f.debug_struct(stringify!($name)).finish_non_exhaustive()
            }
        }
        impl<$($param),*> std::future::Future for $name<$($param),*>
        where
            $actual: std::future::Future,
        {
            type Output = <$actual as std::future::Future>::Output;
            #[inline]
            fn poll(
                self: std::pin::Pin<&mut Self>,
                cx: &mut std::task::Context<'_>,
            ) -> std::task::Poll<Self::Output> {
                self.project().future.poll(cx)
            }
        }
    };
}
// Invokes `$name!` once per tuple arity from 1 to 16, passing the already
// accumulated parameters plus the next one. Used to implement traits for
// tuples of extractors/handler arguments.
#[rustfmt::skip]
macro_rules! all_the_tuples {
    ($name:ident) => {
        $name!([], T1);
        $name!([T1], T2);
        $name!([T1, T2], T3);
        $name!([T1, T2, T3], T4);
        $name!([T1, T2, T3, T4], T5);
        $name!([T1, T2, T3, T4, T5], T6);
        $name!([T1, T2, T3, T4, T5, T6], T7);
        $name!([T1, T2, T3, T4, T5, T6, T7], T8);
        $name!([T1, T2, T3, T4, T5, T6, T7, T8], T9);
        $name!([T1, T2, T3, T4, T5, T6, T7, T8, T9], T10);
        $name!([T1, T2, T3, T4, T5, T6, T7, T8, T9, T10], T11);
        $name!([T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11], T12);
        $name!([T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12], T13);
        $name!([T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13], T14);
        $name!([T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14], T15);
        $name!([T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15], T16);
    };
}
// With the `tracing` feature enabled, `trace!`/`error!` forward to the
// `tracing` crate's macros of the same name.
#[cfg(feature = "tracing")]
#[allow(unused_macros)]
macro_rules! trace {
    ($($tt:tt)*) => {
        tracing::trace!($($tt)*)
    }
}
#[cfg(feature = "tracing")]
#[allow(unused_macros)]
macro_rules! error {
    ($($tt:tt)*) => {
        tracing::error!($($tt)*)
    };
}
// Without the feature they expand to nothing, so call sites need no cfg.
#[cfg(not(feature = "tracing"))]
#[allow(unused_macros)]
macro_rules! trace {
    ($($tt:tt)*) => {};
}
#[cfg(not(feature = "tracing"))]
#[allow(unused_macros)]
macro_rules! error {
    ($($tt:tt)*) => {};
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/boxed.rs | axum/src/boxed.rs | use std::{convert::Infallible, fmt};
use crate::extract::Request;
use tower::Service;
use crate::{
handler::Handler,
routing::{future::RouteFuture, Route},
Router,
};
// Type-erased "handler plus how to turn it into a `Route`", deferring route
// construction until the state `S` is available.
pub(crate) struct BoxedIntoRoute<S, E>(Box<dyn ErasedIntoRoute<S, E>>);
impl<S> BoxedIntoRoute<S, Infallible>
where
    S: Clone + Send + Sync + 'static,
{
    /// Box a handler, erasing its concrete `H`/`T` types. The stored fn
    /// pointer later builds the `Route` once state is supplied.
    pub(crate) fn from_handler<H, T>(handler: H) -> Self
    where
        H: Handler<T, S>,
        T: 'static,
    {
        Self(Box::new(MakeErasedHandler {
            handler,
            into_route: |handler, state| Route::new(Handler::with_state(handler, state)),
        }))
    }
}
impl<S, E> BoxedIntoRoute<S, E> {
    /// Layer a transformation over the eventual `Route`, possibly changing
    /// its error type. The closure is stored boxed and applied lazily in
    /// `into_route` (see `Map`).
    pub(crate) fn map<F, E2>(self, f: F) -> BoxedIntoRoute<S, E2>
    where
        S: 'static,
        E: 'static,
        F: FnOnce(Route<E>) -> Route<E2> + Clone + Send + Sync + 'static,
        E2: 'static,
    {
        BoxedIntoRoute(Box::new(Map {
            inner: self.0,
            layer: Box::new(f),
        }))
    }
    /// Materialize the `Route` by supplying the state.
    pub(crate) fn into_route(self, state: S) -> Route<E> {
        self.0.into_route(state)
    }
}
impl<S, E> Clone for BoxedIntoRoute<S, E> {
fn clone(&self) -> Self {
Self(self.0.clone_box())
}
}
impl<S, E> fmt::Debug for BoxedIntoRoute<S, E> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_tuple("BoxedIntoRoute").finish()
}
}
// Object-safe interface behind `BoxedIntoRoute`. `clone_box` substitutes for
// `Clone`, which is not object safe.
pub(crate) trait ErasedIntoRoute<S, E>: Send + Sync {
    fn clone_box(&self) -> Box<dyn ErasedIntoRoute<S, E>>;
    fn into_route(self: Box<Self>, state: S) -> Route<E>;
    #[allow(dead_code)]
    fn call_with_state(self: Box<Self>, request: Request, state: S) -> RouteFuture<E>;
}
pub(crate) struct MakeErasedHandler<H, S> {
pub(crate) handler: H,
pub(crate) into_route: fn(H, S) -> Route,
}
impl<H, S> ErasedIntoRoute<S, Infallible> for MakeErasedHandler<H, S>
where
H: Clone + Send + Sync + 'static,
S: 'static,
{
fn clone_box(&self) -> Box<dyn ErasedIntoRoute<S, Infallible>> {
Box::new(self.clone())
}
fn into_route(self: Box<Self>, state: S) -> Route {
(self.into_route)(self.handler, state)
}
fn call_with_state(self: Box<Self>, request: Request, state: S) -> RouteFuture<Infallible> {
self.into_route(state).call(request)
}
}
impl<H, S> Clone for MakeErasedHandler<H, S>
where
H: Clone,
{
fn clone(&self) -> Self {
Self {
handler: self.handler.clone(),
into_route: self.into_route,
}
}
}
#[allow(dead_code)]
pub(crate) struct MakeErasedRouter<S> {
pub(crate) router: Router<S>,
pub(crate) into_route: fn(Router<S>, S) -> Route,
}
impl<S> ErasedIntoRoute<S, Infallible> for MakeErasedRouter<S>
where
S: Clone + Send + Sync + 'static,
{
fn clone_box(&self) -> Box<dyn ErasedIntoRoute<S, Infallible>> {
Box::new(self.clone())
}
fn into_route(self: Box<Self>, state: S) -> Route {
(self.into_route)(self.router, state)
}
fn call_with_state(self: Box<Self>, request: Request, state: S) -> RouteFuture<Infallible> {
self.router.call_with_state(request, state)
}
}
impl<S> Clone for MakeErasedRouter<S>
where
S: Clone,
{
fn clone(&self) -> Self {
Self {
router: self.router.clone(),
into_route: self.into_route,
}
}
}
pub(crate) struct Map<S, E, E2> {
pub(crate) inner: Box<dyn ErasedIntoRoute<S, E>>,
pub(crate) layer: Box<dyn LayerFn<E, E2>>,
}
impl<S, E, E2> ErasedIntoRoute<S, E2> for Map<S, E, E2>
where
S: 'static,
E: 'static,
E2: 'static,
{
fn clone_box(&self) -> Box<dyn ErasedIntoRoute<S, E2>> {
Box::new(Self {
inner: self.inner.clone_box(),
layer: self.layer.clone_box(),
})
}
fn into_route(self: Box<Self>, state: S) -> Route<E2> {
(self.layer)(self.inner.into_route(state))
}
fn call_with_state(self: Box<Self>, request: Request, state: S) -> RouteFuture<E2> {
(self.layer)(self.inner.into_route(state)).call(request)
}
}
pub(crate) trait LayerFn<E, E2>: FnOnce(Route<E>) -> Route<E2> + Send + Sync {
fn clone_box(&self) -> Box<dyn LayerFn<E, E2>>;
}
impl<F, E, E2> LayerFn<E, E2> for F
where
F: FnOnce(Route<E>) -> Route<E2> + Clone + Send + Sync + 'static,
{
fn clone_box(&self) -> Box<dyn LayerFn<E, E2>> {
Box::new(self.clone())
}
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/service_ext.rs | axum/src/service_ext.rs | use crate::error_handling::HandleError;
#[cfg(feature = "tokio")]
use crate::extract::connect_info::IntoMakeServiceWithConnectInfo;
use crate::routing::IntoMakeService;
use tower_service::Service;
/// Extension trait that adds additional methods to any [`Service`].
pub trait ServiceExt<R>: Service<R> + Sized {
/// Convert this service into a [`MakeService`], that is a [`Service`] whose
/// response is another service.
///
/// This is commonly used when applying middleware around an entire [`Router`]. See ["Rewriting
/// request URI in middleware"] for more details.
///
/// [`MakeService`]: tower::make::MakeService
/// ["Rewriting request URI in middleware"]: crate::middleware#rewriting-request-uri-in-middleware
/// [`Router`]: crate::Router
fn into_make_service(self) -> IntoMakeService<Self>;
/// Convert this service into a [`MakeService`], that will store `C`'s
/// associated `ConnectInfo` in a request extension such that [`ConnectInfo`]
/// can extract it.
///
/// This enables extracting things like the client's remote address.
/// This is commonly used when applying middleware around an entire [`Router`]. See ["Rewriting
/// request URI in middleware"] for more details.
///
/// [`MakeService`]: tower::make::MakeService
/// ["Rewriting request URI in middleware"]: crate::middleware#rewriting-request-uri-in-middleware
/// [`Router`]: crate::Router
/// [`ConnectInfo`]: crate::extract::connect_info::ConnectInfo
#[cfg(feature = "tokio")]
fn into_make_service_with_connect_info<C>(self) -> IntoMakeServiceWithConnectInfo<Self, C>;
/// Convert this service into a [`HandleError`], that will handle errors
/// by converting them into responses.
///
/// See ["error handling model"] for more details.
///
/// [`HandleError`]: crate::error_handling::HandleError
/// ["error handling model"]: crate::error_handling#axums-error-handling-model
fn handle_error<F, T>(self, f: F) -> HandleError<Self, F, T> {
HandleError::new(self, f)
}
}
// Blanket impl: every `Service<R>` gets the `ServiceExt` conveniences.
impl<S, R> ServiceExt<R> for S
where
    S: Service<R> + Sized,
{
    fn into_make_service(self) -> IntoMakeService<Self> {
        IntoMakeService::new(self)
    }
    #[cfg(feature = "tokio")]
    fn into_make_service_with_connect_info<C>(self) -> IntoMakeServiceWithConnectInfo<Self, C> {
        IntoMakeServiceWithConnectInfo::new(self)
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/extract/connect_info.rs | axum/src/extract/connect_info.rs | //! Extractor for getting connection information from a client.
//!
//! See [`Router::into_make_service_with_connect_info`] for more details.
//!
//! [`Router::into_make_service_with_connect_info`]: crate::routing::Router::into_make_service_with_connect_info
use crate::extension::AddExtension;
use super::{Extension, FromRequestParts};
use http::request::Parts;
use std::{
convert::Infallible,
fmt,
future::ready,
marker::PhantomData,
net::SocketAddr,
task::{Context, Poll},
};
use tower_layer::Layer;
use tower_service::Service;
/// A [`MakeService`] created from a router.
///
/// See [`Router::into_make_service_with_connect_info`] for more details.
///
/// [`MakeService`]: tower::make::MakeService
/// [`Router::into_make_service_with_connect_info`]: crate::routing::Router::into_make_service_with_connect_info
pub struct IntoMakeServiceWithConnectInfo<S, C> {
    // The inner service, cloned for each incoming connection (see the
    // `Service` impl below).
    svc: S,
    // `fn() -> C` ties `C` to the type without storing a value of it, so the
    // struct's auto traits don't depend on `C` itself.
    _connect_info: PhantomData<fn() -> C>,
}
impl<S, C> IntoMakeServiceWithConnectInfo<S, C> {
    /// Wrap `svc`; the connect-info type `C` is carried only at the type level.
    pub(crate) fn new(svc: S) -> Self {
        Self {
            svc,
            _connect_info: PhantomData,
        }
    }
}
impl<S, C> fmt::Debug for IntoMakeServiceWithConnectInfo<S, C>
where
    S: fmt::Debug,
{
    /// Renders as `IntoMakeServiceWithConnectInfo { svc: .. }`; the phantom
    /// connect-info marker is intentionally omitted.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut dbg = f.debug_struct("IntoMakeServiceWithConnectInfo");
        dbg.field("svc", &self.svc);
        dbg.finish()
    }
}
// Manual impl: `#[derive(Clone)]` would needlessly require `C: Clone` even
// though no `C` value is stored.
impl<S, C> Clone for IntoMakeServiceWithConnectInfo<S, C>
where
    S: Clone,
{
    fn clone(&self) -> Self {
        let svc = self.svc.clone();
        Self {
            svc,
            _connect_info: PhantomData,
        }
    }
}
/// Trait that connected IO resources implement and use to produce information
/// about the connection.
///
/// The goal for this trait is to allow users to implement custom IO types that
/// can still provide the same connection metadata.
///
/// See [`Router::into_make_service_with_connect_info`] for more details.
///
/// [`Router::into_make_service_with_connect_info`]: crate::routing::Router::into_make_service_with_connect_info
// The `Clone + Send + Sync + 'static` bounds allow the produced value to be
// stored in a request extension (see the `Service` impl below).
pub trait Connected<T>: Clone + Send + Sync + 'static {
    /// Create type holding information about the connection.
    fn connect_info(stream: T) -> Self;
}
// Only available when axum's own server (`axum::serve`) is usable.
#[cfg(all(feature = "tokio", any(feature = "http1", feature = "http2")))]
const _: () = {
    use crate::serve;
    // For `axum::serve`, the connect info is the peer address reported by the
    // listener's incoming stream.
    impl<L> Connected<serve::IncomingStream<'_, L>> for SocketAddr
    where
        L: serve::Listener<Addr = Self>,
    {
        fn connect_info(stream: serve::IncomingStream<'_, L>) -> Self {
            *stream.remote_addr()
        }
    }
};
// Identity impl: when the connection target *is* a `SocketAddr`, pass it
// through unchanged.
impl Connected<Self> for SocketAddr {
    fn connect_info(remote_addr: Self) -> Self {
        remote_addr
    }
}
// Make-service impl: every call produces a fresh clone of the inner service
// with the connection's `ConnectInfo` attached as a request extension.
impl<S, C, T> Service<T> for IntoMakeServiceWithConnectInfo<S, C>
where
    S: Clone,
    C: Connected<T>,
{
    type Response = AddExtension<S, ConnectInfo<C>>;
    type Error = Infallible;
    type Future = ResponseFuture<S, C>;
    #[inline]
    fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Building a per-connection service never blocks.
        Poll::Ready(Ok(()))
    }
    fn call(&mut self, target: T) -> Self::Future {
        // Derive connection metadata from the connected IO resource and stash
        // it on a clone of the inner service.
        let info = ConnectInfo(C::connect_info(target));
        let service = Extension(info).layer(self.svc.clone());
        ResponseFuture::new(ready(Ok(service)))
    }
}
// Wraps `std::future::Ready` in a named opaque future type so the concrete
// future stays an implementation detail.
opaque_future! {
    /// Response future for [`IntoMakeServiceWithConnectInfo`].
    pub type ResponseFuture<S, C> =
        std::future::Ready<Result<AddExtension<S, ConnectInfo<C>>, Infallible>>;
}
/// Extractor for getting connection information produced by a [`Connected`].
///
/// Note this extractor requires you to use
/// [`Router::into_make_service_with_connect_info`] to run your app
/// otherwise it will fail at runtime.
///
/// See [`Router::into_make_service_with_connect_info`] for more details.
///
/// [`Router::into_make_service_with_connect_info`]: crate::routing::Router::into_make_service_with_connect_info
// Newtype wrapper; the inner `T` is whatever `Connected::connect_info` produced.
#[derive(Clone, Copy, Debug)]
pub struct ConnectInfo<T>(pub T);
impl<S, T> FromRequestParts<S> for ConnectInfo<T>
where
    S: Send + Sync,
    T: Clone + Send + Sync + 'static,
{
    type Rejection = <Extension<Self> as FromRequestParts<S>>::Rejection;
    async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
        // Prefer the real connect info stored by
        // `IntoMakeServiceWithConnectInfo`; fall back to a `MockConnectInfo`
        // layer (used in tests). If neither is present, surface the original
        // extension rejection.
        let real = Extension::<Self>::from_request_parts(parts, state).await;
        match real {
            Ok(Extension(connect_info)) => Ok(connect_info),
            Err(err) => {
                if let Some(MockConnectInfo(mock)) = parts.extensions.get::<MockConnectInfo<T>>() {
                    Ok(Self(mock.clone()))
                } else {
                    Err(err)
                }
            }
        }
    }
}
// Forward `Deref`/`DerefMut` to the wrapped connect-info value.
axum_core::__impl_deref!(ConnectInfo);
/// Middleware used to mock [`ConnectInfo`] during tests.
///
/// If you're accidentally using [`MockConnectInfo`] and
/// [`Router::into_make_service_with_connect_info`] at the same time then
/// [`Router::into_make_service_with_connect_info`] takes precedence.
///
/// # Example
///
/// ```
/// use axum::{
/// Router,
/// extract::connect_info::{MockConnectInfo, ConnectInfo},
/// body::Body,
/// routing::get,
/// http::{Request, StatusCode},
/// };
/// use std::net::SocketAddr;
/// use tower::ServiceExt;
///
/// async fn handler(ConnectInfo(addr): ConnectInfo<SocketAddr>) {}
///
/// // this router you can run with `app.into_make_service_with_connect_info::<SocketAddr>()`
/// fn app() -> Router {
/// Router::new().route("/", get(handler))
/// }
///
/// // use this router for tests
/// fn test_app() -> Router {
/// app().layer(MockConnectInfo(SocketAddr::from(([0, 0, 0, 0], 1337))))
/// }
///
/// // #[tokio::test]
/// async fn some_test() {
/// let app = test_app();
///
/// let request = Request::new(Body::empty());
/// let response = app.oneshot(request).await.unwrap();
/// assert_eq!(response.status(), StatusCode::OK);
/// }
/// #
/// # #[tokio::main]
/// # async fn main() {
/// # some_test().await;
/// # }
/// ```
///
/// [`Router::into_make_service_with_connect_info`]: crate::Router::into_make_service_with_connect_info
// Doubles as both the layer (see `Layer` impl below) and the extension value
// that `ConnectInfo`'s extractor falls back to.
#[derive(Clone, Copy, Debug)]
pub struct MockConnectInfo<T>(pub T);
impl<S, T> Layer<S> for MockConnectInfo<T>
where
    T: Clone + Send + Sync + 'static,
{
    type Service = <Extension<Self> as Layer<S>>::Service;
    /// Stores a copy of `self` as a request extension so the [`ConnectInfo`]
    /// extractor can fall back to it.
    fn layer(&self, inner: S) -> Self::Service {
        let ext = Extension(self.clone());
        ext.layer(inner)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{
        extract::connect_info::Connected, routing::get, serve::IncomingStream, serve::Listener,
        test_helpers::TestClient, Router,
    };
    use tokio::net::{TcpListener, TcpStream};
    // The `PhantomData<fn() -> C>` marker must keep the make-service `Send`
    // even when the connect-info type is not.
    #[test]
    fn into_make_service_traits() {
        use crate::test_helpers::*;
        assert_send::<IntoMakeServiceWithConnectInfo<(), NotSendSync>>();
    }
    // Compile-only: custom `Connected` impls and custom listeners must compose
    // with `into_make_service_with_connect_info`. Never executed.
    #[allow(dead_code)]
    #[allow(clippy::todo)]
    fn connected_traits() {
        // Test that the `Connected` trait can be used with custom address and listener types.
        fn create_router() -> Router {
            todo!()
        }
        fn tcp_listener() -> TcpListener {
            todo!()
        }
        #[derive(Clone)]
        struct CustomAddr(SocketAddr);
        impl Connected<IncomingStream<'_, TcpListener>> for CustomAddr {
            fn connect_info(_stream: IncomingStream<'_, TcpListener>) -> Self {
                todo!()
            }
        }
        impl Connected<IncomingStream<'_, CustomListener>> for CustomAddr {
            fn connect_info(_stream: IncomingStream<'_, CustomListener>) -> Self {
                todo!()
            }
        }
        struct CustomListener {}
        impl Listener for CustomListener {
            type Io = TcpStream;
            type Addr = SocketAddr;
            async fn accept(&mut self) -> (Self::Io, Self::Addr) {
                todo!()
            }
            fn local_addr(&self) -> tokio::io::Result<Self::Addr> {
                todo!()
            }
        }
        fn custom_connected() {
            let router = create_router();
            let _ = crate::serve(
                tcp_listener(),
                router.into_make_service_with_connect_info::<CustomAddr>(),
            );
        }
        fn custom_listener() {
            let router = create_router();
            let _ = crate::serve(CustomListener {}, router.into_make_service());
        }
        fn custom_listener_with_connect() {
            let router = create_router();
            let _ = crate::serve(
                CustomListener {},
                router.into_make_service_with_connect_info::<SocketAddr>(),
            );
        }
        fn custom_listener_with_custom_connect() {
            let router = create_router();
            let _ = crate::serve(
                CustomListener {},
                router.into_make_service_with_connect_info::<CustomAddr>(),
            );
        }
    }
    // End-to-end: the extracted `SocketAddr` is the client's real address.
    #[crate::test]
    async fn socket_addr() {
        async fn handler(ConnectInfo(addr): ConnectInfo<SocketAddr>) -> String {
            format!("{addr}")
        }
        let listener = TcpListener::bind("127.0.0.1:0").await.unwrap();
        let addr = listener.local_addr().unwrap();
        // Signal once the server task has started before issuing the request.
        let (tx, rx) = tokio::sync::oneshot::channel();
        tokio::spawn(async move {
            let app = Router::new().route("/", get(handler));
            tx.send(()).unwrap();
            crate::serve(
                listener,
                app.into_make_service_with_connect_info::<SocketAddr>(),
            )
            .await;
        });
        rx.await.unwrap();
        let client = reqwest::Client::new();
        let res = client.get(format!("http://{addr}")).send().await.unwrap();
        let body = res.text().await.unwrap();
        assert!(body.starts_with("127.0.0.1:"));
    }
    // End-to-end with a user-defined connect-info type.
    #[crate::test]
    async fn custom() {
        #[derive(Clone, Debug)]
        struct MyConnectInfo {
            value: &'static str,
        }
        impl Connected<IncomingStream<'_, TcpListener>> for MyConnectInfo {
            fn connect_info(_target: IncomingStream<'_, TcpListener>) -> Self {
                Self {
                    value: "it worked!",
                }
            }
        }
        async fn handler(ConnectInfo(addr): ConnectInfo<MyConnectInfo>) -> &'static str {
            addr.value
        }
        let listener = TcpListener::bind("127.0.0.1:0").await.unwrap();
        let addr = listener.local_addr().unwrap();
        let (tx, rx) = tokio::sync::oneshot::channel();
        tokio::spawn(async move {
            let app = Router::new().route("/", get(handler));
            tx.send(()).unwrap();
            crate::serve(
                listener,
                app.into_make_service_with_connect_info::<MyConnectInfo>(),
            )
            .await;
        });
        rx.await.unwrap();
        let client = reqwest::Client::new();
        let res = client.get(format!("http://{addr}")).send().await.unwrap();
        let body = res.text().await.unwrap();
        assert_eq!(body, "it worked!");
    }
    // The mock layer alone supplies the connect info.
    #[crate::test]
    async fn mock_connect_info() {
        async fn handler(ConnectInfo(addr): ConnectInfo<SocketAddr>) -> String {
            format!("{addr}")
        }
        let app = Router::new()
            .route("/", get(handler))
            .layer(MockConnectInfo(SocketAddr::from(([0, 0, 0, 0], 1337))));
        let client = TestClient::new(app);
        let res = client.get("/").await;
        let body = res.text().await;
        assert!(body.starts_with("0.0.0.0:1337"));
    }
    // When both are present, the real connect info takes precedence over the
    // mock layer (the body shows the client's loopback address, not 0.0.0.0).
    #[crate::test]
    async fn both_mock_and_real_connect_info() {
        async fn handler(ConnectInfo(addr): ConnectInfo<SocketAddr>) -> String {
            format!("{addr}")
        }
        let listener = TcpListener::bind("127.0.0.1:0").await.unwrap();
        let addr = listener.local_addr().unwrap();
        tokio::spawn(async move {
            let app = Router::new()
                .route("/", get(handler))
                .layer(MockConnectInfo(SocketAddr::from(([0, 0, 0, 0], 1337))));
            crate::serve(
                listener,
                app.into_make_service_with_connect_info::<SocketAddr>(),
            )
            .await;
        });
        let client = reqwest::Client::new();
        let res = client.get(format!("http://{addr}")).send().await.unwrap();
        let body = res.text().await.unwrap();
        assert!(body.starts_with("127.0.0.1:"));
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/extract/raw_query.rs | axum/src/extract/raw_query.rs | use super::FromRequestParts;
use http::request::Parts;
use std::convert::Infallible;
/// Extractor that extracts the raw query string, without parsing it.
///
/// # Example
///
/// ```rust,no_run
/// use axum::{
///     extract::RawQuery,
///     routing::get,
///     Router,
/// };
///
/// async fn handler(RawQuery(query): RawQuery) {
///     // ...
/// }
///
/// let app = Router::new().route("/users", get(handler));
/// # let _: Router = app;
/// ```
// Holds `None` when the request URI has no query component at all.
#[derive(Debug)]
pub struct RawQuery(pub Option<String>);
impl<S> FromRequestParts<S> for RawQuery
where
    S: Send + Sync,
{
    type Rejection = Infallible;
    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        // Copy the query string verbatim; no percent-decoding or parsing is
        // performed, and a missing query component yields `None`.
        Ok(Self(parts.uri.query().map(str::to_owned)))
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/extract/multipart.rs | axum/src/extract/multipart.rs | //! Extractor that parses `multipart/form-data` requests commonly used with file uploads.
//!
//! See [`Multipart`] for more details.
use super::{FromRequest, Request};
use crate::body::Bytes;
use axum_core::{
__composite_rejection as composite_rejection, __define_rejection as define_rejection,
extract::OptionalFromRequest,
response::{IntoResponse, Response},
RequestExt,
};
use futures_core::Stream;
use http::{
header::{HeaderMap, CONTENT_TYPE},
StatusCode,
};
use std::{
error::Error,
fmt,
pin::Pin,
task::{Context, Poll},
};
/// Extractor that parses `multipart/form-data` requests (commonly used with file uploads).
///
/// ⚠️ Since extracting multipart form data from the request requires consuming the body, the
/// `Multipart` extractor must be *last* if there are multiple extractors in a handler.
/// See ["the order of extractors"][order-of-extractors]
///
/// [order-of-extractors]: crate::extract#the-order-of-extractors
///
/// # Example
///
/// ```rust,no_run
/// use axum::{
/// extract::Multipart,
/// routing::post,
/// Router,
/// };
/// use futures_util::stream::StreamExt;
///
/// async fn upload(mut multipart: Multipart) {
/// while let Some(mut field) = multipart.next_field().await.unwrap() {
/// let name = field.name().unwrap().to_string();
/// let data = field.bytes().await.unwrap();
///
/// println!("Length of `{}` is {} bytes", name, data.len());
/// }
/// }
///
/// let app = Router::new().route("/upload", post(upload));
/// # let _: Router = app;
/// ```
///
/// # Large Files
///
/// For security reasons, by default, `Multipart` limits the request body size to 2MB.
/// See [`DefaultBodyLimit`][default-body-limit] for how to configure this limit.
///
/// [default-body-limit]: crate::extract::DefaultBodyLimit
#[cfg_attr(docsrs, doc(cfg(feature = "multipart")))]
#[derive(Debug)]
pub struct Multipart {
    // `'static` because the extractor owns the request body stream outright.
    inner: multer::Multipart<'static>,
}
impl<S> FromRequest<S> for Multipart
where
    S: Send + Sync,
{
    type Rejection = MultipartRejection;
    async fn from_request(req: Request, _state: &S) -> Result<Self, Self::Rejection> {
        // Both a missing `content-type` header and an unparsable boundary are
        // reported as an invalid boundary.
        let content_type = content_type_str(req.headers()).ok_or(InvalidBoundary)?;
        let boundary = multer::parse_boundary(content_type).map_err(|_| InvalidBoundary)?;
        // Apply the configured body limit before handing the stream to multer.
        let body = req.with_limited_body().into_body();
        Ok(Self {
            inner: multer::Multipart::new(body.into_data_stream(), boundary),
        })
    }
}
impl<S> OptionalFromRequest<S> for Multipart
where
S: Send + Sync,
{
type Rejection = MultipartRejection;
async fn from_request(req: Request, _state: &S) -> Result<Option<Self>, Self::Rejection> {
let Some(content_type) = content_type_str(req.headers()) else {
return Ok(None);
};
match multer::parse_boundary(content_type) {
Ok(boundary) => {
let stream = req.with_limited_body().into_body();
let multipart = multer::Multipart::new(stream.into_data_stream(), boundary);
Ok(Some(Self { inner: multipart }))
}
Err(multer::Error::NoMultipart) => Ok(None),
Err(_) => Err(MultipartRejection::InvalidBoundary(InvalidBoundary)),
}
}
}
impl Multipart {
    /// Yields the next [`Field`] if available.
    pub async fn next_field(&mut self) -> Result<Option<Field<'_>>, MultipartError> {
        let next = self.inner.next_field().await;
        let field = next.map_err(MultipartError::from_multer)?;
        // Tie the returned field to `&mut self` so only one `Field` can be
        // live at a time, which multer requires.
        Ok(field.map(|inner| Field {
            inner,
            _multipart: self,
        }))
    }
}
/// A single field in a multipart stream.
#[derive(Debug)]
pub struct Field<'a> {
    // The underlying multer field that all accessor methods delegate to.
    inner: multer::Field<'static>,
    // multer requires there to only be one live `multer::Field` at any point. This enforces that
    // statically, which multer does not do, it returns an error instead.
    _multipart: &'a mut Multipart,
}
impl Stream for Field<'_> {
    type Item = Result<Bytes, MultipartError>;
    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        // Delegate to the underlying multer field, translating its error type.
        let inner = Pin::new(&mut self.inner);
        inner.poll_next(cx).map_err(MultipartError::from_multer)
    }
}
// All methods below are thin delegations to the underlying `multer::Field`,
// with errors translated into `MultipartError`.
impl Field<'_> {
    /// The field name found in the
    /// [`Content-Disposition`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Disposition)
    /// header.
    #[must_use]
    pub fn name(&self) -> Option<&str> {
        self.inner.name()
    }
    /// The file name found in the
    /// [`Content-Disposition`](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Disposition)
    /// header.
    #[must_use]
    pub fn file_name(&self) -> Option<&str> {
        self.inner.file_name()
    }
    /// Get the [content type](https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Content-Type) of the field.
    #[must_use]
    pub fn content_type(&self) -> Option<&str> {
        self.inner.content_type().map(|m| m.as_ref())
    }
    /// Get a map of headers as [`HeaderMap`].
    #[must_use]
    pub fn headers(&self) -> &HeaderMap {
        self.inner.headers()
    }
    /// Get the full data of the field as [`Bytes`].
    pub async fn bytes(self) -> Result<Bytes, MultipartError> {
        self.inner
            .bytes()
            .await
            .map_err(MultipartError::from_multer)
    }
    /// Get the full field data as text.
    pub async fn text(self) -> Result<String, MultipartError> {
        self.inner.text().await.map_err(MultipartError::from_multer)
    }
    /// Stream a chunk of the field data.
    ///
    /// When the field data has been exhausted, this will return [`None`].
    ///
    /// Note this does the same thing as `Field`'s [`Stream`] implementation.
    ///
    /// # Example
    ///
    /// ```
    /// use axum::{
    ///     extract::Multipart,
    ///     routing::post,
    ///     response::IntoResponse,
    ///     http::StatusCode,
    ///     Router,
    /// };
    ///
    /// async fn upload(mut multipart: Multipart) -> Result<(), (StatusCode, String)> {
    ///     while let Some(mut field) = multipart
    ///         .next_field()
    ///         .await
    ///         .map_err(|err| (StatusCode::BAD_REQUEST, err.to_string()))?
    ///     {
    ///         while let Some(chunk) = field
    ///             .chunk()
    ///             .await
    ///             .map_err(|err| (StatusCode::BAD_REQUEST, err.to_string()))?
    ///         {
    ///             println!("received {} bytes", chunk.len());
    ///         }
    ///     }
    ///
    ///     Ok(())
    /// }
    ///
    /// let app = Router::new().route("/upload", post(upload));
    /// # let _: Router = app;
    /// ```
    pub async fn chunk(&mut self) -> Result<Option<Bytes>, MultipartError> {
        self.inner
            .chunk()
            .await
            .map_err(MultipartError::from_multer)
    }
}
/// Errors associated with parsing `multipart/form-data` requests.
#[derive(Debug)]
pub struct MultipartError {
    // The underlying multer error; exposed via `Error::source` and used to
    // derive the rejection's status code and body text.
    source: multer::Error,
}
impl MultipartError {
    /// Wrap a raw [`multer::Error`].
    fn from_multer(multer: multer::Error) -> Self {
        Self { source: multer }
    }
    /// Get the response body text used for this rejection.
    #[must_use]
    pub fn body_text(&self) -> String {
        self.source.to_string()
    }
    /// Get the status code used for this rejection.
    #[must_use]
    pub fn status(&self) -> http::StatusCode {
        status_code_from_multer_error(&self.source)
    }
}
/// Map a [`multer::Error`] to the HTTP status code reported to the client.
fn status_code_from_multer_error(err: &multer::Error) -> StatusCode {
    match err {
        // Malformed or incomplete multipart syntax is the client's fault.
        multer::Error::UnknownField { .. }
        | multer::Error::IncompleteFieldData { .. }
        | multer::Error::IncompleteHeaders
        | multer::Error::ReadHeaderFailed(..)
        | multer::Error::DecodeHeaderName { .. }
        | multer::Error::DecodeContentType(..)
        | multer::Error::NoBoundary
        | multer::Error::DecodeHeaderValue { .. }
        | multer::Error::NoMultipart
        | multer::Error::IncompleteStream => StatusCode::BAD_REQUEST,
        // Size-limit violations map to 413.
        multer::Error::FieldSizeExceeded { .. } | multer::Error::StreamSizeExceeded { .. } => {
            StatusCode::PAYLOAD_TOO_LARGE
        }
        multer::Error::StreamReadFailed(err) => {
            // The read error may wrap another multer error; recurse on it.
            if let Some(err) = err.downcast_ref::<multer::Error>() {
                return status_code_from_multer_error(err);
            }
            // A length-limit error from the limited request body also means
            // the payload was too large.
            if err
                .downcast_ref::<crate::Error>()
                .and_then(|err| err.source())
                .and_then(|err| err.downcast_ref::<http_body_util::LengthLimitError>())
                .is_some()
            {
                return StatusCode::PAYLOAD_TOO_LARGE;
            }
            StatusCode::INTERNAL_SERVER_ERROR
        }
        // Anything else is treated as a server-side failure.
        _ => StatusCode::INTERNAL_SERVER_ERROR,
    }
}
impl fmt::Display for MultipartError {
    /// Deliberately generic message; the detailed cause is available via
    /// `Error::source`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str("Error parsing `multipart/form-data` request")
    }
}
impl std::error::Error for MultipartError {
    // Expose the underlying multer error as the source for error-chain walkers.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        Some(&self.source)
    }
}
impl IntoResponse for MultipartError {
    /// Builds the rejection response, logging it first via axum's shared
    /// rejection-logging hook.
    fn into_response(self) -> Response {
        // Compute status once and reuse it for both the log and the response.
        let status = self.status();
        let body = self.body_text();
        axum_core::__log_rejection!(
            rejection_type = Self,
            body_text = body,
            status = status,
        );
        (status, body).into_response()
    }
}
/// Fetch the `content-type` header as a `&str`, treating a missing header or
/// a non-UTF-8 value as absent.
fn content_type_str(headers: &HeaderMap) -> Option<&str> {
    let value = headers.get(CONTENT_TYPE)?;
    value.to_str().ok()
}
// Macro-generated rejection enum; the `?` conversions above rely on the
// `From<InvalidBoundary>` impl it derives.
composite_rejection! {
    /// Rejection used for [`Multipart`].
    ///
    /// Contains one variant for each way the [`Multipart`] extractor can fail.
    pub enum MultipartRejection {
        InvalidBoundary,
    }
}
// Macro-generated unit rejection with the given status code and body text.
define_rejection! {
    #[status = BAD_REQUEST]
    #[body = "Invalid `boundary` for `multipart/form-data` request"]
    /// Rejection type used if the `boundary` in a `multipart/form-data` is
    /// missing or invalid.
    pub struct InvalidBoundary;
}
#[cfg(test)]
mod tests {
    use axum_core::extract::DefaultBodyLimit;
    use super::*;
    use crate::{routing::post, test_helpers::*, Router};
    // Field metadata (file name, content type, extra headers) must round-trip
    // through a real multipart upload.
    #[crate::test]
    async fn content_type_with_encoding() {
        const BYTES: &[u8] = "<!doctype html><title>🦀</title>".as_bytes();
        const FILE_NAME: &str = "index.html";
        const CONTENT_TYPE: &str = "text/html; charset=utf-8";
        async fn handle(mut multipart: Multipart) -> impl IntoResponse {
            let field = multipart.next_field().await.unwrap().unwrap();
            assert_eq!(field.file_name().unwrap(), FILE_NAME);
            assert_eq!(field.content_type().unwrap(), CONTENT_TYPE);
            assert_eq!(field.headers()["foo"], "bar");
            assert_eq!(field.bytes().await.unwrap(), BYTES);
            assert!(multipart.next_field().await.unwrap().is_none());
        }
        let app = Router::new().route("/", post(handle));
        let client = TestClient::new(app);
        let form = reqwest::multipart::Form::new().part(
            "file",
            reqwest::multipart::Part::bytes(BYTES)
                .file_name(FILE_NAME)
                .mime_str(CONTENT_TYPE)
                .unwrap()
                .headers(reqwest::header::HeaderMap::from_iter([(
                    reqwest::header::HeaderName::from_static("foo"),
                    reqwest::header::HeaderValue::from_static("bar"),
                )])),
        );
        client.post("/").multipart(form).await;
    }
    // No need for this to be a #[test], we just want to make sure it compiles
    fn _multipart_from_request_limited() {
        async fn handler(_: Multipart) {}
        let _app: Router = Router::new()
            .route("/", post(handler))
            .layer(tower_http::limit::RequestBodyLimitLayer::new(1024));
    }
    // A body-limit violation must surface as 413, not a generic 500.
    #[crate::test]
    async fn body_too_large() {
        const BYTES: &[u8] = "<!doctype html><title>🦀</title>".as_bytes();
        async fn handle(mut multipart: Multipart) -> Result<(), MultipartError> {
            while let Some(field) = multipart.next_field().await? {
                field.bytes().await?;
            }
            Ok(())
        }
        // Limit is set just below the payload size so the upload must fail.
        let app = Router::new()
            .route("/", post(handle))
            .layer(DefaultBodyLimit::max(BYTES.len() - 1));
        let client = TestClient::new(app);
        let form =
            reqwest::multipart::Form::new().part("file", reqwest::multipart::Part::bytes(BYTES));
        let res = client.post("/").multipart(form).await;
        assert_eq!(res.status(), StatusCode::PAYLOAD_TOO_LARGE);
    }
    // `Option<Multipart>` extracts `None` for non-multipart requests instead
    // of rejecting them.
    #[crate::test]
    async fn optional_multipart() {
        const BYTES: &[u8] = "<!doctype html><title>🦀</title>".as_bytes();
        async fn handle(multipart: Option<Multipart>) -> Result<StatusCode, MultipartError> {
            if let Some(mut multipart) = multipart {
                while let Some(field) = multipart.next_field().await? {
                    field.bytes().await?;
                }
                Ok(StatusCode::OK)
            } else {
                Ok(StatusCode::NO_CONTENT)
            }
        }
        let app = Router::new().route("/", post(handle));
        let client = TestClient::new(app);
        let form =
            reqwest::multipart::Form::new().part("file", reqwest::multipart::Part::bytes(BYTES));
        let res = client.post("/").multipart(form).await;
        assert_eq!(res.status(), StatusCode::OK);
        let res = client.post("/").await;
        assert_eq!(res.status(), StatusCode::NO_CONTENT);
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/extract/state.rs | axum/src/extract/state.rs | use axum_core::extract::{FromRef, FromRequestParts};
use http::request::Parts;
use std::{
convert::Infallible,
ops::{Deref, DerefMut},
};
/// Extractor for state.
///
/// See ["Accessing state in middleware"][state-from-middleware] for how to
/// access state in middleware.
///
/// State is global and used in every request a router with state receives.
/// For accessing data derived from requests, such as authorization data, see [`Extension`].
///
/// [state-from-middleware]: crate::middleware#accessing-state-in-middleware
/// [`Extension`]: crate::Extension
///
/// # With `Router`
///
/// ```
/// use axum::{Router, routing::get, extract::State};
///
/// // the application state
/// //
/// // here you can put configuration, database connection pools, or whatever
/// // state you need
/// #[derive(Clone)]
/// struct AppState {}
///
/// let state = AppState {};
///
/// // create a `Router` that holds our state
/// let app = Router::new()
/// .route("/", get(handler))
/// // provide the state so the router can access it
/// .with_state(state);
///
/// async fn handler(
/// // access the state via the `State` extractor
/// // extracting a state of the wrong type results in a compile error
/// State(state): State<AppState>,
/// ) {
/// // use `state`...
/// }
/// # let _: axum::Router = app;
/// ```
///
/// Note that `State` is an extractor, so be sure to put it before any body
/// extractors, see ["the order of extractors"][order-of-extractors].
///
/// [order-of-extractors]: crate::extract#the-order-of-extractors
///
/// ## Combining stateful routers
///
/// Multiple [`Router`]s can be combined with [`Router::nest`] or [`Router::merge`]
/// When combining [`Router`]s with one of these methods, the [`Router`]s must have
/// the same state type. Generally, this can be inferred automatically:
///
/// ```
/// use axum::{Router, routing::get, extract::State};
///
/// #[derive(Clone)]
/// struct AppState {}
///
/// let state = AppState {};
///
/// // create a `Router` that will be nested within another
/// let api = Router::new()
/// .route("/posts", get(posts_handler));
///
/// let app = Router::new()
/// .nest("/api", api)
/// .with_state(state);
///
/// async fn posts_handler(State(state): State<AppState>) {
/// // use `state`...
/// }
/// # let _: axum::Router = app;
/// ```
///
/// However, if you are composing [`Router`]s that are defined in separate scopes,
/// you may need to annotate the [`State`] type explicitly:
///
/// ```
/// use axum::{Router, routing::get, extract::State};
///
/// #[derive(Clone)]
/// struct AppState {}
///
/// fn make_app() -> Router {
/// let state = AppState {};
///
/// Router::new()
/// .nest("/api", make_api())
/// .with_state(state) // the outer Router's state is inferred
/// }
///
/// // the inner Router must specify its state type to compose with the
/// // outer router
/// fn make_api() -> Router<AppState> {
/// Router::new()
/// .route("/posts", get(posts_handler))
/// }
///
/// async fn posts_handler(State(state): State<AppState>) {
/// // use `state`...
/// }
/// # let _: axum::Router = make_app();
/// ```
///
/// In short, a [`Router`]'s generic state type defaults to `()`
/// (no state) unless [`Router::with_state`] is called or the value
/// of the generic type is given explicitly.
///
/// [`Router`]: crate::Router
/// [`Router::merge`]: crate::Router::merge
/// [`Router::nest`]: crate::Router::nest
/// [`Router::with_state`]: crate::Router::with_state
///
/// # With `MethodRouter`
///
/// ```
/// use axum::{routing::get, extract::State};
///
/// #[derive(Clone)]
/// struct AppState {}
///
/// let state = AppState {};
///
/// let method_router_with_state = get(handler)
/// // provide the state so the handler can access it
/// .with_state(state);
/// # let _: axum::routing::MethodRouter = method_router_with_state;
///
/// async fn handler(State(state): State<AppState>) {
/// // use `state`...
/// }
/// ```
///
/// # With `Handler`
///
/// ```
/// use axum::{routing::get, handler::Handler, extract::State};
///
/// #[derive(Clone)]
/// struct AppState {}
///
/// let state = AppState {};
///
/// async fn handler(State(state): State<AppState>) {
/// // use `state`...
/// }
///
/// // provide the state so the handler can access it
/// let handler_with_state = handler.with_state(state);
///
/// # async {
/// let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await.unwrap();
/// axum::serve(listener, handler_with_state.into_make_service()).await;
/// # };
/// ```
///
/// # Substates
///
/// [`State`] only allows a single state type but you can use [`FromRef`] to extract "substates":
///
/// ```
/// use axum::{Router, routing::get, extract::{State, FromRef}};
///
/// // the application state
/// #[derive(Clone)]
/// struct AppState {
/// // that holds some api specific state
/// api_state: ApiState,
/// }
///
/// // the api specific state
/// #[derive(Clone)]
/// struct ApiState {}
///
/// // support converting an `AppState` in an `ApiState`
/// impl FromRef<AppState> for ApiState {
/// fn from_ref(app_state: &AppState) -> ApiState {
/// app_state.api_state.clone()
/// }
/// }
///
/// let state = AppState {
/// api_state: ApiState {},
/// };
///
/// let app = Router::new()
/// .route("/", get(handler))
/// .route("/api/users", get(api_users))
/// .with_state(state);
///
/// async fn api_users(
/// // access the api specific state
/// State(api_state): State<ApiState>,
/// ) {
/// }
///
/// async fn handler(
/// // we can still access to top level state
/// State(state): State<AppState>,
/// ) {
/// }
/// # let _: axum::Router = app;
/// ```
///
/// For convenience `FromRef` can also be derived using `#[derive(FromRef)]`.
///
/// # For library authors
///
/// If you're writing a library that has an extractor that needs state, this is the recommended way
/// to do it:
///
/// ```rust
/// use axum_core::extract::{FromRequestParts, FromRef};
/// use http::request::Parts;
/// use std::convert::Infallible;
///
/// // the extractor your library provides
/// struct MyLibraryExtractor;
///
/// impl<S> FromRequestParts<S> for MyLibraryExtractor
/// where
/// // keep `S` generic but require that it can produce a `MyLibraryState`
/// // this means users will have to implement `FromRef<UserState> for MyLibraryState`
/// MyLibraryState: FromRef<S>,
/// S: Send + Sync,
/// {
/// type Rejection = Infallible;
///
/// async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
/// // get a `MyLibraryState` from a reference to the state
/// let state = MyLibraryState::from_ref(state);
///
/// // ...
/// # todo!()
/// }
/// }
///
/// // the state your library needs
/// struct MyLibraryState {
/// // ...
/// }
/// ```
///
/// # Shared mutable state
///
/// [As state is global within a `Router`][global] you can't directly get a mutable reference to
/// the state.
///
/// The most basic solution is to use an `Arc<Mutex<_>>`. Which kind of mutex you need depends on
/// your use case. See [the tokio docs] for more details.
///
/// Note that holding a locked `std::sync::Mutex` across `.await` points will result in `!Send`
/// futures which are incompatible with axum. If you need to hold a mutex across `.await` points,
/// consider using a `tokio::sync::Mutex` instead.
///
/// ## Example
///
/// ```
/// use axum::{Router, routing::get, extract::State};
/// use std::sync::{Arc, Mutex};
///
/// #[derive(Clone)]
/// struct AppState {
/// data: Arc<Mutex<String>>,
/// }
///
/// async fn handler(State(state): State<AppState>) {
/// {
/// let mut data = state.data.lock().expect("mutex was poisoned");
/// *data = "updated foo".to_owned();
/// }
///
/// // ...
/// }
///
/// let state = AppState {
/// data: Arc::new(Mutex::new("foo".to_owned())),
/// };
///
/// let app = Router::new()
/// .route("/", get(handler))
/// .with_state(state);
/// # let _: Router = app;
/// ```
///
/// [global]: crate::Router::with_state
/// [the tokio docs]: https://docs.rs/tokio/1.25.0/tokio/sync/struct.Mutex.html#which-kind-of-mutex-should-you-use
// Thin newtype; the `FromRequestParts` impl below fills it via `FromRef`.
#[derive(Debug, Default, Clone, Copy)]
pub struct State<S>(pub S);
impl<OuterState, InnerState> FromRequestParts<OuterState> for State<InnerState>
where
    InnerState: FromRef<OuterState>,
    OuterState: Send + Sync,
{
    type Rejection = Infallible;
    async fn from_request_parts(
        _parts: &mut Parts,
        state: &OuterState,
    ) -> Result<Self, Self::Rejection> {
        // Extracting state never inspects the request; it only converts the
        // router's state (or a substate of it) via `FromRef`, so it can't fail.
        Ok(Self(InnerState::from_ref(state)))
    }
}
// Let `State<S>` be used anywhere `&S` is expected.
impl<S> Deref for State<S> {
    type Target = S;
    fn deref(&self) -> &Self::Target {
        &self.0
    }
}
// Mutable counterpart of the `Deref` impl above.
impl<S> DerefMut for State<S> {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.0
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/extract/nested_path.rs | axum/src/extract/nested_path.rs | use std::{
sync::Arc,
task::{Context, Poll},
};
use crate::extract::Request;
use axum_core::extract::FromRequestParts;
use http::request::Parts;
use tower_layer::{layer_fn, Layer};
use tower_service::Service;
use super::rejection::NestedPathRejection;
/// Access the path the matched the route is nested at.
///
/// This can for example be used when doing redirects.
///
/// # Example
///
/// ```
/// use axum::{
/// Router,
/// extract::NestedPath,
/// routing::get,
/// };
///
/// let api = Router::new().route(
/// "/users",
/// get(|path: NestedPath| async move {
/// // `path` will be "/api" because that's what this
/// // router is nested at when we build `app`
/// let path = path.as_str();
/// })
/// );
///
/// let app = Router::new().nest("/api", api);
/// # let _: Router = app;
/// ```
#[derive(Debug, Clone)]
// Cheap to clone: the path is a shared `Arc<str>` (refcount bump only).
pub struct NestedPath(Arc<str>);
impl NestedPath {
    /// Returns a `str` representation of the path.
    #[must_use]
    pub fn as_str(&self) -> &str {
        // `Arc<str>` derefs to the underlying string slice.
        &self.0
    }
}
#[diagnostic::do_not_recommend] // pretty niche type
impl<S> FromRequestParts<S> for NestedPath
where
    S: Send + Sync,
{
    type Rejection = NestedPathRejection;

    /// Pulls the nested path out of the request extensions, where the
    /// `SetNestedPath` middleware stored it; rejects if absent.
    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        parts
            .extensions
            .get::<Self>()
            .cloned()
            .ok_or(NestedPathRejection)
    }
}
#[derive(Clone)]
pub(crate) struct SetNestedPath<S> {
    /// The wrapped inner service.
    inner: S,
    /// The nesting prefix this layer records into the request extensions.
    path: Arc<str>,
}
impl<S> SetNestedPath<S> {
    /// Creates a `tower` layer that wraps a service in `SetNestedPath`,
    /// sharing `path` as the prefix the inner service is nested at.
    pub(crate) fn layer(path: &str) -> impl Layer<S, Service = Self> + Clone {
        let shared: Arc<str> = Arc::from(path);
        layer_fn(move |inner| Self {
            inner,
            path: Arc::clone(&shared),
        })
    }
}
impl<S, B> Service<Request<B>> for SetNestedPath<S>
where
    S: Service<Request<B>>,
{
    type Response = S::Response;
    type Error = S::Error;
    type Future = S::Future;

    #[inline]
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        self.inner.poll_ready(cx)
    }

    fn call(&mut self, mut req: Request<B>) -> Self::Future {
        // Outer `nest` layers run first, so either extend the prefix they
        // recorded or install this layer's path as the initial prefix.
        match req.extensions_mut().get_mut::<NestedPath>() {
            Some(prev) => {
                prev.0 = if prev.as_str() == "/" {
                    Arc::clone(&self.path)
                } else {
                    format!("{}{}", prev.as_str().trim_end_matches('/'), self.path).into()
                };
            }
            None => {
                req.extensions_mut()
                    .insert(NestedPath(Arc::clone(&self.path)));
            }
        }

        self.inner.call(req)
    }
}
#[cfg(test)]
mod tests {
    use axum_core::response::Response;
    use http::StatusCode;

    use crate::{
        extract::{NestedPath, Request},
        middleware::{from_fn, Next},
        routing::get,
        test_helpers::*,
        Router,
    };

    // Single `nest` call: the extractor sees exactly that prefix.
    #[crate::test]
    async fn one_level_of_nesting() {
        let api = Router::new().route(
            "/users",
            get(|nested_path: NestedPath| {
                assert_eq!(nested_path.as_str(), "/api");
                async {}
            }),
        );

        let app = Router::new().nest("/api", api);

        let client = TestClient::new(app);

        let res = client.get("/api/users").await;
        assert_eq!(res.status(), StatusCode::OK);
    }

    // A trailing slash on the nest prefix is preserved as-is.
    #[crate::test]
    async fn one_level_of_nesting_with_trailing_slash() {
        let api = Router::new().route(
            "/users",
            get(|nested_path: NestedPath| {
                assert_eq!(nested_path.as_str(), "/api/");
                async {}
            }),
        );

        let app = Router::new().nest("/api/", api);

        let client = TestClient::new(app);

        let res = client.get("/api/users").await;
        assert_eq!(res.status(), StatusCode::OK);
    }

    // Nested `nest` calls concatenate their prefixes.
    #[crate::test]
    async fn two_levels_of_nesting() {
        let api = Router::new().route(
            "/users",
            get(|nested_path: NestedPath| {
                assert_eq!(nested_path.as_str(), "/api/v2");
                async {}
            }),
        );

        let app = Router::new().nest("/api", Router::new().nest("/v2", api));

        let client = TestClient::new(app);

        let res = client.get("/api/v2/users").await;
        assert_eq!(res.status(), StatusCode::OK);
    }

    // When concatenating, an intermediate trailing slash is trimmed so the
    // combined prefix doesn't contain "//".
    #[crate::test]
    async fn two_levels_of_nesting_with_trailing_slash() {
        let api = Router::new().route(
            "/users",
            get(|nested_path: NestedPath| {
                assert_eq!(nested_path.as_str(), "/api/v2");
                async {}
            }),
        );

        let app = Router::new().nest("/api/", Router::new().nest("/v2", api));

        let client = TestClient::new(app);

        let res = client.get("/api/v2/users").await;
        assert_eq!(res.status(), StatusCode::OK);
    }

    // The extractor also works for requests handled by a nested fallback.
    #[crate::test]
    async fn in_fallbacks() {
        let api = Router::new().fallback(get(|nested_path: NestedPath| {
            assert_eq!(nested_path.as_str(), "/api");
            async {}
        }));

        let app = Router::new().nest("/api", api);

        let client = TestClient::new(app);

        let res = client.get("/api/doesnt-exist").await;
        assert_eq!(res.status(), StatusCode::OK);
    }

    // The nested path is visible to middleware layered inside the nested router.
    #[crate::test]
    async fn in_middleware() {
        async fn middleware(nested_path: NestedPath, req: Request, next: Next) -> Response {
            assert_eq!(nested_path.as_str(), "/api");
            next.run(req).await
        }

        let api = Router::new()
            .route("/users", get(|| async {}))
            .layer(from_fn(middleware));

        let app = Router::new().nest("/api", api);

        let client = TestClient::new(app);

        let res = client.get("/api/users").await;
        assert_eq!(res.status(), StatusCode::OK);
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/extract/raw_form.rs | axum/src/extract/raw_form.rs | use axum_core::extract::{FromRequest, Request};
use bytes::Bytes;
use http::Method;
use super::{
has_content_type,
rejection::{InvalidFormContentType, RawFormRejection},
};
/// Extractor that extracts raw form requests.
///
/// For `GET` requests it will extract the raw query. For other methods it extracts the raw
/// `application/x-www-form-urlencoded` encoded request body.
///
/// # Example
///
/// ```rust,no_run
/// use axum::{
/// extract::RawForm,
/// routing::get,
/// Router
/// };
///
/// async fn handler(RawForm(form): RawForm) {}
///
/// let app = Router::new().route("/", get(handler));
/// # let _: Router = app;
/// ```
#[derive(Debug)]
// Holds the still-urlencoded form payload, exactly as received.
pub struct RawForm(pub Bytes);
impl<S> FromRequest<S> for RawForm
where
    S: Send + Sync,
{
    type Rejection = RawFormRejection;

    /// For `GET` the form is read from the URI query string (an absent query
    /// yields an empty payload); for all other methods the body is used and
    /// must have an `application/x-www-form-urlencoded` content type.
    async fn from_request(req: Request, state: &S) -> Result<Self, Self::Rejection> {
        if req.method() == Method::GET {
            let bytes = req
                .uri()
                .query()
                .map(|query| Bytes::copy_from_slice(query.as_bytes()))
                .unwrap_or_default();
            Ok(Self(bytes))
        } else {
            if !has_content_type(req.headers(), &mime::APPLICATION_WWW_FORM_URLENCODED) {
                return Err(InvalidFormContentType.into());
            }
            Ok(Self(Bytes::from_request(req, state).await?))
        }
    }
}
#[cfg(test)]
mod tests {
    use axum_core::body::Body;
    use http::{header::CONTENT_TYPE, Request};

    use super::{InvalidFormContentType, RawForm, RawFormRejection};
    use crate::extract::FromRequest;

    // Helper: asserts that a GET request's query string is extracted verbatim.
    async fn check_query(uri: &str, value: &[u8]) {
        let req = Request::builder().uri(uri).body(Body::empty()).unwrap();

        assert_eq!(RawForm::from_request(req, &()).await.unwrap().0, value);
    }

    // Helper: asserts that a POST body with the correct content type is
    // extracted verbatim.
    async fn check_body(body: &'static [u8]) {
        let req = Request::post("http://example.com/test")
            .header(CONTENT_TYPE, mime::APPLICATION_WWW_FORM_URLENCODED.as_ref())
            .body(Body::from(body))
            .unwrap();

        assert_eq!(RawForm::from_request(req, &()).await.unwrap().0, body);
    }

    #[crate::test]
    async fn test_from_query() {
        check_query("http://example.com/test", b"").await;

        check_query("http://example.com/test?page=0&size=10", b"page=0&size=10").await;
    }

    #[crate::test]
    async fn test_from_body() {
        check_body(b"").await;

        check_body(b"username=user&password=secure%20password").await;
    }

    // A non-GET request without the urlencoded content type must be rejected.
    #[crate::test]
    async fn test_incorrect_content_type() {
        let req = Request::post("http://example.com/test")
            .body(Body::from("page=0&size=10"))
            .unwrap();

        assert!(matches!(
            RawForm::from_request(req, &()).await.unwrap_err(),
            RawFormRejection::InvalidFormContentType(InvalidFormContentType)
        ))
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/extract/mod.rs | axum/src/extract/mod.rs | #![doc = include_str!("../docs/extract.md")]
use http::header::{self, HeaderMap};
#[cfg(feature = "tokio")]
pub mod connect_info;
pub mod path;
pub mod rejection;
#[cfg(feature = "ws")]
pub mod ws;
pub(crate) mod nested_path;
#[cfg(feature = "original-uri")]
mod original_uri;
mod raw_form;
mod raw_query;
mod state;
#[doc(inline)]
pub use axum_core::extract::{
DefaultBodyLimit, FromRef, FromRequest, FromRequestParts, OptionalFromRequest,
OptionalFromRequestParts, Request,
};
#[cfg(feature = "macros")]
pub use axum_macros::{FromRef, FromRequest, FromRequestParts};
#[doc(inline)]
pub use self::{
nested_path::NestedPath,
path::{Path, RawPathParams},
raw_form::RawForm,
raw_query::RawQuery,
state::State,
};
#[doc(inline)]
#[cfg(feature = "tokio")]
pub use self::connect_info::ConnectInfo;
#[doc(no_inline)]
#[cfg(feature = "json")]
pub use crate::Json;
#[doc(no_inline)]
pub use crate::Extension;
#[cfg(feature = "form")]
#[doc(no_inline)]
pub use crate::form::Form;
#[cfg(feature = "matched-path")]
pub(crate) mod matched_path;
#[cfg(feature = "matched-path")]
#[doc(inline)]
pub use self::matched_path::MatchedPath;
#[cfg(feature = "multipart")]
pub mod multipart;
#[cfg(feature = "multipart")]
#[doc(inline)]
pub use self::multipart::Multipart;
#[cfg(feature = "query")]
mod query;
#[cfg(feature = "query")]
#[doc(inline)]
pub use self::query::Query;
#[cfg(feature = "original-uri")]
#[doc(inline)]
pub use self::original_uri::OriginalUri;
#[cfg(feature = "ws")]
#[doc(inline)]
pub use self::ws::WebSocketUpgrade;
// this is duplicated in `axum-extra/src/extract/form.rs`
// this is duplicated in `axum-extra/src/extract/form.rs`
/// Returns `true` if the request's `Content-Type` header is present, is valid
/// ASCII, and starts with `expected_content_type` (prefix match, so charset
/// parameters like `; charset=utf-8` are accepted).
pub(super) fn has_content_type(headers: &HeaderMap, expected_content_type: &mime::Mime) -> bool {
    headers
        .get(header::CONTENT_TYPE)
        .and_then(|value| value.to_str().ok())
        .map_or(false, |content_type| {
            content_type.starts_with(expected_content_type.as_ref())
        })
}
#[cfg(test)]
mod tests {
    use crate::{routing::get, test_helpers::*, Router};

    // Sanity check: a `String` extractor consumes the request body whole.
    #[crate::test]
    async fn consume_body() {
        let app = Router::new().route("/", get(|body: String| async { body }));

        let client = TestClient::new(app);
        let res = client.get("/").body("foo").await;
        let body = res.text().await;

        assert_eq!(body, "foo");
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/extract/ws.rs | axum/src/extract/ws.rs | //! Handle WebSocket connections.
//!
//! # Example
//!
//! ```
//! use axum::{
//! extract::ws::{WebSocketUpgrade, WebSocket},
//! routing::any,
//! response::{IntoResponse, Response},
//! Router,
//! };
//!
//! let app = Router::new().route("/ws", any(handler));
//!
//! async fn handler(ws: WebSocketUpgrade) -> Response {
//! ws.on_upgrade(handle_socket)
//! }
//!
//! async fn handle_socket(mut socket: WebSocket) {
//! while let Some(msg) = socket.recv().await {
//! let msg = if let Ok(msg) = msg {
//! msg
//! } else {
//! // client disconnected
//! return;
//! };
//!
//! if socket.send(msg).await.is_err() {
//! // client disconnected
//! return;
//! }
//! }
//! }
//! # let _: Router = app;
//! ```
//!
//! # Passing data and/or state to an `on_upgrade` callback
//!
//! ```
//! use axum::{
//! extract::{ws::{WebSocketUpgrade, WebSocket}, State},
//! response::Response,
//! routing::any,
//! Router,
//! };
//!
//! #[derive(Clone)]
//! struct AppState {
//! // ...
//! }
//!
//! async fn handler(ws: WebSocketUpgrade, State(state): State<AppState>) -> Response {
//! ws.on_upgrade(|socket| handle_socket(socket, state))
//! }
//!
//! async fn handle_socket(socket: WebSocket, state: AppState) {
//! // ...
//! }
//!
//! let app = Router::new()
//! .route("/ws", any(handler))
//! .with_state(AppState { /* ... */ });
//! # let _: Router = app;
//! ```
//!
//! # Read and write concurrently
//!
//! If you need to read and write concurrently from a [`WebSocket`] you can use
//! [`StreamExt::split`]:
//!
//! ```rust,no_run
//! use axum::{Error, extract::ws::{WebSocket, Message}};
//! use futures_util::{sink::SinkExt, stream::{StreamExt, SplitSink, SplitStream}};
//!
//! async fn handle_socket(mut socket: WebSocket) {
//! let (mut sender, mut receiver) = socket.split();
//!
//! tokio::spawn(write(sender));
//! tokio::spawn(read(receiver));
//! }
//!
//! async fn read(receiver: SplitStream<WebSocket>) {
//! // ...
//! }
//!
//! async fn write(sender: SplitSink<WebSocket, Message>) {
//! // ...
//! }
//! ```
//!
//! [`StreamExt::split`]: https://docs.rs/futures/0.3.17/futures/stream/trait.StreamExt.html#method.split
use self::rejection::*;
use super::FromRequestParts;
use crate::{body::Bytes, response::Response, Error};
use axum_core::body::Body;
use futures_core::{FusedStream, Stream};
use futures_sink::Sink;
use futures_util::{sink::SinkExt, stream::StreamExt};
use http::{
header::{self, HeaderMap, HeaderName, HeaderValue},
request::Parts,
Method, StatusCode, Version,
};
use hyper_util::rt::TokioIo;
use sha1::{Digest, Sha1};
use std::{
borrow::Cow,
future::Future,
pin::Pin,
task::{ready, Context, Poll},
};
use tokio_tungstenite::{
tungstenite::{
self as ts,
protocol::{self, WebSocketConfig},
},
WebSocketStream,
};
/// Extractor for establishing WebSocket connections.
///
/// For HTTP/1.1 requests, this extractor requires the request method to be `GET`;
/// in later versions, `CONNECT` is used instead.
/// To support both, it should be used with [`any`](crate::routing::any).
///
/// See the [module docs](self) for an example.
///
/// [`MethodFilter`]: crate::routing::MethodFilter
#[cfg_attr(docsrs, doc(cfg(feature = "ws")))]
#[must_use]
pub struct WebSocketUpgrade<F = DefaultOnFailedUpgrade> {
    /// Configuration forwarded to tungstenite when the socket is created.
    config: WebSocketConfig,
    /// The chosen protocol sent in the `Sec-WebSocket-Protocol` header of the response.
    protocol: Option<HeaderValue>,
    /// `None` if HTTP/2+ WebSockets are used.
    sec_websocket_key: Option<HeaderValue>,
    /// Hyper's handle for taking over the connection once the handshake
    /// response has been sent.
    on_upgrade: hyper::upgrade::OnUpgrade,
    /// Callback invoked if the background upgrade fails.
    on_failed_upgrade: F,
    /// The raw `Sec-WebSocket-Protocol` header sent by the client, if any.
    sec_websocket_protocol: Option<HeaderValue>,
}
impl<F> std::fmt::Debug for WebSocketUpgrade<F> {
    // Manual impl: `on_upgrade` and the callback `F` are not `Debug`, so only
    // the inspectable fields are printed (hence `finish_non_exhaustive`).
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("WebSocketUpgrade")
            .field("config", &self.config)
            .field("protocol", &self.protocol)
            .field("sec_websocket_key", &self.sec_websocket_key)
            .field("sec_websocket_protocol", &self.sec_websocket_protocol)
            .finish_non_exhaustive()
    }
}
impl<F> WebSocketUpgrade<F> {
    /// Read buffer capacity. The default value is 128KiB
    pub fn read_buffer_size(mut self, size: usize) -> Self {
        self.config.read_buffer_size = size;
        self
    }

    /// The target minimum size of the write buffer to reach before writing the data
    /// to the underlying stream.
    ///
    /// The default value is 128 KiB.
    ///
    /// If set to `0` each message will be eagerly written to the underlying stream.
    /// It is often more optimal to allow them to buffer a little, hence the default value.
    ///
    /// Note: [`flush`](SinkExt::flush) will always fully write the buffer regardless.
    pub fn write_buffer_size(mut self, size: usize) -> Self {
        self.config.write_buffer_size = size;
        self
    }

    /// The max size of the write buffer in bytes. Setting this can provide backpressure
    /// in the case the write buffer is filling up due to write errors.
    ///
    /// The default value is unlimited.
    ///
    /// Note: The write buffer only builds up past [`write_buffer_size`](Self::write_buffer_size)
    /// when writes to the underlying stream are failing. So the **write buffer can not
    /// fill up if you are not observing write errors even if not flushing**.
    ///
    /// Note: Should always be at least [`write_buffer_size + 1 message`](Self::write_buffer_size)
    /// and probably a little more depending on error handling strategy.
    pub fn max_write_buffer_size(mut self, max: usize) -> Self {
        self.config.max_write_buffer_size = max;
        self
    }

    /// Set the maximum message size (defaults to 64 megabytes)
    pub fn max_message_size(mut self, max: usize) -> Self {
        self.config.max_message_size = Some(max);
        self
    }

    /// Set the maximum frame size (defaults to 16 megabytes)
    pub fn max_frame_size(mut self, max: usize) -> Self {
        self.config.max_frame_size = Some(max);
        self
    }

    /// Allow server to accept unmasked frames (defaults to false)
    pub fn accept_unmasked_frames(mut self, accept: bool) -> Self {
        self.config.accept_unmasked_frames = accept;
        self
    }

    /// Set the known protocols.
    ///
    /// If the protocol name specified by the `Sec-WebSocket-Protocol` header
    /// matches any of them, the upgrade response will include a
    /// `Sec-WebSocket-Protocol` header echoing the selected protocol name.
    ///
    /// The protocols should be listed in decreasing order of preference: if the client offers
    /// multiple protocols that the server could support, the server will pick the first one in
    /// this list.
    ///
    /// # Examples
    ///
    /// ```
    /// use axum::{
    ///     extract::ws::{WebSocketUpgrade, WebSocket},
    ///     routing::any,
    ///     response::{IntoResponse, Response},
    ///     Router,
    /// };
    ///
    /// let app = Router::new().route("/ws", any(handler));
    ///
    /// async fn handler(ws: WebSocketUpgrade) -> Response {
    ///     ws.protocols(["graphql-ws", "graphql-transport-ws"])
    ///         .on_upgrade(|socket| async {
    ///             // ...
    ///         })
    /// }
    /// # let _: Router = app;
    /// ```
    pub fn protocols<I>(mut self, protocols: I) -> Self
    where
        I: IntoIterator,
        I::Item: Into<Cow<'static, str>>,
    {
        // Only attempt a match if the client actually sent a (valid ASCII)
        // `Sec-WebSocket-Protocol` header.
        if let Some(req_protocols) = self
            .sec_websocket_protocol
            .as_ref()
            .and_then(|p| p.to_str().ok())
        {
            self.protocol = protocols
                .into_iter()
                // FIXME: This will often allocate a new `String` and so is less efficient than it
                // could be. But that can't be fixed without breaking changes to the public API.
                .map(Into::into)
                // First server-preferred protocol that the client also offered wins.
                .find(|protocol| {
                    req_protocols
                        .split(',')
                        .any(|req_protocol| req_protocol.trim() == protocol)
                })
                .map(|protocol| match protocol {
                    Cow::Owned(s) => HeaderValue::from_str(&s).unwrap(),
                    Cow::Borrowed(s) => HeaderValue::from_static(s),
                });
        }

        self
    }

    /// Return the selected WebSocket subprotocol, if one has been chosen.
    ///
    /// If [`protocols()`][Self::protocols] has been called and a matching
    /// protocol has been selected, the return value will be `Some` containing
    /// said protocol. Otherwise, it will be `None`.
    pub fn selected_protocol(&self) -> Option<&HeaderValue> {
        self.protocol.as_ref()
    }

    /// Provide a callback to call if upgrading the connection fails.
    ///
    /// The connection upgrade is performed in a background task. If that fails this callback
    /// will be called.
    ///
    /// By default any errors will be silently ignored.
    ///
    /// # Example
    ///
    /// ```
    /// use axum::{
    ///     extract::{WebSocketUpgrade},
    ///     response::Response,
    /// };
    ///
    /// async fn handler(ws: WebSocketUpgrade) -> Response {
    ///     ws.on_failed_upgrade(|error| {
    ///         report_error(error);
    ///     })
    ///     .on_upgrade(|socket| async { /* ... */ })
    /// }
    /// #
    /// # fn report_error(_: axum::Error) {}
    /// ```
    pub fn on_failed_upgrade<C>(self, callback: C) -> WebSocketUpgrade<C>
    where
        C: OnFailedUpgrade,
    {
        WebSocketUpgrade {
            config: self.config,
            protocol: self.protocol,
            sec_websocket_key: self.sec_websocket_key,
            on_upgrade: self.on_upgrade,
            on_failed_upgrade: callback,
            sec_websocket_protocol: self.sec_websocket_protocol,
        }
    }

    /// Finalize upgrading the connection and call the provided callback with
    /// the stream.
    #[must_use = "to set up the WebSocket connection, this response must be returned"]
    pub fn on_upgrade<C, Fut>(self, callback: C) -> Response
    where
        C: FnOnce(WebSocket) -> Fut + Send + 'static,
        Fut: Future<Output = ()> + Send + 'static,
        F: OnFailedUpgrade,
    {
        let on_upgrade = self.on_upgrade;
        let config = self.config;
        let on_failed_upgrade = self.on_failed_upgrade;

        let protocol = self.protocol.clone();

        // The actual upgrade happens on a background task once the handshake
        // response below has been sent; the user callback runs on that task.
        tokio::spawn(async move {
            let upgraded = match on_upgrade.await {
                Ok(upgraded) => upgraded,
                Err(err) => {
                    on_failed_upgrade.call(Error::new(err));
                    return;
                }
            };
            let upgraded = TokioIo::new(upgraded);

            let socket =
                WebSocketStream::from_raw_socket(upgraded, protocol::Role::Server, Some(config))
                    .await;
            let socket = WebSocket {
                inner: socket,
                protocol,
            };
            callback(socket).await;
        });

        let mut response = if let Some(sec_websocket_key) = &self.sec_websocket_key {
            // If `sec_websocket_key` was `Some`, we are using HTTP/1.1.

            #[allow(clippy::declare_interior_mutable_const)]
            const UPGRADE: HeaderValue = HeaderValue::from_static("upgrade");
            #[allow(clippy::declare_interior_mutable_const)]
            const WEBSOCKET: HeaderValue = HeaderValue::from_static("websocket");

            Response::builder()
                .status(StatusCode::SWITCHING_PROTOCOLS)
                .header(header::CONNECTION, UPGRADE)
                .header(header::UPGRADE, WEBSOCKET)
                .header(
                    header::SEC_WEBSOCKET_ACCEPT,
                    sign(sec_websocket_key.as_bytes()),
                )
                .body(Body::empty())
                .unwrap()
        } else {
            // Otherwise, we are HTTP/2+. As established in RFC 9113 section 8.5, we just respond
            // with a 2XX with an empty body:
            // <https://datatracker.ietf.org/doc/html/rfc9113#name-the-connect-method>.
            Response::new(Body::empty())
        };

        // Echo the negotiated subprotocol back to the client, if any.
        if let Some(protocol) = self.protocol {
            response
                .headers_mut()
                .insert(header::SEC_WEBSOCKET_PROTOCOL, protocol);
        }

        response
    }
}
/// What to do when a connection upgrade fails.
///
/// See [`WebSocketUpgrade::on_failed_upgrade`] for more details.
pub trait OnFailedUpgrade: Send + 'static {
    /// Call the callback with the error the upgrade failed with.
    fn call(self, error: Error);
}
// Any one-shot closure taking the error can be used as the callback.
impl<F> OnFailedUpgrade for F
where
    F: FnOnce(Error) + Send + 'static,
{
    fn call(self, error: Error) {
        self(error)
    }
}
/// The default `OnFailedUpgrade` used by `WebSocketUpgrade`.
///
/// It simply ignores the error.
#[non_exhaustive]
#[derive(Debug)]
pub struct DefaultOnFailedUpgrade;

impl OnFailedUpgrade for DefaultOnFailedUpgrade {
    // Deliberately a no-op: upgrade failures are silently dropped unless the
    // user installs a callback via `WebSocketUpgrade::on_failed_upgrade`.
    #[inline]
    fn call(self, _error: Error) {}
}
impl<S> FromRequestParts<S> for WebSocketUpgrade<DefaultOnFailedUpgrade>
where
    S: Send + Sync,
{
    type Rejection = WebSocketUpgradeRejection;

    /// Validates the WebSocket handshake and claims the connection's upgrade
    /// handle. HTTP/1.1 (`GET` + `Connection`/`Upgrade` headers) and HTTP/2+
    /// (extended `CONNECT`) have different requirements, chosen by version.
    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        let sec_websocket_key = if parts.version <= Version::HTTP_11 {
            if parts.method != Method::GET {
                return Err(MethodNotGet.into());
            }
            if !header_contains(&parts.headers, &header::CONNECTION, "upgrade") {
                return Err(InvalidConnectionHeader.into());
            }
            if !header_eq(&parts.headers, &header::UPGRADE, "websocket") {
                return Err(InvalidUpgradeHeader.into());
            }

            Some(
                parts
                    .headers
                    .get(header::SEC_WEBSOCKET_KEY)
                    .ok_or(WebSocketKeyHeaderMissing)?
                    .clone(),
            )
        } else {
            if parts.method != Method::CONNECT {
                return Err(MethodNotConnect.into());
            }

            // if this feature flag is disabled, we won’t be receiving an HTTP/2 request to begin
            // with.
            #[cfg(feature = "http2")]
            if parts
                .extensions
                .get::<hyper::ext::Protocol>()
                .map_or(true, |p| p.as_str() != "websocket")
            {
                return Err(InvalidProtocolPseudoheader.into());
            }

            // HTTP/2+ handshakes carry no `Sec-WebSocket-Key`.
            None
        };

        if !header_eq(&parts.headers, &header::SEC_WEBSOCKET_VERSION, "13") {
            return Err(InvalidWebSocketVersionHeader.into());
        }

        // Removing `OnUpgrade` from the extensions claims the connection; it
        // is absent when the underlying transport can't be upgraded.
        let on_upgrade = parts
            .extensions
            .remove::<hyper::upgrade::OnUpgrade>()
            .ok_or(ConnectionNotUpgradable)?;

        let sec_websocket_protocol = parts.headers.get(header::SEC_WEBSOCKET_PROTOCOL).cloned();

        Ok(Self {
            config: Default::default(),
            protocol: None,
            sec_websocket_key,
            on_upgrade,
            sec_websocket_protocol,
            on_failed_upgrade: DefaultOnFailedUpgrade,
        })
    }
}
/// Returns `true` if header `key` is present and its value equals `value`,
/// compared case-insensitively (ASCII).
fn header_eq(headers: &HeaderMap, key: &HeaderName, value: &'static str) -> bool {
    headers.get(key).map_or(false, |header| {
        header.as_bytes().eq_ignore_ascii_case(value.as_bytes())
    })
}
/// Returns `true` if header `key` is present, is valid UTF-8, and its
/// lowercased value contains `value` as a substring (used for comma-separated
/// headers like `Connection`).
fn header_contains(headers: &HeaderMap, key: &HeaderName, value: &'static str) -> bool {
    headers
        .get(key)
        .and_then(|header| std::str::from_utf8(header.as_bytes()).ok())
        .map_or(false, |header| header.to_ascii_lowercase().contains(value))
}
/// A stream of WebSocket messages.
///
/// See [the module level documentation](self) for more details.
#[derive(Debug)]
pub struct WebSocket {
    /// The underlying tungstenite stream over the upgraded hyper connection.
    inner: WebSocketStream<TokioIo<hyper::upgrade::Upgraded>>,
    /// The subprotocol negotiated during the upgrade, if any.
    protocol: Option<HeaderValue>,
}
impl WebSocket {
    /// Receive another message.
    ///
    /// Returns `None` if the stream has closed.
    pub async fn recv(&mut self) -> Option<Result<Message, Error>> {
        // Delegates to this type's `Stream` implementation.
        self.next().await
    }

    /// Send a message.
    pub async fn send(&mut self, msg: Message) -> Result<(), Error> {
        self.inner
            .send(msg.into_tungstenite())
            .await
            .map_err(Error::new)
    }

    /// Return the selected WebSocket subprotocol, if one has been chosen.
    pub fn protocol(&self) -> Option<&HeaderValue> {
        self.protocol.as_ref()
    }
}
impl FusedStream for WebSocket {
    /// Returns true if the websocket has been terminated.
    fn is_terminated(&self) -> bool {
        // Delegates to the underlying tungstenite stream.
        self.inner.is_terminated()
    }
}
impl Stream for WebSocket {
    type Item = Result<Message, Error>;

    fn poll_next(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Option<Self::Item>> {
        // Loop so that raw `Frame` messages (which `from_tungstenite` maps to
        // `None`) are skipped transparently instead of ending the stream.
        loop {
            match ready!(self.inner.poll_next_unpin(cx)) {
                Some(Ok(msg)) => {
                    if let Some(msg) = Message::from_tungstenite(msg) {
                        return Poll::Ready(Some(Ok(msg)));
                    }
                }
                Some(Err(err)) => return Poll::Ready(Some(Err(Error::new(err)))),
                None => return Poll::Ready(None),
            }
        }
    }
}
// Pure delegation to the inner tungstenite sink, converting the message type
// on `start_send` and wrapping errors in axum's `Error`.
impl Sink<Message> for WebSocket {
    type Error = Error;

    fn poll_ready(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        Pin::new(&mut self.inner).poll_ready(cx).map_err(Error::new)
    }

    fn start_send(mut self: Pin<&mut Self>, item: Message) -> Result<(), Self::Error> {
        Pin::new(&mut self.inner)
            .start_send(item.into_tungstenite())
            .map_err(Error::new)
    }

    fn poll_flush(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        Pin::new(&mut self.inner).poll_flush(cx).map_err(Error::new)
    }

    fn poll_close(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        Pin::new(&mut self.inner).poll_close(cx).map_err(Error::new)
    }
}
/// UTF-8 wrapper for [Bytes].
///
/// An [Utf8Bytes] is always guaranteed to contain valid UTF-8.
#[derive(Debug, Clone, PartialEq, Eq, Default)]
// Thin wrapper over tungstenite's `Utf8Bytes`, which upholds the UTF-8 invariant.
pub struct Utf8Bytes(ts::Utf8Bytes);
impl Utf8Bytes {
    /// Creates from a static str.
    #[inline]
    #[must_use]
    pub const fn from_static(str: &'static str) -> Self {
        Self(ts::Utf8Bytes::from_static(str))
    }

    /// Returns as a string slice.
    #[inline]
    pub fn as_str(&self) -> &str {
        self.0.as_str()
    }

    /// Unwraps into the tungstenite representation (crate-internal).
    fn into_tungstenite(self) -> ts::Utf8Bytes {
        self.0
    }
}
// Deref to `str` so all string-slice methods work on `Utf8Bytes` directly.
impl std::ops::Deref for Utf8Bytes {
    type Target = str;

    /// ```
    /// /// Example fn that takes a str slice
    /// fn a(s: &str) {}
    ///
    /// let data = axum::extract::ws::Utf8Bytes::from_static("foo123");
    ///
    /// // auto-deref as arg
    /// a(&data);
    ///
    /// // deref to str methods
    /// assert_eq!(data.len(), 6);
    /// ```
    #[inline]
    fn deref(&self) -> &Self::Target {
        self.as_str()
    }
}
impl std::fmt::Display for Utf8Bytes {
    /// Writes the text content verbatim (no quoting or escaping).
    #[inline]
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        f.write_str(self.as_str())
    }
}
// Fallible: the bytes must be valid UTF-8.
impl TryFrom<Bytes> for Utf8Bytes {
    type Error = std::str::Utf8Error;

    #[inline]
    fn try_from(bytes: Bytes) -> Result<Self, Self::Error> {
        Ok(Self(bytes.try_into()?))
    }
}
// Fallible: the byte vector must be valid UTF-8.
impl TryFrom<Vec<u8>> for Utf8Bytes {
    type Error = std::str::Utf8Error;

    #[inline]
    fn try_from(v: Vec<u8>) -> Result<Self, Self::Error> {
        Ok(Self(v.try_into()?))
    }
}
// Infallible: a `String` is UTF-8 by construction.
impl From<String> for Utf8Bytes {
    #[inline]
    fn from(s: String) -> Self {
        Self(s.into())
    }
}
// Copies the slice into owned UTF-8 bytes.
impl From<&str> for Utf8Bytes {
    #[inline]
    fn from(s: &str) -> Self {
        Self(s.into())
    }
}
// Convenience for `&String`; copies the contents like `From<&str>`.
impl From<&String> for Utf8Bytes {
    #[inline]
    fn from(s: &String) -> Self {
        Self(s.into())
    }
}
// Drops the UTF-8 guarantee and exposes the raw bytes.
impl From<Utf8Bytes> for Bytes {
    #[inline]
    fn from(Utf8Bytes(bytes): Utf8Bytes) -> Self {
        bytes.into()
    }
}
// Blanket comparison with anything a `&str` can be compared to
// (`&str`, `String`, `Cow<str>`, ...).
impl<T> PartialEq<T> for Utf8Bytes
where
    for<'a> &'a str: PartialEq<T>,
{
    /// ```
    /// let payload = axum::extract::ws::Utf8Bytes::from_static("foo123");
    /// assert_eq!(payload, "foo123");
    /// assert_eq!(payload, "foo123".to_string());
    /// assert_eq!(payload, &"foo123".to_string());
    /// assert_eq!(payload, std::borrow::Cow::from("foo123"));
    /// ```
    #[inline]
    fn eq(&self, other: &T) -> bool {
        self.as_str() == *other
    }
}
/// Status code used to indicate why an endpoint is closing the WebSocket connection.
///
/// See [RFC 6455 §7.4](https://datatracker.ietf.org/doc/html/rfc6455#section-7.4)
/// for the registered code ranges.
pub type CloseCode = u16;
/// A struct representing the close command.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct CloseFrame {
    /// The reason as a code.
    pub code: CloseCode,
    /// The reason as text string.
    pub reason: Utf8Bytes,
}
/// A WebSocket message.
//
// This code comes from https://github.com/snapview/tungstenite-rs/blob/master/src/protocol/message.rs and is under following license:
// Copyright (c) 2017 Alexey Galakhov
// Copyright (c) 2016 Jason Housley
//
// Permission is hereby granted, free of charge, to any person obtaining a copy
// of this software and associated documentation files (the "Software"), to deal
// in the Software without restriction, including without limitation the rights
// to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
// copies of the Software, and to permit persons to whom the Software is
// furnished to do so, subject to the following conditions:
//
// The above copyright notice and this permission notice shall be included in
// all copies or substantial portions of the Software.
//
// THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
// IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
// FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
// AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
// LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
// OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
// THE SOFTWARE.
#[derive(Debug, Eq, PartialEq, Clone)]
pub enum Message {
    /// A text WebSocket message
    ///
    /// Guaranteed to contain valid UTF-8 (see [`Utf8Bytes`]).
    Text(Utf8Bytes),
    /// A binary WebSocket message
    Binary(Bytes),
    /// A ping message with the specified payload
    ///
    /// The payload here must have a length less than 125 bytes.
    ///
    /// Ping messages will be automatically responded to by the server, so you do not have to worry
    /// about dealing with them yourself.
    Ping(Bytes),
    /// A pong message with the specified payload
    ///
    /// The payload here must have a length less than 125 bytes.
    ///
    /// Pong messages will be automatically sent to the client if a ping message is received, so
    /// you do not have to worry about constructing them yourself unless you want to implement a
    /// [unidirectional heartbeat](https://tools.ietf.org/html/rfc6455#section-5.5.3).
    Pong(Bytes),
    /// A close message with the optional close frame.
    ///
    /// You may "uncleanly" close a WebSocket connection at any time
    /// by simply dropping the [`WebSocket`].
    /// However, you may also use the graceful closing protocol, in which
    /// 1. peer A sends a close frame, and does not send any further messages;
    /// 2. peer B responds with a close frame, and does not send any further messages;
    /// 3. peer A processes the remaining messages sent by peer B, before finally
    /// 4. both peers close the connection.
    ///
    /// After sending a close frame,
    /// you may still read messages,
    /// but attempts to send another message will error.
    /// After receiving a close frame,
    /// axum will automatically respond with a close frame if necessary
    /// (you do not have to deal with this yourself).
    /// Since no further messages will be received,
    /// you may either do nothing
    /// or explicitly drop the connection.
    Close(Option<CloseFrame>),
}
impl Message {
    /// Converts to the tungstenite message type for sending over the wire.
    fn into_tungstenite(self) -> ts::Message {
        match self {
            Self::Text(text) => ts::Message::Text(text.into_tungstenite()),
            Self::Binary(binary) => ts::Message::Binary(binary),
            Self::Ping(ping) => ts::Message::Ping(ping),
            Self::Pong(pong) => ts::Message::Pong(pong),
            Self::Close(Some(close)) => ts::Message::Close(Some(ts::protocol::CloseFrame {
                code: ts::protocol::frame::coding::CloseCode::from(close.code),
                reason: close.reason.into_tungstenite(),
            })),
            Self::Close(None) => ts::Message::Close(None),
        }
    }

    /// Converts from the tungstenite message type; returns `None` for frames
    /// axum does not surface to users.
    fn from_tungstenite(message: ts::Message) -> Option<Self> {
        match message {
            ts::Message::Text(text) => Some(Self::Text(Utf8Bytes(text))),
            ts::Message::Binary(binary) => Some(Self::Binary(binary)),
            ts::Message::Ping(ping) => Some(Self::Ping(ping)),
            ts::Message::Pong(pong) => Some(Self::Pong(pong)),
            ts::Message::Close(Some(close)) => Some(Self::Close(Some(CloseFrame {
                code: close.code.into(),
                reason: Utf8Bytes(close.reason),
            }))),
            ts::Message::Close(None) => Some(Self::Close(None)),
            // we can ignore `Frame` frames as recommended by the tungstenite maintainers
            // https://github.com/snapview/tungstenite-rs/issues/268
            ts::Message::Frame(_) => None,
        }
    }

    /// Consume the WebSocket message and return it as binary data.
    pub fn into_data(self) -> Bytes {
        match self {
            Self::Text(string) => Bytes::from(string),
            Self::Binary(data) | Self::Ping(data) | Self::Pong(data) => data,
            Self::Close(None) => Bytes::new(),
            Self::Close(Some(frame)) => Bytes::from(frame.reason),
        }
    }

    /// Attempt to consume the WebSocket message and convert it to a Utf8Bytes.
    ///
    /// Fails if a binary/ping/pong payload is not valid UTF-8.
    pub fn into_text(self) -> Result<Utf8Bytes, Error> {
        match self {
            Self::Text(string) => Ok(string),
            Self::Binary(data) | Self::Ping(data) | Self::Pong(data) => {
                Ok(Utf8Bytes::try_from(data).map_err(Error::new)?)
            }
            Self::Close(None) => Ok(Utf8Bytes::default()),
            Self::Close(Some(frame)) => Ok(frame.reason),
        }
    }

    /// Attempt to get a &str from the WebSocket message,
    /// this will try to convert binary data to utf8.
    pub fn to_text(&self) -> Result<&str, Error> {
        match *self {
            Self::Text(ref string) => Ok(string.as_str()),
            Self::Binary(ref data) | Self::Ping(ref data) | Self::Pong(ref data) => {
                Ok(std::str::from_utf8(data).map_err(Error::new)?)
            }
            Self::Close(None) => Ok(""),
            Self::Close(Some(ref frame)) => Ok(&frame.reason),
        }
    }

    /// Create a new text WebSocket message from a stringable.
    pub fn text<S>(string: S) -> Self
    where
        S: Into<Utf8Bytes>,
    {
        Self::Text(string.into())
    }

    /// Create a new binary WebSocket message by converting to `Bytes`.
    pub fn binary<B>(bin: B) -> Self
    where
        B: Into<Bytes>,
    {
        Self::Binary(bin.into())
    }
}
/// Constructs a text message from an owned `String`.
impl From<String> for Message {
    fn from(string: String) -> Self {
        Self::Text(string.into())
    }
}
/// Constructs a text message from a string slice.
impl<'s> From<&'s str> for Message {
    fn from(string: &'s str) -> Self {
        Self::Text(string.into())
    }
}
/// Constructs a binary message by copying the byte slice.
impl<'b> From<&'b [u8]> for Message {
    fn from(data: &'b [u8]) -> Self {
        Self::Binary(Bytes::copy_from_slice(data))
    }
}
/// Constructs a binary message wrapping the `Bytes` directly.
impl From<Bytes> for Message {
    fn from(data: Bytes) -> Self {
        Self::Binary(data)
    }
}
/// Constructs a binary message from an owned byte vector.
impl From<Vec<u8>> for Message {
    fn from(data: Vec<u8>) -> Self {
        Self::Binary(data.into())
    }
}
/// Extracts any message's payload as owned bytes via `Message::into_data`.
impl From<Message> for Vec<u8> {
    fn from(msg: Message) -> Self {
        msg.into_data().to_vec()
    }
}
/// Compute the `Sec-WebSocket-Accept` value for a handshake:
/// base64(SHA-1(key ++ RFC 6455 GUID)).
fn sign(key: &[u8]) -> HeaderValue {
    use base64::engine::Engine as _;
    // Fixed GUID defined by RFC 6455 for the accept-key derivation.
    const WS_GUID: &[u8] = b"258EAFA5-E914-47DA-95CA-C5AB0DC85B11";
    let mut hasher = Sha1::default();
    hasher.update(key);
    hasher.update(WS_GUID);
    let encoded = base64::engine::general_purpose::STANDARD.encode(hasher.finalize());
    // Base64 output is always ASCII, hence a valid header value.
    HeaderValue::from_maybe_shared(Bytes::from(encoded)).expect("base64 is a valid value")
}
pub mod rejection {
    //! WebSocket specific rejections.
    use axum_core::__composite_rejection as composite_rejection;
    use axum_core::__define_rejection as define_rejection;
    // Each `define_rejection!` invocation expands to a rejection type whose
    // `IntoResponse` impl produces the given status code and body text.
    define_rejection! {
        #[status = METHOD_NOT_ALLOWED]
        #[body = "Request method must be `GET`"]
        /// Rejection type for [`WebSocketUpgrade`](super::WebSocketUpgrade).
        pub struct MethodNotGet;
    }
    define_rejection! {
        #[status = METHOD_NOT_ALLOWED]
        #[body = "Request method must be `CONNECT`"]
        /// Rejection type for [`WebSocketUpgrade`](super::WebSocketUpgrade).
        pub struct MethodNotConnect;
    }
    define_rejection! {
        #[status = BAD_REQUEST]
        #[body = "Connection header did not include 'upgrade'"]
        /// Rejection type for [`WebSocketUpgrade`](super::WebSocketUpgrade).
        pub struct InvalidConnectionHeader;
    }
    define_rejection! {
        #[status = BAD_REQUEST]
        #[body = "`Upgrade` header did not include 'websocket'"]
        /// Rejection type for [`WebSocketUpgrade`](super::WebSocketUpgrade).
        pub struct InvalidUpgradeHeader;
    }
    define_rejection! {
        #[status = BAD_REQUEST]
        #[body = "`:protocol` pseudo-header did not include 'websocket'"]
        /// Rejection type for [`WebSocketUpgrade`](super::WebSocketUpgrade).
        pub struct InvalidProtocolPseudoheader;
    }
    define_rejection! {
        #[status = BAD_REQUEST]
        #[body = "`Sec-WebSocket-Version` header did not include '13'"]
        /// Rejection type for [`WebSocketUpgrade`](super::WebSocketUpgrade).
        pub struct InvalidWebSocketVersionHeader;
    }
    define_rejection! {
        #[status = BAD_REQUEST]
        #[body = "`Sec-WebSocket-Key` header missing"]
        /// Rejection type for [`WebSocketUpgrade`](super::WebSocketUpgrade).
        pub struct WebSocketKeyHeaderMissing;
    }
    define_rejection! {
        #[status = UPGRADE_REQUIRED]
        #[body = "WebSocket request couldn't be upgraded since no upgrade state was present"]
        /// Rejection type for [`WebSocketUpgrade`](super::WebSocketUpgrade).
        ///
        /// This rejection is returned if the connection cannot be upgraded for example if the
        /// request is HTTP/1.0.
        ///
        /// See [MDN] for more details about connection upgrades.
        ///
        /// [MDN]: https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Upgrade
        pub struct ConnectionNotUpgradable;
    }
    // Umbrella enum combining every rejection above; this is the extractor's
    // actual `Rejection` associated type.
    composite_rejection! {
        /// Rejection used for [`WebSocketUpgrade`](super::WebSocketUpgrade).
        ///
        /// Contains one variant for each way the [`WebSocketUpgrade`](super::WebSocketUpgrade)
        /// extractor can fail.
        pub enum WebSocketUpgradeRejection {
            MethodNotGet,
            MethodNotConnect,
            InvalidConnectionHeader,
            InvalidUpgradeHeader,
            InvalidProtocolPseudoheader,
            InvalidWebSocketVersionHeader,
            WebSocketKeyHeaderMissing,
            ConnectionNotUpgradable,
        }
    }
}
pub mod close_code {
//! Constants for [`CloseCode`]s.
//!
//! [`CloseCode`]: super::CloseCode
/// Indicates a normal closure, meaning that the purpose for which the connection was
/// established has been fulfilled.
pub const NORMAL: u16 = 1000;
/// Indicates that an endpoint is "going away", such as a server going down or a browser having
/// navigated away from a page.
pub const AWAY: u16 = 1001;
/// Indicates that an endpoint is terminating the connection due to a protocol error.
pub const PROTOCOL: u16 = 1002;
/// Indicates that an endpoint is terminating the connection because it has received a type of
/// data that it cannot accept.
///
/// For example, an endpoint MAY send this if it understands only text data, but receives a binary message.
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | true |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/extract/rejection.rs | axum/src/extract/rejection.rs | //! Rejection response types.
use axum_core::__composite_rejection as composite_rejection;
use axum_core::__define_rejection as define_rejection;
pub use crate::extract::path::{FailedToDeserializePathParams, InvalidUtf8InPathParam};
pub use axum_core::extract::rejection::*;
#[cfg(feature = "json")]
define_rejection! {
#[status = UNPROCESSABLE_ENTITY]
#[body = "Failed to deserialize the JSON body into the target type"]
#[cfg_attr(docsrs, doc(cfg(feature = "json")))]
/// Rejection type for [`Json`](super::Json).
///
/// This rejection is used if the request body is syntactically valid JSON but couldn't be
/// deserialized into the target type.
pub struct JsonDataError(Error);
}
#[cfg(feature = "json")]
define_rejection! {
#[status = BAD_REQUEST]
#[body = "Failed to parse the request body as JSON"]
#[cfg_attr(docsrs, doc(cfg(feature = "json")))]
/// Rejection type for [`Json`](super::Json).
///
/// This rejection is used if the request body didn't contain syntactically valid JSON.
pub struct JsonSyntaxError(Error);
}
#[cfg(feature = "json")]
define_rejection! {
#[status = UNSUPPORTED_MEDIA_TYPE]
#[body = "Expected request with `Content-Type: application/json`"]
#[cfg_attr(docsrs, doc(cfg(feature = "json")))]
/// Rejection type for [`Json`](super::Json) used if the `Content-Type`
/// header is missing.
pub struct MissingJsonContentType;
}
define_rejection! {
#[status = INTERNAL_SERVER_ERROR]
#[body = "Missing request extension"]
/// Rejection type for [`Extension`](super::Extension) if an expected
/// request extension was not found.
pub struct MissingExtension(Error);
}
define_rejection! {
#[status = INTERNAL_SERVER_ERROR]
#[body = "No paths parameters found for matched route"]
/// Rejection type used if axum's internal representation of path parameters
/// is missing. This is commonly caused by extracting `Request<_>`. `Path`
/// must be extracted first.
pub struct MissingPathParams;
}
define_rejection! {
#[status = UNSUPPORTED_MEDIA_TYPE]
#[body = "Form requests must have `Content-Type: application/x-www-form-urlencoded`"]
/// Rejection type for [`Form`](super::Form) or [`RawForm`](super::RawForm)
/// used if the `Content-Type` header is missing
/// or its value is not `application/x-www-form-urlencoded`.
pub struct InvalidFormContentType;
}
define_rejection! {
#[status = BAD_REQUEST]
#[body = "Failed to deserialize form"]
/// Rejection type used if the [`Form`](super::Form) extractor is unable to
/// deserialize the form into the target type.
pub struct FailedToDeserializeForm(Error);
}
define_rejection! {
#[status = UNPROCESSABLE_ENTITY]
#[body = "Failed to deserialize form body"]
/// Rejection type used if the [`Form`](super::Form) extractor is unable to
/// deserialize the form body into the target type.
pub struct FailedToDeserializeFormBody(Error);
}
define_rejection! {
#[status = BAD_REQUEST]
#[body = "Failed to deserialize query string"]
/// Rejection type used if the [`Query`](super::Query) extractor is unable to
/// deserialize the query string into the target type.
pub struct FailedToDeserializeQueryString(Error);
}
composite_rejection! {
/// Rejection used for [`Query`](super::Query).
///
/// Contains one variant for each way the [`Query`](super::Query) extractor
/// can fail.
pub enum QueryRejection {
FailedToDeserializeQueryString,
}
}
composite_rejection! {
/// Rejection used for [`Form`](super::Form).
///
/// Contains one variant for each way the [`Form`](super::Form) extractor
/// can fail.
pub enum FormRejection {
InvalidFormContentType,
FailedToDeserializeForm,
FailedToDeserializeFormBody,
BytesRejection,
}
}
composite_rejection! {
/// Rejection used for [`RawForm`](super::RawForm).
///
/// Contains one variant for each way the [`RawForm`](super::RawForm) extractor
/// can fail.
pub enum RawFormRejection {
InvalidFormContentType,
BytesRejection,
}
}
#[cfg(feature = "json")]
composite_rejection! {
/// Rejection used for [`Json`](super::Json).
///
/// Contains one variant for each way the [`Json`](super::Json) extractor
/// can fail.
#[cfg_attr(docsrs, doc(cfg(feature = "json")))]
pub enum JsonRejection {
JsonDataError,
JsonSyntaxError,
MissingJsonContentType,
BytesRejection,
}
}
composite_rejection! {
/// Rejection used for [`Extension`](super::Extension).
///
/// Contains one variant for each way the [`Extension`](super::Extension) extractor
/// can fail.
pub enum ExtensionRejection {
MissingExtension,
}
}
composite_rejection! {
/// Rejection used for [`Path`](super::Path).
///
/// Contains one variant for each way the [`Path`](super::Path) extractor
/// can fail.
pub enum PathRejection {
FailedToDeserializePathParams,
MissingPathParams,
}
}
composite_rejection! {
/// Rejection used for [`RawPathParams`](super::RawPathParams).
///
/// Contains one variant for each way the [`RawPathParams`](super::RawPathParams) extractor
/// can fail.
pub enum RawPathParamsRejection {
InvalidUtf8InPathParam,
MissingPathParams,
}
}
#[cfg(feature = "matched-path")]
define_rejection! {
#[status = INTERNAL_SERVER_ERROR]
#[body = "No matched path found"]
/// Rejection if no matched path could be found.
///
/// See [`MatchedPath`](super::MatchedPath) for more details.
#[cfg_attr(docsrs, doc(cfg(feature = "matched-path")))]
pub struct MatchedPathMissing;
}
#[cfg(feature = "matched-path")]
composite_rejection! {
/// Rejection used for [`MatchedPath`](super::MatchedPath).
#[cfg_attr(docsrs, doc(cfg(feature = "matched-path")))]
pub enum MatchedPathRejection {
MatchedPathMissing,
}
}
define_rejection! {
#[status = INTERNAL_SERVER_ERROR]
#[body = "The matched route is not nested"]
/// Rejection type for [`NestedPath`](super::NestedPath).
///
/// This rejection is used if the matched route wasn't nested.
pub struct NestedPathRejection;
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/extract/query.rs | axum/src/extract/query.rs | use super::{rejection::*, FromRequestParts};
use http::{request::Parts, Uri};
use serde_core::de::DeserializeOwned;
/// Extractor that deserializes query strings into some type.
///
/// `T` is expected to implement [`serde::Deserialize`].
///
/// # Examples
///
/// ```rust,no_run
/// use axum::{
/// extract::Query,
/// routing::get,
/// Router,
/// };
/// use serde::Deserialize;
///
/// #[derive(Deserialize)]
/// struct Pagination {
/// page: usize,
/// per_page: usize,
/// }
///
/// // This will parse query strings like `?page=2&per_page=30` into `Pagination`
/// // structs.
/// async fn list_things(pagination: Query<Pagination>) {
/// let pagination: Pagination = pagination.0;
///
/// // ...
/// }
///
/// let app = Router::new().route("/list_things", get(list_things));
/// # let _: Router = app;
/// ```
///
/// If the query string cannot be parsed it will reject the request with a `400
/// Bad Request` response.
#[cfg_attr(docsrs, doc(cfg(feature = "query")))]
#[derive(Debug, Clone, Copy, Default)]
pub struct Query<T>(pub T);
impl<T, S> FromRequestParts<S> for Query<T>
where
    T: DeserializeOwned,
    S: Send + Sync,
{
    type Rejection = QueryRejection;
    // Delegates to `try_from_uri`; only the URI is read (never the body), so
    // this extractor works for any request method.
    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        Self::try_from_uri(&parts.uri)
    }
}
impl<T> Query<T>
where
    T: DeserializeOwned,
{
    /// Attempts to construct a [`Query`] from a reference to a [`Uri`].
    ///
    /// # Example
    /// ```
    /// use axum::extract::Query;
    /// use http::Uri;
    /// use serde::Deserialize;
    ///
    /// #[derive(Deserialize)]
    /// struct ExampleParams {
    ///     foo: String,
    ///     bar: u32,
    /// }
    ///
    /// let uri: Uri = "http://example.com/path?foo=hello&bar=42".parse().unwrap();
    /// let result: Query<ExampleParams> = Query::try_from_uri(&uri).unwrap();
    /// assert_eq!(result.foo, String::from("hello"));
    /// assert_eq!(result.bar, 42);
    /// ```
    pub fn try_from_uri(value: &Uri) -> Result<Self, QueryRejection> {
        // A missing query string deserializes the same as an empty one.
        let raw = value.query().unwrap_or("");
        let pairs = form_urlencoded::parse(raw.as_bytes());
        let deserializer = serde_html_form::Deserializer::new(pairs);
        // `serde_path_to_error` records which field failed so the rejection
        // message can name it.
        match serde_path_to_error::deserialize(deserializer) {
            Ok(params) => Ok(Self(params)),
            Err(err) => Err(FailedToDeserializeQueryString::from_err(err).into()),
        }
    }
}
axum_core::__impl_deref!(Query);
#[cfg(test)]
mod tests {
use crate::{routing::get, test_helpers::TestClient, Router};
use super::*;
use axum_core::{body::Body, extract::FromRequest};
use http::{Request, StatusCode};
use serde::Deserialize;
use std::fmt::Debug;
async fn check<T>(uri: impl AsRef<str>, value: T)
where
T: DeserializeOwned + PartialEq + Debug,
{
let req = Request::builder()
.uri(uri.as_ref())
.body(Body::empty())
.unwrap();
assert_eq!(Query::<T>::from_request(req, &()).await.unwrap().0, value);
}
#[crate::test]
async fn test_query() {
#[derive(Debug, PartialEq, Deserialize)]
struct Pagination {
size: Option<u64>,
page: Option<u64>,
}
check(
"http://example.com/test",
Pagination {
size: None,
page: None,
},
)
.await;
check(
"http://example.com/test?size=10",
Pagination {
size: Some(10),
page: None,
},
)
.await;
check(
"http://example.com/test?size=10&page=20",
Pagination {
size: Some(10),
page: Some(20),
},
)
.await;
}
#[crate::test]
async fn correct_rejection_status_code() {
#[derive(Deserialize)]
#[allow(dead_code)]
struct Params {
n: i32,
}
async fn handler(_: Query<Params>) {}
let app = Router::new().route("/", get(handler));
let client = TestClient::new(app);
let res = client.get("/?n=hi").await;
assert_eq!(res.status(), StatusCode::BAD_REQUEST);
assert_eq!(
res.text().await,
"Failed to deserialize query string: n: invalid digit found in string"
);
}
#[test]
fn test_try_from_uri() {
#[derive(Deserialize)]
struct TestQueryParams {
foo: String,
bar: u32,
}
let uri: Uri = "http://example.com/path?foo=hello&bar=42".parse().unwrap();
let result: Query<TestQueryParams> = Query::try_from_uri(&uri).unwrap();
assert_eq!(result.foo, String::from("hello"));
assert_eq!(result.bar, 42);
}
#[test]
fn test_try_from_uri_with_invalid_query() {
#[derive(Deserialize)]
struct TestQueryParams {
_foo: String,
_bar: u32,
}
let uri: Uri = "http://example.com/path?foo=hello&bar=invalid"
.parse()
.unwrap();
let result: Result<Query<TestQueryParams>, _> = Query::try_from_uri(&uri);
assert!(result.is_err());
}
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/extract/matched_path.rs | axum/src/extract/matched_path.rs | use super::{rejection::*, FromRequestParts};
use crate::routing::{RouteId, NEST_TAIL_PARAM_CAPTURE};
use axum_core::extract::OptionalFromRequestParts;
use http::request::Parts;
use std::{collections::HashMap, convert::Infallible, sync::Arc};
/// Access the path in the router that matches the request.
///
/// ```
/// use axum::{
/// Router,
/// extract::MatchedPath,
/// routing::get,
/// };
///
/// let app = Router::new().route(
/// "/users/{id}",
/// get(|path: MatchedPath| async move {
/// let path = path.as_str();
/// // `path` will be "/users/{id}"
/// })
/// );
/// # let _: Router = app;
/// ```
///
/// # Accessing `MatchedPath` via extensions
///
/// `MatchedPath` can also be accessed from middleware via request extensions.
///
/// This is useful for example with [`Trace`](tower_http::trace::Trace) to
/// create a span that contains the matched path:
///
/// ```
/// use axum::{
/// Router,
/// extract::{Request, MatchedPath},
/// routing::get,
/// };
/// use tower_http::trace::TraceLayer;
///
/// let app = Router::new()
/// .route("/users/{id}", get(|| async { /* ... */ }))
/// .layer(
/// TraceLayer::new_for_http().make_span_with(|req: &Request<_>| {
/// let path = if let Some(path) = req.extensions().get::<MatchedPath>() {
/// path.as_str()
/// } else {
/// req.uri().path()
/// };
/// tracing::info_span!("http-request", %path)
/// }),
/// );
/// # let _: Router = app;
/// ```
#[cfg_attr(docsrs, doc(cfg(feature = "matched-path")))]
#[derive(Clone, Debug)]
pub struct MatchedPath(pub(crate) Arc<str>);
impl MatchedPath {
    /// Returns a `str` representation of the path.
    #[must_use]
    pub fn as_str(&self) -> &str {
        // Deref-coerces the inner `Arc<str>` to `&str`.
        &self.0
    }
}
impl<S> FromRequestParts<S> for MatchedPath
where
    S: Send + Sync,
{
    type Rejection = MatchedPathRejection;
    /// The router stores `MatchedPath` as a request extension; when it is
    /// absent (e.g. in a fallback handler) the extractor rejects.
    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        parts
            .extensions
            .get::<Self>()
            .cloned()
            .ok_or(MatchedPathRejection::MatchedPathMissing(MatchedPathMissing))
    }
}
impl<S> OptionalFromRequestParts<S> for MatchedPath
where
    S: Send + Sync,
{
    type Rejection = Infallible;
    /// Never fails: simply reports whether a matched path was recorded.
    async fn from_request_parts(
        parts: &mut Parts,
        _state: &S,
    ) -> Result<Option<Self>, Self::Rejection> {
        let matched = parts.extensions.get::<Self>().map(|path| path.clone());
        Ok(matched)
    }
}
// Internal marker holding the partially-matched prefix of a nested router;
// written and consumed by `set_matched_path_for_request` below.
#[derive(Clone, Debug)]
struct MatchedNestedPath(Arc<str>);
/// Record the matched path template for route `id` in the request extensions,
/// stitching together prefixes accumulated from nested routers.
pub(crate) fn set_matched_path_for_request(
    id: RouteId,
    route_id_to_path: &HashMap<RouteId, Arc<str>>,
    extensions: &mut http::Extensions,
) {
    // Look up the path template registered for this route id.
    let matched_path = match route_id_to_path.get(&id) {
        Some(path) => path,
        None => {
            #[cfg(debug_assertions)]
            panic!("should always have a matched path for a route id");
            #[cfg(not(debug_assertions))]
            return;
        }
    };
    let matched_path = append_nested_matched_path(matched_path, extensions);
    if !matched_path.ends_with(NEST_TAIL_PARAM_CAPTURE) {
        // Full match: expose it and clear any leftover nested-prefix state.
        extensions.insert(MatchedPath(matched_path));
        extensions.remove::<MatchedNestedPath>();
    } else {
        // Only a nested prefix matched so far; stash it for the inner router.
        extensions.insert(MatchedNestedPath(matched_path));
        debug_assert!(extensions.remove::<MatchedPath>().is_none());
    }
}
// a previous `MatchedPath` might exist if we're inside a nested Router
fn append_nested_matched_path(matched_path: &Arc<str>, extensions: &http::Extensions) -> Arc<str> {
if let Some(previous) = extensions
.get::<MatchedPath>()
.map(|matched_path| matched_path.as_str())
.or_else(|| Some(&extensions.get::<MatchedNestedPath>()?.0))
{
let previous = previous
.strip_suffix(NEST_TAIL_PARAM_CAPTURE)
.unwrap_or(previous);
let matched_path = format!("{previous}{matched_path}");
matched_path.into()
} else {
Arc::clone(matched_path)
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::{
extract::Request,
handler::HandlerWithoutStateExt,
middleware::map_request,
routing::{any, get},
test_helpers::*,
Router,
};
use http::StatusCode;
#[crate::test]
async fn extracting_on_handler() {
let app = Router::new().route(
"/{a}",
get(|path: MatchedPath| async move { path.as_str().to_owned() }),
);
let client = TestClient::new(app);
let res = client.get("/foo").await;
assert_eq!(res.text().await, "/{a}");
}
#[crate::test]
async fn extracting_on_handler_in_nested_router() {
let app = Router::new().nest(
"/{a}",
Router::new().route(
"/{b}",
get(|path: MatchedPath| async move { path.as_str().to_owned() }),
),
);
let client = TestClient::new(app);
let res = client.get("/foo/bar").await;
assert_eq!(res.text().await, "/{a}/{b}");
}
#[crate::test]
async fn extracting_on_handler_in_deeply_nested_router() {
let app = Router::new().nest(
"/{a}",
Router::new().nest(
"/{b}",
Router::new().route(
"/{c}",
get(|path: MatchedPath| async move { path.as_str().to_owned() }),
),
),
);
let client = TestClient::new(app);
let res = client.get("/foo/bar/baz").await;
assert_eq!(res.text().await, "/{a}/{b}/{c}");
}
#[crate::test]
async fn cannot_extract_nested_matched_path_in_middleware() {
async fn extract_matched_path<B>(
matched_path: Option<MatchedPath>,
req: Request<B>,
) -> Request<B> {
assert!(matched_path.is_none());
req
}
let app = Router::new()
.nest_service("/{a}", Router::new().route("/{b}", get(|| async move {})))
.layer(map_request(extract_matched_path));
let client = TestClient::new(app);
let res = client.get("/foo/bar").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn can_extract_nested_matched_path_in_middleware_using_nest() {
async fn extract_matched_path<B>(
matched_path: Option<MatchedPath>,
req: Request<B>,
) -> Request<B> {
assert_eq!(matched_path.unwrap().as_str(), "/{a}/{b}");
req
}
let app = Router::new()
.nest("/{a}", Router::new().route("/{b}", get(|| async move {})))
.layer(map_request(extract_matched_path));
let client = TestClient::new(app);
let res = client.get("/foo/bar").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn cannot_extract_nested_matched_path_in_middleware_via_extension() {
async fn assert_no_matched_path<B>(req: Request<B>) -> Request<B> {
assert!(req.extensions().get::<MatchedPath>().is_none());
req
}
let app = Router::new()
.nest_service("/{a}", Router::new().route("/{b}", get(|| async move {})))
.layer(map_request(assert_no_matched_path));
let client = TestClient::new(app);
let res = client.get("/foo/bar").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[tokio::test]
async fn can_extract_nested_matched_path_in_middleware_via_extension_using_nest() {
async fn assert_matched_path<B>(req: Request<B>) -> Request<B> {
assert!(req.extensions().get::<MatchedPath>().is_some());
req
}
let app = Router::new()
.nest("/{a}", Router::new().route("/{b}", get(|| async move {})))
.layer(map_request(assert_matched_path));
let client = TestClient::new(app);
let res = client.get("/foo/bar").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn can_extract_nested_matched_path_in_middleware_on_nested_router() {
async fn extract_matched_path<B>(matched_path: MatchedPath, req: Request<B>) -> Request<B> {
assert_eq!(matched_path.as_str(), "/{a}/{b}");
req
}
let app = Router::new().nest(
"/{a}",
Router::new()
.route("/{b}", get(|| async move {}))
.layer(map_request(extract_matched_path)),
);
let client = TestClient::new(app);
let res = client.get("/foo/bar").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn can_extract_nested_matched_path_in_middleware_on_nested_router_via_extension() {
async fn extract_matched_path<B>(req: Request<B>) -> Request<B> {
let matched_path = req.extensions().get::<MatchedPath>().unwrap();
assert_eq!(matched_path.as_str(), "/{a}/{b}");
req
}
let app = Router::new().nest(
"/{a}",
Router::new()
.route("/{b}", get(|| async move {}))
.layer(map_request(extract_matched_path)),
);
let client = TestClient::new(app);
let res = client.get("/foo/bar").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn extracting_on_nested_handler() {
async fn handler(path: Option<MatchedPath>) {
assert!(path.is_none());
}
let app = Router::new().nest_service("/{a}", handler.into_service());
let client = TestClient::new(app);
let res = client.get("/foo/bar").await;
assert_eq!(res.status(), StatusCode::OK);
}
// https://github.com/tokio-rs/axum/issues/1579
#[crate::test]
async fn doesnt_panic_if_router_called_from_wildcard_route() {
use tower::ServiceExt;
let app = Router::new().route(
"/{*path}",
any(|req: Request| {
Router::new()
.nest("/foo", Router::new().route("/bar", get(|| async {})))
.oneshot(req)
}),
);
let client = TestClient::new(app);
let res = client.get("/foo/bar").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn cant_extract_in_fallback() {
async fn handler(path: Option<MatchedPath>, req: Request) {
assert!(path.is_none());
assert!(req.extensions().get::<MatchedPath>().is_none());
}
let app = Router::new().fallback(handler);
let client = TestClient::new(app);
let res = client.get("/foo/bar").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn matching_colon() {
let app = Router::new().without_v07_checks().route(
"/:foo",
get(|path: MatchedPath| async move { path.as_str().to_owned() }),
);
let client = TestClient::new(app);
let res = client.get("/:foo").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "/:foo");
let res = client.get("/:bar").await;
assert_eq!(res.status(), StatusCode::NOT_FOUND);
let res = client.get("/foo").await;
assert_eq!(res.status(), StatusCode::NOT_FOUND);
}
#[crate::test]
async fn matching_asterisk() {
let app = Router::new().without_v07_checks().route(
"/*foo",
get(|path: MatchedPath| async move { path.as_str().to_owned() }),
);
let client = TestClient::new(app);
let res = client.get("/*foo").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "/*foo");
let res = client.get("/*bar").await;
assert_eq!(res.status(), StatusCode::NOT_FOUND);
let res = client.get("/foo").await;
assert_eq!(res.status(), StatusCode::NOT_FOUND);
}
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/extract/original_uri.rs | axum/src/extract/original_uri.rs | use super::{Extension, FromRequestParts};
use http::{request::Parts, Uri};
use std::convert::Infallible;
/// Extractor that gets the original request URI regardless of nesting.
///
/// This is necessary since [`Uri`](http::Uri), when used as an extractor, will
/// have the prefix stripped if used in a nested service.
///
/// # Example
///
/// ```
/// use axum::{
/// routing::get,
/// Router,
/// extract::OriginalUri,
/// http::Uri
/// };
///
/// let api_routes = Router::new()
/// .route(
/// "/users",
/// get(|uri: Uri, OriginalUri(original_uri): OriginalUri| async {
/// // `uri` is `/users`
/// // `original_uri` is `/api/users`
/// }),
/// );
///
/// let app = Router::new().nest("/api", api_routes);
/// # let _: Router = app;
/// ```
///
/// # Extracting via request extensions
///
/// `OriginalUri` can also be accessed from middleware via request extensions.
/// This is useful for example with [`Trace`](tower_http::trace::Trace) to
/// create a span that contains the full path, if your service might be nested:
///
/// ```
/// use axum::{
/// Router,
/// extract::OriginalUri,
/// http::Request,
/// routing::get,
/// };
/// use tower_http::trace::TraceLayer;
///
/// let api_routes = Router::new()
/// .route("/users/{id}", get(|| async { /* ... */ }))
/// .layer(
/// TraceLayer::new_for_http().make_span_with(|req: &Request<_>| {
/// let path = if let Some(path) = req.extensions().get::<OriginalUri>() {
/// // This will include `/api`
/// path.0.path().to_owned()
/// } else {
/// // The `OriginalUri` extension will always be present if using
/// // `Router` unless another extractor or middleware has removed it
/// req.uri().path().to_owned()
/// };
/// tracing::info_span!("http-request", %path)
/// }),
/// );
///
/// let app = Router::new().nest("/api", api_routes);
/// # let _: Router = app;
/// ```
#[derive(Debug, Clone)]
pub struct OriginalUri(pub Uri);
impl<S> FromRequestParts<S> for OriginalUri
where
S: Send + Sync,
{
type Rejection = Infallible;
async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
let uri = Extension::<Self>::from_request_parts(parts, state)
.await
.unwrap_or_else(|_| Extension(Self(parts.uri.clone())))
.0;
Ok(uri)
}
}
axum_core::__impl_deref!(OriginalUri: Uri);
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/extract/path/mod.rs | axum/src/extract/path/mod.rs | //! Extractor that will get captures from the URL and parse them using
//! [`serde`].
mod de;
use crate::{
extract::{rejection::*, FromRequestParts},
routing::url_params::UrlParams,
util::PercentDecodedStr,
};
use axum_core::{
extract::OptionalFromRequestParts,
response::{IntoResponse, Response},
RequestPartsExt as _,
};
use http::{request::Parts, StatusCode};
use serde_core::de::DeserializeOwned;
use std::{fmt, sync::Arc};
/// Extractor that will get captures from the URL and parse them using
/// [`serde`].
///
/// Any percent encoded parameters will be automatically decoded. The decoded
/// parameters must be valid UTF-8, otherwise `Path` will fail and return a `400
/// Bad Request` response.
///
/// # `Option<Path<T>>` behavior
///
/// You can use `Option<Path<T>>` as an extractor to allow the same handler to
/// be used in a route with parameters that deserialize to `T`, and another
/// route with no parameters at all.
///
/// # Example
///
/// These examples assume the `serde` feature of the [`uuid`] crate is enabled.
///
/// One `Path` can extract multiple captures. It is not necessary (and does
/// not work) to give a handler more than one `Path` argument.
///
/// [`uuid`]: https://crates.io/crates/uuid
///
/// ```rust,no_run
/// use axum::{
/// extract::Path,
/// routing::get,
/// Router,
/// };
/// use uuid::Uuid;
///
/// async fn users_teams_show(
/// Path((user_id, team_id)): Path<(Uuid, Uuid)>,
/// ) {
/// // ...
/// }
///
/// let app = Router::new().route("/users/{user_id}/team/{team_id}", get(users_teams_show));
/// # let _: Router = app;
/// ```
///
/// If the path contains only one parameter, then you can omit the tuple.
///
/// ```rust,no_run
/// use axum::{
/// extract::Path,
/// routing::get,
/// Router,
/// };
/// use uuid::Uuid;
///
/// async fn user_info(Path(user_id): Path<Uuid>) {
/// // ...
/// }
///
/// let app = Router::new().route("/users/{user_id}", get(user_info));
/// # let _: Router = app;
/// ```
///
/// Path segments also can be deserialized into any type that implements
/// [`serde::Deserialize`]. This includes tuples and structs:
///
/// ```rust,no_run
/// use axum::{
/// extract::Path,
/// routing::get,
/// Router,
/// };
/// use serde::Deserialize;
/// use uuid::Uuid;
///
/// // Path segment labels will be matched with struct field names
/// #[derive(Deserialize)]
/// struct Params {
/// user_id: Uuid,
/// team_id: Uuid,
/// }
///
/// async fn users_teams_show(
/// Path(Params { user_id, team_id }): Path<Params>,
/// ) {
/// // ...
/// }
///
/// // When using tuples the path segments will be matched by their position in the route
/// async fn users_teams_create(
/// Path((user_id, team_id)): Path<(String, String)>,
/// ) {
/// // ...
/// }
///
/// let app = Router::new().route(
/// "/users/{user_id}/team/{team_id}",
/// get(users_teams_show).post(users_teams_create),
/// );
/// # let _: Router = app;
/// ```
///
/// If you wish to capture all path parameters you can use `HashMap` or `Vec`:
///
/// ```rust,no_run
/// use axum::{
/// extract::Path,
/// routing::get,
/// Router,
/// };
/// use std::collections::HashMap;
///
/// async fn params_map(
/// Path(params): Path<HashMap<String, String>>,
/// ) {
/// // ...
/// }
///
/// async fn params_vec(
/// Path(params): Path<Vec<(String, String)>>,
/// ) {
/// // ...
/// }
///
/// let app = Router::new()
/// .route("/users/{user_id}/team/{team_id}", get(params_map).post(params_vec));
/// # let _: Router = app;
/// ```
///
/// # Providing detailed rejection output
///
/// If the URI cannot be deserialized into the target type the request will be rejected and an
/// error response will be returned. See [`customize-path-rejection`] for an example of how to customize that error.
///
/// [`serde`]: https://crates.io/crates/serde
/// [`serde::Deserialize`]: https://docs.rs/serde/1.0.127/serde/trait.Deserialize.html
/// [`customize-path-rejection`]: https://github.com/tokio-rs/axum/blob/main/examples/customize-path-rejection/src/main.rs
#[derive(Debug)]
pub struct Path<T>(pub T);
// Provides `Deref`/`DerefMut` to the inner `T` so handlers can use the
// extracted value without writing `.0`.
axum_core::__impl_deref!(Path);
impl<T, S> FromRequestParts<S> for Path<T>
where
    T: DeserializeOwned + Send,
    S: Send + Sync,
{
    type Rejection = PathRejection;
    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        // Non-generic helper so the extension lookup is compiled only once
        // rather than monomorphized for every `T`.
        fn raw_params(parts: &Parts) -> Result<&[(Arc<str>, PercentDecodedStr)], PathRejection> {
            match parts.extensions.get::<UrlParams>() {
                Some(UrlParams::Params(params)) => Ok(params),
                Some(UrlParams::InvalidUtf8InPathParam { key }) => {
                    let kind = ErrorKind::InvalidUtf8InPathParam {
                        key: key.to_string(),
                    };
                    let err = PathDeserializationError { kind };
                    Err(FailedToDeserializePathParams(err).into())
                }
                // No `UrlParams` extension means the router never inserted
                // captures for this request.
                None => Err(MissingPathParams.into()),
            }
        }
        let params = raw_params(parts)?;
        T::deserialize(de::PathDeserializer::new(params))
            .map(Self)
            .map_err(|err| {
                PathRejection::FailedToDeserializePathParams(FailedToDeserializePathParams(err))
            })
    }
}
impl<T, S> OptionalFromRequestParts<S> for Path<T>
where
    T: DeserializeOwned + Send + 'static,
    S: Send + Sync,
{
    type Rejection = PathRejection;
    async fn from_request_parts(
        parts: &mut Parts,
        _state: &S,
    ) -> Result<Option<Self>, Self::Rejection> {
        // Delegate to the required extractor; a route with zero captured
        // parameters becomes `None`, everything else passes through.
        let result = parts.extract::<Self>().await;
        match result {
            Ok(path) => Ok(Some(path)),
            Err(rejection) => {
                let no_params = matches!(
                    &rejection,
                    PathRejection::FailedToDeserializePathParams(e)
                        if matches!(e.kind(), ErrorKind::WrongNumberOfParameters { got: 0, .. })
                );
                if no_params {
                    Ok(None)
                } else {
                    Err(rejection)
                }
            }
        }
    }
}
// this wrapper type is used as the deserializer error to hide the `serde::de::Error` impl which
// would otherwise be public if we used `ErrorKind` as the error directly
#[derive(Debug)]
pub(crate) struct PathDeserializationError {
    pub(super) kind: ErrorKind,
}
impl PathDeserializationError {
    /// Wrap an [`ErrorKind`] in the crate-private error type.
    pub(super) fn new(kind: ErrorKind) -> Self {
        Self { kind }
    }
    /// Start building a `WrongNumberOfParameters` error.
    ///
    /// The builder records `got` first, then `expected`
    /// (see [`WrongNumberOfParameters`] below).
    pub(super) fn wrong_number_of_parameters() -> WrongNumberOfParameters<()> {
        WrongNumberOfParameters { got: () }
    }
    #[track_caller]
    pub(super) fn unsupported_type(name: &'static str) -> Self {
        Self::new(ErrorKind::UnsupportedType { name })
    }
}
// Typestate builder: `G` is `()` until `got` is supplied, then `usize`,
// so the two counts cannot be swapped by accident.
pub(super) struct WrongNumberOfParameters<G> {
    got: G,
}
impl<G> WrongNumberOfParameters<G> {
    #[allow(clippy::unused_self)]
    pub(super) fn got<G2>(self, got: G2) -> WrongNumberOfParameters<G2> {
        WrongNumberOfParameters { got }
    }
}
impl WrongNumberOfParameters<usize> {
    /// Finish the builder, producing the final error.
    pub(super) fn expected(self, expected: usize) -> PathDeserializationError {
        PathDeserializationError::new(ErrorKind::WrongNumberOfParameters {
            got: self.got,
            expected,
        })
    }
}
impl serde_core::de::Error for PathDeserializationError {
    #[inline]
    fn custom<T>(msg: T) -> Self
    where
        T: fmt::Display,
    {
        Self {
            kind: ErrorKind::Message(msg.to_string()),
        }
    }
}
impl fmt::Display for PathDeserializationError {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate straight to the kind's `Display`.
        self.kind.fmt(f)
    }
}
impl std::error::Error for PathDeserializationError {}
/// The kinds of errors that can happen when deserializing into a [`Path`].
///
/// This type is obtained through [`FailedToDeserializePathParams::kind`] or
/// [`FailedToDeserializePathParams::into_kind`] and is useful for building
/// more precise error messages.
#[must_use]
#[derive(Debug, PartialEq, Eq)]
#[non_exhaustive]
pub enum ErrorKind {
    /// The URI contained the wrong number of parameters.
    WrongNumberOfParameters {
        /// The number of actual parameters in the URI.
        got: usize,
        /// The number of expected parameters.
        expected: usize,
    },
    /// Failed to parse the value at a specific key into the expected type.
    ///
    /// This variant is used when deserializing into types that have named fields, such as structs.
    ParseErrorAtKey {
        /// The key at which the value was located.
        key: String,
        /// The value from the URI.
        value: String,
        /// The expected type of the value.
        expected_type: &'static str,
    },
    /// Failed to parse the value at a specific index into the expected type.
    ///
    /// This variant is used when deserializing into sequence types, such as tuples.
    ParseErrorAtIndex {
        /// The index at which the value was located.
        index: usize,
        /// The value from the URI.
        value: String,
        /// The expected type of the value.
        expected_type: &'static str,
    },
    /// Failed to parse a value into the expected type.
    ///
    /// This variant is used when deserializing into a primitive type (such as `String` and `u32`).
    ParseError {
        /// The value from the URI.
        value: String,
        /// The expected type of the value.
        expected_type: &'static str,
    },
    /// A parameter contained text that, once percent decoded, wasn't valid UTF-8.
    InvalidUtf8InPathParam {
        /// The key at which the invalid value was located.
        key: String,
    },
    /// Tried to serialize into an unsupported type such as nested maps.
    ///
    /// This error kind is caused by programmer errors and thus gets converted into a `500 Internal
    /// Server Error` response.
    UnsupportedType {
        /// The name of the unsupported type.
        name: &'static str,
    },
    /// Failed to deserialize the value with a custom deserialization error.
    DeserializeError {
        /// The key at which the invalid value was located.
        key: String,
        /// The value that failed to deserialize.
        value: String,
        /// The deserialization failure message.
        message: String,
    },
    /// Catch-all variant for errors that don't fit any other variant.
    Message(String),
}
impl fmt::Display for ErrorKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Message(error) => error.fmt(f),
            Self::InvalidUtf8InPathParam { key } => write!(f, "Invalid UTF-8 in `{key}`"),
            Self::WrongNumberOfParameters { got, expected } => {
                write!(
                    f,
                    "Wrong number of path arguments for `Path`. Expected {expected} but got {got}"
                )?;
                // `expected == 1` means the handler used a bare `Path<T>` for a
                // route with several captures, so point at the tuple/struct fix.
                if *expected == 1 {
                    write!(f, ". Note that multiple parameters must be extracted with a tuple `Path<(_, _)>` or a struct `Path<YourParams>`")?;
                }
                Ok(())
            }
            Self::UnsupportedType { name } => write!(f, "Unsupported type `{name}`"),
            Self::ParseErrorAtKey {
                key,
                value,
                expected_type,
            } => write!(
                f,
                "Cannot parse `{key}` with value `{value}` to a `{expected_type}`"
            ),
            Self::ParseError {
                value,
                expected_type,
            } => write!(f, "Cannot parse `{value}` to a `{expected_type}`"),
            Self::ParseErrorAtIndex {
                index,
                value,
                expected_type,
            } => write!(
                f,
                "Cannot parse value at index {index} with value `{value}` to a `{expected_type}`"
            ),
            Self::DeserializeError {
                key,
                value,
                message,
            } => write!(f, "Cannot parse `{key}` with value `{value}`: {message}"),
        }
    }
}
/// Rejection type for [`Path`] if the captured routes params couldn't be deserialized
/// into the expected type.
#[derive(Debug)]
pub struct FailedToDeserializePathParams(PathDeserializationError);
impl FailedToDeserializePathParams {
    /// Get a reference to the underlying error kind.
    pub fn kind(&self) -> &ErrorKind {
        &self.0.kind
    }
    /// Convert this error into the underlying error kind.
    pub fn into_kind(self) -> ErrorKind {
        self.0.kind
    }
    /// Get the response body text used for this rejection.
    #[must_use]
    pub fn body_text(&self) -> String {
        // Client-caused kinds get an "Invalid URL:" prefix; programmer-caused
        // kinds (wrong arity, unsupported type) are reported verbatim.
        match self.0.kind {
            ErrorKind::Message(_)
            | ErrorKind::DeserializeError { .. }
            | ErrorKind::InvalidUtf8InPathParam { .. }
            | ErrorKind::ParseError { .. }
            | ErrorKind::ParseErrorAtIndex { .. }
            | ErrorKind::ParseErrorAtKey { .. } => format!("Invalid URL: {}", self.0.kind),
            ErrorKind::WrongNumberOfParameters { .. } | ErrorKind::UnsupportedType { .. } => {
                self.0.kind.to_string()
            }
        }
    }
    /// Get the status code used for this rejection.
    #[must_use]
    pub fn status(&self) -> StatusCode {
        // Same split as `body_text`: client errors are 400, programmer errors
        // (wrong arity, unsupported type) are 500.
        match self.0.kind {
            ErrorKind::Message(_)
            | ErrorKind::DeserializeError { .. }
            | ErrorKind::InvalidUtf8InPathParam { .. }
            | ErrorKind::ParseError { .. }
            | ErrorKind::ParseErrorAtIndex { .. }
            | ErrorKind::ParseErrorAtKey { .. } => StatusCode::BAD_REQUEST,
            ErrorKind::WrongNumberOfParameters { .. } | ErrorKind::UnsupportedType { .. } => {
                StatusCode::INTERNAL_SERVER_ERROR
            }
        }
    }
}
impl IntoResponse for FailedToDeserializePathParams {
    fn into_response(self) -> Response {
        let body = self.body_text();
        axum_core::__log_rejection!(
            rejection_type = Self,
            body_text = body,
            status = self.status(),
        );
        (self.status(), body).into_response()
    }
}
impl fmt::Display for FailedToDeserializePathParams {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.0.fmt(f)
    }
}
impl std::error::Error for FailedToDeserializePathParams {}
/// Extractor that will get captures from the URL without deserializing them.
///
/// In general you should prefer to use [`Path`] as it is higher level, however `RawPathParams` is
/// suitable if just want the raw params without deserializing them and thus saving some
/// allocations.
///
/// Any percent encoded parameters will be automatically decoded. The decoded parameters must be
/// valid UTF-8, otherwise `RawPathParams` will fail and return a `400 Bad Request` response.
///
/// # Example
///
/// ```rust,no_run
/// use axum::{
/// extract::RawPathParams,
/// routing::get,
/// Router,
/// };
///
/// async fn users_teams_show(params: RawPathParams) {
///     for (key, value) in &params {
/// println!("{key:?} = {value:?}");
/// }
/// }
///
/// let app = Router::new().route("/users/{user_id}/team/{team_id}", get(users_teams_show));
/// # let _: Router = app;
/// ```
#[derive(Debug)]
// Pairs of (capture name, percent-decoded value), in route order.
pub struct RawPathParams(Vec<(Arc<str>, PercentDecodedStr)>);
impl<S> FromRequestParts<S> for RawPathParams
where
    S: Send + Sync,
{
    type Rejection = RawPathParamsRejection;
    async fn from_request_parts(parts: &mut Parts, _state: &S) -> Result<Self, Self::Rejection> {
        // Pull the router-inserted captures out of the request extensions.
        let params = match parts.extensions.get::<UrlParams>() {
            Some(UrlParams::Params(params)) => params,
            Some(UrlParams::InvalidUtf8InPathParam { key }) => {
                return Err(InvalidUtf8InPathParam {
                    key: Arc::clone(key),
                }
                .into());
            }
            None => {
                return Err(MissingPathParams.into());
            }
        };
        Ok(Self(params.clone()))
    }
}
impl RawPathParams {
    /// Get an iterator over the path parameters.
    #[must_use]
    pub fn iter(&self) -> RawPathParamsIter<'_> {
        RawPathParamsIter(self.0.iter())
    }
}
impl<'a> IntoIterator for &'a RawPathParams {
    type Item = (&'a str, &'a str);
    type IntoIter = RawPathParamsIter<'a>;
    fn into_iter(self) -> Self::IntoIter {
        self.iter()
    }
}
/// An iterator over raw path parameters.
///
/// Created with [`RawPathParams::iter`].
#[derive(Debug)]
pub struct RawPathParamsIter<'a>(std::slice::Iter<'a, (Arc<str>, PercentDecodedStr)>);
impl<'a> Iterator for RawPathParamsIter<'a> {
    type Item = (&'a str, &'a str);
    fn next(&mut self) -> Option<Self::Item> {
        // Borrow both halves of the pair as plain `&str`.
        self.0
            .next()
            .map(|(key, value)| (&**key, value.as_str()))
    }
}
/// Rejection used by [`RawPathParams`] if a parameter contained text that, once percent decoded,
/// wasn't valid UTF-8.
#[derive(Debug)]
pub struct InvalidUtf8InPathParam {
    // Name of the capture whose value failed UTF-8 validation.
    key: Arc<str>,
}
impl InvalidUtf8InPathParam {
    /// Get the response body text used for this rejection.
    #[must_use]
    pub fn body_text(&self) -> String {
        self.to_string()
    }
    /// Get the status code used for this rejection.
    #[must_use]
    pub fn status(&self) -> StatusCode {
        // Bad percent-encoding is a client error.
        StatusCode::BAD_REQUEST
    }
}
impl fmt::Display for InvalidUtf8InPathParam {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "Invalid UTF-8 in `{}`", self.key)
    }
}
impl std::error::Error for InvalidUtf8InPathParam {}
impl IntoResponse for InvalidUtf8InPathParam {
    fn into_response(self) -> Response {
        let body = self.body_text();
        axum_core::__log_rejection!(
            rejection_type = Self,
            body_text = body,
            status = self.status(),
        );
        (self.status(), body).into_response()
    }
}
#[cfg(test)]
mod tests {
    // End-to-end tests exercising `Path`/`RawPathParams` extraction through a
    // real `Router` and the in-process `TestClient`.
    use super::*;
    use crate::{routing::get, test_helpers::*, Router};
    use serde::Deserialize;
    use std::collections::HashMap;
    #[crate::test]
    async fn extracting_url_params() {
        let app = Router::new().route(
            "/users/{id}",
            get(|Path(id): Path<i32>| async move {
                assert_eq!(id, 42);
            })
            .post(|Path(params_map): Path<HashMap<String, i32>>| async move {
                assert_eq!(params_map.get("id").unwrap(), &1337);
            }),
        );
        let client = TestClient::new(app);
        let res = client.get("/users/42").await;
        assert_eq!(res.status(), StatusCode::OK);
        let res = client.post("/users/1337").await;
        assert_eq!(res.status(), StatusCode::OK);
    }
    #[crate::test]
    async fn extracting_url_params_multiple_times() {
        let app = Router::new().route("/users/{id}", get(|_: Path<i32>, _: Path<String>| async {}));
        let client = TestClient::new(app);
        let res = client.get("/users/42").await;
        assert_eq!(res.status(), StatusCode::OK);
    }
    #[crate::test]
    async fn percent_decoding() {
        let app = Router::new().route(
            "/{key}",
            get(|Path(param): Path<String>| async move { param }),
        );
        let client = TestClient::new(app);
        let res = client.get("/one%20two").await;
        assert_eq!(res.text().await, "one two");
    }
    #[crate::test]
    async fn supports_128_bit_numbers() {
        let app = Router::new()
            .route(
                "/i/{key}",
                get(|Path(param): Path<i128>| async move { param.to_string() }),
            )
            .route(
                "/u/{key}",
                get(|Path(param): Path<u128>| async move { param.to_string() }),
            );
        let client = TestClient::new(app);
        let res = client.get("/i/123").await;
        assert_eq!(res.text().await, "123");
        let res = client.get("/u/123").await;
        assert_eq!(res.text().await, "123");
    }
    #[crate::test]
    async fn wildcard() {
        let app = Router::new()
            .route(
                "/foo/{*rest}",
                get(|Path(param): Path<String>| async move { param }),
            )
            .route(
                "/bar/{*rest}",
                get(|Path(params): Path<HashMap<String, String>>| async move {
                    params.get("rest").unwrap().clone()
                }),
            );
        let client = TestClient::new(app);
        let res = client.get("/foo/bar/baz").await;
        assert_eq!(res.text().await, "bar/baz");
        let res = client.get("/bar/baz/qux").await;
        assert_eq!(res.text().await, "baz/qux");
    }
    #[crate::test]
    async fn captures_dont_match_empty_path() {
        let app = Router::new().route("/{key}", get(|| async {}));
        let client = TestClient::new(app);
        let res = client.get("/").await;
        assert_eq!(res.status(), StatusCode::NOT_FOUND);
        let res = client.get("/foo").await;
        assert_eq!(res.status(), StatusCode::OK);
    }
    #[crate::test]
    async fn captures_match_empty_inner_segments() {
        let app = Router::new().route(
            "/{key}/method",
            get(|Path(param): Path<String>| async move { param.clone() }),
        );
        let client = TestClient::new(app);
        let res = client.get("/abc/method").await;
        assert_eq!(res.text().await, "abc");
        let res = client.get("//method").await;
        assert_eq!(res.text().await, "");
    }
    #[crate::test]
    async fn captures_match_empty_inner_segments_near_end() {
        let app = Router::new().route(
            "/method/{key}/",
            get(|Path(param): Path<String>| async move { param.clone() }),
        );
        let client = TestClient::new(app);
        let res = client.get("/method/abc").await;
        assert_eq!(res.status(), StatusCode::NOT_FOUND);
        let res = client.get("/method/abc/").await;
        assert_eq!(res.text().await, "abc");
        let res = client.get("/method//").await;
        assert_eq!(res.text().await, "");
    }
    #[crate::test]
    async fn captures_match_empty_trailing_segment() {
        let app = Router::new().route(
            "/method/{key}",
            get(|Path(param): Path<String>| async move { param.clone() }),
        );
        let client = TestClient::new(app);
        let res = client.get("/method/abc/").await;
        assert_eq!(res.status(), StatusCode::NOT_FOUND);
        let res = client.get("/method/abc").await;
        assert_eq!(res.text().await, "abc");
        let res = client.get("/method/").await;
        assert_eq!(res.text().await, "");
        let res = client.get("/method").await;
        assert_eq!(res.status(), StatusCode::NOT_FOUND);
    }
    #[crate::test]
    async fn str_reference_deserialize() {
        // Deserializing via `&str` works because the deserializer hands out
        // borrowed (percent-decoded) strings.
        struct Param(String);
        impl<'de> serde::Deserialize<'de> for Param {
            fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
            where
                D: serde::Deserializer<'de>,
            {
                let s = <&str as serde::Deserialize>::deserialize(deserializer)?;
                Ok(Self(s.to_owned()))
            }
        }
        let app = Router::new().route(
            "/{key}",
            get(|param: Path<Param>| async move { param.0 .0 }),
        );
        let client = TestClient::new(app);
        let res = client.get("/foo").await;
        assert_eq!(res.text().await, "foo");
        // percent decoding should also work
        let res = client.get("/foo%20bar").await;
        assert_eq!(res.text().await, "foo bar");
    }
    #[crate::test]
    async fn two_path_extractors() {
        let app = Router::new().route("/{a}/{b}", get(|_: Path<String>, _: Path<String>| async {}));
        let client = TestClient::new(app);
        let res = client.get("/a/b").await;
        assert_eq!(res.status(), StatusCode::INTERNAL_SERVER_ERROR);
        assert_eq!(
            res.text().await,
            "Wrong number of path arguments for `Path`. Expected 1 but got 2. \
            Note that multiple parameters must be extracted with a tuple `Path<(_, _)>` or a struct `Path<YourParams>`",
        );
    }
    #[crate::test]
    async fn tuple_param_matches_exactly() {
        #[allow(dead_code)]
        #[derive(Deserialize)]
        struct Tuple(String, String);
        let app = Router::new()
            .route(
                "/foo/{a}/{b}/{c}",
                get(|_: Path<(String, String)>| async {}),
            )
            .route("/bar/{a}/{b}/{c}", get(|_: Path<Tuple>| async {}));
        let client = TestClient::new(app);
        let res = client.get("/foo/a/b/c").await;
        assert_eq!(res.status(), StatusCode::INTERNAL_SERVER_ERROR);
        assert_eq!(
            res.text().await,
            "Wrong number of path arguments for `Path`. Expected 2 but got 3",
        );
        let res = client.get("/bar/a/b/c").await;
        assert_eq!(res.status(), StatusCode::INTERNAL_SERVER_ERROR);
        assert_eq!(
            res.text().await,
            "Wrong number of path arguments for `Path`. Expected 2 but got 3",
        );
    }
    #[crate::test]
    async fn deserialize_into_vec_of_tuples() {
        let app = Router::new().route(
            "/{a}/{b}",
            get(|Path(params): Path<Vec<(String, String)>>| async move {
                assert_eq!(
                    params,
                    vec![
                        ("a".to_owned(), "foo".to_owned()),
                        ("b".to_owned(), "bar".to_owned())
                    ]
                );
            }),
        );
        let client = TestClient::new(app);
        let res = client.get("/foo/bar").await;
        assert_eq!(res.status(), StatusCode::OK);
    }
    #[crate::test]
    async fn type_that_uses_deserialize_any() {
        // `time::Date` deserializes via `deserialize_any`, which exercises the
        // fallback paths of the path deserializer.
        use time::Date;
        #[derive(Deserialize)]
        struct Params {
            a: Date,
            b: Date,
            c: Date,
        }
        let app = Router::new()
            .route(
                "/single/{a}",
                get(|Path(a): Path<Date>| async move { format!("single: {a}") }),
            )
            .route(
                "/tuple/{a}/{b}/{c}",
                get(|Path((a, b, c)): Path<(Date, Date, Date)>| async move {
                    format!("tuple: {a} {b} {c}")
                }),
            )
            .route(
                "/vec/{a}/{b}/{c}",
                get(|Path(vec): Path<Vec<Date>>| async move {
                    let [a, b, c]: [Date; 3] = vec.try_into().unwrap();
                    format!("vec: {a} {b} {c}")
                }),
            )
            .route(
                "/vec_pairs/{a}/{b}/{c}",
                get(|Path(vec): Path<Vec<(String, Date)>>| async move {
                    let [(_, a), (_, b), (_, c)]: [(String, Date); 3] = vec.try_into().unwrap();
                    format!("vec_pairs: {a} {b} {c}")
                }),
            )
            .route(
                "/map/{a}/{b}/{c}",
                get(|Path(mut map): Path<HashMap<String, Date>>| async move {
                    let a = map.remove("a").unwrap();
                    let b = map.remove("b").unwrap();
                    let c = map.remove("c").unwrap();
                    format!("map: {a} {b} {c}")
                }),
            )
            .route(
                "/struct/{a}/{b}/{c}",
                get(|Path(params): Path<Params>| async move {
                    format!("struct: {} {} {}", params.a, params.b, params.c)
                }),
            );
        let client = TestClient::new(app);
        let res = client.get("/single/2023-01-01").await;
        assert_eq!(res.text().await, "single: 2023-01-01");
        let res = client.get("/tuple/2023-01-01/2023-01-02/2023-01-03").await;
        assert_eq!(res.text().await, "tuple: 2023-01-01 2023-01-02 2023-01-03");
        let res = client.get("/vec/2023-01-01/2023-01-02/2023-01-03").await;
        assert_eq!(res.text().await, "vec: 2023-01-01 2023-01-02 2023-01-03");
        let res = client
            .get("/vec_pairs/2023-01-01/2023-01-02/2023-01-03")
            .await;
        assert_eq!(
            res.text().await,
            "vec_pairs: 2023-01-01 2023-01-02 2023-01-03",
        );
        let res = client.get("/map/2023-01-01/2023-01-02/2023-01-03").await;
        assert_eq!(res.text().await, "map: 2023-01-01 2023-01-02 2023-01-03");
        let res = client.get("/struct/2023-01-01/2023-01-02/2023-01-03").await;
        assert_eq!(res.text().await, "struct: 2023-01-01 2023-01-02 2023-01-03");
    }
    #[crate::test]
    async fn wrong_number_of_parameters_json() {
        use serde_json::Value;
        let app = Router::new()
            .route("/one/{a}", get(|_: Path<(Value, Value)>| async {}))
            .route("/two/{a}/{b}", get(|_: Path<Value>| async {}));
        let client = TestClient::new(app);
        let res = client.get("/one/1").await;
        assert!(res
            .text()
            .await
            .starts_with("Wrong number of path arguments for `Path`. Expected 2 but got 1"));
        let res = client.get("/two/1/2").await;
        assert!(res
            .text()
            .await
            .starts_with("Wrong number of path arguments for `Path`. Expected 1 but got 2"));
    }
    #[crate::test]
    async fn raw_path_params() {
        let app = Router::new().route(
            "/{a}/{b}/{c}",
            get(|params: RawPathParams| async move {
                params
                    .into_iter()
                    .map(|(key, value)| format!("{key}={value}"))
                    .collect::<Vec<_>>()
                    .join(" ")
            }),
        );
        let client = TestClient::new(app);
        let res = client.get("/foo/bar/baz").await;
        let body = res.text().await;
        assert_eq!(body, "a=foo b=bar c=baz");
    }
    #[crate::test]
    async fn deserialize_error_single_value() {
        let app = Router::new().route(
            "/resources/{res}",
            get(|res: Path<uuid::Uuid>| async move {
                let _res = res;
            }),
        );
        let client = TestClient::new(app);
        let response = client.get("/resources/123123-123-123123").await;
        let body = response.text().await;
        assert_eq!(
            body,
            "Invalid URL: Cannot parse `res` with value `123123-123-123123`: UUID parsing failed: invalid group count: expected 5, found 3"
        );
    }
    #[crate::test]
    async fn deserialize_error_multi_value() {
        let app = Router::new().route(
            "/resources/{res}/sub/{sub}",
            get(
                |Path((res, sub)): Path<(uuid::Uuid, uuid::Uuid)>| async move {
                    let _res = res;
                    let _sub = sub;
                },
            ),
        );
        let client = TestClient::new(app);
        let response = client.get("/resources/456456-123-456456/sub/123").await;
        let body = response.text().await;
        assert_eq!(
            body,
            "Invalid URL: Cannot parse `res` with value `456456-123-456456`: UUID parsing failed: invalid group count: expected 5, found 3"
        );
    }
    #[crate::test]
    async fn regression_3038() {
        // Arrays in path structs are unsupported and must produce a clear
        // error instead of panicking.
        #[derive(Deserialize)]
        #[allow(dead_code)]
        struct MoreChars {
            first_two: [char; 2],
            second_two: [char; 2],
            crate_name: String,
        }
        let app = Router::new().route(
            "/{first_two}/{second_two}/{crate_name}",
            get(|Path(_): Path<MoreChars>| async move {}),
        );
        let client = TestClient::new(app);
        let res = client.get("/te/st/_thing").await;
        let body = res.text().await;
        assert_eq!(body, "Invalid URL: array types are not supported");
    }
}
use super::{ErrorKind, PathDeserializationError};
use crate::util::PercentDecodedStr;
use serde_core::{
de::{self, DeserializeSeed, EnumAccess, Error, MapAccess, SeqAccess, VariantAccess, Visitor},
forward_to_deserialize_any, Deserializer,
};
use std::{any::type_name, sync::Arc};
// Generates a `Deserializer` method that always fails with
// `ErrorKind::UnsupportedType`, naming the visitor's output type.
macro_rules! unsupported_type {
    ($trait_fn:ident) => {
        fn $trait_fn<V>(self, _: V) -> Result<V::Value, Self::Error>
        where
            V: Visitor<'de>,
        {
            Err(PathDeserializationError::unsupported_type(type_name::<
                V::Value,
            >()))
        }
    };
}
// Generates a `Deserializer` method for the top-level deserializer that
// requires exactly one captured parameter and parses it with `FromStr`,
// mapping a parse failure to `ErrorKind::ParseError`.
macro_rules! parse_single_value {
    ($trait_fn:ident, $visit_fn:ident, $ty:literal) => {
        fn $trait_fn<V>(self, visitor: V) -> Result<V::Value, Self::Error>
        where
            V: Visitor<'de>,
        {
            if self.url_params.len() != 1 {
                return Err(PathDeserializationError::wrong_number_of_parameters()
                    .got(self.url_params.len())
                    .expected(1));
            }
            let value = self.url_params[0].1.parse().map_err(|_| {
                PathDeserializationError::new(ErrorKind::ParseError {
                    value: self.url_params[0].1.as_str().to_owned(),
                    expected_type: $ty,
                })
            })?;
            visitor.$visit_fn(value)
        }
    };
}
// Top-level serde deserializer over the full list of captured
// `(name, percent-decoded value)` path parameters.
pub(crate) struct PathDeserializer<'de> {
    url_params: &'de [(Arc<str>, PercentDecodedStr)],
}
impl<'de> PathDeserializer<'de> {
    #[inline]
    pub(crate) fn new(url_params: &'de [(Arc<str>, PercentDecodedStr)]) -> Self {
        PathDeserializer { url_params }
    }
}
impl<'de> Deserializer<'de> for PathDeserializer<'de> {
    type Error = PathDeserializationError;
    // Types that make no sense for path captures.
    unsupported_type!(deserialize_bytes);
    unsupported_type!(deserialize_option);
    unsupported_type!(deserialize_identifier);
    unsupported_type!(deserialize_ignored_any);
    // Primitives: require exactly one capture and parse it.
    parse_single_value!(deserialize_bool, visit_bool, "bool");
    parse_single_value!(deserialize_i8, visit_i8, "i8");
    parse_single_value!(deserialize_i16, visit_i16, "i16");
    parse_single_value!(deserialize_i32, visit_i32, "i32");
    parse_single_value!(deserialize_i64, visit_i64, "i64");
    parse_single_value!(deserialize_i128, visit_i128, "i128");
    parse_single_value!(deserialize_u8, visit_u8, "u8");
    parse_single_value!(deserialize_u16, visit_u16, "u16");
    parse_single_value!(deserialize_u32, visit_u32, "u32");
    parse_single_value!(deserialize_u64, visit_u64, "u64");
    parse_single_value!(deserialize_u128, visit_u128, "u128");
    parse_single_value!(deserialize_f32, visit_f32, "f32");
    parse_single_value!(deserialize_f64, visit_f64, "f64");
    parse_single_value!(deserialize_string, visit_string, "String");
    parse_single_value!(deserialize_byte_buf, visit_string, "String");
    parse_single_value!(deserialize_char, visit_char, "char");
    fn deserialize_any<V>(self, v: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        // Self-describing formats fall back to string; callers that need
        // other shapes hit the typed entry points instead.
        self.deserialize_str(v)
    }
    fn deserialize_str<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        if self.url_params.len() != 1 {
            return Err(PathDeserializationError::wrong_number_of_parameters()
                .got(self.url_params.len())
                .expected(1));
        }
        let key = &self.url_params[0].0;
        let value = &self.url_params[0].1;
        visitor
            .visit_borrowed_str(value)
            .map_err(|e: PathDeserializationError| {
                // Attach key/value context to custom visitor errors so the
                // user sees which capture failed.
                if let ErrorKind::Message(message) = &e.kind {
                    PathDeserializationError::new(ErrorKind::DeserializeError {
                        key: key.to_string(),
                        value: value.as_str().to_owned(),
                        message: message.to_owned(),
                    })
                } else {
                    e
                }
            })
    }
    fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        visitor.visit_unit()
    }
    fn deserialize_unit_struct<V>(
        self,
        _name: &'static str,
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        visitor.visit_unit()
    }
    fn deserialize_newtype_struct<V>(
        self,
        _name: &'static str,
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        visitor.visit_newtype_struct(self)
    }
    fn deserialize_seq<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        // `Vec<T>` / `Vec<(K, V)>`: no length requirement.
        visitor.visit_seq(SeqDeserializer {
            params: self.url_params,
            idx: 0,
        })
    }
    fn deserialize_tuple<V>(self, len: usize, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        // Tuples must match the capture count exactly.
        if self.url_params.len() != len {
            return Err(PathDeserializationError::wrong_number_of_parameters()
                .got(self.url_params.len())
                .expected(len));
        }
        visitor.visit_seq(SeqDeserializer {
            params: self.url_params,
            idx: 0,
        })
    }
    fn deserialize_tuple_struct<V>(
        self,
        _name: &'static str,
        len: usize,
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        if self.url_params.len() != len {
            return Err(PathDeserializationError::wrong_number_of_parameters()
                .got(self.url_params.len())
                .expected(len));
        }
        visitor.visit_seq(SeqDeserializer {
            params: self.url_params,
            idx: 0,
        })
    }
    fn deserialize_map<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        visitor.visit_map(MapDeserializer {
            params: self.url_params,
            value: None,
            key: None,
        })
    }
    fn deserialize_struct<V>(
        self,
        _name: &'static str,
        _fields: &'static [&'static str],
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        // Structs are deserialized like maps keyed by capture name.
        self.deserialize_map(visitor)
    }
    fn deserialize_enum<V>(
        self,
        _name: &'static str,
        _variants: &'static [&'static str],
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        if self.url_params.len() != 1 {
            return Err(PathDeserializationError::wrong_number_of_parameters()
                .got(self.url_params.len())
                .expected(1));
        }
        visitor.visit_enum(EnumDeserializer {
            value: &self.url_params[0].1,
        })
    }
}
// Walks the capture list as a serde map: each `(key, value)` pair is handed
// out as one map entry. `key`/`value` buffer the pending entry between
// `next_key_seed` and `next_value_seed`.
struct MapDeserializer<'de> {
    params: &'de [(Arc<str>, PercentDecodedStr)],
    key: Option<KeyOrIdx<'de>>,
    value: Option<&'de PercentDecodedStr>,
}
impl<'de> MapAccess<'de> for MapDeserializer<'de> {
    type Error = PathDeserializationError;
    fn next_key_seed<K>(&mut self, seed: K) -> Result<Option<K::Value>, Self::Error>
    where
        K: DeserializeSeed<'de>,
    {
        match self.params.split_first() {
            Some(((key, value), tail)) => {
                // Stash the value (and key, for error reporting) for the
                // matching `next_value_seed` call.
                self.value = Some(value);
                self.params = tail;
                self.key = Some(KeyOrIdx::Key(key));
                seed.deserialize(KeyDeserializer { key }).map(Some)
            }
            None => Ok(None),
        }
    }
    fn next_value_seed<V>(&mut self, seed: V) -> Result<V::Value, Self::Error>
    where
        V: DeserializeSeed<'de>,
    {
        match self.value.take() {
            Some(value) => seed.deserialize(ValueDeserializer {
                key: self.key.take(),
                value,
            }),
            // serde contract violation: value requested before a key.
            None => Err(PathDeserializationError::custom("value is missing")),
        }
    }
}
// Deserializes a capture *name*; only string-shaped targets are accepted.
struct KeyDeserializer<'de> {
    key: &'de str,
}
// Generates the string-visiting methods for `KeyDeserializer`.
macro_rules! parse_key {
    ($trait_fn:ident) => {
        fn $trait_fn<V>(self, visitor: V) -> Result<V::Value, Self::Error>
        where
            V: Visitor<'de>,
        {
            visitor.visit_str(&self.key)
        }
    };
}
impl<'de> Deserializer<'de> for KeyDeserializer<'de> {
    type Error = PathDeserializationError;
    parse_key!(deserialize_identifier);
    parse_key!(deserialize_str);
    parse_key!(deserialize_string);
    fn deserialize_any<V>(self, _visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        Err(PathDeserializationError::custom("Unexpected key type"))
    }
    // Everything else funnels into `deserialize_any` and therefore errors.
    forward_to_deserialize_any! {
        bool i8 i16 i32 i64 i128 u8 u16 u32 u64 u128 f32 f64 char bytes
        byte_buf option unit unit_struct seq tuple
        tuple_struct map newtype_struct struct enum ignored_any
    }
}
// Generates a `Deserializer` method for a single capture *value*: parse it
// with `FromStr` and, on failure, report the error at the recorded key or
// index (or as a bare `ParseError` when no position is known).
macro_rules! parse_value {
    ($trait_fn:ident, $visit_fn:ident, $ty:literal) => {
        fn $trait_fn<V>(mut self, visitor: V) -> Result<V::Value, Self::Error>
        where
            V: Visitor<'de>,
        {
            let v = self.value.parse().map_err(|_| {
                if let Some(key) = self.key.take() {
                    let kind = match key {
                        KeyOrIdx::Key(key) => ErrorKind::ParseErrorAtKey {
                            key: key.to_owned(),
                            value: self.value.as_str().to_owned(),
                            expected_type: $ty,
                        },
                        KeyOrIdx::Idx { idx: index, key: _ } => ErrorKind::ParseErrorAtIndex {
                            index,
                            value: self.value.as_str().to_owned(),
                            expected_type: $ty,
                        },
                    };
                    PathDeserializationError::new(kind)
                } else {
                    PathDeserializationError::new(ErrorKind::ParseError {
                        value: self.value.as_str().to_owned(),
                        expected_type: $ty,
                    })
                }
            })?;
            visitor.$visit_fn(v)
        }
    };
}
// Deserializes one capture value; `key` carries its position for error
// reporting.
#[derive(Debug)]
struct ValueDeserializer<'de> {
    key: Option<KeyOrIdx<'de>>,
    value: &'de PercentDecodedStr,
}
/// Deserializes one path segment into whatever primitive-ish type the
/// `Visitor` asks for. Anything that can't be represented by a single
/// segment (maps, nested sequences, structs, …) is rejected.
impl<'de> Deserializer<'de> for ValueDeserializer<'de> {
    type Error = PathDeserializationError;
    // A single segment can't encode a map or an identifier.
    unsupported_type!(deserialize_map);
    unsupported_type!(deserialize_identifier);
    // Primitives are parsed from the decoded string via `FromStr`.
    parse_value!(deserialize_bool, visit_bool, "bool");
    parse_value!(deserialize_i8, visit_i8, "i8");
    parse_value!(deserialize_i16, visit_i16, "i16");
    parse_value!(deserialize_i32, visit_i32, "i32");
    parse_value!(deserialize_i64, visit_i64, "i64");
    parse_value!(deserialize_i128, visit_i128, "i128");
    parse_value!(deserialize_u8, visit_u8, "u8");
    parse_value!(deserialize_u16, visit_u16, "u16");
    parse_value!(deserialize_u32, visit_u32, "u32");
    parse_value!(deserialize_u64, visit_u64, "u64");
    parse_value!(deserialize_u128, visit_u128, "u128");
    parse_value!(deserialize_f32, visit_f32, "f32");
    parse_value!(deserialize_f64, visit_f64, "f64");
    parse_value!(deserialize_string, visit_string, "String");
    parse_value!(deserialize_byte_buf, visit_string, "String");
    parse_value!(deserialize_char, visit_char, "char");
    // Without a type hint, treat the segment as a string.
    fn deserialize_any<V>(self, v: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        self.deserialize_str(v)
    }
    fn deserialize_str<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        visitor
            .visit_borrowed_str(self.value)
            .map_err(|e: PathDeserializationError| {
                // Attach the key/value context to plain `Message` errors
                // produced by the visitor (e.g. custom `Deserialize` impls).
                if let (ErrorKind::Message(message), Some(key)) = (&e.kind, self.key.as_ref()) {
                    PathDeserializationError::new(ErrorKind::DeserializeError {
                        key: key.key().to_owned(),
                        value: self.value.as_str().to_owned(),
                        message: message.to_owned(),
                    })
                } else {
                    e
                }
            })
    }
    fn deserialize_bytes<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        visitor.visit_borrowed_bytes(self.value.as_bytes())
    }
    // A present path parameter always deserializes as `Some`.
    fn deserialize_option<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        visitor.visit_some(self)
    }
    fn deserialize_unit<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        visitor.visit_unit()
    }
    fn deserialize_unit_struct<V>(
        self,
        _name: &'static str,
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        visitor.visit_unit()
    }
    fn deserialize_newtype_struct<V>(
        self,
        _name: &'static str,
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        visitor.visit_newtype_struct(self)
    }
    // Only 2-tuples are supported, so `Vec<(String, T)>` style extraction of
    // key/value pairs works; anything else is rejected.
    fn deserialize_tuple<V>(self, len: usize, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        // Yields the key first, then the value, so a `(key, value)` pair can
        // be deserialized from one parameter.
        struct PairDeserializer<'de> {
            key: Option<KeyOrIdx<'de>>,
            value: Option<&'de PercentDecodedStr>,
        }
        impl<'de> SeqAccess<'de> for PairDeserializer<'de> {
            type Error = PathDeserializationError;
            fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>, Self::Error>
            where
                T: DeserializeSeed<'de>,
            {
                match self.key.take() {
                    Some(KeyOrIdx::Idx { idx: _, key }) => {
                        return seed.deserialize(KeyDeserializer { key }).map(Some);
                    }
                    Some(KeyOrIdx::Key(_)) => {
                        return Err(PathDeserializationError::custom(
                            "array types are not supported",
                        ));
                    }
                    None => {}
                };
                // Key already consumed: emit the value (or end the sequence).
                self.value
                    .take()
                    .map(|value| seed.deserialize(ValueDeserializer { key: None, value }))
                    .transpose()
            }
        }
        if len == 2 {
            match self.key {
                Some(key) => visitor.visit_seq(PairDeserializer {
                    key: Some(key),
                    value: Some(self.value),
                }),
                // `self.key` is only `None` when deserializing maps so `deserialize_seq`
                // wouldn't be called for that
                None => unreachable!(),
            }
        } else {
            Err(PathDeserializationError::unsupported_type(type_name::<
                V::Value,
            >()))
        }
    }
    // Nested sequences can't be encoded in a single path segment.
    fn deserialize_seq<V>(self, _visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        Err(PathDeserializationError::unsupported_type(type_name::<
            V::Value,
        >()))
    }
    fn deserialize_tuple_struct<V>(
        self,
        _name: &'static str,
        _len: usize,
        _visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        Err(PathDeserializationError::unsupported_type(type_name::<
            V::Value,
        >()))
    }
    fn deserialize_struct<V>(
        self,
        _name: &'static str,
        _fields: &'static [&'static str],
        _visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        Err(PathDeserializationError::unsupported_type(type_name::<
            V::Value,
        >()))
    }
    // Enums are supported as unit variants named by the segment value.
    fn deserialize_enum<V>(
        self,
        _name: &'static str,
        _variants: &'static [&'static str],
        visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        visitor.visit_enum(EnumDeserializer { value: self.value })
    }
    fn deserialize_ignored_any<V>(self, visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        visitor.visit_unit()
    }
}
/// Deserializes an enum from a single path segment. Only unit variants can be
/// represented, since the segment carries just the variant name.
struct EnumDeserializer<'de> {
    value: &'de str,
}
impl<'de> EnumAccess<'de> for EnumDeserializer<'de> {
    type Error = PathDeserializationError;
    type Variant = UnitVariant;
    fn variant_seed<V>(self, seed: V) -> Result<(V::Value, Self::Variant), Self::Error>
    where
        V: de::DeserializeSeed<'de>,
    {
        // The segment value is the variant name itself.
        Ok((
            seed.deserialize(KeyDeserializer { key: self.value })?,
            UnitVariant,
        ))
    }
}
/// Variant access for enums deserialized from a path segment: only unit
/// variants can be represented, so all data-carrying forms are rejected.
struct UnitVariant;
impl<'de> VariantAccess<'de> for UnitVariant {
    type Error = PathDeserializationError;
    fn unit_variant(self) -> Result<(), Self::Error> {
        Ok(())
    }
    fn newtype_variant_seed<T>(self, _seed: T) -> Result<T::Value, Self::Error>
    where
        T: DeserializeSeed<'de>,
    {
        Err(PathDeserializationError::unsupported_type(
            "newtype enum variant",
        ))
    }
    fn tuple_variant<V>(self, _len: usize, _visitor: V) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        Err(PathDeserializationError::unsupported_type(
            "tuple enum variant",
        ))
    }
    fn struct_variant<V>(
        self,
        _fields: &'static [&'static str],
        _visitor: V,
    ) -> Result<V::Value, Self::Error>
    where
        V: Visitor<'de>,
    {
        Err(PathDeserializationError::unsupported_type(
            "struct enum variant",
        ))
    }
}
/// Deserializes the parameter list as a sequence, tagging each value with its
/// positional index so parse errors can report where they occurred.
struct SeqDeserializer<'de> {
    params: &'de [(Arc<str>, PercentDecodedStr)],
    idx: usize,
}
impl<'de> SeqAccess<'de> for SeqDeserializer<'de> {
    type Error = PathDeserializationError;
    fn next_element_seed<T>(&mut self, seed: T) -> Result<Option<T::Value>, Self::Error>
    where
        T: DeserializeSeed<'de>,
    {
        if let Some(((key, value), rest)) = self.params.split_first() {
            // Consume one parameter and advance the positional index.
            self.params = rest;
            let idx = self.idx;
            self.idx += 1;
            seed.deserialize(ValueDeserializer {
                key: Some(KeyOrIdx::Idx { idx, key }),
                value,
            })
            .map(Some)
        } else {
            Ok(None)
        }
    }
}
/// Records where a path-parameter value came from when reporting errors:
/// a named map key, or a positional index that still remembers its key.
#[derive(Debug, Clone)]
enum KeyOrIdx<'de> {
    Key(&'de str),
    Idx { idx: usize, key: &'de str },
}
impl<'de> KeyOrIdx<'de> {
    /// Returns the parameter name regardless of variant.
    fn key(&self) -> &'de str {
        match *self {
            Self::Key(key) => key,
            Self::Idx { key, .. } => key,
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
use serde::Deserialize;
use std::collections::HashMap;
    /// Exercises unit-variant deserialization, including a renamed variant.
    #[derive(Debug, Deserialize, Eq, PartialEq)]
    enum MyEnum {
        A,
        B,
        #[serde(rename = "c")]
        C,
    }
    /// Field order deliberately differs from parameter order, to check that
    /// struct fields are matched by name rather than position.
    #[derive(Debug, Deserialize, Eq, PartialEq)]
    struct Struct {
        c: String,
        b: bool,
        a: i32,
    }
    /// Builds the `(key, percent-decoded value)` pairs that
    /// `PathDeserializer` consumes, mirroring what the router inserts for
    /// matched path parameters.
    fn create_url_params<I, K, V>(values: I) -> Vec<(Arc<str>, PercentDecodedStr)>
    where
        I: IntoIterator<Item = (K, V)>,
        K: AsRef<str>,
        V: AsRef<str>,
    {
        values
            .into_iter()
            .map(|(k, v)| (Arc::from(k.as_ref()), PercentDecodedStr::new(v).unwrap()))
            .collect()
    }
macro_rules! check_single_value {
($ty:ty, $value_str:literal, $value:expr) => {
#[allow(clippy::bool_assert_comparison)]
{
let url_params = create_url_params(vec![("value", $value_str)]);
let deserializer = PathDeserializer::new(&url_params);
assert_eq!(<$ty>::deserialize(deserializer).unwrap(), $value);
}
};
}
#[test]
fn test_parse_single_value() {
check_single_value!(bool, "true", true);
check_single_value!(bool, "false", false);
check_single_value!(i8, "-123", -123);
check_single_value!(i16, "-123", -123);
check_single_value!(i32, "-123", -123);
check_single_value!(i64, "-123", -123);
check_single_value!(i128, "123", 123);
check_single_value!(u8, "123", 123);
check_single_value!(u16, "123", 123);
check_single_value!(u32, "123", 123);
check_single_value!(u64, "123", 123);
check_single_value!(u128, "123", 123);
check_single_value!(f32, "123", 123.0);
check_single_value!(f64, "123", 123.0);
check_single_value!(String, "abc", "abc");
check_single_value!(String, "one%20two", "one two");
check_single_value!(&str, "abc", "abc");
check_single_value!(&str, "one%20two", "one two");
check_single_value!(char, "a", 'a');
let url_params = create_url_params(vec![("a", "B")]);
assert_eq!(
MyEnum::deserialize(PathDeserializer::new(&url_params)).unwrap(),
MyEnum::B
);
let url_params = create_url_params(vec![("a", "1"), ("b", "2")]);
let error_kind = i32::deserialize(PathDeserializer::new(&url_params))
.unwrap_err()
.kind;
assert!(matches!(
error_kind,
ErrorKind::WrongNumberOfParameters {
expected: 1,
got: 2
}
));
}
#[test]
fn test_parse_seq() {
let url_params = create_url_params(vec![("a", "1"), ("b", "true"), ("c", "abc")]);
assert_eq!(
<(i32, bool, String)>::deserialize(PathDeserializer::new(&url_params)).unwrap(),
(1, true, "abc".to_owned())
);
#[derive(Debug, Deserialize, Eq, PartialEq)]
struct TupleStruct(i32, bool, String);
assert_eq!(
TupleStruct::deserialize(PathDeserializer::new(&url_params)).unwrap(),
TupleStruct(1, true, "abc".to_owned())
);
let url_params = create_url_params(vec![("a", "1"), ("b", "2"), ("c", "3")]);
assert_eq!(
<Vec<i32>>::deserialize(PathDeserializer::new(&url_params)).unwrap(),
vec![1, 2, 3]
);
let url_params = create_url_params(vec![("a", "c"), ("a", "B")]);
assert_eq!(
<Vec<MyEnum>>::deserialize(PathDeserializer::new(&url_params)).unwrap(),
vec![MyEnum::C, MyEnum::B]
);
}
#[test]
fn test_parse_seq_tuple_string_string() {
let url_params = create_url_params(vec![("a", "foo"), ("b", "bar")]);
assert_eq!(
<Vec<(String, String)>>::deserialize(PathDeserializer::new(&url_params)).unwrap(),
vec![
("a".to_owned(), "foo".to_owned()),
("b".to_owned(), "bar".to_owned())
]
);
}
#[test]
fn test_parse_seq_tuple_string_parse() {
let url_params = create_url_params(vec![("a", "1"), ("b", "2")]);
assert_eq!(
<Vec<(String, u32)>>::deserialize(PathDeserializer::new(&url_params)).unwrap(),
vec![("a".to_owned(), 1), ("b".to_owned(), 2)]
);
}
#[test]
fn test_parse_struct() {
let url_params = create_url_params(vec![("a", "1"), ("b", "true"), ("c", "abc")]);
assert_eq!(
Struct::deserialize(PathDeserializer::new(&url_params)).unwrap(),
Struct {
c: "abc".to_owned(),
b: true,
a: 1,
}
);
}
#[test]
fn test_parse_struct_ignoring_additional_fields() {
let url_params = create_url_params(vec![
("a", "1"),
("b", "true"),
("c", "abc"),
("d", "false"),
]);
assert_eq!(
Struct::deserialize(PathDeserializer::new(&url_params)).unwrap(),
Struct {
c: "abc".to_owned(),
b: true,
a: 1,
}
);
}
#[test]
fn test_parse_map() {
let url_params = create_url_params(vec![("a", "1"), ("b", "true"), ("c", "abc")]);
assert_eq!(
<HashMap<String, String>>::deserialize(PathDeserializer::new(&url_params)).unwrap(),
[("a", "1"), ("b", "true"), ("c", "abc")]
.iter()
.map(|(key, value)| ((*key).to_owned(), (*value).to_owned()))
.collect()
);
}
macro_rules! test_parse_error {
(
$params:expr,
$ty:ty,
$expected_error_kind:expr $(,)?
) => {
let url_params = create_url_params($params);
let actual_error_kind = <$ty>::deserialize(PathDeserializer::new(&url_params))
.unwrap_err()
.kind;
assert_eq!(actual_error_kind, $expected_error_kind);
};
}
#[test]
fn test_parse_tuple_too_many_fields() {
test_parse_error!(
vec![("a", "abc"), ("b", "true"), ("c", "1"), ("d", "false"),],
(&str, bool, u32),
ErrorKind::WrongNumberOfParameters {
got: 4,
expected: 3,
}
);
}
#[test]
fn test_wrong_number_of_parameters_error() {
test_parse_error!(
vec![("a", "1")],
(u32, u32),
ErrorKind::WrongNumberOfParameters {
got: 1,
expected: 2,
}
);
}
#[test]
fn test_parse_error_at_key_error() {
#[derive(Debug, Deserialize)]
#[allow(dead_code)]
struct Params {
a: u32,
}
test_parse_error!(
vec![("a", "false")],
Params,
ErrorKind::ParseErrorAtKey {
key: "a".to_owned(),
value: "false".to_owned(),
expected_type: "u32",
}
);
}
#[test]
fn test_parse_error_at_key_error_multiple() {
#[derive(Debug, Deserialize)]
#[allow(dead_code)]
struct Params {
a: u32,
b: u32,
}
test_parse_error!(
vec![("a", "false")],
Params,
ErrorKind::ParseErrorAtKey {
key: "a".to_owned(),
value: "false".to_owned(),
expected_type: "u32",
}
);
}
#[test]
fn test_parse_error_at_index_error() {
test_parse_error!(
vec![("a", "false"), ("b", "true")],
(bool, u32),
ErrorKind::ParseErrorAtIndex {
index: 1,
value: "true".to_owned(),
expected_type: "u32",
}
);
}
#[test]
fn test_parse_error_error() {
test_parse_error!(
vec![("a", "false")],
u32,
ErrorKind::ParseError {
value: "false".to_owned(),
expected_type: "u32",
}
);
}
#[test]
fn test_unsupported_type_error_nested_data_structure() {
test_parse_error!(
vec![("a", "false")],
Vec<Vec<u32>>,
ErrorKind::UnsupportedType {
name: "alloc::vec::Vec<u32>",
}
);
}
#[test]
fn test_parse_seq_tuple_unsupported_key_type() {
test_parse_error!(
vec![("a", "false")],
Vec<(u32, String)>,
ErrorKind::Message("Unexpected key type".to_owned())
);
}
#[test]
fn test_parse_seq_wrong_tuple_length() {
test_parse_error!(
vec![("a", "false")],
Vec<(String, String, String)>,
ErrorKind::UnsupportedType {
name: "(alloc::string::String, alloc::string::String, alloc::string::String)",
}
);
}
#[test]
fn test_parse_seq_seq() {
test_parse_error!(
vec![("a", "false")],
Vec<Vec<String>>,
ErrorKind::UnsupportedType {
name: "alloc::vec::Vec<alloc::string::String>",
}
);
}
#[test]
fn test_deserialize_key_value() {
test_parse_error!(
vec![("id", "123123-123-123123")],
uuid::Uuid,
ErrorKind::DeserializeError {
key: "id".to_owned(),
value: "123123-123-123123".to_owned(),
message: "UUID parsing failed: invalid group count: expected 5, found 3".to_owned(),
}
);
}
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/routing/strip_prefix.rs | axum/src/routing/strip_prefix.rs | use http::{Request, Uri};
use std::{
sync::Arc,
task::{Context, Poll},
};
use tower::Layer;
use tower_layer::layer_fn;
use tower_service::Service;
/// Middleware that removes a (possibly parameterized) prefix from the request
/// URI before calling the inner service. Used by `Router::nest`-style routing.
#[derive(Clone)]
pub(super) struct StripPrefix<S> {
    inner: S,
    // The prefix to strip; may contain `{param}` capture segments.
    prefix: Arc<str>,
}
impl<S> StripPrefix<S> {
    /// Returns a `tower::Layer` that wraps a service in `StripPrefix` for the
    /// given prefix. The prefix is shared via `Arc` so cloning the layer is
    /// cheap.
    pub(super) fn layer(prefix: &str) -> impl Layer<S, Service = Self> + Clone {
        let prefix = Arc::from(prefix);
        layer_fn(move |inner| Self {
            inner,
            prefix: Arc::clone(&prefix),
        })
    }
}
impl<S, B> Service<Request<B>> for StripPrefix<S>
where
    S: Service<Request<B>>,
{
    type Response = S::Response;
    type Error = S::Error;
    type Future = S::Future;
    #[inline]
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        self.inner.poll_ready(cx)
    }
    fn call(&mut self, mut req: Request<B>) -> Self::Future {
        // If the prefix matches, rewrite the URI; otherwise forward the
        // request untouched and let the inner service handle it.
        if let Some(new_uri) = strip_prefix(req.uri(), &self.prefix) {
            *req.uri_mut() = new_uri;
        }
        self.inner.call(req)
    }
}
/// Removes `prefix` from the front of `uri`'s path, returning the rewritten
/// URI, or `None` if the prefix doesn't match. Capture segments (`{param}`)
/// in the prefix match any single path segment. The query string is
/// preserved.
fn strip_prefix(uri: &Uri, prefix: &str) -> Option<Uri> {
    let path_and_query = uri.path_and_query()?;
    // Check whether the prefix matches the path and if so how long the matching prefix is.
    //
    // For example:
    //
    // prefix = /api
    // path   = /api/users
    //          ^^^^ this much is matched and the length is 4. Thus if we chop off the first 4
    //          characters we get the remainder
    //
    // prefix = /api/{version}
    // path   = /api/v0/users
    //          ^^^^^^^ this much is matched and the length is 7.
    //
    // `Some(n)` = matched so far, `n` bytes (slashes included); `None` = mismatch.
    let mut matching_prefix_length = Some(0);
    for item in zip_longest(segments(path_and_query.path()), segments(prefix)) {
        // count the `/`
        *matching_prefix_length.as_mut().unwrap() += 1;
        match item {
            Item::Both(path_segment, prefix_segment) => {
                if is_capture(prefix_segment) || path_segment == prefix_segment {
                    // the prefix segment is either a param, which matches anything, or
                    // it actually matches the path segment
                    *matching_prefix_length.as_mut().unwrap() += path_segment.len();
                } else if prefix_segment.is_empty() {
                    // the prefix ended in a `/` so we got a match.
                    //
                    // For example:
                    //
                    // prefix = /foo/
                    // path   = /foo/bar
                    //
                    // The prefix matches and the new path should be `/bar`
                    break;
                } else {
                    // the prefix segment didn't match so there is no match
                    matching_prefix_length = None;
                    break;
                }
            }
            // the path had more segments than the prefix but we got a match.
            //
            // For example:
            //
            // prefix = /foo
            // path   = /foo/bar
            Item::First(_) => {
                break;
            }
            // the prefix had more segments than the path so there is no match
            Item::Second(_) => {
                matching_prefix_length = None;
                break;
            }
        }
    }
    // if the prefix matches it will always do so up until a `/`, it cannot match only
    // part of a segment. Therefore this will always be at a char boundary and `split_at` won't
    // panic
    let after_prefix = uri.path().split_at(matching_prefix_length?).1;
    // Re-root the remainder at `/` and re-attach the query string, if any.
    let new_path_and_query = match (after_prefix.starts_with('/'), path_and_query.query()) {
        (true, None) => after_prefix.parse().unwrap(),
        (true, Some(query)) => format!("{after_prefix}?{query}").parse().unwrap(),
        (false, None) => format!("/{after_prefix}").parse().unwrap(),
        (false, Some(query)) => format!("/{after_prefix}?{query}").parse().unwrap(),
    };
    let mut parts = uri.clone().into_parts();
    parts.path_and_query = Some(new_path_and_query);
    Some(Uri::from_parts(parts).unwrap())
}
/// Splits a path into its `/`-separated segments, excluding the empty
/// segment produced by the mandatory leading `/`.
///
/// Panics if `s` does not start with `/`; axum guarantees this invariant
/// before we get here.
fn segments(s: &str) -> impl Iterator<Item = &str> {
    assert!(
        s.starts_with('/'),
        "path didn't start with '/'. axum should have caught this higher up."
    );
    // Drop the leading `/` before splitting so `/a/b` yields `["a", "b"]`
    // rather than `["", "a", "b"]`. Safe: `/` is a single byte at index 0.
    s[1..].split('/')
}
/// Zips two iterators but keeps yielding after the shorter one is exhausted,
/// tagging each item with which side(s) it came from.
fn zip_longest<I, I2>(a: I, b: I2) -> impl Iterator<Item = Item<I::Item>>
where
    I: Iterator,
    I2: Iterator<Item = I::Item>,
{
    // Fuse both sides so it is safe to keep polling an exhausted iterator.
    let mut a = a.fuse();
    let mut b = b.fuse();
    std::iter::from_fn(move || match (a.next(), b.next()) {
        (Some(x), Some(y)) => Some(Item::Both(x, y)),
        (Some(x), None) => Some(Item::First(x)),
        (None, Some(y)) => Some(Item::Second(y)),
        (None, None) => None,
    })
}
/// Returns `true` if a route segment is a `{name}` capture (matches any path
/// segment). Escaped braces (`{{`, `}}`) and wildcard captures (`{*name}`)
/// are not treated as plain captures.
fn is_capture(segment: &str) -> bool {
    match segment
        .strip_prefix('{')
        .and_then(|s| s.strip_suffix('}'))
    {
        // `{{…`/`…}}` are escapes and `{*…}` is a wildcard, not a capture.
        Some(inner) => {
            !inner.starts_with('{') && !inner.ends_with('}') && !inner.starts_with('*')
        }
        None => false,
    }
}
/// Output of [`zip_longest`]: which iterator(s) produced the item.
#[derive(Debug)]
enum Item<T> {
    Both(T, T),
    First(T),
    Second(T),
}
#[cfg(test)]
mod tests {
#[allow(unused_imports)]
use super::*;
use quickcheck::Arbitrary;
use quickcheck_macros::quickcheck;
macro_rules! test {
(
$name:ident,
uri = $uri:literal,
prefix = $prefix:literal,
expected = $expected:expr,
) => {
#[test]
fn $name() {
let uri = $uri.parse().unwrap();
let new_uri = strip_prefix(&uri, $prefix).map(|uri| uri.to_string());
assert_eq!(new_uri.as_deref(), $expected);
}
};
}
    // --- literal prefixes, single segment ---
    test!(empty, uri = "/", prefix = "/", expected = Some("/"),);
    test!(
        single_segment,
        uri = "/a",
        prefix = "/a",
        expected = Some("/"),
    );
    test!(
        single_segment_root_uri,
        uri = "/",
        prefix = "/a",
        expected = None,
    );
    // the prefix is empty, so removing it should have no effect
    test!(
        single_segment_root_prefix,
        uri = "/a",
        prefix = "/",
        expected = Some("/a"),
    );
    test!(
        single_segment_no_match,
        uri = "/a",
        prefix = "/b",
        expected = None,
    );
    // --- trailing-slash handling ---
    test!(
        single_segment_trailing_slash,
        uri = "/a/",
        prefix = "/a/",
        expected = Some("/"),
    );
    test!(
        single_segment_trailing_slash_2,
        uri = "/a",
        prefix = "/a/",
        expected = None,
    );
    test!(
        single_segment_trailing_slash_3,
        uri = "/a/",
        prefix = "/a",
        expected = Some("/"),
    );
    // --- literal prefixes, multiple segments ---
    test!(
        multi_segment,
        uri = "/a/b",
        prefix = "/a",
        expected = Some("/b"),
    );
    test!(
        multi_segment_2,
        uri = "/b/a",
        prefix = "/a",
        expected = None,
    );
    test!(
        multi_segment_3,
        uri = "/a",
        prefix = "/a/b",
        expected = None,
    );
    test!(
        multi_segment_4,
        uri = "/a/b",
        prefix = "/b",
        expected = None,
    );
    test!(
        multi_segment_trailing_slash,
        uri = "/a/b/",
        prefix = "/a/b/",
        expected = Some("/"),
    );
    test!(
        multi_segment_trailing_slash_2,
        uri = "/a/b",
        prefix = "/a/b/",
        expected = None,
    );
    test!(
        multi_segment_trailing_slash_3,
        uri = "/a/b/",
        prefix = "/a/b",
        expected = Some("/"),
    );
    // --- `{param}` capture segments match any path segment ---
    test!(
        param_0,
        uri = "/",
        prefix = "/{param}",
        expected = Some("/"),
    );
    test!(
        param_1,
        uri = "/a",
        prefix = "/{param}",
        expected = Some("/"),
    );
    test!(
        param_2,
        uri = "/a/b",
        prefix = "/{param}",
        expected = Some("/b"),
    );
    test!(
        param_3,
        uri = "/b/a",
        prefix = "/{param}",
        expected = Some("/a"),
    );
    test!(
        param_4,
        uri = "/a/b",
        prefix = "/a/{param}",
        expected = Some("/"),
    );
    test!(
        param_5,
        uri = "/b/a",
        prefix = "/a/{param}",
        expected = None,
    );
    test!(
        param_6,
        uri = "/a/b",
        prefix = "/{param}/a",
        expected = None,
    );
    test!(
        param_7,
        uri = "/b/a",
        prefix = "/{param}/a",
        expected = Some("/"),
    );
    test!(
        param_8,
        uri = "/a/b/c",
        prefix = "/a/{param}/c",
        expected = Some("/"),
    );
    test!(
        param_9,
        uri = "/c/b/a",
        prefix = "/a/{param}/c",
        expected = None,
    );
    test!(
        param_10,
        uri = "/a/",
        prefix = "/{param}",
        expected = Some("/"),
    );
    test!(param_11, uri = "/a", prefix = "/{param}/", expected = None,);
    test!(
        param_12,
        uri = "/a/",
        prefix = "/{param}/",
        expected = Some("/"),
    );
    test!(
        param_13,
        uri = "/a/a",
        prefix = "/a/",
        expected = Some("/a"),
    );
    // Property test: `strip_prefix` must never panic, whatever combination of
    // URI and prefix is generated.
    #[quickcheck]
    fn does_not_panic(uri_and_prefix: UriAndPrefix) -> bool {
        let UriAndPrefix { uri, prefix } = uri_and_prefix;
        strip_prefix(&uri, &prefix);
        true
    }
    /// A randomly generated URI together with a prefix that partially matches it.
    #[derive(Clone, Debug)]
    struct UriAndPrefix {
        uri: Uri,
        prefix: String,
    }
impl Arbitrary for UriAndPrefix {
fn arbitrary(g: &mut quickcheck::Gen) -> Self {
let mut uri = String::new();
let mut prefix = String::new();
let size = u8_between(1, 20, g);
for _ in 0..size {
let segment = ascii_alphanumeric(g);
uri.push('/');
uri.push_str(&segment);
prefix.push('/');
let make_matching_segment = bool::arbitrary(g);
let make_capture = bool::arbitrary(g);
match (make_matching_segment, make_capture) {
(_, true) => {
prefix.push_str(":a");
}
(true, false) => {
prefix.push_str(&segment);
}
(false, false) => {
prefix.push_str(&ascii_alphanumeric(g));
}
}
}
if bool::arbitrary(g) {
uri.push('/');
}
if bool::arbitrary(g) {
prefix.push('/');
}
Self {
uri: uri.parse().unwrap(),
prefix,
}
}
}
    /// Generates a non-empty string of ASCII alphanumeric characters (safe to
    /// embed in a URI path segment).
    fn ascii_alphanumeric(g: &mut quickcheck::Gen) -> String {
        #[derive(Clone)]
        struct AsciiAlphanumeric(String);
        impl Arbitrary for AsciiAlphanumeric {
            fn arbitrary(g: &mut quickcheck::Gen) -> Self {
                let mut out = String::new();
                let size = u8_between(1, 20, g) as usize;
                // Rejection-sample arbitrary chars until enough are ASCII
                // alphanumeric; each accepted char is one byte, so `out`
                // ends at exactly `size` bytes.
                while out.len() < size {
                    let c = char::arbitrary(g);
                    if c.is_ascii_alphanumeric() {
                        out.push(c);
                    }
                }
                Self(out)
            }
        }
        let out = AsciiAlphanumeric::arbitrary(g).0;
        assert!(!out.is_empty());
        out
    }
    /// Samples a `u8` by rejection.
    ///
    /// NOTE(review): the lower bound is exclusive (`size > lower`) while the
    /// upper bound is inclusive, so `u8_between(1, 20, g)` yields 2..=20 —
    /// confirm this asymmetry is intended.
    fn u8_between(lower: u8, upper: u8, g: &mut quickcheck::Gen) -> u8 {
        loop {
            let size = u8::arbitrary(g);
            if size > lower && size <= upper {
                break size;
            }
        }
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/routing/future.rs | axum/src/routing/future.rs | //! Future types.
pub use super::{
into_make_service::IntoMakeServiceFuture,
route::{InfallibleRouteFuture, RouteFuture},
};
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/routing/url_params.rs | axum/src/routing/url_params.rs | use crate::util::PercentDecodedStr;
use http::Extensions;
use matchit::Params;
use std::sync::Arc;
/// Path parameters captured for a request, stored in request extensions.
#[derive(Clone)]
pub(crate) enum UrlParams {
    // Successfully percent-decoded `(name, value)` pairs.
    Params(Vec<(Arc<str>, PercentDecodedStr)>),
    // A parameter failed percent-decoding as UTF-8; remembers which one so the
    // `Path` extractor can report it.
    InvalidUtf8InPathParam { key: Arc<str> },
}
/// Percent-decodes the matched route parameters and merges them into the
/// request's extensions, accumulating across nested routers. If any value is
/// not valid UTF-8 after decoding, the error state is stored instead and
/// sticks for the rest of the request.
pub(super) fn insert_url_params(extensions: &mut Extensions, params: &Params<'_, '_>) {
    let current_params = extensions.get_mut();
    if let Some(UrlParams::InvalidUtf8InPathParam { .. }) = current_params {
        // nothing to do here since an error was stored earlier
        return;
    }
    let params = params
        .iter()
        // Internal parameters added by nesting/fallback routing are not
        // user-visible and are filtered out.
        .filter(|(key, _)| !key.starts_with(super::NEST_TAIL_PARAM))
        .filter(|(key, _)| !key.starts_with(super::FALLBACK_PARAM))
        .map(|(k, v)| {
            if let Some(decoded) = PercentDecodedStr::new(v) {
                Ok((Arc::from(k), decoded))
            } else {
                // Decoding failed: remember the offending key.
                Err(Arc::from(k))
            }
        })
        .collect::<Result<Vec<_>, _>>();
    match (current_params, params) {
        (Some(UrlParams::InvalidUtf8InPathParam { .. }), _) => {
            unreachable!("we check for this state earlier in this method")
        }
        (_, Err(invalid_key)) => {
            extensions.insert(UrlParams::InvalidUtf8InPathParam { key: invalid_key });
        }
        (Some(UrlParams::Params(current)), Ok(params)) => {
            // Outer routers already inserted params; append ours.
            current.extend(params);
        }
        (None, Ok(params)) => {
            extensions.insert(UrlParams::Params(params));
        }
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/routing/method_routing.rs | axum/src/routing/method_routing.rs | //! Route to services and handlers based on HTTP methods.
use super::{future::InfallibleRouteFuture, IntoMakeService};
#[cfg(feature = "tokio")]
use crate::extract::connect_info::IntoMakeServiceWithConnectInfo;
use crate::{
body::{Body, Bytes, HttpBody},
boxed::BoxedIntoRoute,
error_handling::{HandleError, HandleErrorLayer},
handler::Handler,
http::{Method, StatusCode},
response::Response,
routing::{future::RouteFuture, Fallback, MethodFilter, Route},
};
use axum_core::{extract::Request, response::IntoResponse, BoxError};
use bytes::BytesMut;
use std::{
borrow::Cow,
convert::Infallible,
fmt,
task::{Context, Poll},
};
use tower::service_fn;
use tower_layer::Layer;
use tower_service::Service;
// Generates the free functions (`get_service`, `post_service`, …) that route
// one HTTP method to a `tower::Service`. The first three arms only select the
// doc comment (GET and CONNECT get bespoke docs); the last arm emits the
// actual function.
macro_rules! top_level_service_fn {
    (
        $name:ident, GET
    ) => {
        top_level_service_fn!(
            /// Route `GET` requests to the given service.
            ///
            /// # Example
            ///
            /// ```rust
            /// use axum::{
            ///     extract::Request,
            ///     Router,
            ///     routing::get_service,
            ///     body::Body,
            /// };
            /// use http::Response;
            /// use std::convert::Infallible;
            ///
            /// let service = tower::service_fn(|request: Request| async {
            ///     Ok::<_, Infallible>(Response::new(Body::empty()))
            /// });
            ///
            /// // Requests to `GET /` will go to `service`.
            /// let app = Router::new().route("/", get_service(service));
            /// # let _: Router = app;
            /// ```
            ///
            /// Note that `get` routes will also be called for `HEAD` requests but will have
            /// the response body removed. Make sure to add explicit `HEAD` routes
            /// afterwards.
            $name,
            GET
        );
    };
    (
        $name:ident, CONNECT
    ) => {
        top_level_service_fn!(
            /// Route `CONNECT` requests to the given service.
            ///
            /// See [`MethodFilter::CONNECT`] for when you'd want to use this,
            /// and [`get_service`] for an example.
            $name,
            CONNECT
        );
    };
    (
        $name:ident, $method:ident
    ) => {
        top_level_service_fn!(
            #[doc = concat!("Route `", stringify!($method) ,"` requests to the given service.")]
            ///
            /// See [`get_service`] for an example.
            $name,
            $method
        );
    };
    // Implementation arm: emit the function with the selected doc comment.
    (
        $(#[$m:meta])+
        $name:ident, $method:ident
    ) => {
        $(#[$m])+
        pub fn $name<T, S>(svc: T) -> MethodRouter<S, T::Error>
        where
            T: Service<Request> + Clone + Send + Sync + 'static,
            T::Response: IntoResponse + 'static,
            T::Future: Send + 'static,
            S: Clone,
        {
            on_service(MethodFilter::$method, svc)
        }
    };
}
// Generates the free functions (`get`, `post`, …) that route one HTTP method
// to a `Handler`. Same structure as `top_level_service_fn`: the first three
// arms pick a doc comment, the last arm emits the function.
macro_rules! top_level_handler_fn {
    (
        $name:ident, GET
    ) => {
        top_level_handler_fn!(
            /// Route `GET` requests to the given handler.
            ///
            /// # Example
            ///
            /// ```rust
            /// use axum::{
            ///     routing::get,
            ///     Router,
            /// };
            ///
            /// async fn handler() {}
            ///
            /// // Requests to `GET /` will go to `handler`.
            /// let app = Router::new().route("/", get(handler));
            /// # let _: Router = app;
            /// ```
            ///
            /// Note that `get` routes will also be called for `HEAD` requests but will have
            /// the response body removed. Make sure to add explicit `HEAD` routes
            /// afterwards.
            $name,
            GET
        );
    };
    (
        $name:ident, CONNECT
    ) => {
        top_level_handler_fn!(
            /// Route `CONNECT` requests to the given handler.
            ///
            /// See [`MethodFilter::CONNECT`] for when you'd want to use this,
            /// and [`get`] for an example.
            $name,
            CONNECT
        );
    };
    (
        $name:ident, $method:ident
    ) => {
        top_level_handler_fn!(
            #[doc = concat!("Route `", stringify!($method) ,"` requests to the given handler.")]
            ///
            /// See [`get`] for an example.
            $name,
            $method
        );
    };
    // Implementation arm: emit the function with the selected doc comment.
    (
        $(#[$m:meta])+
        $name:ident, $method:ident
    ) => {
        $(#[$m])+
        pub fn $name<H, T, S>(handler: H) -> MethodRouter<S, Infallible>
        where
            H: Handler<T, S>,
            T: 'static,
            S: Clone + Send + Sync + 'static,
        {
            on(MethodFilter::$method, handler)
        }
    };
}
// Generates the `MethodRouter` methods (`get_service`, `post_service`, …)
// that chain an additional service for one HTTP method onto an existing
// router. Doc-selection arms first, implementation arm last.
macro_rules! chained_service_fn {
    (
        $name:ident, GET
    ) => {
        chained_service_fn!(
            /// Chain an additional service that will only accept `GET` requests.
            ///
            /// # Example
            ///
            /// ```rust
            /// use axum::{
            ///     extract::Request,
            ///     Router,
            ///     routing::post_service,
            ///     body::Body,
            /// };
            /// use http::Response;
            /// use std::convert::Infallible;
            ///
            /// let service = tower::service_fn(|request: Request| async {
            ///     Ok::<_, Infallible>(Response::new(Body::empty()))
            /// });
            ///
            /// let other_service = tower::service_fn(|request: Request| async {
            ///     Ok::<_, Infallible>(Response::new(Body::empty()))
            /// });
            ///
            /// // Requests to `POST /` will go to `service` and `GET /` will go to
            /// // `other_service`.
            /// let app = Router::new().route("/", post_service(service).get_service(other_service));
            /// # let _: Router = app;
            /// ```
            ///
            /// Note that `get` routes will also be called for `HEAD` requests but will have
            /// the response body removed. Make sure to add explicit `HEAD` routes
            /// afterwards.
            $name,
            GET
        );
    };
    (
        $name:ident, CONNECT
    ) => {
        chained_service_fn!(
            /// Chain an additional service that will only accept `CONNECT` requests.
            ///
            /// See [`MethodFilter::CONNECT`] for when you'd want to use this,
            /// and [`MethodRouter::get_service`] for an example.
            $name,
            CONNECT
        );
    };
    (
        $name:ident, $method:ident
    ) => {
        chained_service_fn!(
            #[doc = concat!("Chain an additional service that will only accept `", stringify!($method),"` requests.")]
            ///
            /// See [`MethodRouter::get_service`] for an example.
            $name,
            $method
        );
    };
    // Implementation arm: emit the method with the selected doc comment.
    (
        $(#[$m:meta])+
        $name:ident, $method:ident
    ) => {
        $(#[$m])+
        #[track_caller]
        pub fn $name<T>(self, svc: T) -> Self
        where
            T: Service<Request, Error = E>
                + Clone
                + Send
                + Sync
                + 'static,
            T::Response: IntoResponse + 'static,
            T::Future: Send + 'static,
        {
            self.on_service(MethodFilter::$method, svc)
        }
    };
}
// Generates the `MethodRouter` methods (`get`, `post`, …) that chain an
// additional handler for one HTTP method onto an existing router.
// Doc-selection arms first, implementation arm last.
macro_rules! chained_handler_fn {
    (
        $name:ident, GET
    ) => {
        chained_handler_fn!(
            /// Chain an additional handler that will only accept `GET` requests.
            ///
            /// # Example
            ///
            /// ```rust
            /// use axum::{routing::post, Router};
            ///
            /// async fn handler() {}
            ///
            /// async fn other_handler() {}
            ///
            /// // Requests to `POST /` will go to `handler` and `GET /` will go to
            /// // `other_handler`.
            /// let app = Router::new().route("/", post(handler).get(other_handler));
            /// # let _: Router = app;
            /// ```
            ///
            /// Note that `get` routes will also be called for `HEAD` requests but will have
            /// the response body removed. Make sure to add explicit `HEAD` routes
            /// afterwards.
            $name,
            GET
        );
    };
    (
        $name:ident, CONNECT
    ) => {
        chained_handler_fn!(
            /// Chain an additional handler that will only accept `CONNECT` requests.
            ///
            /// See [`MethodFilter::CONNECT`] for when you'd want to use this,
            /// and [`MethodRouter::get`] for an example.
            $name,
            CONNECT
        );
    };
    (
        $name:ident, $method:ident
    ) => {
        chained_handler_fn!(
            #[doc = concat!("Chain an additional handler that will only accept `", stringify!($method),"` requests.")]
            ///
            /// See [`MethodRouter::get`] for an example.
            $name,
            $method
        );
    };
    // Implementation arm: emit the method with the selected doc comment.
    (
        $(#[$m:meta])+
        $name:ident, $method:ident
    ) => {
        $(#[$m])+
        #[track_caller]
        pub fn $name<H, T>(self, handler: H) -> Self
        where
            H: Handler<T, S>,
            T: 'static,
            S: Send + Sync + 'static,
        {
            self.on(MethodFilter::$method, handler)
        }
    };
}
// Generate the `connect_service`, `delete_service`, … free functions, one per
// supported HTTP method.
top_level_service_fn!(connect_service, CONNECT);
top_level_service_fn!(delete_service, DELETE);
top_level_service_fn!(get_service, GET);
top_level_service_fn!(head_service, HEAD);
top_level_service_fn!(options_service, OPTIONS);
top_level_service_fn!(patch_service, PATCH);
top_level_service_fn!(post_service, POST);
top_level_service_fn!(put_service, PUT);
top_level_service_fn!(trace_service, TRACE);
/// Route requests with the given method to the service.
///
/// # Example
///
/// ```rust
/// use axum::{
///     extract::Request,
///     routing::on,
///     Router,
///     body::Body,
///     routing::{MethodFilter, on_service},
/// };
/// use http::Response;
/// use std::convert::Infallible;
///
/// let service = tower::service_fn(|request: Request| async {
///     Ok::<_, Infallible>(Response::new(Body::empty()))
/// });
///
/// // Requests to `POST /` will go to `service`.
/// let app = Router::new().route("/", on_service(MethodFilter::POST, service));
/// # let _: Router = app;
/// ```
pub fn on_service<T, S>(filter: MethodFilter, svc: T) -> MethodRouter<S, T::Error>
where
    T: Service<Request> + Clone + Send + Sync + 'static,
    T::Response: IntoResponse + 'static,
    T::Future: Send + 'static,
    S: Clone,
{
    // Delegate to an empty `MethodRouter` with the single filter/service pair.
    MethodRouter::new().on_service(filter, svc)
}
/// Route requests to the given service regardless of its method.
///
/// # Example
///
/// ```rust
/// use axum::{
///     extract::Request,
///     Router,
///     routing::any_service,
///     body::Body,
/// };
/// use http::Response;
/// use std::convert::Infallible;
///
/// let service = tower::service_fn(|request: Request| async {
///     Ok::<_, Infallible>(Response::new(Body::empty()))
/// });
///
/// // All requests to `/` will go to `service`.
/// let app = Router::new().route("/", any_service(service));
/// # let _: Router = app;
/// ```
///
/// Additional methods can still be chained:
///
/// ```rust
/// use axum::{
///     extract::Request,
///     Router,
///     routing::any_service,
///     body::Body,
/// };
/// use http::Response;
/// use std::convert::Infallible;
///
/// let service = tower::service_fn(|request: Request| async {
///     # Ok::<_, Infallible>(Response::new(Body::empty()))
///     // ...
/// });
///
/// let other_service = tower::service_fn(|request: Request| async {
///     # Ok::<_, Infallible>(Response::new(Body::empty()))
///     // ...
/// });
///
/// // `POST /` goes to `other_service`. All other requests go to `service`
/// let app = Router::new().route("/", any_service(service).post_service(other_service));
/// # let _: Router = app;
/// ```
pub fn any_service<T, S>(svc: T) -> MethodRouter<S, T::Error>
where
    T: Service<Request> + Clone + Send + Sync + 'static,
    T::Response: IntoResponse + 'static,
    T::Future: Send + 'static,
    S: Clone,
{
    // Install the service as the fallback so every method reaches it, and
    // skip populating the `Allow` header since all methods are accepted.
    MethodRouter::new()
        .fallback_service(svc)
        .skip_allow_header()
}
// Generated top-level free functions (`get`, `post`, …) that route requests of
// the corresponding HTTP method to an async `Handler` function.
top_level_handler_fn!(connect, CONNECT);
top_level_handler_fn!(delete, DELETE);
top_level_handler_fn!(get, GET);
top_level_handler_fn!(head, HEAD);
top_level_handler_fn!(options, OPTIONS);
top_level_handler_fn!(patch, PATCH);
top_level_handler_fn!(post, POST);
top_level_handler_fn!(put, PUT);
top_level_handler_fn!(trace, TRACE);
/// Route requests whose method matches the given [`MethodFilter`] to the
/// handler.
///
/// # Example
///
/// ```rust
/// use axum::{
///     routing::on,
///     Router,
///     routing::MethodFilter,
/// };
///
/// async fn handler() {}
///
/// // Requests to `POST /` will go to `handler`.
/// let app = Router::new().route("/", on(MethodFilter::POST, handler));
/// # let _: Router = app;
/// ```
pub fn on<H, T, S>(filter: MethodFilter, handler: H) -> MethodRouter<S, Infallible>
where
    H: Handler<T, S>,
    T: 'static,
    S: Clone + Send + Sync + 'static,
{
    // Start from an empty method router and register the handler on it.
    let router = MethodRouter::new();
    router.on(filter, handler)
}
/// Route every request to the given handler, no matter which HTTP method is
/// used.
///
/// # Example
///
/// ```rust
/// use axum::{
///     routing::any,
///     Router,
/// };
///
/// async fn handler() {}
///
/// // All requests to `/` will go to `handler`.
/// let app = Router::new().route("/", any(handler));
/// # let _: Router = app;
/// ```
///
/// Additional methods can still be chained:
///
/// ```rust
/// use axum::{
///     routing::any,
///     Router,
/// };
///
/// async fn handler() {}
///
/// async fn other_handler() {}
///
/// // `POST /` goes to `other_handler`. All other requests go to `handler`
/// let app = Router::new().route("/", any(handler).post(other_handler));
/// # let _: Router = app;
/// ```
pub fn any<H, T, S>(handler: H) -> MethodRouter<S, Infallible>
where
    H: Handler<T, S>,
    T: 'static,
    S: Clone + Send + Sync + 'static,
{
    // A catch-all handler lives in the fallback slot; since every method is
    // accepted there is no point in emitting an `Allow` header.
    let router = MethodRouter::new().fallback(handler);
    router.skip_allow_header()
}
/// A [`Service`] that accepts requests based on a [`MethodFilter`] and
/// allows chaining additional handlers and services.
///
/// # When does `MethodRouter` implement [`Service`]?
///
/// Whether or not `MethodRouter` implements [`Service`] depends on the state type it requires.
///
/// ```
/// use tower::Service;
/// use axum::{routing::get, extract::{State, Request}, body::Body};
///
/// // this `MethodRouter` doesn't require any state, i.e. the state is `()`,
/// let method_router = get(|| async {});
/// // and thus it implements `Service`
/// assert_service(method_router);
///
/// // this requires a `String` and doesn't implement `Service`
/// let method_router = get(|_: State<String>| async {});
/// // until you provide the `String` with `.with_state(...)`
/// let method_router_with_state = method_router.with_state(String::new());
/// // and then it implements `Service`
/// assert_service(method_router_with_state);
///
/// // helper to check that a value implements `Service`
/// fn assert_service<S>(service: S)
/// where
///     S: Service<Request>,
/// {}
/// ```
#[must_use]
pub struct MethodRouter<S = (), E = Infallible> {
    // One endpoint slot per supported HTTP method. A slot left as
    // `MethodEndpoint::None` means requests with that method fall through to
    // `fallback` below.
    get: MethodEndpoint<S, E>,
    head: MethodEndpoint<S, E>,
    delete: MethodEndpoint<S, E>,
    options: MethodEndpoint<S, E>,
    patch: MethodEndpoint<S, E>,
    post: MethodEndpoint<S, E>,
    put: MethodEndpoint<S, E>,
    trace: MethodEndpoint<S, E>,
    connect: MethodEndpoint<S, E>,
    // Handles every request whose method has no dedicated endpoint above.
    fallback: Fallback<S, E>,
    // Accumulated value for the `Allow` response header (see `AllowHeader`).
    allow_header: AllowHeader,
}
/// Build-up state for the `Allow` response header a `MethodRouter` emits.
#[derive(Clone, Debug)]
enum AllowHeader {
    /// No `Allow` header value has been built-up yet. This is the default state
    None,
    /// Don't set an `Allow` header. This is used when `any` or `any_service` are called.
    Skip,
    /// The current value of the `Allow` header.
    Bytes(BytesMut),
}
impl AllowHeader {
    /// Combine two `Allow` header states; byte values are joined with a comma.
    fn merge(self, other: Self) -> Self {
        match (self, other) {
            // `Skip` is contagious: once either side opted out, never emit
            // an `Allow` header.
            (Self::Skip, _) | (_, Self::Skip) => Self::Skip,
            (Self::Bytes(mut lhs), Self::Bytes(rhs)) => {
                lhs.extend_from_slice(b",");
                lhs.extend_from_slice(&rhs);
                Self::Bytes(lhs)
            }
            // At this point at most one side carries bytes; keep whichever
            // side is non-`None`.
            (Self::None, other) => other,
            (this, Self::None) => this,
        }
    }
}
impl<S, E> fmt::Debug for MethodRouter<S, E> {
    // Manual impl because `S` and `E` need not be `Debug`; every field type
    // is `Debug` on its own.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut dbg = f.debug_struct("MethodRouter");
        dbg.field("get", &self.get);
        dbg.field("head", &self.head);
        dbg.field("delete", &self.delete);
        dbg.field("options", &self.options);
        dbg.field("patch", &self.patch);
        dbg.field("post", &self.post);
        dbg.field("put", &self.put);
        dbg.field("trace", &self.trace);
        dbg.field("connect", &self.connect);
        dbg.field("fallback", &self.fallback);
        dbg.field("allow_header", &self.allow_header);
        dbg.finish()
    }
}
impl<S> MethodRouter<S, Infallible>
where
    S: Clone,
{
    /// Chain an additional handler that will accept requests matching the given
    /// `MethodFilter`.
    ///
    /// # Example
    ///
    /// ```rust
    /// use axum::{
    ///     routing::get,
    ///     Router,
    ///     routing::MethodFilter
    /// };
    ///
    /// async fn handler() {}
    ///
    /// async fn other_handler() {}
    ///
    /// // Requests to `GET /` will go to `handler` and `DELETE /` will go to
    /// // `other_handler`
    /// let app = Router::new().route("/", get(handler).on(MethodFilter::DELETE, other_handler));
    /// # let _: Router = app;
    /// ```
    #[track_caller]
    pub fn on<H, T>(self, filter: MethodFilter, handler: H) -> Self
    where
        H: Handler<T, S>,
        T: 'static,
        S: Send + Sync + 'static,
    {
        // Box the handler so it can be stored uniformly next to services.
        self.on_endpoint(
            filter,
            &MethodEndpoint::BoxedHandler(BoxedIntoRoute::from_handler(handler)),
        )
    }

    // Generated chained methods (`.get(h)`, `.post(h)`, …) that register a
    // handler for a single HTTP method.
    chained_handler_fn!(connect, CONNECT);
    chained_handler_fn!(delete, DELETE);
    chained_handler_fn!(get, GET);
    chained_handler_fn!(head, HEAD);
    chained_handler_fn!(options, OPTIONS);
    chained_handler_fn!(patch, PATCH);
    chained_handler_fn!(post, POST);
    chained_handler_fn!(put, PUT);
    chained_handler_fn!(trace, TRACE);

    /// Add a fallback [`Handler`] to the router.
    pub fn fallback<H, T>(mut self, handler: H) -> Self
    where
        H: Handler<T, S>,
        T: 'static,
        S: Send + Sync + 'static,
    {
        self.fallback = Fallback::BoxedHandler(BoxedIntoRoute::from_handler(handler));
        self
    }

    /// Get a [`MethodFilter`] for the methods that this `MethodRouter` has
    /// custom code for.
    ///
    /// Note that `MethodRouter`'s [`Service`] implementation never fails (it
    /// always creates an HTTP response) based on which HTTP method was used.
    /// However, the information which methods have the default behavior of
    /// returning HTTP 405 is stored, and can be queried with this method.
    ///
    /// Returns `None` if the `MethodRouter` was constructed with [`any`] or
    /// has had a [`fallback`][Self::fallback] set.
    pub fn method_filter(&self) -> Option<MethodFilter> {
        // Exhaustive destructuring so adding a new method field forces this
        // function to be updated.
        let Self {
            get,
            head,
            delete,
            options,
            patch,
            post,
            put,
            trace,
            connect,
            fallback,
            allow_header: _,
        } = self;
        if !fallback.is_default() {
            return None;
        }
        // Union the filters of every method slot that holds a non-default
        // endpoint. `reduce` only yields `None` when no slot is set at all,
        // which cannot be reached here: a default fallback plus all-default
        // slots is not constructible via the public API.
        let filter = [
            (get, MethodFilter::GET),
            (head, MethodFilter::HEAD),
            (delete, MethodFilter::DELETE),
            (options, MethodFilter::OPTIONS),
            (patch, MethodFilter::PATCH),
            (post, MethodFilter::POST),
            (put, MethodFilter::PUT),
            (trace, MethodFilter::TRACE),
            (connect, MethodFilter::CONNECT),
        ]
        .into_iter()
        .filter_map(|(ep, f)| ep.is_some().then_some(f))
        .reduce(MethodFilter::or)
        .expect("can't create a MethodRouter with all-default handlers");
        Some(filter)
    }

    /// Add a fallback [`Handler`] if no custom one has been provided.
    pub(crate) fn default_fallback<H, T>(self, handler: H) -> Self
    where
        H: Handler<T, S>,
        T: 'static,
        S: Send + Sync + 'static,
    {
        // Only replace the fallback while it is still the built-in 405 one;
        // a user-provided fallback always wins.
        match self.fallback {
            Fallback::Default(_) => self.fallback(handler),
            _ => self,
        }
    }
}
impl MethodRouter<(), Infallible> {
    /// Convert the router into a [`MakeService`].
    ///
    /// This allows you to serve a single `MethodRouter` if you don't need any
    /// routing based on the path:
    ///
    /// ```rust
    /// use axum::{
    ///     handler::Handler,
    ///     http::{Uri, Method},
    ///     response::IntoResponse,
    ///     routing::get,
    /// };
    /// use std::net::SocketAddr;
    ///
    /// async fn handler(method: Method, uri: Uri, body: String) -> String {
    ///     format!("received `{method} {uri}` with body `{body:?}`")
    /// }
    ///
    /// let router = get(handler).post(handler);
    ///
    /// # async {
    /// let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await.unwrap();
    /// axum::serve(listener, router.into_make_service()).await;
    /// # };
    /// ```
    ///
    /// [`MakeService`]: tower::make::MakeService
    #[must_use]
    pub fn into_make_service(self) -> IntoMakeService<Self> {
        // Providing the (unit) state eagerly materializes every endpoint as a
        // `Route` once, instead of per request.
        IntoMakeService::new(self.with_state(()))
    }

    /// Convert the router into a [`MakeService`] which stores information
    /// about the incoming connection.
    ///
    /// See [`Router::into_make_service_with_connect_info`] for more details.
    ///
    /// ```rust
    /// use axum::{
    ///     handler::Handler,
    ///     response::IntoResponse,
    ///     extract::ConnectInfo,
    ///     routing::get,
    /// };
    /// use std::net::SocketAddr;
    ///
    /// async fn handler(ConnectInfo(addr): ConnectInfo<SocketAddr>) -> String {
    ///     format!("Hello {addr}")
    /// }
    ///
    /// let router = get(handler).post(handler);
    ///
    /// # async {
    /// let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await.unwrap();
    /// axum::serve(
    ///     listener,
    ///     router.into_make_service_with_connect_info::<SocketAddr>(),
    /// ).await;
    /// # };
    /// ```
    ///
    /// [`MakeService`]: tower::make::MakeService
    /// [`Router::into_make_service_with_connect_info`]: crate::routing::Router::into_make_service_with_connect_info
    #[cfg(feature = "tokio")]
    #[must_use]
    pub fn into_make_service_with_connect_info<C>(self) -> IntoMakeServiceWithConnectInfo<Self, C> {
        IntoMakeServiceWithConnectInfo::new(self.with_state(()))
    }
}
impl<S, E> MethodRouter<S, E>
where
S: Clone,
{
/// Create a default `MethodRouter` that will respond with `405 Method Not Allowed` to all
/// requests.
pub fn new() -> Self {
let fallback = Route::new(service_fn(|_: Request| async {
Ok(StatusCode::METHOD_NOT_ALLOWED)
}));
Self {
get: MethodEndpoint::None,
head: MethodEndpoint::None,
delete: MethodEndpoint::None,
options: MethodEndpoint::None,
patch: MethodEndpoint::None,
post: MethodEndpoint::None,
put: MethodEndpoint::None,
trace: MethodEndpoint::None,
connect: MethodEndpoint::None,
allow_header: AllowHeader::None,
fallback: Fallback::Default(fallback),
}
}
/// Provide the state for the router.
pub fn with_state<S2>(self, state: S) -> MethodRouter<S2, E> {
MethodRouter {
get: self.get.with_state(&state),
head: self.head.with_state(&state),
delete: self.delete.with_state(&state),
options: self.options.with_state(&state),
patch: self.patch.with_state(&state),
post: self.post.with_state(&state),
put: self.put.with_state(&state),
trace: self.trace.with_state(&state),
connect: self.connect.with_state(&state),
allow_header: self.allow_header,
fallback: self.fallback.with_state(state),
}
}
/// Chain an additional service that will accept requests matching the given
/// `MethodFilter`.
///
/// # Example
///
/// ```rust
/// use axum::{
/// extract::Request,
/// Router,
/// routing::{MethodFilter, on_service},
/// body::Body,
/// };
/// use http::Response;
/// use std::convert::Infallible;
///
/// let service = tower::service_fn(|request: Request| async {
/// Ok::<_, Infallible>(Response::new(Body::empty()))
/// });
///
/// // Requests to `DELETE /` will go to `service`
/// let app = Router::new().route("/", on_service(MethodFilter::DELETE, service));
/// # let _: Router = app;
/// ```
#[track_caller]
pub fn on_service<T>(self, filter: MethodFilter, svc: T) -> Self
where
T: Service<Request, Error = E> + Clone + Send + Sync + 'static,
T::Response: IntoResponse + 'static,
T::Future: Send + 'static,
{
self.on_endpoint(filter, &MethodEndpoint::Route(Route::new(svc)))
}
#[track_caller]
fn on_endpoint(mut self, filter: MethodFilter, endpoint: &MethodEndpoint<S, E>) -> Self {
// written as a separate function to generate less IR
#[track_caller]
fn set_endpoint<S, E>(
method_name: &str,
out: &mut MethodEndpoint<S, E>,
endpoint: &MethodEndpoint<S, E>,
endpoint_filter: MethodFilter,
filter: MethodFilter,
allow_header: &mut AllowHeader,
methods: &[&'static str],
) where
MethodEndpoint<S, E>: Clone,
S: Clone,
{
if endpoint_filter.contains(filter) {
if out.is_some() {
panic!(
"Overlapping method route. Cannot add two method routes that both handle \
`{method_name}`",
);
}
*out = endpoint.clone();
for method in methods {
append_allow_header(allow_header, method);
}
}
}
set_endpoint(
"GET",
&mut self.get,
endpoint,
filter,
MethodFilter::GET,
&mut self.allow_header,
&["GET", "HEAD"],
);
set_endpoint(
"HEAD",
&mut self.head,
endpoint,
filter,
MethodFilter::HEAD,
&mut self.allow_header,
&["HEAD"],
);
set_endpoint(
"TRACE",
&mut self.trace,
endpoint,
filter,
MethodFilter::TRACE,
&mut self.allow_header,
&["TRACE"],
);
set_endpoint(
"PUT",
&mut self.put,
endpoint,
filter,
MethodFilter::PUT,
&mut self.allow_header,
&["PUT"],
);
set_endpoint(
"POST",
&mut self.post,
endpoint,
filter,
MethodFilter::POST,
&mut self.allow_header,
&["POST"],
);
set_endpoint(
"PATCH",
&mut self.patch,
endpoint,
filter,
MethodFilter::PATCH,
&mut self.allow_header,
&["PATCH"],
);
set_endpoint(
"OPTIONS",
&mut self.options,
endpoint,
filter,
MethodFilter::OPTIONS,
&mut self.allow_header,
&["OPTIONS"],
);
set_endpoint(
"DELETE",
&mut self.delete,
endpoint,
filter,
MethodFilter::DELETE,
&mut self.allow_header,
&["DELETE"],
);
set_endpoint(
"CONNECT",
&mut self.options,
endpoint,
filter,
MethodFilter::CONNECT,
&mut self.allow_header,
&["CONNECT"],
);
self
}
    // Generated chained methods (`.get_service(svc)`, `.post_service(svc)`, …)
    // that register a `tower::Service` for a single HTTP method.
    chained_service_fn!(connect_service, CONNECT);
    chained_service_fn!(delete_service, DELETE);
    chained_service_fn!(get_service, GET);
    chained_service_fn!(head_service, HEAD);
    chained_service_fn!(options_service, OPTIONS);
    chained_service_fn!(patch_service, PATCH);
    chained_service_fn!(post_service, POST);
    chained_service_fn!(put_service, PUT);
    chained_service_fn!(trace_service, TRACE);
#[doc = include_str!("../docs/method_routing/fallback.md")]
pub fn fallback_service<T>(mut self, svc: T) -> Self
where
T: Service<Request, Error = E> + Clone + Send + Sync + 'static,
T::Response: IntoResponse + 'static,
T::Future: Send + 'static,
{
self.fallback = Fallback::Service(Route::new(svc));
self
}
#[doc = include_str!("../docs/method_routing/layer.md")]
pub fn layer<L, NewError>(self, layer: L) -> MethodRouter<S, NewError>
where
L: Layer<Route<E>> + Clone + Send + Sync + 'static,
L::Service: Service<Request> + Clone + Send + Sync + 'static,
<L::Service as Service<Request>>::Response: IntoResponse + 'static,
<L::Service as Service<Request>>::Error: Into<NewError> + 'static,
<L::Service as Service<Request>>::Future: Send + 'static,
E: 'static,
S: 'static,
NewError: 'static,
{
let layer_fn = move |route: Route<E>| route.layer(layer.clone());
MethodRouter {
get: self.get.map(layer_fn.clone()),
head: self.head.map(layer_fn.clone()),
delete: self.delete.map(layer_fn.clone()),
options: self.options.map(layer_fn.clone()),
patch: self.patch.map(layer_fn.clone()),
post: self.post.map(layer_fn.clone()),
put: self.put.map(layer_fn.clone()),
trace: self.trace.map(layer_fn.clone()),
connect: self.connect.map(layer_fn.clone()),
fallback: self.fallback.map(layer_fn),
allow_header: self.allow_header,
}
}
#[doc = include_str!("../docs/method_routing/route_layer.md")]
#[track_caller]
pub fn route_layer<L>(mut self, layer: L) -> Self
where
L: Layer<Route<E>> + Clone + Send + Sync + 'static,
L::Service: Service<Request, Error = E> + Clone + Send + Sync + 'static,
<L::Service as Service<Request>>::Response: IntoResponse + 'static,
<L::Service as Service<Request>>::Future: Send + 'static,
E: 'static,
S: 'static,
{
if self.get.is_none()
&& self.head.is_none()
&& self.delete.is_none()
&& self.options.is_none()
&& self.patch.is_none()
&& self.post.is_none()
&& self.put.is_none()
&& self.trace.is_none()
&& self.connect.is_none()
{
panic!(
"Adding a route_layer before any routes is a no-op. \
Add the routes you want the layer to apply to first."
);
}
let layer_fn = move |svc| Route::new(layer.layer(svc));
self.get = self.get.map(layer_fn.clone());
self.head = self.head.map(layer_fn.clone());
self.delete = self.delete.map(layer_fn.clone());
self.options = self.options.map(layer_fn.clone());
self.patch = self.patch.map(layer_fn.clone());
self.post = self.post.map(layer_fn.clone());
self.put = self.put.map(layer_fn.clone());
self.trace = self.trace.map(layer_fn.clone());
self.connect = self.connect.map(layer_fn);
self
}
pub(crate) fn merge_for_path(
mut self,
path: Option<&str>,
other: Self,
) -> Result<Self, Cow<'static, str>> {
// written using inner functions to generate less IR
fn merge_inner<S, E>(
path: Option<&str>,
name: &str,
first: MethodEndpoint<S, E>,
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | true |
use crate::response::Response;
use axum_core::response::IntoResponse;
use http::{Request, StatusCode};
use std::{
convert::Infallible,
future::ready,
task::{Context, Poll},
};
use tower_service::Service;
/// A [`Service`] that responds with `404 Not Found` to all requests.
///
/// This is used as the bottom service in a method router. You shouldn't have to
/// use it manually.
// Zero-sized and stateless, so it is trivially `Clone + Copy`.
#[derive(Clone, Copy, Debug)]
pub(super) struct NotFound;
impl<B> Service<Request<B>> for NotFound
where
    B: Send + 'static,
{
    type Response = Response;
    type Error = Infallible;
    type Future = std::future::Ready<Result<Response, Self::Error>>;

    #[inline]
    fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Stateless service — always ready, never errors.
        Poll::Ready(Ok(()))
    }

    fn call(&mut self, _req: Request<B>) -> Self::Future {
        // The request is ignored entirely; every call resolves immediately
        // to a `404 Not Found` response.
        let response = StatusCode::NOT_FOUND.into_response();
        ready(Ok(response))
    }
}
//! Routing between [`Service`]s and handlers.
use self::{future::RouteFuture, not_found::NotFound, path_router::PathRouter};
#[cfg(feature = "tokio")]
use crate::extract::connect_info::IntoMakeServiceWithConnectInfo;
#[cfg(feature = "matched-path")]
use crate::extract::MatchedPath;
use crate::{
body::{Body, HttpBody},
boxed::BoxedIntoRoute,
handler::Handler,
util::try_downcast,
};
use axum_core::{
extract::Request,
response::{IntoResponse, Response},
};
use std::{
convert::Infallible,
fmt,
marker::PhantomData,
sync::Arc,
task::{Context, Poll},
};
use tower::service_fn;
use tower_layer::{layer_fn, Layer};
use tower_service::Service;
pub mod future;
pub mod method_routing;
mod into_make_service;
mod method_filter;
mod not_found;
pub(crate) mod path_router;
mod route;
mod strip_prefix;
pub(crate) mod url_params;
#[cfg(test)]
mod tests;
pub use self::{into_make_service::IntoMakeService, method_filter::MethodFilter, route::Route};
pub use self::method_routing::{
any, any_service, connect, connect_service, delete, delete_service, get, get_service, head,
head_service, on, on_service, options, options_service, patch, patch_service, post,
post_service, put, put_service, trace, trace_service, MethodRouter,
};
/// Unwrap a `Result`, turning an `Err` into a panic. Used at router
/// construction time, where errors indicate programmer mistakes (e.g. invalid
/// or conflicting route paths).
macro_rules! panic_on_err {
    ($expr:expr) => {
        match $expr {
            Ok(x) => x,
            Err(err) => panic!("{err}"),
        }
    };
}

/// Opaque identifier for a registered route.
#[derive(Clone, Copy, Debug, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub(crate) struct RouteId(usize);
/// The router type for composing handlers and services.
///
/// `Router<S>` means a router that is _missing_ a state of type `S` to be able
/// to handle requests. Thus, only `Router<()>` (i.e. without missing state) can
/// be passed to [`serve`]. See [`Router::with_state`] for more details.
///
/// [`serve`]: crate::serve()
#[must_use]
pub struct Router<S = ()> {
    // Shared behind an `Arc` so cloning a `Router` is cheap; mutation goes
    // through `into_inner`, which clones the data only when the `Arc` is
    // shared.
    inner: Arc<RouterInner<S>>,
}
impl<S> Clone for Router<S> {
    fn clone(&self) -> Self {
        // Cloning only bumps the `Arc` refcount; the route tables are shared.
        let inner = Arc::clone(&self.inner);
        Self { inner }
    }
}
struct RouterInner<S> {
    // Matches requests against registered path patterns.
    path_router: PathRouter<S>,
    // `true` while the fallback is still the built-in 404 one; used by
    // `merge` to reject merging two routers that both set a custom fallback.
    default_fallback: bool,
    // Handles requests the path router matches nothing for at all.
    catch_all_fallback: Fallback<S>,
}

impl<S> Default for Router<S>
where
    S: Clone + Send + Sync + 'static,
{
    fn default() -> Self {
        Self::new()
    }
}
impl<S> fmt::Debug for Router<S> {
    // Manual impl because `S` need not be `Debug`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut dbg = f.debug_struct("Router");
        dbg.field("path_router", &self.inner.path_router);
        dbg.field("default_fallback", &self.inner.default_fallback);
        dbg.field("catch_all_fallback", &self.inner.catch_all_fallback);
        dbg.finish()
    }
}
// Reserved path-parameter names used internally by nesting and fallbacks;
// the `__private__` prefix keeps them from clashing with user parameters.
pub(crate) const NEST_TAIL_PARAM: &str = "__private__axum_nest_tail_param";
#[cfg(feature = "matched-path")]
pub(crate) const NEST_TAIL_PARAM_CAPTURE: &str = "/{*__private__axum_nest_tail_param}";
pub(crate) const FALLBACK_PARAM: &str = "__private__axum_fallback";
pub(crate) const FALLBACK_PARAM_PATH: &str = "/{*__private__axum_fallback}";

/// Rebuild a `Router` by mapping its owned `RouterInner` to a new one.
macro_rules! map_inner {
    ( $self_:ident, $inner:pat_param => $expr:expr) => {
        #[allow(redundant_semicolons)]
        {
            let $inner = $self_.into_inner();
            Router {
                inner: Arc::new($expr),
            }
        }
    };
}

/// Rebuild a `Router` by mutating its owned `RouterInner` in place.
macro_rules! tap_inner {
    ( $self_:ident, mut $inner:ident => { $($stmt:stmt)* } ) => {
        #[allow(redundant_semicolons)]
        {
            let mut $inner = $self_.into_inner();
            $($stmt)*;
            Router {
                inner: Arc::new($inner),
            }
        }
    };
}
impl<S> Router<S>
where
S: Clone + Send + Sync + 'static,
{
/// Create a new `Router`.
///
/// Unless you add additional routes this will respond with `404 Not Found` to
/// all requests.
pub fn new() -> Self {
Self {
inner: Arc::new(RouterInner {
path_router: Default::default(),
default_fallback: true,
catch_all_fallback: Fallback::Default(Route::new(NotFound)),
}),
}
}
fn into_inner(self) -> RouterInner<S> {
match Arc::try_unwrap(self.inner) {
Ok(inner) => inner,
Err(arc) => RouterInner {
path_router: arc.path_router.clone(),
default_fallback: arc.default_fallback,
catch_all_fallback: arc.catch_all_fallback.clone(),
},
}
}
    #[doc = include_str!("../docs/routing/without_v07_checks.md")]
    pub fn without_v07_checks(self) -> Self {
        tap_inner!(self, mut this => {
            this.path_router.without_v07_checks();
        })
    }

    #[doc = include_str!("../docs/routing/route.md")]
    #[track_caller]
    pub fn route(self, path: &str, method_router: MethodRouter<S>) -> Self {
        // Registration failures (bad path syntax, duplicate routes) are
        // programmer errors and panic at construction time.
        tap_inner!(self, mut this => {
            panic_on_err!(this.path_router.route(path, method_router));
        })
    }
#[doc = include_str!("../docs/routing/route_service.md")]
pub fn route_service<T>(self, path: &str, service: T) -> Self
where
T: Service<Request, Error = Infallible> + Clone + Send + Sync + 'static,
T::Response: IntoResponse,
T::Future: Send + 'static,
{
let Err(service) = try_downcast::<Self, _>(service) else {
panic!(
"Invalid route: `Router::route_service` cannot be used with `Router`s. \
Use `Router::nest` instead"
);
};
tap_inner!(self, mut this => {
panic_on_err!(this.path_router.route_service(path, service));
})
}
    #[doc = include_str!("../docs/routing/nest.md")]
    #[doc(alias = "scope")] // Some web frameworks like actix-web use this term
    #[track_caller]
    pub fn nest(self, path: &str, router: Self) -> Self {
        // Nesting at `""`/`"/"` would shadow everything; `merge` is the
        // supported way to combine routers at the root.
        if path.is_empty() || path == "/" {
            panic!("Nesting at the root is no longer supported. Use merge instead.");
        }
        let RouterInner {
            path_router,
            default_fallback: _,
            // we don't need to inherit the catch-all fallback. It is only used for CONNECT
            // requests with an empty path. If we were to inherit the catch-all fallback
            // it would end up matching `/{path}/*` which doesn't match empty paths.
            catch_all_fallback: _,
        } = router.into_inner();
        tap_inner!(self, mut this => {
            panic_on_err!(this.path_router.nest(path, path_router));
        })
    }

    /// Like [`nest`](Self::nest), but accepts an arbitrary `Service`.
    #[track_caller]
    pub fn nest_service<T>(self, path: &str, service: T) -> Self
    where
        T: Service<Request, Error = Infallible> + Clone + Send + Sync + 'static,
        T::Response: IntoResponse,
        T::Future: Send + 'static,
    {
        if path.is_empty() || path == "/" {
            panic!("Nesting at the root is no longer supported. Use fallback_service instead.");
        }
        tap_inner!(self, mut this => {
            panic_on_err!(this.path_router.nest_service(path, service));
        })
    }
    #[doc = include_str!("../docs/routing/merge.md")]
    #[track_caller]
    pub fn merge<R>(self, other: R) -> Self
    where
        R: Into<Self>,
    {
        let other: Self = other.into();
        let RouterInner {
            path_router,
            default_fallback,
            catch_all_fallback,
        } = other.into_inner();
        map_inner!(self, mut this => {
            // At most one of the two routers may carry a custom fallback
            // (`default_fallback == false`); the custom one wins.
            match (this.default_fallback, default_fallback) {
                // other has a default fallback
                // use the one from other
                (_, true) => {}
                // this has default fallback, other has a custom fallback
                (true, false) => {
                    this.default_fallback = false;
                }
                // both have a custom fallback, not allowed
                (false, false) => {
                    panic!("Cannot merge two `Router`s that both have a fallback")
                }
            };
            panic_on_err!(this.path_router.merge(path_router));
            // Same rule for the catch-all fallback; `merge` returns `None`
            // when both sides are custom.
            this.catch_all_fallback = this
                .catch_all_fallback
                .merge(catch_all_fallback)
                .unwrap_or_else(|| panic!("Cannot merge two `Router`s that both have a fallback"));
            this
        })
    }
    #[doc = include_str!("../docs/routing/layer.md")]
    pub fn layer<L>(self, layer: L) -> Self
    where
        L: Layer<Route> + Clone + Send + Sync + 'static,
        L::Service: Service<Request> + Clone + Send + Sync + 'static,
        <L::Service as Service<Request>>::Response: IntoResponse + 'static,
        <L::Service as Service<Request>>::Error: Into<Infallible> + 'static,
        <L::Service as Service<Request>>::Future: Send + 'static,
    {
        // Wraps every registered route AND the catch-all fallback.
        map_inner!(self, this => RouterInner {
            path_router: this.path_router.layer(layer.clone()),
            default_fallback: this.default_fallback,
            catch_all_fallback: this.catch_all_fallback.map(|route| route.layer(layer)),
        })
    }

    #[doc = include_str!("../docs/routing/route_layer.md")]
    #[track_caller]
    pub fn route_layer<L>(self, layer: L) -> Self
    where
        L: Layer<Route> + Clone + Send + Sync + 'static,
        L::Service: Service<Request> + Clone + Send + Sync + 'static,
        <L::Service as Service<Request>>::Response: IntoResponse + 'static,
        <L::Service as Service<Request>>::Error: Into<Infallible> + 'static,
        <L::Service as Service<Request>>::Future: Send + 'static,
    {
        // Unlike `layer`, only registered routes are wrapped; the catch-all
        // fallback is left untouched.
        map_inner!(self, this => RouterInner {
            path_router: this.path_router.route_layer(layer),
            default_fallback: this.default_fallback,
            catch_all_fallback: this.catch_all_fallback,
        })
    }

    /// True if the router currently has at least one route added.
    #[must_use]
    pub fn has_routes(&self) -> bool {
        self.inner.path_router.has_routes()
    }
    #[track_caller]
    #[doc = include_str!("../docs/routing/fallback.md")]
    pub fn fallback<H, T>(self, handler: H) -> Self
    where
        H: Handler<T, S>,
        T: 'static,
    {
        // The fallback is stored twice: as the catch-all (for requests no
        // pattern matches at all) and, via `fallback_endpoint`, as regular
        // endpoints at `/` and `/{*fallback}`.
        tap_inner!(self, mut this => {
            this.catch_all_fallback =
                Fallback::BoxedHandler(BoxedIntoRoute::from_handler(handler.clone()));
        })
        .fallback_endpoint(Endpoint::MethodRouter(any(handler)))
    }

    /// Add a fallback [`Service`] to the router.
    ///
    /// See [`Router::fallback`] for more details.
    pub fn fallback_service<T>(self, service: T) -> Self
    where
        T: Service<Request, Error = Infallible> + Clone + Send + Sync + 'static,
        T::Response: IntoResponse,
        T::Future: Send + 'static,
    {
        let route = Route::new(service);
        tap_inner!(self, mut this => {
            this.catch_all_fallback = Fallback::Service(route.clone());
        })
        .fallback_endpoint(Endpoint::Route(route))
    }

    #[doc = include_str!("../docs/routing/method_not_allowed_fallback.md")]
    #[allow(clippy::needless_pass_by_value)]
    pub fn method_not_allowed_fallback<H, T>(self, handler: H) -> Self
    where
        H: Handler<T, S>,
        T: 'static,
    {
        tap_inner!(self, mut this => {
            this.path_router
                .method_not_allowed_fallback(&handler);
        })
    }
    /// Reset the fallback to its default.
    ///
    /// Useful to merge two routers with fallbacks, as [`merge`] doesn't allow
    /// both routers to have an explicit fallback. Use this method to remove the
    /// one you want to discard before merging.
    ///
    /// [`merge`]: Self::merge
    pub fn reset_fallback(self) -> Self {
        // Restore the same fallback state `Router::new` starts with.
        tap_inner!(self, mut this => {
            this.default_fallback = true;
            this.catch_all_fallback = Fallback::Default(Route::new(NotFound));
        })
    }
    // Registers `endpoint` as a normal route at `/` and at the catch-all
    // `/{*fallback}` pattern, so the fallback participates in regular path
    // routing. Also clears `default_fallback`.
    fn fallback_endpoint(self, endpoint: Endpoint<S>) -> Self {
        // TODO make this better, get rid of the `unwrap`s.
        // We need the returned `Service` to be `Clone` and the function inside `service_fn` to be
        // `FnMut` so instead of just using the owned service, we do this trick with `Option`. We
        // know this will be called just once so it's fine. We're doing that so that we avoid one
        // clone inside `oneshot_inner` so that the `Router` and subsequently the `State` is not
        // cloned too much.
        tap_inner!(self, mut this => {
            // The fallback handles `/` itself (a wildcard doesn't match the
            // empty path), so it is registered there explicitly.
            _ = this.path_router.route_endpoint(
                "/",
                endpoint.clone().layer(
                    layer_fn(
                        |service: Route| {
                            let mut service = Some(service);
                            service_fn(
                                #[cfg_attr(not(feature = "matched-path"), allow(unused_mut))]
                                move |mut request: Request| {
                                    #[cfg(feature = "matched-path")]
                                    request.extensions_mut().remove::<MatchedPath>();
                                    service.take().unwrap().oneshot_inner_owned(request)
                                }
                            )
                        }
                    )
                )
            );
            // Every other unmatched path is caught by the wildcard pattern.
            _ = this.path_router.route_endpoint(
                FALLBACK_PARAM_PATH,
                endpoint.layer(
                    layer_fn(
                        |service: Route| {
                            let mut service = Some(service);
                            service_fn(
                                #[cfg_attr(not(feature = "matched-path"), allow(unused_mut))]
                                move |mut request: Request| {
                                    #[cfg(feature = "matched-path")]
                                    request.extensions_mut().remove::<MatchedPath>();
                                    service.take().unwrap().oneshot_inner_owned(request)
                                }
                            )
                        }
                    )
                )
            );
            this.default_fallback = false;
        })
    }
    #[doc = include_str!("../docs/routing/with_state.md")]
    pub fn with_state<S2>(self, state: S) -> Router<S2> {
        map_inner!(self, this => RouterInner {
            path_router: this.path_router.with_state(state.clone()),
            default_fallback: this.default_fallback,
            catch_all_fallback: this.catch_all_fallback.with_state(state),
        })
    }

    pub(crate) fn call_with_state(&self, req: Request, state: S) -> RouteFuture<Infallible> {
        // Try path-based routing first; on no match the request and state are
        // handed back so the catch-all fallback can take over.
        let (req, state) = match self.inner.path_router.call_with_state(req, state) {
            Ok(future) => return future,
            Err((req, state)) => (req, state),
        };
        self.inner
            .catch_all_fallback
            .clone()
            .call_with_state(req, state)
    }
    /// Convert the router into a borrowed [`Service`] with a fixed request body type, to aid type
    /// inference.
    ///
    /// In some cases when calling methods from [`tower::ServiceExt`] on a [`Router`] you might get
    /// type inference errors along the lines of
    ///
    /// ```not_rust
    /// let response = router.ready().await?.call(request).await?;
    ///                       ^^^^^ cannot infer type for type parameter `B`
    /// ```
    ///
    /// This happens because `Router` implements [`Service`] with `impl<B> Service<Request<B>> for Router<()>`.
    ///
    /// For example:
    ///
    /// ```compile_fail
    /// use axum::{
    ///     Router,
    ///     routing::get,
    ///     http::Request,
    ///     body::Body,
    /// };
    /// use tower::{Service, ServiceExt};
    ///
    /// # async fn async_main() -> Result<(), Box<dyn std::error::Error>> {
    /// let mut router = Router::new().route("/", get(|| async {}));
    /// let request = Request::new(Body::empty());
    /// let response = router.ready().await?.call(request).await?;
    /// # Ok(())
    /// # }
    /// ```
    ///
    /// Calling `Router::as_service` fixes that:
    ///
    /// ```
    /// use axum::{
    ///     Router,
    ///     routing::get,
    ///     http::Request,
    ///     body::Body,
    /// };
    /// use tower::{Service, ServiceExt};
    ///
    /// # async fn async_main() -> Result<(), Box<dyn std::error::Error>> {
    /// let mut router = Router::new().route("/", get(|| async {}));
    /// let request = Request::new(Body::empty());
    /// let response = router.as_service().ready().await?.call(request).await?;
    /// # Ok(())
    /// # }
    /// ```
    ///
    /// This is mainly used when calling `Router` in tests. It shouldn't be necessary when running
    /// the `Router` normally via [`Router::into_make_service`].
    pub fn as_service<B>(&mut self) -> RouterAsService<'_, B, S> {
        // `PhantomData` pins the body type `B` without storing one.
        RouterAsService {
            router: self,
            _marker: PhantomData,
        }
    }

    /// Convert the router into an owned [`Service`] with a fixed request body type, to aid type
    /// inference.
    ///
    /// This is the same as [`Router::as_service`] instead it returns an owned [`Service`]. See
    /// that method for more details.
    #[must_use]
    pub fn into_service<B>(self) -> RouterIntoService<B, S> {
        RouterIntoService {
            router: self,
            _marker: PhantomData,
        }
    }
}
impl Router {
    /// Convert this router into a [`MakeService`], that is a [`Service`] whose
    /// response is another service.
    ///
    /// ```
    /// use axum::{
    ///     routing::get,
    ///     Router,
    /// };
    ///
    /// let app = Router::new().route("/", get(|| async { "Hi!" }));
    ///
    /// # async {
    /// let listener = tokio::net::TcpListener::bind("0.0.0.0:3000").await.unwrap();
    /// axum::serve(listener, app).await;
    /// # };
    /// ```
    ///
    /// [`MakeService`]: tower::make::MakeService
    #[must_use]
    pub fn into_make_service(self) -> IntoMakeService<Self> {
        // call `Router::with_state` such that everything is turned into `Route` eagerly
        // rather than doing that per request
        IntoMakeService::new(self.with_state(()))
    }

    #[doc = include_str!("../docs/routing/into_make_service_with_connect_info.md")]
    #[cfg(feature = "tokio")]
    #[must_use]
    pub fn into_make_service_with_connect_info<C>(self) -> IntoMakeServiceWithConnectInfo<Self, C> {
        // call `Router::with_state` such that everything is turned into `Route` eagerly
        // rather than doing that per request
        IntoMakeServiceWithConnectInfo::new(self.with_state(()))
    }
}
// for `axum::serve(listener, router)`
#[cfg(all(feature = "tokio", any(feature = "http1", feature = "http2")))]
const _: () = {
    use crate::serve;
    // Lets a stateless `Router` be handed straight to `axum::serve`: for each
    // incoming connection the router clones itself as the per-connection service.
    impl<L> Service<serve::IncomingStream<'_, L>> for Router<()>
    where
        L: serve::Listener,
    {
        type Response = Self;
        type Error = Infallible;
        type Future = std::future::Ready<Result<Self::Response, Self::Error>>;
        fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
            // Always ready: producing the service is just a clone.
            Poll::Ready(Ok(()))
        }
        fn call(&mut self, _req: serve::IncomingStream<'_, L>) -> Self::Future {
            // call `Router::with_state` such that everything is turned into `Route` eagerly
            // rather than doing that per request
            std::future::ready(Ok(self.clone().with_state(())))
        }
    }
};
// Serve a single HTTP request with a fully-stateful (`S = ()`) router.
impl<B> Service<Request<B>> for Router<()>
where
    B: HttpBody<Data = bytes::Bytes> + Send + 'static,
    B::Error: Into<axum_core::BoxError>,
{
    type Response = Response;
    type Error = Infallible;
    type Future = RouteFuture<Infallible>;
    #[inline]
    fn poll_ready(&mut self, _: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        Poll::Ready(Ok(()))
    }
    #[inline]
    fn call(&mut self, req: Request<B>) -> Self::Future {
        // Erase the concrete body type before routing.
        let req = req.map(Body::new);
        self.call_with_state(req, ())
    }
}
/// A [`Router`] converted into a borrowed [`Service`] with a fixed body type.
///
/// See [`Router::as_service`] for more details.
pub struct RouterAsService<'a, B, S = ()> {
    router: &'a mut Router<S>,
    // `fn(B)` ties the type to `B` without owning one; fn pointers are always
    // `Send + Sync`, so `B` imposes no auto-trait requirements here.
    _marker: PhantomData<fn(B)>,
}
impl<B> Service<Request<B>> for RouterAsService<'_, B, ()>
where
    B: HttpBody<Data = bytes::Bytes> + Send + 'static,
    B::Error: Into<axum_core::BoxError>,
{
    type Response = Response;
    type Error = Infallible;
    type Future = RouteFuture<Infallible>;
    #[inline]
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Delegate to the borrowed router's own `Service` impl.
        <Router as Service<Request<B>>>::poll_ready(self.router, cx)
    }
    #[inline]
    fn call(&mut self, req: Request<B>) -> Self::Future {
        self.router.call(req)
    }
}
impl<B, S> fmt::Debug for RouterAsService<'_, B, S>
where
    S: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RouterAsService")
            .field("router", &self.router)
            .finish()
    }
}
/// A [`Router`] converted into an owned [`Service`] with a fixed body type.
///
/// See [`Router::into_service`] for more details.
pub struct RouterIntoService<B, S = ()> {
    router: Router<S>,
    // Fixes the request body type `B` for the `Service` impl below.
    _marker: PhantomData<fn(B)>,
}
// Manual impl: deriving `Clone` would wrongly require `B: Clone`.
impl<B, S> Clone for RouterIntoService<B, S>
where
    Router<S>: Clone,
{
    fn clone(&self) -> Self {
        Self {
            router: self.router.clone(),
            _marker: PhantomData,
        }
    }
}
impl<B> Service<Request<B>> for RouterIntoService<B, ()>
where
    B: HttpBody<Data = bytes::Bytes> + Send + 'static,
    B::Error: Into<axum_core::BoxError>,
{
    type Response = Response;
    type Error = Infallible;
    type Future = RouteFuture<Infallible>;
    #[inline]
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Delegate to the owned router's `Service` impl.
        <Router as Service<Request<B>>>::poll_ready(&mut self.router, cx)
    }
    #[inline]
    fn call(&mut self, req: Request<B>) -> Self::Future {
        self.router.call(req)
    }
}
impl<B, S> fmt::Debug for RouterIntoService<B, S>
where
    S: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("RouterIntoService")
            .field("router", &self.router)
            .finish()
    }
}
// The handler/service that runs when no route matches a request.
enum Fallback<S, E = Infallible> {
    // The framework-provided default fallback.
    Default(Route<E>),
    // A user-provided fallback service, already routable.
    Service(Route<E>),
    // A user-provided fallback handler that still needs state `S` to
    // become a `Route`.
    BoxedHandler(BoxedIntoRoute<S, E>),
}
impl<S, E> Fallback<S, E>
where
    S: Clone,
{
    // Combine two fallbacks: a user-set one beats a default one; two
    // user-set fallbacks conflict, signalled by `None`.
    fn merge(self, other: Self) -> Option<Self> {
        match (self, other) {
            // If either are `Default`, return the opposite one.
            (Self::Default(_), pick) | (pick, Self::Default(_)) => Some(pick),
            // Otherwise, return None
            _ => None,
        }
    }
    // Transform the inner route(s) with `f`, e.g. when applying a layer.
    fn map<F, E2>(self, f: F) -> Fallback<S, E2>
    where
        S: 'static,
        E: 'static,
        F: FnOnce(Route<E>) -> Route<E2> + Clone + Send + Sync + 'static,
        E2: 'static,
    {
        match self {
            Self::Default(route) => Fallback::Default(f(route)),
            Self::Service(route) => Fallback::Service(f(route)),
            Self::BoxedHandler(handler) => Fallback::BoxedHandler(handler.map(f)),
        }
    }
    // Provide the state, turning a pending handler into a concrete `Route`.
    fn with_state<S2>(self, state: S) -> Fallback<S2, E> {
        match self {
            Self::Default(route) => Fallback::Default(route),
            Self::Service(route) => Fallback::Service(route),
            Self::BoxedHandler(handler) => Fallback::Service(handler.into_route(state)),
        }
    }
    // Run the fallback for `req`, materialising the handler with `state` if needed.
    fn call_with_state(self, req: Request, state: S) -> RouteFuture<E> {
        match self {
            Self::Default(route) | Self::Service(route) => route.oneshot_inner_owned(req),
            Self::BoxedHandler(handler) => {
                let route = handler.into_route(state);
                route.oneshot_inner_owned(req)
            }
        }
    }
    fn is_default(&self) -> bool {
        matches!(self, Self::Default(..))
    }
}
// Manual impl: a derive would add unwanted `S: Clone`/`E: Clone` bounds.
impl<S, E> Clone for Fallback<S, E> {
    fn clone(&self) -> Self {
        match self {
            Self::Default(inner) => Self::Default(inner.clone()),
            Self::Service(inner) => Self::Service(inner.clone()),
            Self::BoxedHandler(inner) => Self::BoxedHandler(inner.clone()),
        }
    }
}
impl<S, E> fmt::Debug for Fallback<S, E> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::Default(inner) => f.debug_tuple("Default").field(inner).finish(),
            Self::Service(inner) => f.debug_tuple("Service").field(inner).finish(),
            // The boxed handler is opaque, so only the variant name is printed.
            Self::BoxedHandler(_) => f.debug_tuple("BoxedHandler").finish(),
        }
    }
}
// What a path maps to: either a per-method dispatcher or a single service.
#[allow(clippy::large_enum_variant)]
enum Endpoint<S> {
    MethodRouter(MethodRouter<S>),
    Route(Route),
}
impl<S> Endpoint<S>
where
    S: Clone + Send + Sync + 'static,
{
    // Apply `layer` to whichever kind of endpoint this is.
    fn layer<L>(self, layer: L) -> Self
    where
        L: Layer<Route> + Clone + Send + Sync + 'static,
        L::Service: Service<Request> + Clone + Send + Sync + 'static,
        <L::Service as Service<Request>>::Response: IntoResponse + 'static,
        <L::Service as Service<Request>>::Error: Into<Infallible> + 'static,
        <L::Service as Service<Request>>::Future: Send + 'static,
    {
        match self {
            Self::MethodRouter(method_router) => Self::MethodRouter(method_router.layer(layer)),
            Self::Route(route) => Self::Route(route.layer(layer)),
        }
    }
}
// Manual impl: a derive would add an unwanted `S: Clone` bound.
impl<S> Clone for Endpoint<S> {
    fn clone(&self) -> Self {
        match self {
            Self::MethodRouter(inner) => Self::MethodRouter(inner.clone()),
            Self::Route(inner) => Self::Route(inner.clone()),
        }
    }
}
impl<S> fmt::Debug for Endpoint<S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            Self::MethodRouter(method_router) => {
                f.debug_tuple("MethodRouter").field(method_router).finish()
            }
            Self::Route(route) => f.debug_tuple("Route").field(route).finish(),
        }
    }
}
// Compile-time check that the public router types stay `Send + Sync`.
#[test]
fn traits() {
    use crate::test_helpers::*;
    assert_send::<Router<()>>();
    assert_sync::<Router<()>>();
    assert_send::<RouterAsService<'static, Body, ()>>();
    assert_sync::<RouterAsService<'static, Body, ()>>();
    assert_send::<RouterIntoService<Body, ()>>();
    assert_sync::<RouterIntoService<Body, ()>>();
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/routing/route.rs | axum/src/routing/route.rs | use crate::{
body::{Body, HttpBody},
response::Response,
util::MapIntoResponse,
};
use axum_core::{extract::Request, response::IntoResponse};
use bytes::Bytes;
use http::{
header::{self, CONTENT_LENGTH},
HeaderMap, HeaderValue, Method,
};
use pin_project_lite::pin_project;
use std::{
convert::Infallible,
fmt,
future::Future,
pin::Pin,
task::{ready, Context, Poll},
};
use tower::{
util::{BoxCloneSyncService, MapErrLayer, Oneshot},
ServiceExt,
};
use tower_layer::Layer;
use tower_service::Service;
/// How routes are stored inside a [`Router`](super::Router).
///
/// You normally shouldn't need to care about this type. It's used in
/// [`Router::layer`](super::Router::layer).
pub struct Route<E = Infallible>(BoxCloneSyncService<Request, Response, E>);
impl<E> Route<E> {
    // Box a service, mapping its response type into an axum `Response` first.
    pub(crate) fn new<T>(svc: T) -> Self
    where
        T: Service<Request, Error = E> + Clone + Send + Sync + 'static,
        T::Response: IntoResponse + 'static,
        T::Future: Send + 'static,
    {
        Self(BoxCloneSyncService::new(MapIntoResponse::new(svc)))
    }
    /// Variant of [`Route::call`] that takes ownership of the route to avoid cloning.
    pub(crate) fn call_owned(self, req: Request<Body>) -> RouteFuture<E> {
        let req = req.map(Body::new);
        // Not the outermost service here, so top-level response fix-ups
        // (Allow/Content-Length/HEAD handling) are left to the caller.
        self.oneshot_inner_owned(req).not_top_level()
    }
    pub(crate) fn oneshot_inner(&self, req: Request) -> RouteFuture<E> {
        // The method is captured so the future can special-case CONNECT/HEAD.
        let method = req.method().clone();
        RouteFuture::new(method, self.0.clone().oneshot(req))
    }
    /// Variant of [`Route::oneshot_inner`] that takes ownership of the route to avoid cloning.
    pub(crate) fn oneshot_inner_owned(self, req: Request) -> RouteFuture<E> {
        let method = req.method().clone();
        RouteFuture::new(method, self.0.oneshot(req))
    }
    pub(crate) fn layer<L, NewError>(self, layer: L) -> Route<NewError>
    where
        L: Layer<Self> + Clone + Send + 'static,
        L::Service: Service<Request> + Clone + Send + Sync + 'static,
        <L::Service as Service<Request>>::Response: IntoResponse + 'static,
        <L::Service as Service<Request>>::Error: Into<NewError> + 'static,
        <L::Service as Service<Request>>::Future: Send + 'static,
        NewError: 'static,
    {
        // Convert the layered service's error into `NewError` before re-boxing.
        let layer = (MapErrLayer::new(Into::into), layer);
        Route::new(layer.layer(self))
    }
}
impl<E> Clone for Route<E> {
    #[track_caller]
    fn clone(&self) -> Self {
        Self(self.0.clone())
    }
}
impl<E> fmt::Debug for Route<E> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The boxed service is opaque, so only the type name is shown.
        f.debug_struct("Route").finish()
    }
}
impl<B, E> Service<Request<B>> for Route<E>
where
    B: HttpBody<Data = bytes::Bytes> + Send + 'static,
    B::Error: Into<axum_core::BoxError>,
{
    type Response = Response;
    type Error = E;
    type Future = RouteFuture<E>;
    #[inline]
    fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // `oneshot` drives the inner service's readiness, so always ready here.
        Poll::Ready(Ok(()))
    }
    #[inline]
    fn call(&mut self, req: Request<B>) -> Self::Future {
        // Called via the `Service` trait means this is not the outermost
        // router, so skip top-level response fix-ups.
        self.oneshot_inner(req.map(Body::new)).not_top_level()
    }
}
pin_project! {
    /// Response future for [`Route`].
    pub struct RouteFuture<E> {
        #[pin]
        inner: Oneshot<BoxCloneSyncService<Request, Response, E>, Request>,
        // Request method, kept to special-case CONNECT and HEAD responses.
        method: Method,
        // Pre-rendered `Allow` header value for 405 responses, if any.
        allow_header: Option<Bytes>,
        // Whether this future belongs to the outermost router.
        top_level: bool,
    }
}
impl<E> RouteFuture<E> {
    fn new(
        method: Method,
        inner: Oneshot<BoxCloneSyncService<Request, Response, E>, Request>,
    ) -> Self {
        Self {
            inner,
            method,
            allow_header: None,
            top_level: true,
        }
    }
    // Attach the `Allow` header value to use for 405 responses.
    pub(crate) fn allow_header(mut self, allow_header: Bytes) -> Self {
        self.allow_header = Some(allow_header);
        self
    }
    // Mark this future as nested, disabling top-level response fix-ups.
    pub(crate) fn not_top_level(mut self) -> Self {
        self.top_level = false;
        self
    }
}
impl<E> Future for RouteFuture<E> {
    type Output = Result<Response, E>;
    #[inline]
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        let this = self.project();
        let mut res = ready!(this.inner.poll(cx))?;
        if *this.method == Method::CONNECT && res.status().is_success() {
            // From https://httpwg.org/specs/rfc9110.html#CONNECT:
            // > A server MUST NOT send any Transfer-Encoding or
            // > Content-Length header fields in a 2xx (Successful)
            // > response to CONNECT.
            if res.headers().contains_key(&CONTENT_LENGTH)
                || res.headers().contains_key(&header::TRANSFER_ENCODING)
                || res.size_hint().lower() != 0
            {
                error!("response to CONNECT with nonempty body");
                res = res.map(|_| Body::empty());
            }
        } else if *this.top_level {
            if res.status() == http::StatusCode::METHOD_NOT_ALLOWED {
                // From https://httpwg.org/specs/rfc9110.html#field.allow:
                // An origin server MUST generate an `Allow` header field in a 405 (Method Not Allowed) response and MAY do so in any other response.
                set_allow_header(res.headers_mut(), this.allow_header);
            }
            // make sure to set content-length before removing the body
            set_content_length(&res.size_hint(), res.headers_mut());
            if *this.method == Method::HEAD {
                // HEAD responses keep the headers but must carry no body.
                *res.body_mut() = Body::empty();
            }
        }
        Poll::Ready(Ok(res))
    }
}
// Insert the pre-rendered `Allow` header into a 405 response, unless the
// handler already set one. The stashed value is consumed either way.
fn set_allow_header(headers: &mut HeaderMap, allow_header: &mut Option<Bytes>) {
    if let Some(value) = allow_header.take() {
        if !headers.contains_key(header::ALLOW) {
            headers.insert(
                header::ALLOW,
                HeaderValue::from_maybe_shared(value).expect("invalid `Allow` header"),
            );
        }
    }
}
// Add a `Content-Length` header when the body size is known exactly and the
// handler has not already set one.
fn set_content_length(size_hint: &http_body::SizeHint, headers: &mut HeaderMap) {
    // An explicitly set `Content-Length` always wins.
    if headers.contains_key(CONTENT_LENGTH) {
        return;
    }
    // Bodies without an exact size (e.g. streams) get no header at all.
    let size = match size_hint.exact() {
        Some(size) => size,
        None => return,
    };
    // Zero is by far the most common case; use a shared static value for it.
    #[allow(clippy::declare_interior_mutable_const)]
    const ZERO: HeaderValue = HeaderValue::from_static("0");
    let header_value = match size {
        0 => ZERO,
        n => {
            let mut digits = itoa::Buffer::new();
            HeaderValue::from_str(digits.format(n)).unwrap()
        }
    };
    headers.insert(CONTENT_LENGTH, header_value);
}
pin_project! {
    /// A [`RouteFuture`] that always yields a [`Response`].
    pub struct InfallibleRouteFuture {
        #[pin]
        future: RouteFuture<Infallible>,
    }
}
impl InfallibleRouteFuture {
    pub(crate) fn new(future: RouteFuture<Infallible>) -> Self {
        Self { future }
    }
}
impl Future for InfallibleRouteFuture {
    type Output = Response;
    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        match ready!(self.project().future.poll(cx)) {
            Ok(response) => Poll::Ready(response),
            // `Infallible` has no values, so this arm can never execute;
            // the empty match proves that to the compiler.
            Err(err) => match err {},
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // Compile-time check that `Route` stays `Send`.
    #[test]
    fn traits() {
        use crate::test_helpers::*;
        assert_send::<Route<()>>();
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/routing/method_filter.rs | axum/src/routing/method_filter.rs | use http::Method;
use std::{
fmt,
fmt::{Debug, Formatter},
};
/// A filter that matches one or more HTTP methods.
//
// Fixes relative to the previous revision:
// - derive `Eq` alongside `PartialEq` (the wrapped `u16` supports it; this is
//   backward compatible and satisfies clippy's `derive_partial_eq_without_eq`)
// - remove the pointless local rebinding in `bits()`
#[derive(Debug, Copy, Clone, PartialEq, Eq)]
pub struct MethodFilter(u16);
impl MethodFilter {
    /// Match `CONNECT` requests.
    ///
    /// This is useful for implementing HTTP/2's [extended CONNECT method],
    /// in which the `:protocol` pseudoheader is read
    /// (using [`hyper::ext::Protocol`])
    /// and the connection upgraded to a bidirectional byte stream
    /// (using [`hyper::upgrade::on`]).
    ///
    /// As seen in the [HTTP Upgrade Token Registry],
    /// common uses include WebSockets and proxying UDP or IP –
    /// though note that when using [`WebSocketUpgrade`]
    /// it's more useful to use [`any`](crate::routing::any)
    /// as HTTP/1.1 WebSockets need to support `GET`.
    ///
    /// [extended CONNECT]: https://www.rfc-editor.org/rfc/rfc8441.html#section-4
    /// [HTTP Upgrade Token Registry]: https://www.iana.org/assignments/http-upgrade-tokens/http-upgrade-tokens.xhtml
    /// [`WebSocketUpgrade`]: crate::extract::WebSocketUpgrade
    pub const CONNECT: Self = Self::from_bits(0b0_0000_0001);
    /// Match `DELETE` requests.
    pub const DELETE: Self = Self::from_bits(0b0_0000_0010);
    /// Match `GET` requests.
    pub const GET: Self = Self::from_bits(0b0_0000_0100);
    /// Match `HEAD` requests.
    pub const HEAD: Self = Self::from_bits(0b0_0000_1000);
    /// Match `OPTIONS` requests.
    pub const OPTIONS: Self = Self::from_bits(0b0_0001_0000);
    /// Match `PATCH` requests.
    pub const PATCH: Self = Self::from_bits(0b0_0010_0000);
    /// Match `POST` requests.
    pub const POST: Self = Self::from_bits(0b0_0100_0000);
    /// Match `PUT` requests.
    pub const PUT: Self = Self::from_bits(0b0_1000_0000);
    /// Match `TRACE` requests.
    pub const TRACE: Self = Self::from_bits(0b1_0000_0000);
    /// The raw bit set backing this filter.
    const fn bits(self) -> u16 {
        self.0
    }
    /// Build a filter directly from a bit set.
    const fn from_bits(bits: u16) -> Self {
        Self(bits)
    }
    /// Returns `true` if every method matched by `other` is also matched by `self`.
    pub(crate) const fn contains(self, other: Self) -> bool {
        self.bits() & other.bits() == other.bits()
    }
    /// Performs the OR operation between the [`MethodFilter`] in `self` with `other`.
    #[must_use]
    pub const fn or(self, other: Self) -> Self {
        Self(self.0 | other.0)
    }
}
/// Error type used when converting a [`Method`] to a [`MethodFilter`] fails.
#[derive(Debug)]
pub struct NoMatchingMethodFilter {
    // The method that has no corresponding filter bit.
    method: Method,
}
impl NoMatchingMethodFilter {
    /// Get the [`Method`] that couldn't be converted to a [`MethodFilter`].
    pub fn method(&self) -> &Method {
        &self.method
    }
}
impl fmt::Display for NoMatchingMethodFilter {
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        write!(f, "no `MethodFilter` for `{}`", self.method.as_str())
    }
}
impl std::error::Error for NoMatchingMethodFilter {}
// Only the nine standard methods have filter bits; custom/extension
// methods are rejected with `NoMatchingMethodFilter`.
impl TryFrom<Method> for MethodFilter {
    type Error = NoMatchingMethodFilter;
    fn try_from(m: Method) -> Result<Self, NoMatchingMethodFilter> {
        match m {
            Method::CONNECT => Ok(Self::CONNECT),
            Method::DELETE => Ok(Self::DELETE),
            Method::GET => Ok(Self::GET),
            Method::HEAD => Ok(Self::HEAD),
            Method::OPTIONS => Ok(Self::OPTIONS),
            Method::PATCH => Ok(Self::PATCH),
            Method::POST => Ok(Self::POST),
            Method::PUT => Ok(Self::PUT),
            Method::TRACE => Ok(Self::TRACE),
            other => Err(NoMatchingMethodFilter { method: other }),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn from_http_method() {
        // Every standard method converts to its corresponding filter.
        let cases = [
            (Method::CONNECT, MethodFilter::CONNECT),
            (Method::DELETE, MethodFilter::DELETE),
            (Method::GET, MethodFilter::GET),
            (Method::HEAD, MethodFilter::HEAD),
            (Method::OPTIONS, MethodFilter::OPTIONS),
            (Method::PATCH, MethodFilter::PATCH),
            (Method::POST, MethodFilter::POST),
            (Method::PUT, MethodFilter::PUT),
            (Method::TRACE, MethodFilter::TRACE),
        ];
        for (method, expected) in cases {
            assert_eq!(MethodFilter::try_from(method).unwrap(), expected);
        }
        // Non-standard methods are rejected, and the error names the method.
        let err = MethodFilter::try_from(http::Method::from_bytes(b"CUSTOM").unwrap())
            .unwrap_err();
        assert!(err.to_string().contains("CUSTOM"));
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/routing/path_router.rs | axum/src/routing/path_router.rs | use crate::{
extract::{nested_path::SetNestedPath, Request},
handler::Handler,
};
use axum_core::response::IntoResponse;
use matchit::MatchError;
use std::{borrow::Cow, collections::HashMap, convert::Infallible, fmt, sync::Arc};
use tower_layer::Layer;
use tower_service::Service;
use super::{
future::RouteFuture, strip_prefix::StripPrefix, url_params, Endpoint, MethodRouter, Route,
RouteId, NEST_TAIL_PARAM,
};
pub(super) struct PathRouter<S> {
    // Endpoints indexed by `RouteId` (the position in this Vec).
    routes: Vec<Endpoint<S>>,
    // Shared matchit route tree plus path<->id maps; copied-on-write when
    // routes are added (see `Arc::make_mut` in `set_node`).
    node: Arc<Node>,
    // Whether to reject axum 0.7-style `:capture` / `*wildcard` path syntax.
    v7_checks: bool,
}
// Validate a route path: it must be non-empty, start with `/`, and (when
// `v7_checks` is on) use the current capture syntax rather than axum 0.7's.
fn validate_path(v7_checks: bool, path: &str) -> Result<(), &'static str> {
    match path.chars().next() {
        None => return Err("Paths must start with a `/`. Use \"/\" for root routes"),
        Some('/') => {}
        Some(_) => return Err("Paths must start with a `/`"),
    }
    if v7_checks {
        validate_v07_paths(path)?;
    }
    Ok(())
}
// Reject axum 0.7 capture syntax: any path segment beginning with `:`
// (capture) or `*` (wildcard). The replacement syntax is `{capture}` /
// `{*wildcard}`; literal `:`/`*` segments require `without_v07_checks`.
fn validate_v07_paths(path: &str) -> Result<(), &'static str> {
    for segment in path.split('/') {
        if segment.starts_with(':') {
            return Err(
                "Path segments must not start with `:`. For capture groups, use \
                `{capture}`. If you meant to literally match a segment starting with \
                a colon, call `without_v07_checks` on the router.",
            );
        }
        if segment.starts_with('*') {
            return Err(
                "Path segments must not start with `*`. For wildcard capture, use \
                `{*wildcard}`. If you meant to literally match a segment starting with \
                an asterisk, call `without_v07_checks` on the router.",
            );
        }
    }
    Ok(())
}
impl<S> PathRouter<S>
where
    S: Clone + Send + Sync + 'static,
{
    // Disable the v0.7 `:capture`/`*wildcard` syntax checks for paths added later.
    pub(super) fn without_v07_checks(&mut self) {
        self.v7_checks = false;
    }
    // Register a `MethodRouter` at `path`, merging with an existing
    // `MethodRouter` already registered at the exact same path.
    pub(super) fn route(
        &mut self,
        path: &str,
        method_router: MethodRouter<S>,
    ) -> Result<(), Cow<'static, str>> {
        validate_path(self.v7_checks, path)?;
        if let Some((route_id, Endpoint::MethodRouter(prev_method_router))) = self
            .node
            .path_to_route_id
            .get(path)
            .and_then(|route_id| self.routes.get(route_id.0).map(|svc| (*route_id, svc)))
        {
            // if we're adding a new `MethodRouter` to a route that already has one just
            // merge them. This makes `.route("/", get(_)).route("/", post(_))` work
            let service = Endpoint::MethodRouter(
                prev_method_router
                    .clone()
                    .merge_for_path(Some(path), method_router)?,
            );
            self.routes[route_id.0] = service;
        } else {
            let endpoint = Endpoint::MethodRouter(method_router);
            self.new_route(path, endpoint)?;
        }
        Ok(())
    }
    // Install `handler` as the method-not-allowed fallback on every
    // already-registered `MethodRouter`.
    pub(super) fn method_not_allowed_fallback<H, T>(&mut self, handler: &H)
    where
        H: Handler<T, S>,
        T: 'static,
    {
        for endpoint in self.routes.iter_mut() {
            if let Endpoint::MethodRouter(rt) = endpoint {
                *rt = rt.clone().default_fallback(handler.clone());
            }
        }
    }
    // Register an arbitrary service at `path` (all methods).
    pub(super) fn route_service<T>(
        &mut self,
        path: &str,
        service: T,
    ) -> Result<(), Cow<'static, str>>
    where
        T: Service<Request, Error = Infallible> + Clone + Send + Sync + 'static,
        T::Response: IntoResponse,
        T::Future: Send + 'static,
    {
        self.route_endpoint(path, Endpoint::Route(Route::new(service)))
    }
    // Validate `path` and register `endpoint` under it.
    pub(super) fn route_endpoint(
        &mut self,
        path: &str,
        endpoint: Endpoint<S>,
    ) -> Result<(), Cow<'static, str>> {
        validate_path(self.v7_checks, path)?;
        self.new_route(path, endpoint)?;
        Ok(())
    }
    // Insert `path -> id` into the route tree, cloning the shared node first
    // if it is aliased (copy-on-write via `Arc::make_mut`).
    fn set_node(&mut self, path: &str, id: RouteId) -> Result<(), String> {
        let node = Arc::make_mut(&mut self.node);
        node.insert(path, id)
            .map_err(|err| format!("Invalid route {path:?}: {err}"))
    }
    // Append `endpoint` and register its path; route ids are Vec indices.
    fn new_route(&mut self, path: &str, endpoint: Endpoint<S>) -> Result<(), String> {
        let id = RouteId(self.routes.len());
        self.set_node(path, id)?;
        self.routes.push(endpoint);
        Ok(())
    }
    // Fold all of `other`'s routes into `self`, re-registering each by path.
    pub(super) fn merge(&mut self, other: Self) -> Result<(), Cow<'static, str>> {
        let Self {
            routes,
            node,
            v7_checks,
        } = other;
        // If either of the two did not allow paths starting with `:` or `*`, do not allow them for the merged router either.
        self.v7_checks |= v7_checks;
        for (id, route) in routes.into_iter().enumerate() {
            let route_id = RouteId(id);
            let path = node
                .route_id_to_path
                .get(&route_id)
                .expect("no path for route id. This is a bug in axum. Please file an issue");
            match route {
                Endpoint::MethodRouter(method_router) => self.route(path, method_router)?,
                Endpoint::Route(route) => self.route_service(path, route)?,
            }
        }
        Ok(())
    }
    // Mount `router` under `path_to_nest_at`, stripping the prefix before the
    // nested routes see the request and recording the nested path extension.
    pub(super) fn nest(
        &mut self,
        path_to_nest_at: &str,
        router: Self,
    ) -> Result<(), Cow<'static, str>> {
        let prefix = validate_nest_path(self.v7_checks, path_to_nest_at)?;
        let Self {
            routes,
            node,
            // Ignore the configuration of the nested router
            v7_checks: _,
        } = router;
        for (id, endpoint) in routes.into_iter().enumerate() {
            let route_id = RouteId(id);
            let inner_path = node
                .route_id_to_path
                .get(&route_id)
                .expect("no path for route id. This is a bug in axum. Please file an issue");
            let path = path_for_nested_route(prefix, inner_path);
            let layer = (
                StripPrefix::layer(prefix),
                SetNestedPath::layer(path_to_nest_at),
            );
            match endpoint.layer(layer) {
                Endpoint::MethodRouter(method_router) => {
                    self.route(&path, method_router)?;
                }
                Endpoint::Route(route) => {
                    self.route_endpoint(&path, Endpoint::Route(route))?;
                }
            }
        }
        Ok(())
    }
    // Mount a single service under a prefix by registering a wildcard route
    // (plus the bare prefix itself, see comments below).
    pub(super) fn nest_service<T>(
        &mut self,
        path_to_nest_at: &str,
        svc: T,
    ) -> Result<(), Cow<'static, str>>
    where
        T: Service<Request, Error = Infallible> + Clone + Send + Sync + 'static,
        T::Response: IntoResponse,
        T::Future: Send + 'static,
    {
        let path = validate_nest_path(self.v7_checks, path_to_nest_at)?;
        let prefix = path;
        let path = if path.ends_with('/') {
            format!("{path}{{*{NEST_TAIL_PARAM}}}")
        } else {
            format!("{path}/{{*{NEST_TAIL_PARAM}}}")
        };
        let layer = (
            StripPrefix::layer(prefix),
            SetNestedPath::layer(path_to_nest_at),
        );
        let endpoint = Endpoint::Route(Route::new(layer.layer(svc)));
        self.route_endpoint(&path, endpoint.clone())?;
        // `/{*rest}` is not matched by `/` so we need to also register a router at the
        // prefix itself. Otherwise if you were to nest at `/foo` then `/foo` itself
        // wouldn't match, which it should
        self.route_endpoint(prefix, endpoint.clone())?;
        if !prefix.ends_with('/') {
            // same goes for `/foo/`, that should also match
            self.route_endpoint(&format!("{prefix}/"), endpoint)?;
        }
        Ok(())
    }
    // Apply `layer` to every registered endpoint.
    pub(super) fn layer<L>(self, layer: L) -> Self
    where
        L: Layer<Route> + Clone + Send + Sync + 'static,
        L::Service: Service<Request> + Clone + Send + Sync + 'static,
        <L::Service as Service<Request>>::Response: IntoResponse + 'static,
        <L::Service as Service<Request>>::Error: Into<Infallible> + 'static,
        <L::Service as Service<Request>>::Future: Send + 'static,
    {
        let routes = self
            .routes
            .into_iter()
            .map(|endpoint| endpoint.layer(layer.clone()))
            .collect();
        Self {
            routes,
            node: self.node,
            v7_checks: self.v7_checks,
        }
    }
    // Like `layer`, but panics when no routes exist yet, because the layer
    // would otherwise silently apply to nothing.
    #[track_caller]
    pub(super) fn route_layer<L>(self, layer: L) -> Self
    where
        L: Layer<Route> + Clone + Send + Sync + 'static,
        L::Service: Service<Request> + Clone + Send + Sync + 'static,
        <L::Service as Service<Request>>::Response: IntoResponse + 'static,
        <L::Service as Service<Request>>::Error: Into<Infallible> + 'static,
        <L::Service as Service<Request>>::Future: Send + 'static,
    {
        if self.routes.is_empty() {
            panic!(
                "Adding a route_layer before any routes is a no-op. \
                Add the routes you want the layer to apply to first."
            );
        }
        let routes = self
            .routes
            .into_iter()
            .map(|endpoint| endpoint.layer(layer.clone()))
            .collect();
        Self {
            routes,
            node: self.node,
            v7_checks: self.v7_checks,
        }
    }
    // Whether any route has been registered.
    pub(super) fn has_routes(&self) -> bool {
        !self.routes.is_empty()
    }
    // Provide the state, converting each `MethodRouter<S>` into a state-free endpoint.
    pub(super) fn with_state<S2>(self, state: S) -> PathRouter<S2> {
        let routes = self
            .routes
            .into_iter()
            .map(|endpoint| match endpoint {
                Endpoint::MethodRouter(method_router) => {
                    Endpoint::MethodRouter(method_router.with_state(state.clone()))
                }
                Endpoint::Route(route) => Endpoint::Route(route),
            })
            .collect();
        PathRouter {
            routes,
            node: self.node,
            v7_checks: self.v7_checks,
        }
    }
    // Route one request: match the path, record URL params (and the matched
    // path, when enabled), then dispatch. On no match, return the request and
    // state so the caller can run its fallback.
    #[allow(clippy::result_large_err)]
    pub(super) fn call_with_state(
        &self,
        #[cfg_attr(not(feature = "original-uri"), allow(unused_mut))] mut req: Request,
        state: S,
    ) -> Result<RouteFuture<Infallible>, (Request, S)> {
        #[cfg(feature = "original-uri")]
        {
            use crate::extract::OriginalUri;
            // Preserve the URI as first received, before nesting strips prefixes.
            if req.extensions().get::<OriginalUri>().is_none() {
                let original_uri = OriginalUri(req.uri().clone());
                req.extensions_mut().insert(original_uri);
            }
        }
        let (mut parts, body) = req.into_parts();
        match self.node.at(parts.uri.path()) {
            Ok(match_) => {
                let id = *match_.value;
                #[cfg(feature = "matched-path")]
                crate::extract::matched_path::set_matched_path_for_request(
                    id,
                    &self.node.route_id_to_path,
                    &mut parts.extensions,
                );
                url_params::insert_url_params(&mut parts.extensions, &match_.params);
                let endpoint = self
                    .routes
                    .get(id.0)
                    .expect("no route for id. This is a bug in axum. Please file an issue");
                let req = Request::from_parts(parts, body);
                match endpoint {
                    Endpoint::MethodRouter(method_router) => {
                        Ok(method_router.call_with_state(req, state))
                    }
                    Endpoint::Route(route) => Ok(route.clone().call_owned(req)),
                }
            }
            // explicitly handle all variants in case matchit adds
            // new ones we need to handle differently
            Err(MatchError::NotFound) => Err((Request::from_parts(parts, body), state)),
        }
    }
}
impl<S> Default for PathRouter<S> {
    fn default() -> Self {
        Self {
            routes: Default::default(),
            node: Default::default(),
            // v0.7 path-syntax checks are on by default.
            v7_checks: true,
        }
    }
}
impl<S> fmt::Debug for PathRouter<S> {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_struct("PathRouter")
            .field("routes", &self.routes)
            .field("node", &self.node)
            .finish()
    }
}
// Manual impl: a derive would add an unwanted `S: Clone` bound;
// cloning is cheap because `node` is behind an `Arc`.
impl<S> Clone for PathRouter<S> {
    fn clone(&self) -> Self {
        Self {
            routes: self.routes.clone(),
            node: self.node.clone(),
            v7_checks: self.v7_checks,
        }
    }
}
/// Wrapper around `matchit::Router` that supports merging two `Router`s.
#[derive(Clone, Default)]
struct Node {
    inner: matchit::Router<RouteId>,
    // Bidirectional path <-> id maps; `Arc<str>` so each path string is
    // stored once and shared between both maps.
    route_id_to_path: HashMap<RouteId, Arc<str>>,
    path_to_route_id: HashMap<Arc<str>, RouteId>,
}
impl Node {
    // Insert a route into the tree and record the path in both lookup maps.
    fn insert(
        &mut self,
        path: impl Into<String>,
        val: RouteId,
    ) -> Result<(), matchit::InsertError> {
        let path = path.into();
        self.inner.insert(&path, val)?;
        let shared_path: Arc<str> = path.into();
        self.route_id_to_path.insert(val, shared_path.clone());
        self.path_to_route_id.insert(shared_path, val);
        Ok(())
    }
    // Look up the route (and captured params) matching `path`.
    fn at<'n, 'p>(
        &'n self,
        path: &'p str,
    ) -> Result<matchit::Match<'n, 'p, &'n RouteId>, MatchError> {
        self.inner.at(path)
    }
}
impl fmt::Debug for Node {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Only the registered paths are shown.
        f.debug_struct("Node")
            .field("paths", &self.route_id_to_path)
            .finish()
    }
}
// Validate a nesting prefix: it must start with `/`, must not be `/` itself,
// and must not contain a `{*wildcard}` segment (an escaped `{{*...}}` literal
// is allowed). Returns the path unchanged on success.
fn validate_nest_path(v7_checks: bool, path: &str) -> Result<&str, &'static str> {
    if !path.starts_with('/') {
        return Err("Nesting paths must start with a `/`.");
    }
    if path.len() < 2 {
        return Err("Nesting at `/` is not supported.");
    }
    let is_wildcard_segment = |segment: &str| {
        segment.starts_with("{*") && segment.ends_with('}') && !segment.ends_with("}}")
    };
    if path.split('/').any(is_wildcard_segment) {
        return Err("Invalid route: nested routes cannot contain wildcards (*)");
    }
    if v7_checks {
        validate_v07_paths(path)?;
    }
    Ok(path)
}
// Join a nesting prefix and an inner route path without producing a double
// slash, and without allocating when the inner path is just `/`.
pub(crate) fn path_for_nested_route<'a>(prefix: &'a str, path: &'a str) -> Cow<'a, str> {
    debug_assert!(prefix.starts_with('/'));
    debug_assert!(path.starts_with('/'));
    if prefix.ends_with('/') {
        // Drop the inner path's leading slash(es) so they don't double up.
        Cow::Owned(format!("{prefix}{}", path.trim_start_matches('/')))
    } else if path == "/" {
        // Nesting `/` under `/prefix` is just `/prefix`.
        Cow::Borrowed(prefix)
    } else {
        Cow::Owned(format!("{prefix}{path}"))
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/routing/into_make_service.rs | axum/src/routing/into_make_service.rs | use std::{
convert::Infallible,
future::ready,
task::{Context, Poll},
};
use tower_service::Service;
/// A [`MakeService`] that produces axum router services.
///
/// [`MakeService`]: tower::make::MakeService
#[derive(Debug, Clone)]
pub struct IntoMakeService<S> {
    svc: S,
}
impl<S> IntoMakeService<S> {
    pub(crate) fn new(svc: S) -> Self {
        Self { svc }
    }
}
// A make-service: for any target (e.g. an incoming connection), immediately
// yield a clone of the wrapped service.
impl<S, T> Service<T> for IntoMakeService<S>
where
    S: Clone,
{
    type Response = S;
    type Error = Infallible;
    type Future = IntoMakeServiceFuture<S>;
    #[inline]
    fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Always ready: producing the service is just a clone.
        Poll::Ready(Ok(()))
    }
    fn call(&mut self, _target: T) -> Self::Future {
        // The target is ignored; every caller gets the same service.
        IntoMakeServiceFuture::new(ready(Ok(self.svc.clone())))
    }
}
opaque_future! {
    /// Response future for [`IntoMakeService`].
    pub type IntoMakeServiceFuture<S> =
        std::future::Ready<Result<S, Infallible>>;
}
#[cfg(test)]
mod tests {
    use super::*;
    // Compile-time check that `IntoMakeService` stays `Send`.
    #[test]
    fn traits() {
        use crate::test_helpers::*;
        assert_send::<IntoMakeService<()>>();
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/routing/tests/merge.rs | axum/src/routing/tests/merge.rs | use super::*;
use crate::extract::OriginalUri;
use serde_json::{json, Value};
use tower::limit::ConcurrencyLimitLayer;
// Routes from both routers remain reachable after `merge`.
#[crate::test]
async fn basic() {
    let one = Router::new()
        .route("/foo", get(|| async {}))
        .route("/bar", get(|| async {}));
    let two = Router::new().route("/baz", get(|| async {}));
    let app = one.merge(two);
    let client = TestClient::new(app);
    let res = client.get("/foo").await;
    assert_eq!(res.status(), StatusCode::OK);
    let res = client.get("/bar").await;
    assert_eq!(res.status(), StatusCode::OK);
    let res = client.get("/baz").await;
    assert_eq!(res.status(), StatusCode::OK);
    // Unregistered paths still 404.
    let res = client.get("/qux").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
}
// `merge` should be associative: however the four routers are grouped,
// all four routes must be reachable.
#[crate::test]
async fn multiple_ors_balanced_differently() {
    let one = Router::new().route("/one", get(|| async { "one" }));
    let two = Router::new().route("/two", get(|| async { "two" }));
    let three = Router::new().route("/three", get(|| async { "three" }));
    let four = Router::new().route("/four", get(|| async { "four" }));
    test(
        "one",
        one.clone()
            .merge(two.clone())
            .merge(three.clone())
            .merge(four.clone()),
    )
    .await;
    test(
        "two",
        one.clone()
            .merge(two.clone())
            .merge(three.clone().merge(four.clone())),
    )
    .await;
    test(
        "three",
        one.clone()
            .merge(two.clone().merge(three.clone()).merge(four.clone())),
    )
    .await;
    test("four", one.merge(two.merge(three.merge(four)))).await;
    // Each grouping must serve all four routes with the expected bodies.
    async fn test(name: &str, app: Router) {
        let client = TestClient::new(app);
        for n in ["one", "two", "three", "four"].iter() {
            println!("running: {name} / {n}");
            let res = client.get(&format!("/{n}")).await;
            assert_eq!(res.status(), StatusCode::OK);
            assert_eq!(res.text().await, *n);
        }
    }
}
#[crate::test]
async fn nested_or() {
let bar = Router::new().route("/bar", get(|| async { "bar" }));
let baz = Router::new().route("/baz", get(|| async { "baz" }));
let bar_or_baz = bar.merge(baz);
let client = TestClient::new(bar_or_baz.clone());
assert_eq!(client.get("/bar").await.text().await, "bar");
assert_eq!(client.get("/baz").await.text().await, "baz");
let client = TestClient::new(Router::new().nest("/foo", bar_or_baz));
assert_eq!(client.get("/foo/bar").await.text().await, "bar");
assert_eq!(client.get("/foo/baz").await.text().await, "baz");
}
#[crate::test]
async fn or_with_route_following() {
let one = Router::new().route("/one", get(|| async { "one" }));
let two = Router::new().route("/two", get(|| async { "two" }));
let app = one.merge(two).route("/three", get(|| async { "three" }));
let client = TestClient::new(app);
let res = client.get("/one").await;
assert_eq!(res.status(), StatusCode::OK);
let res = client.get("/two").await;
assert_eq!(res.status(), StatusCode::OK);
let res = client.get("/three").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn layer() {
let one = Router::new().route("/foo", get(|| async {}));
let two = Router::new()
.route("/bar", get(|| async {}))
.layer(ConcurrencyLimitLayer::new(10));
let app = one.merge(two);
let client = TestClient::new(app);
let res = client.get("/foo").await;
assert_eq!(res.status(), StatusCode::OK);
let res = client.get("/bar").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn layer_and_handle_error() {
let one = Router::new().route("/foo", get(|| async {}));
let two = Router::new()
.route("/timeout", get(std::future::pending::<()>))
.layer(TimeoutLayer::new(Duration::from_millis(10)));
let app = one.merge(two);
let client = TestClient::new(app);
let res = client.get("/timeout").await;
assert_eq!(res.status(), StatusCode::REQUEST_TIMEOUT);
let res = client.get("/foo").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn nesting() {
let one = Router::new().route("/foo", get(|| async {}));
let two = Router::new().nest("/bar", Router::new().route("/baz", get(|| async {})));
let app = one.merge(two);
let client = TestClient::new(app);
let res = client.get("/bar/baz").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn boxed() {
let one = Router::new().route("/foo", get(|| async {}));
let two = Router::new().route("/bar", get(|| async {}));
let app = one.merge(two);
let client = TestClient::new(app);
let res = client.get("/bar").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn many_ors() {
let app = Router::new()
.route("/r1", get(|| async {}))
.merge(Router::new().route("/r2", get(|| async {})))
.merge(Router::new().route("/r3", get(|| async {})))
.merge(Router::new().route("/r4", get(|| async {})))
.merge(Router::new().route("/r5", get(|| async {})))
.merge(Router::new().route("/r6", get(|| async {})))
.merge(Router::new().route("/r7", get(|| async {})));
let client = TestClient::new(app);
for n in 1..=7 {
let res = client.get(&format!("/r{n}")).await;
assert_eq!(res.status(), StatusCode::OK);
}
let res = client.get("/r8").await;
assert_eq!(res.status(), StatusCode::NOT_FOUND);
}
#[crate::test]
async fn services() {
use crate::routing::get_service;
let app = Router::new()
.route(
"/foo",
get_service(service_fn(|_: Request| async {
Ok::<_, Infallible>(Response::new(Body::empty()))
})),
)
.merge(Router::new().route(
"/bar",
get_service(service_fn(|_: Request| async {
Ok::<_, Infallible>(Response::new(Body::empty()))
})),
));
let client = TestClient::new(app);
let res = client.get("/foo").await;
assert_eq!(res.status(), StatusCode::OK);
let res = client.get("/bar").await;
assert_eq!(res.status(), StatusCode::OK);
}
// Handler that echoes the three URI views axum exposes, so the tests below can
// assert on prefix stripping under `nest`: the extracted `Uri` and the
// request's own URI (which the nesting tests show are stripped of the nest
// prefix) versus `OriginalUri` (the full, unstripped path).
async fn all_the_uris(
    uri: Uri,
    OriginalUri(original_uri): OriginalUri,
    req: Request,
) -> impl IntoResponse {
    Json(json!({
        "uri": uri.to_string(),
        "request_uri": req.uri().to_string(),
        "original_uri": original_uri.to_string(),
    }))
}
#[crate::test]
async fn nesting_and_seeing_the_right_uri() {
let one = Router::new().nest("/foo/", Router::new().route("/bar", get(all_the_uris)));
let two = Router::new().route("/foo", get(all_the_uris));
let client = TestClient::new(one.merge(two));
let res = client.get("/foo/bar").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.json::<Value>().await,
json!({
"uri": "/bar",
"request_uri": "/bar",
"original_uri": "/foo/bar",
})
);
let res = client.get("/foo").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.json::<Value>().await,
json!({
"uri": "/foo",
"request_uri": "/foo",
"original_uri": "/foo",
})
);
}
#[crate::test]
async fn nesting_and_seeing_the_right_uri_at_more_levels_of_nesting() {
let one = Router::new().nest(
"/foo/",
Router::new().nest("/bar", Router::new().route("/baz", get(all_the_uris))),
);
let two = Router::new().route("/foo", get(all_the_uris));
let client = TestClient::new(one.merge(two));
let res = client.get("/foo/bar/baz").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.json::<Value>().await,
json!({
"uri": "/baz",
"request_uri": "/baz",
"original_uri": "/foo/bar/baz",
})
);
let res = client.get("/foo").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.json::<Value>().await,
json!({
"uri": "/foo",
"request_uri": "/foo",
"original_uri": "/foo",
})
);
}
#[crate::test]
async fn nesting_and_seeing_the_right_uri_ors_with_nesting() {
let one = Router::new().nest(
"/one",
Router::new().nest("/bar", Router::new().route("/baz", get(all_the_uris))),
);
let two = Router::new().nest("/two", Router::new().route("/qux", get(all_the_uris)));
let three = Router::new().route("/three", get(all_the_uris));
let client = TestClient::new(one.merge(two).merge(three));
let res = client.get("/one/bar/baz").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.json::<Value>().await,
json!({
"uri": "/baz",
"request_uri": "/baz",
"original_uri": "/one/bar/baz",
})
);
let res = client.get("/two/qux").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.json::<Value>().await,
json!({
"uri": "/qux",
"request_uri": "/qux",
"original_uri": "/two/qux",
})
);
let res = client.get("/three").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.json::<Value>().await,
json!({
"uri": "/three",
"request_uri": "/three",
"original_uri": "/three",
})
);
}
#[crate::test]
async fn nesting_and_seeing_the_right_uri_ors_with_multi_segment_uris() {
let one = Router::new().nest(
"/one",
Router::new().nest("/foo", Router::new().route("/bar", get(all_the_uris))),
);
let two = Router::new().route("/two/foo", get(all_the_uris));
let client = TestClient::new(one.merge(two));
let res = client.get("/one/foo/bar").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.json::<Value>().await,
json!({
"uri": "/bar",
"request_uri": "/bar",
"original_uri": "/one/foo/bar",
})
);
let res = client.get("/two/foo").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(
res.json::<Value>().await,
json!({
"uri": "/two/foo",
"request_uri": "/two/foo",
"original_uri": "/two/foo",
})
);
}
// Middleware attached to one side of a merge stays scoped to that side:
// the bearer-auth layer guards `private`'s routes but not `public`'s, and an
// unknown path 404s rather than being intercepted by the auth layer.
#[crate::test]
async fn middleware_that_return_early() {
    let private = Router::new()
        .route("/", get(|| async {}))
        .layer(ValidateRequestHeaderLayer::bearer("password"));
    let public = Router::new().route("/public", get(|| async {}));
    let client = TestClient::new(private.merge(public));
    // Missing credentials: the layer returns early.
    assert_eq!(client.get("/").await.status(), StatusCode::UNAUTHORIZED);
    assert_eq!(
        client
            .get("/")
            .header("authorization", "Bearer password")
            .await
            .status(),
        StatusCode::OK
    );
    assert_eq!(
        client.get("/doesnt-exist").await.status(),
        StatusCode::NOT_FOUND
    );
    // Route on the un-layered side requires no auth.
    assert_eq!(client.get("/public").await.status(), StatusCode::OK);
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/routing/tests/handle_error.rs | axum/src/routing/tests/handle_error.rs | use super::*;
use std::future::pending;
use tower::timeout::TimeoutLayer;
// Handler that completes immediately with `()`.
async fn unit() {}
// Handler that never completes; used to force the timeout layer to fire.
async fn forever() {
    pending().await
}
// Short timeout layer so the tests in this module fail fast.
fn timeout() -> TimeoutLayer {
    TimeoutLayer::new(Duration::from_millis(10))
}
#[crate::test]
async fn handler() {
let app = Router::new().route(
"/",
get(forever.layer((
HandleErrorLayer::new(|_: BoxError| async { StatusCode::REQUEST_TIMEOUT }),
timeout(),
))),
);
let client = TestClient::new(app);
let res = client.get("/").await;
assert_eq!(res.status(), StatusCode::REQUEST_TIMEOUT);
}
#[crate::test]
async fn handler_multiple_methods_first() {
let app = Router::new().route(
"/",
get(forever.layer((
HandleErrorLayer::new(|_: BoxError| async { StatusCode::REQUEST_TIMEOUT }),
timeout(),
)))
.post(unit),
);
let client = TestClient::new(app);
let res = client.get("/").await;
assert_eq!(res.status(), StatusCode::REQUEST_TIMEOUT);
}
#[crate::test]
async fn handler_multiple_methods_middle() {
let app = Router::new().route(
"/",
delete(unit)
.get(forever.layer((
HandleErrorLayer::new(|_: BoxError| async { StatusCode::REQUEST_TIMEOUT }),
timeout(),
)))
.post(unit),
);
let client = TestClient::new(app);
let res = client.get("/").await;
assert_eq!(res.status(), StatusCode::REQUEST_TIMEOUT);
}
#[crate::test]
async fn handler_multiple_methods_last() {
let app = Router::new().route(
"/",
delete(unit).get(forever.layer((
HandleErrorLayer::new(|_: BoxError| async { StatusCode::REQUEST_TIMEOUT }),
timeout(),
))),
);
let client = TestClient::new(app);
let res = client.get("/").await;
assert_eq!(res.status(), StatusCode::REQUEST_TIMEOUT);
}
#[crate::test]
async fn handler_service_ext() {
let fallible_service = tower::service_fn(|_| async { Err::<(), ()>(()) });
let handle_error_service =
fallible_service.handle_error(|_| async { StatusCode::INTERNAL_SERVER_ERROR });
let app = Router::new().route("/", get_service(handle_error_service));
let client = TestClient::new(app);
let res = client.get("/").await;
assert_eq!(res.status(), StatusCode::INTERNAL_SERVER_ERROR);
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/routing/tests/nest.rs | axum/src/routing/tests/nest.rs | use super::*;
use std::collections::HashMap;
use tower_http::services::ServeDir;
#[crate::test]
async fn nesting_apps() {
let api_routes = Router::new()
.route(
"/users",
get(|| async { "users#index" }).post(|| async { "users#create" }),
)
.route(
"/users/{id}",
get(
|params: extract::Path<HashMap<String, String>>| async move {
format!(
"{}: users#show ({})",
params.get("version").unwrap(),
params.get("id").unwrap()
)
},
),
)
.route(
"/games/{id}",
get(
|params: extract::Path<HashMap<String, String>>| async move {
format!(
"{}: games#show ({})",
params.get("version").unwrap(),
params.get("id").unwrap()
)
},
),
);
let app = Router::new()
.route("/", get(|| async { "hi" }))
.nest("/{version}/api", api_routes);
let client = TestClient::new(app);
let res = client.get("/").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "hi");
let res = client.get("/v0/api/users").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "users#index");
let res = client.get("/v0/api/users/123").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "v0: users#show (123)");
let res = client.get("/v0/api/games/123").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "v0: games#show (123)");
}
// A nested router's method routing still applies at the nested path: a wrong
// method yields 405 with the correct `Allow` header, and an unknown sub-path
// yields 404.
#[crate::test]
async fn wrong_method_nest() {
    let nested_app = Router::new().route("/", get(|| async {}));
    let app = Router::new().nest("/foo", nested_app);
    let client = TestClient::new(app);
    let res = client.get("/foo").await;
    assert_eq!(res.status(), StatusCode::OK);
    // POST is not registered: 405 plus the allowed methods.
    let res = client.post("/foo").await;
    assert_eq!(res.status(), StatusCode::METHOD_NOT_ALLOWED);
    assert_eq!(res.headers()[ALLOW], "GET,HEAD");
    // Sub-path not defined by the nested router.
    let res = client.patch("/foo/bar").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
}
// Nesting a router at `/` is rejected; `merge` is the supported alternative.
#[test]
#[should_panic(expected = "Nesting at the root is no longer supported. Use merge instead.")]
fn nest_router_at_root() {
    let nested = Router::new().route("/foo", get(|| async {}));
    let _: Router = Router::new().nest("/", nested);
}
// The empty path is treated the same as `/`.
#[test]
#[should_panic(expected = "Nesting at the root is no longer supported. Use merge instead.")]
fn nest_router_at_empty_path() {
    let nested = Router::new().route("/foo", get(|| async {}));
    let _: Router = Router::new().nest("", nested);
}
// Nesting a *service* at the root is also rejected, with `fallback_service`
// as the suggested alternative.
#[test]
#[should_panic(
    expected = "Nesting at the root is no longer supported. Use fallback_service instead."
)]
fn nest_service_at_root() {
    let _: Router = Router::new().nest_service("/", get(|| async {}));
}
#[test]
#[should_panic(
    expected = "Nesting at the root is no longer supported. Use fallback_service instead."
)]
fn nest_service_at_empty_path() {
    let _: Router = Router::new().nest_service("", get(|| async {}));
}
// Nest paths must be absolute (start with `/`) for both routers and services.
#[test]
#[should_panic(expected = "Nesting paths must start with a `/`.")]
fn nest_no_slash() {
    let _: Router = Router::new().nest("x", Router::new());
}
#[test]
#[should_panic(expected = "Nesting paths must start with a `/`.")]
fn nest_service_no_slash() {
    let _: Router = Router::new().nest_service("x", get(|| async {}));
}
#[crate::test]
async fn nested_url_extractor() {
let app = Router::new().nest(
"/foo",
Router::new().nest(
"/bar",
Router::new()
.route("/baz", get(|uri: Uri| async move { uri.to_string() }))
.route(
"/qux",
get(|req: Request| async move { req.uri().to_string() }),
),
),
);
let client = TestClient::new(app);
let res = client.get("/foo/bar/baz").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "/baz");
let res = client.get("/foo/bar/qux").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "/qux");
}
#[crate::test]
async fn nested_url_original_extractor() {
let app = Router::new().nest(
"/foo",
Router::new().nest(
"/bar",
Router::new().route(
"/baz",
get(|uri: extract::OriginalUri| async move { uri.0.to_string() }),
),
),
);
let client = TestClient::new(app);
let res = client.get("/foo/bar/baz").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "/foo/bar/baz");
}
#[crate::test]
async fn nested_service_sees_stripped_uri() {
let app = Router::new().nest(
"/foo",
Router::new().nest(
"/bar",
Router::new().route_service(
"/baz",
service_fn(|req: Request| async move {
let body = Body::from(req.uri().to_string());
Ok::<_, Infallible>(Response::new(body))
}),
),
),
);
let client = TestClient::new(app);
let res = client.get("/foo/bar/baz").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "/baz");
}
#[crate::test]
async fn nest_static_file_server() {
let app = Router::new().nest_service("/static", ServeDir::new("."));
let client = TestClient::new(app);
let res = client.get("/static/README.md").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn nested_multiple_routes() {
let app = Router::new()
.nest(
"/api",
Router::new()
.route("/users", get(|| async { "users" }))
.route("/teams", get(|| async { "teams" })),
)
.route("/", get(|| async { "root" }));
let client = TestClient::new(app);
assert_eq!(client.get("/").await.text().await, "root");
assert_eq!(client.get("/api/users").await.text().await, "users");
assert_eq!(client.get("/api/teams").await.text().await, "teams");
}
#[crate::test]
async fn multiple_top_level_nests() {
let app = Router::new()
.nest(
"/one",
Router::new().route("/route", get(|| async { "one" })),
)
.nest(
"/two",
Router::new().route("/route", get(|| async { "two" })),
);
let client = TestClient::new(app);
assert_eq!(client.get("/one/route").await.text().await, "one");
assert_eq!(client.get("/two/route").await.text().await, "two");
}
#[crate::test]
#[should_panic(expected = "Invalid route: nested routes cannot contain wildcards (*)")]
async fn nest_cannot_contain_wildcards() {
_ = Router::<()>::new().nest("/one/{*rest}", Router::new());
}
#[crate::test]
async fn outer_middleware_still_see_whole_url() {
#[derive(Clone)]
struct SetUriExtension<S>(S);
#[derive(Clone)]
struct Uri(http::Uri);
impl<S, B> Service<Request<B>> for SetUriExtension<S>
where
S: Service<Request<B>>,
{
type Response = S::Response;
type Error = S::Error;
type Future = S::Future;
fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
self.0.poll_ready(cx)
}
fn call(&mut self, mut req: Request<B>) -> Self::Future {
let uri = Uri(req.uri().clone());
req.extensions_mut().insert(uri);
self.0.call(req)
}
}
async fn handler(Extension(Uri(middleware_uri)): Extension<Uri>) -> impl IntoResponse {
middleware_uri.to_string()
}
let app = Router::new()
.route("/", get(handler))
.route("/foo", get(handler))
.route("/foo/bar", get(handler))
.nest("/one", Router::new().route("/two", get(handler)))
.fallback(handler)
.layer(tower::layer::layer_fn(SetUriExtension));
let client = TestClient::new(app);
assert_eq!(client.get("/").await.text().await, "/");
assert_eq!(client.get("/foo").await.text().await, "/foo");
assert_eq!(client.get("/foo/bar").await.text().await, "/foo/bar");
assert_eq!(client.get("/not-found").await.text().await, "/not-found");
assert_eq!(client.get("/one/two").await.text().await, "/one/two");
}
#[crate::test]
async fn nest_at_capture() {
let api_routes = Router::new().route(
"/{b}",
get(|Path((a, b)): Path<(String, String)>| async move { format!("a={a} b={b}") }),
);
let app = Router::new().nest("/{a}", api_routes);
let client = TestClient::new(app);
let res = client.get("/foo/bar").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "a=foo b=bar");
}
#[crate::test]
async fn nest_with_and_without_trailing() {
let app = Router::new().nest_service("/foo", get(|| async {}));
let client = TestClient::new(app);
let res = client.get("/foo").await;
assert_eq!(res.status(), StatusCode::OK);
let res = client.get("/foo/").await;
assert_eq!(res.status(), StatusCode::OK);
let res = client.get("/foo/bar").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[tokio::test]
async fn nesting_with_root_inner_router() {
let app = Router::new()
.nest_service("/service", Router::new().route("/", get(|| async {})))
.nest("/router", Router::new().route("/", get(|| async {})))
.nest("/router-slash/", Router::new().route("/", get(|| async {})));
let client = TestClient::new(app);
// `/service/` does match the `/service` prefix and the remaining path is technically
// empty, which is the same as `/` which matches `.route("/", _)`
let res = client.get("/service").await;
assert_eq!(res.status(), StatusCode::OK);
// `/service/` does match the `/service` prefix and the remaining path is `/`
// which matches `.route("/", _)`
//
// this is perhaps a little surprising but don't think there is much we can do
let res = client.get("/service/").await;
assert_eq!(res.status(), StatusCode::OK);
// at least it does work like you'd expect when using `nest`
let res = client.get("/router").await;
assert_eq!(res.status(), StatusCode::OK);
let res = client.get("/router/").await;
assert_eq!(res.status(), StatusCode::NOT_FOUND);
let res = client.get("/router-slash").await;
assert_eq!(res.status(), StatusCode::NOT_FOUND);
let res = client.get("/router-slash/").await;
assert_eq!(res.status(), StatusCode::OK);
}
macro_rules! nested_route_test {
(
$name:ident,
// the path we nest the inner router at
nest = $nested_path:literal,
// the route the inner router accepts
route = $route_path:literal,
// the route we expect to be able to call
expected = $expected_path:literal $(,)?
) => {
#[crate::test]
async fn $name() {
let inner = Router::new().route($route_path, get(|| async {}));
let app = Router::new().nest($nested_path, inner);
let client = TestClient::new(app);
let res = client.get($expected_path).await;
let status = res.status();
assert_eq!(status, StatusCode::OK, "Router");
}
};
}
// test cases taken from https://github.com/tokio-rs/axum/issues/714#issuecomment-1058144460
nested_route_test!(nest_1, nest = "/a", route = "/", expected = "/a");
nested_route_test!(nest_2, nest = "/a", route = "/a", expected = "/a/a");
nested_route_test!(nest_3, nest = "/a", route = "/a/", expected = "/a/a/");
nested_route_test!(nest_4, nest = "/a/", route = "/", expected = "/a/");
nested_route_test!(nest_5, nest = "/a/", route = "/a", expected = "/a/a");
nested_route_test!(nest_6, nest = "/a/", route = "/a/", expected = "/a/a/");
#[crate::test]
#[should_panic(
expected = "Path segments must not start with `:`. For capture groups, use `{capture}`. If you meant to literally match a segment starting with a colon, call `without_v07_checks` on the router."
)]
async fn colon_in_route() {
_ = Router::<()>::new().nest("/:foo", Router::new());
}
#[crate::test]
#[should_panic(
expected = "Path segments must not start with `*`. For wildcard capture, use `{*wildcard}`. If you meant to literally match a segment starting with an asterisk, call `without_v07_checks` on the router."
)]
async fn asterisk_in_route() {
_ = Router::<()>::new().nest("/*foo", Router::new());
}
#[crate::test]
async fn nesting_router_with_fallback() {
let nested = Router::new().fallback(|| async { "nested" });
let router = Router::new().route("/{x}/{y}", get(|| async { "two segments" }));
let client = TestClient::new(router.nest("/nest", nested));
let res = client.get("/a/b").await;
let body = res.text().await;
assert_eq!(body, "two segments");
let res = client.get("/nest/b").await;
let body = res.text().await;
assert_eq!(body, "nested");
}
#[crate::test]
async fn defining_missing_routes_in_nested_router() {
let router = Router::new()
.route("/nest/before", get(|| async { "before" }))
.nest(
"/nest",
Router::new()
.route("/mid", get(|| async { "nested mid" }))
.fallback(|| async { "nested fallback" }),
)
.route("/nest/after", get(|| async { "after" }));
let client = TestClient::new(router);
let res = client.get("/nest/before").await;
let body = res.text().await;
assert_eq!(body, "before");
let res = client.get("/nest/after").await;
let body = res.text().await;
assert_eq!(body, "after");
let res = client.get("/nest/mid").await;
let body = res.text().await;
assert_eq!(body, "nested mid");
let res = client.get("/nest/fallback").await;
let body = res.text().await;
assert_eq!(body, "nested fallback");
}
// Registering the same method+path both directly and via a nested router must
// panic — first with the direct route registered before the nested one...
#[test]
#[should_panic(
    expected = "Overlapping method route. Handler for `GET /nest/override` already exists"
)]
fn overriding_by_nested_router() {
    _ = Router::<()>::new()
        .route("/nest/override", get(|| async { "outer" }))
        .nest(
            "/nest",
            Router::new().route("/override", get(|| async { "inner" })),
        );
}
// ...and also with the registration order reversed (nested router first).
#[test]
#[should_panic(
    expected = "Overlapping method route. Handler for `GET /nest/override` already exists"
)]
fn overriding_nested_router_() {
    _ = Router::<()>::new()
        .nest(
            "/nest",
            Router::new().route("/override", get(|| async { "inner" })),
        )
        .route("/nest/override", get(|| async { "outer" }));
}
// This is just documenting current state, not intended behavior.
#[crate::test]
async fn overriding_nested_service_router() {
let router = Router::new()
.route("/nest/before", get(|| async { "outer" }))
.nest_service(
"/nest",
Router::new()
.route("/before", get(|| async { "inner" }))
.route("/after", get(|| async { "inner" })),
)
.route("/nest/after", get(|| async { "outer" }));
let client = TestClient::new(router);
let res = client.get("/nest/before").await;
let body = res.text().await;
assert_eq!(body, "outer");
let res = client.get("/nest/after").await;
let body = res.text().await;
assert_eq!(body, "outer");
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/routing/tests/mod.rs | axum/src/routing/tests/mod.rs | use crate::{
body::{Body, Bytes},
error_handling::HandleErrorLayer,
extract::{self, DefaultBodyLimit, FromRef, Path, State},
handler::{Handler, HandlerWithoutStateExt},
middleware::{self, Next},
response::{IntoResponse, Response},
routing::{
delete, get, get_service, on, on_service, patch, patch_service,
path_router::path_for_nested_route, post, MethodFilter,
},
test_helpers::{
tracing_helpers::{capture_tracing, TracingEvent},
*,
},
BoxError, Extension, Json, Router, ServiceExt,
};
use axum_core::extract::Request;
use counting_cloneable_state::CountingCloneableState;
use futures_util::stream::StreamExt;
use http::{
header::{ALLOW, CONTENT_LENGTH, HOST},
HeaderMap, Method, StatusCode, Uri,
};
use http_body_util::BodyExt;
use serde::Deserialize;
use serde_json::json;
use std::{
convert::Infallible,
future::{ready, IntoFuture, Ready},
sync::atomic::{AtomicUsize, Ordering},
task::{Context, Poll},
time::Duration,
};
use tower::{service_fn, util::MapResponseLayer, ServiceExt as TowerServiceExt};
use tower_http::{
limit::RequestBodyLimitLayer, timeout::TimeoutLayer,
validate_request::ValidateRequestHeaderLayer,
};
use tower_service::Service;
mod fallback;
mod get_to_head;
mod handle_error;
mod merge;
mod nest;
// Smoke test: distinct handlers can share a path across methods (`GET /` vs
// `POST /`) and live on separate paths (`POST /users`).
#[crate::test]
async fn hello_world() {
    async fn root(_: Request) -> &'static str {
        "Hello, World!"
    }
    async fn foo(_: Request) -> &'static str {
        "foo"
    }
    async fn users_create(_: Request) -> &'static str {
        "users#create"
    }
    let app = Router::new()
        .route("/", get(root).post(foo))
        .route("/users", post(users_create));
    let client = TestClient::new(app);
    let res = client.get("/").await;
    let body = res.text().await;
    assert_eq!(body, "Hello, World!");
    let res = client.post("/").await;
    let body = res.text().await;
    assert_eq!(body, "foo");
    let res = client.post("/users").await;
    let body = res.text().await;
    assert_eq!(body, "users#create");
}
#[crate::test]
async fn routing() {
let app = Router::new()
.route(
"/users",
get(|_: Request| async { "users#index" }).post(|_: Request| async { "users#create" }),
)
.route("/users/{id}", get(|_: Request| async { "users#show" }))
.route(
"/users/{id}/action",
get(|_: Request| async { "users#action" }),
);
let client = TestClient::new(app);
let res = client.get("/").await;
assert_eq!(res.status(), StatusCode::NOT_FOUND);
let res = client.get("/users").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "users#index");
let res = client.post("/users").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "users#create");
let res = client.get("/users/1").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "users#show");
let res = client.get("/users/1/action").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "users#action");
}
#[crate::test]
async fn router_type_doesnt_change() {
let app: Router = Router::new()
.route(
"/",
on(MethodFilter::GET, |_: Request| async { "hi from GET" })
.on(MethodFilter::POST, |_: Request| async { "hi from POST" }),
)
.layer(tower_http::trace::TraceLayer::new_for_http());
let client = TestClient::new(app);
let res = client.get("/").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "hi from GET");
let res = client.post("/").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "hi from POST");
}
#[crate::test]
async fn routing_between_services() {
use std::convert::Infallible;
use tower::service_fn;
async fn handle(_: Request) -> &'static str {
"handler"
}
let app = Router::new()
.route(
"/one",
get_service(service_fn(|_: Request| async {
Ok::<_, Infallible>(Response::new(Body::from("one get")))
}))
.post_service(service_fn(|_: Request| async {
Ok::<_, Infallible>(Response::new(Body::from("one post")))
}))
.on_service(
MethodFilter::PUT,
service_fn(|_: Request| async {
Ok::<_, Infallible>(Response::new(Body::from("one put")))
}),
),
)
.route("/two", on_service(MethodFilter::GET, handle.into_service()));
let client = TestClient::new(app);
let res = client.get("/one").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "one get");
let res = client.post("/one").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "one post");
let res = client.put("/one").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "one put");
let res = client.get("/two").await;
assert_eq!(res.status(), StatusCode::OK);
assert_eq!(res.text().await, "handler");
}
#[crate::test]
async fn middleware_on_single_route() {
use tower_http::trace::TraceLayer;
async fn handle(_: Request) -> &'static str {
"Hello, World!"
}
let app = Router::new().route("/", get(handle.layer(TraceLayer::new_for_http())));
let client = TestClient::new(app);
let res = client.get("/").await;
let body = res.text().await;
assert_eq!(body, "Hello, World!");
}
#[crate::test]
async fn service_in_bottom() {
async fn handler(_req: Request) -> Result<Response<Body>, Infallible> {
Ok(Response::new(Body::empty()))
}
let app = Router::new().route("/", get_service(service_fn(handler)));
TestClient::new(app);
}
#[crate::test]
async fn wrong_method_handler() {
let app = Router::new()
.route("/", get(|| async {}).post(|| async {}))
.route("/foo", patch(|| async {}));
let client = TestClient::new(app);
let res = client.patch("/").await;
assert_eq!(res.status(), StatusCode::METHOD_NOT_ALLOWED);
assert_eq!(res.headers()[ALLOW], "GET,HEAD,POST");
let res = client.patch("/foo").await;
assert_eq!(res.status(), StatusCode::OK);
let res = client.post("/foo").await;
assert_eq!(res.status(), StatusCode::METHOD_NOT_ALLOWED);
assert_eq!(res.headers()[ALLOW], "PATCH");
let res = client.get("/bar").await;
assert_eq!(res.status(), StatusCode::NOT_FOUND);
}
#[crate::test]
async fn wrong_method_service() {
#[derive(Clone)]
struct Svc;
impl<R> Service<R> for Svc {
type Response = Response;
type Error = Infallible;
type Future = Ready<Result<Self::Response, Self::Error>>;
fn poll_ready(&mut self, _cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
Poll::Ready(Ok(()))
}
fn call(&mut self, _req: R) -> Self::Future {
ready(Ok(().into_response()))
}
}
let app = Router::new()
.route("/", get_service(Svc).post_service(Svc))
.route("/foo", patch_service(Svc));
let client = TestClient::new(app);
let res = client.patch("/").await;
assert_eq!(res.status(), StatusCode::METHOD_NOT_ALLOWED);
assert_eq!(res.headers()[ALLOW], "GET,HEAD,POST");
let res = client.patch("/foo").await;
assert_eq!(res.status(), StatusCode::OK);
let res = client.post("/foo").await;
assert_eq!(res.status(), StatusCode::METHOD_NOT_ALLOWED);
assert_eq!(res.headers()[ALLOW], "PATCH");
let res = client.get("/bar").await;
assert_eq!(res.status(), StatusCode::NOT_FOUND);
}
#[crate::test]
async fn multiple_methods_for_one_handler() {
async fn root(_: Request) -> &'static str {
"Hello, World!"
}
let app = Router::new().route("/", on(MethodFilter::GET.or(MethodFilter::POST), root));
let client = TestClient::new(app);
let res = client.get("/").await;
assert_eq!(res.status(), StatusCode::OK);
let res = client.post("/").await;
assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn wildcard_sees_whole_url() {
let app = Router::new().route(
"/api/{*rest}",
get(|uri: Uri| async move { uri.to_string() }),
);
let client = TestClient::new(app);
let res = client.get("/api/foo/bar").await;
assert_eq!(res.text().await, "/api/foo/bar");
}
// `Router::layer` only wraps routes registered *before* the call: `/one`
// (added before the zero-duration timeout layer) times out, while `/two`
// (added after) is unaffected.
#[crate::test]
async fn middleware_applies_to_routes_above() {
    let app = Router::new()
        .route("/one", get(std::future::pending::<()>))
        .layer(TimeoutLayer::new(Duration::ZERO))
        .route("/two", get(|| async {}));
    let client = TestClient::new(app);
    let res = client.get("/one").await;
    assert_eq!(res.status(), StatusCode::REQUEST_TIMEOUT);
    let res = client.get("/two").await;
    assert_eq!(res.status(), StatusCode::OK);
}
// A route registered without a trailing slash must not match the
// slash-terminated variant of the same path.
#[crate::test]
async fn not_found_for_extra_trailing_slash() {
    let router = Router::new().route("/foo", get(|| async {}));
    let test_client = TestClient::new(router);

    let with_slash = test_client.get("/foo/").await;
    assert_eq!(with_slash.status(), StatusCode::NOT_FOUND);

    let without_slash = test_client.get("/foo").await;
    assert_eq!(without_slash.status(), StatusCode::OK);
}
// Inverse of the test above: a slash-terminated route does not match the
// path without its trailing slash.
#[crate::test]
async fn not_found_for_missing_trailing_slash() {
    let router = Router::new().route("/foo/", get(|| async {}));
    let test_client = TestClient::new(router);

    let response = test_client.get("/foo").await;
    assert_eq!(response.status(), StatusCode::NOT_FOUND);
}
#[crate::test]
async fn with_and_without_trailing_slash() {
    // `/foo` and `/foo/` are distinct routes and can coexist.
    let app = Router::new()
        .route("/foo", get(|| async { "without tsr" }))
        .route("/foo/", get(|| async { "with tsr" }));
    let client = TestClient::new(app);

    let res = client.get("/foo/").await;
    assert_eq!(res.status(), StatusCode::OK);
    assert_eq!(res.text().await, "with tsr");

    let res = client.get("/foo").await;
    assert_eq!(res.status(), StatusCode::OK);
    assert_eq!(res.text().await, "without tsr");
}
// for https://github.com/tokio-rs/axum/issues/420
#[crate::test]
async fn wildcard_doesnt_match_just_trailing_slash() {
    // A `{*path}` wildcard requires at least one segment: neither `/x`
    // nor `/x/` match, but `/x/foo/bar` does and captures `foo/bar`.
    let app = Router::new().route(
        "/x/{*path}",
        get(|Path(path): Path<String>| async move { path }),
    );
    let client = TestClient::new(app);

    let res = client.get("/x").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);

    let res = client.get("/x/").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);

    let res = client.get("/x/foo/bar").await;
    assert_eq!(res.status(), StatusCode::OK);
    assert_eq!(res.text().await, "foo/bar");
}
#[crate::test]
async fn what_matches_wildcard() {
    // Documents wildcard precedence: the more specific `/x/{*key}` wins
    // under `/x/<something>`, the root wildcard catches everything else
    // with at least one segment, and only `/` itself hits the fallback.
    let app = Router::new()
        .route("/{*key}", get(|| async { "root" }))
        .route("/x/{*key}", get(|| async { "x" }))
        .fallback(|| async { "fallback" });
    let client = TestClient::new(app);

    // Small helper so each case below reads as a single line.
    let get = |path| {
        let f = client.get(path);
        async move { f.await.text().await }
    };

    assert_eq!(get("/").await, "fallback");
    assert_eq!(get("/a").await, "root");
    assert_eq!(get("/a/").await, "root");
    assert_eq!(get("/a/b").await, "root");
    assert_eq!(get("/a/b/").await, "root");

    assert_eq!(get("/x").await, "root");
    assert_eq!(get("/x/").await, "root");
    assert_eq!(get("/x/a").await, "x");
    assert_eq!(get("/x/a/").await, "x");
    assert_eq!(get("/x/a/b").await, "x");
    assert_eq!(get("/x/a/b/").await, "x");
}
// Adding a root wildcard after a fallback collides with the route that the
// fallback registers internally, so it must panic at registration time.
#[should_panic(
    expected = "Invalid route \"/{*wild}\": Insertion failed due to conflict with previously registered route: /{*__private__axum_fallback}"
)]
#[test]
fn colliding_fallback_with_wildcard() {
    _ = Router::<()>::new()
        .fallback(|| async { "fallback" })
        .route("/{*wild}", get(|| async { "wildcard" }));
}
// We might want to reject this too
#[crate::test]
async fn colliding_wildcard_with_fallback() {
    // Reverse registration order of the test above: the wildcard added
    // first currently coexists with the fallback, which only serves `/`.
    let router = Router::new()
        .route("/{*wild}", get(|| async { "wildcard" }))
        .fallback(|| async { "fallback" });
    let client = TestClient::new(router);

    let res = client.get("/").await;
    let body = res.text().await;
    assert_eq!(body, "fallback");

    let res = client.get("/x").await;
    let body = res.text().await;
    assert_eq!(body, "wildcard");
}
// We might want to reject this too
#[crate::test]
async fn colliding_fallback_with_fallback() {
    // Registering two fallbacks is currently accepted; the first one wins.
    let router = Router::new()
        .fallback(|| async { "fallback1" })
        .fallback(|| async { "fallback2" });
    let client = TestClient::new(router);

    let res = client.get("/").await;
    let body = res.text().await;
    assert_eq!(body, "fallback1");

    let res = client.get("/x").await;
    let body = res.text().await;
    assert_eq!(body, "fallback1");
}
// A concrete route registered at `/` wins over the fallback there, while
// every other path still reaches the fallback.
#[crate::test]
async fn colliding_root_with_fallback() {
    let app = Router::new()
        .route("/", get(|| async { "root" }))
        .fallback(|| async { "fallback" });
    let test_client = TestClient::new(app);

    let root_body = test_client.get("/").await.text().await;
    assert_eq!(root_body, "root");

    let other_body = test_client.get("/x").await.text().await;
    assert_eq!(other_body, "fallback");
}
#[crate::test]
async fn colliding_fallback_with_root() {
    let router = Router::new()
        .fallback(|| async { "fallback" })
        .route("/", get(|| async { "root" }));
    let client = TestClient::new(router);

    // This works because fallback registers `any` so the `get` gets merged into it.
    let res = client.get("/").await;
    let body = res.text().await;
    assert_eq!(body, "root");

    let res = client.get("/x").await;
    let body = res.text().await;
    assert_eq!(body, "fallback");
}
#[crate::test]
async fn static_and_dynamic_paths() {
    // A static segment (`/foo`) takes precedence over a dynamic capture
    // (`/{key}`) at the same position, regardless of registration order.
    let app = Router::new()
        .route(
            "/{key}",
            get(|Path(key): Path<String>| async move { format!("dynamic: {key}") }),
        )
        .route("/foo", get(|| async { "static" }));
    let client = TestClient::new(app);

    let res = client.get("/bar").await;
    assert_eq!(res.text().await, "dynamic: bar");

    let res = client.get("/foo").await;
    assert_eq!(res.text().await, "static");
}
#[crate::test]
#[should_panic(expected = "Paths must start with a `/`. Use \"/\" for root routes")]
async fn empty_route() {
    // The empty string is not a valid path and must be rejected.
    let app = Router::new().route("", get(|| async {}));
    TestClient::new(app);
}
#[crate::test]
async fn middleware_still_run_for_unmatched_requests() {
    // Pass-through middleware that counts every request flowing through it.
    #[derive(Clone)]
    struct CountMiddleware<S>(S);

    static COUNT: AtomicUsize = AtomicUsize::new(0);

    impl<R, S> Service<R> for CountMiddleware<S>
    where
        S: Service<R>,
    {
        type Response = S::Response;
        type Error = S::Error;
        type Future = S::Future;

        fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
            self.0.poll_ready(cx)
        }

        fn call(&mut self, req: R) -> Self::Future {
            COUNT.fetch_add(1, Ordering::SeqCst);
            self.0.call(req)
        }
    }

    let app = Router::new()
        .route("/", get(|| async {}))
        .layer(tower::layer::layer_fn(CountMiddleware));
    let client = TestClient::new(app);

    assert_eq!(COUNT.load(Ordering::SeqCst), 0);

    client.get("/").await;
    assert_eq!(COUNT.load(Ordering::SeqCst), 1);

    // Even a request that ends up 404 passes through the middleware stack.
    client.get("/not-found").await;
    assert_eq!(COUNT.load(Ordering::SeqCst), 2);
}
// `route_service` must reject a `Router` argument; nesting is the supported
// way to compose routers.
#[crate::test]
#[should_panic(expected = "\
    Invalid route: `Router::route_service` cannot be used with `Router`s. \
    Use `Router::nest` instead\
")]
async fn routing_to_router_panics() {
    TestClient::new(Router::new().route_service("/", Router::new()));
}
#[crate::test]
async fn route_layer() {
    // `route_layer` only wraps matched routes, so a 404 path bypasses the
    // auth middleware entirely.
    let app = Router::new()
        .route("/foo", get(|| async {}))
        .route_layer(ValidateRequestHeaderLayer::bearer("password"));
    let client = TestClient::new(app);

    let res = client
        .get("/foo")
        .header("authorization", "Bearer password")
        .await;
    assert_eq!(res.status(), StatusCode::OK);

    let res = client.get("/foo").await;
    assert_eq!(res.status(), StatusCode::UNAUTHORIZED);

    let res = client.get("/not-found").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);

    // it would be nice if this would return `405 Method Not Allowed`
    // but that requires knowing more about which method route we're calling, which we
    // don't know currently since it's just a generic `Service`
    let res = client.post("/foo").await;
    assert_eq!(res.status(), StatusCode::UNAUTHORIZED);
}
// Registering different methods for the same path in separate `route`
// calls merges them into a single method router.
#[crate::test]
async fn different_methods_added_in_different_routes() {
    let router = Router::new()
        .route("/", get(|| async { "GET" }))
        .route("/", post(|| async { "POST" }));
    let test_client = TestClient::new(router);

    assert_eq!(test_client.get("/").await.text().await, "GET");
    assert_eq!(test_client.post("/").await.text().await, "POST");
}
// Two routers that both carry an explicit fallback cannot be merged; the
// conflict is detected at merge time.
#[crate::test]
#[should_panic(expected = "Cannot merge two `Router`s that both have a fallback")]
async fn merging_routers_with_fallbacks_panics() {
    async fn fallback() {}
    let one = Router::new().fallback(fallback);
    let two = Router::new().fallback(fallback);

    TestClient::new(one.merge(two));
}
// Registering the same method for the same path twice is an error.
#[test]
#[should_panic(expected = "Overlapping method route. Handler for `GET /foo/bar` already exists")]
fn routes_with_overlapping_method_routes() {
    async fn handler() {}
    let _: Router = Router::new()
        .route("/foo/bar", get(handler))
        .route("/foo/bar", get(handler));
}
// The same overlap check also fires when the duplicate arrives via `merge`.
#[test]
#[should_panic(expected = "Overlapping method route. Handler for `GET /foo/bar` already exists")]
fn merging_with_overlapping_method_routes() {
    async fn handler() {}
    let app: Router = Router::new().route("/foo/bar", get(handler));
    _ = app.clone().merge(app);
}
#[crate::test]
async fn merging_routers_with_same_paths_but_different_methods() {
    // Merging is fine when the shared path uses disjoint methods.
    let one = Router::new().route("/", get(|| async { "GET" }));
    let two = Router::new().route("/", post(|| async { "POST" }));

    let client = TestClient::new(one.merge(two));

    let res = client.get("/").await;
    let body = res.text().await;
    assert_eq!(body, "GET");

    let res = client.post("/").await;
    let body = res.text().await;
    assert_eq!(body, "POST");
}
#[crate::test]
async fn head_content_length_through_hyper_server() {
    // HEAD responses carry the content-length of the would-be GET body
    // while the body itself is empty.
    let app = Router::new()
        .route("/", get(|| async { "foo" }))
        .route("/json", get(|| async { Json(json!({ "foo": 1 })) }));
    let client = TestClient::new(app);

    let res = client.head("/").await;
    assert_eq!(res.headers()["content-length"], "3");
    assert!(res.text().await.is_empty());

    let res = client.head("/json").await;
    assert_eq!(res.headers()["content-length"], "9");
    assert!(res.text().await.is_empty());
}
// HEAD responses produced by the fallback also advertise the
// content-length of the corresponding GET body.
#[crate::test]
async fn head_content_length_through_hyper_server_that_hits_fallback() {
    let router = Router::new().fallback(|| async { "foo" });
    let test_client = TestClient::new(router);

    let response = test_client.head("/").await;
    assert_eq!(response.headers()["content-length"], "3");
}
#[crate::test]
async fn head_with_middleware_applied() {
    // With compression applied, GET is chunked and HEAD has neither a body
    // nor a length header (length is unknowable once compressed).
    use tower_http::compression::{predicate::SizeAbove, CompressionLayer};

    let app = Router::new()
        .nest(
            "/foo",
            Router::new().route("/", get(|| async { "Hello, World!" })),
        )
        .layer(CompressionLayer::new().compress_when(SizeAbove::new(0)));
    let client = TestClient::new(app);

    // send GET request
    let res = client.get("/foo").header("accept-encoding", "gzip").await;
    assert_eq!(res.headers()["transfer-encoding"], "chunked");
    // cannot have `transfer-encoding: chunked` and `content-length`
    assert!(!res.headers().contains_key("content-length"));

    // send HEAD request
    let res = client.head("/foo").header("accept-encoding", "gzip").await;
    // no response body so no `transfer-encoding`
    assert!(!res.headers().contains_key("transfer-encoding"));
    // no content-length since we cannot know it since the response
    // is compressed
    assert!(!res.headers().contains_key("content-length"));
}
#[crate::test]
#[should_panic(expected = "Paths must start with a `/`")]
async fn routes_must_start_with_slash() {
    // A path without a leading slash must be rejected at registration.
    let app = Router::new().route(":foo", get(|| async {}));
    TestClient::new(app);
}
#[crate::test]
async fn body_limited_by_default() {
    // Extractors that buffer the request body must enforce the default
    // body-size limit.
    let app = Router::new()
        .route("/bytes", post(|_: Bytes| async {}))
        .route("/string", post(|_: String| async {}))
        .route("/json", post(|_: Json<serde_json::Value>| async {}));
    let client = TestClient::new(app);

    for uri in ["/bytes", "/string", "/json"] {
        println!("calling {uri}");

        // Endless request body: the server must reject it instead of
        // buffering forever, hence the timeout around the request.
        let stream = futures_util::stream::repeat("a".repeat(1000)).map(Ok::<_, hyper::Error>);
        let body = reqwest::Body::wrap_stream(stream);

        let res_future = client
            .post(uri)
            .header("content-type", "application/json")
            .body(body)
            .into_future();
        let res = tokio::time::timeout(Duration::from_secs(3), res_future)
            .await
            .expect("never got response");

        assert_eq!(res.status(), StatusCode::PAYLOAD_TOO_LARGE);
    }
}
#[crate::test]
async fn disabling_the_default_limit() {
    // `DefaultBodyLimit::disable` removes the built-in cap entirely.
    let app = Router::new()
        .route("/", post(|_: Bytes| async {}))
        .layer(DefaultBodyLimit::disable());
    let client = TestClient::new(app);

    // `DEFAULT_LIMIT` is 2mb so make a body larger than that
    let body = reqwest::Body::from("a".repeat(3_000_000));

    let res = client.post("/").body(body).await;
    assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn limited_body_with_content_length() {
    const LIMIT: usize = 3;

    // With a known `content-length`, a within-limit request keeps the
    // header and succeeds; an oversized one is rejected.
    let app = Router::new()
        .route(
            "/",
            post(|headers: HeaderMap, _body: Bytes| async move {
                assert!(headers.get(CONTENT_LENGTH).is_some());
            }),
        )
        .layer(RequestBodyLimitLayer::new(LIMIT));
    let client = TestClient::new(app);

    let res = client.post("/").body("a".repeat(LIMIT)).await;
    assert_eq!(res.status(), StatusCode::OK);

    let res = client.post("/").body("a".repeat(LIMIT * 2)).await;
    assert_eq!(res.status(), StatusCode::PAYLOAD_TOO_LARGE);
}
#[crate::test]
async fn changing_the_default_limit() {
    // `DefaultBodyLimit::max` replaces the built-in limit with a custom one.
    let new_limit = 2;

    let app = Router::new()
        .route("/", post(|_: Bytes| async {}))
        .layer(DefaultBodyLimit::max(new_limit));
    let client = TestClient::new(app);

    let res = client
        .post("/")
        .body(reqwest::Body::from("a".repeat(new_limit)))
        .await;
    assert_eq!(res.status(), StatusCode::OK);

    let res = client
        .post("/")
        .body(reqwest::Body::from("a".repeat(new_limit + 1)))
        .await;
    assert_eq!(res.status(), StatusCode::PAYLOAD_TOO_LARGE);
}
#[crate::test]
async fn changing_the_default_limit_differently_on_different_routes() {
    // Per-route `DefaultBodyLimit` layers apply independently; a route
    // without one falls back to the built-in default.
    let limit1 = 2;
    let limit2 = 10;

    let app = Router::new()
        .route(
            "/limit1",
            post(|_: Bytes| async {}).layer(DefaultBodyLimit::max(limit1)),
        )
        .route(
            "/limit2",
            post(|_: Bytes| async {}).layer(DefaultBodyLimit::max(limit2)),
        )
        .route("/default", post(|_: Bytes| async {}));
    let client = TestClient::new(app);

    let res = client
        .post("/limit1")
        .body(reqwest::Body::from("a".repeat(limit1)))
        .await;
    assert_eq!(res.status(), StatusCode::OK);

    let res = client
        .post("/limit1")
        .body(reqwest::Body::from("a".repeat(limit2)))
        .await;
    assert_eq!(res.status(), StatusCode::PAYLOAD_TOO_LARGE);

    let res = client
        .post("/limit2")
        .body(reqwest::Body::from("a".repeat(limit1)))
        .await;
    assert_eq!(res.status(), StatusCode::OK);

    let res = client
        .post("/limit2")
        .body(reqwest::Body::from("a".repeat(limit2)))
        .await;
    assert_eq!(res.status(), StatusCode::OK);

    let res = client
        .post("/limit2")
        .body(reqwest::Body::from("a".repeat(limit1 + limit2)))
        .await;
    assert_eq!(res.status(), StatusCode::PAYLOAD_TOO_LARGE);

    let res = client
        .post("/default")
        .body(reqwest::Body::from("a".repeat(limit1 + limit2)))
        .await;
    assert_eq!(res.status(), StatusCode::OK);

    let res = client
        .post("/default")
        // `DEFAULT_LIMIT` is 2mb so make a body larger than that
        .body(reqwest::Body::from("a".repeat(3_000_000)))
        .await;
    assert_eq!(res.status(), StatusCode::PAYLOAD_TOO_LARGE);
}
#[crate::test]
async fn limited_body_with_streaming_body() {
    const LIMIT: usize = 3;

    // Streamed bodies have no `content-length`, so the limit layer must
    // count bytes as they arrive.
    let app = Router::new()
        .route(
            "/",
            post(|headers: HeaderMap, _body: Bytes| async move {
                assert!(headers.get(CONTENT_LENGTH).is_none());
            }),
        )
        .layer(RequestBodyLimitLayer::new(LIMIT));
    let client = TestClient::new(app);

    let stream = futures_util::stream::iter(vec![Ok::<_, hyper::Error>("a".repeat(LIMIT))]);
    let res = client
        .post("/")
        .body(reqwest::Body::wrap_stream(stream))
        .await;
    assert_eq!(res.status(), StatusCode::OK);

    let stream = futures_util::stream::iter(vec![Ok::<_, hyper::Error>("a".repeat(LIMIT * 2))]);
    let res = client
        .post("/")
        .body(reqwest::Body::wrap_stream(stream))
        .await;
    assert_eq!(res.status(), StatusCode::PAYLOAD_TOO_LARGE);
}
#[crate::test]
async fn extract_state() {
    // `FromRef` lets a handler extract both the full state and a substate
    // derived from it in the same signature.
    #[derive(Clone)]
    struct AppState {
        value: i32,
        inner: InnerState,
    }

    #[derive(Clone)]
    struct InnerState {
        value: i32,
    }

    impl FromRef<AppState> for InnerState {
        fn from_ref(state: &AppState) -> Self {
            state.inner.clone()
        }
    }

    async fn handler(State(outer): State<AppState>, State(inner): State<InnerState>) {
        assert_eq!(outer.value, 1);
        assert_eq!(inner.value, 2);
    }

    let state = AppState {
        value: 1,
        inner: InnerState { value: 2 },
    };

    let app = Router::new().route("/", get(handler)).with_state(state);
    let client = TestClient::new(app);

    let res = client.get("/").await;
    assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn explicitly_set_state() {
    // `with_state` on a method router freezes its state; the outer
    // router's state does not override it.
    let app = Router::new()
        .route_service(
            "/",
            get(|State(state): State<&'static str>| async move { state }).with_state("foo"),
        )
        .with_state("...");
    let client = TestClient::new(app);

    let res = client.get("/").await;
    assert_eq!(res.text().await, "foo");
}
#[crate::test]
async fn layer_response_into_response() {
    // A `MapResponse` closure may return `Result<_, impl IntoResponse>`;
    // the `Err` branch is converted into the final response.
    fn map_response<B>(_res: Response<B>) -> Result<Response<B>, impl IntoResponse> {
        let headers = [("x-foo", "bar")];
        let status = StatusCode::IM_A_TEAPOT;
        Err((headers, status))
    }

    let app = Router::new()
        .route("/", get(|| async {}))
        .layer(MapResponseLayer::new(map_response));
    let client = TestClient::new(app);

    let res = client.get("/").await;
    assert_eq!(res.headers()["x-foo"], "bar");
    assert_eq!(res.status(), StatusCode::IM_A_TEAPOT);
}
// Compile-only check: a `MethodRouter` fallback chain whose handlers take
// state must still type-check when the state is supplied via `with_state`.
#[allow(dead_code)]
fn method_router_fallback_with_state() {
    async fn fallback(_: State<&'static str>) {}
    async fn not_found(_: State<&'static str>) {}

    let state = "foo";

    let _: Router = Router::new()
        .fallback(get(fallback).fallback(not_found))
        .with_state(state);
}
#[test]
fn test_path_for_nested_route() {
    // Joining a nest prefix and an inner path must collapse duplicate
    // slashes while preserving a trailing slash on the inner path.
    assert_eq!(path_for_nested_route("/", "/"), "/");
    assert_eq!(path_for_nested_route("/a", "/"), "/a");
    assert_eq!(path_for_nested_route("/", "/b"), "/b");
    assert_eq!(path_for_nested_route("/a/", "/"), "/a/");
    assert_eq!(path_for_nested_route("/", "/b/"), "/b/");

    assert_eq!(path_for_nested_route("/a", "/b"), "/a/b");
    assert_eq!(path_for_nested_route("/a/", "/b"), "/a/b");
    assert_eq!(path_for_nested_route("/a", "/b/"), "/a/b/");
    assert_eq!(path_for_nested_route("/a/", "/b/"), "/a/b/");
}
#[crate::test]
async fn state_isnt_cloned_too_much() {
    // Guards against clone-count regressions: serving one request should
    // clone the state exactly three times.
    let state = CountingCloneableState::new();

    let app = Router::new()
        .route("/", get(|_: State<CountingCloneableState>| async {}))
        .with_state(state.clone());

    let client = TestClient::new(app);

    // ignore clones made during setup
    state.setup_done();

    client.get("/").await;

    assert_eq!(state.count(), 3);
}
#[crate::test]
async fn state_isnt_cloned_too_much_in_layer() {
    // Same clone-count guard as above, but for state threaded through
    // `middleware::from_fn_with_state`.
    async fn layer(State(_): State<CountingCloneableState>, req: Request, next: Next) -> Response {
        next.run(req).await
    }

    let state = CountingCloneableState::new();

    let app = Router::new().layer(middleware::from_fn_with_state(state.clone(), layer));

    let client = TestClient::new(app);

    // ignore clones made during setup
    state.setup_done();

    client.get("/").await;

    assert_eq!(state.count(), 3);
}
#[crate::test]
async fn logging_rejections() {
    // Extractor rejections must emit structured TRACE events on the
    // `axum::rejection` target with status, body, and rejection type.
    #[derive(Deserialize, Eq, PartialEq, Debug)]
    #[serde(deny_unknown_fields)]
    struct RejectionEvent {
        message: String,
        status: u16,
        body: String,
        rejection_type: String,
    }

    let events = capture_tracing::<RejectionEvent, _>(|| async {
        let app = Router::new()
            .route("/extension", get(|_: Extension<Infallible>| async {}))
            .route("/string", post(|_: String| async {}));
        let client = TestClient::new(app);

        assert_eq!(
            client.get("/extension").await.status(),
            StatusCode::INTERNAL_SERVER_ERROR
        );

        // Invalid UTF-8 payload to trigger the `String` extractor rejection.
        assert_eq!(
            client
                .post("/string")
                .body(Vec::from([0, 159, 146, 150]))
                .await
                .status(),
            StatusCode::BAD_REQUEST,
        );
    })
    .with_filter("axum::rejection=trace")
    .await;

    assert_eq!(
        events,
        Vec::from([
            TracingEvent {
                fields: RejectionEvent {
                    message: "rejecting request".to_owned(),
                    status: 500,
                    body: "Missing request extension: Extension of \
                        type `core::convert::Infallible` was not found. \
                        Perhaps you forgot to add it? See `axum::Extension`."
                        .to_owned(),
                    rejection_type: "axum::extract::rejection::MissingExtension".to_owned(),
                },
                target: "axum::rejection".to_owned(),
                level: "TRACE".to_owned(),
            },
            TracingEvent {
                fields: RejectionEvent {
                    message: "rejecting request".to_owned(),
                    status: 400,
                    body: "Request body didn't contain valid UTF-8: \
                        invalid utf-8 sequence of 1 bytes from index 1"
                        .to_owned(),
                    rejection_type: "axum_core::extract::rejection::InvalidUtf8".to_owned(),
                },
                target: "axum::rejection".to_owned(),
                level: "TRACE".to_owned(),
            },
        ])
    )
}
// https://github.com/tokio-rs/axum/issues/1955
#[crate::test]
async fn connect_going_to_custom_fallback() {
    // CONNECT requests (authority-form URI, no path) must route to the
    // user-supplied fallback instead of panicking.
    let app = Router::new().fallback(|| async { (StatusCode::NOT_FOUND, "custom fallback") });

    let req = Request::builder()
        .uri("example.com:443")
        .method(Method::CONNECT)
        .header(HOST, "example.com:443")
        .body(Body::empty())
        .unwrap();

    let res = app.oneshot(req).await.unwrap();
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    let text = String::from_utf8(res.collect().await.unwrap().to_bytes().to_vec()).unwrap();
    assert_eq!(text, "custom fallback");
}
// https://github.com/tokio-rs/axum/issues/1955
#[crate::test]
async fn connect_going_to_default_fallback() {
let app = Router::new();
let req = Request::builder()
.uri("example.com:443")
.method(Method::CONNECT)
.header(HOST, "example.com:443")
.body(Body::empty())
.unwrap();
let res = app.oneshot(req).await.unwrap();
assert_eq!(res.status(), StatusCode::NOT_FOUND);
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | true |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/routing/tests/get_to_head.rs | axum/src/routing/tests/get_to_head.rs | use super::*;
use http::Method;
use tower::ServiceExt;
mod for_handlers {
    use super::*;

    #[crate::test]
    async fn get_handles_head() {
        // A GET handler must also serve HEAD: same status and headers, but
        // with the response body stripped.
        let app = Router::new().route(
            "/",
            get(|| async {
                let mut headers = HeaderMap::new();
                headers.insert("x-some-header", "foobar".parse().unwrap());
                (headers, "you shouldn't see this")
            }),
        );

        // don't use reqwest because it always strips bodies from HEAD responses
        let res = app
            .oneshot(
                Request::builder()
                    .uri("/")
                    .method(Method::HEAD)
                    .body(Body::empty())
                    .unwrap(),
            )
            .await
            .unwrap();

        assert_eq!(res.status(), StatusCode::OK);
        assert_eq!(res.headers()["x-some-header"], "foobar");

        let body = BodyExt::collect(res.into_body()).await.unwrap().to_bytes();
        assert_eq!(body.len(), 0);
    }
}
mod for_services {
    use super::*;

    #[crate::test]
    async fn get_handles_head() {
        // Same GET-serves-HEAD behavior as above, but for a raw service
        // registered via `get_service`.
        let app = Router::new().route(
            "/",
            get_service(service_fn(|_req: Request| async move {
                Ok::<_, Infallible>(
                    ([("x-some-header", "foobar")], "you shouldn't see this").into_response(),
                )
            })),
        );

        // don't use reqwest because it always strips bodies from HEAD responses
        let res = app
            .oneshot(
                Request::builder()
                    .uri("/")
                    .method(Method::HEAD)
                    .body(Body::empty())
                    .unwrap(),
            )
            .await
            .unwrap();

        assert_eq!(res.status(), StatusCode::OK);
        assert_eq!(res.headers()["x-some-header"], "foobar");

        let body = BodyExt::collect(res.into_body()).await.unwrap().to_bytes();
        assert_eq!(body.len(), 0);
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/routing/tests/fallback.rs | axum/src/routing/tests/fallback.rs | use super::*;
use crate::middleware::{map_request, map_response};
// The fallback handles any request that doesn't match a registered route.
#[crate::test]
async fn basic() {
    let router = Router::new()
        .route("/foo", get(|| async {}))
        .fallback(|| async { "fallback" });
    let test_client = TestClient::new(router);

    assert_eq!(test_client.get("/foo").await.status(), StatusCode::OK);

    let response = test_client.get("/does-not-exist").await;
    assert_eq!(response.status(), StatusCode::OK);
    assert_eq!(response.text().await, "fallback");
}
#[crate::test]
async fn nest() {
    // The fallback also covers unmatched paths outside a nested prefix.
    let app = Router::new()
        .nest("/foo", Router::new().route("/bar", get(|| async {})))
        .fallback(|| async { "fallback" });
    let client = TestClient::new(app);

    assert_eq!(client.get("/foo/bar").await.status(), StatusCode::OK);

    let res = client.get("/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::OK);
    assert_eq!(res.text().await, "fallback");
}
#[crate::test]
async fn two() {
    // Fallback works with multiple registered routes, and it accepts a
    // `MethodRouter` (here `get(...)`) as well as a bare handler.
    let app = Router::new()
        .route("/first", get(|| async {}))
        .route("/second", get(|| async {}))
        .fallback(get(|| async { "fallback" }));
    let client = TestClient::new(app);

    let res = client.get("/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::OK);
    assert_eq!(res.text().await, "fallback");
}
#[crate::test]
async fn or() {
    // A fallback added after `merge` applies to the combined router.
    let one = Router::new().route("/one", get(|| async {}));
    let two = Router::new().route("/two", get(|| async {}));

    let app = one.merge(two).fallback(|| async { "fallback" });
    let client = TestClient::new(app);

    assert_eq!(client.get("/one").await.status(), StatusCode::OK);
    assert_eq!(client.get("/two").await.status(), StatusCode::OK);

    let res = client.get("/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::OK);
    assert_eq!(res.text().await, "fallback");
}
#[crate::test]
async fn fallback_accessing_state() {
    // The fallback handler can use `State` extraction like any route.
    let app = Router::new()
        .fallback(|State(state): State<&'static str>| async move { state })
        .with_state("state");
    let client = TestClient::new(app);

    let res = client.get("/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::OK);
    assert_eq!(res.text().await, "state");
}
// Shared fallback for the nesting tests; tags the response body "inner".
async fn inner_fallback() -> impl IntoResponse {
    (StatusCode::NOT_FOUND, "inner")
}
// Shared fallback for the nesting tests; tags the response body "outer".
async fn outer_fallback() -> impl IntoResponse {
    (StatusCode::NOT_FOUND, "outer")
}
// A nested router without its own fallback inherits the outer one.
#[crate::test]
async fn nested_router_inherits_fallback() {
    let nested = Router::new();
    let router = Router::new().nest("/foo", nested).fallback(outer_fallback);
    let test_client = TestClient::new(router);

    let response = test_client.get("/foo/bar").await;
    assert_eq!(response.status(), StatusCode::NOT_FOUND);
    assert_eq!(response.text().await, "outer");
}
#[crate::test]
async fn doesnt_inherit_fallback_if_overridden() {
    // A nested router with its own fallback keeps it; the outer fallback
    // still serves paths outside the nested prefix.
    let inner = Router::new().fallback(inner_fallback);
    let app = Router::new().nest("/foo", inner).fallback(outer_fallback);
    let client = TestClient::new(app);

    let res = client.get("/foo/bar").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "inner");

    let res = client.get("/").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "outer");
}
#[crate::test]
async fn deeply_nested_inherit_from_top() {
    // Fallback inheritance reaches through multiple nesting levels.
    let app = Router::new()
        .nest("/foo", Router::new().nest("/bar", Router::new()))
        .fallback(outer_fallback);
    let client = TestClient::new(app);

    let res = client.get("/foo/bar/baz").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "outer");
}
#[crate::test]
async fn deeply_nested_inherit_from_middle() {
    // A fallback set on an intermediate router covers its deeper nests.
    let app = Router::new().nest(
        "/foo",
        Router::new()
            .nest("/bar", Router::new())
            .fallback(outer_fallback),
    );
    let client = TestClient::new(app);

    let res = client.get("/foo/bar/baz").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "outer");
}
#[crate::test]
async fn with_middleware_on_inner_fallback() {
    // Middleware layered onto an inner router without routes must not run
    // when the request falls through to the outer fallback.
    async fn never_called<B>(_: Request<B>) -> Request<B> {
        panic!("should never be called")
    }

    let inner = Router::new().layer(map_request(never_called));
    let app = Router::new().nest("/foo", inner).fallback(outer_fallback);
    let client = TestClient::new(app);

    let res = client.get("/foo/bar").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "outer");
}
#[crate::test]
async fn also_inherits_default_layered_fallback() {
    // The inherited fallback also passes through middleware layered on the
    // outer router (observed here via a marker header).
    async fn set_header<B>(mut res: Response<B>) -> Response<B> {
        res.headers_mut()
            .insert("x-from-fallback", "1".parse().unwrap());
        res
    }

    let inner = Router::new();
    let app = Router::new()
        .nest("/foo", inner)
        .fallback(outer_fallback)
        .layer(map_response(set_header));
    let client = TestClient::new(app);

    let res = client.get("/foo/bar").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.headers()["x-from-fallback"], "1");
    assert_eq!(res.text().await, "outer");
}
#[crate::test]
async fn nest_fallback_on_inner() {
    // Unmatched paths under the nested prefix hit the inner fallback, not
    // the outer one.
    let app = Router::new()
        .nest(
            "/foo",
            Router::new()
                .route("/", get(|| async {}))
                .fallback(|| async { (StatusCode::NOT_FOUND, "inner fallback") }),
        )
        .fallback(|| async { (StatusCode::NOT_FOUND, "outer fallback") });
    let client = TestClient::new(app);

    let res = client.get("/foo/not-found").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "inner fallback");
}
// https://github.com/tokio-rs/axum/issues/1931
#[crate::test]
async fn doesnt_panic_if_used_with_nested_router() {
    // Regression test: using a router containing `nest_service` as the
    // `fallback_service` must not panic and must still route requests.
    async fn handler() {}

    let routes_static =
        Router::new().nest_service("/foo", crate::routing::get_service(handler.into_service()));

    let routes_all = Router::new().fallback_service(routes_static);

    let client = TestClient::new(routes_all);

    let res = client.get("/foo/bar").await;
    assert_eq!(res.status(), StatusCode::OK);
}
#[crate::test]
async fn issue_2072() {
    // Regression test for #2072: merging an empty router must not erase a
    // nested router's fallback (outside the nest, the default 404 applies).
    let nested_routes = Router::new().fallback(inner_fallback);

    let app = Router::new()
        .nest("/nested", nested_routes)
        .merge(Router::new());

    let client = TestClient::new(app);

    let res = client.get("/nested/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "inner");

    let res = client.get("/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "");
}
#[crate::test]
async fn issue_2072_outer_fallback_before_merge() {
    // Variant of #2072 with an outer fallback set before merging.
    let nested_routes = Router::new().fallback(inner_fallback);

    let app = Router::new()
        .nest("/nested", nested_routes)
        .fallback(outer_fallback)
        .merge(Router::new());

    let client = TestClient::new(app);

    let res = client.get("/nested/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "inner");

    let res = client.get("/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "outer");
}
#[crate::test]
async fn issue_2072_outer_fallback_after_merge() {
    // Variant of #2072 with the outer fallback set after merging.
    let nested_routes = Router::new().fallback(inner_fallback);

    let app = Router::new()
        .nest("/nested", nested_routes)
        .merge(Router::new())
        .fallback(outer_fallback);

    let client = TestClient::new(app);

    let res = client.get("/nested/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "inner");

    let res = client.get("/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "outer");
}
#[crate::test]
async fn merge_router_with_fallback_into_nested_router_with_fallback() {
    // Merging a router carrying a fallback supplies the outer fallback,
    // while the nested router keeps its own.
    let nested_routes = Router::new().fallback(inner_fallback);

    let app = Router::new()
        .nest("/nested", nested_routes)
        .merge(Router::new().fallback(outer_fallback));

    let client = TestClient::new(app);

    let res = client.get("/nested/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "inner");

    let res = client.get("/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "outer");
}
#[crate::test]
async fn merging_nested_router_with_fallback_into_router_with_fallback() {
    // Mirror of the test above: the merge direction is reversed, and the
    // fallbacks still resolve the same way.
    let nested_routes = Router::new().fallback(inner_fallback);

    let app = Router::new()
        .fallback(outer_fallback)
        .merge(Router::new().nest("/nested", nested_routes));

    let client = TestClient::new(app);

    let res = client.get("/nested/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "inner");

    let res = client.get("/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "outer");
}
// Merging an empty router must not disturb an existing fallback.
#[crate::test]
async fn merge_empty_into_router_with_fallback() {
    let router = Router::new().fallback(outer_fallback).merge(Router::new());
    let test_client = TestClient::new(router);

    let response = test_client.get("/does-not-exist").await;
    assert_eq!(response.status(), StatusCode::NOT_FOUND);
    assert_eq!(response.text().await, "outer");
}
#[crate::test]
async fn merge_router_with_fallback_into_empty() {
    // Merging into an empty router carries the fallback over.
    let app = Router::new().merge(Router::new().fallback(outer_fallback));

    let client = TestClient::new(app);

    let res = client.get("/does-not-exist").await;
    assert_eq!(res.status(), StatusCode::NOT_FOUND);
    assert_eq!(res.text().await, "outer");
}
#[crate::test]
async fn mna_fallback_not_405() {
    // A custom method-not-allowed fallback replaces the default 405
    // response entirely, including the `Allow` header.
    let app = Router::new()
        .route("/path", get(|| async { "path" }))
        .method_not_allowed_fallback(|| async { (http::StatusCode::NOT_FOUND, "Not Found") });
    let client = TestClient::new(app);

    let method_not_allowed_fallback = client.post("/path").await;
    assert_eq!(
        method_not_allowed_fallback.status(),
        http::StatusCode::NOT_FOUND
    );
    assert_eq!(method_not_allowed_fallback.headers().get(ALLOW), None);
    assert_eq!(method_not_allowed_fallback.text().await, "Not Found");
}
#[crate::test]
async fn mna_fallback_with_existing_fallback() {
    // A per-method-router fallback takes precedence over the router-wide
    // method-not-allowed fallback for that route.
    let app = Router::new()
        .route(
            "/",
            get(|| async { "test" }).fallback(|| async { "index fallback" }),
        )
        .route("/path", get(|| async { "path" }))
        .method_not_allowed_fallback(|| async { "method not allowed fallback" });
    let client = TestClient::new(app);

    let index_fallback = client.post("/").await;
    let method_not_allowed_fallback = client.post("/path").await;

    assert_eq!(index_fallback.text().await, "index fallback");
    assert_eq!(
        method_not_allowed_fallback.text().await,
        "method not allowed fallback"
    );
}
#[crate::test]
async fn mna_fallback_with_state() {
    // The method-not-allowed fallback can run the `State` extractor.
    let router = Router::new()
        .route("/", get(|| async { "index" }))
        .method_not_allowed_fallback(|State(state): State<&'static str>| async move { state })
        .with_state("state");
    let client = TestClient::new(router);

    let res = client.post("/").await;
    assert_eq!(res.text().await, "state");
}
#[crate::test]
async fn mna_fallback_with_unused_state() {
    // Installing the fallback after `with_state` works even when the
    // fallback itself ignores the state.
    let router = Router::new()
        .route("/", get(|| async { "index" }))
        .with_state(())
        .method_not_allowed_fallback(|| async move { "bla" });
    let client = TestClient::new(router);

    let res = client.post("/").await;
    assert_eq!(res.text().await, "bla");
}
#[crate::test]
async fn state_isnt_cloned_too_much_with_fallback() {
    let state = CountingCloneableState::new();
    let app = Router::new()
        .fallback(|_: State<CountingCloneableState>| async {})
        .with_state(state.clone());
    let client = TestClient::new(app);
    // ignore clones made during setup
    state.setup_done();
    client.get("/does-not-exist").await;
    // serving a single fallback request is expected to clone the state
    // exactly three times; this pins the current behavior so accidental
    // extra clones are caught by the test
    assert_eq!(state.count(), 3);
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/middleware/from_extractor.rs | axum/src/middleware/from_extractor.rs | use crate::{
extract::FromRequestParts,
response::{IntoResponse, Response},
};
use futures_core::future::BoxFuture;
use http::Request;
use pin_project_lite::pin_project;
use std::{
fmt,
future::Future,
marker::PhantomData,
pin::Pin,
task::{ready, Context, Poll},
};
use tower_layer::Layer;
use tower_service::Service;
/// Create a middleware from an extractor.
///
/// If the extractor succeeds the value will be discarded and the inner service
/// will be called. If the extractor fails the rejection will be returned and
/// the inner service will _not_ be called.
///
/// This can be used to perform validation of requests if the validation doesn't
/// produce any useful output, and run the extractor for several handlers
/// without repeating it in the function signature.
///
/// Note that if the extractor consumes the request body, as `String` or
/// [`Bytes`] does, an empty body will be left in its place. Thus it won't be
/// accessible to subsequent extractors or handlers.
///
/// # Example
///
/// ```rust
/// use axum::{
/// extract::FromRequestParts,
/// middleware::from_extractor,
/// routing::{get, post},
/// Router,
/// http::{header, StatusCode, request::Parts},
/// };
///
/// // An extractor that performs authorization.
/// struct RequireAuth;
///
/// impl<S> FromRequestParts<S> for RequireAuth
/// where
/// S: Send + Sync,
/// {
/// type Rejection = StatusCode;
///
/// async fn from_request_parts(parts: &mut Parts, state: &S) -> Result<Self, Self::Rejection> {
/// let auth_header = parts
/// .headers
/// .get(header::AUTHORIZATION)
/// .and_then(|value| value.to_str().ok());
///
/// match auth_header {
/// Some(auth_header) if token_is_valid(auth_header) => {
/// Ok(Self)
/// }
/// _ => Err(StatusCode::UNAUTHORIZED),
/// }
/// }
/// }
///
/// fn token_is_valid(token: &str) -> bool {
/// // ...
/// # false
/// }
///
/// async fn handler() {
/// // If we get here the request has been authorized
/// }
///
/// async fn other_handler() {
/// // If we get here the request has been authorized
/// }
///
/// let app = Router::new()
/// .route("/", get(handler))
/// .route("/foo", post(other_handler))
/// // The extractor will run before all routes
/// .route_layer(from_extractor::<RequireAuth>());
/// # let _: Router = app;
/// ```
///
/// [`Bytes`]: bytes::Bytes
pub fn from_extractor<E>() -> FromExtractorLayer<E, ()> {
from_extractor_with_state(())
}
/// Create a middleware from an extractor with the given state.
///
/// See [`State`](crate::extract::State) for more details about accessing state.
pub fn from_extractor_with_state<E, S>(state: S) -> FromExtractorLayer<E, S> {
    let _marker = PhantomData;
    FromExtractorLayer { state, _marker }
}
/// [`Layer`] that applies [`FromExtractor`] that runs an extractor and
/// discards the value.
///
/// See [`from_extractor`] for more details.
///
/// [`Layer`]: tower::Layer
#[must_use]
pub struct FromExtractorLayer<E, S> {
    // State handed to `E::from_request_parts` when the extractor runs.
    state: S,
    // Ties the layer to the extractor type `E` without storing one;
    // `fn() -> E` keeps the layer Send + Sync regardless of `E`.
    _marker: PhantomData<fn() -> E>,
}
impl<E, S> Clone for FromExtractorLayer<E, S>
where
    S: Clone,
{
    // Manual impl: `derive(Clone)` would needlessly also require `E: Clone`.
    fn clone(&self) -> Self {
        let state = self.state.clone();
        Self {
            state,
            _marker: PhantomData,
        }
    }
}
impl<E, S> fmt::Debug for FromExtractorLayer<E, S>
where
    S: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut s = f.debug_struct("FromExtractorLayer");
        s.field("state", &self.state);
        // Render the extractor's type name unquoted via `format_args!`.
        s.field("extractor", &format_args!("{}", std::any::type_name::<E>()));
        s.finish()
    }
}
impl<E, T, S> Layer<T> for FromExtractorLayer<E, S>
where
    S: Clone,
{
    type Service = FromExtractor<T, E, S>;

    fn layer(&self, inner: T) -> Self::Service {
        // Each wrapped service gets its own copy of the state.
        let state = self.state.clone();
        FromExtractor {
            inner,
            state,
            _extractor: PhantomData,
        }
    }
}
/// Middleware that runs an extractor and discards the value.
///
/// See [`from_extractor`] for more details.
pub struct FromExtractor<T, E, S> {
    // The wrapped service, called only when the extractor succeeds.
    inner: T,
    // State handed to `E::from_request_parts`.
    state: S,
    // Marker for the extractor type; `fn() -> E` avoids imposing
    // `E`'s auto traits on the middleware itself.
    _extractor: PhantomData<fn() -> E>,
}
#[test]
fn traits() {
    use crate::test_helpers::*;
    // `FromExtractor` must stay Send + Sync even when the extractor type
    // itself is neither — the `PhantomData<fn() -> E>` marker ensures this.
    assert_send::<FromExtractor<(), NotSendSync, ()>>();
    assert_sync::<FromExtractor<(), NotSendSync, ()>>();
}
impl<T, E, S> Clone for FromExtractor<T, E, S>
where
    T: Clone,
    S: Clone,
{
    // Manual impl so that `E` doesn't have to be `Clone`.
    fn clone(&self) -> Self {
        let inner = self.inner.clone();
        let state = self.state.clone();
        Self {
            inner,
            state,
            _extractor: PhantomData,
        }
    }
}
impl<T, E, S> fmt::Debug for FromExtractor<T, E, S>
where
    T: fmt::Debug,
    S: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut s = f.debug_struct("FromExtractor");
        s.field("inner", &self.inner);
        s.field("state", &self.state);
        // The extractor's type name, printed without surrounding quotes.
        s.field("extractor", &format_args!("{}", std::any::type_name::<E>()));
        s.finish()
    }
}
impl<T, E, B, S> Service<Request<B>> for FromExtractor<T, E, S>
where
    E: FromRequestParts<S> + 'static,
    B: Send + 'static,
    T: Service<Request<B>> + Clone,
    T::Response: IntoResponse,
    S: Clone + Send + Sync + 'static,
{
    type Response = Response;
    type Error = T::Error;
    type Future = ResponseFuture<B, T, E, S>;

    #[inline]
    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Readiness is delegated to the inner service.
        self.inner.poll_ready(cx)
    }

    fn call(&mut self, req: Request<B>) -> Self::Future {
        let state = self.state.clone();
        // Split the request so the extractor can borrow the parts; the body
        // is set aside and the request is reassembled afterwards.
        let (mut parts, body) = req.into_parts();
        let extract_future = Box::pin(async move {
            let extracted = E::from_request_parts(&mut parts, &state).await;
            let req = Request::from_parts(parts, body);
            (req, extracted)
        });
        ResponseFuture {
            state: State::Extracting {
                future: extract_future,
            },
            // A clone of the inner service is stored so the future owns it.
            svc: Some(self.inner.clone()),
        }
    }
}
pin_project! {
    /// Response future for [`FromExtractor`].
    #[allow(missing_debug_implementations)]
    pub struct ResponseFuture<B, T, E, S>
    where
        E: FromRequestParts<S>,
        T: Service<Request<B>>,
    {
        // Two-phase state machine: run the extractor, then the inner service.
        #[pin]
        state: State<B, T, E, S>,
        // Inner service; taken out of the `Option` once the extractor
        // succeeds and the call phase begins.
        svc: Option<T>,
    }
}
pin_project! {
    #[project = StateProj]
    enum State<B, T, E, S>
    where
        E: FromRequestParts<S>,
        T: Service<Request<B>>,
    {
        // Waiting for the extractor; yields the reassembled request
        // together with the extraction result.
        Extracting {
            future: BoxFuture<'static, (Request<B>, Result<E, E::Rejection>)>,
        },
        // Waiting for the inner service's response.
        Call { #[pin] future: T::Future },
    }
}
impl<B, T, E, S> Future for ResponseFuture<B, T, E, S>
where
    E: FromRequestParts<S>,
    T: Service<Request<B>>,
    T::Response: IntoResponse,
{
    type Output = Result<Response, T::Error>;

    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // Loop so the `Call` state is polled immediately after the
        // transition out of `Extracting`.
        loop {
            let mut this = self.as_mut().project();
            let new_state = match this.state.as_mut().project() {
                StateProj::Extracting { future } => {
                    let (req, extracted) = ready!(future.as_mut().poll(cx));
                    match extracted {
                        Ok(_) => {
                            // Extractor succeeded: discard its value and hand
                            // the request to the inner service.
                            let mut svc = this.svc.take().expect("future polled after completion");
                            let future = svc.call(req);
                            State::Call { future }
                        }
                        Err(err) => {
                            // Extractor failed: short-circuit with the
                            // rejection converted into a response.
                            let res = err.into_response();
                            return Poll::Ready(Ok(res));
                        }
                    }
                }
                StateProj::Call { future } => {
                    return future
                        .poll(cx)
                        .map(|result| result.map(IntoResponse::into_response));
                }
            };
            this.state.set(new_state);
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{handler::Handler, routing::get, test_helpers::*, Router};
    use axum_core::extract::FromRef;
    use http::{header, request::Parts, StatusCode};
    use tower_http::limit::RequestBodyLimitLayer;

    #[crate::test]
    async fn test_from_extractor() {
        #[derive(Clone)]
        struct Secret(&'static str);

        struct RequireAuth;

        impl<S> FromRequestParts<S> for RequireAuth
        where
            S: Send + Sync,
            Secret: FromRef<S>,
        {
            type Rejection = StatusCode;

            async fn from_request_parts(
                parts: &mut Parts,
                state: &S,
            ) -> Result<Self, Self::Rejection> {
                let Secret(secret) = Secret::from_ref(state);
                // Authorized iff the `Authorization` header equals the secret
                // held in the router state.
                if let Some(auth) = parts
                    .headers
                    .get(header::AUTHORIZATION)
                    .and_then(|v| v.to_str().ok())
                {
                    if auth == secret {
                        return Ok(Self);
                    }
                }
                Err(StatusCode::UNAUTHORIZED)
            }
        }

        async fn handler() {}

        let state = Secret("secret");

        let app = Router::new().route(
            "/",
            get(handler.layer(from_extractor_with_state::<RequireAuth, _>(state))),
        );

        let client = TestClient::new(app);

        // Without the header the extractor rejects the request.
        let res = client.get("/").await;
        assert_eq!(res.status(), StatusCode::UNAUTHORIZED);

        // With the correct secret the inner handler runs.
        let res = client
            .get("/")
            .header(http::header::AUTHORIZATION, "secret")
            .await;
        assert_eq!(res.status(), StatusCode::OK);
    }

    // just needs to compile
    #[allow(dead_code)]
    fn works_with_request_body_limit() {
        struct MyExtractor;

        impl<S> FromRequestParts<S> for MyExtractor
        where
            S: Send + Sync,
        {
            type Rejection = std::convert::Infallible;

            async fn from_request_parts(
                _parts: &mut Parts,
                _state: &S,
            ) -> Result<Self, Self::Rejection> {
                unimplemented!()
            }
        }

        let _: Router = Router::new()
            .layer(from_extractor::<MyExtractor>())
            .layer(RequestBodyLimitLayer::new(1));
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/middleware/from_fn.rs | axum/src/middleware/from_fn.rs | use axum_core::extract::{FromRequest, FromRequestParts, Request};
use futures_core::future::BoxFuture;
use std::{
any::type_name,
convert::Infallible,
fmt,
future::Future,
marker::PhantomData,
pin::Pin,
task::{Context, Poll},
};
use tower::util::BoxCloneSyncService;
use tower_layer::Layer;
use tower_service::Service;
use crate::{
response::{IntoResponse, Response},
util::MapIntoResponse,
};
/// Create a middleware from an async function.
///
/// `from_fn` requires the function given to
///
/// 1. Be an `async fn`.
/// 2. Take zero or more [`FromRequestParts`] extractors.
/// 3. Take exactly one [`FromRequest`] extractor as the second to last argument.
/// 4. Take [`Next`](Next) as the last argument.
/// 5. Return something that implements [`IntoResponse`].
///
/// Note that this function doesn't support extracting [`State`]. For that, use [`from_fn_with_state`].
///
/// # Example
///
/// ```rust
/// use axum::{
/// Router,
/// http,
/// routing::get,
/// response::Response,
/// middleware::{self, Next},
/// extract::Request,
/// };
///
/// async fn my_middleware(
/// request: Request,
/// next: Next,
/// ) -> Response {
/// // do something with `request`...
///
/// let response = next.run(request).await;
///
/// // do something with `response`...
///
/// response
/// }
///
/// let app = Router::new()
/// .route("/", get(|| async { /* ... */ }))
/// .layer(middleware::from_fn(my_middleware));
/// # let app: Router = app;
/// ```
///
/// # Running extractors
///
/// ```rust
/// use axum::{
/// Router,
/// extract::Request,
/// http::{StatusCode, HeaderMap},
/// middleware::{self, Next},
/// response::Response,
/// routing::get,
/// };
///
/// async fn auth(
/// // run the `HeaderMap` extractor
/// headers: HeaderMap,
/// // you can also add more extractors here but the last
/// // extractor must implement `FromRequest` which
/// // `Request` does
/// request: Request,
/// next: Next,
/// ) -> Result<Response, StatusCode> {
/// match get_token(&headers) {
/// Some(token) if token_is_valid(token) => {
/// let response = next.run(request).await;
/// Ok(response)
/// }
/// _ => {
/// Err(StatusCode::UNAUTHORIZED)
/// }
/// }
/// }
///
/// fn get_token(headers: &HeaderMap) -> Option<&str> {
/// // ...
/// # None
/// }
///
/// fn token_is_valid(token: &str) -> bool {
/// // ...
/// # false
/// }
///
/// let app = Router::new()
/// .route("/", get(|| async { /* ... */ }))
/// .route_layer(middleware::from_fn(auth));
/// # let app: Router = app;
/// ```
///
/// [extractors]: crate::extract::FromRequest
/// [`State`]: crate::extract::State
pub fn from_fn<F, T>(f: F) -> FromFnLayer<F, (), T> {
from_fn_with_state((), f)
}
/// Create a middleware from an async function with the given state.
///
/// For the requirements for the function supplied see [`from_fn`].
///
/// See [`State`](crate::extract::State) for more details about accessing state.
///
/// # Example
///
/// ```rust
/// use axum::{
/// Router,
/// http::StatusCode,
/// routing::get,
/// response::{IntoResponse, Response},
/// middleware::{self, Next},
/// extract::{Request, State},
/// };
///
/// #[derive(Clone)]
/// struct AppState { /* ... */ }
///
/// async fn my_middleware(
/// State(state): State<AppState>,
/// // you can add more extractors here but the last
/// // extractor must implement `FromRequest` which
/// // `Request` does
/// request: Request,
/// next: Next,
/// ) -> Response {
/// // do something with `request`...
///
/// let response = next.run(request).await;
///
/// // do something with `response`...
///
/// response
/// }
///
/// let state = AppState { /* ... */ };
///
/// let app = Router::new()
/// .route("/", get(|| async { /* ... */ }))
/// .route_layer(middleware::from_fn_with_state(state.clone(), my_middleware))
/// .with_state(state);
/// # let _: axum::Router = app;
/// ```
pub fn from_fn_with_state<F, S, T>(state: S, f: F) -> FromFnLayer<F, S, T> {
    let _extractor = PhantomData;
    FromFnLayer { f, state, _extractor }
}
/// A [`tower::Layer`] from an async function.
///
/// [`tower::Layer`] is used to apply middleware to [`Router`](crate::Router)'s.
///
/// Created with [`from_fn`] or [`from_fn_with_state`]. See those functions for more details.
#[must_use]
pub struct FromFnLayer<F, S, T> {
    // The middleware function.
    f: F,
    // State passed to the extractors the function takes.
    state: S,
    // Marker for the tuple of extractor types; `fn() -> T` keeps the
    // layer Send + Sync independent of `T`.
    _extractor: PhantomData<fn() -> T>,
}
impl<F, S, T> Clone for FromFnLayer<F, S, T>
where
F: Clone,
S: Clone,
{
fn clone(&self) -> Self {
Self {
f: self.f.clone(),
state: self.state.clone(),
_extractor: self._extractor,
}
}
}
impl<S, I, F, T> Layer<I> for FromFnLayer<F, S, T>
where
    F: Clone,
    S: Clone,
{
    type Service = FromFn<F, S, I, T>;

    fn layer(&self, inner: I) -> Self::Service {
        // Each wrapped service gets its own copies of the function and state.
        let f = self.f.clone();
        let state = self.state.clone();
        FromFn {
            f,
            state,
            inner,
            _extractor: PhantomData,
        }
    }
}
impl<F, S, T> fmt::Debug for FromFnLayer<F, S, T>
where
    S: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut s = f.debug_struct("FromFnLayer");
        // Write out the function's type name unquoted (passing
        // `&type_name::<F>()` directly would render it with quotes).
        s.field("f", &format_args!("{}", type_name::<F>()));
        s.field("state", &self.state);
        s.finish()
    }
}
/// A middleware created from an async function.
///
/// Created with [`from_fn`] or [`from_fn_with_state`]. See those functions for more details.
pub struct FromFn<F, S, I, T> {
    // The middleware function.
    f: F,
    // The wrapped (inner) service.
    inner: I,
    // State passed to the extractors.
    state: S,
    // Marker for the extractor tuple type.
    _extractor: PhantomData<fn() -> T>,
}
impl<F, S, I, T> Clone for FromFn<F, S, I, T>
where
F: Clone,
I: Clone,
S: Clone,
{
fn clone(&self) -> Self {
Self {
f: self.f.clone(),
inner: self.inner.clone(),
state: self.state.clone(),
_extractor: self._extractor,
}
}
}
// Implements `Service` for `FromFn` for every arity of extractor tuple;
// `$ty` are the leading `FromRequestParts` extractors and `$last` is the
// single trailing `FromRequest` extractor.
macro_rules! impl_service {
    (
        [$($ty:ident),*], $last:ident
    ) => {
        #[allow(non_snake_case, unused_mut)]
        impl<F, Fut, Out, S, I, $($ty,)* $last> Service<Request> for FromFn<F, S, I, ($($ty,)* $last,)>
        where
            F: FnMut($($ty,)* $last, Next) -> Fut + Clone + Send + 'static,
            $( $ty: FromRequestParts<S> + Send, )*
            $last: FromRequest<S> + Send,
            Fut: Future<Output = Out> + Send + 'static,
            Out: IntoResponse + 'static,
            I: Service<Request, Error = Infallible>
                + Clone
                + Send
                + Sync
                + 'static,
            I::Response: IntoResponse,
            I::Future: Send + 'static,
            S: Clone + Send + Sync + 'static,
        {
            type Response = Response;
            type Error = Infallible;
            type Future = ResponseFuture;

            fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
                self.inner.poll_ready(cx)
            }

            fn call(&mut self, req: Request) -> Self::Future {
                // Move the just-polled-ready service into the future and leave
                // a fresh clone behind (standard tower "ready service" dance).
                let not_ready_inner = self.inner.clone();
                let ready_inner = std::mem::replace(&mut self.inner, not_ready_inner);

                let mut f = self.f.clone();
                let state = self.state.clone();
                let (mut parts, body) = req.into_parts();
                let future = Box::pin(async move {
                    // Run each `FromRequestParts` extractor in order; a
                    // rejection short-circuits with its response.
                    $(
                        let $ty = match $ty::from_request_parts(&mut parts, &state).await {
                            Ok(value) => value,
                            Err(rejection) => return rejection.into_response(),
                        };
                    )*
                    // The final extractor may consume the whole request.
                    let req = Request::from_parts(parts, body);
                    let $last = match $last::from_request(req, &state).await {
                        Ok(value) => value,
                        Err(rejection) => return rejection.into_response(),
                    };
                    // Box the rest of the stack so the user function can call
                    // it through `Next`.
                    let inner = BoxCloneSyncService::new(MapIntoResponse::new(ready_inner));
                    let next = Next { inner };

                    f($($ty,)* $last, next).await.into_response()
                });

                ResponseFuture {
                    inner: future
                }
            }
        }
    };
}

all_the_tuples!(impl_service);
impl<F, S, I, T> fmt::Debug for FromFn<F, S, I, T>
where
    S: fmt::Debug,
    I: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Fix: the struct name was "FromFnLayer" (copy/pasted from the
        // layer's Debug impl); this type is `FromFn`.
        f.debug_struct("FromFn")
            // The function's type name, printed without surrounding quotes.
            .field("f", &format_args!("{}", type_name::<F>()))
            .field("inner", &self.inner)
            .field("state", &self.state)
            .finish()
    }
}
/// The remainder of a middleware stack, including the handler.
#[derive(Debug, Clone)]
pub struct Next {
    // The rest of the stack, boxed so `Next` is a concrete, nameable type.
    inner: BoxCloneSyncService<Request, Response, Infallible>,
}
impl Next {
    /// Execute the remaining middleware stack.
    pub async fn run(mut self, req: Request) -> Response {
        match self.inner.call(req).await {
            Ok(res) => res,
            // The inner service is infallible, so this arm can never run;
            // matching on the empty `Infallible` proves that to the compiler.
            Err(never) => match never {},
        }
    }
}
impl Service<Request> for Next {
    type Response = Response;
    type Error = Infallible;
    type Future = Pin<Box<dyn Future<Output = Result<Self::Response, Self::Error>> + Send>>;

    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Pure delegation to the boxed remainder of the stack.
        self.inner.poll_ready(cx)
    }

    fn call(&mut self, req: Request) -> Self::Future {
        self.inner.call(req)
    }
}
/// Response future for [`FromFn`].
pub struct ResponseFuture {
    // Boxed future that drives extraction, the user's function, and the
    // inner service to completion.
    inner: BoxFuture<'static, Response>,
}
impl Future for ResponseFuture {
    type Output = Result<Response, Infallible>;

    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // The boxed future cannot fail, so its output is wrapped in `Ok`.
        match self.inner.as_mut().poll(cx) {
            Poll::Ready(response) => Poll::Ready(Ok(response)),
            Poll::Pending => Poll::Pending,
        }
    }
}
impl fmt::Debug for ResponseFuture {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The boxed future is opaque, so only the type name is printed.
        let mut s = f.debug_struct("ResponseFuture");
        s.finish()
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{body::Body, routing::get, Router};
    use http::{HeaderMap, StatusCode};
    use http_body_util::BodyExt;
    use tower::ServiceExt;

    #[crate::test]
    async fn basic() {
        // Middleware inserts a header; the handler echoes it back.
        async fn insert_header(mut req: Request, next: Next) -> impl IntoResponse {
            req.headers_mut()
                .insert("x-axum-test", "ok".parse().unwrap());

            next.run(req).await
        }

        async fn handle(headers: HeaderMap) -> String {
            headers["x-axum-test"].to_str().unwrap().to_owned()
        }

        let app = Router::new()
            .route("/", get(handle))
            .layer(from_fn(insert_header));

        let res = app
            .oneshot(Request::builder().uri("/").body(Body::empty()).unwrap())
            .await
            .unwrap();

        assert_eq!(res.status(), StatusCode::OK);
        let body = res.collect().await.unwrap().to_bytes();
        assert_eq!(&body[..], b"ok");
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/middleware/mod.rs | axum/src/middleware/mod.rs | //! Utilities for writing middleware
//!
#![doc = include_str!("../docs/middleware.md")]
mod from_extractor;
mod from_fn;
mod map_request;
mod map_response;
mod response_axum_body;

// Re-export the public API of each middleware submodule at this level.
pub use self::from_extractor::{
    from_extractor, from_extractor_with_state, FromExtractor, FromExtractorLayer,
};
pub use self::from_fn::{from_fn, from_fn_with_state, FromFn, FromFnLayer, Next};
pub use self::map_request::{
    map_request, map_request_with_state, IntoMapRequestResult, MapRequest, MapRequestLayer,
};
pub use self::map_response::{
    map_response, map_response_with_state, MapResponse, MapResponseLayer,
};
pub use self::response_axum_body::{
    ResponseAxumBody, ResponseAxumBodyFuture, ResponseAxumBodyLayer,
};
pub use crate::extension::AddExtension;

pub mod future {
    //! Future types.
    pub use super::from_extractor::ResponseFuture as FromExtractorResponseFuture;
    pub use super::from_fn::ResponseFuture as FromFnResponseFuture;
    pub use super::map_request::ResponseFuture as MapRequestResponseFuture;
    pub use super::map_response::ResponseFuture as MapResponseResponseFuture;
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/middleware/response_axum_body.rs | axum/src/middleware/response_axum_body.rs | use std::{
error::Error,
future::Future,
pin::Pin,
task::{ready, Context, Poll},
};
use axum_core::{body::Body, response::Response};
use bytes::Bytes;
use http_body::Body as HttpBody;
use pin_project_lite::pin_project;
use tower::{Layer, Service};
/// Layer that transforms the Response body to [`crate::body::Body`].
///
/// This is useful when another layer maps the body to some other type to convert it back.
// Unit struct: the layer carries no configuration.
#[derive(Debug, Clone)]
pub struct ResponseAxumBodyLayer;
impl<S> Layer<S> for ResponseAxumBodyLayer {
    type Service = ResponseAxumBody<S>;

    fn layer(&self, inner: S) -> Self::Service {
        // No turbofish needed: `S` is fixed by the return type.
        ResponseAxumBody(inner)
    }
}
/// Service generated by [`ResponseAxumBodyLayer`].
// Newtype over the wrapped service; field 0 is the inner service.
#[derive(Debug, Clone)]
pub struct ResponseAxumBody<S>(S);
impl<S, Request, ResBody> Service<Request> for ResponseAxumBody<S>
where
    S: Service<Request, Response = Response<ResBody>>,
    ResBody: HttpBody<Data = Bytes> + Send + 'static,
    <ResBody as HttpBody>::Error: Error + Send + Sync,
{
    type Response = Response;
    type Error = S::Error;
    type Future = ResponseAxumBodyFuture<S::Future>;

    fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
        // Delegate readiness to the wrapped service.
        self.0.poll_ready(cx)
    }

    fn call(&mut self, req: Request) -> Self::Future {
        // The body conversion happens in the future, once the response exists.
        ResponseAxumBodyFuture {
            inner: self.0.call(req),
        }
    }
}
pin_project! {
    /// Response future for [`ResponseAxumBody`].
    pub struct ResponseAxumBodyFuture<Fut> {
        // The wrapped service's response future.
        #[pin]
        inner: Fut,
    }
}
impl<Fut, ResBody, E> Future for ResponseAxumBodyFuture<Fut>
where
    Fut: Future<Output = Result<Response<ResBody>, E>>,
    ResBody: HttpBody<Data = Bytes> + Send + 'static,
    <ResBody as HttpBody>::Error: Error + Send + Sync,
{
    type Output = Result<Response<Body>, E>;

    fn poll(self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // Once the inner response is ready, rewrap its body as axum's `Body`.
        match self.project().inner.poll(cx) {
            Poll::Ready(Ok(res)) => Poll::Ready(Ok(res.map(Body::new))),
            Poll::Ready(Err(err)) => Poll::Ready(Err(err)),
            Poll::Pending => Poll::Pending,
        }
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/middleware/map_request.rs | axum/src/middleware/map_request.rs | use crate::body::{Body, Bytes, HttpBody};
use crate::response::{IntoResponse, Response};
use crate::BoxError;
use axum_core::extract::{FromRequest, FromRequestParts};
use futures_core::future::BoxFuture;
use http::Request;
use std::{
any::type_name,
convert::Infallible,
fmt,
future::Future,
marker::PhantomData,
pin::Pin,
task::{Context, Poll},
};
use tower_layer::Layer;
use tower_service::Service;
/// Create a middleware from an async function that transforms a request.
///
/// This differs from [`tower::util::MapRequest`] in that it allows you to easily run axum-specific
/// extractors.
///
/// # Example
///
/// ```
/// use axum::{
/// Router,
/// routing::get,
/// middleware::map_request,
/// http::Request,
/// };
///
/// async fn set_header<B>(mut request: Request<B>) -> Request<B> {
/// request.headers_mut().insert("x-foo", "foo".parse().unwrap());
/// request
/// }
///
/// async fn handler<B>(request: Request<B>) {
/// // `request` will have an `x-foo` header
/// }
///
/// let app = Router::new()
/// .route("/", get(handler))
/// .layer(map_request(set_header));
/// # let _: Router = app;
/// ```
///
/// # Rejecting the request
///
/// The function given to `map_request` is allowed to also return a `Result` which can be used to
/// reject the request and return a response immediately, without calling the remaining
/// middleware.
///
/// Specifically the valid return types are:
///
/// - `Request<B>`
/// - `Result<Request<B>, E> where E: IntoResponse`
///
/// ```
/// use axum::{
/// Router,
/// http::{Request, StatusCode},
/// routing::get,
/// middleware::map_request,
/// };
///
/// async fn auth<B>(request: Request<B>) -> Result<Request<B>, StatusCode> {
/// let auth_header = request.headers()
/// .get(http::header::AUTHORIZATION)
/// .and_then(|header| header.to_str().ok());
///
/// match auth_header {
/// Some(auth_header) if token_is_valid(auth_header) => Ok(request),
/// _ => Err(StatusCode::UNAUTHORIZED),
/// }
/// }
///
/// fn token_is_valid(token: &str) -> bool {
/// // ...
/// # false
/// }
///
/// let app = Router::new()
/// .route("/", get(|| async { /* ... */ }))
/// .route_layer(map_request(auth));
/// # let app: Router = app;
/// ```
///
/// # Running extractors
///
/// ```
/// use axum::{
/// Router,
/// routing::get,
/// middleware::map_request,
/// extract::Path,
/// http::Request,
/// };
/// use std::collections::HashMap;
///
/// async fn log_path_params<B>(
/// Path(path_params): Path<HashMap<String, String>>,
/// request: Request<B>,
/// ) -> Request<B> {
/// tracing::debug!(?path_params);
/// request
/// }
///
/// let app = Router::new()
/// .route("/", get(|| async { /* ... */ }))
/// .layer(map_request(log_path_params));
/// # let _: Router = app;
/// ```
///
/// Note that to access state you must use [`map_request_with_state`].
pub fn map_request<F, T>(f: F) -> MapRequestLayer<F, (), T> {
map_request_with_state((), f)
}
/// Create a middleware from an async function that transforms a request, with the given state.
///
/// See [`State`](crate::extract::State) for more details about accessing state.
///
/// # Example
///
/// ```rust
/// use axum::{
/// Router,
/// http::{Request, StatusCode},
/// routing::get,
/// response::IntoResponse,
/// middleware::map_request_with_state,
/// extract::State,
/// };
///
/// #[derive(Clone)]
/// struct AppState { /* ... */ }
///
/// async fn my_middleware<B>(
/// State(state): State<AppState>,
/// // you can add more extractors here but the last
/// // extractor must implement `FromRequest` which
/// // `Request` does
/// request: Request<B>,
/// ) -> Request<B> {
/// // do something with `state` and `request`...
/// request
/// }
///
/// let state = AppState { /* ... */ };
///
/// let app = Router::new()
/// .route("/", get(|| async { /* ... */ }))
/// .route_layer(map_request_with_state(state.clone(), my_middleware))
/// .with_state(state);
/// # let _: axum::Router = app;
/// ```
pub fn map_request_with_state<F, S, T>(state: S, f: F) -> MapRequestLayer<F, S, T> {
    let _extractor = PhantomData;
    MapRequestLayer { f, state, _extractor }
}
/// A [`tower::Layer`] from an async function that transforms a request.
///
/// Created with [`map_request`]. See that function for more details.
#[must_use]
pub struct MapRequestLayer<F, S, T> {
    // The request-mapping function.
    f: F,
    // State passed to the extractors the function takes.
    state: S,
    // Marker for the extractor tuple; `fn() -> T` keeps the layer
    // Send + Sync independent of `T`.
    _extractor: PhantomData<fn() -> T>,
}
impl<F, S, T> Clone for MapRequestLayer<F, S, T>
where
F: Clone,
S: Clone,
{
fn clone(&self) -> Self {
Self {
f: self.f.clone(),
state: self.state.clone(),
_extractor: self._extractor,
}
}
}
impl<S, I, F, T> Layer<I> for MapRequestLayer<F, S, T>
where
    F: Clone,
    S: Clone,
{
    type Service = MapRequest<F, S, I, T>;

    fn layer(&self, inner: I) -> Self::Service {
        // Each wrapped service gets its own copies of the function and state.
        let f = self.f.clone();
        let state = self.state.clone();
        MapRequest {
            f,
            state,
            inner,
            _extractor: PhantomData,
        }
    }
}
impl<F, S, T> fmt::Debug for MapRequestLayer<F, S, T>
where
    S: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut s = f.debug_struct("MapRequestLayer");
        // Write out the function's type name unquoted (passing
        // `&type_name::<F>()` directly would render it with quotes).
        s.field("f", &format_args!("{}", type_name::<F>()));
        s.field("state", &self.state);
        s.finish()
    }
}
/// A middleware created from an async function that transforms a request.
///
/// Created with [`map_request`]. See that function for more details.
pub struct MapRequest<F, S, I, T> {
    // The request-mapping function.
    f: F,
    // The wrapped (inner) service.
    inner: I,
    // State passed to the extractors.
    state: S,
    // Marker for the extractor tuple type.
    _extractor: PhantomData<fn() -> T>,
}
impl<F, S, I, T> Clone for MapRequest<F, S, I, T>
where
F: Clone,
I: Clone,
S: Clone,
{
fn clone(&self) -> Self {
Self {
f: self.f.clone(),
inner: self.inner.clone(),
state: self.state.clone(),
_extractor: self._extractor,
}
}
}
// Implements `Service` for `MapRequest` for every arity of extractor tuple;
// `$ty` are the leading `FromRequestParts` extractors and `$last` is the
// single trailing `FromRequest` extractor.
macro_rules! impl_service {
    (
        [$($ty:ident),*], $last:ident
    ) => {
        #[allow(non_snake_case, unused_mut)]
        impl<F, Fut, S, I, B, $($ty,)* $last> Service<Request<B>> for MapRequest<F, S, I, ($($ty,)* $last,)>
        where
            F: FnMut($($ty,)* $last) -> Fut + Clone + Send + 'static,
            $( $ty: FromRequestParts<S> + Send, )*
            $last: FromRequest<S> + Send,
            Fut: Future + Send + 'static,
            Fut::Output: IntoMapRequestResult<B> + Send + 'static,
            I: Service<Request<B>, Error = Infallible>
                + Clone
                + Send
                + 'static,
            I::Response: IntoResponse,
            I::Future: Send + 'static,
            B: HttpBody<Data = Bytes> + Send + 'static,
            B::Error: Into<BoxError>,
            S: Clone + Send + Sync + 'static,
        {
            type Response = Response;
            type Error = Infallible;
            type Future = ResponseFuture;

            fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
                self.inner.poll_ready(cx)
            }

            fn call(&mut self, req: Request<B>) -> Self::Future {
                // Erase the body type up front so the extractors run against
                // axum's `Body`.
                let req = req.map(Body::new);

                // Move the just-polled-ready service into the future and leave
                // a fresh clone behind (standard tower "ready service" dance).
                let not_ready_inner = self.inner.clone();
                let mut ready_inner = std::mem::replace(&mut self.inner, not_ready_inner);

                let mut f = self.f.clone();
                let state = self.state.clone();
                let (mut parts, body) = req.into_parts();
                let future = Box::pin(async move {
                    // Run each `FromRequestParts` extractor in order; a
                    // rejection short-circuits with its response.
                    $(
                        let $ty = match $ty::from_request_parts(&mut parts, &state).await {
                            Ok(value) => value,
                            Err(rejection) => return rejection.into_response(),
                        };
                    )*
                    // The final extractor may consume the whole request.
                    let req = Request::from_parts(parts, body);
                    let $last = match $last::from_request(req, &state).await {
                        Ok(value) => value,
                        Err(rejection) => return rejection.into_response(),
                    };
                    // The function either yields a (possibly modified) request
                    // to forward, or a response that ends processing early.
                    match f($($ty,)* $last).await.into_map_request_result() {
                        Ok(req) => {
                            ready_inner.call(req).await.into_response()
                        }
                        Err(res) => {
                            res
                        }
                    }
                });

                ResponseFuture {
                    inner: future
                }
            }
        }
    };
}

all_the_tuples!(impl_service);
impl<F, S, I, T> fmt::Debug for MapRequest<F, S, I, T>
where
    S: fmt::Debug,
    I: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut s = f.debug_struct("MapRequest");
        // The function's type name, printed without surrounding quotes.
        s.field("f", &format_args!("{}", type_name::<F>()));
        s.field("inner", &self.inner);
        s.field("state", &self.state);
        s.finish()
    }
}
/// Response future for [`MapRequest`].
pub struct ResponseFuture {
    // Boxed future that drives extraction, the mapping function, and the
    // inner service to completion.
    inner: BoxFuture<'static, Response>,
}
impl Future for ResponseFuture {
    type Output = Result<Response, Infallible>;

    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        // The boxed future cannot fail, so its output is wrapped in `Ok`.
        match self.inner.as_mut().poll(cx) {
            Poll::Ready(response) => Poll::Ready(Ok(response)),
            Poll::Pending => Poll::Pending,
        }
    }
}
impl fmt::Debug for ResponseFuture {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The boxed future is opaque, so only the type name is printed.
        let mut s = f.debug_struct("ResponseFuture");
        s.finish()
    }
}
mod private {
    use crate::{http::Request, response::IntoResponse};

    // Sealing trait: only the impls in this file can implement
    // `IntoMapRequestResult`, since `Sealed` is not reachable from outside.
    pub trait Sealed<B> {}
    impl<B, E> Sealed<B> for Result<Request<B>, E> where E: IntoResponse {}
    impl<B> Sealed<B> for Request<B> {}
}
/// Trait implemented by types that can be returned from [`map_request`],
/// [`map_request_with_state`].
///
/// This trait is sealed such that it cannot be implemented outside this crate.
pub trait IntoMapRequestResult<B>: private::Sealed<B> {
    /// Perform the conversion.
    // `clippy::result_large_err` is silenced deliberately here rather than
    // boxing the `Response` error value.
    #[allow(clippy::result_large_err)]
    fn into_map_request_result(self) -> Result<Request<B>, Response>;
}
impl<B, E> IntoMapRequestResult<B> for Result<Request<B>, E>
where
    E: IntoResponse,
{
    fn into_map_request_result(self) -> Result<Request<B>, Response> {
        // Keep `Ok` as-is; convert any error into a response.
        match self {
            Ok(req) => Ok(req),
            Err(err) => Err(err.into_response()),
        }
    }
}
impl<B> IntoMapRequestResult<B> for Request<B> {
    // A plain `Request` is always forwarded to the inner service unchanged.
    fn into_map_request_result(self) -> Result<Self, Response> {
        Ok(self)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::{routing::get, test_helpers::TestClient, Router};
    use http::{HeaderMap, StatusCode};
    // The mapped request (with the added header) is what the handler should
    // observe.
    #[crate::test]
    async fn works() {
        async fn add_header<B>(mut req: Request<B>) -> Request<B> {
            req.headers_mut().insert("x-foo", "foo".parse().unwrap());
            req
        }
        async fn handler(headers: HeaderMap) -> Response {
            headers["x-foo"]
                .to_str()
                .unwrap()
                .to_owned()
                .into_response()
        }
        let app = Router::new()
            .route("/", get(handler))
            .layer(map_request(add_header));
        let client = TestClient::new(app);
        let res = client.get("/").await;
        assert_eq!(res.text().await, "foo");
    }
    // Returning `Err` from the mapper must short-circuit: the handler is never
    // called and the error's response is sent instead.
    // (Fixed typo in the test name: "circutting" -> "circuiting".)
    #[crate::test]
    async fn works_for_short_circuiting() {
        async fn add_header<B>(_req: Request<B>) -> Result<Request<B>, (StatusCode, &'static str)> {
            Err((StatusCode::INTERNAL_SERVER_ERROR, "something went wrong"))
        }
        async fn handler(_headers: HeaderMap) -> Response {
            unreachable!()
        }
        let app = Router::new()
            .route("/", get(handler))
            .layer(map_request(add_header));
        let client = TestClient::new(app);
        let res = client.get("/").await;
        assert_eq!(res.status(), StatusCode::INTERNAL_SERVER_ERROR);
        assert_eq!(res.text().await, "something went wrong");
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/middleware/map_response.rs | axum/src/middleware/map_response.rs | use crate::response::{IntoResponse, Response};
use axum_core::extract::FromRequestParts;
use futures_core::future::BoxFuture;
use http::Request;
use std::{
any::type_name,
convert::Infallible,
fmt,
future::Future,
marker::PhantomData,
pin::Pin,
task::{Context, Poll},
};
use tower_layer::Layer;
use tower_service::Service;
/// Create a middleware from an async function that transforms a response.
///
/// This differs from [`tower::util::MapResponse`] in that it allows you to easily run axum-specific
/// extractors.
///
/// # Example
///
/// ```
/// use axum::{
/// Router,
/// routing::get,
/// middleware::map_response,
/// response::Response,
/// };
///
/// async fn set_header<B>(mut response: Response<B>) -> Response<B> {
/// response.headers_mut().insert("x-foo", "foo".parse().unwrap());
/// response
/// }
///
/// let app = Router::new()
/// .route("/", get(|| async { /* ... */ }))
/// .layer(map_response(set_header));
/// # let _: Router = app;
/// ```
///
/// # Running extractors
///
/// It is also possible to run extractors that implement [`FromRequestParts`]. These will be run
/// before calling the handler.
///
/// ```
/// use axum::{
/// Router,
/// routing::get,
/// middleware::map_response,
/// extract::Path,
/// response::Response,
/// };
/// use std::collections::HashMap;
///
/// async fn log_path_params<B>(
/// Path(path_params): Path<HashMap<String, String>>,
/// response: Response<B>,
/// ) -> Response<B> {
/// tracing::debug!(?path_params);
/// response
/// }
///
/// let app = Router::new()
/// .route("/", get(|| async { /* ... */ }))
/// .layer(map_response(log_path_params));
/// # let _: Router = app;
/// ```
///
/// Note that to access state you must use [`map_response_with_state`].
///
/// # Returning any `impl IntoResponse`
///
/// It is also possible to return anything that implements [`IntoResponse`]
///
/// ```
/// use axum::{
/// Router,
/// routing::get,
/// middleware::map_response,
/// response::{Response, IntoResponse},
/// };
/// use std::collections::HashMap;
///
/// async fn set_header(response: Response) -> impl IntoResponse {
/// (
/// [("x-foo", "foo")],
/// response,
/// )
/// }
///
/// let app = Router::new()
/// .route("/", get(|| async { /* ... */ }))
/// .layer(map_response(set_header));
/// # let _: Router = app;
/// ```
pub fn map_response<F, T>(f: F) -> MapResponseLayer<F, (), T> {
    // Stateless variant: delegate to the stateful constructor with `()` state.
    map_response_with_state((), f)
}
/// Create a middleware from an async function that transforms a response, with the given state.
///
/// See [`State`](crate::extract::State) for more details about accessing state.
///
/// # Example
///
/// ```rust
/// use axum::{
/// Router,
/// http::StatusCode,
/// routing::get,
/// response::Response,
/// middleware::map_response_with_state,
/// extract::State,
/// };
///
/// #[derive(Clone)]
/// struct AppState { /* ... */ }
///
/// async fn my_middleware<B>(
/// State(state): State<AppState>,
/// // you can add more extractors here but they must
/// // all implement `FromRequestParts`
/// // `FromRequest` is not allowed
/// response: Response<B>,
/// ) -> Response<B> {
/// // do something with `state` and `response`...
/// response
/// }
///
/// let state = AppState { /* ... */ };
///
/// let app = Router::new()
/// .route("/", get(|| async { /* ... */ }))
/// .route_layer(map_response_with_state(state.clone(), my_middleware))
/// .with_state(state);
/// # let _: axum::Router = app;
/// ```
pub fn map_response_with_state<F, S, T>(state: S, f: F) -> MapResponseLayer<F, S, T> {
    MapResponseLayer {
        f,
        state,
        // `T` only records the extractor tuple at the type level; no value of
        // it is ever stored.
        _extractor: PhantomData,
    }
}
/// A [`tower::Layer`] from an async function that transforms a response.
///
/// Created with [`map_response`]. See that function for more details.
#[must_use]
pub struct MapResponseLayer<F, S, T> {
    // The user's async mapping function.
    f: F,
    // State handed to the `FromRequestParts` extractors in `T`.
    state: S,
    // Marker for the extractor tuple; `fn() -> T` stores no `T` value, so the
    // layer stays `Send`/`Sync` regardless of `T`.
    _extractor: PhantomData<fn() -> T>,
}
impl<F, S, T> Clone for MapResponseLayer<F, S, T>
where
F: Clone,
S: Clone,
{
fn clone(&self) -> Self {
Self {
f: self.f.clone(),
state: self.state.clone(),
_extractor: self._extractor,
}
}
}
impl<S, I, F, T> Layer<I> for MapResponseLayer<F, S, T>
where
    F: Clone,
    S: Clone,
{
    type Service = MapResponse<F, S, I, T>;
    // Wrap `inner` in the middleware, giving the new service its own clones
    // of the mapping function and state.
    fn layer(&self, inner: I) -> Self::Service {
        let f = self.f.clone();
        let state = self.state.clone();
        MapResponse {
            f,
            state,
            inner,
            _extractor: PhantomData,
        }
    }
}
impl<F, S, T> fmt::Debug for MapResponseLayer<F, S, T>
where
    S: fmt::Debug,
{
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_struct("MapResponseLayer");
        // Write out the type name, without quoting it as `&type_name::<F>()` would
        builder.field("f", &format_args!("{}", type_name::<F>()));
        builder.field("state", &self.state);
        builder.finish()
    }
}
/// A middleware created from an async function that transforms a response.
///
/// Created with [`map_response`]. See that function for more details.
pub struct MapResponse<F, S, I, T> {
    // The user's async mapping function, applied to the inner response.
    f: F,
    // The wrapped inner service whose responses get transformed.
    inner: I,
    // State handed to the `FromRequestParts` extractors in `T`.
    state: S,
    // Marker for the extractor tuple; `fn() -> T` stores no `T` value.
    _extractor: PhantomData<fn() -> T>,
}
impl<F, S, I, T> Clone for MapResponse<F, S, I, T>
where
F: Clone,
I: Clone,
S: Clone,
{
fn clone(&self) -> Self {
Self {
f: self.f.clone(),
inner: self.inner.clone(),
state: self.state.clone(),
_extractor: self._extractor,
}
}
}
// Implements `Service<Request<B>>` for `MapResponse` with one specific
// extractor-tuple arity; invoked once per arity below.
macro_rules! impl_service {
    (
        $($ty:ident),*
    ) => {
        #[allow(non_snake_case, unused_mut)]
        impl<F, Fut, S, I, B, ResBody, $($ty,)*> Service<Request<B>> for MapResponse<F, S, I, ($($ty,)*)>
        where
            F: FnMut($($ty,)* Response<ResBody>) -> Fut + Clone + Send + 'static,
            $( $ty: FromRequestParts<S> + Send, )*
            Fut: Future + Send + 'static,
            Fut::Output: IntoResponse + Send + 'static,
            I: Service<Request<B>, Response = Response<ResBody>, Error = Infallible>
                + Clone
                + Send
                + 'static,
            I::Future: Send + 'static,
            B: Send + 'static,
            ResBody: Send + 'static,
            S: Clone + Send + Sync + 'static,
        {
            type Response = Response;
            type Error = Infallible;
            type Future = ResponseFuture;
            fn poll_ready(&mut self, cx: &mut Context<'_>) -> Poll<Result<(), Self::Error>> {
                self.inner.poll_ready(cx)
            }
            fn call(&mut self, req: Request<B>) -> Self::Future {
                // Swap a fresh clone into `self` and move the instance that
                // `poll_ready` was called on into the future.
                let not_ready_inner = self.inner.clone();
                let mut ready_inner = std::mem::replace(&mut self.inner, not_ready_inner);
                let mut f = self.f.clone();
                // Underscore-prefixed: unused when the extractor tuple is empty.
                let _state = self.state.clone();
                let (mut parts, body) = req.into_parts();
                let future = Box::pin(async move {
                    // Run each `FromRequestParts` extractor in order; any
                    // rejection short-circuits with its response.
                    $(
                        let $ty = match $ty::from_request_parts(&mut parts, &_state).await {
                            Ok(value) => value,
                            Err(rejection) => return rejection.into_response(),
                        };
                    )*
                    let req = Request::from_parts(parts, body);
                    // Call the inner service first, then apply `f` to its response.
                    match ready_inner.call(req).await {
                        Ok(res) => {
                            f($($ty,)* res).await.into_response()
                        }
                        // `Infallible` has no variants; this match proves the
                        // error path is unreachable.
                        Err(err) => match err {}
                    }
                });
                ResponseFuture {
                    inner: future
                }
            }
        }
    };
}
// Generate impls for zero through sixteen `FromRequestParts` extractors.
// NOTE(review): spelled out manually rather than via `all_the_tuples!`
// (as `map_request` does) — presumably because the zero-extractor case is
// also needed here; confirm against that macro's definition.
impl_service!();
impl_service!(T1);
impl_service!(T1, T2);
impl_service!(T1, T2, T3);
impl_service!(T1, T2, T3, T4);
impl_service!(T1, T2, T3, T4, T5);
impl_service!(T1, T2, T3, T4, T5, T6);
impl_service!(T1, T2, T3, T4, T5, T6, T7);
impl_service!(T1, T2, T3, T4, T5, T6, T7, T8);
impl_service!(T1, T2, T3, T4, T5, T6, T7, T8, T9);
impl_service!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10);
impl_service!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11);
impl_service!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12);
impl_service!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13);
impl_service!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14);
impl_service!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15);
impl_service!(T1, T2, T3, T4, T5, T6, T7, T8, T9, T10, T11, T12, T13, T14, T15, T16);
impl<F, S, I, T> fmt::Debug for MapResponse<F, S, I, T>
where
    S: fmt::Debug,
    I: fmt::Debug,
{
    // `F` is typically a closure and has no `Debug` impl, so print its type
    // name instead of a value.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_struct("MapResponse");
        builder.field("f", &format_args!("{}", type_name::<F>()));
        builder.field("inner", &self.inner);
        builder.field("state", &self.state);
        builder.finish()
    }
}
/// Response future for [`MapResponse`].
pub struct ResponseFuture {
    // Boxed (type-erased) so a single future type can serve every
    // extractor-tuple arity generated by `impl_service!`.
    inner: BoxFuture<'static, Response>,
}
impl Future for ResponseFuture {
    type Output = Result<Response, Infallible>;
    // Drive the boxed inner future, wrapping its output in `Ok` to satisfy
    // the infallible `Service` contract.
    fn poll(mut self: Pin<&mut Self>, cx: &mut Context<'_>) -> Poll<Self::Output> {
        match self.inner.as_mut().poll(cx) {
            Poll::Ready(response) => Poll::Ready(Ok(response)),
            Poll::Pending => Poll::Pending,
        }
    }
}
impl fmt::Debug for ResponseFuture {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The boxed future is opaque; print just the type name, no fields.
        let mut builder = f.debug_struct("ResponseFuture");
        builder.finish()
    }
}
#[cfg(test)]
mod tests {
    #[allow(unused_imports)]
    use super::*;
    use crate::{test_helpers::TestClient, Router};
    // The header added by the mapper must appear on the final response; the
    // router has no routes, so this exercises the fallback response too.
    #[crate::test]
    async fn works() {
        async fn add_header<B>(mut res: Response<B>) -> Response<B> {
            res.headers_mut().insert("x-foo", "foo".parse().unwrap());
            res
        }
        let app = Router::new().layer(map_response(add_header));
        let client = TestClient::new(app);
        let res = client.get("/").await;
        assert_eq!(res.headers()["x-foo"], "foo");
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
tokio-rs/axum | https://github.com/tokio-rs/axum/blob/183ace306ab126f034c55d9147ea3513c8a5a139/axum/src/test_helpers/tracing_helpers.rs | axum/src/test_helpers/tracing_helpers.rs | use std::{
future::{Future, IntoFuture},
io,
marker::PhantomData,
pin::Pin,
sync::{Arc, Mutex},
};
use serde::{de::DeserializeOwned, Deserialize};
use tracing::instrument::WithSubscriber;
use tracing_subscriber::prelude::*;
use tracing_subscriber::{filter::Targets, fmt::MakeWriter};
// One JSON-formatted tracing event as captured by `capture_tracing`.
// `deny_unknown_fields` makes tests fail loudly if the emitted JSON gains
// fields the test type doesn't model.
#[derive(Deserialize, Eq, PartialEq, Debug)]
#[serde(deny_unknown_fields)]
pub(crate) struct TracingEvent<T> {
    // Event payload, deserialized into the caller-chosen type `T`.
    pub(crate) fields: T,
    pub(crate) target: String,
    pub(crate) level: String,
}
/// Run an async closure and capture the tracing output it produces.
///
/// Returns a builder; optionally call [`CaptureTracing::with_filter`] to
/// override the default `axum=trace` filter, then `.await` it to get the
/// captured events.
pub(crate) fn capture_tracing<T, F>(f: F) -> CaptureTracing<T, F>
where
    T: DeserializeOwned,
{
    CaptureTracing {
        f,
        // `None` means the default filter is applied when the future runs.
        filter: None,
        _phantom: PhantomData,
    }
}
// Builder returned by `capture_tracing`; resolves to the captured events when
// awaited via its `IntoFuture` impl.
pub(crate) struct CaptureTracing<T, F> {
    // Closure producing the future to run while capturing.
    f: F,
    // Optional override for the default `axum=trace` filter.
    filter: Option<Targets>,
    // Marker for the event-field type `T`; no `T` value is stored.
    _phantom: PhantomData<fn() -> T>,
}
impl<T, F> CaptureTracing<T, F> {
    /// Replace the default `axum=trace` filter with `filter_string`.
    ///
    /// # Panics
    ///
    /// Panics if `filter_string` is not a valid [`Targets`] filter directive.
    pub(crate) fn with_filter(mut self, filter_string: &str) -> Self {
        // `expect` instead of a bare `unwrap` so a bad filter string in a test
        // fails with a message naming the actual problem.
        self.filter = Some(
            filter_string
                .parse()
                .expect("filter string should be a valid `Targets` filter"),
        );
        self
    }
}
impl<T, F, Fut> IntoFuture for CaptureTracing<T, F>
where
    F: Fn() -> Fut + Send + Sync + 'static,
    Fut: Future + Send,
    T: DeserializeOwned,
{
    type Output = Vec<TracingEvent<T>>;
    type IntoFuture = Pin<Box<dyn Future<Output = Self::Output> + Send>>;
    fn into_future(self) -> Self::IntoFuture {
        let Self { f, filter, .. } = self;
        Box::pin(async move {
            let (make_writer, handle) = TestMakeWriter::new();
            let filter = filter.unwrap_or_else(|| "axum=trace".parse().unwrap());
            // JSON output, one event per line, with timestamps and ANSI color
            // disabled so the captured text is stable and deserializable.
            let subscriber = tracing_subscriber::registry().with(
                tracing_subscriber::fmt::layer()
                    .with_writer(make_writer)
                    .with_target(true)
                    .without_time()
                    .with_ansi(false)
                    .json()
                    .flatten_event(false)
                    .with_filter(filter),
            );
            // Install the subscriber only for the duration of `f()`; dropping
            // the guard restores the previous default before the captured
            // output is read back.
            let guard = tracing::subscriber::set_default(subscriber);
            f().with_current_subscriber().await;
            drop(guard);
            handle
                .take()
                .lines()
                .map(|line| serde_json::from_str(line).unwrap())
                .collect()
        })
    }
}
// A `MakeWriter` that appends all output to a shared in-memory buffer.
// `None` marks that the buffer has already been taken by `Handle::take`.
struct TestMakeWriter {
    write: Arc<Mutex<Option<Vec<u8>>>>,
}
impl TestMakeWriter {
    // Create a writer plus the handle later used to collect what was written.
    fn new() -> (Self, Handle) {
        let buffer = Arc::new(Mutex::new(Some(Vec::<u8>::new())));
        let writer = Self {
            write: Arc::clone(&buffer),
        };
        let handle = Handle { write: buffer };
        (writer, handle)
    }
}
impl<'a> MakeWriter<'a> for TestMakeWriter {
    type Writer = Writer<'a>;
    // Hand out a lightweight writer borrowing the shared buffer each time the
    // fmt layer needs to write.
    fn make_writer(&'a self) -> Self::Writer {
        Writer(self)
    }
}
// Borrowing writer produced by `TestMakeWriter::make_writer`.
struct Writer<'a>(&'a TestMakeWriter);
impl io::Write for Writer<'_> {
    fn write(&mut self, buf: &[u8]) -> io::Result<usize> {
        let mut guard = self.0.write.lock().unwrap();
        if let Some(vec) = guard.as_mut() {
            vec.extend_from_slice(buf);
            Ok(buf.len())
        } else {
            // `Handle::take` already consumed the buffer.
            Err(io::Error::other("inner writer has been taken"))
        }
    }
    fn flush(&mut self) -> io::Result<()> {
        // Writes go straight into the in-memory buffer; nothing to flush.
        Ok(())
    }
}
// Receiving end of the capture buffer; created by `TestMakeWriter::new`.
struct Handle {
    write: Arc<Mutex<Option<Vec<u8>>>>,
}
impl Handle {
    // Consume the handle and return everything written so far as a string.
    // Leaves `None` behind, so any later write fails with an error.
    fn take(self) -> String {
        let mut slot = self.write.lock().unwrap();
        let bytes = slot.take().unwrap();
        drop(slot);
        String::from_utf8(bytes).unwrap()
    }
}
| rust | MIT | 183ace306ab126f034c55d9147ea3513c8a5a139 | 2026-01-04T15:37:41.118512Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.