repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
rust-cli/climake | https://github.com/rust-cli/climake/blob/07a87ab9e8721d01ce6e37c3107c69b835e83ee7/src/core/utils.rs | src/core/utils.rs | //! Utility items for internal crate operation
use crate::CLI_TABBING;
use std::io::{LineWriter, Write};
/// Writes a given buffer to terminal using [LineWriter] and splits every 80
/// characters, making it ideal for concise terminal displays for help messages
pub(crate) fn writeln_term(
    to_write: impl Into<String>,
    buf: &mut impl Write,
) -> std::io::Result<()> {
    let mut line_buf = LineWriter::new(buf);
    let newline_byte = "\n".as_bytes();
    for line in to_write.into().as_bytes().chunks(80 - CLI_TABBING.len()) {
        // `write_all` instead of `write`: a plain `write` may perform a partial
        // write and silently drop the rest of the line without any error.
        // NOTE(review): chunking raw bytes can split a multi-byte UTF-8
        // character across lines — fine for ASCII help text, but confirm if
        // non-ASCII descriptions (e.g. the em-dash used in headers) land here.
        line_buf.write_all(&[CLI_TABBING.as_bytes(), line, newline_byte].concat())?;
    }
    Ok(())
}
| rust | Apache-2.0 | 07a87ab9e8721d01ce6e37c3107c69b835e83ee7 | 2026-01-04T20:21:15.127100Z | false |
rust-cli/climake | https://github.com/rust-cli/climake/blob/07a87ab9e8721d01ce6e37c3107c69b835e83ee7/src/core/mod.rs | src/core/mod.rs | //! Core components of climake, re-exported with wildcard into library root
mod argument;
mod cli_make;
mod subcommand;
mod utils;
pub use argument::Argument;
pub use cli_make::CliMake;
pub use subcommand::Subcommand;
| rust | Apache-2.0 | 07a87ab9e8721d01ce6e37c3107c69b835e83ee7 | 2026-01-04T20:21:15.127100Z | false |
rust-cli/climake | https://github.com/rust-cli/climake/blob/07a87ab9e8721d01ce6e37c3107c69b835e83ee7/src/core/cli_make/impl_help.rs | src/core/cli_make/impl_help.rs | //! Contains help implementations for [CliMake]
use super::CliMake;
use crate::core::utils::writeln_term;
use std::io::Write;
use std::env;
impl<'a> CliMake<'a> {
    /// Generates header and streams to given [Write] buffer for displaying info
    /// about this cli.
    ///
    /// Please check [CliMake::help_msg] for the full help message generation used
    /// throughout automatic execution of this cli. The `usage_suffix` input used
    /// for this method is used for [Subcommand] help where the subcommand in
    /// question would like to display itself on the end of the top usage line
    /// for the header
    ///
    /// # Example
    ///
    /// What this may display:
    ///
    /// ```none
    /// Usage: ./my-app [OPTIONS]
    ///
    /// My app v0.1.0 — A simple application
    /// ```
    pub(crate) fn header_msg(
        &self,
        usage_suffix: impl Into<Option<&'a str>>,
        buf: &mut impl Write,
    ) -> std::io::Result<()> {
        let cur_exe = env::current_exe().unwrap(); // TODO: better errors
        let cur_stem = cur_exe.file_stem().unwrap().to_str().unwrap(); // TODO: better errors
        match usage_suffix.into() {
            Some(suffix) => {
                buf.write_fmt(format_args!("Usage: ./{} {} [OPTIONS]\n", cur_stem, suffix))?
            }
            None => buf.write_fmt(format_args!("Usage: ./{} [OPTIONS]\n", cur_stem))?,
        }
        // `Option<&str>` is Copy, so no `.clone()` needed to match on it.
        match self.description {
            Some(d) => {
                // `write_all` rather than `write`: `write` may report a partial
                // write without erroring, dropping bytes.
                buf.write_all("\n".as_bytes())?; // write formatting empty byte
                writeln_term(
                    match &self.version {
                        Some(v) => format!("{} v{} — {}", self.name, v, d),
                        None => format!("{} — {}", self.name, d),
                    },
                    buf,
                )
            }
            None => Ok(()),
        }
    }
    /// Displays help information for climake which is used inside the execution
    /// of the cli
    ///
    /// # Help sources
    ///
    /// This method gets sections of messaging such as the header from various
    /// *public*-available methods inside of this library:
    ///
    /// - [CliMake::header_msg]: Header generation for help message and errors
    /// - [Argument::help_name_msg]: Help generation for single [Argument]s
    ///
    /// # Example
    ///
    /// What this may look like:
    ///
    /// ```none
    /// Usage: ./my-app [OPTIONS]
    ///
    /// My app v0.1.0 — A simple application
    ///
    /// Arguments:
    ///   (-v, --verbose) — Verbose mode
    /// ```
    pub(crate) fn help_msg(&self, buf: &mut impl Write) -> std::io::Result<()> {
        self.header_msg(None, buf)?;
        buf.write_all("\nArguments:\n".as_bytes())?;
        if !self.arguments.is_empty() {
            for argument in self.arguments.iter() {
                argument.help_name_msg(buf)?;
            }
        } else {
            buf.write_all("  No arguments found\n".as_bytes())?;
        }
        buf.write_all("\nSubcommands:\n".as_bytes())?;
        if !self.subcommands.is_empty() {
            for subcommand in self.subcommands.iter() {
                subcommand.help_name_msg(buf)?;
            }
        } else {
            buf.write_all("  No subcommands found\n".as_bytes())?;
        }
        Ok(())
    }
}
| rust | Apache-2.0 | 07a87ab9e8721d01ce6e37c3107c69b835e83ee7 | 2026-01-04T20:21:15.127100Z | false |
rust-cli/climake | https://github.com/rust-cli/climake/blob/07a87ab9e8721d01ce6e37c3107c69b835e83ee7/src/core/cli_make/mod.rs | src/core/cli_make/mod.rs | //! Contains [CliMake]-related items, see specific documentation for more information
mod impl_basic;
mod impl_help;
mod impl_parse;
pub use impl_basic::*;
pub use impl_help::*;
pub use impl_parse::*;
use crate::{Argument, Subcommand};
/// The core climake structure, facilitating creation and parsing of both arguments
/// and subcommands
#[derive(Debug, PartialEq, Clone)]
pub struct CliMake<'a> {
    /// Name of the program using the cli
    name: &'a str,
    /// Internal [Argument]s stored inside the cli once created/added to;
    /// held by reference, so arguments must outlive this struct
    arguments: Vec<&'a Argument<'a>>,
    /// Internal [Subcommand]s stored inside the cli once created/added to;
    /// held by reference, so subcommands must outlive this struct
    subcommands: Vec<&'a Subcommand<'a>>,
    /// Optional short description of the program using the cli
    description: Option<&'a str>,
    /// Optional version string of the program using the cli
    ///
    /// # Crate version
    ///
    /// If you would like this value to automatically update with your crates version,
    /// you may use a variation of the following function:
    ///
    /// ```rust
    /// pub fn crate_version() -> String {
    /// format!(
    /// "{}.{}.{}{}",
    /// env!("CARGO_PKG_VERSION_MAJOR"),
    /// env!("CARGO_PKG_VERSION_MINOR"),
    /// env!("CARGO_PKG_VERSION_PATCH"),
    /// option_env!("CARGO_PKG_VERSION_PRE").unwrap_or("")
    /// )
    /// }
    /// ```
    version: Option<&'a str>,
    /// Internal/private tabbing to use, defaults to [CLI_TABBING](crate::CLI_TABBING);
    /// overridable via [CliMake::tabbing]
    tabbing: &'static str,
}
| rust | Apache-2.0 | 07a87ab9e8721d01ce6e37c3107c69b835e83ee7 | 2026-01-04T20:21:15.127100Z | false |
rust-cli/climake | https://github.com/rust-cli/climake/blob/07a87ab9e8721d01ce6e37c3107c69b835e83ee7/src/core/cli_make/impl_basic.rs | src/core/cli_make/impl_basic.rs | //! Contains basic implementations for [CliMake]
use super::CliMake;
use crate::{Argument, Subcommand, CLI_TABBING};
impl<'a> CliMake<'a> {
    /// Creates a new [CliMake] from given passed values
    ///
    /// (Doc fix: previously said "Creates a new [Argument]", which described
    /// the wrong type.)
    pub fn new(
        name: impl Into<&'a str>,
        arguments: impl Into<Vec<&'a Argument<'a>>>,
        subcommands: impl Into<Vec<&'a Subcommand<'a>>>,
        description: impl Into<Option<&'a str>>,
        version: impl Into<Option<&'a str>>,
    ) -> Self {
        CliMake {
            name: name.into(),
            arguments: arguments.into(),
            subcommands: subcommands.into(),
            description: description.into(),
            version: version.into(),
            // Default tabbing; overridable later via [CliMake::tabbing]
            tabbing: CLI_TABBING,
        }
    }
    /// Adds a single argument to this root [CliMake], chainable
    pub fn add_arg(&mut self, argument: impl Into<&'a Argument<'a>>) -> &mut Self {
        self.arguments.push(argument.into());
        self
    }
    /// Adds multiple arguments to this root [CliMake], chainable
    pub fn add_args(&mut self, arguments: impl IntoIterator<Item = &'a Argument<'a>>) -> &mut Self {
        for arg in arguments.into_iter() {
            self.add_arg(arg);
        }
        self
    }
    /// Adds a single subcommand to this root [CliMake], chainable
    pub fn add_subcmd(&mut self, subcommand: impl Into<&'a Subcommand<'a>>) -> &mut Self {
        self.subcommands.push(subcommand.into());
        self
    }
    /// Adds multiple subcommands to this root [CliMake], chainable
    pub fn add_subcmds(
        &mut self,
        subcommands: impl IntoIterator<Item = &'a Subcommand<'a>>,
    ) -> &mut Self {
        for subcommand in subcommands.into_iter() {
            self.add_subcmd(subcommand);
        }
        self
    }
    /// Sets the tabbing characters for cli help, the default for this is 2 spaces,
    /// i.e. ` `.
    pub fn tabbing(&mut self, tab_chars: &'static str) -> &mut Self {
        self.tabbing = tab_chars;
        self
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::io::Input;
    // NOTE: the cli stores *references* to arguments/subcommands, so the same
    // item may legitimately appear multiple times in the internal vectors.
    /// Checks that the [CliMake::add_arg] method works correctly
    #[test]
    fn cli_add_arg() {
        let mut cli = CliMake::new("example", vec![], vec![], "Add arg check", None);
        let arg = Argument::new("arg help", vec![], vec![], Input::None);
        cli.add_arg(&arg).add_arg(&arg);
        assert_eq!(cli.arguments, vec![&arg, &arg])
    }
    /// Checks that the [CliMake::add_args] method works correctly
    #[test]
    fn cli_add_args() {
        let mut cli = CliMake::new("example", vec![], vec![], "Add arg check", None);
        let arg = Argument::new("arg help", vec![], vec![], Input::None);
        cli.add_args(vec![&arg, &arg]).add_args(vec![&arg, &arg]);
        assert_eq!(cli.arguments, vec![&arg, &arg, &arg, &arg])
    }
    /// Checks that the [CliMake::add_subcmds] method works correctly
    #[test]
    fn cli_add_subcmds() {
        let mut cli = CliMake::new("example", vec![], vec![], "Add arg check", None);
        let subcmd = Subcommand::new("example", vec![], vec![], None);
        cli.add_subcmds(vec![&subcmd, &subcmd])
            .add_subcmds(vec![&subcmd, &subcmd]);
        assert_eq!(cli.subcommands, vec![&subcmd, &subcmd, &subcmd, &subcmd])
    }
    /// Checks that the [CliMake::add_subcmd] method works correctly
    #[test]
    fn cli_add_subcmd() {
        let mut cli = CliMake::new("example", vec![], vec![], "Add arg check", None);
        let subcmd = Subcommand::new("example", vec![], vec![], None);
        cli.add_subcmd(&subcmd).add_subcmd(&subcmd);
        assert_eq!(cli.subcommands, vec![&subcmd, &subcmd])
    }
}
| rust | Apache-2.0 | 07a87ab9e8721d01ce6e37c3107c69b835e83ee7 | 2026-01-04T20:21:15.127100Z | false |
rust-cli/climake | https://github.com/rust-cli/climake/blob/07a87ab9e8721d01ce6e37c3107c69b835e83ee7/src/core/cli_make/impl_parse.rs | src/core/cli_make/impl_parse.rs | //! Contains parsing implementations for [CliMake]
use super::{Argument, CliMake, Subcommand};
use crate::core::argument::CallType;
use crate::parsed::{ParsedArgument, ParsedCli, ParsedSubcommand};
use std::env;
/// Container enumeration for [crate::parsed]-related structs to be sent up the
/// chain from [match_next] recursive parsing
// NOTE(review): neither variant is constructed yet — parsing is still being
// implemented (see the TODOs in [match_next_subcommand]).
enum ParsedLayer<'a> {
    /// A parsed argument found at this layer
    ParsedArgument(ParsedArgument<'a>),
    /// A parsed subcommand found at this layer
    ParsedSubcommand(ParsedSubcommand<'a>),
}
/// Internal error enum representing instances of user-facing errors whilst parsing
/// (i.e. due to bad user input). These should be converted into strings and shown
/// to the user as directly as possible
enum ParseError {
    /// When a given subcommand which is being parsed in [match_next_subcommand]
    /// could not be found; carries the unmatched input token verbatim
    SubcommandNotFound(String),
}
/// Recurses down from an initial empty [ParsedSubcommand] to fill it in. This
/// is used as the main "entrypoint" to parsing
///
/// Consumes tokens from `inputs` until exhausted: `-`-prefixed tokens are
/// treated as arguments (handling not yet implemented, currently dropped) and
/// every other token is matched as a nested subcommand, recursing one level
/// per match so deeper subcommands consume the remaining tokens.
fn match_next_subcommand<'a>(
    inputs: &mut impl Iterator<Item = String>,
    mut parsed_subcommand: ParsedSubcommand<'a>,
) -> Result<ParsedSubcommand<'a>, ParseError> {
    loop {
        let next_input = inputs.next();
        match next_input {
            Some(input) => {
                if input.starts_with('-') {
                    // argument matched
                    // match find_argument(input, parsed_subcommand.inner.arguments) {
                    // TODO
                    // }
                } else {
                    // subcommand matched
                    match find_subcommand(&input, &parsed_subcommand.inner.subcommands) {
                        Some(subcommand) => parsed_subcommand.subcommands.push(
                            match_next_subcommand(inputs, ParsedSubcommand::new_empty(subcommand))?,
                        ), // found subcommand, parse and add to `subcommands`
                        None => return Err(ParseError::SubcommandNotFound(input)), // subcommand was not found
                    }
                }
            }
            None => break, // inputs exhausted; parsing for this layer complete
        }
    }
    Ok(parsed_subcommand)
}
/// Finds `name`'d argument(s) in the passed vector of [Argument]s
///
/// NOTE(review): matching is unimplemented; currently only the long-call
/// (`--`) branch is sketched out and always panics when entered with a
/// non-empty argument list.
fn find_argument<'a>(call: impl AsRef<str>, arguments: Vec<&'a Argument<'a>>) -> Vec<&'a Argument<'a>> {
    let mut found_arguments = vec![]; // arg output vec
    // `starts_with` instead of slicing `[..2]`: the old byte-slice panicked on
    // calls shorter than two bytes (e.g. a bare "-").
    if call.as_ref().starts_with("--") {
        // long call matched
        let call_match = &call.as_ref()[2..];
        for argument in arguments.iter() {
            unimplemented!()
        }
    }
    found_arguments
}
/// Finds `name`'d subcommand in the passed vector of `subcommands`
///
/// Returns the first subcommand whose `name` matches exactly, or [None].
fn find_subcommand<'a>(name: impl AsRef<str>, subcommands: &Vec<&'a Subcommand>) -> Option<&'a Subcommand<'a>> {
    subcommands
        .iter()
        .find(|subcommand| subcommand.name == name.as_ref())
        .copied()
}
impl<'a> CliMake<'a> {
    /// Parses all arguments from a custom iterator, see [CliMake::parse] for
    /// default parsing from [env::args]
    // TODO(review): stub — will presumably drive [match_next_subcommand] once
    // parsing is implemented.
    pub fn parse_custom(&'a self, inputs: impl IntoIterator<Item = String>) -> ParsedCli<'a> {
        unimplemented!()
    }
    /// Parses default arguments coming from [env::args]
    ///
    /// NOTE(review): the first element of [env::args] is conventionally the
    /// executable path and is currently forwarded to [CliMake::parse_custom]
    /// unchanged — confirm it gets skipped there once implemented.
    pub fn parse(&'a self) -> ParsedCli<'a> {
        self.parse_custom(env::args())
    }
}
| rust | Apache-2.0 | 07a87ab9e8721d01ce6e37c3107c69b835e83ee7 | 2026-01-04T20:21:15.127100Z | false |
rust-cli/climake | https://github.com/rust-cli/climake/blob/07a87ab9e8721d01ce6e37c3107c69b835e83ee7/examples/simple.rs | examples/simple.rs | //! Demo of a simple package manager
use climake::prelude::*;
fn main() {
    // Shared argument accepting the package name as free text, callable as
    // `-p`/`-i` or `--pkg`/`--package`.
    let package = Argument::new(
        "The package name",
        vec!['p', 'i'],
        // Fixed: was `vec!["pkg, package"]` — a single long call containing a
        // comma, which made `--package` unmatchable. These are two calls.
        vec!["pkg", "package"],
        Input::Text,
    );
    let add = Subcommand::new("add", vec![&package], vec![], "Adds a package");
    let rem = Subcommand::new("rem", vec![&package], vec![], "Removes a package");
    let cli = CliMake::new(
        "MyPkg",
        vec![],
        vec![&add, &rem],
        "A simple package manager demo",
        "1.0.0",
    );
    let parsed = cli.parse();
    // Dispatch on which known subcommand was matched, by reference comparison
    // against the definitions above.
    for subcommand in parsed.subcommands {
        if subcommand.inner == &add {
            println!("Adding package {:?}..", subcommand.arguments[0]);
        } else if subcommand.inner == &rem {
            println!("Removing package {:?}..", subcommand.arguments[0]);
        }
    }
}
| rust | Apache-2.0 | 07a87ab9e8721d01ce6e37c3107c69b835e83ee7 | 2026-01-04T20:21:15.127100Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/build.rs | src-tauri/build.rs | fn main() {
tauri_build::build()
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/lib.rs | src-tauri/src/lib.rs | //! Library code for the Jute application.
#![deny(unsafe_code)]
#![warn(missing_docs)]
use std::io;
pub mod backend;
pub mod commands;
pub mod entity;
pub mod menu;
pub mod plugins;
pub mod state;
pub mod window;
/// A serializable error type for application errors.
///
/// Serialization flattens every variant to its `Display` string (see the
/// `serde::Serialize` impl in this file) so errors can cross the Tauri IPC
/// boundary to the frontend.
#[derive(Debug, thiserror::Error)]
pub enum Error {
    /// An error occurred while starting or managing a subprocess.
    #[error("failed to run subprocess: {0}")]
    Subprocess(io::Error),
    /// Could not connect to the kernel.
    #[error("could not connect to the kernel: {0}")]
    KernelConnect(String),
    /// Disconnected while communicating with a kernel.
    #[error("disconnected from the kernel")]
    KernelDisconnect,
    /// Could not find the kernel.
    #[error("kernel not found")]
    KernelNotFound,
    /// Could not find the kernel process.
    #[error("kernel process not found")]
    KernelProcessNotFound,
    /// An invalid URL was provided or constructed.
    #[error("invalid URL: {0}")]
    InvalidUrl(#[from] url::ParseError),
    /// HTTP error from reqwest while making a request.
    #[error("HTTP failure: {0}")]
    ReqwestError(#[from] reqwest::Error),
    /// Error while deserializing a message.
    #[error("could not deserialize message: {0}")]
    DeserializeMessage(String),
    /// Error originating from ZeroMQ.
    #[error("zeromq: {0}")]
    Zmq(#[from] zeromq::ZmqError),
    /// Error originating from serde_json.
    #[error("serde_json error: {0}")]
    SerdeJson(#[from] serde_json::error::Error),
    /// Error interacting with the filesystem.
    #[error("filesystem error: {0}")]
    Filesystem(io::Error),
    /// Error returned directly from Tauri.
    #[error("tauri error: {0}")]
    Tauri(#[from] tauri::Error),
    /// Error while interacting with the shell plugin.
    #[error("shell plugin error: {0}")]
    PluginShell(#[from] tauri_plugin_shell::Error),
}
impl serde::Serialize for Error {
    /// Serializes the error as its human-readable `Display` message, which is
    /// what the frontend receives over IPC.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::ser::Serializer,
    {
        let message = self.to_string();
        serializer.serialize_str(&message)
    }
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/entity.rs | src-tauri/src/entity.rs | //! Types for generating and representing unique, labelled IDs.
use std::{array, fmt, str::FromStr};
use anyhow::bail;
use rand::Rng;
use serde_with::{DeserializeFromStr, SerializeDisplay};
use strum::{EnumIter, IntoEnumIterator};
/// Entity category for generated IDs.
///
/// Each variant maps to a unique string prefix (see [Entity::id_prefix]) used
/// both when rendering and when parsing [EntityId]s.
#[derive(Debug, Clone, Copy, PartialEq, Eq, EnumIter)]
pub enum Entity {
    /// Python virtual environments created by Jute.
    Venv,
}
impl Entity {
    /// Get the prefix for IDs generated for this entity.
    ///
    /// Prefixes must be unique across variants: [EntityId::from_str] tries
    /// each variant's prefix in turn to recover the entity kind.
    pub const fn id_prefix(&self) -> &'static str {
        match self {
            Entity::Venv => "ve-",
        }
    }
}
/// An entity ID generated for a specific category of object.
///
/// Serialized via its `Display` form (`<prefix><random chars>`) and
/// deserialized via `FromStr`, per the serde_with derives below.
#[derive(Clone, Copy, PartialEq, Eq, SerializeDisplay, DeserializeFromStr)]
pub struct EntityId {
    /// Which kind of entity this ID represents.
    pub kind: Entity,
    /// The unique ID for this entity.
    // Raw ASCII bytes from the generation charset, not arbitrary binary data.
    id: [u8; Self::ID_LENGTH],
}
impl EntityId {
    /// The length of the random portion of the ID in characters.
    pub const ID_LENGTH: usize = 12;
    /// Generates a new random ID of the given entity `kind`.
    ///
    /// (Doc fix: previously said "Get the ID as a string", which described
    /// neither the behavior nor the return type.)
    pub fn new(kind: Entity) -> Self {
        // Sample 12 characters, each one of 36 possibilities (lowercase letters and
        // numbers).
        let charset = b"0123456789abcdefghijklmnopqrstuvwxyz";
        let mut rng = rand::thread_rng();
        let id = array::from_fn(|_| charset[rng.gen_range(0..charset.len())]);
        EntityId { kind, id }
    }
}
impl fmt::Display for EntityId {
    /// Renders the ID as `<kind prefix><id chars>`, e.g. `ve-abc123def456`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // The id bytes are ASCII by construction (generation charset), so the
        // lossy conversion never actually replaces anything.
        f.write_str(self.kind.id_prefix())?;
        f.write_str(&String::from_utf8_lossy(&self.id))
    }
}
impl fmt::Debug for EntityId {
    // Renders as `EntityId(ve-abc123...)` — the same prefix+id text as
    // `Display`, wrapped in a tuple for diagnostics.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.debug_tuple("EntityId")
            .field(&format_args!(
                "{}{}",
                self.kind.id_prefix(),
                String::from_utf8_lossy(&self.id)
            ))
            .finish()
    }
}
impl FromStr for EntityId {
    type Err = anyhow::Error;

    /// Parses `<prefix><id>` back into an [EntityId], trying every known
    /// entity prefix; the id portion must be exactly [EntityId::ID_LENGTH]
    /// bytes long.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        for kind in Entity::iter() {
            // Skip kinds whose prefix doesn't match; first match wins.
            let rest = match s.strip_prefix(kind.id_prefix()) {
                Some(rest) => rest,
                None => continue,
            };
            // Fixed-size conversion enforces the ID_LENGTH invariant.
            return match rest.as_bytes().try_into() {
                Ok(id) => Ok(EntityId { kind, id }),
                Err(_) => bail!("invalid entity ID length {s}"),
            };
        }
        bail!("invalid entity prefix {s}")
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_entity_id() {
        let id = EntityId::new(Entity::Venv);
        // 12 random chars + 3-char "ve-" prefix; byte and char counts agree
        // because the generated id is pure ASCII.
        assert_eq!(id.to_string().len(), 12 + 3);
        assert_eq!(id.to_string().chars().count(), 12 + 3);
        assert!(id.to_string().starts_with("ve-"));
        assert_eq!(id, id);
        assert_ne!(id, EntityId::new(Entity::Venv));
    }
    #[test]
    fn test_entity_id_parse() {
        // Display -> FromStr must round-trip losslessly.
        let id = EntityId::new(Entity::Venv);
        let parsed = id.to_string().parse::<EntityId>().unwrap();
        assert_eq!(id, parsed);
    }
    #[test]
    fn test_entity_id_validation() {
        // 10-char id portion: too short, must be rejected.
        let parsed = "ve-1234567890".parse::<EntityId>();
        assert!(parsed.is_err());
        let parsed = "ve-1234567890ab".parse::<EntityId>();
        assert!(parsed.is_ok());
        let parsed = parsed.unwrap();
        assert_eq!(parsed.kind, Entity::Venv);
        assert_eq!(parsed.to_string(), "ve-1234567890ab");
    }
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/backend.rs | src-tauri/src/backend.rs | //! Backend for Jute, connecting to local Jupyter kernels or remote servers.
//!
//! The local and remote kernels have a shared wire protocol, so that lives
//! outside either folder.
pub use wire_protocol::{create_websocket_connection, create_zeromq_connection, KernelConnection};
pub mod commands;
pub mod local;
pub mod notebook;
pub mod remote;
pub mod wire_protocol;
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/state.rs | src-tauri/src/state.rs | //! Defines state and stores for the Tauri application.
use dashmap::DashMap;
use crate::backend::local::LocalKernel;
/// State for the running Tauri application.
#[derive(Default)]
pub struct State {
    /// Current kernels running in the application.
    // Keyed by the kernel id string (see `commands::start_kernel`, which
    // inserts under `kernel.id()`); DashMap allows concurrent command access.
    pub kernels: DashMap<String, LocalKernel>,
}
impl State {
    /// Create a new state object.
    ///
    /// Equivalent to [State::default]: starts with no running kernels.
    pub fn new() -> Self {
        Default::default()
    }
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/menu.rs | src-tauri/src/menu.rs | //! Sets up the native window menu with file, about, and other commands.
//!
//! Menus in Tauri v2 are attached to each window for Windows and Linux, but
//! they are global for macOS. Each item in the menu has a `MenuId` string
//! attached to it, and it emits a `MenuEvent` when clicked.
//!
//! There is no way to associate a `MenuEvent` with a specific window other than
//! creating separate menus for each window with a different UUID. This is
//! awkward, so we'll instead take the simpler approach of iterating through all
//! windows of the app and finding the focused one.
//!
//! Spacedrive has a good example of using the Menu API.
//! <https://github.com/spacedriveapp/spacedrive/blob/0.4.3/apps/desktop/src-tauri/src/menu.rs>
use serde::{Deserialize, Serialize};
use tauri::{
menu::{
AboutMetadata, Menu, MenuBuilder, MenuItemBuilder, PredefinedMenuItem, SubmenuBuilder,
HELP_SUBMENU_ID, WINDOW_SUBMENU_ID,
},
AppHandle, Runtime,
};
use tauri_plugin_dialog::{DialogExt, MessageDialogKind};
use tauri_plugin_opener::OpenerExt;
use tracing::warn;
use ts_rs::TS;
/// The events that can be emitted as menu IDs.
///
/// The strum derives make each variant round-trip through its name string,
/// which is how `MenuId`s are matched back to events in [setup_menu].
#[derive(
    Debug,
    Clone,
    Copy,
    Deserialize,
    Serialize,
    TS,
    strum::EnumString,
    strum::AsRefStr,
    strum::Display,
)]
pub enum MenuEvent {
    /// Open a notebook file.
    OpenFile,
    /// Open the issue tracker URL.
    ReportIssue,
}
/// Set up the menu for application windows.
///
/// This code was modified from the original source of [`Menu::default`],
/// customizing that menu to add new buttons.
pub fn setup_menu<R: Runtime>(app: &AppHandle<R>) -> tauri::Result<Menu<R>> {
    // Menu IDs are the stringified [MenuEvent] variants (via strum), so
    // unknown IDs — e.g. predefined items — parse as Err and are ignored here.
    app.on_menu_event(move |app, event| {
        let Ok(event) = event.id().as_ref().parse::<MenuEvent>() else {
            warn!("unknown menu event: {:?}", event.id());
            return;
        };
        match event {
            MenuEvent::OpenFile => {
                // Clone the handle so the async file-picker callback can own it.
                let app = app.clone();
                app.dialog()
                    .file()
                    .add_filter("Jupyter Notebook", &["ipynb"])
                    .pick_file(move |path| {
                        if let Some(path) = path {
                            match path.into_path() {
                                Ok(path) => {
                                    // Best-effort: a failed window open is ignored.
                                    _ = crate::window::open_notebook_path(&app, &path);
                                }
                                Err(err) => {
                                    app.dialog()
                                        .message(err.to_string())
                                        .kind(MessageDialogKind::Error)
                                        .show(|_| {});
                                }
                            }
                        }
                    });
            }
            MenuEvent::ReportIssue => {
                _ = app
                    .opener()
                    .open_url("https://github.com/ekzhang/jute/issues", None::<&str>);
            }
        }
    });
    let pkg_info = app.package_info();
    let config = app.config();
    // `about_metadata` is consumed exactly once: by the app-name submenu on
    // macOS, or by the Help submenu elsewhere (the cfgs are mutually exclusive).
    let about_metadata = AboutMetadata {
        name: Some(pkg_info.name.clone()),
        version: Some(pkg_info.version.to_string()),
        copyright: config.bundle.copyright.clone(),
        authors: config.bundle.publisher.clone().map(|p| vec![p]),
        icon: app.default_window_icon().cloned(),
        website: Some("https://github.com/ekzhang/jute".into()),
        website_label: Some("github.com/ekzhang/jute".into()),
        ..Default::default()
    };
    #[allow(unused_mut)]
    let mut menu = MenuBuilder::new(app);
    // App name submenu, only for macOS ("Jute").
    #[cfg(target_os = "macos")]
    {
        let app_menu = SubmenuBuilder::new(app, pkg_info.name.clone())
            .about(Some(about_metadata))
            .separator()
            .services()
            .separator()
            .hide()
            .hide_others()
            .separator()
            .quit()
            .build()?;
        menu = menu.item(&app_menu);
    }
    // File submenu.
    let file_menu = SubmenuBuilder::new(app, "File")
        .item(
            &MenuItemBuilder::with_id(MenuEvent::OpenFile, "Open File…")
                .accelerator("CmdOrCtrl+O")
                .build(app)?,
        )
        .items(&[
            // From the default menu: seems like this is not supported on Linux.
            #[cfg(not(any(
                target_os = "linux",
                target_os = "dragonfly",
                target_os = "freebsd",
                target_os = "netbsd",
                target_os = "openbsd"
            )))]
            &PredefinedMenuItem::close_window(app, None)?,
            // This is already in a different menu for macOS.
            #[cfg(not(target_os = "macos"))]
            &PredefinedMenuItem::quit(app, None)?,
        ])
        .build()?;
    // Edit submenu.
    let edit_menu = SubmenuBuilder::new(app, "Edit")
        .undo()
        .redo()
        .separator()
        .cut()
        .copy()
        .paste()
        .select_all()
        .build()?;
    let view_menu = SubmenuBuilder::new(app, "View")
        .items(&[
            #[cfg(target_os = "macos")]
            &PredefinedMenuItem::fullscreen(app, None)?,
        ])
        .build()?;
    let window_menu = SubmenuBuilder::with_id(app, WINDOW_SUBMENU_ID, "Window")
        .minimize()
        .maximize()
        .separator()
        .close_window()
        .build()?;
    let help_menu = SubmenuBuilder::with_id(app, HELP_SUBMENU_ID, "Help")
        .items(&[
            #[cfg(not(target_os = "macos"))]
            &PredefinedMenuItem::about(app, None, Some(about_metadata))?,
        ])
        .text(MenuEvent::ReportIssue, "Report Issue")
        .build()?;
    let menu = menu
        .item(&file_menu)
        .item(&edit_menu)
        .item(&view_menu)
        .item(&window_menu)
        .item(&help_menu)
        .build()?;
    Ok(menu)
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/commands.rs | src-tauri/src/commands.rs | //! Invoke handlers for commands callable from the frontend.
use std::env;
use sysinfo::{Pid, System};
use tauri::ipc::Channel;
use tracing::info;
use crate::{
backend::{
commands::{self, RunCellEvent},
local::{environment, KernelUsageInfo, LocalKernel},
notebook::NotebookRoot,
},
state::State,
Error,
};
pub mod venv;
/// Measure the kernel's CPU and memory usage as a percentage of total system
/// resources.
#[tauri::command]
pub async fn kernel_usage_info(
    kernel_id: &str,
    state: tauri::State<'_, State>,
) -> Result<KernelUsageInfo, Error> {
    // find the pid from _state.kernels
    let kernel = state.kernels.get(kernel_id).ok_or(Error::KernelNotFound)?;
    let pid: Pid = Pid::from_u32(kernel.pid().ok_or(Error::KernelProcessNotFound)?);
    let mut system = System::new_all();
    system.refresh_all();
    // CPU usage is a delta measurement: one refresh, a short sampling window,
    // then a second refresh of the target process before reading.
    tokio::time::sleep(std::time::Duration::from_millis(100)).await;
    system.refresh_process(pid);
    if let Some(process) = system.process(pid) {
        // cpu_usage is a percentage; cpu_available reports the core count.
        let cpu_total = system.cpus().len();
        let cpu_used = process.cpu_usage();
        // NOTE(review): sysinfo's memory units changed between versions
        // (KB vs bytes) — the `_kb` suffixes here are an assumption; confirm
        // against the pinned sysinfo version.
        let total_memory_kb = system.total_memory();
        let process_memory_kb = process.memory();
        Ok(KernelUsageInfo {
            cpu_consumed: cpu_used,
            cpu_available: cpu_total as f32,
            memory_consumed: process_memory_kb as f32,
            memory_available: total_memory_kb as f32,
        })
    } else {
        Err(Error::KernelProcessNotFound)
    }
}
/// Start a new Jupyter kernel.
///
/// Returns the new kernel's id, which the frontend passes back to the other
/// kernel commands; the kernel handle itself is stored in [State::kernels].
#[tauri::command]
pub async fn start_kernel(
    spec_name: &str,
    state: tauri::State<'_, State>,
) -> Result<String, Error> {
    // TODO: Save the client in a better place.
    // let client = JupyterClient::new("", "")?;
    // Temporary hack to just start a kernel locally with ZeroMQ.
    let kernels = environment::list_kernels(None).await;
    // Match on the kernelspec directory name (file stem of the spec path),
    // not the kernel's display name.
    let mut kernel_spec = match kernels
        .iter()
        .find(|(path, _spec)| path.file_name().and_then(|s| s.to_str()) == Some(spec_name))
    {
        Some((_, kernel_spec)) => kernel_spec.clone(),
        None => {
            return Err(Error::KernelConnect(format!(
                "no kernel named {spec_name:?} found"
            )))
        }
    };
    if kernel_spec.argv[0] == "python" {
        // Resolve a bare "python" argv to a concrete interpreter path.
        if let Ok(python_path) = env::var("PYTHON_PATH") {
            kernel_spec.argv[0] = python_path;
        } else {
            // Temporary hack
            kernel_spec.argv[0] = "/opt/homebrew/bin/python3.11".into();
        }
    }
    let kernel = LocalKernel::start(&kernel_spec).await?;
    // Round-trip a kernel_info request to confirm the kernel is responsive
    // before reporting success to the frontend.
    let info = commands::kernel_info(kernel.conn()).await?;
    info!(banner = info.banner, "started new jute kernel");
    let kernel_id = String::from(kernel.id());
    state.kernels.insert(kernel_id.clone(), kernel);
    Ok(kernel_id)
}
/// Stop a Jupyter kernel.
#[tauri::command]
pub async fn stop_kernel(kernel_id: &str, state: tauri::State<'_, State>) -> Result<(), Error> {
info!("stopping jute kernel {kernel_id}");
let (_, mut kernel) = state
.kernels
.remove(kernel_id)
.ok_or(Error::KernelDisconnect)?;
kernel.kill().await?;
Ok(())
}
/// Get the contents of a Jupyter notebook on disk.
///
/// Reads the `.ipynb` file at `path` and deserializes its JSON contents.
#[tauri::command]
pub async fn get_notebook(path: &str) -> Result<NotebookRoot, Error> {
    info!("getting notebook at {path}");
    let raw = tokio::fs::read_to_string(path)
        .await
        .map_err(Error::Filesystem)?;
    let notebook = serde_json::from_str(&raw)?;
    Ok(notebook)
}
/// Run a code cell in a Jupyter kernel.
///
/// Streams execution events back to the frontend over `on_event` until the
/// kernel-side channel closes or the frontend stops listening.
#[tauri::command]
pub async fn run_cell(
    kernel_id: &str,
    code: &str,
    on_event: Channel<RunCellEvent>,
    state: tauri::State<'_, State>,
) -> Result<(), Error> {
    // Clone the connection handle so the map guard is released before the
    // long-running receive loop below.
    let conn = state
        .kernels
        .get(kernel_id)
        .ok_or(Error::KernelDisconnect)?
        .conn()
        .clone();
    let rx = commands::run_cell(&conn, code).await?;
    while let Ok(event) = rx.recv().await {
        if on_event.send(event).is_err() {
            break; // frontend channel closed; stop forwarding events
        }
    }
    Ok(())
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/window.rs | src-tauri/src/window.rs | //! Shared code to open windows in Jute and notebooks.
use std::path::Path;
use anyhow::Context;
use tauri::{AppHandle, Manager, Runtime, WebviewWindow, WebviewWindowBuilder};
use uuid::Uuid;
/// Initializes window size, min width, and other common settings on the
/// builder.
pub fn initialize_builder<'a, R: Runtime, M: Manager<R>>(
    manager: &'a M,
    path: &str,
) -> WebviewWindowBuilder<'a, R, M> {
    // Generate a unique window label since duplicates are not allowed.
    let label = format!("jute-window-{}", Uuid::new_v4());
    // Leading slash is stripped before building the app URL — presumably
    // WebviewUrl::App expects an app-relative path; confirm against tauri docs.
    let url = tauri::WebviewUrl::App(path.trim_start_matches('/').into());
    #[allow(unused_mut)]
    let mut builder = WebviewWindowBuilder::new(manager, &label, url)
        .title("Jute")
        .inner_size(960.0, 800.0)
        .min_inner_size(720.0, 600.0)
        .fullscreen(false)
        .resizable(true);
    #[cfg(target_os = "macos")]
    {
        // These methods are only available on macOS.
        builder = builder.title_bar_style(tauri::TitleBarStyle::Overlay);
        builder = builder.hidden_title(true);
    }
    builder
}
/// Opens a window with the home page.
pub fn open_home<R: Runtime>(app: &AppHandle<R>) -> tauri::Result<WebviewWindow<R>> {
    // The root route ("/") renders the home page in a fresh webview window.
    initialize_builder(app, "/").build()
}
/// Opens a window with the notebook file at the given path.
pub fn open_notebook_path<R: Runtime>(
    app: &AppHandle<R>,
    file: &Path,
) -> tauri::Result<WebviewWindow<R>> {
    // URL-encode the filesystem path so it survives the query-string round-trip
    // (spaces, unicode, etc.).
    let query = serde_urlencoded::to_string([("path", file.to_string_lossy())])
        .context("could not encode path")?;
    initialize_builder(app, &format!("/notebook?{query}")).build()
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/plugins.rs | src-tauri/src/plugins.rs | //! Module containing custom plugins for the Tauri app.
#[cfg(target_os = "macos")]
pub mod macos_traffic_lights;
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/main.rs | src-tauri/src/main.rs | // Prevents additional console window on Windows in release, DO NOT REMOVE!!
#![cfg_attr(not(debug_assertions), windows_subsystem = "windows")]
use std::{env, path::PathBuf};
use jute::state::State;
use tauri::AppHandle;
#[allow(unused_imports)]
use tauri::Manager;
/// Handle file associations opened in the application.
///
/// Jute registers itself as a handler for `.ipynb` (Jupyter Notebook) files,
/// so this runs when the user opens a notebook file with Jute. Depending on
/// the operating system, the files arrive either as process arguments
/// (`argv`) or via a [`tauri::RunEvent::Opened`] event; paths may come in
/// plain or `file://` URL form, and each file gets its own window.
///
/// Adapted from the [`file-associations`] example in the Tauri repository.
///
/// [`file-associations`]: https://github.com/tauri-apps/tauri/blob/tauri-v2.2.0/examples/file-associations/src-tauri/src/main.rs
fn handle_file_associations(
    app: &AppHandle,
    files: &[PathBuf],
) -> Result<(), Box<dyn std::error::Error>> {
    // Stop at the first window that fails to open and bubble the error up.
    files
        .iter()
        .try_for_each(|file| -> Result<(), Box<dyn std::error::Error>> {
            jute::window::open_notebook_path(app, file)?;
            Ok(())
        })
}
fn main() {
    // Initialize structured logging before anything else can emit events.
    tracing_subscriber::fmt().init();
    // `mut` is only needed on macOS, where an extra plugin is attached below.
    #[allow(unused_mut)]
    let mut app = tauri::Builder::default();
    #[cfg(target_os = "macos")]
    {
        // Repositions the native traffic-light buttons on every window.
        app = app.plugin(jute::plugins::macos_traffic_lights::init());
    }
    app.manage(State::new())
        .plugin(tauri_plugin_dialog::init())
        .plugin(tauri_plugin_fs::init())
        .plugin(tauri_plugin_opener::init())
        .plugin(tauri_plugin_shell::init())
        // Commands callable from the frontend via `invoke`.
        .invoke_handler(tauri::generate_handler![
            jute::commands::kernel_usage_info,
            jute::commands::start_kernel,
            jute::commands::stop_kernel,
            jute::commands::run_cell,
            jute::commands::get_notebook,
            jute::commands::venv::venv_list_python_versions,
            jute::commands::venv::venv_create,
            jute::commands::venv::venv_list,
            jute::commands::venv::venv_delete,
        ])
        .setup(|app| {
            // Parse files that were opened via CLI arguments (Windows + Linux).
            // macOS delivers opened files through RunEvent::Opened instead.
            if cfg!(any(windows, target_os = "linux")) {
                let mut files = Vec::new();
                for maybe_file in env::args().skip(1) {
                    // Skip flags like -f or --flag
                    if maybe_file.starts_with('-') {
                        continue;
                    }
                    // Handle `file://` path URLs and skip other URLs.
                    if let Ok(url) = url::Url::parse(&maybe_file) {
                        if url.scheme() == "file" {
                            if let Ok(path) = url.to_file_path() {
                                files.push(path);
                            }
                        }
                    } else {
                        // Not URL-shaped at all: treat it as a plain path.
                        files.push(PathBuf::from(maybe_file));
                    }
                }
                if files.is_empty() {
                    // Open a default window if no files were provided (this is if you opened the
                    // app in the launcher, for instance).
                    jute::window::open_home(app.handle())?;
                } else {
                    handle_file_associations(app.handle(), &files)?;
                }
            }
            Ok(())
        })
        .menu(jute::menu::setup_menu)
        .build(tauri::generate_context!())
        .expect("error while running tauri application")
        .run(
            #[allow(unused_variables)]
            |app, event| {
                // Handle files opened in macOS.
                #[cfg(target_os = "macos")]
                match event {
                    tauri::RunEvent::Opened { urls } => {
                        // Drop any URL that is not a local file path.
                        let files = urls
                            .into_iter()
                            .filter_map(|url| url.to_file_path().ok())
                            .collect::<Vec<_>>();
                        handle_file_associations(app, &files).unwrap();
                    }
                    tauri::RunEvent::Ready => {
                        // If no files were opened, open a default window.
                        if app.webview_windows().is_empty() {
                            jute::window::open_home(app).unwrap();
                        }
                    }
                    _ => {}
                }
            },
        );
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/commands/venv.rs | src-tauri/src/commands/venv.rs | //! Commands for the management of local virtual environments with `uv`.
use std::io;
use ini::Ini;
use serde::Serialize;
use tauri::{AppHandle, Manager};
use tauri_plugin_shell::ShellExt;
use tracing::{error, info};
use crate::{
entity::{Entity, EntityId},
Error,
};
/// Return a list of Python versions that can be used to create a virtual
/// environment.
#[tauri::command]
pub async fn venv_list_python_versions(app: AppHandle) -> Result<Vec<String>, Error> {
let output = app
.shell()
.sidecar("uv")?
.args(["--color", "never"])
.args(["python", "list", "--all-versions"])
.args(["--python-preference", "only-managed"])
.output()
.await?;
if output.status.success() {
let mut versions = Vec::new();
for line in String::from_utf8_lossy(&output.stdout).lines() {
if let Some(version_string) = line.split_whitespace().next() {
// Some versions are prefixed with `pypy-`, ignore those for now.
if let Some(stripped) = version_string.strip_prefix("cpython-") {
let version_number = match stripped.find("-") {
Some(index) => &stripped[..index],
None => stripped,
};
versions.push(version_number.to_string());
}
}
}
Ok(versions)
} else {
let message = String::from_utf8_lossy(&output.stderr);
Err(Error::Subprocess(io::Error::new(
io::ErrorKind::Other,
message.trim(),
)))
}
}
/// Create a new virtual environment, and return its ID.
///
/// The environment is created under the app data directory
/// (`<app-data>/venv/<id>`) with a uv-managed CPython of the requested
/// version, then seeded with the packages Jute needs (`ipykernel`, `black`,
/// `basedpyright`). If package installation fails, the half-built
/// environment is removed again before the error is returned.
#[tauri::command]
pub async fn venv_create(python_version: &str, app: AppHandle) -> Result<EntityId, Error> {
    let venv_id = EntityId::new(Entity::Venv);
    let venv_path = app
        .path()
        .app_data_dir()?
        .join("venv")
        .join(venv_id.to_string());

    // `--seed` installs pip/setuptools; `--relocatable` lets the directory be
    // moved without breaking the interpreter shims.
    let output = app
        .shell()
        .sidecar("uv")?
        .args(["--color", "never"])
        .args(["venv", "--no-project", "--seed", "--relocatable"])
        .args([
            "--python",
            python_version,
            "--python-preference",
            "only-managed",
        ])
        .arg(&venv_path)
        .output()
        .await?;
    if !output.status.success() {
        let message = String::from_utf8_lossy(&output.stderr);
        return Err(Error::Subprocess(io::Error::new(
            io::ErrorKind::Other,
            message.trim(),
        )));
    }
    info!("created venv at {venv_path:?}");

    // The venv interpreter lives at `Scripts\python.exe` on Windows and at
    // `bin/python` on Unix-like systems; the previous hard-coded `bin/python`
    // broke package installation on Windows.
    let venv_python_path = if cfg!(windows) {
        venv_path.join("Scripts").join("python.exe")
    } else {
        venv_path.join("bin").join("python")
    };

    let packages = ["ipykernel", "black", "basedpyright"];
    let output = app
        .shell()
        .sidecar("uv")?
        .args(["--color", "never"])
        .args(["pip", "install"])
        .arg("--python")
        .arg(&venv_python_path)
        .args(packages)
        .output()
        .await?;
    if !output.status.success() {
        // Best-effort cleanup: don't leave a broken environment behind.
        error!("failed to install packages in venv, will remove");
        _ = tokio::fs::remove_dir_all(&venv_path).await;
        let message = String::from_utf8_lossy(&output.stderr);
        return Err(Error::Subprocess(io::Error::new(
            io::ErrorKind::Other,
            message.trim(),
        )));
    }
    Ok(venv_id)
}
/// List item returned by [`venv_list`].
#[derive(Serialize, Debug)]
pub struct VenvListItem {
    /// Identifier of the environment (also its directory name on disk).
    venv_id: EntityId,
    /// `version_info` from `pyvenv.cfg`, if the file was readable.
    python_version: Option<String>,
    /// `uv` version recorded in `pyvenv.cfg`, if present.
    uv_version: Option<String>,
    /// Python `implementation` recorded in `pyvenv.cfg`, if present.
    implementation: Option<String>,
    /// `home` (base interpreter location) from `pyvenv.cfg`, if present.
    home: Option<String>,
}
/// Return a list of virtual environments managed by Jute.
#[tauri::command]
pub async fn venv_list(app: AppHandle) -> Result<Vec<VenvListItem>, Error> {
let venv_dir = app.path().app_data_dir()?.join("venv");
let mut venvs = Vec::new();
let mut it = match tokio::fs::read_dir(venv_dir).await {
Ok(it) => it,
Err(err) if err.kind() == io::ErrorKind::NotFound => return Ok(venvs),
Err(err) => return Err(Error::Filesystem(err)),
};
while let Some(entry) = it.next_entry().await.map_err(Error::Filesystem)? {
if entry.file_type().await.is_ok_and(|f| f.is_dir()) {
if let Ok(venv_id) = entry.file_name().into_string() {
if let Ok(venv_id) = venv_id.parse::<EntityId>() {
// Read the venv metadata file to get the Python version.
let metadata_path = entry.path().join("pyvenv.cfg");
let mut python_version = None;
let mut uv_version = None;
let mut implementation = None;
let mut home = None;
if let Ok(metadata) = tokio::fs::read_to_string(&metadata_path).await {
if let Ok(conf) = Ini::load_from_str(&metadata) {
let sec = conf.general_section();
python_version = sec.get("version_info").map(String::from);
uv_version = sec.get("uv").map(String::from);
implementation = sec.get("implementation").map(String::from);
home = sec.get("home").map(String::from);
}
}
venvs.push(VenvListItem {
venv_id,
python_version,
uv_version,
implementation,
home,
});
}
}
}
}
Ok(venvs)
}
/// Delete a virtual environment by ID.
#[tauri::command]
pub async fn venv_delete(venv_id: EntityId, app: AppHandle) -> Result<bool, Error> {
let venv_dir = app.path().app_data_dir()?.join("venv");
let venv_path = venv_dir.join(venv_id.to_string());
if tokio::fs::metadata(&venv_path).await.is_ok() {
tokio::fs::remove_dir_all(&venv_path)
.await
.map_err(Error::Filesystem)?;
Ok(true)
} else {
Ok(false)
}
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/plugins/macos_traffic_lights.rs | src-tauri/src/plugins/macos_traffic_lights.rs | //! Traffic light positioner for Tauri, making inset controls on macOS.
//!
//! This plugin is sourced from a [Gist], and it uses the macOS native APIs to
//! position the standard window buttons on the top-left of the screen at a
//! different offset padding.
//!
//! [Gist]: https://gist.github.com/charrondev/43150e940bd2771b1ea88256d491c7a9/4304f71664f71d301e85fcd783cde6ccb38a5498
#![allow(unsafe_code)]
use objc::{msg_send, sel, sel_impl};
use rand::{distributions::Alphanumeric, Rng};
use tauri::{
plugin::{Builder, TauriPlugin},
Runtime, Window,
}; // 0.8
// This produces equal 13px padding on the left and top of the window controls.
// The Figma prototype this is based on has 14px positioning, but this includes
// 1px of inset window border.
//
// I'm not sure why the numbers are (12, 17) if the actual positioning is 13. It
// works on macOS Sequoia. May need to be adjusted for other macOS versions.
/// Horizontal inset (points) applied to the close button; the other two
/// buttons keep their native spacing relative to it.
const WINDOW_CONTROL_PAD_X: f64 = 12.0;
/// Vertical padding (points) added to the button height to size the
/// title-bar container.
const WINDOW_CONTROL_PAD_Y: f64 = 17.0;
/// Raw `NSWindow` pointer wrapper so the handle can be moved into closures
/// and across threads.
struct UnsafeWindowHandle(*mut std::ffi::c_void);
// SAFETY: the wrapper only makes the raw pointer movable; the pointer itself
// is consumed by AppKit calls in `position_traffic_lights`.
// NOTE(review): AppKit is main-thread-only — these impls assume every
// dereference happens on the main thread; confirm all call sites.
unsafe impl Send for UnsafeWindowHandle {}
unsafe impl Sync for UnsafeWindowHandle {}
/// Initialize a plugin for setting traffic light positions.
pub fn init<R: Runtime>() -> TauriPlugin<R> {
Builder::new("traffic_light_positioner")
.on_window_ready(|window| {
#[cfg(target_os = "macos")]
setup_traffic_light_positioner(window);
})
.build()
}
/// Move the three standard window buttons (close/minimize/zoom) to an
/// (`x`, `y`) inset from the window's top-left corner, resizing their
/// title-bar container to match.
#[cfg(target_os = "macos")]
fn position_traffic_lights(ns_window_handle: UnsafeWindowHandle, x: f64, y: f64) {
    use cocoa::appkit::{NSView, NSWindow, NSWindowButton};
    use cocoa::foundation::NSRect;
    let ns_window = ns_window_handle.0 as cocoa::base::id;
    // SAFETY: all calls below are Objective-C messages to AppKit objects
    // obtained from this window; the handle must be a live NSWindow.
    unsafe {
        let close = ns_window.standardWindowButton_(NSWindowButton::NSWindowCloseButton);
        let miniaturize =
            ns_window.standardWindowButton_(NSWindowButton::NSWindowMiniaturizeButton);
        let zoom = ns_window.standardWindowButton_(NSWindowButton::NSWindowZoomButton);
        // The buttons live two levels below the title-bar container view.
        let title_bar_container_view = close.superview().superview();
        let close_rect: NSRect = msg_send![close, frame];
        let button_height = close_rect.size.height;
        // Grow the title bar so the buttons fit with `y` padding, keeping it
        // anchored to the top of the window (AppKit origin is bottom-left).
        let title_bar_frame_height = button_height + y;
        let mut title_bar_rect = NSView::frame(title_bar_container_view);
        title_bar_rect.size.height = title_bar_frame_height;
        title_bar_rect.origin.y = NSView::frame(ns_window).size.height - title_bar_frame_height;
        let _: () = msg_send![title_bar_container_view, setFrame: title_bar_rect];
        // Re-space the buttons horizontally, preserving their native gap.
        let window_buttons = vec![close, miniaturize, zoom];
        let space_between = NSView::frame(miniaturize).origin.x - NSView::frame(close).origin.x;
        for (i, button) in window_buttons.into_iter().enumerate() {
            let mut rect: NSRect = NSView::frame(button);
            rect.origin.x = x + (i as f64 * space_between);
            button.setFrameOrigin(rect.origin);
        }
    }
}
/// State smuggled into the custom NSWindowDelegate (via the `app_box` ivar)
/// so delegate callbacks can reach back to the Tauri window.
#[cfg(target_os = "macos")]
#[derive(Debug)]
struct WindowState<R: Runtime> {
    window: Window<R>,
}
/// Set up the traffic light positioner plugin.
///
/// Positions the window controls once, then installs a custom
/// `NSWindowDelegate` that re-applies the positioning whenever the window is
/// resized or exits full screen. Every delegate callback forwards to the
/// previously-installed delegate (`super_delegate`) so Tauri's own window
/// handling keeps working.
#[cfg(target_os = "macos")]
pub fn setup_traffic_light_positioner<R: Runtime>(window: Window<R>) {
    use std::ffi::c_void;
    use cocoa::appkit::NSWindow;
    use cocoa::base::{id, BOOL};
    use cocoa::delegate;
    use cocoa::foundation::NSUInteger;
    use objc::runtime::{Object, Sel};
    use tauri::Emitter;
    // Do the initial positioning
    position_traffic_lights(
        UnsafeWindowHandle(window.ns_window().expect("Failed to create window handle")),
        WINDOW_CONTROL_PAD_X,
        WINDOW_CONTROL_PAD_Y,
    );
    // Ensure they stay in place while resizing the window.
    // Recovers the `WindowState` stored in the delegate's `app_box` ivar and
    // hands a mutable reference to `func`.
    fn with_window_state<R: Runtime, F: FnOnce(&mut WindowState<R>) -> T, T>(
        this: &Object,
        func: F,
    ) {
        let ptr = unsafe {
            let x: *mut c_void = *this.get_ivar("app_box");
            &mut *(x as *mut WindowState<R>)
        };
        func(ptr);
    }
    // SAFETY: everything below manipulates Objective-C objects belonging to
    // this window; it must run on the main thread (AppKit requirement).
    unsafe {
        let ns_win = window
            .ns_window()
            .expect("NS Window should exist to mount traffic light delegate.")
            as id;
        let current_delegate: id = ns_win.delegate();
        // The following extern fns are the delegate method implementations.
        // Each one forwards to `super_delegate`; a few also re-position the
        // buttons or emit events to the frontend.
        extern "C" fn on_window_should_close(this: &Object, _cmd: Sel, sender: id) -> BOOL {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                msg_send![super_del, windowShouldClose: sender]
            }
        }
        extern "C" fn on_window_will_close(this: &Object, _cmd: Sel, notification: id) {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![super_del, windowWillClose: notification];
            }
        }
        extern "C" fn on_window_did_resize<R: Runtime>(this: &Object, _cmd: Sel, notification: id) {
            unsafe {
                // Resizing rebuilds the title bar, so re-apply the offsets.
                with_window_state(this, |state: &mut WindowState<R>| {
                    let id = state
                        .window
                        .ns_window()
                        .expect("NS window should exist on state to handle resize")
                        as id;
                    #[cfg(target_os = "macos")]
                    position_traffic_lights(
                        UnsafeWindowHandle(id as *mut std::ffi::c_void),
                        WINDOW_CONTROL_PAD_X,
                        WINDOW_CONTROL_PAD_Y,
                    );
                });
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![super_del, windowDidResize: notification];
            }
        }
        extern "C" fn on_window_did_move(this: &Object, _cmd: Sel, notification: id) {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![super_del, windowDidMove: notification];
            }
        }
        extern "C" fn on_window_did_change_backing_properties(
            this: &Object,
            _cmd: Sel,
            notification: id,
        ) {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![super_del, windowDidChangeBackingProperties: notification];
            }
        }
        extern "C" fn on_window_did_become_key(this: &Object, _cmd: Sel, notification: id) {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![super_del, windowDidBecomeKey: notification];
            }
        }
        extern "C" fn on_window_did_resign_key(this: &Object, _cmd: Sel, notification: id) {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![super_del, windowDidResignKey: notification];
            }
        }
        extern "C" fn on_dragging_entered(this: &Object, _cmd: Sel, notification: id) -> BOOL {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                msg_send![super_del, draggingEntered: notification]
            }
        }
        extern "C" fn on_prepare_for_drag_operation(
            this: &Object,
            _cmd: Sel,
            notification: id,
        ) -> BOOL {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                msg_send![super_del, prepareForDragOperation: notification]
            }
        }
        extern "C" fn on_perform_drag_operation(this: &Object, _cmd: Sel, sender: id) -> BOOL {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                msg_send![super_del, performDragOperation: sender]
            }
        }
        extern "C" fn on_conclude_drag_operation(this: &Object, _cmd: Sel, notification: id) {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![super_del, concludeDragOperation: notification];
            }
        }
        extern "C" fn on_dragging_exited(this: &Object, _cmd: Sel, notification: id) {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![super_del, draggingExited: notification];
            }
        }
        extern "C" fn on_window_will_use_full_screen_presentation_options(
            this: &Object,
            _cmd: Sel,
            window: id,
            proposed_options: NSUInteger,
        ) -> NSUInteger {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                msg_send![super_del, window: window willUseFullScreenPresentationOptions: proposed_options]
            }
        }
        extern "C" fn on_window_did_enter_full_screen<R: Runtime>(
            this: &Object,
            _cmd: Sel,
            notification: id,
        ) {
            unsafe {
                // Let the frontend react (e.g. hide the custom inset UI).
                with_window_state(this, |state: &mut WindowState<R>| {
                    state
                        .window
                        .emit("did-enter-fullscreen", ())
                        .expect("Failed to emit event");
                });
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![super_del, windowDidEnterFullScreen: notification];
            }
        }
        extern "C" fn on_window_will_enter_full_screen<R: Runtime>(
            this: &Object,
            _cmd: Sel,
            notification: id,
        ) {
            unsafe {
                with_window_state(this, |state: &mut WindowState<R>| {
                    state
                        .window
                        .emit("will-enter-fullscreen", ())
                        .expect("Failed to emit event");
                });
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![super_del, windowWillEnterFullScreen: notification];
            }
        }
        extern "C" fn on_window_did_exit_full_screen<R: Runtime>(
            this: &Object,
            _cmd: Sel,
            notification: id,
        ) {
            unsafe {
                // Leaving full screen restores the title bar, so the buttons
                // must be re-positioned in addition to notifying the frontend.
                with_window_state(this, |state: &mut WindowState<R>| {
                    state
                        .window
                        .emit("did-exit-fullscreen", ())
                        .expect("Failed to emit event");
                    let id = state.window.ns_window().expect("Failed to emit event") as id;
                    position_traffic_lights(
                        UnsafeWindowHandle(id as *mut std::ffi::c_void),
                        WINDOW_CONTROL_PAD_X,
                        WINDOW_CONTROL_PAD_Y,
                    );
                });
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![super_del, windowDidExitFullScreen: notification];
            }
        }
        extern "C" fn on_window_will_exit_full_screen<R: Runtime>(
            this: &Object,
            _cmd: Sel,
            notification: id,
        ) {
            unsafe {
                with_window_state(this, |state: &mut WindowState<R>| {
                    state
                        .window
                        .emit("will-exit-fullscreen", ())
                        .expect("Failed to emit event");
                });
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![super_del, windowWillExitFullScreen: notification];
            }
        }
        extern "C" fn on_window_did_fail_to_enter_full_screen(
            this: &Object,
            _cmd: Sel,
            window: id,
        ) {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![super_del, windowDidFailToEnterFullScreen: window];
            }
        }
        extern "C" fn on_effective_appearance_did_change(
            this: &Object,
            _cmd: Sel,
            notification: id,
        ) {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![super_del, effectiveAppearanceDidChange: notification];
            }
        }
        extern "C" fn on_effective_appearance_did_changed_on_main_thread(
            this: &Object,
            _cmd: Sel,
            notification: id,
        ) {
            unsafe {
                let super_del: id = *this.get_ivar("super_delegate");
                let _: () = msg_send![
                    super_del,
                    effectiveAppearanceDidChangedOnMainThread: notification
                ];
            }
        }
        // Are we deallocing this properly ? (I miss safe Rust :( )
        // NOTE(review): `app_box` is leaked into the delegate and never
        // freed; acceptable if the delegate lives as long as the window, but
        // worth confirming for windows that are created/destroyed repeatedly.
        let window_label = window.label().to_string();
        let app_state = WindowState { window };
        let app_box = Box::into_raw(Box::new(app_state)) as *mut c_void;
        let random_str: String = rand::thread_rng()
            .sample_iter(&Alphanumeric)
            .take(20)
            .map(char::from)
            .collect();
        // We need to ensure we have a unique delegate name, otherwise we will panic
        // while trying to create a duplicate delegate with the same name.
        let delegate_name = format!("windowDelegate_{}_{}", window_label, random_str);
        ns_win.setDelegate_(delegate!(&delegate_name, {
            window: id = ns_win,
            app_box: *mut c_void = app_box,
            toolbar: id = cocoa::base::nil,
            super_delegate: id = current_delegate,
            (windowShouldClose:) => on_window_should_close as extern fn(&Object, Sel, id) -> BOOL,
            (windowWillClose:) => on_window_will_close as extern fn(&Object, Sel, id),
            (windowDidResize:) => on_window_did_resize::<R> as extern fn(&Object, Sel, id),
            (windowDidMove:) => on_window_did_move as extern fn(&Object, Sel, id),
            (windowDidChangeBackingProperties:) => on_window_did_change_backing_properties as extern fn(&Object, Sel, id),
            (windowDidBecomeKey:) => on_window_did_become_key as extern fn(&Object, Sel, id),
            (windowDidResignKey:) => on_window_did_resign_key as extern fn(&Object, Sel, id),
            (draggingEntered:) => on_dragging_entered as extern fn(&Object, Sel, id) -> BOOL,
            (prepareForDragOperation:) => on_prepare_for_drag_operation as extern fn(&Object, Sel, id) -> BOOL,
            (performDragOperation:) => on_perform_drag_operation as extern fn(&Object, Sel, id) -> BOOL,
            (concludeDragOperation:) => on_conclude_drag_operation as extern fn(&Object, Sel, id),
            (draggingExited:) => on_dragging_exited as extern fn(&Object, Sel, id),
            (window:willUseFullScreenPresentationOptions:) => on_window_will_use_full_screen_presentation_options as extern fn(&Object, Sel, id, NSUInteger) -> NSUInteger,
            (windowDidEnterFullScreen:) => on_window_did_enter_full_screen::<R> as extern fn(&Object, Sel, id),
            (windowWillEnterFullScreen:) => on_window_will_enter_full_screen::<R> as extern fn(&Object, Sel, id),
            (windowDidExitFullScreen:) => on_window_did_exit_full_screen::<R> as extern fn(&Object, Sel, id),
            (windowWillExitFullScreen:) => on_window_will_exit_full_screen::<R> as extern fn(&Object, Sel, id),
            (windowDidFailToEnterFullScreen:) => on_window_did_fail_to_enter_full_screen as extern fn(&Object, Sel, id),
            (effectiveAppearanceDidChange:) => on_effective_appearance_did_change as extern fn(&Object, Sel, id),
            (effectiveAppearanceDidChangedOnMainThread:) => on_effective_appearance_did_changed_on_main_thread as extern fn(&Object, Sel, id)
        }))
    }
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/backend/notebook.rs | src-tauri/src/backend/notebook.rs | //! JSON schema for the Jupyter notebook `.ipynb` file format and Jute's
//! extensions.
//!
//! This file is based on the official [nbformat v4].
//!
//! [nbformat v4]: https://github.com/jupyter/nbformat/blob/v5.10.4/nbformat/v4/nbformat.v4.schema.json
use std::collections::BTreeMap;
use serde::{Deserialize, Serialize};
use serde_json::{Map, Value};
use ts_rs::TS;
/// Represents the root structure of a Jupyter Notebook file.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct NotebookRoot {
    /// Root-level metadata of the notebook.
    pub metadata: NotebookMetadata,
    /// Notebook format (minor number). Incremented for backward-compatible
    /// changes.
    pub nbformat_minor: u8,
    /// Notebook format (major number). Incremented for incompatible changes.
    pub nbformat: u8,
    /// Array of cells in the notebook.
    pub cells: Vec<Cell>,
}
/// Root-level metadata for the notebook.
///
/// Field names here are part of the nbformat wire format — do not rename.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct NotebookMetadata {
    /// Kernel information.
    #[ts(optional)]
    pub kernelspec: Option<KernelSpec>,
    /// Programming language information.
    #[ts(optional)]
    pub language_info: Option<LanguageInfo>,
    /// Original notebook format before conversion.
    #[ts(optional)]
    pub orig_nbformat: Option<u8>,
    /// Title of the notebook document.
    #[ts(optional)]
    pub title: Option<String>,
    /// Authors of the notebook document.
    #[ts(optional)]
    pub authors: Option<Vec<Author>>,
    /// Additional unrecognized attributes in metadata.
    ///
    /// Captured via `flatten` so unknown keys survive a load/save round
    /// trip; `#[ts(skip)]` keeps them out of the generated TS types.
    #[serde(flatten)]
    #[ts(skip)]
    pub other: Map<String, Value>,
}
/// Kernel specification metadata.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct KernelSpec {
    /// Name of the kernel specification.
    pub name: String,
    /// Display name of the kernel.
    pub display_name: String,
    /// Additional unrecognized attributes in kernel specification.
    #[serde(flatten)]
    #[ts(skip)]
    pub other: Map<String, Value>,
}
/// Programming language information.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct LanguageInfo {
    /// Programming language name.
    pub name: String,
    /// CodeMirror mode to use for the language.
    #[ts(optional)]
    pub codemirror_mode: Option<CodeMirrorMode>,
    /// File extension for files in this language.
    #[ts(optional)]
    pub file_extension: Option<String>,
    /// MIME type for files in this language.
    #[ts(optional)]
    pub mimetype: Option<String>,
    /// Pygments lexer for syntax highlighting.
    #[ts(optional)]
    pub pygments_lexer: Option<String>,
    /// Additional unrecognized attributes in language information.
    #[serde(flatten)]
    #[ts(skip)]
    pub other: Map<String, Value>,
}
/// Represents the CodeMirror mode, which could be a string or a nested object.
///
/// `untagged`: whichever JSON shape matches first is used on deserialize.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
#[serde(untagged)]
pub enum CodeMirrorMode {
    /// String representation of the CodeMirror mode.
    String(String),
    /// Nested object representation of the CodeMirror mode.
    Object(BTreeMap<String, Value>),
}
/// Author information for the notebook document.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct Author {
    /// Name of the author.
    #[ts(optional)]
    pub name: Option<String>,
    /// Additional unrecognized attributes for authors.
    #[serde(flatten)]
    #[ts(skip)]
    pub other: Map<String, Value>,
}
/// Represents a notebook cell, which can be a raw, markdown, or code cell.
///
/// Serialized with the nbformat discriminator field `"cell_type"` (values
/// `"raw"`, `"markdown"`, `"code"`).
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
#[serde(tag = "cell_type", rename_all = "snake_case")]
pub enum Cell {
    /// Raw cell type.
    Raw(RawCell),
    /// Markdown cell type.
    Markdown(MarkdownCell),
    /// Code cell type.
    Code(CodeCell),
}
/// Raw cell in the notebook.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct RawCell {
    /// Identifier of the cell.
    #[ts(optional)]
    pub id: Option<String>,
    /// Metadata for the cell.
    pub metadata: CellMetadata,
    /// Content of the cell.
    pub source: MultilineString,
    /// Attachments (e.g., images) in the cell.
    #[ts(optional)]
    pub attachments: Option<CellAttachments>,
}
/// Markdown cell in the notebook.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct MarkdownCell {
    /// Identifier of the cell.
    #[ts(optional)]
    pub id: Option<String>,
    /// Metadata for the cell.
    pub metadata: CellMetadata,
    /// Content of the cell.
    pub source: MultilineString,
    /// Attachments (e.g., images) in the cell.
    #[ts(optional)]
    pub attachments: Option<CellAttachments>,
}
/// Code cell in the notebook.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct CodeCell {
    /// Identifier of the cell.
    #[ts(optional)]
    pub id: Option<String>,
    /// Metadata for the cell.
    pub metadata: CellMetadata,
    /// Content of the cell.
    pub source: MultilineString,
    /// Execution count of the cell (null if not executed).
    pub execution_count: Option<u32>,
    /// Outputs from executing the cell.
    pub outputs: Vec<Output>,
}
/// Metadata for a cell.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct CellMetadata {
    /// Additional unrecognized attributes in cell metadata.
    ///
    /// Kept so unknown keys survive a load/save round trip.
    #[serde(flatten)]
    #[ts(skip)]
    pub other: Map<String, Value>,
}
/// Attachments for a cell, represented as MIME bundles keyed by filenames.
pub type CellAttachments = BTreeMap<String, MimeBundle>;
/// MIME bundle for representing various types of data.
pub type MimeBundle = BTreeMap<String, Value>;
/// Represents a string or array of strings (multiline).
///
/// nbformat allows either JSON shape for `source`/`text`; `untagged` accepts
/// both on deserialize.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
#[serde(untagged)]
pub enum MultilineString {
    /// Single-line string.
    Single(String),
    /// Multi-line array of strings.
    Multi(Vec<String>),
}
impl From<MultilineString> for String {
    /// Collapse either representation into one owned string.
    fn from(m: MultilineString) -> Self {
        match m {
            MultilineString::Single(text) => text,
            // A one-element vector can give up its only string without copying.
            MultilineString::Multi(mut parts) if parts.len() == 1 => parts.pop().unwrap(),
            MultilineString::Multi(parts) => parts.concat(),
        }
    }
}
impl MultilineString {
    /// Convert a string to a multiline string, mimicking Jupyter.
    ///
    /// Usually, we could just use `MultilineString::Single`, but Jupyter's
    /// behavior is to always return an array, so we respect that. It also
    /// breaks strings after newline characters.
    pub fn normalize(&self) -> Self {
        // Flatten to a single `&str`; a joined copy is only made for `Multi`.
        let joined;
        let text: &str = match self {
            MultilineString::Single(s) => s,
            MultilineString::Multi(parts) => {
                joined = parts.concat();
                &joined
            }
        };
        // `split_inclusive` keeps the trailing `\n` on each segment and
        // yields nothing for the empty string — exactly Jupyter's format.
        let lines = text.split_inclusive('\n').map(str::to_owned).collect();
        MultilineString::Multi(lines)
    }
}
/// Output from executing a code cell.
///
/// Serialized with the nbformat discriminator field `"output_type"`.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
#[serde(tag = "output_type", rename_all = "snake_case")]
pub enum Output {
    /// Execution result output.
    ExecuteResult(OutputExecuteResult),
    /// Display data output.
    DisplayData(OutputDisplayData),
    /// Stream output.
    Stream(OutputStream),
    /// Error output.
    Error(OutputError),
}
/// Result of executing a code cell.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct OutputExecuteResult {
    /// Execution count of the result.
    pub execution_count: Option<u32>,
    /// Data returned by the execution.
    pub data: MimeBundle,
    /// Metadata associated with the result.
    pub metadata: OutputMetadata,
    /// Additional unrecognized attributes in execution results.
    #[serde(flatten)]
    #[ts(skip)]
    pub other: Map<String, Value>,
}
/// Display data output.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct OutputDisplayData {
    /// Data to display.
    pub data: MimeBundle,
    /// Metadata associated with the display data.
    pub metadata: OutputMetadata,
    /// Additional unrecognized attributes in display data.
    #[serde(flatten)]
    #[ts(skip)]
    pub other: Map<String, Value>,
}
/// Stream output.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct OutputStream {
    /// Name of the stream (e.g., stdout or stderr).
    pub name: String,
    /// Text content of the stream.
    pub text: MultilineString,
    /// Additional unrecognized attributes in stream output.
    #[serde(flatten)]
    #[ts(skip)]
    pub other: Map<String, Value>,
}
/// Error output.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct OutputError {
    /// Name of the error.
    pub ename: String,
    /// Value or message of the error.
    pub evalue: String,
    /// Traceback of the error.
    pub traceback: Vec<String>,
    /// Additional unrecognized attributes in error output.
    #[serde(flatten)]
    #[ts(skip)]
    pub other: Map<String, Value>,
}
/// Metadata associated with outputs.
pub type OutputMetadata = BTreeMap<String, Value>;
#[cfg(test)]
mod tests {
    use super::*;
    // Deserialize a representative notebook and spot-check known fields,
    // including that unknown keys land in the `other` map.
    #[test]
    fn parse_notebook() {
        let json = r#"
        {
            "metadata": {
                "kernelspec": {
                    "name": "python3",
                    "display_name": "Python 3"
                },
                "language_info": {
                    "name": "python",
                    "codemirror_mode": {
                        "name": "ipython",
                        "version": 3
                    },
                    "file_extension": ".py",
                    "mimetype": "text/x-python",
                    "pygments_lexer": "ipython3",
                    "version": "3.8.5",
                    "nbconvert_exporter": "python"
                },
                "orig_nbformat": 4,
                "title": "Example Notebook",
                "authors": [
                    {
                        "name": "Alice"
                    },
                    {
                        "name": "Bob"
                    }
                ],
                "custom": "metadata"
            },
            "nbformat_minor": 4,
            "nbformat": 4,
            "cells": [
                {
                    "cell_type": "code",
                    "id": "cell-1",
                    "metadata": {
                        "custom": "metadata"
                    },
                    "source": "print('Hello, world!')",
                    "execution_count": 1,
                    "outputs": [
                        {
                            "output_type": "execute_result",
                            "execution_count": 1,
                            "data": {
                                "text/plain": "Hello, world!"
                            },
                            "metadata": {
                                "custom": "metadata"
                            }
                        }
                    ]
                }
            ]
        }
        "#;
        let notebook: NotebookRoot = serde_json::from_str(json).unwrap();
        assert_eq!(
            notebook.metadata.kernelspec.as_ref().unwrap().name,
            "python3"
        );
        assert_eq!(
            notebook.metadata.language_info.as_ref().unwrap().name,
            "python"
        );
        assert_eq!(notebook.metadata.orig_nbformat, Some(4));
        assert_eq!(
            notebook.metadata.title.as_ref().unwrap(),
            "Example Notebook"
        );
        assert_eq!(
            notebook.metadata.authors.as_ref().unwrap()[0]
                .name
                .as_ref()
                .unwrap(),
            "Alice"
        );
        assert_eq!(
            notebook.metadata.authors.as_ref().unwrap()[1]
                .name
                .as_ref()
                .unwrap(),
            "Bob"
        );
        // The unrecognized `custom` key must be preserved via `flatten`.
        assert_eq!(notebook.metadata.other.get("custom").unwrap(), "metadata");
        assert_eq!(notebook.nbformat_minor, 4);
        assert_eq!(notebook.nbformat, 4);
        assert_eq!(notebook.cells.len(), 1);
    }
    // `normalize` must split after every newline and drop nothing.
    #[test]
    fn string_to_multiline() {
        let empty = MultilineString::Single("".into()).normalize();
        assert_eq!(empty, MultilineString::Multi(vec![]));
        let single = MultilineString::Single("Hello, world!".into()).normalize();
        assert_eq!(
            single,
            MultilineString::Multi(vec!["Hello, world!".to_string()])
        );
        let multi = MultilineString::Single("Hello,\nworld!".into()).normalize();
        assert_eq!(
            multi,
            MultilineString::Multi(vec!["Hello,\n".to_string(), "world!".to_string()])
        );
        let multi = MultilineString::Single("Hello,\n\nworld!\n".into()).normalize();
        assert_eq!(
            multi,
            MultilineString::Multi(vec![
                "Hello,\n".to_string(),
                "\n".to_string(),
                "world!\n".to_string()
            ])
        );
    }
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/backend/local.rs | src-tauri/src/backend/local.rs | //! Code that starts local kernels to be `jupyter-server` compatible.
//!
//! This is currently unused while Jute relies on `jupyter-server`, but in the
//! future it could replace the Jupyter installation by directly invoking
//! kernels, or introduce new APIs for developer experience.
use std::process::Stdio;
use serde::{Deserialize, Serialize};
use serde_json::json;
use tokio::fs;
use tokio::net::TcpListener;
use ts_rs::TS;
use uuid::Uuid;
use self::environment::KernelSpec;
use super::{create_zeromq_connection, KernelConnection};
use crate::Error;
pub mod environment;
/// Contains information about the CPU and memory usage of a kernel.
#[derive(Serialize, Deserialize, Debug, Clone, TS)]
pub struct KernelUsageInfo {
/// Number of CPUs used.
pub cpu_consumed: f32,
/// Number of CPUs available.
pub cpu_available: f32,
/// Memory consumed in KB.
pub memory_consumed: f32,
/// Memory available in KB.
pub memory_available: f32,
}
/// Represents a connection to an active kernel.
///
/// Holds the spawned kernel process (killed on drop via `kill_on_drop`) along
/// with the ZeroMQ connection used to exchange wire-protocol messages.
pub struct LocalKernel {
    // Child process handle for the spawned kernel; configured with
    // `kill_on_drop(true)` in `start`, so dropping this kills the kernel.
    child: tokio::process::Child,
    // Randomly-generated UUID identifying this kernel instance.
    kernel_id: String,
    // Copy of the spec the kernel was started from.
    spec: KernelSpec,
    // Message channels (shell/control/iopub/stdin/heartbeat) to the kernel.
    conn: KernelConnection,
}
impl LocalKernel {
    /// Start a new kernel based on a spec, and connect to it.
    ///
    /// This picks five free TCP ports, writes a Jupyter connection file into
    /// the runtime directory, spawns the kernel process from `spec.argv`, and
    /// connects to it over ZeroMQ.
    ///
    /// # Errors
    ///
    /// Returns [`Error::KernelConnect`] if ports or the connection file cannot
    /// be set up, and [`Error::Subprocess`] if the kernel fails to spawn.
    pub async fn start(spec: &KernelSpec) -> Result<Self, Error> {
        // Probe five free ports concurrently. NOTE(review): there is an
        // inherent race here — a port could be taken by another process
        // between this probe and the kernel binding it.
        let (control_port, shell_port, iopub_port, stdin_port, heartbeat_port) = tokio::try_join!(
            get_available_port(),
            get_available_port(),
            get_available_port(),
            get_available_port(),
            get_available_port(),
        )?;
        // Fresh random key used for hmac-sha256 message signing.
        let signing_key = Uuid::new_v4().to_string();
        // Standard Jupyter connection-file contents, read by the kernel at
        // startup to know where to bind and how to sign messages.
        let connection_file = json!({
            "control_port": control_port,
            "shell_port": shell_port,
            "iopub_port": iopub_port,
            "stdin_port": stdin_port,
            "hb_port": heartbeat_port,
            "transport": "tcp",
            "ip": "127.0.0.1",
            "signature_scheme": "hmac-sha256",
            "key": signing_key,
        });
        let kernel_id = Uuid::new_v4().to_string();
        let runtime_dir = environment::runtime_dir();
        // NOTE(review): plain string concatenation — assumes runtime_dir()
        // returns a path ending with a separator; TODO confirm.
        let connection_filename = runtime_dir + &format!("jute-{kernel_id}.json");
        fs::write(&connection_filename, connection_file.to_string())
            .await
            .map_err(|err| {
                Error::KernelConnect(format!("could not write connection file: {err}"))
            })?;
        // NOTE(review): on this early return the connection file written
        // above is left behind on disk.
        if spec.argv.is_empty() {
            return Err(Error::KernelConnect("kernel spec has no argv".into()));
        }
        // Substitute the connection-file path into the spec's argv template,
        // per the Jupyter kernelspec convention.
        let argv: Vec<String> = spec
            .argv
            .iter()
            .map(|s| s.replace("{connection_file}", &connection_filename))
            .collect();

        // TODO: Handle spec.env
        // Kernel output is discarded; all communication happens over ZeroMQ.
        // `kill_on_drop` ties the kernel's lifetime to this struct.
        let child = tokio::process::Command::new(&argv[0])
            .args(&argv[1..])
            .kill_on_drop(true)
            .stdout(Stdio::null())
            .stderr(Stdio::null())
            .spawn()
            .map_err(Error::Subprocess)?;

        let conn = create_zeromq_connection(
            shell_port,
            control_port,
            iopub_port,
            stdin_port,
            heartbeat_port,
            &signing_key,
        )
        .await?;

        Ok(Self {
            child,
            kernel_id,
            spec: spec.clone(),
            conn,
        })
    }

    /// Get the kernel ID.
    pub fn id(&self) -> &str {
        &self.kernel_id
    }

    /// Get the kernel connection object.
    pub fn conn(&self) -> &KernelConnection {
        &self.conn
    }

    /// Return the spec used to start the kernel.
    pub fn spec(&self) -> &KernelSpec {
        &self.spec
    }

    /// Check if the kernel is still alive.
    ///
    /// Returns `true` while the child process has not yet exited.
    pub fn is_alive(&mut self) -> bool {
        matches!(self.child.try_wait(), Ok(None))
    }

    /// Kill the kernel by sending a SIGKILL signal.
    pub async fn kill(&mut self) -> Result<(), Error> {
        self.child.kill().await.map_err(Error::Subprocess)
    }

    /// Get the pid of the kernel process.
    ///
    /// Returns `None` if the process has already exited.
    pub fn pid(&self) -> Option<u32> {
        self.child.id()
    }
}
async fn get_available_port() -> Result<u16, Error> {
let addr = TcpListener::bind("127.0.0.1:0")
.await
.map_err(|err| Error::KernelConnect(format!("could not get available port: {err}")))?
.local_addr()
.map_err(|_| Error::KernelConnect("tcp listener has no local address".into()))?;
Ok(addr.port())
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/backend/wire_protocol.rs | src-tauri/src/backend/wire_protocol.rs | //! Jupyter kernel wire protocol implementations.
//!
//! See the [Messaging in Jupyter](https://jupyter-client.readthedocs.io/en/stable/messaging.html)
//! page for documentation about how this works. The wire protocol is used to
//! communicate with Jupyter kernels over ZeroMQ or WebSocket.
use std::collections::BTreeMap;
use std::sync::Arc;
use bytes::Bytes;
use dashmap::DashMap;
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use time::OffsetDateTime;
use tokio::sync::oneshot;
use tokio_util::sync::{CancellationToken, DropGuard};
use ts_rs::TS;
use uuid::Uuid;
pub use self::driver_websocket::create_websocket_connection;
pub use self::driver_zeromq::create_zeromq_connection;
use crate::Error;
mod driver_websocket;
mod driver_zeromq;
/// Type of a kernel wire protocol message, either request or reply.
///
/// Variants mirror the message types in the Jupyter messaging specification.
/// Any type string not listed here deserializes into
/// [`KernelMessageType::Other`] through the untagged fallback variant.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
#[serde(rename_all = "snake_case")]
#[non_exhaustive]
pub enum KernelMessageType {
    /// Execute a block of code.
    ExecuteRequest,
    /// Return execution results.
    ExecuteReply,
    /// Request detailed information about a piece of code.
    InspectRequest,
    /// Return detailed information about the inspected code.
    InspectReply,
    /// Request code completions or suggestions.
    CompleteRequest,
    /// Return completions or suggestions for the code.
    CompleteReply,
    /// Request execution history (not often used).
    HistoryRequest,
    /// Return the requested execution history (not often used).
    HistoryReply,
    /// Request to check if code is complete.
    IsCompleteRequest,
    /// Reply indicating if code is complete.
    IsCompleteReply,
    /// Request information about existing comms.
    CommInfoRequest,
    /// Reply with information about existing comms.
    CommInfoReply,
    /// Request kernel information.
    KernelInfoRequest,
    /// Reply with kernel information.
    KernelInfoReply,
    /// Request kernel shutdown.
    ShutdownRequest,
    /// Reply to confirm kernel shutdown.
    ShutdownReply,
    /// Request to interrupt kernel execution.
    InterruptRequest,
    /// Reply to confirm kernel interruption.
    InterruptReply,
    /// Request to start or stop a debugger.
    DebugRequest,
    /// Reply with debugger status.
    DebugReply,
    /// Streams of output (stdout, stderr) from the kernel.
    Stream,
    /// Bring back data to be displayed in frontends.
    DisplayData,
    /// Update display data with new information.
    UpdateDisplayData,
    /// Re-broadcast of code in ExecuteRequest.
    ExecuteInput,
    /// Results of a code execution.
    ExecuteResult,
    /// When an error occurs during code execution.
    Error,
    /// Updates about kernel status.
    Status,
    /// Clear output visible on the frontend.
    ClearOutput,
    /// For debugging kernels to send events.
    DebugEvent,
    /// Open a comm to the frontend, used for interactive widgets.
    CommOpen,
    /// A one-way comm message, with no expected reply format.
    CommMsg,
    /// Close a comm to the frontend.
    CommClose,
    /// Another kernel message type that is unrecognized.
    ///
    /// Serialized/deserialized as the raw type string itself.
    #[serde(untagged)]
    Other(String),
}
/// Header of a message, generally part of the {header, parent_header, metadata,
/// content, buffers} 5-tuple.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct KernelHeader {
    /// Typically UUID, must be unique per message.
    ///
    /// Used to pair replies with their originating request.
    pub msg_id: String,
    /// Typically UUID, should be unique per session.
    pub session: String,
    /// The username of the user sending the message.
    pub username: String,
    /// ISO 8601 timestamp for when the message is created.
    #[serde(with = "time::serde::iso8601")]
    #[ts(type = "string")]
    pub date: OffsetDateTime,
    /// The message type.
    pub msg_type: KernelMessageType,
    /// Message protocol version.
    pub version: String,
}

/// A message sent to or received from a Jupyter kernel.
///
/// Generic over the content type `T`; defaults to raw JSON, which can be
/// converted to/from typed content with `into_typed` / `into_json`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct KernelMessage<T = serde_json::Value> {
    /// The message header.
    pub header: KernelHeader,
    /// The parent message header, if any.
    pub parent_header: Option<KernelHeader>,
    /// The content of the message.
    pub content: T,
    /// Buffers for large data, if any (used by extensions).
    pub buffers: Vec<Bytes>,
}
impl<T> KernelMessage<T> {
    /// Create a basic kernel message with the given header and content.
    ///
    /// The header gets a fresh random message ID, fixed session/username
    /// identifiers, the current UTC time, and protocol version 5.4.
    pub fn new(msg_type: KernelMessageType, content: T) -> Self {
        let header = KernelHeader {
            msg_id: Uuid::new_v4().to_string(),
            session: "jute-session".to_string(),
            username: "jute-user".to_string(),
            date: OffsetDateTime::now_utc(),
            msg_type,
            version: "5.4".into(),
        };
        Self {
            header,
            parent_header: None,
            content,
            buffers: Vec::new(),
        }
    }
}
impl<T: Serialize> KernelMessage<T> {
/// Produce a variant of the message as a serialized JSON type.
pub fn into_json(self) -> KernelMessage {
KernelMessage {
header: self.header,
parent_header: self.parent_header,
content: serde_json::to_value(&self.content).expect("KernelMessage JSON serialization"),
buffers: self.buffers,
}
}
}
impl KernelMessage {
/// Deserialize the content of the message into a specific type.
pub fn into_typed<T: DeserializeOwned>(self) -> Result<KernelMessage<T>, Error> {
Ok(KernelMessage {
header: self.header,
parent_header: self.parent_header,
content: serde_json::from_value(self.content)
.map_err(|err| Error::DeserializeMessage(err.to_string()))?,
buffers: self.buffers,
})
}
}
/// The content of a reply to a kernel message, with status attached.
///
/// Serialized with an adjacent `status` tag: `"ok"`, `"error"`, or
/// `"abort"`/`"aborted"`.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
#[serde(tag = "status", rename_all = "snake_case")]
pub enum Reply<T> {
    /// The request was processed successfully.
    Ok(T),
    /// The request failed due to an error.
    Error(ErrorReply),
    /// This is the same as `status="error"` but with no information about the
    /// error. No fields should be present other than status.
    ///
    /// Some messages like execute_reply return "aborted" instead, see
    /// <https://github.com/ipython/ipykernel/issues/367> for details.
    #[serde(alias = "aborted")]
    Abort,
}

/// Content of an error response message.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct ErrorReply {
    /// The error name, such as 'NameError'.
    pub ename: String,
    /// The error message, such as 'NameError: name 'x' is not defined'.
    pub evalue: String,
    /// The traceback frames of the error as a list of strings.
    pub traceback: Vec<String>,
}

/// Execute code on behalf of the user.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct ExecuteRequest {
    /// Source code to be executed by the kernel, one or more lines.
    pub code: String,
    /// A boolean flag which, if true, signals the kernel to execute the code as
    /// quietly as possible.
    pub silent: bool,
    /// A boolean flag which, if true, signals the kernel to populate the
    /// history.
    pub store_history: bool,
    /// A dictionary mapping names to expressions to be evaluated in the user's
    /// dictionary. The rich display-data representation of each will be
    /// evaluated after execution.
    pub user_expressions: BTreeMap<String, String>,
    /// If true, code running in the kernel can prompt the user for input with
    /// an `input_request` message. If false, the kernel should not send
    /// these messages.
    pub allow_stdin: bool,
    /// A boolean flag, which, if true, aborts the execution queue if an
    /// exception is encountered. If false, queued `execute_requests` will
    /// execute even if this request generates an exception.
    pub stop_on_error: bool,
}

/// Represents a reply to an execute request.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct ExecuteReply {
    /// The execution count, which increments with each request that stores
    /// history.
    pub execution_count: i32,
    /// Results for the user expressions evaluated during execution. Only
    /// present when status is 'ok'.
    pub user_expressions: Option<BTreeMap<String, String>>,
}
/// Request for introspection of code to retrieve useful information as
/// determined by the kernel.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct InspectRequest {
    /// The code context in which introspection is requested, potentially
    /// multiple lines.
    pub code: String,
    /// The cursor position within 'code' where introspection is requested, in
    /// Unicode characters.
    pub cursor_pos: u32,
    /// The level of detail desired, where 0 might be basic info (`x?` in
    /// IPython) and 1 includes more detail (`x??` in IPython).
    pub detail_level: u8,
}

/// Represents a reply to an inspect request with potentially formatted
/// information about the code context.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct InspectReply {
    /// Indicates whether an object was found during the inspection.
    pub found: bool,
    /// A dictionary containing the data representing the inspected object, can
    /// be empty if nothing is found. Keyed by MIME type.
    pub data: BTreeMap<String, serde_json::Value>,
    /// Metadata associated with the data, can also be empty.
    pub metadata: BTreeMap<String, serde_json::Value>,
}

/// Request for code completion based on the context provided in the code and
/// cursor position.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct CompleteRequest {
    /// The code context in which completion is requested, possibly a multiline
    /// string.
    pub code: String,
    /// The cursor position within 'code' in Unicode characters where completion
    /// is requested.
    pub cursor_pos: u32,
}

/// Represents a reply to a completion request.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct CompleteReply {
    /// A list of all matches to the completion request.
    pub matches: Vec<String>,
    /// The starting position of the text that should be replaced by the
    /// completion.
    pub cursor_start: u32,
    /// The ending position of the text that should be replaced by the
    /// completion.
    pub cursor_end: u32,
    /// Metadata providing additional information about completions.
    pub metadata: BTreeMap<String, serde_json::Value>,
}
/// Request for information about the kernel.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct KernelInfoRequest {}

/// Represents a reply to a kernel_info request, providing details about the
/// kernel.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct KernelInfoReply {
    /// Version of the messaging protocol used by the kernel.
    pub protocol_version: String,
    /// The name of the kernel implementation (e.g., 'ipython').
    pub implementation: String,
    /// The version number of the kernel's implementation.
    pub implementation_version: String,
    /// Detailed information about the programming language used by the kernel.
    pub language_info: LanguageInfo,
    /// A banner of information about the kernel, displayed in console.
    pub banner: String,
    /// Indicates if the kernel supports debugging. Defaults to `false` when
    /// omitted by the kernel.
    #[serde(default)]
    pub debugger: bool,
}

/// Detailed information about the programming language of the kernel.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct LanguageInfo {
    /// Name of the programming language.
    pub name: String,
    /// Version number of the language.
    pub version: String,
    /// MIME type for script files in this language.
    pub mimetype: String,
    /// File extension for script files in this language.
    pub file_extension: String,
    /// Nbconvert exporter, if notebooks should be exported differently than the
    /// general script.
    pub nbconvert_exporter: String,
}

/// Request to shut down the kernel, possibly to prepare for a restart.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct ShutdownRequest {
    /// Indicates whether the shutdown is final or precedes a restart.
    pub restart: bool,
}

/// Represents a reply to a shutdown request.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct ShutdownReply {
    /// Matches the restart flag from the request to indicate the intended
    /// shutdown behavior.
    pub restart: bool,
}

/// Request to interrupt the kernel's current operation.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct InterruptRequest {}

/// Represents a reply to an interrupt request.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct InterruptReply {}
/// Streams of output from the kernel, such as stdout and stderr.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct Stream {
    /// The name of the stream, one of 'stdout' or 'stderr'.
    pub name: String,
    /// The text to be displayed in the stream.
    pub text: String,
}

/// Data to be displayed in frontends, such as images or HTML.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct DisplayData {
    /// The data to be displayed, typically a MIME type and the data itself.
    pub data: BTreeMap<String, serde_json::Value>,
    /// Metadata associated with the data, can be empty.
    pub metadata: BTreeMap<String, serde_json::Value>,
    /// Any information not to be persisted to a notebook.
    pub transient: Option<DisplayDataTransient>,
}

/// Transient data associated with display data, such as display IDs.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct DisplayDataTransient {
    /// Specifies an ID for the display, which can be updated.
    pub display_id: Option<String>,
}

/// Re-broadcast of code in an execute request to let all frontends know.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct ExecuteInput {
    /// The code that was executed.
    pub code: String,
    /// The execution count, which increments with each request that stores
    /// history.
    pub execution_count: i32,
}

/// Results of a code execution, such as the output or return value.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct ExecuteResult {
    /// The execution count, which increments with each request that stores
    /// history.
    pub execution_count: i32,
    /// The data to be displayed, typically a MIME type and the data itself. A
    /// plain text representation should always be provided in the `text/plain`
    /// mime-type.
    pub data: BTreeMap<String, serde_json::Value>,
    /// Metadata associated with the data, can be empty.
    pub metadata: BTreeMap<String, serde_json::Value>,
}

/// Used by frontends to monitor the status of the kernel.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct Status {
    /// Current status of the kernel.
    pub execution_state: KernelStatus,
}

/// Possible states of the kernel. When the kernel starts to handle a message,
/// it will enter the 'busy' state and when it finishes, it will enter the
/// 'idle' state. The kernel will publish state 'starting' exactly once at
/// process startup.
#[derive(Serialize, Deserialize, Copy, Clone, Debug, PartialEq, Eq, TS)]
#[serde(rename_all = "snake_case")]
pub enum KernelStatus {
    /// The kernel is starting up.
    Starting,
    /// The kernel is ready to execute code.
    Idle,
    /// The kernel is currently executing code.
    Busy,
}

/// Request to clear output visible on the frontend.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct ClearOutput {
    /// Wait to clear the output until new output is available to replace it.
    pub wait: bool,
}

/// Open a comm to the frontend, used for interactive widgets.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct CommOpen {
    /// The unique ID of the comm.
    pub comm_id: String,
    /// The target name of the comm. If this is not understood by the
    /// frontend, they must reply with a `comm_close` message.
    pub target_name: String,
    /// The data to be sent to the frontend.
    pub data: serde_json::Value,
}

/// A one-way comm message, with no expected reply format. This struct is reused
/// for both `comm_msg` and `comm_close` message types.
#[derive(Serialize, Deserialize, Clone, Debug, PartialEq, Eq, TS)]
pub struct CommMessage {
    /// The unique ID of the comm.
    pub comm_id: String,
    /// The data to be sent to the frontend.
    pub data: serde_json::Value,
}
/// Represents a stateful kernel connection that can be used to communicate with
/// a running Jupyter kernel.
///
/// Connections can be obtained through either WebSocket or ZeroMQ network
/// protocols. They send messages to the kernel and receive responses through
/// one of the five dedicated channels:
///
/// - Shell: Main channel for code execution and info requests.
/// - IOPub: Broadcast channel for side effects (stdout, stderr) and requests
///   from any client over the shell channel.
/// - Stdin: Requests from the kernel to the client for standard input.
/// - Control: Just like Shell, but separated to avoid queueing.
/// - Heartbeat: Periodic ping/pong to ensure the connection is alive. This
///   appears to only be supported by ZeroMQ, so we don't implement it here.
///
/// The specific details of which messages are sent on which channels are left
/// to the user. Functions will block if disconnected or return an error after
/// the driver has been closed.
///
/// Cloning is cheap: clones share the same underlying channels and reply map.
#[derive(Clone)]
pub struct KernelConnection {
    // Outgoing messages to the shell channel, consumed by the driver task.
    shell_tx: async_channel::Sender<KernelMessage>,
    // Outgoing messages to the control channel, consumed by the driver task.
    control_tx: async_channel::Sender<KernelMessage>,
    // Incoming broadcast messages from the iopub channel.
    iopub_rx: async_channel::Receiver<KernelMessage>,
    // Maps a request's msg_id to the oneshot sender that delivers its reply.
    reply_tx_map: Arc<DashMap<String, oneshot::Sender<KernelMessage>>>,
    // Cancellation signal observed by the driver task.
    signal: CancellationToken,
    // Cancels `signal` when the last clone of this connection is dropped.
    _drop_guard: Arc<DropGuard>,
}
impl KernelConnection {
/// Send a message to the kernel over the shell channel.
///
/// On success, return a receiver for the reply from the kernel on the same
/// channel, when it is finished.
pub async fn call_shell<T: Serialize>(
&self,
message: KernelMessage<T>,
) -> Result<PendingRequest, Error> {
let (reply_tx, reply_rx) = oneshot::channel();
let msg_id = message.header.msg_id.clone();
self.reply_tx_map.insert(msg_id.clone(), reply_tx);
self.shell_tx
.send(message.into_json())
.await
.map_err(|_| Error::KernelDisconnect)?;
Ok(PendingRequest {
reply_tx_map: self.reply_tx_map.clone(),
reply_rx,
msg_id,
})
}
/// Send a message to the kernel over the control channel.
pub async fn call_control<T: Serialize>(
&self,
message: KernelMessage<T>,
) -> Result<PendingRequest, Error> {
let (reply_tx, reply_rx) = oneshot::channel();
let msg_id = message.header.msg_id.clone();
self.reply_tx_map.insert(msg_id.clone(), reply_tx);
self.control_tx
.send(message.into_json())
.await
.map_err(|_| Error::KernelDisconnect)?;
Ok(PendingRequest {
reply_tx_map: self.reply_tx_map.clone(),
reply_rx,
msg_id,
})
}
/// Receieve a message from the kernel over the iopub channel.
pub async fn recv_iopub(&self) -> Result<KernelMessage, Error> {
self.iopub_rx
.recv()
.await
.map_err(|_| Error::KernelDisconnect)
}
/// Receive an immediate message over the iopub channel without waiting.
pub fn try_recv_iopub(&self) -> Option<KernelMessage> {
self.iopub_rx.try_recv().ok()
}
/// Close the connection to the kernel, shutting down all channels.
pub fn close(&self) {
self.shell_tx.close();
self.control_tx.close();
self.iopub_rx.close();
self.signal.cancel(); // This is the only necessary line, but we close
// the channels for good measure regardless.
}
}
/// Receives a reply from a previous kernel router-dealer request.
///
/// Dropping this removes the pending entry from the connection's reply map,
/// so abandoned requests do not leak.
pub struct PendingRequest {
    // Shared map of msg_id -> reply sender; used for cleanup on drop.
    reply_tx_map: Arc<DashMap<String, oneshot::Sender<KernelMessage>>>,
    // Receives the reply message routed by the connection driver.
    reply_rx: oneshot::Receiver<KernelMessage>,
    // The msg_id of the originating request, keying `reply_tx_map`.
    msg_id: String,
}
impl PendingRequest {
/// Wait for the reply to the previous request from the kernel.
pub async fn get_reply<U: DeserializeOwned>(
&mut self,
) -> Result<KernelMessage<Reply<U>>, Error> {
(&mut self.reply_rx)
.await
.map_err(|_| Error::KernelDisconnect)?
.into_typed()
}
}
impl Drop for PendingRequest {
fn drop(&mut self) {
// This ensures that we don't leak memory by leaving the channel in the map.
self.reply_tx_map.remove(&self.msg_id);
}
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/backend/remote.rs | src-tauri/src/backend/remote.rs | //! Connections to remote Jupyter servers over HTTP and WebSocket.
use std::time::Duration;
use reqwest::{
header::{self, HeaderMap},
StatusCode,
};
use serde::Deserialize;
use serde_json::json;
use time::OffsetDateTime;
use url::Url;
use super::{create_websocket_connection, KernelConnection};
use crate::Error;
/// A running Jupyter kernel connected over the WebSocket wire protocol.
///
/// Cloning shares the same underlying HTTP client and kernel connection.
#[derive(Clone)]
pub struct RemoteKernel {
    // HTTP client for the Jupyter server, used e.g. to kill the kernel.
    client: JupyterClient,
    // Server-assigned ID of the kernel.
    kernel_id: String,
    // WebSocket-backed message channels to the kernel.
    conn: KernelConnection,
}
impl RemoteKernel {
    /// Start a new kernel on the server.
    ///
    /// Creates the kernel through the server's REST API, then connects to its
    /// message channels endpoint over WebSocket.
    pub async fn start(client: &JupyterClient, spec_name: &str) -> Result<Self, Error> {
        let kernel_info = client.create_kernel(spec_name).await?;
        let ws_url = client
            .server_url
            .join(&format!("/api/kernels/{}/channels", kernel_info.id))?;
        // Swap the URL scheme from HTTP(S) to WS(S) for the WebSocket
        // endpoint. NOTE(review): assumes server_url uses an http:// or
        // https:// scheme; any other scheme would pass through unchanged.
        let mut ws_url = ws_url.to_string();
        if ws_url.starts_with("https://") {
            ws_url = ws_url.replacen("https://", "wss://", 1);
        } else {
            ws_url = ws_url.replacen("http://", "ws://", 1);
        }
        let conn = create_websocket_connection(&ws_url, &client.token).await?;
        Ok(Self {
            client: client.clone(),
            kernel_id: kernel_info.id,
            conn,
        })
    }

    /// Get the kernel ID.
    pub fn id(&self) -> &str {
        &self.kernel_id
    }

    /// Kill the kernel and delete its kernel ID.
    ///
    /// Consumes `self`, since the kernel is no longer usable afterward.
    pub async fn kill(self) -> Result<(), Error> {
        self.client.kill_kernel(&self.kernel_id).await
    }

    /// Get a reference to the kernel connection object.
    pub fn conn(&self) -> &KernelConnection {
        &self.conn
    }
}
/// HTTP client for a remote Jupyter server.
///
/// This client can make REST API requests and create new WebSocket connections.
/// It is generally stateless and cheaply cloneable though.
#[derive(Clone)]
pub struct JupyterClient {
    // Base URL of the Jupyter server; endpoint paths are joined onto it.
    server_url: Url,
    // API token; also sent separately when opening WebSocket connections.
    token: String,
    // Reusable HTTP client preconfigured with the authorization header.
    http_client: reqwest::Client,
}
impl JupyterClient {
    /// Return a new client to a Jupyter server without connecting.
    ///
    /// The token is installed as a default `Authorization: token <token>`
    /// header on every request. No network I/O happens here.
    pub fn new(server_url: &str, token: &str) -> Result<Self, Error> {
        let headers = HeaderMap::from_iter([(
            header::AUTHORIZATION,
            format!("token {token}")
                .parse()
                // Only fails if the token contains invalid header characters.
                .expect("server token parse"),
        )]);
        let server_url = Url::parse(server_url)?;
        let http_client = reqwest::ClientBuilder::new()
            .connect_timeout(Duration::from_secs(1))
            .default_headers(headers)
            .build()?;
        Ok(Self {
            server_url,
            token: token.into(),
            http_client,
        })
    }

    /// Get the API version of the Jupyter server.
    pub async fn get_api_version(&self) -> Result<String, Error> {
        let url = self.server_url.join("/api")?;
        let resp = self.http_client.get(url).send().await?.error_for_status()?;
        // Local deserialization target for the `/api` response body.
        #[derive(Deserialize)]
        struct ApiVersion {
            version: String,
        }
        Ok(resp.json::<ApiVersion>().await?.version)
    }

    /// List the active kernels on the Jupyter server.
    pub async fn list_kernels(&self) -> Result<Vec<KernelInfo>, Error> {
        let url = self.server_url.join("/api/kernels")?;
        let resp = self.http_client.get(url).send().await?.error_for_status()?;
        Ok(resp.json().await?)
    }

    /// Get information about a specific kernel by its ID.
    ///
    /// Returns `Ok(None)` if the server reports no kernel with that ID.
    pub async fn get_kernel_by_id(&self, kernel_id: &str) -> Result<Option<KernelInfo>, Error> {
        let url = self.server_url.join(&format!("/api/kernels/{kernel_id}"))?;
        let resp = self.http_client.get(url).send().await?;
        if resp.status() == StatusCode::NOT_FOUND {
            return Ok(None);
        }
        Ok(resp.error_for_status()?.json().await?)
    }

    /// Create a new kernel from the spec with the given name.
    pub async fn create_kernel(&self, spec_name: &str) -> Result<KernelInfo, Error> {
        let url = self.server_url.join("/api/kernels")?;
        let resp = self
            .http_client
            .post(url)
            .json(&json!({ "name": spec_name }))
            .send()
            .await?
            .error_for_status()?;
        Ok(resp.json().await?)
    }

    /// Kill a kernel and delete its kernel ID.
    pub async fn kill_kernel(&self, kernel_id: &str) -> Result<(), Error> {
        let url = self.server_url.join(&format!("/api/kernels/{kernel_id}"))?;
        self.http_client
            .delete(url)
            .send()
            .await?
            .error_for_status()?;
        Ok(())
    }
}
/// Information about a remote Jupyter kernel.
///
/// Mirrors the kernel model returned by the server's `/api/kernels` endpoints.
#[derive(Clone, Debug, Deserialize)]
pub struct KernelInfo {
    /// The unique identifier of the kernel.
    pub id: String,
    /// Name of the type of kernel being run (e.g., `python3`).
    pub name: String,
    /// Last activity ISO timestamp, typically UTC.
    #[serde(with = "time::serde::iso8601")]
    pub last_activity: OffsetDateTime,
    /// The execution state of the kernel: `starting`, `running`, etc.
    pub execution_state: String,
    /// The number of active connections to the kernel.
    pub connections: u32,
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/backend/commands.rs | src-tauri/src/backend/commands.rs | //! High-level APIs for doing operations over [`KernelConnection`] objects.
use serde::Serialize;
use ts_rs::TS;
use super::{
wire_protocol::{
ClearOutput, DisplayData, ErrorReply, ExecuteRequest, ExecuteResult, KernelInfoReply,
KernelInfoRequest, KernelMessage, KernelMessageType, KernelStatus, Reply, Status, Stream,
},
KernelConnection,
};
use crate::Error;
/// Get information through the KernelInfo command.
///
/// Sends a `kernel_info_request` over the shell channel and waits for its
/// reply.
pub async fn kernel_info(conn: &KernelConnection) -> Result<KernelInfoReply, Error> {
    let mut req = conn
        .call_shell(KernelMessage::new(
            KernelMessageType::KernelInfoRequest,
            KernelInfoRequest {},
        ))
        .await?;
    let msg = req.get_reply::<KernelInfoReply>().await?;
    match msg.content {
        Reply::Ok(info) => Ok(info),
        // NOTE(review): a kernel-side error/abort reply is also surfaced as
        // KernelDisconnect here, which conflates the two failure modes.
        Reply::Error(_) | Reply::Abort => Err(Error::KernelDisconnect),
    }
}
/// Events that can be received while running a cell.
///
/// Serialized for the frontend as `{ "event": ..., "data": ... }`.
#[derive(Debug, Clone, Serialize, TS)]
#[serde(rename_all = "snake_case", tag = "event", content = "data")]
pub enum RunCellEvent {
    /// Standard output from the kernel.
    Stdout(String),
    /// Standard error from the kernel.
    Stderr(String),
    /// Result of cell execution (i.e., if the last line is an expression).
    ExecuteResult(ExecuteResult),
    /// Display data in a MIME type (e.g., a matplotlib chart).
    DisplayData(DisplayData),
    /// Update previously-displayed data with a display ID.
    UpdateDisplayData(DisplayData),
    /// Clear the output of a cell.
    ClearOutput(ClearOutput),
    /// Error if the cell raised an exception.
    Error(ErrorReply),
    /// Special message indicating the kernel disconnected.
    Disconnect(String),
}
/// Run a code cell, returning the events received in the meantime.
///
/// Sends an `execute_request` over the shell channel, then spawns a task that
/// forwards iopub messages as [`RunCellEvent`]s on the returned channel until
/// the kernel reports it is idle again. If the connection fails mid-run, a
/// final [`RunCellEvent::Disconnect`] event is emitted.
pub async fn run_cell(
    conn: &KernelConnection,
    code: &str,
) -> Result<async_channel::Receiver<RunCellEvent>, Error> {
    // Clear out existing iopub messages before running the cell, in case there are
    // any lingering messages from previous runs.
    // NOTE(review): iopub is a shared broadcast channel, so this drain would
    // also discard messages belonging to any concurrently running cell.
    while conn.try_recv_iopub().is_some() {}

    // The returned PendingRequest is intentionally dropped: completion is
    // detected via the iopub status messages below rather than the reply.
    conn.call_shell(KernelMessage::new(
        KernelMessageType::ExecuteRequest,
        ExecuteRequest {
            code: code.into(),
            silent: false,
            store_history: true,
            user_expressions: Default::default(),
            allow_stdin: false,
            stop_on_error: true,
        },
    ))
    .await?;

    let (tx, rx) = async_channel::unbounded();

    let conn = conn.clone();
    let tx2 = tx.clone();
    let stream_results_fut = async move {
        // The kernel publishes Busy when it starts handling the request and
        // Idle when it finishes; we forward events until Idle is seen.
        let mut status = KernelStatus::Busy;
        while status != KernelStatus::Idle {
            let msg = conn.recv_iopub().await?;
            match msg.header.msg_type {
                KernelMessageType::Status => {
                    let msg = msg.into_typed::<Status>()?;
                    status = msg.content.execution_state;
                }
                KernelMessageType::Stream => {
                    let msg = msg.into_typed::<Stream>()?;
                    // Anything that isn't stdout is treated as stderr.
                    if msg.content.name == "stdout" {
                        _ = tx.send(RunCellEvent::Stdout(msg.content.text)).await;
                    } else {
                        _ = tx.send(RunCellEvent::Stderr(msg.content.text)).await;
                    }
                }
                // We ignore ExecuteInput messages since they just echo the input code.
                KernelMessageType::ExecuteInput => {}
                KernelMessageType::ExecuteResult => {
                    let msg = msg.into_typed::<ExecuteResult>()?;
                    _ = tx.send(RunCellEvent::ExecuteResult(msg.content)).await;
                }
                KernelMessageType::DisplayData => {
                    let msg = msg.into_typed::<DisplayData>()?;
                    _ = tx.send(RunCellEvent::DisplayData(msg.content)).await;
                }
                KernelMessageType::UpdateDisplayData => {
                    let msg = msg.into_typed::<DisplayData>()?;
                    _ = tx.send(RunCellEvent::UpdateDisplayData(msg.content)).await;
                }
                KernelMessageType::ClearOutput => {
                    let msg = msg.into_typed::<ClearOutput>()?;
                    _ = tx.send(RunCellEvent::ClearOutput(msg.content)).await;
                }
                KernelMessageType::Error => {
                    let msg = msg.into_typed::<ErrorReply>()?;
                    _ = tx.send(RunCellEvent::Error(msg.content)).await;
                }
                // Other iopub traffic (e.g. comm messages) is ignored here.
                _ => {}
            }
        }
        Ok::<_, Error>(())
    };

    tokio::spawn(async move {
        // Translate any errors into a disconnect message.
        if let Err(err) = stream_results_fut.await {
            _ = tx2.send(RunCellEvent::Disconnect(err.to_string())).await;
        }
    });

    Ok(rx)
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/backend/wire_protocol/driver_zeromq.rs | src-tauri/src/backend/wire_protocol/driver_zeromq.rs | //! Adapter for the Jupyter wire protocol over ZeroMQ.
//!
//! This protocol is documented in the `jupyter-client` project at
//! <https://jupyter-client.readthedocs.io/en/stable/messaging.html>. It relies
//! on 5 dedicated sockets for different types of messages.
use std::sync::Arc;
use bytes::Bytes;
use dashmap::DashMap;
use tokio_util::sync::CancellationToken;
use tracing::{error, warn};
use zeromq::{Socket, SocketRecv, SocketSend, ZmqMessage};
use super::{KernelConnection, KernelHeader, KernelMessage};
use crate::Error;
/// Sign a message using HMAC-SHA256 with the kernel's signing key.
///
/// Returns the lowercase hex digest over the concatenation of all given
/// frames, as required by the wire protocol's `hmac-sha256` signature scheme.
fn sign_message(signing_key: &str, bytes: &[Bytes]) -> String {
    use hmac::{Hmac, Mac};
    use sha2::Sha256;

    // HMAC accepts keys of any length, so construction cannot fail.
    let mut mac: Hmac<Sha256> =
        Hmac::new_from_slice(signing_key.as_bytes()).expect("HMAC accepts keys of any length");
    for frame in bytes {
        mac.update(frame);
    }
    format!("{:x}", mac.finalize().into_bytes())
}
/// Serialize a kernel message into the multipart ZeroMQ wire format:
/// `<IDS|MSG>` delimiter, HMAC signature, then the signed JSON frames
/// (header, parent_header, metadata, content) and any raw buffers.
fn to_zmq_payload(msg: &KernelMessage, signing_key: &str) -> Option<ZmqMessage> {
    // Build the frames that participate in the signature.
    let mut frames = vec![
        Bytes::from(serde_json::to_vec(&msg.header).ok()?),
        Bytes::from(serde_json::to_vec(&msg.parent_header).ok()?),
        Bytes::from_static(b"{}"),
        Bytes::from(serde_json::to_vec(&msg.content).ok()?),
    ];
    frames.extend(msg.buffers.iter().cloned());
    let signature = sign_message(signing_key, &frames);
    // Prefix delimiter + signature, which are not part of the signed data.
    let mut payload = vec![Bytes::from_static(b"<IDS|MSG>"), Bytes::from(signature)];
    payload.append(&mut frames);
    ZmqMessage::try_from(payload).ok()
}
/// Parse a multipart ZeroMQ payload into a [`KernelMessage`].
///
/// Frames after the `<IDS|MSG>` delimiter are: signature, header,
/// parent_header, metadata, content, then zero or more raw buffers.
/// Returns `None` on any malformed or truncated message.
fn from_zmq_payload(payload: ZmqMessage) -> Option<KernelMessage> {
    let payload = payload.into_vec();
    let delim_idx = payload.iter().position(|b| *b == b"<IDS|MSG>" as &[u8])?;
    // Use checked `get` instead of indexing: a truncated message (fewer than
    // four frames after the delimiter) previously panicked here.
    let header = serde_json::from_slice(payload.get(delim_idx + 2)?).ok()?;
    let parent_header = serde_json::from_slice(payload.get(delim_idx + 3)?).ok()?;
    // The metadata frame (delim_idx + 4) is intentionally ignored.
    let content = serde_json::from_slice(payload.get(delim_idx + 5)?).ok()?;
    // Remaining frames are opaque binary buffers; absent trailing frames mean
    // no buffers rather than an error.
    let buffers = payload
        .get(delim_idx + 6..)
        .map_or_else(Vec::new, |b| b.to_vec());
    Some(KernelMessage {
        header,
        parent_header,
        content,
        buffers,
    })
}
/// Connect to Jupyter via ZeroMQ to a local kernel.
pub async fn create_zeromq_connection(
    shell_port: u16,
    control_port: u16,
    iopub_port: u16,
    stdin_port: u16,
    heartbeat_port: u16,
    signing_key: &str,
) -> Result<KernelConnection, Error> {
    // Bounded channels decouple callers from socket I/O; iopub gets a larger
    // buffer since the kernel can emit output bursts.
    let (shell_tx, shell_rx) = async_channel::bounded(8);
    let (control_tx, control_rx) = async_channel::bounded(8);
    let (iopub_tx, iopub_rx) = async_channel::bounded(64);
    let reply_tx_map = Arc::new(DashMap::new());
    let signal = CancellationToken::new();
    // Dropping the last clone of `conn` cancels `signal` via the drop guard,
    // which tears down the spawned I/O task below.
    let conn = KernelConnection {
        shell_tx,
        control_tx,
        iopub_rx,
        reply_tx_map: reply_tx_map.clone(),
        signal: signal.clone(),
        _drop_guard: Arc::new(signal.clone().drop_guard()),
    };
    let mut shell = zeromq::DealerSocket::new();
    shell
        .connect(&format!("tcp://127.0.0.1:{shell_port}"))
        .await?;
    let mut control = zeromq::DealerSocket::new();
    control
        .connect(&format!("tcp://127.0.0.1:{control_port}"))
        .await?;
    let mut iopub = zeromq::SubSocket::new();
    iopub
        .connect(&format!("tcp://127.0.0.1:{iopub_port}"))
        .await?;
    // Empty topic string subscribes to all iopub broadcasts.
    iopub.subscribe("").await?;
    let mut stdin = zeromq::DealerSocket::new();
    stdin
        .connect(&format!("tcp://127.0.0.1:{stdin_port}"))
        .await?;
    let mut heartbeat = zeromq::ReqSocket::new();
    heartbeat
        .connect(&format!("tcp://127.0.0.1:{heartbeat_port}"))
        .await?;
    let _ = (stdin, heartbeat); // Not supported yet.
    let key = signing_key.to_string();
    let tx_map = reply_tx_map.clone();
    let shell_fut = async move {
        // Send and receive shell messages.
        loop {
            tokio::select! {
                Ok(msg) = shell_rx.recv() => {
                    let Some(payload) = to_zmq_payload(&msg, &key) else {
                        error!("error converting shell message to zmq payload");
                        continue;
                    };
                    if let Err(err) = shell.send(payload).await {
                        warn!("error sending zmq shell message: {err:?}");
                    }
                }
                Ok(payload) = shell.recv() => {
                    if let Some(msg) = from_zmq_payload(payload) {
                        // Route replies to the waiter registered under the
                        // parent request's msg_id, if any.
                        if let Some(KernelHeader { msg_id, .. }) = &msg.parent_header {
                            if let Some((_, reply_tx)) = tx_map.remove(msg_id) {
                                _ = reply_tx.send(msg);
                            }
                        }
                    } else {
                        warn!("error converting zmq payload to shell reply");
                    }
                }
                else => break,
            }
        }
    };
    let key = signing_key.to_string();
    let tx_map = reply_tx_map.clone();
    let control_fut = async move {
        // Send and receive control messages.
        loop {
            tokio::select! {
                Ok(msg) = control_rx.recv() => {
                    let Some(payload) = to_zmq_payload(&msg, &key) else {
                        error!("error converting control message to zmq payload");
                        continue;
                    };
                    if let Err(err) = control.send(payload).await {
                        warn!("error sending zmq control message: {err:?}");
                    }
                }
                Ok(payload) = control.recv() => {
                    if let Some(msg) = from_zmq_payload(payload) {
                        if let Some(KernelHeader { msg_id, .. }) = &msg.parent_header {
                            if let Some((_, reply_tx)) = tx_map.remove(msg_id) {
                                _ = reply_tx.send(msg);
                            }
                        }
                    } else {
                        warn!("error converting zmq payload to control reply");
                    }
                }
                else => break,
            }
        }
    };
    let iopub_fut = async move {
        // Receive iopub messages.
        while let Ok(payload) = iopub.recv().await {
            if let Some(msg) = from_zmq_payload(payload) {
                _ = iopub_tx.send(msg).await;
            } else {
                warn!("error converting zmq payload to iopub message");
            }
        }
    };
    // One task drives all three channel loops; cancellation (triggered by the
    // connection's drop guard) aborts them together.
    tokio::spawn(async move {
        tokio::select! {
            _ = async { tokio::join!(shell_fut, control_fut, iopub_fut) } => {}
            _ = signal.cancelled() => {}
        }
    });
    Ok(conn)
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/backend/wire_protocol/driver_websocket.rs | src-tauri/src/backend/wire_protocol/driver_websocket.rs | //! Adapter for the Jupyter wire protocol over WebSocket.
//!
//! This protocol is documented in the `jupyter-server` project at
//! <https://jupyter-server.readthedocs.io/en/latest/developers/websocket-protocols.html>.
//!
//! It is very similar to the ZeroMQ protocol, but there is a thin framing layer
//! that allows messages to be sent over WebSocket binary payloads instead of
//! raw TCP sockets.
use std::sync::Arc;
use bytes::Bytes;
use dashmap::DashMap;
use futures_util::{SinkExt, StreamExt};
use reqwest::header::{HeaderValue, AUTHORIZATION, SEC_WEBSOCKET_PROTOCOL};
use tokio_tungstenite::tungstenite::{client::IntoClientRequest, Message};
use tokio_util::sync::CancellationToken;
use tracing::{error, warn};
use super::{KernelConnection, KernelHeader, KernelMessage};
use crate::Error;
// In this protocol, a kernel message is serialized over WebSocket as follows,
// where all integers are little-endian (indices refer to bytes):
//
// 0: offset_number (n+1)
// 8: offset_0
// 16: offset_1
// 8*offset_number: offset_n
// offset_0: channel
// offset_1: header
// offset_2: parent_header
// offset_3: metadata
// offset_4: content
// offset_5: buffer_0
// (offset_6: buffer_1 ... and so on)
/// Serialize a kernel message into the `v1.kernel.websocket.jupyter.org`
/// binary frame: a little-endian offset table followed by the channel name,
/// four JSON sections, and any raw buffers (layout described above).
fn to_ws_payload(msg: &KernelMessage, channel: &str) -> Option<Vec<u8>> {
    // Serialize the JSON sections first so we can measure their lengths.
    let header = serde_json::to_vec(&msg.header).ok()?;
    let parent_header = serde_json::to_vec(&msg.parent_header).ok()?;
    let content = serde_json::to_vec(&msg.content).ok()?;
    // The five fixed sections, then each binary buffer.
    let mut sections: Vec<&[u8]> = vec![
        channel.as_bytes(),
        &header,
        &parent_header,
        b"{}",
        &content,
    ];
    sections.extend(msg.buffers.iter().map(|b| b.as_ref()));
    let offset_number = sections.len() as u64;
    // Data begins right after the table: one u64 count plus one u64 per entry.
    let offset_0 = 8 * (offset_number + 1);
    let mut out = Vec::new();
    out.extend_from_slice(&offset_number.to_le_bytes());
    let mut cursor = offset_0;
    for section in &sections {
        out.extend_from_slice(&cursor.to_le_bytes());
        cursor += section.len() as u64;
    }
    for section in &sections {
        out.extend_from_slice(section);
    }
    Some(out)
}
/// Parse a `v1.kernel.websocket.jupyter.org` binary frame into a
/// [`KernelMessage`] plus its channel name. Returns `None` on any malformed
/// or truncated payload.
fn from_ws_payload(payload: &[u8]) -> Option<(KernelMessage, String)> {
    let offset_number: usize = u64::from_le_bytes(payload.get(0..8)?.try_into().ok()?)
        .try_into()
        .ok()?;
    // A valid message has at least 5 sections (channel, header, parent_header,
    // metadata, content). Reject shorter offset tables up-front: the fixed
    // `offsets[4]`/`offsets[5]` accesses below previously panicked on them.
    if offset_number < 5 {
        return None;
    }
    let mut offsets = Vec::with_capacity(offset_number);
    for i in 0..offset_number {
        let index = 8 * (i + 1);
        offsets.push(
            u64::from_le_bytes(payload.get(index..index + 8)?.try_into().ok()?)
                .try_into()
                .ok()?,
        );
    }
    // Sentinel end offset so section i spans offsets[i]..offsets[i + 1].
    offsets.push(payload.len());
    let channel = String::from_utf8(payload.get(offsets[0]..offsets[1])?.to_vec()).ok()?;
    let header = serde_json::from_slice(payload.get(offsets[1]..offsets[2])?).ok()?;
    let parent_header = serde_json::from_slice(payload.get(offsets[2]..offsets[3])?).ok()?;
    // The metadata section (offsets[3]..offsets[4]) is intentionally ignored.
    let content = serde_json::from_slice(payload.get(offsets[4]..offsets[5])?).ok()?;
    let mut buffers = Vec::new();
    for i in 5..offset_number {
        buffers.push(Bytes::from(
            payload.get(offsets[i]..offsets[i + 1])?.to_vec(),
        ));
    }
    let msg = KernelMessage {
        header,
        parent_header,
        content,
        buffers,
    };
    Some((msg, channel))
}
/// Connect to Jupyter via the `v1.kernel.websocket.jupyter.org` protocol.
pub async fn create_websocket_connection(
    websocket_url: &str,
    token: &str,
) -> Result<KernelConnection, Error> {
    // Bounded channels decouple callers from socket I/O; iopub gets a larger
    // buffer since the kernel can emit output bursts.
    let (shell_tx, shell_rx) = async_channel::bounded(8);
    let (control_tx, control_rx) = async_channel::bounded(8);
    let (iopub_tx, iopub_rx) = async_channel::bounded(64);
    let reply_tx_map = Arc::new(DashMap::new());
    let signal = CancellationToken::new();
    // Dropping the last clone of `conn` cancels `signal` via the drop guard,
    // which tears down the spawned I/O task below.
    let conn = KernelConnection {
        shell_tx,
        control_tx,
        iopub_rx,
        reply_tx_map: reply_tx_map.clone(),
        signal: signal.clone(),
        _drop_guard: Arc::new(signal.clone().drop_guard()),
    };
    let mut req = websocket_url
        .into_client_request()
        .map_err(|err| Error::KernelConnect(err.to_string()))?;
    // Request the v1 subprotocol so the server frames messages as documented
    // in the module header, and authenticate with the Jupyter token scheme.
    req.headers_mut().insert(
        SEC_WEBSOCKET_PROTOCOL,
        HeaderValue::from_static("v1.kernel.websocket.jupyter.org"),
    );
    req.headers_mut().insert(
        AUTHORIZATION,
        format!("token {token}")
            .parse::<HeaderValue>()
            .map_err(|err| Error::KernelConnect(err.to_string()))?,
    );
    let (ws, _resp) = tokio_tungstenite::connect_async(req)
        .await
        .map_err(|err| Error::KernelConnect(err.to_string()))?;
    let (mut ws_tx, mut ws_rx) = ws.split();
    let send_fut = async move {
        // Send shell and control messages over the WebSocket.
        loop {
            let (msg, channel) = tokio::select! {
                Ok(msg) = shell_rx.recv() => (msg, "shell"),
                Ok(msg) = control_rx.recv() => (msg, "control"),
                else => break,
            };
            let Some(payload) = to_ws_payload(&msg, channel) else {
                error!("error converting message to ws payload");
                continue;
            };
            if ws_tx.send(Message::Binary(payload)).await.is_err() {
                // The WebSocket has been closed.
                // TODO: Handle reconnection.
                error!("WebSocket closed, reconnection not yet implemented");
                break;
            }
        }
    };
    let receive_fut = async move {
        // Receive shell, control, and iopub messages from the WebSocket.
        while let Some(Ok(ws_payload)) = ws_rx.next().await {
            // Only binary frames carry protocol messages; ignore ping/text.
            let payload = match ws_payload {
                Message::Binary(payload) => payload,
                _ => continue,
            };
            let (msg, channel) = match from_ws_payload(&payload) {
                Some(msg) => msg,
                None => continue,
            };
            match &*channel {
                "shell" | "control" => {
                    // Route replies to the waiter registered under the parent
                    // request's msg_id, if any.
                    if let Some(KernelHeader { msg_id, .. }) = &msg.parent_header {
                        if let Some((_, tx)) = reply_tx_map.remove(msg_id) {
                            // Optional, it's not an error if this receiver has been dropped.
                            _ = tx.send(msg);
                        }
                    }
                }
                "iopub" => {
                    _ = iopub_tx.send(msg).await;
                }
                _ => {
                    warn!("received WebSocket message on unexpected channel: {channel}");
                }
            }
        }
    };
    // Run both futures until cancellation or completion.
    tokio::spawn(async move {
        tokio::select! {
            _ = async { tokio::join!(send_fut, receive_fut) } => {}
            _ = signal.cancelled() => {}
        }
    });
    Ok(conn)
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/backend/local/environment.rs | src-tauri/src/backend/local/environment.rs | //! Metadata about the kernel environment and file system configuration.
use std::{
collections::BTreeMap,
env,
path::{Path, PathBuf},
};
use futures_util::future::join_all;
use serde::Deserialize;
use tokio::fs;
/// The path separator for the current platform.
///
/// Used below when trimming and joining Jupyter directory strings that are
/// built by string concatenation rather than `PathBuf`.
pub const SEP: &str = if cfg!(windows) { "\\" } else { "/" };
/// Information parsed from the `kernel.json` file.
///
/// See <https://jupyter-client.readthedocs.io/en/latest/kernels.html#kernel-specs>
/// for more information about the kernel spec format.
#[derive(Deserialize, Debug, Clone)]
pub struct KernelSpec {
    /// List of command-line arguments to start the kernel.
    ///
    /// NOTE(review): Jupyter kernel specs commonly embed placeholders such as
    /// `{connection_file}` in argv — confirm substitution at the launch site.
    pub argv: Vec<String>,
    /// The display name of the kernel.
    pub display_name: String,
    /// The language of the kernel.
    pub language: String,
    /// The interrupt mode of the kernel ("signal" by default).
    #[serde(default)]
    pub interrupt_mode: KernelInterruptMode,
    /// A dictionary of environment variables to set for the kernel.
    #[serde(default)]
    pub env: BTreeMap<String, String>,
}
/// The interrupt mode of the kernel.
#[derive(Default, Copy, Clone, Debug, Deserialize)]
#[serde(rename_all = "snake_case")]
pub enum KernelInterruptMode {
    /// Interrupts are communicated by sending a signal.
    ///
    /// This is the Jupyter default when `interrupt_mode` is absent.
    #[default]
    Signal,
    /// Interrupts are communicated by messages on the control socket.
    Message,
}
/// Lists the ordered search path to find installable data files.
///
/// This is specified in
/// <https://docs.jupyter.org/en/latest/use/jupyter-directories.html#data-files>.
async fn data_search_paths(interpreter_prefix: Option<&str>) -> Vec<String> {
    let mut dirs = Vec::new();
    // Highest priority: explicit JUPYTER_PATH, split on the platform's
    // PATH-style separator.
    if let Ok(jupyter_path) = env::var("JUPYTER_PATH") {
        let pathsep = if cfg!(windows) { ";" } else { ":" };
        dirs.extend(jupyter_path.split(pathsep).map(String::from));
    }
    // Then the per-user data directory, overridable via JUPYTER_DATA_DIR.
    // NOTE(review): the `.unwrap()` calls panic if HOME / AppData are unset —
    // confirm that is acceptable for this application's environments.
    if let Ok(jupyter_data_dir) = env::var("JUPYTER_DATA_DIR") {
        dirs.push(jupyter_data_dir);
    } else {
        #[cfg(windows)]
        dirs.push(env::var("AppData").unwrap() + "\\jupyter");
        #[cfg(target_os = "macos")]
        dirs.push(env::var("HOME").unwrap() + "/Library/Jupyter");
        #[cfg(target_os = "linux")]
        match env::var("XDG_DATA_HOME") {
            Ok(xdg_data_home) => dirs.push(xdg_data_home + "/jupyter"),
            Err(_) => dirs.push(env::var("HOME").unwrap() + "/.local/share/jupyter"),
        }
    }
    // Then the interpreter's own share directory (e.g. a virtualenv prefix).
    if let Some(prefix) = interpreter_prefix {
        dirs.push(prefix.to_string() + "/share/jupyter");
    }
    // Finally the system-wide locations.
    #[cfg(windows)]
    dirs.push(env::var("ProgramData").unwrap() + "\\jupyter");
    #[cfg(unix)]
    dirs.extend([
        String::from("/usr/share/jupyter"),
        String::from("/usr/local/share/jupyter"),
    ]);
    dirs
}
/// List all available kernels from the environment, checking the search path.
/// List all available kernels from the environment, checking the search path.
///
/// Each search-path directory is scanned concurrently; results are flattened
/// in search-path order.
pub async fn list_kernels(interpreter_prefix: Option<&str>) -> Vec<(PathBuf, KernelSpec)> {
    let search_dirs = data_search_paths(interpreter_prefix).await;
    let scans = search_dirs.iter().map(|dir| list_kernels_from_path(dir));
    let mut kernels = Vec::new();
    for found in join_all(scans).await {
        kernels.extend(found);
    }
    kernels
}
/// List all the available kernels from a given path.
/// List all the available kernels from a given path.
///
/// Looks for `<path>/kernels/<name>/kernel.json`; entries that cannot be read
/// or parsed are silently skipped.
async fn list_kernels_from_path(path: &str) -> Vec<(PathBuf, KernelSpec)> {
    let mut found = Vec::new();
    let mut entries = match fs::read_dir(Path::new(path).join("kernels")).await {
        Ok(entries) => entries,
        Err(_) => return found, // no kernels directory here
    };
    while let Ok(Some(entry)) = entries.next_entry().await {
        let kernel_dir = entry.path();
        let Ok(raw) = fs::read(&kernel_dir.join("kernel.json")).await else {
            continue;
        };
        if let Ok(spec) = serde_json::from_slice::<KernelSpec>(&raw) {
            found.push((kernel_dir, spec));
        }
    }
    found
}
/// Get the configured directory for data files.
pub fn data_dir() -> String {
if let Ok(jupyter_data_dir) = env::var("JUPYTER_DATA_DIR") {
return jupyter_data_dir.trim_end_matches(SEP).into();
}
cfg_if::cfg_if! {
if #[cfg(windows)] {
env::var("AppData").unwrap() + "\\jupyter"
} else if #[cfg(target_os = "macos")] {
env::var("HOME").unwrap() + "/Library/Jupyter"
} else if #[cfg(target_os = "linux")] {
match env::var("XDG_DATA_HOME") {
Ok(xdg_data_home) => xdg_data_home + "/jupyter",
Err(_) => env::var("HOME").unwrap() + "/.local/share/jupyter",
}
} else {
panic!("Unsupported platform, cannot determine data directory")
}
}
}
/// Get the configured directory where runtime connection files are stored.
/// Get the configured directory where runtime connection files are stored.
///
/// Honors `JUPYTER_RUNTIME_DIR` (trailing separators trimmed), falling back
/// to `<data_dir>/runtime`.
pub fn runtime_dir() -> String {
    if let Ok(dir) = env::var("JUPYTER_RUNTIME_DIR") {
        dir.trim_end_matches(SEP).into()
    } else {
        data_dir() + SEP + "runtime"
    }
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/src/bin/ts-rs-export.rs | src-tauri/src/bin/ts-rs-export.rs | use std::{
fs,
path::Path,
process::{exit, Command},
};
use jute::backend::{commands::RunCellEvent, local::KernelUsageInfo, notebook::NotebookRoot};
use ts_rs::TS;
/// Regenerate the TypeScript bindings under `../src/bindings` from the Rust
/// types, write an `index.ts` barrel file, and format everything with
/// Prettier. Exits nonzero if Prettier fails.
fn main() {
    let export_path = Path::new("../src/bindings");

    // Clear and recreate the `src/bindings` directory before anything else:
    // `fs::canonicalize` fails on a missing path, so printing the canonical
    // path first (as this code previously did) panics on a fresh checkout.
    if export_path.exists() {
        println!("Clearing old bindings...");
        fs::remove_dir_all(export_path).expect("Failed to clear bindings directory");
    }
    fs::create_dir_all(export_path).expect("Failed to recreate bindings directory");

    // print the full path of the export directory
    println!(
        "Exporting TypeScript bindings to `{:?}`",
        fs::canonicalize(export_path).expect("Failed to get full path of export directory")
    );

    // Generate TypeScript bindings
    println!("Exporting TypeScript bindings...");
    NotebookRoot::export_all_to(export_path).unwrap();
    RunCellEvent::export_all_to(export_path).unwrap();
    KernelUsageInfo::export_all_to(export_path).unwrap();

    // Generate `index.ts` re-exporting every generated module. Names are
    // collected and sorted so the output is deterministic — `read_dir` yields
    // entries in platform-dependent order, which caused churn in the file.
    println!("Generating index.ts...");
    let mut modules = Vec::new();
    for entry in fs::read_dir(export_path).expect("Failed to read bindings directory") {
        let entry = entry.expect("Failed to read directory entry");
        let path = entry.path();
        if path.extension().map_or(false, |ext| ext == "ts") {
            modules.push(path.file_stem().unwrap().to_string_lossy().into_owned());
        }
    }
    modules.sort();
    let mut index_file = String::new();
    for module in &modules {
        index_file.push_str(&format!("export * from './{}';\n", module));
    }
    fs::write(export_path.join("index.ts"), index_file).expect("Failed to write index.ts");

    // Format the bindings with Prettier
    println!("Formatting with Prettier...");
    let status = Command::new("npx")
        .arg("prettier")
        .arg("--write")
        .arg(format!("{}/**/*.ts", export_path.display()))
        .status()
        .expect("Failed to run Prettier");
    if !status.success() {
        eprintln!("Prettier formatting failed");
        exit(1);
    }
    println!("Done!");
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/examples/run_kernel.rs | src-tauri/examples/run_kernel.rs | //! Shell example of running a kernel from Rust code.
use std::io::Write;
use jute::backend::{
commands::{self, RunCellEvent},
local::{environment, LocalKernel},
};
#[tokio::main]
async fn main() {
    tracing_subscriber::fmt::init();
    // Enumerate kernels from the standard Jupyter search path and let the
    // user pick one by display name. Unwraps are acceptable here: this is an
    // interactive example, not library code.
    println!("Available kernels:");
    let kernels = environment::list_kernels(None).await;
    for (path, kernel_spec) in &kernels {
        println!("  {:20} {}", kernel_spec.display_name, path.display());
    }
    let mut kernel_spec = loop {
        print!("\nPick a kernel: ");
        std::io::stdout().flush().unwrap();
        let mut input = String::new();
        std::io::stdin().read_line(&mut input).unwrap();
        let input = input.trim();
        match kernels.iter().find(|(_, spec)| spec.display_name == input) {
            Some((_, kernel_spec)) => break kernel_spec.clone(),
            None => println!("Invalid kernel name, try again"),
        }
    };
    if kernel_spec.argv[0] == "python" {
        // Temporary hack
        kernel_spec.argv[0] = "python3.11".into();
    }
    let mut kernel = LocalKernel::start(&kernel_spec).await.unwrap();
    println!("\nStarted kernel.");
    let info = commands::kernel_info(kernel.conn()).await.unwrap();
    println!("{}", info.banner);
    // Minimal REPL: read a line, run it as a cell, and print the streamed
    // events until the cell's event channel closes.
    while kernel.is_alive() {
        print!("> ");
        std::io::stdout().flush().unwrap();
        let mut input = String::new();
        std::io::stdin().read_line(&mut input).unwrap();
        let rx = commands::run_cell(kernel.conn(), &input).await.unwrap();
        while let Ok(event) = rx.recv().await {
            match event {
                RunCellEvent::Stdout(text) => print!("{}", text),
                RunCellEvent::Stderr(text) => eprint!("{}", text),
                RunCellEvent::ExecuteResult(msg) => {
                    println!("-> {}", msg.data["text/plain"].as_str().unwrap())
                }
                RunCellEvent::DisplayData(msg) => {
                    println!("DisplayData: {:?}", msg.data.keys());
                }
                RunCellEvent::UpdateDisplayData(msg) => {
                    println!("UpdateDisplayData: {:?}", msg.data.keys());
                }
                RunCellEvent::ClearOutput(_) => {}
                RunCellEvent::Error(msg) => {
                    for line in &msg.traceback {
                        eprintln!("{line}");
                    }
                }
                RunCellEvent::Disconnect(msg) => {
                    eprintln!("Kernel disconnected abnormally: {}", msg);
                    break;
                }
            }
        }
    }
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
ekzhang/jute | https://github.com/ekzhang/jute/blob/18723a036b843d9efc1d07b326bda5614b2020e7/src-tauri/examples/list_kernels.rs | src-tauri/examples/list_kernels.rs | //! An example that lists all available kernels.
use jute::backend::local::environment;
#[tokio::main]
async fn main() {
    // Enumerate kernels from the default search path and print name + path.
    println!("Available kernels:");
    let kernels = environment::list_kernels(None).await;
    for (path, kernel_spec) in kernels {
        println!("  {:20} {}", kernel_spec.display_name, path.display());
    }
}
| rust | MIT | 18723a036b843d9efc1d07b326bda5614b2020e7 | 2026-01-04T20:21:18.693962Z | false |
huggingface/inference-benchmarker | https://github.com/huggingface/inference-benchmarker/blob/0a54882d9ae3d2e183005345bd86b5c47382a338/build.rs | build.rs | use std::error::Error;
use vergen_gitcl::{Emitter, GitclBuilder};
fn main() -> Result<(), Box<dyn Error>> {
    // Try to embed the git sha from the local repository; fall back to the
    // GIT_SHA environment variable if any step fails.
    if let Ok(gitcl) = GitclBuilder::all_git() {
        if let Ok(emitter) = Emitter::default().fail_on_error().add_instructions(&gitcl) {
            if emitter.emit().is_ok() {
                return Ok(());
            }
        }
    }
    fallback_git_sha();
    Ok(())
}
/// Fallback when the git sha cannot be derived from the repository: take it
/// from the GIT_SHA environment variable instead (e.g. in CI), or do nothing.
fn fallback_git_sha() {
    match std::env::var("GIT_SHA") {
        Ok(sha) => println!("cargo:rustc-env=VERGEN_GIT_SHA={sha}"),
        Err(_) => {}
    }
}
| rust | Apache-2.0 | 0a54882d9ae3d2e183005345bd86b5c47382a338 | 2026-01-04T20:20:59.288369Z | false |
huggingface/inference-benchmarker | https://github.com/huggingface/inference-benchmarker/blob/0a54882d9ae3d2e183005345bd86b5c47382a338/src/flux.rs | src/flux.rs | use crate::results::BenchmarkResults;
use std::sync::{Arc, Mutex};
// Flux pattern
#[derive(Clone)]
// Flux-style dispatcher: the single entry point through which UI actions are
// funneled into the shared store.
pub struct Dispatcher {
    // Shared, mutex-guarded store that receives all dispatched actions.
    store: Arc<Mutex<Store>>,
}
impl Dispatcher {
pub(crate) fn new(store: Arc<Mutex<Store>>) -> Self {
Self { store }
}
pub(crate) fn dispatch(&mut self, action: Action) {
self.store.lock().unwrap().update(action);
}
}
#[derive(Clone)]
// Immutable snapshot of everything the TUI renders; cloned out of the store
// on each draw.
pub struct AppState {
    // Log lines shown in the "Logs" panel.
    pub(crate) messages: Vec<crate::app::LogMessageUI>,
    // Per-benchmark progress rows shown in the steps table.
    pub(crate) benchmarks: Vec<crate::app::BenchmarkUI>,
    // Completed benchmark results used to build the charts.
    pub(crate) results: Vec<BenchmarkResults>,
}
impl AppState {
fn new() -> Self {
Self {
messages: Vec::new(),
benchmarks: Vec::new(),
results: Vec::new(),
}
}
}
// Flux-style store: the single owner of AppState, mutated only through
// `update` and read through cloned snapshots.
pub struct Store {
    state: AppState,
}
impl Store {
    /// Create a store holding an empty application state.
    pub(crate) fn new() -> Self {
        let state = AppState::new();
        Self { state }
    }

    /// Apply an action to the state — the single reducer of the flux loop.
    fn update(&mut self, action: Action) {
        match action {
            Action::LogMessage(message) => self.state.messages.push(message),
            Action::AddBenchmark(benchmark) => {
                // Upsert keyed by benchmark id: replace an existing row in
                // place, otherwise append.
                let index = self
                    .state
                    .benchmarks
                    .iter()
                    .position(|b| b.id == benchmark.id);
                match index {
                    Some(i) => self.state.benchmarks[i] = benchmark,
                    None => self.state.benchmarks.push(benchmark),
                }
            }
            Action::AddBenchmarkResults(results) => {
                // Same upsert for results. `iter()` suffices: `position` only
                // reads, so the previous `iter_mut()` borrow was needless.
                let index = self.state.results.iter().position(|b| b.id == results.id);
                match index {
                    Some(i) => self.state.results[i] = results,
                    None => self.state.results.push(results),
                }
            }
        }
    }

    /// Clone a snapshot of the current state for rendering.
    pub(crate) fn get_state(&self) -> AppState {
        self.state.clone()
    }
}
// All mutations that can be applied to the store's AppState.
pub enum Action {
    // Append a line to the log panel.
    LogMessage(crate::app::LogMessageUI),
    // Insert or update a benchmark progress row (keyed by id).
    AddBenchmark(crate::app::BenchmarkUI),
    // Insert or update a finished benchmark's results (keyed by id).
    AddBenchmarkResults(BenchmarkResults),
}
| rust | Apache-2.0 | 0a54882d9ae3d2e183005345bd86b5c47382a338 | 2026-01-04T20:20:59.288369Z | false |
huggingface/inference-benchmarker | https://github.com/huggingface/inference-benchmarker/blob/0a54882d9ae3d2e183005345bd86b5c47382a338/src/app.rs | src/app.rs | use crate::benchmark::Event as BenchmarkEvent;
use crate::event::{terminal_event_task, AppEvent};
use crate::flux::{Action, AppState, Dispatcher, Store};
use crate::scheduler::ExecutorType;
use crate::BenchmarkConfig;
use crossterm::event::{KeyCode, KeyEvent, KeyModifiers};
use ratatui::layout::{Constraint, Direction, Layout};
use ratatui::text::Span;
use ratatui::widgets::ListDirection::BottomToTop;
use ratatui::widgets::{Cell, Dataset, List, ListItem, Row, Table};
use ratatui::{
buffer::Buffer,
layout::{Alignment, Rect},
style::Stylize as OtherStylize,
symbols,
symbols::border,
text::{Line, Text},
widgets::{Block, Paragraph, Widget},
DefaultTerminal, Frame,
};
use std::collections::HashMap;
use std::io;
use std::sync::{Arc, Mutex};
use tokio::sync::broadcast::Sender;
use tokio::sync::mpsc::{Receiver, UnboundedReceiver};
use tokio::sync::{broadcast, mpsc};
// Top-level TUI application: owns the flux store/dispatcher, consumes
// terminal events, and broadcasts a stop signal on exit.
pub struct App {
    // Set to true to leave the render/event loop.
    exit: bool,
    store: Arc<Mutex<Store>>,
    dispatcher: Arc<Mutex<Dispatcher>>,
    // Terminal events (ticks, keys, resizes) from the event task.
    receiver: Receiver<AppEvent>,
    benchmark_config: BenchmarkConfig,
    // Broadcast used to tell all background tasks to shut down.
    stop_sender: broadcast::Sender<()>,
}
/// Run the interactive console UI: spawns a terminal-event task, a task that
/// translates benchmark events into store actions, and the ratatui render
/// loop; returns once all of them have stopped.
pub async fn run_console(
    benchmark_config: BenchmarkConfig,
    mut receiver: UnboundedReceiver<BenchmarkEvent>,
    stop_sender: broadcast::Sender<()>,
) {
    let (app_tx, app_rx) = mpsc::channel(8);
    // Create event task
    let stop_receiver_signal = stop_sender.subscribe();
    tokio::spawn(terminal_event_task(250, app_tx, stop_receiver_signal));
    let mut app = App::new(benchmark_config, app_rx, stop_sender.clone());
    app.dispatcher
        .lock()
        .expect("lock")
        .dispatch(Action::LogMessage(LogMessageUI {
            message: "Starting benchmark".to_string(),
            level: LogLevel::Info,
            timestamp: chrono::Utc::now(),
        }));
    let dispatcher = app.dispatcher.clone();
    let mut stop_receiver_signal = stop_sender.subscribe();
    // Task translating benchmark events into store actions; exits on report
    // end, benchmark error, channel close, or the stop signal.
    let event_thread = tokio::spawn(async move {
        tokio::select! {
            _=async{
                while let Some(event) = receiver.recv().await {
                    match event {
                        BenchmarkEvent::BenchmarkStart(event) => {
                            dispatcher.lock().expect("lock").dispatch(Action::AddBenchmark(BenchmarkUI {
                                id: event.id,
                                status: BenchmarkStatus::Running,
                                progress: 0.0,
                                throughput: "0".to_string(),
                                successful_requests: 0,
                                failed_requests: 0,
                            }));
                        }
                        BenchmarkEvent::BenchmarkProgress(event) => {
                            let (successful_requests,failed_requests) = (event.successful_requests,event.failed_requests);
                            dispatcher.lock().expect("lock").dispatch(Action::AddBenchmark(BenchmarkUI {
                                id: event.id,
                                status: BenchmarkStatus::Running,
                                progress: event.progress,
                                throughput: event.request_throughput.map_or("0".to_string(), |e| format!("{e:.2}")),
                                successful_requests,
                                failed_requests,
                            }));
                        }
                        BenchmarkEvent::BenchmarkEnd(event) => {
                            dispatcher.lock().expect("lock").dispatch(Action::LogMessage(LogMessageUI {
                                message: format!("Benchmark {} ended", event.id),
                                level: LogLevel::Info,
                                timestamp: chrono::Utc::now(),
                            }));
                            if let Some(results) = event.results {
                                let (successful_requests,failed_requests) = (results.successful_requests() as u64,results.failed_requests() as u64);
                                dispatcher.lock().expect("lock").dispatch(Action::AddBenchmark(BenchmarkUI {
                                    id: event.id,
                                    status: BenchmarkStatus::Completed,
                                    progress: 100.0,
                                    throughput: event.request_throughput.map_or("0".to_string(), |e| format!("{e:.2}")),
                                    successful_requests,
                                    failed_requests,
                                }));
                                dispatcher.lock().expect("lock").dispatch(Action::AddBenchmarkResults(results));
                            }
                        }
                        BenchmarkEvent::Message(event) => {
                            dispatcher.lock().expect("lock").dispatch(Action::LogMessage(LogMessageUI {
                                message: event.message,
                                level: LogLevel::Info,
                                timestamp: event.timestamp,
                            }));
                        }
                        BenchmarkEvent::BenchmarkReportEnd(path) => {
                            dispatcher.lock().expect("lock").dispatch(Action::LogMessage(LogMessageUI {
                                message: format!("Benchmark report saved to {}", path),
                                level: LogLevel::Info,
                                timestamp: chrono::Utc::now(),
                            }));
                            break;
                        }
                        BenchmarkEvent::BenchmarkError(event) => {
                            dispatcher.lock().expect("lock").dispatch(Action::LogMessage(LogMessageUI {
                                message: format!("Error running benchmark: {:?}", event),
                                level: LogLevel::Error,
                                timestamp: chrono::Utc::now(),
                            }));
                            break;
                        }
                    }
                }
            }=>{}
            _ = stop_receiver_signal.recv() => {}
        }
    });
    let mut stop_receiver_signal = stop_sender.subscribe();
    // Render loop task; restores the terminal when the app exits or the stop
    // signal fires.
    let app_thread = tokio::spawn(async move {
        tokio::select! {
            _ = async {
                let _ = app.run(&mut ratatui::init()).await;
                ratatui::restore();
            }=>{}
            _ = stop_receiver_signal.recv() => {}
        }
    });
    let _ = event_thread.await;
    let _ = app_thread.await;
}
impl App {
    /// Build the app with a fresh store/dispatcher pair.
    pub fn new(
        benchmark_config: BenchmarkConfig,
        receiver: Receiver<AppEvent>,
        stop_sender: Sender<()>,
    ) -> App {
        let store = Arc::from(Mutex::new(Store::new()));
        let dispatcher = Arc::from(Mutex::new(Dispatcher::new(store.clone())));
        App {
            exit: false,
            store: store.clone(),
            dispatcher: dispatcher.clone(),
            receiver,
            benchmark_config,
            stop_sender,
        }
    }

    /// Main loop: redraw, then wait for the next event, until `exit` is set.
    pub async fn run(&mut self, terminal: &mut DefaultTerminal) -> io::Result<()> {
        while !self.exit {
            terminal.draw(|frame| self.draw(frame))?;
            self.handle_events().await?;
        }
        // signal everybody to stop
        let _ = self.stop_sender.send(());
        Ok(())
    }

    /// Render one frame (delegates to the Widget impl on &App).
    fn draw(&self, frame: &mut Frame) {
        frame.render_widget(self, frame.area())
    }

    /// Wait for the next app event; a closed channel is surfaced as an error
    /// so the run loop terminates.
    async fn handle_events(&mut self) -> io::Result<()> {
        match self.receiver.recv().await {
            None => Err(io::Error::new(io::ErrorKind::Other, "No event")),
            Some(event) => match event {
                AppEvent::Tick => Ok(()),
                AppEvent::Key(key_event) => self.handle_key_event(key_event),
                AppEvent::Resize => Ok(()),
            },
        }
    }

    /// Quit on 'q' or Ctrl-C; ignore everything else.
    fn handle_key_event(&mut self, key_event: KeyEvent) -> io::Result<()> {
        match key_event {
            KeyEvent {
                code: KeyCode::Char('q'),
                ..
            } => self.exit(),
            KeyEvent {
                code: KeyCode::Char('c'),
                modifiers: KeyModifiers::CONTROL,
                ..
            } => self.exit(),
            _ => {}
        }
        Ok(())
    }

    fn exit(&mut self) {
        self.exit = true;
    }

    /// Build the chart datasets from completed results: (x, y) points of
    /// token throughput and inter-token latency, keyed by executor type
    /// (x = request rate for ConstantArrivalRate, x = max VUs for ConstantVUs).
    fn create_datasets(&self, state: AppState) -> HashMap<String, Vec<(f64, f64)>> {
        let token_throughput_rate = state
            .results
            .iter()
            .filter_map(|r| match r.executor_type() {
                ExecutorType::ConstantArrivalRate => {
                    let throughput = r.token_throughput_secs().unwrap_or(0.0);
                    Some((r.executor_config().rate.unwrap(), throughput))
                }
                ExecutorType::ConstantVUs => None,
            })
            .collect::<Vec<_>>();
        let token_throughput_vus = state
            .results
            .iter()
            .filter_map(|r| match r.executor_type() {
                ExecutorType::ConstantVUs => {
                    let throughput = r.token_throughput_secs().unwrap_or(0.0);
                    Some((r.executor_config().max_vus as f64, throughput))
                }
                ExecutorType::ConstantArrivalRate => None,
            })
            .collect::<Vec<_>>();
        let inter_token_latency_rate = state
            .results
            .iter()
            .filter_map(|r| match r.executor_type() {
                ExecutorType::ConstantArrivalRate => {
                    let latency = r
                        .inter_token_latency_avg()
                        .unwrap_or_default()
                        .as_secs_f64();
                    Some((r.executor_config().rate.unwrap(), latency))
                }
                ExecutorType::ConstantVUs => None,
            })
            .collect::<Vec<_>>();
        let inter_token_latency_vus = state
            .results
            .iter()
            .filter_map(|r| match r.executor_type() {
                ExecutorType::ConstantVUs => {
                    let latency = r
                        .inter_token_latency_avg()
                        .unwrap_or_default()
                        .as_secs_f64();
                    Some((r.executor_config().max_vus as f64, latency))
                }
                ExecutorType::ConstantArrivalRate => None,
            })
            .collect::<Vec<_>>();
        HashMap::from([
            ("token_throughput_rate".to_string(), token_throughput_rate),
            ("token_throughput_vus".to_string(), token_throughput_vus),
            (
                "inter_token_latency_rate".to_string(),
                inter_token_latency_rate,
            ),
            (
                "inter_token_latency_vus".to_string(),
                inter_token_latency_vus,
            ),
        ])
    }
}
impl Widget for &App {
    /// Draws one full dashboard frame: a one-line benchmark-config summary on
    /// top, below it (left) the benchmark-steps table, (right) the token
    /// throughput chart, and in the bottom half the log list, newest first.
    fn render(self, area: Rect, buf: &mut Buffer) {
        // Snapshot the shared state once per frame; the lock is released
        // immediately because `get_state` returns a copy.
        let state = self.store.lock().unwrap().get_state();
        let data = self.create_datasets(state.clone());
        // Vertical split: 1 line of config text, the rest for widgets.
        let main_layout = Layout::default()
            .direction(Direction::Vertical)
            .constraints([Constraint::Length(1), Constraint::Min(20)])
            .split(area);
        // Upper half: steps table + chart; lower half: logs.
        let bottom_layout = Layout::default()
            .direction(Direction::Vertical)
            .constraints([Constraint::Percentage(50), Constraint::Percentage(50)])
            .split(main_layout[1]);
        // 35% steps table / 65% chart.
        let steps_graph_layout = Layout::default()
            .direction(Direction::Horizontal)
            .constraints([Constraint::Percentage(35), Constraint::Percentage(65)])
            .split(bottom_layout[0]);
        // LOGS
        let logs_title = Line::from("Logs".bold()).centered();
        let logs_block = Block::bordered()
            .title_top(logs_title)
            .border_set(border::THICK);
        // Messages are stored oldest-first; reversing + BottomToTop keeps the
        // most recent entry pinned at the bottom of the list.
        List::new(
            state
                .messages
                .iter()
                .rev()
                .map(|m| {
                    // Color the level tag by severity.
                    let level_span = match m.level {
                        LogLevel::Info => {
                            Span::raw(m.level.to_string().to_uppercase()).green().bold()
                        }
                        LogLevel::Warning => Span::raw(m.level.to_string().to_uppercase())
                            .yellow()
                            .bold(),
                        LogLevel::Error => {
                            Span::raw(m.level.to_string().to_uppercase()).red().bold()
                        }
                    };
                    let content = Line::from(vec![
                        m.formatted_timestamp().clone().gray(),
                        Span::raw(" "),
                        level_span,
                        Span::raw(" "),
                        Span::raw(m.message.to_string()).bold(),
                    ]);
                    ListItem::new(content)
                })
                .collect::<Vec<_>>(),
        )
        .direction(BottomToTop)
        .block(logs_block)
        .render(bottom_layout[1], buf);
        // BENCHMARK config
        // "Manual" when the user supplied explicit rates, "Automatic" otherwise.
        let rate_mode = match self.benchmark_config.rates {
            None => "Automatic".to_string(),
            Some(_) => "Manual".to_string(),
        };
        let config_text = Text::from(vec![Line::from(vec![
            format!("Profile: {profile} | Benchmark: {kind} | Max VUs: {max_vus} | Duration: {duration} sec | Rates: {rates} | Warmup: {warmup} sec",
                    profile = self.benchmark_config.profile.clone().unwrap_or("N/A".to_string()),
                    kind = self.benchmark_config.benchmark_kind,
                    max_vus = self.benchmark_config.max_vus,
                    duration = self.benchmark_config.duration.as_secs_f64(),
                    rates = rate_mode,
                    warmup = self.benchmark_config.warmup_duration.as_secs_f64()).white().bold(),
        ])]);
        Paragraph::new(config_text.clone()).render(main_layout[0], buf);
        // STEPS
        let steps_block_title = Line::from("Benchmark steps".bold()).centered();
        let steps_block = Block::bordered()
            .title(steps_block_title.alignment(Alignment::Center))
            .border_set(border::THICK);
        // One table row per benchmark step; error rate is highlighted in red
        // only when at least one request failed.
        let step_rows = state
            .benchmarks
            .iter()
            .map(|b| {
                let error_rate = if b.failed_requests > 0 {
                    format!(
                        "{:4.0}%",
                        b.failed_requests as f64
                            / (b.failed_requests + b.successful_requests) as f64
                            * 100.
                    )
                    .light_red()
                    .bold()
                } else {
                    format!("{:4.0}%", 0).to_string().white()
                };
                let cells = vec![
                    b.id.clone().white(),
                    b.status.to_string().white(),
                    format!("{:4.0}%", b.progress).white(),
                    error_rate,
                    format!("{:>6.6} req/sec avg", b.throughput).green().bold(),
                ];
                Row::new(cells)
            })
            .collect::<Vec<_>>();
        let widths = [
            Constraint::Length(30),
            Constraint::Length(10),
            Constraint::Length(5),
            Constraint::Length(5),
            Constraint::Length(20),
        ];
        // steps table
        Table::new(step_rows, widths)
            .header(Row::new(vec![
                Cell::new(Line::from("Bench").alignment(Alignment::Left)),
                Cell::new(Line::from("Status").alignment(Alignment::Left)),
                Cell::new(Line::from("%").alignment(Alignment::Left)),
                Cell::new(Line::from("Err").alignment(Alignment::Left)),
                Cell::new(Line::from("Throughput").alignment(Alignment::Left)),
            ]))
            .block(steps_block)
            .render(steps_graph_layout[0], buf);
        // CHARTS
        let graphs_block_title = Line::from("Token throughput rate".bold()).centered();
        let graphs_block = Block::bordered()
            .title(graphs_block_title.alignment(Alignment::Center))
            .border_set(border::THICK);
        // NOTE(review): `create_datasets` always inserts this key, so the
        // unwrap cannot fire as long as that invariant holds.
        let binding = data.get("token_throughput_rate").unwrap().clone();
        let datasets = vec![Dataset::default()
            .name("Token throughput rate".to_string())
            .marker(symbols::Marker::Dot)
            .graph_type(ratatui::widgets::GraphType::Scatter)
            .style(ratatui::style::Style::default().fg(ratatui::style::Color::LightMagenta))
            .data(&binding)];
        // Axis bounds adapt to the data, with a fallback for empty series.
        let (xmax, ymax) = get_max_bounds(&binding, (10.0, 100.0));
        let x_axis = ratatui::widgets::Axis::default()
            .title("Arrival rate (req/s)".to_string())
            .style(ratatui::style::Style::default().white())
            .bounds([0.0, xmax])
            .labels(get_axis_labels(0.0, xmax, 5));
        let y_axis = ratatui::widgets::Axis::default()
            .title("Throughput (tokens/s)".to_string())
            .style(ratatui::style::Style::default().white())
            .bounds([0.0, ymax])
            .labels(get_axis_labels(0.0, ymax, 5));
        ratatui::widgets::Chart::new(datasets)
            .x_axis(x_axis)
            .y_axis(y_axis)
            .block(graphs_block)
            .legend_position(None)
            .render(steps_graph_layout[1], buf);
    }
}
/// Returns the maximum x and y values found in `data`, falling back to
/// `default_max` for an empty series. Used to size the chart axis bounds.
///
/// Comparison uses `f64::total_cmp` instead of `partial_cmp(..).unwrap()`,
/// so a stray NaN sample can no longer panic the render path (a NaN would
/// instead surface as the bound, since total order places NaN above all
/// finite values).
fn get_max_bounds(data: &[(f64, f64)], default_max: (f64, f64)) -> (f64, f64) {
    let xmax = data
        .iter()
        .map(|(x, _)| *x)
        .max_by(|a, b| a.total_cmp(b))
        .unwrap_or(default_max.0);
    let ymax = data
        .iter()
        .map(|(_, y)| *y)
        .max_by(|a, b| a.total_cmp(b))
        .unwrap_or(default_max.1);
    (xmax, ymax)
}
/// Builds `num_labels` evenly spaced, two-decimal labels for a chart axis.
///
/// Labels start at `min` and advance by `(max - min) / num_labels`; the
/// `max` endpoint itself is not emitted.
fn get_axis_labels(min: f64, max: f64, num_labels: u32) -> Vec<String> {
    let step = (max - min) / num_labels as f64;
    let mut labels = Vec::with_capacity(num_labels as usize);
    for i in 0..num_labels {
        labels.push(format!("{:.2}", min + i as f64 * step));
    }
    labels
}
/// Severity of a log entry shown in the UI log list; the variant name is
/// rendered (uppercased) via the derived `Display`.
#[allow(dead_code)]
#[derive(Clone, strum_macros::Display)]
enum LogLevel {
    Info,
    Warning,
    Error,
}
/// A single log line displayed in the dashboard's "Logs" panel.
#[derive(Clone)]
pub(crate) struct LogMessageUI {
    // Human-readable message body
    message: String,
    // Severity used to color the level tag
    level: LogLevel,
    // UTC time the message was produced
    timestamp: chrono::DateTime<chrono::Utc>,
}
impl LogMessageUI {
    /// Renders the message timestamp as an RFC 3339 string for display.
    fn formatted_timestamp(&self) -> String {
        self.timestamp.to_rfc3339()
    }
}
/// Per-benchmark-step row shown in the "Benchmark steps" table.
#[derive(Clone)]
pub(crate) struct BenchmarkUI {
    // Step identifier, e.g. "warmup" or "constant@1.50req/s"
    pub(crate) id: String,
    // Running / Completed
    status: BenchmarkStatus,
    // Completion percentage (0-100)
    progress: f64,
    // Pre-formatted throughput string for display
    throughput: String,
    // Counters used to derive the displayed error rate
    successful_requests: u64,
    failed_requests: u64,
}
/// Lifecycle state of a benchmark step; rendered via the derived `Display`.
#[derive(Clone, strum_macros::Display)]
enum BenchmarkStatus {
    Running,
    Completed,
}
| rust | Apache-2.0 | 0a54882d9ae3d2e183005345bd86b5c47382a338 | 2026-01-04T20:20:59.288369Z | false |
huggingface/inference-benchmarker | https://github.com/huggingface/inference-benchmarker/blob/0a54882d9ae3d2e183005345bd86b5c47382a338/src/profiles.rs | src/profiles.rs | use crate::{RunConfiguration, TokenizeOptions};
use std::string::ToString;
pub fn apply_profile(
profile: &str,
run_configuration: RunConfiguration,
) -> anyhow::Result<RunConfiguration> {
match profile {
"fixed-length" => Ok(RunConfiguration {
max_vus: 128,
duration: std::time::Duration::from_secs(120),
rates: None,
num_rates: 10,
benchmark_kind: "sweep".to_string(),
warmup_duration: std::time::Duration::from_secs(30),
prompt_options: Some(TokenizeOptions {
num_tokens: Some(200),
min_tokens: 200,
max_tokens: 200,
variance: 0,
}),
decode_options: Some(TokenizeOptions {
num_tokens: Some(800),
min_tokens: 50,
max_tokens: 800,
variance: 100,
}),
dataset: "hlarcher/inference-benchmarker".to_string(),
dataset_file: "share_gpt_0_turns.json".to_string(),
..run_configuration
}),
"chat" => {
// Simulates a multi-turn chat scenario
// in which the model answers to successive user prompts.
// The model is prompted with the whole conversation history
// at each turn. Prefix caching will have a significant impact
// on the performance of this benchmark.
Ok(RunConfiguration {
max_vus: 128,
duration: std::time::Duration::from_secs(120),
rates: None,
num_rates: 10,
benchmark_kind: "sweep".to_string(),
warmup_duration: std::time::Duration::from_secs(30),
prompt_options: None, // use prompts from dataset
decode_options: Some(TokenizeOptions {
num_tokens: Some(800), // decode up to 800 tokens
min_tokens: 50,
max_tokens: 800,
variance: 100,
}),
dataset: "hlarcher/inference-benchmarker".to_string(),
dataset_file: "share_gpt_turns.json".to_string(),
..run_configuration
})
}
"code-generation" => {
// Simulates code-complete scenarios. Model is given large code snippets and
// asked to complete them with a few tokens (e.g. a function name, a few code lines).
Ok(RunConfiguration {
max_vus: 128,
duration: std::time::Duration::from_secs(120),
rates: None,
num_rates: 10,
benchmark_kind: "sweep".to_string(),
warmup_duration: std::time::Duration::from_secs(30),
prompt_options: Some(TokenizeOptions {
num_tokens: Some(4096),
min_tokens: 3000,
max_tokens: 6000,
variance: 1000,
}),
decode_options: Some(TokenizeOptions {
num_tokens: Some(50),
min_tokens: 30,
max_tokens: 80,
variance: 10,
}),
dataset: "hlarcher/inference-benchmarker".to_string(),
dataset_file: "github_code.json".to_string(),
..run_configuration
})
}
"classification" => {
// Simulates cases where the model is fed with large chunks
// of business data or document repeatedly and users
// ask simple questions about the content (summarization, classification...)
// Those use cases benefit a lot from prefix caching and chunked prefill.
Ok(RunConfiguration {
max_vus: 128,
duration: std::time::Duration::from_secs(120),
rates: None,
num_rates: 10,
benchmark_kind: "sweep".to_string(),
warmup_duration: std::time::Duration::from_secs(30),
prompt_options: Some(TokenizeOptions {
num_tokens: Some(10000),
min_tokens: 8000,
max_tokens: 12000,
variance: 5000,
}),
decode_options: Some(TokenizeOptions {
num_tokens: Some(50),
min_tokens: 30,
max_tokens: 80,
variance: 10,
}),
dataset: "hlarcher/inference-benchmarker".to_string(),
dataset_file: "classification.json".to_string(),
..run_configuration
})
}
_ => Err(anyhow::anyhow!("Unknown profile: {}", profile)),
}
}
| rust | Apache-2.0 | 0a54882d9ae3d2e183005345bd86b5c47382a338 | 2026-01-04T20:20:59.288369Z | false |
huggingface/inference-benchmarker | https://github.com/huggingface/inference-benchmarker/blob/0a54882d9ae3d2e183005345bd86b5c47382a338/src/event.rs | src/event.rs | use crossterm::event;
use crossterm::event::KeyEvent;
use std::time::{Duration, Instant};
use tokio::sync::{broadcast, mpsc};
/// Events forwarded from the terminal to the UI loop.
pub enum AppEvent {
    // Frame tick, emitted roughly `fps` times per second
    Tick,
    // A key press/release read from the terminal
    Key(KeyEvent),
    // Terminal was resized; dimensions are re-read at render time
    Resize,
}
/// Polls the terminal at `fps` and forwards input/tick events over
/// `event_sender` until a shutdown signal arrives on `shutdown_receiver`.
pub async fn terminal_event_task(
    fps: u32,
    event_sender: mpsc::Sender<AppEvent>,
    mut shutdown_receiver: broadcast::Receiver<()>,
) {
    // End task if a message is received on shutdown_receiver.
    // The inner event_loop never returns on its own, so the select
    // effectively runs it until shutdown.
    tokio::select! {
        _ = event_loop(fps, event_sender) => {
        },
        _ = shutdown_receiver.recv() => {}
    }
}
/// Core polling loop: reads crossterm events without blocking and emits a
/// `Tick` once per frame (every `1/fps` seconds).
async fn event_loop(fps: u32, event_sender: mpsc::Sender<AppEvent>) {
    // Frame budget. NOTE(review): `Duration / u32` panics when fps == 0 —
    // callers must pass fps > 0.
    let per_frame = Duration::from_secs(1) / fps;
    // When was last frame executed
    let mut last_frame = Instant::now();
    loop {
        // Sleep to avoid blocking the thread for too long
        if let Some(sleep) = per_frame.checked_sub(last_frame.elapsed()) {
            tokio::time::sleep(sleep).await;
        }
        // Get crossterm event and send a new one over the channel.
        // Poll with a zero timeout so the async runtime is never blocked.
        if event::poll(Duration::from_secs(0)).expect("no events available") {
            match event::read().expect("unable to read event") {
                // Send failures are ignored: the receiver may be gone at shutdown.
                event::Event::Key(e) => event_sender.send(AppEvent::Key(e)).await.unwrap_or(()),
                event::Event::Resize(_w, _h) => {
                    event_sender.send(AppEvent::Resize).await.unwrap_or(())
                }
                _ => (),
            }
        }
        // Frame budget exceeded
        if last_frame.elapsed() >= per_frame {
            // Send tick
            event_sender.send(AppEvent::Tick).await.unwrap_or(());
            // Reset last_frame time
            last_frame = Instant::now();
        }
    }
}
| rust | Apache-2.0 | 0a54882d9ae3d2e183005345bd86b5c47382a338 | 2026-01-04T20:20:59.288369Z | false |
huggingface/inference-benchmarker | https://github.com/huggingface/inference-benchmarker/blob/0a54882d9ae3d2e183005345bd86b5c47382a338/src/lib.rs | src/lib.rs | use std::collections::HashMap;
use std::fs::File;
use std::io;
use std::io::Write;
use std::path::Path;
use std::sync::Arc;
pub use crate::app::run_console;
pub use crate::benchmark::{BenchmarkConfig, BenchmarkKind};
use crate::benchmark::{Event, MessageEvent};
pub use crate::profiles::apply_profile;
use crate::requests::OpenAITextGenerationBackend;
pub use crate::requests::TokenizeOptions;
use chrono::Local;
use crossterm::ExecutableCommand;
use log::{debug, error, info, warn, Level, LevelFilter};
use reqwest::Url;
use tokenizers::{FromPretrainedParameters, Tokenizer};
use tokio::sync::broadcast::Sender;
use tokio::sync::Mutex;
use writers::BenchmarkReportWriter;
mod app;
mod benchmark;
mod event;
mod executors;
mod flux;
mod profiles;
mod requests;
mod results;
mod scheduler;
mod table;
mod writers;
/// Top-level, user-supplied configuration for a whole benchmark run; turned
/// into a `BenchmarkConfig` (plus backend/tokenizer setup) by `run`.
pub struct RunConfiguration {
    // Base URL of the OpenAI-compatible endpoint under test
    pub url: Url,
    // API key sent to the endpoint
    pub api_key: String,
    // HF hub id of the tokenizer used to count tokens
    pub tokenizer_name: String,
    // Optional named profile; when set, `apply_profile` overrides fields below
    pub profile: Option<String>,
    // Maximum number of concurrent virtual users
    pub max_vus: u64,
    // Duration of each benchmark step
    pub duration: std::time::Duration,
    // Explicit request rates (req/s); None means rates are derived automatically
    pub rates: Option<Vec<f64>>,
    // Number of rates to generate for sweep benchmarks
    pub num_rates: u64,
    // "throughput" | "sweep" | "rate" (case-insensitive)
    pub benchmark_kind: String,
    // Duration of the single-VU warmup step
    pub warmup_duration: std::time::Duration,
    // Whether to run the ratatui dashboard
    pub interactive: bool,
    // Prompt token shaping; None means use dataset prompts as-is
    pub prompt_options: Option<TokenizeOptions>,
    // Decode token shaping; None means backend defaults
    pub decode_options: Option<TokenizeOptions>,
    // HF dataset repo holding the prompts
    pub dataset: String,
    // File inside the dataset repo
    pub dataset_file: String,
    // HF token for gated tokenizers/datasets
    pub hf_token: Option<String>,
    // Free-form metadata copied into the report
    pub extra_metadata: Option<HashMap<String, String>>,
    // Model name sent in requests and recorded in the report
    pub model_name: String,
    // Identifier recorded in the report
    pub run_id: String,
}
/// Entry point for a benchmark run: applies the profile, loads the
/// tokenizer, builds the backend and config, spawns the UI (or a channel
/// drain in non-interactive mode), downloads the dataset, runs the
/// benchmark, writes the JSON report, restores the terminal and prints the
/// final report to stdout.
///
/// `stop_sender` is a broadcast channel used to cancel all tasks (e.g. on
/// Ctrl-C); `run` also subscribes to it to abort the benchmark itself.
///
/// # Errors
/// Returns an error on invalid configuration, tokenizer/backend setup
/// failure, or report-writing failure.
pub async fn run(mut run_config: RunConfiguration, stop_sender: Sender<()>) -> anyhow::Result<()> {
    info!("Starting benchmark");
    // set process system limits
    sysinfo::set_open_files_limit(0);
    // apply profile if needed
    run_config = match run_config.profile.clone() {
        None => run_config,
        Some(profile) => match apply_profile(profile.as_str(), run_config) {
            Ok(config) => {
                info!("Profile applied: {}", profile);
                config
            }
            Err(e) => {
                error!("Failed to apply profile: {:?}", e);
                return Err(e);
            }
        },
    };
    // initialize tokenizer (downloads from the HF hub; may need a token)
    let params = FromPretrainedParameters {
        token: run_config.hf_token.clone(),
        ..Default::default()
    };
    let tokenizer =
        match Tokenizer::from_pretrained(run_config.tokenizer_name.clone(), Some(params)) {
            Ok(tokenizer) => tokenizer,
            Err(e) => {
                return Err(anyhow::anyhow!("Error loading tokenizer: {e}"));
            }
        };
    let tokenizer = Arc::new(tokenizer);
    let backend = OpenAITextGenerationBackend::try_new(
        run_config.api_key,
        run_config.url,
        run_config.model_name.clone(),
        tokenizer,
        run_config.duration,
    )?;
    let config = BenchmarkConfig {
        max_vus: run_config.max_vus,
        duration: run_config.duration,
        // Unknown kinds silently fall back to Sweep.
        benchmark_kind: match run_config.benchmark_kind.to_lowercase().as_str() {
            "throughput" => BenchmarkKind::Throughput,
            "sweep" => BenchmarkKind::Sweep,
            "rate" => BenchmarkKind::Rate,
            _ => BenchmarkKind::Sweep,
        },
        warmup_duration: run_config.warmup_duration,
        rates: run_config.rates,
        num_rates: run_config.num_rates,
        prompt_options: run_config.prompt_options.clone(),
        decode_options: run_config.decode_options.clone(),
        tokenizer: run_config.tokenizer_name.clone(),
        model_name: run_config.model_name.clone(),
        profile: run_config.profile.clone(),
        extra_metadata: run_config.extra_metadata.clone(),
        run_id: run_config.run_id.clone(),
    };
    config.validate()?;
    // Event bus between the benchmark and the UI.
    let (tx, mut rx) = tokio::sync::mpsc::unbounded_channel();
    if run_config.interactive {
        // send logs to file (stdout is owned by the TUI in interactive mode)
        let target = Box::new(File::create("log.txt").expect("Can't create file"));
        env_logger::Builder::new()
            .target(env_logger::Target::Pipe(target))
            .filter(Some("inference_benchmarker"), LevelFilter::Debug)
            .format(|buf, record| {
                writeln!(
                    buf,
                    "[{} {} {}:{}] {}",
                    Local::now().format("%Y-%m-%d %H:%M:%S%.3f"),
                    record.level(),
                    record.file().unwrap_or("unknown"),
                    record.line().unwrap_or(0),
                    record.args()
                )
            })
            .init();
    } else {
        env_logger::init();
    }
    let config_clone = config.clone();
    let mut stop_receiver = stop_sender.subscribe();
    let stop_sender_clone = stop_sender.clone();
    // UI task: either the interactive console or a drain of the event bus.
    let ui_thread = tokio::spawn(async move {
        tokio::select! {
            _ = stop_receiver.recv() => {
                debug!("Received stop signal, stopping benchmark");
            }
            _ = async{
                if run_config.interactive {
                    run_console(config_clone, rx, stop_sender_clone).await;
                } else {
                    // consume the channel to avoid closed channel error
                    while rx.recv().await.is_some() {}
                }
            } => {}
        }
    });
    // download prompts dataset
    info!("Downloading dataset");
    let _ = tx.send(Event::Message(MessageEvent {
        message: "Downloading dataset".to_string(),
        timestamp: chrono::Utc::now(),
        level: Level::Info,
    }));
    let filepath = requests::ConversationTextRequestGenerator::download_dataset(
        run_config.dataset,
        run_config.dataset_file,
        run_config.hf_token.clone(),
    )
    .expect("Can't download dataset");
    let requests = requests::ConversationTextRequestGenerator::load(
        filepath,
        run_config.tokenizer_name.clone(),
        run_config.prompt_options,
        run_config.decode_options,
        run_config.hf_token,
    )?;
    let mut benchmark = benchmark::Benchmark::new(
        config.clone(),
        Box::new(backend),
        Arc::from(Mutex::from(requests)),
        tx.clone(),
        stop_sender.clone(),
    );
    let mut stop_receiver = stop_sender.subscribe();
    // Run the benchmark, racing against a stop signal.
    tokio::select! {
        report = benchmark.run() => {
            match report {
                Ok(_) => {
                    // Persist the report under results/<tokenizer>_<timestamp>.json
                    let report = benchmark.get_report();
                    let path = format!("results/{}_{}.json",run_config.tokenizer_name.replace("/","_").replace(".","_"), chrono::Utc::now().format("%Y-%m-%d-%H-%M-%S"));
                    let path=Path::new(&path);
                    let writer=BenchmarkReportWriter::try_new(config.clone(), report)?;
                    writer.json(path).await?;
                    info!("Report saved to {:?}",path);
                    let _ = tx.send(Event::BenchmarkReportEnd(format!("{:?}", path)));
                },
                Err(e) => {
                    error!("Error running benchmark: {:?}", e.to_string());
                    let _ = tx.send(Event::BenchmarkError(e.to_string()));
                }
            };
        }
        _ = stop_receiver.recv() => {
            debug!("Received stop signal, stopping benchmark");
        }
    }
    info!("Benchmark finished");
    if !run_config.interactive {
        // quit app if not interactive
        let _ = stop_sender.send(());
    }
    ui_thread.await?;
    // Revert terminal to original view
    io::stdout().execute(ratatui::crossterm::terminal::LeaveAlternateScreen)?;
    ratatui::crossterm::terminal::disable_raw_mode()?;
    io::stdout().execute(ratatui::crossterm::cursor::Show)?;
    // Print the final (possibly partial) report to stdout.
    let report = benchmark.get_report();
    match BenchmarkReportWriter::try_new(config.clone(), report) {
        Ok(writer) => {
            writer.stdout().await?;
        }
        Err(_) => {
            warn!("No results to report.");
        }
    };
    Ok(())
}
| rust | Apache-2.0 | 0a54882d9ae3d2e183005345bd86b5c47382a338 | 2026-01-04T20:20:59.288369Z | false |
huggingface/inference-benchmarker | https://github.com/huggingface/inference-benchmarker/blob/0a54882d9ae3d2e183005345bd86b5c47382a338/src/benchmark.rs | src/benchmark.rs | use crate::requests::{TextGenerationBackend, TextRequestGenerator, TokenizeOptions};
use crate::results::{BenchmarkReport, BenchmarkResults};
use crate::scheduler::{ExecutorType, SchedulerProgress};
use crate::{executors, scheduler};
use log::{debug, info};
use serde::Serialize;
use std::collections::HashMap;
use std::sync::Arc;
use std::time::Duration;
use tokio::sync::mpsc::{Receiver, Sender};
use tokio::sync::{broadcast, mpsc, Mutex};
const THROUGHPUT_BUDGET: f64 = 1.2; // sweep up to 120% of max throughput
/// Kind of benchmark to run.
#[derive(Clone, Debug, strum_macros::Display, Serialize)]
pub enum BenchmarkKind {
    // Single step at maximum concurrency to find peak throughput
    Throughput,
    // Throughput step first, then `num_rates` rate steps up to ~120% of peak
    Sweep,
    // One step per user-supplied rate
    Rate,
}
/// A log-style message published on the event bus for display in the UI.
pub struct MessageEvent {
    pub message: String,
    pub timestamp: chrono::DateTime<chrono::Utc>,
    pub level: log::Level,
}
/// Progress/result payload for a single benchmark step, published on the
/// event bus at start, during progress updates and at completion.
pub struct BenchmarkEvent {
    // Step identifier, e.g. "warmup", "throughput" or "constant@1.50req/s"
    pub id: String,
    pub scheduler_type: ExecutorType,
    // Successful-request rate, when known
    pub request_throughput: Option<f64>,
    // Completion percentage (0-100)
    pub progress: f64,
    // Full results, only populated on the end event
    pub results: Option<BenchmarkResults>,
    pub successful_requests: u64,
    pub failed_requests: u64,
}
/// Messages flowing from the benchmark to the UI over the event bus.
pub enum Event {
    BenchmarkStart(BenchmarkEvent),
    BenchmarkProgress(BenchmarkEvent),
    BenchmarkEnd(BenchmarkEvent),
    // Free-form log message
    Message(MessageEvent),
    // Path of the saved JSON report
    BenchmarkReportEnd(String),
    // Fatal error description
    BenchmarkError(String),
}
/// Orchestrates a whole benchmark run (warmup + one or more steps) against
/// a text-generation backend, accumulating results into a report.
pub struct Benchmark {
    // Set when `run` starts / ends; both needed for `duration()`
    start_time: Option<tokio::time::Instant>,
    end_time: Option<tokio::time::Instant>,
    backend: Box<dyn TextGenerationBackend + Send + Sync>,
    // Shared generator the schedulers pull requests from
    requests: Arc<Mutex<dyn TextRequestGenerator + Send>>,
    report: BenchmarkReport,
    pub(crate) config: BenchmarkConfig,
    // Channel used to publish `Event`s to the UI
    event_bus: mpsc::UnboundedSender<Event>,
    // Broadcast channel used to cancel all in-flight work
    stop_sender: broadcast::Sender<()>,
}
/// Validated benchmark parameters; serialized into the JSON report
/// (durations as integer seconds).
#[serde_with::serde_as]
#[derive(Clone, Serialize)]
pub struct BenchmarkConfig {
    // Maximum number of concurrent virtual users
    pub max_vus: u64,
    #[serde(rename = "duration_secs")]
    #[serde_as(as = "serde_with::DurationSeconds<u64>")]
    pub duration: Duration,
    pub benchmark_kind: BenchmarkKind,
    #[serde(rename = "warmup_duration_secs")]
    #[serde_as(as = "serde_with::DurationSeconds<u64>")]
    pub warmup_duration: Duration,
    // Explicit rates (req/s); required for Rate, forbidden otherwise
    pub rates: Option<Vec<f64>>,
    // Number of rate steps generated by a sweep
    pub num_rates: u64,
    pub prompt_options: Option<TokenizeOptions>,
    pub decode_options: Option<TokenizeOptions>,
    // Tokenizer hub id (recorded for reporting)
    pub tokenizer: String,
    pub model_name: String,
    // Profile name the config came from, if any
    pub profile: Option<String>,
    #[serde(rename = "meta")]
    pub extra_metadata: Option<HashMap<String, String>>,
    pub run_id: String,
}
impl BenchmarkConfig {
    /// Checks that the configuration is internally consistent before a run.
    ///
    /// # Errors
    /// Returns an error when `max_vus`, `duration` or `warmup_duration` is
    /// zero, or when the presence of explicit `rates` does not match the
    /// benchmark kind (forbidden for throughput/sweep, required for rate).
    pub fn validate(&self) -> anyhow::Result<()> {
        // Guard clauses for the numeric fields, checked in order.
        if self.max_vus == 0 {
            return Err(anyhow::anyhow!("max_vus must be greater than 0"));
        }
        if self.duration.as_secs() == 0 {
            return Err(anyhow::anyhow!("duration must be greater than 0"));
        }
        if self.warmup_duration.as_secs() == 0 {
            return Err(anyhow::anyhow!("warmup_duration must be greater than 0"));
        }
        // Explicit rates only make sense for the `Rate` kind.
        match (&self.benchmark_kind, self.rates.is_some()) {
            (BenchmarkKind::Throughput, true) => Err(anyhow::anyhow!(
                "rates must not be specified for throughput benchmark"
            )),
            (BenchmarkKind::Sweep, true) => Err(anyhow::anyhow!(
                "rates must not be specified for sweep benchmark"
            )),
            (BenchmarkKind::Rate, false) => Err(anyhow::anyhow!(
                "rates must be specified for rate benchmark"
            )),
            _ => Ok(()),
        }
    }
}
/// Pairs a scheduler progress update with the id of the step it belongs to.
pub struct BenchmarkProgress {
    id: String,
    progress: SchedulerProgress,
}
impl Benchmark {
    /// Creates a benchmark with an empty report; nothing runs until `run`.
    pub fn new(
        config: BenchmarkConfig,
        backend: Box<dyn TextGenerationBackend + Send + Sync>,
        requests: Arc<Mutex<dyn TextRequestGenerator + Send>>,
        event_bus: mpsc::UnboundedSender<Event>,
        stop_sender: broadcast::Sender<()>,
    ) -> Benchmark {
        Benchmark {
            start_time: None,
            end_time: None,
            report: BenchmarkReport::new(),
            config: config.clone(),
            backend,
            requests,
            event_bus,
            stop_sender,
        }
    }
    /// Returns a snapshot of the report accumulated so far.
    pub fn get_report(&self) -> BenchmarkReport {
        self.report.clone()
    }
    /// Runs the full benchmark: warmup first, then the steps dictated by
    /// `config.benchmark_kind`. Records start/end times and closes the
    /// report.
    ///
    /// # Errors
    /// Propagates scheduler failures and event-bus send failures.
    pub async fn run(&mut self) -> anyhow::Result<BenchmarkReport> {
        self.start_time = Some(tokio::time::Instant::now());
        self.report.start();
        info!("Prewarming backend");
        self.warmup().await?;
        info!("Prewarm complete");
        match self.config.benchmark_kind {
            BenchmarkKind::Throughput => {
                self.run_throughput().await?;
            }
            BenchmarkKind::Sweep => {
                self.run_sweep().await?;
            }
            BenchmarkKind::Rate => {
                self.run_rates().await?;
            }
        }
        self.end_time = Some(tokio::time::Instant::now());
        self.event_bus.send(Event::Message(MessageEvent {
            message: format!(
                "Benchmark complete in {:?}",
                self.duration().expect("duration exists")
            ),
            timestamp: chrono::Utc::now(),
            level: log::Level::Info,
        }))?;
        self.report.end();
        Ok(self.report.clone())
    }
    /// Wall-clock duration of the run; `None` until `run` has finished.
    pub fn duration(&self) -> Option<std::time::Duration> {
        match (self.start_time, self.end_time) {
            (Some(start), Some(end)) => Some(end.duration_since(start)),
            _ => None,
        }
    }
    /// Spawns a task that forwards scheduler progress updates for step `id`
    /// onto the event bus as `BenchmarkProgress` events. Returns the sender
    /// side; sending `None` terminates the forwarding task.
    async fn handle_progress(&self, id: String) -> Sender<Option<SchedulerProgress>> {
        let (tx, mut rx): (
            Sender<Option<SchedulerProgress>>,
            Receiver<Option<SchedulerProgress>>,
        ) = mpsc::channel(8);
        let event_bus = self.event_bus.clone();
        tokio::spawn(async move {
            while let Some(event) = rx.recv().await {
                match event {
                    None => {
                        break;
                    }
                    Some(progress) => {
                        let progress_evt = BenchmarkProgress {
                            id: id.clone(),
                            progress,
                        };
                        // Send failures are ignored: the UI may already be gone.
                        let _ = event_bus.send(Event::BenchmarkProgress(BenchmarkEvent {
                            id: progress_evt.id,
                            scheduler_type: ExecutorType::ConstantVUs,
                            request_throughput: Some(progress_evt.progress.requests_throughput),
                            progress: progress_evt.progress.progress,
                            successful_requests: progress_evt.progress.successful_requests,
                            failed_requests: progress_evt.progress.failed_requests,
                            results: None,
                        }));
                    }
                }
            }
        });
        tx
    }
    /// Runs a single-VU warmup step for `config.warmup_duration` to prewarm
    /// the server; its results are recorded in the report like any step.
    pub async fn warmup(&mut self) -> anyhow::Result<()> {
        // run a warmup benchmark to prewarm the server
        let id = "warmup".to_string();
        // notify start event
        self.event_bus.send(Event::BenchmarkStart(BenchmarkEvent {
            id: id.to_string(),
            scheduler_type: ExecutorType::ConstantVUs,
            request_throughput: None,
            progress: 0.0,
            results: None,
            successful_requests: 0,
            failed_requests: 0,
        }))?;
        // create progress handler
        let tx = self.handle_progress(id.clone()).await;
        // start scheduler
        let mut scheduler = scheduler::Scheduler::new(
            id,
            self.backend.clone(),
            ExecutorType::ConstantVUs,
            executors::ExecutorConfig {
                max_vus: 1,
                duration: self.config.warmup_duration,
                rate: None,
            },
            self.requests.clone(),
            tx.clone(),
            self.stop_sender.clone(),
        );
        scheduler.run().await?;
        let results = scheduler.get_results().lock().await.clone();
        self.report.add_benchmark_result(results.clone());
        // send None to close the progress handler
        tx.send(None).await.unwrap();
        // notify end event
        self.event_bus.send(Event::BenchmarkEnd(BenchmarkEvent {
            id: "warmup".to_string(),
            scheduler_type: ExecutorType::ConstantVUs,
            request_throughput: results.successful_request_rate().ok(),
            progress: 100.0,
            results: Some(results.clone()),
            successful_requests: results.successful_requests() as u64,
            failed_requests: results.failed_requests() as u64,
        }))?;
        Ok(())
    }
    /// Runs a max-concurrency (constant-VUs) step for `config.duration` to
    /// measure peak throughput.
    pub async fn run_throughput(&mut self) -> anyhow::Result<()> {
        info!("Running throughput benchmark");
        let id = "throughput".to_string();
        // notify start event
        self.event_bus.send(Event::BenchmarkStart(BenchmarkEvent {
            id: id.clone(),
            scheduler_type: ExecutorType::ConstantVUs,
            request_throughput: None,
            progress: 0.0,
            results: None,
            successful_requests: 0,
            failed_requests: 0,
        }))?;
        // create progress handler
        let tx = self.handle_progress(id.clone()).await;
        // start scheduler
        let mut scheduler = scheduler::Scheduler::new(
            id.clone(),
            self.backend.clone(),
            ExecutorType::ConstantVUs,
            executors::ExecutorConfig {
                max_vus: self.config.max_vus,
                duration: self.config.duration,
                rate: None,
            },
            self.requests.clone(),
            tx.clone(),
            self.stop_sender.clone(),
        );
        scheduler.run().await?;
        let results = scheduler.get_results().lock().await.clone();
        let rate = results.successful_request_rate().ok();
        self.report.add_benchmark_result(results.clone());
        // send None to close the progress handler
        tx.send(None).await.unwrap();
        // notify end event
        self.event_bus.send(Event::BenchmarkEnd(BenchmarkEvent {
            id: id.clone(),
            scheduler_type: ExecutorType::ConstantVUs,
            request_throughput: rate,
            progress: 100.0,
            results: Some(results.clone()),
            successful_requests: results.successful_requests() as u64,
            failed_requests: results.failed_requests() as u64,
        }))?;
        Ok(())
    }
    /// Sweep: first measures peak throughput, then runs `num_rates` evenly
    /// spaced rate steps up to `THROUGHPUT_BUDGET` (120%) of that peak.
    pub async fn run_sweep(&mut self) -> anyhow::Result<()> {
        // run a throughput benchmark to retrieve the maximum throughput of server
        self.run_throughput().await?;
        // get the max throughput from the second benchmark result (first is warmup)
        let throughput_results = &self.report.get_results()[1];
        let max_throughput = throughput_results.successful_request_rate()?;
        let max_tokens_throughput = throughput_results.token_throughput_secs()?;
        // notify event bus
        self.event_bus.send(Event::Message(MessageEvent {
            message: format!(
                "Max throughput detected at: {:.2} req/s | {:.2} tokens/s",
                max_throughput, max_tokens_throughput
            ),
            timestamp: chrono::Utc::now(),
            level: log::Level::Info,
        }))?;
        // run a sweep benchmark for 10 different rates from 1req/s to max throughput
        let mut rates = Vec::new();
        let num_rates = self.config.num_rates;
        for i in 1..=num_rates {
            rates.push(i as f64 * max_throughput * THROUGHPUT_BUDGET / num_rates as f64);
        }
        for rate in rates {
            self.run_rate(rate).await?;
        }
        Ok(())
    }
    /// Runs one rate step per user-supplied rate (Rate benchmark kind).
    pub async fn run_rates(&mut self) -> anyhow::Result<()> {
        let rates = self.config.rates.clone().expect("config already validated");
        for rate in rates {
            self.run_rate(rate).await?;
        }
        Ok(())
    }
    /// Runs a single constant-arrival-rate step at `rate` req/s for
    /// `config.duration`, bounded by `config.max_vus` concurrent users.
    pub async fn run_rate(&mut self, rate: f64) -> anyhow::Result<()> {
        debug!("Running benchmark with rate: {} req/s", rate);
        let id = format!("constant@{:.2}req/s", rate);
        // notify start event
        self.event_bus.send(Event::BenchmarkStart(BenchmarkEvent {
            id: id.clone(),
            scheduler_type: ExecutorType::ConstantArrivalRate,
            request_throughput: None,
            progress: 0.0,
            results: None,
            successful_requests: 0,
            failed_requests: 0,
        }))?;
        // create progress handler
        let tx = self.handle_progress(id.clone()).await;
        // start scheduler
        let mut scheduler = scheduler::Scheduler::new(
            id,
            self.backend.clone(),
            scheduler::ExecutorType::ConstantArrivalRate,
            executors::ExecutorConfig {
                max_vus: self.config.max_vus,
                duration: self.config.duration,
                rate: Some(rate),
            },
            self.requests.clone(),
            tx.clone(),
            self.stop_sender.clone(),
        );
        scheduler.run().await?;
        let results = scheduler.get_results().lock().await.clone();
        self.report.add_benchmark_result(results.clone());
        // send None to close the progress handler
        tx.send(None).await.unwrap();
        // notify end event
        self.event_bus.send(Event::BenchmarkEnd(BenchmarkEvent {
            id: format!("constant@{:.2}req/s", rate),
            scheduler_type: ExecutorType::ConstantArrivalRate,
            request_throughput: results.successful_request_rate().ok(),
            progress: 100.0,
            results: Some(results.clone()),
            successful_requests: results.successful_requests() as u64,
            failed_requests: results.failed_requests() as u64,
        }))?;
        Ok(())
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::requests::DummyTextGenerationBackend;
    use crate::requests::DummyTextRequestGenerator;
    use std::time::Duration;
    /// End-to-end timing check: runs a 2-rate sweep against a dummy backend
    /// that emits 10 tokens over a fixed generation time, then asserts that
    /// measured TTFT, inter-token latency and e2e latency match the dummy's
    /// configured timing within a small error margin.
    #[tokio::test]
    async fn test_sweep_benchmark_timings() {
        let generation_time = Duration::from_secs(2);
        let (event_tx, mut _event_rx) = tokio::sync::mpsc::unbounded_channel();
        let (stop_sender, _) = tokio::sync::broadcast::channel(1);
        let backend = Box::new(DummyTextGenerationBackend::new(Duration::from_secs(
            generation_time.as_secs(),
        )));
        let requests_generator = Arc::from(Mutex::from(DummyTextRequestGenerator::new()));
        let mut benchmark = Benchmark::new(
            BenchmarkConfig {
                max_vus: 100,
                duration: Duration::from_secs(10),
                benchmark_kind: BenchmarkKind::Sweep,
                warmup_duration: Duration::from_secs(1),
                rates: None,
                num_rates: 2,
                prompt_options: None,
                decode_options: None,
                tokenizer: "gpt2".to_string(),
                model_name: "gpt2".to_string(),
                profile: None,
                extra_metadata: None,
                run_id: "test".to_string(),
            },
            backend,
            requests_generator,
            event_tx,
            stop_sender,
        );
        let report = benchmark.run().await.unwrap();
        // warmup + throughput + 2 rate steps
        assert_eq!(report.get_results().len(), 4);
        let generation_time_per_token_milli = generation_time.as_millis() as i128 / 10;
        for result in report.get_results() {
            let delta_ttft = result.time_to_first_token_avg().unwrap().as_millis() as i128
                - generation_time_per_token_milli; // Dummy backends generates 10 tokens
            let delta_itl = result.inter_token_latency_avg().unwrap().as_millis() as i128
                - generation_time_per_token_milli;
            let delta_e2e = result.e2e_latency_avg().unwrap().as_millis() as i128
                - generation_time.as_millis() as i128;
            let allowed_error_ms = 3; // allow error margin for timing tests
            assert!(
                delta_ttft.abs() <= allowed_error_ms,
                "time_to_first_token_delta: {:?}, expected {:?}",
                delta_ttft.abs(),
                allowed_error_ms
            );
            assert!(
                delta_itl.abs() <= allowed_error_ms,
                "inter_token_latency_delta: {:?}, expected {:?}",
                delta_itl.abs(),
                allowed_error_ms
            );
            assert!(
                delta_e2e.abs() <= allowed_error_ms * 10, // Cumulative error for 10 tokens
                "e2e_latency_delta: {:?}, expected {:?}",
                delta_e2e.abs(),
                allowed_error_ms * 10
            );
        }
    }
}
| rust | Apache-2.0 | 0a54882d9ae3d2e183005345bd86b5c47382a338 | 2026-01-04T20:20:59.288369Z | false |
huggingface/inference-benchmarker | https://github.com/huggingface/inference-benchmarker/blob/0a54882d9ae3d2e183005345bd86b5c47382a338/src/executors.rs | src/executors.rs | use std::sync::atomic::AtomicI64;
use std::sync::Arc;
use std::time::Duration;
use async_trait::async_trait;
use log::{info, trace, warn};
use serde::Serialize;
use tokio::sync::mpsc::{Receiver, Sender, UnboundedSender};
use tokio::sync::{broadcast, Mutex};
use tokio::task::JoinHandle;
use crate::requests::{
TextGenerationAggregatedResponse, TextGenerationBackend, TextGenerationRequest,
TextRequestGenerator,
};
/// Parameters for one executor step; serialized into the report (duration
/// as integer seconds).
#[serde_with::serde_as]
#[derive(Clone, Serialize)]
pub struct ExecutorConfig {
    // Upper bound on concurrent virtual users
    pub max_vus: u64,
    #[serde(rename = "duration_secs")]
    #[serde_as(as = "serde_with::DurationSeconds<u64>")]
    pub duration: Duration,
    // Target arrival rate in req/s; None for constant-VUs executors
    pub rate: Option<f64>,
}
/// A load-generation strategy: pulls requests from `requests`, drives the
/// backend, and forwards aggregated responses over `responses_tx` until its
/// configured duration elapses or a stop signal arrives.
#[async_trait]
pub trait Executor {
    async fn run(
        &self,
        requests: Arc<Mutex<dyn TextRequestGenerator + Send>>,
        responses_tx: UnboundedSender<TextGenerationAggregatedResponse>,
        stop_sender: broadcast::Sender<()>,
    );
}
/// Executor that keeps a constant number of virtual users busy: each VU
/// issues requests back-to-back for the configured duration.
pub struct ConstantVUsExecutor {
    config: ExecutorConfig,
    backend: Box<dyn TextGenerationBackend + Send + Sync>,
}
impl ConstantVUsExecutor {
pub fn new(
backend: Box<dyn TextGenerationBackend + Send + Sync>,
max_vus: u64,
duration: Duration,
) -> ConstantVUsExecutor {
Self {
backend,
config: ExecutorConfig {
max_vus,
duration,
rate: None,
},
}
}
}
#[async_trait]
impl Executor for ConstantVUsExecutor {
    /// Starts `max_vus` virtual users and replenishes each one as it
    /// finishes, keeping concurrency constant until `duration` elapses;
    /// then waits for in-flight VUs to drain. Aborts early on a stop
    /// signal.
    async fn run(
        &self,
        requests: Arc<Mutex<dyn TextRequestGenerator + Send>>,
        responses_tx: UnboundedSender<TextGenerationAggregatedResponse>,
        stop_sender: broadcast::Sender<()>,
    ) {
        let start = std::time::Instant::now();
        // channel to handle ending VUs
        let (end_tx, mut end_rx): (Sender<bool>, Receiver<bool>) =
            tokio::sync::mpsc::channel(self.config.max_vus as usize);
        // Count of currently running VUs, used to detect full drain.
        let active_vus = Arc::new(AtomicI64::new(0));
        // start all VUs
        for _ in 0..self.config.max_vus {
            // Hold the generator lock only long enough to draw one request.
            let mut requests_guard = requests.lock().await;
            let request = Arc::from(requests_guard.generate_request());
            drop(requests_guard);
            start_vu(
                self.backend.clone(),
                request,
                responses_tx.clone(),
                end_tx.clone(),
                stop_sender.clone(),
            )
            .await;
            active_vus.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
        }
        let mut stop_receiver = stop_sender.subscribe();
        tokio::select! {
            _ = stop_receiver.recv() => {
                return;
            },
            _ = async {
                // replenish VUs as they finish
                while end_rx.recv().await.is_some() {
                    active_vus.fetch_sub(1, std::sync::atomic::Ordering::SeqCst);
                    if start.elapsed() > self.config.duration{
                        // signal that the VU work is done
                        let _ = responses_tx.send(TextGenerationAggregatedResponse::new_as_ended());
                        info!("Duration reached, waiting for all VUs to finish...");
                        if active_vus.load(std::sync::atomic::Ordering::SeqCst) == 0 {
                            break;
                        }
                    } else {
                        // Still within budget: draw a new request and restart a VU.
                        let mut requests_guard = requests.lock().await;
                        let request = Arc::from(requests_guard.generate_request());
                        drop(requests_guard);
                        active_vus.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
                        start_vu(self.backend.clone(), request, responses_tx.clone(), end_tx.clone(), stop_sender.clone()).await;
                    }
                }
            }=>{}
        }
    }
}
/// Spawn a single virtual user that issues `request` against `backend`.
///
/// Streamed responses are forwarded to `responses_tx`; when the request
/// completes (or a stop signal arrives) a message is sent on `end_tx` so the
/// executor can account for the finished VU.
async fn start_vu(
    backend: Box<dyn TextGenerationBackend + Send + Sync>,
    request: Arc<TextGenerationRequest>,
    responses_tx: UnboundedSender<TextGenerationAggregatedResponse>,
    end_tx: Sender<bool>,
    stop_sender: broadcast::Sender<()>,
) -> JoinHandle<()> {
    let mut stop_receiver = stop_sender.subscribe();
    tokio::spawn(async move {
        tokio::select! {
            // on stop, still notify the executor so its VU accounting stays correct
            _ = stop_receiver.recv() => {
                let _ = end_tx.send(true).await;
            },
            _ = async{
                let (tx, mut rx): (Sender<TextGenerationAggregatedResponse>, Receiver<TextGenerationAggregatedResponse>) = tokio::sync::mpsc::channel(1);
                trace!("VU started with request: {:?}", request);
                let req_thread = tokio::spawn(async move {
                    backend.generate(request.clone(), tx).await;
                });
                let send_thread = tokio::spawn(async move {
                    while let Some(response) = rx.recv().await {
                        // ignore errors, if the receiver is gone we want to finish the request
                        // to leave remote server in clean state
                        let _ = responses_tx.send(response);
                    }
                });
                req_thread.await.unwrap();
                send_thread.await.unwrap();
                // signal that the VU work is done
                let _ = end_tx.send(true).await;
            }=>{}
        }
    })
}
/// Executor that spawns requests at a constant arrival rate, capped by a
/// maximum number of concurrent virtual users.
pub struct ConstantArrivalRateExecutor {
    // `rate` is always `Some(..)` for this strategy (see `new`)
    config: ExecutorConfig,
    backend: Box<dyn TextGenerationBackend + Send + Sync>,
}
impl ConstantArrivalRateExecutor {
    /// Build an executor that issues `rate` requests per second against
    /// `backend`, bounded by `max_vus` concurrent virtual users, for
    /// `duration`.
    pub fn new(
        backend: Box<dyn TextGenerationBackend + Send + Sync>,
        max_vus: u64,
        duration: Duration,
        rate: f64,
    ) -> ConstantArrivalRateExecutor {
        let config = ExecutorConfig {
            max_vus,
            duration,
            rate: Some(rate),
        };
        Self { backend, config }
    }
}
#[async_trait]
impl Executor for ConstantArrivalRateExecutor {
    /// Spawn new requests at a constant arrival rate (`rate` per second),
    /// capped at `max_vus` in-flight virtual users, until the configured
    /// duration elapses or a stop signal is received.
    async fn run(
        &self,
        requests: Arc<Mutex<dyn TextRequestGenerator + Send>>,
        responses_tx: UnboundedSender<TextGenerationAggregatedResponse>,
        stop_sender: broadcast::Sender<()>,
    ) {
        let start = std::time::Instant::now();
        let active_vus = Arc::new(AtomicI64::new(0));
        // channel to handle ending VUs
        let (end_tx, mut end_rx): (Sender<bool>, Receiver<bool>) =
            tokio::sync::mpsc::channel(self.config.max_vus as usize);
        let rate = self.config.rate.expect("checked in new()");
        // spawn new VUs every `tick_ms` to reach the expected `rate` per second, until the duration is reached
        let tick_ms = 10;
        let mut interval = tokio::time::interval(Duration::from_millis(tick_ms));
        let backend = self.backend.clone();
        let duration = self.config.duration;
        let max_vus = self.config.max_vus;
        let active_vus_thread = active_vus.clone();
        let mut stop_receiver_signal = stop_sender.subscribe();
        let vu_thread = tokio::spawn(async move {
            tokio::select! {
                _ = stop_receiver_signal.recv() => {},
                _= async {
                    // fractional accumulator of VUs owed for the elapsed ticks
                    let mut spawn_queue = 0.; // start with at least one VU
                    while start.elapsed() < duration {
                        spawn_queue += rate * (tick_ms as f64) / 1000.0;
                        // delay spawning if we can't spawn a full VU yet
                        if spawn_queue < 1.0 {
                            interval.tick().await;
                            continue;
                        }
                        // spawn VUs, keep track of the fraction of VU to spawn for the next iteration
                        let to_spawn = spawn_queue.floor() as u64;
                        spawn_queue -= to_spawn as f64;
                        for _ in 0..to_spawn {
                            if active_vus_thread.load(std::sync::atomic::Ordering::SeqCst) < max_vus as i64 {
                                let mut requests_guard = requests.lock().await;
                                let request = Arc::from(requests_guard.generate_request());
                                start_vu(backend.clone(), request.clone(), responses_tx.clone(), end_tx.clone(),stop_sender.clone()).await;
                                active_vus_thread.fetch_add(1, std::sync::atomic::Ordering::SeqCst);
                            } else {
                                // at the VU cap: drop this slot rather than queueing it
                                warn!("Max VUs reached, skipping request");
                                break;
                            }
                        }
                        interval.tick().await;
                    }
                    // signal that the VU work is done
                    info!("Duration reached, waiting for all VUs to finish...");
                    let _ = responses_tx.send(TextGenerationAggregatedResponse::new_as_ended());
                }=>{}
            }
        });
        // drain VU-finished notifications until the window is over and idle
        while end_rx.recv().await.is_some() {
            active_vus.fetch_sub(1, std::sync::atomic::Ordering::SeqCst);
            // wait for all VUs to finish
            if start.elapsed() > self.config.duration
                && active_vus.load(std::sync::atomic::Ordering::SeqCst) == 0
            {
                break;
            }
        }
        // wait for the VU thread to finish
        vu_thread.await.unwrap();
    }
}
| rust | Apache-2.0 | 0a54882d9ae3d2e183005345bd86b5c47382a338 | 2026-01-04T20:20:59.288369Z | false |
huggingface/inference-benchmarker | https://github.com/huggingface/inference-benchmarker/blob/0a54882d9ae3d2e183005345bd86b5c47382a338/src/results.rs | src/results.rs | use crate::executors::ExecutorConfig;
use crate::requests::TextGenerationAggregatedResponse;
use crate::results::BenchmarkErrors::NoResponses;
use crate::scheduler::ExecutorType;
use chrono::Utc;
use std::fmt::{Debug, Display, Formatter};
use std::time::Duration;
/// Errors produced while aggregating benchmark results.
#[derive(Debug)]
pub(crate) enum BenchmarkErrors {
    /// No valid (timed) response was recorded, so aggregates cannot be computed.
    NoResponses,
}
impl Display for BenchmarkErrors {
    /// Human-readable message for each benchmark error.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        let message = match self {
            NoResponses => "Backend did not return any valid response. It is either not responding or test duration is too short.",
        };
        f.write_str(message)
    }
}
/// Responses and configuration collected for a single executor run.
#[derive(Clone)]
pub struct BenchmarkResults {
    pub id: String,
    // per-request responses in the order they were added
    aggregated_responses: Vec<TextGenerationAggregatedResponse>,
    executor_type: ExecutorType,
    executor_config: ExecutorConfig,
}
impl BenchmarkResults {
    /// Create an empty result set for a single executor run.
    pub fn new(
        id: String,
        executor_type: ExecutorType,
        executor_config: ExecutorConfig,
    ) -> BenchmarkResults {
        BenchmarkResults {
            id,
            aggregated_responses: Vec::new(),
            executor_type,
            executor_config,
        }
    }

    /// Record one aggregated response (successful or failed).
    pub fn add_response(&mut self, response: TextGenerationAggregatedResponse) {
        self.aggregated_responses.push(response);
    }

    /// Number of recorded requests, failed ones included.
    pub fn total_requests(&self) -> usize {
        self.aggregated_responses.len()
    }

    /// Start time of the first recorded response, if available.
    pub fn start_time(&self) -> Option<tokio::time::Instant> {
        self.aggregated_responses
            .first()
            .and_then(|response| response.start_time)
    }

    /// End time of the last recorded response, if available.
    pub fn end_time(&self) -> Option<tokio::time::Instant> {
        self.aggregated_responses
            .last()
            .and_then(|response| response.end_time)
    }

    // Aggregates are only meaningful once both run boundaries are known.
    fn is_ready(&self) -> bool {
        self.start_time().is_some() && self.end_time().is_some()
    }

    /// Number of requests flagged as failed.
    pub fn failed_requests(&self) -> usize {
        self.aggregated_responses
            .iter()
            .filter(|response| response.failed)
            .count()
    }

    /// Number of requests that completed successfully.
    pub fn successful_requests(&self) -> usize {
        self.aggregated_responses
            .iter()
            .filter(|response| !response.failed)
            .count()
    }

    /// Generated-token throughput over the whole run, in tokens per second.
    ///
    /// # Errors
    /// Returns [`NoResponses`] if no response carries timing information.
    pub fn token_throughput_secs(&self) -> anyhow::Result<f64> {
        if self.is_ready() {
            let total_tokens: u64 = self.total_tokens();
            Ok(total_tokens as f64 / self.duration().unwrap_or_default().as_secs_f64())
        } else {
            Err(anyhow::anyhow!(NoResponses))
        }
    }

    /// Total prompt tokens sent across successful requests.
    pub fn total_tokens_sent(&self) -> u64 {
        self.get_successful_responses()
            .iter()
            .map(|response| response.request.clone().unwrap().num_prompt_tokens)
            .sum()
    }

    /// Total prompt tokens across successful requests (same source as
    /// [`Self::total_tokens_sent`]).
    pub fn total_prompt_tokens(&self) -> u64 {
        self.get_successful_responses()
            .iter()
            .map(|response| response.request.clone().unwrap().num_prompt_tokens)
            .sum()
    }

    /// Average number of prompt tokens per successful request.
    pub fn prompt_tokens_avg(&self) -> anyhow::Result<f64> {
        if self.is_ready() {
            let total_prompt_tokens = self.total_prompt_tokens();
            Ok(total_prompt_tokens as f64 / self.successful_requests() as f64)
        } else {
            Err(anyhow::anyhow!(NoResponses))
        }
    }

    /// Successful requests per second over the run duration.
    pub fn successful_request_rate(&self) -> anyhow::Result<f64> {
        if self.is_ready() {
            let total_requests = self.successful_requests();
            Ok(total_requests as f64 / self.duration().unwrap_or_default().as_secs_f64())
        } else {
            Err(anyhow::anyhow!(NoResponses))
        }
    }

    /// Total generated tokens across successful requests.
    pub fn total_tokens(&self) -> u64 {
        self.get_successful_responses()
            .iter()
            .map(|response| response.num_generated_tokens)
            .sum()
    }

    /// Wall-clock duration between the first start and the last end timestamp.
    pub fn duration(&self) -> anyhow::Result<std::time::Duration> {
        if self.is_ready() {
            Ok(self
                .end_time()
                .unwrap()
                .duration_since(self.start_time().unwrap()))
        } else {
            Err(anyhow::anyhow!(NoResponses))
        }
    }

    /// Average end-to-end latency of successful requests.
    pub fn e2e_latency_avg(&self) -> anyhow::Result<std::time::Duration> {
        if self.is_ready() {
            if self.successful_requests() == 0 {
                return Ok(Duration::from_secs(0));
            }
            Ok(self
                .get_successful_responses()
                .iter()
                .map(|response| response.e2e_latency().unwrap_or_default())
                .sum::<Duration>()
                / self.successful_requests() as u32)
        } else {
            Err(anyhow::anyhow!(NoResponses))
        }
    }

    /// End-to-end latency at the given percentile (0.0..=1.0).
    pub fn e2e_latency_percentile(&self, percentile: f64) -> anyhow::Result<std::time::Duration> {
        let quantile = self.quantile_duration(
            self.get_successful_responses()
                .iter()
                .map(|response| response.e2e_latency().unwrap_or_default())
                .collect(),
            percentile,
        )?;
        Ok(Duration::from_secs_f64(quantile))
    }

    /// Average time to first token across successful requests.
    pub fn time_to_first_token_avg(&self) -> anyhow::Result<std::time::Duration> {
        if self.is_ready() {
            if self.successful_requests() == 0 {
                return Ok(Duration::from_secs(0));
            }
            Ok(self
                .get_successful_responses()
                .iter()
                .map(|response| response.time_to_first_token().unwrap_or_default())
                .sum::<Duration>()
                / self.successful_requests() as u32)
        } else {
            Err(anyhow::anyhow!(NoResponses))
        }
    }

    /// Time to first token at the given percentile (0.0..=1.0).
    pub fn time_to_first_token_percentile(&self, percentile: f64) -> anyhow::Result<Duration> {
        let quantile = self.quantile_duration(
            self.get_successful_responses()
                .iter()
                .map(|response| response.time_to_first_token().unwrap_or_default())
                .collect(),
            percentile,
        )?;
        Ok(Duration::from_secs_f64(quantile))
    }

    /// Average inter-token latency across successful requests.
    pub fn inter_token_latency_avg(&self) -> anyhow::Result<std::time::Duration> {
        if self.is_ready() {
            if self.successful_requests() == 0 {
                return Ok(Duration::from_secs(0));
            }
            Ok(self
                .get_successful_responses()
                .iter()
                .map(|response| response.inter_token_latency().unwrap_or_default())
                .sum::<Duration>()
                / self.successful_requests() as u32)
        } else {
            Err(anyhow::anyhow!(NoResponses))
        }
    }

    /// Inter-token latency at the given percentile (0.0..=1.0).
    pub fn inter_token_latency_percentile(&self, percentile: f64) -> anyhow::Result<Duration> {
        let quantile = self.quantile_duration(
            self.get_successful_responses()
                .iter()
                .map(|response| response.inter_token_latency().unwrap_or_default())
                .collect(),
            percentile,
        )?;
        Ok(Duration::from_secs_f64(quantile))
    }

    /// The load-generation strategy that produced these results.
    pub fn executor_type(&self) -> ExecutorType {
        self.executor_type.clone()
    }

    /// The executor configuration used for this run.
    pub fn executor_config(&self) -> ExecutorConfig {
        self.executor_config.clone()
    }

    fn get_successful_responses(&self) -> Vec<&TextGenerationAggregatedResponse> {
        self.aggregated_responses
            .iter()
            .filter(|response| !response.failed)
            .collect()
    }

    /// All recorded responses, cloned.
    pub fn get_responses(&self) -> Vec<TextGenerationAggregatedResponse> {
        self.aggregated_responses.clone()
    }

    /// Calculate the quantile of a given data set using linear interpolation.
    /// Results are similar to `numpy.percentile`.
    ///
    /// # Errors
    /// Returns [`NoResponses`] when timing data is missing, `data` is empty,
    /// or `quantile` is out of range.
    fn quantile_duration(&self, mut data: Vec<Duration>, quantile: f64) -> anyhow::Result<f64> {
        if !self.is_ready() || data.is_empty() {
            // Previously an empty `data` underflowed `data.len() - 1`.
            return Err(anyhow::anyhow!(NoResponses));
        }
        data.sort();
        let position = quantile * (data.len() - 1) as f64;
        let index = position.floor() as usize;
        if index >= data.len() {
            return Err(anyhow::anyhow!(NoResponses));
        }
        let delta = position - position.floor();
        let lower = data[index].as_secs_f64();
        // When `index` is the last element (quantile == 1.0, or a single
        // sample), there is no upper neighbor to interpolate with: fall back
        // to the lower value instead of indexing out of bounds.
        let upper = data.get(index + 1).map_or(lower, |d| d.as_secs_f64());
        Ok((1. - delta) * lower + delta * upper)
    }
}
impl Debug for BenchmarkResults {
    /// Debug output reports the aggregate metrics; metrics that cannot be
    /// computed yet are replaced by sentinel values (-1.0 or a zero
    /// duration) still wrapped in `Ok`, via `Result::or`.
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        f.debug_struct("BenchmarkResult")
            .field("id", &self.id)
            .field("executor_type", &self.executor_type.to_string())
            .field("total_requests", &self.total_requests())
            .field("start_time", &self.start_time())
            .field("end_time", &self.end_time())
            .field("total_tokens", &self.total_tokens())
            .field(
                "token_throughput_secs",
                &self
                    .token_throughput_secs()
                    .or::<anyhow::Result<f64>>(Ok(-1.0)),
            )
            .field(
                "duration_ms",
                &self
                    .duration()
                    .or::<anyhow::Result<Duration>>(Ok(Duration::from_secs(0))),
            )
            .field(
                "average_time_to_first_token",
                &self
                    .time_to_first_token_avg()
                    .or::<anyhow::Result<Duration>>(Ok(Duration::from_secs(0))),
            )
            .field(
                "average_inter_token_latency",
                &self
                    .inter_token_latency_avg()
                    .or::<anyhow::Result<Duration>>(Ok(Duration::from_secs(0))),
            )
            .field("failed_requests", &self.failed_requests())
            .field("successful_requests", &self.successful_requests())
            .field(
                "request_rate",
                &self
                    .successful_request_rate()
                    .or::<anyhow::Result<f64>>(Ok(-1.0)),
            )
            .field("sent_prompt_tokens", &self.total_tokens_sent())
            .field(
                "e2e_latency_avg",
                &self
                    .e2e_latency_avg()
                    .or::<anyhow::Result<Duration>>(Ok(Duration::from_secs(0))),
            )
            .finish()
    }
}
/// Top-level report: the result sets of all executor runs plus the
/// wall-clock start/end of the whole benchmark session.
#[derive(Debug, Clone)]
pub struct BenchmarkReport {
    results: Vec<BenchmarkResults>,
    start_time: Option<chrono::DateTime<Utc>>,
    end_time: Option<chrono::DateTime<Utc>>,
}
impl BenchmarkReport {
    /// Create an empty report; call [`Self::start`] / [`Self::end`] to stamp times.
    pub fn new() -> BenchmarkReport {
        BenchmarkReport {
            results: Vec::new(),
            start_time: None,
            end_time: None,
        }
    }

    /// Stamp the report's wall-clock start time with the current time.
    pub fn start(&mut self) {
        self.start_time = Some(Utc::now());
    }

    /// Stamp the report's wall-clock end time with the current time.
    pub fn end(&mut self) {
        self.end_time = Some(Utc::now());
    }

    /// Append the result set of one executor run.
    pub fn add_benchmark_result(&mut self, result: BenchmarkResults) {
        self.results.push(result);
    }

    /// A copy of all recorded result sets.
    pub fn get_results(&self) -> Vec<BenchmarkResults> {
        self.results.clone()
    }

    /// Wall-clock start of the benchmark, if stamped.
    pub fn start_time(&self) -> Option<chrono::DateTime<Utc>> {
        self.start_time
    }

    /// Wall-clock end of the benchmark, if stamped.
    pub fn end_time(&self) -> Option<chrono::DateTime<Utc>> {
        self.end_time
    }
}
#[cfg(test)]
mod test {
    use super::*;
    use crate::requests::TextGenerationRequest;
    use std::sync::Arc;

    /// Build a successful response ending `end_ms` after now whose
    /// per-token latencies are `ttt_ms` (milliseconds).
    fn successful_response(
        request: Arc<TextGenerationRequest>,
        end_ms: u64,
        ttt_ms: [u64; 5],
    ) -> TextGenerationAggregatedResponse {
        let mut response = TextGenerationAggregatedResponse::new(request);
        response.start_time = Some(tokio::time::Instant::now());
        response.end_time =
            Some(tokio::time::Instant::now() + tokio::time::Duration::from_millis(end_ms));
        response.num_generated_tokens = 100;
        response.failed = false;
        response.times_to_tokens = ttt_ms.iter().map(|ms| Duration::from_millis(*ms)).collect();
        response
    }

    #[test]
    fn test_time_to_first_token_percentile() {
        let request = Arc::from(TextGenerationRequest {
            id: None,
            prompt: "test".to_string(),
            num_prompt_tokens: 10,
            num_decode_tokens: None,
        });
        let mut results = BenchmarkResults::new(
            "test".to_string(),
            ExecutorType::ConstantArrivalRate,
            ExecutorConfig {
                max_vus: 0,
                duration: Default::default(),
                rate: None,
            },
        );
        results.add_response(successful_response(
            request.clone(),
            100,
            [100, 200, 300, 400, 500],
        ));
        results.add_response(successful_response(
            request.clone(),
            200,
            [600, 700, 800, 900, 1000],
        ));
        results.add_response(successful_response(
            request.clone(),
            300,
            [1100, 1200, 1300, 1400, 1500],
        ));
        results.add_response(successful_response(
            request.clone(),
            300,
            [1600, 1700, 1800, 1900, 2000],
        ));
        assert_eq!(
            results.time_to_first_token_percentile(0.9).unwrap(),
            Duration::from_millis(1450)
        );
        assert_eq!(
            results.time_to_first_token_percentile(0.5).unwrap(),
            Duration::from_millis(850)
        );
    }
}
| rust | Apache-2.0 | 0a54882d9ae3d2e183005345bd86b5c47382a338 | 2026-01-04T20:20:59.288369Z | false |
huggingface/inference-benchmarker | https://github.com/huggingface/inference-benchmarker/blob/0a54882d9ae3d2e183005345bd86b5c47382a338/src/table.rs | src/table.rs | use crate::results::BenchmarkReport;
use crate::BenchmarkConfig;
use tabled::builder::Builder;
/// Render the benchmark configuration as a two-column "Parameter | Value" table.
pub fn parameters_table(benchmark: BenchmarkConfig) -> anyhow::Result<tabled::Table> {
    // Optional settings fall back to "N/A" in the display.
    let rates = match benchmark.rates {
        Some(rates) => format!("{:?}", rates),
        None => "N/A".to_string(),
    };
    let prompt_options = match benchmark.prompt_options {
        Some(options) => format!("{}", options),
        None => "N/A".to_string(),
    };
    let decode_options = match benchmark.decode_options {
        Some(options) => format!("{}", options),
        None => "N/A".to_string(),
    };
    let extra_metadata = match benchmark.extra_metadata {
        Some(metadata) => format!("{:?}", metadata),
        None => "N/A".to_string(),
    };
    let rows: Vec<(&str, String)> = vec![
        ("Max VUs", benchmark.max_vus.to_string()),
        ("Duration", benchmark.duration.as_secs().to_string()),
        (
            "Warmup Duration",
            benchmark.warmup_duration.as_secs().to_string(),
        ),
        ("Benchmark Kind", benchmark.benchmark_kind.to_string()),
        ("Rates", rates),
        ("Num Rates", benchmark.num_rates.to_string()),
        ("Prompt Options", prompt_options),
        ("Decode Options", decode_options),
        ("Tokenizer", benchmark.tokenizer.to_string()),
        ("Extra Metadata", extra_metadata),
    ];
    let mut builder = Builder::default();
    builder.set_header(vec!["Parameter", "Value"]);
    for (name, value) in rows {
        builder.push_record(vec![name, value.as_str()]);
    }
    let mut table = builder.build();
    table.with(tabled::settings::Style::sharp());
    Ok(table)
}
/// Render one row of aggregate metrics per benchmark result.
///
/// # Errors
/// Fails when a result set has no valid responses to aggregate.
pub fn results_table(benchmark: BenchmarkReport) -> anyhow::Result<tabled::Table> {
    let mut builder = Builder::default();
    builder.set_header(vec![
        "Benchmark",
        "QPS",
        "E2E Latency (avg)",
        "TTFT (avg)",
        "ITL (avg)",
        "Throughput",
        "Error Rate",
        "Successful Requests",
        "Prompt tokens per req (avg)",
        "Decoded tokens per req (avg)",
    ]);
    for result in benchmark.get_results() {
        // Pre-format every cell so the record push below stays flat.
        let qps = format!("{:.2} req/s", result.successful_request_rate()?);
        let e2e = format!("{:.2} sec", result.e2e_latency_avg()?.as_secs_f64());
        let ttft = format!(
            "{:.2} ms",
            result.time_to_first_token_avg()?.as_micros() as f64 / 1000.0
        );
        let itl = format!(
            "{:.2} ms",
            result.inter_token_latency_avg()?.as_micros() as f64 / 1000.0
        );
        let throughput = format!("{:.2} tokens/sec", result.token_throughput_secs()?);
        let error_rate = format!(
            "{:.2}%",
            result.failed_requests() as f64 / result.total_requests() as f64 * 100.0
        );
        let successful = format!(
            "{}/{}",
            result.successful_requests(),
            result.total_requests()
        );
        let prompt_tokens = format!("{:.2}", result.prompt_tokens_avg()?);
        let decoded_tokens = format!(
            "{:.2}",
            result.total_tokens() as f64 / result.successful_requests() as f64
        );
        builder.push_record(vec![
            result.id.as_str(),
            qps.as_str(),
            e2e.as_str(),
            ttft.as_str(),
            itl.as_str(),
            throughput.as_str(),
            error_rate.as_str(),
            successful.as_str(),
            prompt_tokens.as_str(),
            decoded_tokens.as_str(),
        ]);
    }
    let mut table = builder.build();
    table.with(tabled::settings::Style::sharp());
    Ok(table)
}
| rust | Apache-2.0 | 0a54882d9ae3d2e183005345bd86b5c47382a338 | 2026-01-04T20:20:59.288369Z | false |
huggingface/inference-benchmarker | https://github.com/huggingface/inference-benchmarker/blob/0a54882d9ae3d2e183005345bd86b5c47382a338/src/writers.rs | src/writers.rs | use crate::results::{BenchmarkReport, BenchmarkResults};
use crate::{executors, table, BenchmarkConfig};
use serde::Serialize;
use std::path::Path;
use sysinfo::{CpuRefreshKind, MemoryRefreshKind, System};
use tokio::fs;
/// Serializable percentile summary of a latency metric.
/// Values are fractional milliseconds (see `BenchmarkResultsWriter::new`).
#[derive(Serialize)]
pub struct PercentilesWriter {
    pub p50: f64,
    pub p60: f64,
    pub p70: f64,
    pub p80: f64,
    pub p90: f64,
    pub p95: f64,
    pub p99: f64,
    pub avg: f64,
}
/// Serializable flattening of one `BenchmarkResults` for the JSON report.
#[derive(Serialize)]
pub struct BenchmarkResultsWriter {
    id: String,
    executor_type: String,
    config: executors::ExecutorConfig,
    total_requests: u64,
    total_tokens: u64,
    token_throughput_secs: f64,
    duration_ms: u128,
    time_to_first_token_ms: PercentilesWriter,
    inter_token_latency_ms: PercentilesWriter,
    failed_requests: u64,
    successful_requests: u64,
    request_rate: f64,
    total_tokens_sent: u64,
    e2e_latency_ms: PercentilesWriter,
}
impl BenchmarkResultsWriter {
    /// Flatten a `BenchmarkResults` into its serializable form.
    ///
    /// # Errors
    /// Returns an error when the results contain no valid responses, since
    /// the latency/throughput aggregates cannot be computed in that case.
    /// (Previously the `duration` and `*_avg` lookups used
    /// `.ok().unwrap()` and panicked instead of propagating the error.)
    pub fn new(results: BenchmarkResults) -> anyhow::Result<BenchmarkResultsWriter> {
        // Convert a duration to fractional milliseconds with microsecond precision.
        fn ms(d: std::time::Duration) -> f64 {
            d.as_micros() as f64 / 1000.
        }
        let time_to_first_token_ms = PercentilesWriter {
            p50: ms(results.time_to_first_token_percentile(0.5)?),
            p60: ms(results.time_to_first_token_percentile(0.6)?),
            p70: ms(results.time_to_first_token_percentile(0.7)?),
            p80: ms(results.time_to_first_token_percentile(0.8)?),
            p90: ms(results.time_to_first_token_percentile(0.9)?),
            p95: ms(results.time_to_first_token_percentile(0.95)?),
            p99: ms(results.time_to_first_token_percentile(0.99)?),
            avg: ms(results.time_to_first_token_avg()?),
        };
        let inter_token_latency_ms = PercentilesWriter {
            p50: ms(results.inter_token_latency_percentile(0.5)?),
            p60: ms(results.inter_token_latency_percentile(0.6)?),
            p70: ms(results.inter_token_latency_percentile(0.7)?),
            p80: ms(results.inter_token_latency_percentile(0.8)?),
            p90: ms(results.inter_token_latency_percentile(0.9)?),
            p95: ms(results.inter_token_latency_percentile(0.95)?),
            p99: ms(results.inter_token_latency_percentile(0.99)?),
            avg: ms(results.inter_token_latency_avg()?),
        };
        let e2e_latency_ms = PercentilesWriter {
            p50: ms(results.e2e_latency_percentile(0.5)?),
            p60: ms(results.e2e_latency_percentile(0.6)?),
            p70: ms(results.e2e_latency_percentile(0.7)?),
            p80: ms(results.e2e_latency_percentile(0.8)?),
            p90: ms(results.e2e_latency_percentile(0.9)?),
            p95: ms(results.e2e_latency_percentile(0.95)?),
            p99: ms(results.e2e_latency_percentile(0.99)?),
            avg: ms(results.e2e_latency_avg()?),
        };
        Ok(BenchmarkResultsWriter {
            id: results.id.clone(),
            executor_type: results.executor_type().to_string(),
            config: results.executor_config(),
            total_requests: results.total_requests() as u64,
            total_tokens: results.total_tokens(),
            token_throughput_secs: results.token_throughput_secs()?,
            duration_ms: results.duration()?.as_micros() / 1000,
            time_to_first_token_ms,
            inter_token_latency_ms,
            failed_requests: results.failed_requests() as u64,
            successful_requests: results.successful_requests() as u64,
            request_rate: results.successful_request_rate()?,
            total_tokens_sent: results.total_tokens_sent(),
            e2e_latency_ms,
        })
    }
}
/// Serializable snapshot of the host system, embedded in the report.
#[derive(Serialize)]
pub struct SystemInfo {
    // one formatted entry per logical CPU
    pub cpu: Vec<String>,
    pub memory: String,
    pub os_name: String,
    pub os_version: String,
    pub kernel: String,
    pub hostname: String,
}
impl SystemInfo {
    /// Snapshot CPU, memory and OS information for the current host.
    pub fn new() -> SystemInfo {
        let s = System::new_with_specifics(
            sysinfo::RefreshKind::nothing()
                .with_memory(MemoryRefreshKind::everything())
                .with_cpu(CpuRefreshKind::everything()),
        );
        let cpu_info = s
            .cpus()
            .iter()
            .map(|cpu| format!("{} {}@{:.0}MHz", cpu.brand(), cpu.name(), cpu.frequency()))
            .collect::<Vec<String>>();
        SystemInfo {
            cpu: cpu_info,
            memory: format!(
                "{:.2} GB",
                s.total_memory() as f64 / 1024.0 / 1024.0 / 1024.0
            ),
            // Fall back to "N/A" when the platform cannot report a value.
            // The previous `.ok_or("N/A").unwrap()` panicked on `None`
            // instead of using the fallback.
            os_name: System::name().unwrap_or_else(|| "N/A".to_string()),
            os_version: System::os_version().unwrap_or_else(|| "N/A".to_string()),
            kernel: System::kernel_version().unwrap_or_else(|| "N/A".to_string()),
            hostname: System::host_name().unwrap_or_else(|| "N/A".to_string()),
        }
    }
}
/// Serializable top-level report: configuration, per-run results,
/// timestamps and host info.
#[derive(Serialize)]
pub struct BenchmarkReportWriter {
    config: BenchmarkConfig,
    results: Vec<BenchmarkResultsWriter>,
    // RFC 3339 timestamps of the whole benchmark session
    start_time: String,
    end_time: String,
    system: SystemInfo,
    // kept only to render the stdout tables; not serialized
    #[serde(skip)]
    report: BenchmarkReport,
}
impl BenchmarkReportWriter {
    /// Assemble a writer from the benchmark config and a finished report.
    ///
    /// # Errors
    /// Fails if the report was never started/ended, or if any result set
    /// cannot be aggregated (e.g. it has no valid responses).
    pub fn try_new(
        config: BenchmarkConfig,
        report: BenchmarkReport,
    ) -> anyhow::Result<BenchmarkReportWriter> {
        // Fallible collect: stop at the first result that cannot be flattened.
        let results = report
            .get_results()
            .into_iter()
            .map(BenchmarkResultsWriter::new)
            .collect::<anyhow::Result<Vec<_>>>()?;
        Ok(BenchmarkReportWriter {
            config,
            results,
            start_time: report
                .start_time()
                .ok_or(anyhow::anyhow!("start_time not set"))?
                .to_rfc3339(),
            end_time: report
                .end_time()
                .ok_or(anyhow::anyhow!("end_time not set"))?
                .to_rfc3339(),
            system: SystemInfo::new(),
            report,
        })
    }

    /// Serialize the report as JSON at `path`, creating parent directories
    /// as needed.
    pub async fn json(&self, path: &Path) -> anyhow::Result<()> {
        // write the benchmark report to json
        let report = serde_json::to_string(&self)?;
        // `create_dir_all` is a no-op when the directory already exists, so
        // no existence check is needed (the old `path.exists()` guard looked
        // at the file path, not its parent, and raced with the write).
        if let Some(parent) = path.parent() {
            fs::create_dir_all(parent).await?;
        }
        fs::write(path, report).await?;
        Ok(())
    }

    /// Print the parameters and results tables to stdout.
    pub async fn stdout(&self) -> anyhow::Result<()> {
        let param_table = table::parameters_table(self.config.clone())?;
        println!("\n{param_table}\n");
        let results_table = table::results_table(self.report.clone())?;
        println!("\n{results_table}\n");
        Ok(())
    }
}
| rust | Apache-2.0 | 0a54882d9ae3d2e183005345bd86b5c47382a338 | 2026-01-04T20:20:59.288369Z | false |
huggingface/inference-benchmarker | https://github.com/huggingface/inference-benchmarker/blob/0a54882d9ae3d2e183005345bd86b5c47382a338/src/main.rs | src/main.rs | use clap::error::ErrorKind::InvalidValue;
use clap::{ArgGroup, Error, Parser};
use inference_benchmarker::{run, RunConfiguration, TokenizeOptions};
use log::{debug, error};
use reqwest::Url;
use std::collections::HashMap;
use std::time::Duration;
use tokio::sync::broadcast;
// CLI arguments. The `group_profile` / `group_manual` arg groups make a
// predefined profile mutually exclusive with manual tuning flags. NOTE:
// `///` doc comments below are emitted as `--help` text by clap, so they
// are user-visible and must stay accurate.
#[derive(Parser, Debug)]
#[clap(author, version, about, long_about = None, group(ArgGroup::new("group_profile").multiple(true)),group(ArgGroup::new("group_manual").multiple(true).conflicts_with("group_profile"))
)]
struct Args {
    /// The name of the tokenizer to use
    #[clap(short, long, env)]
    tokenizer_name: String,
    /// The name of the model to use. If not provided, the same name as the tokenizer will be used.
    #[clap(long, env)]
    model_name: Option<String>,
    /// The maximum number of virtual users to use
    #[clap(default_value = "128", short, long, env, group = "group_manual")]
    max_vus: u64,
    /// The duration of each benchmark step
    #[clap(default_value = "120s", short, long, env, group = "group_manual")]
    #[arg(value_parser = parse_duration)]
    duration: Duration,
    /// A list of rates of requests to send per second (only valid for the ConstantArrivalRate benchmark).
    #[clap(short, long, env)]
    rates: Option<Vec<f64>>,
    /// The number of rates to sweep through (only valid for the "sweep" benchmark)
    /// The rates will be linearly spaced up to the detected maximum rate
    #[clap(default_value = "10", long, env)]
    num_rates: u64,
    /// A benchmark profile to use
    #[clap(long, env, group = "group_profile")]
    profile: Option<String>,
    /// The kind of benchmark to run (throughput, sweep, optimum)
    #[clap(default_value = "sweep", short, long, env, group = "group_manual")]
    benchmark_kind: String,
    /// The duration of the prewarm step ran before the benchmark to warm up the backend (JIT, caches, etc.)
    #[clap(default_value = "30s", short, long, env, group = "group_manual")]
    #[arg(value_parser = parse_duration)]
    warmup: Duration,
    /// The URL of the backend to benchmark. Must be compatible with OpenAI Message API
    #[clap(default_value = "http://localhost:8000", short, long, env)]
    url: Url,
    /// The api key send to the [`url`] as Header "Authorization: Bearer {API_KEY}".
    #[clap(default_value = "", short, long, env)]
    api_key: String,
    /// Disable console UI
    #[clap(short, long, env)]
    no_console: bool,
    /// Constraints for prompt length.
    /// No value means use the input prompt as defined in input dataset.
    /// We sample the number of tokens to generate from a normal distribution.
    /// Specified as a comma-separated list of key=value pairs.
    /// * num_tokens: target number of prompt tokens
    /// * min_tokens: minimum number of prompt tokens
    /// * max_tokens: maximum number of prompt tokens
    /// * variance: variance in the number of prompt tokens
    ///
    /// Example: num_tokens=200,max_tokens=210,min_tokens=190,variance=10
    #[clap(
        long,
        env,
        value_parser(parse_tokenizer_options),
        group = "group_manual"
    )]
    prompt_options: Option<TokenizeOptions>,
    /// Constraints for the generated text.
    /// We sample the number of tokens to generate from a normal distribution.
    /// Specified as a comma-separated list of key=value pairs.
    /// * num_tokens: target number of generated tokens
    /// * min_tokens: minimum number of generated tokens
    /// * max_tokens: maximum number of generated tokens
    /// * variance: variance in the number of generated tokens
    ///
    /// Example: num_tokens=200,max_tokens=210,min_tokens=190,variance=10
    #[clap(
        long,
        env,
        value_parser(parse_tokenizer_options),
        group = "group_manual"
    )]
    decode_options: Option<TokenizeOptions>,
    /// Hugging Face dataset to use for prompt generation
    #[clap(
        default_value = "hlarcher/inference-benchmarker",
        long,
        env,
        group = "group_manual"
    )]
    dataset: String,
    /// File to use in the Dataset
    #[clap(
        default_value = "share_gpt_filtered_small.json",
        long,
        env,
        group = "group_manual"
    )]
    dataset_file: String,
    /// Extra metadata to include in the benchmark results file, comma-separated key-value pairs.
    /// It can be, for example, used to include information about the configuration of the
    /// benched server.
    /// Example: --extra-meta "key1=value1,key2=value2"
    #[clap(long, env, value_parser(parse_key_val))]
    extra_meta: Option<HashMap<String, String>>,
    /// A run identifier to use for the benchmark. This is used to identify the benchmark in the
    /// results file.
    #[clap(long, env)]
    run_id: Option<String>,
}
fn parse_duration(s: &str) -> Result<Duration, Error> {
humantime::parse_duration(s).map_err(|_| Error::new(InvalidValue))
}
fn parse_key_val(s: &str) -> Result<HashMap<String, String>, Error> {
let mut key_val_map = HashMap::new();
let items = s.split(",").collect::<Vec<&str>>();
for item in items.iter() {
let key_value = item.split("=").collect::<Vec<&str>>();
if key_value.len() % 2 != 0 {
return Err(Error::new(InvalidValue));
}
for i in 0..key_value.len() / 2 {
key_val_map.insert(
key_value[i * 2].to_string(),
key_value[i * 2 + 1].to_string(),
);
}
}
Ok(key_val_map)
}
fn parse_tokenizer_options(s: &str) -> Result<TokenizeOptions, Error> {
let mut tokenizer_options = TokenizeOptions::new();
let items = s.split(",").collect::<Vec<&str>>();
for item in items.iter() {
let key_value = item.split("=").collect::<Vec<&str>>();
if key_value.len() != 2 {
return Err(Error::new(InvalidValue));
}
match key_value[0] {
"num_tokens" => {
tokenizer_options.num_tokens = Some(key_value[1].parse::<u64>().unwrap())
}
"min_tokens" => tokenizer_options.min_tokens = key_value[1].parse::<u64>().unwrap(),
"max_tokens" => tokenizer_options.max_tokens = key_value[1].parse::<u64>().unwrap(),
"variance" => tokenizer_options.variance = key_value[1].parse::<u64>().unwrap(),
_ => return Err(Error::new(InvalidValue)),
}
}
if tokenizer_options.num_tokens.is_some()
&& (tokenizer_options.num_tokens.unwrap() == 0
|| tokenizer_options.min_tokens == 0
|| tokenizer_options.max_tokens == 0)
{
return Err(Error::new(InvalidValue));
}
if tokenizer_options.min_tokens > tokenizer_options.max_tokens {
return Err(Error::new(InvalidValue));
}
Ok(tokenizer_options)
}
#[tokio::main]
async fn main() {
    // Entry point: parse CLI args, install a ctrl-c handler that broadcasts
    // a stop signal, resolve the HF token, then run the benchmark to completion.
    let args = Args::parse();
    let git_sha = option_env!("VERGEN_GIT_SHA").unwrap_or("unknown");
    println!(
        "Text Generation Inference Benchmark {} ({})",
        env!("CARGO_PKG_VERSION"),
        git_sha
    );
    let (stop_sender, _) = broadcast::channel(1);
    // handle ctrl-c
    let stop_sender_clone = stop_sender.clone();
    tokio::spawn(async move {
        tokio::signal::ctrl_c()
            .await
            .expect("Failed to listen for ctrl-c");
        debug!("Received stop signal, stopping benchmark");
        stop_sender_clone
            .send(())
            .expect("Failed to send stop signal");
    });
    let stop_sender_clone = stop_sender.clone();
    // get HF token: prefer the HF_TOKEN env var, fall back to the hub cache
    let token_env_key = "HF_TOKEN".to_string();
    let cache = hf_hub::Cache::from_env();
    let hf_token = match std::env::var(token_env_key).ok() {
        Some(token) => Some(token),
        None => cache.token(),
    };
    // model defaults to the tokenizer name when not given explicitly
    let model_name = args
        .model_name
        .clone()
        .unwrap_or(args.tokenizer_name.clone());
    // short random run id unless one was supplied
    let run_id = args
        .run_id
        .unwrap_or(uuid::Uuid::new_v4().to_string()[..7].to_string());
    let run_config = RunConfiguration {
        url: args.url,
        api_key: args.api_key,
        profile: args.profile.clone(),
        tokenizer_name: args.tokenizer_name.clone(),
        max_vus: args.max_vus,
        duration: args.duration,
        rates: args.rates,
        num_rates: args.num_rates,
        benchmark_kind: args.benchmark_kind.clone(),
        warmup_duration: args.warmup,
        interactive: !args.no_console,
        prompt_options: args.prompt_options.clone(),
        decode_options: args.decode_options.clone(),
        dataset: args.dataset.clone(),
        dataset_file: args.dataset_file.clone(),
        extra_metadata: args.extra_meta.clone(),
        hf_token,
        model_name,
        run_id,
    };
    let main_thread = tokio::spawn(async move {
        match run(run_config, stop_sender_clone).await {
            Ok(_) => {}
            Err(e) => {
                // log and also print, since the console UI may be disabled
                error!("Fatal: {:?}", e);
                println!("Fatal: {:?}", e)
            }
        };
    });
    let _ = main_thread.await;
}
| rust | Apache-2.0 | 0a54882d9ae3d2e183005345bd86b5c47382a338 | 2026-01-04T20:20:59.288369Z | false |
huggingface/inference-benchmarker | https://github.com/huggingface/inference-benchmarker/blob/0a54882d9ae3d2e183005345bd86b5c47382a338/src/scheduler.rs | src/scheduler.rs | use crate::executors::{
ConstantArrivalRateExecutor, ConstantVUsExecutor, Executor, ExecutorConfig,
};
use crate::requests::{
TextGenerationAggregatedResponse, TextGenerationBackend, TextRequestGenerator,
};
use crate::results::BenchmarkErrors::NoResponses;
use crate::results::BenchmarkResults;
use log::{debug, trace, warn};
use std::sync::Arc;
use tokio::sync::mpsc::{Sender, UnboundedReceiver, UnboundedSender};
use tokio::sync::{broadcast, Mutex};
/// Load-generation strategy used by a [`Scheduler`].
#[derive(Clone, strum_macros::Display)]
pub enum ExecutorType {
    /// Keep a fixed number of virtual users busy for the whole run.
    ConstantVUs,
    /// Issue requests at a fixed arrival rate; requires `ExecutorConfig.rate`.
    ConstantArrivalRate,
}
/// Drives a single benchmark: owns the executor, the request generator and the
/// accumulated results, and periodically reports progress over `progress_tx`.
pub struct Scheduler {
    // Identifier used for logging and to label the results.
    id: String,
    executor: Arc<Mutex<dyn Executor + Send>>,
    requests_generator: Arc<Mutex<dyn TextRequestGenerator + Send>>,
    results: Arc<Mutex<BenchmarkResults>>,
    progress_tx: Sender<Option<SchedulerProgress>>,
    // Broadcast channel used to stop the executor and response-draining task.
    stop_sender: broadcast::Sender<()>,
}
/// Progress snapshot sent to the reporting side after each response.
pub struct SchedulerProgress {
    /// Percentage of the configured duration elapsed, capped at 100.
    pub progress: f64,
    /// Successful requests per second so far.
    pub requests_throughput: f64,
    pub successful_requests: u64,
    pub failed_requests: u64,
}
impl Scheduler {
    /// Builds a scheduler for the given backend and executor type.
    ///
    /// # Panics
    /// Panics when `executor_type` is `ConstantArrivalRate` and `config.rate`
    /// is `None`.
    pub fn new(
        id: String,
        backend: Box<dyn TextGenerationBackend + Send + Sync>,
        executor_type: ExecutorType,
        config: ExecutorConfig,
        requests_generator: Arc<Mutex<dyn TextRequestGenerator + Send>>,
        progress_tx: Sender<Option<SchedulerProgress>>,
        stop_sender: broadcast::Sender<()>,
    ) -> Scheduler {
        match executor_type {
            ExecutorType::ConstantVUs => Scheduler {
                id: id.clone(),
                executor: Arc::from(Mutex::from(ConstantVUsExecutor::new(
                    backend.clone(),
                    config.max_vus,
                    config.duration,
                ))),
                results: Arc::from(Mutex::from(BenchmarkResults::new(
                    id.clone(),
                    ExecutorType::ConstantVUs,
                    config,
                ))),
                requests_generator,
                progress_tx,
                stop_sender,
            },
            ExecutorType::ConstantArrivalRate => {
                if config.rate.is_none() {
                    panic!("Rate must be specified for ConstantArrivalRateExecutor");
                }
                let rate = config.rate.unwrap();
                Scheduler {
                    id: id.clone(),
                    executor: Arc::from(Mutex::from(ConstantArrivalRateExecutor::new(
                        backend.clone(),
                        config.max_vus,
                        config.duration,
                        rate,
                    ))),
                    results: Arc::from(Mutex::from(BenchmarkResults::new(
                        id.clone(),
                        ExecutorType::ConstantArrivalRate,
                        config,
                    ))),
                    requests_generator,
                    progress_tx,
                    stop_sender,
                }
            }
        }
    }
    /// Runs the executor to completion (or until a stop signal), draining
    /// responses into the results on a background task.
    ///
    /// Returns an error when not a single request succeeded.
    pub async fn run(&mut self) -> anyhow::Result<BenchmarkResults> {
        debug!("Starting scheduler '{}'", self.id);
        // add responses to the benchmark result as they arrive
        let (tx, mut rx): (
            UnboundedSender<TextGenerationAggregatedResponse>,
            UnboundedReceiver<TextGenerationAggregatedResponse>,
        ) = tokio::sync::mpsc::unbounded_channel();
        let results = self.results.clone();
        let progress_tx = self.progress_tx.clone();
        let mut stop_receiver = self.stop_sender.subscribe();
        let req_gen = self.requests_generator.clone();
        // Drain task: records each response, notifies the generator (so it can
        // queue follow-up conversation turns) and publishes progress. It exits
        // on a stop broadcast or when an `ended` sentinel response arrives.
        tokio::spawn(async move {
            tokio::select! {
                _ = stop_receiver.recv() => {
                    debug!("Received stop signal, stopping benchmark");
                }
                _ = async{
                    while let Some(response) = rx.recv().await{
                        // call generator callback
                        let response_txt=response.response.clone();
                        if let Some(request)= response.request.clone(){
                            req_gen.lock().await.callback(request, response_txt.unwrap_or_default().as_str());
                        }
                        let result = results.clone();
                        let progress_tx = progress_tx.clone();
                        trace!("Received response: {:?}", response);
                        if response.ended {
                            return;
                        }
                        let mut result = result.lock().await;
                        result.add_response(response);
                        let expected_duration = result.executor_config().duration.as_secs_f64();
                        let start_time = result.start_time().unwrap_or(tokio::time::Instant::now());
                        let _ = progress_tx.send(Some(SchedulerProgress {
                            progress: (100.0 * (1.0 - (expected_duration - start_time.elapsed().as_secs_f64()) / expected_duration)).min(100.0),
                            requests_throughput: result.successful_request_rate().unwrap_or_default(),
                            successful_requests: result.successful_requests() as u64,
                            failed_requests: result.failed_requests() as u64,
                        })).await;
                    }
                }=>{}
            }
        });
        // Run the executor to completion; it owns the sending side `tx`.
        self.executor
            .lock()
            .await
            .run(
                self.requests_generator.clone(),
                tx,
                self.stop_sender.clone(),
            )
            .await;
        // NOTE(review): this logs the full results at `warn` level on every
        // run — looks like a debugging leftover; consider demoting to debug.
        warn!("{:?}", self.results.clone());
        if self.results.lock().await.successful_requests() == 0 {
            Err(anyhow::anyhow!(NoResponses))
        } else {
            Ok(self.results.lock().await.clone())
        }
    }
    /// Returns a shared handle to the (possibly still-updating) results.
    pub fn get_results(&self) -> Arc<Mutex<BenchmarkResults>> {
        self.results.clone()
    }
}
// Scheduler integration tests: each one drives a full scheduler run against a
// dummy (or mocked HTTP) backend and asserts on the achieved request counts.
#[cfg(test)]
mod tests {
    use super::*;
    use crate::requests::OpenAITextGenerationBackend;
    use std::time::Duration;
    use tokenizers::Tokenizer;
    use tokio::time;
    // Constant arrival rate: 20 req/s for 10 s against a 1 s-latency backend.
    #[tokio::test]
    async fn test_constant_arrival_rate_scheduler() {
        let (progress_tx, _) = tokio::sync::mpsc::channel(10000);
        let (stop_sender, _) = tokio::sync::broadcast::channel(1);
        let backend = Box::new(crate::requests::DummyTextGenerationBackend::new(
            Duration::from_secs(1),
        ));
        let requests_generator = Arc::from(Mutex::from(
            crate::requests::DummyTextRequestGenerator::new(),
        ));
        let mut scheduler = Scheduler::new(
            "test".to_string(),
            backend,
            ExecutorType::ConstantArrivalRate,
            ExecutorConfig {
                max_vus: 800,
                duration: std::time::Duration::from_secs(10),
                rate: Some(20.0),
            },
            requests_generator,
            progress_tx,
            stop_sender,
        );
        let results = scheduler.run().await.unwrap();
        assert_eq!(results.successful_requests(), 180); // 20 requests per second for 10 seconds - 20 requests for last second as the backend has a 1 second delay
    }
    // Constant virtual users: 800 VUs for 10 s; expect a throughput floor.
    #[tokio::test]
    async fn test_constant_vus_scheduler() {
        let (progress_tx, _) = tokio::sync::mpsc::channel(10000);
        let (stop_sender, _) = broadcast::channel(1);
        let backend = Box::new(crate::requests::DummyTextGenerationBackend::new(
            Duration::from_secs(1),
        ));
        let requests_generator = Arc::from(Mutex::from(
            crate::requests::DummyTextRequestGenerator::new(),
        ));
        let mut scheduler = Scheduler::new(
            "test".to_string(),
            backend,
            ExecutorType::ConstantVUs,
            ExecutorConfig {
                max_vus: 800,
                duration: Duration::from_secs(10),
                rate: None,
            },
            requests_generator,
            progress_tx,
            stop_sender,
        );
        let results = scheduler.run().await.unwrap();
        assert!(
            results.successful_requests() > 7200,
            "Expected at least 7200 requests, got {}",
            results.successful_requests()
        );
    }
    // Full HTTP path: constant arrival rate against a mocked SSE endpoint
    // that injects a 500 ms delay mid-stream.
    #[tokio::test]
    async fn test_constant_arrival_rate_openai_backend() {
        let (progress_tx, _) = tokio::sync::mpsc::channel(10000);
        let (stop_sender, _) = tokio::sync::broadcast::channel(1);
        let mut s = mockito::Server::new_async().await;
        s.mock("POST", "/v1/chat/completions")
            .with_status(200)
            .with_header("content-type", "text/event-stream")
            .with_chunked_body(|w| {
                w.write_all(b"data: {\"choices\": [{\"message\": null, \"finish_reason\": null, \"delta\": {\"content\": \"Hello, world!\"}}]}\n\n").unwrap();
                std::thread::sleep(Duration::from_millis(500));
                w.write_all(b"data: {\"choices\": [{\"message\": {\"content\": \"Hello, world!Hello, world!Hello, world!Hello, world!\", \"role\": \"user\"}, \"finish_reason\": \"stop\", \"delta\": {\"content\": \"Hello, world!\"}}]}\n\n").unwrap();
                w.write_all(b"data: [DONE]\n\n")
            })
            .create_async().await;
        let url = s.url().parse().unwrap();
        let tokenizer = Arc::new(Tokenizer::from_pretrained("gpt2", None).unwrap());
        let backend = OpenAITextGenerationBackend::try_new(
            "".to_string(),
            url,
            "gpt2".to_string(),
            tokenizer,
            time::Duration::from_secs(10),
        )
        .unwrap();
        let requests_generator = Arc::from(Mutex::from(
            crate::requests::DummyTextRequestGenerator::new(),
        ));
        let mut scheduler = Scheduler::new(
            "test".to_string(),
            Box::new(backend),
            ExecutorType::ConstantArrivalRate,
            ExecutorConfig {
                max_vus: 800,
                duration: Duration::from_secs(10),
                rate: Some(50.0),
            },
            requests_generator,
            progress_tx,
            stop_sender,
        );
        let results = scheduler.run().await.unwrap();
        assert_eq!(results.successful_requests(), 475); // 25 expected missing requests due to the 500ms delay in the backend
    }
}
| rust | Apache-2.0 | 0a54882d9ae3d2e183005345bd86b5c47382a338 | 2026-01-04T20:20:59.288369Z | false |
huggingface/inference-benchmarker | https://github.com/huggingface/inference-benchmarker/blob/0a54882d9ae3d2e183005345bd86b5c47382a338/src/requests.rs | src/requests.rs | use async_trait::async_trait;
use futures_util::StreamExt;
use hf_hub::api::sync::ApiBuilder;
use indicatif::{ProgressBar, ProgressStyle};
use log::{debug, error, info, trace, warn};
use rand_distr::Distribution;
use rayon::iter::split;
use rayon::prelude::*;
use reqwest::Url;
use reqwest_eventsource::{Error, Event, EventSource};
use serde::{Deserialize, Serialize};
use std::cmp::Ordering;
use std::collections::{BinaryHeap, HashMap};
use std::fmt::Display;
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
use std::time;
use tokenizers::{FromPretrainedParameters, Tokenizer};
use tokio::sync::mpsc::Sender;
use tokio::time::{sleep, Instant};
use uuid::Uuid;
/// A single prompt to send to a backend.
#[derive(Debug, Clone, Eq, PartialEq)]
pub struct TextGenerationRequest {
    // Id of the *next* conversation turn this request chains into (see
    // `ConversationTextRequestGenerator::callback`); `None` for the last turn.
    pub id: Option<Uuid>,
    pub prompt: String,
    pub num_prompt_tokens: u64,
    // Requested number of tokens to generate; `None` lets the backend decide.
    pub num_decode_tokens: Option<u64>,
}
/// A text-generation backend: executes a request and streams the aggregated
/// response back over `sender`.
#[async_trait]
pub trait TextGenerationBackend: TextGenerationBackendClone {
    async fn generate(
        &self,
        request: Arc<TextGenerationRequest>,
        sender: Sender<TextGenerationAggregatedResponse>,
    );
}
// Standard "clone-box" pattern so `Box<dyn TextGenerationBackend>` can be
// cloned despite `Clone` not being object-safe.
pub trait TextGenerationBackendClone {
    fn clone_box(&self) -> Box<dyn TextGenerationBackend + Send + Sync>;
}
// Blanket impl: any cloneable backend gets `clone_box` for free.
impl<T> TextGenerationBackendClone for T
where
    T: 'static + TextGenerationBackend + Clone + Send + Sync,
{
    fn clone_box(&self) -> Box<dyn TextGenerationBackend + Send + Sync> {
        Box::new(self.clone())
    }
}
impl Clone for Box<dyn TextGenerationBackend + Send + Sync> {
    fn clone(&self) -> Box<dyn TextGenerationBackend + Send + Sync> {
        self.clone_box()
    }
}
/// Backend that talks to any OpenAI-compatible `/v1/chat/completions`
/// endpoint over SSE streaming.
#[derive(Debug, Clone)]
pub struct OpenAITextGenerationBackend {
    pub api_key: String,
    pub base_url: Url,
    pub model_name: String,
    pub client: reqwest::Client,
    // Used to count tokens in each streamed delta chunk.
    pub tokenizer: Arc<Tokenizer>,
    pub timeout: time::Duration,
}
// Wire types mirroring the OpenAI chat-completions JSON schema (only the
// fields this crate reads/writes).
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OpenAITextGenerationMessage {
    pub content: String,
    pub role: String,
}
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OpenAITextGenerationDelta {
    pub content: Option<String>,
}
#[derive(Deserialize, Serialize, Clone, Debug)]
pub struct OpenAITextGenerationChoice {
    pub message: Option<OpenAITextGenerationMessage>,
    pub finish_reason: Option<String>,
    pub delta: Option<OpenAITextGenerationDelta>,
}
#[derive(Deserialize, Serialize, Clone)]
pub struct OpenAITextGenerationResponse {
    pub choices: Vec<OpenAITextGenerationChoice>,
}
#[derive(Deserialize, Serialize, Clone)]
pub struct OpenAITextGenerationRequest {
    pub model: String,
    pub messages: Vec<OpenAITextGenerationMessage>,
    pub max_tokens: Option<u64>,
    pub stream: bool,
    pub stop: Option<String>,
    pub temperature: f64,
}
impl OpenAITextGenerationBackend {
pub fn try_new(
api_key: String,
base_url: Url,
model_name: String,
tokenizer: Arc<Tokenizer>,
timeout: time::Duration,
) -> anyhow::Result<Self> {
let client = reqwest::Client::builder()
.timeout(timeout)
.build()
.map_err(|e| anyhow::anyhow!("Error creating HTTP client: {e}"))?;
Ok(Self {
client,
api_key,
base_url,
model_name,
tokenizer,
timeout,
})
}
}
#[async_trait]
impl TextGenerationBackend for OpenAITextGenerationBackend {
    /// Streams one chat-completion request over SSE, aggregating per-chunk
    /// token counts and timings into a single `TextGenerationAggregatedResponse`
    /// that is sent on `sender` when the stream ends (success or failure).
    ///
    /// NOTE(review): `aggregated_response.response` is never populated here
    /// (the accumulated text lives only in the local `final_response`) —
    /// confirm whether downstream `callback` consumers expect it.
    async fn generate(
        &self,
        request: Arc<TextGenerationRequest>,
        sender: Sender<TextGenerationAggregatedResponse>,
    ) {
        let mut url = self.base_url.clone();
        url.set_path("/v1/chat/completions");
        let mut aggregated_response = TextGenerationAggregatedResponse::new(request.clone());
        let messages = vec![OpenAITextGenerationMessage {
            role: "user".to_string(),
            content: request.prompt.clone(),
        }];
        // temperature 0 + stream=true: deterministic decoding, streamed chunks.
        let body = OpenAITextGenerationRequest {
            model: self.model_name.clone(),
            messages,
            max_tokens: request.num_decode_tokens,
            stream: true,
            stop: None,
            temperature: 0.0,
        };
        let req = self
            .client
            .post(url)
            .header(
                "Authorization",
                format!("Bearer {token}", token = self.api_key),
            )
            .json(&serde_json::json!(body))
            .timeout(self.timeout);
        // start timer
        aggregated_response.start();
        // NOTE(review): `EventSource::new` only errors on non-cloneable
        // request builders, but this `unwrap()` is still a panic path.
        let mut es = EventSource::new(req).unwrap();
        let mut final_response = "".to_string();
        while let Some(event) = es.next().await {
            match event {
                Ok(Event::Open) => trace!("SSE connection opened"),
                Ok(Event::Message(message)) => {
                    // Stream terminators: stop the latency clock.
                    if message.data == "\n" || message.data == "[DONE]" {
                        aggregated_response.stop();
                        continue;
                    }
                    // Servers report errors as a JSON payload on the stream.
                    if message.data.starts_with("{\"error\":") {
                        error!("Error from OpenAI API: {message}", message = message.data);
                        aggregated_response.fail();
                        es.close();
                        break;
                    }
                    // deserialize message data
                    let oai_response: OpenAITextGenerationResponse =
                        match serde_json::from_str(&message.data) {
                            Ok(response) => response,
                            Err(e) => {
                                error!("Error deserializing OpenAI API response: {e}", e = e);
                                aggregated_response.fail();
                                es.close();
                                break;
                            }
                        };
                    let choices = oai_response.choices;
                    // NOTE(review): panics if `choices` is empty or `delta`
                    // is absent — consider failing the response instead.
                    let content = choices[0]
                        .clone()
                        .delta
                        .unwrap()
                        .content
                        .unwrap_or("".to_string());
                    if content.is_empty() {
                        // skip empty responses
                        continue;
                    }
                    // we need to count the number of tokens generated as each delta chunk may contain multiple tokens
                    // that's the case with vLLM chunked prefill or speculative decoding
                    let num_tokens =
                        self.tokenizer.encode(content.clone(), false).unwrap().len() as u64;
                    if num_tokens > 1 {
                        warn!(
                            "Generated more than one token: {num_tokens}",
                            num_tokens = num_tokens
                        );
                    }
                    match choices[0].clone().finish_reason {
                        None => {
                            aggregated_response.add_tokens(num_tokens);
                            final_response += content.as_str();
                        }
                        // A finish_reason marks the last content chunk.
                        Some(_) => {
                            aggregated_response.add_tokens(num_tokens);
                            aggregated_response.stop();
                            trace!("Generated text using OpenAI API | prompt: {prompt}, max tokens: {max_tokens:?}, response: {message}", prompt = request.prompt, max_tokens = request.num_decode_tokens,message = &content);
                        }
                    };
                }
                Err(e) => {
                    // Every transport/parse error marks the response failed;
                    // StreamEnded additionally distinguishes "no data at all"
                    // from "closed before the final chunk".
                    match e {
                        Error::Utf8(_) => {
                            aggregated_response.fail();
                        }
                        Error::Parser(_) => {
                            aggregated_response.fail();
                        }
                        Error::Transport(_) => {
                            aggregated_response.fail();
                        }
                        Error::InvalidContentType(_, _) => {
                            aggregated_response.fail();
                        }
                        Error::InvalidStatusCode(_, _) => {
                            aggregated_response.fail();
                        }
                        Error::InvalidLastEventId(_) => {
                            aggregated_response.fail();
                        }
                        Error::StreamEnded => {
                            if aggregated_response.num_generated_tokens == 0 {
                                // server sent no data
                                aggregated_response.fail();
                            }
                            if aggregated_response.end_time.is_none() {
                                // server closed the connection before we received the final response
                                warn!("Connection closed before completion. Received :: {num_tokens}/{max_tokens} tokens. Response: {final_response}", num_tokens = aggregated_response.num_generated_tokens, max_tokens = request.num_decode_tokens.unwrap_or(0));
                                aggregated_response.fail();
                            }
                        }
                    }
                    es.close();
                }
            };
        }
        sender
            .send(aggregated_response.clone())
            .await
            .expect("Error sending response to channel");
    }
}
/// Test backend that fakes generation by sleeping: it emits `num_decode_tokens`
/// (default 10) tokens evenly spread over `time_to_generate`.
#[derive(Debug, Clone)]
pub struct DummyTextGenerationBackend {
    // Total wall time one "generation" takes.
    time_to_generate: time::Duration,
}
impl DummyTextGenerationBackend {
    pub fn new(time_to_generate: time::Duration) -> Self {
        Self { time_to_generate }
    }
}
impl Default for DummyTextGenerationBackend {
    fn default() -> Self {
        Self::new(time::Duration::from_secs(1))
    }
}
#[async_trait]
impl TextGenerationBackend for DummyTextGenerationBackend {
    async fn generate(
        &self,
        request: Arc<TextGenerationRequest>,
        sender: Sender<crate::requests::TextGenerationAggregatedResponse>,
    ) {
        let mut response = TextGenerationAggregatedResponse::new(request.clone());
        response.start();
        let num_tokens = request.num_decode_tokens.unwrap_or(10);
        // NOTE(review): panics if `num_decode_tokens` is `Some(0)` —
        // `checked_div(0)` yields `None`.
        let time_per_token = self
            .time_to_generate
            .checked_div(num_tokens as u32)
            .unwrap();
        for _ in 0..num_tokens {
            sleep(time_per_token).await;
            response.add_tokens(1);
        }
        response.stop();
        sender
            .send(response.clone())
            .await
            .expect("Error sending response to channel");
    }
}
/// Source of benchmark requests.
pub trait TextRequestGenerator: Sync {
    /// Returns the next request to send.
    fn generate_request(&mut self) -> TextGenerationRequest;
    /// callback can be used by generators to add new requests to the queue based on the response (e.g. for multi-turn conversation generation)
    fn callback(&mut self, request: Arc<TextGenerationRequest>, response: &str);
}
/// One turn in a ShareGPT-style conversation dataset entry.
#[derive(Deserialize, Serialize, Clone)]
pub struct Conversation {
    pub role: String,
    pub content: String,
}
/// One dataset record: an id plus its ordered conversation turns.
#[derive(Deserialize, Serialize, Clone)]
pub struct ConversationEntry {
    pub id: String,
    pub conversations: Vec<Conversation>,
}
/// Constraints on prompt/decode token counts, parsed from the CLI
/// (`num_tokens=..,min_tokens=..,max_tokens=..,variance=..`).
#[derive(Clone, Serialize, Debug)]
pub struct TokenizeOptions {
    // Exact token count to truncate/sample to; `None` means "keep as-is,
    // only enforce min/max".
    pub num_tokens: Option<u64>,
    pub min_tokens: u64,
    pub max_tokens: u64,
    // Spread used when sampling a token count around `num_tokens`.
    pub variance: u64,
}
impl TokenizeOptions {
    /// Unconstrained options: no target count, full `0..=u64::MAX` range.
    pub fn new() -> Self {
        Self {
            num_tokens: None,
            min_tokens: 0,
            max_tokens: u64::MAX,
            variance: 0,
        }
    }
}
impl Default for TokenizeOptions {
    fn default() -> Self {
        Self::new()
    }
}
impl Display for TokenizeOptions {
    // Round-trippable `key=value` rendering matching the CLI input format.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "num_tokens={num_tokens:?},min_tokens={min_tokens},max_tokens={max_tokens},variance={variance}",
            num_tokens = self.num_tokens,
            min_tokens = self.min_tokens,
            max_tokens = self.max_tokens,
            variance = self.variance
        )
    }
}
/// A conversation turn queued for scheduling. `priority`/`tie` drive the
/// max-heap ordering in `ConversationTextRequestGenerator::queue`.
#[derive(Clone, Eq, PartialEq)]
pub struct ConversationTurnRequest {
    id: Uuid,
    // Higher priority pops first; follow-up turns get priority 100.
    priority: u64,
    // Tie-breaker: among equal priorities, the oldest timestamp pops first.
    tie: Instant,
    request: TextGenerationRequest,
}
impl Ord for ConversationTurnRequest {
    // order by increasing priority and decreasing tie-breaking
    // this way, we can pop the item with the highest priority and oldest tie-breaking
    fn cmp(&self, other: &Self) -> Ordering {
        self.priority
            .cmp(&other.priority)
            .then_with(|| self.tie.cmp(&other.tie).reverse())
    }
}
impl PartialOrd for ConversationTurnRequest {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
/// Request generator backed by a conversation dataset; supports multi-turn
/// chains via `callback`.
#[derive(Clone)]
pub struct ConversationTextRequestGenerator {
    // All turns, addressable by id so `callback` can find the next turn.
    pub requests: HashMap<Uuid, ConversationTurnRequest>,
    // Scheduling queue (max-heap per the `Ord` impl above).
    pub queue: BinaryHeap<ConversationTurnRequest>,
    pub tokenizer: Arc<Tokenizer>,
}
impl ConversationTextRequestGenerator {
    /// Load a conversation dataset from a JSON file
    /// The JSON file should be an array of objects, each object representing a conversation entry
    /// Each conversation entry should have an `id` field and a `conversations` field
    /// The `conversations` field should be an array of objects, each object representing a turn in the conversation
    /// Each turn should have a `role` field and a `content` field
    /// The `role` field should be either "user" or "system"
    /// The `content` field should be the text of the turn
    /// All conversation turns are tokenized and converted into `TextGenerationRequest`. The `id` field is used to link turns in the conversation,
    /// so that each `TextGenerationRequest` has a reference to the next turn in the conversation.
    ///
    /// Tokenization runs in parallel via rayon's `split`; prompts that fail
    /// the tokenize constraints are skipped.
    pub fn load(
        filepath: PathBuf,
        tokenizer: String,
        prompt_tokenize_opts: Option<TokenizeOptions>,
        decode_tokenize_opts: Option<TokenizeOptions>,
        hf_token: Option<String>,
    ) -> anyhow::Result<Self> {
        let params = FromPretrainedParameters {
            token: hf_token,
            ..Default::default()
        };
        let tokenizer = match Tokenizer::from_pretrained(tokenizer, Some(params)) {
            Ok(tokenizer) => tokenizer,
            Err(e) => {
                return Err(anyhow::anyhow!("Error loading tokenizer: {e}"));
            }
        };
        let tokenizer = Arc::new(tokenizer);
        // load json file
        let input = std::fs::read_to_string(&filepath)?;
        let data: Vec<ConversationEntry> = serde_json::from_str(&input).expect("Unable to parse input file. Check that it is valid JSON and matches the expected format.");
        // generate requests
        let requests: Arc<Mutex<HashMap<Uuid, ConversationTurnRequest>>> =
            Arc::from(Mutex::from(HashMap::new()));
        info!(
            "Generating requests from {filepath}",
            filepath = filepath.display()
        );
        let bar = ProgressBar::new(data.len() as u64);
        bar.set_style(ProgressStyle::with_template(
            "Tokenizing prompts [{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}",
        )?);
        // Parallel over sub-ranges of entries (see `entry_splitter`).
        split(data, entry_splitter).for_each(|subrange| {
            for entry in subrange {
                bar.inc(1);
                if entry.conversations.is_empty() {
                    continue;
                }
                // Pre-generate one id per turn so turns can link forward.
                let ids = (0..entry.conversations.len())
                    .map(|_| Uuid::new_v4())
                    .collect::<Vec<Uuid>>();
                let filtered_conversations = entry
                    .conversations
                    .iter()
                    .filter(|c| c.role == "user" || c.role == "system")
                    .collect::<Vec<&Conversation>>();
                for (turn_idx, c) in filtered_conversations.iter().enumerate() {
                    let prompt = c.content.clone();
                    let num_decode_tokens = decode_tokenize_opts.clone().map_or_else(
                        || None,
                        |opts| {
                            opts.num_tokens.map(|num_tokens| {
                                sample_num_tokens(
                                    num_tokens,
                                    opts.min_tokens,
                                    opts.max_tokens,
                                    opts.variance,
                                )
                            })
                        },
                    );
                    let next_id = if turn_idx == entry.conversations.len() - 1 {
                        None
                    } else {
                        Some(ids[turn_idx + 1]) // link to next turn in the conversation
                    };
                    debug!("Prompt: {prompt}", prompt = prompt);
                    match &prompt_tokenize_opts {
                        None => {
                            // No constraints requested: keep the prompt verbatim,
                            // only measure its token count.
                            let (_, num_tokens) = match tokenize_prompt(
                                prompt.clone(),
                                tokenizer.clone(),
                                &TokenizeOptions::default(),
                            ) {
                                Ok((prompt, num_tokens)) => (prompt, num_tokens),
                                Err(e) => {
                                    debug!("Error tokenizing prompt: {e}");
                                    // NOTE(review): this `return` exits the whole
                                    // `for_each` closure, skipping every remaining
                                    // entry in this subrange — `continue` on the
                                    // inner loop was likely intended. Same below.
                                    return;
                                }
                            };
                            requests.lock().unwrap().insert(
                                ids[turn_idx],
                                ConversationTurnRequest {
                                    id: ids[turn_idx],
                                    priority: turn_idx as u64,
                                    tie: Instant::now(),
                                    request: TextGenerationRequest {
                                        id: next_id,
                                        prompt,
                                        num_prompt_tokens: num_tokens,
                                        num_decode_tokens,
                                    },
                                },
                            );
                        }
                        Some(options) => {
                            // compute number of tokens to generate using a Gaussian distribution
                            let (sampled_prompt, prompt_tokens) =
                                match tokenize_prompt(prompt.clone(), tokenizer.clone(), options) {
                                    Ok(prompt) => prompt,
                                    Err(e) => {
                                        debug!("Error tokenizing prompt: {e}");
                                        return;
                                    }
                                };
                            requests.lock().unwrap().insert(
                                ids[turn_idx],
                                ConversationTurnRequest {
                                    id: ids[turn_idx],
                                    tie: Instant::now(),
                                    priority: turn_idx as u64,
                                    request: TextGenerationRequest {
                                        id: next_id,
                                        prompt: sampled_prompt,
                                        num_prompt_tokens: prompt_tokens,
                                        num_decode_tokens,
                                    },
                                },
                            );
                        }
                    }
                }
                // TODO: check that we have enough requests
            }
        });
        let requests = requests.lock().unwrap();
        info!(
            "Generated {num_requests} requests",
            num_requests = requests.len()
        );
        // create the queue from the hashmap. Only queue first turns in the conversation
        let queue = BinaryHeap::from(
            requests
                .values()
                .filter(|item| item.priority == 0)
                .cloned()
                .collect::<Vec<ConversationTurnRequest>>(),
        );
        Ok(Self {
            requests: requests.clone(),
            tokenizer,
            queue,
        })
    }
    /// Fetches `filename` from the given Hugging Face dataset repo (cached by
    /// hf_hub) and returns the local path.
    pub fn download_dataset(
        repo_name: String,
        filename: String,
        hf_token: Option<String>,
    ) -> anyhow::Result<PathBuf> {
        let api = ApiBuilder::from_env().with_token(hf_token).build()?;
        let repo = api.dataset(repo_name);
        let dataset = repo.get(&filename)?;
        Ok(dataset)
    }
}
/// Samples a token count from a normal distribution centered on `num_tokens`,
/// then forces the result into `[min_tokens, max_tokens]` (raising to the
/// minimum first, then capping at the maximum).
///
/// NOTE(review): `variance` is passed to `Normal::new` as the *standard
/// deviation*, not a variance — the parameter name is misleading.
fn sample_num_tokens(num_tokens: u64, min_tokens: u64, max_tokens: u64, variance: u64) -> u64 {
    let distribution = rand_distr::Normal::new(num_tokens as f64, variance as f64).unwrap();
    // A negative sample saturates to 0 via the `as u64` cast, then gets
    // raised to `min_tokens`.
    let sampled = distribution.sample(&mut rand::rng()) as u64;
    sampled.max(min_tokens).min(max_tokens)
}
/// Splits a batch of conversation entries roughly in half for rayon's
/// `split`-based parallelism; batches of two or fewer are not split further.
fn entry_splitter(
    gen: Vec<ConversationEntry>,
) -> (Vec<ConversationEntry>, Option<Vec<ConversationEntry>>) {
    if gen.len() <= 2 {
        return (gen, None);
    }
    // `split_off` moves the tail into a fresh Vec instead of cloning both
    // halves (the previous `split_at` + `to_vec` copied every entry).
    let mut left = gen;
    let right = left.split_off(left.len() / 2);
    (left, Some(right))
}
impl TextRequestGenerator for ConversationTextRequestGenerator {
    /// Pops the highest-priority turn. First turns (priority 0) are re-queued
    /// with a fresh tie-breaker so the pool of conversation starters never
    /// runs dry; follow-up turns are consumed once.
    ///
    /// NOTE(review): panics ("Queue is empty") if the dataset produced no
    /// first-turn requests.
    fn generate_request(&mut self) -> TextGenerationRequest {
        let item = self.queue.pop().expect("Queue is empty");
        // add the item back to the end of the queue if it is a first turn in the conversation
        if item.priority == 0 {
            let mut cloned_item = item.clone();
            cloned_item.tie = Instant::now(); // update the tie-breaking for intra-priority sorting
            self.queue.push(cloned_item);
        }
        item.request
    }
    /// Use callback to add a new chat turn to the queue.
    /// The turn is generated from the `TextGenerationRequest`, using the `id` field to link it to
    /// the next turn in the conversation.
    /// Those turns must be scheduled as soon as possible so that we may benefit from
    /// KV cache hits. The `priority` field is used to move the turn to the front of the queue.
    fn callback(&mut self, request: Arc<TextGenerationRequest>, response: &str) {
        // retrieve current turn id
        let id = match request.id {
            None => {
                return;
            }
            Some(id) => id,
        };
        // retrieve next turn from id
        let next_request = match self.requests.get(&id) {
            None => {
                return;
            }
            Some(request) => request,
        };
        // create a new turn with the prompt concatenated with the response and next turn's prompt
        // and add the next turn id to the new turn
        let new_prompt =
            request.prompt.clone() + "\n" + response + "\n" + next_request.request.prompt.as_str();
        // tokenize the prompt
        let (prompt, num_tokens) = match tokenize_prompt(
            new_prompt.to_string(),
            self.tokenizer.clone(),
            &TokenizeOptions::default(),
        ) {
            Ok((prompt, num_tokens)) => (prompt, num_tokens),
            Err(_) => {
                return;
            }
        };
        let next_id = next_request.request.id;
        let turn = ConversationTurnRequest {
            id,
            priority: 100, // move to the front of the queue
            tie: Instant::now(), // use the current time as tie-breaking (older turns have higher priority)
            request: TextGenerationRequest {
                id: next_id,
                prompt,
                num_prompt_tokens: num_tokens,
                num_decode_tokens: request.num_decode_tokens, // decode tokens do not change between turns
            },
        };
        self.queue.push(turn);
    }
}
/// Test generator that always yields the same fixed request.
pub struct DummyTextRequestGenerator {}
impl DummyTextRequestGenerator {
    pub fn new() -> Self {
        Self {}
    }
}
impl Default for DummyTextRequestGenerator {
    fn default() -> Self {
        Self::new()
    }
}
impl TextRequestGenerator for DummyTextRequestGenerator {
    // Constant request: 2 prompt tokens, 10 decode tokens, no turn chaining.
    fn generate_request(&mut self) -> TextGenerationRequest {
        TextGenerationRequest {
            id: None,
            prompt: "Hello, world!".to_string(),
            num_prompt_tokens: 2,
            num_decode_tokens: Some(10),
        }
    }
    // No multi-turn behavior for the dummy generator.
    fn callback(&mut self, _request: Arc<TextGenerationRequest>, _response: &str) {}
}
fn tokenize_prompt(
prompt: String,
tokenizer: Arc<Tokenizer>,
options: &TokenizeOptions,
) -> anyhow::Result<(String, u64)> {
let prompt_tokens = tokenizer
.encode(prompt.clone(), false)
.map_err(|_| anyhow::anyhow!("Error tokenizing prompt"))?;
match options.num_tokens {
None => {
// check if we have a min/max number of tokens, skip prompts that are too short or too long
if prompt_tokens.len() > options.max_tokens as usize
|| prompt_tokens.len() < options.min_tokens as usize
{
return Err(anyhow::anyhow!(format!(
"Prompt is too short or too long, skipping: {}<{}<{}",
options.min_tokens,
prompt_tokens.len(),
options.max_tokens
)));
}
Ok((prompt, prompt_tokens.len() as u64))
}
Some(num_tokens) => {
if prompt_tokens.len() < num_tokens as usize {
return Err(anyhow::anyhow!(format!(
"Prompt is too short to tokenize: {}<{}",
prompt_tokens.len(),
num_tokens
)));
}
let tokens = prompt_tokens
.get_ids()
.iter()
.take(num_tokens as usize)
.copied()
.collect::<Vec<u32>>();
let prompt = tokenizer.decode(&tokens, true).unwrap();
Ok((prompt, num_tokens))
}
}
}
/// Per-request timing/token aggregate streamed back to the scheduler.
#[derive(Debug, Clone)]
pub struct TextGenerationAggregatedResponse {
    pub start_time: Option<tokio::time::Instant>,
    pub end_time: Option<tokio::time::Instant>,
    pub num_generated_tokens: u64,
    // One duration per received chunk (time since the previous chunk).
    pub times_to_tokens: Vec<time::Duration>,
    last_received_token_time: tokio::time::Instant,
    pub failed: bool,
    // `true` only for the "no more responses" sentinel (`new_as_ended`).
    pub ended: bool,
    pub request: Option<Arc<TextGenerationRequest>>,
    pub response: Option<String>,
}
impl TextGenerationAggregatedResponse {
    /// Creates a tracker tied to `request`; timing begins on `start()`.
    pub fn new(request: Arc<TextGenerationRequest>) -> Self {
        Self {
            start_time: None,
            end_time: None,
            num_generated_tokens: 0,
            times_to_tokens: Vec::new(),
            last_received_token_time: tokio::time::Instant::now(),
            failed: false,
            ended: false,
            request: Some(request),
            response: None,
        }
    }
    /// Creates the sentinel value that tells consumers no more responses
    /// will arrive.
    pub fn new_as_ended() -> Self {
        Self {
            start_time: None,
            end_time: None,
            num_generated_tokens: 0,
            times_to_tokens: Vec::new(),
            last_received_token_time: tokio::time::Instant::now(),
            failed: false,
            ended: true,
            request: None,
            response: None,
        }
    }
    /// Starts the request clock.
    fn start(&mut self) {
        self.start_time = Some(tokio::time::Instant::now());
        self.last_received_token_time = tokio::time::Instant::now();
    }
    /// Marks successful completion.
    fn stop(&mut self) {
        self.end_time = Some(tokio::time::Instant::now());
    }
    /// Marks failure; also records the end time.
    fn fail(&mut self) {
        self.end_time = Some(tokio::time::Instant::now());
        self.failed = true;
    }
    /// Records `num_tokens` newly received tokens together with the time
    /// elapsed since the previous chunk. The whole inter-chunk duration is
    /// pushed as one entry, so `times_to_tokens` has one entry per chunk.
    fn add_tokens(&mut self, num_tokens: u64) {
        self.num_generated_tokens += num_tokens;
        let time_to_generate = self.last_received_token_time.elapsed();
        // Removed a leftover `time_to_generate.checked_div(num_tokens as u32)
        // .unwrap();` statement: its result was discarded, and it panicked
        // when `num_tokens` was 0 (division by zero yields `None`).
        self.last_received_token_time = tokio::time::Instant::now();
        self.times_to_tokens.push(time_to_generate);
    }
    /// Time from `start()` to the first received chunk, if any.
    pub fn time_to_first_token(&self) -> Option<std::time::Duration> {
        match self.start_time {
            Some(_) => self.times_to_tokens.first().copied(),
            None => None,
        }
    }
    /// Average latency per token, excluding the first chunk (which measures
    /// time-to-first-token rather than inter-token time).
    pub fn inter_token_latency(&self) -> Option<std::time::Duration> {
        match self.times_to_tokens.len() {
            0 => None,
            1 => Some(std::time::Duration::new(0, 0)),
            _ => {
                let mut total_time = std::time::Duration::new(0, 0);
                for i in 1..self.times_to_tokens.len() {
                    total_time += self.times_to_tokens[i];
                }
                // Guard the divisor: with fewer than 2 recorded tokens the
                // previous `num_generated_tokens - 1` underflowed (panic in
                // debug builds).
                let divisor = self.num_generated_tokens.saturating_sub(1).max(1) as u32;
                Some(total_time / divisor)
            }
        }
    }
    /// Wall-clock duration of the whole request, when both timestamps exist.
    pub fn e2e_latency(&self) -> Option<std::time::Duration> {
        match self.start_time {
            Some(start_time) => self.end_time.map(|end_time| end_time - start_time),
            None => None,
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
use crate::executors::ExecutorConfig;
use crate::results::BenchmarkResults;
use crate::scheduler::ExecutorType;
use std::sync::atomic::AtomicU64;
use std::thread::sleep;
use std::time::Duration;
use tokio::sync::RwLock;
#[tokio::test]
async fn test_openai_token_count() {
let mut s = mockito::Server::new_async().await;
s.mock("POST", "/v1/chat/completions")
.with_status(200)
.with_header("content-type", "text/event-stream")
.with_chunked_body(|w| {
w.write_all(b"data: {\"choices\": [{\"message\": null, \"finish_reason\": null, \"delta\": {\"content\": \"Hello, world!\"}}]}\n\n").unwrap();
w.write_all(b"data: {\"choices\": [{\"message\": null, \"finish_reason\": null, \"delta\": {\"content\": \"Hello, world!\"}}]}\n\n").unwrap();
w.write_all(b"data: {\"choices\": [{\"message\": null, \"finish_reason\": null, \"delta\": {\"content\": \"Hello, world!\"}}]}\n\n").unwrap();
w.write_all(b"data: {\"choices\": [{\"message\": {\"content\": \"Hello, world!Hello, world!Hello, world!Hello, world!\", \"role\": \"user\"}, \"finish_reason\": \"stop\", \"delta\": {\"content\": \"Hello, world!\"}}]}\n\n").unwrap();
w.write_all(b"data: [DONE]\n\n")
})
.create_async().await;
let url = s.url().parse().unwrap();
let tokenizer = Arc::new(Tokenizer::from_pretrained("gpt2", None).unwrap());
let backend = OpenAITextGenerationBackend::try_new(
"".to_string(),
url,
"gpt2".to_string(),
tokenizer,
time::Duration::from_secs(10),
)
.unwrap();
let request = TextGenerationRequest {
id: None,
prompt: "Hello, world!".to_string(),
num_prompt_tokens: 2,
num_decode_tokens: Some(10),
};
let (tx, mut rx) = tokio::sync::mpsc::channel(1);
let request = Arc::new(request);
tokio::spawn(async move {
backend.generate(request.clone(), tx).await;
});
let num_tokens = Arc::new(AtomicU64::new(0));
let num_tokens_clone = num_tokens.clone();
let t = tokio::spawn(async move {
while let Some(item) = rx.recv().await {
let response = item;
assert_eq!(response.failed, false);
num_tokens_clone.fetch_add(
response.num_generated_tokens,
std::sync::atomic::Ordering::SeqCst,
);
}
});
t.await.unwrap();
assert_eq!(
num_tokens.load(std::sync::atomic::Ordering::SeqCst),
16 as u64
);
}
/// Test that the timings are correct
/// The tests may be flaky due to the nature of the SSE connection (it may depend on the testing environment)
/// We need to account for the time it takes to establish the connection
| rust | Apache-2.0 | 0a54882d9ae3d2e183005345bd86b5c47382a338 | 2026-01-04T20:20:59.288369Z | true |
BirdbrainEngineer/lenia_ca | https://github.com/BirdbrainEngineer/lenia_ca/blob/ee7b79b09334928a25dbcc96edc33edd57362724/src/lib.rs | src/lib.rs | //! `Lenia_ca` is a crate that provides core functionality for simulating the Lenia system of cellular automata. The crate was made
//! as a programming exercise in making a large-ish Rust project. Since this was the first proper Rust project for the author,
//! the crate has some weird quirks and inefficient... perhaps even illogical ways of structuring it. In the future, the trait based
//! system will probably be replaced by a system using phantom data or some other way of differentiating different Lenia systems from
//! one-another.
//!
//! For now, the general way to use the crate is to import it like you would any other Rust crate, and then use the `Simulator` struct
//! essentially exclusively. You may also want to look into the `kernels` module and `growth_functions` module, as they contain a number
//! of useful generators and functions for Lenia systems.
//!
//! A rough example of a quick-start code is below... Please note that `display()` function would have to be implemented by the user.
//! ```
//! let starting_pattern: ndarray::ArrayD<f64>; // fill with your data
//! let channel_shape: Vec<usize> = vec![100, 100];
//! let mut simulator = Simulator::<StandardLenia>::new(&channel_shape);
//! simulator.fill_channel(&starting_pattern, 0);
//! while true {
//! simulator.iterate();
//!     display(simulator.get_channel_as_ref(0));
//! }
//! ```
//!
//! ### Types of Lenia
//!
//! This version of `lenia_ca` crate supports only 2 different types of Lenia systems. `StandardLenia` and `ExpandedLenia` types, and is
//! not capable of simulating types like "asymptotic" or "particle" Lenia.
//!
//! ### Implementation notes
//!
//! The working principle for `StandardLenia` is the following:
//! * Perform a convolution operation (implemented as a FFT-based convolution) between the `channel` and `kernel` of the `convolution_channel`
//! * Each point/pixel's value is then passed into a `growth_function` of the `convolution_channel`.
//! * The resulting points/pixels are then multiplied by the integration step `dt` and added onto the original values in the `channel`.
//! * The resulting points/pixels are then clamped to be in range `0..1`. This result is the next time-step of the `channel`, and would
//! be used as the next iteration's `channel` values.
//!
//! use `set_kernel()` to change how the kernel looks.
//!
//! use `set_growth_function()` to set a specific growth function for the convolution result.
//!
//! use `set_dt()` to change the integration-step of the simulation.
//!
//! [Image of the algorithm available on Github](https://github.com/BirdbrainEngineer/lenia_ca)
//!
//! The working principle for `ExpandedLenia` is the following:
//! * For each `convolution_channel`, perform a convolution operation (implemented as a FFT-based convolution) between a source `channel`
//! and the `convolution_channel`'s `kernel`.
//! * For each `convolution_channel`, pass the convolution results into the `growth_function` of the `convolution_channel`.
//! * For each `channel`, perform an elementwise multiplication between the corresponding `convolution_channel` results and weights of the
//! `channel`
//! * For each `channel`, perform a weighted-sum on the results of the weight-convolution multiplicated results.
//! * For each `channel`, multiply the weighted-sum by the integration step `dt` and add it to the original values in the `channel`.
//! * For each `channel`, clamp the resulting values to be in range `0..1`. This result is the next time-step of the corresponding `channel`, and would
//! be used as the next iteration's `channel` values.
//!
//! [Image of the algorithm available on Github](https://github.com/BirdbrainEngineer/lenia_ca)
//!
//! use `set_channels()` to set the number of channels in the simulation.
//!
//! use `set_convolution_channels()` to set the number of kernels and the associated growth functions.
//!
//! use `set_convolution_channel_source()` to set the channel which will be convoluted by a particular kernel.
//!
//! use `set_kernel()` to change how a `convolution_channel`'s kernel looks like.
//!
//! use `set_growth_function()` to set a specific growth function for the convolution result.
//!
//! use `set_weights()` to set a channel's weights for the corresponding convolution channel results.
//!
//! use `set_dt()` to change the integration-step of the simulation.
#![allow(dead_code)]
#![allow(unused_variables)]
#[cfg(target_has_atomic = "ptr")]
use std::fmt;
use std::{thread::JoinHandle};
use ndarray::{self, Axis, Slice, Order, Ix2};
use num_complex::Complex;
use png;
mod fft;
pub mod lenias;
pub mod kernels;
pub mod growth_functions;
/// Byte-level mutation of a wider integer: overwrite the low or high byte in place.
trait SetBytes {
    /// Replace the least-significant byte, leaving all other bits untouched.
    fn set_low(&mut self, value: u8);
    /// Replace the second-least-significant byte, leaving all other bits untouched.
    fn set_high(&mut self, value: u8);
}
impl SetBytes for u16 {
    fn set_low(&mut self, value: u8) {
        // Clear bits 0..8, then OR the new byte in.
        *self = (*self & !0x00ff) | u16::from(value);
    }
    fn set_high(&mut self, value: u8) {
        // Clear bits 8..16, then OR the new byte in, shifted into place.
        *self = (*self & !0xff00) | (u16::from(value) << 8);
    }
}
/// Byte-level access to a wider integer: read the low or high byte.
trait GetBytes {
    /// The least-significant byte.
    fn get_low(&self) -> u8;
    /// The second-least-significant byte.
    fn get_high(&self) -> u8;
}
impl GetBytes for u16 {
    fn get_low(&self) -> u8 {
        // Little-endian byte 0 is the least-significant byte.
        self.to_le_bytes()[0]
    }
    fn get_high(&self) -> u8 {
        // Little-endian byte 1 is the most-significant byte of a u16.
        self.to_le_bytes()[1]
    }
}
/// Samples the normal distribution where the peak (at `x = mu`) is 1.
/// This is not suitable for use as a gaussian probability density function,
/// because it is not normalized to unit area!
///
/// ### Parameters
///
/// * `x` - Point of the normal distribution to sample.
///
/// * `mu` - The mean (point of the highest value/peak) of the normal distribution.
///
/// * `stddev` - Standard deviation of the normal distribution.
fn sample_normal(x: f64, mu: f64, stddev: f64) -> f64 {
    let deviation = x - mu;
    let variance = stddev * stddev;
    // Un-normalized gaussian: e^(-(x - mu)^2 / (2 * stddev^2)).
    (-(deviation * deviation) / (2.0 * variance)).exp()
}
/// Samples a decaying exponential `peak * e^(-x * exponent)` at point `x`.
fn sample_exponential(x: f64, exponent: f64, peak: f64) -> f64 {
    let decay = (-(x * exponent)).exp();
    peak * decay
}
/// Euclidean distance between points `a` and `b`.
///
/// Iterates over the components of `a`; indexes `b` with the same positions,
/// so `b` must be at least as long as `a` (shorter `b` panics on indexing,
/// matching slice-index semantics).
fn euclidean_dist(a: &[f64], b: &[f64]) -> f64 {
    let mut sum_of_squares = 0.0_f64;
    for (i, &component) in a.iter().enumerate() {
        let diff = component - b[i];
        sum_of_squares += diff * diff;
    }
    sum_of_squares.sqrt()
}
/// Extract data from n-dimensional array into a 2-dimensional array.
///
/// Extract a 2d array (`ndarray::Array2`) of `f64` values of a 2d slice of a channel's data.
/// Use this to simply get a 2d frame for rendering.
///
/// ### Parameters
///
/// * `input` - Channel data to extract the 2d frame from.
///
/// * `output` - 2D array into which to place the extracted frame. Its shape must
/// match the lengths of the two `display_axes` of `input` (the parallel `Zip`
/// below pairs the arrays element-for-element).
///
/// * `display_axes` - Indexes of the axes to extract
///
/// * `dimensions` - Which indexes in any other axes the 2d slice is extracted from.
/// The entries for axes selected in `display_axes` can be any number, and will be disregarded.
/// Must contain one entry per axis of `input`, since it is indexed by axis number below.
pub fn get_frame(input: &ndarray::ArrayD<f64>, output: &mut ndarray::Array2<f64>, display_axes: &[usize; 2], dimensions: &[usize]) {
    // Fast path: a 2d channel needs no slicing — copy elementwise in parallel.
    // NOTE(review): this path ignores `display_axes`, i.e. it assumes the caller
    // wants axes [0, 1] in that order for 2d inputs — confirm at call sites.
    if input.shape().len() == 2 {
        ndarray::Zip::from(output).and(input.view().into_dimensionality::<ndarray::Ix2>().unwrap()).par_for_each(|a, b| { *a = *b; });
        return;
    }
    // Keep the two display axes whole; pin every other axis down to the single
    // index requested in `dimensions`, producing a (1 x ... x W x ... x H x ... x 1) view.
    let data = input.slice_each_axis(
        |a|{
            if a.axis.index() == display_axes[0] || a.axis.index() == display_axes[1] {
                return Slice {
                    start: 0,
                    end: None,
                    step: 1,
                }
            }
            else {
                return Slice {
                    start: dimensions[a.axis.index()] as isize,
                    end: Some((dimensions[a.axis.index()] + 1) as isize),
                    step: 1,
                }
            }
        }
    );
    // Collapse the degenerate axes away into a plain 2d shape.
    let data = data.to_shape(
        ((
            input.shape()[display_axes[0]],
            input.shape()[display_axes[1]]
        ), Order::RowMajor)
    ).unwrap();
    // Parallel elementwise copy into the caller-provided output buffer.
    ndarray::Zip::from(output).and(&data).par_for_each(|a, b| { *a = *b; });
}
/// Loads a png into an `ndarray`.
///
/// Only the first sample of each pixel is read (the gray value for grayscale
/// images, the red value for RGB/RGBA), normalized into `0.0..=1.0`. The
/// returned array is indexed `[x, y]` with shape `(width, height)`.
///
/// ### Parameters
///
/// * `file_path` - Path to the 2d slice of a frame to load.
///
/// ### Panics
///
/// * If the bit-depth of the png is less than 8.
///
/// * If the png has a color type different from Grayscale, Grayscale with alpha, RGB or RGBA.
pub fn load_from_png(file_path: &str) -> ndarray::Array2<f64> {
    let decoder = png::Decoder::new(std::fs::File::open(file_path).unwrap());
    let mut reader = decoder.read_info().unwrap();
    let mut buf = vec![0; reader.output_buffer_size()];
    let info = reader.next_frame(&mut buf).unwrap();
    // Only 8- and 16-bit samples are supported (1/2/4-bit depths are rejected).
    if info.bit_depth != png::BitDepth::Eight && info.bit_depth != png::BitDepth::Sixteen {
        panic!("lenia_ca::load_from_png() - Unable to load from .png, as it has a bit depth of less than 8!");
    }
    let output: ndarray::Array2::<f64>;
    // `offset` is the stride in bytes from one pixel to the next in `buf`:
    // samples-per-pixel times bytes-per-sample.
    let offset: usize;
    match info.color_type {
        png::ColorType::Grayscale => {
            if info.bit_depth == png::BitDepth::Eight { offset = 1; }
            else { offset = 2; }
        }
        png::ColorType::GrayscaleAlpha => {
            if info.bit_depth == png::BitDepth::Eight { offset = 2; }
            else { offset = 4; }
        }
        png::ColorType::Rgb => {
            if info.bit_depth == png::BitDepth::Eight { offset = 3; }
            else { offset = 6; }
        }
        png::ColorType::Rgba => {
            if info.bit_depth == png::BitDepth::Eight { offset = 4; }
            else { offset = 8; }
        }
        _ => { panic!("lenia_ca::load_from_png() - Unsupported color type!"); }
    }
    let shape = [info.width as usize, info.height as usize];
    if info.bit_depth == png::BitDepth::Sixteen {
        // 16-bit PNG samples are stored big-endian (most-significant byte
        // first, per the PNG spec), hence high byte from the first buffer
        // position and low byte from the second.
        output = ndarray::Array2::from_shape_fn(Ix2(shape[0], shape[1]), |a| {
            let mut num: u16 = 0;
            num.set_high(*buf.get((a.1 * info.width as usize * offset) + (a.0 * offset)).unwrap());
            num.set_low(*buf.get((a.1 * info.width as usize * offset) + (a.0 * offset + 1)).unwrap());
            num as f64 * (1.0 / 65535.0)
        });
    }
    else {
        // 8-bit: buffer index is (y * width + x) * offset; normalize to 0..=1.
        output = ndarray::Array2::from_shape_fn(Ix2(shape[0], shape[1]), |a| {
            *buf.get((a.1 * info.width as usize * offset) + (a.0 * offset)).unwrap() as f64 * (1.0 / 255.0)
        });
    }
    output
}
/// Export a frame as a png or a bunch of png-s if multidimensional.
///
/// The function returns a `JoinHandle` because the exporting takes place on a separate thread.
///
/// ### Parameters
///
/// * `bit_depth` - Controls whether to output as 8-bit grayscale or 16-bit grayscale png.
///
/// * `frame` - Reference to the frame to be stored.
///
/// * `prefix` - Output file name. Numbers will be added after this string based on the 2d slice
/// of the frame (if exporting a 3d or higher dimensionality frame). **This prefix should also
/// contain the frame number, if saving multiple successive frames.**
///
/// * `folder_path` - Folder path to where to save the frame at. An empty string
/// saves into the current working directory.
///
/// ### Panics
///
/// Under various circumstances, most commonly if the folder given by `folder_path` does not exist.
pub fn export_frame_as_png(bit_depth: png::BitDepth, frame: &ndarray::ArrayD<f64>, prefix: &str, folder_path: &str) -> JoinHandle<()>{
    if frame.shape().is_empty() { panic!("lenia_ca::export_frame_as_png() - Can not export an empty frame!") }
    // Build "<folder>/<prefix>", inserting a separator only when the folder
    // path does not already end in one. Fix: the previous implementation
    // unconditionally called `folder_path.chars().last().unwrap()`, which
    // panicked whenever `folder_path` was empty, despite the empty case being
    // explicitly supported.
    let path_base = if folder_path.is_empty() {
        format!("./{}", prefix)
    } else if folder_path.ends_with('/') || folder_path.ends_with('\\') {
        format!("{}{}", folder_path, prefix)
    } else {
        format!("{}/{}", folder_path, prefix)
    };
    // 1d frames are promoted to shape (len, 1) so the recursive exporter can
    // always treat the last two axes as image width/height.
    let data = if frame.shape().len() == 1 {
        frame.to_shape((ndarray::IxDyn(&[frame.shape()[0], 1]), Order::RowMajor)).unwrap().mapv(|el| { el.clone() })
    } else {
        frame.clone()
    };
    // Export on a worker thread; callers may join the handle to wait for completion.
    std::thread::spawn(move || {
        let mut indexes: Vec<usize> = vec![0; data.shape().len()];
        nested_png_export(bit_depth, path_base, &data, &mut indexes, 0);
    })
}
/// Recursive worker for `export_frame_as_png`.
///
/// Walks every index combination of the leading (non-image) axes, appending
/// `_<index>` to the file name for each, and writes one grayscale png per 2d
/// slice spanned by the last two axes of `data`. `indexes` records the current
/// position along each leading axis; `current_axis` is the recursion depth.
fn nested_png_export(bit_depth: png::BitDepth, path: String, data: &ndarray::ArrayD<f64>, indexes: &mut Vec<usize>, current_axis: usize) {
    // Base case: only the last two axes remain — write this 2d slice to disk.
    if current_axis == (indexes.len() - 2) {
        let file_path = format!("{}.png", &path);
        // Log each file as it is written.
        println!("{}", &file_path);
        let file = std::fs::File::create(file_path).unwrap();
        let buf_writer = std::io::BufWriter::new(file);
        // Second-to-last axis is treated as width, last axis as height.
        let width = data.shape()[data.shape().len()-2];
        let height = data.shape()[data.shape().len()-1];
        let mut encoder = png::Encoder::new(
            buf_writer,
            width as u32,
            height as u32
        );
        // 1 byte per pixel at 8-bit depth, 2 bytes at 16-bit.
        let mut image_data: Vec<u8> = Vec::with_capacity(width * height * if bit_depth == png::BitDepth::Eight {1} else {2});
        // Keep the two image axes whole and pin every leading axis to the
        // current `indexes` position, then flatten into a 1d buffer.
        // NOTE(review): the column-major flatten pairs with the (x, y) layout
        // used by `load_from_png` — confirm orientation against a renderer.
        let image_data_buffer = data.slice_each_axis(
            |a|{
                if a.axis.index() == (indexes.len() - 2) || a.axis.index() == (indexes.len() - 1) {
                    return Slice {
                        start: 0,
                        end: None,
                        step: 1,
                    }
                }
                else {
                    return Slice {
                        start: indexes[a.axis.index()] as isize,
                        end: Some((indexes[a.axis.index()] + 1) as isize),
                        step: 1,
                    }
                }
            }
        )
        .to_shape(((width * height), Order::ColumnMajor))
        .unwrap()
        .mapv(|el| { el });
        // Quantize each sample; NOTE(review): assumes values lie in 0.0..=1.0 —
        // out-of-range values are not clamped before the cast.
        for i in 0..(width * height) {
            if bit_depth == png::BitDepth::Eight {
                image_data.push((image_data_buffer[[i]] * 255.0) as u8);
            }
            else if bit_depth == png::BitDepth::Sixteen {
                // 16-bit PNG samples are big-endian: high byte first.
                let num = (image_data_buffer[[i]] * 65535.0) as u16;
                image_data.push(num.get_high());
                image_data.push(num.get_low());
            }
            else {
                panic!("lenia_ca::nested_png_export() - Unsupported bit depth!");
            }
        }
        encoder.set_depth(bit_depth);
        encoder.set_color(png::ColorType::Grayscale);
        let mut writer = encoder.write_header().unwrap();
        let write_result = writer.write_image_data(&image_data);
        match write_result {
            Ok(_) => {}
            Err(_) => { panic!("lenia_ca::nested_png_export() - Failed to write png!"); }
        }
    }
    else {
        // Recursive case: iterate the current leading axis, extending the file
        // name with this axis' index.
        for i in 0..data.shape()[current_axis] {
            indexes[current_axis] = i;
            nested_png_export(
                bit_depth,
                format!("{}_{}", &path, i),
                data,
                indexes,
                current_axis + 1
            );
        }
    }
}
/// A Lenia simulation.
///
/// Container type for a `Lenia` implementation. It is not recommended to control the Lenia instance directly on your own.
/// The Simulator has all the needed methods to control a Lenia instance in normal operation.
pub struct Simulator<L: Lenia> {
    // The wrapped Lenia implementation; every public method delegates to this.
    sim: L,
}
impl<L: Lenia> Simulator<L> {
    /// Initialize a Lenia simulator.
    ///
    /// Barring wanting to change the type of the `Lenia` instance used by the `Simulator`,
    /// this should ever need to be called only once during the lifetime of your
    /// Lenia simulation program.
    ///
    /// ### Parameters
    ///
    /// * `channel_shape` - The shape (number of dimensions and their lengths) of the
    /// channels for the `Lenia` instance.
    ///
    /// ### Panics
    ///
    /// If any axis length in `channel_shape` is `0`.
    pub fn new(channel_shape: &[usize]) -> Self {
        for (i, dim) in channel_shape.iter().enumerate() {
            if *dim == 0 {
                panic!("Simulator::new() - Axis {} of the provided shape has a length of 0! Each axis must have a length of at least 1.", i);
            }
        }
        Simulator{
            sim: L::new(channel_shape),
        }
    }

    /// Re-initialize a Lenia simulator.
    ///
    /// Re-initializes the `Lenia` instance, losing **all** of the previous changes, such as
    /// kernel changes, channel additions or any other parameter changes from the defaults
    /// of the specific `Lenia` instance implementation.
    ///
    /// Call this if the shape of the channels needs to be changed, or a major restructuring of
    /// channels and/or convolution channels is wanted.
    ///
    /// ### Parameters
    ///
    /// * `channel_shape` - The shape (number of dimensions and their lengths) of the
    /// channels for the `Lenia` instance.
    ///
    /// ### Panics
    ///
    /// If any axis length in `channel_shape` is `0`.
    pub fn remake(&mut self, channel_shape: &[usize]) {
        for (i, dim) in channel_shape.iter().enumerate() {
            if *dim == 0 {
                // Fix: the message previously blamed Simulator::new().
                panic!("Simulator::remake() - Axis {} of the provided shape has a length of 0! Each axis must have a length of at least 1.", i);
            }
        }
        self.sim = L::new(channel_shape);
    }

    /// Set the number of channels in the `Lenia` instance.
    ///
    /// **In case the number of channels
    /// is less than the current number of channels, it is up to the user to make sure that
    /// no convolution channel tries to use a dropped channel as its source!**
    ///
    /// All values in newly created channels will be set to `0.0`.
    ///
    /// The weights from all convolution channels into any newly created channels will start
    /// off at `0.0`.
    ///
    /// ### Parameters
    ///
    /// * `channels` - The number of channels the `Lenia` instance should have.
    ///
    /// ### Panics
    ///
    /// If `channels` is `0`.
    pub fn set_channels(&mut self, channels: usize) {
        if channels == 0 {
            panic!("Simulator::set_channels: Attempting to set the number of channels to 0. This is not allowed.");
        }
        // No-op when the count is unchanged.
        if channels == self.sim.channels() { return; }
        self.sim.set_channels(channels);
    }

    /// Set the number of convolution channels in the `Lenia` instance.
    ///
    /// If the new number of
    /// convolution channels is less than currently, then any convolution channels with an index
    /// higher than the new number of channels will be dropped, and their corresponding contribution
    /// to weighted averages for summing purged.
    ///
    /// If the new number of convolution channels is greater than currently then any new
    /// convolution channels will need to have their kernels and growth functions set. In addition,
    /// channel weights for the new convolution channels will default to `0.0`.
    ///
    /// ### Parameters
    ///
    /// * `convolution_channels` - The number of convolution channels the `Lenia` instance should have.
    ///
    /// ### Panics
    ///
    /// If `convolution_channels` is `0`.
    pub fn set_convolution_channels(&mut self, convolution_channels: usize) {
        if convolution_channels == 0 {
            panic!("Simulator::set_convolution_channels: Attempting to set the number of convolution channels to 0. This is not allowed.");
        }
        // No-op when the count is unchanged.
        if convolution_channels == self.convolution_channels() { return; }
        self.sim.set_conv_channels(convolution_channels);
    }

    /// Set the source channel a given convolution channel should act on.
    ///
    /// ### Parameters
    ///
    /// * `convolution_channel` - The convolution channel which will have its source changed.
    ///
    /// * `source_channel` - The channel that the convolution channel should use as its source
    /// for convoluting.
    ///
    /// ### Panics
    ///
    /// * If the specified `convolution_channel` does not exist.
    ///
    /// * If the specified `source_channel` does not exist.
    pub fn set_convolution_channel_source(&mut self, convolution_channel: usize, source_channel: usize) {
        if convolution_channel >= self.sim.conv_channels() {
            panic!("Simulator::set_convolution_channel_source: Specified convolution channel (index {}) does not exist. Current number of convolution channels: {}.", convolution_channel, self.sim.conv_channels());
        }
        if source_channel >= self.sim.channels() {
            panic!("Simulator::set_convolution_channel_source: Specified channel (index {}) does not exist. Current number of channels: {}.", source_channel, self.sim.channels());
        }
        self.sim.set_source_channel(convolution_channel, source_channel);
    }

    /// Set and make the kernel of the specified convolution channel.
    ///
    /// ### Parameters
    ///
    /// * `kernel` - n-dimensional array (`ndarray::ArrayD<f64>`), where the number of
    /// dimensions / axes must match the number of dimensions / axes of the channels of the
    /// `Lenia` instance.
    ///
    /// * `convolution_channel` - The convolution channel to which the new kernel is to be assigned.
    ///
    /// ### Panics
    ///
    /// If the specified `convolution_channel` does not exist.
    ///
    /// If the dimensionality of the kernel is not the same as the channels'
    pub fn set_kernel(&mut self, kernel: ndarray::ArrayD<f64>, convolution_channel: usize) {
        if convolution_channel >= self.sim.conv_channels() {
            panic!("Simulator::set_kernel: Specified convolution channel (index {}) does not exist. Current number of convolution channels: {}.", convolution_channel, self.sim.conv_channels());
        }
        if kernel.shape().len() != self.sim.shape().len() {
            panic!("Simulator::set_kernel: Number of kernel dimensionality ({}) does not agree with channels' dimensionality ({}).", kernel.shape().len(), self.sim.shape().len());
        }
        self.sim.set_kernel(kernel, convolution_channel);
    }

    /// Set the growth function and its parameters of the specified convolution channel.
    ///
    /// ### Parameters
    ///
    /// * `f` - Growth function to use.
    ///
    /// * `growth_parameters` - The parameters passed to the growth function.
    ///
    /// * `convolution_channel` - The convolution channel to which the new growth function and
    /// parameters are to be assigned.
    ///
    /// ### Panics
    ///
    /// If the specified `convolution_channel` does not exist.
    pub fn set_growth_function(&mut self, f: fn(f64, &[f64]) -> f64, growth_parameters: Vec<f64>, convolution_channel: usize) {
        if convolution_channel >= self.sim.conv_channels() {
            panic!("Simulator::set_growth_function: Specified convolution channel (index {}) does not exist. Current number of convolution channels: {}.", convolution_channel, self.sim.conv_channels());
        }
        self.sim.set_growth(f, growth_parameters, convolution_channel);
    }

    /// Set the convolution channel weights for a specific channel.
    ///
    /// * If the length of weights is greater than the number of convolution channels,
    /// then the spare weights will be ignored.
    ///
    /// * If the length of weights is less than the number of convolution channels,
    /// then the missing weights will default to `0.0`.
    ///
    /// ### Parameters
    ///
    /// * `channel` - The channel, which the new weights will be assigned to.
    ///
    /// * `weights` - The weights to assign. Index in the array corresponds to
    /// the index of the convolution channel.
    pub fn set_weights(&mut self, channel: usize, weights: &[f64]) {
        // Note the argument order swap: the Lenia trait takes (weights, channel).
        self.sim.set_weights(weights, channel);
    }

    /// Set the integration step (a.k.a. timestep) parameter `dt` of the `Lenia` instance.
    ///
    /// ### Parameters
    ///
    /// * `dt` - The new dt value for the `Lenia` instance to use.
    pub fn set_dt(&mut self, dt: f64) {
        self.sim.set_dt(dt);
    }

    /// Performs a single iteration of the `Lenia` instance.
    ///
    /// Channels are updated with the resulting new state of the simulation.
    pub fn iterate(&mut self) {
        self.sim.iterate();
    }

    /// Fills a channel with user data.
    ///
    /// The shapes of the `data` and the channel(s) in the `Lenia` instance must be the same
    /// (`zip_mut_with` pairs the arrays element-for-element).
    ///
    /// ### Parameters
    ///
    /// * `data` - Reference to the n-dimensional array (`ndarray::ArrayD`) of `f64` values
    /// from which to fill the channel's data.
    ///
    /// * `channel` - Index of the channel to fill.
    ///
    /// ### Panics
    ///
    /// If the specified `channel` does not exist.
    pub fn fill_channel(&mut self, data: &ndarray::ArrayD<f64>, channel: usize) {
        if channel >= self.sim.channels() {
            panic!("Simulator::fill_channel: Specified channel (index {}) does not exist. Current number of channels: {}.", channel, self.sim.channels());
        }
        let channel_data = self.sim.get_channel_as_mut_ref(channel);
        channel_data.zip_mut_with(data,
            |a, b| {
                *a = *b;
            }
        );
    }

    /// Retrieve a reference to the specified channel's data.
    ///
    /// ### Parameters
    ///
    /// * `channel` - Index of the channel to get a reference from.
    ///
    /// ### Panics
    ///
    /// If the specified `channel` does not exist.
    pub fn get_channel_as_ref(&self, channel: usize) -> &ndarray::ArrayD<f64> {
        if channel >= self.sim.channels() {
            // Fix: the message previously named a non-existent method.
            panic!("Simulator::get_channel_as_ref: Specified channel (index {}) does not exist. Current number of channels: {}.", channel, self.sim.channels());
        }
        self.sim.get_channel_as_ref(channel)
    }

    /// Mutable version of `get_channel_as_ref()`.
    pub fn get_channel_as_mut_ref(&mut self, channel: usize) -> &mut ndarray::ArrayD<f64> {
        if channel >= self.sim.channels() {
            // Fix: the message previously named a non-existent method.
            panic!("Simulator::get_channel_as_mut_ref() - Specified channel (index {}) does not exist. Current number of channels: {}.", channel, self.sim.channels());
        }
        self.sim.get_channel_as_mut_ref(channel)
    }

    /// Retrieve a reference to the specified channel's "deltas".
    ///
    /// Deltas are the amounts added onto the
    /// previous iteration's result to get the current iteration's result.
    ///
    /// Note that `dt` parameter has not been applied for this field, and no clamp / clip operation has
    /// been performed, thus the numbers will be in range `[-1.0..1.0]`.
    ///
    /// ### Parameters
    ///
    /// * `channel` - Index of the channel from which the reference to data is to be taken.
    ///
    /// ### Panics
    ///
    /// If the specified `channel` does not exist.
    pub fn get_deltas_as_ref(&self, channel: usize) -> &ndarray::ArrayD<f64> {
        if channel >= self.sim.channels() {
            panic!("Simulator::get_deltas_as_ref() - Specified channel (index {}) does not exist. Current number of channels: {}.", channel, self.sim.channels());
        }
        self.sim.get_deltas_as_ref(channel)
    }

    /// Exists for backwards-compatibility with `lenia_ca 0.1.0`; use `get_convolved()` instead.
    ///
    /// Retrieves a `convolution_channel`'s convolution result and extracts only the real component.
    ///
    /// Convolution result is also called the "potential distribution".
    ///
    /// ### Parameters
    ///
    /// * `convolution_channel` - Index of the convolution channel from which to
    /// produce the `f64` `ndarray`.
    ///
    /// ### Panics
    ///
    /// If the specified `convolution_channel` does not exist.
    pub fn get_convoluted(&self, convolution_channel: usize) -> ndarray::ArrayD<f64> {
        // Fix: the bound was previously checked against the number of
        // channels rather than convolution channels, which is what both the
        // index and the panic message refer to.
        if convolution_channel >= self.sim.conv_channels() {
            panic!("Simulator::get_convoluted() - Specified convolution channel (index {}) does not exist. Current number of convolution channels: {}.", convolution_channel, self.sim.conv_channels());
        }
        self.sim.get_convoluted_as_ref(convolution_channel).map(|a| { a.re })
    }

    /// Retrieves a `convolution_channel`'s convolution result and extracts only the real component.
    ///
    /// Convolution result is also called the "potential distribution".
    ///
    /// ### Parameters
    ///
    /// * `convolution_channel` - Index of the convolution channel from which to
    /// produce the `f64` `ndarray`.
    ///
    /// ### Panics
    ///
    /// If the specified `convolution_channel` does not exist.
    pub fn get_convolved(&self, convolution_channel: usize) -> ndarray::ArrayD<f64> {
        // Fix: bound check now uses conv_channels() (see get_convoluted()).
        if convolution_channel >= self.sim.conv_channels() {
            panic!("Simulator::get_convolved() - Specified convolution channel (index {}) does not exist. Current number of convolution channels: {}.", convolution_channel, self.sim.conv_channels());
        }
        self.sim.get_convoluted_as_ref(convolution_channel).map(|a| { a.re })
    }

    /// Retrieve a reference to the specified convolution channel's convolution result.
    ///
    /// Convolution result is also called the "potential distribution".
    ///
    /// Note that the referenced array holds `Complex<f64>` type values.
    ///
    /// ### Parameters
    ///
    /// * `convolution_channel` - Index of the convolution channel from which to
    /// produce the `f64` `ndarray`.
    ///
    /// ### Panics
    ///
    /// If the specified `convolution_channel` does not exist.
    pub fn get_convolved_as_ref(&self, convolution_channel: usize) -> &ndarray::ArrayD<Complex<f64>> {
        // Fix: bound check now uses conv_channels() (see get_convoluted()).
        if convolution_channel >= self.sim.conv_channels() {
            panic!("Simulator::get_convolved_as_ref() - Specified convolution channel (index {}) does not exist. Current number of convolution channels: {}.", convolution_channel, self.sim.conv_channels());
        }
        self.sim.get_convoluted_as_ref(convolution_channel)
    }

    /// Retrieve a reference to the specified convolution channel's "activations".
    ///
    /// Activations are the results from passing the convolution results through the growth function.
    ///
    /// ### Parameters
    ///
    /// * `convolution_channel` - Index of the convolution channel from which the
    /// reference to data is to be taken.
    ///
    /// ### Panics
    ///
    /// If the specified `convolution_channel` does not exist.
    pub fn get_activated_as_ref(&self, convolution_channel: usize) -> &ndarray::ArrayD<f64> {
        // Fix: bound check now uses conv_channels(); message names this method.
        if convolution_channel >= self.sim.conv_channels() {
            panic!("Simulator::get_activated_as_ref() - Specified convolution channel (index {}) does not exist. Current number of convolution channels: {}.", convolution_channel, self.sim.conv_channels());
        }
        self.sim.get_grown_as_ref(convolution_channel)
    }

    /// Retrieve the kernel being used for the specified convolution channels' convolution.
    ///
    /// ### Parameters
    ///
    /// * `convolution_channel` - Index of the convolution channel from which the kernel will be supplied.
    pub fn get_kernel_as_ref(&self, convolution_channel: usize) -> &Kernel {
        self.sim.get_kernel_as_ref(convolution_channel)
    }

    /// Get the current integration step (a.k.a. timestep) parameter `dt` of the `Lenia` instance.
    pub fn dt(&self) -> f64 {
        self.sim.dt()
    }

    /// Get the shape of the channels and convolution channels of the `Lenia` instance.
    pub fn shape(&self) -> &[usize] {
        self.sim.shape()
    }

    /// Get the number of channels initialized in the `Lenia` instance.
    pub fn channels(&self) -> usize {
        self.sim.channels()
    }

    /// Get the number of convolution channels initialized in the `Lenia` instance.
    pub fn convolution_channels(&self) -> usize {
        self.sim.conv_channels()
    }
}
/// Lenia functionality trait.
///
/// Lenia trait organizes together all the functionality to interact with a Lenia simulation.
pub trait Lenia {
/// Creates a new `Lenia` instance.
fn new(shape: &[usize]) -> Self;
/// Sets the number of channels in the `Lenia` instance.
///
/// **If the new number of channels is fewer than currently then the user is responsible for re-making
/// the convolution channels or deleting invalidated convolution channels and
/// make sure that no convolution channel tries to convolute a non-existent channel!**
fn set_channels(&mut self, num_channels: usize);
/// Sets the number of convolution channels in the `Lenia` instance.
///
/// * Any convolution channels
/// that have an index larger than the new number of channels **will be dropped**. Conversely,
/// no convolution channels get invalidated if the new number of convolution channels is
/// greater than the previous number of convolution channels.
///
/// * Any newly initialized convolution channels will have to have their kernels and
/// growth functions added. By default all channels will use a weight of `0.0` for the new
/// channels.
| rust | MIT | ee7b79b09334928a25dbcc96edc33edd57362724 | 2026-01-04T20:21:17.859970Z | true |
BirdbrainEngineer/lenia_ca | https://github.com/BirdbrainEngineer/lenia_ca/blob/ee7b79b09334928a25dbcc96edc33edd57362724/src/lenias.rs | src/lenias.rs | //! Collection of different types of Lenia systems.
use num_complex::Complex;
use std::sync::{Arc, Mutex, RwLock};
use std::thread;
use super::*;
use super::fft::ParPlannedFFTND;
/// Standard type of Lenia
///
/// `StandardLenia` struct implements the non-expanded Lenia system with a 2d field and
/// pre-set parameters to facilitate the creation of the
/// ***Orbium unicaudatus*** glider - hallmark of the Lenia system.
///
/// This version of Lenia does not allow for adding extra channels nor convolution channels.
/// In addition, channel weights are not available for this version of Lenia.
///
/// Changeable parameters include the timestep a.k.a. integration step **dt**,
/// the **growth function**, and the **kernel** given that the kernel is 2-dimensional.
///
/// ### Example of initializing a `StandardLenia`.
/// Initializes a Lenia instance capable of making a soliton.
///
/// ```
/// let starting_pattern: ndarray::ArrayD<f64>; // fill with your data
/// let channel_shape: Vec<usize> = vec![100, 100];
/// let mut simulator = Simulator::<StandardLenia>::new(&channel_shape);
/// simulator.fill_channel(&starting_pattern, 0);
/// while true {
///     simulator.iterate();
///     display(simulator.get_channel_as_ref(0));
/// }
/// ```
pub struct StandardLenia {
    // Integration step applied each iterate().
    dt: f64,
    // The single simulated channel.
    channel: Channel,
    // Shape shared by the channel, kernel buffers and scratch arrays.
    shape: Vec<usize>,
    // The single convolution channel (kernel + growth function).
    conv_channel: ConvolutionChannel,
    // Complex scratch buffer holding the FFT-based convolution result.
    convolved: ndarray::ArrayD<Complex<f64>>,
    // Pre-planned forward/inverse FFT instances reused every iteration.
    forward_fft_instance: fft::ParPlannedFFTND,
    inverse_fft_instance: fft::ParPlannedFFTND,
}
impl Lenia for StandardLenia {
    /// Create and initialize a new instance of "Standard Lenia".
    ///
    /// This version of Lenia can have only a single channel and a single
    /// convolution channel, and works only in 2D. It also does not support any
    /// weights, as the single weight can be "encoded" within the `dt` parameter.
    ///
    /// By default the kernel, growth function and dt parameter are set such that
    /// the simulation is capable of producing the ***Orbium unicaudatus*** glider.
    /// The default kernel has a diameter of 26 pixels, so each axis in `shape`
    /// should ideally be much larger than that.
    ///
    /// ### Parameters
    ///
    /// * `shape` - Reference to the shape that the channels in the `Lenia` instance shall have.
    ///
    /// ### Panics
    ///
    /// * If the length of `shape` is not `2`.
    ///
    /// * If either of the axis lengths in `shape` is `< 13`.
    fn new(shape: &[usize]) -> Self {
        // Standard Lenia is strictly 2-dimensional.
        if shape.len() != 2 {
            panic!("StandardLenia::new() - Expected 2 dimensions for Standard Lenia! Found {}.", shape.len());
        }
        // Reject degenerate channel sizes (smaller than the default kernel radius).
        for (i, dim) in shape.iter().enumerate() {
            if *dim < 13 {
                panic!("StandardLenia::new() - Axis {} is extremely small ({} pixels). Make it larger!", i, *dim);
            }
        }
        // Default kernel / growth parameters reproduce *Orbium unicaudatus*.
        let kernel = Kernel::from(
            kernels::gaussian_donut_2d(
                13,
                1.0/6.7
            ),
            shape
        );
        let conv_channel = ConvolutionChannel {
            input_channel: 0,
            kernel,
            field: ndarray::ArrayD::from_elem(shape, 0.0),
            growth: growth_functions::standard_lenia,
            growth_params: vec![0.15, 0.017],
        };
        let channel = Channel {
            field: ndarray::ArrayD::from_elem(shape, 0.0),
            weights: vec![1.0],
            weight_sum_reciprocal: 1.0,
        };
        StandardLenia {
            forward_fft_instance: fft::ParPlannedFFTND::new(shape, false),
            inverse_fft_instance: fft::ParPlannedFFTND::new(shape, true),
            dt: 0.1,
            channel,
            shape: shape.to_vec(),
            conv_channel,
            convolved: ndarray::ArrayD::from_elem(shape, Complex::new(0.0, 0.0)),
        }
    }

    /// Perform a single iteration: convolve the channel with the kernel via FFT,
    /// apply the growth function, then integrate the result into the channel.
    fn iterate(&mut self) {
        // Load the channel state into the complex scratch buffer.
        self.convolved.zip_mut_with(
            &self.channel.field,
            |a, b| {
                a.re = *b;
                a.im = 0.0;
            }
        );
        // Convolution theorem: FFT -> pointwise multiply with the kernel
        // spectrum -> inverse FFT.
        self.forward_fft_instance.transform(&mut self.convolved);
        self.convolved.zip_mut_with(
            &self.conv_channel.kernel.transformed,
            |a, b| {
                // Complex multiplication without cloning (3-multiplication form).
                let real = (a.re * b.re) - (a.im * b.im);
                a.im = ((a.re + a.im) * (b.re + b.im)) - real;
                a.re = real;
            }
        );
        self.inverse_fft_instance.transform(&mut self.convolved);
        // Apply the growth function to the real part of the convolution result.
        // The function pointer and params are borrowed up front so the closure
        // does not need to capture through `self.conv_channel`.
        let growth = self.conv_channel.growth;
        let growth_params = &self.conv_channel.growth_params;
        self.conv_channel.field.zip_mut_with(
            &self.convolved,
            |a, b| {
                *a = growth(b.re, growth_params);
            }
        );
        // Integrate: channel += growth * dt, clamped to the valid state range.
        self.channel.field.zip_mut_with(&self.conv_channel.field, |a, b| {
            *a = (*a + (*b * self.dt)).clamp(0.0, 1.0);
        })
    }

    /// `StandardLenia` always has exactly 1 channel; prints a notice instead of
    /// changing anything.
    fn set_channels(&mut self, num_channels: usize) {
        println!("Changing the number of channels is not available for Standard Lenia! Try using a different Lenia instead.");
    }

    /// `StandardLenia` always has exactly 1 convolution channel; prints a notice
    /// instead of changing anything.
    fn set_conv_channels(&mut self, num_conv_channels: usize) {
        println!("Changing the number of channels is not available for Standard Lenia! Try using a different Lenia instead.");
    }

    /// Source channels are fixed in `StandardLenia`; prints a notice instead of
    /// changing anything.
    fn set_source_channel(&mut self, conv_channel: usize, src_channel: usize) {
        println!("Adding or changing source channels is not available for Standard Lenia! Try using a different Lenia instead.");
    }

    /// Weights are not supported by `StandardLenia`; prints a notice instead of
    /// changing anything.
    fn set_weights(&mut self, new_weights: &[f64], conv_channel: usize) {
        println!("Adding or changing convolution output weights is not available for Standard Lenia! Try using a different Lenia instead.");
    }

    /// Replace the kernel; `conv_channel` is ignored (there is only one).
    fn set_kernel(&mut self, kernel: ndarray::ArrayD<f64>, conv_channel: usize) {
        self.conv_channel.kernel = Kernel::from(kernel, self.channel.field.shape());
    }

    /// Replace the growth function and its parameters; `conv_channel` is
    /// ignored (there is only one).
    fn set_growth(&mut self, f: fn(f64, &[f64]) -> f64, growth_params: Vec<f64>, conv_channel: usize) {
        self.conv_channel.growth = f;
        self.conv_channel.growth_params = growth_params;
    }

    /// Set the integration step (timestep).
    fn set_dt(&mut self, new_dt: f64) {
        self.dt = new_dt;
    }

    /// Shape (axis lengths) of the channel.
    fn shape(&self) -> &[usize] {
        &self.shape
    }

    /// Reference to the channel's field; `channel` is ignored.
    fn get_channel_as_ref(&self, channel: usize) -> &ndarray::ArrayD<f64> {
        &self.channel.field
    }

    /// Reference to the kernel; `conv_channel` is ignored.
    fn get_kernel_as_ref(&self, conv_channel: usize) -> &Kernel {
        &self.conv_channel.kernel
    }

    /// Mutable reference to the channel's field; `channel` is ignored.
    fn get_channel_as_mut_ref(&mut self, channel: usize) -> &mut ndarray::ArrayD<f64> {
        &mut self.channel.field
    }

    /// Reference to the raw (complex) convolution result; `conv_channel` is ignored.
    fn get_convoluted_as_ref(&self, conv_channel: usize) -> &ndarray::ArrayD<Complex<f64>> {
        &self.convolved
    }

    /// Reference to the growth-function output; `conv_channel` is ignored.
    fn get_grown_as_ref(&self, conv_channel: usize) -> &ndarray::ArrayD<f64> {
        &self.conv_channel.field
    }

    /// Reference to the deltas. Same buffer as the growth result, because
    /// weights are not available for Standard Lenia.
    fn get_deltas_as_ref(&self, channel: usize) -> &ndarray::ArrayD<f64> {
        &self.conv_channel.field
    }

    /// Current integration step (timestep).
    fn dt(&self) -> f64 {
        self.dt
    }

    /// `StandardLenia` always has exactly 1 channel.
    fn channels(&self) -> usize {
        1
    }

    /// `StandardLenia` always has exactly 1 convolution channel.
    fn conv_channels(&self) -> usize {
        1
    }

    /// The (fixed) weights of the single channel; `channel` is ignored.
    fn weights(&self, channel: usize) -> &[f64] {
        &self.channel.weights
    }
}
/// Expanded type of Lenia
///
/// `ExpandedLenia` struct implements the expanded Lenia system, with support for multiple n-dimensional
/// channels, multiple kernels & associated growth functions (convolution channels) and weights. You will
/// most likely be using this type of Lenia mostly, as it is vastly more "powerful" in its capabilities.
///
/// `ExpandedLenia` **requires that the user sets up all of the kernels, growth functions, weights and
/// integration step!**
///
/// ### Example of initializing an `ExpandedLenia`.
/// Initializes with a ruleset with 2 different solitons, where the channel_0 solitons are dependent on and tied
/// to the channel_1 solitons.
///
/// ```ignore
/// // initialize
/// let starting_pattern0: ndarray::ArrayD<f64>; // fill with your data
/// let starting_pattern1: ndarray::ArrayD<f64>; // fill with your data
/// let channel_shape: Vec<usize> = vec![100, 100];
/// let mut simulator = Simulator::<ExpandedLenia>::new(&channel_shape);
/// // set up the simulation
/// simulator.set_channels(2);
/// simulator.set_convolution_channels(3);
/// simulator.set_convolution_channel_source(0, 0);
/// simulator.set_convolution_channel_source(1, 1);
/// simulator.set_convolution_channel_source(2, 1);
/// simulator.set_kernel(kernels::gaussian_donut_2d(14, 0.15), 0);
/// simulator.set_kernel(kernels::polynomial_nd(25, 2, &vec![4.0, 1.0, 0.333]), 1);
/// simulator.set_kernel(kernels::polynomial_nd(21, 2, &vec![4.0, 0.0, 1.0]), 2);
/// simulator.set_growth_function(growth_functions::standard_lenia, vec![0.15, 0.02], 0);
/// simulator.set_growth_function(growth_functions::polynomial, vec![0.25, 0.03, 4.0], 1);
/// simulator.set_growth_function(growth_functions::polynomial, vec![0.07, 0.026, 4.0], 2);
/// simulator.set_weights(0, &vec![2.0/3.0, 0.0, 1.0/3.0]);
/// simulator.set_weights(1, &vec![0.0, 1.0, 0.0]);
/// simulator.set_dt(0.1);
/// // seed channels and simulate
/// simulator.fill_channel(&starting_pattern0, 0);
/// simulator.fill_channel(&starting_pattern1, 1);
/// loop {
///     simulator.iterate();
///     display(simulator.get_channel_as_ref(0));
///     display(simulator.get_channel_as_ref(1));
/// }
/// ```
pub struct ExpandedLenia {
    /// Integration step (timestep) applied on every `iterate()` call.
    dt: f64,
    /// State channels of the simulation.
    channels: Vec<Channel>,
    /// Per-channel buffers holding the latest weighted-sum deltas.
    deltas: Vec<ndarray::ArrayD<f64>>,
    /// Shape (axis lengths) shared by every channel.
    shape: Vec<usize>,
    /// Kernel + growth-function pairings; each reads from one source channel.
    conv_channels: Vec<ConvolutionChannel>,
    /// Scratch buffers holding the (complex) convolution results.
    convolutions: Vec<ndarray::ArrayD<Complex<f64>>>,
    /// Pre-planned forward FFTs, one per convolution channel.
    forward_fft_instances: Vec<fft::ParPlannedFFTND>,
    /// Pre-planned inverse FFTs, one per convolution channel.
    inverse_fft_instances: Vec<fft::ParPlannedFFTND>,
}
impl Lenia for ExpandedLenia {
    /// Create and initialize a new instance of `ExpandedLenia`.
    ///
    /// This type of Lenia is much more powerful than `StandardLenia` as it can have n-dimensional fields,
    /// a limitless number of channels as well as kernels and associated growth functions.
    ///
    /// The default kernel is of unit size and the default growth function for the kernel is a "pass" function.
    ///
    /// ### Parameters
    ///
    /// * `shape` - The shape of the channels of the Lenia instance.
    ///
    /// ### Panics
    ///
    /// If any dimension/axis in `shape` is 0. This is not allowed; generally each dimension/axis should be
    /// relatively large.
    fn new(shape: &[usize]) -> Self {
        for (i, dim) in shape.iter().enumerate() {
            if *dim == 0 { panic!("ExpandedLenia::new() - Dimension/axis {} is 0! This is not allowed!", i); }
        }
        // A unit "pass" kernel paired with a "pass" growth function leaves the
        // channel unchanged until the user configures the simulation.
        let kernel = Kernel::from(kernels::pass(shape.len()), shape);
        let conv_channel = ConvolutionChannel {
            input_channel: 0,
            kernel,
            field: ndarray::ArrayD::from_elem(shape, 0.0),
            growth: growth_functions::pass,
            growth_params: vec![1.0],
        };
        let channel = Channel {
            field: ndarray::ArrayD::from_elem(shape, 0.0),
            weights: vec![1.0],
            weight_sum_reciprocal: 1.0,
        };
        let channel_shape: Vec<usize> = shape.to_vec();
        ExpandedLenia {
            forward_fft_instances: vec![fft::ParPlannedFFTND::new(&channel_shape, false)],
            inverse_fft_instances: vec![fft::ParPlannedFFTND::new(&channel_shape, true)],
            dt: 0.1,
            channels: vec![channel],
            deltas: vec![ndarray::ArrayD::from_elem(shape, 0.0)],
            conv_channels: vec![conv_channel],
            convolutions: vec![ndarray::ArrayD::from_elem(shape, Complex::new(0.0, 0.0))],
            shape: shape.to_vec(),
        }
    }

    /// Perform a single iteration of the simulation.
    ///
    /// Every convolution channel is processed on its own thread (FFT of the
    /// source channel -> pointwise multiply with the kernel spectrum -> inverse
    /// FFT -> growth function). Afterwards the weighted sums for each channel
    /// are likewise computed concurrently, normalized by the cached reciprocal
    /// of the weight sum, and integrated with `dt`.
    fn iterate(&mut self) {
        // Move all working data into reference-counted locks so it can be
        // shared with worker threads; `sources` remembers each convolution
        // channel's input channel index.
        let mut sources: Vec<usize> = Vec::with_capacity(self.conv_channels.len());
        let mut channel_rwlocks: Vec<Arc<RwLock<Channel>>> = Vec::with_capacity(self.channels.len());
        let mut delta_rwlocks: Vec<Arc<RwLock<ndarray::ArrayD<f64>>>> = Vec::with_capacity(self.deltas.len());
        let mut conv_channel_mutexes: Vec<Arc<Mutex<ConvolutionChannel>>> = Vec::with_capacity(self.conv_channels.len());
        let mut convolution_mutexes: Vec<Arc<Mutex<ndarray::ArrayD<Complex<f64>>>>> = Vec::with_capacity(self.convolutions.len());
        let mut forward_fft_mutexes: Vec<Arc<Mutex<ParPlannedFFTND>>> = Vec::with_capacity(self.forward_fft_instances.len());
        let mut inverse_fft_mutexes: Vec<Arc<Mutex<ParPlannedFFTND>>> = Vec::with_capacity(self.inverse_fft_instances.len());
        // The range ends are evaluated once, so draining with `remove(0)`
        // inside the loops is sound.
        for _ in 0..self.channels.len() {
            channel_rwlocks.push(Arc::new(RwLock::new(self.channels.remove(0))));
            delta_rwlocks.push(Arc::new(RwLock::new(self.deltas.remove(0))));
        }
        for _ in 0..self.conv_channels.len() {
            sources.push(self.conv_channels[0].input_channel);
            conv_channel_mutexes.push(Arc::new(Mutex::new(self.conv_channels.remove(0))));
            convolution_mutexes.push(Arc::new(Mutex::new(self.convolutions.remove(0))));
            forward_fft_mutexes.push(Arc::new(Mutex::new(self.forward_fft_instances.remove(0))));
            inverse_fft_mutexes.push(Arc::new(Mutex::new(self.inverse_fft_instances.remove(0))));
        }
        // Concurrent convolutions - one thread per convolution channel.
        let mut convolution_handles = Vec::with_capacity(conv_channel_mutexes.len());
        for i in 0..conv_channel_mutexes.len() {
            // Clone the lock handles this thread needs.
            let source_lock = Arc::clone(&channel_rwlocks[sources[i]]);
            let convolution_lock = Arc::clone(&convolution_mutexes[i]);
            let convolution_channel_lock = Arc::clone(&conv_channel_mutexes[i]);
            let forward_fft_lock = Arc::clone(&forward_fft_mutexes[i]);
            let inverse_fft_lock = Arc::clone(&inverse_fft_mutexes[i]);
            convolution_handles.push(thread::spawn(move || {
                let mut convolution_channel = convolution_channel_lock.lock().unwrap();
                let input = source_lock.read().unwrap();
                let mut convolution = convolution_lock.lock().unwrap();
                let mut forward_fft = forward_fft_lock.lock().unwrap();
                let mut inverse_fft = inverse_fft_lock.lock().unwrap();
                // Load the source channel into the complex buffer.
                convolution.zip_mut_with(
                    &input.field,
                    |a, b| {
                        a.re = *b;
                        a.im = 0.0;
                    }
                );
                // Convolution theorem: forward FFT the input data...
                forward_fft.transform(&mut convolution);
                // ...multiply pointwise with the kernel spectrum...
                convolution.zip_mut_with(
                    &convolution_channel.kernel.transformed,
                    |a, b| {
                        // Complex multiplication without cloning.
                        let real = (a.re * b.re) - (a.im * b.im);
                        a.im = ((a.re + a.im) * (b.re + b.im)) - real;
                        a.re = real;
                    }
                );
                // ...and inverse FFT to get the convolution result.
                inverse_fft.transform(&mut convolution);
                // Apply the growth function to the convolution result.
                let growth_info = (convolution_channel.growth, convolution_channel.growth_params.clone());
                convolution_channel.field.zip_mut_with(
                    &convolution,
                    |a, b| {
                        *a = (growth_info.0)(b.re, &growth_info.1);
                    }
                );
            }));
        }
        let mut summing_handles = Vec::with_capacity(channel_rwlocks.len());
        for handle in convolution_handles {
            handle.join().unwrap();
        }
        // Collapse convolution channel mutexes back into a single owned vector.
        let mut convolution_channels: Vec<ConvolutionChannel> = Vec::with_capacity(conv_channel_mutexes.len());
        for _ in 0..conv_channel_mutexes.len() {
            let data = conv_channel_mutexes.remove(0);
            convolution_channels.push(Arc::try_unwrap(data).unwrap().into_inner().unwrap());
        }
        // Concurrent summing of results - one thread per channel.
        let convoluted_results_rwlock = Arc::new(RwLock::new(convolution_channels));
        for i in 0..channel_rwlocks.len() {
            let dt = self.dt;
            let channel_lock = Arc::clone(&channel_rwlocks[i]);
            let delta_lock = Arc::clone(&delta_rwlocks[i]);
            let convoluted_results_lock = Arc::clone(&convoluted_results_rwlock);
            summing_handles.push(thread::spawn(move || {
                let mut channel = channel_lock.write().unwrap();
                let mut deltas = delta_lock.write().unwrap();
                let convoluted_results = convoluted_results_lock.read().unwrap();
                // Accumulate the weighted sum of all convolution results, then
                // normalize by the reciprocal of the weight sum once the final
                // contribution has been added. (The previous `i == len` check
                // could never be true inside the loop, so the normalization
                // was silently skipped.)
                let weight_count = channel.weights.len();
                for i in 0..weight_count {
                    deltas.zip_mut_with(&convoluted_results[i].field,
                        |a, b| {
                            if i == 0 { *a = 0.0; }
                            *a += *b * channel.weights[i];
                            if i + 1 == weight_count { *a *= channel.weight_sum_reciprocal; }
                        }
                    );
                }
                // Integrate: channel += delta * dt, clamped to the valid range.
                ndarray::Zip::from(&mut channel.field).and(&*deltas).par_for_each(|a, b| {
                    *a = (*a + (*b * dt)).clamp(0.0, 1.0);
                });
            }));
        }
        // The convolution threads were joined above, so their locks can be
        // unwrapped and returned to the instance while the summing threads run.
        for _ in 0..convolution_mutexes.len() {
            self.forward_fft_instances.push(Arc::try_unwrap(forward_fft_mutexes.remove(0)).unwrap().into_inner().unwrap());
            self.inverse_fft_instances.push(Arc::try_unwrap(inverse_fft_mutexes.remove(0)).unwrap().into_inner().unwrap());
            self.convolutions.push(Arc::try_unwrap(convolution_mutexes.remove(0)).unwrap().into_inner().unwrap());
        }
        for handle in summing_handles {
            handle.join().unwrap();
        }
        // Return ownership of all remaining data back to the Lenia instance.
        self.conv_channels = Arc::try_unwrap(convoluted_results_rwlock).unwrap().into_inner().unwrap();
        for _ in 0..channel_rwlocks.len() {
            self.channels.push(Arc::try_unwrap(channel_rwlocks.remove(0)).unwrap().into_inner().unwrap());
            self.deltas.push(Arc::try_unwrap(delta_rwlocks.remove(0)).unwrap().into_inner().unwrap());
        }
    }

    /// Set the number of channels. Growing adds zero-filled channels with
    /// all-zero weights; shrinking drops the highest-indexed channels and their
    /// delta buffers. Convolution channels still pointing at a removed channel
    /// must be re-targeted by the user with `set_source_channel`.
    fn set_channels(&mut self, num_channels: usize) {
        if num_channels <= self.channels.len() {
            for i in (num_channels..self.channels.len()).rev() {
                self.channels.remove(i);
                self.deltas.remove(i);
            }
        }
        else {
            let weights_prototype: Vec<f64> = vec![0.0; self.conv_channels.len()];
            for _ in self.channels.len()..num_channels {
                self.channels.push(
                    Channel {
                        field: ndarray::ArrayD::from_elem(self.shape.clone(), 0.0),
                        weights: weights_prototype.clone(),
                        weight_sum_reciprocal: 0.0,
                    }
                );
                self.deltas.push(ndarray::ArrayD::from_elem(self.shape.clone(), 0.0));
            }
        }
    }

    /// Set the number of convolution channels. Growing adds "pass" kernels and
    /// growth functions; shrinking drops the highest-indexed convolution
    /// channels, truncates every channel's weights accordingly and re-derives
    /// the cached weight-sum reciprocal.
    fn set_conv_channels(&mut self, num_conv_channels: usize) {
        if num_conv_channels <= self.conv_channels.len() {
            for i in (num_conv_channels..self.conv_channels.len()).rev() {
                self.conv_channels.remove(i);
                self.forward_fft_instances.remove(i);
                self.inverse_fft_instances.remove(i);
                self.convolutions.remove(i);
            }
            for channel in &mut self.channels {
                for i in (num_conv_channels..channel.weights.len()).rev() {
                    channel.weights.remove(i);
                }
                // Consistent with `set_weights`: normalize by the sum of the
                // absolute weight values.
                let sum: f64 = channel.weights.iter().map(|weight| weight.abs()).sum();
                channel.weight_sum_reciprocal = 1.0 / sum;
            }
        }
        else {
            for _ in self.conv_channels.len()..num_conv_channels {
                self.conv_channels.push(
                    ConvolutionChannel {
                        input_channel: 0,
                        field: self.conv_channels[0].field.clone(),
                        kernel: Kernel::from(kernels::pass(self.shape.len()), &self.shape),
                        growth: growth_functions::pass,
                        growth_params: vec![0.0],
                    }
                );
                self.forward_fft_instances.push(fft::ParPlannedFFTND::new(&self.shape, false));
                self.inverse_fft_instances.push(fft::ParPlannedFFTND::new(&self.shape, true));
                self.convolutions.push(ndarray::ArrayD::from_elem(self.shape.clone(), Complex::new(0.0, 0.0)));
            }
            for channel in &mut self.channels {
                for _ in channel.weights.len()..num_conv_channels {
                    channel.weights.push(0.0);
                }
            }
        }
    }

    /// Set the weights that `channel` applies to each convolution channel's
    /// output. Missing weights are padded with `0.0`; excess weights are
    /// ignored. The reciprocal of the sum of absolute weights is cached for
    /// normalization during `iterate`.
    ///
    /// NOTE(review): an all-zero weight vector yields an infinite reciprocal;
    /// callers are expected to supply at least one non-zero weight.
    fn set_weights(&mut self, new_weights: &[f64], channel: usize) {
        let mut weights: Vec<f64>;
        if new_weights.len() < self.conv_channels.len() {
            weights = new_weights.to_vec();
            for _ in new_weights.len()..self.conv_channels.len() {
                weights.push(0.0);
            }
        }
        else {
            weights = Vec::with_capacity(new_weights.len());
            for i in 0..self.conv_channels.len() {
                weights.push(new_weights[i]);
            }
        }
        let mut sum: f64 = 0.0;
        for weight in &weights {
            sum += weight.abs();
        }
        self.channels[channel].weights = weights;
        self.channels[channel].weight_sum_reciprocal = 1.0 / sum;
    }

    /// Point a convolution channel at the channel it should read from.
    fn set_source_channel(&mut self, conv_channel: usize, src_channel: usize) {
        self.conv_channels[conv_channel].input_channel = src_channel;
    }

    /// Replace the kernel of the given convolution channel.
    fn set_kernel(&mut self, kernel: ndarray::ArrayD<f64>, conv_channel: usize) {
        self.conv_channels[conv_channel].kernel = Kernel::from(kernel, &self.shape);
    }

    /// Replace the growth function and parameters of the given convolution channel.
    fn set_growth(&mut self, f: fn(f64, &[f64]) -> f64, growth_params: Vec<f64>, conv_channel: usize) {
        self.conv_channels[conv_channel].growth = f;
        self.conv_channels[conv_channel].growth_params = growth_params;
    }

    /// Set the integration step (timestep).
    fn set_dt(&mut self, new_dt: f64) {
        self.dt = new_dt;
    }

    /// Shape (axis lengths) shared by every channel.
    fn shape(&self) -> &[usize] {
        &self.shape
    }

    /// Reference to the given channel's field.
    fn get_channel_as_ref(&self, channel: usize) -> &ndarray::ArrayD<f64> {
        &self.channels[channel].field
    }

    /// Reference to the given convolution channel's kernel.
    fn get_kernel_as_ref(&self, conv_channel: usize) -> &Kernel {
        &self.conv_channels[conv_channel].kernel
    }

    /// Mutable reference to the given channel's field.
    fn get_channel_as_mut_ref(&mut self, channel: usize) -> &mut ndarray::ArrayD<f64> {
        &mut self.channels[channel].field
    }

    /// Reference to the raw (complex) convolution result of the given
    /// convolution channel.
    fn get_convoluted_as_ref(&self, conv_channel: usize) -> &ndarray::ArrayD<Complex<f64>> {
        &self.convolutions[conv_channel]
    }

    /// Reference to the growth-function output of the given convolution channel.
    fn get_grown_as_ref(&self, conv_channel: usize) -> &ndarray::ArrayD<f64> {
        &self.conv_channels[conv_channel].field
    }

    /// Reference to the given channel's latest weighted-sum deltas.
    fn get_deltas_as_ref(&self, channel: usize) -> &ndarray::ArrayD<f64> {
        &self.deltas[channel]
    }

    /// Current integration step (timestep).
    fn dt(&self) -> f64 {
        self.dt
    }

    /// Number of channels.
    fn channels(&self) -> usize {
        self.channels.len()
    }

    /// Number of convolution channels.
    fn conv_channels(&self) -> usize {
        self.conv_channels.len()
    }

    /// The weights of the given channel.
    fn weights(&self, channel: usize) -> &[f64] {
        &self.channels[channel].weights
    }
}
| rust | MIT | ee7b79b09334928a25dbcc96edc33edd57362724 | 2026-01-04T20:21:17.859970Z | false |
BirdbrainEngineer/lenia_ca | https://github.com/BirdbrainEngineer/lenia_ca/blob/ee7b79b09334928a25dbcc96edc33edd57362724/src/growth_functions.rs | src/growth_functions.rs | //! A collection of often used growth functions.
#![allow(dead_code)]
#![allow(unused_variables)]
/// Standard unimodal "gaussian bump".
///
/// ### Parameters
///
/// * `params[0]` - **mu**: The position of the mean / highest point of the growth function.
///
/// * `params[1]` - **sigma**: Standard deviation of the gaussian bump.
///
/// ### Returns
/// A `f64` in range `[-1.0..1.0]`.
pub fn standard_lenia(num: f64, params: &[f64]) -> f64 {
    // Rescale the normal sample from [0, 1] into [-1, 1].
    let sample = super::sample_normal(num, params[0], params[1]);
    (sample * 2.0) - 1.0
}
/// Multimodal "gaussian bumps" growth function.
///
/// While the Lenia paper calls for a unimodal growth function, then strictly speaking, there are no rules!
/// Do whatever you want.
///
/// ### Parameters
///
/// * `params[even index]` - **mu**: The position of the means / the centers of the gaussian bumps.
///
/// * `params[odd index]` - **sigma**: Standard deviations of the gaussian bumps. Each sigma corresponds
/// to the mu defined by the previous `params` index.
pub fn multimodal_normal(num: f64, params: &[f64]) -> f64 {
    // Accumulate one normal sample per (mu, sigma) pair.
    let mut total = 0.0;
    let mut pair_start = 0;
    while pair_start < params.len() {
        total += super::sample_normal(num, params[pair_start], params[pair_start + 1]);
        pair_start += 2;
    }
    (total * 2.0) - 1.0
}
/// Standard unimodal "polynomial bump".
///
/// ### Parameters
///
/// `params[0]` - mu (center of the bump)
///
/// `params[1]` - sigma (the bump's support extends to 3 * sigma)
///
/// `params[2]` - alpha (polynomial power, truncated to an integer)
pub fn polynomial(num: f64, params: &[f64]) -> f64 {
    let distance = (num - params[0]).abs();
    let support = params[1] * 3.0;
    if distance > support {
        // Outside the bump's support everything maps to full decay.
        return -1.0;
    }
    let base = 1.0 - ((distance * distance) / (support * support));
    // Raise `base` to the alpha'th power by repeated multiplication,
    // then rescale the result into [-1, 1].
    let raised = (0..(params[2] as usize)).fold(1.0, |acc, _| acc * base);
    (raised * 2.0) - 1.0
}
/// Samples from a precalculated distribution.
///
/// The distribution is made of evenly spaced points and is sampled over
/// the range `0.0` to `1.0`. When the sample falls between 2 points in the
/// distribution, the result is interpolated linearly between the two points.
///
/// Note that the input is scaled by `params.len()`, so the last point is
/// reached slightly before `num == 1.0`; anything at or past it returns the
/// last point unchanged.
///
/// ### Parameters
///
/// * `num` - Position to sample, expected in range `[0.0..1.0]`.
///
/// * `params[0..n]` - Distribution in range `[0.0..1.0]` to sample from
/// (must not be empty).
pub fn precalculated_linear(num: f64, params: &[f64]) -> f64 {
    let index = num * params.len() as f64;
    // Clamp to the endpoints of the distribution.
    if index as usize >= (params.len() - 1) { return params[params.len() - 1] }
    // Compare on the float: the previous integer-truncating comparison
    // (`index as usize <= 0`) made the whole first segment return `params[0]`
    // without any interpolation.
    if index <= 0.0 { return params[0] }
    // Linear interpolation between the two neighbouring points.
    let a = params[index.floor() as usize];
    let b = params[index.ceil() as usize];
    let dx = index - index.floor();
    a + (dx * (b - a))
}
/// Conway's "Game of life" growth function. `Rulestring: B3/S23`
pub fn conway_game_of_life(num: f64, params: &[f64]) -> f64 {
    // `num` encodes the live-neighbour count scaled into [0, 1] (count / 9).
    match (num * 9.0).round() as usize {
        2 => 0.0,  // survival: state stays as it is
        3 => 1.0,  // birth / survival: push towards alive
        _ => -1.0, // under- or overpopulation: push towards dead
    }
}
/// Basic Smooth Life growth function.
///
/// Not faithful to proper SmoothLife, and is not capable of simulating every SmoothLife.
///
/// `params[0]` - Birth range start
///
/// `params[1]` - Birth range end
///
/// `params[2]` - Survive range start
///
/// `params[3]` - Survive range end
pub fn smooth_life(num: f64, params: &[f64]) -> f64 {
    // The birth band takes precedence over the survive band when they overlap.
    if (params[0]..=params[1]).contains(&num) {
        1.0
    } else if (params[2]..=params[3]).contains(&num) {
        0.0
    } else {
        -1.0
    }
}
/// Smooth Life growth function with smoothed stepping.
///
/// Not faithful to proper SmoothLife and is not capable of simulating every SmoothLife.
///
/// Step width defines the range within which `~99%` of the change between states takes place.
///
/// ### Parameters
///
/// `params[0]` - Birth range start
///
/// `params[1]` - Birth range end
///
/// `params[2]` - Survive range start
///
/// `params[3]` - Survive range end
///
/// `params[4]` - Birth step width
///
/// `params[5]` - Survive step width
pub fn smooth_life_sigmoid_smoothed(num: f64, params: &[f64]) -> f64 {
    // Each state band is a plateau built from a rising and a falling sigmoid.
    let birth_rise = sigmoid(num, params[0], params[4], 2.0);
    let birth_fall = sigmoid(num, params[1], -params[4], 2.0);
    let birth = (birth_rise + birth_fall) - 3.0;
    let survive_rise = sigmoid(num, params[2], params[5], 1.0);
    let survive_fall = sigmoid(num, params[3], -params[5], 1.0);
    let survive = (survive_rise + survive_fall) - 2.0;
    // The stronger response wins.
    if birth > survive { birth } else { survive }
}

/// Logistic sigmoid scaled to `peak`, centered on `center`; `sigma` controls
/// the (signed) transition width.
fn sigmoid(x: f64, center: f64, sigma: f64, peak: f64) -> f64 {
    let steepness = 4.0 / sigma;
    peak / (1.0 + (-((x - center) * steepness)).exp())
}
/// Pass number on virtually unchanged.
///
/// Returns `num` multiplied by `params[0]`. Use this growth function if you would like to not use a growth function,
/// but merely explore the dynamics of iterative application of kernels.
pub fn pass(num: f64, params: &[f64]) -> f64 {
    params[0] * num
}
BirdbrainEngineer/lenia_ca | https://github.com/BirdbrainEngineer/lenia_ca/blob/ee7b79b09334928a25dbcc96edc33edd57362724/src/kernels.rs | src/kernels.rs | //! Collection of generators for often used kernel shapes.
#![allow(dead_code)]
#![allow(unused_variables)]
use super::*;
use ndarray::IxDyn;
/// Generates a kernel base of a gaussian donut in 2d.
///
/// The mean (position of the highest value) is placed at `0.5`
/// in the range `[0.0..1.0]`, where `0.0` is the center of the kernel and `1.0` the outer edge.
///
/// ### Parameters
///
/// * `radius` - The radius of the kernel. The kernel is guaranteed to be square in shape,
/// but any values outside the radius are set to `0.0`.
///
/// * `stddev` - Standard deviation to use.
pub fn gaussian_donut_2d(radius: usize, stddev: f64) -> ndarray::ArrayD<f64> {
    let diameter = radius * 2;
    let radius = radius as f64;
    let normalizer = 1.0 / radius;
    // The donut is centered at (radius, radius).
    let center_x = radius;
    let center_y = radius;
    let mut field = ndarray::ArrayD::zeros(IxDyn(&[diameter, diameter]));
    for row in 0..field.shape()[0] {
        for col in 0..field.shape()[1] {
            let dx = row as f64 - center_x;
            let dy = col as f64 - center_y;
            let dist = (dx * dx + dy * dy).sqrt();
            // Cells beyond the radius stay 0.0; the rest sample the gaussian
            // over the normalized distance.
            field[[row, col]] = if dist <= radius {
                super::sample_normal(dist * normalizer, 0.5, stddev)
            } else {
                0.0
            };
        }
    }
    field
}
/// Generates a kernel base of multiple concentric gaussian "donuts" in 2d.
///
/// Each donut/ring is a single index in the list of parameters.
///
/// ### Parameters
///
/// * `radius` - The radius of the kernel. The kernel is guaranteed to be square in shape,
/// but any values outside the radius are set to `0.0`.
///
/// * `means` - The placement of the peak values of individual rings.
/// Should be in range `[0.0..1.0]`, where `0.0` is the center point of the kernel and
/// `1.0` is the outer edge of the circular kernel.
///
/// * `peaks` - The maximum value that each individual ring can create.
/// Can be any positive real number but will later be normalized compared to other rings.
///
/// * `stddevs` - The standard deviations of each individual ring.
///
/// ### Panics
///
/// If `means`, `peaks` and `stddevs` do not all have the same length.
pub fn multi_gaussian_donut_2d(radius: usize, means: &[f64], peaks: &[f64], stddevs: &[f64]) -> ndarray::ArrayD<f64> {
    if means.len() != peaks.len() || means.len() != stddevs.len() {
        panic!("Function \"multi_gaussian_donut_2d\" expects each mean parameter to be accompanied by a peak and stddev parameter!");
    }
    let diameter = radius * 2;
    let radius = radius as f64;
    let normalizer = 1.0 / radius;
    let center_x = radius;
    let center_y = radius;
    let mut field = ndarray::ArrayD::zeros(IxDyn(&[diameter, diameter]));
    for row in 0..field.shape()[0] {
        for col in 0..field.shape()[1] {
            let dx = row as f64 - center_x;
            let dy = col as f64 - center_y;
            let dist = (dx * dx + dy * dy).sqrt();
            // Sum the contribution of every ring at this normalized distance.
            field[[row, col]] = if dist <= radius {
                (0..means.len())
                    .map(|ring| super::sample_normal(dist * normalizer, means[ring], stddevs[ring]) * peaks[ring].abs())
                    .sum()
            } else {
                0.0
            };
        }
    }
    field
}
/// Generates a kernel base of a gaussian donut in n-dimensions.
///
/// The mean (position of the highest value) is placed at `0.5`
/// in the range `[0.0..1.0]`, where `0.0` is the center of the kernel and `1.0` the outer edge.
///
/// ### Parameters
///
/// * `radius` - The radius of the kernel in every axis.
/// Any values outside the radius are set to `0.0`.
///
/// * `dimensions` - Number of dimensions of the kernel.
///
/// * `stddev` - Standard deviation to use.
pub fn gaussian_donut_nd(radius: usize, dimensions: usize, stddev: f64) -> ndarray::ArrayD<f64> {
    let radius = radius as f64;
    let normalizer = 1.0 / radius;
    let center = vec![radius; dimensions];
    let shape = vec![(radius * 2.0) as usize; dimensions];
    ndarray::ArrayD::from_shape_fn(shape, |index_info| {
        // Collect this cell's coordinates and measure its distance from center.
        let mut point = Vec::with_capacity(dimensions);
        for axis in 0..dimensions {
            point.push(index_info[axis] as f64);
        }
        let dist = euclidean_dist(&point, &center);
        if dist > radius {
            0.0
        } else {
            sample_normal(dist * normalizer, 0.5, stddev)
        }
    })
}
/// Generates a kernel base of multiple radial gaussian "hyper-donuts" in n-dimensions.
///
/// Each donut/ring is a single index in the list of parameters.
///
/// ### Parameters
///
/// * `radius` - The radius of the kernel in each axis.
/// Any values outside the radius are set to `0.0`.
///
/// * `dimensions` - Number of dimensions of the kernel.
///
/// * `means` - The placement of the peak values of individual donuts.
/// Should be in range `[0.0..1.0]`, where `0.0` is the center point of the kernel and
/// `1.0` is the outer surface of the hypersphere.
///
/// * `peaks` - The maximum value that each individual donut can create.
/// Can be any positive real number but will later be normalized compared to other donuts.
///
/// * `stddevs` - The standard deviations of each individual donut.
pub fn multi_gaussian_donut_nd(radius: usize, dimensions: usize, means: &[f64], peaks: &[f64], stddevs: &[f64]) -> ndarray::ArrayD<f64> {
    let radius = radius as f64;
    let normalizer = 1.0 / radius;
    let center = vec![radius; dimensions];
    let shape = vec![(radius * 2.0) as usize; dimensions];
    ndarray::ArrayD::from_shape_fn(shape, |index_info| {
        // Collect this cell's coordinates and measure its distance from center.
        let mut point = Vec::with_capacity(dimensions);
        for axis in 0..dimensions {
            point.push(index_info[axis] as f64);
        }
        let dist = euclidean_dist(&point, &center);
        if dist > radius {
            0.0
        } else {
            // Sum the contribution of every donut at this normalized distance.
            (0..means.len())
                .map(|donut| super::sample_normal(dist * normalizer, means[donut], stddevs[donut]) * peaks[donut].abs())
                .sum()
        }
    })
}
/// Generates a kernel base of a radially symmetric sampling of precalculated values.
///
/// ### Parameters
///
/// * `radius` - Radius of the kernel field to generate.
///
/// * `dimensions` - Dimensionality of the kernel field to generate.
///
/// * `params[0..n]` - Value to set based on the distance from the center of the kernel
/// to the outer edge of the kernel, where `params[0]` is the value at the kernel center
/// and `params[n - 1]` is the value at the edge of the kernel.
pub fn precalculated_linear(radius: usize, dimensions: usize, params: &[f64]) -> ndarray::ArrayD<f64> {
    let radius = radius as f64;
    let normalizer = 1.0 / radius;
    let center = vec![radius; dimensions];
    let shape = vec![(radius * 2.0) as usize; dimensions];
    ndarray::ArrayD::from_shape_fn(shape, |index_info| {
        // Collect this cell's coordinates and measure its distance from center.
        let mut point = Vec::with_capacity(dimensions);
        for axis in 0..dimensions {
            point.push(index_info[axis] as f64);
        }
        let dist = euclidean_dist(&point, &center);
        if dist > radius {
            0.0
        } else {
            // Delegate the radial lookup to the growth-function sampler.
            growth_functions::precalculated_linear(dist * normalizer, params)
        }
    })
}
/// Generates a kernel base of "polynomial donuts".
///
/// The peaks of the individual rings are equally spaced around the center of the kernel.
/// Refer to Lenia paper for more context.
///
/// ### Parameters
///
/// * `radius` - Radius of the kernel field to generate.
///
/// * `dimensions` - Dimensionality of the kernel field to generate.
///
/// * `params[0]` - Polynomial power, usually set to `4.0`;
///
/// * `params[1..n]` - Peak heights of the individual donuts.
pub fn polynomial_nd(radius: usize, dimensions: usize, params: &[f64]) -> ndarray::ArrayD<f64> {
    let radius = radius as f64;
    // Normalizes a euclidean distance into [0.0..1.0] relative to the radius.
    let normalizer = 1.0 / radius;
    let center = vec![radius; dimensions];
    let mut shape: Vec<usize> = Vec::new();
    let mut index: Vec<f64> = Vec::new();
    for i in 0..dimensions {
        shape.push((radius * 2.0) as usize);
        index.push(0.0);
    }
    let out = ndarray::ArrayD::from_shape_fn(
        shape,
        |index_info| {
            // Reuse the `index` buffer for this cell's coordinates as floats.
            for i in 0..index.len() {
                index[i] = index_info[i] as f64;
            }
            // Repaired mojibake: the second argument had been corrupted to
            // "&cent;er" by an encoding mishap; it must be `&center`.
            let dist = euclidean_dist(&index, &center);
            if dist > radius {
                // Outside the spherical support of the kernel.
                0.0
            }
            else {
                let dist = dist * normalizer;
                if dist == 0.0 { 0.0 }
                else {
                    // Pick which ring this distance falls into; ring peaks
                    // start at params[1] (params[0] is the power alpha).
                    let peak_index = (dist * (params.len() - 1) as f64).ceil() as usize;
                    params[peak_index] * c((params.len() - 1) as f64 * dist - (peak_index - 1) as f64, params[0])
                }
            }
        }
    );
    out
}
/// Radial polynomial bump `(4r(1-r))^alpha` from the Lenia paper; alpha is
/// truncated to an integer and applied by repeated multiplication.
fn c(r: f64, alpha: f64) -> f64 {
    let base = 4.0 * r * (1.0 - r);
    (0..(alpha as usize)).fold(1.0, |acc, _| acc * base)
}
/// Moore neighborhood with radius of 1 in 2D.
///
/// This is the kernel to use for Conway's game of life.
pub fn conway_game_of_life() -> ndarray::ArrayD<f64> {
    // All eight neighbours weigh 1.0; the cell itself is excluded.
    let mut kernel = ndarray::ArrayD::from_elem(vec![3_usize, 3], 1.0);
    kernel[[1, 1]] = 0.0;
    kernel
}
/// Generates a kernel base of a "SmoothLife" outer kernel.
///
/// Not completely faithful to SmoothLife.
///
/// ### Parameters
///
/// * `radius` - Radius of the kernel to generate.
///
/// * `dimensions` - Dimensionality of the kernel to generate.
///
/// * `width_ratio` - Controls the width of the neighborhood ring around the center, where `0.0` is an empty kernel
/// and `1.0` is a completely filled in disk. Use `0.5` for default SmoothLife.
pub fn smoothlife(radius: usize, dimensions: usize, width_ratio: f64) -> ndarray::ArrayD<f64> {
    let width_ratio = width_ratio.clamp(0.0, 1.0);
    let center = vec![radius as f64; dimensions];
    let shape = vec![radius * 2; dimensions];
    ndarray::ArrayD::from_shape_fn(shape, |index_info| {
        let mut point = Vec::with_capacity(dimensions);
        for axis in 0..dimensions {
            point.push(index_info[axis] as f64);
        }
        // Normalized distance of this cell from the kernel center.
        let dist = euclidean_dist(&point, &center) / radius as f64;
        // The ring spans normalized distances [(1-w)/2 .. 1-(1-w)/2].
        if dist > (1.0 - ((1.0 - width_ratio) * 0.5)) { 0.0 }
        else if dist < ((1.0 - width_ratio) * 0.5) { 0.0 }
        else { 1.0 }
    })
}
/// Generates a kernel base of a single pixel with n-dimensions.
pub fn pass(dimensions: usize) -> ndarray::ArrayD<f64> {
    // A 1x1x...x1 array holding a single 1.0.
    let unit_shape = vec![1_usize; dimensions];
    ndarray::ArrayD::<f64>::from_shape_fn(unit_shape, |_| 1.0)
}
| rust | MIT | ee7b79b09334928a25dbcc96edc33edd57362724 | 2026-01-04T20:21:17.859970Z | false |
BirdbrainEngineer/lenia_ca | https://github.com/BirdbrainEngineer/lenia_ca/blob/ee7b79b09334928a25dbcc96edc33edd57362724/src/fft.rs | src/fft.rs | //! Contains the required functionality for performing n-dimensional fast-fourier-transforms.
#![allow(dead_code)]
#![allow(unused_variables)]
use std::{fmt, sync::Arc};
use rustfft::{Fft, FftPlanner, FftDirection};
use rustfft::num_complex::Complex;
use rayon::prelude::*;
/// Holds all the relevant data for a pre-planned FFT, which is to say, once initialized,
/// it can perform efficient FFT-s on data of the initially specified length.
#[derive(Clone)]
pub struct PlannedFFT {
    /// The pre-planned rustfft instance; fixes both length and direction.
    fft: Arc<dyn Fft<f64>>,
    /// Reusable scratch buffer, sized via `Fft::get_inplace_scratch_len` so
    /// `transform` never has to allocate.
    scratch_space: Vec<Complex<f64>>,
}
impl fmt::Debug for PlannedFFT {
    /// Formats the plan for diagnostics; the FFT plan and scratch buffer are
    /// summarised by their lengths rather than dumped in full.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Bug fix: the name printed here previously said "PreplannedFFT",
        // which does not match the actual type name `PlannedFFT`.
        f.debug_struct("PlannedFFT")
            .field("scratch_space", &format!("Vec<Complex<f64>>, len: {}", self.scratch_space.len()))
            .field("fft", &format!("Arc<dyn rustfft::Fft<f64>> => len: {}, direction: {}", self.fft.len(), self.fft.fft_direction()))
            .finish()
    }
}
impl PlannedFFT {
    /// Creates a plan for FFTs of exactly `length` points.
    ///
    /// # Panics
    ///
    /// Panics if `length` is 0.
    pub fn new(length: usize, inverse: bool) -> Self {
        if length == 0 { panic!("PlannedFFT::new() - Provided length was 0. Length must be at least 1!"); }
        let mut planner = FftPlanner::new();
        // Direction is fixed at plan time; expressed directly instead of the
        // previous declare-then-assign `match`.
        let direction = if inverse {
            FftDirection::Inverse
        } else {
            FftDirection::Forward
        };
        let fft = planner.plan_fft(length, direction);
        // Pre-allocate the scratch buffer once so `transform` never allocates.
        let scratch_space = vec![Complex::new(0.0, 0.0); fft.get_inplace_scratch_len()];
        PlannedFFT { fft, scratch_space }
    }
    /// Returns `true` if this plan performs the inverse transform.
    pub fn inverse(&self) -> bool {
        matches!(self.fft.fft_direction(), FftDirection::Inverse)
    }
    /// The number of points this plan operates on.
    pub fn length(&self) -> usize {
        self.fft.len()
    }
    /// Transforms `data` in place. Inverse transforms are normalized by
    /// `1 / data.len()` so a forward + inverse pair round-trips.
    pub fn transform(&mut self, data: &mut [Complex<f64>]) {
        self.fft.process_with_scratch(data, &mut self.scratch_space);
        if self.inverse() {
            let inverse_len = 1.0 / data.len() as f64;
            for v in data.iter_mut() {
                v.re *= inverse_len;
                v.im *= inverse_len;
            }
        }
    }
}
/// Holds all the relevant data for a pre-planned N-dimensional fast-fourier-transform. Operates only
/// on data of the initially specified length.
#[derive(Debug)]
pub struct PlannedFFTND {
    /// Expected shape of every array passed to `transform`.
    shape: Vec<usize>,
    /// One 1-D plan per axis, in axis order.
    fft_instances: Vec<PlannedFFT>,
    /// Cached direction flag; mirrors the direction of every per-axis plan.
    inverse: bool
}
impl PlannedFFTND {
    /// Builds one 1-D FFT plan per axis for arrays of the given `shape`.
    ///
    /// # Panics
    ///
    /// Panics if `shape` is empty.
    pub fn new(shape: &[usize], inverse: bool) -> Self {
        if shape.is_empty() { panic!("PlannedFFTND::new() - Provided shape was empty! Needs at least 1 dimension!"); }
        // Axis lengths may differ, so each axis gets its own plan.
        let fft_instances: Vec<PlannedFFT> = shape
            .iter()
            .map(|&dim| PlannedFFT::new(dim, inverse))
            .collect();
        PlannedFFTND {
            shape: shape.to_vec(),
            fft_instances,
            inverse,
        }
    }
    /// The shape this plan operates on.
    pub fn shape(&self) -> &[usize] {
        &self.shape
    }
    /// Returns `true` if this plan performs the inverse transform.
    pub fn inverse(&self) -> bool {
        self.inverse
    }
    /// Transforms `data` in place, one axis at a time.
    ///
    /// # Panics
    ///
    /// Panics if `data`'s shape differs from the planned shape.
    pub fn transform(&mut self, data: &mut ndarray::ArrayD<Complex<f64>>) {
        if data.shape() != self.shape { panic!("PlannedFFTND::transform() - shape of the data to be transformed does not agree with the shape that the fft can work on!"); }
        // The inverse transform undoes the axes in reverse order.
        let axis_iterator: Vec<usize> = if self.inverse() {
            (0..self.shape.len()).rev().collect()
        } else {
            (0..self.shape.len()).collect()
        };
        for axis in axis_iterator {
            for mut lane in data.lanes_mut(ndarray::Axis(axis)) {
                // Lanes are not guaranteed contiguous, so stage each one
                // through a temporary Vec for the 1-D transform.
                let mut buf = lane.to_vec();
                self.fft_instances[axis].transform(&mut buf);
                for (dst, src) in lane.iter_mut().zip(buf) {
                    *dst = src;
                }
            }
        }
    }
}
/// Parallel version (multithreaded) of the PlannedFFTND.
#[derive(Debug)]
pub struct ParPlannedFFTND {
    /// Expected shape of every array passed to `transform`.
    shape: Vec<usize>,
    /// One 1-D plan per axis; cloned per worker thread during `transform`.
    fft_instances: Vec<PlannedFFT>,
    /// Cached direction flag; mirrors the direction of every per-axis plan.
    inverse: bool
}
impl ParPlannedFFTND {
pub fn new(shape: &[usize], inverse: bool) -> Self {
if shape.is_empty() { panic!("ParPlannedFFTND::new() - Provided shape was empty! Needs at least 1 dimension!"); }
let mut ffts: Vec<PlannedFFT> = Vec::with_capacity(shape.len());
for dim in shape {
ffts.push(PlannedFFT::new(*dim, inverse));
}
ParPlannedFFTND {
shape: shape.to_vec(),
fft_instances: ffts,
inverse: inverse,
}
}
pub fn shape(&self) -> &[usize] {
&self.shape
}
pub fn inverse(&self) -> bool {
self.inverse
}
pub fn transform(&mut self, data: &mut ndarray::ArrayD<Complex<f64>>) {
if data.shape() != self.shape { panic!("ParPlannedFFTND::transform() - shape of the data to be transformed does not agree with the shape that the fft can work on!"); }
let mut axis_iterator: Vec<usize> = Vec::with_capacity(self.shape.len());
if self.inverse() {
for i in (0..self.shape.len()).rev() {
axis_iterator.push(i);
}
}
else {
for i in 0..self.shape.len() {
axis_iterator.push(i);
}
}
for axis in axis_iterator {
let mut data_lane = data.lanes_mut(ndarray::Axis(axis));
let mut fft_lane = &mut self.fft_instances[axis];
ndarray::Zip::from(data_lane)
.into_par_iter()
.for_each_with(self.fft_instances[axis].clone(), |mut fft, mut row| {
let mut lane = row.0.to_vec();
fft.transform(&mut lane);
for i in 0..row.0.len() {
row.0[i] = lane[i];
}
});
}
}
} | rust | MIT | ee7b79b09334928a25dbcc96edc33edd57362724 | 2026-01-04T20:21:17.859970Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-lexer/src/variable.rs | dtl-lexer/src/variable.rs | use miette::{Diagnostic, SourceSpan};
use num_bigint::BigInt;
use thiserror::Error;
use unicode_xid::UnicodeXID;
use crate::common::{
LexerError, NextChar, check_variable_attrs, lex_numeric, lex_text, lex_translated,
lex_variable_argument, trim_variable,
};
use crate::types::{At, TemplateString};
use crate::{END_TRANSLATE_LEN, QUOTE_LEN, START_TRANSLATE_LEN, TemplateContent};
/// A filter argument, e.g. the `spam` in `{{ foo|default:spam }}`.
///
/// Each variant stores the `(start, len)` span of the argument within the
/// template, including any surrounding quotes or `_( )` markers.
#[derive(Debug, PartialEq, Eq)]
pub enum Argument {
    /// A numeric literal.
    Numeric(At),
    /// A quoted string literal (span includes the quotes).
    Text(At),
    /// A translated string literal `_("...")` (span includes the markers).
    TranslatedText(At),
    /// A variable name to be resolved at render time.
    Variable(At),
}
impl Argument {
    /// Returns the span of the argument's *content*, i.e. the stored span
    /// with any surrounding quote / translation markers stripped off.
    pub fn content_at(&self) -> At {
        match self {
            // Variables and numbers carry no delimiters, so the stored span
            // already is the content span.
            Self::Variable(at) | Self::Numeric(at) => *at,
            Self::Text((start, len)) => {
                // Drop the opening and closing quote characters.
                (start + QUOTE_LEN, len - 2 * QUOTE_LEN)
            }
            Self::TranslatedText((start, len)) => {
                // Drop `_('` at the front and `')` at the back.
                (
                    start + START_TRANSLATE_LEN + QUOTE_LEN,
                    len - START_TRANSLATE_LEN - END_TRANSLATE_LEN - 2 * QUOTE_LEN,
                )
            }
        }
    }
}
impl<'t> TemplateContent<'t> for Argument {
    /// Returns the argument's content (delimiters stripped) as a slice of
    /// the template source.
    fn content(&self, template: TemplateString<'t>) -> &'t str {
        let (start, len) = self.content_at();
        &template.0[start..start + len]
    }
}
/// A single lexed filter, e.g. `default:"foo"` in `{{ x|default:"foo" }}`.
#[derive(Debug, PartialEq, Eq)]
pub struct FilterToken {
    /// Span of the filter name within the template.
    pub at: At,
    /// The optional `:argument` that follows the filter name.
    pub argument: Option<Argument>,
}
impl<'t> TemplateContent<'t> for FilterToken {
    /// Returns the filter name as a slice of the template source.
    fn content(&self, template: TemplateString<'t>) -> &'t str {
        let (start, len) = self.at;
        &template.0[start..start + len]
    }
}
/// The kind of value found at the start of a `{{ ... }}` expression.
#[derive(Debug, PartialEq)]
pub enum VariableToken {
    /// A (possibly dotted) variable name to resolve from the context.
    Variable,
    /// An integer literal of arbitrary size.
    Int(BigInt),
    /// A finite floating point literal.
    Float(f64),
}
/// Errors produced while lexing a variable expression and its filters.
#[derive(Error, Debug, Diagnostic, PartialEq, Eq)]
pub enum VariableLexerError {
    /// A filter argument such as `_spam` started with an underscore, which
    /// is reserved (only `_(` translation markers are allowed).
    #[error("Variables and attributes may not begin with underscores")]
    LeadingUnderscore {
        #[label("here")]
        at: SourceSpan,
    },
    /// Wraps a lower-level error from the shared lexing helpers.
    #[error(transparent)]
    #[diagnostic(transparent)]
    LexerError(#[from] LexerError),
    /// The text after a `|` did not start like a valid filter name.
    #[error("Expected a valid filter name")]
    InvalidFilterName {
        #[label("here")]
        at: SourceSpan,
    },
    /// The expression body was not a valid variable name.
    #[error("Expected a valid variable name")]
    InvalidVariableName {
        #[label("here")]
        at: SourceSpan,
    },
}
/// Lexes the body of a `{{ ... }}` expression into its leading token
/// (variable, int literal or finite float literal) plus a [`FilterLexer`]
/// over any trailing `|filter` segments.
///
/// `start` is the byte offset of `variable` within the full template and is
/// used to compute error spans. Returns `Ok(None)` for an empty expression.
pub fn lex_variable_or_filter(
    variable: &str,
    start: usize,
) -> Result<Option<(VariableToken, At, FilterLexer<'_>)>, VariableLexerError> {
    let rest = variable.trim_start();
    if rest.trim().is_empty() {
        return Ok(None);
    }
    // Shift `start` past the leading whitespace we just dropped.
    let start = start + variable.len() - rest.len();
    let content = trim_variable(rest);
    if content.is_empty() {
        return Err(VariableLexerError::InvalidVariableName {
            at: (start, rest.trim().len()).into(),
        });
    }
    let end = content.len();
    // Integer literals win over floats; anything else must be a valid
    // variable path.
    let token_type = if let Ok(num) = content.parse::<BigInt>() {
        VariableToken::Int(num)
    } else if let Ok(num) = content.parse::<f64>()
        && num.is_finite()
    {
        VariableToken::Float(num)
    } else {
        check_variable_attrs(content, start)?;
        VariableToken::Variable
    };
    let filters = FilterLexer::new(&rest[end..], start + end)?;
    Ok(Some((token_type, (start, end), filters)))
}
/// An iterator over the filters applied to a variable expression.
///
/// Yields one [`FilterToken`] per `|filter` segment; iteration stops after
/// the first error (the remainder is discarded).
#[derive(Debug)]
pub struct FilterLexer<'t> {
    /// The unlexed remainder of the expression (`""` once exhausted).
    rest: &'t str,
    /// Byte offset of `rest` within the full template.
    byte: usize,
}
impl<'t> FilterLexer<'t> {
    /// Creates a lexer positioned just after the first `|` of `variable`.
    ///
    /// With no `|` present the lexer starts (and stays) empty; a `|`
    /// followed only by whitespace is reported as an invalid remainder.
    fn new(variable: &'t str, start: usize) -> Result<Self, LexerError> {
        let Some(offset) = variable.find('|') else {
            return Ok(Self {
                rest: "",
                byte: start + variable.len(),
            });
        };
        let offset = offset + 1;
        let variable = &variable[offset..];
        let rest = variable.trim_start();
        if rest.is_empty() {
            Err(LexerError::InvalidRemainder {
                at: (start, 1).into(),
            })
        } else {
            Ok(Self {
                rest: rest.trim_end(),
                byte: start + offset + variable.len() - rest.len(),
            })
        }
    }
    /// Lexes a quoted string argument terminated by `end`, recording it as
    /// [`Argument::Text`].
    fn lex_text(
        &mut self,
        chars: &mut std::str::Chars,
        end: char,
    ) -> Result<Argument, VariableLexerError> {
        match lex_text(self.byte, self.rest, chars, end) {
            Ok((at, byte, rest)) => {
                self.rest = rest;
                self.byte = byte;
                Ok(Argument::Text(at))
            }
            Err(e) => {
                // On error, stop lexing: clearing `rest` ends iteration.
                self.rest = "";
                Err(e.into())
            }
        }
    }
    /// Lexes a `_('...')` / `_("...")` translated string argument.
    fn lex_translated(
        &mut self,
        chars: &mut std::str::Chars,
    ) -> Result<Argument, VariableLexerError> {
        match lex_translated(self.byte, self.rest, chars) {
            Ok((at, byte, rest)) => {
                self.rest = rest;
                self.byte = byte;
                Ok(Argument::TranslatedText(at))
            }
            Err(e) => {
                self.rest = "";
                Err(e.into())
            }
        }
    }
    /// Lexes a numeric argument; the shared helper is infallible.
    fn lex_numeric(&mut self) -> Argument {
        let (at, byte, rest) = lex_numeric(self.byte, self.rest);
        self.rest = rest;
        self.byte = byte;
        Argument::Numeric(at)
    }
    /// Lexes a variable name used as a filter argument.
    fn lex_variable_argument(&mut self) -> Result<Argument, VariableLexerError> {
        match lex_variable_argument(self.byte, self.rest) {
            Ok((at, byte, rest)) => {
                self.byte = byte;
                self.rest = rest;
                Ok(Argument::Variable(at))
            }
            Err(e) => {
                self.rest = "";
                Err(e.into())
            }
        }
    }
    /// Lexes a filter name and, if present, its `:argument`.
    fn lex_filter(&mut self) -> Result<FilterToken, VariableLexerError> {
        let filter = self.rest.trim_start();
        let start = self.rest.len() - filter.len();
        self.byte += start;
        self.rest = &self.rest[start..];
        // A filter name is a run of XID-continue characters starting with
        // an XID-start character.
        let end = filter
            .find(|c: char| !c.is_xid_continue())
            .unwrap_or(filter.len());
        let filter = &filter[..end];
        match filter.chars().next() {
            Some(c) if c.is_xid_start() => {
                let at = (self.byte, end);
                self.byte += end;
                self.rest = &self.rest[end..];
                let (remainder, _start_next) = self.remainder_to_filter_or_argument();
                match remainder {
                    "" => {
                        let argument = self.lex_argument()?;
                        Ok(FilterToken { at, argument })
                    }
                    _ => {
                        // Junk between the filter name and the next `|`/`:`.
                        let at = (self.byte, remainder.trim().len());
                        self.rest = "";
                        Err(LexerError::InvalidRemainder { at: at.into() }.into())
                    }
                }
            }
            _ => {
                let next = self.rest.find("|").unwrap_or(self.rest.len());
                let at = (self.byte, next);
                self.rest = "";
                Err(VariableLexerError::InvalidFilterName { at: at.into() })
            }
        }
    }
    /// Lexes the optional `:argument` following a filter name, dispatching
    /// on its first character.
    fn lex_argument(&mut self) -> Result<Option<Argument>, VariableLexerError> {
        let Some(a) = self.rest.find(":") else {
            return Ok(None);
        };
        // A `|` before the `:` means the colon belongs to a later filter.
        let next = match self.rest.find("|") {
            Some(f) if f < a => return Ok(None),
            _ => a + 1,
        };
        self.rest = &self.rest[next..];
        self.byte += next;
        let mut chars = self.rest.chars();
        Ok(Some(match chars.next().unwrap() {
            '_' => {
                if let Some('(') = chars.next() {
                    self.lex_translated(&mut chars)?
                } else {
                    // Bare leading underscores are reserved.
                    let end = self.rest.next_whitespace();
                    let at = (self.byte, end);
                    self.byte += self.rest.len();
                    self.rest = "";
                    return Err(VariableLexerError::LeadingUnderscore { at: at.into() });
                }
            }
            '\'' => self.lex_text(&mut chars, '\'')?,
            '"' => self.lex_text(&mut chars, '"')?,
            '0'..='9' | '-' => self.lex_numeric(),
            _ => self.lex_variable_argument()?,
        }))
    }
    /// Verifies only whitespace remains before the next delimiter, then
    /// advances past it and returns `token` unchanged.
    fn lex_remainder(
        &mut self,
        token: FilterToken,
        remainder: &'t str,
        start_next: usize,
    ) -> Result<FilterToken, VariableLexerError> {
        match remainder.find(|c: char| !c.is_whitespace()) {
            None => {
                self.rest = &self.rest[start_next..];
                self.byte += start_next;
                Ok(token)
            }
            Some(n) => {
                let at = (self.byte + n, remainder.trim().len());
                self.rest = "";
                Err(LexerError::InvalidRemainder { at: at.into() }.into())
            }
        }
    }
    /// Splits off the text up to the next `|` or `:` (whichever comes
    /// first), returning it with the offset just past that delimiter.
    fn remainder_to_filter_or_argument(&mut self) -> (&'t str, usize) {
        match (self.rest.find("|"), self.rest.find(":")) {
            (None, None) => (self.rest, self.rest.len()),
            (None, Some(a)) => (&self.rest[..a], a + 1),
            (Some(f), Some(a)) if a < f => (&self.rest[..a], a + 1),
            (Some(f), _) => (&self.rest[..f], f + 1),
        }
    }
}
impl Iterator for FilterLexer<'_> {
    type Item = Result<FilterToken, VariableLexerError>;
    /// Yields the next filter token, or the first error encountered
    /// (after which iteration ends because `rest` is cleared).
    fn next(&mut self) -> Option<Self::Item> {
        if self.rest.is_empty() {
            return None;
        }
        match self.lex_filter() {
            Err(e) => Some(Err(e)),
            Ok(token) => {
                let (remainder, start_next) = self.remainder_to_filter_or_argument();
                Some(self.lex_remainder(token, remainder, start_next))
            }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::types::IntoTemplateString;
    use crate::{END_TAG_LEN, START_TAG_LEN};
    // All span assertions below are `(start, len)` byte offsets into the
    // full template string, including the leading `{{ `.
    /// Resolves each lexed filter token (and its optional argument) back to
    /// the source text it spans.
    fn contents(
        template: &str,
        tokens: Vec<Result<FilterToken, VariableLexerError>>,
    ) -> Vec<(&str, Option<&str>)> {
        let template = template.into_template_string();
        tokens
            .iter()
            .map(|t| match t {
                Ok(t) => match t.argument {
                    Some(ref a) => (t.content(template), Some(a.content(template))),
                    None => (t.content(template), None),
                },
                Err(_) => unreachable!(),
            })
            .collect()
    }
    /// Strips the surrounding `{{` and `}}` markers from a template literal.
    fn trim_variable(template: &str) -> &str {
        &template[START_TAG_LEN..(template.len() - END_TAG_LEN)]
    }
    #[test]
    fn test_lex_empty() {
        let variable = "  ";
        assert!(
            lex_variable_or_filter(variable, START_TAG_LEN)
                .unwrap()
                .is_none()
        );
    }
    #[test]
    fn test_lex_variable() {
        let template = "{{ foo.bar }}";
        let variable = trim_variable(template);
        let (token, at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        assert_eq!(token, VariableToken::Variable);
        assert_eq!(at, (3, 7));
        assert_eq!(TemplateString(template).content(at), "foo.bar");
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![]);
    }
    #[test]
    fn test_lex_variable_index() {
        let template = "{{ 1 }}";
        let variable = trim_variable(template);
        let (token, at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        assert_eq!(token, VariableToken::Int(1.into()));
        assert_eq!(TemplateString(template).content(at), "1");
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![]);
    }
    #[test]
    fn test_lex_variable_negative_index() {
        let template = "{{ -1 }}";
        let variable = trim_variable(template);
        let (token, at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        assert_eq!(token, VariableToken::Int((-1).into()));
        assert_eq!(TemplateString(template).content(at), "-1");
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![]);
    }
    #[test]
    fn test_lex_variable_start_underscore() {
        let variable = " _foo.bar ";
        let err = lex_variable_or_filter(variable, START_TAG_LEN).unwrap_err();
        assert_eq!(
            err,
            LexerError::InvalidVariableName { at: (3, 4).into() }.into()
        );
    }
    #[test]
    fn test_lex_attribute_start_underscore() {
        let variable = " foo._bar ";
        let err = lex_variable_or_filter(variable, START_TAG_LEN).unwrap_err();
        assert_eq!(
            err,
            LexerError::InvalidVariableName { at: (7, 4).into() }.into()
        );
    }
    #[test]
    fn test_lex_attribute_index() {
        let template = "{{ foo.1 }}";
        let variable = trim_variable(template);
        let (token, at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        assert_eq!(token, VariableToken::Variable);
        assert_eq!(at, (3, 5));
        assert_eq!(TemplateString(template).content(at), "foo.1");
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![]);
    }
    #[test]
    fn test_lex_attribute_negative_index() {
        let template = "{{ foo.-1 }}";
        let variable = trim_variable(template);
        let err = lex_variable_or_filter(variable, START_TAG_LEN).unwrap_err();
        assert_eq!(
            err,
            LexerError::InvalidVariableName { at: (7, 2).into() }.into()
        );
    }
    #[test]
    fn test_lex_filter() {
        let template = "{{ foo.bar|title }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Ok(FilterToken {
                at: (11, 5),
                argument: None,
            })]
        );
        assert_eq!(contents(template, tokens), vec![("title", None)]);
    }
    #[test]
    fn test_lex_filter_empty() {
        let template = "{{ foo.bar| }}";
        let variable = trim_variable(template);
        let err = lex_variable_or_filter(variable, START_TAG_LEN).unwrap_err();
        assert_eq!(
            err,
            LexerError::InvalidRemainder { at: (10, 1).into() }.into()
        );
    }
    #[test]
    fn test_lex_filter_chain() {
        let template = "{{ foo.bar|title|length }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![
                Ok(FilterToken {
                    argument: None,
                    at: (11, 5),
                }),
                Ok(FilterToken {
                    argument: None,
                    at: (17, 6),
                }),
            ]
        );
        assert_eq!(
            contents(template, tokens),
            vec![("title", None), ("length", None)]
        );
    }
    #[test]
    fn test_lex_filter_remainder() {
        let template = "{{ foo.bar|title'foo' }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Err(
                LexerError::InvalidRemainder { at: (16, 5).into() }.into()
            )]
        );
    }
    #[test]
    fn test_lex_filter_invalid_start() {
        let template = "{{ foo.bar|'foo' }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Err(VariableLexerError::InvalidFilterName {
                at: (11, 5).into()
            })]
        );
    }
    #[test]
    fn test_lex_text_argument_single_quote() {
        let template = "{{ foo.bar|default:'foo' }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Ok(FilterToken {
                argument: Some(Argument::Text((19, 5))),
                at: (11, 7),
            })]
        );
        assert_eq!(contents(template, tokens), vec![("default", Some("foo"))]);
    }
    #[test]
    fn test_lex_text_argument_double_quote() {
        let template = "{{ foo.bar|default:\"foo\" }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Ok(FilterToken {
                argument: Some(Argument::Text((19, 5))),
                at: (11, 7),
            })]
        );
        assert_eq!(contents(template, tokens), vec![("default", Some("foo"))]);
    }
    #[test]
    fn test_lex_text_argument_escaped() {
        let template = "{{ foo.bar|default:'foo\\\'' }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Ok(FilterToken {
                argument: Some(Argument::Text((19, 7))),
                at: (11, 7),
            })]
        );
        assert_eq!(
            contents(template, tokens),
            vec![("default", Some("foo\\\'"))]
        );
    }
    #[test]
    fn test_lex_translated_text_argument() {
        let template = "{{ foo.bar|default:_('foo') }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Ok(FilterToken {
                argument: Some(Argument::TranslatedText((19, 8))),
                at: (11, 7),
            })]
        );
        assert_eq!(contents(template, tokens), vec![("default", Some("foo"))]);
    }
    #[test]
    fn test_lex_translated_text_argument_double_quoted() {
        let template = "{{ foo.bar|default:_(\"foo\") }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Ok(FilterToken {
                argument: Some(Argument::TranslatedText((19, 8))),
                at: (11, 7),
            })]
        );
        assert_eq!(contents(template, tokens), vec![("default", Some("foo"))]);
    }
    #[test]
    fn test_lex_numeric_argument() {
        let template = "{{ foo.bar|default:500 }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Ok(FilterToken {
                argument: Some(Argument::Numeric((19, 3))),
                at: (11, 7),
            })]
        );
        assert_eq!(contents(template, tokens), vec![("default", Some("500"))]);
    }
    #[test]
    fn test_lex_numeric_argument_negative() {
        let template = "{{ foo.bar|default:-0.5 }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Ok(FilterToken {
                argument: Some(Argument::Numeric((19, 4))),
                at: (11, 7),
            })]
        );
        assert_eq!(contents(template, tokens), vec![("default", Some("-0.5"))]);
    }
    #[test]
    fn test_lex_numeric_argument_scientific() {
        let template = "{{ foo.bar|default:5.2e3 }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Ok(FilterToken {
                argument: Some(Argument::Numeric((19, 5))),
                at: (11, 7),
            })]
        );
        assert_eq!(contents(template, tokens), vec![("default", Some("5.2e3"))]);
    }
    #[test]
    fn test_lex_numeric_argument_scientific_negative_exponent() {
        // Django mishandles this case, so we do too:
        // https://code.djangoproject.com/ticket/35816
        let template = "{{ foo.bar|default:5.2e-3 }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![
                Err(LexerError::InvalidRemainder { at: (23, 2).into() }.into()),
                /* When fixed we can do:
                Ok(FilterToken {
                    argument: Some(Argument::Numeric((19, 6))),
                    at: (11, 7),
                })
                */
            ]
        );
        //assert_eq!(contents(template, tokens), vec![("default", Some("5.2e-3"))]);
    }
    #[test]
    fn test_lex_variable_argument() {
        let template = "{{ foo.bar|default:spam }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Ok(FilterToken {
                argument: Some(Argument::Variable((19, 4))),
                at: (11, 7),
            })]
        );
        assert_eq!(contents(template, tokens), vec![("default", Some("spam"))]);
    }
    #[test]
    fn test_lex_variable_argument_then_filter() {
        let template = "{{ foo.bar|default:spam|title }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![
                Ok(FilterToken {
                    argument: Some(Argument::Variable((19, 4))),
                    at: (11, 7),
                }),
                Ok(FilterToken {
                    argument: None,
                    at: (24, 5),
                }),
            ]
        );
        assert_eq!(
            contents(template, tokens),
            vec![("default", Some("spam")), ("title", None)]
        );
    }
    #[test]
    fn test_lex_string_argument_then_filter() {
        let template = "{{ foo.bar|default:\"spam\"|title }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![
                Ok(FilterToken {
                    argument: Some(Argument::Text((19, 6))),
                    at: (11, 7),
                }),
                Ok(FilterToken {
                    argument: None,
                    at: (26, 5),
                }),
            ]
        );
        assert_eq!(
            contents(template, tokens),
            vec![("default", Some("spam")), ("title", None)]
        );
    }
    #[test]
    fn test_lex_argument_with_leading_underscore() {
        let template = "{{ foo.bar|default:_spam }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Err(VariableLexerError::LeadingUnderscore {
                at: (19, 5).into()
            })]
        );
    }
    #[test]
    fn test_lex_argument_with_attribute_underscore() {
        let template = "{{ foo.bar|default:spam._eggs }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Err(
                LexerError::InvalidVariableName { at: (24, 5).into() }.into()
            )]
        );
    }
    #[test]
    fn test_lex_argument_with_only_underscore() {
        let template = "{{ foo.bar|default:_ }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Err(VariableLexerError::LeadingUnderscore {
                at: (19, 1).into()
            })]
        );
    }
    #[test]
    fn test_lex_text_argument_incomplete() {
        let template = "{{ foo.bar|default:'foo }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        let error = LexerError::IncompleteString { at: (19, 4).into() };
        assert_eq!(tokens, vec![Err(error.into())]);
    }
    #[test]
    fn test_lex_translated_text_argument_incomplete() {
        let template = "{{ foo.bar|default:_('foo' }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        let error = LexerError::IncompleteTranslatedString { at: (19, 7).into() };
        assert_eq!(tokens, vec![Err(error.into())]);
    }
    #[test]
    fn test_lex_translated_text_argument_incomplete_string() {
        let template = "{{ foo.bar|default:_('foo }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        let error = LexerError::IncompleteString { at: (21, 4).into() };
        assert_eq!(tokens, vec![Err(error.into())]);
    }
    #[test]
    fn test_lex_translated_text_argument_incomplete_string_double_quotes() {
        let template = "{{ foo.bar|default:_(\"foo }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        let error = LexerError::IncompleteString { at: (21, 4).into() };
        assert_eq!(tokens, vec![Err(error.into())]);
    }
    #[test]
    fn test_lex_translated_text_argument_missing_string() {
        let template = "{{ foo.bar|default:_( }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        let error = LexerError::MissingTranslatedString { at: (19, 2).into() };
        assert_eq!(tokens, vec![Err(error.into())]);
    }
    #[test]
    fn test_lex_translated_text_argument_missing_string_trailing_chars() {
        let template = "{{ foo.bar|default:_(foo) }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        let error = LexerError::MissingTranslatedString { at: (19, 6).into() };
        assert_eq!(tokens, vec![Err(error.into())]);
    }
    #[test]
    fn test_lex_filter_remainder_before_argument() {
        let template = "{{ foo.bar|default'spam':title }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Err(
                LexerError::InvalidRemainder { at: (18, 6).into() }.into()
            )]
        );
    }
    #[test]
    fn test_lex_filter_remainder_before_filter() {
        let template = "{{ foo.bar|title'spam'|title }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Err(
                LexerError::InvalidRemainder { at: (16, 6).into() }.into()
            )]
        );
    }
    #[test]
    fn test_lex_string_argument_remainder() {
        let template = "{{ foo.bar|default:\"spam\"title }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Err(
                LexerError::InvalidRemainder { at: (25, 5).into() }.into()
            )]
        );
    }
    #[test]
    fn test_lex_string_argument_remainder_before_filter() {
        let template = "{{ foo.bar|default:\"spam\"title|title }}";
        let variable = trim_variable(template);
        let (_token, _at, lexer) = lex_variable_or_filter(variable, START_TAG_LEN)
            .unwrap()
            .unwrap();
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![Err(
                LexerError::InvalidRemainder { at: (25, 5).into() }.into()
            )]
        );
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-lexer/src/lib.rs | dtl-lexer/src/lib.rs | use crate::types::TemplateString;
pub mod common;
pub mod core;
pub mod tag;
pub mod types;
pub mod variable;
/// Byte length of the opening markers `{{`, `{%` and `{#`.
pub const START_TAG_LEN: usize = 2;
/// Byte length of the closing markers `}}`, `%}` and `#}`.
const END_TAG_LEN: usize = 2;
/// Byte length of the translation opener `_(`.
const START_TRANSLATE_LEN: usize = 2;
/// Byte length of the translation closer `)`.
const END_TRANSLATE_LEN: usize = 1;
/// Byte length of a single quote character.
const QUOTE_LEN: usize = 1;
/// Maps a lexed token back to the slice of the template it came from.
pub trait TemplateContent<'t> {
    /// Returns the content of `self` as a slice of `template`.
    fn content(&self, template: TemplateString<'t>) -> &'t str;
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-lexer/src/core.rs | dtl-lexer/src/core.rs | use crate::types::{At, TemplateString};
use crate::{END_TAG_LEN, START_TAG_LEN};
/// The closing marker the lexer is currently searching for.
enum EndTag {
    /// `}}`
    Variable,
    /// `%}`
    Tag,
    /// `#}`
    Comment,
}
/// The category of a top-level template token.
#[derive(Debug, PartialEq, Eq)]
pub enum TokenType {
    /// Literal text between tags.
    Text,
    /// A `{{ ... }}` variable expression.
    Variable,
    /// A `{% ... %}` block tag.
    Tag,
    /// A `{# ... #}` comment.
    Comment,
}
/// A lexed template token: its category plus its `(start, len)` span.
#[derive(Debug, PartialEq, Eq)]
pub struct Token {
    /// What kind of token this is.
    pub token_type: TokenType,
    /// Span of the whole token (markers included) within the template.
    pub at: At,
}
impl Token {
    /// Wraps `at` in a [`TokenType::Text`] token.
    fn text(at: At) -> Self {
        Self { token_type: TokenType::Text, at }
    }
    /// Wraps `at` in a [`TokenType::Variable`] token.
    fn variable(at: At) -> Self {
        Self { token_type: TokenType::Variable, at }
    }
    /// Wraps `at` in a [`TokenType::Tag`] token.
    fn tag(at: At) -> Self {
        Self { token_type: TokenType::Tag, at }
    }
    /// Wraps `at` in a [`TokenType::Comment`] token.
    fn comment(at: At) -> Self {
        Self { token_type: TokenType::Comment, at }
    }
}
impl<'t> Token {
    /// Returns the part of `template` this token refers to, with the
    /// surrounding `{{ }}` / `{% %}` / `{# #}` markers stripped off for
    /// non-text tokens.
    pub fn content(&self, template: TemplateString<'t>) -> &'t str {
        let at = match self.token_type {
            // Text tokens carry no delimiters; use the span as stored.
            TokenType::Text => self.at,
            // Bug fix: the marker-stripping arithmetic used to run before
            // the match, so `len - START_TAG_LEN - END_TAG_LEN` underflowed
            // (panicking in debug builds) for text tokens shorter than the
            // combined marker length. It now runs only for tag-like tokens,
            // whose spans always include both markers.
            TokenType::Variable | TokenType::Tag | TokenType::Comment => {
                let (start, len) = self.at;
                (start + START_TAG_LEN, len - START_TAG_LEN - END_TAG_LEN)
            }
        };
        template.content(at)
    }
}
/// The top-level template lexer: splits a template into text, variable,
/// tag and comment tokens.
pub struct Lexer<'t> {
    /// The full template, kept so token content can be re-sliced.
    template: TemplateString<'t>,
    /// The unlexed remainder of the template.
    rest: &'t str,
    /// Byte offset of `rest` within the template.
    byte: usize,
    /// When `Some`, the lexer is inside `{% verbatim %}`; holds the opening
    /// tag's content, used to match the corresponding end tag.
    verbatim: Option<&'t str>,
}
impl<'t> Lexer<'t> {
    /// Creates a lexer positioned at the start of `template`.
    pub fn new(template: TemplateString<'t>) -> Self {
        Self {
            template,
            rest: template.0,
            byte: 0,
            verbatim: None,
        }
    }
    /// Consumes plain text up to the next `{%`, `{{` or `{#` marker (or to
    /// the end of the template) and returns it as a text token.
    fn lex_text(&mut self) -> Token {
        let next_tag = self.rest.find("{%");
        let next_variable = self.rest.find("{{");
        let next_comment = self.rest.find("{#");
        // The earliest of the three markers ends the text run.
        let next = [next_tag, next_variable, next_comment]
            .iter()
            .filter_map(|n| *n)
            .min();
        let len = match next {
            None => {
                let len = self.rest.len();
                self.rest = "";
                len
            }
            Some(n) => {
                self.rest = &self.rest[n..];
                n
            }
        };
        let at = (self.byte, len);
        self.byte += len;
        Token::text(at)
    }
    /// Consumes everything that is left and returns it as a text token.
    fn lex_text_to_end(&mut self) -> Token {
        let len = self.rest.len();
        let at = (self.byte, len);
        self.byte += len;
        self.rest = "";
        Token::text(at)
    }
    /// Lexes from an opening marker to the matching `end_tag` closer.
    ///
    /// An unterminated tag, or one with a newline before the closer, is
    /// emitted as plain text instead.
    fn lex_tag(&mut self, end_tag: EndTag) -> Token {
        let end_str = match end_tag {
            EndTag::Variable => "}}",
            EndTag::Tag => "%}",
            EndTag::Comment => "#}",
        };
        let Some(n) = self.rest.find(end_str) else {
            // No closer: the rest of the template is plain text.
            let len = self.rest.len();
            let at = (self.byte, len);
            self.byte += len;
            self.rest = "";
            return Token::text(at);
        };
        // This can be removed if https://code.djangoproject.com/ticket/35899 lands
        match self.rest.find("\n") {
            Some(newline) if newline < n => {
                let at = (self.byte, newline + 1);
                self.byte += newline + 1;
                self.rest = &self.rest[newline + 1..];
                return Token::text(at);
            }
            _ => {}
        }
        let len = n + end_str.len();
        self.rest = &self.rest[len..];
        let at = (self.byte, len);
        self.byte += len;
        match end_tag {
            EndTag::Variable => Token::variable(at),
            EndTag::Tag => Token::tag(at),
            EndTag::Comment => Token::comment(at),
        }
    }
    /// Lexes inside a `{% verbatim %}` block: everything up to the matching
    /// `{% endverbatim %}` tag is emitted as text, then the end tag itself
    /// as a tag token.
    fn lex_verbatim(&mut self, verbatim: &'t str) -> Token {
        let verbatim = verbatim.trim();
        self.verbatim = None;
        let mut rest = self.rest;
        let mut index = 0;
        loop {
            let Some(start_tag) = rest.find("{%") else {
                return self.lex_text_to_end();
            };
            rest = &rest[start_tag..];
            let Some(end_tag) = rest.find("%}") else {
                return self.lex_text_to_end();
            };
            let inner = &rest[2..end_tag].trim();
            // Check we have the right endverbatim tag
            if inner.len() < 3 || &inner[3..] != verbatim {
                rest = &rest[end_tag + 2..];
                index += start_tag + end_tag + 2;
                continue;
            }
            index += start_tag;
            if index == 0 {
                // Return the endverbatim tag since we have no text
                let tag_len = end_tag + "%}".len();
                let at = (self.byte, tag_len);
                self.byte += tag_len;
                self.rest = &self.rest[tag_len..];
                return Token::tag(at);
            } else {
                self.rest = &self.rest[index..];
                let at = (self.byte, index);
                self.byte += index;
                return Token::text(at);
            }
        }
    }
}
impl Iterator for Lexer<'_> {
    type Item = Token;

    /// Produces the next token, or `None` once the template is exhausted.
    fn next(&mut self) -> Option<Self::Item> {
        if self.rest.is_empty() {
            return None;
        }
        // Inside a `{% verbatim %}` section everything is handled specially.
        if let Some(verbatim) = self.verbatim {
            return Some(self.lex_verbatim(verbatim));
        }
        let token = match self.rest.get(..START_TAG_LEN) {
            Some("{{") => self.lex_tag(EndTag::Variable),
            Some("{#") => self.lex_tag(EndTag::Comment),
            Some("{%") => {
                let tag = self.lex_tag(EndTag::Tag);
                // A complete `{% verbatim %}` tag switches the lexer into
                // verbatim mode until the matching end tag is found.
                if matches!(tag.token_type, TokenType::Tag) {
                    let content = tag.content(self.template).trim();
                    if content == "verbatim" || content.starts_with("verbatim ") {
                        self.verbatim = Some(content);
                    }
                }
                tag
            }
            _ => self.lex_text(),
        };
        Some(token)
    }
}
#[cfg(test)]
mod tests {
    // Exercises the template lexer: plain text, the three tag kinds,
    // incomplete constructs (downgraded to text) and `{% verbatim %}` mode.
    use super::*;
    /// Maps each token back to the substring of `template` it covers.
    fn contents<'t>(template: impl Into<TemplateString<'t>>, tokens: Vec<Token>) -> Vec<&'t str> {
        let template = template.into();
        tokens.iter().map(|t| t.content(template)).collect()
    }
    #[test]
    fn test_lex_empty() {
        let template = "";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![]);
    }
    #[test]
    fn test_lex_text() {
        let template = "Just some text";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![Token::text((0, 14))]);
        assert_eq!(contents(template, tokens), vec![template]);
    }
    #[test]
    fn test_lex_text_whitespace() {
        let template = "    ";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![Token::text((0, 4))]);
        assert_eq!(contents(template, tokens), vec![template]);
    }
    #[test]
    fn test_lex_comment() {
        let template = "{# comment #}";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![Token::comment((0, 13))]);
        assert_eq!(contents(template, tokens), vec![" comment "]);
    }
    #[test]
    fn test_lex_variable() {
        let template = "{{ foo.bar|title }}";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![Token::variable((0, 19))]);
        assert_eq!(contents(template, tokens), vec![" foo.bar|title "]);
    }
    #[test]
    fn test_lex_tag() {
        let template = "{% for foo in bar %}";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![Token::tag((0, 20))]);
        assert_eq!(contents(template, tokens), vec![" for foo in bar "]);
    }
    // An unterminated construct is lexed as plain text, not an error.
    #[test]
    fn test_lex_incomplete_comment() {
        let template = "{# comment #";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![Token::text((0, 12))]);
        assert_eq!(contents(template, tokens), vec![template]);
    }
    #[test]
    fn test_lex_incomplete_variable() {
        let template = "{{ foo.bar|title }";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![Token::text((0, 18))]);
        assert_eq!(contents(template, tokens), vec![template]);
    }
    #[test]
    fn test_lex_incomplete_tag() {
        let template = "{% for foo in bar %";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![Token::text((0, 19))]);
        assert_eq!(contents(template, tokens), vec![template]);
    }
    #[test]
    fn test_django_example() {
        let template = "text\n{% if test %}{{ varvalue }}{% endif %}{#comment {{not a var}} {%not a block%} #}end text";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![
                Token::text((0, 5)),
                Token::tag((5, 13)),
                Token::variable((18, 14)),
                Token::tag((32, 11)),
                Token::comment((43, 42)),
                Token::text((85, 8)),
            ]
        );
        assert_eq!(
            contents(template, tokens),
            vec![
                "text\n",
                " if test ",
                " varvalue ",
                " endif ",
                "comment {{not a var}} {%not a block%} ",
                "end text",
            ]
        );
    }
    // `{% verbatim %}` tests: markers inside the section come out as text.
    #[test]
    fn test_verbatim_with_variable() {
        let template = "{% verbatim %}{{bare   }}{% endverbatim %}";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![
                Token::tag((0, 14)),
                Token::text((14, 11)),
                Token::tag((25, 17)),
            ]
        );
        assert_eq!(
            contents(template, tokens),
            vec![" verbatim ", "{{bare   }}", " endverbatim "]
        );
    }
    #[test]
    fn test_verbatim_with_tag() {
        let template = "{% verbatim %}{% endif %}{% endverbatim %}";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![
                Token::tag((0, 14)),
                Token::text((14, 11)),
                Token::tag((25, 17)),
            ]
        );
        assert_eq!(
            contents(template, tokens),
            vec![" verbatim ", "{% endif %}", " endverbatim "]
        );
    }
    #[test]
    fn test_verbatim_with_verbatim_tag() {
        let template = "{% verbatim %}It's the {% verbatim %} tag{% endverbatim %}";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![
                Token::tag((0, 14)),
                Token::text((14, 27)),
                Token::tag((41, 17)),
            ]
        );
        assert_eq!(
            contents(template, tokens),
            vec![" verbatim ", "It's the {% verbatim %} tag", " endverbatim "]
        );
    }
    #[test]
    fn test_verbatim_nested() {
        let template = "{% verbatim %}{% verbatim %}{% endverbatim %}{% endverbatim %}";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![
                Token::tag((0, 14)),
                Token::text((14, 14)),
                Token::tag((28, 17)),
                Token::tag((45, 17)),
            ]
        );
        assert_eq!(
            contents(template, tokens),
            vec![
                " verbatim ",
                "{% verbatim %}",
                " endverbatim ",
                " endverbatim ",
            ]
        );
    }
    #[test]
    fn test_verbatim_adjacent() {
        let template = "{% verbatim %}{% endverbatim %}{% verbatim %}{% endverbatim %}";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![
                Token::tag((0, 14)),
                Token::tag((14, 17)),
                Token::tag((31, 14)),
                Token::tag((45, 17)),
            ]
        );
        assert_eq!(
            contents(template, tokens),
            vec![" verbatim ", " endverbatim ", " verbatim ", " endverbatim "]
        );
    }
    // A named verbatim section is only closed by the matching named end tag.
    #[test]
    fn test_verbatim_special() {
        let template =
            "{% verbatim special %}Don't {% endverbatim %} just yet{% endverbatim special %}";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(
            tokens,
            vec![
                Token::tag((0, 22)),
                Token::text((22, 32)),
                Token::tag((54, 25)),
            ]
        );
        assert_eq!(
            contents(template, tokens),
            vec![
                " verbatim special ",
                "Don't {% endverbatim %} just yet",
                " endverbatim special ",
            ]
        );
    }
    #[test]
    fn test_verbatim_open_tag() {
        let template = "{% verbatim %}Don't {% ";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![Token::tag((0, 14)), Token::text((14, 9))]);
        assert_eq!(contents(template, tokens), vec![" verbatim ", "Don't {% "]);
    }
    #[test]
    fn test_verbatim_no_tag() {
        let template = "{% verbatim %}Don't end verbatim";
        let lexer = Lexer::new(template.into());
        let tokens: Vec<_> = lexer.collect();
        assert_eq!(tokens, vec![Token::tag((0, 14)), Token::text((14, 18))]);
        assert_eq!(
            contents(template, tokens),
            vec![" verbatim ", "Don't end verbatim"]
        );
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-lexer/src/types.rs | dtl-lexer/src/types.rs | use crate::TemplateContent;
/// Byte span inside a template: `(start_offset, length)`.
pub type At = (usize, usize);

/// Cheap, copyable view of the full template source text.
#[derive(Clone, Copy)]
pub struct TemplateString<'t>(pub &'t str);

impl<'t> TemplateString<'t> {
    /// Returns the slice of the template covered by `at`.
    pub fn content(&self, at: At) -> &'t str {
        let (offset, length) = at;
        &self.0[offset..offset + length]
    }
}

impl<'t> From<&'t str> for TemplateString<'t> {
    fn from(value: &'t str) -> Self {
        Self(value)
    }
}
/// Conversion into a [`TemplateString`]; mirrors `From<&str>` while keeping
/// the lifetime of the borrowed source explicit at call sites.
pub trait IntoTemplateString<'t> {
    fn into_template_string(self) -> TemplateString<'t>;
}
impl<'t> IntoTemplateString<'t> for &'t str {
    fn into_template_string(self) -> TemplateString<'t> {
        TemplateString(self)
    }
}
/// Iterates over the dot-separated parts of a variable expression, yielding
/// each part together with its byte span in the template.
pub struct PartsIterator<'t> {
    variable: &'t str,
    start: usize,
}

impl<'t> Iterator for PartsIterator<'t> {
    type Item = (&'t str, At);

    fn next(&mut self) -> Option<Self::Item> {
        if self.variable.is_empty() {
            return None;
        }
        if let Some(dot) = self.variable.find('.') {
            // Yield everything before the dot and step past the separator.
            let (head, tail) = self.variable.split_at(dot);
            let at = (self.start, dot);
            self.variable = &tail[1..];
            self.start += dot + 1;
            Some((head, at))
        } else {
            // Final part: exhaust the iterator.
            let last = self.variable;
            self.variable = "";
            Some((last, (self.start, last.len())))
        }
    }
}
/// A dotted variable reference (e.g. `foo.bar`) located in the template.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Variable {
    // Span of the full dotted expression.
    pub at: At,
}
impl<'t> Variable {
    pub fn new(at: At) -> Self {
        Self { at }
    }
    /// Iterates over the dot-separated parts of the variable, each paired
    /// with its own span in the template.
    pub fn parts(&self, template: TemplateString<'t>) -> impl Iterator<Item = (&'t str, At)> {
        let start = self.at.0;
        let variable = template.content(self.at);
        PartsIterator { variable, start }
    }
}
impl<'t> TemplateContent<'t> for Variable {
    fn content(&self, template: TemplateString<'t>) -> &'t str {
        template.content(self.at)
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-lexer/src/common.rs | dtl-lexer/src/common.rs | use miette::{Diagnostic, SourceSpan};
use thiserror::Error;
use unicode_xid::UnicodeXID;
use crate::types::At;
use crate::{END_TRANSLATE_LEN, QUOTE_LEN, START_TRANSLATE_LEN};
/// Helpers for locating whitespace boundaries in a template slice.
pub trait NextChar {
    /// Byte offset of the first whitespace character, or the string length
    /// when there is none.
    fn next_whitespace(&self) -> usize;
    /// Byte offset of the first non-whitespace character, or the string
    /// length when there is none.
    fn next_non_whitespace(&self) -> usize;
}

impl NextChar for str {
    fn next_whitespace(&self) -> usize {
        match self.find(char::is_whitespace) {
            Some(index) => index,
            None => self.len(),
        }
    }

    fn next_non_whitespace(&self) -> usize {
        match self.find(|c: char| !c.is_whitespace()) {
            Some(index) => index,
            None => self.len(),
        }
    }
}
/// Errors raised while lexing variables, filter arguments, string literals
/// and `_("...")` translation markers.
#[derive(Clone, Error, Debug, Diagnostic, PartialEq, Eq)]
pub enum LexerError {
    /// A string literal was opened but never closed.
    #[error("Expected a complete string literal")]
    IncompleteString {
        #[label("here")]
        at: SourceSpan,
    },
    /// A `_("...")` marker was missing its closing parenthesis.
    #[error("Expected a complete translation string")]
    IncompleteTranslatedString {
        #[label("here")]
        at: SourceSpan,
    },
    /// A variable segment was empty, started with `_`, or contained `-`.
    #[error("Expected a valid variable name")]
    InvalidVariableName {
        #[label("here")]
        at: SourceSpan,
    },
    /// Leftover input that could not be lexed.
    #[error("Could not parse the remainder")]
    InvalidRemainder {
        #[label("here")]
        at: SourceSpan,
    },
    /// A `_( ... )` marker did not contain a string literal.
    #[error("Expected a string literal within translation")]
    MissingTranslatedString {
        #[label("here")]
        at: SourceSpan,
    },
}
/// Lexes a variable expression (with optional filters and their arguments)
/// from the start of `rest`.
///
/// Consumes identifier characters plus `.`, `|`, `:` and `-`, and anything
/// inside a single- or double-quoted string literal. Returns the span of the
/// consumed text, the updated byte offset and the remaining input.
pub fn lex_variable(byte: usize, rest: &str) -> (At, usize, &str) {
    // Holds the active quote character while inside a string literal.
    let mut in_text = None;
    let mut end = 0;
    for c in rest.chars() {
        match c {
            '"' => match in_text {
                None => in_text = Some('"'),
                Some('"') => in_text = None,
                _ => {}
            },
            '\'' => match in_text {
                None => in_text = Some('\''),
                Some('\'') => in_text = None,
                _ => {}
            },
            // Any character is allowed inside a string literal.
            _ if in_text.is_some() => {}
            c if !c.is_xid_continue() && c != '.' && c != '|' && c != ':' && c != '-' => break,
            _ => {}
        }
        end += c.len_utf8();
    }
    let at = (byte, end);
    let rest = &rest[end..];
    let byte = byte + end;
    (at, byte, rest)
}
/// Lexes a string literal whose opening quote has already been consumed from
/// `chars`; `end` is the expected closing quote character.
///
/// A backslash escapes the following character. On success returns the span
/// of the whole literal (both quotes included), the updated byte offset and
/// the remaining input; returns [`LexerError::IncompleteString`] when the
/// input runs out before the closing quote.
pub fn lex_text<'t>(
    byte: usize,
    rest: &'t str,
    chars: &mut std::str::Chars,
    end: char,
) -> Result<(At, usize, &'t str), LexerError> {
    // Starts at 1 to account for the already-consumed opening quote.
    let mut count = 1;
    loop {
        let Some(next) = chars.next() else {
            let at = (byte, count);
            return Err(LexerError::IncompleteString { at: at.into() });
        };
        count += next.len_utf8();
        if next == '\\' {
            // Skip the escaped character, whatever it is.
            let Some(next) = chars.next() else {
                let at = (byte, count);
                return Err(LexerError::IncompleteString { at: at.into() });
            };
            count += next.len_utf8();
        } else if next == end {
            let at = (byte, count);
            let rest = &rest[count..];
            let byte = byte + count;
            return Ok((at, byte, rest));
        }
    }
}
/// Lexes a translation marker (`_('...')` or `_("...")`) whose opening `_(`
/// starts `rest` and has already been consumed from `chars`.
///
/// Returns the span of the whole marker (including `_(` and `)`), the
/// updated byte offset and the remaining input. Fails when no string literal
/// follows `_(` or when the closing `)` is missing.
pub fn lex_translated<'t>(
    byte: usize,
    rest: &'t str,
    chars: &mut std::str::Chars,
) -> Result<(At, usize, &'t str), LexerError> {
    let start = byte;
    let byte = byte + START_TRANSLATE_LEN;
    let rest = &rest[START_TRANSLATE_LEN..];
    let (_at, byte, rest) = match chars.next() {
        None => {
            let at = (start, START_TRANSLATE_LEN);
            return Err(LexerError::MissingTranslatedString { at: at.into() });
        }
        Some('\'') => lex_text(byte, rest, chars, '\'')?,
        Some('"') => lex_text(byte, rest, chars, '"')?,
        _ => {
            let at = (start, rest.len() + START_TRANSLATE_LEN);
            return Err(LexerError::MissingTranslatedString { at: at.into() });
        }
    };
    if let Some(')') = chars.next() {
        let byte = byte + END_TRANSLATE_LEN;
        let rest = &rest[END_TRANSLATE_LEN..];
        let at = (start, byte - start);
        Ok((at, byte, rest))
    } else {
        let at = (start, byte - start);
        Err(LexerError::IncompleteTranslatedString { at: at.into() })
    }
}
/// Lexes a numeric literal from the start of `rest`: a run of ASCII digits
/// plus `-`, `.` and `e` characters.
///
/// Returns the span of the number, the updated byte offset and the
/// remaining input.
pub fn lex_numeric(byte: usize, rest: &str) -> (At, usize, &str) {
    let end = rest
        .find(|c: char| !(c.is_ascii_digit() || c == '-' || c == '.' || c == 'e'))
        .unwrap_or(rest.len());
    let content = &rest[..end];
    // Match django bug: a '-' appearing after the first character truncates
    // the number there. `get(1..)` (instead of `content[1..]`) avoids a
    // panic when `content` is empty, i.e. when `rest` starts with a
    // non-numeric character; behaviour is otherwise unchanged since
    // `content` is pure ASCII.
    let end = match content.get(1..).and_then(|tail| tail.find('-')) {
        Some(n) => n + 1,
        None => end,
    };
    // End match django bug
    let at = (byte, end);
    (at, byte + end, &rest[end..])
}
/// Returns the leading portion of `variable` made up of identifier
/// characters, `.` and `-`, stopping at the first character outside that
/// set.
pub fn trim_variable(variable: &str) -> &str {
    variable
        .find(|c: char| !c.is_xid_continue() && c != '.' && c != '-')
        .map_or(variable, |end| &variable[..end])
}
/// Validates a dotted variable path (e.g. `foo.bar.baz`).
///
/// Rules enforced:
/// - the first segment may not contain `-` after its first character;
/// - later segments may not contain `-` at all;
/// - no segment may be empty or start with `_`.
///
/// `start` is the byte offset of `variable` in the template, used to build
/// error spans.
pub fn check_variable_attrs(variable: &str, start: usize) -> Result<(), LexerError> {
    // Byte offset of the current segment relative to `start`.
    let mut offset = 0;
    for (i, var) in variable.split('.').enumerate() {
        if i == 0 {
            // Skip the first character, so a leading '-' is permitted here.
            let mut chars = var.chars();
            chars.next();
            if chars.any(|c| c == '-') {
                let at = (start + offset, var.len());
                return Err(LexerError::InvalidVariableName { at: at.into() });
            }
        } else if var.find('-').is_some() {
            let at = (start + offset, var.len());
            return Err(LexerError::InvalidVariableName { at: at.into() });
        }
        match var.chars().next() {
            Some(c) if c != '_' => {
                // Advance past this segment and its trailing '.'.
                offset += var.len() + 1;
                continue;
            }
            // Empty segment or one starting with an underscore.
            _ => {
                let at = (start + offset, var.len());
                return Err(LexerError::InvalidVariableName { at: at.into() });
            }
        }
    }
    Ok(())
}
/// Lexes a variable used as an argument: takes the leading dotted
/// identifier path from `rest` and validates its attribute segments.
pub fn lex_variable_argument(byte: usize, rest: &str) -> Result<(At, usize, &str), LexerError> {
    let content = trim_variable(rest);
    check_variable_attrs(content, byte)?;
    let end = content.len();
    let at = (byte, end);
    Ok((at, byte + end, &rest[end..]))
}
/// Converts the span of a quoted string token into the span of its inner
/// text, dropping the opening and closing quotes.
pub fn text_content_at(at: At) -> At {
    let (start, len) = at;
    (start + QUOTE_LEN, len - 2 * QUOTE_LEN)
}
/// Converts the span of a `_("...")` translation token into the span of the
/// inner text, dropping the `_(`/`)` wrapper and both quotes.
pub fn translated_text_content_at(at: At) -> At {
    let (start, len) = at;
    let prefix = START_TRANSLATE_LEN + QUOTE_LEN;
    (start + prefix, len - prefix - END_TRANSLATE_LEN - QUOTE_LEN)
}
#[cfg(test)]
mod tests {
    // Regression tests for multi-byte (non-ASCII) input: spans and offsets
    // are byte-based, not char-based.
    use super::*;
    #[test]
    fn test_lex_text_non_ascii() {
        let template = "'N\u{ec655}'";
        let mut chars = template.chars();
        chars.next();
        let (at, byte, rest) = lex_text(1, template, &mut chars, '\'').unwrap();
        assert_eq!(at, (1, 7));
        assert_eq!(byte, 8);
        assert_eq!(rest, "");
    }
    #[test]
    fn test_lex_argument_non_ascii() {
        let template = "ZJ5G4YXZJUH6|default:\"#`´କ¯\"";
        let (at, byte, rest) = lex_variable(0, template);
        assert_eq!(at, (0, 32));
        assert_eq!(byte, 32);
        assert_eq!(rest, "");
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-lexer/src/tag.rs | dtl-lexer/src/tag.rs | pub mod autoescape;
pub mod custom_tag;
pub mod forloop;
pub mod ifcondition;
pub mod include;
pub mod load;
pub mod lorem;
use crate::common::NextChar;
use crate::types::{At, TemplateString};
use crate::{END_TAG_LEN, START_TAG_LEN, TemplateContent};
use miette::{Diagnostic, SourceSpan};
use thiserror::Error;
use unicode_xid::UnicodeXID;
/// Errors raised while splitting a `{% ... %}` block tag into its name and
/// argument parts.
#[derive(Error, Debug, Diagnostic, Eq, PartialEq)]
pub enum TagLexerError {
    /// The tag name contains a character that is not valid in an identifier.
    #[error("Invalid block tag name")]
    InvalidTagName {
        #[label("here")]
        at: SourceSpan,
    },
    /// The tag contains nothing but whitespace.
    #[error("Empty block tag")]
    EmptyTag {
        #[label("here")]
        at: SourceSpan,
    },
}
/// The span of everything after a block tag's name (its arguments).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TagParts {
    // Span of the trimmed argument text; zero-length when there are none.
    pub at: At,
}
impl<'t> TemplateContent<'t> for TagParts {
    fn content(&self, template: TemplateString<'t>) -> &'t str {
        template.content(self.at)
    }
}
/// A lexed block tag: the span of its name plus the span of its arguments.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct Tag {
    // Span of the tag name only (e.g. `url` in `{% url name %}`).
    pub at: At,
    // Everything after the name.
    pub parts: TagParts,
}
impl<'t> TemplateContent<'t> for Tag {
    fn content(&self, template: TemplateString<'t>) -> &'t str {
        template.content(self.at)
    }
}
/// Splits the inner content of a `{% ... %}` block tag (already stripped of
/// its delimiters) into the tag name and its argument parts.
///
/// `start` is the byte offset of `tag` within the template. Errors on an
/// all-whitespace tag (the error span covers the delimiters too) and on a
/// tag name containing non-identifier characters before the first
/// whitespace.
pub fn lex_tag(tag: &str, start: usize) -> Result<Tag, TagLexerError> {
    let rest = tag.trim_start();
    if rest.trim().is_empty() {
        return Err(TagLexerError::EmptyTag {
            at: (
                start - START_TAG_LEN,
                START_TAG_LEN + tag.len() + END_TAG_LEN,
            )
                .into(),
        });
    }
    // Account for any leading whitespace that was trimmed away.
    let start = start + tag.len() - rest.len();
    let tag = tag.trim();
    let Some(tag_len) = tag.find(|c: char| !c.is_xid_continue()) else {
        // The whole tag is a bare name with no arguments.
        let at = (start, tag.len());
        let parts = TagParts {
            at: (start + tag.len(), 0),
        };
        return Ok(Tag { at, parts });
    };
    let index = tag.next_whitespace();
    // A non-identifier character before the first whitespace means the name
    // itself is malformed (e.g. `url'foo'`).
    if index > tag_len {
        let at = (start, index);
        return Err(TagLexerError::InvalidTagName { at: at.into() });
    }
    let at = (start, tag_len);
    let rest = &tag[tag_len..];
    let trimmed = rest.trim();
    let start = start + tag_len + rest.len() - trimmed.len();
    let parts = TagParts {
        at: (start, trimmed.len()),
    };
    Ok(Tag { at, parts })
}
#[cfg(test)]
mod tests {
    // Tests for `lex_tag`: name/argument splitting, empty and malformed
    // tags, and the `TemplateContent` round trip.
    use super::*;
    use crate::types::IntoTemplateString;
    use crate::{END_TAG_LEN, START_TAG_LEN};
    /// Strips the `{%`/`%}` delimiters from a template string.
    fn trim_tag(template: &str) -> &str {
        &template[START_TAG_LEN..(template.len() - END_TAG_LEN)]
    }
    #[test]
    fn test_lex_empty() {
        let template = "{%  %}";
        let tag = trim_tag(template);
        let error = lex_tag(tag, START_TAG_LEN).unwrap_err();
        assert_eq!(error, TagLexerError::EmptyTag { at: (0, 6).into() })
    }
    #[test]
    fn test_lex_tag() {
        let template = "{% csrftoken %}";
        let tag = trim_tag(template);
        let tag = lex_tag(tag, START_TAG_LEN).unwrap();
        assert_eq!(tag.at, (3, 9));
        assert_eq!(tag.content(template.into_template_string()), "csrftoken");
        assert_eq!(tag.parts, TagParts { at: (12, 0) })
    }
    #[test]
    fn test_lex_invalid_tag() {
        let template = "{% url'foo' %}";
        let tag = trim_tag(template);
        let error = lex_tag(tag, START_TAG_LEN).unwrap_err();
        assert_eq!(error, TagLexerError::InvalidTagName { at: (3, 8).into() })
    }
    #[test]
    fn test_lex_invalid_tag_rest() {
        let template = "{% url'foo' bar %}";
        let tag = trim_tag(template);
        let error = lex_tag(tag, START_TAG_LEN).unwrap_err();
        assert_eq!(error, TagLexerError::InvalidTagName { at: (3, 8).into() })
    }
    #[test]
    fn test_lex_tag_rest() {
        let template = "{% url name arg %}";
        let tag = trim_tag(template);
        let tag = lex_tag(tag, START_TAG_LEN).unwrap();
        assert_eq!(tag.at, (3, 3));
        assert_eq!(tag.content(template.into_template_string()), "url");
        assert_eq!(tag.parts, TagParts { at: (7, 8) })
    }
    #[test]
    fn test_template_content_impl() {
        let template = "{% url name arg %}";
        let template_string = "{% url name arg %}".into_template_string();
        let tag = lex_tag(trim_tag(template), START_TAG_LEN).unwrap();
        assert_eq!(tag.content(template.into_template_string()), "url");
        assert_eq!(
            template_string.content(tag.at),
            tag.content(template_string)
        );
        assert_eq!(tag.parts.content(template_string), "name arg");
        assert_eq!(
            template_string.content(tag.parts.at),
            tag.parts.content(template_string)
        );
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-lexer/src/tag/autoescape.rs | dtl-lexer/src/tag/autoescape.rs | use miette::{Diagnostic, SourceSpan};
use thiserror::Error;
use crate::tag::TagParts;
use crate::types::{At, TemplateString};
/// Whether an `{% autoescape %}` tag switches autoescaping on or off.
#[derive(Clone, Debug, PartialEq)]
pub enum AutoescapeEnabled {
    On,
    Off,
}

impl From<&AutoescapeEnabled> for bool {
    fn from(enabled: &AutoescapeEnabled) -> Self {
        matches!(enabled, AutoescapeEnabled::On)
    }
}
/// A lexed `{% autoescape %}` argument and its location.
#[derive(Debug, PartialEq)]
pub struct AutoescapeToken {
    // Span of the `on`/`off` argument.
    pub at: At,
    pub enabled: AutoescapeEnabled,
}
/// Errors for the argument of an `{% autoescape %}` tag.
#[allow(clippy::enum_variant_names)] // https://github.com/rust-lang/rust-clippy/issues/10599
#[derive(Error, Debug, Diagnostic, PartialEq, Eq)]
pub enum AutoescapeError {
    /// A single argument that is neither `on` nor `off`.
    #[error("'autoescape' argument should be 'on' or 'off'.")]
    InvalidArgument {
        #[label("here")]
        at: SourceSpan,
    },
    /// No argument at all.
    #[error("'autoescape' tag missing an 'on' or 'off' argument.")]
    MissingArgument {
        #[label("here")]
        at: SourceSpan,
    },
    /// More than one whitespace-separated argument.
    #[error("'autoescape' tag requires exactly one argument.")]
    UnexpectedArgument {
        #[label("here")]
        at: SourceSpan,
    },
}
/// Lexes the single `on`/`off` argument of an `{% autoescape %}` tag.
///
/// Distinguishes a missing argument, an invalid single argument, and
/// multiple arguments (detected by embedded whitespace).
pub fn lex_autoescape_argument(
    template: TemplateString<'_>,
    parts: TagParts,
) -> Result<AutoescapeToken, AutoescapeError> {
    let at = parts.at;
    match template.content(at) {
        "on" => Ok(AutoescapeToken {
            at,
            enabled: AutoescapeEnabled::On,
        }),
        "off" => Ok(AutoescapeToken {
            at,
            enabled: AutoescapeEnabled::Off,
        }),
        "" => Err(AutoescapeError::MissingArgument { at: at.into() }),
        other if other.contains(char::is_whitespace) => {
            Err(AutoescapeError::UnexpectedArgument { at: at.into() })
        }
        _ => Err(AutoescapeError::InvalidArgument { at: at.into() }),
    }
}
#[cfg(test)]
mod tests {
    // Tests for `lex_autoescape_argument`: both valid arguments and each
    // error variant.
    use super::*;
    #[test]
    fn test_lex_autoescape_off() {
        let template = "{% autoescape off %}";
        let parts = TagParts { at: (14, 3) };
        let token = lex_autoescape_argument(template.into(), parts).unwrap();
        let off = AutoescapeToken {
            at: (14, 3),
            enabled: AutoescapeEnabled::Off,
        };
        assert_eq!(token, off);
    }
    #[test]
    fn test_lex_autoescape_on() {
        let template = "{% autoescape on %}";
        let parts = TagParts { at: (14, 2) };
        let token = lex_autoescape_argument(template.into(), parts).unwrap();
        let on = AutoescapeToken {
            at: (14, 2),
            enabled: AutoescapeEnabled::On,
        };
        assert_eq!(token, on);
    }
    #[test]
    fn test_lex_autoescape_empty() {
        let template = "{% autoescape %}";
        let parts = TagParts { at: (8, 0) };
        let error = lex_autoescape_argument(template.into(), parts).unwrap_err();
        assert_eq!(
            error,
            AutoescapeError::MissingArgument { at: (8, 0).into() }
        );
    }
    #[test]
    fn test_lex_autoescape_invalid() {
        let template = "{% autoescape other %}";
        let parts = TagParts { at: (14, 5) };
        let error = lex_autoescape_argument(template.into(), parts).unwrap_err();
        assert_eq!(
            error,
            AutoescapeError::InvalidArgument { at: (14, 5).into() }
        );
    }
    #[test]
    fn test_lex_autoescape_unexpected_argument() {
        let template = "{% autoescape off on %}";
        let parts = TagParts { at: (14, 6) };
        let error = lex_autoescape_argument(template.into(), parts).unwrap_err();
        assert_eq!(
            error,
            AutoescapeError::UnexpectedArgument { at: (14, 6).into() }
        );
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-lexer/src/tag/lorem.rs | dtl-lexer/src/tag/lorem.rs | #![expect(unused_assignments)]
use crate::common::NextChar;
use crate::tag::TagParts;
use crate::types::{At, TemplateString};
use miette::{Diagnostic, SourceSpan};
use thiserror::Error;
/// Output style for generated lorem ipsum, selected by the `w`, `p` or `b`
/// argument of the `{% lorem %}` tag.
#[derive(Debug, PartialEq, Clone)]
pub enum LoremMethod {
    Words,
    Paragraphs,
    Blocks,
}
/// Classification of a single `{% lorem %}` argument.
#[derive(Debug, PartialEq, Clone)]
pub enum LoremTokenType {
    Count,
    Method(LoremMethod),
    Random,
}
/// A lexed `{% lorem %}` argument with its location.
#[derive(Debug, PartialEq, Clone)]
pub struct LoremToken {
    pub at: At,
    pub token_type: LoremTokenType,
}
/// Iterator over the whitespace-separated arguments of a `{% lorem %}` tag,
/// enforcing the `count method random` ordering.
pub struct LoremLexer<'t> {
    rest: &'t str,
    byte: usize,
    // Spans of arguments seen so far, for ordering/duplicate diagnostics.
    seen_count: Option<At>,
    seen_method: Option<At>,
    seen_random: Option<At>,
}
impl<'t> LoremLexer<'t> {
    /// Creates a lexer over the argument text of a `{% lorem %}` tag.
    pub fn new(template: TemplateString<'t>, parts: TagParts) -> Self {
        Self {
            rest: template.content(parts.at),
            byte: parts.at.0,
            seen_count: None,
            seen_method: None,
            seen_random: None,
        }
    }
    /// Records a method argument (`w`/`p`/`b`) and checks ordering.
    ///
    /// NOTE(review): when an earlier `random` or method token exists and no
    /// count has been recorded yet, that earlier token's span is moved into
    /// `seen_count` — presumably because the count may be a template
    /// variable whose name merely looks like `random` or a method letter.
    /// Confirm against Django's `lorem` tag semantics.
    fn check_method(&mut self, method: LoremMethod, at: At) -> Result<LoremTokenType, LoremError> {
        if let Some(random_at) = self.seen_random {
            match self.seen_count {
                Some(_) => {
                    return Err(LoremError::MethodAfterRandom {
                        method_at: at.into(),
                        random_at: random_at.into(),
                    });
                }
                None => self.seen_count = Some(random_at),
            }
        }
        if let Some(method_at) = self.seen_method {
            match self.seen_count {
                Some(_) => {
                    return Err(LoremError::DuplicateMethod {
                        first: method_at.into(),
                        second: at.into(),
                    });
                }
                None => self.seen_count = Some(method_at),
            }
        }
        self.seen_method = Some(at);
        Ok(LoremTokenType::Method(method))
    }
    /// Records a `random` argument; a second `random` is an error only when
    /// a count was already recorded, otherwise the first one is
    /// reinterpreted as the count (see `check_method`).
    fn check_random(&mut self, at: At) -> Result<LoremTokenType, LoremError> {
        if let Some(random_at) = self.seen_random {
            match self.seen_count {
                Some(_) => {
                    return Err(LoremError::DuplicateRandom {
                        first: random_at.into(),
                        second: at.into(),
                    });
                }
                None => self.seen_count = Some(random_at),
            }
        }
        self.seen_random = Some(at);
        Ok(LoremTokenType::Random)
    }
    /// Records the count argument; it must be unique and must precede any
    /// method or `random` argument.
    fn check_count(&mut self, count_at: At) -> Result<LoremTokenType, LoremError> {
        if let Some(first_count_at) = self.seen_count {
            return Err(LoremError::DuplicateCount {
                first: first_count_at.into(),
                second: count_at.into(),
            });
        }
        if let Some(method_at) = self.seen_method {
            return Err(LoremError::CountAfterMethod {
                count_at: count_at.into(),
                method_at: method_at.into(),
            });
        }
        if let Some(random_at) = self.seen_random {
            return Err(LoremError::CountAfterRandom {
                count_at: count_at.into(),
                random_at: random_at.into(),
            });
        }
        self.seen_count = Some(count_at);
        Ok(LoremTokenType::Count)
    }
}
impl<'t> Iterator for LoremLexer<'t> {
    type Item = Result<LoremToken, LoremError>;
    /// Yields the next whitespace-separated argument, classified as a
    /// count, method or `random` token.
    fn next(&mut self) -> Option<Self::Item> {
        if self.rest.is_empty() {
            return None;
        }
        let len = self.rest.next_whitespace();
        let at = (self.byte, len);
        // Anything that is not a known method letter or `random` is
        // treated as the count argument.
        let token_type = match &self.rest[..len] {
            "w" => self.check_method(LoremMethod::Words, at),
            "p" => self.check_method(LoremMethod::Paragraphs, at),
            "b" => self.check_method(LoremMethod::Blocks, at),
            "random" => self.check_random(at),
            _ => self.check_count(at),
        };
        let token_type = match token_type {
            Ok(token_type) => token_type,
            // Stop lexing after the first error: exhaust the iterator.
            Err(err) => {
                self.rest = "";
                return Some(Err(err));
            }
        };
        // Skip the whitespace separating this token from the next.
        let rest = &self.rest[len..];
        let next = rest.next_non_whitespace();
        self.rest = &rest[next..];
        self.byte = self.byte + len + next;
        Some(Ok(LoremToken { at, token_type }))
    }
}
/// Ordering and duplication errors for the arguments of a `{% lorem %}`
/// tag; each variant labels both offending spans.
#[derive(Debug, Diagnostic, Error, PartialEq, Eq)]
pub enum LoremError {
    #[error("Incorrect format for 'lorem' tag: 'count' must come before the 'method' argument")]
    #[diagnostic(help("Move the 'count' argument before the 'method' argument"))]
    CountAfterMethod {
        #[label("method")]
        method_at: SourceSpan,
        #[label("count")]
        count_at: SourceSpan,
    },
    #[error("Incorrect format for 'lorem' tag: 'count' must come before the 'random' argument")]
    #[diagnostic(help("Move the 'count' argument before the 'random' argument"))]
    CountAfterRandom {
        #[label("random")]
        random_at: SourceSpan,
        #[label("count")]
        count_at: SourceSpan,
    },
    #[error("Incorrect format for 'lorem' tag: 'method' must come before the 'random' argument")]
    #[diagnostic(help("Move the 'method' argument before the 'random' argument"))]
    MethodAfterRandom {
        #[label("random")]
        random_at: SourceSpan,
        #[label("method")]
        method_at: SourceSpan,
    },
    #[error("Incorrect format for 'lorem' tag: 'random' was provided more than once")]
    #[diagnostic(help("Try removing the second 'random'"))]
    DuplicateRandom {
        #[label("first 'random'")]
        first: SourceSpan,
        #[label("second 'random'")]
        second: SourceSpan,
    },
    #[error("Incorrect format for 'lorem' tag: 'method' argument was provided more than once")]
    #[diagnostic(help("Try removing the second 'method'"))]
    DuplicateMethod {
        #[label("first 'method'")]
        first: SourceSpan,
        #[label("second 'method'")]
        second: SourceSpan,
    },
    #[error("Incorrect format for 'lorem' tag: 'count' argument was provided more than once")]
    #[diagnostic(help("Try removing the second 'count'"))]
    DuplicateCount {
        #[label("first 'count'")]
        first: SourceSpan,
        #[label("second 'count'")]
        second: SourceSpan,
    },
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-lexer/src/tag/include.rs | dtl-lexer/src/tag/include.rs | #![expect(unused_assignments)]
use miette::{Diagnostic, SourceSpan};
use thiserror::Error;
use crate::common::text_content_at;
use crate::tag::TagParts;
use crate::tag::custom_tag::{
SimpleTagLexer, SimpleTagLexerError, SimpleTagToken, SimpleTagTokenType,
};
use crate::types::{At, TemplateString};
/// How an `{% include %}` template name was written: a quoted string
/// literal or a variable to resolve at render time.
#[derive(Debug, PartialEq, Eq)]
pub enum IncludeTemplateTokenType {
    Text,
    Variable,
}
/// The lexed template-name argument of an `{% include %}` tag.
#[derive(Debug, PartialEq, Eq)]
pub struct IncludeTemplateToken {
    pub at: At,
    pub token_type: IncludeTemplateTokenType,
}
impl IncludeTemplateToken {
    /// The span of the name itself, with the surrounding quotes stripped
    /// for string literals.
    pub fn content_at(&self) -> At {
        match self.token_type {
            IncludeTemplateTokenType::Variable => self.at,
            IncludeTemplateTokenType::Text => text_content_at(self.at),
        }
    }
}
/// Result of looking for the optional `with`/`only` keywords after the
/// template name of an `{% include %}` tag.
pub enum IncludeWithToken {
    None,
    With(At),
    Only(At),
}
/// A token produced while lexing the argument list after `with`.
pub enum IncludeToken {
    // The trailing `only` option.
    Only(At),
    // A `name=value` keyword argument.
    Kwarg { kwarg_at: At, token: SimpleTagToken },
}
/// Errors raised while lexing the arguments of an `{% include %}` tag.
#[derive(Error, Debug, Diagnostic, PartialEq, Eq)]
pub enum IncludeLexerError {
    /// An error from the underlying simple-tag lexer, forwarded unchanged.
    #[error(transparent)]
    #[diagnostic(transparent)]
    SimpleTagLexerError(#[from] SimpleTagLexerError),
    /// A numeric literal used where a template name was expected.
    #[error("Included template name must be a string or iterable of strings.")]
    InvalidTemplateName {
        #[label("invalid template name")]
        at: SourceSpan,
    },
    /// A `_("...")` literal used where a template name was expected.
    #[error("Included template name cannot be a translatable string.")]
    TranslatedTemplateName {
        #[label("invalid template name")]
        at: SourceSpan,
    },
    #[error("Unexpected argument")]
    #[diagnostic(help("{help}"))]
    UnexpectedArgument {
        #[label("here")]
        at: SourceSpan,
        help: &'static str,
    },
    /// A `name=value` argument in a position that takes no keywords.
    #[error("Unexpected keyword argument")]
    UnexpectedKeywordArgument {
        #[label("here")]
        at: SourceSpan,
    },
    /// A bare value where only `name=value` (or `only`) is accepted.
    #[error("Expected a keyword argument")]
    UnexpectedPositionalArgument {
        #[label("here")]
        at: SourceSpan,
    },
}
/// Lexer for `{% include %}` arguments, layered over the generic
/// [`SimpleTagLexer`].
pub struct IncludeLexer<'t> {
    lexer: SimpleTagLexer<'t>,
    template: TemplateString<'t>,
}
impl<'t> IncludeLexer<'t> {
    pub fn new(template: TemplateString<'t>, parts: TagParts) -> Self {
        Self {
            lexer: SimpleTagLexer::new(template, parts),
            template,
        }
    }
    /// Lexes the template-name argument (the first token).
    ///
    /// Returns `Ok(None)` when the tag has no arguments at all. Keyword,
    /// numeric and translated tokens are rejected in name position.
    pub fn lex_template(&mut self) -> Result<Option<IncludeTemplateToken>, IncludeLexerError> {
        let token = match self.lexer.next() {
            Some(token) => token?,
            None => return Ok(None),
        };
        match token.kwarg {
            Some(kwarg_at) => Err(IncludeLexerError::UnexpectedKeywordArgument {
                at: kwarg_at.into(),
            }),
            None => {
                let token_type = match token.token_type {
                    SimpleTagTokenType::Text => IncludeTemplateTokenType::Text,
                    SimpleTagTokenType::Variable => IncludeTemplateTokenType::Variable,
                    SimpleTagTokenType::Numeric => {
                        return Err(IncludeLexerError::InvalidTemplateName {
                            at: token.at.into(),
                        });
                    }
                    SimpleTagTokenType::TranslatedText => {
                        return Err(IncludeLexerError::TranslatedTemplateName {
                            at: token.at.into(),
                        });
                    }
                };
                Ok(Some(IncludeTemplateToken {
                    at: token.at,
                    token_type,
                }))
            }
        }
    }
    /// Pulls the next token from the inner lexer, converting its error type.
    fn next_kwarg(&mut self) -> Option<Result<SimpleTagToken, IncludeLexerError>> {
        match self.lexer.next() {
            None => None,
            Some(Ok(token)) => Some(Ok(token)),
            Some(Err(error)) => Some(Err(error.into())),
        }
    }
    /// Accepts a trailing `only` at `at`; any further token is an error.
    fn lex_only(&mut self, at: At) -> Result<IncludeToken, IncludeLexerError> {
        match self.lexer.next() {
            None => Ok(IncludeToken::Only(at)),
            Some(token) => Err(IncludeLexerError::UnexpectedArgument {
                at: token?.all_at().into(),
                help: "Try moving the argument before the 'only' option",
            }),
        }
    }
    /// Lexes the token after the template name: nothing, `with`, or `only`.
    /// Any other token is reported with a hint to add `with`.
    pub fn lex_with_or_only(&mut self) -> Result<IncludeWithToken, IncludeLexerError> {
        let token = match self.next_kwarg() {
            None => return Ok(IncludeWithToken::None),
            Some(result) => result?,
        };
        const HELP: &str = "Try adding the 'with' keyword before the argument.";
        match token {
            SimpleTagToken {
                at,
                token_type: SimpleTagTokenType::Variable,
                kwarg: None,
            } => match self.template.content(at) {
                "with" => Ok(IncludeWithToken::With(at)),
                "only" => Ok(IncludeWithToken::Only(at)),
                _ => Err(IncludeLexerError::UnexpectedArgument {
                    at: at.into(),
                    help: HELP,
                }),
            },
            token => Err(IncludeLexerError::UnexpectedArgument {
                at: token.all_at().into(),
                help: HELP,
            }),
        }
    }
}
impl<'t> Iterator for IncludeLexer<'t> {
    type Item = Result<IncludeToken, IncludeLexerError>;
    /// Yields the `name=value` keyword arguments following `with`; a bare
    /// `only` is accepted only as the final token, and any other positional
    /// argument is an error.
    fn next(&mut self) -> Option<Self::Item> {
        let token = match self.next_kwarg()? {
            Ok(token) => token,
            Err(error) => {
                return Some(Err(error));
            }
        };
        Some(match token.kwarg {
            Some(kwarg_at) => Ok(IncludeToken::Kwarg { kwarg_at, token }),
            None => {
                if token.token_type == SimpleTagTokenType::Variable
                    && self.template.content(token.at) == "only"
                {
                    self.lex_only(token.at)
                } else {
                    Err(IncludeLexerError::UnexpectedPositionalArgument {
                        at: token.at.into(),
                    })
                }
            }
        })
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-lexer/src/tag/custom_tag.rs | dtl-lexer/src/tag/custom_tag.rs | use miette::{Diagnostic, SourceSpan};
use thiserror::Error;
use unicode_xid::UnicodeXID;
use crate::common::{
LexerError, NextChar, lex_numeric, lex_text, lex_translated, lex_variable, text_content_at,
translated_text_content_at,
};
use crate::tag::TagParts;
use crate::types::{At, TemplateString};
/// The kind of a single lexed argument inside a simple tag.
#[derive(Debug, PartialEq)]
pub enum SimpleTagTokenType {
    /// A numeric literal, e.g. `5` or `-1.5`.
    Numeric,
    /// A quoted string literal, e.g. `'foo'` or `"foo"`.
    Text,
    /// A translated string literal, e.g. `_('foo')`.
    TranslatedText,
    /// A variable, possibly with filters, e.g. `foo|default:'bar'`.
    Variable,
}
/// A single lexed argument of a simple tag.
#[derive(Debug, PartialEq)]
pub struct SimpleTagToken {
    /// Location of the value part of the token.
    pub at: At,
    pub token_type: SimpleTagTokenType,
    /// Location of the keyword name when the token is `keyword=value`.
    pub kwarg: Option<At>,
}
impl SimpleTagToken {
    /// The location of the token's value with any quoting stripped;
    /// variables and numeric literals are returned unchanged.
    pub fn content_at(&self) -> At {
        match self.token_type {
            SimpleTagTokenType::Variable | SimpleTagTokenType::Numeric => self.at,
            SimpleTagTokenType::Text => text_content_at(self.at),
            SimpleTagTokenType::TranslatedText => translated_text_content_at(self.at),
        }
    }

    /// The location of the whole argument. For a plain token this is just
    /// `self.at`; for `keyword=value` it spans from the start of the
    /// keyword through the end of the value.
    pub fn all_at(&self) -> At {
        self.kwarg
            .map_or(self.at, |kwarg_at| {
                (kwarg_at.0, self.at.0 - kwarg_at.0 + self.at.1)
            })
    }
}
/// Errors raised while lexing the arguments of a simple tag.
#[derive(Error, Debug, Diagnostic, PartialEq, Eq)]
pub enum SimpleTagLexerError {
    /// An error forwarded from the shared lexing helpers.
    #[error(transparent)]
    #[diagnostic(transparent)]
    LexerError(#[from] LexerError),
    /// A keyword argument with no value, e.g. `{% url name= %}`.
    #[error("Incomplete keyword argument")]
    IncompleteKeywordArgument {
        #[label("here")]
        at: SourceSpan,
    },
}
/// Lexer for the space-separated arguments of a simple tag.
pub struct SimpleTagLexer<'t> {
    /// The not-yet-lexed tail of the tag contents.
    rest: &'t str,
    /// Byte offset of `rest` within the full template source.
    byte: usize,
}
impl<'t> SimpleTagLexer<'t> {
    /// Creates a lexer positioned at the tag's argument section.
    pub fn new(template: TemplateString<'t>, parts: TagParts) -> Self {
        Self {
            rest: template.content(parts.at),
            byte: parts.at.0,
        }
    }

    /// Lexes a numeric literal at the current position, advancing past it.
    fn lex_numeric(&mut self, kwarg: Option<At>) -> SimpleTagToken {
        let (at, byte, rest) = lex_numeric(self.byte, self.rest);
        self.rest = rest;
        self.byte = byte;
        SimpleTagToken {
            at,
            token_type: SimpleTagTokenType::Numeric,
            kwarg,
        }
    }

    /// Lexes a quoted string terminated by `end` (`'` or `"`). On error
    /// the lexer is exhausted so no further tokens are produced.
    fn lex_text(
        &mut self,
        chars: &mut std::str::Chars,
        end: char,
        kwarg: Option<At>,
    ) -> Result<SimpleTagToken, SimpleTagLexerError> {
        match lex_text(self.byte, self.rest, chars, end) {
            Ok((at, byte, rest)) => {
                self.rest = rest;
                self.byte = byte;
                Ok(SimpleTagToken {
                    token_type: SimpleTagTokenType::Text,
                    at,
                    kwarg,
                })
            }
            Err(e) => {
                // Abandon the rest of the tag after a malformed string.
                self.rest = "";
                Err(e.into())
            }
        }
    }

    /// Lexes a translated string `_( ... )`. On error the lexer is
    /// exhausted so no further tokens are produced.
    fn lex_translated(
        &mut self,
        chars: &mut std::str::Chars,
        kwarg: Option<At>,
    ) -> Result<SimpleTagToken, SimpleTagLexerError> {
        match lex_translated(self.byte, self.rest, chars) {
            Ok((at, byte, rest)) => {
                self.rest = rest;
                self.byte = byte;
                Ok(SimpleTagToken {
                    token_type: SimpleTagTokenType::TranslatedText,
                    at,
                    kwarg,
                })
            }
            Err(e) => {
                self.rest = "";
                Err(e.into())
            }
        }
    }

    /// Lexes an optional leading `keyword=` prefix, returning the span of
    /// the keyword name and advancing past the `=`.
    fn lex_kwarg(&mut self) -> Option<At> {
        let index = self.rest.find('=')?;
        // The keyword must be an identifier: if any non-identifier
        // character appears before the '=', this is not a kwarg (e.g. the
        // '=' belongs to a later token).
        match self.rest.find(|c: char| !c.is_xid_continue()) {
            Some(n) if n < index => return None,
            _ => {}
        }
        let at = (self.byte, index);
        self.rest = &self.rest[index + 1..];
        self.byte += index + 1;
        Some(at)
    }

    /// Lexes a variable reference, optionally followed by filters.
    fn lex_variable_or_filter(
        &mut self,
        kwarg: Option<At>,
    ) -> Result<SimpleTagToken, SimpleTagLexerError> {
        let (at, byte, rest) = lex_variable(self.byte, self.rest);
        self.rest = rest;
        self.byte = byte;
        Ok(SimpleTagToken {
            token_type: SimpleTagTokenType::Variable,
            at,
            kwarg,
        })
    }

    /// Checks that the just-lexed token is followed by whitespace or the
    /// end of the tag; otherwise reports the trailing junk and exhausts
    /// the lexer.
    fn lex_remainder(
        &mut self,
        token: Result<SimpleTagToken, SimpleTagLexerError>,
    ) -> Result<SimpleTagToken, SimpleTagLexerError> {
        let remainder = self.rest.next_whitespace();
        match remainder {
            0 => {
                // Token ended cleanly; skip the separating whitespace.
                let rest = self.rest.trim_start();
                self.byte += self.rest.len() - rest.len();
                self.rest = rest;
                token
            }
            n => {
                self.rest = "";
                let at = (self.byte, n).into();
                let err = LexerError::InvalidRemainder { at };
                Err(err.into())
            }
        }
    }
}
impl Iterator for SimpleTagLexer<'_> {
    type Item = Result<SimpleTagToken, SimpleTagLexerError>;

    /// Lexes the next argument, dispatching on its first character.
    fn next(&mut self) -> Option<Self::Item> {
        if self.rest.is_empty() {
            return None;
        }
        let kwarg = self.lex_kwarg();
        let mut chars = self.rest.chars();
        let next = match chars.next() {
            Some(next) if !next.is_whitespace() => next,
            _ => {
                // Nothing (or only whitespace) after `keyword=`.
                self.rest = "";
                let at = kwarg.expect("kwarg is Some or we'd already have exited");
                // Widen the span by one byte to cover the '=' as well.
                let at = (at.0, at.1 + 1).into();
                return Some(Err(SimpleTagLexerError::IncompleteKeywordArgument { at }));
            }
        };
        let token = match next {
            '_' => {
                // `_(` starts a translated string; a bare `_` starts a
                // variable name.
                if let Some('(') = chars.next() {
                    self.lex_translated(&mut chars, kwarg)
                } else {
                    self.lex_variable_or_filter(kwarg)
                }
            }
            '"' => self.lex_text(&mut chars, '"', kwarg),
            '\'' => self.lex_text(&mut chars, '\'', kwarg),
            '0'..='9' | '-' => Ok(self.lex_numeric(kwarg)),
            _ => self.lex_variable_or_filter(kwarg),
        };
        Some(self.lex_remainder(token))
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // All `at` spans below are (byte offset, byte length) pairs into the
    // full template string.

    #[test]
    fn test_lex_url_name_text() {
        let template = "{% url 'foo' %}";
        let parts = TagParts { at: (7, 5) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (7, 5),
            token_type: SimpleTagTokenType::Text,
            kwarg: None,
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url_name_text_double_quotes() {
        let template = "{% url \"foo\" %}";
        let parts = TagParts { at: (7, 5) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (7, 5),
            token_type: SimpleTagTokenType::Text,
            kwarg: None,
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url_name_text_incomplete() {
        let template = "{% url 'foo %}";
        let parts = TagParts { at: (7, 4) };
        let mut lexer = SimpleTagLexer::new(template.into(), parts);
        let error = lexer.next().unwrap().unwrap_err();
        assert_eq!(
            error,
            LexerError::IncompleteString { at: (7, 4).into() }.into()
        );
    }

    #[test]
    fn test_lex_url_name_variable() {
        let template = "{% url foo %}";
        let parts = TagParts { at: (7, 3) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (7, 3),
            token_type: SimpleTagTokenType::Variable,
            kwarg: None,
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url_name_filter() {
        let template = "{% url foo|default:'home' %}";
        let parts = TagParts { at: (7, 18) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (7, 18),
            token_type: SimpleTagTokenType::Variable,
            kwarg: None,
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url_name_filter_inner_double_quote() {
        let template = "{% url foo|default:'home\"' %}";
        let parts = TagParts { at: (7, 19) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (7, 19),
            token_type: SimpleTagTokenType::Variable,
            kwarg: None,
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url_name_filter_inner_single_quote() {
        let template = "{% url foo|default:\"home'\" %}";
        let parts = TagParts { at: (7, 19) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (7, 19),
            token_type: SimpleTagTokenType::Variable,
            kwarg: None,
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url_name_filter_inner_whitespace() {
        let template = "{% url foo|default:'home url' %}";
        let parts = TagParts { at: (7, 22) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (7, 22),
            token_type: SimpleTagTokenType::Variable,
            kwarg: None,
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url_name_leading_underscore() {
        let template = "{% url _foo %}";
        let parts = TagParts { at: (7, 4) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (7, 4),
            token_type: SimpleTagTokenType::Variable,
            kwarg: None,
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url_name_translated() {
        let template = "{% url _('foo') %}";
        let parts = TagParts { at: (7, 8) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (7, 8),
            token_type: SimpleTagTokenType::TranslatedText,
            kwarg: None,
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url_name_translated_incomplete() {
        let template = "{% url _('foo' %}";
        let parts = TagParts { at: (7, 7) };
        let mut lexer = SimpleTagLexer::new(template.into(), parts);
        let error = lexer.next().unwrap().unwrap_err();
        assert_eq!(
            error,
            LexerError::IncompleteTranslatedString { at: (7, 7).into() }.into()
        );
    }

    #[test]
    fn test_lex_url_name_numeric() {
        let template = "{% url 5 %}";
        let parts = TagParts { at: (7, 1) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (7, 1),
            token_type: SimpleTagTokenType::Numeric,
            kwarg: None,
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    // Keyword-argument variants: `at` covers the value, `kwarg` the name.

    #[test]
    fn test_lex_url_name_text_kwarg() {
        let template = "{% url name='foo' %}";
        let parts = TagParts { at: (7, 10) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (12, 5),
            token_type: SimpleTagTokenType::Text,
            kwarg: Some((7, 4)),
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url_name_text_kwarg_double_quotes() {
        let template = "{% url name=\"foo\" %}";
        let parts = TagParts { at: (7, 10) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (12, 5),
            token_type: SimpleTagTokenType::Text,
            kwarg: Some((7, 4)),
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url_name_variable_kwarg() {
        let template = "{% url name=foo %}";
        let parts = TagParts { at: (7, 8) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (12, 3),
            token_type: SimpleTagTokenType::Variable,
            kwarg: Some((7, 4)),
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url_name_leading_underscore_kwarg() {
        let template = "{% url name=_foo %}";
        let parts = TagParts { at: (7, 9) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (12, 4),
            token_type: SimpleTagTokenType::Variable,
            kwarg: Some((7, 4)),
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url_name_translated_kwarg() {
        let template = "{% url name=_('foo') %}";
        let parts = TagParts { at: (7, 13) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (12, 8),
            token_type: SimpleTagTokenType::TranslatedText,
            kwarg: Some((7, 4)),
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url_name_numeric_kwarg() {
        let template = "{% url name=5 %}";
        let parts = TagParts { at: (7, 6) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let name = SimpleTagToken {
            at: (12, 1),
            token_type: SimpleTagTokenType::Numeric,
            kwarg: Some((7, 4)),
        };
        assert_eq!(tokens, vec![Ok(name)]);
    }

    #[test]
    fn test_lex_url() {
        let template = "{% url 'home' next %}";
        let parts = TagParts { at: (7, 11) };
        let lexer = SimpleTagLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let home = SimpleTagToken {
            at: (7, 6),
            token_type: SimpleTagTokenType::Text,
            kwarg: None,
        };
        let next = SimpleTagToken {
            at: (14, 4),
            token_type: SimpleTagTokenType::Variable,
            kwarg: None,
        };
        assert_eq!(tokens, vec![Ok(home), Ok(next)]);
    }

    // Error cases.

    #[test]
    fn test_lex_url_incomplete_kwarg() {
        let template = "{% url name= %}";
        let parts = TagParts { at: (7, 5) };
        let mut lexer = SimpleTagLexer::new(template.into(), parts);
        let error = lexer.next().unwrap().unwrap_err();
        assert_eq!(
            error,
            SimpleTagLexerError::IncompleteKeywordArgument { at: (7, 5).into() }
        );
    }

    #[test]
    fn test_lex_url_incomplete_kwarg_args() {
        let template = "{% url name= foo %}";
        let parts = TagParts { at: (7, 9) };
        let mut lexer = SimpleTagLexer::new(template.into(), parts);
        let error = lexer.next().unwrap().unwrap_err();
        assert_eq!(
            error,
            SimpleTagLexerError::IncompleteKeywordArgument { at: (7, 5).into() }
        );
    }

    #[test]
    fn test_lex_url_invalid_remainder() {
        let template = "{% url 'foo'remainder %}";
        let parts = TagParts { at: (7, 14) };
        let mut lexer = SimpleTagLexer::new(template.into(), parts);
        let error = lexer.next().unwrap().unwrap_err();
        assert_eq!(
            error,
            LexerError::InvalidRemainder { at: (12, 9).into() }.into()
        );
    }

    #[test]
    fn test_lex_url_kwarg_invalid_remainder() {
        let template = "{% url name='foo'=remainder %}";
        let parts = TagParts { at: (7, 20) };
        let mut lexer = SimpleTagLexer::new(template.into(), parts);
        let error = lexer.next().unwrap().unwrap_err();
        assert_eq!(
            error,
            LexerError::InvalidRemainder {
                at: (17, 10).into()
            }
            .into()
        );
    }

    #[test]
    fn test_lex_url_incomplete_kwarg_message() {
        let template = "{% url name= %}";
        let parts = TagParts { at: (7, 5) };
        let mut lexer = SimpleTagLexer::new(template.into(), parts);
        let error = lexer.next().unwrap().unwrap_err();
        assert_eq!(error.to_string(), "Incomplete keyword argument");
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-lexer/src/tag/load.rs | dtl-lexer/src/tag/load.rs | use crate::common::NextChar;
use crate::tag::TagParts;
use crate::types::{At, TemplateString};
/// A single whitespace-delimited word of a `{% load %}` tag.
#[derive(Debug, PartialEq)]
pub struct LoadToken {
    /// Location of the word in the template source.
    pub at: At,
}
/// Lexer for the contents of a `{% load %}` tag; splits on whitespace.
pub struct LoadLexer<'t> {
    /// The not-yet-lexed tail of the tag contents.
    rest: &'t str,
    /// Byte offset of `rest` within the full template source.
    byte: usize,
}
impl<'t> LoadLexer<'t> {
    /// Creates a lexer positioned at the tag's argument section.
    pub fn new(template: TemplateString<'t>, parts: TagParts) -> Self {
        Self {
            rest: template.content(parts.at),
            byte: parts.at.0,
        }
    }
}
impl Iterator for LoadLexer<'_> {
    type Item = LoadToken;

    /// Yields the next whitespace-delimited word and skips the
    /// whitespace that follows it.
    fn next(&mut self) -> Option<Self::Item> {
        if self.rest.is_empty() {
            return None;
        }
        let token_len = self.rest.next_whitespace();
        let at = (self.byte, token_len);
        let after_token = &self.rest[token_len..];
        let skipped = after_token.next_non_whitespace();
        self.byte += token_len + skipped;
        self.rest = &after_token[skipped..];
        Some(LoadToken { at })
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // All `at` spans below are (byte offset, byte length) pairs into the
    // full template string.

    #[test]
    fn test_lex_library() {
        let template = "{% load foo %}";
        let parts = TagParts { at: (8, 3) };
        let lexer = LoadLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let foo = LoadToken { at: (8, 3) };
        assert_eq!(tokens, [foo]);
    }

    #[test]
    fn test_lex_libraries() {
        let template = "{% load foo bar.eggs %}";
        let parts = TagParts { at: (8, 12) };
        let lexer = LoadLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let foo = LoadToken { at: (8, 3) };
        let bar_eggs = LoadToken { at: (12, 8) };
        assert_eq!(tokens, [foo, bar_eggs]);
    }

    #[test]
    fn test_lex_individual() {
        let template = "{% load foo bar from library %}";
        let parts = TagParts { at: (8, 20) };
        let lexer = LoadLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let foo = LoadToken { at: (8, 3) };
        let bar = LoadToken { at: (12, 3) };
        let from = LoadToken { at: (16, 4) };
        let library = LoadToken { at: (21, 7) };
        assert_eq!(tokens, [foo, bar, from, library]);
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-lexer/src/tag/ifcondition.rs | dtl-lexer/src/tag/ifcondition.rs | use crate::common::{
LexerError, NextChar, lex_numeric, lex_text, lex_translated, lex_variable, text_content_at,
translated_text_content_at,
};
use crate::tag::TagParts;
use crate::types::{At, TemplateString};
/// The kind of a non-operator operand inside an `{% if %}` condition.
#[derive(Debug, PartialEq, Eq)]
pub enum IfConditionAtom {
    /// A numeric literal, e.g. `5.3`.
    Numeric,
    /// A quoted string literal, e.g. `'foo'`.
    Text,
    /// A translated string literal, e.g. `_('foo')`.
    TranslatedText,
    /// A variable, possibly with filters.
    Variable,
}
/// A binary operator recognised inside an `{% if %}` condition.
#[derive(Debug, PartialEq, Eq, Clone, Copy)]
pub enum IfConditionOperator {
    And,
    Or,
    Equal,
    NotEqual,
    LessThan,
    GreaterThan,
    LessThanEqual,
    GreaterThanEqual,
    In,
    /// The two-word operator `not in`.
    NotIn,
    Is,
    /// The two-word operator `is not`.
    IsNot,
}
/// A lexed element of an `{% if %}` condition: operand, binary
/// operator, or the unary `not`.
#[derive(Debug, PartialEq, Eq)]
pub enum IfConditionTokenType {
    Atom(IfConditionAtom),
    Operator(IfConditionOperator),
    /// The unary `not` (the binary `not in` lexes as an operator).
    Not,
}
/// A single lexed token of an `{% if %}` condition.
#[derive(Debug, PartialEq, Eq)]
pub struct IfConditionToken {
    /// Location of the token in the template source.
    pub at: At,
    pub token_type: IfConditionTokenType,
}
impl IfConditionToken {
    /// The location of the token's value with any quoting stripped;
    /// other token kinds are returned unchanged.
    pub fn content_at(&self) -> At {
        match self.token_type {
            IfConditionTokenType::Atom(IfConditionAtom::Text) => text_content_at(self.at),
            IfConditionTokenType::Atom(IfConditionAtom::TranslatedText) => {
                translated_text_content_at(self.at)
            }
            _ => self.at,
        }
    }
}
/// Lexer for the condition expression of an `{% if %}` tag.
pub struct IfConditionLexer<'t> {
    /// The not-yet-lexed tail of the condition.
    rest: &'t str,
    /// Byte offset of `rest` within the full template source.
    byte: usize,
}
impl<'t> IfConditionLexer<'t> {
    /// Creates a lexer positioned at the tag's condition section.
    pub fn new(template: TemplateString<'t>, parts: TagParts) -> Self {
        Self {
            rest: template.content(parts.at),
            byte: parts.at.0,
        }
    }

    /// Lexes a single operand (atom), dispatching on its first character.
    fn lex_condition(&mut self) -> Result<IfConditionToken, LexerError> {
        let mut chars = self.rest.chars();
        let token = match chars.next().expect("self.rest is not empty") {
            '_' => {
                // `_(` starts a translated string; a bare `_` starts a
                // variable name.
                if let Some('(') = chars.next() {
                    self.lex_translated(&mut chars)?
                } else {
                    self.lex_variable()
                }
            }
            '"' => self.lex_text(&mut chars, '"')?,
            '\'' => self.lex_text(&mut chars, '\'')?,
            '0'..='9' | '-' => self.lex_numeric(),
            _ => self.lex_variable(),
        };
        self.lex_remainder()?;
        Ok(token)
    }

    /// Lexes a variable reference, optionally followed by filters.
    fn lex_variable(&mut self) -> IfConditionToken {
        let (at, byte, rest) = lex_variable(self.byte, self.rest);
        self.rest = rest;
        self.byte = byte;
        IfConditionToken {
            token_type: IfConditionTokenType::Atom(IfConditionAtom::Variable),
            at,
        }
    }

    /// Lexes a numeric literal at the current position.
    fn lex_numeric(&mut self) -> IfConditionToken {
        let (at, byte, rest) = lex_numeric(self.byte, self.rest);
        self.rest = rest;
        self.byte = byte;
        IfConditionToken {
            at,
            token_type: IfConditionTokenType::Atom(IfConditionAtom::Numeric),
        }
    }

    /// Lexes a quoted string terminated by `end` (`'` or `"`). On error
    /// the lexer is exhausted so no further tokens are produced.
    fn lex_text(
        &mut self,
        chars: &mut std::str::Chars,
        end: char,
    ) -> Result<IfConditionToken, LexerError> {
        match lex_text(self.byte, self.rest, chars, end) {
            Ok((at, byte, rest)) => {
                self.rest = rest;
                self.byte = byte;
                Ok(IfConditionToken {
                    token_type: IfConditionTokenType::Atom(IfConditionAtom::Text),
                    at,
                })
            }
            Err(e) => {
                self.rest = "";
                Err(e)
            }
        }
    }

    /// Lexes a translated string `_( ... )`. On error the lexer is
    /// exhausted so no further tokens are produced.
    fn lex_translated(
        &mut self,
        chars: &mut std::str::Chars,
    ) -> Result<IfConditionToken, LexerError> {
        match lex_translated(self.byte, self.rest, chars) {
            Ok((at, byte, rest)) => {
                self.rest = rest;
                self.byte = byte;
                Ok(IfConditionToken {
                    token_type: IfConditionTokenType::Atom(IfConditionAtom::TranslatedText),
                    at,
                })
            }
            Err(e) => {
                self.rest = "";
                Err(e)
            }
        }
    }

    /// Checks that the just-lexed token is followed by whitespace or the
    /// end of the condition; otherwise reports the trailing junk and
    /// exhausts the lexer.
    fn lex_remainder(&mut self) -> Result<(), LexerError> {
        match self.rest.next_whitespace() {
            0 => {
                // Token ended cleanly; skip the separating whitespace.
                let rest = self.rest.trim_start();
                self.byte += self.rest.len() - rest.len();
                self.rest = rest;
                Ok(())
            }
            n => {
                self.rest = "";
                let at = (self.byte, n).into();
                Err(LexerError::InvalidRemainder { at })
            }
        }
    }
}
impl Iterator for IfConditionLexer<'_> {
    type Item = Result<IfConditionToken, LexerError>;

    /// Yields the next condition token. The word up to the next
    /// whitespace is matched against the known operators first; anything
    /// else is lexed as an operand via `lex_condition`. The two-word
    /// operators `not in` and `is not` require a one-word lookahead.
    fn next(&mut self) -> Option<Self::Item> {
        if self.rest.is_empty() {
            return None;
        }
        let index = self.rest.next_whitespace();
        let (token_type, index) = match &self.rest[..index] {
            "and" => (
                IfConditionTokenType::Operator(IfConditionOperator::And),
                index,
            ),
            "or" => (
                IfConditionTokenType::Operator(IfConditionOperator::Or),
                index,
            ),
            "not" => {
                // Look ahead one word: `not in` is a single operator,
                // otherwise `not` is the unary negation.
                let rest = &self.rest[index..];
                let whitespace_index = rest.next_non_whitespace();
                let rest = &rest[whitespace_index..];
                let next_index = rest.next_whitespace();
                match &rest[..next_index] {
                    "in" => (
                        IfConditionTokenType::Operator(IfConditionOperator::NotIn),
                        index + whitespace_index + next_index,
                    ),
                    _ => (IfConditionTokenType::Not, index),
                }
            }
            "==" => (
                IfConditionTokenType::Operator(IfConditionOperator::Equal),
                index,
            ),
            "!=" => (
                IfConditionTokenType::Operator(IfConditionOperator::NotEqual),
                index,
            ),
            "<" => (
                IfConditionTokenType::Operator(IfConditionOperator::LessThan),
                index,
            ),
            ">" => (
                IfConditionTokenType::Operator(IfConditionOperator::GreaterThan),
                index,
            ),
            "<=" => (
                IfConditionTokenType::Operator(IfConditionOperator::LessThanEqual),
                index,
            ),
            ">=" => (
                IfConditionTokenType::Operator(IfConditionOperator::GreaterThanEqual),
                index,
            ),
            "in" => (
                IfConditionTokenType::Operator(IfConditionOperator::In),
                index,
            ),
            "is" => {
                // Look ahead one word: `is not` is a single operator.
                let rest = &self.rest[index..];
                let whitespace_index = rest.next_non_whitespace();
                let rest = &rest[whitespace_index..];
                let next_index = rest.next_whitespace();
                match &rest[..next_index] {
                    "not" => (
                        IfConditionTokenType::Operator(IfConditionOperator::IsNot),
                        index + whitespace_index + next_index,
                    ),
                    _ => (
                        IfConditionTokenType::Operator(IfConditionOperator::Is),
                        index,
                    ),
                }
            }
            // Not an operator: lex as an operand instead.
            _ => return Some(self.lex_condition()),
        };
        // Advance past the operator and the whitespace that follows it.
        let at = (self.byte, index);
        let rest = &self.rest[index..];
        let next_index = rest.next_non_whitespace();
        self.byte += index + next_index;
        self.rest = &rest[next_index..];
        Some(Ok(IfConditionToken { at, token_type }))
    }
}
#[cfg(test)]
mod tests {
    use super::*;

    // All `at` spans below are (byte offset, byte length) pairs into the
    // full template string.

    #[test]
    fn test_lex_variable() {
        let template = "{% if foo %}";
        let parts = TagParts { at: (6, 3) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let foo = IfConditionToken {
            at: (6, 3),
            token_type: IfConditionTokenType::Atom(IfConditionAtom::Variable),
        };
        assert_eq!(tokens, vec![Ok(foo)]);
    }

    #[test]
    fn test_lex_variable_leading_underscorej() {
        let template = "{% if _foo %}";
        let parts = TagParts { at: (6, 4) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let foo = IfConditionToken {
            at: (6, 4),
            token_type: IfConditionTokenType::Atom(IfConditionAtom::Variable),
        };
        assert_eq!(tokens, vec![Ok(foo)]);
    }

    #[test]
    fn test_lex_numeric() {
        let template = "{% if 5.3 %}";
        let parts = TagParts { at: (6, 3) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let numeric = IfConditionToken {
            at: (6, 3),
            token_type: IfConditionTokenType::Atom(IfConditionAtom::Numeric),
        };
        assert_eq!(tokens, vec![Ok(numeric)]);
    }

    #[test]
    fn test_lex_text() {
        let template = "{% if 'foo' %}";
        let parts = TagParts { at: (6, 5) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let text = IfConditionToken {
            at: (6, 5),
            token_type: IfConditionTokenType::Atom(IfConditionAtom::Text),
        };
        assert_eq!(tokens, vec![Ok(text)]);
    }

    #[test]
    fn test_lex_text_double_quotes() {
        let template = "{% if \"foo\" %}";
        let parts = TagParts { at: (6, 5) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let text = IfConditionToken {
            at: (6, 5),
            token_type: IfConditionTokenType::Atom(IfConditionAtom::Text),
        };
        assert_eq!(tokens, vec![Ok(text)]);
    }

    #[test]
    fn test_lex_translated() {
        let template = "{% if _('foo') %}";
        let parts = TagParts { at: (6, 8) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let text = IfConditionToken {
            at: (6, 8),
            token_type: IfConditionTokenType::Atom(IfConditionAtom::TranslatedText),
        };
        assert_eq!(tokens, vec![Ok(text)]);
    }

    #[test]
    fn test_lex_translated_error() {
        let template = "{% if _('foo' %}";
        let parts = TagParts { at: (6, 7) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let error = LexerError::IncompleteTranslatedString { at: (6, 7).into() };
        assert_eq!(tokens, vec![Err(error)]);
    }

    // Operator tokens (the surrounding operands are irrelevant to the
    // lexer, so the templates are deliberately minimal).

    #[test]
    fn test_lex_and() {
        let template = "{% if and %}";
        let parts = TagParts { at: (6, 3) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let and = IfConditionToken {
            at: (6, 3),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::And),
        };
        assert_eq!(tokens, vec![Ok(and)]);
    }

    #[test]
    fn test_lex_or() {
        let template = "{% if or %}";
        let parts = TagParts { at: (6, 2) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let or = IfConditionToken {
            at: (6, 2),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::Or),
        };
        assert_eq!(tokens, vec![Ok(or)]);
    }

    #[test]
    fn test_lex_not() {
        let template = "{% if not %}";
        let parts = TagParts { at: (6, 3) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let not = IfConditionToken {
            at: (6, 3),
            token_type: IfConditionTokenType::Not,
        };
        assert_eq!(tokens, vec![Ok(not)]);
    }

    #[test]
    fn test_lex_equal() {
        let template = "{% if == %}";
        let parts = TagParts { at: (6, 2) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let equal = IfConditionToken {
            at: (6, 2),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::Equal),
        };
        assert_eq!(tokens, vec![Ok(equal)]);
    }

    #[test]
    fn test_lex_not_equal() {
        let template = "{% if != %}";
        let parts = TagParts { at: (6, 2) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let not_equal = IfConditionToken {
            at: (6, 2),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::NotEqual),
        };
        assert_eq!(tokens, vec![Ok(not_equal)]);
    }

    #[test]
    fn test_lex_less_than() {
        let template = "{% if < %}";
        let parts = TagParts { at: (6, 1) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let less_than = IfConditionToken {
            at: (6, 1),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::LessThan),
        };
        assert_eq!(tokens, vec![Ok(less_than)]);
    }

    #[test]
    fn test_lex_greater_than() {
        let template = "{% if > %}";
        let parts = TagParts { at: (6, 1) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let greater_than = IfConditionToken {
            at: (6, 1),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::GreaterThan),
        };
        assert_eq!(tokens, vec![Ok(greater_than)]);
    }

    #[test]
    fn test_lex_less_equal() {
        let template = "{% if <= %}";
        let parts = TagParts { at: (6, 2) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let less_equal = IfConditionToken {
            at: (6, 2),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::LessThanEqual),
        };
        assert_eq!(tokens, vec![Ok(less_equal)]);
    }

    #[test]
    fn test_lex_greater_equal() {
        let template = "{% if >= %}";
        let parts = TagParts { at: (6, 2) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let greater_equal = IfConditionToken {
            at: (6, 2),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::GreaterThanEqual),
        };
        assert_eq!(tokens, vec![Ok(greater_equal)]);
    }

    #[test]
    fn test_lex_in() {
        let template = "{% if in %}";
        let parts = TagParts { at: (6, 2) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let in_ = IfConditionToken {
            at: (6, 2),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::In),
        };
        assert_eq!(tokens, vec![Ok(in_)]);
    }

    #[test]
    fn test_lex_not_in() {
        let template = "{% if not in %}";
        let parts = TagParts { at: (6, 6) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let not_in = IfConditionToken {
            at: (6, 6),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::NotIn),
        };
        assert_eq!(tokens, vec![Ok(not_in)]);
    }

    #[test]
    fn test_lex_is() {
        let template = "{% if is %}";
        let parts = TagParts { at: (6, 2) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let is = IfConditionToken {
            at: (6, 2),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::Is),
        };
        assert_eq!(tokens, vec![Ok(is)]);
    }

    #[test]
    fn test_lex_is_not() {
        let template = "{% if is not %}";
        let parts = TagParts { at: (6, 6) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let is_not = IfConditionToken {
            at: (6, 6),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::IsNot),
        };
        assert_eq!(tokens, vec![Ok(is_not)]);
    }

    #[test]
    fn test_lex_complex_condition() {
        let template = "{% if foo.bar|default:'spam' and count >= 1.5 or enabled is not False %}";
        let parts = TagParts { at: (6, 63) };
        let lexer = IfConditionLexer::new(template.into(), parts);
        let tokens: Vec<_> = lexer.collect();
        let foobar = IfConditionToken {
            at: (6, 22),
            token_type: IfConditionTokenType::Atom(IfConditionAtom::Variable),
        };
        let and = IfConditionToken {
            at: (29, 3),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::And),
        };
        let count = IfConditionToken {
            at: (33, 5),
            token_type: IfConditionTokenType::Atom(IfConditionAtom::Variable),
        };
        let greater_equal = IfConditionToken {
            at: (39, 2),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::GreaterThanEqual),
        };
        let numeric = IfConditionToken {
            at: (42, 3),
            token_type: IfConditionTokenType::Atom(IfConditionAtom::Numeric),
        };
        let or = IfConditionToken {
            at: (46, 2),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::Or),
        };
        let enabled = IfConditionToken {
            at: (49, 7),
            token_type: IfConditionTokenType::Atom(IfConditionAtom::Variable),
        };
        let is_not = IfConditionToken {
            at: (57, 6),
            token_type: IfConditionTokenType::Operator(IfConditionOperator::IsNot),
        };
        let falsey = IfConditionToken {
            at: (64, 5),
            token_type: IfConditionTokenType::Atom(IfConditionAtom::Variable),
        };
        let condition = vec![
            Ok(foobar),
            Ok(and),
            Ok(count),
            Ok(greater_equal),
            Ok(numeric),
            Ok(or),
            Ok(enabled),
            Ok(is_not),
            Ok(falsey),
        ];
        assert_eq!(tokens, condition);
    }

    #[test]
    fn test_lex_invalid_remainder() {
        let template = "{% if 'foo'remainder %}";
        let parts = TagParts { at: (6, 14) };
        let mut lexer = IfConditionLexer::new(template.into(), parts);
        let error = lexer.next().unwrap().unwrap_err();
        assert_eq!(error, LexerError::InvalidRemainder { at: (11, 9).into() });
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-lexer/src/tag/forloop.rs | dtl-lexer/src/tag/forloop.rs | // Silence lint warnings for Miette Diagnostic
// https://github.com/zkat/miette/issues/458
// https://github.com/rust-lang/rust/issues/147648
#![expect(unused_assignments)]
use miette::{Diagnostic, SourceSpan};
use thiserror::Error;
use crate::common::{LexerError, NextChar, lex_numeric, lex_text, lex_translated, lex_variable};
use crate::tag::TagParts;
use crate::types::{At, TemplateString};
/// Errors raised while lexing a `{% for %}` tag.
#[derive(Clone, Error, Debug, Diagnostic, PartialEq, Eq)]
pub enum ForLexerError {
    /// An error forwarded from the shared lexing helpers.
    #[error(transparent)]
    LexerError(#[from] LexerError),
    /// A loop variable that is not a valid identifier.
    #[error("Invalid variable name {name} in for loop:")]
    InvalidName {
        name: String,
        #[label("invalid variable name")]
        at: SourceSpan,
    },
    /// Nothing followed the `in` keyword.
    #[error("Expected an expression after the 'in' keyword:")]
    MissingExpression {
        #[label("after this keyword")]
        at: SourceSpan,
    },
    /// A token appeared where none was expected.
    #[error("Unexpected expression in for loop:")]
    UnexpectedExpression {
        #[label("unexpected expression")]
        at: SourceSpan,
    },
}
/// Errors raised while lexing the variable-name section of a
/// `{% for %}` tag, before the `in` keyword.
#[derive(Clone, Error, Debug, Diagnostic, PartialEq, Eq)]
pub enum ForLexerInError {
    /// Two names appeared without a separating comma.
    #[error("Unexpected expression in for loop. Did you miss a comma when unpacking?")]
    MissingComma {
        #[label("unexpected expression")]
        at: SourceSpan,
    },
    /// The `in` keyword (or another name) never appeared.
    #[error("Expected the 'in' keyword or a variable name:")]
    MissingIn {
        #[label("after this name")]
        at: SourceSpan,
    },
}
/// The kind of the expression iterated over by a `{% for %}` loop.
#[derive(Debug, PartialEq, Eq)]
pub enum ForTokenType {
    /// A numeric literal.
    Numeric,
    /// A quoted string literal.
    Text,
    /// A translated string literal, e.g. `_('foo')`.
    TranslatedText,
    /// A variable, possibly with filters.
    Variable,
}
/// A loop-variable name before the `in` keyword (e.g. `x` in
/// `{% for x in xs %}`). NOTE(review): not constructed in this chunk —
/// presumably produced by the name-lexing half of `ForLexer`.
#[derive(Debug, PartialEq, Eq)]
pub struct ForVariableNameToken {
    pub at: At,
}
/// The expression iterated over by a `{% for %}` loop.
#[derive(Debug, PartialEq, Eq)]
pub struct ForVariableToken {
    /// Location of the expression in the template source.
    pub at: At,
    pub token_type: ForTokenType,
}
// Internal lexer state: still consuming loop variable names, or finished.
enum State {
    VariableName,
    Done,
}
/// Lexer for the body of a `{% for %}` tag.
pub struct ForLexer<'t> {
    // Unconsumed remainder of the tag body.
    rest: &'t str,
    // Byte offset of `rest` within the full template source.
    byte: usize,
    // Whether variable names are still being lexed.
    state: State,
    // Span of the last consumed token, used to anchor error reports.
    previous_at: Option<At>,
}
impl<'t> ForLexer<'t> {
    /// Creates a lexer positioned at the start of the tag body described by `parts`.
    pub fn new(template: TemplateString<'t>, parts: TagParts) -> Self {
        Self {
            rest: template.content(parts.at),
            byte: parts.at.0,
            state: State::VariableName,
            previous_at: None,
        }
    }
    /// Lexes the iterable expression after `in`: a translated string, quoted
    /// string, numeric literal, or variable — dispatched on the first character.
    pub fn lex_expression(&mut self) -> Result<ForVariableToken, ForLexerError> {
        if self.rest.is_empty() {
            return Err(ForLexerError::MissingExpression {
                at: self.previous_at.expect("previous_at is set").into(),
            });
        }
        let mut chars = self.rest.chars();
        let token = match chars.next().expect("self.rest is not empty") {
            // `_(` starts a translated string; a bare `_` starts a variable name.
            '_' => {
                if let Some('(') = chars.next() {
                    self.lex_translated(&mut chars)?
                } else {
                    self.lex_variable()
                }
            }
            '"' => self.lex_text(&mut chars, '"')?,
            '\'' => self.lex_text(&mut chars, '\'')?,
            '0'..='9' | '-' => self.lex_numeric(),
            _ => self.lex_variable(),
        };
        // Reject any non-whitespace glued onto the end of the expression.
        self.lex_remainder()?;
        Ok(token)
    }
    // Lexes a variable reference and advances past it.
    fn lex_variable(&mut self) -> ForVariableToken {
        let (at, byte, rest) = lex_variable(self.byte, self.rest);
        self.rest = rest;
        self.byte = byte;
        ForVariableToken {
            token_type: ForTokenType::Variable,
            at,
        }
    }
    // Lexes a numeric literal and advances past it.
    fn lex_numeric(&mut self) -> ForVariableToken {
        let (at, byte, rest) = lex_numeric(self.byte, self.rest);
        self.rest = rest;
        self.byte = byte;
        ForVariableToken {
            at,
            token_type: ForTokenType::Numeric,
        }
    }
    // Lexes a string literal delimited by `end` and advances past it.
    fn lex_text(
        &mut self,
        chars: &mut std::str::Chars,
        end: char,
    ) -> Result<ForVariableToken, ForLexerError> {
        let (at, byte, rest) = lex_text(self.byte, self.rest, chars, end)?;
        self.rest = rest;
        self.byte = byte;
        Ok(ForVariableToken {
            token_type: ForTokenType::Text,
            at,
        })
    }
    // Lexes a `_("...")` translated string and advances past it.
    fn lex_translated(
        &mut self,
        chars: &mut std::str::Chars,
    ) -> Result<ForVariableToken, ForLexerError> {
        let (at, byte, rest) = lex_translated(self.byte, self.rest, chars)?;
        self.rest = rest;
        self.byte = byte;
        Ok(ForVariableToken {
            token_type: ForTokenType::TranslatedText,
            at,
        })
    }
    // After a token, either skips the whitespace separating it from the next
    // token, or reports the trailing non-whitespace run as invalid.
    fn lex_remainder(&mut self) -> Result<(), ForLexerError> {
        let remainder = self.rest.next_whitespace();
        match remainder {
            0 => {
                let rest = self.rest.trim_start();
                self.byte += self.rest.len() - rest.len();
                self.rest = rest;
                Ok(())
            }
            n => Err(LexerError::InvalidRemainder {
                at: (self.byte, n).into(),
            }
            .into()),
        }
    }
    /// Consumes the `in` keyword (and trailing whitespace) or reports which
    /// keyword was missing/unexpected.
    pub fn lex_in(&mut self) -> Result<(), ForLexerInError> {
        if self.rest.is_empty() {
            return Err(ForLexerInError::MissingIn {
                at: self.previous_at.expect("previous_at is set").into(),
            });
        }
        let index = self.rest.next_whitespace();
        let at = (self.byte, index);
        match &self.rest[..index] {
            "in" => {
                let next_index = self.rest[index..].next_non_whitespace();
                self.byte += index + next_index;
                self.rest = &self.rest[index + next_index..];
                self.previous_at = Some(at);
                Ok(())
            }
            _ => Err(ForLexerInError::MissingComma { at: at.into() }),
        }
    }
    /// Lexes an optional trailing `reversed` keyword. Returns `Ok(true)` if it
    /// terminates the tag, `Ok(false)` if the tag is already exhausted, and an
    /// error for any other trailing tokens.
    pub fn lex_reversed(&mut self) -> Result<bool, ForLexerError> {
        if self.rest.is_empty() {
            return Ok(false);
        }
        let index = self.rest.next_whitespace();
        let at = match &self.rest[..index] {
            "reversed" => {
                let next_index = self.rest[index..].next_non_whitespace();
                match self.rest[index + next_index..].len() {
                    0 => return Ok(true),
                    // `reversed` followed by more tokens is still an error.
                    len => (self.byte + index + next_index, len),
                }
            }
            _ => (self.byte, index),
        };
        Err(ForLexerError::UnexpectedExpression { at: at.into() })
    }
    /// Lexes the next comma-separated loop variable name, or `None` once the
    /// name list is exhausted. Names containing quotes or pipes are rejected.
    pub fn lex_variable_name(&mut self) -> Option<Result<ForVariableNameToken, ForLexerError>> {
        match self.state {
            State::VariableName if !self.rest.is_empty() => {}
            State::VariableName => {
                self.state = State::Done;
                return None;
            }
            State::Done => return None,
        }
        let index = self.rest.next_whitespace();
        // A comma before the next whitespace ends this name; otherwise this is
        // the final name and the lexer leaves the name-lexing state.
        let (index, next_index) = match self.rest.find(',') {
            Some(comma_index) if comma_index < index => {
                let next_index = self.rest[comma_index + 1..].next_non_whitespace();
                (comma_index, next_index + 1)
            }
            _ => {
                self.state = State::Done;
                let next_index = self.rest[index..].next_non_whitespace();
                (index, next_index)
            }
        };
        let at = (self.byte, index);
        self.previous_at = Some(at);
        let name = &self.rest[..index];
        if name.contains(['"', '\'', '|']) {
            self.rest = "";
            self.state = State::Done;
            return Some(Err(ForLexerError::InvalidName {
                name: name.to_string(),
                at: at.into(),
            }));
        }
        self.byte += index + next_index;
        self.rest = &self.rest[index + next_index..];
        Some(Ok(ForVariableNameToken { at }))
    }
}
#[cfg(test)]
mod tests {
    // Exercises `ForLexer` end-to-end: the spans asserted below are byte
    // offsets into each literal template string.
    use super::*;
    #[test]
    fn test_lex_simple() {
        let template = "{% for foo in bar %}";
        let parts = TagParts { at: (7, 10) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let bar = ForVariableToken {
            at: (14, 3),
            token_type: ForTokenType::Variable,
        };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap(), bar);
        assert!(!lexer.lex_reversed().unwrap());
    }
    #[test]
    fn test_lex_text() {
        let template = "{% for foo in 'bar' %}";
        let parts = TagParts { at: (7, 12) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let bar = ForVariableToken {
            at: (14, 5),
            token_type: ForTokenType::Text,
        };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap(), bar);
        assert!(!lexer.lex_reversed().unwrap());
    }
    #[test]
    fn test_lex_text_double_quotes() {
        let template = "{% for foo in \"bar\" %}";
        let parts = TagParts { at: (7, 12) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let bar = ForVariableToken {
            at: (14, 5),
            token_type: ForTokenType::Text,
        };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap(), bar);
        assert!(!lexer.lex_reversed().unwrap());
    }
    #[test]
    fn test_lex_translated_text() {
        let template = "{% for foo in _('bar') %}";
        let parts = TagParts { at: (7, 15) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let bar = ForVariableToken {
            at: (14, 8),
            token_type: ForTokenType::TranslatedText,
        };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap(), bar);
        assert!(!lexer.lex_reversed().unwrap());
    }
    #[test]
    fn test_lex_underscore_expression() {
        // A bare leading underscore is a variable, not a translation call.
        let template = "{% for foo in _bar %}";
        let parts = TagParts { at: (7, 11) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let bar = ForVariableToken {
            at: (14, 4),
            token_type: ForTokenType::Variable,
        };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap(), bar);
        assert!(!lexer.lex_reversed().unwrap());
    }
    #[test]
    fn test_lex_int() {
        let template = "{% for foo in 123 %}";
        let parts = TagParts { at: (7, 10) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let bar = ForVariableToken {
            at: (14, 3),
            token_type: ForTokenType::Numeric,
        };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap(), bar);
        assert!(!lexer.lex_reversed().unwrap());
    }
    #[test]
    fn test_lex_variable_names() {
        let template = "{% for foo, bar in spam %}";
        let parts = TagParts { at: (7, 16) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let bar = ForVariableNameToken { at: (12, 3) };
        let spam = ForVariableToken {
            at: (19, 4),
            token_type: ForTokenType::Variable,
        };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), bar);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap(), spam);
        assert!(!lexer.lex_reversed().unwrap());
    }
    #[test]
    fn test_lex_variable_names_no_whitespace_after_comma() {
        let template = "{% for foo,bar in spam %}";
        let parts = TagParts { at: (7, 15) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let bar = ForVariableNameToken { at: (11, 3) };
        let spam = ForVariableToken {
            at: (18, 4),
            token_type: ForTokenType::Variable,
        };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), bar);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap(), spam);
        assert!(!lexer.lex_reversed().unwrap());
    }
    #[test]
    fn test_lex_comma_in_text() {
        // A comma inside a quoted iterable must not be treated as a separator.
        let template = "{% for foo in 'spam,' %}";
        let parts = TagParts { at: (7, 14) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let spam = ForVariableToken {
            at: (14, 7),
            token_type: ForTokenType::Text,
        };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap(), spam);
        assert!(!lexer.lex_reversed().unwrap());
    }
    #[test]
    fn test_lex_reversed() {
        let template = "{% for foo in bar reversed %}";
        let parts = TagParts { at: (7, 19) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let bar = ForVariableToken {
            at: (14, 3),
            token_type: ForTokenType::Variable,
        };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap(), bar);
        assert!(lexer.lex_reversed().unwrap());
    }
    #[test]
    fn test_unexpected_before_in() {
        let template = "{% for foo bar in bar reversed %}";
        let parts = TagParts { at: (7, 23) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let unexpected = ForLexerInError::MissingComma { at: (11, 3).into() };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        assert_eq!(lexer.lex_in().unwrap_err(), unexpected);
    }
    #[test]
    fn test_unexpected_after_iterable() {
        let template = "{% for foo in bar invalid %}";
        let parts = TagParts { at: (7, 18) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let bar = ForVariableToken {
            at: (14, 3),
            token_type: ForTokenType::Variable,
        };
        let unexpected = ForLexerError::UnexpectedExpression { at: (18, 7).into() };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap(), bar);
        assert_eq!(lexer.lex_reversed().unwrap_err(), unexpected);
    }
    #[test]
    fn test_unexpected_after_reversed() {
        let template = "{% for foo in bar reversed invalid %}";
        let parts = TagParts { at: (7, 27) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let bar = ForVariableToken {
            at: (14, 3),
            token_type: ForTokenType::Variable,
        };
        let unexpected = ForLexerError::UnexpectedExpression { at: (27, 7).into() };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap(), bar);
        assert_eq!(lexer.lex_reversed().unwrap_err(), unexpected);
    }
    #[test]
    fn test_incomplete_string() {
        let template = "{% for foo in 'bar %}";
        let parts = TagParts { at: (7, 11) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let incomplete = LexerError::IncompleteString { at: (14, 4).into() };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap_err(), incomplete.into());
    }
    #[test]
    fn test_incomplete_translated_string() {
        let template = "{% for foo in _('bar' %}";
        let parts = TagParts { at: (7, 14) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let incomplete = LexerError::IncompleteTranslatedString { at: (14, 7).into() };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap_err(), incomplete.into());
    }
    #[test]
    fn test_invalid_remainder() {
        let template = "{% for foo in 'bar'baz %}";
        let parts = TagParts { at: (7, 15) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let foo = ForVariableNameToken { at: (7, 3) };
        let incomplete = LexerError::InvalidRemainder { at: (19, 3).into() };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap(), foo);
        lexer.lex_in().unwrap();
        assert_eq!(lexer.lex_expression().unwrap_err(), incomplete.into());
    }
    #[test]
    fn test_invalid_name() {
        let template = "{% for '2' in 'bar' %}";
        let parts = TagParts { at: (7, 12) };
        let mut lexer = ForLexer::new(template.into(), parts);
        let invalid = ForLexerError::InvalidName {
            name: "'2'".to_string(),
            at: (7, 3).into(),
        };
        assert_eq!(lexer.lex_variable_name().unwrap().unwrap_err(), invalid);
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/src/path.rs | src/path.rs | #![expect(unused_assignments)]
use std::borrow::Cow;
use std::path::{Path, PathBuf};
use miette::{Diagnostic, SourceSpan};
use sugar_path::SugarPath;
use thiserror::Error;
use dtl_lexer::types::At;
/// Errors raised when resolving a `./` or `../` template path.
#[derive(Error, Debug, Diagnostic, PartialEq, Eq)]
pub enum RelativePathError {
    /// The relative path escapes the directory hierarchy of the origin template.
    #[error(
        "The relative path '{template_path}' points outside the file hierarchy that template '{origin}' is in."
    )]
    Outside {
        #[label("relative path")]
        at: SourceSpan,
        origin: PathBuf,
        template_path: String,
    },
    /// A relative path was used but the current template has no known origin.
    #[error("The relative path '{path}' cannot be evaluated due to an unknown template origin.")]
    UnknownOrigin {
        path: String,
        #[label("here")]
        at: SourceSpan,
    },
}
/// Resolves a `./` or `../` template `path` against the directory of `origin`.
///
/// Returns `Ok(None)` when `path` is not relative (no `./` or `../` prefix),
/// `Ok(Some(resolved))` on success, and an error when the path escapes the
/// origin's hierarchy or no origin is known. `at` is the source span used for
/// error reporting.
pub fn construct_relative_path<'a>(
    path: &'a str,
    origin: Option<&'a str>,
    at: At,
) -> Result<Option<Cow<'a, str>>, RelativePathError> {
    let adjacent = path.starts_with("./");
    if !adjacent && !path.starts_with("../") {
        return Ok(None);
    }
    match origin {
        Some(origin) => {
            let origin = Path::new(origin);
            let path = match origin.parent() {
                // Origin has no parent directory: `./x` normalizes in place,
                // but `../x` necessarily escapes the hierarchy.
                None if adjacent => Path::new(path).normalize(),
                None => {
                    return Err(RelativePathError::Outside {
                        at: at.into(),
                        origin: origin.to_path_buf(),
                        template_path: path.to_string(),
                    });
                }
                Some(directory) => {
                    let new_path = Path::join(directory, path).normalize();
                    // A normalized result still starting with `../` escaped
                    // the origin's directory tree.
                    if new_path.starts_with("../") {
                        return Err(RelativePathError::Outside {
                            at: at.into(),
                            origin: origin.to_path_buf(),
                            template_path: path.to_string(),
                        });
                    }
                    new_path
                }
            };
            Ok(Some(Cow::Owned(
                path.to_str()
                    .expect("Template names should be valid unicode.")
                    .to_string(),
            )))
        }
        None => Err(RelativePathError::UnknownOrigin {
            at: at.into(),
            path: path.to_string(),
        }),
    }
}
#[cfg(test)]
mod tests {
    // Covers the edge cases where the origin has no parent directory
    // (root "/" and the empty string).
    use super::*;
    #[test]
    fn test_construct_adjacent_path_origin_root() {
        let path = "./foo.txt";
        let origin = Some("/");
        let at = (0, 8);
        assert_eq!(
            construct_relative_path(path, origin, at).unwrap().unwrap(),
            "foo.txt"
        );
    }
    #[test]
    fn test_construct_parent_path_origin_root() {
        let path = "../foo.txt";
        let origin = "/";
        let at = (0, 8);
        assert_eq!(
            construct_relative_path(path, Some(origin), at).unwrap_err(),
            RelativePathError::Outside {
                at: at.into(),
                origin: origin.into(),
                template_path: path.to_string(),
            }
        );
    }
    #[test]
    fn test_construct_adjacent_path_origin_empty_string() {
        let path = "./foo.txt";
        let origin = Some("");
        let at = (0, 8);
        assert_eq!(
            construct_relative_path(path, origin, at).unwrap().unwrap(),
            "foo.txt"
        );
    }
    #[test]
    fn test_construct_parent_path_origin_empty_string() {
        let path = "../foo.txt";
        let origin = "";
        let at = (0, 8);
        assert_eq!(
            construct_relative_path(path, Some(origin), at).unwrap_err(),
            RelativePathError::Outside {
                at: at.into(),
                origin: origin.into(),
                template_path: path.to_string(),
            }
        );
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/src/lib.rs | src/lib.rs | /// Custom `todo!` macro that returns a pickable `PyNotImplementedError` instead of
/// a non-pickable `PanicException`. This allows Django's test suite to run in parallel.
///
/// This macro shadows the standard library's `todo!` macro throughout the crate.
#[macro_export]
macro_rules! todo {
    // Zero-argument form: raise PyNotImplementedError with a fixed message
    // and early-return it from the enclosing fallible function.
    () => {{
        let err: ::pyo3::PyErr = ::pyo3::exceptions::PyNotImplementedError::new_err("not yet implemented");
        return Err(err.into())
    }};
    // Formatted form: same, but appends the caller-supplied format message.
    ($($arg:tt)+) => {{
        let err: ::pyo3::PyErr = ::pyo3::exceptions::PyNotImplementedError::new_err(
            format!("not yet implemented: {}", format_args!($($arg)+))
        );
        return Err(err.into())
    }};
}
mod error;
mod filters;
mod loaders;
mod parse;
mod path;
pub mod render;
mod template;
mod types;
mod utils;
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/src/filters.rs | src/filters.rs | use std::sync::Arc;
use pyo3::prelude::*;
use crate::types::Argument;
use dtl_lexer::types::At;
/// Every template filter the engine knows how to apply; `External` wraps a
/// user-registered Python callable, all other variants are built in.
#[derive(Clone, Debug, PartialEq)]
pub enum FilterType {
    Add(AddFilter),
    AddSlashes(AddSlashesFilter),
    Capfirst(CapfirstFilter),
    Center(CenterFilter),
    Cut(CutFilter),
    Default(DefaultFilter),
    DefaultIfNone(DefaultIfNoneFilter),
    Date(DateFilter),
    Escape(EscapeFilter),
    Escapejs(EscapejsFilter),
    External(ExternalFilter),
    Lower(LowerFilter),
    Length(LengthFilter),
    Safe(SafeFilter),
    Slugify(SlugifyFilter),
    Title(TitleFilter),
    Upper(UpperFilter),
    Wordcount(WordcountFilter),
    Wordwrap(WordwrapFilter),
    Yesno(YesnoFilter),
}
/// Marker for the argument-less `addslashes` filter.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct AddSlashesFilter;
/// The `add` filter; holds its required right-hand argument.
#[derive(Clone, Debug, PartialEq)]
pub struct AddFilter {
    pub argument: Argument,
}
impl AddFilter {
    pub fn new(argument: Argument) -> Self {
        Self { argument }
    }
}
/// Marker for the argument-less `capfirst` filter.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CapfirstFilter;
/// The `center` filter; holds its required width argument.
#[derive(Clone, Debug, PartialEq)]
pub struct CenterFilter {
    pub argument: Argument,
}
impl CenterFilter {
    pub fn new(argument: Argument) -> Self {
        Self { argument }
    }
}
/// The `cut` filter; holds its required argument.
#[derive(Clone, Debug, PartialEq)]
pub struct CutFilter {
    pub argument: Argument,
}
impl CutFilter {
    pub fn new(argument: Argument) -> Self {
        Self { argument }
    }
}
/// The `default` filter; keeps its argument plus the filter name's source
/// span for error reporting.
#[derive(Clone, Debug, PartialEq)]
pub struct DefaultFilter {
    pub argument: Argument,
    pub at: At,
}
impl DefaultFilter {
    pub fn new(argument: Argument, at: At) -> Self {
        Self { argument, at }
    }
}
/// The `default_if_none` filter; holds its required argument.
#[derive(Clone, Debug, PartialEq)]
pub struct DefaultIfNoneFilter {
    pub argument: Argument,
}
impl DefaultIfNoneFilter {
    pub fn new(argument: Argument) -> Self {
        Self { argument }
    }
}
/// The `date` filter; the format argument is optional, and the filter name's
/// span is kept for error reporting.
#[derive(Clone, Debug, PartialEq)]
pub struct DateFilter {
    pub argument: Option<Argument>,
    pub at: At,
}
impl DateFilter {
    pub fn new(argument: Option<Argument>, at: At) -> Self {
        Self { argument, at }
    }
}
/// Marker for the argument-less `escape` filter.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct EscapeFilter;
/// Marker for the argument-less `escapejs` filter.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct EscapejsFilter;
/// A user-registered Python filter callable plus its optional argument.
#[derive(Clone, Debug)]
pub struct ExternalFilter {
    // `Arc` makes cloning the parsed template cheap and enables the
    // pointer-based equality below.
    pub filter: Arc<Py<PyAny>>,
    pub argument: Option<Argument>,
}
impl ExternalFilter {
    pub fn new(filter: Py<PyAny>, argument: Option<Argument>) -> Self {
        Self {
            filter: Arc::new(filter),
            argument,
        }
    }
}
impl PartialEq for ExternalFilter {
    fn eq(&self, other: &Self) -> bool {
        // We use `Arc::ptr_eq` here to avoid needing the `py` token for true
        // equality comparison between two `Py` smart pointers.
        //
        // We only use `eq` in tests, so this concession is acceptable here.
        self.argument.eq(&other.argument) && Arc::ptr_eq(&self.filter, &other.filter)
    }
}
// Markers for the remaining argument-less built-in filters.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LowerFilter;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LengthFilter;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SafeFilter;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct SlugifyFilter;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct TitleFilter;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct UpperFilter;
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct WordcountFilter;
/// The `wordwrap` filter; holds its required width argument.
#[derive(Clone, Debug, PartialEq)]
pub struct WordwrapFilter {
    pub argument: Argument,
}
impl WordwrapFilter {
    pub fn new(argument: Argument) -> Self {
        Self { argument }
    }
}
/// The `yesno` filter; the mapping argument is optional, and the filter
/// name's span is kept for error reporting.
#[derive(Clone, Debug, PartialEq)]
pub struct YesnoFilter {
    pub at: At,
    pub argument: Option<Argument>,
}
impl YesnoFilter {
    pub fn new(at: At, argument: Option<Argument>) -> Self {
        Self { at, argument }
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/src/parse.rs | src/parse.rs | // Silence lint warnings for Miette Diagnostic
// https://github.com/zkat/miette/issues/458
// https://github.com/rust-lang/rust/issues/147648
#![expect(unused_assignments)]
use std::borrow::Cow;
use std::collections::{HashMap, HashSet};
use std::iter::Peekable;
use std::sync::Arc;
use either::Either;
use miette::{Diagnostic, SourceSpan};
use num_bigint::BigInt;
use pyo3::intern;
use pyo3::prelude::*;
use thiserror::Error;
use crate::filters::AddFilter;
use crate::filters::AddSlashesFilter;
use crate::filters::CapfirstFilter;
use crate::filters::CenterFilter;
use crate::filters::CutFilter;
use crate::filters::DateFilter;
use crate::filters::DefaultFilter;
use crate::filters::DefaultIfNoneFilter;
use crate::filters::EscapeFilter;
use crate::filters::EscapejsFilter;
use crate::filters::ExternalFilter;
use crate::filters::FilterType;
use crate::filters::LengthFilter;
use crate::filters::LowerFilter;
use crate::filters::SafeFilter;
use crate::filters::SlugifyFilter;
use crate::filters::TitleFilter;
use crate::filters::UpperFilter;
use crate::filters::WordcountFilter;
use crate::filters::WordwrapFilter;
use crate::filters::YesnoFilter;
use dtl_lexer::common::{LexerError, text_content_at, translated_text_content_at};
use dtl_lexer::core::{Lexer, TokenType};
use dtl_lexer::tag::autoescape::{AutoescapeEnabled, AutoescapeError, lex_autoescape_argument};
use dtl_lexer::tag::custom_tag::{
SimpleTagLexer, SimpleTagLexerError, SimpleTagToken, SimpleTagTokenType,
};
use dtl_lexer::tag::forloop::{ForLexer, ForLexerError, ForLexerInError, ForTokenType};
use dtl_lexer::tag::ifcondition::{
IfConditionAtom, IfConditionLexer, IfConditionOperator, IfConditionTokenType,
};
use dtl_lexer::tag::include::{
IncludeLexer, IncludeLexerError, IncludeTemplateToken, IncludeTemplateTokenType, IncludeToken,
IncludeWithToken,
};
use dtl_lexer::tag::load::{LoadLexer, LoadToken};
use dtl_lexer::tag::lorem::{LoremError, LoremLexer, LoremMethod, LoremTokenType};
use dtl_lexer::tag::{TagLexerError, TagParts, lex_tag};
use dtl_lexer::types::{At, TemplateString};
use dtl_lexer::variable::{
Argument as ArgumentToken, VariableLexerError, VariableToken, lex_variable_or_filter,
};
use dtl_lexer::{START_TAG_LEN, TemplateContent};
use crate::path::{RelativePathError, construct_relative_path};
use crate::template::django_rusty_templates::Engine;
use crate::types::Argument;
use crate::types::ArgumentType;
use crate::types::ForVariable;
use crate::types::ForVariableName;
use crate::types::Text;
use crate::types::TranslatedText;
use dtl_lexer::types::Variable;
// Converts a lexer token into its parsed representation `R`, reading source
// text through `parser`.
trait Parse<R> {
    fn parse(&self, parser: &Parser) -> Result<R, ParseError>;
}
/// Parsed `{% lorem %}` tag: how many items to emit, in which form
/// (words/paragraphs/etc.), and whether to use the fixed "common" text.
#[derive(Debug, Clone, PartialEq)]
pub struct Lorem {
    pub count: TagElement,
    pub method: LoremMethod,
    pub common: bool,
}
impl Parse<Argument> for ArgumentToken {
    /// Converts a lexed filter argument into a typed `Argument`, parsing
    /// numeric literals eagerly (int first, then float).
    fn parse(&self, parser: &Parser) -> Result<Argument, ParseError> {
        Ok(match *self {
            Self::Variable(at) => Argument {
                at,
                argument_type: parser.parse_for_variable(at).into(),
            },
            Self::Text(at) => Argument {
                at,
                argument_type: ArgumentType::Text(Text::new(self.content_at())),
            },
            Self::TranslatedText(at) => Argument {
                at,
                argument_type: ArgumentType::TranslatedText(TranslatedText::new(self.content_at())),
            },
            Self::Numeric(at) => {
                let content = parser.template.content(at);
                // Prefer an exact integer; fall back to float; otherwise the
                // literal is malformed.
                let argument_type = match content.parse::<BigInt>() {
                    Ok(n) => ArgumentType::Int(n),
                    Err(_) => match content.parse::<f64>() {
                        Ok(f) => ArgumentType::Float(f),
                        Err(_) => return Err(ParseError::InvalidNumber { at: at.into() }),
                    },
                };
                Argument { at, argument_type }
            }
        })
    }
}
/// A parsed expression inside a tag: a literal, a (for-loop) variable, or a
/// filter chain.
#[derive(Clone, Debug, PartialEq)]
pub enum TagElement {
    Int(BigInt),
    Float(f64),
    Text(Text),
    TranslatedText(Text),
    Variable(Variable),
    ForVariable(ForVariable),
    // Boxed: `Filter` contains a `TagElement`, so this breaks the recursion.
    Filter(Box<Filter>),
}
fn unexpected_argument(filter: &'static str, right: Argument) -> ParseError {
ParseError::UnexpectedArgument {
filter,
at: right.at.into(),
}
}
/// A filter application: the span of the filter name (`at`), the span of the
/// whole expression so far (`all_at`), the expression being filtered, and the
/// resolved filter.
#[derive(Clone, Debug, PartialEq)]
pub struct Filter {
    pub at: At,
    pub all_at: At,
    pub left: TagElement,
    pub filter: FilterType,
}
impl Filter {
    /// Resolves a filter name (the text at `at`) against the built-in filters,
    /// then against the parser's loaded external filters, validating that the
    /// optional `right` argument is present exactly when the filter requires it.
    pub fn new(
        parser: &Parser,
        at: At,
        all_at: At,
        left: TagElement,
        right: Option<Argument>,
    ) -> Result<Self, ParseError> {
        let filter = match parser.template.content(at) {
            "add" => match right {
                Some(right) => FilterType::Add(AddFilter::new(right)),
                None => return Err(ParseError::MissingArgument { at: at.into() }),
            },
            "addslashes" => match right {
                Some(right) => return Err(unexpected_argument("addslashes", right)),
                None => FilterType::AddSlashes(AddSlashesFilter),
            },
            "capfirst" => match right {
                Some(right) => return Err(unexpected_argument("capfirst", right)),
                None => FilterType::Capfirst(CapfirstFilter),
            },
            "center" => match right {
                Some(right) => FilterType::Center(CenterFilter::new(right)),
                None => return Err(ParseError::MissingArgument { at: at.into() }),
            },
            "cut" => match right {
                Some(right) => FilterType::Cut(CutFilter::new(right)),
                None => return Err(ParseError::MissingArgument { at: at.into() }),
            },
            "default" => match right {
                Some(right) => FilterType::Default(DefaultFilter::new(right, at)),
                None => return Err(ParseError::MissingArgument { at: at.into() }),
            },
            "default_if_none" => match right {
                Some(right) => FilterType::DefaultIfNone(DefaultIfNoneFilter::new(right)),
                None => return Err(ParseError::MissingArgument { at: at.into() }),
            },
            // `date` accepts an optional argument, so no validation needed.
            "date" => FilterType::Date(DateFilter::new(right, at)),
            "escape" => match right {
                Some(right) => return Err(unexpected_argument("escape", right)),
                None => FilterType::Escape(EscapeFilter),
            },
            "escapejs" => match right {
                Some(right) => return Err(unexpected_argument("escapejs", right)),
                None => FilterType::Escapejs(EscapejsFilter),
            },
            "lower" => match right {
                Some(right) => return Err(unexpected_argument("lower", right)),
                None => FilterType::Lower(LowerFilter),
            },
            "length" => match right {
                Some(right) => return Err(unexpected_argument("length", right)),
                None => FilterType::Length(LengthFilter),
            },
            "safe" => match right {
                Some(right) => return Err(unexpected_argument("safe", right)),
                None => FilterType::Safe(SafeFilter),
            },
            "slugify" => match right {
                Some(right) => return Err(unexpected_argument("slugify", right)),
                None => FilterType::Slugify(SlugifyFilter),
            },
            "title" => match right {
                Some(right) => return Err(unexpected_argument("title", right)),
                None => FilterType::Title(TitleFilter),
            },
            "upper" => match right {
                Some(right) => return Err(unexpected_argument("upper", right)),
                None => FilterType::Upper(UpperFilter),
            },
            "wordcount" => match right {
                Some(right) => return Err(unexpected_argument("wordcount", right)),
                None => FilterType::Wordcount(WordcountFilter),
            },
            "wordwrap" => match right {
                Some(right) => FilterType::Wordwrap(WordwrapFilter::new(right)),
                None => return Err(ParseError::MissingArgument { at: at.into() }),
            },
            // `yesno` accepts an optional argument.
            "yesno" => FilterType::Yesno(YesnoFilter::new(at, right)),
            // Anything else must be a filter loaded via `{% load %}`.
            external => {
                let external = match parser.external_filters.get(external) {
                    Some(external) => external.clone().unbind(),
                    None => {
                        return Err(ParseError::InvalidFilter {
                            at: at.into(),
                            filter: external.to_string(),
                        });
                    }
                };
                FilterType::External(ExternalFilter::new(external, right))
            }
        };
        Ok(Self {
            at,
            all_at,
            left,
            filter,
        })
    }
}
/// Parses a numeric literal, preferring an exact integer and falling back to
/// a float; anything else is an invalid-number error at `at`.
fn parse_numeric(content: &str, at: At) -> Result<TagElement, ParseError> {
    if let Ok(integer) = content.parse::<BigInt>() {
        return Ok(TagElement::Int(integer));
    }
    if let Ok(float) = content.parse::<f64>() {
        return Ok(TagElement::Float(float));
    }
    Err(ParseError::InvalidNumber { at: at.into() })
}
impl Parse<TagElement> for SimpleTagToken {
    /// Converts a custom-tag argument token into a `TagElement`, dispatching
    /// on the lexed token type.
    fn parse(&self, parser: &Parser) -> Result<TagElement, ParseError> {
        let content_at = self.content_at();
        let (start, _len) = content_at;
        let content = parser.template.content(content_at);
        match self.token_type {
            SimpleTagTokenType::Numeric => parse_numeric(content, self.at),
            SimpleTagTokenType::Text => Ok(TagElement::Text(Text::new(content_at))),
            SimpleTagTokenType::TranslatedText => {
                Ok(TagElement::TranslatedText(Text::new(content_at)))
            }
            SimpleTagTokenType::Variable => parser.parse_variable(content, content_at, start),
        }
    }
}
/// Converts the template-name token of an `{% include %}` tag into either a
/// literal name or a variable that resolves to one at render time.
fn parse_include_template_token(
    token: IncludeTemplateToken,
    parser: &Parser,
) -> Result<IncludeTemplateName, ParseError> {
    // Resolve the token's span and raw text within the template source.
    let content_at = token.content_at();
    let content = parser.template.content(content_at);
    match token.token_type {
        IncludeTemplateTokenType::Text => Ok(IncludeTemplateName::Text(Text::new(content_at))),
        IncludeTemplateTokenType::Variable => {
            let (start, _) = content_at;
            let variable = parser.parse_variable(content, content_at, start)?;
            Ok(IncludeTemplateName::Variable(variable))
        }
    }
}
/// Parsed `{% url %}` tag: the view name expression, its positional and
/// keyword arguments, and the optional `as variable` target.
#[derive(Clone, Debug, PartialEq)]
pub struct Url {
    pub view_name: TagElement,
    pub args: Vec<TagElement>,
    pub kwargs: Vec<(String, TagElement)>,
    pub variable: Option<String>,
}
/// Boolean expression tree built by the `{% if %}` parser. Binary operators
/// box their (lhs, rhs) pair; `Not` boxes its single operand.
#[derive(Clone, Debug, PartialEq)]
pub enum IfCondition {
    Variable(TagElement),
    And(Box<(Self, Self)>),
    Or(Box<(Self, Self)>),
    Not(Box<Self>),
    Equal(Box<(Self, Self)>),
    NotEqual(Box<(Self, Self)>),
    LessThan(Box<(Self, Self)>),
    GreaterThan(Box<(Self, Self)>),
    LessThanEqual(Box<(Self, Self)>),
    GreaterThanEqual(Box<(Self, Self)>),
    In(Box<(Self, Self)>),
    NotIn(Box<(Self, Self)>),
    Is(Box<(Self, Self)>),
    IsNot(Box<(Self, Self)>),
}
/// Parses the boolean expression of an `{% if %}` tag. An empty tag body is
/// reported as a missing boolean expression at `at`.
fn parse_if_condition(
    parser: &mut Parser,
    parts: TagParts,
    at: At,
) -> Result<IfCondition, ParseError> {
    let mut lexer = IfConditionLexer::new(parser.template, parts).peekable();
    // Peek once so an empty body is caught before descending into the
    // precedence-climbing parser (which starts at binding power 0).
    if lexer.peek().is_some() {
        parse_if_binding_power(parser, &mut lexer, 0, at)
    } else {
        Err(ParseError::MissingBooleanExpression { at: at.into() })
    }
}
/// Precedence-climbing (Pratt) parser for `{% if %}` conditions: parses one
/// operand, then folds in operators whose binding power exceeds
/// `min_binding_power`. `at` anchors errors when the input ends early.
fn parse_if_binding_power(
    parser: &mut Parser,
    lexer: &mut Peekable<IfConditionLexer>,
    min_binding_power: u8,
    at: At,
) -> Result<IfCondition, ParseError> {
    let Some(token) = lexer.next().transpose()? else {
        return Err(ParseError::UnexpectedEndExpression { at: at.into() });
    };
    let content = parser.template.content(token.at);
    let token_at = token.content_at();
    // Left-hand side: an atom, or a prefix `not` applied recursively.
    let mut lhs = match token.token_type {
        IfConditionTokenType::Atom(IfConditionAtom::Numeric) => {
            IfCondition::Variable(parse_numeric(content, token_at)?)
        }
        IfConditionTokenType::Atom(IfConditionAtom::Text) => {
            IfCondition::Variable(TagElement::Text(Text::new(token_at)))
        }
        IfConditionTokenType::Atom(IfConditionAtom::TranslatedText) => {
            IfCondition::Variable(TagElement::TranslatedText(Text::new(token_at)))
        }
        IfConditionTokenType::Atom(IfConditionAtom::Variable) => {
            IfCondition::Variable(parser.parse_variable(content, token_at, token.at.0)?)
        }
        IfConditionTokenType::Not => {
            let if_condition = parse_if_binding_power(parser, lexer, NOT_BINDING_POWER, token_at)?;
            IfCondition::Not(Box::new(if_condition))
        }
        // An operator cannot start an expression.
        _ => {
            return Err(ParseError::InvalidIfPosition {
                at: token.at.into(),
                token: content.to_string(),
            });
        }
    };
    loop {
        let token = match lexer.peek() {
            None => break,
            Some(Err(e)) => return Err(e.clone().into()),
            Some(Ok(token)) => token,
        };
        // Two atoms in a row (no operator between) is an error.
        let operator = match &token.token_type {
            IfConditionTokenType::Atom(_) | IfConditionTokenType::Not => {
                return Err(ParseError::UnusedExpression {
                    at: token.at.into(),
                    expression: parser.template.content(token.at).to_string(),
                });
            }
            IfConditionTokenType::Operator(operator) => *operator,
        };
        let binding_power = operator.binding_power();
        if binding_power <= min_binding_power {
            break;
        }
        // We can get the next token properly now, since we have the right binding
        // power and don't need to `break`.
        let token = lexer
            .next()
            .expect("already `break`ed in match peek()")
            .expect("already `return Err` in match peek()");
        let rhs = parse_if_binding_power(parser, lexer, binding_power, token.at)?;
        lhs = operator.build_condition(lhs, rhs);
    }
    Ok(lhs)
}
// Binding power of prefix `not`: tighter than `and` (7) / `or` (6), looser
// than `in`/comparison operators (9/10) — see `binding_power` below.
const NOT_BINDING_POWER: u8 = 8;
trait IfConditionOperatorMethods {
fn binding_power(&self) -> u8;
fn build_condition(&self, lhs: IfCondition, rhs: IfCondition) -> IfCondition;
}
impl IfConditionOperatorMethods for IfConditionOperator {
fn binding_power(&self) -> u8 {
match self {
Self::Or => 6,
Self::And => 7,
Self::In | Self::NotIn => 9,
Self::Is
| Self::IsNot
| Self::Equal
| Self::NotEqual
| Self::GreaterThan
| Self::GreaterThanEqual
| Self::LessThan
| Self::LessThanEqual => 10,
}
}
fn build_condition(&self, lhs: IfCondition, rhs: IfCondition) -> IfCondition {
let inner = Box::new((lhs, rhs));
match self {
Self::And => IfCondition::And(inner),
Self::Or => IfCondition::Or(inner),
Self::In => IfCondition::In(inner),
Self::NotIn => IfCondition::NotIn(inner),
Self::Is => IfCondition::Is(inner),
Self::IsNot => IfCondition::IsNot(inner),
Self::Equal => IfCondition::Equal(inner),
Self::NotEqual => IfCondition::NotEqual(inner),
Self::GreaterThan => IfCondition::GreaterThan(inner),
Self::GreaterThanEqual => IfCondition::GreaterThanEqual(inner),
Self::LessThan => IfCondition::LessThan(inner),
Self::LessThanEqual => IfCondition::LessThanEqual(inner),
}
}
}
/// The expression a `{% for %}` tag iterates over, with its source span.
#[derive(Clone, Debug, PartialEq)]
pub struct ForIterable {
    // Parsed iterable expression (variable, filter chain, or literal).
    pub iterable: TagElement,
    // Span of the expression in the template source.
    pub at: At,
}

/// The loop variable name(s) of a `{% for %}` tag, with their combined span.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ForNames {
    // One entry per unpacked loop variable, in declaration order.
    pub names: Vec<String>,
    // Span covering all names, from the first through the last.
    pub at: At,
}
/// Parses the argument list of a `{% for %}` tag: one or more comma-separated
/// loop variable names, the `in` keyword, the iterable expression, and an
/// optional trailing `reversed` flag.
///
/// Returns the iterable, the variable names (with their combined source
/// span), and whether `reversed` was present. Errors point at the offending
/// span in the template source.
fn parse_for_loop(
    parser: &mut Parser,
    parts: TagParts,
    at: At,
) -> Result<(ForIterable, ForNames, bool), ParseError> {
    let mut lexer = ForLexer::new(parser.template, parts);
    // Collect every variable name appearing before the `in` keyword.
    let mut variable_names = Vec::new();
    while let Some(token) = lexer.lex_variable_name() {
        variable_names.push(token?);
    }
    if variable_names.is_empty() {
        return Err(ForParseError::MissingVariableNames { at: at.into() }.into());
    }
    // Span covering all names: from the start of the first to the end of the last.
    let variables_start = variable_names[0].at.0;
    let last = variable_names
        .last()
        .expect("Variables has at least one element");
    let variables_at = (variables_start, last.at.0 - variables_start + last.at.1);
    if let Err(error) = lexer.lex_in() {
        // If the last "variable" we lexed is actually the `in` keyword, the
        // user omitted a variable name; report a more specific error.
        if parser.template.content(last.at) != "in" {
            return Err(error.into());
        }
        let len = variable_names.len();
        match error {
            ForLexerInError::MissingComma { .. } if len >= 2 => {
                // A name is missing after the second-to-last variable.
                let previous = &variable_names[len - 2];
                let at = previous.at.into();
                return Err(ForParseError::MissingVariable { at }.into());
            }
            _ => {
                // Nothing usable precedes the `in` keyword.
                let at = last.at.into();
                return Err(ForParseError::MissingVariableBeforeIn { at }.into());
            }
        }
    }
    let expression_token = lexer.lex_expression()?;
    let reversed = lexer.lex_reversed()?;
    // Resolve token spans into owned variable-name strings.
    let variable_names = variable_names
        .iter()
        .map(|token| parser.template.content(token.at).to_string())
        .collect();
    let expression_content = parser.template.content(expression_token.at);
    let expression = match expression_token.token_type {
        // Numeric literals cannot be iterated.
        ForTokenType::Numeric => {
            return Err(ParseError::NotIterable {
                literal: expression_content.to_string(),
                at: expression_token.at.into(),
            });
        }
        ForTokenType::Text => TagElement::Text(Text::new(text_content_at(expression_token.at))),
        ForTokenType::TranslatedText => {
            TagElement::TranslatedText(Text::new(translated_text_content_at(expression_token.at)))
        }
        ForTokenType::Variable => parser.parse_variable(
            expression_content,
            expression_token.at,
            expression_token.at.0,
        )?,
    };
    Ok((
        ForIterable {
            iterable: expression,
            at: expression_token.at,
        },
        ForNames {
            names: variable_names,
            at: variables_at,
        },
        reversed,
    ))
}
/// Parsed `{% for %}` tag: iterable, loop variables, direction, loop body and
/// the optional `{% empty %}` branch.
#[derive(Clone, Debug, PartialEq)]
pub struct For {
    pub iterable: ForIterable,
    pub variables: ForNames,
    // True when the tag carried a trailing `reversed` flag.
    pub reversed: bool,
    pub body: Vec<TokenTree>,
    // Nodes rendered when the iterable is empty, if `{% empty %}` was given.
    pub empty: Option<Vec<TokenTree>>,
}

/// A `./`- or `../`-style template path relative to the including template.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct RelativePath {
    pub at: At,
    pub path: String,
}

/// How the template to include is named in an `{% include %}` tag.
#[derive(Clone, Debug, PartialEq)]
pub enum IncludeTemplateName {
    // A literal template name.
    Text(Text),
    // A name resolved from the context at render time.
    Variable(TagElement),
    // A path relative to the current template.
    Relative(RelativePath),
}

/// Parsed `{% include %}` tag.
#[derive(Clone, Debug)]
pub struct Include {
    pub template_name: IncludeTemplateName,
    // Origin template name, used to resolve relative includes.
    pub origin: Option<String>,
    pub engine: Arc<Engine>,
    // True when `only` was given: render with just the `with` kwargs.
    pub only: bool,
    // `with` keyword arguments: (span of the name, value expression).
    pub kwargs: Vec<(At, TagElement)>,
}
impl PartialEq for Include {
    /// Field-by-field comparison; engines compare by pointer identity.
    ///
    /// `Arc::ptr_eq` stands in for deep equality because truly comparing the
    /// `Py` smart pointers inside the engines would require the `py` token.
    /// This impl is only exercised from tests, so identity is good enough.
    fn eq(&self, other: &Self) -> bool {
        if !Arc::ptr_eq(&self.engine, &other.engine) {
            return false;
        }
        self.template_name == other.template_name
            && self.origin == other.origin
            && self.only == other.only
            && self.kwargs == other.kwargs
    }
}
/// A call to a user-registered `@register.simple_tag`.
#[derive(Clone, Debug)]
pub struct SimpleTag {
    // The registered Python callable implementing the tag.
    pub func: Arc<Py<PyAny>>,
    pub at: At,
    // True when registered with `takes_context=True`.
    pub takes_context: bool,
    pub args: Vec<TagElement>,
    pub kwargs: Vec<(String, TagElement)>,
    // Set when the tag used `as var` to store its result.
    pub target_var: Option<String>,
}

impl PartialEq for SimpleTag {
    fn eq(&self, other: &Self) -> bool {
        // We use `Arc::ptr_eq` here to avoid needing the `py` token for true
        // equality comparison between two `Py` smart pointers.
        //
        // We only use `eq` in tests, so this concession is acceptable here.
        self.at == other.at
            && self.takes_context == other.takes_context
            && self.args == other.args
            && self.kwargs == other.kwargs
            && self.target_var == other.target_var
            && Arc::ptr_eq(&self.func, &other.func)
    }
}

/// A call to a user-registered `@register.simple_block_tag`, which also
/// captures the rendered nodes between the opening and closing tags.
#[derive(Clone, Debug)]
pub struct SimpleBlockTag {
    pub func: Arc<Py<PyAny>>,
    // Body nodes between the opening tag and its end tag.
    pub nodes: Vec<TokenTree>,
    pub at: At,
    pub takes_context: bool,
    pub args: Vec<TagElement>,
    pub kwargs: Vec<(String, TagElement)>,
    pub target_var: Option<String>,
}

impl PartialEq for SimpleBlockTag {
    fn eq(&self, other: &Self) -> bool {
        // We use `Arc::ptr_eq` here to avoid needing the `py` token for true
        // equality comparison between two `Py` smart pointers.
        //
        // We only use `eq` in tests, so this concession is acceptable here.
        self.at == other.at
            && self.takes_context == other.takes_context
            && self.args == other.args
            && self.kwargs == other.kwargs
            && self.target_var == other.target_var
            && self.nodes == other.nodes
            && Arc::ptr_eq(&self.func, &other.func)
    }
}
/// A fully parsed template tag.
#[derive(Clone, Debug, PartialEq)]
pub enum Tag {
    Autoescape {
        enabled: AutoescapeEnabled,
        nodes: Vec<TokenTree>,
    },
    If {
        condition: IfCondition,
        truthy: Vec<TokenTree>,
        // Nodes for the `{% else %}` branch, when present.
        falsey: Option<Vec<TokenTree>>,
    },
    For(For),
    Include(Include),
    // `{% load %}` renders to nothing; its effect happens at parse time.
    Load,
    SimpleTag(SimpleTag),
    SimpleBlockTag(SimpleBlockTag),
    Url(Url),
    Lorem(Lorem),
}

/// The kind of tag that can close (or continue) an open block tag.
#[derive(PartialEq, Eq)]
enum EndTagType {
    Autoescape,
    Elif,
    Else,
    EndIf,
    Empty,
    EndFor,
    Verbatim,
    // End tag of a user-registered block tag, e.g. `end<name>`.
    Custom(String),
}
impl EndTagType {
    /// Spells out the tag name exactly as it appears in template source.
    ///
    /// Built-in end tags borrow a static string; only a custom end tag has
    /// to clone its owned name.
    fn as_cow(&self) -> Cow<'static, str> {
        match self {
            Self::Autoescape => Cow::Borrowed("endautoescape"),
            Self::Elif => Cow::Borrowed("elif"),
            Self::Else => Cow::Borrowed("else"),
            Self::EndIf => Cow::Borrowed("endif"),
            Self::Empty => Cow::Borrowed("empty"),
            Self::EndFor => Cow::Borrowed("endfor"),
            Self::Verbatim => Cow::Borrowed("endverbatim"),
            Self::Custom(name) => Cow::Owned(name.clone()),
        }
    }
}
/// A concrete end/continuation tag found while parsing a block tag's body.
#[derive(PartialEq, Eq)]
struct EndTag {
    at: At,
    end: EndTagType,
    // Remaining tag contents, e.g. the condition of an `{% elif %}`.
    parts: TagParts,
}

impl EndTag {
    /// The tag's source name, delegated to [`EndTagType::as_cow`].
    fn as_cow(&self) -> Cow<'static, str> {
        self.end.as_cow()
    }
}
/// One node of a parsed template: literal text, a literal value, a tag, a
/// variable lookup, or a filter expression.
#[derive(Clone, Debug, PartialEq)]
pub enum TokenTree {
    Text(Text),
    TranslatedText(Text),
    Int(BigInt),
    Float(f64),
    Tag(Tag),
    Variable(Variable),
    // A variable bound by an enclosing `{% for %}` loop.
    ForVariable(ForVariable),
    Filter(Box<Filter>),
}
impl From<TagElement> for TokenTree {
fn from(tag_element: TagElement) -> Self {
match tag_element {
TagElement::Text(text) => Self::Text(text),
TagElement::TranslatedText(text) => Self::TranslatedText(text),
TagElement::Variable(variable) => Self::Variable(variable),
TagElement::ForVariable(variable) => Self::ForVariable(variable),
TagElement::Filter(filter) => Self::Filter(filter),
TagElement::Int(n) => Self::Int(n),
TagElement::Float(f) => Self::Float(f),
}
}
}
/// Maps a plain-or-loop variable into the matching [`TagElement`] variant.
impl From<Either<Variable, ForVariable>> for TagElement {
    fn from(variable: Either<Variable, ForVariable>) -> Self {
        match variable {
            Either::Left(v) => Self::Variable(v),
            Either::Right(v) => Self::ForVariable(v),
        }
    }
}

/// Same mapping as above, but into [`ArgumentType`] for filter arguments.
impl From<Either<Variable, ForVariable>> for ArgumentType {
    fn from(variable: Either<Variable, ForVariable>) -> Self {
        match variable {
            Either::Left(v) => Self::Variable(v),
            Either::Right(v) => Self::ForVariable(v),
        }
    }
}
/// Errors specific to parsing the variable list of a `{% for %}` tag.
#[allow(clippy::enum_variant_names)]
#[derive(Error, Debug, Diagnostic, PartialEq, Eq)]
pub enum ForParseError {
    #[error("Expected another variable when unpacking in for loop:")]
    MissingVariable {
        #[label("after this variable")]
        at: SourceSpan,
    },
    #[error("Expected a variable name before the 'in' keyword:")]
    MissingVariableBeforeIn {
        #[label("before this keyword")]
        at: SourceSpan,
    },
    #[error("Expected at least one variable name in for loop:")]
    MissingVariableNames {
        #[label("in this tag")]
        at: SourceSpan,
    },
}
/// All errors that can occur while parsing a template.
///
/// Variants carry [`SourceSpan`]s so miette can render labelled snippets of
/// the offending template source. `transparent` variants forward diagnostics
/// produced by the various sub-lexers unchanged.
#[derive(Error, Debug, Diagnostic, PartialEq, Eq)]
pub enum ParseError {
    #[error("Empty variable tag")]
    EmptyVariable {
        #[label("here")]
        at: SourceSpan,
    },
    #[error("Expected an argument")]
    MissingArgument {
        #[label("here")]
        at: SourceSpan,
    },
    // Errors forwarded unchanged from sub-lexers and sub-parsers.
    #[error(transparent)]
    #[diagnostic(transparent)]
    AutoescapeError(#[from] AutoescapeError),
    #[error(transparent)]
    #[diagnostic(transparent)]
    BlockError(#[from] TagLexerError),
    #[error(transparent)]
    #[diagnostic(transparent)]
    LexerError(#[from] LexerError),
    #[error(transparent)]
    #[diagnostic(transparent)]
    ForLexerError(#[from] ForLexerError),
    #[error(transparent)]
    #[diagnostic(transparent)]
    ForLexerInError(#[from] ForLexerInError),
    #[allow(clippy::enum_variant_names)]
    #[error(transparent)]
    #[diagnostic(transparent)]
    ForParseError(#[from] ForParseError),
    #[error(transparent)]
    #[diagnostic(transparent)]
    IncludeLexerError(#[from] IncludeLexerError),
    #[error("{literal} is not iterable")]
    NotIterable {
        literal: String,
        #[label("here")]
        at: SourceSpan,
    },
    #[error(transparent)]
    #[diagnostic(transparent)]
    RelativePathError(#[from] RelativePathError),
    #[error(transparent)]
    #[diagnostic(transparent)]
    SimpleTagLexerError(#[from] SimpleTagLexerError),
    #[error(transparent)]
    #[diagnostic(transparent)]
    VariableError(#[from] VariableLexerError),
    // Tag-specific structural errors.
    #[error("The 'only' option was specified more than once.")]
    #[diagnostic(help("Remove the second 'only'"))]
    IncludeOnlyTwice {
        #[label("first here")]
        first_at: SourceSpan,
        #[label("second here")]
        second_at: SourceSpan,
    },
    #[error("Invalid filter: '{filter}'")]
    InvalidFilter {
        filter: String,
        #[label("here")]
        at: SourceSpan,
    },
    #[error("Not expecting '{token}' in this position")]
    InvalidIfPosition {
        token: String,
        #[label("here")]
        at: SourceSpan,
    },
    #[error("Invalid numeric literal")]
    InvalidNumber {
        #[label("here")]
        at: SourceSpan,
    },
    #[error("Missing boolean expression")]
    MissingBooleanExpression {
        #[label("here")]
        at: SourceSpan,
    },
    #[error("Unclosed '{start}' tag. Looking for one of: {expected}")]
    MissingEndTag {
        start: Cow<'static, str>,
        expected: String,
        #[label("started here")]
        at: SourceSpan,
    },
    #[error("'{tag}' is not a valid tag or filter in tag library '{library}'")]
    MissingFilterTag {
        tag: String,
        library: String,
        #[label("tag or filter")]
        tag_at: SourceSpan,
        #[label("library")]
        library_at: SourceSpan,
    },
    #[error("Expected a keyword argument")]
    MissingKeywordArgument {
        #[label("after this")]
        at: SourceSpan,
    },
    #[error("'{library}' is not a registered tag library.")]
    MissingTagLibrary {
        library: String,
        #[label("here")]
        at: SourceSpan,
        // Lists the registered libraries; rendered by miette as a help note.
        #[help]
        help: String,
    },
    #[error("Cannot mix arguments and keyword arguments")]
    MixedArgsKwargs {
        #[label("here")]
        at: SourceSpan,
    },
    #[error("'url' view name must be a string or variable, not a number")]
    NumericUrlName {
        #[label("here")]
        at: SourceSpan,
    },
    // Signature errors for user-registered simple (block) tags.
    #[error("'{name}' must have a first argument of 'content'")]
    RequiresContent {
        name: String,
        #[label("loaded here")]
        at: SourceSpan,
    },
    #[error(
        "'{name}' is decorated with takes_context=True so it must have a first argument of 'context'"
    )]
    RequiresContext {
        name: String,
        #[label("loaded here")]
        at: SourceSpan,
    },
    #[error(
        "'{name}' is decorated with takes_context=True so it must have a first argument of 'context' and a second argument of 'content'"
    )]
    RequiresContextAndContent {
        name: String,
        #[label("loaded here")]
        at: SourceSpan,
    },
    #[error("'{tag_name}' did not receive value(s) for the argument(s): {missing}")]
    MissingArguments {
        tag_name: String,
        missing: String,
        #[label("here")]
        at: SourceSpan,
    },
    #[error("'{tag_name}' received multiple values for keyword argument '{kwarg_name}'")]
    DuplicateKeywordArgument {
        tag_name: String,
        kwarg_name: String,
        #[label("first")]
        first_at: SourceSpan,
        #[label("second")]
        second_at: SourceSpan,
    },
    #[error("Unexpected positional argument after keyword argument")]
    PositionalAfterKeyword {
        #[label("this positional argument")]
        at: SourceSpan,
        #[label("after this keyword argument")]
        after: SourceSpan,
    },
    #[error("Unexpected positional argument")]
    TooManyPositionalArguments {
        #[label("here")]
        at: SourceSpan,
    },
    #[error("Unexpected keyword argument")]
    UnexpectedKeywordArgument {
        #[label("here")]
        at: SourceSpan,
    },
    #[error("{filter} filter does not take an argument")]
    UnexpectedArgument {
        filter: &'static str,
        #[label("unexpected argument")]
        at: SourceSpan,
    },
    #[error("Unexpected end of expression")]
    UnexpectedEndExpression {
        #[label("after this")]
        at: SourceSpan,
    },
    #[error("Unexpected tag {unexpected}")]
    UnexpectedEndTag {
        unexpected: Cow<'static, str>,
        #[label("unexpected tag")]
        at: SourceSpan,
    },
    #[error("Unused expression '{expression}' in if tag")]
    UnusedExpression {
        expression: String,
        #[label("here")]
        at: SourceSpan,
    },
    #[error("'url' takes at least one argument, a URL pattern name")]
    UrlTagNoArguments {
        #[label("here")]
        at: SourceSpan,
    },
    #[error("Unexpected tag {unexpected}, expected {expected}")]
    WrongEndTag {
        unexpected: Cow<'static, str>,
        expected: String,
        #[label("unexpected tag")]
        at: SourceSpan,
        #[label("start tag")]
        start_at: SourceSpan,
    },
    #[error("Incorrect format for '{tag}' tag")]
    InvalidTagFormat {
        tag: &'static str,
        #[label("here")]
        at: SourceSpan,
    },
    #[error(transparent)]
    #[diagnostic(transparent)]
    LoremError(#[from] LoremError),
}
/// Either a parse error from this crate or a raw Python exception raised
/// while calling into Python during parsing.
#[derive(Error, Debug)]
pub enum PyParseError {
    #[error(transparent)]
    PyErr(#[from] PyErr),
    #[error(transparent)]
    ParseError(#[from] ParseError),
}

impl PyParseError {
    /// Extracts the [`ParseError`], propagating a wrapped Python exception
    /// as `Err` so it can bubble out with `?`.
    pub fn try_into_parse_error(self) -> PyResult<ParseError> {
        match self {
            Self::ParseError(err) => Ok(err),
            Self::PyErr(err) => Err(err),
        }
    }

    /// Test helper: panics if the error was a Python exception.
    #[cfg(test)]
    pub fn unwrap_parse_error(self) -> ParseError {
        match self {
            Self::ParseError(err) => err,
            Self::PyErr(err) => panic!("{err:?}"),
        }
    }
}
/// Resolves a tag-library name token against the engine's registered
/// libraries, producing a bound reference to the library's Python module.
trait LoadLibrary {
    fn load_library<'l, 'py>(
        &self,
        py: Python<'py>,
        libraries: &'l HashMap<String, Py<PyAny>>,
        template: TemplateString<'_>,
    ) -> Result<&'l Bound<'py, PyAny>, ParseError>;
}
impl LoadLibrary for LoadToken {
fn load_library<'l, 'py>(
&self,
py: Python<'py>,
libraries: &'l HashMap<String, Py<PyAny>>,
template: TemplateString<'_>,
) -> Result<&'l Bound<'py, PyAny>, ParseError> {
let library_name = template.content(self.at);
match libraries.get(library_name) {
Some(library) => Ok(library.bind(py)),
None => {
let mut libraries: Vec<_> = libraries.keys().map(String::as_str).collect();
libraries.sort_unstable();
let help = format!("Must be one of:\n{}", libraries.join("\n"));
Err(ParseError::MissingTagLibrary {
at: self.at.into(),
library: library_name.to_string(),
help,
})
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | true |
use pyo3::prelude::*;
#[pymodule]
pub mod django_rusty_templates {
use std::borrow::Cow;
use std::collections::HashMap;
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
use encoding_rs::Encoding;
use pyo3::exceptions::{
PyAttributeError, PyImportError, PyOverflowError, PyTypeError, PyValueError,
};
use pyo3::import_exception;
use pyo3::intern;
use pyo3::prelude::*;
use pyo3::sync::{MutexExt, PyOnceLock};
use pyo3::types::{PyBool, PyDict, PyIterator, PyList, PyString, PyTuple};
use crate::error::RenderError;
use crate::loaders::{AppDirsLoader, CachedLoader, FileSystemLoader, Loader, LocMemLoader};
use crate::parse::{Parser, TokenTree};
use crate::render::types::{Context, PyContext};
use crate::render::{Render, RenderResult};
use crate::utils::PyResultMethods;
use dtl_lexer::types::TemplateString;
import_exception!(django.core.exceptions, ImproperlyConfigured);
import_exception!(django.template.base, VariableDoesNotExist);
import_exception!(django.template.exceptions, TemplateDoesNotExist);
import_exception!(django.template.exceptions, TemplateSyntaxError);
import_exception!(django.template.library, InvalidTemplateLibrary);
import_exception!(django.urls, NoReverseMatch);
    // Cached handle to `django.utils.module_loading.import_string`,
    // imported lazily once and reused for loading context processors.
    static IMPORT_STRING: PyOnceLock<Py<PyAny>> = PyOnceLock::new();
    /// Converts a miette report plus its template source into the matching
    /// Python exception, with the rendered diagnostic as the message.
    trait WithSourceCode {
        fn with_source_code(
            err: miette::Report,
            source: impl miette::SourceCode + 'static,
        ) -> PyErr;
    }
    impl WithSourceCode for TemplateSyntaxError {
        fn with_source_code(
            err: miette::Report,
            source: impl miette::SourceCode + 'static,
        ) -> PyErr {
            // Attach the template source so miette renders a labelled snippet.
            let miette_err = err.with_source_code(source);
            Self::new_err(format!("{miette_err:?}"))
        }
    }

    impl WithSourceCode for VariableDoesNotExist {
        fn with_source_code(
            err: miette::Report,
            source: impl miette::SourceCode + 'static,
        ) -> PyErr {
            let miette_err = err.with_source_code(source);
            let report = format!("{miette_err:?}");
            // Work around old-style Python formatting in VariableDoesNotExist.__str__
            let report = report.replace('%', "%%");
            Self::new_err(report)
        }
    }

    impl WithSourceCode for PyOverflowError {
        fn with_source_code(
            err: miette::Report,
            source: impl miette::SourceCode + 'static,
        ) -> PyErr {
            let miette_err = err.with_source_code(source);
            Self::new_err(format!("{miette_err:?}"))
        }
    }

    impl WithSourceCode for PyValueError {
        fn with_source_code(
            err: miette::Report,
            source: impl miette::SourceCode + 'static,
        ) -> PyErr {
            let miette_err = err.with_source_code(source);
            Self::new_err(format!("{miette_err:?}"))
        }
    }
    /// Imports each configured tag library (`name -> dotted module path`) and
    /// returns its `register` attribute keyed by library name.
    ///
    /// Raises `InvalidTemplateLibrary` when a module cannot be imported or
    /// lacks a `register` variable; other exceptions propagate unchanged.
    fn import_libraries(libraries: Bound<'_, PyAny>) -> PyResult<HashMap<String, Py<PyAny>>> {
        let py = libraries.py();
        let libraries: HashMap<String, String> = libraries.extract()?;
        let mut libs = HashMap::with_capacity(libraries.len());
        for (name, path) in libraries {
            // Only ImportError is converted into InvalidTemplateLibrary;
            // anything else (e.g. a SyntaxError in the module) propagates.
            let library = match py.import(&path).ok_or_isinstance_of::<PyImportError>(py)? {
                Ok(library) => library,
                Err(e) => {
                    let error = format!(
                        "Invalid template library specified. ImportError raised when trying to load '{}': {}",
                        path,
                        e.value(py)
                    );
                    return Err(InvalidTemplateLibrary::new_err(error));
                }
            };
            let Ok(library) = library
                .getattr(intern!(py, "register"))
                .ok_or_isinstance_of::<PyAttributeError>(py)?
            else {
                let error = format!("Module '{path}' does not have a variable named 'register'");
                return Err(InvalidTemplateLibrary::new_err(error));
            };
            libs.insert(name, library.unbind());
        }
        Ok(libs)
    }
    /// Helper function to unpack a loader tuple configuration.
    /// See <https://docs.djangoproject.com/en/stable/ref/templates/api/#django.template.Engine>
    ///
    /// Returns the loader's dotted class path and its second (argument)
    /// element; raises `ImproperlyConfigured` when either is missing or the
    /// first element is not a string.
    fn unpack<'py>(loader: &Bound<'py, PyAny>) -> PyResult<(String, Bound<'py, PyAny>)> {
        let mut items = loader.try_iter()?;
        let first_item = match items.next() {
            Some(item) => item?,
            None => return Err(ImproperlyConfigured::new_err("Configuration is empty")),
        };
        let loader_path = first_item.extract::<String>().map_err(|_| {
            ImproperlyConfigured::new_err(
                "First element of tuple configuration must be a Loader class name",
            )
        })?;
        let remaining_args = match items.next() {
            Some(item) => item?,
            None => {
                return Err(ImproperlyConfigured::new_err(
                    "Missing second element in tuple configuration",
                ));
            }
        };
        Ok((loader_path, remaining_args))
    }
fn get_template_loaders(
py: Python<'_>,
template_loaders: Bound<'_, PyIterator>,
encoding: &'static Encoding,
) -> PyResult<Vec<Loader>> {
template_loaders
.map(|template_loader| {
template_loader
.and_then(|template_loader| find_template_loader(py, template_loader, encoding))
})
.collect()
}
    /// Resolves one loader configuration entry, which may be either a bare
    /// dotted class-path string or a `(class_path, args)` tuple/iterable.
    fn find_template_loader(
        py: Python<'_>,
        loader: Bound<'_, PyAny>,
        encoding: &'static Encoding,
    ) -> PyResult<Loader> {
        // A plain string configures the loader with no extra arguments.
        if let Ok(loader_str) = loader.extract::<String>() {
            return map_loader(py, &loader_str, None, encoding);
        }
        let (loader_path, args) = unpack(&loader).map_err(|e| {
            ImproperlyConfigured::new_err(format!(
                "Invalid template loader: {loader}. {}",
                e.value(py),
            ))
        })?;
        map_loader(py, &loader_path, Some(args), encoding)
    }
    /// Instantiates the Rust [`Loader`] matching a Django loader class path,
    /// applying the optional configuration arguments.
    ///
    /// Raises `ImproperlyConfigured` for unknown class paths.
    fn map_loader(
        py: Python<'_>,
        loader_path: &str,
        args: Option<Bound<'_, PyAny>>,
        encoding: &'static Encoding,
    ) -> PyResult<Loader> {
        match loader_path {
            "django.template.loaders.filesystem.Loader" => {
                // Optional args: an iterable of search directories.
                let paths = args
                    .map(|arg| {
                        arg.try_iter()?
                            .map(|item| item?.extract::<PathBuf>())
                            .collect::<PyResult<Vec<_>>>()
                    })
                    .transpose()?
                    .unwrap_or_default();
                Ok(Loader::FileSystem(FileSystemLoader::new(paths, encoding)))
            }
            "django.template.loaders.app_directories.Loader" => {
                Ok(Loader::AppDirs(AppDirsLoader::new(encoding)))
            }
            "django.template.loaders.locmem.Loader" => {
                // Optional args: a mapping of template name -> template source.
                let templates = args
                    .map(|arg| arg.extract())
                    .transpose()?
                    .unwrap_or_default();
                Ok(Loader::LocMem(LocMemLoader::new(templates)))
            }
            "django.template.loaders.cached.Loader" => {
                // The cached loader wraps other loaders, resolved recursively.
                let nested_loaders = args
                    .ok_or_else(|| {
                        ImproperlyConfigured::new_err(
                            "django.template.loaders.cached.Loader requires a list/tuple of loaders"
                        )
                    })?
                    .try_iter()?
                    .map(|inner_loader| find_template_loader(py, inner_loader?, encoding))
                    .collect::<PyResult<Vec<_>>>()?;
                Ok(Loader::Cached(CachedLoader::new(nested_loaders)))
            }
            // TODO: Return an `ExternalLoader` when it's fully implemented
            unknown => Err(ImproperlyConfigured::new_err(format!(
                "Invalid template loader class: {unknown}"
            ))),
        }
    }
    /// The template engine configuration shared (via `Arc`) by all templates
    /// it produces. Mirrors `django.template.Engine`.
    #[derive(Debug)]
    pub struct Engine {
        #[allow(dead_code)]
        dirs: Vec<PathBuf>,
        app_dirs: bool,
        // Dotted paths of configured context processors.
        context_processors: Vec<String>,
        debug: bool,
        // Behind a Mutex because loaders cache state while loading templates.
        template_loaders: Mutex<Vec<Loader>>,
        string_if_invalid: String,
        #[allow(dead_code)]
        encoding: &'static Encoding,
        builtins: Vec<String>,
        pub autoescape: bool,
        // The imported context-processor callables, parallel to
        // `context_processors`.
        loaded_context_processors: Vec<Py<PyAny>>,
        // Imported tag libraries keyed by name (each is a `register` object).
        pub libraries: HashMap<String, Py<PyAny>>,
    }
    impl Engine {
        /// Test helper: an engine with no loaders, processors, or libraries.
        #[cfg(test)]
        pub fn empty() -> Self {
            Self {
                dirs: Vec::new(),
                app_dirs: false,
                context_processors: Vec::new(),
                debug: false,
                template_loaders: Mutex::new(Vec::new()),
                string_if_invalid: String::new(),
                encoding: encoding_rs::UTF_8,
                builtins: Vec::new(),
                autoescape: false,
                loaded_context_processors: Vec::new(),
                libraries: HashMap::new(),
            }
        }
    }
    /// Asks each configured loader in order for `template_name`, returning
    /// the first successfully compiled [`Template`].
    ///
    /// When every loader fails, raises `TemplateDoesNotExist` carrying the
    /// list of origins each loader tried.
    pub fn get_template(
        engine: Arc<Engine>,
        py: Python<'_>,
        template_name: Cow<str>,
    ) -> PyResult<Template> {
        let mut tried = Vec::new();
        let mut loaders = engine
            .template_loaders
            .lock_py_attached(py)
            .expect("Mutex should not be poisoned");
        for loader in loaders.iter_mut() {
            match loader.get_template(py, &template_name, engine.clone()) {
                Ok(template) => return template,
                Err(e) => tried.push(e.tried),
            }
        }
        // Release the loader lock before constructing the Python exception.
        drop(loaders);
        Err(TemplateDoesNotExist::new_err((
            template_name.into_owned(),
            tried,
        )))
    }
    /// Returns the first template from `template_name_list` that loads
    /// successfully.
    ///
    /// `TemplateDoesNotExist` from individual names is collected; any other
    /// error (e.g. a syntax error) aborts immediately. Raises
    /// `TemplateDoesNotExist` when the list is empty or nothing loads.
    pub fn select_template(
        engine: Arc<Engine>,
        py: Python<'_>,
        template_name_list: Vec<String>,
    ) -> PyResult<Template> {
        if template_name_list.is_empty() {
            return Err(TemplateDoesNotExist::new_err("No template names provided"));
        }
        let mut not_found = Vec::new();
        for template_name in template_name_list {
            match get_template(engine.clone(), py, Cow::Owned(template_name)) {
                Ok(template) => return Ok(template),
                Err(e) if e.is_instance_of::<TemplateDoesNotExist>(py) => {
                    not_found.push(e.value(py).to_string());
                }
                Err(e) => return Err(e),
            }
        }
        Err(TemplateDoesNotExist::new_err(not_found.join(", ")))
    }
    /// Python-visible wrapper around [`Engine`], exported as `Engine`.
    #[derive(Debug)]
    #[pyclass(name = "Engine")]
    pub struct PyEngine {
        engine: Arc<Engine>,
    }
    #[pymethods]
    impl PyEngine {
        /// Builds an engine from Django-style configuration; mirrors
        /// `django.template.Engine.__init__`.
        ///
        /// Raises `ImproperlyConfigured` for contradictory loader settings
        /// and `ValueError` for an unknown `file_charset`.
        #[new]
        #[pyo3(signature = (dirs=None, app_dirs=false, context_processors=None, debug=false, loaders=None, string_if_invalid=String::new(), file_charset="utf-8".to_string(), libraries=None, builtins=None, autoescape=true))]
        #[allow(clippy::too_many_arguments)] // We're matching Django's Engine __init__ signature
        pub fn new(
            py: Python<'_>,
            dirs: Option<Bound<'_, PyAny>>,
            app_dirs: bool,
            context_processors: Option<Bound<'_, PyAny>>,
            debug: bool,
            loaders: Option<Bound<'_, PyAny>>,
            string_if_invalid: String,
            file_charset: String,
            libraries: Option<Bound<'_, PyAny>>,
            #[allow(unused_variables)] builtins: Option<Bound<'_, PyAny>>,
            autoescape: bool,
        ) -> PyResult<Self> {
            let dirs = match dirs {
                Some(dirs) => dirs.extract()?,
                None => Vec::new(),
            };
            // Import each context processor eagerly so misconfigurations
            // surface at engine construction time.
            let (context_processors, loaded_context_processors) = match context_processors {
                Some(context_processors) => {
                    let import_string =
                        IMPORT_STRING.import(py, "django.utils.module_loading", "import_string")?;
                    let loaded = context_processors
                        .try_iter()?
                        .map(|processor| {
                            import_string.call1((processor?,)).map(pyo3::Bound::unbind)
                        })
                        .collect::<PyResult<Vec<Py<PyAny>>>>()?;
                    (context_processors.extract()?, loaded)
                }
                None => (Vec::new(), Vec::new()),
            };
            let Some(encoding) = Encoding::for_label(file_charset.as_bytes()) else {
                return Err(PyValueError::new_err(format!(
                    "Unknown encoding: '{file_charset}'"
                )));
            };
            let template_loaders = match loaders {
                // Explicit loaders and app_dirs are mutually exclusive.
                Some(_) if app_dirs => {
                    let err = ImproperlyConfigured::new_err(
                        "app_dirs must not be set when loaders is defined.",
                    );
                    return Err(err);
                }
                Some(loaders) => get_template_loaders(py, loaders.try_iter()?, encoding)?,
                None => {
                    // Default: a cached filesystem loader, plus the
                    // app-directories loader when app_dirs is set.
                    let filesystem_loader =
                        Loader::FileSystem(FileSystemLoader::new(dirs.clone(), encoding));
                    let appdirs_loader = Loader::AppDirs(AppDirsLoader::new(encoding));
                    let loaders = if app_dirs {
                        vec![filesystem_loader, appdirs_loader]
                    } else {
                        vec![filesystem_loader]
                    };
                    let cached_loader = Loader::Cached(CachedLoader::new(loaders));
                    vec![cached_loader]
                }
            };
            let libraries = match libraries {
                None => HashMap::new(),
                Some(libraries) => import_libraries(libraries)?,
            };
            // NOTE(review): the `builtins` argument is accepted but ignored;
            // the stored list is always empty. Presumably pending support —
            // confirm before relying on builtin tag libraries.
            let builtins = vec![];
            let engine = Engine {
                dirs,
                app_dirs,
                context_processors,
                debug,
                template_loaders: template_loaders.into(),
                string_if_invalid,
                encoding,
                builtins,
                autoescape,
                loaded_context_processors,
                libraries,
            };
            Ok(Self {
                engine: Arc::new(engine),
            })
        }

        /// Return a compiled Template object for the given template name,
        /// handling template inheritance recursively.
        ///
        /// See <https://docs.djangoproject.com/en/stable/ref/templates/api/#django.template.Engine.get_template>
        pub fn get_template(&self, py: Python<'_>, template_name: String) -> PyResult<Template> {
            get_template(self.engine.clone(), py, Cow::Owned(template_name))
        }

        /// Given a list of template names, return the first that can be loaded.
        ///
        /// See <https://docs.djangoproject.com/en/stable/ref/templates/api/#django.template.Engine.select_template>
        pub fn select_template(
            &mut self,
            py: Python<'_>,
            template_name_list: Vec<String>,
        ) -> PyResult<Template> {
            select_template(self.engine.clone(), py, template_name_list)
        }

        /// Compile a template directly from a source string.
        #[allow(clippy::wrong_self_convention)] // We're implementing a Django interface
        pub fn from_string(&self, template_code: Bound<'_, PyString>) -> PyResult<Template> {
            Template::new_from_string(
                template_code.py(),
                template_code.extract()?,
                self.engine.clone(),
            )
        }

        /// Render the template specified by `template_name` with the given context.
        /// For use in Django's test suite.
        #[pyo3(signature = (template_name, context=None))]
        pub fn render_to_string(
            &mut self,
            py: Python<'_>,
            template_name: Bound<'_, PyAny>,
            context: Option<Bound<'_, PyAny>>,
        ) -> PyResult<String> {
            // A list/tuple selects the first loadable template, matching
            // Django's render_to_string behaviour.
            let template = if template_name.is_instance_of::<PyList>()
                || template_name.is_instance_of::<PyTuple>()
            {
                self.select_template(py, template_name.extract()?)?
            } else {
                self.get_template(py, template_name.extract()?)?
            };
            template.py_render(py, context, None)
        }

        // Read-only getters mirroring Django Engine attributes.
        #[getter]
        pub fn dirs(&self) -> Vec<String> {
            self.engine
                .dirs
                .iter()
                .map(|p| p.to_string_lossy().to_string())
                .collect()
        }

        #[getter]
        pub fn app_dirs(&self) -> bool {
            self.engine.app_dirs
        }

        #[getter]
        pub fn context_processors(&self) -> &Vec<String> {
            &self.engine.context_processors
        }

        #[getter]
        pub fn debug(&self) -> bool {
            self.engine.debug
        }

        #[getter]
        pub fn string_if_invalid(&self) -> &str {
            &self.engine.string_if_invalid
        }

        #[getter]
        pub fn file_charset(&self) -> String {
            self.engine.encoding.name().to_string()
        }

        #[getter]
        pub fn builtins(&self) -> &Vec<String> {
            &self.engine.builtins
        }

        #[getter]
        pub fn autoescape(&self) -> bool {
            self.engine.autoescape
        }

        #[getter]
        pub fn libraries<'py>(&self, py: Python<'py>) -> PyResult<Bound<'py, PyDict>> {
            // Expose the library map as a fresh Python dict.
            let dict = PyDict::new(py);
            for (key, value) in &self.engine.libraries {
                dict.set_item(key, value.bind(py))?;
            }
            Ok(dict)
        }
    }
    /// A compiled template: its source, parsed nodes, and owning engine.
    #[derive(Debug, Clone)]
    #[pyclass]
    pub struct Template {
        // Source file path; `None` for templates compiled from a string.
        pub filename: Option<PathBuf>,
        // The raw template source, kept for error reporting and rendering.
        pub template: String,
        pub nodes: Vec<TokenTree>,
        pub engine: Arc<Engine>,
    }
    impl Template {
        /// Parses a template loaded from `filename`; `template_name` is the
        /// name it was requested under (used to resolve relative includes).
        ///
        /// Parse failures become `TemplateSyntaxError` with the source
        /// attached for a labelled diagnostic.
        pub fn new(
            py: Python<'_>,
            template: &str,
            filename: PathBuf,
            template_name: &str,
            engine: Arc<Engine>,
        ) -> PyResult<Self> {
            let mut parser = Parser::new(
                py,
                TemplateString(template),
                engine.clone(),
                Some(template_name),
            );
            let nodes = match parser.parse() {
                Ok(nodes) => nodes,
                Err(err) => {
                    // Python exceptions propagate unchanged; parse errors are
                    // rendered with the file name and source attached.
                    let err = err.try_into_parse_error()?;
                    let source =
                        miette::NamedSource::new(filename.to_string_lossy(), template.to_string());
                    return Err(TemplateSyntaxError::with_source_code(err.into(), source));
                }
            };
            Ok(Self {
                template: template.to_string(),
                filename: Some(filename),
                nodes,
                engine,
            })
        }

        /// Parses a template from a source string (no file, no origin).
        pub fn new_from_string(
            py: Python<'_>,
            template: String,
            engine: Arc<Engine>,
        ) -> PyResult<Self> {
            let mut parser = Parser::new(py, TemplateString(&template), engine.clone(), None);
            let nodes = match parser.parse() {
                Ok(nodes) => nodes,
                Err(err) => {
                    let err = err.try_into_parse_error()?;
                    return Err(TemplateSyntaxError::with_source_code(err.into(), template));
                }
            };
            Ok(Self {
                template,
                filename: None,
                nodes,
                engine,
            })
        }

        /// Renders all nodes into one string.
        pub fn render(&self, py: Python<'_>, context: &mut Context) -> RenderResult<'_> {
            // The output is usually at least as long as the source template.
            let mut rendered = String::with_capacity(self.template.len());
            let template = TemplateString(&self.template);
            for node in &self.nodes {
                let content = node.render(py, template, context)?;
                rendered.push_str(&content);
            }
            Ok(Cow::Owned(rendered))
        }

        /// Renders and maps each [`RenderError`] kind onto the Python
        /// exception type Django callers expect, with source attached.
        fn _render(&self, py: Python<'_>, context: &mut Context) -> PyResult<String> {
            match self.render(py, context) {
                Ok(content) => Ok(content.to_string()),
                Err(err) => {
                    let err = err.try_into_render_error()?;
                    match err {
                        RenderError::RelativePathError(_) => {
                            Err(TemplateSyntaxError::with_source_code(
                                err.into(),
                                self.template.clone(),
                            ))
                        }
                        RenderError::VariableDoesNotExist { .. }
                        | RenderError::ArgumentDoesNotExist { .. } => {
                            Err(VariableDoesNotExist::with_source_code(
                                err.into(),
                                self.template.clone(),
                            ))
                        }
                        RenderError::InvalidArgumentInteger { .. }
                        | RenderError::InvalidArgumentString { .. }
                        | RenderError::TupleUnpackError { .. } => Err(
                            PyValueError::with_source_code(err.into(), self.template.clone()),
                        ),
                        RenderError::OverflowError { .. }
                        | RenderError::InvalidArgumentFloat { .. } => Err(
                            PyOverflowError::with_source_code(err.into(), self.template.clone()),
                        ),
                    }
                }
            }
        }
    }
    #[pymethods]
    impl Template {
        /// Python-facing `render(context=None, request=None)`.
        ///
        /// Builds the base context (`None`/`True`/`False`), runs the
        /// engine's context processors when a request is given, merges in
        /// the caller's mapping, and renders.
        #[pyo3(name = "render", signature = (context=None, request=None))]
        pub fn py_render(
            &self,
            py: Python<'_>,
            context: Option<Bound<'_, PyAny>>,
            request: Option<Bound<'_, PyAny>>,
        ) -> PyResult<String> {
            // Seed the context with Python's singletons, as Django does.
            let mut base_context = HashMap::from([
                ("None".to_string(), py.None()),
                ("True".to_string(), PyBool::new(py, true).to_owned().into()),
                (
                    "False".to_string(),
                    PyBool::new(py, false).to_owned().into(),
                ),
            ]);
            let request = request.map(pyo3::Bound::unbind);
            if let Some(ref request) = request {
                for processor in &self.engine.loaded_context_processors {
                    let processor = processor.bind(py);
                    let processor_context = processor.call1((request,))?;
                    // A processor must return a mapping; otherwise raise a
                    // TypeError naming the offending processor.
                    let processor_context: HashMap<_, _> = match processor_context.extract() {
                        Ok(processor_context) => processor_context,
                        Err(_) => {
                            let processor_module = processor.getattr("__module__")?;
                            let processor_name = processor.getattr("__qualname__")?;
                            let message = format!(
                                "Context processor '{processor_module}.{processor_name}' didn't return a dictionary."
                            );
                            let error = PyTypeError::new_err(message);
                            return Err(error);
                        }
                    };
                    base_context.extend(processor_context);
                }
            }
            let mut context = match context {
                // NOTE(review): when a `PyContext` is supplied, the context
                // processors above have already been called but their results
                // (and `base_context`) are discarded — confirm this is the
                // intended behaviour.
                Some(py_context) if py_context.is_instance_of::<PyContext>() => {
                    let extracted: PyContext = py_context
                        .extract()
                        .expect("The type of py_context should be PyContext");
                    let mut context = extracted
                        .context
                        .lock_py_attached(py)
                        .expect("Mutex should not be poisoned");
                    return self._render(py, &mut context);
                }
                Some(context) => {
                    // Caller-supplied values shadow the base context.
                    let new_context: HashMap<_, _> = context.extract()?;
                    base_context.extend(new_context);
                    Context::new(base_context, request, self.engine.autoescape)
                }
                None => Context::new(base_context, request, self.engine.autoescape),
            };
            self._render(py, &mut context)
        }
    }
}
#[cfg(test)]
mod tests {
use std::sync::Arc;
use super::django_rusty_templates::*;
use pyo3::Python;
use pyo3::types::{PyDict, PyDictMethods, PyList, PyString};
#[test]
fn test_syntax_error() {
Python::initialize();
Python::attach(|py| {
let mut filename = std::env::current_dir().unwrap();
filename.push("tests");
filename.push("templates");
filename.push("parse_error.txt");
let expected = format!(
"TemplateSyntaxError: × Empty variable tag
╭─[{}:1:28]
1 │ This is an empty variable: {{{{ }}}}
· ──┬──
· ╰── here
╰────
",
filename.display(),
);
let engine = Arc::new(Engine::empty());
let template_string = std::fs::read_to_string(&filename).unwrap();
let error = temp_env::with_var("NO_COLOR", Some("1"), || {
Template::new(py, &template_string, filename, "parse_error.txt", engine)
.unwrap_err()
});
let error_string = format!("{error}");
assert_eq!(error_string, expected);
});
}
#[test]
fn test_syntax_error_from_string() {
Python::initialize();
Python::attach(|py| {
let engine = Arc::new(Engine::empty());
let template_string = "{{ foo.bar|title'foo' }}".to_string();
let error = temp_env::with_var("NO_COLOR", Some("1"), || {
Template::new_from_string(py, template_string, engine).unwrap_err()
});
let expected = "TemplateSyntaxError: × Could not parse the remainder
╭────
1 │ {{ foo.bar|title'foo' }}
· ──┬──
· ╰── here
╰────
";
let error_string = format!("{error}");
assert_eq!(error_string, expected);
});
}
// An empty template renders to the empty string.
#[test]
fn test_render_empty_template() {
    Python::initialize();
    Python::attach(|py| {
        let engine = Arc::new(Engine::empty());
        let template_string = String::new();
        let template = Template::new_from_string(py, template_string, engine).unwrap();
        let context = PyDict::new(py).into_any();
        assert_eq!(template.py_render(py, Some(context), None).unwrap(), "");
    });
}
// A variable present in the context is substituted into the output.
#[test]
fn test_render_template_variable() {
    Python::initialize();
    Python::attach(|py| {
        let engine = Arc::new(Engine::empty());
        let template_string = "Hello {{ user }}!".to_string();
        let template = Template::new_from_string(py, template_string, engine).unwrap();
        let context = PyDict::new(py);
        context.set_item("user", "Lily").unwrap();
        assert_eq!(
            template
                .py_render(py, Some(context.into_any()), None)
                .unwrap(),
            "Hello Lily!"
        );
    });
}
// An unknown variable renders as the empty string (Django semantics),
// not as an error.
#[test]
fn test_render_template_unknown_variable() {
    Python::initialize();
    Python::attach(|py| {
        let engine = Arc::new(Engine::empty());
        let template_string = "Hello {{ user }}!".to_string();
        let template = Template::new_from_string(py, template_string, engine).unwrap();
        let context = PyDict::new(py).into_any();
        assert_eq!(
            template.py_render(py, Some(context), None).unwrap(),
            "Hello !"
        );
    });
}
#[test]
fn test_render_template_variable_nested() {
Python::initialize();
Python::attach(|py| {
let engine = Arc::new(Engine::empty());
let template_string = "Hello {{ user.profile.names.0 }}!".to_string();
let template = Template::new_from_string(py, template_string, engine).unwrap();
let locals = PyDict::new(py);
py.run(
cr#"
class User:
def __init__(self, names):
self.profile = {"names": names}
user = User(["Lily"])
"#,
None,
Some(&locals),
)
.unwrap();
let user = locals.get_item("user").unwrap().unwrap();
let context = PyDict::new(py);
context.set_item("user", user.into_any()).unwrap();
assert_eq!(
template
.py_render(py, Some(context.into_any()), None)
.unwrap(),
"Hello Lily!"
);
});
}
// Engine.from_string compiles a template string that then renders with
// an empty context.
#[test]
fn test_engine_from_string() {
    Python::initialize();
    Python::attach(|py| {
        let engine = PyEngine::new(
            py,
            None,
            false,
            None,
            false,
            None,
            String::new(),
            "utf-8".to_string(),
            None,
            None,
            false,
        )
        .unwrap();
        let template_string = PyString::new(py, "Hello {{ user }}!");
        let template = engine.from_string(template_string).unwrap();
        let context = PyDict::new(py).into_any();
        assert_eq!(
            template.py_render(py, Some(context), None).unwrap(),
            "Hello !"
        );
    });
}
// Cloning a Template preserves filename, source text, parsed nodes, and
// engine settings.
#[test]
fn test_clone_template() {
    use std::collections::HashMap;
    use pyo3::IntoPyObject;
    use pyo3::types::{PyAnyMethods, PyListMethods};
    Python::initialize();
    Python::attach(|py| {
        // Make the repository root importable so the custom filter
        // library can be loaded.
        let cwd = std::env::current_dir().unwrap();
        let sys_path = py.import("sys").unwrap().getattr("path").unwrap();
        let sys_path = sys_path.cast().unwrap();
        sys_path.append(cwd.to_string_lossy()).unwrap();
        let engine = PyEngine::new(
            py,
            Some(vec!["tests/templates"].into_pyobject(py).unwrap()),
            false,
            None,
            false,
            None,
            String::new(),
            "utf-8".to_string(),
            Some(
                HashMap::from([("custom_filters", "tests.templatetags.custom_filters")])
                    .into_pyobject(py)
                    .unwrap()
                    .into_any(),
            ),
            None,
            false,
        )
        .unwrap();
        let template = engine
            .get_template(py, "full_example.html".to_string())
            .unwrap();
        let cloned = template.clone();
        assert_eq!(cloned.filename, template.filename);
        assert_eq!(cloned.template, template.template);
        assert_eq!(cloned.nodes, template.nodes);
        assert_eq!(cloned.engine.autoescape, template.engine.autoescape);
    });
}
#[test]
fn test_engine_attributes() {
use std::collections::HashMap;
use pyo3::IntoPyObject;
use pyo3::types::{PyAnyMethods, PyListMethods};
Python::initialize();
Python::attach(|py| {
let cwd = std::env::current_dir().unwrap();
let sys_path = py.import("sys").unwrap().getattr("path").unwrap();
let sys_path = sys_path.cast().unwrap();
sys_path.append(cwd.to_string_lossy()).unwrap();
let engine = PyEngine::new(
py,
Some(
vec!["tests/templates", "other/templates"]
.into_pyobject(py)
.unwrap(),
),
true,
Some(
vec!["django.template.context_processors.debug"]
.into_pyobject(py)
.unwrap(),
),
true,
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | true |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/src/error.rs | src/error.rs | // Silence lint warnings for Miette Diagnostic
// https://github.com/zkat/miette/issues/458
// https://github.com/rust-lang/rust/issues/147648
#![expect(unused_assignments)]
use miette::{Diagnostic, LabeledSpan, SourceSpan, miette};
use pyo3::exceptions::PyKeyError;
use pyo3::prelude::*;
use thiserror::Error;
use crate::path::RelativePathError;
use dtl_lexer::types::{At, TemplateString};
/// Unified failure type for rendering: either a Python exception raised
/// while touching Python objects, or a template-level [`RenderError`].
/// Both `From` impls let `?` convert either source transparently.
#[derive(Error, Debug)]
pub enum PyRenderError {
    #[error(transparent)]
    PyErr(#[from] PyErr),
    #[error(transparent)]
    RenderError(#[from] RenderError),
}
impl PyRenderError {
    /// Split this error into its two cases: yield the template-level
    /// [`RenderError`] on success, and hand any underlying Python
    /// exception back to the caller unchanged as `Err`.
    pub fn try_into_render_error(self) -> PyResult<RenderError> {
        match self {
            Self::PyErr(py_err) => Err(py_err),
            Self::RenderError(render_err) => Ok(render_err),
        }
    }
}
/// Template-level rendering failures, each carrying miette source spans
/// so the error can be displayed against the template text.
#[derive(Error, Debug, Diagnostic, PartialEq, Eq)]
pub enum RenderError {
    /// Error produced while resolving a relative template path.
    #[error(transparent)]
    #[diagnostic(transparent)]
    RelativePathError(#[from] RelativePathError),
    /// An argument could not be converted to an integer.
    #[error("Couldn't convert argument ({argument}) to integer")]
    InvalidArgumentInteger {
        argument: String,
        #[label("argument")]
        argument_at: SourceSpan,
    },
    /// A float argument appeared where an integer was required.
    #[error("Couldn't convert float ({argument}) to integer")]
    InvalidArgumentFloat {
        argument: String,
        #[label("here")]
        argument_at: SourceSpan,
    },
    /// A string argument was required but something else was supplied.
    #[error("String argument expected")]
    InvalidArgumentString {
        #[label("here")]
        argument_at: SourceSpan,
    },
    /// An integer argument is too large to represent.
    #[error("Integer {argument} is too large")]
    OverflowError {
        argument: String,
        #[label("here")]
        argument_at: SourceSpan,
    },
    /// Key lookup on a filter/tag argument failed.
    #[error("Failed lookup for key [{key}] in {object}")]
    ArgumentDoesNotExist {
        key: String,
        object: String,
        #[label("key")]
        key_at: SourceSpan,
        // `object_at` is optional: the object may not have a span in the
        // template (e.g. when it came from the context).
        #[label("{object}")]
        object_at: Option<SourceSpan>,
    },
    /// Wrong number of values when unpacking — presumably from `{% for %}`
    /// tuple unpacking; confirm against the parser.
    #[error("Need {expected_count} values to unpack; got {actual_count}.")]
    TupleUnpackError {
        expected_count: usize,
        actual_count: usize,
        #[label("unpacked here")]
        expected_at: SourceSpan,
        #[label("from here")]
        actual_at: SourceSpan,
    },
    /// Key lookup on a template variable failed.
    #[error("Failed lookup for key [{key}] in {object}")]
    VariableDoesNotExist {
        key: String,
        object: String,
        #[label("key")]
        key_at: SourceSpan,
        #[label("{object}")]
        object_at: Option<SourceSpan>,
    },
}
/// Wrapper whose `__repr__` returns the stored message verbatim.
///
/// Python renders `KeyError` arguments with `repr()`, which would mangle
/// the multi-line annotated diagnostic text; this type sidesteps that.
#[pyclass]
struct KeyErrorMessage {
    // Pre-formatted (possibly multi-line) diagnostic text.
    message: String,
}

#[pymethods]
impl KeyErrorMessage {
    /// Return the message exactly as stored — no quoting or escaping.
    fn __repr__(&self) -> &str {
        &self.message
    }
}
/// Attach a labeled source span to a Python exception so it renders as a
/// miette-style diagnostic against the template source.
pub trait AnnotatePyErr {
    fn annotate(self, py: Python<'_>, at: At, label: &str, template: TemplateString<'_>) -> Self;
}
impl AnnotatePyErr for PyErr {
    /// Rebuild this exception so its message is a miette-rendered
    /// diagnostic: the original message plus `template` shown as source
    /// code with the span `at` labeled `label`.
    fn annotate(self, py: Python<'_>, at: At, label: &str, template: TemplateString<'_>) -> Self {
        let message = miette!(
            labels = vec![LabeledSpan::at(at, label)],
            "{}",
            self.value(py),
        )
        .with_source_code(template.0.to_string());
        if self.is_instance_of::<PyKeyError>(py) {
            let message = format!("{message:?}");
            // Python converts the message to `repr(message)` for KeyError.
            // When annotating, this is unhelpful, so we work around this by defining a custom
            // `__repr__` that returns the message exactly as we want it.
            // https://github.com/python/cpython/blob/43573028c6ae21c66c118b8bae866c8968b87b68/Objects/exceptions.c#L2946-L2954
            let message = KeyErrorMessage { message };
            PyKeyError::new_err((message,))
        } else {
            // Re-raise the same exception type, replacing only the text.
            let err_type = self.get_type(py);
            Self::from_type(err_type, format!("{message:?}"))
        }
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/src/types.rs | src/types.rs | use num_bigint::BigInt;
use dtl_lexer::types::{At, Variable};
/// A literal run of template text, stored as a span into the template
/// source rather than an owned string.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct Text {
    pub at: At,
}

impl Text {
    pub fn new(at: At) -> Self {
        Self { at }
    }
}
/// A span of template text marked for translation — presumably the
/// `_("...")` argument form; confirm against the lexer.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub struct TranslatedText {
    pub at: At,
}

impl TranslatedText {
    pub fn new(at: At) -> Self {
        Self { at }
    }
}
/// The kind of value an argument can carry: a context variable, a
/// for-loop metadata variable, a (translated) literal, or a number.
#[derive(Clone, Debug, PartialEq)]
pub enum ArgumentType {
    Variable(Variable),
    ForVariable(ForVariable),
    Text(Text),
    TranslatedText(TranslatedText),
    // Arbitrary-precision — integer literals are not clamped to i64.
    Int(BigInt),
    Float(f64),
}

/// An argument together with its source span.
#[derive(Clone, Debug, PartialEq)]
pub struct Argument {
    pub at: At,
    pub argument_type: ArgumentType,
}
/// Loop metadata names — these mirror Django's `forloop.counter`,
/// `forloop.counter0`, `forloop.revcounter`, `forloop.first`, etc.
/// `Object` presumably refers to the forloop object itself; confirm.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum ForVariableName {
    Counter,
    Counter0,
    RevCounter,
    RevCounter0,
    First,
    Last,
    Object,
}

/// A reference to loop metadata; `parent_count` selects how many
/// enclosing loops to walk up (as in `forloop.parentloop` chains).
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ForVariable {
    pub variant: ForVariableName,
    pub parent_count: usize,
    pub at: At,
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/src/render.rs | src/render.rs | pub mod common;
pub mod filters;
pub mod lorem;
pub mod tags;
pub mod types;
use std::borrow::Cow;
use pyo3::prelude::*;
use dtl_lexer::types::TemplateString;
use crate::error::PyRenderError;
use types::{Content, Context};
// `Ok(None)` means the element resolved to no content; `Err` carries
// either a Python exception or a template render error.
pub type ResolveResult<'t, 'py> = Result<Option<Content<'t, 'py>>, PyRenderError>;
// Borrowed template text when possible, owned when content was built.
pub type RenderResult<'t> = Result<Cow<'t, str>, PyRenderError>;

/// How variable-resolution failures are treated: raised, or (for
/// missing variables) ignored.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub enum ResolveFailures {
    Raise,
    IgnoreVariableDoesNotExist,
}
/// Trait for resolving a template element into content suitable for
/// further processing by another template element.
trait Resolve {
    // `failures` controls whether a missing variable is an error or is
    // treated as "no content" (see [`ResolveFailures`]).
    fn resolve<'t, 'py>(
        &self,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
        failures: ResolveFailures,
    ) -> ResolveResult<'t, 'py>;
}
/// Trait for rendering a template element into content suitable for
/// output in the completely processed template.
pub trait Render {
    // `'t` ties borrowed output to the template source so unchanged
    // spans can be returned without copying.
    fn render<'t>(
        &self,
        py: Python<'_>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> RenderResult<'t>;
}
/// Trait for evaluating an expression in a boolean context
pub trait Evaluate {
    // NOTE(review): `None` appears to signal that no truth value could
    // be determined (e.g. missing expression) — confirm against the
    // implementors before relying on it.
    fn evaluate(
        &self,
        py: Python<'_>,
        template: TemplateString<'_>,
        context: &mut Context,
    ) -> Option<bool>;
}
impl<T> Evaluate for Option<T>
where
    T: Evaluate,
{
    /// Evaluate the wrapped expression when present; an absent
    /// expression is treated as known-false.
    fn evaluate(
        &self,
        py: Python<'_>,
        template: TemplateString<'_>,
        context: &mut Context,
    ) -> Option<bool> {
        self.as_ref()
            .map_or(Some(false), |inner| inner.evaluate(py, template, context))
    }
}
/// All resolvable template elements can be rendered
impl<T> Render for T
where
    T: Resolve,
{
    fn render<'t>(
        &self,
        py: Python<'_>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> RenderResult<'t> {
        // Resolution failures are raised here; resolving to `None`
        // renders as the empty string.
        match self.resolve(py, template, context, ResolveFailures::Raise)? {
            Some(content) => Ok(content.render(context)?),
            None => Ok(Cow::Borrowed("")),
        }
    }
}
impl<T> Render for Vec<T>
where
    T: Render,
{
    /// Render every node in sequence and concatenate the results,
    /// short-circuiting at the first node that fails to render.
    fn render<'t>(
        &self,
        py: Python<'_>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> RenderResult<'t> {
        let mut output = String::new();
        for node in self {
            output.push_str(&node.render(py, template, context)?);
        }
        Ok(Cow::Owned(output))
    }
}
impl<T> Render for Option<T>
where
    T: Render,
{
    /// Render the inner element when present; `None` renders as the
    /// empty string.
    fn render<'t>(
        &self,
        py: Python<'_>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> RenderResult<'t> {
        match self {
            Some(inner) => inner.render(py, template, context),
            None => Ok(Cow::Borrowed("")),
        }
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/src/utils.rs | src/utils.rs | use pyo3::prelude::*;
use pyo3::type_object::PyTypeInfo;
/// Extension for [`PyResult`]: selectively "catch" one Python exception
/// class, turning it into an inner `Err` while other errors propagate.
pub trait PyResultMethods<T> {
    fn ok_or_isinstance_of<E>(self, py: Python<'_>) -> PyResult<PyResult<T>>
    where
        E: PyTypeInfo;
}
impl<T> PyResultMethods<T> for PyResult<T> {
    /// Downgrade an `Err` whose exception is an instance of `E` into
    /// `Ok(Err(e))` so the caller can handle that class inline; any
    /// other result is passed through (wrapped one level deeper).
    fn ok_or_isinstance_of<E>(self, py: Python<'_>) -> PyResult<Self>
    where
        E: PyTypeInfo,
    {
        match self {
            Err(e) if e.is_instance_of::<E>(py) => Ok(Err(e)),
            other => other.map(Ok),
        }
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/src/loaders.rs | src/loaders.rs | use std::collections::HashMap;
use std::path::{Path, PathBuf};
use std::sync::Arc;
use cached::proc_macro::cached;
use encoding_rs::Encoding;
use pyo3::exceptions::PyUnicodeError;
use pyo3::prelude::*;
use pyo3::sync::PyOnceLock;
use sugar_path::SugarPath;
use crate::template::django_rusty_templates::{Engine, Template};
// Lazily-imported handle to `django.apps.apps`, shared process-wide.
static APPS: PyOnceLock<Py<PyAny>> = PyOnceLock::new();

/// "Tried" entries for a failed template lookup: (path or name
/// attempted, reason) pairs accumulated across loaders and directories.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LoaderError {
    pub tried: Vec<(String, String)>,
}
/// Absolutize and normalize `path`; an empty path resolves to the
/// current working directory. Returns `None` when either the current
/// directory or the absolute form cannot be determined.
fn abspath(path: &Path) -> Option<PathBuf> {
    if path.as_os_str().is_empty() {
        return std::env::current_dir().ok();
    }
    std::path::absolute(path).map(|p| p.normalize()).ok()
}
/// Join `template_name` onto `directory`, refusing any result that
/// escapes `directory` (absolute names, `..` traversal). Mirrors
/// Django's `safe_join` semantics.
fn safe_join(directory: &Path, template_name: &str) -> Option<PathBuf> {
    let joined = abspath(&directory.join(template_name))?;
    let base = abspath(directory)?;
    joined.starts_with(&base).then_some(joined)
}
/// Given a Django app's `path` attribute, return `<path>/<dirname>` if
/// it is an existing directory. Falsy paths yield `Ok(None)`; a value
/// that cannot be extracted as a path is a Python error.
fn get_app_template_dir(path: Bound<'_, PyAny>, dirname: &str) -> PyResult<Option<PathBuf>> {
    if !path.is_truthy()? {
        return Ok(None);
    }
    let base: PathBuf = path.extract()?;
    let candidate = base.join(dirname);
    Ok(if candidate.is_dir() {
        Some(candidate)
    } else {
        None
    })
}
// Collect `<app_path>/<dirname>` for every installed Django app.
// Results are memoized per `dirname` via #[cached].
#[cached(
    size = 128, // Cache size
    result = true, // Cache Result type
    key = "String", // Use owned String as key
    convert = r##"{ dirname.to_string() }"## // Convert &str to String
)]
fn get_app_template_dirs(py: Python<'_>, dirname: &str) -> PyResult<Vec<PathBuf>> {
    let apps = APPS.import(py, "django.apps", "apps")?;
    let app_configs = apps.call_method0("get_app_configs")?;
    let mut template_dirs = Vec::new();
    for app_config_result in app_configs.try_iter()? {
        let path = app_config_result?.getattr("path")?;
        if let Some(template_path) = get_app_template_dir(path, dirname)? {
            template_dirs.push(template_path);
        }
    }
    Ok(template_dirs)
}
/// Loader that searches a fixed list of directories on disk, decoding
/// template files with a configured encoding.
#[derive(Debug)]
pub struct FileSystemLoader {
    // Searched in order; first match wins.
    dirs: Vec<PathBuf>,
    encoding: &'static Encoding,
}
impl FileSystemLoader {
    /// Build a loader that searches `dirs` in order, decoding template
    /// files with `encoding`.
    pub fn new(dirs: Vec<PathBuf>, encoding: &'static Encoding) -> Self {
        Self { dirs, encoding }
    }

    /// Identical to [`Self::new`]; kept for existing call sites, so
    /// delegate instead of duplicating the constructor body.
    pub fn from_pathbuf(dirs: Vec<PathBuf>, encoding: &'static Encoding) -> Self {
        Self::new(dirs, encoding)
    }

    /// Try each configured directory in order and return the first
    /// template that exists and decodes cleanly.
    ///
    /// The outer `Err(LoaderError)` means "not found" and lists every
    /// path tried; the inner `Err(PyErr)` is a hard failure such as a
    /// file that exists but is malformed in the configured encoding.
    fn get_template(
        &self,
        py: Python<'_>,
        template_name: &str,
        engine: Arc<Engine>,
    ) -> Result<PyResult<Template>, LoaderError> {
        let mut tried = Vec::new();
        for template_dir in &self.dirs {
            // Skip joins that would escape the template directory.
            let Some(path) = safe_join(template_dir, template_name) else {
                continue;
            };
            let Ok(bytes) = std::fs::read(&path) else {
                tried.push((
                    path.display().to_string(),
                    "Source does not exist".to_string(),
                ));
                continue;
            };
            let (contents, encoding, malformed) = self.encoding.decode(&bytes);
            if malformed {
                return Ok(Err(PyUnicodeError::new_err(format!(
                    "Could not open {} with {} encoding.",
                    path.display(),
                    encoding.name()
                ))));
            }
            return Ok(Template::new(py, &contents, path, template_name, engine));
        }
        Err(LoaderError { tried })
    }
}
/// Loader that searches the `templates/` subdirectory of every
/// installed Django app.
#[derive(Debug)]
pub struct AppDirsLoader {
    encoding: &'static Encoding,
}

impl AppDirsLoader {
    pub fn new(encoding: &'static Encoding) -> Self {
        Self { encoding }
    }

    /// Discover app template directories, then delegate the lookup to a
    /// [`FileSystemLoader`] over them.
    fn get_template(
        &self,
        py: Python<'_>,
        template_name: &str,
        engine: Arc<Engine>,
    ) -> Result<PyResult<Template>, LoaderError> {
        // Failing to enumerate app dirs is a Python error, not "template
        // not found", so it is surfaced via the inner PyResult.
        let dirs = match get_app_template_dirs(py, "templates") {
            Ok(dirs) => dirs,
            Err(e) => return Ok(Err(e)),
        };
        let filesystem_loader = FileSystemLoader::from_pathbuf(dirs, self.encoding);
        filesystem_loader.get_template(py, template_name, engine)
    }
}
/// Loader that memoizes the results of the wrapped loaders, keyed by
/// template name.
#[derive(Debug)]
pub struct CachedLoader {
    // Both successful loads and "not found" results are remembered.
    cache: HashMap<String, Result<Template, LoaderError>>,
    pub loaders: Vec<Loader>,
}
impl CachedLoader {
    /// Wrap `loaders`, starting with an empty cache.
    pub fn new(loaders: Vec<Loader>) -> Self {
        Self {
            loaders,
            cache: HashMap::new(),
        }
    }

    /// Look up `template_name` in the cache, falling back to the
    /// wrapped loaders (in order) on a miss.
    ///
    /// NOTE(review): the cache key is the template name alone, not the
    /// engine — fine while each CachedLoader belongs to one engine;
    /// confirm if loaders are ever shared between engines.
    fn get_template(
        &mut self,
        py: Python<'_>,
        template_name: &str,
        engine: Arc<Engine>,
    ) -> Result<PyResult<Template>, LoaderError> {
        match self.cache.get(template_name) {
            Some(Ok(template)) => Ok(Ok((*template).clone())),
            Some(Err(e)) => Err(e.clone()),
            None => {
                let mut tried = Vec::new();
                for loader in &mut self.loaders {
                    match loader.get_template(py, template_name, engine.clone()) {
                        Ok(Ok(template)) => {
                            self.cache
                                .insert(template_name.to_string(), Ok(template.clone()));
                            return Ok(Ok(template));
                        }
                        // Python-level errors are NOT cached — presumably so
                        // a transient failure is not pinned for the process
                        // lifetime; confirm intent.
                        Ok(Err(e)) => return Ok(Err(e)),
                        // "Not found" in this loader: accumulate its tried
                        // list and fall through to the next loader.
                        Err(mut e) => tried.append(&mut e.tried),
                    }
                }
                let error = LoaderError { tried };
                self.cache
                    .insert(template_name.to_string(), Err(error.clone()));
                Err(error)
            }
        }
    }
}
/// In-memory loader mapping template names directly to source strings;
/// useful for tests and embedded templates.
#[derive(Debug)]
pub struct LocMemLoader {
    templates: HashMap<String, String>,
}

impl LocMemLoader {
    #[allow(dead_code)]
    pub fn new(templates: HashMap<String, String>) -> Self {
        Self { templates }
    }

    /// Compile the stored source for `template_name`, or report a
    /// single "Source does not exist" attempt when the name is unknown.
    fn get_template(
        &self,
        py: Python<'_>,
        template_name: &str,
        engine: Arc<Engine>,
    ) -> Result<PyResult<Template>, LoaderError> {
        match self.templates.get(template_name) {
            Some(source) => Ok(Template::new(
                py,
                source,
                // The name doubles as the (virtual) filename.
                PathBuf::from(template_name),
                template_name,
                engine,
            )),
            None => {
                let tried = vec![(
                    template_name.to_string(),
                    "Source does not exist".to_string(),
                )];
                Err(LoaderError { tried })
            }
        }
    }
}
/// Placeholder for loaders implemented in Python; not yet supported.
#[derive(Debug)]
pub struct ExternalLoader {}

impl ExternalLoader {
    /// Always panics via `todo!` — see the inline rationale.
    fn get_template(
        &self,
        _py: Python<'_>,
        _template_name: &str,
        _engine: Arc<Engine>,
    ) -> Result<PyResult<Template>, LoaderError> {
        std::todo!() // Bail here because it does not make much sense to convert from PyErr to empty LoaderError
    }
}
/// Closed set of supported template loaders, mirroring Django's loader
/// configuration options.
#[derive(Debug)]
pub enum Loader {
    FileSystem(FileSystemLoader),
    AppDirs(AppDirsLoader),
    Cached(CachedLoader),
    #[allow(dead_code)]
    LocMem(LocMemLoader),
    #[allow(dead_code)]
    External(ExternalLoader),
}
impl Loader {
    /// Dispatch to the concrete loader. `&mut self` is required because
    /// the cached variant mutates its cache.
    pub fn get_template(
        &mut self,
        py: Python<'_>,
        template_name: &str,
        engine: Arc<Engine>,
    ) -> Result<PyResult<Template>, LoaderError> {
        match self {
            Self::FileSystem(loader) => loader.get_template(py, template_name, engine),
            Self::AppDirs(loader) => loader.get_template(py, template_name, engine),
            Self::Cached(loader) => loader.get_template(py, template_name, engine),
            Self::LocMem(loader) => loader.get_template(py, template_name, engine),
            Self::External(loader) => loader.get_template(py, template_name, engine),
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
use pyo3::{BoundObject, IntoPyObjectExt};
use quickcheck::quickcheck;
// Point Django at the test settings module and run django.setup() so
// app registry and template machinery are available to the tests below.
fn setup_django(py: Python<'_>) {
    // Import the os module and set the DJANGO_SETTINGS_MODULE environment variable
    let os_module = PyModule::import(py, "os").unwrap();
    let environ = os_module.getattr("environ").unwrap();
    environ
        .call_method(
            "setdefault",
            ("DJANGO_SETTINGS_MODULE", "tests.settings"),
            None,
        )
        .unwrap();
    // Import the django module and call django.setup()
    let django_module = PyModule::import(py, "django").unwrap();
    django_module.call_method0("setup").unwrap();
}
// Loading an existing template resolves to its absolute on-disk path.
#[test]
fn test_filesystem_loader() {
    Python::initialize();
    Python::attach(|py| {
        let engine = Arc::new(Engine::empty());
        let loader =
            FileSystemLoader::new(vec![PathBuf::from("tests/templates")], encoding_rs::UTF_8);
        let template = loader
            .get_template(py, "basic.txt", engine)
            .unwrap()
            .unwrap();
        let mut expected = std::env::current_dir().unwrap();
        #[cfg(not(windows))]
        expected.push("tests/templates/basic.txt");
        #[cfg(windows)]
        expected.push("tests\\templates\\basic.txt");
        assert_eq!(template.filename.unwrap(), expected);
    });
}

// A missing template reports the full path it tried.
#[test]
fn test_filesystem_loader_missing_template() {
    Python::initialize();
    Python::attach(|py| {
        let engine = Arc::new(Engine::empty());
        let loader =
            FileSystemLoader::new(vec![PathBuf::from("tests/templates")], encoding_rs::UTF_8);
        let error = loader.get_template(py, "missing.txt", engine).unwrap_err();
        let mut expected = std::env::current_dir().unwrap();
        #[cfg(not(windows))]
        expected.push("tests/templates/missing.txt");
        #[cfg(windows)]
        expected.push("tests\\templates\\missing.txt");
        assert_eq!(
            error,
            LoaderError {
                tried: vec![(
                    expected.display().to_string(),
                    "Source does not exist".to_string(),
                )],
            },
        );
    });
}

// A file that is not valid UTF-8 surfaces an inner UnicodeError.
#[test]
fn test_filesystem_loader_invalid_encoding() {
    Python::initialize();
    Python::attach(|py| {
        let engine = Arc::new(Engine::empty());
        let loader =
            FileSystemLoader::new(vec![PathBuf::from("tests/templates")], encoding_rs::UTF_8);
        let error = loader
            .get_template(py, "invalid.txt", engine)
            .unwrap()
            .unwrap_err();
        let mut expected = std::env::current_dir().unwrap();
        #[cfg(not(windows))]
        expected.push("tests/templates/invalid.txt");
        #[cfg(windows)]
        expected.push("tests\\templates\\invalid.txt");
        assert_eq!(
            error.to_string(),
            format!(
                "UnicodeError: Could not open {} with UTF-8 encoding.",
                expected.display()
            )
        );
    });
}
// Repeated loads of the same name populate exactly one cache entry and
// keep returning the same template.
#[test]
fn test_cached_loader() {
    Python::initialize();
    Python::attach(|py| {
        // Helper to check cache contents
        let verify_cache = |cache: &HashMap<String, Result<Template, LoaderError>>,
                            key: &str,
                            expected_path: &Path| {
            if let Some(Ok(cached_template)) = cache.get(key) {
                assert_eq!(cached_template.filename.as_ref().unwrap(), expected_path);
            } else {
                panic!("Expected '{key}' to be in cache.");
            }
        };
        let engine = Arc::new(Engine::empty());
        // Create a FileSystemLoader for the CachedLoader
        let filesystem_loader =
            FileSystemLoader::new(vec![PathBuf::from("tests/templates")], encoding_rs::UTF_8);
        // Wrap the FileSystemLoader in a CachedLoader
        let mut cached_loader = CachedLoader::new(vec![Loader::FileSystem(filesystem_loader)]);
        // Load a template via the CachedLoader
        let template = cached_loader
            .get_template(py, "basic.txt", engine.clone())
            .expect("Failed to load template")
            .expect("Template file could not be read");
        // Verify the template filename
        let mut expected_path =
            std::env::current_dir().expect("Failed to get current directory");
        #[cfg(not(windows))]
        expected_path.push("tests/templates/basic.txt");
        #[cfg(windows)]
        expected_path.push("tests\\templates\\basic.txt");
        assert_eq!(template.filename.unwrap(), expected_path);
        // Verify the cache state after first load
        assert_eq!(cached_loader.cache.len(), 1);
        verify_cache(&cached_loader.cache, "basic.txt", &expected_path);
        // Load the same template again via the CachedLoader
        let template = cached_loader
            .get_template(py, "basic.txt", engine)
            .expect("Failed to load template")
            .expect("Template file could not be read");
        // Verify the template filename again
        assert_eq!(template.filename.unwrap(), expected_path);
        // Verify the cache state remains consistent
        assert_eq!(cached_loader.cache.len(), 1);
        verify_cache(&cached_loader.cache, "basic.txt", &expected_path);
    });
}

// "Not found" results are cached too, and repeated lookups keep
// returning the same LoaderError.
#[test]
fn test_cached_loader_missing_template() {
    Python::initialize();
    Python::attach(|py| {
        let engine = Arc::new(Engine::empty());
        let filesystem_loader =
            FileSystemLoader::new(vec![PathBuf::from("tests/templates")], encoding_rs::UTF_8);
        let mut cached_loader = CachedLoader::new(vec![Loader::FileSystem(filesystem_loader)]);
        let error = cached_loader
            .get_template(py, "missing.txt", engine.clone())
            .unwrap_err();
        let mut expected = std::env::current_dir().unwrap();
        #[cfg(not(windows))]
        expected.push("tests/templates/missing.txt");
        #[cfg(windows)]
        expected.push("tests\\templates\\missing.txt");
        let expected_err = LoaderError {
            tried: vec![(
                expected.display().to_string(),
                "Source does not exist".to_string(),
            )],
        };
        assert_eq!(error, expected_err);
        let cache = &cached_loader.cache;
        assert_eq!(
            cache.get("missing.txt").unwrap().as_ref().unwrap_err(),
            &expected_err
        );
        let error = cached_loader
            .get_template(py, "missing.txt", engine)
            .unwrap_err();
        assert_eq!(error, expected_err);
    });
}

// Encoding failures pass through the cached loader unchanged.
#[test]
fn test_cached_loader_invalid_encoding() {
    Python::initialize();
    Python::attach(|py| {
        let engine = Arc::new(Engine::empty());
        let filesystem_loader =
            FileSystemLoader::new(vec![PathBuf::from("tests/templates")], encoding_rs::UTF_8);
        let mut cached_loader = CachedLoader::new(vec![Loader::FileSystem(filesystem_loader)]);
        let error = cached_loader
            .get_template(py, "invalid.txt", engine)
            .unwrap()
            .unwrap_err();
        let mut expected = std::env::current_dir().unwrap();
        #[cfg(not(windows))]
        expected.push("tests/templates/invalid.txt");
        #[cfg(windows)]
        expected.push("tests\\templates\\invalid.txt");
        assert_eq!(
            error.to_string(),
            format!(
                "UnicodeError: Could not open {} with UTF-8 encoding.",
                expected.display()
            )
        );
    });
}
// A stored in-memory template loads with its name as virtual filename.
#[test]
fn test_locmem_loader() {
    Python::initialize();
    Python::attach(|py| {
        let engine = Arc::new(Engine::empty());
        let mut templates: HashMap<String, String> = HashMap::new();
        templates.insert("index.html".to_string(), "index".to_string());
        let loader = LocMemLoader::new(templates);
        let template = loader
            .get_template(py, "index.html", engine)
            .unwrap()
            .unwrap();
        assert_eq!(template.template, "index".to_string());
        assert_eq!(template.filename.unwrap(), PathBuf::from("index.html"));
    });
}

// A name absent from the map yields a "Source does not exist" attempt.
#[test]
fn test_locmem_loader_missing_template() {
    Python::initialize();
    Python::attach(|py| {
        let engine = Arc::new(Engine::empty());
        let templates: HashMap<String, String> = HashMap::new();
        let loader = LocMemLoader::new(templates);
        let error = loader.get_template(py, "index.html", engine).unwrap_err();
        assert_eq!(
            error,
            LoaderError {
                tried: vec![(
                    "index.html".to_string(),
                    "Source does not exist".to_string(),
                )],
            },
        );
    });
}
// With Django configured, the app-dirs loader finds templates under the
// test app's templates/ directory.
#[test]
fn test_appdirs_loader() {
    Python::initialize();
    Python::attach(|py| {
        // Setup Django
        setup_django(py);
        let engine = Arc::new(Engine::empty());
        let loader = AppDirsLoader::new(encoding_rs::UTF_8);
        let template = loader
            .get_template(py, "basic.txt", engine)
            .unwrap()
            .unwrap();
        let mut expected = std::env::current_dir().unwrap();
        #[cfg(not(windows))]
        expected.push("tests/templates/basic.txt");
        #[cfg(windows)]
        expected.push("tests\\templates\\basic.txt");
        assert_eq!(template.filename.unwrap(), expected);
    });
}

// A miss lists the tried path in every installed app's template dir
// (the test app plus django.contrib.auth).
#[test]
fn test_appdirs_loader_missing_template() {
    Python::initialize();
    Python::attach(|py| {
        // Setup Django
        setup_django(py);
        let engine = Arc::new(Engine::empty());
        let loader = AppDirsLoader::new(encoding_rs::UTF_8);
        let error = loader.get_template(py, "missing.txt", engine).unwrap_err();
        let mut expected = std::env::current_dir().unwrap();
        #[cfg(not(windows))]
        expected.push("tests/templates/missing.txt");
        #[cfg(windows)]
        expected.push("tests\\templates\\missing.txt");
        let auth = py.import("django.contrib.auth").unwrap();
        let auth: PathBuf = auth.getattr("__file__").unwrap().extract().unwrap();
        let mut auth = auth.parent().unwrap().to_path_buf();
        #[cfg(not(windows))]
        auth.push("templates/missing.txt");
        #[cfg(windows)]
        auth.push("templates\\missing.txt");
        assert_eq!(
            error,
            LoaderError {
                tried: vec![
                    (
                        expected.display().to_string(),
                        "Source does not exist".to_string(),
                    ),
                    (
                        auth.display().to_string(),
                        "Source does not exist".to_string(),
                    ),
                ],
            },
        );
    });
}

// Encoding failures surface through the app-dirs loader as well.
#[test]
fn test_appdirs_loader_invalid_encoding() {
    Python::initialize();
    Python::attach(|py| {
        // Setup Django
        setup_django(py);
        let engine = Arc::new(Engine::empty());
        let loader = AppDirsLoader::new(encoding_rs::UTF_8);
        let error = loader
            .get_template(py, "invalid.txt", engine)
            .unwrap()
            .unwrap_err();
        let mut expected = std::env::current_dir().unwrap();
        #[cfg(not(windows))]
        expected.push("tests/templates/invalid.txt");
        #[cfg(windows)]
        expected.push("tests\\templates\\invalid.txt");
        assert_eq!(
            error.to_string(),
            format!(
                "UnicodeError: Could not open {} with UTF-8 encoding.",
                expected.display()
            )
        );
    });
}
// None is treated as falsy (Ok(None)); a non-path value is an error.
#[test]
fn test_get_app_template_dir_special_cases() {
    Python::initialize();
    Python::attach(|py| {
        // Test with None path
        let none_path = py.None().into_bound(py);
        let result = get_app_template_dir(none_path, "templates");
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), None);
        // Test with invalid type (integer)
        let invalid = 42.into_bound_py_any(py).unwrap();
        let result = get_app_template_dir(invalid, "templates");
        assert!(result.is_err());
    });
}

// A Python str path works; missing subdirs yield Ok(None).
#[test]
fn test_get_app_template_dir_with_str() {
    Python::initialize();
    Python::attach(|py| {
        // Test with Python string for current directory (nonexistent template)
        let current_dir = ".".into_bound_py_any(py).unwrap();
        let result = get_app_template_dir(current_dir, "nonexistent");
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), None);
        // Test with Python string for the "tests" directory
        let tests_dir = "tests".into_bound_py_any(py).unwrap();
        let result = get_app_template_dir(tests_dir, "templates");
        assert!(result.is_ok());
        let expected_path = PathBuf::from("tests").join("templates");
        assert_eq!(result.unwrap(), Some(expected_path));
    });
}

// A pathlib.Path behaves the same as a str path.
#[test]
fn test_get_app_template_dir_with_pathlib() {
    Python::initialize();
    Python::attach(|py| {
        // Import pathlib.Path
        let path_module = py.import("pathlib").unwrap();
        let path_cls = path_module.getattr("Path").unwrap();
        // Test with pathlib.Path for current directory (nonexistent template)
        let path_obj = path_cls.call1((".",)).unwrap().into_bound();
        let result = get_app_template_dir(path_obj, "nonexistent");
        assert!(result.is_ok());
        assert_eq!(result.unwrap(), None);
        // Test with pathlib.Path for the "tests" directory
        let path_obj = path_cls.call1(("tests",)).unwrap().into_bound();
        let result = get_app_template_dir(path_obj, "templates");
        assert!(result.is_ok());
        let expected_path = PathBuf::from("tests").join("templates");
        assert_eq!(result.unwrap(), Some(expected_path));
    });
}

// With the test settings, two apps expose template directories.
#[test]
fn test_get_app_template_dirs() {
    Python::initialize();
    Python::attach(|py| {
        // Setup Django
        setup_django(py);
        let dirs = get_app_template_dirs(py, "templates").unwrap();
        assert_eq!(dirs.len(), 2);
        let mut expected = std::env::current_dir().unwrap();
        expected.push("tests/templates");
        assert_eq!(dirs[0], expected);
    });
}
// Plain joins under the base directory succeed (absolute base).
#[test]
fn test_safe_join_absolute() {
    let path = PathBuf::from("/abc/");
    let joined = safe_join(&path, "def").unwrap();
    #[cfg(not(windows))]
    assert_eq!(joined, PathBuf::from("/abc/def"));
    #[cfg(windows)]
    assert!(joined.ends_with("\\abc\\def"));
}

// Relative bases are resolved against the current directory.
#[test]
fn test_safe_join_relative() {
    let path = PathBuf::from("abc");
    let joined = safe_join(&path, "def").unwrap();
    let mut expected = std::env::current_dir().unwrap();
    expected.push("abc/def");
    assert_eq!(joined, expected);
}

// An absolute template name escapes the base and is rejected.
#[test]
fn test_safe_join_absolute_starts_with_sep() {
    let path = PathBuf::from("/abc/");
    let joined = safe_join(&path, "/def");
    assert_eq!(joined, None);
}

#[test]
fn test_safe_join_relative_starts_with_sep() {
    let path = PathBuf::from("abc");
    let joined = safe_join(&path, "/def");
    assert_eq!(joined, None);
}

// Parent traversal (`..`) out of the base is rejected in all forms.
#[test]
fn test_safe_join_absolute_parent() {
    let path = PathBuf::from("/abc/");
    let joined = safe_join(&path, "../def");
    assert_eq!(joined, None);
}

#[test]
fn test_safe_join_relative_parent() {
    let path = PathBuf::from("abc");
    let joined = safe_join(&path, "../def");
    assert_eq!(joined, None);
}

#[test]
fn test_safe_join_absolute_parent_starts_with_sep() {
    let path = PathBuf::from("/abc/");
    let joined = safe_join(&path, "/../def");
    assert_eq!(joined, None);
}

#[test]
fn test_safe_join_relative_parent_starts_with_sep() {
    let path = PathBuf::from("abc");
    let joined = safe_join(&path, "/../def");
    assert_eq!(joined, None);
}

// Cases lifted from Django's own safe_join tests.
#[test]
fn test_safe_join_django_example() {
    let path = PathBuf::from("/dir");
    let joined = safe_join(&path, "/../d");
    assert_eq!(joined, None);
}

#[test]
fn test_safe_join_django_example_variant() {
    let path = PathBuf::from("/dir");
    let joined = safe_join(&path, "/../directory");
    assert_eq!(joined, None);
}

// An empty base resolves against the current working directory.
#[test]
fn test_safe_join_empty_path() {
    let path = PathBuf::from("");
    let joined = safe_join(&path, "directory").unwrap();
    let mut expected = std::env::current_dir().unwrap();
    expected.push("directory");
    assert_eq!(joined, expected);
}

#[test]
fn test_safe_join_empty_path_and_template_name() {
    let path = PathBuf::from("");
    let joined = safe_join(&path, "").unwrap();
    let expected = std::env::current_dir().unwrap();
    assert_eq!(joined, expected);
}

// A ".." base with an empty name normalizes to the parent directory.
#[test]
fn test_safe_join_parent_and_empty_template_name() {
    let path = PathBuf::from("..");
    let joined = safe_join(&path, "").unwrap();
    let mut expected = std::env::current_dir().unwrap();
    expected.push("..");
    assert_eq!(joined, expected.normalize());
}
#[test]
#[cfg_attr(
windows,
ignore = "Skipping on Windows due to path character restrictions"
)]
fn test_safe_join_matches_django_safe_join() {
fn matches(path: PathBuf, template_name: String) -> bool {
Python::attach(|py| {
let utils_os = PyModule::import(py, "django.utils._os").unwrap();
let django_safe_join = utils_os.getattr("safe_join").unwrap();
let joined = django_safe_join
.call1((&path, &template_name))
.map(|joined| joined.extract().unwrap_or_default())
.ok();
joined == safe_join(&path, &template_name)
})
}
Python::initialize();
quickcheck(matches as fn(PathBuf, String) -> bool);
}
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/src/render/filters.rs | src/render/filters.rs | use std::borrow::Cow;
use html_escape::encode_quoted_attribute_to_string;
use num_traits::ToPrimitive;
use pyo3::prelude::*;
use pyo3::sync::PyOnceLock;
use pyo3::types::PyType;
use pyo3::types::{PyDate, PyDateTime, PyTime};
use crate::error::{AnnotatePyErr, PyRenderError, RenderError};
use crate::filters::{
AddFilter, AddSlashesFilter, CapfirstFilter, CenterFilter, CutFilter, DateFilter,
DefaultFilter, DefaultIfNoneFilter, EscapeFilter, EscapejsFilter, ExternalFilter, FilterType,
LengthFilter, LowerFilter, SafeFilter, SlugifyFilter, TitleFilter, UpperFilter,
WordcountFilter, WordwrapFilter, YesnoFilter,
};
use crate::parse::Filter;
use crate::render::common::gettext;
use crate::render::types::{AsBorrowedContent, Content, ContentString, Context, IntoOwnedContent};
use crate::render::{Resolve, ResolveFailures, ResolveResult};
use dtl_lexer::types::TemplateString;
use unicode_normalization::UnicodeNormalization;
static SAFEDATA: PyOnceLock<Py<PyType>> = PyOnceLock::new();
static GET_FORMAT: PyOnceLock<Py<PyAny>> = PyOnceLock::new();
static DATE_FORMAT: PyOnceLock<Py<PyAny>> = PyOnceLock::new();
impl Resolve for Filter {
    /// Resolve a filter expression: first resolve the left-hand value (which
    /// may be None when the variable is missing and `failures` allows it),
    /// then dispatch to the concrete filter implementation.
    fn resolve<'t, 'py>(
        &self,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
        failures: ResolveFailures,
    ) -> ResolveResult<'t, 'py> {
        let left = self.left.resolve(py, template, context, failures)?;
        match &self.filter {
            FilterType::Add(filter) => filter.resolve(left, py, template, context),
            FilterType::AddSlashes(filter) => filter.resolve(left, py, template, context),
            FilterType::Capfirst(filter) => filter.resolve(left, py, template, context),
            FilterType::Center(filter) => filter.resolve(left, py, template, context),
            FilterType::Cut(filter) => filter.resolve(left, py, template, context),
            FilterType::Default(filter) => filter.resolve(left, py, template, context),
            FilterType::DefaultIfNone(filter) => filter.resolve(left, py, template, context),
            FilterType::Date(filter) => filter.resolve(left, py, template, context),
            FilterType::Escape(filter) => filter.resolve(left, py, template, context),
            FilterType::Escapejs(filter) => filter.resolve(left, py, template, context),
            FilterType::External(filter) => filter.resolve(left, py, template, context),
            FilterType::Lower(filter) => filter.resolve(left, py, template, context),
            FilterType::Length(filter) => filter.resolve(left, py, template, context),
            FilterType::Safe(filter) => filter.resolve(left, py, template, context),
            FilterType::Slugify(filter) => filter.resolve(left, py, template, context),
            FilterType::Title(filter) => filter.resolve(left, py, template, context),
            FilterType::Upper(filter) => filter.resolve(left, py, template, context),
            FilterType::Wordcount(filter) => filter.resolve(left, py, template, context),
            FilterType::Wordwrap(filter) => filter.resolve(left, py, template, context),
            FilterType::Yesno(filter) => filter.resolve(left, py, template, context),
        }
    }
}
/// Implemented by every built-in filter: transform the already-resolved
/// left-hand `variable` (None when the variable was missing from the context)
/// into the filter's output content.
pub trait ResolveFilter {
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py>;
}
impl ResolveFilter for AddSlashesFilter {
    /// `addslashes`: backslash-escape backslashes, double quotes and single
    /// quotes. A missing variable renders as the empty string.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        _py: Python<'py>,
        _template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let content = match variable {
            Some(content) => {
                let content_string = content.resolve_string(context)?;
                content_string.map_content(|raw| {
                    Cow::Owned(
                        // Backslashes must be escaped first so the quote
                        // escapes added below are not themselves re-escaped.
                        raw.replace('\\', r"\\")
                            .replace('"', "\\\"")
                            .replace('\'', r"\'"),
                    )
                })
            }
            None => "".as_content(),
        };
        Ok(Some(content))
    }
}
impl ResolveFilter for AddFilter {
    /// `add`: integer addition when both sides convert to big integers,
    /// otherwise fall back to Python's `+` operator (which e.g. also
    /// concatenates strings and lists). A failing Python add yields None,
    /// which renders as nothing rather than an error.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let Some(variable) = variable else {
            return Ok(None);
        };
        let right = self
            .argument
            .resolve(py, template, context, ResolveFailures::Raise)?
            .expect("missing argument in context should already have raised");
        Ok(match (variable.to_bigint(), right.to_bigint()) {
            (Some(variable), Some(right)) => Some(Content::Int(variable + right)),
            _ => {
                // Not both integers: delegate to Python addition.
                let variable = variable.to_py(py);
                let right = right.to_py(py);
                variable.add(right).ok().map(Content::Py)
            }
        })
    }
}
impl ResolveFilter for CapfirstFilter {
    /// `capfirst`: upper-case the first character of the rendered value.
    /// Missing variables and empty strings render as the empty string.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        _py: Python<'py>,
        _template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let Some(content) = variable else {
            return Ok(Some("".as_content()));
        };
        let rendered = content.render(context)?.into_owned();
        let mut chars = rendered.chars();
        let result = match chars.next() {
            // Empty input stays empty.
            None => "".as_content(),
            Some(first) => {
                // to_uppercase may expand one char into several.
                let capitalised: String = first.to_uppercase().chain(chars).collect();
                capitalised.into_content()
            }
        };
        Ok(Some(result))
    }
}
impl ResolveFilter for CenterFilter {
    /// `center`: pad the rendered value with spaces on both sides up to the
    /// width given by the argument; values already at least that wide pass
    /// through unchanged.
    /// NOTE(review): widths compare against `content.len()` (bytes), which
    /// differs from character count for non-ASCII text — confirm intended
    /// semantics against Django's character-based centering.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let Some(content) = variable else {
            return Ok(Some("".as_content()));
        };
        let content = content.render(context)?;
        let arg = self
            .argument
            .resolve(py, template, context, ResolveFailures::Raise)?
            .expect("missing argument in context should already have raised");
        let size = arg.resolve_usize(self.argument.at)?;
        if size <= content.len() {
            return Ok(Some(content.into_content()));
        }
        // When padding is odd the extra space goes to the right — presumably
        // mirroring Python's str.center (TODO confirm).
        let round_up = size % 2 == 0 && content.len() % 2 != 0;
        let right = if round_up {
            // If the size is even and the content length is odd, we need to adjust the centering
            (size - content.len()).div_ceil(2)
        } else {
            (size - content.len()) / 2
        };
        let left = size - content.len() - right;
        let mut centered = String::with_capacity(size);
        centered.push_str(&" ".repeat(left));
        centered.push_str(&content);
        centered.push_str(&" ".repeat(right));
        Ok(Some(centered.into_content()))
    }
}
/// Remove every occurrence of `value` from `source`.
fn cut(source: Cow<'_, str>, value: &str) -> String {
    // Splitting on the pattern and re-joining the pieces is equivalent to
    // replacing each occurrence with the empty string.
    source.split(value).collect()
}
impl ResolveFilter for CutFilter {
    /// `cut`: remove every occurrence of the argument from the value.
    /// Cutting ";" out of an HTML-safe string demotes the result to unsafe —
    /// presumably because removing semicolons can corrupt HTML entities
    /// (TODO confirm against Django's cut filter).
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let Some(variable) = variable else {
            return Ok(Some("".as_content()));
        };
        let arg = self
            .argument
            .resolve(py, template, context, ResolveFailures::Raise)?
            .expect("missing argument in context should already have raised")
            .resolve_string_strict(context, self.argument.at.into())?
            .into_raw();
        let content_string = variable.resolve_string(context)?;
        let result = match content_string {
            ContentString::String(s) => ContentString::String(cut(s, &arg).into()),
            ContentString::HtmlSafe(s) => {
                let cut = cut(s, &arg);
                match arg.as_ref() {
                    // Safe input loses its safety only when ";" was removed.
                    ";" => ContentString::HtmlUnsafe(cut.into()),
                    _ => ContentString::HtmlSafe(cut.into()),
                }
            }
            ContentString::HtmlUnsafe(s) => ContentString::HtmlUnsafe(cut(s, &arg).into()),
        };
        Ok(Some(Content::String(result)))
    }
}
impl ResolveFilter for DefaultFilter {
    /// `default`: return the left-hand value when it is truthy; otherwise
    /// (falsy or missing) resolve and return the argument instead.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        match variable {
            Some(left)
                if left
                    .to_bool()
                    // A failing __bool__ is reported at this filter's span.
                    .map_err(|err| err.annotate(py, self.at, "here", template))? =>
            {
                Ok(Some(left))
            }
            None | Some(_) => self
                .argument
                .resolve(py, template, context, ResolveFailures::Raise),
        }
    }
}
impl ResolveFilter for DefaultIfNoneFilter {
    /// `default_if_none`: substitute the argument only when the value is
    /// Python `None`; a missing variable renders as the empty string.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let Some(content) = variable else {
            return Ok(Some("".as_content()));
        };
        if let Content::Py(ref value) = content {
            // Only Python's None triggers the fallback; falsy values do not.
            if value.is_none() {
                return self
                    .argument
                    .resolve(py, template, context, ResolveFailures::Raise);
            }
        }
        Ok(Some(content))
    }
}
impl ResolveFilter for DateFilter {
    /// `date`: format a Python date/time/datetime via
    /// `django.utils.dateformat.format`. The format string comes from the
    /// argument, or from the DATE_FORMAT setting when absent. Non-date values
    /// and AttributeErrors raised while formatting render as the empty string.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let Some(value) = variable else {
            return Ok(Some("".as_content()));
        };
        let fmt = match &self.argument {
            Some(arg) => arg
                .resolve(py, template, context, ResolveFailures::Raise)?
                .expect("missing argument in context should already have raised")
                .to_py(py),
            None => {
                let get_format = GET_FORMAT.import(py, "django.utils.formats", "get_format")?;
                get_format.call1(("DATE_FORMAT",))?
            }
        };
        let value = value.to_py(py);
        let is_valid = value.is_instance_of::<PyDate>()
            || value.is_instance_of::<PyTime>()
            || value.is_instance_of::<PyDateTime>();
        if !is_valid {
            return Ok(Some("".as_content()));
        }
        let date_format_fn = DATE_FORMAT.import(py, "django.utils.dateformat", "format")?;
        let formatted = match date_format_fn.call1((value, fmt)) {
            Ok(res) => res,
            Err(e) => {
                // A format code unsupported by the value type surfaces as an
                // AttributeError; swallow it and render nothing.
                if e.is_instance_of::<pyo3::exceptions::PyAttributeError>(py) {
                    return Ok(Some("".as_content()));
                }
                return Err(PyRenderError::PyErr(
                    e.annotate(py, self.at, "here", template),
                ));
            }
        };
        Ok(Some(Content::Py(formatted)))
    }
}
impl ResolveFilter for EscapeFilter {
    /// `escape`: HTML-escape the string form of the value and mark the result
    /// safe. Already-safe strings pass through untouched (no double escaping).
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        _py: Python<'py>,
        _template: TemplateString<'t>,
        _context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        Ok(Some(Content::String(ContentString::HtmlSafe(
            match variable {
                Some(content) => match content {
                    // Already marked safe: do not escape again.
                    Content::String(ContentString::HtmlSafe(content)) => content,
                    Content::String(content) => {
                        let mut encoded = String::new();
                        encode_quoted_attribute_to_string(content.as_raw(), &mut encoded);
                        Cow::Owned(encoded)
                    }
                    // Numbers contain nothing to escape.
                    Content::Int(n) => Cow::Owned(n.to_string()),
                    Content::Float(n) => Cow::Owned(n.to_string()),
                    Content::Py(object) => {
                        let content = object.str()?.extract::<String>()?;
                        let mut encoded = String::new();
                        encode_quoted_attribute_to_string(&content, &mut encoded);
                        Cow::Owned(encoded)
                    }
                    Content::Bool(true) => Cow::Borrowed("True"),
                    Content::Bool(false) => Cow::Borrowed("False"),
                },
                None => Cow::Borrowed(""),
            },
        ))))
    }
}
/// Hex encode characters for use in JavaScript strings.
///
/// Characters with special meaning inside JS string literals (quotes,
/// backslash, HTML-significant characters, line/paragraph separators and all
/// C0 control characters) are replaced with `\uXXXX` escapes; everything else
/// passes through unchanged.
fn escapejs(value: &str) -> String {
    let mut out = String::with_capacity(value.len());
    for ch in value.chars() {
        // Fixed escapes for the dangerous printable characters.
        let fixed: Option<&str> = match ch {
            '\\' => Some(r"\u005C"),
            '\'' => Some(r"\u0027"),
            '"' => Some(r"\u0022"),
            '>' => Some(r"\u003E"),
            '<' => Some(r"\u003C"),
            '&' => Some(r"\u0026"),
            '=' => Some(r"\u003D"),
            '-' => Some(r"\u002D"),
            ';' => Some(r"\u003B"),
            '`' => Some(r"\u0060"),
            // Line separator
            '\u{2028}' => Some(r"\u2028"),
            // Paragraph Separator
            '\u{2029}' => Some(r"\u2029"),
            _ => None,
        };
        match fixed {
            Some(escape) => out.push_str(escape),
            // C0 control characters are escaped numerically; `ch as u32` is
            // always valid since every char is a valid u32.
            None if ('\0'..='\x1f').contains(&ch) => {
                out.push_str(&format!(r"\u{:04X}", ch as u32));
            }
            None => out.push(ch),
        }
    }
    out
}
impl ResolveFilter for EscapejsFilter {
    /// `escapejs`: hex-escape the string form of the value for embedding in
    /// JavaScript string literals; missing values render as the empty string.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        _py: Python<'py>,
        _template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let Some(content) = variable else {
            return Ok(Some("".as_content()));
        };
        let escaped = content
            .resolve_string(context)?
            .map_content(|text| Cow::Owned(escapejs(&text)));
        Ok(Some(escaped))
    }
}
impl ResolveFilter for ExternalFilter {
    /// Invoke a user-registered Python filter callable, passing the value and
    /// (when present) the resolved argument. The Python return value is used
    /// as-is.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let arg = match &self.argument {
            Some(arg) => arg.resolve(py, template, context, ResolveFailures::Raise)?,
            None => None,
        };
        let filter = self.filter.bind(py);
        // One- or two-argument call depending on whether the filter takes an
        // argument.
        let value = match arg {
            Some(arg) => filter.call1((variable, arg))?,
            None => filter.call1((variable,))?,
        };
        Ok(Some(Content::Py(value)))
    }
}
impl ResolveFilter for LowerFilter {
    /// `lower`: lower-case the string form of the value; missing values
    /// render as the empty string.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        _py: Python<'py>,
        _template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let Some(content) = variable else {
            return Ok(Some("".as_content()));
        };
        let lowered = content
            .resolve_string(context)?
            .map_content(|text| Cow::Owned(text.to_lowercase()));
        Ok(Some(lowered))
    }
}
impl ResolveFilter for LengthFilter {
    /// `length`: character count for strings, `len()` for Python objects
    /// (0 when the object has no length), and 0 for numbers, booleans and
    /// missing values.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        _py: Python<'py>,
        _template: TemplateString<'t>,
        _context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let len = match variable {
            None => 0,
            // chars().count() counts code points, mirroring Python's len()
            // on str rather than the UTF-8 byte length.
            Some(Content::String(s)) => s.as_raw().chars().count(),
            Some(Content::Py(obj)) => obj.len().unwrap_or(0),
            Some(Content::Int(_) | Content::Float(_) | Content::Bool(_)) => 0,
        };
        Ok(Some(Content::Int(num_bigint::BigInt::from(len))))
    }
}
impl ResolveFilter for SafeFilter {
    /// `safe`: mark the string form of the value as HTML-safe without
    /// modifying its text.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        _py: Python<'py>,
        _template: TemplateString<'t>,
        _context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        Ok(Some(Content::String(ContentString::HtmlSafe(
            match variable {
                Some(content) => match content {
                    // Any string flavour keeps its raw text, now marked safe.
                    Content::String(content) => content.into_raw(),
                    Content::Int(n) => Cow::Owned(n.to_string()),
                    Content::Float(n) => Cow::Owned(n.to_string()),
                    Content::Py(object) => {
                        let content = object.str()?.extract::<String>()?;
                        Cow::Owned(content)
                    }
                    Content::Bool(true) => Cow::Borrowed("True"),
                    Content::Bool(false) => Cow::Borrowed("False"),
                },
                None => Cow::Borrowed(""),
            },
        ))))
    }
}
/// Convert spaces or repeated dashes to single dashes.
/// Remove characters that aren't ascii alphanumerics, underscores, or hyphens.
/// Convert to lowercase. Also strip leading and trailing whitespace, dashes, and underscores.
///
/// See https://github.com/django/django/blob/stable/5.2.x/django/utils/text.py#L453
pub fn slugify(content: Cow<str>) -> Cow<str> {
    let mut slug = String::with_capacity(content.len());
    // treat start as if preceded by dash to strip leading dashes
    let mut prev_dash = true;
    // NFKD-decompose, then keep only ASCII: accented letters decompose into a
    // base ASCII letter plus combining marks, and the marks are dropped here.
    for c in content.as_ref().nfkd().filter(|c| c.is_ascii()) {
        if c.is_ascii_alphanumeric() || c == '_' {
            slug.push(c.to_ascii_lowercase());
            prev_dash = false;
        } else if (c.is_whitespace() || c == '-') && !prev_dash {
            // Collapse a run of whitespace/dashes into a single '-'.
            slug.push('-');
            prev_dash = true;
        }
    }
    // Strip trailing separators in place without reallocating.
    slug.truncate(slug.trim_end_matches(['-', '_']).len());
    Cow::Owned(slug)
}
impl ResolveFilter for SlugifyFilter {
    /// `slugify`: slugify the string form of the value. For Python values the
    /// HTML-safeness of the input is preserved (SafeData in, safe out);
    /// numbers and booleans are stringified directly, since slugifying them
    /// would be a no-op.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        py: Python<'py>,
        _template: TemplateString<'t>,
        _context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let content = match variable {
            Some(content) => match content {
                Content::Py(content) => {
                    let slug = slugify(Cow::Owned(content.str()?.extract::<String>()?));
                    #[allow(non_snake_case)]
                    let SafeData = SAFEDATA.import(py, "django.utils.safestring", "SafeData")?;
                    match content.is_instance(SafeData)? {
                        true => Content::String(ContentString::HtmlSafe(slug)),
                        false => Content::String(ContentString::HtmlUnsafe(slug)),
                    }
                }
                // Int and Float requires no slugify, we only need to turn it into a string.
                Content::Int(content) => content.to_string().into_content(),
                Content::Float(content) => content.to_string().into_content(),
                Content::String(content) => content.map_content(slugify),
                Content::Bool(true) => "true".as_content(),
                Content::Bool(false) => "false".as_content(),
            },
            None => "".as_content(),
        };
        Ok(Some(content))
    }
}
impl ResolveFilter for TitleFilter {
    /// `title`: title-case the string form of the value — upper-case a letter
    /// that starts a word; lower-case a letter that follows another letter, a
    /// digit, or an apostrophe preceded by a lower-cased letter (so "fred's"
    /// stays "Fred's", not "Fred'S"). Missing values render as the empty
    /// string.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        _py: Python<'py>,
        _template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let Some(content) = variable else {
            return Ok(Some("".as_content()));
        };
        let content_string = content.resolve_string(context)?;
        Ok(Some(content_string.map_content(|content| {
            let mut result = String::with_capacity(content.len());
            let mut prev = None;
            let mut prev_letter_was_lowercased = false;
            for ch in content.chars() {
                if ch.is_alphabetic() {
                    // Django's special cases to trigger lowercase:
                    // 1. After apostrophe that follows a lowercased letter
                    // 2. After a digit or another letter
                    let should_lowercase = match prev {
                        Some('\'') if prev_letter_was_lowercased => true,
                        // Fixed: use short-circuit `||` instead of bitwise `|`
                        // on bools (same truth table, idiomatic and lazy).
                        Some(c) if c.is_ascii_digit() || c.is_alphabetic() => true,
                        _ => false,
                    };
                    if should_lowercase {
                        result.extend(ch.to_lowercase());
                    } else {
                        result.extend(ch.to_uppercase());
                    }
                    prev_letter_was_lowercased = should_lowercase;
                } else {
                    // Non-alphabetic characters are copied through unchanged.
                    result.push(ch);
                }
                prev = Some(ch);
            }
            Cow::Owned(result)
        })))
    }
}
impl ResolveFilter for UpperFilter {
    /// `upper`: upper-case the string form of the value; missing values
    /// render as the empty string.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        _py: Python<'py>,
        _template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let Some(content) = variable else {
            return Ok(Some("".as_content()));
        };
        let uppered = content
            .resolve_string(context)?
            .map_content(|text| Cow::Owned(text.to_uppercase()));
        Ok(Some(uppered))
    }
}
impl ResolveFilter for WordcountFilter {
    /// `wordcount`: number of whitespace-separated words in the string form
    /// of the value; missing values count as zero words.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        _py: Python<'py>,
        _template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let count = match variable {
            None => 0,
            Some(content) => content
                .resolve_string(context)?
                .as_raw()
                .split_whitespace()
                .count(),
        };
        Ok(Some(Content::Int(count.into())))
    }
}
/// A word-wrap function that preserves existing line breaks.
/// Expects that existing line breaks are posix newlines.
///
/// Preserve all white space except added line breaks consume the space on
/// which they break the line.
///
/// Don't wrap long words, thus the output text may have lines longer than ``width``.
fn wordwrap(text: &str, width: usize) -> String {
    let mut result = String::with_capacity(text.len());
    for (i, line) in text.split('\n').enumerate() {
        if i > 0 {
            result.push('\n');
        }
        if line.is_empty() {
            continue;
        }
        // Fixed: find the BYTE offset of the first non-whitespace character.
        // The previous code passed a char count to `split_at` (a byte API),
        // which panicked or mis-split when the leading whitespace contained
        // multi-byte characters such as U+00A0 or U+2003.
        let indent_end = line
            .find(|c: char| !c.is_whitespace())
            .unwrap_or(line.len());
        let (indent, trimmed_line) = line.split_at(indent_end);
        let mut words = trimmed_line.split_whitespace();
        let Some(first_word) = words.next() else {
            // Line contains only whitespace - preserve it
            result.push_str(line);
            continue;
        };
        result.push_str(indent);
        result.push_str(first_word);
        // Indent length counts characters (as before); word lengths are byte
        // lengths. NOTE(review): byte length differs from char count for
        // non-ASCII words — confirm desired width semantics.
        let mut current_len = indent.chars().count() + first_word.len();
        for word in words {
            let word_len = word.len();
            // `current_len + word_len < width` <=> the line including the
            // joining space stays within `width`.
            if current_len + word_len < width {
                result.push(' ');
                current_len += 1 + word_len;
            } else {
                result.push('\n');
                current_len = word_len;
            }
            result.push_str(word);
        }
    }
    result
}
impl ResolveFilter for WordwrapFilter {
    /// `wordwrap`: wrap the string form of the value at the width given by
    /// the argument (long words are never split). Widths <= 0 raise a
    /// ValueError; widths that overflow `usize` saturate to `usize::MAX`,
    /// i.e. effectively no wrapping.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let Some(content) = variable else {
            return Ok(Some("".as_content()));
        };
        let text = content.resolve_string(context)?;
        let arg = self
            .argument
            .resolve(py, template, context, ResolveFailures::Raise)?
            .expect("missing argument in context should already have raised");
        // Check for negative values before converting to usize
        if let Some(bigint) = arg.to_bigint()
            && let Some(n) = bigint.to_isize()
            && n <= 0
        {
            return Err(pyo3::exceptions::PyValueError::new_err(format!(
                "invalid width {n} (must be > 0)"
            ))
            .annotate(py, self.argument.at, "width", template)
            .into());
        }
        let width = match arg.resolve_usize(self.argument.at) {
            Ok(w) => w,
            // A width too large for usize means "wider than any text".
            Err(PyRenderError::RenderError(RenderError::OverflowError { .. })) => usize::MAX,
            Err(e) => return Err(e),
        };
        let wrapped = wordwrap(text.as_raw(), width);
        Ok(Some(text.map_content(|_| Cow::Owned(wrapped))))
    }
}
impl ResolveFilter for YesnoFilter {
    /// `yesno`: map the value's truthiness onto the comma-separated argument
    /// `"yes,no[,maybe]"` (default: the translated "yes,no,maybe").
    /// Python `None` selects `maybe` (falling back to `no` when only two
    /// values are given); a missing variable selects `no`; fewer than two
    /// values returns the original value unchanged.
    fn resolve<'t, 'py>(
        &self,
        variable: Option<Content<'t, 'py>>,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> ResolveResult<'t, 'py> {
        let arg_string = match &self.argument {
            Some(arg) => {
                let arg_content = arg
                    .resolve(py, template, context, ResolveFailures::Raise)?
                    .expect("missing argument in context should already have raised");
                arg_content
                    .resolve_string_strict(context, arg.at.into())?
                    .into_raw()
            }
            None => Cow::Owned(gettext(py, "yes,no,maybe")?),
        };
        let bits: Vec<&str> = arg_string.split(',').collect();
        let (yes, no, maybe) = match bits.as_slice() {
            // If less than 2 values, return the original value
            [] | [_] => return Ok(variable),
            [yes, no] => (yes, no, no),
            [yes, no, maybe, ..] => (yes, no, maybe),
        };
        let result = match variable {
            Some(Content::Py(ref obj)) if obj.is_none() => maybe,
            Some(content) => match content.to_bool() {
                Ok(true) => yes,
                Ok(false) => no,
                Err(error) => {
                    let error = error.annotate(py, self.at, "when calling __bool__ here", template);
                    return Err(error.into());
                }
            },
            None => no,
        };
        Ok(Some(result.to_string().into_content()))
    }
}
#[cfg(test)]
mod tests {
use std::sync::Arc;
use super::*;
use crate::filters::{AddSlashesFilter, DefaultFilter, LowerFilter, UpperFilter};
use crate::parse::TagElement;
use crate::render::Render;
use crate::template::django_rusty_templates::{Engine, Template};
use crate::types::{Argument, ArgumentType, Text};
use pyo3::types::{PyDict, PyString};
static MARK_SAFE: PyOnceLock<Py<PyAny>> = PyOnceLock::new();
    /// Test helper: wrap a string with Django's `mark_safe`, caching the
    /// Python callable in MARK_SAFE across calls.
    fn mark_safe(py: Python<'_>, string: String) -> PyResult<Py<PyAny>> {
        let mark_safe = match MARK_SAFE.get(py) {
            Some(mark_safe) => mark_safe,
            None => {
                // First use: import and memoise django.utils.safestring.mark_safe.
                let py_mark_safe = py.import("django.utils.safestring")?;
                let py_mark_safe = py_mark_safe.getattr("mark_safe")?;
                MARK_SAFE.set(py, py_mark_safe.into()).unwrap();
                MARK_SAFE.get(py).unwrap()
            }
        };
        let safe_string = mark_safe.call1(py, (string,))?;
        Ok(safe_string)
    }
use dtl_lexer::types::Variable;
use std::collections::HashMap;
#[test]
fn test_render_filter() {
Python::initialize();
Python::attach(|py| {
let name = PyString::new(py, "Lily").into_any();
let context = HashMap::from([("name".to_string(), name.unbind())]);
let mut context = Context::new(context, None, false);
let template = TemplateString("{{ name|default:'Bryony' }}");
let variable = Variable::new((3, 4));
let filter = Filter {
at: (8, 7),
all_at: (3, 12),
left: TagElement::Variable(variable),
filter: FilterType::Default(DefaultFilter::new(
Argument {
at: (16, 8),
argument_type: ArgumentType::Text(Text::new((17, 6))),
},
(8, 7),
)),
};
let rendered = filter.render(py, template, &mut context).unwrap();
assert_eq!(rendered, "Lily");
});
}
#[test]
fn test_render_filter_slugify_happy_path() {
Python::initialize();
Python::attach(|py| {
let engine = Arc::new(Engine::empty());
let template_string = "{{ var|slugify }}".to_string();
let context = PyDict::new(py);
context.set_item("var", "hello world").unwrap();
let template = Template::new_from_string(py, template_string, engine).unwrap();
let result = template
.py_render(py, Some(context.into_any()), None)
.unwrap();
assert_eq!(result, "hello-world");
});
}
#[test]
fn test_render_filter_slugify_spaces_omitted() {
Python::initialize();
Python::attach(|py| {
let engine = Arc::new(Engine::empty());
let template_string = "{{ var|slugify }}".to_string();
let context = PyDict::new(py);
context.set_item("var", " hello world").unwrap();
let template = Template::new_from_string(py, template_string, engine).unwrap();
let result = template
.py_render(py, Some(context.into_any()), None)
.unwrap();
assert_eq!(result, "hello-world");
});
}
#[test]
fn test_render_filter_slugify_special_characters_omitted() {
Python::initialize();
Python::attach(|py| {
let engine = Arc::new(Engine::empty());
let template_string = "{{ var|slugify }}".to_string();
let context = PyDict::new(py);
context.set_item("var", "a&€%").unwrap();
let template = Template::new_from_string(py, template_string, engine).unwrap();
let result = template
.py_render(py, Some(context.into_any()), None)
.unwrap();
assert_eq!(result, "a");
});
}
#[test]
fn test_render_filter_slugify_multiple_spaces_inside_becomes_single() {
Python::initialize();
Python::attach(|py| {
let engine = Arc::new(Engine::empty());
let template_string = "{{ var|slugify }}".to_string();
let context = PyDict::new(py);
context.set_item("var", "a & b").unwrap();
let template = Template::new_from_string(py, template_string, engine).unwrap();
let result = template
.py_render(py, Some(context.into_any()), None)
.unwrap();
assert_eq!(result, "a-b");
});
}
#[test]
fn test_render_filter_slugify_integer() {
Python::initialize();
Python::attach(|py| {
let engine = Arc::new(Engine::empty());
let template_string = "{{ var|default:1|slugify }}".to_string();
let context = PyDict::new(py);
let template = Template::new_from_string(py, template_string, engine).unwrap();
let result = template
.py_render(py, Some(context.into_any()), None)
.unwrap();
assert_eq!(result, "1");
});
}
#[test]
fn test_render_filter_slugify_float() {
Python::initialize();
Python::attach(|py| {
let engine = Arc::new(Engine::empty());
let template_string = "{{ var|default:1.3|slugify }}".to_string();
let context = PyDict::new(py);
let template = Template::new_from_string(py, template_string, engine).unwrap();
let result = template
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | true |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/src/render/lorem.rs | src/render/lorem.rs | use rand::Rng;
use rand::seq::SliceRandom;
use std::borrow::Cow;
static COMMON_P: &str = "Lorem ipsum dolor sit amet, consectetur adipisicing elit, sed do eiusmod \
tempor incididunt ut labore et dolore magna aliqua. Ut enim ad minim veniam, quis nostrud \
exercitation ullamco laboris nisi ut aliquip ex ea commodo consequat. Duis aute irure dolor in \
reprehenderit in voluptate velit esse cillum dolore eu fugiat nulla pariatur. Excepteur sint \
occaecat cupidatat non proident, sunt in culpa qui officia deserunt mollit anim id est laborum.";
static WORDS: [&str; 182] = [
"exercitationem",
"perferendis",
"perspiciatis",
"laborum",
"eveniet",
"sunt",
"iure",
"nam",
"nobis",
"eum",
"cum",
"officiis",
"excepturi",
"odio",
"consectetur",
"quasi",
"aut",
"quisquam",
"vel",
"eligendi",
"itaque",
"non",
"odit",
"tempore",
"quaerat",
"dignissimos",
"facilis",
"neque",
"nihil",
"expedita",
"vitae",
"vero",
"ipsum",
"nisi",
"animi",
"cumque",
"pariatur",
"velit",
"modi",
"natus",
"iusto",
"eaque",
"sequi",
"illo",
"sed",
"ex",
"et",
"voluptatibus",
"tempora",
"veritatis",
"ratione",
"assumenda",
"incidunt",
"nostrum",
"placeat",
"aliquid",
"fuga",
"provident",
"praesentium",
"rem",
"necessitatibus",
"suscipit",
"adipisci",
"quidem",
"possimus",
"voluptas",
"debitis",
"sint",
"accusantium",
"unde",
"sapiente",
"voluptate",
"qui",
"aspernatur",
"laudantium",
"soluta",
"amet",
"quo",
"aliquam",
"saepe",
"culpa",
"libero",
"ipsa",
"dicta",
"reiciendis",
"nesciunt",
"doloribus",
"autem",
"impedit",
"minima",
"maiores",
"repudiandae",
"ipsam",
"obcaecati",
"ullam",
"enim",
"totam",
"delectus",
"ducimus",
"quis",
"voluptates",
"dolores",
"molestiae",
"harum",
"dolorem",
"quia",
"voluptatem",
"molestias",
"magni",
"distinctio",
"omnis",
"illum",
"dolorum",
"voluptatum",
"ea",
"quas",
"quam",
"corporis",
"quae",
"blanditiis",
"atque",
"deserunt",
"laboriosam",
"earum",
"consequuntur",
"hic",
"cupiditate",
"quibusdam",
"accusamus",
"ut",
"rerum",
"error",
"minus",
"eius",
"ab",
"ad",
"nemo",
"fugit",
"officia",
"at",
"in",
"id",
"quos",
"reprehenderit",
"numquam",
"iste",
"fugiat",
"sit",
"inventore",
"beatae",
"repellendus",
"magnam",
"recusandae",
"quod",
"explicabo",
"doloremque",
"aperiam",
"consequatur",
"asperiores",
"commodi",
"optio",
"dolor",
"labore",
"temporibus",
"repellat",
"veniam",
"architecto",
"est",
"esse",
"mollitia",
"nulla",
"a",
"similique",
"eos",
"alias",
"dolore",
"tenetur",
"deleniti",
"porro",
"facere",
"maxime",
"corrupti",
];
pub static COMMON_WORDS: [&str; 19] = [
"lorem",
"ipsum",
"dolor",
"sit",
"amet",
"consectetur",
"adipisicing",
"elit",
"sed",
"do",
"eiusmod",
"tempor",
"incididunt",
"ut",
"labore",
"et",
"dolore",
"magna",
"aliqua",
];
/// Generate a random lorem-ipsum sentence: 1-5 comma-separated sections of
/// 3-12 distinct words each, capitalised and terminated by "." or "?" at
/// random.
pub fn sentence() -> String {
    // Removed: redundant function-local `use rand::Rng;` and
    // `use rand::seq::SliceRandom;` — both traits are already imported at the
    // top of this file.
    let mut rng = rand::thread_rng();
    let num_sections = rng.gen_range(1..=5);
    let mut sections = Vec::with_capacity(num_sections);
    for _ in 0..num_sections {
        let num_words = rng.gen_range(3..=12);
        let selected_words: Vec<&str> = WORDS
            .choose_multiple(&mut rng, num_words)
            .copied()
            .collect();
        sections.push(selected_words.join(" "));
    }
    let mut sentence = sections.join(", ");
    let first = sentence
        .chars()
        .next()
        .expect("A sentence should have at least one character");
    // to_uppercase may map one char to several, so rebuild the string.
    let upper = first.to_uppercase();
    let rest = &sentence[first.len_utf8()..];
    sentence = format!("{upper}{rest}");
    let punctuation = if rng.gen_bool(0.5) { "?" } else { "." };
    sentence.push_str(punctuation);
    sentence
}
/// Generate a random lorem-ipsum paragraph of 1-4 sentences.
pub fn paragraph() -> String {
    let sentence_count = rand::thread_rng().gen_range(1..=4);
    let sentences: Vec<String> = (0..sentence_count).map(|_| sentence()).collect();
    sentences.join(" ")
}
/// Generate `count` paragraphs. When `common` is true the first paragraph is
/// the fixed COMMON_P text (borrowed, no allocation); the rest are random.
pub fn paragraphs(count: usize, common: bool) -> Vec<Cow<'static, str>> {
    (0..count)
        .map(|i| {
            if common && i == 0 {
                Cow::Borrowed(COMMON_P)
            } else {
                Cow::Owned(paragraph())
            }
        })
        .collect()
}
/// Generate `count` space-joined lorem words. When `common` is true the
/// output starts with the fixed COMMON_WORDS prefix before random words.
pub fn words(mut count: usize, common: bool) -> String {
    // Fast path: the request fits entirely inside the fixed common-word list.
    if common && count <= COMMON_WORDS.len() {
        return COMMON_WORDS[..count].join(" ");
    }
    let mut rng = rand::thread_rng();
    let mut selected: Vec<&str> = Vec::with_capacity(count);
    if common {
        // Emit the full common list first, then top up with random words.
        selected.extend(&COMMON_WORDS);
        count -= selected.len();
    }
    while count > 0 {
        // choose_multiple samples without replacement, so draw at most one
        // full pass over WORDS per iteration.
        let batch = count.min(WORDS.len());
        selected.extend(WORDS.choose_multiple(&mut rng, batch).copied());
        count -= batch;
    }
    selected.join(" ")
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/src/render/types.rs | src/render/types.rs | use std::borrow::Cow;
use std::collections::BTreeMap;
use std::collections::HashMap;
use std::collections::HashSet;
use std::collections::hash_map::Entry;
use std::iter::zip;
use std::sync::{Arc, Mutex};
use html_escape::encode_quoted_attribute;
use miette::SourceSpan;
use num_bigint::{BigInt, ToBigInt};
use num_traits::{ToPrimitive, Zero};
use pyo3::exceptions::{PyAttributeError, PyKeyError, PyTypeError};
use pyo3::intern;
use pyo3::prelude::*;
use pyo3::sync::{MutexExt, PyOnceLock};
use pyo3::types::{PyBool, PyDict, PyInt, PyString, PyType};
use crate::error::{AnnotatePyErr, PyRenderError, RenderError};
use crate::template::django_rusty_templates::{Engine, Template, get_template, select_template};
use crate::utils::PyResultMethods;
use dtl_lexer::types::{At, TemplateString};
// Cached import of `django.utils.safestring.mark_safe`, initialised on first
// use in `Content::to_py`.
static MARK_SAFE: PyOnceLock<Py<PyAny>> = PyOnceLock::new();
/// Counter state for one active `{% for %}` loop.
#[derive(Debug, Clone)]
pub struct ForLoop {
    /// Zero-based index of the current iteration.
    count: usize,
    /// Total number of iterations in the loop.
    len: usize,
}
impl ForLoop {
    /// Zero-based position of the current iteration (`forloop.counter0`).
    pub fn counter0(&self) -> usize {
        self.count
    }

    /// One-based position of the current iteration (`forloop.counter`).
    pub fn counter(&self) -> usize {
        1 + self.count
    }

    /// Iterations remaining including the current one (`forloop.revcounter`).
    pub fn rev_counter(&self) -> usize {
        self.len - self.count
    }

    /// Iterations remaining after the current one (`forloop.revcounter0`).
    pub fn rev_counter0(&self) -> usize {
        self.rev_counter() - 1
    }

    /// Whether this is the first iteration (`forloop.first`).
    pub fn first(&self) -> bool {
        self.counter0() == 0
    }

    /// Whether this is the last iteration (`forloop.last`).
    pub fn last(&self) -> bool {
        self.rev_counter() == 1
    }
}
/// Cache key for templates loaded by `{% include %}`: either a single
/// template name or a list of candidate names (resolved via `select_template`).
#[derive(Clone, Debug, PartialEq, Eq, Hash)]
pub enum IncludeTemplateKey {
    String(String),
    Vec(Vec<String>),
}
/// Mutable state threaded through a template render.
#[derive(Debug, Default)]
pub struct Context {
    // Variable scopes: each name maps to a stack of values, innermost
    // (most recently pushed) value last.
    context: HashMap<String, Vec<Py<PyAny>>>,
    // Stack of active `{% for %}` loop counters, innermost last.
    loops: Vec<ForLoop>,
    // The current HTTP request object, if rendering with one.
    pub request: Option<Py<PyAny>>,
    // Whether rendered variables are HTML-escaped by default.
    pub autoescape: bool,
    // Names pushed per scope so `pop_variables` knows what to unwind.
    names: Vec<HashSet<String>>,
    // Templates already loaded for `{% include %}`, keyed by their name(s).
    include_cache: HashMap<IncludeTemplateKey, Arc<Template>>,
}
impl Context {
    /// Create a rendering context from an initial mapping of variables.
    pub fn new(
        context: HashMap<String, Py<PyAny>>,
        request: Option<Py<PyAny>>,
        autoescape: bool,
    ) -> Self {
        // Each variable starts with a single-entry value stack.
        let context = context.into_iter().map(|(k, v)| (k, vec![v])).collect();
        Self {
            request,
            context,
            autoescape,
            loops: Vec::new(),
            names: Vec::new(),
            include_cache: HashMap::new(),
        }
    }

    /// Duplicate the context, cloning Python values by reference only.
    pub fn clone_ref(&self, py: Python<'_>) -> Self {
        Self {
            request: self.request.as_ref().map(|v| v.clone_ref(py)),
            context: self
                .context
                .iter()
                .map(|(k, v)| (k.clone(), v.iter().map(|v| v.clone_ref(py)).collect()))
                .collect(),
            autoescape: self.autoescape,
            loops: self.loops.clone(),
            names: self.names.clone(),
            // `Arc` clones are shallow, so cached templates are shared.
            include_cache: self.include_cache.clone(),
        }
    }

    /// Look up the innermost value bound to `key`, if any.
    pub fn get(&self, key: &str) -> Option<&Py<PyAny>> {
        self.context.get(key)?.last()
    }

    /// Render the visible variables for debugging; a `BTreeMap` keeps the
    /// output ordering deterministic.
    pub fn display(&self, py: Python<'_>) -> String {
        let context: BTreeMap<_, _> = self
            .context
            .iter()
            .filter_map(|(k, v)| Some((k, v.last()?.bind(py))))
            .collect();
        format!("{context:?}")
    }

    /// Push `value` onto `key`'s value stack; when `replace` is true the
    /// current innermost value is popped first.
    fn _insert(&mut self, key: String, value: Bound<'_, PyAny>, replace: bool) {
        let value = value.unbind();
        // `Entry` is already imported at the top of the file (and used by
        // `get_or_insert_include`), so use the short path consistently.
        match self.context.entry(key) {
            Entry::Occupied(mut entry) => {
                let values = entry.get_mut();
                if replace {
                    values.pop();
                }
                values.push(value);
            }
            Entry::Vacant(entry) => {
                entry.insert(vec![value]);
            }
        }
    }

    /// Add a new innermost value for `key` without removing the current one.
    pub fn append(&mut self, key: String, value: Bound<'_, PyAny>) {
        self._insert(key, value, false);
    }

    /// Replace the innermost value for `key` (or create the binding).
    pub fn insert(&mut self, key: String, value: Bound<'_, PyAny>) {
        self._insert(key, value, true);
    }

    /// Bind a single loop variable for iteration `index`. The first
    /// iteration opens a new scope; later iterations replace in place.
    pub fn push_variable(&mut self, name: String, value: Bound<'_, PyAny>, index: usize) {
        let replace = index != 0;
        if !replace {
            let mut names_set = HashSet::new();
            names_set.insert(name.clone());
            self.names.push(names_set);
        }
        self._insert(name, value, replace);
    }

    /// Bind the loop variables for iteration `index`, unpacking `values`
    /// when more than one name is given.
    ///
    /// # Errors
    ///
    /// Returns a `TupleUnpackError` when the number of values does not
    /// match the number of names, or propagates (annotated) Python errors
    /// raised while iterating `values`.
    pub fn push_variables(
        &mut self,
        // `&[String]` rather than `&Vec<String>`: callers coerce transparently.
        names: &[String],
        names_at: At,
        values: Bound<'_, PyAny>,
        values_at: At,
        index: usize,
        template: TemplateString<'_>,
    ) -> Result<(), PyRenderError> {
        let replace = index != 0;
        if !replace {
            let names_set = names.iter().cloned().collect();
            self.names.push(names_set);
        }
        if names.len() == 1 {
            self._insert(names[0].clone(), values, replace);
        } else {
            let py = values.py();
            let values: Vec<_> = match values.try_iter() {
                Ok(values) => match values.collect() {
                    Ok(values) => values,
                    Err(error) => {
                        let error = error.annotate(py, values_at, "while unpacking this", template);
                        return Err(error.into());
                    }
                },
                // A `TypeError` means `values` is not iterable at all: it is
                // reported as a single value against the expected count.
                Err(error) if error.is_instance_of::<PyTypeError>(py) => {
                    return Err(RenderError::TupleUnpackError {
                        expected_count: names.len(),
                        actual_count: 1,
                        expected_at: names_at.into(),
                        actual_at: values_at.into(),
                    }
                    .into());
                }
                Err(error) => {
                    let error = error.annotate(py, values_at, "while iterating this", template);
                    return Err(error.into());
                }
            };
            if names.len() == values.len() {
                for (name, value) in zip(names, values) {
                    self._insert(name.clone(), value, replace);
                }
            } else {
                return Err(RenderError::TupleUnpackError {
                    expected_count: names.len(),
                    actual_count: values.len(),
                    expected_at: names_at.into(),
                    actual_at: values_at.into(),
                }
                .into());
            }
        }
        Ok(())
    }

    /// Remove the innermost value bound to `name`.
    ///
    /// # Panics
    ///
    /// Panics if `name` was never pushed.
    pub fn pop_variable(&mut self, name: &str) {
        let values = self
            .context
            .get_mut(name)
            .expect("Variable should have been pushed before");
        values.pop();
    }

    /// Pop the most recent variable scope created by `push_variable(s)`.
    pub fn pop_variables(&mut self) {
        if let Some(names) = self.names.pop() {
            for name in names {
                self.pop_variable(&name);
            }
        }
    }

    /// Enter a `{% for %}` loop over `len` items.
    pub fn push_for_loop(&mut self, len: usize) {
        self.loops.push(ForLoop { count: 0, len });
    }

    /// Advance the innermost loop counter by one.
    ///
    /// # Panics
    ///
    /// Panics if no loop is active.
    pub fn increment_for_loop(&mut self) {
        let for_loop = self
            .loops
            .last_mut()
            .expect("Called within an active for loop");
        for_loop.count += 1;
    }

    /// Leave the innermost `{% for %}` loop.
    ///
    /// # Panics
    ///
    /// Panics if no loop is active.
    pub fn pop_for_loop(&mut self) {
        self.loops
            .pop()
            .expect("Called when exiting an active for loop");
    }

    /// Fetch the loop `depth` levels out from the innermost one
    /// (`depth == 0` is the current loop).
    pub fn get_for_loop(&self, depth: usize) -> Option<&ForLoop> {
        let index = self.loops.len().checked_sub(depth + 1)?;
        self.loops.get(index)
    }

    /// Render the `forloop` variable (with its `parentloop` chain) as a
    /// Python dict literal.
    ///
    /// NOTE(review): this assumes `depth <= self.loops.len()` (a larger
    /// depth underflows the `take` count), and the `rev()` means the
    /// innermost loop ends up nested as a `parentloop` of outer loops —
    /// verify this matches Django's `forloop.parentloop` orientation and
    /// `get_for_loop`'s depth convention before relying on it.
    pub fn render_for_loop(&self, py: Python<'_>, depth: usize) -> String {
        let mut forloop_dict = PyDict::new(py);
        for forloop in self.loops.iter().rev().take(self.loops.len() - depth) {
            let dict = PyDict::new(py);
            dict.set_item("parentloop", forloop_dict)
                .expect("Can always set a str: dict key/value");
            dict.set_item("counter0", forloop.counter0())
                .expect("Can always set a str: int key/value");
            dict.set_item("counter", forloop.counter())
                .expect("Can always set a str: int key/value");
            dict.set_item("revcounter", forloop.rev_counter())
                .expect("Can always set a str: int key/value");
            dict.set_item("revcounter0", forloop.rev_counter0())
                .expect("Can always set a str: int key/value");
            dict.set_item("first", forloop.first())
                .expect("Can always set a str: bool key/value");
            dict.set_item("last", forloop.last())
                .expect("Can always set a str: bool key/value");
            forloop_dict = dict;
        }
        let forloop_str = forloop_dict
            .str()
            .expect("All elements of the dictionary can be converted to a string");
        forloop_str.to_string()
    }

    /// Fetch the cached `{% include %}` template for `key`, loading and
    /// caching it on first use.
    ///
    /// # Errors
    ///
    /// Propagates any Python error raised while loading the template.
    pub fn get_or_insert_include(
        &mut self,
        py: Python,
        engine: &Arc<Engine>,
        key: &IncludeTemplateKey,
    ) -> Result<Arc<Template>, PyErr> {
        match self.include_cache.entry(key.clone()) {
            Entry::Occupied(entry) => Ok(entry.get().clone()),
            Entry::Vacant(entry) => {
                let include = match key {
                    IncludeTemplateKey::String(content) => {
                        get_template(engine.clone(), py, Cow::Borrowed(content))?
                    }
                    IncludeTemplateKey::Vec(templates) => {
                        select_template(engine.clone(), py, templates.clone())?
                    }
                };
                Ok(entry.insert(Arc::new(include)).clone())
            }
        }
    }
}
/// Python-visible wrapper around [Context], shared behind an `Arc<Mutex>`
/// so Rust rendering code and Python callbacks see the same state.
#[pyclass(mapping)]
#[derive(Clone)]
pub struct PyContext {
    pub context: Arc<Mutex<Context>>,
}
impl PyContext {
    /// Wrap an owned [Context] for sharing with Python code.
    pub fn new(context: Context) -> Self {
        let context = Arc::new(Mutex::new(context));
        Self { context }
    }
}
#[pymethods]
impl PyContext {
    /// The current HTTP request object, or `None` when rendering without one.
    #[getter]
    fn request<'py>(&self, py: Python<'py>) -> Option<Bound<'py, PyAny>> {
        let guard = self
            .context
            .lock_py_attached(py)
            .expect("Mutex should not be poisoned");
        guard
            .request
            .as_ref()
            .map(|request| request.bind(py).clone())
    }

    /// Mapping-style `get`, returning `fallback` when `key` is unbound.
    fn get<'py>(
        &self,
        py: Python<'py>,
        key: String,
        fallback: Bound<'py, PyAny>,
    ) -> Bound<'py, PyAny> {
        let guard = self
            .context
            .lock_py_attached(py)
            .expect("Mutex should not be poisoned");
        match guard.get(&key) {
            Some(value) => value.bind(py).clone(),
            None => fallback,
        }
    }

    /// `key in context`: whether `key` is currently bound.
    fn __contains__(&self, py: Python<'_>, key: String) -> bool {
        let guard = self
            .context
            .lock_py_attached(py)
            .expect("Mutex should not be poisoned");
        guard.get(&key).is_some()
    }

    /// `context[key]`: raises `KeyError` when `key` is unbound.
    fn __getitem__<'py>(&self, py: Python<'py>, key: String) -> PyResult<Bound<'py, PyAny>> {
        let guard = self
            .context
            .lock_py_attached(py)
            .expect("Mutex should not be poisoned");
        match guard.get(&key) {
            Some(value) => Ok(value.bind(py).clone()),
            None => Err(PyKeyError::new_err(key)),
        }
    }

    /// `context[key] = value`: replaces the innermost binding and records
    /// the name in the innermost scope so it is unwound with that scope.
    fn __setitem__<'py>(&self, py: Python<'py>, key: String, value: Bound<'py, PyAny>) {
        let mut guard = self
            .context
            .lock_py_attached(py)
            .expect("Mutex should not be poisoned");
        if let Some(last) = guard.names.last_mut() {
            last.insert(key.clone());
        }
        guard.insert(key, value);
    }
}
/// A rendered string together with its HTML-escaping state.
#[derive(Debug, IntoPyObject)]
pub enum ContentString<'t> {
    /// Produced with autoescaping off; emitted as-is.
    String(Cow<'t, str>),
    /// Already safe for HTML output (e.g. via `__html__`); never escaped.
    HtmlSafe(Cow<'t, str>),
    /// Unsafe for HTML output; escaped by [ContentString::content].
    HtmlUnsafe(Cow<'t, str>),
}
#[allow(clippy::needless_lifetimes)] // https://github.com/rust-lang/rust-clippy/issues/13923
impl<'t, 'py> ContentString<'t> {
    /// Consume the string, HTML-escaping it when it is [Self::HtmlUnsafe].
    pub fn content(self) -> Cow<'t, str> {
        match self {
            Self::HtmlUnsafe(text) => Cow::Owned(encode_quoted_attribute(&text).to_string()),
            Self::String(text) | Self::HtmlSafe(text) => text,
        }
    }

    /// Borrow the underlying text without applying any escaping.
    pub fn as_raw(&self) -> &Cow<'t, str> {
        match self {
            Self::String(text) | Self::HtmlSafe(text) | Self::HtmlUnsafe(text) => text,
        }
    }

    /// Consume the string without applying any escaping.
    pub fn into_raw(self) -> Cow<'t, str> {
        match self {
            Self::String(text) | Self::HtmlSafe(text) | Self::HtmlUnsafe(text) => text,
        }
    }

    /// Apply `f` to the text while preserving the escaping state.
    pub fn map_content(self, f: impl FnOnce(Cow<'t, str>) -> Cow<'t, str>) -> Content<'t, 'py> {
        let mapped = match self {
            Self::String(text) => Self::String(f(text)),
            Self::HtmlSafe(text) => Self::HtmlSafe(f(text)),
            Self::HtmlUnsafe(text) => Self::HtmlUnsafe(f(text)),
        };
        Content::String(mapped)
    }
}
/// Convert a Python object into a [ContentString], honouring the context's
/// autoescape setting and the `__html__` protocol.
///
/// # Errors
///
/// Propagates Python errors raised by `str()`, `__html__`, or string
/// extraction (any non-`AttributeError` from the `__html__` lookup).
fn resolve_python<'t>(value: Bound<'_, PyAny>, context: &Context) -> PyResult<ContentString<'t>> {
    // With autoescaping off the stringified value is used verbatim.
    if !context.autoescape {
        return Ok(ContentString::String(
            value.str()?.extract::<String>()?.into(),
        ));
    }
    let py = value.py();
    // Stringify non-string objects before checking the __html__ protocol.
    let value = match value.is_instance_of::<PyString>() {
        true => value,
        false => value.str()?.into_any(),
    };
    Ok(
        match value
            .getattr(intern!(py, "__html__"))
            .ok_or_isinstance_of::<PyAttributeError>(py)?
        {
            // Objects implementing __html__ vouch for their own safety.
            Ok(html) => ContentString::HtmlSafe(html.call0()?.extract::<String>()?.into()),
            Err(_) => ContentString::HtmlUnsafe(value.str()?.extract::<String>()?.into()),
        },
    )
}
/// Convert a [BigInt] argument to `usize`, clamping negative values to zero.
///
/// # Errors
///
/// Returns [RenderError::OverflowError] when the value does not fit in an
/// `isize`.
fn resolve_bigint(bigint: BigInt, at: At) -> Result<usize, PyRenderError> {
    match bigint.to_isize() {
        // `usize::try_from` only fails for negative values, which are
        // clamped to zero — no sign-losing `as` cast (or clippy allow) needed.
        Some(n) => Ok(usize::try_from(n).unwrap_or(0)),
        None => Err(RenderError::OverflowError {
            argument: bigint.to_string(),
            argument_at: at.into(),
        }
        .into()),
    }
}
/// A value flowing through template rendering: either a native Rust
/// scalar/string or an arbitrary Python object.
#[derive(Debug, IntoPyObject)]
pub enum Content<'t, 'py> {
    Py(Bound<'py, PyAny>),
    String(ContentString<'t>),
    Float(f64),
    Int(BigInt),
    Bool(bool),
}
impl<'t, 'py> Content<'t, 'py> {
    /// Render to final output text, HTML-escaping unsafe strings per the
    /// context's autoescape setting.
    ///
    /// # Errors
    ///
    /// Propagates Python errors raised while stringifying a `Py` value.
    pub fn render(self, context: &Context) -> PyResult<Cow<'t, str>> {
        Ok(match self {
            Self::Py(content) => resolve_python(content, context)?.content(),
            Self::String(content) => content.content(),
            Self::Float(content) => content.to_string().into(),
            Self::Int(content) => content.to_string().into(),
            // Python spelling of booleans, matching Django's output.
            Self::Bool(true) => "True".into(),
            Self::Bool(false) => "False".into(),
        })
    }

    /// Convert to a [ContentString] (no final escaping applied here),
    /// stringifying scalars and Python objects as needed.
    ///
    /// # Errors
    ///
    /// Propagates Python errors raised while stringifying a `Py` value.
    pub fn resolve_string(self, context: &Context) -> PyResult<ContentString<'t>> {
        Ok(match self {
            Self::String(content) => content,
            Self::Float(content) => ContentString::String(content.to_string().into()),
            Self::Int(content) => ContentString::String(content.to_string().into()),
            Self::Py(content) => return resolve_python(content, context),
            Self::Bool(true) => ContentString::String(Cow::Borrowed("True")),
            Self::Bool(false) => ContentString::String(Cow::Borrowed("False")),
        })
    }

    /// Resolve to a string, raising an error if the content is not a string or a string-like Python object
    ///
    /// # Errors
    ///
    /// Returns [RenderError::InvalidArgumentString] for non-string content.
    pub fn resolve_string_strict(
        self,
        context: &Context,
        argument_at: SourceSpan,
    ) -> Result<ContentString<'t>, PyRenderError> {
        match self {
            Self::String(content) => Ok(content),
            Self::Py(content) if content.is_instance_of::<PyString>() => {
                Ok(resolve_python(content, context)?)
            }
            _ => Err(RenderError::InvalidArgumentString { argument_at }.into()),
        }
    }

    /// Best-effort integer conversion: strings are parsed, floats
    /// truncated, booleans mapped to 0/1, and Python objects fall back to
    /// `int(obj)`. Returns `None` when no conversion applies.
    pub fn to_bigint(&self) -> Option<BigInt> {
        match self {
            Self::Int(left) => Some(left.clone()),
            Self::String(left) => left.as_raw().parse::<BigInt>().ok(),
            Self::Float(left) => left.trunc().to_bigint(),
            Self::Py(left) => match left.extract::<BigInt>() {
                Ok(left) => Some(left),
                Err(_) => {
                    // Fall back to Python's own int() conversion.
                    let int = PyType::new::<PyInt>(left.py());
                    let left = int.call1((left,)).ok()?;
                    Some(
                        left.extract::<BigInt>()
                            .expect("Python integers are BigInt compatible"),
                    )
                }
            },
            Self::Bool(true) => 1.to_bigint(),
            Self::Bool(false) => 0.to_bigint(),
        }
    }

    /// Convert Content to usize, providing detailed errors if the conversion fails
    ///
    /// # Errors
    ///
    /// Returns an `InvalidArgumentInteger`/`InvalidArgumentFloat` render
    /// error for unconvertible values, or an `OverflowError` (via
    /// [resolve_bigint]) for out-of-range integers.
    pub fn resolve_usize(self, argument_at: At) -> Result<usize, PyRenderError> {
        match self {
            Self::Int(n) => resolve_bigint(n, argument_at),
            Self::String(s) => match s.as_raw().parse::<BigInt>() {
                Ok(n) => resolve_bigint(n, argument_at),
                Err(_) => Err(RenderError::InvalidArgumentInteger {
                    argument: format!("'{}'", s.as_raw()),
                    argument_at: argument_at.into(),
                }
                .into()),
            },
            Self::Float(f) => match f.trunc().to_bigint() {
                Some(n) => resolve_bigint(n, argument_at),
                None => Err(RenderError::InvalidArgumentFloat {
                    argument: f.to_string(),
                    argument_at: argument_at.into(),
                }
                .into()),
            },
            Self::Py(obj) => match obj.extract::<BigInt>() {
                Ok(n) => resolve_bigint(n, argument_at),
                Err(_) => {
                    let argument = obj.to_string();
                    let argument_at = argument_at.into();
                    // Distinguish float-like objects for a more precise error.
                    match obj.extract::<f64>() {
                        Ok(_) => Err(RenderError::InvalidArgumentFloat {
                            argument,
                            argument_at,
                        }
                        .into()),
                        Err(_) => Err(RenderError::InvalidArgumentInteger {
                            argument,
                            argument_at,
                        }
                        .into()),
                    }
                }
            },
            Self::Bool(true) => Ok(1),
            Self::Bool(false) => Ok(0),
        }
    }

    /// Python-style truthiness.
    ///
    /// # Errors
    ///
    /// Propagates Python errors raised by `bool(obj)` for `Py` values.
    pub fn to_bool(&self) -> PyResult<bool> {
        Ok(match self {
            Self::Bool(b) => *b,
            Self::Int(n) => !n.is_zero(),
            Self::Float(f) => !f.is_zero(),
            Self::String(s) => !s.as_raw().is_empty(),
            Self::Py(obj) => obj.is_truthy()?,
        })
    }

    /// Convert into a Python object, wrapping HTML-safe strings with
    /// Django's `mark_safe` so their safety survives the round-trip.
    pub fn to_py(&self, py: Python<'py>) -> Bound<'py, PyAny> {
        match self {
            Self::Py(object) => object.clone(),
            Self::Int(i) => i
                .into_pyobject(py)
                .expect("A BigInt can always be converted to a Python int.")
                .into_any(),
            Self::Float(f) => f
                .into_pyobject(py)
                .expect("An f64 can always be converted to a Python float.")
                .into_any(),
            Self::String(s) => match s {
                ContentString::String(s) | ContentString::HtmlUnsafe(s) => s
                    .into_pyobject(py)
                    .expect("A string can always be converted to a Python str.")
                    .into_any(),
                ContentString::HtmlSafe(s) => {
                    let string = s
                        .into_pyobject(py)
                        .expect("A string can always be converted to a Python str.");
                    let mark_safe = MARK_SAFE
                        .import(py, "django.utils.safestring", "mark_safe")
                        .expect("Should be able to import `django.utils.safestring.mark_safe`");
                    mark_safe
                        .call1((string,))
                        .expect("`mark_safe` should not raise if given a string")
                }
            },
            Self::Bool(b) => PyBool::new(py, *b).to_owned().into_any(),
        }
    }
}
/// Conversion of an owned value into a [Content].
pub trait IntoOwnedContent<'t, 'py> {
    fn into_content(self) -> Content<'t, 'py>;
}
/// Conversion of a borrowed value into a [Content] that borrows from it
/// (`'a: 't` ties the content's lifetime to the borrow).
pub trait AsBorrowedContent<'a, 't, 'py>
where
    'a: 't,
{
    fn as_content(&'a self) -> Content<'t, 'py>;
}
impl<'a, 't, 'py> AsBorrowedContent<'a, 't, 'py> for str
where
    'a: 't,
{
    /// Borrow a `&str` as unescaped string content.
    fn as_content(&'a self) -> Content<'t, 'py> {
        let text = Cow::Borrowed(self);
        Content::String(ContentString::String(text))
    }
}
impl<'t, 'py> IntoOwnedContent<'t, 'py> for String {
    /// Wrap an owned `String` as unescaped string content.
    fn into_content(self) -> Content<'t, 'py> {
        let text = Cow::Owned(self);
        Content::String(ContentString::String(text))
    }
}
impl<'t, 'py> IntoOwnedContent<'t, 'py> for Cow<'t, str> {
    /// Wrap a `Cow<str>` as unescaped string content.
    fn into_content(self) -> Content<'t, 'py> {
        let inner = ContentString::String(self);
        Content::String(inner)
    }
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/src/render/tags.rs | src/render/tags.rs | use std::borrow::Cow;
use std::collections::{HashMap, VecDeque};
use std::sync::Arc;
use num_bigint::{BigInt, Sign};
use num_traits::cast::ToPrimitive;
use pyo3::exceptions::{PyAttributeError, PyTypeError};
use pyo3::intern;
use pyo3::prelude::*;
use pyo3::sync::{MutexExt, PyOnceLock};
use pyo3::types::{PyBool, PyDict, PyList, PyNone, PyString, PyTuple};
use crate::render::lorem::{COMMON_WORDS, paragraphs, words};
use dtl_lexer::tag::lorem::LoremMethod;
use dtl_lexer::types::{At, TemplateString};
use super::types::{
AsBorrowedContent, Content, ContentString, Context, IncludeTemplateKey, PyContext,
};
use super::{Evaluate, Render, RenderResult, Resolve, ResolveFailures, ResolveResult};
use crate::error::{AnnotatePyErr, PyRenderError, RenderError};
use crate::parse::{
For, IfCondition, Include, IncludeTemplateName, SimpleBlockTag, SimpleTag, Tag, TagElement, Url,
};
use crate::path::construct_relative_path;
use crate::template::django_rusty_templates::{NoReverseMatch, Template, TemplateDoesNotExist};
use crate::utils::PyResultMethods;
// Lazily-initialised cache for a Python object. NOTE(review): not referenced
// in this part of the file — presumably `django.utils.functional.Promise`
// given the name; confirm against its initialisation site.
static PROMISE: PyOnceLock<Py<PyAny>> = PyOnceLock::new();
// Cached import of `django.urls.reverse`, used by the `{% url %}` tag.
static REVERSE: PyOnceLock<Py<PyAny>> = PyOnceLock::new();
/// Determine the "current application" for URL reversing: prefer
/// `request.current_app`, then `request.resolver_match.namespace`, falling
/// back to `None` (also used when there is no request).
///
/// # Errors
///
/// Propagates any non-`AttributeError` raised while reading the request's
/// attributes.
fn current_app(py: Python, request: Option<&Py<PyAny>>) -> PyResult<Py<PyAny>> {
    let Some(request) = request else {
        return Ok(py.None());
    };
    // A missing `current_app` attribute is expected and falls through.
    if let Ok(current_app) = request
        .getattr(py, "current_app")
        .ok_or_isinstance_of::<PyAttributeError>(py)?
    {
        return Ok(current_app);
    }
    match request
        .getattr(py, "resolver_match")
        .ok_or_isinstance_of::<PyAttributeError>(py)?
    {
        Ok(resolver_match) if !resolver_match.is_none(py) => {
            resolver_match.getattr(py, "namespace")
        }
        _ => Ok(py.None()),
    }
}
impl Resolve for Url {
    /// Resolve a `{% url %}` tag by calling Django's `reverse()`.
    ///
    /// With an `as var` target the reversed URL is stored in the context
    /// (and a `NoReverseMatch` is silently swallowed) and `None` is
    /// returned; otherwise the URL (or the reverse error) is the result.
    fn resolve<'t, 'py>(
        &self,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
        failures: ResolveFailures,
    ) -> ResolveResult<'t, 'py> {
        // An unresolvable view name is passed to reverse() as "".
        let view_name = match self.view_name.resolve(py, template, context, failures)? {
            Some(view_name) => view_name,
            None => "".as_content(),
        };
        let reverse = REVERSE.import(py, "django.urls", "reverse")?;
        let current_app = current_app(py, context.request.as_ref())?;
        // Only one of args/kwargs is forwarded; kwargs win when present.
        let url = if self.kwargs.is_empty() {
            let py_args = PyList::empty(py);
            for arg in &self.args {
                py_args.append(arg.resolve(py, template, context, failures)?)?;
            }
            reverse.call1((
                view_name,
                py.None(),
                py_args.to_tuple(),
                py.None(),
                current_app,
            ))
        } else {
            let kwargs = PyDict::new(py);
            for (key, value) in &self.kwargs {
                kwargs.set_item(key, value.resolve(py, template, context, failures)?)?;
            }
            reverse.call1((view_name, py.None(), py.None(), kwargs, current_app))
        };
        match &self.variable {
            None => Ok(Some(Content::Py(url?))),
            Some(variable) => {
                // `{% url ... as var %}`: store on success, ignore NoReverseMatch.
                if let Ok(url) = url.ok_or_isinstance_of::<NoReverseMatch>(py)? {
                    context.insert(variable.clone(), url);
                }
                Ok(None)
            }
        }
    }
}
impl Evaluate for Content<'_, '_> {
    /// Python-style truthiness; an error from `bool(obj)` counts as falsy.
    fn evaluate(
        &self,
        _py: Python<'_>,
        _template: TemplateString<'_>,
        _context: &mut Context,
    ) -> Option<bool> {
        let truthy = match self {
            Self::Bool(b) => *b,
            Self::Int(n) => *n != BigInt::ZERO,
            Self::Float(f) => *f != 0.0,
            Self::String(s) => !s.as_raw().is_empty(),
            Self::Py(obj) => obj.is_truthy().unwrap_or(false),
        };
        Some(truthy)
    }
}
/// Python-style comparison operators between template values. The
/// implementations below treat a raised Python exception as `false`
/// rather than propagating an error.
trait PyCmp<T> {
    fn eq(&self, other: &T) -> bool;
    // Default: negation of `eq`.
    fn ne(&self, other: &T) -> bool {
        !self.eq(other)
    }
    fn lt(&self, other: &T) -> bool;
    fn gt(&self, other: &T) -> bool;
    fn lte(&self, other: &T) -> bool;
    fn gte(&self, other: &T) -> bool;
}
impl PyCmp<Content<'_, '_>> for Content<'_, '_> {
    /// Python-style `==`. Mixed int/float/bool pairs follow numeric
    /// equality; Python objects delegate to the object (an exception
    /// counts as "not equal"); mismatched kinds (e.g. string vs int) are
    /// never equal.
    fn eq(&self, other: &Content<'_, '_>) -> bool {
        match (self, other) {
            (Self::Py(obj), Content::Py(other)) => obj.eq(other).unwrap_or(false),
            (Self::Py(obj), Content::Float(other)) => obj.eq(other).unwrap_or(false),
            (Self::Py(obj), Content::Int(other)) => obj.eq(other).unwrap_or(false),
            (Self::Py(obj), Content::Bool(other)) => obj.eq(other).unwrap_or(false),
            (Self::Py(obj), Content::String(other)) => obj.eq(other.as_raw()).unwrap_or(false),
            (Self::Float(obj), Content::Py(other)) => other.eq(obj).unwrap_or(false),
            (Self::Int(obj), Content::Py(other)) => other.eq(obj).unwrap_or(false),
            (Self::String(obj), Content::Py(other)) => other.eq(obj.as_raw()).unwrap_or(false),
            (Self::Bool(obj), Content::Py(other)) => other.eq(obj).unwrap_or(false),
            (Self::Float(obj), Content::Float(other)) => obj == other,
            (Self::Int(obj), Content::Int(other)) => obj == other,
            // bools compare as 0/1; ints outside u8 can never equal a bool.
            (Self::Int(obj), Content::Bool(other)) => u8::try_from(obj)
                .map(|o| o == u8::from(*other))
                .unwrap_or(false),
            (Self::Bool(obj), Content::Int(other)) => u8::try_from(other)
                .map(|o| o == u8::from(*obj))
                .unwrap_or(false),
            // Out-of-range BigInts convert to ±inf; treat those as unequal
            // instead of comparing the saturated value.
            (Self::Float(obj), Content::Int(other)) => {
                match other.to_f64().expect("BigInt to f64 is always possible") {
                    f64::INFINITY | f64::NEG_INFINITY => false,
                    other => *obj == other,
                }
            }
            (Self::Int(obj), Content::Float(other)) => {
                match obj.to_f64().expect("BigInt to f64 is always possible") {
                    f64::INFINITY | f64::NEG_INFINITY => false,
                    obj => obj == *other,
                }
            }
            (Self::Float(obj), Content::Bool(other)) => match other {
                true => *obj == 1.0,
                false => *obj == 0.0,
            },
            (Self::Bool(obj), Content::Float(other)) => match obj {
                true => *other == 1.0,
                false => *other == 0.0,
            },
            (Self::String(obj), Content::String(other)) => obj.as_raw() == other.as_raw(),
            (Self::Bool(obj), Content::Bool(other)) => obj == other,
            _ => false,
        }
    }

    /// Python-style `<`. Mismatched, incomparable kinds yield `false`;
    /// BigInt/f64 pairs handle the ±inf saturation of `to_f64` explicitly.
    fn lt(&self, other: &Content<'_, '_>) -> bool {
        match (self, other) {
            (Self::Py(obj), Content::Py(other)) => obj.lt(other).unwrap_or(false),
            (Self::Py(obj), Content::Float(other)) => obj.lt(other).unwrap_or(false),
            (Self::Py(obj), Content::Int(other)) => obj.lt(other).unwrap_or(false),
            (Self::Py(obj), Content::Bool(other)) => obj.lt(other).unwrap_or(false),
            (Self::Py(obj), Content::String(other)) => obj.lt(other.as_raw()).unwrap_or(false),
            // Flip the operator when the Python object is on the right.
            (Self::Float(obj), Content::Py(other)) => other.gt(obj).unwrap_or(false),
            (Self::Int(obj), Content::Py(other)) => other.gt(obj).unwrap_or(false),
            (Self::String(obj), Content::Py(other)) => other.gt(obj.as_raw()).unwrap_or(false),
            (Self::Bool(obj), Content::Py(other)) => other.gt(obj).unwrap_or(false),
            (Self::Float(obj), Content::Float(other)) => obj < other,
            (Self::Int(obj), Content::Int(other)) => obj < other,
            (Self::Int(obj), Content::Bool(other)) => match obj.sign() {
                Sign::Minus => true,
                _ => u8::try_from(obj)
                    .map(|o| o < u8::from(*other))
                    .unwrap_or(false),
            },
            (Self::Bool(obj), Content::Int(other)) => match other.sign() {
                Sign::Minus => false,
                _ => u8::try_from(other)
                    .map(|o| o > u8::from(*obj))
                    .unwrap_or(true),
            },
            (Self::Float(obj), Content::Int(other)) => {
                match other.to_f64().expect("BigInt to f64 is always possible") {
                    f64::INFINITY => obj.is_finite() || *obj == f64::NEG_INFINITY,
                    f64::NEG_INFINITY => *obj == f64::NEG_INFINITY,
                    other => *obj < other,
                }
            }
            (Self::Int(obj), Content::Float(other)) => {
                match obj.to_f64().expect("BigInt to f64 is always possible") {
                    f64::INFINITY => *other == f64::INFINITY,
                    f64::NEG_INFINITY => other.is_finite() || *other == f64::INFINITY,
                    obj => obj < *other,
                }
            }
            (Self::Float(obj), Content::Bool(other)) => match other {
                true => *obj < 1.0,
                false => *obj < 0.0,
            },
            (Self::Bool(obj), Content::Float(other)) => match obj {
                true => *other > 1.0,
                false => *other > 0.0,
            },
            (Self::String(obj), Content::String(other)) => obj.as_raw() < other.as_raw(),
            (Self::Bool(obj), Content::Bool(other)) => obj < other,
            _ => false,
        }
    }

    /// Python-style `>`; mirror image of [PyCmp::lt].
    fn gt(&self, other: &Content<'_, '_>) -> bool {
        match (self, other) {
            (Self::Py(obj), Content::Py(other)) => obj.gt(other).unwrap_or(false),
            (Self::Py(obj), Content::Float(other)) => obj.gt(other).unwrap_or(false),
            (Self::Py(obj), Content::Int(other)) => obj.gt(other).unwrap_or(false),
            (Self::Py(obj), Content::Bool(other)) => obj.gt(other).unwrap_or(false),
            (Self::Py(obj), Content::String(other)) => obj.gt(other.as_raw()).unwrap_or(false),
            (Self::Float(obj), Content::Py(other)) => other.lt(obj).unwrap_or(false),
            (Self::Int(obj), Content::Py(other)) => other.lt(obj).unwrap_or(false),
            (Self::String(obj), Content::Py(other)) => other.lt(obj.as_raw()).unwrap_or(false),
            (Self::Bool(obj), Content::Py(other)) => other.lt(obj).unwrap_or(false),
            (Self::Float(obj), Content::Float(other)) => obj > other,
            (Self::Int(obj), Content::Int(other)) => obj > other,
            (Self::Int(obj), Content::Bool(other)) => match obj.sign() {
                Sign::Minus => false,
                _ => u8::try_from(obj)
                    .map(|o| o > u8::from(*other))
                    .unwrap_or(true),
            },
            (Self::Bool(obj), Content::Int(other)) => match other.sign() {
                Sign::Minus => true,
                _ => u8::try_from(other)
                    .map(|o| o < u8::from(*obj))
                    .unwrap_or(false),
            },
            (Self::Float(obj), Content::Int(other)) => {
                match other.to_f64().expect("BigInt to f64 is always possible") {
                    f64::INFINITY => *obj == f64::INFINITY,
                    f64::NEG_INFINITY => obj.is_finite() || *obj == f64::INFINITY,
                    other => *obj > other,
                }
            }
            (Self::Int(obj), Content::Float(other)) => {
                match obj.to_f64().expect("BigInt to f64 is always possible") {
                    f64::INFINITY => other.is_finite() || *other == f64::NEG_INFINITY,
                    f64::NEG_INFINITY => *other == f64::NEG_INFINITY,
                    obj => obj > *other,
                }
            }
            (Self::Float(obj), Content::Bool(other)) => match other {
                true => *obj > 1.0,
                false => *obj > 0.0,
            },
            (Self::Bool(obj), Content::Float(other)) => match obj {
                true => *other < 1.0,
                false => *other < 0.0,
            },
            (Self::String(obj), Content::String(other)) => obj.as_raw() > other.as_raw(),
            (Self::Bool(obj), Content::Bool(other)) => obj > other,
            _ => false,
        }
    }

    /// Python-style `<=`; same structure as [PyCmp::lt] with inclusive bounds.
    fn lte(&self, other: &Content<'_, '_>) -> bool {
        match (self, other) {
            (Self::Py(obj), Content::Py(other)) => obj.le(other).unwrap_or(false),
            (Self::Py(obj), Content::Float(other)) => obj.le(other).unwrap_or(false),
            (Self::Py(obj), Content::Int(other)) => obj.le(other).unwrap_or(false),
            (Self::Py(obj), Content::Bool(other)) => obj.le(other).unwrap_or(false),
            (Self::Py(obj), Content::String(other)) => obj.le(other.as_raw()).unwrap_or(false),
            (Self::Float(obj), Content::Py(other)) => other.ge(obj).unwrap_or(false),
            (Self::Int(obj), Content::Py(other)) => other.ge(obj).unwrap_or(false),
            (Self::Bool(obj), Content::Py(other)) => other.ge(obj).unwrap_or(false),
            (Self::String(obj), Content::Py(other)) => other.ge(obj.as_raw()).unwrap_or(false),
            (Self::Float(obj), Content::Float(other)) => obj <= other,
            (Self::Int(obj), Content::Int(other)) => obj <= other,
            (Self::Int(obj), Content::Bool(other)) => match obj.sign() {
                Sign::Minus => true,
                _ => u8::try_from(obj)
                    .map(|o| o <= u8::from(*other))
                    .unwrap_or(false),
            },
            (Self::Bool(obj), Content::Int(other)) => match other.sign() {
                Sign::Minus => false,
                _ => u8::try_from(other)
                    .map(|o| o >= u8::from(*obj))
                    .unwrap_or(true),
            },
            (Self::Float(obj), Content::Int(other)) => {
                match other.to_f64().expect("BigInt to f64 is always possible") {
                    f64::INFINITY => obj.is_finite() || *obj == f64::NEG_INFINITY,
                    f64::NEG_INFINITY => *obj == f64::NEG_INFINITY,
                    other => *obj <= other,
                }
            }
            (Self::Int(obj), Content::Float(other)) => {
                match obj.to_f64().expect("BigInt to f64 is always possible") {
                    f64::INFINITY => *other == f64::INFINITY,
                    f64::NEG_INFINITY => other.is_finite() || *other == f64::INFINITY,
                    obj => obj <= *other,
                }
            }
            (Self::Float(obj), Content::Bool(other)) => match other {
                true => *obj <= 1.0,
                false => *obj <= 0.0,
            },
            (Self::Bool(obj), Content::Float(other)) => match obj {
                true => *other >= 1.0,
                false => *other >= 0.0,
            },
            (Self::String(obj), Content::String(other)) => obj.as_raw() <= other.as_raw(),
            (Self::Bool(obj), Content::Bool(other)) => obj <= other,
            _ => false,
        }
    }

    /// Python-style `>=`; mirror image of [PyCmp::lte].
    fn gte(&self, other: &Content<'_, '_>) -> bool {
        match (self, other) {
            (Self::Py(obj), Content::Py(other)) => obj.ge(other).unwrap_or(false),
            (Self::Py(obj), Content::Float(other)) => obj.ge(other).unwrap_or(false),
            (Self::Py(obj), Content::Int(other)) => obj.ge(other).unwrap_or(false),
            (Self::Py(obj), Content::Bool(other)) => obj.ge(other).unwrap_or(false),
            (Self::Py(obj), Content::String(other)) => obj.ge(other.as_raw()).unwrap_or(false),
            (Self::Float(obj), Content::Py(other)) => other.le(obj).unwrap_or(false),
            (Self::Int(obj), Content::Py(other)) => other.le(obj).unwrap_or(false),
            (Self::Bool(obj), Content::Py(other)) => other.le(obj).unwrap_or(false),
            (Self::String(obj), Content::Py(other)) => other.le(obj.as_raw()).unwrap_or(false),
            (Self::Float(obj), Content::Float(other)) => obj >= other,
            (Self::Int(obj), Content::Int(other)) => obj >= other,
            (Self::Int(obj), Content::Bool(other)) => match obj.sign() {
                Sign::Minus => false,
                _ => u8::try_from(obj)
                    .map(|o| o >= u8::from(*other))
                    .unwrap_or(true),
            },
            (Self::Bool(obj), Content::Int(other)) => match other.sign() {
                Sign::Minus => true,
                _ => u8::try_from(other)
                    .map(|o| o <= u8::from(*obj))
                    .unwrap_or(false),
            },
            (Self::Float(obj), Content::Int(other)) => {
                match other.to_f64().expect("BigInt to f64 is always possible") {
                    f64::INFINITY => *obj == f64::INFINITY,
                    f64::NEG_INFINITY => obj.is_finite() || *obj == f64::INFINITY,
                    other => *obj >= other,
                }
            }
            (Self::Int(obj), Content::Float(other)) => {
                match obj.to_f64().expect("BigInt to f64 is always possible") {
                    f64::INFINITY => other.is_finite() || *other == f64::NEG_INFINITY,
                    f64::NEG_INFINITY => *other == f64::NEG_INFINITY,
                    obj => obj >= *other,
                }
            }
            (Self::Float(obj), Content::Bool(other)) => match other {
                true => *obj >= 1.0,
                false => *obj >= 0.0,
            },
            (Self::Bool(obj), Content::Float(other)) => match obj {
                true => *other <= 1.0,
                false => *other <= 0.0,
            },
            (Self::String(obj), Content::String(other)) => obj.as_raw() >= other.as_raw(),
            (Self::Bool(obj), Content::Bool(other)) => obj >= other,
            _ => false,
        }
    }
}
impl PyCmp<Self> for Option<Content<'_, '_>> {
    /// Missing values are equal to each other and to Python `None`.
    fn eq(&self, other: &Self) -> bool {
        match (self, other) {
            (None, None) => true,
            (Some(left), Some(right)) => left.eq(right),
            (Some(present), None) | (None, Some(present)) => match present {
                Content::Py(obj) => obj.eq(PyNone::get(obj.py())).unwrap_or(false),
                _ => false,
            },
        }
    }

    /// Ordering against a missing value is always `false`.
    fn lt(&self, other: &Self) -> bool {
        matches!((self, other), (Some(left), Some(right)) if left.lt(right))
    }

    /// Ordering against a missing value is always `false`.
    fn gt(&self, other: &Self) -> bool {
        matches!((self, other), (Some(left), Some(right)) if left.gt(right))
    }

    /// Ordering against a missing value is always `false`.
    fn lte(&self, other: &Self) -> bool {
        matches!((self, other), (Some(left), Some(right)) if left.lte(right))
    }

    /// Ordering against a missing value is always `false`.
    fn gte(&self, other: &Self) -> bool {
        matches!((self, other), (Some(left), Some(right)) if left.gte(right))
    }
}
/// Python-style membership test (`other in self`), where `self` is the
/// container. `None` means the test is not applicable to the operand kinds.
trait Contains<T> {
    fn contains(&self, other: T) -> Option<bool>;
}
impl Contains<Option<Content<'_, '_>>> for Content<'_, '_> {
    /// Test whether `self` (the container) holds `other` (the needle).
    /// Returns `None` when containment is not meaningful for the kinds
    /// involved; Python errors during the lookup also map to `None`.
    fn contains(&self, other: Option<Content<'_, '_>>) -> Option<bool> {
        match other {
            // A missing needle is only looked up (as Python `None`) inside
            // Python containers.
            None => match self {
                Self::Py(obj) => obj.contains(PyNone::get(obj.py())).ok(),
                _ => None,
            },
            Some(Content::Py(other)) => {
                // Lift self into Python so Python's `in` applies.
                let obj = self.to_py(other.py());
                obj.contains(other).ok()
            }
            Some(Content::String(other)) => match self {
                // String needle in a string container: substring test.
                Self::String(obj) => Some(obj.as_raw().contains(other.as_raw().as_ref())),
                Self::Int(_) | Self::Float(_) | Self::Bool(_) => None,
                Self::Py(obj) => obj.contains(other).ok(),
            },
            // Scalar needles only make sense inside Python containers.
            Some(Content::Int(n)) => match self {
                Self::Py(obj) => obj.contains(n).ok(),
                _ => None,
            },
            Some(Content::Float(f)) => match self {
                Self::Py(obj) => obj.contains(f).ok(),
                _ => None,
            },
            Some(Content::Bool(b)) => match self {
                Self::Py(obj) => obj.contains(b).ok(),
                _ => None,
            },
        }
    }
}
/// Resolution of a pair of `{% if %}` operands into comparable contents.
trait ResolveTuple<'t, 'py> {
    fn resolve(
        &self,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> Result<(Option<Content<'t, 'py>>, Option<Content<'t, 'py>>), PyRenderError>;
}
impl<'t, 'py> ResolveTuple<'t, 'py> for (IfCondition, IfCondition) {
    /// Resolve both sides of a binary `{% if %}` comparison. Plain
    /// variables resolve to their (possibly missing) content with missing
    /// variables ignored rather than raised; nested boolean expressions
    /// are evaluated eagerly to a `Content::Bool`.
    fn resolve(
        &self,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> Result<(Option<Content<'t, 'py>>, Option<Content<'t, 'py>>), PyRenderError> {
        const IGNORE: ResolveFailures = ResolveFailures::IgnoreVariableDoesNotExist;
        Ok(match self {
            (IfCondition::Variable(l), IfCondition::Variable(r)) => {
                let left = l.resolve(py, template, context, IGNORE)?;
                let right = r.resolve(py, template, context, IGNORE)?;
                (left, right)
            }
            (IfCondition::Variable(l), r) => {
                let left = l.resolve(py, template, context, IGNORE)?;
                let right = r
                    .evaluate(py, template, context)
                    .expect("Right cannot be an expression that evaluates to None");
                (left, Some(Content::Bool(right)))
            }
            (l, IfCondition::Variable(r)) => {
                let left = l
                    .evaluate(py, template, context)
                    .expect("Left cannot be an expression that evaluates to None");
                let right = r.resolve(py, template, context, IGNORE)?;
                (Some(Content::Bool(left)), right)
            }
            (l, r) => {
                let left = l
                    .evaluate(py, template, context)
                    .expect("Left cannot be an expression that evaluates to None");
                let right = r
                    .evaluate(py, template, context)
                    .expect("Right cannot be an expression that evaluates to None");
                (Some(Content::Bool(left)), Some(Content::Bool(right)))
            }
        })
    }
}
impl Evaluate for IfCondition {
    /// Evaluates an `{% if %}` condition tree to a boolean.
    ///
    /// Returns `None` only when the condition is a bare undefined variable;
    /// every composite node yields `Some`. Resolution errors inside
    /// comparison operands are swallowed and treated as `false` so a broken
    /// operand does not abort the render.
    #[allow(clippy::too_many_lines)]
    fn evaluate(
        &self,
        py: Python<'_>,
        template: TemplateString<'_>,
        context: &mut Context,
    ) -> Option<bool> {
        Some(match self {
            // `?` bubbles "variable is undefined" up to the caller.
            Self::Variable(v) => v.evaluate(py, template, context)?,
            Self::And(inner) => {
                // Undefined operands count as false on either side.
                let left = inner.0.evaluate(py, template, context).unwrap_or(false);
                let right = inner.1.evaluate(py, template, context).unwrap_or(false);
                if left { right } else { false }
            }
            Self::Or(inner) => {
                let left = inner.0.evaluate(py, template, context);
                let right = inner.1.evaluate(py, template, context);
                // NOTE(review): an undefined left-hand side makes the whole
                // `or` false even when the right-hand side is true — confirm
                // this intentionally matches Django's handling of undefined
                // variables in `or`.
                match left {
                    None => false,
                    Some(left) => {
                        if left {
                            true
                        } else {
                            right.unwrap_or(false)
                        }
                    }
                }
            }
            // NOTE(review): `not <undefined>` evaluates to false here;
            // verify against Django, where an undefined name is falsy.
            Self::Not(inner) => match inner.evaluate(py, template, context) {
                None | Some(true) => false,
                Some(false) => true,
            },
            // For every comparison below, failure to resolve an operand
            // yields false rather than an error.
            Self::Equal(inner) => match inner.resolve(py, template, context) {
                Ok((l, r)) => l.eq(&r),
                Err(_) => false,
            },
            Self::NotEqual(inner) => match inner.resolve(py, template, context) {
                Ok((l, r)) => l.ne(&r),
                Err(_) => false,
            },
            Self::LessThan(inner) => match inner.resolve(py, template, context) {
                Ok((l, r)) => l.lt(&r),
                Err(_) => false,
            },
            Self::GreaterThan(inner) => match inner.resolve(py, template, context) {
                Ok((l, r)) => l.gt(&r),
                Err(_) => false,
            },
            Self::LessThanEqual(inner) => match inner.resolve(py, template, context) {
                Ok((l, r)) => l.lte(&r),
                Err(_) => false,
            },
            Self::GreaterThanEqual(inner) => match inner.resolve(py, template, context) {
                Ok((l, r)) => l.gte(&r),
                Err(_) => false,
            },
            Self::In(inner) => {
                let Ok(inner) = inner.resolve(py, template, context) else {
                    return Some(false);
                };
                // Membership needs a resolvable right-hand container.
                match inner {
                    (l, Some(r)) => r.contains(l).unwrap_or(false),
                    _ => false,
                }
            }
            Self::NotIn(inner) => {
                let Ok(inner) = inner.resolve(py, template, context) else {
                    return Some(false);
                };
                // If the containment check itself fails, default to "is in"
                // so `not in` becomes false.
                match inner {
                    (l, Some(r)) => !(r.contains(l).unwrap_or(true)),
                    _ => false,
                }
            }
            Self::Is(inner) => {
                let Ok(inner) = inner.resolve(py, template, context) else {
                    return Some(false);
                };
                // Python identity semantics: None-side operands compare
                // against Python's None singleton; bools against True/False.
                match inner {
                    (Some(Content::Py(left)), Some(Content::Py(right))) => left.is(&right),
                    (Some(Content::Py(obj)), None) | (None, Some(Content::Py(obj))) => {
                        obj.is(PyNone::get(py).as_any())
                    }
                    (Some(Content::Bool(left)), Some(Content::Py(right))) => {
                        right.is(PyBool::new(py, left).as_any())
                    }
                    (None, None) => true,
                    _ => false,
                }
            }
            Self::IsNot(inner) => {
                let Ok(inner) = inner.resolve(py, template, context) else {
                    return Some(false);
                };
                match inner {
                    (Some(Content::Py(left)), Some(Content::Py(right))) => !left.is(&right),
                    (Some(Content::Bool(left)), Some(Content::Bool(right))) => left != right,
                    (Some(Content::Py(obj)), None) | (None, Some(Content::Py(obj))) => {
                        !obj.is(PyNone::get(py).as_any())
                    }
                    (Some(Content::Bool(left)), Some(Content::Py(right))) => {
                        !right.is(PyBool::new(py, left).as_any())
                    }
                    (Some(Content::Py(left)), Some(Content::Bool(right))) => {
                        !left.is(PyBool::new(py, right).as_any())
                    }
                    (None, None) => false,
                    _ => true,
                }
            }
        })
    }
}
impl Render for Tag {
    /// Renders a single template tag node to its output string.
    fn render<'t>(
        &self,
        py: Python<'_>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> RenderResult<'t> {
        Ok(match self {
            Self::Autoescape { enabled, nodes } => {
                // Save, override, and restore the autoescape flag around the
                // tag body so nesting works.
                let autoescape = context.autoescape;
                context.autoescape = enabled.into();
                let mut rendered = vec![];
                for node in nodes {
                    rendered.push(node.render(py, template, context)?);
                }
                context.autoescape = autoescape;
                Cow::Owned(rendered.join(""))
            }
            Self::If {
                condition,
                truthy,
                falsey,
            } => {
                // An undefined condition falls through to the falsey branch.
                if condition.evaluate(py, template, context).unwrap_or(false) {
                    truthy.render(py, template, context)?
                } else {
                    falsey.render(py, template, context)?
                }
            }
            Self::For(for_tag) => for_tag.render(py, template, context)?,
            Self::Include(include_tag) => include_tag.render(py, template, context)?,
            // {% load %} has no output of its own.
            Self::Load => Cow::Borrowed(""),
            Self::SimpleTag(simple_tag) => simple_tag.render(py, template, context)?,
            Self::SimpleBlockTag(simple_tag) => simple_tag.render(py, template, context)?,
            Self::Url(url) => url.render(py, template, context)?,
            Self::Lorem(lorem) => {
                // The count may be a variable; missing names are ignored and
                // the count then defaults to 1 below.
                let count_content = lorem.count.resolve(
                    py,
                    template,
                    context,
                    ResolveFailures::IgnoreVariableDoesNotExist,
                )?;
                let val = count_content
                    .and_then(|c| c.to_bigint())
                    .and_then(|n| n.to_i64())
                    .unwrap_or(1);
                let text = match lorem.method {
                    LoremMethod::Words => {
                        // A negative count selects len+val words (clamped at
                        // zero) — presumably mirroring Django's lorem tag;
                        // TODO confirm.
                        let final_count = if val < 0 {
                            (COMMON_WORDS.len() as i64 + val).max(0) as usize
                        } else {
                            val as usize
                        };
                        words(final_count, lorem.common)
                    }
                    LoremMethod::Paragraphs | LoremMethod::Blocks => {
                        if val <= 0 {
                            return Ok(Cow::Borrowed(""));
                        } else {
                            let count = val as usize;
                            let paras = paragraphs(count, lorem.common);
                            // Paragraphs are wrapped in <p> tags; blocks are
                            // emitted as plain text.
                            if matches!(lorem.method, LoremMethod::Paragraphs) {
                                paras
                                    .into_iter()
                                    .map(|p| format!("<p>{}</p>", p))
                                    .collect::<Vec<_>>()
                                    .join("\n\n")
                            } else {
                                paras.join("\n\n")
                            }
                        }
                    }
                };
                Cow::Owned(text)
            }
        })
    }
}
impl For {
    /// Renders the loop body once per item of a Python iterable.
    ///
    /// Honours `reversed`, publishes `forloop` state via the context's loop
    /// stack, and unpacks each item into the declared loop variables.
    /// Iteration errors are annotated with the template span of the iterable
    /// expression.
    fn render_python<'t>(
        &self,
        iterable: &Bound<'_, PyAny>,
        py: Python<'_>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> RenderResult<'t> {
        let mut parts = Vec::new();
        // Materialise the iterator up front so it can be reversed if needed.
        let mut list: Vec<_> = match iterable.try_iter() {
            Ok(iterator) => iterator.collect(),
            Err(error) => {
                let error = error.annotate(py, self.iterable.at, "here", template);
                return Err(error.into());
            }
        };
        if self.reversed {
            list.reverse();
        }
        context.push_for_loop(list.len());
        // NOTE(review): the `?` early-returns below skip pop_variables /
        // pop_for_loop, leaving the context's loop stack unbalanced on
        // error — confirm callers discard the context in that case.
        for (index, values) in list.into_iter().enumerate() {
            let values = match values {
                Ok(values) => values,
                Err(error) => {
                    let error =
                        error.annotate(py, self.iterable.at, "while iterating this", template);
                    return Err(error.into());
                }
            };
            context.push_variables(
                &self.variables.names,
                self.variables.at,
                values,
                self.iterable.at,
                index,
                template,
            )?;
            parts.push(self.body.render(py, template, context)?);
            context.increment_for_loop();
        }
        context.pop_variables();
        context.pop_for_loop();
        Ok(Cow::Owned(parts.join("")))
    }

    /// Renders the loop body once per character of a string iterable.
    ///
    /// Strings cannot be tuple-unpacked, so declaring more than one loop
    /// variable is a `TupleUnpackError`.
    fn render_string<'t>(
        &self,
        string: &str,
        py: Python<'_>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> RenderResult<'t> {
        if self.variables.names.len() > 1 {
            return Err(RenderError::TupleUnpackError {
                expected_count: self.variables.names.len(),
                actual_count: 1,
                expected_at: self.variables.at.into(),
                actual_at: self.iterable.at.into(),
            }
            .into());
        }
        let mut parts = Vec::new();
        let mut chars: Vec<_> = string.chars().collect();
        if self.reversed {
            chars.reverse();
        }
        let variable = &self.variables.names[0];
        context.push_for_loop(chars.len());
        for (index, c) in chars.into_iter().enumerate() {
            // Each character is exposed as a 1-character Python string.
            let c = PyString::new(py, &c.to_string());
            context.push_variable(variable.clone(), c.into_any(), index);
            parts.push(self.body.render(py, template, context)?);
            context.increment_for_loop();
        }
        context.pop_variables();
        context.pop_for_loop();
        Ok(Cow::Owned(parts.join("")))
    }
}
impl Render for For {
fn render<'t>(
&self,
py: Python<'_>,
template: TemplateString<'t>,
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | true |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/src/render/common.rs | src/render/common.rs | use std::borrow::Cow;
use pyo3::intern;
use pyo3::prelude::*;
use pyo3::sync::PyOnceLock;
use pyo3::types::PyString;
use dtl_lexer::types::TemplateString;
use super::types::{AsBorrowedContent, Content, ContentString, Context};
use super::{Evaluate, Render, RenderResult, Resolve, ResolveFailures, ResolveResult};
use crate::error::{AnnotatePyErr, RenderError};
use crate::parse::{TagElement, TokenTree};
use crate::types::Argument;
use crate::types::ArgumentType;
use crate::types::ForVariable;
use crate::types::ForVariableName;
use crate::types::Text;
use crate::types::TranslatedText;
use dtl_lexer::types::Variable;
static GETTEXT: PyOnceLock<Py<PyAny>> = PyOnceLock::new();

/// Translates `text` with Django's `gettext`, importing the callable from
/// `django.utils.translation` on first use and caching it thereafter.
pub fn gettext(py: Python<'_>, text: &str) -> PyResult<String> {
    GETTEXT
        .import(py, "django.utils.translation", "gettext")?
        .call1((text,))?
        .extract()
}
/// Whether `variable` has attribute `attr` and that attribute is truthy.
///
/// A missing attribute yields `Ok(false)`; only the truthiness check itself
/// can produce an error.
fn has_truthy_attr(variable: &Bound<'_, PyAny>, attr: &Bound<'_, PyString>) -> PyResult<bool> {
    if let Ok(value) = variable.getattr(attr) {
        if value.is_truthy()? {
            return Ok(true);
        }
    }
    Ok(false)
}
/// Applies Django's auto-call rule to a resolved template variable.
///
/// Non-callables pass through untouched. A callable with a truthy
/// `do_not_call_in_templates` attribute is returned as-is; one with a truthy
/// `alters_data` attribute resolves to nothing (`None`); anything else is
/// invoked with no arguments and its result returned.
fn resolve_callable(variable: Bound<'_, PyAny>) -> PyResult<Option<Bound<'_, PyAny>>> {
    if !variable.is_callable() {
        return Ok(Some(variable));
    }
    let py = variable.py();
    let keep_uncalled = has_truthy_attr(&variable, intern!(py, "do_not_call_in_templates"))?;
    if keep_uncalled {
        Ok(Some(variable))
    } else if has_truthy_attr(&variable, intern!(py, "alters_data"))? {
        Ok(None)
    } else {
        variable.call0().map(Some)
    }
}
impl Resolve for Variable {
    /// Resolves a dotted variable path (e.g. `a.b.0.c`) against the context.
    ///
    /// Each segment tries, in order: subscript lookup, attribute lookup, and
    /// finally integer indexing when the segment parses as a number. Between
    /// segments, callables are auto-called per `resolve_callable`. A missing
    /// root name resolves to `Ok(None)`; a missing later segment either
    /// raises `VariableDoesNotExist` or resolves to `None`, per `failures`.
    fn resolve<'t, 'py>(
        &self,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
        failures: ResolveFailures,
    ) -> ResolveResult<'t, 'py> {
        let mut parts = self.parts(template);
        let (first, mut object_at) = parts.next().expect("Variable names cannot be empty");
        let Some(variable) = context.get(first) else {
            return Ok(None);
        };
        let Some(mut variable) = resolve_callable(variable.bind(py).clone())
            .map_err(|err| err.annotate(py, self.at, "here", template))?
        else {
            // alters_data callables resolve to nothing.
            return Ok(None);
        };
        for (part, key_at) in parts {
            variable = match variable.get_item(part) {
                Ok(variable) => variable,
                Err(_) => match variable.getattr(part) {
                    Ok(variable) => variable,
                    Err(_) => {
                        let Ok(int) = part.parse::<usize>() else {
                            return match failures {
                                ResolveFailures::Raise => Err(RenderError::VariableDoesNotExist {
                                    key: part.to_string(),
                                    object: variable.str()?.to_string(),
                                    key_at: key_at.into(),
                                    object_at: Some(object_at.into()),
                                }
                                .into()),
                                ResolveFailures::IgnoreVariableDoesNotExist => Ok(None),
                            };
                        };
                        match variable.get_item(int) {
                            Ok(variable) => variable,
                            // NOTE(review): failed numeric index lookup is
                            // unimplemented and panics at runtime.
                            Err(_) => todo!(),
                        }
                    }
                },
            };
            variable = match resolve_callable(variable)
                .map_err(|err| err.annotate(py, self.at, "here", template))?
            {
                Some(variable) => variable,
                None => return Ok(None),
            };
            // Extend the resolved-object span over this segment (+1 for the
            // separating dot) for error reporting.
            object_at.1 += key_at.1 + 1;
        }
        Ok(Some(Content::Py(variable)))
    }
}
impl Resolve for ForVariable {
    /// Resolves a `forloop.*` accessor against the context's loop stack.
    fn resolve<'t, 'py>(
        &self,
        py: Python<'py>,
        _template: TemplateString<'t>,
        context: &mut Context,
        _failures: ResolveFailures,
    ) -> ResolveResult<'t, 'py> {
        // Outside a (sufficiently nested) loop the accessor resolves to the
        // literal "{}" — NOTE(review): confirm this matches Django's output
        // for an unresolved forloop reference.
        let Some(for_loop) = context.get_for_loop(self.parent_count) else {
            return Ok(Some("{}".as_content()));
        };
        Ok(Some(match self.variant {
            ForVariableName::Counter => Content::Int(for_loop.counter().into()),
            ForVariableName::Counter0 => Content::Int(for_loop.counter0().into()),
            ForVariableName::RevCounter => Content::Int(for_loop.rev_counter().into()),
            ForVariableName::RevCounter0 => Content::Int(for_loop.rev_counter0().into()),
            ForVariableName::First => Content::Bool(for_loop.first()),
            ForVariableName::Last => Content::Bool(for_loop.last()),
            ForVariableName::Object => {
                // Bare `forloop` renders the loop state itself; marked
                // HTML-unsafe so it gets escaped under autoescape.
                let content = Cow::Owned(context.render_for_loop(py, self.parent_count));
                let content = match context.autoescape {
                    false => ContentString::String(content),
                    true => ContentString::HtmlUnsafe(content),
                };
                Content::String(content)
            }
        }))
    }
}
impl Resolve for Text {
    /// Resolves a literal text token by borrowing its span from the template.
    ///
    /// Literals are trusted, so under autoescape they are marked HTML-safe.
    fn resolve<'t, 'py>(
        &self,
        _py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
        _failures: ResolveFailures,
    ) -> ResolveResult<'t, 'py> {
        let text = Cow::Borrowed(template.content(self.at));
        let content = if context.autoescape {
            ContentString::HtmlSafe(text)
        } else {
            ContentString::String(text)
        };
        Ok(Some(Content::String(content)))
    }
}
impl Resolve for TranslatedText {
    /// Resolves a translated literal by passing its span through Django's
    /// `gettext`; translation failures propagate as errors.
    ///
    /// The translated string is owned; under autoescape it is HTML-safe.
    fn resolve<'t, 'py>(
        &self,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
        _failures: ResolveFailures,
    ) -> ResolveResult<'t, 'py> {
        let translated = Cow::Owned(gettext(py, template.content(self.at))?);
        let content = if context.autoescape {
            ContentString::HtmlSafe(translated)
        } else {
            ContentString::String(translated)
        };
        Ok(Some(Content::String(content)))
    }
}
impl Resolve for Argument {
    /// Resolves a tag/filter argument to content.
    ///
    /// Literals resolve directly. A variable argument that cannot be found
    /// raises `ArgumentDoesNotExist` — unlike a bare template variable,
    /// which silently resolves to nothing — because a missing argument is a
    /// template error.
    fn resolve<'t, 'py>(
        &self,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
        failures: ResolveFailures,
    ) -> ResolveResult<'t, 'py> {
        Ok(Some(match &self.argument_type {
            ArgumentType::Text(text) => return text.resolve(py, template, context, failures),
            ArgumentType::TranslatedText(text) => {
                return text.resolve(py, template, context, failures);
            }
            ArgumentType::Variable(variable) => {
                match variable.resolve(py, template, context, failures)? {
                    Some(content) => content,
                    None => {
                        // Report the missing name along with a rendering of
                        // the whole context for the error message.
                        let key = template.content(variable.at).to_string();
                        let object = context.display(py);
                        return Err(RenderError::ArgumentDoesNotExist {
                            key,
                            object,
                            key_at: variable.at.into(),
                            object_at: None,
                        }
                        .into());
                    }
                }
            }
            ArgumentType::ForVariable(variable) => {
                return variable.resolve(py, template, context, failures);
            }
            ArgumentType::Float(number) => Content::Float(*number),
            ArgumentType::Int(number) => Content::Int(number.clone()),
        }))
    }
}
impl Resolve for TagElement {
    /// Dispatches resolution to whichever concrete element this tag
    /// argument holds; numeric literals resolve without touching the context.
    fn resolve<'t, 'py>(
        &self,
        py: Python<'py>,
        template: TemplateString<'t>,
        context: &mut Context,
        failures: ResolveFailures,
    ) -> ResolveResult<'t, 'py> {
        match self {
            Self::Int(value) => Ok(Some(Content::Int(value.clone()))),
            Self::Float(value) => Ok(Some(Content::Float(*value))),
            Self::Text(inner) | Self::TranslatedText(inner) => {
                inner.resolve(py, template, context, failures)
            }
            Self::Variable(inner) => inner.resolve(py, template, context, failures),
            Self::ForVariable(inner) => inner.resolve(py, template, context, failures),
            Self::Filter(inner) => inner.resolve(py, template, context, failures),
        }
    }
}
impl Evaluate for TagElement {
    /// Evaluates this element for truthiness.
    ///
    /// Resolution errors and undefined variables both collapse to `None`.
    fn evaluate(
        &self,
        py: Python<'_>,
        template: TemplateString<'_>,
        context: &mut Context,
    ) -> Option<bool> {
        let resolved = self
            .resolve(
                py,
                template,
                context,
                ResolveFailures::IgnoreVariableDoesNotExist,
            )
            .ok()?;
        resolved.evaluate(py, template, context)
    }
}
impl Render for TokenTree {
    /// Renders a top-level template node by dispatching on its kind.
    fn render<'t>(
        &self,
        py: Python<'_>,
        template: TemplateString<'t>,
        context: &mut Context,
    ) -> RenderResult<'t> {
        match self {
            Self::Text(text) => text.render(py, template, context),
            // NOTE(review): top-level translated text is unimplemented and
            // panics at runtime.
            Self::TranslatedText(_text) => todo!(),
            // Numeric literals render via their Display form.
            Self::Int(n) => Ok(n.to_string().into()),
            Self::Float(f) => Ok(f.to_string().into()),
            Self::Tag(tag) => tag.render(py, template, context),
            Self::Variable(variable) => variable.render(py, template, context),
            Self::ForVariable(variable) => variable.render(py, template, context),
            Self::Filter(filter) => filter.render(py, template, context),
        }
    }
}
#[cfg(test)]
mod tests {
use super::*;
use std::collections::HashMap;
use pyo3::types::{PyDict, PyList, PyString};
#[test]
fn test_render_variable() {
Python::initialize();
Python::attach(|py| {
let name = PyString::new(py, "Lily").into_any();
let context = HashMap::from([("name".to_string(), name.unbind())]);
let mut context = Context::new(context, None, false);
let template = TemplateString("{{ name }}");
let variable = Variable::new((3, 4));
let rendered = variable.render(py, template, &mut context).unwrap();
assert_eq!(rendered, "Lily");
});
}
#[test]
fn test_render_dict_lookup() {
Python::initialize();
Python::attach(|py| {
let data = PyDict::new(py);
let name = PyString::new(py, "Lily");
data.set_item("name", name).unwrap();
let context = HashMap::from([("data".to_string(), data.into_any().unbind())]);
let mut context = Context::new(context, None, false);
let template = TemplateString("{{ data.name }}");
let variable = Variable::new((3, 9));
let rendered = variable.render(py, template, &mut context).unwrap();
assert_eq!(rendered, "Lily");
});
}
#[test]
fn test_render_list_lookup() {
Python::initialize();
Python::attach(|py| {
let name = PyString::new(py, "Lily");
let names = PyList::new(py, [name]).unwrap();
let context = HashMap::from([("names".to_string(), names.into_any().unbind())]);
let mut context = Context::new(context, None, false);
let template = TemplateString("{{ names.0 }}");
let variable = Variable::new((3, 7));
let rendered = variable.render(py, template, &mut context).unwrap();
assert_eq!(rendered, "Lily");
});
}
#[test]
fn test_render_attribute_lookup() {
Python::initialize();
Python::attach(|py| {
let locals = PyDict::new(py);
py.run(
c"
class User:
def __init__(self, name):
self.name = name
user = User('Lily')
",
None,
Some(&locals),
)
.unwrap();
let context = locals.extract().unwrap();
let mut context = Context::new(context, None, false);
let template = TemplateString("{{ user.name }}");
let variable = Variable::new((3, 9));
let rendered = variable.render(py, template, &mut context).unwrap();
assert_eq!(rendered, "Lily");
});
}
#[test]
fn test_render_html_autoescape() {
Python::initialize();
Python::attach(|py| {
let html = PyString::new(py, "<p>Hello World!</p>").into_any().unbind();
let context = HashMap::from([("html".to_string(), html)]);
let mut context = Context::new(context, None, true);
let template = TemplateString("{{ html }}");
let html = Variable::new((3, 4));
let rendered = html.render(py, template, &mut context).unwrap();
assert_eq!(rendered, "<p>Hello World!</p>");
});
}
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-benchmark/src/lib.rs | dtl-benchmark/src/lib.rs | // Empty for now
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
LilyFirefly/django-rusty-templates | https://github.com/LilyFirefly/django-rusty-templates/blob/86be8c9afbb5f98da786657fa0c3d044fab87e37/dtl-benchmark/benches/slugify.rs | dtl-benchmark/benches/slugify.rs | use std::borrow::Cow;
use std::fmt;
use divan::black_box;
use django_rusty_templates::render::filters::slugify;
/// Entry point: delegates to divan's benchmark harness, which discovers and
/// runs every `#[divan::bench]` function in this binary.
fn main() {
    divan::main();
}
/// A named slugify input used as a divan benchmark argument.
struct BenchCase {
    // Label shown in benchmark output (via the Display impl below).
    name: &'static str,
    // Raw text fed to `slugify`.
    input: &'static str,
}
impl fmt::Display for BenchCase {
    /// Shows only the case name so divan labels each benchmark readably.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.name)
    }
}
// Benchmarks slugify over inputs covering ASCII, punctuation, whitespace
// runs, hyphens, Unicode accents, and a long mixed string.
#[divan::bench(args = [
    BenchCase { name: "Simple ASCII", input: "Hello World" },
    BenchCase { name: "With Numbers", input: "Test123 Example456" },
    BenchCase { name: "Mixed Case", input: "ThIs Is A TeSt" },
    BenchCase { name: "With Punctuation", input: "Hello, World! How are you?" },
    BenchCase { name: "Multiple Spaces", input: "Hello    World   Test" },
    BenchCase { name: "With Hyphens", input: "Hello-World-Test" },
    BenchCase { name: "Unicode Accents", input: "Héllo Wörld Tëst" },
    BenchCase { name: "Long Text", input: "This is a much longer text that contains multiple words and should test the performance with larger inputs" },
    BenchCase { name: "Special Chars", input: "Test@#$%^&*()_+={}[]|\\:;\"'<>,.?/" },
    BenchCase { name: "Mixed Unicode", input: "Café résumé naïve" },
])]
fn bench_slugify(bencher: divan::Bencher, case: &BenchCase) {
    // black_box prevents the optimizer from const-folding the input/result.
    bencher.bench_local(|| slugify(black_box(Cow::Borrowed(case.input))));
}
| rust | BSD-3-Clause | 86be8c9afbb5f98da786657fa0c3d044fab87e37 | 2026-01-04T20:21:11.003630Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net/src/lib.rs | huginn-net/src/lib.rs | #![forbid(unsafe_code)]
// ============================================================================
// CORE IMPORTS (database, errors, results - always required)
// ============================================================================
use crate::output::FingerprintResult;
use huginn_net_db::MatchQualityType;
pub use huginn_net_db::{db_matching_trait, Database, Label};
pub use huginn_net_db::{http, tcp};
use huginn_net_tcp::output::OSQualityMatched;
// ============================================================================
// TCP PROTOCOL IMPORTS (base protocol)
// ============================================================================
pub use huginn_net_db::tcp::Ttl;
use huginn_net_tcp::output::{
MTUOutput, MTUQualityMatched, OperativeSystem, SynAckTCPOutput, SynTCPOutput, UptimeOutput,
UptimeRole,
};
use huginn_net_tcp::uptime::{ConnectionKey, TcpTimestamp};
// ============================================================================
// HTTP PROTOCOL IMPORTS (depends on TCP)
// ============================================================================
use huginn_net_db::http::HttpDiagnosis;
use huginn_net_http::http_process::{FlowKey, TcpFlow};
use huginn_net_http::output::{
Browser, BrowserQualityMatched, HttpRequestOutput, HttpResponseOutput, WebServer,
WebServerQualityMatched,
};
// ============================================================================
// TLS PROTOCOL IMPORTS (depends on TCP)
// ============================================================================
use huginn_net_tls::output::TlsClientOutput;
// ============================================================================
// SHARED PROCESSING IMPORTS (used across protocols)
// ============================================================================
use crate::process::ObservablePackage;
// ============================================================================
// OBSERVABLE SIGNALS EXPORTS (conditional in future)
// ============================================================================
pub use huginn_net_http::observable::{ObservableHttpRequest, ObservableHttpResponse};
pub use huginn_net_tcp::observable::ObservableTcp;
pub use huginn_net_tls::ObservableTlsClient;
// ============================================================================
// EXTERNAL CRATE IMPORTS
// ============================================================================
use pcap_file::pcap::PcapReader;
use pnet::datalink;
use pnet::datalink::Config;
use std::fs::File;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::Sender;
use std::sync::Arc;
use tracing::{debug, error};
use ttl_cache::TtlCache;
pub mod matcher;
pub mod packet_parser;
// ============================================================================
// CORE MODULES (always required - database, matching, errors, results)
// ============================================================================
pub mod error;
pub mod output;
// ============================================================================
// TCP PROTOCOL MODULES (external crate)
// ============================================================================
pub use huginn_net_tcp;
use huginn_net_tcp::raw_filter;
pub use huginn_net_tcp::{FilterConfig, IpFilter, PortFilter};
// ============================================================================
// HTTP PROTOCOL MODULES (external crate)
// ============================================================================
pub use huginn_net_http;
pub use huginn_net_http::{
FilterConfig as HttpFilterConfig, IpFilter as HttpIpFilter, PortFilter as HttpPortFilter,
};
// ============================================================================
// TLS PROTOCOL MODULES (external crate)
// ============================================================================
use crate::error::HuginnNetError;
pub use huginn_net_tls;
pub use huginn_net_tls::{
FilterConfig as TlsFilterConfig, IpFilter as TlsIpFilter, PortFilter as TlsPortFilter,
};
// ============================================================================
// SHARED PROCESSING MODULES (used by multiple protocols)
// ============================================================================
pub mod process;
/// Configuration for protocol analysis
///
/// The [`Default`] implementation enables every protocol and the matcher.
#[derive(Debug, Clone)]
pub struct AnalysisConfig {
    /// Enable HTTP protocol analysis
    pub http_enabled: bool,
    /// Enable TCP protocol analysis
    pub tcp_enabled: bool,
    /// Enable TLS protocol analysis
    pub tls_enabled: bool,
    /// Enable fingerprint matching against the database. When false, all quality matched results will be Disabled.
    pub matcher_enabled: bool,
}
impl Default for AnalysisConfig {
fn default() -> Self {
Self { http_enabled: true, tcp_enabled: true, tls_enabled: true, matcher_enabled: true }
}
}
/// A multi-protocol passive fingerprinting library inspired by `p0f` with JA4 TLS client fingerprinting.
///
/// The `HuginnNet` struct acts as the core component of the library, handling TCP, HTTP, and TLS packet
/// analysis and matching signatures using a database of known fingerprints, plus JA4 TLS
/// client analysis following the official FoxIO specification.
pub struct HuginnNet<'a> {
    /// TCP signature matcher; `None` when TCP analysis or matching is disabled.
    pub tcp_matcher: Option<huginn_net_tcp::SignatureMatcher<'a>>,
    /// HTTP signature matcher; `None` when HTTP analysis or matching is disabled.
    pub http_matcher: Option<huginn_net_http::SignatureMatcher<'a>>,
    // TTL-bounded cache of TCP timestamps keyed by connection, for uptime tracking.
    connection_tracker: TtlCache<ConnectionKey, TcpTimestamp>,
    // TTL-bounded cache of in-flight HTTP flows keyed by flow.
    http_flows: TtlCache<FlowKey, TcpFlow>,
    // Stateless HTTP request/response processors.
    http_processors: huginn_net_http::http_process::HttpProcessors,
    // Which protocols/matching are enabled for this instance.
    config: AnalysisConfig,
    // Optional raw-packet pre-filter; `None` means process everything.
    filter_config: Option<FilterConfig>,
}
impl<'a> HuginnNet<'a> {
/// Creates a new instance of `HuginnNet`.
///
/// # Parameters
/// - `database`: Optional reference to the database containing known TCP/Http signatures from p0f.
/// Only loaded if `matcher_enabled` is true and HTTP or TCP analysis is enabled.
/// Not needed for TLS-only analysis or when fingerprint matching is disabled.
/// - `max_connections`: The maximum number of connections to maintain in the connection tracker and HTTP flows.
/// - `config`: Optional configuration specifying which protocols to analyze. If None, uses default (all enabled).
/// When `matcher_enabled` is false, the database won't be loaded and no signature matching will be performed.
///
/// # Returns
/// A new `HuginnNet` instance initialized with the given database, max connections, and configuration.
///
/// # Errors
/// Returns `HuginnNetError::MissConfiguration` if `matcher_enabled` is true but no database is provided.
pub fn new(
database: Option<&'a Database>,
max_connections: usize,
config: Option<AnalysisConfig>,
) -> Result<Self, error::HuginnNetError> {
let config = config.unwrap_or_default();
if config.matcher_enabled
&& (config.tcp_enabled || config.http_enabled)
&& database.is_none()
{
return Err(error::HuginnNetError::MissConfiguration(
"Database is required when matcher is enabled".to_string(),
));
}
let tcp_matcher = if config.matcher_enabled && config.tcp_enabled {
database.map(huginn_net_tcp::SignatureMatcher::new)
} else {
None
};
let http_matcher = if config.matcher_enabled && config.http_enabled {
database.map(huginn_net_http::SignatureMatcher::new)
} else {
None
};
let connection_tracker_size = if config.tcp_enabled {
max_connections
} else {
0
};
let http_flows_size = if config.http_enabled {
max_connections
} else {
0
};
Ok(Self {
tcp_matcher,
http_matcher,
connection_tracker: TtlCache::new(connection_tracker_size),
http_flows: TtlCache::new(http_flows_size),
http_processors: huginn_net_http::http_process::HttpProcessors::new(),
config,
filter_config: None,
})
}
/// Configure packet filtering for this analyzer.
///
/// Filters packets by IP address and/or port before processing.
/// This is more efficient than processing all packets and filtering later.
///
/// # Parameters
/// - `filter`: The `FilterConfig` to apply to incoming packets.
///
/// # Returns
/// A new `HuginnNet` instance with the filter configured.
pub fn with_filter(mut self, filter: FilterConfig) -> Self {
self.filter_config = Some(filter);
self
}
    /// Drives the packet loop shared by live capture and PCAP analysis.
    ///
    /// `packet_fn` yields `Some(packet bytes)` until the source is exhausted
    /// (`None`). Each packet is optionally pre-filtered on raw bytes,
    /// analyzed, and the result sent through `sender`.
    ///
    /// Stops when the source ends, the receiver is dropped, or
    /// `cancel_signal` becomes true (note: the flag is checked only after
    /// the next packet has been fetched). Per-packet read errors are logged
    /// and the loop continues.
    fn process_with<F>(
        &mut self,
        mut packet_fn: F,
        sender: Sender<FingerprintResult>,
        cancel_signal: Option<Arc<AtomicBool>>,
    ) -> Result<(), HuginnNetError>
    where
        F: FnMut() -> Option<Result<Vec<u8>, HuginnNetError>>,
    {
        while let Some(packet_result) = packet_fn() {
            if let Some(ref cancel) = cancel_signal {
                if cancel.load(Ordering::Relaxed) {
                    debug!("Cancellation signal received, stopping packet processing");
                    break;
                }
            }
            match packet_result {
                Ok(packet) => {
                    // Cheap raw-byte filter before full parsing/analysis.
                    if let Some(ref filter) = self.filter_config {
                        if !raw_filter::apply(&packet, filter) {
                            debug!("Filtered out packet before parsing");
                            continue;
                        }
                    }
                    let output = self.analyze_tcp(&packet);
                    if sender.send(output).is_err() {
                        error!("Receiver dropped, stopping packet processing");
                        break;
                    }
                }
                Err(e) => {
                    error!("Failed to read packet: {}", e);
                }
            }
        }
        Ok(())
    }
    /// Captures and analyzes packets on the specified network interface.
    ///
    /// Sends `FingerprintResult` through the provided channel.
    ///
    /// # Parameters
    /// - `interface_name`: The name of the network interface to analyze.
    /// - `sender`: A `Sender` to send `FingerprintResult` objects back to the caller.
    /// - `cancel_signal`: Optional `Arc<AtomicBool>` to signal graceful shutdown.
    ///
    /// # Errors
    /// - If the network interface cannot be found or a channel cannot be created.
    pub fn analyze_network(
        &mut self,
        interface_name: &str,
        sender: Sender<FingerprintResult>,
        cancel_signal: Option<Arc<AtomicBool>>,
    ) -> Result<(), HuginnNetError> {
        let interfaces = datalink::interfaces();
        let interface = interfaces
            .into_iter()
            .find(|iface| iface.name == interface_name)
            .ok_or_else(|| {
                HuginnNetError::MissConfiguration(format!(
                    "Could not find network interface: {interface_name}"
                ))
            })?;
        debug!("Using network interface: {}", interface.name);
        // Promiscuous mode so the capture sees traffic not addressed to us.
        let config = Config { promiscuous: true, ..Config::default() };
        // Only the Ethernet channel type is supported; the TX half is unused
        // because this analyzer never injects packets.
        let (_tx, mut rx) = match datalink::channel(&interface, config) {
            Ok(datalink::Channel::Ethernet(tx, rx)) => (tx, rx),
            Ok(_) => {
                return Err(HuginnNetError::MissConfiguration("Unhandled channel type".to_string()))
            }
            Err(e) => {
                return Err(HuginnNetError::MissConfiguration(format!(
                    "Unable to create channel: {e}"
                )))
            }
        };
        // Live capture never ends on its own; stop via cancel_signal or a
        // dropped receiver inside process_with.
        self.process_with(
            move || match rx.next() {
                Ok(packet) => Some(Ok(packet.to_vec())),
                Err(e) => Some(Err(HuginnNetError::MissConfiguration(format!(
                    "Error receiving packet: {e}"
                )))),
            },
            sender,
            cancel_signal,
        )
    }
/// Analyzes packets from a PCAP file.
///
/// # Parameters
/// - `pcap_path`: The path to the PCAP file to analyze.
/// - `sender`: A `Sender` to send `FingerprintResult` objects back to the caller.
/// - `cancel_signal`: Optional `Arc<AtomicBool>` to signal graceful shutdown.
///
/// # Errors
/// - If the PCAP file cannot be opened or read.
pub fn analyze_pcap(
&mut self,
pcap_path: &str,
sender: Sender<FingerprintResult>,
cancel_signal: Option<Arc<AtomicBool>>,
) -> Result<(), HuginnNetError> {
let file = File::open(pcap_path).map_err(|e| {
HuginnNetError::MissConfiguration(format!("Failed to open PCAP file: {e}"))
})?;
let mut pcap_reader = PcapReader::new(file).map_err(|e| {
HuginnNetError::MissConfiguration(format!("Failed to create PCAP reader: {e}"))
})?;
self.process_with(
move || match pcap_reader.next_packet() {
Some(Ok(packet)) => Some(Ok(packet.data.to_vec())),
Some(Err(e)) => Some(Err(HuginnNetError::MissConfiguration(format!(
"Error reading PCAP packet: {e}"
)))),
None => None,
},
sender,
cancel_signal,
)
}
    /// Analyzes a TCP packet and returns a `FingerprintResult` object.
    ///
    /// Extracts TCP, HTTP and TLS observations from the raw packet and, when
    /// `config.matcher_enabled` is set, matches each observation against the
    /// configured signature matchers to attach quality-scored labels.
    ///
    /// # Parameters
    /// - `packet`: A reference to the TCP packet to analyze.
    ///
    /// # Returns
    /// A `FingerprintResult` object containing the analysis results. When
    /// extraction fails, every field of the result is `None`.
    pub fn analyze_tcp(&mut self, packet: &[u8]) -> FingerprintResult {
        match ObservablePackage::extract(
            packet,
            &mut self.connection_tracker,
            &mut self.http_flows,
            &self.http_processors,
            &self.config,
        ) {
            Ok(observable_package) => {
                let (
                    syn,
                    syn_ack,
                    mtu,
                    client_uptime,
                    server_uptime,
                    http_request,
                    http_response,
                    tls_client,
                ) = {
                    // MTU: match the observed MTU value against known link types.
                    let mtu: Option<MTUOutput> = observable_package.mtu.map(|observable_mtu| {
                        let link_quality = simple_quality_match!(
                            enabled: self.config.matcher_enabled,
                            matcher: self.tcp_matcher,
                            method: matching_by_mtu(&observable_mtu.value),
                            success: (link, _) => MTUQualityMatched {
                                link: Some(link.clone()),
                                quality: MatchQualityType::Matched(1.0),
                            },
                            failure: MTUQualityMatched {
                                link: None,
                                quality: MatchQualityType::NotMatched,
                            },
                            disabled: MTUQualityMatched {
                                link: None,
                                quality: MatchQualityType::Disabled,
                            }
                        );
                        MTUOutput {
                            source: huginn_net_tcp::output::IpPort::new(
                                observable_package.source.ip,
                                observable_package.source.port,
                            ),
                            destination: huginn_net_tcp::output::IpPort::new(
                                observable_package.destination.ip,
                                observable_package.destination.port,
                            ),
                            link: link_quality,
                            mtu: observable_mtu.value,
                        }
                    });
                    // TCP SYN (client request): OS fingerprint match.
                    let syn: Option<SynTCPOutput> =
                        observable_package.tcp_request.map(|observable_tcp| {
                            let os_quality = simple_quality_match!(
                                enabled: self.config.matcher_enabled,
                                matcher: self.tcp_matcher,
                                method: matching_by_tcp_request(&observable_tcp),
                                success: (label, _signature, quality) => OSQualityMatched {
                                    os: Some(OperativeSystem::from(label)),
                                    quality: MatchQualityType::Matched(quality),
                                },
                                failure: OSQualityMatched {
                                    os: None,
                                    quality: MatchQualityType::NotMatched,
                                },
                                disabled: OSQualityMatched {
                                    os: None,
                                    quality: MatchQualityType::Disabled,
                                }
                            );
                            SynTCPOutput {
                                source: huginn_net_tcp::output::IpPort::new(
                                    observable_package.source.ip,
                                    observable_package.source.port,
                                ),
                                destination: huginn_net_tcp::output::IpPort::new(
                                    observable_package.destination.ip,
                                    observable_package.destination.port,
                                ),
                                os_matched: os_quality,
                                sig: observable_tcp,
                            }
                        });
                    // TCP SYN-ACK (server response): OS fingerprint match.
                    let syn_ack: Option<SynAckTCPOutput> =
                        observable_package.tcp_response.map(|observable_tcp| {
                            let os_quality = simple_quality_match!(
                                enabled: self.config.matcher_enabled,
                                matcher: self.tcp_matcher,
                                method: matching_by_tcp_response(&observable_tcp),
                                success: (label, _signature, quality) => OSQualityMatched {
                                    os: Some(OperativeSystem::from(label)),
                                    quality: MatchQualityType::Matched(quality),
                                },
                                failure: OSQualityMatched {
                                    os: None,
                                    quality: MatchQualityType::NotMatched,
                                },
                                disabled: OSQualityMatched {
                                    os: None,
                                    quality: MatchQualityType::Disabled,
                                }
                            );
                            SynAckTCPOutput {
                                source: huginn_net_tcp::output::IpPort::new(
                                    observable_package.source.ip,
                                    observable_package.source.port,
                                ),
                                destination: huginn_net_tcp::output::IpPort::new(
                                    observable_package.destination.ip,
                                    observable_package.destination.port,
                                ),
                                os_matched: os_quality,
                                sig: observable_tcp,
                            }
                        });
                    // Uptime observation for the client side of the connection.
                    let client_uptime: Option<UptimeOutput> =
                        observable_package.client_uptime.map(|update| UptimeOutput {
                            source: huginn_net_tcp::output::IpPort::new(
                                observable_package.source.ip,
                                observable_package.source.port,
                            ),
                            destination: huginn_net_tcp::output::IpPort::new(
                                observable_package.destination.ip,
                                observable_package.destination.port,
                            ),
                            role: UptimeRole::Client,
                            days: update.days,
                            hours: update.hours,
                            min: update.min,
                            up_mod_days: update.up_mod_days,
                            freq: update.freq,
                        });
                    // Uptime observation for the server side of the connection.
                    let server_uptime: Option<UptimeOutput> =
                        observable_package.server_uptime.map(|update| UptimeOutput {
                            source: huginn_net_tcp::output::IpPort::new(
                                observable_package.source.ip,
                                observable_package.source.port,
                            ),
                            destination: huginn_net_tcp::output::IpPort::new(
                                observable_package.destination.ip,
                                observable_package.destination.port,
                            ),
                            role: UptimeRole::Server,
                            days: update.days,
                            hours: update.hours,
                            min: update.min,
                            up_mod_days: update.up_mod_days,
                            freq: update.freq,
                        });
                    // HTTP request: browser signature match plus User-Agent
                    // cross-check, combined into a diagnosis.
                    let http_request: Option<HttpRequestOutput> = observable_package
                        .http_request
                        .map(|observable_http_request| {
                            let (signature_matcher, ua_matcher, browser_quality) = quality_match!(
                                enabled: self.config.matcher_enabled,
                                matcher: self.http_matcher,
                                call: matcher => {
                                    let sig_match = matcher.matching_by_http_request(&observable_http_request);
                                    let ua_match = observable_http_request.user_agent.clone()
                                        .and_then(|ua| matcher.matching_by_user_agent(ua));
                                    Some((sig_match, ua_match))
                                },
                                matched: (signature_matcher, ua_matcher) => {
                                    let browser_quality = signature_matcher
                                        .map(|(label, _signature, quality)| BrowserQualityMatched {
                                            browser: Some(Browser::from(label)),
                                            quality: MatchQualityType::Matched(quality),
                                        })
                                        .unwrap_or(BrowserQualityMatched {
                                            browser: None,
                                            quality: MatchQualityType::NotMatched,
                                        });
                                    (signature_matcher, ua_matcher, browser_quality)
                                },
                                not_matched: {
                                    let browser_quality = BrowserQualityMatched {
                                        browser: None,
                                        quality: MatchQualityType::NotMatched,
                                    };
                                    (None, None, browser_quality)
                                },
                                disabled: {
                                    let browser_quality = BrowserQualityMatched {
                                        browser: None,
                                        quality: MatchQualityType::Disabled,
                                    };
                                    (None, None, browser_quality)
                                }
                            );
                            let http_diagnosis = huginn_net_http::http_common::get_diagnostic(
                                observable_http_request.user_agent.clone(),
                                ua_matcher,
                                signature_matcher.map(|(label, _signature, _quality)| label),
                            );
                            HttpRequestOutput {
                                source: huginn_net_http::output::IpPort::new(
                                    observable_package.source.ip,
                                    observable_package.source.port,
                                ),
                                destination: huginn_net_http::output::IpPort::new(
                                    observable_package.destination.ip,
                                    observable_package.destination.port,
                                ),
                                lang: observable_http_request.lang.clone(),
                                browser_matched: browser_quality,
                                diagnosis: http_diagnosis,
                                sig: observable_http_request,
                            }
                        });
                    // HTTP response: web-server signature match. No diagnosis
                    // is computed for responses (always HttpDiagnosis::None).
                    let http_response: Option<HttpResponseOutput> = observable_package
                        .http_response
                        .map(|observable_http_response| {
                            let web_server_quality = simple_quality_match!(
                                enabled: self.config.matcher_enabled,
                                matcher: self.http_matcher,
                                method: matching_by_http_response(&observable_http_response),
                                success: (label, _signature, quality) => WebServerQualityMatched {
                                    web_server: Some(WebServer::from(label)),
                                    quality: MatchQualityType::Matched(quality),
                                },
                                failure: WebServerQualityMatched {
                                    web_server: None,
                                    quality: MatchQualityType::NotMatched,
                                },
                                disabled: WebServerQualityMatched {
                                    web_server: None,
                                    quality: MatchQualityType::Disabled,
                                }
                            );
                            HttpResponseOutput {
                                source: huginn_net_http::output::IpPort::new(
                                    observable_package.source.ip,
                                    observable_package.source.port,
                                ),
                                destination: huginn_net_http::output::IpPort::new(
                                    observable_package.destination.ip,
                                    observable_package.destination.port,
                                ),
                                web_server_matched: web_server_quality,
                                diagnosis: HttpDiagnosis::None,
                                sig: observable_http_response,
                            }
                        });
                    // TLS ClientHello observation; no matcher step here.
                    let tls_client: Option<TlsClientOutput> =
                        observable_package
                            .tls_client
                            .map(|observable_tls| TlsClientOutput {
                                source: huginn_net_tls::output::IpPort::new(
                                    observable_package.source.ip,
                                    observable_package.source.port,
                                ),
                                destination: huginn_net_tls::output::IpPort::new(
                                    observable_package.destination.ip,
                                    observable_package.destination.port,
                                ),
                                sig: observable_tls,
                            });
                    (
                        syn,
                        syn_ack,
                        mtu,
                        client_uptime,
                        server_uptime,
                        http_request,
                        http_response,
                        tls_client,
                    )
                };
                FingerprintResult {
                    tcp_syn: syn,
                    tcp_syn_ack: syn_ack,
                    tcp_mtu: mtu,
                    tcp_client_uptime: client_uptime,
                    tcp_server_uptime: server_uptime,
                    http_request,
                    http_response,
                    tls_client,
                }
            }
            Err(error) => {
                debug!("Fail to process signature: {}", error);
                // Extraction failed: report an empty result instead of erroring.
                FingerprintResult {
                    tcp_syn: None,
                    tcp_syn_ack: None,
                    tcp_mtu: None,
                    tcp_client_uptime: None,
                    tcp_server_uptime: None,
                    http_request: None,
                    http_response: None,
                    tls_client: None,
                }
            }
        }
    }
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net/src/process.rs | huginn-net/src/process.rs | use crate::error::HuginnNetError;
use crate::packet_parser::{parse_packet, IpPacket};
use crate::AnalysisConfig;
use huginn_net_http::http_process::{FlowKey, HttpProcessors, ObservableHttpPackage, TcpFlow};
use huginn_net_http::observable::{ObservableHttpRequest, ObservableHttpResponse};
use huginn_net_tcp::observable::{ObservableMtu, ObservableTcp, ObservableUptime};
use huginn_net_tcp::tcp_process::ObservableTCPPackage;
use huginn_net_tcp::uptime::{ConnectionKey, TcpTimestamp};
use huginn_net_tls::{ObservableTlsClient, ObservableTlsPackage};
use pnet::packet::ip::IpNextHeaderProtocols;
use pnet::packet::{ipv4::Ipv4Packet, ipv6::Ipv6Packet, tcp::TcpPacket, Packet};
use std::net::IpAddr;
use ttl_cache::TtlCache;
/// A network endpoint: an IP address paired with a transport-layer port.
#[derive(Clone)]
pub struct IpPort {
    /// IPv4 or IPv6 address of the endpoint.
    pub ip: IpAddr,
    /// TCP port of the endpoint.
    pub port: u16,
}
/// All observations extracted from a single analyzed packet.
///
/// Produced by [`ObservablePackage::extract`]; every analysis field is
/// optional because a single packet rarely yields all observation kinds.
pub struct ObservablePackage {
    /// Source endpoint (IP address and TCP port) of the packet.
    pub source: IpPort,
    /// Destination endpoint (IP address and TCP port) of the packet.
    pub destination: IpPort,
    /// TCP fingerprint observation from the client request (SYN) side.
    pub tcp_request: Option<ObservableTcp>,
    /// TCP fingerprint observation from the server response (SYN-ACK) side.
    pub tcp_response: Option<ObservableTcp>,
    /// Observed Maximum Transmission Unit information.
    pub mtu: Option<ObservableMtu>,
    /// Uptime observation for the client side of the connection.
    pub client_uptime: Option<ObservableUptime>,
    /// Uptime observation for the server side of the connection.
    pub server_uptime: Option<ObservableUptime>,
    /// Parsed HTTP request observation.
    pub http_request: Option<ObservableHttpRequest>,
    /// Parsed HTTP response observation.
    pub http_response: Option<ObservableHttpResponse>,
    /// TLS ClientHello observation.
    pub tls_client: Option<ObservableTlsClient>,
}
impl ObservablePackage {
    /// Parses a raw packet and runs the enabled TCP/HTTP/TLS analyzers on it.
    ///
    /// Returns an [`ObservablePackage`] with whatever observations could be
    /// extracted, or an error when the buffer is not a valid IP packet.
    pub fn extract(
        packet: &[u8],
        connection_tracker: &mut TtlCache<ConnectionKey, TcpTimestamp>,
        http_flows: &mut TtlCache<FlowKey, TcpFlow>,
        http_processors: &HttpProcessors,
        config: &AnalysisConfig,
    ) -> Result<Self, HuginnNetError> {
        match parse_packet(packet) {
            IpPacket::Ipv4(ip_data) => {
                let ipv4 = Ipv4Packet::new(ip_data).ok_or_else(|| {
                    HuginnNetError::UnexpectedPackage("Invalid IPv4 packet".to_string())
                })?;
                process_ipv4(connection_tracker, http_flows, http_processors, ipv4, config)
            }
            IpPacket::Ipv6(ip_data) => {
                let ipv6 = Ipv6Packet::new(ip_data).ok_or_else(|| {
                    HuginnNetError::UnexpectedPackage("Invalid IPv6 packet".to_string())
                })?;
                process_ipv6(connection_tracker, http_flows, http_processors, ipv6, config)
            }
            IpPacket::None => Err(HuginnNetError::UnexpectedPackage(
                "No valid IP packet found".to_string(),
            )),
        }
    }
}
/// Version-agnostic operations needed to analyze one IP packet.
///
/// Implemented for both [`Ipv4Packet`] and [`Ipv6Packet`] so the analysis
/// pipeline (`process_ip` / `execute_analysis`) can be written once.
trait IpPacketProcessor: Packet {
    /// Returns true when the packet's next-level protocol is TCP.
    fn is_tcp(&self) -> bool;
    /// Builds the error message used when the payload is not TCP.
    fn get_protocol_error(&self) -> String;
    /// Returns the `(source, destination)` IP addresses of the packet.
    fn get_addresses(&self) -> (IpAddr, IpAddr);
    /// Runs HTTP analysis on a raw packet buffer of this IP version.
    fn process_http_with_data(
        data: &[u8],
        http_flows: &mut TtlCache<FlowKey, TcpFlow>,
        http_processors: &HttpProcessors,
    ) -> Result<ObservableHttpPackage, HuginnNetError>;
    /// Runs TCP fingerprint analysis on a raw packet buffer of this IP version.
    fn process_tcp_with_data(
        data: &[u8],
        connection_tracker: &mut TtlCache<ConnectionKey, TcpTimestamp>,
    ) -> Result<ObservableTCPPackage, HuginnNetError>;
    /// Runs TLS ClientHello analysis on a raw packet buffer of this IP version.
    fn process_tls_with_data(data: &[u8]) -> Result<ObservableTlsPackage, HuginnNetError>;
}
impl IpPacketProcessor for Ipv4Packet<'_> {
fn is_tcp(&self) -> bool {
self.get_next_level_protocol() == IpNextHeaderProtocols::Tcp
}
fn get_protocol_error(&self) -> String {
format!(
"unsupported IPv4 packet with non-TCP payload: {}",
self.get_next_level_protocol()
)
}
fn get_addresses(&self) -> (IpAddr, IpAddr) {
(IpAddr::V4(self.get_source()), IpAddr::V4(self.get_destination()))
}
fn process_http_with_data(
data: &[u8],
http_flows: &mut TtlCache<FlowKey, TcpFlow>,
http_processors: &HttpProcessors,
) -> Result<ObservableHttpPackage, HuginnNetError> {
if let Some(packet) = Ipv4Packet::new(data) {
huginn_net_http::http_process::process_http_ipv4(&packet, http_flows, http_processors)
.map_err(|e| match e {
huginn_net_http::error::HuginnNetHttpError::Parse(msg) => {
HuginnNetError::Parse(msg)
}
huginn_net_http::error::HuginnNetHttpError::UnsupportedProtocol(msg) => {
HuginnNetError::UnsupportedProtocol(msg)
}
huginn_net_http::error::HuginnNetHttpError::Misconfiguration(msg) => {
HuginnNetError::Parse(msg)
}
})
} else {
Err(HuginnNetError::UnexpectedPackage("Invalid IPv4 packet data".to_string()))
}
}
fn process_tcp_with_data(
data: &[u8],
connection_tracker: &mut TtlCache<ConnectionKey, TcpTimestamp>,
) -> Result<ObservableTCPPackage, HuginnNetError> {
if let Some(packet) = Ipv4Packet::new(data) {
huginn_net_tcp::tcp_process::process_tcp_ipv4(&packet, connection_tracker).map_err(
|e| match e {
huginn_net_tcp::error::HuginnNetTcpError::Parse(msg) => {
HuginnNetError::Parse(msg)
}
huginn_net_tcp::error::HuginnNetTcpError::UnsupportedProtocol(msg) => {
HuginnNetError::UnsupportedProtocol(msg)
}
huginn_net_tcp::error::HuginnNetTcpError::InvalidTcpFlags(flags) => {
HuginnNetError::InvalidTcpFlags(flags)
}
huginn_net_tcp::error::HuginnNetTcpError::UnexpectedPackage(msg) => {
HuginnNetError::UnexpectedPackage(msg)
}
huginn_net_tcp::error::HuginnNetTcpError::Misconfiguration(msg) => {
HuginnNetError::Parse(msg)
}
},
)
} else {
Err(HuginnNetError::UnexpectedPackage("Invalid IPv4 packet data".to_string()))
}
}
fn process_tls_with_data(data: &[u8]) -> Result<ObservableTlsPackage, HuginnNetError> {
if let Some(packet) = Ipv4Packet::new(data) {
huginn_net_tls::process_tls_ipv4(&packet).map_err(|e| match e {
huginn_net_tls::error::HuginnNetTlsError::Parse(msg) => HuginnNetError::Parse(msg),
huginn_net_tls::error::HuginnNetTlsError::UnsupportedProtocol(msg) => {
HuginnNetError::UnsupportedProtocol(msg)
}
huginn_net_tls::error::HuginnNetTlsError::Misconfiguration(msg) => {
HuginnNetError::Parse(msg)
}
huginn_net_tls::error::HuginnNetTlsError::Unknown => {
HuginnNetError::Parse("Unknown TLS error".to_string())
}
})
} else {
Err(HuginnNetError::UnexpectedPackage("Invalid IPv4 packet data".to_string()))
}
}
}
impl IpPacketProcessor for Ipv6Packet<'_> {
fn is_tcp(&self) -> bool {
self.get_next_header() == IpNextHeaderProtocols::Tcp
}
fn get_protocol_error(&self) -> String {
format!("IPv6 packet with non-TCP payload: {}", self.get_next_header())
}
fn get_addresses(&self) -> (IpAddr, IpAddr) {
(IpAddr::V6(self.get_source()), IpAddr::V6(self.get_destination()))
}
fn process_http_with_data(
data: &[u8],
http_flows: &mut TtlCache<FlowKey, TcpFlow>,
http_processors: &HttpProcessors,
) -> Result<ObservableHttpPackage, HuginnNetError> {
if let Some(packet) = Ipv6Packet::new(data) {
huginn_net_http::http_process::process_http_ipv6(&packet, http_flows, http_processors)
.map_err(|e| match e {
huginn_net_http::error::HuginnNetHttpError::Parse(msg) => {
HuginnNetError::Parse(msg)
}
huginn_net_http::error::HuginnNetHttpError::UnsupportedProtocol(msg) => {
HuginnNetError::UnsupportedProtocol(msg)
}
huginn_net_http::error::HuginnNetHttpError::Misconfiguration(msg) => {
HuginnNetError::Parse(msg)
}
})
} else {
Err(HuginnNetError::UnexpectedPackage("Invalid IPv6 packet data".to_string()))
}
}
fn process_tcp_with_data(
data: &[u8],
connection_tracker: &mut TtlCache<ConnectionKey, TcpTimestamp>,
) -> Result<ObservableTCPPackage, HuginnNetError> {
if let Some(packet) = Ipv6Packet::new(data) {
huginn_net_tcp::tcp_process::process_tcp_ipv6(&packet, connection_tracker).map_err(
|e| match e {
huginn_net_tcp::error::HuginnNetTcpError::Parse(msg) => {
HuginnNetError::Parse(msg)
}
huginn_net_tcp::error::HuginnNetTcpError::UnsupportedProtocol(msg) => {
HuginnNetError::UnsupportedProtocol(msg)
}
huginn_net_tcp::error::HuginnNetTcpError::InvalidTcpFlags(flags) => {
HuginnNetError::InvalidTcpFlags(flags)
}
huginn_net_tcp::error::HuginnNetTcpError::UnexpectedPackage(msg) => {
HuginnNetError::UnexpectedPackage(msg)
}
huginn_net_tcp::error::HuginnNetTcpError::Misconfiguration(msg) => {
HuginnNetError::Parse(msg)
}
},
)
} else {
Err(HuginnNetError::UnexpectedPackage("Invalid IPv6 packet data".to_string()))
}
}
fn process_tls_with_data(data: &[u8]) -> Result<ObservableTlsPackage, HuginnNetError> {
if let Some(packet) = Ipv6Packet::new(data) {
huginn_net_tls::process_tls_ipv6(&packet).map_err(|e| match e {
huginn_net_tls::error::HuginnNetTlsError::Parse(msg) => HuginnNetError::Parse(msg),
huginn_net_tls::error::HuginnNetTlsError::UnsupportedProtocol(msg) => {
HuginnNetError::UnsupportedProtocol(msg)
}
huginn_net_tls::error::HuginnNetTlsError::Misconfiguration(msg) => {
HuginnNetError::Parse(msg)
}
huginn_net_tls::error::HuginnNetTlsError::Unknown => {
HuginnNetError::Parse("Unknown TLS error".to_string())
}
})
} else {
Err(HuginnNetError::UnexpectedPackage("Invalid IPv6 packet data".to_string()))
}
}
}
/// Runs the per-protocol analyzers enabled in `config` on one packet and
/// merges their observations into a single [`ObservablePackage`].
///
/// Disabled analyzers contribute an empty observation set instead of running;
/// any analyzer error aborts the whole analysis for this packet.
fn execute_analysis<P: IpPacketProcessor>(
    packet_data: &[u8],
    connection_tracker: &mut TtlCache<ConnectionKey, TcpTimestamp>,
    http_flows: &mut TtlCache<FlowKey, TcpFlow>,
    http_processors: &HttpProcessors,
    config: &AnalysisConfig,
    source: IpPort,
    destination: IpPort,
) -> Result<ObservablePackage, HuginnNetError> {
    // HTTP analysis, or an empty result when disabled.
    let http_package = if config.http_enabled {
        P::process_http_with_data(packet_data, http_flows, http_processors)?
    } else {
        ObservableHttpPackage { http_request: None, http_response: None }
    };
    // TCP fingerprinting, or an empty result when disabled.
    let tcp_package = if config.tcp_enabled {
        P::process_tcp_with_data(packet_data, connection_tracker)?
    } else {
        ObservableTCPPackage {
            tcp_request: None,
            tcp_response: None,
            mtu: None,
            client_uptime: None,
            server_uptime: None,
        }
    };
    // TLS ClientHello analysis, or an empty result when disabled.
    let tls_package = if config.tls_enabled {
        P::process_tls_with_data(packet_data)?
    } else {
        ObservableTlsPackage { tls_client: None }
    };
    handle_http_tcp_tlc(Ok(http_package), Ok(tcp_package), Ok(tls_package), source, destination)
}
/// Shared entry point for IPv4/IPv6 analysis.
///
/// Rejects non-TCP payloads, extracts the source/destination endpoints from
/// the IP and TCP headers, then hands the raw bytes to [`execute_analysis`].
fn process_ip<P: IpPacketProcessor>(
    connection_tracker: &mut TtlCache<ConnectionKey, TcpTimestamp>,
    http_flows: &mut TtlCache<FlowKey, TcpFlow>,
    http_processors: &HttpProcessors,
    packet: P,
    config: &AnalysisConfig,
) -> Result<ObservablePackage, HuginnNetError> {
    if !packet.is_tcp() {
        return Err(HuginnNetError::UnsupportedProtocol(packet.get_protocol_error()));
    }
    let (src_ip, dst_ip) = packet.get_addresses();
    // The TCP header carries the port numbers for both endpoints.
    let tcp = TcpPacket::new(packet.payload())
        .ok_or_else(|| HuginnNetError::UnexpectedPackage("Invalid TCP packet".to_string()))?;
    let source = IpPort { ip: src_ip, port: tcp.get_source() };
    let destination = IpPort { ip: dst_ip, port: tcp.get_destination() };
    execute_analysis::<P>(
        packet.packet(),
        connection_tracker,
        http_flows,
        http_processors,
        config,
        source,
        destination,
    )
}
/// Analyzes a single IPv4 packet via the generic [`process_ip`] pipeline.
pub fn process_ipv4(
    connection_tracker: &mut TtlCache<ConnectionKey, TcpTimestamp>,
    http_flows: &mut TtlCache<FlowKey, TcpFlow>,
    http_processors: &HttpProcessors,
    packet: Ipv4Packet,
    config: &AnalysisConfig,
) -> Result<ObservablePackage, HuginnNetError> {
    process_ip(connection_tracker, http_flows, http_processors, packet, config)
}
/// Analyzes a single IPv6 packet via the generic [`process_ip`] pipeline.
pub fn process_ipv6(
    connection_tracker: &mut TtlCache<ConnectionKey, TcpTimestamp>,
    http_flows: &mut TtlCache<FlowKey, TcpFlow>,
    http_processors: &HttpProcessors,
    packet: Ipv6Packet,
    config: &AnalysisConfig,
) -> Result<ObservablePackage, HuginnNetError> {
    process_ip(connection_tracker, http_flows, http_processors, packet, config)
}
/// Combines the three per-protocol results into one [`ObservablePackage`].
///
/// Error precedence matches the evaluation order below: an HTTP error is
/// reported first, then a TCP error, then a TLS error.
fn handle_http_tcp_tlc(
    http_response: Result<ObservableHttpPackage, HuginnNetError>,
    tcp_response: Result<ObservableTCPPackage, HuginnNetError>,
    tls_response: Result<ObservableTlsPackage, HuginnNetError>,
    source: IpPort,
    destination: IpPort,
) -> Result<ObservablePackage, HuginnNetError> {
    let http_package = http_response?;
    let tcp_package = tcp_response?;
    let tls_package = tls_response?;
    Ok(ObservablePackage {
        source,
        destination,
        tcp_request: tcp_package.tcp_request,
        tcp_response: tcp_package.tcp_response,
        mtu: tcp_package.mtu,
        client_uptime: tcp_package.client_uptime,
        server_uptime: tcp_package.server_uptime,
        http_request: http_package.http_request,
        http_response: http_package.http_response,
        tls_client: tls_package.tls_client,
    })
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net/src/error.rs | huginn-net/src/error.rs | use pnet::packet::ethernet::EtherType;
use thiserror::Error;
/// Error handling during network packet analysis and Database parsing.
#[derive(Error, Debug)]
pub enum HuginnNetError {
    /// An error occurred while parsing data.
    ///
    /// This variant is used when a parsing operation fails.
    /// The associated string provides additional context about the error.
    #[error("Parse error: {0}")]
    Parse(String),
    /// An unsupported protocol was encountered.
    ///
    /// This variant is used when a protocol is not supported by the application.
    /// The associated string specifies the unsupported protocol.
    #[error("Unsupported protocol: {0}")]
    UnsupportedProtocol(String),
    /// Invalid TCP flags were detected.
    ///
    /// This variant is used when TCP flags are invalid or unexpected.
    /// The associated value provides the invalid flags.
    #[error("Invalid TCP flags: {0}")]
    InvalidTcpFlags(u8),
    /// An invalid package was encountered.
    ///
    /// This variant is used when a package is deemed invalid.
    /// The associated string provides details about the invalid package.
    #[error("Invalid package: {0}")]
    UnexpectedPackage(String),
    /// An unsupported Ethernet type was encountered.
    ///
    /// This variant is used when an Ethernet type is not supported.
    /// The associated value specifies the unsupported Ethernet type.
    #[error("Unsupported ethernet type: {0}")]
    UnsupportedEthernetType(EtherType),
    /// Unacceptable configuration.
    ///
    /// This variant is used when the configuration is unacceptable.
    /// The associated value specifies the unacceptable configuration.
    // NOTE(review): the name looks like a typo for "Misconfiguration", but
    // renaming it would break the public API, so it is kept as-is.
    #[error("Unacceptable configuration: {0}")]
    MissConfiguration(String),
    /// An unknown error occurred.
    ///
    /// This variant is used as a catch-all for errors that do not fit other categories.
    #[error("Unknown error")]
    Unknown,
}
impl From<huginn_net_db::error::DatabaseError> for HuginnNetError {
    /// A failing signature database makes the analyzer unusable, so database
    /// errors are surfaced as configuration errors.
    fn from(err: huginn_net_db::error::DatabaseError) -> Self {
        Self::MissConfiguration(format!("Database error: {err}"))
    }
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net/src/matcher.rs | huginn-net/src/matcher.rs | /// Macro for quality matching pattern.
/// This macro provides a zero-cost abstraction for the common pattern of
/// conditional quality matching based on matcher_enabled configuration.
///
/// # Usage
///
/// ```rust
/// use huginn_net::quality_match;
/// # use huginn_net_tcp::output::{OperativeSystem, OSQualityMatched};
/// # use huginn_net_db::{MatchQualityType, Label};
/// # struct Config { matcher_enabled: bool }
/// # struct Matcher;
/// # struct ObservableTcp;
/// # let config = Config { matcher_enabled: true };
/// # let matcher: Option<Matcher> = None;
/// # let observable_tcp = ObservableTcp;
/// let quality = quality_match!(
/// enabled: config.matcher_enabled,
/// matcher: matcher,
/// call: matcher => None::<(Label, String, f32)>,
/// matched: (label, _signature, quality) => OSQualityMatched {
/// os: Some(OperativeSystem::from(&label)),
/// quality: MatchQualityType::Matched(quality),
/// },
/// not_matched: OSQualityMatched {
/// os: None,
/// quality: MatchQualityType::NotMatched,
/// },
/// disabled: OSQualityMatched {
/// os: None,
/// quality: MatchQualityType::Disabled,
/// }
/// );
/// ```
#[macro_export]
macro_rules! quality_match {
    (
        enabled: $enabled:expr,
        matcher: $matcher:expr,
        call: $matcher_var:ident => $call:expr,
        matched: $result:pat => $matched_expr:expr,
        not_matched: $not_matched_expr:expr,
        disabled: $disabled_expr:expr
    ) => {
        if $enabled {
            // Matching is enabled: evaluate $call with the configured matcher
            // (skipped entirely when no matcher is present), then map a hit
            // through the `matched` arm or fall back to the `not_matched`
            // value.
            $matcher
                .as_ref()
                .and_then(|$matcher_var| $call)
                .map(|$result| $matched_expr)
                .unwrap_or($not_matched_expr)
        } else {
            // Matching is disabled by configuration.
            $disabled_expr
        }
    };
}
/// Simplified quality matching macro for cases where the matcher call is straightforward.
///
/// This is a convenience macro for the most common use case where you just need
/// to call a single matcher method and handle the three states.
///
/// # Usage
///
/// ```rust
/// use huginn_net::{simple_quality_match, quality_match};
/// # use huginn_net_tcp::output::MTUQualityMatched;
/// # use huginn_net_db::MatchQualityType;
/// # struct Config { matcher_enabled: bool }
/// # struct Matcher;
/// # impl Matcher {
/// # fn matching_by_mtu(&self, _value: &u16) -> Option<(String, String)> { None }
/// # }
/// # struct ObservableMtu { value: u16 }
/// # let config = Config { matcher_enabled: true };
/// # let matcher: Option<Matcher> = None;
/// # let observable_mtu = ObservableMtu { value: 1500 };
/// let quality = simple_quality_match!(
/// enabled: config.matcher_enabled,
/// matcher: matcher,
/// method: matching_by_mtu(&observable_mtu.value),
/// success: (link, _) => MTUQualityMatched {
/// link: Some(link.clone()),
/// quality: MatchQualityType::Matched(1.0),
/// },
/// failure: MTUQualityMatched {
/// link: None,
/// quality: MatchQualityType::NotMatched,
/// },
/// disabled: MTUQualityMatched {
/// link: None,
/// quality: MatchQualityType::Disabled,
/// }
/// );
/// ```
#[macro_export]
macro_rules! simple_quality_match {
    (
        enabled: $enabled:expr,
        matcher: $matcher:expr,
        method: $method:ident($($args:expr),*),
        success: $result:pat => $success_expr:expr,
        failure: $failure_expr:expr,
        disabled: $disabled_expr:expr
    ) => {
        // Delegates to `quality_match!`, fixing `call` to a single matcher
        // method invocation with the given arguments.
        quality_match!(
            enabled: $enabled,
            matcher: $matcher,
            call: matcher => matcher.$method($($args),*),
            matched: $result => $success_expr,
            not_matched: $failure_expr,
            disabled: $disabled_expr
        )
    };
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net/src/output.rs | huginn-net/src/output.rs | use huginn_net_http::output::{HttpRequestOutput, HttpResponseOutput};
use huginn_net_tcp::output::{MTUOutput, SynAckTCPOutput, SynTCPOutput, UptimeOutput};
use huginn_net_tls::output::TlsClientOutput;
/// Represents the output from the Huginn Net analyzer.
///
/// This struct contains various optional outputs that can be derived
/// from analyzing TCP, HTTP, and TLS packets. Every field is optional;
/// any subset may be present depending on what the analyzed packet
/// contained.
pub struct FingerprintResult {
    /// Information derived from TCP SYN packets.
    pub tcp_syn: Option<SynTCPOutput>,
    /// Information derived from TCP SYN-ACK packets.
    pub tcp_syn_ack: Option<SynAckTCPOutput>,
    /// Information about the TCP Maximum Transmission Unit (MTU).
    pub tcp_mtu: Option<MTUOutput>,
    /// Information about the TCP client system uptime.
    pub tcp_client_uptime: Option<UptimeOutput>,
    /// Information about the TCP server system uptime.
    pub tcp_server_uptime: Option<UptimeOutput>,
    /// Information derived from HTTP request headers.
    pub http_request: Option<HttpRequestOutput>,
    /// Information derived from HTTP response headers.
    pub http_response: Option<HttpResponseOutput>,
    /// Information derived from TLS ClientHello analysis using JA4 fingerprinting.
    /// JA4 methodology by FoxIO, LLC - implementation from scratch for Huginn Net.
    pub tls_client: Option<TlsClientOutput>,
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net/src/packet_parser.rs | huginn-net/src/packet_parser.rs | /// Packet parsing utilities for different network packet formats
///
/// This module provides unified parsing for various network packet formats
/// from both live network capture and PCAP files:
/// - Ethernet frames (most common in network interfaces)
/// - Raw IP packets (tunnels, loopback interfaces)
/// - NULL datalink packets (specialized capture tools)
/// - Future packet formats can be added here
use pnet::packet::ethernet::{EtherTypes, EthernetPacket};
use pnet::packet::ipv4::Ipv4Packet;
use pnet::packet::ipv6::Ipv6Packet;
use tracing::debug;
/// Represents the result of IP packet parsing.
///
/// The slices borrow from the original packet buffer and start at the IP
/// header (any datalink header has already been stripped).
#[derive(Debug)]
pub enum IpPacket<'a> {
    /// IPv4 packet data (slice of original packet)
    Ipv4(&'a [u8]),
    /// IPv6 packet data (slice of original packet)
    Ipv6(&'a [u8]),
    /// No valid IP packet found
    None,
}
/// Datalink format types supported
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum DatalinkFormat {
    /// Standard Ethernet frame (14-byte header)
    Ethernet,
    /// Raw IP packet (no datalink header)
    RawIp,
    /// NULL datalink with 4-byte header (first bytes 0x1e 0x00)
    Null,
}
/// Parse a network packet, trying each supported datalink format in turn.
///
/// Formats are attempted in order of likelihood:
/// 1. Ethernet (most common in network interfaces and PCAPs)
/// 2. Raw IP (tunnels, loopback interfaces, some PCAPs)
/// 3. NULL datalink (specialized capture tools)
///
/// Works with packets from both live network capture and PCAP files.
///
/// # Arguments
/// * `packet` - Raw packet bytes from a network interface or PCAP file
///
/// # Returns
/// * `IpPacket` - The parsed IP payload, or `IpPacket::None` when no format matched
pub fn parse_packet(packet: &[u8]) -> IpPacket<'_> {
    try_ethernet_format(packet)
        .or_else(|| try_raw_ip_format(packet))
        .or_else(|| try_null_datalink_format(packet))
        .unwrap_or(IpPacket::None)
}
/// Try to interpret `packet` as an Ethernet frame carrying an IP payload.
fn try_ethernet_format(packet: &[u8]) -> Option<IpPacket<'_>> {
    // Ethernet header is 14 bytes: [6B dst][6B src][2B ethertype].
    if packet.len() < 14 {
        return None;
    }
    let frame = EthernetPacket::new(packet)?;
    let payload = &packet[14..]; // Everything after the Ethernet header.
    match frame.get_ethertype() {
        EtherTypes::Ipv4 if Ipv4Packet::new(payload).is_some() => {
            debug!("Parsed Ethernet IPv4 packet");
            Some(IpPacket::Ipv4(payload))
        }
        EtherTypes::Ipv6 if Ipv6Packet::new(payload).is_some() => {
            debug!("Parsed Ethernet IPv6 packet");
            Some(IpPacket::Ipv6(payload))
        }
        _ => None,
    }
}
/// Try to interpret `packet` as a raw IP packet with no datalink header.
fn try_raw_ip_format(packet: &[u8]) -> Option<IpPacket<'_>> {
    if packet.len() < 20 {
        return None;
    }
    // The IP version lives in the upper nibble of the first byte.
    match packet[0] >> 4 {
        4 if Ipv4Packet::new(packet).is_some() => {
            debug!("Parsed Raw IPv4 packet");
            Some(IpPacket::Ipv4(packet))
        }
        6 if Ipv6Packet::new(packet).is_some() => {
            debug!("Parsed Raw IPv6 packet");
            Some(IpPacket::Ipv6(packet))
        }
        _ => None,
    }
}
/// Try to interpret `packet` as a NULL-datalink capture (4-byte 0x1e 0x00 header).
fn try_null_datalink_format(packet: &[u8]) -> Option<IpPacket<'_>> {
    // Require the NULL signature plus enough bytes for a minimal IP header.
    if packet.len() < 24 || packet[0] != 0x1e || packet[1] != 0x00 {
        return None;
    }
    let ip_data = &packet[4..]; // Everything after the 4-byte NULL header.
    match ip_data[0] >> 4 {
        4 if Ipv4Packet::new(ip_data).is_some() => {
            debug!("Parsed NULL datalink IPv4 packet");
            Some(IpPacket::Ipv4(ip_data))
        }
        6 if Ipv6Packet::new(ip_data).is_some() => {
            debug!("Parsed NULL datalink IPv6 packet");
            Some(IpPacket::Ipv6(ip_data))
        }
        _ => None,
    }
}
/// Detect the datalink format of a packet without fully parsing it.
///
/// Formats are checked from most to least specific signature: NULL datalink,
/// then Raw IP, then Ethernet. Useful for statistics or format validation.
pub fn detect_datalink_format(packet: &[u8]) -> Option<DatalinkFormat> {
    // NULL datalink: fixed 0x1e 0x00 signature plus room for an IP header.
    if packet.len() >= 24 && packet[0] == 0x1e && packet[1] == 0x00 {
        let version = packet[4] >> 4;
        if version == 4 || version == 6 {
            return Some(DatalinkFormat::Null);
        }
    }
    // Raw IP: the buffer starts directly with a plausible IP header.
    if packet.len() >= 20 {
        match packet[0] >> 4 {
            4 => {
                // Validate the IPv4 header-length field (IHL, 32-bit words).
                let header_len = usize::from((packet[0] & 0x0F).saturating_mul(4));
                if header_len >= 20 && packet.len() >= header_len {
                    return Some(DatalinkFormat::RawIp);
                }
            }
            6 => {
                // IPv6 has a fixed 40-byte header.
                if packet.len() >= 40 {
                    return Some(DatalinkFormat::RawIp);
                }
            }
            _ => {}
        }
    }
    // Ethernet: accept only frames whose EtherType agrees with the payload's
    // IP version byte.
    if let Some(frame) = EthernetPacket::new(packet) {
        let ethertype = frame.get_ethertype();
        if ethertype == EtherTypes::Ipv4 || ethertype == EtherTypes::Ipv6 {
            if let Some(first_byte) = packet.get(14) {
                let version = first_byte >> 4;
                if (ethertype == EtherTypes::Ipv4 && version == 4)
                    || (ethertype == EtherTypes::Ipv6 && version == 6)
                {
                    return Some(DatalinkFormat::Ethernet);
                }
            }
        }
    }
    None
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net/tests/packet_parser.rs | huginn-net/tests/packet_parser.rs | use huginn_net::packet_parser::{detect_datalink_format, parse_packet, DatalinkFormat, IpPacket};
#[test]
fn test_detect_null_datalink() {
    // A NULL-datalink capture — 4-byte 0x1e 0x00 header followed by a full
    // 40-byte IPv6 header — must be classified as DatalinkFormat::Null.
    // NULL datalink: 4-byte header + IPv6 packet
    let null_packet = vec![
        0x1e, 0x00, 0x00, 0x00, // NULL header
        0x60, 0x00, 0x00, 0x00, // IPv6 header start (version=6)
        0x00, 0x14, 0x06, 0x40, // IPv6 payload length, next header (TCP), hop limit
        // Add minimal IPv6 addresses (32 bytes total)
        0x20, 0x01, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x01, // src
        0x20, 0x01, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x02, // dst
    ];
    let format = detect_datalink_format(&null_packet);
    assert_eq!(format, Some(DatalinkFormat::Null));
}
#[test]
fn test_detect_raw_ipv4() {
    // A bare IPv4 header (version nibble 4, valid IHL) with no datalink
    // header must be classified as DatalinkFormat::RawIp.
    // Raw IPv4 packet (no datalink header)
    let raw_ipv4 = vec![
        0x45, 0x00, 0x00, 0x1c, // Version=4, IHL=5, TOS=0, Total Length=28
        0x00, 0x00, 0x40, 0x00, // ID=0, Flags=0x4000 (DF), Fragment Offset=0
        0x40, 0x06, 0x7c, 0xb0, // TTL=64, Protocol=TCP(6), Checksum
        0xc0, 0xa8, 0x01, 0x01, // Source IP: 192.168.1.1
        0xc0, 0xa8, 0x01, 0x02, // Dest IP: 192.168.1.2
    ];
    let format = detect_datalink_format(&raw_ipv4);
    assert_eq!(format, Some(DatalinkFormat::RawIp));
}
#[test]
fn test_detect_raw_ipv6() {
    // Bare 40-byte IPv6 header: next header=TCP(6), hop limit=64, 2001:db8::1 -> 2001:db8::2.
    let mut packet = vec![0x60, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x40];
    packet.extend_from_slice(&[
        0x20, 0x01, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x01,
    ]);
    packet.extend_from_slice(&[
        0x20, 0x01, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x02,
    ]);
    assert_eq!(detect_datalink_format(&packet), Some(DatalinkFormat::RawIp));
}
#[test]
fn test_detect_ethernet_ipv4() {
    // Ethernet frame: zeroed MACs, EtherType 0x0800 (IPv4), then a valid IPv4 header.
    let mut frame = vec![0u8; 12]; // dst MAC + src MAC
    frame.extend_from_slice(&[0x08, 0x00]); // EtherType: IPv4
    frame.extend_from_slice(&[
        0x45, 0x00, 0x00, 0x1c, // version/IHL, TOS, total length
        0x00, 0x00, 0x40, 0x00, // id, flags (DF), fragment offset
        0x40, 0x06, 0x7c, 0xb0, // TTL, protocol=TCP, checksum
        0xc0, 0xa8, 0x01, 0x01, // src 192.168.1.1
        0xc0, 0xa8, 0x01, 0x02, // dst 192.168.1.2
    ]);
    assert_eq!(detect_datalink_format(&frame), Some(DatalinkFormat::Ethernet));
}
#[test]
fn test_detect_ethernet_ipv6() {
    // Ethernet frame: zeroed MACs, EtherType 0x86DD (IPv6), then a full IPv6 header.
    let mut frame = vec![0u8; 12]; // dst MAC + src MAC
    frame.extend_from_slice(&[0x86, 0xDD]); // EtherType: IPv6
    // IPv6 fixed header: next header=TCP(6), hop limit=64.
    frame.extend_from_slice(&[0x60, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x40]);
    // Source 2001:db8::1, destination 2001:db8::2.
    frame.extend_from_slice(&[
        0x20, 0x01, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x01,
    ]);
    frame.extend_from_slice(&[
        0x20, 0x01, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x02,
    ]);
    assert_eq!(detect_datalink_format(&frame), Some(DatalinkFormat::Ethernet));
}
#[test]
fn test_parse_ethernet_ipv4() {
    // Ethernet frame carrying IPv4; parse_packet should yield the 20-byte IP header.
    let mut frame = vec![0u8; 12]; // dst MAC + src MAC
    frame.extend_from_slice(&[0x08, 0x00]); // EtherType: IPv4
    frame.extend_from_slice(&[
        0x45, 0x00, 0x00, 0x1c, // version/IHL, TOS, total length
        0x00, 0x00, 0x40, 0x00, // id, flags (DF), fragment offset
        0x40, 0x06, 0x7c, 0xb0, // TTL, protocol=TCP, checksum
        0xc0, 0xa8, 0x01, 0x01, // src 192.168.1.1
        0xc0, 0xa8, 0x01, 0x02, // dst 192.168.1.2
    ]);
    let IpPacket::Ipv4(ip_data) = parse_packet(&frame) else {
        panic!("Expected IPv4 packet");
    };
    assert_eq!(ip_data[0], 0x45); // version=4, IHL=5
    assert_eq!(ip_data.len(), 20); // IPv4 header length
}
#[test]
fn test_parse_raw_ipv6() {
    // Raw IPv6 with no framing; parse_packet should yield the 40-byte header.
    let mut packet = vec![0x60, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x40];
    packet.extend_from_slice(&[
        0x20, 0x01, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x01,
    ]);
    packet.extend_from_slice(&[
        0x20, 0x01, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x02,
    ]);
    let IpPacket::Ipv6(ip_data) = parse_packet(&packet) else {
        panic!("Expected IPv6 packet");
    };
    assert_eq!(ip_data[0], 0x60); // version=6
    assert_eq!(ip_data.len(), 40); // IPv6 header length
}
#[test]
fn test_parse_null_datalink_ipv6() {
    // NULL (BSD loopback) framing around IPv6; the 4-byte header must be stripped.
    let mut packet = vec![0x1e, 0x00, 0x00, 0x00];
    packet.extend_from_slice(&[0x60, 0x00, 0x00, 0x00, 0x00, 0x14, 0x06, 0x40]);
    packet.extend_from_slice(&[
        0x20, 0x01, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x01,
    ]);
    packet.extend_from_slice(&[
        0x20, 0x01, 0x0d, 0xb8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
        0x00, 0x02,
    ]);
    let IpPacket::Ipv6(ip_data) = parse_packet(&packet) else {
        panic!("Expected NULL datalink IPv6 packet");
    };
    assert_eq!(ip_data[0], 0x60); // version=6
    assert_eq!(ip_data.len(), 40); // IPv6 header (40 bytes total)
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net-tls/src/observable.rs | huginn-net-tls/src/observable.rs | use crate::tls::{Ja4Payload, TlsVersion};
/// Observable TLS Client signals
#[derive(Debug, Clone)]
pub struct ObservableTlsClient {
/// TLS version from ClientHello
pub version: TlsVersion,
/// Server Name Indication (SNI) if present
pub sni: Option<String>,
/// Application-Layer Protocol Negotiation (ALPN) if present
pub alpn: Option<String>,
/// Cipher suites from ClientHello
pub cipher_suites: Vec<u16>,
/// Extensions from ClientHello
pub extensions: Vec<u16>,
/// Signature algorithms from extensions
pub signature_algorithms: Vec<u16>,
/// Elliptic curves from extensions
pub elliptic_curves: Vec<u16>,
/// Generated JA4 fingerprint from ClientHello
pub ja4: Ja4Payload,
/// Generated JA4 fingerprint from original ClientHello
pub ja4_original: Ja4Payload,
}
/// Result of TLS packet processing
#[derive(Debug)]
pub struct ObservableTlsPackage {
    /// `None` when the packet carried no parseable TLS ClientHello.
    pub tls_client: Option<ObservableTlsClient>,
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net-tls/src/tls.rs | huginn-net-tls/src/tls.rs | use sha2::{Digest, Sha256};
use std::fmt::{self};
/// TLS version for fingerprinting
/// Includes legacy SSL versions for complete JA4 specification compatibility.
/// Note: SSL 2.0 is not supported by tls-parser (too legacy/vulnerable)
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub enum TlsVersion {
    /// TLS 1.3 — rendered as "13" in JA4.
    V1_3,
    /// TLS 1.2 — rendered as "12" in JA4.
    V1_2,
    /// TLS 1.1 — rendered as "11" in JA4.
    V1_1,
    /// TLS 1.0 — rendered as "10" in JA4.
    V1_0,
    /// SSL 3.0 — rendered as "s3" in JA4.
    Ssl3_0,
    /// SSL 2.0 — rendered as "s2" in JA4; kept for spec completeness even
    /// though tls-parser cannot produce it (see note above).
    Ssl2_0,
    /// Any other wire value; rendered as "00" in JA4. The raw u16 is retained
    /// for diagnostics.
    Unknown(u16),
}
impl fmt::Display for TlsVersion {
    /// Renders the two-character JA4 version code for this TLS/SSL version.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let code = match self {
            TlsVersion::V1_3 => "13",
            TlsVersion::V1_2 => "12",
            TlsVersion::V1_1 => "11",
            TlsVersion::V1_0 => "10",
            TlsVersion::Ssl3_0 => "s3",
            TlsVersion::Ssl2_0 => "s2",
            TlsVersion::Unknown(_) => "00",
        };
        f.write_str(code)
    }
}
/// JA4 Fingerprint - sorted/unsorted (original)
#[derive(Debug, Clone, PartialEq)]
pub enum Ja4Fingerprint {
    /// Canonical JA4: ciphers/extensions sorted before hashing ("ja4").
    Sorted(String),
    /// Original-order JA4: values kept as seen on the wire ("ja4_o").
    Unsorted(String),
}
impl fmt::Display for Ja4Fingerprint {
    /// Writes the fingerprint string; both variants display identically.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let (Ja4Fingerprint::Sorted(text) | Ja4Fingerprint::Unsorted(text)) = self;
        f.write_str(text)
    }
}
impl Ja4Fingerprint {
    /// Field name for this variant: "ja4" for sorted, "ja4_o" for original order.
    pub fn variant_name(&self) -> &'static str {
        if matches!(self, Ja4Fingerprint::Sorted(_)) {
            "ja4"
        } else {
            "ja4_o"
        }
    }

    /// Borrows the fingerprint string regardless of variant.
    pub fn value(&self) -> &str {
        let (Ja4Fingerprint::Sorted(text) | Ja4Fingerprint::Unsorted(text)) = self;
        text
    }
}
/// JA4 Raw Fingerprint (full version) - sorted/unsorted (original)
#[derive(Debug, Clone, PartialEq)]
pub enum Ja4RawFingerprint {
    /// Canonical raw JA4 with sorted values ("ja4_r").
    Sorted(String),
    /// Original-order raw JA4 ("ja4_ro").
    Unsorted(String),
}
impl fmt::Display for Ja4RawFingerprint {
    /// Writes the raw fingerprint string; both variants display identically.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let (Ja4RawFingerprint::Sorted(text) | Ja4RawFingerprint::Unsorted(text)) = self;
        f.write_str(text)
    }
}
impl Ja4RawFingerprint {
    /// Field name for this variant: "ja4_r" for sorted, "ja4_ro" for original order.
    pub fn variant_name(&self) -> &'static str {
        if matches!(self, Ja4RawFingerprint::Sorted(_)) {
            "ja4_r"
        } else {
            "ja4_ro"
        }
    }

    /// Borrows the raw fingerprint string regardless of variant.
    pub fn value(&self) -> &str {
        let (Ja4RawFingerprint::Sorted(text) | Ja4RawFingerprint::Unsorted(text)) = self;
        text
    }
}
/// JA4 Payload structure following official FoxIO specification
/// Uses elegant sorted/unsorted enums like the original rustica_tls implementation
#[derive(Debug, Clone, PartialEq)]
pub struct Ja4Payload {
    /// JA4_a: TLS version + SNI + cipher count + extension count + ALPN
    pub ja4_a: String,
    /// JA4_b: Cipher suites (sorted or original order), comma-separated hex
    pub ja4_b: String,
    /// JA4_c: Extensions + signature algorithms (sorted or original order)
    pub ja4_c: String,
    /// JA4 fingerprint: `ja4_a` joined with 12-char hashes of `ja4_b`/`ja4_c`
    pub full: Ja4Fingerprint,
    /// JA4 raw fingerprint: `ja4_a` joined with the unhashed `ja4_b`/`ja4_c`
    pub raw: Ja4RawFingerprint,
}
/// See <https://datatracker.ietf.org/doc/html/draft-davidben-tls-grease-01#page-5>
///
/// GREASE (RFC 8701) reserved values: every 0xNaNa with both bytes equal.
/// These are excluded from JA4 cipher/extension lists and counts.
pub const TLS_GREASE_VALUES: [u16; 16] = [
    0x0a0a, 0x1a1a, 0x2a2a, 0x3a3a, 0x4a4a, 0x5a5a, 0x6a6a, 0x7a7a, 0x8a8a, 0x9a9a, 0xaaaa, 0xbaba,
    0xcaca, 0xdada, 0xeaea, 0xfafa,
];
/// Check if a value is a GREASE value according to RFC 8701
fn is_grease_value(value: u16) -> bool {
    TLS_GREASE_VALUES.iter().any(|&grease| grease == value)
}
/// Filter out GREASE values from a list of u16 values, preserving order.
fn filter_grease_values(values: &[u16]) -> Vec<u16> {
    let mut kept = values.to_vec();
    kept.retain(|&value| !is_grease_value(value));
    kept
}
/// TLS ClientHello Signature
///
/// Per-field notes state which JA4 component (a/b/c) the value feeds into.
/// Fields are stored as extracted; `extract_tls_signature_from_client_hello`
/// strips GREASE values before constructing this type.
#[derive(Debug, Clone, PartialEq)]
pub struct Signature {
    /// TLS version (used in JA4_a)
    pub version: TlsVersion,
    /// Cipher suites (used in JA4_b)
    pub cipher_suites: Vec<u16>,
    /// Extensions (used in JA4_c)
    pub extensions: Vec<u16>,
    /// Elliptic curves (parsed for completeness, not used in JA4)
    pub elliptic_curves: Vec<u16>,
    /// Elliptic curve point formats (parsed for completeness, not used in JA4)
    pub elliptic_curve_point_formats: Vec<u8>,
    /// Signature algorithms (used in JA4_c)
    pub signature_algorithms: Vec<u16>,
    /// Server Name Indication (used in JA4_a)
    pub sni: Option<String>,
    /// Application-Layer Protocol Negotiation (used in JA4_a)
    pub alpn: Option<String>,
}
/// Extract first and last characters from ALPN string, replacing non-ASCII with '9'.
///
/// An empty string yields `('0', '0')`; a single-character string yields
/// `(first, '0')`, matching the JA4 placeholder convention.
pub fn first_last_alpn(s: &str) -> (char, char) {
    fn sanitize(c: char) -> char {
        if c.is_ascii() {
            c
        } else {
            '9'
        }
    }
    let mut chars = s.chars();
    let first = match chars.next() {
        Some(c) => sanitize(c),
        None => '0',
    };
    let last = match chars.next_back() {
        Some(c) => sanitize(c),
        None => '0',
    };
    if s.len() == 1 {
        (first, '0')
    } else {
        (first, last)
    }
}
/// Generate 12-character hash (first 12 chars of the lowercase SHA256 hex digest).
///
/// Truncates the hex string in place, avoiding the extra allocation of the
/// previous slice-then-`to_string` approach; `truncate` is also a safe no-op
/// (rather than a panic) if the string were ever shorter than 12 characters.
pub fn hash12(input: &str) -> String {
    let mut hex = format!("{:x}", Sha256::digest(input.as_bytes()));
    hex.truncate(12);
    hex
}
impl Signature {
    /// Generate JA4 fingerprint according to official FoxIO specification
    /// Returns sorted version by default
    pub fn generate_ja4(&self) -> Ja4Payload {
        self.generate_ja4_with_order(false)
    }
    /// Generate JA4 fingerprint with original order (unsorted)
    pub fn generate_ja4_original(&self) -> Ja4Payload {
        self.generate_ja4_with_order(true)
    }
    /// Generate JA4 fingerprint with specified order
    /// original_order: true for unsorted (original), false for sorted
    pub fn generate_ja4_with_order(&self, original_order: bool) -> Ja4Payload {
        // Filter out GREASE values from cipher suites for JA4_b and JA4_c processing
        let filtered_ciphers = filter_grease_values(&self.cipher_suites);
        let filtered_extensions = filter_grease_values(&self.extensions);
        let filtered_sig_algs = filter_grease_values(&self.signature_algorithms);
        // Protocol marker (always 't' for TLS, 'q' for QUIC)
        let protocol = "t";
        // TLS version (two-character JA4 code via Display)
        let tls_version_str = format!("{}", self.version);
        // SNI indicator: 'd' if SNI present, 'i' if not
        let sni_indicator = if self.sni.is_some() { "d" } else { "i" };
        // Cipher count in 2-digit decimal (max 99), taken from self.cipher_suites as stored.
        // NOTE(review): extract_tls_signature_from_client_hello already strips GREASE when
        // building Signature, so this count normally excludes GREASE (as the JA4 spec
        // requires) — confirm for any other construction path of `Signature`.
        let cipher_count = format!("{:02}", self.cipher_suites.len().min(99));
        // Extension count in 2-digit decimal (max 99); same caveat as the cipher count.
        let extension_count = format!("{:02}", self.extensions.len().min(99));
        // ALPN first and last characters ('0','0' placeholder when absent)
        let (alpn_first, alpn_last) = match &self.alpn {
            Some(alpn) => first_last_alpn(alpn),
            None => ('0', '0'),
        };
        // JA4_a format: protocol + version + sni + cipher_count + extension_count + alpn_first + alpn_last
        let ja4_a = format!(
            "{protocol}{tls_version_str}{sni_indicator}{cipher_count}{extension_count}{alpn_first}{alpn_last}"
        );
        // JA4_b: Cipher suites (sorted or original order, comma-separated, 4-digit hex) - GREASE filtered
        let mut ciphers_for_b = filtered_ciphers;
        if !original_order {
            ciphers_for_b.sort_unstable();
        }
        let ja4_b_raw = ciphers_for_b
            .iter()
            .map(|c| format!("{c:04x}"))
            .collect::<Vec<String>>()
            .join(",");
        // JA4_c: Extensions (sorted or original order, comma-separated, 4-digit hex) + "_" + signature algorithms
        let mut extensions_for_c = filtered_extensions;
        // For sorted version: Remove SNI (0x0000) and ALPN (0x0010) from extensions AND sort
        // For original version: Keep SNI/ALPN and preserve original order
        if !original_order {
            extensions_for_c.retain(|&ext| ext != 0x0000 && ext != 0x0010);
            extensions_for_c.sort_unstable();
        }
        let extensions_str = extensions_for_c
            .iter()
            .map(|e| format!("{e:04x}"))
            .collect::<Vec<String>>()
            .join(",");
        // Signature algorithms are NOT sorted according to the official spec
        // But GREASE values are filtered
        let sig_algs_str = filtered_sig_algs
            .iter()
            .map(|s| format!("{s:04x}"))
            .collect::<Vec<String>>()
            .join(",");
        // According to the specification, "if there are no signature algorithms in the
        // Hello packet, then the string ends without an underscore".
        let ja4_c_raw = if sig_algs_str.is_empty() {
            extensions_str
        } else if extensions_str.is_empty() {
            sig_algs_str
        } else {
            format!("{extensions_str}_{sig_algs_str}")
        };
        // Generate hashes for JA4_b and JA4_c (first 12 characters of SHA256)
        let ja4_b_hash = hash12(&ja4_b_raw);
        let ja4_c_hash = hash12(&ja4_c_raw);
        // JA4 hashed: ja4_a + "_" + ja4_b_hash + "_" + ja4_c_hash
        let ja4_hashed = format!("{ja4_a}_{ja4_b_hash}_{ja4_c_hash}");
        // JA4 raw: ja4_a + "_" + ja4_b_raw + "_" + ja4_c_raw
        let ja4_raw_full = format!("{ja4_a}_{ja4_b_raw}_{ja4_c_raw}");
        // Create the appropriate enum variants based on order
        let ja4_fingerprint = if original_order {
            Ja4Fingerprint::Unsorted(ja4_hashed)
        } else {
            Ja4Fingerprint::Sorted(ja4_hashed)
        };
        let ja4_raw_fingerprint = if original_order {
            Ja4RawFingerprint::Unsorted(ja4_raw_full)
        } else {
            Ja4RawFingerprint::Sorted(ja4_raw_full)
        };
        Ja4Payload {
            ja4_a,
            ja4_b: ja4_b_raw,
            ja4_c: ja4_c_raw,
            full: ja4_fingerprint,
            raw: ja4_raw_fingerprint,
        }
    }
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net-tls/src/tls_process.rs | huginn-net-tls/src/tls_process.rs | use crate::error::HuginnNetTlsError;
use crate::observable::ObservableTlsClient;
use crate::observable::ObservableTlsPackage;
use crate::tls::{Signature, TlsVersion, TLS_GREASE_VALUES};
use pnet::packet::ip::IpNextHeaderProtocols;
use pnet::packet::ipv4::Ipv4Packet;
use pnet::packet::ipv6::Ipv6Packet;
use pnet::packet::tcp::TcpPacket;
use pnet::packet::Packet;
use tls_parser::{
parse_tls_extensions, parse_tls_plaintext, TlsClientHelloContents, TlsExtension,
TlsExtensionType, TlsMessage, TlsMessageHandshake,
};
use tracing::debug;
/// Process an IPv4 packet, extracting TLS client signals from its TCP payload.
///
/// # Errors
/// Returns `UnsupportedProtocol` when the next-level protocol is not TCP.
pub fn process_tls_ipv4(packet: &Ipv4Packet) -> Result<ObservableTlsPackage, HuginnNetTlsError> {
    if packet.get_next_level_protocol() != IpNextHeaderProtocols::Tcp {
        return Err(HuginnNetTlsError::UnsupportedProtocol("IPv4".to_string()));
    }
    // A payload too short to be a TCP segment yields "no TLS observed", not an error.
    match TcpPacket::new(packet.payload()) {
        Some(tcp) => process_tls_tcp(&tcp),
        None => Ok(ObservableTlsPackage { tls_client: None }),
    }
}
/// Process an IPv6 packet, extracting TLS client signals from its TCP payload.
///
/// # Errors
/// Returns `UnsupportedProtocol` when the next header is not TCP.
pub fn process_tls_ipv6(packet: &Ipv6Packet) -> Result<ObservableTlsPackage, HuginnNetTlsError> {
    if packet.get_next_header() != IpNextHeaderProtocols::Tcp {
        return Err(HuginnNetTlsError::UnsupportedProtocol("IPv6".to_string()));
    }
    // A payload too short to be a TCP segment yields "no TLS observed", not an error.
    match TcpPacket::new(packet.payload()) {
        Some(tcp) => process_tls_tcp(&tcp),
        None => Ok(ObservableTlsPackage { tls_client: None }),
    }
}
/// Inspect a TCP segment and, if it looks like a TLS ClientHello, build the
/// observable TLS client signals (including both JA4 variants).
///
/// Parse failures are swallowed: the result is `tls_client: None`, never `Err`.
pub fn process_tls_tcp(tcp: &TcpPacket) -> Result<ObservableTlsPackage, HuginnNetTlsError> {
    let payload = tcp.payload();
    if !is_tls_traffic(payload) {
        return Ok(ObservableTlsPackage { tls_client: None });
    }
    match parse_tls_client_hello(payload) {
        Ok(signature) => {
            // Compute both fingerprints before moving fields out of the signature.
            let ja4 = signature.generate_ja4();
            let ja4_original = signature.generate_ja4_original();
            Ok(ObservableTlsPackage {
                tls_client: Some(ObservableTlsClient {
                    version: signature.version,
                    sni: signature.sni,
                    alpn: signature.alpn,
                    cipher_suites: signature.cipher_suites,
                    extensions: signature.extensions,
                    signature_algorithms: signature.signature_algorithms,
                    elliptic_curves: signature.elliptic_curves,
                    ja4,
                    ja4_original,
                }),
            })
        }
        // Not a parseable ClientHello: report "nothing observed" rather than an error.
        Err(_) => Ok(ObservableTlsPackage { tls_client: None }),
    }
}
/// Detect TLS traffic based on packet content only
/// This is more reliable than port-based detection since TLS can run on any port
#[inline(always)]
pub fn is_tls_traffic(payload: &[u8]) -> bool {
    // A TLS record starts with content type 0x16 (Handshake) and a plausible
    // protocol version (SSL 3.0 .. TLS 1.3) in the next two bytes; require the
    // full 5-byte record header to be present.
    match payload {
        [0x16, major, minor, ..] if payload.len() >= 5 => {
            let version = u16::from_be_bytes([*major, *minor]);
            (0x0300..=0x0304).contains(&version)
        }
        _ => false,
    }
}
pub fn parse_tls_client_hello(data: &[u8]) -> Result<Signature, HuginnNetTlsError> {
match parse_tls_plaintext(data) {
Ok((_remaining, tls_record)) => {
for message in tls_record.msg.iter() {
if let TlsMessage::Handshake(TlsMessageHandshake::ClientHello(client_hello)) =
message
{
return extract_tls_signature_from_client_hello(client_hello);
}
}
Err(HuginnNetTlsError::Parse("No ClientHello found in TLS record".to_string()))
}
Err(e) => Err(HuginnNetTlsError::Parse(format!("TLS parsing failed: {e:?}"))),
}
}
/// Parse TLS ClientHello and extract JA4 fingerprint string directly
///
/// Convenience wrapper around [`parse_tls_client_hello`] that returns only the
/// hashed, sorted JA4 fingerprint for the given raw ClientHello bytes.
///
/// # Parameters
/// - `data`: Raw TLS ClientHello bytes
///
/// # Returns
/// - `Some(String)` containing the JA4 fingerprint if parsing succeeds
/// - `None` if parsing fails or no ClientHello is found
///
/// # Example
/// ```no_run
/// use huginn_net_tls::tls_process::parse_tls_client_hello_ja4;
///
/// let client_hello_bytes = b"\x16\x03\x01\x00\x4a...";
/// if let Some(ja4) = parse_tls_client_hello_ja4(client_hello_bytes) {
///     println!("JA4 fingerprint: {}", ja4);
/// }
/// ```
#[must_use]
pub fn parse_tls_client_hello_ja4(data: &[u8]) -> Option<String> {
    let signature = parse_tls_client_hello(data).ok()?;
    Some(signature.generate_ja4().full.value().to_string())
}
/// Build a [`Signature`] from a parsed ClientHello.
///
/// GREASE cipher suites and extension types (RFC 8701) are dropped here, so
/// the resulting `Signature` carries only real values. Only the first entry
/// of the SNI and ALPN lists is kept. A ClientHello without extensions still
/// yields a valid `Signature` with empty extension-derived fields.
pub fn extract_tls_signature_from_client_hello(
    client_hello: &TlsClientHelloContents,
) -> Result<Signature, HuginnNetTlsError> {
    let cipher_suites: Vec<u16> = client_hello
        .ciphers
        .iter()
        .map(|c| c.0)
        .filter(|&cipher| !TLS_GREASE_VALUES.contains(&cipher))
        .collect();
    let mut extensions = Vec::new();
    let mut sni = None;
    let mut alpn = None;
    let mut signature_algorithms = Vec::new();
    let mut elliptic_curves = Vec::new();
    let mut elliptic_curve_point_formats = Vec::new();
    // Parse extensions if present - if not present, we still generate JA4 with empty extension fields
    if let Some(ext_data) = &client_hello.ext {
        match parse_tls_extensions(ext_data) {
            Ok((_remaining, parsed_extensions)) => {
                for extension in &parsed_extensions {
                    let ext_type: u16 = TlsExtensionType::from(extension).into();
                    // Filter GREASE extensions (keeps JA4 extension list/count spec-compliant)
                    if !TLS_GREASE_VALUES.contains(&ext_type) {
                        extensions.push(ext_type);
                    }
                    match extension {
                        TlsExtension::SNI(sni_list) => {
                            // Only the first (and in practice only) SNI entry is used.
                            if let Some((_, hostname)) = sni_list.first() {
                                sni = std::str::from_utf8(hostname).ok().map(str::to_owned);
                            }
                        }
                        TlsExtension::ALPN(alpn_list) => {
                            // First ALPN protocol is the client's most-preferred one.
                            if let Some(protocol) = alpn_list.first() {
                                alpn = std::str::from_utf8(protocol).ok().map(str::to_owned);
                            }
                        }
                        TlsExtension::SignatureAlgorithms(sig_algs) => {
                            signature_algorithms = sig_algs.clone();
                        }
                        TlsExtension::EllipticCurves(curves) => {
                            elliptic_curves = curves.iter().map(|c| c.0).collect();
                        }
                        TlsExtension::EcPointFormats(formats) => {
                            elliptic_curve_point_formats = formats.to_vec();
                        }
                        _ => {}
                    }
                }
            }
            Err(e) => {
                // Malformed extensions are tolerated: JA4 is still produced from what we have.
                debug!("Failed to parse TLS extensions: {:?}", e);
            }
        }
    }
    let version = determine_tls_version(&client_hello.version, &extensions);
    Ok(Signature {
        version,
        cipher_suites,
        extensions,
        elliptic_curves,
        elliptic_curve_point_formats,
        signature_algorithms,
        sni,
        alpn,
    })
}
/// Resolve the effective TLS version of a ClientHello.
///
/// TLS 1.3 hides behind a legacy 1.2 version field and advertises itself via
/// the `supported_versions` extension, so that extension takes precedence over
/// the legacy version field.
pub fn determine_tls_version(
    legacy_version: &tls_parser::TlsVersion,
    extensions: &[u16],
) -> TlsVersion {
    let supported_versions: u16 = TlsExtensionType::SupportedVersions.into();
    if extensions.contains(&supported_versions) {
        return TlsVersion::V1_3;
    }
    // Fall back to the legacy version from the ClientHello.
    // Note: SSL 2.0 is not supported by tls-parser (too legacy/vulnerable)
    match *legacy_version {
        tls_parser::TlsVersion::Tls13 => TlsVersion::V1_3,
        tls_parser::TlsVersion::Tls12 => TlsVersion::V1_2,
        tls_parser::TlsVersion::Tls11 => TlsVersion::V1_1,
        tls_parser::TlsVersion::Tls10 => TlsVersion::V1_0,
        tls_parser::TlsVersion::Ssl30 => TlsVersion::Ssl3_0,
        other => {
            debug!("Unknown/unsupported TLS version {:?}, defaulting to TLS 1.2", other);
            TlsVersion::V1_2
        }
    }
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net-tls/src/lib.rs | huginn-net-tls/src/lib.rs | pub mod error;
pub mod filter;
pub mod observable;
pub mod output;
pub mod packet_parser;
pub mod parallel;
pub mod process;
pub mod raw_filter;
pub mod tls;
pub mod tls_client_hello_reader;
pub mod tls_process;
// Re-exports
pub use error::*;
pub use filter::*;
pub use observable::*;
pub use output::*;
pub use parallel::{DispatchResult, PoolStats, WorkerPool, WorkerStats};
pub use process::*;
pub use tls::*;
pub use tls_client_hello_reader::TlsClientHelloReader;
pub use tls_process::{
parse_tls_client_hello, parse_tls_client_hello_ja4, process_tls_ipv4, process_tls_ipv6,
};
use crate::packet_parser::{parse_packet, IpPacket};
use pcap_file::pcap::PcapReader;
use pnet::datalink::{self, Channel, Config};
use std::fs::File;
use std::sync::atomic::{AtomicBool, Ordering};
use std::sync::mpsc::Sender;
use std::sync::Arc;
use tracing::{debug, error};
/// Configuration for parallel processing
///
/// Controls the behavior of worker threads in parallel mode.
/// Populated only via `HuginnNetTls::with_config`; absent in sequential mode.
#[derive(Debug, Clone)]
struct ParallelConfig {
    /// Number of worker threads to spawn
    num_workers: usize,
    /// Size of packet queue per worker (affects memory usage and backpressure)
    queue_size: usize,
    /// Maximum packets to process in one batch before checking for new work
    /// Higher = better throughput, lower = better latency (typical: 16-64)
    batch_size: usize,
    /// Worker receive timeout in milliseconds
    /// Lower = faster shutdown, higher = better throughput (typical: 5-50)
    timeout_ms: u64,
}
/// A TLS-focused passive fingerprinting analyzer using JA4 methodology.
///
/// The `HuginnNetTls` struct handles TLS packet analysis and JA4 fingerprinting
/// following the official FoxIO specification.
pub struct HuginnNetTls {
    // Present only when parallel mode was requested via `with_config`.
    parallel_config: Option<ParallelConfig>,
    // Created lazily on the first analyze_* / init_pool call in parallel mode.
    worker_pool: Option<Arc<WorkerPool>>,
    // Optional packet pre-filter applied before any parsing work.
    filter_config: Option<FilterConfig>,
}
impl Default for HuginnNetTls {
fn default() -> Self {
Self::new()
}
}
impl HuginnNetTls {
    /// Creates a new instance of `HuginnNetTls` in sequential mode (single-threaded).
    ///
    /// # Returns
    /// A new `HuginnNetTls` instance ready for TLS analysis.
    pub fn new() -> Self {
        Self { parallel_config: None, worker_pool: None, filter_config: None }
    }

    /// Configure packet filtering (builder pattern)
    pub fn with_filter(mut self, config: FilterConfig) -> Self {
        self.filter_config = Some(config);
        self
    }

    /// Creates a new instance with full parallel configuration.
    ///
    /// # Parameters
    /// - `num_workers`: Number of worker threads (recommended: 2-4 on 8-core systems)
    /// - `queue_size`: Size of packet queue per worker (typical: 100-200)
    /// - `batch_size`: Maximum packets to process in one batch (typical: 16-64, recommended: 32)
    /// - `timeout_ms`: Worker receive timeout in milliseconds (typical: 5-50, recommended: 10)
    ///
    /// # Configuration Guide
    ///
    /// ## batch_size
    /// - **Low (8-16)**: Lower latency, more responsive, higher overhead
    /// - **Medium (32)**: Balanced throughput and latency *(recommended)*
    /// - **High (64-128)**: Maximum throughput, higher latency
    ///
    /// ## timeout_ms
    /// - **Low (5-10ms)**: Fast shutdown, slightly lower throughput *(recommended: 10)*
    /// - **Medium (20-50ms)**: Better throughput, slower shutdown
    /// - **High (100ms+)**: Maximum throughput, slow shutdown
    ///
    /// # Example
    /// ```rust,no_run
    /// use huginn_net_tls::HuginnNetTls;
    ///
    /// // Balanced configuration (recommended)
    /// let tls = HuginnNetTls::with_config(4, 100, 32, 10);
    ///
    /// // Low latency
    /// let low_latency = HuginnNetTls::with_config(2, 100, 8, 5);
    ///
    /// // High throughput
    /// let high_throughput = HuginnNetTls::with_config(4, 200, 64, 20);
    /// ```
    ///
    /// # Returns
    /// A new `HuginnNetTls` instance with parallel configuration.
    pub fn with_config(
        num_workers: usize,
        queue_size: usize,
        batch_size: usize,
        timeout_ms: u64,
    ) -> Self {
        Self {
            parallel_config: Some(ParallelConfig {
                num_workers,
                queue_size,
                batch_size,
                timeout_ms,
            }),
            worker_pool: None,
            filter_config: None,
        }
    }

    /// Get worker pool statistics (only available in parallel mode, after analyze_* is called)
    ///
    /// # Returns
    /// `Some(PoolStats)` if parallel mode is active, `None` otherwise
    pub fn stats(&self) -> Option<PoolStats> {
        self.worker_pool.as_ref().map(|pool| pool.stats())
    }

    /// Get a reference to the worker pool (only available in parallel mode, after analyze_* is called)
    ///
    /// # Returns
    /// `Some(Arc<WorkerPool>)` if parallel mode is active, `None` otherwise
    pub fn worker_pool(&self) -> Option<Arc<WorkerPool>> {
        self.worker_pool.clone()
    }

    /// Initialize the worker pool (only for parallel mode, called automatically by analyze_*)
    ///
    /// This can be called explicitly to get the pool reference before starting analysis
    ///
    /// # Errors
    /// - If the worker pool creation fails.
    ///
    pub fn init_pool(&mut self, sender: Sender<TlsClientOutput>) -> Result<(), HuginnNetTlsError> {
        if let Some(config) = &self.parallel_config {
            // Idempotent: an already-created pool is kept as-is.
            if self.worker_pool.is_none() {
                let worker_pool = Arc::new(WorkerPool::new(
                    config.num_workers,
                    config.queue_size,
                    config.batch_size,
                    config.timeout_ms,
                    sender,
                    self.filter_config.clone(),
                )?);
                self.worker_pool = Some(worker_pool);
            }
        }
        Ok(())
    }

    /// Drives `packet_fn` to exhaustion, routing each packet to the parallel or
    /// sequential path depending on whether a `ParallelConfig` is present.
    fn process_with<F>(
        &mut self,
        packet_fn: F,
        sender: Sender<TlsClientOutput>,
        cancel_signal: Option<Arc<AtomicBool>>,
    ) -> Result<(), HuginnNetTlsError>
    where
        F: FnMut() -> Option<Result<Vec<u8>, HuginnNetTlsError>>,
    {
        if self.parallel_config.is_some() {
            self.process_parallel(packet_fn, sender, cancel_signal)
        } else {
            self.process_sequential(packet_fn, sender, cancel_signal)
        }
    }

    /// Parallel path: lazily builds the worker pool (if `init_pool` was not
    /// called first) and dispatches raw packets to it until `packet_fn`
    /// returns `None` or the cancel signal fires.
    fn process_parallel<F>(
        &mut self,
        mut packet_fn: F,
        sender: Sender<TlsClientOutput>,
        cancel_signal: Option<Arc<AtomicBool>>,
    ) -> Result<(), HuginnNetTlsError>
    where
        F: FnMut() -> Option<Result<Vec<u8>, HuginnNetTlsError>>,
    {
        let config = self
            .parallel_config
            .as_ref()
            .ok_or_else(|| HuginnNetTlsError::Parse("Parallel config not found".to_string()))?;
        if self.worker_pool.is_none() {
            let worker_pool = Arc::new(WorkerPool::new(
                config.num_workers,
                config.queue_size,
                config.batch_size,
                config.timeout_ms,
                sender,
                self.filter_config.clone(),
            )?);
            self.worker_pool = Some(worker_pool);
        }
        let worker_pool = self
            .worker_pool
            .as_ref()
            .ok_or_else(|| HuginnNetTlsError::Parse("Worker pool not initialized".to_string()))?
            .clone();
        while let Some(packet_result) = packet_fn() {
            if let Some(ref cancel) = cancel_signal {
                if cancel.load(Ordering::Relaxed) {
                    debug!("Cancellation signal received, stopping packet processing");
                    break;
                }
            }
            match packet_result {
                Ok(packet) => {
                    // NOTE(review): dispatch result is ignored — presumably drop/backpressure
                    // accounting happens inside WorkerPool; confirm against parallel.rs.
                    let _ = worker_pool.dispatch(packet);
                }
                Err(e) => {
                    // A bad read is logged and skipped; the capture loop keeps going.
                    error!("Failed to read packet: {e}");
                }
            }
        }
        Ok(())
    }

    /// Sequential path: parses each packet inline on the calling thread and
    /// sends results through `sender`; stops when the receiver is dropped,
    /// `packet_fn` is exhausted, or the cancel signal fires.
    fn process_sequential<F>(
        &mut self,
        mut packet_fn: F,
        sender: Sender<TlsClientOutput>,
        cancel_signal: Option<Arc<AtomicBool>>,
    ) -> Result<(), HuginnNetTlsError>
    where
        F: FnMut() -> Option<Result<Vec<u8>, HuginnNetTlsError>>,
    {
        while let Some(packet_result) = packet_fn() {
            if let Some(ref cancel) = cancel_signal {
                if cancel.load(Ordering::Relaxed) {
                    debug!("Cancellation signal received, stopping packet processing");
                    break;
                }
            }
            match packet_result {
                Ok(packet) => match self.process_packet(&packet) {
                    Ok(Some(result)) => {
                        if sender.send(result).is_err() {
                            error!("Receiver dropped, stopping packet processing");
                            break;
                        }
                    }
                    Ok(None) => {
                        debug!("No TLS found, continuing packet processing");
                    }
                    Err(tls_error) => {
                        // Per-packet parse errors never abort the loop.
                        debug!("Skipping non-TLS packet: {tls_error}");
                    }
                },
                Err(e) => {
                    error!("Failed to read packet: {e}");
                }
            }
        }
        Ok(())
    }

    /// Captures and analyzes packets on the specified network interface.
    ///
    /// Sends `TlsClientOutput` through the provided channel.
    ///
    /// # Parameters
    /// - `interface_name`: The name of the network interface to analyze.
    /// - `sender`: A `Sender` to send `TlsClientOutput` objects back to the caller.
    /// - `cancel_signal`: Optional `Arc<AtomicBool>` to signal graceful shutdown.
    ///
    /// # Errors
    /// - If the network interface cannot be found or a channel cannot be created.
    pub fn analyze_network(
        &mut self,
        interface_name: &str,
        sender: Sender<TlsClientOutput>,
        cancel_signal: Option<Arc<AtomicBool>>,
    ) -> Result<(), HuginnNetTlsError> {
        let interfaces = datalink::interfaces();
        let interface = interfaces
            .into_iter()
            .find(|iface| iface.name == interface_name)
            .ok_or_else(|| {
                HuginnNetTlsError::Parse(format!(
                    "Could not find network interface: {interface_name}"
                ))
            })?;
        debug!("Using network interface: {}", interface.name);
        // Promiscuous mode so traffic not addressed to this host is also captured.
        let config = Config { promiscuous: true, ..Config::default() };
        let (_tx, mut rx) = match datalink::channel(&interface, config) {
            Ok(Channel::Ethernet(tx, rx)) => (tx, rx),
            Ok(_) => return Err(HuginnNetTlsError::Parse("Unhandled channel type".to_string())),
            Err(e) => {
                return Err(HuginnNetTlsError::Parse(format!("Unable to create channel: {e}")))
            }
        };
        self.process_with(
            move || match rx.next() {
                Ok(packet) => Some(Ok(packet.to_vec())),
                Err(e) => {
                    Some(Err(HuginnNetTlsError::Parse(format!("Error receiving packet: {e}"))))
                }
            },
            sender,
            cancel_signal,
        )
    }

    /// Analyzes packets from a PCAP file.
    ///
    /// # Parameters
    /// - `pcap_path`: The path to the PCAP file to analyze.
    /// - `sender`: A `Sender` to send `TlsClientOutput` objects back to the caller.
    /// - `cancel_signal`: Optional `Arc<AtomicBool>` to signal graceful shutdown.
    ///
    /// # Errors
    /// - If the PCAP file cannot be opened or read.
    pub fn analyze_pcap(
        &mut self,
        pcap_path: &str,
        sender: Sender<TlsClientOutput>,
        cancel_signal: Option<Arc<AtomicBool>>,
    ) -> Result<(), HuginnNetTlsError> {
        let file = File::open(pcap_path)
            .map_err(|e| HuginnNetTlsError::Parse(format!("Failed to open PCAP file: {e}")))?;
        let mut pcap_reader = PcapReader::new(file)
            .map_err(|e| HuginnNetTlsError::Parse(format!("Failed to create PCAP reader: {e}")))?;
        self.process_with(
            move || match pcap_reader.next_packet() {
                Some(Ok(packet)) => Some(Ok(packet.data.to_vec())),
                Some(Err(e)) => {
                    Some(Err(HuginnNetTlsError::Parse(format!("Error reading PCAP packet: {e}"))))
                }
                None => None,
            },
            sender,
            cancel_signal,
        )
    }

    /// Processes a single packet and extracts TLS information if present.
    ///
    /// Applies the optional raw-bytes filter first, then parses the datalink
    /// framing and hands the IP payload to the per-family processors.
    ///
    /// # Parameters
    /// - `packet`: The raw packet data.
    ///
    /// # Returns
    /// A `Result` containing an optional `TlsClientOutput` or an error.
    fn process_packet(
        &mut self,
        packet: &[u8],
    ) -> Result<Option<TlsClientOutput>, HuginnNetTlsError> {
        if let Some(ref filter) = self.filter_config {
            if !raw_filter::apply(packet, filter) {
                debug!("Filtered out packet before parsing");
                return Ok(None);
            }
        }
        match parse_packet(packet) {
            IpPacket::Ipv4(ipv4) => process_ipv4_packet(&ipv4),
            IpPacket::Ipv6(ipv6) => process_ipv6_packet(&ipv6),
            IpPacket::None => Ok(None),
        }
    }
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net-tls/src/process.rs | huginn-net-tls/src/process.rs | use crate::error::HuginnNetTlsError;
use crate::output::{IpPort, TlsClientOutput};
use crate::ObservableTlsClient;
use pnet::packet::ipv4::Ipv4Packet;
use pnet::packet::ipv6::Ipv6Packet;
use pnet::packet::tcp::TcpPacket;
use pnet::packet::Packet;
use std::net::IpAddr;
/// Packet-level observation extracted from a single TCP segment: the
/// connection endpoints plus the parsed TLS client data, when present.
#[derive(Clone)]
pub struct ObservablePackage {
    /// Source endpoint (IP address and TCP port) of the packet.
    pub source: IpPort,
    /// Destination endpoint (IP address and TCP port) of the packet.
    pub destination: IpPort,
    /// Parsed TLS client observation; `None` when the packet carries no
    /// usable TLS client data.
    pub tls_client: Option<ObservableTlsClient>,
}
pub fn process_ipv4_packet(
ipv4: &Ipv4Packet,
) -> std::result::Result<Option<TlsClientOutput>, HuginnNetTlsError> {
let observable_package = create_observable_package_ipv4(ipv4)?;
let tls_output = observable_package
.tls_client
.map(|observable_tls| TlsClientOutput {
source: IpPort::new(observable_package.source.ip, observable_package.source.port),
destination: IpPort::new(
observable_package.destination.ip,
observable_package.destination.port,
),
sig: observable_tls,
});
Ok(tls_output)
}
/// Builds the connection endpoints and TLS observation for an IPv4 packet.
///
/// # Errors
/// Returns a parse error when the IPv4 payload is not a valid TCP segment,
/// or when TLS processing of the packet fails.
fn create_observable_package_ipv4(
    ipv4: &Ipv4Packet,
) -> std::result::Result<ObservablePackage, HuginnNetTlsError> {
    let tcp = TcpPacket::new(ipv4.payload())
        .ok_or_else(|| HuginnNetTlsError::Parse("Invalid TCP packet".to_string()))?;
    let tls_package = crate::tls_process::process_tls_ipv4(ipv4)?;
    Ok(ObservablePackage {
        source: IpPort { ip: IpAddr::V4(ipv4.get_source()), port: tcp.get_source() },
        destination: IpPort {
            ip: IpAddr::V4(ipv4.get_destination()),
            port: tcp.get_destination(),
        },
        tls_client: tls_package.tls_client,
    })
}
pub fn process_ipv6_packet(
ipv6: &Ipv6Packet,
) -> std::result::Result<Option<TlsClientOutput>, HuginnNetTlsError> {
let observable_package = create_observable_package_ipv6(ipv6)?;
let tls_output = observable_package
.tls_client
.map(|observable_tls| TlsClientOutput {
source: IpPort::new(observable_package.source.ip, observable_package.source.port),
destination: IpPort::new(
observable_package.destination.ip,
observable_package.destination.port,
),
sig: observable_tls,
});
Ok(tls_output)
}
/// Builds the connection endpoints and TLS observation for an IPv6 packet.
///
/// # Errors
/// Returns a parse error when the IPv6 payload is not a valid TCP segment,
/// or when TLS processing of the packet fails.
fn create_observable_package_ipv6(
    ipv6: &Ipv6Packet,
) -> std::result::Result<ObservablePackage, HuginnNetTlsError> {
    let tcp = TcpPacket::new(ipv6.payload())
        .ok_or_else(|| HuginnNetTlsError::Parse("Invalid TCP packet".to_string()))?;
    let tls_package = crate::tls_process::process_tls_ipv6(ipv6)?;
    Ok(ObservablePackage {
        source: IpPort { ip: IpAddr::V6(ipv6.get_source()), port: tcp.get_source() },
        destination: IpPort {
            ip: IpAddr::V6(ipv6.get_destination()),
            port: tcp.get_destination(),
        },
        tls_client: tls_package.tls_client,
    })
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net-tls/src/filter.rs | huginn-net-tls/src/filter.rs | use pnet::ipnetwork::{IpNetwork, Ipv4Network, Ipv6Network};
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
/// Filter mode: Allow (allowlist) or Deny (denylist)
///
/// Used by `FilterConfig::should_process` to decide whether matching packets
/// are the only ones processed (`Allow`) or the ones rejected (`Deny`).
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub enum FilterMode {
    /// Allow only matching packets (allowlist mode)
    #[default]
    Allow,
    /// Deny matching packets (denylist mode)
    Deny,
}
/// Port filter configuration
///
/// Filters packets based on TCP source and/or destination ports.
/// Supports individual ports, ranges, and lists.
///
/// # Examples
///
/// ```rust
/// use huginn_net_tls::PortFilter;
///
/// // Single port
/// let filter = PortFilter::new().destination(443);
///
/// // Multiple ports
/// let filter = PortFilter::new().destination_list(vec![80, 443, 8080]);
///
/// // Port range
/// let filter = PortFilter::new().destination_range(8000..9000);
/// ```
#[derive(Debug, Clone, Default)]
pub struct PortFilter {
/// Source ports to match
pub source_ports: Vec<u16>,
/// Destination ports to match
pub destination_ports: Vec<u16>,
/// Source port ranges (inclusive)
pub source_ranges: Vec<(u16, u16)>,
/// Destination port ranges (inclusive)
pub destination_ranges: Vec<(u16, u16)>,
/// Match ANY port (source OR destination)?
pub match_any: bool,
}
impl PortFilter {
/// Create a new empty port filter
pub fn new() -> Self {
Self::default()
}
/// Add a destination port
///
/// # Examples
///
/// ```rust
/// use huginn_net_tls::PortFilter;
///
/// let filter = PortFilter::new().destination(443);
/// ```
pub fn destination(mut self, port: u16) -> Self {
self.destination_ports.push(port);
self
}
/// Add a source port
///
/// # Examples
///
/// ```rust
/// use huginn_net_tls::PortFilter;
///
/// let filter = PortFilter::new().source(12345);
/// ```
pub fn source(mut self, port: u16) -> Self {
self.source_ports.push(port);
self
}
/// Add a destination port range (inclusive)
///
/// # Examples
///
/// ```rust
/// use huginn_net_tls::PortFilter;
///
/// let filter = PortFilter::new().destination_range(8000..9000);
/// // Matches ports 8000 through 8999
/// ```
pub fn destination_range(mut self, range: std::ops::Range<u16>) -> Self {
self.destination_ranges
.push((range.start, range.end.saturating_sub(1)));
self
}
/// Add a source port range (inclusive)
///
/// # Examples
///
/// ```rust
/// use huginn_net_tls::PortFilter;
///
/// let filter = PortFilter::new().source_range(10000..20000);
/// // Matches ports 10000 through 19999
/// ```
pub fn source_range(mut self, range: std::ops::Range<u16>) -> Self {
self.source_ranges
.push((range.start, range.end.saturating_sub(1)));
self
}
/// Add multiple destination ports
///
/// # Examples
///
/// ```rust
/// use huginn_net_tls::PortFilter;
///
/// let filter = PortFilter::new().destination_list(vec![80, 443, 8080, 8443]);
/// ```
pub fn destination_list(mut self, ports: Vec<u16>) -> Self {
self.destination_ports.extend(ports);
self
}
/// Add multiple source ports
///
/// # Examples
///
/// ```rust
/// use huginn_net_tls::PortFilter;
///
/// let filter = PortFilter::new().source_list(vec![12345, 54321, 9999]);
/// ```
pub fn source_list(mut self, ports: Vec<u16>) -> Self {
self.source_ports.extend(ports);
self
}
/// Match if ANY port matches (source OR destination)
///
/// By default, all specified filters must match. With `match_any()`,
/// the packet passes if either source OR destination matches.
pub fn any_port(mut self) -> Self {
self.match_any = true;
self
}
/// Check if packet matches port filter
///
/// # Returns
///
/// `true` if the packet matches the filter criteria
pub fn matches(&self, src_port: u16, dst_port: u16) -> bool {
if self.match_any {
let all_ports: Vec<u16> = self
.source_ports
.iter()
.chain(self.destination_ports.iter())
.copied()
.collect();
let all_ranges: Vec<(u16, u16)> = self
.source_ranges
.iter()
.chain(self.destination_ranges.iter())
.copied()
.collect();
let port_match = all_ports.contains(&src_port)
|| all_ports.contains(&dst_port)
|| all_ranges
.iter()
.any(|(start, end)| src_port >= *start && src_port <= *end)
|| all_ranges
.iter()
.any(|(start, end)| dst_port >= *start && dst_port <= *end);
port_match
} else {
let src_match = self.source_ports.contains(&src_port)
|| self
.source_ranges
.iter()
.any(|(start, end)| src_port >= *start && src_port <= *end);
let dst_match = self.destination_ports.contains(&dst_port)
|| self
.destination_ranges
.iter()
.any(|(start, end)| dst_port >= *start && dst_port <= *end);
let src_ok = self.source_ports.is_empty() && self.source_ranges.is_empty() || src_match;
let dst_ok = self.destination_ports.is_empty() && self.destination_ranges.is_empty()
|| dst_match;
src_ok && dst_ok
}
}
}
/// IP address filter configuration
///
/// Filters packets based on specific IPv4 or IPv6 addresses.
///
/// # Examples
///
/// ```rust
/// use huginn_net_tls::IpFilter;
///
/// let filter = IpFilter::new()
///     .allow("8.8.8.8")
///     .unwrap()
///     .allow("2001:4860:4860::8888")
///     .unwrap();
/// ```
#[derive(Debug, Clone, Default)]
pub struct IpFilter {
    /// IPv4 addresses to match
    pub ipv4_addresses: Vec<Ipv4Addr>,
    /// IPv6 addresses to match
    pub ipv6_addresses: Vec<Ipv6Addr>,
    /// Check source, destination, or both?
    pub check_source: bool,
    pub check_destination: bool,
}
impl IpFilter {
    /// Create a new IP filter that checks both source and destination by default
    pub fn new() -> Self {
        IpFilter {
            ipv4_addresses: Vec::new(),
            ipv6_addresses: Vec::new(),
            check_source: true,
            check_destination: true,
        }
    }
    /// Add an IP address (auto-detects IPv4/IPv6)
    ///
    /// # Errors
    ///
    /// Returns an error if the IP address string is invalid
    ///
    /// # Examples
    ///
    /// ```rust
    /// use huginn_net_tls::IpFilter;
    ///
    /// let filter = IpFilter::new()
    ///     .allow("192.168.1.1").unwrap()
    ///     .allow("2001:db8::1").unwrap();
    /// ```
    pub fn allow(mut self, ip: &str) -> Result<Self, String> {
        match ip.parse::<IpAddr>().map_err(|e| format!("Invalid IP: {e}"))? {
            IpAddr::V4(v4) => self.ipv4_addresses.push(v4),
            IpAddr::V6(v6) => self.ipv6_addresses.push(v6),
        }
        Ok(self)
    }
    /// Add multiple IP addresses
    ///
    /// # Errors
    ///
    /// Returns an error if any IP address string is invalid
    ///
    /// # Examples
    ///
    /// ```rust
    /// use huginn_net_tls::IpFilter;
    ///
    /// let filter = IpFilter::new()
    ///     .allow_list(vec!["8.8.8.8", "1.1.1.1", "2001:4860:4860::8888"])
    ///     .unwrap();
    /// ```
    pub fn allow_list(self, ips: Vec<&str>) -> Result<Self, String> {
        // Fold the builder through every address, stopping at the first error.
        ips.into_iter().try_fold(self, |filter, ip| filter.allow(ip))
    }
    /// Only check source addresses
    ///
    /// By default, both source and destination are checked.
    pub fn source_only(self) -> Self {
        Self { check_source: true, check_destination: false, ..self }
    }
    /// Only check destination addresses
    ///
    /// By default, both source and destination are checked.
    pub fn destination_only(self) -> Self {
        Self { check_source: false, check_destination: true, ..self }
    }
    /// Check if packet matches IP filter
    ///
    /// # Returns
    ///
    /// `true` if either source or destination IP matches (if enabled)
    pub fn matches(&self, src_ip: &IpAddr, dst_ip: &IpAddr) -> bool {
        // Membership test shared by both directions.
        let listed = |ip: &IpAddr| match ip {
            IpAddr::V4(v4) => self.ipv4_addresses.contains(v4),
            IpAddr::V6(v6) => self.ipv6_addresses.contains(v6),
        };
        (self.check_source && listed(src_ip)) || (self.check_destination && listed(dst_ip))
    }
}
/// Subnet filter configuration (CIDR notation)
///
/// Filters packets based on subnet membership using CIDR notation.
/// Supports both IPv4 and IPv6 subnets.
///
/// # Examples
///
/// ```rust
/// use huginn_net_tls::SubnetFilter;
///
/// // Allow only private networks
/// let filter = SubnetFilter::new()
///     .allow("192.168.0.0/16").unwrap()
///     .allow("10.0.0.0/8").unwrap();
///
/// // IPv6 subnet
/// let filter = SubnetFilter::new()
///     .allow("2001:db8::/32").unwrap();
/// ```
#[derive(Debug, Clone, Default)]
pub struct SubnetFilter {
    /// IPv4 subnets to match
    pub ipv4_subnets: Vec<Ipv4Network>,
    /// IPv6 subnets to match
    pub ipv6_subnets: Vec<Ipv6Network>,
    /// Check source, destination, or both?
    pub check_source: bool,
    pub check_destination: bool,
}
impl SubnetFilter {
    /// Create a new subnet filter that checks both source and destination by default
    pub fn new() -> Self {
        SubnetFilter {
            ipv4_subnets: Vec::new(),
            ipv6_subnets: Vec::new(),
            check_source: true,
            check_destination: true,
        }
    }
    /// Add a subnet in CIDR notation
    ///
    /// # Errors
    ///
    /// Returns an error if the CIDR notation is invalid
    ///
    /// # Examples
    ///
    /// ```rust
    /// use huginn_net_tls::SubnetFilter;
    ///
    /// let filter = SubnetFilter::new()
    ///     .allow("192.168.1.0/24").unwrap();
    /// ```
    pub fn allow(mut self, cidr: &str) -> Result<Self, String> {
        match cidr.parse::<IpNetwork>().map_err(|e| format!("Invalid CIDR: {e}"))? {
            IpNetwork::V4(v4) => self.ipv4_subnets.push(v4),
            IpNetwork::V6(v6) => self.ipv6_subnets.push(v6),
        }
        Ok(self)
    }
    /// Add multiple subnets
    ///
    /// # Errors
    ///
    /// Returns an error if any CIDR notation is invalid
    ///
    /// # Examples
    ///
    /// ```rust
    /// use huginn_net_tls::SubnetFilter;
    ///
    /// let filter = SubnetFilter::new()
    ///     .allow_list(vec!["192.168.0.0/16", "10.0.0.0/8", "172.16.0.0/12"])
    ///     .unwrap();
    /// ```
    pub fn allow_list(self, cidrs: Vec<&str>) -> Result<Self, String> {
        // Fold the builder through every CIDR, stopping at the first error.
        cidrs.into_iter().try_fold(self, |filter, cidr| filter.allow(cidr))
    }
    /// Only check source addresses
    ///
    /// By default, both source and destination are checked.
    pub fn source_only(self) -> Self {
        Self { check_source: true, check_destination: false, ..self }
    }
    /// Only check destination addresses
    ///
    /// By default, both source and destination are checked.
    pub fn destination_only(self) -> Self {
        Self { check_source: false, check_destination: true, ..self }
    }
    /// Check if packet matches subnet filter
    ///
    /// # Returns
    ///
    /// `true` if either source or destination IP is in any of the subnets (if enabled)
    pub fn matches(&self, src_ip: &IpAddr, dst_ip: &IpAddr) -> bool {
        // Subnet-membership test shared by both directions.
        let in_subnets = |ip: &IpAddr| match ip {
            IpAddr::V4(v4) => self.ipv4_subnets.iter().any(|net| net.contains(*v4)),
            IpAddr::V6(v6) => self.ipv6_subnets.iter().any(|net| net.contains(*v6)),
        };
        (self.check_source && in_subnets(src_ip))
            || (self.check_destination && in_subnets(dst_ip))
    }
}
/// Combined filter configuration
///
/// Combines port, IP, and subnet filters with a filter mode (Allow/Deny).
/// All enabled filters must pass for a packet to be processed.
///
/// # Examples
///
/// ```rust
/// use huginn_net_tls::{FilterConfig, FilterMode, PortFilter, SubnetFilter};
///
/// let filter = FilterConfig::new()
///     .mode(FilterMode::Allow)
///     .with_port_filter(PortFilter::new().destination(443))
///     .with_subnet_filter(
///         SubnetFilter::new()
///             .allow("192.168.0.0/16")
///             .unwrap()
///     );
/// ```
#[derive(Debug, Clone, Default)]
pub struct FilterConfig {
    pub port_filter: Option<PortFilter>,
    pub ip_filter: Option<IpFilter>,
    pub subnet_filter: Option<SubnetFilter>,
    pub mode: FilterMode,
}
impl FilterConfig {
    /// Create a new empty filter configuration
    pub fn new() -> Self {
        Self::default()
    }
    /// Set filter mode (Allow/Deny)
    ///
    /// # Examples
    ///
    /// ```
    /// use huginn_net_tls::{FilterConfig, FilterMode};
    ///
    /// // Allowlist mode (default) - only matching packets pass
    /// let filter = FilterConfig::new().mode(FilterMode::Allow);
    ///
    /// // Denylist mode - matching packets are blocked
    /// let filter = FilterConfig::new().mode(FilterMode::Deny);
    /// ```
    pub fn mode(self, mode: FilterMode) -> Self {
        Self { mode, ..self }
    }
    /// Add port filter
    ///
    /// # Examples
    ///
    /// ```rust
    /// use huginn_net_tls::{FilterConfig, PortFilter};
    ///
    /// let filter = FilterConfig::new()
    ///     .with_port_filter(PortFilter::new().destination(443));
    /// ```
    pub fn with_port_filter(self, filter: PortFilter) -> Self {
        Self { port_filter: Some(filter), ..self }
    }
    /// Add IP filter
    ///
    /// # Examples
    ///
    /// ```rust
    /// use huginn_net_tls::{FilterConfig, IpFilter};
    ///
    /// let filter = FilterConfig::new()
    ///     .with_ip_filter(
    ///         IpFilter::new()
    ///             .allow("8.8.8.8")
    ///             .unwrap()
    ///     );
    /// ```
    pub fn with_ip_filter(self, filter: IpFilter) -> Self {
        Self { ip_filter: Some(filter), ..self }
    }
    /// Add subnet filter
    ///
    /// # Examples
    ///
    /// ```rust
    /// use huginn_net_tls::{FilterConfig, SubnetFilter};
    ///
    /// let filter = FilterConfig::new()
    ///     .with_subnet_filter(
    ///         SubnetFilter::new()
    ///             .allow("192.168.0.0/16")
    ///             .unwrap()
    ///     );
    /// ```
    pub fn with_subnet_filter(self, filter: SubnetFilter) -> Self {
        Self { subnet_filter: Some(filter), ..self }
    }
    /// Check if packet should be processed based on filters (userspace filtering)
    ///
    /// This method performs filtering in userspace after packets reach the application.
    /// It applies the configured filters (port, IP, subnet) according to the
    /// filter mode (Allow/Deny).
    ///
    /// # Returns
    ///
    /// - `true`: Packet passes all filters (should be processed)
    /// - `false`: Packet blocked by filters (should be dropped)
    ///
    /// # Logic
    ///
    /// - If no filters are configured, all packets pass
    /// - In Allow mode: packet must match ALL configured filters
    /// - In Deny mode: packet must NOT match ALL configured filters
    pub fn should_process(
        &self,
        src_ip: &IpAddr,
        dst_ip: &IpAddr,
        src_port: u16,
        dst_port: u16,
    ) -> bool {
        let unconfigured = self.port_filter.is_none()
            && self.ip_filter.is_none()
            && self.subnet_filter.is_none();
        if unconfigured {
            return true;
        }
        // A filter that is not configured counts as matching, so `all_match`
        // is the conjunction over configured filters only.
        let all_match = self
            .port_filter
            .as_ref()
            .map_or(true, |f| f.matches(src_port, dst_port))
            && self.ip_filter.as_ref().map_or(true, |f| f.matches(src_ip, dst_ip))
            && self
                .subnet_filter
                .as_ref()
                .map_or(true, |f| f.matches(src_ip, dst_ip));
        match self.mode {
            FilterMode::Allow => all_match,
            FilterMode::Deny => !all_match,
        }
    }
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net-tls/src/tls_client_hello_reader.rs | huginn-net-tls/src/tls_client_hello_reader.rs | use crate::tls::Signature;
use crate::tls_process::parse_tls_client_hello;
/// TLS ClientHello reader with incremental parsing support
///
/// This struct manages reading and parsing TLS ClientHello messages incrementally,
/// handling cases where the ClientHello arrives in multiple TCP packets.
///
/// # Example
/// ```no_run
/// use huginn_net_tls::tls_client_hello_reader::TlsClientHelloReader;
///
/// let mut reader = TlsClientHelloReader::new();
///
/// // Add bytes incrementally
/// reader.add_bytes(&[0x16, 0x03, 0x01, 0x00, 0x4a]);
/// reader.add_bytes(&[/* more bytes */]);
///
/// if let Some(signature) = reader.get_signature() {
///     println!("Got TLS signature");
/// }
/// ```
pub struct TlsClientHelloReader {
    // Accumulated raw bytes of the (possibly fragmented) TLS record.
    buffer: Vec<u8>,
    // Parsed signature, set once a complete ClientHello has been processed;
    // further input is ignored after this is `Some`.
    signature: Option<Signature>,
}
impl TlsClientHelloReader {
/// Create a new TLS ClientHello reader
///
/// # Returns
/// A new `TlsClientHelloReader` instance ready to process TLS ClientHello data
#[must_use]
pub fn new() -> Self {
Self { buffer: Vec::with_capacity(8192), signature: None }
}
/// Add bytes to the buffer and attempt to parse ClientHello
///
/// This method handles incremental data arrival, parsing the ClientHello as soon
/// as enough data is available.
///
/// # Parameters
/// - `data`: New bytes to add to the buffer
///
/// # Returns
/// - `Ok(Some(Signature))` if ClientHello was successfully parsed
/// - `Ok(None)` if more data is needed or signature already parsed
/// - `Err(HuginnNetTlsError)` if parsing fails
pub fn add_bytes(
&mut self,
data: &[u8],
) -> Result<Option<Signature>, crate::error::HuginnNetTlsError> {
// If signature already parsed, don't process more data
if self.signature.is_some() {
return Ok(None);
}
// Check if we have enough data to determine TLS record length
self.buffer.extend_from_slice(data);
// Need at least 5 bytes to read TLS record header
if self.buffer.len() < 5 {
return Ok(None);
}
// Read TLS record length from bytes 3-4
let record_len = u16::from_be_bytes([self.buffer[3], self.buffer[4]]) as usize;
let needed = record_len.saturating_add(5);
// Check if we have complete TLS record
if self.buffer.len() < needed {
return Ok(None);
}
// Safety limit: don't process records larger than 64KB
if needed > 64 * 1024 {
return Err(crate::error::HuginnNetTlsError::Parse("TLS record too large".to_string()));
}
// Parse ClientHello
match parse_tls_client_hello(&self.buffer[..needed]) {
Ok(signature) => {
self.signature = Some(signature.clone());
Ok(Some(signature))
}
Err(e) => Err(e),
}
}
/// Get the parsed signature if available
///
/// # Returns
/// - `Some(Signature)` if signature has been parsed
/// - `None` if signature not yet available
#[must_use]
pub fn get_signature(&self) -> Option<&Signature> {
self.signature.as_ref()
}
/// Check if signature has been parsed
///
/// # Returns
/// `true` if signature is available, `false` otherwise
#[must_use]
pub fn signature_parsed(&self) -> bool {
self.signature.is_some()
}
/// Reset the reader to process a new ClientHello
///
/// Clears the buffer and resets parsing state, allowing the reader to be reused.
pub fn reset(&mut self) {
self.buffer.clear();
self.signature = None;
}
}
impl Default for TlsClientHelloReader {
    // Delegates to `new()`: empty buffer, no parsed signature.
    fn default() -> Self {
        Self::new()
    }
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net-tls/src/error.rs | huginn-net-tls/src/error.rs | use thiserror::Error;
/// Error type for the huginn-net TLS analysis crate.
#[derive(Error, Debug)]
pub enum HuginnNetTlsError {
    /// An error occurred while parsing TLS data.
    ///
    /// The payload carries a human-readable description of what failed.
    #[error("Parse error: {0}")]
    Parse(String),
    /// An unsupported protocol was encountered.
    #[error("Unsupported protocol: {0}")]
    UnsupportedProtocol(String),
    /// Misconfiguration error (e.g. invalid worker-pool parameters or a
    /// failed thread spawn).
    #[error("Misconfiguration: {0}")]
    Misconfiguration(String),
    /// An unknown error occurred.
    #[error("Unknown error")]
    Unknown,
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net-tls/src/parallel.rs | huginn-net-tls/src/parallel.rs | use crate::filter::FilterConfig;
use crate::output::TlsClientOutput;
use crate::packet_parser::{parse_packet, IpPacket};
use crate::process::{process_ipv4_packet, process_ipv6_packet};
use crate::raw_filter;
use crate::HuginnNetTlsError;
use crossbeam_channel::{bounded, Receiver, RecvTimeoutError, Sender, TryRecvError, TrySendError};
use std::fmt;
use std::num::NonZeroUsize;
use std::sync::atomic::{AtomicBool, AtomicU64, AtomicUsize, Ordering};
use std::sync::{Arc, Mutex};
use std::thread;
use std::time::Duration;
use tracing::debug;
/// Worker configuration parameters
struct WorkerConfig {
    // Maximum number of packets a worker drains and processes per batch.
    batch_size: usize,
    // Receive timeout (milliseconds) while waiting for the first packet of a
    // batch; also bounds how quickly a worker notices the shutdown flag.
    timeout_ms: u64,
}
/// Result of dispatching a packet to a worker
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub enum DispatchResult {
    /// Packet successfully queued
    Queued,
    /// Packet dropped (queue full, worker disconnected, or pool shutting down)
    Dropped,
}
/// Statistics for a single worker
#[derive(Debug, Clone, Copy, Default)]
pub struct WorkerStats {
    /// Worker ID
    pub id: usize,
    /// Current queue size (approximate)
    pub queue_size: usize,
    /// Total packets dropped by this worker
    pub dropped: u64,
}
impl fmt::Display for WorkerStats {
    /// Renders as `Worker <id>: queue_size=<n>, dropped=<n>`.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let Self { id, queue_size, dropped } = self;
        write!(f, "Worker {id}: queue_size={queue_size}, dropped={dropped}")
    }
}
/// Simple statistics for monitoring
#[derive(Debug, Clone, Default)]
pub struct PoolStats {
    /// Total packets dispatched to workers (accumulated since start)
    pub total_dispatched: u64,
    /// Total packets dropped because queues were full (accumulated since start)
    pub total_dropped: u64,
    /// Per-worker statistics (current state)
    pub workers: Vec<WorkerStats>,
}
impl fmt::Display for PoolStats {
    /// Renders a one-line summary followed by one indented line per worker.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        writeln!(
            f,
            "TLS Pool Stats - packets dispatched: {}, packets dropped: {}",
            self.total_dispatched, self.total_dropped
        )?;
        self.workers.iter().try_for_each(|worker| writeln!(f, "  {worker}"))
    }
}
/// Worker pool for parallel TLS processing with early filtering
pub struct WorkerPool {
    // Join handles of the spawned worker threads; kept so the threads stay
    // owned by the pool (never joined explicitly).
    _workers: Vec<thread::JoinHandle<()>>,
    // One bounded packet channel per worker, indexed by worker id.
    packet_senders: Arc<Vec<Sender<Vec<u8>>>>,
    // Pool's copy of the result channel; cleared (set to `None`) on shutdown.
    result_sender: Arc<Mutex<Option<std::sync::mpsc::Sender<TlsClientOutput>>>>,
    // Cooperative shutdown flag polled by workers and by `dispatch`.
    shutdown_flag: Arc<AtomicBool>,
    // Number of workers (guaranteed non-zero by construction).
    pub num_workers: NonZeroUsize,
    // Maximum packets a worker processes per batch.
    pub batch_size: usize,
    // Worker receive timeout in milliseconds.
    pub timeout_ms: u64,
    // Round-robin counter used by `dispatch` to pick the next worker.
    next_worker: AtomicUsize,
    // Total packets successfully queued since pool creation.
    queued_count: AtomicU64,
    // Total packets dropped since pool creation.
    dropped_count: AtomicU64,
    // Per-worker dropped-packet counters, indexed by worker id.
    worker_dropped: Vec<AtomicU64>,
}
impl WorkerPool {
    /// Create a new worker pool
    ///
    /// Spawns `num_workers` threads, each owning a dedicated bounded queue of
    /// `queue_size` packets. Workers drain packets in batches of up to
    /// `batch_size`, waiting at most `timeout_ms` milliseconds for the first
    /// packet of a batch.
    ///
    /// # Errors
    ///
    /// Returns an error if unable to spawn worker threads or if num_workers is 0
    pub fn new(
        num_workers: usize,
        queue_size: usize,
        batch_size: usize,
        timeout_ms: u64,
        result_sender: std::sync::mpsc::Sender<TlsClientOutput>,
        filter_config: Option<FilterConfig>,
    ) -> Result<Self, HuginnNetTlsError> {
        let num_workers = NonZeroUsize::new(num_workers).ok_or_else(|| {
            HuginnNetTlsError::Misconfiguration("Worker count must be greater than 0".to_string())
        })?;
        debug!(
            "Creating TLS worker pool: {} workers, queue size: {}, batch size: {}, timeout: {}ms",
            num_workers, queue_size, batch_size, timeout_ms
        );
        let num_workers_val = num_workers.get();
        let mut workers = Vec::with_capacity(num_workers_val);
        let mut packet_senders = Vec::with_capacity(num_workers_val);
        let mut worker_dropped = Vec::with_capacity(num_workers_val);
        let shutdown_flag = Arc::new(AtomicBool::new(false));
        // Share one immutable filter across all workers instead of cloning
        // the whole configuration per worker.
        let filter_arc = filter_config.map(Arc::new);
        for worker_id in 0..num_workers_val {
            worker_dropped.push(AtomicU64::new(0));
            // Each worker gets its own bounded queue so dispatch never blocks.
            let (tx, rx) = bounded::<Vec<u8>>(queue_size);
            packet_senders.push(tx);
            let result_sender_clone = result_sender.clone();
            let shutdown_flag_clone = Arc::clone(&shutdown_flag);
            let filter_clone = filter_arc.clone();
            let handle = thread::Builder::new()
                .name(format!("tls-worker-{worker_id}"))
                .spawn(move || {
                    Self::worker_loop(
                        worker_id,
                        rx,
                        result_sender_clone,
                        shutdown_flag_clone,
                        filter_clone,
                        WorkerConfig { batch_size, timeout_ms },
                    );
                })
                .map_err(|e| {
                    HuginnNetTlsError::Misconfiguration(format!(
                        "Failed to spawn worker thread: {e}"
                    ))
                })?;
            workers.push(handle);
        }
        Ok(Self {
            _workers: workers,
            packet_senders: Arc::new(packet_senders),
            result_sender: Arc::new(Mutex::new(Some(result_sender))),
            shutdown_flag,
            num_workers,
            batch_size,
            timeout_ms,
            next_worker: AtomicUsize::new(0),
            queued_count: AtomicU64::new(0),
            dropped_count: AtomicU64::new(0),
            worker_dropped,
        })
    }
    /// Shutdown the worker pool by closing all channels
    ///
    /// Sets the shutdown flag and drops the pool's copy of the result sender.
    /// Workers observe the flag on their next receive timeout and exit.
    /// NOTE(review): workers hold their own sender clones, so the result
    /// channel itself only disconnects once all workers have stopped.
    pub fn shutdown(&self) {
        self.shutdown_flag.store(true, Ordering::Relaxed);
        if let Ok(mut sender) = self.result_sender.lock() {
            *sender = None;
        }
    }
    /// Dispatch packet to a worker (round-robin)
    ///
    /// Never blocks: if the selected worker's queue is full (or the pool is
    /// shutting down), the packet is counted as dropped instead.
    pub fn dispatch(&self, packet: Vec<u8>) -> DispatchResult {
        // Check if pool is shutting down
        if self.shutdown_flag.load(Ordering::Relaxed) {
            return DispatchResult::Dropped;
        }
        // Round-robin selection; `num_workers` is NonZeroUsize, so the
        // checked_rem fallback can never actually trigger.
        let counter = self.next_worker.fetch_add(1, Ordering::Relaxed);
        let worker_id = counter.checked_rem(self.num_workers.get()).unwrap_or(0);
        match self.packet_senders[worker_id].try_send(packet) {
            Ok(()) => {
                self.queued_count.fetch_add(1, Ordering::Relaxed);
                DispatchResult::Queued
            }
            Err(TrySendError::Full(_)) => {
                self.dropped_count.fetch_add(1, Ordering::Relaxed);
                self.worker_dropped[worker_id].fetch_add(1, Ordering::Relaxed);
                DispatchResult::Dropped
            }
            Err(TrySendError::Disconnected(_)) => {
                self.dropped_count.fetch_add(1, Ordering::Relaxed);
                self.worker_dropped[worker_id].fetch_add(1, Ordering::Relaxed);
                DispatchResult::Dropped
            }
        }
    }
    /// Get current statistics
    ///
    /// Counters are read with relaxed ordering, so the snapshot is
    /// approximate while the pool is actively processing.
    pub fn stats(&self) -> PoolStats {
        let workers = (0..self.num_workers.get())
            .map(|worker_id| WorkerStats {
                id: worker_id,
                queue_size: self
                    .packet_senders
                    .get(worker_id)
                    .map(|s| s.len())
                    .unwrap_or(0),
                dropped: self.worker_dropped[worker_id].load(Ordering::Relaxed),
            })
            .collect();
        PoolStats {
            total_dispatched: self.queued_count.load(Ordering::Relaxed),
            total_dropped: self.dropped_count.load(Ordering::Relaxed),
            workers,
        }
    }
    // Worker thread body: repeatedly drain up to `config.batch_size` packets
    // from `rx` and process them, until shutdown is signalled or the
    // channels close.
    fn worker_loop(
        worker_id: usize,
        rx: Receiver<Vec<u8>>,
        result_sender: std::sync::mpsc::Sender<TlsClientOutput>,
        shutdown_flag: Arc<AtomicBool>,
        filter_config: Option<Arc<FilterConfig>>,
        config: WorkerConfig,
    ) {
        debug!("TLS worker {} started", worker_id);
        let mut batch = Vec::with_capacity(config.batch_size);
        let timeout = Duration::from_millis(config.timeout_ms);
        loop {
            if shutdown_flag.load(Ordering::Relaxed) {
                debug!("TLS worker {} received shutdown signal", worker_id);
                break;
            }
            // Blocking recv for first packet (waits if queue is empty)
            let first_packet = match rx.recv_timeout(timeout) {
                Ok(packet) => packet,
                Err(RecvTimeoutError::Timeout) => {
                    // Timeout doubles as the shutdown polling interval.
                    if shutdown_flag.load(Ordering::Relaxed) {
                        break;
                    }
                    batch.clear();
                    continue;
                }
                Err(RecvTimeoutError::Disconnected) => {
                    debug!("TLS worker {} channel disconnected", worker_id);
                    break;
                }
            };
            batch.push(first_packet);
            // Try to fill batch with more packets (non-blocking)
            while batch.len() < config.batch_size {
                match rx.try_recv() {
                    Ok(packet) => batch.push(packet),
                    Err(TryRecvError::Empty) => break,
                    Err(TryRecvError::Disconnected) => break,
                }
            }
            // Process entire batch
            for packet in batch.drain(..) {
                match Self::process_packet(&packet, filter_config.as_deref()) {
                    Ok(Some(result)) => {
                        if result_sender.send(result).is_err() {
                            debug!("TLS worker {} result channel closed", worker_id);
                            return;
                        }
                    }
                    // Non-TLS packets and per-packet errors are ignored so a
                    // single bad packet never stops the worker.
                    Ok(None) => {}
                    Err(_) => {}
                }
            }
        }
        debug!("TLS worker {} stopped", worker_id);
    }
    // Filter (optionally) and parse a single raw packet into a TLS output.
    fn process_packet(
        packet: &[u8],
        filter: Option<&FilterConfig>,
    ) -> Result<Option<TlsClientOutput>, HuginnNetTlsError> {
        if let Some(filter_cfg) = filter {
            if !raw_filter::apply(packet, filter_cfg) {
                debug!("Filtered out packet before parsing")
                ;
                return Ok(None);
            }
        }
        match parse_packet(packet) {
            IpPacket::Ipv4(ipv4) => process_ipv4_packet(&ipv4),
            IpPacket::Ipv6(ipv6) => process_ipv6_packet(&ipv6),
            IpPacket::None => Ok(None),
        }
    }
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net-tls/src/raw_filter.rs | huginn-net-tls/src/raw_filter.rs | use crate::filter::FilterConfig;
use std::net::{IpAddr, Ipv4Addr, Ipv6Addr};
use tracing::debug;
/// Apply raw filter check on raw packet bytes
///
/// Extracts only IPs and ports without creating full packet structures,
/// which is much faster than parsing the entire packet first.
///
/// # Returns
///
/// - `true`: Packet should be processed (passed filter or no filter)
/// - `false`: Packet should be dropped (failed filter)
pub fn apply(packet: &[u8], filter: &FilterConfig) -> bool {
    match extract_quick_info(packet) {
        Some((src_ip, dst_ip, src_port, dst_port)) => {
            filter.should_process(&src_ip, &dst_ip, src_port, dst_port)
        }
        None => {
            // Fail open: if the headers cannot be read, let the packet through.
            debug!("Could not extract quick info from packet");
            true
        }
    }
}
/// Extract IPs and ports without full parsing
///
/// Tries multiple datalink formats (Ethernet, Raw IP, NULL) to find the IP
/// header, reading only the minimum fields needed for filtering.
fn extract_quick_info(packet: &[u8]) -> Option<(IpAddr, IpAddr, u16, u16)> {
    // Ethernet framing is by far the most common, so it is tried first.
    try_ethernet(packet)
        .or_else(|| try_raw_ip(packet))
        .or_else(|| try_null_datalink(packet))
}
/// Try to extract from Ethernet frame
fn try_ethernet(packet: &[u8]) -> Option<(IpAddr, IpAddr, u16, u16)> {
    // Ethernet header is 14 bytes; EtherType sits at offset 12-13.
    let ethertype_bytes = packet.get(12..14)?;
    let ethertype = u16::from_be_bytes([ethertype_bytes[0], ethertype_bytes[1]]);
    let payload = &packet[14..];
    match ethertype {
        0x0800 => extract_ipv4_info(payload), // IPv4
        0x86DD => extract_ipv6_info(payload), // IPv6
        _ => None,
    }
}
/// Try to extract from Raw IP
fn try_raw_ip(packet: &[u8]) -> Option<(IpAddr, IpAddr, u16, u16)> {
    // IP version is the high nibble of the first byte.
    match *packet.first()? >> 4 {
        4 => extract_ipv4_info(packet),
        6 => extract_ipv6_info(packet),
        _ => None,
    }
}
/// Try to extract from NULL/Loopback datalink
fn try_null_datalink(packet: &[u8]) -> Option<(IpAddr, IpAddr, u16, u16)> {
if packet.len() < 4 {
return None;
}
// NULL datalink has 4-byte header with address family
// AF_INET = 2, AF_INET6 = 30 (on most systems)
let family = u32::from_ne_bytes([packet[0], packet[1], packet[2], packet[3]]);
match family {
2 => extract_ipv4_info(&packet[4..]), // AF_INET
30 | 28 => extract_ipv6_info(&packet[4..]), // AF_INET6 (varies by OS)
_ => None,
}
}
/// Extract IPv4 src/dst IPs and TCP ports (minimal parsing)
fn extract_ipv4_info(packet: &[u8]) -> Option<(IpAddr, IpAddr, u16, u16)> {
// IPv4 header minimum: 20 bytes
if packet.len() < 20 {
return None;
}
// Check protocol (offset 9): must be TCP (6)
if packet[9] != 6 {
return None;
}
// Extract source IP (offset 12-15)
let src_ip = IpAddr::V4(Ipv4Addr::new(packet[12], packet[13], packet[14], packet[15]));
// Extract destination IP (offset 16-19)
let dst_ip = IpAddr::V4(Ipv4Addr::new(packet[16], packet[17], packet[18], packet[19]));
// Get IP header length (first 4 bits of byte 0, in 32-bit words)
let ihl = (packet[0] & 0x0F) as usize;
let ip_header_len = ihl.saturating_mul(4);
// TCP header starts after IP header
let tcp_offset = ip_header_len;
if packet.len() < tcp_offset.saturating_add(4) {
return None;
}
// Extract TCP ports (first 4 bytes of TCP header)
let src_port = u16::from_be_bytes([packet[tcp_offset], packet[tcp_offset.saturating_add(1)]]);
let dst_port = u16::from_be_bytes([
packet[tcp_offset.saturating_add(2)],
packet[tcp_offset.saturating_add(3)],
]);
Some((src_ip, dst_ip, src_port, dst_port))
}
/// Extract IPv6 src/dst IPs and TCP ports (minimal parsing)
fn extract_ipv6_info(packet: &[u8]) -> Option<(IpAddr, IpAddr, u16, u16)> {
// IPv6 header: 40 bytes minimum
if packet.len() < 40 {
return None;
}
// Check next header (offset 6): must be TCP (6)
if packet[6] != 6 {
return None;
}
// Extract source IP (offset 8-23)
let src_ip = IpAddr::V6(Ipv6Addr::new(
u16::from_be_bytes([packet[8], packet[9]]),
u16::from_be_bytes([packet[10], packet[11]]),
u16::from_be_bytes([packet[12], packet[13]]),
u16::from_be_bytes([packet[14], packet[15]]),
u16::from_be_bytes([packet[16], packet[17]]),
u16::from_be_bytes([packet[18], packet[19]]),
u16::from_be_bytes([packet[20], packet[21]]),
u16::from_be_bytes([packet[22], packet[23]]),
));
// Extract destination IP (offset 24-39)
let dst_ip = IpAddr::V6(Ipv6Addr::new(
u16::from_be_bytes([packet[24], packet[25]]),
u16::from_be_bytes([packet[26], packet[27]]),
u16::from_be_bytes([packet[28], packet[29]]),
u16::from_be_bytes([packet[30], packet[31]]),
u16::from_be_bytes([packet[32], packet[33]]),
u16::from_be_bytes([packet[34], packet[35]]),
u16::from_be_bytes([packet[36], packet[37]]),
u16::from_be_bytes([packet[38], packet[39]]),
));
// TCP header starts at offset 40 (IPv6 header is fixed 40 bytes)
if packet.len() < 44 {
return None;
}
// Extract TCP ports
let src_port = u16::from_be_bytes([packet[40], packet[41]]);
let dst_port = u16::from_be_bytes([packet[42], packet[43]]);
Some((src_ip, dst_ip, src_port, dst_port))
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net-tls/src/output.rs | huginn-net-tls/src/output.rs | use crate::ObservableTlsClient;
use std::fmt;
use std::fmt::Formatter;
#[derive(Debug, Clone, PartialEq)]
pub struct IpPort {
pub ip: std::net::IpAddr,
pub port: u16,
}
impl IpPort {
pub fn new(ip: std::net::IpAddr, port: u16) -> Self {
Self { ip, port }
}
}
/// Holds information derived from analyzing TLS ClientHello packets.
///
/// This structure contains details about the TLS client based on its ClientHello packet,
/// including the JA4 Payload and extracted TLS parameters.
pub struct TlsClientOutput {
/// The source IP address and port of the client sending the ClientHello.
pub source: IpPort,
/// The destination IP address and port of the server receiving the ClientHello.
pub destination: IpPort,
/// The raw TLS signature extracted from the ClientHello packet.
pub sig: ObservableTlsClient,
}
impl fmt::Display for TlsClientOutput {
fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
write!(
f,
"[TLS Client] {}:{} → {}:{}\n\
SNI: {}\n\
Version: TLS {}\n\
JA4: {}\n\
JA4_r: {}\n\
JA4_o: {}\n\
JA4_or: {}\n",
self.source.ip,
self.source.port,
self.destination.ip,
self.destination.port,
self.sig.sni.as_deref().unwrap_or("none"),
self.sig.version,
self.sig.ja4.full.value(),
self.sig.ja4.raw.value(),
self.sig.ja4_original.full.value(),
self.sig.ja4_original.raw.value(),
)
}
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net-tls/src/packet_parser.rs | huginn-net-tls/src/packet_parser.rs | /// Packet parsing utilities for different network packet formats
///
/// This module provides unified parsing for various network packet formats
/// from both live network capture and PCAP files:
/// - Ethernet frames (most common in network interfaces)
/// - Raw IP packets (tunnels, loopback interfaces)
/// - NULL datalink packets (specialized capture tools)
/// - Future packet formats can be added here
use pnet::packet::ethernet::{EtherTypes, EthernetPacket};
use pnet::packet::ipv4::Ipv4Packet;
use pnet::packet::ipv6::Ipv6Packet;
use tracing::debug;
/// Represents the result of IP packet parsing
#[derive(Debug)]
pub enum IpPacket<'a> {
Ipv4(Ipv4Packet<'a>),
Ipv6(Ipv6Packet<'a>),
None,
}
/// Datalink format types supported
#[derive(Debug, Clone, Copy, PartialEq)]
pub enum DatalinkFormat {
/// Standard Ethernet frame (14-byte header)
Ethernet,
/// Raw IP packet (no datalink header)
RawIp,
/// NULL datalink with 4-byte header (0x1e 0x00 ...)
Null,
}
/// Parse a network packet using multiple format strategies
///
/// Tries different parsing strategies in order of likelihood:
/// 1. Ethernet (most common in network interfaces and PCAPs)
/// 2. Raw IP (tunnels, loopback interfaces, some PCAPs)
/// 3. NULL datalink (specialized capture tools)
///
/// Works with packets from both live network capture and PCAP files.
///
/// # Arguments
/// * `packet` - Raw packet bytes from network interface or PCAP file
///
/// # Returns
/// * `IpPacket` - The parsed IP packet or None if no valid format found
pub fn parse_packet(packet: &[u8]) -> IpPacket<'_> {
// Strategy 1: Try Ethernet first (most common)
if let Some(parsed) = try_ethernet_format(packet) {
return parsed;
}
// Strategy 2: Try Raw IP (no Ethernet header)
if let Some(parsed) = try_raw_ip_format(packet) {
return parsed;
}
// Strategy 3: Try NULL datalink (skip 4-byte header)
if let Some(parsed) = try_null_datalink_format(packet) {
return parsed;
}
IpPacket::None
}
/// Try parsing as Ethernet frame
fn try_ethernet_format(packet: &[u8]) -> Option<IpPacket<'_>> {
// Ethernet header is 14 bytes: [6B dst][6B src][2B ethertype]
if packet.len() < 14 {
return None;
}
let ethernet = EthernetPacket::new(packet)?;
let ip_data = &packet[14..]; // Skip 14-byte Ethernet header
match ethernet.get_ethertype() {
EtherTypes::Ipv4 => {
if let Some(ipv4) = Ipv4Packet::new(ip_data) {
debug!("Parsed Ethernet IPv4 packet");
return Some(IpPacket::Ipv4(ipv4));
}
}
EtherTypes::Ipv6 => {
if let Some(ipv6) = Ipv6Packet::new(ip_data) {
debug!("Parsed Ethernet IPv6 packet");
return Some(IpPacket::Ipv6(ipv6));
}
}
_ => {}
}
None
}
/// Try parsing as Raw IP (no datalink header)
fn try_raw_ip_format(packet: &[u8]) -> Option<IpPacket<'_>> {
if packet.len() < 20 {
return None;
}
// Check IP version in first 4 bits
let version = (packet[0] & 0xF0) >> 4;
match version {
4 => {
if let Some(ipv4) = Ipv4Packet::new(packet) {
debug!("Parsed Raw IPv4 packet");
return Some(IpPacket::Ipv4(ipv4));
}
}
6 => {
if let Some(ipv6) = Ipv6Packet::new(packet) {
debug!("Parsed Raw IPv6 packet");
return Some(IpPacket::Ipv6(ipv6));
}
}
_ => {}
}
None
}
/// Try parsing as NULL datalink format (4-byte header)
fn try_null_datalink_format(packet: &[u8]) -> Option<IpPacket<'_>> {
// Check for NULL datalink signature and minimum size
if packet.len() < 24 || packet[0] != 0x1e || packet[1] != 0x00 {
return None;
}
let ip_data = &packet[4..]; // Skip 4-byte NULL header
let version = (ip_data[0] & 0xF0) >> 4;
match version {
4 => {
if let Some(ipv4) = Ipv4Packet::new(ip_data) {
debug!("Parsed NULL datalink IPv4 packet");
return Some(IpPacket::Ipv4(ipv4));
}
}
6 => {
if let Some(ipv6) = Ipv6Packet::new(ip_data) {
debug!("Parsed NULL datalink IPv6 packet");
return Some(IpPacket::Ipv6(ipv6));
}
}
_ => {}
}
None
}
/// Detect the datalink format of a packet without full parsing
///
/// Useful for statistics or format validation
pub fn detect_datalink_format(packet: &[u8]) -> Option<DatalinkFormat> {
// Check NULL datalink first (most specific signature)
if packet.len() >= 24 && packet[0] == 0x1e && packet[1] == 0x00 {
let ip_data = &packet[4..];
let version = (ip_data[0] & 0xF0) >> 4;
if version == 4 || version == 6 {
return Some(DatalinkFormat::Null);
}
}
// Check Raw IP (check if it starts with valid IP version)
if packet.len() >= 20 {
let version = (packet[0] & 0xF0) >> 4;
if version == 4 || version == 6 {
// Additional validation for IPv4
if version == 4 {
let ihl = (packet[0] & 0x0F).saturating_mul(4);
if ihl >= 20 && packet.len() >= usize::from(ihl) {
return Some(DatalinkFormat::RawIp);
}
}
// Additional validation for IPv6
else if version == 6 && packet.len() >= 40 {
return Some(DatalinkFormat::RawIp);
}
}
}
// Check Ethernet (least specific - needs valid EtherType)
if packet.len() >= 14 {
if let Some(ethernet) = EthernetPacket::new(packet) {
let ethertype = ethernet.get_ethertype();
// Only consider it Ethernet if it has a valid IP EtherType
if ethertype == EtherTypes::Ipv4 || ethertype == EtherTypes::Ipv6 {
let ip_data = &packet[14..];
if !ip_data.is_empty() {
let version = (ip_data[0] & 0xF0) >> 4;
if (ethertype == EtherTypes::Ipv4 && version == 4)
|| (ethertype == EtherTypes::Ipv6 && version == 6)
{
return Some(DatalinkFormat::Ethernet);
}
}
}
}
}
None
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net-tls/tests/tls.rs | huginn-net-tls/tests/tls.rs | use huginn_net_tls::*;
fn create_test_signature() -> Signature {
Signature {
version: TlsVersion::V1_3,
cipher_suites: vec![
0x1301, 0x1302, 0x1303, 0xc02b, 0xc02f, 0xc02c, 0xc030, 0xcca9, 0xcca8, 0xc013, 0xc014,
0x009c, 0x009d, 0x002f, 0x0035,
],
extensions: vec![
0x001b, 0x0000, 0x0033, 0x0010, 0x4469, 0x0017, 0x002d, 0x000d, 0x0005, 0x0023, 0x0012,
0x002b, 0xff01, 0x000b, 0x000a, 0x0015,
],
elliptic_curves: vec![0x001d, 0x0017, 0x0018, 0x0019],
elliptic_curve_point_formats: vec![0x00],
signature_algorithms: vec![0x0403, 0x0804, 0x0401, 0x0503, 0x0805, 0x0501, 0x0806, 0x0601],
sni: Some("example.com".to_string()),
alpn: Some("h2".to_string()),
}
}
#[test]
fn test_ja4_generation() {
let sig = create_test_signature();
let ja4 = sig.generate_ja4();
// Test JA4_a format: protocol + version + sni + cipher_count + extension_count + alpn_first + alpn_last
assert_eq!(ja4.ja4_a, "t13d1516h2");
// Test that cipher suites are sorted and properly formatted
assert!(ja4.ja4_b.contains("002f"));
assert!(ja4.ja4_b.contains("1301"));
// Test that extensions exclude SNI (0x0000) and ALPN (0x0010)
assert!(!ja4.ja4_c.contains("0000"));
assert!(!ja4.ja4_c.contains("0010"));
// Test that signature algorithms are included
assert!(ja4.ja4_c.contains("0403"));
// Test hash lengths (should be 12 characters) - use the new enum structure
let parts: Vec<&str> = ja4.full.value().split('_').collect();
assert!(parts.len() >= 3, "JA4 should have at least 3 parts separated by underscores");
assert_eq!(parts[1].len(), 12, "Second JA4 hash part should be 12 characters");
assert_eq!(parts[2].len(), 12, "Third JA4 hash part should be 12 characters");
}
#[test]
fn test_ja4_original_order() {
let sig = create_test_signature();
let ja4_sorted = sig.generate_ja4();
let ja4_original = sig.generate_ja4_original();
// JA4_original should differ from JA4 in both cipher and extension order
assert_ne!(ja4_original.raw.value(), ja4_sorted.raw.value());
assert_eq!(
ja4_original.raw.value().split('_').next(),
ja4_sorted.raw.value().split('_').next()
); // Same JA4_a
// JA4_b should be different due to cipher order (original vs sorted)
assert_ne!(
ja4_original.raw.value().split('_').nth(1),
ja4_sorted.raw.value().split('_').nth(1)
); // Different JA4_b
// JA4_c should be different due to extension order and SNI/ALPN inclusion
assert_ne!(
ja4_original.raw.value().split('_').nth(2),
ja4_sorted.raw.value().split('_').nth(2)
);
// JA4_original should include SNI (0000) and ALPN (0010)
assert!(ja4_original.raw.value().contains("0000")); // SNI
assert!(ja4_original.raw.value().contains("0010")); // ALPN
// JA4 (sorted) should NOT include SNI and ALPN
assert!(!ja4_sorted.raw.value().contains("0000")); // SNI
assert!(!ja4_sorted.raw.value().contains("0010")); // ALPN
}
#[test]
fn test_grease_filtering() {
let mut sig = create_test_signature();
// Add GREASE values
sig.cipher_suites.push(0x0a0a);
sig.extensions.push(0x1a1a);
sig.signature_algorithms.push(0x2a2a);
let ja4 = sig.generate_ja4();
// GREASE values should be filtered out
assert!(!ja4.ja4_b.contains("0a0a"));
assert!(!ja4.ja4_c.contains("1a1a"));
assert!(!ja4.ja4_c.contains("2a2a"));
}
#[test]
fn test_alpn_first_last() {
// Test single character ALPN
assert_eq!(first_last_alpn("h"), ('h', '0'));
// Test two character ALPN
assert_eq!(first_last_alpn("h2"), ('h', '2'));
// Test longer ALPN
assert_eq!(first_last_alpn("http/1.1"), ('h', '1'));
// Test non-ASCII replacement
assert_eq!(first_last_alpn("hñ"), ('h', '9'));
// Test empty (should not happen in practice)
assert_eq!(first_last_alpn(""), ('0', '0'));
}
#[test]
fn test_sni_indicator() {
let mut sig = create_test_signature();
sig.sni = Some("example.com".to_string());
let ja4_with_sni = sig.generate_ja4();
assert!(ja4_with_sni.ja4_a.contains('d'));
sig.sni = None;
let ja4_without_sni = sig.generate_ja4();
assert!(ja4_without_sni.ja4_a.contains('i'));
}
#[test]
fn test_no_signature_algorithms() {
let mut sig = create_test_signature();
sig.signature_algorithms.clear();
let ja4 = sig.generate_ja4();
// Should not end with underscore when no signature algorithms
assert!(!ja4.ja4_c.ends_with('_'));
assert!(!ja4.raw.value().contains("__"));
}
#[test]
fn test_tls_version_display() {
assert_eq!(format!("{}", TlsVersion::V1_0), "10");
assert_eq!(format!("{}", TlsVersion::V1_1), "11");
assert_eq!(format!("{}", TlsVersion::V1_2), "12");
assert_eq!(format!("{}", TlsVersion::V1_3), "13");
assert_eq!(format!("{}", TlsVersion::Ssl3_0), "s3");
assert_eq!(format!("{}", TlsVersion::Ssl2_0), "s2");
assert_eq!(format!("{}", TlsVersion::Unknown(0x0305)), "00");
}
#[test]
fn test_ssl_version_in_ja4() {
// Test that SSL 3.0 appears correctly in JA4 fingerprint
let mut signature = create_test_signature();
signature.version = TlsVersion::Ssl3_0;
let ja4 = signature.generate_ja4();
let ja4_string = ja4.full.value();
// Should start with "ts3d" (t=TLS, s3=SSL3.0, d=SNI present)
assert!(
ja4_string.starts_with("ts3d"),
"JA4 should start with 'ts3d' for SSL 3.0, got: {ja4_string}"
);
}
#[test]
fn test_hash12_function() {
let input = "test_string";
let hash = hash12(input);
assert_eq!(hash.len(), 12);
// Same input should produce same hash
assert_eq!(hash12(input), hash12(input));
// Different input should produce different hash
assert_ne!(hash12("different"), hash12(input));
}
#[test]
fn test_cipher_extension_count_limits() {
let mut sig = create_test_signature();
// Test with more than 99 ciphers
sig.cipher_suites = (0..150).map(|i| i as u16).collect();
let ja4 = sig.generate_ja4();
// JA4_a format: protocol(1) + version(2) + sni(1) + cipher_count(2) + extension_count(2) + alpn_first(1) + alpn_last(1)
// Example: "t13d9999h2" = t + 13 + d + 99 + 99 + h + 2
let cipher_count = &ja4.ja4_a[4..6]; // positions 4-5 for cipher count
assert_eq!(cipher_count, "99");
// Test with more than 99 extensions
sig.extensions = (0..200).map(|i| i as u16).collect();
let ja4 = sig.generate_ja4();
// Should be limited to 99
let ext_count = &ja4.ja4_a[6..8]; // positions 6-7 for extension count
assert_eq!(ext_count, "99");
}
#[test]
fn test_ja4_format_consistency() {
let sig = create_test_signature();
let ja4_sorted = sig.generate_ja4();
let ja4_original = sig.generate_ja4_original();
// JA4 hash should have exactly 2 underscores (ja4_a_ja4_b_hash_ja4_c_hash)
assert_eq!(ja4_sorted.full.value().matches('_').count(), 2);
assert_eq!(ja4_original.full.value().matches('_').count(), 2);
// JA4 full format can have more underscores due to internal structure (extensions_sig_algs)
// The main structure should be ja4_a_ja4_b_ja4_c where ja4_c might contain internal underscores
let ja4_full_parts: Vec<&str> = ja4_sorted.raw.value().split('_').collect();
let ja4_original_full_parts: Vec<&str> = ja4_original.raw.value().split('_').collect();
// Should have at least 3 parts: ja4_a, ja4_b, and ja4_c (which might contain more underscores)
assert!(ja4_full_parts.len() >= 3);
assert!(ja4_original_full_parts.len() >= 3);
// All parts should start with the same JA4_a
assert!(ja4_sorted.full.value().starts_with(&ja4_sorted.ja4_a));
assert!(ja4_sorted.raw.value().starts_with(&ja4_sorted.ja4_a));
assert!(ja4_original.raw.value().starts_with(&ja4_original.ja4_a));
assert!(ja4_original.full.value().starts_with(&ja4_original.ja4_a));
// First parts should be identical (ja4_a)
assert_eq!(ja4_full_parts[0], ja4_original_full_parts[0]);
// JA4 vs JA4_original differences:
// - JA4 uses sorted cipher suites, JA4_original uses original order
// - JA4 excludes SNI/ALPN and sorts extensions, JA4_original includes SNI/ALPN in original order
// Verify JA4 (sorted) excludes SNI/ALPN
assert!(!ja4_sorted.raw.value().contains("0000")); // No SNI
assert!(!ja4_sorted.raw.value().contains("0010")); // No ALPN
// Verify JA4_original includes SNI/ALPN
assert!(ja4_original.raw.value().contains("0000")); // Has SNI
assert!(ja4_original.raw.value().contains("0010")); // Has ALPN
}
#[test]
fn test_known_ja4_comparison() {
let sig = Signature {
version: TlsVersion::V1_3,
cipher_suites: vec![
0x1301, 0x1302, 0x1303, 0xc02b, 0xc02f, 0xc02c, 0xc030, 0xcca9, 0xcca8, 0xc013, 0xc014,
0x009c, 0x009d, 0x002f, 0x0035,
],
extensions: vec![
0x0000, 0x0017, 0x0018, 0xff01, 0x000a, 0x000b, 0x0023, 0x0010, 0x000d, 0x0012, 0x0033,
0x002b, 0x002d, 0x0015, 0x001b, 0x001c,
],
elliptic_curves: vec![0x001d, 0x0017, 0x0018, 0x0019],
elliptic_curve_point_formats: vec![0x00],
signature_algorithms: vec![0x0403, 0x0804, 0x0401, 0x0503, 0x0805, 0x0501, 0x0806, 0x0601],
sni: Some("example.com".to_string()),
alpn: Some("h2".to_string()),
};
let ja4_original = sig.generate_ja4_original();
// Expected JA4_ro (original order with SNI/ALPN)
let expected_ja4_ro = "t13d1516h2_1301,1302,1303,c02b,c02f,c02c,c030,cca9,cca8,c013,c014,009c,009d,002f,0035_0000,0017,0018,ff01,000a,000b,0023,0010,000d,0012,0033,002b,002d,0015,001b,001c_0403,0804,0401,0503,0805,0501,0806,0601";
// This should now match exactly
assert_eq!(ja4_original.raw.value(), expected_ja4_ro);
}
#[test]
fn test_captured_traffic_ja4() {
// Test with captured traffic data from a real browser
let sig = Signature {
version: TlsVersion::V1_3,
cipher_suites: vec![
0x1301, 0x1302, 0x1303, 0xc02b, 0xc02f, 0xc02c, 0xc030, 0xcca9, 0xcca8, 0xc013, 0xc014,
0x009c, 0x009d, 0x002f, 0x0035,
],
// First packet: 0012,000d,000b,ff01,0000,0023,001b,44cd,fe0d,0033,0005,0010,000a,002d,0017,002b
extensions: vec![
0x0012, 0x000d, 0x000b, 0xff01, 0x0000, 0x0023, 0x001b, 0x44cd, 0xfe0d, 0x0033, 0x0005,
0x0010, 0x000a, 0x002d, 0x0017, 0x002b,
],
elliptic_curves: vec![0x001d, 0x0017, 0x0018, 0x0019],
elliptic_curve_point_formats: vec![0x00],
signature_algorithms: vec![0x0403, 0x0804, 0x0401, 0x0503, 0x0805, 0x0501, 0x0806, 0x0601],
sni: Some("example.com".to_string()),
alpn: Some("h2".to_string()),
};
let ja4_sorted = sig.generate_ja4();
let ja4_original = sig.generate_ja4_original();
// Verify the JA4_a part is correct
assert_eq!(ja4_sorted.ja4_a, "t13d1516h2");
assert_eq!(ja4_original.ja4_a, "t13d1516h2");
// Verify JA4_ro uses original order and includes SNI/ALPN
let expected_ja4_ro = "t13d1516h2_1301,1302,1303,c02b,c02f,c02c,c030,cca9,cca8,c013,c014,009c,009d,002f,0035_0012,000d,000b,ff01,0000,0023,001b,44cd,fe0d,0033,0005,0010,000a,002d,0017,002b_0403,0804,0401,0503,0805,0501,0806,0601";
assert_eq!(ja4_original.raw.value(), expected_ja4_ro);
// Verify JA4_r excludes SNI and ALPN and sorts extensions
assert!(!ja4_sorted.raw.value().contains("0000")); // No SNI
assert!(!ja4_sorted.raw.value().contains("0010")); // No ALPN
// Verify JA4_ro includes SNI and ALPN in original order
assert!(ja4_original.raw.value().contains("0000")); // Has SNI
assert!(ja4_original.raw.value().contains("0010")); // Has ALPN
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
biandratti/huginn-net | https://github.com/biandratti/huginn-net/blob/ef479d3357bc040b4dcdacffbb4ab8db07051f1b/huginn-net-tls/tests/raw_filter_tests.rs | huginn-net-tls/tests/raw_filter_tests.rs | use std::net::IpAddr;
use huginn_net_tls::filter::{FilterConfig, FilterMode, IpFilter, PortFilter};
/// Helper to create an IPv4 TCP packet (minimal: IP header + TCP ports)
fn create_ipv4_tcp_packet(
src_ip: [u8; 4],
dst_ip: [u8; 4],
src_port: u16,
dst_port: u16,
) -> Vec<u8> {
let mut packet = vec![0u8; 40];
packet[0] = 0x45; // IPv4, IHL=5
packet[9] = 6; // TCP protocol
packet[12..16].copy_from_slice(&src_ip);
packet[16..20].copy_from_slice(&dst_ip);
packet[20..22].copy_from_slice(&src_port.to_be_bytes());
packet[22..24].copy_from_slice(&dst_port.to_be_bytes());
packet
}
/// Helper to create an Ethernet frame with IPv4 TCP packet
fn create_ethernet_ipv4_tcp_packet(
src_ip: [u8; 4],
dst_ip: [u8; 4],
src_port: u16,
dst_port: u16,
) -> Vec<u8> {
let mut packet = vec![0u8; 54];
packet[12..14].copy_from_slice(&[0x08, 0x00]); // IPv4 EtherType
packet[14] = 0x45; // IPv4, IHL=5
packet[23] = 6; // TCP protocol
packet[26..30].copy_from_slice(&src_ip);
packet[30..34].copy_from_slice(&dst_ip);
packet[34..36].copy_from_slice(&src_port.to_be_bytes());
packet[36..38].copy_from_slice(&dst_port.to_be_bytes());
packet
}
#[test]
fn test_raw_filter_ipv4_raw_packet() {
let packet = create_ipv4_tcp_packet([192, 168, 1, 100], [8, 8, 8, 8], 12345, 443);
// The raw filter should work on raw bytes
// We test indirectly by checking that the packet format is correct
assert_eq!(packet.len(), 40);
assert_eq!(packet[0], 0x45); // IPv4, IHL=5
assert_eq!(packet[9], 6); // TCP protocol
}
#[test]
fn test_raw_filter_ethernet_frame() {
let packet = create_ethernet_ipv4_tcp_packet([192, 168, 1, 100], [8, 8, 8, 8], 12345, 443);
// Test Ethernet frame structure
assert_eq!(packet.len(), 54);
assert_eq!(&packet[12..14], &[0x08, 0x00]); // IPv4 EtherType
assert_eq!(packet[14], 0x45); // IPv4, IHL=5
assert_eq!(packet[23], 6); // TCP protocol
}
#[test]
fn test_raw_filter_allows_matching_destination_port() {
let filter = FilterConfig::new()
.mode(FilterMode::Allow)
.with_port_filter(PortFilter::new().destination(443));
// Raw filter should allow packets matching destination port
let src_ip: IpAddr = "192.168.1.100"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
let dst_ip: IpAddr = "8.8.8.8"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
assert!(filter.should_process(&src_ip, &dst_ip, 12345, 443));
}
#[test]
fn test_raw_filter_blocks_non_matching_destination_port() {
let filter = FilterConfig::new()
.mode(FilterMode::Allow)
.with_port_filter(PortFilter::new().destination(443));
let src_ip: IpAddr = "192.168.1.100"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
let dst_ip: IpAddr = "8.8.8.8"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
// Different port should be blocked
assert!(!filter.should_process(&src_ip, &dst_ip, 12345, 80));
}
#[test]
fn test_raw_filter_allows_matching_source_ip() {
let filter = FilterConfig::new().mode(FilterMode::Allow).with_ip_filter(
IpFilter::new()
.allow("192.168.1.100")
.unwrap_or_else(|e| panic!("Invalid IP: {e}"))
.source_only(),
);
let src_ip: IpAddr = "192.168.1.100"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
let dst_ip: IpAddr = "8.8.8.8"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
assert!(filter.should_process(&src_ip, &dst_ip, 12345, 443));
}
#[test]
fn test_raw_filter_blocks_non_matching_source_ip() {
let filter = FilterConfig::new().mode(FilterMode::Allow).with_ip_filter(
IpFilter::new()
.allow("192.168.1.100")
.unwrap_or_else(|e| panic!("Invalid IP: {e}"))
.source_only(),
);
let src_ip: IpAddr = "10.0.0.1"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
let dst_ip: IpAddr = "8.8.8.8"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
assert!(!filter.should_process(&src_ip, &dst_ip, 12345, 443));
}
#[test]
fn test_raw_filter_combined_filters() {
let filter = FilterConfig::new()
.mode(FilterMode::Allow)
.with_port_filter(PortFilter::new().destination(443))
.with_ip_filter(
IpFilter::new()
.allow("192.168.1.100")
.unwrap_or_else(|e| panic!("Invalid IP: {e}"))
.source_only(),
);
let src_ip: IpAddr = "192.168.1.100"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
let dst_ip: IpAddr = "8.8.8.8"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
assert!(filter.should_process(&src_ip, &dst_ip, 12345, 443));
assert!(!filter.should_process(&src_ip, &dst_ip, 12345, 80));
let wrong_src: IpAddr = "10.0.0.1"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
assert!(!filter.should_process(&wrong_src, &dst_ip, 12345, 443));
}
#[test]
fn test_raw_filter_deny_mode() {
let filter = FilterConfig::new()
.mode(FilterMode::Deny)
.with_port_filter(PortFilter::new().destination(22));
let src_ip: IpAddr = "192.168.1.100"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
let dst_ip: IpAddr = "8.8.8.8"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
assert!(!filter.should_process(&src_ip, &dst_ip, 12345, 22));
assert!(filter.should_process(&src_ip, &dst_ip, 12345, 443));
}
#[test]
fn test_raw_filter_no_filter_allows_all() {
let filter = FilterConfig::new();
let src_ip: IpAddr = "192.168.1.100"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
let dst_ip: IpAddr = "8.8.8.8"
.parse()
.unwrap_or_else(|e| panic!("Invalid IP: {e}"));
assert!(filter.should_process(&src_ip, &dst_ip, 12345, 443));
assert!(filter.should_process(&src_ip, &dst_ip, 54321, 80));
}
| rust | Apache-2.0 | ef479d3357bc040b4dcdacffbb4ab8db07051f1b | 2026-01-04T20:21:12.648216Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.