repo
stringlengths
6
65
file_url
stringlengths
81
311
file_path
stringlengths
6
227
content
stringlengths
0
32.8k
language
stringclasses
1 value
license
stringclasses
7 values
commit_sha
stringlengths
40
40
retrieved_at
stringdate
2026-01-04 15:31:58
2026-01-04 20:25:31
truncated
bool
2 classes
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/tests/integration_test_2.rs
tests/integration_test_2.rs
use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{SubsystemBuilder, SubsystemHandle, Toplevel}; use tracing_test::traced_test; pub mod common; use std::sync::atomic::{AtomicBool, Ordering}; use crate::common::Event; use common::BoxedResult; #[tokio::test(start_paused = true)] #[traced_test] async fn wait_for_children() { let (nested1_started, set_nested1_started) = Event::create(); let (nested1_finished, set_nested1_finished) = Event::create(); let (nested2_started, set_nested2_started) = Event::create(); let (nested2_finished, set_nested2_finished) = Event::create(); let nested_subsys2 = async move |subsys: &mut SubsystemHandle| { set_nested2_started(); subsys.on_shutdown_requested().await; sleep(Duration::from_millis(100)).await; set_nested2_finished(); BoxedResult::Ok(()) }; let nested_subsys1 = async move |subsys: &mut SubsystemHandle| { subsys.start(SubsystemBuilder::new("nested2", nested_subsys2)); set_nested1_started(); subsys.on_shutdown_requested().await; sleep(Duration::from_millis(100)).await; set_nested1_finished(); BoxedResult::Ok(()) }; let subsys1 = async move |subsys: &mut SubsystemHandle| { subsys.start(SubsystemBuilder::new("nested1", nested_subsys1)); sleep(Duration::from_millis(100)).await; subsys.request_shutdown(); assert!(nested1_started.get()); assert!(!nested1_finished.get()); assert!(nested2_started.get()); assert!(!nested2_finished.get()); subsys.wait_for_children().await; assert!(nested1_finished.get()); assert!(nested2_finished.get()); BoxedResult::Ok(()) }; Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("subsys", subsys1)); }) .handle_shutdown_requests(Duration::from_millis(500)) .await .unwrap(); } #[tokio::test(start_paused = true)] #[traced_test] async fn request_local_shutdown() { let (nested1_started, set_nested1_started) = Event::create(); let (nested1_finished, set_nested1_finished) = Event::create(); let (nested2_started, set_nested2_started) = Event::create(); let 
(nested2_finished, set_nested2_finished) = Event::create(); let (global_finished, set_global_finished) = Event::create(); let nested_subsys2 = async move |subsys: &mut SubsystemHandle| { set_nested2_started(); subsys.on_shutdown_requested().await; set_nested2_finished(); BoxedResult::Ok(()) }; let nested_subsys1 = async move |subsys: &mut SubsystemHandle| { subsys.start(SubsystemBuilder::new("nested2", nested_subsys2)); set_nested1_started(); subsys.on_shutdown_requested().await; set_nested1_finished(); BoxedResult::Ok(()) }; let subsys1 = async move |subsys: &mut SubsystemHandle| { subsys.start(SubsystemBuilder::new("nested1", nested_subsys1)); sleep(Duration::from_millis(100)).await; assert!(nested1_started.get()); assert!(!nested1_finished.get()); assert!(nested2_started.get()); assert!(!nested2_finished.get()); assert!(!global_finished.get()); assert!(!subsys.is_shutdown_requested()); subsys.request_local_shutdown(); sleep(Duration::from_millis(200)).await; assert!(nested1_finished.get()); assert!(nested2_finished.get()); assert!(!global_finished.get()); assert!(subsys.is_shutdown_requested()); subsys.request_shutdown(); sleep(Duration::from_millis(50)).await; assert!(global_finished.get()); assert!(subsys.is_shutdown_requested()); BoxedResult::Ok(()) }; Toplevel::new(async move |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("subsys", subsys1)); s.on_shutdown_requested().await; set_global_finished(); }) .handle_shutdown_requests(Duration::from_millis(100)) .await .unwrap(); } #[cfg(unix)] #[tokio::test] #[traced_test] async fn shutdown_through_signal_2() { use nix::sys::signal::{self, Signal}; use nix::unistd::Pid; use tokio_graceful_shutdown::FutureExt; let subsystem = async |subsys: &mut SubsystemHandle| { subsys.on_shutdown_requested().await; sleep(Duration::from_millis(200)).await; BoxedResult::Ok(()) }; tokio::join!( async { sleep(Duration::from_millis(100)).await; // Send SIGINT to ourselves. 
signal::kill(Pid::this(), Signal::SIGTERM).unwrap(); }, async { let result = Toplevel::new(async move |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("subsys", subsystem)); assert!( sleep(Duration::from_millis(1000)) .cancel_on_shutdown(s) .await .is_err() ); assert!(s.is_shutdown_requested()); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(400)) .await; assert!(result.is_ok()); }, ); } #[tokio::test(start_paused = true)] #[traced_test] async fn cancellation_token() { let subsystem = async |subsys: &mut SubsystemHandle| { let cancellation_token = subsys.create_cancellation_token(); assert!(!cancellation_token.is_cancelled()); subsys.on_shutdown_requested().await; assert!(cancellation_token.is_cancelled()); BoxedResult::Ok(()) }; let toplevel = Toplevel::new(async move |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("subsys", subsystem)); sleep(Duration::from_millis(100)).await; s.request_shutdown(); }); let result = toplevel .handle_shutdown_requests(Duration::from_millis(400)) .await; assert!(result.is_ok()); } #[tokio::test(start_paused = true)] #[traced_test] async fn cancellation_token_does_not_propagate_up() { let subsystem = async |subsys: &mut SubsystemHandle| { let cancellation_token = subsys.create_cancellation_token(); cancellation_token.cancel(); assert!(!subsys.is_shutdown_requested()); BoxedResult::Ok(()) }; let toplevel = Toplevel::new(async move |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("subsys", subsystem)); }); let result = toplevel .handle_shutdown_requests(Duration::from_millis(400)) .await; assert!(result.is_ok()); } #[tokio::test(start_paused = true)] #[traced_test] async fn subsystem_finished_works_correctly() { let subsystem = async |subsys: &mut SubsystemHandle| { subsys.on_shutdown_requested().await; BoxedResult::Ok(()) }; let toplevel = Toplevel::new(async move |s: &mut SubsystemHandle| { let nested = s.start(SubsystemBuilder::new("subsys", subsystem)); let nested_finished = 
nested.finished(); let is_finished = AtomicBool::new(false); tokio::join!( async { nested_finished.await; is_finished.store(true, Ordering::Release); }, async { sleep(Duration::from_millis(20)).await; assert!(!is_finished.load(Ordering::Acquire)); nested.initiate_shutdown(); sleep(Duration::from_millis(20)).await; assert!(is_finished.load(Ordering::Acquire)); } ); }); let result = toplevel .handle_shutdown_requests(Duration::from_millis(400)) .await; assert!(result.is_ok()); } #[tokio::test(start_paused = true)] #[traced_test] async fn shutdown_does_not_propagate_to_detached_subsystem() { let (nested_started, set_nested_started) = Event::create(); let (nested_finished, set_nested_finished) = Event::create(); let detached_subsystem = async |subsys: &mut SubsystemHandle| { set_nested_started(); subsys.on_shutdown_requested().await; set_nested_finished(); BoxedResult::Ok(()) }; let subsystem = async move |subsys: &mut SubsystemHandle| { let nested = subsys.start(SubsystemBuilder::new("detached", detached_subsystem).detached()); sleep(Duration::from_millis(20)).await; assert!(nested_started.get()); assert!(!nested_finished.get()); subsys.on_shutdown_requested().await; sleep(Duration::from_millis(20)).await; assert!(!nested_finished.get()); nested.initiate_shutdown(); sleep(Duration::from_millis(20)).await; assert!(nested_finished.get()); BoxedResult::Ok(()) }; let toplevel = Toplevel::new(async move |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("subsys", subsystem)); sleep(Duration::from_millis(100)).await; s.request_shutdown(); }); let result = toplevel .handle_shutdown_requests(Duration::from_millis(400)) .await; assert!(result.is_ok()); }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/tests/abort.rs
tests/abort.rs
pub mod common; use std::sync::Arc; use futures::future::BoxFuture; use std::sync::atomic::{self, AtomicBool}; use common::BoxedResult; use futures::FutureExt; use std::convert::Infallible; use tokio::time::Duration; use tokio_graceful_shutdown::{SubsystemBuilder, SubsystemHandle, Toplevel}; use tracing_test::traced_test; #[tokio::test(start_paused = true)] async fn abort_subsystem_works() { // Diagram: // // top // \ // nested (rcv's abort at 0.5s, panics after 1s) let subsys_nested = move |_: &mut SubsystemHandle| -> BoxFuture<BoxedResult> { async move { tokio::time::sleep(Duration::from_millis(1000)).await; panic!("Nested subsystem should not reach completion"); } .boxed() }; let subsys_top = async move |subsys: &mut SubsystemHandle| { let nested = subsys.start(SubsystemBuilder::new("subsys_nested", subsys_nested)); tokio::time::sleep(Duration::from_millis(500)).await; nested.abort(); tokio::time::sleep(Duration::from_millis(1)).await; assert!(nested.is_finished()); tokio::time::sleep(Duration::from_millis(1000)).await; Ok::<_, Infallible>(()) }; Toplevel::new(async move |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("subsys_top", subsys_top)); }) .handle_shutdown_requests(Duration::from_millis(100)) .await .unwrap(); } #[tokio::test(start_paused = true)] #[traced_test] async fn nested_subsystem_is_aborted() { // Diagram: // // top // \ // d1 (0s lifetime, rcv's abort at 0.5s) // \ // d2 (panics after 1s) // // We want to ensure aborting d1 aborts d2. 
let subsys_nested_d2 = move |_: &mut SubsystemHandle| -> BoxFuture<BoxedResult> { async move { tokio::time::sleep(Duration::from_millis(1000)).await; panic!("Depth 2 subsystem should not reach completion"); } .boxed() }; let subsys_nested_d1 = async move |subsys: &mut SubsystemHandle| { let _nested = subsys.start(SubsystemBuilder::new("d2", subsys_nested_d2)); BoxedResult::Ok(()) }; let subsys_top = async move |subsys: &mut SubsystemHandle| { let nested = subsys.start(SubsystemBuilder::new("d1", subsys_nested_d1)); tokio::time::sleep(Duration::from_millis(500)).await; nested.abort(); tokio::time::sleep(Duration::from_millis(1)).await; assert!(nested.is_finished()); tokio::time::sleep(Duration::from_millis(1000)).await; BoxedResult::Ok(()) }; Toplevel::new(async move |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("subsys_top", subsys_top)); }) .handle_shutdown_requests(Duration::from_millis(100)) .await .unwrap(); } #[tokio::test(start_paused = true)] #[traced_test] async fn multiple_abort_works() { // Diagram: // // top // \ // nested (rcv's abort at 0.5s, rcv's abort at 0.6s, panics at 1s) // // This is just making sure we can call .abort() multiple times without // problems happening. 
let subsys_nested = move |_: &mut SubsystemHandle| -> BoxFuture<BoxedResult> { async move { tokio::time::sleep(Duration::from_millis(1000)).await; panic!("Nested subsystem should not reach completion"); } .boxed() }; let subsys_top = async move |subsys: &mut SubsystemHandle| { let nested = subsys.start(SubsystemBuilder::new("subsys_nested", subsys_nested)); tokio::time::sleep(Duration::from_millis(500)).await; nested.abort(); tokio::time::sleep(Duration::from_millis(100)).await; nested.abort(); tokio::time::sleep(Duration::from_millis(1000)).await; Ok::<_, Infallible>(()) }; Toplevel::new(async move |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("subsys_top", subsys_top)); }) .handle_shutdown_requests(Duration::from_millis(100)) .await .unwrap(); } #[tokio::test(start_paused = true)] #[traced_test] async fn abort_overrides_shutdown() { // Diagram: // // top // \ // nested (rcv's shutdown at 0.5s, rcv's abort at 0.6s, shuts down at 1s after shutdown requested) let subsys_nested = move |s: &mut SubsystemHandle| -> BoxFuture<BoxedResult> { let cancellation_token = s.create_cancellation_token(); async move { cancellation_token.cancelled().await; tracing::info!("received shutdown signal"); tokio::time::sleep(Duration::from_millis(500)).await; panic!("Nested subsystem should not reach completion"); } .boxed() }; let subsys_top = async move |subsys: &mut SubsystemHandle| { let nested = subsys.start(SubsystemBuilder::new("subsys_nested", subsys_nested)); tokio::time::sleep(Duration::from_millis(500)).await; nested.initiate_shutdown(); tokio::time::sleep(Duration::from_millis(100)).await; nested.abort(); tokio::time::sleep(Duration::from_millis(1000)).await; Ok::<_, Infallible>(()) }; Toplevel::new(async move |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("subsys_top", subsys_top)); }) .handle_shutdown_requests(Duration::from_millis(100)) .await .unwrap(); } #[tokio::test(start_paused = true)] #[traced_test] async fn abort_ensures_drop() { // Diagram: 
// // top // \ // nested (rcv's abort at 0.5s, owns an object that we expect to be dropped, 1s lifetime) /// Holds reference to a flag. The flag is initialized to false. /// When this object is dropped, the flag is set to true. struct IHaveNoMouthYetIMustBeDropped { was_dropped: Arc<AtomicBool>, } impl IHaveNoMouthYetIMustBeDropped { fn new() -> Self { Self { was_dropped: Arc::new(AtomicBool::new(false)), } } } impl Drop for IHaveNoMouthYetIMustBeDropped { fn drop(&mut self) { self.was_dropped.store(true, atomic::Ordering::Relaxed); } } let subsys_top = async move |subsys: &mut SubsystemHandle| { let to_be_dropped = IHaveNoMouthYetIMustBeDropped::new(); let flag = to_be_dropped.was_dropped.clone(); let nested = subsys.start(SubsystemBuilder::new( "subsys_nested", move |_s: &mut SubsystemHandle| -> BoxFuture<BoxedResult> { async move { let _owned_object = to_be_dropped; //take ownership of the drop object loop { tokio::time::sleep(Duration::from_millis(1000)).await; } } .boxed() }, )); tokio::time::sleep(Duration::from_millis(500)).await; nested.abort(); tokio::time::sleep(Duration::from_millis(1)).await; // may need to wait for it to be dropped assert!( flag.load(atomic::Ordering::Relaxed), "drop did not get called" ); Ok::<_, Infallible>(()) }; Toplevel::new(async move |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("subsys_top", subsys_top)); }) .handle_shutdown_requests(Duration::from_millis(100)) .await .unwrap(); }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/tests/common/event.rs
tests/common/event.rs
#![allow(dead_code)] use tokio::sync::watch; pub struct Event { receiver: watch::Receiver<bool>, } impl Event { pub fn create() -> (Self, impl FnOnce()) { let (sender, receiver) = watch::channel(false); (Self { receiver }, move || { sender.send_replace(true); }) } pub fn get(&self) -> bool { *self.receiver.borrow() } pub async fn wait(&self) { let mut receiver = self.receiver.clone(); while !*receiver.borrow_and_update() { receiver.changed().await.unwrap(); } } }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/tests/common/mod.rs
tests/common/mod.rs
#![allow(unused_imports)] mod event; pub use event::Event; use std::error::Error; /// Wrapper type to simplify lambdas pub type BoxedError = Box<dyn Error + Sync + Send>; pub type BoxedResult = Result<(), BoxedError>;
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/18_error_type_passthrough.rs
examples/18_error_type_passthrough.rs
//! This example shows to pass custom error types all the way through to the top, //! to recover them from the return value of `handle_shutdown_requests`. use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{ IntoSubsystem, SubsystemBuilder, SubsystemHandle, Toplevel, errors::{GracefulShutdownError, SubsystemError}, }; #[derive(Debug, thiserror::Error)] enum MyError { #[error("MyError.WithData: {0}")] WithData(u32), #[error("MyError.WithoutData")] WithoutData, } async fn subsys1(_subsys: &mut SubsystemHandle<MyError>) -> Result<(), MyError> { tracing::info!("Subsystem1 started."); sleep(Duration::from_millis(200)).await; tracing::info!("Subsystem1 stopped."); Err(MyError::WithData(42)) } async fn subsys2(_subsys: &mut SubsystemHandle<MyError>) -> Result<(), MyError> { tracing::info!("Subsystem2 started."); sleep(Duration::from_millis(200)).await; tracing::info!("Subsystem2 stopped."); Err(MyError::WithoutData) } async fn subsys3(_subsys: &mut SubsystemHandle<MyError>) -> Result<(), MyError> { tracing::info!("Subsystem3 started."); sleep(Duration::from_millis(200)).await; tracing::info!("Subsystem3 stopped."); panic!("This subsystem panicked."); } async fn subsys4(_subsys: &mut SubsystemHandle<MyError>) -> Result<(), MyError> { tracing::info!("Subsystem4 started."); sleep(Duration::from_millis(1000)).await; tracing::info!("Subsystem4 stopped."); // This subsystem would end normally but takes too long and therefore // will time out. Ok(()) } async fn subsys5(_subsys: &mut SubsystemHandle<MyError>) -> Result<(), MyError> { tracing::info!("Subsystem5 started."); sleep(Duration::from_millis(200)).await; tracing::info!("Subsystem5 stopped."); // This subsystem ended normally and should not show up in the list of // subsystem errors. Ok(()) } // This subsystem implements the IntoSubsystem trait with a custom error type. // The first generic is the error type returned from the `run()` function, the // second generic is the error wrapper type used by Toplevel. 
In this case, // both are identical. struct Subsys6; impl IntoSubsystem<MyError, MyError> for Subsys6 { async fn run(self, _subsys: &mut SubsystemHandle<MyError>) -> Result<(), MyError> { tracing::info!("Subsystem6 started."); sleep(Duration::from_millis(200)).await; tracing::info!("Subsystem6 stopped."); Err(MyError::WithData(69)) } } #[tokio::main] async fn main() -> Result<(), miette::Report> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree let errors = Toplevel::<MyError>::new(async |s: &mut SubsystemHandle<MyError>| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); s.start(SubsystemBuilder::new("Subsys2", subsys2)); s.start(SubsystemBuilder::new("Subsys3", subsys3)); s.start(SubsystemBuilder::new("Subsys4", subsys4)); s.start(SubsystemBuilder::new("Subsys5", subsys5)); s.start(SubsystemBuilder::new("Subsys6", Subsys6.into_subsystem())); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(500)) .await; if let Err(e) = &errors { match e { GracefulShutdownError::SubsystemsFailed(_) => { tracing::warn!("Subsystems failed.") } GracefulShutdownError::ShutdownTimeout(_) => { tracing::warn!("Shutdown timed out.") } }; for subsystem_error in e.get_subsystem_errors() { match subsystem_error { SubsystemError::Failed(name, e) => { tracing::warn!(" Subsystem '{}' failed.", name); match e.get_error() { MyError::WithData(data) => { tracing::warn!(" It failed with MyError::WithData({})", data) } MyError::WithoutData => { tracing::warn!(" It failed with MyError::WithoutData") } } } SubsystemError::Panicked(name) => { tracing::warn!(" Subsystem '{}' panicked.", name) } } } }; Ok(errors?) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/19_sequential_shutdown.rs
examples/19_sequential_shutdown.rs
//! This example demonstrates how multiple subsystems could be shut down sequentially. //! //! When a shutdown gets triggered (via Ctrl+C), Nested1 will shutdown first, //! followed by Nested2 and Nested3. Only once the previous subsystem is finished shutting down, //! the next subsystem will follow. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{ FutureExt, SubsystemBuilder, SubsystemFinishedFuture, SubsystemHandle, Toplevel, }; async fn counter(id: &str) { let mut i = 0; loop { tracing::info!("{id}: {i}"); i += 1; sleep(Duration::from_millis(50)).await; } } async fn nested1(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Nested1 started."); if counter("Nested1").cancel_on_shutdown(subsys).await.is_ok() { tracing::info!("Nested1 counter finished."); } else { tracing::info!("Nested1 shutting down ..."); sleep(Duration::from_millis(200)).await; } subsys.on_shutdown_requested().await; tracing::info!("Nested1 stopped."); Ok(()) } async fn nested2( subsys: &mut SubsystemHandle, nested1_finished: SubsystemFinishedFuture, ) -> Result<()> { // Create a future that triggers once nested1 is finished **and** a shutdown is requested let shutdown = { let shutdown_requested = subsys.on_shutdown_requested(); async move { tokio::join!(shutdown_requested, nested1_finished); } }; tracing::info!("Nested2 started."); tokio::select! { _ = shutdown => { tracing::info!("Nested2 shutting down ..."); sleep(Duration::from_millis(200)).await; } _ = counter("Nested2") => { tracing::info!("Nested2 counter finished."); } } tracing::info!("Nested2 stopped."); Ok(()) } async fn nested3( subsys: &mut SubsystemHandle, nested2_finished: SubsystemFinishedFuture, ) -> Result<()> { // Create a future that triggers once nested2 is finished **and** a shutdown is requested let shutdown = { // This is an alternative to `on_shutdown_requested()` (as shown in nested2). // Use this if `on_shutdown_requested()` gives you lifetime issues. 
let cancellation_token = subsys.create_cancellation_token(); async move { tokio::join!(cancellation_token.cancelled(), nested2_finished); } }; tracing::info!("Nested3 started."); tokio::select! { _ = shutdown => { tracing::info!("Nested3 shutting down ..."); sleep(Duration::from_millis(200)).await; } _ = counter("Nested3") => { tracing::info!("Nested3 counter finished."); } } tracing::info!("Nested3 stopped."); Ok(()) } async fn root(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Root started."); tracing::info!("Starting nested subsystems ..."); let nested1_handle = subsys.start(SubsystemBuilder::new("Nested1", nested1)); let nested1_finished = nested1_handle.finished(); let nested2_handle = subsys.start(SubsystemBuilder::new( "Nested2", async |s: &mut SubsystemHandle| nested2(s, nested1_finished).await, )); let nested2_finished = nested2_handle.finished(); subsys.start(SubsystemBuilder::new( "Nested3", async |s: &mut SubsystemHandle| nested3(s, nested2_finished).await, )); tracing::info!("Nested subsystems started."); // Wait for all children to finish shutting down. subsys.wait_for_children().await; tracing::info!("All children finished, stopping Root ..."); sleep(Duration::from_millis(200)).await; tracing::info!("Root stopped."); Ok(()) } #[tokio::main(flavor = "current_thread")] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Root", root)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/09_task_cancellation.rs
examples/09_task_cancellation.rs
//! This example demonstrates how to implement a clean shutdown //! of a subsystem, through the example of a countdown that //! gets cancelled on shutdown. //! //! There are two options to cancel tasks on shutdown: //! - with [tokio::select] //! - with [FutureExt::cancel_on_shutdown()] //! //! In this case we go with `cancel_on_shutdown()`, but `tokio::select` would be equally viable. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{ FutureExt, SubsystemBuilder, SubsystemHandle, Toplevel, errors::CancelledByShutdown, }; struct CountdownSubsystem {} impl CountdownSubsystem { fn new() -> Self { Self {} } async fn countdown(&self) { for i in (1..10).rev() { tracing::info!("Countdown: {}", i); sleep(Duration::from_millis(1000)).await; } } async fn run(self, subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Starting countdown ..."); match self.countdown().cancel_on_shutdown(subsys).await { Ok(()) => { tracing::info!("Countdown finished."); } Err(CancelledByShutdown) => { tracing::info!("Countdown cancelled."); } } Ok(()) } } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new( "Countdown", async |h: &mut SubsystemHandle| CountdownSubsystem::new().run(h).await, )); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/04_subsystem_finished.rs
examples/04_subsystem_finished.rs
//! This example demonstrates that subsystems can also stop //! prematurely. //! //! Returning Ok(()) from a subsystem indicates that the subsystem //! stopped intentionally, and no further measures by the runtime are performed. //! (unless there are no more subsystems left, in that case TopLevel would shut down anyway) use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{SubsystemBuilder, SubsystemHandle, Toplevel}; async fn subsys1(_subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem1 started."); sleep(Duration::from_millis(500)).await; tracing::info!("Subsystem1 stopped."); // Task ends without an error. This should not cause the main program to shutdown, // because Subsys2 is still running. Ok(()) } async fn subsys2(subsys: &mut SubsystemHandle) -> Result<()> { subsys.on_shutdown_requested().await; Ok(()) } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); s.start(SubsystemBuilder::new("Subsys2", subsys2)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/12_subsystem_auto_restart.rs
examples/12_subsystem_auto_restart.rs
//! This example demonstrates how a subsystem could get implemented that auto-restarts //! every time a panic occurs. //! //! This isn't really a usecase related to this library, but seems to be used regularly, //! so I included it anyway. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{ErrorAction, SubsystemBuilder, SubsystemHandle, Toplevel}; async fn subsys1(subsys: &mut SubsystemHandle) -> Result<()> { // This subsystem panics every two seconds. // It should get restarted constantly. tracing::info!("Subsystem1 started."); tokio::select! { _ = subsys.on_shutdown_requested() => (), _ = sleep(Duration::from_secs(2)) => { panic!("Subsystem1 panicked!"); } }; tracing::info!("Subsystem1 stopped."); Ok(()) } async fn subsys1_keepalive(subsys: &mut SubsystemHandle) -> Result<()> { loop { let nested_subsys = subsys.start( SubsystemBuilder::new("Subsys1", subsys1) .on_failure(ErrorAction::CatchAndLocalShutdown) .on_panic(ErrorAction::CatchAndLocalShutdown), ); if let Err(err) = nested_subsys.join().await { tracing::error!("Subsystem1 failed: {:?}", miette::Report::from(err)); } else { break; } if subsys.is_shutdown_requested() { break; } tracing::info!("Restarting subsystem1 ..."); } Ok(()) } #[tokio::main(flavor = "current_thread")] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1Keepalive", subsys1_keepalive)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/03_shutdown_timeout.rs
examples/03_shutdown_timeout.rs
//! This subsystem demonstrates the shutdown timeout mechanism. //! //! The subsystem takes longer to shut down than the timeout allows, //! so the subsystem gets cancelled and the program returns an appropriate //! error code. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{SubsystemBuilder, SubsystemHandle, Toplevel}; async fn subsys1(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem1 started."); subsys.on_shutdown_requested().await; tracing::info!("Shutting down Subsystem1 ..."); sleep(Duration::from_millis(2000)).await; tracing::info!("Subsystem1 stopped."); Ok(()) } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(500)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/08_panic_handling.rs
examples/08_panic_handling.rs
//! This example demonstrates that like errors, panics also get dealt with //! gracefully. //! //! A normal program shutdown is performed, and other subsystems get the //! chance to clean up their work. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{SubsystemBuilder, SubsystemHandle, Toplevel}; async fn subsys1(subsys: &mut SubsystemHandle) -> Result<()> { subsys.start(SubsystemBuilder::new("Subsys2", subsys2)); tracing::info!("Subsystem1 started."); subsys.on_shutdown_requested().await; tracing::info!("Shutting down Subsystem1 ..."); sleep(Duration::from_millis(500)).await; tracing::info!("Subsystem1 stopped."); Ok(()) } async fn subsys2(_subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem2 started."); sleep(Duration::from_millis(500)).await; panic!("Subsystem2 panicked!") } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/14_partial_shutdown_error.rs
examples/14_partial_shutdown_error.rs
//! This example demonstrates how an error during partial shutdown behaves. //! //! If an error during partial a shutdown happens, it will not cause a global //! shutdown, but instead it will be delivered to the task that initiated //! the partial shutdown. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{ErrorAction, SubsystemBuilder, SubsystemHandle, Toplevel}; async fn subsys3(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsys3 started."); subsys.on_shutdown_requested().await; panic!("Subsystem3 threw an error!") } async fn subsys2(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsys2 started."); subsys.start(SubsystemBuilder::new("Subsys3", subsys3)); subsys.on_shutdown_requested().await; tracing::info!("Subsys2 stopped."); Ok(()) } async fn subsys1(subsys: &mut SubsystemHandle) -> Result<()> { // This subsystem shuts down the nested subsystem after a seconds. tracing::info!("Subsys1 started."); tracing::info!("Starting nested subsystem ..."); let nested_subsys = subsys.start(SubsystemBuilder::new("Subsys2", subsys2)); tracing::info!("Nested subsystem started."); tokio::select! 
{ _ = subsys.on_shutdown_requested() => (), _ = sleep(Duration::from_secs(1)) => { tracing::info!("Shutting down nested subsystem ..."); // Redirect errors during shutdown to the local `.join()` call nested_subsys.change_failure_action(ErrorAction::CatchAndLocalShutdown); nested_subsys.change_panic_action(ErrorAction::CatchAndLocalShutdown); // Perform shutdown nested_subsys.initiate_shutdown(); if let Err(err) = nested_subsys.join().await { tracing::warn!("Error during nested subsystem shutdown: {:?}", miette::Report::from(err)); }; tracing::info!("Nested subsystem shut down."); subsys.on_shutdown_requested().await; } }; tracing::info!("Subsys1 stopped."); Ok(()) } #[tokio::main(flavor = "current_thread")] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/05_subsystem_finished_with_error.rs
examples/05_subsystem_finished_with_error.rs
//! This example shows how the library reacts to failing subsystems. //! //! If a subsystem returns an `Err(...)` value, it is assumed that the //! subsystem failed and in response the program will be shut down. //! //! As expected, this is a graceful shutdown, giving other subsystems //! the chance to also shut down gracefully. use miette::{Result, miette}; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{SubsystemBuilder, SubsystemHandle, Toplevel}; async fn subsys1(_subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem1 started."); sleep(Duration::from_millis(500)).await; tracing::info!("Subsystem1 stopped."); // Task ends with an error. This should cause the main program to shutdown. Err(miette!("Subsystem1 failed intentionally.")) } async fn subsys2(subsys: &mut SubsystemHandle) -> Result<()> { subsys.on_shutdown_requested().await; Ok(()) } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); s.start(SubsystemBuilder::new("Subsys2", subsys2)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/22_subsystem_abort.rs
examples/22_subsystem_abort.rs
//! This example demonstrates how a subsystem that is stuck (in an await) can get aborted. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{SubsystemBuilder, SubsystemHandle, Toplevel}; async fn subsys1(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem1 started."); let nested = subsys.start(SubsystemBuilder::new("Subsys2", subsys2)); sleep(Duration::from_millis(500)).await; tracing::info!("Aborting nested subsystem ..."); nested.abort(); sleep(Duration::from_millis(500)).await; tracing::info!("Nested subsystem is finished: {:?}", nested.is_finished()); Ok(()) } async fn subsys2(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem2 started."); subsys.start(SubsystemBuilder::new("Subsys3", subsys3)); loop { tracing::info!("Subsystem2 stuck ..."); sleep(Duration::from_millis(100)).await; } } async fn subsys3(_subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem3 started."); loop { tracing::info!("Subsystem3 stuck ..."); sleep(Duration::from_millis(100)).await; } } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/17_with_eyre.rs
examples/17_with_eyre.rs
//! This example shows how to use this library with eyre instead of miette use eyre::{Result, eyre}; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{SubsystemBuilder, SubsystemHandle, Toplevel}; async fn subsys1(_subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem1 started."); sleep(Duration::from_millis(500)).await; tracing::info!("Subsystem1 stopped."); // Task ends with an error. This should cause the main program to shutdown. Err(eyre!("Subsystem1 threw an error.")) } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/hyper.rs
examples/hyper.rs
//! This example demonstrates how to gracefully shutdown a hyper //! server using this crate. //! //! This example closely follows hyper's "hello" example. //! //! Note that while we could spawn one subsystem per connection, //! tokio-graceful-shutdown's subsystems are quite heavy. //! So for a large amount of dynamic tasks like this, it is //! recommended to use CancellationToken + TaskTracker instead. use miette::{Context, IntoDiagnostic, Result}; use tokio::time::Duration; use tokio_graceful_shutdown::errors::CancelledByShutdown; use tokio_graceful_shutdown::{FutureExt, SubsystemBuilder, SubsystemHandle, Toplevel}; use std::convert::Infallible; use std::net::SocketAddr; use std::pin::pin; use bytes::Bytes; use http_body_util::Full; use hyper::server::conn::http1; use hyper::service::service_fn; use hyper::{Request, Response}; use hyper_util::rt::TokioIo; use tokio::net::TcpListener; use tokio_util::task::TaskTracker; // An async function that consumes a request, does nothing with it and returns a // response. async fn hello(_: Request<hyper::body::Incoming>) -> Result<Response<Full<Bytes>>, Infallible> { Ok(Response::new(Full::new(Bytes::from("Hello World!")))) } async fn connection_handler( subsys: &mut SubsystemHandle, listener: TcpListener, connection_tracker: TaskTracker, ) -> Result<()> { loop { let connection = match listener.accept().cancel_on_shutdown(subsys).await { Ok(connection) => connection, Err(CancelledByShutdown) => break, }; let (tcp, addr) = connection .into_diagnostic() .context("Error while waiting for connection")?; let io = TokioIo::new(tcp); // Spawn handler on connection tracker to give the parent subsystem // the chance to wait for the shutdown to finish connection_tracker.spawn({ let cancellation_token = subsys.create_cancellation_token(); async move { tracing::info!("Connected to {} ...", addr); let mut connection = pin!(http1::Builder::new().serve_connection(io, service_fn(hello))); let result = tokio::select! 
{ e = connection.as_mut() => e, _ = cancellation_token.cancelled() => { // If the system shuts down, shut down the connection // and continue serving, as specified in the hyper docs. tracing::info!("Shutting down connection to {} ...", addr); connection.as_mut().graceful_shutdown(); connection.await }, }; if let Err(err) = result { tracing::warn!("Error serving connection: {:?}", err); } else { tracing::info!("Connection to {} closed.", addr); } } }); } Ok(()) } async fn hyper_subsystem(subsys: &mut SubsystemHandle) -> Result<()> { let addr: SocketAddr = ([127, 0, 0, 1], 12345).into(); // Bind to the port and listen for incoming TCP connections let listener = TcpListener::bind(addr) .await .into_diagnostic() .context("Unable to start tcp server")?; tracing::info!("Listening on http://{}", addr); // Use a tasktracker instead of spawning a subsystem for every connection, // as this would result in a lot of overhead. let connection_tracker = TaskTracker::new(); let listener = subsys.start(SubsystemBuilder::new("Hyper Listener", { let connection_tracker = connection_tracker.clone(); async move |subsys: &mut SubsystemHandle| { connection_handler(subsys, listener, connection_tracker).await } })); // Make sure no more tasks can be spawned before we close the tracker listener.join().await?; // Wait for connections to close connection_tracker.close(); connection_tracker.wait().await; Ok(()) } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Hyper", hyper_subsystem)); }) .catch_signals() .handle_shutdown_requests(Duration::from_secs(5)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/13_partial_shutdown.rs
examples/13_partial_shutdown.rs
//! This example demonstrates how to perform a partial shutdown of the system. //! //! Subsys1 will perform a partial shutdown after 5 seconds, which will in turn //! shut down Subsys2 and Subsys3, leaving Subsys1 running. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{ErrorAction, SubsystemBuilder, SubsystemHandle, Toplevel}; async fn subsys3(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsys3 started."); subsys.on_shutdown_requested().await; tracing::info!("Subsys3 stopped."); Ok(()) } async fn subsys2(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsys2 started."); subsys.start(SubsystemBuilder::new("Subsys3", subsys3)); subsys.on_shutdown_requested().await; tracing::info!("Subsys2 stopped."); Ok(()) } async fn subsys1(subsys: &mut SubsystemHandle) -> Result<()> { // This subsystem shuts down the nested subsystem after a second. tracing::info!("Subsys1 started."); tracing::info!("Starting nested subsystem ..."); let nested_subsys = subsys.start(SubsystemBuilder::new("Subsys2", subsys2)); tracing::info!("Nested subsystem started."); tokio::select! 
{ _ = subsys.on_shutdown_requested() => (), _ = sleep(Duration::from_secs(1)) => { tracing::info!("Shutting down nested subsystem ..."); // Redirect errors during shutdown to the local `.join()` call nested_subsys.change_failure_action(ErrorAction::CatchAndLocalShutdown); nested_subsys.change_panic_action(ErrorAction::CatchAndLocalShutdown); // Perform shutdown nested_subsys.initiate_shutdown(); nested_subsys.join().await?; tracing::info!("Nested subsystem shut down."); subsys.on_shutdown_requested().await; } }; tracing::info!("Subsys1 stopped."); Ok(()) } #[tokio::main(flavor = "current_thread")] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/01_normal_shutdown.rs
examples/01_normal_shutdown.rs
//! This example demonstrates the basic usage pattern of this crate. //! //! It shows that subsystems get started, and when the program //! gets shut down (by pressing Ctrl-C), the subsystems get shut down //! gracefully. //! //! If custom arguments for the subsystem coroutines are required, //! a struct has to be used instead, as seen in other examples. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{SubsystemBuilder, SubsystemHandle, Toplevel}; async fn subsys1(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem1 started."); subsys.on_shutdown_requested().await; tracing::info!("Shutting down Subsystem1 ..."); sleep(Duration::from_millis(400)).await; tracing::info!("Subsystem1 stopped."); Ok(()) } async fn subsys2(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem2 started."); subsys.on_shutdown_requested().await; tracing::info!("Shutting down Subsystem2 ..."); sleep(Duration::from_millis(500)).await; tracing::info!("Subsystem2 stopped."); Ok(()) } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); s.start(SubsystemBuilder::new("Subsys2", subsys2)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/07_nested_error.rs
examples/07_nested_error.rs
//! This example demonstrates that if one subsystem returns an error, //! a graceful shutdown is performed and other subsystems get the chance //! to clean up. use miette::{Result, miette}; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{SubsystemBuilder, SubsystemHandle, Toplevel}; async fn subsys1(subsys: &mut SubsystemHandle) -> Result<()> { subsys.start(SubsystemBuilder::new("Subsys2", subsys2)); tracing::info!("Subsystem1 started."); subsys.on_shutdown_requested().await; tracing::info!("Shutting down Subsystem1 ..."); sleep(Duration::from_millis(500)).await; tracing::info!("Subsystem1 stopped."); Ok(()) } async fn subsys2(_: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem2 started."); sleep(Duration::from_millis(500)).await; Err(miette!("Subsystem2 failed intentionally.")) } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/02_structs.rs
examples/02_structs.rs
//! This example demonstrates how using subsystem structs enables //! custom parameters to be passed to the subsystem. //! //! There are two ways of using structs as subsystems, by either //! wrapping them in a closure, or by implementing the //! IntoSubsystem trait. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{IntoSubsystem, SubsystemBuilder, SubsystemHandle, Toplevel}; struct Subsystem1 { arg: u32, } impl Subsystem1 { async fn run(self, subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem1 started. Extra argument: {}", self.arg); subsys.on_shutdown_requested().await; tracing::info!("Shutting down Subsystem1 ..."); sleep(Duration::from_millis(500)).await; tracing::info!("Subsystem1 stopped."); Ok(()) } } struct Subsystem2 { arg: u32, } impl IntoSubsystem<miette::Report> for Subsystem2 { async fn run(self, subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem2 started. Extra argument: {}", self.arg); subsys.on_shutdown_requested().await; tracing::info!("Shutting down Subsystem2 ..."); sleep(Duration::from_millis(500)).await; tracing::info!("Subsystem2 stopped."); Ok(()) } } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); let subsys1 = Subsystem1 { arg: 42 }; let subsys2 = Subsystem2 { arg: 69 }; // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new( "Subsys1", async |a: &mut SubsystemHandle| subsys1.run(a).await, )); s.start(SubsystemBuilder::new("Subsys2", subsys2.into_subsystem())); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/tokio_console.rs
examples/tokio_console.rs
//! This example demonstrates how to use the tokio-console application for tracing tokio tasks's //! runtime behaviour. Subsystems will appear under their registration names. //! //! Run this example with: //! //! ``` //! RUSTFLAGS="--cfg tokio_unstable" cargo run --features "tracing" --example tokio_console //! ``` //! //! Then, open the `tokio-console` application (see https://crates.io/crates/tokio-console) to //! follow the subsystem tasks live. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{FutureExt, SubsystemBuilder, SubsystemHandle, Toplevel}; use tracing::Level; use tracing_subscriber::{fmt::writer::MakeWriterExt, prelude::*}; async fn child(subsys: &mut SubsystemHandle) -> Result<()> { sleep(Duration::from_millis(3000)) .cancel_on_shutdown(&subsys) .await .ok(); Ok(()) } async fn parent(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Parent started."); let mut iteration = 0; while !subsys.is_shutdown_requested() { subsys.start(SubsystemBuilder::new(format!("child{iteration}"), child)); iteration += 1; sleep(Duration::from_millis(1000)) .cancel_on_shutdown(&subsys) .await .ok(); } tracing::info!("Parent stopped."); Ok(()) } #[tokio::main] async fn main() -> Result<()> { // Init tokio-console server and tracing let console_layer = console_subscriber::spawn(); tracing_subscriber::registry() .with(console_layer) .with( tracing_subscriber::fmt::layer() .with_writer(std::io::stdout.with_max_level(Level::DEBUG)) .compact(), ) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("parent", parent)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/10_request_shutdown.rs
examples/10_request_shutdown.rs
//! This example demonstrates how a subsystem can initiate //! a shutdown. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{ FutureExt, SubsystemBuilder, SubsystemHandle, Toplevel, errors::CancelledByShutdown, }; struct CountdownSubsystem {} impl CountdownSubsystem { fn new() -> Self { Self {} } async fn countdown(&self) { for i in (1..10).rev() { tracing::info!("Shutting down in: {}", i); sleep(Duration::from_millis(1000)).await; } } async fn run(self, subsys: &mut SubsystemHandle) -> Result<()> { match self.countdown().cancel_on_shutdown(subsys).await { Ok(()) => subsys.request_shutdown(), Err(CancelledByShutdown) => tracing::info!("Countdown cancelled."), } Ok(()) } } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new( "Countdown", async |h: &mut SubsystemHandle| CountdownSubsystem::new().run(h).await, )); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/11_double_panic.rs
examples/11_double_panic.rs
//! This example demonstrates if a subsystem panics during a shutdown caused //! by another panic, the shutdown is still performed normally and the third //! subsystem gets cleaned up without a problem. //! //! Note that this even works when running in tokio's single-threaded mode. //! //! There is no real programming knowledge to be gained here, this example is just //! to demonstrate the robustness of the system. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{SubsystemBuilder, SubsystemHandle, Toplevel}; async fn subsys1(subsys: &mut SubsystemHandle) -> Result<()> { subsys.start(SubsystemBuilder::new("Subsys2", subsys2)); subsys.start(SubsystemBuilder::new("Subsys3", subsys3)); tracing::info!("Subsystem1 started."); subsys.on_shutdown_requested().await; tracing::info!("Shutting down Subsystem1 ..."); sleep(Duration::from_millis(200)).await; panic!("Subsystem1 panicked!"); } async fn subsys2(_subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem2 started."); sleep(Duration::from_millis(500)).await; panic!("Subsystem2 panicked!") } async fn subsys3(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem3 started."); subsys.on_shutdown_requested().await; tracing::info!("Shutting down Subsystem3 ..."); sleep(Duration::from_millis(500)).await; tracing::info!("Subsystem3 shut down successfully."); Ok(()) } #[tokio::main(flavor = "current_thread")] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/23_shutdown_from_external.rs
examples/23_shutdown_from_external.rs
//! This example demonstrates how the entire tokio runtime can be run //! in its own thread and how the subsystem tree can then be shut down //! from another thread thread. use miette::{Result, miette}; use tokio::{ runtime::Runtime, time::{Duration, sleep}, }; use tokio_graceful_shutdown::{FutureExt, SubsystemBuilder, SubsystemHandle, Toplevel}; use tokio_util::sync::CancellationToken; async fn counter(subsys: &mut SubsystemHandle) -> Result<()> { let mut i = 1; while !subsys.is_shutdown_requested() { tracing::info!("Counter: {}", i); sleep(Duration::from_millis(1000)) .cancel_on_shutdown(subsys) .await .ok(); i += 1; } tracing::info!("Counter stopped."); Ok(()) } fn tokio_thread(shutdown_token: CancellationToken) -> Result<()> { Runtime::new().unwrap().block_on(async { // Setup and execute subsystem tree Toplevel::new_with_shutdown_token( async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Counter", counter)); }, shutdown_token, ) .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }) } fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); let shutdown_token = CancellationToken::new(); let tokio_thread_handle = std::thread::spawn({ let shutdown_token = shutdown_token.clone(); move || tokio_thread(shutdown_token) }); std::thread::sleep(Duration::from_millis(4500)); tracing::info!("Initiating shutdown ..."); shutdown_token.cancel(); match tokio_thread_handle.join() { Ok(result) => { tracing::info!("Shutdown finished."); result } Err(_) => Err(miette!("Error while waiting for tokio thread!")), } }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/06_nested_subsystems.rs
examples/06_nested_subsystems.rs
//! This example demonstrates how one subsystem can launch another //! nested subsystem. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{SubsystemBuilder, SubsystemHandle, Toplevel}; async fn subsys1(subsys: &mut SubsystemHandle) -> Result<()> { subsys.start(SubsystemBuilder::new("Subsys2", subsys2)); tracing::info!("Subsystem1 started."); subsys.on_shutdown_requested().await; tracing::info!("Shutting down Subsystem1 ..."); sleep(Duration::from_millis(500)).await; tracing::info!("Subsystem1 stopped."); Ok(()) } async fn subsys2(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem2 started."); subsys.on_shutdown_requested().await; tracing::info!("Shutting down Subsystem2 ..."); sleep(Duration::from_millis(500)).await; tracing::info!("Subsystem2 stopped."); Ok(()) } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/15_without_miette.rs
examples/15_without_miette.rs
//! This example shows how to use this library with std::error::Error instead of miette::Error use std::error::Error; use std::fmt; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{SubsystemBuilder, SubsystemHandle, Toplevel}; #[derive(Debug, Clone)] struct MyError; impl fmt::Display for MyError { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!(f, "MyError") } } impl Error for MyError {} async fn subsys1(_subsys: &mut SubsystemHandle) -> Result<(), MyError> { tracing::info!("Subsystem1 started."); sleep(Duration::from_millis(500)).await; tracing::info!("Subsystem1 stopped."); // Task ends with an error. This should cause the main program to shutdown. Err(MyError {}) } #[tokio::main] async fn main() -> Result<(), Box<dyn Error>> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/20_orchestrated_shutdown_order.rs
examples/20_orchestrated_shutdown_order.rs
//! This example demonstrates how a parent subsystem could orchestrate //! the shutdown order of its children manually. //! //! This is done by spawning the children in 'detached' mode to prevent //! that the shutdown signal gets passed to the children. //! Then, the parent calls `initialize_shutdown` on each child manually. use miette::Result; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{FutureExt, SubsystemBuilder, SubsystemHandle, Toplevel}; async fn counter(id: &str) { let mut i = 0; loop { tracing::info!("{id}: {i}"); i += 1; sleep(Duration::from_millis(50)).await; } } async fn child(name: &str, subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("{name} started."); if counter(name).cancel_on_shutdown(subsys).await.is_ok() { tracing::info!("{name} counter finished."); } else { tracing::info!("{name} shutting down ..."); sleep(Duration::from_millis(200)).await; } subsys.on_shutdown_requested().await; tracing::info!("{name} stopped."); Ok(()) } async fn parent(subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Parent started."); tracing::info!("Starting detached nested subsystems ..."); let nested1 = subsys.start( SubsystemBuilder::new("Nested1", async |s: &mut SubsystemHandle| { child("Nested1", s).await }) .detached(), ); let nested2 = subsys.start( SubsystemBuilder::new("Nested2", async |s: &mut SubsystemHandle| { child("Nested2", s).await }) .detached(), ); let nested3 = subsys.start( SubsystemBuilder::new("Nested3", async |s: &mut SubsystemHandle| { child("Nested3", s).await }) .detached(), ); tracing::info!("Nested subsystems started."); // Wait for the shutdown to happen subsys.on_shutdown_requested().await; // Shut down children sequentially. As they are detached, they will not shutdown on their own, // but need to be shut down manually via `initiate_shutdown`. 
tracing::info!("Initiating Nested1 shutdown ..."); nested1.initiate_shutdown(); nested1.join().await?; tracing::info!("Initiating Nested2 shutdown ..."); nested2.initiate_shutdown(); nested2.join().await?; tracing::info!("Initiating Nested3 shutdown ..."); nested3.initiate_shutdown(); nested3.join().await?; tracing::info!("All children finished, stopping Root ..."); sleep(Duration::from_millis(200)).await; tracing::info!("Root stopped."); Ok(()) } #[tokio::main(flavor = "current_thread")] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("parent", parent)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/16_with_anyhow.rs
examples/16_with_anyhow.rs
//! This example shows how to use this library with anyhow instead of miette use anyhow::{Result, anyhow}; use tokio::time::{Duration, sleep}; use tokio_graceful_shutdown::{SubsystemBuilder, SubsystemHandle, Toplevel}; async fn subsys1(_subsys: &mut SubsystemHandle) -> Result<()> { tracing::info!("Subsystem1 started."); sleep(Duration::from_millis(500)).await; tracing::info!("Subsystem1 stopped."); // Task ends with an error. This should cause the main program to shutdown. Err(anyhow!("Subsystem1 threw an error.")) } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("Subsys1", subsys1)); }) .catch_signals() .handle_shutdown_requests(Duration::from_millis(1000)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
Finomnis/tokio-graceful-shutdown
https://github.com/Finomnis/tokio-graceful-shutdown/blob/21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3/examples/21_tcp_echo_server.rs
examples/21_tcp_echo_server.rs
//! This example demonstrates how to gracefully shutdown a server //! that spawns an indefinite number of connection tasks. //! //! The server is a simple TCP echo server, capitalizing the data //! it echos (to demonstrate that it computes things). //! On shutdown, it transmits a goodbye message, to demonstrate //! that during shutdown we can still perform cleanup steps. //! //! This example is similar to the hyper example; for a more complex //! version of this same example, look there. use miette::{Context, IntoDiagnostic, Result}; use tokio::io::{AsyncReadExt, AsyncWriteExt}; use tokio::time::Duration; use tokio_graceful_shutdown::errors::CancelledByShutdown; use tokio_graceful_shutdown::{FutureExt, SubsystemBuilder, SubsystemHandle, Toplevel}; use std::net::SocketAddr; use tokio::net::{TcpListener, TcpStream}; use tokio_util::task::TaskTracker; async fn echo_connection(tcp: &mut TcpStream) -> Result<()> { tcp.write_all(b"Hello!\r\n").await.into_diagnostic()?; let mut buffer = [0u8; 256]; loop { match tcp.read(&mut buffer).await { Ok(0) => return Ok(()), Err(e) => return Err(e).into_diagnostic(), Ok(len) => { let bytes = &mut buffer[..len]; for byte in bytes.iter_mut() { *byte = byte.to_ascii_uppercase(); } tcp.write_all(bytes).await.into_diagnostic()?; } } } } async fn echo_connection_shutdown(tcp: &mut TcpStream) -> Result<()> { tcp.write_all(b"Goodbye.\r\n").await.into_diagnostic()?; tcp.shutdown().await.into_diagnostic()?; Ok(()) } async fn connection_handler( subsys: &mut SubsystemHandle, listener: TcpListener, connection_tracker: TaskTracker, ) -> Result<()> { loop { let connection = match listener.accept().cancel_on_shutdown(subsys).await { Ok(connection) => connection, Err(CancelledByShutdown) => break, }; let (mut tcp, addr) = connection .into_diagnostic() .context("Error while waiting for connection")?; // Spawn handler on connection tracker to give the parent subsystem // the chance to wait for the shutdown to finish connection_tracker.spawn({ let 
cancellation_token = subsys.create_cancellation_token(); async move { tracing::info!("Connected to {} ...", addr); let result = tokio::select! { e = echo_connection(&mut tcp) => e, _ = cancellation_token.cancelled() => { tracing::info!("Shutting down {} ...", addr); echo_connection_shutdown(&mut tcp).await }, }; if let Err(err) = result { tracing::warn!("Error serving connection: {:?}", err); } else { tracing::info!("Connection to {} closed.", addr); } } }); } Ok(()) } async fn echo_subsystem(subsys: &mut SubsystemHandle) -> Result<()> { let addr: SocketAddr = ([127, 0, 0, 1], 12345).into(); // Bind to the port and listen for incoming TCP connections let listener = TcpListener::bind(addr) .await .into_diagnostic() .context("Unable to start tcp server")?; tracing::info!("Listening on {}", addr); // Use a tasktracker instead of spawning a subsystem for every connection, // as this would result in a lot of overhead. let connection_tracker = TaskTracker::new(); let listener = subsys.start(SubsystemBuilder::new("Echo Listener", { let connection_tracker = connection_tracker.clone(); async move |subsys: &mut SubsystemHandle| { connection_handler(subsys, listener, connection_tracker).await } })); // Make sure no more tasks can be spawned before we close the tracker listener.join().await?; // Wait for connections to close connection_tracker.close(); connection_tracker.wait().await; Ok(()) } #[tokio::main] async fn main() -> Result<()> { // Init logging tracing_subscriber::fmt() .with_max_level(tracing::Level::DEBUG) .init(); // Setup and execute subsystem tree Toplevel::new(async |s: &mut SubsystemHandle| { s.start(SubsystemBuilder::new("EchoServer", echo_subsystem)); }) .catch_signals() .handle_shutdown_requests(Duration::from_secs(5)) .await .map_err(Into::into) }
rust
Apache-2.0
21f915e6cd39cd139c5ab4fa8199b9d87dc5aaa3
2026-01-04T20:24:42.405407Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/lib.rs
src/lib.rs
#![forbid(missing_docs)] #![doc(html_root_url = "https://docs.rs/trie-rs/0.4.2")] #![doc = include_str!("../README.md")] pub mod inc_search; mod internal_data_structure; pub mod iter; pub mod map; mod trie; pub mod try_collect; pub use trie::{Trie, TrieBuilder};
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/try_collect.rs
src/try_collect.rs
//! Try to collect from an iterator; operation may fail. //! //! Any type can that be `collect()`ed can be `try_collect()`ed without fail. //! //! # Usage //! //! The simplest usage is like this. //! //! ``` //! use trie_rs::try_collect::*; //! let bytes: Vec<u8> = vec![72, 105]; //! let s: String = bytes.into_iter().try_collect().unwrap(); //! assert_eq!(s, "Hi"); //! ``` //! //! # Motivation //! //! I really wanted to be able to turn a `Iterator<Item = u8>` into a String //! more easily, so that one could accumulate trie entries as `Vec<u8>`s or as //! `String`s. This is made complicated by the fact that [String] does not have //! a `FromIterator<u8>` implementation, and the method it does have //! `from_utf8()` is fallible; it returns a `Result`. //! //! Thus [TryFromIterator] is simply a fallible version of //! [std::iter::FromIterator]. And `try_collect()` is `collect()` fallible //! cousin as well. //! //! # Technical Note //! //! `TryFromIterator<A, M>` accepts a generic type `M` marker parameter. In //! general usage, the caller will simply pass along a generic `M` type. //! //! The reason it exists is so we can specify a blanket implementation of //! [TryFromIterator] for all [std::iter::FromIterator]s, and we can also //! specify one for [String]. //! //! Without this marker type, it's not possible to have a blanket and //! specialized implementation of the trait. //! use std::fmt::Debug; use std::iter::FromIterator; /// Try to collect from an iterator; operation may fail. pub trait TryCollect: Iterator { /// Use this iterator to collect into a container `C`, may fail. fn try_collect<C, M>(self) -> Result<C, C::Error> where C: TryFromIterator<Self::Item, M>, Self: Sized, { C::try_from_iter(self) } } impl<T> TryCollect for T where T: Iterator + ?Sized {} /// Try to create an object from an iterator. pub trait TryFromIterator<A, Marker> { /// Error type of [TryFromIterator::try_from_iter]. type Error: Debug; /// Try to turn the given iterator into `Self`. 
fn try_from_iter<T>(iter: T) -> Result<Self, Self::Error> where Self: Sized, T: IntoIterator<Item = A>; } #[derive(Debug, Clone)] /// Marker type for blanket [TryFromIterator] implementation. #[doc(hidden)] pub struct Collect; impl<S, A> TryFromIterator<A, Collect> for S where S: FromIterator<A>, { type Error = (); fn try_from_iter<T>(iter: T) -> Result<Self, Self::Error> where Self: Sized, T: IntoIterator<Item = A>, { Ok(FromIterator::from_iter(iter)) } } #[derive(Debug, Clone)] /// Marker type for String [TryFromIterator] implementation. #[doc(hidden)] pub struct StringCollect; impl TryFromIterator<u8, StringCollect> for String { type Error = std::string::FromUtf8Error; fn try_from_iter<T>(iter: T) -> Result<Self, Self::Error> where Self: Sized, T: IntoIterator<Item = u8>, { String::from_utf8(iter.into_iter().collect()) } }
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/map.rs
src/map.rs
//! A trie that maps sequence of `Label`s to a `Value`. use crate::internal_data_structure::naive_trie::NaiveTrie; use louds_rs::Louds; mod trie; mod trie_builder; #[cfg(feature = "mem_dbg")] use mem_dbg::MemDbg; #[derive(Debug, Clone)] #[cfg_attr(feature = "mem_dbg", derive(mem_dbg::MemDbg, mem_dbg::MemSize))] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] /// A trie for sequences of the type `Label`; each sequence has an associated `Value`. pub struct Trie<Label, Value> { louds: Louds, /// (LoudsNodeNum - 2) -> TrieLabel trie_labels: Vec<TrieLabel<Label, Value>>, } #[derive(Debug, Clone)] #[cfg_attr(feature = "mem_dbg", derive(mem_dbg::MemDbg, mem_dbg::MemSize))] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] /// A trie builder for [Trie]. pub struct TrieBuilder<Label, Value> { naive_trie: NaiveTrie<Label, Value>, } #[derive(Debug, Clone)] #[cfg_attr(feature = "mem_dbg", derive(mem_dbg::MemDbg, mem_dbg::MemSize))] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] struct TrieLabel<Label, Value> { label: Label, value: Option<Value>, }
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/inc_search.rs
src/inc_search.rs
//! Incremental search //! //! # Motivation //! //! The motivation for this struct is for "online" or interactive use cases. One //! often accumulates input to match against a trie. Using the standard //! [`exact_match()`][crate::trie::Trie::exact_match] faculties which has a time //! complexity of _O(m log n)_ where _m_ is the query string length and _n_ is //! the number of entries in the trie. Consider this loop where we simulate //! accumulating a query. //! //! ```rust //! use trie_rs::Trie; //! //! let q = "appli"; // query string //! let mut is_match: bool; //! let trie = Trie::from_iter(vec!["appli", "application"]); //! for i in 0..q.len() - 1 { //! assert!(!trie.exact_match(&q[0..i])); //! } //! assert!(trie.exact_match(q)); //! ``` //! //! Building the query one "character" at a time and `exact_match()`ing each //! time, the loop has effectively complexity of _O(m<sup>2</sup> log n)_. //! //! Using the incremental search, the time complexity of each query is _O(log //! n)_ which returns an [Answer] enum. //! //! ```ignore //! let q = "appli"; // query string //! let inc_search = trie.inc_search(); //! let mut is_match: bool; //! for i = 0..q.len() { //! is_match = inc_search.query(q[i]).unwrap().is_match(); //! } //! ``` //! //! This means the above code restores the time complexity of _O(m log n)_ for //! the loop. use crate::{ map::Trie, try_collect::{TryCollect, TryFromIterator}, }; use louds_rs::LoudsNodeNum; #[derive(Debug, Clone)] /// An incremental search of the trie. pub struct IncSearch<'a, Label, Value> { trie: &'a Trie<Label, Value>, node: LoudsNodeNum, } /// Search position in the trie. /// /// # Why do this? /// /// "Position" is more descriptive for incremental search purposes, and without /// it a user would have to explicitly depend on `louds-rs`. pub type Position = LoudsNodeNum; /// Retrieve the position the search is on. Useful for hanging on to a search /// without having to fight the borrow checker because its borrowing a trie. 
impl<'a, L, V> From<IncSearch<'a, L, V>> for Position { fn from(inc_search: IncSearch<'a, L, V>) -> Self { inc_search.node } } /// A "matching" answer to an incremental search on a partial query. #[derive(Debug, PartialEq, Eq, Clone, Copy)] pub enum Answer { /// There is a prefix here. Prefix, /// There is an exact match here. Match, /// There is a prefix and an exact match here. PrefixAndMatch, } impl Answer { /// Is query answer a prefix? pub fn is_prefix(&self) -> bool { matches!(self, Answer::Prefix | Answer::PrefixAndMatch) } /// Is query answer an exact match? pub fn is_match(&self) -> bool { matches!(self, Answer::Match | Answer::PrefixAndMatch) } fn new(is_prefix: bool, is_match: bool) -> Option<Self> { match (is_prefix, is_match) { (true, false) => Some(Answer::Prefix), (false, true) => Some(Answer::Match), (true, true) => Some(Answer::PrefixAndMatch), (false, false) => None, } } } impl<'a, Label: Ord, Value> IncSearch<'a, Label, Value> { /// Create a new incremental search for a trie. pub fn new(trie: &'a Trie<Label, Value>) -> Self { Self { trie, node: LoudsNodeNum(1), } } /// Resume an incremental search at a particular point. /// /// ``` /// use trie_rs::{Trie, inc_search::{Answer, IncSearch}}; /// use louds_rs::LoudsNodeNum; /// /// let trie: Trie<u8> = ["hello", "bye"].into_iter().collect(); /// let mut inc_search = trie.inc_search(); /// /// assert_eq!(inc_search.query_until("he"), Ok(Answer::Prefix)); /// let position = LoudsNodeNum::from(inc_search); /// /// // inc_search is dropped. /// let mut inc_search2 = IncSearch::resume(&trie.0, position); /// assert_eq!(inc_search2.query_until("llo"), Ok(Answer::Match)); /// /// ``` pub fn resume(trie: &'a Trie<Label, Value>, position: Position) -> Self { Self { trie, node: position, } } /// Query but do not change the node we're looking at on the trie. 
pub fn peek(&self, chr: &Label) -> Option<Answer> { let children_node_nums: Vec<_> = self.trie.children_node_nums(self.node).collect(); let res = self .trie .bin_search_by_children_labels(chr, &children_node_nums[..]); match res { Ok(j) => { let node = children_node_nums[j]; let is_prefix = self.trie.has_children_node_nums(node); let is_match = self.trie.value(node).is_some(); Answer::new(is_prefix, is_match) } Err(_) => None, } } /// Query the trie and go to node if there is a match. pub fn query(&mut self, chr: &Label) -> Option<Answer> { let children_node_nums: Vec<_> = self.trie.children_node_nums(self.node).collect(); let res = self .trie .bin_search_by_children_labels(chr, &children_node_nums[..]); match res { Ok(j) => { self.node = children_node_nums[j]; let is_prefix = self.trie.has_children_node_nums(self.node); let is_match = self.trie.value(self.node).is_some(); Answer::new(is_prefix, is_match) } Err(_) => None, } } /// Query the trie with a sequence. Will return `Err(index of query)` on /// first failure to match. pub fn query_until(&mut self, query: impl AsRef<[Label]>) -> Result<Answer, usize> { let mut result = None; let mut i = 0; for chr in query.as_ref().iter() { result = self.query(chr); if result.is_none() { return Err(i); } i += 1; } result.ok_or(i) } /// Return the value at current node. There should be one for any node where /// `answer.is_match()` is true. pub fn value(&self) -> Option<&'a Value> { self.trie.value(self.node) } /// Go to the longest shared prefix. 
pub fn goto_longest_prefix(&mut self) -> Result<usize, usize> { let mut count = 0; while count == 0 || !self.trie.is_terminal(self.node) { let mut iter = self.trie.children_node_nums(self.node); let first = iter.next(); let second = iter.next(); match (first, second) { (Some(child_node_num), None) => { self.node = child_node_num; count += 1; } (None, _) => { assert_eq!(count, 0); return Ok(count); } _ => { return Err(count); } } } Ok(count) } /// Return the current prefix for this search. pub fn prefix<C, M>(&self) -> C where C: TryFromIterator<Label, M>, Label: Clone, { let mut v: Vec<Label> = self .trie .child_to_ancestors(self.node) .map(|node| self.trie.label(node).clone()) .collect(); v.reverse(); v.into_iter().try_collect().expect("Could not collect") } /// Returne the length of the current prefix for this search. pub fn prefix_len(&self) -> usize { // TODO: If PR for child_to_ancestors is accepted. Use the iterator and // remove `pub(crate)` from Trie.louds field. Also uncomment prefix() // above. self.trie.child_to_ancestors(self.node).count() // let mut node = self.node; // let mut count = 0; // while node.0 > 1 { // let index = self.trie.louds.node_num_to_index(node); // node = self.trie.louds.child_to_parent(index); // count += 1; // } // count } // This isn't actually possible. // /// Return the mutable value at current node. There should be one for any // /// node where `answer.is_match()` is true. // /// // /// Note: Because [IncSearch] does not store a mutable reference to the // /// trie, a mutable reference must be provided. // pub fn value_mut<'b>(self, trie: &'b mut Trie<Label, Value>) -> Option<&'b mut Value> { // trie.value_mut(self.node) // } /// Reset the query. 
pub fn reset(&mut self) { self.node = LoudsNodeNum(1); } } #[cfg(test)] mod search_tests { use super::*; use crate::map::{Trie, TrieBuilder}; fn build_trie() -> Trie<u8, u8> { let mut builder = TrieBuilder::new(); builder.push("a", 0); builder.push("app", 1); builder.push("apple", 2); builder.push("better", 3); builder.push("application", 4); builder.push("アップル🍎", 5); builder.build() } #[test] fn inc_search() { let trie = build_trie(); let mut search = trie.inc_search(); assert_eq!("", search.prefix::<String, _>()); assert_eq!(0, search.prefix_len()); assert_eq!(None, search.query(&b'z')); assert_eq!("", search.prefix::<String, _>()); assert_eq!(0, search.prefix_len()); assert_eq!(Answer::PrefixAndMatch, search.query(&b'a').unwrap()); assert_eq!("a", search.prefix::<String, _>()); assert_eq!(1, search.prefix_len()); assert_eq!(Answer::Prefix, search.query(&b'p').unwrap()); assert_eq!("ap", search.prefix::<String, _>()); assert_eq!(2, search.prefix_len()); assert_eq!(Answer::PrefixAndMatch, search.query(&b'p').unwrap()); assert_eq!("app", search.prefix::<String, _>()); assert_eq!(3, search.prefix_len()); assert_eq!(Answer::Prefix, search.query(&b'l').unwrap()); assert_eq!("appl", search.prefix::<String, _>()); assert_eq!(4, search.prefix_len()); assert_eq!(Answer::Match, search.query(&b'e').unwrap()); assert_eq!("apple", search.prefix::<String, _>()); assert_eq!(5, search.prefix_len()); } #[test] fn inc_search_value() { let trie = build_trie(); let mut search = trie.inc_search(); assert_eq!("", search.prefix::<String, _>()); assert_eq!(None, search.query(&b'z')); assert_eq!("", search.prefix::<String, _>()); assert_eq!(Answer::PrefixAndMatch, search.query(&b'a').unwrap()); assert_eq!("a", search.prefix::<String, _>()); assert_eq!(Answer::Prefix, search.query(&b'p').unwrap()); assert_eq!("ap", search.prefix::<String, _>()); assert_eq!(Answer::PrefixAndMatch, search.query(&b'p').unwrap()); assert_eq!("app", search.prefix::<String, _>()); assert_eq!(Answer::Prefix, 
search.query(&b'l').unwrap()); assert_eq!("appl", search.prefix::<String, _>()); assert_eq!(Answer::Match, search.query(&b'e').unwrap()); assert_eq!("apple", search.prefix::<String, _>()); assert_eq!(Some(&2), search.value()); } #[test] fn inc_search_query_until() { let trie = build_trie(); let mut search = trie.inc_search(); assert_eq!(Err(0), search.query_until("zoo")); assert_eq!("", search.prefix::<String, _>()); search.reset(); assert_eq!(Err(1), search.query_until("blue")); assert_eq!("b", search.prefix::<String, _>()); search.reset(); assert_eq!(Answer::Match, search.query_until("apple").unwrap()); assert_eq!("apple", search.prefix::<String, _>()); assert_eq!(Some(&2), search.value()); } #[test] fn inc_search_goto_longest_prefix() { let trie = build_trie(); let mut search = trie.inc_search(); assert_eq!(Err(0), search.goto_longest_prefix()); assert_eq!("", search.prefix::<String, _>()); search.reset(); assert_eq!(Ok(Answer::PrefixAndMatch), search.query_until("a")); assert_eq!("a", search.prefix::<String, _>()); assert_eq!(Ok(2), search.goto_longest_prefix()); assert_eq!("app", search.prefix::<String, _>()); assert_eq!(Err(1), search.goto_longest_prefix()); assert_eq!("appl", search.prefix::<String, _>()); assert_eq!(Err(0), search.goto_longest_prefix()); assert_eq!(Ok(Answer::Prefix), search.query_until("i")); assert_eq!(Ok(6), search.goto_longest_prefix()); assert_eq!(Ok(0), search.goto_longest_prefix()); assert_eq!("application", search.prefix::<String, _>()); search.reset(); assert_eq!(Answer::Match, search.query_until("apple").unwrap()); assert_eq!("apple", search.prefix::<String, _>()); assert_eq!(Some(&2), search.value()); } // #[test] // fn inc_serach_value_mut() { // let trie = build_trie(); // let mut search = trie.inc_search(); // assert_eq!(None, search.query(b'z')); // assert_eq!(Answer::PrefixAndMatch, search.query(b'a').unwrap()); // assert_eq!(Answer::Prefix, search.query(b'p').unwrap()); // assert_eq!(Answer::PrefixAndMatch, 
search.query(b'p').unwrap()); // assert_eq!(Answer::Prefix, search.query(b'l').unwrap()); // assert_eq!(Answer::Match, search.query(b'e').unwrap()); // let mut v = search.value_mut(&mut trie); // assert_eq!(Some(&2), v.as_deref()) // } }
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/internal_data_structure.rs
src/internal_data_structure.rs
pub mod naive_trie;
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/internal_data_structure/naive_trie.rs
src/internal_data_structure/naive_trie.rs
pub mod naive_trie_b_f_iter; pub mod naive_trie_impl; #[cfg(feature = "mem_dbg")] use mem_dbg::MemDbg; #[derive(Debug, Clone)] #[cfg_attr(feature = "mem_dbg", derive(mem_dbg::MemDbg, mem_dbg::MemSize))] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] /// Naive trie with ordered Label sequence in edges. /// /// The following naive trie contains these words. /// - a /// - app /// - apple /// - application /// /// ```text /// <Root> /// | /// | a: Label /// <IntermOrLeaf (Terminate)> /// | /// | p /// <IntermOrLeaf> /// | /// | p /// <IntermOrLeaf (Terminate)> /// | /// | l /// <IntermOrLeaf> /// |------------------------------+ /// | e | i /// <IntermOrLeaf (Terminate)> <IntermOrLeaf> /// | /// | c /// <IntermOrLeaf> /// | /// | a /// <IntermOrLeaf> /// | /// | t /// <IntermOrLeaf> /// | /// | i /// <IntermOrLeaf> /// | /// | o /// <IntermOrLeaf> /// | /// | n /// <IntermOrLeaf (Terminate)> /// ``` pub enum NaiveTrie<Label, Value> { Root(NaiveTrieRoot<Label, Value>), IntermOrLeaf(NaiveTrieIntermOrLeaf<Label, Value>), /// Used for Breadth-First iteration. /// /// ```text /// <Root> /// | /// |------------------+- - - - - - - - + /// | a | i | /// <IntermOrLeaf> <IntermOrLeaf> <PhantomSibling> /// | | /// . +- - - - - - - - + /// | | n | /// <PhantomSibling> <IntermOrLeaf> <PhantomSibling> /// | /// | /// | /// <PhantomSibling> /// ``` /// /// This trie's BFIter emits: /// `a i <PhantomSibling> <PhantomSibling> n <PhantomSibling> <PhantomSibling>` PhantomSibling, } #[derive(Debug, Clone)] #[cfg_attr(feature = "mem_dbg", derive(mem_dbg::MemDbg, mem_dbg::MemSize))] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct NaiveTrieRoot<Label, Value> { /// Sorted by Label's order. 
children: Vec<NaiveTrie<Label, Value>>, } #[derive(Debug, Clone)] #[cfg_attr(feature = "mem_dbg", derive(mem_dbg::MemDbg, mem_dbg::MemSize))] #[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))] pub struct NaiveTrieIntermOrLeaf<Label, Value> { /// Sorted by Label's order. children: Vec<NaiveTrie<Label, Value>>, pub(crate) label: Label, pub(crate) value: Option<Value>, }
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/internal_data_structure/naive_trie/naive_trie_impl.rs
src/internal_data_structure/naive_trie/naive_trie_impl.rs
use super::naive_trie_b_f_iter::NaiveTrieBFIter; use super::{NaiveTrie, NaiveTrieIntermOrLeaf, NaiveTrieRoot}; use std::vec::Drain; impl<'trie, Label: Ord, Value> NaiveTrie<Label, Value> { pub fn make_root() -> Self { NaiveTrie::Root(NaiveTrieRoot { children: vec![] }) } pub fn make_interm_or_leaf(label: Label, terminal: Option<Value>) -> Self { NaiveTrie::IntermOrLeaf(NaiveTrieIntermOrLeaf { children: vec![], label, value: terminal, }) } pub fn push<Arr: Iterator<Item = Label>>(&'trie mut self, word: Arr, value: Value) { let mut trie = self; for chr in word { let res = trie .children() .binary_search_by(|child| child.label().cmp(&chr)); match res { Ok(j) => { trie = match trie { NaiveTrie::Root(node) => &mut node.children[j], NaiveTrie::IntermOrLeaf(node) => &mut node.children[j], _ => panic!("Unexpected type"), }; } Err(j) => { let child_trie = Self::make_interm_or_leaf(chr, None); trie = match trie { NaiveTrie::Root(node) => { node.children.insert(j, child_trie); &mut node.children[j] } NaiveTrie::IntermOrLeaf(node) => { node.children.insert(j, child_trie); &mut node.children[j] } _ => panic!("Unexpected type"), }; } }; } match trie { NaiveTrie::IntermOrLeaf(node) => node.value = Some(value), _ => panic!("Unexpected type"), } } pub fn children(&self) -> &[Self] { match self { NaiveTrie::Root(node) => &node.children, NaiveTrie::IntermOrLeaf(node) => &node.children, _ => panic!("Unexpected type"), } } pub fn drain_children(&mut self) -> Drain<'_, Self> { match self { NaiveTrie::Root(node) => node.children.drain(0..), NaiveTrie::IntermOrLeaf(node) => node.children.drain(0..), _ => panic!("Unexpected type"), } } /// # Panics /// If self is not IntermOrLeaf. #[allow(dead_code)] pub fn value(&self) -> Option<&Value> { match self { NaiveTrie::IntermOrLeaf(node) => node.value.as_ref(), _ => panic!("Unexpected type"), } } /// # Panics /// If self is not IntermOrLeaf. 
pub fn label(&self) -> &Label { match self { NaiveTrie::IntermOrLeaf(node) => &node.label, _ => panic!("Unexpected type"), } } } impl<Label: Ord, Value> IntoIterator for NaiveTrie<Label, Value> { type Item = NaiveTrie<Label, Value>; type IntoIter = NaiveTrieBFIter<Label, Value>; fn into_iter(self) -> NaiveTrieBFIter<Label, Value> { NaiveTrieBFIter::new(self) } }
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/internal_data_structure/naive_trie/naive_trie_b_f_iter.rs
src/internal_data_structure/naive_trie/naive_trie_b_f_iter.rs
use super::NaiveTrie;
use std::collections::VecDeque;

#[derive(Debug)]
/// Iterates over NaiveTrie in Breadth-First manner.
pub struct NaiveTrieBFIter<Label, Value> {
    // FIFO queue of nodes not yet yielded; seeded with the iteration root.
    unvisited: VecDeque<NaiveTrie<Label, Value>>,
}

impl<Label, Value> NaiveTrieBFIter<Label, Value> {
    /// Create an iterator that starts from (and first yields) `iter_start`.
    pub fn new(iter_start: NaiveTrie<Label, Value>) -> Self {
        let mut unvisited = VecDeque::new();
        unvisited.push_back(iter_start);
        Self { unvisited }
    }
}

impl<Label: Ord, Value> Iterator for NaiveTrieBFIter<Label, Value> {
    type Item = NaiveTrie<Label, Value>;

    /// Returns:
    ///
    /// - None: All nodes are visited.
    /// - Some(NaiveTrie::Root): Root node.
    /// - Some(NaiveTrie::IntermOrLeaf): Intermediate or leaf node.
    /// - Some(NaiveTrie::PhantomSibling): Marker to represent "all siblings are iterated".
    fn next(&mut self) -> Option<Self::Item> {
        self.unvisited.pop_front().map(|mut trie| {
            match trie {
                NaiveTrie::Root(_) | NaiveTrie::IntermOrLeaf(_) => {
                    // Enqueue the children (drained, so the yielded node is
                    // returned childless), then a PhantomSibling marker that
                    // terminates this node's child list in the output stream.
                    for child in trie.drain_children() {
                        self.unvisited.push_back(child);
                    }
                    self.unvisited.push_back(NaiveTrie::PhantomSibling);
                }
                // Markers carry no children; just yield them.
                NaiveTrie::PhantomSibling => {}
            };
            trie
        })
    }
}

#[cfg(test)]
mod bf_iter_tests {
    // Tests use unit values; only labels and terminal-ness are checked.
    type NaiveTrie<T> = super::NaiveTrie<T, ()>;
    const TRUE: Option<()> = Some(());
    const FALSE: Option<()> = None;

    // Each case pushes `words` into a fresh trie and asserts the exact
    // breadth-first node sequence (variant, label, terminal flag).
    macro_rules! parameterized_tests {
        ($($name:ident: $value:expr,)*) => {
        $(
            #[test]
            fn $name() {
                let (words, expected_nodes) = $value;
                let mut trie = NaiveTrie::make_root();
                for word in words {
                    trie.push(word.bytes().into_iter(), ());
                }
                let nodes: Vec<NaiveTrie<u8>> = trie.into_iter().collect();
                assert_eq!(nodes.len(), expected_nodes.len());
                for i in 0..nodes.len() {
                    let node = &nodes[i];
                    let expected_node = &expected_nodes[i];
                    assert!(std::mem::discriminant(node) == std::mem::discriminant(expected_node));
                    if let NaiveTrie::IntermOrLeaf(n) = node {
                        assert_eq!(n.label, *expected_node.label());
                        assert_eq!(n.value.is_some(), expected_node.value().is_some());
                    }
                }
            }
        )*
        }
    }
    parameterized_tests! {
        t1: (
            Vec::<&str>::new(),
            vec![
                NaiveTrie::make_root(),
                // parent = root
                NaiveTrie::PhantomSibling,
            ]
        ),
        t2: (
            vec!["a"],
            vec![
                NaiveTrie::make_root(),
                // parent = root
                NaiveTrie::make_interm_or_leaf(b'a', TRUE),
                NaiveTrie::PhantomSibling,
                // parent = a
                NaiveTrie::PhantomSibling,
            ]
        ),
        t3: (
            // Duplicate pushes do not create duplicate nodes.
            vec!["a", "a"],
            vec![
                NaiveTrie::make_root(),
                // parent = root
                NaiveTrie::make_interm_or_leaf(b'a', TRUE),
                NaiveTrie::PhantomSibling,
                // parent = a
                NaiveTrie::PhantomSibling,
            ]
        ),
        t4: (
            // root
            //   |-----------------------+-----------------------+
            //   |                       |                       |
            //   a (term)                b                       Ph
            //   |---------+             |-----------------+
            //   |         |             |                 |
            //   n (term)  Ph            a                 Ph
            //   |                       |--------+
            //   |                       |        |
            //   Ph                      d (term) Ph
            //                           |
            //                           |
            //                           Ph
            vec!["a", "bad", "an"],
            vec![
                NaiveTrie::make_root(),
                // parent = root
                NaiveTrie::make_interm_or_leaf(b'a', TRUE),
                NaiveTrie::make_interm_or_leaf(b'b', FALSE),
                NaiveTrie::PhantomSibling,
                // parent = [a]
                NaiveTrie::make_interm_or_leaf(b'n', TRUE),
                NaiveTrie::PhantomSibling,
                // parent = b
                NaiveTrie::make_interm_or_leaf(b'a', FALSE),
                NaiveTrie::PhantomSibling,
                // parent = n
                NaiveTrie::PhantomSibling,
                // parent = b[a]d
                NaiveTrie::make_interm_or_leaf(b'd', TRUE),
                NaiveTrie::PhantomSibling,
                // parent = d
                NaiveTrie::PhantomSibling,
            ]
        ),
        t5: (
            // Multi-byte UTF-8 words are stored byte-by-byte:
            // 'り' => 227, 130, 138
            // 'ん' => 227, 130, 147
            // 'ご' => 227, 129, 148
            vec!["a", "an", "りんご", "りんりん"],
            vec![
                NaiveTrie::make_root(),
                // parent = root
                NaiveTrie::make_interm_or_leaf(b'a', TRUE),
                NaiveTrie::make_interm_or_leaf(227, FALSE),
                NaiveTrie::PhantomSibling,
                // parent = a
                NaiveTrie::make_interm_or_leaf(b'n', TRUE),
                NaiveTrie::PhantomSibling,
                // parent = [227] 130 138 (り)
                NaiveTrie::make_interm_or_leaf(130, FALSE),
                NaiveTrie::PhantomSibling,
                // parent = n
                NaiveTrie::PhantomSibling,
                // parent = 227 [130] 138 (り)
                NaiveTrie::make_interm_or_leaf(138, FALSE),
                NaiveTrie::PhantomSibling,
                // parent = 227 130 [138] (り)
                NaiveTrie::make_interm_or_leaf(227, FALSE),
                NaiveTrie::PhantomSibling,
                // parent = [227] 130 147 (ん)
                NaiveTrie::make_interm_or_leaf(130, FALSE),
                NaiveTrie::PhantomSibling,
                // parent = 227 [130] 147 (ん)
                NaiveTrie::make_interm_or_leaf(147, FALSE),
                NaiveTrie::PhantomSibling,
                // parent = 227 130 [147] (ん)
                NaiveTrie::make_interm_or_leaf(227, FALSE),
                NaiveTrie::PhantomSibling,
                // parent = [227] _ _ (ご or り)
                NaiveTrie::make_interm_or_leaf(129, FALSE),
                NaiveTrie::make_interm_or_leaf(130, FALSE),
                NaiveTrie::PhantomSibling,
                // parent = 227 [129] 148 (ご)
                NaiveTrie::make_interm_or_leaf(148, TRUE),
                NaiveTrie::PhantomSibling,
                // parent = 227 [130] 138 (り)
                NaiveTrie::make_interm_or_leaf(138, FALSE),
                NaiveTrie::PhantomSibling,
                // parent = 227 129 [148] (ご)
                NaiveTrie::PhantomSibling,
                // parent = 227 130 [138] (り)
                NaiveTrie::make_interm_or_leaf(227, FALSE),
                NaiveTrie::PhantomSibling,
                // parent = [227] 130 147 (ん)
                NaiveTrie::make_interm_or_leaf(130, FALSE),
                NaiveTrie::PhantomSibling,
                // parent = 227 [130] 147 (ん)
                NaiveTrie::make_interm_or_leaf(147, TRUE),
                NaiveTrie::PhantomSibling,
                // parent = 227 130 [147] (ん)
                NaiveTrie::PhantomSibling,
            ]
        ),
    }
}
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/trie/mod.rs
src/trie/mod.rs
//! Trie for label sequences without per-key values
//! (a thin wrapper around `map::Trie<Label, ()>` — see `trie_impl`).
mod trie_builder;
mod trie_impl;

pub use trie_builder::TrieBuilder;
pub use trie_impl::Trie;
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/trie/trie_impl.rs
src/trie/trie_impl.rs
use crate::inc_search::IncSearch;
use crate::iter::{Keys, KeysExt, PostfixIter, PrefixIter, SearchIter};
use crate::map;
use crate::try_collect::TryFromIterator;
use std::iter::FromIterator;

#[cfg(feature = "mem_dbg")]
use mem_dbg::MemDbg;

#[derive(Debug, Clone)]
#[cfg_attr(feature = "mem_dbg", derive(mem_dbg::MemDbg, mem_dbg::MemSize))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
/// A trie for sequences of the type `Label`.
///
/// Set-like view over [map::Trie] with `()` as the value type; every method
/// delegates to the map trie and strips values off.
pub struct Trie<Label>(pub map::Trie<Label, ()>);

impl<Label: Ord> Trie<Label> {
    /// Return true if `query` is an exact match.
    ///
    /// # Arguments
    /// * `query` - The query to search for.
    ///
    /// # Examples
    /// In the following example we illustrate how to query an exact match.
    ///
    /// ```rust
    /// use trie_rs::Trie;
    ///
    /// let trie = Trie::from_iter(["a", "app", "apple", "better", "application"]);
    ///
    /// assert!(trie.exact_match("application"));
    /// assert!(trie.exact_match("app"));
    /// assert!(!trie.exact_match("appla"));
    ///
    /// ```
    pub fn exact_match(&self, query: impl AsRef<[Label]>) -> bool {
        self.0.exact_match(query).is_some()
    }

    /// Return the common prefixes of `query`.
    ///
    /// # Arguments
    /// * `query` - The query to search for.
    ///
    /// # Examples
    /// In the following example we illustrate how to query the common prefixes of a query string.
    ///
    /// ```rust
    /// use trie_rs::Trie;
    ///
    /// let trie = Trie::from_iter(["a", "app", "apple", "better", "application"]);
    ///
    /// let results: Vec<String> = trie.common_prefix_search("application").collect();
    ///
    /// assert_eq!(results, vec!["a", "app", "application"]);
    ///
    /// ```
    pub fn common_prefix_search<C, M>(
        &self,
        query: impl AsRef<[Label]>,
    ) -> Keys<PrefixIter<'_, Label, (), C, M>>
    where
        C: TryFromIterator<Label, M>,
        Label: Clone,
    {
        self.0.common_prefix_search(query).keys()
    }

    /// Return all entries that match `query`, i.e. every stored key that
    /// has `query` as a prefix (including `query` itself if stored).
    pub fn predictive_search<C, M>(
        &self,
        query: impl AsRef<[Label]>,
    ) -> Keys<SearchIter<'_, Label, (), C, M>>
    where
        C: TryFromIterator<Label, M> + Clone,
        Label: Clone,
    {
        self.0.predictive_search(query).keys()
    }

    /// Return the postfixes of all entries that match `query`.
    ///
    /// # Arguments
    /// * `query` - The query to search for.
    ///
    /// # Examples
    /// In the following example we illustrate how to query the postfixes of a query string.
    ///
    /// ```rust
    /// use trie_rs::Trie;
    ///
    /// let trie = Trie::from_iter(["a", "app", "apple", "better", "application"]);
    ///
    /// let results: Vec<String> = trie.postfix_search("application").collect();
    ///
    /// assert!(results.is_empty());
    ///
    /// let results: Vec<String> = trie.postfix_search("app").collect();
    ///
    /// assert_eq!(results, vec!["le", "lication"]);
    ///
    /// ```
    pub fn postfix_search<C, M>(
        &self,
        query: impl AsRef<[Label]>,
    ) -> Keys<PostfixIter<'_, Label, (), C, M>>
    where
        C: TryFromIterator<Label, M>,
        Label: Clone,
    {
        self.0.postfix_search(query).keys()
    }

    /// Returns an iterator across all keys in the trie.
    ///
    /// # Examples
    /// In the following example we illustrate how to iterate over all keys in the trie.
    /// Note that the keys are returned in lexicographical order.
    ///
    /// ```rust
    /// use trie_rs::Trie;
    ///
    /// let trie = Trie::from_iter(["a", "app", "apple", "better", "application"]);
    ///
    /// let results: Vec<String> = trie.iter().collect();
    ///
    /// assert_eq!(results, vec!["a", "app", "apple", "application", "better"]);
    ///
    /// ```
    pub fn iter<C, M>(&self) -> Keys<PostfixIter<'_, Label, (), C, M>>
    where
        C: TryFromIterator<Label, M>,
        Label: Clone,
    {
        // An empty query matches every key, so its postfixes are all keys.
        self.postfix_search([])
    }

    /// Create an incremental search. Useful for interactive applications. See
    /// [crate::inc_search] for details.
    pub fn inc_search(&self) -> IncSearch<'_, Label, ()> {
        IncSearch::new(&self.0)
    }

    /// Return true if `query` is a prefix.
    ///
    /// Note: A prefix may be an exact match or not, and an exact match may be a
    /// prefix or not.
    pub fn is_prefix(&self, query: impl AsRef<[Label]>) -> bool {
        self.0.is_prefix(query)
    }

    /// Return the longest shared prefix of `query`.
    pub fn longest_prefix<C, M>(&self, query: impl AsRef<[Label]>) -> Option<C>
    where
        C: TryFromIterator<Label, M>,
        Label: Clone,
    {
        self.0.longest_prefix(query)
    }
}

impl<Label, C> FromIterator<C> for Trie<Label>
where
    C: AsRef<[Label]>,
    Label: Ord + Clone,
{
    fn from_iter<T>(iter: T) -> Self
    where
        Self: Sized,
        T: IntoIterator<Item = C>,
    {
        let mut builder = super::TrieBuilder::new();
        for k in iter {
            builder.push(k)
        }
        builder.build()
    }
}

#[cfg(test)]
mod search_tests {
    use crate::{Trie, TrieBuilder};
    use std::iter::FromIterator;

    fn build_trie() -> Trie<u8> {
        let mut builder = TrieBuilder::new();
        builder.push("a");
        builder.push("app");
        builder.push("apple");
        builder.push("better");
        builder.push("application");
        builder.push("アップル🍎");
        builder.build()
    }

    #[test]
    fn trie_from_iter() {
        let trie = Trie::<u8>::from_iter(["a", "app", "apple", "better", "application"]);
        assert!(trie.exact_match("application"));
    }

    #[test]
    fn collect_a_trie() {
        let trie: Trie<u8> =
            IntoIterator::into_iter(["a", "app", "apple", "better", "application"]).collect();
        assert!(trie.exact_match("application"));
    }

    #[test]
    fn clone() {
        let trie = build_trie();
        let _c: Trie<u8> = trie.clone();
    }

    #[rustfmt::skip]
    #[test]
    fn print_debug() {
        let trie: Trie<u8> = ["a"].into_iter().collect();
        assert_eq!(format!("{:?}", trie),
                   "Trie(Trie { louds: Louds { lbs: Fid { byte_vec: [160], bit_len: 5, chunks: Chunks { chunks: [Chunk { value: 2, blocks: Blocks { blocks: [Block { value: 1, length: 1 }, Block { value: 1, length: 1 }, Block { value: 2, length: 1 }, Block { value: 2, length: 1 }], blocks_cnt: 4 } }, Chunk { value: 2, blocks: Blocks { blocks: [Block { value: 0, length: 1 }], blocks_cnt: 1 } }], chunks_cnt: 2 }, table: PopcountTable { bit_length: 1, table: [0, 1] } } }, trie_labels: [TrieLabel { label: 97, value: Some(()) }] })"
        );
    }

    #[rustfmt::skip]
    #[test]
    fn print_debug_builder() {
        let mut builder = TrieBuilder::new();
        builder.push("a");
        builder.push("app");
        assert_eq!(format!("{:?}", builder),
                   "TrieBuilder(TrieBuilder { naive_trie: Root(NaiveTrieRoot { children: [IntermOrLeaf(NaiveTrieIntermOrLeaf { children: [IntermOrLeaf(NaiveTrieIntermOrLeaf { children: [IntermOrLeaf(NaiveTrieIntermOrLeaf { children: [], label: 112, value: Some(()) })], label: 112, value: None })], label: 97, value: Some(()) })] }) })"
        );
    }

    #[test]
    fn use_empty_queries() {
        let trie = build_trie();
        assert!(!trie.exact_match(""));
        let _ = trie.predictive_search::<String, _>("").next();
        let _ = trie.postfix_search::<String, _>("").next();
        let _ = trie.common_prefix_search::<String, _>("").next();
    }

    #[cfg(feature = "mem_dbg")]
    #[test]
    /// ```sh
    /// cargo test --features mem_dbg memsize -- --nocapture
    /// ```
    fn memsize() {
        use mem_dbg::*;
        use std::{
            env,
            fs::File,
            io::{BufRead, BufReader},
        };

        const COUNT: usize = 100;
        let mut builder = TrieBuilder::new();
        let repo_root = env::var("CARGO_MANIFEST_DIR")
            .expect("CARGO_MANIFEST_DIR environment variable must be set.");
        let edict2_path = format!("{}/benches/edict.furigana", repo_root);
        println!("Reading dictionary file from: {}", edict2_path);

        let mut n_words = 0;
        let mut accum = 0;
        for result in BufReader::new(File::open(edict2_path).unwrap())
            .lines()
            .take(COUNT)
        {
            let l = result.unwrap();
            accum += l.len();
            builder.push(l);
            n_words += 1;
        }
        println!("Read {} words, {} bytes.", n_words, accum);

        let trie = builder.build();
        let trie_size = trie.mem_size(SizeFlags::default());
        eprintln!("Trie size {trie_size}");
        let uncompressed: Vec<String> = trie.iter().collect();
        let uncompressed_size = uncompressed.mem_size(SizeFlags::default());
        eprintln!("Uncompressed size {}", uncompressed_size);
        assert!(accum < trie_size); // This seems wrong to me.
        assert!(trie_size < uncompressed_size);
    }

    mod exact_match_tests {
        macro_rules! parameterized_tests {
            ($($name:ident: $value:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (query, expected_match) = $value;
                    let trie = super::build_trie();
                    let result = trie.exact_match(query);
                    assert_eq!(result, expected_match);
                }
            )*
            }
        }
        parameterized_tests! {
            t1: ("a", true),
            t2: ("app", true),
            t3: ("apple", true),
            t4: ("application", true),
            t5: ("better", true),
            t6: ("アップル🍎", true),
            t7: ("appl", false),
            t8: ("appler", false),
        }
    }

    mod is_prefix_tests {
        macro_rules! parameterized_tests {
            ($($name:ident: $value:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (query, expected_match) = $value;
                    let trie = super::build_trie();
                    let result = trie.is_prefix(query);
                    assert_eq!(result, expected_match);
                }
            )*
            }
        }
        parameterized_tests! {
            t1: ("a", true),
            t2: ("app", true),
            t3: ("apple", false),
            t4: ("application", false),
            t5: ("better", false),
            t6: ("アップル🍎", false),
            t7: ("appl", true),
            t8: ("appler", false),
            t9: ("アップル", true),
            t10: ("ed", false),
            t11: ("e", false),
            t12: ("", true),
        }
    }

    mod predictive_search_tests {
        macro_rules! parameterized_tests {
            ($($name:ident: $value:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (query, expected_results) = $value;
                    let trie = super::build_trie();
                    let results: Vec<String> = trie.predictive_search(query).collect();
                    assert_eq!(results, expected_results);
                }
            )*
            }
        }
        parameterized_tests! {
            t1: ("a", vec!["a", "app", "apple", "application"]),
            t2: ("app", vec!["app", "apple", "application"]),
            t3: ("appl", vec!["apple", "application"]),
            t4: ("apple", vec!["apple"]),
            t5: ("b", vec!["better"]),
            t6: ("c", Vec::<&str>::new()),
            t7: ("アップ", vec!["アップル🍎"]),
        }
    }

    mod common_prefix_search_tests {
        macro_rules! parameterized_tests {
            ($($name:ident: $value:expr,)*) => {
            $(
                #[test]
                fn $name() {
                    let (query, expected_results) = $value;
                    let trie = super::build_trie();
                    let results: Vec<String> = trie.common_prefix_search(query).collect();
                    assert_eq!(results, expected_results);
                }
            )*
            }
        }
        parameterized_tests! {
            t1: ("a", vec!["a"]),
            t2: ("ap", vec!["a"]),
            t3: ("appl", vec!["a", "app"]),
            t4: ("appler", vec!["a", "app", "apple"]),
            t5: ("bette", Vec::<&str>::new()),
            t6: ("betterment", vec!["better"]),
            t7: ("c", Vec::<&str>::new()),
            t8: ("アップル🍎🍏", vec!["アップル🍎"]),
        }
    }
}
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/trie/trie_builder.rs
src/trie/trie_builder.rs
use super::Trie;
use crate::map;

#[cfg(feature = "mem_dbg")]
use mem_dbg::MemDbg;

#[derive(Debug, Clone)]
#[cfg_attr(feature = "mem_dbg", derive(mem_dbg::MemDbg, mem_dbg::MemSize))]
#[cfg_attr(feature = "serde", derive(serde::Serialize, serde::Deserialize))]
/// A trie builder for [Trie].
///
/// Thin wrapper over [map::TrieBuilder] with `()` as the value type.
pub struct TrieBuilder<Label>(map::TrieBuilder<Label, ()>);

impl<Label: Ord> TrieBuilder<Label> {
    /// Return a [TrieBuilder].
    pub fn new() -> Self {
        Self(map::TrieBuilder::new())
    }

    /// Add a cloneable entry.
    ///
    /// Takes any slice-like key; labels are cloned into the trie.
    pub fn push<Arr: AsRef<[Label]>>(&mut self, entry: Arr)
    where
        Label: Clone,
    {
        self.0.push(entry, ());
    }

    /// Add an entry.
    ///
    /// Consumes the labels, so `Label: Clone` is not required
    /// (unlike [Self::push]).
    pub fn insert<Arr: IntoIterator<Item = Label>>(&mut self, entry: Arr) {
        self.0.insert(entry, ());
    }

    /// Build a [Trie].
    pub fn build(self) -> Trie<Label> {
        Trie(self.0.build())
    }
}

impl<Label: Ord> Default for TrieBuilder<Label> {
    fn default() -> Self {
        Self::new()
    }
}
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/iter/search_iter.rs
src/iter/search_iter.rs
use crate::iter::PostfixIter;
use crate::map::Trie;
use crate::try_collect::{Collect, TryCollect, TryFromIterator};
use louds_rs::LoudsNodeNum;
use std::marker::PhantomData;

#[derive(Debug, Clone)]
/// Iterates through all the matches of a query.
///
/// Yields the query itself first (if it is a stored key), then every
/// stored key extending the query, by prepending the query's labels to
/// each postfix produced by the inner [PostfixIter].
pub struct SearchIter<'a, Label, Value, C, M> {
    // Labels of the query path, prepended to every postfix.
    prefix: Vec<Label>,
    // Pre-computed first item: the query itself, when it is a terminal.
    first: Option<(C, &'a Value)>,
    // Iterator over everything below the query's end node.
    postfix_iter: PostfixIter<'a, Label, Value, Vec<Label>, Collect>,
    col: PhantomData<(C, M)>,
}

impl<'a, Label: Ord + Clone, Value, C, M> SearchIter<'a, Label, Value, C, M>
where
    C: TryFromIterator<Label, M> + Clone,
{
    /// Walk the trie along `query`; on any mismatch the iterator is empty.
    pub(crate) fn new(trie: &'a Trie<Label, Value>, query: impl AsRef<[Label]>) -> Self {
        let mut cur_node_num = LoudsNodeNum(1);
        let mut prefix = Vec::new();
        // Consumes query (prefix), recording each matched label.
        for chr in query.as_ref() {
            let children_node_nums: Vec<_> = trie.children_node_nums(cur_node_num).collect();
            let res = trie.bin_search_by_children_labels(chr, &children_node_nums[..]);
            match res {
                Ok(i) => cur_node_num = children_node_nums[i],
                Err(_) => return Self::empty(trie),
            }
            prefix.push(trie.label(cur_node_num).clone());
        }
        // If the query's end node is terminal, it becomes the first yielded item.
        let first = trie.value(cur_node_num).map(|v| {
            (
                prefix
                    .clone()
                    .into_iter()
                    .try_collect()
                    .expect("Could not collect"),
                v,
            )
        });
        SearchIter {
            prefix,
            first,
            postfix_iter: PostfixIter::new(trie, cur_node_num),
            col: PhantomData,
        }
    }

    /// An iterator that yields nothing (query not found in `trie`).
    fn empty(trie: &'a Trie<Label, Value>) -> Self {
        SearchIter {
            prefix: Vec::new(),
            first: None,
            postfix_iter: PostfixIter::empty(trie),
            col: PhantomData,
        }
    }
}

impl<'a, Label: Ord + Clone, Value, C, M> Iterator for SearchIter<'a, Label, Value, C, M>
where
    C: TryFromIterator<Label, M> + Clone,
    Vec<Label>: TryFromIterator<Label, Collect>,
{
    type Item = (C, &'a Value);

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        match self.first.take() {
            // First item already yielded (or never existed): chain
            // prefix + postfix for each remaining match.
            None => self.postfix_iter.next().map(|(postfix, v)| {
                let entry = C::try_from_iter(self.prefix.clone().into_iter().chain(postfix))
                    .expect("Could not collect postfix");
                (entry, v)
            }),
            x => x,
        }
    }
}
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/iter/postfix_iter.rs
src/iter/postfix_iter.rs
use crate::map::Trie;
use crate::try_collect::{TryCollect, TryFromIterator};
use louds_rs::LoudsNodeNum;
use std::marker::PhantomData;

#[derive(Debug, Clone)]
/// Iterates through all the postfixes of a matching query.
///
/// Depth-first traversal below a root node, yielding the label path from
/// (but excluding) the root down to each terminal node, with its value.
pub struct PostfixIter<'a, Label, Value, C, M> {
    trie: &'a Trie<Label, Value>,
    // Work stack of (depth, node); children are pushed in reverse so they
    // pop off in their original (sorted) order.
    queue: Vec<(usize, LoudsNodeNum)>,
    // Labels along the current path; truncated when backtracking.
    buffer: Vec<&'a Label>,
    // Pending terminal value to emit on the next `next()` step.
    value: Option<&'a Value>,
    col: PhantomData<(C, M)>,
}

impl<'a, Label: Ord, Value, C, M> PostfixIter<'a, Label, Value, C, M>
where
    C: TryFromIterator<Label, M>,
{
    /// Iterate over everything strictly below `root`.
    #[inline]
    pub(crate) fn new(trie: &'a Trie<Label, Value>, root: LoudsNodeNum) -> Self {
        let mut children: Vec<_> = trie.children_node_nums(root).map(|n| (0, n)).collect();
        children.reverse();
        Self {
            trie,
            queue: children,
            buffer: Vec::new(),
            value: None,
            col: PhantomData,
        }
    }

    /// An iterator that yields nothing.
    #[inline]
    pub(crate) fn empty(trie: &'a Trie<Label, Value>) -> Self {
        Self {
            trie,
            queue: Vec::new(),
            buffer: Vec::new(),
            value: None,
            col: PhantomData,
        }
    }
}

impl<'a, Label: Ord + Clone, Value, C, M> Iterator for PostfixIter<'a, Label, Value, C, M>
where
    C: TryFromIterator<Label, M>,
{
    type Item = (C, &'a Value);

    #[inline]
    fn next(&mut self) -> Option<Self::Item> {
        use std::cmp::Ordering;
        // Advance the DFS until a terminal node is found or the stack empties.
        while self.value.is_none() {
            if let Some((depth, node)) = self.queue.pop() {
                let children = self.trie.children_node_nums(node);
                self.queue
                    .extend(children.rev().map(|child| (depth + 1, child)));
                // Keep `buffer` holding exactly the labels of the path to `node`.
                match depth.cmp(&self.buffer.len()) {
                    Ordering::Equal => {
                        // Descending one level: append.
                        self.buffer.push(self.trie.label(node));
                    }
                    Ordering::Less => {
                        // Backtracked: drop the stale tail, replace at `depth`.
                        let _ = self.buffer.drain(depth + 1..);
                        self.buffer[depth] = self.trie.label(node);
                    }
                    Ordering::Greater => {
                        // DFS can descend at most one level per pop.
                        panic!("depth > buffer.len()");
                    }
                }
                self.value = self.trie.value(node);
            } else {
                break;
            }
        }
        if let Some(v) = self.value.take() {
            Some((
                self.buffer
                    .iter()
                    .cloned()
                    .cloned()
                    .try_collect()
                    .expect("Could not collect"),
                v,
            ))
        } else {
            None
        }
    }
}
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/iter/mod.rs
src/iter/mod.rs
//! Trie iterators mod keys; mod postfix_iter; mod prefix_iter; mod search_iter; pub use keys::{Keys, KeysExt}; pub use postfix_iter::PostfixIter; pub use prefix_iter::PrefixIter; pub use search_iter::SearchIter;
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/iter/prefix_iter.rs
src/iter/prefix_iter.rs
use crate::map::Trie;
use crate::try_collect::{TryCollect, TryFromIterator};
use louds_rs::LoudsNodeNum;
use std::marker::PhantomData;

#[derive(Debug, Clone)]
/// Iterates through all the common prefixes of a given query.
///
/// Walks the trie one query label per step and yields every stored key
/// that is a prefix of the query, in increasing length order.
pub struct PrefixIter<'a, Label, Value, C, M> {
    trie: &'a Trie<Label, Value>,
    // Owned copy of the query labels.
    query: Vec<Label>,
    // Position of the next query label to consume.
    index: usize,
    // Current node; LoudsNodeNum(1) is the trie root.
    node: LoudsNodeNum,
    // Labels matched so far (the candidate prefix).
    buffer: Vec<&'a Label>,
    // Terminal value reached on the last step, to be emitted by `next()`.
    consume: Option<&'a Value>,
    col: PhantomData<(C, M)>,
}

impl<'a, Label: Ord + Clone, Value, C, M> PrefixIter<'a, Label, Value, C, M> {
    #[inline]
    pub(crate) fn new(trie: &'a Trie<Label, Value>, query: impl AsRef<[Label]>) -> Self {
        Self {
            trie,
            query: query.as_ref().to_vec(),
            index: 0,
            node: LoudsNodeNum(1),
            buffer: Vec::new(),
            consume: None,
            col: PhantomData,
        }
    }
}

impl<'a, Label: Ord + Clone, Value, C, M> Iterator for PrefixIter<'a, Label, Value, C, M>
where
    C: TryFromIterator<Label, M>,
{
    type Item = (C, &'a Value);

    fn next(&mut self) -> Option<Self::Item> {
        // Consume query labels until a terminal node is reached (a prefix to
        // yield), a label mismatches (break => no more results), or the query
        // is exhausted (return None).
        while self.consume.is_none() {
            if let Some(chr) = self.query.get(self.index) {
                let children_node_nums: Vec<_> = self.trie.children_node_nums(self.node).collect();
                let res = self
                    .trie
                    .bin_search_by_children_labels(chr, &children_node_nums[..]);
                match res {
                    Ok(j) => {
                        let child_node_num = children_node_nums[j];
                        self.buffer.push(self.trie.label(child_node_num));
                        self.consume = self.trie.value(child_node_num);
                        self.node = child_node_num;
                    }
                    Err(_) => break,
                }
            } else {
                return None;
            }
            self.index += 1;
        }
        if let Some(v) = self.consume.take() {
            let col = self.buffer.clone();
            Some((
                col.into_iter()
                    .cloned()
                    .try_collect()
                    .expect("Could not collect"),
                v,
            ))
        } else {
            None
        }
    }
}
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/iter/keys.rs
src/iter/keys.rs
#[derive(Debug, Clone)]
/// Adapter that keeps only the keys of a `(key, value)` iterator from
/// [crate::iter], discarding the values.
pub struct Keys<I>(I);

impl<I> Keys<I> {
    ///Creates a new `Keys` iterator.
    pub fn new(iter: I) -> Self {
        Keys(iter)
    }
}

// NOTE: generic over any value type V. A `map::Trie<K, V>` iterator actually
// yields `(C, &V)`, which unifies with this impl just as well.
impl<I, C, V> Iterator for Keys<I>
where
    I: Iterator<Item = (C, V)>,
{
    type Item = C;

    fn next(&mut self) -> Option<C> {
        let (key, _value) = self.0.next()?;
        Some(key)
    }
}

/// Strip an iterator of items `(K, V)` down to only `K`.
pub trait KeysExt: Iterator {
    /// Retain keys and drop values from a [crate::iter] iterator.
    fn keys(self) -> Keys<Self>
    where
        Self: Sized,
    {
        Keys::new(self)
    }
}

impl<T> KeysExt for T where T: Iterator + ?Sized {}
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/map/trie_builder.rs
src/map/trie_builder.rs
use crate::internal_data_structure::naive_trie::NaiveTrie;
use crate::map::TrieLabel;
use crate::map::{Trie, TrieBuilder};
use louds_rs::Louds;

impl<Label: Ord, Value> Default for TrieBuilder<Label, Value> {
    fn default() -> Self {
        Self::new()
    }
}

impl<Label: Ord, Value> TrieBuilder<Label, Value> {
    /// Return a [TrieBuilder].
    pub fn new() -> Self {
        let naive_trie = NaiveTrie::make_root();
        Self { naive_trie }
    }

    /// Add a cloneable entry and value.
    pub fn push<Arr: AsRef<[Label]>>(&mut self, entry: Arr, value: Value)
    where
        Label: Clone,
    {
        self.naive_trie.push(entry.as_ref().iter().cloned(), value);
    }

    /// Add an entry and value.
    ///
    /// Consumes the labels, so `Label: Clone` is not required.
    pub fn insert<Arr: IntoIterator<Item = Label>>(&mut self, entry: Arr, value: Value) {
        self.naive_trie.push(entry.into_iter(), value);
    }

    /// Build a [Trie].
    ///
    /// Converts the pointer-based `NaiveTrie` into a succinct LOUDS
    /// representation: a breadth-first walk emits a `true` bit per node and
    /// a `false` bit per end-of-siblings marker (`PhantomSibling`), while
    /// labels/values are collected into `trie_labels` in the same BF order.
    pub fn build(self) -> Trie<Label, Value> {
        // Leading `10` encodes the virtual super-root, per the LOUDS LBS
        // convention expected by `louds_rs::Louds`.
        let mut louds_bits: Vec<bool> = vec![true, false];
        let mut trie_labels: Vec<TrieLabel<Label, Value>> = vec![];
        for node in self.naive_trie.into_iter() {
            match node {
                // The real root contributes no bit beyond the `10` above.
                NaiveTrie::Root(_) => {}
                NaiveTrie::IntermOrLeaf(n) => {
                    louds_bits.push(true);
                    trie_labels.push(TrieLabel {
                        label: n.label,
                        value: n.value,
                    });
                }
                NaiveTrie::PhantomSibling => {
                    louds_bits.push(false);
                }
            }
        }
        let louds = Louds::from(&louds_bits[..]);

        Trie { louds, trie_labels }
    }
}
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/src/map/trie.rs
src/map/trie.rs
//! A trie map stores a value with each word or key.
use super::Trie;
use crate::inc_search::IncSearch;
use crate::iter::{PostfixIter, PrefixIter, SearchIter};
use crate::try_collect::{TryCollect, TryFromIterator};
use louds_rs::{AncestorNodeIter, ChildNodeIter, LoudsNodeNum};
use std::iter::FromIterator;

impl<Label: Ord, Value> Trie<Label, Value> {
    /// Return `Some(&Value)` if query is an exact match.
    pub fn exact_match(&self, query: impl AsRef<[Label]>) -> Option<&Value> {
        self.exact_match_node(query)
            .and_then(move |x| self.value(x))
    }

    /// Return `Node` if query is an exact match.
    ///
    /// Returns `None` for an empty query, on any label mismatch, or when the
    /// final node is not terminal.
    #[inline]
    fn exact_match_node(&self, query: impl AsRef<[Label]>) -> Option<LoudsNodeNum> {
        // LoudsNodeNum(1) is the trie root.
        let mut cur_node_num = LoudsNodeNum(1);
        for (i, chr) in query.as_ref().iter().enumerate() {
            let children_node_nums: Vec<LoudsNodeNum> =
                self.children_node_nums(cur_node_num).collect();
            let res = self.bin_search_by_children_labels(chr, &children_node_nums[..]);
            match res {
                Ok(j) => {
                    let child_node_num = children_node_nums[j];
                    // Last query label reached a terminal node: exact match.
                    if i == query.as_ref().len() - 1 && self.is_terminal(child_node_num) {
                        return Some(child_node_num);
                    }
                    cur_node_num = child_node_num;
                }
                Err(_) => return None,
            }
        }
        None
    }

    /// Return `Some(&mut value)` if query is an exact match.
    pub fn exact_match_mut(&mut self, query: impl AsRef<[Label]>) -> Option<&mut Value> {
        self.exact_match_node(query)
            .and_then(move |x| self.value_mut(x))
    }

    /// Create an incremental search. Useful for interactive applications. See
    /// [crate::inc_search] for details.
    pub fn inc_search(&self) -> IncSearch<'_, Label, Value> {
        IncSearch::new(self)
    }

    /// Return true if `query` is a prefix.
    ///
    /// Note: A prefix may be an exact match or not, and an exact match may be a
    /// prefix or not.
    pub fn is_prefix(&self, query: impl AsRef<[Label]>) -> bool {
        let mut cur_node_num = LoudsNodeNum(1);
        for chr in query.as_ref().iter() {
            let children_node_nums: Vec<_> = self.children_node_nums(cur_node_num).collect();
            let res = self.bin_search_by_children_labels(chr, &children_node_nums[..]);
            match res {
                Ok(j) => cur_node_num = children_node_nums[j],
                Err(_) => return false,
            }
        }
        // Are there more nodes after our query?
        self.has_children_node_nums(cur_node_num)
    }

    /// Return all entries and their values that match `query`.
    pub fn predictive_search<C, M>(
        &self,
        query: impl AsRef<[Label]>,
    ) -> SearchIter<'_, Label, Value, C, M>
    where
        C: TryFromIterator<Label, M> + Clone,
        Label: Clone,
    {
        SearchIter::new(self, query)
    }

    /// Return the postfixes and values of all entries that match `query`.
    pub fn postfix_search<C, M>(
        &self,
        query: impl AsRef<[Label]>,
    ) -> PostfixIter<'_, Label, Value, C, M>
    where
        C: TryFromIterator<Label, M>,
        Label: Clone,
    {
        let mut cur_node_num = LoudsNodeNum(1);
        // Consumes query (prefix)
        for chr in query.as_ref() {
            let children_node_nums: Vec<_> = self.children_node_nums(cur_node_num).collect();
            let res = self.bin_search_by_children_labels(chr, &children_node_nums[..]);
            match res {
                Ok(i) => cur_node_num = children_node_nums[i],
                Err(_) => {
                    return PostfixIter::empty(self);
                }
            }
        }
        PostfixIter::new(self, cur_node_num)
    }

    /// Returns an iterator across all keys in the trie.
    ///
    /// # Examples
    /// In the following example we illustrate how to iterate over all keys in the trie.
    /// Note that the keys are returned in lexicographical order.
    ///
    /// ```rust
    /// use trie_rs::map::Trie;
    /// let trie = Trie::from_iter([("a", 0), ("app", 1), ("apple", 2), ("better", 3), ("application", 4)]);
    /// let results: Vec<(String, &u8)> = trie.iter().collect();
    /// assert_eq!(results, [("a".to_string(), &0u8), ("app".to_string(), &1u8), ("apple".to_string(), &2u8), ("application".to_string(), &4u8), ("better".to_string(), &3u8)]);
    /// ```
    pub fn iter<C, M>(&self) -> PostfixIter<'_, Label, Value, C, M>
    where
        C: TryFromIterator<Label, M>,
        Label: Clone,
    {
        // An empty query matches every key.
        self.postfix_search([])
    }

    /// Return the common prefixes of `query`.
    pub fn common_prefix_search<C, M>(
        &self,
        query: impl AsRef<[Label]>,
    ) -> PrefixIter<'_, Label, Value, C, M>
    where
        C: TryFromIterator<Label, M>,
        Label: Clone,
    {
        PrefixIter::new(self, query)
    }

    /// Return the longest shared prefix or terminal of `query`.
    pub fn longest_prefix<C, M>(&self, query: impl AsRef<[Label]>) -> Option<C>
    where
        C: TryFromIterator<Label, M>,
        Label: Clone,
    {
        let mut cur_node_num = LoudsNodeNum(1);
        let mut buffer = Vec::new();
        // Consumes query (prefix)
        for chr in query.as_ref() {
            let children_node_nums: Vec<_> = self.children_node_nums(cur_node_num).collect();
            let res = self.bin_search_by_children_labels(chr, &children_node_nums[..]);
            match res {
                Ok(i) => {
                    cur_node_num = children_node_nums[i];
                    buffer.push(cur_node_num);
                }
                Err(_) => {
                    return None;
                }
            }
        }
        // Walk the trie as long as there is only one path and it isn't a terminal value.
        while !self.is_terminal(cur_node_num) {
            let mut iter = self.children_node_nums(cur_node_num);
            let first = iter.next();
            let second = iter.next();
            match (first, second) {
                // Exactly one child: the extension is unambiguous, keep going.
                (Some(child_node_num), None) => {
                    cur_node_num = child_node_num;
                    buffer.push(child_node_num);
                }
                // Zero or multiple children: stop extending.
                _ => break,
            }
        }
        if buffer.is_empty() {
            None
        } else {
            Some(
                buffer
                    .into_iter()
                    .map(|x| self.label(x).clone())
                    .try_collect()
                    .expect("Could not collect"),
            )
        }
    }

    pub(crate) fn has_children_node_nums(&self, node_num: LoudsNodeNum) -> bool {
        self.louds
            .parent_to_children_indices(node_num)
            .next()
            .is_some()
    }

    pub(crate) fn children_node_nums(&self, node_num: LoudsNodeNum) -> ChildNodeIter {
        self.louds.parent_to_children_nodes(node_num)
    }

    // Children are stored sorted by label, so a binary search locates `query`
    // among a node's children (Ok(index)) or its insertion point (Err(index)).
    pub(crate) fn bin_search_by_children_labels(
        &self,
        query: &Label,
        children_node_nums: &[LoudsNodeNum],
    ) -> Result<usize, usize> {
        children_node_nums.binary_search_by(|child_node_num| self.label(*child_node_num).cmp(query))
    }

    // Real nodes start at node number 2 (1 is the root), hence the -2 offset
    // into `trie_labels`.
    pub(crate) fn label(&self, node_num: LoudsNodeNum) -> &Label {
        &self.trie_labels[(node_num.0 - 2) as usize].label
    }

    pub(crate) fn is_terminal(&self, node_num: LoudsNodeNum) -> bool {
        if node_num.0 >= 2 {
            self.trie_labels[(node_num.0 - 2) as usize].value.is_some()
        } else {
            // The root (and the virtual super-root) never holds a value.
            false
        }
    }

    pub(crate) fn value(&self, node_num: LoudsNodeNum) -> Option<&Value> {
        if node_num.0 >= 2 {
            self.trie_labels[(node_num.0 - 2) as usize].value.as_ref()
        } else {
            None
        }
    }

    pub(crate) fn value_mut(&mut self, node_num: LoudsNodeNum) -> Option<&mut Value> {
        self.trie_labels[(node_num.0 - 2) as usize].value.as_mut()
    }

    pub(crate) fn child_to_ancestors(&self, node_num: LoudsNodeNum) -> AncestorNodeIter {
        self.louds.child_to_ancestors(node_num)
    }
}

impl<Label, Value, C> FromIterator<(C, Value)> for Trie<Label, Value>
where
    C: AsRef<[Label]>,
    Label: Ord + Clone,
{
    fn from_iter<T>(iter: T) -> Self
    where
        Self: Sized,
        T: IntoIterator<Item = (C, Value)>,
    {
        let mut builder = super::TrieBuilder::new();
        for (k, v) in iter {
            builder.push(k, v)
        }
        builder.build()
    }
}

#[cfg(test)]
mod
search_tests {
    // Integration-style tests for the `Trie` / `TrieBuilder` lookup APIs:
    // exact match, prefix predicates, predictive / common-prefix / postfix
    // search, plus iteration order and construction via FromIterator.
    use crate::map::{Trie, TrieBuilder};
    use std::iter::FromIterator;

    // Fixture: byte-keyed trie built with `push` (str keys stored as UTF-8 bytes).
    fn build_trie() -> Trie<u8, u8> {
        let mut builder = TrieBuilder::new();
        builder.push("a", 0);
        builder.push("app", 1);
        builder.push("apple", 2);
        builder.push("better", 3);
        builder.push("application", 4);
        builder.push("アップル🍎", 5);
        builder.build()
    }

    // Fixture: char-keyed trie built with `insert` over `str::chars()`.
    fn build_trie2() -> Trie<char, u8> {
        let mut builder: TrieBuilder<char, u8> = TrieBuilder::new();
        builder.insert("a".chars(), 0);
        builder.insert("app".chars(), 1);
        builder.insert("apple".chars(), 2);
        builder.insert("better".chars(), 3);
        builder.insert("application".chars(), 4);
        builder.insert("アップル🍎".chars(), 5);
        builder.build()
    }

    #[test]
    fn sanity_check() {
        let trie = build_trie();
        let v: Vec<(String, &u8)> = trie.predictive_search("apple").collect();
        assert_eq!(v, vec![("apple".to_string(), &2)]);
    }

    #[test]
    fn clone() {
        let trie = build_trie();
        let _c: Trie<u8, u8> = trie.clone();
    }

    #[test]
    fn value_mut() {
        // Mutating through `exact_match_mut` must be visible via `exact_match`.
        let mut trie = build_trie();
        assert_eq!(trie.exact_match("apple"), Some(&2));
        let v = trie.exact_match_mut("apple").unwrap();
        *v = 10;
        assert_eq!(trie.exact_match("apple"), Some(&10));
    }

    #[test]
    fn trie_from_iter() {
        let trie = Trie::<u8, u8>::from_iter([
            ("a", 0),
            ("app", 1),
            ("apple", 2),
            ("better", 3),
            ("application", 4),
        ]);
        assert_eq!(trie.exact_match("application"), Some(&4));
    }

    #[test]
    fn collect_a_trie() {
        // Does not work with arrays in rust 2018 because into_iter() returns references instead of owned types.
        // let trie: Trie<u8, u8> = [("a", 0), ("app", 1), ("apple", 2), ("better", 3), ("application", 4)].into_iter().collect();
        let trie: Trie<u8, u8> = vec![
            ("a", 0),
            ("app", 1),
            ("apple", 2),
            ("better", 3),
            ("application", 4),
        ]
        .into_iter()
        .collect();
        assert_eq!(trie.exact_match("application"), Some(&4));
    }

    #[test]
    fn use_empty_queries() {
        // Empty queries must not panic; they just produce no match / empty iterators.
        let trie = build_trie();
        assert!(trie.exact_match("").is_none());
        let _ = trie.predictive_search::<String, _>("").next();
        let _ = trie.postfix_search::<String, _>("").next();
        let _ = trie.common_prefix_search::<String, _>("").next();
    }

    #[test]
    fn insert_order_dependent() {
        // Iteration order is lexicographic regardless of insertion order.
        let trie = Trie::from_iter([("a", 0), ("app", 1), ("apple", 2)]);
        let results: Vec<(String, &u8)> = trie.iter().collect();
        assert_eq!(
            results,
            [
                ("a".to_string(), &0u8),
                ("app".to_string(), &1u8),
                ("apple".to_string(), &2u8)
            ]
        );
        let trie = Trie::from_iter([("a", 0), ("apple", 2), ("app", 1)]);
        let results: Vec<(String, &u8)> = trie.iter().collect();
        assert_eq!(
            results,
            [
                ("a".to_string(), &0u8),
                ("app".to_string(), &1u8),
                ("apple".to_string(), &2u8)
            ]
        );
    }

    // Table-driven tests for `exact_match`: (query, expected value).
    mod exact_match_tests {
        macro_rules! parameterized_tests {
            ($($name:ident: $value:expr,)*) => {
                $(
                    #[test]
                    fn $name() {
                        let (query, expected_match) = $value;
                        let trie = super::build_trie();
                        let result = trie.exact_match(query);
                        assert_eq!(result, expected_match);
                    }
                )*
            }
        }

        parameterized_tests! {
            t1: ("a", Some(&0)),
            t2: ("app", Some(&1)),
            t3: ("apple", Some(&2)),
            t4: ("application", Some(&4)),
            t5: ("better", Some(&3)),
            t6: ("アップル🍎", Some(&5)),
            t7: ("appl", None),
            t8: ("appler", None),
        }
    }

    // Table-driven tests for `is_prefix`: true iff the query is a strict
    // prefix of at least one stored key.
    mod is_prefix_tests {
        macro_rules! parameterized_tests {
            ($($name:ident: $value:expr,)*) => {
                $(
                    #[test]
                    fn $name() {
                        let (query, expected_match) = $value;
                        let trie = super::build_trie();
                        let result = trie.is_prefix(query);
                        assert_eq!(result, expected_match);
                    }
                )*
            }
        }

        parameterized_tests! {
            t1: ("a", true),
            t2: ("app", true),
            t3: ("apple", false),
            t4: ("application", false),
            t5: ("better", false),
            t6: ("アップル🍎", false),
            t7: ("appl", true),
            t8: ("appler", false),
            t9: ("アップル", true),
        }
    }

    // Table-driven tests for `longest_prefix`: (query, expected completion).
    mod longest_prefix_tests {
        macro_rules! parameterized_tests {
            ($($name:ident: $value:expr,)*) => {
                $(
                    #[test]
                    fn $name() {
                        let (query, expected_match) = $value;
                        let trie = super::build_trie();
                        let result: Option<String> = trie.longest_prefix(query);
                        let expected_match = expected_match.map(str::to_string);
                        assert_eq!(result, expected_match);
                    }
                )*
            }
        }

        parameterized_tests! {
            t1: ("a", Some("a")),
            t2: ("ap", Some("app")),
            t3: ("app", Some("app")),
            t4: ("appl", Some("appl")),
            t5: ("appli", Some("application")),
            t6: ("b", Some("better")),
            t7: ("アップル🍎", Some("アップル🍎")),
            t8: ("appler", None),
            t9: ("アップル", Some("アップル🍎")),
            t10: ("z", None),
            t11: ("applesDONTEXIST", None),
            t12: ("", None),
        }
    }

    // Table-driven tests for `predictive_search`: all stored (key, value)
    // pairs whose key starts with the query.
    mod predictive_search_tests {
        macro_rules! parameterized_tests {
            ($($name:ident: $value:expr,)*) => {
                $(
                    #[test]
                    fn $name() {
                        let (query, expected_results) = $value;
                        let trie = super::build_trie();
                        let results: Vec<(String, &u8)> = trie.predictive_search(query).collect();
                        let expected_results: Vec<(String, &u8)> = expected_results.iter().map(|s| (s.0.to_string(), &s.1)).collect();
                        assert_eq!(results, expected_results);
                    }
                )*
            }
        }

        parameterized_tests! {
            t1: ("a", vec![("a", 0), ("app", 1), ("apple", 2), ("application", 4)]),
            t2: ("app", vec![("app", 1), ("apple", 2), ("application", 4)]),
            t3: ("appl", vec![("apple", 2), ("application", 4)]),
            t4: ("apple", vec![("apple", 2)]),
            t5: ("b", vec![("better", 3)]),
            t6: ("c", Vec::<(&str, u8)>::new()),
            t7: ("アップ", vec![("アップル🍎", 5)]),
        }
    }

    // Table-driven tests for `common_prefix_search`: all stored (key, value)
    // pairs whose key is a prefix of the query.
    mod common_prefix_search_tests {
        macro_rules! parameterized_tests {
            ($($name:ident: $value:expr,)*) => {
                $(
                    #[test]
                    fn $name() {
                        let (query, expected_results) = $value;
                        let trie = super::build_trie();
                        let results: Vec<(String, &u8)> = trie.common_prefix_search(query).collect();
                        let expected_results: Vec<(String, &u8)> = expected_results.iter().map(|s| (s.0.to_string(), &s.1)).collect();
                        assert_eq!(results, expected_results);
                    }
                )*
            }
        }

        parameterized_tests! {
            t1: ("a", vec![("a", 0)]),
            t2: ("ap", vec![("a", 0)]),
            t3: ("appl", vec![("a", 0), ("app", 1)]),
            t4: ("appler", vec![("a", 0), ("app", 1), ("apple", 2)]),
            t5: ("bette", Vec::<(&str, u8)>::new()),
            t6: ("betterment", vec![("better", 3)]),
            t7: ("c", Vec::<(&str, u8)>::new()),
            t8: ("アップル🍎🍏", vec![("アップル🍎", 5)]),
        }
    }

    // Table-driven tests for `postfix_search`: the remainders of keys that
    // start with the query (query itself excluded from the returned text).
    mod postfix_search_tests {
        macro_rules! parameterized_tests {
            ($($name:ident: $value:expr,)*) => {
                $(
                    #[test]
                    fn $name() {
                        let (query, expected_results) = $value;
                        let trie = super::build_trie();
                        let results: Vec<(String, &u8)> = trie.postfix_search(query).collect();
                        let expected_results: Vec<(String, &u8)> = expected_results.iter().map(|s| (s.0.to_string(), &s.1)).collect();
                        assert_eq!(results, expected_results);
                    }
                )*
            }
        }

        parameterized_tests! {
            t1: ("a", vec![("pp", 1), ("pple", 2), ("pplication", 4)]),
            t2: ("ap", vec![("p", 1), ("ple", 2), ("plication", 4)]),
            t3: ("appl", vec![("e", 2), ("ication", 4)]),
            t4: ("appler", Vec::<(&str, u8)>::new()),
            t5: ("bette", vec![("r", 3)]),
            t6: ("betterment", Vec::<(&str, u8)>::new()),
            t7: ("c", Vec::<(&str, u8)>::new()),
            t8: ("アップル🍎🍏", Vec::<(&str, u8)>::new()),
        }
    }

    // Same `postfix_search` tables, but against the char-keyed trie to cover
    // the non-u8 label path.
    mod postfix_search_char_tests {
        macro_rules! parameterized_tests {
            ($($name:ident: $value:expr,)*) => {
                $(
                    #[test]
                    fn $name() {
                        let (query, expected_results) = $value;
                        let trie = super::build_trie2();
                        let chars: Vec<char> = query.chars().collect();
                        let results: Vec<(String, &u8)> = trie.postfix_search(chars).collect();
                        let expected_results: Vec<(String, &u8)> = expected_results.iter().map(|s| (s.0.to_string(), &s.1)).collect();
                        assert_eq!(results, expected_results);
                    }
                )*
            }
        }

        parameterized_tests! {
            t1: ("a", vec![("pp", 1), ("pple", 2), ("pplication", 4)]),
            t2: ("ap", vec![("p", 1), ("ple", 2), ("plication", 4)]),
            t3: ("appl", vec![("e", 2), ("ication", 4)]),
            t4: ("appler", Vec::<(&str, u8)>::new()),
            t5: ("bette", vec![("r", 3)]),
            t6: ("betterment", Vec::<(&str, u8)>::new()),
            t7: ("c", Vec::<(&str, u8)>::new()),
            t8: ("アップル🍎🍏", Vec::<(&str, u8)>::new()),
        }
    }
}
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/tests/test_versions.rs
tests/test_versions.rs
#[test] fn test_readme_deps() { version_sync::assert_markdown_deps_updated!("README.md"); } #[test] fn test_html_root_url() { version_sync::assert_html_root_url_updated!("src/lib.rs"); }
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
laysakura/trie-rs
https://github.com/laysakura/trie-rs/blob/9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c/benches/bench.rs
benches/bench.rs
// Criterion benchmarks for trie-rs, driven by the EDICT Japanese dictionary
// fixture at benches/edict.furigana. Each benchmark repeats the measured call
// many times because the per-call cost is tiny relative to trie construction.
#[macro_use]
extern crate criterion;
#[macro_use]
extern crate lazy_static;

use criterion::Criterion;
use std::time::Duration;

// Shared Criterion configuration for every benchmark in this file.
fn c() -> Criterion {
    Criterion::default()
        .sample_size(10) // must be >= 10 for Criterion v0.3
        .warm_up_time(Duration::from_secs(1))
        .with_plots()
}

// Short git commit hash, embedded in benchmark names so runs from different
// commits can be told apart in reports. Panics if `git` is unavailable.
fn git_hash() -> String {
    use std::process::Command;
    let output = Command::new("git")
        .args(["rev-parse", "--short", "HEAD"])
        .output()
        .unwrap();
    String::from(String::from_utf8(output.stdout).unwrap().trim())
}

mod trie {
    use criterion::{black_box, BatchSize, Criterion};
    use std::env;
    use std::fs::File;
    use std::io::{BufRead, BufReader};
    use trie_rs::{Trie, TrieBuilder};

    lazy_static! {
        // Construct Japanese dictionary using EDICT (http://www.edrdg.org/jmdict/edict.html).
        static ref TRIE_EDICT: Trie<u8> = {
            let mut builder = TrieBuilder::new();
            let repo_root = env::var("CARGO_MANIFEST_DIR").expect("CARGO_MANIFEST_DIR environment variable must be set.");
            let edict2_path = format!("{}/benches/edict.furigana", repo_root);
            println!("Reading dictionary file from: {}", edict2_path);
            let mut n_words = 0;
            for result in BufReader::new(File::open(edict2_path).unwrap()).lines() {
                let l = result.unwrap();
                builder.push(l);
                n_words += 1;
            }
            println!("Read {} words.", n_words);
            builder.build()
            // TODO print memory footprint compared to original `edict.furigana` file
        };
    }

    // Measures TrieBuilder::build() over the first `items` dictionary lines
    // (file re-read inside the routine, so I/O is included in the timing).
    pub fn build(_: &mut Criterion) {
        let items = 10_000;
        super::c().bench_function(
            &format!("[{}] Trie::build() {} items", super::git_hash(), items),
            move |b| {
                b.iter_batched(
                    || &TRIE_EDICT,
                    |_trie| {
                        let mut builder = TrieBuilder::new();
                        let repo_root = env::var("CARGO_MANIFEST_DIR")
                            .expect("CARGO_MANIFEST_DIR environment variable must be set.");
                        let edict2_path = format!("{}/benches/edict.furigana", repo_root);
                        let mut n_words = 0;
                        for result in BufReader::new(File::open(edict2_path).unwrap()).lines() {
                            let l = result.unwrap();
                            builder.push(l);
                            n_words += 1;
                            if n_words >= items {
                                break;
                            }
                        }
                        black_box(builder.build())
                    },
                    BatchSize::SmallInput,
                )
            },
        );
    }

    // Measures `exact_match` on a key known to exist, looped `times` times.
    pub fn exact_match(_: &mut Criterion) {
        let times = 100;
        super::c().bench_function(
            &format!(
                "[{}] Trie::exact_match() {} times",
                super::git_hash(),
                times
            ),
            move |b| {
                b.iter_batched(
                    || &TRIE_EDICT,
                    |trie| {
                        // iter_batched() does not properly time `routine` time
                        // when `setup` time is far longer than `routine` time.
                        // Tested function takes too short compared to build().
                        // So loop many times.
                        let result = trie.exact_match("すしをにぎる");
                        for _ in 0..(times - 1) {
                            assert!(trie.exact_match("すしをにぎる"));
                        }
                        assert!(result);
                    },
                    BatchSize::SmallInput,
                )
            },
        );
    }

    // Measures `predictive_search` for a prefix with a small result set and
    // verifies the expected completions.
    pub fn predictive_search(_: &mut Criterion) {
        let times = 100;
        super::c().bench_function(
            &format!(
                "[{}] Trie::predictive_search() {} times",
                super::git_hash(),
                times
            ),
            move |b| {
                b.iter_batched(
                    || &TRIE_EDICT,
                    |trie| {
                        // iter_batched() does not properly time `routine` time
                        // when `setup` time is far longer than `routine` time.
                        // Tested function takes too short compared to build().
                        // So loop many times.
                        let results_in_u8s: Vec<Vec<u8>> = trie.predictive_search("すし").collect();
                        for _ in 0..(times - 1) {
                            for entry in trie.predictive_search::<Vec<u8>, _>("すし") {
                                black_box(entry);
                            }
                        }
                        let results_in_str: Vec<String> = results_in_u8s
                            .into_iter()
                            .map(|u8s| String::from_utf8(u8s).unwrap())
                            .collect();
                        assert_eq!(
                            results_in_str,
                            vec![
                                "すし",
                                "すしだね",
                                "すしづめ",
                                "すしのぐ",
                                "すしめし",
                                "すしや",
                                "すしをにぎる"
                            ]
                        );
                    },
                    BatchSize::SmallInput,
                )
            },
        );
    }

    // Collects a large (4220-entry) result set eagerly, then takes 100 —
    // baseline for the lazily-limited variant below.
    pub fn predictive_search_big_output(_: &mut Criterion) {
        super::c().bench_function(
            &format!(
                "[{}] Trie::predictive_search_big_output()",
                super::git_hash(),
            ),
            move |b| {
                b.iter_batched(
                    || &TRIE_EDICT,
                    |trie| {
                        let results: Vec<Vec<u8>> = trie.predictive_search("す").collect();
                        assert_eq!(results.len(), 4220);
                        let results_in_u8s = results.into_iter().take(100);
                        assert_eq!(results_in_u8s.len(), 100);
                    },
                    BatchSize::SmallInput,
                )
            },
        );
    }

    // Same query, but `.take(100)` applied before collecting, so the search
    // can stop early — compares lazy limiting against the eager version above.
    pub fn predictive_search_limited_big_output(_: &mut Criterion) {
        super::c().bench_function(
            &format!(
                "[{}] Trie::predictive_search_limited_big_output()",
                super::git_hash(),
            ),
            move |b| {
                b.iter_batched(
                    || &TRIE_EDICT,
                    |trie| {
                        let results_in_u8s: Vec<Vec<u8>> =
                            trie.predictive_search("す").take(100).collect();
                        assert_eq!(results_in_u8s.len(), 100);
                    },
                    BatchSize::SmallInput,
                )
            },
        );
    }

    // Measures `common_prefix_search` (all stored keys that prefix the query).
    pub fn common_prefix_search(_: &mut Criterion) {
        let times = 100;
        super::c().bench_function(
            &format!(
                "[{}] Trie::common_prefix_search() {} times",
                super::git_hash(),
                times
            ),
            move |b| {
                b.iter_batched(
                    || &TRIE_EDICT,
                    |trie| {
                        // iter_batched() does not properly time `routine` time
                        // when `setup` time is far longer than `routine` time.
                        // Tested function takes too short compared to build().
                        // So loop many times.
                        let results_in_str: Vec<String> =
                            trie.common_prefix_search("すしをにぎる").collect();
                        for _ in 0..(times - 1) {
                            for entry in trie.common_prefix_search("すしをにぎる") {
                                black_box::<Vec<u8>>(entry);
                            }
                        }
                        assert_eq!(results_in_str, vec!["す", "すし", "すしをにぎる"]);
                    },
                    BatchSize::SmallInput,
                )
            },
        );
    }

    // Measures the "does any prefix exist" pattern: first item of
    // `common_prefix_search` checked with `.next().is_some()`.
    pub fn common_prefix_match(_: &mut Criterion) {
        let times = 100;
        super::c().bench_function(
            &format!(
                "[{}] Trie::common_prefix_match() {} times",
                super::git_hash(),
                times
            ),
            move |b| {
                b.iter_batched(
                    || &TRIE_EDICT,
                    |trie| {
                        // iter_batched() does not properly time `routine` time when `setup` time is far longer than `routine` time.
                        // Tested function takes too short compared to build(). So loop many times.
                        let result = trie
                            .common_prefix_search::<Vec<u8>, _>("すしをにぎる")
                            .next()
                            .is_some();
                        for _ in 0..(times - 1) {
                            let _ = trie
                                .common_prefix_search::<Vec<u8>, _>("すしをにぎる")
                                .next()
                                .is_some();
                        }
                        assert!(result);
                    },
                    BatchSize::SmallInput,
                )
            },
        );
    }
}

criterion_group!(
    benches,
    trie::build,
    trie::exact_match,
    trie::predictive_search,
    trie::predictive_search_big_output,
    trie::predictive_search_limited_big_output,
    trie::common_prefix_search,
    trie::common_prefix_match,
);
criterion_main!(benches);
rust
Apache-2.0
9fdacbb6a3abf8a81bdb479a9beaf6fceb99e90c
2026-01-04T20:24:45.056613Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/config.rs
src/config.rs
use crate::glob::Glob; use anyhow::Context; use clap::ValueEnum; use fs_err::tokio as fs; use relative_path::RelativePathBuf; use schemars::JsonSchema; use serde::Deserialize; use std::{collections::HashMap, path::PathBuf}; #[derive(Debug, Deserialize, Clone, JsonSchema)] pub struct Config { pub creator: Creator, #[serde(default)] pub codegen: Codegen, /// A map of input names to input configurations pub inputs: HashMap<String, Input>, } pub type InputMap = HashMap<String, Input>; pub const FILE_NAME: &str = "asphalt.toml"; impl Config { pub async fn read() -> anyhow::Result<Config> { let config = fs::read_to_string(FILE_NAME) .await .context("Failed to read config file")?; let config: Config = toml::from_str(&config)?; Ok(config) } } /// Optional configuration for generated files and code #[derive(Debug, Deserialize, Clone, Default, JsonSchema)] #[serde(default)] pub struct Codegen { pub style: CodegenStyle, /// Generate a TypeScript definition file pub typescript: bool, /// Strip the file extensions from asset keys pub strip_extensions: bool, /// Generate the Content data type instead of strings pub content: bool, } /// The type of Creator #[derive(Debug, Deserialize, Clone, ValueEnum, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum CreatorType { /// A personal Roblox account User, /// A Community on Roblox Group, } /// The Roblox creator to upload the assets under #[derive(Debug, Deserialize, Clone, JsonSchema)] pub struct Creator { /// The type of Creator #[serde(rename = "type")] pub ty: CreatorType, /// The Creator ID pub id: u64, } fn default_true() -> bool { true } /// A collection of assets #[derive(Debug, Deserialize, Clone, JsonSchema)] pub struct Input { /// A glob pattern to match files to upload #[schemars(with = "String")] #[serde(rename = "path")] pub include: Glob, /// The directory path to output the generated code pub output_path: PathBuf, /// Enable alpha bleeding images. 
Keep in mind that changing this setting won't invalidate your lockfile or reupload your images #[serde(default = "default_true")] pub bleed: bool, /// A map of paths relative to the input path to existing assets on Roblox #[serde(default)] #[schemars(with = "HashMap<PathBuf, WebAsset>")] pub web: HashMap<RelativePathBuf, WebAsset>, } /// An asset that exists on Roblox #[derive(Debug, Deserialize, Clone, JsonSchema)] pub struct WebAsset { /// The asset ID pub id: u64, } /// The style of code to generate #[derive(Debug, Deserialize, Default, Clone, JsonSchema)] #[serde(rename_all = "snake_case")] pub enum CodegenStyle { #[default] /// A flat table is generated with keys that look like asset paths Flat, /// A nested table is generated by separating the asset paths Nested, }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/lockfile.rs
src/lockfile.rs
// Lockfile handling for Asphalt. The current (v2) format keys entries by
// BLAKE3 content hash per input; v0/v1 formats keyed entries by file path and
// are migrated via `RawLockfile::migrate`.
use anyhow::{Context, bail};
use blake3::Hasher;
use fs_err::tokio as fs;
use serde::{Deserialize, Serialize};
use std::{
    collections::BTreeMap,
    path::{Path, PathBuf},
};

pub const FILE_NAME: &str = "asphalt.lock.toml";

/// Current (v2) lockfile: input name -> (content hash -> entry).
#[derive(Debug, Serialize, Deserialize)]
pub struct Lockfile {
    version: u32,
    inputs: BTreeMap<String, BTreeMap<String, LockfileEntry>>,
}

/// One uploaded asset recorded in the lockfile.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct LockfileEntry {
    pub asset_id: u64,
}

impl Default for Lockfile {
    fn default() -> Self {
        Self {
            // New lockfiles are always written at the current version.
            version: 2,
            inputs: BTreeMap::new(),
        }
    }
}

impl Lockfile {
    /// Looks up the entry for `hash` under `input_name`, if any.
    pub fn get(&self, input_name: &str, hash: &str) -> Option<&LockfileEntry> {
        self.inputs.get(input_name).and_then(|m| m.get(hash))
    }

    /// Inserts (or replaces) the entry for `hash` under `input_name`,
    /// creating the input's map on first use.
    pub fn insert(&mut self, input_name: &str, hash: &str, entry: LockfileEntry) {
        self.inputs
            .entry(input_name.to_string())
            .or_default()
            .insert(hash.to_owned(), entry);
    }

    /// Serializes the lockfile to TOML (prefixed with a "generated file"
    /// banner) and writes it to `filename`, defaulting to `asphalt.lock.toml`.
    pub async fn write(&self, filename: Option<&Path>) -> anyhow::Result<()> {
        let mut content = toml::to_string(self)?;
        content.insert_str(0, "# This file is automatically @generated by Asphalt.\n# It is not intended for manual editing.\n");
        fs::write(filename.unwrap_or(Path::new(FILE_NAME)), content).await?;
        Ok(())
    }
}

/// Entry shape used by the legacy v0/v1 formats (hash stored inside the
/// entry; map keyed by file path instead).
#[derive(Debug, Serialize, Deserialize)]
pub struct OldLockfileEntry {
    pub hash: String,
    pub asset_id: u64,
}

/// Pre-1.0 format: a single flat map of path -> entry, no version field.
#[derive(Debug, Serialize, Deserialize)]
pub struct LockfileV0 {
    entries: BTreeMap<PathBuf, OldLockfileEntry>,
}

/// v1 format: per-input maps, but still keyed by file path.
#[derive(Debug, Serialize, Deserialize)]
pub struct LockfileV1 {
    version: u32,
    inputs: BTreeMap<String, BTreeMap<PathBuf, OldLockfileEntry>>,
}

/// A lockfile in whichever on-disk format it was found.
#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum RawLockfile {
    V0(LockfileV0),
    V1(LockfileV1),
    V2(Lockfile),
}

impl Default for RawLockfile {
    fn default() -> Self {
        Self::V2(Lockfile::default())
    }
}

impl RawLockfile {
    /// Reads `asphalt.lock.toml`, dispatching on its `version` field.
    /// A missing file yields a fresh, empty v2 lockfile.
    pub async fn read() -> anyhow::Result<RawLockfile> {
        let content = fs::read_to_string(FILE_NAME).await;
        let content = match content {
            Err(_) => return Ok(Self::default()),
            Ok(content) => content,
        };
        // Peek at the `version` key first so we can parse with the matching
        // concrete shape (v0 files have no version field at all).
        let raw: toml::Value = toml::from_str(&content)?;
        match raw.get("version").and_then(|v| v.as_integer()) {
            Some(2) => Ok(RawLockfile::V2(toml::from_str(&content)?)),
            Some(1) => Ok(RawLockfile::V1(toml::from_str(&content)?)),
            Some(0) | None => Ok(RawLockfile::V0(toml::from_str(&content)?)),
            _ => bail!("Unsupported lockfile version"),
        }
    }

    /// Returns the lockfile if it is already current; errors with a
    /// migration hint otherwise.
    pub fn into_lockfile(self) -> anyhow::Result<Lockfile> {
        match self {
            Self::V2(lockfile) => Ok(lockfile),
            _ => anyhow::bail!("Your lockfile is out of date, please run asphalt migrate-lockfile"),
        }
    }

    /// Upgrades a legacy lockfile to v2. Migrating from v0 requires an
    /// `input_name` because that format predates multiple inputs.
    pub async fn migrate(self, input_name: Option<&str>) -> anyhow::Result<Lockfile> {
        match (self, input_name) {
            (Self::V2(_), _) => bail!("Your lockfile is already up to date"),
            (Self::V1(v1), _) => Ok(migrate_from_v1(&v1)),
            (Self::V0(v0), Some(name)) => migrate_from_v0(&v0, name).await,
            (Self::V0(_), None) => {
                bail!("An input name must be passed in order to migrate from v0 to v1")
            }
        }
    }
}

// v1 -> v2: drop the per-path keys and re-key each entry by the content hash
// it already stored.
fn migrate_from_v1(lockfile: &LockfileV1) -> Lockfile {
    let mut new_lockfile = Lockfile::default();

    for (input_name, entries) in &lockfile.inputs {
        for entry in entries.values() {
            new_lockfile.insert(
                input_name,
                &entry.hash,
                LockfileEntry {
                    asset_id: entry.asset_id,
                },
            )
        }
    }

    new_lockfile
}

// v0 -> v2: re-hash each referenced file on disk (so existing uploads keep
// their IDs) and file every entry under the user-supplied input name.
async fn migrate_from_v0(lockfile: &LockfileV0, input_name: &str) -> anyhow::Result<Lockfile> {
    let mut new_lockfile = Lockfile::default();

    for (path, entry) in &lockfile.entries {
        let new_hash = read_and_hash(path)
            .await
            .context(format!("Failed to hash {}", path.display()))?;
        new_lockfile.insert(
            input_name,
            &new_hash,
            LockfileEntry {
                asset_id: entry.asset_id,
            },
        )
    }

    Ok(new_lockfile)
}

// Reads a file and returns its BLAKE3 hash as a hex string.
async fn read_and_hash(path: &Path) -> anyhow::Result<String> {
    let bytes = fs::read(path).await?;
    let mut hasher = Hasher::new();
    hasher.update(&bytes);
    Ok(hasher.finalize().to_string())
}
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/cli.rs
src/cli.rs
use crate::config::CreatorType; use clap::{Args, Parser, Subcommand}; use clap_verbosity_flag::{InfoLevel, Verbosity}; #[derive(Parser)] #[command(version, about = "Upload and reference Roblox assets in code.")] pub struct Cli { #[command(subcommand)] pub command: Commands, #[command(flatten)] pub verbose: Verbosity<InfoLevel>, } #[derive(Subcommand)] pub enum Commands { /// Sync assets. Sync(SyncArgs), /// Uploads a single asset and returns the asset ID. Upload(UploadArgs), /// Migrates a lockfile to the latest version. /// /// You can only run this once per upgrade, and it will overwrite the existing lockfile. /// Keep in mind that because pre-1.0 did not support multiple inputs, you'll need to provide a default input name for that migration. /// The pre-1.0 migration entails hashing your files again and updating the lockfile with the new hashes. /// We basically pretend nothing has changed, so your assets don't get reuploaded. MigrateLockfile(MigrateLockfileArgs), #[command(hide = true)] GenerateConfigSchema, } #[derive(Subcommand, Clone, Copy)] pub enum SyncTarget { /// Upload assets to Roblox cloud. Cloud { /// Error if assets would be uploaded. #[arg(long)] dry_run: bool, }, /// Write assets to the Roblox Studio content folder. Studio, /// Write assets to the .asphalt-debug folder. Debug, } impl SyncTarget { pub fn write_on_sync(&self) -> bool { matches!(self, SyncTarget::Cloud { dry_run: false }) } } #[derive(Args, Clone)] pub struct SyncArgs { /// Your Open Cloud API key. #[arg(short, long, env = "ASPHALT_API_KEY")] pub api_key: Option<String>, /// Where Asphalt should sync assets to. #[command(subcommand)] target: Option<SyncTarget>, /// Provides Roblox with the amount of Robux that you are willing to spend on each non-free asset upload. 
#[arg(long)] pub expected_price: Option<u32>, } impl SyncArgs { pub fn target(&self) -> SyncTarget { self.target.unwrap_or(SyncTarget::Cloud { dry_run: false }) } } #[derive(Args)] pub struct UploadArgs { /// The file to upload. pub path: String, /// The creator type of the asset. #[arg(long)] pub creator_type: CreatorType, /// The creator ID of the asset. #[arg(long)] pub creator_id: u64, /// Your Open Cloud API key. /// Can also be set with the ASPHALT_API_KEY environment variable. #[arg(short, long)] pub api_key: Option<String>, /// Whether to alpha bleed if it's an image. #[arg(long, default_value = "true")] pub bleed: bool, /// Format the response as a link. #[arg(long)] pub link: bool, /// Provides Roblox with the amount of Robux that you are willing to spend on each non-free asset upload. #[arg(long)] pub expected_price: Option<u32>, } #[derive(Args)] pub struct MigrateLockfileArgs { /// The default input name to use. Only applies when upgrading from V0 to V1. pub input_name: Option<String>, }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/asset.rs
src/asset.rs
// Asset loading and classification: maps file extensions to Roblox asset
// types, preprocesses data (SVG -> PNG rasterization, PNG alpha bleeding),
// and records the content hash of the *original* bytes for lockfile lookups.
use crate::{
    config::WebAsset,
    lockfile::LockfileEntry,
    util::{alpha_bleed::alpha_bleed, svg::svg_to_png},
};
use anyhow::Context;
use blake3::Hasher;
use bytes::Bytes;
use image::DynamicImage;
use relative_path::RelativePathBuf;
use resvg::usvg::fontdb::{self};
use serde::Serialize;
use std::{ffi::OsStr, fmt, io::Cursor, sync::Arc};
use tokio::task::spawn_blocking;

// Classifier: inspects the raw file bytes and decides the asset type.
type AssetCtor = fn(&[u8]) -> anyhow::Result<AssetType>;

// Extension (lowercase) -> classifier table. Most entries ignore the bytes;
// rbxm/rbxmx must parse the model to tell animations apart from models, and
// "svg" maps to Png because SVGs are rasterized to PNG in `Asset::new`.
const SUPPORTED_EXTENSIONS: &[(&str, AssetCtor)] = &[
    ("mp3", |_| Ok(AssetType::Audio(AudioType::Mp3))),
    ("ogg", |_| Ok(AssetType::Audio(AudioType::Ogg))),
    ("flac", |_| Ok(AssetType::Audio(AudioType::Flac))),
    ("wav", |_| Ok(AssetType::Audio(AudioType::Wav))),
    ("png", |_| Ok(AssetType::Image(ImageType::Png))),
    ("svg", |_| Ok(AssetType::Image(ImageType::Png))),
    ("jpg", |_| Ok(AssetType::Image(ImageType::Jpg))),
    ("jpeg", |_| Ok(AssetType::Image(ImageType::Jpg))),
    ("bmp", |_| Ok(AssetType::Image(ImageType::Bmp))),
    ("tga", |_| Ok(AssetType::Image(ImageType::Tga))),
    ("fbx", |_| Ok(AssetType::Model(ModelType::Fbx))),
    ("gltf", |_| Ok(AssetType::Model(ModelType::GltfJson))),
    ("glb", |_| Ok(AssetType::Model(ModelType::GltfBinary))),
    ("rbxm", |data| {
        let format = RobloxModelFormat::Binary;
        if is_animation(data, &format)? {
            Ok(AssetType::Animation)
        } else {
            Ok(AssetType::Model(ModelType::Roblox))
        }
    }),
    ("rbxmx", |data| {
        let format = RobloxModelFormat::Xml;
        if is_animation(data, &format)? {
            Ok(AssetType::Animation)
        } else {
            Ok(AssetType::Model(ModelType::Roblox))
        }
    }),
    ("mp4", |_| Ok(AssetType::Video(VideoType::Mp4))),
    ("mov", |_| Ok(AssetType::Video(VideoType::Mov))),
];

/// True if `ext` appears in the supported-extension table.
/// NOTE(review): the comparison is case-sensitive against the lowercase
/// table entries, so e.g. "PNG" would not match — confirm whether callers
/// normalize extensions first.
pub fn is_supported_extension(ext: &OsStr) -> bool {
    SUPPORTED_EXTENSIONS.iter().any(|(e, _)| *e == ext)
}

pub struct Asset {
    /// Relative to Input prefix
    pub path: RelativePathBuf,
    // Possibly-transformed file bytes (SVG rasterized, PNG alpha-bled).
    pub data: Bytes,
    // Asset classification derived from the extension (and bytes for rbxm/rbxmx).
    pub ty: AssetType,
    // Effective extension after processing ("svg" becomes "png").
    pub ext: String,
    /// The hash before processing
    pub hash: String,
}

impl Asset {
    /// Builds an `Asset` from raw file bytes:
    /// 1. classifies the file by extension (erroring on none/unknown),
    /// 2. hashes the ORIGINAL bytes (so lockfile hashes are stable even if
    ///    processing settings change),
    /// 3. rasterizes SVG to PNG and optionally alpha-bleeds PNGs.
    /// CPU-heavy work runs on a blocking task via `spawn_blocking`.
    pub async fn new(
        path: RelativePathBuf,
        data: Vec<u8>,
        font_db: Arc<fontdb::Database>,
        bleed: bool,
    ) -> anyhow::Result<Self> {
        let mut ext = path
            .extension()
            .context("File has no extension")?
            .to_string();

        // Double `?`: the lookup may miss (outer) and the classifier itself
        // may fail while parsing rbxm/rbxmx bytes (inner).
        let ty = SUPPORTED_EXTENSIONS
            .iter()
            .find(|(e, _)| *e == ext)
            .map(|(_, func)| func(&data))
            .context("Unknown file type")??;

        let (data, hash, ext) = spawn_blocking({
            let font_db = font_db.clone();
            move || {
                let mut data = Bytes::from(data);

                // Hash BEFORE any transformation — see struct field docs.
                let mut hasher = Hasher::new();
                hasher.update(&data);
                let hash = hasher.finalize().to_string();

                if ext == "svg" {
                    data = svg_to_png(&data, font_db)?.into();
                    ext = "png".to_string();
                }

                if matches!(ty, AssetType::Image(ImageType::Png)) && bleed {
                    let mut image: DynamicImage = image::load_from_memory(&data)?;
                    alpha_bleed(&mut image);

                    let mut writer = Cursor::new(Vec::new());
                    image.write_to(&mut writer, image::ImageFormat::Png)?;
                    data = Bytes::from(writer.into_inner());
                }

                anyhow::Ok((data, hash, ext))
            }
        })
        .await??;

        Ok(Self {
            path,
            data,
            ty,
            ext,
            hash,
        })
    }
}

#[derive(Debug, Clone, Copy)]
pub enum AssetType {
    Model(ModelType),
    Animation,
    Image(ImageType),
    Audio(AudioType),
    Video(VideoType),
}

impl AssetType {
    // https://create.roblox.com/docs/cloud/guides/usage-assets#supported-asset-types-and-limits
    /// The Open Cloud `assetType` string for this asset.
    pub fn asset_type(&self) -> &'static str {
        match self {
            AssetType::Model(_) => "Model",
            AssetType::Animation => "Animation",
            AssetType::Image(_) => "Image",
            AssetType::Audio(_) => "Audio",
            AssetType::Video(_) => "Video",
        }
    }

    /// The MIME-style content type used when uploading the file.
    pub fn file_type(&self) -> &'static str {
        match self {
            AssetType::Animation => "model/x-rbxm",
            AssetType::Model(ModelType::Fbx) => "model/fbx",
            AssetType::Model(ModelType::GltfJson) => "model/gltf+json",
            AssetType::Model(ModelType::GltfBinary) => "model/gltf-binary",
            AssetType::Model(ModelType::Roblox) => "model/x-rbxm",
            AssetType::Image(ImageType::Png) => "image/png",
            AssetType::Image(ImageType::Jpg) => "image/jpeg",
            AssetType::Image(ImageType::Bmp) => "image/bmp",
            AssetType::Image(ImageType::Tga) => "image/tga",
            AssetType::Audio(AudioType::Mp3) => "audio/mpeg",
            AssetType::Audio(AudioType::Ogg) => "audio/ogg",
            AssetType::Audio(AudioType::Flac) => "audio/flac",
            AssetType::Audio(AudioType::Wav) => "audio/wav",
            AssetType::Video(VideoType::Mp4) => "video/mp4",
            AssetType::Video(VideoType::Mov) => "video/mov",
        }
    }
}

// Serializes as the coarse asset-type string (e.g. "Image"), not the variant.
impl Serialize for AssetType {
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(self.asset_type())
    }
}

#[derive(Debug, Clone, Copy)]
pub enum AudioType {
    Mp3,
    Ogg,
    Flac,
    Wav,
}

#[derive(Debug, Clone, Copy)]
pub enum ImageType {
    Png,
    Jpg,
    Bmp,
    Tga,
}

#[derive(Debug, Clone, Copy)]
pub enum ModelType {
    Fbx,
    GltfJson,
    GltfBinary,
    Roblox,
}

#[derive(Debug, Clone, Copy)]
pub enum VideoType {
    Mp4,
    Mov,
}

/// Parses a Roblox model file and reports whether its first root child is an
/// animation class (`KeyframeSequence` or `CurveAnimation`). Errors if the
/// model cannot be parsed or has no root children.
pub fn is_animation(data: &[u8], format: &RobloxModelFormat) -> anyhow::Result<bool> {
    let dom = match format {
        RobloxModelFormat::Binary => rbx_binary::from_reader(data)?,
        RobloxModelFormat::Xml => rbx_xml::from_reader(data, Default::default())?,
    };

    let children = dom.root().children();
    let first_ref = *children.first().context("No children found in root")?;
    let first = dom
        .get_by_ref(first_ref)
        .context("Failed to get first child")?;

    Ok(first.class == "KeyframeSequence" || first.class == "CurveAnimation")
}

#[derive(Debug, Clone)]
pub enum RobloxModelFormat {
    Binary,
    Xml,
}

/// A reference to an asset, either uploaded to the cloud (by ID) or present
/// locally for Roblox Studio (by name).
#[derive(Debug, Clone)]
pub enum AssetRef {
    Cloud(u64),
    Studio(String),
}

// Renders the reference in the URI scheme Roblox expects for each source.
impl fmt::Display for AssetRef {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            AssetRef::Cloud(id) => write!(f, "rbxassetid://{id}"),
            AssetRef::Studio(name) => write!(f, "rbxasset://{name}"),
        }
    }
}

impl From<WebAsset> for AssetRef {
    fn from(value: WebAsset) -> Self {
        AssetRef::Cloud(value.id)
    }
}

impl From<&LockfileEntry> for AssetRef {
    fn from(value: &LockfileEntry) -> Self {
        AssetRef::Cloud(value.asset_id)
    }
}
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/glob.rs
src/glob.rs
//! Wrapper around globset's Glob type that has better serialization //! characteristics by coupling Glob and GlobMatcher into a single type. //! https://github.com/Roblox/tarmac/blob/master/src/glob.rs use std::{ fmt, path::{Path, PathBuf}, }; use globset::{Glob as InnerGlob, GlobMatcher}; use serde::{Deserialize, Deserializer, Serialize, Serializer, de::Error as _}; pub use globset::Error; #[derive(Debug, Clone)] pub struct Glob { inner: InnerGlob, matcher: GlobMatcher, } impl Glob { pub fn new(glob: &str) -> Result<Self, Error> { let inner = InnerGlob::new(glob)?; let matcher = inner.compile_matcher(); Ok(Glob { inner, matcher }) } pub fn is_match<P: AsRef<Path>>(&self, path: P) -> bool { self.matcher.is_match(path) } pub fn get_prefix(&self) -> PathBuf { get_non_pattern_prefix(Path::new(self.inner.glob())) } } impl PartialEq for Glob { fn eq(&self, other: &Self) -> bool { self.inner == other.inner } } impl Eq for Glob {} impl Serialize for Glob { fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> { serializer.serialize_str(self.inner.glob()) } } impl<'de> Deserialize<'de> for Glob { fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> { let glob = <String as Deserialize>::deserialize(deserializer)?; Glob::new(glob.as_str()).map_err(D::Error::custom) } } impl fmt::Display for Glob { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { self.inner.fmt(f) } } // A basic set of characters that might indicate the use of glob pattern syntax. // This is to distinguish portions of a glob that are fixed paths (e.g. // "foo.png") from ones that are leveraging patterns (e.g. "*.png"). 
// // This approach has false positives, as it will treat escape sequences like // `[*]` as pattern syntax, but those should be rare enough to be acceptable // // Glob syntax described here: https://docs.rs/globset/0.4.4/globset/#syntax const GLOB_PATTERN_CHARACTERS: &str = "*?{}[]"; fn get_non_pattern_prefix(glob_path: &Path) -> PathBuf { let mut prefix = PathBuf::new(); for component in glob_path.iter() { let component_str = component.to_str().unwrap(); if GLOB_PATTERN_CHARACTERS .chars() .any(|special_char| component_str.contains(special_char)) { break; } prefix.push(component); } prefix } #[cfg(test)] mod test { use super::*; #[test] fn simple_prefix() { assert_eq!( get_non_pattern_prefix(Path::new("a/b/**/*.png")), PathBuf::from("a/b") ); } #[test] fn prefix_only() { assert_eq!( get_non_pattern_prefix(Path::new("a/**/b/*.png")), PathBuf::from("a") ); } #[test] fn no_prefix() { assert_eq!( get_non_pattern_prefix(Path::new("**/b/*.png")), PathBuf::from("") ); } #[test] fn whole_path() { assert_eq!( get_non_pattern_prefix(Path::new("a/b/foo.png")), PathBuf::from("a/b/foo.png") ) } }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/migrate_lockfile.rs
src/migrate_lockfile.rs
use crate::{cli::MigrateLockfileArgs, lockfile::RawLockfile}; pub async fn migrate_lockfile(args: MigrateLockfileArgs) -> anyhow::Result<()> { let file = RawLockfile::read().await?; let migrated = file.migrate(args.input_name.as_deref()).await?; migrated.write(None).await?; Ok(()) }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/upload.rs
src/upload.rs
use crate::{asset::Asset, cli::UploadArgs, config::Creator, web_api::WebApiClient}; use anyhow::Context; use fs_err::tokio as fs; use relative_path::PathExt; use resvg::usvg::fontdb::Database; use std::{path::PathBuf, sync::Arc}; pub async fn upload(args: UploadArgs) -> anyhow::Result<()> { let path = PathBuf::from(&args.path); let data = fs::read(&path).await?; let mut font_db = Database::new(); font_db.load_system_fonts(); let asset = Asset::new(path.relative_to(".")?, data, Arc::new(font_db), args.bleed).await?; let creator = Creator { ty: args.creator_type, id: args.creator_id, }; let client = WebApiClient::new( args.api_key .context("An API key is required to use the upload command")?, creator, args.expected_price, ); let asset_id = client.upload(&asset).await?; if args.link { println!("https://create.roblox.com/store/asset/{asset_id}"); } else { println!("{asset_id}"); } Ok(()) }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/web_api.rs
src/web_api.rs
use crate::{ asset::{Asset, AssetType}, config, }; use anyhow::{Context, bail}; use log::{debug, warn}; use reqwest::{RequestBuilder, Response, StatusCode, multipart}; use serde::{Deserialize, Serialize}; use std::{ env, sync::atomic::{AtomicBool, Ordering}, time::Duration, }; use tokio::sync::Mutex; use tokio::time::Instant; const RATELIMIT_RESET_HEADER: &str = "x-ratelimit-reset"; const UPLOAD_URL: &str = "https://apis.roblox.com/assets/v1/assets"; const OPERATION_URL: &str = "https://apis.roblox.com/assets/v1/operations"; const ASSET_DESCRIPTION: &str = "Uploaded by Asphalt"; const MAX_DISPLAY_NAME_LENGTH: usize = 50; pub struct WebApiClient { inner: reqwest::Client, api_key: String, creator: config::Creator, expected_price: Option<u32>, fatally_failed: AtomicBool, /// Shared rate limit state: when we can next make a request rate_limit_reset: Mutex<Option<Instant>>, } impl WebApiClient { pub fn new(api_key: String, creator: config::Creator, expected_price: Option<u32>) -> Self { WebApiClient { inner: reqwest::Client::new(), api_key, creator, expected_price, fatally_failed: AtomicBool::new(false), rate_limit_reset: Mutex::new(None), } } pub async fn upload(&self, asset: &Asset) -> anyhow::Result<u64> { if env::var("ASPHALT_TEST").is_ok() { return Ok(1337); } let file_name = asset.path.file_name().unwrap(); let display_name = trim_display_name(file_name); let req = Request { display_name, asset_type: asset.ty, creation_context: CreationContext { creator: self.creator.clone().into(), expected_price: self.expected_price, }, description: ASSET_DESCRIPTION, }; let len = asset.data.len() as u64; let req_json = serde_json::to_string(&req)?; let mime = req.asset_type.file_type().to_owned(); let name = file_name.to_owned(); let res = self .send_with_retry(|client| { let file_part = multipart::Part::stream_with_length( reqwest::Body::from(asset.data.clone()), len, ) .file_name(name.clone()) .mime_str(&mime) .unwrap(); let form = multipart::Form::new() .text("request", 
req_json.clone()) .part("fileContent", file_part); client .post(UPLOAD_URL) .header("x-api-key", &self.api_key) .multipart(form) }) .await?; let body = res.text().await?; let operation: Operation = serde_json::from_str(&body)?; let id = self .poll_operation(operation.operation_id, &self.api_key) .await .context("Failed to poll operation")?; Ok(id) } async fn poll_operation(&self, id: String, api_key: &str) -> anyhow::Result<u64> { let mut delay = Duration::from_secs(1); const MAX_POLLS: u32 = 10; for attempt in 0..MAX_POLLS { let res = self .send_with_retry(|client| { client .get(format!("{OPERATION_URL}/{id}")) .header("x-api-key", api_key) }) .await?; let text = res.text().await?; let operation: Operation = serde_json::from_str(&text)?; if operation.done { if let Some(response) = operation.response { return Ok(response.asset_id.parse()?); } else { bail!("Operation completed but no response provided"); } } debug!("Operation not done yet"); if attempt < MAX_POLLS - 1 { tokio::time::sleep(delay).await; delay *= 2; } } bail!("Operation polling exceeded maximum retries") } async fn send_with_retry<F>(&self, make_req: F) -> anyhow::Result<Response> where F: Fn(&reqwest::Client) -> RequestBuilder, { if self.fatally_failed.load(Ordering::SeqCst) { bail!("A previous request failed due to a fatal error"); } const MAX: u8 = 5; let mut attempt = 0; loop { { let reset = self.rate_limit_reset.lock().await; if let Some(reset_at) = *reset { let now = Instant::now(); if reset_at > now { let wait = reset_at - now; drop(reset); debug!("Waiting {:.2}ms for rate limit reset", wait.as_secs_f64()); tokio::time::sleep(wait).await; } } } let res = make_req(&self.inner).send().await?; let status = res.status(); match status { StatusCode::TOO_MANY_REQUESTS if attempt < MAX => { let wait = res .headers() .get(RATELIMIT_RESET_HEADER) .and_then(|h| h.to_str().ok()) .and_then(|s| s.parse::<u64>().ok()) .map(Duration::from_secs) .unwrap_or_else(|| Duration::from_secs(1 << attempt)); let 
reset_at = Instant::now() + wait; { let mut reset = self.rate_limit_reset.lock().await; *reset = Some(reset_at); } warn!( "Rate limited, retrying in {:.2} seconds", wait.as_secs_f64() ); tokio::time::sleep(wait).await; attempt += 1; continue; } StatusCode::OK => return Ok(res), _ => { let body = res.text().await?; self.fatally_failed.store(true, Ordering::SeqCst); bail!("Request failed with status {status}:\n{body}"); } } } } } #[derive(Serialize)] #[serde(rename_all = "camelCase")] struct Request { asset_type: AssetType, display_name: String, description: &'static str, creation_context: CreationContext, } #[derive(Serialize)] #[serde(rename_all = "camelCase")] struct CreationContext { creator: Creator, expected_price: Option<u32>, } #[derive(Serialize)] #[serde(untagged)] enum Creator { User(UserCreator), Group(GroupCreator), } impl From<config::Creator> for Creator { fn from(value: config::Creator) -> Self { match value.ty { config::CreatorType::User => Creator::User(UserCreator { user_id: value.id.to_string(), }), config::CreatorType::Group => Creator::Group(GroupCreator { group_id: value.id.to_string(), }), } } } #[derive(Serialize)] #[serde(rename_all = "camelCase")] struct UserCreator { user_id: String, } #[derive(Serialize)] #[serde(rename_all = "camelCase")] struct GroupCreator { group_id: String, } #[derive(Deserialize)] #[serde(rename_all = "camelCase")] struct Operation { done: bool, operation_id: String, response: Option<OperationResponse>, } #[derive(Deserialize)] #[serde(rename_all = "camelCase")] struct OperationResponse { asset_id: String, } fn trim_display_name(name: &str) -> String { let full_path = name.to_string(); if full_path.len() > MAX_DISPLAY_NAME_LENGTH { let start_index = full_path.len().saturating_sub(MAX_DISPLAY_NAME_LENGTH); full_path[start_index..].to_string() } else { full_path } }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/main.rs
src/main.rs
use clap::Parser; use cli::{Cli, Commands}; use dotenvy::dotenv; use fs_err::tokio as fs; use indicatif::MultiProgress; use log::LevelFilter; use migrate_lockfile::migrate_lockfile; use schemars::schema_for; use sync::sync; use upload::upload; use crate::config::Config; mod asset; mod cli; mod config; mod glob; mod lockfile; mod migrate_lockfile; mod sync; mod upload; mod util; mod web_api; #[tokio::main] async fn main() -> anyhow::Result<()> { let _ = dotenv(); let args = Cli::parse(); let mut binding = env_logger::Builder::new(); let logger = binding .filter_level(LevelFilter::Info) .filter_module("asphalt", args.verbose.log_level_filter()) .format_timestamp(None) .format_module_path(false) .build(); let level = logger.filter(); let multi_progress = MultiProgress::new(); indicatif_log_bridge::LogWrapper::new(multi_progress.clone(), logger).try_init()?; log::set_max_level(level); match args.command { Commands::Sync(args) => sync(args, multi_progress).await, Commands::Upload(args) => upload(args).await, Commands::MigrateLockfile(args) => migrate_lockfile(args).await, Commands::GenerateConfigSchema => generate_config_schema().await, } } async fn generate_config_schema() -> anyhow::Result<()> { let schema = schema_for!(Config); fs::write( "schema.json", serde_json::to_string_pretty(&schema).unwrap(), ) .await?; Ok(()) }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/util/alpha_bleed.rs
src/util/alpha_bleed.rs
//! Changes pixels in an image that are totally transparent to the color of //! their nearest non-transparent neighbor. This fixes artifacting when images //! are resized in some contexts. use std::collections::VecDeque; use bit_vec::BitVec; use image::{DynamicImage, GenericImage, GenericImageView, Rgba}; pub fn alpha_bleed(img: &mut DynamicImage) { let (w, h) = img.dimensions(); // Tells whether a given position has been touched by the bleeding algorithm // yet and is safe to sample colors from. In the first pass, we'll set all // pixels that aren't totally transparent since this algorithm won't mutate // them. let mut can_be_sampled = Mask2::new(w, h); // The set of images that we've already visited and don't need to queue if // traversed again. let mut visited = Mask2::new(w, h); // A queue of pixels to blend with surrounding pixels with next. // // Populated initially with all pixels that border opaque pixels. We'll use // it to blend outwards from each opaque pixel breadth-first. let mut to_visit = VecDeque::new(); // An iterator of in-bounds positions adjacent to the given one. let adjacent_positions = |x, y| { DIRECTIONS.iter().filter_map(move |(x_offset, y_offset)| { let x_source = (x as i32) + x_offset; let y_source = (y as i32) + y_offset; if x_source < 0 || y_source < 0 || x_source >= w as i32 || y_source >= h as i32 { return None; } Some((x_source as u32, y_source as u32)) }) }; // Populate the set of initial positions to visit as well as positions that // are valid to sample from. for y in 0..h { for x in 0..w { let pixel = img.get_pixel(x, y); if pixel[3] != 0 { // This pixel is not totally transparent, so we don't need to // modify it. We'll add it to the `can_be_sampled` set to // indicate it's okay to sample from this pixel. can_be_sampled.set(x, y); visited.set(x, y); continue; } // Check if any adjacent pixels have non-zero alpha. 
let borders_opaque = adjacent_positions(x, y).any(|(x_source, y_source)| { let source = img.get_pixel(x_source, y_source); source[3] != 0 }); if borders_opaque { // This pixel is totally transparent, but borders at least one // opaque pixel. We'll add it to the initial set of positions to // visit. visited.set(x, y); to_visit.push_back((x, y)); } } } loop { let queue_length = to_visit.len(); if queue_length == 0 { break; } let mut mutated_coords: Vec<(u32, u32)> = vec![(0, 0); queue_length]; for _ in 0..queue_length { if let Some((x, y)) = to_visit.pop_front() { // Compute the average color from all surrounding pixels that are // eligible to be sampled from. let mut new_color = (0, 0, 0); let mut contributing = 0; for (x_source, y_source) in adjacent_positions(x, y) { if can_be_sampled.get(x_source, y_source) { let source = img.get_pixel(x_source, y_source); contributing += 1; new_color.0 += source[0] as u16; new_color.1 += source[1] as u16; new_color.2 += source[2] as u16; } else if !visited.get(x_source, y_source) { visited.set(x_source, y_source); to_visit.push_back((x_source, y_source)); } } let denominator = u16::max(1, contributing); let pixel = Rgba([ (new_color.0 / denominator) as u8, (new_color.1 / denominator) as u8, (new_color.2 / denominator) as u8, 0, ]); img.put_pixel(x, y, pixel); mutated_coords.push((x, y)); } } for _ in 0..queue_length { if let Some((x, y)) = mutated_coords.pop() { // Now that we've bled this pixel, it's eligible to be sampled from for // future iterations. 
can_be_sampled.set(x, y); } } } } const DIRECTIONS: &[(i32, i32)] = &[ (1, 0), (1, 1), (0, 1), (-1, 1), (-1, 0), (-1, -1), (0, -1), (1, -1), ]; struct Mask2 { size: (u32, u32), data: BitVec, } impl Mask2 { fn new(w: u32, h: u32) -> Self { Self { size: (w, h), data: BitVec::from_elem((w * h) as usize, false), } } fn get(&self, x: u32, y: u32) -> bool { let index = x + y * self.size.0; self.data.get(index as usize).unwrap_or(false) } fn set(&mut self, x: u32, y: u32) { let index = x + y * self.size.0; self.data.set(index as usize, true); } }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/util/svg.rs
src/util/svg.rs
use resvg::{ tiny_skia::Pixmap, usvg::{Options, Transform, Tree, fontdb::Database}, }; use std::sync::Arc; pub fn svg_to_png(data: &[u8], fontdb: Arc<Database>) -> anyhow::Result<Vec<u8>> { let opt = Options { fontdb, ..Default::default() }; let rtree = Tree::from_data(data, &opt)?; let pixmap_size = rtree.size(); let mut pixmap = Pixmap::new(pixmap_size.width() as u32, pixmap_size.height() as u32).unwrap(); resvg::render(&rtree, Transform::identity(), &mut pixmap.as_mut()); let encoded = pixmap.encode_png()?; Ok(encoded) }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/util/mod.rs
src/util/mod.rs
pub mod alpha_bleed; pub mod svg;
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/sync/collect.rs
src/sync/collect.rs
use indicatif::{MultiProgress, ProgressBar, ProgressStyle}; use std::{ collections::{HashMap, HashSet}, path::PathBuf, }; use tokio::sync::mpsc::UnboundedReceiver; use crate::{ asset::AssetRef, cli::SyncTarget, config::InputMap, lockfile::{Lockfile, LockfileEntry}, sync::codegen::NodeSource, }; pub struct CollectResults { pub new_lockfile: Lockfile, pub input_sources: HashMap<String, NodeSource>, pub new_count: u64, pub any_failed: bool, } pub async fn collect_events( mut rx: UnboundedReceiver<super::Event>, target: SyncTarget, inputs: InputMap, mp: MultiProgress, ) -> anyhow::Result<CollectResults> { let mut new_lockfile = Lockfile::default(); let mut input_sources: HashMap<String, NodeSource> = HashMap::new(); for (input_name, input) in inputs { for (rel_path, web_asset) in &input.web { input_sources .entry(input_name.clone()) .or_default() .insert(rel_path.clone(), web_asset.clone().into()); } } let mut progress = Progress::new(mp, target); let mut seen_paths = HashSet::new(); while let Some(event) = rx.recv().await { match event { super::Event::Discovered(path) => { if !seen_paths.contains(&path) { progress.discovered += 1; } } super::Event::InFlight(path) => { if !seen_paths.contains(&path) { progress.in_flight.insert(path.clone()); } } super::Event::Finished { state, input_name, path, rel_path, hash, asset_ref, } => { seen_paths.insert(path.clone()); if let Some(asset_ref) = asset_ref { input_sources .entry(input_name.clone()) .or_default() .insert(rel_path.clone(), asset_ref.clone()); if let AssetRef::Cloud(id) = asset_ref { new_lockfile.insert(&input_name, &hash, LockfileEntry { asset_id: id }); } } match state { super::EventState::Synced { new } => { progress.synced += 1; if new { progress.new += 1; if target.write_on_sync() { new_lockfile.write(None).await?; } } } super::EventState::Duplicate => { progress.dupes += 1; } } progress.in_flight.remove(&path); } super::Event::Failed(path) => { progress.failed += 1; progress.in_flight.remove(&path); } } 
progress.update(); } progress.finish(); Ok(CollectResults { new_lockfile, input_sources, new_count: progress.new, any_failed: progress.failed > 0, }) } struct Progress { inner: ProgressBar, target: SyncTarget, in_flight: HashSet<PathBuf>, discovered: u64, synced: u64, new: u64, dupes: u64, failed: u64, } impl Progress { fn get_style(finished: bool) -> ProgressStyle { ProgressStyle::default_bar() .template(&format!( "{{prefix:.{prefix_color}.bold}}{bar} {{pos}}/{{len}} assets: ({{msg}})", prefix_color = if finished { "green" } else { "cyan" }, bar = if finished { "" } else { " [{bar:40}]" }, )) .unwrap() .progress_chars("=> ") } fn new(mp: MultiProgress, target: SyncTarget) -> Self { let spinner = mp.add(ProgressBar::new_spinner()); spinner.set_style(Progress::get_style(false)); spinner.set_prefix("Syncing"); spinner.enable_steady_tick(std::time::Duration::from_millis(100)); Self { inner: spinner, target, in_flight: HashSet::new(), discovered: 0, synced: 0, new: 0, dupes: 0, failed: 0, } } fn get_msg(&self) -> String { let mut parts = Vec::new(); if self.new > 0 { let target_msg = match self.target { SyncTarget::Cloud { dry_run: true } => "checked", SyncTarget::Cloud { dry_run: false } => "uploaded", SyncTarget::Studio | SyncTarget::Debug => "written", }; parts.push(format!("{} {}", self.new, target_msg)); } let noop = self.synced - self.new; if noop > 0 { parts.push(format!("{} no-op", noop)); } if self.dupes > 0 { parts.push(format!("{} duplicates", self.dupes)); } let in_flight = self.in_flight.len(); if in_flight > 0 { parts.push(format!("{} processing", in_flight)); } let failed = self.failed; if failed > 0 { parts.push(format!("{} failed", failed)); } parts.join(", ") } fn update(&self) { self.inner.set_position(self.synced + self.dupes); self.inner.set_length(self.discovered); self.inner.set_message(self.get_msg()); } fn finish(&self) { self.inner.set_prefix("Synced"); self.inner.set_style(Progress::get_style(true)); self.inner.finish(); } }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/sync/walk.rs
src/sync/walk.rs
use crate::{ asset::{self, Asset}, cli::SyncTarget, config::Config, lockfile::Lockfile, sync::TargetBackend, }; use anyhow::Context; use fs_err::tokio as fs; use log::{debug, warn}; use relative_path::PathExt; use resvg::usvg::fontdb; use std::{ collections::{ HashMap, hash_map::{self}, }, path::{Path, PathBuf}, sync::Arc, }; use tokio::{ sync::{Mutex, Semaphore, mpsc::UnboundedSender}, task::JoinSet, }; use walkdir::WalkDir; pub struct Params { pub target: SyncTarget, pub existing_lockfile: Lockfile, pub font_db: Arc<fontdb::Database>, pub backend: Option<TargetBackend>, } struct InputState { params: Arc<Params>, input_name: String, input_prefix: PathBuf, seen_hashes: Arc<Mutex<HashMap<String, PathBuf>>>, bleed: bool, } pub async fn walk(params: Params, config: &Config, tx: &UnboundedSender<super::Event>) { let params = Arc::new(params); for (input_name, input) in &config.inputs { let state = Arc::new(InputState { params: params.clone(), input_name: input_name.clone(), input_prefix: input.include.get_prefix(), seen_hashes: Arc::new(Mutex::new(HashMap::new())), bleed: input.bleed, }); let mut join_set = JoinSet::new(); let semaphore = Arc::new(Semaphore::new(50)); for entry in WalkDir::new(input.include.get_prefix()) .into_iter() .filter_entry(|entry| { let path = entry.path(); path == input.include.get_prefix() || input.include.is_match(path) }) { let Ok(entry) = entry else { continue }; let path = entry.into_path(); if !path.is_file() { continue; } let Some(ext) = path.extension() else { continue; }; if !asset::is_supported_extension(ext) { continue; } let state = state.clone(); let semaphore = semaphore.clone(); let tx = tx.clone(); tx.send(super::Event::Discovered(path.clone())).unwrap(); join_set.spawn(async move { let _permit = semaphore.acquire_owned().await.unwrap(); tx.send(super::Event::InFlight(path.clone())).unwrap(); if let Err(e) = process_entry(state.clone(), &path, &tx).await { warn!("Failed to process file {}: {e:?}", path.display()); 
tx.send(super::Event::Failed(path.clone())).unwrap(); } }); } while join_set.join_next().await.is_some() {} } } async fn process_entry( state: Arc<InputState>, path: &Path, tx: &UnboundedSender<super::Event>, ) -> anyhow::Result<()> { debug!("Handling entry: {}", path.display()); let rel_path = path.relative_to(&state.input_prefix)?; let data = fs::read(path).await?; let asset = Asset::new( rel_path.clone(), data, state.params.font_db.clone(), state.bleed, ) .await .context("Failed to create asset")?; let lockfile_entry = state .params .existing_lockfile .get(&state.input_name, &asset.hash); { let mut seen_hashes = state.seen_hashes.lock().await; match seen_hashes.entry(asset.hash.clone()) { hash_map::Entry::Occupied(entry) => { let seen_path = entry.get(); let rel_seen_path = seen_path.relative_to(&state.input_prefix)?; debug!("Duplicate asset found: {} -> {}", rel_path, rel_seen_path); let event = super::Event::Finished { state: super::EventState::Duplicate, input_name: state.input_name.clone(), path: path.into(), rel_path: rel_path.clone(), asset_ref: lockfile_entry.map(Into::into), hash: asset.hash.clone(), }; tx.send(event).unwrap(); return Ok(()); } hash_map::Entry::Vacant(_) => { seen_hashes.insert(asset.hash.clone(), path.into()); } } } let always_target = matches!(state.params.target, SyncTarget::Studio | SyncTarget::Debug); let is_new = always_target || lockfile_entry.is_none(); let asset_ref = match state.params.backend { Some(ref backend) => backend.sync(&asset, lockfile_entry).await?, None => lockfile_entry.map(Into::into), }; let event = super::Event::Finished { state: super::EventState::Synced { new: is_new }, input_name: state.input_name.clone(), path: path.into(), rel_path: asset.path.clone(), hash: asset.hash.clone(), asset_ref, }; tx.send(event).unwrap(); Ok(()) }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/sync/mod.rs
src/sync/mod.rs
use crate::{ asset::{Asset, AssetRef}, cli::{SyncArgs, SyncTarget}, config::Config, lockfile::{LockfileEntry, RawLockfile}, sync::{backend::Backend, collect::collect_events}, }; use anyhow::{Context, bail}; use fs_err::tokio as fs; use indicatif::MultiProgress; use log::info; use relative_path::RelativePathBuf; use resvg::usvg::fontdb; use std::{path::PathBuf, sync::Arc}; use tokio::sync::mpsc::{self}; mod backend; mod codegen; mod collect; mod walk; enum TargetBackend { Cloud(backend::Cloud), Debug(backend::Debug), Studio(backend::Studio), } impl TargetBackend { pub async fn sync( &self, asset: &Asset, lockfile_entry: Option<&LockfileEntry>, ) -> anyhow::Result<Option<AssetRef>> { match self { Self::Cloud(cloud_backend) => cloud_backend.sync(asset, lockfile_entry).await, Self::Debug(debug_backend) => debug_backend.sync(asset, lockfile_entry).await, Self::Studio(studio_backend) => studio_backend.sync(asset, lockfile_entry).await, } } } #[derive(Debug)] enum Event { Discovered(PathBuf), InFlight(PathBuf), Finished { state: EventState, input_name: String, path: PathBuf, rel_path: RelativePathBuf, hash: String, asset_ref: Option<AssetRef>, }, Failed(PathBuf), } #[derive(Debug)] enum EventState { Synced { new: bool }, Duplicate, } pub async fn sync(args: SyncArgs, mp: MultiProgress) -> anyhow::Result<()> { let config = Config::read().await?; let target = args.target(); let existing_lockfile = RawLockfile::read().await?.into_lockfile()?; let font_db = Arc::new({ let mut db = fontdb::Database::new(); db.load_system_fonts(); db }); let (event_tx, event_rx) = mpsc::unbounded_channel::<Event>(); let collector_handle = tokio::spawn({ let inputs = config.inputs.clone(); async move { collect_events(event_rx, target, inputs, mp).await } }); let params = walk::Params { target, existing_lockfile, font_db, backend: { let params = backend::Params { api_key: args.api_key, creator: config.creator.clone(), expected_price: args.expected_price, }; match &target { SyncTarget::Cloud { 
dry_run: false } => { Some(TargetBackend::Cloud(backend::Cloud::new(params).await?)) } SyncTarget::Cloud { dry_run: true } => None, SyncTarget::Debug => Some(TargetBackend::Debug(backend::Debug::new(params).await?)), SyncTarget::Studio => { Some(TargetBackend::Studio(backend::Studio::new(params).await?)) } } }, }; walk::walk(params, &config, &event_tx).await; drop(event_tx); let results = collector_handle.await??; if matches!(target, SyncTarget::Cloud { dry_run: true }) { if results.new_count > 0 { bail!("Dry run: {} new assets would be synced", results.new_count) } else { info!("Dry run: No new assets would be synced"); return Ok(()); } } if target.write_on_sync() { results.new_lockfile.write(None).await?; } for (input_name, source) in results.input_sources { let input = config .inputs .get(&input_name) .context("Failed to find input for codegen input")?; let mut langs_to_generate = vec![codegen::Language::Luau]; if config.codegen.typescript { langs_to_generate.push(codegen::Language::TypeScript); } for lang in langs_to_generate { let node = codegen::create_node(&source, &config.codegen); let ext = match lang { codegen::Language::Luau => "luau", codegen::Language::TypeScript => "d.ts", }; let code = codegen::generate_code(lang, &input_name, &node)?; fs::create_dir_all(&input.output_path).await?; fs::write(input.output_path.join(format!("{input_name}.{ext}")), code).await?; } } if results.any_failed { bail!("Some assets failed to sync") } Ok(()) }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/sync/codegen.rs
src/sync/codegen.rs
use crate::{asset::AssetRef, config}; use anyhow::bail; use relative_path::{RelativePath, RelativePathBuf}; use std::{collections::BTreeMap, path::Path}; pub enum Node { Table(BTreeMap<String, Node>), String(String), Content(String), #[allow(dead_code)] Number(u64), } pub enum Language { TypeScript, Luau, } pub type NodeSource = BTreeMap<RelativePathBuf, AssetRef>; pub fn create_node(source: &NodeSource, config: &config::Codegen) -> Node { let mut root = Node::Table(BTreeMap::new()); for (path, value) in source { let value = if config.content { Node::Content(value.to_string()) } else { Node::String(value.to_string()) }; match config.style { config::CodegenStyle::Nested => { let components = normalize_path_components(path, config.strip_extensions); insert_nested(&mut root, &components, value); } config::CodegenStyle::Flat => { let key = normalize_path_string(path, config.strip_extensions); insert_flat(&mut root, &key, value); } } } root } fn normalize_path_components(path: &RelativePath, strip_extensions: bool) -> Vec<String> { let mut components: Vec<String> = Vec::new(); let total_components = path.iter().count(); for (i, comp) in path.iter().enumerate() { if i == total_components - 1 && strip_extensions { let as_path = Path::new(comp); if let Some(stem) = as_path.file_stem() { components.push(stem.to_string_lossy().to_string()); continue; } } components.push(comp.to_string()); } components } fn normalize_path_string(path: &RelativePath, strip_extensions: bool) -> String { if strip_extensions && let (Some(file_name), Some(parent)) = (path.file_name(), path.parent()) && let Some(stem) = Path::new(file_name).file_stem() { let parent_str = parent.to_string(); return if parent_str.is_empty() || parent_str == "." 
{ stem.to_string_lossy().to_string() } else { format!("{}/{}", parent_str, stem.to_string_lossy()) }; } path.to_string() } fn insert_flat(node: &mut Node, key: &str, value: Node) { match node { Node::Table(map) => { map.insert(key.into(), value); } _ => { *node = Node::Table(BTreeMap::new()); if let Node::Table(map) = node { map.insert(key.into(), value); } } } } fn insert_nested(node: &mut Node, components: &[String], value: Node) { if !matches!(node, Node::Table(_)) { *node = Node::Table(BTreeMap::new()); } if components.is_empty() { return; } if let Node::Table(map) = node { let component = &components[0]; if components.len() == 1 { map.insert(component.clone(), value); } else { let next_node = map .entry(component.clone()) .or_insert_with(|| Node::Table(BTreeMap::new())); if !matches!(next_node, Node::Table(_)) { *next_node = Node::Table(BTreeMap::new()); } insert_nested(next_node, &components[1..], value); } } } pub fn generate_code(lang: Language, name: &str, node: &Node) -> anyhow::Result<String> { if !matches!(node, Node::Table(_)) { bail!("Root node must be a Table"); } Ok(match lang { Language::TypeScript => generate_typescript(name, node), Language::Luau => generate_luau(name, node), }) } fn generate_typescript(name: &str, node: &Node) -> String { let body = generate_ts_node(node, 0); format!( "// This file is automatically @generated by Asphalt.\n// It is not intended for manual editing.\ndeclare const {name}: {body}\n\nexport = {name}" ) } fn generate_ts_node(node: &Node, indent: usize) -> String { match node { Node::Table(map) => { let mut result = String::from("{\n"); for (k, v) in map { result.push_str(&"\t".repeat(indent + 1)); let k = if is_valid_identifier(k) { k.clone() } else { format!("\"{k}\"") }; result.push_str(&k); result.push_str(": "); result.push_str(&generate_ts_node(v, indent + 1)); result.push('\n'); } result.push_str(&"\t".repeat(indent)); result.push('}'); result } Node::String(_) => "string".to_string(), Node::Content(_) => 
"Content".to_string(), Node::Number(_) => "number".to_string(), } } fn generate_luau(name: &str, node: &Node) -> String { let body = generate_luau_node(node, 0); format!( "-- This file is automatically @generated by Asphalt.\n-- It is not intended for manual editing.\nlocal {name} = {body}\n\nreturn {name}" ) } fn generate_luau_node(node: &Node, indent: usize) -> String { match node { Node::Table(map) => { let mut result = String::from("{\n"); for (k, v) in map { result.push_str(&"\t".repeat(indent + 1)); let k = if is_valid_identifier(k) { k.clone() } else { format!("[\"{k}\"]") }; result.push_str(&k); result.push_str(" = "); result.push_str(&generate_luau_node(v, indent + 1)); result.push_str(",\n"); } result.push_str(&"\t".repeat(indent)); result.push('}'); result } Node::String(s) => format!("\"{s}\""), Node::Content(s) => format!("Content.fromUri(\"{s}\")"), Node::Number(n) => format!("{n}"), } } fn is_valid_ident_char_start(value: char) -> bool { value.is_ascii_alphabetic() || value == '_' } fn is_valid_ident_char(value: char) -> bool { value.is_ascii_alphanumeric() || value == '_' } fn is_valid_identifier(value: &str) -> bool { let mut chars = value.chars(); match chars.next() { Some(first) => { if !is_valid_ident_char_start(first) { return false; } } None => return false, } chars.all(is_valid_ident_char) } #[cfg(test)] mod tests { use super::*; fn make_test_node() -> Node { let mut inner_map = BTreeMap::new(); inner_map.insert("foo".to_string(), Node::String("bar".to_string())); inner_map.insert("baz".to_string(), Node::Number(42)); let inner_node = Node::Table(inner_map); let mut root_inner = BTreeMap::new(); root_inner.insert("qux".to_string(), inner_node); root_inner.insert("fred".to_string(), Node::String("world".to_string())); root_inner.insert("waldo".to_string(), Node::Content("garply".to_string())); Node::Table(root_inner) } #[test] fn test_typescript_codegen() { let root_node = make_test_node(); let code = generate_code(Language::TypeScript, 
"name", &root_node).unwrap(); insta::assert_snapshot!(code); } #[test] fn test_luau_codegen() { let root_node = make_test_node(); let code = generate_code(Language::Luau, "name", &root_node).unwrap(); insta::assert_snapshot!(code); } }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/sync/backend/cloud.rs
src/sync/backend/cloud.rs
use super::Backend; use crate::{ asset::{Asset, AssetRef}, lockfile::LockfileEntry, sync::backend::Params, web_api::WebApiClient, }; use anyhow::{Context, bail}; pub struct Cloud { client: WebApiClient, } impl Backend for Cloud { async fn new(params: Params) -> anyhow::Result<Self> where Self: Sized, { Ok(Self { client: WebApiClient::new( params .api_key .context("An API key is required to use the Cloud backend")?, params.creator, params.expected_price, ), }) } async fn sync( &self, asset: &Asset, lockfile_entry: Option<&LockfileEntry>, ) -> anyhow::Result<Option<AssetRef>> { if let Some(lockfile_entry) = lockfile_entry { return Ok(Some(lockfile_entry.into())); } match self.client.upload(asset).await { Ok(id) => Ok(Some(AssetRef::Cloud(id))), Err(err) => bail!("Failed to upload asset: {err:?}"), } } }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/sync/backend/debug.rs
src/sync/backend/debug.rs
use super::{AssetRef, Backend}; use crate::{asset::Asset, lockfile::LockfileEntry, sync::backend::Params}; use anyhow::Context; use fs_err::tokio as fs; use log::info; use std::{env, path::PathBuf}; pub struct Debug { sync_path: PathBuf, } impl Backend for Debug { async fn new(_: Params) -> anyhow::Result<Self> where Self: Sized, { let debug_path = env::current_dir()?.join(".asphalt-debug"); info!("Assets will be synced to: {}", debug_path.display()); if debug_path.exists() { fs::remove_dir_all(&debug_path) .await .context("Failed to remove existing folder")?; } fs::create_dir_all(&debug_path) .await .context("Failed to create debug directory")?; Ok(Self { sync_path: debug_path, }) } async fn sync( &self, asset: &Asset, lockfile_entry: Option<&LockfileEntry>, ) -> anyhow::Result<Option<AssetRef>> { let target_path = asset.path.to_logical_path(&self.sync_path); if let Some(parent) = target_path.parent() { fs::create_dir_all(parent) .await .context("Failed to create parent directories")?; } fs::write(&target_path, &asset.data) .await .with_context(|| format!("Failed to write asset to {}", target_path.display()))?; Ok(lockfile_entry.map(Into::into)) } }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/sync/backend/mod.rs
src/sync/backend/mod.rs
use crate::{ asset::{Asset, AssetRef}, config, lockfile::LockfileEntry, }; mod cloud; pub use cloud::Cloud; mod debug; pub use debug::Debug; mod studio; pub use studio::Studio; pub trait Backend { async fn new(params: Params) -> anyhow::Result<Self> where Self: Sized; async fn sync( &self, asset: &Asset, lockfile_entry: Option<&LockfileEntry>, ) -> anyhow::Result<Option<AssetRef>>; } pub struct Params { pub api_key: Option<String>, pub creator: config::Creator, pub expected_price: Option<u32>, }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/src/sync/backend/studio.rs
src/sync/backend/studio.rs
use super::{AssetRef, Backend}; use crate::{ asset::{Asset, AssetType}, lockfile::LockfileEntry, sync::backend::Params, }; use anyhow::{Context, bail}; use fs_err::tokio as fs; use log::{debug, info, warn}; use relative_path::RelativePathBuf; use roblox_install::RobloxStudio; use std::{env, path::PathBuf}; pub struct Studio { identifier: String, sync_path: PathBuf, } impl Backend for Studio { async fn new(_: Params) -> anyhow::Result<Self> where Self: Sized, { let content_path = get_content_path()?; let cwd = env::current_dir()?; let cwd_name = cwd .file_name() .and_then(|s| s.to_str()) .context("Failed to get current directory name")?; let project_name = cwd_name .to_lowercase() .split_whitespace() .collect::<Vec<_>>() .join("-"); let identifier = format!(".asphalt-{project_name}"); let sync_path = content_path.join(&identifier); info!("Assets will be synced to: {}", sync_path.display()); if sync_path.exists() { fs::remove_dir_all(&sync_path).await?; } Ok(Self { identifier, sync_path, }) } async fn sync( &self, asset: &Asset, lockfile_entry: Option<&LockfileEntry>, ) -> anyhow::Result<Option<AssetRef>> { if matches!(asset.ty, AssetType::Model(_) | AssetType::Animation) { return match lockfile_entry { Some(entry) => Ok(Some(AssetRef::Studio(format!( "rbxassetid://{}", entry.asset_id )))), None => { warn!( "Models and Animations cannot be synced to Studio without having been uploaded first" ); Ok(None) } }; } let rel_target_path = RelativePathBuf::from(&asset.hash).with_extension(&asset.ext); let target_path = rel_target_path.to_logical_path(&self.sync_path); if let Some(parent) = target_path.parent() { fs::create_dir_all(parent).await?; } fs::write(&target_path, &asset.data).await?; Ok(Some(AssetRef::Studio(format!( "rbxasset://{}/{}", self.identifier, rel_target_path )))) } } fn get_content_path() -> anyhow::Result<PathBuf> { if let Ok(var) = env::var("ROBLOX_CONTENT_PATH") { let path = PathBuf::from(var); if path.exists() { debug!( "Using environment variable 
content path: {}", path.display() ); return Ok(path); } else { bail!("Content path `{}` does not exist", path.display()); } } let studio = RobloxStudio::locate()?; let path = studio.content_path(); debug!("Using auto-detected content path: {}", path.display()); Ok(path.to_owned()) }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/tests/sync.rs
tests/sync.rs
use assert_fs::{fixture::ChildPath, prelude::*}; use common::Project; use predicates::{Predicate, prelude::predicate, str::contains}; use std::{fs, path::Path}; use toml::toml; mod common; fn hash(path: &ChildPath) -> String { let mut hasher = blake3::Hasher::new(); hasher.update(&fs::read(path).unwrap()); hasher.finalize().to_string() } fn toml_eq(expected: toml::Value) -> impl Predicate<Path> { predicate::function(move |path: &Path| { let contents = fs::read_to_string(path).unwrap(); let actual: toml::Value = toml::from_str(&contents).unwrap(); actual == expected }) } #[test] fn missing_config_fails() { Project::new() .run() .args(["sync", "--target", "debug"]) .assert() .failure(); } #[test] fn debug_creates_output() { let project = Project::new(); project.write_config(toml! { [creator] type = "user" id = 1234 [inputs.assets] path = "input/**/*" output_path = "output" bleed = false }); let test_file = project.add_file("test1.png"); project.run().args(["sync", "debug"]).assert().success(); project .dir .child(".asphalt-debug/test1.png") .assert(predicate::path::eq_file(test_file.path())); } #[test] fn debug_web_assets() { let project = Project::new(); project.write_config(toml! { [creator] type = "user" id = 12345 [inputs.assets] path = "input/**/*" output_path = "output" [inputs.assets.web] "existing.png" = { id = 1234 } }); project.run().args(["sync", "debug"]).assert().success(); project .dir .child("output/assets.luau") .assert(contains("existing.png")) .assert(contains("1234")); } #[test] fn cloud_output_and_lockfile() { let project = Project::new(); project.write_config(toml! 
{ [creator] type = "user" id = 12345 [inputs.assets] path = "input/**/*" output_path = "output" }); let test_file = project.add_file("test1.png"); project .run() .args(["sync", "--api-key", "test"]) .assert() .success(); project.dir.child("asphalt.lock.toml").assert(toml_eq({ let mut table = toml::Table::new(); table.insert("version".into(), 2.into()); table.insert("inputs".into(), { let mut inputs = toml::Table::new(); inputs.insert("assets".into(), { let mut assets = toml::Table::new(); assets.insert(hash(&test_file), { let mut entry = toml::Table::new(); entry.insert("asset_id".into(), 1337.into()); entry.into() }); assets.into() }); inputs.into() }); table.into() })); } #[test] fn dry_run_none() { let project = Project::new(); project.write_config(toml! { [creator] type = "user" id = 12345 [inputs.assets] path = "input/**/*" output_path = "output" }); project .run() .args(["sync", "cloud", "--dry-run"]) .assert() .success() .stderr(contains("No new assets")); } #[test] fn dry_run_1_new() { let project = Project::new(); project.write_config(toml! { [creator] type = "user" id = 12345 [inputs.assets] path = "input/**/*" output_path = "output" }); project.add_file("test1.png"); project .run() .args(["sync", "cloud", "--dry-run"]) .assert() .failure() .stderr(contains("1 new assets")); } #[test] fn dry_run_1_new_1_old() { let project = Project::new(); project.write_config(toml! 
{ [creator] type = "user" id = 12345 [inputs.assets] path = "input/**/*" output_path = "output" }); let old_file = project.add_file("test1.png"); project.add_file("test2.jpg"); project.write_lockfile({ let mut table = toml::Table::new(); table.insert("version".into(), 2.into()); table.insert("inputs".into(), { let mut inputs = toml::Table::new(); inputs.insert("assets".into(), { let mut assets = toml::Table::new(); assets.insert(hash(&old_file), { let mut entry = toml::Table::new(); entry.insert("asset_id".into(), toml::Value::Integer(1)); entry.into() }); assets.into() }); inputs.into() }); table }); project .run() .args(["sync", "cloud", "--dry-run"]) .assert() .failure() .stderr(contains("1 new assets")); } #[test] fn dry_run_2_old() { let project = Project::new(); project.write_config(toml! { [creator] type = "user" id = 12345 [inputs.assets] path = "input/**/*" output_path = "output" }); let old_file_1 = project.add_file("test1.png"); let old_file_2 = project.add_file("test2.jpg"); project.write_lockfile({ let mut table = toml::Table::new(); table.insert("version".into(), 2.into()); table.insert("inputs".into(), { let mut inputs = toml::Table::new(); inputs.insert("assets".into(), { let mut assets = toml::Table::new(); assets.insert(hash(&old_file_1), { let mut entry = toml::Table::new(); entry.insert("asset_id".into(), toml::Value::Integer(1)); entry.into() }); assets.insert(hash(&old_file_2), { let mut entry = toml::Table::new(); entry.insert("asset_id".into(), toml::Value::Integer(1)); entry.into() }); assets.into() }); inputs.into() }); table }); project .run() .args(["sync", "cloud", "--dry-run"]) .assert() .success() .stderr(contains("No new assets")); }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
jackTabsCode/asphalt
https://github.com/jackTabsCode/asphalt/blob/f44f6d2be32d429b0eb05c112f69dfdbc4c9f719/tests/common/mod.rs
tests/common/mod.rs
use assert_cmd::cargo::cargo_bin_cmd; use assert_fs::{TempDir, fixture::ChildPath, prelude::*}; use std::{fs, path::Path}; pub struct Project { pub dir: TempDir, } impl Project { pub fn new() -> Self { Self { dir: TempDir::new().unwrap(), } } pub fn write_config(&self, contents: toml::Table) { self.dir .child("asphalt.toml") .write_str(&contents.to_string()) .unwrap(); } pub fn write_lockfile(&self, contents: toml::Table) { self.dir .child("asphalt.lock.toml") .write_str(&contents.to_string()) .unwrap(); } fn read_test_asset(&self, file_name: &str) -> Vec<u8> { let path = Path::new("tests").join("assets").join(file_name); fs::read(&path).unwrap() } pub fn add_file(&self, file_name: &str) -> ChildPath { let file = self.dir.child("input").child(file_name); file.write_binary(&self.read_test_asset(file_name)).unwrap(); file } pub fn run(&self) -> assert_cmd::Command { let mut cmd = cargo_bin_cmd!(); cmd.env("ASPHALT_TEST", "true"); cmd.env("ASPHALT_API_KEY", "test"); cmd.current_dir(self.dir.path()); cmd } }
rust
MIT
f44f6d2be32d429b0eb05c112f69dfdbc4c9f719
2026-01-04T20:24:56.172033Z
false
MitchelPaulin/Walleye
https://github.com/MitchelPaulin/Walleye/blob/2cbd6df821e306f3c04d5af9e98b8650cdcb6d79/src/draw_table.rs
src/draw_table.rs
use crate::{board::BoardState, zobrist::ZobristKey}; use std::collections::HashMap; #[derive(Clone)] pub struct DrawTable { pub table: HashMap<ZobristKey, u8>, } impl DrawTable { pub fn new() -> DrawTable { DrawTable { table: HashMap::new(), } } pub fn clear(&mut self) { self.table.clear(); } pub fn remove_board_from_draw_table(&mut self, board: &BoardState) { if let Some(&val) = self.table.get(&board.zobrist_key) { self.table.insert(board.zobrist_key, val - 1); } } pub fn add_board_to_draw_table(&mut self, board: &BoardState) { let board_count = *self.table.get(&board.zobrist_key).unwrap_or(&0); self.table.insert(board.zobrist_key, board_count + 1); } /* Given the next move as a board determine if making that move would result in a three fold repetition */ pub fn is_threefold_repetition(&mut self, board: &BoardState) -> bool { let board_count = *self.table.get(&board.zobrist_key).unwrap_or(&0); if board_count == 2 { // this position has been seen twice before, so making the move again would be a draw return true; } false } } #[cfg(test)] mod tests { use super::*; use crate::board::DEFAULT_FEN_STRING; #[test] fn remove_board_from_draw_table_test() { let board = BoardState::from_fen(DEFAULT_FEN_STRING).unwrap(); let mut draw_table: DrawTable = DrawTable::new(); draw_table.table.insert(board.zobrist_key, 2); draw_table.remove_board_from_draw_table(&board); assert_eq!(*draw_table.table.get(&board.zobrist_key).unwrap(), 1); } #[test] fn draw_detected_three_fold_rep() { let board = BoardState::from_fen(DEFAULT_FEN_STRING).unwrap(); let mut draw_table: DrawTable = DrawTable::new(); draw_table.table.insert(board.zobrist_key, 2); assert!(draw_table.is_threefold_repetition(&board)); } #[test] fn draw_not_detected() { let board = BoardState::from_fen(DEFAULT_FEN_STRING).unwrap(); let mut draw_table: DrawTable = DrawTable::new(); draw_table.table.insert(board.zobrist_key, 1); assert!(!draw_table.is_threefold_repetition(&board)); } #[test] fn add_board_to_draw_table_test() { 
let board = BoardState::from_fen(DEFAULT_FEN_STRING).unwrap(); let mut draw_table: DrawTable = DrawTable::new(); draw_table.table.insert(board.zobrist_key, 2); draw_table.add_board_to_draw_table(&board); assert_eq!(*draw_table.table.get(&board.zobrist_key).unwrap(), 3); } }
rust
MIT
2cbd6df821e306f3c04d5af9e98b8650cdcb6d79
2026-01-04T20:24:54.383263Z
false
MitchelPaulin/Walleye
https://github.com/MitchelPaulin/Walleye/blob/2cbd6df821e306f3c04d5af9e98b8650cdcb6d79/src/engine.rs
src/engine.rs
pub use crate::board::*; pub use crate::board::{PieceColor::*, PieceKind::*}; use crate::draw_table::DrawTable; pub use crate::evaluation::*; pub use crate::move_generation::*; pub use crate::search::{Search, KILLER_MOVE_PLY_SIZE, MAX_DEPTH}; pub use crate::uci::send_to_gui; pub use crate::utils::out_of_time; use crate::zobrist::ZobristHasher; use std::cmp::{max, min, Reverse}; use std::sync::mpsc; use std::thread; use std::time::{Duration, Instant}; const MATE_SCORE: i32 = 100000; const POS_INF: i32 = 9999999; const NEG_INF: i32 = -POS_INF; /* We want killer moves to be ordered behind all "good" captures, but still ahead of other moves For our purposes a good capture is capturing any with a piece of lower value Ex: capturing a pawn with a queen is a "bad" capture capturing a queen with a pawn is a "good" capture For this reason we give killer moves a 25, or ranked slightly between both types of captures */ const KILLER_MOVE_SCORE: i32 = 25; type BoardSender = std::sync::mpsc::Sender<BoardState>; /* Capture extension, only search captures from here on to find a "quite" position */ fn quiesce( board: &BoardState, mut alpha: i32, beta: i32, search_info: &mut Search, zobrist_hasher: &ZobristHasher, ) -> i32 { search_info.node_searched(); let stand_pat = get_evaluation(board); if stand_pat >= beta { return beta; } if alpha < stand_pat { alpha = stand_pat; } let mut moves = generate_moves(board, MoveGenerationMode::CapturesOnly, zobrist_hasher); moves.sort_unstable_by_key(|k| Reverse(k.order_heuristic)); for mov in moves { let score = -quiesce(&mov, -beta, -alpha, search_info, zobrist_hasher); if score >= beta { return beta; } if score > alpha { alpha = score; } } alpha } /* Run a standard alpha beta search to try and find the best move Orders moves by piece value to attempt to improve search efficiency */ #[allow(clippy::too_many_arguments)] fn alpha_beta_search( start: Instant, time_to_move_ms: u128, board: &BoardState, mut depth: u8, ply_from_root: i32, mut alpha: 
i32, mut beta: i32, search_info: &mut Search, allow_null: bool, zobrist_hasher: &ZobristHasher, draw_table: &mut DrawTable, ) -> i32 { // we are out of time, exit the search if out_of_time(start, time_to_move_ms) { return NEG_INF; } search_info.node_searched(); // check for draw if draw_table.is_threefold_repetition(board) { return 0; } draw_table.add_board_to_draw_table(board); if depth == 0 { // need to resolve check before we enter quiesce if is_check(board, board.to_move) { depth += 1; } else { draw_table.remove_board_from_draw_table(board); return quiesce(board, alpha, beta, search_info, zobrist_hasher); } } // Skip this position if a mating sequence has already been found earlier in // the search, which would be shorter than any mate we could find from here. alpha = max(alpha, -MATE_SCORE + ply_from_root); beta = min(beta, MATE_SCORE - ply_from_root); if alpha >= beta { draw_table.remove_board_from_draw_table(board); return alpha; } // Null move pruning https://www.chessprogramming.org/Null_Move_Pruning // With R = 2 if allow_null && depth >= 3 && !is_check(board, board.to_move) { // allow this player to go again let mut b = board.clone(); b.to_move = board.to_move.opposite(); let eval = -alpha_beta_search( start, time_to_move_ms, &b, depth - 3, ply_from_root + 10, //hack for now but passing in a large ply ensures we don't overwrite the pv -beta, -beta + 1, search_info, false, zobrist_hasher, draw_table, ); if eval >= beta { // null move prune draw_table.remove_board_from_draw_table(board); return beta; } } let mut moves = generate_moves(board, MoveGenerationMode::AllMoves, zobrist_hasher); if moves.is_empty() { if is_check(board, board.to_move) { // checkmate draw_table.remove_board_from_draw_table(board); let mate_score = MATE_SCORE - ply_from_root; return -mate_score; } // stalemate draw_table.remove_board_from_draw_table(board); return 0; } // rank killer moves and pv moves for mov in &mut moves { if mov.last_move == search_info.pv_moves[ply_from_root as 
usize] { // consider principle variation moves before anything else mov.order_heuristic = POS_INF; } else { for i in 0..KILLER_MOVE_PLY_SIZE { if mov.last_move == search_info.killer_moves[ply_from_root as usize][i] { // consider killer moves after considering "good" captures mov.order_heuristic = KILLER_MOVE_SCORE; break; } } } } moves.sort_unstable_by_key(|k| Reverse(k.order_heuristic)); search_info.insert_into_cur_line(ply_from_root, &moves[0]); if moves[0].order_heuristic != POS_INF { search_info.set_principle_variation(); } // do a full search with what we think is the best move // which should be the first move in the array let mut best_score = -alpha_beta_search( start, time_to_move_ms, &moves[0], depth - 1, ply_from_root + 1, -beta, -alpha, search_info, true, zobrist_hasher, draw_table, ); if best_score > alpha { if best_score >= beta { draw_table.remove_board_from_draw_table(board); return best_score; } search_info.set_principle_variation(); alpha = best_score; } // https://en.wikipedia.org/wiki/Principal_variation_search // try out all remaining moves with a reduced window for mov in moves.iter().skip(1) { search_info.insert_into_cur_line(ply_from_root, mov); // zero window search let mut score = -alpha_beta_search( start, time_to_move_ms, mov, depth - 1, ply_from_root + 1, -alpha - 1, -alpha, search_info, true, zobrist_hasher, draw_table, ); if score > alpha && score < beta { // got a result outside our window, need to redo full search score = -alpha_beta_search( start, time_to_move_ms, mov, depth - 1, ply_from_root + 1, -beta, -alpha, search_info, true, zobrist_hasher, draw_table, ); if score > alpha { alpha = score; } } if score > best_score { if score >= beta { // avoid inserting PV nodes or captures into the killer moves table if mov.order_heuristic == 0 { search_info.insert_killer_move(ply_from_root, mov); } draw_table.remove_board_from_draw_table(board); return score; } search_info.set_principle_variation(); best_score = score; } } 
draw_table.remove_board_from_draw_table(board); best_score } /* Interface to the alpha_beta function, works very similarly but returns a board state at the end and also operates with a channel to send the best board state found so far */ pub fn get_best_move( board: &BoardState, draw_table: &mut DrawTable, start: Instant, time_to_move_ms: u128, tx: &BoardSender, ) { let mut cur_depth = 1; let ply_from_root = 0; let mut best_move: Option<BoardState> = None; let mut search_info = Search::new_search(); let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); let mut moves = generate_moves(board, MoveGenerationMode::AllMoves, &zobrist_hasher); while cur_depth < MAX_DEPTH { let mut alpha = NEG_INF; let beta = POS_INF; search_info.reset_search(); moves.sort_unstable_by_key(|k| Reverse(k.order_heuristic)); for mov in &moves { // make an effort to exit once we are out of time if out_of_time(start, time_to_move_ms) { // if we have not found a move to send back, send back the best move as determined by the order_heuristic // this can happen on very short time control situations if best_move.is_none() { tx.send(moves[0].clone()).unwrap(); } return; } let evaluation = -alpha_beta_search( start, time_to_move_ms, mov, cur_depth - 1, ply_from_root + 1, -beta, -alpha, &mut search_info, true, &zobrist_hasher, draw_table, ); search_info.insert_into_cur_line(ply_from_root, mov); if evaluation > alpha && !out_of_time(start, time_to_move_ms) { //alpha raised, remember this line as the pv alpha = evaluation; best_move = Some(mov.clone()); tx.send(mov.clone()).unwrap(); search_info.set_principle_variation(); send_search_info(&search_info, cur_depth, evaluation, start); } } moves = generate_moves(board, MoveGenerationMode::AllMoves, &zobrist_hasher); if let Some(b) = &best_move { for mov in &mut moves { if mov.last_move == b.last_move { // found the pv node mov.order_heuristic = POS_INF; break; } } } cur_depth += 1; } } /* Send information about the current search status to the GUI */ 
fn send_search_info(search_info: &Search, depth: u8, eval: i32, start: Instant) { let mut ponder_move = "".to_string(); for mov in &search_info.pv_moves { if let Some(m) = mov { ponder_move = format!("{} {}{}", ponder_move, m.0, m.1) } else { break; } } let mate_window = 15; if eval >= MATE_SCORE - mate_window { // this player is threatening checkmate send_to_gui(&format!( "info pv{} depth {} nodes {} score mate {} time {}", ponder_move, depth, search_info.nodes_searched, (MATE_SCORE - eval + 1) / 2, Instant::now().duration_since(start).as_millis() )); } else if eval <= -MATE_SCORE + mate_window { // this player is getting matted send_to_gui(&format!( "info pv{} depth {} nodes {} score mate {} time {}", ponder_move, depth, search_info.nodes_searched, (MATE_SCORE + eval) / -2, Instant::now().duration_since(start).as_millis() )); } else { send_to_gui(&format!( "info pv{} depth {} nodes {} score cp {} time {}", ponder_move, depth, search_info.nodes_searched, eval, Instant::now().duration_since(start).as_millis() )); } } /* Play a game in the terminal where the engine plays against itself */ pub fn play_game_against_self( b: &BoardState, max_moves: u8, time_to_move_ms: u128, simple_print: bool, ) { let show_board = |simple_print: bool, b: &BoardState| { if simple_print { b.simple_print_board() } else { b.pretty_print_board() } }; let mut board = b.clone(); let draw_table: DrawTable = DrawTable::new(); show_board(simple_print, &board); for _ in 0..max_moves { let (tx, rx) = mpsc::channel(); let start = Instant::now(); let clone = board.clone(); let mut draw_clone = draw_table.clone(); thread::spawn(move || get_best_move(&clone, &mut draw_clone, start, time_to_move_ms, &tx)); while !out_of_time(start, time_to_move_ms) { if let Ok(b) = rx.try_recv() { board = b; } else { thread::sleep(Duration::from_millis(1)); } } show_board(simple_print, &board); } }
rust
MIT
2cbd6df821e306f3c04d5af9e98b8650cdcb6d79
2026-01-04T20:24:54.383263Z
false
MitchelPaulin/Walleye
https://github.com/MitchelPaulin/Walleye/blob/2cbd6df821e306f3c04d5af9e98b8650cdcb6d79/src/uci.rs
src/uci.rs
// UCI (Universal Chess Interface) front end: reads GUI commands from stdin and
// replies on stdout.
pub use crate::board::*;
use crate::draw_table::DrawTable;
pub use crate::engine::*;
pub use crate::time_control::*;
pub use crate::utils::*;
use crate::zobrist::ZobristHasher;
use log::{error, info};
use std::io::{self, BufRead};
use std::process;
use std::sync::mpsc;
use std::thread;
use std::time::{Duration, Instant};

// Castling moves in long algebraic notation; `make_move` recognises castling by
// matching these strings (combined with a check that the king lands on the square).
const WHITE_KING_SIDE_CASTLE_STRING: &str = "e1g1";
const WHITE_QUEEN_SIDE_CASTLE_STRING: &str = "e1c1";
const BLACK_KING_SIDE_CASTLE_STRING: &str = "e8g8";
const BLACK_QUEEN_SIDE_CASTLE_STRING: &str = "e8c8";

/*
    Main UCI loop: perform the initial "uci" handshake, then process GUI commands
    until "quit". (The loop and function close in the next chunk.)
*/
pub fn play_game_uci() {
    let mut board = BoardState::from_fen(DEFAULT_FEN_STRING).unwrap();
    let buffer = read_from_gui();
    // The very first message must be the protocol handshake.
    if buffer != "uci" {
        error!("Expected uci protocol but got {}", buffer);
        return;
    }
    // Identify the engine to the GUI using Cargo package metadata.
    send_to_gui(&format!(
        "id name {} {}",
        env!("CARGO_PKG_NAME"),
        env!("CARGO_PKG_VERSION")
    ));
    send_to_gui(&format!("id author {}", env!("CARGO_PKG_AUTHORS")));
    send_to_gui("option name DebugLogLevel type combo default None var Info var None");
    send_to_gui("uciok");
    let zobrist_hasher = ZobristHasher::create_zobrist_hasher();
    let mut draw_table = DrawTable::new();
    loop {
        let buffer = read_from_gui();
        // Start the move clock when the command arrives so parsing time counts
        // against the engine.
        let start = Instant::now();
        let commands: Vec<&str> = buffer.split(' ').collect();
        match commands[0] {
            "isready" => send_to_gui("readyok"),
            "ucinewgame" => (), // we don't keep any internal state really so no need to reset anything here
            "position" => {
                // A new position line resets the repetition history before replay.
                draw_table.clear();
                board = play_out_position(&commands, &zobrist_hasher, &mut draw_table);
                info!("{}", board.simple_board());
            }
            "go" => {
                board = find_and_play_best_move(&commands, &mut board, start, &mut draw_table);
            }
            "setoption" => {
                // Only supported option: enable Info-level logging to a per-process file.
                if commands.contains(&"DebugLogLevel") && commands.contains(&"Info") {
                    // set up logging
                    let log_name = format!("walleye_{}.log", process::id());
                    if simple_logging::log_to_file(log_name, log::LevelFilter::Info).is_err() {
                        panic!("Something went wrong when trying to set up logs");
                    };
                }
            }
            "quit" => process::exit(1),
            _ => error!("Unrecognized command: {}", buffer),
        };
} // end of command loop (play_game_uci begins in the previous chunk)
}

/*
    Finds and plays the best move and sends it to UCI.
    Returns the new board state with the best move played.
*/
fn find_and_play_best_move(
    commands: &[&str],
    board: &mut BoardState,
    start: Instant,
    draw_table: &mut DrawTable,
) -> BoardState {
    // Derive this move's time budget from the GUI-supplied clock information.
    let time_to_move_ms = parse_go_command(commands).calculate_time_slice(board.to_move);
    let mut best_move = None;
    let (tx, rx) = mpsc::channel();
    let clone = board.clone();
    let mut draw_clone = draw_table.clone();
    // Search on a worker thread; it streams progressively better moves over `tx`.
    thread::spawn(move || get_best_move(&clone, &mut draw_clone, start, time_to_move_ms, &tx));
    // keep looking until we are out of time
    // also add a guard to ensure we at least get a move from the search thread
    while !out_of_time(start, time_to_move_ms) || best_move.is_none() {
        if let Ok(b) = rx.try_recv() {
            best_move = Some(b);
        } else {
            thread::sleep(Duration::from_millis(1));
        }
    }
    let board = best_move.unwrap();
    send_best_move_to_gui(&board);
    info!("{}", board.simple_board());
    board
}

// parse the go command and get relevant info about the current game time
fn parse_go_command(commands: &[&str]) -> GameTime {
    let mut gt = GameTime {
        wtime: 0,
        btime: 0,
        winc: 0,
        binc: 0,
        movestogo: None,
    };
    // Scan keyword/value pairs; the `i + 1 < len` bound guards against a trailing
    // keyword with no value. Unknown tokens are skipped.
    let mut i = 0;
    while i + 1 < commands.len() {
        match commands[i] {
            "wtime" => {
                gt.wtime = commands[i + 1].parse().unwrap();
                i += 1;
            }
            "btime" => {
                gt.btime = commands[i + 1].parse().unwrap();
                i += 1;
            }
            "binc" => {
                gt.binc = commands[i + 1].parse().unwrap();
                i += 1;
            }
            "winc" => {
                gt.winc = commands[i + 1].parse().unwrap();
                i += 1;
            }
            "movestogo" => {
                gt.movestogo = Some(commands[i + 1].parse().unwrap());
                i += 1;
            }
            _ => (),
        }
        i += 1;
    }
    gt
}

/*
    From the provided fen string set up the board state.
    (The body continues in the next chunk.)
*/
fn play_out_position(
    commands: &[&str],
    zobrist_hasher: &ZobristHasher,
    draw_table: &mut DrawTable,
) -> BoardState {
    let mut board;
    if commands[1] == "fen" {
        // commands[2..=7] hold the six space-separated FEN fields; rejoin them.
        let mut fen = "".to_string();
        for c in commands.iter().take(7).skip(2) {
            fen += &format!("{} ", c);
        }
        fen += commands[7];
        board = match BoardState::from_fen(&fen) {
            Ok(board) =>
// Continuation of play_out_position (started in the previous chunk): handle the
// FEN parse result, then replay the optional move list while recording each
// position for repetition detection.
            board,
            Err(err) => {
                error!("{}", err);
                panic!("Got bad fen string, cant continue");
            }
        };
    } else {
        // No FEN supplied ("position startpos ..."): use the initial position.
        board = BoardState::from_fen(DEFAULT_FEN_STRING).unwrap();
    }
    // Locate the optional "moves" keyword that introduces the move list.
    let mut moves_start_index = None;
    for (i, command) in commands.iter().enumerate() {
        if command == &"moves" {
            moves_start_index = Some(i);
            break;
        }
    }
    // The starting position itself counts as one occurrence for repetition purposes.
    draw_table.table.insert(board.zobrist_key, 1);
    if let Some(start_index) = moves_start_index {
        for mov in commands.iter().skip(start_index + 1) {
            make_move(&mut board, mov, zobrist_hasher);
            draw_table.add_board_to_draw_table(&board);
        }
    }
    board
}

/*
    Play the opponents move on the board.

    `player_move` is long algebraic notation ("e2e4", "e7e8q", ...). Updates the
    board squares, king locations, castling rights, en passant state and the
    incremental zobrist key to match.
*/
fn make_move(board: &mut BoardState, player_move: &str, zobrist_hasher: &ZobristHasher) {
    let start_pair: Point = (player_move[0..2]).parse().unwrap();
    let end_pair: Point = (player_move[2..4]).parse().unwrap();
    // Any previous en passant target expires as soon as another move is made.
    board.unset_pawn_double_move(zobrist_hasher);
    if let Square::Full(piece) = board.board[start_pair.0][start_pair.1] {
        // update king location
        if piece.kind == King {
            if piece.color == White {
                board.white_king_location = end_pair;
                // Once the king moves, both castling rights are gone.
                board.take_away_castling_rights(CastlingType::WhiteQueenSide, zobrist_hasher);
                board.take_away_castling_rights(CastlingType::WhiteKingSide, zobrist_hasher);
            } else {
                board.black_king_location = end_pair;
                board.take_away_castling_rights(CastlingType::BlackQueenSide, zobrist_hasher);
                board.take_away_castling_rights(CastlingType::BlackKingSide, zobrist_hasher);
            }
        } else if piece.kind == Pawn {
            if (start_pair.0 as i8 - end_pair.0 as i8).abs() == 2 {
                // pawn made a double move, record space behind pawn for en passant
                let target = match piece.color {
                    White => Point(start_pair.0 - 1, start_pair.1),
                    Black => Point(start_pair.0 + 1, start_pair.1),
                };
                board.zobrist_key ^= zobrist_hasher.get_val_for_en_passant(target.1);
                board.pawn_double_move = Some(target);
            }
            // check for en passant captures
            // if a pawn moves diagonally and no capture is made, it must be an en passant capture
            if start_pair.1 != end_pair.1
                && board.board[end_pair.0][end_pair.1] == Square::Empty
            {
                // Remove the captured pawn, which sits beside the destination square,
                // and cancel its zobrist contribution.
                board.board[start_pair.0][end_pair.1] = Square::Empty;
                board.zobrist_key ^= zobrist_hasher.get_val_for_piece(
                    Piece::pawn(board.to_move.opposite()),
                    Point(start_pair.0, end_pair.1),
                );
            }
        }
    } else {
        panic!("UCI Error: Trying to move a piece that does not exist");
    }
    //deal with castling privileges related to the movement/capture of rooks
    if player_move.contains("a8") {
        board.take_away_castling_rights(CastlingType::BlackQueenSide, zobrist_hasher);
    }
    if player_move.contains("h8") {
        board.take_away_castling_rights(CastlingType::BlackKingSide, zobrist_hasher);
    }
    if player_move.contains("a1") {
        board.take_away_castling_rights(CastlingType::WhiteQueenSide, zobrist_hasher);
    }
    if player_move.contains("h1") {
        board.take_away_castling_rights(CastlingType::WhiteKingSide, zobrist_hasher);
    }
    //move piece
    board.move_piece(start_pair, end_pair, zobrist_hasher);
    //deal with any pawn promotions
    if player_move.len() == 5 {
        // The fifth character names the promotion piece (e.g. "e7e8q").
        let kind = match player_move.chars().nth(4).unwrap() {
            'q' => Queen,
            'n' => Knight,
            'b' => Bishop,
            'r' => Rook,
            _ => {
                error!("Could not recognize piece value, default to queen");
                Queen
            }
        };
        let promotion_piece = Piece {
            color: board.to_move,
            kind,
        };
        // Swap the pawn's hash contribution for the promoted piece's.
        board.zobrist_key ^= zobrist_hasher.get_val_for_piece(Piece::pawn(board.to_move), end_pair)
            ^ zobrist_hasher.get_val_for_piece(promotion_piece, end_pair);
        board.board[end_pair.0][end_pair.1] = promotion_piece.into();
    }
    // deal with castling, here we also make sure the right king is on the target square to
    // distinguish between castling and normal moves
    if player_move == WHITE_KING_SIDE_CASTLE_STRING
        && board.board[end_pair.0][end_pair.1] == Piece::king(White)
    {
        // Move the king-side rook to complete white's short castle.
        board.move_piece(
            Point(BOARD_END - 1, BOARD_END - 1),
            Point(BOARD_END - 1, BOARD_END - 3),
            zobrist_hasher,
        );
    } else if player_move == WHITE_QUEEN_SIDE_CASTLE_STRING
        && board.board[end_pair.0][end_pair.1] == Piece::king(White)
    {
        // Move the queen-side rook to complete white's long castle.
        board.move_piece(
            Point(BOARD_END - 1, BOARD_START),
            Point(BOARD_END - 1, BOARD_START + 3),
            zobrist_hasher,
        );
    } else
if player_move == BLACK_KING_SIDE_CASTLE_STRING && board.board[end_pair.0][end_pair.1] == Piece::king(Black) { board.move_piece( Point(BOARD_START, BOARD_END - 1), Point(BOARD_START, BOARD_END - 3), zobrist_hasher, ); } else if player_move == BLACK_QUEEN_SIDE_CASTLE_STRING && board.board[end_pair.0][end_pair.1] == Piece::king(Black) { board.move_piece( Point(BOARD_START, BOARD_START), Point(BOARD_START, BOARD_START + 3), zobrist_hasher, ); } board.swap_color(zobrist_hasher); } fn send_best_move_to_gui(board: &BoardState) { let best_move = board.last_move.unwrap(); if let Some(pawn_promotion) = board.pawn_promotion { send_to_gui(&format!( "bestmove {}{}{}", best_move.0, best_move.1, pawn_promotion.kind.alg() )); } else { send_to_gui(&format!("bestmove {}{}", best_move.0, best_move.1)); } } pub fn send_to_gui(message: &str) { println!("{}", message); info!("ENGINE >> {}", message); } pub fn read_from_gui() -> String { let stdin = io::stdin(); let mut buffer = String::new(); stdin.lock().read_line(&mut buffer).unwrap(); buffer = clean_input(&buffer); info!("ENGINE << {}", buffer); buffer } #[cfg(test)] mod tests { use super::*; #[test] fn can_parse_go_command_no_inc() { let buffer = "go wtime 12345 btime 300000 movestogo 40"; let commands: Vec<&str> = buffer.split(' ').collect(); let res = parse_go_command(&commands); assert_eq!(res.winc, 0); assert_eq!(res.binc, 0); assert_eq!(res.wtime, 12345); assert_eq!(res.btime, 300000); assert_eq!(res.movestogo, Some(40)); } #[test] fn can_parse_go_command() { let buffer = "go wtime 300000 btime 300000 winc 1 binc 2 movestogo 40"; let commands: Vec<&str> = buffer.split(' ').collect(); let res = parse_go_command(&commands); assert_eq!(res.winc, 1); assert_eq!(res.binc, 2); assert_eq!(res.wtime, 300000); assert_eq!(res.btime, 300000); assert_eq!(res.movestogo, Some(40)); } #[test] fn can_parse_go_command_no_moves_to_go() { let buffer = "go wtime 300000 btime 300000 winc 1 binc 2"; let commands: Vec<&str> = buffer.split(' 
').collect(); let res = parse_go_command(&commands); assert_eq!(res.winc, 1); assert_eq!(res.binc, 2); assert_eq!(res.wtime, 300000); assert_eq!(res.btime, 300000); assert_eq!(res.movestogo, None); } #[test] fn en_passant_capture_parsed_correctly_black() { let mut board = BoardState::from_fen("8/1k6/8/8/7p/8/1K4P1/8 w - - 0 1").unwrap(); let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); make_move(&mut board, "g2g4", &zobrist_hasher); make_move(&mut board, "h4g3", &zobrist_hasher); assert_eq!(board.board[7][8], Square::from(Piece::pawn(Black))); let mut pawn_count = 0; for i in BOARD_START..BOARD_END { for j in BOARD_START..BOARD_END { if let Square::Full(Piece { kind, .. }) = board.board[i][j] { if kind == Pawn { pawn_count += 1; } } } } assert_eq!(pawn_count, 1); } #[test] fn en_passant_capture_parsed_correctly_white() { let mut board = BoardState::from_fen("8/1k4p1/8/5P2/8/8/1K6/8 b - - 0 1").unwrap(); let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); make_move(&mut board, "g7g5", &zobrist_hasher); make_move(&mut board, "f5g6", &zobrist_hasher); assert_eq!(board.board[4][8], Square::from(Piece::pawn(White))); let mut pawn_count = 0; for i in BOARD_START..BOARD_END { for j in BOARD_START..BOARD_END { if let Square::Full(Piece { kind, .. 
}) = board.board[i][j] { if kind == Pawn { pawn_count += 1; } } } } assert_eq!(pawn_count, 1); } #[test] fn full_game_played_white_wins() { let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); let mut draw_table: DrawTable = DrawTable::new(); let commands: Vec<&str> = "position startpos moves g1f3 g8f6 d2d4 d7d5 e2e3 e7e6 f1d3 b8c6 b1c3 f8e7 e1g1 e8g8 a2a3 h7h6 b2b4 a7a6 c1b2 e7d6 a1c1 b7b5 h2h3 c8b7 f1e1 f8e8 g2g3 d8d7 e3e4 e6e5 c3d5 f6d5 e4d5 c6d4 f3d4 e5d4 d1h5 d6e7 b2d4 d7d5 h5d5 b7d5 c2c4 b5c4 d3c4 d5c4 c1c4 e7d6 e1e8 a8e8 c4c6 e8e1 g1g2 e1d1 d4e3 d1a1 c6a6 d6b4 a3a4 h6h5 a6a8 g8h7 a8a7 h7g6 a7c7 a1a4 c7c4 g6f6 e3d2 b4d2 c4a4 d2c3 g2f3 f6e6 f3e4 f7f5 e4e3 e6f7 e3f4 c3e1 f2f3 g7g6 a4a7 f7e6 f4g5 e1g3 a7a6 e6e5 g5g6 e5d4 a6e6 h5h4 g6f5 d4c3 e6e8 g3f2 e8d8 c3c4 f5g4 f2e1 f3f4 c4b3 f4f5 e1c3 g4g5 c3a5 d8e8 a5d2 g5h4 d2c3 h4g5 b3c4 f5f6 c3b2 f6f7 b2a3 g5g6 c4d5 h3h4 d5c4 h4h5 a3d6 h5h6 d6f8 e8f8 c4d5 f8d8 d5e5 f7f8q e5e4 f8f2 e4e5 f2f5".split(' ').collect(); let board = play_out_position(&commands, &zobrist_hasher, &mut draw_table); let end_board = BoardState::from_fen("3R4/8/6KP/4kQ2/8/8/8/8 b - - 4 66").unwrap(); for i in BOARD_START..BOARD_END { for j in BOARD_START..BOARD_END { assert_eq!(board.board[i][j], end_board.board[i][j]); } } assert_eq!( board.white_queen_side_castle, end_board.white_queen_side_castle ); assert_eq!( board.white_king_side_castle, end_board.white_king_side_castle ); assert_eq!( board.black_king_side_castle, end_board.black_king_side_castle ); assert_eq!( board.black_queen_side_castle, end_board.black_queen_side_castle ); } #[test] fn zobrist_hash_full_game_played_white_wins() { let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); let mut draw_table: DrawTable = DrawTable::new(); let commands: Vec<&str> = "position startpos moves g1f3 g8f6 d2d4 d7d5 e2e3 e7e6 f1d3 b8c6 b1c3 f8e7 e1g1 e8g8 a2a3 h7h6 b2b4 a7a6 c1b2 e7d6 a1c1 b7b5 h2h3 c8b7 f1e1 f8e8 g2g3 d8d7 e3e4 e6e5 c3d5 f6d5 e4d5 c6d4 f3d4 e5d4 d1h5 d6e7 b2d4 d7d5 h5d5 
b7d5 c2c4 b5c4 d3c4 d5c4 c1c4 e7d6 e1e8 a8e8 c4c6 e8e1 g1g2 e1d1 d4e3 d1a1 c6a6 d6b4 a3a4 h6h5 a6a8 g8h7 a8a7 h7g6 a7c7 a1a4 c7c4 g6f6 e3d2 b4d2 c4a4 d2c3 g2f3 f6e6 f3e4 f7f5 e4e3 e6f7 e3f4 c3e1 f2f3 g7g6 a4a7 f7e6 f4g5 e1g3 a7a6 e6e5 g5g6 e5d4 a6e6 h5h4 g6f5 d4c3 e6e8 g3f2 e8d8 c3c4 f5g4 f2e1 f3f4 c4b3 f4f5 e1c3 g4g5 c3a5 d8e8 a5d2 g5h4 d2c3 h4g5 b3c4 f5f6 c3b2 f6f7 b2a3 g5g6 c4d5 h3h4 d5c4 h4h5 a3d6 h5h6 d6f8 e8f8 c4d5 f8d8 d5e5 f7f8q e5e4 f8f2 e4e5 f2f5".split(' ').collect(); let board = play_out_position(&commands, &zobrist_hasher, &mut draw_table); let end_board = BoardState::from_fen("3R4/8/6KP/4kQ2/8/8/8/8 b - - 4 66").unwrap(); assert_eq!(board.zobrist_key, end_board.zobrist_key); } #[test] fn zobrist_hash_full_game_played_white_wins_2() { let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); let mut draw_table: DrawTable = DrawTable::new(); // this game contains en-passant, castling and pawn promotion let commands: Vec<&str> = "position startpos moves e2e4 d7d5 e4e5 f7f5 e5f6 b8c6 f6g7 c8e6 g7h8q d8d6 d2d3 e8c8 d1h5 c6a5 h8g8 e6d7 g8f8 a5c6 h5g4 h7h6 g4a4 c6d4 a4a7 h6h5 a7a8".split(' ').collect(); let board = play_out_position(&commands, &zobrist_hasher, &mut draw_table); let end_board = BoardState::from_fen("Q1kr1Q2/1ppbp3/3q4/3p3p/3n4/3P4/PPP2PPP/RNB1KBNR b KQ - 1 13") .unwrap(); assert_eq!(board.zobrist_key, end_board.zobrist_key); } #[test] fn game_is_draw_three_fold_repetition() { let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); let mut draw_table: DrawTable = DrawTable::new(); // this game contains en-passant, castling and pawn promotion let commands: Vec<&str> = "position fen 8/8/k7/p7/P7/K7/8/8 w - - 0 1 moves a3b3 a6b6 b3c4 b6c6 c4d4 c6d6 d4c4 d6c6 c4d4 c6d6 d4c4 d6c6".split(' ').collect(); let board = play_out_position(&commands, &zobrist_hasher, &mut draw_table); assert_eq!(*draw_table.table.get(&board.zobrist_key).unwrap(), 3); } }
rust
MIT
2cbd6df821e306f3c04d5af9e98b8650cdcb6d79
2026-01-04T20:24:54.383263Z
false
MitchelPaulin/Walleye
https://github.com/MitchelPaulin/Walleye/blob/2cbd6df821e306f3c04d5af9e98b8650cdcb6d79/src/board.rs
src/board.rs
use crate::engine::*; use crate::utils::*; use crate::zobrist::ZobristHasher; use colored::*; use std::fmt; use std::str::FromStr; // Board position for the start of a new game pub const DEFAULT_FEN_STRING: &str = "rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1"; #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum Square { // This square is empty. Empty, // A piece is on this square Full(Piece), // A non-board square; the board data structure contains squares not present on the // actual board in order to make move calculation easier, and all such squares have // this variant. Boundary, } impl Square { // Check if this square is empty or contains a piece of the given color (used in move // generation) pub fn is_empty_or_color(self, color: PieceColor) -> bool { match self { Square::Full(Piece { color: square_color, .. }) => color == square_color, Square::Empty => true, _ => false, } } // Check if this square is empty pub fn is_empty(self) -> bool { self == Square::Empty } // Check if a square is a certain color, return false if empty pub fn is_color(self, color: PieceColor) -> bool { match self { Square::Full(Piece { color: square_color, .. 
}) => color == square_color, _ => false, } } // Get the "fancy" character to represent the content of this square fn fancy_char(self) -> &'static str { match self { Square::Full(piece) => piece.fancy_char(), _ => " ", } } // Get the "simple" character to represent this content of this square (capitalized based on // the piece's color) fn simple_char(self) -> &'static str { match self { Square::Full(piece) => piece.simple_char(), _ => ".", } } } impl From<Piece> for Square { // Given a piece, generate a square containing that piece fn from(piece: Piece) -> Self { Square::Full(piece) } } impl PartialEq<Piece> for Square { fn eq(&self, other: &Piece) -> bool { match self { Square::Full(piece) => piece == other, _ => false, } } } #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub struct Piece { pub color: PieceColor, pub kind: PieceKind, } impl Piece { pub fn index(self) -> usize { self.kind.index() } pub const fn pawn(color: PieceColor) -> Self { Self { kind: Pawn, color } } pub const fn knight(color: PieceColor) -> Self { Self { kind: Knight, color, } } pub const fn bishop(color: PieceColor) -> Self { Self { kind: Bishop, color, } } pub const fn rook(color: PieceColor) -> Self { Self { kind: Rook, color } } pub const fn queen(color: PieceColor) -> Self { Self { kind: Queen, color } } pub const fn king(color: PieceColor) -> Self { Self { kind: King, color } } // Get the "fancy" character for this piece fn fancy_char(self) -> &'static str { match self.kind { Pawn => "♟︎", Knight => "♞", Bishop => "♝", Rook => "♜", Queen => "♛", King => "♚", } } // Get the "simple" character to represent this piece (capitalized based on the piece's color) fn simple_char(self) -> &'static str { match (self.color, self.kind) { (White, Pawn) => "P", (White, Knight) => "N", (White, Bishop) => "B", (White, Rook) => "R", (White, Queen) => "Q", (White, King) => "K", (Black, Pawn) => "p", (Black, Knight) => "n", (Black, Bishop) => "b", (Black, Rook) => "r", (Black, Queen) => "q", (Black, King) => 
"k", } } } #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum PieceColor { Black, White, } impl PieceColor { // Get the opposite color pub fn opposite(self) -> Self { match self { Black => White, White => Black, } } } #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub enum PieceKind { Pawn, Knight, Bishop, Rook, Queen, King, } impl PieceKind { // get an index for a piece, helpful for arrays pub fn index(self) -> usize { match self { King => 0, Queen => 1, Rook => 2, Bishop => 3, Knight => 4, Pawn => 5, } } // Get the alg name for this kind of piece pub fn alg(self) -> &'static str { match self { Pawn => "p", Knight => "n", Bishop => "b", Rook => "r", Queen => "q", King => "k", } } } pub const BOARD_START: usize = 2; pub const BOARD_END: usize = 10; #[derive(Copy, Clone, PartialEq, Eq, Debug)] pub struct Point(pub usize, pub usize); impl FromStr for Point { type Err = &'static str; // Parse an algebraic pair into a board position fn from_str(pair: &str) -> Result<Self, Self::Err> { if pair.len() != 2 { return Err("Invalid length for algebraic string"); } let c = pair.chars().next().unwrap(); let r = pair.chars().nth(1).unwrap(); let col = match c { 'a' => 0, 'b' => 1, 'c' => 2, 'd' => 3, 'e' => 4, 'f' => 5, 'g' => 6, 'h' => 7, _ => return Err("Invalid column"), }; let row = BOARD_END - (r.to_digit(10).unwrap() as usize); if !(BOARD_START..BOARD_END).contains(&row) { return Err("Invalid row"); } Ok(Point(row, col + BOARD_START)) } } impl fmt::Display for Point { fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result { write!( f, "{}{}", match self.1 { 2 => "a", 3 => "b", 4 => "c", 5 => "d", 6 => "e", 7 => "f", 8 => "g", 9 => "h", _ => "h", }, match self.0 { 2 => "8", 3 => "7", 4 => "6", 5 => "5", 6 => "4", 7 => "3", 8 => "2", 9 => "1", _ => "1", }, ) } } #[derive(Clone)] pub struct BoardState { pub board: [[Square; 12]; 12], pub to_move: PieceColor, pub pawn_double_move: Option<Point>, // if a pawn, on the last move, made a double move, this is set, otherwise this is 
None pub white_king_location: Point, pub black_king_location: Point, pub white_king_side_castle: bool, pub white_queen_side_castle: bool, pub black_king_side_castle: bool, pub black_queen_side_castle: bool, pub order_heuristic: i32, // value set to help order this board, a higher value means this board state will be considered first pub last_move: Option<(Point, Point)>, // the start and last position of the last move made pub pawn_promotion: Option<Piece>, // set to the chosen pawn promotion type pub zobrist_key: u64, } impl BoardState { // Parse the standard fen string notation (en.wikipedia.org/wiki/Forsyth–Edwards_Notation) and return a board state pub fn from_fen(fen: &str) -> Result<BoardState, &str> { let mut board = [[Square::Boundary; 12]; 12]; let mut fen = fen.to_string(); let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); let mut zobrist_key = 0; trim_newline(&mut fen); let fen_config: Vec<&str> = fen.split(' ').collect(); if fen_config.len() != 6 { return Err("Could not parse fen string: Invalid fen string"); } let to_move = match fen_config[1] { "w" => PieceColor::White, "b" => PieceColor::Black, _ => return Err("Could not parse fen string: Next player to move was not provided"), }; if to_move == PieceColor::Black { zobrist_key = zobrist_hasher.get_black_to_move_val(); } let castling_privileges = fen_config[2]; let en_passant = fen_config[3]; let half_move_clock = fen_config[4].parse::<u8>(); if half_move_clock.is_err() { return Err("Could not parse fen string: Invalid half move value"); } let full_move_clock = fen_config[5].parse::<u8>(); if full_move_clock.is_err() { return Err("Could not parse fen string: Invalid full move value"); } let fen_rows: Vec<&str> = fen_config[0].split('/').collect(); if fen_rows.len() != 8 { return Err("Could not parse fen string: Invalid number of rows provided, 8 expected"); } let mut row: usize = BOARD_START; let mut col: usize = BOARD_START; let mut white_king_location = Point(0, 0); let mut 
black_king_location = Point(0, 0); for fen_row in fen_rows { for square in fen_row.chars() { if row >= BOARD_END || col >= BOARD_END { return Err("Too many squares specified for board"); } if square.is_ascii_digit() { let square_skip_count = square.to_digit(10).unwrap() as usize; if square_skip_count + col > BOARD_END { return Err("Could not parse fen string: Index out of bounds"); } for _ in 0..square_skip_count { board[row][col] = Square::Empty; col += 1; } } else { board[row][col] = match Self::piece_from_fen_string_char(square) { Some(piece) => Square::Full(piece), None => return Err("Could not parse fen string: Invalid character found"), }; if let Square::Full(Piece { kind, color }) = board[row][col] { zobrist_key ^= zobrist_hasher .get_val_for_piece(Piece { kind, color }, Point(row, col)); if kind == King { match color { White => white_king_location = Point(row, col), Black => black_king_location = Point(row, col), }; } } col += 1; } } if col != BOARD_END { return Err("Could not parse fen string: Complete row was not specified"); } row += 1; col = BOARD_START; } // Deal with the en passant string let mut en_passant_pos: Option<Point> = None; if en_passant.len() != 2 { if en_passant != "-" { return Err("Could not parse fen string: En passant string not valid"); } } else { en_passant_pos = en_passant.parse().ok(); if let Some(point) = en_passant_pos { zobrist_key ^= zobrist_hasher.get_val_for_en_passant(point.1); } } let mut board = BoardState { board, to_move, white_king_location, black_king_location, pawn_double_move: en_passant_pos, white_king_side_castle: castling_privileges.find('K').is_some(), white_queen_side_castle: castling_privileges.find('Q').is_some(), black_king_side_castle: castling_privileges.find('k').is_some(), black_queen_side_castle: castling_privileges.find('q').is_some(), order_heuristic: 0, last_move: None, pawn_promotion: None, zobrist_key, }; if board.white_king_side_castle { board.zobrist_key ^= 
zobrist_hasher.get_val_for_castling(CastlingType::WhiteKingSide); } if board.white_queen_side_castle { board.zobrist_key ^= zobrist_hasher.get_val_for_castling(CastlingType::WhiteQueenSide); } if board.black_king_side_castle { board.zobrist_key ^= zobrist_hasher.get_val_for_castling(CastlingType::BlackKingSide) } if board.black_queen_side_castle { board.zobrist_key ^= zobrist_hasher.get_val_for_castling(CastlingType::BlackQueenSide); } Ok(board) } fn piece_from_fen_string_char(piece: char) -> Option<Piece> { match piece { 'r' => Some(Piece { color: Black, kind: Rook, }), 'n' => Some(Piece { color: Black, kind: Knight, }), 'b' => Some(Piece { color: Black, kind: Bishop, }), 'q' => Some(Piece { color: Black, kind: Queen, }), 'k' => Some(Piece { color: Black, kind: King, }), 'p' => Some(Piece { color: Black, kind: Pawn, }), 'R' => Some(Piece { color: White, kind: Rook, }), 'N' => Some(Piece { color: White, kind: Knight, }), 'B' => Some(Piece { color: White, kind: Bishop, }), 'Q' => Some(Piece { color: White, kind: Queen, }), 'K' => Some(Piece { color: White, kind: King, }), 'P' => Some(Piece { color: White, kind: Pawn, }), _ => None, } } pub fn pretty_print_board(&self) { println!("a b c d e f g h"); for i in BOARD_START..BOARD_END { for j in BOARD_START..BOARD_END { let square = self.board[i][j]; let cell = format!("{} ", square.fancy_char()); let cell = match square { Square::Full(Piece { color: White, .. }) => cell.white(), Square::Full(Piece { color: Black, .. 
}) => cell.black(), _ => cell.white(), }; let cell = if (i + j) % 2 != 0 { cell.on_truecolor(158, 93, 30) } else { cell.on_truecolor(205, 170, 125) }; print!("{}", cell); } println!(" {}", 10 - i); } } pub fn simple_board(&self) -> String { let mut board = "\na b c d e f g h\n".to_string(); for i in BOARD_START..BOARD_END { for j in BOARD_START..BOARD_END { board = format!("{}{} ", board, self.board[i][j].simple_char()); } board = format!("{} {}\n", board, 10 - i); } board } pub fn simple_print_board(&self) { print!("{}", self.simple_board()); } pub fn swap_color(&mut self, zobrist_hasher: &ZobristHasher) { match self.to_move { PieceColor::White => self.to_move = PieceColor::Black, PieceColor::Black => self.to_move = PieceColor::White, } // the current play changed so we need to update the key self.zobrist_key ^= zobrist_hasher.get_black_to_move_val(); } /* Helper function to take away castling rights, updates the zobrist as well if required Also protects against unsetting the castling rights more than once, which would mess up the zobrist key */ pub fn take_away_castling_rights( &mut self, castling_type: CastlingType, zobrist_hasher: &ZobristHasher, ) { if castling_type == CastlingType::WhiteKingSide && self.white_king_side_castle { self.white_king_side_castle = false; self.zobrist_key ^= zobrist_hasher.get_val_for_castling(CastlingType::WhiteKingSide) } else if castling_type == CastlingType::WhiteQueenSide && self.white_queen_side_castle { self.white_queen_side_castle = false; self.zobrist_key ^= zobrist_hasher.get_val_for_castling(CastlingType::WhiteQueenSide); } else if castling_type == CastlingType::BlackKingSide && self.black_king_side_castle { self.black_king_side_castle = false; self.zobrist_key ^= zobrist_hasher.get_val_for_castling(CastlingType::BlackKingSide); } else if castling_type == CastlingType::BlackQueenSide && self.black_queen_side_castle { self.black_queen_side_castle = false; self.zobrist_key ^= 
zobrist_hasher.get_val_for_castling(CastlingType::BlackQueenSide); } } /* Helper function to clear the pawn double move condition and update the zobrist key if required */ pub fn unset_pawn_double_move(&mut self, zobrist_hasher: &ZobristHasher) { if let Some(en_passant_target) = self.pawn_double_move { self.pawn_double_move = None; self.zobrist_key ^= zobrist_hasher.get_val_for_en_passant(en_passant_target.1); } } /* Helper function to move a piece on the board, will also update the zobrist hash of the board correctly even with a capture */ pub fn move_piece(&mut self, start: Point, end: Point, zobrist_hasher: &ZobristHasher) { if let Square::Full(cur_piece) = self.board[start.0][start.1] { self.board[start.0][start.1] = Square::Empty; if let Square::Full(target_piece) = self.board[end.0][end.1] { self.zobrist_key ^= zobrist_hasher.get_val_for_piece(target_piece, end); } self.board[end.0][end.1] = Square::Full(cur_piece); self.zobrist_key ^= zobrist_hasher.get_val_for_piece(cur_piece, start) ^ zobrist_hasher.get_val_for_piece(cur_piece, end); } } } #[cfg(test)] mod tests { use super::*; #[test] fn pieces_recognized() { assert_eq!(Piece::bishop(White).color, White); assert_eq!(Piece::rook(White).color, White); assert_eq!(Piece::king(White).color, White); assert_eq!(Piece::pawn(White).color, White); assert_eq!(Piece::bishop(Black).color, Black); assert_eq!(Piece::rook(Black).color, Black); assert_eq!(Piece::king(Black).color, Black); assert_eq!(Piece::pawn(Black).color, Black); assert_eq!(Piece::pawn(White).kind, Pawn); assert_eq!(Piece::knight(White).kind, Knight); assert_eq!(Piece::bishop(White).kind, Bishop); assert_eq!(Piece::rook(White).kind, Rook); assert_eq!(Piece::queen(White).kind, Queen); assert_eq!(Piece::king(White).kind, King); assert!(Square::Empty.is_empty()); assert!(!Square::Full(Piece::king(White)).is_empty()); } // Algebraic translation tests #[test] fn algebraic_translation_correct() { let res = "a8".parse::<Point>().unwrap(); assert_eq!(res.0, 
BOARD_START); assert_eq!(res.1, BOARD_START); let res = "h1".parse::<Point>().unwrap(); assert_eq!(res.0, BOARD_END - 1); assert_eq!(res.1, BOARD_END - 1); let res = "a6".parse::<Point>().unwrap(); assert_eq!(res.0, BOARD_START + 2); assert_eq!(res.1, BOARD_START); let res = "c5".parse::<Point>().unwrap(); assert_eq!(res.0, BOARD_START + 3); assert_eq!(res.1, BOARD_START + 2); } #[test] #[should_panic] fn algebraic_translation_panic_col() { "z1".parse::<Point>().unwrap(); } #[test] #[should_panic] fn algebraic_translation_panic_long() { "a11".parse::<Point>().unwrap(); } #[test] fn points_to_long_algebraic_position_test() { let res = Point(2, 2).to_string(); assert_eq!(res, "a8"); let res = Point(4, 6).to_string(); assert_eq!(res, "e6"); } // Zobrist hashing tests #[test] fn zobrist_swap_color() { let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); let mut b = BoardState::from_fen("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1") .unwrap(); let old_zobrist_key = b.zobrist_key; b.swap_color(&zobrist_hasher); assert_eq!( old_zobrist_key, b.zobrist_key ^ zobrist_hasher.get_black_to_move_val() ); b.swap_color(&zobrist_hasher); assert_eq!(old_zobrist_key, b.zobrist_key); } #[test] fn zobrist_move_piece_no_capture() { let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); let mut b = BoardState::from_fen("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1") .unwrap(); // move the black rook let start = Point(BOARD_START, BOARD_START); let end = Point(BOARD_START + 2, BOARD_START + 3); let expected_result = b.zobrist_key ^ zobrist_hasher.get_val_for_piece(Piece::rook(Black), start) ^ zobrist_hasher.get_val_for_piece(Piece::rook(Black), end); b.move_piece(start, end, &zobrist_hasher); assert_eq!(expected_result, b.zobrist_key); } #[test] fn zobrist_move_piece_with_capture() { let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); let mut b = BoardState::from_fen("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1") .unwrap(); // 
move the black rook to capture the white rook diagonally across the board let start = Point(BOARD_START, BOARD_START); let end = Point(BOARD_END - 1, BOARD_END - 1); let expected_result = b.zobrist_key ^ zobrist_hasher.get_val_for_piece(Piece::rook(Black), start) ^ zobrist_hasher.get_val_for_piece(Piece::rook(Black), end) ^ zobrist_hasher.get_val_for_piece(Piece::rook(White), end); b.move_piece(start, end, &zobrist_hasher); assert_eq!(expected_result, b.zobrist_key); } #[test] fn zobrist_castling_white_king_side() { let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); let b = BoardState::from_fen("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1") .unwrap(); let mut b_copy = b.clone(); b_copy.take_away_castling_rights(CastlingType::WhiteKingSide, &zobrist_hasher); assert_eq!( b.zobrist_key, b_copy.zobrist_key ^ zobrist_hasher.get_val_for_castling(CastlingType::WhiteKingSide) ); // ensure that unsetting the same rights again doesn't cause the key to change b_copy.take_away_castling_rights(CastlingType::WhiteKingSide, &zobrist_hasher); assert_eq!( b.zobrist_key, b_copy.zobrist_key ^ zobrist_hasher.get_val_for_castling(CastlingType::WhiteKingSide) ); } #[test] fn zobrist_castling_white_queen_side() { let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); let b = BoardState::from_fen("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1") .unwrap(); let mut b_copy = b.clone(); b_copy.take_away_castling_rights(CastlingType::WhiteQueenSide, &zobrist_hasher); assert_eq!( b.zobrist_key, b_copy.zobrist_key ^ zobrist_hasher.get_val_for_castling(CastlingType::WhiteQueenSide) ); // ensure that unsetting the same rights again doesn't cause the key to change b_copy.take_away_castling_rights(CastlingType::WhiteQueenSide, &zobrist_hasher); assert_eq!( b.zobrist_key, b_copy.zobrist_key ^ zobrist_hasher.get_val_for_castling(CastlingType::WhiteQueenSide) ); } #[test] fn zobrist_castling_black_king_side() { let zobrist_hasher = 
ZobristHasher::create_zobrist_hasher(); let b = BoardState::from_fen("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1") .unwrap(); let mut b_copy = b.clone(); b_copy.take_away_castling_rights(CastlingType::BlackKingSide, &zobrist_hasher); assert_eq!( b.zobrist_key, b_copy.zobrist_key ^ zobrist_hasher.get_val_for_castling(CastlingType::BlackKingSide) ); // ensure that unsetting the same rights again doesn't cause the key to change b_copy.take_away_castling_rights(CastlingType::BlackKingSide, &zobrist_hasher); assert_eq!( b.zobrist_key, b_copy.zobrist_key ^ zobrist_hasher.get_val_for_castling(CastlingType::BlackKingSide) ); } #[test] fn zobrist_castling_black_queen_side() { let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); let b = BoardState::from_fen("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1") .unwrap(); let mut b_copy = b.clone(); b_copy.take_away_castling_rights(CastlingType::BlackQueenSide, &zobrist_hasher); assert_eq!( b.zobrist_key, b_copy.zobrist_key ^ zobrist_hasher.get_val_for_castling(CastlingType::BlackQueenSide) ); // ensure that unsetting the same rights again doesn't cause the key to change b_copy.take_away_castling_rights(CastlingType::BlackQueenSide, &zobrist_hasher); assert_eq!( b.zobrist_key, b_copy.zobrist_key ^ zobrist_hasher.get_val_for_castling(CastlingType::BlackQueenSide) ); } #[test] fn zobrist_unset_double_pawn_move() { let zobrist_hasher = ZobristHasher::create_zobrist_hasher(); let mut b = BoardState::from_fen("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq e3 0 1") .unwrap(); let expected_result = b.zobrist_key ^ zobrist_hasher.get_val_for_en_passant(b.pawn_double_move.unwrap().1); b.unset_pawn_double_move(&zobrist_hasher); assert_eq!(expected_result, b.zobrist_key); // ensure that unsetting a double pawn move twice doesn't change the key b.unset_pawn_double_move(&zobrist_hasher); assert_eq!(expected_result, b.zobrist_key); } // Fen string tests #[test] fn empty_board() { let b = 
BoardState::from_fen("8/8/8/8/8/8/8/8 w - - 0 1").unwrap(); for i in BOARD_START..BOARD_END { for j in BOARD_START..BOARD_END { assert_eq!(b.board[i][j], Square::Empty); } } assert_eq!(b.zobrist_key, 0); } #[test] fn starting_pos() { let b = BoardState::from_fen("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1") .unwrap(); assert_eq!(b.board[2][2], Square::from(Piece::rook(Black))); assert_eq!(b.board[2][3], Square::from(Piece::knight(Black))); assert_eq!(b.board[2][4], Square::from(Piece::bishop(Black))); assert_eq!(b.board[2][5], Square::from(Piece::queen(Black))); assert_eq!(b.board[2][6], Square::from(Piece::king(Black))); assert_eq!(b.board[2][7], Square::from(Piece::bishop(Black))); assert_eq!(b.board[2][8], Square::from(Piece::knight(Black))); assert_eq!(b.board[2][9], Square::from(Piece::rook(Black))); for i in BOARD_START..BOARD_END { assert_eq!(b.board[3][i], Square::from(Piece::pawn(Black))); } for i in 4..8 { for j in BOARD_START..BOARD_END { assert_eq!(b.board[i][j], Square::Empty); } } assert_eq!(b.board[9][2], Square::from(Piece::rook(White))); assert_eq!(b.board[9][3], Square::from(Piece::knight(White))); assert_eq!(b.board[9][4], Square::from(Piece::bishop(White))); assert_eq!(b.board[9][5], Square::from(Piece::queen(White))); assert_eq!(b.board[9][6], Square::from(Piece::king(White))); assert_eq!(b.board[9][7], Square::from(Piece::bishop(White))); assert_eq!(b.board[9][8], Square::from(Piece::knight(White))); assert_eq!(b.board[9][9], Square::from(Piece::rook(White))); for i in BOARD_START..BOARD_END { assert_eq!(b.board[8][i], Square::from(Piece::pawn(White))); } assert_eq!(b.zobrist_key, 9731356503239323868); } #[test] fn correct_en_passant_privileges() { let b = BoardState::from_fen("rnbqkbnr/pppppppp/8/8/4P3/8/PPPP1PPP/RNBQKBNR w KQkq e4 0 1") .unwrap(); assert_eq!(b.pawn_double_move.unwrap().0, BOARD_START + 4); assert_eq!(b.pawn_double_move.unwrap().1, BOARD_START + 4); } #[test] fn correct_en_passant_privileges_black() { let b = 
BoardState::from_fen("rnbqkbnr/ppppppp1/8/7p/8/8/PPPPPPPP/RNBQKBNR w KQkq h5 0 1") .unwrap(); assert_eq!(b.pawn_double_move.unwrap().0, BOARD_START + 3); assert_eq!(b.pawn_double_move.unwrap().1, BOARD_START + 7); } #[test] fn correct_king_location() { let b = BoardState::from_fen("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1") .unwrap(); assert_eq!(b.black_king_location, Point(2, 6)); assert_eq!(b.white_king_location, Point(9, 6)); } #[test] fn correct_king_location_two() { let b = BoardState::from_fen("6rk/1b4np/5pp1/1p6/8/1P3NP1/1B3P1P/5RK1 w KQkq - 0 1").unwrap(); assert_eq!(b.black_king_location, Point(2, 9)); assert_eq!(b.white_king_location, Point(9, 8)); } #[test] fn correct_starting_player() { let mut b = BoardState::from_fen("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1") .unwrap(); assert_eq!(b.to_move, PieceColor::White); b = BoardState::from_fen("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR b KQkq - 0 1") .unwrap(); assert_eq!(b.to_move, PieceColor::Black); } #[test] fn correct_castling_privileges() { let mut b = BoardState::from_fen("6rk/1b4np/5pp1/1p6/8/1P3NP1/1B3P1P/5RK1 w KQkq - 0 1").unwrap(); assert!(b.black_king_side_castle); assert!(b.black_queen_side_castle); assert!(b.white_king_side_castle); assert!(b.white_queen_side_castle); b = BoardState::from_fen("6rk/1b4np/5pp1/1p6/8/1P3NP1/1B3P1P/5RK1 w - - 0 1").unwrap(); assert!(!b.black_king_side_castle); assert!(!b.black_queen_side_castle); assert!(!b.white_king_side_castle); assert!(!b.white_queen_side_castle); b = BoardState::from_fen("6rk/1b4np/5pp1/1p6/8/1P3NP1/1B3P1P/5RK1 w Kq - 0 1").unwrap(); assert!(!b.black_king_side_castle); assert!(b.black_queen_side_castle); assert!(b.white_king_side_castle); assert!(!b.white_queen_side_castle); } #[test] fn random_pos() { let b = BoardState::from_fen("4R1B1/1kp5/1B1Q4/1P5p/1p2p1pK/8/3pP3/4N1b1 w - - 0 1").unwrap(); assert_eq!(b.board[2][6], Square::from(Piece::rook(White))); assert_eq!(b.board[2][8], 
Square::from(Piece::bishop(White))); assert_eq!(b.board[3][3], Square::from(Piece::king(Black))); assert_eq!(b.board[3][4], Square::from(Piece::pawn(Black))); assert_eq!(b.board[4][3], Square::from(Piece::bishop(White))); assert_eq!(b.board[4][5], Square::from(Piece::queen(White))); assert_eq!(b.board[5][3], Square::from(Piece::pawn(White))); assert_eq!(b.board[5][9], Square::from(Piece::pawn(Black))); assert_eq!(b.board[6][3], Square::from(Piece::pawn(Black))); assert_eq!(b.board[6][6], Square::from(Piece::pawn(Black))); assert_eq!(b.board[6][8], Square::from(Piece::pawn(Black))); assert_eq!(b.board[6][9], Square::from(Piece::king(White))); assert_eq!(b.board[8][5], Square::from(Piece::pawn(Black))); assert_eq!(b.board[8][6], Square::from(Piece::pawn(White))); assert_eq!(b.board[9][6], Square::from(Piece::knight(White)));
rust
MIT
2cbd6df821e306f3c04d5af9e98b8650cdcb6d79
2026-01-04T20:24:54.383263Z
true
MitchelPaulin/Walleye
https://github.com/MitchelPaulin/Walleye/blob/2cbd6df821e306f3c04d5af9e98b8650cdcb6d79/src/search.rs
src/search.rs
pub use crate::board::*; pub const MAX_DEPTH: u8 = 100; pub const KILLER_MOVE_PLY_SIZE: usize = 2; type MoveArray = [Option<(Point, Point)>; MAX_DEPTH as usize]; type KillerMoveArray = [[Option<(Point, Point)>; KILLER_MOVE_PLY_SIZE]; MAX_DEPTH as usize]; /* Keep track of global information about the current search context */ #[derive(Copy, Clone)] pub struct Search { pub killer_moves: KillerMoveArray, // the killer moves for this search pub pv_moves: MoveArray, // the principle variation for this search pub cur_line: MoveArray, // the current line being considered for this search pub nodes_searched: u32, } impl Search { pub fn new_search() -> Search { Search { killer_moves: [[None; KILLER_MOVE_PLY_SIZE]; MAX_DEPTH as usize], pv_moves: [None; MAX_DEPTH as usize], cur_line: [None; MAX_DEPTH as usize], nodes_searched: 0, } } pub fn node_searched(&mut self) { self.nodes_searched += 1; } pub fn insert_killer_move(&mut self, ply_from_root: i32, mov: &BoardState) { let ply = ply_from_root as usize; if self.killer_moves[ply].contains(&mov.last_move) { return; } for i in 0..(KILLER_MOVE_PLY_SIZE - 1) { self.killer_moves[ply][i + 1] = self.killer_moves[ply][i]; } self.killer_moves[ply][0] = mov.last_move; } pub fn insert_into_cur_line(&mut self, ply_from_root: i32, mov: &BoardState) { self.cur_line[ply_from_root as usize] = mov.last_move; } pub fn set_principle_variation(&mut self) { self.pv_moves.clone_from_slice(&self.cur_line); } // reset the required data to search the next depth pub fn reset_search(&mut self) { self.nodes_searched = 0; self.cur_line = [None; MAX_DEPTH as usize]; } }
rust
MIT
2cbd6df821e306f3c04d5af9e98b8650cdcb6d79
2026-01-04T20:24:54.383263Z
false
MitchelPaulin/Walleye
https://github.com/MitchelPaulin/Walleye/blob/2cbd6df821e306f3c04d5af9e98b8650cdcb6d79/src/move_generation.rs
src/move_generation.rs
pub use crate::board::*; pub use crate::evaluation::*; use crate::zobrist::ZobristHasher; const KNIGHT_CORDS: [(i8, i8); 8] = [ (1, 2), (1, -2), (2, 1), (2, -1), (-1, 2), (-1, -2), (-2, -1), (-2, 1), ]; // MVV-LVA score, see https://www.chessprogramming.org/MVV-LVA // addressed as [victim][attacker] #[rustfmt::skip] const MVV_LVA: [[i32; 7]; 7] = [ [0, 0, 0, 0, 0, 0, 0], [50, 51, 52, 53, 54, 55, 0], [40, 41, 42, 43, 44, 45, 0], [30, 31, 32, 33, 34, 35, 0], [20, 21, 22, 23, 24, 25, 0], [10, 11, 12, 13, 14, 15, 0], [ 0, 0, 0, 0, 0, 0, 0], ]; #[allow(clippy::enum_variant_names)] #[derive(PartialEq, Eq)] pub enum CastlingType { WhiteKingSide, WhiteQueenSide, BlackKingSide, BlackQueenSide, } #[derive(PartialEq, Eq, Copy, Clone)] pub enum MoveGenerationMode { AllMoves, CapturesOnly, } const WHITE_KING_SIDE_CASTLE_ALG: Option<(Point, Point)> = Some((Point(9, 6), Point(9, 8))); const WHITE_QUEEN_SIDE_CASTLE_ALG: Option<(Point, Point)> = Some((Point(9, 6), Point(9, 4))); const BLACK_KING_SIDE_CASTLE_ALG: Option<(Point, Point)> = Some((Point(2, 6), Point(2, 8))); const BLACK_QUEEN_SIDE_CASTLE_ALG: Option<(Point, Point)> = Some((Point(2, 6), Point(2, 4))); /* Generate all possible *legal* moves from the given board Also sets appropriate variables for the board state */ pub fn generate_moves( board: &BoardState, move_gen_mode: MoveGenerationMode, zobrist_hasher: &ZobristHasher, ) -> Vec<BoardState> { //usually there is at minimum 16 moves in a position, so it make sense to preallocate some space to avoid excessive reallocations let mut new_moves: Vec<BoardState> = Vec::with_capacity(16); for i in BOARD_START..BOARD_END { for j in BOARD_START..BOARD_END { if let Square::Full(piece) = board.board[i][j] { if piece.color == board.to_move { generate_moves_for_piece( piece, board, Point(i, j), &mut new_moves, move_gen_mode, zobrist_hasher, ); } } } } if move_gen_mode == MoveGenerationMode::AllMoves { generate_castling_moves(board, &mut new_moves, zobrist_hasher); } new_moves } /* 
Determine if a color is currently in check */ pub fn is_check(board: &BoardState, color: PieceColor) -> bool { match color { White => is_check_cords(board, White, board.white_king_location), Black => is_check_cords(board, Black, board.black_king_location), } } /* Generate pseudo-legal moves for a knight */ fn knight_moves( piece: Piece, row: usize, col: usize, board: &BoardState, moves: &mut Vec<Point>, move_generation_mode: MoveGenerationMode, ) { for (r, c) in &KNIGHT_CORDS { let row = (row as i8 + r) as usize; let col = (col as i8 + c) as usize; let square = board.board[row][col]; if square.is_empty_or_color(piece.color.opposite()) { if move_generation_mode == MoveGenerationMode::CapturesOnly { if !square.is_empty() { moves.push(Point(row, col)); } } else { moves.push(Point(row, col)); } } } } /* Generate pseudo-legal moves for a pawn */ fn pawn_moves( piece: Piece, row: usize, col: usize, board: &BoardState, moves: &mut Vec<Point>, move_generation_mode: MoveGenerationMode, ) { match piece.color { // white pawns move up board White => { // check capture let left_cap = board.board[row - 1][col - 1]; let right_cap = board.board[row - 1][col + 1]; if let Square::Full(Piece { color: Black, .. }) = left_cap { moves.push(Point(row - 1, col - 1)); } if let Square::Full(Piece { color: Black, .. }) = right_cap { moves.push(Point(row - 1, col + 1)); } // check a normal push if move_generation_mode == MoveGenerationMode::AllMoves && (board.board[row - 1][col]).is_empty() { moves.push(Point(row - 1, col)); // check double push if row == 8 && (board.board[row - 2][col]).is_empty() { moves.push(Point(row - 2, col)); } } } // black pawns move down board Black => { // check capture let left_cap = board.board[row + 1][col + 1]; let right_cap = board.board[row + 1][col - 1]; if let Square::Full(Piece { color: White, .. }) = left_cap { moves.push(Point(row + 1, col + 1)); } if let Square::Full(Piece { color: White, .. 
}) = right_cap { moves.push(Point(row + 1, col - 1)); } // check a normal push if move_generation_mode == MoveGenerationMode::AllMoves && (board.board[row + 1][col]).is_empty() { moves.push(Point(row + 1, col)); // check double push if row == 3 && (board.board[row + 2][col]).is_empty() { moves.push(Point(row + 2, col)); } } } } } /* Generate pseudo-legal en passant moves Uses the pawn_double_move cords to decide if a en passant capture is legal Returns None if no legal move is available, otherwise return the coordinates of the capture */ fn pawn_moves_en_passant( piece: Piece, row: usize, col: usize, board: &BoardState, ) -> Option<Point> { if let Some(double_moved_pawn) = board.pawn_double_move { let left_cap; let right_cap; match piece.color { White if row == BOARD_START + 3 => { left_cap = Point(row - 1, col - 1); right_cap = Point(row - 1, col + 1); } Black if row == BOARD_START + 4 => { left_cap = Point(row + 1, col + 1); right_cap = Point(row + 1, col - 1); } _ => return None, } if left_cap == double_moved_pawn { return Some(left_cap); } else if right_cap == double_moved_pawn { return Some(right_cap); } } None } /* Generate pseudo-legal moves for a king */ fn king_moves( piece: Piece, row: usize, col: usize, board: &BoardState, moves: &mut Vec<Point>, move_generation_mode: MoveGenerationMode, ) { for i in 0..3 { let row = row + i - 1; for j in 0..3 { let col = col + j - 1; let square = board.board[row][col]; if square.is_empty_or_color(piece.color.opposite()) { if move_generation_mode == MoveGenerationMode::CapturesOnly { if !square.is_empty() { moves.push(Point(row, col)); } } else { moves.push(Point(row, col)); } } } } } /* Generate pseudo-legal moves for a rook */ fn rook_moves( piece: Piece, row: usize, col: usize, board: &BoardState, moves: &mut Vec<Point>, move_generation_mode: MoveGenerationMode, ) { for (r, c) in &[(1, 0), (-1, 0), (0, 1), (0, -1)] { let mut row = row as i8 + r; let mut col = col as i8 + c; let mut square = board.board[row as 
usize][col as usize]; while square.is_empty() { if move_generation_mode == MoveGenerationMode::AllMoves { moves.push(Point(row as usize, col as usize)); } row += r; col += c; square = board.board[row as usize][col as usize]; } if square.is_color(piece.color.opposite()) { moves.push(Point(row as usize, col as usize)); } } } /* Generate pseudo-legal moves for a bishop */ fn bishop_moves( piece: Piece, row: usize, col: usize, board: &BoardState, moves: &mut Vec<Point>, move_generation_mode: MoveGenerationMode, ) { for (r, c) in &[(1, -1), (1, 1), (-1, 1), (-1, -1)] { let mut row = row as i8 + r; let mut col = col as i8 + c; let mut square = board.board[row as usize][col as usize]; while square.is_empty() { if move_generation_mode == MoveGenerationMode::AllMoves { moves.push(Point(row as usize, col as usize)); } row += r; col += c; square = board.board[row as usize][col as usize]; } if square.is_color(piece.color.opposite()) { moves.push(Point(row as usize, col as usize)); } } } /* Generate pseudo-legal moves for a queen */ fn queen_moves( piece: Piece, row: usize, col: usize, board: &BoardState, moves: &mut Vec<Point>, move_generation_mode: MoveGenerationMode, ) { rook_moves(piece, row, col, board, moves, move_generation_mode); bishop_moves(piece, row, col, board, moves, move_generation_mode); } /* Generate pseudo-legal moves for a piece This will not generate en passants and castling, these cases are handled separately */ fn get_moves( piece: Piece, row: usize, col: usize, board: &BoardState, moves: &mut Vec<Point>, move_generation_mode: MoveGenerationMode, ) { match piece.kind { Pawn => pawn_moves(piece, row, col, board, moves, move_generation_mode), Rook => rook_moves(piece, row, col, board, moves, move_generation_mode), Bishop => bishop_moves(piece, row, col, board, moves, move_generation_mode), Knight => knight_moves(piece, row, col, board, moves, move_generation_mode), King => king_moves(piece, row, col, board, moves, move_generation_mode), Queen => 
queen_moves(piece, row, col, board, moves, move_generation_mode), } } /* Determine if the given position is check Rather than checking each piece to see if it attacks the king this function checks all possible attack squares to the king and sees if the piece is there, thus it is important the king_location is set */ fn is_check_cords(board: &BoardState, color: PieceColor, square_cords: Point) -> bool { let attacking_color = color.opposite(); let attacking_rook = Piece::rook(attacking_color); let attacking_queen = Piece::queen(attacking_color); let attacking_bishop = Piece::bishop(attacking_color); let attacking_knight = Piece::knight(attacking_color); let attacking_pawn = Piece::pawn(attacking_color); // Check from rook or queen for (r, c) in &[(1, 0), (-1, 0), (0, 1), (0, -1)] { let mut row = square_cords.0 as i8 + r; let mut col = square_cords.1 as i8 + c; let mut square = board.board[row as usize][col as usize]; while square.is_empty() { row += r; col += c; square = board.board[row as usize][col as usize]; } if square == attacking_rook || square == attacking_queen { return true; } } // Check from bishop or queen for (r, c) in &[(1, -1), (1, 1), (-1, 1), (-1, -1)] { let mut row = square_cords.0 as i8 + r; let mut col = square_cords.1 as i8 + c; let mut square = board.board[row as usize][col as usize]; while square.is_empty() { row += r; col += c; square = board.board[row as usize][col as usize]; } if square == attacking_bishop || square == attacking_queen { return true; } } // Check from knight for (r, c) in &KNIGHT_CORDS { let row = (square_cords.0 as i8 + r) as usize; let col = (square_cords.1 as i8 + c) as usize; let square = board.board[row][col]; if square == attacking_knight { return true; } } // Check from pawn let pawn_row = match color { White => square_cords.0 - 1, Black => square_cords.0 + 1, }; if board.board[pawn_row][square_cords.1 - 1] == attacking_pawn || board.board[pawn_row][square_cords.1 + 1] == attacking_pawn { return true; } // Check from 
king // By using the king location here we can just check if they are within one square of each other (board.black_king_location.0 as i8 - board.white_king_location.0 as i8).abs() <= 1 && (board.black_king_location.1 as i8 - board.white_king_location.1 as i8).abs() <= 1 } /* Determine if castling is a legal move Rules 1. The castling must be kingside or queen side. 2. Neither the king nor the chosen rook has previously moved. 3. There are no pieces between the king and the chosen rook. 4. The king is not currently in check. 5. The king does not pass through a square that is attacked by an enemy piece. 6. The king does not end up in check. (True of any legal move.) This method will check all but rule 2 This method will check the board state to determine if is should go ahead with the castling check If the associated castling privilege variable is set to true, the following will be assumed by this function 1. The king and associated rook have not moved yet this game 2. The king and associated rook are in the correct castling positions Thus its the responsibility of other functions to update the castling privilege variables when the king or associated rook moves (including castling) */ fn can_castle(board: &BoardState, castling_type: &CastlingType) -> bool { match castling_type { CastlingType::WhiteKingSide => can_castle_white_king_side(board), CastlingType::WhiteQueenSide => can_castle_white_queen_side(board), CastlingType::BlackKingSide => can_castle_black_king_side(board), CastlingType::BlackQueenSide => can_castle_black_queen_side(board), } } fn can_castle_white_king_side(board: &BoardState) -> bool { if !board.white_king_side_castle { return false; } // check that squares required for castling are empty if !(board.board[BOARD_END - 1][BOARD_END - 3]).is_empty() || !(board.board[BOARD_END - 1][BOARD_END - 2]).is_empty() { return false; } // check that the king currently isn't in check if is_check(board, White) { return false; } //check that the squares required 
for castling are not threatened if is_check_cords(board, White, Point(BOARD_END - 1, BOARD_END - 3)) || is_check_cords(board, White, Point(BOARD_END - 1, BOARD_END - 2)) { return false; } true } fn can_castle_white_queen_side(board: &BoardState) -> bool { if !board.white_queen_side_castle { return false; } // check that squares required for castling are empty if !(board.board[BOARD_END - 1][BOARD_START + 1]).is_empty() || !(board.board[BOARD_END - 1][BOARD_START + 2]).is_empty() || !(board.board[BOARD_END - 1][BOARD_START + 3]).is_empty() { return false; } // check that the king currently isn't in check if is_check(board, White) { return false; } //check that the squares required for castling are not threatened if is_check_cords(board, White, Point(BOARD_END - 1, BOARD_START + 3)) || is_check_cords(board, White, Point(BOARD_END - 1, BOARD_START + 2)) { return false; } true } fn can_castle_black_king_side(board: &BoardState) -> bool { if !board.black_king_side_castle { return false; } // check that squares required for castling are empty if !(board.board[BOARD_START][BOARD_END - 3]).is_empty() || !(board.board[BOARD_START][BOARD_END - 2]).is_empty() { return false; } // check that the king currently isn't in check if is_check(board, Black) { return false; } //check that the squares required for castling are not threatened if is_check_cords(board, Black, Point(BOARD_START, BOARD_END - 3)) || is_check_cords(board, Black, Point(BOARD_START, BOARD_END - 2)) { return false; } true } fn can_castle_black_queen_side(board: &BoardState) -> bool { if !board.black_queen_side_castle { return false; } // check that squares required for castling are empty if !(board.board[BOARD_START][BOARD_START + 1]).is_empty() || !(board.board[BOARD_START][BOARD_START + 2]).is_empty() || !(board.board[BOARD_START][BOARD_START + 3]).is_empty() { return false; } // check that the king currently isn't in check if is_check(board, Black) { return false; } //check that the squares required for 
castling are not threatened if is_check_cords(board, Black, Point(BOARD_START, BOARD_START + 2)) || is_check_cords(board, Black, Point(BOARD_START, BOARD_START + 3)) { return false; } true } /* Given the coordinates of a piece and that pieces color, generate all possible pseudo-legal moves for that piece */ fn generate_moves_for_piece( piece: Piece, board: &BoardState, square_cords: Point, new_moves: &mut Vec<BoardState>, move_generation_mode: MoveGenerationMode, zobrist_hasher: &ZobristHasher, ) { let mut moves: Vec<Point> = Vec::new(); let Piece { color, kind } = piece; get_moves( piece, square_cords.0, square_cords.1, board, &mut moves, move_generation_mode, ); // make all the valid moves of this piece for mov in moves { let mut new_board = board.clone(); new_board.pawn_promotion = None; new_board.swap_color(zobrist_hasher); // update king location if we are moving the king if kind == King { match color { White => new_board.white_king_location = mov, Black => new_board.black_king_location = mov, } } let target_square = new_board.board[mov.0][mov.1]; if let Square::Full(target_piece) = target_square { new_board.order_heuristic = MVV_LVA[target_piece.index()][piece.index()]; } else { // by default all moves are given a neutral score new_board.order_heuristic = 0; } // move the piece, this will take care of any captures as well, excluding en passant new_board.move_piece(square_cords, mov, zobrist_hasher); new_board.last_move = Some((square_cords, mov)); // if you make your move, and you are in check, this move is not valid if is_check(&new_board, color) { continue; } // if the rook or king move, take away castling privileges if kind == King { if color == White { new_board.take_away_castling_rights(CastlingType::WhiteKingSide, zobrist_hasher); new_board.take_away_castling_rights(CastlingType::WhiteQueenSide, zobrist_hasher); } else { new_board.take_away_castling_rights(CastlingType::BlackKingSide, zobrist_hasher); 
new_board.take_away_castling_rights(CastlingType::BlackQueenSide, zobrist_hasher); } } else if square_cords.0 == BOARD_END - 1 && square_cords.1 == BOARD_END - 1 { new_board.take_away_castling_rights(CastlingType::WhiteKingSide, zobrist_hasher); } else if square_cords.0 == BOARD_END - 1 && square_cords.1 == BOARD_START { new_board.take_away_castling_rights(CastlingType::WhiteQueenSide, zobrist_hasher); } else if square_cords.0 == BOARD_START && square_cords.1 == BOARD_START { new_board.take_away_castling_rights(CastlingType::BlackQueenSide, zobrist_hasher); } else if square_cords.0 == BOARD_START && square_cords.1 == BOARD_END - 1 { new_board.take_away_castling_rights(CastlingType::BlackKingSide, zobrist_hasher); } // if the rook is captured, take away castling privileges if mov.0 == BOARD_END - 1 && mov.1 == BOARD_END - 1 { new_board.take_away_castling_rights(CastlingType::WhiteKingSide, zobrist_hasher); } else if mov.0 == BOARD_END - 1 && mov.1 == BOARD_START { new_board.take_away_castling_rights(CastlingType::WhiteQueenSide, zobrist_hasher); } else if mov.0 == BOARD_START && mov.1 == BOARD_START { new_board.take_away_castling_rights(CastlingType::BlackQueenSide, zobrist_hasher); } else if mov.0 == BOARD_START && mov.1 == BOARD_END - 1 { new_board.take_away_castling_rights(CastlingType::BlackKingSide, zobrist_hasher); } // checks if the pawn has moved two spaces, if it has it can be captured en passant, record the space *behind* the pawn ie the valid capture square if move_generation_mode == MoveGenerationMode::AllMoves { if kind == Pawn && (square_cords.0 as i8 - mov.0 as i8).abs() == 2 { let en_passant_square = match color { White => Point(mov.0 + 1, mov.1), Black => Point(mov.0 - 1, mov.1), }; new_board.pawn_double_move = Some(en_passant_square); new_board.zobrist_key ^= zobrist_hasher.get_val_for_en_passant(en_passant_square.1); } else { // the most recent move was not a double pawn move, unset any possibly existing pawn double move 
new_board.unset_pawn_double_move(zobrist_hasher); } // deal with pawn promotions if mov.0 == BOARD_START && color == White && kind == Pawn { promote_pawn( &new_board, White, square_cords, mov, new_moves, zobrist_hasher, ); } else if mov.0 == BOARD_END - 1 && color == Black && kind == Pawn { promote_pawn( &new_board, Black, square_cords, mov, new_moves, zobrist_hasher, ); } else { new_moves.push(new_board); } } else { new_moves.push(new_board); } } // take care of en passant captures if board.pawn_double_move.is_some() && kind == Pawn { let en_passant = pawn_moves_en_passant(piece, square_cords.0, square_cords.1, board); if let Some(mov) = en_passant { let mut new_board = board.clone(); new_board.last_move = Some((square_cords, mov)); new_board.swap_color(zobrist_hasher); new_board.unset_pawn_double_move(zobrist_hasher); new_board.move_piece(square_cords, mov, zobrist_hasher); if color == White { new_board.board[mov.0 + 1][mov.1] = Square::Empty; new_board.zobrist_key ^= zobrist_hasher.get_val_for_piece(Piece::pawn(Black), Point(mov.0 + 1, mov.1)); } else { new_board.board[mov.0 - 1][mov.1] = Square::Empty; new_board.zobrist_key ^= zobrist_hasher.get_val_for_piece(Piece::pawn(White), Point(mov.0 - 1, mov.1)); } // if you make a move, and you do not end up in check, then this move is valid if !is_check(&new_board, board.to_move) { new_moves.push(new_board); } } } } /* Given the current board, attempt to castle If castling is possible add the move the the list of possible moves Will also update appropriate castling variables if castling was successful */ fn generate_castling_moves( board: &BoardState, new_moves: &mut Vec<BoardState>, zobrist_hasher: &ZobristHasher, ) { if board.to_move == White && can_castle(board, &CastlingType::WhiteKingSide) { let mut new_board = board.clone(); new_board.swap_color(zobrist_hasher); new_board.unset_pawn_double_move(zobrist_hasher); new_board.take_away_castling_rights(CastlingType::WhiteKingSide, zobrist_hasher); 
new_board.take_away_castling_rights(CastlingType::WhiteQueenSide, zobrist_hasher); new_board.white_king_location = Point(BOARD_END - 1, BOARD_END - 2); new_board.last_move = WHITE_KING_SIDE_CASTLE_ALG; // move king and rook new_board.move_piece( board.white_king_location, new_board.white_king_location, zobrist_hasher, ); new_board.move_piece( Point(BOARD_END - 1, BOARD_END - 1), Point(BOARD_END - 1, BOARD_END - 3), zobrist_hasher, ); new_moves.push(new_board); } if board.to_move == White && can_castle(board, &CastlingType::WhiteQueenSide) { let mut new_board = board.clone(); new_board.swap_color(zobrist_hasher); new_board.unset_pawn_double_move(zobrist_hasher); new_board.take_away_castling_rights(CastlingType::WhiteKingSide, zobrist_hasher); new_board.take_away_castling_rights(CastlingType::WhiteQueenSide, zobrist_hasher); new_board.white_king_location = Point(BOARD_END - 1, BOARD_START + 2); new_board.last_move = WHITE_QUEEN_SIDE_CASTLE_ALG; // move king and rook new_board.move_piece( board.white_king_location, new_board.white_king_location, zobrist_hasher, ); new_board.move_piece( Point(BOARD_END - 1, BOARD_START), Point(BOARD_END - 1, BOARD_START + 3), zobrist_hasher, ); new_moves.push(new_board); } if board.to_move == Black && can_castle(board, &CastlingType::BlackKingSide) { let mut new_board = board.clone(); new_board.swap_color(zobrist_hasher); new_board.unset_pawn_double_move(zobrist_hasher); new_board.take_away_castling_rights(CastlingType::BlackKingSide, zobrist_hasher); new_board.take_away_castling_rights(CastlingType::BlackQueenSide, zobrist_hasher); new_board.black_king_location = Point(BOARD_START, BOARD_END - 2); new_board.last_move = BLACK_KING_SIDE_CASTLE_ALG; // move king and rook new_board.move_piece( board.black_king_location, new_board.black_king_location, zobrist_hasher, ); new_board.move_piece( Point(BOARD_START, BOARD_END - 1), Point(BOARD_START, BOARD_END - 3), zobrist_hasher, ); new_moves.push(new_board); } if board.to_move == Black && 
can_castle(board, &CastlingType::BlackQueenSide) { let mut new_board = board.clone(); new_board.swap_color(zobrist_hasher); new_board.unset_pawn_double_move(zobrist_hasher); new_board.take_away_castling_rights(CastlingType::BlackKingSide, zobrist_hasher); new_board.take_away_castling_rights(CastlingType::BlackQueenSide, zobrist_hasher); new_board.black_king_location = Point(BOARD_START, BOARD_START + 2); new_board.last_move = BLACK_QUEEN_SIDE_CASTLE_ALG; // move king and rook new_board.move_piece( board.black_king_location, new_board.black_king_location, zobrist_hasher, ); new_board.move_piece( Point(BOARD_START, BOARD_START), Point(BOARD_START, BOARD_START + 3), zobrist_hasher, ); new_moves.push(new_board); } } /* Executes a pawn promotion on the given cords This function assumes that the board state is a valid pawn promotion and does not do additional checks */ const QUEEN_PROMOTION_SCORE: i32 = 800; // queen value - pawn value const UNDER_PROMOTION_SCORE: i32 = -999999999; // under promotions should be tried last fn promote_pawn( board: &BoardState, color: PieceColor, start: Point, target: Point, moves: &mut Vec<BoardState>, zobrist_hasher: &ZobristHasher, ) { for kind in [Queen, Knight, Bishop, Rook] { let mut new_board = board.clone(); new_board.unset_pawn_double_move(zobrist_hasher); let promotion_piece = Piece { color, kind }; new_board.board[target.0][target.1] = Square::Full(promotion_piece); new_board.last_move = Some((start, target)); new_board.pawn_promotion = Some(promotion_piece); // promoting to a piece that isn't a queen is rarely a good idea new_board.order_heuristic = if kind == Queen { QUEEN_PROMOTION_SCORE } else { UNDER_PROMOTION_SCORE }; // erase pawn and add the promotion piece new_board.zobrist_key ^= zobrist_hasher.get_val_for_piece(promotion_piece, target) ^ zobrist_hasher.get_val_for_piece(Piece::pawn(color), target); moves.push(new_board); } } /* Generate all valid moves recursively given the current board state Will generate up until 
cur_depth = depth */ pub fn generate_moves_test( board: &BoardState, cur_depth: usize, depth: usize, move_counts: &mut [u32], should_evaluate: bool, zobrist_hasher: &ZobristHasher, ) { if cur_depth == depth { if should_evaluate { // we don't do anything with this score, we just calculate it at the leaf for // performance testing purposes get_evaluation(board); } return; } let moves = generate_moves(board, MoveGenerationMode::AllMoves, zobrist_hasher); move_counts[cur_depth] += moves.len() as u32; for mov in moves { generate_moves_test( &mov, cur_depth + 1, depth, move_counts, should_evaluate, zobrist_hasher, ); } } #[cfg(test)] mod tests { use super::*; #[test] fn check_sanity_test() { let b = BoardState::from_fen("8/8/8/8/3K4/8/8/8 w - - 0 1").unwrap(); assert!(!is_check(&b, White)); } #[test] fn knight_checks() { let mut b = BoardState::from_fen("8/8/4n3/8/3K4/8/8/8 w - - 0 1").unwrap(); assert!(is_check(&b, White)); b = BoardState::from_fen("8/8/8/8/8/8/1RK5/nRB5 w - - 0 1").unwrap(); assert!(is_check(&b, White)); b = BoardState::from_fen("8/8/8/8/3k4/5N2/8/8 w - - 0 1").unwrap(); assert!(is_check(&b, Black)); b = BoardState::from_fen("8/8/8/8/3k4/5n2/8/7N w - - 0 1").unwrap(); assert!(!is_check(&b, Black)); b = BoardState::from_fen("8/8/2N5/8/3k4/5n2/8/7N w - - 0 1").unwrap(); assert!(is_check(&b, Black)); } #[test] fn pawn_checks() { let mut b = BoardState::from_fen("8/8/8/4k3/3P4/8/8/8 w - - 0 1").unwrap(); assert!(is_check(&b, Black)); b = BoardState::from_fen("8/8/8/4k3/5P2/8/8/8 w - - 0 1").unwrap(); assert!(is_check(&b, Black)); b = BoardState::from_fen("8/8/8/4k3/4P3/8/8/8 w - - 0 1").unwrap(); assert!(!is_check(&b, Black)); b = BoardState::from_fen("8/8/3PPP2/4k3/8/8/8/8 w - - 0 1").unwrap(); assert!(!is_check(&b, Black)); b = BoardState::from_fen("8/8/8/8/8/5p2/6K1/8 w - - 0 1").unwrap(); assert!(is_check(&b, White)); b = BoardState::from_fen("8/8/8/8/8/7p/6K1/8 w - - 0 1").unwrap(); assert!(is_check(&b, White)); b = 
BoardState::from_fen("8/8/8/8/8/6p1/6K1/8 w - - 0 1").unwrap(); assert!(!is_check(&b, White)); b = BoardState::from_fen("8/8/8/8/8/6K1/5ppp/8 w - - 0 1").unwrap(); assert!(!is_check(&b, White)); } #[test] fn rook_checks() { let mut b = BoardState::from_fen("8/8/8/R3k3/8/8/8/8 w - - 0 1").unwrap(); assert!(is_check(&b, Black)); b = BoardState::from_fen("8/8/8/R1r1k3/8/8/8/8 w - - 0 1").unwrap(); assert!(!is_check(&b, Black)); b = BoardState::from_fen("8/8/8/R1r1k3/8/8/8/4R3 w - - 0 1").unwrap(); assert!(is_check(&b, Black)); b = BoardState::from_fen("4R3/8/8/R1r5/8/8/8/4k3 w - - 0 1").unwrap(); assert!(is_check(&b, Black)); b = BoardState::from_fen("8/8/8/R1r5/8/8/7R/4k3 w - - 0 1").unwrap(); assert!(!is_check(&b, Black));
rust
MIT
2cbd6df821e306f3c04d5af9e98b8650cdcb6d79
2026-01-04T20:24:54.383263Z
true
MitchelPaulin/Walleye
https://github.com/MitchelPaulin/Walleye/blob/2cbd6df821e306f3c04d5af9e98b8650cdcb6d79/src/time_control.rs
src/time_control.rs
use crate::board::PieceColor;

/// Milliseconds held back from the clock so the engine never flags.
pub const SAFEGUARD: f64 = 100.0; // msecs
/// Assumed number of moves remaining when the GUI sends no `movestogo`.
const GAME_LENGTH: u32 = 30; // moves
/// Fraction of the available time we are willing to spend on one move.
const MAX_USAGE: f64 = 0.8; // percentage
const NO_TIME: u128 = 0;

/// Clock state reported by the GUI; all values are in ms unless otherwise specified.
pub struct GameTime {
    pub wtime: i128,
    pub btime: i128,
    pub winc: i128,
    pub binc: i128,
    pub movestogo: Option<u32>,
}

/*
    Big thanks to @mvanthoor (https://github.com/mvanthoor) whose chess engine
    the below time control implementation was adapted from
*/
impl GameTime {
    // Calculates the time the engine allocates for searching a single
    // move. This depends on the number of moves still to go in the game.
    pub fn calculate_time_slice(&self, color: PieceColor) -> u128 {
        let moves_to_go = f64::from(self.movestogo.unwrap_or(GAME_LENGTH));
        let (clock, increment) = match color {
            PieceColor::White => (self.wtime as f64, self.winc as f64),
            PieceColor::Black => (self.btime as f64, self.binc as f64),
        };

        // Keep a safety margin in reserve so we never burn the whole clock.
        let usable = clock - SAFEGUARD;
        if usable > 0.0 {
            // Spread the usable time evenly over the moves we expect to play.
            (usable * MAX_USAGE / moves_to_go).round() as u128
        } else if increment > 0.0 {
            // Base time exhausted: live off a slice of the increment.
            (increment * MAX_USAGE).round() as u128
        } else {
            NO_TIME
        }
    }
}
rust
MIT
2cbd6df821e306f3c04d5af9e98b8650cdcb6d79
2026-01-04T20:24:54.383263Z
false
MitchelPaulin/Walleye
https://github.com/MitchelPaulin/Walleye/blob/2cbd6df821e306f3c04d5af9e98b8650cdcb6d79/src/utils.rs
src/utils.rs
use std::time::Instant;

/*
    Remove new line characters from the end of a string
    Works on windows and linux
*/
pub fn trim_newline(s: &mut String) {
    // Strip a trailing '\n' first; if it was preceded by '\r' (windows line
    // ending) remove that too. A lone trailing '\r' with no '\n' is kept.
    if s.ends_with('\n') {
        s.pop();
        if s.ends_with('\r') {
            s.pop();
        }
    }
}

/*
    Collapse every run of whitespace (spaces, tabs, newlines) into a single
    space and strip leading/trailing whitespace,
    e.g. "\t debug \t on \n" -> "debug on"
*/
pub fn clean_input(buffer: &str) -> String {
    // split_whitespace skips empty tokens and handles every whitespace kind,
    // replacing the hand-rolled state machine (whose `c.is_whitespace()`
    // check inside the else-branch was redundant) with the stdlib idiom.
    buffer.split_whitespace().collect::<Vec<_>>().join(" ")
}

/*
    Helper function to determine if we are out of time for our search
*/
pub fn out_of_time(start: Instant, time_to_move_ms: u128) -> bool {
    // `elapsed()` is equivalent to `Instant::now().duration_since(start)`.
    start.elapsed().as_millis() >= time_to_move_ms
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn trim_windows() {
        let mut word = "hello\r\n".to_string();
        trim_newline(&mut word);
        assert_eq!("hello", word);
    }

    #[test]
    fn trim_linux() {
        let mut word = "hello\n".to_string();
        trim_newline(&mut word);
        assert_eq!("hello", word);
    }

    #[test]
    fn clean_string() {
        assert_eq!(clean_input("  debug on  \n"), "debug on");
        assert_eq!(clean_input("\t debug \t \t\ton\t \n"), "debug on");
    }
}
rust
MIT
2cbd6df821e306f3c04d5af9e98b8650cdcb6d79
2026-01-04T20:24:54.383263Z
false
MitchelPaulin/Walleye
https://github.com/MitchelPaulin/Walleye/blob/2cbd6df821e306f3c04d5af9e98b8650cdcb6d79/src/main.rs
src/main.rs
extern crate clap;
use clap::{App, Arg};
use std::{cmp::max, time::Instant};

mod board;
mod draw_table;
mod engine;
mod evaluation;
mod move_generation;
mod search;
mod time_control;
mod uci;
mod utils;
mod zobrist;

/*
    A custom memory allocator with better performance characteristics than rusts default.
    During testing this resulted in a ~20% speed up in move generation.
    If you are having trouble compiling the engine for your target system you can try removing the two lines below.
    https://github.com/microsoft/mimalloc
*/
#[global_allocator]
static GLOBAL: mimalloc::MiMalloc = mimalloc::MiMalloc;

/// Entry point: parse CLI flags and dispatch to one of three modes —
/// move-generation benchmark (`-T`), self-play (`-P`), or the default
/// UCI loop on stdin/stdout.
fn main() {
    let matches = App::new(env!("CARGO_PKG_NAME"))
        .version(env!("CARGO_PKG_VERSION"))
        .author(env!("CARGO_PKG_AUTHORS"))
        .about("Plays Chess - Sometimes well")
        .arg(
            Arg::with_name("fen")
                .short("f")
                .long("fen")
                .value_name("FEN STRING")
                .help("Load a board state from a fen string, defaults to the start of a new game")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("depth")
                .short("d")
                .long("depth")
                .value_name("DEPTH")
                .help("Set the depth the engine should search to, only used for profiling")
                .takes_value(true),
        )
        .arg(
            Arg::with_name("play self")
                .short("P")
                .long("play-self")
                .help("Play a game against itself in the terminal"),
        )
        .arg(
            Arg::with_name("test bench")
                .short("T")
                .long("test-bench")
                .help(
                    "Evaluates <FEN STRING> to benchmark move generation - incompatible with play self",
                ),
        )
        .arg(
            Arg::with_name("simple print")
                .short("S")
                .long("simple-print")
                .help("Does not use unicode or background coloring in the output"),
        )
        .get_matches();

    // Depth is only consumed by the benchmark path below.
    const DEFAULT_DEPTH: &str = "6";
    let depth_str = matches.value_of("depth").unwrap_or(DEFAULT_DEPTH);
    let depth = match depth_str.parse::<u8>() {
        Ok(d) => d,
        Err(_) => {
            println!("Invalid depth provided");
            return;
        }
    };
    // Depths at or beyond MAX_DEPTH would overrun the fixed-size counters
    // used by the search/benchmark code.
    if depth >= search::MAX_DEPTH {
        println!("Can not have depth greater than {}", search::MAX_DEPTH - 1);
        return;
    }

    // Starting position: user-supplied FEN, or the standard initial position.
    let fen = matches.value_of("fen").unwrap_or(board::DEFAULT_FEN_STRING);
    let board = match board::BoardState::from_fen(fen) {
        Ok(b) => b,
        Err(err) => {
            println!("{}", err);
            return;
        }
    };

    if matches.is_present("test bench") {
        // Benchmark mode: walk the move tree to `depth`, counting nodes per
        // ply, then report total nodes and nodes-per-second.
        let mut moves_states = [0; search::MAX_DEPTH as usize];
        let start = Instant::now();
        let zobrist_hasher = zobrist::ZobristHasher::create_zobrist_hasher();
        move_generation::generate_moves_test(
            &board,
            0,
            depth as usize,
            &mut moves_states,
            true,
            &zobrist_hasher,
        );
        let time_to_run = Instant::now().duration_since(start);
        let nodes: u32 = moves_states.iter().sum();
        // max(.., 1) avoids division by zero for sub-second runs.
        println!(
            "Searched to a depth of {} and evaluated {} nodes in {:?} for a total speed of {} nps",
            depth,
            nodes,
            time_to_run,
            nodes / max(time_to_run.as_secs() as u32, 1)
        );
        return;
    }

    if matches.is_present("play self") {
        // Self-play demo in the terminal with fixed move/time limits.
        let simple_print = matches.is_present("simple print");
        let max_moves = 100;
        let time_per_move_ms = 1000;
        engine::play_game_against_self(&board, max_moves, time_per_move_ms, simple_print);
        return;
    }

    // Default mode: speak the UCI protocol over stdin/stdout.
    uci::play_game_uci();
}
rust
MIT
2cbd6df821e306f3c04d5af9e98b8650cdcb6d79
2026-01-04T20:24:54.383263Z
false
MitchelPaulin/Walleye
https://github.com/MitchelPaulin/Walleye/blob/2cbd6df821e306f3c04d5af9e98b8650cdcb6d79/src/zobrist.rs
src/zobrist.rs
use crate::board::{Piece, PieceColor::*, Point}; pub use crate::move_generation::CastlingType; use rand_chacha::rand_core::{RngCore, SeedableRng}; /* For simplicity use a 12x12 board so we do not need to convert between an 8x8 and 12x12 board coordinate system Since this array is not initialized very often it should have a negligible performance impact */ const BOARD_SIZE: usize = 12; // 6 pieces * 2 colors const PIECE_TYPES: usize = 12; pub type ZobristKey = u64; pub struct ZobristHasher { // indexed by [piece][file][rank] piece_square_table: [[[ZobristKey; BOARD_SIZE]; BOARD_SIZE]; PIECE_TYPES], black_to_move: ZobristKey, white_king_side_castle: ZobristKey, white_queen_side_castle: ZobristKey, black_king_side_castle: ZobristKey, black_queen_side_castle: ZobristKey, // indexed by file en_passant_files: [ZobristKey; BOARD_SIZE], } impl ZobristHasher { pub fn create_zobrist_hasher() -> ZobristHasher { // Here we use a seed so if you have to recreate the hasher you will always get the same values // Paul Morphy's birthday let mut rng = rand_chacha::ChaCha8Rng::seed_from_u64(6 * 10 * 1837); let mut piece_square_table = [[[0; BOARD_SIZE]; BOARD_SIZE]; PIECE_TYPES]; #[allow(clippy::needless_range_loop)] for i in 0..BOARD_SIZE { #[allow(clippy::needless_range_loop)] for j in 0..BOARD_SIZE { #[allow(clippy::needless_range_loop)] for k in 0..PIECE_TYPES { piece_square_table[k][j][i] = rng.next_u64(); } } } let mut en_passant_files = [0; BOARD_SIZE]; #[allow(clippy::needless_range_loop)] for i in 0..BOARD_SIZE { en_passant_files[i] = rng.next_u64(); } ZobristHasher { piece_square_table, black_to_move: rng.next_u64(), white_king_side_castle: rng.next_u64(), white_queen_side_castle: rng.next_u64(), black_king_side_castle: rng.next_u64(), black_queen_side_castle: rng.next_u64(), en_passant_files, } } pub fn get_val_for_piece(&self, piece: Piece, point: Point) -> ZobristKey { // shift everything by 6 for black pieces // ensures each piece,color pair gets a unique number in 
[0,11] let index = piece.index() + if piece.color == White { 0 } else { 6 }; self.piece_square_table[index][point.1][point.0] } pub fn get_val_for_castling(&self, castling_type: CastlingType) -> ZobristKey { match castling_type { CastlingType::WhiteKingSide => self.white_king_side_castle, CastlingType::WhiteQueenSide => self.white_queen_side_castle, CastlingType::BlackKingSide => self.black_king_side_castle, CastlingType::BlackQueenSide => self.black_queen_side_castle, } } pub fn get_val_for_en_passant(&self, file: usize) -> ZobristKey { self.en_passant_files[file] } pub fn get_black_to_move_val(&self) -> ZobristKey { self.black_to_move } }
rust
MIT
2cbd6df821e306f3c04d5af9e98b8650cdcb6d79
2026-01-04T20:24:54.383263Z
false
MitchelPaulin/Walleye
https://github.com/MitchelPaulin/Walleye/blob/2cbd6df821e306f3c04d5af9e98b8650cdcb6d79/src/evaluation.rs
src/evaluation.rs
pub use crate::board::*;
pub use crate::board::{PieceColor::*, PieceKind::*};

/*
    Evaluation function based on PeSTO's tapered evaluation:
    https://www.chessprogramming.org/PeSTO%27s_Evaluation_Function

    Each piece has a middlegame (MG) and endgame (EG) piece-square table,
    stored from White's point of view (row 0 = 8th rank). Black reuses the
    same tables mirrored vertically. The final score blends MG and EG
    values by remaining material ("game phase").
*/
#[rustfmt::skip]
const MG_PAWN_TABLE: [[i32; 8]; 8] = [
    [  0,   0,   0,   0,   0,   0,  0,   0],
    [ 98, 134,  61,  95,  68, 126, 34, -11],
    [ -6,   7,  26,  31,  65,  56, 25, -20],
    [-14,  13,   6,  21,  23,  12, 17, -23],
    [-27,  -2,  -5,  12,  17,   6, 10, -25],
    [-26,  -4,  -4, -10,   3,   3, 33, -12],
    [-35,  -1, -20, -23, -15,  24, 38, -22],
    [  0,   0,   0,   0,   0,   0,  0,   0]
];

#[rustfmt::skip]
const EG_PAWN_TABLE: [[i32; 8]; 8] = [
    [  0,   0,   0,   0,   0,   0,   0,   0],
    [178, 173, 158, 134, 147, 132, 165, 187],
    [ 94, 100,  85,  67,  56,  53,  82,  84],
    [ 32,  24,  13,   5,  -2,   4,  17,  17],
    [ 13,   9,  -3,  -7,  -7,  -8,   3,  -1],
    [  4,   7,  -6,   1,   0,  -5,  -1,  -8],
    [ 13,   8,   8,  10,  13,   0,   2,  -7],
    [  0,   0,   0,   0,   0,   0,   0,   0],
];

#[rustfmt::skip]
const MG_KNIGHT_TABLE: [[i32; 8]; 8] = [
    [-167, -89, -34, -49,  61, -97, -15, -107],
    [ -73, -41,  72,  36,  23,  62,   7,  -17],
    [ -47,  60,  37,  65,  84, 129,  73,   44],
    [  -9,  17,  19,  53,  37,  69,  18,   22],
    [ -13,   4,  16,  13,  28,  19,  21,   -8],
    [ -23,  -9,  12,  10,  19,  17,  25,  -16],
    [ -29, -53, -12,  -3,  -1,  18, -14,  -19],
    [-105, -21, -58, -33, -17, -28, -19,  -23],
];

#[rustfmt::skip]
const EG_KNIGHT_TABLE: [[i32; 8]; 8] = [
    [-58, -38, -13, -28, -31, -27, -63, -99],
    [-25,  -8, -25,  -2,  -9, -25, -24, -52],
    [-24, -20,  10,   9,  -1,  -9, -19, -41],
    [-17,   3,  22,  22,  22,  11,   8, -18],
    [-18,  -6,  16,  25,  16,  17,   4, -18],
    [-23,  -3,  -1,  15,  10,  -3, -20, -22],
    [-42, -20, -10,  -5,  -2, -20, -23, -44],
    [-29, -51, -23, -15, -22, -18, -50, -64],
];

#[rustfmt::skip]
const MG_BISHOP_TABLE: [[i32; 8]; 8] = [
    [-29,   4, -82, -37, -25, -42,   7,  -8],
    [-26,  16, -18, -13,  30,  59,  18, -47],
    [-16,  37,  43,  40,  35,  50,  37,  -2],
    [ -4,   5,  19,  50,  37,  37,   7,  -2],
    [ -6,  13,  13,  26,  34,  12,  10,   4],
    [  0,  15,  15,  15,  14,  27,  18,  10],
    [  4,  15,  16,   0,   7,  21,  33,   1],
    [-33,  -3, -14, -21, -13, -12, -39, -21],
];

#[rustfmt::skip]
const EG_BISHOP_TABLE: [[i32; 8]; 8] = [
    [-14, -21, -11,  -8,  -7,  -9, -17, -24],
    [ -8,  -4,   7, -12,  -3, -13,  -4, -14],
    [  2,  -8,   0,  -1,  -2,   6,   0,   4],
    [ -3,   9,  12,   9,  14,  10,   3,   2],
    [ -6,   3,  13,  19,   7,  10,  -3,  -9],
    [-12,  -3,   8,  10,  13,   3,  -7, -15],
    [-14, -18,  -7,  -1,   4,  -9, -15, -27],
    [-23,  -9, -23,  -5,  -9, -16,  -5, -17],
];

#[rustfmt::skip]
const MG_ROOK_TABLE: [[i32; 8]; 8] = [
    [ 32,  42,  32,  51,  63,   9,  31,  43],
    [ 27,  32,  58,  62,  80,  67,  26,  44],
    [ -5,  19,  26,  36,  17,  45,  61,  16],
    [-24, -11,   7,  26,  24,  35,  -8, -20],
    [-36, -26, -12,  -1,   9,  -7,   6, -23],
    [-45, -25, -16, -17,   3,   0,  -5, -33],
    [-44, -16, -20,  -9,  -1,  11,  -6, -71],
    [-19, -13,   1,  17,  16,   7, -37, -26],
];

#[rustfmt::skip]
const EG_ROOK_TABLE: [[i32; 8]; 8] = [
    [13, 10, 18, 15, 12,  12,   8,   5],
    [11, 13, 13, 11, -3,   3,   8,   3],
    [ 7,  7,  7,  5,  4,  -3,  -5,  -3],
    [ 4,  3, 13,  1,  2,   1,  -1,   2],
    [ 3,  5,  8,  4, -5,  -6,  -8, -11],
    [-4,  0, -5, -1, -7, -12,  -8, -16],
    [-6, -6,  0,  2, -9,  -9, -11,  -3],
    [-9,  2,  3, -1, -5, -13,   4, -20],
];

#[rustfmt::skip]
const MG_QUEEN_TABLE: [[i32; 8]; 8] = [
    [-28,   0,  29,  12,  59,  44,  43,  45],
    [-24, -39,  -5,   1, -16,  57,  28,  54],
    [-13, -17,   7,   8,  29,  56,  47,  57],
    [-27, -27, -16, -16,  -1,  17,  -2,   1],
    [ -9, -26,  -9, -10,  -2,  -4,   3,  -3],
    [-14,   2, -11,  -2,  -5,   2,  14,   5],
    [-35,  -8,  11,   2,   8,  15,  -3,   1],
    [ -1, -18,  -9,  10, -15, -25, -31, -50],
];

#[rustfmt::skip]
const EG_QUEEN_TABLE: [[i32; 8]; 8] = [
    [ -9,  22,  22,  27,  27,  19,  10,  20],
    [-17,  20,  32,  41,  58,  25,  30,   0],
    [-20,   6,   9,  49,  47,  35,  19,   9],
    [  3,  22,  24,  45,  57,  40,  57,  36],
    [-18,  28,  19,  47,  31,  34,  39,  23],
    [-16, -27,  15,   6,   9,  17,  10,   5],
    [-22, -23, -30, -16, -16, -23, -36, -32],
    [-33, -28, -22, -43,  -5, -32, -20, -41],
];

#[rustfmt::skip]
const MG_KING_TABLE: [[i32; 8]; 8] = [
    [-65,  23,  16, -15, -56, -34,   2,  13],
    [ 29,  -1, -20,  -7,  -8,  -4, -38, -29],
    [ -9,  24,   2, -16, -20,   6,  22, -22],
    [-17, -20, -12, -27, -30, -25, -14, -36],
    [-49,  -1, -27, -39, -46, -44, -33, -51],
    [-14, -14, -22, -46, -44, -30, -15, -27],
    [  1,   7,  -8, -64, -43, -16,   9,   8],
    [-15,  36,  12, -54,   8, -28,  24,  14],
];

#[rustfmt::skip]
const EG_KING_TABLE: [[i32; 8]; 8] = [
    [-74, -35, -18, -18, -11,  15,   4, -17],
    [-12,  17,  14,  17,  17,  38,  23,  11],
    [ 10,  17,  23,  15,  20,  45,  44,  13],
    [ -8,  22,  24,  27,  26,  33,  26,   3],
    [-18,  -4,  21,  24,  27,  23,   9, -11],
    [-19,  -3,  11,  21,  23,  16,   7,  -9],
    [-27, -11,   4,  13,  14,   4,  -5, -17],
    [-53, -34, -21, -11, -28, -14, -24, -43]
];

// Middlegame piece-square table for a piece kind.
fn mg_table(kind: PieceKind) -> &'static [[i32; 8]; 8] {
    match kind {
        Pawn => &MG_PAWN_TABLE,
        Bishop => &MG_BISHOP_TABLE,
        Knight => &MG_KNIGHT_TABLE,
        Rook => &MG_ROOK_TABLE,
        King => &MG_KING_TABLE,
        Queen => &MG_QUEEN_TABLE,
    }
}

// Endgame piece-square table for a piece kind.
fn eg_table(kind: PieceKind) -> &'static [[i32; 8]; 8] {
    match kind {
        Pawn => &EG_PAWN_TABLE,
        Bishop => &EG_BISHOP_TABLE,
        Knight => &EG_KNIGHT_TABLE,
        Rook => &EG_ROOK_TABLE,
        King => &EG_KING_TABLE,
        Queen => &EG_QUEEN_TABLE,
    }
}

// Middlegame material value (centipawn-scale PeSTO values; King is 0
// because it can never be captured).
fn mg_piece_val(kind: PieceKind) -> i32 {
    match kind {
        Pawn => 82,
        Knight => 337,
        Bishop => 365,
        Rook => 477,
        Queen => 1025,
        King => 0,
    }
}

// Endgame material value.
fn eg_piece_val(kind: PieceKind) -> i32 {
    match kind {
        Pawn => 94,
        Knight => 281,
        Bishop => 297,
        Rook => 512,
        Queen => 936,
        King => 0,
    }
}

// Contribution of a piece to the game-phase counter (24 = full material).
fn game_phase_val(kind: PieceKind) -> i32 {
    match kind {
        Pawn => 0,
        Knight => 1,
        Bishop => 1,
        Rook => 2,
        Queen => 4,
        King => 0,
    }
}

/*
    Return how good a position is from the perspective of the current player
*/
pub fn get_evaluation(board: &BoardState) -> i32 {
    let mut white_mg = 0;
    let mut black_mg = 0;
    let mut white_eg = 0;
    let mut black_eg = 0;
    let mut game_phase = 0;
    // Scan the playable area of the padded board (BOARD_START..BOARD_END).
    for row in BOARD_START..BOARD_END {
        for col in BOARD_START..BOARD_END {
            if let Square::Full(Piece { color, kind }) = board.board[row][col] {
                game_phase += game_phase_val(kind);
                if color == White {
                    white_mg += mg_table(kind)[row - BOARD_START][col - BOARD_START]
                        + mg_piece_val(kind);
                    white_eg += eg_table(kind)[row - BOARD_START][col - BOARD_START]
                        + eg_piece_val(kind);
                } else {
                    // `9 - row` mirrors the table vertically for Black —
                    // NOTE(review): assumes the padded board uses rows 2..=9
                    // for ranks 8..1 (BOARD_START = 2); confirm in board.rs.
                    black_mg += mg_table(kind)[9 - row][col - BOARD_START] + mg_piece_val(kind);
                    black_eg += eg_table(kind)[9 - row][col - BOARD_START] + eg_piece_val(kind);
                }
            }
        }
    }
    // Score from the side-to-move's perspective (negamax convention).
    let mg_score;
    let eg_score;
    if board.to_move == White {
        mg_score = white_mg - black_mg;
        eg_score = white_eg - black_eg;
    } else {
        mg_score = black_mg - white_mg;
        eg_score = black_eg - white_eg;
    }
    let mut mg_phase = game_phase;
    /* in case of early promotion */
    if mg_phase > 24 {
        mg_phase = 24;
    }
    let eg_phase = 24 - mg_phase;
    // Tapered evaluation: linear blend of MG and EG scores by game phase.
    (mg_score * mg_phase + eg_score * eg_phase) / 24
}

#[cfg(test)]
mod tests {
    use super::*;

    #[test]
    fn position_evaluation_equal() {
        let b = BoardState::from_fen("rnbqkbnr/pppppppp/8/8/8/8/PPPPPPPP/RNBQKBNR w KQkq - 0 1")
            .unwrap();
        assert_eq!(get_evaluation(&b), 0);
    }
}
rust
MIT
2cbd6df821e306f3c04d5af9e98b8650cdcb6d79
2026-01-04T20:24:54.383263Z
false
irevenko/ferris-fetch
https://github.com/irevenko/ferris-fetch/blob/229ab7029109865b9fd6aab5953875942bdf3e33/src/main.rs
src/main.rs
use colored::*; use std::string::ToString; use sysinfo::{System, SystemExt, RefreshKind, ProcessorExt}; const FERRIS_ART: &[&str] = &[ " ", " ▄ ▓▄ ▄▓▓ ▓▓ ", " ▄ ▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓ ▄ ", " ▐▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓ ", " ▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▌ ", " ▄▄▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▄▄▄ ", " ▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▌ ", " ▐▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓▀▓▄▒▓▓▓▓▓▀▓▄▓▓▓▓▓▓▓▓▓▓▓▓▓ ", " ▐▓▓▓▓▓▓▓▓▓▓▓▓▓▌ ▐██▒▓▓▒▌ ██▌▓▓▓▓▓▓▓▓▓▓▓ ", " ▄▓▓▓▓▓▓▓▓▓▓▓▓▓▓▓█████▒▓▓▒████▌▓▓▓▓▓▓▓▓▓▓▓▓▓▄", " ▓▓▓▌▀▓▓▓▓▓▓▓▓▓▓▒▄▄▌▒▓▓▓▓▓▒▒▄▒▒▓▓▓▓▒▒▓▓▀▀▓▀▓▓▓", " ▀▓▓▄ ▀▄ ▀▓▓▀▓▀▒▓▓▓▒▀▓▒▓▓▀▒▓▓▓▒▀▓▀▓▒▓▀ ▀ ▐▓▀", " ▓▄ ▄ ▀▓▓▓▓▓▀▀▀ ▓▓▓▓▓▀ ▀ ▄▓ ", " ▀ ▀▓▓▓▓▄▄ ▄▄▓▓▓▀ ▀ ", " ▀▀▀▀ ▀▀▀ ", " ", ]; fn exc(exc: &str) -> Result<std::process::Output, std::io::Error> { let mut exc = exc.split_whitespace(); let mut cmd = std::process::Command::new(exc.next().unwrap()); cmd.args(exc).output() } fn get_ver(cmd: &str) -> String { exc(cmd) .ok() .and_then(|ver| String::from_utf8(ver.stdout).ok()) .and_then(|line| line.split_whitespace().nth(1).map(ToString::to_string)) .unwrap_or_else(|| "not present".to_string()) } fn get_cargo_crates() -> usize { exc("cargo install --list") .ok() .and_then(|installs| String::from_utf8(installs.stdout).ok()) .map_or(0, |ilist| { ilist .lines() .filter(|line| !line.starts_with(" ")) .count() }) } fn render(art: bool, info: &[String]) { if art { for (ferris_line, info_line) in FERRIS_ART.iter().zip(info) { println!("{} {}", ferris_line.red(), info_line); } } else { for line in info { println!("{}", line); } } } fn main() { let mut art = true; let args: Vec<String> = std::env::args().collect(); if args.len() == 2 && args[1] == "-s" { art = false; } let mut sys = System::new_with_specifics(RefreshKind::new().with_cpu()); sys.refresh_all(); let kernel = sys.kernel_version().unwrap_or_else(|| "Unknown".into()); let total_ram = sys.total_memory(); let used_ram = sys.used_memory(); let cpu = sys.processors()[0].brand(); let rustc_ver = get_ver("rustc -V"); let cargo_ver = get_ver("cargo -V"); let 
rustup_ver = get_ver("rustup -V"); let cargo_crates = get_cargo_crates(); let userinfo = format!( "{}{}{}", whoami::username().bright_red().bold(), "@".bold(), whoami::hostname().bright_red().bold() ); let splitline = "═".repeat(whoami::username().len() + whoami::hostname().len() + 1); let rustc_ver = format!("{}{}", "rustc ver: ".bright_red(), rustc_ver); let rustup_ver = format!("{}{}", "rustup ver: ".bright_red(), rustup_ver); let cargo_ver = format!("{}{}", "cargo ver: ".bright_red(), cargo_ver); let cargo_crates = format!("{}{}", "cargo crates: ".bright_red(), cargo_crates); let os = format!("{}{}", "os: ".bright_red(), whoami::distro()); let kernel = format!("{}{}", "kernel: ".bright_red(), kernel); let cpu = format!("{}{}", "cpu: ".bright_red(), cpu); let ram = format!( "{}{} » {}{}", "ram: ".bright_red(), used_ram, total_ram, " MB" ); let bright_colors = format!( "{}{}{}{}{}{}{}{}", "███".bright_red(), "███".bright_yellow(), "███".bright_green(), "███".bright_cyan(), "███".bright_blue(), "███".bright_magenta(), "███".bright_black(), "███".bright_white() ); let dark_colors = format!( "{}{}{}{}{}{}{}{}", "███".red(), "███".yellow(), "███".green(), "███".cyan(), "███".blue(), "███".magenta(), "███".black(), "███".white() ); render( art, &[ "".to_string(), "".to_string(), userinfo, splitline, rustc_ver, rustup_ver, cargo_ver, cargo_crates, os, kernel, cpu, ram, "".to_string(), bright_colors, dark_colors, "".to_string(), ], ); }
rust
MIT
229ab7029109865b9fd6aab5953875942bdf3e33
2026-01-04T20:24:53.210634Z
false
trinhminhtriet/rmrfrs
https://github.com/trinhminhtriet/rmrfrs/blob/6b824c29d827efbca1c8022d0b4c2d82eb8ef7c2/rmrfrs/src/main.rs
rmrfrs/src/main.rs
use std::{ env::current_dir, error::Error, fmt, io::{stdin, stdout, Write}, num::ParseIntError, path::PathBuf, sync::mpsc::{Receiver, Sender, SyncSender}, }; use clap::{Command, CommandFactory, Parser}; use clap_complete::{generate, Generator, Shell}; use rmrfrs_lib::{ dir_size, path_canonicalise, pretty_size, print_elapsed, scan, Project, ScanOptions, }; // Below needs updating every time a new project type is added! #[derive(Parser, Debug)] #[command(name = "rmrfrs")] /// rmrfrs recursively cleans project directories. /// /// Supported project types: Cargo, Node, Unity, SBT, Haskell Stack, Maven, Unreal Engine, Jupyter Notebook, Python, Jupyter Notebooks, CMake, Composer, Pub, Elixir, Swift, Gradle, and .NET projects. struct Opt { /// The directories to examine. Current directory will be used if DIRS is omitted. #[arg(name = "DIRS")] dirs: Vec<PathBuf>, /// Directories to ignore. Will also prevent recursive traversal within. #[arg(short = 'I', long)] ignored_dirs: Vec<PathBuf>, /// Quiet mode. Won't output to the terminal. -qq prevents all output. #[arg(short, long, action = clap::ArgAction::Count, value_parser = clap::value_parser!(u8).range(0..3))] quiet: u8, /// Clean all found projects without confirmation. #[arg(short, long)] all: bool, /// Follow symbolic links #[arg(short = 'L', long)] follow_symlinks: bool, /// Restrict directory traversal to the root filesystem #[arg(short, long)] same_filesystem: bool, /// Only directories with a file last modified n units of time ago will be looked at. Ex: 20d. Units are m: minutes, h: hours, d: days, w: weeks, M: months and y: years. 
#[arg(short, long, value_parser = parse_age_filter, default_value = "0d")] older: u64, /// Generates completions for the specified shell #[arg(long = "completions", value_enum)] generator: Option<Shell>, /// If there is no input, defaults to yes #[arg(short, long)] default: bool, } fn prepare_directories(dirs: Vec<PathBuf>) -> Result<Vec<PathBuf>, Box<dyn Error>> { let cd = current_dir()?; if dirs.is_empty() { return Ok(vec![cd]); } let dirs = dirs .into_iter() .filter_map(|path| { let exists = path.try_exists().unwrap_or(false); if !exists { eprintln!("error: directory {} does not exist", path.to_string_lossy()); return None; } if let Ok(metadata) = path.metadata() { if metadata.is_file() { eprintln!( "error: file supplied but directory expected: {}", path.to_string_lossy() ); return None; } } path_canonicalise(&cd, path).ok() }) .collect(); Ok(dirs) } #[derive(Debug)] pub enum ParseAgeFilterError { ParseIntError(ParseIntError), InvalidUnit, } impl fmt::Display for ParseAgeFilterError { fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result { match self { ParseAgeFilterError::ParseIntError(e) => e.fmt(f), ParseAgeFilterError::InvalidUnit => { "invalid age unit, must be one of m, h, d, w, M, y".fmt(f) } } } } impl From<ParseIntError> for ParseAgeFilterError { fn from(e: ParseIntError) -> Self { Self::ParseIntError(e) } } impl Error for ParseAgeFilterError {} pub fn parse_age_filter(age_filter: &str) -> Result<u64, ParseAgeFilterError> { const MINUTE: u64 = 60; const HOUR: u64 = MINUTE * 60; const DAY: u64 = HOUR * 24; const WEEK: u64 = DAY * 7; const MONTH: u64 = WEEK * 4; const YEAR: u64 = DAY * 365; let (digit_end, unit) = age_filter .char_indices() .last() .ok_or(ParseAgeFilterError::InvalidUnit)?; let multiplier = match unit { 'm' => MINUTE, 'h' => HOUR, 'd' => DAY, 'w' => WEEK, 'M' => MONTH, 'y' => YEAR, _ => return Err(ParseAgeFilterError::InvalidUnit), }; let count = age_filter[..digit_end].parse::<u64>()?; let seconds = count * multiplier; Ok(seconds) } 
type DiscoverData = (Project, Vec<(String, u64)>, u64, String); type DeleteData = (Project, u64); fn discover( dirs: Vec<PathBuf>, scan_options: &ScanOptions, project_min_age: u64, result_sender: SyncSender<DiscoverData>, ignored_dirs: &[PathBuf], ) { for project in dirs .iter() .flat_map(|dir| scan(dir, scan_options)) .filter_map(|p| p.ok()) .filter(|p| ignored_dirs.iter().all(|i| !p.path.starts_with(i))) { let artifact_dir_sizes: Vec<_> = project .artifact_dirs() .iter() .copied() .filter_map( |dir| match dir_size(&project.path.join(dir), scan_options) { 0 => None, size => Some((dir.to_owned(), size)), }, ) .collect(); let project_artifact_bytes = artifact_dir_sizes.iter().map(|(_, bytes)| bytes).sum(); if project_artifact_bytes == 0 { continue; } let mut last_modified_str = String::new(); let mut last_modified_int: u64 = 0; if let Ok(last_modified) = project.last_modified(scan_options) { if let Ok(elapsed) = last_modified.elapsed() { last_modified_int = elapsed.as_secs(); let elapsed = print_elapsed(last_modified_int); last_modified_str = format!("({elapsed})"); } } if last_modified_int < project_min_age { continue; } if result_sender .send(( project, artifact_dir_sizes, project_artifact_bytes, last_modified_str, )) .is_err() { // interactive prompt has finished, silently finish here break; } } } fn process_deletes(project_recv: Receiver<DeleteData>) -> Vec<(Project, u64)> { project_recv .into_iter() .map(|(project, artifact_bytes)| { project.clean(); (project, artifact_bytes) }) .collect() } fn interactive_prompt( projects_recv: Receiver<DiscoverData>, deletes_send: Sender<DeleteData>, quiet: u8, mut clean_all: bool, default: bool, ) { 'project_loop: for (project, artifact_dirs, artifact_bytes, last_modified) in projects_recv { if quiet == 0 { println!( "{} {} project {last_modified}", &project.name(), project.type_name(), ); for (dir, size) in artifact_dirs { println!(" └─ {dir} ({})", pretty_size(size)); } } let clean_project = if clean_all { true } else { 
loop { print!( " delete above artifact directories? ([{}]es, [n]o, [a]ll, [q]uit): ", { if default { "Y" } else { "y" } } ); stdout().flush().unwrap(); let mut choice = String::new(); stdin().read_line(&mut choice).unwrap(); match choice.trim_end() { "y" => break true, "n" => break false, "a" => { clean_all = true; break true; } "q" => { println!(); break 'project_loop; } "" => { if default { println!(" defaulting to yes..."); break true; } else { println!(" no input, please choose between y, n, a, or q."); } } _ => println!(" invalid choice, please choose between y, n, a, or q."), } } }; if clean_project { // TODO: Return an error that indicates a partial failure, not a show stopper if let Err(e) = deletes_send.send((project, artifact_bytes)) { eprintln!( "no further projects will be scanned, error sending to delete thread {e}" ); break; } } } } fn print_completions<G: Generator>(gen: G, cmd: &mut Command) { generate(gen, cmd, cmd.get_name().to_string(), &mut stdout()); } fn main() -> Result<(), Box<dyn Error>> { let mut opt = Opt::parse(); if let Some(generator) = opt.generator { let mut cmd = Opt::command(); eprintln!("Generating completion file for {generator:?}..."); print_completions(generator, &mut cmd); return Ok(()); } if opt.quiet > 0 && !opt.all { eprintln!("Quiet mode can only be used with --all."); std::process::exit(1); } let dirs = prepare_directories(opt.dirs)?; let scan_options: ScanOptions = ScanOptions { follow_symlinks: opt.follow_symlinks, same_file_system: opt.same_filesystem, }; let (proj_discover_send, proj_discover_recv) = std::sync::mpsc::sync_channel::<DiscoverData>(5); let (proj_delete_send, proj_delete_recv) = std::sync::mpsc::channel::<(Project, u64)>(); let project_min_age = opt.older; let ignored_dirs = { let cd = current_dir()?; std::mem::take(&mut opt.ignored_dirs) .into_iter() .map(|dir| path_canonicalise(&cd, dir)) .collect::<Result<Vec<_>, _>>()? 
}; std::thread::spawn(move || { discover( dirs, &scan_options, project_min_age, proj_discover_send, &ignored_dirs, ); }); let delete_handle = std::thread::spawn(move || process_deletes(proj_delete_recv)); interactive_prompt( proj_discover_recv, proj_delete_send, opt.quiet, opt.all, opt.default, ); let delete_results = match delete_handle.join() { Ok(r) => r, Err(e) => { eprintln!("error in delete thread, {e:?}"); std::process::exit(1); } }; if opt.quiet < 2 { let projects_cleaned = delete_results.len(); let bytes_deleted = delete_results.iter().map(|(_, bytes)| bytes).sum(); println!( "Projects cleaned: {}, Bytes deleted: {}", projects_cleaned, pretty_size(bytes_deleted) ); } Ok(()) }
rust
MIT
6b824c29d827efbca1c8022d0b4c2d82eb8ef7c2
2026-01-04T20:24:58.804688Z
false
trinhminhtriet/rmrfrs
https://github.com/trinhminhtriet/rmrfrs/blob/6b824c29d827efbca1c8022d0b4c2d82eb8ef7c2/rmrfrs-ui/src/main.rs
rmrfrs-ui/src/main.rs
// On the Windows platform, disable the console when opening the app #![windows_subsystem = "windows"] use std::{ cmp::Ordering, path, sync::{mpsc, Arc}, thread, }; use druid::{ commands::{OPEN_FILE, SHOW_OPEN_PANEL}, widget::{ Button, Controller, CrossAxisAlignment, Flex, FlexParams, Label, List, Scroll, ViewSwitcher, WidgetExt, }, AppLauncher, Color, Command, Data, Env, Event, EventCtx, ExtEventSink, FileDialogOptions, FileInfo, Lens, LocalizedString, Selector, Target, Widget, WindowDesc, }; use rmrfrs_lib::{clean, pretty_size, scan, ScanOptions}; const ADD_ITEM: Selector<Project> = Selector::new("event.add-item"); const SET_ACTIVE_ITEM: Selector<Project> = Selector::new("event.set-active-item"); const CLEAN_PATH: Selector<Project> = Selector::new("event.clean-path"); const SCAN_COMPLETE: Selector<bool> = Selector::new("event.scan-complete"); const SCAN_START: Selector = Selector::new("event.scan-start"); struct EventHandler {} #[derive(Debug, Clone, Data, Lens)] struct Project { display: String, path: String, p_type: String, artifact_size: u64, non_artifact_size: u64, dirs: Arc<Vec<(String, u64, bool)>>, } impl PartialEq for Project { fn eq(&self, other: &Project) -> bool { self.path.eq(&other.path) } } impl Ord for Project { fn cmp(&self, other: &Self) -> std::cmp::Ordering { match self.artifact_size.cmp(&other.artifact_size) { Ordering::Equal => self.display.cmp(&other.display), Ordering::Greater => Ordering::Less, Ordering::Less => Ordering::Greater, } } } impl PartialOrd for Project { fn partial_cmp(&self, other: &Self) -> Option<Ordering> { Some(self.cmp(other)) } } impl Eq for Project {} #[derive(Debug, Clone, Data, PartialEq)] enum ScanStatus { NotStarted, InProgrss, Complete, } #[derive(Debug, Clone, Data, Lens)] struct AppData { items: Arc<Vec<Project>>, active_item: Option<Project>, scan_dir: String, artifact_size: u64, non_artifact_size: u64, saved: u64, scan_complete: ScanStatus, scan_starter_send: Arc<mpsc::SyncSender<ScanStarterThreadMsg>>, } enum 
ScanStarterThreadMsg { StartScan(String), } impl EventHandler { pub fn new() -> Self { EventHandler {} } } impl<W: Widget<AppData>> Controller<AppData, W> for EventHandler { fn event( &mut self, _child: &mut W, ctx: &mut EventCtx, event: &Event, data: &mut AppData, _env: &Env, ) { match event { Event::Command(cmd) if cmd.is(ADD_ITEM) => { let project = cmd.get::<Project>(ADD_ITEM).unwrap().clone(); data.artifact_size += project.artifact_size; data.non_artifact_size += project.non_artifact_size; let items = Arc::make_mut(&mut data.items); let pos = items.binary_search(&project).unwrap_or_else(|e| e); items.insert(pos, project); ctx.request_layout(); ctx.request_paint(); } Event::Command(cmd) if cmd.is(SET_ACTIVE_ITEM) => { let active_item = cmd.get::<Project>(SET_ACTIVE_ITEM).unwrap().clone(); data.active_item = Some(active_item); ctx.request_layout(); ctx.request_paint(); } Event::Command(cmd) if cmd.is(CLEAN_PATH) => { let active_item = cmd.get::<Project>(CLEAN_PATH).unwrap().clone(); let items = Arc::make_mut(&mut data.items); let pos = items .iter() .position(|probe| probe.path == active_item.path); if let Some(pos) = pos { clean(&active_item.path).unwrap(); data.artifact_size -= active_item.artifact_size; data.saved += active_item.artifact_size; if let Some(item) = items.get_mut(pos) { item.artifact_size = 0; let dirs = Arc::make_mut(&mut item.dirs); for (_, size, artifact_dir) in dirs.iter_mut() { if *artifact_dir { *size = 0; } } } items.sort_unstable(); data.active_item = None; ctx.request_layout(); ctx.request_paint(); } else { eprintln!("tried to clean & remove project but it was not found in the project list. 
display '{}' path '{}'", active_item.display, active_item.path); } } Event::Command(cmd) if cmd.is(OPEN_FILE) => { let file_info = cmd.get::<FileInfo>(OPEN_FILE).unwrap().clone(); data.scan_dir = String::from(file_info.path().to_str().unwrap()); ctx.submit_command(Command::new(SCAN_START, (), Target::Auto)); } Event::Command(cmd) if cmd.is(SCAN_START) => { data.active_item = None; data.artifact_size = 0; Arc::make_mut(&mut data.items).clear(); data.non_artifact_size = 0; // data.saved = 0 // unsure if this should be reset between dirs or not 🤔 data.scan_complete = ScanStatus::InProgrss; data.scan_starter_send .send(ScanStarterThreadMsg::StartScan(data.scan_dir.clone())) .expect("error sending SCAN_START"); } Event::Command(cmd) if cmd.is(SCAN_COMPLETE) => { data.scan_complete = ScanStatus::Complete; ctx.request_layout(); ctx.request_paint(); } Event::Command(cmd) => { println!("unhandled cmd: {:?}", cmd); } _ => (), } _child.event(ctx, event, data, _env); } } fn spawn_scanner_thread( scan_starter_recv: mpsc::Receiver<ScanStarterThreadMsg>, event_sink: ExtEventSink, options: ScanOptions, ) -> Result<(), Box<dyn std::error::Error>> { thread::Builder::new() .name(String::from("scan")) .spawn(move || loop { match scan_starter_recv.recv().expect("scan starter thread") { ScanStarterThreadMsg::StartScan(p) => { scan(&p, &options) .filter_map(|p| p.ok()) .for_each(|project| { let name = project.name().to_string(); let project_size = project.size_dirs(&options); let display = path::Path::new(&name) .file_name() .map(|s| s.to_str().unwrap_or(&name)) .unwrap_or(&name); let project = Project { display: String::from(display), path: name, p_type: project.type_name().into(), artifact_size: project_size.artifact_size, non_artifact_size: project_size.non_artifact_size, dirs: Arc::new(project_size.dirs), }; event_sink .submit_command(ADD_ITEM, project, Target::Auto) .expect("error submitting ADD_ITEM command"); }); event_sink .submit_command(SCAN_COMPLETE, false, Target::Auto) 
.expect("error submitting SCAN_COMPLETE command"); } } })?; Ok(()) } fn main() { let window = WindowDesc::new(make_ui) .title(LocalizedString::new("rmrfrs-main-window-title").with_placeholder("rmrfrs 🧹")) .window_size((1000.0, 500.0)); let launcher = AppLauncher::with_window(window); let (scan_starter_send, scan_starter_recv) = mpsc::sync_channel::<ScanStarterThreadMsg>(0); let scan_options = ScanOptions { follow_symlinks: false, same_file_system: true, }; spawn_scanner_thread( scan_starter_recv, launcher.get_external_handle(), scan_options, ) .expect("error spawning scan thread"); launcher .use_simple_logger() .launch(AppData { items: Arc::new(Vec::new()), active_item: None, scan_dir: String::new(), artifact_size: 0, non_artifact_size: 0, saved: 0, scan_complete: ScanStatus::NotStarted, scan_starter_send: Arc::new(scan_starter_send), }) .expect("launch failed"); } fn make_ui() -> impl Widget<AppData> { let mut root: Flex<AppData> = Flex::column(); root.add_child( Label::new("rmrfrs 🧹") .with_text_size(24.0) .with_text_color(Color::rgb(0.5, 0.75, 1.0)) .padding(10.0) .center(), ); root.add_child( Flex::<AppData>::row() .with_child(Label::new(|data: &AppData, _env: &_| { format!( "{} {}", data.scan_dir, match data.scan_complete { ScanStatus::Complete => "scan complete ✔️", ScanStatus::InProgrss => "scan in progress... 
📡", ScanStatus::NotStarted => "scan not started", } ) })) .with_child(Button::new("Select Directory").on_click( |ctx, _data: &mut AppData, _env| { ctx.submit_command(Command::new( SHOW_OPEN_PANEL, FileDialogOptions::new().select_directories(), Target::Auto, )); }, )) .center(), ); root.add_child( Label::new(|data: &AppData, _env: &_| { format!( "artifacts {} non-artifacts {} total {} recovered {}", pretty_size(data.artifact_size), pretty_size(data.non_artifact_size), pretty_size(data.artifact_size + data.non_artifact_size), pretty_size(data.saved) ) }) .center(), ); let mut path_listing = Flex::column(); path_listing.add_child( Label::new(|data: &AppData, _env: &_| format!("{} Project(s)", data.items.len())) .padding(10.0) .center(), ); let l = Scroll::new( List::new(|| { Button::new(|item: &Project, _env: &_| { format!( "{} ({}) {} / {}", item.display, item.p_type, pretty_size(item.artifact_size), pretty_size(item.artifact_size + item.non_artifact_size) ) }) .on_click(|_ctx, data, _env| { _ctx.submit_command(Command::new(SET_ACTIVE_ITEM, data.clone(), Target::Auto)) }) }) .lens(AppData::items) .padding(2.5), ) .vertical(); path_listing.add_flex_child(l, FlexParams::new(1.0, CrossAxisAlignment::Start)); { let mut horiz = Flex::row(); horiz.add_flex_child(path_listing, 1.0); { let mut vert = Flex::column(); vert.add_flex_child( Label::new("Active Item Information").padding(10.0).center(), FlexParams::new(0.0, CrossAxisAlignment::Start), ); vert.add_flex_child( Label::new(|data: &AppData, _env: &_| match data.active_item { Some(ref project) => project.path.clone(), None => String::from("none selected"), }), FlexParams::new(0.0, CrossAxisAlignment::Start), ); vert.add_flex_child( Label::new(|data: &AppData, _env: &_| match data.active_item { Some(ref project) => format!( "{} {} / {}, {} project", project.display, pretty_size(project.artifact_size), pretty_size(project.artifact_size + project.non_artifact_size), project.p_type ), None => String::from("none selected"), 
}), FlexParams::new(0.0, CrossAxisAlignment::Start), ); let view_switcher = ViewSwitcher::new( |data: &AppData, _env| data.active_item.clone(), |selector, _data, _env| match selector { None => Box::new(Label::new("None")), Some(project) => { let project: &Project = project; let mut l = Flex::column(); for (i, (dir_name, size, artifact)) in project.dirs.iter().enumerate() { l.add_flex_child( Label::new(format!( " {}─ {}{} {}", if i == project.dirs.len() - 1 { "└" } else { "├" }, dir_name, if *artifact { "🗑️" } else { "" }, pretty_size(*size) )), FlexParams::new(0.0, CrossAxisAlignment::Start), ); } Box::new(l) } }, ); vert.add_flex_child( view_switcher, FlexParams::new(0.0, CrossAxisAlignment::Start), ); vert.add_flex_child( Button::new("Clean project of artifacts").on_click( |ctx, data: &mut AppData, _env| { if let Some(active_item) = data.active_item.clone() { ctx.submit_command(Command::new(CLEAN_PATH, active_item, Target::Auto)); } }, ), FlexParams::new(0.0, CrossAxisAlignment::Start), ); horiz.add_flex_child( vert.padding(2.5), FlexParams::new(1.0, CrossAxisAlignment::Start), ); } root.add_flex_child(horiz, 1.0); root.add_child( Label::new("See the source, report a bug, or contribute at https://github.com/trinhminhtriet/rmrfrs 🎉") .with_text_size(18.0) .padding(10.0) .center() ) } let cw = EventHandler::new(); root.controller(cw) }
rust
MIT
6b824c29d827efbca1c8022d0b4c2d82eb8ef7c2
2026-01-04T20:24:58.804688Z
false
trinhminhtriet/rmrfrs
https://github.com/trinhminhtriet/rmrfrs/blob/6b824c29d827efbca1c8022d0b4c2d82eb8ef7c2/rmrfrs-lib/src/lib.rs
rmrfrs-lib/src/lib.rs
use std::{ borrow::Cow, error::{self, Error}, fs, path::{self, Path}, time::SystemTime, }; const FILE_CARGO_TOML: &str = "Cargo.toml"; const FILE_PACKAGE_JSON: &str = "package.json"; const FILE_ASSEMBLY_CSHARP: &str = "Assembly-CSharp.csproj"; const FILE_STACK_HASKELL: &str = "stack.yaml"; const FILE_SBT_BUILD: &str = "build.sbt"; const FILE_MVN_BUILD: &str = "pom.xml"; const FILE_BUILD_GRADLE: &str = "build.gradle"; const FILE_BUILD_GRADLE_KTS: &str = "build.gradle.kts"; const FILE_CMAKE_BUILD: &str = "CMakeLists.txt"; const FILE_UNREAL_SUFFIX: &str = ".uproject"; const FILE_JUPYTER_SUFFIX: &str = ".ipynb"; const FILE_PYTHON_SUFFIX: &str = ".py"; const FILE_PIXI_PACKAGE: &str = "pixi.toml"; const FILE_COMPOSER_JSON: &str = "composer.json"; const FILE_PUBSPEC_YAML: &str = "pubspec.yaml"; const FILE_ELIXIR_MIX: &str = "mix.exs"; const FILE_SWIFT_PACKAGE: &str = "Package.swift"; const FILE_BUILD_ZIG: &str = "build.zig"; const FILE_GODOT_4_PROJECT: &str = "project.godot"; const FILE_CSPROJ_SUFFIX: &str = ".csproj"; const FILE_FSPROJ_SUFFIX: &str = ".fsproj"; const FILE_PROJECT_TURBOREPO: &str = "turbo.json"; const PROJECT_CARGO_DIRS: [&str; 2] = ["target", ".xwin-cache"]; const PROJECT_NODE_DIRS: [&str; 2] = ["node_modules", ".angular"]; const PROJECT_UNITY_DIRS: [&str; 7] = [ "Library", "Temp", "Obj", "Logs", "MemoryCaptures", "Build", "Builds", ]; const PROJECT_STACK_DIRS: [&str; 1] = [".stack-work"]; const PROJECT_SBT_DIRS: [&str; 2] = ["target", "project/target"]; const PROJECT_MVN_DIRS: [&str; 1] = ["target"]; const PROJECT_GRADLE_DIRS: [&str; 2] = ["build", ".gradle"]; const PROJECT_CMAKE_DIRS: [&str; 3] = ["build", "cmake-build-debug", "cmake-build-release"]; const PROJECT_UNREAL_DIRS: [&str; 5] = [ "Binaries", "Build", "Saved", "DerivedDataCache", "Intermediate", ]; const PROJECT_JUPYTER_DIRS: [&str; 1] = [".ipynb_checkpoints"]; const PROJECT_PYTHON_DIRS: [&str; 9] = [ ".hypothesis", ".mypy_cache", ".nox", ".pytest_cache", ".ruff_cache", ".tox", ".venv", 
"__pycache__", "__pypackages__", ]; const PROJECT_PIXI_DIRS: [&str; 1] = [".pixi"]; const PROJECT_COMPOSER_DIRS: [&str; 1] = ["vendor"]; const PROJECT_PUB_DIRS: [&str; 4] = [ "build", ".dart_tool", "linux/flutter/ephemeral", "windows/flutter/ephemeral", ]; const PROJECT_ELIXIR_DIRS: [&str; 4] = ["_build", ".elixir-tools", ".elixir_ls", ".lexical"]; const PROJECT_SWIFT_DIRS: [&str; 2] = [".build", ".swiftpm"]; const PROJECT_ZIG_DIRS: [&str; 1] = ["zig-cache"]; const PROJECT_GODOT_4_DIRS: [&str; 1] = [".godot"]; const PROJECT_DOTNET_DIRS: [&str; 2] = ["bin", "obj"]; const PROJECT_TURBOREPO_DIRS: [&str; 1] = [".turbo"]; const PROJECT_CARGO_NAME: &str = "Cargo"; const PROJECT_NODE_NAME: &str = "Node"; const PROJECT_UNITY_NAME: &str = "Unity"; const PROJECT_STACK_NAME: &str = "Stack"; const PROJECT_SBT_NAME: &str = "SBT"; const PROJECT_MVN_NAME: &str = "Maven"; const PROJECT_GRADLE_NAME: &str = "Gradle"; const PROJECT_CMAKE_NAME: &str = "CMake"; const PROJECT_UNREAL_NAME: &str = "Unreal"; const PROJECT_JUPYTER_NAME: &str = "Jupyter"; const PROJECT_PYTHON_NAME: &str = "Python"; const PROJECT_PIXI_NAME: &str = "Pixi"; const PROJECT_COMPOSER_NAME: &str = "Composer"; const PROJECT_PUB_NAME: &str = "Pub"; const PROJECT_ELIXIR_NAME: &str = "Elixir"; const PROJECT_SWIFT_NAME: &str = "Swift"; const PROJECT_ZIG_NAME: &str = "Zig"; const PROJECT_GODOT_4_NAME: &str = "Godot 4.x"; const PROJECT_DOTNET_NAME: &str = ".NET"; const PROJECT_TURBOREPO_NAME: &str = "Turborepo"; #[derive(Debug, Clone)] pub enum ProjectType { Cargo, Node, Unity, Stack, #[allow(clippy::upper_case_acronyms)] SBT, Maven, Gradle, CMake, Unreal, Jupyter, Python, Pixi, Composer, Pub, Elixir, Swift, Zig, Godot4, Dotnet, Turborepo, } #[derive(Debug, Clone)] pub struct Project { pub project_type: ProjectType, pub path: path::PathBuf, } #[derive(Debug, Clone)] pub struct ProjectSize { pub artifact_size: u64, pub non_artifact_size: u64, pub dirs: Vec<(String, u64, bool)>, } impl Project { pub fn artifact_dirs(&self) 
-> &[&str] { match self.project_type { ProjectType::Cargo => &PROJECT_CARGO_DIRS, ProjectType::Node => &PROJECT_NODE_DIRS, ProjectType::Unity => &PROJECT_UNITY_DIRS, ProjectType::Stack => &PROJECT_STACK_DIRS, ProjectType::SBT => &PROJECT_SBT_DIRS, ProjectType::Maven => &PROJECT_MVN_DIRS, ProjectType::Unreal => &PROJECT_UNREAL_DIRS, ProjectType::Jupyter => &PROJECT_JUPYTER_DIRS, ProjectType::Python => &PROJECT_PYTHON_DIRS, ProjectType::Pixi => &PROJECT_PIXI_DIRS, ProjectType::CMake => &PROJECT_CMAKE_DIRS, ProjectType::Composer => &PROJECT_COMPOSER_DIRS, ProjectType::Pub => &PROJECT_PUB_DIRS, ProjectType::Elixir => &PROJECT_ELIXIR_DIRS, ProjectType::Swift => &PROJECT_SWIFT_DIRS, ProjectType::Gradle => &PROJECT_GRADLE_DIRS, ProjectType::Zig => &PROJECT_ZIG_DIRS, ProjectType::Godot4 => &PROJECT_GODOT_4_DIRS, ProjectType::Dotnet => &PROJECT_DOTNET_DIRS, ProjectType::Turborepo => &PROJECT_TURBOREPO_DIRS, } } pub fn name(&self) -> Cow<str> { self.path.to_string_lossy() } pub fn size(&self, options: &ScanOptions) -> u64 { self.artifact_dirs() .iter() .copied() .map(|p| dir_size(&self.path.join(p), options)) .sum() } pub fn last_modified(&self, options: &ScanOptions) -> Result<SystemTime, std::io::Error> { let top_level_modified = fs::metadata(&self.path)?.modified()?; let most_recent_modified = ignore::WalkBuilder::new(&self.path) .follow_links(options.follow_symlinks) .same_file_system(options.same_file_system) .build() .fold(top_level_modified, |acc, e| { if let Ok(e) = e { if let Ok(e) = e.metadata() { if let Ok(modified) = e.modified() { if modified > acc { return modified; } } } } acc }); Ok(most_recent_modified) } pub fn size_dirs(&self, options: &ScanOptions) -> ProjectSize { let mut artifact_size = 0; let mut non_artifact_size = 0; let mut dirs = Vec::new(); let project_root = match fs::read_dir(&self.path) { Err(_) => { return ProjectSize { artifact_size, non_artifact_size, dirs, } } Ok(rd) => rd, }; for entry in project_root.filter_map(|rd| rd.ok()) { let 
file_type = match entry.file_type() { Err(_) => continue, Ok(file_type) => file_type, }; if file_type.is_file() { if let Ok(metadata) = entry.metadata() { non_artifact_size += metadata.len(); } continue; } if file_type.is_dir() { let file_name = match entry.file_name().into_string() { Err(_) => continue, Ok(file_name) => file_name, }; let size = dir_size(&entry.path(), options); let artifact_dir = self.artifact_dirs().contains(&file_name.as_str()); if artifact_dir { artifact_size += size; } else { non_artifact_size += size; } dirs.push((file_name, size, artifact_dir)); } } ProjectSize { artifact_size, non_artifact_size, dirs, } } pub fn type_name(&self) -> &'static str { match self.project_type { ProjectType::Cargo => PROJECT_CARGO_NAME, ProjectType::Node => PROJECT_NODE_NAME, ProjectType::Unity => PROJECT_UNITY_NAME, ProjectType::Stack => PROJECT_STACK_NAME, ProjectType::SBT => PROJECT_SBT_NAME, ProjectType::Maven => PROJECT_MVN_NAME, ProjectType::Unreal => PROJECT_UNREAL_NAME, ProjectType::Jupyter => PROJECT_JUPYTER_NAME, ProjectType::Python => PROJECT_PYTHON_NAME, ProjectType::Pixi => PROJECT_PIXI_NAME, ProjectType::CMake => PROJECT_CMAKE_NAME, ProjectType::Composer => PROJECT_COMPOSER_NAME, ProjectType::Pub => PROJECT_PUB_NAME, ProjectType::Elixir => PROJECT_ELIXIR_NAME, ProjectType::Swift => PROJECT_SWIFT_NAME, ProjectType::Gradle => PROJECT_GRADLE_NAME, ProjectType::Zig => PROJECT_ZIG_NAME, ProjectType::Godot4 => PROJECT_GODOT_4_NAME, ProjectType::Dotnet => PROJECT_DOTNET_NAME, ProjectType::Turborepo => PROJECT_TURBOREPO_NAME, } } /// Deletes the project's artifact directories and their contents pub fn clean(&self) { for artifact_dir in self .artifact_dirs() .iter() .copied() .map(|ad| self.path.join(ad)) .filter(|ad| ad.exists()) { if let Err(e) = fs::remove_dir_all(&artifact_dir) { eprintln!("error removing directory {:?}: {:?}", artifact_dir, e); } } } } pub fn print_elapsed(secs: u64) -> String { const MINUTE: u64 = 60; const HOUR: u64 = MINUTE * 60; 
const DAY: u64 = HOUR * 24; const WEEK: u64 = DAY * 7; const MONTH: u64 = WEEK * 4; const YEAR: u64 = DAY * 365; let (unit, fstring) = match secs { secs if secs < MINUTE => (secs as f64, "second"), secs if secs < HOUR * 2 => (secs as f64 / MINUTE as f64, "minute"), secs if secs < DAY * 2 => (secs as f64 / HOUR as f64, "hour"), secs if secs < WEEK * 2 => (secs as f64 / DAY as f64, "day"), secs if secs < MONTH * 2 => (secs as f64 / WEEK as f64, "week"), secs if secs < YEAR * 2 => (secs as f64 / MONTH as f64, "month"), secs => (secs as f64 / YEAR as f64, "year"), }; let unit = unit.round(); let plural = if unit == 1.0 { "" } else { "s" }; format!("{unit:.0} {fstring}{plural} ago") } fn is_hidden(entry: &walkdir::DirEntry) -> bool { entry.file_name().to_string_lossy().starts_with('.') } struct ProjectIter { it: walkdir::IntoIter, } pub enum Red { IOError(::std::io::Error), WalkdirError(walkdir::Error), } impl Iterator for ProjectIter { type Item = Result<Project, Red>; fn next(&mut self) -> Option<Self::Item> { loop { let entry: walkdir::DirEntry = match self.it.next() { None => return None, Some(Err(e)) => return Some(Err(Red::WalkdirError(e))), Some(Ok(entry)) => entry, }; if !entry.file_type().is_dir() { continue; } if is_hidden(&entry) { self.it.skip_current_dir(); continue; } let rd = match entry.path().read_dir() { Err(e) => return Some(Err(Red::IOError(e))), Ok(rd) => rd, }; // intentionally ignoring errors while iterating the ReadDir // can't return them because we'll lose the context of where we are for dir_entry in rd .filter_map(|rd| rd.ok()) .filter(|de| de.file_type().map(|ft| ft.is_file()).unwrap_or(false)) .map(|de| de.file_name()) { let file_name = match dir_entry.to_str() { None => continue, Some(file_name) => file_name, }; let p_type = match file_name { FILE_CARGO_TOML => Some(ProjectType::Cargo), FILE_PACKAGE_JSON => Some(ProjectType::Node), FILE_ASSEMBLY_CSHARP => Some(ProjectType::Unity), FILE_STACK_HASKELL => Some(ProjectType::Stack), 
FILE_SBT_BUILD => Some(ProjectType::SBT), FILE_MVN_BUILD => Some(ProjectType::Maven), FILE_CMAKE_BUILD => Some(ProjectType::CMake), FILE_COMPOSER_JSON => Some(ProjectType::Composer), FILE_PUBSPEC_YAML => Some(ProjectType::Pub), FILE_PIXI_PACKAGE => Some(ProjectType::Pixi), FILE_ELIXIR_MIX => Some(ProjectType::Elixir), FILE_SWIFT_PACKAGE => Some(ProjectType::Swift), FILE_BUILD_GRADLE => Some(ProjectType::Gradle), FILE_BUILD_GRADLE_KTS => Some(ProjectType::Gradle), FILE_BUILD_ZIG => Some(ProjectType::Zig), FILE_GODOT_4_PROJECT => Some(ProjectType::Godot4), FILE_PROJECT_TURBOREPO => Some(ProjectType::Turborepo), file_name if file_name.ends_with(FILE_UNREAL_SUFFIX) => { Some(ProjectType::Unreal) } file_name if file_name.ends_with(FILE_JUPYTER_SUFFIX) => { Some(ProjectType::Jupyter) } file_name if file_name.ends_with(FILE_PYTHON_SUFFIX) => { Some(ProjectType::Python) } file_name if file_name.ends_with(FILE_CSPROJ_SUFFIX) || file_name.ends_with(FILE_FSPROJ_SUFFIX) => { if dir_contains_file(entry.path(), FILE_GODOT_4_PROJECT) { Some(ProjectType::Godot4) } else if dir_contains_file(entry.path(), FILE_ASSEMBLY_CSHARP) { Some(ProjectType::Unity) } else { Some(ProjectType::Dotnet) } } _ => None, }; if let Some(project_type) = p_type { self.it.skip_current_dir(); return Some(Ok(Project { project_type, path: entry.path().to_path_buf(), })); } } } } } fn dir_contains_file(path: &Path, file: &str) -> bool { path.read_dir() .map(|rd| { rd.filter_map(|rd| rd.ok()).any(|de| { de.file_type().is_ok_and(|t| t.is_file()) && de.file_name().to_str() == Some(file) }) }) .unwrap_or(false) } #[derive(Clone, Debug)] pub struct ScanOptions { pub follow_symlinks: bool, pub same_file_system: bool, } fn build_walkdir_iter<P: AsRef<path::Path>>(path: &P, options: &ScanOptions) -> ProjectIter { ProjectIter { it: walkdir::WalkDir::new(path) .follow_links(options.follow_symlinks) .same_file_system(options.same_file_system) .into_iter(), } } pub fn scan<P: AsRef<path::Path>>( path: &P, options: 
&ScanOptions, ) -> impl Iterator<Item = Result<Project, Red>> { build_walkdir_iter(path, options) } // TODO does this need to exist as is?? pub fn dir_size<P: AsRef<path::Path>>(path: &P, options: &ScanOptions) -> u64 { build_walkdir_iter(path, options) .it .filter_map(|e| e.ok()) .filter(|e| e.file_type().is_file()) .filter_map(|e| e.metadata().ok()) .map(|e| e.len()) .sum() } pub fn pretty_size(size: u64) -> String { const KIBIBYTE: u64 = 1024; const MEBIBYTE: u64 = 1_048_576; const GIBIBYTE: u64 = 1_073_741_824; const TEBIBYTE: u64 = 1_099_511_627_776; const PEBIBYTE: u64 = 1_125_899_906_842_624; const EXBIBYTE: u64 = 1_152_921_504_606_846_976; let (size, symbol) = match size { size if size < KIBIBYTE => (size as f64, "B"), size if size < MEBIBYTE => (size as f64 / KIBIBYTE as f64, "KiB"), size if size < GIBIBYTE => (size as f64 / MEBIBYTE as f64, "MiB"), size if size < TEBIBYTE => (size as f64 / GIBIBYTE as f64, "GiB"), size if size < PEBIBYTE => (size as f64 / TEBIBYTE as f64, "TiB"), size if size < EXBIBYTE => (size as f64 / PEBIBYTE as f64, "PiB"), _ => (size as f64 / EXBIBYTE as f64, "EiB"), }; format!("{:.1}{}", size, symbol) } pub fn clean(project_path: &str) -> Result<(), Box<dyn error::Error>> { let project = fs::read_dir(project_path)? 
.filter_map(|rd| rd.ok()) .find_map(|dir_entry| { let file_name = dir_entry.file_name().into_string().ok()?; let p_type = match file_name.as_str() { FILE_CARGO_TOML => Some(ProjectType::Cargo), FILE_PACKAGE_JSON => Some(ProjectType::Node), FILE_ASSEMBLY_CSHARP => Some(ProjectType::Unity), FILE_STACK_HASKELL => Some(ProjectType::Stack), FILE_SBT_BUILD => Some(ProjectType::SBT), FILE_MVN_BUILD => Some(ProjectType::Maven), FILE_CMAKE_BUILD => Some(ProjectType::CMake), FILE_COMPOSER_JSON => Some(ProjectType::Composer), FILE_PUBSPEC_YAML => Some(ProjectType::Pub), FILE_PIXI_PACKAGE => Some(ProjectType::Pixi), FILE_ELIXIR_MIX => Some(ProjectType::Elixir), FILE_SWIFT_PACKAGE => Some(ProjectType::Swift), FILE_BUILD_ZIG => Some(ProjectType::Zig), FILE_GODOT_4_PROJECT => Some(ProjectType::Godot4), _ => None, }; if let Some(project_type) = p_type { return Some(Project { project_type, path: project_path.into(), }); } None }); if let Some(project) = project { for artifact_dir in project .artifact_dirs() .iter() .copied() .map(|ad| path::PathBuf::from(project_path).join(ad)) .filter(|ad| ad.exists()) { if let Err(e) = fs::remove_dir_all(&artifact_dir) { eprintln!("error removing directory {:?}: {:?}", artifact_dir, e); } } } Ok(()) } pub fn path_canonicalise( base: &path::Path, tail: path::PathBuf, ) -> Result<path::PathBuf, Box<dyn Error>> { if tail.is_absolute() { Ok(tail) } else { Ok(base.join(tail).canonicalize()?) 
} } #[cfg(test)] mod tests { use super::print_elapsed; #[test] fn elapsed() { assert_eq!(print_elapsed(0), "0 seconds ago"); assert_eq!(print_elapsed(1), "1 second ago"); assert_eq!(print_elapsed(2), "2 seconds ago"); assert_eq!(print_elapsed(59), "59 seconds ago"); assert_eq!(print_elapsed(60), "1 minute ago"); assert_eq!(print_elapsed(61), "1 minute ago"); assert_eq!(print_elapsed(119), "2 minutes ago"); assert_eq!(print_elapsed(120), "2 minutes ago"); assert_eq!(print_elapsed(121), "2 minutes ago"); assert_eq!(print_elapsed(3599), "60 minutes ago"); assert_eq!(print_elapsed(3600), "60 minutes ago"); assert_eq!(print_elapsed(3601), "60 minutes ago"); assert_eq!(print_elapsed(7199), "120 minutes ago"); assert_eq!(print_elapsed(7200), "2 hours ago"); assert_eq!(print_elapsed(7201), "2 hours ago"); assert_eq!(print_elapsed(86399), "24 hours ago"); assert_eq!(print_elapsed(86400), "24 hours ago"); assert_eq!(print_elapsed(86401), "24 hours ago"); assert_eq!(print_elapsed(172799), "48 hours ago"); assert_eq!(print_elapsed(172800), "2 days ago"); assert_eq!(print_elapsed(172801), "2 days ago"); assert_eq!(print_elapsed(604799), "7 days ago"); assert_eq!(print_elapsed(604800), "7 days ago"); assert_eq!(print_elapsed(604801), "7 days ago"); assert_eq!(print_elapsed(1209599), "14 days ago"); assert_eq!(print_elapsed(1209600), "2 weeks ago"); assert_eq!(print_elapsed(1209601), "2 weeks ago"); assert_eq!(print_elapsed(2419199), "4 weeks ago"); assert_eq!(print_elapsed(2419200), "4 weeks ago"); assert_eq!(print_elapsed(2419201), "4 weeks ago"); assert_eq!(print_elapsed(2419200 * 2), "2 months ago"); assert_eq!(print_elapsed(2419200 * 3), "3 months ago"); assert_eq!(print_elapsed(2419200 * 12), "12 months ago"); assert_eq!(print_elapsed(2419200 * 25), "25 months ago"); assert_eq!(print_elapsed(2419200 * 48), "4 years ago"); } }
rust
MIT
6b824c29d827efbca1c8022d0b4c2d82eb8ef7c2
2026-01-04T20:24:58.804688Z
false
0xMiden/crypto
https://github.com/0xMiden/crypto/blob/b30552ecceb5f70565cc0267fca227f30c5af7ab/miden-crypto-fuzz/fuzz_targets/smt.rs
miden-crypto-fuzz/fuzz_targets/smt.rs
#![no_main] use libfuzzer_sys::fuzz_target; use miden_crypto::{Felt, ONE, Word, merkle::smt::Smt}; use rand::Rng; // Needed for randomizing the split percentage struct FuzzInput { entries: Vec<(Word, Word)>, updates: Vec<(Word, Word)>, } impl FuzzInput { fn from_bytes(data: &[u8]) -> Self { let mut rng = rand::rng(); let split_percentage = rng.random_range(20..80); // Randomly choose between 20% and 80% let split_index = (data.len() * split_percentage) / 100; let (construction_data, update_data) = data.split_at(split_index); let entries = Self::parse_entries(construction_data); let updates = Self::parse_entries(update_data); Self { entries, updates } } fn parse_entries(data: &[u8]) -> Vec<(Word, Word)> { let mut entries = Vec::new(); let num_entries = data.len() / 40; // Each entry is 40 bytes for chunk in data.chunks_exact(40).take(num_entries) { let key = Word::new([ Felt::new(u64::from_le_bytes(chunk[0..8].try_into().unwrap())), Felt::new(u64::from_le_bytes(chunk[8..16].try_into().unwrap())), Felt::new(u64::from_le_bytes(chunk[16..24].try_into().unwrap())), Felt::new(u64::from_le_bytes(chunk[24..32].try_into().unwrap())), ]); let value = [ONE, ONE, ONE, Felt::new(u64::from_le_bytes(chunk[32..40].try_into().unwrap()))] .into(); entries.push((key, value)); } // Sort entries by key to ensure deterministic processing order between // sequential and concurrent implementations. 
entries.sort_by_key(|(key, _)| *key); entries } } fuzz_target!(|data: &[u8]| { let fuzz_input = FuzzInput::from_bytes(data); run_fuzz_smt(fuzz_input); }); fn run_fuzz_smt(fuzz_input: FuzzInput) { let sequential_result = Smt::fuzz_with_entries_sequential(fuzz_input.entries.clone()); let parallel_result = Smt::with_entries(fuzz_input.entries); match (sequential_result, parallel_result) { (Ok(sequential_smt), Ok(parallel_smt)) => { assert_eq!(sequential_smt.root(), parallel_smt.root(), "Mismatch in SMT roots!"); let sequential_mutations = sequential_smt.fuzz_compute_mutations_sequential(fuzz_input.updates.clone()); let parallel_mutations = parallel_smt .compute_mutations(fuzz_input.updates) .expect("Failed to compute mutations for parallel"); assert_eq!( sequential_mutations.root(), parallel_mutations.root(), "Mismatch in mutation results!" ); assert_eq!( sequential_mutations.node_mutations(), parallel_mutations.node_mutations(), "Node mutations mismatch!" ); assert_eq!( sequential_mutations.new_pairs(), parallel_mutations.new_pairs(), "New pairs mismatch!" ); }, (Err(e1), Err(e2)) => { assert_eq!(format!("{:?}", e1), format!("{:?}", e2), "Different errors returned"); }, (Ok(_), Err(e)) => panic!("Sequential succeeded but parallel failed with: {:?}", e), (Err(e), Ok(_)) => panic!("Parallel succeeded but sequential failed with: {:?}", e), } }
rust
Apache-2.0
b30552ecceb5f70565cc0267fca227f30c5af7ab
2026-01-04T20:24:48.363198Z
false
0xMiden/crypto
https://github.com/0xMiden/crypto/blob/b30552ecceb5f70565cc0267fca227f30c5af7ab/miden-crypto-derive/src/lib.rs
miden-crypto-derive/src/lib.rs
use proc_macro::TokenStream; use quote::quote; use syn::{DeriveInput, parse_macro_input}; /// Derives a Debug implementation that elides secret values. /// /// This macro generates a Debug implementation that outputs `<elided secret for TypeName>` /// instead of the actual field values, preventing accidental leakage of sensitive data /// in logs, error messages, or debug output. /// /// # Example /// /// ```ignore /// #[derive(SilentDebug)] /// pub struct SecretKey { /// inner: [u8; 32], /// } /// /// let sk = SecretKey { inner: [0u8; 32] }; /// assert_eq!(format!("{:?}", sk), "<elided secret for SecretKey>"); /// ``` #[proc_macro_derive(SilentDebug)] pub fn silent_debug(input: TokenStream) -> TokenStream { let ast = parse_macro_input!(input as DeriveInput); let name = &ast.ident; let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); let expanded = quote! { // In order to ensure that secrets are never leaked, Debug is elided impl #impl_generics ::core::fmt::Debug for #name #ty_generics #where_clause { fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { write!(f, "<elided secret for {}>", stringify!(#name)) } } }; TokenStream::from(expanded) } /// Derives a Display implementation that elides secret values. /// /// This macro generates a Display implementation that outputs `<elided secret for TypeName>` /// instead of the actual field values. While implementing Display for secret keys is /// generally discouraged (as Display implies "user-facing output"), this safe implementation /// prevents compilation errors in generic contexts while still protecting sensitive data. 
/// /// # Example /// /// ```ignore /// #[derive(SilentDisplay)] /// pub struct SecretKey { /// inner: [u8; 32], /// } /// /// let sk = SecretKey { inner: [0u8; 32] }; /// assert_eq!(format!("{}", sk), "<elided secret for SecretKey>"); /// ``` #[proc_macro_derive(SilentDisplay)] pub fn silent_display(input: TokenStream) -> TokenStream { let ast = parse_macro_input!(input as DeriveInput); let name = &ast.ident; let (impl_generics, ty_generics, where_clause) = ast.generics.split_for_impl(); let expanded = quote! { // In order to ensure that secrets are never leaked, Display is elided impl #impl_generics ::core::fmt::Display for #name #ty_generics #where_clause { fn fmt(&self, f: &mut ::core::fmt::Formatter<'_>) -> ::core::fmt::Result { write!(f, "<elided secret for {}>", stringify!(#name)) } } }; TokenStream::from(expanded) }
rust
Apache-2.0
b30552ecceb5f70565cc0267fca227f30c5af7ab
2026-01-04T20:24:48.363198Z
false
0xMiden/crypto
https://github.com/0xMiden/crypto/blob/b30552ecceb5f70565cc0267fca227f30c5af7ab/miden-crypto/build.rs
miden-crypto/build.rs
fn main() {
    // The SVE backend is only compiled in when the `sve` target feature is set.
    #[cfg(target_feature = "sve")]
    compile_arch_arm64_sve();
}

/// Compiles the C sources of the SVE-accelerated RPO implementation and links
/// them in as the `rpo_sve` static library.
///
/// NOTE(review): `cfg(target_feature = ...)` inside a build script is evaluated
/// for the machine the build script itself is compiled for, which may differ
/// from the final compilation target when cross-compiling — confirm this is the
/// intended gating.
#[cfg(target_feature = "sve")]
fn compile_arch_arm64_sve() {
    const RPO_SVE_PATH: &str = "arch/arm64-sve/rpo";

    // Ask cargo to re-run this build script whenever one of the C sources changes.
    println!("cargo:rerun-if-changed={RPO_SVE_PATH}/library.c");
    println!("cargo:rerun-if-changed={RPO_SVE_PATH}/library.h");
    println!("cargo:rerun-if-changed={RPO_SVE_PATH}/rpo_hash.h");

    let mut build = cc::Build::new();
    build.file(format!("{RPO_SVE_PATH}/library.c"));
    build.flag("-march=armv8-a+sve");
    build.flag("-O3");
    build.compile("rpo_sve");
}
rust
Apache-2.0
b30552ecceb5f70565cc0267fca227f30c5af7ab
2026-01-04T20:24:48.363198Z
false
0xMiden/crypto
https://github.com/0xMiden/crypto/blob/b30552ecceb5f70565cc0267fca227f30c5af7ab/miden-crypto/src/lib.rs
miden-crypto/src/lib.rs
#![no_std]

#[macro_use]
extern crate alloc;

#[cfg(feature = "std")]
extern crate std;

// Brings the trait providing the `Felt::ZERO` / `Felt::ONE` associated
// constants into scope; used by the ZERO/ONE constants below.
use field::PrimeCharacteristicRing;

pub mod aead;
pub mod dsa;
pub mod ecdh;
pub mod hash;
pub mod ies;
pub mod merkle;
pub mod rand;
pub mod utils;
pub mod word;

// RE-EXPORTS
// ================================================================================================

pub use p3_miden_goldilocks::Goldilocks as Felt;
pub use word::{Word, WordError};

pub mod field {
    //! Traits and utilities for working with the Goldilocks finite field (i.e.,
    //! [Felt](super::Felt)).
    pub use p3_field::{
        BasedVectorSpace, ExtensionField, Field, PrimeCharacteristicRing, PrimeField64,
        TwoAdicField, batch_multiplicative_inverse, extension::BinomialExtensionField,
        integers::QuotientMap,
    };
}

pub mod stark {
    //! Foundational components for the STARK proving system based on Plonky3.
    //!
    //! This module contains components needed to build a STARK prover/verifier and define
    //! Algebraic Intermediate Representation (AIR) for the Miden VM and other components.
    //! It primarily consists of re-exports from the Plonky3 project with some Miden-specific
    //! adaptations.
    pub use p3_miden_prover::{
        Commitments, Domain, Entry, OpenedValues, PackedChallenge, PackedVal, PcsError, Proof,
        ProverConstraintFolder, StarkConfig, StarkGenericConfig, SymbolicAirBuilder,
        SymbolicExpression, SymbolicVariable, Val, VerificationError, VerifierConstraintFolder,
        generate_logup_trace, get_log_quotient_degree, get_max_constraint_degree,
        get_symbolic_constraints, prove, quotient_values, recompose_quotient_from_chunks, verify,
        verify_constraints,
    };

    pub mod challenger {
        pub use p3_challenger::{HashChallenger, SerializingChallenger64};
    }

    pub mod symmetric {
        pub use p3_symmetric::{
            CompressionFunctionFromHasher, PaddingFreeSponge, SerializingHasher,
        };
    }

    pub mod air {
        pub use p3_air::{
            Air, AirBuilder, AirBuilderWithPublicValues, BaseAir, BaseAirWithPublicValues,
            ExtensionBuilder, FilteredAirBuilder, PairBuilder, PairCol, PermutationAirBuilder,
            VirtualPairCol,
        };
        pub use p3_miden_air::{
            BaseAirWithAuxTrace, FilteredMidenAirBuilder, MidenAir, MidenAirBuilder,
        };
    }
}

// TYPE ALIASES
// ================================================================================================

/// An alias for a key-value map.
///
/// By default, this is an alias for the [`alloc::collections::BTreeMap`], however, when the
/// `hashmaps` feature is enabled, this is an alias for the `hashbrown`'s `HashMap`.
#[cfg(feature = "hashmaps")]
pub type Map<K, V> = hashbrown::HashMap<K, V>;
#[cfg(feature = "hashmaps")]
pub use hashbrown::hash_map::Entry as MapEntry;

/// An alias for a key-value map.
///
/// By default, this is an alias for the [`alloc::collections::BTreeMap`], however, when the
/// `hashmaps` feature is enabled, this is an alias for the `hashbrown`'s `HashMap`.
#[cfg(not(feature = "hashmaps"))]
pub type Map<K, V> = alloc::collections::BTreeMap<K, V>;
#[cfg(not(feature = "hashmaps"))]
pub use alloc::collections::btree_map::Entry as MapEntry;

/// An alias for a simple set.
///
/// By default, this is an alias for the [`alloc::collections::BTreeSet`]. However, when the
/// `hashmaps` feature is enabled, this becomes an alias for hashbrown's HashSet.
#[cfg(feature = "hashmaps")]
pub type Set<V> = hashbrown::HashSet<V>;

/// An alias for a simple set.
///
/// By default, this is an alias for the [`alloc::collections::BTreeSet`]. However, when the
/// `hashmaps` feature is enabled, this becomes an alias for hashbrown's HashSet.
#[cfg(not(feature = "hashmaps"))]
pub type Set<V> = alloc::collections::BTreeSet<V>;

// CONSTANTS
// ================================================================================================

/// Number of field elements in a word.
pub const WORD_SIZE: usize = 4;

/// Field element representing ZERO in the Miden base field.
pub const ZERO: Felt = Felt::ZERO;

/// Field element representing ONE in the Miden base field.
pub const ONE: Felt = Felt::ONE;

/// Array of field elements representing word of ZEROs in the Miden base field.
pub const EMPTY_WORD: Word = Word::new([ZERO; WORD_SIZE]);

// TRAITS
// ================================================================================================

/// Defines how to compute a commitment to an object represented as a sequence of field elements.
pub trait SequentialCommit {
    /// A type of the commitment which must be derivable from [Word].
    type Commitment: From<Word>;

    /// Computes the commitment to the object.
    ///
    /// The default implementation of this function uses RPO256 hash function to hash the sequence
    /// of elements returned from [Self::to_elements()].
    fn to_commitment(&self) -> Self::Commitment {
        hash::rpo::Rpo256::hash_elements(&self.to_elements()).into()
    }

    /// Returns a representation of the object as a sequence of field elements.
    fn to_elements(&self) -> alloc::vec::Vec<Felt>;
}

// TESTS
// ================================================================================================

#[test]
#[should_panic]
fn debug_assert_is_checked() {
    // enforce the release checks to always have `RUSTFLAGS="-C debug-assertions"`.
    //
    // some upstream tests are performed with `debug_assert`, and we want to assert its correctness
    // downstream.
    //
    // for reference, check
    // https://github.com/0xMiden/miden-vm/issues/433
    debug_assert!(false);
}

#[test]
#[should_panic]
#[allow(arithmetic_overflow)]
fn overflow_panics_for_test() {
    // overflows might be disabled if tests are performed in release mode. these are critical,
    // mandatory checks as overflows might be attack vectors.
    //
    // to enable overflow checks in release mode, ensure `RUSTFLAGS="-C overflow-checks"`
    let a = 1_u64;
    let b = 64;
    assert_ne!(a << b, 0);
}
rust
Apache-2.0
b30552ecceb5f70565cc0267fca227f30c5af7ab
2026-01-04T20:24:48.363198Z
false
0xMiden/crypto
https://github.com/0xMiden/crypto/blob/b30552ecceb5f70565cc0267fca227f30c5af7ab/miden-crypto/src/main.rs
miden-crypto/src/main.rs
use std::{path::PathBuf, time::Instant}; use clap::{Parser, ValueEnum}; #[cfg(feature = "rocksdb")] use miden_crypto::merkle::smt::{RocksDbConfig, RocksDbStorage}; use miden_crypto::{ EMPTY_WORD, Felt, ONE, Word, hash::rpo::Rpo256, merkle::smt::{LargeSmt, LargeSmtError, MemoryStorage, SmtStorage}, rand::test_utils::rand_value, }; use rand::{Rng, prelude::IteratorRandom, rng}; type Storage = Box<dyn SmtStorage>; #[derive(Parser, Debug)] #[command(name = "Benchmark", about = "SMT benchmark", version, rename_all = "kebab-case")] pub struct BenchmarkCmd { /// Size of the tree #[arg(short = 's', long = "size", default_value = "1000000")] size: usize, /// Number of insertions #[arg(short = 'i', long = "insertions", default_value = "10000")] insertions: usize, /// Number of updates #[arg(short = 'u', long = "updates", default_value = "10000")] updates: usize, /// Path for the benchmark database #[clap(short = 'p', long = "path")] storage_path: Option<PathBuf>, /// Open existing database and skip construction #[clap(short = 'o', long = "open", default_value = "false")] open: bool, /// Number of batch operations #[clap(short = 'b', long = "batches", default_value = "1")] batches: usize, /// Storage backend to use at runtime: memory or rocksdb #[arg(short = 's', long = "storage", value_enum, default_value = "memory")] storage: StorageKind, } #[derive(Copy, Clone, Debug, Eq, PartialEq, ValueEnum)] pub enum StorageKind { Memory, Rocksdb, } fn main() { benchmark_smt(); println!("Benchmark completed successfully"); } /// Run a benchmark for [`Smt`]. 
pub fn benchmark_smt() { let args = BenchmarkCmd::parse(); let tree_size = args.size; let insertions = args.insertions; let updates = args.updates; let storage_path = args.storage_path; let batches = args.batches; println!( "Running benchmark with {} storage", match args.storage { StorageKind::Memory => "memory", StorageKind::Rocksdb => "rocksdb", } ); assert!(updates <= tree_size, "Cannot update more than `size`"); // prepare the `leaves` vector for tree creation let mut entries = Vec::new(); for i in 0..tree_size { let key = rand_value::<Word>(); let value = Word::new([ONE, ONE, ONE, Felt::new(i as u64)]); entries.push((key, value)); } let mut tree = if args.open { open_existing(storage_path, args.storage).unwrap() } else { construction(entries.clone(), tree_size, storage_path, args.storage).unwrap() }; insertion(&mut tree, insertions).unwrap(); for _ in 0..batches { batched_insertion(&mut tree, insertions).unwrap(); batched_update(&mut tree, &entries, updates).unwrap(); } proof_generation(&mut tree).unwrap(); } /// Runs the construction benchmark for [`Smt`], returning the constructed tree. 
pub fn construction( entries: Vec<(Word, Word)>, size: usize, database_path: Option<PathBuf>, storage: StorageKind, ) -> Result<LargeSmt<Storage>, LargeSmtError> { println!("Running a construction benchmark:"); let now = Instant::now(); let storage = get_storage(database_path, false, storage); let tree = LargeSmt::with_entries(storage, entries)?; let elapsed = now.elapsed().as_secs_f32(); println!("Constructed an SMT with {size} key-value pairs in {elapsed:.1} seconds"); println!("Number of leaf nodes: {}\n", tree.num_leaves()); Ok(tree) } pub fn open_existing( storage_path: Option<PathBuf>, storage: StorageKind, ) -> Result<LargeSmt<Storage>, LargeSmtError> { println!("Opening an existing database:"); let now = Instant::now(); let storage = get_storage(storage_path, true, storage); let tree = LargeSmt::load(storage)?; let elapsed = now.elapsed().as_secs_f32(); println!("Opened an existing database in {elapsed:.1} seconds"); Ok(tree) } /// Runs the insertion benchmark for the [`Smt`]. pub fn insertion(tree: &mut LargeSmt<Storage>, insertions: usize) -> Result<(), LargeSmtError> { println!("Running an insertion benchmark:"); let size = tree.num_leaves(); let mut insertion_times = Vec::new(); for i in 0..insertions { let test_key = Rpo256::hash(&rand_value::<u64>().to_be_bytes()); let test_value = Word::new([ONE, ONE, ONE, Felt::new((size + i) as u64)]); let now = Instant::now(); tree.insert(test_key, test_value)?; let elapsed = now.elapsed(); insertion_times.push(elapsed.as_micros()); } println!( "The average insertion time measured by {insertions} inserts into an SMT with {size} leaves is {:.0} μs\n", // calculate the average insertion_times.iter().sum::<u128>() as f64 / (insertions as f64), ); Ok(()) } pub fn batched_insertion( tree: &mut LargeSmt<Storage>, insertions: usize, ) -> Result<(), LargeSmtError> { println!("Running a batched insertion benchmark:"); let size = tree.num_leaves(); let new_pairs: Vec<(Word, Word)> = (0..insertions) .map(|i| { let key = 
Rpo256::hash(&rand_value::<u64>().to_be_bytes()); let value = Word::new([ONE, ONE, ONE, Felt::new((size + i) as u64)]); (key, value) }) .collect(); let now = Instant::now(); let mutations = tree.compute_mutations(new_pairs)?; let compute_elapsed = now.elapsed().as_secs_f64() * 1000_f64; // time in ms println!( "The average insert-batch computation time measured by a {insertions}-batch into an SMT with {size} leaves over {:.1} ms is {:.0} μs", compute_elapsed, compute_elapsed * 1000_f64 / insertions as f64, // time in μs ); let now = Instant::now(); tree.apply_mutations(mutations)?; let apply_elapsed = now.elapsed().as_secs_f64() * 1000_f64; // time in ms println!( "The average insert-batch application time measured by a {insertions}-batch into an SMT with {size} leaves over {:.1} ms is {:.0} μs", apply_elapsed, apply_elapsed * 1000_f64 / insertions as f64, // time in μs ); println!( "The average batch insertion time measured by a {insertions}-batch into an SMT with {size} leaves totals to {:.1} ms", (compute_elapsed + apply_elapsed), ); println!(); Ok(()) } pub fn batched_update( tree: &mut LargeSmt<Storage>, entries: &[(Word, Word)], updates: usize, ) -> Result<(), LargeSmtError> { const REMOVAL_PROBABILITY: f64 = 0.2; println!("Running a batched update benchmark:"); let size = tree.num_leaves(); let mut rng = rng(); let new_pairs = entries.iter().choose_multiple(&mut rng, updates).into_iter().map(|&(key, _)| { let value = if rng.random_bool(REMOVAL_PROBABILITY) { EMPTY_WORD } else { Word::new([ONE, ONE, ONE, Felt::new(rng.random())]) }; (key, value) }); assert_eq!(new_pairs.len(), updates); let now = Instant::now(); let mutations = tree.compute_mutations(new_pairs)?; let compute_elapsed = now.elapsed().as_secs_f64() * 1000_f64; // time in ms let now = Instant::now(); tree.apply_mutations(mutations)?; let apply_elapsed = now.elapsed().as_secs_f64() * 1000_f64; // time in ms println!( "The average update-batch computation time measured by a {updates}-batch into an 
SMT with {size} leaves over {:.1} ms is {:.0} μs", compute_elapsed, compute_elapsed * 1000_f64 / updates as f64, // time in μs ); println!( "The average update-batch application time measured by a {updates}-batch into an SMT with {size} leaves over {:.1} ms is {:.0} μs", apply_elapsed, apply_elapsed * 1000_f64 / updates as f64, // time in μs ); println!( "The average batch update time measured by a {updates}-batch into an SMT with {size} leaves totals to {:.1} ms", (compute_elapsed + apply_elapsed), ); println!(); Ok(()) } /// Runs the proof generation benchmark for the [`Smt`]. pub fn proof_generation(tree: &mut LargeSmt<Storage>) -> Result<(), LargeSmtError> { const NUM_PROOFS: usize = 100; println!("Running a proof generation benchmark:"); let mut opening_times = Vec::new(); let size = tree.num_leaves(); // fetch keys already in the tree to be opened let keys = tree .leaves()? .take(NUM_PROOFS) .map(|(_, leaf)| leaf.entries()[0].0) .collect::<Vec<_>>(); for key in keys { let now = Instant::now(); let _proof = tree.open(&key); opening_times.push(now.elapsed().as_micros()); } println!( "The average proving time measured by {NUM_PROOFS} value proofs in an SMT with {size} leaves in {:.0} μs", // calculate the average opening_times.iter().sum::<u128>() as f64 / (NUM_PROOFS as f64), ); Ok(()) } #[allow(unused_variables)] fn get_storage(database_path: Option<PathBuf>, open: bool, kind: StorageKind) -> Storage { match kind { StorageKind::Memory => Box::new(MemoryStorage::new()), StorageKind::Rocksdb => { #[cfg(feature = "rocksdb")] { let path = database_path .unwrap_or_else(|| std::env::temp_dir().join("miden_crypto_benchmark")); println!("Using database path: {}", path.display()); if !open { // delete the folder if it exists as we are creating a new database if path.exists() { std::fs::remove_dir_all(path.clone()).unwrap(); } std::fs::create_dir_all(path.clone()) .expect("Failed to create database directory"); } let db = RocksDbStorage::open( 
RocksDbConfig::new(path).with_cache_size(1 << 30).with_max_open_files(2048), ) .expect("Failed to open database"); Box::new(db) } #[cfg(not(feature = "rocksdb"))] { eprintln!("rocksdb feature not enabled; falling back to memory storage"); Box::new(MemoryStorage::new()) } }, } }
rust
Apache-2.0
b30552ecceb5f70565cc0267fca227f30c5af7ab
2026-01-04T20:24:48.363198Z
false
0xMiden/crypto
https://github.com/0xMiden/crypto/blob/b30552ecceb5f70565cc0267fca227f30c5af7ab/miden-crypto/src/dsa/mod.rs
miden-crypto/src/dsa/mod.rs
//! Digital signature schemes supported by default in the Miden VM. pub mod ecdsa_k256_keccak; pub mod eddsa_25519_sha512; pub mod falcon512_rpo;
rust
Apache-2.0
b30552ecceb5f70565cc0267fca227f30c5af7ab
2026-01-04T20:24:48.363198Z
false
0xMiden/crypto
https://github.com/0xMiden/crypto/blob/b30552ecceb5f70565cc0267fca227f30c5af7ab/miden-crypto/src/dsa/eddsa_25519_sha512/tests.rs
miden-crypto/src/dsa/eddsa_25519_sha512/tests.rs
use super::*; #[test] fn sign_and_verify_roundtrip() { use rand::rng; let mut rng = rng(); let sk = SecretKey::with_rng(&mut rng); let pk = sk.public_key(); let msg = Word::default(); // all zeros let sig = sk.sign(msg); assert!(pk.verify(msg, &sig)); } #[test] fn test_key_generation_serialization() { let mut rng = rand::rng(); let sk = SecretKey::with_rng(&mut rng); let pk = sk.public_key(); // Secret key -> bytes -> recovered secret key let sk_bytes = sk.to_bytes(); let serialized_sk = SecretKey::read_from_bytes(&sk_bytes) .expect("deserialization of valid secret key bytes should succeed"); assert_eq!(sk.to_bytes(), serialized_sk.to_bytes()); // Public key -> bytes -> recovered public key let pk_bytes = pk.to_bytes(); let serialized_pk = PublicKey::read_from_bytes(&pk_bytes) .expect("deserialization of valid public key bytes should succeed"); assert_eq!(pk, serialized_pk); } #[test] fn test_secret_key_debug_redaction() { let mut rng = rand::rng(); let sk = SecretKey::with_rng(&mut rng); // Verify Debug impl produces expected redacted output let debug_output = format!("{sk:?}"); assert_eq!(debug_output, "<elided secret for SecretKey>"); // Verify Display impl also elides let display_output = format!("{sk}"); assert_eq!(display_output, "<elided secret for SecretKey>"); } #[test] fn test_compute_challenge_k_equivalence() { let mut rng = rand::rng(); let sk = SecretKey::with_rng(&mut rng); let pk = sk.public_key(); // Test with multiple different messages let messages = [ Word::default(), Word::from([Felt::new(1), Felt::new(2), Felt::new(3), Felt::new(4)]), Word::from([Felt::new(42), Felt::new(100), Felt::new(255), Felt::new(1000)]), ]; for message in messages { let signature = sk.sign(message); // Compute the challenge hash using the helper method let k_hash = pk.compute_challenge_k(message, &signature); // Verify using verify_with_unchecked_k should give the same result as verify() let result_with_k = pk.verify_with_unchecked_k(k_hash, &signature).is_ok(); let 
result_standard = pk.verify(message, &signature); assert_eq!( result_with_k, result_standard, "verify_with_unchecked_k(compute_challenge_k(...)) should equal verify()" ); assert!(result_standard, "Signature should be valid"); // Test with wrong message - both should fail let wrong_message = Word::from([Felt::new(999), Felt::new(888), Felt::new(777), Felt::new(666)]); let wrong_k_hash = pk.compute_challenge_k(wrong_message, &signature); assert!(matches!( pk.verify_with_unchecked_k(wrong_k_hash, &signature), Err(UncheckedVerificationError::EquationMismatch) )); assert!(!pk.verify(wrong_message, &signature), "verify with wrong message should fail"); } }
rust
Apache-2.0
b30552ecceb5f70565cc0267fca227f30c5af7ab
2026-01-04T20:24:48.363198Z
false
0xMiden/crypto
https://github.com/0xMiden/crypto/blob/b30552ecceb5f70565cc0267fca227f30c5af7ab/miden-crypto/src/dsa/eddsa_25519_sha512/mod.rs
miden-crypto/src/dsa/eddsa_25519_sha512/mod.rs
//! Ed25519 (EdDSA) signature implementation using Curve25519 and SHA-512 to hash //! the messages when signing. use alloc::{string::ToString, vec::Vec}; use ed25519_dalek::{Signer, Verifier}; use miden_crypto_derive::{SilentDebug, SilentDisplay}; use rand::{CryptoRng, RngCore}; use thiserror::Error; use crate::{ Felt, SequentialCommit, Word, ecdh::x25519::{EphemeralPublicKey, SharedSecret}, utils::{ ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable, bytes_to_packed_u32_elements, zeroize::{Zeroize, ZeroizeOnDrop}, }, }; #[cfg(all(test, feature = "std"))] mod tests; // CONSTANTS // ================================================================================================ /// Length of secret key in bytes const SECRET_KEY_BYTES: usize = 32; /// Length of public key in bytes pub(crate) const PUBLIC_KEY_BYTES: usize = 32; /// Length of signature in bytes const SIGNATURE_BYTES: usize = 64; // SECRET KEY // ================================================================================================ /// Secret key for EdDSA (Ed25519) signature verification over Curve25519. #[derive(Clone, SilentDebug, SilentDisplay)] pub struct SecretKey { inner: ed25519_dalek::SigningKey, } impl SecretKey { /// Generates a new random secret key using the OS random number generator. #[cfg(feature = "std")] #[allow(clippy::new_without_default)] pub fn new() -> Self { let mut rng = rand::rng(); Self::with_rng(&mut rng) } /// Generates a new secret key using RNG. pub fn with_rng<R: CryptoRng + RngCore>(rng: &mut R) -> Self { let mut seed = [0u8; SECRET_KEY_BYTES]; rand::RngCore::fill_bytes(rng, &mut seed); let inner = ed25519_dalek::SigningKey::from_bytes(&seed); // Zeroize the seed to prevent leaking secret material seed.zeroize(); Self { inner } } /// Gets the corresponding public key for this secret key. pub fn public_key(&self) -> PublicKey { PublicKey { inner: self.inner.verifying_key() } } /// Signs a message (Word) with this secret key. 
pub fn sign(&self, message: Word) -> Signature { let message_bytes: [u8; 32] = message.into(); let sig = self.inner.sign(&message_bytes); Signature { inner: sig } } /// Computes a Diffie-Hellman shared secret from this secret key and the ephemeral public key /// generated by the other party. pub fn get_shared_secret(&self, pk_e: EphemeralPublicKey) -> SharedSecret { let shared = self.to_x25519().diffie_hellman(&pk_e.inner); SharedSecret::new(shared) } /// Converts this Ed25519 secret key into an [`x25519_dalek::StaticSecret`]. /// /// This conversion allows using the same underlying scalar from the Ed25519 secret key /// for X25519 Diffie-Hellman key exchange. The returned `StaticSecret` can then be used /// in key agreement protocols to establish a shared secret with another party's /// X25519 public key. fn to_x25519(&self) -> x25519_dalek::StaticSecret { let mut scalar_bytes = self.inner.to_scalar_bytes(); let static_secret = x25519_dalek::StaticSecret::from(scalar_bytes); // Zeroize the temporary scalar bytes scalar_bytes.zeroize(); static_secret } } // SAFETY: The inner `ed25519_dalek::SigningKey` already implements `ZeroizeOnDrop`, // which ensures that the secret key material is securely zeroized when dropped. impl ZeroizeOnDrop for SecretKey {} impl PartialEq for SecretKey { fn eq(&self, other: &Self) -> bool { use subtle::ConstantTimeEq; self.inner.to_bytes().ct_eq(&other.inner.to_bytes()).into() } } impl Eq for SecretKey {} // PUBLIC KEY // ================================================================================================ #[derive(Debug, Clone, PartialEq, Eq)] pub struct PublicKey { pub(crate) inner: ed25519_dalek::VerifyingKey, } impl PublicKey { /// Returns a commitment to the public key using the RPO256 hash function. /// /// The commitment is computed by first converting the public key to field elements (4 bytes /// per element), and then computing a sequential hash of the elements. 
pub fn to_commitment(&self) -> Word { <Self as SequentialCommit>::to_commitment(self) } /// Verifies a signature against this public key and message. pub fn verify(&self, message: Word, signature: &Signature) -> bool { let message_bytes: [u8; 32] = message.into(); self.inner.verify(&message_bytes, &signature.inner).is_ok() } /// Computes the Ed25519 challenge hash from a message and signature. /// /// This method computes the 64-byte hash `SHA-512(R || A || message)` where: /// - `R` is the signature's R component (first 32 bytes) /// - `A` is the public key /// - `message` is the message bytes /// /// The resulting 64-byte hash can be passed to `verify_with_unchecked_k()` which will /// reduce it modulo the curve order L to produce the challenge scalar. /// /// # Use Case /// /// This method is useful when you want to separate the hashing phase from the /// elliptic curve verification phase. You can: /// 1. Compute the hash using this method (hashing phase) /// 2. Verify using `verify_with_unchecked_k(hash, signature)` (EC phase) /// /// This is equivalent to calling `verify()` directly, but allows the two phases /// to be executed separately or in different environments. /// /// # Arguments /// * `message` - The message that was signed /// * `signature` - The signature to compute the challenge hash from /// /// # Returns /// A 64-byte hash that will be reduced modulo L in `verify_with_unchecked_k()` /// /// # Example /// ```ignore /// let k_hash = public_key.compute_challenge_k(message, &signature); /// let is_valid = public_key.verify_with_unchecked_k(k_hash, &signature).is_ok(); /// // is_valid should equal public_key.verify(message, &signature) /// ``` /// /// # Not Ed25519ph / RFC 8032 Prehash /// /// This helper reproduces the *standard* Ed25519 challenge `H(R || A || M)` used when verifying /// signatures. It does **not** implement the RFC 8032 Ed25519ph variant, which prepends a /// domain separation string and optional context before hashing. 
Callers that require the /// Ed25519ph flavour must implement the additional domain separation logic themselves. pub fn compute_challenge_k(&self, message: Word, signature: &Signature) -> [u8; 64] { use sha2::Digest; let message_bytes: [u8; 32] = message.into(); let sig_bytes = signature.inner.to_bytes(); let r_bytes = &sig_bytes[0..32]; // Compute SHA-512(R || A || message) let mut hasher = sha2::Sha512::new(); hasher.update(r_bytes); hasher.update(self.inner.to_bytes()); hasher.update(message_bytes); let k_hash = hasher.finalize(); k_hash.into() } /// Verifies a signature using a pre-computed challenge hash. /// /// # ⚠️ CRITICAL SECURITY WARNING ⚠️ /// /// **THIS METHOD IS EXTREMELY DANGEROUS AND EASY TO MISUSE.** /// /// This method bypasses the standard Ed25519 verification process by accepting a pre-computed /// challenge hash instead of computing it from the message. This breaks Ed25519's /// security properties in the following ways: /// /// ## Security Risks: /// /// 1. **Signature Forgery**: An attacker who can control the hash value can forge signatures /// for arbitrary messages without knowing the private key. /// /// 2. **Breaks Message Binding**: Standard Ed25519 cryptographically binds the signature to the /// message via the hash `H(R || A || message)`. Accepting arbitrary hashes breaks this /// binding. /// /// 3. **Bypasses Standard Protocol**: If the hash is not computed correctly as `SHA-512(R || A /// || message)`, this method bypasses standard Ed25519 verification and the signature will /// not be compatible with Ed25519 semantics. 
/// /// ## When This Might Be Used: /// /// This method is only appropriate in very specific scenarios where: /// - You have a trusted computation environment that computes the hash correctly as `SHA-512(R /// || A || message)` (see `compute_challenge_k()`) /// - You need to separate the hashing phase from the EC verification phase (e.g., for different /// execution environments or performance optimization) /// - You fully understand the security implications and have a threat model that accounts for /// them /// /// When the hash is computed correctly, this method implements standard Ed25519 verification. /// /// ## Standard Usage: /// /// For normal Ed25519 verification, use `verify()` instead. /// /// ## Performance /// /// This helper decompresses the signature's `R` component before performing group arithmetic /// and reuses the cached Edwards form of the public key. Expect it to be slower than /// calling `verify()` directly. /// /// # Arguments /// * `k_hash` - A 64-byte hash (typically computed as `SHA-512(R || A || message)`) /// * `signature` - The signature to verify /// /// # Returns /// `Ok(())` if the verification equation `[s]B = R + [k]A` holds, or an error describing why /// the verification failed. /// /// # Warning /// Do NOT use this method unless you fully understand Ed25519's cryptographic properties, /// have a specific need for this low-level operation, and are feeding it the exact /// `SHA-512(R || A || message)` output (without the Ed25519ph domain separation string). 
pub fn verify_with_unchecked_k( &self, k_hash: [u8; 64], signature: &Signature, ) -> Result<(), UncheckedVerificationError> { use curve25519_dalek::{ edwards::{CompressedEdwardsY, EdwardsPoint}, scalar::Scalar, }; // Reduce the 64-byte hash modulo L to get the challenge scalar let k_scalar = Scalar::from_bytes_mod_order_wide(&k_hash); // Extract signature components: R (first 32 bytes) and s (second 32 bytes) let sig_bytes = signature.inner.to_bytes(); let r_bytes: [u8; 32] = sig_bytes[..32].try_into().expect("signature R component is exactly 32 bytes"); let s_bytes: [u8; 32] = sig_bytes[32..].try_into().expect("signature s component is exactly 32 bytes"); // RFC 8032 requires s to be canonical; reject non-canonical scalars to avoid malleability. let s_candidate = Scalar::from_canonical_bytes(s_bytes); if s_candidate.is_none().into() { return Err(UncheckedVerificationError::NonCanonicalScalar); } let s_scalar = s_candidate.unwrap(); let r_compressed = CompressedEdwardsY(r_bytes); let Some(r_point) = r_compressed.decompress() else { return Err(UncheckedVerificationError::InvalidSignaturePoint); }; let a_point = self.inner.to_edwards(); // Match the stricter ed25519-dalek semantics by rejecting small-order inputs instead of // multiplying the whole equation by the cofactor. dalek leaves this check opt-in via // `verify_strict()`; we enforce it here to guard this hazmat API against torsion exploits. if r_point.is_small_order() { return Err(UncheckedVerificationError::SmallOrderSignature); } if a_point.is_small_order() { return Err(UncheckedVerificationError::SmallOrderPublicKey); } // Compute the verification equation: -[k]A + [s]B == R, mirroring dalek's raw_verify. 
// Small-order points are rejected above and hence no need for multiplication by co-factor let minus_a = -a_point; let expected_r = EdwardsPoint::vartime_double_scalar_mul_basepoint(&k_scalar, &minus_a, &s_scalar) .compress(); if expected_r == r_compressed { Ok(()) } else { Err(UncheckedVerificationError::EquationMismatch) } } /// Convert to a X25519 public key which can be used in a DH key exchange protocol. /// /// # ⚠️ Security Warning /// /// **Do not reuse the same secret key for both Ed25519 signatures and X25519 key exchange.** /// This conversion is primarily intended for sealed box primitives where an Ed25519 public key /// is used to generate the shared key for encryption given an ephemeral X25519 key pair. /// /// In all other uses, prefer generating dedicated X25519 keys directly. pub(crate) fn to_x25519(&self) -> x25519_dalek::PublicKey { let mont_point = self.inner.to_montgomery(); x25519_dalek::PublicKey::from(mont_point.to_bytes()) } } impl SequentialCommit for PublicKey { type Commitment = Word; fn to_elements(&self) -> Vec<Felt> { bytes_to_packed_u32_elements(&self.to_bytes()) } } #[derive(Debug, Error)] pub enum PublicKeyError { #[error("Could not verify with given public key and signature")] VerificationFailed, } /// Errors that can arise when invoking [`PublicKey::verify_with_unchecked_k`]. 
#[derive(Debug, Error)] pub enum UncheckedVerificationError { #[error("challenge scalar is not canonical")] NonCanonicalScalar, #[error("signature R component failed to decompress")] InvalidSignaturePoint, #[error("small-order component detected in signature R")] SmallOrderSignature, #[error("small-order component detected in public key")] SmallOrderPublicKey, #[error("verification equation was not satisfied")] EquationMismatch, } // SIGNATURE // ================================================================================================ /// EdDSA (Ed25519) signature #[derive(Debug, Clone, PartialEq, Eq)] pub struct Signature { inner: ed25519_dalek::Signature, } impl Signature { /// Verify against (message, public key). pub fn verify(&self, message: Word, pub_key: &PublicKey) -> bool { pub_key.verify(message, self) } } // SERIALIZATION / DESERIALIZATION // ================================================================================================ impl Serializable for SecretKey { fn write_into<W: ByteWriter>(&self, target: &mut W) { target.write_bytes(&self.inner.to_bytes()); } } impl Deserializable for SecretKey { fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> { let mut bytes: [u8; SECRET_KEY_BYTES] = source.read_array()?; let inner = ed25519_dalek::SigningKey::from_bytes(&bytes); bytes.zeroize(); Ok(Self { inner }) } } impl Serializable for PublicKey { fn write_into<W: ByteWriter>(&self, target: &mut W) { target.write_bytes(&self.inner.to_bytes()); } } impl Deserializable for PublicKey { fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> { let bytes: [u8; PUBLIC_KEY_BYTES] = source.read_array()?; let inner = ed25519_dalek::VerifyingKey::from_bytes(&bytes).map_err(|_| { DeserializationError::InvalidValue("Invalid Ed25519 public key".to_string()) })?; Ok(Self { inner }) } } impl Serializable for Signature { fn write_into<W: ByteWriter>(&self, target: &mut W) { 
target.write_bytes(&self.inner.to_bytes()) } } impl Deserializable for Signature { fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> { let bytes: [u8; SIGNATURE_BYTES] = source.read_array()?; let inner = ed25519_dalek::Signature::from_bytes(&bytes); Ok(Self { inner }) } }
rust
Apache-2.0
b30552ecceb5f70565cc0267fca227f30c5af7ab
2026-01-04T20:24:48.363198Z
false
0xMiden/crypto
https://github.com/0xMiden/crypto/blob/b30552ecceb5f70565cc0267fca227f30c5af7ab/miden-crypto/src/dsa/falcon512_rpo/hash_to_point.rs
miden-crypto/src/dsa/falcon512_rpo/hash_to_point.rs
use alloc::vec::Vec; use p3_field::PrimeField64; use super::{MODULUS, N, Nonce, Polynomial, Rpo256, ZERO, math::FalconFelt}; use crate::{Felt, Word}; // HASH-TO-POINT FUNCTIONS // ================================================================================================ /// Returns a polynomial in Z_p[x]/(phi) representing the hash of the provided message and /// nonce using RPO256. /// /// Note that, in contrast to the SHAKE256-based reference implementation, this implementation /// does not use rejection sampling but instead uses one of the variants listed in the specification /// [1]. This variant omits the conditional check in the rejection sampling step at the cost of /// having to extract 64 bits, instead of 16 bits, of pseudo-randomness. This makes /// the implementation simpler and constant-time at the cost of a higher number of extracted /// pseudo-random bits per call to the hash-to-point algorithm. /// /// [1]: https://falcon-sign.info/falcon.pdf pub fn hash_to_point_rpo256(message: Word, nonce: &Nonce) -> Polynomial<FalconFelt> { let mut state = [ZERO; Rpo256::STATE_WIDTH]; // absorb the nonce into the state let nonce_elements = nonce.to_elements(); for (&n, s) in nonce_elements.iter().zip(state[Rpo256::RATE_RANGE].iter_mut()) { *s = n; } Rpo256::apply_permutation(&mut state); // absorb message into the state for (&m, s) in message.iter().zip(state[Rpo256::RATE_RANGE].iter_mut()) { *s = m; } // squeeze the coefficients of the polynomial let mut coefficients: Vec<FalconFelt> = Vec::with_capacity(N); for _ in 0..64 { // // Note that `FalconFelt::new((a.as_canonical_u64() % MODULUS as u64) as i16)` will // create a bias as we are mapping $2^64 - 2^31 + 1$ elements to $12289$ elements // and it must not be uniform. A statistical analysis can be applied here to show // that this is still fine: the output distribution is computational IND from // uniform. 
Rpo256::apply_permutation(&mut state); state[Rpo256::RATE_RANGE] .iter() .for_each(|value| coefficients.push(felt_to_falcon_felt(*value))); } Polynomial::new(coefficients) } /// Returns a polynomial in Z_p[x]/(phi) representing the hash of the provided message and /// nonce using SHAKE256. This is the hash-to-point algorithm used in the reference implementation. #[cfg(all(test, feature = "std"))] pub fn hash_to_point_shake256(message: &[u8], nonce: &Nonce) -> Polynomial<FalconFelt> { use sha3::{ Shake256, digest::{ExtendableOutput, Update, XofReader}, }; let mut data = vec![]; data.extend_from_slice(&nonce.as_bytes()); data.extend_from_slice(message); const K: u32 = (1u32 << 16) / MODULUS as u32; let mut hasher = Shake256::default(); hasher.update(&data); let mut reader = hasher.finalize_xof(); let mut coefficients: Vec<FalconFelt> = Vec::with_capacity(N); while coefficients.len() != N { let mut randomness = [0u8; 2]; reader.read(&mut randomness); let t = ((randomness[0] as u32) << 8) | (randomness[1] as u32); if t < K * MODULUS as u32 { coefficients.push(u32_to_falcon_felt(t)); } } Polynomial { coefficients } } // HELPER FUNCTIONS // ================================================================================================ /// Converts a Miden field element to a field element in the prime field with characteristic /// the Falcon prime. /// /// Note that since `FalconFelt::new` accepts `i16`, we first reduce the canonical value of /// the Miden field element modulo the Falcon prime and then cast the resulting value to an `i16`. /// Note that this final cast is safe as the Falcon prime is less than `i16::MAX`. fn felt_to_falcon_felt(value: Felt) -> FalconFelt { FalconFelt::new((value.as_canonical_u64() % MODULUS as u64) as i16) } /// Converts a `u32` to a field element in the prime field with characteristic the Falcon prime. 
/// /// Note that since `FalconFelt::new` accepts `i16`, we first reduce the `u32` value modulo /// the Falcon prime and then cast the resulting value to an `i16`. /// Note that this final cast is safe as the Falcon prime is less than `i16::MAX`. #[cfg(all(test, feature = "std"))] fn u32_to_falcon_felt(value: u32) -> FalconFelt { FalconFelt::new((value % MODULUS as u32) as i16) }
rust
Apache-2.0
b30552ecceb5f70565cc0267fca227f30c5af7ab
2026-01-04T20:24:48.363198Z
false
0xMiden/crypto
https://github.com/0xMiden/crypto/blob/b30552ecceb5f70565cc0267fca227f30c5af7ab/miden-crypto/src/dsa/falcon512_rpo/signature.rs
miden-crypto/src/dsa/falcon512_rpo/signature.rs
use alloc::{string::ToString, vec::Vec}; use core::ops::Deref; use num::Zero; use super::{ ByteReader, ByteWriter, Deserializable, DeserializationError, LOG_N, MODULUS, N, Nonce, SIG_L2_BOUND, SIG_POLY_BYTE_LEN, Serializable, hash_to_point::hash_to_point_rpo256, keys::PublicKey, math::{FalconFelt, FastFft, Polynomial}, }; use crate::Word; // FALCON SIGNATURE // ================================================================================================ /// A deterministic RPO Falcon512 signature over a message. /// /// The signature is a pair of polynomials (s1, s2) in (Z_p\[x\]/(phi))^2 a nonce `r`, and a public /// key polynomial `h` where: /// - p := 12289 /// - phi := x^512 + 1 /// /// The signature verifies against a public key `pk` if and only if: /// 1. s1 = c - s2 * h /// 2. |s1|^2 + |s2|^2 <= SIG_L2_BOUND /// /// where |.| is the norm and: /// - c = HashToPoint(r || message) /// - pk = Rpo256::hash(h) /// /// Here h is a polynomial representing the public key and pk is its digest using the Rpo256 hash /// function. c is a polynomial that is the hash-to-point of the message being signed. /// /// To summarize the main points of differences with the reference implementation, we have that: /// /// 1. the hash-to-point algorithm is made deterministic by using a fixed nonce `r`. This fixed /// nonce is formed as `nonce_version_byte || preversioned_nonce` where `preversioned_nonce` is a /// 39-byte string that is defined as: i. a byte representing `log_2(512)`, followed by ii. the /// UTF8 representation of the string "RPO-FALCON-DET", followed by iii. the required number of /// 0_u8 padding to make the total length equal 39 bytes. Note that the above means in particular /// that only the `nonce_version_byte` needs to be serialized when serializing the signature. /// This reduces the deterministic signature compared to the reference implementation by 39 /// bytes. /// 2. 
the RNG used in the trapdoor sampler (i.e., the ffSampling algorithm) is ChaCha20Rng seeded /// with the `Blake3` hash of `log_2(512) || sk || message`. /// /// The signature is serialized as: /// /// 1. A header byte specifying the algorithm used to encode the coefficients of the `s2` polynomial /// together with the degree of the irreducible polynomial phi. For RPO Falcon512, the header /// byte is set to `10111001` to differentiate it from the standardized instantiation of the /// Falcon signature. /// 2. 1 byte for the nonce version. /// 4. 625 bytes encoding the `s2` polynomial above. /// /// In addition to the signature itself, the polynomial h is also serialized with the signature as: /// /// 1. 1 byte representing the log2(512) i.e., 9. /// 2. 896 bytes for the public key itself. /// /// The total size of the signature (including the extended public key) is 1524 bytes. /// /// [1]: https://github.com/algorand/falcon/blob/main/falcon-det.pdf /// [2]: https://datatracker.ietf.org/doc/html/rfc6979#section-3.5 #[derive(Debug, Clone, PartialEq, Eq)] pub struct Signature { header: SignatureHeader, nonce: Nonce, s2: SignaturePoly, h: PublicKey, } impl Signature { // CONSTRUCTOR // -------------------------------------------------------------------------------------------- /// Creates a new signature from the given nonce, public key polynomial, and signature /// polynomial. pub fn new(nonce: Nonce, h: PublicKey, s2: SignaturePoly) -> Signature { Self { header: SignatureHeader::default(), nonce, s2, h, } } // PUBLIC ACCESSORS // -------------------------------------------------------------------------------------------- /// Returns the public key polynomial h. pub fn public_key(&self) -> &PublicKey { &self.h } /// Returns the polynomial representation of the signature in Z_p\[x\]/(phi). pub fn sig_poly(&self) -> &Polynomial<FalconFelt> { &self.s2 } /// Returns the nonce component of the signature. 
pub fn nonce(&self) -> &Nonce { &self.nonce } // SIGNATURE VERIFICATION // -------------------------------------------------------------------------------------------- /// Returns true if this signature is a valid signature for the specified message generated /// against the secret key matching the specified public key commitment. pub fn verify(&self, message: Word, pub_key: &PublicKey) -> bool { if self.h != *pub_key { return false; } let c = hash_to_point_rpo256(message, &self.nonce); verify_helper(&c, &self.s2, pub_key) } } impl Serializable for Signature { fn write_into<W: ByteWriter>(&self, target: &mut W) { target.write(&self.header); target.write(&self.nonce); target.write(&self.s2); target.write(&self.h); } } impl Deserializable for Signature { fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> { let header = source.read()?; let nonce = source.read()?; let s2 = source.read()?; let h = source.read()?; Ok(Self { header, nonce, s2, h }) } } // SIGNATURE HEADER // ================================================================================================ /// The header byte used to encode the signature metadata. #[derive(Debug, Clone, PartialEq, Eq)] pub struct SignatureHeader(u8); impl Default for SignatureHeader { /// According to section 3.11.3 in the specification [1], the signature header has the format /// `0cc1nnnn` where: /// /// 1. `cc` signifies the encoding method. `01` denotes using the compression encoding method /// and `10` denotes encoding using the uncompressed method. /// 2. `nnnn` encodes `LOG_N`. /// /// For RPO Falcon 512 we use compression encoding and N = 512. Moreover, to differentiate the /// RPO Falcon variant from the reference variant using SHAKE256, we flip the first bit in the /// header. 
Thus, for RPO Falcon 512 the header is `10111001` /// /// [1]: https://falcon-sign.info/falcon.pdf fn default() -> Self { Self(0b1011_1001) } } impl Serializable for &SignatureHeader { fn write_into<W: ByteWriter>(&self, target: &mut W) { target.write_u8(self.0) } } impl Deserializable for SignatureHeader { fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> { let header = source.read_u8()?; let (encoding, log_n) = (header >> 4, header & 0b00001111); if encoding != 0b1011 { return Err(DeserializationError::InvalidValue( "Failed to decode signature: not supported encoding algorithm".to_string(), )); } if log_n != LOG_N { return Err(DeserializationError::InvalidValue(format!( "Failed to decode signature: only supported irreducible polynomial degree is 512, 2^{log_n} was provided" ))); } Ok(Self(header)) } } // SIGNATURE POLYNOMIAL // ================================================================================================ /// A polynomial used as the `s2` component of the signature. 
/// The signature polynomial `s2` of a Falcon signature.
///
/// Coefficients are elements of the Falcon field; serialization uses the compressed
/// encoding of the Falcon specification (Algorithms 17/18).
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct SignaturePoly(pub Polynomial<FalconFelt>);

impl Deref for SignaturePoly {
    type Target = Polynomial<FalconFelt>;

    fn deref(&self) -> &Self::Target {
        &self.0
    }
}

impl From<Polynomial<FalconFelt>> for SignaturePoly {
    fn from(pk_poly: Polynomial<FalconFelt>) -> Self {
        Self(pk_poly)
    }
}

impl TryFrom<&[i16; N]> for SignaturePoly {
    type Error = ();

    /// Builds a signature polynomial from balanced coefficients, failing if any
    /// coefficient falls outside the encodable range [-2047, 2047].
    fn try_from(coefficients: &[i16; N]) -> Result<Self, Self::Error> {
        if are_coefficients_valid(coefficients) {
            Ok(Self(coefficients.to_vec().into()))
        } else {
            Err(())
        }
    }
}

impl Serializable for &SignaturePoly {
    /// Serializes the polynomial using Falcon's compressed signature encoding into
    /// exactly `SIG_POLY_BYTE_LEN` bytes.
    fn write_into<W: ByteWriter>(&self, target: &mut W) {
        let sig_coeff: Vec<i16> = self.0.coefficients.iter().map(|a| a.balanced_value()).collect();
        let mut sk_bytes = vec![0_u8; SIG_POLY_BYTE_LEN];

        // `acc` is a bit accumulator holding `acc_len` pending bits; `v` is the index of
        // the next output byte.
        let mut acc = 0;
        let mut acc_len = 0;
        let mut v = 0;
        let mut t;
        let mut w;

        // For each coefficient of x:
        // - the sign is encoded on 1 bit
        // - the 7 lower bits are encoded naively (binary)
        // - the high bits are encoded in unary encoding
        //
        // Algorithm 17 p. 47 of the specification [1].
        //
        // [1]: https://falcon-sign.info/falcon.pdf
        for &c in sig_coeff.iter() {
            // Sign bit.
            acc <<= 1;
            t = c;
            if t < 0 {
                t = -t;
                acc |= 1;
            }
            w = t as u16;

            // Low 7 bits in binary.
            acc <<= 7;
            let mask = 127_u32;
            acc |= (w as u32) & mask;
            w >>= 7;

            acc_len += 8;

            // High bits in unary: `w` zero bits followed by a terminating 1 bit.
            acc <<= w + 1;
            acc |= 1;
            acc_len += w + 1;

            // Flush complete bytes out of the accumulator.
            while acc_len >= 8 {
                acc_len -= 8;
                sk_bytes[v] = (acc >> acc_len) as u8;
                v += 1;
            }
        }

        // Pad the final partial byte with zero bits on the right.
        if acc_len > 0 {
            sk_bytes[v] = (acc << (8 - acc_len)) as u8;
        }
        target.write_bytes(&sk_bytes);
    }
}

impl Deserializable for SignaturePoly {
    /// Decodes a signature polynomial from its compressed encoding, rejecting
    /// out-of-range coefficients, the forbidden "-0" encoding, and non-zero padding bits.
    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
        let input = source.read_array::<SIG_POLY_BYTE_LEN>()?;

        let mut input_idx = 0;
        let mut acc = 0u32;
        let mut acc_len = 0;
        let mut coefficients = [FalconFelt::zero(); N];

        // Algorithm 18 p. 48 of the specification [1].
        //
        // [1]: https://falcon-sign.info/falcon.pdf
        for c in coefficients.iter_mut() {
            acc = (acc << 8) | (input[input_idx] as u32);
            input_idx += 1;
            // `s` is the sign bit, `m` starts as the low 7 binary bits.
            let b = acc >> acc_len;
            let s = b & 128;
            let mut m = b & 127;

            // Consume the unary-encoded high bits: each 0 bit adds 128 to `m`,
            // a 1 bit terminates the coefficient.
            loop {
                if acc_len == 0 {
                    acc = (acc << 8) | (input[input_idx] as u32);
                    input_idx += 1;
                    acc_len = 8;
                }
                acc_len -= 1;
                if ((acc >> acc_len) & 1) != 0 {
                    break;
                }
                m += 128;
                if m >= 2048 {
                    return Err(DeserializationError::InvalidValue(format!(
                        "Failed to decode signature: high bits {m} exceed 2048",
                    )));
                }
            }
            // The encoding of -0 is invalid per the specification (non-canonical).
            if s != 0 && m == 0 {
                return Err(DeserializationError::InvalidValue(
                    "Failed to decode signature: -0 is forbidden".to_string(),
                ));
            }

            // Map the (sign, magnitude) pair into the canonical field representative.
            let felt = if s != 0 { (MODULUS as u32 - m) as u16 } else { m as u16 };
            *c = FalconFelt::new(felt as i16);
        }

        // Any leftover padding bits must be zero for the encoding to be canonical.
        if (acc & ((1 << acc_len) - 1)) != 0 {
            return Err(DeserializationError::InvalidValue(
                "Failed to decode signature: Non-zero unused bits in the last byte".to_string(),
            ));
        }
        Ok(Polynomial::new(coefficients.to_vec()).into())
    }
}

// HELPER FUNCTIONS
// ================================================================================================

/// Takes the hash-to-point polynomial `c` of a message, the signature polynomial over
/// the message `s2` and a public key polynomial and returns `true` is the signature is a valid
/// signature for the given parameters, otherwise it returns `false`.
fn verify_helper(c: &Polynomial<FalconFelt>, s2: &SignaturePoly, h: &PublicKey) -> bool {
    let h_fft = h.fft();
    let s2_fft = s2.fft();
    let c_fft = c.fft();

    // compute the signature polynomial s1 using s1 = c - s2 * h
    let s1_fft = c_fft - s2_fft.hadamard_mul(&h_fft);
    let s1 = s1_fft.ifft();

    // compute the norm squared of (s1, s2)
    let length_squared_s1 = s1.norm_squared();
    let length_squared_s2 = s2.norm_squared();
    let length_squared = length_squared_s1 + length_squared_s2;

    // accept iff the squared norm of (s1, s2) is below the Falcon-512 bound
    length_squared < SIG_L2_BOUND
}

/// Checks whether a set of coefficients is a valid one for a signature polynomial.
///
/// Valid means: exactly `N` coefficients, each within the encodable range [-2047, 2047].
fn are_coefficients_valid(x: &[i16]) -> bool {
    if x.len() != N {
        return false;
    }

    for &c in x {
        if !(-2047..=2047).contains(&c) {
            return false;
        }
    }

    true
}

// TESTS
// ================================================================================================

#[cfg(all(test, feature = "std"))]
mod tests {
    use rand::SeedableRng;
    use rand_chacha::ChaCha20Rng;

    use super::{
        super::{SIG_SERIALIZED_LEN, SecretKey},
        *,
    };

    #[test]
    fn test_serialization_round_trip() {
        let seed = [0_u8; 32];
        let mut rng = ChaCha20Rng::from_seed(seed);

        let sk = SecretKey::with_rng(&mut rng);
        let signature = sk.sign_with_rng(Word::default(), &mut rng);

        let serialized = signature.to_bytes();
        assert_eq!(serialized.len(), SIG_SERIALIZED_LEN);

        let deserialized = Signature::read_from_bytes(&serialized).unwrap();
        assert_eq!(signature.sig_poly(), deserialized.sig_poly());
    }
}
rust
Apache-2.0
b30552ecceb5f70565cc0267fca227f30c5af7ab
2026-01-04T20:24:48.363198Z
false
0xMiden/crypto
https://github.com/0xMiden/crypto/blob/b30552ecceb5f70565cc0267fca227f30c5af7ab/miden-crypto/src/dsa/falcon512_rpo/mod.rs
miden-crypto/src/dsa/falcon512_rpo/mod.rs
//! A deterministic RPO Falcon512 signature over a message.
//!
//! This version differs from the reference implementation in its use of the RPO algebraic hash
//! function in its hash-to-point algorithm.
//!
//! Another point of difference is the determinism in the signing process. The approach used to
//! achieve this is the one proposed in [1].
//! The main challenge in making the signing procedure deterministic is ensuring that the same
//! secret key is never used to produce two inequivalent signatures for the same `c`.
//! For a precise definition of equivalence of signatures see [1].
//! The reference implementation uses a random nonce per signature in order to make sure that,
//! with overwhelming probability, no two c-s will ever repeat and this non-repetition turns out
//! to be enough to make the security proof of the underlying construction go through in
//! the random-oracle model.
//!
//! Making the signing process deterministic means that we cannot rely on the above use of nonce
//! in the hash-to-point algorithm, i.e., the hash-to-point algorithm is deterministic. It also
//! means that we have to derandomize the trapdoor sampling process and use the entropy in
//! the secret key, together with the message, as the seed of a CPRNG. This is exactly the approach
//! taken in [2] but, as explained at length in [1], this is not enough. The reason for this
//! is that the sampling process during signature generation must be ensured to be consistent
//! across the entire computing stack i.e., hardware, compiler, OS, sampler implementations ...
//!
//! This is made even more difficult by the extensive use of floating-point arithmetic by
//! the sampler. In relation to this point, the current implementation does not use any platform
//! specific optimizations (e.g., AVX2, NEON, FMA ...) and relies solely on the builtin `f64` type.
//! Moreover, as per the time of this writing, the implementation does not use any methods or
//! functions from `std::f64` that have non-deterministic precision mentioned in their
//! documentation.
//!
//! [1]: https://github.com/algorand/falcon/blob/main/falcon-det.pdf
//! [2]: https://datatracker.ietf.org/doc/html/rfc6979#section-3.5

use crate::{
    Felt, ZERO,
    hash::rpo::Rpo256,
    utils::{ByteReader, ByteWriter, Deserializable, DeserializationError, Serializable},
};

mod hash_to_point;
mod keys;
mod math;
mod signature;

#[cfg(all(test, feature = "std"))]
mod tests;

pub use self::{
    keys::{PublicKey, SecretKey},
    math::Polynomial,
    signature::{Signature, SignatureHeader, SignaturePoly},
};

// CONSTANTS
// ================================================================================================

// The Falcon modulus p.
const MODULUS: i16 = 12289;

// Number of bits needed to encode an element in the Falcon field.
const FALCON_ENCODING_BITS: u32 = 14;

// The Falcon parameters for Falcon-512. This is the degree of the polynomial `phi := x^N + 1`
// defining the ring Z_p[x]/(phi).
const N: usize = 512;
const LOG_N: u8 = 9;

/// Length of nonce used for signature generation.
const SIG_NONCE_LEN: usize = 40;

/// Length of the preversioned portion of the fixed nonce.
///
/// Since we use one byte to encode the version of the nonce, this is equal to `SIG_NONCE_LEN - 1`.
const PREVERSIONED_NONCE_LEN: usize = 39;

/// Current version of the fixed nonce.
///
/// The usefulness of the notion of versioned fixed nonce is discussed in Section 2.1 in [1].
///
/// [1]: https://github.com/algorand/falcon/blob/main/falcon-det.pdf
const NONCE_VERSION_BYTE: u8 = 1;

/// The preversioned portion of the fixed nonce constructed following [1].
///
/// Note that reference [1] uses the term salt instead of nonce.
///
/// The bytes after the leading `9` are the ASCII encoding of "RPO-FALCON-DET",
/// zero-padded to 39 bytes.
/// NOTE(review): the meaning of the leading byte `9` is not established by this file —
/// presumably a domain/format tag per the deterministic-Falcon construction; confirm against [1].
///
/// [1]: https://github.com/algorand/falcon/blob/main/falcon-det.pdf
const PREVERSIONED_NONCE: [u8; PREVERSIONED_NONCE_LEN] = [
    9, 82, 80, 79, 45, 70, 65, 76, 67, 79, 78, 45, 68, 69, 84, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
    0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
];

/// Number of field elements used to encode a nonce.
const NONCE_ELEMENTS: usize = 8;

/// Public key length as a u8 vector.
pub const PK_LEN: usize = 897;

/// Secret key length as a u8 vector.
pub const SK_LEN: usize = 1281;

/// Signature length as a u8 vector.
const SIG_POLY_BYTE_LEN: usize = 625;

/// Signature size when serialized as a u8 vector.
#[cfg(all(test, feature = "std"))]
const SIG_SERIALIZED_LEN: usize = 1524;

/// Bound on the squared-norm of the signature.
const SIG_L2_BOUND: u64 = 34034726;

/// Standard deviation of the Gaussian over the lattice.
const SIGMA: f64 = 165.7366171829776;

// TYPE ALIASES
// ================================================================================================

type ShortLatticeBasis = [Polynomial<i16>; 4];

// NONCE
// ================================================================================================

/// Nonce of the Falcon signature.
#[derive(Debug, Clone, PartialEq, Eq)]
pub struct Nonce([u8; SIG_NONCE_LEN]);

impl Nonce {
    /// Returns a new deterministic [Nonce].
    ///
    /// This is used in deterministic signing following [1] and is composed of two parts:
    ///
    /// 1. a byte serving as a version byte,
    /// 2. a pre-versioned fixed nonce which is the UTF8 encoding of the domain separator
    ///    "RPO-FALCON-DET" padded with enough zeros to make it of size 39 bytes.
    ///
    /// The usefulness of the notion of versioned fixed nonce is discussed in Section 2.1 in [1].
    ///
    /// [1]: https://github.com/algorand/falcon/blob/main/falcon-det.pdf
    pub fn deterministic() -> Self {
        let mut nonce_bytes = [0u8; SIG_NONCE_LEN];
        nonce_bytes[0] = NONCE_VERSION_BYTE;
        nonce_bytes[1..].copy_from_slice(&PREVERSIONED_NONCE);

        Self(nonce_bytes)
    }

    /// Returns a new [Nonce] drawn from the provided RNG.
    ///
    /// This is used only in testing against the test vectors of the reference (non-deterministic)
    /// Falcon DSA implementation.
    #[cfg(all(test, feature = "std"))]
    pub fn random<R: rand::Rng>(rng: &mut R) -> Self {
        let mut nonce_bytes = [0u8; SIG_NONCE_LEN];
        rng.fill_bytes(&mut nonce_bytes);
        Self::from_bytes(nonce_bytes)
    }

    /// Returns the underlying concatenated bytes of this nonce.
    pub fn as_bytes(&self) -> [u8; SIG_NONCE_LEN] {
        self.0
    }

    /// Returns a `Nonce` given an array of bytes.
    pub fn from_bytes(nonce_bytes: [u8; SIG_NONCE_LEN]) -> Self {
        Self(nonce_bytes)
    }

    /// Converts byte representation of the nonce into field element representation.
    ///
    /// Nonce bytes are converted to field elements by taking consecutive 5 byte chunks
    /// of the nonce and interpreting them as field elements.
    pub fn to_elements(&self) -> [Felt; NONCE_ELEMENTS] {
        let mut buffer = [0_u8; 8];
        let mut result = [ZERO; 8];
        for (i, bytes) in self.as_bytes().chunks(5).enumerate() {
            // only the low 5 bytes of `buffer` are ever written; the top 3 stay zero
            buffer[..5].copy_from_slice(bytes);
            // we can safely (without overflow) create a new Felt from u64 value here since this
            // value contains at most 5 bytes
            result[i] = Felt::new(u64::from_le_bytes(buffer));
        }

        result
    }
}

impl Serializable for &Nonce {
    /// Serializes only the version byte: the remaining 39 bytes are the fixed
    /// `PREVERSIONED_NONCE` and are reconstructed on deserialization.
    fn write_into<W: ByteWriter>(&self, target: &mut W) {
        target.write_u8(self.0[0])
    }
}

impl Deserializable for Nonce {
    /// Reads the version byte and rebuilds the full nonce from the fixed
    /// `PREVERSIONED_NONCE` tail.
    fn read_from<R: ByteReader>(source: &mut R) -> Result<Self, DeserializationError> {
        let nonce_version: u8 = source.read()?;

        let mut nonce_bytes = [0u8; SIG_NONCE_LEN];
        nonce_bytes[0] = nonce_version;
        nonce_bytes[1..].copy_from_slice(&PREVERSIONED_NONCE);

        Ok(Self(nonce_bytes))
    }
}
rust
Apache-2.0
b30552ecceb5f70565cc0267fca227f30c5af7ab
2026-01-04T20:24:48.363198Z
false
0xMiden/crypto
https://github.com/0xMiden/crypto/blob/b30552ecceb5f70565cc0267fca227f30c5af7ab/miden-crypto/src/dsa/falcon512_rpo/math/field.rs
miden-crypto/src/dsa/falcon512_rpo/math/field.rs
use alloc::string::String;
use core::ops::{Add, AddAssign, Div, DivAssign, Mul, MulAssign, Neg, Sub, SubAssign};

use num::{One, Zero};

use super::{Inverse, MODULUS, fft::CyclotomicFourier};

/// An element of the Falcon field Z_q with q = 12289, stored as its canonical
/// representative in [0, q) inside a `u32`.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Default)]
pub struct FalconFelt(u32);

impl FalconFelt {
    /// Reduces an arbitrary `i16` into the canonical representative in [0, q),
    /// using branchless sign handling (const-friendly, no `if`).
    pub const fn new(value: i16) -> Self {
        let gtz_bool = value >= 0;
        let gtz_int = gtz_bool as i16;
        // gtz_sign is +1 for non-negative input, -1 for negative input.
        let gtz_sign = gtz_int - ((!gtz_bool) as i16);
        // reduce |value| mod q, then restore the sign.
        let reduced = gtz_sign * (gtz_sign * value) % MODULUS;
        // add q back when the input was negative to land in [0, q).
        let canonical_representative = (reduced + MODULUS * (1 - gtz_int)) as u32;
        FalconFelt(canonical_representative)
    }

    /// Returns the canonical representative in [0, q); always fits in `i16` since q = 12289.
    pub const fn value(&self) -> i16 {
        self.0 as i16
    }

    /// Returns the balanced representative in (-q/2, q/2].
    pub fn balanced_value(&self) -> i16 {
        let value = self.value();
        let g = (value > ((MODULUS) / 2)) as i16;
        value - (MODULUS) * g
    }

    /// Const multiplication; the u32 product of two values < q cannot overflow.
    pub const fn multiply(&self, other: Self) -> Self {
        FalconFelt((self.0 * other.0) % MODULUS as u32)
    }
}

impl Add for FalconFelt {
    type Output = Self;

    /// Branchless modular addition: add, conditionally subtract q using the
    /// borrow flag of the overflowing subtraction.
    #[allow(clippy::suspicious_arithmetic_impl)]
    fn add(self, rhs: Self) -> Self::Output {
        let (s, _) = self.0.overflowing_add(rhs.0);
        let (d, n) = s.overflowing_sub(MODULUS as u32);
        let (r, _) = d.overflowing_add(MODULUS as u32 * (n as u32));
        FalconFelt(r)
    }
}

impl AddAssign for FalconFelt {
    fn add_assign(&mut self, rhs: Self) {
        *self = *self + rhs;
    }
}

impl Sub for FalconFelt {
    type Output = Self;

    fn sub(self, rhs: Self) -> Self::Output {
        self + -rhs
    }
}

impl SubAssign for FalconFelt {
    fn sub_assign(&mut self, rhs: Self) {
        *self = *self - rhs;
    }
}

impl Neg for FalconFelt {
    type Output = FalconFelt;

    /// Branchless negation: q - x for non-zero x, and 0 maps to 0.
    fn neg(self) -> Self::Output {
        let is_nonzero = self.0 != 0;
        let r = MODULUS as u32 - self.0;
        FalconFelt(r * (is_nonzero as u32))
    }
}

impl Mul for FalconFelt {
    // product of two values < 12289 fits comfortably in u32, so no overflow
    fn mul(self, rhs: Self) -> Self::Output {
        FalconFelt((self.0 * rhs.0) % MODULUS as u32)
    }

    type Output = Self;
}

impl MulAssign for FalconFelt {
    fn mul_assign(&mut self, rhs: Self) {
        *self = *self * rhs;
    }
}

impl Div for FalconFelt {
    type Output = FalconFelt;

    /// Division via multiplication with the inverse; division by zero yields zero.
    #[allow(clippy::suspicious_arithmetic_impl)]
    fn div(self, rhs: Self) -> Self::Output {
        self * rhs.inverse_or_zero()
    }
}

impl DivAssign for FalconFelt {
    fn div_assign(&mut self, rhs: Self) {
        *self = *self / rhs
    }
}

impl Zero for FalconFelt {
    fn zero() -> Self {
        FalconFelt::new(0)
    }

    fn is_zero(&self) -> bool {
        self.0 == 0
    }
}

impl One for FalconFelt {
    fn one() -> Self {
        FalconFelt::new(1)
    }
}

impl Inverse for FalconFelt {
    /// Computes the inverse by Fermat's little theorem: x^(q-2) mod q, evaluated
    /// through a fixed addition chain (constant number of multiplications).
    fn inverse_or_zero(self) -> Self {
        // q-2 = 0b10 11 11 11 11 11 11
        let two = self.multiply(self);
        let three = two.multiply(self);
        let six = three.multiply(three);
        let twelve = six.multiply(six);
        let fifteen = twelve.multiply(three);
        let thirty = fifteen.multiply(fifteen);
        let sixty = thirty.multiply(thirty);
        let sixty_three = sixty.multiply(three);

        let sixty_three_sq = sixty_three.multiply(sixty_three);
        let sixty_three_qu = sixty_three_sq.multiply(sixty_three_sq);
        let sixty_three_oc = sixty_three_qu.multiply(sixty_three_qu);
        let sixty_three_hx = sixty_three_oc.multiply(sixty_three_oc);
        let sixty_three_tt = sixty_three_hx.multiply(sixty_three_hx);
        let sixty_three_sf = sixty_three_tt.multiply(sixty_three_tt);

        let all_ones = sixty_three_sf.multiply(sixty_three);
        let two_e_twelve = all_ones.multiply(self);
        let two_e_thirteen = two_e_twelve.multiply(two_e_twelve);

        two_e_thirteen.multiply(all_ones)
    }
}

impl CyclotomicFourier for FalconFelt {
    /// Returns a primitive n-th root of unity for n a power of two up to 2^12,
    /// obtained by repeatedly squaring a fixed 2^12-th root of unity.
    fn primitive_root_of_unity(n: usize) -> Self {
        let log2n = n.ilog2();
        assert!(log2n <= 12);

        // and 1331 is a twelfth root of unity
        let mut a = FalconFelt::new(1331);
        let num_squarings = 12 - n.ilog2();
        for _ in 0..num_squarings {
            a *= a;
        }
        a
    }
}

impl TryFrom<u32> for FalconFelt {
    type Error = String;

    /// Fails for values >= q; accepted values fit in `i16` since q = 12289.
    fn try_from(value: u32) -> Result<Self, Self::Error> {
        if value >= MODULUS as u32 {
            Err(format!("value {value} is greater than or equal to the field modulus {MODULUS}"))
        } else {
            Ok(FalconFelt::new(value as i16))
        }
    }
}
rust
Apache-2.0
b30552ecceb5f70565cc0267fca227f30c5af7ab
2026-01-04T20:24:48.363198Z
false
0xMiden/crypto
https://github.com/0xMiden/crypto/blob/b30552ecceb5f70565cc0267fca227f30c5af7ab/miden-crypto/src/dsa/falcon512_rpo/math/polynomial.rs
miden-crypto/src/dsa/falcon512_rpo/math/polynomial.rs
//! Generic polynomial type and operations used in Falcon. use alloc::vec::Vec; use core::{ default::Default, fmt::Debug, ops::{Add, AddAssign, Div, Mul, MulAssign, Neg, Sub, SubAssign}, }; use num::{One, Zero}; use p3_field::PrimeCharacteristicRing; use super::{Inverse, field::FalconFelt}; use crate::{ Felt, dsa::falcon512_rpo::{MODULUS, N}, utils::zeroize::{Zeroize, ZeroizeOnDrop}, }; /// Represents a polynomial with coefficients of type F. #[derive(Debug, Clone, Default)] pub struct Polynomial<F> { /// Coefficients of the polynomial, ordered from lowest to highest degree. pub coefficients: Vec<F>, } impl<F> Polynomial<F> where F: Clone, { /// Creates a new polynomial from the provided coefficients. pub fn new(coefficients: Vec<F>) -> Self { Self { coefficients } } } impl<F: Mul<Output = F> + Sub<Output = F> + AddAssign + Zero + Div<Output = F> + Clone + Inverse> Polynomial<F> { /// Multiplies two polynomials coefficient-wise (Hadamard multiplication). pub fn hadamard_mul(&self, other: &Self) -> Self { Polynomial::new( self.coefficients .iter() .zip(other.coefficients.iter()) .map(|(a, b)| *a * *b) .collect(), ) } /// Divides two polynomials coefficient-wise (Hadamard division). pub fn hadamard_div(&self, other: &Self) -> Self { let other_coefficients_inverse = F::batch_inverse_or_zero(&other.coefficients); Polynomial::new( self.coefficients .iter() .zip(other_coefficients_inverse.iter()) .map(|(a, b)| *a * *b) .collect(), ) } /// Computes the coefficient-wise inverse (Hadamard inverse). pub fn hadamard_inv(&self) -> Self { let coefficients_inverse = F::batch_inverse_or_zero(&self.coefficients); Polynomial::new(coefficients_inverse) } } impl<F: Zero + PartialEq + Clone> Polynomial<F> { /// Returns the degree of the polynomial. 
pub fn degree(&self) -> Option<usize> { if self.coefficients.is_empty() { return None; } let mut max_index = self.coefficients.len() - 1; while self.coefficients[max_index] == F::zero() { if let Some(new_index) = max_index.checked_sub(1) { max_index = new_index; } else { return None; } } Some(max_index) } /// Returns the leading coefficient of the polynomial. pub fn lc(&self) -> F { match self.degree() { Some(non_negative_degree) => self.coefficients[non_negative_degree].clone(), None => F::zero(), } } } /// The following implementations are specific to cyclotomic polynomial rings, /// i.e., F\[ X \] / <X^n + 1>, and are used extensively in Falcon. impl< F: One + Zero + Clone + Neg<Output = F> + MulAssign + AddAssign + Div<Output = F> + Sub<Output = F> + PartialEq, > Polynomial<F> { /// Reduce the polynomial by X^n + 1. pub fn reduce_by_cyclotomic(&self, n: usize) -> Self { let mut coefficients = vec![F::zero(); n]; let mut sign = -F::one(); for (i, c) in self.coefficients.iter().cloned().enumerate() { if i.is_multiple_of(n) { sign *= -F::one(); } coefficients[i % n] += sign.clone() * c; } Polynomial::new(coefficients) } /// Computes the field norm of the polynomial as an element of the cyclotomic ring /// F\[ X \] / <X^n + 1 > relative to one of half the size, i.e., F\[ X \] / <X^(n/2) + 1> . /// /// Corresponds to formula 3.25 in the spec [1, p.30]. 
/// /// [1]: https://falcon-sign.info/falcon.pdf pub fn field_norm(&self) -> Self { let n = self.coefficients.len(); let mut f0_coefficients = vec![F::zero(); n / 2]; let mut f1_coefficients = vec![F::zero(); n / 2]; for i in 0..n / 2 { f0_coefficients[i] = self.coefficients[2 * i].clone(); f1_coefficients[i] = self.coefficients[2 * i + 1].clone(); } let f0 = Polynomial::new(f0_coefficients); let f1 = Polynomial::new(f1_coefficients); let f0_squared = (f0.clone() * f0).reduce_by_cyclotomic(n / 2); let f1_squared = (f1.clone() * f1).reduce_by_cyclotomic(n / 2); let x = Polynomial::new(vec![F::zero(), F::one()]); f0_squared - (x * f1_squared).reduce_by_cyclotomic(n / 2) } /// Lifts an element from a cyclotomic polynomial ring to one of double the size. pub fn lift_next_cyclotomic(&self) -> Self { let n = self.coefficients.len(); let mut coefficients = vec![F::zero(); n * 2]; for i in 0..n { coefficients[2 * i] = self.coefficients[i].clone(); } Self::new(coefficients) } /// Computes the galois adjoint of the polynomial in the cyclotomic ring F\[ X \] / < X^n + 1 > /// , which corresponds to f(x^2). 
pub fn galois_adjoint(&self) -> Self { Self::new( self.coefficients .iter() .enumerate() .map(|(i, c)| { if i.is_multiple_of(2) { c.clone() } else { c.clone().neg() } }) .collect(), ) } } impl<F: Clone + Into<f64>> Polynomial<F> { pub(crate) fn l2_norm_squared(&self) -> f64 { self.coefficients .iter() .map(|i| Into::<f64>::into(i.clone())) .map(|i| i * i) .sum::<f64>() } } impl<F> PartialEq for Polynomial<F> where F: Zero + PartialEq + Clone + AddAssign, { fn eq(&self, other: &Self) -> bool { if self.is_zero() && other.is_zero() { true } else if self.is_zero() || other.is_zero() { false } else { let self_degree = self.degree().unwrap(); let other_degree = other.degree().unwrap(); self.coefficients[0..=self_degree] == other.coefficients[0..=other_degree] } } } impl<F> Eq for Polynomial<F> where F: Zero + PartialEq + Clone + AddAssign {} impl<F> Add for &Polynomial<F> where F: Add<Output = F> + AddAssign + Clone, { type Output = Polynomial<F>; fn add(self, rhs: Self) -> Self::Output { let coefficients = if self.coefficients.len() >= rhs.coefficients.len() { let mut coefficients = self.coefficients.clone(); for (i, c) in rhs.coefficients.iter().enumerate() { coefficients[i] += c.clone(); } coefficients } else { let mut coefficients = rhs.coefficients.clone(); for (i, c) in self.coefficients.iter().enumerate() { coefficients[i] += c.clone(); } coefficients }; Self::Output { coefficients } } } impl<F> Add for Polynomial<F> where F: Add<Output = F> + AddAssign + Clone, { type Output = Polynomial<F>; fn add(self, rhs: Self) -> Self::Output { let coefficients = if self.coefficients.len() >= rhs.coefficients.len() { let mut coefficients = self.coefficients.clone(); for (i, c) in rhs.coefficients.into_iter().enumerate() { coefficients[i] += c; } coefficients } else { let mut coefficients = rhs.coefficients.clone(); for (i, c) in self.coefficients.into_iter().enumerate() { coefficients[i] += c; } coefficients }; Self::Output { coefficients } } } impl<F> AddAssign for 
Polynomial<F> where F: Add<Output = F> + AddAssign + Clone, { fn add_assign(&mut self, rhs: Self) { if self.coefficients.len() >= rhs.coefficients.len() { for (i, c) in rhs.coefficients.into_iter().enumerate() { self.coefficients[i] += c; } } else { let mut coefficients = rhs.coefficients.clone(); for (i, c) in self.coefficients.iter().enumerate() { coefficients[i] += c.clone(); } self.coefficients = coefficients; } } } impl<F> Sub for &Polynomial<F> where F: Sub<Output = F> + Clone + Neg<Output = F> + Add<Output = F> + AddAssign, { type Output = Polynomial<F>; fn sub(self, rhs: Self) -> Self::Output { self + &(-rhs) } } impl<F> Sub for Polynomial<F> where F: Sub<Output = F> + Clone + Neg<Output = F> + Add<Output = F> + AddAssign, { type Output = Polynomial<F>; fn sub(self, rhs: Self) -> Self::Output { self + (-rhs) } } impl<F> SubAssign for Polynomial<F> where F: Add<Output = F> + Neg<Output = F> + AddAssign + Clone + Sub<Output = F>, { fn sub_assign(&mut self, rhs: Self) { self.coefficients = self.clone().sub(rhs).coefficients; } } impl<F: Neg<Output = F> + Clone> Neg for &Polynomial<F> { type Output = Polynomial<F>; fn neg(self) -> Self::Output { Self::Output { coefficients: self.coefficients.iter().cloned().map(|a| -a).collect(), } } } impl<F: Neg<Output = F> + Clone> Neg for Polynomial<F> { type Output = Self; fn neg(self) -> Self::Output { Self::Output { coefficients: self.coefficients.iter().cloned().map(|a| -a).collect(), } } } impl<F> Mul for &Polynomial<F> where F: Add + AddAssign + Mul<Output = F> + Sub<Output = F> + Zero + PartialEq + Clone, { type Output = Polynomial<F>; fn mul(self, other: Self) -> Self::Output { if self.is_zero() || other.is_zero() { return Polynomial::<F>::zero(); } let mut coefficients = vec![F::zero(); self.coefficients.len() + other.coefficients.len() - 1]; for i in 0..self.coefficients.len() { for j in 0..other.coefficients.len() { coefficients[i + j] += self.coefficients[i].clone() * other.coefficients[j].clone(); } } 
Polynomial { coefficients } } } impl<F> Mul for Polynomial<F> where F: Add + AddAssign + Mul<Output = F> + Zero + PartialEq + Clone, { type Output = Self; fn mul(self, other: Self) -> Self::Output { if self.is_zero() || other.is_zero() { return Self::zero(); } let mut coefficients = vec![F::zero(); self.coefficients.len() + other.coefficients.len() - 1]; for i in 0..self.coefficients.len() { for j in 0..other.coefficients.len() { coefficients[i + j] += self.coefficients[i].clone() * other.coefficients[j].clone(); } } Self { coefficients } } } impl<F: Add + Mul<Output = F> + Zero + Clone> Mul<F> for &Polynomial<F> { type Output = Polynomial<F>; fn mul(self, other: F) -> Self::Output { Polynomial { coefficients: self.coefficients.iter().cloned().map(|i| i * other.clone()).collect(), } } } impl<F: Add + Mul<Output = F> + Zero + Clone> Mul<F> for Polynomial<F> { type Output = Polynomial<F>; fn mul(self, other: F) -> Self::Output { Polynomial { coefficients: self.coefficients.iter().cloned().map(|i| i * other.clone()).collect(), } } } impl<F: Mul<Output = F> + Sub<Output = F> + AddAssign + Zero + Div<Output = F> + Clone> Polynomial<F> { /// Multiply two polynomials using Karatsuba's divide-and-conquer algorithm. pub fn karatsuba(&self, other: &Self) -> Self { Polynomial::new(vector_karatsuba(&self.coefficients, &other.coefficients)) } } impl<F> One for Polynomial<F> where F: Clone + One + PartialEq + Zero + AddAssign, { fn one() -> Self { Self { coefficients: vec![F::one()] } } } impl<F> Zero for Polynomial<F> where F: Zero + PartialEq + Clone + AddAssign, { fn zero() -> Self { Self { coefficients: vec![] } } fn is_zero(&self) -> bool { self.degree().is_none() } } impl<F: Zero + Clone> Polynomial<F> { /// Shifts the polynomial by the specified amount (adds leading zeros). pub fn shift(&self, shamt: usize) -> Self { Self { coefficients: [vec![F::zero(); shamt], self.coefficients.clone()].concat(), } } /// Creates a constant polynomial with a single coefficient. 
pub fn constant(f: F) -> Self { Self { coefficients: vec![f] } } /// Applies a function to each coefficient and returns a new polynomial. pub fn map<G: Clone, C: FnMut(&F) -> G>(&self, closure: C) -> Polynomial<G> { Polynomial::<G>::new(self.coefficients.iter().map(closure).collect()) } /// Folds the coefficients using the provided function and initial value. pub fn fold<G, C: FnMut(G, &F) -> G + Clone>(&self, mut initial_value: G, closure: C) -> G { for c in self.coefficients.iter() { initial_value = (closure.clone())(initial_value, c); } initial_value } } impl<F> Div<Polynomial<F>> for Polynomial<F> where F: Zero + One + PartialEq + AddAssign + Clone + Mul<Output = F> + MulAssign + Div<Output = F> + Neg<Output = F> + Sub<Output = F>, { type Output = Polynomial<F>; fn div(self, denominator: Self) -> Self::Output { if denominator.is_zero() { panic!(); } if self.is_zero() { Self::zero(); } let mut remainder = self.clone(); let mut quotient = Polynomial::<F>::zero(); while remainder.degree().unwrap() >= denominator.degree().unwrap() { let shift = remainder.degree().unwrap() - denominator.degree().unwrap(); let quotient_coefficient = remainder.lc() / denominator.lc(); let monomial = Self::constant(quotient_coefficient).shift(shift); quotient += monomial.clone(); remainder -= monomial * denominator.clone(); if remainder.is_zero() { break; } } quotient } } fn vector_karatsuba< F: Zero + AddAssign + Mul<Output = F> + Sub<Output = F> + Div<Output = F> + Clone, >( left: &[F], right: &[F], ) -> Vec<F> { let n = left.len(); if n <= 8 { let mut product = vec![F::zero(); left.len() + right.len() - 1]; for (i, l) in left.iter().enumerate() { for (j, r) in right.iter().enumerate() { product[i + j] += l.clone() * r.clone(); } } return product; } let n_over_2 = n / 2; let mut product = vec![F::zero(); 2 * n - 1]; let left_lo = &left[0..n_over_2]; let right_lo = &right[0..n_over_2]; let left_hi = &left[n_over_2..]; let right_hi = &right[n_over_2..]; let left_sum: Vec<F> = 
left_lo.iter().zip(left_hi).map(|(a, b)| a.clone() + b.clone()).collect(); let right_sum: Vec<F> = right_lo.iter().zip(right_hi).map(|(a, b)| a.clone() + b.clone()).collect(); let prod_lo = vector_karatsuba(left_lo, right_lo); let prod_hi = vector_karatsuba(left_hi, right_hi); let prod_mid: Vec<F> = vector_karatsuba(&left_sum, &right_sum) .iter() .zip(prod_lo.iter().zip(prod_hi.iter())) .map(|(s, (l, h))| s.clone() - (l.clone() + h.clone())) .collect(); for (i, l) in prod_lo.into_iter().enumerate() { product[i] = l; } for (i, m) in prod_mid.into_iter().enumerate() { product[i + n_over_2] += m; } for (i, h) in prod_hi.into_iter().enumerate() { product[i + n] += h } product } impl From<Polynomial<FalconFelt>> for Polynomial<Felt> { fn from(item: Polynomial<FalconFelt>) -> Self { let res: Vec<Felt> = item.coefficients.iter().map(|a| Felt::from_u16(a.value() as u16)).collect(); Polynomial::new(res) } } impl From<&Polynomial<FalconFelt>> for Polynomial<Felt> { fn from(item: &Polynomial<FalconFelt>) -> Self { let res: Vec<Felt> = item.coefficients.iter().map(|a| Felt::from_u16(a.value() as u16)).collect(); Polynomial::new(res) } } impl From<Polynomial<i16>> for Polynomial<FalconFelt> { fn from(item: Polynomial<i16>) -> Self { let res: Vec<FalconFelt> = item.coefficients.iter().map(|&a| FalconFelt::new(a)).collect(); Polynomial::new(res) } } impl From<&Polynomial<i16>> for Polynomial<FalconFelt> { fn from(item: &Polynomial<i16>) -> Self { let res: Vec<FalconFelt> = item.coefficients.iter().map(|&a| FalconFelt::new(a)).collect(); Polynomial::new(res) } } impl From<Vec<i16>> for Polynomial<FalconFelt> { fn from(item: Vec<i16>) -> Self { let res: Vec<FalconFelt> = item.iter().map(|&a| FalconFelt::new(a)).collect(); Polynomial::new(res) } } impl From<&Vec<i16>> for Polynomial<FalconFelt> { fn from(item: &Vec<i16>) -> Self { let res: Vec<FalconFelt> = item.iter().map(|&a| FalconFelt::new(a)).collect(); Polynomial::new(res) } } impl Polynomial<FalconFelt> { /// Computes the 
squared L2 norm of the polynomial. pub fn norm_squared(&self) -> u64 { self.coefficients .iter() .map(|&i| i.balanced_value() as i64) .map(|i| (i * i) as u64) .sum::<u64>() } // PUBLIC ACCESSORS // -------------------------------------------------------------------------------------------- /// Returns the coefficients of this polynomial as field elements. pub fn to_elements(&self) -> Vec<Felt> { self.coefficients.iter().map(|&a| Felt::from_u16(a.value() as u16)).collect() } // POLYNOMIAL OPERATIONS // -------------------------------------------------------------------------------------------- /// Multiplies two polynomials over Z_p\[x\] without reducing modulo p. Given that the degrees /// of the input polynomials are less than 512 and their coefficients are less than the modulus /// q equal to 12289, the resulting product polynomial is guaranteed to have coefficients less /// than the Miden prime. /// /// Note that this multiplication is not over Z_p\[x\]/(phi). pub fn mul_modulo_p(a: &Self, b: &Self) -> [u64; 1024] { let mut c = [0; 2 * N]; for i in 0..N { for j in 0..N { c[i + j] += a.coefficients[i].value() as u64 * b.coefficients[j].value() as u64; } } c } /// Reduces a polynomial, that is the product of two polynomials over Z_p\[x\], modulo /// the irreducible polynomial phi. This results in an element in Z_p\[x\]/(phi). pub fn reduce_negacyclic(a: &[u64; 1024]) -> Self { let mut c = [FalconFelt::zero(); N]; let modulus = MODULUS as u16; for i in 0..N { let ai = a[N + i] % modulus as u64; let neg_ai = (modulus - ai as u16) % modulus; let bi = (a[i] % modulus as u64) as u16; c[i] = FalconFelt::new(((neg_ai + bi) % modulus) as i16); } Self::new(c.to_vec()) } } impl Polynomial<Felt> { /// Returns the coefficients of this polynomial as Miden field elements. pub fn to_elements(&self) -> Vec<Felt> { self.coefficients.to_vec() } } impl Polynomial<i16> { /// Returns the balanced values of the coefficients of this polynomial. 
pub fn to_balanced_values(&self) -> Vec<i16> { self.coefficients.iter().map(|c| FalconFelt::new(*c).balanced_value()).collect() } } // ZEROIZE IMPLEMENTATIONS // ================================================================================================ impl<F: Zeroize> Zeroize for Polynomial<F> { fn zeroize(&mut self) { self.coefficients.zeroize(); } } impl<F: Zeroize> ZeroizeOnDrop for Polynomial<F> {} // TESTS // ================================================================================================ #[cfg(all(test, feature = "std"))] mod tests { use super::{FalconFelt, N, Polynomial}; use crate::rand::test_utils::rand_array; #[test] fn test_negacyclic_reduction() { let coef1: [u8; N] = rand_array(); let coef2: [u8; N] = rand_array(); let poly1 = Polynomial::new(coef1.iter().map(|&a| FalconFelt::new(a as i16)).collect()); let poly2 = Polynomial::new(coef2.iter().map(|&a| FalconFelt::new(a as i16)).collect()); let prod = poly1.clone() * poly2.clone(); assert_eq!( prod.reduce_by_cyclotomic(N), Polynomial::reduce_negacyclic(&Polynomial::mul_modulo_p(&poly1, &poly2)) ); } }
rust
Apache-2.0
b30552ecceb5f70565cc0267fca227f30c5af7ab
2026-01-04T20:24:48.363198Z
false
0xMiden/crypto
https://github.com/0xMiden/crypto/blob/b30552ecceb5f70565cc0267fca227f30c5af7ab/miden-crypto/src/dsa/falcon512_rpo/math/mod.rs
miden-crypto/src/dsa/falcon512_rpo/math/mod.rs
//! Contains different structs and methods related to the Falcon DSA. //! //! It uses and acknowledges the work in: //! //! 1. The [reference](https://falcon-sign.info/impl/README.txt.html) implementation by Thomas //! Pornin. //! 2. The [Rust](https://github.com/aszepieniec/falcon-rust) implementation by Alan Szepieniec. use alloc::vec::Vec; use core::ops::MulAssign; use num::{BigInt, FromPrimitive, One, Zero}; use num_complex::Complex64; use rand::Rng; use super::{ MODULUS, keys::{WIDTH_BIG_POLY_COEFFICIENT, WIDTH_SMALL_POLY_COEFFICIENT}, }; mod fft; pub use fft::{CyclotomicFourier, FastFft}; mod field; pub use field::FalconFelt; mod ffsampling; pub use ffsampling::{LdlTree, ffldl, ffsampling, gram, normalize_tree}; mod samplerz; use self::samplerz::sampler_z; mod polynomial; pub use polynomial::Polynomial; const MAX_SMALL_POLY_COEFFICIENT_SIZE: i16 = (1 << (WIDTH_SMALL_POLY_COEFFICIENT - 1)) - 1; const MAX_BIG_POLY_COEFFICIENT_SIZE: i16 = (1 << (WIDTH_BIG_POLY_COEFFICIENT - 1)) - 1; pub trait Inverse: Copy + Zero + MulAssign + One { /// Gets the inverse of a, or zero if it is zero. fn inverse_or_zero(self) -> Self; /// Gets the inverses of a batch of elements, and skip over any that are zero. 
fn batch_inverse_or_zero(batch: &[Self]) -> Vec<Self> { let mut acc = Self::one(); let mut rp: Vec<Self> = Vec::with_capacity(batch.len()); for batch_item in batch { if !batch_item.is_zero() { rp.push(acc); acc = *batch_item * acc; } else { rp.push(Self::zero()); } } let mut inv = Self::inverse_or_zero(acc); for i in (0..batch.len()).rev() { if !batch[i].is_zero() { rp[i] *= inv; inv *= batch[i]; } } rp } } impl Inverse for Complex64 { fn inverse_or_zero(self) -> Self { let modulus = self.re * self.re + self.im * self.im; Complex64::new(self.re / modulus, -self.im / modulus) } fn batch_inverse_or_zero(batch: &[Self]) -> Vec<Self> { batch.iter().map(|&c| Complex64::new(1.0, 0.0) / c).collect() } } impl Inverse for f64 { fn inverse_or_zero(self) -> Self { 1.0 / self } fn batch_inverse_or_zero(batch: &[Self]) -> Vec<Self> { batch.iter().map(|&c| 1.0 / c).collect() } } /// Samples 4 small polynomials f, g, F, G such that f * G - g * F = q mod (X^n + 1). /// Algorithm 5 (NTRUgen) of the documentation [1, p.34]. 
///
/// [1]: https://falcon-sign.info/falcon.pdf
pub(crate) fn ntru_gen<R: Rng>(n: usize, rng: &mut R) -> [Polynomial<i16>; 4] {
    loop {
        let f = gen_poly(n, rng);
        let g = gen_poly(n, rng);

        // we do bound checks on the coefficients of the sampled polynomials in order to make sure
        // that they will be encodable/decodable
        if !(check_coefficients_bound(&f, MAX_SMALL_POLY_COEFFICIENT_SIZE)
            && check_coefficients_bound(&g, MAX_SMALL_POLY_COEFFICIENT_SIZE))
        {
            continue;
        }

        // f must be invertible modulo q: reject if any NTT coefficient is zero.
        let f_ntt = f.map(|&i| FalconFelt::new(i)).fft();
        if f_ntt.coefficients.iter().any(|e| e.is_zero()) {
            continue;
        }

        // Reject bases with a too-large Gram-Schmidt norm (line 9 of Algorithm 5);
        // 1.3689 = 1.17^2 is the squared acceptance bound from the spec.
        let gamma = gram_schmidt_norm_squared(&f, &g);
        if gamma > 1.3689f64 * (MODULUS as f64) {
            continue;
        }

        if let Some((capital_f, capital_g)) =
            ntru_solve(&f.map(|&i| i.into()), &g.map(|&i| i.into()))
        {
            // Fallibly narrow the BigInt solution to i16 coefficients. The previous
            // `try_into().unwrap()` panicked whenever `ntru_solve` produced a
            // coefficient outside the i16 range; resampling (`continue`) is the
            // correct reaction, consistent with the encodability checks below.
            let (Ok(capital_f), Ok(capital_g)) =
                (try_into_i16_poly(&capital_f), try_into_i16_poly(&capital_g))
            else {
                continue;
            };

            // we do bound checks on the coefficients of the solution polynomials in order to make
            // sure that they will be encodable/decodable
            if !(check_coefficients_bound(&capital_f, MAX_BIG_POLY_COEFFICIENT_SIZE)
                && check_coefficients_bound(&capital_g, MAX_BIG_POLY_COEFFICIENT_SIZE))
            {
                continue;
            }

            return [g, -f, capital_g, -capital_f];
        }
    }
}

/// Attempts to narrow a polynomial with [BigInt] coefficients to i16 coefficients.
///
/// Returns `Err(())` if any coefficient does not fit into an i16, so the caller can
/// reject the candidate key instead of panicking.
fn try_into_i16_poly(p: &Polynomial<BigInt>) -> Result<Polynomial<i16>, ()> {
    let coefficients = p
        .coefficients
        .iter()
        .map(|c| i16::try_from(c).map_err(|_| ()))
        .collect::<Result<Vec<i16>, ()>>()?;
    Ok(Polynomial { coefficients })
}

/// Solves the NTRU equation. Given f, g in ZZ[X], find F, G in ZZ[X] such that:
///
///    f G - g F = q  mod (X^n + 1)
///
/// Algorithm 6 of the specification [1, p.35].
///
/// [1]: https://falcon-sign.info/falcon.pdf
fn ntru_solve(
    f: &Polynomial<BigInt>,
    g: &Polynomial<BigInt>,
) -> Option<(Polynomial<BigInt>, Polynomial<BigInt>)> {
    let n = f.coefficients.len();
    if n == 1 {
        // Base case: f and g are scalars. Solve u*f + v*g = gcd(f, g); a solution
        // exists only when the gcd is 1, in which case (F, G) = (-v*q, u*q) gives
        // f*G - g*F = q*(u*f + v*g) = q.
        let (gcd, u, v) = xgcd(&f.coefficients[0], &g.coefficients[0]);
        if gcd != BigInt::one() {
            return None;
        }
        return Some((
            (Polynomial::new(vec![-v * BigInt::from_u32(MODULUS as u32).unwrap()])),
            Polynomial::new(vec![u * BigInt::from_u32(MODULUS as u32).unwrap()]),
        ));
    }

    // Recursive case: take field norms to halve the degree and solve the smaller
    // NTRU equation for (F', G').
    let f_prime = f.field_norm();
    let g_prime = g.field_norm();

    let (capital_f_prime, capital_g_prime) = ntru_solve(&f_prime, &g_prime)?;

    // Lift the half-size solution back up: substitute X -> X^2 and multiply by the
    // Galois adjoints (coefficients of f, g with X -> -X) to recover a solution
    // modulo X^n + 1.
    let capital_f_prime_xsq = capital_f_prime.lift_next_cyclotomic();
    let capital_g_prime_xsq = capital_g_prime.lift_next_cyclotomic();

    let f_minx = f.galois_adjoint();
    let g_minx = g.galois_adjoint();

    let mut capital_f = (capital_f_prime_xsq.karatsuba(&g_minx)).reduce_by_cyclotomic(n);
    let mut capital_g = (capital_g_prime_xsq.karatsuba(&f_minx)).reduce_by_cyclotomic(n);

    // The lifted (F, G) is valid but its coefficients are typically huge; reduce it
    // against (f, g). `None` from the reduction aborts so the caller can resample.
    babai_reduce(f, g, &mut capital_f, &mut capital_g).map(|()| (capital_f, capital_g))
}

/// Generates a polynomial of degree at most n-1 whose coefficients are distributed according
/// to a discrete Gaussian with mu = 0 and sigma = 1.17 * sqrt(Q / (2n)).
fn gen_poly<R: Rng>(n: usize, rng: &mut R) -> Polynomial<i16> {
    let mu = 0.0;
    // Per-sample sigma; NOTE(review): summing 4096/n base samples per coefficient
    // presumably yields the per-coefficient sigma quoted above — confirm against
    // the spec before changing either constant.
    let sigma_star = 1.43300980528773;
    Polynomial {
        // Always draw 4096 base samples and sum them in groups of 4096/n, so the
        // same sampler parameters serve every supported degree n.
        coefficients: (0..4096)
            .map(|_| sampler_z(mu, sigma_star, sigma_star - 0.001, rng))
            .collect::<Vec<i16>>()
            .chunks(4096 / n)
            .map(|ch| ch.iter().sum())
            .collect(),
    }
}

/// Computes the Gram-Schmidt norm of B = [[g, -f], [G, -F]] from f and g.
/// Corresponds to line 9 in algorithm 5 of the spec [1, p.34] /// /// [1]: https://falcon-sign.info/falcon.pdf fn gram_schmidt_norm_squared(f: &Polynomial<i16>, g: &Polynomial<i16>) -> f64 { let n = f.coefficients.len(); let norm_f_squared = f.l2_norm_squared(); let norm_g_squared = g.l2_norm_squared(); let gamma1 = norm_f_squared + norm_g_squared; let f_fft = f.map(|i| Complex64::new(*i as f64, 0.0)).fft(); let g_fft = g.map(|i| Complex64::new(*i as f64, 0.0)).fft(); let f_adj_fft = f_fft.map(|c| c.conj()); let g_adj_fft = g_fft.map(|c| c.conj()); let ffgg_fft = f_fft.hadamard_mul(&f_adj_fft) + g_fft.hadamard_mul(&g_adj_fft); let ffgg_fft_inverse = ffgg_fft.hadamard_inv(); let qf_over_ffgg_fft = f_adj_fft.map(|c| c * (MODULUS as f64)).hadamard_mul(&ffgg_fft_inverse); let qg_over_ffgg_fft = g_adj_fft.map(|c| c * (MODULUS as f64)).hadamard_mul(&ffgg_fft_inverse); let norm_f_over_ffgg_squared = qf_over_ffgg_fft.coefficients.iter().map(|c| (c * c.conj()).re).sum::<f64>() / (n as f64); let norm_g_over_ffgg_squared = qg_over_ffgg_fft.coefficients.iter().map(|c| (c * c.conj()).re).sum::<f64>() / (n as f64); let gamma2 = norm_f_over_ffgg_squared + norm_g_over_ffgg_squared; f64::max(gamma1, gamma2) } /// Reduces the vector (F,G) relative to (f,g). This method follows the python implementation [1]. /// Note that this algorithm can end up in an infinite loop. (It's one of the things the author /// would like to fix.) When this happens, control returns an error (hence the return type) and /// generates another keypair with fresh randomness. 
///
/// Algorithm 7 in the spec [2, p.35]
///
/// [1]: https://github.com/tprest/falcon.py
///
/// [2]: https://falcon-sign.info/falcon.pdf
fn babai_reduce(
    f: &Polynomial<BigInt>,
    g: &Polynomial<BigInt>,
    capital_f: &mut Polynomial<BigInt>,
    capital_g: &mut Polynomial<BigInt>,
) -> Option<()> {
    // Bit length of a BigInt, rounded up to the next multiple of 8.
    let bitsize = |bi: &BigInt| (bi.bits() + 7) & (u64::MAX ^ 7);
    let n = f.coefficients.len();
    // Precision of (f, g): the largest byte-rounded coefficient size, floored at
    // the 53 mantissa bits of an f64.
    let size = [
        f.map(bitsize).fold(0, |a, &b| u64::max(a, b)),
        g.map(bitsize).fold(0, |a, &b| u64::max(a, b)),
        53,
    ]
    .into_iter()
    .max()
    .unwrap();
    // Shift f and g down so their top 53 bits fit an f64 exactly, then move to the
    // Fourier domain. The denominator f f* + g g* is loop-invariant, so it is
    // computed once here.
    let shift = (size as i64) - 53;
    let f_adjusted = f
        .map(|bi| Complex64::new(i64::try_from(bi >> shift).unwrap() as f64, 0.0))
        .fft();
    let g_adjusted = g
        .map(|bi| Complex64::new(i64::try_from(bi >> shift).unwrap() as f64, 0.0))
        .fft();
    let f_star_adjusted = f_adjusted.map(|c| c.conj());
    let g_star_adjusted = g_adjusted.map(|c| c.conj());
    let denominator_fft =
        f_adjusted.hadamard_mul(&f_star_adjusted) + g_adjusted.hadamard_mul(&g_star_adjusted);

    let mut counter = 0;
    loop {
        // Current precision of (F, G); once it drops below that of (f, g) the
        // reduction has converged.
        let capital_size = [
            capital_f.map(bitsize).fold(0, |a, &b| u64::max(a, b)),
            capital_g.map(bitsize).fold(0, |a, &b| u64::max(a, b)),
            53,
        ]
        .into_iter()
        .max()
        .unwrap();
        if capital_size < size {
            break;
        }
        // Scale (F, G) to the same 53-bit working precision as (f, g).
        let capital_shift = (capital_size as i64) - 53;
        let capital_f_adjusted = capital_f
            .map(|bi| Complex64::new(i64::try_from(bi >> capital_shift).unwrap() as f64, 0.0))
            .fft();
        let capital_g_adjusted = capital_g
            .map(|bi| Complex64::new(i64::try_from(bi >> capital_shift).unwrap() as f64, 0.0))
            .fft();

        // Rounded quotient k = round((F f* + G g*) / (f f* + g g*)), the
        // nearest-plane coefficient vector.
        let numerator = capital_f_adjusted.hadamard_mul(&f_star_adjusted)
            + capital_g_adjusted.hadamard_mul(&g_star_adjusted);
        let quotient = numerator.hadamard_div(&denominator_fft).ifft();

        let k = quotient.map(|f| Into::<BigInt>::into(f.re.round() as i64));

        if k.is_zero() {
            break;
        }
        // Subtract k*(f, g), re-scaled by the precision gap between (F, G) and
        // (f, g), from (F, G).
        let kf = (k.clone().karatsuba(f))
            .reduce_by_cyclotomic(n)
            .map(|bi| bi << (capital_size - size));
        let kg = (k.clone().karatsuba(g))
            .reduce_by_cyclotomic(n)
            .map(|bi| bi << (capital_size - size));
        *capital_f -= kf;
        *capital_g -= kg;

        counter += 1;
        if counter > 1000 {
            // If we get here, it means that (with high likelihood) we are in an infinite loop.
            // `None` tells the caller to abandon this candidate and resample.
            return None;
        }
    }
    Some(())
}

/// Extended Euclidean algorithm for computing the greatest common divisor (g) and
/// Bézout coefficients (u, v) for the relation
///
/// $$ u a + v b = g . $$
///
/// Implementation adapted from Wikipedia [1].
///
/// [1]: https://en.wikipedia.org/wiki/Extended_Euclidean_algorithm#Pseudocode
fn xgcd(a: &BigInt, b: &BigInt) -> (BigInt, BigInt, BigInt) {
    // (old_r, r): remainder sequence; (old_s, s), (old_t, t): Bézout coefficient
    // sequences for a and b respectively.
    let (mut old_r, mut r) = (a.clone(), b.clone());
    let (mut old_s, mut s) = (BigInt::one(), BigInt::zero());
    let (mut old_t, mut t) = (BigInt::zero(), BigInt::one());

    while r != BigInt::zero() {
        let quotient = old_r.clone() / r.clone();
        (old_r, r) = (r.clone(), old_r.clone() - quotient.clone() * r);
        (old_s, s) = (s.clone(), old_s.clone() - quotient.clone() * s);
        (old_t, t) = (t.clone(), old_t.clone() - quotient * t);
    }

    // old_r = gcd(a, b); old_s * a + old_t * b == old_r.
    (old_r, old_s, old_t)
}

/// Asserts that the balanced values of the coefficients of a polynomial are within the interval
/// [-bound, bound].
fn check_coefficients_bound(polynomial: &Polynomial<i16>, bound: i16) -> bool {
    polynomial.to_balanced_values().iter().all(|c| *c <= bound && *c >= -bound)
}
rust
Apache-2.0
b30552ecceb5f70565cc0267fca227f30c5af7ab
2026-01-04T20:24:48.363198Z
false