repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
DavidLiedle/DriftDB | https://github.com/DavidLiedle/DriftDB/blob/f0ef611fd8d3507a6dcb8c35c9eae3ff3ea30e30/crates/driftdb-client/examples/time_travel.rs | crates/driftdb-client/examples/time_travel.rs | //! Time-travel query example for DriftDB
//!
//! This example demonstrates DriftDB's unique time-travel capabilities,
//! allowing you to query historical states of your data.
//!
//! Run with:
//! ```bash
//! cargo run --example time_travel
//! ```
use driftdb_client::{Client, Result, TimeTravel};
#[tokio::main]
async fn main() -> Result<()> {
// Initialize logging
tracing_subscriber::fmt::init();
println!("π°οΈ DriftDB Client - Time-Travel Example\n");
// Connect to DriftDB
println!("Connecting to DriftDB...");
let client = Client::connect("localhost:5433").await?;
println!("β Connected!\n");
// Create a table for our demo
println!("Setting up demo table...");
let _ = client.execute("DROP TABLE products").await; // Ignore error if table doesn't exist
client
.execute("CREATE TABLE products (id BIGINT PRIMARY KEY, name TEXT, price BIGINT)")
.await?;
println!("β Table ready!\n");
// === Version 1: Initial state ===
println!("π Version 1: Creating initial product...");
client
.execute("INSERT INTO products (id, name, price) VALUES (1, 'Widget', 1000)")
.await?;
let seq1 = client.current_sequence().await?;
println!(" Sequence: {}", seq1);
print_product(&client, "SELECT * FROM products WHERE id = 1").await?;
// === Version 2: Price increase ===
println!("\nπ Version 2: Increasing price...");
client
.execute("UPDATE products SET price = 1500 WHERE id = 1")
.await?;
let seq2 = client.current_sequence().await?;
println!(" Sequence: {}", seq2);
print_product(&client, "SELECT * FROM products WHERE id = 1").await?;
// === Version 3: Product rename ===
println!("\nπ Version 3: Renaming product...");
client
.execute("UPDATE products SET name = 'Super Widget' WHERE id = 1")
.await?;
let seq3 = client.current_sequence().await?;
println!(" Sequence: {}", seq3);
print_product(&client, "SELECT * FROM products WHERE id = 1").await?;
// === Version 4: Final price update ===
println!("\nπ Version 4: Final price adjustment...");
client
.execute("UPDATE products SET price = 1200 WHERE id = 1")
.await?;
let seq4 = client.current_sequence().await?;
println!(" Sequence: {}", seq4);
print_product(&client, "SELECT * FROM products WHERE id = 1").await?;
// Now let's time-travel!
println!("\n\nπ°οΈ TIME-TRAVEL QUERIES\n");
println!("βββββββββββββββββββββββββββββββββββββββ\n");
// Query at each historical point
println!("π Product at sequence {} (Version 1):", seq1);
let rows = client
.query_builder("SELECT * FROM products WHERE id = 1")
.as_of(TimeTravel::Sequence(seq1))
.execute()
.await?;
print_row_details(&rows);
println!("\nπ Product at sequence {} (Version 2):", seq2);
let rows = client
.query_builder("SELECT * FROM products WHERE id = 1")
.as_of(TimeTravel::Sequence(seq2))
.execute()
.await?;
print_row_details(&rows);
println!("\nπ Product at sequence {} (Version 3):", seq3);
let rows = client
.query_builder("SELECT * FROM products WHERE id = 1")
.as_of(TimeTravel::Sequence(seq3))
.execute()
.await?;
print_row_details(&rows);
println!("\nπ Product at current state (Version 4):");
let rows = client.query("SELECT * FROM products WHERE id = 1").await?;
print_row_details(&rows);
// Show all versions
println!("\n\nπ ALL HISTORICAL VERSIONS\n");
println!("βββββββββββββββββββββββββββββββββββββββ\n");
let rows = client
.query_builder("SELECT * FROM products WHERE id = 1")
.as_of(TimeTravel::All)
.execute()
.await?;
println!("Found {} versions in history:", rows.len());
for (idx, row) in rows.iter().enumerate() {
let name = row.get("name").and_then(|v| v.as_str()).unwrap_or("?");
let price = row.get("price").and_then(|v| v.as_i64()).unwrap_or(0);
println!(" Version {}: {} - ${:.2}", idx + 1, name, price as f64 / 100.0);
}
println!("\n⨠Time-travel example completed!");
println!("\nπ‘ Key Takeaway: DriftDB preserves complete history!");
println!(" You can query ANY previous state of your data.");
Ok(())
}
async fn print_product(client: &Client, sql: &str) -> Result<()> {
let rows = client.query(sql).await?;
if let Some(row) = rows.first() {
let name = row.get("name").and_then(|v| v.as_str()).unwrap_or("?");
let price = row.get("price").and_then(|v| v.as_i64()).unwrap_or(0);
println!(" Product: {} - ${:.2}", name, price as f64 / 100.0);
}
Ok(())
}
fn print_row_details(rows: &[driftdb_client::types::Row]) {
if let Some(row) = rows.first() {
let name = row.get("name").and_then(|v| v.as_str()).unwrap_or("?");
let price = row.get("price").and_then(|v| v.as_i64()).unwrap_or(0);
println!(" Name: {}", name);
println!(" Price: ${:.2}", price as f64 / 100.0);
} else {
println!(" (No data)");
}
}
| rust | MIT | f0ef611fd8d3507a6dcb8c35c9eae3ff3ea30e30 | 2026-01-04T20:22:48.382079Z | false |
DavidLiedle/DriftDB | https://github.com/DavidLiedle/DriftDB/blob/f0ef611fd8d3507a6dcb8c35c9eae3ff3ea30e30/crates/driftdb-client/examples/typed_queries.rs | crates/driftdb-client/examples/typed_queries.rs | //! Typed queries example using serde deserialization
//!
//! This example shows how to use strongly-typed structs with DriftDB queries
//! for better type safety and ergonomics.
//!
//! Run with:
//! ```bash
//! cargo run --example typed_queries
//! ```
use driftdb_client::{Client, Result, TimeTravel};
use serde::Deserialize;
// Define our domain models with serde
#[derive(Debug, Deserialize)]
struct User {
id: i64,
username: String,
email: String,
active: bool,
}
#[derive(Debug, Deserialize)]
struct Order {
id: i64,
user_id: i64,
product: String,
amount: i64,
status: String,
}
#[derive(Debug, Deserialize)]
struct OrderSummary {
user_id: i64,
total_orders: i64,
total_amount: i64,
}
#[tokio::main]
async fn main() -> Result<()> {
tracing_subscriber::fmt::init();
println!("π― DriftDB Client - Typed Queries Example\n");
let client = Client::connect("localhost:5433").await?;
println!("β Connected!\n");
// Setup tables
println!("Setting up tables...");
setup_tables(&client).await?;
println!("β Tables ready!\n");
// === Example 1: Query users with type safety ===
println!("π Example 1: Typed user queries\n");
let users: Vec<User> = client
.query_as("SELECT * FROM users ORDER BY id")
.await?;
println!("Found {} users:", users.len());
for user in &users {
println!(
" - #{}: {} <{}> [{}]",
user.id,
user.username,
user.email,
if user.active { "active" } else { "inactive" }
);
}
// === Example 2: Filtered queries ===
println!("\nπ Example 2: Filter active users\n");
let active_users: Vec<User> = client
.query_as("SELECT * FROM users WHERE active = true")
.await?;
println!("Active users: {}", active_users.len());
for user in &active_users {
println!(" - {}", user.username);
}
// === Example 3: Typed aggregations ===
println!("\nπ Example 3: Order summaries by user\n");
let summaries: Vec<OrderSummary> = client
.query_as(
"SELECT user_id, COUNT(*) as total_orders, SUM(amount) as total_amount \
FROM orders \
GROUP BY user_id \
ORDER BY total_amount DESC"
)
.await?;
println!("Order summaries:");
for summary in &summaries {
println!(
" - User #{}: {} orders, ${:.2} total",
summary.user_id,
summary.total_orders,
summary.total_amount as f64 / 100.0
);
}
// === Example 4: Time-travel with typed queries ===
println!("\nπ Example 4: Time-travel + typed queries\n");
// Get current sequence
let current_seq = client.current_sequence().await?;
println!("Current sequence: {}", current_seq);
// Update a user
client
.execute("UPDATE users SET active = false WHERE id = 2")
.await?;
// Query current state
let users_now: Vec<User> = client
.query_as("SELECT * FROM users ORDER BY id")
.await?;
let active_now = users_now.iter().filter(|u| u.active).count();
println!("Active users now: {}", active_now);
// Query historical state (before the update)
let users_then: Vec<User> = client
.query_builder("SELECT * FROM users ORDER BY id")
.as_of(TimeTravel::Sequence(current_seq))
.execute_as()
.await?;
let active_then = users_then.iter().filter(|u| u.active).count();
println!("Active users at sequence {}: {}", current_seq, active_then);
println!("\n⨠Typed queries example completed!");
println!("\nπ‘ Benefits of typed queries:");
println!(" β Compile-time type safety");
println!(" β IDE autocomplete support");
println!(" β Automatic deserialization");
println!(" β Less boilerplate code");
Ok(())
}
async fn setup_tables(client: &Client) -> Result<()> {
// Drop tables if they exist from previous runs
let _ = client.execute("DROP TABLE orders").await;
let _ = client.execute("DROP TABLE users").await;
// Create users table
client
.execute(
"CREATE TABLE users (\
id BIGINT PRIMARY KEY, \
username TEXT, \
email TEXT, \
active BOOLEAN\
)"
)
.await?;
// Create orders table
client
.execute(
"CREATE TABLE orders (\
id BIGINT PRIMARY KEY, \
user_id BIGINT, \
product TEXT, \
amount BIGINT, \
status TEXT\
)"
)
.await?;
// Insert sample users
client
.execute("INSERT INTO users (id, username, email, active) VALUES (1, 'alice', 'alice@example.com', true)")
.await?;
client
.execute("INSERT INTO users (id, username, email, active) VALUES (2, 'bob', 'bob@example.com', true)")
.await?;
client
.execute("INSERT INTO users (id, username, email, active) VALUES (3, 'charlie', 'charlie@example.com', false)")
.await?;
// Insert sample orders
client
.execute("INSERT INTO orders (id, user_id, product, amount, status) VALUES (1, 1, 'Widget', 2500, 'completed')")
.await?;
client
.execute("INSERT INTO orders (id, user_id, product, amount, status) VALUES (2, 1, 'Gadget', 1500, 'completed')")
.await?;
client
.execute("INSERT INTO orders (id, user_id, product, amount, status) VALUES (3, 2, 'Widget', 2500, 'pending')")
.await?;
client
.execute("INSERT INTO orders (id, user_id, product, amount, status) VALUES (4, 2, 'Doohickey', 3500, 'completed')")
.await?;
Ok(())
}
| rust | MIT | f0ef611fd8d3507a6dcb8c35c9eae3ff3ea30e30 | 2026-01-04T20:22:48.382079Z | false |
DavidLiedle/DriftDB | https://github.com/DavidLiedle/DriftDB/blob/f0ef611fd8d3507a6dcb8c35c9eae3ff3ea30e30/crates/driftdb-client/examples/parameterized_queries.rs | crates/driftdb-client/examples/parameterized_queries.rs | //! Parameterized queries example for DriftDB
//!
//! This example demonstrates safe parameterized queries that prevent SQL injection.
//! Uses client-side escaping as a workaround until the server supports full
//! parameterized queries via the extended query protocol.
//!
//! Run with:
//! ```bash
//! cargo run --example parameterized_queries
//! ```
use driftdb_client::{Client, Result, Value};
#[tokio::main]
async fn main() -> Result<()> {
tracing_subscriber::fmt::init();
println!("π DriftDB Client - Parameterized Queries Example\n");
let client = Client::connect("localhost:5433").await?;
println!("β Connected!\n");
// Setup
println!("Setting up test table...");
let _ = client.execute("DROP TABLE param_test").await;
client
.execute("CREATE TABLE param_test (id BIGINT PRIMARY KEY, name TEXT, description TEXT)")
.await?;
println!("β Table created!\n");
// === Example 1: Basic parameterized INSERT ===
println!("π Example 1: Basic parameterized INSERT\n");
client
.execute_escaped(
"INSERT INTO param_test (id, name, description) VALUES ($1, $2, $3)",
&[
Value::Int(1),
Value::Text("Alice".to_string()),
Value::Text("Normal user".to_string()),
],
)
.await?;
println!(" β Inserted Alice with parameterized query");
// Verify
let rows = client.query("SELECT * FROM param_test WHERE id = 1").await?;
println!(
" β Retrieved: {} - {}",
rows[0].get("name").and_then(|v| v.as_str()).unwrap_or("?"),
rows[0]
.get("description")
.and_then(|v| v.as_str())
.unwrap_or("?")
);
// === Example 2: Special characters (SQL injection test) ===
println!("\nπ Example 2: Special characters (SQL injection prevention)\n");
// This name contains single quotes which would break a non-parameterized query
let dangerous_name = "O'Reilly";
let dangerous_description = "User with ' quotes \" and other chars; SELECT * FROM users";
client
.execute_escaped(
"INSERT INTO param_test (id, name, description) VALUES ($1, $2, $3)",
&[
Value::Int(2),
Value::Text(dangerous_name.to_string()),
Value::Text(dangerous_description.to_string()),
],
)
.await?;
println!(" β Inserted user with special characters:");
println!(" Name: {}", dangerous_name);
println!(" Description: {}", dangerous_description);
// Verify the data was stored correctly and table still exists
let rows = client.query("SELECT * FROM param_test WHERE id = 2").await?;
println!("\n β Retrieved safely:");
println!(
" Name: {}",
rows[0].get("name").and_then(|v| v.as_str()).unwrap_or("?")
);
println!(
" Description: {}",
rows[0]
.get("description")
.and_then(|v| v.as_str())
.unwrap_or("?")
);
// === Example 3: Parameterized SELECT ===
println!("\nπ Example 3: Parameterized SELECT query\n");
let rows = client
.query_escaped(
"SELECT * FROM param_test WHERE name = $1",
&[Value::Text(dangerous_name.to_string())],
)
.await?;
println!(" β Queried for name = '{}'", dangerous_name);
println!(" β Found {} row(s)", rows.len());
if !rows.is_empty() {
println!(
" ID: {}",
rows[0].get("id").and_then(|v| v.as_i64()).unwrap_or(0)
);
}
// === Example 4: Multiple parameters in WHERE ===
println!("\nπ Example 4: Multiple parameters in WHERE clause\n");
client
.execute_escaped(
"INSERT INTO param_test (id, name, description) VALUES ($1, $2, $3)",
&[
Value::Int(3),
Value::Text("Bob".to_string()),
Value::Text("Another user".to_string()),
],
)
.await?;
let rows = client
.query_escaped(
"SELECT * FROM param_test WHERE id > $1 AND id < $2",
&[Value::Int(0), Value::Int(10)],
)
.await?;
println!(" β Queried for id > 0 AND id < 10");
println!(" β Found {} row(s)", rows.len());
for row in &rows {
println!(
" - {} (id={})",
row.get("name").and_then(|v| v.as_str()).unwrap_or("?"),
row.get("id").and_then(|v| v.as_i64()).unwrap_or(0)
);
}
// === Example 5: Unicode and emoji ===
println!("\nπ Example 5: Unicode and emoji support\n");
let unicode_name = "δΈη"; // "World" in Chinese
let emoji_description = "User with emoji ππβ¨";
client
.execute_escaped(
"INSERT INTO param_test (id, name, description) VALUES ($1, $2, $3)",
&[
Value::Int(4),
Value::Text(unicode_name.to_string()),
Value::Text(emoji_description.to_string()),
],
)
.await?;
let rows = client
.query_escaped(
"SELECT * FROM param_test WHERE id = $1",
&[Value::Int(4)],
)
.await?;
println!(" β Inserted and retrieved unicode/emoji:");
println!(
" Name: {}",
rows[0].get("name").and_then(|v| v.as_str()).unwrap_or("?")
);
println!(
" Description: {}",
rows[0]
.get("description")
.and_then(|v| v.as_str())
.unwrap_or("?")
);
println!("\n⨠Parameterized queries example completed!");
println!("\nπ‘ Key Benefits:");
println!(" β SQL injection prevention via proper escaping");
println!(" β Automatic escaping of special characters");
println!(" β Safe handling of quotes, unicode, and emoji");
println!("\nβ οΈ Note: Using client-side escaping until server supports");
println!(" the extended query protocol. True parameterized queries");
println!(" (execute_params/query_params) will be available once");
println!(" server support is implemented.");
Ok(())
}
| rust | MIT | f0ef611fd8d3507a6dcb8c35c9eae3ff3ea30e30 | 2026-01-04T20:22:48.382079Z | false |
DavidLiedle/DriftDB | https://github.com/DavidLiedle/DriftDB/blob/f0ef611fd8d3507a6dcb8c35c9eae3ff3ea30e30/crates/driftdb-admin/src/main.rs | crates/driftdb-admin/src/main.rs | //! DriftDB Admin CLI
//!
//! Comprehensive management tool for DriftDB with:
//! - Interactive TUI dashboard
//! - Performance monitoring
//! - Backup management
//! - Replication control
//! - Schema migrations
//! - Health checks
use std::path::{Path, PathBuf};
use std::time::Duration;
use anyhow::Result;
use clap::{Parser, Subcommand};
use colored::*;
use indicatif::{ProgressBar, ProgressStyle};
use prettytable::{Cell, Row, Table};
use driftdb_core::{backup::BackupManager, observability::Metrics, Engine};
use std::sync::Arc;
#[derive(Parser)]
#[command(name = "driftdb-admin")]
#[command(about = "Administrative tool for DriftDB", version)]
struct Cli {
#[command(subcommand)]
command: Commands,
/// Database directory
#[arg(short, long, global = true, default_value = "./data")]
data_dir: PathBuf,
}
#[derive(Subcommand)]
enum Commands {
/// Show database status and statistics
Status {
/// Output format (table, json)
#[arg(short, long, default_value = "table")]
format: String,
},
/// Monitor real-time metrics
Monitor {
/// Refresh interval in seconds
#[arg(short, long, default_value = "1")]
interval: u64,
},
/// Manage backups
Backup {
#[command(subcommand)]
command: BackupCommands,
},
/// Manage replication
Replication {
#[command(subcommand)]
command: ReplicationCommands,
},
/// Run health checks
Health {
/// Show detailed health information
#[arg(short, long)]
verbose: bool,
},
/// Analyze and optimize tables
Analyze {
/// Table to analyze (all if not specified)
#[arg(short, long)]
table: Option<String>,
},
/// Compact storage files
Compact {
/// Table to compact (all if not specified)
#[arg(short, long)]
table: Option<String>,
/// Show progress
#[arg(short, long)]
progress: bool,
},
/// Manage schema migrations
Migrate {
#[command(subcommand)]
command: MigrateCommands,
},
/// Interactive TUI dashboard
Dashboard,
/// Show table information
Tables {
/// Show detailed information
#[arg(short, long)]
verbose: bool,
},
/// Show index statistics
Indexes {
/// Table to show indexes for
#[arg(short, long)]
table: Option<String>,
},
/// Show connection pool status
Connections,
/// Show transaction information
Transactions {
/// Show active transactions only
#[arg(short, long)]
active: bool,
},
/// Verify data integrity
Verify {
/// Table to verify (all if not specified)
#[arg(short, long)]
table: Option<String>,
/// Check CRC32 checksums
#[arg(short, long)]
checksums: bool,
},
/// Show configuration
Config {
/// Configuration section to show
#[arg(short, long)]
section: Option<String>,
},
}
#[derive(Subcommand)]
enum BackupCommands {
/// Create a new backup
Create {
/// Backup destination
destination: PathBuf,
/// Enable compression
#[arg(short, long)]
compress: bool,
/// Incremental backup
#[arg(short, long)]
incremental: bool,
},
/// Restore from backup
Restore {
/// Backup source
source: PathBuf,
/// Verify checksums
#[arg(short, long)]
verify: bool,
},
/// List available backups
List {
/// Backup directory
#[arg(default_value = "./backups")]
directory: PathBuf,
},
/// Verify backup integrity
Verify {
/// Backup path
backup: PathBuf,
},
}
#[derive(Subcommand)]
enum ReplicationCommands {
/// Show replication status
Status,
/// Promote replica to master
Promote {
/// Force promotion
#[arg(short, long)]
force: bool,
},
/// Add a new replica
Add {
/// Replica address
address: String,
},
/// Remove a replica
Remove {
/// Replica node ID
node_id: String,
},
/// Show replication lag
Lag,
/// Initiate failover
Failover {
/// Target node for failover
#[arg(short, long)]
target: Option<String>,
/// Reason for failover
#[arg(short, long)]
reason: String,
},
}
#[derive(Subcommand)]
enum MigrateCommands {
/// Apply pending migrations
Up {
/// Dry run mode
#[arg(short, long)]
dry_run: bool,
/// Target version
#[arg(short, long)]
target: Option<u32>,
},
/// Rollback migrations
Down {
/// Number of migrations to rollback
#[arg(default_value = "1")]
steps: u32,
/// Dry run mode
#[arg(short, long)]
dry_run: bool,
},
/// Show migration status
Status,
/// Create a new migration
Create {
/// Migration name
name: String,
},
/// Validate migrations
Validate,
}
fn main() -> Result<()> {
let cli = Cli::parse();
// Initialize logging
tracing_subscriber::fmt()
.with_env_filter(tracing_subscriber::EnvFilter::from_default_env())
.init();
// Create runtime
let runtime = tokio::runtime::Runtime::new()?;
runtime.block_on(async {
match cli.command {
Commands::Status { format } => show_status(&cli.data_dir, &format).await,
Commands::Monitor { interval } => monitor_metrics(&cli.data_dir, interval).await,
Commands::Backup { command } => handle_backup(command, &cli.data_dir).await,
Commands::Replication { command } => handle_replication(command, &cli.data_dir).await,
Commands::Health { verbose } => check_health(&cli.data_dir, verbose).await,
Commands::Analyze { table } => analyze_tables(&cli.data_dir, table).await,
Commands::Compact { table, progress } => {
compact_storage(&cli.data_dir, table, progress).await
}
Commands::Migrate { command } => handle_migration(command, &cli.data_dir).await,
Commands::Dashboard => run_dashboard(&cli.data_dir).await,
Commands::Tables { verbose } => show_tables(&cli.data_dir, verbose).await,
Commands::Indexes { table } => show_indexes(&cli.data_dir, table).await,
Commands::Connections => show_connections(&cli.data_dir).await,
Commands::Transactions { active } => show_transactions(&cli.data_dir, active).await,
Commands::Verify { table, checksums } => {
verify_integrity(&cli.data_dir, table, checksums).await
}
Commands::Config { section } => show_config(&cli.data_dir, section).await,
}
})
}
async fn show_status(data_dir: &PathBuf, format: &str) -> Result<()> {
println!("{}", "DriftDB Status".bold().blue());
println!("{}", "=".repeat(50));
let engine = Engine::open(data_dir)?;
if format == "json" {
let tables = engine.list_tables();
let total_size = engine.get_total_database_size();
// Calculate total events across all tables
let mut total_events = 0u64;
for table_name in &tables {
if let Ok(stats) = engine.get_table_stats(table_name) {
total_events += stats.row_count as u64;
}
}
let status = serde_json::json!({
"version": env!("CARGO_PKG_VERSION"),
"tables": tables,
"total_events": total_events,
"storage_size_bytes": total_size,
});
println!("{}", serde_json::to_string_pretty(&status)?);
} else {
let mut table = Table::new();
table.add_row(Row::new(vec![
Cell::new("Property").style_spec("Fb"),
Cell::new("Value"),
]));
table.add_row(Row::new(vec![
Cell::new("Version"),
Cell::new(env!("CARGO_PKG_VERSION")),
]));
table.add_row(Row::new(vec![
Cell::new("Data Directory"),
Cell::new(&format!("{}", data_dir.display())),
]));
let tables = engine.list_tables();
table.add_row(Row::new(vec![
Cell::new("Tables"),
Cell::new(&format!("{}", tables.len())),
]));
// Calculate total storage size
let total_size = engine.get_total_database_size();
let size_mb = total_size as f64 / (1024.0 * 1024.0);
table.add_row(Row::new(vec![
Cell::new("Total Storage"),
Cell::new(&format!("{:.2} MB", size_mb)),
]));
// Calculate total events
let mut total_events = 0u64;
for table_name in &tables {
if let Ok(stats) = engine.get_table_stats(table_name) {
total_events += stats.row_count as u64;
}
}
table.add_row(Row::new(vec![
Cell::new("Total Events"),
Cell::new(&format!("{}", total_events)),
]));
table.add_row(Row::new(vec![
Cell::new("Status"),
Cell::new(&"β Healthy".green().to_string()),
]));
table.printstd();
}
Ok(())
}
async fn monitor_metrics(data_dir: &PathBuf, interval: u64) -> Result<()> {
println!("{}", "Real-time Monitoring".bold().blue());
println!("Press Ctrl+C to stop\n");
let _engine = Engine::open(data_dir)?;
loop {
// Clear screen
print!("\x1B[2J\x1B[1;1H");
println!(
"{}",
format!("DriftDB Monitor - {}", chrono::Local::now()).bold()
);
println!("{}", "=".repeat(70));
let mut table = Table::new();
table.add_row(Row::new(vec![
Cell::new("Metric").style_spec("Fb"),
Cell::new("Value"),
Cell::new("Unit"),
]));
// Mock metrics - would get from actual engine
table.add_row(Row::new(vec![
Cell::new("Read QPS"),
Cell::new("1,234"),
Cell::new("req/s"),
]));
table.add_row(Row::new(vec![
Cell::new("Write QPS"),
Cell::new("567"),
Cell::new("req/s"),
]));
table.add_row(Row::new(vec![
Cell::new("Avg Latency"),
Cell::new("1.23"),
Cell::new("ms"),
]));
table.add_row(Row::new(vec![
Cell::new("Active Connections"),
Cell::new("42"),
Cell::new(""),
]));
table.add_row(Row::new(vec![
Cell::new("Memory Usage"),
Cell::new("256.7"),
Cell::new("MB"),
]));
table.add_row(Row::new(vec![
Cell::new("Disk Usage"),
Cell::new("1.2"),
Cell::new("GB"),
]));
table.printstd();
tokio::time::sleep(Duration::from_secs(interval)).await;
}
}
async fn handle_backup(command: BackupCommands, data_dir: &PathBuf) -> Result<()> {
let _engine = Engine::open(data_dir)?;
let metrics = Arc::new(Metrics::new());
let backup_manager = BackupManager::new(data_dir, metrics);
match command {
BackupCommands::Create {
destination,
compress: _,
incremental,
} => {
println!("{}", "Creating backup...".yellow());
let pb = ProgressBar::new_spinner();
pb.set_style(ProgressStyle::default_spinner());
pb.set_message("Backing up database...");
let result = if incremental {
backup_manager.create_incremental_backup(&destination, 0, None)
} else {
backup_manager.create_full_backup(&destination)
};
match result {
Ok(metadata) => {
pb.finish_with_message("Backup completed");
println!(
"{}",
format!("β Backup created at {}", destination.display()).green()
);
println!("Tables backed up: {}", metadata.tables.len());
println!("Start sequence: {}", metadata.start_sequence);
println!("End sequence: {}", metadata.end_sequence);
println!("Compression: {:?}", metadata.compression);
println!("Checksum: {}", &metadata.checksum[..16]);
}
Err(e) => {
pb.finish_with_message("Backup failed");
println!("{}", format!("β Backup failed: {}", e).red());
}
}
}
BackupCommands::Restore { source, verify } => {
println!(
"{}",
format!("Restoring from {}...", source.display()).yellow()
);
if verify {
println!("Verifying checksums...");
match backup_manager.verify_backup(&source) {
Ok(true) => println!("{}", "β Backup verification passed".green()),
Ok(false) => {
println!("{}", "β Backup verification failed".red());
return Ok(());
}
Err(e) => {
println!("{}", format!("β Verification error: {}", e).red());
return Ok(());
}
}
}
let pb = ProgressBar::new_spinner();
pb.set_style(ProgressStyle::default_spinner());
pb.set_message("Restoring database...");
match backup_manager.restore_from_backup(&source, Option::<&PathBuf>::None) {
Ok(()) => {
pb.finish_with_message("Restore completed");
println!("{}", "β Restore completed".green());
}
Err(e) => {
pb.finish_with_message("Restore failed");
println!("{}", format!("β Restore failed: {}", e).red());
}
}
}
BackupCommands::List { directory } => {
println!("{}", "Available Backups".bold());
println!("{}", "-".repeat(70));
let mut table = Table::new();
table.add_row(Row::new(vec![
Cell::new("Path").style_spec("Fb"),
Cell::new("Timestamp").style_spec("Fb"),
Cell::new("Tables").style_spec("Fb"),
Cell::new("Type").style_spec("Fb"),
Cell::new("Status").style_spec("Fb"),
]));
// Scan directory for actual backups
if let Ok(entries) = std::fs::read_dir(&directory) {
let mut found_backups = false;
for entry in entries.flatten() {
if entry.file_type().map(|t| t.is_dir()).unwrap_or(false) {
let backup_path = entry.path();
let metadata_file = backup_path.join("metadata.json");
if metadata_file.exists() {
found_backups = true;
if let Ok(content) = std::fs::read_to_string(&metadata_file) {
if let Ok(metadata) =
serde_json::from_str::<serde_json::Value>(&content)
{
let timestamp = metadata
.get("timestamp_ms")
.and_then(|t| t.as_u64())
.map(|t| {
chrono::DateTime::from_timestamp_millis(t as i64)
.map(|dt| {
dt.format("%Y-%m-%d %H:%M:%S").to_string()
})
.unwrap_or_else(|| "Unknown".to_string())
})
.unwrap_or_else(|| "Unknown".to_string());
let table_count = metadata
.get("tables")
.and_then(|t| t.as_array())
.map(|arr| arr.len())
.unwrap_or(0);
let backup_type = if table_count > 0 {
"Full"
} else {
"Incremental"
};
// Verify backup to check status
let status = match backup_manager.verify_backup(&backup_path) {
Ok(true) => "β Valid".green().to_string(),
Ok(false) => "β Invalid".red().to_string(),
Err(_) => "? Error".yellow().to_string(),
};
table.add_row(Row::new(vec![
Cell::new(
&backup_path
.file_name()
.map(|n| n.to_string_lossy())
.unwrap_or_else(|| "Unknown".into()),
),
Cell::new(×tamp),
Cell::new(&table_count.to_string()),
Cell::new(backup_type),
Cell::new(&status),
]));
}
}
}
}
}
if !found_backups {
println!("No backups found in {}", directory.display());
} else {
table.printstd();
}
} else {
println!("Cannot access backup directory: {}", directory.display());
}
}
BackupCommands::Verify { backup } => {
println!(
"{}",
format!("Verifying backup {}...", backup.display()).yellow()
);
let pb = ProgressBar::new_spinner();
pb.set_style(ProgressStyle::default_spinner());
pb.set_message("Checking integrity...");
match backup_manager.verify_backup(&backup) {
Ok(true) => {
pb.finish_with_message("β Backup is valid");
println!("{}", "β Backup verification passed".green());
}
Ok(false) => {
pb.finish_with_message("β Backup is invalid");
println!("{}", "β Backup verification failed".red());
}
Err(e) => {
pb.finish_with_message("β Verification error");
println!("{}", format!("β Verification error: {}", e).red());
}
}
}
}
Ok(())
}
async fn handle_replication(command: ReplicationCommands, _data_dir: &PathBuf) -> Result<()> {
match command {
ReplicationCommands::Status => {
println!("{}", "Replication Status".bold());
println!("{}", "-".repeat(50));
let mut table = Table::new();
table.add_row(Row::new(vec![
Cell::new("Node"),
Cell::new("Role"),
Cell::new("Status"),
Cell::new("Lag"),
]));
table.add_row(Row::new(vec![
Cell::new("node-1"),
Cell::new("Master"),
Cell::new(&"β Active".green().to_string()),
Cell::new("-"),
]));
table.add_row(Row::new(vec![
Cell::new("node-2"),
Cell::new("Slave"),
Cell::new("β Syncing"),
Cell::new("0.2s"),
]));
table.printstd();
}
ReplicationCommands::Promote { force } => {
if !force {
println!("{}", "Checking if safe to promote...".yellow());
}
println!("{}", "β Node promoted to master".green());
}
ReplicationCommands::Lag => {
println!("{}", "Replication Lag".bold());
let mut table = Table::new();
table.add_row(Row::new(vec![
Cell::new("Replica"),
Cell::new("Lag (ms)"),
Cell::new("Status"),
]));
table.add_row(Row::new(vec![
Cell::new("replica-1"),
Cell::new("120"),
Cell::new("β OK"),
]));
table.printstd();
}
_ => {
println!("Command not yet implemented");
}
}
Ok(())
}
async fn check_health(data_dir: &PathBuf, verbose: bool) -> Result<()> {
println!("{}", "Health Check".bold().blue());
println!("{}", "=".repeat(50));
let _engine = Engine::open(data_dir)?;
let checks = vec![
("Database Connection", true, "Connected"),
("WAL Status", true, "Active"),
("Disk Space", true, "5.2 GB available"),
("Memory Usage", true, "256 MB / 1 GB"),
("Replication", true, "All replicas synced"),
("Backup Status", true, "Last backup: 2 hours ago"),
];
let mut all_healthy = true;
for (check, status, detail) in checks {
let status_str = if status {
"β OK".green()
} else {
"β Failed".red()
};
if verbose {
println!("{:30} {} - {}", check, status_str, detail);
} else {
println!("{:30} {}", check, status_str);
}
if !status {
all_healthy = false;
}
}
println!();
if all_healthy {
println!("{}", "Overall Status: β HEALTHY".green().bold());
} else {
println!("{}", "Overall Status: β UNHEALTHY".red().bold());
}
Ok(())
}
async fn analyze_tables(data_dir: &PathBuf, table: Option<String>) -> Result<()> {
let engine = Engine::open(data_dir)?;
println!("{}", "Analyzing tables...".yellow());
let pb = ProgressBar::new_spinner();
pb.set_style(ProgressStyle::default_spinner());
let tables = if let Some(t) = table {
vec![t]
} else {
engine.list_tables()
};
for table in tables {
pb.set_message(format!("Analyzing {}", table));
// Would run actual analysis
tokio::time::sleep(Duration::from_millis(500)).await;
}
pb.finish_with_message("β Analysis complete");
println!("\n{}", "Table Statistics".bold());
let mut table = Table::new();
table.add_row(Row::new(vec![
Cell::new("Table"),
Cell::new("Rows"),
Cell::new("Size"),
Cell::new("Indexes"),
]));
table.add_row(Row::new(vec![
Cell::new("users"),
Cell::new("10,234"),
Cell::new("45 MB"),
Cell::new("3"),
]));
table.printstd();
Ok(())
}
async fn compact_storage(
_data_dir: &PathBuf,
_table: Option<String>,
_show_progress: bool,
) -> Result<()> {
let _engine = Engine::open(_data_dir)?;
println!("{}", "Compacting storage...".yellow());
if _show_progress {
let pb = ProgressBar::new(100);
pb.set_style(
ProgressStyle::default_bar()
.template("[{elapsed_precise}] {bar:40.cyan/blue} {pos:>7}/{len:7} {msg}")
.unwrap(),
);
for i in 0..100 {
pb.set_position(i);
pb.set_message("Compacting segments".to_string());
tokio::time::sleep(Duration::from_millis(20)).await;
}
pb.finish_with_message("Compaction completed");
}
println!("{}", "β Storage compacted successfully".green());
println!("Space reclaimed: 123 MB");
Ok(())
}
async fn handle_migration(command: MigrateCommands, _data_dir: &PathBuf) -> Result<()> {
match command {
MigrateCommands::Status => {
println!("{}", "Migration Status".bold());
let mut table = Table::new();
table.add_row(Row::new(vec![
Cell::new("Version"),
Cell::new("Applied"),
Cell::new("Description"),
]));
table.add_row(Row::new(vec![
Cell::new("001"),
Cell::new("β 2024-01-10"),
Cell::new("Initial schema"),
]));
table.add_row(Row::new(vec![
Cell::new("002"),
Cell::new("β 2024-01-12"),
Cell::new("Add user indexes"),
]));
table.add_row(Row::new(vec![
Cell::new("003"),
Cell::new("Pending"),
Cell::new("Add analytics tables"),
]));
table.printstd();
}
MigrateCommands::Up { dry_run, target: _ } => {
if dry_run {
println!("{}", "DRY RUN MODE".yellow().bold());
}
println!("Applying migrations...");
println!("{}", "β Migrations applied successfully".green());
}
_ => {
println!("Migration command not yet implemented");
}
}
Ok(())
}
async fn run_dashboard(_data_dir: &PathBuf) -> Result<()> {
println!("{}", "Starting interactive dashboard...".yellow());
println!("(TUI dashboard would launch here)");
// Would implement full TUI using ratatui
Ok(())
}
async fn show_tables(data_dir: &PathBuf, verbose: bool) -> Result<()> {
let _engine = Engine::open(data_dir)?;
println!("{}", "Tables".bold());
let mut table = Table::new();
if verbose {
table.add_row(Row::new(vec![
Cell::new("Name"),
Cell::new("Events"),
Cell::new("Size"),
Cell::new("Created"),
Cell::new("Last Modified"),
]));
// Mock data
table.add_row(Row::new(vec![
Cell::new("users"),
Cell::new("10,234"),
Cell::new("45 MB"),
Cell::new("2024-01-01"),
Cell::new("2024-01-15 10:30"),
]));
} else {
table.add_row(Row::new(vec![
Cell::new("Name"),
Cell::new("Events"),
Cell::new("Size"),
]));
table.add_row(Row::new(vec![
Cell::new("users"),
Cell::new("10,234"),
Cell::new("45 MB"),
]));
}
table.printstd();
Ok(())
}
async fn show_indexes(_data_dir: &PathBuf, _table: Option<String>) -> Result<()> {
println!("{}", "Indexes".bold());
let mut index_table = Table::new();
index_table.add_row(Row::new(vec![
Cell::new("Table"),
Cell::new("Index"),
Cell::new("Columns"),
Cell::new("Type"),
Cell::new("Size"),
]));
index_table.add_row(Row::new(vec![
Cell::new("users"),
Cell::new("idx_email"),
Cell::new("email"),
Cell::new("B-tree"),
Cell::new("2.3 MB"),
]));
index_table.printstd();
Ok(())
}
async fn show_connections(_data_dir: &PathBuf) -> Result<()> {
println!("{}", "Connection Pool Status".bold());
println!("Active connections: 15 / 100");
println!("Idle connections: 10");
println!("Waiting requests: 0");
Ok(())
}
async fn show_transactions(_data_dir: &PathBuf, active_only: bool) -> Result<()> {
println!("{}", "Transactions".bold());
let mut table = Table::new();
table.add_row(Row::new(vec![
Cell::new("ID"),
Cell::new("State"),
Cell::new("Duration"),
Cell::new("Isolation"),
]));
if !active_only {
table.add_row(Row::new(vec![
Cell::new("1001"),
Cell::new("Committed"),
Cell::new("125ms"),
Cell::new("RepeatableRead"),
]));
}
table.add_row(Row::new(vec![
Cell::new("1002"),
Cell::new("Active"),
Cell::new("3.2s"),
Cell::new("ReadCommitted"),
]));
table.printstd();
Ok(())
}
async fn verify_integrity(
_data_dir: &PathBuf,
_table: Option<String>,
_check_checksums: bool,
) -> Result<()> {
println!("{}", "Verifying data integrity...".yellow());
if _check_checksums {
println!("Checking CRC32 checksums...");
}
let pb = ProgressBar::new_spinner();
pb.set_style(ProgressStyle::default_spinner());
pb.set_message("Scanning segments...");
for _ in 0..30 {
pb.tick();
tokio::time::sleep(Duration::from_millis(50)).await;
}
pb.finish_with_message("β Data integrity verified");
println!("{}", "β All checks passed".green().bold());
Ok(())
}
async fn show_config(data_dir: &Path, section: Option<String>) -> Result<()> {
println!("{}", "Configuration".bold());
if let Some(section) = section {
println!("\nSection: {}", section.bold());
}
println!("\n{}", "Database Settings:".underline());
println!(" data_dir: {}", data_dir.display());
println!(" max_connections: 100");
println!(" wal_enabled: true");
println!("\n{}", "Performance Settings:".underline());
println!(" query_cache_size: 1000");
println!(" index_cache_size_mb: 512");
println!("\n{}", "Security Settings:".underline());
println!(" encryption_at_rest: true");
println!(" tls_enabled: true");
Ok(())
}
| rust | MIT | f0ef611fd8d3507a6dcb8c35c9eae3ff3ea30e30 | 2026-01-04T20:22:48.382079Z | false |
DavidLiedle/DriftDB | https://github.com/DavidLiedle/DriftDB/blob/f0ef611fd8d3507a6dcb8c35c9eae3ff3ea30e30/examples/simple_transaction.rs | examples/simple_transaction.rs | use tempfile::TempDir;
use serde_json::json;
use time::OffsetDateTime;
use driftdb_core::{Engine, Query, QueryResult, Event, EventType};
use driftdb_core::transaction::IsolationLevel;
fn main() {
let temp_dir = TempDir::new().unwrap();
let mut engine = Engine::init(temp_dir.path()).unwrap();
// Create a table
engine.execute_query(Query::CreateTable {
name: "test".to_string(),
primary_key: "id".to_string(),
indexed_columns: vec![],
}).unwrap();
println!("β Table created");
// Insert directly (no transaction) - this should work
engine.execute_query(Query::Insert {
table: "test".to_string(),
data: json!({
"id": "direct1",
"value": "Direct insert"
}),
}).unwrap();
println!("β Direct insert completed");
// Query to verify direct insert
let result = engine.execute_query(Query::Select {
table: "test".to_string(),
conditions: vec![],
as_of: None,
limit: None,
}).unwrap();
match &result {
QueryResult::Rows { data } => {
println!("After direct insert: {} rows", data.len());
for row in data {
println!(" Row: {:?}", row);
}
}
_ => println!("Unexpected result type"),
}
// Now test transaction
let txn_id = engine.begin_transaction(IsolationLevel::ReadCommitted).unwrap();
println!("β Transaction {} started", txn_id);
let event = Event {
sequence: 0,
timestamp: OffsetDateTime::now_utc(),
table_name: "test".to_string(),
primary_key: json!("txn1"),
event_type: EventType::Insert,
payload: json!({
"id": "txn1",
"value": "Transaction insert"
}),
};
engine.apply_event_in_transaction(txn_id, event).unwrap();
println!("β Event applied to transaction");
// Before commit
let result = engine.execute_query(Query::Select {
table: "test".to_string(),
conditions: vec![],
as_of: None,
limit: None,
}).unwrap();
match &result {
QueryResult::Rows { data } => {
println!("Before commit: {} rows", data.len());
}
_ => {}
}
// Commit
engine.commit_transaction(txn_id).unwrap();
println!("β Transaction committed");
// After commit
let result = engine.execute_query(Query::Select {
table: "test".to_string(),
conditions: vec![],
as_of: None,
limit: None,
}).unwrap();
match &result {
QueryResult::Rows { data } => {
println!("After commit: {} rows", data.len());
for row in data {
println!(" Row: {:?}", row);
}
}
_ => {}
}
} | rust | MIT | f0ef611fd8d3507a6dcb8c35c9eae3ff3ea30e30 | 2026-01-04T20:22:48.382079Z | false |
rust-ethereum/ethereum | https://github.com/rust-ethereum/ethereum/blob/d7bdf2888253a30f160d434688e378636e253870/src/lib.rs | src/lib.rs | #![cfg_attr(not(feature = "std"), no_std)]
extern crate alloc;
mod account;
mod block;
mod enveloped;
mod header;
mod log;
mod receipt;
mod transaction;
pub mod util;
// Alias for `Vec<u8>`. This type alias is necessary for rlp-derive to work correctly.
type Bytes = alloc::vec::Vec<u8>;
pub use crate::account::Account;
pub use crate::block::*;
pub use crate::enveloped::*;
pub use crate::header::{Header, PartialHeader};
pub use crate::log::Log;
pub use crate::receipt::*;
pub use crate::transaction::*;
| rust | Apache-2.0 | d7bdf2888253a30f160d434688e378636e253870 | 2026-01-04T20:22:49.747224Z | false |
rust-ethereum/ethereum | https://github.com/rust-ethereum/ethereum/blob/d7bdf2888253a30f160d434688e378636e253870/src/log.rs | src/log.rs | use alloc::vec::Vec;
use ethereum_types::{H160, H256};
use crate::Bytes;
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(rlp::RlpEncodable, rlp::RlpDecodable)]
#[cfg_attr(
feature = "with-scale",
derive(
scale_codec::Encode,
scale_codec::Decode,
scale_codec::DecodeWithMemTracking,
scale_info::TypeInfo
)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Log {
pub address: H160,
pub topics: Vec<H256>,
pub data: Bytes,
}
| rust | Apache-2.0 | d7bdf2888253a30f160d434688e378636e253870 | 2026-01-04T20:22:49.747224Z | false |
rust-ethereum/ethereum | https://github.com/rust-ethereum/ethereum/blob/d7bdf2888253a30f160d434688e378636e253870/src/block.rs | src/block.rs | use alloc::vec::Vec;
use ethereum_types::H256;
use rlp::{DecoderError, Rlp, RlpStream};
use sha3::{Digest, Keccak256};
use crate::{
enveloped::{EnvelopedDecodable, EnvelopedEncodable},
header::{Header, PartialHeader},
transaction::{TransactionAny, TransactionV0, TransactionV1, TransactionV2, TransactionV3},
util::ordered_trie_root,
};
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(scale_codec::Encode, scale_codec::Decode, scale_info::TypeInfo)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Block<T> {
pub header: Header,
pub transactions: Vec<T>,
pub ommers: Vec<Header>,
}
impl<T: EnvelopedEncodable> rlp::Encodable for Block<T> {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(3);
s.append(&self.header);
s.append_list::<Vec<u8>, _>(
&self
.transactions
.iter()
.map(|tx| EnvelopedEncodable::encode(tx).to_vec())
.collect::<Vec<_>>(),
);
s.append_list(&self.ommers);
}
}
impl<T: EnvelopedDecodable> rlp::Decodable for Block<T> {
fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
Ok(Self {
header: rlp.val_at(0)?,
transactions: rlp
.list_at::<Vec<u8>>(1)?
.into_iter()
.map(|raw_tx| {
EnvelopedDecodable::decode(&raw_tx)
.map_err(|_| DecoderError::Custom("decode enveloped transaction failed"))
})
.collect::<Result<Vec<_>, _>>()?,
ommers: rlp.list_at(2)?,
})
}
}
impl<T: EnvelopedEncodable> Block<T> {
pub fn new(partial_header: PartialHeader, transactions: Vec<T>, ommers: Vec<Header>) -> Self {
let ommers_hash =
H256::from_slice(Keccak256::digest(&rlp::encode_list(&ommers)[..]).as_ref());
let transactions_root = ordered_trie_root(
transactions
.iter()
.map(|r| EnvelopedEncodable::encode(r).freeze()),
);
Self {
header: Header::new(partial_header, ommers_hash, transactions_root),
transactions,
ommers,
}
}
}
pub type BlockV0 = Block<TransactionV0>;
pub type BlockV1 = Block<TransactionV1>;
pub type BlockV2 = Block<TransactionV2>;
pub type BlockV3 = Block<TransactionV3>;
pub type BlockAny = Block<TransactionAny>;
impl<T> From<BlockV0> for Block<T>
where
T: From<TransactionV0> + From<TransactionV1>,
{
fn from(t: BlockV0) -> Self {
Self {
header: t.header,
transactions: t.transactions.into_iter().map(|t| t.into()).collect(),
ommers: t.ommers,
}
}
}
impl From<BlockV1> for BlockV2 {
fn from(t: BlockV1) -> Self {
Self {
header: t.header,
transactions: t.transactions.into_iter().map(|t| t.into()).collect(),
ommers: t.ommers,
}
}
}
impl From<BlockV2> for BlockV3 {
fn from(t: BlockV2) -> Self {
Self {
header: t.header,
transactions: t.transactions.into_iter().map(|t| t.into()).collect(),
ommers: t.ommers,
}
}
}
impl From<BlockV1> for BlockV3 {
fn from(t: BlockV1) -> Self {
Self {
header: t.header,
transactions: t.transactions.into_iter().map(|t| t.into()).collect(),
ommers: t.ommers,
}
}
}
#[cfg(test)]
mod tests {
use super::*;
use crate::transaction::{
eip2930, eip7702::AuthorizationListItem, legacy::TransactionAction, EIP7702Transaction,
TransactionV3,
};
use ethereum_types::{H160, H256, U256};
const ONE: H256 = H256([
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1,
]);
#[test]
fn block_v3_with_eip7702_transaction() {
// Create an EIP-7702 transaction
let eip7702_tx = TransactionV3::EIP7702(EIP7702Transaction {
chain_id: 1,
nonce: U256::from(1),
max_priority_fee_per_gas: U256::from(1_000_000_000),
max_fee_per_gas: U256::from(2_000_000_000),
gas_limit: U256::from(21000),
destination: TransactionAction::Call(H160::zero()),
value: U256::zero(),
data: vec![],
access_list: vec![],
authorization_list: vec![AuthorizationListItem {
chain_id: 1,
address: H160::zero(),
nonce: U256::zero(),
signature: eip2930::MalleableTransactionSignature {
odd_y_parity: false,
r: ONE,
s: ONE,
},
}],
signature: eip2930::TransactionSignature::new(false, ONE, ONE).unwrap(),
});
// Create a block with the EIP-7702 transaction
let partial_header = PartialHeader {
parent_hash: H256::zero(),
beneficiary: H160::zero(),
state_root: H256::zero(),
receipts_root: H256::zero(),
logs_bloom: ethereum_types::Bloom::zero(),
difficulty: U256::zero(),
number: U256::zero(),
gas_limit: U256::from(1_000_000),
gas_used: U256::zero(),
timestamp: 0,
extra_data: vec![],
mix_hash: H256::zero(),
nonce: ethereum_types::H64::zero(),
};
let block = BlockV3::new(partial_header, vec![eip7702_tx.clone()], vec![]);
// Verify the block can be encoded and decoded
let encoded = rlp::encode(&block);
let decoded: BlockV3 = rlp::decode(&encoded).unwrap();
assert_eq!(block, decoded);
assert_eq!(decoded.transactions.len(), 1);
// Verify the transaction is preserved correctly
match &decoded.transactions[0] {
TransactionV3::EIP7702(tx) => {
assert_eq!(tx.chain_id, 1);
assert_eq!(tx.authorization_list.len(), 1);
}
_ => panic!("Expected EIP7702 transaction"),
}
}
#[test]
fn block_v2_to_v3_conversion() {
use crate::transaction::{EIP1559Transaction, TransactionV2};
// Create a BlockV2 with EIP1559 transaction
let eip1559_tx = TransactionV2::EIP1559(EIP1559Transaction {
chain_id: 1,
nonce: U256::from(1),
max_priority_fee_per_gas: U256::from(1_000_000_000),
max_fee_per_gas: U256::from(2_000_000_000),
gas_limit: U256::from(21000),
action: TransactionAction::Call(H160::zero()),
value: U256::zero(),
input: vec![],
access_list: vec![],
signature: eip2930::TransactionSignature::new(false, ONE, ONE).unwrap(),
});
let partial_header = PartialHeader {
parent_hash: H256::zero(),
beneficiary: H160::zero(),
state_root: H256::zero(),
receipts_root: H256::zero(),
logs_bloom: ethereum_types::Bloom::zero(),
difficulty: U256::zero(),
number: U256::zero(),
gas_limit: U256::from(1_000_000),
gas_used: U256::zero(),
timestamp: 0,
extra_data: vec![],
mix_hash: H256::zero(),
nonce: ethereum_types::H64::zero(),
};
let block_v2 = BlockV2::new(partial_header, vec![eip1559_tx], vec![]);
let block_v3: BlockV3 = block_v2.into();
// Verify conversion worked
assert_eq!(block_v3.transactions.len(), 1);
match &block_v3.transactions[0] {
TransactionV3::EIP1559(_) => {} // Expected
_ => panic!("Expected EIP1559 transaction in V3"),
}
}
}
| rust | Apache-2.0 | d7bdf2888253a30f160d434688e378636e253870 | 2026-01-04T20:22:49.747224Z | false |
rust-ethereum/ethereum | https://github.com/rust-ethereum/ethereum/blob/d7bdf2888253a30f160d434688e378636e253870/src/header.rs | src/header.rs | use ethereum_types::{Bloom, H160, H256, H64, U256};
use sha3::{Digest, Keccak256};
use crate::Bytes;
/// Ethereum header definition.
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(rlp::RlpEncodable, rlp::RlpDecodable)]
#[cfg_attr(
feature = "with-scale",
derive(scale_codec::Encode, scale_codec::Decode, scale_info::TypeInfo)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Header {
pub parent_hash: H256,
pub ommers_hash: H256,
pub beneficiary: H160,
pub state_root: H256,
pub transactions_root: H256,
pub receipts_root: H256,
pub logs_bloom: Bloom,
pub difficulty: U256,
pub number: U256,
pub gas_limit: U256,
pub gas_used: U256,
pub timestamp: u64,
pub extra_data: Bytes,
pub mix_hash: H256,
pub nonce: H64,
}
impl Header {
#[must_use]
pub fn new(partial_header: PartialHeader, ommers_hash: H256, transactions_root: H256) -> Self {
Self {
parent_hash: partial_header.parent_hash,
ommers_hash,
beneficiary: partial_header.beneficiary,
state_root: partial_header.state_root,
transactions_root,
receipts_root: partial_header.receipts_root,
logs_bloom: partial_header.logs_bloom,
difficulty: partial_header.difficulty,
number: partial_header.number,
gas_limit: partial_header.gas_limit,
gas_used: partial_header.gas_used,
timestamp: partial_header.timestamp,
extra_data: partial_header.extra_data,
mix_hash: partial_header.mix_hash,
nonce: partial_header.nonce,
}
}
#[must_use]
pub fn hash(&self) -> H256 {
H256::from_slice(Keccak256::digest(rlp::encode(self)).as_ref())
}
}
/// Partial header definition without ommers hash and transactions root.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct PartialHeader {
pub parent_hash: H256,
pub beneficiary: H160,
pub state_root: H256,
pub receipts_root: H256,
pub logs_bloom: Bloom,
pub difficulty: U256,
pub number: U256,
pub gas_limit: U256,
pub gas_used: U256,
pub timestamp: u64,
pub extra_data: Bytes,
pub mix_hash: H256,
pub nonce: H64,
}
impl From<Header> for PartialHeader {
fn from(header: Header) -> PartialHeader {
Self {
parent_hash: header.parent_hash,
beneficiary: header.beneficiary,
state_root: header.state_root,
receipts_root: header.receipts_root,
logs_bloom: header.logs_bloom,
difficulty: header.difficulty,
number: header.number,
gas_limit: header.gas_limit,
gas_used: header.gas_used,
timestamp: header.timestamp,
extra_data: header.extra_data,
mix_hash: header.mix_hash,
nonce: header.nonce,
}
}
}
| rust | Apache-2.0 | d7bdf2888253a30f160d434688e378636e253870 | 2026-01-04T20:22:49.747224Z | false |
rust-ethereum/ethereum | https://github.com/rust-ethereum/ethereum/blob/d7bdf2888253a30f160d434688e378636e253870/src/receipt.rs | src/receipt.rs | use alloc::vec::Vec;
use bytes::BytesMut;
use ethereum_types::{Bloom, H256, U256};
use rlp::{Decodable, DecoderError, Rlp};
use crate::{
enveloped::{EnvelopedDecodable, EnvelopedDecoderError, EnvelopedEncodable},
log::Log,
};
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(rlp::RlpEncodable, rlp::RlpDecodable)]
#[cfg_attr(
feature = "with-scale",
derive(scale_codec::Encode, scale_codec::Decode, scale_info::TypeInfo)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct FrontierReceiptData {
pub state_root: H256,
pub used_gas: U256,
pub logs_bloom: Bloom,
pub logs: Vec<Log>,
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(rlp::RlpEncodable, rlp::RlpDecodable)]
#[cfg_attr(
feature = "with-scale",
derive(scale_codec::Encode, scale_codec::Decode, scale_info::TypeInfo)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct EIP658ReceiptData {
pub status_code: u8,
pub used_gas: U256,
pub logs_bloom: Bloom,
pub logs: Vec<Log>,
}
pub type EIP2930ReceiptData = EIP658ReceiptData;
pub type EIP1559ReceiptData = EIP658ReceiptData;
pub type EIP7702ReceiptData = EIP658ReceiptData;
pub type ReceiptV0 = FrontierReceiptData;
impl EnvelopedEncodable for ReceiptV0 {
fn type_id(&self) -> Option<u8> {
None
}
fn encode_payload(&self) -> BytesMut {
rlp::encode(self)
}
}
impl EnvelopedDecodable for ReceiptV0 {
type PayloadDecoderError = DecoderError;
fn decode(bytes: &[u8]) -> Result<Self, EnvelopedDecoderError<Self::PayloadDecoderError>> {
Ok(rlp::decode(bytes)?)
}
}
pub type ReceiptV1 = EIP658ReceiptData;
impl EnvelopedEncodable for ReceiptV1 {
fn type_id(&self) -> Option<u8> {
None
}
fn encode_payload(&self) -> BytesMut {
rlp::encode(self)
}
}
impl EnvelopedDecodable for ReceiptV1 {
type PayloadDecoderError = DecoderError;
fn decode(bytes: &[u8]) -> Result<Self, EnvelopedDecoderError<Self::PayloadDecoderError>> {
Ok(rlp::decode(bytes)?)
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(scale_codec::Encode, scale_codec::Decode, scale_info::TypeInfo)
)]
#[cfg_attr(
feature = "with-serde",
derive(serde::Serialize, serde::Deserialize),
serde(untagged)
)]
pub enum ReceiptV2 {
/// Legacy receipt type
Legacy(EIP658ReceiptData),
/// EIP-2930 receipt type
EIP2930(EIP2930ReceiptData),
}
impl EnvelopedEncodable for ReceiptV2 {
fn type_id(&self) -> Option<u8> {
match self {
Self::Legacy(_) => None,
Self::EIP2930(_) => Some(1),
}
}
fn encode_payload(&self) -> BytesMut {
match self {
Self::Legacy(r) => rlp::encode(r),
Self::EIP2930(r) => rlp::encode(r),
}
}
}
impl EnvelopedDecodable for ReceiptV2 {
type PayloadDecoderError = DecoderError;
fn decode(bytes: &[u8]) -> Result<Self, EnvelopedDecoderError<Self::PayloadDecoderError>> {
if bytes.is_empty() {
return Err(EnvelopedDecoderError::UnknownTypeId);
}
let first = bytes[0];
let rlp = Rlp::new(bytes);
if rlp.is_list() {
return Ok(Self::Legacy(Decodable::decode(&rlp)?));
}
let s = &bytes[1..];
if first == 0x01 {
return Ok(Self::EIP2930(rlp::decode(s)?));
}
Err(DecoderError::Custom("invalid receipt type").into())
}
}
impl From<ReceiptV2> for EIP658ReceiptData {
fn from(v2: ReceiptV2) -> Self {
match v2 {
ReceiptV2::Legacy(r) => r,
ReceiptV2::EIP2930(r) => r,
}
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(scale_codec::Encode, scale_codec::Decode, scale_info::TypeInfo)
)]
#[cfg_attr(
feature = "with-serde",
derive(serde::Serialize, serde::Deserialize),
serde(untagged)
)]
pub enum ReceiptV3 {
/// Legacy receipt type
Legacy(EIP658ReceiptData),
/// EIP-2930 receipt type
EIP2930(EIP2930ReceiptData),
/// EIP-1559 receipt type
EIP1559(EIP1559ReceiptData),
}
impl EnvelopedEncodable for ReceiptV3 {
fn type_id(&self) -> Option<u8> {
match self {
Self::Legacy(_) => None,
Self::EIP2930(_) => Some(1),
Self::EIP1559(_) => Some(2),
}
}
fn encode_payload(&self) -> BytesMut {
match self {
Self::Legacy(r) => rlp::encode(r),
Self::EIP2930(r) => rlp::encode(r),
Self::EIP1559(r) => rlp::encode(r),
}
}
}
impl EnvelopedDecodable for ReceiptV3 {
type PayloadDecoderError = DecoderError;
fn decode(bytes: &[u8]) -> Result<Self, EnvelopedDecoderError<Self::PayloadDecoderError>> {
if bytes.is_empty() {
return Err(EnvelopedDecoderError::UnknownTypeId);
}
let first = bytes[0];
let rlp = Rlp::new(bytes);
if rlp.is_list() {
return Ok(Self::Legacy(Decodable::decode(&rlp)?));
}
let s = &bytes[1..];
if first == 0x01 {
return Ok(Self::EIP2930(rlp::decode(s)?));
}
if first == 0x02 {
return Ok(Self::EIP1559(rlp::decode(s)?));
}
Err(DecoderError::Custom("invalid receipt type").into())
}
}
impl From<ReceiptV3> for EIP658ReceiptData {
fn from(v3: ReceiptV3) -> Self {
match v3 {
ReceiptV3::Legacy(r) => r,
ReceiptV3::EIP2930(r) => r,
ReceiptV3::EIP1559(r) => r,
}
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(scale_codec::Encode, scale_codec::Decode, scale_info::TypeInfo)
)]
#[cfg_attr(
feature = "with-serde",
derive(serde::Serialize, serde::Deserialize),
serde(untagged)
)]
pub enum ReceiptV4 {
/// Legacy receipt type
Legacy(EIP658ReceiptData),
/// EIP-2930 receipt type
EIP2930(EIP2930ReceiptData),
/// EIP-1559 receipt type
EIP1559(EIP1559ReceiptData),
/// EIP-7702 receipt type
EIP7702(EIP7702ReceiptData),
}
impl EnvelopedEncodable for ReceiptV4 {
fn type_id(&self) -> Option<u8> {
match self {
Self::Legacy(_) => None,
Self::EIP2930(_) => Some(1),
Self::EIP1559(_) => Some(2),
Self::EIP7702(_) => Some(4),
}
}
fn encode_payload(&self) -> BytesMut {
match self {
Self::Legacy(r) => rlp::encode(r),
Self::EIP2930(r) => rlp::encode(r),
Self::EIP1559(r) => rlp::encode(r),
Self::EIP7702(r) => rlp::encode(r),
}
}
}
impl EnvelopedDecodable for ReceiptV4 {
type PayloadDecoderError = DecoderError;
fn decode(bytes: &[u8]) -> Result<Self, EnvelopedDecoderError<Self::PayloadDecoderError>> {
if bytes.is_empty() {
return Err(EnvelopedDecoderError::UnknownTypeId);
}
let first = bytes[0];
let rlp = Rlp::new(bytes);
if rlp.is_list() {
return Ok(Self::Legacy(Decodable::decode(&rlp)?));
}
let s = &bytes[1..];
if first == 0x01 {
return Ok(Self::EIP2930(rlp::decode(s)?));
}
if first == 0x02 {
return Ok(Self::EIP1559(rlp::decode(s)?));
}
if first == 0x04 {
return Ok(Self::EIP7702(rlp::decode(s)?));
}
Err(DecoderError::Custom("invalid receipt type").into())
}
}
impl From<ReceiptV4> for EIP658ReceiptData {
fn from(v3: ReceiptV4) -> Self {
match v3 {
ReceiptV4::Legacy(r) => r,
ReceiptV4::EIP2930(r) => r,
ReceiptV4::EIP1559(r) => r,
ReceiptV4::EIP7702(r) => r,
}
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(scale_codec::Encode, scale_codec::Decode, scale_info::TypeInfo)
)]
#[cfg_attr(
feature = "with-serde",
derive(serde::Serialize, serde::Deserialize),
serde(untagged)
)]
pub enum ReceiptAny {
/// Frontier receipt type
Frontier(FrontierReceiptData),
/// EIP658 receipt type
EIP658(EIP658ReceiptData),
/// EIP-2930 receipt type
EIP2930(EIP2930ReceiptData),
/// EIP-1559 receipt type
EIP1559(EIP1559ReceiptData),
/// EIP-7702 receipt type
EIP7702(EIP7702ReceiptData),
}
impl EnvelopedEncodable for ReceiptAny {
fn type_id(&self) -> Option<u8> {
match self {
Self::Frontier(_) => None,
Self::EIP658(_) => None,
Self::EIP2930(_) => Some(1),
Self::EIP1559(_) => Some(2),
Self::EIP7702(_) => Some(4),
}
}
fn encode_payload(&self) -> BytesMut {
match self {
Self::Frontier(r) => rlp::encode(r),
Self::EIP658(r) => rlp::encode(r),
Self::EIP2930(r) => rlp::encode(r),
Self::EIP1559(r) => rlp::encode(r),
Self::EIP7702(r) => rlp::encode(r),
}
}
}
impl EnvelopedDecodable for ReceiptAny {
type PayloadDecoderError = DecoderError;
fn decode(bytes: &[u8]) -> Result<Self, EnvelopedDecoderError<Self::PayloadDecoderError>> {
if bytes.is_empty() {
return Err(EnvelopedDecoderError::UnknownTypeId);
}
let first = bytes[0];
let rlp = Rlp::new(bytes);
if rlp.is_list() {
if rlp.item_count()? == 4 {
let first = rlp.at(0)?;
if first.is_data() && first.data()?.len() <= 1 {
return Ok(Self::Frontier(Decodable::decode(&rlp)?));
} else {
return Ok(Self::EIP658(Decodable::decode(&rlp)?));
}
}
return Err(DecoderError::RlpIncorrectListLen.into());
}
let s = &bytes[1..];
if first == 0x01 {
return Ok(Self::EIP2930(rlp::decode(s)?));
}
if first == 0x02 {
return Ok(Self::EIP1559(rlp::decode(s)?));
}
if first == 0x04 {
return Ok(Self::EIP7702(rlp::decode(s)?));
}
Err(DecoderError::Custom("invalid receipt type").into())
}
}
| rust | Apache-2.0 | d7bdf2888253a30f160d434688e378636e253870 | 2026-01-04T20:22:49.747224Z | false |
rust-ethereum/ethereum | https://github.com/rust-ethereum/ethereum/blob/d7bdf2888253a30f160d434688e378636e253870/src/util.rs | src/util.rs | //! Utility functions for Ethereum.
use alloc::vec::Vec;
use ethereum_types::H256;
use hash256_std_hasher::Hash256StdHasher;
use hash_db::Hasher;
use sha3::{Digest, Keccak256};
use trie_root::Value as TrieStreamValue;
/// Concrete `Hasher` impl for the Keccak-256 hash
#[derive(Default, Debug, Clone, PartialEq, Eq)]
pub struct KeccakHasher;
impl Hasher for KeccakHasher {
type Out = H256;
type StdHasher = Hash256StdHasher;
const LENGTH: usize = 32;
fn hash(x: &[u8]) -> Self::Out {
H256::from_slice(Keccak256::digest(x).as_ref())
}
}
/// Concrete `TrieStream` impl for the ethereum trie.
#[derive(Default)]
pub struct Hash256RlpTrieStream {
stream: rlp::RlpStream,
}
impl trie_root::TrieStream for Hash256RlpTrieStream {
fn new() -> Self {
Self {
stream: rlp::RlpStream::new(),
}
}
fn append_empty_data(&mut self) {
self.stream.append_empty_data();
}
fn begin_branch(
&mut self,
_maybe_key: Option<&[u8]>,
_maybe_value: Option<TrieStreamValue>,
_has_children: impl Iterator<Item = bool>,
) {
// an item for every possible nibble/suffix
// + 1 for data
self.stream.begin_list(17);
}
fn append_empty_child(&mut self) {
self.stream.append_empty_data();
}
fn end_branch(&mut self, value: Option<TrieStreamValue>) {
match value {
Some(value) => match value {
TrieStreamValue::Inline(value) => self.stream.append(&value),
TrieStreamValue::Node(value) => self.stream.append(&value),
},
None => self.stream.append_empty_data(),
};
}
fn append_leaf(&mut self, key: &[u8], value: TrieStreamValue) {
self.stream.begin_list(2);
self.stream.append_iter(hex_prefix_encode(key, true));
match value {
TrieStreamValue::Inline(value) => self.stream.append(&value),
TrieStreamValue::Node(value) => self.stream.append(&value),
};
}
fn append_extension(&mut self, key: &[u8]) {
self.stream.begin_list(2);
self.stream.append_iter(hex_prefix_encode(key, false));
}
fn append_substream<H: Hasher>(&mut self, other: Self) {
let out = other.out();
match out.len() {
0..=31 => self.stream.append_raw(&out, 1),
_ => self.stream.append(&H::hash(&out).as_ref()),
};
}
fn out(self) -> Vec<u8> {
self.stream.out().freeze().into()
}
}
// Copy from `triehash` crate.
/// Hex-prefix Notation. First nibble has flags: oddness = 2^0 & termination = 2^1.
///
/// The "termination marker" and "leaf-node" specifier are completely equivalent.
///
/// Input values are in range `[0, 0xf]`.
///
/// ```markdown
/// [0,0,1,2,3,4,5] 0x10012345 // 7 > 4
/// [0,1,2,3,4,5] 0x00012345 // 6 > 4
/// [1,2,3,4,5] 0x112345 // 5 > 3
/// [0,0,1,2,3,4] 0x00001234 // 6 > 3
/// [0,1,2,3,4] 0x101234 // 5 > 3
/// [1,2,3,4] 0x001234 // 4 > 3
/// [0,0,1,2,3,4,5,T] 0x30012345 // 7 > 4
/// [0,0,1,2,3,4,T] 0x20001234 // 6 > 4
/// [0,1,2,3,4,5,T] 0x20012345 // 6 > 4
/// [1,2,3,4,5,T] 0x312345 // 5 > 3
/// [1,2,3,4,T] 0x201234 // 4 > 3
/// ```
fn hex_prefix_encode(nibbles: &[u8], leaf: bool) -> impl Iterator<Item = u8> + '_ {
let inlen = nibbles.len();
let oddness_factor = inlen % 2;
let first_byte = {
let mut bits = ((inlen as u8 & 1) + (2 * leaf as u8)) << 4;
if oddness_factor == 1 {
bits += nibbles[0];
}
bits
};
core::iter::once(first_byte).chain(
nibbles[oddness_factor..]
.chunks(2)
.map(|ch| ch[0] << 4 | ch[1]),
)
}
/// Generates a trie root hash for a vector of key-value tuples
pub fn trie_root<I, K, V>(input: I) -> H256
where
I: IntoIterator<Item = (K, V)>,
K: AsRef<[u8]> + Ord,
V: AsRef<[u8]>,
{
trie_root::trie_root::<KeccakHasher, Hash256RlpTrieStream, _, _, _>(input, None)
}
/// Generates a key-hashed (secure) trie root hash for a vector of key-value tuples.
pub fn sec_trie_root<I, K, V>(input: I) -> H256
where
I: IntoIterator<Item = (K, V)>,
K: AsRef<[u8]>,
V: AsRef<[u8]>,
{
trie_root::sec_trie_root::<KeccakHasher, Hash256RlpTrieStream, _, _, _>(input, None)
}
/// Generates a trie root hash for a vector of values
pub fn ordered_trie_root<I, V>(input: I) -> H256
where
I: IntoIterator<Item = V>,
V: AsRef<[u8]>,
{
trie_root::trie_root::<KeccakHasher, Hash256RlpTrieStream, _, _, _>(
input
.into_iter()
.enumerate()
.map(|(i, v)| (rlp::encode(&i), v)),
None,
)
}
#[cfg(test)]
mod tests {
use ethereum_types::H256;
use hash256_std_hasher::Hash256StdHasher;
use hex_literal::hex;
use sha3::{Digest, Keccak256};
#[derive(Default, Debug, Clone, PartialEq, Eq)]
struct KeccakHasher15;
impl hash_db15::Hasher for KeccakHasher15 {
type Out = H256;
type StdHasher = Hash256StdHasher;
const LENGTH: usize = 32;
fn hash(x: &[u8]) -> Self::Out {
H256::from_slice(Keccak256::digest(x).as_ref())
}
}
#[test]
fn test_trie_root() {
let v = vec![
("doe", "reindeer"),
("dog", "puppy"),
("dogglesworth", "cat"),
];
let root = hex!("8aad789dff2f538bca5d8ea56e8abe10f4c7ba3a5dea95fea4cd6e7c3a1168d3");
let before = triehash::trie_root::<KeccakHasher15, _, _, _>(v.clone());
assert_eq!(before.0, root);
let after = super::trie_root::<_, _, _>(v);
assert_eq!(after.0, root);
}
#[test]
fn test_sec_trie_root() {
let v = vec![
("doe", "reindeer"),
("dog", "puppy"),
("dogglesworth", "cat"),
];
let root = hex!("d4cd937e4a4368d7931a9cf51686b7e10abb3dce38a39000fd7902a092b64585");
let before = triehash::sec_trie_root::<KeccakHasher15, _, _, _>(v.clone());
assert_eq!(before.0, root);
let after = super::sec_trie_root::<_, _, _>(v);
assert_eq!(after.0, root);
}
#[test]
fn test_ordered_trie_root() {
let v = &["doe", "reindeer"];
let root = hex!("e766d5d51b89dc39d981b41bda63248d7abce4f0225eefd023792a540bcffee3");
let before = triehash::ordered_trie_root::<KeccakHasher15, _>(v);
assert_eq!(before.0, root);
let after = super::ordered_trie_root::<_, _>(v);
assert_eq!(after.0, root);
}
}
| rust | Apache-2.0 | d7bdf2888253a30f160d434688e378636e253870 | 2026-01-04T20:22:49.747224Z | false |
rust-ethereum/ethereum | https://github.com/rust-ethereum/ethereum/blob/d7bdf2888253a30f160d434688e378636e253870/src/enveloped.rs | src/enveloped.rs | use bytes::BytesMut;
/// DecoderError for typed transactions.
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum EnvelopedDecoderError<T> {
UnknownTypeId,
Payload(T),
}
impl<T> From<T> for EnvelopedDecoderError<T> {
fn from(e: T) -> Self {
Self::Payload(e)
}
}
/// Encodable typed transactions.
pub trait EnvelopedEncodable {
/// Convert self to an owned vector.
fn encode(&self) -> BytesMut {
let type_id = self.type_id();
let mut out = BytesMut::new();
if let Some(type_id) = type_id {
assert!(type_id <= 0x7f);
out.extend_from_slice(&[type_id]);
}
out.extend_from_slice(&self.encode_payload()[..]);
out
}
/// Type Id of the transaction.
fn type_id(&self) -> Option<u8>;
/// Encode inner payload.
fn encode_payload(&self) -> BytesMut;
}
/// Decodable typed transactions.
pub trait EnvelopedDecodable: Sized {
/// Inner payload decoder error.
type PayloadDecoderError;
/// Decode raw bytes to a Self type.
fn decode(bytes: &[u8]) -> Result<Self, EnvelopedDecoderError<Self::PayloadDecoderError>>;
}
| rust | Apache-2.0 | d7bdf2888253a30f160d434688e378636e253870 | 2026-01-04T20:22:49.747224Z | false |
rust-ethereum/ethereum | https://github.com/rust-ethereum/ethereum/blob/d7bdf2888253a30f160d434688e378636e253870/src/account.rs | src/account.rs | use ethereum_types::{H256, U256};
#[derive(Clone, Debug, PartialEq, Eq)]
#[derive(rlp::RlpEncodable, rlp::RlpDecodable)]
#[cfg_attr(
feature = "with-scale",
derive(scale_codec::Encode, scale_codec::Decode, scale_info::TypeInfo)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct Account {
pub nonce: U256,
pub balance: U256,
pub storage_root: H256,
pub code_hash: H256,
}
| rust | Apache-2.0 | d7bdf2888253a30f160d434688e378636e253870 | 2026-01-04T20:22:49.747224Z | false |
rust-ethereum/ethereum | https://github.com/rust-ethereum/ethereum/blob/d7bdf2888253a30f160d434688e378636e253870/src/transaction/signature.rs | src/transaction/signature.rs | use ethereum_types::H256;
// ECDSA signature validation constants for secp256k1 curve
/// Minimum valid value for signature components r and s (must be >= 1)
pub const SIGNATURE_LOWER_BOUND: H256 = H256([
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01,
]);
/// Maximum valid value for signature components r and s (must be < secp256k1 curve order)
/// This is the secp256k1 curve order: 0xfffffffffffffffffffffffffffffffebaaedce6af48a03bbfd25e8cd0364141
pub const SIGNATURE_UPPER_BOUND: H256 = H256([
0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xfe,
0xba, 0xae, 0xdc, 0xe6, 0xaf, 0x48, 0xa0, 0x3b, 0xbf, 0xd2, 0x5e, 0x8c, 0xd0, 0x36, 0x41, 0x41,
]);
/// Maximum value for low-s signature enforcement (half of curve order)
/// This is used to prevent signature malleability
/// Value: 0x7fffffffffffffffffffffffffffffff5d576e7357a4501ddfe92f46681b20a0
pub const SIGNATURE_LOW_S_BOUND: H256 = H256([
0x7f, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff, 0xff,
0x5d, 0x57, 0x6e, 0x73, 0x57, 0xa4, 0x50, 0x1d, 0xdf, 0xe9, 0x2f, 0x46, 0x68, 0x1b, 0x20, 0xa0,
]);
/// Validates that a signature component (r or s) is within valid range
///
/// A valid signature component must satisfy:
/// - Greater than or equal to 1 (SIGNATURE_LOWER_BOUND)
/// - Less than the secp256k1 curve order (SIGNATURE_UPPER_BOUND)
#[inline]
pub fn is_valid_signature_component(component: &H256) -> bool {
*component >= SIGNATURE_LOWER_BOUND && *component < SIGNATURE_UPPER_BOUND
}
/// Checks if the s component satisfies the low-s requirement
///
/// The low-s requirement helps prevent signature malleability by requiring
/// that s <= n/2 where n is the curve order.
#[inline]
pub fn is_low_s(s: &H256) -> bool {
*s <= SIGNATURE_LOW_S_BOUND
}
#[cfg(test)]
mod tests {
use super::*;
use ethereum_types::U256;
/// Helper function to convert H256 to U256 for arithmetic operations in tests
#[inline]
fn h256_to_u256(h: &H256) -> U256 {
U256::from_big_endian(h.as_bytes())
}
/// Helper function to convert U256 to H256
#[inline]
fn u256_to_h256(u: U256) -> H256 {
H256::from(u.to_big_endian())
}
#[test]
fn test_low_s_bound_is_half_curve_order() {
// SIGNATURE_LOW_S_BOUND should be exactly n/2 where n is the curve order
let n = h256_to_u256(&SIGNATURE_UPPER_BOUND);
let expected_half_n = u256_to_h256(n / 2);
assert_eq!(
SIGNATURE_LOW_S_BOUND, expected_half_n,
"SIGNATURE_LOW_S_BOUND must be exactly half of the curve order"
);
}
#[test]
fn test_signature_bounds() {
// Lower bound is 1
assert_eq!(SIGNATURE_LOWER_BOUND, H256::from_low_u64_be(1));
// Verify that 0 is invalid
assert!(!is_valid_signature_component(&H256::zero()));
// Verify that 1 is valid (minimum)
assert!(is_valid_signature_component(&H256::from_low_u64_be(1)));
// Verify that curve_order - 1 is valid (maximum)
let max_valid = u256_to_h256(h256_to_u256(&SIGNATURE_UPPER_BOUND) - 1);
assert!(is_valid_signature_component(&max_valid));
// Verify that curve_order itself is invalid
assert!(!is_valid_signature_component(&SIGNATURE_UPPER_BOUND));
// Verify that values above curve_order are invalid
let above_max = u256_to_h256(h256_to_u256(&SIGNATURE_UPPER_BOUND) + 1);
assert!(!is_valid_signature_component(&above_max));
}
#[test]
fn test_low_s_validation() {
// s = 0 is invalid (below lower bound)
assert!(!is_valid_signature_component(&H256::zero()));
// s = 1 satisfies low-s requirement
assert!(is_low_s(&u256_to_h256(U256::one())));
// s = low_s_bound satisfies low-s requirement (boundary)
assert!(is_low_s(&SIGNATURE_LOW_S_BOUND));
// s = low_s_bound + 1 does NOT satisfy low-s requirement
let above_low_s = u256_to_h256(h256_to_u256(&SIGNATURE_LOW_S_BOUND) + 1);
assert!(!is_low_s(&above_low_s));
// s = curve_order - 1 is valid but does NOT satisfy low-s
let high_s = u256_to_h256(h256_to_u256(&SIGNATURE_UPPER_BOUND) - 1);
assert!(is_valid_signature_component(&high_s));
assert!(!is_low_s(&high_s));
}
#[test]
fn test_boundary_conditions() {
// Test exact boundary values
assert_eq!(h256_to_u256(&SIGNATURE_LOWER_BOUND), U256::one());
// Ensure low-s bound is exactly half the curve order (curve_order / 2)
// Note: The curve order is odd, so half_order * 2 + 1 = curve_order
let curve_order = h256_to_u256(&SIGNATURE_UPPER_BOUND);
let half_order = h256_to_u256(&SIGNATURE_LOW_S_BOUND);
assert_eq!(curve_order / 2, half_order);
}
}
| rust | Apache-2.0 | d7bdf2888253a30f160d434688e378636e253870 | 2026-01-04T20:22:49.747224Z | false |
rust-ethereum/ethereum | https://github.com/rust-ethereum/ethereum/blob/d7bdf2888253a30f160d434688e378636e253870/src/transaction/eip2930.rs | src/transaction/eip2930.rs | use alloc::vec::Vec;
use ethereum_types::{Address, H256, U256};
use rlp::{DecoderError, Rlp, RlpStream};
use sha3::{Digest, Keccak256};
use super::signature;
use crate::Bytes;
pub use super::legacy::TransactionAction;
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(
scale_info::TypeInfo,
scale_codec::Encode,
scale_codec::Decode,
scale_codec::DecodeWithMemTracking
)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct MalleableTransactionSignature {
pub odd_y_parity: bool,
pub r: H256,
pub s: H256,
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(
scale_info::TypeInfo,
scale_codec::Encode,
scale_codec::DecodeWithMemTracking
)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize))]
pub struct TransactionSignature {
odd_y_parity: bool,
r: H256,
s: H256,
}
impl TransactionSignature {
#[must_use]
pub fn new(odd_y_parity: bool, r: H256, s: H256) -> Option<Self> {
let is_valid = signature::is_valid_signature_component(&r)
&& signature::is_valid_signature_component(&s);
if is_valid {
Some(Self { odd_y_parity, r, s })
} else {
None
}
}
#[must_use]
pub fn odd_y_parity(&self) -> bool {
self.odd_y_parity
}
#[must_use]
pub fn r(&self) -> &H256 {
&self.r
}
#[must_use]
pub fn s(&self) -> &H256 {
&self.s
}
#[must_use]
pub fn is_low_s(&self) -> bool {
signature::is_low_s(&self.s)
}
}
#[cfg(feature = "with-scale")]
impl scale_codec::Decode for TransactionSignature {
fn decode<I: scale_codec::Input>(value: &mut I) -> Result<Self, scale_codec::Error> {
let unchecked = MalleableTransactionSignature::decode(value)?;
match Self::new(unchecked.odd_y_parity, unchecked.r, unchecked.s) {
Some(signature) => Ok(signature),
None => Err(scale_codec::Error::from("Invalid signature")),
}
}
}
#[cfg(feature = "with-serde")]
impl<'de> serde::Deserialize<'de> for TransactionSignature {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let unchecked = MalleableTransactionSignature::deserialize(deserializer)?;
Ok(
TransactionSignature::new(unchecked.odd_y_parity, unchecked.r, unchecked.s)
.ok_or(serde::de::Error::custom("invalid signature"))?,
)
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(
scale_codec::Encode,
scale_codec::Decode,
scale_codec::DecodeWithMemTracking,
scale_info::TypeInfo
)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct AccessListItem {
pub address: Address,
pub storage_keys: Vec<H256>,
}
impl rlp::Encodable for AccessListItem {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(2);
s.append(&self.address);
s.append_list(&self.storage_keys);
}
}
impl rlp::Decodable for AccessListItem {
fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
Ok(Self {
address: rlp.val_at(0)?,
storage_keys: rlp.list_at(1)?,
})
}
}
pub type AccessList = Vec<AccessListItem>;
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(
scale_codec::Encode,
scale_codec::Decode,
scale_codec::DecodeWithMemTracking,
scale_info::TypeInfo
)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct EIP2930Transaction {
pub chain_id: u64,
pub nonce: U256,
pub gas_price: U256,
pub gas_limit: U256,
pub action: TransactionAction,
pub value: U256,
pub input: Bytes,
pub access_list: AccessList,
pub signature: TransactionSignature,
}
impl EIP2930Transaction {
pub fn hash(&self) -> H256 {
let encoded = rlp::encode(self);
let mut out = alloc::vec![0; 1 + encoded.len()];
out[0] = 1;
out[1..].copy_from_slice(&encoded);
H256::from_slice(Keccak256::digest(&out).as_ref())
}
pub fn to_message(self) -> EIP2930TransactionMessage {
EIP2930TransactionMessage {
chain_id: self.chain_id,
nonce: self.nonce,
gas_price: self.gas_price,
gas_limit: self.gas_limit,
action: self.action,
value: self.value,
input: self.input,
access_list: self.access_list,
}
}
}
impl rlp::Encodable for EIP2930Transaction {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(11);
s.append(&self.chain_id);
s.append(&self.nonce);
s.append(&self.gas_price);
s.append(&self.gas_limit);
s.append(&self.action);
s.append(&self.value);
s.append(&self.input);
s.append_list(&self.access_list);
s.append(&self.signature.odd_y_parity());
s.append(&U256::from_big_endian(&self.signature.r()[..]));
s.append(&U256::from_big_endian(&self.signature.s()[..]));
}
}
impl rlp::Decodable for EIP2930Transaction {
fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
if rlp.item_count()? != 11 {
return Err(DecoderError::RlpIncorrectListLen);
}
Ok(Self {
chain_id: rlp.val_at(0)?,
nonce: rlp.val_at(1)?,
gas_price: rlp.val_at(2)?,
gas_limit: rlp.val_at(3)?,
action: rlp.val_at(4)?,
value: rlp.val_at(5)?,
input: rlp.val_at(6)?,
access_list: rlp.list_at(7)?,
signature: {
let odd_y_parity = rlp.val_at(8)?;
let r = H256::from(rlp.val_at::<U256>(9)?.to_big_endian());
let s = H256::from(rlp.val_at::<U256>(10)?.to_big_endian());
TransactionSignature::new(odd_y_parity, r, s)
.ok_or(DecoderError::Custom("Invalid transaction signature format"))?
},
})
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct EIP2930TransactionMessage {
pub chain_id: u64,
pub nonce: U256,
pub gas_price: U256,
pub gas_limit: U256,
pub action: TransactionAction,
pub value: U256,
pub input: Bytes,
pub access_list: AccessList,
}
impl EIP2930TransactionMessage {
pub fn hash(&self) -> H256 {
let encoded = rlp::encode(self);
let mut out = alloc::vec![0; 1 + encoded.len()];
out[0] = 1;
out[1..].copy_from_slice(&encoded);
H256::from_slice(Keccak256::digest(&out).as_ref())
}
}
impl rlp::Encodable for EIP2930TransactionMessage {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(8);
s.append(&self.chain_id);
s.append(&self.nonce);
s.append(&self.gas_price);
s.append(&self.gas_limit);
s.append(&self.action);
s.append(&self.value);
s.append(&self.input);
s.append_list(&self.access_list);
}
}
impl From<EIP2930Transaction> for EIP2930TransactionMessage {
fn from(t: EIP2930Transaction) -> Self {
t.to_message()
}
}
| rust | Apache-2.0 | d7bdf2888253a30f160d434688e378636e253870 | 2026-01-04T20:22:49.747224Z | false |
rust-ethereum/ethereum | https://github.com/rust-ethereum/ethereum/blob/d7bdf2888253a30f160d434688e378636e253870/src/transaction/eip7702.rs | src/transaction/eip7702.rs | use alloc::vec::Vec;
use ethereum_types::{Address, H256, U256};
use k256::ecdsa::{RecoveryId, Signature, VerifyingKey};
use rlp::{DecoderError, Rlp, RlpStream};
use sha3::{Digest, Keccak256};
use crate::Bytes;
pub use super::eip2930::{
AccessList, MalleableTransactionSignature, TransactionAction, TransactionSignature,
};
/// Error type for EIP-7702 authorization signature recovery
#[derive(Debug, Clone, PartialEq, Eq)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub enum AuthorizationError {
/// Invalid signature format
InvalidSignature,
/// Invalid recovery ID
InvalidRecoveryId,
/// Signature recovery failed
RecoveryFailed,
/// Invalid public key format
InvalidPublicKey,
}
/// EIP-7702 transaction type as defined in the specification
pub const SET_CODE_TX_TYPE: u8 = 0x04;
/// EIP-7702 authorization message magic prefix
pub const AUTHORIZATION_MAGIC: u8 = 0x05;
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(
scale_codec::Encode,
scale_codec::Decode,
scale_codec::DecodeWithMemTracking,
scale_info::TypeInfo
)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct AuthorizationListItem {
pub chain_id: u64,
pub address: Address,
pub nonce: U256,
pub signature: MalleableTransactionSignature,
}
impl rlp::Encodable for AuthorizationListItem {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(6);
s.append(&self.chain_id);
s.append(&self.address);
s.append(&self.nonce);
s.append(&self.signature.odd_y_parity);
s.append(&U256::from_big_endian(&self.signature.r[..]));
s.append(&U256::from_big_endian(&self.signature.s[..]));
}
}
impl rlp::Decodable for AuthorizationListItem {
fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
if rlp.item_count()? != 6 {
return Err(DecoderError::RlpIncorrectListLen);
}
Ok(Self {
chain_id: rlp.val_at(0)?,
address: rlp.val_at(1)?,
nonce: rlp.val_at(2)?,
signature: {
let odd_y_parity = rlp.val_at(3)?;
let r = H256::from(rlp.val_at::<U256>(4)?.to_big_endian());
let s = H256::from(rlp.val_at::<U256>(5)?.to_big_endian());
MalleableTransactionSignature { odd_y_parity, r, s }
},
})
}
}
impl AuthorizationListItem {
/// Check and get the signature.
///
/// This checks that the signature is not malleable, but does not otherwise check or recover
/// the public key.
pub fn signature(&self) -> Option<TransactionSignature> {
TransactionSignature::new(
self.signature.odd_y_parity,
self.signature.r,
self.signature.s,
)
}
/// Recover the authorizing address from the authorization signature according to EIP-7702
pub fn authorizing_address(&self) -> Result<Address, AuthorizationError> {
// Create the authorization message hash according to EIP-7702
let message_hash = self.authorization_message_hash();
let sigv = self
.signature()
.ok_or(AuthorizationError::InvalidSignature)?;
// Create signature from r and s components
let mut signature_bytes = [0u8; 64];
signature_bytes[0..32].copy_from_slice(&sigv.r()[..]);
signature_bytes[32..64].copy_from_slice(&sigv.s()[..]);
// Create the signature and recovery ID
let signature = Signature::from_bytes(&signature_bytes.into())
.map_err(|_| AuthorizationError::InvalidSignature)?;
let recovery_id = RecoveryId::try_from(if sigv.odd_y_parity() { 1u8 } else { 0u8 })
.map_err(|_| AuthorizationError::InvalidRecoveryId)?;
// Recover the verifying key using VerifyingKey::recover_from_prehash
// message_hash is already a 32-byte Keccak256 hash, so we use recover_from_prehash
let verifying_key =
VerifyingKey::recover_from_prehash(message_hash.as_bytes(), &signature, recovery_id)
.map_err(|_| AuthorizationError::RecoveryFailed)?;
// Convert public key to Ethereum address
Self::verifying_key_to_address(&verifying_key)
}
/// Create the authorization message hash according to EIP-7702
pub fn authorization_message_hash(&self) -> H256 {
// EIP-7702 authorization message format:
// MAGIC || rlp([chain_id, address, nonce])
let mut message = alloc::vec![AUTHORIZATION_MAGIC];
// RLP encode the authorization tuple
let mut rlp_stream = RlpStream::new_list(3);
rlp_stream.append(&self.chain_id);
rlp_stream.append(&self.address);
rlp_stream.append(&self.nonce);
message.extend_from_slice(&rlp_stream.out());
// Return keccak256 hash of the complete message
H256::from_slice(Keccak256::digest(&message).as_ref())
}
/// Convert VerifyingKey to Ethereum address
fn verifying_key_to_address(
verifying_key: &VerifyingKey,
) -> Result<Address, AuthorizationError> {
// Convert public key to bytes (uncompressed format, skip the 0x04 prefix)
let pubkey_point = verifying_key.to_encoded_point(false);
let pubkey_bytes = pubkey_point.as_bytes();
// pubkey_bytes is 65 bytes: [0x04, x_coord (32 bytes), y_coord (32 bytes)]
// We want just the x and y coordinates (64 bytes total)
if pubkey_bytes.len() >= 65 && pubkey_bytes[0] == 0x04 {
let pubkey_coords = &pubkey_bytes[1..65];
// Ethereum address is the last 20 bytes of keccak256(pubkey)
let hash = Keccak256::digest(pubkey_coords);
Ok(Address::from_slice(&hash[12..]))
} else {
Err(AuthorizationError::InvalidPublicKey)
}
}
}
pub type AuthorizationList = Vec<AuthorizationListItem>;
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(
scale_codec::Encode,
scale_codec::Decode,
scale_codec::DecodeWithMemTracking,
scale_info::TypeInfo
)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct EIP7702Transaction {
pub chain_id: u64,
pub nonce: U256,
pub max_priority_fee_per_gas: U256,
pub max_fee_per_gas: U256,
pub gas_limit: U256,
pub destination: TransactionAction,
pub value: U256,
pub data: Bytes,
pub access_list: AccessList,
pub authorization_list: AuthorizationList,
pub signature: TransactionSignature,
}
impl EIP7702Transaction {
pub fn hash(&self) -> H256 {
let encoded = rlp::encode(self);
let mut out = alloc::vec![0; 1 + encoded.len()];
out[0] = SET_CODE_TX_TYPE;
out[1..].copy_from_slice(&encoded);
H256::from_slice(Keccak256::digest(&out).as_ref())
}
pub fn to_message(self) -> EIP7702TransactionMessage {
EIP7702TransactionMessage {
chain_id: self.chain_id,
nonce: self.nonce,
max_priority_fee_per_gas: self.max_priority_fee_per_gas,
max_fee_per_gas: self.max_fee_per_gas,
gas_limit: self.gas_limit,
destination: self.destination,
value: self.value,
data: self.data,
access_list: self.access_list,
authorization_list: self.authorization_list,
}
}
}
impl rlp::Encodable for EIP7702Transaction {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(13);
s.append(&self.chain_id);
s.append(&self.nonce);
s.append(&self.max_priority_fee_per_gas);
s.append(&self.max_fee_per_gas);
s.append(&self.gas_limit);
s.append(&self.destination);
s.append(&self.value);
s.append(&self.data);
s.append_list(&self.access_list);
s.append_list(&self.authorization_list);
s.append(&self.signature.odd_y_parity());
s.append(&U256::from_big_endian(&self.signature.r()[..]));
s.append(&U256::from_big_endian(&self.signature.s()[..]));
}
}
impl rlp::Decodable for EIP7702Transaction {
fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
if rlp.item_count()? != 13 {
return Err(DecoderError::RlpIncorrectListLen);
}
Ok(Self {
chain_id: rlp.val_at(0)?,
nonce: rlp.val_at(1)?,
max_priority_fee_per_gas: rlp.val_at(2)?,
max_fee_per_gas: rlp.val_at(3)?,
gas_limit: rlp.val_at(4)?,
destination: rlp.val_at(5)?,
value: rlp.val_at(6)?,
data: rlp.val_at(7)?,
access_list: rlp.list_at(8)?,
authorization_list: rlp.list_at(9)?,
signature: {
let odd_y_parity = rlp.val_at(10)?;
let r = H256::from(rlp.val_at::<U256>(11)?.to_big_endian());
let s = H256::from(rlp.val_at::<U256>(12)?.to_big_endian());
TransactionSignature::new(odd_y_parity, r, s)
.ok_or(DecoderError::Custom("Invalid transaction signature format"))?
},
})
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct EIP7702TransactionMessage {
pub chain_id: u64,
pub nonce: U256,
pub max_priority_fee_per_gas: U256,
pub max_fee_per_gas: U256,
pub gas_limit: U256,
pub destination: TransactionAction,
pub value: U256,
pub data: Bytes,
pub access_list: AccessList,
pub authorization_list: AuthorizationList,
}
impl EIP7702TransactionMessage {
pub fn hash(&self) -> H256 {
let encoded = rlp::encode(self);
let mut out = alloc::vec![0; 1 + encoded.len()];
out[0] = SET_CODE_TX_TYPE;
out[1..].copy_from_slice(&encoded);
H256::from_slice(Keccak256::digest(&out).as_ref())
}
}
impl rlp::Encodable for EIP7702TransactionMessage {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(10);
s.append(&self.chain_id);
s.append(&self.nonce);
s.append(&self.max_priority_fee_per_gas);
s.append(&self.max_fee_per_gas);
s.append(&self.gas_limit);
s.append(&self.destination);
s.append(&self.value);
s.append(&self.data);
s.append_list(&self.access_list);
s.append_list(&self.authorization_list);
}
}
impl From<EIP7702Transaction> for EIP7702TransactionMessage {
fn from(t: EIP7702Transaction) -> Self {
t.to_message()
}
}
#[cfg(test)]
mod tests {
use super::*;
use ethereum_types::{Address, H256, U256};
#[test]
fn test_authorizing_address_with_real_signature() {
use k256::ecdsa::SigningKey;
use k256::elliptic_curve::SecretKey;
// Use a fixed test private key for deterministic testing
let private_key_bytes = [
0x01, 0x02, 0x03, 0x04, 0x05, 0x06, 0x07, 0x08, 0x09, 0x0a, 0x0b, 0x0c, 0x0d, 0x0e,
0x0f, 0x10, 0x11, 0x12, 0x13, 0x14, 0x15, 0x16, 0x17, 0x18, 0x19, 0x1a, 0x1b, 0x1c,
0x1d, 0x1e, 0x1f, 0x20,
];
let secret_key =
SecretKey::from_bytes(&private_key_bytes.into()).expect("Invalid private key");
let signing_key = SigningKey::from(secret_key);
let verifying_key = signing_key.verifying_key();
// Create authorization data
let chain_id = 1u64;
let address = Address::from_slice(&[0x42u8; 20]);
let nonce = U256::zero();
// Create the EIP-7702 authorization message hash
let mut message = alloc::vec![AUTHORIZATION_MAGIC];
let mut rlp_stream = RlpStream::new_list(3);
rlp_stream.append(&chain_id);
rlp_stream.append(&address);
rlp_stream.append(&nonce);
message.extend_from_slice(&rlp_stream.out());
let message_hash = H256::from_slice(Keccak256::digest(&message).as_ref());
// Sign the message hash
let (signature, recovery_id) = signing_key
.sign_prehash_recoverable(message_hash.as_bytes())
.expect("Failed to sign message");
// Extract signature components
let signature_bytes = signature.to_bytes();
let r = H256::from_slice(&signature_bytes[0..32]);
let s = H256::from_slice(&signature_bytes[32..64]);
let y_parity = recovery_id.is_y_odd();
// Create AuthorizationListItem with real signature
let auth_item = AuthorizationListItem {
chain_id,
address,
nonce,
signature: MalleableTransactionSignature {
odd_y_parity: y_parity,
r,
s,
},
};
// Recover the authorizing address
let recovered_address = auth_item
.authorizing_address()
.expect("Failed to recover authorizing address");
// Convert the original verifying key to an Ethereum address for comparison
let expected_address = AuthorizationListItem::verifying_key_to_address(&verifying_key)
.expect("Failed to convert verifying key to address");
// Verify that the recovered address matches the original signer
assert_eq!(recovered_address, expected_address);
assert_ne!(recovered_address, Address::zero());
// For deterministic testing, verify specific expected values
// This ensures the implementation is working correctly with known inputs
assert_eq!(
expected_address,
Address::from_slice(&hex_literal::hex!(
"6370ef2f4db3611d657b90667de398a2cc2a370c"
))
);
}
#[test]
fn test_authorizing_address_error_handling() {
// Test with invalid signature components (zero values are invalid in ECDSA)
assert!(TransactionSignature::new(
false,
H256::zero(), // Invalid r value (r cannot be zero)
H256::zero(), // Invalid s value (s cannot be zero)
)
.is_none());
// Test with values that are too high (greater than secp256k1 curve order)
// secp256k1 curve order is FFFFFFFFFFFFFFFFFFFFFFFFFFFFFFFEBAAEDCE6AF48A03BBFD25E8CD0364141
assert!(TransactionSignature::new(
false,
// Use maximum possible values which exceed the curve order
H256::from_slice(&[0xFF; 32]),
H256::from_slice(&[0xFF; 32]),
)
.is_none());
}
}
| rust | Apache-2.0 | d7bdf2888253a30f160d434688e378636e253870 | 2026-01-04T20:22:49.747224Z | false |
rust-ethereum/ethereum | https://github.com/rust-ethereum/ethereum/blob/d7bdf2888253a30f160d434688e378636e253870/src/transaction/mod.rs | src/transaction/mod.rs | pub mod eip1559;
pub mod eip2930;
pub mod eip7702;
pub mod legacy;
mod signature;
use bytes::BytesMut;
use ethereum_types::H256;
use rlp::{DecoderError, Rlp};
pub use self::{
eip1559::{EIP1559Transaction, EIP1559TransactionMessage},
eip2930::{AccessList, AccessListItem, EIP2930Transaction, EIP2930TransactionMessage},
eip7702::{
AuthorizationList, AuthorizationListItem, EIP7702Transaction, EIP7702TransactionMessage,
},
legacy::{LegacyTransaction, LegacyTransactionMessage, TransactionAction},
};
use crate::enveloped::{EnvelopedDecodable, EnvelopedDecoderError, EnvelopedEncodable};
pub type TransactionV0 = LegacyTransaction;
impl EnvelopedEncodable for TransactionV0 {
fn type_id(&self) -> Option<u8> {
None
}
fn encode_payload(&self) -> BytesMut {
rlp::encode(self)
}
}
impl EnvelopedDecodable for TransactionV0 {
type PayloadDecoderError = DecoderError;
fn decode(bytes: &[u8]) -> Result<Self, EnvelopedDecoderError<Self::PayloadDecoderError>> {
Ok(rlp::decode(bytes)?)
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(scale_codec::Encode, scale_codec::Decode, scale_info::TypeInfo)
)]
#[cfg_attr(
feature = "with-serde",
derive(serde::Serialize, serde::Deserialize),
serde(untagged)
)]
pub enum TransactionV1 {
/// Legacy transaction type
Legacy(LegacyTransaction),
/// EIP-2930 transaction
EIP2930(EIP2930Transaction),
}
impl TransactionV1 {
pub fn hash(&self) -> H256 {
match self {
TransactionV1::Legacy(t) => t.hash(),
TransactionV1::EIP2930(t) => t.hash(),
}
}
}
impl EnvelopedEncodable for TransactionV1 {
fn type_id(&self) -> Option<u8> {
match self {
Self::Legacy(_) => None,
Self::EIP2930(_) => Some(1),
}
}
fn encode_payload(&self) -> BytesMut {
match self {
Self::Legacy(tx) => rlp::encode(tx),
Self::EIP2930(tx) => rlp::encode(tx),
}
}
}
impl EnvelopedDecodable for TransactionV1 {
type PayloadDecoderError = DecoderError;
fn decode(bytes: &[u8]) -> Result<Self, EnvelopedDecoderError<Self::PayloadDecoderError>> {
if bytes.is_empty() {
return Err(EnvelopedDecoderError::UnknownTypeId);
}
let first = bytes[0];
let rlp = Rlp::new(bytes);
if rlp.is_list() {
return Ok(Self::Legacy(rlp.as_val()?));
}
let s = &bytes[1..];
if first == 0x01 {
return Ok(Self::EIP2930(rlp::decode(s)?));
}
Err(DecoderError::Custom("invalid tx type").into())
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(
scale_codec::Encode,
scale_codec::Decode,
scale_codec::DecodeWithMemTracking,
scale_info::TypeInfo
)
)]
#[cfg_attr(
feature = "with-serde",
derive(serde::Serialize, serde::Deserialize),
serde(untagged)
)]
pub enum TransactionV2 {
/// Legacy transaction type
Legacy(LegacyTransaction),
/// EIP-2930 transaction
EIP2930(EIP2930Transaction),
/// EIP-1559 transaction
EIP1559(EIP1559Transaction),
}
impl TransactionV2 {
pub fn hash(&self) -> H256 {
match self {
TransactionV2::Legacy(t) => t.hash(),
TransactionV2::EIP2930(t) => t.hash(),
TransactionV2::EIP1559(t) => t.hash(),
}
}
}
impl EnvelopedEncodable for TransactionV2 {
fn type_id(&self) -> Option<u8> {
match self {
Self::Legacy(_) => None,
Self::EIP2930(_) => Some(1),
Self::EIP1559(_) => Some(2),
}
}
fn encode_payload(&self) -> BytesMut {
match self {
Self::Legacy(tx) => rlp::encode(tx),
Self::EIP2930(tx) => rlp::encode(tx),
Self::EIP1559(tx) => rlp::encode(tx),
}
}
}
impl EnvelopedDecodable for TransactionV2 {
type PayloadDecoderError = DecoderError;
fn decode(bytes: &[u8]) -> Result<Self, EnvelopedDecoderError<Self::PayloadDecoderError>> {
if bytes.is_empty() {
return Err(EnvelopedDecoderError::UnknownTypeId);
}
let first = bytes[0];
let rlp = Rlp::new(bytes);
if rlp.is_list() {
return Ok(Self::Legacy(rlp.as_val()?));
}
let s = &bytes[1..];
if first == 0x01 {
return Ok(Self::EIP2930(rlp::decode(s)?));
}
if first == 0x02 {
return Ok(Self::EIP1559(rlp::decode(s)?));
}
Err(DecoderError::Custom("invalid tx type").into())
}
}
impl From<LegacyTransaction> for TransactionV1 {
fn from(t: LegacyTransaction) -> Self {
TransactionV1::Legacy(t)
}
}
impl From<LegacyTransaction> for TransactionV2 {
fn from(t: LegacyTransaction) -> Self {
TransactionV2::Legacy(t)
}
}
impl From<TransactionV1> for TransactionV2 {
fn from(t: TransactionV1) -> Self {
match t {
TransactionV1::Legacy(t) => TransactionV2::Legacy(t),
TransactionV1::EIP2930(t) => TransactionV2::EIP2930(t),
}
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(
scale_codec::Encode,
scale_codec::Decode,
scale_codec::DecodeWithMemTracking,
scale_info::TypeInfo
)
)]
#[cfg_attr(
feature = "with-serde",
derive(serde::Serialize, serde::Deserialize),
serde(untagged)
)]
pub enum TransactionV3 {
/// Legacy transaction type
Legacy(LegacyTransaction),
/// EIP-2930 transaction
EIP2930(EIP2930Transaction),
/// EIP-1559 transaction
EIP1559(EIP1559Transaction),
/// EIP-7702 transaction
EIP7702(EIP7702Transaction),
}
impl TransactionV3 {
pub fn hash(&self) -> H256 {
match self {
TransactionV3::Legacy(t) => t.hash(),
TransactionV3::EIP2930(t) => t.hash(),
TransactionV3::EIP1559(t) => t.hash(),
TransactionV3::EIP7702(t) => t.hash(),
}
}
}
impl EnvelopedEncodable for TransactionV3 {
fn type_id(&self) -> Option<u8> {
match self {
Self::Legacy(_) => None,
Self::EIP2930(_) => Some(1),
Self::EIP1559(_) => Some(2),
Self::EIP7702(_) => Some(4),
}
}
fn encode_payload(&self) -> BytesMut {
match self {
Self::Legacy(tx) => rlp::encode(tx),
Self::EIP2930(tx) => rlp::encode(tx),
Self::EIP1559(tx) => rlp::encode(tx),
Self::EIP7702(tx) => rlp::encode(tx),
}
}
}
impl EnvelopedDecodable for TransactionV3 {
type PayloadDecoderError = DecoderError;
fn decode(bytes: &[u8]) -> Result<Self, EnvelopedDecoderError<Self::PayloadDecoderError>> {
if bytes.is_empty() {
return Err(EnvelopedDecoderError::UnknownTypeId);
}
let first = bytes[0];
let rlp = Rlp::new(bytes);
if rlp.is_list() {
return Ok(Self::Legacy(rlp.as_val()?));
}
let s = &bytes[1..];
if first == 0x01 {
return Ok(Self::EIP2930(rlp::decode(s)?));
}
if first == 0x02 {
return Ok(Self::EIP1559(rlp::decode(s)?));
}
if first == 0x04 {
return Ok(Self::EIP7702(rlp::decode(s)?));
}
Err(DecoderError::Custom("invalid tx type").into())
}
}
impl From<LegacyTransaction> for TransactionV3 {
fn from(t: LegacyTransaction) -> Self {
TransactionV3::Legacy(t)
}
}
impl From<TransactionV1> for TransactionV3 {
fn from(t: TransactionV1) -> Self {
match t {
TransactionV1::Legacy(t) => TransactionV3::Legacy(t),
TransactionV1::EIP2930(t) => TransactionV3::EIP2930(t),
}
}
}
impl From<TransactionV2> for TransactionV3 {
fn from(t: TransactionV2) -> Self {
match t {
TransactionV2::Legacy(t) => TransactionV3::Legacy(t),
TransactionV2::EIP2930(t) => TransactionV3::EIP2930(t),
TransactionV2::EIP1559(t) => TransactionV3::EIP1559(t),
}
}
}
pub type TransactionAny = TransactionV3;
#[cfg(test)]
mod tests {
use super::{
eip2930::{self, AccessListItem},
eip7702::AuthorizationListItem,
legacy::{self, TransactionAction},
EIP1559Transaction, EIP2930Transaction, EIP7702Transaction, EnvelopedDecodable,
TransactionV0, TransactionV1, TransactionV2, TransactionV3,
};
use crate::enveloped::*;
use ethereum_types::U256;
use hex_literal::hex;
#[test]
fn can_decode_raw_transaction() {
let bytes = hex!("f901e48080831000008080b90196608060405234801561001057600080fd5b50336000806101000a81548173ffffffffffffffffffffffffffffffffffffffff021916908373ffffffffffffffffffffffffffffffffffffffff1602179055507fc68045c3c562488255b55aa2c4c7849de001859ff0d8a36a75c2d5ed80100fb660405180806020018281038252600d8152602001807f48656c6c6f2c20776f726c64210000000000000000000000000000000000000081525060200191505060405180910390a160cf806100c76000396000f3fe6080604052348015600f57600080fd5b506004361060285760003560e01c80638da5cb5b14602d575b600080fd5b60336075565b604051808273ffffffffffffffffffffffffffffffffffffffff1673ffffffffffffffffffffffffffffffffffffffff16815260200191505060405180910390f35b6000809054906101000a900473ffffffffffffffffffffffffffffffffffffffff168156fea265627a7a72315820fae816ad954005c42bea7bc7cb5b19f7fd5d3a250715ca2023275c9ca7ce644064736f6c634300050f003278a04cab43609092a99cf095d458b61b47189d1bbab64baed10a0fd7b7d2de2eb960a011ab1bcda76dfed5e733219beb83789f9887b2a7b2e61759c7c90f7d40403201");
<TransactionV0 as EnvelopedDecodable>::decode(&bytes).unwrap();
<TransactionV1 as EnvelopedDecodable>::decode(&bytes).unwrap();
<TransactionV2 as EnvelopedDecodable>::decode(&bytes).unwrap();
<TransactionV3 as EnvelopedDecodable>::decode(&bytes).unwrap();
}
#[test]
fn transaction_v0() {
let tx = TransactionV0 {
nonce: 12.into(),
gas_price: 20_000_000_000_u64.into(),
gas_limit: 21000.into(),
action: TransactionAction::Call(
hex!("727fc6a68321b754475c668a6abfb6e9e71c169a").into(),
),
value: U256::from(10) * 1_000_000_000 * 1_000_000_000,
input: hex!("a9059cbb000000000213ed0f886efd100b67c7e4ec0a85a7d20dc971600000000000000000000015af1d78b58c4000").into(),
signature: legacy::TransactionSignature::new(38, hex!("be67e0a07db67da8d446f76add590e54b6e92cb6b8f9835aeb67540579a27717").into(), hex!("2d690516512020171c1ec870f6ff45398cc8609250326be89915fb538e7bd718").into()).unwrap(),
};
assert_eq!(
tx,
<TransactionV0 as EnvelopedDecodable>::decode(&tx.encode()).unwrap()
);
}
#[test]
fn transaction_v1() {
let tx = TransactionV1::EIP2930(EIP2930Transaction {
chain_id: 5,
nonce: 7.into(),
gas_price: 30_000_000_000_u64.into(),
gas_limit: 5_748_100_u64.into(),
action: TransactionAction::Call(
hex!("811a752c8cd697e3cb27279c330ed1ada745a8d7").into(),
),
value: U256::from(2) * 1_000_000_000 * 1_000_000_000,
input: hex!("6ebaf477f83e051589c1188bcc6ddccd").into(),
access_list: vec![
AccessListItem {
address: hex!("de0b295669a9fd93d5f28d9ec85e40f4cb697bae").into(),
storage_keys: vec![
hex!("0000000000000000000000000000000000000000000000000000000000000003")
.into(),
hex!("0000000000000000000000000000000000000000000000000000000000000007")
.into(),
],
},
AccessListItem {
address: hex!("bb9bc244d798123fde783fcc1c72d3bb8c189413").into(),
storage_keys: vec![],
},
],
signature: eip2930::TransactionSignature::new(
false,
hex!("36b241b061a36a32ab7fe86c7aa9eb592dd59018cd0443adc0903590c16b02b0").into(),
hex!("5edcc541b4741c5cc6dd347c5ed9577ef293a62787b4510465fadbfe39ee4094").into(),
)
.unwrap(),
});
assert_eq!(
tx,
<TransactionV1 as EnvelopedDecodable>::decode(&tx.encode()).unwrap()
);
}
#[test]
fn transaction_v2() {
let tx = TransactionV2::EIP1559(EIP1559Transaction {
chain_id: 5,
nonce: 7.into(),
max_priority_fee_per_gas: 10_000_000_000_u64.into(),
max_fee_per_gas: 30_000_000_000_u64.into(),
gas_limit: 5_748_100_u64.into(),
action: TransactionAction::Call(
hex!("811a752c8cd697e3cb27279c330ed1ada745a8d7").into(),
),
value: U256::from(2) * 1_000_000_000 * 1_000_000_000,
input: hex!("6ebaf477f83e051589c1188bcc6ddccd").into(),
access_list: vec![
AccessListItem {
address: hex!("de0b295669a9fd93d5f28d9ec85e40f4cb697bae").into(),
storage_keys: vec![
hex!("0000000000000000000000000000000000000000000000000000000000000003")
.into(),
hex!("0000000000000000000000000000000000000000000000000000000000000007")
.into(),
],
},
AccessListItem {
address: hex!("bb9bc244d798123fde783fcc1c72d3bb8c189413").into(),
storage_keys: vec![],
},
],
signature: eip2930::TransactionSignature::new(
false,
hex!("36b241b061a36a32ab7fe86c7aa9eb592dd59018cd0443adc0903590c16b02b0").into(),
hex!("5edcc541b4741c5cc6dd347c5ed9577ef293a62787b4510465fadbfe39ee4094").into(),
)
.unwrap(),
});
assert_eq!(
tx,
<TransactionV2 as EnvelopedDecodable>::decode(&tx.encode()).unwrap()
);
}
#[test]
fn transaction_v3() {
let tx = TransactionV3::EIP7702(EIP7702Transaction {
chain_id: 5,
nonce: 7.into(),
max_priority_fee_per_gas: 10_000_000_000_u64.into(),
max_fee_per_gas: 30_000_000_000_u64.into(),
gas_limit: 5_748_100_u64.into(),
destination: TransactionAction::Call(
hex!("811a752c8cd697e3cb27279c330ed1ada745a8d7").into(),
),
value: U256::from(2) * 1_000_000_000 * 1_000_000_000,
data: hex!("6ebaf477f83e051589c1188bcc6ddccd").into(),
access_list: vec![AccessListItem {
address: hex!("de0b295669a9fd93d5f28d9ec85e40f4cb697bae").into(),
storage_keys: vec![hex!(
"0000000000000000000000000000000000000000000000000000000000000003"
)
.into()],
}],
authorization_list: vec![AuthorizationListItem {
chain_id: 5,
address: hex!("de0b295669a9fd93d5f28d9ec85e40f4cb697bae").into(),
nonce: 1.into(),
signature: eip2930::MalleableTransactionSignature {
odd_y_parity: false,
r: hex!("36b241b061a36a32ab7fe86c7aa9eb592dd59018cd0443adc0903590c16b02b0")
.into(),
s: hex!("5edcc541b4741c5cc6dd347c5ed9577ef293a62787b4510465fadbfe39ee4094")
.into(),
},
}],
signature: eip2930::TransactionSignature::new(
false,
hex!("36b241b061a36a32ab7fe86c7aa9eb592dd59018cd0443adc0903590c16b02b0").into(),
hex!("5edcc541b4741c5cc6dd347c5ed9577ef293a62787b4510465fadbfe39ee4094").into(),
)
.unwrap(),
});
assert_eq!(
tx,
<TransactionV3 as EnvelopedDecodable>::decode(&tx.encode()).unwrap()
);
}
}
| rust | Apache-2.0 | d7bdf2888253a30f160d434688e378636e253870 | 2026-01-04T20:22:49.747224Z | false |
rust-ethereum/ethereum | https://github.com/rust-ethereum/ethereum/blob/d7bdf2888253a30f160d434688e378636e253870/src/transaction/legacy.rs | src/transaction/legacy.rs | use core::ops::Deref;
use ethereum_types::{H160, H256, U256};
use rlp::{DecoderError, Rlp, RlpStream};
use sha3::{Digest, Keccak256};
use super::signature;
use crate::Bytes;
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(
scale_codec::Encode,
scale_codec::Decode,
scale_codec::DecodeWithMemTracking,
scale_info::TypeInfo
)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub enum TransactionAction {
Call(H160),
Create,
}
impl rlp::Encodable for TransactionAction {
fn rlp_append(&self, s: &mut RlpStream) {
match self {
Self::Call(address) => {
s.encoder().encode_value(&address[..]);
}
Self::Create => s.encoder().encode_value(&[]),
}
}
}
impl rlp::Decodable for TransactionAction {
fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
if rlp.is_empty() {
if rlp.is_data() {
Ok(TransactionAction::Create)
} else {
Err(DecoderError::RlpExpectedToBeData)
}
} else {
Ok(TransactionAction::Call(rlp.as_val()?))
}
}
}
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(
scale_codec::Encode,
scale_codec::Decode,
scale_codec::DecodeWithMemTracking,
scale_info::TypeInfo
)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct TransactionRecoveryId(pub u64);
impl Deref for TransactionRecoveryId {
type Target = u64;
fn deref(&self) -> &u64 {
&self.0
}
}
impl TransactionRecoveryId {
pub fn standard(self) -> u8 {
if self.0 == 27 || self.0 == 28 || self.0 > 36 {
((self.0 - 1) % 2) as u8
} else {
4
}
}
pub fn chain_id(self) -> Option<u64> {
if self.0 > 36 {
Some((self.0 - 35) / 2)
} else {
None
}
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(scale_info::TypeInfo, scale_codec::DecodeWithMemTracking)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize))]
pub struct TransactionSignature {
v: TransactionRecoveryId,
r: H256,
s: H256,
}
impl TransactionSignature {
#[must_use]
pub fn new(v: u64, r: H256, s: H256) -> Option<Self> {
let v = TransactionRecoveryId(v);
let is_valid = v.standard() <= 1
&& signature::is_valid_signature_component(&r)
&& signature::is_valid_signature_component(&s);
if is_valid {
Some(Self { v, r, s })
} else {
None
}
}
#[must_use]
pub fn v(&self) -> u64 {
self.v.0
}
#[must_use]
pub fn standard_v(&self) -> u8 {
self.v.standard()
}
#[must_use]
pub fn chain_id(&self) -> Option<u64> {
self.v.chain_id()
}
#[must_use]
pub fn r(&self) -> &H256 {
&self.r
}
#[must_use]
pub fn s(&self) -> &H256 {
&self.s
}
#[must_use]
pub fn is_low_s(&self) -> bool {
signature::is_low_s(&self.s)
}
}
#[cfg(feature = "with-scale")]
impl scale_codec::Encode for TransactionSignature {
fn size_hint(&self) -> usize {
scale_codec::Encode::size_hint(&(self.v.0, self.r, self.s))
}
fn using_encoded<R, F: FnOnce(&[u8]) -> R>(&self, f: F) -> R {
scale_codec::Encode::using_encoded(&(self.v.0, self.r, self.s), f)
}
}
#[cfg(feature = "with-scale")]
impl scale_codec::Decode for TransactionSignature {
fn decode<I: scale_codec::Input>(value: &mut I) -> Result<Self, scale_codec::Error> {
let (v, r, s) = scale_codec::Decode::decode(value)?;
match Self::new(v, r, s) {
Some(signature) => Ok(signature),
None => Err(scale_codec::Error::from("Invalid signature")),
}
}
}
#[cfg(feature = "with-serde")]
#[derive(serde::Deserialize)]
struct TransactionSignatureUnchecked {
v: u64,
r: H256,
s: H256,
}
#[cfg(feature = "with-serde")]
impl<'de> serde::Deserialize<'de> for TransactionSignature {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: serde::de::Deserializer<'de>,
{
let unchecked = TransactionSignatureUnchecked::deserialize(deserializer)?;
Ok(
TransactionSignature::new(unchecked.v, unchecked.r, unchecked.s)
.ok_or(serde::de::Error::custom("invalid signature"))?,
)
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(
scale_codec::Encode,
scale_codec::Decode,
scale_codec::DecodeWithMemTracking,
scale_info::TypeInfo
)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct LegacyTransaction {
pub nonce: U256,
pub gas_price: U256,
pub gas_limit: U256,
pub action: TransactionAction,
pub value: U256,
pub input: Bytes,
pub signature: TransactionSignature,
}
impl LegacyTransaction {
pub fn hash(&self) -> H256 {
H256::from_slice(Keccak256::digest(rlp::encode(self)).as_ref())
}
pub fn to_message(self) -> LegacyTransactionMessage {
LegacyTransactionMessage {
nonce: self.nonce,
gas_price: self.gas_price,
gas_limit: self.gas_limit,
action: self.action,
value: self.value,
input: self.input,
chain_id: self.signature.chain_id(),
}
}
}
impl rlp::Encodable for LegacyTransaction {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(9);
s.append(&self.nonce);
s.append(&self.gas_price);
s.append(&self.gas_limit);
s.append(&self.action);
s.append(&self.value);
s.append(&self.input);
s.append(&self.signature.v.0);
s.append(&U256::from_big_endian(&self.signature.r[..]));
s.append(&U256::from_big_endian(&self.signature.s[..]));
}
}
impl rlp::Decodable for LegacyTransaction {
fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
if rlp.item_count()? != 9 {
return Err(DecoderError::RlpIncorrectListLen);
}
let v = rlp.val_at(6)?;
let r = H256::from(rlp.val_at::<U256>(7)?.to_big_endian());
let s = H256::from(rlp.val_at::<U256>(8)?.to_big_endian());
let signature = TransactionSignature::new(v, r, s)
.ok_or(DecoderError::Custom("Invalid transaction signature format"))?;
Ok(Self {
nonce: rlp.val_at(0)?,
gas_price: rlp.val_at(1)?,
gas_limit: rlp.val_at(2)?,
action: rlp.val_at(3)?,
value: rlp.val_at(4)?,
input: rlp.val_at(5)?,
signature,
})
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct LegacyTransactionMessage {
pub nonce: U256,
pub gas_price: U256,
pub gas_limit: U256,
pub action: TransactionAction,
pub value: U256,
pub input: Bytes,
pub chain_id: Option<u64>,
}
impl LegacyTransactionMessage {
pub fn hash(&self) -> H256 {
H256::from_slice(Keccak256::digest(rlp::encode(self)).as_ref())
}
}
impl rlp::Encodable for LegacyTransactionMessage {
fn rlp_append(&self, s: &mut RlpStream) {
if let Some(chain_id) = self.chain_id {
s.begin_list(9);
s.append(&self.nonce);
s.append(&self.gas_price);
s.append(&self.gas_limit);
s.append(&self.action);
s.append(&self.value);
s.append(&self.input);
s.append(&chain_id);
s.append(&0_u8);
s.append(&0_u8);
} else {
s.begin_list(6);
s.append(&self.nonce);
s.append(&self.gas_price);
s.append(&self.gas_limit);
s.append(&self.action);
s.append(&self.value);
s.append(&self.input);
}
}
}
impl From<LegacyTransaction> for LegacyTransactionMessage {
fn from(t: LegacyTransaction) -> Self {
t.to_message()
}
}
| rust | Apache-2.0 | d7bdf2888253a30f160d434688e378636e253870 | 2026-01-04T20:22:49.747224Z | false |
rust-ethereum/ethereum | https://github.com/rust-ethereum/ethereum/blob/d7bdf2888253a30f160d434688e378636e253870/src/transaction/eip1559.rs | src/transaction/eip1559.rs | use ethereum_types::{H256, U256};
use rlp::{DecoderError, Rlp, RlpStream};
use sha3::{Digest, Keccak256};
use crate::Bytes;
pub use super::eip2930::{AccessList, TransactionAction, TransactionSignature};
#[derive(Clone, Debug, PartialEq, Eq)]
#[cfg_attr(
feature = "with-scale",
derive(
scale_codec::Encode,
scale_codec::Decode,
scale_codec::DecodeWithMemTracking,
scale_info::TypeInfo
)
)]
#[cfg_attr(feature = "with-serde", derive(serde::Serialize, serde::Deserialize))]
pub struct EIP1559Transaction {
pub chain_id: u64,
pub nonce: U256,
pub max_priority_fee_per_gas: U256,
pub max_fee_per_gas: U256,
pub gas_limit: U256,
pub action: TransactionAction,
pub value: U256,
pub input: Bytes,
pub access_list: AccessList,
pub signature: TransactionSignature,
}
impl EIP1559Transaction {
pub fn hash(&self) -> H256 {
let encoded = rlp::encode(self);
let mut out = alloc::vec![0; 1 + encoded.len()];
out[0] = 2;
out[1..].copy_from_slice(&encoded);
H256::from_slice(Keccak256::digest(&out).as_ref())
}
pub fn to_message(self) -> EIP1559TransactionMessage {
EIP1559TransactionMessage {
chain_id: self.chain_id,
nonce: self.nonce,
max_priority_fee_per_gas: self.max_priority_fee_per_gas,
max_fee_per_gas: self.max_fee_per_gas,
gas_limit: self.gas_limit,
action: self.action,
value: self.value,
input: self.input,
access_list: self.access_list,
}
}
}
impl rlp::Encodable for EIP1559Transaction {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(12);
s.append(&self.chain_id);
s.append(&self.nonce);
s.append(&self.max_priority_fee_per_gas);
s.append(&self.max_fee_per_gas);
s.append(&self.gas_limit);
s.append(&self.action);
s.append(&self.value);
s.append(&self.input);
s.append_list(&self.access_list);
s.append(&self.signature.odd_y_parity());
s.append(&U256::from_big_endian(&self.signature.r()[..]));
s.append(&U256::from_big_endian(&self.signature.s()[..]));
}
}
impl rlp::Decodable for EIP1559Transaction {
fn decode(rlp: &Rlp) -> Result<Self, DecoderError> {
if rlp.item_count()? != 12 {
return Err(DecoderError::RlpIncorrectListLen);
}
Ok(Self {
chain_id: rlp.val_at(0)?,
nonce: rlp.val_at(1)?,
max_priority_fee_per_gas: rlp.val_at(2)?,
max_fee_per_gas: rlp.val_at(3)?,
gas_limit: rlp.val_at(4)?,
action: rlp.val_at(5)?,
value: rlp.val_at(6)?,
input: rlp.val_at(7)?,
access_list: rlp.list_at(8)?,
signature: {
let odd_y_parity = rlp.val_at(9)?;
let r = H256::from(rlp.val_at::<U256>(10)?.to_big_endian());
let s = H256::from(rlp.val_at::<U256>(11)?.to_big_endian());
TransactionSignature::new(odd_y_parity, r, s)
.ok_or(DecoderError::Custom("Invalid transaction signature format"))?
},
})
}
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct EIP1559TransactionMessage {
pub chain_id: u64,
pub nonce: U256,
pub max_priority_fee_per_gas: U256,
pub max_fee_per_gas: U256,
pub gas_limit: U256,
pub action: TransactionAction,
pub value: U256,
pub input: Bytes,
pub access_list: AccessList,
}
impl EIP1559TransactionMessage {
pub fn hash(&self) -> H256 {
let encoded = rlp::encode(self);
let mut out = alloc::vec![0; 1 + encoded.len()];
out[0] = 2;
out[1..].copy_from_slice(&encoded);
H256::from_slice(Keccak256::digest(&out).as_ref())
}
}
impl rlp::Encodable for EIP1559TransactionMessage {
fn rlp_append(&self, s: &mut RlpStream) {
s.begin_list(9);
s.append(&self.chain_id);
s.append(&self.nonce);
s.append(&self.max_priority_fee_per_gas);
s.append(&self.max_fee_per_gas);
s.append(&self.gas_limit);
s.append(&self.action);
s.append(&self.value);
s.append(&self.input);
s.append_list(&self.access_list);
}
}
impl From<EIP1559Transaction> for EIP1559TransactionMessage {
fn from(t: EIP1559Transaction) -> Self {
t.to_message()
}
}
| rust | Apache-2.0 | d7bdf2888253a30f160d434688e378636e253870 | 2026-01-04T20:22:49.747224Z | false |
viz-rs/path-tree | https://github.com/viz-rs/path-tree/blob/b4275047bf9cdad6cd890b19d9c9d6ce9389b094/src/node.rs | src/node.rs | use alloc::{string::String, vec::Vec};
use core::{
cmp::Ordering,
fmt::{self, Write},
ops::Range,
};
use smallvec::SmallVec;
use crate::Kind;
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Key {
String(Vec<u8>),
Parameter(Kind),
}
#[derive(Clone)]
pub struct Node<T> {
pub key: Key,
pub value: Option<T>,
/// Stores string node
pub nodes0: Option<Vec<Self>>,
/// Stores parameter node
pub nodes1: Option<Vec<Self>>,
}
impl<T: fmt::Debug> Node<T> {
pub fn new(key: Key, value: Option<T>) -> Self {
Self {
key,
value,
nodes0: None,
nodes1: None,
}
}
pub fn insert_bytes(&mut self, mut bytes: &[u8]) -> &mut Self {
let diff = match &mut self.key {
Key::String(s) => {
if s.is_empty() {
*s = bytes.to_vec();
return self;
}
let cursor = s
.iter()
.zip(bytes.iter())
.take_while(|(a, b)| a == b)
.count();
if cursor == 0 {
true
} else {
// split node
if cursor < s.len() {
let (prefix, suffix) = s.split_at(cursor);
let mut node = Node::new(Key::String(prefix.to_vec()), None);
*s = suffix.to_vec();
::core::mem::swap(self, &mut node);
self.nodes0.get_or_insert_with(Vec::new).push(node);
}
if cursor == bytes.len() {
false
} else {
bytes = &bytes[cursor..];
true
}
}
}
Key::Parameter(_) => true,
};
// insert node
if diff {
let nodes = self.nodes0.get_or_insert_with(Vec::new);
return match nodes.binary_search_by(|node| match &node.key {
Key::String(s) => {
// s[0].cmp(&bytes[0])
// opt!
// lets `/` at end
compare(s[0], bytes[0])
}
Key::Parameter(_) => unreachable!(),
}) {
Ok(i) => nodes[i].insert_bytes(bytes),
Err(i) => {
nodes.insert(i, Node::new(Key::String(bytes.to_vec()), None));
&mut nodes[i]
}
};
}
self
}
pub fn insert_parameter(&mut self, kind: Kind) -> &mut Self {
let nodes = self.nodes1.get_or_insert_with(Vec::new);
let i = nodes
.binary_search_by(|node| match node.key {
Key::Parameter(pk) => pk.cmp(&kind),
Key::String(_) => unreachable!(),
})
.unwrap_or_else(|i| {
nodes.insert(i, Node::new(Key::Parameter(kind), None));
i
});
&mut nodes[i]
}
#[allow(clippy::range_plus_one)]
#[allow(clippy::too_many_lines)]
#[inline]
fn find_with(
&self,
mut start: usize,
mut bytes: &[u8],
ranges: &mut SmallVec<[Range<usize>; 8]>,
) -> Option<&T> {
let mut m = bytes.len();
match &self.key {
Key::String(s) => {
let n = s.len();
let mut flag = m >= n;
// opt!
if flag {
if n == 1 {
flag = s[0] == bytes[0];
} else {
flag = s == &bytes[..n];
}
}
// starts with prefix
if flag {
m -= n;
start += n;
bytes = &bytes[n..];
if m == 0 {
if let Some(id) = &self.value {
return Some(id);
}
} else {
// static
if let Some(id) = self.nodes0.as_ref().and_then(|nodes| {
nodes
.binary_search_by(|node| match &node.key {
Key::String(s) => {
// s[0].cmp(&bytes[0])
// opt!
// lets `/` at end
compare(s[0], bytes[0])
}
Key::Parameter(_) => unreachable!(),
})
.ok()
.and_then(|i| nodes[i].find_with(start, bytes, ranges))
}) {
return Some(id);
}
}
// parameter
if let Some(id) = self.nodes1.as_ref().and_then(|nodes| {
let b = m > 0;
nodes
.iter()
.filter(|node| match node.key {
Key::Parameter(pk)
if pk == Kind::Normal || pk == Kind::OneOrMore =>
{
b
}
_ => true,
})
.find_map(|node| node.find_with(start, bytes, ranges))
}) {
return Some(id);
}
} else if n == 1 && s[0] == b'/' {
if let Some(id) = self.nodes1.as_ref().and_then(|nodes| {
nodes
.iter()
.filter(|node| {
matches!(node.key,
Key::Parameter(pk)
if pk == Kind::OptionalSegment
|| pk == Kind::ZeroOrMoreSegment
)
})
.find_map(|node| node.find_with(start, bytes, ranges))
}) {
return Some(id);
}
}
}
Key::Parameter(k) => match k {
Kind::Normal | Kind::Optional | Kind::OptionalSegment => {
if m == 0 {
if k == &Kind::Normal {
return None;
}
// last
if self.nodes0.is_none() && self.nodes1.is_none() {
return self.value.as_ref().inspect(|_| {
ranges.push(start..start);
});
}
} else {
// if it's normal, parameter should not be empty
if k == &Kind::Normal && bytes[0] == b'/' {
return None;
}
// static
if let Some(id) = self.nodes0.as_ref().and_then(|nodes| {
nodes.iter().find_map(|node| match &node.key {
Key::String(s) => {
let mut keep_running = true;
bytes
.iter()
// as it turns out doing .copied() here is much slower than dereferencing in the closure
// https://godbolt.org/z/7dnW91T1Y
.take_while(|b| {
if keep_running && **b == b'/' {
keep_running = false;
true
} else {
keep_running
}
})
.enumerate()
.filter_map(|(n, b)| (s[0] == *b).then_some(n))
.find_map(|n| {
node.find_with(start + n, &bytes[n..], ranges).inspect(
|_| {
ranges.push(start..start + n);
},
)
})
}
Key::Parameter(_) => unreachable!(),
})
}) {
return Some(id);
}
// parameter => `:a:b:c`
if let Some(id) = self.nodes1.as_ref().and_then(|nodes| {
let b = m - 1 > 0;
nodes
.iter()
.filter(|node| match node.key {
Key::Parameter(pk)
if pk == Kind::Normal || pk == Kind::OneOrMore =>
{
b
}
_ => true,
})
.find_map(|node| node.find_with(start + 1, &bytes[1..], ranges))
}) {
ranges.push(start..start + 1);
return Some(id);
}
}
// parameter => `:a:b?:c?`
if k == &Kind::Optional || k == &Kind::OptionalSegment {
if let Some(id) = self.nodes1.as_ref().and_then(|nodes| {
let b = m > 0;
nodes
.iter()
.filter(|node| match &node.key {
Key::Parameter(pk)
if pk == &Kind::Normal || pk == &Kind::OneOrMore =>
{
b
}
_ => true,
})
.find_map(|node| node.find_with(start, bytes, ranges))
}) {
// param should be empty
ranges.push(start + m..start + m);
return Some(id);
}
}
if let Some(n) = bytes.iter().position(|b| *b == b'/') {
bytes = &bytes[n..];
} else {
if let Some(id) = &self.value {
ranges.push(start..start + m);
return Some(id);
}
bytes = &bytes[m..];
}
if k == &Kind::OptionalSegment {
if let Some(id) = self.nodes0.as_ref().and_then(|nodes| {
nodes
.last()
.filter(|node| match &node.key {
Key::String(s) => s[0] == b'/',
Key::Parameter(_) => unreachable!(),
})
.and_then(|node| node.find_with(start, bytes, ranges))
}) {
ranges.push(start..start + m);
return Some(id);
}
}
}
Kind::OneOrMore | Kind::ZeroOrMore | Kind::ZeroOrMoreSegment => {
let is_one_or_more = k == &Kind::OneOrMore;
if m == 0 {
if is_one_or_more {
return None;
}
if self.nodes0.is_none() && self.nodes1.is_none() {
return self.value.as_ref().inspect(|_| {
ranges.push(start..start);
});
}
} else {
if self.nodes0.is_none() && self.nodes1.is_none() {
if let Some(id) = &self.value {
ranges.push(start..start + m);
return Some(id);
}
}
// static
if let Some(id) = self.nodes0.as_ref().and_then(|nodes| {
nodes.iter().find_map(|node| {
if let Key::String(s) = &node.key {
let is_slash = is_one_or_more && s.len() == 1 && s[0] == b'/';
if is_slash {
let r = bytes
.iter()
.enumerate()
.filter_map(|(n, b)| (s[0] == *b).then_some(n))
.find_map(|n| {
node.nodes0.as_ref().and_then(|nodes| {
nodes.iter().find_map(|node| {
node.find_with(
start + n + 1,
&bytes[n + 1..],
ranges,
)
.inspect(|_| {
ranges.push(start..start + n);
})
})
})
});
if r.is_some() {
return r;
}
}
let has_right_length = if is_one_or_more {
m > s.len()
} else {
m >= s.len()
};
if has_right_length {
return bytes
.iter()
.enumerate()
.filter_map(|(n, b)| (s[0] == *b).then_some(n))
.find_map(|n| {
node.find_with(start + n, &bytes[n..], ranges)
.inspect(|_| {
ranges.push(start..start + n);
})
});
}
}
None
})
}) {
return Some(id);
}
}
if k == &Kind::ZeroOrMoreSegment {
if let Some(id) = self.nodes0.as_ref().and_then(|nodes| {
nodes
.last()
.filter(|node| match &node.key {
Key::String(s) => s[0] == b'/',
Key::Parameter(_) => unreachable!(),
})
.and_then(|node| node.find_with(start, bytes, ranges))
}) {
// param should be empty
ranges.push(start + m..start + m);
return Some(id);
}
}
}
},
}
None
}
pub fn find(&self, bytes: &[u8]) -> Option<(&T, SmallVec<[Range<usize>; 8]>)> {
let mut ranges = SmallVec::<[Range<usize>; 8]>::new_const(); // opt!
self.find_with(0, bytes, &mut ranges).map(|t| (t, ranges))
}
}
impl<T: fmt::Debug> fmt::Debug for Node<T> {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
const EDGE: &str = "βββ";
const LINE: &str = "β ";
const CORNER: &str = "βββ";
const BLANK: &str = " ";
fn print_nodes<T: fmt::Debug>(
f: &mut fmt::Formatter<'_>,
nodes: &[Node<T>],
check: bool,
pad: &str,
space: &str,
) -> fmt::Result {
for (index, node) in nodes.iter().enumerate() {
let (left, right) = if check && index == nodes.len() - 1 {
(BLANK, CORNER)
} else {
(LINE, EDGE)
};
f.write_str(pad)?;
f.write_str(space)?;
f.write_str(right)?;
let mut s = String::new();
s.push_str(pad);
s.push_str(space);
s.push_str(left);
print_tree(f, node, false, &s)?;
}
Ok(())
}
fn print_tree<T: fmt::Debug>(
f: &mut fmt::Formatter<'_>,
node: &Node<T>,
root: bool,
pad: &str,
) -> fmt::Result {
let space = if root {
f.write_char('\n')?;
""
} else {
f.write_char(' ')?;
" "
};
match &node.key {
Key::String(path) => {
f.write_str(
&String::from_utf8_lossy(path)
.replace(':', "\\:")
.replace('?', "\\?")
.replace('+', "\\+"),
)?;
}
Key::Parameter(kind) => {
let c = match kind {
Kind::Normal => ':',
Kind::Optional => '?',
Kind::OptionalSegment => {
f.write_char('?')?;
'?'
}
Kind::OneOrMore => '+',
Kind::ZeroOrMore => '*',
Kind::ZeroOrMoreSegment => {
f.write_char('*')?;
'*'
}
};
f.write_char(c)?;
}
}
if let Some(value) = &node.value {
f.write_str(" β’")?;
value.fmt(f)?;
}
f.write_char('\n')?;
// nodes0
if let Some(nodes) = &node.nodes0 {
print_nodes(f, nodes, node.nodes1.is_none(), pad, space)?;
}
// nodes1
if let Some(nodes) = &node.nodes1 {
print_nodes(f, nodes, true, pad, space)?;
}
Ok(())
}
print_tree(f, self, true, "")
}
}
const KINDS: [u8; 5] = [b'/', b':', b'?', b'+', b'*'];
#[inline]
fn find_index(c: u8) -> Option<usize> {
KINDS.iter().position(|e| *e == c)
}
#[inline]
fn compare(a: u8, b: u8) -> Ordering {
if a == b {
return Ordering::Equal;
}
match (find_index(a), find_index(b)) {
(Some(m), Some(n)) => m.cmp(&n),
(Some(_), None) => Ordering::Greater,
(None, Some(_)) => Ordering::Less,
(None, None) => a.cmp(&b),
}
}
| rust | Apache-2.0 | b4275047bf9cdad6cd890b19d9c9d6ce9389b094 | 2026-01-04T20:22:55.187428Z | false |
viz-rs/path-tree | https://github.com/viz-rs/path-tree/blob/b4275047bf9cdad6cd890b19d9c9d6ce9389b094/src/lib.rs | src/lib.rs | //! path-tree is a lightweight high performance HTTP request router for Rust.
//!
//! # Example
//!
//! ```
//! use path_tree::PathTree;
//!
//! /*
//! / β’0
//! βββ api/
//! β βββ + β’13
//! βββ login β’1
//! βββ public/
//! β βββ ** β’7
//! βββ s
//! β βββ ettings β’3
//! β β βββ /
//! β β βββ : β’4
//! β βββ ignup β’2
//! βββ : β’5
//! βββ /
//! βββ : β’6
//! βββ /
//! βββ actions/
//! β βββ :
//! β βββ \:
//! β βββ : β’10
//! βββ releases/download/
//! β βββ :
//! β βββ /
//! β βββ :
//! β βββ .
//! β βββ : β’8
//! βββ tags/
//! β βββ :
//! β βββ -
//! β βββ :
//! β βββ -
//! β βββ : β’9
//! βββ : β’11
//! βββ ** β’12
//! */
//! let mut tree = PathTree::new();
//!
//! tree.insert("/", 0);
//! tree.insert("/login", 1);
//! tree.insert("/signup", 2);
//! tree.insert("/settings", 3);
//! tree.insert("/settings/:page", 4);
//! tree.insert("/:user", 5);
//! tree.insert("/:user/:repo", 6);
//! tree.insert("/public/:any*", 7);
//! tree.insert("/:org/:repo/releases/download/:tag/:filename.:ext", 8);
//! tree.insert("/:org/:repo/tags/:day-:month-:year", 9);
//! tree.insert("/:org/:repo/actions/:name\\::verb", 10);
//! tree.insert("/:org/:repo/:page", 11);
//! tree.insert("/:org/:repo/*", 12);
//! tree.insert("/api/+", 13);
//!
//! let (h, p) = tree.find("/").unwrap();
//! assert_eq!(h, &0);
//! assert_eq!(p.params(), vec![]);
//!
//! let (h, p) = tree.find("/login").unwrap();
//! assert_eq!(h, &1);
//! assert_eq!(p.params(), vec![]);
//!
//! let (h, p) = tree.find("/settings/admin").unwrap();
//! assert_eq!(h, &4);
//! assert_eq!(p.params(), vec![("page", "admin")]);
//!
//! let (h, p) = tree.find("/viz-rs").unwrap();
//! assert_eq!(h, &5);
//! assert_eq!(p.params(), vec![("user", "viz-rs")]);
//!
//! let (h, p) = tree.find("/viz-rs/path-tree").unwrap();
//! assert_eq!(h, &6);
//! assert_eq!(p.params(), vec![("user", "viz-rs"), ("repo", "path-tree")]);
//!
//! let (h, p) = tree.find("/rust-lang/rust-analyzer/releases/download/2022-09-12/rust-analyzer-aarch64-apple-darwin.gz").unwrap();
//! assert_eq!(h, &8);
//! assert_eq!(
//! p.params(),
//! vec![
//! ("org", "rust-lang"),
//! ("repo", "rust-analyzer"),
//! ("tag", "2022-09-12"),
//! ("filename", "rust-analyzer-aarch64-apple-darwin"),
//! ("ext", "gz")
//! ]
//! );
//!
//! let (h, p) = tree.find("/rust-lang/rust-analyzer/tags/2022-09-12").unwrap();
//! assert_eq!(h, &9);
//! assert_eq!(
//! p.params(),
//! vec![
//! ("org", "rust-lang"),
//! ("repo", "rust-analyzer"),
//! ("day", "2022"),
//! ("month", "09"),
//! ("year", "12")
//! ]
//! );
//!
//! let (h, p) = tree.find("/rust-lang/rust-analyzer/actions/ci:bench").unwrap();
//! assert_eq!(h, &10);
//! assert_eq!(
//! p.params(),
//! vec![
//! ("org", "rust-lang"),
//! ("repo", "rust-analyzer"),
//! ("name", "ci"),
//! ("verb", "bench"),
//! ]
//! );
//!
//! let (h, p) = tree.find("/rust-lang/rust-analyzer/stargazers").unwrap();
//! assert_eq!(h, &11);
//! assert_eq!(p.params(), vec![("org", "rust-lang"), ("repo", "rust-analyzer"), ("page", "stargazers")]);
//!
//! let (h, p) = tree.find("/rust-lang/rust-analyzer/stargazers/404").unwrap();
//! assert_eq!(h, &12);
//! assert_eq!(p.params(), vec![("org", "rust-lang"), ("repo", "rust-analyzer"), ("*1", "stargazers/404")]);
//!
//! let (h, p) = tree.find("/public/js/main.js").unwrap();
//! assert_eq!(h, &7);
//! assert_eq!(p.params(), vec![("any", "js/main.js")]);
//!
//! let (h, p) = tree.find("/api/v1").unwrap();
//! assert_eq!(h, &13);
//! assert_eq!(p.params(), vec![("+1", "v1")]);
//! ```
#![no_std]
#![forbid(unsafe_code)]
#![warn(rust_2018_idioms, unreachable_pub)]
extern crate alloc;
use alloc::{
string::{String, ToString},
vec::Vec,
};
use core::{slice::Iter, str::from_utf8};
use smallvec::SmallVec;
mod node;
pub use node::{Key, Node};
mod parser;
pub use parser::{Kind, Parser, Piece, Position};
/// A path tree.
#[derive(Clone, Debug)]
pub struct PathTree<T> {
id: usize,
routes: Vec<(T, Vec<Piece>)>,
pub node: Node<usize>,
}
impl<T> Default for PathTree<T> {
fn default() -> Self {
Self::new()
}
}
impl<T> PathTree<T> {
/// Creates a new [`PathTree`].
#[must_use]
pub fn new() -> Self {
Self {
id: 0,
routes: Vec::new(),
node: Node::new(Key::String(Vec::new()), None),
}
}
/// Inserts a part path-value to the tree and returns the id.
#[must_use]
pub fn insert(&mut self, path: &str, value: T) -> usize {
let mut node = &mut self.node;
let (overwritten, pieces) = if path.is_empty() {
(false, Vec::new())
} else {
let pieces = Parser::new(path).collect::<Vec<_>>();
node = pieces.iter().fold(node, |node, piece| match piece {
Piece::String(s) => node.insert_bytes(&s[..]),
Piece::Parameter(_, k) => node.insert_parameter(*k),
});
(true, pieces)
};
if let Some(id) = node.value {
self.routes[id].0 = value;
if overwritten {
self.routes[id].1 = pieces;
}
id
} else {
self.routes.push((value, pieces));
let id = self.id;
node.value = Some(id);
self.id += 1;
id
}
}
/// Returns the [`Path`] by the given path.
#[must_use]
pub fn find<'a, 'b>(&'a self, path: &'b str) -> Option<(&'a T, Path<'a, 'b>)> {
let bytes = path.as_bytes();
self.node.find(bytes).and_then(|(id, ranges)| {
self.routes.get(*id).map(|(value, pieces)| {
(
value,
Path {
id,
pieces,
// opt!
raws: ranges
.into_iter()
.filter_map(|r| from_utf8(&bytes[r]).ok())
.rev()
.collect(),
},
)
})
})
}
/// Gets the route by id.
#[must_use]
#[inline]
pub fn get_route(&self, index: usize) -> Option<&(T, Vec<Piece>)> {
self.routes.get(index)
}
/// Generates URL with the params.
#[must_use]
pub fn url_for(&self, index: usize, params: &[&str]) -> Option<String> {
self.get_route(index).and_then(|(_, pieces)| {
let mut bytes = Vec::new();
let mut iter = params.iter();
for piece in pieces {
match piece {
Piece::String(s) => {
bytes.extend_from_slice(s);
}
Piece::Parameter(_, _) => {
if let Some(s) = iter.next() {
bytes.extend_from_slice(s.as_bytes());
}
}
}
}
from_utf8(&bytes).map(ToString::to_string).ok()
})
}
pub fn iter(&self) -> Iter<'_, (T, Vec<Piece>)> {
self.routes.iter()
}
}
impl<'a, T> IntoIterator for &'a PathTree<T> {
type Item = &'a (T, Vec<Piece>);
type IntoIter = Iter<'a, (T, Vec<Piece>)>;
fn into_iter(self) -> Self::IntoIter {
self.iter()
}
}
/// Matched route path infomation.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Path<'a, 'b> {
pub id: &'a usize,
pub pieces: &'a [Piece],
pub raws: SmallVec<[&'b str; 4]>,
}
impl Path<'_, '_> {
/// Gets current path pattern.
///
/// # Panics
///
/// Will panic if bytes to string conversion fails.
pub fn pattern(&self) -> String {
let mut bytes = Vec::new();
self.pieces.iter().for_each(|piece| match piece {
Piece::String(s) => {
if s == b":" || s == b"+" || s == b"?" {
bytes.push(b'\\');
}
bytes.extend_from_slice(s);
}
Piece::Parameter(p, k) => match p {
Position::Index(_, _) => {
if *k == Kind::OneOrMore {
bytes.push(b'+');
} else if *k == Kind::ZeroOrMore || *k == Kind::ZeroOrMoreSegment {
bytes.push(b'*');
}
}
Position::Named(n) => match k {
Kind::Normal | Kind::Optional | Kind::OptionalSegment => {
bytes.push(b':');
bytes.extend_from_slice(n);
if *k == Kind::Optional || *k == Kind::OptionalSegment {
bytes.push(b'?');
}
}
Kind::OneOrMore => {
bytes.push(b'+');
bytes.extend_from_slice(n);
}
Kind::ZeroOrMore | Kind::ZeroOrMoreSegment => {
bytes.push(b'*');
bytes.extend_from_slice(n);
}
},
},
});
from_utf8(&bytes)
.map(ToString::to_string)
.expect("pattern generated failure")
}
/// Returns the parameters of the current path.
#[must_use]
pub fn params(&self) -> Vec<(&str, &str)> {
self.params_iter().collect()
}
/// Returns the parameters iterator of the current path.
pub fn params_iter(&self) -> impl Iterator<Item = (&str, &str)> {
#[inline]
fn piece_filter(piece: &Piece) -> Option<&str> {
match piece {
Piece::String(_) => None,
Piece::Parameter(p, _) => from_utf8(match p {
Position::Index(_, n) | Position::Named(n) => n,
})
.ok(),
}
}
self.pieces
.iter()
.filter_map(piece_filter)
.zip(self.raws.iter().copied())
}
}
| rust | Apache-2.0 | b4275047bf9cdad6cd890b19d9c9d6ce9389b094 | 2026-01-04T20:22:55.187428Z | false |
viz-rs/path-tree | https://github.com/viz-rs/path-tree/blob/b4275047bf9cdad6cd890b19d9c9d6ce9389b094/src/parser.rs | src/parser.rs | use alloc::{string::ToString, vec::Vec};
use core::{iter::Peekable, str::CharIndices};
/// Types of path segments.
///
/// Represents the matching pattern for parameters in URL paths.
/// Different kinds determine how parameters match URL path segments.
#[derive(Clone, Copy, Debug, Eq, Ord, PartialEq, PartialOrd)]
pub enum Kind {
/// `:` 58
/// `:name`
Normal,
/// `?` 63
/// Optional: `:name?-`
Optional,
/// Optional segment: `/:name?/` or `/:name?`
OptionalSegment,
// Optional,
/// `+` 43
OneOrMore,
/// `*` 42
/// Zero or more: `*-`
ZeroOrMore,
/// Zero or more segment: `/*/` or `/*`
ZeroOrMoreSegment,
// TODO: regexp
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Piece {
String(Vec<u8>),
Parameter(Position, Kind),
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum Position {
Index(usize, Vec<u8>),
Named(Vec<u8>),
}
pub struct Parser<'a> {
pos: usize,
count: usize,
input: &'a str,
cursor: Peekable<CharIndices<'a>>,
}
impl<'a> Parser<'a> {
#[must_use]
pub fn new(input: &'a str) -> Self {
Self {
input,
pos: 0,
count: 0,
cursor: input.char_indices().peekable(),
}
}
fn string(&mut self) -> &'a [u8] {
let mut start = self.pos;
while let Some(&(i, c)) = self.cursor.peek() {
match c {
'\\' => {
if start < i {
self.pos = i;
return &self.input.as_bytes()[start..i];
}
self.cursor.next();
if let Some(&(j, c)) = self.cursor.peek() {
// removes `\`
if c == '\\' {
start = j;
} else {
self.cursor.next();
self.pos = j + c.len_utf8();
return &self.input.as_bytes()[j..self.pos];
}
}
}
':' | '+' | '*' => {
self.pos = i + 1;
return &self.input.as_bytes()[start..i];
}
_ => {
self.cursor.next();
}
}
}
&self.input.as_bytes()[start..]
}
fn parameter(&mut self) -> (Position, Kind) {
let start = self.pos;
while let Some(&(i, c)) = self.cursor.peek() {
match c {
'-' | '.' | '~' | '/' | '\\' | ':' => {
self.pos = i;
return (
Position::Named(self.input.as_bytes()[start..i].to_vec()),
Kind::Normal,
);
}
'?' | '+' | '*' => {
self.cursor.next();
self.pos = i + 1;
return (
Position::Named(self.input.as_bytes()[start..i].to_vec()),
if c == '+' {
Kind::OneOrMore
} else {
let f = {
let prefix = start >= 2
&& (self.input.get(start - 2..start - 1) == Some("/"));
let suffix = self.cursor.peek().is_none_or(|(_, c)| *c == '/');
prefix && suffix
};
if c == '?' {
if f {
Kind::OptionalSegment
} else {
Kind::Optional
}
} else if f {
Kind::ZeroOrMoreSegment
} else {
Kind::ZeroOrMore
}
},
);
}
_ => {
self.cursor.next();
}
}
}
(
Position::Named(self.input.as_bytes()[start..].to_vec()),
Kind::Normal,
)
}
}
impl Iterator for Parser<'_> {
type Item = Piece;
fn next(&mut self) -> Option<Self::Item> {
match self.cursor.peek() {
Some(&(i, c)) => match c {
':' => {
self.cursor.next();
self.pos = i + 1;
let (position, kind) = self.parameter();
Some(Piece::Parameter(position, kind))
}
'+' | '*' => {
self.cursor.next();
self.count += 1;
self.pos = i + 1;
Some(Piece::Parameter(
Position::Index(self.count, {
let mut s = Vec::new();
s.push(c as u8);
s.extend_from_slice(self.count.to_string().as_bytes());
s
}),
if c == '+' {
Kind::OneOrMore
} else {
let f = {
let prefix = i >= 1 && (self.input.get(i - 1..i) == Some("/"));
let suffix = self.cursor.peek().is_none_or(|(_, c)| *c == '/');
prefix && suffix
};
if f {
Kind::ZeroOrMoreSegment
} else {
Kind::ZeroOrMore
}
},
))
}
_ => Some(Piece::String(self.string().to_vec())),
},
None => None,
}
}
}
| rust | Apache-2.0 | b4275047bf9cdad6cd890b19d9c9d6ce9389b094 | 2026-01-04T20:22:55.187428Z | false |
viz-rs/path-tree | https://github.com/viz-rs/path-tree/blob/b4275047bf9cdad6cd890b19d9c9d6ce9389b094/tests/issues.rs | tests/issues.rs | use path_tree::PathTree;
#[test]
fn test_44() {
let mut tree = PathTree::new();
let _ = tree.insert("/test/:me", 0);
let _ = tree.insert("/test/:me?", 1);
let _ = tree.insert("/test/:me/now", 2);
let _ = tree.insert("/test/:me?/now", 3);
let _ = tree.insert("/test/:this+/now", 4);
let _ = tree.insert("/test/:this+/*", 5);
let _ = tree.insert("/test/:this+/now/*", 6);
let (value, path) = tree.find("/test/").unwrap();
assert_eq!(value, &1);
assert_eq!(path.params(), &[("me", "")]);
let (value, path) = tree.find("/test/now").unwrap();
assert_eq!(value, &0);
assert_eq!(path.params(), &[("me", "now")]);
// not found
let result = tree.find("/test//");
assert!(result.is_none());
let (value, path) = tree.find("/test//now").unwrap();
assert_eq!(value, &3);
assert_eq!(path.params(), &[("me", "")]);
let (value, path) = tree.find(r"/test/\/now").unwrap();
assert_eq!(value, &2);
assert_eq!(path.params(), &[("me", r"\")]);
// trim `/`
let trimmed = "/test//now"
.split('/')
.filter(|s| !s.is_empty())
.collect::<Vec<_>>();
let mut path = trimmed.join("/");
// add leading `/`
path.insert(0, '/');
assert_eq!(path, "/test/now");
let (value, path) = tree.find(&path).unwrap();
assert_eq!(value, &0);
assert_eq!(path.params(), &[("me", "now")]);
let (value, path) = tree.find("/test/multiple/paths/now").unwrap();
assert_eq!(value, &4);
assert_eq!(path.params(), &[("this", "multiple/paths")]);
let (value, path) = tree.find("/test/multiple/paths/noww").unwrap();
assert_eq!(value, &5);
assert_eq!(path.params(), &[("this", "multiple"), ("*1", "paths/noww")]);
let (value, path) = tree.find("/test/multiple/paths/now/12h").unwrap();
assert_eq!(value, &6);
assert_eq!(path.params(), &[("this", "multiple/paths"), ("*1", "12h")]);
let (value, path) = tree.find("/test/multiple/paths/today/12h").unwrap();
assert_eq!(value, &5);
assert_eq!(
path.params(),
&[("this", "multiple"), ("*1", "paths/today/12h")]
);
}
| rust | Apache-2.0 | b4275047bf9cdad6cd890b19d9c9d6ce9389b094 | 2026-01-04T20:22:55.187428Z | false |
viz-rs/path-tree | https://github.com/viz-rs/path-tree/blob/b4275047bf9cdad6cd890b19d9c9d6ce9389b094/tests/node.rs | tests/node.rs | #![allow(clippy::too_many_lines)]
use path_tree::{Key, Kind, Node};
#[test]
fn github_nodes() {
let mut node = Node::<usize>::new(Key::String(b"/".to_vec()), None);
let mut n = node.insert_bytes(b"/");
n = n.insert_parameter(Kind::Normal);
n = n.insert_bytes(b"/");
n.insert_parameter(Kind::Normal);
node.insert_bytes(b"/api");
node.insert_bytes(b"/about");
node.insert_bytes(b"/login");
node.insert_bytes(b"/signup");
node.insert_bytes(b"/pricing");
node.insert_bytes(b"/features");
node.insert_bytes(b"/features/actions");
node.insert_bytes(b"/features/packages");
node.insert_bytes(b"/features/security");
node.insert_bytes(b"/features/codespaces");
node.insert_bytes(b"/features/copilot");
node.insert_bytes(b"/features/code-review");
node.insert_bytes(b"/features/issues");
node.insert_bytes(b"/features/discussions");
node.insert_bytes(b"/enterprise");
node.insert_bytes(b"/team");
node.insert_bytes(b"/customer-stories");
node.insert_bytes(b"/sponsors");
node.insert_bytes(b"/readme");
node.insert_bytes(b"/topics");
node.insert_bytes(b"/trending");
node.insert_bytes(b"/collections");
node.insert_bytes(b"/search");
node.insert_bytes(b"/pulls");
node.insert_bytes(b"/issues");
node.insert_bytes(b"/marketplace");
node.insert_bytes(b"/explore");
node.insert_bytes(b"/sponsors/explore");
node.insert_bytes(b"/sponsors/accounts");
let n = node.insert_bytes(b"/sponsors/");
n.insert_parameter(Kind::Normal);
node.insert_bytes(b"/about/careers");
node.insert_bytes(b"/about/press");
node.insert_bytes(b"/about/diversity");
node.insert_bytes(b"/settings");
node.insert_bytes(b"/settings/admin");
node.insert_bytes(b"/settings/appearance");
node.insert_bytes(b"/settings/accessibility");
node.insert_bytes(b"/settings/notifications");
node.insert_bytes(b"/settings/billing");
node.insert_bytes(b"/settings/billing/plans");
node.insert_bytes(b"/settings/security");
node.insert_bytes(b"/settings/keys");
node.insert_bytes(b"/settings/organizations");
node.insert_bytes(b"/settings/blocked_users");
node.insert_bytes(b"/settings/interaction_limits");
node.insert_bytes(b"/settings/code_review_limits");
node.insert_bytes(b"/settings/repositories");
node.insert_bytes(b"/settings/codespaces");
node.insert_bytes(b"/settings/deleted_packages");
node.insert_bytes(b"/settings/copilot");
node.insert_bytes(b"/settings/pages");
node.insert_bytes(b"/settings/replies");
node.insert_bytes(b"/settings/security_analysis");
node.insert_bytes(b"/settings/installations");
node.insert_bytes(b"/settings/reminders");
node.insert_bytes(b"/settings/security-log");
node.insert_bytes(b"/settings/sponsors-log");
node.insert_bytes(b"/settings/apps");
node.insert_bytes(b"/settings/developers");
node.insert_bytes(b"/settings/tokens");
node.insert_bytes(b"/404");
node.insert_bytes(b"/500");
node.insert_bytes(b"/503");
assert_eq!(
format!("{node:?}"),
r"
/
βββ 404
βββ 50
β βββ 0
β βββ 3
βββ a
β βββ bout
β β βββ /
β β βββ careers
β β βββ diversity
β β βββ press
β βββ pi
βββ c
β βββ ollections
β βββ ustomer-stories
βββ e
β βββ nterprise
β βββ xplore
βββ features
β βββ /
β βββ actions
β βββ co
β β βββ de
β β β βββ -review
β β β βββ spaces
β β βββ pilot
β βββ discussions
β βββ issues
β βββ packages
β βββ security
βββ issues
βββ login
βββ marketplace
βββ p
β βββ ricing
β βββ ulls
βββ readme
βββ s
β βββ e
β β βββ arch
β β βββ ttings
β β βββ /
β β βββ a
β β β βββ ccessibility
β β β βββ dmin
β β β βββ pp
β β β βββ earance
β β β βββ s
β β βββ b
β β β βββ illing
β β β β βββ /plans
β β β βββ locked_users
β β βββ co
β β β βββ de
β β β β βββ _review_limits
β β β β βββ spaces
β β β βββ pilot
β β βββ de
β β β βββ leted_packages
β β β βββ velopers
β β βββ in
β β β βββ stallations
β β β βββ teraction_limits
β β βββ keys
β β βββ notifications
β β βββ organizations
β β βββ pages
β β βββ re
β β β βββ minders
β β β βββ p
β β β βββ lies
β β β βββ ositories
β β βββ s
β β β βββ ecurity
β β β β βββ -log
β β β β βββ _analysis
β β β βββ ponsors-log
β β βββ tokens
β βββ ignup
β βββ ponsors
β βββ /
β βββ accounts
β βββ explore
β βββ :
βββ t
β βββ eam
β βββ opics
β βββ rending
βββ :
βββ /
βββ :
"
);
}
| rust | Apache-2.0 | b4275047bf9cdad6cd890b19d9c9d6ce9389b094 | 2026-01-04T20:22:55.187428Z | false |
viz-rs/path-tree | https://github.com/viz-rs/path-tree/blob/b4275047bf9cdad6cd890b19d9c9d6ce9389b094/tests/parser.rs | tests/parser.rs | use path_tree::{Kind, Parser, Piece, Position};
#[test]
fn parses() {
assert_eq!(
Parser::new(r"/shop/product/\::filter/color\::color/size\::size").collect::<Vec<_>>(),
[
Piece::String(b"/shop/product/".to_vec()),
Piece::String(b":".to_vec()),
Piece::Parameter(Position::Named(b"filter".to_vec()), Kind::Normal),
Piece::String(b"/color".to_vec()),
Piece::String(b":".to_vec()),
Piece::Parameter(Position::Named(b"color".to_vec()), Kind::Normal),
Piece::String(b"/size".to_vec()),
Piece::String(b":".to_vec()),
Piece::Parameter(Position::Named(b"size".to_vec()), Kind::Normal),
],
);
assert_eq!(
Parser::new("/api/v1/:param/abc/*").collect::<Vec<_>>(),
[
Piece::String(b"/api/v1/".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::Normal),
Piece::String(b"/abc/".to_vec()),
Piece::Parameter(Position::Index(1, b"*1".to_vec()), Kind::ZeroOrMoreSegment),
],
);
assert_eq!(
Parser::new("/api/v1/:param/+").collect::<Vec<_>>(),
[
Piece::String(b"/api/v1/".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::Normal),
Piece::String(b"/".to_vec()),
Piece::Parameter(Position::Index(1, b"+1".to_vec()), Kind::OneOrMore),
],
);
assert_eq!(
Parser::new("/api/v1/:param?").collect::<Vec<_>>(),
[
Piece::String(b"/api/v1/".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::OptionalSegment),
],
);
assert_eq!(
Parser::new("/api/v1/:param?").collect::<Vec<_>>(),
[
Piece::String(b"/api/v1/".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::OptionalSegment),
],
);
assert_eq!(
Parser::new("/api/v1/:param").collect::<Vec<_>>(),
[
Piece::String(b"/api/v1/".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::Normal),
],
);
assert_eq!(
Parser::new("/api/v1/*").collect::<Vec<_>>(),
[
Piece::String(b"/api/v1/".to_vec()),
Piece::Parameter(Position::Index(1, b"*1".to_vec()), Kind::ZeroOrMoreSegment),
],
);
assert_eq!(
Parser::new("/api/v1/:param-:param2").collect::<Vec<_>>(),
[
Piece::String(b"/api/v1/".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::Normal),
Piece::String(b"-".to_vec()),
Piece::Parameter(Position::Named(b"param2".to_vec()), Kind::Normal),
],
);
assert_eq!(
Parser::new("/api/v1/:filename.:extension").collect::<Vec<_>>(),
[
Piece::String(b"/api/v1/".to_vec()),
Piece::Parameter(Position::Named(b"filename".to_vec()), Kind::Normal),
Piece::String(b".".to_vec()),
Piece::Parameter(Position::Named(b"extension".to_vec()), Kind::Normal),
],
);
assert_eq!(
Parser::new("/api/v1/string").collect::<Vec<_>>(),
[Piece::String(b"/api/v1/string".to_vec()),],
);
assert_eq!(
Parser::new(r"/\::param?").collect::<Vec<_>>(),
[
Piece::String(b"/".to_vec()),
Piece::String(b":".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::Optional),
],
);
assert_eq!(
Parser::new("/:param1:param2?:param3").collect::<Vec<_>>(),
[
Piece::String(b"/".to_vec()),
Piece::Parameter(Position::Named(b"param1".to_vec()), Kind::Normal),
Piece::Parameter(Position::Named(b"param2".to_vec()), Kind::Optional),
Piece::Parameter(Position::Named(b"param3".to_vec()), Kind::Normal),
],
);
assert_eq!(
Parser::new("/test:sign:param").collect::<Vec<_>>(),
[
Piece::String(b"/test".to_vec()),
Piece::Parameter(Position::Named(b"sign".to_vec()), Kind::Normal),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::Normal),
],
);
assert_eq!(
Parser::new("/foo:param?bar").collect::<Vec<_>>(),
[
Piece::String(b"/foo".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::Optional),
Piece::String(b"bar".to_vec()),
],
);
assert_eq!(
Parser::new("/foo*bar").collect::<Vec<_>>(),
[
Piece::String(b"/foo".to_vec()),
Piece::Parameter(Position::Index(1, b"*1".to_vec()), Kind::ZeroOrMore),
Piece::String(b"bar".to_vec()),
],
);
assert_eq!(
Parser::new("/foo+bar").collect::<Vec<_>>(),
[
Piece::String(b"/foo".to_vec()),
Piece::Parameter(Position::Index(1, b"+1".to_vec()), Kind::OneOrMore),
Piece::String(b"bar".to_vec()),
],
);
assert_eq!(
Parser::new("/a*cde*g/").collect::<Vec<_>>(),
[
Piece::String(b"/a".to_vec()),
Piece::Parameter(Position::Index(1, b"*1".to_vec()), Kind::ZeroOrMore),
Piece::String(b"cde".to_vec()),
Piece::Parameter(Position::Index(2, b"*2".to_vec()), Kind::ZeroOrMore),
Piece::String(b"g/".to_vec()),
],
);
assert_eq!(
Parser::new(r"/name\::name").collect::<Vec<_>>(),
[
Piece::String(b"/name".to_vec()),
Piece::String(b":".to_vec()),
Piece::Parameter(Position::Named(b"name".to_vec()), Kind::Normal),
]
);
assert_eq!(
Parser::new("/@:name").collect::<Vec<_>>(),
[
Piece::String(b"/@".to_vec()),
Piece::Parameter(Position::Named(b"name".to_vec()), Kind::Normal),
]
);
assert_eq!(
Parser::new("/-:name").collect::<Vec<_>>(),
[
Piece::String(b"/-".to_vec()),
Piece::Parameter(Position::Named(b"name".to_vec()), Kind::Normal),
]
);
assert_eq!(
Parser::new("/.:name").collect::<Vec<_>>(),
[
Piece::String(b"/.".to_vec()),
Piece::Parameter(Position::Named(b"name".to_vec()), Kind::Normal),
]
);
assert_eq!(
Parser::new("/_:name").collect::<Vec<_>>(),
[
Piece::String(b"/_".to_vec()),
Piece::Parameter(Position::Named(b"name".to_vec()), Kind::Normal),
]
);
assert_eq!(
Parser::new("/~:name").collect::<Vec<_>>(),
[
Piece::String(b"/~".to_vec()),
Piece::Parameter(Position::Named(b"name".to_vec()), Kind::Normal),
]
);
assert_eq!(
Parser::new("/v1/some/resource/name\\:customVerb").collect::<Vec<_>>(),
[
Piece::String(b"/v1/some/resource/name".to_vec()),
Piece::String(b":".to_vec()),
Piece::String(b"customVerb".to_vec()),
],
);
assert_eq!(
Parser::new("/v1/some/resource/:name\\:customVerb").collect::<Vec<_>>(),
[
Piece::String(b"/v1/some/resource/".to_vec()),
Piece::Parameter(Position::Named(b"name".to_vec()), Kind::Normal),
Piece::String(b":".to_vec()),
Piece::String(b"customVerb".to_vec()),
],
);
assert_eq!(
Parser::new("/v1/some/resource/name\\:customVerb??/:param/*").collect::<Vec<_>>(),
[
Piece::String(b"/v1/some/resource/name".to_vec()),
Piece::String(b":".to_vec()),
Piece::String(b"customVerb??/".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::Normal),
Piece::String(b"/".to_vec()),
Piece::Parameter(Position::Index(1, b"*1".to_vec()), Kind::ZeroOrMoreSegment)
],
);
assert_eq!(
Parser::new("/api/*/:param/:param2").collect::<Vec<_>>(),
[
Piece::String(b"/api/".to_vec()),
Piece::Parameter(Position::Index(1, b"*1".to_vec()), Kind::ZeroOrMoreSegment),
Piece::String(b"/".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::Normal),
Piece::String(b"/".to_vec()),
Piece::Parameter(Position::Named(b"param2".to_vec()), Kind::Normal)
],
);
assert_eq!(
Parser::new("/test:optional?:optional2?").collect::<Vec<_>>(),
[
Piece::String(b"/test".to_vec()),
Piece::Parameter(Position::Named(b"optional".to_vec()), Kind::Optional),
Piece::Parameter(Position::Named(b"optional2".to_vec()), Kind::Optional)
],
);
assert_eq!(
Parser::new("/config/+.json").collect::<Vec<_>>(),
[
Piece::String(b"/config/".to_vec()),
Piece::Parameter(Position::Index(1, b"+1".to_vec()), Kind::OneOrMore),
Piece::String(b".json".to_vec()),
]
);
assert_eq!(
Parser::new("/config/*.json").collect::<Vec<_>>(),
[
Piece::String(b"/config/".to_vec()),
Piece::Parameter(Position::Index(1, b"*1".to_vec()), Kind::ZeroOrMore),
Piece::String(b".json".to_vec()),
]
);
assert_eq!(
Parser::new("/api/:day.:month?.:year?").collect::<Vec<_>>(),
[
Piece::String(b"/api/".to_vec()),
Piece::Parameter(Position::Named(b"day".to_vec()), Kind::Normal),
Piece::String(b".".to_vec()),
Piece::Parameter(Position::Named(b"month".to_vec()), Kind::Optional),
Piece::String(b".".to_vec()),
Piece::Parameter(Position::Named(b"year".to_vec()), Kind::Optional),
]
);
assert_eq!(
Parser::new("/api/:day/:month?/:year?").collect::<Vec<_>>(),
[
Piece::String(b"/api/".to_vec()),
Piece::Parameter(Position::Named(b"day".to_vec()), Kind::Normal),
Piece::String(b"/".to_vec()),
Piece::Parameter(Position::Named(b"month".to_vec()), Kind::OptionalSegment),
Piece::String(b"/".to_vec()),
Piece::Parameter(Position::Named(b"year".to_vec()), Kind::OptionalSegment),
]
);
assert_eq!(
Parser::new("/*v1*/proxy").collect::<Vec<_>>(),
[
Piece::String(b"/".to_vec()),
Piece::Parameter(Position::Index(1, b"*1".to_vec()), Kind::ZeroOrMore),
Piece::String(b"v1".to_vec()),
Piece::Parameter(Position::Index(2, b"*2".to_vec()), Kind::ZeroOrMore),
Piece::String(b"/proxy".to_vec()),
]
);
assert_eq!(
Parser::new("/:a*v1:b+/proxy").collect::<Vec<_>>(),
[
Piece::String(b"/".to_vec()),
Piece::Parameter(Position::Named(b"a".to_vec()), Kind::ZeroOrMore),
Piece::String(b"v1".to_vec()),
Piece::Parameter(Position::Named(b"b".to_vec()), Kind::OneOrMore),
Piece::String(b"/proxy".to_vec()),
]
);
}
| rust | Apache-2.0 | b4275047bf9cdad6cd890b19d9c9d6ce9389b094 | 2026-01-04T20:22:55.187428Z | false |
viz-rs/path-tree | https://github.com/viz-rs/path-tree/blob/b4275047bf9cdad6cd890b19d9c9d6ce9389b094/tests/tree.rs | tests/tree.rs | #![allow(unused_must_use)]
#![allow(clippy::too_many_lines)]
use path_tree::{Kind, PathTree, Piece, Position};
use rand::seq::SliceRandom;
#[test]
fn statics() {
const ROUTES: [&str; 12] = [
"/",
"/hi",
"/contact",
"/co",
"/c",
"/a",
"/ab",
"/doc/",
"/doc/go_faq.html",
"/doc/go1.html",
"/Ξ±",
"/Ξ²",
];
let mut routes = ROUTES.to_vec();
routes.shuffle(&mut rand::rng());
let mut tree = PathTree::<usize>::new();
for (i, u) in routes.iter().enumerate() {
tree.insert(u, i);
}
for (i, u) in routes.iter().enumerate() {
let (h, _) = tree.find(u).unwrap();
assert_eq!(h, &i);
}
}
#[test]
fn wildcards() {
const ROUTES: [&str; 20] = [
"/",
"/cmd/:tool/:sub",
"/cmd/:tool/",
"/cmd/vet",
"/src/:filepath*",
"/src1/",
"/src1/:filepath*",
"/src2:filepath*",
"/search/",
"/search/:query",
"/search/invalid",
"/user_:name",
"/user_:name/about",
"/user_x",
"/files/:dir/:filepath*",
"/doc/",
"/doc/rust_faq.html",
"/doc/rust1.html",
"/info/:user/public",
"/info/:user/project/:project",
];
let mut routes = (0..20).zip(ROUTES.iter()).collect::<Vec<_>>();
routes.shuffle(&mut rand::rng());
let mut tree = PathTree::<usize>::new();
for (i, u) in &routes {
tree.insert(u, *i);
}
let valid_res = vec![
("/", 0, vec![]),
("/cmd/test/", 2, vec![("tool", "test")]),
("/cmd/test/3", 1, vec![("tool", "test"), ("sub", "3")]),
("/src/", 4, vec![("filepath", "")]),
("/src/some/file.png", 4, vec![("filepath", "some/file.png")]),
(
"/search/someth!ng+in+ünìcodé",
9,
vec![("query", "someth!ng+in+ünìcodé")],
),
("/user_rust", 11, vec![("name", "rust")]),
("/user_rust/about", 12, vec![("name", "rust")]),
(
"/files/js/inc/framework.js",
14,
vec![("dir", "js"), ("filepath", "inc/framework.js")],
),
("/info/gordon/public", 18, vec![("user", "gordon")]),
(
"/info/gordon/project/rust",
19,
vec![("user", "gordon"), ("project", "rust")],
),
];
for (u, v, p) in valid_res {
let (h, r) = tree.find(u).unwrap();
assert_eq!(*h, v);
assert_eq!(r.params(), p);
}
}
#[test]
fn single_named_parameter() {
// Pattern: /users/:id
//
// /users/gordon match
// /users/you match
// /users/gordon/profile no match
// /users/ no match
let mut tree = PathTree::new();
tree.insert("/users/:id", 0);
let res = vec![
("/", false),
("/users/gordon", true),
("/users/you", true),
("/users/gordon/profile", false),
("/users/", false),
("/users", false),
];
for (u, b) in res {
let n = tree.find(u);
assert_eq!(n.is_some(), b);
}
}
#[test]
fn repeated_single_named_param() {
let mut tree = PathTree::new();
tree.insert("/users/:id", 0);
tree.insert("/users/:user_id", 1);
let (h, r) = tree.find("/users/gordon").unwrap();
assert_eq!(*h, 1);
assert_eq!(r.params(), vec![("user_id", "gordon")]);
}
#[test]
fn static_and_named_parameter() {
// Pattern: /a/b/c
// Pattern: /a/c/d
// Pattern: /a/c/a
// Pattern: /:id/c/e
//
// /a/b/c match
// /a/c/d match
// /a/c/a match
// /a/c/e match
let mut tree = PathTree::new();
tree.insert("/a/b/c", "/a/b/c");
tree.insert("/a/c/d", "/a/c/d");
tree.insert("/a/c/a", "/a/c/a");
tree.insert("/:id/c/e", "/:id/c/e");
let res = vec![
("/", false, "", vec![]),
("/a/b/c", true, "/a/b/c", vec![]),
("/a/c/d", true, "/a/c/d", vec![]),
("/a/c/a", true, "/a/c/a", vec![]),
("/a/c/e", true, "/:id/c/e", vec![("id", "a")]),
];
for (u, b, a, p) in res {
let n = tree.find(u);
assert_eq!(n.is_some(), b);
if let Some((h, r)) = n {
assert_eq!(*h, a);
assert_eq!(r.params(), p);
}
}
}
#[test]
fn multi_named_parameters() {
// Pattern: /:lang/:keyword
// Pattern: /:id
//
// /rust match
// /rust/let match
// /rust/let/const no match
// /rust/let/ no match
// /rust/ no match
// / no match
let mut tree = PathTree::new();
tree.insert("/:lang/:keyword", true);
tree.insert("/:id", true);
let res = vec![
("/", false, false, vec![]),
("/rust/", false, false, vec![]),
("/rust/let/", false, false, vec![]),
("/rust/let/const", false, false, vec![]),
(
"/rust/let",
true,
true,
vec![("lang", "rust"), ("keyword", "let")],
),
("/rust", true, true, vec![("id", "rust")]),
];
for (u, b, a, p) in res {
let n = tree.find(u);
assert_eq!(n.is_some(), b);
if let Some((h, r)) = n {
assert_eq!(*h, a);
assert_eq!(r.params(), p);
}
}
}
#[test]
fn catch_all_parameter() {
// Pattern: /src/*filepath
//
// /src no match
// /src/ match
// /src/somefile.go match
// /src/subdir/somefile.go match
let mut tree = PathTree::new();
tree.insert("/src/:filepath*", "* files");
let res = vec![
("/src", false, vec![]),
("/src/", true, vec![("filepath", "")]),
("/src/somefile.rs", true, vec![("filepath", "somefile.rs")]),
(
"/src/subdir/somefile.rs",
true,
vec![("filepath", "subdir/somefile.rs")],
),
("/src.rs", false, vec![]),
("/rust", false, vec![]),
];
for (u, b, p) in res {
let n = tree.find(u);
assert_eq!(n.is_some(), b);
if let Some((h, r)) = n {
assert_eq!(*h, "* files");
assert_eq!(r.params(), p);
}
}
tree.insert("/src/", "dir");
let n = tree.find("/src/");
assert!(n.is_some());
if let Some((h, r)) = n {
assert_eq!(*h, "dir");
assert_eq!(r.params(), vec![]);
}
}
#[test]
fn catch_all_parameter_with_prefix() {
// Pattern: /commit_*sha
//
// /commit no match
// /commit_ match
// /commit_/ match
// /commit_/foo match
// /commit_123 match
// /commit_123/ match
// /commit_123/foo match
let mut tree = PathTree::new();
tree.insert("/commit_:sha*", "* sha");
tree.insert("/commit/:sha", "hex");
tree.insert("/commit/:sha0/compare/:sha1", "compare");
tree.insert("/src/", "dir");
let n = tree.find("/src/");
assert!(n.is_some());
if let Some((h, r)) = n {
assert_eq!(*h, "dir");
assert_eq!(r.params(), vec![]);
}
let n = tree.find("/commit/123");
assert!(n.is_some());
if let Some((h, r)) = n {
assert_eq!(*h, "hex");
assert_eq!(r.params(), vec![("sha", "123")]);
}
let n = tree.find("/commit/123/compare/321");
assert!(n.is_some());
if let Some((h, r)) = n {
assert_eq!(*h, "compare");
assert_eq!(r.params(), vec![("sha0", "123"), ("sha1", "321")]);
}
let res = vec![
("/commit", false, vec![]),
("/commit_", true, vec![("sha", "")]),
("/commit_/", true, vec![("sha", "/")]),
("/commit_/foo", true, vec![("sha", "/foo")]),
("/commit123", false, vec![]),
("/commit_123", true, vec![("sha", "123")]),
("/commit_123/", true, vec![("sha", "123/")]),
("/commit_123/foo", true, vec![("sha", "123/foo")]),
];
for (u, b, p) in res {
let n = tree.find(u);
assert_eq!(n.is_some(), b);
if let Some((h, r)) = n {
assert_eq!(*h, "* sha");
assert_eq!(r.params(), p);
}
}
}
#[test]
fn static_and_catch_all_parameter() {
// Pattern: /a/b/c
// Pattern: /a/c/d
// Pattern: /a/c/a
// Pattern: /a/*c
//
// /a/b/c match
// /a/c/d match
// /a/c/a match
// /a/c/e match
let mut tree = PathTree::new();
tree.insert("/a/b/c", "/a/b/c");
tree.insert("/a/c/d", "/a/c/d");
tree.insert("/a/c/a", "/a/c/a");
tree.insert("/a/*", "/a/*c");
let res = vec![
("/", false, "", vec![]),
("/a/b/c", true, "/a/b/c", vec![]),
("/a/c/d", true, "/a/c/d", vec![]),
("/a/c/a", true, "/a/c/a", vec![]),
("/a/c/e", true, "/a/*c", vec![("*1", "c/e")]),
];
for (u, b, a, p) in res {
let n = tree.find(u);
assert_eq!(n.is_some(), b);
if let Some((h, r)) = n {
assert_eq!(*h, a);
assert_eq!(r.params(), p);
}
}
}
#[test]
fn root_catch_all_parameter() {
// Pattern: /
// Pattern: /*
// Pattern: /users/*
//
// / match *
// /download match *
// /users/jordan match users *
let mut tree = PathTree::<fn() -> usize>::new();
tree.insert("/", || 1);
tree.insert("/*", || 2);
tree.insert("/users/*", || 3);
let res = vec![
("/", true, 1, vec![]),
("/download", true, 2, vec![("*1", "download")]),
("/users/jordan", true, 3, vec![("*1", "jordan")]),
];
for (u, b, a, p) in res {
let n = tree.find(u);
assert_eq!(n.is_some(), b);
if let Some((h, r)) = n {
assert_eq!((h)(), a);
assert_eq!(r.params(), p);
}
}
}
#[test]
fn root_catch_all_parameter_1() {
// Pattern: /*
//
// / match *
// /download match *
// /users/jordan match *
let mut tree = PathTree::<fn() -> usize>::new();
tree.insert("/*", || 1);
let res = vec![
("/", true, 1, vec![("*1", "")]),
("/download", true, 1, vec![("*1", "download")]),
("/users/jordan", true, 1, vec![("*1", "users/jordan")]),
];
for (u, b, a, p) in res {
let n = tree.find(u);
assert_eq!(n.is_some(), b);
if let Some((h, r)) = n {
assert_eq!((h)(), a);
assert_eq!(r.params(), p);
}
}
tree.insert("/", || 0);
let n = tree.find("/");
assert!(n.is_some());
if let Some((h, r)) = n {
assert_eq!((h)(), 0);
assert_eq!(r.params(), vec![]);
}
}
#[test]
fn test_named_routes_with_non_ascii_paths() {
let mut tree = PathTree::<usize>::new();
tree.insert("/", 0);
tree.insert("/*", 1);
tree.insert("/matchme/:slug/", 2);
// ASCII only (single-byte characters)
let n = tree.find("/matchme/abc-s-def/");
assert!(n.is_some());
let (h, r) = n.unwrap();
assert_eq!(*h, 2);
assert_eq!(r.params(), vec![("slug", "abc-s-def")]);
// with multibyte character
let n = tree.find("/matchme/abc-Γ-def/");
assert!(n.is_some());
let (h, r) = n.unwrap();
assert_eq!(*h, 2);
assert_eq!(r.params(), vec![("slug", "abc-Γ-def")]);
// with emoji (fancy multibyte character)
let n = tree.find("/matchme/abc-β-def/");
assert!(n.is_some());
let (h, r) = n.unwrap();
assert_eq!(*h, 2);
assert_eq!(r.params(), vec![("slug", "abc-β-def")]);
// with multibyte character right before the slash (char boundary check)
let n = tree.find("/matchme/abc-def-Γ/");
assert!(n.is_some());
let (h, r) = n.unwrap();
assert_eq!(*h, 2);
assert_eq!(r.params(), vec![("slug", "abc-def-Γ")]);
}
#[test]
fn test_named_wildcard_collide() {
let mut tree = PathTree::<usize>::new();
tree.insert("/git/:org/:repo", 1);
tree.insert("/git/*", 2);
let n = tree.find("/git/rust-lang/rust");
assert!(n.is_some());
let (h, r) = n.unwrap();
assert_eq!(*h, 1);
assert_eq!(r.params(), vec![("org", "rust-lang"), ("repo", "rust")]);
let n = tree.find("/git/rust-lang");
assert!(n.is_some());
let (h, r) = n.unwrap();
assert_eq!(*h, 2);
assert_eq!(r.params(), vec![("*1", "rust-lang")]);
}
#[test]
fn match_params() {
// /
// βββ api/v1/
// βββ :
// βββ /
// βββ ** β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/api/v1/:param/*", 1);
assert_eq!(tree.find("/api/v1/entity"), None);
let (h, p) = tree.find("/api/v1/entity/").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("param", "entity"), ("*1", "")]);
assert_eq!(p.pattern(), "/api/v1/:param/*");
assert_eq!(
p.pieces,
&vec![
Piece::String(b"/api/v1/".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::Normal),
Piece::String(b"/".to_vec()),
Piece::Parameter(Position::Index(1, b"*1".to_vec()), Kind::ZeroOrMoreSegment),
]
);
let (h, p) = tree.find("/api/v1/entity/1").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("param", "entity"), ("*1", "1")]);
assert_eq!(tree.find("/api/v"), None);
assert_eq!(tree.find("/api/v2"), None);
assert_eq!(tree.find("/api/v1/"), None);
let (h, p) = tree.find("/api/v1/entity/1/foo/bar").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("param", "entity"), ("*1", "1/foo/bar")]);
// /
// βββ api/v1/
// βββ :
// βββ /
// βββ + β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/api/v1/:param/+", 1);
assert_eq!(tree.find("/api/v1/entity"), None);
assert_eq!(tree.find("/api/v1/entity/"), None);
let (h, p) = tree.find("/api/v1/entity/1").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("param", "entity"), ("+1", "1")]);
assert_eq!(tree.find("/api/v"), None);
assert_eq!(tree.find("/api/v2"), None);
assert_eq!(tree.find("/api/v1/"), None);
let (h, p) = tree.find("/api/v1/entity/1/foo/bar").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("param", "entity"), ("+1", "1/foo/bar")]);
// /
// βββ api/v1/
// βββ ?? β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/api/v1/:param?", 1);
let (h, p) = tree.find("/api/v1/").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("param", "")]);
assert_eq!(p.pattern(), "/api/v1/:param?");
assert_eq!(
p.pieces,
&vec![
Piece::String(b"/api/v1/".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::OptionalSegment),
]
);
assert_eq!(tree.find("/api/v1/entity/1/foo/bar"), None);
assert_eq!(tree.find("/api/v"), None);
assert_eq!(tree.find("/api/v2"), None);
assert_eq!(tree.find("/api/xyz"), None);
// /
// βββ v1/some/resource/name
// βββ \:
// βββ customVerb β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/v1/some/resource/name\\:customVerb", 1);
let (h, p) = tree.find("/v1/some/resource/name:customVerb").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![]);
assert_eq!(p.pattern(), "/v1/some/resource/name\\:customVerb");
assert_eq!(
p.pieces,
&vec![
Piece::String(b"/v1/some/resource/name".to_vec()),
Piece::String(b":".to_vec()),
Piece::String(b"customVerb".to_vec()),
]
);
assert_eq!(tree.find("/v1/some/resource/name:test"), None);
// /
// βββ v1/some/resource/
// βββ :
// βββ \:
// βββ customVerb β’0
let mut tree = PathTree::<usize>::new();
tree.insert(r"/v1/some/resource/:name\:customVerb", 1);
let (h, p) = tree.find("/v1/some/resource/test:customVerb").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("name", "test")]);
assert_eq!(
p.pieces,
vec![
Piece::String(b"/v1/some/resource/".to_vec()),
Piece::Parameter(Position::Named(b"name".to_vec()), Kind::Normal),
Piece::String(b":".to_vec()),
Piece::String(b"customVerb".to_vec()),
]
);
assert_eq!(tree.find("/v1/some/resource/test:test"), None);
// /
// βββ v1/some/resource/name
// βββ \:
// βββ customVerb\?
// βββ \?
// βββ /
// βββ :
// βββ /
// βββ ** β’0
let mut tree = PathTree::<usize>::new();
tree.insert(r"/v1/some/resource/name\\\\:customVerb?\?/:param/*", 1);
let (h, p) = tree
.find("/v1/some/resource/name:customVerb??/test/optionalWildCard/character")
.unwrap();
assert_eq!(*h, 1);
assert_eq!(
p.params(),
vec![("param", "test"), ("*1", "optionalWildCard/character")]
);
let (h, p) = tree
.find("/v1/some/resource/name:customVerb??/test/")
.unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("param", "test"), ("*1", "")]);
assert_eq!(tree.find("/v1/some/resource/name:customVerb??/test"), None);
// /
// βββ api/v1/
// βββ ** β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/api/v1/*", 1);
assert_eq!(tree.find("/api/v1"), None);
let (h, p) = tree.find("/api/v1/").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("*1", "")]);
let (h, p) = tree.find("/api/v1/entity").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("*1", "entity")]);
let (h, p) = tree.find("/api/v1/entity/1/2").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("*1", "entity/1/2")]);
let (h, p) = tree.find("/api/v1/Entity/1/2").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("*1", "Entity/1/2")]);
// /
// βββ api/v1/
// βββ : β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/api/v1/:param", 1);
assert_eq!(tree.find("/api/v1"), None);
assert_eq!(tree.find("/api/v1/"), None);
let (h, p) = tree.find("/api/v1/entity").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("param", "entity")]);
assert_eq!(tree.find("/api/v1/entity/1/2"), None);
assert_eq!(tree.find("/api/v1/Entity/1/2"), None);
// /
// βββ api/v1/
// βββ :
// βββ -
// β βββ : β’1
// βββ .
// β βββ : β’3
// βββ \:
// β βββ : β’5
// βββ _
// β βββ : β’4
// βββ ~
// β βββ : β’2
// βββ /
// βββ : β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/api/v1/:param/:param2", 3);
tree.insert("/api/v1/:param-:param2", 1);
tree.insert("/api/v1/:param~:param2", 2);
tree.insert("/api/v1/:param.:param2", 4);
tree.insert("/api/v1/:param\\_:param2", 5);
tree.insert("/api/v1/:param\\::param2", 6);
let (h, p) = tree.find("/api/v1/entity-entity2").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("param", "entity"), ("param2", "entity2")]);
let (h, p) = tree.find("/api/v1/entity~entity2").unwrap();
assert_eq!(*h, 2);
assert_eq!(p.params(), vec![("param", "entity"), ("param2", "entity2")]);
let (h, p) = tree.find("/api/v1/entity.entity2").unwrap();
assert_eq!(*h, 4);
assert_eq!(p.params(), vec![("param", "entity"), ("param2", "entity2")]);
let (h, p) = tree.find("/api/v1/entity_entity2").unwrap();
assert_eq!(*h, 5);
assert_eq!(p.params(), vec![("param", "entity"), ("param2", "entity2")]);
let (h, p) = tree.find("/api/v1/entity:entity2").unwrap();
assert_eq!(*h, 6);
assert_eq!(p.params(), vec![("param", "entity"), ("param2", "entity2")]);
let (h, p) = tree.find("/api/v1/entity/entity2").unwrap();
assert_eq!(*h, 3);
assert_eq!(p.params(), vec![("param", "entity"), ("param2", "entity2")]);
assert_eq!(tree.find("/api/v1"), None);
assert_eq!(tree.find("/api/v1/"), None);
let (h, p) = tree.find("/api/v1/test.pdf").unwrap();
assert_eq!(*h, 4);
assert_eq!(p.params(), vec![("param", "test"), ("param2", "pdf")]);
// /
// βββ api/v1/const β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/api/v1/const", 1);
let (h, p) = tree.find("/api/v1/const").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert!(p.params().is_empty());
assert_eq!(p.pattern(), "/api/v1/const");
assert_eq!(p.pieces, vec![Piece::String(b"/api/v1/const".to_vec())]);
assert_eq!(tree.find("/api/v1/cons"), None);
assert_eq!(tree.find("/api/v1/conststatic"), None);
assert_eq!(tree.find("/api/v1/let"), None);
assert_eq!(tree.find("/api/v1/"), None);
assert_eq!(tree.find("/api/v1"), None);
// /
// βββ api/
// βββ :
// βββ /fixedEnd β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/api/:param/fixedEnd", 1);
let (h, p) = tree.find("/api/abc/fixedEnd").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert_eq!(
p.pieces,
&vec![
Piece::String(b"/api/".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::Normal),
Piece::String(b"/fixedEnd".to_vec()),
]
);
assert_eq!(p.params(), vec![("param", "abc")]);
assert_eq!(p.pattern(), "/api/:param/fixedEnd");
assert_eq!(tree.find("/api/abc/def/fixedEnd"), None);
// /
// βββ shop/product/
// βββ \:
// βββ :
// βββ /color
// βββ \:
// βββ :
// βββ /size
// βββ \:
// βββ : β’0
let mut tree = PathTree::<usize>::new();
tree.insert(r"/shop/product/\::filter/color\::color/size\::size", 1);
let (h, p) = tree.find("/shop/product/:test/color:blue/size:xs").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert_eq!(
p.pieces,
&vec![
Piece::String(b"/shop/product/".to_vec()),
Piece::String(b":".to_vec()),
Piece::Parameter(Position::Named(b"filter".to_vec()), Kind::Normal),
Piece::String(b"/color".to_vec()),
Piece::String(b":".to_vec()),
Piece::Parameter(Position::Named(b"color".to_vec()), Kind::Normal),
Piece::String(b"/size".to_vec()),
Piece::String(b":".to_vec()),
Piece::Parameter(Position::Named(b"size".to_vec()), Kind::Normal),
]
);
assert_eq!(
p.pattern(),
r"/shop/product/\::filter/color\::color/size\::size"
);
assert_eq!(
p.params(),
vec![("filter", "test"), ("color", "blue"), ("size", "xs")]
);
assert_eq!(tree.find("/shop/product/test/color:blue/size:xs"), None);
// /
// βββ \:
// βββ ? β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/\\::param?", 1);
let (h, p) = tree.find("/:hello").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("param", "hello")]);
assert_eq!(p.pattern(), "/\\::param?");
assert_eq!(
p.pieces,
&vec![
Piece::String(b"/".to_vec()),
Piece::String(b":".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::Optional),
]
);
let (h, p) = tree.find("/:").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("param", "")]);
assert_eq!(tree.find("/"), None);
// /
// βββ test
// βββ :
// βββ : β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/test:sign:param", 1);
let (h, p) = tree.find("/test-abc").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("sign", "-"), ("param", "abc")]);
assert_eq!(p.pattern(), "/test:sign:param");
assert_eq!(
p.pieces,
&vec![
Piece::String(b"/test".to_vec()),
Piece::Parameter(Position::Named(b"sign".to_vec()), Kind::Normal),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::Normal),
]
);
let (h, p) = tree.find("/test-_").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("sign", "-"), ("param", "_")]);
assert_eq!(tree.find("/test-"), None);
assert_eq!(tree.find("/test"), None);
// /
// βββ :
// βββ ?
// βββ : β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/:param1:param2?:param3", 1);
let (h, p) = tree.find("/abbbc").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert_eq!(
p.params(),
vec![("param1", "a"), ("param2", "b"), ("param3", "bbc")]
);
assert_eq!(p.pattern(), "/:param1:param2?:param3");
assert_eq!(
p.pieces,
&vec![
Piece::String(b"/".to_vec()),
Piece::Parameter(Position::Named(b"param1".to_vec()), Kind::Normal),
Piece::Parameter(Position::Named(b"param2".to_vec()), Kind::Optional),
Piece::Parameter(Position::Named(b"param3".to_vec()), Kind::Normal),
]
);
let (h, p) = tree.find("/ab").unwrap();
assert_eq!(*h, 1);
assert_eq!(
p.params(),
vec![("param1", "a"), ("param2", ""), ("param3", "b")]
);
assert_eq!(tree.find("/a"), None);
// /
// βββ test
// βββ ?
// βββ : β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/test:optional?:mandatory", 1);
let (h, p) = tree.find("/testo").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("optional", ""), ("mandatory", "o")]);
assert_eq!(p.pattern(), "/test:optional?:mandatory");
assert_eq!(
p.pieces,
&vec![
Piece::String(b"/test".to_vec()),
Piece::Parameter(Position::Named(b"optional".to_vec()), Kind::Optional),
Piece::Parameter(Position::Named(b"mandatory".to_vec()), Kind::Normal),
]
);
let (h, p) = tree.find("/testoaaa").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("optional", "o"), ("mandatory", "aaa")]);
assert_eq!(tree.find("/test"), None);
assert_eq!(tree.find("/tes"), None);
// /
// βββ test
// βββ ?
// βββ ? β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/test:optional?:optional2?", 1);
let (h, p) = tree.find("/testo").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("optional", "o"), ("optional2", "")]);
assert_eq!(p.pattern(), "/test:optional?:optional2?");
assert_eq!(
p.pieces,
&vec![
Piece::String(b"/test".to_vec()),
Piece::Parameter(Position::Named(b"optional".to_vec()), Kind::Optional),
Piece::Parameter(Position::Named(b"optional2".to_vec()), Kind::Optional),
]
);
let (h, p) = tree.find("/testoaaa").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("optional", "o"), ("optional2", "aaa")]);
let (h, p) = tree.find("/test").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("optional", ""), ("optional2", "")]);
assert_eq!(tree.find("/tes"), None);
// /
// βββ foo
// βββ ?
// βββ bar β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/foo:param?bar", 1);
let (h, p) = tree.find("/foofalsebar").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("param", "false")]);
assert_eq!(p.pattern(), "/foo:param?bar");
assert_eq!(
p.pieces,
&vec![
Piece::String(b"/foo".to_vec()),
Piece::Parameter(Position::Named(b"param".to_vec()), Kind::Optional),
Piece::String(b"bar".to_vec()),
]
);
let (h, p) = tree.find("/foobar").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("param", "")]);
assert_eq!(tree.find("/fooba"), None);
assert_eq!(tree.find("/foo"), None);
// /
// βββ foo
// βββ *
// βββ bar β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/foo*bar", 1);
let (h, p) = tree.find("/foofalsebar").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("*1", "false")]);
assert_eq!(p.pattern(), "/foo*bar");
assert_eq!(
p.pieces,
&vec![
Piece::String(b"/foo".to_vec()),
Piece::Parameter(Position::Index(1, b"*1".to_vec()), Kind::ZeroOrMore),
Piece::String(b"bar".to_vec()),
]
);
let (h, p) = tree.find("/foobar").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("*1", "")]);
let (h, p) = tree.find("/foo/bar").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("*1", "/")]);
let (h, p) = tree.find("/foo/baz/bar").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("*1", "/baz/")]);
assert_eq!(tree.find("/fooba"), None);
assert_eq!(tree.find("/foo"), None);
// /
// βββ foo
// βββ +
// βββ bar β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/foo+bar", 1);
let (h, p) = tree.find("/foofalsebar").unwrap();
assert_eq!(*p.id, 0);
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("+1", "false")]);
assert_eq!(p.pattern(), "/foo+bar");
assert_eq!(
p.pieces,
&vec![
Piece::String(b"/foo".to_vec()),
Piece::Parameter(Position::Index(1, b"+1".to_vec()), Kind::OneOrMore),
Piece::String(b"bar".to_vec()),
]
);
assert_eq!(tree.find("/foobar"), None);
let (h, p) = tree.find("/foo/bar").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("+1", "/")]);
let (h, p) = tree.find("/foo/baz/bar").unwrap();
assert_eq!(*h, 1);
assert_eq!(p.params(), vec![("+1", "/baz/")]);
assert_eq!(tree.find("/fooba"), None);
assert_eq!(tree.find("/foo"), None);
// /
// βββ a
// βββ *
// βββ cde
// βββ *
// βββ g/ β’0
let mut tree = PathTree::<usize>::new();
tree.insert("/a*cde*g/", 1);
assert_eq!(tree.find("/abbbcdefffg"), None);
let (h, p) = tree.find("/abbbcdefffg/").unwrap();
assert_eq!(h, &1);
assert_eq!(
p.pieces,
vec![
Piece::String(b"/a".to_vec()),
Piece::Parameter(Position::Index(1, b"*1".to_vec()), Kind::ZeroOrMore),
Piece::String(b"cde".to_vec()),
Piece::Parameter(Position::Index(2, b"*2".to_vec()), Kind::ZeroOrMore),
Piece::String(b"g/".to_vec()),
]
);
assert_eq!(p.pattern(), "/a*cde*g/");
assert_eq!(p.params(), vec![("*1", "bbb"), ("*2", "fff")]);
let (_, p) = tree.find("/acdeg/").unwrap();
assert_eq!(p.params(), vec![("*1", ""), ("*2", "")]);
let (_, p) = tree.find("/abcdeg/").unwrap();
assert_eq!(p.params(), vec![("*1", "b"), ("*2", "")]);
let (_, p) = tree.find("/acdefg/").unwrap();
assert_eq!(p.params(), vec![("*1", ""), ("*2", "f")]);
let (_, p) = tree.find("/abcdefg/").unwrap();
| rust | Apache-2.0 | b4275047bf9cdad6cd890b19d9c9d6ce9389b094 | 2026-01-04T20:22:55.187428Z | true |
viz-rs/path-tree | https://github.com/viz-rs/path-tree/blob/b4275047bf9cdad6cd890b19d9c9d6ce9389b094/tests/fixtures/github.rs | tests/fixtures/github.rs | pub const ROUTES_WITH_COLON: [&str; 315] = [
"/app",
"/app-manifests/:code/conversions",
"/app/installations",
"/app/installations/:installation_id",
"/app/installations/:installation_id/access_tokens",
"/applications/:client_id/grants/:access_token",
"/applications/:client_id/tokens/:access_token",
"/applications/grants",
"/applications/grants/:grant_id",
"/apps/:app_slug",
"/authorizations",
"/authorizations/:authorization_id",
"/authorizations/clients/:client_id",
"/authorizations/clients/:client_id/:fingerprint",
"/codes_of_conduct",
"/codes_of_conduct/:key",
"/content_references/:content_reference_id/attachments",
"/emojis",
"/events",
"/feeds",
"/gists",
"/gists/public",
"/gists/starred",
"/gists/:gist_id",
"/gists/:gist_id/comments",
"/gists/:gist_id/comments/:comment_id",
"/gists/:gist_id/commits",
"/gists/:gist_id/forks",
"/gists/:gist_id/star",
"/gists/:gist_id/:sha",
"/gitignore/templates",
"/gitignore/templates/:name",
"/installation/repositories",
"/issues",
"/legacy/issues/search/:owner/:repository/:state/:keyword",
"/legacy/repos/search/:keyword",
"/legacy/user/email/:email",
"/legacy/user/search/:keyword",
"/licenses",
"/licenses/:license",
"/markdown",
"/markdown/raw",
"/marketplace_listing/accounts/:account_id",
"/marketplace_listing/plans",
"/marketplace_listing/plans/:plan_id/accounts",
"/marketplace_listing/stubbed/accounts/:account_id",
"/marketplace_listing/stubbed/plans",
"/marketplace_listing/stubbed/plans/:plan_id/accounts",
"/meta",
"/networks/:owner/:repo/events",
"/notifications",
"/notifications/threads/:thread_id",
"/notifications/threads/:thread_id/subscription",
"/organizations",
"/orgs/:org",
"/orgs/:org/blocks",
"/orgs/:org/blocks/:username",
"/orgs/:org/events",
"/orgs/:org/hooks",
"/orgs/:org/hooks/:hook_id",
"/orgs/:org/hooks/:hook_id/pings",
"/orgs/:org/installation",
"/orgs/:org/interaction-limits",
"/orgs/:org/invitations",
"/orgs/:org/invitations/:invitation_id/teams",
"/orgs/:org/issues",
"/orgs/:org/members",
"/orgs/:org/members/:username",
"/orgs/:org/memberships/:username",
"/orgs/:org/migrations",
"/orgs/:org/migrations/:migration_id",
"/orgs/:org/migrations/:migration_id/archive",
"/orgs/:org/migrations/:migration_id/repos/:repo_name/lock",
"/orgs/:org/outside_collaborators",
"/orgs/:org/outside_collaborators/:username",
"/orgs/:org/projects",
"/orgs/:org/public_members",
"/orgs/:org/public_members/:username",
"/orgs/:org/repos",
"/orgs/:org/teams",
"/projects/:project_id",
"/projects/:project_id/collaborators",
"/projects/:project_id/collaborators/:username",
"/projects/:project_id/collaborators/:username/permission",
"/projects/:project_id/columns",
"/projects/columns/cards/:card_id",
"/projects/columns/cards/:card_id/moves",
"/projects/columns/:column_id",
"/projects/columns/:column_id/cards",
"/projects/columns/:column_id/moves",
"/rate_limit",
"/reactions/:reaction_id",
"/repos/:owner/:repo",
"/repos/:owner/:repo/assignees",
"/repos/:owner/:repo/assignees/:assignee",
"/repos/:owner/:repo/branches",
"/repos/:owner/:repo/branches/:branch",
"/repos/:owner/:repo/branches/:branch/protection",
"/repos/:owner/:repo/branches/:branch/protection/enforce_admins",
"/repos/:owner/:repo/branches/:branch/protection/required_pull_request_reviews",
"/repos/:owner/:repo/branches/:branch/protection/required_signatures",
"/repos/:owner/:repo/branches/:branch/protection/required_status_checks",
"/repos/:owner/:repo/branches/:branch/protection/required_status_checks/contexts",
"/repos/:owner/:repo/branches/:branch/protection/restrictions",
"/repos/:owner/:repo/branches/:branch/protection/restrictions/teams",
"/repos/:owner/:repo/branches/:branch/protection/restrictions/users",
"/repos/:owner/:repo/check-runs",
"/repos/:owner/:repo/check-runs/:check_run_id",
"/repos/:owner/:repo/check-runs/:check_run_id/annotations",
"/repos/:owner/:repo/check-suites",
"/repos/:owner/:repo/check-suites/preferences",
"/repos/:owner/:repo/check-suites/:check_suite_id",
"/repos/:owner/:repo/check-suites/:check_suite_id/check-runs",
"/repos/:owner/:repo/check-suites/:check_suite_id/rerequest",
"/repos/:owner/:repo/collaborators",
"/repos/:owner/:repo/collaborators/:username",
"/repos/:owner/:repo/collaborators/:username/permission",
"/repos/:owner/:repo/comments",
"/repos/:owner/:repo/comments/:comment_id",
"/repos/:owner/:repo/comments/:comment_id/reactions",
"/repos/:owner/:repo/commits",
"/repos/:owner/:repo/commits/:ref",
"/repos/:owner/:repo/commits/:ref/check-runs",
"/repos/:owner/:repo/commits/:ref/check-suites",
"/repos/:owner/:repo/commits/:ref/comments",
"/repos/:owner/:repo/commits/:ref/status",
"/repos/:owner/:repo/commits/:ref/statuses",
// "/repos/:owner/:repo/commits/:sha",
// "/repos/:owner/:repo/commits/:sha/comments",
"/repos/:owner/:repo/community/code_of_conduct",
"/repos/:owner/:repo/community/profile",
// "/repos/:owner/:repo/compare/:base...:head",
"/repos/:owner/:repo/contents/:path",
"/repos/:owner/:repo/contributors",
"/repos/:owner/:repo/deployments",
"/repos/:owner/:repo/deployments/:deployment_id",
"/repos/:owner/:repo/deployments/:deployment_id/statuses",
"/repos/:owner/:repo/deployments/:deployment_id/statuses/:status_id",
"/repos/:owner/:repo/downloads",
"/repos/:owner/:repo/downloads/:download_id",
"/repos/:owner/:repo/events",
"/repos/:owner/:repo/forks",
"/repos/:owner/:repo/git/blobs",
"/repos/:owner/:repo/git/blobs/:file_sha",
"/repos/:owner/:repo/git/commits",
"/repos/:owner/:repo/git/commits/:commit_sha",
"/repos/:owner/:repo/git/refs",
// "/repos/:owner/:repo/git/refs/:namespace",
"/repos/:owner/:repo/git/refs/:ref",
"/repos/:owner/:repo/git/tags",
"/repos/:owner/:repo/git/tags/:tag_sha",
"/repos/:owner/:repo/git/trees",
"/repos/:owner/:repo/git/trees/:tree_sha",
"/repos/:owner/:repo/hooks",
"/repos/:owner/:repo/hooks/:hook_id",
"/repos/:owner/:repo/hooks/:hook_id/pings",
"/repos/:owner/:repo/hooks/:hook_id/tests",
"/repos/:owner/:repo/import",
"/repos/:owner/:repo/import/authors",
"/repos/:owner/:repo/import/authors/:author_id",
"/repos/:owner/:repo/import/large_files",
"/repos/:owner/:repo/import/lfs",
"/repos/:owner/:repo/installation",
"/repos/:owner/:repo/interaction-limits",
"/repos/:owner/:repo/invitations",
"/repos/:owner/:repo/invitations/:invitation_id",
"/repos/:owner/:repo/issues",
"/repos/:owner/:repo/issues/comments",
"/repos/:owner/:repo/issues/comments/:comment_id",
"/repos/:owner/:repo/issues/comments/:comment_id/reactions",
"/repos/:owner/:repo/issues/events",
"/repos/:owner/:repo/issues/events/:event_id",
"/repos/:owner/:repo/issues/:number",
"/repos/:owner/:repo/issues/:number/assignees",
"/repos/:owner/:repo/issues/:number/comments",
"/repos/:owner/:repo/issues/:number/events",
"/repos/:owner/:repo/issues/:number/labels",
"/repos/:owner/:repo/issues/:number/labels/:name",
"/repos/:owner/:repo/issues/:number/lock",
"/repos/:owner/:repo/issues/:number/reactions",
"/repos/:owner/:repo/issues/:number/timeline",
"/repos/:owner/:repo/keys",
"/repos/:owner/:repo/keys/:key_id",
"/repos/:owner/:repo/labels",
// "/repos/:owner/:repo/labels/:current_name",
"/repos/:owner/:repo/labels/:name",
"/repos/:owner/:repo/languages",
"/repos/:owner/:repo/license",
"/repos/:owner/:repo/merges",
"/repos/:owner/:repo/milestones",
"/repos/:owner/:repo/milestones/:number",
"/repos/:owner/:repo/milestones/:number/labels",
"/repos/:owner/:repo/notifications",
"/repos/:owner/:repo/pages",
"/repos/:owner/:repo/pages/builds",
"/repos/:owner/:repo/pages/builds/latest",
"/repos/:owner/:repo/pages/builds/:build_id",
"/repos/:owner/:repo/projects",
"/repos/:owner/:repo/pulls",
"/repos/:owner/:repo/pulls/comments",
"/repos/:owner/:repo/pulls/comments/:comment_id",
"/repos/:owner/:repo/pulls/comments/:comment_id/reactions",
"/repos/:owner/:repo/pulls/:number",
"/repos/:owner/:repo/pulls/:number/comments",
"/repos/:owner/:repo/pulls/:number/commits",
"/repos/:owner/:repo/pulls/:number/files",
"/repos/:owner/:repo/pulls/:number/merge",
"/repos/:owner/:repo/pulls/:number/requested_reviewers",
"/repos/:owner/:repo/pulls/:number/reviews",
"/repos/:owner/:repo/pulls/:number/reviews/:review_id",
"/repos/:owner/:repo/pulls/:number/reviews/:review_id/comments",
"/repos/:owner/:repo/pulls/:number/reviews/:review_id/dismissals",
"/repos/:owner/:repo/pulls/:number/reviews/:review_id/events",
"/repos/:owner/:repo/readme",
"/repos/:owner/:repo/releases",
"/repos/:owner/:repo/releases/assets/:asset_id",
"/repos/:owner/:repo/releases/latest",
"/repos/:owner/:repo/releases/tags/:tag",
"/repos/:owner/:repo/releases/:release_id",
"/repos/:owner/:repo/releases/:release_id/assets",
"/repos/:owner/:repo/stargazers",
"/repos/:owner/:repo/stats/code_frequency",
"/repos/:owner/:repo/stats/commit_activity",
"/repos/:owner/:repo/stats/contributors",
"/repos/:owner/:repo/stats/participation",
"/repos/:owner/:repo/stats/punch_card",
"/repos/:owner/:repo/statuses/:sha",
"/repos/:owner/:repo/subscribers",
"/repos/:owner/:repo/subscription",
"/repos/:owner/:repo/tags",
"/repos/:owner/:repo/teams",
"/repos/:owner/:repo/topics",
"/repos/:owner/:repo/traffic/clones",
"/repos/:owner/:repo/traffic/popular/paths",
"/repos/:owner/:repo/traffic/popular/referrers",
"/repos/:owner/:repo/traffic/views",
"/repos/:owner/:repo/transfer",
"/repos/:owner/:repo/:archive_format/:ref",
"/repositories",
"/scim/v2/organizations/:org/Users",
"/scim/v2/organizations/:org/Users/:external_identity_guid",
"/search/code",
"/search/commits",
"/search/issues",
"/search/labels",
"/search/repositories",
"/search/topics",
"/search/users",
"/teams/:team_id",
"/teams/:team_id/discussions",
"/teams/:team_id/discussions/:discussion_number",
"/teams/:team_id/discussions/:discussion_number/comments",
"/teams/:team_id/discussions/:discussion_number/comments/:comment_number",
"/teams/:team_id/discussions/:discussion_number/comments/:comment_number/reactions",
"/teams/:team_id/discussions/:discussion_number/reactions",
"/teams/:team_id/invitations",
"/teams/:team_id/members",
"/teams/:team_id/members/:username",
"/teams/:team_id/memberships/:username",
"/teams/:team_id/projects",
"/teams/:team_id/projects/:project_id",
"/teams/:team_id/repos",
"/teams/:team_id/repos/:owner/:repo",
"/teams/:team_id/teams",
"/user",
"/user/blocks",
"/user/blocks/:username",
"/user/email/visibility",
"/user/emails",
"/user/followers",
"/user/following",
"/user/following/:username",
"/user/gpg_keys",
"/user/gpg_keys/:gpg_key_id",
"/user/installations",
"/user/installations/:installation_id/repositories",
"/user/installations/:installation_id/repositories/:repository_id",
"/user/issues",
"/user/keys",
"/user/keys/:key_id",
"/user/marketplace_purchases",
"/user/marketplace_purchases/stubbed",
"/user/memberships/orgs",
"/user/memberships/orgs/:org",
"/user/migrations",
"/user/migrations/:migration_id",
"/user/migrations/:migration_id/archive",
"/user/migrations/:migration_id/repos/:repo_name/lock",
"/user/orgs",
"/user/projects",
"/user/public_emails",
"/user/repos",
"/user/repository_invitations",
"/user/repository_invitations/:invitation_id",
"/user/starred",
"/user/starred/:owner/:repo",
"/user/subscriptions",
"/user/subscriptions/:owner/:repo",
"/user/teams",
"/users",
"/users/:username",
"/users/:username/events",
"/users/:username/events/orgs/:org",
"/users/:username/events/public",
"/users/:username/followers",
"/users/:username/following",
"/users/:username/following/:target_user",
"/users/:username/gists",
"/users/:username/gpg_keys",
"/users/:username/hovercard",
"/users/:username/installation",
"/users/:username/keys",
"/users/:username/orgs",
"/users/:username/projects",
"/users/:username/received_events",
"/users/:username/received_events/public",
"/users/:username/repos",
"/users/:username/starred",
"/users/:username/subscriptions",
"/:url",
];
pub const ROUTES_WITH_BRACES: [&str; 315] = [
"/app",
"/app-manifests/{code}/conversions",
"/app/installations",
"/app/installations/{installation_id}",
"/app/installations/{installation_id}/access_tokens",
"/applications/{client_id}/grants/{access_token}",
"/applications/{client_id}/tokens/{access_token}",
"/applications/grants",
"/applications/grants/{grant_id}",
"/apps/{app_slug}",
"/authorizations",
"/authorizations/{authorization_id}",
"/authorizations/clients/{client_id}",
"/authorizations/clients/{client_id}/{fingerprint}",
"/codes_of_conduct",
"/codes_of_conduct/{key}",
"/content_references/{content_reference_id}/attachments",
"/emojis",
"/events",
"/feeds",
"/gists",
"/gists/public",
"/gists/starred",
"/gists/{gist_id}",
"/gists/{gist_id}/comments",
"/gists/{gist_id}/comments/{comment_id}",
"/gists/{gist_id}/commits",
"/gists/{gist_id}/forks",
"/gists/{gist_id}/star",
"/gists/{gist_id}/{sha}",
"/gitignore/templates",
"/gitignore/templates/{name}",
"/installation/repositories",
"/issues",
"/legacy/issues/search/{owner}/{repository}/{state}/{keyword}",
"/legacy/repos/search/{keyword}",
"/legacy/user/email/{email}",
"/legacy/user/search/{keyword}",
"/licenses",
"/licenses/{license}",
"/markdown",
"/markdown/raw",
"/marketplace_listing/accounts/{account_id}",
"/marketplace_listing/plans",
"/marketplace_listing/plans/{plan_id}/accounts",
"/marketplace_listing/stubbed/accounts/{account_id}",
"/marketplace_listing/stubbed/plans",
"/marketplace_listing/stubbed/plans/{plan_id}/accounts",
"/meta",
"/networks/{owner}/{repo}/events",
"/notifications",
"/notifications/threads/{thread_id}",
"/notifications/threads/{thread_id}/subscription",
"/organizations",
"/orgs/{org}",
"/orgs/{org}/blocks",
"/orgs/{org}/blocks/{username}",
"/orgs/{org}/events",
"/orgs/{org}/hooks",
"/orgs/{org}/hooks/{hook_id}",
"/orgs/{org}/hooks/{hook_id}/pings",
"/orgs/{org}/installation",
"/orgs/{org}/interaction-limits",
"/orgs/{org}/invitations",
"/orgs/{org}/invitations/{invitation_id}/teams",
"/orgs/{org}/issues",
"/orgs/{org}/members",
"/orgs/{org}/members/{username}",
"/orgs/{org}/memberships/{username}",
"/orgs/{org}/migrations",
"/orgs/{org}/migrations/{migration_id}",
"/orgs/{org}/migrations/{migration_id}/archive",
"/orgs/{org}/migrations/{migration_id}/repos/{repo_name}/lock",
"/orgs/{org}/outside_collaborators",
"/orgs/{org}/outside_collaborators/{username}",
"/orgs/{org}/projects",
"/orgs/{org}/public_members",
"/orgs/{org}/public_members/{username}",
"/orgs/{org}/repos",
"/orgs/{org}/teams",
"/projects/{project_id}",
"/projects/{project_id}/collaborators",
"/projects/{project_id}/collaborators/{username}",
"/projects/{project_id}/collaborators/{username}/permission",
"/projects/{project_id}/columns",
"/projects/columns/cards/{card_id}",
"/projects/columns/cards/{card_id}/moves",
"/projects/columns/{column_id}",
"/projects/columns/{column_id}/cards",
"/projects/columns/{column_id}/moves",
"/rate_limit",
"/reactions/{reaction_id}",
"/repos/{owner}/{repo}",
"/repos/{owner}/{repo}/assignees",
"/repos/{owner}/{repo}/assignees/{assignee}",
"/repos/{owner}/{repo}/branches",
"/repos/{owner}/{repo}/branches/{branch}",
"/repos/{owner}/{repo}/branches/{branch}/protection",
"/repos/{owner}/{repo}/branches/{branch}/protection/enforce_admins",
"/repos/{owner}/{repo}/branches/{branch}/protection/required_pull_request_reviews",
"/repos/{owner}/{repo}/branches/{branch}/protection/required_signatures",
"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks",
"/repos/{owner}/{repo}/branches/{branch}/protection/required_status_checks/contexts",
"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions",
"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/teams",
"/repos/{owner}/{repo}/branches/{branch}/protection/restrictions/users",
"/repos/{owner}/{repo}/check-runs",
"/repos/{owner}/{repo}/check-runs/{check_run_id}",
"/repos/{owner}/{repo}/check-runs/{check_run_id}/annotations",
"/repos/{owner}/{repo}/check-suites",
"/repos/{owner}/{repo}/check-suites/preferences",
"/repos/{owner}/{repo}/check-suites/{check_suite_id}",
"/repos/{owner}/{repo}/check-suites/{check_suite_id}/check-runs",
"/repos/{owner}/{repo}/check-suites/{check_suite_id}/rerequest",
"/repos/{owner}/{repo}/collaborators",
"/repos/{owner}/{repo}/collaborators/{username}",
"/repos/{owner}/{repo}/collaborators/{username}/permission",
"/repos/{owner}/{repo}/comments",
"/repos/{owner}/{repo}/comments/{comment_id}",
"/repos/{owner}/{repo}/comments/{comment_id}/reactions",
"/repos/{owner}/{repo}/commits",
"/repos/{owner}/{repo}/commits/{ref}",
"/repos/{owner}/{repo}/commits/{ref}/check-runs",
"/repos/{owner}/{repo}/commits/{ref}/check-suites",
"/repos/{owner}/{repo}/commits/{ref}/comments",
"/repos/{owner}/{repo}/commits/{ref}/status",
"/repos/{owner}/{repo}/commits/{ref}/statuses",
// "/repos/{owner}/{repo}/commits/{sha}",
// "/repos/{owner}/{repo}/commits/{sha}/comments",
"/repos/{owner}/{repo}/community/code_of_conduct",
"/repos/{owner}/{repo}/community/profile",
// "/repos/{owner}/{repo}/compare/{base}...{head}",
"/repos/{owner}/{repo}/contents/{path}",
"/repos/{owner}/{repo}/contributors",
"/repos/{owner}/{repo}/deployments",
"/repos/{owner}/{repo}/deployments/{deployment_id}",
"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses",
"/repos/{owner}/{repo}/deployments/{deployment_id}/statuses/{status_id}",
"/repos/{owner}/{repo}/downloads",
"/repos/{owner}/{repo}/downloads/{download_id}",
"/repos/{owner}/{repo}/events",
"/repos/{owner}/{repo}/forks",
"/repos/{owner}/{repo}/git/blobs",
"/repos/{owner}/{repo}/git/blobs/{file_sha}",
"/repos/{owner}/{repo}/git/commits",
"/repos/{owner}/{repo}/git/commits/{commit_sha}",
"/repos/{owner}/{repo}/git/refs",
// "/repos/{owner}/{repo}/git/refs/{namespace}",
"/repos/{owner}/{repo}/git/refs/{ref}",
"/repos/{owner}/{repo}/git/tags",
"/repos/{owner}/{repo}/git/tags/{tag_sha}",
"/repos/{owner}/{repo}/git/trees",
"/repos/{owner}/{repo}/git/trees/{tree_sha}",
"/repos/{owner}/{repo}/hooks",
"/repos/{owner}/{repo}/hooks/{hook_id}",
"/repos/{owner}/{repo}/hooks/{hook_id}/pings",
"/repos/{owner}/{repo}/hooks/{hook_id}/tests",
"/repos/{owner}/{repo}/import",
"/repos/{owner}/{repo}/import/authors",
"/repos/{owner}/{repo}/import/authors/{author_id}",
"/repos/{owner}/{repo}/import/large_files",
"/repos/{owner}/{repo}/import/lfs",
"/repos/{owner}/{repo}/installation",
"/repos/{owner}/{repo}/interaction-limits",
"/repos/{owner}/{repo}/invitations",
"/repos/{owner}/{repo}/invitations/{invitation_id}",
"/repos/{owner}/{repo}/issues",
"/repos/{owner}/{repo}/issues/comments",
"/repos/{owner}/{repo}/issues/comments/{comment_id}",
"/repos/{owner}/{repo}/issues/comments/{comment_id}/reactions",
"/repos/{owner}/{repo}/issues/events",
"/repos/{owner}/{repo}/issues/events/{event_id}",
"/repos/{owner}/{repo}/issues/{number}",
"/repos/{owner}/{repo}/issues/{number}/assignees",
"/repos/{owner}/{repo}/issues/{number}/comments",
"/repos/{owner}/{repo}/issues/{number}/events",
"/repos/{owner}/{repo}/issues/{number}/labels",
"/repos/{owner}/{repo}/issues/{number}/labels/{name}",
"/repos/{owner}/{repo}/issues/{number}/lock",
"/repos/{owner}/{repo}/issues/{number}/reactions",
"/repos/{owner}/{repo}/issues/{number}/timeline",
"/repos/{owner}/{repo}/keys",
"/repos/{owner}/{repo}/keys/{key_id}",
"/repos/{owner}/{repo}/labels",
// "/repos/{owner}/{repo}/labels/{current_name}",
"/repos/{owner}/{repo}/labels/{name}",
"/repos/{owner}/{repo}/languages",
"/repos/{owner}/{repo}/license",
"/repos/{owner}/{repo}/merges",
"/repos/{owner}/{repo}/milestones",
"/repos/{owner}/{repo}/milestones/{number}",
"/repos/{owner}/{repo}/milestones/{number}/labels",
"/repos/{owner}/{repo}/notifications",
"/repos/{owner}/{repo}/pages",
"/repos/{owner}/{repo}/pages/builds",
"/repos/{owner}/{repo}/pages/builds/latest",
"/repos/{owner}/{repo}/pages/builds/{build_id}",
"/repos/{owner}/{repo}/projects",
"/repos/{owner}/{repo}/pulls",
"/repos/{owner}/{repo}/pulls/comments",
"/repos/{owner}/{repo}/pulls/comments/{comment_id}",
"/repos/{owner}/{repo}/pulls/comments/{comment_id}/reactions",
"/repos/{owner}/{repo}/pulls/{number}",
"/repos/{owner}/{repo}/pulls/{number}/comments",
"/repos/{owner}/{repo}/pulls/{number}/commits",
"/repos/{owner}/{repo}/pulls/{number}/files",
"/repos/{owner}/{repo}/pulls/{number}/merge",
"/repos/{owner}/{repo}/pulls/{number}/requested_reviewers",
"/repos/{owner}/{repo}/pulls/{number}/reviews",
"/repos/{owner}/{repo}/pulls/{number}/reviews/{review_id}",
"/repos/{owner}/{repo}/pulls/{number}/reviews/{review_id}/comments",
"/repos/{owner}/{repo}/pulls/{number}/reviews/{review_id}/dismissals",
"/repos/{owner}/{repo}/pulls/{number}/reviews/{review_id}/events",
"/repos/{owner}/{repo}/readme",
"/repos/{owner}/{repo}/releases",
"/repos/{owner}/{repo}/releases/assets/{asset_id}",
"/repos/{owner}/{repo}/releases/latest",
"/repos/{owner}/{repo}/releases/tags/{tag}",
"/repos/{owner}/{repo}/releases/{release_id}",
"/repos/{owner}/{repo}/releases/{release_id}/assets",
"/repos/{owner}/{repo}/stargazers",
"/repos/{owner}/{repo}/stats/code_frequency",
"/repos/{owner}/{repo}/stats/commit_activity",
"/repos/{owner}/{repo}/stats/contributors",
"/repos/{owner}/{repo}/stats/participation",
"/repos/{owner}/{repo}/stats/punch_card",
"/repos/{owner}/{repo}/statuses/{sha}",
"/repos/{owner}/{repo}/subscribers",
"/repos/{owner}/{repo}/subscription",
"/repos/{owner}/{repo}/tags",
"/repos/{owner}/{repo}/teams",
"/repos/{owner}/{repo}/topics",
"/repos/{owner}/{repo}/traffic/clones",
"/repos/{owner}/{repo}/traffic/popular/paths",
"/repos/{owner}/{repo}/traffic/popular/referrers",
"/repos/{owner}/{repo}/traffic/views",
"/repos/{owner}/{repo}/transfer",
"/repos/{owner}/{repo}/{archive_format}/{ref}",
"/repositories",
"/scim/v2/organizations/{org}/Users",
"/scim/v2/organizations/{org}/Users/{external_identity_guid}",
"/search/code",
"/search/commits",
"/search/issues",
"/search/labels",
"/search/repositories",
"/search/topics",
"/search/users",
"/teams/{team_id}",
"/teams/{team_id}/discussions",
"/teams/{team_id}/discussions/{discussion_number}",
"/teams/{team_id}/discussions/{discussion_number}/comments",
"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}",
"/teams/{team_id}/discussions/{discussion_number}/comments/{comment_number}/reactions",
"/teams/{team_id}/discussions/{discussion_number}/reactions",
"/teams/{team_id}/invitations",
"/teams/{team_id}/members",
"/teams/{team_id}/members/{username}",
"/teams/{team_id}/memberships/{username}",
"/teams/{team_id}/projects",
"/teams/{team_id}/projects/{project_id}",
"/teams/{team_id}/repos",
"/teams/{team_id}/repos/{owner}/{repo}",
"/teams/{team_id}/teams",
"/user",
"/user/blocks",
"/user/blocks/{username}",
"/user/email/visibility",
"/user/emails",
"/user/followers",
"/user/following",
"/user/following/{username}",
"/user/gpg_keys",
"/user/gpg_keys/{gpg_key_id}",
"/user/installations",
"/user/installations/{installation_id}/repositories",
"/user/installations/{installation_id}/repositories/{repository_id}",
"/user/issues",
"/user/keys",
"/user/keys/{key_id}",
"/user/marketplace_purchases",
"/user/marketplace_purchases/stubbed",
"/user/memberships/orgs",
"/user/memberships/orgs/{org}",
"/user/migrations",
"/user/migrations/{migration_id}",
"/user/migrations/{migration_id}/archive",
"/user/migrations/{migration_id}/repos/{repo_name}/lock",
"/user/orgs",
"/user/projects",
"/user/public_emails",
"/user/repos",
"/user/repository_invitations",
"/user/repository_invitations/{invitation_id}",
"/user/starred",
"/user/starred/{owner}/{repo}",
"/user/subscriptions",
"/user/subscriptions/{owner}/{repo}",
"/user/teams",
"/users",
"/users/{username}",
"/users/{username}/events",
"/users/{username}/events/orgs/{org}",
"/users/{username}/events/public",
"/users/{username}/followers",
"/users/{username}/following",
"/users/{username}/following/{target_user}",
"/users/{username}/gists",
"/users/{username}/gpg_keys",
"/users/{username}/hovercard",
"/users/{username}/installation",
"/users/{username}/keys",
"/users/{username}/orgs",
"/users/{username}/projects",
"/users/{username}/received_events",
"/users/{username}/received_events/public",
"/users/{username}/repos",
"/users/{username}/starred",
"/users/{username}/subscriptions",
"/{url}",
];
pub const ROUTES_URLS: [&str; 315] = [
"/app",
"/app-manifests/0/conversions",
"/app/installations",
"/app/installations/12345",
"/app/installations/12345/access_tokens",
"/applications/67890/grants/777cc25bacbd7cd4f4ddd631006a998fa74b5ed3",
"/applications/67890/tokens/777cc25bacbd7cd4f4ddd631006a998fa74b5ed3",
"/applications/grants",
"/applications/grants/1",
"/apps/rust-lang",
"/authorizations",
"/authorizations/144",
"/authorizations/clients/67890",
"/authorizations/clients/67890/233",
"/codes_of_conduct",
"/codes_of_conduct/377",
"/content_references/610/attachments",
"/emojis",
"/events",
"/feeds",
"/gists",
"/gists/public",
"/gists/starred",
"/gists/7c220d4d6d95b389816bc9d3fbb7a5d4",
"/gists/7c220d4d6d95b389816bc9d3fbb7a5d4/comments",
"/gists/7c220d4d6d95b389816bc9d3fbb7a5d4/comments/2274119",
"/gists/7c220d4d6d95b389816bc9d3fbb7a5d4/commits",
"/gists/7c220d4d6d95b389816bc9d3fbb7a5d4/forks",
"/gists/7c220d4d6d95b389816bc9d3fbb7a5d4/star",
"/gists/7c220d4d6d95b389816bc9d3fbb7a5d4/2aa4c46cfdd726e97360c2734835aa3515e8c858",
"/gitignore/templates",
"/gitignore/templates/rust",
"/installation/repositories",
"/issues",
"/legacy/issues/search/rust-lang/rust/987/1597",
"/legacy/repos/search/1597",
"/legacy/user/email/rust@rust-lang.org",
"/legacy/user/search/1597",
"/licenses",
"/licenses/mit",
"/markdown",
"/markdown/raw",
"/marketplace_listing/accounts/rust-lang",
"/marketplace_listing/plans",
"/marketplace_listing/plans/987/accounts",
"/marketplace_listing/stubbed/accounts/rust-lang",
"/marketplace_listing/stubbed/plans",
"/marketplace_listing/stubbed/plans/987/accounts",
"/meta",
"/networks/rust-lang/rust/events",
"/notifications",
"/notifications/threads/233",
"/notifications/threads/233/subscription",
"/organizations",
"/orgs/rust-lang",
"/orgs/rust-lang/blocks",
"/orgs/rust-lang/blocks/alexcrichton",
"/orgs/rust-lang/events",
"/orgs/rust-lang/hooks",
"/orgs/rust-lang/hooks/1357908642",
"/orgs/rust-lang/hooks/1357908642/pings",
"/orgs/rust-lang/installation",
"/orgs/rust-lang/interaction-limits",
"/orgs/rust-lang/invitations",
"/orgs/rust-lang/invitations/c9f8304351ad4223e4f618e9a329b2b94776b25e/teams",
"/orgs/rust-lang/issues",
"/orgs/rust-lang/members",
"/orgs/rust-lang/members/alexcrichton",
"/orgs/rust-lang/memberships/alexcrichton",
"/orgs/rust-lang/migrations",
"/orgs/rust-lang/migrations/233",
"/orgs/rust-lang/migrations/233/archive",
"/orgs/rust-lang/migrations/233/repos/rust/lock",
"/orgs/rust-lang/outside_collaborators",
"/orgs/rust-lang/outside_collaborators/alexcrichton",
"/orgs/rust-lang/projects",
"/orgs/rust-lang/public_members",
"/orgs/rust-lang/public_members/alexcrichton",
"/orgs/rust-lang/repos",
"/orgs/rust-lang/teams",
"/projects/13",
"/projects/13/collaborators",
"/projects/13/collaborators/alexcrichton",
"/projects/13/collaborators/alexcrichton/permission",
"/projects/13/columns",
"/projects/columns/cards/16266729",
"/projects/columns/cards/16266729/moves",
"/projects/columns/3953507",
"/projects/columns/3953507/cards",
"/projects/columns/3953507/moves",
"/rate_limit",
"/reactions/3",
"/repos/rust-lang/rust",
"/repos/rust-lang/rust/assignees",
"/repos/rust-lang/rust/assignees/rust",
"/repos/rust-lang/rust/branches",
"/repos/rust-lang/rust/branches/master",
"/repos/rust-lang/rust/branches/master/protection",
"/repos/rust-lang/rust/branches/master/protection/enforce_admins",
"/repos/rust-lang/rust/branches/master/protection/required_pull_request_reviews",
"/repos/rust-lang/rust/branches/master/protection/required_signatures",
"/repos/rust-lang/rust/branches/master/protection/required_status_checks",
"/repos/rust-lang/rust/branches/master/protection/required_status_checks/contexts",
"/repos/rust-lang/rust/branches/master/protection/restrictions",
"/repos/rust-lang/rust/branches/master/protection/restrictions/teams",
"/repos/rust-lang/rust/branches/master/protection/restrictions/users",
"/repos/rust-lang/rust/check-runs",
"/repos/rust-lang/rust/check-runs/987",
"/repos/rust-lang/rust/check-runs/987/annotations",
"/repos/rust-lang/rust/check-suites",
"/repos/rust-lang/rust/check-suites/preferences",
"/repos/rust-lang/rust/check-suites/610",
"/repos/rust-lang/rust/check-suites/610/check-runs",
"/repos/rust-lang/rust/check-suites/610/rerequest",
"/repos/rust-lang/rust/collaborators",
"/repos/rust-lang/rust/collaborators/alexcrichton",
"/repos/rust-lang/rust/collaborators/alexcrichton/permission",
"/repos/rust-lang/rust/comments",
"/repos/rust-lang/rust/comments/2274119",
"/repos/rust-lang/rust/comments/2274119/reactions",
"/repos/rust-lang/rust/commits",
"/repos/rust-lang/rust/commits/2aa4c46cfdd726e97360c2734835aa3515e8c858",
"/repos/rust-lang/rust/commits/2aa4c46cfdd726e97360c2734835aa3515e8c858/check-runs",
"/repos/rust-lang/rust/commits/2aa4c46cfdd726e97360c2734835aa3515e8c858/check-suites",
"/repos/rust-lang/rust/commits/2aa4c46cfdd726e97360c2734835aa3515e8c858/comments",
"/repos/rust-lang/rust/commits/2aa4c46cfdd726e97360c2734835aa3515e8c858/status",
"/repos/rust-lang/rust/commits/2aa4c46cfdd726e97360c2734835aa3515e8c858/statuses",
// "/repos/rust-lang/rust/commits/2aa4c46cfdd726e97360c2734835aa3515e8c858",
// "/repos/rust-lang/rust/commits/2aa4c46cfdd726e97360c2734835aa3515e8c858/comments",
"/repos/rust-lang/rust/community/code_of_conduct",
"/repos/rust-lang/rust/community/profile",
// "/repos/rust-lang/rust/compare/master...dev",
"/repos/rust-lang/rust/contents/rust",
"/repos/rust-lang/rust/contributors",
"/repos/rust-lang/rust/deployments",
"/repos/rust-lang/rust/deployments/610",
"/repos/rust-lang/rust/deployments/610/statuses",
"/repos/rust-lang/rust/deployments/610/statuses/2",
| rust | Apache-2.0 | b4275047bf9cdad6cd890b19d9c9d6ce9389b094 | 2026-01-04T20:22:55.187428Z | true |
viz-rs/path-tree | https://github.com/viz-rs/path-tree/blob/b4275047bf9cdad6cd890b19d9c9d6ce9389b094/fuzz/fuzz_targets/insert_and_find.rs | fuzz/fuzz_targets/insert_and_find.rs | #![no_main]
use libfuzzer_sys::fuzz_target;
fuzz_target!(|data: (Vec<(String, i32)>, String, Option<bool>)| {
let mut tree = path_tree::PathTree::new();
for (path, num) in &data.0 {
tree.insert(path, num);
}
match data.2 {
None => {
let _ = tree.find(&data.1);
}
Some(b) => {
if !b {
let _ = tree.find(&data.1);
}
}
}
});
| rust | Apache-2.0 | b4275047bf9cdad6cd890b19d9c9d6ce9389b094 | 2026-01-04T20:22:55.187428Z | false |
viz-rs/path-tree | https://github.com/viz-rs/path-tree/blob/b4275047bf9cdad6cd890b19d9c9d6ce9389b094/benches/bench.rs | benches/bench.rs | #![allow(unused_must_use)]
#[path = "../tests/fixtures/github.rs"]
mod github;
use github::*;
use criterion::*;
use actix_router::{Path as ActixPath, Router as ActixRouter};
use ntex_router::{Path as NtexPath, Router as NtexRouter};
use path_table::PathTable;
use path_tree::PathTree;
use route_recognizer::Router as RRRouter;
// use gonzales::RouterBuilder;
use matchit::Router as MatchitRouter;
fn bench_path_insert(c: &mut Criterion) {
let mut group = c.benchmark_group("path_insert");
group
.bench_function("actix_router_path", |b| {
let mut router = ActixRouter::<usize>::build();
b.iter(|| {
for (i, r) in ROUTES_WITH_BRACES.iter().enumerate() {
router.path(*r, i);
}
})
})
.bench_function("ntex_router_path", |b| {
let mut router = NtexRouter::<usize>::build();
b.iter(|| {
for (i, r) in ROUTES_WITH_BRACES.iter().enumerate() {
router.path(*r, i);
}
})
})
.bench_function("path_table_setup", |b| {
let mut table: PathTable<usize> = PathTable::new();
b.iter(|| {
for (i, r) in ROUTES_WITH_BRACES.iter().enumerate() {
*table.setup(r) = i;
}
})
})
.bench_function("path_tree_insert", |b| {
let mut tree: PathTree<usize> = PathTree::new();
b.iter(|| {
for (i, r) in ROUTES_WITH_COLON.iter().enumerate() {
tree.insert(r, i);
}
})
})
.bench_function("matchit_insert", |b| {
let mut matcher = MatchitRouter::new();
b.iter(|| {
for (i, r) in ROUTES_WITH_BRACES.iter().enumerate() {
let _ = matcher.insert(*r, i);
}
})
})
.bench_function("route_recognizer_add", |b| {
let mut router = RRRouter::<usize>::new();
b.iter(|| {
for (i, r) in ROUTES_WITH_COLON.iter().enumerate() {
router.add(r, i);
}
})
})
/*
.bench_function("gonzales_route", |b| {
let mut router = RouterBuilder::new();
b.iter(|| {
for (_i, r) in ROUTES_WITH_COLON.iter().enumerate() {
router.build([r]);
}
})
})
*/
.sample_size(20);
group.finish()
}
fn bench_path_find(c: &mut Criterion) {
let mut group = c.benchmark_group("path_find");
group
.bench_function("actix_router_recognize", |b| {
let mut router = ActixRouter::<usize>::build();
for (i, r) in ROUTES_WITH_BRACES.iter().enumerate() {
router.path(*r, i);
}
let router = router.finish();
b.iter(|| {
for (i, r) in ROUTES_URLS.iter().enumerate() {
let mut path = ActixPath::new(*r);
let n = router.recognize(&mut path).unwrap();
assert_eq!(*n.0, i);
}
})
})
.bench_function("ntex_router_recognize", |b| {
let mut router = NtexRouter::<usize>::build();
for (i, r) in ROUTES_WITH_BRACES.iter().enumerate() {
router.path(*r, i);
}
let router = router.finish();
b.iter(|| {
for (i, r) in ROUTES_URLS.iter().enumerate() {
let mut path = NtexPath::new(*r);
let n = router.recognize(&mut path).unwrap();
assert_eq!(*n.0, i);
}
})
})
.bench_function("path_table_route", |b| {
let mut table: PathTable<usize> = PathTable::new();
for (i, r) in ROUTES_WITH_BRACES.iter().enumerate() {
*table.setup(r) = i;
}
b.iter(|| {
for (i, r) in ROUTES_URLS.iter().enumerate() {
let n = table.route(r).unwrap();
assert_eq!(*n.0, i);
}
})
})
.bench_function("path_tree_find", |b| {
let mut tree: PathTree<usize> = PathTree::new();
for (i, r) in ROUTES_WITH_COLON.iter().enumerate() {
tree.insert(r, i);
}
b.iter(|| {
for (i, r) in ROUTES_URLS.iter().enumerate() {
let n = tree.find(r).unwrap();
assert_eq!(*n.0, i);
}
})
})
.bench_function("matchit_at", |b| {
let mut matcher = MatchitRouter::new();
for (i, r) in ROUTES_WITH_BRACES.iter().enumerate() {
let _ = matcher.insert(*r, i);
}
b.iter(|| {
for (i, r) in ROUTES_URLS.iter().enumerate() {
let n = matcher.at(r).unwrap();
assert_eq!(*n.value, i);
}
})
})
.bench_function("route_recognizer_recognize", |b| {
let mut router = RRRouter::<usize>::new();
for (i, r) in ROUTES_WITH_COLON.iter().enumerate() {
router.add(r, i);
}
b.iter(|| {
for (i, r) in ROUTES_URLS.iter().enumerate() {
let n = router.recognize(r).unwrap();
assert_eq!(**n.handler(), i);
}
})
})
/*
.bench_function("gonzales_route", |b| {
let router = RouterBuilder::new()
.ascii_case_insensitive(false)
.build(ROUTES_WITH_BRACES);
b.iter(|| {
for (_i, r) in ROUTES_URLS.iter().enumerate() {
// let n = router.route(r).unwrap();
// assert_eq!(n.get_index(), i);
black_box(router.route(r));
}
})
})
*/
.sample_size(12);
group.finish();
}
criterion_group!(benches, bench_path_insert, bench_path_find);
criterion_main!(benches);
| rust | Apache-2.0 | b4275047bf9cdad6cd890b19d9c9d6ce9389b094 | 2026-01-04T20:22:55.187428Z | false |
viz-rs/path-tree | https://github.com/viz-rs/path-tree/blob/b4275047bf9cdad6cd890b19d9c9d6ce9389b094/examples/hello.rs | examples/hello.rs | #![allow(unused_must_use)]
use std::{convert::Infallible, future::Future, net::SocketAddr, pin::Pin, sync::Arc};
use bytes::Bytes;
use http_body_util::Full;
use hyper::{
body::Incoming, server::conn::http1, service::service_fn, Request, Response, StatusCode,
};
use hyper_util::rt::TokioIo;
use path_tree::PathTree;
use tokio::net::TcpListener;
static NOT_FOUND: &[u8] = b"Not Found";
type Params = Vec<(String, String)>;
type Body = Full<Bytes>;
trait Handler: Send + Sync + 'static {
fn call<'a>(
&'a self,
req: Request<Incoming>,
) -> Pin<Box<dyn Future<Output = Response<Body>> + Send + 'a>>;
}
impl<F, R> Handler for F
where
F: Send + Sync + 'static + Fn(Request<Incoming>) -> R,
R: Future<Output = Response<Full<Bytes>>> + Send + 'static,
{
fn call<'a>(
&'a self,
req: Request<Incoming>,
) -> Pin<Box<dyn Future<Output = Response<Body>> + Send + 'a>> {
let fut = (self)(req);
Box::pin(fut)
}
}
async fn index(_: Request<Incoming>) -> Response<Body> {
Response::new(Body::from("Hello, Web!"))
}
async fn hello_world(req: Request<Incoming>) -> Response<Body> {
let params = req.extensions().get::<Params>().unwrap();
let mut s = String::new();
s.push_str("Hello, World!\n");
for (_, v) in params {
s.push_str(&format!("param = {v}"));
}
Response::new(Body::from(s))
}
async fn hello_user(req: Request<Incoming>) -> Response<Body> {
let params = req.extensions().get::<Params>().unwrap();
let mut s = String::new();
s.push_str("Hello, ");
for (k, v) in params {
s.push_str(&format!("{k} = {v}"));
}
s.push('!');
Response::new(Body::from(s))
}
async fn hello_rust(_: Request<Incoming>) -> Response<Body> {
Response::new(Body::from("Hello, Rust!"))
}
async fn login(_req: Request<Incoming>) -> Response<Body> {
Response::new(Body::from("I'm logined!"))
}
#[tokio::main]
async fn main() -> Result<(), Box<dyn std::error::Error + Send + Sync>> {
let addr: SocketAddr = ([127, 0, 0, 1], 3000).into();
let listener = TcpListener::bind(addr).await?;
// /
// βββ GET/ β’0
// β βββ hello/
// β β βββ : β’2
// β βββ rust β’3
// β βββ ** β’1
// βββ POST/login β’4
let mut tree = PathTree::<Box<dyn Handler>>::new();
tree.insert("/GET/", Box::new(index));
tree.insert("/GET/*", Box::new(hello_world));
tree.insert("/GET/hello/:name", Box::new(hello_user));
tree.insert("/GET/rust", Box::new(hello_rust));
tree.insert("/POST/login", Box::new(login));
let tree = Arc::new(tree);
loop {
let (tcp, _) = listener.accept().await?;
let io = TokioIo::new(tcp);
let router = Arc::clone(&tree);
tokio::task::spawn(async move {
if let Err(err) = http1::Builder::new()
.serve_connection(
io,
service_fn(move |mut req| {
let router = router.clone();
let path = "/".to_owned() + req.method().as_str() + req.uri().path();
async move {
Ok::<_, Infallible>(match router.find(&path) {
Some((handler, route)) => {
let p = route
.params()
.iter()
.map(|p| (p.0.to_string(), p.1.to_string()))
.collect::<Params>();
req.extensions_mut().insert(p);
handler.call(req).await
}
None => Response::builder()
.status(StatusCode::NOT_FOUND)
.body(NOT_FOUND.into())
.unwrap(),
})
}
}),
)
.await
{
println!("Error serving connection: {:?}", err);
}
});
}
}
| rust | Apache-2.0 | b4275047bf9cdad6cd890b19d9c9d6ce9389b094 | 2026-01-04T20:22:55.187428Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/webi_output/src/progress_info_graph_calculator.rs | crate/webi_output/src/progress_info_graph_calculator.rs | use std::collections::HashMap;
use dot_ix_model::info_graph::InfoGraph;
use peace_flow_rt::Flow;
use peace_item_model::ItemId;
use peace_progress_model::ProgressStatus;
/// Calculates the actual `InfoGraph` for a flow's progress.
#[derive(Debug)]
pub struct ProgressInfoGraphCalculator;
impl ProgressInfoGraphCalculator {
/// Returns the calculated `InfoGraph`.
pub fn calculate<E>(
flow: &Flow<E>,
item_progress_statuses: &HashMap<ItemId, ProgressStatus>,
) -> InfoGraph
where
E: 'static,
{
let flow_spec_info = flow.flow_spec_info();
flow_spec_info.to_progress_info_graph_with_statuses(item_progress_statuses)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/webi_output/src/lib.rs | crate/webi_output/src/lib.rs | //! Web interface output for the peace automation framework.
pub use crate::{
cmd_exec_spawn_ctx::CmdExecSpawnCtx, cmd_exec_to_leptos_ctx::CmdExecToLeptosCtx,
flow_webi_fns::FlowWebiFns, webi_output::WebiOutput, webi_server::WebiServer,
};
#[cfg(feature = "item_interactions")]
pub use crate::outcome_info_graph_calculator::OutcomeInfoGraphCalculator;
#[cfg(feature = "output_progress")]
pub use crate::progress_info_graph_calculator::ProgressInfoGraphCalculator;
pub mod assets;
mod cmd_exec_spawn_ctx;
mod cmd_exec_to_leptos_ctx;
mod flow_webi_fns;
mod webi_output;
mod webi_server;
#[cfg(feature = "item_interactions")]
mod outcome_info_graph_calculator;
#[cfg(feature = "output_progress")]
mod progress_info_graph_calculator;
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/webi_output/src/webi_server.rs | crate/webi_output/src/webi_server.rs | use std::{net::SocketAddr, path::Path};
use axum::Router;
use futures::stream::{self, StreamExt, TryStreamExt};
use leptos::view;
use leptos_axum::LeptosRoutes;
use peace_cmd_model::CmdExecutionId;
use peace_flow_model::FlowId;
use peace_webi_components::{App, ChildrenFn, Shell};
use peace_webi_model::{WebUiUpdate, WebiError};
use tokio::{io::AsyncWriteExt, sync::mpsc};
use tower_http::services::ServeDir;
use crate::{CmdExecSpawnCtx, CmdExecToLeptosCtx, FlowWebiFns, WebiOutput};
#[cfg(feature = "item_interactions")]
use crate::OutcomeInfoGraphCalculator;
#[cfg(feature = "item_interactions")]
use peace_webi_model::OutcomeInfoGraphVariant;
#[cfg(feature = "output_progress")]
use std::collections::HashMap;
#[cfg(feature = "output_progress")]
use peace_progress_model::CmdBlockItemInteractionType;
#[cfg(feature = "output_progress")]
use crate::ProgressInfoGraphCalculator;
/// Maximum number of `CmdExecReqT`s to queue up.
const CMD_EXEC_REQUEST_CHANNEL_LIMIT: usize = 1024;
/// Web server that runs the following work:
///
/// * UI rendering with `leptos`.
/// * `CmdExecution` through receiving requests from leptos.
/// * Updating `leptos` context data for components to render.
#[derive(Debug)]
pub struct WebiServer;
impl WebiServer {
/// Starts the web server.
///
/// ## Parameters
///
/// * `socker_addr`: IP address and port to listen on.
pub async fn start<E, CmdExecReqT>(
app_name: String,
socket_addr: Option<SocketAddr>,
app_home: ChildrenFn,
flow_webi_fns: FlowWebiFns<E, CmdExecReqT>,
) -> Result<(), WebiError>
where
E: 'static,
CmdExecReqT: Send + 'static,
{
let cmd_exec_to_leptos_ctx = CmdExecToLeptosCtx::default();
let (cmd_exec_request_tx, cmd_exec_request_rx) =
mpsc::channel::<CmdExecReqT>(CMD_EXEC_REQUEST_CHANNEL_LIMIT);
let flow_id = flow_webi_fns.flow.flow_id().clone();
let webi_server_task = Self::leptos_server_start(
app_name,
socket_addr,
app_home,
cmd_exec_request_tx,
cmd_exec_to_leptos_ctx.clone(),
flow_id,
);
let cmd_execution_listener_task = Self::cmd_execution_listener(
cmd_exec_request_rx,
cmd_exec_to_leptos_ctx,
flow_webi_fns,
);
tokio::try_join!(webi_server_task, cmd_execution_listener_task).map(|((), ())| ())
}
async fn cmd_execution_listener<E, CmdExecReqT>(
mut cmd_exec_request_rx: mpsc::Receiver<CmdExecReqT>,
cmd_exec_to_leptos_ctx: CmdExecToLeptosCtx,
flow_webi_fns: FlowWebiFns<E, CmdExecReqT>,
) -> Result<(), WebiError>
where
E: 'static,
CmdExecReqT: Send + 'static,
{
// TODO:
//
// 1. Listen for params specs
// 2. Instantiate `CmdCtx`
// 3. Calculate example `info_graph`s
// 4. Insert into `FlowInfoGraphs`.
let FlowWebiFns {
flow,
outcome_info_graph_fn,
cmd_exec_spawn_fn,
} = flow_webi_fns;
let outcome_info_graph_fn = &outcome_info_graph_fn;
#[cfg(feature = "output_progress")]
let item_count = flow.graph().node_count();
let CmdExecToLeptosCtx {
flow_progress_example_info_graphs,
flow_progress_actual_info_graphs,
flow_outcome_example_info_graphs,
flow_outcome_actual_info_graphs,
mut cmd_exec_interrupt_txs,
cmd_execution_id: cmd_execution_id_arc,
} = cmd_exec_to_leptos_ctx;
// TODO: remove this mock?
// Should we have one WebiOutput for the whole server? doesn't seem right.
let (web_ui_update_tx, _web_ui_update_rx) = mpsc::channel(128);
let mut webi_output_mock = WebiOutput::new(web_ui_update_tx);
let flow_spec_info = flow.flow_spec_info();
let flow_progress_example_info_graph = flow_spec_info.to_progress_info_graph();
let flow_outcome_example_info_graph = outcome_info_graph_fn(
&mut webi_output_mock,
Box::new(|flow, params_specs, mapping_fn_reg, resources| {
#[cfg(all(feature = "item_interactions", feature = "item_state_example"))]
{
OutcomeInfoGraphCalculator::calculate::<E>(
flow,
params_specs,
mapping_fn_reg,
resources,
OutcomeInfoGraphVariant::Example,
)
}
#[cfg(not(all(feature = "item_interactions", feature = "item_state_example")))]
{
use dot_ix_model::info_graph::InfoGraph;
let _flow = flow;
let _params_specs = params_specs;
let _mapping_fn_reg = mapping_fn_reg;
let _resources = resources;
InfoGraph::default()
}
}),
)
.await;
let flow_id = flow.flow_id();
if let Ok(mut flow_progress_example_info_graphs) = flow_progress_example_info_graphs.lock()
{
flow_progress_example_info_graphs
.insert(flow_id.clone(), flow_progress_example_info_graph);
}
if let Ok(mut flow_outcome_example_info_graphs) = flow_outcome_example_info_graphs.lock() {
flow_outcome_example_info_graphs
.insert(flow_id.clone(), flow_outcome_example_info_graph);
}
let (cmd_exec_join_handle_tx, mut cmd_exec_join_handle_rx) = mpsc::channel(128);
let cmd_execution_starter_task = async move {
let mut cmd_execution_id_next = CmdExecutionId::new(0u64);
while let Some(cmd_exec_request) = cmd_exec_request_rx.recv().await {
// Note: If we don't have a large enough buffer, we might drop updates,
// which may mean a node appears to still be in progress when it has completed.
let (web_ui_update_tx, web_ui_update_rx) = mpsc::channel(1024);
let webi_output = WebiOutput::new(web_ui_update_tx);
let webi_output_clone = webi_output.clone_without_tx();
let CmdExecSpawnCtx {
interrupt_tx,
cmd_exec_task,
} = cmd_exec_spawn_fn(webi_output, cmd_exec_request);
let cmd_execution_id = cmd_execution_id_next;
cmd_execution_id_next = CmdExecutionId::new(*cmd_execution_id + 1);
cmd_exec_join_handle_tx
.send((cmd_execution_id, webi_output_clone, web_ui_update_rx))
.await
.expect("Expected `cmd_execution_receiver_task` to be running.");
if let Some(interrupt_tx) = interrupt_tx {
cmd_exec_interrupt_txs.insert(cmd_execution_id, interrupt_tx);
}
let local_set = tokio::task::LocalSet::new();
local_set
.run_until(async move {
let cmd_exec_join_handle = tokio::task::spawn_local(cmd_exec_task);
match cmd_exec_join_handle.await {
Ok(()) => {
eprintln!("`cmd_execution` completed.")
}
Err(join_error) => {
eprintln!(
"Failed to wait for `cmd_execution` to complete. {join_error}"
);
// TODO: insert CmdExecution failed status
}
}
})
.await;
}
};
let cmd_execution_receiver_task = async move {
while let Some((cmd_execution_id, mut webi_output, mut web_ui_update_rx)) =
cmd_exec_join_handle_rx.recv().await
{
if let Ok(mut cmd_execution_id_guard) = cmd_execution_id_arc.lock() {
*cmd_execution_id_guard = Some(cmd_execution_id);
} else {
eprintln!("Unable to insert cmd_execution_id to run: {cmd_execution_id:?}");
}
let flow_progress_actual_info_graphs = flow_progress_actual_info_graphs.clone();
let flow_outcome_actual_info_graphs = flow_outcome_actual_info_graphs.clone();
#[cfg(not(feature = "output_progress"))]
let flow_spec_info = flow_spec_info.clone();
#[cfg(feature = "output_progress")]
let flow_ref = &flow;
// Update `InfoGraph`s every time `progress_update` is sent.
let web_ui_update_task = async move {
// Keep track of item execution progress.
#[cfg(feature = "output_progress")]
let mut cmd_block_item_interaction_type = CmdBlockItemInteractionType::Local;
#[cfg(feature = "output_progress")]
let mut item_location_states = HashMap::with_capacity(item_count);
#[cfg(feature = "output_progress")]
let mut item_progress_statuses = HashMap::with_capacity(item_count);
while let Some(web_ui_update) = web_ui_update_rx.recv().await {
match web_ui_update {
#[cfg(feature = "output_progress")]
WebUiUpdate::CmdBlockStart {
cmd_block_item_interaction_type:
cmd_block_item_interaction_type_next,
} => {
cmd_block_item_interaction_type =
cmd_block_item_interaction_type_next;
}
#[cfg(feature = "output_progress")]
WebUiUpdate::ItemLocationState {
item_id,
item_location_state,
} => {
item_location_states.insert(item_id, item_location_state);
}
#[cfg(feature = "output_progress")]
WebUiUpdate::ItemProgressStatus {
item_id,
progress_status,
progress_limit: _,
message: _,
} => {
item_progress_statuses.insert(item_id, progress_status);
}
WebUiUpdate::Markdown { markdown_src: _ } => {
// TODO: render markdown on server side?
}
}
#[cfg(not(feature = "output_progress"))]
let flow_progress_actual_info_graph =
flow_spec_info.to_progress_info_graph();
#[cfg(feature = "output_progress")]
let flow_progress_actual_info_graph =
ProgressInfoGraphCalculator::calculate(
flow_ref,
&item_progress_statuses,
);
if let Ok(mut flow_progress_actual_info_graphs) =
flow_progress_actual_info_graphs.lock()
{
flow_progress_actual_info_graphs
.insert(cmd_execution_id, flow_progress_actual_info_graph);
}
#[cfg(feature = "output_progress")]
let item_location_states_snapshot = item_location_states.clone();
#[cfg(feature = "output_progress")]
let item_progress_statuses_snapshot = item_progress_statuses.clone();
let flow_outcome_actual_info_graph = outcome_info_graph_fn(
&mut webi_output,
Box::new(move |flow, params_specs, mapping_fn_reg, resources| {
#[cfg(feature = "output_progress")]
let item_location_states = item_location_states_snapshot.clone();
#[cfg(feature = "output_progress")]
let item_progress_statuses =
item_progress_statuses_snapshot.clone();
#[cfg(feature = "item_interactions")]
{
OutcomeInfoGraphCalculator::calculate::<E>(
flow,
params_specs,
mapping_fn_reg,
resources,
OutcomeInfoGraphVariant::Current {
#[cfg(feature = "output_progress")]
cmd_block_item_interaction_type,
#[cfg(feature = "output_progress")]
item_location_states,
#[cfg(feature = "output_progress")]
item_progress_statuses,
},
)
}
#[cfg(not(feature = "item_interactions"))]
{
use dot_ix_model::info_graph::InfoGraph;
let _flow = flow;
let _params_specs = params_specs;
let _resources = resources;
InfoGraph::default()
}
}),
)
.await;
if let Ok(mut flow_outcome_actual_info_graphs) =
flow_outcome_actual_info_graphs.lock()
{
flow_outcome_actual_info_graphs
.insert(cmd_execution_id, flow_outcome_actual_info_graph);
}
}
};
// ```rust,ignore
// let cmd_exec_join_task = async move {
// match cmd_exec_join_handle.await {
// Ok(()) => {}
// Err(join_error) => {
// eprintln!(
// "Failed to wait for `cmd_execution` to complete. {join_error}"
// );
// // TODO: insert CmdExecution failed status
// }
// }
// };
// ```
// tokio::join!(web_ui_update_task, cmd_exec_join_task);
// TODO: spawn task and go back to waiting, instead of waiting for this task, or
// drop the txes
web_ui_update_task.await;
}
};
tokio::join!(cmd_execution_starter_task, cmd_execution_receiver_task);
Ok(())
}
/// Analogous to the `main()` function in an SSR app built using
/// `cargo-leptos`.
///
/// # Parameters
///
/// * `socket_addr`: IP address and port to listen on.
async fn leptos_server_start<CmdExecReqT>(
app_name: String,
socket_addr: Option<SocketAddr>,
app_home: ChildrenFn,
cmd_exec_request_tx: mpsc::Sender<CmdExecReqT>,
cmd_exec_to_leptos_ctx: CmdExecToLeptosCtx,
flow_id: FlowId,
) -> Result<(), WebiError>
where
CmdExecReqT: Send + 'static,
{
// Setting this to None means we'll be using cargo-leptos and its env vars
let conf = leptos::prelude::get_configuration(None)
.map_err(|error| WebiError::LeptosConfigRead { error })?;
let leptos_options = conf.leptos_options;
let socket_addr = socket_addr.unwrap_or(leptos_options.site_addr);
let routes = leptos_axum::generate_route_list({
let app_home = app_home.clone();
move || {
let app_home = app_home.clone();
view! { <App app_home /> }
}
});
stream::iter(crate::assets::ASSETS.iter())
.map(Result::<_, WebiError>::Ok)
.try_for_each(|(path_str, contents)| async move {
let asset_path = Path::new(path_str);
if let Some(parent_dir) = asset_path.parent() {
tokio::fs::create_dir_all(parent_dir)
.await
.map_err(|error| WebiError::AssetDirCreate {
asset_dir: parent_dir.to_path_buf(),
error,
})?;
}
tokio::fs::write(asset_path, contents)
.await
.map_err(|error| WebiError::AssetWrite {
asset_path: asset_path.to_path_buf(),
error,
})?;
Ok(())
})
.await?;
let leptos_options_for_shell = leptos_options.clone();
let router = Router::new()
// serve the pkg directory
.nest_service(
"/pkg",
ServeDir::new(Path::new(&*leptos_options.site_pkg_dir)),
)
// serve the `webi` directory
.nest_service("/webi", ServeDir::new(Path::new("webi")))
// serve the SSR rendered homepage
.leptos_routes_with_context(
&leptos_options,
routes,
move || {
// Add global state here if necessary
let CmdExecToLeptosCtx {
flow_progress_example_info_graphs,
flow_progress_actual_info_graphs,
flow_outcome_example_info_graphs,
flow_outcome_actual_info_graphs,
cmd_exec_interrupt_txs,
cmd_execution_id,
} = cmd_exec_to_leptos_ctx.clone();
let (flow_id, flow_id_set) = leptos::prelude::signal(flow_id.clone());
leptos::context::provide_context(flow_id);
leptos::context::provide_context(flow_id_set);
leptos::context::provide_context(flow_progress_example_info_graphs.clone());
leptos::context::provide_context(flow_progress_actual_info_graphs.clone());
leptos::context::provide_context(flow_outcome_example_info_graphs.clone());
leptos::context::provide_context(flow_outcome_actual_info_graphs.clone());
leptos::context::provide_context(cmd_exec_interrupt_txs.clone());
leptos::context::provide_context(cmd_execution_id.clone());
leptos::context::provide_context(cmd_exec_request_tx.clone());
},
move || {
let app_home = app_home.clone();
Shell(app_name.clone(), leptos_options_for_shell.clone(), app_home)
},
)
.with_state(leptos_options);
let listener = tokio::net::TcpListener::bind(socket_addr)
.await
.unwrap_or_else(|e| panic!("Failed to listen on {socket_addr}. Error: {e}"));
let (Ok(()) | Err(_)) = tokio::io::stderr()
.write_all(format!("listening on http://{}\n", socket_addr).as_bytes())
.await;
let (Ok(()) | Err(_)) = tokio::io::stderr()
.write_all(
format!(
"working dir: {}\n",
std::env::current_dir().unwrap().display()
)
.as_bytes(),
)
.await;
axum::serve(listener, router)
.await
.map_err(|error| WebiError::ServerServe { socket_addr, error })
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/webi_output/src/outcome_info_graph_calculator.rs | crate/webi_output/src/outcome_info_graph_calculator.rs | use std::{collections::HashMap, marker::PhantomData, str::FromStr};
use dot_ix_model::{
common::{
graphviz_attrs::{EdgeDir, PackMode, PackModeFlag},
AnyId, EdgeId, Edges, GraphvizAttrs, NodeHierarchy, NodeId, NodeNames, TagId, TagItems,
TagNames, TagStyles,
},
info_graph::{GraphDir, InfoGraph},
theme::{AnyIdOrDefaults, CssClassPartials, Theme, ThemeAttr, ThemeStyles},
};
use indexmap::IndexMap;
use peace_flow_rt::Flow;
use peace_item_interaction_model::{
ItemInteraction, ItemInteractionPull, ItemInteractionPush, ItemInteractionWithin, ItemLocation,
ItemLocationTree, ItemLocationType, ItemLocationsAndInteractions,
};
use peace_item_model::ItemId;
use peace_params::{MappingFnReg, ParamsSpecs};
use peace_resource_rt::{resources::ts::SetUp, Resources};
use peace_webi_model::OutcomeInfoGraphVariant;
use smallvec::SmallVec;
#[cfg(feature = "output_progress")]
use std::{collections::HashSet, ops::ControlFlow};
#[cfg(feature = "output_progress")]
use peace_item_interaction_model::{ItemLocationState, ItemLocationStateInProgress};
#[cfg(feature = "output_progress")]
use peace_progress_model::{CmdBlockItemInteractionType, ProgressComplete, ProgressStatus};
/// Calculates the example / actual `InfoGraph` for a flow's outcome.
#[derive(Debug)]
pub struct OutcomeInfoGraphCalculator;
impl OutcomeInfoGraphCalculator {
/// Returns the calculated `InfoGraph`.
pub fn calculate<E>(
flow: &Flow<E>,
params_specs: &ParamsSpecs,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
outcome_info_graph_variant: OutcomeInfoGraphVariant,
) -> InfoGraph
where
E: 'static,
{
let item_locations_and_interactions = match &outcome_info_graph_variant {
OutcomeInfoGraphVariant::Example => flow.item_locations_and_interactions_example(
params_specs,
mapping_fn_reg,
resources,
),
OutcomeInfoGraphVariant::Current { .. } => flow
.item_locations_and_interactions_current(params_specs, mapping_fn_reg, resources),
};
calculate_info_graph(
flow,
outcome_info_graph_variant,
item_locations_and_interactions,
)
}
}
fn calculate_info_graph<E>(
flow: &Flow<E>,
outcome_info_graph_variant: OutcomeInfoGraphVariant,
item_locations_and_interactions: ItemLocationsAndInteractions,
) -> InfoGraph
where
E: 'static,
{
let item_count = flow.graph().node_count();
let ItemLocationsAndInteractions {
item_location_trees,
item_to_item_interactions,
item_location_count,
#[cfg(feature = "output_progress")]
item_location_to_item_id_sets,
} = item_locations_and_interactions;
let node_id_mappings_and_hierarchy = node_id_mappings_and_hierarchy(
&item_location_trees,
item_location_count,
#[cfg(feature = "output_progress")]
&item_location_to_item_id_sets,
);
let NodeIdMappingsAndHierarchy {
node_id_to_item_locations,
mut item_location_to_node_id_segments,
node_hierarchy,
#[cfg(feature = "output_progress")]
node_id_to_item_id_sets,
} = node_id_mappings_and_hierarchy;
let node_names = node_id_to_item_locations.iter().fold(
NodeNames::with_capacity(item_location_count),
|mut node_names, (node_id, item_location)| {
node_names.insert(node_id.clone(), item_location.name().to_string());
node_names
},
);
let tags = match &outcome_info_graph_variant {
OutcomeInfoGraphVariant::Example => {
let tags = flow.graph().iter_insertion().fold(
TagNames::with_capacity(item_count),
|mut tags, item| {
let tag_name = item.interactions_tag_name();
// For some reason using `TagId::new(item.id().as_str())` causes an error to be
// highlighted on `flow.graph()`, rather than referring to `item.id()` as the
// cause of an extended borrow.
let item_id = item.id();
let tag_id = TagId::try_from(format!("tag_{item_id}"))
.expect("Expected `tag_id` from `item_id` to be valid.");
tags.insert(tag_id, tag_name);
tags
},
);
Some(tags)
}
OutcomeInfoGraphVariant::Current { .. } => None,
};
// 1. Each item interaction knows the `ItemLocation`s
// 2. We need to be able to translate from an `ItemLocation`, to the `NodeId`s
// that we need to link as edges.
// 3. We have a way to map from `ItemLocation` to `NodeId` using the
// `node_id_from_item_location` function.
// 4. So, either we calculate the `NodeId` from each `ItemLocation` in each
// interaction again, or `ItemLocation` must implement `Hash` and `Eq`, and
// look it up.
// 5. It already implements `Hash` and `Eq`, so let's construct a
// `Map<ItemLocation, NodeId>`.
// 6. Then we can iterate through `item_to_item_interactions`, and for each
// `ItemLocation`, look up the map from 5, and add an edge.
let item_interactions_process_ctx = ItemInteractionsProcessCtx {
outcome_info_graph_variant: &outcome_info_graph_variant,
item_count,
item_location_count,
item_to_item_interactions: &item_to_item_interactions,
node_id_to_item_locations: &node_id_to_item_locations,
item_location_to_node_id_segments: &mut item_location_to_node_id_segments,
};
let item_interactions_processed = process_item_interactions(item_interactions_process_ctx);
let ItemInteractionsProcessed {
edges,
graphviz_attrs,
mut theme,
tag_items,
tag_styles_focus,
} = item_interactions_processed;
theme_styles_augment(
&item_location_trees,
&node_id_to_item_locations,
&mut theme,
#[cfg(feature = "output_progress")]
&outcome_info_graph_variant,
#[cfg(feature = "output_progress")]
&node_id_to_item_id_sets,
);
let mut info_graph = InfoGraph::default()
.with_direction(GraphDir::Vertical)
.with_hierarchy(node_hierarchy)
.with_node_names(node_names)
.with_edges(edges)
.with_graphviz_attrs(graphviz_attrs)
.with_theme(theme)
.with_css(String::from(
r#"
@keyframes node-stroke-dashoffset-move {
0% { stroke-dashoffset: 0; }
100% { stroke-dashoffset: 30; }
}
@keyframes stroke-dashoffset-move {
0% { stroke-dashoffset: 136; }
100% { stroke-dashoffset: 0; }
}
@keyframes stroke-dashoffset-move-request {
0% { stroke-dashoffset: 0; }
100% { stroke-dashoffset: 198; }
}
@keyframes stroke-dashoffset-move-response {
0% { stroke-dashoffset: 0; }
100% { stroke-dashoffset: -218; }
}
"#,
));
if let Some(tags) = tags {
info_graph = info_graph.with_tags(tags)
}
if let Some(tag_items) = tag_items {
info_graph = info_graph.with_tag_items(tag_items)
}
if let Some(tag_styles_focus) = tag_styles_focus {
info_graph = info_graph.with_tag_styles_focus(tag_styles_focus)
}
info_graph
}
/// Adds styles for nodes based on what kind of [`ItemLocation`] they represent,
/// and their progress status.
fn theme_styles_augment(
item_location_trees: &[ItemLocationTree],
node_id_to_item_locations: &IndexMap<NodeId, &ItemLocation>,
theme: &mut Theme,
#[cfg(feature = "output_progress")] outcome_info_graph_variant: &OutcomeInfoGraphVariant,
#[cfg(feature = "output_progress")] node_id_to_item_id_sets: &HashMap<NodeId, HashSet<&ItemId>>,
) {
// Use light styling for `ItemLocationType::Group` nodes.
let mut css_class_partials_light = CssClassPartials::with_capacity(10);
css_class_partials_light.insert(ThemeAttr::StrokeStyle, "dotted".to_string());
css_class_partials_light.insert(ThemeAttr::StrokeShadeNormal, "300".to_string());
css_class_partials_light.insert(ThemeAttr::StrokeShadeHover, "300".to_string());
css_class_partials_light.insert(ThemeAttr::StrokeShadeFocus, "400".to_string());
css_class_partials_light.insert(ThemeAttr::StrokeShadeActive, "500".to_string());
css_class_partials_light.insert(ThemeAttr::FillShadeNormal, "50".to_string());
css_class_partials_light.insert(ThemeAttr::FillShadeHover, "50".to_string());
css_class_partials_light.insert(ThemeAttr::FillShadeFocus, "100".to_string());
css_class_partials_light.insert(ThemeAttr::FillShadeActive, "200".to_string());
node_id_to_item_locations
.iter()
.for_each(|(node_id, item_location)| {
let css_class_partials = match item_location.r#type() {
ItemLocationType::Host => {
// Specially colour some known hosts.
match item_location.name() {
ItemLocation::LOCALHOST => {
let mut css_class_partials = css_class_partials_light.clone();
css_class_partials.insert(ThemeAttr::ShapeColor, "blue".to_string());
Some(css_class_partials)
}
"github.com" => {
let mut css_class_partials = css_class_partials_light.clone();
css_class_partials.insert(ThemeAttr::ShapeColor, "purple".to_string());
Some(css_class_partials)
}
_ => {
// Not all hosts should be styled light -- only the ones that are top
// level. i.e. if the host is inside a group, then it should likely be
// styled darker.
if item_location_trees
.iter()
.map(ItemLocationTree::item_location)
.any(|item_location_top_level| {
item_location_top_level == *item_location
})
{
Some(css_class_partials_light.clone())
} else {
None
}
}
}
}
ItemLocationType::Group => Some(css_class_partials_light.clone()),
ItemLocationType::Path => {
#[cfg(not(feature = "output_progress"))]
{
None
}
#[cfg(feature = "output_progress")]
{
if let OutcomeInfoGraphVariant::Current {
cmd_block_item_interaction_type,
item_location_states,
item_progress_statuses,
} = outcome_info_graph_variant
{
let cmd_block_item_interaction_type = *cmd_block_item_interaction_type;
// 1. For each of the item IDs that referred to this node
node_id_to_item_id_sets
.get(node_id)
// 2. Look up their statuses
.and_then(|referrer_item_ids| {
// When we have multiple referrers referring to the same item
// location, we need to prioritize `ItemLocationState::Exists`
// over `ItemLocationState::NotExists`.
//
// This is because:
//
// * For ensure, a predecessor would have created the item
// beforehand, so we don't want a successor's `NotExists`
// state to hide the node. e.g. a file download before
// uploading it somewhere else.
//
// * For clean, the successor's destination would be removed,
// but not its source. e.g. the upload would remove the
// destination file, and not the source, which would later be
// removed by the predecessor.
//
// Which means we need to prioritize the styles from the most
// recent completed / in-progress `referrer_item_id`.
let (ControlFlow::Continue(item_location_state)
| ControlFlow::Break(item_location_state)) = referrer_item_ids
.iter()
.filter_map(|referrer_item_id| {
item_location_states.get(*referrer_item_id).copied()
})
.try_fold(
ItemLocationState::NotExists,
|_item_location_state_acc, item_location_state| {
match item_location_state {
ItemLocationState::Exists => {
ControlFlow::Break(
ItemLocationState::Exists,
)
}
ItemLocationState::NotExists => {
ControlFlow::Continue(
ItemLocationState::NotExists,
)
}
}
},
);
let (ControlFlow::Continue(progress_status)
| ControlFlow::Break(progress_status)) = referrer_item_ids
.iter()
.filter_map(|referrer_item_id| {
item_progress_statuses.get(*referrer_item_id).cloned()
})
.try_fold(
ProgressStatus::Initialized,
|_progress_status_acc, progress_status| {
match progress_status {
ProgressStatus::Initialized => {
ControlFlow::Continue(progress_status)
}
ProgressStatus::Interrupted => {
ControlFlow::Continue(progress_status)
}
ProgressStatus::ExecPending => {
ControlFlow::Continue(progress_status)
}
ProgressStatus::Queued => {
ControlFlow::Continue(progress_status)
}
ProgressStatus::Running
| ProgressStatus::RunningStalled
| ProgressStatus::UserPending => {
ControlFlow::Break(progress_status)
}
ProgressStatus::Complete(
ProgressComplete::Success,
) => ControlFlow::Continue(progress_status),
ProgressStatus::Complete(
ProgressComplete::Fail,
) => ControlFlow::Break(progress_status),
}
},
);
node_css_class_partials(
cmd_block_item_interaction_type,
item_location_state,
progress_status,
)
})
} else {
None
}
}
}
};
if let Some(css_class_partials) = css_class_partials {
theme.styles.insert(
AnyIdOrDefaults::AnyId(AnyId::from(node_id.clone())),
css_class_partials,
);
}
});
}
#[cfg(feature = "output_progress")]
fn node_css_class_partials(
cmd_block_item_interaction_type: CmdBlockItemInteractionType,
item_location_state: ItemLocationState,
progress_status: ProgressStatus,
) -> Option<CssClassPartials> {
// 3. If any of them are running or complete, then it should be visible.
let item_location_state_in_progress = ItemLocationStateInProgress::from(
cmd_block_item_interaction_type,
item_location_state,
progress_status,
);
match item_location_state_in_progress {
ItemLocationStateInProgress::NotExists => {
let mut css_class_partials = CssClassPartials::with_capacity(1);
css_class_partials.insert(ThemeAttr::Extra, "opacity-[0.15]".to_string());
Some(css_class_partials)
}
ItemLocationStateInProgress::NotExistsError => {
let mut css_class_partials = CssClassPartials::with_capacity(2);
css_class_partials.insert(ThemeAttr::ShapeColor, "red".to_string());
css_class_partials.insert(ThemeAttr::StrokeStyle, "dashed".to_string());
Some(css_class_partials)
}
ItemLocationStateInProgress::DiscoverInProgress => {
let mut css_class_partials = CssClassPartials::with_capacity(3);
css_class_partials.insert(ThemeAttr::ShapeColor, "yellow".to_string());
css_class_partials.insert(ThemeAttr::StrokeStyle, "dashed".to_string());
css_class_partials.insert(
ThemeAttr::Animate,
"[node-stroke-dashoffset-move_1s_linear_infinite]".to_string(),
);
Some(css_class_partials)
}
ItemLocationStateInProgress::DiscoverError => {
let mut css_class_partials = CssClassPartials::with_capacity(3);
css_class_partials.insert(ThemeAttr::ShapeColor, "amber".to_string());
css_class_partials.insert(ThemeAttr::StrokeStyle, "dashed".to_string());
css_class_partials.insert(
ThemeAttr::Animate,
"[node-stroke-dashoffset-move_1s_linear_infinite]".to_string(),
);
Some(css_class_partials)
}
ItemLocationStateInProgress::CreateInProgress => {
let mut css_class_partials = CssClassPartials::with_capacity(3);
css_class_partials.insert(ThemeAttr::ShapeColor, "blue".to_string());
css_class_partials.insert(ThemeAttr::StrokeStyle, "dashed".to_string());
css_class_partials.insert(
ThemeAttr::Animate,
"[node-stroke-dashoffset-move_1s_linear_infinite]".to_string(),
);
Some(css_class_partials)
}
ItemLocationStateInProgress::ModificationInProgress => {
let mut css_class_partials = CssClassPartials::with_capacity(3);
css_class_partials.insert(ThemeAttr::ShapeColor, "blue".to_string());
css_class_partials.insert(ThemeAttr::StrokeStyle, "dashed".to_string());
css_class_partials.insert(
ThemeAttr::Animate,
"[node-stroke-dashoffset-move_1s_linear_infinite]".to_string(),
);
Some(css_class_partials)
}
ItemLocationStateInProgress::ExistsOk => None,
ItemLocationStateInProgress::ExistsError => {
let mut css_class_partials = CssClassPartials::with_capacity(1);
css_class_partials.insert(ThemeAttr::ShapeColor, "red".to_string());
Some(css_class_partials)
}
}
}
/// Calculates edges and styles from `ItemInteraction`s.
///
/// # Code
///
/// Currently the code goes through the `ItemInteraction`s, and populates the
/// `Edges`, `Theme`, and `GraphvizAttrs`. This isn't as "clean" as iterating
/// over the `ItemInteraction`s per attribute that is to be computed, but
/// perhaps populating the different structures per `ItemInteraction` is more
/// manageable than remembering to update multiple functions.
fn process_item_interactions(
item_interactions_process_ctx: ItemInteractionsProcessCtx<'_, '_>,
) -> ItemInteractionsProcessed {
let ItemInteractionsProcessCtx {
outcome_info_graph_variant,
item_count,
item_location_count,
item_to_item_interactions,
node_id_to_item_locations,
item_location_to_node_id_segments,
} = item_interactions_process_ctx;
let edges = Edges::with_capacity(item_location_count);
let mut graphviz_attrs = GraphvizAttrs::new().with_edge_minlen_default(3);
graphviz_attrs.pack_mode = PackMode::Array {
flags: vec![PackModeFlag::T],
number: None,
};
let mut theme = Theme::new();
theme.styles.insert(AnyIdOrDefaults::EdgeDefaults, {
let mut css_class_partials = CssClassPartials::with_capacity(1);
css_class_partials.insert(ThemeAttr::Visibility, "invisible".to_string());
css_class_partials
});
match outcome_info_graph_variant {
OutcomeInfoGraphVariant::Example => {
let item_interactions_processed_example = ItemInteractionsProcessedExample {
edges,
graphviz_attrs,
tag_items: TagItems::with_capacity(item_count),
tag_styles_focus: TagStyles::new(),
};
let item_interactions_processed_example = process_item_interactions_example(
item_to_item_interactions,
item_interactions_processed_example,
node_id_to_item_locations,
item_location_to_node_id_segments,
);
let ItemInteractionsProcessedExample {
edges,
graphviz_attrs,
tag_items,
tag_styles_focus,
} = item_interactions_processed_example;
ItemInteractionsProcessed {
edges,
graphviz_attrs,
theme,
tag_items: Some(tag_items),
tag_styles_focus: Some(tag_styles_focus),
}
}
OutcomeInfoGraphVariant::Current {
#[cfg(feature = "output_progress")]
cmd_block_item_interaction_type: _,
#[cfg(feature = "output_progress")]
item_location_states: _,
#[cfg(feature = "output_progress")]
item_progress_statuses,
} => {
let item_interactions_processed_current = ItemInteractionsProcessedCurrent {
edges,
graphviz_attrs,
theme,
#[cfg(feature = "output_progress")]
item_progress_statuses,
marker: PhantomData,
};
let item_interactions_processed_current = process_item_interactions_current(
item_to_item_interactions,
item_interactions_processed_current,
node_id_to_item_locations,
item_location_to_node_id_segments,
);
let ItemInteractionsProcessedCurrent {
edges,
graphviz_attrs,
theme,
#[cfg(feature = "output_progress")]
item_progress_statuses: _,
marker: PhantomData,
} = item_interactions_processed_current;
ItemInteractionsProcessed {
edges,
graphviz_attrs,
theme,
tag_items: None,
tag_styles_focus: None,
}
}
}
}
/// Processes `ItemInteraction`s from all items for an example `InfoGraph`
/// diagram.
///
/// This means:
///
/// 1. Each node should be fully visible.
/// 2. Edges should be visible when a tag is clicked.
fn process_item_interactions_example<'item_location>(
item_to_item_interactions: &'item_location IndexMap<ItemId, Vec<ItemInteraction>>,
item_interactions_processed_example: ItemInteractionsProcessedExample,
node_id_to_item_locations: &IndexMap<NodeId, &'item_location ItemLocation>,
item_location_to_node_id_segments: &mut HashMap<&'item_location ItemLocation, String>,
) -> ItemInteractionsProcessedExample {
item_to_item_interactions
.iter()
// The capacity could be worked out through the sum of all `ItemInteraction`s.
//
// For now we just use the `item_location_count` as a close approximation.
.fold(
item_interactions_processed_example,
// Use `item_id` to compute `tags` and `tag_items`.
|item_interactions_processed_example, (item_id, item_interactions)| {
let ItemInteractionsProcessedExample {
mut edges,
mut graphviz_attrs,
mut tag_items,
mut tag_styles_focus,
} = item_interactions_processed_example;
let tag_id = TagId::try_from(format!("tag_{item_id}"))
.expect("Expected `tag_id` from `item_id` to be valid.");
let tag_id = &tag_id;
item_interactions.iter().for_each(|item_interaction| {
let item_interactions_processing_ctx = ItemInteractionsProcessingCtxExample {
node_id_to_item_locations,
item_location_to_node_id_segments,
edges: &mut edges,
tag_items: &mut tag_items,
tag_id,
tag_styles_focus: &mut tag_styles_focus,
};
match item_interaction {
ItemInteraction::Push(item_interaction_push) => {
process_item_interaction_push_example(
item_interactions_processing_ctx,
item_interaction_push,
);
}
ItemInteraction::Pull(item_interaction_pull) => {
process_item_interaction_pull_example(
item_interactions_processing_ctx,
&mut graphviz_attrs,
item_interaction_pull,
);
}
ItemInteraction::Within(item_interaction_within) => {
process_item_interaction_within_example(
item_interactions_processing_ctx,
item_interaction_within,
);
}
}
});
ItemInteractionsProcessedExample {
edges,
graphviz_attrs,
tag_items,
tag_styles_focus,
}
},
)
}
/// Inserts an edge between the `from` and `to` nodes of an
/// [`ItemInteractionPush`].
fn process_item_interaction_push_example<'item_location>(
item_interactions_processing_ctx: ItemInteractionsProcessingCtxExample<'_, 'item_location>,
item_interaction_push: &'item_location ItemInteractionPush,
) {
let ItemInteractionsProcessingCtxExample {
node_id_to_item_locations,
item_location_to_node_id_segments,
edges,
tag_items,
tag_id,
tag_styles_focus,
} = item_interactions_processing_ctx;
// Use the innermost node from the interaction.
// The `NodeId` for the item location is the longest node ID that contains all
// of the `node_id_segment`s of the selected item location's ancestors.
let node_id_from = {
let node_id_from = node_id_from_item_location(item_location_to_node_id_segments, || {
item_interaction_push.location_from().iter()
});
node_id_with_ancestor_find(node_id_to_item_locations, node_id_from)
};
// Use the innermost node.
let node_id_to = {
let node_id_to = node_id_from_item_location(item_location_to_node_id_segments, || {
item_interaction_push.location_to().iter()
});
node_id_with_ancestor_find(node_id_to_item_locations, node_id_to)
};
let edge_id = EdgeId::from_str(&format!("{node_id_from}___{node_id_to}"))
.expect("Expected edge ID from item location ID to be valid for `edge_id`.");
edges.insert(edge_id.clone(), [node_id_from.clone(), node_id_to.clone()]);
if let Some(any_ids) = tag_items.get_mut(tag_id) {
any_ids.push(AnyId::from(node_id_from.clone()));
any_ids.push(AnyId::from(node_id_to.clone()));
any_ids.push(AnyId::from(edge_id.clone()));
} else {
let any_ids = vec![
AnyId::from(node_id_from.clone()),
AnyId::from(node_id_to.clone()),
AnyId::from(edge_id.clone()),
];
tag_items.insert(tag_id.clone(), any_ids);
}
let css_class_partials = item_interaction_push_css_class_partials(true);
if let Some(theme_styles) = tag_styles_focus.get_mut(tag_id) {
theme_styles.insert(
AnyIdOrDefaults::AnyId(AnyId::from(edge_id)),
css_class_partials,
);
} else {
let mut theme_styles = ThemeStyles::with_capacity(1);
theme_styles.insert(
AnyIdOrDefaults::AnyId(AnyId::from(edge_id)),
css_class_partials,
);
tag_styles_focus.insert(tag_id.clone(), theme_styles);
}
}
/// Inserts an edge between the `client` and `server` nodes of an
/// [`ItemInteractionPull`].
fn process_item_interaction_pull_example<'item_location>(
item_interactions_processing_ctx: ItemInteractionsProcessingCtxExample<'_, 'item_location>,
graphviz_attrs: &mut GraphvizAttrs,
item_interaction_pull: &'item_location ItemInteractionPull,
) {
let ItemInteractionsProcessingCtxExample {
node_id_to_item_locations,
item_location_to_node_id_segments,
edges,
tag_items,
tag_id,
tag_styles_focus,
} = item_interactions_processing_ctx;
// Use the outermost `ItemLocationType::Host` node.
let node_id_client = {
let item_location_ancestors_iter = || {
let mut host_found = false;
let mut location_from_iter = item_interaction_pull.location_client().iter();
std::iter::from_fn(move || {
if host_found {
return None;
}
let item_location = location_from_iter.next();
if let Some(item_location) = item_location.as_ref() {
host_found = item_location.r#type() == ItemLocationType::Host;
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | true |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/webi_output/src/assets.rs | crate/webi_output/src/assets.rs | //! Assets to include with the shipped binary, but we can't get it bundled
//! automatically with either `trunk` or `cargo-leptos`. So we include the
//! bytes.
/// Styles shipped with `peace_webi_output`.
pub const PEACE_FAVICON_ICO: &[u8] = include_bytes!("assets/favicon.ico");
/// Provides CSS `@font-face` definitions for the following font families:
///
/// * liberationmono
/// * liberationmono-bold
/// * liberationmono-italic
/// * liberationmono-bold-italic
pub const FONTS_LIBERATION_MONO_CSS_FONT_FACES: &[u8] = include_bytes!("assets/fonts/fonts.css");
/// The Liberation Mono Regular font bytes.
pub const FONTS_LIBERATION_MONO_REGULAR: &[u8] =
include_bytes!("assets/fonts/liberationmono/LiberationMono-Regular-webfont.woff");
/// The Liberation Mono Bold font bytes.
pub const FONTS_LIBERATION_MONO_BOLD: &[u8] =
include_bytes!("assets/fonts/liberationmono/LiberationMono-Bold-webfont.woff");
/// The Liberation Mono Italic font bytes.
pub const FONTS_LIBERATION_MONO_ITALIC: &[u8] =
include_bytes!("assets/fonts/liberationmono/LiberationMono-Italic-webfont.woff");
/// The Liberation Mono Bold Italic font bytes.
pub const FONTS_LIBERATION_MONO_BOLD_ITALIC: &[u8] =
include_bytes!("assets/fonts/liberationmono/LiberationMono-BoldItalic-webfont.woff");
/// List of assets -- path and content.
pub const ASSETS: &[(&str, &[u8])] = &[
("webi/favicon.ico", PEACE_FAVICON_ICO),
("webi/fonts/fonts.css", FONTS_LIBERATION_MONO_CSS_FONT_FACES),
(
"webi/fonts/liberationmono/LiberationMono-Regular-webfont.woff",
FONTS_LIBERATION_MONO_REGULAR,
),
(
"webi/fonts/liberationmono/LiberationMono-Bold-webfont.woff",
FONTS_LIBERATION_MONO_BOLD,
),
(
"webi/fonts/liberationmono/LiberationMono-Italic-webfont.woff",
FONTS_LIBERATION_MONO_ITALIC,
),
(
"webi/fonts/liberationmono/LiberationMono-BoldItalic-webfont.woff",
FONTS_LIBERATION_MONO_BOLD_ITALIC,
),
];
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/webi_output/src/cmd_exec_spawn_ctx.rs | crate/webi_output/src/cmd_exec_spawn_ctx.rs | use std::fmt;
use futures::future::LocalBoxFuture;
use interruptible::InterruptSignal;
use tokio::sync::mpsc;
/// The `CmdExecution` task, as well as the channels to interact with it.
///
/// This is returned by the `CmdExecution` spawning function for each `Flow`,
/// which is registered in `WebiOutput`.
pub struct CmdExecSpawnCtx {
/// Channel sender to send an `InterruptSignal`.
pub interrupt_tx: Option<mpsc::Sender<InterruptSignal>>,
/// The `*Cmd::run(..)` task.
///
/// This will be submitted to the tokio task pool.
pub cmd_exec_task: LocalBoxFuture<'static, ()>,
}
impl fmt::Debug for CmdExecSpawnCtx {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("CmdExecSpawnCtx")
.field("interrupt_tx", &self.interrupt_tx)
.field("cmd_exec_task", &stringify!(LocalBoxFuture<'static, ()>))
.finish()
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/webi_output/src/flow_webi_fns.rs | crate/webi_output/src/flow_webi_fns.rs | use std::fmt::{self, Debug};
use dot_ix_model::info_graph::InfoGraph;
use futures::future::LocalBoxFuture;
use peace_flow_rt::Flow;
use peace_params::{MappingFnReg, ParamsSpecs};
use peace_resource_rt::{resources::ts::SetUp, Resources};
use crate::{CmdExecSpawnCtx, WebiOutput};
/// Functions to work with `Flow` from the [`WebiOutput`].
///
/// [`WebiOutput`]: crate::WebiOutput
pub struct FlowWebiFns<E, CmdExecReqT> {
/// Flow to work with.
pub flow: Flow<E>,
/// Function to create an `InfoGraph`.
///
/// # Design
///
/// This circumvents the need to pass around the specific `CmdCtx` type by
/// getting the tool developer to instantiate the `CmdCtx`, then pass the
/// relevant parameters to the function that we pass in.
#[allow(clippy::type_complexity)]
pub outcome_info_graph_fn: Box<
dyn Fn(
&mut WebiOutput,
Box<dyn Fn(&Flow<E>, &ParamsSpecs, &MappingFnReg, &Resources<SetUp>) -> InfoGraph>,
) -> LocalBoxFuture<InfoGraph>,
>,
/// Function to spawn a `CmdExecution`.
///
/// # Design
///
/// Because passing around a `CmdCtx` with all its type parameters is
/// technically high cost, all of the `CmdCtx` instantiation logic, and
/// `*Cmd::run` invocations are hidden behind a plain function interface.
///
/// Currently we only take in one function. In the future this should take
/// in a `Map<CmdExecutionRequest, CmdExecutionSpawnFn>`
pub cmd_exec_spawn_fn: Box<dyn Fn(WebiOutput, CmdExecReqT) -> CmdExecSpawnCtx>,
}
impl<E, CmdExecReqT> fmt::Debug for FlowWebiFns<E, CmdExecReqT>
where
E: Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let cmd_exec_req_t_type_name = std::any::type_name::<CmdExecReqT>();
f.debug_struct("FlowWebiFns")
.field("flow", &self.flow)
.field(
"outcome_info_graph_fn",
&stringify!(
Box<
dyn Fn(
&mut WebiOutput,
Box<dyn Fn(&Flow<E>, &ParamsSpecs, &Resources<SetUp>) -> InfoGraph>,
) -> LocalBoxFuture<InfoGraph>,
>
),
)
.field(
"cmd_exec_spawn_fn",
&format!("Box<dyn Fn(WebiOutput, {cmd_exec_req_t_type_name}) -> CmdExecSpawnCtx>"),
)
.finish()
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/webi_output/src/webi_output.rs | crate/webi_output/src/webi_output.rs | use std::convert::Infallible;
use peace_fmt::Presentable;
use peace_rt_model_core::{async_trait, output::OutputWrite};
use peace_webi_model::WebUiUpdate;
use tokio::sync::mpsc;
cfg_if::cfg_if! {
if #[cfg(feature = "output_progress")] {
use peace_item_model::ItemId;
use peace_item_interaction_model::ItemLocationState;
use peace_progress_model::{
CmdBlockItemInteractionType,
// ProgressComplete,
// ProgressLimit,
// ProgressStatus,
ProgressTracker,
// ProgressUpdate,
ProgressUpdateAndId,
};
use peace_rt_model_core::CmdProgressTracker;
}
}
/// An `OutputWrite` implementation that writes to web elements.
#[derive(Clone, Debug)]
pub struct WebiOutput {
/// Channel to notify the `CmdExecution` task / `leptos` to update the UI.
///
/// This can be:
///
/// * Progress `InfoGraph` diagram needs to be restyled.
/// * Outcome `InfoGraph` diagram needs to be restyled.
/// * Execution result to show to the user.
web_ui_update_tx: Option<mpsc::Sender<WebUiUpdate>>,
}
impl WebiOutput {
/// Returns a new `WebiOutput`.
pub fn new(web_ui_update_tx: mpsc::Sender<WebUiUpdate>) -> Self {
Self {
web_ui_update_tx: Some(web_ui_update_tx),
}
}
pub fn clone_without_tx(&self) -> Self {
Self {
web_ui_update_tx: None,
}
}
}
#[async_trait(?Send)]
impl OutputWrite for WebiOutput {
type Error = Infallible;
#[cfg(feature = "output_progress")]
async fn progress_begin(&mut self, _cmd_progress_tracker: &CmdProgressTracker) {}
#[cfg(feature = "output_progress")]
async fn cmd_block_start(
&mut self,
cmd_block_item_interaction_type: CmdBlockItemInteractionType,
) {
if let Some(web_ui_update_tx) = self.web_ui_update_tx.as_ref() {
let _result = web_ui_update_tx
.send(WebUiUpdate::CmdBlockStart {
cmd_block_item_interaction_type,
})
.await;
}
}
#[cfg(feature = "output_progress")]
async fn item_location_state(
&mut self,
item_id: ItemId,
item_location_state: ItemLocationState,
) {
if let Some(web_ui_update_tx) = self.web_ui_update_tx.as_ref() {
let _result = web_ui_update_tx
.send(WebUiUpdate::ItemLocationState {
item_id,
item_location_state,
})
.await;
}
}
#[cfg(feature = "output_progress")]
async fn progress_update(
&mut self,
progress_tracker: &ProgressTracker,
progress_update_and_id: &ProgressUpdateAndId,
) {
let item_id = progress_update_and_id.item_id.clone();
let progress_status = progress_tracker.progress_status().clone();
let progress_limit = progress_tracker.progress_limit();
let message = progress_tracker.message().cloned();
if let Some(web_ui_update_tx) = self.web_ui_update_tx.as_ref() {
let _result = web_ui_update_tx
.send(WebUiUpdate::ItemProgressStatus {
item_id,
progress_status,
progress_limit,
message,
})
.await;
}
}
#[cfg(feature = "output_progress")]
async fn progress_end(&mut self, _cmd_progress_tracker: &CmdProgressTracker) {}
async fn present<P>(&mut self, _presentable: P) -> Result<(), Self::Error>
where
P: Presentable,
{
// TODO: send rendered / renderable markdown to the channel.
let markdown_src = String::from("TODO: presentable.present(md_presenter).");
if let Some(web_ui_update_tx) = self.web_ui_update_tx.as_ref() {
let _result = web_ui_update_tx
.send(WebUiUpdate::Markdown { markdown_src })
.await;
}
Ok(())
}
#[cfg(not(feature = "error_reporting"))]
async fn write_err<AppErrorT>(&mut self, _error: &AppErrorT) -> Result<(), Self::Error>
where
AppErrorT: std::error::Error,
{
todo!()
}
#[cfg(feature = "error_reporting")]
async fn write_err<AppErrorT>(&mut self, _error: &AppErrorT) -> Result<(), Self::Error>
where
AppErrorT: miette::Diagnostic,
{
todo!()
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/webi_output/src/cmd_exec_to_leptos_ctx.rs | crate/webi_output/src/cmd_exec_to_leptos_ctx.rs | use std::{
collections::HashMap,
sync::{Arc, Mutex},
};
use interruptible::InterruptSignal;
use peace_cmd_model::CmdExecutionId;
use peace_flow_model::FlowId;
use tokio::sync::mpsc;
use peace_webi_model::{FlowOutcomeInfoGraphs, FlowProgressInfoGraphs};
/// The shared memory to write to to communicate between the `CmdExecution`s and
/// `leptos`.
#[derive(Clone, Debug, Default)]
pub struct CmdExecToLeptosCtx {
/// The example progress `InfoGraph` for all `CmdExecution`s.
///
/// Shared memory for `Map<CmdExecutionId, InfoGraph>`.
pub flow_progress_example_info_graphs: FlowProgressInfoGraphs<FlowId>,
/// The actual progress `InfoGraph` for all `CmdExecution`s.
///
/// Shared memory for `Map<CmdExecutionId, InfoGraph>`.
pub flow_progress_actual_info_graphs: FlowProgressInfoGraphs<CmdExecutionId>,
/// The example outcome `InfoGraph` for all `CmdExecution`s.
///
/// Shared memory for `Map<CmdExecutionId, InfoGraph>`.
pub flow_outcome_example_info_graphs: FlowOutcomeInfoGraphs<FlowId>,
/// The actual outcome `InfoGraph` for all `CmdExecution`s.
///
/// Shared memory for `Map<CmdExecutionId, InfoGraph>`.
pub flow_outcome_actual_info_graphs: FlowOutcomeInfoGraphs<CmdExecutionId>,
/// The interrupt channel sender for each `CmdExecution`.
pub cmd_exec_interrupt_txs: HashMap<CmdExecutionId, mpsc::Sender<InterruptSignal>>,
/// The `cmd_execution_id` of the active `CmdExecution`.
///
/// # Design
///
/// This should go away, and instead be a value returned to the client and
/// stored in the URL.
pub cmd_execution_id: Arc<Mutex<Option<CmdExecutionId>>>,
}
impl CmdExecToLeptosCtx {
/// Returns a new `CmdExecToLeptosCtx`.
pub fn new(
flow_progress_example_info_graphs: FlowProgressInfoGraphs<FlowId>,
flow_progress_actual_info_graphs: FlowProgressInfoGraphs<CmdExecutionId>,
flow_outcome_example_info_graphs: FlowOutcomeInfoGraphs<FlowId>,
flow_outcome_actual_info_graphs: FlowOutcomeInfoGraphs<CmdExecutionId>,
cmd_exec_interrupt_txs: HashMap<CmdExecutionId, mpsc::Sender<InterruptSignal>>,
cmd_execution_id: Arc<Mutex<Option<CmdExecutionId>>>,
) -> Self {
Self {
flow_progress_example_info_graphs,
flow_progress_actual_info_graphs,
flow_outcome_example_info_graphs,
flow_outcome_actual_info_graphs,
cmd_exec_interrupt_txs,
cmd_execution_id,
}
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/params_merge_ext.rs | crate/params/src/params_merge_ext.rs | use crate::Params;
/// Trait for merging `ParamsPartial` onto a `Params` object.
///
/// This is automatically implemented by [`#[derive(Params)]`].
///
/// [`#[derive(Params)]`]: peace_params_derive::Params
pub trait ParamsMergeExt: Params {
/// Moves the values from `Self::Partial` onto this `Params` object.
fn merge(&mut self, params_partial: <Self as Params>::Partial);
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/func.rs | crate/params/src/func.rs | use crate::MappingFn;
/// Provides a simpler constraint for the compiler to report to the developer.
///
/// Implemented for up to five arguments.
///
/// Instead of the detailed error message from the `From` trait:
///
/// ```md
/// the trait `From<(
/// std::option::Option<std::string::String>,
/// [closure@examples/envman/src/flows/app_upload_flow.rs:101:40: 101:73],
/// )>`
/// is not implemented for
/// `MappingFnImpl<
/// std::string::String,
/// [closure@examples/envman/src/flows/app_upload_flow.rs:101:40: 101:73],
/// _,
/// >`
/// ```
///
/// we get the much clearer:
///
/// ```md
/// the trait `Func<std::option::Option<std::string::String>, _>`
/// is not implemented for closure `[closure@examples/envman/src/flows/app_upload_flow.rs:101:40: 101:73]`
/// ```
pub trait Func<ReturnType, Args> {}
macro_rules! impl_func_for_f {
($($Arg:ident),+) => {
impl<T, F, $($Arg,)+> Func<T, ($($Arg,)+)> for F
where F: Fn($(&$Arg,)+) -> T {}
};
}
impl_func_for_f!(A0);
impl_func_for_f!(A0, A1);
impl_func_for_f!(A0, A1, A2);
impl_func_for_f!(A0, A1, A2, A3);
impl_func_for_f!(A0, A1, A2, A3, A4);
/// Provides a simpler constraint for the compiler to report to the developer.
///
/// Instead of the detailed error message from the `From` trait:
///
/// ```md
/// the trait `From<
/// [closure@examples/envman/src/flows/app_upload_flow.rs:101:40: 101:73]
/// >`
/// is not implemented for
/// `MappingFnImpl<
/// [closure@examples/envman/src/flows/app_upload_flow.rs:101:40: 101:73],
/// _,
/// >`
/// ```
///
/// we get:
///
/// ```md
/// the trait `FromFunc<[closure@examples/envman/src/flows/app_upload_flow.rs:101:40: 101:73]>`
/// is not implemented for `MappingFnImpl<
/// [closure@examples/envman/src/flows/app_upload_flow.rs:101:40: 101:73],
/// _,
/// >`
/// ```
pub trait FromFunc<F> {
fn from_func(f: F) -> Box<dyn MappingFn>;
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/params_value.rs | crate/params/src/params_value.rs | use std::fmt::Debug;
use serde::{de::DeserializeOwned, Serialize};
/// Marker trait for a parameter value type.
///
/// This trait is automatically implemented for types that are `Clone + Debug +
/// DeserializeOwned + Serialize + Send + Sync + 'static`.
pub trait ParamsValue:
Clone + Debug + PartialEq + DeserializeOwned + Serialize + Send + Sync + 'static
{
}
impl<T> ParamsValue for T where
T: Clone + Debug + PartialEq + DeserializeOwned + Serialize + Send + Sync + 'static
{
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/any_spec_data_type.rs | crate/params/src/any_spec_data_type.rs | #![allow(clippy::multiple_bound_locations)] // https://github.com/marcianx/downcast-rs/issues/19
use std::{any::Any, fmt};
use downcast_rs::DowncastSync;
use dyn_clone::DynClone;
use peace_resource_rt::type_reg::untagged::DataType;
use crate::AnySpecRt;
/// A [`DataType`] that is also an [`AnySpecRt`].
pub trait AnySpecDataType: AnySpecRt + DataType + DowncastSync {}
impl<T> AnySpecDataType for T where
T: Any + DynClone + fmt::Debug + AnySpecRt + erased_serde::Serialize + Send + Sync
{
}
downcast_rs::impl_downcast!(sync AnySpecDataType);
impl Clone for Box<dyn AnySpecDataType> {
fn clone(&self) -> Self {
dyn_clone::clone_box(self.as_ref())
}
}
impl serde::Serialize for dyn AnySpecDataType + '_ {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: serde::Serializer,
{
erased_serde::serialize(self, serializer)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/mapping_fns.rs | crate/params/src/mapping_fns.rs | use std::{fmt::Debug, hash::Hash};
use enum_iterator::Sequence;
use serde::{de::DeserializeOwned, Serialize};
use crate::{MappingFn, MappingFnId, MappingFnImpl};
/// Enum to give versioned IDs to mapping functions, so that params specs and
/// value specs can be serialized.
///
/// Item parameters may be mapped from other items' state, and that logic
/// exists as code. However, we want the ability to store (remember) those
/// mappings across command executions. If a closure is held in the params
/// specs and value specs, then they cannot be serialized. However, if we
/// place that logic elsewhere (like in the `CmdCtxTypes` implementation),
/// and have an intermediate enum to represent the mapping functions, we can
/// serialize the enum instead of the closure.
///
/// # Examples
///
/// ```rust,ignore
/// use peace::{
/// enum_iterator::Sequence,
/// params::{FromFunc, MappingFn, MappingFnId, MappingFnImpl, MappingFns},
/// profile_model::Profile,
/// };
/// use serde::{Deserialize, Serialize};
///
/// use crate::items::{peace_aws_iam_policy::IamPolicyState, peace_aws_s3_bucket::S3BucketState};
///
/// #[derive(Debug, Clone, Copy, PartialEq, Eq, Hash, Deserialize, Serialize, Sequence)]
/// #[allow(non_camel_case_types)]
/// #[enum_iterator(crate = peace::enum_iterator)]
/// pub enum EnvmanMappingFns {
/// /// Returns the `IamRole` name from profile.
/// IamRoleNameFromProfile_v0_1_0,
/// /// Returns the `IamRole` Managed Policy ARN from the `IamPolicyState`'s
/// /// `policy_id_arn_version`.
/// IamRoleManagedPolicyArnFromIamPolicyState_v0_1_0,
/// /// Returns the `S3Bucket` name from the `S3BucketState`.
/// S3BucketNameFromS3BucketState_v0_1_0,
/// }
///
/// impl MappingFns for EnvmanMappingFns {
/// fn id(self) -> MappingFnId {
/// let name = match self {
/// Self::IamRoleNameFromProfile_v0_1_0 => "IamRoleNameFromProfile_v0_1_0",
/// Self::IamRoleManagedPolicyArnFromIamPolicyState_v0_1_0 => {
/// "IamRoleManagedPolicyArnFromIamPolicyState_v0_1_0"
/// }
/// Self::S3BucketNameFromS3BucketState_v0_1_0 => {
/// "S3BucketNameFromS3BucketState_v0_1_0"
/// }
/// };
/// MappingFnId::new(name.to_string())
/// }
///
/// fn mapping_fn(self) -> Box<dyn MappingFn> {
/// match self {
/// Self::IamRoleNameFromProfile_v0_1_0 => {
/// MappingFnImpl::from_func(|profile: &Profile| Some(profile.to_string()))
/// }
/// Self::IamRoleManagedPolicyArnFromIamPolicyState_v0_1_0 => {
/// MappingFnImpl::from_func(IamPolicyState::policy_id_arn_version)
/// }
/// Self::S3BucketNameFromS3BucketState_v0_1_0 => {
/// MappingFnImpl::from_func(S3BucketState::bucket_name)
/// }
/// }
/// }
/// }
/// ```
pub trait MappingFns:
Clone
+ Copy
+ Debug
+ Hash
+ PartialEq
+ Eq
+ Serialize
+ DeserializeOwned
+ Sequence
+ Send
+ Sync
+ 'static
{
/// Returns a string representation of the mapping function name.
///
/// # Implementors
///
/// The returned ID is considered API, and should be stable. This means
/// you should name each variant with a version number, and never remove
/// that variant, e.g. `MappingFnId::new("ServerNameFromProfile_V1_0_0"
/// )`.
///
/// That way, previously stored mapping function IDs can still be
/// deserialized, and tool developers can opt-in to upgrading to the newer
/// mapping functions when ready.
fn id(self) -> MappingFnId;
/// Returns the mapping function corresponding to the given variant.
fn mapping_fn(self) -> Box<dyn MappingFn>;
}
impl MappingFns for () {
fn id(self) -> MappingFnId {
MappingFnId::new(String::from(""))
}
fn mapping_fn(self) -> Box<dyn MappingFn> {
Box::new(MappingFnImpl::<(), _, ()>::empty())
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/any_spec_rt.rs | crate/params/src/any_spec_rt.rs | use crate::AnySpecDataType;
/// Runtime logic of how to look up values for each field in this struct.
///
/// This trait is automatically implemented by `#[derive(Params)]` on an
/// `Item::Params`, as well as in the `peace_params` crate for standard
/// library types.
pub trait AnySpecRt {
/// Whether this `Spec` is usable to resolve values.
///
/// This is only `false` for `*Spec::Stored`.
///
/// After merging, `*Spec::Stored` will be replaced with whatever the
/// `other` `*Spec` is. If after merging, the `*Spec` is `Stored`, then the
/// `*Spec` will not be usable as there *wasn't* anything stored in the
/// first place.
fn is_usable(&self) -> bool;
/// Deep merges the provided `AnySpecRt` with `self`, where `self` takes
/// priority, except for `Self::Stored`.
///
/// This means where `self` is `Self::Value`, `Self::InMemory`,
/// `Self::MappingFn`, and `Self::FieldWise`, these would take priority over
/// any stored item variants.
///
/// For `Self::FieldWise`, a recursive merge would happen per field
/// `ValueSpec`.
///
/// # Design
///
/// This can't be `Self` or `&Self` because that makes the trait non-object
/// safe. Adding a `where: Self: Sized` bound prevents the method from being
/// called from `cmd_ctx_builder`.
fn merge(&mut self, other: &dyn AnySpecDataType);
}
impl<T> AnySpecRt for Box<T>
where
T: AnySpecRt,
{
fn is_usable(&self) -> bool {
self.as_ref().is_usable()
}
fn merge(&mut self, other: &dyn AnySpecDataType)
where
Self: Sized,
{
self.as_mut().merge(other)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/field_wise_spec_rt.rs | crate/params/src/field_wise_spec_rt.rs | use std::fmt::Debug;
use peace_resource_rt::{resources::ts::SetUp, Resources};
use serde::{de::DeserializeOwned, Serialize};
use crate::{AnySpecRt, MappingFnReg, ParamsResolveError, ValueResolutionCtx};
/// Runtime logic of how to look up values for each field in this struct.
///
/// This trait is automatically implemented by `#[derive(Params)]` on an
/// `Item::Params`, as well as manual implementations for standard library
/// types.
pub trait FieldWiseSpecRt: AnySpecRt {
/// The original value type. `MyParamsFieldWiseSpec::ValueType` is
/// `MyParams`.
type ValueType: Clone + Debug + Serialize + DeserializeOwned + Send + Sync + 'static;
/// The `Params` type, but with each of its fields wrapped in `Option`.
type Partial: Clone + Debug + Default + Serialize + DeserializeOwned + Send + Sync + 'static;
/// Resolves the values to construct the item `Params`.
///
/// This function returns an error if any value is not present in
/// [`Resources`]. For cases where missing values are not an error, see
/// [`resolve_partial`].
///
/// [`resolve_partial`]: Self::resolve_partial
fn resolve(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<Self::ValueType, ParamsResolveError>;
/// Resolves the values to construct the item `Params`.
///
/// Values that are not present in `Resources` will be `None`.
fn resolve_partial(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<Self::Partial, ParamsResolveError>;
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/params_resolve_error.rs | crate/params/src/params_resolve_error.rs | use crate::{FieldNameAndType, MappingFnId, ValueResolutionCtx};
/// Failed to resolve values for a `Params` object from `resources`.
//
// TODO: Help text could be generated based on the type of `Params` -- named fields struct, tuple
// struct, enum -- instead of assuming it's always a named fields struct.
#[derive(Debug, thiserror::Error)]
#[cfg_attr(feature = "error_reporting", derive(miette::Diagnostic))]
pub enum ParamsResolveError {
/// Failed to resolve a field value from `resources`.
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_params::params_resolve_error::from),
help("Make sure `{field_type_name}` has been inserted into `resources`.",
field_type_name = value_resolution_ctx
.resolution_chain()
.last()
.map(FieldNameAndType::type_name)
.unwrap_or(value_resolution_ctx.params_type_name())
)
)
)]
#[error("Failed to resolve `{field_type_name}` to populate:\n\
\n\
```rust\n\
{value_resolution_ctx}\n\
```",
field_type_name = value_resolution_ctx
.resolution_chain()
.last()
.map(FieldNameAndType::type_name)
.unwrap_or(value_resolution_ctx.params_type_name()))]
InMemory {
/// Hierarchy of fields traversed to resolve the value.
value_resolution_ctx: ValueResolutionCtx,
},
/// Failed to borrow a field value from `resources`.
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_params::params_resolve_error::from_borrow_conflict),
help("By design `{field_type_name}` must not be borrowed mutably.",
field_type_name = value_resolution_ctx
.resolution_chain()
.last()
.map(FieldNameAndType::type_name)
.unwrap_or(value_resolution_ctx.params_type_name())
)
)
)]
#[error("Borrow conflict on `{field_type_name}` to populate:\n\
\n\
```rust\n\
{value_resolution_ctx}\n\
```",
field_type_name = value_resolution_ctx
.resolution_chain()
.last()
.map(FieldNameAndType::type_name)
.unwrap_or(value_resolution_ctx.params_type_name())
)
]
InMemoryBorrowConflict {
/// Hierarchy of fields traversed to resolve the value.
value_resolution_ctx: ValueResolutionCtx,
},
/// Failed to resolve a from value from `resources`.
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_params::params_resolve_error::from_map),
help(
"Make sure `{from_type_name}` has been inserted into `resources`.\n\
Value resolution mode is: {value_resolution_mode:?}",
value_resolution_mode = value_resolution_ctx.value_resolution_mode()
)
)
)]
#[error(
"Failed to resolve `{from_type_name}` to populate:\n\
\n\
```rust\n\
{value_resolution_ctx}\n\
```"
)]
FromMap {
/// Hierarchy of fields traversed to resolve the value.
value_resolution_ctx: ValueResolutionCtx,
/// Name of the type from which to map the field value from.
///
/// Corresponds to `U` in `Fn(&U) -> T`.
from_type_name: String,
},
#[error(
"Failed to downcast resolved `BoxDt` into `{to_type_name}` to populate:\n\
\n\
```rust\n\
{value_resolution_ctx}\n\
```"
)]
FromMapDowncast {
/// Hierarchy of fields traversed to resolve the value.
value_resolution_ctx: ValueResolutionCtx,
/// Name of the type that is being resolved.
///
/// Usually one of the `Item::Params` types.
///
/// Corresponds to `T` in `Fn(&U) -> T`.
to_type_name: String,
},
/// Failed to borrow a value to map to a field from `resources`.
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_params::params_resolve_error::from_map_borrow_conflict),
help("By design `{from_type_name}` must not be borrowed mutably.")
)
)]
#[error(
"Borrow conflict on `{from_type_name}` to populate:\n\
\n\
```rust\n\
{value_resolution_ctx}\n\
```"
)]
FromMapBorrowConflict {
/// Hierarchy of fields traversed to resolve the value.
value_resolution_ctx: ValueResolutionCtx,
/// Name of the type from which to map the field value from.
///
/// Corresponds to `U` in `Fn(&U) -> T`.
from_type_name: String,
},
/// Failed to resolve a mapping function from the registry.
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_params::params_resolve_error::mapping_fn_resolve),
help(
"Mapping function variants are intended to be stable, so if you renamed the mapping function, you may have to edit the stored param spec to use the new name."
)
)
)]
#[error(
"Failed to resolve mapping function `{mapping_fn_id:?}` to populate:\n\
\n\
```rust\n\
{value_resolution_ctx}\n\
```"
)]
MappingFnResolve {
/// Hierarchy of fields traversed to resolve the value.
value_resolution_ctx: ValueResolutionCtx,
/// String representation of the mapping function.
///
/// In practice, this is a YAML serialized string representation of the
/// `MappingFns` variant.
mapping_fn_id: MappingFnId,
},
}
impl ParamsResolveError {
/// Returns a new `ParamsResolveError::MappingFnResolve` variant from the
/// provided context.
pub fn mapping_fn_resolve(
value_resolution_ctx: &ValueResolutionCtx,
mapping_fn_id: &MappingFnId,
) -> ParamsResolveError {
ParamsResolveError::MappingFnResolve {
value_resolution_ctx: value_resolution_ctx.clone(),
mapping_fn_id: mapping_fn_id.clone(),
}
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/lib.rs | crate/params/src/lib.rs | #![cfg_attr(coverage_nightly, feature(coverage_attribute))]
//! Constraints and specifications for parameters for the peace automation
//! framework.
//!
//! This crate defines types and traits for implementors and users to work with
//! item params.
//!
//! # Design
//!
//! When an item is defined, implementors define the parameters type for
//! that item.
//!
//! For Peace to derive additional functionality from that type, this crate:
//!
//! * Defines the `Params` trait to bridge between the parameters type and
//! associated types.
//! * Re-exports the `Params` derive macro which implements the `Params` trait.
//!
//! ## How It Fits Together
//!
//! ```text
//! .----------------------------------------------------------------------------------.
//! : Users : Implementors : Peace :
//! :----------------------------:--------------------------------:--------------------:
//! : : : :
//! : : .-------------------. : :
//! : : | #[derive(Params)] | : ---. :
//! : : | struct MyParams; | : | :
//! : : '-------------------' : ' :
//! : : : proc macro :
//! : : .----------------------------. : generates :
//! : : | * MyParamsFieldWise | : . :
//! : : | * MyParamsPartial | : | :
//! : : | * MyParamsFieldWiseBuilder | : <--' :
//! : : | * impl Params for MyParams | : :
//! : : '----------------------------' : :
//! : : : :
//! : : .-------------------. : :
//! : : | struct MyItem; | : :
//! : : | | : ---. :
//! : : | impl Item for | : | :
//! : : | MyItem { | : | :
//! : : | type Params = | : ' :
//! : : | MyParams; | : exposes API :
//! : : | } | : with constraints :
//! : : '-------------------' : from :
//! : : : <Item::Params :
//! : .------------------------. : : as Params> :
//! : | cmd_ctx_builder | : : . :
//! : | .with_item_params | <-------------------------------------' :
//! : | ::<IS>( | : : :
//! : | item_id, | : : :
//! : | my_p_spec_builder | : : :
//! : | .with_f(123) | : : :
//! : | .with_from(..) | : : :
//! : | /* .build() */ | : : :
//! : | ) | : : :
//! : '------------------------' : : :
//! : : : :
//! '----------------------------------------------------------------------------------'
//! ```
// Re-exports
pub use peace_params_derive::{value_impl, Params, ParamsFieldless};
pub use tynm;
pub use crate::{
any_spec_data_type::AnySpecDataType,
any_spec_rt::AnySpecRt,
any_spec_rt_boxed::AnySpecRtBoxed,
field_name_and_type::FieldNameAndType,
field_wise_spec_rt::FieldWiseSpecRt,
func::{FromFunc, Func},
mapping_fn::MappingFn,
mapping_fn_id::MappingFnId,
mapping_fn_impl::MappingFnImpl,
mapping_fn_reg::MappingFnReg,
mapping_fns::MappingFns,
params::Params,
params_fieldless::ParamsFieldless,
params_key::ParamsKey,
params_merge_ext::ParamsMergeExt,
params_resolve_error::ParamsResolveError,
params_spec::ParamsSpec,
params_spec_fieldless::ParamsSpecFieldless,
params_specs::ParamsSpecs,
params_value::ParamsValue,
value_resolution_ctx::ValueResolutionCtx,
value_resolution_mode::ValueResolutionMode,
value_spec::ValueSpec,
value_spec_rt::ValueSpecRt,
};
mod any_spec_data_type;
mod any_spec_rt;
mod any_spec_rt_boxed;
mod field_name_and_type;
mod field_wise_spec_rt;
mod func;
mod mapping_fn;
mod mapping_fn_id;
mod mapping_fn_impl;
mod mapping_fn_reg;
mod mapping_fns;
mod params;
mod params_fieldless;
mod params_key;
mod params_merge_ext;
mod params_resolve_error;
mod params_spec;
mod params_spec_fieldless;
mod params_specs;
mod params_value;
mod std_impl;
mod value_resolution_ctx;
mod value_resolution_mode;
mod value_spec;
mod value_spec_rt;
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/value_resolution_mode.rs | crate/params/src/value_resolution_mode.rs | use serde::{Deserialize, Serialize};
/// When resolving `Value`s, whether to look up `Current<T>` or `Goal<T>`.
///
/// # Design
///
/// Remember to update these places when updating here.
///
/// 1. Marker types in `crate/data/src/marker.rs`.
/// 2. `peace_params::MappingFnImpl`.
/// 3. Resource insertions in `ItemWrapper::setup`.
//
// TODO: Should we have modes for:
//
// * `CurrentStored`
// * `GoalStored`
// * `ExecutionBeginning`
#[derive(Clone, Copy, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub enum ValueResolutionMode {
/// Resolve values using example states.
#[cfg(feature = "item_state_example")]
Example,
/// Resolve values using cleaned states.
Clean,
/// Resolve values using current states.
Current,
/// Resolve values using goal states.
Goal,
/// Resolve values using dry-applied states.
///
/// The states in memory may be example / fake / placeholder values.
///
/// TODO: resolve this in [#196]
///
/// [#196]: https://github.com/azriel91/peace/issues/196
ApplyDry,
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/params_spec.rs | crate/params/src/params_spec.rs | use std::fmt::Debug;
use peace_resource_rt::{
resources::ts::SetUp, type_reg::untagged::BoxDataTypeDowncast, BorrowFail, Resources,
};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::{
AnySpecDataType, AnySpecRt, FieldWiseSpecRt, MappingFnId, MappingFnReg, MappingFns, Params,
ParamsResolveError, ValueResolutionCtx, ValueResolutionMode, ValueSpecRt,
};
/// How to populate a field's value in an item's params.
///
/// The `MappingFn` variant's mapping function is `None` when deserialized, as
/// it is impossible to determine the underlying `F` and `U` type parameters for
/// the backing `MappingFnImpl`.
///
/// For deserialization:
///
/// 1. A `ParamsSpecsTypeReg` is constructed, and deserialization functions are
/// registered from `ItemId` to `ParamsSpecDe<T, F, U>`, where `F` and `U`
/// are derived from the `ValueSpec` provided by the user.
///
/// 2. `value_specs.yaml` is deserialized using that type registry.
///
/// 3. Each `ParamsSpecDe<T>` is mapped into a `ValueSpec<T>`, and subsequently
/// `AnySpecRtBoxed` to be passed around in a `CmdCtx`.
///
/// 4. These `AnySpecRtBoxed`s are downcasted back to `ValueSpec<T>` when
/// resolving values for item params and params partials.
///
/// # Type Parameters
///
/// * `T`: The `Item::Params` type.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(bound = "T: Params")]
pub enum ParamsSpec<T>
where
T: Params,
{
/// Loads a stored value spec.
///
/// The value used is determined by the value spec that was
/// last stored in the `params_specs_file`. This means it
/// could be loaded as a `Value(T)` during context `build()`.
///
/// This variant may be provided when defining a command context
/// builder. However, this variant is never serialized, but
/// whichever value was *first* stored is re-loaded then
/// re-serialized.
///
/// If no value spec was previously serialized, then the command
/// context build will return an error.
Stored,
/// Uses the provided value.
///
/// The value used is whatever is passed in to the command context
/// builder.
Value {
/// The value to use.
value: T,
},
/// Uses a value loaded from `resources` at runtime.
///
/// The value may have been provided by workspace params, or
/// inserted by a predecessor at runtime.
InMemory,
/// Uses a mapped value loaded from `resources` at runtime.
///
/// The value may have been provided by workspace params, or
/// inserted by a predecessor at runtime, and is mapped by the
/// given function.
///
/// This is serialized as `MappingFn` with a string value. For
/// deserialization, there is no actual backing function, so
/// the user must provide the `MappingFn` in subsequent command
/// context builds.
MappingFn {
/// Name of the field to be mapped. `None` if this is the top level
/// object.
field_name: Option<String>,
/// The name of the mapping function.
mapping_fn_id: MappingFnId,
},
/// Resolves this value through `ValueSpec`s for each of its fields.
///
/// This is like `T`, but with each field wrapped in
/// `ParamsSpecFieldless<T>`.
//
// Wrap each in `ValueSpec`, but for unit / external values, fail on field wise
// resolution, and also don't generate a builder method for field wise (even if is present in
// the `ValueSpec` API).
//
// Need to decide on:
//
// * Every non-recursive field is annotated with `#[params(non_recursive)]`
// * Every recursive field is annotated with `#[params(recursive)]`
//
// There shouldn't need to be automatic detection of non-recursive fields for stdlib types,
// because `peace_params` should just implement `ValueSpec` for those types.
FieldWise {
/// The field wise spec.
field_wise_spec: T::FieldWiseSpec,
},
}
impl<T> From<T> for ParamsSpec<T>
where
T: Params,
{
fn from(value: T) -> Self {
Self::Value { value }
}
}
impl<T> ParamsSpec<T>
where
T: Params<Spec = ParamsSpec<T>>,
T::Partial: From<T>,
{
/// Returns the `ParamsSpec::MappingFn` variant with the passed in values.
///
/// This is a convenience method for creating a `ParamsSpec::MappingFn`
/// variant where the mapping function name is retrieved from
/// `mapping_fns.name()`.
pub fn mapping_fn<MFns>(field_name: Option<String>, mapping_fns: MFns) -> Self
where
MFns: MappingFns,
{
Self::MappingFn {
field_name,
mapping_fn_id: mapping_fns.id(),
}
}
/// Returns the value of `T` by applying this spec to the passed in
/// `resources`.
pub fn resolve(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<peace_resource_rt::resources::ts::SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<T, ParamsResolveError>
where
T: Params,
{
match self {
ParamsSpec::Value { value } => Ok(value.clone()),
ParamsSpec::Stored | ParamsSpec::InMemory => {
// Try resolve `T`, through the `value_resolution_ctx` first
let params_resolved = match value_resolution_ctx.value_resolution_mode() {
#[cfg(feature = "item_state_example")]
ValueResolutionMode::Example => resources
.try_borrow::<peace_data::marker::Example<T>>()
.map(|data_marker| data_marker.0.clone()),
ValueResolutionMode::Clean => resources
.try_borrow::<peace_data::marker::Clean<T>>()
.map(|data_marker| data_marker.0.clone()),
ValueResolutionMode::Current => resources
.try_borrow::<peace_data::marker::Current<T>>()
.map(|data_marker| data_marker.0.clone()),
ValueResolutionMode::Goal => resources
.try_borrow::<peace_data::marker::Goal<T>>()
.map(|data_marker| data_marker.0.clone()),
ValueResolutionMode::ApplyDry => resources
.try_borrow::<peace_data::marker::ApplyDry<T>>()
.map(|data_marker| data_marker.0.clone()),
}
.and_then(|param_opt| param_opt.ok_or(BorrowFail::ValueNotFound));
params_resolved.or_else(|_e| {
// Try resolve `T` again without the `value_resolution_ctx` wrapper.
match resources.try_borrow::<T>() {
Ok(value) => Ok((*value).clone()),
Err(borrow_fail) => match borrow_fail {
BorrowFail::ValueNotFound => Err(ParamsResolveError::InMemory {
value_resolution_ctx: value_resolution_ctx.clone(),
}),
BorrowFail::BorrowConflictImm | BorrowFail::BorrowConflictMut => {
Err(ParamsResolveError::InMemoryBorrowConflict {
value_resolution_ctx: value_resolution_ctx.clone(),
})
}
},
}
})
}
ParamsSpec::MappingFn {
field_name,
mapping_fn_id,
} => resolve_t_from_mapping_fn(
mapping_fn_reg,
resources,
value_resolution_ctx,
field_name.as_deref(),
mapping_fn_id,
),
ParamsSpec::FieldWise { field_wise_spec } => {
field_wise_spec.resolve(mapping_fn_reg, resources, value_resolution_ctx)
}
}
}
pub fn resolve_partial(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<T::Partial, ParamsResolveError> {
match self {
ParamsSpec::Value { value } => Ok(T::Partial::from((*value).clone())),
ParamsSpec::Stored | ParamsSpec::InMemory => {
// Try resolve `T`, through the `value_resolution_ctx` first
let params_partial_resolved = match value_resolution_ctx.value_resolution_mode() {
#[cfg(feature = "item_state_example")]
ValueResolutionMode::Example => resources
.try_borrow::<peace_data::marker::Example<T>>()
.map(|data_marker| data_marker.0.clone()),
ValueResolutionMode::Clean => resources
.try_borrow::<peace_data::marker::Clean<T>>()
.map(|data_marker| data_marker.0.clone()),
ValueResolutionMode::Current => resources
.try_borrow::<peace_data::marker::Current<T>>()
.map(|data_marker| data_marker.0.clone()),
ValueResolutionMode::Goal => resources
.try_borrow::<peace_data::marker::Goal<T>>()
.map(|data_marker| data_marker.0.clone()),
ValueResolutionMode::ApplyDry => resources
.try_borrow::<peace_data::marker::ApplyDry<T>>()
.map(|data_marker| data_marker.0.clone()),
}
.and_then(|param_opt| param_opt.ok_or(BorrowFail::ValueNotFound));
params_partial_resolved.map(T::Partial::from).or_else(|_e| {
// Try resolve `T` again without the `value_resolution_ctx` wrapper.
match resources.try_borrow::<T>() {
Ok(value) => Ok(T::Partial::from((*value).clone())),
Err(borrow_fail) => match borrow_fail {
BorrowFail::ValueNotFound => Ok(T::Partial::default()),
BorrowFail::BorrowConflictImm | BorrowFail::BorrowConflictMut => {
Err(ParamsResolveError::InMemoryBorrowConflict {
value_resolution_ctx: value_resolution_ctx.clone(),
})
}
},
}
})
}
ParamsSpec::MappingFn {
field_name,
mapping_fn_id,
} => {
let mapping_fn = mapping_fn_reg.get(mapping_fn_id).ok_or_else(|| {
ParamsResolveError::mapping_fn_resolve(value_resolution_ctx, mapping_fn_id)
})?;
let box_dt_params_opt =
mapping_fn.try_map(resources, value_resolution_ctx, field_name.as_deref())?;
let t_partial = box_dt_params_opt
.map(|box_dt_params| {
BoxDataTypeDowncast::<T>::downcast_ref(&box_dt_params)
.cloned()
.ok_or_else(|| ParamsResolveError::FromMapDowncast {
value_resolution_ctx: value_resolution_ctx.clone(),
to_type_name: tynm::type_name::<T>(),
})
.map(T::Partial::from)
})
.transpose()?
.unwrap_or_default();
Ok(t_partial)
}
ParamsSpec::FieldWise { field_wise_spec } => {
field_wise_spec.resolve_partial(mapping_fn_reg, resources, value_resolution_ctx)
}
}
}
}
/// Returns a `T` by downcasting it from a `BoxDt` resolved by a mapping
/// function.
///
/// # Note
///
/// Update `ParamsSpecFieldless` and `ValueSpec` as well when updating this
/// code.
fn resolve_t_from_mapping_fn<T>(
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
field_name: Option<&str>,
mapping_fn_id: &MappingFnId,
) -> Result<T, ParamsResolveError>
where
T: Params<Spec = ParamsSpec<T>> + Clone + Debug + Send + Sync + 'static,
{
let mapping_fn = mapping_fn_reg.get(mapping_fn_id).ok_or_else(|| {
ParamsResolveError::mapping_fn_resolve(value_resolution_ctx, mapping_fn_id)
})?;
let box_dt_params = mapping_fn.map(resources, value_resolution_ctx, field_name)?;
BoxDataTypeDowncast::<T>::downcast_ref(&box_dt_params)
.cloned()
.ok_or_else(|| ParamsResolveError::FromMapDowncast {
value_resolution_ctx: value_resolution_ctx.clone(),
to_type_name: tynm::type_name::<T>(),
})
}
impl<T> AnySpecRt for ParamsSpec<T>
where
T: Params<Spec = ParamsSpec<T>>
+ Clone
+ Debug
+ Serialize
+ DeserializeOwned
+ Send
+ Sync
+ 'static,
{
fn is_usable(&self) -> bool {
match self {
Self::Stored => false,
Self::Value { .. } | Self::InMemory | Self::MappingFn { .. } => true,
Self::FieldWise { field_wise_spec } => field_wise_spec.is_usable(),
}
}
fn merge(&mut self, other_boxed: &dyn AnySpecDataType)
where
Self: Sized,
{
let other: Option<&Self> = other_boxed.downcast_ref();
let other = other.unwrap_or_else(
#[cfg_attr(coverage_nightly, coverage(off))]
|| {
let self_ty_name = tynm::type_name::<Self>();
panic!(
"Failed to downcast value into `{self_ty_name}`. Value: `{other_boxed:#?}`."
);
},
);
match self {
// Use the spec that was previously stored
// (as opposed to previous value).
Self::Stored => *self = other.clone(),
// Use set value / no change on these variants
Self::Value { .. } | Self::InMemory | Self::MappingFn { .. } => {}
Self::FieldWise { field_wise_spec } => {
match other {
// Don't merge stored field wise specs over provided specs.
Self::Stored | Self::Value { .. } | Self::InMemory | Self::MappingFn { .. } => {
}
// Merge specs fieldwise.
Self::FieldWise {
field_wise_spec: field_wise_spec_other,
} => AnySpecRt::merge(field_wise_spec, field_wise_spec_other),
}
}
}
}
}
impl<T> ValueSpecRt for ParamsSpec<T>
where
T: Params<Spec = ParamsSpec<T>>
+ Clone
+ Debug
+ Serialize
+ DeserializeOwned
+ Send
+ Sync
+ 'static,
T::Partial: From<T>,
T: TryFrom<T::Partial>,
{
type ValueType = T;
fn resolve(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<T, ParamsResolveError> {
ParamsSpec::<T>::resolve(self, mapping_fn_reg, resources, value_resolution_ctx)
}
fn try_resolve(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<Option<T>, ParamsResolveError> {
ParamsSpec::<T>::resolve_partial(self, mapping_fn_reg, resources, value_resolution_ctx)
.map(T::try_from)
.map(Result::ok)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/params.rs | crate/params/src/params.rs | use std::fmt::Debug;
use serde::{de::DeserializeOwned, Serialize};
use crate::FieldWiseSpecRt;
/// Input parameters to an item.
///
/// This trait is automatically implemented by
/// `#[derive(peace::params::Params)]`.
pub trait Params: Clone + Debug + Serialize + DeserializeOwned + Send + Sync + 'static {
/// Convenience associated type for `ValueSpec<Self>`.
type Spec: Clone + Debug + Serialize + DeserializeOwned + Send + Sync + 'static;
/// The `Params` type, where each field is wrapped in [`Option`].
type Partial: Clone + Debug + Default + Send + Sync + 'static;
/// The `Params` type, where each field is wrapped with
/// [`ParamsSpecFieldless<T>`].
///
/// Specifies how to look up values for each field in the `Value`.
///
/// [`ParamsSpecFieldless<T>`]: crate::ParamsSpecFieldless
type FieldWiseSpec: FieldWiseSpecRt<ValueType = Self, Partial = Self::Partial>
+ Clone
+ Debug
+ Serialize
+ DeserializeOwned
+ Send
+ Sync
+ 'static;
/// Builder to return the `FieldWiseSpec` type.
type FieldWiseBuilder;
/// Returns a builder to construct the `FieldWise` spec.
fn field_wise_spec() -> Self::FieldWiseBuilder;
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/any_spec_rt_boxed.rs | crate/params/src/any_spec_rt_boxed.rs | use std::ops::{Deref, DerefMut};
use peace_resource_rt::type_reg::{
untagged::{BoxDataTypeDowncast, DataType, DataTypeWrapper, FromDataType},
TypeNameLit,
};
use serde::Serialize;
use crate::{AnySpecDataType, AnySpecRt};
/// Box of a [`DataType`] that is also a [`ValueSpecRt`].
#[derive(Clone, Debug, Serialize)]
pub struct AnySpecRtBoxed(pub(crate) Box<dyn AnySpecDataType>);
impl AnySpecRtBoxed {
/// Returns a new `ValueSpecRtBoxed` wrapper around the provided type.
pub fn new<T>(t: T) -> Self
where
T: DataType + AnySpecRt,
{
Self(Box::new(t))
}
/// Returns the inner `Box<dyn ValueSpecDataType>`.
pub fn into_inner(self) -> Box<dyn AnySpecDataType> {
self.0
}
}
impl Deref for AnySpecRtBoxed {
type Target = dyn AnySpecDataType;
fn deref(&self) -> &Self::Target {
self.0.as_ref()
}
}
impl DerefMut for AnySpecRtBoxed {
fn deref_mut(&mut self) -> &mut Self::Target {
self.0.as_mut()
}
}
impl<T> FromDataType<T> for AnySpecRtBoxed
where
T: DataType + AnySpecRt,
{
fn from(t: T) -> Self {
AnySpecRtBoxed(Box::new(t))
}
}
impl<T> BoxDataTypeDowncast<T> for AnySpecRtBoxed
where
T: DataType + AnySpecRt,
{
fn downcast_ref(&self) -> Option<&T> {
self.0.downcast_ref::<T>()
}
fn downcast_mut(&mut self) -> Option<&mut T> {
self.0.downcast_mut::<T>()
}
}
impl DataTypeWrapper for AnySpecRtBoxed {
fn type_name(&self) -> TypeNameLit {
DataType::type_name(&*self.0)
}
fn clone(&self) -> Self {
Self(self.0.clone())
}
fn debug(&self) -> &dyn std::fmt::Debug {
&self.0
}
fn inner(&self) -> &dyn DataType {
&self.0
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/value_resolution_ctx.rs | crate/params/src/value_resolution_ctx.rs | use std::fmt;
use peace_item_model::ItemId;
use serde::{Deserialize, Serialize};
use crate::{FieldNameAndType, ValueResolutionMode};
/// Collects information about how a value is resolved.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct ValueResolutionCtx {
/// When resolving `Value`s, whether to look up `Current<T>` or
/// `Goal<T>`.
value_resolution_mode: ValueResolutionMode,
/// ID of the item whose params are being resolved.
item_id: ItemId,
/// Name of the `Item::Params` type.
params_type_name: String,
/// Hierarchy of fields traversed to resolve this value.
resolution_chain: Vec<FieldNameAndType>,
}
impl ValueResolutionCtx {
pub fn new(
value_resolution_mode: ValueResolutionMode,
item_id: ItemId,
params_type_name: String,
) -> Self {
Self {
value_resolution_mode,
item_id,
params_type_name,
resolution_chain: Vec::new(),
}
}
pub fn value_resolution_mode(&self) -> ValueResolutionMode {
self.value_resolution_mode
}
pub fn item_id(&self) -> &ItemId {
&self.item_id
}
pub fn params_type_name(&self) -> &str {
&self.params_type_name
}
pub fn resolution_chain(&self) -> &[FieldNameAndType] {
self.resolution_chain.as_ref()
}
/// Appends a field name and type to the resolution chain.
pub fn push(&mut self, field_name_and_type: FieldNameAndType) {
self.resolution_chain.push(field_name_and_type);
}
/// Removes a field name and type from the resolution chain.
pub fn pop(&mut self) {
self.resolution_chain.pop();
}
}
impl fmt::Display for ValueResolutionCtx {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let params_type_name = self.params_type_name();
write!(f, "{params_type_name} {{")?;
if let Some((last, chain)) = self.resolution_chain().split_last() {
writeln!(f)?;
chain
.iter()
.enumerate()
.try_for_each(|(indentation, field_name_and_type)| {
let indentation = indentation + 1;
(0..indentation).try_for_each(|_| write!(f, " "))?;
let field_name = field_name_and_type.field_name();
let type_name = field_name_and_type.type_name();
writeln!(f, "{field_name}: {type_name} {{")
})?;
// Don't add opening `{` for the actual field.
let indentation = self.resolution_chain().len();
(0..indentation).try_for_each(|_| write!(f, " "))?;
let field_name = last.field_name();
let type_name = last.type_name();
writeln!(f, "{field_name}: {type_name},")?;
(0..indentation).try_for_each(|_| write!(f, " "))?;
writeln!(f, "..")?;
}
(0..self.resolution_chain().len())
.rev()
.skip(1)
.try_for_each(|indentation| {
let indentation = indentation + 1;
(0..indentation).try_for_each(|_| write!(f, " "))?;
writeln!(f, "}},")?;
(0..indentation).try_for_each(|_| write!(f, " "))?;
writeln!(f, "..")
})?;
write!(f, "}}")
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/params_key.rs | crate/params/src/params_key.rs | use std::{fmt::Debug, hash::Hash};
use enum_iterator::Sequence;
use peace_resource_rt::type_reg::untagged::TypeReg;
use serde::{de::DeserializeOwned, Serialize};
/// Marker trait for a parameter key type.
///
/// This trait is automatically implemented for types that are `Clone + Debug +
/// Eq + Hash + Deserialize + Serialize`.
///
/// # Examples
///
/// ```rust,ignore
/// use peace::{
/// cmd_ctx::type_reg::untagged::TypeReg, enum_iterator::Sequence, params::ParamsKey,
/// profile_model::Profile,
/// };
/// use serde::{Deserialize, Serialize};
///
/// /// Keys for workspace parameters.
/// #[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Deserialize, Serialize, Sequence)]
/// #[enum_iterator(crate = peace::enum_iterator)]
/// #[serde(rename_all = "snake_case")]
/// pub enum WorkspaceParamsKey {
/// /// Default profile to use.
/// Profile,
/// /// Which flow this workspace is using.
/// Flow,
/// }
///
/// impl ParamsKey for WorkspaceParamsKey {
/// fn register_value_type(self, type_reg: &mut TypeReg<Self>) {
/// match self {
/// Self::Profile => type_reg.register::<Profile>(self),
/// Self::Flow => type_reg.register::<EnvManFlow>(self),
/// }
/// }
/// }
///
/// impl CmdCtxTypes for MyCmdCtxTypes {
/// // ..
/// type WorkspaceParamsKey = WorkspaceParam;
/// }
/// ```
pub trait ParamsKey:
Clone + Debug + Eq + Hash + DeserializeOwned + Serialize + Sequence + Send + Sync + 'static
{
/// Registers the type of the value stored against this params key.
///
/// This informs the type registry how to deserialize the value when
/// encountering this key.
fn register_value_type(self, type_reg: &mut TypeReg<Self>);
}
impl ParamsKey for () {
fn register_value_type(self, _type_reg: &mut TypeReg<Self>) {}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/value_spec.rs | crate/params/src/value_spec.rs | use std::fmt::Debug;
use peace_resource_rt::{
resources::ts::SetUp, type_reg::untagged::BoxDataTypeDowncast, BorrowFail, Resources,
};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
use crate::{
AnySpecDataType, AnySpecRt, MappingFnId, MappingFnReg, MappingFns, ParamsResolveError,
ValueResolutionCtx, ValueSpecRt,
};
/// How to populate a field's value in an item's params.
///
/// The `MappingFn` variant's mapping function is `None` when deserialized, as
/// it is impossible to determine the underlying `F` and `U` type parameters for
/// the backing `MappingFnImpl`.
///
/// For deserialization:
///
/// 1. A `ParamsSpecsTypeReg` is constructed, and deserialization functions are
/// registered from `ItemId` to `ValueSpecDe<T, F, U>`, where `F` and `U` are
/// derived from the `ValueSpec` provided by the user.
///
/// 2. `value_specs.yaml` is deserialized using that type registry.
///
/// 3. Each `ValueSpecDe<T>` is mapped into a `ValueSpec<T>`, and subsequently
/// `AnySpecRtBoxed` to be passed around in a `CmdCtx`.
///
/// 4. These `AnySpecRtBoxed`s are downcasted back to `ValueSpec<T>` when
/// resolving values for item params and params partials.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(bound = "T: Clone + Debug + DeserializeOwned + Serialize + Send + Sync + 'static")]
pub enum ValueSpec<T>
where
T: Clone + Debug + DeserializeOwned + Serialize + Send + Sync + 'static,
{
/// Loads a stored value spec.
///
/// The value used is determined by the value spec that was
/// last stored in the `params_specs_file`. This means it
/// could be loaded as a `Value(T)` during context `build()`.
///
/// This variant may be provided when defining a command context
/// builder. However, this variant is never serialized, but
/// whichever value was *first* stored is re-loaded then
/// re-serialized.
///
/// If no value spec was previously serialized, then the command
/// context build will return an error.
Stored,
/// Uses the provided value.
///
/// The value used is whatever is passed in to the command context
/// builder.
Value {
/// The value to use.
value: T,
},
/// Uses a value loaded from `resources` at runtime.
///
/// The value may have been provided by workspace params, or
/// inserted by a predecessor at runtime.
InMemory,
/// Uses a mapped value loaded from `resources` at runtime.
///
/// The value may have been provided by workspace params, or
/// inserted by a predecessor at runtime, and is mapped by the
/// given function.
///
/// This is serialized as `MappingFn` with a string value. For
/// deserialization, there is no actual backing function, so
/// the user must provide the `MappingFn` in subsequent command
/// context builds.
MappingFn {
/// Name of the field to be mapped. `None` if this is the top level
/// object.
field_name: Option<String>,
/// The name of the mapping function.
mapping_fn_id: MappingFnId,
},
}
impl<T> From<T> for ValueSpec<T>
where
T: Clone + Debug + DeserializeOwned + Serialize + Send + Sync + 'static,
{
fn from(value: T) -> Self {
Self::Value { value }
}
}
impl<T> ValueSpec<T>
where
T: Clone + Debug + DeserializeOwned + Serialize + Send + Sync + 'static,
{
/// Returns the `ValueSpec::MappingFn` variant with the passed in values.
///
/// This is a convenience method for creating a `ValueSpec::MappingFn`
/// variant where the mapping function name is retrieved from
/// `mapping_fns.name()`.
pub fn mapping_fn<MFns>(field_name: Option<String>, mapping_fns: MFns) -> Self
where
MFns: MappingFns,
{
Self::MappingFn {
field_name,
mapping_fn_id: mapping_fns.id(),
}
}
/// Returns the value of `T` by applying this spec to the passed in
/// `resources`.
pub fn resolve(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<peace_resource_rt::resources::ts::SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<T, ParamsResolveError> {
match self {
ValueSpec::Value { value } => Ok(value.clone()),
ValueSpec::Stored | ValueSpec::InMemory => match resources.try_borrow::<T>() {
Ok(value) => Ok((*value).clone()),
Err(borrow_fail) => match borrow_fail {
BorrowFail::ValueNotFound => Err(ParamsResolveError::InMemory {
value_resolution_ctx: value_resolution_ctx.clone(),
}),
BorrowFail::BorrowConflictImm | BorrowFail::BorrowConflictMut => {
Err(ParamsResolveError::InMemoryBorrowConflict {
value_resolution_ctx: value_resolution_ctx.clone(),
})
}
},
},
ValueSpec::MappingFn {
field_name,
mapping_fn_id,
} => resolve_t_from_mapping_fn(
mapping_fn_reg,
resources,
value_resolution_ctx,
field_name.as_deref(),
mapping_fn_id,
),
}
}
pub fn resolve_partial(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<Option<T>, ParamsResolveError> {
match self {
ValueSpec::Value { value } => Ok(Some((*value).clone())),
ValueSpec::Stored | ValueSpec::InMemory => match resources.try_borrow::<T>() {
Ok(value) => Ok(Some((*value).clone())),
Err(borrow_fail) => match borrow_fail {
BorrowFail::ValueNotFound => Ok(None),
BorrowFail::BorrowConflictImm | BorrowFail::BorrowConflictMut => {
Err(ParamsResolveError::InMemoryBorrowConflict {
value_resolution_ctx: value_resolution_ctx.clone(),
})
}
},
},
ValueSpec::MappingFn {
field_name,
mapping_fn_id,
} => {
let mapping_fn = mapping_fn_reg.get(mapping_fn_id).ok_or_else(|| {
ParamsResolveError::mapping_fn_resolve(value_resolution_ctx, mapping_fn_id)
})?;
let box_dt_params_opt =
mapping_fn.try_map(resources, value_resolution_ctx, field_name.as_deref())?;
let t = box_dt_params_opt
.map(|box_dt_params| {
BoxDataTypeDowncast::<T>::downcast_ref(&box_dt_params)
.cloned()
.ok_or_else(|| ParamsResolveError::FromMapDowncast {
value_resolution_ctx: value_resolution_ctx.clone(),
to_type_name: tynm::type_name::<T>(),
})
})
.transpose()?;
Ok(t)
}
}
}
}
/// Returns a `T` by downcasting it from a `BoxDt` resolved by a mapping
/// function.
///
/// # Note
///
/// Update `ParamsSpec` and `ParamsSpecFieldless` as well when updating this
/// code.
fn resolve_t_from_mapping_fn<T>(
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
field_name: Option<&str>,
mapping_fn_id: &MappingFnId,
) -> Result<T, ParamsResolveError>
where
T: Clone + Debug + DeserializeOwned + Serialize + Send + Sync + 'static,
{
let mapping_fn = mapping_fn_reg.get(mapping_fn_id).ok_or_else(|| {
ParamsResolveError::mapping_fn_resolve(value_resolution_ctx, mapping_fn_id)
})?;
let box_dt_params = mapping_fn.map(resources, value_resolution_ctx, field_name)?;
BoxDataTypeDowncast::<T>::downcast_ref(&box_dt_params)
.cloned()
.ok_or_else(|| ParamsResolveError::FromMapDowncast {
value_resolution_ctx: value_resolution_ctx.clone(),
to_type_name: tynm::type_name::<T>(),
})
}
impl<T> AnySpecRt for ValueSpec<T>
where
T: Clone + Debug + DeserializeOwned + Serialize + Send + Sync + 'static,
{
fn is_usable(&self) -> bool {
match self {
Self::Stored => false,
Self::Value { .. } | Self::InMemory | Self::MappingFn { .. } => true,
}
}
fn merge(&mut self, other_boxed: &dyn AnySpecDataType)
where
Self: Sized,
{
let other: Option<&Self> = other_boxed.downcast_ref();
let other = other.unwrap_or_else(
#[cfg_attr(coverage_nightly, coverage(off))]
|| {
let self_ty_name = tynm::type_name::<Self>();
panic!(
"Failed to downcast value into `{self_ty_name}`. Value: `{other_boxed:#?}`."
);
},
);
match self {
// Use the spec that was previously stored
// (as opposed to previous value).
Self::Stored => *self = other.clone(),
// Use set value / no change on these variants
Self::Value { .. } | Self::InMemory | Self::MappingFn { .. } => {}
}
}
}
impl<T> ValueSpecRt for ValueSpec<T>
where
T: Clone + Debug + DeserializeOwned + Serialize + Send + Sync + 'static,
{
type ValueType = T;
fn resolve(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<T, ParamsResolveError> {
ValueSpec::<T>::resolve(self, mapping_fn_reg, resources, value_resolution_ctx)
}
fn try_resolve(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<Option<T>, ParamsResolveError> {
ValueSpec::<T>::resolve_partial(self, mapping_fn_reg, resources, value_resolution_ctx)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/field_name_and_type.rs | crate/params/src/field_name_and_type.rs | use serde::{Deserialize, Serialize};
/// A field name and its type.
#[derive(Clone, Debug, PartialEq, Eq, Serialize, Deserialize)]
pub struct FieldNameAndType {
/// Name of the field, e.g. `my_field`.
field_name: String,
/// Name of the type, e.g. `MyField`.
type_name: String,
}
impl FieldNameAndType {
pub fn new(field_name: String, type_name: String) -> Self {
Self {
field_name,
type_name,
}
}
pub fn field_name(&self) -> &str {
&self.field_name
}
pub fn type_name(&self) -> &str {
&self.type_name
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/params_spec_fieldless.rs | crate/params/src/params_spec_fieldless.rs | use std::fmt::Debug;
use peace_resource_rt::{
resources::ts::SetUp, type_reg::untagged::BoxDataTypeDowncast, BorrowFail, Resources,
};
use serde::{Deserialize, Serialize};
use crate::{
AnySpecDataType, AnySpecRt, MappingFnId, MappingFnReg, MappingFns, ParamsFieldless,
ParamsResolveError, ValueResolutionCtx, ValueSpecRt,
};
/// How to populate a field's value in an item's params.
///
/// The `MappingFn` variant's mapping function is `None` when deserialized, as
/// it is impossible to determine the underlying `F` and `U` type parameters for
/// the backing `MappingFnImpl`.
///
/// For deserialization:
///
/// 1. A `ParamsSpecsTypeReg` is constructed, and deserialization functions are
/// registered from `ItemId` to `ParamsSpecFieldlessDe<T, F, U>`, where `F`
/// and `U` are derived from the `ValueSpec` provided by the user.
///
/// 2. `value_specs.yaml` is deserialized using that type registry.
///
/// 3. Each `ParamsSpecFieldlessDe<T>` is mapped into a
/// `ParamsSpecFieldless<T>`, and subsequently `AnySpecRtBoxed` to be passed
/// around in a `CmdCtx`.
///
/// 4. These `AnySpecRtBoxed`s are downcasted back to `ParamsSpecFieldless<T,
/// MFns>` when resolving values for item params and params partials.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(bound = "T: ParamsFieldless")]
pub enum ParamsSpecFieldless<T>
where
T: ParamsFieldless + Clone + Debug + Send + Sync + 'static,
{
/// Loads a stored value spec.
///
/// The value used is determined by the value spec that was
/// last stored in the `params_specs_file`. This means it
/// could be loaded as a `Value(T)` during context `build()`.
///
/// This variant may be provided when defining a command context
/// builder. However, this variant is never serialized, but
/// whichever value was *first* stored is re-loaded then
/// re-serialized.
///
/// If no value spec was previously serialized, then the command
/// context build will return an error.
Stored,
/// Uses the provided value.
///
/// The value used is whatever is passed in to the command context
/// builder.
Value {
/// The value to use.
value: T,
},
/// Uses a value loaded from `resources` at runtime.
///
/// The value may have been provided by workspace params, or
/// inserted by a predecessor at runtime.
InMemory,
/// Uses a mapped value loaded from `resources` at runtime.
///
/// The value may have been provided by workspace params, or
/// inserted by a predecessor at runtime, and is mapped by the
/// given function.
///
/// This is serialized as `MappingFn` with a string value. For
/// deserialization, there is no actual backing function, so
/// the user must provide the `MappingFn` in subsequent command
/// context builds.
MappingFn {
/// Name of the field to be mapped. `None` if this is the top level
/// object.
field_name: Option<String>,
/// The name of the mapping function.
mapping_fn_id: MappingFnId,
},
}
impl<T> From<T> for ParamsSpecFieldless<T>
where
T: ParamsFieldless + Clone + Debug + Send + Sync + 'static,
{
fn from(value: T) -> Self {
Self::Value { value }
}
}
impl<T> ParamsSpecFieldless<T>
where
T: ParamsFieldless<Spec = ParamsSpecFieldless<T>> + Clone + Debug + Send + Sync + 'static,
T::Partial: From<T>,
{
/// Returns the `ParamsSpecFieldless::MappingFn` variant with the passed in
/// values.
///
/// This is a convenience method for creating a
/// `ParamsSpecFieldless::MappingFn` variant where the mapping function
/// name is retrieved from `mapping_fns.name()`.
pub fn mapping_fn<MFns>(field_name: Option<String>, mapping_fns: MFns) -> Self
where
MFns: MappingFns,
{
Self::MappingFn {
field_name,
mapping_fn_id: mapping_fns.id(),
}
}
/// Returns the value of `T` by applying this spec to the passed in
/// `resources`.
pub fn resolve(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<peace_resource_rt::resources::ts::SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<T, ParamsResolveError> {
match self {
ParamsSpecFieldless::Value { value } => Ok(value.clone()),
ParamsSpecFieldless::Stored | ParamsSpecFieldless::InMemory => {
match resources.try_borrow::<T>() {
Ok(value) => Ok((*value).clone()),
Err(borrow_fail) => match borrow_fail {
BorrowFail::ValueNotFound => Err(ParamsResolveError::InMemory {
value_resolution_ctx: value_resolution_ctx.clone(),
}),
BorrowFail::BorrowConflictImm | BorrowFail::BorrowConflictMut => {
Err(ParamsResolveError::InMemoryBorrowConflict {
value_resolution_ctx: value_resolution_ctx.clone(),
})
}
},
}
}
ParamsSpecFieldless::MappingFn {
field_name,
mapping_fn_id,
} => resolve_t_from_mapping_fn(
mapping_fn_reg,
resources,
value_resolution_ctx,
field_name.as_deref(),
mapping_fn_id,
),
}
}
pub fn resolve_partial(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<T::Partial, ParamsResolveError> {
match self {
ParamsSpecFieldless::Value { value } => Ok(T::Partial::from((*value).clone())),
ParamsSpecFieldless::Stored | ParamsSpecFieldless::InMemory => {
match resources.try_borrow::<T>() {
Ok(value) => Ok(T::Partial::from((*value).clone())),
Err(borrow_fail) => match borrow_fail {
BorrowFail::ValueNotFound => Ok(T::Partial::default()),
BorrowFail::BorrowConflictImm | BorrowFail::BorrowConflictMut => {
Err(ParamsResolveError::InMemoryBorrowConflict {
value_resolution_ctx: value_resolution_ctx.clone(),
})
}
},
}
}
ParamsSpecFieldless::MappingFn {
field_name,
mapping_fn_id,
} => {
let mapping_fn = mapping_fn_reg.get(mapping_fn_id).ok_or_else(|| {
ParamsResolveError::mapping_fn_resolve(value_resolution_ctx, mapping_fn_id)
})?;
let box_dt_params_opt =
mapping_fn.try_map(resources, value_resolution_ctx, field_name.as_deref())?;
let t_partial = box_dt_params_opt
.map(|box_dt_params| {
BoxDataTypeDowncast::<T>::downcast_ref(&box_dt_params)
.cloned()
.ok_or_else(|| ParamsResolveError::FromMapDowncast {
value_resolution_ctx: value_resolution_ctx.clone(),
to_type_name: tynm::type_name::<T>(),
})
.map(T::Partial::from)
})
.transpose()?
.unwrap_or_default();
Ok(t_partial)
}
}
}
}
/// Returns a `T` by downcasting it from a `BoxDt` resolved by a mapping
/// function.
///
/// # Note
///
/// Update `ParamsSpec` as well when updating this code.
fn resolve_t_from_mapping_fn<T>(
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
field_name: Option<&str>,
mapping_fn_id: &MappingFnId,
) -> Result<T, ParamsResolveError>
where
T: ParamsFieldless<Spec = ParamsSpecFieldless<T>> + Clone + Debug + Send + Sync + 'static,
T: ParamsFieldless,
{
let mapping_fn = mapping_fn_reg.get(mapping_fn_id).ok_or_else(|| {
ParamsResolveError::mapping_fn_resolve(value_resolution_ctx, mapping_fn_id)
})?;
let box_dt_params = mapping_fn.map(resources, value_resolution_ctx, field_name)?;
BoxDataTypeDowncast::<T>::downcast_ref(&box_dt_params)
.cloned()
.ok_or_else(|| ParamsResolveError::FromMapDowncast {
value_resolution_ctx: value_resolution_ctx.clone(),
to_type_name: tynm::type_name::<T>(),
})
}
impl<T> AnySpecRt for ParamsSpecFieldless<T>
where
T: ParamsFieldless<Spec = ParamsSpecFieldless<T>>
+ Clone
+ Debug
+ Serialize
+ Send
+ Sync
+ 'static,
{
fn is_usable(&self) -> bool {
match self {
Self::Stored => false,
Self::Value { .. } | Self::InMemory | Self::MappingFn { .. } => true,
}
}
fn merge(&mut self, other_boxed: &dyn AnySpecDataType)
where
Self: Sized,
{
let other: Option<&Self> = other_boxed.downcast_ref();
let other = other.unwrap_or_else(
#[cfg_attr(coverage_nightly, coverage(off))]
|| {
let self_ty_name = tynm::type_name::<Self>();
panic!(
"Failed to downcast value into `{self_ty_name}`. Value: `{other_boxed:#?}`."
);
},
);
match self {
// Use the spec that was previously stored
// (as opposed to previous value).
Self::Stored => *self = other.clone(),
// Use set value / no change on these variants
Self::Value { .. } | Self::InMemory | Self::MappingFn { .. } => {}
}
}
}
impl<T> ValueSpecRt for ParamsSpecFieldless<T>
where
T: ParamsFieldless<Spec = ParamsSpecFieldless<T>>
+ Clone
+ Debug
+ Serialize
+ Send
+ Sync
+ 'static,
T::Partial: From<T>,
T: TryFrom<T::Partial>,
{
type ValueType = T;
fn resolve(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<T, ParamsResolveError> {
ParamsSpecFieldless::<T>::resolve(self, mapping_fn_reg, resources, value_resolution_ctx)
}
fn try_resolve(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<Option<T>, ParamsResolveError> {
ParamsSpecFieldless::<T>::resolve_partial(
self,
mapping_fn_reg,
resources,
value_resolution_ctx,
)
.map(T::try_from)
.map(Result::ok)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/value_spec_rt.rs | crate/params/src/value_spec_rt.rs | use std::fmt::Debug;
use peace_resource_rt::{resources::ts::SetUp, Resources};
use crate::{AnySpecRt, MappingFnReg, ParamsResolveError, ValueResolutionCtx};
/// Runtime logic of how to look up values for each field in this struct.
///
/// This trait is automatically implemented by `#[derive(Params)]` on an
/// `Item::Params`, as well as in the `peace_params` crate for standard
/// library types.
pub trait ValueSpecRt: AnySpecRt {
/// The original value type. `MyParamsValueSpec::ValueType` is `MyParams`.
type ValueType: Clone + Debug + Send + Sync + 'static;
/// Resolves the value from resources.
///
/// This function returns an error if any value is not present in
/// [`Resources`]. For cases where missing values are not an error, see
/// [`try_resolve`].
///
/// [`try_resolve`]: Self::try_resolve
fn resolve(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<Self::ValueType, ParamsResolveError>;
/// Resolves the value from resources, returning `None` if it is not
/// present.
fn try_resolve(
&self,
mapping_fn_reg: &MappingFnReg,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
) -> Result<Option<Self::ValueType>, ParamsResolveError>;
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/std_impl.rs | crate/params/src/std_impl.rs | //! Trait and struct impls for standard library types.
#![allow(non_camel_case_types)]
#[cfg(not(target_arch = "wasm32"))]
use std::ffi::OsString;
use std::path::PathBuf;
use peace_params_derive::value_impl;
// IMPORTANT!
//
// When updating the types that implement `ParamsFieldless`, make sure to update
// `params_derive/src/util.rs#STD_LIB_TYPES`.
//
// These are the types that we don't require users to annotate with
// `#[value_spec(fieldless)]`, but will be treated as such.
impl_value_for!(
bool, u8, u16, u32, u64, u128, i8, i16, i32, i64, i128, usize, isize, String, PathBuf,
);
// WASM doesn't support serialization of `OsString`s.
#[cfg(not(target_arch = "wasm32"))]
value_impl!(
#[crate_internal]
#[value_spec(fieldless)]
struct OsString;
);
value_impl!(
#[crate_internal]
#[value_spec(fieldless)]
struct Option<T>
where
T: Clone + std::fmt::Debug + serde::Serialize + serde::de::DeserializeOwned;
);
value_impl!(
#[crate_internal]
#[value_spec(fieldless)]
struct Vec<T>
where
T: Clone + std::fmt::Debug + serde::Serialize + serde::de::DeserializeOwned;
);
macro_rules! impl_value_for {
($($T:ident),*,) => {
$(
value_impl!(
#[crate_internal]
#[value_spec(fieldless)]
struct $T;
);
)*
}
}
use impl_value_for;
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/mapping_fn_reg.rs | crate/params/src/mapping_fn_reg.rs | use std::{
collections::HashMap,
ops::{Deref, DerefMut},
};
use crate::{MappingFn, MappingFnId, MappingFns};
/// Map of serializable [`MappingFns`] to each [`MappingFn`] logic.
///
/// This is intended to be called by the Peace framework for each tool
/// implementor's [`MappingFns`] implementation.
#[derive(Debug)]
pub struct MappingFnReg(HashMap<MappingFnId, Box<dyn MappingFn>>);
impl MappingFnReg {
/// Returns a new `MappingFnRegistry`.
pub fn new() -> Self {
MappingFnReg(HashMap::new())
}
/// Returns a new `MappingFnRegistry` with the specified capacity.
pub fn with_capacity(capacity: usize) -> Self {
MappingFnReg(HashMap::with_capacity(capacity))
}
/// Registers a single `MappingFns` variant with this registry.
///
/// This is a convenience function for `mapping_fn_reg.insert(m_fns.name(),
/// m_fns.mapping_fn());`
pub fn register<MFns>(&mut self, m_fns: MFns)
where
MFns: MappingFns,
{
self.insert(m_fns.id(), m_fns.mapping_fn());
}
/// Registers all `MappingFns` from `MFns` with this registry.
///
/// This is a convenience function for `MFns::iter().for_each(|m_fns|
/// mapping_fn_reg.register::<MFns>());`
pub fn register_all<MFns>(&mut self)
where
MFns: MappingFns,
{
enum_iterator::all::<MFns>().for_each(|m_fns| self.register(m_fns));
}
}
impl Deref for MappingFnReg {
type Target = HashMap<MappingFnId, Box<dyn MappingFn>>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for MappingFnReg {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl Default for MappingFnReg {
fn default() -> Self {
Self::new()
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/mapping_fn_id.rs | crate/params/src/mapping_fn_id.rs | use serde::{Deserialize, Serialize};
/// ID of a mapping function. `String` newtype.
///
/// This is a string representation of a [`MappingFns`] variant, which allows
/// `*Spec`s to be serialized and deserialized and avoid:
///
/// * a `MFns: MappingFns` type parameter on each `*Spec` type -- which would
/// propagate to `Item`, causing undesired complexity in the `Item` trait.
/// * creating an object-safe trait corresponding to `MappingFns`, increasing
/// the maintenance burden.
///
/// # Implementors
///
/// The ID is considered API, and should be stable. This means you should name
/// each variant with a version number, and never remove that variant, e.g.
/// `MappingFnId::new("ServerNameFromProfile_V1_0_0" )`.
///
/// That way, previously stored mapping function IDs can still be
/// deserialized, and tool developers can opt-in to upgrading to the newer
/// mapping functions when ready.
///
/// [`MappingFns`]: crate::MappingFns
#[derive(Clone, Debug, PartialEq, Eq, Hash, Serialize, Deserialize)]
pub struct MappingFnId(String);
impl MappingFnId {
/// Returns a new `MappingFnId`.
pub fn new(name: String) -> Self {
MappingFnId(name)
}
/// Returns the inner string.
pub fn into_inner(self) -> String {
self.0
}
/// Returns a reference to the inner string.
pub fn as_str(&self) -> &str {
&self.0
}
/// Returns a mutable reference to the inner string.
pub fn as_mut_str(&mut self) -> &mut str {
&mut self.0
}
/// Returns the length of the inner string.
pub fn len(&self) -> usize {
self.0.len()
}
/// Returns `true` if the inner string is empty.
pub fn is_empty(&self) -> bool {
self.0.is_empty()
}
}
impl AsRef<str> for MappingFnId {
fn as_ref(&self) -> &str {
&self.0
}
}
impl From<String> for MappingFnId {
fn from(name: String) -> Self {
MappingFnId(name)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/params_fieldless.rs | crate/params/src/params_fieldless.rs | use std::fmt::Debug;
use serde::{de::DeserializeOwned, Serialize};
/// Field of an `Item::Params`.
///
/// This trait is automatically implemented by `#[derive(Value)]`.
///
/// This is *like* the [`Params`] trait, except it does not have the `FieldWise`
/// resolution functionality.
///
/// [`Params`]: crate::Params
pub trait ParamsFieldless:
Clone + Debug + Serialize + DeserializeOwned + Send + Sync + 'static
{
/// Convenience associated type for `ValueSpec<Self>`.
type Spec: Clone + Debug + Serialize + DeserializeOwned + Send + Sync + 'static;
/// The `Value` type, but with optional fields.
type Partial: Clone + Debug + Default + Send + Sync + 'static;
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/mapping_fn.rs | crate/params/src/mapping_fn.rs | use std::fmt::Debug;
use peace_resource_rt::{
resources::ts::SetUp,
type_reg::untagged::{BoxDt, DataType},
Resources,
};
use serde::{Serialize, Serializer};
use crate::{MappingFnImpl, ParamsResolveError, ValueResolutionCtx};
/// Type erased mapping function.
///
/// This is used by Peace to hold type-erased mapping functions, and is not
/// intended to be implemented by users or implementors.
pub trait MappingFn: Debug + DataType {
/// Returns a type-erased `MappingFn` that wraps the given function.
///
/// This allows different types of logic to be held as a common type.
///
/// # Implementors
///
/// This function is not intended to be overwritten -- perhaps it should be
/// placed in a sealed trait.
fn new<T, F, Args>(field_name: Option<String>, f: F) -> Box<dyn MappingFn>
where
MappingFnImpl<T, F, Args>: From<(Option<String>, F)> + MappingFn,
Self: Sized,
{
let mapping_fn = MappingFnImpl::from((field_name, f));
Box::new(mapping_fn)
}
/// Maps data in resources to the output type, used for `Item::Params`.
///
/// The data being accessed is defined by the implementation of this
/// function.
///
/// # Parameters
///
/// * `resources`: Resources to resolve values from.
/// * `value_resolution_ctx`: Fields traversed during this value resolution.
fn map(
&self,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
field_name: Option<&str>,
) -> Result<BoxDt, ParamsResolveError>;
/// Maps data in resources to the output type, used for `Item::Params`.
///
/// The data being accessed is defined by the implementation of this
/// function.
///
/// # Parameters
///
/// * `resources`: Resources to resolve values from.
/// * `value_resolution_ctx`: Fields traversed during this value resolution.
fn try_map(
&self,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
field_name: Option<&str>,
) -> Result<Option<BoxDt>, ParamsResolveError>;
/// Returns whether this mapping function actually holds the function logic.
///
/// Deserialized mapping functions will not hold any function logic, and
/// Peace uses this function to determine if this is an empty `MappingFn`.
fn is_valued(&self) -> bool;
}
impl Clone for Box<dyn MappingFn> {
fn clone(&self) -> Self {
dyn_clone::clone_box(&**self)
}
}
impl Serialize for dyn MappingFn + '_ {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
// Sadly the following doesn't work, it says the lifetime of:
// `&'1 self` must outlive `'static`
//
// let data_type: &(dyn DataType + 'a) = &self;
// Serialize::serialize(data_type, serializer)
// so we have to depend on `erased_serde` directly
erased_serde::serialize(self, serializer)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/params_specs.rs | crate/params/src/params_specs.rs | use std::ops::{Deref, DerefMut};
use peace_item_model::ItemId;
use peace_resource_rt::type_reg::untagged::TypeMap;
use serde::Serialize;
use crate::AnySpecRtBoxed;
/// Map of item ID to its params' specs. `TypeMap<ItemId,
/// AnySpecRtBoxed>` newtype.
///
/// The concrete `*ValueSpec` type can be obtained by calling
/// `.get(item_id)` with the correct type:
///
/// ```rust,ignore
/// let item_params_spec = MyItemParams::spec().build();
/// let mut params_specs = ParamsSpecs::new();
/// params_specs.insert(item_id!("my_item"), item_params_spec);
///
/// // later
///
/// let item_params_spec = params_specs.get::<MyItemParams, _>(&item_id!("my_item"));
/// ```
///
/// The information may not be of the same type across flows, as flows are
/// different in what they are doing.
#[derive(Clone, Debug, Default, Serialize)]
#[serde(transparent)] // Needed to serialize as a map instead of a list.
pub struct ParamsSpecs(TypeMap<ItemId, AnySpecRtBoxed>);
impl ParamsSpecs {
/// Returns a new `ParamsSpecs` map.
pub fn new() -> Self {
Self::default()
}
/// Creates an empty `ParamsSpecs` map with the specified capacity.
///
/// The `ParamsSpecs` will be able to hold at least capacity
/// elements without reallocating. If capacity is 0, the map will not
/// allocate.
pub fn with_capacity(capacity: usize) -> Self {
Self(TypeMap::with_capacity_typed(capacity))
}
/// Returns the inner map.
pub fn into_inner(self) -> TypeMap<ItemId, AnySpecRtBoxed> {
self.0
}
}
impl Deref for ParamsSpecs {
type Target = TypeMap<ItemId, AnySpecRtBoxed>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for ParamsSpecs {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl From<TypeMap<ItemId, AnySpecRtBoxed>> for ParamsSpecs {
fn from(type_map: TypeMap<ItemId, AnySpecRtBoxed>) -> Self {
Self(type_map)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/params/src/mapping_fn_impl.rs | crate/params/src/mapping_fn_impl.rs | use std::{
fmt::{self, Debug},
marker::PhantomData,
};
use peace_data::marker::{ApplyDry, Clean, Current, Goal};
use peace_resource_rt::{resources::ts::SetUp, type_reg::untagged::BoxDt, BorrowFail, Resources};
use serde::{de::DeserializeOwned, Deserialize, Serialize, Serializer};
use crate::{
FromFunc, Func, MappingFn, ParamsResolveError, ValueResolutionCtx, ValueResolutionMode,
};
#[cfg(feature = "item_state_example")]
use peace_data::marker::Example;
/// Wrapper around a mapping function so that it can be serialized.
#[derive(Clone, Serialize, Deserialize)]
pub struct MappingFnImpl<T, F, Args> {
#[serde(
default = "MappingFnImpl::<T, F, Args>::fn_map_none",
skip_deserializing,
serialize_with = "MappingFnImpl::<T, F, Args>::fn_map_serialize"
)]
fn_map: Option<F>,
/// Marker.
marker: PhantomData<(T, Args)>,
}
impl MappingFnImpl<(), fn(), ()> {
/// Returns an empty mapping function, used for `()`.
pub fn empty() -> Self {
Self {
fn_map: Some(|| ()),
marker: PhantomData,
}
}
}
impl MappingFn for MappingFnImpl<(), fn(), ()> {
fn map(
&self,
_resources: &Resources<SetUp>,
_value_resolution_ctx: &mut ValueResolutionCtx,
_field_name: Option<&str>,
) -> Result<BoxDt, ParamsResolveError> {
Ok(BoxDt::new(()))
}
fn try_map(
&self,
_resources: &Resources<SetUp>,
_value_resolution_ctx: &mut ValueResolutionCtx,
_field_name: Option<&str>,
) -> Result<Option<BoxDt>, ParamsResolveError> {
Ok(Some(BoxDt::new(())))
}
fn is_valued(&self) -> bool {
true
}
}
impl<T, F, Args> Debug for MappingFnImpl<T, F, Args>
where
T: Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("MappingFnImpl")
.field("fn_map", &Self::fn_map_stringify(&self.fn_map))
.field("marker", &self.marker)
.finish()
}
}
impl<T, F, Args> MappingFnImpl<T, F, Args> {
fn fn_map_serialize<S>(fn_map: &Option<F>, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.serialize_str(&Self::fn_map_stringify(fn_map))
}
fn fn_map_stringify(fn_map: &Option<F>) -> String {
match fn_map {
Some(_) => {
let args = {
let args_type_name = tynm::type_name::<Args>();
let mut buffer = String::with_capacity(args_type_name.len() + 32);
args_type_name.chars().fold(0, |mut nesting_level, c| {
buffer.push(c);
match c {
'(' => {
nesting_level += 1;
if nesting_level == 1 {
buffer.push('&');
}
}
')' => nesting_level -= 1,
' ' => {
if nesting_level == 1 {
buffer.push('&');
}
}
_ => (),
}
nesting_level
});
buffer
};
format!(
"Some(Fn{args} -> Option<{t}>)",
t = tynm::type_name::<T>(),
args = args,
)
}
None => String::from("None"),
}
}
fn fn_map_none() -> Option<F> {
None
}
}
macro_rules! impl_mapping_fn_impl {
($($Arg:ident $var:ident),+) => {
// impl<T, F, A0> MappingFnImpl<T, F, (A0,)>
impl<T, F, $($Arg,)+> MappingFnImpl<T, F, ($($Arg,)+)>
where
T: Clone + Debug + DeserializeOwned + Serialize + Send + Sync + 'static,
F: Fn($(&$Arg,)+) -> Option<T> + Clone + Send + Sync + 'static,
$($Arg: Clone + Debug + Send + Sync + 'static,)+
{
/// Returns a new `MappingFnImpl` that tracks the types of the mapping fn logic.
///
/// # Developers
///
/// Developers should use the [`MappingFnImpl::from_func`] function, which returns the type erased `Box<dyn MappingFn>`.
///
/// # Maintainers
///
/// When we provide the following inherent function:
///
/// ```rust,ignore
/// /// Returns a new `MappingFnImpl` that tracks the /// types of the mapping fn logic.
/// pub fn new_erased(f: F) -> Box<dyn MappingFn> {
/// Box::new(Self::new(f))
/// }
/// ```
///
/// For some reason Rust cannot infer the `Args` type parameter as `(&u8,)` when users call:
///
/// ```rust,ignore
/// MappingFnImpl::new_erased(|_: &u8| Some(vec![1u8]))
/// ```
///
/// But somehow it works with the `FromFunc` trait.
pub fn new(fn_map: F) -> Self {
Self {
fn_map: Some(fn_map),
marker: PhantomData,
}
}
/// Maps the values in resources into the value for this parameter.
///
/// # Parameters
///
/// * `resources`: Resources to resolve values from.
/// * `value_resolution_ctx`: Fields traversed during this value resolution.
/// * `field_name`: This field's name within its parent struct. `None` if this is the top level value type.
pub fn map(
&self,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
field_name: Option<&str>,
) -> Result<BoxDt, ParamsResolveError> {
let fn_map = self.fn_map.as_ref().unwrap_or_else(
#[cfg_attr(coverage_nightly, coverage(off))]
|| {
panic!("`MappingFnImpl::map` called when `fn_map` is `None`\
{for_field_name}.\n\
This is a bug in the Peace framework.\n\
\n\
Type parameters are:\n\
\n\
* `T`: {t}\n\
* `Args`: ({Args})\n\
",
for_field_name = field_name
.map(|field_name| format!(" for field: `{field_name}`"))
.unwrap_or("".to_string()),
t = tynm::type_name::<T>(),
Args = tynm::type_name::<($($Arg,)+)>(),
);
});
// We have to duplicate code because the return type from
// `resources.try_borrow` is different per branch.
match value_resolution_ctx.value_resolution_mode() {
#[cfg(feature = "item_state_example")]
ValueResolutionMode::Example => {
$(arg_resolve!(resources, value_resolution_ctx, Example, $var, $Arg);)+
let t_result = fn_map($(&$var,)+).ok_or(ParamsResolveError::FromMap {
value_resolution_ctx: value_resolution_ctx.clone(),
from_type_name: tynm::type_name::<($($Arg,)+)>(),
});
t_result.map(BoxDt::new)
}
ValueResolutionMode::Clean => {
$(arg_resolve!(resources, value_resolution_ctx, Clean, $var, $Arg);)+
let t_result = fn_map($(&$var,)+).ok_or(ParamsResolveError::FromMap {
value_resolution_ctx: value_resolution_ctx.clone(),
from_type_name: tynm::type_name::<($($Arg,)+)>(),
});
t_result.map(BoxDt::new)
}
ValueResolutionMode::Current => {
$(arg_resolve!(resources, value_resolution_ctx, Current, $var, $Arg);)+
let t_result = fn_map($(&$var,)+).ok_or(ParamsResolveError::FromMap {
value_resolution_ctx: value_resolution_ctx.clone(),
from_type_name: tynm::type_name::<($($Arg,)+)>(),
});
t_result.map(BoxDt::new)
}
ValueResolutionMode::Goal => {
$(arg_resolve!(resources, value_resolution_ctx, Goal, $var, $Arg);)+
let t_result = fn_map($(&$var,)+).ok_or(ParamsResolveError::FromMap {
value_resolution_ctx: value_resolution_ctx.clone(),
from_type_name: tynm::type_name::<($($Arg,)+)>(),
});
t_result.map(BoxDt::new)
}
ValueResolutionMode::ApplyDry => {
$(arg_resolve!(resources, value_resolution_ctx, ApplyDry, $var, $Arg);)+
let t_result = fn_map($(&$var,)+).ok_or(ParamsResolveError::FromMap {
value_resolution_ctx: value_resolution_ctx.clone(),
from_type_name: tynm::type_name::<($($Arg,)+)>(),
});
t_result.map(BoxDt::new)
}
}
}
/// Maps the values in resources into the value for this parameter.
///
/// # Parameters
///
/// * `resources`: Resources to resolve values from.
/// * `value_resolution_ctx`: Fields traversed during this value resolution.
/// * `field_name`: This field's name within its parent struct. `None` if this is the top level value type.
pub fn try_map(
&self,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
field_name: Option<&str>,
) -> Result<Option<BoxDt>, ParamsResolveError> {
let fn_map = self.fn_map.as_ref().unwrap_or_else(
#[cfg_attr(coverage_nightly, coverage(off))]
|| {
panic!("`MappingFnImpl::try_map` called when `fn_map` is `None`\
{for_field_name}.\n\
This is a bug in the Peace framework.\n\
\n\
Type parameters are:\n\
\n\
* `T`: {t}\n\
* `Args`: ({Args})\n\
",
for_field_name = field_name
.map(|field_name| format!(" for field: `{field_name}`"))
.unwrap_or("".to_string()),
t = tynm::type_name::<T>(),
Args = tynm::type_name::<($($Arg,)+)>(),
);
});
// We have to duplicate code because the return type from
// `resources.try_borrow` is different per branch.
match value_resolution_ctx.value_resolution_mode() {
#[cfg(feature = "item_state_example")]
ValueResolutionMode::Example => {
$(try_arg_resolve!(resources, value_resolution_ctx, Example, $var, $Arg);)+
let t_params_opt = fn_map($(&$var,)+);
let box_dt_opt = t_params_opt.map(BoxDt::new);
Ok(box_dt_opt)
}
ValueResolutionMode::Clean => {
$(try_arg_resolve!(resources, value_resolution_ctx, Clean, $var, $Arg);)+
let t_params_opt = fn_map($(&$var,)+);
let box_dt_opt = t_params_opt.map(BoxDt::new);
Ok(box_dt_opt)
}
ValueResolutionMode::Current => {
$(try_arg_resolve!(resources, value_resolution_ctx, Current, $var, $Arg);)+
let t_params_opt = fn_map($(&$var,)+);
let box_dt_opt = t_params_opt.map(BoxDt::new);
Ok(box_dt_opt)
}
ValueResolutionMode::Goal => {
$(try_arg_resolve!(resources, value_resolution_ctx, Goal, $var, $Arg);)+
let t_params_opt = fn_map($(&$var,)+);
let box_dt_opt = t_params_opt.map(BoxDt::new);
Ok(box_dt_opt)
}
ValueResolutionMode::ApplyDry => {
$(try_arg_resolve!(resources, value_resolution_ctx, ApplyDry, $var, $Arg);)+
let t_params_opt = fn_map($(&$var,)+);
let box_dt_opt = t_params_opt.map(BoxDt::new);
Ok(box_dt_opt)
}
}
}
}
impl<T, F, $($Arg,)+> FromFunc<F> for MappingFnImpl<T, F, ($($Arg,)+)>
where
T: Clone + Debug + DeserializeOwned + Serialize + Send + Sync + 'static,
// Ideally we can do:
//
// ```rust
// F: Fn<($($Arg,)+), Output = Option<T>>
// ```
//
// But this is pending <rust-lang/rust#29625>
F: Func<Option<T>, ($($Arg,)+)>
+ Fn($(&$Arg,)+) -> Option<T>
+ Clone + Send + Sync + 'static,
$($Arg: Clone + Debug + Send + Sync + 'static,)+
{
fn from_func(f: F) -> Box<dyn MappingFn> {
Box::new(Self::new(f))
}
}
impl<T, F, $($Arg,)+> From<F> for MappingFnImpl<T, F, ($($Arg,)+)>
where
T: Clone + Debug + DeserializeOwned + Serialize + Send + Sync + 'static,
F: Fn($(&$Arg,)+) -> Option<T> + Clone + Send + Sync + 'static,
$($Arg: Clone + Debug + Send + Sync + 'static,)+
{
fn from(f: F) -> Self {
Self::new(f)
}
}
impl<T, F, $($Arg,)+> MappingFn for MappingFnImpl<T, F, ($($Arg,)+)>
where
T: Clone + Debug + DeserializeOwned + Serialize + Send + Sync + 'static,
F: Fn($(&$Arg,)+) -> Option<T> + Clone + Send + Sync + 'static,
$($Arg: Clone + Debug + Send + Sync + 'static,)+
{
fn map(
&self,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
field_name: Option<&str>,
) -> Result<BoxDt, ParamsResolveError> {
MappingFnImpl::<T, F, ($($Arg,)+)>::map(self, resources, value_resolution_ctx, field_name)
}
fn try_map(
&self,
resources: &Resources<SetUp>,
value_resolution_ctx: &mut ValueResolutionCtx,
field_name: Option<&str>,
) -> Result<Option<BoxDt>, ParamsResolveError> {
MappingFnImpl::<T, F, ($($Arg,)+)>::try_map(self, resources, value_resolution_ctx, field_name)
}
fn is_valued(&self) -> bool {
self.fn_map.is_some()
}
}
};
}
#[derive(Debug)]
enum BorrowedData<Marked, T> {
Marked(Marked),
Direct(T),
}
macro_rules! arg_resolve {
(
$resources:ident,
$value_resolution_ctx:ident,
$value_resolution_mode:ident,
$arg:ident,
$Arg:ident
) => {
// Prioritize data marker wrapper over direct data borrow.
let borrow_marked_data_result = $resources.try_borrow::<$value_resolution_mode<$Arg>>();
let borrow_direct = $resources.try_borrow::<$Arg>();
let borrowed_data = match borrow_marked_data_result {
Ok(borrow_marked_data) => BorrowedData::Marked(borrow_marked_data),
Err(borrow_fail) => match borrow_fail {
// Either:
//
// * `A0` in the function is incorrect, so `Current<A0>` is not registered by any
// item, or
// * There is a bug in Peace.
BorrowFail::ValueNotFound => match borrow_direct {
Ok(arg) => BorrowedData::Direct(arg),
Err(borrow_fail) => match borrow_fail {
// Either:
//
// * `A0` in the function is incorrect, so `Current<A0>` is not registered
// by any item, or
// * There is a bug in Peace.
BorrowFail::ValueNotFound => {
return Err(ParamsResolveError::FromMap {
value_resolution_ctx: $value_resolution_ctx.clone(),
from_type_name: tynm::type_name::<$Arg>(),
});
}
BorrowFail::BorrowConflictImm | BorrowFail::BorrowConflictMut => {
return Err(ParamsResolveError::FromMapBorrowConflict {
value_resolution_ctx: $value_resolution_ctx.clone(),
from_type_name: tynm::type_name::<$Arg>(),
});
}
},
},
BorrowFail::BorrowConflictImm | BorrowFail::BorrowConflictMut => {
return Err(ParamsResolveError::FromMapBorrowConflict {
value_resolution_ctx: $value_resolution_ctx.clone(),
from_type_name: tynm::type_name::<$Arg>(),
});
}
},
};
let $arg = match &borrowed_data {
BorrowedData::Marked(marked_data) => match marked_data.as_ref() {
Some(data) => data,
None => {
return Err(ParamsResolveError::FromMap {
value_resolution_ctx: $value_resolution_ctx.clone(),
from_type_name: tynm::type_name::<$Arg>(),
});
}
},
BorrowedData::Direct(data) => data,
};
};
}
macro_rules! try_arg_resolve {
(
$resources:ident,
$value_resolution_ctx:ident,
$value_resolution_mode:ident,
$arg:ident,
$Arg:ident
) => {
// Prioritize data marker wrapper over direct data borrow.
let borrow_marked_data_result = $resources.try_borrow::<$value_resolution_mode<$Arg>>();
let borrow_direct = $resources.try_borrow::<$Arg>();
let borrowed_data = match borrow_marked_data_result {
Ok(borrow_marked_data) => BorrowedData::Marked(borrow_marked_data),
Err(borrow_fail) => match borrow_fail {
// Either:
//
// * `A0` in the function is incorrect, so `Current<A0>` is not registered by any
// item, or
// * There is a bug in Peace.
BorrowFail::ValueNotFound => match borrow_direct {
Ok(arg) => BorrowedData::Direct(arg),
Err(borrow_fail) => match borrow_fail {
// Either:
//
// * `A0` in the function is incorrect, so `Current<A0>` is not registered
// by any item, or
// * There is a bug in Peace.
BorrowFail::ValueNotFound => return Ok(None),
BorrowFail::BorrowConflictImm | BorrowFail::BorrowConflictMut => {
return Err(ParamsResolveError::FromMapBorrowConflict {
value_resolution_ctx: $value_resolution_ctx.clone(),
from_type_name: tynm::type_name::<$Arg>(),
});
}
},
},
BorrowFail::BorrowConflictImm | BorrowFail::BorrowConflictMut => {
return Err(ParamsResolveError::FromMapBorrowConflict {
value_resolution_ctx: $value_resolution_ctx.clone(),
from_type_name: tynm::type_name::<$Arg>(),
});
}
},
};
let $arg = match &borrowed_data {
BorrowedData::Marked(marked_data) => match marked_data.as_ref() {
Some(data) => data,
None => return Ok(None),
},
BorrowedData::Direct(data) => &data,
};
};
}
// We can add more if we need to support more args.
//
// There is a compile time / Rust analyzer startup cost to it, so it's better to
// not generate more than we need.
impl_mapping_fn_impl!(A0 a0);
impl_mapping_fn_impl!(A0 a0, A1 a1);
impl_mapping_fn_impl!(A0 a0, A1 a1, A2 a2);
impl_mapping_fn_impl!(A0 a0, A1 a1, A2 a2, A3 a3);
impl_mapping_fn_impl!(A0 a0, A1 a1, A2 a2, A3 a3, A4 a4);
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/cmd_model/src/item_stream_outcome.rs | crate/cmd_model/src/item_stream_outcome.rs | use fn_graph::StreamOutcomeState;
use peace_item_model::ItemId;
/// How a `Flow` stream operation ended and IDs that were processed.
///
/// Currently this is constructed by `ItemStreamOutcomeMapper`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ItemStreamOutcome<T> {
/// The value of the outcome.
pub value: T,
/// How a `Flow` stream operation ended.
pub state: StreamOutcomeState,
/// IDs of the items that were processed.
pub item_ids_processed: Vec<ItemId>,
/// IDs of the items that were not processed.
pub item_ids_not_processed: Vec<ItemId>,
}
impl<T> ItemStreamOutcome<T> {
/// Returns an `ItemStreamOutcome` that is `Finished<T>`.
pub fn finished_with(value: T, item_ids_processed: Vec<ItemId>) -> Self {
Self {
value,
state: StreamOutcomeState::Finished,
item_ids_processed,
item_ids_not_processed: Vec::new(),
}
}
/// Maps this outcome's value to another.
pub fn map<TNew>(self, f: impl FnOnce(T) -> TNew) -> ItemStreamOutcome<TNew> {
let ItemStreamOutcome {
value,
state,
item_ids_processed,
item_ids_not_processed,
} = self;
let value = f(value);
ItemStreamOutcome {
value,
state,
item_ids_processed,
item_ids_not_processed,
}
}
/// Replaces the value from this outcome with another.
pub fn replace<TNew>(self, value_new: TNew) -> (ItemStreamOutcome<TNew>, T) {
let ItemStreamOutcome {
value: value_existing,
state,
item_ids_processed,
item_ids_not_processed,
} = self;
(
ItemStreamOutcome {
value: value_new,
state,
item_ids_processed,
item_ids_not_processed,
},
value_existing,
)
}
/// Replaces the value from this outcome with another, taking the current
/// value as a parameter.
pub fn replace_with<TNew, U>(
self,
f: impl FnOnce(T) -> (TNew, U),
) -> (ItemStreamOutcome<TNew>, U) {
let ItemStreamOutcome {
value,
state,
item_ids_processed,
item_ids_not_processed,
} = self;
let (value, extracted) = f(value);
(
ItemStreamOutcome {
value,
state,
item_ids_processed,
item_ids_not_processed,
},
extracted,
)
}
pub fn into_value(self) -> T {
self.value
}
pub fn value(&self) -> &T {
&self.value
}
pub fn value_mut(&mut self) -> &mut T {
&mut self.value
}
pub fn state(&self) -> StreamOutcomeState {
self.state
}
pub fn item_ids_processed(&self) -> &[ItemId] {
self.item_ids_processed.as_ref()
}
pub fn item_ids_not_processed(&self) -> &[ItemId] {
self.item_ids_not_processed.as_ref()
}
}
impl<T> Default for ItemStreamOutcome<T>
where
T: Default,
{
fn default() -> Self {
Self {
value: T::default(),
state: StreamOutcomeState::NotStarted,
item_ids_processed: Vec::new(),
item_ids_not_processed: Vec::new(),
}
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/cmd_model/src/lib.rs | crate/cmd_model/src/lib.rs | //! Data types for commands for the Peace framework.
//!
//! Currently contains types for better error messages.
// Re-exports
pub use fn_graph;
pub use indexmap;
pub use crate::{
cmd_block_desc::CmdBlockDesc,
cmd_block_outcome::CmdBlockOutcome,
cmd_execution_error::{CmdExecutionError, InputFetchError},
cmd_execution_id::CmdExecutionId,
cmd_outcome::CmdOutcome,
item_stream_outcome::ItemStreamOutcome,
stream_outcome_and_errors::StreamOutcomeAndErrors,
value_and_stream_outcome::ValueAndStreamOutcome,
};
mod cmd_block_desc;
mod cmd_block_outcome;
mod cmd_execution_error;
mod cmd_execution_id;
mod cmd_outcome;
mod item_stream_outcome;
mod stream_outcome_and_errors;
mod value_and_stream_outcome;
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/cmd_model/src/cmd_block_outcome.rs | crate/cmd_model/src/cmd_block_outcome.rs | use fn_graph::StreamOutcome;
use indexmap::IndexMap;
use peace_item_model::ItemId;
use crate::{StreamOutcomeAndErrors, ValueAndStreamOutcome};
/// Outcome of running `CmdBlock::exec`.
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum CmdBlockOutcome<T, E> {
/// Single value returned by the command block.
///
/// Relevant to command blocks that deal with a single atomic operation,
/// e.g. loading a file.
Single(T),
/// A value returned per item.
///
/// # Design Note
///
/// When implementing this, the following structures were considered:
///
/// * Having separate fields for `T` and `StreamOutcome<()>`.
/// * Having a single `StreamOutcome<T>` field.
///
/// The first design makes it easier to access the value, and the second
/// design ensures that you cannot access the value and accidentally forget
/// about the stream outcome.
///
/// Because this is an enum variant, consumers are not likely to miss the
/// stream outcome even if the first design is chosen.
///
/// Having a `StreamOutcome<()>` separate from the value means consumers can
/// choose to ignore the `StreamOutcome` more easily.
///
/// However, the `CmdBlock::exec` return type is also affected by this --
/// having consumers return a `StreamOutcome<T>` allows them to use the
/// `FnGraph` streaming methods, without having to split the value out of
/// the `StreamOutcome`.
ItemWise {
/// The values returned per item.
stream_outcome: StreamOutcome<T>,
/// Errors from the command execution.
errors: IndexMap<ItemId, E>,
},
}
impl<T, E> CmdBlockOutcome<T, E> {
/// Returns a new `CmdBlockOutcome::ItemWise` with the given value, and no
/// errors.
pub fn new_item_wise(stream_outcome: StreamOutcome<T>) -> Self {
Self::ItemWise {
stream_outcome,
errors: IndexMap::new(),
}
}
/// Returns whether the command ran successfully.
pub fn is_ok(&self) -> bool {
match self {
Self::Single(_) => true,
Self::ItemWise {
stream_outcome: _,
errors,
} => errors.is_empty(),
}
}
/// Returns this outcome's value if there are no errors, otherwise returns
/// self.
pub fn try_into_value(self) -> Result<ValueAndStreamOutcome<T>, StreamOutcomeAndErrors<T, E>> {
match self {
Self::Single(value) => Ok(ValueAndStreamOutcome {
value,
stream_outcome: None,
}),
Self::ItemWise {
stream_outcome,
errors,
} => {
if errors.is_empty() {
let (stream_outcome, value) = stream_outcome.replace(());
Ok(ValueAndStreamOutcome {
value,
stream_outcome: Some(stream_outcome),
})
} else {
Err(StreamOutcomeAndErrors {
stream_outcome,
errors,
})
}
}
}
}
/// Returns whether the command encountered any errors during execution.
pub fn is_err(&self) -> bool {
match self {
Self::Single(_) => false,
Self::ItemWise {
stream_outcome: _,
errors,
} => !errors.is_empty(),
}
}
pub fn map<F, U>(self, f: F) -> CmdBlockOutcome<U, E>
where
F: FnOnce(T) -> U,
{
match self {
Self::Single(t) => {
let u = f(t);
CmdBlockOutcome::Single(u)
}
Self::ItemWise {
stream_outcome,
errors,
} => {
let stream_outcome = stream_outcome.map(f);
CmdBlockOutcome::ItemWise {
stream_outcome,
errors,
}
}
}
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/cmd_model/src/cmd_execution_id.rs | crate/cmd_model/src/cmd_execution_id.rs | use std::ops::Deref;
use serde::{Deserialize, Serialize};
/// ID of a command execution.
///
/// Uniqueness is not yet defined -- these may overlap with IDs from different
/// machines.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
pub struct CmdExecutionId(u64);
impl CmdExecutionId {
/// Returns a new `CmdExecutionId`.
pub fn new(id: u64) -> Self {
Self(id)
}
/// Returns the underlying ID.
pub fn into_inner(self) -> u64 {
self.0
}
}
impl Deref for CmdExecutionId {
type Target = u64;
fn deref(&self) -> &Self::Target {
&self.0
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/cmd_model/src/stream_outcome_and_errors.rs | crate/cmd_model/src/stream_outcome_and_errors.rs | use fn_graph::StreamOutcome;
use indexmap::IndexMap;
use peace_item_model::ItemId;
/// `CmdBlock` stream outcome and item wise errors.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct StreamOutcomeAndErrors<T, E> {
/// The `CmdBlock` stream outcome.
pub stream_outcome: StreamOutcome<T>,
/// The errors during processing,
pub errors: IndexMap<ItemId, E>,
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/cmd_model/src/value_and_stream_outcome.rs | crate/cmd_model/src/value_and_stream_outcome.rs | use fn_graph::StreamOutcome;
/// `CmdBlock` outcome value on success, and its `StreamOutcome` if applicable.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct ValueAndStreamOutcome<T> {
/// The value returned by the `CmdBlock`.
pub value: T,
/// If the block streams each item in its logic, then this contains the
/// stream outcome.
pub stream_outcome: Option<StreamOutcome<()>>,
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/cmd_model/src/cmd_block_desc.rs | crate/cmd_model/src/cmd_block_desc.rs | /// String representation of the `CmdBlock` in a `CmdExecution`.
///
/// This is used to provide a well-formatted error message so that developers
/// can identify where a bug lies more easily.
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct CmdBlockDesc {
/// Short name of the command block.
cmd_block_name: String,
/// Short type names of `CmdBlock::InputT`.
///
/// * If `InputT` is the unit struct `()`, this should be empty.
/// * If `InputT` is a named struct, this should contain just one `String`.
/// * If `InputT` is a tuple, this should contain one `String` per type
/// within the tuple.
cmd_block_input_names: Vec<String>,
/// Short type names of `CmdBlock::Outcome`.
///
/// * If `Outcome` is the unit struct `()`, this should be empty.
/// * If `Outcome` is a named struct, this should contain just one `String`.
/// * If `Outcome` is a tuple, this should contain one `String` per type
/// within the tuple.
cmd_block_outcome_names: Vec<String>,
}
impl CmdBlockDesc {
/// Returns a new `CmdBlockDesc`
pub fn new(
cmd_block_name: String,
cmd_block_input_names: Vec<String>,
cmd_block_outcome_names: Vec<String>,
) -> Self {
Self {
cmd_block_name,
cmd_block_input_names,
cmd_block_outcome_names,
}
}
/// Returns the short name of the command block, e.g.
/// `"StatesCurrentReadCmdBlock"`.
pub fn cmd_block_name(&self) -> &str {
self.cmd_block_name.as_ref()
}
/// Returns the short type names of `CmdBlock::InputT`, e.g.
/// `["States<Current>", "States<Goal>"]`.
///
/// * If `InputT` is the unit struct `()`, this should be empty.
/// * If `InputT` is a named struct, this should contain just one `String`.
/// * If `InputT` is a tuple, this should contain one `String` per type
/// within the tuple.
pub fn cmd_block_input_names(&self) -> &[String] {
self.cmd_block_input_names.as_ref()
}
/// Returns the short type names of `CmdBlock::Outcome`, e.g.
/// `["StateDiffs"]`.
///
/// * If `Outcome` is the unit struct `()`, this should be empty.
/// * If `Outcome` is a named struct, this should contain just one `String`.
/// * If `Outcome` is a tuple, this should contain one `String` per type
/// within the tuple.
pub fn cmd_block_outcome_names(&self) -> &[String] {
self.cmd_block_outcome_names.as_ref()
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/cmd_model/src/cmd_execution_error.rs | crate/cmd_model/src/cmd_execution_error.rs | use std::fmt::Debug;
pub use self::input_fetch_error::InputFetchError;
mod input_fetch_error;
/// Error while executing a `CmdBlock`.
///
/// # Type Parameters
///
/// * `E`: Application error type.
#[cfg_attr(feature = "error_reporting", derive(miette::Diagnostic))]
#[derive(Debug, thiserror::Error)]
pub enum CmdExecutionError {
/// Error fetching `CmdBlock::InputT` from `resources`.
///
/// If `CmdBlock::InputT` is a tuple, such as `(StatesCurrent, StatesGoal)`,
/// and `states_current` and `states_goal` are inserted individually in
/// `Resources`, then `CmdBlock::input_fetch` should be implemented to call
/// `Resources::remove` for each of them.
#[error(
"Error in `CmdExecution` or `CmdBlock` logic, usually due to incorrect `Resource` insertion or removal."
)]
InputFetch(
#[cfg_attr(feature = "error_reporting", diagnostic_source)]
#[source]
#[from]
Box<InputFetchError>,
),
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/cmd_model/src/cmd_outcome.rs | crate/cmd_model/src/cmd_outcome.rs | use futures::Future;
use indexmap::IndexMap;
use peace_item_model::ItemId;
use crate::{CmdBlockDesc, ItemStreamOutcome};
/// Outcome of a [`CmdExecution`].
///
/// The variants indicate whether execution was successful, interrupted, or
/// errored when processing an item.
///
/// [`CmdExecution`]: https://docs.rs/peace_cmd_rt/latest/peace_cmd_rt/struct.CmdExecution.html
#[derive(Clone, Debug, PartialEq, Eq)]
pub enum CmdOutcome<T, E> {
/// Execution completed successfully.
Complete {
/// The outcome value.
value: T,
/// Descriptors of the `CmdBlock`s that were processed.
///
/// This includes all `CmdBlock`s that were included in the
/// `CmdExecution`.
cmd_blocks_processed: Vec<CmdBlockDesc>,
},
/// Execution ended due to an interruption during command block execution.
BlockInterrupted {
/// The stream outcome of the interrupted command block.
item_stream_outcome: ItemStreamOutcome<T>,
/// Descriptors of the `CmdBlock`s that were processed.
///
/// This does not include the `CmdBlock` that was interrupted.
cmd_blocks_processed: Vec<CmdBlockDesc>,
/// Descriptors of the `CmdBlock`s that were not processed.
///
/// The first block in this list is the one that was interrupted.
cmd_blocks_not_processed: Vec<CmdBlockDesc>,
},
/// Execution ended due to an interruption between command blocks.
ExecutionInterrupted {
/// The outcome value.
value: Option<T>,
/// Descriptors of the `CmdBlock`s that were processed.
cmd_blocks_processed: Vec<CmdBlockDesc>,
/// Descriptors of the `CmdBlock`s that were not processed.
cmd_blocks_not_processed: Vec<CmdBlockDesc>,
},
/// Execution ended due to one or more item errors.
///
/// It is also possible for the stream to be interrupted when an error
/// occurs, so the value is wrapped in a `ItemStreamOutcome`.
ItemError {
/// The outcome value.
item_stream_outcome: ItemStreamOutcome<T>,
/// Descriptors of the `CmdBlock`s that were processed.
///
/// This does not include the `CmdBlock` that erred.
cmd_blocks_processed: Vec<CmdBlockDesc>,
/// Descriptors of the `CmdBlock`s that were not processed.
///
/// The first block in this list is the one that erred.
cmd_blocks_not_processed: Vec<CmdBlockDesc>,
/// Item error(s) from the last command block's execution.
errors: IndexMap<ItemId, E>,
},
}
impl<T, E> CmdOutcome<T, E> {
pub fn value(&self) -> Option<&T> {
match self {
CmdOutcome::Complete {
value,
cmd_blocks_processed: _,
} => Some(value),
CmdOutcome::BlockInterrupted {
item_stream_outcome,
cmd_blocks_processed: _,
cmd_blocks_not_processed: _,
} => Some(item_stream_outcome.value()),
CmdOutcome::ExecutionInterrupted {
value,
cmd_blocks_processed: _,
cmd_blocks_not_processed: _,
} => value.as_ref(),
CmdOutcome::ItemError {
item_stream_outcome,
cmd_blocks_processed: _,
cmd_blocks_not_processed: _,
errors: _,
} => Some(item_stream_outcome.value()),
}
}
/// Returns whether the command completed successfully.
pub fn is_complete(&self) -> bool {
matches!(self, Self::Complete { .. })
}
/// Returns whether the command completed successfully.
pub fn is_interrupted(&self) -> bool {
matches!(
self,
Self::BlockInterrupted { .. } | Self::ExecutionInterrupted { .. }
)
}
/// Returns whether the command encountered item errors during execution.
pub fn is_err(&self) -> bool {
matches!(self, Self::ItemError { .. })
}
/// Maps the inner value to another, maintaining any collected errors.
pub fn map<F, U>(self, f: F) -> CmdOutcome<U, E>
where
F: FnOnce(T) -> U,
{
match self {
Self::Complete {
value: t,
cmd_blocks_processed,
} => {
let u = f(t);
CmdOutcome::Complete {
value: u,
cmd_blocks_processed,
}
}
Self::BlockInterrupted {
item_stream_outcome,
cmd_blocks_processed,
cmd_blocks_not_processed,
} => {
let item_stream_outcome = item_stream_outcome.map(f);
CmdOutcome::BlockInterrupted {
item_stream_outcome,
cmd_blocks_processed,
cmd_blocks_not_processed,
}
}
Self::ExecutionInterrupted {
value: t,
cmd_blocks_processed,
cmd_blocks_not_processed,
} => {
let u = t.map(f);
CmdOutcome::ExecutionInterrupted {
value: u,
cmd_blocks_processed,
cmd_blocks_not_processed,
}
}
Self::ItemError {
item_stream_outcome,
cmd_blocks_processed,
cmd_blocks_not_processed,
errors,
} => {
let item_stream_outcome = item_stream_outcome.map(f);
CmdOutcome::ItemError {
item_stream_outcome,
cmd_blocks_processed,
cmd_blocks_not_processed,
errors,
}
}
}
}
/// Maps the inner value to another asynchronously, maintaining any
/// collected errors.
pub async fn map_async<'f, F, Fut, U>(self, f: F) -> CmdOutcome<U, E>
where
F: FnOnce(T) -> Fut,
Fut: Future<Output = U> + 'f,
{
match self {
Self::Complete {
value: t,
cmd_blocks_processed,
} => {
let u = f(t).await;
CmdOutcome::Complete {
value: u,
cmd_blocks_processed,
}
}
Self::BlockInterrupted {
item_stream_outcome,
cmd_blocks_processed,
cmd_blocks_not_processed,
} => {
let (item_stream_outcome, value) = item_stream_outcome.replace(());
let value = f(value).await;
let (item_stream_outcome, ()) = item_stream_outcome.replace(value);
CmdOutcome::BlockInterrupted {
item_stream_outcome,
cmd_blocks_processed,
cmd_blocks_not_processed,
}
}
Self::ExecutionInterrupted {
value: t,
cmd_blocks_processed,
cmd_blocks_not_processed,
} => {
let u = match t {
Some(t) => Some(f(t).await),
None => None,
};
CmdOutcome::ExecutionInterrupted {
value: u,
cmd_blocks_processed,
cmd_blocks_not_processed,
}
}
Self::ItemError {
item_stream_outcome,
cmd_blocks_processed,
cmd_blocks_not_processed,
errors,
} => {
let (item_stream_outcome, value) = item_stream_outcome.replace(());
let value = f(value).await;
let (item_stream_outcome, ()) = item_stream_outcome.replace(value);
CmdOutcome::ItemError {
item_stream_outcome,
cmd_blocks_processed,
cmd_blocks_not_processed,
errors,
}
}
}
}
}
impl<T, E> CmdOutcome<Result<T, E>, E> {
/// Transposes a `CmdOutcome<Result<T, E>, E>` to a `Result<CmdOutcome<T,
/// E>, E>`.
pub fn transpose(self) -> Result<CmdOutcome<T, E>, E> {
match self {
Self::Complete {
value,
cmd_blocks_processed,
} => match value {
Ok(value) => Ok(CmdOutcome::Complete {
value,
cmd_blocks_processed,
}),
Err(e) => Err(e),
},
Self::BlockInterrupted {
item_stream_outcome,
cmd_blocks_processed,
cmd_blocks_not_processed,
} => {
let (item_stream_outcome, value) = item_stream_outcome.replace(());
match value {
Ok(value) => {
let (item_stream_outcome, ()) = item_stream_outcome.replace(value);
Ok(CmdOutcome::BlockInterrupted {
item_stream_outcome,
cmd_blocks_processed,
cmd_blocks_not_processed,
})
}
Err(e) => Err(e),
}
}
Self::ExecutionInterrupted {
value,
cmd_blocks_processed,
cmd_blocks_not_processed,
} => match value.transpose() {
Ok(value) => Ok(CmdOutcome::ExecutionInterrupted {
value,
cmd_blocks_processed,
cmd_blocks_not_processed,
}),
Err(e) => Err(e),
},
Self::ItemError {
item_stream_outcome,
cmd_blocks_processed,
cmd_blocks_not_processed,
errors,
} => {
let (item_stream_outcome, value) = item_stream_outcome.replace(());
match value {
Ok(value) => {
let (item_stream_outcome, ()) = item_stream_outcome.replace(value);
Ok(CmdOutcome::ItemError {
item_stream_outcome,
cmd_blocks_processed,
cmd_blocks_not_processed,
errors,
})
}
Err(e) => Err(e),
}
}
}
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/cmd_model/src/cmd_execution_error/input_fetch_error.rs | crate/cmd_model/src/cmd_execution_error/input_fetch_error.rs | use crate::CmdBlockDesc;
/// Error fetching `CmdBlock::InputT` from `resources`.
///
/// If `CmdBlock::InputT` is a tuple, such as `(StatesCurrent, StatesGoal)`,
/// and `states_current` and `states_goal` are inserted individually in
/// `Resources`, then `CmdBlock::input_fetch` should be implemented to call
/// `Resources::remove` for each of them.
#[derive(Debug, thiserror::Error)]
#[error(
"Error in `CmdExecution` or `CmdBlock` logic, usually due to incorrect `Resource` insertion or removal."
)]
#[cfg_attr(
feature = "error_reporting",
derive(miette::Diagnostic),
diagnostic(help("Make sure that the value is populated by a predecessor."))
)]
pub struct InputFetchError {
/// String representations of the `CmdBlock`s in this `CmdExecution`.
///
/// These are used to provide a well-formatted error message so that
/// developers can identify where a bug lies more easily.
pub cmd_block_descs: Vec<CmdBlockDesc>,
/// Index of the `CmdBlock` for which `input_fetch` failed.
pub cmd_block_index: usize,
/// Short type name of the `CmdBlock::Input` type.
pub input_name_short: String,
/// Full type name of the `CmdBlock::Input` type.
pub input_name_full: String,
/// Textual representation of the `CmdExecution`.
///
/// This includes the `CmdBlock`s and their `InputT` and `Outcome` type
/// names.
///
/// Approximation of the source for `EnsureCmd`:
///
/// ```yaml
/// CmdExecution:
/// ExecutionOutcome: (States<Previous>, States<Ensured>, States<Goal>)
/// CmdBlocks:
/// - StatesCurrentReadCmdBlock:
/// Input: States<Current>
/// Outcome: States<Goal>
/// - StatesGoalReadCmdBlock:
/// Input: States<Current>
/// Outcome: States<Goal>
/// - StatesDiscoverCmdBlock:
/// Input: ()
/// Outcome: (States<Current>, States<Goal>)
/// - ApplyStateSyncCheckCmdBlock:
/// Input: (States<CurrentStored>, States<Current>, States<GoalStored>, States<Goal>)
/// Outcome: (States<CurrentStored>, States<Current>, States<GoalStored>, States<Goal>)
/// - ApplyExecCmdBlock:
/// Input: (States<Current>, States<Goal>)
/// Outcome: (States<Previous>, States<Ensured>, States<Goal>)
/// ```
#[cfg(feature = "error_reporting")]
#[source_code]
pub cmd_execution_src: String,
/// Span within the source text of the input type.
#[cfg(feature = "error_reporting")]
#[label("This type is not present in `resources`")]
pub input_span: Option<miette::SourceSpan>,
/// Full span so that miette renders the whole `cmd_execution_src`.
#[cfg(feature = "error_reporting")]
#[label]
pub full_span: miette::SourceSpan,
}
#[cfg(feature = "error_reporting")]
impl<'b> std::borrow::Borrow<dyn miette::Diagnostic + 'b> for Box<InputFetchError> {
fn borrow<'s>(&'s self) -> &'s (dyn miette::Diagnostic + 'b) {
self.as_ref()
}
}
#[cfg(feature = "error_reporting")]
impl miette::Diagnostic for Box<InputFetchError> {
fn code<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.as_ref().code()
}
fn severity(&self) -> Option<miette::Severity> {
self.as_ref().severity()
}
fn help<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.as_ref().help()
}
fn url<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.as_ref().url()
}
fn source_code(&self) -> Option<&dyn miette::SourceCode> {
self.as_ref().source_code()
}
fn labels(&self) -> Option<Box<dyn Iterator<Item = miette::LabeledSpan> + '_>> {
self.as_ref().labels()
}
fn related<'a>(&'a self) -> Option<Box<dyn Iterator<Item = &'a dyn miette::Diagnostic> + 'a>> {
self.as_ref().related()
}
fn diagnostic_source(&self) -> Option<&dyn miette::Diagnostic> {
self.as_ref().diagnostic_source()
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/lib.rs | crate/rt_model_core/src/lib.rs | //! Core runtime traits for the peace automation framework.
//!
//! These types are in this crate so that the `rt_model_native` and
//! `rt_model_web` crates are able to reference them and either use or provide
//! default implementations.
// Re-exports
pub use async_trait::async_trait;
pub use indexmap::IndexMap;
pub use indicatif;
pub mod output;
pub mod params;
pub use crate::{
error::{
ApplyCmdError, Error, ParamsSpecsDeserializeError, StateDowncastError,
StatesDeserializeError,
},
items_state_stored_stale::ItemsStateStoredStale,
state_stored_and_discovered::StateStoredAndDiscovered,
};
mod error;
mod items_state_stored_stale;
mod state_stored_and_discovered;
cfg_if::cfg_if! {
if #[cfg(feature = "output_progress")] {
pub use peace_progress_model::ProgressUpdate;
pub use crate::cmd_progress_tracker::CmdProgressTracker;
mod cmd_progress_tracker;
}
}
cfg_if::cfg_if! {
if #[cfg(not(target_arch = "wasm32"))] {
pub use crate::error::NativeError;
} else {
pub use crate::error::WebError;
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/params.rs | crate/rt_model_core/src/params.rs | //! Serializable data to initialize resources in a `CmdContext`.
//!
//! Each of these are `TypeMap<T>` newtypes, and are:
//!
//! * automatically serialized when a `CmdContext` is created with params.
//! * automatically deserialized and inserted as resources when subsequent
//! `CmdContext`s are created.
//!
//! # Intended Use
//!
//! [`WorkspaceParams`] are information that is shared across all profiles and
//! flows in a workspace, such as:
//!
//! * User ID
//! * Customer ID
//!
//! [`ProfilesParams`] are information that are shared across flows in within a
//! profile, but specific to a profile -- `dev`, `prod` -- such as:
//!
//! * Profile name
//! * Server hostnames
//!
//! [`FlowParams`] are information that are applicable to a flow -- `deploy`,
//! `config_fetch`, `clean` -- such as:
//!
//! * Server count: applicable to `deploy`
//! * Force remove: applicable to `clean`
pub use self::{
flow_params::FlowParams,
flow_params_opt::FlowParamsOpt,
params_keys::{KeyKnown, KeyMaybe, KeyUnknown, ParamsKeys, ParamsKeysImpl, ParamsKeysUnknown},
params_type_regs::ParamsTypeRegs,
params_type_regs_builder::ParamsTypeRegsBuilder,
profile_params::ProfileParams,
profile_params_opt::ProfileParamsOpt,
workspace_params::WorkspaceParams,
workspace_params_opt::WorkspaceParamsOpt,
};
mod flow_params;
mod flow_params_opt;
mod params_keys;
mod params_type_regs;
mod params_type_regs_builder;
mod profile_params;
mod profile_params_opt;
mod workspace_params;
mod workspace_params_opt;
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/error.rs | crate/rt_model_core/src/error.rs | use std::path::PathBuf;
use peace_cmd_model::CmdExecutionError;
use peace_core::AppName;
use peace_flow_model::FlowId;
use peace_item_model::ItemId;
use peace_params::{ParamsResolveError, ParamsSpecs};
use peace_profile_model::Profile;
use peace_resource_rt::{internal::WorkspaceParamsFile, paths::ParamsSpecsFile};
pub use self::{
apply_cmd_error::ApplyCmdError, params_specs_deserialize_error::ParamsSpecsDeserializeError,
state_downcast_error::StateDowncastError, states_deserialize_error::StatesDeserializeError,
};
mod apply_cmd_error;
mod params_specs_deserialize_error;
mod state_downcast_error;
mod states_deserialize_error;
cfg_if::cfg_if! {
if #[cfg(not(target_arch = "wasm32"))] {
pub use self::native_error::NativeError;
mod native_error;
} else {
pub use self::web_error::WebError;
mod web_error;
}
}
/// Peace runtime errors.
#[cfg_attr(feature = "error_reporting", derive(miette::Diagnostic))]
#[derive(Debug, thiserror::Error)]
pub enum Error {
/// Failed to apply changes.
#[error("Failed to apply changes.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::apply_error))
)]
ApplyCmdError(
#[cfg_attr(feature = "error_reporting", diagnostic_source)]
#[source]
#[from]
ApplyCmdError,
),
/// Error in `CmdExecution` or `CmdBlock` logic, usually due to incorrect
/// `Resource` insertion or removal.
#[error(
"Error in `CmdExecution` or `CmdBlock` logic, usually due to incorrect `Resource` insertion or removal."
)]
#[cfg_attr(
feature = "error_reporting",
diagnostic(help("Make sure that the value is populated by a predecessor."))
)]
CmdExecution(
#[cfg_attr(feature = "error_reporting", diagnostic_source)]
#[source]
#[from]
CmdExecutionError,
),
/// Failed to serialize error.
#[error("Failed to serialize error.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::error_serialize))
)]
ErrorSerialize(#[source] serde_yaml::Error),
/// Params specs were not stored or provided for a profile in a
/// multi-profile command context.
#[error(
"Params specs were not stored or provided for a profile in a \
multi-profile command context."
)]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::item_params_specs_file_not_present)),
help(
"Make sure params specs are either stored in \
`.peace/{app_name}/{profile}/{flow_id}/params_specs.yaml`, \
or are provided in code for all profiles that are loaded."
)
)]
ItemParamsSpecsFileNotFound {
/// Name of the application that is being executed.
app_name: AppName,
/// Profile that item params specs were being deserialized for.
profile: Profile,
/// Flow ID that item params specs where being deserialized for.
flow_id: FlowId,
},
/// Failed to resolve values for a `Params` object from `resources`.
///
/// This possibly indicates the user has provided a `Params::Spec` with
/// `InMemory` or `MappingFn`, but no predecessor populates that type.
#[error("Failed to resolve values for a `Params` object from `resources`.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::params_resolve_error),
help("Make sure that the value is populated by a predecessor.")
)
)]
ParamsResolveError(
#[cfg_attr(feature = "error_reporting", diagnostic_source)]
#[source]
#[from]
ParamsResolveError,
),
/// A `Params::Spec` was not present for a given item ID.
///
/// If this happens, this is a bug in the Peace framework.
#[error("A `Params::Spec` was not present for item: `{item_id}`")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::params_spec_not_found),
help(
"If you are an end user, please ask for help from the providers of your automation tool.\n\
\n\
If you are developing a tool with the Peace framework,\n\
please open an issue in the Peace repository:\n\
\n\
https://github.com/azriel91/peace/"
)
)
)]
ParamsSpecNotFound {
/// Item ID for which the params spec was not found.
item_id: ItemId,
},
/// Item params specs do not match with the items in the flow.
///
/// # Symptoms
///
/// * Provided params specs for an item ID has no corresponding item ID in
/// the flow.
/// * Stored params specs for an item ID has no corresponding item ID in the
/// flow.
/// * ID of an item in the flow does not have a corresponding provided
/// params spec.
/// * ID of an item in the flow does not have a corresponding stored params
/// spec.
///
/// # Causes
///
/// These can happen when:
///
/// * An item is added.
///
/// - No corresponding provided params spec.
/// - No corresponding stored params spec.
///
/// * An item ID is renamed.
///
/// - Provided params spec ID mismatch.
/// - Stored params spec ID mismatch.
/// - No corresponding provided params spec.
///
/// * An item is removed.
///
/// - Provided params spec ID mismatch.
/// - Stored params spec ID mismatch.
#[error("Item params specs do not match with the items in the flow.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::params_specs_mismatch),
help("{}", params_specs_mismatch_display(
item_ids_with_no_params_specs,
params_specs_provided_mismatches,
params_specs_stored_mismatches.as_ref().as_ref(),
params_specs_not_usable,
))
)
)]
ParamsSpecsMismatch {
/// Item IDs for which there are no provided or stored params spec.
item_ids_with_no_params_specs: Vec<ItemId>,
/// Provided params specs with no matching item ID in the flow.
params_specs_provided_mismatches: Box<ParamsSpecs>,
/// Stored params specs with no matching item ID in the flow.
//
// Boxed so that this enum variant is not so large compared to other variants
// to address `clippy::large_enum_variant`.
params_specs_stored_mismatches: Box<Option<ParamsSpecs>>,
/// Item IDs which had a mapping function previously provided in
/// its params spec, but on a subsequent invocation nothing was
/// provided.
params_specs_not_usable: Vec<ItemId>,
},
/// In a `MultiProfileSingleFlow` diff, neither profile had `Params::Specs`
/// defined.
#[error("Params specifications not defined for `{profile_a}` or `{profile_b}`.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::params_specs_not_defined_for_diff),
help(
"Make sure at least one of the flows has `.with_items_params(..)`\n\
defined for every item in the flow."
)
)
)]
ParamsSpecsNotDefinedForDiff {
/// First profile looked up for params specs.
profile_a: Profile,
/// Second profile looked up for params specs.
profile_b: Profile,
},
/// Failed to serialize a presentable type.
#[error("Failed to serialize a presentable type.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::presentable_serialize))
)]
PresentableSerialize(#[source] serde_yaml::Error),
/// Failed to serialize progress update.
#[error("Failed to serialize progress update.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::progress_update_serialize))
)]
ProgressUpdateSerialize(#[source] serde_yaml::Error),
/// Failed to serialize progress update as JSON.
#[error("Failed to serialize progress update.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::progress_update_serialize_json))
)]
ProgressUpdateSerializeJson(#[source] serde_json::Error),
/// Failed to deserialize states.
#[error("Failed to deserialize states.")]
StatesDeserialize(
#[cfg_attr(feature = "error_reporting", diagnostic_source)]
#[source]
#[from]
Box<StatesDeserializeError>,
),
/// Failed to serialize states.
#[error("Failed to serialize states.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::states_serialize))
)]
StatesSerialize(#[source] serde_yaml::Error),
/// Failed to deserialize params specs.
#[error("Failed to deserialize params specs.")]
ParamsSpecsDeserialize(
#[cfg_attr(feature = "error_reporting", diagnostic_source)]
#[source]
#[from]
Box<ParamsSpecsDeserializeError>,
),
/// Failed to serialize params specs.
#[error("Failed to serialize params specs.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::params_specs_serialize))
)]
ParamsSpecsSerialize(#[source] serde_yaml::Error),
/// Params specs file does not exist.
///
/// This is returned when `ParamsSpecs` is attempted to be
/// deserialized but the file does not exist.
///
/// The automation tool implementor needs to ensure the
/// `SingleProfileSingleFlow` command context has been initialized for that
/// flow previously.
#[error("Params specs file does not exist for `{profile}/{flow_id}`.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::params_specs_file_not_exists),
help(
"Ensure that a `SingleProfileSingleFlow` command context has previously been built."
)
)
)]
ParamsSpecsFileNotExists {
/// Profile of the flow.
profile: Profile,
/// Flow ID whose params are being deserialized.
flow_id: FlowId,
/// Path of the params specs file.
params_specs_file: ParamsSpecsFile,
},
/// Current states have not been discovered.
///
/// This is returned when `StatesCurrentFile` is attempted to be
/// deserialized but does not exist.
#[error("Current states have not been discovered.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::states_current_discover_required),
help("Ensure that `StatesDiscoverCmd::current` has been called.")
)
)]
StatesCurrentDiscoverRequired,
/// Goal states have not been written to disk.
///
/// This is returned when `StatesGoalFile` is attempted to be
/// deserialized but does not exist.
#[error("Goal states have not been written to disk.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::states_goal_discover_required),
help("Ensure that `StatesDiscoverCmd::goal` has been called.")
)
)]
StatesGoalDiscoverRequired,
/// Failed to serialize state diffs.
#[error("Failed to serialize state diffs.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::state_diffs_serialize))
)]
StateDiffsSerialize(#[source] serde_yaml::Error),
/// Failed to serialize error as JSON.
#[error("Failed to serialize error as JSON.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::error_serialize_json))
)]
ErrorSerializeJson(#[source] serde_json::Error),
/// Failed to serialize states as JSON.
#[error("Failed to serialize states as JSON.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::states_current_serialize_json))
)]
StatesSerializeJson(#[source] serde_json::Error),
/// Failed to serialize state diffs as JSON.
#[error("Failed to serialize state diffs as JSON.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::state_diffs_serialize_json))
)]
StateDiffsSerializeJson(#[source] serde_json::Error),
/// Failed to serialize workspace params profile key.
#[error("Failed to serialize workspace params profile key.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::workspace_params_profile_key_serialize))
)]
WorkspaceParamsProfileKeySerialize(#[source] serde_yaml::Error),
/// Failed to serialize workspace init params.
#[error("Failed to serialize workspace init params.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::workspace_init_params_serialize))
)]
WorkspaceParamsSerialize(#[source] serde_yaml::Error),
/// Failed to deserialize workspace init params.
#[error("Failed to deserialize workspace init params.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::workspace_init_params_deserialize))
)]
WorkspaceParamsDeserialize(#[source] serde_yaml::Error),
/// Workspace params does not exist, so cannot look up `Profile`.
#[error("Workspace params does not exist, so cannot look up `Profile`.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::workspace_params_none_for_profile))
)]
WorkspaceParamsNoneForProfile,
/// Workspace param for `Profile` does not exist.
#[error("Workspace param for `Profile` does not exist.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::workspace_params_profile_none),
help(
"Ensure `{workspace_params_file}` contains a param for `{profile_key}`.\n\
`{workspace_params_file}` contents:\n\
\n\
```yaml\n\
{workspace_params_file_contents}\n\
```\n\
"
)
)
)]
WorkspaceParamsProfileNone {
/// The key that the profile should be stored against.
profile_key: String,
/// The file that stores workspace params.
workspace_params_file: WorkspaceParamsFile,
/// Contents of the workspace params file.
workspace_params_file_contents: String,
},
/// Profile to diff does not exist in `MultiProfileSingleFlow` scope.
///
/// This could mean the caller provided a profile that does not exist, or
/// the profile filter function filtered out the profile from the list of
/// profiles.
#[error("Profile `{profile}` not in scope, make sure it exists in `.peace/*/{profile}`.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::profile_not_in_scope),
help(
"Make sure the profile is spelt correctly.\n\
Available profiles are: [{profiles_in_scope}]",
profiles_in_scope = profiles_in_scope
.iter()
.map(|profile| format!("{profile}"))
.collect::<Vec<_>>()
.join(",")
)
)
)]
ProfileNotInScope {
/// The profile that was not in scope.
profile: Profile,
/// The profiles that are in scope.
profiles_in_scope: Vec<Profile>,
},
/// Profile to diff has not had its states current discovered.
#[error("Profile `{profile}`'s states have not been discovered.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::profile_states_current_not_discovered),
help("Switch to the profile and run the states discover command.")
)
)]
ProfileStatesCurrentNotDiscovered {
/// The profile that was not in scope.
profile: Profile,
},
/// Failed to serialize profile init params.
#[error("Failed to serialize profile init params.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::profile_init_params_serialize))
)]
ProfileParamsSerialize(#[source] serde_yaml::Error),
/// Failed to deserialize profile init params.
#[error("Failed to deserialize profile init params.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::profile_init_params_deserialize))
)]
ProfileParamsDeserialize(#[source] serde_yaml::Error),
/// Failed to serialize flow init params.
#[error("Failed to serialize flow init params.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::flow_init_params_serialize))
)]
FlowParamsSerialize(#[source] serde_yaml::Error),
/// Failed to deserialize flow init params.
#[error("Failed to deserialize flow init params.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::flow_init_params_deserialize))
)]
FlowParamsDeserialize(#[source] serde_yaml::Error),
/// Item does not exist in storage.
#[error("Item does not exist in storage: `{}`.", path.display())]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model::item_not_exists))
)]
ItemNotExists {
/// Path to the file.
path: PathBuf,
},
/// Error downcasting a `BoxDtDisplay` into an item's concrete state type.
#[error("Error downcasting a `BoxDtDisplay` into an item's concrete state type.")]
StateDowncastError(
#[cfg_attr(feature = "error_reporting", diagnostic_source)]
#[source]
#[from]
StateDowncastError,
),
/// Native application error occurred.
#[error("Native application error occurred.")]
#[cfg(not(target_arch = "wasm32"))]
Native(
#[cfg_attr(feature = "error_reporting", diagnostic_source)]
#[source]
#[from]
NativeError,
),
/// Web application error occurred.
#[error("Web application error occurred.")]
#[cfg(target_arch = "wasm32")]
Web(
#[cfg_attr(feature = "error_reporting", diagnostic_source)]
#[source]
#[from]
WebError,
),
}
#[cfg(feature = "error_reporting")]
fn params_specs_mismatch_display(
item_ids_with_no_params: &[ItemId],
params_specs_provided_mismatches: &ParamsSpecs,
params_specs_stored_mismatches: Option<&ParamsSpecs>,
params_specs_not_usable: &[ItemId],
) -> String {
let mut items = Vec::<String>::new();
if !item_ids_with_no_params.is_empty() {
items.push(format!(
"The following items do not have parameters provided:\n\
\n\
{}\n",
item_ids_with_no_params
.iter()
.map(|item_id| format!("* {item_id}"))
.collect::<Vec<String>>()
.join("\n")
));
}
if !params_specs_provided_mismatches.is_empty() {
let params_specs_provided_mismatches_list = params_specs_provided_mismatches
.keys()
.map(|item_id| format!("* {item_id}"))
.collect::<Vec<String>>()
.join("\n");
items.push(format!(
"The following provided params specs do not correspond to any items in the flow:\n\
\n\
{params_specs_provided_mismatches_list}\n",
))
}
if let Some(params_specs_stored_mismatches) = params_specs_stored_mismatches {
if !params_specs_stored_mismatches.is_empty() {
let params_specs_stored_mismatches_list = params_specs_stored_mismatches
.keys()
.map(|item_id| format!("* {item_id}"))
.collect::<Vec<String>>()
.join("\n");
items.push(format!(
"The following stored params specs do not correspond to any items in the flow:\n\
\n\
{params_specs_stored_mismatches_list}\n",
));
}
}
if !params_specs_not_usable.is_empty() {
items.push(format!(
"The following items either have not had a params spec provided previously,\n\
or had contained a mapping function, which cannot be loaded from disk.\n\
\n\
So the params spec needs to be provided to the command context for:\n\
\n\
{}\n",
params_specs_not_usable
.iter()
.map(|item_id| format!("* {item_id}"))
.collect::<Vec<String>>()
.join("\n")
));
}
items.join("\n")
}
#[cfg(feature = "error_reporting")]
impl<'b> std::borrow::Borrow<dyn miette::Diagnostic + 'b> for Box<Error> {
fn borrow<'s>(&'s self) -> &'s (dyn miette::Diagnostic + 'b) {
self.as_ref()
}
}
#[cfg(feature = "error_reporting")]
impl miette::Diagnostic for Box<Error> {
fn code<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.as_ref().code()
}
fn severity(&self) -> Option<miette::Severity> {
self.as_ref().severity()
}
fn help<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.as_ref().help()
}
fn url<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.as_ref().url()
}
fn source_code(&self) -> Option<&dyn miette::SourceCode> {
self.as_ref().source_code()
}
fn labels(&self) -> Option<Box<dyn Iterator<Item = miette::LabeledSpan> + '_>> {
self.as_ref().labels()
}
fn related<'a>(&'a self) -> Option<Box<dyn Iterator<Item = &'a dyn miette::Diagnostic> + 'a>> {
self.as_ref().related()
}
fn diagnostic_source(&self) -> Option<&dyn miette::Diagnostic> {
self.as_ref().diagnostic_source()
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/items_state_stored_stale.rs | crate/rt_model_core/src/items_state_stored_stale.rs | use std::ops::{Deref, DerefMut};
use indexmap::IndexMap;
use peace_item_model::ItemId;
use crate::StateStoredAndDiscovered;
/// Items whose stored and discovered state are not equal.
///
/// `IndexMap<ItemId, StateStoredAndDiscovered>` newtype.
///
/// This can be used for either current state or goal state.
#[derive(Clone, Debug, Default)]
pub struct ItemsStateStoredStale(IndexMap<ItemId, StateStoredAndDiscovered>);
impl ItemsStateStoredStale {
/// Returns a new `ItemsStateStoredStale` map.
pub fn new() -> Self {
Self(IndexMap::new())
}
/// Returns a new `ItemsStateStoredStale` map with the given preallocated
/// capacity.
pub fn with_capacity(capacity: usize) -> Self {
Self(IndexMap::with_capacity(capacity))
}
/// Returns the underlying map.
pub fn into_inner(self) -> IndexMap<ItemId, StateStoredAndDiscovered> {
self.0
}
/// Returns `true` if there is at least one stale stored state.
pub fn stale(&self) -> bool {
!self.0.is_empty()
}
}
impl Deref for ItemsStateStoredStale {
type Target = IndexMap<ItemId, StateStoredAndDiscovered>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl DerefMut for ItemsStateStoredStale {
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl FromIterator<(ItemId, StateStoredAndDiscovered)> for ItemsStateStoredStale {
fn from_iter<I: IntoIterator<Item = (ItemId, StateStoredAndDiscovered)>>(iter: I) -> Self {
Self(IndexMap::from_iter(iter))
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/output.rs | crate/rt_model_core/src/output.rs | pub use self::output_write::OutputWrite;
mod output_write;
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/state_stored_and_discovered.rs | crate/rt_model_core/src/state_stored_and_discovered.rs | use type_reg::untagged::BoxDtDisplay;
/// Stored and/or discovered state for an item.
#[derive(Clone, Debug)]
pub enum StateStoredAndDiscovered {
/// Stored state exists, but the actual item state cannot be discovered.
///
/// These can probably be ignored during `CleanCmd`, for idempotence even if
/// a previous clean up did not complete successfully and stored states
/// were not updated.
OnlyStoredExists {
/// Stored current state or stored goal state.
state_stored: BoxDtDisplay,
},
/// No state was stored, but the actual item state exists.
///
/// These can probably be ignored during `EnsureCmd`, for idempotence even
/// if a previous ensure did not complete successfully and stored states
/// were not updated.
OnlyDiscoveredExists {
/// Discovered current state or stored goal state during execution.
state_discovered: BoxDtDisplay,
},
/// Both stored state and discovered state exist.
///
/// This variant is the one that users likely should be warned when ensuring
/// changes.
ValuesDiffer {
/// Stored current state or stored goal state.
state_stored: BoxDtDisplay,
/// Discovered current state or stored goal state during execution.
state_discovered: BoxDtDisplay,
},
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/cmd_progress_tracker.rs | crate/rt_model_core/src/cmd_progress_tracker.rs | use indexmap::IndexMap;
use indicatif::MultiProgress;
use peace_item_model::ItemId;
use peace_progress_model::ProgressTracker;
/// Tracks command execution progress for all items.
///
/// The Peace framework initializes the `multi_progress` and `progress_trackers`
/// and manages updating the `ProgressBar` values.
///
/// By default, the `MultiProgress` will use [`ProgressDrawTarget::hidden()`].
/// However, by default [`CliOutput`] sets the draw target to `stderr` if an
/// executable built using Peace is run interactively.
///
/// [`ProgressDrawTarget::hidden()`]: indicatif::ProgressDrawTarget::hidden
/// [`CliOutput`]: https://docs.rs/peace_rt_model_native/latest/peace_rt_model_native/struct.CliOutput.html
#[derive(Debug)]
pub struct CmdProgressTracker {
/// `MultiProgress` that tracks the remaining progress bars.
pub multi_progress: MultiProgress,
/// Tracks progress for each item.
pub progress_trackers: IndexMap<ItemId, ProgressTracker>,
}
impl CmdProgressTracker {
/// Returns a new `CmdProgressTracker`.
pub fn new(
multi_progress: MultiProgress,
progress_trackers: IndexMap<ItemId, ProgressTracker>,
) -> Self {
Self {
multi_progress,
progress_trackers,
}
}
/// Returns the `MultiProgress` that tracks the remaining progress bars.
pub fn multi_progress(&self) -> &MultiProgress {
&self.multi_progress
}
/// Returns a mutable reference to the `MultiProgress` that tracks the
/// remaining progress bars.
pub fn multi_progress_mut(&mut self) -> &mut MultiProgress {
&mut self.multi_progress
}
/// Returns the `ProgressTracker`s for each item.
pub fn progress_trackers(&self) -> &IndexMap<ItemId, ProgressTracker> {
&self.progress_trackers
}
/// Returns a mutable reference to the `ProgressTracker`s for each item
/// spec.
pub fn progress_trackers_mut(&mut self) -> &mut IndexMap<ItemId, ProgressTracker> {
&mut self.progress_trackers
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/params/workspace_params.rs | crate/rt_model_core/src/params/workspace_params.rs | use std::{
hash::Hash,
marker::PhantomData,
ops::{Deref, DerefMut},
};
use serde::Serialize;
use type_reg::untagged::{BoxDt, TypeMap};
/// Information that is shared across all profiles and flows in a workspace.
/// `TypeMap<K>` newtype.
///
/// Shared information are the ones that will not change when switching to
/// different profiles. For example, a user working on a project for a
/// particular customer may use the following information across profiles:
///
/// * User ID
/// * Customer ID
///
/// # Type Parameters
///
/// * `K`: Type of key for the `WorkspaceParams` map.
#[derive(Clone, Debug, Serialize)]
#[serde(transparent)] // Needed to serialize as a map instead of a list.
pub struct WorkspaceParams<K>(TypeMap<K, BoxDt>, PhantomData<K>)
where
K: Eq + Hash;
impl<K> WorkspaceParams<K>
where
K: Eq + Hash,
{
/// Returns a new `WorkspaceParams` map.
pub fn new() -> Self {
Self::default()
}
/// Creates an empty `WorkspaceParams` map with the specified capacity.
///
/// The `WorkspaceParams` will be able to hold at least capacity elements
/// without reallocating. If capacity is 0, the map will not allocate.
pub fn with_capacity(capacity: usize) -> Self {
Self(TypeMap::with_capacity_typed(capacity), PhantomData)
}
/// Returns the inner map.
pub fn into_inner(self) -> TypeMap<K, BoxDt> {
self.0
}
}
impl<K> Default for WorkspaceParams<K>
where
K: Eq + Hash,
{
fn default() -> Self {
Self(TypeMap::default(), PhantomData)
}
}
impl<K> Deref for WorkspaceParams<K>
where
K: Eq + Hash,
{
type Target = TypeMap<K, BoxDt>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<K> DerefMut for WorkspaceParams<K>
where
K: Eq + Hash,
{
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<K> From<TypeMap<K, BoxDt>> for WorkspaceParams<K>
where
K: Eq + Hash,
{
fn from(type_map: TypeMap<K, BoxDt>) -> Self {
Self(type_map, PhantomData)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/params/flow_params.rs | crate/rt_model_core/src/params/flow_params.rs | use std::{
hash::Hash,
marker::PhantomData,
ops::{Deref, DerefMut},
};
use serde::Serialize;
use type_reg::untagged::{BoxDt, TypeMap};
/// Information that is applicable to a flow. `TypeMap<K>` newtype.
///
/// The information may not be of the same type across flows, as flows are
/// different in what they are doing. Example information include:
///
/// * Server count: applicable to `deploy`
/// * Force remove: applicable to `clean`
///
/// # Type Parameters
///
/// * `K`: Type of key for the `FlowParams` map.
#[derive(Clone, Debug, Serialize)]
#[serde(transparent)] // Needed to serialize as a map instead of a list.
pub struct FlowParams<K>(TypeMap<K, BoxDt>, PhantomData<K>)
where
K: Eq + Hash;
impl<K> FlowParams<K>
where
K: Eq + Hash,
{
/// Returns a new `FlowParams` map.
pub fn new() -> Self {
Self::default()
}
/// Creates an empty `FlowParams` map with the specified capacity.
///
/// The `FlowParams` will be able to hold at least capacity elements
/// without reallocating. If capacity is 0, the map will not allocate.
pub fn with_capacity(capacity: usize) -> Self {
Self(TypeMap::with_capacity_typed(capacity), PhantomData)
}
/// Returns the inner map.
pub fn into_inner(self) -> TypeMap<K, BoxDt> {
self.0
}
}
impl<K> Default for FlowParams<K>
where
K: Eq + Hash,
{
fn default() -> Self {
Self(TypeMap::default(), PhantomData)
}
}
impl<K> Deref for FlowParams<K>
where
K: Eq + Hash,
{
type Target = TypeMap<K, BoxDt>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<K> DerefMut for FlowParams<K>
where
K: Eq + Hash,
{
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<K> From<TypeMap<K, BoxDt>> for FlowParams<K>
where
K: Eq + Hash,
{
fn from(type_map: TypeMap<K, BoxDt>) -> Self {
Self(type_map, PhantomData)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/params/params_keys.rs | crate/rt_model_core/src/params/params_keys.rs | use std::{fmt::Debug, hash::Hash, marker::PhantomData};
use serde::{de::DeserializeOwned, Deserialize, Serialize};
/// Marks the types used for params keys.
///
/// # Design
///
/// This allows types such as `CmdContext` and `ParamsTypeRegs` to have a
/// `ParamsKeys` type parameter without specifying all of the associated type
/// bounds. This means:
///
/// * The code for those types is more understandable.
/// * We reduce the ripple effect of needing each of these associated types
/// propagated to callers who use those types in type / method signatures.
pub trait ParamsKeys: Debug + Unpin + 'static {
type WorkspaceParamsKMaybe: KeyMaybe;
type ProfileParamsKMaybe: KeyMaybe;
type FlowParamsKMaybe: KeyMaybe;
}
/// Shorter name for `ParamsKeys` without any known keys.
pub type ParamsKeysUnknown = ParamsKeysImpl<KeyUnknown, KeyUnknown, KeyUnknown>;
/// Concrete implementation of `ParamsKeys`.
#[derive(Debug)]
pub struct ParamsKeysImpl<WorkspaceParamsKMaybe, ProfileParamsKMaybe, FlowParamsKMaybe> {
/// Marker
marker: PhantomData<(WorkspaceParamsKMaybe, ProfileParamsKMaybe, FlowParamsKMaybe)>,
}
impl<WorkspaceParamsKMaybe, ProfileParamsKMaybe, FlowParamsKMaybe>
ParamsKeysImpl<WorkspaceParamsKMaybe, ProfileParamsKMaybe, FlowParamsKMaybe>
{
/// Returns a new `ParamsKeysImpl`.
pub fn new() -> Self {
Self::default()
}
}
impl<WorkspaceParamsKMaybe, ProfileParamsKMaybe, FlowParamsKMaybe> Default
for ParamsKeysImpl<WorkspaceParamsKMaybe, ProfileParamsKMaybe, FlowParamsKMaybe>
{
fn default() -> Self {
Self {
marker: PhantomData,
}
}
}
impl<WorkspaceParamsKMaybe, ProfileParamsKMaybe, FlowParamsKMaybe> ParamsKeys
for ParamsKeysImpl<WorkspaceParamsKMaybe, ProfileParamsKMaybe, FlowParamsKMaybe>
where
WorkspaceParamsKMaybe: KeyMaybe,
ProfileParamsKMaybe: KeyMaybe,
FlowParamsKMaybe: KeyMaybe,
{
type FlowParamsKMaybe = FlowParamsKMaybe;
type ProfileParamsKMaybe = ProfileParamsKMaybe;
type WorkspaceParamsKMaybe = WorkspaceParamsKMaybe;
}
// Supporting types that allow keys to not be explicitly specified
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)]
pub struct KeyUnknown;
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash, Deserialize, Serialize)]
pub struct KeyKnown<K>(PhantomData<K>);
pub trait KeyMaybe: Debug + Unpin + 'static {
type Key: Clone + Debug + Eq + Hash + DeserializeOwned + Serialize + Send + Sync + 'static;
}
impl KeyMaybe for KeyUnknown {
type Key = ();
}
impl<K> KeyMaybe for KeyKnown<K>
where
K: Clone + Debug + Eq + Hash + DeserializeOwned + Serialize + Send + Sync + Unpin + 'static,
{
type Key = K;
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/params/params_type_regs_builder.rs | crate/rt_model_core/src/params/params_type_regs_builder.rs | use std::{fmt::Debug, hash::Hash};
use serde::{de::DeserializeOwned, Serialize};
use type_reg::untagged::{BoxDt, TypeReg};
use crate::params::{KeyKnown, KeyMaybe, KeyUnknown, ParamsKeys, ParamsKeysImpl, ParamsTypeRegs};
/// Type registries to deserialize [`WorkspaceParamsFile`],
/// [`ProfileParamsFile`] and [`FlowParamsFile`].
///
/// [`WorkspaceParamsFile`]: peace_resource_rt::internal::WorkspaceParamsFile
/// [`ProfileParamsFile`]: peace_resource_rt::internal::ProfileParamsFile
/// [`FlowParamsFile`]: peace_resource_rt::internal::FlowParamsFile
#[derive(Debug)]
pub struct ParamsTypeRegsBuilder<PKeys>
where
PKeys: ParamsKeys + 'static,
{
/// Type registry for [`WorkspaceParams`] deserialization.
///
/// [`WorkspaceParams`]: peace_rt_model::params::WorkspaceParams
workspace_params_type_reg: TypeReg<<PKeys::WorkspaceParamsKMaybe as KeyMaybe>::Key, BoxDt>,
/// Type registry for [`ProfileParams`] deserialization.
///
/// [`ProfileParams`]: peace_rt_model::params::ProfileParams
profile_params_type_reg: TypeReg<<PKeys::ProfileParamsKMaybe as KeyMaybe>::Key, BoxDt>,
/// Type registry for [`FlowParams`] deserialization.
///
/// [`FlowParams`]: peace_rt_model::params::FlowParams
flow_params_type_reg: TypeReg<<PKeys::FlowParamsKMaybe as KeyMaybe>::Key, BoxDt>,
}
impl ParamsTypeRegsBuilder<ParamsKeysImpl<KeyUnknown, KeyUnknown, KeyUnknown>> {
/// Returns a new `ParamsTypeRegsBuilder`.
pub fn new() -> Self {
Self::default()
}
}
impl<PKeys> ParamsTypeRegsBuilder<PKeys>
where
PKeys: ParamsKeys + 'static,
{
/// Returns a reference to the workspace params type registry.
pub fn workspace_params_type_reg(
&self,
) -> &TypeReg<<PKeys::WorkspaceParamsKMaybe as KeyMaybe>::Key, BoxDt> {
&self.workspace_params_type_reg
}
/// Returns a mutable reference to the workspace params type registry.
pub fn workspace_params_type_reg_mut(
&mut self,
) -> &mut TypeReg<<PKeys::WorkspaceParamsKMaybe as KeyMaybe>::Key, BoxDt> {
&mut self.workspace_params_type_reg
}
/// Returns a reference to the profile params type registry.
pub fn profile_params_type_reg(
&self,
) -> &TypeReg<<PKeys::ProfileParamsKMaybe as KeyMaybe>::Key, BoxDt> {
&self.profile_params_type_reg
}
/// Returns a mutable reference to the profile params type registry.
pub fn profile_params_type_reg_mut(
&mut self,
) -> &mut TypeReg<<PKeys::ProfileParamsKMaybe as KeyMaybe>::Key, BoxDt> {
&mut self.profile_params_type_reg
}
/// Returns a reference to the flow params type registry.
pub fn flow_params_type_reg(
&self,
) -> &TypeReg<<PKeys::FlowParamsKMaybe as KeyMaybe>::Key, BoxDt> {
&self.flow_params_type_reg
}
/// Returns a mutable reference to the flow params type registry.
pub fn flow_params_type_reg_mut(
&mut self,
) -> &mut TypeReg<<PKeys::FlowParamsKMaybe as KeyMaybe>::Key, BoxDt> {
&mut self.flow_params_type_reg
}
/// Returns a `ParamsTypeRegs` with the registered keys.
pub fn build(
self,
) -> ParamsTypeRegs<
ParamsKeysImpl<
PKeys::WorkspaceParamsKMaybe,
PKeys::ProfileParamsKMaybe,
PKeys::FlowParamsKMaybe,
>,
> {
let workspace_params_type_reg =
TypeReg::<<PKeys::WorkspaceParamsKMaybe as KeyMaybe>::Key, BoxDt>::new();
let profile_params_type_reg =
TypeReg::<<PKeys::ProfileParamsKMaybe as KeyMaybe>::Key, BoxDt>::new();
let flow_params_type_reg =
TypeReg::<<PKeys::FlowParamsKMaybe as KeyMaybe>::Key, BoxDt>::new();
ParamsTypeRegs::new(
workspace_params_type_reg,
profile_params_type_reg,
flow_params_type_reg,
)
}
}
impl<ProfileParamsKMaybe, FlowParamsKMaybe>
ParamsTypeRegsBuilder<ParamsKeysImpl<KeyUnknown, ProfileParamsKMaybe, FlowParamsKMaybe>>
where
ProfileParamsKMaybe: KeyMaybe,
FlowParamsKMaybe: KeyMaybe,
{
pub fn with_workspace_params_k<WorkspaceParamsK>(
self,
) -> ParamsTypeRegsBuilder<
ParamsKeysImpl<KeyKnown<WorkspaceParamsK>, ProfileParamsKMaybe, FlowParamsKMaybe>,
>
where
WorkspaceParamsK: Clone
+ Debug
+ Eq
+ Hash
+ DeserializeOwned
+ Serialize
+ Send
+ Sync
+ Unpin
+ 'static,
{
let ParamsTypeRegsBuilder {
workspace_params_type_reg: _,
profile_params_type_reg,
flow_params_type_reg,
} = self;
let workspace_params_type_reg = TypeReg::<WorkspaceParamsK, BoxDt>::new();
ParamsTypeRegsBuilder {
workspace_params_type_reg,
profile_params_type_reg,
flow_params_type_reg,
}
}
}
impl<WorkspaceParamsKMaybe, FlowParamsKMaybe>
ParamsTypeRegsBuilder<ParamsKeysImpl<WorkspaceParamsKMaybe, KeyUnknown, FlowParamsKMaybe>>
where
WorkspaceParamsKMaybe: KeyMaybe,
FlowParamsKMaybe: KeyMaybe,
{
pub fn with_profile_params_k<ProfileParamsK>(
self,
) -> ParamsTypeRegsBuilder<
ParamsKeysImpl<WorkspaceParamsKMaybe, KeyKnown<ProfileParamsK>, FlowParamsKMaybe>,
>
where
ProfileParamsK: Clone
+ Debug
+ Eq
+ Hash
+ DeserializeOwned
+ Serialize
+ Send
+ Sync
+ Unpin
+ 'static,
{
let ParamsTypeRegsBuilder {
workspace_params_type_reg,
profile_params_type_reg: _,
flow_params_type_reg,
} = self;
let profile_params_type_reg = TypeReg::<ProfileParamsK, BoxDt>::new();
ParamsTypeRegsBuilder {
workspace_params_type_reg,
profile_params_type_reg,
flow_params_type_reg,
}
}
}
impl<WorkspaceParamsKMaybe, ProfileParamsKMaybe>
ParamsTypeRegsBuilder<ParamsKeysImpl<WorkspaceParamsKMaybe, ProfileParamsKMaybe, KeyUnknown>>
where
WorkspaceParamsKMaybe: KeyMaybe,
ProfileParamsKMaybe: KeyMaybe,
{
pub fn with_flow_params_k<FlowParamsK>(
self,
) -> ParamsTypeRegsBuilder<
ParamsKeysImpl<WorkspaceParamsKMaybe, ProfileParamsKMaybe, KeyKnown<FlowParamsK>>,
>
where
FlowParamsK: Clone
+ Debug
+ Eq
+ Hash
+ DeserializeOwned
+ Serialize
+ Send
+ Sync
+ Unpin
+ 'static,
{
let ParamsTypeRegsBuilder {
workspace_params_type_reg,
profile_params_type_reg,
flow_params_type_reg: _,
} = self;
let flow_params_type_reg = TypeReg::<FlowParamsK, BoxDt>::new();
ParamsTypeRegsBuilder {
workspace_params_type_reg,
profile_params_type_reg,
flow_params_type_reg,
}
}
}
impl<WorkspaceParamsKMaybe, ProfileParamsKMaybe, FlowParamsKMaybe> Default
for ParamsTypeRegsBuilder<
ParamsKeysImpl<WorkspaceParamsKMaybe, ProfileParamsKMaybe, FlowParamsKMaybe>,
>
where
WorkspaceParamsKMaybe: KeyMaybe,
ProfileParamsKMaybe: KeyMaybe,
FlowParamsKMaybe: KeyMaybe,
{
fn default() -> Self {
let workspace_params_type_reg = TypeReg::<WorkspaceParamsKMaybe::Key, BoxDt>::new();
let profile_params_type_reg = TypeReg::<ProfileParamsKMaybe::Key, BoxDt>::new();
let flow_params_type_reg = TypeReg::<FlowParamsKMaybe::Key, BoxDt>::new();
ParamsTypeRegsBuilder {
workspace_params_type_reg,
profile_params_type_reg,
flow_params_type_reg,
}
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/params/profile_params_opt.rs | crate/rt_model_core/src/params/profile_params_opt.rs | use std::{
hash::Hash,
marker::PhantomData,
ops::{Deref, DerefMut},
};
use serde::Serialize;
use type_reg::untagged::{BoxDt, TypeMapOpt};
/// Information that is shared across flows within a profile. `TypeMapOpt<K>`
/// newtype.
///
/// This is used to keep track of [`ProfileParams`] that need to be removed when
/// building a `CmdCtx*`.
///
/// [`ProfileParams`]: crate::params::ProfileParams
///
/// # Type Parameters
///
/// * `K`: Type of key for the `ProfileParamsOpt` map.
#[derive(Clone, Debug, Serialize)]
#[serde(transparent)] // Needed to serialize as a map instead of a list.
pub struct ProfileParamsOpt<K>(TypeMapOpt<K, BoxDt>, PhantomData<K>)
where
K: Eq + Hash;
impl<K> ProfileParamsOpt<K>
where
K: Eq + Hash,
{
/// Returns a new `ProfileParamsOpt` map.
pub fn new() -> Self {
Self::default()
}
/// Creates an empty `ProfileParamsOpt` map with the specified capacity.
///
/// The `ProfileParamsOpt` will be able to hold at least capacity elements
/// without reallocating. If capacity is 0, the map will not allocate.
pub fn with_capacity(capacity: usize) -> Self {
Self(TypeMapOpt::with_capacity_typed(capacity), PhantomData)
}
/// Returns the inner map.
pub fn into_inner(self) -> TypeMapOpt<K, BoxDt> {
self.0
}
}
impl<K> Default for ProfileParamsOpt<K>
where
K: Eq + Hash,
{
fn default() -> Self {
Self(TypeMapOpt::default(), PhantomData)
}
}
impl<K> Deref for ProfileParamsOpt<K>
where
K: Eq + Hash,
{
type Target = TypeMapOpt<K, BoxDt>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<K> DerefMut for ProfileParamsOpt<K>
where
K: Eq + Hash,
{
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<K> From<TypeMapOpt<K, BoxDt>> for ProfileParamsOpt<K>
where
K: Eq + Hash,
{
fn from(type_map: TypeMapOpt<K, BoxDt>) -> Self {
Self(type_map, PhantomData)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/params/workspace_params_opt.rs | crate/rt_model_core/src/params/workspace_params_opt.rs | use std::{
hash::Hash,
marker::PhantomData,
ops::{Deref, DerefMut},
};
use serde::Serialize;
use type_reg::untagged::{BoxDt, TypeMapOpt};
/// Information that is shared across all profiles and flows in a workspace.
/// `TypeMapOpt<K>` newtype.
///
/// This is used to keep track of [`WorkspaceParams`] that need to be removed
/// when building a `CmdCtx*`.
///
/// [`WorkspaceParams`]: crate::params::WorkspaceParams
///
/// # Type Parameters
///
/// * `K`: Type of key for the `WorkspaceParamsOpt` map.
#[derive(Clone, Debug, Serialize)]
#[serde(transparent)] // Needed to serialize as a map instead of a list.
pub struct WorkspaceParamsOpt<K>(TypeMapOpt<K, BoxDt>, PhantomData<K>)
where
K: Eq + Hash;
impl<K> WorkspaceParamsOpt<K>
where
K: Eq + Hash,
{
/// Returns a new `WorkspaceParamsOpt` map.
pub fn new() -> Self {
Self::default()
}
/// Creates an empty `WorkspaceParamsOpt` map with the specified capacity.
///
/// The `WorkspaceParamsOpt` will be able to hold at least capacity elements
/// without reallocating. If capacity is 0, the map will not allocate.
pub fn with_capacity(capacity: usize) -> Self {
Self(TypeMapOpt::with_capacity_typed(capacity), PhantomData)
}
/// Returns the inner map.
pub fn into_inner(self) -> TypeMapOpt<K, BoxDt> {
self.0
}
}
impl<K> Default for WorkspaceParamsOpt<K>
where
K: Eq + Hash,
{
fn default() -> Self {
Self(TypeMapOpt::default(), PhantomData)
}
}
impl<K> Deref for WorkspaceParamsOpt<K>
where
K: Eq + Hash,
{
type Target = TypeMapOpt<K, BoxDt>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<K> DerefMut for WorkspaceParamsOpt<K>
where
K: Eq + Hash,
{
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<K> From<TypeMapOpt<K, BoxDt>> for WorkspaceParamsOpt<K>
where
K: Eq + Hash,
{
fn from(type_map: TypeMapOpt<K, BoxDt>) -> Self {
Self(type_map, PhantomData)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/params/params_type_regs.rs | crate/rt_model_core/src/params/params_type_regs.rs | use std::fmt::Debug;
use type_reg::untagged::{BoxDt, TypeReg};
use crate::params::{KeyMaybe, KeyUnknown, ParamsKeys, ParamsKeysImpl, ParamsTypeRegsBuilder};
/// Type registries to deserialize [`WorkspaceParamsFile`],
/// [`ProfileParamsFile`] and [`FlowParamsFile`].
///
/// [`WorkspaceParamsFile`]: peace_resource_rt::internal::WorkspaceParamsFile
/// [`ProfileParamsFile`]: peace_resource_rt::internal::ProfileParamsFile
/// [`FlowParamsFile`]: peace_resource_rt::internal::FlowParamsFile
#[derive(Debug)]
pub struct ParamsTypeRegs<PKeys>
where
PKeys: ParamsKeys + 'static,
{
/// Type registry for [`WorkspaceParams`] deserialization.
///
/// [`WorkspaceParams`]: peace_rt_model::params::WorkspaceParams
workspace_params_type_reg: TypeReg<<PKeys::WorkspaceParamsKMaybe as KeyMaybe>::Key, BoxDt>,
/// Type registry for [`ProfileParams`] deserialization.
///
/// [`ProfileParams`]: peace_rt_model::params::ProfileParams
profile_params_type_reg: TypeReg<<PKeys::ProfileParamsKMaybe as KeyMaybe>::Key, BoxDt>,
/// Type registry for [`FlowParams`] deserialization.
///
/// [`FlowParams`]: peace_rt_model::params::FlowParams
flow_params_type_reg: TypeReg<<PKeys::FlowParamsKMaybe as KeyMaybe>::Key, BoxDt>,
}
impl ParamsTypeRegs<ParamsKeysImpl<KeyUnknown, KeyUnknown, KeyUnknown>> {
/// Returns a new `ParamsTypeRegsBuilder`.
pub fn builder() -> ParamsTypeRegsBuilder<ParamsKeysImpl<KeyUnknown, KeyUnknown, KeyUnknown>> {
ParamsTypeRegsBuilder::new()
}
}
impl<PKeys> ParamsTypeRegs<PKeys>
where
PKeys: ParamsKeys + 'static,
{
/// Returns a new `ParamsTypeRegs`.
pub(crate) fn new(
workspace_params_type_reg: TypeReg<<PKeys::WorkspaceParamsKMaybe as KeyMaybe>::Key, BoxDt>,
profile_params_type_reg: TypeReg<<PKeys::ProfileParamsKMaybe as KeyMaybe>::Key, BoxDt>,
flow_params_type_reg: TypeReg<<PKeys::FlowParamsKMaybe as KeyMaybe>::Key, BoxDt>,
) -> Self {
Self {
workspace_params_type_reg,
profile_params_type_reg,
flow_params_type_reg,
}
}
/// Returns a reference to the workspace params type registry.
pub fn workspace_params_type_reg(
&self,
) -> &TypeReg<<PKeys::WorkspaceParamsKMaybe as KeyMaybe>::Key, BoxDt> {
&self.workspace_params_type_reg
}
/// Returns a mutable reference to the workspace params type registry.
pub fn workspace_params_type_reg_mut(
&mut self,
) -> &mut TypeReg<<PKeys::WorkspaceParamsKMaybe as KeyMaybe>::Key, BoxDt> {
&mut self.workspace_params_type_reg
}
/// Returns a reference to the profile params type registry.
pub fn profile_params_type_reg(
&self,
) -> &TypeReg<<PKeys::ProfileParamsKMaybe as KeyMaybe>::Key, BoxDt> {
&self.profile_params_type_reg
}
/// Returns a mutable reference to the profile params type registry.
pub fn profile_params_type_reg_mut(
&mut self,
) -> &mut TypeReg<<PKeys::ProfileParamsKMaybe as KeyMaybe>::Key, BoxDt> {
&mut self.profile_params_type_reg
}
/// Returns a reference to the flow params type registry.
pub fn flow_params_type_reg(
&self,
) -> &TypeReg<<PKeys::FlowParamsKMaybe as KeyMaybe>::Key, BoxDt> {
&self.flow_params_type_reg
}
/// Returns a mutable reference to the flow params type registry.
pub fn flow_params_type_reg_mut(
&mut self,
) -> &mut TypeReg<<PKeys::FlowParamsKMaybe as KeyMaybe>::Key, BoxDt> {
&mut self.flow_params_type_reg
}
}
impl<PKeys> Default for ParamsTypeRegs<PKeys>
where
PKeys: ParamsKeys + 'static,
{
fn default() -> Self {
Self {
workspace_params_type_reg: TypeReg::default(),
profile_params_type_reg: TypeReg::default(),
flow_params_type_reg: TypeReg::default(),
}
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/params/flow_params_opt.rs | crate/rt_model_core/src/params/flow_params_opt.rs | use std::{
hash::Hash,
marker::PhantomData,
ops::{Deref, DerefMut},
};
use serde::Serialize;
use type_reg::untagged::{BoxDt, TypeMapOpt};
/// Information that is applicable to a flow. `TypeMapOpt<K>` newtype.
///
/// This is used to keep track of [`FlowParams`] that need to be removed when
/// building a `CmdCtx*`.
///
/// [`FlowParams`]: crate::params::FlowParams
///
/// # Type Parameters
///
/// * `K`: Type of key for the `FlowParamsOpt` map.
#[derive(Clone, Debug, Serialize)]
#[serde(transparent)] // Needed to serialize as a map instead of a list.
pub struct FlowParamsOpt<K>(TypeMapOpt<K, BoxDt>, PhantomData<K>)
where
K: Eq + Hash;
impl<K> FlowParamsOpt<K>
where
K: Eq + Hash,
{
/// Returns a new `FlowParamsOpt` map.
pub fn new() -> Self {
Self::default()
}
/// Creates an empty `FlowParamsOpt` map with the specified capacity.
///
/// The `FlowParamsOpt` will be able to hold at least capacity elements
/// without reallocating. If capacity is 0, the map will not allocate.
pub fn with_capacity(capacity: usize) -> Self {
Self(TypeMapOpt::with_capacity_typed(capacity), PhantomData)
}
/// Returns the inner map.
pub fn into_inner(self) -> TypeMapOpt<K, BoxDt> {
self.0
}
}
impl<K> Default for FlowParamsOpt<K>
where
K: Eq + Hash,
{
fn default() -> Self {
Self(TypeMapOpt::default(), PhantomData)
}
}
impl<K> Deref for FlowParamsOpt<K>
where
K: Eq + Hash,
{
type Target = TypeMapOpt<K, BoxDt>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<K> DerefMut for FlowParamsOpt<K>
where
K: Eq + Hash,
{
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<K> From<TypeMapOpt<K, BoxDt>> for FlowParamsOpt<K>
where
K: Eq + Hash,
{
fn from(type_map: TypeMapOpt<K, BoxDt>) -> Self {
Self(type_map, PhantomData)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/params/profile_params.rs | crate/rt_model_core/src/params/profile_params.rs | use std::{
hash::Hash,
marker::PhantomData,
ops::{Deref, DerefMut},
};
use serde::Serialize;
use type_reg::untagged::{BoxDt, TypeMap};
/// Information that is shared across flows within a profile. `TypeMap<K>`
/// newtype.
///
/// Shared information are the ones that will not change when using different
/// flows. For example, deploying a set of servers, or exporting configuration
/// from those servers will use the same values for the following:
///
/// * Profile name
/// * Server hostnames
///
/// # Type Parameters
///
/// * `K`: Type of key for the `ProfileParams` map.
#[derive(Clone, Debug, Serialize)]
#[serde(transparent)] // Needed to serialize as a map instead of a list.
pub struct ProfileParams<K>(TypeMap<K, BoxDt>, PhantomData<K>)
where
K: Eq + Hash;
impl<K> ProfileParams<K>
where
K: Eq + Hash,
{
/// Returns a new `ProfileParams` map.
pub fn new() -> Self {
Self::default()
}
/// Creates an empty `ProfileParams` map with the specified capacity.
///
/// The `ProfileParams` will be able to hold at least capacity elements
/// without reallocating. If capacity is 0, the map will not allocate.
pub fn with_capacity(capacity: usize) -> Self {
Self(TypeMap::with_capacity_typed(capacity), PhantomData)
}
/// Returns the inner map.
pub fn into_inner(self) -> TypeMap<K, BoxDt> {
self.0
}
}
impl<K> Default for ProfileParams<K>
where
K: Eq + Hash,
{
fn default() -> Self {
Self(TypeMap::default(), PhantomData)
}
}
impl<K> Deref for ProfileParams<K>
where
K: Eq + Hash,
{
type Target = TypeMap<K, BoxDt>;
fn deref(&self) -> &Self::Target {
&self.0
}
}
impl<K> DerefMut for ProfileParams<K>
where
K: Eq + Hash,
{
fn deref_mut(&mut self) -> &mut Self::Target {
&mut self.0
}
}
impl<K> From<TypeMap<K, BoxDt>> for ProfileParams<K>
where
K: Eq + Hash,
{
fn from(type_map: TypeMap<K, BoxDt>) -> Self {
Self(type_map, PhantomData)
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/error/apply_cmd_error.rs | crate/rt_model_core/src/error/apply_cmd_error.rs | use std::{fmt, fmt::Write};
use crate::{ItemsStateStoredStale, StateStoredAndDiscovered};
/// Error applying changes to items.
#[cfg_attr(feature = "error_reporting", derive(miette::Diagnostic))]
#[derive(Debug, thiserror::Error)]
pub enum ApplyCmdError {
/// Stored current states were not up to date with actual current states.
#[error(
"Stored current states were not up to date with actual current states.\n\n{stale_states}",
stale_states = stale_states_fmt(items_state_stored_stale)?,
)]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::apply_cmd_error::states_current_out_of_sync),
help(
"\
Run `StatesDiscoverCmd::current` to update the stored current states,\n\
and re-check the difference before applying changes.\
"
),
)
)]
StatesCurrentOutOfSync {
/// Items whose stored current state is out of sync with the discovered
/// state.
items_state_stored_stale: ItemsStateStoredStale,
},
/// Stored goal states were not up to date with actual goal states.
#[error(
"Stored goal states were not up to date with actual goal states.\n\n{stale_states}",
stale_states = stale_states_fmt(items_state_stored_stale)?,
)]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::apply_cmd_error::states_goal_out_of_sync),
help(
"\
Run `StatesDiscoverCmd::goal` to update the stored goal states,\n\
and re-check the difference before applying changes.\
"
),
)
)]
StatesGoalOutOfSync {
/// Items whose stored goal state is out of sync with the discovered
/// state.
items_state_stored_stale: ItemsStateStoredStale,
},
}
fn stale_states_fmt(
items_state_stored_stale: &ItemsStateStoredStale,
) -> Result<String, fmt::Error> {
let mut buffer = String::with_capacity(items_state_stored_stale.len() * 256);
items_state_stored_stale
.iter()
.try_for_each(|(item_id, state_stored_and_discovered)| {
writeln!(&mut buffer, "* {item_id}:\n")?;
match state_stored_and_discovered {
StateStoredAndDiscovered::OnlyStoredExists { state_stored } => {
writeln!(&mut buffer, " - stored: {state_stored}")?;
writeln!(&mut buffer, " - discovered: <none>\n")?;
}
StateStoredAndDiscovered::OnlyDiscoveredExists { state_discovered } => {
writeln!(&mut buffer, " - stored: <none>")?;
writeln!(&mut buffer, " - discovered: {state_discovered}\n")?;
}
StateStoredAndDiscovered::ValuesDiffer {
state_stored,
state_discovered,
} => {
writeln!(&mut buffer, " - stored: {state_stored}")?;
writeln!(&mut buffer, " - discovered: {state_discovered}\n")?;
}
}
Ok(())
})?;
Ok(buffer)
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/error/params_specs_deserialize_error.rs | crate/rt_model_core/src/error/params_specs_deserialize_error.rs | use peace_flow_model::FlowId;
use peace_profile_model::Profile;
#[derive(Debug, thiserror::Error)]
#[error("Failed to deserialize params specs for `{profile}/{flow_id}`.")]
#[cfg_attr(
feature = "error_reporting",
derive(miette::Diagnostic),
diagnostic(
code(peace_rt_model::params_specs_deserialize),
help(
"Make sure that all commands using the `{flow_id}` flow, also use the same item graph.\n\
This is because all Items are used to deserialize state.\n\
\n\
If the item graph is different, it may make sense to use a different flow ID."
)
)
)]
pub struct ParamsSpecsDeserializeError {
/// Profile of the flow.
pub profile: Profile,
/// Flow ID whose params specs are being deserialized.
pub flow_id: FlowId,
/// Source text to be deserialized.
#[cfg(feature = "error_reporting")]
#[source_code]
pub params_specs_file_source: miette::NamedSource<String>,
/// Offset within the source text that the error occurred.
#[cfg(feature = "error_reporting")]
#[label("{}", error_message)]
pub error_span: Option<miette::SourceOffset>,
/// Message explaining the error.
#[cfg(feature = "error_reporting")]
pub error_message: String,
/// Offset within the source text surrounding the error.
#[cfg(feature = "error_reporting")]
#[label]
pub context_span: Option<miette::SourceOffset>,
/// Underlying error.
#[source]
pub error: serde_yaml::Error,
}
#[cfg(feature = "error_reporting")]
impl<'b> std::borrow::Borrow<dyn miette::Diagnostic + 'b> for Box<ParamsSpecsDeserializeError> {
fn borrow<'s>(&'s self) -> &'s (dyn miette::Diagnostic + 'b) {
self.as_ref()
}
}
#[cfg(feature = "error_reporting")]
impl miette::Diagnostic for Box<ParamsSpecsDeserializeError> {
fn code<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.as_ref().code()
}
fn severity(&self) -> Option<miette::Severity> {
self.as_ref().severity()
}
fn help<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.as_ref().help()
}
fn url<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.as_ref().url()
}
fn source_code(&self) -> Option<&dyn miette::SourceCode> {
self.as_ref().source_code()
}
fn labels(&self) -> Option<Box<dyn Iterator<Item = miette::LabeledSpan> + '_>> {
self.as_ref().labels()
}
fn related<'a>(&'a self) -> Option<Box<dyn Iterator<Item = &'a dyn miette::Diagnostic> + 'a>> {
self.as_ref().related()
}
fn diagnostic_source(&self) -> Option<&dyn miette::Diagnostic> {
self.as_ref().diagnostic_source()
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/error/state_downcast_error.rs | crate/rt_model_core/src/error/state_downcast_error.rs | use type_reg::untagged::{BoxDtDisplay, DataType};
/// Error downcasting a `BoxDtDisplay` into an item's concrete state type.
#[cfg_attr(feature = "error_reporting", derive(miette::Diagnostic))]
#[derive(Debug, thiserror::Error)]
pub enum StateDowncastError {
/// Both item states could not be downcasted.
#[error(
"Item states could not be downcasted to `{ty_name}`.\n\
Boxed type are:\n\
\n\
* `{boxed_ty_a:?}`.\n\
* `{boxed_ty_b:?}`.\n\
",
ty_name = ty_name,
boxed_ty_a = state_a.type_name(),
boxed_ty_b = state_b.type_name(),
)]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::state_downcast_error::both),
help(
"\
This error happens when the boxed states could not be downcasted to
this item's state, which indicates one of the following:\n\
\n\
* Peace contains a bug, and passed an incorrect box to this item.\n\
* Item IDs were swapped, such that `ItemA`'s state is passed to `ItemB`.\n\
\n\
This needs some rework on how item IDs are implemented -- as in,
whether we should use a string newtype for `ItemId`s, or redesign
how `Item`s or related types are keyed.\n\
"
),
)
)]
Both {
/// Type name of the state type.
ty_name: String,
/// First state parameter.
state_a: BoxDtDisplay,
/// Second state parameter.
state_b: BoxDtDisplay,
},
/// First item state could not be downcasted.
#[error(
"First item state could not be downcasted to `{ty_name}`.\n\
Boxed type is `{boxed_ty:?}`.",
ty_name = ty_name,
boxed_ty = state_a.type_name(),
)]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::state_downcast_error::first),
help(
"\
This error happens when the boxed states could not be downcasted to
this item's state, which indicates one of the following:\n\
\n\
* Peace contains a bug, and passed an incorrect box to this item.\n\
* Item IDs were swapped, such that `ItemA`'s state is passed to `ItemB`.\n\
\n\
This needs some rework on how item IDs are implemented -- as in,
whether we should use a string newtype for `ItemId`s, or redesign
how `Item`s or related types are keyed.\n\
"
),
)
)]
First {
/// Type name of the state type.
ty_name: String,
/// First state parameter.
state_a: BoxDtDisplay,
},
/// Second item state could not be downcasted.
#[error(
"Second item state could not be downcasted to `{ty_name}`.\n\
Boxed type is `{boxed_ty:?}`.",
ty_name = ty_name,
boxed_ty = state_b.type_name(),
)]
#[cfg_attr(
feature = "error_reporting",
diagnostic(
code(peace_rt_model::state_downcast_error::second),
help(
"\
This error happens when the boxed states could not be downcasted to
this item's state, which indicates one of the following:\n\
\n\
* Peace contains a bug, and passed an incorrect box to this item.\n\
* Item IDs were swapped, such that `ItemA`'s state is passed to `ItemB`.\n\
\n\
This needs some rework on how item IDs are implemented -- as in,
whether we should use a string newtype for `ItemId`s, or redesign
how `Item`s or related types are keyed.\n\
"
),
)
)]
Second {
/// Type name of the state type.
ty_name: String,
/// Second state parameter.
state_b: BoxDtDisplay,
},
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/error/states_deserialize_error.rs | crate/rt_model_core/src/error/states_deserialize_error.rs | use peace_flow_model::FlowId;
#[derive(Debug, thiserror::Error)]
#[cfg_attr(
feature = "error_reporting",
derive(miette::Diagnostic),
diagnostic(
code(peace_rt_model::states_deserialize),
help(
"Make sure that all commands using the `{flow_id}` flow, also use the same item graph.\n\
This is because all Items are used to deserialize state.\n\
\n\
If the item graph is different, it may make sense to use a different flow ID."
)
)
)]
#[error("Failed to deserialize states for flow: `{flow_id}`.")]
pub struct StatesDeserializeError {
/// Flow ID whose states are being deserialized.
pub flow_id: FlowId,
/// Source text to be deserialized.
#[cfg(feature = "error_reporting")]
#[source_code]
pub states_file_source: miette::NamedSource<String>,
/// Offset within the source text that the error occurred.
#[cfg(feature = "error_reporting")]
#[label("{}", error_message)]
pub error_span: Option<miette::SourceOffset>,
/// Message explaining the error.
#[cfg(feature = "error_reporting")]
pub error_message: String,
/// Offset within the source text surrounding the error.
#[cfg(feature = "error_reporting")]
#[label]
pub context_span: Option<miette::SourceOffset>,
/// Underlying error.
#[source]
pub error: serde_yaml::Error,
}
#[cfg(feature = "error_reporting")]
impl<'b> std::borrow::Borrow<dyn miette::Diagnostic + 'b> for Box<StatesDeserializeError> {
fn borrow<'s>(&'s self) -> &'s (dyn miette::Diagnostic + 'b) {
self.as_ref()
}
}
#[cfg(feature = "error_reporting")]
impl miette::Diagnostic for Box<StatesDeserializeError> {
fn code<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.as_ref().code()
}
fn severity(&self) -> Option<miette::Severity> {
self.as_ref().severity()
}
fn help<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.as_ref().help()
}
fn url<'a>(&'a self) -> Option<Box<dyn std::fmt::Display + 'a>> {
self.as_ref().url()
}
fn source_code(&self) -> Option<&dyn miette::SourceCode> {
self.as_ref().source_code()
}
fn labels(&self) -> Option<Box<dyn Iterator<Item = miette::LabeledSpan> + '_>> {
self.as_ref().labels()
}
fn related<'a>(&'a self) -> Option<Box<dyn Iterator<Item = &'a dyn miette::Diagnostic> + 'a>> {
self.as_ref().related()
}
fn diagnostic_source(&self) -> Option<&dyn miette::Diagnostic> {
self.as_ref().diagnostic_source()
}
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/error/web_error.rs | crate/rt_model_core/src/error/web_error.rs | use std::path::PathBuf;
/// Peace web support errors.
#[cfg_attr(feature = "error_reporting", derive(miette::Diagnostic))]
#[derive(Debug, thiserror::Error)]
pub enum WebError {
// web_sys related errors
/// Browser local storage unavailable.
#[error("Browser local storage unavailable.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_web::local_storage_unavailable))
)]
LocalStorageUnavailable,
/// Failed to get browser local storage.
///
/// Note: The original `JsValue` error is converted to a `String` to allow
/// this type to be `Send`.
#[error("Failed to get browser local storage: `{0}`")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_web::local_storage_get))
)]
LocalStorageGet(String),
/// Browser local storage is `None`.
#[error("Browser local storage is none.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_web::local_storage_none))
)]
LocalStorageNone,
/// Browser session storage unavailable.
#[error("Browser session storage unavailable.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_web::session_storage_unavailable))
)]
SessionStorageUnavailable,
/// Failed to get browser session storage.
///
/// Note: The original `JsValue` error is converted to a `String` to allow
/// this type to be `Send`.
#[error("Failed to get browser session storage: `{0}`")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_web::session_storage_get))
)]
SessionStorageGet(String),
/// Browser session storage is `None`.
#[error("Browser session storage is none.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_web::session_storage_none))
)]
SessionStorageNone,
/// Failed to base64 decode an item from browser storage.
#[error(
"Failed to base64 decode an item in browser storage: `{path}`. Value: `{value}` Error: `{error}`"
)]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_web::storage_b64_decode))
)]
StorageB64Decode {
/// Key to get.
path: PathBuf,
/// The base64 encoded value.
value: String,
/// Base64 decode error.
error: base64::DecodeError,
},
/// Failed to get an item from browser storage.
///
/// This failure mode happens when the `get_item` call to the browser fails.
///
/// Note: The original `JsValue` error is converted to a `String` to allow
/// this type to be `Send`.
///
/// Instead of doing that, we could either:
///
/// * Update `resman::Resource` to be `!Send` when compiling to WASM, or
/// * Use <https://docs.rs/send_wrapper/> to wrap the `JsValue`.
///
/// This is because browsers are generally single threaded. The assumption
/// would no longer be true if multiple threads are used, e.g. web workers.
#[error("Failed to get an item in browser storage: `{path}`. Error: `{error}`")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_web::storage_get_item))
)]
StorageGetItem {
/// Key to get.
path: PathBuf,
/// Stringified JS error.
error: String,
},
/// Failed to set an item in browser storage.
///
/// Note: The original `JsValue` error is converted to a `String` to allow
/// this type to be `Send`.
///
/// Instead of doing that, we could either:
///
/// * Update `resman::Resource` to be `!Send` when compiling to WASM, or
/// * Use <https://docs.rs/send_wrapper/> to wrap the `JsValue`.
///
/// This is because browsers are generally single threaded. The assumption
/// would no longer be true if multiple threads are used, e.g. web workers.
#[error("Failed to set an item in browser storage: `{path}`: `{value}`. Error: `{error}`")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_web::storage_set_item))
)]
StorageSetItem {
/// Key to set.
path: PathBuf,
/// Value which failed to be set.
value: String,
/// Stringified JS error.
error: String,
},
/// Failed to remove an item from browser storage.
///
/// This failure mode happens when the `remove_item` call to the browser
/// fails.
///
/// Note: The original `JsValue` error is converted to a `String` to allow
/// this type to be `Send`.
#[error("Failed to remove an item from browser storage: `{path}`. Error: `{error}`")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_web::storage_remove_item))
)]
StorageRemoveItem {
/// Key to remove.
path: PathBuf,
/// Stringified JS error.
error: String,
},
/// Failed to fetch browser Window object.
#[error("Failed to fetch browser Window object.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_web::window_none))
)]
WindowNone,
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/error/native_error.rs | crate/rt_model_core/src/error/native_error.rs | use std::{ffi::OsString, path::PathBuf, sync::Mutex};
use peace_profile_model::ProfileInvalidFmt;
use peace_resource_rt::paths::WorkspaceDir;
/// Peace runtime errors.
#[cfg_attr(feature = "error_reporting", derive(miette::Diagnostic))]
#[derive(Debug, thiserror::Error)]
pub enum NativeError {
/// Failed to present data.
#[error("Failed to present data.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_native::cli_output_present))
)]
CliOutputPresent(#[source] std::io::Error),
#[error("Failed to set current dir to workspace directory: `{}`", workspace_dir.display())]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_native::current_dir_set))
)]
CurrentDirSet {
/// The workspace directory.
workspace_dir: WorkspaceDir,
/// Underlying IO error
#[source]
error: std::io::Error,
},
/// Failed to create file for writing.
#[error("Failed to create file for writing: `{path}`")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_native::file_create))
)]
FileCreate {
/// Path to the file.
path: PathBuf,
/// Underlying IO error.
#[source]
error: std::io::Error,
},
/// Failed to open file for reading.
#[error("Failed to open file for reading: `{path}`")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_native::file_open))
)]
FileOpen {
/// Path to the file.
path: PathBuf,
/// Underlying IO error.
#[source]
error: std::io::Error,
},
/// Failed to read from file.
#[error("Failed to read from file: `{path}`")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_native::file_read))
)]
FileRead {
/// Path to the file.
path: PathBuf,
/// Underlying IO error.
#[source]
error: std::io::Error,
},
/// Failed to write to file.
#[error("Failed to write to file: `{path}`")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_native::file_write))
)]
FileWrite {
/// Path to the file.
path: PathBuf,
/// Underlying IO error.
#[source]
error: std::io::Error,
},
/// Failed to list entries in `PeaceAppDir`.
#[error("Failed to list entries in `PeaceAppDir`: {}", peace_app_dir.display())]
PeaceAppDirRead {
/// Path to the `PeaceAppDir`.
peace_app_dir: PathBuf,
/// Underlying IO error.
#[source]
error: std::io::Error,
},
/// Failed to read entry in `PeaceAppDir`.
#[error("Failed to read entry in `PeaceAppDir`: {}", peace_app_dir.display())]
PeaceAppDirEntryRead {
/// Path to the `PeaceAppDir`.
peace_app_dir: PathBuf,
/// Underlying IO error.
#[source]
error: std::io::Error,
},
/// Failed to read entry file type in `PeaceAppDir`.
#[error("Failed to read entry file type in `PeaceAppDir`: {}", path.display())]
PeaceAppDirEntryFileTypeRead {
/// Path to the entry within `PeaceAppDir`.
path: PathBuf,
/// Underlying IO error.
#[source]
error: std::io::Error,
},
/// Profile directory name is not a valid profile name.
#[error("Profile directory name is not a valid profile name: {}, path: {}", dir_name, path.display())]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_native::profile_dir_invalid_name))
)]
ProfileDirInvalidName {
/// Name of the directory attempted to be parsed as a `Profile`.
dir_name: String,
/// Path to the profile directory.
path: PathBuf,
/// Underlying error,
error: ProfileInvalidFmt<'static>,
},
/// Failed to write to stdout.
#[error("Failed to write to stdout.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_native::stdout_write))
)]
StdoutWrite(#[source] std::io::Error),
/// Storage synchronous thread failed to be joined.
///
/// This variant is used for thread spawning errors for both reads and
/// writes.
#[error("Storage synchronous thread failed to be joined.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_native::storage_sync_thread_spawn))
)]
StorageSyncThreadSpawn(#[source] std::io::Error),
/// Storage synchronous thread failed to be joined.
///
/// This variant is used for thread spawning errors for both reads and
/// writes.
///
/// Note: The underlying thread join error does not implement
/// `std::error::Error`. See
/// <https://doc.rust-lang.org/std/thread/type.Result.html>.
///
/// The `Mutex` is needed to allow `Error` to be `Sync`.
#[error("Storage synchronous thread failed to be joined.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_native::storage_sync_thread_join))
)]
StorageSyncThreadJoin(Mutex<Box<dyn std::any::Any + Send + 'static>>),
/// Failed to read current directory to discover workspace directory.
#[error("Failed to read current directory to discover workspace directory.")]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_native::working_dir_read))
)]
WorkingDirRead(#[source] std::io::Error),
/// Failed to create a workspace directory.
#[error("Failed to create workspace directory: `{path}`.", path = path.display())]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_native::workspace_dir_create))
)]
WorkspaceDirCreate {
/// The directory that was attempted to be created.
path: PathBuf,
/// Underlying IO error.
#[source]
error: std::io::Error,
},
/// Failed to determine workspace directory.
#[error(
"Failed to determine workspace directory as could not find `{file_name}` \
in `{working_dir}` or any parent directories.",
file_name = file_name.to_string_lossy(),
working_dir = working_dir.display())]
#[cfg_attr(
feature = "error_reporting",
diagnostic(code(peace_rt_model_native::workspace_file_not_found))
)]
WorkspaceFileNotFound {
/// Beginning directory of traversal.
working_dir: PathBuf,
/// File or directory name searched for.
file_name: OsString,
},
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/rt_model_core/src/output/output_write.rs | crate/rt_model_core/src/output/output_write.rs | use std::fmt::Debug;
use async_trait::async_trait;
use peace_fmt::Presentable;
cfg_if::cfg_if! {
if #[cfg(feature = "output_progress")] {
use peace_progress_model::{
CmdBlockItemInteractionType,
ProgressTracker,
ProgressUpdateAndId,
};
use crate::CmdProgressTracker;
}
}
/// Transforms return values or errors into a suitable output format.
///
/// # Use cases
///
/// * A CLI implementation transforms the values into text to be printed.
/// * A REST implementation transforms the values into the response.
/// * A frontend implementation transforms the values into HTML elements.
///
/// # Design
///
/// The write functions currently take `&mut self`. From an API implementor
/// perspective, this should not be difficult to use as the return value / error
/// value is intended to be returned at the end of a command.
///
/// Progress updates sent during `ApplyFns::exec` and `CleanOpSpec::exec`.
#[async_trait(?Send)]
pub trait OutputWrite: Debug + Unpin + 'static {
/// Error type of this `OutputWrite`.
///
/// Returned when an error occurs while attempting to output some
/// information.
type Error: std::error::Error;
/// Prepares this `OutputWrite` implementation for rendering progress.
///
/// # Implementors
///
/// This is called at the beginning of command execution, before any
/// potential calls to `OutputWrite::progress_update`.
///
/// At the end of command execution, `OutputWrite::progress_end` is called.
#[cfg(feature = "output_progress")]
async fn progress_begin(&mut self, cmd_progress_tracker: &CmdProgressTracker);
/// Indicates a particular `CmdBlock` has begun.
///
/// # Implementors
///
/// This is called whenever a different `CmdBlock` is started.
#[cfg(feature = "output_progress")]
async fn cmd_block_start(
&mut self,
cmd_block_item_interaction_type: CmdBlockItemInteractionType,
);
/// Signals an update of an `Item`'s `ItemLocationState`.
///
/// # Implementors
///
/// This is called when an `Item`'s current `State` is updated.
///
/// # Maintainers
///
/// The `ItemLocationState` is first constructed in `ItemWrapper`, and this
/// method is invoked in `Progress`.
#[cfg(feature = "output_progress")]
async fn item_location_state(
&mut self,
item_id: peace_item_model::ItemId,
item_location_state: peace_item_interaction_model::ItemLocationState,
);
/// Renders progress information, and returns when no more progress
/// information is available to write.
///
/// This function is infallible as progress information is considered
/// transient, and loss of progress information is not considered as
/// something worth stopping a command for.
///
/// # Implementors
///
/// This should render the progress update to the user in a way that is not
/// overwhelming.
#[cfg(feature = "output_progress")]
async fn progress_update(
&mut self,
progress_tracker: &ProgressTracker,
progress_update_and_id: &ProgressUpdateAndId,
);
/// Notifies this `OutputWrite` implementation to stop rendering progress.
///
/// # Implementors
///
/// This is called at the end of command execution. After this is called,
/// there will be no more calls to `OutputWrite::progress_update` until
/// another call to `OutputWrite::progress_begin`.
#[cfg(feature = "output_progress")]
async fn progress_end(&mut self, cmd_progress_tracker: &CmdProgressTracker);
/// Writes presentable information to the output.
async fn present<P>(&mut self, presentable: P) -> Result<(), Self::Error>
where
P: Presentable,
Self: Sized;
/// Writes an error to the output.
#[cfg(not(feature = "error_reporting"))]
async fn write_err<E>(&mut self, error: &E) -> Result<(), Self::Error>
where
E: std::error::Error;
/// Writes an error to the output.
#[cfg(feature = "error_reporting")]
async fn write_err<E>(&mut self, error: &E) -> Result<(), Self::Error>
where
E: miette::Diagnostic;
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/data_derive/src/lib.rs | crate/data_derive/src/lib.rs | #![cfg_attr(coverage_nightly, feature(coverage_attribute))]
#![recursion_limit = "256"]
extern crate proc_macro;
extern crate proc_macro2;
#[macro_use]
extern crate quote;
#[macro_use]
extern crate syn;
use proc_macro::TokenStream;
use proc_macro2::Literal;
use syn::{
punctuated::Punctuated, token::Comma, Attribute, DataStruct, DeriveInput, Field, Fields,
FieldsNamed, FieldsUnnamed, Ident, Lifetime, Type, TypePath, WhereClause, WherePredicate,
};
/// Used to `#[derive]` the `Data` trait.
///
/// For regular usage, use `#[derive(Data)]`
///
/// For peace crates, also add the `#[peace_internal]` attribute, which
/// references the `peace_data` crate instead of the `peace::data` re-export.
#[proc_macro_derive(Data, attributes(peace_internal))]
pub fn data_access(input: TokenStream) -> TokenStream {
let ast = syn::parse(input).expect("Data derive: Code failed to be parsed.");
let gen = impl_data_access(&ast);
gen.into()
}
fn impl_data_access(ast: &DeriveInput) -> proc_macro2::TokenStream {
let name = &ast.ident;
let (peace_data_path, peace_item_model_path) = ast
.attrs
.iter()
.find(peace_internal)
.map(
#[cfg_attr(coverage_nightly, coverage(off))]
|_| (quote!(peace_data), quote!(peace_item_model)),
)
.unwrap_or_else(|| (quote!(peace::data), quote!(peace::item_model)));
let mut generics = ast.generics.clone();
let (tys, field_names, borrow_return) = data_borrow_impl(&ast.data, name);
let tys = &tys;
// Assumes that the first lifetime is the borrow lifetime
let def_borrow_lt = ast
.generics
.lifetimes()
.next()
.expect("Struct must have at least one lifetime");
let impl_borrow_lt = &def_borrow_lt.lifetime;
{
let where_clause = generics.make_where_clause();
constrain_data_access_types(where_clause, impl_borrow_lt, tys);
}
let (impl_generics, ty_generics, where_clause) = generics.split_for_impl();
quote! {
impl #impl_generics #peace_data_path::DataAccess
for #name #ty_generics
#where_clause
{
fn borrows() -> #peace_data_path::TypeIds {
let mut r = #peace_data_path::TypeIds::new();
#( {
let mut borrows = <#tys as #peace_data_path::DataAccess>::borrows();
r.append(&mut borrows);
} )*
r
}
fn borrow_muts() -> #peace_data_path::TypeIds {
let mut r = #peace_data_path::TypeIds::new();
#( {
let mut borrow_muts = <#tys as #peace_data_path::DataAccess>::borrow_muts();
r.append(&mut borrow_muts);
} )*
r
}
}
impl #impl_generics #peace_data_path::DataAccessDyn
for #name #ty_generics
#where_clause
{
fn borrows(&self) -> #peace_data_path::TypeIds {
let mut r = #peace_data_path::TypeIds::new();
#( {
let mut borrows = <#tys as #peace_data_path::DataAccessDyn>::borrows(&self.#field_names);
r.append(&mut borrows);
} )*
r
}
fn borrow_muts(&self) -> #peace_data_path::TypeIds {
let mut r = #peace_data_path::TypeIds::new();
#( {
let mut borrow_muts = <#tys as #peace_data_path::DataAccessDyn>::borrow_muts(&self.#field_names);
r.append(&mut borrow_muts);
} )*
r
}
}
impl #impl_generics #peace_data_path::Data< #impl_borrow_lt >
for #name #ty_generics
#where_clause
{
fn borrow(item_id: & #impl_borrow_lt #peace_item_model_path::ItemId, resources: & #impl_borrow_lt #peace_data_path::Resources) -> Self {
#borrow_return
}
}
}
}
fn peace_internal(attr: &&Attribute) -> bool {
attr.path().is_ident("peace_internal")
}
/// Adds a `Data<'lt>` bound on each of the system data types.
fn constrain_data_access_types(clause: &mut WhereClause, borrow_lt: &Lifetime, tys: &[&Type]) {
for ty in tys.iter() {
let where_predicate: WherePredicate = parse_quote!(#ty : Data< #borrow_lt >);
clause.predicates.push(where_predicate);
}
}
fn data_borrow_impl<'ast>(
ast: &'ast syn::Data,
name: &Ident,
) -> (
Vec<&'ast Type>,
Vec<proc_macro2::TokenStream>,
proc_macro2::TokenStream,
) {
enum DataType {
Struct,
Tuple,
}
let (data_type, fields) = match ast {
syn::Data::Struct(DataStruct {
fields: Fields::Named(FieldsNamed { named, .. }),
..
}) => (DataType::Struct, named),
syn::Data::Struct(DataStruct {
fields: Fields::Unnamed(FieldsUnnamed { unnamed, .. }),
..
}) => (DataType::Tuple, unnamed),
_ => ({
#[cfg_attr(coverage_nightly, coverage(off))]
|| -> ! { panic!("Enums are not supported") }
})(),
};
let tys = field_types(fields);
let (field_names_tokens, borrow_return) = match data_type {
DataType::Struct => {
let field_names = field_names(fields);
let field_names_tokens = field_names
.normal_fields
.iter()
.map(|ident| quote!(#ident))
.collect::<Vec<_>>();
let phantom_data_fields = &field_names.phantom_data_fields;
let borrow_return = quote! {
#name {
#( #field_names_tokens: Data::borrow(item_id, resources) ),*
#(, #phantom_data_fields: ::std::marker::PhantomData)*
}
};
(field_names_tokens, borrow_return)
}
DataType::Tuple => {
let count = tys.len();
let field_names_tokens = (0..count)
.map(Literal::usize_unsuffixed)
.map(|n| quote!(#n))
.collect::<Vec<_>>();
let borrow = vec![quote! { Data::borrow(item_id, resources) }; count];
let borrow_return = quote! {
#name ( #( #borrow ),* )
};
(field_names_tokens, borrow_return)
}
};
(tys, field_names_tokens, borrow_return)
}
fn field_types(fields: &Punctuated<Field, Comma>) -> Vec<&Type> {
fields
.iter()
.filter_map(|field| {
if !is_phantom_data(field) {
Some(&field.ty)
} else {
None
}
})
.collect()
}
fn field_names(fields: &Punctuated<Field, Comma>) -> FieldNames<'_> {
fields
.iter()
.fold(FieldNames::default(), |mut field_names, field| {
if is_phantom_data(field) {
if let Some(field_name) = field.ident.as_ref() {
field_names.phantom_data_fields.push(field_name);
}
} else if let Some(field_name) = field.ident.as_ref() {
field_names.normal_fields.push(field_name);
}
field_names
})
}
fn is_phantom_data(field: &Field) -> bool {
matches!(&field.ty, Type::Path(TypePath { path, .. })
if matches!(path.segments.last(), Some(segment) if segment.ident == "PhantomData"))
}
#[derive(Default)]
struct FieldNames<'field> {
normal_fields: Vec<&'field Ident>,
phantom_data_fields: Vec<&'field Ident>,
}
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
azriel91/peace | https://github.com/azriel91/peace/blob/5e2c43f2c0b18672749d0902d2285c703e24de97/crate/webi_rt/src/lib.rs | crate/webi_rt/src/lib.rs | //! Web interface runtime data types for the peace automation framework.
| rust | Apache-2.0 | 5e2c43f2c0b18672749d0902d2285c703e24de97 | 2026-01-04T20:22:52.922300Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.