repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/arithmetic/lui.rs | prover/src/arithmetic/lui.rs | //! Support for the LUI instructions. rt = imm << 16
//!
//! This crate verifies an LUI instruction, which takes two
//! 32-bit inputs S and A, and produces a 32-bit output C satisfying
//!
//! C = A << 16 (mod 2^32) for LUI
//!
//! The way this computation is carried is by providing a third input
//! B = 1 << 16 (mod 2^32)
//! and then computing:
//! C = A * B (mod 2^32) for LUI
//!
//! Inputs A, S, and B, and output C, are given as arrays of 16-bit
//! limbs. For example, if the limbs of A are a[0]...a[15], then
//!
//! A = \sum_{i=0}^15 a[i] β^i,
//!
//! where β = 2^16 = 2^LIMB_BITS. To verify that A, S, B and C satisfy
//! the equations, we proceed similarly to MUL for LUI.
use crate::arithmetic::columns::{
INPUT_REGISTER_0, INPUT_REGISTER_1, IS_LUI, NUM_ARITH_COLUMNS, N_LIMBS, OUTPUT_REGISTER,
};
use crate::arithmetic::mul;
use crate::arithmetic::utils::{read_value, read_value_i64_limbs, u32_to_array};
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
use plonky2::field::types::PrimeField64;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
pub fn generate<F: PrimeField64>(lv: &mut [F], _nv: &mut [F], filter: usize, imm: u32, rt: u32) {
u32_to_array(&mut lv[INPUT_REGISTER_0], imm);
u32_to_array(&mut lv[INPUT_REGISTER_1], 1u32 << 16);
u32_to_array(&mut lv[OUTPUT_REGISTER], rt);
let input0 = read_value_i64_limbs(lv, INPUT_REGISTER_0); // imm
let input1 = read_value_i64_limbs(lv, INPUT_REGISTER_1); // 1 << 16
match filter {
IS_LUI => {
// We generate the multiplication 1 * (imm << 16) using mul.rs.
mul::generate_mul(lv, input0, input1);
}
_ => panic!("unexpected operation filter"),
};
}
/// Constraint-evaluation entry point for LUI over packed base-field
/// values; dispatches to `eval_packed_lui`. The next-row values `_nv`
/// are unused because LUI is verified within a single trace row.
pub(crate) fn eval_packed_generic<P: PackedField>(
    lv: &[P; NUM_ARITH_COLUMNS],
    _nv: &[P; NUM_ARITH_COLUMNS],
    yield_constr: &mut ConstraintConsumer<P>,
) {
    eval_packed_lui(lv, yield_constr);
}
/// Evaluates the constraints for a LUI opcode.
/// The logic is the same as the one for MUL: the two factors are read
/// from `INPUT_REGISTER_0` (the immediate) and `INPUT_REGISTER_1`
/// (which `generate` fills with the constant 2^16), and the MUL
/// constraints check their product against `OUTPUT_REGISTER`.
fn eval_packed_lui<P: PackedField>(
    lv: &[P; NUM_ARITH_COLUMNS],
    yield_constr: &mut ConstraintConsumer<P>,
) {
    let is_lui = lv[IS_LUI];
    let left_limbs = read_value::<N_LIMBS, _>(lv, INPUT_REGISTER_0);
    let right_limbs = read_value::<N_LIMBS, _>(lv, INPUT_REGISTER_1);
    mul::eval_packed_generic_mul(lv, is_lui, left_limbs, right_limbs, yield_constr);
}
/// Recursive-circuit counterpart of `eval_packed_generic`; dispatches
/// to `eval_ext_circuit_lui`. The next-row values `_nv` are unused
/// because LUI is verified within a single trace row.
pub(crate) fn eval_ext_circuit<F: RichField + Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    lv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS],
    _nv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS],
    yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
    eval_ext_circuit_lui(builder, lv, yield_constr);
}
/// Circuit version of `eval_packed_lui`: builds the LUI constraints
/// (a MUL of `INPUT_REGISTER_0` by the 2^16 constant stored in
/// `INPUT_REGISTER_1`) inside a plonky2 recursive circuit.
fn eval_ext_circuit_lui<F: RichField + Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    lv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS],
    yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
    let is_lui = lv[IS_LUI];
    let left_limbs = read_value::<N_LIMBS, _>(lv, INPUT_REGISTER_0);
    let right_limbs = read_value::<N_LIMBS, _>(lv, INPUT_REGISTER_1);
    mul::eval_ext_mul_circuit(builder, lv, is_lui, left_limbs, right_limbs, yield_constr);
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/arithmetic/mod.rs | prover/src/arithmetic/mod.rs | pub mod addcy;
pub mod arithmetic_stark;
pub mod columns;
pub mod div;
pub mod lo_hi;
pub mod lui;
pub mod mul;
pub mod mult;
pub mod shift;
pub mod slt;
pub mod sra;
pub mod utils;
use crate::witness::util::sign_extend;
use plonky2::field::types::PrimeField64;
/// MIPS binary (two-input) arithmetic operations supported by the
/// arithmetic STARK. Each variant has a dedicated filter column in the
/// trace (see `row_filter`).
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(crate) enum BinaryOperator {
    // Additions and subtractions (register and immediate forms).
    ADD,
    ADDU,
    ADDI,
    ADDIU,
    SUB,
    SUBU,
    // Multiplications and divisions; MULT/MULTU/DIV/DIVU produce a
    // (lo, hi) result pair (see `BinaryOperator::result`).
    MULT,
    MULTU,
    MUL,
    DIV,
    DIVU,
    // Shifts: variable (register) and constant (shamt) forms.
    SLLV,
    SRLV,
    SRAV,
    SLL,
    SRL,
    SRA,
    // Set-less-than comparisons, signed/unsigned, register/immediate.
    SLT,
    SLTU,
    SLTI,
    SLTIU,
    // Load upper immediate.
    LUI,
    // Moves to/from the HI/LO special registers.
    MFHI,
    MTHI,
    MFLO,
    MTLO,
}
impl BinaryOperator {
    /// Compute the concrete result of applying this operator to
    /// `input0` and `input1`.
    ///
    /// Returns a pair `(result0, result1)`. For MULT/MULTU and
    /// DIV/DIVU the pair is `(lo, hi)`; for every other operator the
    /// second element is 0.
    pub(crate) fn result(&self, input0: u32, input1: u32) -> (u32, u32) {
        match self {
            BinaryOperator::ADD => (input0.overflowing_add(input1).0, 0),
            BinaryOperator::ADDU => (input0.overflowing_add(input1).0, 0),
            // Immediate adds sign-extend the 16-bit immediate first.
            BinaryOperator::ADDI => {
                let sein = sign_extend::<16>(input1);
                (input0.overflowing_add(sein).0, 0)
            }
            BinaryOperator::ADDIU => {
                let sein = sign_extend::<16>(input1);
                (input0.overflowing_add(sein).0, 0)
            }
            BinaryOperator::SUB => (input0.overflowing_sub(input1).0, 0),
            BinaryOperator::SUBU => (input0.overflowing_sub(input1).0, 0),
            // Constant-shift forms: a shift amount above 31 yields 0
            // (Rust's `<<`/`>>` would panic or wrap on oversize shifts).
            BinaryOperator::SLL => (if input1 > 31 { 0 } else { input0 << input1 }, 0),
            BinaryOperator::SRL => (if input1 > 31 { 0 } else { input0 >> input1 }, 0),
            BinaryOperator::SRA => {
                // Arithmetic right shift: operate on the value as signed.
                let sin = input0 as i32;
                let sout = if input1 > 31 { 0 } else { sin >> input1 };
                (sout as u32, 0)
            }
            // Variable-shift forms only use the low 5 bits of the
            // shift amount.
            BinaryOperator::SLLV => (input0 << (input1 & 0x1f), 0),
            BinaryOperator::SRLV => (input0 >> (input1 & 0x1F), 0),
            BinaryOperator::SRAV => {
                // same as SRA
                let sin = input0 as i32;
                let sout = sin >> (input1 & 0x1f),
                (sout as u32, 0)
            }
            BinaryOperator::MUL => (input0.overflowing_mul(input1).0, 0),
            BinaryOperator::SLTU => {
                if input0 < input1 {
                    (1, 0)
                } else {
                    (0, 0)
                }
            }
            BinaryOperator::SLT => {
                if (input0 as i32) < (input1 as i32) {
                    (1, 0)
                } else {
                    (0, 0)
                }
            }
            BinaryOperator::SLTIU => {
                // Immediate is sign-extended, then compared unsigned.
                let out = sign_extend::<16>(input1);
                if input0 < out {
                    (1, 0)
                } else {
                    (0, 0)
                }
            }
            BinaryOperator::SLTI => {
                // Immediate is sign-extended, then compared signed.
                let out = sign_extend::<16>(input1);
                if (input0 as i32) < (out as i32) {
                    (1, 0)
                } else {
                    (0, 0)
                }
            }
            BinaryOperator::LUI => {
                // The sign extension is harmless here: after shifting
                // left by 16 only the low 16 bits of `out` survive.
                let out = sign_extend::<16>(input0);
                (out.overflowing_shl(16).0, 0)
            }
            BinaryOperator::MULT => {
                // Signed 32x32 -> 64 multiply, split into (lo, hi).
                let out = (((input0 as i32) as i64) * ((input1 as i32) as i64)) as u64;
                (out as u32, (out >> 32) as u32) // lo,hi
            }
            BinaryOperator::MULTU => {
                // Unsigned 32x32 -> 64 multiply, split into (lo, hi).
                let out = input0 as u64 * input1 as u64;
                (out as u32, (out >> 32) as u32) //lo,hi
            }
            // NOTE(review): Rust's `/` and `%` panic when input1 == 0
            // (and DIV additionally on i32::MIN / -1); presumably
            // callers never pass a zero divisor here — confirm.
            BinaryOperator::DIV => (
                ((input0 as i32) / (input1 as i32)) as u32, // lo
                ((input0 as i32) % (input1 as i32)) as u32, // hi
            ),
            BinaryOperator::DIVU => (input0 / input1, input0 % input1), //lo,hi
            // HI/LO moves simply forward their single operand.
            BinaryOperator::MFHI
            | BinaryOperator::MTHI
            | BinaryOperator::MFLO
            | BinaryOperator::MTLO => (input0, 0),
        }
    }
    /// Map this operator to its filter column index in the arithmetic
    /// table (see `columns`).
    pub(crate) fn row_filter(&self) -> usize {
        match self {
            BinaryOperator::ADD => columns::IS_ADD,
            BinaryOperator::ADDU => columns::IS_ADDU,
            BinaryOperator::ADDI => columns::IS_ADDI,
            BinaryOperator::ADDIU => columns::IS_ADDIU,
            BinaryOperator::SUB => columns::IS_SUB,
            BinaryOperator::SUBU => columns::IS_SUBU,
            BinaryOperator::MULT => columns::IS_MULT,
            BinaryOperator::MULTU => columns::IS_MULTU,
            BinaryOperator::MUL => columns::IS_MUL,
            BinaryOperator::DIV => columns::IS_DIV,
            BinaryOperator::DIVU => columns::IS_DIVU,
            BinaryOperator::SLL => columns::IS_SLL,
            BinaryOperator::SRL => columns::IS_SRL,
            BinaryOperator::SRA => columns::IS_SRA,
            BinaryOperator::SLLV => columns::IS_SLLV,
            BinaryOperator::SRLV => columns::IS_SRLV,
            BinaryOperator::SRAV => columns::IS_SRAV,
            BinaryOperator::SLTIU => columns::IS_SLTIU,
            BinaryOperator::SLTI => columns::IS_SLTI,
            BinaryOperator::SLTU => columns::IS_SLTU,
            BinaryOperator::SLT => columns::IS_SLT,
            BinaryOperator::LUI => columns::IS_LUI,
            BinaryOperator::MFHI => columns::IS_MFHI,
            BinaryOperator::MTHI => columns::IS_MTHI,
            BinaryOperator::MFLO => columns::IS_MFLO,
            BinaryOperator::MTLO => columns::IS_MTLO,
        }
    }
}
/// An enum representing arithmetic operations; currently only binary
/// (two-input) operations are supported. The results are computed
/// eagerly when the operation is constructed (see `Operation::binary`)
/// and stored alongside the operands.
#[derive(Debug, Clone)]
pub(crate) enum Operation {
    BinaryOperation {
        operator: BinaryOperator,
        input0: u32,
        input1: u32,
        // Primary result; for MULT/MULTU/DIV/DIVU this is `lo`.
        result0: u32,
        // Secondary result; `hi` for MULT/MULTU/DIV/DIVU, else 0.
        result1: u32,
    },
}
impl Operation {
    /// Create a binary operation with the given inputs; the result is
    /// computed eagerly via [`BinaryOperator::result`] and stored in
    /// the returned value.
    ///
    /// NB: Some shift operations require a small amount of operand
    /// preprocessing at the call site; see
    /// witness/operation.rs::append_shift(). (NOTE(review): the
    /// original comment here described `Shl`/`Shr` operators and a
    /// `1 << shift` second operand, but no such variants exist in
    /// `BinaryOperator` — confirm the intended contract for the
    /// SLL/SRL family against append_shift().)
    pub(crate) fn binary(operator: BinaryOperator, input0: u32, input1: u32) -> Self {
        let (result0, result1) = operator.result(input0, input1);
        Self::BinaryOperation {
            operator,
            input0,
            input1,
            result0,
            result1,
        }
    }
    /// Return the precomputed `(result0, result1)` pair.
    pub(crate) fn result(&self) -> (u32, u32) {
        match self {
            Operation::BinaryOperation {
                result0, result1, ..
            } => (*result0, *result1),
        }
    }
    /// Convert operation into one or two rows of the trace.
    ///
    /// Morally these types should be [F; NUM_ARITH_COLUMNS], but we
    /// use vectors because that's what utils::transpose (who consumes
    /// the result of this function as part of the range check code)
    /// expects.
    ///
    /// The second row is `Some` only for operations whose verification
    /// spans two trace rows — DIV/DIVU, SRL/SRLV and SRA/SRAV; see
    /// `binary_op_to_rows`.
    fn to_rows<F: PrimeField64>(&self) -> (Vec<F>, Option<Vec<F>>) {
        match *self {
            Operation::BinaryOperation {
                operator,
                input0,
                input1,
                result0,
                result1,
            } => binary_op_to_rows(operator, input0, input1, result0, result1),
        }
    }
}
/// Build the trace row(s) for a single binary operation.
///
/// Sets the operator's filter column to one and delegates witness
/// generation to the per-operation module. Returns the first row and,
/// for the two-row operations (DIV/DIVU, SRL/SRLV, SRA/SRAV), a
/// second row.
fn binary_op_to_rows<F: PrimeField64>(
    op: BinaryOperator,
    input0: u32,
    input1: u32,
    result0: u32,
    result1: u32,
) -> (Vec<F>, Option<Vec<F>>) {
    let mut row = vec![F::ZERO; columns::NUM_ARITH_COLUMNS];
    row[op.row_filter()] = F::ONE;
    match op {
        BinaryOperator::ADD
        | BinaryOperator::SUB
        | BinaryOperator::ADDI
        | BinaryOperator::ADDIU
        | BinaryOperator::ADDU
        | BinaryOperator::SUBU => {
            addcy::generate(&mut row, op.row_filter(), input0, input1);
            (row, None)
        }
        BinaryOperator::MUL => {
            mul::generate(&mut row, input0, input1);
            (row, None)
        }
        BinaryOperator::SLT
        | BinaryOperator::SLTI
        | BinaryOperator::SLTU
        | BinaryOperator::SLTIU => {
            slt::generate(&mut row, op.row_filter(), input0, input1, result0);
            (row, None)
        }
        BinaryOperator::MULT | BinaryOperator::MULTU => {
            mult::generate(&mut row, op.row_filter(), input0, input1);
            (row, None)
        }
        // Division needs a second trace row.
        BinaryOperator::DIV | BinaryOperator::DIVU => {
            let mut nv = vec![F::ZERO; columns::NUM_ARITH_COLUMNS];
            div::generate(
                &mut row,
                &mut nv,
                op.row_filter(),
                input0,
                input1,
                result0,
                result1,
            );
            (row, Some(nv))
        }
        BinaryOperator::LUI => {
            // `lui::generate` takes a next-row argument but leaves it
            // untouched, so the scratch `nv` is dropped here.
            let mut nv = vec![F::ZERO; columns::NUM_ARITH_COLUMNS];
            lui::generate(&mut row, &mut nv, op.row_filter(), input0, result0);
            (row, None)
        }
        // NB: the shift generators take operands in (input1, input0)
        // order — presumably (shift amount, value); confirm against
        // shift::generate / sra::generate.
        BinaryOperator::SLL | BinaryOperator::SLLV => {
            // Left shifts are single-row ops: the second row is dropped.
            let mut nv = vec![F::ZERO; columns::NUM_ARITH_COLUMNS];
            shift::generate(&mut row, &mut nv, op.row_filter(), input1, input0, result0);
            (row, None)
        }
        BinaryOperator::SRL | BinaryOperator::SRLV => {
            let mut nv = vec![F::ZERO; columns::NUM_ARITH_COLUMNS];
            shift::generate(&mut row, &mut nv, op.row_filter(), input1, input0, result0);
            (row, Some(nv))
        }
        BinaryOperator::SRA | BinaryOperator::SRAV => {
            let mut nv = vec![F::ZERO; columns::NUM_ARITH_COLUMNS];
            sra::generate(&mut row, &mut nv, op.row_filter(), input1, input0, result0);
            (row, Some(nv))
        }
        BinaryOperator::MFHI
        | BinaryOperator::MTHI
        | BinaryOperator::MFLO
        | BinaryOperator::MTLO => {
            lo_hi::generate(&mut row, op.row_filter(), input0, result0);
            (row, None)
        }
    }
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/arithmetic/addcy.rs | prover/src/arithmetic/addcy.rs | //! Support for MIPS instructions ADD, SUB, LT and GT
//!
//! This crate verifies MIPS instructions ADD, SUB, LT and GT (i.e. for
//! unsigned inputs). Each of these instructions can be verified using
//! the "add with carry out" equation
//!
//! X + Y = Z + CY * 2^32
//!
//! by an appropriate assignment of "inputs" and "outputs" to the
//! variables X, Y, Z and CY. Specifically,
//!
//! ADD: X + Y, inputs X, Y, output Z, ignore CY
//! SUB: Z - X, inputs X, Z, output Y, ignore CY
//! GT: X > Z, inputs X, Z, output CY, auxiliary output Y
//! LT: Z < X, inputs Z, X, output CY, auxiliary output Y
use itertools::Itertools;
use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
use plonky2::field::types::{Field, PrimeField64};
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use crate::arithmetic::columns::*;
use crate::arithmetic::utils::u32_to_array;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
/// Generate a trace row for the ADD, ADDU, ADDI, ADDIU, SUB and SUBU
/// operations.
///
/// Writes the two 32-bit operands into the input registers, the
/// wrapping sum/difference into the output register and the
/// carry/borrow bit into the first auxiliary register. Panics on any
/// other filter.
pub(crate) fn generate<F: PrimeField64>(lv: &mut [F], filter: usize, left_in: u32, right_in: u32) {
    u32_to_array(&mut lv[INPUT_REGISTER_0], left_in);
    u32_to_array(&mut lv[INPUT_REGISTER_1], right_in);
    u32_to_array(&mut lv[INPUT_REGISTER_2], 0);
    // Every supported filter is an addition or subtraction with
    // carry/borrow out.
    // FIXME(upstream note): ensure all these filters are also
    // constrained by the eval functions.
    let (out, cy) = match filter {
        IS_ADD | IS_ADDI | IS_ADDU | IS_ADDIU => left_in.overflowing_add(right_in),
        IS_SUB | IS_SUBU => left_in.overflowing_sub(right_in),
        _ => panic!("unexpected operation filter"),
    };
    u32_to_array(&mut lv[AUX_INPUT_REGISTER_0], cy as u32);
    u32_to_array(&mut lv[OUTPUT_REGISTER], out);
}
/// 2^-16 mod (2^64 - 2^32 + 1), i.e. the multiplicative inverse of
/// 2^LIMB_BITS in the Goldilocks field; used to propagate the carry
/// from one limb to the next in `eval_packed_generic_addcy`.
const GOLDILOCKS_INVERSE_65536: u64 = 18446462594437939201;
/// Constrains x + y == z + cy*2^32, assuming filter != 0.
///
/// Set `is_two_row_op=true` to allow the code to be called from the
/// two-row `modular` code (for checking that the modular output is
/// reduced); in that mode all constraints are emitted as transition
/// constraints.
///
/// NB: This function ONLY verifies that cy is 0 or 1 when
/// is_two_row_op=false; when is_two_row_op=true the caller must
/// verify for itself.
///
/// Note that the digits of `x + y` are in `[0, 2*(2^16-1)]`
/// (i.e. they are the sums of two 16-bit numbers), whereas the digits
/// of `z` can only be in `[0, 2^16-1]`. In the function we check that:
///
///    \sum_i (x_i + y_i) * 2^(16*i) = \sum_i z_i * 2^(16*i) + given_cy*2^32.
///
/// If `N_LIMBS = 1`, then this amounts to verifying that either `x_0
/// + y_0 = z_0` or `x_0 + y_0 == z_0 + cy*2^16` (this is `t` in the
/// loop below). Ok. Now assume the constraints are valid for
/// `N_LIMBS = n-1`. Then by induction,
///
///    \sum_{i=0}^{n-1} (x_i + y_i) * 2^(16*i) + (x_n + y_n)*2^(16*n) ==
///    \sum_{i=0}^{n-1} z_i * 2^(16*i) + cy_{n-1}*2^(16*n) + z_n*2^(16*n)
///    + cy_n*2^(16*n)
///
/// is true if `(x_n + y_n)*2^(16*n) == cy_{n-1}*2^(16*n) +
/// z_n*2^(16*n) + cy_n*2^(16*n)` (again, this is `t` in the loop)
/// with the last `cy_n` checked against the `given_cy` given as input.
pub(crate) fn eval_packed_generic_addcy<P: PackedField>(
    yield_constr: &mut ConstraintConsumer<P>,
    filter: P,
    x: &[P],
    y: &[P],
    z: &[P],
    given_cy: &[P],
    is_two_row_op: bool,
) {
    debug_assert!(
        x.len() == N_LIMBS && y.len() == N_LIMBS && z.len() == N_LIMBS && given_cy.len() == N_LIMBS
    );
    let overflow = P::Scalar::from_canonical_u64(1u64 << LIMB_BITS);
    let overflow_inv = P::Scalar::from_canonical_u64(GOLDILOCKS_INVERSE_65536);
    debug_assert!(
        overflow * overflow_inv == P::Scalar::ONE,
        "only works with LIMB_BITS=16 and F=Goldilocks"
    );
    // Running carry, propagated limb by limb from least significant.
    let mut cy = P::ZEROS;
    for ((&xi, &yi), &zi) in x.iter().zip_eq(y).zip_eq(z) {
        // Verify that (xi + yi) - zi is either 0 or 2^LIMB_BITS
        let t = cy + xi + yi - zi;
        if is_two_row_op {
            yield_constr.constraint_transition(filter * t * (overflow - t));
        } else {
            yield_constr.constraint(filter * t * (overflow - t));
        }
        // cy <-- 0 or 1
        // NB: this is multiplication by a constant, so doesn't
        // increase the degree of the constraint.
        cy = t * overflow_inv;
    }
    if is_two_row_op {
        // NB: Mild hack: We don't check that given_cy[0] is 0 or 1
        // when is_two_row_op is true because that's only the case
        // when this function is called from
        // modular::modular_constr_poly(), in which case (1) this
        // condition has already been checked and (2) it exceeds the
        // degree budget because given_cy[0] is already degree 2.
        yield_constr.constraint_transition(filter * (cy - given_cy[0]));
        for i in 1..N_LIMBS {
            yield_constr.constraint_transition(filter * given_cy[i]);
        }
    } else {
        // The carry must be boolean, must match the final running
        // carry, and all higher limbs of given_cy must be zero.
        yield_constr.constraint(filter * given_cy[0] * (given_cy[0] - P::ONES));
        yield_constr.constraint(filter * (cy - given_cy[0]));
        for i in 1..N_LIMBS {
            yield_constr.constraint(filter * given_cy[i]);
        }
    }
}
pub fn eval_packed_generic<P: PackedField>(
lv: &[P; NUM_ARITH_COLUMNS],
yield_constr: &mut ConstraintConsumer<P>,
) {
let is_add = lv[IS_ADD];
let is_sub = lv[IS_SUB];
let is_addi = lv[IS_ADDI];
let is_addiu = lv[IS_ADDIU];
let in0 = &lv[INPUT_REGISTER_0];
let in1 = &lv[INPUT_REGISTER_1];
let out = &lv[OUTPUT_REGISTER];
let aux = &lv[AUX_INPUT_REGISTER_0];
// x + y = z + w*2^32
eval_packed_generic_addcy(yield_constr, is_add, in0, in1, out, aux, false);
eval_packed_generic_addcy(yield_constr, is_sub, in1, out, in0, aux, false);
eval_packed_generic_addcy(yield_constr, is_addi, in0, in1, out, aux, false);
eval_packed_generic_addcy(yield_constr, is_addiu, in0, in1, out, aux, false);
}
/// Circuit version of `eval_packed_generic_addcy`: constrains
/// x + y == z + cy*2^32 inside a plonky2 recursive circuit. See the
/// packed version for the limb-by-limb carry argument and the
/// `is_two_row_op` caveats.
#[allow(clippy::needless_collect)]
pub(crate) fn eval_ext_circuit_addcy<F: RichField + Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    filter: ExtensionTarget<D>,
    x: &[ExtensionTarget<D>],
    y: &[ExtensionTarget<D>],
    z: &[ExtensionTarget<D>],
    given_cy: &[ExtensionTarget<D>],
    is_two_row_op: bool,
) {
    debug_assert!(
        x.len() == N_LIMBS && y.len() == N_LIMBS && z.len() == N_LIMBS && given_cy.len() == N_LIMBS
    );
    // 2^LIMB_BITS in the base field
    let overflow_base = F::from_canonical_u64(1 << LIMB_BITS);
    // 2^LIMB_BITS in the extension field as an ExtensionTarget
    let overflow = builder.constant_extension(F::Extension::from(overflow_base));
    // 2^-LIMB_BITS in the base field.
    let overflow_inv = F::from_canonical_u64(GOLDILOCKS_INVERSE_65536);
    let mut cy = builder.zero_extension();
    for ((&xi, &yi), &zi) in x.iter().zip_eq(y).zip_eq(z) {
        // t0 = cy + xi + yi
        let t0 = builder.add_many_extension([cy, xi, yi]);
        // t = t0 - zi
        let t = builder.sub_extension(t0, zi);
        // t1 = overflow - t
        let t1 = builder.sub_extension(overflow, t);
        // t2 = t * t1; zero iff t is 0 or 2^LIMB_BITS.
        let t2 = builder.mul_extension(t, t1);
        let filtered_limb_constraint = builder.mul_extension(filter, t2);
        if is_two_row_op {
            yield_constr.constraint_transition(builder, filtered_limb_constraint);
        } else {
            yield_constr.constraint(builder, filtered_limb_constraint);
        }
        // Propagate the carry: cy = t / 2^LIMB_BITS (constant mul).
        cy = builder.mul_const_extension(overflow_inv, t);
    }
    // Final running carry must equal the provided carry limb 0.
    let good_cy = builder.sub_extension(cy, given_cy[0]);
    let cy_filter = builder.mul_extension(filter, good_cy);
    // Check given carry is one bit
    // (the booleanity check is only emitted in the single-row case,
    // mirroring the packed version's "mild hack" for two-row ops).
    let bit_constr = builder.mul_sub_extension(given_cy[0], given_cy[0], given_cy[0]);
    let bit_filter = builder.mul_extension(filter, bit_constr);
    if is_two_row_op {
        yield_constr.constraint_transition(builder, cy_filter);
        for i in 1..N_LIMBS {
            let t = builder.mul_extension(filter, given_cy[i]);
            yield_constr.constraint_transition(builder, t);
        }
    } else {
        yield_constr.constraint(builder, bit_filter);
        yield_constr.constraint(builder, cy_filter);
        for i in 1..N_LIMBS {
            let t = builder.mul_extension(filter, given_cy[i]);
            yield_constr.constraint(builder, t);
        }
    }
}
pub fn eval_ext_circuit<F: RichField + Extendable<D>, const D: usize>(
builder: &mut CircuitBuilder<F, D>,
lv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS],
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let is_add = lv[IS_ADD];
let is_sub = lv[IS_SUB];
//let is_lt = lv[IS_LT];
//let is_gt = lv[IS_GT];
let is_addi = lv[IS_ADDI];
let is_addiu = lv[IS_ADDIU];
let in0 = &lv[INPUT_REGISTER_0];
let in1 = &lv[INPUT_REGISTER_1];
let out = &lv[OUTPUT_REGISTER];
let aux = &lv[AUX_INPUT_REGISTER_0];
eval_ext_circuit_addcy(builder, yield_constr, is_add, in0, in1, out, aux, false);
eval_ext_circuit_addcy(builder, yield_constr, is_sub, in1, out, in0, aux, false);
eval_ext_circuit_addcy(builder, yield_constr, is_addi, in0, in1, out, aux, false);
eval_ext_circuit_addcy(builder, yield_constr, is_addiu, in0, in1, out, aux, false);
}
#[cfg(test)]
mod tests {
    use plonky2::field::goldilocks_field::GoldilocksField;
    use plonky2::field::types::Sample;
    use rand::{Rng, SeedableRng};
    use rand_chacha::ChaCha8Rng;
    use super::*;
    /// Every filter column handled by this module's `generate`.
    /// (The original tests neither zeroed nor exercised IS_ADDU and
    /// IS_SUBU, leaving those filters untested.)
    const OP_FILTERS: [usize; 6] = [IS_ADD, IS_ADDU, IS_ADDI, IS_ADDIU, IS_SUB, IS_SUBU];
    // TODO: Should be able to refactor this test to apply to all operations.
    #[test]
    fn generate_eval_consistency_not_addcy() {
        type F = GoldilocksField;
        let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25);
        let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::sample(&mut rng));
        // If the operation filters are all zero, then the constraints
        // should be met even if all other values are garbage.
        for op_filter in OP_FILTERS {
            lv[op_filter] = F::ZERO;
        }
        let mut constraint_consumer = ConstraintConsumer::new(
            vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
            F::ONE,
            F::ONE,
            F::ONE,
        );
        eval_packed_generic(&lv, &mut constraint_consumer);
        for &acc in &constraint_consumer.constraint_accs {
            assert_eq!(acc, F::ZERO);
        }
    }
    #[test]
    fn generate_eval_consistency_addcy() {
        type F = GoldilocksField;
        let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25);
        const N_ITERS: usize = 1000;
        for _ in 0..N_ITERS {
            for op_filter in OP_FILTERS {
                // Set the entire row to random 16-bit values.
                let mut lv = [F::default(); NUM_ARITH_COLUMNS]
                    .map(|_| F::from_canonical_u16(rng.gen::<u16>()));
                // Set the operation filter under test and explicitly
                // zero all the others, since `eval_packed_generic`
                // treats every filter in one call.
                for f in OP_FILTERS {
                    lv[f] = F::ZERO;
                }
                lv[op_filter] = F::ONE;
                let left_in = rng.gen::<u32>();
                let right_in = rng.gen::<u32>();
                generate(&mut lv, op_filter, left_in, right_in);
                let mut constraint_consumer = ConstraintConsumer::new(
                    vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
                    F::ONE,
                    F::ONE,
                    F::ONE,
                );
                eval_packed_generic(&lv, &mut constraint_consumer);
                for &acc in &constraint_consumer.constraint_accs {
                    assert_eq!(acc, F::ZERO);
                }
                // The generated output register must hold the wrapping
                // sum/difference.
                let expected = match op_filter {
                    IS_ADD | IS_ADDI | IS_ADDU | IS_ADDIU => left_in.overflowing_add(right_in).0,
                    IS_SUB | IS_SUBU => left_in.overflowing_sub(right_in).0,
                    _ => panic!("unrecognised operation"),
                };
                let mut expected_limbs = [F::ZERO; N_LIMBS];
                u32_to_array(&mut expected_limbs, expected);
                assert!(expected_limbs
                    .iter()
                    .zip(&lv[OUTPUT_REGISTER])
                    .all(|(x, y)| x == y));
            }
        }
    }
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/arithmetic/arithmetic_stark.rs | prover/src/arithmetic/arithmetic_stark.rs | use std::marker::PhantomData;
use std::ops::Range;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::util::transpose;
use static_assertions::const_assert;
use super::columns::{NUM_ARITH_COLUMNS, NUM_SHARED_COLS};
use super::shift;
use crate::all_stark::Table;
use crate::arithmetic::columns::{RANGE_COUNTER, RC_FREQUENCIES, SHARED_COLS};
use crate::arithmetic::{addcy, columns, div, lo_hi, lui, mul, mult, slt, sra, Operation};
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::{Column, Filter, TableWithColumns};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::lookup::Lookup;
use crate::stark::Stark;
/// Link the 16-bit columns of the arithmetic table, split into groups
/// of N_LIMBS at a time in `regs`, with the corresponding 32-bit
/// columns of the CPU table. Does this for all ops in `combined_ops`.
///
/// Consecutive pairs of limb columns (lo, hi) are combined as
/// lo + hi*2^16 so each pair matches one 32-bit CPU-table value. The
/// first returned column reconstructs the opcode: the sum of all op
/// filter columns weighted by their opcode values.
fn cpu_arith_data_link<F: Field>(
    combined_ops: &[(usize, u32)],
    regs: &[Range<usize>],
) -> Vec<Column<F>> {
    let limb_base = F::from_canonical_u64(1 << columns::LIMB_BITS);
    let opcode_column = Column::linear_combination(
        combined_ops
            .iter()
            .map(|&(col, code)| (col, F::from_canonical_u32(code))),
    );
    let mut linked = vec![opcode_column];
    // The limb-pairing below assumes N_LIMBS is even.
    const_assert!(columns::N_LIMBS % 2 == 0);
    for reg_cols in regs {
        // Each "register" must span exactly N_LIMBS columns.
        debug_assert_eq!(reg_cols.len(), columns::N_LIMBS);
        for pair in 0..(columns::N_LIMBS / 2) {
            let lo = reg_cols.start + 2 * pair;
            let hi = lo + 1;
            linked.push(Column::linear_combination([(lo, F::ONE), (hi, limb_base)]));
        }
    }
    linked
}
/// Build the arithmetic side of the CPU <-> arithmetic cross-table
/// lookup: the opcode-reconstruction column, the linked input/output
/// register columns, and the row filter.
pub fn ctl_arithmetic_rows<F: Field>() -> TableWithColumns<F> {
    // We scale each filter flag with the associated opcode value.
    // If an arithmetic operation is happening on the CPU side,
    // the CTL will enforce that the reconstructed opcode value
    // from the opcode bits matches.
    // Each entry is (filter column, MIPS opcode encoding); funct-field
    // encodings are shifted by 2^6 — presumably matching the CPU
    // table's opcode packing; confirm against the CPU-side columns.
    const COMBINED_OPS: [(usize, u32); 26] = [
        (columns::IS_ADD, 0b100000 * (1 << 6)),
        (columns::IS_ADDU, 0b100001 * (1 << 6)),
        (columns::IS_ADDI, 0b001000),
        (columns::IS_ADDIU, 0b001001),
        (columns::IS_SUB, 0b100010 * (1 << 6)),
        (columns::IS_SUBU, 0b100011 * (1 << 6)),
        (columns::IS_MULT, 0b011000 * (1 << 6)),
        (columns::IS_MULTU, 0b011001 * (1 << 6)),
        (columns::IS_MUL, 0b011100 + 0b000010 * (1 << 6)),
        (columns::IS_DIV, 0b011010 * (1 << 6)),
        (columns::IS_DIVU, 0b011011 * (1 << 6)),
        (columns::IS_SLLV, 0b000100 * (1 << 6)),
        (columns::IS_SRLV, 0b000110 * (1 << 6)),
        (columns::IS_SRAV, 0b000111 * (1 << 6)),
        #[allow(clippy::erasing_op)]
        (columns::IS_SLL, 0b000000 * (1 << 6)),
        (columns::IS_SRL, 0b000010 * (1 << 6)),
        (columns::IS_SRA, 0b000011 * (1 << 6)),
        (columns::IS_SLT, 0b101010 * (1 << 6)),
        (columns::IS_SLTU, 0b101011 * (1 << 6)),
        (columns::IS_SLTI, 0b001010),
        (columns::IS_SLTIU, 0b001011),
        (columns::IS_LUI, 0b001111),
        (columns::IS_MFHI, 0b010000 * (1 << 6)),
        (columns::IS_MTHI, 0b010001 * (1 << 6)),
        (columns::IS_MFLO, 0b010010 * (1 << 6)),
        (columns::IS_MTLO, 0b010011 * (1 << 6)),
    ];
    const REGISTER_MAP: [Range<usize>; 3] = [
        columns::INPUT_REGISTER_0,
        columns::INPUT_REGISTER_1,
        columns::OUTPUT_REGISTER,
    ];
    // A row participates in the CTL iff any of the op filters is set.
    let filter = Some(Filter::new_simple(Column::sum(
        COMBINED_OPS.iter().map(|(c, _v)| *c),
    )));
    // Create the Arithmetic Table whose columns are those of the
    // operations listed in `COMBINED_OPS` whose inputs and outputs
    // are given by `REGISTER_MAP`, where each element of
    // `REGISTER_MAP` is a range of columns corresponding to a 32-bit
    // input or output register (also the ops are used as the
    // operation filter).
    TableWithColumns::new(
        Table::Arithmetic,
        cpu_arith_data_link(&COMBINED_OPS, &REGISTER_MAP),
        filter,
    )
}
/// STARK verifying the MIPS arithmetic operations. Trace rows are
/// produced from `Operation`s by `generate_trace`, and the shared
/// 16-bit columns are range checked against `RANGE_COUNTER`.
#[derive(Copy, Clone, Default)]
pub struct ArithmeticStark<F, const D: usize> {
    // Marker tying the STARK to its field type; no runtime data.
    pub f: PhantomData<F>,
}
/// Range check strict upper bound: every shared-column value must lie
/// in [0, 2^16).
const RANGE_MAX: usize = 1usize << 16; // Range check strict upper bound
impl<F: RichField, const D: usize> ArithmeticStark<F, D> {
    /// Expects input in *column*-major layout.
    ///
    /// Fills in the two range-check bookkeeping columns:
    /// - `RANGE_COUNTER` counts 0..RANGE_MAX-1, then stays at
    ///   RANGE_MAX-1 for the remaining rows (assumes the trace has at
    ///   least RANGE_MAX rows — guaranteed by `generate_trace`'s
    ///   padding);
    /// - `RC_FREQUENCIES` tallies how often each 16-bit value occurs
    ///   across all shared columns.
    ///
    /// Panics if any shared-column value is >= 2^16.
    fn generate_range_checks(&self, cols: &mut [Vec<F>]) {
        debug_assert!(cols.len() == columns::NUM_ARITH_COLUMNS);
        let n_rows = cols[0].len();
        debug_assert!(cols.iter().all(|col| col.len() == n_rows));
        for i in 0..RANGE_MAX {
            cols[RANGE_COUNTER][i] = F::from_canonical_usize(i);
        }
        for i in RANGE_MAX..n_rows {
            cols[RANGE_COUNTER][i] = F::from_canonical_usize(RANGE_MAX - 1);
        }
        // Generate the frequencies column.
        for col in SHARED_COLS {
            for i in 0..n_rows {
                let x = cols[col][i].to_canonical_u64() as usize;
                assert!(
                    x < RANGE_MAX,
                    "column value {} exceeds the max range value {}",
                    x,
                    RANGE_MAX
                );
                cols[RC_FREQUENCIES][x] += F::ONE;
            }
        }
    }
    /// Convert a list of `Operation`s into column-major polynomial
    /// values: one or two rows per operation, padded with zero rows to
    /// at least RANGE_MAX rows and a power-of-two length, then the
    /// range-check columns are filled in.
    pub(crate) fn generate_trace(&self, operations: &Vec<Operation>) -> Vec<PolynomialValues<F>> {
        // The number of rows reserved is the smallest value that's
        // guaranteed to avoid a reallocation: The only ops that use
        // two rows are the modular operations and DIV, so the only
        // way to reach capacity is when every op is modular or DIV
        // (which is obviously unlikely in normal
        // circumstances). (Also need at least RANGE_MAX rows to
        // accommodate range checks.)
        let max_rows = std::cmp::max(2 * operations.len(), RANGE_MAX);
        let mut trace_rows = Vec::with_capacity(max_rows);
        for op in operations {
            let (row1, maybe_row2) = op.to_rows();
            trace_rows.push(row1);
            if let Some(row2) = maybe_row2 {
                trace_rows.push(row2);
            }
        }
        // Pad the trace with zero rows if it doesn't have enough rows
        // to accommodate the range check columns. Also make sure the
        // trace length is a power of two.
        let padded_len = trace_rows.len().next_power_of_two();
        for _ in trace_rows.len()..std::cmp::max(padded_len, RANGE_MAX) {
            trace_rows.push(vec![F::ZERO; columns::NUM_ARITH_COLUMNS]);
        }
        let mut trace_cols = transpose(&trace_rows);
        self.generate_range_checks(&mut trace_cols);
        trace_cols.into_iter().map(PolynomialValues::new).collect()
    }
}
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ArithmeticStark<F, D> {
type EvaluationFrame<FE, P, const D2: usize>
= StarkFrame<P, NUM_ARITH_COLUMNS>
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;
type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_ARITH_COLUMNS>;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: &Self::EvaluationFrame<FE, P, D2>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
let lv: &[P; NUM_ARITH_COLUMNS] = vars.get_local_values().try_into().unwrap();
let nv: &[P; NUM_ARITH_COLUMNS] = vars.get_next_values().try_into().unwrap();
// Check the range column: First value must be 0, last row
// must be 2^16-1, and intermediate rows must increment by 0
// or 1.
let rc1 = lv[RANGE_COUNTER];
let rc2 = nv[RANGE_COUNTER];
yield_constr.constraint_first_row(rc1);
let incr = rc2 - rc1;
yield_constr.constraint_transition(incr * incr - incr);
let range_max = P::Scalar::from_canonical_u64((RANGE_MAX - 1) as u64);
yield_constr.constraint_last_row(rc1 - range_max);
mul::eval_packed_generic(lv, yield_constr);
mult::eval_packed_generic(lv, yield_constr);
addcy::eval_packed_generic(lv, yield_constr);
slt::eval_packed_generic(lv, yield_constr);
lui::eval_packed_generic(lv, nv, yield_constr);
div::eval_packed(lv, nv, yield_constr);
shift::eval_packed_generic(lv, nv, yield_constr);
sra::eval_packed_generic(lv, nv, yield_constr);
lo_hi::eval_packed_generic(lv, yield_constr);
}
/// Recursive-circuit version of `eval_packed_generic`: builds the same
/// constraints as plonky2 gates so the STARK can be verified inside a
/// SNARK. Must stay in lockstep with the packed evaluation above.
fn eval_ext_circuit(
    &self,
    builder: &mut CircuitBuilder<F, D>,
    vars: &Self::EvaluationFrameTarget,
    yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
    let lv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS] =
        vars.get_local_values().try_into().unwrap();
    let nv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS] =
        vars.get_next_values().try_into().unwrap();
    // Range-counter constraints, mirroring the packed version:
    // first row 0, increments of 0/1, last row RANGE_MAX - 1.
    let rc1 = lv[RANGE_COUNTER];
    let rc2 = nv[RANGE_COUNTER];
    yield_constr.constraint_first_row(builder, rc1);
    let incr = builder.sub_extension(rc2, rc1);
    // t = incr² - incr, forced to zero on transitions.
    let t = builder.mul_sub_extension(incr, incr, incr);
    yield_constr.constraint_transition(builder, t);
    let range_max =
        builder.constant_extension(F::Extension::from_canonical_usize(RANGE_MAX - 1));
    let t = builder.sub_extension(rc1, range_max);
    yield_constr.constraint_last_row(builder, t);
    // Per-operation constraint sets (same order as the packed path).
    mul::eval_ext_circuit(builder, lv, yield_constr);
    mult::eval_ext_circuit(builder, lv, yield_constr);
    addcy::eval_ext_circuit(builder, lv, yield_constr);
    slt::eval_ext_circuit(builder, lv, yield_constr);
    lui::eval_ext_circuit(builder, lv, nv, yield_constr);
    div::eval_ext_circuit(builder, lv, nv, yield_constr);
    shift::eval_ext_circuit(builder, lv, nv, yield_constr);
    sra::eval_ext_circuit(builder, lv, nv, yield_constr);
    lo_hi::eval_ext_circuit(builder, lv, yield_constr);
}
/// Maximum degree of any constraint emitted above. Filter-gated products
/// of two limb polynomials reach degree 3.
fn constraint_degree(&self) -> usize {
    3
}
/// Declares the range-check lookup: every shared column is looked up
/// against the `RANGE_COUNTER` table column (which the constraints above
/// force to enumerate 0..RANGE_MAX), with multiplicities in
/// `RC_FREQUENCIES` and no per-column filter.
fn lookups(&self) -> Vec<Lookup<F>> {
    vec![Lookup {
        columns: Column::singles(SHARED_COLS).collect(),
        table_column: Column::single(RANGE_COUNTER),
        frequencies_column: Column::single(RC_FREQUENCIES),
        filter_columns: vec![None; NUM_SHARED_COLS],
    }]
}
}
#[cfg(test)]
mod tests {
    use anyhow::Result;
    use itertools::Itertools;
    use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
    use rand::{Rng, SeedableRng};
    use rand_chacha::ChaCha8Rng;
    use crate::arithmetic::arithmetic_stark::ArithmeticStark;
    use crate::arithmetic::columns::OUTPUT_REGISTER;
    use crate::arithmetic::*;
    use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
    // The constraints must fit within the declared degree bound.
    #[test]
    fn degree() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = ArithmeticStark<F, D>;
        let stark = S {
            f: Default::default(),
        };
        test_stark_low_degree(stark)
    }
    // The recursive-circuit constraints must agree with the native ones.
    #[test]
    fn circuit() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = ArithmeticStark<F, D>;
        let stark = S {
            f: Default::default(),
        };
        test_stark_circuit_constraints::<F, C, S, D>(stark)
    }
    // Generates a small trace and spot-checks the OUTPUT_REGISTER limbs.
    #[test]
    fn basic_trace() {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = ArithmeticStark<F, D>;
        let stark = S {
            f: Default::default(),
        };
        // 123 + 456 == 579
        let add = Operation::binary(BinaryOperator::ADD, 123, 456);
        // 123 * 456 == 56088
        let mul = Operation::binary(BinaryOperator::MUL, 123, 456);
        // 128 / 13 == 9
        let div0 = Operation::binary(BinaryOperator::DIV, 128, 13);
        // -128 / 13 == -9
        let div1 = Operation::binary(BinaryOperator::DIV, -128i32 as u32, 13);
        // 3526433982 / 14202 == 248305
        let divu = Operation::binary(BinaryOperator::DIVU, 3526433982, 14202);
        // 123 * 456 == 56088
        let mult0 = Operation::binary(BinaryOperator::MULT, 123, 456);
        // -123 * 456 == -56088
        let mult1 = Operation::binary(BinaryOperator::MULT, -123i32 as u32, 456);
        // 123 * 456 == 56088
        let multu = Operation::binary(BinaryOperator::MULTU, 123, 456);
        let ops: Vec<Operation> = vec![add, mul, div0, div1, divu, mult0, mult1, multu];
        let pols = stark.generate_trace(&ops);
        // Trace should always have NUM_ARITH_COLUMNS columns. Here the 8
        // operations (some taking two rows) produce fewer than RANGE_MAX
        // rows, so the trace is padded to exactly RANGE_MAX rows.
        assert!(
            pols.len() == columns::NUM_ARITH_COLUMNS
                && pols.iter().all(|v| v.len() == super::RANGE_MAX)
        );
        // Each operation has a single word answer that we can check
        let expected_output = [
            // Row (some ops take two rows), expected low/high 16-bit limbs
            (0, [579u64, 0]), // ADD_OUTPUT
            (1, [56088, 0]),  // MUL
            (2, [9, 0]),      // DIV (positive)
            (4, [65527, 65535]), // DIV: -9 == 0xFFFF_FFF7
            (6, [51697, 3]),     // DIVU: 248305 == 3*2^16 + 51697
            (8, [56088, 0]),     // MULT
            (9, [9448, 65535]),  // MULT: -56088 == 0xFFFF_24E8
            (10, [56088, 0]),    // MULTU
        ];
        for (row, expected) in expected_output {
            // OUTPUT registers should match expected value...
            for (expected, col) in expected.into_iter().zip_eq(OUTPUT_REGISTER) {
                let out = pols[col].values[row].to_canonical_u64();
                assert_eq!(
                    out, expected,
                    "expected column {} on row {} to be {} but it was {}",
                    col, row, expected, out,
                );
            }
        }
    }
    // Fills a full RANGE_MAX-row trace with random MULTs and checks shape.
    #[test]
    fn big_traces() {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = ArithmeticStark<F, D>;
        let stark = S {
            f: Default::default(),
        };
        let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25);
        let ops = (0..super::RANGE_MAX)
            .map(|_| Operation::binary(BinaryOperator::MULT, rng.gen::<u32>(), rng.gen::<u32>()))
            .collect::<Vec<_>>();
        let pols = stark.generate_trace(&ops);
        // Trace should always have NUM_ARITH_COLUMNS columns and
        // min(RANGE_MAX, operations.len()) rows. In this case there
        // are RANGE_MAX operations with one row each, so RANGE_MAX.
        assert!(
            pols.len() == columns::NUM_ARITH_COLUMNS
                && pols.iter().all(|v| v.len() == super::RANGE_MAX)
        );
    }
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/arithmetic/mul.rs | prover/src/arithmetic/mul.rs | //! Support for the MIPS MUL instruction.
//!
//! This crate verifies an MIPS MUL instruction, which takes two
//! 32-bit inputs A and B, and produces a 32-bit output C satisfying
//!
//! C = A*B (mod 2^32),
//!
//! i.e. C is the lower half of the usual long multiplication
//! A*B. Inputs A and B, and output C, are given as arrays of 16-bit
//! limbs. For example, if the limbs of A are a[0]...a[15], then
//!
//! A = \sum_{i=0}^15 a[i] β^i,
//!
//! where β = 2^16 = 2^LIMB_BITS. To verify that A, B and C satisfy
//! the equation we proceed as follows. Define
//!
//! a(x) = \sum_{i=0}^15 a[i] x^i
//!
//! (so A = a(β)) and similarly for b(x) and c(x). Then A*B = C (mod
//! 2^32) if and only if there exists q such that the polynomial
//!
//! a(x) * b(x) - c(x) - x^16 * q(x)
//!
//! is zero when evaluated at x = β, i.e. it is divisible by (x - β);
//! equivalently, there exists a polynomial s (representing the
//! carries from the long multiplication) such that
//!
//! a(x) * b(x) - c(x) - x^16 * q(x) - (x - β) * s(x) == 0
//!
//! As we only need the lower half of the product, we can omit q(x)
//! since it is multiplied by the modulus β^N_LIMBS = 2^32. Thus we
//! only need to verify
//!
//! a(x) * b(x) - c(x) - (x - β) * s(x) == 0
//!
//! In the code below, this "constraint polynomial" is constructed in
//! the variable `constr_poly`. It must be identically zero for the
//! multiplication operation to be verified, or, equivalently, each of
//! its coefficients must be zero. The variable names of the
//! constituent polynomials are (writing N for N_LIMBS = 2):
//!
//! a(x) = \sum_{i=0}^{N-1} input0[i] * x^i
//! b(x) = \sum_{i=0}^{N-1} input1[i] * x^i
//! c(x) = \sum_{i=0}^{N-1} output[i] * x^i
//! s(x) = \sum_i^{2N-3} aux[i] * x^i
//!
//! Because A, B and C are 32-bit numbers of N = 2 sixteen-bit limbs,
//! the degrees of a, b and c are at most N-1, and deg(s) is bounded
//! accordingly so that (x - β)*s(x) has degree at most N-1. The
//! coefficients of s(x) can nonetheless exceed 16 bits, which is why
//! they are stored offset by 2^20 and split into low/high halves.
//!
//! Note that, unlike for the general modular multiplication (see the
//! file `modular.rs`), we don't need to check that output is reduced,
//! since any value of output is less than β^16 and is hence reduced.
use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
use plonky2::field::types::{Field, PrimeField64};
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use crate::arithmetic::columns::*;
use crate::arithmetic::utils::*;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
/// Given the two limbs of `left_in` and `right_in`, computes `left_in * right_in`.
/// Given the limbs of `left_in` and `right_in`, computes
/// `left_in * right_in` (mod 2^32), writing the product limbs to
/// `OUTPUT_REGISTER` and the carry-witness polynomial s(x) (offset by
/// 2^20 and split into 16-bit halves) to `MUL_AUX_INPUT_{LO,HI}`.
pub(crate) fn generate_mul<F: PrimeField64>(
    lv: &mut [F],
    left_in: [i64; N_LIMBS],
    right_in: [i64; N_LIMBS],
) {
    const MASK: i64 = (1i64 << LIMB_BITS) - 1i64;
    // Input and output have 16-bit limbs
    let mut output_limbs = [0i64; N_LIMBS];
    // Column-wise pen-and-paper long multiplication on 16-bit limbs.
    // First calculate the coefficients of a(x)*b(x) (in unreduced_prod),
    // then do carry propagation to obtain C = c(β) = a(β)*b(β).
    let mut cy = 0i64;
    let mut unreduced_prod = pol_mul_lo(left_in, right_in);
    for col in 0..N_LIMBS {
        let t = unreduced_prod[col] + cy;
        cy = t >> LIMB_BITS;
        output_limbs[col] = t & MASK;
    }
    // In principle, the final cy could be dropped because this is
    // multiplication modulo 2^32. However, we need it below for
    // aux_limbs to account for the digit of unreduced_prod that
    // overflows the modulus.
    lv[OUTPUT_REGISTER].copy_from_slice(&output_limbs.map(|c| F::from_canonical_i64(c)));
    pol_sub_assign(&mut unreduced_prod, &output_limbs);
    // Divide a(x)b(x) - c(x) by (x - β) to obtain the carry witness s(x).
    let mut aux_limbs = pol_remove_root_2exp::<LIMB_BITS, _, N_LIMBS>(unreduced_prod);
    aux_limbs[N_LIMBS - 1] = -cy;
    for c in aux_limbs.iter_mut() {
        // we store the unsigned offset value c + 2^20
        *c += AUX_COEFF_ABS_MAX;
    }
    debug_assert!(aux_limbs.iter().all(|&c| c.abs() <= 2 * AUX_COEFF_ABS_MAX));
    // Each (offset) coefficient is split into a low and high 16-bit half
    // so that all stored values are range-checkable.
    lv[MUL_AUX_INPUT_LO].copy_from_slice(&aux_limbs.map(|c| F::from_canonical_u16(c as u16)));
    lv[MUL_AUX_INPUT_HI]
        .copy_from_slice(&aux_limbs.map(|c| F::from_canonical_u16((c >> 16) as u16)));
}
/// Populates the input registers for a MUL row from `left_in` and
/// `right_in` (the third register is zeroed), then fills in the product
/// and auxiliary witness columns via `generate_mul`.
pub fn generate<F: PrimeField64>(lv: &mut [F], left_in: u32, right_in: u32) {
    // TODO: It would probably be clearer/cleaner to read the U32
    // into an [i64;N] and then copy that to the lv table.
    for (reg, val) in [
        (INPUT_REGISTER_0, left_in),
        (INPUT_REGISTER_1, right_in),
        (INPUT_REGISTER_2, 0),
    ] {
        u32_to_array(&mut lv[reg], val);
    }
    // Read the operands back as signed limbs and run the core generator.
    let a = read_value_i64_limbs(lv, INPUT_REGISTER_0);
    let b = read_value_i64_limbs(lv, INPUT_REGISTER_1);
    generate_mul(lv, a, b);
}
/// Emits the product constraints a(x)b(x) - c(x) - (x - β)s(x) == 0,
/// gated by `filter`, where c is read from `OUTPUT_REGISTER` and s from
/// the offset-encoded `MUL_AUX_INPUT_{LO,HI}` columns.
pub(crate) fn eval_packed_generic_mul<P: PackedField>(
    lv: &[P; NUM_ARITH_COLUMNS],
    filter: P,
    left_in_limbs: [P; 2],
    right_in_limbs: [P; 2],
    yield_constr: &mut ConstraintConsumer<P>,
) {
    let output_limbs = read_value::<N_LIMBS, _>(lv, OUTPUT_REGISTER);
    let base = P::Scalar::from_canonical_u64(1 << LIMB_BITS);
    let aux_limbs = {
        // MUL_AUX_INPUT was offset by 2^20 in generation, so we undo
        // that here
        let offset = P::Scalar::from_canonical_u64(AUX_COEFF_ABS_MAX as u64);
        let mut aux_limbs = read_value::<N_LIMBS, _>(lv, MUL_AUX_INPUT_LO);
        let aux_limbs_hi = &lv[MUL_AUX_INPUT_HI];
        // Recombine each coefficient: lo + hi*2^16 - 2^20.
        for (lo, &hi) in aux_limbs.iter_mut().zip(aux_limbs_hi) {
            *lo += hi * base - offset;
        }
        aux_limbs
    };
    // Constraint poly holds the coefficients of the polynomial that
    // must be identically zero for this multiplication to be
    // verified.
    //
    // These two lines set constr_poly to the polynomial a(x)b(x) - c(x),
    // where a, b and c are the polynomials
    //
    //   a(x) = \sum_i input0_limbs[i] * x^i
    //   b(x) = \sum_i input1_limbs[i] * x^i
    //   c(x) = \sum_i output_limbs[i] * x^i
    //
    // This polynomial should equal (x - β)*s(x) where s is
    //
    //   s(x) = \sum_i aux_limbs[i] * x^i
    //
    let mut constr_poly = pol_mul_lo(left_in_limbs, right_in_limbs);
    pol_sub_assign(&mut constr_poly, &output_limbs);
    // This subtracts (x - β) * s(x) from constr_poly.
    pol_sub_assign(&mut constr_poly, &pol_adjoin_root(aux_limbs, base));
    // At this point constr_poly holds the coefficients of the
    // polynomial a(x)b(x) - c(x) - (x - β)*s(x). The
    // multiplication is valid if and only if all of those
    // coefficients are zero.
    for &c in &constr_poly {
        yield_constr.constraint(filter * c);
    }
}
/// Constrains a MUL row: applies the generic product constraints to the
/// two input registers, gated by the `IS_MUL` filter column.
pub fn eval_packed_generic<P: PackedField>(
    lv: &[P; NUM_ARITH_COLUMNS],
    yield_constr: &mut ConstraintConsumer<P>,
) {
    let filter = lv[IS_MUL];
    let left = read_value::<N_LIMBS, _>(lv, INPUT_REGISTER_0);
    let right = read_value::<N_LIMBS, _>(lv, INPUT_REGISTER_1);
    eval_packed_generic_mul(lv, filter, left, right, yield_constr);
}
/// Recursive-circuit version of `eval_packed_generic_mul`: builds the same
/// a(x)b(x) - c(x) - (x - β)s(x) constraints as plonky2 gates.
pub(crate) fn eval_ext_mul_circuit<F: RichField + Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    lv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS],
    filter: ExtensionTarget<D>,
    left_in_limbs: [ExtensionTarget<D>; 2],
    right_in_limbs: [ExtensionTarget<D>; 2],
    yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
    let output_limbs = read_value::<N_LIMBS, _>(lv, OUTPUT_REGISTER);
    let aux_limbs = {
        let base = builder.constant_extension(F::Extension::from_canonical_u64(1 << LIMB_BITS));
        let offset =
            builder.constant_extension(F::Extension::from_canonical_u64(AUX_COEFF_ABS_MAX as u64));
        let mut aux_limbs = read_value::<N_LIMBS, _>(lv, MUL_AUX_INPUT_LO);
        let aux_limbs_hi = &lv[MUL_AUX_INPUT_HI];
        // Undo the generation-time encoding: lo + hi*2^16 - 2^20.
        for (lo, &hi) in aux_limbs.iter_mut().zip(aux_limbs_hi) {
            //*lo = lo + hi * base - offset;
            let t = builder.mul_sub_extension(hi, base, offset);
            *lo = builder.add_extension(*lo, t);
        }
        aux_limbs
    };
    // constr_poly = a(x)b(x) - c(x) - (x - β)s(x); every coefficient
    // must vanish (when the filter is active).
    let mut constr_poly = pol_mul_lo_ext_circuit(builder, left_in_limbs, right_in_limbs);
    pol_sub_assign_ext_circuit(builder, &mut constr_poly, &output_limbs);
    let base = builder.constant_extension(F::Extension::from_canonical_u64(1 << LIMB_BITS));
    let rhs = pol_adjoin_root_ext_circuit(builder, aux_limbs, base);
    pol_sub_assign_ext_circuit(builder, &mut constr_poly, &rhs);
    for &c in &constr_poly {
        // Gate each coefficient constraint by the filter column.
        let filter = builder.mul_extension(filter, c);
        yield_constr.constraint(builder, filter);
    }
}
/// Recursive-circuit counterpart of `eval_packed_generic`: gates the
/// generic product constraints on `IS_MUL` for the two input registers.
pub fn eval_ext_circuit<F: RichField + Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    lv: &[ExtensionTarget<D>; NUM_ARITH_COLUMNS],
    yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
    let filter = lv[IS_MUL];
    let left = read_value::<N_LIMBS, _>(lv, INPUT_REGISTER_0);
    let right = read_value::<N_LIMBS, _>(lv, INPUT_REGISTER_1);
    eval_ext_mul_circuit(builder, lv, filter, left, right, yield_constr);
}
#[cfg(test)]
mod tests {
    use plonky2::field::goldilocks_field::GoldilocksField;
    use plonky2::field::types::Sample;
    use rand::{Rng, SeedableRng};
    use rand_chacha::ChaCha8Rng;
    use super::*;
    const N_RND_TESTS: usize = 1000;
    // TODO: Should be able to refactor this test to apply to all operations.
    #[test]
    fn generate_eval_consistency_not_mul() {
        type F = GoldilocksField;
        let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25);
        let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::sample(&mut rng));
        // if `IS_MUL == 0`, then the constraints should be met even
        // if all values are garbage.
        lv[IS_MUL] = F::ZERO;
        let mut constraint_consumer = ConstraintConsumer::new(
            vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
            GoldilocksField::ONE,
            GoldilocksField::ONE,
            GoldilocksField::ONE,
        );
        eval_packed_generic(&lv, &mut constraint_consumer);
        for &acc in &constraint_consumer.constraint_accs {
            assert_eq!(acc, GoldilocksField::ZERO);
        }
    }
    // With `IS_MUL == 1`, a generated row must satisfy all constraints
    // for many random operand pairs.
    #[test]
    fn generate_eval_consistency_mul() {
        type F = GoldilocksField;
        let mut rng = ChaCha8Rng::seed_from_u64(0x6feb51b7ec230f25);
        let mut lv = [F::default(); NUM_ARITH_COLUMNS].map(|_| F::sample(&mut rng));
        // set `IS_MUL == 1` and ensure all constraints are satisfied.
        lv[IS_MUL] = F::ONE;
        for _i in 0..N_RND_TESTS {
            // set inputs to random values
            for (ai, bi) in INPUT_REGISTER_0.zip(INPUT_REGISTER_1) {
                lv[ai] = F::from_canonical_u16(rng.gen());
                lv[bi] = F::from_canonical_u16(rng.gen());
            }
            let left_in = rng.gen::<u32>();
            let right_in = rng.gen::<u32>();
            generate(&mut lv, left_in, right_in);
            let mut constraint_consumer = ConstraintConsumer::new(
                vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
                GoldilocksField::ONE,
                GoldilocksField::ONE,
                GoldilocksField::ONE,
            );
            eval_packed_generic(&lv, &mut constraint_consumer);
            for &acc in &constraint_consumer.constraint_accs {
                assert_eq!(acc, GoldilocksField::ZERO);
            }
        }
    }
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/generation/state.rs | prover/src/generation/state.rs | // use keccak_hash::keccak;
use crate::cpu::kernel::assembler::Kernel;
use crate::proof::PublicValues;
use crate::witness::errors::ProgramError;
use crate::witness::memory::MemoryState;
use crate::witness::state::RegistersState;
use crate::witness::traces::{TraceCheckpoint, Traces};
use plonky2::field::extension::Extendable;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::config::GenericConfig;
use plonky2::plonk::proof::ProofWithPublicInputs;
use serde::{Deserialize, Serialize};
use sha2::{Digest, Sha256};
use std::{cell::RefCell, rc::Rc};
pub const ZERO: [u8; 32] = [0u8; 32];
/// Snapshot of the mutable generation state (registers + trace lengths)
/// used by `GenerationState::checkpoint`/`rollback` to undo a partial step.
pub(crate) struct GenerationStateCheckpoint {
    pub(crate) registers: RegistersState,
    pub(crate) traces: TraceCheckpoint,
}
/// An assumption identified solely by the 32-byte digest of its claim.
#[derive(Clone, Debug, Serialize, Deserialize, Eq, Hash, PartialEq)]
pub struct Assumption {
    pub claim: [u8; 32],
}
/// The statement a receipt attests to: which program ran and what it
/// committed.
#[derive(Clone, Debug, Serialize, Deserialize)]
pub struct ReceiptClaim {
    pub elf_id: Vec<u8>, // pre image id
    pub commit: Vec<u8>, // commit info
}
/// A STARK proof together with its public values and the claim it proves.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(bound = "")]
pub struct InnerReceipt<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
    pub proof: ProofWithPublicInputs<F, C, D>,
    pub values: PublicValues,
    pub claim: ReceiptClaim,
}
impl<F, C, const D: usize> InnerReceipt<F, C, D>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    /// Digest of this receipt's claim: `SHA-256(elf_id || commit)`.
    pub fn claim_digest(&self) -> [u8; 32] {
        let mut hasher = Sha256::new();
        // `update` takes `impl AsRef<[u8]>`, so hash the byte vectors in
        // place instead of cloning them.
        hasher.update(&self.claim.elf_id);
        hasher.update(&self.claim.commit);
        hasher.finalize().into()
    }
}
/// Evidence backing an assumption: either a full proof or just a claim
/// digest left for later resolution.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(bound = "")]
pub enum AssumptionReceipt<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>
{
    // A [Receipt] for a proven assumption.
    Proven(Box<InnerReceipt<F, C, D>>),
    // An [Assumption] that is not directly proven to be true.
    Unresolved(Assumption),
}
impl<F, C, const D: usize> AssumptionReceipt<F, C, D>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    /// Digest of the claim this receipt is about: the stored digest for an
    /// unresolved assumption, or the inner receipt's claim digest for a
    /// proven one.
    pub fn claim_digest(&self) -> [u8; 32] {
        match self {
            Self::Unresolved(assumption) => assumption.claim,
            Self::Proven(inner) => inner.claim_digest(),
        }
    }
}
/// Container for assumptions in the executor environment.
pub type AssumptionReceipts<F, C, const D: usize> = Vec<AssumptionReceipt<F, C, D>>;
/// Record of which assumptions were actually consulted during execution,
/// paired with the receipt that backed each one.
pub type AssumptionUsage<F, C, const D: usize> = Vec<(Assumption, AssumptionReceipt<F, C, D>)>;
impl<F, C, const D: usize> From<InnerReceipt<F, C, D>> for AssumptionReceipt<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
{
/// Create a proven assumption from a [Receipt].
fn from(receipt: InnerReceipt<F, C, D>) -> Self {
Self::Proven(Box::new(receipt))
}
}
impl<F, C, const D: usize> From<Assumption> for AssumptionReceipt<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
{
/// Create an unresolved assumption from an [Assumption].
fn from(assumption: Assumption) -> Self {
Self::Unresolved(assumption)
}
}
/// A program receipt together with the assumptions it consumed while
/// being produced.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(bound = "")]
pub struct CompositeReceipt<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
> {
    pub program_receipt: InnerReceipt<F, C, D>,
    pub assumption_used: Rc<RefCell<AssumptionUsage<F, C, D>>>,
}
impl<F, C, const D: usize> CompositeReceipt<F, C, D>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    /// Digest of the claim proven by the inner program receipt.
    ///
    /// Delegates to [`InnerReceipt::claim_digest`] instead of re-deriving
    /// the hash, so the two definitions cannot drift apart.
    pub fn claim_digest(&self) -> [u8; 32] {
        self.program_receipt.claim_digest()
    }
}
/// Top-level proof artifact: either a plain segments receipt or a
/// composite receipt carrying the assumptions it used.
#[derive(Clone, Debug, Serialize, Deserialize)]
#[serde(bound = "")]
pub enum Receipt<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize> {
    Segments(InnerReceipt<F, C, D>),
    Composite(CompositeReceipt<F, C, D>),
}
impl<F, C, const D: usize> Receipt<F, C, D>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    /// Digest of the claim, independent of the receipt flavour.
    pub fn claim_digest(&self) -> [u8; 32] {
        match self {
            Self::Segments(receipt) => receipt.claim_digest(),
            Self::Composite(receipt) => receipt.claim_digest(),
        }
    }
    /// The underlying STARK proof (cloned).
    pub fn proof(&self) -> ProofWithPublicInputs<F, C, D> {
        match self {
            Self::Segments(receipt) => receipt.proof.clone(),
            Self::Composite(receipt) => receipt.program_receipt.proof.clone(),
        }
    }
    /// The public values exposed by the proof (cloned).
    pub fn values(&self) -> PublicValues {
        match self {
            Self::Segments(receipt) => receipt.values.clone(),
            Self::Composite(receipt) => receipt.program_receipt.values.clone(),
        }
    }
    /// The claim (ELF id + commit) this receipt attests to (cloned).
    pub fn claim(&self) -> ReceiptClaim {
        match self {
            Self::Segments(receipt) => receipt.claim.clone(),
            Self::Composite(receipt) => receipt.program_receipt.claim.clone(),
        }
    }
    /// Assumptions consumed while producing this receipt; always empty
    /// for a plain segments receipt.
    pub fn assumptions(&self) -> Rc<RefCell<AssumptionUsage<F, C, D>>> {
        match self {
            Self::Segments(_receipt) => Rc::new(RefCell::new(Vec::new())),
            Self::Composite(receipt) => receipt.assumption_used.clone(),
        }
    }
}
impl<F, C, const D: usize> From<Receipt<F, C, D>> for InnerReceipt<F, C, D>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    /// Extracts the inner receipt from either flavour. A composite
    /// receipt's recorded assumptions are discarded here.
    fn from(receipt: Receipt<F, C, D>) -> Self {
        match receipt {
            Receipt::Segments(inner) => inner,
            Receipt::Composite(composite) => composite.program_receipt,
        }
    }
}
impl<F, C, const D: usize> From<Receipt<F, C, D>> for AssumptionReceipt<F, C, D>
where
F: RichField + Extendable<D>,
C: GenericConfig<D, F = F>,
{
/// Create a proven assumption from a [Receipt].
fn from(receipt: Receipt<F, C, D>) -> Self {
let inner: InnerReceipt<F, C, D> = receipt.into();
inner.into()
}
}
/// Mutable state threaded through trace generation for one segment.
#[derive(Clone)]
pub(crate) struct GenerationState<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
> {
    pub(crate) registers: RegistersState,
    pub(crate) memory: MemoryState,
    // Private input chunks and the cursor of the next unread chunk.
    pub(crate) input_stream: Vec<Vec<u8>>,
    pub(crate) input_stream_ptr: usize,
    // Bytes committed as public output so far, plus a cursor.
    pub(crate) public_values_stream: Vec<u8>,
    pub(crate) public_values_stream_ptr: usize,
    pub(crate) traces: Traces<F>,
    // Receipts available for resolving claims during execution.
    pub(crate) assumptions: Rc<RefCell<AssumptionReceipts<F, C, D>>>,
    // Assumptions actually consulted, recorded for the composite receipt.
    pub(crate) assumptions_used: Rc<RefCell<AssumptionUsage<F, C, D>>>,
    // Cycle count at which this segment should halt (see `simulate_cpu`).
    pub(crate) step: usize,
}
impl<F, C, const D: usize> GenerationState<F, C, D>
where
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
{
    /// Builds a fresh state from the kernel's program: registers at the
    /// program entry, empty traces, and the program's input/output streams.
    pub(crate) fn new(step: usize, kernel: &Kernel) -> Result<Self, ProgramError> {
        Ok(GenerationState {
            registers: RegistersState::new(kernel),
            memory: MemoryState::new(&[]), // FIXME
            traces: Traces::default(),
            input_stream: kernel.program.input_stream.clone(),
            input_stream_ptr: kernel.program.input_stream_ptr,
            public_values_stream: kernel.program.public_values_stream.clone(),
            public_values_stream_ptr: kernel.program.public_values_stream_ptr,
            assumptions: Rc::new(RefCell::new(Vec::new())),
            assumptions_used: Rc::new(RefCell::new(Vec::new())),
            step,
        })
    }
    /// Registers an assumption receipt so it can later be found by claim
    /// digest. Returns `&mut Self` for call chaining.
    pub fn add_assumption(
        &mut self,
        assumption: impl Into<AssumptionReceipt<F, C, D>>,
    ) -> &mut Self {
        let receipt: AssumptionReceipt<F, C, D> = assumption.into();
        log::info!("add assumption {:?}", receipt.claim_digest());
        self.assumptions.borrow_mut().push(receipt);
        self
    }
    /// Linear scan over the registered assumptions for one whose claim
    /// digest matches; returns the matching (claim, receipt) pair, if any.
    pub(crate) fn find_assumption(
        &self,
        claim_digest: &[u8; 32],
    ) -> Option<(Assumption, AssumptionReceipt<F, C, D>)> {
        for assumption_receipt in self.assumptions.borrow().iter() {
            let cached_claim_digest = assumption_receipt.claim_digest();
            if cached_claim_digest != *claim_digest {
                log::debug!(
                    "receipt with claim {:?} does not match",
                    cached_claim_digest
                );
                continue;
            }
            return Some((
                Assumption {
                    claim: *claim_digest,
                },
                assumption_receipt.clone(),
            ));
        }
        None
    }
    /// Snapshots registers and trace lengths so a partially-applied step
    /// can be undone via `rollback`.
    pub fn checkpoint(&self) -> GenerationStateCheckpoint {
        GenerationStateCheckpoint {
            registers: self.registers,
            traces: self.traces.checkpoint(),
        }
    }
    /// Restores registers and truncates traces back to a checkpoint.
    pub fn rollback(&mut self, checkpoint: GenerationStateCheckpoint) {
        self.registers = checkpoint.registers;
        self.traces.rollback(checkpoint.traces);
    }
    /// Advances the PC to the pending `next_pc` and schedules `dst` as the
    /// one after (MIPS delayed-branch style update).
    pub fn jump_to(&mut self, dst: usize) {
        self.registers.program_counter = self.registers.next_pc;
        self.registers.next_pc = dst;
    }
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/generation/mod.rs | prover/src/generation/mod.rs | pub(crate) mod outputs;
pub mod state;
use crate::generation::state::{AssumptionReceipts, AssumptionUsage};
use crate::proof::{MemRoots, PublicValues};
use anyhow::anyhow;
use plonky2::field::extension::Extendable;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::config::GenericConfig;
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use crate::all_stark::NUM_PUBLIC_INPUT_USERDATA;
use crate::all_stark::{AllStark, NUM_TABLES};
use crate::config::StarkConfig;
use crate::cpu::bootstrap_kernel::generate_bootstrap_kernel;
use crate::cpu::columns::CpuColumnsView;
use crate::cpu::kernel::assembler::Kernel;
use crate::generation::outputs::{get_outputs, GenerationOutputs};
use crate::generation::state::GenerationState;
use crate::witness::transition::transition;
use std::{cell::RefCell, rc::Rc};
pub fn generate_traces<F: RichField + Extendable<D>, C: GenericConfig<D, F = F>, const D: usize>(
all_stark: &AllStark<F, D>,
kernel: &Kernel,
config: &StarkConfig,
timing: &mut TimingTree,
) -> anyhow::Result<(
[Vec<PolynomialValues<F>>; NUM_TABLES],
PublicValues,
GenerationOutputs,
)> {
// Decode the trace record
// 1. Decode instruction and fill in cpu columns
// 2. Decode memory and fill in memory columns
let mut state = GenerationState::<F, C, D>::new(kernel.program.step, kernel).unwrap();
generate_bootstrap_kernel::<F, C, D>(&mut state, kernel);
timed!(timing, "simulate CPU", simulate_cpu(&mut state, kernel)?);
log::info!(
"Trace lengths (before padding): {:?}",
state.traces.get_lengths()
);
let outputs = get_outputs(&mut state)
.map_err(|err| anyhow!("Failed to generate post-state info: {:?}", err))?;
// Execute the trace record
// Generate the public values and outputs
// let mut userdata = kernel.read_public_inputs();
// assert!(userdata.len() <= NUM_PUBLIC_INPUT_USERDATA);
// userdata.resize(NUM_PUBLIC_INPUT_USERDATA, 0u8);
let userdata = kernel.read_public_inputs();
assert!(userdata.len() == NUM_PUBLIC_INPUT_USERDATA);
let public_values = PublicValues {
roots_before: MemRoots {
root: unsafe { std::mem::transmute::<[u8; 32], [u32; 8]>(kernel.program.pre_image_id) },
},
roots_after: MemRoots {
root: unsafe { std::mem::transmute::<[u8; 32], [u32; 8]>(kernel.program.image_id) },
},
userdata,
};
let tables = timed!(
timing,
"convert trace data to tables",
state.traces.into_tables(all_stark, config, timing)
);
Ok((tables, public_values, outputs))
}
/// Simulates the kernel's program with a set of assumption receipts
/// available for claim resolution, returning the STARK trace tables,
/// public values, program outputs, and the record of which assumptions
/// were actually used.
pub fn generate_traces_with_assumptions<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
>(
    all_stark: &AllStark<F, D>,
    kernel: &Kernel,
    config: &StarkConfig,
    timing: &mut TimingTree,
    assumptions: AssumptionReceipts<F, C, D>,
) -> anyhow::Result<(
    [Vec<PolynomialValues<F>>; NUM_TABLES],
    PublicValues,
    GenerationOutputs,
    Rc<RefCell<AssumptionUsage<F, C, D>>>,
)> {
    // Decode the trace record
    // 1. Decode instruction and fill in cpu columns
    // 2. Decode memory and fill in memory columns
    let mut state = GenerationState::<F, C, D>::new(kernel.program.step, kernel).unwrap();
    // Make the provided receipts discoverable via `find_assumption`.
    for assumption in assumptions.iter() {
        state.add_assumption(assumption.clone());
    }
    generate_bootstrap_kernel::<F, C, D>(&mut state, kernel);
    timed!(timing, "simulate CPU", simulate_cpu(&mut state, kernel)?);
    log::info!(
        "Trace lengths (before padding): {:?}",
        state.traces.get_lengths()
    );
    let outputs = get_outputs(&mut state)
        .map_err(|err| anyhow!("Failed to generate post-state info: {:?}", err))?;
    // Execute the trace record
    // Generate the public values and outputs
    // let mut userdata = kernel.read_public_inputs();
    // assert!(userdata.len() <= NUM_PUBLIC_INPUT_USERDATA);
    // userdata.resize(NUM_PUBLIC_INPUT_USERDATA, 0u8);
    let userdata = kernel.read_public_inputs();
    assert!(userdata.len() == NUM_PUBLIC_INPUT_USERDATA);
    let public_values = PublicValues {
        roots_before: MemRoots {
            // Reinterpret the 32-byte image ids as eight u32 words.
            root: unsafe { std::mem::transmute::<[u8; 32], [u32; 8]>(kernel.program.pre_image_id) },
        },
        roots_after: MemRoots {
            root: unsafe { std::mem::transmute::<[u8; 32], [u32; 8]>(kernel.program.image_id) },
        },
        userdata,
    };
    let tables = timed!(
        timing,
        "convert trace data to tables",
        state.traces.into_tables(all_stark, config, timing)
    );
    Ok((tables, public_values, outputs, state.assumptions_used))
}
/// Perform MIPS instruction and transit state.
///
/// Runs `transition` until the segment's step budget (`state.step`) is
/// exhausted or the program exits, then pads the CPU trace with
/// `is_exit_kernel` rows until its length is a power of two.
pub(crate) fn simulate_cpu<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
>(
    state: &mut GenerationState<F, C, D>,
    kernel: &Kernel,
) -> anyhow::Result<()> {
    let mut step = 0;
    loop {
        // If we've reached the kernel's halt routine, and our trace length is a power of 2, stop.
        let pc = state.registers.program_counter;
        let halt = state.registers.is_kernel && (step == state.step || state.registers.exited);
        log::trace!("pc: {:X}", pc);
        if halt {
            log::info!("CPU halted after {} cycles", state.traces.clock());
            // FIXME: should be quit if not matching
            // Note: this only logs the mismatch; execution still pads and
            // returns Ok.
            if step == state.step && pc != kernel.program.end_pc {
                log::error!(
                    "Segment split {} error at {:X} expected: {:X}",
                    step,
                    pc,
                    kernel.program.end_pc
                )
            }
            //generate_exit_kernel::<F>(state, kernel);
            // Padding
            let mut row = CpuColumnsView::<F>::default();
            row.clock = F::from_canonical_usize(state.traces.clock());
            row.context = F::from_canonical_usize(state.registers.context);
            row.program_counter = F::from_canonical_usize(pc);
            row.next_program_counter = F::from_canonical_usize(state.registers.next_pc);
            row.is_exit_kernel = F::ONE;
            // Repeat the exit row until the trace length is a power of two.
            loop {
                state.traces.push_cpu(row);
                row.clock += F::ONE;
                if state.traces.clock().is_power_of_two() {
                    break;
                }
            }
            log::info!("CPU trace padded to {} cycles", state.traces.clock());
            return Ok(());
        }
        transition(state, kernel)?;
        step += 1;
    }
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/generation/outputs.rs | prover/src/generation/outputs.rs | use plonky2::field::extension::Extendable;
use plonky2::hash::hash_types::RichField;
use plonky2::plonk::config::GenericConfig;
use crate::generation::state::GenerationState;
use crate::witness::errors::ProgramError;
/// The program's committed output bytes, collected after simulation.
#[derive(Clone, Debug)]
pub struct GenerationOutputs {
    pub output: Vec<u8>,
}
/// Snapshots the public-values stream accumulated during generation as
/// the program's output. Currently infallible; the `Result` is kept for
/// interface stability.
pub(crate) fn get_outputs<
    F: RichField + Extendable<D>,
    C: GenericConfig<D, F = F>,
    const D: usize,
>(
    state: &mut GenerationState<F, C, D>,
) -> Result<GenerationOutputs, ProgramError> {
    let output = state.public_values_stream.clone();
    Ok(GenerationOutputs { output })
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/sha_compress_sponge/sha_compress_sponge_stark.rs | prover/src/sha_compress_sponge/sha_compress_sponge_stark.rs | use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::{Column, Filter};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::memory::segments::Segment;
use crate::sha_compress::wrapping_add_2::{
wrapping_add_2_ext_circuit_constraints, wrapping_add_2_packed_constraints,
};
use crate::sha_compress_sponge::columns::{
ShaCompressSpongeColumnsView, NUM_SHA_COMPRESS_SPONGE_COLUMNS, SHA_COMPRESS_SPONGE_COL_MAP,
};
use crate::sha_extend::logic::get_input_range_4;
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;
use crate::witness::memory::MemoryAddress;
use itertools::Itertools;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use std::borrow::Borrow;
use std::marker::PhantomData;
/// Number of rounds in the SHA-256 compression function.
pub(crate) const NUM_ROUNDS: usize = 64;
/// Bytes read by the sponge: the eight 32-bit state words.
pub(crate) const SHA_COMPRESS_SPONGE_READ_BYTES: usize = 8 * 4; // h[0],...,h[7].
/// CTL data sent to the SHA-compress table: the h state limbs followed by
/// the timestamp and the (segment, context, virt) triple addressing the
/// start of the w array.
pub(crate) fn ctl_looking_sha_compress_inputs<F: Field>() -> Vec<Column<F>> {
    let cols = SHA_COMPRESS_SPONGE_COL_MAP;
    let mut res: Vec<_> = Column::singles([cols.hx.as_slice()].concat()).collect();
    res.extend(Column::singles([
        cols.timestamp,
        cols.w_start_segment,
        cols.w_start_context,
        cols.w_start_virt,
    ]));
    res
}
/// CTL columns for the looking side of the SHA-compress output lookup:
/// the 32 `output_state` byte columns followed by the timestamp.
pub(crate) fn ctl_looking_sha_compress_outputs<F: Field>() -> Vec<Column<F>> {
    let layout = SHA_COMPRESS_SPONGE_COL_MAP;
    Column::singles(&layout.output_state)
        .chain([Column::single(layout.timestamp)])
        .collect()
}
/// CTL columns for the looked (CPU-facing) side: context, segment, the
/// first `hx` virtual address, the timestamp, and the eight output words
/// reassembled from their little-endian byte columns.
pub(crate) fn ctl_looked_data<F: Field>() -> Vec<Column<F>> {
    let layout = SHA_COMPRESS_SPONGE_COL_MAP;
    // One LE-combined u32 column per feed-forward output word hx'[i].
    let output_words = (0..8).map(|i| Column::le_bytes(layout.output_hx[i].value));
    Column::singles([
        layout.context,
        layout.segment,
        layout.hx_virt[0],
        layout.timestamp,
    ])
    .chain(output_words)
    .collect()
}
/// CTL columns for the `i`-th memory read of an operation, in the shape
/// expected by `memory_stark::ctl_data`:
/// (is_read=1, context, segment, virt, value, timestamp).
/// `i` is a byte index; `i / 4` selects the 32-bit word being read.
pub(crate) fn ctl_looking_memory<F: Field>(i: usize) -> Vec<Column<F>> {
    let cols = SHA_COMPRESS_SPONGE_COL_MAP;
    let mut res = vec![Column::constant(F::ONE)]; // is_read
    res.extend(Column::singles([cols.context, cols.segment]));
    // Word index within hx; each word occupies 4 byte columns.
    let start = i / 4;
    res.push(Column::single(cols.hx_virt[start]));
    // Reassemble the word from its 4 little-endian byte columns.
    let u32_value: Column<F> = Column::le_bytes(&cols.hx[get_input_range_4(start)]);
    res.push(u32_value);
    res.push(Column::single(cols.timestamp));
    // Shape must agree with the memory table's CTL schema.
    assert_eq!(
        res.len(),
        crate::memory::memory_stark::ctl_data::<F>().len()
    );
    res
}
/// Filter for the looking side of the SHA-compress lookups: active only
/// on real (non-padding) rows.
pub(crate) fn ctl_looking_sha_compress_filter<F: Field>() -> Filter<F> {
    // only the normal round
    Filter::new_simple(Column::single(SHA_COMPRESS_SPONGE_COL_MAP.is_real_round))
}
/// Filter for the looked (CPU-facing) side of the lookup.
/// The CPU table is only interested in our final rows, since those contain
/// the final compress sponge output; `is_real_round` marks them.
pub(crate) fn ctl_looked_filter<F: Field>() -> Filter<F> {
    // only the normal round
    Filter::new_simple(Column::single(SHA_COMPRESS_SPONGE_COL_MAP.is_real_round))
}
/// One SHA-256 compression operation to be turned into a trace row.
#[derive(Clone, Debug)]
pub(crate) struct ShaCompressSpongeOp {
    /// The base address at which inputs are read.
    /// h[0],...,h[7], w_start_virtual — nine addresses total: the eight
    /// state words followed by the start of the message schedule `w`.
    pub(crate) base_address: Vec<MemoryAddress>,
    /// The timestamp at which inputs are read.
    pub(crate) timestamp: usize,
    /// The input that was read.
    /// Values: h[0],..., h[7] in le bytes order (32 bytes).
    pub(crate) input: Vec<u8>,
    /// The value of w_i used for compute output: 64 little-endian words.
    pub(crate) w_i_s: Vec<[u8; 4]>,
}
/// STARK proving the SHA-256 compression sponge: one trace row per
/// compress call, linked to the memory and compress tables via CTLs.
#[derive(Copy, Clone, Default)]
pub struct ShaCompressSpongeStark<F, const D: usize> {
    // Marker for the field type; the stark itself is stateless.
    f: PhantomData<F>,
}
impl<F: RichField + Extendable<D>, const D: usize> ShaCompressSpongeStark<F, D> {
    /// Builds the trace for `operations` and converts it to column-major
    /// polynomial values, padded to a power-of-two height >= `min_rows`.
    pub(crate) fn generate_trace(
        &self,
        operations: Vec<ShaCompressSpongeOp>,
        min_rows: usize,
    ) -> Vec<PolynomialValues<F>> {
        // Generate the witness row-wise.
        let trace_rows = self.generate_trace_rows(operations, min_rows);
        trace_rows_to_poly_values(trace_rows)
    }
    /// One row per operation, then all-zero padding rows (filter = 0)
    /// up to the next power of two of `max(len, min_rows)`.
    fn generate_trace_rows(
        &self,
        operations: Vec<ShaCompressSpongeOp>,
        min_rows: usize,
    ) -> Vec<[F; NUM_SHA_COMPRESS_SPONGE_COLUMNS]> {
        let base_len = operations.len();
        let mut rows = Vec::with_capacity(base_len.max(min_rows).next_power_of_two());
        for op in operations {
            rows.push(self.generate_rows_for_op(op).into());
        }
        let padded_rows = rows.len().max(min_rows).next_power_of_two();
        for _ in rows.len()..padded_rows {
            // Default rows have is_real_round == 0, so constraints are off.
            rows.push(ShaCompressSpongeColumnsView::default().into());
        }
        rows
    }
    /// Fills a single trace row: addresses, input bytes, the compressed
    /// output state, and the feed-forward sums hx[i] + state[i].
    fn generate_rows_for_op(&self, op: ShaCompressSpongeOp) -> ShaCompressSpongeColumnsView<F> {
        let mut row = ShaCompressSpongeColumnsView::default();
        row.timestamp = F::from_canonical_usize(op.timestamp);
        row.context = F::from_canonical_usize(op.base_address[0].context);
        // NOTE(review): indexes base_address by `Segment::Code as usize` —
        // presumably 0, matching the `context` line above; confirm this was
        // not meant to be `op.base_address[0].segment`.
        row.segment = F::from_canonical_usize(op.base_address[Segment::Code as usize].segment);
        row.is_real_round = F::ONE;
        // base_address[0..8] are the addresses of h[0..8].
        let hx_virt: [usize; 8] = (0..8)
            .map(|i| op.base_address[i].virt)
            .collect_vec()
            .try_into()
            .unwrap();
        row.hx_virt = hx_virt.map(F::from_canonical_usize);
        // base_address[8] locates the start of the message schedule `w`.
        row.w_start_virt = F::from_canonical_usize(op.base_address[8].virt);
        row.w_start_segment = F::from_canonical_usize(op.base_address[8].segment);
        row.w_start_context = F::from_canonical_usize(op.base_address[8].context);
        // Input state as 32 individual byte columns.
        row.hx = op
            .input
            .iter()
            .map(|&x| F::from_canonical_u8(x))
            .collect::<Vec<F>>()
            .try_into()
            .unwrap();
        // Reassemble the 8 input words (little-endian) for computation.
        let h_x_t_minus_1 = op
            .input
            .chunks(4)
            .map(|chunk| u32::from_le_bytes(chunk.try_into().unwrap()))
            .collect::<Vec<u32>>()
            .try_into()
            .unwrap();
        let output_state = self.compress(h_x_t_minus_1, op.w_i_s);
        // Store the post-compress working variables a..h as bytes.
        let output_state_bytes = output_state
            .iter()
            .flat_map(|&x| x.to_le_bytes())
            .collect_vec();
        row.output_state = output_state_bytes
            .into_iter()
            .map(F::from_canonical_u8)
            .collect_vec()
            .try_into()
            .unwrap();
        // Feed-forward: hx'[i] = hx[i] + state[i] (mod 2^32), witnessed
        // via the wrapping-add gadget columns.
        for i in 0..8 {
            let _ = row.output_hx[i].generate_trace(h_x_t_minus_1[i], output_state[i]);
        }
        row
    }
    /// The 64-round SHA-256 compression function (FIPS 180-4) WITHOUT the
    /// final feed-forward addition; returns the working variables a..h.
    /// The feed-forward is handled separately via `output_hx`.
    fn compress(&self, input_state: [u32; 8], w_i: Vec<[u8; 4]>) -> [u32; 8] {
        let [mut a, mut b, mut c, mut d, mut e, mut f, mut g, mut h] = input_state;
        for i in 0..64 {
            let s1 = e.rotate_right(6) ^ e.rotate_right(11) ^ e.rotate_right(25);
            let ch = (e & f) ^ (!e & g);
            // Shadowing: decode round i's schedule word from its LE bytes.
            let w_i = u32::from_le_bytes(w_i[i]);
            let temp1 = h
                .wrapping_add(s1)
                .wrapping_add(ch)
                .wrapping_add(crate::sha_compress_sponge::constants::SHA_COMPRESS_K[i])
                .wrapping_add(w_i);
            let s0 = a.rotate_right(2) ^ a.rotate_right(13) ^ a.rotate_right(22);
            let maj = (a & b) ^ (a & c) ^ (b & c);
            let temp2 = s0.wrapping_add(maj);
            h = g;
            g = f;
            f = e;
            e = d.wrapping_add(temp1);
            d = c;
            c = b;
            b = a;
            a = temp1.wrapping_add(temp2);
        }
        [a, b, c, d, e, f, g, h]
    }
}
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for ShaCompressSpongeStark<F, D> {
    type EvaluationFrame<FE, P, const D2: usize>
        = StarkFrame<P, NUM_SHA_COMPRESS_SPONGE_COLUMNS>
    where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>;
    type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_SHA_COMPRESS_SPONGE_COLUMNS>;
    /// Native constraints:
    /// 1. `is_real_round` is boolean;
    /// 2. on real rows, the eight hx addresses are consecutive words
    ///    (virt[i+1] = virt[i] + 4);
    /// 3. on real rows, output_hx[i] = hx[i] + output_state[i] mod 2^32
    ///    via the wrapping-add gadget.
    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        vars: &Self::EvaluationFrame<FE, P, D2>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
        let local_values: &[P; NUM_SHA_COMPRESS_SPONGE_COLUMNS] =
            vars.get_local_values().try_into().unwrap();
        let local_values: &ShaCompressSpongeColumnsView<P> = local_values.borrow();
        // check the filter: x * (x - 1) == 0 forces booleanity.
        let is_normal_round = local_values.is_real_round;
        yield_constr.constraint(is_normal_round * (is_normal_round - P::ONES));
        // if not the padding row, the hx address must be a sequence of numbers spaced 4 units apart
        for i in 0..7 {
            yield_constr.constraint(
                is_normal_round
                    * (local_values.hx_virt[i + 1]
                        - local_values.hx_virt[i]
                        - FE::from_canonical_u8(4)),
            );
        }
        // wrapping add constraints (feed-forward), gated on real rows.
        for i in 0..8 {
            wrapping_add_2_packed_constraints(
                local_values.hx[get_input_range_4(i)].try_into().unwrap(),
                local_values.output_state[get_input_range_4(i)]
                    .try_into()
                    .unwrap(),
                &local_values.output_hx[i],
            )
            .into_iter()
            .for_each(|c| yield_constr.constraint(c * local_values.is_real_round));
        }
    }
    /// Recursive-circuit mirror of `eval_packed_generic`; constraint for
    /// constraint, using builder gates instead of field arithmetic.
    fn eval_ext_circuit(
        &self,
        builder: &mut CircuitBuilder<F, D>,
        vars: &Self::EvaluationFrameTarget,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
        let local_values: &[ExtensionTarget<D>; NUM_SHA_COMPRESS_SPONGE_COLUMNS] =
            vars.get_local_values().try_into().unwrap();
        let local_values: &ShaCompressSpongeColumnsView<ExtensionTarget<D>> = local_values.borrow();
        let four_ext = builder.constant_extension(F::Extension::from_canonical_u8(4));
        // check the filter: x*x - x == 0.
        let is_normal_round = local_values.is_real_round;
        let constraint =
            builder.mul_sub_extension(is_normal_round, is_normal_round, is_normal_round);
        yield_constr.constraint(builder, constraint);
        // if not the padding row, the hx address must be a sequence of numbers spaced 4 units apart
        for i in 0..7 {
            let increment =
                builder.sub_extension(local_values.hx_virt[i + 1], local_values.hx_virt[i]);
            let address_increment = builder.sub_extension(increment, four_ext);
            let constraint = builder.mul_extension(is_normal_round, address_increment);
            yield_constr.constraint(builder, constraint);
        }
        // wrapping add constraints, gated on real rows.
        for i in 0..8 {
            wrapping_add_2_ext_circuit_constraints(
                builder,
                local_values.hx[get_input_range_4(i)].try_into().unwrap(),
                local_values.output_state[get_input_range_4(i)]
                    .try_into()
                    .unwrap(),
                &local_values.output_hx[i],
            )
            .into_iter()
            .for_each(|c| {
                let constraint = builder.mul_extension(c, local_values.is_real_round);
                yield_constr.constraint(builder, constraint)
            });
        }
    }
    // Maximum constraint degree (filter * gadget constraints).
    fn constraint_degree(&self) -> usize {
        3
    }
}
/// Tests: trace-generation against known SHA-256 vectors, circuit/degree
/// checks, and a proving benchmark with fake CTL data.
#[cfg(test)]
mod test {
    use crate::config::StarkConfig;
    use crate::cross_table_lookup::{
        Column, CtlData, CtlZData, Filter, GrandProductChallenge, GrandProductChallengeSet,
    };
    use crate::prover::prove_single_table;
    use crate::sha_compress_sponge::columns::ShaCompressSpongeColumnsView;
    use crate::sha_compress_sponge::sha_compress_sponge_stark::{
        ShaCompressSpongeOp, ShaCompressSpongeStark,
    };
    use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
    use crate::witness::memory::MemoryAddress;
    use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV};
    use plonky2::field::goldilocks_field::GoldilocksField;
    use plonky2::field::polynomial::PolynomialValues;
    use plonky2::field::types::Field;
    use plonky2::fri::oracle::PolynomialBatch;
    use plonky2::iop::challenger::Challenger;
    use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
    use plonky2::timed;
    use plonky2::util::timing::TimingTree;
    use rand::Rng;
    use std::borrow::Borrow;
    // A fixed 64-word message schedule used by `test_generation`.
    const W: [u32; 64] = [
        0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 34013193, 67559435, 1711661200,
        3020350282, 1447362251, 3118632270, 4004188394, 690615167, 6070360, 1105370215, 2385558114,
        2348232513, 507799627, 2098764358, 5845374, 823657968, 2969863067, 3903496557, 4274682881,
        2059629362, 1849247231, 2656047431, 835162919, 2096647516, 2259195856, 1779072524,
        3152121987, 4210324067, 1557957044, 376930560, 982142628, 3926566666, 4164334963,
        789545383, 1028256580, 2867933222, 3843938318, 1135234440, 390334875, 2025924737,
        3318322046, 3436065867, 652746999, 4261492214, 2543173532, 3334668051, 3166416553,
        634956631,
    ];
    // The standard SHA-256 initial hash values (FIPS 180-4).
    pub const H256_256: [u32; 8] = [
        0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a, 0x510e527f, 0x9b05688c, 0x1f83d9ab,
        0x5be0cd19,
    ];
    // Checks the feed-forward outputs of one compression against
    // precomputed expected words.
    #[test]
    fn test_generation() -> Result<(), String> {
        const D: usize = 2;
        type F = GoldilocksField;
        type S = ShaCompressSpongeStark<F, D>;
        let stark = S::default();
        let hx_addresses: Vec<MemoryAddress> = (0..32)
            .step_by(4)
            .map(|i| MemoryAddress {
                context: 0,
                segment: 0,
                virt: i,
            })
            .collect();
        let w_addresses: Vec<MemoryAddress> = (32..288)
            .step_by(4)
            .map(|i| MemoryAddress {
                context: 0,
                segment: 0,
                virt: i,
            })
            .collect();
        let input = H256_256
            .iter()
            .flat_map(|x| (*x).to_le_bytes())
            .collect::<Vec<_>>();
        let w_i_s = W.iter().map(|x| x.to_le_bytes()).collect::<Vec<_>>();
        let op = ShaCompressSpongeOp {
            base_address: hx_addresses
                .iter()
                .chain([w_addresses[0]].iter())
                .cloned()
                .collect(),
            // i: 0,
            timestamp: 0,
            // input_states: input_state,
            input,
            w_i_s,
        };
        let row = stark.generate_rows_for_op(op);
        let local_values: &ShaCompressSpongeColumnsView<F> = row.borrow();
        assert_eq!(
            local_values.output_hx[0].value,
            3592665057_u32.to_le_bytes().map(F::from_canonical_u8)
        );
        assert_eq!(
            local_values.output_hx[1].value,
            2164530888_u32.to_le_bytes().map(F::from_canonical_u8)
        );
        assert_eq!(
            local_values.output_hx[2].value,
            1223339564_u32.to_le_bytes().map(F::from_canonical_u8)
        );
        assert_eq!(
            local_values.output_hx[3].value,
            3041196771_u32.to_le_bytes().map(F::from_canonical_u8)
        );
        assert_eq!(
            local_values.output_hx[4].value,
            2006723467_u32.to_le_bytes().map(F::from_canonical_u8)
        );
        assert_eq!(
            local_values.output_hx[5].value,
            2963045520_u32.to_le_bytes().map(F::from_canonical_u8)
        );
        assert_eq!(
            local_values.output_hx[6].value,
            3851824201_u32.to_le_bytes().map(F::from_canonical_u8)
        );
        assert_eq!(
            local_values.output_hx[7].value,
            3453903005_u32.to_le_bytes().map(F::from_canonical_u8)
        );
        Ok(())
    }
    #[test]
    fn test_stark_circuit() -> anyhow::Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = ShaCompressSpongeStark<F, D>;
        let stark = S::default();
        test_stark_circuit_constraints::<F, C, S, D>(stark)
    }
    #[test]
    fn test_stark_degree() -> anyhow::Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = ShaCompressSpongeStark<F, D>;
        let stark = S {
            f: Default::default(),
        };
        test_stark_low_degree(stark)
    }
    // Builds an op with random addresses, state and schedule words.
    fn get_random_input() -> ShaCompressSpongeOp {
        let mut rng = rand::thread_rng();
        let hx_start_virt: u32 = rng.gen();
        let hx_addresses: Vec<MemoryAddress> = (hx_start_virt..hx_start_virt + 32)
            .step_by(4)
            .map(|i| MemoryAddress {
                context: 0,
                segment: 0,
                virt: i as usize,
            })
            .collect();
        let w_start_virt: u32 = rng.gen();
        let w_start_address = MemoryAddress {
            context: 0,
            segment: 0,
            virt: w_start_virt as usize,
        };
        let mut rng = rand::thread_rng();
        let hx: Vec<u32> = (0..8).map(|_| rng.gen()).collect();
        let input = hx.iter().flat_map(|x| x.to_le_bytes()).collect::<Vec<_>>();
        let w_i = (0..64).map(|_| rng.gen()).collect::<Vec<u32>>();
        let w_i_s = w_i.iter().map(|x| x.to_le_bytes()).collect::<Vec<_>>();
        let op = ShaCompressSpongeOp {
            base_address: hx_addresses
                .iter()
                .chain([w_start_address].iter())
                .cloned()
                .collect(),
            // i,
            timestamp: 0,
            // input_states: input_state,
            input,
            w_i_s,
        };
        op
    }
    // NOTE(review): name says "extend" but this benchmarks the COMPRESS
    // sponge stark — presumably copied from the sha_extend tests; rename.
    #[test]
    fn sha_extend_sponge_benchmark() -> anyhow::Result<()> {
        const NUM_INPUTS: usize = 50;
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = ShaCompressSpongeStark<F, D>;
        let stark = S::default();
        let config = StarkConfig::standard_fast_config();
        init_logger();
        let input = (0..NUM_INPUTS).map(|_| get_random_input()).collect();
        let mut timing = TimingTree::new("prove", log::Level::Debug);
        let trace_poly_values = stark.generate_trace(input, 8);
        // TODO: Cloning this isn't great; consider having `from_values` accept a reference,
        // or having `compute_permutation_z_polys` read trace values from the `PolynomialBatch`.
        let cloned_trace_poly_values = timed!(timing, "clone", trace_poly_values.clone());
        let trace_commitments = timed!(
            timing,
            "compute trace commitment",
            PolynomialBatch::<F, C, D>::from_values(
                cloned_trace_poly_values,
                config.fri_config.rate_bits,
                false,
                config.fri_config.cap_height,
                &mut timing,
                None,
            )
        );
        let degree = 1 << trace_commitments.degree_log;
        // Fake CTL data.
        let ctl_z_data = CtlZData {
            helper_columns: vec![PolynomialValues::zero(degree)],
            z: PolynomialValues::zero(degree),
            challenge: GrandProductChallenge {
                beta: F::ZERO,
                gamma: F::ZERO,
            },
            columns: vec![],
            filter: vec![Some(Filter::new_simple(Column::constant(F::ZERO)))],
        };
        let ctl_data = CtlData {
            zs_columns: vec![ctl_z_data.clone(); config.num_challenges],
        };
        prove_single_table(
            &stark,
            &config,
            &trace_poly_values,
            &trace_commitments,
            &ctl_data,
            &GrandProductChallengeSet {
                challenges: vec![ctl_z_data.challenge; config.num_challenges],
            },
            &mut Challenger::new(),
            &mut timing,
        )?;
        timing.print();
        Ok(())
    }
    fn init_logger() {
        let _ = try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "debug"));
    }
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/sha_compress_sponge/columns.rs | prover/src/sha_compress_sponge/columns.rs | use crate::sha_compress::wrapping_add_2::WrappingAdd2Op;
use crate::util::{indices_arr, transmute_no_compile_time_size_checks};
use std::borrow::{Borrow, BorrowMut};
use std::mem::transmute;
/// Typed view over one trace row of the SHA-compress sponge stark.
/// The field order here IS the column order; any change shifts every
/// index in `SHA_COMPRESS_SPONGE_COL_MAP`.
pub(crate) struct ShaCompressSpongeColumnsView<T: Copy> {
    // Input state h[0..8], 32 little-endian bytes (4 per word).
    pub hx: [T; 32],
    // a, b, c,..., h after compress
    pub output_state: [T; 32],
    // hx[i] + a,..., hx[i+7] + h
    pub output_hx: [WrappingAdd2Op<T>; 8],
    // Virtual addresses of h[0..8].
    pub hx_virt: [T; 8],
    // Virtual address of the first message-schedule word w[0].
    pub w_start_virt: T,
    // The timestamp at which inputs should be read from memory.
    pub timestamp: T,
    pub context: T,
    pub segment: T,
    // The segment and context of w_start_virt
    pub w_start_segment: T,
    pub w_start_context: T,
    // 1 on real rows, 0 on padding rows; gates all constraints and CTLs.
    pub is_real_round: T,
}
pub const NUM_SHA_COMPRESS_SPONGE_COLUMNS: usize = size_of::<ShaCompressSpongeColumnsView<u8>>(); //1420
// Reinterpret a flat column array as the typed view and back.
// SAFETY (all transmutes below): the view is a struct of `T`-typed fields
// whose total element count equals NUM_SHA_COMPRESS_SPONGE_COLUMNS, and
// `NUM_..._COLUMNS` is derived from `size_of` of the view itself.
// NOTE(review): the struct is not `#[repr(C)]`, so this relies on the
// compiler not reordering fields — same pattern as the other column views
// in this crate; confirm acceptable.
impl<T: Copy> From<[T; NUM_SHA_COMPRESS_SPONGE_COLUMNS]> for ShaCompressSpongeColumnsView<T> {
    fn from(value: [T; NUM_SHA_COMPRESS_SPONGE_COLUMNS]) -> Self {
        unsafe { transmute_no_compile_time_size_checks(value) }
    }
}
impl<T: Copy> From<ShaCompressSpongeColumnsView<T>> for [T; NUM_SHA_COMPRESS_SPONGE_COLUMNS] {
    fn from(value: ShaCompressSpongeColumnsView<T>) -> Self {
        unsafe { transmute_no_compile_time_size_checks(value) }
    }
}
impl<T: Copy> Borrow<ShaCompressSpongeColumnsView<T>> for [T; NUM_SHA_COMPRESS_SPONGE_COLUMNS] {
    fn borrow(&self) -> &ShaCompressSpongeColumnsView<T> {
        unsafe { transmute(self) }
    }
}
impl<T: Copy> BorrowMut<ShaCompressSpongeColumnsView<T>> for [T; NUM_SHA_COMPRESS_SPONGE_COLUMNS] {
    fn borrow_mut(&mut self) -> &mut ShaCompressSpongeColumnsView<T> {
        unsafe { transmute(self) }
    }
}
impl<T: Copy> Borrow<[T; NUM_SHA_COMPRESS_SPONGE_COLUMNS]> for ShaCompressSpongeColumnsView<T> {
    fn borrow(&self) -> &[T; NUM_SHA_COMPRESS_SPONGE_COLUMNS] {
        unsafe { transmute(self) }
    }
}
impl<T: Copy> BorrowMut<[T; NUM_SHA_COMPRESS_SPONGE_COLUMNS]> for ShaCompressSpongeColumnsView<T> {
    fn borrow_mut(&mut self) -> &mut [T; NUM_SHA_COMPRESS_SPONGE_COLUMNS] {
        unsafe { transmute(self) }
    }
}
impl<T: Copy + Default> Default for ShaCompressSpongeColumnsView<T> {
    fn default() -> Self {
        [T::default(); NUM_SHA_COMPRESS_SPONGE_COLUMNS].into()
    }
}
/// Builds a view whose every entry is its own column index (0..N),
/// giving named access to column positions at compile time.
const fn make_col_map() -> ShaCompressSpongeColumnsView<usize> {
    let indices_arr = indices_arr::<NUM_SHA_COMPRESS_SPONGE_COLUMNS>();
    // SAFETY: same size/layout argument as the From/Borrow impls above.
    unsafe {
        transmute::<[usize; NUM_SHA_COMPRESS_SPONGE_COLUMNS], ShaCompressSpongeColumnsView<usize>>(
            indices_arr,
        )
    }
}
/// Field-name → column-index map used by the CTL helpers.
pub(crate) const SHA_COMPRESS_SPONGE_COL_MAP: ShaCompressSpongeColumnsView<usize> = make_col_map();
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/sha_compress_sponge/mod.rs | prover/src/sha_compress_sponge/mod.rs | pub mod columns;
pub mod constants;
pub mod sha_compress_sponge_stark;
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/sha_compress_sponge/constants.rs | prover/src/sha_compress_sponge/constants.rs | pub const SHA_COMPRESS_K: [u32; 64] = [
0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2,
];
/// `SHA_COMPRESS_K` with each round constant split into little-endian
/// bytes, matching the byte-oriented column layout of the compress stark.
/// Must stay in sync with `SHA_COMPRESS_K` (entry i == K[i].to_le_bytes()).
pub const SHA_COMPRESS_K_LE_BYTES: [[u8; 4]; 64] = [
    [152, 47, 138, 66],
    [145, 68, 55, 113],
    [207, 251, 192, 181],
    [165, 219, 181, 233],
    [91, 194, 86, 57],
    [241, 17, 241, 89],
    [164, 130, 63, 146],
    [213, 94, 28, 171],
    [152, 170, 7, 216],
    [1, 91, 131, 18],
    [190, 133, 49, 36],
    [195, 125, 12, 85],
    [116, 93, 190, 114],
    [254, 177, 222, 128],
    [167, 6, 220, 155],
    [116, 241, 155, 193],
    [193, 105, 155, 228],
    [134, 71, 190, 239],
    [198, 157, 193, 15],
    [204, 161, 12, 36],
    [111, 44, 233, 45],
    [170, 132, 116, 74],
    [220, 169, 176, 92],
    [218, 136, 249, 118],
    [82, 81, 62, 152],
    [109, 198, 49, 168],
    [200, 39, 3, 176],
    [199, 127, 89, 191],
    [243, 11, 224, 198],
    [71, 145, 167, 213],
    [81, 99, 202, 6],
    [103, 41, 41, 20],
    [133, 10, 183, 39],
    [56, 33, 27, 46],
    [252, 109, 44, 77],
    [19, 13, 56, 83],
    [84, 115, 10, 101],
    [187, 10, 106, 118],
    [46, 201, 194, 129],
    [133, 44, 114, 146],
    [161, 232, 191, 162],
    [75, 102, 26, 168],
    [112, 139, 75, 194],
    [163, 81, 108, 199],
    [25, 232, 146, 209],
    [36, 6, 153, 214],
    [133, 53, 14, 244],
    [112, 160, 106, 16],
    [22, 193, 164, 25],
    [8, 108, 55, 30],
    [76, 119, 72, 39],
    [181, 188, 176, 52],
    [179, 12, 28, 57],
    [74, 170, 216, 78],
    [79, 202, 156, 91],
    [243, 111, 46, 104],
    [238, 130, 143, 116],
    [111, 99, 165, 120],
    [20, 120, 200, 132],
    [8, 2, 199, 140],
    [250, 255, 190, 144],
    [235, 108, 80, 164],
    [247, 163, 249, 190],
    [242, 120, 113, 198],
];
/// One row per compression round (64) plus one final output row.
pub const NUM_COMPRESS_ROWS: usize = 65;
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/poseidon/columns.rs | prover/src/poseidon/columns.rs | use crate::poseidon::constants::{HALF_N_FULL_ROUNDS, SPONGE_WIDTH};
use plonky2::hash::poseidon::N_PARTIAL_ROUNDS;
/// Column layout of the Poseidon stark, packed left to right:
/// FILTER | inputs | outputs | timestamp | full-round-0 sbox pairs |
/// partial-round sbox pairs | full-round-1 sbox pairs.
pub const FILTER: usize = 0;
const START_IN: usize = FILTER + 1;
/// Column of the i-th input limb.
pub const fn reg_in(i: usize) -> usize {
    debug_assert!(i < SPONGE_WIDTH);
    START_IN + i
}
const START_OUT: usize = START_IN + SPONGE_WIDTH;
/// Column of the i-th output limb.
pub const fn reg_out(i: usize) -> usize {
    debug_assert!(i < SPONGE_WIDTH);
    START_OUT + i
}
pub(crate) const TIMESTAMP: usize = START_OUT + SPONGE_WIDTH;
const START_FULL_0: usize = TIMESTAMP + 1;
// full round sbox intermediate
/// First-half full rounds: column of x^3 for lane i of `round`.
pub const fn reg_full0_s0(round: usize, i: usize) -> usize {
    debug_assert!(i < SPONGE_WIDTH);
    debug_assert!(round < HALF_N_FULL_ROUNDS);
    START_FULL_0 + SPONGE_WIDTH * 2 * round + 2 * i
}
// full round sbox out
/// First-half full rounds: column of x^7 (sbox output), stored right
/// after the x^3 intermediate.
pub const fn reg_full0_s1(round: usize, i: usize) -> usize {
    reg_full0_s0(round, i) + 1
}
const START_PARTIAL: usize = START_FULL_0 + SPONGE_WIDTH * 2 * HALF_N_FULL_ROUNDS;
/// Partial rounds: column of x^3 for lane 0 of `round`.
pub const fn reg_partial_s0(round: usize) -> usize {
    debug_assert!(round < N_PARTIAL_ROUNDS);
    START_PARTIAL + round * 2
}
/// Partial rounds: column of x^7 (sbox output).
pub const fn reg_partial_s1(round: usize) -> usize {
    reg_partial_s0(round) + 1
}
const START_FULL_1: usize = START_PARTIAL + N_PARTIAL_ROUNDS * 2;
/// Second-half full rounds: column of x^3 for lane i of `round`.
pub const fn reg_full1_s0(round: usize, i: usize) -> usize {
    debug_assert!(i < SPONGE_WIDTH);
    debug_assert!(round < HALF_N_FULL_ROUNDS);
    START_FULL_1 + SPONGE_WIDTH * 2 * round + 2 * i
}
/// Second-half full rounds: column of x^7 (sbox output).
pub const fn reg_full1_s1(round: usize, i: usize) -> usize {
    reg_full1_s0(round, i) + 1
}
pub(crate) const NUM_COLUMNS: usize = START_FULL_1 + SPONGE_WIDTH * 2 * HALF_N_FULL_ROUNDS;
// Prints the full column layout for manual inspection; makes no asserts.
#[test]
fn test_cols() {
    println!("first full rounds.");
    for i in 0..HALF_N_FULL_ROUNDS {
        for j in 0..SPONGE_WIDTH {
            println!("{}", reg_full0_s0(i, j));
            println!("{}", reg_full0_s1(i, j));
        }
    }
    println!("partial rounds.");
    for i in 0..N_PARTIAL_ROUNDS {
        println!("{}", reg_partial_s0(i));
        println!("{}", reg_partial_s1(i));
    }
    println!("last full rounds.");
    for i in 0..HALF_N_FULL_ROUNDS {
        for j in 0..SPONGE_WIDTH {
            println!("{}", reg_full1_s0(i, j));
            println!("{}", reg_full1_s1(i, j));
        }
    }
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/poseidon/poseidon_stark.rs | prover/src/poseidon/poseidon_stark.rs | use std::marker::PhantomData;
use itertools::Itertools;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::field::types::PrimeField64;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::{Column, Filter};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::poseidon::columns::{
reg_full0_s0, reg_full0_s1, reg_full1_s0, reg_full1_s1, reg_in, reg_out, reg_partial_s0,
reg_partial_s1, FILTER, NUM_COLUMNS, TIMESTAMP,
};
use crate::poseidon::constants::{
ALL_ROUND_CONSTANTS, FAST_PARTIAL_FIRST_ROUND_CONSTANT, FAST_PARTIAL_ROUND_CONSTANTS,
FAST_PARTIAL_ROUND_INITIAL_MATRIX, FAST_PARTIAL_ROUND_VS, FAST_PARTIAL_ROUND_W_HATS,
HALF_N_FULL_ROUNDS, MDS_MATRIX_CIRC, MDS_MATRIX_DIAG, N_PARTIAL_ROUNDS, N_ROUNDS, SPONGE_WIDTH,
};
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;
/// CTL columns for Poseidon inputs: the SPONGE_WIDTH input registers
/// followed by the timestamp column.
pub fn ctl_data_inputs<F: Field>() -> Vec<Column<F>> {
    let regs = (0..SPONGE_WIDTH).map(reg_in).chain([TIMESTAMP]);
    Column::singles(regs).collect()
}
/// CTL columns for Poseidon outputs: the SPONGE_WIDTH output registers
/// followed by the timestamp column.
pub fn ctl_data_outputs<F: Field>() -> Vec<Column<F>> {
    let regs = (0..SPONGE_WIDTH).map(reg_out).chain([TIMESTAMP]);
    Column::singles(regs).collect()
}
/// CTL filter for input rows: active on real (non-padding) permutations,
/// as marked by the FILTER column.
pub fn ctl_filter_inputs<F: Field>() -> Filter<F> {
    Filter::new_simple(Column::single(FILTER))
}
/// CTL filter for output rows; identical to the input filter since a
/// permutation's inputs and outputs share one row.
pub fn ctl_filter_outputs<F: Field>() -> Filter<F> {
    Filter::new_simple(Column::single(FILTER))
}
/// Runs the full Poseidon permutation on `input` and records every sbox
/// intermediate into a witness row. Returns (final state, witness columns);
/// the witness's input/output/timestamp slots are filled by the caller.
pub fn poseidon_with_witness<F: PrimeField64>(
    input: &[F; SPONGE_WIDTH],
) -> ([F; SPONGE_WIDTH], [F; NUM_COLUMNS]) {
    let mut state = *input;
    let mut witness = [F::ZEROS; NUM_COLUMNS];
    let mut round_ctr = 0;
    // Full rounds, then the fast partial rounds, then full rounds again.
    full_rounds(&mut state, &mut witness, &mut round_ctr, true);
    partial_rounds(&mut state, &mut witness, &mut round_ctr);
    full_rounds(&mut state, &mut witness, &mut round_ctr, false);
    debug_assert_eq!(round_ctr, N_ROUNDS);
    (state, witness)
}
/// One half of the full rounds (constants, sbox on every lane, MDS),
/// recording sbox witnesses under the first- or second-half columns
/// according to `is_first_full_round`.
fn full_rounds<F: PrimeField64>(
    state: &mut [F; SPONGE_WIDTH],
    witness: &mut [F; NUM_COLUMNS],
    round_ctr: &mut usize,
    is_first_full_round: bool,
) {
    for r in 0..HALF_N_FULL_ROUNDS {
        constant_layer(state, *round_ctr);
        sbox_layer(state, witness, r, is_first_full_round);
        *state = mds_layer(state);
        *round_ctr += 1;
    }
}
/// The partial rounds in plonky2's "fast" form: one shared constant layer
/// and initial matrix up front, then per round an sbox on lane 0 only, a
/// round constant on lane 0, and a sparse MDS multiply.
fn partial_rounds<F: PrimeField64>(
    state: &mut [F; SPONGE_WIDTH],
    witness: &mut [F; NUM_COLUMNS],
    round_ctr: &mut usize,
) {
    partial_first_constant_layer(state);
    *state = mds_partial_layer_init(state);
    for i in 0..N_PARTIAL_ROUNDS {
        state[0] = sbox_monomial(state[0], witness, reg_partial_s0(i), reg_partial_s1(i));
        // SAFETY: add_canonical_u64's contract is upheld by the constant
        // being a canonical field element (precomputed table).
        unsafe {
            state[0] = state[0].add_canonical_u64(FAST_PARTIAL_ROUND_CONSTANTS[i]);
        }
        *state = mds_partial_layer_fast(state, i);
    }
    *round_ctr += N_PARTIAL_ROUNDS;
}
/// STARK proving Poseidon permutations: one trace row per permutation,
/// with every sbox intermediate witnessed.
#[derive(Copy, Clone, Default)]
pub struct PoseidonStark<F, const D: usize> {
    // Marker for the field type; the stark itself is stateless.
    pub(crate) f: PhantomData<F>,
}
impl<F: RichField + Extendable<D>, const D: usize> PoseidonStark<F, D> {
    /// Generate the rows of the trace. Note that this does not generate the permuted columns used
    /// in our lookup arguments, as those are computed after transposing to column-wise form.
    fn generate_trace_rows(
        &self,
        inputs_and_timestamps: &[([F; SPONGE_WIDTH], usize)],
        min_rows: usize,
    ) -> Vec<[F; NUM_COLUMNS]> {
        // Pad to a power-of-two height of at least `min_rows`.
        let num_rows = inputs_and_timestamps
            .len()
            .max(min_rows)
            .next_power_of_two();
        let mut rows = Vec::with_capacity(num_rows);
        for input_and_timestamp in inputs_and_timestamps.iter() {
            let rows_for_perm = self.generate_trace_rows_for_perm(*input_and_timestamp, true);
            rows.push(rows_for_perm);
        }
        // Padding rows: a zero-input permutation with the CTL filter off.
        let default_row = self.generate_trace_rows_for_perm(([F::ZEROS; SPONGE_WIDTH], 0), false);
        while rows.len() < num_rows {
            rows.push(default_row);
        }
        rows
    }
    /// Builds one row: runs the permutation, fills inputs, outputs,
    /// timestamp, and the CTL filter flag.
    fn generate_trace_rows_for_perm(
        &self,
        input_and_timestamp: ([F; SPONGE_WIDTH], usize),
        need_ctl: bool,
    ) -> [F; NUM_COLUMNS] {
        let (hash, mut rows) = poseidon_with_witness(&input_and_timestamp.0);
        rows[FILTER] = F::from_bool(need_ctl);
        for i in 0..SPONGE_WIDTH {
            rows[reg_in(i)] = input_and_timestamp.0[i];
            rows[reg_out(i)] = hash[i];
        }
        // Set the timestamp of the current input.
        // It will be checked against the value in `KeccakSponge`.
        // The timestamp is used to link the input and output of
        // the same permutation together.
        rows[TIMESTAMP] = F::from_canonical_usize(input_and_timestamp.1);
        rows
    }
    /// Row-wise witness generation followed by transposition to
    /// column-major polynomial values.
    pub fn generate_trace(
        &self,
        inputs: &[([F; SPONGE_WIDTH], usize)],
        min_rows: usize,
    ) -> Vec<PolynomialValues<F>> {
        // Generate the witness, except for permuted columns in the lookup argument.
        let trace_rows = self.generate_trace_rows(inputs, min_rows);
        trace_rows_to_poly_values(trace_rows)
    }
}
/// Adds the round constants of round `round_ctr` to every lane (native).
fn constant_layer<F: Field>(state: &mut [F; SPONGE_WIDTH], round_ctr: usize) {
    for i in 0..SPONGE_WIDTH {
        let round_constant = ALL_ROUND_CONSTANTS[i + SPONGE_WIDTH * round_ctr];
        state[i] += F::from_canonical_u64(round_constant);
    }
}
/// Constant layer over packed field values (constraint evaluation).
fn constant_layer_field<P: PackedField>(state: &mut [P], round_ctr: usize) {
    for i in 0..SPONGE_WIDTH {
        state[i] +=
            P::Scalar::from_canonical_u64(ALL_ROUND_CONSTANTS[i + SPONGE_WIDTH * round_ctr]);
    }
}
/// Constant layer inside the recursive verifier circuit.
fn constant_layer_circuit<F: RichField + Extendable<D>, const D: usize>(
    state: &mut [ExtensionTarget<D>],
    round_ctr: usize,
    builder: &mut CircuitBuilder<F, D>,
) {
    for i in 0..SPONGE_WIDTH {
        let c = F::Extension::from_canonical_u64(ALL_ROUND_CONSTANTS[i + SPONGE_WIDTH * round_ctr]);
        let c = builder.constant_extension(c);
        state[i] = builder.add_extension(state[i], c);
    }
}
/// Constrains the witnessed x^7 sbox in two degree-3 steps:
/// inter == input^3 and output == input * inter^2 (= input^7).
fn sbox_field<P: PackedField>(
    input: P,
    inter: P,
    output: P,
    yield_constr: &mut ConstraintConsumer<P>,
) {
    yield_constr.constraint(input * input * input - inter);
    yield_constr.constraint(input * inter * inter - output);
}
/// Circuit mirror of `sbox_field`: same two constraints as builder gates.
fn sbox_circuit<F: RichField + Extendable<D>, const D: usize>(
    input: ExtensionTarget<D>,
    inter: ExtensionTarget<D>,
    output: ExtensionTarget<D>,
    builder: &mut CircuitBuilder<F, D>,
    yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
    let cube = builder.cube_extension(input);
    let constraint = builder.sub_extension(cube, inter);
    yield_constr.constraint(builder, constraint);
    let out = builder.mul_many_extension([input, inter, inter]);
    let constraint = builder.sub_extension(out, output);
    yield_constr.constraint(builder, constraint);
}
/// Applies x^7 to every lane of a full round, writing the intermediates
/// into the first- or second-half witness columns per `is_first_full_round`.
fn sbox_layer<F: PrimeField64>(
    state: &mut [F; SPONGE_WIDTH],
    witness: &mut [F; NUM_COLUMNS],
    r: usize,
    is_first_full_round: bool,
) {
    for i in 0..SPONGE_WIDTH {
        let idx0 = if is_first_full_round {
            reg_full0_s0(r, i)
        } else {
            reg_full1_s0(r, i)
        };
        let idx1 = if is_first_full_round {
            reg_full0_s1(r, i)
        } else {
            reg_full1_s1(r, i)
        };
        state[i] = sbox_monomial(state[i], witness, idx0, idx1);
    }
}
/// Computes x^7 as x * (x^3)^2, storing x^3 at `idx0` and x^7 at `idx1`
/// so the constraints can verify it in degree-3 pieces.
fn sbox_monomial<F: PrimeField64>(
    x: F,
    witness: &mut [F; NUM_COLUMNS],
    idx0: usize,
    idx1: usize,
) -> F {
    let x3 = x.cube();
    let x6 = x3.square();
    let out = x.mul(x6);
    witness[idx0] = x3;
    witness[idx1] = out;
    out
}
/// Constraint-side sbox layer: checks each lane's witnessed x^3 / x^7
/// pair against `state`, then advances `state` to the sbox outputs.
fn sbox_layer_field<P: PackedField>(
    lv: &[P],
    state: &mut [P],
    r: usize,
    is_first_full_round: bool,
    yield_constr: &mut ConstraintConsumer<P>,
) {
    for i in 0..SPONGE_WIDTH {
        let sbox_tmp = lv[if is_first_full_round {
            reg_full0_s0(r, i)
        } else {
            reg_full1_s0(r, i)
        }];
        let sbox_out = lv[if is_first_full_round {
            reg_full0_s1(r, i)
        } else {
            reg_full1_s1(r, i)
        }];
        sbox_field(state[i], sbox_tmp, sbox_out, yield_constr);
        state[i] = sbox_out;
    }
}
/// Circuit mirror of `sbox_layer_field`.
fn sbox_layer_circuit<F: RichField + Extendable<D>, const D: usize>(
    lv: &[ExtensionTarget<D>],
    state: &mut [ExtensionTarget<D>],
    r: usize,
    is_first_full_round: bool,
    builder: &mut CircuitBuilder<F, D>,
    yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
    for i in 0..SPONGE_WIDTH {
        let sbox_tmp = lv[if is_first_full_round {
            reg_full0_s0(r, i)
        } else {
            reg_full1_s0(r, i)
        }];
        let sbox_out = lv[if is_first_full_round {
            reg_full0_s1(r, i)
        } else {
            reg_full1_s1(r, i)
        }];
        sbox_circuit(state[i], sbox_tmp, sbox_out, builder, yield_constr);
        state[i] = sbox_out;
    }
}
/// MDS multiply over packed values: circulant row (MDS_MATRIX_CIRC) plus
/// the diagonal term (MDS_MATRIX_DIAG), per output lane.
fn mds_layer_field<P: PackedField>(state: &mut [P]) {
    let res = (0..SPONGE_WIDTH)
        .map(|i| {
            (0..SPONGE_WIDTH)
                .map(|j| {
                    state[(j + i) % SPONGE_WIDTH]
                        * P::Scalar::from_canonical_u64(MDS_MATRIX_CIRC[j])
                })
                .chain([state[i] * P::Scalar::from_canonical_u64(MDS_MATRIX_DIAG[i])])
                .sum()
        })
        .collect_vec();
    state.copy_from_slice(&res);
}
/// Native MDS multiply in u64/u128 arithmetic, reduced once per lane via
/// `from_noncanonical_u96` (the row sum fits in 96 bits).
fn mds_layer<F: PrimeField64>(state_: &[F; SPONGE_WIDTH]) -> [F; SPONGE_WIDTH] {
    let mut result = [F::ZERO; SPONGE_WIDTH];
    let mut state = [0u64; SPONGE_WIDTH];
    for r in 0..SPONGE_WIDTH {
        state[r] = state_[r].to_noncanonical_u64();
    }
    // This is a hacky way of fully unrolling the loop.
    // The constant bound (12) lets the compiler unroll; the guard keeps it
    // correct when SPONGE_WIDTH < 12.
    for r in 0..12 {
        if r < SPONGE_WIDTH {
            let sum = mds_row_shf(r, &state);
            let sum_lo = sum as u64;
            let sum_hi = (sum >> 64) as u32;
            result[r] = F::from_noncanonical_u96((sum_lo, sum_hi));
        }
    }
    result
}
/// Dot product of MDS row `r` (circulant part rotated by `r`, plus the
/// diagonal entry) with state `v`, accumulated exactly in u128.
fn mds_row_shf(r: usize, v: &[u64; SPONGE_WIDTH]) -> u128 {
    debug_assert!(r < SPONGE_WIDTH);
    let mut res = 0u128;
    // Constant bound for unrolling, guarded as in `mds_layer`.
    for i in 0..12 {
        if i < SPONGE_WIDTH {
            res += (v[(i + r) % SPONGE_WIDTH] as u128) * (MDS_MATRIX_CIRC[i] as u128);
        }
    }
    res += (v[r] as u128) * (MDS_MATRIX_DIAG[r] as u128);
    res
}
fn mds_layer_circuit<F: RichField + Extendable<D>, const D: usize>(
state: &mut [ExtensionTarget<D>],
builder: &mut CircuitBuilder<F, D>,
) {
let res = (0..SPONGE_WIDTH)
.map(|i| {
let mut sum = (0..SPONGE_WIDTH)
.map(|j| {
builder.mul_const_extension(
F::from_canonical_u64(MDS_MATRIX_CIRC[j]),
state[(j + i) % SPONGE_WIDTH],
)
})
.collect_vec();
sum.push(
builder.mul_const_extension(F::from_canonical_u64(MDS_MATRIX_DIAG[i]), state[i]),
);
builder.add_many_extension(sum)
})
.collect_vec();
state.copy_from_slice(&res);
}
fn partial_first_constant_layer<P: PackedField>(state: &mut [P]) {
for i in 0..SPONGE_WIDTH {
state[i] += P::Scalar::from_canonical_u64(FAST_PARTIAL_FIRST_ROUND_CONSTANT[i]);
}
}
fn partial_first_constant_layer_circuit<F: RichField + Extendable<D>, const D: usize>(
state: &mut [ExtensionTarget<D>],
builder: &mut CircuitBuilder<F, D>,
) {
for i in 0..SPONGE_WIDTH {
state[i] = builder.add_const_extension(
state[i],
F::from_canonical_u64(FAST_PARTIAL_FIRST_ROUND_CONSTANT[i]),
);
}
}
fn mds_partial_layer_init<F: PrimeField64>(state: &mut [F; SPONGE_WIDTH]) -> [F; SPONGE_WIDTH] {
let mut result = [F::ZEROS; SPONGE_WIDTH];
result[0] = state[0];
for r in 1..SPONGE_WIDTH {
for c in 1..SPONGE_WIDTH {
let t = F::from_canonical_u64(FAST_PARTIAL_ROUND_INITIAL_MATRIX[r - 1][c - 1]);
result[c] += state[r] * t;
}
}
result
}
fn mds_partial_layer_init_field<P: PackedField>(state: &mut [P]) {
let mut result = [P::default(); SPONGE_WIDTH];
result[0] = state[0];
for r in 1..12 {
for c in 1..12 {
let t = P::Scalar::from_canonical_u64(FAST_PARTIAL_ROUND_INITIAL_MATRIX[r - 1][c - 1]);
result[c] += state[r] * t;
}
}
state.copy_from_slice(&result);
}
fn mds_partial_layer_init_circuit<F: RichField + Extendable<D>, const D: usize>(
state: &mut [ExtensionTarget<D>],
builder: &mut CircuitBuilder<F, D>,
) {
let mut result = [builder.zero_extension(); SPONGE_WIDTH];
result[0] = state[0];
for r in 1..12 {
for c in 1..12 {
result[c] = builder.mul_const_add_extension(
F::from_canonical_u64(FAST_PARTIAL_ROUND_INITIAL_MATRIX[r - 1][c - 1]),
state[r],
result[c],
);
}
}
state.copy_from_slice(&result);
}
fn partial_sbox_layer<P: PackedField>(
lv: &[P],
state: &mut [P],
r: usize,
yield_constr: &mut ConstraintConsumer<P>,
) {
let sbox_inter = lv[reg_partial_s0(r)];
let sbox_out = lv[reg_partial_s1(r)];
sbox_field(state[0], sbox_inter, sbox_out, yield_constr);
state[0] = sbox_out;
}
fn partial_sbox_layer_circuit<F: RichField + Extendable<D>, const D: usize>(
lv: &[ExtensionTarget<D>],
state: &mut [ExtensionTarget<D>],
r: usize,
builder: &mut CircuitBuilder<F, D>,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let sbox_inter = lv[reg_partial_s0(r)];
let sbox_out = lv[reg_partial_s1(r)];
sbox_circuit(state[0], sbox_inter, sbox_out, builder, yield_constr);
state[0] = sbox_out;
}
fn mds_partial_layer_fast<F: PrimeField64>(
state: &mut [F; SPONGE_WIDTH],
r: usize,
) -> [F; SPONGE_WIDTH] {
let mut d_sum = (0u128, 0u32); // u160 accumulator
for i in 1..SPONGE_WIDTH {
let t = FAST_PARTIAL_ROUND_W_HATS[r][i - 1] as u128;
let si = state[i].to_noncanonical_u64() as u128;
d_sum = add_u160_u128(d_sum, si * t);
}
let s0 = state[0].to_noncanonical_u64() as u128;
let mds0to0 = (MDS_MATRIX_CIRC[0] + MDS_MATRIX_DIAG[0]) as u128;
d_sum = add_u160_u128(d_sum, s0 * mds0to0);
let d = reduce_u160::<F>(d_sum);
let mut result = [F::ZEROS; SPONGE_WIDTH];
result[0] = d;
for i in 1..12 {
if i < SPONGE_WIDTH {
let t = F::from_canonical_u64(FAST_PARTIAL_ROUND_VS[r][i - 1]);
result[i] = state[i].multiply_accumulate(state[0], t);
}
}
result
}
const fn add_u160_u128((x_lo, x_hi): (u128, u32), y: u128) -> (u128, u32) {
let (res_lo, over) = x_lo.overflowing_add(y);
let res_hi = x_hi + (over as u32);
(res_lo, res_hi)
}
fn reduce_u160<F: PrimeField64>((n_lo, n_hi): (u128, u32)) -> F {
let n_lo_hi = (n_lo >> 64) as u64;
let n_lo_lo = n_lo as u64;
let reduced_hi: u64 = F::from_noncanonical_u96((n_lo_hi, n_hi)).to_noncanonical_u64();
let reduced128: u128 = ((reduced_hi as u128) << 64) + (n_lo_lo as u128);
F::from_noncanonical_u128(reduced128)
}
fn mds_partial_layer_fast_field<P: PackedField>(state: &mut [P], r: usize) {
let s0 = state[0];
let mds0to0 = MDS_MATRIX_CIRC[0] + MDS_MATRIX_DIAG[0];
let mut d = s0 * P::Scalar::from_canonical_u64(mds0to0);
for i in 1..SPONGE_WIDTH {
let t = P::Scalar::from_canonical_u64(FAST_PARTIAL_ROUND_W_HATS[r][i - 1]);
d += state[i] * t;
}
let mut result = [P::default(); SPONGE_WIDTH];
result[0] = d;
for i in 1..SPONGE_WIDTH {
let t = P::Scalar::from_canonical_u64(FAST_PARTIAL_ROUND_VS[r][i - 1]);
result[i] = state[0] * t + state[i];
}
state.copy_from_slice(&result);
}
fn mds_partial_layer_fast_circuit<F: RichField + Extendable<D>, const D: usize>(
state: &mut [ExtensionTarget<D>],
r: usize,
builder: &mut CircuitBuilder<F, D>,
) {
let s0 = state[0];
let mds0to0 = MDS_MATRIX_CIRC[0] + MDS_MATRIX_DIAG[0];
let mut d = (1..SPONGE_WIDTH)
.map(|i| {
builder.mul_const_extension(
F::from_canonical_u64(FAST_PARTIAL_ROUND_W_HATS[r][i - 1]),
state[i],
)
})
.collect_vec();
d.push(builder.mul_const_extension(F::from_canonical_u64(mds0to0), s0));
let d = builder.add_many_extension(d);
let result = (1..SPONGE_WIDTH)
.map(|i| {
builder.mul_const_add_extension(
F::from_canonical_u64(FAST_PARTIAL_ROUND_VS[r][i - 1]),
state[0],
state[i],
)
})
.collect_vec();
state[0] = d;
state[1..].copy_from_slice(&result);
}
fn eval_packed_generic<P: PackedField>(lv: &[P], yield_constr: &mut ConstraintConsumer<P>) {
let mut state = [P::default(); SPONGE_WIDTH];
let input = (0..SPONGE_WIDTH).map(|i| lv[reg_in(i)]).collect_vec();
state.copy_from_slice(&input);
let mut round_ctr = 0;
// First set of full rounds.
for r in 0..HALF_N_FULL_ROUNDS {
constant_layer_field(&mut state, round_ctr);
sbox_layer_field(lv, &mut state, r, true, yield_constr);
mds_layer_field(&mut state);
round_ctr += 1;
}
// partial rounds
partial_first_constant_layer(&mut state);
mds_partial_layer_init_field(&mut state);
for r in 0..N_PARTIAL_ROUNDS - 1 {
partial_sbox_layer(lv, &mut state, r, yield_constr);
state[0] += P::Scalar::from_canonical_u64(FAST_PARTIAL_ROUND_CONSTANTS[r]);
mds_partial_layer_fast_field(&mut state, r);
}
partial_sbox_layer(lv, &mut state, N_PARTIAL_ROUNDS - 1, yield_constr);
mds_partial_layer_fast_field(&mut state, N_PARTIAL_ROUNDS - 1);
round_ctr += N_PARTIAL_ROUNDS;
// full round
for r in 0..HALF_N_FULL_ROUNDS {
constant_layer_field(&mut state, round_ctr);
sbox_layer_field(lv, &mut state, r, false, yield_constr);
mds_layer_field(&mut state);
round_ctr += 1;
}
for i in 0..SPONGE_WIDTH {
yield_constr.constraint(state[i] - lv[reg_out(i)]);
}
}
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for PoseidonStark<F, D> {
type EvaluationFrame<FE, P, const D2: usize>
= StarkFrame<P, NUM_COLUMNS>
where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>;
type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>;
fn eval_packed_generic<FE, P, const D2: usize>(
&self,
vars: &Self::EvaluationFrame<FE, P, D2>,
yield_constr: &mut ConstraintConsumer<P>,
) where
FE: FieldExtension<D2, BaseField = F>,
P: PackedField<Scalar = FE>,
{
let lv = vars.get_local_values();
eval_packed_generic(lv, yield_constr);
}
fn eval_ext_circuit(
&self,
builder: &mut CircuitBuilder<F, D>,
vars: &Self::EvaluationFrameTarget,
yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
let lv = vars.get_local_values();
let zero = builder.zero_extension();
let mut state = [zero; SPONGE_WIDTH];
let input = (0..SPONGE_WIDTH).map(|i| lv[reg_in(i)]).collect_vec();
state.copy_from_slice(&input);
let mut round_ctr = 0;
// First set of full rounds.
for r in 0..HALF_N_FULL_ROUNDS {
constant_layer_circuit(&mut state, round_ctr, builder);
sbox_layer_circuit(lv, &mut state, r, true, builder, yield_constr);
mds_layer_circuit(&mut state, builder);
round_ctr += 1;
}
// partial rounds
partial_first_constant_layer_circuit(&mut state, builder);
mds_partial_layer_init_circuit(&mut state, builder);
for r in 0..N_PARTIAL_ROUNDS - 1 {
partial_sbox_layer_circuit(lv, &mut state, r, builder, yield_constr);
state[0] = builder.add_const_extension(
state[0],
F::from_canonical_u64(FAST_PARTIAL_ROUND_CONSTANTS[r]),
);
mds_partial_layer_fast_circuit(&mut state, r, builder);
}
partial_sbox_layer_circuit(lv, &mut state, N_PARTIAL_ROUNDS - 1, builder, yield_constr);
mds_partial_layer_fast_circuit(&mut state, N_PARTIAL_ROUNDS - 1, builder);
round_ctr += N_PARTIAL_ROUNDS;
// full round
for r in 0..HALF_N_FULL_ROUNDS {
constant_layer_circuit(&mut state, round_ctr, builder);
sbox_layer_circuit(lv, &mut state, r, false, builder, yield_constr);
mds_layer_circuit(&mut state, builder);
round_ctr += 1;
}
for i in 0..SPONGE_WIDTH {
let z = builder.sub_extension(state[i], lv[reg_out(i)]);
yield_constr.constraint(builder, z);
}
}
fn constraint_degree(&self) -> usize {
3
}
}
#[cfg(test)]
mod tests {
use anyhow::Result;
use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV};
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::{Field, Sample};
use plonky2::fri::oracle::PolynomialBatch;
use plonky2::iop::challenger::Challenger;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2::timed;
use plonky2::util::timing::TimingTree;
use crate::config::StarkConfig;
use crate::constraint_consumer::ConstraintConsumer;
use crate::cross_table_lookup::{
Column, CtlData, CtlZData, Filter, GrandProductChallenge, GrandProductChallengeSet,
};
use crate::poseidon::constants::SPONGE_WIDTH;
use crate::poseidon::poseidon_stark::{eval_packed_generic, PoseidonStark};
use crate::prover::prove_single_table;
use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
#[test]
fn test_stark_degree() -> Result<()> {
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type S = PoseidonStark<F, D>;
let stark = S {
f: Default::default(),
};
test_stark_low_degree(stark)
}
#[test]
fn test_stark_circuit() -> Result<()> {
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type S = PoseidonStark<F, D>;
let stark = S {
f: Default::default(),
};
test_stark_circuit_constraints::<F, C, S, D>(stark)
}
#[test]
fn test_eval_consistency() {
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type S = PoseidonStark<F, D>;
let stark = S::default();
init_logger();
let input: ([F; SPONGE_WIDTH], usize) = (F::rand_array(), 0);
let rows = stark.generate_trace_rows(&[input], 4);
let mut constraint_consumer = ConstraintConsumer::new(
vec![GoldilocksField(2), GoldilocksField(3), GoldilocksField(5)],
GoldilocksField::ONE,
GoldilocksField::ZERO,
GoldilocksField::ZERO,
);
eval_packed_generic(&rows[0], &mut constraint_consumer);
for &acc in &constraint_consumer.constraint_accs {
assert_eq!(acc, GoldilocksField::ZERO);
}
}
#[test]
fn poseidon_benchmark() -> Result<()> {
const NUM_PERMS: usize = 100;
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
type S = PoseidonStark<F, D>;
let stark = S::default();
let config = StarkConfig::standard_fast_config();
init_logger();
let input: Vec<([F; SPONGE_WIDTH], usize)> =
(0..NUM_PERMS).map(|_| (F::rand_array(), 0)).collect();
let mut timing = TimingTree::new("prove", log::Level::Debug);
let trace_poly_values = stark.generate_trace(&input, 8);
// TODO: Cloning this isn't great; consider having `from_values` accept a reference,
// or having `compute_permutation_z_polys` read trace values from the `PolynomialBatch`.
let cloned_trace_poly_values = timed!(timing, "clone", trace_poly_values.clone());
let trace_commitments = timed!(
timing,
"compute trace commitment",
PolynomialBatch::<F, C, D>::from_values(
cloned_trace_poly_values,
config.fri_config.rate_bits,
false,
config.fri_config.cap_height,
&mut timing,
None,
)
);
let degree = 1 << trace_commitments.degree_log;
// Fake CTL data.
let ctl_z_data = CtlZData {
helper_columns: vec![PolynomialValues::zero(degree)],
z: PolynomialValues::zero(degree),
challenge: GrandProductChallenge {
beta: F::ZERO,
gamma: F::ZERO,
},
columns: vec![],
filter: vec![Some(Filter::new_simple(Column::constant(F::ZERO)))],
};
let ctl_data = CtlData {
zs_columns: vec![ctl_z_data.clone(); config.num_challenges],
};
prove_single_table(
&stark,
&config,
&trace_poly_values,
&trace_commitments,
&ctl_data,
&GrandProductChallengeSet {
challenges: vec![ctl_z_data.challenge; config.num_challenges],
},
&mut Challenger::new(),
&mut timing,
)?;
timing.print();
Ok(())
}
fn init_logger() {
let _ = try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "debug"));
}
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/poseidon/mod.rs | prover/src/poseidon/mod.rs | pub mod columns;
pub mod constants;
pub mod poseidon_stark;
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/poseidon/constants.rs | prover/src/poseidon/constants.rs | pub const SPONGE_RATE: usize = 8;
pub const SPONGE_CAPACITY: usize = 4;
pub const SPONGE_WIDTH: usize = SPONGE_RATE + SPONGE_CAPACITY;
pub const HALF_N_FULL_ROUNDS: usize = 4;
pub const N_FULL_ROUNDS_TOTAL: usize = 2 * HALF_N_FULL_ROUNDS;
pub const N_PARTIAL_ROUNDS: usize = 22;
pub const N_ROUNDS: usize = N_FULL_ROUNDS_TOTAL + N_PARTIAL_ROUNDS;
pub const MAX_WIDTH: usize = 12; // we only have width 8 and 12, and 12 is bigger. :)
#[rustfmt::skip]
pub const ALL_ROUND_CONSTANTS: [u64; MAX_WIDTH * N_ROUNDS] = [
0xb585f766f2144405, 0x7746a55f43921ad7, 0xb2fb0d31cee799b4, 0x0f6760a4803427d7,
0xe10d666650f4e012, 0x8cae14cb07d09bf1, 0xd438539c95f63e9f, 0xef781c7ce35b4c3d,
0xcdc4a239b0c44426, 0x277fa208bf337bff, 0xe17653a29da578a1, 0xc54302f225db2c76,
0x86287821f722c881, 0x59cd1a8a41c18e55, 0xc3b919ad495dc574, 0xa484c4c5ef6a0781,
0x308bbd23dc5416cc, 0x6e4a40c18f30c09c, 0x9a2eedb70d8f8cfa, 0xe360c6e0ae486f38,
0xd5c7718fbfc647fb, 0xc35eae071903ff0b, 0x849c2656969c4be7, 0xc0572c8c08cbbbad,
0xe9fa634a21de0082, 0xf56f6d48959a600d, 0xf7d713e806391165, 0x8297132b32825daf,
0xad6805e0e30b2c8a, 0xac51d9f5fcf8535e, 0x502ad7dc18c2ad87, 0x57a1550c110b3041,
0x66bbd30e6ce0e583, 0x0da2abef589d644e, 0xf061274fdb150d61, 0x28b8ec3ae9c29633,
0x92a756e67e2b9413, 0x70e741ebfee96586, 0x019d5ee2af82ec1c, 0x6f6f2ed772466352,
0x7cf416cfe7e14ca1, 0x61df517b86a46439, 0x85dc499b11d77b75, 0x4b959b48b9c10733,
0xe8be3e5da8043e57, 0xf5c0bc1de6da8699, 0x40b12cbf09ef74bf, 0xa637093ecb2ad631,
0x3cc3f892184df408, 0x2e479dc157bf31bb, 0x6f49de07a6234346, 0x213ce7bede378d7b,
0x5b0431345d4dea83, 0xa2de45780344d6a1, 0x7103aaf94a7bf308, 0x5326fc0d97279301,
0xa9ceb74fec024747, 0x27f8ec88bb21b1a3, 0xfceb4fda1ded0893, 0xfac6ff1346a41675,
0x7131aa45268d7d8c, 0x9351036095630f9f, 0xad535b24afc26bfb, 0x4627f5c6993e44be,
0x645cf794b8f1cc58, 0x241c70ed0af61617, 0xacb8e076647905f1, 0x3737e9db4c4f474d,
0xe7ea5e33e75fffb6, 0x90dee49fc9bfc23a, 0xd1b1edf76bc09c92, 0x0b65481ba645c602,
0x99ad1aab0814283b, 0x438a7c91d416ca4d, 0xb60de3bcc5ea751c, 0xc99cab6aef6f58bc,
0x69a5ed92a72ee4ff, 0x5e7b329c1ed4ad71, 0x5fc0ac0800144885, 0x32db829239774eca,
0x0ade699c5830f310, 0x7cc5583b10415f21, 0x85df9ed2e166d64f, 0x6604df4fee32bcb1,
0xeb84f608da56ef48, 0xda608834c40e603d, 0x8f97fe408061f183, 0xa93f485c96f37b89,
0x6704e8ee8f18d563, 0xcee3e9ac1e072119, 0x510d0e65e2b470c1, 0xf6323f486b9038f0,
0x0b508cdeffa5ceef, 0xf2417089e4fb3cbd, 0x60e75c2890d15730, 0xa6217d8bf660f29c,
0x7159cd30c3ac118e, 0x839b4e8fafead540, 0x0d3f3e5e82920adc, 0x8f7d83bddee7bba8,
0x780f2243ea071d06, 0xeb915845f3de1634, 0xd19e120d26b6f386, 0x016ee53a7e5fecc6,
0xcb5fd54e7933e477, 0xacb8417879fd449f, 0x9c22190be7f74732, 0x5d693c1ba3ba3621,
0xdcef0797c2b69ec7, 0x3d639263da827b13, 0xe273fd971bc8d0e7, 0x418f02702d227ed5,
0x8c25fda3b503038c, 0x2cbaed4daec8c07c, 0x5f58e6afcdd6ddc2, 0x284650ac5e1b0eba,
0x635b337ee819dab5, 0x9f9a036ed4f2d49f, 0xb93e260cae5c170e, 0xb0a7eae879ddb76d,
0xd0762cbc8ca6570c, 0x34c6efb812b04bf5, 0x40bf0ab5fa14c112, 0xb6b570fc7c5740d3,
0x5a27b9002de33454, 0xb1a5b165b6d2b2d2, 0x8722e0ace9d1be22, 0x788ee3b37e5680fb,
0x14a726661551e284, 0x98b7672f9ef3b419, 0xbb93ae776bb30e3a, 0x28fd3b046380f850,
0x30a4680593258387, 0x337dc00c61bd9ce1, 0xd5eca244c7a4ff1d, 0x7762638264d279bd,
0xc1e434bedeefd767, 0x0299351a53b8ec22, 0xb2d456e4ad251b80, 0x3e9ed1fda49cea0b,
0x2972a92ba450bed8, 0x20216dd77be493de, 0xadffe8cf28449ec6, 0x1c4dbb1c4c27d243,
0x15a16a8a8322d458, 0x388a128b7fd9a609, 0x2300e5d6baedf0fb, 0x2f63aa8647e15104,
0xf1c36ce86ecec269, 0x27181125183970c9, 0xe584029370dca96d, 0x4d9bbc3e02f1cfb2,
0xea35bc29692af6f8, 0x18e21b4beabb4137, 0x1e3b9fc625b554f4, 0x25d64362697828fd,
0x5a3f1bb1c53a9645, 0xdb7f023869fb8d38, 0xb462065911d4e1fc, 0x49c24ae4437d8030,
0xd793862c112b0566, 0xaadd1106730d8feb, 0xc43b6e0e97b0d568, 0xe29024c18ee6fca2,
0x5e50c27535b88c66, 0x10383f20a4ff9a87, 0x38e8ee9d71a45af8, 0xdd5118375bf1a9b9,
0x775005982d74d7f7, 0x86ab99b4dde6c8b0, 0xb1204f603f51c080, 0xef61ac8470250ecf,
0x1bbcd90f132c603f, 0x0cd1dabd964db557, 0x11a3ae5beb9d1ec9, 0xf755bfeea585d11d,
0xa3b83250268ea4d7, 0x516306f4927c93af, 0xddb4ac49c9efa1da, 0x64bb6dec369d4418,
0xf9cc95c22b4c1fcc, 0x08d37f755f4ae9f6, 0xeec49b613478675b, 0xf143933aed25e0b0,
0xe4c5dd8255dfc622, 0xe7ad7756f193198e, 0x92c2318b87fff9cb, 0x739c25f8fd73596d,
0x5636cac9f16dfed0, 0xdd8f909a938e0172, 0xc6401fe115063f5b, 0x8ad97b33f1ac1455,
0x0c49366bb25e8513, 0x0784d3d2f1698309, 0x530fb67ea1809a81, 0x410492299bb01f49,
0x139542347424b9ac, 0x9cb0bd5ea1a1115e, 0x02e3f615c38f49a1, 0x985d4f4a9c5291ef,
0x775b9feafdcd26e7, 0x304265a6384f0f2d, 0x593664c39773012c, 0x4f0a2e5fb028f2ce,
0xdd611f1000c17442, 0xd8185f9adfea4fd0, 0xef87139ca9a3ab1e, 0x3ba71336c34ee133,
0x7d3a455d56b70238, 0x660d32e130182684, 0x297a863f48cd1f43, 0x90e0a736a751ebb7,
0x549f80ce550c4fd3, 0x0f73b2922f38bd64, 0x16bf1f73fb7a9c3f, 0x6d1f5a59005bec17,
0x02ff876fa5ef97c4, 0xc5cb72a2a51159b0, 0x8470f39d2d5c900e, 0x25abb3f1d39fcb76,
0x23eb8cc9b372442f, 0xd687ba55c64f6364, 0xda8d9e90fd8ff158, 0xe3cbdc7d2fe45ea7,
0xb9a8c9b3aee52297, 0xc0d28a5c10960bd3, 0x45d7ac9b68f71a34, 0xeeb76e397069e804,
0x3d06c8bd1514e2d9, 0x9c9c98207cb10767, 0x65700b51aedfb5ef, 0x911f451539869408,
0x7ae6849fbc3a0ec6, 0x3bb340eba06afe7e, 0xb46e9d8b682ea65e, 0x8dcf22f9a3b34356,
0x77bdaeda586257a7, 0xf19e400a5104d20d, 0xc368a348e46d950f, 0x9ef1cd60e679f284,
0xe89cd854d5d01d33, 0x5cd377dc8bb882a2, 0xa7b0fb7883eee860, 0x7684403ec392950d,
0x5fa3f06f4fed3b52, 0x8df57ac11bc04831, 0x2db01efa1e1e1897, 0x54846de4aadb9ca2,
0xba6745385893c784, 0x541d496344d2c75b, 0xe909678474e687fe, 0xdfe89923f6c9c2ff,
0xece5a71e0cfedc75, 0x5ff98fd5d51fe610, 0x83e8941918964615, 0x5922040b47f150c1,
0xf97d750e3dd94521, 0x5080d4c2b86f56d7, 0xa7de115b56c78d70, 0x6a9242ac87538194,
0xf7856ef7f9173e44, 0x2265fc92feb0dc09, 0x17dfc8e4f7ba8a57, 0x9001a64209f21db8,
0x90004c1371b893c5, 0xb932b7cf752e5545, 0xa0b1df81b6fe59fc, 0x8ef1dd26770af2c2,
0x0541a4f9cfbeed35, 0x9e61106178bfc530, 0xb3767e80935d8af2, 0x0098d5782065af06,
0x31d191cd5c1466c7, 0x410fefafa319ac9d, 0xbdf8f242e316c4ab, 0x9e8cd55b57637ed0,
0xde122bebe9a39368, 0x4d001fd58f002526, 0xca6637000eb4a9f8, 0x2f2339d624f91f78,
0x6d1a7918c80df518, 0xdf9a4939342308e9, 0xebc2151ee6c8398c, 0x03cc2ba8a1116515,
0xd341d037e840cf83, 0x387cb5d25af4afcc, 0xbba2515f22909e87, 0x7248fe7705f38e47,
0x4d61e56a525d225a, 0x262e963c8da05d3d, 0x59e89b094d220ec2, 0x055d5b52b78b9c5e,
0x82b27eb33514ef99, 0xd30094ca96b7ce7b, 0xcf5cb381cd0a1535, 0xfeed4db6919e5a7c,
0x41703f53753be59f, 0x5eeea940fcde8b6f, 0x4cd1f1b175100206, 0x4a20358574454ec0,
0x1478d361dbbf9fac, 0x6f02dc07d141875c, 0x296a202ed8e556a2, 0x2afd67999bf32ee5,
0x7acfd96efa95491d, 0x6798ba0c0abb2c6d, 0x34c6f57b26c92122, 0x5736e1bad206b5de,
0x20057d2a0056521b, 0x3dea5bd5d0578bd7, 0x16e50d897d4634ac, 0x29bff3ecb9b7a6e3,
0x475cd3205a3bdcde, 0x18a42105c31b7e88, 0x023e7414af663068, 0x15147108121967d7,
0xe4a3dff1d7d6fef9, 0x01a8d1a588085737, 0x11b4c74eda62beef, 0xe587cc0d69a73346,
0x1ff7327017aa2a6e, 0x594e29c42473d06b, 0xf6f31db1899b12d5, 0xc02ac5e47312d3ca,
0xe70201e960cb78b8, 0x6f90ff3b6a65f108, 0x42747a7245e7fa84, 0xd1f507e43ab749b2,
0x1c86d265f15750cd, 0x3996ce73dd832c1c, 0x8e7fba02983224bd, 0xba0dec7103255dd4,
0x9e9cbd781628fc5b, 0xdae8645996edd6a5, 0xdebe0853b1a1d378, 0xa49229d24d014343,
0x7be5b9ffda905e1c, 0xa3c95eaec244aa30, 0x0230bca8f4df0544, 0x4135c2bebfe148c6,
0x166fc0cc438a3c72, 0x3762b59a8ae83efa, 0xe8928a4c89114750, 0x2a440b51a4945ee5,
0x80cefd2b7d99ff83, 0xbb9879c6e61fd62a, 0x6e7c8f1a84265034, 0x164bb2de1bbeddc8,
0xf3c12fe54d5c653b, 0x40b9e922ed9771e2, 0x551f5b0fbe7b1840, 0x25032aa7c4cb1811,
0xaaed34074b164346, 0x8ffd96bbf9c9c81d, 0x70fc91eb5937085c, 0x7f795e2a5f915440,
0x4543d9df5476d3cb, 0xf172d73e004fc90d, 0xdfd1c4febcc81238, 0xbc8dfb627fe558fc,
];
pub const MDS_MATRIX_CIRC: [u64; 12] = [17, 15, 41, 16, 2, 28, 13, 13, 39, 18, 34, 20];
pub const MDS_MATRIX_DIAG: [u64; 12] = [8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0];
pub const FAST_PARTIAL_FIRST_ROUND_CONSTANT: [u64; 12] = [
0x3cc3f892184df408,
0xe993fd841e7e97f1,
0xf2831d3575f0f3af,
0xd2500e0a350994ca,
0xc5571f35d7288633,
0x91d89c5184109a02,
0xf37f925d04e5667b,
0x2d6e448371955a69,
0x740ef19ce01398a1,
0x694d24c0752fdf45,
0x60936af96ee2f148,
0xc33448feadc78f0c,
];
pub const FAST_PARTIAL_ROUND_CONSTANTS: [u64; N_PARTIAL_ROUNDS] = [
0x74cb2e819ae421ab,
0xd2559d2370e7f663,
0x62bf78acf843d17c,
0xd5ab7b67e14d1fb4,
0xb9fe2ae6e0969bdc,
0xe33fdf79f92a10e8,
0x0ea2bb4c2b25989b,
0xca9121fbf9d38f06,
0xbdd9b0aa81f58fa4,
0x83079fa4ecf20d7e,
0x650b838edfcc4ad3,
0x77180c88583c76ac,
0xaf8c20753143a180,
0xb8ccfe9989a39175,
0x954a1729f60cc9c5,
0xdeb5b550c4dca53b,
0xf01bb0b00f77011e,
0xa1ebb404b676afd9,
0x860b6e1597a0173e,
0x308bb65a036acbce,
0x1aca78f31c97c876,
0x0,
];
pub const FAST_PARTIAL_ROUND_VS: [[u64; 12 - 1]; N_PARTIAL_ROUNDS] = [
[
0x94877900674181c3,
0xc6c67cc37a2a2bbd,
0xd667c2055387940f,
0x0ba63a63e94b5ff0,
0x99460cc41b8f079f,
0x7ff02375ed524bb3,
0xea0870b47a8caf0e,
0xabcad82633b7bc9d,
0x3b8d135261052241,
0xfb4515f5e5b0d539,
0x3ee8011c2b37f77c,
],
[
0x0adef3740e71c726,
0xa37bf67c6f986559,
0xc6b16f7ed4fa1b00,
0x6a065da88d8bfc3c,
0x4cabc0916844b46f,
0x407faac0f02e78d1,
0x07a786d9cf0852cf,
0x42433fb6949a629a,
0x891682a147ce43b0,
0x26cfd58e7b003b55,
0x2bbf0ed7b657acb3,
],
[
0x481ac7746b159c67,
0xe367de32f108e278,
0x73f260087ad28bec,
0x5cfc82216bc1bdca,
0xcaccc870a2663a0e,
0xdb69cd7b4298c45d,
0x7bc9e0c57243e62d,
0x3cc51c5d368693ae,
0x366b4e8cc068895b,
0x2bd18715cdabbca4,
0xa752061c4f33b8cf,
],
[
0xb22d2432b72d5098,
0x9e18a487f44d2fe4,
0x4b39e14ce22abd3c,
0x9e77fde2eb315e0d,
0xca5e0385fe67014d,
0x0c2cb99bf1b6bddb,
0x99ec1cd2a4460bfe,
0x8577a815a2ff843f,
0x7d80a6b4fd6518a5,
0xeb6c67123eab62cb,
0x8f7851650eca21a5,
],
[
0x11ba9a1b81718c2a,
0x9f7d798a3323410c,
0xa821855c8c1cf5e5,
0x535e8d6fac0031b2,
0x404e7c751b634320,
0xa729353f6e55d354,
0x4db97d92e58bb831,
0xb53926c27897bf7d,
0x965040d52fe115c5,
0x9565fa41ebd31fd7,
0xaae4438c877ea8f4,
],
[
0x37f4e36af6073c6e,
0x4edc0918210800e9,
0xc44998e99eae4188,
0x9f4310d05d068338,
0x9ec7fe4350680f29,
0xc5b2c1fdc0b50874,
0xa01920c5ef8b2ebe,
0x59fa6f8bd91d58ba,
0x8bfc9eb89b515a82,
0xbe86a7a2555ae775,
0xcbb8bbaa3810babf,
],
[
0x577f9a9e7ee3f9c2,
0x88c522b949ace7b1,
0x82f07007c8b72106,
0x8283d37c6675b50e,
0x98b074d9bbac1123,
0x75c56fb7758317c1,
0xfed24e206052bc72,
0x26d7c3d1bc07dae5,
0xf88c5e441e28dbb4,
0x4fe27f9f96615270,
0x514d4ba49c2b14fe,
],
[
0xf02a3ac068ee110b,
0x0a3630dafb8ae2d7,
0xce0dc874eaf9b55c,
0x9a95f6cff5b55c7e,
0x626d76abfed00c7b,
0xa0c1cf1251c204ad,
0xdaebd3006321052c,
0x3d4bd48b625a8065,
0x7f1e584e071f6ed2,
0x720574f0501caed3,
0xe3260ba93d23540a,
],
[
0xab1cbd41d8c1e335,
0x9322ed4c0bc2df01,
0x51c3c0983d4284e5,
0x94178e291145c231,
0xfd0f1a973d6b2085,
0xd427ad96e2b39719,
0x8a52437fecaac06b,
0xdc20ee4b8c4c9a80,
0xa2c98e9549da2100,
0x1603fe12613db5b6,
0x0e174929433c5505,
],
[
0x3d4eab2b8ef5f796,
0xcfff421583896e22,
0x4143cb32d39ac3d9,
0x22365051b78a5b65,
0x6f7fd010d027c9b6,
0xd9dd36fba77522ab,
0xa44cf1cb33e37165,
0x3fc83d3038c86417,
0xc4588d418e88d270,
0xce1320f10ab80fe2,
0xdb5eadbbec18de5d,
],
[
0x1183dfce7c454afd,
0x21cea4aa3d3ed949,
0x0fce6f70303f2304,
0x19557d34b55551be,
0x4c56f689afc5bbc9,
0xa1e920844334f944,
0xbad66d423d2ec861,
0xf318c785dc9e0479,
0x99e2032e765ddd81,
0x400ccc9906d66f45,
0xe1197454db2e0dd9,
],
[
0x84d1ecc4d53d2ff1,
0xd8af8b9ceb4e11b6,
0x335856bb527b52f4,
0xc756f17fb59be595,
0xc0654e4ea5553a78,
0x9e9a46b61f2ea942,
0x14fc8b5b3b809127,
0xd7009f0f103be413,
0x3e0ee7b7a9fb4601,
0xa74e888922085ed7,
0xe80a7cde3d4ac526,
],
[
0x238aa6daa612186d,
0x9137a5c630bad4b4,
0xc7db3817870c5eda,
0x217e4f04e5718dc9,
0xcae814e2817bd99d,
0xe3292e7ab770a8ba,
0x7bb36ef70b6b9482,
0x3c7835fb85bca2d3,
0xfe2cdf8ee3c25e86,
0x61b3915ad7274b20,
0xeab75ca7c918e4ef,
],
[
0xd6e15ffc055e154e,
0xec67881f381a32bf,
0xfbb1196092bf409c,
0xdc9d2e07830ba226,
0x0698ef3245ff7988,
0x194fae2974f8b576,
0x7a5d9bea6ca4910e,
0x7aebfea95ccdd1c9,
0xf9bd38a67d5f0e86,
0xfa65539de65492d8,
0xf0dfcbe7653ff787,
],
[
0x0bd87ad390420258,
0x0ad8617bca9e33c8,
0x0c00ad377a1e2666,
0x0ac6fc58b3f0518f,
0x0c0cc8a892cc4173,
0x0c210accb117bc21,
0x0b73630dbb46ca18,
0x0c8be4920cbd4a54,
0x0bfe877a21be1690,
0x0ae790559b0ded81,
0x0bf50db2f8d6ce31,
],
[
0x000cf29427ff7c58,
0x000bd9b3cf49eec8,
0x000d1dc8aa81fb26,
0x000bc792d5c394ef,
0x000d2ae0b2266453,
0x000d413f12c496c1,
0x000c84128cfed618,
0x000db5ebd48fc0d4,
0x000d1b77326dcb90,
0x000beb0ccc145421,
0x000d10e5b22b11d1,
],
[
0x00000e24c99adad8,
0x00000cf389ed4bc8,
0x00000e580cbf6966,
0x00000cde5fd7e04f,
0x00000e63628041b3,
0x00000e7e81a87361,
0x00000dabe78f6d98,
0x00000efb14cac554,
0x00000e5574743b10,
0x00000d05709f42c1,
0x00000e4690c96af1,
],
[
0x0000000f7157bc98,
0x0000000e3006d948,
0x0000000fa65811e6,
0x0000000e0d127e2f,
0x0000000fc18bfe53,
0x0000000fd002d901,
0x0000000eed6461d8,
0x0000001068562754,
0x0000000fa0236f50,
0x0000000e3af13ee1,
0x0000000fa460f6d1,
],
[
0x0000000011131738,
0x000000000f56d588,
0x0000000011050f86,
0x000000000f848f4f,
0x00000000111527d3,
0x00000000114369a1,
0x00000000106f2f38,
0x0000000011e2ca94,
0x00000000110a29f0,
0x000000000fa9f5c1,
0x0000000010f625d1,
],
[
0x000000000011f718,
0x000000000010b6c8,
0x0000000000134a96,
0x000000000010cf7f,
0x0000000000124d03,
0x000000000013f8a1,
0x0000000000117c58,
0x0000000000132c94,
0x0000000000134fc0,
0x000000000010a091,
0x0000000000128961,
],
[
0x0000000000001300,
0x0000000000001750,
0x000000000000114e,
0x000000000000131f,
0x000000000000167b,
0x0000000000001371,
0x0000000000001230,
0x000000000000182c,
0x0000000000001368,
0x0000000000000f31,
0x00000000000015c9,
],
[
0x0000000000000014,
0x0000000000000022,
0x0000000000000012,
0x0000000000000027,
0x000000000000000d,
0x000000000000000d,
0x000000000000001c,
0x0000000000000002,
0x0000000000000010,
0x0000000000000029,
0x000000000000000f,
],
];
pub const FAST_PARTIAL_ROUND_W_HATS: [[u64; 12 - 1]; N_PARTIAL_ROUNDS] = [
[
0x3d999c961b7c63b0,
0x814e82efcd172529,
0x2421e5d236704588,
0x887af7d4dd482328,
0xa5e9c291f6119b27,
0xbdc52b2676a4b4aa,
0x64832009d29bcf57,
0x09c4155174a552cc,
0x463f9ee03d290810,
0xc810936e64982542,
0x043b1c289f7bc3ac,
],
[
0x673655aae8be5a8b,
0xd510fe714f39fa10,
0x2c68a099b51c9e73,
0xa667bfa9aa96999d,
0x4d67e72f063e2108,
0xf84dde3e6acda179,
0x40f9cc8c08f80981,
0x5ead032050097142,
0x6591b02092d671bb,
0x00e18c71963dd1b7,
0x8a21bcd24a14218a,
],
[
0x202800f4addbdc87,
0xe4b5bdb1cc3504ff,
0xbe32b32a825596e7,
0x8e0f68c5dc223b9a,
0x58022d9e1c256ce3,
0x584d29227aa073ac,
0x8b9352ad04bef9e7,
0xaead42a3f445ecbf,
0x3c667a1d833a3cca,
0xda6f61838efa1ffe,
0xe8f749470bd7c446,
],
[
0xc5b85bab9e5b3869,
0x45245258aec51cf7,
0x16e6b8e68b931830,
0xe2ae0f051418112c,
0x0470e26a0093a65b,
0x6bef71973a8146ed,
0x119265be51812daf,
0xb0be7356254bea2e,
0x8584defff7589bd7,
0x3c5fe4aeb1fb52ba,
0x9e7cd88acf543a5e,
],
[
0x179be4bba87f0a8c,
0xacf63d95d8887355,
0x6696670196b0074f,
0xd99ddf1fe75085f9,
0xc2597881fef0283b,
0xcf48395ee6c54f14,
0x15226a8e4cd8d3b6,
0xc053297389af5d3b,
0x2c08893f0d1580e2,
0x0ed3cbcff6fcc5ba,
0xc82f510ecf81f6d0,
],
[
0x94b06183acb715cc,
0x500392ed0d431137,
0x861cc95ad5c86323,
0x05830a443f86c4ac,
0x3b68225874a20a7c,
0x10b3309838e236fb,
0x9b77fc8bcd559e2c,
0xbdecf5e0cb9cb213,
0x30276f1221ace5fa,
0x7935dd342764a144,
0xeac6db520bb03708,
],
[
0x7186a80551025f8f,
0x622247557e9b5371,
0xc4cbe326d1ad9742,
0x55f1523ac6a23ea2,
0xa13dfe77a3d52f53,
0xe30750b6301c0452,
0x08bd488070a3a32b,
0xcd800caef5b72ae3,
0x83329c90f04233ce,
0xb5b99e6664a0a3ee,
0x6b0731849e200a7f,
],
[
0xec3fabc192b01799,
0x382b38cee8ee5375,
0x3bfb6c3f0e616572,
0x514abd0cf6c7bc86,
0x47521b1361dcc546,
0x178093843f863d14,
0xad1003c5d28918e7,
0x738450e42495bc81,
0xaf947c59af5e4047,
0x4653fb0685084ef2,
0x057fde2062ae35bf,
],
[
0xe376678d843ce55e,
0x66f3860d7514e7fc,
0x7817f3dfff8b4ffa,
0x3929624a9def725b,
0x0126ca37f215a80a,
0xfce2f5d02762a303,
0x1bc927375febbad7,
0x85b481e5243f60bf,
0x2d3c5f42a39c91a0,
0x0811719919351ae8,
0xf669de0add993131,
],
[
0x7de38bae084da92d,
0x5b848442237e8a9b,
0xf6c705da84d57310,
0x31e6a4bdb6a49017,
0x889489706e5c5c0f,
0x0e4a205459692a1b,
0xbac3fa75ee26f299,
0x5f5894f4057d755e,
0xb0dc3ecd724bb076,
0x5e34d8554a6452ba,
0x04f78fd8c1fdcc5f,
],
[
0x4dd19c38779512ea,
0xdb79ba02704620e9,
0x92a29a3675a5d2be,
0xd5177029fe495166,
0xd32b3298a13330c1,
0x251c4a3eb2c5f8fd,
0xe1c48b26e0d98825,
0x3301d3362a4ffccb,
0x09bb6c88de8cd178,
0xdc05b676564f538a,
0x60192d883e473fee,
],
[
0x16b9774801ac44a0,
0x3cb8411e786d3c8e,
0xa86e9cf505072491,
0x0178928152e109ae,
0x5317b905a6e1ab7b,
0xda20b3be7f53d59f,
0xcb97dedecebee9ad,
0x4bd545218c59f58d,
0x77dc8d856c05a44a,
0x87948589e4f243fd,
0x7e5217af969952c2,
],
[
0xbc58987d06a84e4d,
0x0b5d420244c9cae3,
0xa3c4711b938c02c0,
0x3aace640a3e03990,
0x865a0f3249aacd8a,
0x8d00b2a7dbed06c7,
0x6eacb905beb7e2f8,
0x045322b216ec3ec7,
0xeb9de00d594828e6,
0x088c5f20df9e5c26,
0xf555f4112b19781f,
],
[
0xa8cedbff1813d3a7,
0x50dcaee0fd27d164,
0xf1cb02417e23bd82,
0xfaf322786e2abe8b,
0x937a4315beb5d9b6,
0x1b18992921a11d85,
0x7d66c4368b3c497b,
0x0e7946317a6b4e99,
0xbe4430134182978b,
0x3771e82493ab262d,
0xa671690d8095ce82,
],
[
0xb035585f6e929d9d,
0xba1579c7e219b954,
0xcb201cf846db4ba3,
0x287bf9177372cf45,
0xa350e4f61147d0a6,
0xd5d0ecfb50bcff99,
0x2e166aa6c776ed21,
0xe1e66c991990e282,
0x662b329b01e7bb38,
0x8aa674b36144d9a9,
0xcbabf78f97f95e65,
],
[
0xeec24b15a06b53fe,
0xc8a7aa07c5633533,
0xefe9c6fa4311ad51,
0xb9173f13977109a1,
0x69ce43c9cc94aedc,
0xecf623c9cd118815,
0x28625def198c33c7,
0xccfc5f7de5c3636a,
0xf5e6c40f1621c299,
0xcec0e58c34cb64b1,
0xa868ea113387939f,
],
[
0xd8dddbdc5ce4ef45,
0xacfc51de8131458c,
0x146bb3c0fe499ac0,
0x9e65309f15943903,
0x80d0ad980773aa70,
0xf97817d4ddbf0607,
0xe4626620a75ba276,
0x0dfdc7fd6fc74f66,
0xf464864ad6f2bb93,
0x02d55e52a5d44414,
0xdd8de62487c40925,
],
[
0xc15acf44759545a3,
0xcbfdcf39869719d4,
0x33f62042e2f80225,
0x2599c5ead81d8fa3,
0x0b306cb6c1d7c8d0,
0x658c80d3df3729b1,
0xe8d1b2b21b41429c,
0xa1b67f09d4b3ccb8,
0x0e1adf8b84437180,
0x0d593a5e584af47b,
0xa023d94c56e151c7,
],
[
0x49026cc3a4afc5a6,
0xe06dff00ab25b91b,
0x0ab38c561e8850ff,
0x92c3c8275e105eeb,
0xb65256e546889bd0,
0x3c0468236ea142f6,
0xee61766b889e18f2,
0xa206f41b12c30415,
0x02fe9d756c9f12d1,
0xe9633210630cbf12,
0x1ffea9fe85a0b0b1,
],
[
0x81d1ae8cc50240f3,
0xf4c77a079a4607d7,
0xed446b2315e3efc1,
0x0b0a6b70915178c3,
0xb11ff3e089f15d9a,
0x1d4dba0b7ae9cc18,
0x65d74e2f43b48d05,
0xa2df8c6b8ae0804a,
0xa4e6f0a8c33348a6,
0xc0a26efc7be5669b,
0xa6b6582c547d0d60,
],
[
0x84afc741f1c13213,
0x2f8f43734fc906f3,
0xde682d72da0a02d9,
0x0bb005236adb9ef2,
0x5bdf35c10a8b5624,
0x0739a8a343950010,
0x52f515f44785cfbc,
0xcbaf4e5d82856c60,
0xac9ea09074e3e150,
0x8f0fa011a2035fb0,
0x1a37905d8450904a,
],
[
0x3abeb80def61cc85,
0x9d19c9dd4eac4133,
0x075a652d9641a985,
0x9daf69ae1b67e667,
0x364f71da77920a18,
0x50bd769f745c95b1,
0xf223d1180dbbf3fc,
0x2f885e584e04aa99,
0xb69a0fa70aea684a,
0x09584acaa6e062a0,
0x0bc051640145b19b,
],
];
// NB: This is in ROW-major order to support cache-friendly pre-multiplication.
/// Pre-computed 11x11 matrix applied to the state (all elements except the
/// first, hence the `12 - 1` dimensions for a width-12 permutation) before the
/// fast partial rounds.
// NOTE(review): values are assumed to be precomputed Goldilocks field elements
// generated together with the other FAST_PARTIAL_ROUND constants — confirm
// against the constant-generation tooling.
pub const FAST_PARTIAL_ROUND_INITIAL_MATRIX: [[u64; 12 - 1]; 12 - 1] = [
    [
        0x80772dc2645b280b,
        0xdc927721da922cf8,
        0xc1978156516879ad,
        0x90e80c591f48b603,
        0x3a2432625475e3ae,
        0x00a2d4321cca94fe,
        0x77736f524010c932,
        0x904d3f2804a36c54,
        0xbf9b39e28a16f354,
        0x3a1ded54a6cd058b,
        0x42392870da5737cf,
    ],
    [
        0xe796d293a47a64cb,
        0xb124c33152a2421a,
        0x0ee5dc0ce131268a,
        0xa9032a52f930fae6,
        0x7e33ca8c814280de,
        0xad11180f69a8c29e,
        0xc75ac6d5b5a10ff3,
        0xf0674a8dc5a387ec,
        0xb36d43120eaa5e2b,
        0x6f232aab4b533a25,
        0x3a1ded54a6cd058b,
    ],
    [
        0xdcedab70f40718ba,
        0x14a4a64da0b2668f,
        0x4715b8e5ab34653b,
        0x1e8916a99c93a88e,
        0xbba4b5d86b9a3b2c,
        0xe76649f9bd5d5c2e,
        0xaf8e2518a1ece54d,
        0xdcda1344cdca873f,
        0xcd080204256088e5,
        0xb36d43120eaa5e2b,
        0xbf9b39e28a16f354,
    ],
    [
        0xf4a437f2888ae909,
        0xc537d44dc2875403,
        0x7f68007619fd8ba9,
        0xa4911db6a32612da,
        0x2f7e9aade3fdaec1,
        0xe7ffd578da4ea43d,
        0x43a608e7afa6b5c2,
        0xca46546aa99e1575,
        0xdcda1344cdca873f,
        0xf0674a8dc5a387ec,
        0x904d3f2804a36c54,
    ],
    [
        0xf97abba0dffb6c50,
        0x5e40f0c9bb82aab5,
        0x5996a80497e24a6b,
        0x07084430a7307c9a,
        0xad2f570a5b8545aa,
        0xab7f81fef4274770,
        0xcb81f535cf98c9e9,
        0x43a608e7afa6b5c2,
        0xaf8e2518a1ece54d,
        0xc75ac6d5b5a10ff3,
        0x77736f524010c932,
    ],
    [
        0x7f8e41e0b0a6cdff,
        0x4b1ba8d40afca97d,
        0x623708f28fca70e8,
        0xbf150dc4914d380f,
        0xc26a083554767106,
        0x753b8b1126665c22,
        0xab7f81fef4274770,
        0xe7ffd578da4ea43d,
        0xe76649f9bd5d5c2e,
        0xad11180f69a8c29e,
        0x00a2d4321cca94fe,
    ],
    [
        0x726af914971c1374,
        0x1d7f8a2cce1a9d00,
        0x18737784700c75cd,
        0x7fb45d605dd82838,
        0x862361aeab0f9b6e,
        0xc26a083554767106,
        0xad2f570a5b8545aa,
        0x2f7e9aade3fdaec1,
        0xbba4b5d86b9a3b2c,
        0x7e33ca8c814280de,
        0x3a2432625475e3ae,
    ],
    [
        0x64dd936da878404d,
        0x4db9a2ead2bd7262,
        0xbe2e19f6d07f1a83,
        0x02290fe23c20351a,
        0x7fb45d605dd82838,
        0xbf150dc4914d380f,
        0x07084430a7307c9a,
        0xa4911db6a32612da,
        0x1e8916a99c93a88e,
        0xa9032a52f930fae6,
        0x90e80c591f48b603,
    ],
    [
        0x85418a9fef8a9890,
        0xd8a2eb7ef5e707ad,
        0xbfe85ababed2d882,
        0xbe2e19f6d07f1a83,
        0x18737784700c75cd,
        0x623708f28fca70e8,
        0x5996a80497e24a6b,
        0x7f68007619fd8ba9,
        0x4715b8e5ab34653b,
        0x0ee5dc0ce131268a,
        0xc1978156516879ad,
    ],
    [
        0x156048ee7a738154,
        0x91f7562377e81df5,
        0xd8a2eb7ef5e707ad,
        0x4db9a2ead2bd7262,
        0x1d7f8a2cce1a9d00,
        0x4b1ba8d40afca97d,
        0x5e40f0c9bb82aab5,
        0xc537d44dc2875403,
        0x14a4a64da0b2668f,
        0xb124c33152a2421a,
        0xdc927721da922cf8,
    ],
    [
        0xd841e8ef9dde8ba0,
        0x156048ee7a738154,
        0x85418a9fef8a9890,
        0x64dd936da878404d,
        0x726af914971c1374,
        0x7f8e41e0b0a6cdff,
        0xf97abba0dffb6c50,
        0xf4a437f2888ae909,
        0xdcedab70f40718ba,
        0xe796d293a47a64cb,
        0x80772dc2645b280b,
    ],
];
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/keccak/keccak_stark.rs | prover/src/keccak/keccak_stark.rs | use std::marker::PhantomData;
use itertools::Itertools;
use plonky2::field::extension::{Extendable, FieldExtension};
use plonky2::field::packed::PackedField;
use plonky2::field::polynomial::PolynomialValues;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::plonk_common::reduce_with_powers_ext_circuit;
use super::columns::reg_input_limb;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::cross_table_lookup::{Column, Filter};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::keccak::columns::{
reg_a, reg_a_prime, reg_a_prime_prime, reg_a_prime_prime_0_0_bit, reg_a_prime_prime_prime,
reg_b, reg_c, reg_c_prime, reg_output_limb, reg_step, NUM_COLUMNS, TIMESTAMP,
};
use crate::keccak::constants::{rc_value, rc_value_bit};
use crate::keccak::logic::{
andn, andn_gen, andn_gen_circuit, xor, xor3_gen, xor3_gen_circuit, xor_gen, xor_gen_circuit,
};
use crate::stark::Stark;
use crate::util::trace_rows_to_poly_values;
/// Number of rounds in a Keccak permutation.
pub(crate) const NUM_ROUNDS: usize = 24;
/// Number of 64-bit elements in the Keccak permutation input.
/// (The Keccak-f[1600] state is a 5x5 grid of 64-bit lanes.)
pub(crate) const NUM_INPUTS: usize = 25;
/// Columns exposed to the cross-table lookup for permutation inputs:
/// all `2 * NUM_INPUTS` input limbs, followed by the timestamp.
pub fn ctl_data_inputs<F: Field>() -> Vec<Column<F>> {
    (0..2 * NUM_INPUTS)
        .map(reg_input_limb)
        .chain(std::iter::once(Column::single(TIMESTAMP)))
        .collect()
}
/// Columns exposed to the cross-table lookup for permutation outputs:
/// all `2 * NUM_INPUTS` output limbs, followed by the timestamp.
pub fn ctl_data_outputs<F: Field>() -> Vec<Column<F>> {
    Column::singles((0..2 * NUM_INPUTS).map(reg_output_limb))
        .chain(std::iter::once(Column::single(TIMESTAMP)))
        .collect()
}
/// CTL filter selecting input rows: the flag of round 0.
pub fn ctl_filter_inputs<F: Field>() -> Filter<F> {
    let first_round_flag = Column::single(reg_step(0));
    Filter::new_simple(first_round_flag)
}
/// CTL filter selecting output rows: the flag of the final round.
pub fn ctl_filter_outputs<F: Field>() -> Filter<F> {
    let last_round_flag = Column::single(reg_step(NUM_ROUNDS - 1));
    Filter::new_simple(last_round_flag)
}
/// STARK proving the Keccak-f[1600] permutation, one trace row per round.
#[derive(Copy, Clone, Default)]
pub struct KeccakStark<F, const D: usize> {
    // Zero-sized marker tying the stark to its field type `F`.
    pub(crate) f: PhantomData<F>,
}
impl<F: RichField + Extendable<D>, const D: usize> KeccakStark<F, D> {
    /// Generate the rows of the trace. Note that this does not generate the permuted columns used
    /// in our lookup arguments, as those are computed after transposing to column-wise form.
    ///
    /// The result is padded with all-zero rows up to
    /// `max(inputs * NUM_ROUNDS, min_rows).next_power_of_two()` rows.
    fn generate_trace_rows(
        &self,
        inputs_and_timestamps: Vec<([u64; NUM_INPUTS], usize)>,
        min_rows: usize,
    ) -> Vec<[F; NUM_COLUMNS]> {
        let num_rows = (inputs_and_timestamps.len() * NUM_ROUNDS)
            .max(min_rows)
            .next_power_of_two();
        let mut rows = Vec::with_capacity(num_rows);
        for input_and_timestamp in inputs_and_timestamps.iter() {
            let rows_for_perm = self.generate_trace_rows_for_perm(*input_and_timestamp);
            rows.extend(rows_for_perm);
        }
        // Pad with all-zero rows; their round flags are all zero, which the
        // constraints recognize as padding.
        while rows.len() < num_rows {
            rows.push([F::ZERO; NUM_COLUMNS]);
        }
        rows
    }

    /// Generate the `NUM_ROUNDS` rows for a single permutation of `input`,
    /// tagged with `timestamp`.
    fn generate_trace_rows_for_perm(
        &self,
        input_and_timestamp: ([u64; NUM_INPUTS], usize),
    ) -> Vec<[F; NUM_COLUMNS]> {
        let mut rows = vec![[F::ZERO; NUM_COLUMNS]; NUM_ROUNDS];
        let input = input_and_timestamp.0;
        let timestamp = input_and_timestamp.1;
        // Set the timestamp of the current input.
        // It will be checked against the value in `KeccakSponge`.
        // The timestamp is used to link the input and output of
        // the same permutation together.
        for round in 0..NUM_ROUNDS {
            rows[round][TIMESTAMP] = F::from_canonical_usize(timestamp);
        }
        // Populate the round input for the first round, splitting each 64-bit
        // lane into a low and a high 32-bit limb.
        for x in 0..5 {
            for y in 0..5 {
                let input_xy = input[y * 5 + x];
                let reg_lo = reg_a(x, y);
                let reg_hi = reg_lo + 1;
                rows[0][reg_lo] = F::from_canonical_u64(input_xy & 0xFFFFFFFF);
                rows[0][reg_hi] = F::from_canonical_u64(input_xy >> 32);
            }
        }
        self.generate_trace_row_for_round(&mut rows[0], 0);
        for round in 1..NUM_ROUNDS {
            // Each round's input is the previous round's output.
            self.copy_output_to_input(rows[round - 1], &mut rows[round]);
            self.generate_trace_row_for_round(&mut rows[round], round);
        }
        rows
    }

    /// Copy the `A'''` output limbs of `prev_row` into the `A` input limbs of `next_row`.
    fn copy_output_to_input(&self, prev_row: [F; NUM_COLUMNS], next_row: &mut [F; NUM_COLUMNS]) {
        for x in 0..5 {
            for y in 0..5 {
                let in_lo = reg_a(x, y);
                let in_hi = in_lo + 1;
                let out_lo = reg_a_prime_prime_prime(x, y);
                let out_hi = out_lo + 1;
                next_row[in_lo] = prev_row[out_lo];
                next_row[in_hi] = prev_row[out_hi];
            }
        }
    }

    /// Fill in all intermediate wires (C, C', A', A'', A''') of `row` for the given
    /// `round`, assuming its `A` input limbs are already populated.
    fn generate_trace_row_for_round(&self, row: &mut [F; NUM_COLUMNS], round: usize) {
        row[reg_step(round)] = F::ONE;
        // Populate C[x] = xor(A[x, 0], A[x, 1], A[x, 2], A[x, 3], A[x, 4]).
        for x in 0..5 {
            for z in 0..64 {
                // Bits 0..32 live in the low limb, bits 32..64 in the high limb.
                let is_high_limb = z / 32;
                let bit_in_limb = z % 32;
                let a = [0, 1, 2, 3, 4].map(|i| {
                    let reg_a_limb = reg_a(x, i) + is_high_limb;
                    let a_limb = row[reg_a_limb].to_canonical_u64() as u32;
                    F::from_bool(((a_limb >> bit_in_limb) & 1) != 0)
                });
                row[reg_c(x, z)] = xor(a);
            }
        }
        // Populate C'[x, z] = xor(C[x, z], C[x - 1, z], C[x + 1, z - 1]).
        for x in 0..5 {
            for z in 0..64 {
                row[reg_c_prime(x, z)] = xor([
                    row[reg_c(x, z)],
                    row[reg_c((x + 4) % 5, z)],
                    row[reg_c((x + 1) % 5, (z + 63) % 64)],
                ]);
            }
        }
        // Populate A'. To avoid shifting indices, we rewrite
        //     A'[x, y, z] = xor(A[x, y, z], C[x - 1, z], C[x + 1, z - 1])
        // as
        //     A'[x, y, z] = xor(A[x, y, z], C[x, z], C'[x, z]).
        for x in 0..5 {
            for y in 0..5 {
                for z in 0..64 {
                    let is_high_limb = z / 32;
                    let bit_in_limb = z % 32;
                    let reg_a_limb = reg_a(x, y) + is_high_limb;
                    let a_limb = row[reg_a_limb].to_canonical_u64() as u32;
                    let a_bit = F::from_bool(((a_limb >> bit_in_limb) & 1) != 0);
                    row[reg_a_prime(x, y, z)] =
                        xor([a_bit, row[reg_c(x, z)], row[reg_c_prime(x, z)]]);
                }
            }
        }
        // Populate A''.
        // A''[x, y] = xor(B[x, y], andn(B[x + 1, y], B[x + 2, y])).
        for x in 0..5 {
            for y in 0..5 {
                let get_bit = |z| {
                    xor([
                        row[reg_b(x, y, z)],
                        andn(row[reg_b((x + 1) % 5, y, z)], row[reg_b((x + 2) % 5, y, z)]),
                    ])
                };
                // Recombine the 64 bits into two 32-bit limbs (little-endian bit order).
                let lo = (0..32)
                    .rev()
                    .fold(F::ZERO, |acc, z| acc.double() + get_bit(z));
                let hi = (32..64)
                    .rev()
                    .fold(F::ZERO, |acc, z| acc.double() + get_bit(z));
                let reg_lo = reg_a_prime_prime(x, y);
                let reg_hi = reg_lo + 1;
                row[reg_lo] = lo;
                row[reg_hi] = hi;
            }
        }
        // For the XOR, we split A''[0, 0] to bits.
        let val_lo = row[reg_a_prime_prime(0, 0)].to_canonical_u64();
        let val_hi = row[reg_a_prime_prime(0, 0) + 1].to_canonical_u64();
        let val = val_lo | (val_hi << 32);
        let bit_values: Vec<u64> = (0..64)
            .scan(val, |acc, _| {
                let tmp = *acc & 1;
                *acc >>= 1;
                Some(tmp)
            })
            .collect();
        for i in 0..64 {
            row[reg_a_prime_prime_0_0_bit(i)] = F::from_canonical_u64(bit_values[i]);
        }
        // A''[0, 0] is additionally xor'd with RC.
        let in_reg_lo = reg_a_prime_prime(0, 0);
        let in_reg_hi = in_reg_lo + 1;
        let out_reg_lo = reg_a_prime_prime_prime(0, 0);
        let out_reg_hi = out_reg_lo + 1;
        let rc_lo = rc_value(round) & ((1 << 32) - 1);
        let rc_hi = rc_value(round) >> 32;
        row[out_reg_lo] = F::from_canonical_u64(row[in_reg_lo].to_canonical_u64() ^ rc_lo);
        row[out_reg_hi] = F::from_canonical_u64(row[in_reg_hi].to_canonical_u64() ^ rc_hi);
    }

    /// Generate the full trace and convert it to column-major polynomial values.
    pub fn generate_trace(
        &self,
        inputs: Vec<([u64; NUM_INPUTS], usize)>,
        min_rows: usize,
    ) -> Vec<PolynomialValues<F>> {
        // Generate the witness, except for permuted columns in the lookup argument.
        let trace_rows = self.generate_trace_rows(inputs, min_rows);
        trace_rows_to_poly_values(trace_rows)
    }
}
impl<F: RichField + Extendable<D>, const D: usize> Stark<F, D> for KeccakStark<F, D> {
    type EvaluationFrame<FE, P, const D2: usize>
        = StarkFrame<P, NUM_COLUMNS>
    where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>;
    type EvaluationFrameTarget = StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>;
    /// Evaluate all Keccak round constraints on a window of two consecutive rows,
    /// over packed base-field values.
    fn eval_packed_generic<FE, P, const D2: usize>(
        &self,
        vars: &Self::EvaluationFrame<FE, P, D2>,
        yield_constr: &mut ConstraintConsumer<P>,
    ) where
        FE: FieldExtension<D2, BaseField = F>,
        P: PackedField<Scalar = FE>,
    {
        //eval_round_flags(vars, yield_constr);
        let local_values = vars.get_local_values();
        let next_values = vars.get_next_values();
        // The filter must be 0 or 1.
        let filter = local_values[reg_step(NUM_ROUNDS - 1)];
        yield_constr.constraint(filter * (filter - P::ONES));
        // If this is not the final step, the filter must be off.
        let final_step = local_values[reg_step(NUM_ROUNDS - 1)];
        let not_final_step = P::ONES - final_step;
        yield_constr.constraint(not_final_step * filter);
        // If this is not the final step or a padding row,
        // the local and next timestamps must match.
        // `sum_round_flags` is 0 exactly on padding rows.
        let sum_round_flags = (0..NUM_ROUNDS)
            .map(|i| local_values[reg_step(i)])
            .sum::<P>();
        yield_constr.constraint(
            sum_round_flags * not_final_step * (next_values[TIMESTAMP] - local_values[TIMESTAMP]),
        );
        // C'[x, z] = xor(C[x, z], C[x - 1, z], C[x + 1, z - 1]).
        for x in 0..5 {
            for z in 0..64 {
                let xor = xor3_gen(
                    local_values[reg_c(x, z)],
                    local_values[reg_c((x + 4) % 5, z)],
                    local_values[reg_c((x + 1) % 5, (z + 63) % 64)],
                );
                let c_prime = local_values[reg_c_prime(x, z)];
                yield_constr.constraint(c_prime - xor);
            }
        }
        // Check that the input limbs are consistent with A' and D.
        // A[x, y, z] = xor(A'[x, y, z], D[x, y, z])
        //            = xor(A'[x, y, z], C[x - 1, z], C[x + 1, z - 1])
        //            = xor(A'[x, y, z], C[x, z], C'[x, z]).
        // The last step is valid based on the identity we checked above.
        // It isn't required, but makes this check a bit cleaner.
        for x in 0..5 {
            for y in 0..5 {
                let a_lo = local_values[reg_a(x, y)];
                let a_hi = local_values[reg_a(x, y) + 1];
                let get_bit = |z| {
                    let a_prime = local_values[reg_a_prime(x, y, z)];
                    let c = local_values[reg_c(x, z)];
                    let c_prime = local_values[reg_c_prime(x, z)];
                    xor3_gen(a_prime, c, c_prime)
                };
                // Recombine the reconstructed bits into the two 32-bit limbs.
                let computed_lo = (0..32)
                    .rev()
                    .fold(P::ZEROS, |acc, z| acc.doubles() + get_bit(z));
                let computed_hi = (32..64)
                    .rev()
                    .fold(P::ZEROS, |acc, z| acc.doubles() + get_bit(z));
                yield_constr.constraint(computed_lo - a_lo);
                yield_constr.constraint(computed_hi - a_hi);
            }
        }
        // xor_{i=0}^4 A'[x, i, z] = C'[x, z], so for each x, z,
        // diff * (diff - 2) * (diff - 4) = 0, where
        // diff = sum_{i=0}^4 A'[x, i, z] - C'[x, z]
        for x in 0..5 {
            for z in 0..64 {
                let sum: P = [0, 1, 2, 3, 4]
                    .map(|i| local_values[reg_a_prime(x, i, z)])
                    .into_iter()
                    .sum();
                let diff = sum - local_values[reg_c_prime(x, z)];
                yield_constr
                    .constraint(diff * (diff - FE::TWO) * (diff - FE::from_canonical_u8(4)));
            }
        }
        // A''[x, y] = xor(B[x, y], andn(B[x + 1, y], B[x + 2, y])).
        for x in 0..5 {
            for y in 0..5 {
                let get_bit = |z| {
                    xor_gen(
                        local_values[reg_b(x, y, z)],
                        andn_gen(
                            local_values[reg_b((x + 1) % 5, y, z)],
                            local_values[reg_b((x + 2) % 5, y, z)],
                        ),
                    )
                };
                let reg_lo = reg_a_prime_prime(x, y);
                let reg_hi = reg_lo + 1;
                let lo = local_values[reg_lo];
                let hi = local_values[reg_hi];
                let computed_lo = (0..32)
                    .rev()
                    .fold(P::ZEROS, |acc, z| acc.doubles() + get_bit(z));
                let computed_hi = (32..64)
                    .rev()
                    .fold(P::ZEROS, |acc, z| acc.doubles() + get_bit(z));
                yield_constr.constraint(computed_lo - lo);
                yield_constr.constraint(computed_hi - hi);
            }
        }
        // A'''[0, 0] = A''[0, 0] XOR RC
        // First check that the bit decomposition of A''[0, 0] matches its limbs.
        let a_prime_prime_0_0_bits = (0..64)
            .map(|i| local_values[reg_a_prime_prime_0_0_bit(i)])
            .collect_vec();
        let computed_a_prime_prime_0_0_lo = (0..32)
            .rev()
            .fold(P::ZEROS, |acc, z| acc.doubles() + a_prime_prime_0_0_bits[z]);
        let computed_a_prime_prime_0_0_hi = (32..64)
            .rev()
            .fold(P::ZEROS, |acc, z| acc.doubles() + a_prime_prime_0_0_bits[z]);
        let a_prime_prime_0_0_lo = local_values[reg_a_prime_prime(0, 0)];
        let a_prime_prime_0_0_hi = local_values[reg_a_prime_prime(0, 0) + 1];
        yield_constr.constraint(computed_a_prime_prime_0_0_lo - a_prime_prime_0_0_lo);
        yield_constr.constraint(computed_a_prime_prime_0_0_hi - a_prime_prime_0_0_hi);
        // The round constant bit is selected by the one-hot round flags.
        let get_xored_bit = |i| {
            let mut rc_bit_i = P::ZEROS;
            for r in 0..NUM_ROUNDS {
                let this_round = local_values[reg_step(r)];
                let this_round_constant =
                    P::from(FE::from_canonical_u32(rc_value_bit(r, i) as u32));
                rc_bit_i += this_round * this_round_constant;
            }
            xor_gen(a_prime_prime_0_0_bits[i], rc_bit_i)
        };
        let a_prime_prime_prime_0_0_lo = local_values[reg_a_prime_prime_prime(0, 0)];
        let a_prime_prime_prime_0_0_hi = local_values[reg_a_prime_prime_prime(0, 0) + 1];
        let computed_a_prime_prime_prime_0_0_lo = (0..32)
            .rev()
            .fold(P::ZEROS, |acc, z| acc.doubles() + get_xored_bit(z));
        let computed_a_prime_prime_prime_0_0_hi = (32..64)
            .rev()
            .fold(P::ZEROS, |acc, z| acc.doubles() + get_xored_bit(z));
        yield_constr.constraint(computed_a_prime_prime_prime_0_0_lo - a_prime_prime_prime_0_0_lo);
        yield_constr.constraint(computed_a_prime_prime_prime_0_0_hi - a_prime_prime_prime_0_0_hi);
        // Enforce that this round's output equals the next round's input.
        for x in 0..5 {
            for y in 0..5 {
                let output_lo = local_values[reg_a_prime_prime_prime(x, y)];
                let output_hi = local_values[reg_a_prime_prime_prime(x, y) + 1];
                let input_lo = next_values[reg_a(x, y)];
                let input_hi = next_values[reg_a(x, y) + 1];
                let is_last_round = local_values[reg_step(NUM_ROUNDS - 1)];
                let not_last_round = P::ONES - is_last_round;
                yield_constr.constraint_transition(not_last_round * (output_lo - input_lo));
                yield_constr.constraint_transition(not_last_round * (output_hi - input_hi));
            }
        }
    }
    /// In-circuit counterpart of `eval_packed_generic`, building the same
    /// constraints in the same order with `CircuitBuilder` gates.
    fn eval_ext_circuit(
        &self,
        builder: &mut plonky2::plonk::circuit_builder::CircuitBuilder<F, D>,
        vars: &Self::EvaluationFrameTarget,
        yield_constr: &mut RecursiveConstraintConsumer<F, D>,
    ) {
        let one_ext = builder.one_extension();
        let two = builder.two();
        let two_ext = builder.two_extension();
        let four_ext = builder.constant_extension(F::Extension::from_canonical_u8(4));
        //eval_round_flags_recursively(builder, vars, yield_constr);
        let local_values = vars.get_local_values();
        let next_values = vars.get_next_values();
        // The filter must be 0 or 1.
        // `mul_sub_extension(a, b, c)` computes a * b - c, so this is filter * (filter - 1).
        let filter = local_values[reg_step(NUM_ROUNDS - 1)];
        let constraint = builder.mul_sub_extension(filter, filter, filter);
        yield_constr.constraint(builder, constraint);
        // If this is not the final step, the filter must be off.
        let final_step = local_values[reg_step(NUM_ROUNDS - 1)];
        let not_final_step = builder.sub_extension(one_ext, final_step);
        let constraint = builder.mul_extension(not_final_step, filter);
        yield_constr.constraint(builder, constraint);
        // If this is not the final step or a padding row,
        // the local and next timestamps must match.
        let sum_round_flags =
            builder.add_many_extension((0..NUM_ROUNDS).map(|i| local_values[reg_step(i)]));
        let diff = builder.sub_extension(next_values[TIMESTAMP], local_values[TIMESTAMP]);
        let constr = builder.mul_many_extension([sum_round_flags, not_final_step, diff]);
        yield_constr.constraint(builder, constr);
        // C'[x, z] = xor(C[x, z], C[x - 1, z], C[x + 1, z - 1]).
        for x in 0..5 {
            for z in 0..64 {
                let xor = xor3_gen_circuit(
                    builder,
                    local_values[reg_c(x, z)],
                    local_values[reg_c((x + 4) % 5, z)],
                    local_values[reg_c((x + 1) % 5, (z + 63) % 64)],
                );
                let c_prime = local_values[reg_c_prime(x, z)];
                let diff = builder.sub_extension(c_prime, xor);
                yield_constr.constraint(builder, diff);
            }
        }
        // Check that the input limbs are consistent with A' and D.
        // A[x, y, z] = xor(A'[x, y, z], D[x, y, z])
        //            = xor(A'[x, y, z], C[x - 1, z], C[x + 1, z - 1])
        //            = xor(A'[x, y, z], C[x, z], C'[x, z]).
        // The last step is valid based on the identity we checked above.
        // It isn't required, but makes this check a bit cleaner.
        for x in 0..5 {
            for y in 0..5 {
                let a_lo = local_values[reg_a(x, y)];
                let a_hi = local_values[reg_a(x, y) + 1];
                let mut get_bit = |z| {
                    let a_prime = local_values[reg_a_prime(x, y, z)];
                    let c = local_values[reg_c(x, z)];
                    let c_prime = local_values[reg_c_prime(x, z)];
                    xor3_gen_circuit(builder, a_prime, c, c_prime)
                };
                let bits_lo = (0..32).map(&mut get_bit).collect_vec();
                let bits_hi = (32..64).map(get_bit).collect_vec();
                // Recombine bits into limbs via a base-2 weighted sum.
                let computed_lo = reduce_with_powers_ext_circuit(builder, &bits_lo, two);
                let computed_hi = reduce_with_powers_ext_circuit(builder, &bits_hi, two);
                let diff = builder.sub_extension(computed_lo, a_lo);
                yield_constr.constraint(builder, diff);
                let diff = builder.sub_extension(computed_hi, a_hi);
                yield_constr.constraint(builder, diff);
            }
        }
        // xor_{i=0}^4 A'[x, i, z] = C'[x, z], so for each x, z,
        // diff * (diff - 2) * (diff - 4) = 0, where
        // diff = sum_{i=0}^4 A'[x, i, z] - C'[x, z]
        for x in 0..5 {
            for z in 0..64 {
                let sum = builder.add_many_extension(
                    [0, 1, 2, 3, 4].map(|i| local_values[reg_a_prime(x, i, z)]),
                );
                let diff = builder.sub_extension(sum, local_values[reg_c_prime(x, z)]);
                let diff_minus_two = builder.sub_extension(diff, two_ext);
                let diff_minus_four = builder.sub_extension(diff, four_ext);
                let constraint =
                    builder.mul_many_extension([diff, diff_minus_two, diff_minus_four]);
                yield_constr.constraint(builder, constraint);
            }
        }
        // A''[x, y] = xor(B[x, y], andn(B[x + 1, y], B[x + 2, y])).
        for x in 0..5 {
            for y in 0..5 {
                let mut get_bit = |z| {
                    let andn = andn_gen_circuit(
                        builder,
                        local_values[reg_b((x + 1) % 5, y, z)],
                        local_values[reg_b((x + 2) % 5, y, z)],
                    );
                    xor_gen_circuit(builder, local_values[reg_b(x, y, z)], andn)
                };
                let reg_lo = reg_a_prime_prime(x, y);
                let reg_hi = reg_lo + 1;
                let lo = local_values[reg_lo];
                let hi = local_values[reg_hi];
                let bits_lo = (0..32).map(&mut get_bit).collect_vec();
                let bits_hi = (32..64).map(get_bit).collect_vec();
                let computed_lo = reduce_with_powers_ext_circuit(builder, &bits_lo, two);
                let computed_hi = reduce_with_powers_ext_circuit(builder, &bits_hi, two);
                let diff = builder.sub_extension(computed_lo, lo);
                yield_constr.constraint(builder, diff);
                let diff = builder.sub_extension(computed_hi, hi);
                yield_constr.constraint(builder, diff);
            }
        }
        // A'''[0, 0] = A''[0, 0] XOR RC
        // First check that the bit decomposition of A''[0, 0] matches its limbs.
        let a_prime_prime_0_0_bits = (0..64)
            .map(|i| local_values[reg_a_prime_prime_0_0_bit(i)])
            .collect_vec();
        let computed_a_prime_prime_0_0_lo =
            reduce_with_powers_ext_circuit(builder, &a_prime_prime_0_0_bits[0..32], two);
        let computed_a_prime_prime_0_0_hi =
            reduce_with_powers_ext_circuit(builder, &a_prime_prime_0_0_bits[32..64], two);
        let a_prime_prime_0_0_lo = local_values[reg_a_prime_prime(0, 0)];
        let a_prime_prime_0_0_hi = local_values[reg_a_prime_prime(0, 0) + 1];
        let diff = builder.sub_extension(computed_a_prime_prime_0_0_lo, a_prime_prime_0_0_lo);
        yield_constr.constraint(builder, diff);
        let diff = builder.sub_extension(computed_a_prime_prime_0_0_hi, a_prime_prime_0_0_hi);
        yield_constr.constraint(builder, diff);
        // The round constant bit is selected by the one-hot round flags.
        let mut get_xored_bit = |i| {
            let mut rc_bit_i = builder.zero_extension();
            for r in 0..NUM_ROUNDS {
                let this_round = local_values[reg_step(r)];
                let this_round_constant = builder
                    .constant_extension(F::from_canonical_u32(rc_value_bit(r, i) as u32).into());
                rc_bit_i = builder.mul_add_extension(this_round, this_round_constant, rc_bit_i);
            }
            xor_gen_circuit(builder, a_prime_prime_0_0_bits[i], rc_bit_i)
        };
        let a_prime_prime_prime_0_0_lo = local_values[reg_a_prime_prime_prime(0, 0)];
        let a_prime_prime_prime_0_0_hi = local_values[reg_a_prime_prime_prime(0, 0) + 1];
        let bits_lo = (0..32).map(&mut get_xored_bit).collect_vec();
        let bits_hi = (32..64).map(get_xored_bit).collect_vec();
        let computed_a_prime_prime_prime_0_0_lo =
            reduce_with_powers_ext_circuit(builder, &bits_lo, two);
        let computed_a_prime_prime_prime_0_0_hi =
            reduce_with_powers_ext_circuit(builder, &bits_hi, two);
        let diff = builder.sub_extension(
            computed_a_prime_prime_prime_0_0_lo,
            a_prime_prime_prime_0_0_lo,
        );
        yield_constr.constraint(builder, diff);
        let diff = builder.sub_extension(
            computed_a_prime_prime_prime_0_0_hi,
            a_prime_prime_prime_0_0_hi,
        );
        yield_constr.constraint(builder, diff);
        // Enforce that this round's output equals the next round's input.
        for x in 0..5 {
            for y in 0..5 {
                let output_lo = local_values[reg_a_prime_prime_prime(x, y)];
                let output_hi = local_values[reg_a_prime_prime_prime(x, y) + 1];
                let input_lo = next_values[reg_a(x, y)];
                let input_hi = next_values[reg_a(x, y) + 1];
                let is_last_round = local_values[reg_step(NUM_ROUNDS - 1)];
                // is_last_round * diff - diff = -(1 - is_last_round) * diff, which is
                // zero exactly when the packed-version constraint is satisfied.
                let diff = builder.sub_extension(input_lo, output_lo);
                let filtered_diff = builder.mul_sub_extension(is_last_round, diff, diff);
                yield_constr.constraint_transition(builder, filtered_diff);
                let diff = builder.sub_extension(input_hi, output_hi);
                let filtered_diff = builder.mul_sub_extension(is_last_round, diff, diff);
                yield_constr.constraint_transition(builder, filtered_diff);
            }
        }
    }
    /// Maximum degree of any constraint above, e.g. `diff * (diff - 2) * (diff - 4)`.
    fn constraint_degree(&self) -> usize {
        3
    }
}
#[cfg(test)]
mod tests {
    use anyhow::Result;
    use env_logger::{try_init_from_env, Env, DEFAULT_FILTER_ENV};
    use plonky2::field::polynomial::PolynomialValues;
    use plonky2::field::types::{Field, PrimeField64};
    use plonky2::fri::oracle::PolynomialBatch;
    use plonky2::iop::challenger::Challenger;
    use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
    use plonky2::timed;
    use plonky2::util::timing::TimingTree;
    use tiny_keccak::keccakf;
    use crate::config::StarkConfig;
    use crate::cross_table_lookup::{
        Column, CtlData, CtlZData, Filter, GrandProductChallenge, GrandProductChallengeSet,
    };
    use crate::keccak::columns::reg_output_limb;
    use crate::keccak::keccak_stark::{KeccakStark, NUM_INPUTS, NUM_ROUNDS};
    use crate::prover::prove_single_table;
    use crate::stark_testing::{test_stark_circuit_constraints, test_stark_low_degree};
    // Checks that the constraint polynomials stay within the declared degree bound.
    #[test]
    fn test_stark_degree() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = KeccakStark<F, D>;
        let stark = S {
            f: Default::default(),
        };
        test_stark_low_degree(stark)
    }
    // Checks that the recursive (circuit) constraints agree with the packed ones.
    #[test]
    fn test_stark_circuit() -> Result<()> {
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = KeccakStark<F, D>;
        let stark = S {
            f: Default::default(),
        };
        test_stark_circuit_constraints::<F, C, S, D>(stark)
    }
    // Compares the final-round output columns of a generated trace against the
    // reference `tiny_keccak::keccakf` implementation on a random input.
    #[test]
    fn keccak_correctness_test() -> Result<()> {
        let input: [u64; NUM_INPUTS] = rand::random();
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = KeccakStark<F, D>;
        let stark = S {
            f: Default::default(),
        };
        let rows = stark.generate_trace_rows(vec![(input, 0)], 8);
        let last_row = rows[NUM_ROUNDS - 1];
        // Recombine each lane from its low/high 32-bit limb columns.
        let output = (0..NUM_INPUTS)
            .map(|i| {
                let hi = last_row[reg_output_limb(2 * i + 1)].to_canonical_u64();
                let lo = last_row[reg_output_limb(2 * i)].to_canonical_u64();
                (hi << 32) | lo
            })
            .collect::<Vec<_>>();
        let expected = {
            let mut state = input;
            keccakf(&mut state);
            state
        };
        assert_eq!(output, expected);
        Ok(())
    }
    // End-to-end proving benchmark over NUM_PERMS random permutations, using
    // fake (all-zero) cross-table-lookup data.
    #[test]
    fn keccak_benchmark() -> Result<()> {
        const NUM_PERMS: usize = 85;
        const D: usize = 2;
        type C = PoseidonGoldilocksConfig;
        type F = <C as GenericConfig<D>>::F;
        type S = KeccakStark<F, D>;
        let stark = S::default();
        let config = StarkConfig::standard_fast_config();
        init_logger();
        let input: Vec<([u64; NUM_INPUTS], usize)> =
            (0..NUM_PERMS).map(|_| (rand::random(), 0)).collect();
        let mut timing = TimingTree::new("prove", log::Level::Debug);
        let trace_poly_values = stark.generate_trace(input, 8);
        // TODO: Cloning this isn't great; consider having `from_values` accept a reference,
        // or having `compute_permutation_z_polys` read trace values from the `PolynomialBatch`.
        let cloned_trace_poly_values = timed!(timing, "clone", trace_poly_values.clone());
        let trace_commitments = timed!(
            timing,
            "compute trace commitment",
            PolynomialBatch::<F, C, D>::from_values(
                cloned_trace_poly_values,
                config.fri_config.rate_bits,
                false,
                config.fri_config.cap_height,
                &mut timing,
                None,
            )
        );
        let degree = 1 << trace_commitments.degree_log;
        // Fake CTL data.
        let ctl_z_data = CtlZData {
            helper_columns: vec![PolynomialValues::zero(degree)],
            z: PolynomialValues::zero(degree),
            challenge: GrandProductChallenge {
                beta: F::ZERO,
                gamma: F::ZERO,
            },
            columns: vec![],
            filter: vec![Some(Filter::new_simple(Column::constant(F::ZERO)))],
        };
        let ctl_data = CtlData {
            zs_columns: vec![ctl_z_data.clone(); config.num_challenges],
        };
        prove_single_table(
            &stark,
            &config,
            &trace_poly_values,
            &trace_commitments,
            &ctl_data,
            &GrandProductChallengeSet {
                challenges: vec![ctl_z_data.challenge; config.num_challenges],
            },
            &mut Challenger::new(),
            &mut timing,
        )?;
        timing.print();
        Ok(())
    }
    fn init_logger() {
        let _ = try_init_from_env(Env::default().filter_or(DEFAULT_FILTER_ENV, "debug"));
    }
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/keccak/round_flags.rs | prover/src/keccak/round_flags.rs | use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
use plonky2::field::types::Field;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
use crate::constraint_consumer::{ConstraintConsumer, RecursiveConstraintConsumer};
use crate::evaluation_frame::{StarkEvaluationFrame, StarkFrame};
use crate::keccak::columns::{reg_step, NUM_COLUMNS};
use crate::keccak::keccak_stark::NUM_ROUNDS;
/// Constraints ensuring the one-hot round flags are well-formed: the first row
/// starts at round 0, flags rotate by one each row, and once a padding row
/// (all flags zero) appears, every following row is padding too.
pub(crate) fn eval_round_flags<F: Field, P: PackedField<Scalar = F>>(
    vars: &StarkFrame<P, NUM_COLUMNS>,
    yield_constr: &mut ConstraintConsumer<P>,
) {
    let local_values = vars.get_local_values();
    let next_values = vars.get_next_values();
    // On the first row, round 0 must be active and every other round inactive.
    yield_constr.constraint_first_row(local_values[reg_step(0)] - F::ONE);
    for round in 1..NUM_ROUNDS {
        yield_constr.constraint_first_row(local_values[reg_step(round)]);
    }
    // `next_any_flag` is zero exactly on padding rows.
    let next_any_flag = (0..NUM_ROUNDS)
        .map(|round| next_values[reg_step(round)])
        .sum::<P>();
    // Unless the next row is padding, its flags are the current flags rotated by one.
    for round in 0..NUM_ROUNDS {
        let rotated_flag = next_values[reg_step((round + 1) % NUM_ROUNDS)];
        yield_constr
            .constraint_transition(next_any_flag * (rotated_flag - local_values[reg_step(round)]));
    }
    // A padding row can only be followed by another padding row.
    let current_any_flag = (0..NUM_ROUNDS)
        .map(|round| local_values[reg_step(round)])
        .sum::<P>();
    yield_constr.constraint_transition(next_any_flag * (current_any_flag - F::ONE));
}
/// Circuit version of `eval_round_flags`, emitting the same constraints with
/// `CircuitBuilder` operations, in the same order.
pub(crate) fn eval_round_flags_recursively<F: RichField + Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    vars: &StarkFrame<ExtensionTarget<D>, NUM_COLUMNS>,
    yield_constr: &mut RecursiveConstraintConsumer<F, D>,
) {
    let one_ext = builder.one_extension();
    let local_values = vars.get_local_values();
    let next_values = vars.get_next_values();
    // First row: the round-0 flag is 1 and every other flag is 0.
    let first_flag_minus_one = builder.sub_extension(local_values[reg_step(0)], one_ext);
    yield_constr.constraint_first_row(builder, first_flag_minus_one);
    for round in 1..NUM_ROUNDS {
        yield_constr.constraint_first_row(builder, local_values[reg_step(round)]);
    }
    // `next_any_flag` is zero exactly on padding rows.
    let next_any_flag =
        builder.add_many_extension((0..NUM_ROUNDS).map(|round| next_values[reg_step(round)]));
    // Unless the next row is padding, its flags are the current flags rotated by one.
    for round in 0..NUM_ROUNDS {
        let rotated_flag = next_values[reg_step((round + 1) % NUM_ROUNDS)];
        let delta = builder.sub_extension(rotated_flag, local_values[reg_step(round)]);
        let gated = builder.mul_extension(next_any_flag, delta);
        yield_constr.constraint_transition(builder, gated);
    }
    // A padding row can only be followed by another padding row:
    // next_any_flag * (current_any_flag - 1) == 0.
    let current_any_flag =
        builder.add_many_extension((0..NUM_ROUNDS).map(|round| local_values[reg_step(round)]));
    let constraint = builder.mul_sub_extension(next_any_flag, current_any_flag, next_any_flag);
    yield_constr.constraint_transition(builder, constraint);
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/keccak/columns.rs | prover/src/keccak/columns.rs | use plonky2::field::types::Field;
use crate::cross_table_lookup::Column;
use crate::keccak::keccak_stark::{NUM_INPUTS, NUM_ROUNDS};
/// A register which is set to 1 if we are in the `i`th round, otherwise 0.
pub const fn reg_step(i: usize) -> usize {
    debug_assert!(i < NUM_ROUNDS);
    // The round flags occupy the first `NUM_ROUNDS` columns, one per round.
    i
}
/// Registers to hold permutation inputs.
/// `reg_input_limb(2*i) -> input[i] as u32`
/// `reg_input_limb(2*i+1) -> input[i] >> 32`
pub fn reg_input_limb<F: Field>(i: usize) -> Column<F> {
    debug_assert!(i < 2 * NUM_INPUTS);
    // Each 64-bit lane is stored as two consecutive 32-bit limbs.
    let lane = i / 2;
    // The 5x5 state is treated as y-major, as per the Keccak spec.
    let y = lane / 5;
    let x = lane % 5;
    let limb_offset = i % 2; // 0 = low limb, 1 = high limb.
    Column::single(reg_a(x, y) + limb_offset)
}
/// Registers to hold permutation outputs.
/// `reg_output_limb(2*i) -> output[i] as u32`
/// `reg_output_limb(2*i+1) -> output[i] >> 32`
pub const fn reg_output_limb(i: usize) -> usize {
    debug_assert!(i < 2 * NUM_INPUTS);
    // Each 64-bit lane is stored as two consecutive 32-bit limbs.
    let lane = i / 2;
    // The 5x5 state is treated as y-major, as per the Keccak spec.
    let y = lane / 5;
    let x = lane % 5;
    let limb_offset = i % 2; // 0 = low limb, 1 = high limb.
    reg_a_prime_prime_prime(x, y) + limb_offset
}
// Rotation offsets `r[x, y]` (indexed as `R[x][y]`) used by `reg_b` below to
// express the rho-step rotation of each lane.
const R: [[u8; 5]; 5] = [
    [0, 36, 3, 41, 18],
    [1, 44, 10, 45, 2],
    [62, 6, 43, 15, 61],
    [28, 55, 25, 21, 56],
    [27, 20, 39, 8, 14],
];
/// Column holding the timestamp, used to link inputs and outputs
/// in the `KeccakSpongeStark`.
pub(crate) const TIMESTAMP: usize = NUM_ROUNDS;
// First column of the round-input state `A`.
const START_A: usize = TIMESTAMP + 1;
/// Column of the low 32-bit limb of input lane `A[x, y]`; the high limb
/// occupies the next column.
pub(crate) const fn reg_a(x: usize, y: usize) -> usize {
    debug_assert!(x < 5);
    debug_assert!(y < 5);
    START_A + (x * 5 + y) * 2
}
// C[x] = xor(A[x, 0], A[x, 1], A[x, 2], A[x, 3], A[x, 4])
const START_C: usize = START_A + 5 * 5 * 2;
/// Column of bit `z` of the parity value `C[x]`.
pub(crate) const fn reg_c(x: usize, z: usize) -> usize {
    debug_assert!(x < 5);
    debug_assert!(z < 64);
    START_C + x * 64 + z
}
// C'[x, z] = xor(C[x, z], C[x - 1, z], C[x + 1, z - 1])
const START_C_PRIME: usize = START_C + 5 * 64;
/// Column of bit `z` of `C'[x]`.
pub(crate) const fn reg_c_prime(x: usize, z: usize) -> usize {
    debug_assert!(x < 5);
    debug_assert!(z < 64);
    START_C_PRIME + x * 64 + z
}
// Note: D is inlined, not stored in the witness.
// A'[x, y] = xor(A[x, y], D[x])
//          = xor(A[x, y], C[x - 1], ROT(C[x + 1], 1))
const START_A_PRIME: usize = START_C_PRIME + 5 * 64;
/// Column of bit `z` of lane `A'[x, y]`.
pub(crate) const fn reg_a_prime(x: usize, y: usize, z: usize) -> usize {
    debug_assert!(x < 5);
    debug_assert!(y < 5);
    debug_assert!(z < 64);
    // 64 bit-columns per lane, lanes laid out x-major then y.
    let lane = x * 5 + y;
    START_A_PRIME + lane * 64 + z
}
/// Column of bit `z` of `B[x, y]` — an alias into the `A'` registers.
pub(crate) const fn reg_b(x: usize, y: usize, z: usize) -> usize {
    debug_assert!(x < 5);
    debug_assert!(y < 5);
    debug_assert!(z < 64);
    // B is just a rotation of A', so these are aliases for A' registers.
    // From the spec,
    //     B[y, (2x + 3y) % 5] = ROT(A'[x, y], r[x, y])
    // So,
    //     B[x, y] = f((x + 3y) % 5, x)
    // where f(a, b) = ROT(A'[a, b], r[a, b])
    let src_x = (x + 3 * y) % 5;
    let src_y = x;
    let rotation = R[src_x][src_y] as usize;
    // A left-rotation by `rotation` means bit z of B comes from bit
    // (z - rotation) mod 64 of the source lane.
    reg_a_prime(src_x, src_y, (z + 64 - rotation) % 64)
}
// A''[x, y] = xor(B[x, y], andn(B[x + 1, y], B[x + 2, y])).
const START_A_PRIME_PRIME: usize = START_A_PRIME + 5 * 5 * 64;
/// Column of the low 32-bit limb of lane `A''[x, y]`.
pub(crate) const fn reg_a_prime_prime(x: usize, y: usize) -> usize {
    debug_assert!(x < 5);
    debug_assert!(y < 5);
    // Back to two 32-bit limb columns per lane, x-major.
    let lane = x * 5 + y;
    START_A_PRIME_PRIME + 2 * lane
}
// Bit decomposition of A''[0, 0], used so the round constant can be xor'd in
// bitwise (A'''[0, 0] is the only lane that absorbs RC — see below).
const START_A_PRIME_PRIME_0_0_BITS: usize = START_A_PRIME_PRIME + 5 * 5 * 2;
/// Column of bit `i` of `A''[0, 0]`.
pub(crate) const fn reg_a_prime_prime_0_0_bit(i: usize) -> usize {
    debug_assert!(i < 64);
    START_A_PRIME_PRIME_0_0_BITS + i
}
const REG_A_PRIME_PRIME_PRIME_0_0_LO: usize = START_A_PRIME_PRIME_0_0_BITS + 64;
const REG_A_PRIME_PRIME_PRIME_0_0_HI: usize = REG_A_PRIME_PRIME_PRIME_0_0_LO + 1;
// A'''[0, 0] is additionally xor'd with RC.
/// Column of the low 32-bit limb of lane `A'''[x, y]`.
pub(crate) const fn reg_a_prime_prime_prime(x: usize, y: usize) -> usize {
    debug_assert!(x < 5);
    debug_assert!(y < 5);
    // Only lane (0, 0) gets dedicated post-RC columns; every other lane
    // aliases its A'' registers.
    match (x, y) {
        (0, 0) => REG_A_PRIME_PRIME_PRIME_0_0_LO,
        _ => reg_a_prime_prime(x, y),
    }
}
/// Total number of columns in the Keccak STARK trace.
pub(crate) const NUM_COLUMNS: usize = REG_A_PRIME_PRIME_PRIME_0_0_HI + 1;
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/keccak/mod.rs | prover/src/keccak/mod.rs | pub mod columns;
pub mod constants;
pub mod keccak_stark;
pub mod logic;
pub mod round_flags;
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/keccak/constants.rs | prover/src/keccak/constants.rs | const RC: [u64; 24] = [
0x0000000000000001,
0x0000000000008082,
0x800000000000808A,
0x8000000080008000,
0x000000000000808B,
0x0000000080000001,
0x8000000080008081,
0x8000000000008009,
0x000000000000008A,
0x0000000000000088,
0x0000000080008009,
0x000000008000000A,
0x000000008000808B,
0x800000000000008B,
0x8000000000008089,
0x8000000000008003,
0x8000000000008002,
0x8000000000000080,
0x000000000000800A,
0x800000008000000A,
0x8000000080008081,
0x8000000000008080,
0x0000000080000001,
0x8000000080008008,
];
const RC_BITS: [[u8; 64]; 24] = [
[
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
],
[
0, 1, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
],
[
0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
],
[
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
],
[
1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
],
[
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
],
[
1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
],
[
1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
],
[
0, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
],
[
0, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
],
[
1, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
],
[
0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
],
[
1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
],
[
1, 1, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
],
[
1, 0, 0, 1, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
],
[
1, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
],
[
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
],
[
0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
],
[
0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
],
[
0, 1, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
],
[
1, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
],
[
0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
],
[
1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 0,
],
[
0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0,
0, 0, 0, 1,
],
];
/// Returns bit `bit_index` (LSB-first) of the round constant for `round`.
pub(crate) const fn rc_value_bit(round: usize, bit_index: usize) -> u8 {
    RC_BITS[round][bit_index]
}
/// Returns the 64-bit Keccak round constant for `round`.
pub(crate) const fn rc_value(round: usize) -> u64 {
    RC[round]
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/src/keccak/logic.rs | prover/src/keccak/logic.rs | use plonky2::field::extension::Extendable;
use plonky2::field::packed::PackedField;
use plonky2::field::types::PrimeField64;
use plonky2::hash::hash_types::RichField;
use plonky2::iop::ext_target::ExtensionTarget;
use plonky2::plonk::circuit_builder::CircuitBuilder;
/// XORs a list of bit-valued field elements; each element must be 0 or 1.
pub(crate) fn xor<F: PrimeField64, const N: usize>(xs: [F; N]) -> F {
    let mut acc = F::ZERO;
    for x in xs {
        debug_assert!(x.is_zero() || x.is_one());
        // XOR on the canonical u64 representations, then re-embed.
        acc = F::from_canonical_u64(acc.to_canonical_u64() ^ x.to_canonical_u64());
    }
    acc
}
/// Computes the arithmetic generalization of `xor(x, y)`, i.e. `x + y - 2 x y`.
pub(crate) fn xor_gen<P: PackedField>(x: P, y: P) -> P {
    // On {0, 1} inputs this polynomial agrees with boolean XOR.
    let product = x * y;
    x + y - product.doubles()
}
/// Computes the arithmetic generalization of `xor3(x, y, z)`.
pub(crate) fn xor3_gen<P: PackedField>(x: P, y: P, z: P) -> P {
    // The two-input extension x + y - 2xy is a symmetric polynomial and
    // associative over the field, so folding left instead of right is equal:
    // both expand to x + y + z - 2xy - 2xz - 2yz + 4xyz.
    let x_xor_y = xor_gen(x, y);
    xor_gen(x_xor_y, z)
}
/// Computes the arithmetic generalization of `xor(x, y)`, i.e. `x + y - 2 x y`.
pub(crate) fn xor_gen_circuit<F: RichField + Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    x: ExtensionTarget<D>,
    y: ExtensionTarget<D>,
) -> ExtensionTarget<D> {
    let sum = builder.add_extension(x, y);
    // arithmetic_extension(c0, c1, a, b, d) computes c0*a*b + c1*d, so this
    // yields -2*x*y + (x + y).
    builder.arithmetic_extension(-F::TWO, F::ONE, x, y, sum)
}
/// Computes the arithmetic generalization of `xor3(x, y, z)` by applying the
/// two-input form `x + y - 2 x y` twice. (The old doc comment was a
/// copy-paste of the two-input version.)
pub(crate) fn xor3_gen_circuit<F: RichField + Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    x: ExtensionTarget<D>,
    y: ExtensionTarget<D>,
    z: ExtensionTarget<D>,
) -> ExtensionTarget<D> {
    let x_xor_y = xor_gen_circuit(builder, x, y);
    xor_gen_circuit(builder, x_xor_y, z)
}
/// Computes `andn(x, y) = !x & y` for bit-valued field elements.
pub(crate) fn andn<F: PrimeField64>(x: F, y: F) -> F {
    debug_assert!(x.is_zero() || x.is_one());
    debug_assert!(y.is_zero() || y.is_one());
    // Operate on canonical u64 representations, then re-embed the bit.
    let x_bits = x.to_canonical_u64();
    let y_bits = y.to_canonical_u64();
    F::from_canonical_u64(!x_bits & y_bits)
}
/// Arithmetic generalization of `andn(x, y)`, i.e. `(1 - x) * y`.
pub(crate) fn andn_gen<P: PackedField>(x: P, y: P) -> P {
    // (1 - x) * y expands to y - x*y; both forms are exact in the field.
    y - x * y
}
/// Circuit version of `andn_gen`: builds `(1 - x) * y` as `-x*y + y`.
pub(crate) fn andn_gen_circuit<F: RichField + Extendable<D>, const D: usize>(
    builder: &mut CircuitBuilder<F, D>,
    x: ExtensionTarget<D>,
    y: ExtensionTarget<D>,
) -> ExtensionTarget<D> {
    // (1 - x) y = -xy + y
    builder.arithmetic_extension(F::NEG_ONE, F::ONE, x, y, y)
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/tests/simple_recursion.rs | prover/tests/simple_recursion.rs | #![allow(clippy::upper_case_acronyms)]
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::types::Field;
use plonky2::iop::witness::{PartialWitness, WitnessWrite};
use plonky2::plonk::circuit_builder::CircuitBuilder;
use plonky2::plonk::circuit_data::CircuitConfig;
use plonky2::plonk::config::PoseidonGoldilocksConfig;
type F = GoldilocksField;
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
// Builds a tiny Fibonacci-style arithmetic circuit, proves it, then wraps the
// proof in a Groth16-compatible outer circuit and saves it for the verifier.
// (The previous comment about "two transactions with logs" was stale.)
#[test]
fn test_mips_with_aggreg_fibo() -> anyhow::Result<()> {
    use plonky2x::backend::circuit::Groth16WrapperParameters;
    use plonky2x::backend::wrapper::wrap::WrappedCircuit;
    use plonky2x::frontend::builder::CircuitBuilder as WrapperBuilder;
    use plonky2x::prelude::DefaultParameters;
    type InnerParameters = DefaultParameters;
    type OuterParameters = Groth16WrapperParameters;
    env_logger::try_init().unwrap_or_default();
    let config = CircuitConfig::standard_recursion_config();
    let mut builder = CircuitBuilder::<F, D>::new(config);
    // The arithmetic circuit: five Fibonacci steps from (a, b).
    let initial_a = builder.add_virtual_target();
    let initial_b = builder.add_virtual_target();
    let mut prev_target = initial_a;
    let mut cur_target = initial_b;
    for _ in 0..5 {
        let temp = builder.add(prev_target, cur_target);
        prev_target = cur_target;
        cur_target = temp;
    }
    // Public inputs are the two initial values (provided below) and the result (which is generated).
    builder.register_public_input(initial_a);
    builder.register_public_input(initial_b);
    builder.register_public_input(cur_target);
    // Provide initial values.
    let mut pw = PartialWitness::new();
    pw.set_target(initial_a, F::ZERO);
    pw.set_target(initial_b, F::ONE);
    let data = builder.build::<C>();
    let proof = data.prove(pw.clone())?;
    // The loop runs 5 steps, so this is the result after 5 Fibonacci steps —
    // the old message claimed "100th Fibonacci number", copied from an
    // upstream example with a longer loop.
    println!(
        "Fibonacci number after 5 steps (starting with {}, {}) is: {}",
        proof.public_inputs[0], proof.public_inputs[1], proof.public_inputs[2]
    );
    // Propagate verification failure instead of silently discarding it; a
    // test that ignores `verify` proves nothing.
    data.verify(proof.clone())?;
    println!("pw.target_values.len() {:?}", pw.target_values.len());
    println!(
        "proof.public_inputs: {:?},proof.public_inputs.len(): {:?}",
        proof.public_inputs,
        proof.public_inputs.len()
    );
    println!(
        "circuit.data.common.num_public_inputs {:?}",
        data.common.num_public_inputs
    );
    // Wrap the inner proof for Groth16 verification and persist it.
    let builder = WrapperBuilder::<DefaultParameters, 2>::new();
    let mut circuit2 = builder.build();
    circuit2.set_data(data);
    let wrapped_circuit = WrappedCircuit::<InnerParameters, OuterParameters, D>::build(
        circuit2,
        Some((vec![], vec![8, 8, 8])), // bit length of public inputs
    );
    let build_path = "../verifier/data".to_string();
    let path = format!("{}/test_circuit/", build_path);
    let wrapped_proof = wrapped_circuit.prove(&proof).unwrap();
    wrapped_proof.save(path).unwrap();
    Ok(())
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/prove-large-seg/src/main.rs | prover/examples/prove-large-seg/src/main.rs | use std::env;
use zkm_emulator::utils::{get_block_path, split_seg_into_segs};
use zkm_utils::utils;
/// Splits one oversized segment file (SEG_FILE) into SEG_SIZE-step
/// sub-segments under SEG_OUTPUT and proves them all. All configuration
/// comes from environment variables.
fn prove_large_segment() {
    // Default steps per sub-segment. The old code defaulted the env var to
    // "1024" but fell back to 1 on a parse error, silently contradicting the
    // documented default; both paths now use the same constant.
    const DEFAULT_SEG_SIZE: usize = 1024;
    let basedir = env::var("BASEDIR").unwrap_or_else(|_| "/tmp/cannon".to_string());
    let block = env::var("BLOCK_NO").unwrap_or_default();
    let file = env::var("BLOCK_FILE").unwrap_or_default();
    let seg_file = env::var("SEG_FILE").expect("big segment file is missing");
    let seg_dir = env::var("SEG_OUTPUT").expect("segment output dir is missing");
    let seg_size = env::var("SEG_SIZE")
        .ok()
        .and_then(|s| s.parse::<usize>().ok())
        .unwrap_or(DEFAULT_SEG_SIZE);
    let block_path = get_block_path(&basedir, &block, "");
    let (_, seg_num, _) = split_seg_into_segs(&seg_file, &seg_dir, &block_path, seg_size);
    let _ = utils::prove_segments(&seg_dir, &basedir, &block, &file, seg_num, 0, vec![]);
}
/// Entry point: initializes logging (tolerating double-init) and runs the prover.
fn main() {
    env_logger::try_init().unwrap_or_default();
    prove_large_segment();
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/prove-seg/src/main.rs | prover/examples/prove-seg/src/main.rs | use std::env;
use zkm_utils::utils;
/// Proves `SEG_NUM` segment files from `SEG_FILE_DIR`, starting at
/// `SEG_START_ID`. All parameters are read from environment variables.
fn prove_segments() {
    let basedir = env::var("BASEDIR").unwrap_or_else(|_| String::from("/tmp/cannon"));
    let block = env::var("BLOCK_NO").unwrap_or_else(|_| String::new());
    let file = env::var("BLOCK_FILE").unwrap_or_else(|_| String::new());
    let seg_dir = env::var("SEG_FILE_DIR").expect("segment file dir is missing");
    // Both values fall back to their defaults when unset or unparsable.
    let seg_num: usize = env::var("SEG_NUM")
        .unwrap_or_else(|_| "1".to_string())
        .parse()
        .unwrap_or(1);
    let seg_start_id: usize = env::var("SEG_START_ID")
        .unwrap_or_else(|_| "0".to_string())
        .parse()
        .unwrap_or(0);
    let _ = utils::prove_segments(
        &seg_dir,
        &basedir,
        &block,
        &file,
        seg_num,
        seg_start_id,
        vec![],
    );
}
/// Entry point: initializes logging (tolerating double-init) and runs the prover.
fn main() {
    env_logger::try_init().unwrap_or_default();
    prove_segments();
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/split-seg/src/main.rs | prover/examples/split-seg/src/main.rs | use std::env;
use zkm_emulator::utils::{
get_block_path, load_elf_with_patch, split_prog_into_segs, SEGMENT_STEPS,
};
/// Loads the guest ELF (ELF_PATH), feeds it public/private inputs taken from
/// the ARGS environment variable, and splits execution into SEG_SIZE-step
/// segment files under SEG_OUTPUT.
fn split_segments() {
    // 1. split ELF into segs
    let basedir = env::var("BASEDIR").unwrap_or("/tmp/output".to_string());
    let elf_path = env::var("ELF_PATH").expect("ELF file is missing");
    let block_no = env::var("BLOCK_NO").unwrap_or("".to_string());
    let seg_path = env::var("SEG_OUTPUT").expect("Segment output path is missing");
    let seg_size = env::var("SEG_SIZE").unwrap_or(format!("{SEGMENT_STEPS}"));
    let seg_size = seg_size.parse::<_>().unwrap_or(SEGMENT_STEPS);
    let args = env::var("ARGS").unwrap_or("".to_string());
    // assume the first arg is the hash output(which is a public input), and the others are the input.
    let args: Vec<&str> = args.split_whitespace().collect();
    let mut state = load_elf_with_patch(&elf_path, vec![]);
    if !args.is_empty() {
        // First whitespace-separated token: the public input.
        let public_input: Vec<u8> = args[0].as_bytes().to_vec();
        log::info!("public input value {:X?}", public_input);
        state.add_input_stream(&public_input);
    }
    if args.len() > 1 {
        // Remaining tokens: private inputs, streamed in order — the guest
        // must read them in the same order.
        for (i, arg) in args.iter().enumerate().skip(1) {
            let private_input = arg.as_bytes().to_vec();
            log::info!("private input value {}: {:X?}", i, private_input);
            state.add_input_stream(&private_input);
        }
    }
    let block_path = get_block_path(&basedir, &block_no, "");
    if !block_no.is_empty() {
        // Block-based workloads additionally preload input from the block dir.
        state.load_input(&block_path);
    }
    let _ = split_prog_into_segs(state, &seg_path, &block_path, seg_size);
}
/// Entry point: initializes logging (tolerating double-init) and splits the ELF.
fn main() {
    env_logger::try_init().unwrap_or_default();
    split_segments();
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/sha2-composition/guest/src/main.rs | prover/examples/sha2-composition/guest/src/main.rs | #![no_std]
#![no_main]
use sha2::{Digest, Sha256};
extern crate alloc;
use alloc::vec::Vec;
zkm_runtime::entrypoint!(main);
/// Guest entry point for proof composition: records an assumption about an
/// inner proof, recomputes sha256 over the 32-byte input, checks it against
/// the expected digest, and commits the digest.
pub fn main() {
    // Read order must match the host's add_input_stream calls:
    // expected digest (public), 32-byte preimage, inner guest's elf id.
    let public_input: Vec<u8> = zkm_runtime::io::read();
    let input: [u8; 32] = zkm_runtime::io::read();
    let elf_id: Vec<u8> = zkm_runtime::io::read();
    // NOTE(review): `verify` presumably asserts that a proof for
    // (elf_id, input) exists as an assumption — confirm in zkm_runtime.
    zkm_runtime::io::verify(elf_id, &input);
    let mut hasher = Sha256::new();
    hasher.update(input.to_vec());
    let result = hasher.finalize();
    let output: [u8; 32] = result.into();
    // The recomputed digest must equal the supplied public input.
    assert_eq!(output.to_vec(), public_input);
    zkm_runtime::io::commit::<[u8; 32]>(&output);
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/sha2-composition/host/build.rs | prover/examples/sha2-composition/host/build.rs | fn main() {
zkm_build::build_program(&format!("{}/../guest", env!("CARGO_MANIFEST_DIR")));
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/sha2-composition/host/src/main.rs | prover/examples/sha2-composition/host/src/main.rs | use std::env;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use zkm_emulator::utils::{load_elf_with_patch, split_prog_into_segs};
use zkm_prover::generation::state::{AssumptionReceipts, Receipt};
use zkm_utils::utils::prove_segments;
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
/// Proves the inner sha2 guest at `elf_path`, producing the receipt that the
/// outer composition proof consumes as an assumption. Expects the guest to
/// fit in exactly one segment.
fn prove_sha_5_precompile(elf_path: &str, seg_path: &str) -> Receipt<F, C, D> {
    let mut state = load_elf_with_patch(elf_path, vec![]);
    let n: u32 = 5;
    // Hard-coded expected digest (the guest's public input).
    // NOTE(review): must stay in sync with the inner guest's committed output.
    let public_input: [u8; 32] = [
        37, 148, 182, 169, 46, 191, 177, 195, 49, 45, 235, 125, 1, 192, 21, 251, 149, 233, 251,
        233, 189, 123, 198, 181, 39, 175, 7, 129, 62, 199, 185, 16,
    ];
    state.add_input_stream(&public_input.to_vec());
    state.add_input_stream(&n.to_le_bytes().to_vec());
    // seg_size 0 — presumably "do not split"; confirm in zkm_emulator.
    let (_total_steps, seg_num, mut state) = split_prog_into_segs(state, seg_path, "", 0);
    let value = state.read_public_values::<[u8; 32]>();
    log::info!("public value: {:?}", value);
    assert!(seg_num == 1);
    prove_segments(seg_path, "", "", "", 1, 0, vec![]).unwrap()
}
/// Path to the outer (composition) guest ELF built for the zkMIPS target.
const ELF_PATH: &str = "../guest/elf/mips-zkm-zkvm-elf";
/// Proves the composition guest: first proves the inner sha2 precompile, then
/// hands its receipt (and image id) to the outer guest as an assumption.
fn prove_sha2_precompile() {
    // 1. split ELF into segs
    let precompile_path = env::var("PRECOMPILE_PATH").expect("PRECOMPILE ELF file is missing");
    let seg_path = env::var("SEG_OUTPUT").expect("Segment output path is missing");
    let mut receipts: AssumptionReceipts<F, C, D> = vec![];
    // Inner proof whose claim the outer guest verifies via io::verify.
    let receipt = prove_sha_5_precompile(&precompile_path, &seg_path);
    log::info!(
        "elf_id: {:?}, data: {:?}",
        receipt.claim().elf_id,
        receipt.claim().commit,
    );
    let image_id = receipt.claim().elf_id;
    receipts.push(receipt.into());
    // `ELF_PATH` is already a `&str`; the previous `&ELF_PATH` passed a
    // needless `&&str` (clippy::needless_borrow) — the sibling example at
    // sha2-rust calls it without the extra borrow.
    let mut state = load_elf_with_patch(ELF_PATH, vec![]);
    // Expected digest committed by the outer guest (public input).
    let public_input: [u8; 32] = [
        91, 15, 50, 181, 63, 91, 186, 46, 9, 26, 167, 190, 200, 232, 40, 101, 149, 181, 253, 89,
        24, 150, 142, 102, 14, 67, 78, 221, 18, 205, 95, 28,
    ];
    state.add_input_stream(&public_input.to_vec());
    log::info!("expected public value: {:?}", public_input);
    // Preimage fed to the outer guest (private input); equals the inner
    // guest's committed digest.
    let private_input: [u8; 32] = [
        37, 148, 182, 169, 46, 191, 177, 195, 49, 45, 235, 125, 1, 192, 21, 251, 149, 233, 251,
        233, 189, 123, 198, 181, 39, 175, 7, 129, 62, 199, 185, 16,
    ];
    log::info!("private input value: {:?}", private_input);
    state.add_input_stream(&private_input);
    state.add_input_stream(&image_id);
    let (_total_steps, _seg_num, mut state) = split_prog_into_segs(state, &seg_path, "", 0);
    let value = state.read_public_values::<[u8; 32]>();
    log::info!("public value: {:X?}", value);
    log::info!("public value: {} in hex", hex::encode(value));
    let _ = prove_segments(&seg_path, "", "", "", 1, 0, receipts);
}
/// Entry point: initializes logging (tolerating double-init) and runs the
/// composition proof.
fn main() {
    env_logger::try_init().unwrap_or_default();
    prove_sha2_precompile();
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/sha2-rust/guest/src/main.rs | prover/examples/sha2-rust/guest/src/main.rs | #![no_std]
#![no_main]
use sha2::{Digest, Sha256};
extern crate alloc;
use alloc::vec::Vec;
zkm_runtime::entrypoint!(main);
/// Guest entry point: reads the expected digest (public) and a preimage
/// (private), recomputes sha256, asserts equality, and commits the digest.
pub fn main() {
    // Read order must match the host's add_input_stream calls.
    let expected: Vec<u8> = zkm_runtime::io::read();
    let preimage: Vec<u8> = zkm_runtime::io::read();
    // One-shot digest; equivalent to new() + update() + finalize().
    let digest: [u8; 32] = Sha256::digest(&preimage).into();
    assert_eq!(digest.to_vec(), expected);
    zkm_runtime::io::commit::<[u8; 32]>(&digest);
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/sha2-rust/host/build.rs | prover/examples/sha2-rust/host/build.rs | fn main() {
zkm_build::build_program(&format!("{}/../guest", env!("CARGO_MANIFEST_DIR")));
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/sha2-rust/host/src/main.rs | prover/examples/sha2-rust/host/src/main.rs | use std::env;
use zkm_emulator::utils::{load_elf_with_patch, split_prog_into_segs};
use zkm_utils::utils::prove_segments;
const ELF_PATH: &str = "../guest/elf/mips-zkm-zkvm-elf";
/// Splits the sha2 guest ELF into segments and proves them. `ARGS` holds the
/// expected digest (hex, public input) followed by the preimage (private).
fn prove_sha2_rust() {
    // 1. split ELF into segs
    let seg_path = env::var("SEG_OUTPUT").expect("Segment output path is missing");
    let seg_size = env::var("SEG_SIZE")
        .unwrap_or_else(|_| "65536".to_string())
        .parse::<usize>()
        .unwrap_or(0);
    let mut state = load_elf_with_patch(ELF_PATH, vec![]);
    // load input
    let raw_args = env::var("ARGS").unwrap_or_else(|_| "data-to-hash".to_string());
    // assume the first arg is the hash output(which is a public input), and the second is the input.
    let args: Vec<&str> = raw_args.split_whitespace().collect();
    assert_eq!(args.len(), 2);
    let expected_digest: Vec<u8> = hex::decode(args[0]).unwrap();
    state.add_input_stream(&expected_digest);
    log::info!("expected public value in hex: {:X?}", args[0]);
    log::info!("expected public value: {:X?}", expected_digest);
    let preimage = args[1].as_bytes().to_vec();
    log::info!("private input value: {:X?}", preimage);
    state.add_input_stream(&preimage);
    let (_total_steps, seg_num, mut state) = split_prog_into_segs(state, &seg_path, "", seg_size);
    let committed = state.read_public_values::<[u8; 32]>();
    log::info!("public value: {:X?}", committed);
    log::info!("public value: {} in hex", hex::encode(committed));
    let _ = prove_segments(&seg_path, "", "", "", seg_num, 0, vec![]).unwrap();
}
/// Entry point: initializes logging (tolerating double-init) and runs the prover.
fn main() {
    env_logger::try_init().unwrap_or_default();
    prove_sha2_rust();
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/utils/src/lib.rs | prover/examples/utils/src/lib.rs | pub mod utils;
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/utils/src/utils.rs | prover/examples/utils/src/utils.rs | use std::fs::File;
use std::io::BufReader;
use std::ops::Range;
use std::time::Duration;
use plonky2::plonk::config::{GenericConfig, PoseidonGoldilocksConfig};
use plonky2::util::timing::TimingTree;
use plonky2x::backend::circuit::Groth16WrapperParameters;
use plonky2x::backend::wrapper::wrap::WrappedCircuit;
use plonky2x::frontend::builder::CircuitBuilder as WrapperBuilder;
use plonky2x::prelude::DefaultParameters;
use zkm_prover::all_stark::AllStark;
use zkm_prover::config::StarkConfig;
use zkm_prover::cpu::kernel::assembler::segment_kernel;
use zkm_prover::fixed_recursive_verifier::AllRecursiveCircuits;
use zkm_prover::generation::state::{AssumptionReceipts, Receipt};
// Allowed trace degree-bit ranges, one per STARK table; recursion circuits
// are built for every size in each range. NOTE(review): the order must match
// the table order expected by `AllRecursiveCircuits::new` — confirm against
// `AllStark`.
const DEGREE_BITS_RANGE: [Range<usize>; 12] = [
    10..21,
    12..22,
    11..21,
    8..21,
    6..10,
    6..10,
    6..16,
    6..16,
    6..16,
    6..16,
    6..21,
    13..23,
];
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type F = <C as GenericConfig<D>>::F;
/// Proves `seg_file_number` consecutive segment files from `seg_dir`
/// (starting at `seg_start_id`), aggregates the per-segment proofs pairwise,
/// and — when more than one segment was proven — produces a block proof plus
/// a Groth16-wrapped proof saved under `../verifier/data/test_circuit/`.
///
/// `basedir`/`block`/`file` are forwarded to `segment_kernel` to locate block
/// inputs; `assumptions` (composition receipts) are made available to every
/// root proof. Returns the final block receipt, or the aggregated receipt
/// when only one segment was proven.
pub fn prove_segments(
    seg_dir: &str,
    basedir: &str,
    block: &str,
    file: &str,
    seg_file_number: usize,
    seg_start_id: usize,
    assumptions: AssumptionReceipts<F, C, D>,
) -> anyhow::Result<Receipt<F, C, D>> {
    type InnerParameters = DefaultParameters;
    type OuterParameters = Groth16WrapperParameters;
    let total_timing = TimingTree::new("prove total time", log::Level::Info);
    let all_stark = AllStark::<F, D>::default();
    let config = StarkConfig::standard_fast_config();
    // Preprocess all circuits.
    let all_circuits =
        AllRecursiveCircuits::<F, C, D>::new(&all_stark, &DEGREE_BITS_RANGE, &config);
    // Prove the first segment; its receipt seeds the aggregation chain.
    let seg_file = format!("{}/{}", seg_dir, seg_start_id);
    log::info!("Process segment {}", seg_file);
    let seg_reader = BufReader::new(File::open(seg_file)?);
    let input_first = segment_kernel(basedir, block, file, seg_reader);
    let mut timing = TimingTree::new("prove root first", log::Level::Info);
    let mut agg_receipt = all_circuits.prove_root_with_assumption(
        &all_stark,
        &input_first,
        &config,
        &mut timing,
        assumptions.clone(),
    )?;
    timing.filter(Duration::from_millis(100)).print();
    all_circuits.verify_root(agg_receipt.clone())?;
    let mut base_seg = seg_start_id + 1;
    let mut seg_num = seg_file_number - 1;
    let mut is_agg = false;
    // With an even segment count, fold one extra segment into the chain up
    // front so the remainder can be consumed strictly in pairs below.
    if seg_file_number % 2 == 0 {
        let seg_file = format!("{}/{}", seg_dir, seg_start_id + 1);
        log::info!("Process segment {}", seg_file);
        let seg_reader = BufReader::new(File::open(seg_file)?);
        let input = segment_kernel(basedir, block, file, seg_reader);
        timing = TimingTree::new("prove root second", log::Level::Info);
        let receipt = all_circuits.prove_root_with_assumption(
            &all_stark,
            &input,
            &config,
            &mut timing,
            assumptions.clone(),
        )?;
        timing.filter(Duration::from_millis(100)).print();
        all_circuits.verify_root(receipt.clone())?;
        timing = TimingTree::new("prove aggression", log::Level::Info);
        // We can duplicate the proofs here because the state hasn't mutated.
        agg_receipt = all_circuits.prove_aggregation(false, &agg_receipt, false, &receipt)?;
        timing.filter(Duration::from_millis(100)).print();
        all_circuits.verify_aggregation(&agg_receipt)?;
        is_agg = true;
        base_seg = seg_start_id + 2;
        seg_num -= 1;
    }
    // Main loop: prove two segments, aggregate them, then fold the pair into
    // the running aggregate.
    for i in 0..seg_num / 2 {
        let seg_file = format!("{}/{}", seg_dir, base_seg + (i << 1));
        log::info!("Process segment {}", seg_file);
        let seg_reader = BufReader::new(File::open(&seg_file)?);
        let input_first = segment_kernel(basedir, block, file, seg_reader);
        let mut timing = TimingTree::new("prove root first", log::Level::Info);
        let root_receipt_first = all_circuits.prove_root_with_assumption(
            &all_stark,
            &input_first,
            &config,
            &mut timing,
            assumptions.clone(),
        )?;
        timing.filter(Duration::from_millis(100)).print();
        all_circuits.verify_root(root_receipt_first.clone())?;
        let seg_file = format!("{}/{}", seg_dir, base_seg + (i << 1) + 1);
        log::info!("Process segment {}", seg_file);
        let seg_reader = BufReader::new(File::open(&seg_file)?);
        let input = segment_kernel(basedir, block, file, seg_reader);
        let mut timing = TimingTree::new("prove root second", log::Level::Info);
        let root_receipt = all_circuits.prove_root_with_assumption(
            &all_stark,
            &input,
            &config,
            &mut timing,
            assumptions.clone(),
        )?;
        timing.filter(Duration::from_millis(100)).print();
        all_circuits.verify_root(root_receipt.clone())?;
        timing = TimingTree::new("prove aggression", log::Level::Info);
        // We can duplicate the proofs here because the state hasn't mutated.
        let new_agg_receipt =
            all_circuits.prove_aggregation(false, &root_receipt_first, false, &root_receipt)?;
        timing.filter(Duration::from_millis(100)).print();
        all_circuits.verify_aggregation(&new_agg_receipt)?;
        timing = TimingTree::new("prove nested aggression", log::Level::Info);
        // We can duplicate the proofs here because the state hasn't mutated.
        agg_receipt =
            all_circuits.prove_aggregation(is_agg, &agg_receipt, true, &new_agg_receipt)?;
        is_agg = true;
        timing.filter(Duration::from_millis(100)).print();
        all_circuits.verify_aggregation(&agg_receipt)?;
    }
    log::info!(
        "proof size: {:?}",
        serde_json::to_string(&agg_receipt.proof().proof)
            .unwrap()
            .len()
    );
    // Multi-segment runs get a block proof plus a saved Groth16 wrapper;
    // single-segment runs return the (unwrapped) aggregate directly.
    let final_receipt = if seg_file_number > 1 {
        let block_receipt = all_circuits.prove_block(None, &agg_receipt)?;
        all_circuits.verify_block(&block_receipt)?;
        let build_path = "../verifier/data".to_string();
        let path = format!("{}/test_circuit/", build_path);
        let builder = WrapperBuilder::<DefaultParameters, 2>::new();
        let mut circuit = builder.build();
        circuit.set_data(all_circuits.block.circuit);
        // Bit widths of the block proof's public inputs, in order.
        // NOTE(review): layout (16×32, 32×8, 68×64) inferred from the block
        // circuit — confirm against its public-input registration.
        let mut bit_size = vec![32usize; 16];
        bit_size.extend(vec![8; 32]);
        bit_size.extend(vec![64; 68]);
        let wrapped_circuit = WrappedCircuit::<InnerParameters, OuterParameters, D>::build(
            circuit,
            Some((vec![], bit_size)),
        );
        let wrapped_proof = wrapped_circuit.prove(&block_receipt.proof()).unwrap();
        wrapped_proof.save(path).unwrap();
        block_receipt
    } else {
        agg_receipt
    };
    log::info!("build finish");
    total_timing.filter(Duration::from_millis(100)).print();
    Ok(final_receipt)
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/sha2-syscall/guest/src/consts.rs | prover/examples/sha2-syscall/guest/src/consts.rs |
#![allow(dead_code, clippy::unreadable_literal)]
/// Number of state words in a SHA-2 compression state (8 for both families).
pub const STATE_LEN: usize = 8;
/// Number of words in a SHA-2 message block (16 for both families).
pub const BLOCK_LEN: usize = 16;
/// SHA-256 compression state (eight 32-bit words).
pub type State256 = [u32; STATE_LEN];
/// SHA-512 compression state (eight 64-bit words).
pub type State512 = [u64; STATE_LEN];
/// Constants necessary for SHA-256 family of digests.
pub const K32: [u32; 64] = [
0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2,
];
/// Constants necessary for SHA-256 family of digests.
pub const K32X4: [[u32; 4]; 16] = [
[K32[3], K32[2], K32[1], K32[0]],
[K32[7], K32[6], K32[5], K32[4]],
[K32[11], K32[10], K32[9], K32[8]],
[K32[15], K32[14], K32[13], K32[12]],
[K32[19], K32[18], K32[17], K32[16]],
[K32[23], K32[22], K32[21], K32[20]],
[K32[27], K32[26], K32[25], K32[24]],
[K32[31], K32[30], K32[29], K32[28]],
[K32[35], K32[34], K32[33], K32[32]],
[K32[39], K32[38], K32[37], K32[36]],
[K32[43], K32[42], K32[41], K32[40]],
[K32[47], K32[46], K32[45], K32[44]],
[K32[51], K32[50], K32[49], K32[48]],
[K32[55], K32[54], K32[53], K32[52]],
[K32[59], K32[58], K32[57], K32[56]],
[K32[63], K32[62], K32[61], K32[60]],
];
/// Constants necessary for SHA-512 family of digests.
pub const K64: [u64; 80] = [
0x428a2f98d728ae22, 0x7137449123ef65cd, 0xb5c0fbcfec4d3b2f, 0xe9b5dba58189dbbc,
0x3956c25bf348b538, 0x59f111f1b605d019, 0x923f82a4af194f9b, 0xab1c5ed5da6d8118,
0xd807aa98a3030242, 0x12835b0145706fbe, 0x243185be4ee4b28c, 0x550c7dc3d5ffb4e2,
0x72be5d74f27b896f, 0x80deb1fe3b1696b1, 0x9bdc06a725c71235, 0xc19bf174cf692694,
0xe49b69c19ef14ad2, 0xefbe4786384f25e3, 0x0fc19dc68b8cd5b5, 0x240ca1cc77ac9c65,
0x2de92c6f592b0275, 0x4a7484aa6ea6e483, 0x5cb0a9dcbd41fbd4, 0x76f988da831153b5,
0x983e5152ee66dfab, 0xa831c66d2db43210, 0xb00327c898fb213f, 0xbf597fc7beef0ee4,
0xc6e00bf33da88fc2, 0xd5a79147930aa725, 0x06ca6351e003826f, 0x142929670a0e6e70,
0x27b70a8546d22ffc, 0x2e1b21385c26c926, 0x4d2c6dfc5ac42aed, 0x53380d139d95b3df,
0x650a73548baf63de, 0x766a0abb3c77b2a8, 0x81c2c92e47edaee6, 0x92722c851482353b,
0xa2bfe8a14cf10364, 0xa81a664bbc423001, 0xc24b8b70d0f89791, 0xc76c51a30654be30,
0xd192e819d6ef5218, 0xd69906245565a910, 0xf40e35855771202a, 0x106aa07032bbd1b8,
0x19a4c116b8d2d0c8, 0x1e376c085141ab53, 0x2748774cdf8eeb99, 0x34b0bcb5e19b48a8,
0x391c0cb3c5c95a63, 0x4ed8aa4ae3418acb, 0x5b9cca4f7763e373, 0x682e6ff3d6b2b8a3,
0x748f82ee5defb2fc, 0x78a5636f43172f60, 0x84c87814a1f0ab72, 0x8cc702081a6439ec,
0x90befffa23631e28, 0xa4506cebde82bde9, 0xbef9a3f7b2c67915, 0xc67178f2e372532b,
0xca273eceea26619c, 0xd186b8c721c0c207, 0xeada7dd6cde0eb1e, 0xf57d4f7fee6ed178,
0x06f067aa72176fba, 0x0a637dc5a2c898a6, 0x113f9804bef90dae, 0x1b710b35131c471b,
0x28db77f523047d84, 0x32caab7b40c72493, 0x3c9ebe0a15c9bebc, 0x431d67c49c100d4c,
0x4cc5d4becb3e42b6, 0x597f299cfc657e2a, 0x5fcb6fab3ad6faec, 0x6c44198c4a475817,
];
/// Constants necessary for SHA-512 family of digests.
pub const K64X2: [[u64; 2]; 40] = [
[K64[1], K64[0]], [K64[3], K64[2]], [K64[5], K64[4]], [K64[7], K64[6]],
[K64[9], K64[8]], [K64[11], K64[10]], [K64[13], K64[12]], [K64[15], K64[14]],
[K64[17], K64[16]], [K64[19], K64[18]], [K64[21], K64[20]], [K64[23], K64[22]],
[K64[25], K64[24]], [K64[27], K64[26]], [K64[29], K64[28]], [K64[31], K64[30]],
[K64[33], K64[32]], [K64[35], K64[34]], [K64[37], K64[36]], [K64[39], K64[38]],
[K64[41], K64[40]], [K64[43], K64[42]], [K64[45], K64[44]], [K64[47], K64[46]],
[K64[49], K64[48]], [K64[51], K64[50]], [K64[53], K64[52]], [K64[55], K64[54]],
[K64[57], K64[56]], [K64[59], K64[58]], [K64[61], K64[60]], [K64[63], K64[62]],
[K64[65], K64[64]], [K64[67], K64[66]], [K64[69], K64[68]], [K64[71], K64[70]],
[K64[73], K64[72]], [K64[75], K64[74]], [K64[77], K64[76]], [K64[79], K64[78]],
];
pub const H256_224: State256 = [
0xc1059ed8, 0x367cd507, 0x3070dd17, 0xf70e5939,
0xffc00b31, 0x68581511, 0x64f98fa7, 0xbefa4fa4,
];
pub const H256_256: State256 = [
0x6a09e667, 0xbb67ae85, 0x3c6ef372, 0xa54ff53a,
0x510e527f, 0x9b05688c, 0x1f83d9ab, 0x5be0cd19,
];
pub const H512_224: State512 = [
0x8c3d37c819544da2, 0x73e1996689dcd4d6, 0x1dfab7ae32ff9c82, 0x679dd514582f9fcf,
0x0f6d2b697bd44da8, 0x77e36f7304c48942, 0x3f9d85a86a1d36c8, 0x1112e6ad91d692a1,
];
pub const H512_256: State512 = [
0x22312194fc2bf72c, 0x9f555fa3c84c64c2, 0x2393b86b6f53b151, 0x963877195940eabd,
0x96283ee2a88effe3, 0xbe5e1e2553863992, 0x2b0199fc2c85b8aa, 0x0eb72ddc81c52ca2,
];
pub const H512_384: State512 = [
0xcbbb9d5dc1059ed8, 0x629a292a367cd507, 0x9159015a3070dd17, 0x152fecd8f70e5939,
0x67332667ffc00b31, 0x8eb44a8768581511, 0xdb0c2e0d64f98fa7, 0x47b5481dbefa4fa4,
];
pub const H512_512: State512 = [
0x6a09e667f3bcc908, 0xbb67ae8584caa73b, 0x3c6ef372fe94f82b, 0xa54ff53a5f1d36f1,
0x510e527fade682d1, 0x9b05688c2b3e6c1f, 0x1f83d9abfb41bd6b, 0x5be0cd19137e2179,
];
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/sha2-syscall/guest/src/lib.rs | prover/examples/sha2-syscall/guest/src/lib.rs | //! An implementation of the [SHA-2][1] cryptographic hash algorithms.
//!
//! There are 6 standard algorithms specified in the SHA-2 standard: [`Sha224`],
//! [`Sha256`], [`Sha512_224`], [`Sha512_256`], [`Sha384`], and [`Sha512`].
//!
//! Algorithmically, there are only 2 core algorithms: SHA-256 and SHA-512.
//! All other algorithms are just applications of these with different initial
//! hash values, and truncated to different digest bit lengths. The first two
//! algorithms in the list are based on SHA-256, while the last three on SHA-512.
//!
//! # Usage
//!
//! ```rust
//! use hex_literal::hex;
//! use sha2::{Sha256, Sha512, Digest};
//!
//! // create a Sha256 object
//! let mut hasher = Sha256::new();
//!
//! // write input message
//! hasher.update(b"hello world");
//!
//! // read hash digest and consume hasher
//! let result = hasher.finalize();
//!
//! assert_eq!(result[..], hex!("
//! b94d27b9934d3e08a52e52d7da7dabfac484efe37a5380ee9088f7ace2efcde9
//! ")[..]);
//!
//! // same for Sha512
//! let mut hasher = Sha512::new();
//! hasher.update(b"hello world");
//! let result = hasher.finalize();
//!
//! assert_eq!(result[..], hex!("
//! 309ecc489c12d6eb4cc40f50c902f2b4d0ed77ee511a7c7a9bcd3ca86d4cd86f
//! 989dd35bc5ff499670da34255b45b0cfd830e81f605dcf7dc5542e93ae9cd76f
//! ")[..]);
//! ```
//!
//! Also see [RustCrypto/hashes][2] readme.
//!
//! [1]: https://en.wikipedia.org/wiki/SHA-2
//! [2]: https://github.com/RustCrypto/hashes
#![no_std]
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/sha2-syscall/guest/src/core_api.rs | prover/examples/sha2-syscall/guest/src/core_api.rs | use crate::consts;
use core::{fmt, slice::from_ref};
use digest::{
block_buffer::Eager,
core_api::{
AlgorithmName, Block, BlockSizeUser, Buffer, BufferKindUser, OutputSizeUser, TruncSide,
UpdateCore, VariableOutputCore,
},
typenum::{Unsigned, U32, U64},
HashMarker, InvalidOutputSize, Output,
generic_array::GenericArray,
};
/// Core block-level SHA-256 hasher with variable output size.
///
/// Supports initialization only for 28 and 32 byte output sizes,
/// i.e. 224 and 256 bits respectively.
#[derive(Clone)]
pub struct Sha256VarCore {
state: consts::State256,
block_len: u64,
}
impl HashMarker for Sha256VarCore {}
impl BlockSizeUser for Sha256VarCore {
type BlockSize = U64;
}
impl BufferKindUser for Sha256VarCore {
type BufferKind = Eager;
}
impl UpdateCore for Sha256VarCore {
#[inline]
fn update_blocks(&mut self, blocks: &[Block<Self>]) {
self.block_len += blocks.len() as u64;
compress256(&mut self.state, blocks);
}
}
impl OutputSizeUser for Sha256VarCore {
type OutputSize = U32;
}
impl VariableOutputCore for Sha256VarCore {
const TRUNC_SIDE: TruncSide = TruncSide::Left;
#[inline]
fn new(output_size: usize) -> Result<Self, InvalidOutputSize> {
let state = match output_size {
28 => consts::H256_224,
32 => consts::H256_256,
_ => return Err(InvalidOutputSize),
};
let block_len = 0;
Ok(Self { state, block_len })
}
#[inline]
fn finalize_variable_core(&mut self, buffer: &mut Buffer<Self>, out: &mut Output<Self>) {
let bs = Self::BlockSize::U64;
let bit_len = 8 * (buffer.get_pos() as u64 + bs * self.block_len);
buffer.len64_padding_be(bit_len, |b| compress256(&mut self.state, from_ref(b)));
for (chunk, v) in out.chunks_exact_mut(4).zip(self.state.iter()) {
chunk.copy_from_slice(&v.to_be_bytes());
}
}
}
impl AlgorithmName for Sha256VarCore {
#[inline]
fn write_alg_name(f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("Sha256")
}
}
impl fmt::Debug for Sha256VarCore {
#[inline]
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.write_str("Sha256VarCore { ... }")
}
}
pub fn compress256(state: &mut [u32; 8], blocks: &[GenericArray<u8, U64>]) {
// SAFETY: GenericArray<u8, U64> and [u8; 64] have
// exactly the same memory layout
let p = blocks.as_ptr() as *const [u8; 64];
let blocks = unsafe { core::slice::from_raw_parts(p, blocks.len()) };
zkm_runtime::io::compress(state, blocks)
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/sha2-syscall/guest/src/main.rs | prover/examples/sha2-syscall/guest/src/main.rs | #![no_std]
#![no_main]
extern crate alloc;
use alloc::vec::Vec;
pub use digest::{self, Digest};
#[cfg(feature = "oid")]
use digest::const_oid::{AssociatedOid, ObjectIdentifier};
use digest::{
consts::{U28, U32},
core_api::{CoreWrapper, CtVariableCoreWrapper},
impl_oid_carrier,
};
#[rustfmt::skip]
mod consts;
mod core_api;
pub use core_api::{compress256, Sha256VarCore};
impl_oid_carrier!(OidSha256, "2.16.840.1.101.3.4.2.1");
impl_oid_carrier!(OidSha224, "2.16.840.1.101.3.4.2.4");
/// SHA-224 hasher.
pub type Sha224 = CoreWrapper<CtVariableCoreWrapper<Sha256VarCore, U28, OidSha224>>;
/// SHA-256 hasher.
pub type Sha256 = CoreWrapper<CtVariableCoreWrapper<Sha256VarCore, U32, OidSha256>>;
zkm_runtime::entrypoint!(main);
pub fn main() {
let public_input: Vec<u8> = zkm_runtime::io::read();
let input: Vec<u8> = zkm_runtime::io::read();
let result = Sha256::digest(input);
let output: [u8; 32] = result.into();
assert_eq!(output.to_vec(), public_input);
zkm_runtime::io::commit::<[u8; 32]>(&output);
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/sha2-syscall/host/build.rs | prover/examples/sha2-syscall/host/build.rs | fn main() {
zkm_build::build_program(&format!("{}/../guest", env!("CARGO_MANIFEST_DIR")));
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/sha2-syscall/host/src/main.rs | prover/examples/sha2-syscall/host/src/main.rs | use std::env;
use zkm_emulator::utils::{load_elf_with_patch, split_prog_into_segs};
use zkm_utils::utils::prove_segments;
const ELF_PATH: &str = "../guest/elf/mips-zkm-zkvm-elf";
fn prove_sha2_rust() {
// 1. split ELF into segs
let seg_path = env::var("SEG_OUTPUT").expect("Segment output path is missing");
let seg_size = env::var("SEG_SIZE").unwrap_or("65536".to_string());
let seg_size = seg_size.parse::<_>().unwrap_or(0);
let mut state = load_elf_with_patch(ELF_PATH, vec![]);
// load input
let args = env::var("ARGS").unwrap_or("data-to-hash".to_string());
// assume the first arg is the hash output(which is a public input), and the second is the input.
let args: Vec<&str> = args.split_whitespace().collect();
assert_eq!(args.len(), 2);
let public_input: Vec<u8> = hex::decode(args[0]).unwrap();
state.add_input_stream(&public_input);
log::info!("expected public value in hex: {:X?}", args[0]);
log::info!("expected public value: {:X?}", public_input);
let private_input = args[1].as_bytes().to_vec();
log::info!("private input value: {:X?}", private_input);
state.add_input_stream(&private_input);
let (_total_steps, seg_num, mut state) = split_prog_into_segs(state, &seg_path, "", seg_size);
let value = state.read_public_values::<[u8; 32]>();
log::info!("public value: {:X?}", value);
log::info!("public value: {} in hex", hex::encode(value));
let _ = prove_segments(&seg_path, "", "", "", seg_num, 0, vec![]).unwrap();
}
fn main() {
env_logger::try_init().unwrap_or_default();
prove_sha2_rust();
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/sha2-go/host/src/main.rs | prover/examples/sha2-go/host/src/main.rs | use serde::{Deserialize, Serialize};
use std::env;
use zkm_emulator::utils::{load_elf_with_patch, split_prog_into_segs};
use zkm_utils::utils::prove_segments;
#[derive(Debug, Clone, Deserialize, Serialize)]
pub enum DataId {
TYPE1,
TYPE2,
TYPE3,
}
#[derive(Debug, Clone, Deserialize, Serialize)]
pub struct Data {
pub input1: [u8; 10],
pub input2: u8,
pub input3: i8,
pub input4: u16,
pub input5: i16,
pub input6: u32,
pub input7: i32,
pub input8: u64,
pub input9: i64,
pub input10: Vec<u8>,
pub input11: DataId,
pub input12: String,
}
impl Default for Data {
fn default() -> Self {
Self::new()
}
}
impl Data {
pub fn new() -> Self {
let array = [1u8, 2u8, 3u8, 4u8, 5u8, 6u8, 7u8, 8u8, 9u8, 10u8];
Self {
input1: array,
input2: 0x11u8,
input3: -1i8,
input4: 0x1122u16,
input5: -1i16,
input6: 0x112233u32,
input7: -1i32,
input8: 0x1122334455u64,
input9: -1i64,
input10: array[1..3].to_vec(),
input11: DataId::TYPE3,
input12: "hello".to_string(),
}
}
}
const ELF_PATH: &str = "../guest/sha2-go";
fn prove_sha2_go() {
// 1. split ELF into segs
let seg_path = env::var("SEG_OUTPUT").expect("Segment output path is missing");
let seg_size = env::var("SEG_SIZE").unwrap_or("0".to_string());
let seg_size = seg_size.parse::<_>().unwrap_or(0);
let mut state = load_elf_with_patch(ELF_PATH, vec![]);
// load input
let args = env::var("ARGS").unwrap_or("data-to-hash".to_string());
// assume the first arg is the hash output(which is a public input), and the second is the input.
let args: Vec<&str> = args.split_whitespace().collect();
assert_eq!(args.len(), 2);
let mut data = Data::new();
// Fill in the input data
data.input10 = hex::decode(args[0]).unwrap();
data.input12 = args[1].to_string();
state.add_input_stream(&data);
log::info!(
"enum {} {} {}",
DataId::TYPE1 as u8,
DataId::TYPE2 as u8,
DataId::TYPE3 as u8
);
log::info!("public input: {:X?}", data);
let (_total_steps, seg_num, mut state) = split_prog_into_segs(state, &seg_path, "", seg_size);
let value = state.read_public_values::<Data>();
log::info!("public value: {:X?}", value);
let _ = prove_segments(&seg_path, "", "", "", seg_num, 0, vec![]);
}
fn main() {
env_logger::try_init().unwrap_or_default();
prove_sha2_go();
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/keccak/guest/src/main.rs | prover/examples/keccak/guest/src/main.rs | #![no_std]
#![no_main]
extern crate alloc;
use alloc::vec::Vec;
zkm_runtime::entrypoint!(main);
pub fn main() {
let public_input: Vec<u8> = zkm_runtime::io::read();
let input: Vec<u8> = zkm_runtime::io::read();
let output = zkm_runtime::io::keccak(&input.as_slice());
assert_eq!(output.to_vec(), public_input);
zkm_runtime::io::commit::<[u8; 32]>(&output);
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/keccak/host/build.rs | prover/examples/keccak/host/build.rs | fn main() {
zkm_build::build_program(&format!("{}/../guest", env!("CARGO_MANIFEST_DIR")));
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/keccak/host/src/main.rs | prover/examples/keccak/host/src/main.rs | use alloy_primitives::keccak256;
use std::env;
use zkm_emulator::utils::{load_elf_with_patch, split_prog_into_segs};
use zkm_utils::utils::prove_segments;
const ELF_PATH: &str = "../guest/elf/mips-zkm-zkvm-elf";
fn prove_keccak_rust() {
// 1. split ELF into segs
let seg_path = env::var("SEG_OUTPUT").expect("Segment output path is missing");
let seg_size = env::var("SEG_SIZE").unwrap_or("65536".to_string());
let input_length = env::var("INPUT_LEN")
.unwrap_or("680".to_string())
.parse::<_>()
.unwrap();
let seg_size = seg_size.parse::<_>().unwrap_or(0);
let mut state = load_elf_with_patch(ELF_PATH, vec![]);
let private_input: Vec<u8> = vec![0].repeat(input_length);
let public_input = keccak256(&private_input).to_vec();
state.add_input_stream(&public_input);
state.add_input_stream(&private_input);
let (_total_steps, seg_num, mut state) = split_prog_into_segs(state, &seg_path, "", seg_size);
let value = state.read_public_values::<[u8; 32]>();
assert!(value == *public_input);
let _ = prove_segments(&seg_path, "", "", "", seg_num, 0, vec![]).unwrap();
}
fn main() {
env_logger::try_init().unwrap_or_default();
prove_keccak_rust();
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/revme/guest/src/main.rs | prover/examples/revme/guest/src/main.rs | #![no_std]
#![no_main]
extern crate alloc;
use alloc::vec::Vec;
use guest::verify_revm_tx;
zkm_runtime::entrypoint!(main);
pub fn main() {
let input: Vec<u8> = zkm_runtime::io::read_vec();
assert!(verify_revm_tx(&input).unwrap());
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/revme/host/build.rs | prover/examples/revme/host/build.rs | fn main() {
zkm_build::build_program(&format!("{}/../guest", env!("CARGO_MANIFEST_DIR")));
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/prover/examples/revme/host/src/main.rs | prover/examples/revme/host/src/main.rs | use std::env;
use std::fs::File;
use std::io::Read;
use zkm_emulator::utils::{load_elf_with_patch, split_prog_into_segs};
use zkm_utils::utils::prove_segments;
const ELF_PATH: &str = "../guest/elf/mips-zkm-zkvm-elf";
fn prove_revm() {
// 1. split ELF into segs
let seg_path = env::var("SEG_OUTPUT").unwrap_or("output".to_owned());
let seg_size = env::var("SEG_SIZE").unwrap_or("0".to_string());
let seg_size = seg_size.parse::<_>().unwrap_or(0);
let data = if let Ok(json_path) = env::var("JSON_PATH") {
let mut f = File::open(json_path).unwrap();
let mut data = vec![];
f.read_to_end(&mut data).unwrap();
data
} else {
guest_std::TEST_DATA.to_vec()
};
let encoded = guest_std::cbor_serialize(&data).unwrap();
let mut state = load_elf_with_patch(ELF_PATH, vec![]);
// load input
state.input_stream.push(encoded);
let (_total_steps, seg_num, mut _state) = split_prog_into_segs(state, &seg_path, "", seg_size);
println!("cycles: {:?}", _total_steps);
let _ = prove_segments(&seg_path, "", "", "", seg_num, 0, vec![]);
}
fn main() {
env_logger::try_init().unwrap_or_default();
prove_revm();
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/emulator/src/page.rs | emulator/src/page.rs | /// Note: 2**12 = 4 KiB, the minimum page-size in Unicorn for mmap
pub const PAGE_ADDR_SIZE: usize = 12;
pub const PAGE_KEY_SIZE: usize = 32 - PAGE_ADDR_SIZE;
pub const PAGE_SIZE: usize = 1 << PAGE_ADDR_SIZE;
pub const PAGE_ADDR_MASK: usize = PAGE_SIZE - 1;
const MAX_PAGE_COUNT: usize = 1 << PAGE_KEY_SIZE;
const PAGE_KEY_MASK: usize = MAX_PAGE_COUNT - 1;
pub const MAX_MEMORY: usize = 0x80000000;
pub const HASH_LEVEL: usize = 3;
#[derive(Debug, Clone)]
pub struct CachedPage {
pub data: [u8; PAGE_SIZE],
}
impl Default for CachedPage {
fn default() -> Self {
Self::new()
}
}
impl CachedPage {
pub fn new() -> Self {
Self {
data: [0u8; PAGE_SIZE],
}
}
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/emulator/src/lib.rs | emulator/src/lib.rs | #![allow(dead_code)]
pub mod memory;
pub mod opcode_id;
pub mod page;
pub mod state;
pub mod tests;
pub mod utils;
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/emulator/src/tests.rs | emulator/src/tests.rs | #[allow(clippy::module_inception)]
#[cfg(test)]
mod tests {
use std::{
fs,
path::{Path, PathBuf},
};
use crate::state::{InstrumentedState, State};
use crate::utils::{get_block_path, load_elf_with_patch, split_prog_into_segs, SEGMENT_STEPS};
const END_ADDR: u32 = 0xa7ef00d0;
const OUTPUT: &str = "/tmp/segment";
fn execute_open_mips(path: PathBuf) {
if path.ends_with("oracle.bin") {
println!("oracle test needs to be updated to use syscall pre-image oracle");
return;
}
let data = fs::read(path).expect("could not read file");
let data: Box<&[u8]> = Box::new(data.as_slice());
let mut state = State::new();
state
.memory
.set_memory_range(0, data)
.expect("set memory range failed");
state.registers[31] = END_ADDR;
let mut instrumented_state = InstrumentedState::new(state, String::from(""));
for _ in 0..1000 {
if instrumented_state.state.pc == END_ADDR {
break;
}
instrumented_state.step();
}
}
#[test]
fn test_execute_open_mips() {
for file_name in fs::read_dir("./src/open_mips_tests/test/bin/").unwrap() {
let file_name_path_buf = file_name.unwrap().path();
if file_name_path_buf.ends_with(Path::new("oracle.bin")) {
continue;
}
println!("testing: {:?}", &file_name_path_buf);
execute_open_mips(file_name_path_buf);
}
}
#[test]
fn test_execute_hello() {
let state = load_elf_with_patch("test-vectors/hello", vec!["aab", "ccd"]);
let mut instrumented_state = InstrumentedState::new(state, String::from(""));
for _ in 0..40000000 {
if instrumented_state.state.exited {
break;
}
instrumented_state.step();
}
}
#[test]
fn test_execute_rust_fib() {
let state = load_elf_with_patch("test-vectors/rust_fib", vec![]);
let mut instrumented_state = InstrumentedState::new(state, String::from(""));
log::debug!("begin execute\n");
for _ in 0..400000 {
if instrumented_state.state.exited {
break;
}
instrumented_state.step();
}
}
#[test]
#[ignore = "Two slow"]
fn test_execute_minigeth() {
let mut state = load_elf_with_patch("test-vectors/minigeth", vec![]);
let block_path = get_block_path("test-vectors", "13284491", "");
state.load_input(&block_path);
let _ = split_prog_into_segs(state, OUTPUT, &block_path, SEGMENT_STEPS);
}
#[test]
fn test_execute_split_hello() {
let state = load_elf_with_patch("test-vectors/hello", vec![]);
let _ = split_prog_into_segs(state, OUTPUT, "", SEGMENT_STEPS);
}
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/emulator/src/memory.rs | emulator/src/memory.rs | #![allow(clippy::extra_unused_lifetimes)]
use std::cell::RefCell;
pub const WORD_SIZE: usize = core::mem::size_of::<u32>();
pub const INIT_SP: u32 = 0x7fffd000;
use super::page::MAX_MEMORY;
use crate::page::{CachedPage, PAGE_ADDR_MASK, PAGE_ADDR_SIZE, PAGE_SIZE};
use itertools::Itertools;
use lazy_static::lazy_static;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::field::packed::PackedField;
use plonky2::field::types::{Field, PrimeField64};
use plonky2::hash::poseidon::Poseidon;
use std::collections::BTreeMap;
use std::io::Read;
use std::rc::Rc;
pub const HASH_ADDRESS_BASE: u32 = 0x80000000;
pub const HASH_ADDRESS_END: u32 = 0x81020000;
pub const ROOT_HASH_ADDRESS_BASE: u32 = 0x81021000;
pub const END_PC_ADDRESS: u32 = ROOT_HASH_ADDRESS_BASE + 4 * 8;
pub const REGISTERS_OFFSET: usize = 0x400;
/// Operation to memory access, Read/Write
#[derive(Copy, Clone, Debug)]
pub enum MemoryOperation {
Read,
Write,
}
pub const SPONGE_RATE: usize = 8;
pub const SPONGE_CAPACITY: usize = 4;
pub const SPONGE_WIDTH: usize = SPONGE_RATE + SPONGE_CAPACITY;
pub(crate) const POSEIDON_WIDTH_BYTES: usize = 48; // 12 * 4
pub(crate) const POSEIDON_WIDTH_U32S: usize = POSEIDON_WIDTH_BYTES / 4;
pub(crate) const POSEIDON_WIDTH_MINUS_DIGEST: usize = SPONGE_WIDTH - POSEIDON_DIGEST;
pub(crate) const POSEIDON_RATE_BYTES: usize = SPONGE_RATE * 4;
pub(crate) const POSEIDON_RATE_U32S: usize = POSEIDON_RATE_BYTES / 4;
pub(crate) const POSEIDON_CAPACITY_BYTES: usize = 64;
pub(crate) const POSEIDON_CAPACITY_U32S: usize = POSEIDON_CAPACITY_BYTES / 4;
pub(crate) const POSEIDON_DIGEST_BYTES: usize = 32;
pub(crate) const POSEIDON_DIGEST: usize = 4;
pub fn poseidon(inputs: &[u8]) -> [u64; POSEIDON_DIGEST] {
let l = inputs.len();
let chunks = l / POSEIDON_RATE_BYTES + 1;
let mut input = inputs.to_owned();
input.resize(chunks * POSEIDON_RATE_BYTES, 0);
// pad10*1 rule
if l % POSEIDON_RATE_BYTES == POSEIDON_RATE_BYTES - 1 {
// Both 1s are placed in the same byte.
input[l] = 0b10000001;
} else {
input[l] = 1;
input[chunks * POSEIDON_RATE_BYTES - 1] = 0b10000000;
}
let mut state: [GoldilocksField; 12] = [PackedField::ZEROS; SPONGE_WIDTH];
for block in input.chunks(POSEIDON_RATE_BYTES) {
let block_u32s = (0..SPONGE_RATE)
.map(|i| {
Field::from_canonical_u32(u32::from_le_bytes(
block[i * 4..(i + 1) * 4].to_vec().try_into().unwrap(),
))
})
.collect_vec();
state[..SPONGE_RATE].copy_from_slice(&block_u32s);
let output = Poseidon::poseidon(state);
state.copy_from_slice(&output);
}
let hash = state
.iter()
.take(POSEIDON_DIGEST)
.map(|x| x.to_canonical_u64())
.collect_vec();
hash.try_into().unwrap()
}
pub fn hash_page(data: &[u8; 4096]) -> [u8; 32] {
let hash_u64s = poseidon(data);
let hash = hash_u64s
.iter()
.flat_map(|&num| num.to_le_bytes())
.collect::<Vec<_>>();
hash.try_into().unwrap()
}
fn zero_hash() -> [u8; 32] {
let zeros = [0u8; 4096];
hash_page(&zeros)
}
fn compute_const_hash_pages(hash: &mut [[u8; 4096]; 3], level: usize) -> [u8; 32] {
if level == 0 {
return zero_hash();
}
let base_hash = compute_const_hash_pages(hash, level - 1);
log::trace!("level {} base hash {:?}", level - 1, base_hash);
for i in 0..(4096 >> 5) {
hash[level - 1][i << 5..(i << 5) + 32].copy_from_slice(&base_hash);
}
hash_page(&hash[level - 1])
}
lazy_static! {
static ref CONST_HASH_PAGES: [[u8; 4096]; 3] = {
let mut hash = [[0u8; 4096]; 3];
let _ = compute_const_hash_pages(&mut hash, 3);
hash
};
}
#[derive(Debug)]
pub struct Memory {
/// page index -> cached page
pages: BTreeMap<u32, Rc<RefCell<CachedPage>>>,
// two caches: we often read instructions from one page, and do memory things with another page.
// this prevents map lookups each instruction
last_page_keys: [Option<u32>; 2],
last_page: [Option<Rc<RefCell<CachedPage>>>; 2],
// for implement std::io::Read trait
addr: u32,
count: u32,
rtrace: BTreeMap<u32, [u8; PAGE_SIZE]>,
wtrace: [BTreeMap<u32, Rc<RefCell<CachedPage>>>; 3],
}
pub fn hash_cached_page(page: &Rc<RefCell<CachedPage>>) -> [u8; 32] {
let data = page.borrow().data;
hash_page(&data)
}
impl Default for Memory {
fn default() -> Self {
Self::new()
}
}
impl Memory {
pub fn new() -> Self {
Self {
pages: BTreeMap::new(),
last_page_keys: Default::default(), // default to invalid keys, to not match any pages
last_page: Default::default(),
addr: 0,
count: 0,
rtrace: BTreeMap::new(),
wtrace: [BTreeMap::new(), BTreeMap::new(), BTreeMap::new()],
}
}
pub fn page_count(&self) -> u64 {
self.rtrace.len() as u64
}
pub fn for_each_page<T: Fn(u32, &Rc<RefCell<CachedPage>>) -> Result<(), String>>(
&mut self,
handler: T,
) -> Result<(), String> {
for (page_index, cached_page) in self.pages.iter() {
let r = handler(*page_index, cached_page);
r?
}
Ok(())
}
fn page_lookup(&mut self, page_index: u32) -> Option<Rc<RefCell<CachedPage>>> {
// find cache first
if Some(page_index) == self.last_page_keys[0] {
return self.last_page[0].clone();
}
if Some(page_index) == self.last_page_keys[1] {
return self.last_page[1].clone();
}
match self.pages.get(&page_index) {
None => None,
Some(cached_page) => {
self.last_page_keys[1] = self.last_page_keys[0];
self.last_page[1] = self.last_page[0].take();
self.last_page_keys[0] = Some(page_index);
self.last_page[0] = Some(cached_page.clone());
self.last_page[0].clone()
}
}
}
pub fn set_hash_trace<'a>(&mut self, page_index: u32, level: usize) {
let hash_addr = (page_index << 5) + MAX_MEMORY as u32;
let page_index = hash_addr >> PAGE_ADDR_SIZE;
let cached_page: Option<Rc<RefCell<CachedPage>>> = self.page_lookup(page_index);
let page = match cached_page {
None => self.alloc_hash_page(page_index, level),
Some(page) => page,
};
self.rtrace
.entry(page_index)
.or_insert_with(|| page.borrow().clone().data);
if level < 2 {
self.set_hash_trace(page_index, level + 1);
}
}
pub fn get_memory(&mut self, addr: u32) -> u32 {
// addr must be aligned to 4 bytes
if addr & 0x3 != 0 {
panic!("unaligned memory access: {:x?}", addr);
}
let page_index = addr >> PAGE_ADDR_SIZE;
match self.page_lookup(page_index) {
None => {
self.rtrace.insert(page_index, [0u8; PAGE_SIZE]);
self.set_hash_trace(page_index, 0);
0u32
}
Some(cached_page) => {
let cached_page = cached_page.borrow();
// lookup in page
let page_addr = (addr as usize) & PAGE_ADDR_MASK;
if let std::collections::btree_map::Entry::Vacant(e) = self.rtrace.entry(page_index)
{
e.insert(cached_page.data);
self.set_hash_trace(page_index, 0);
};
u32::from_be_bytes(
(&cached_page.data[page_addr..page_addr + 4])
.try_into()
.unwrap(),
)
}
}
}
pub fn byte(&mut self, addr: u32) -> u8 {
let word = self.get_memory(addr & 0xFFFFFFFC);
word.to_be_bytes()[(addr & 3) as usize]
}
fn alloc_page(&mut self, page_index: u32) -> Rc<RefCell<CachedPage>> {
let cached_page = Rc::new(RefCell::new(CachedPage::new()));
self.pages.insert(page_index, cached_page.clone());
cached_page
}
pub fn set_memory(&mut self, addr: u32, v: u32) {
// addr must be aligned to 4 bytes
if addr & 0x3 != 0 {
panic!("unaligned memory access: {:x?}", addr);
}
if addr as usize >= MAX_MEMORY {
log::warn!("write out of memory: {:x?}", addr);
}
let page_index = addr >> PAGE_ADDR_SIZE;
let page_addr = (addr as usize) & PAGE_ADDR_MASK;
let cached_page = match self.page_lookup(page_index) {
None => {
// allocate the page if we have not already
// Golang may mmap relatively large ranges, but we only allocate just in time.
self.alloc_page(page_index)
}
Some(cached_page) => {
// self.invalidate(addr);
cached_page
}
};
if let std::collections::btree_map::Entry::Vacant(e) = self.rtrace.entry(page_index) {
e.insert(cached_page.borrow().data);
self.set_hash_trace(page_index, 0);
};
self.wtrace[0].insert(page_index, cached_page.clone());
let mut cached_page = cached_page.borrow_mut();
cached_page.data[page_addr..page_addr + 4].copy_from_slice(&v.to_be_bytes());
}
pub fn usage(&self) -> String {
let total = self.pages.len() * PAGE_SIZE;
let unit = (1 << 10) as usize;
if total < unit {
return format!("{} B", total);
}
// KiB, MiB, GiB, TiB, ...
let (mut div, mut exp) = (unit, 0usize);
let mut n = total / div;
while n >= unit {
div *= unit;
exp += 1;
n /= unit;
}
let exp_table = b"KMGTPE";
format!("{}, {}iB", total / div, exp_table[exp] as char)
}
pub fn read_memory_range(&mut self, addr: u32, count: u32) {
self.addr = addr;
self.count = count;
}
pub fn init_memory<'a>(&mut self, addr: u32, v: u32) {
let page_index = addr >> PAGE_ADDR_SIZE;
let page_addr = (addr as usize) & PAGE_ADDR_MASK;
let cached_page = match self.page_lookup(page_index) {
None => {
// allocate the page if we have not already
// Golang may mmap relatively large ranges, but we only allocate just in time.
self.alloc_page(page_index)
}
Some(cached_page) => {
// self.invalidate(addr);
cached_page
}
};
let mut cached_page = cached_page.borrow_mut();
cached_page.data[page_addr..page_addr + 4].copy_from_slice(&v.to_le_bytes());
}
pub fn set_memory_range<'a>(
&mut self,
mut addr: u32,
mut r: Box<dyn Read + 'a>,
) -> Result<(), std::io::ErrorKind> {
loop {
if addr as usize >= MAX_MEMORY {
log::warn!("read out of memory: {:x?}", addr);
}
let page_index = addr >> PAGE_ADDR_SIZE;
let page_addr = addr & (PAGE_ADDR_MASK as u32);
let cached_page = self.page_lookup(page_index);
let page = match cached_page {
None => self.alloc_page(page_index),
Some(page) => page,
};
if let std::collections::btree_map::Entry::Vacant(e) = self.rtrace.entry(page_index) {
e.insert(page.borrow().data);
self.set_hash_trace(page_index, 0);
};
self.wtrace[0].insert(page_index, page.clone());
let mut page = page.borrow_mut();
let n = r.read(&mut page.data[(page_addr as usize)..]).unwrap();
if n == 0 {
return Ok(());
}
addr += n as u32;
}
}
fn alloc_hash_page(&mut self, page_index: u32, level: usize) -> Rc<RefCell<CachedPage>> {
let cached_page = Rc::new(RefCell::new(CachedPage::new()));
cached_page.borrow_mut().data[0..PAGE_SIZE].copy_from_slice(&CONST_HASH_PAGES[level]);
self.pages.insert(page_index, cached_page.clone());
cached_page
}
pub fn set_hash_range<'a>(
&mut self,
page_index: u32,
page_hash: [u8; 32],
level: usize,
) -> Result<(), std::io::ErrorKind> {
let hash_addr = (page_index << 5) + MAX_MEMORY as u32;
let page_index = hash_addr >> PAGE_ADDR_SIZE;
let hash_offset = hash_addr as usize & PAGE_ADDR_MASK;
let cached_page: Option<Rc<RefCell<CachedPage>>> = self.page_lookup(page_index);
let page = match cached_page {
None => self.alloc_hash_page(page_index, level),
Some(page) => page,
};
log::trace!("{:X} hash : {:?}", hash_addr, page_hash);
page.borrow_mut().data[hash_offset..hash_offset + 32].copy_from_slice(&page_hash);
if level < 2 {
self.wtrace[level + 1].insert(page_index, page.clone());
}
Ok(())
}
// return image id and page hash root
pub fn update_page_hash(&mut self) {
// MAIN MEMORY 0 .. 0x80000000
for (page_index, cached_page) in self.wtrace[0].clone().iter() {
let _ = self.set_hash_range(*page_index, hash_page(&cached_page.borrow().data), 0);
}
self.wtrace[0].clear();
// L1 HASH PAGES 0x80000000.. 0x81000000
for (page_index, cached_page) in self.wtrace[1].clone().iter() {
let _ = self.set_hash_range(*page_index, hash_page(&cached_page.borrow().data), 1);
}
self.wtrace[1].clear();
// L2 HASH PAGES 0x81000000.. 0x81020000
for (page_index, cached_page) in self.wtrace[2].clone().iter() {
let _ = self.set_hash_range(*page_index, hash_page(&cached_page.borrow().data), 2);
}
self.wtrace[2].clear();
}
    /// Compute the image id from the current root hash page and `pc`.
    ///
    /// The register image is written into the root page first (so it is
    /// covered by the hash), then
    /// `image_id = poseidon(word_byte_swapped(root_hash) || pc_le)`.
    /// Returns `(image_id, page_root_hash)`.
    ///
    /// # Panics
    /// Panics if the root page (0x81020) has not been allocated yet.
    pub fn compute_image_id(&mut self, pc: u32, registers: &[u8; 39 * 4]) -> ([u8; 32], [u8; 32]) {
        // ROOT PAGES 0x81020000.. 0x81020400
        let root_page = 0x81020u32;
        let hash = match self.pages.get(&root_page) {
            None => {
                panic!("compute image ID fail")
            }
            Some(page) => {
                // Registers live inside the root page so the hash commits to them.
                page.borrow_mut().data[REGISTERS_OFFSET..REGISTERS_OFFSET + 39 * 4]
                    .copy_from_slice(registers);
                hash_page(&page.borrow().data)
            }
        };
        let mut final_data = [0u8; 36];
        // Byte-swap each word of the hash (read LE, write BE) before hashing.
        // Assumes WORD_SIZE == 4 so the 32 hash bytes fill final_data[..32].
        for i in (0..32).step_by(WORD_SIZE) {
            let data = u32::from_le_bytes(hash[i..i + WORD_SIZE].try_into().unwrap());
            final_data[i..i + WORD_SIZE].copy_from_slice(&data.to_be_bytes());
        }
        final_data[32..].copy_from_slice(&pc.to_le_bytes());
        let image_id_u64s = poseidon(&final_data);
        let image_id = image_id_u64s
            .iter()
            .flat_map(|&num| num.to_le_bytes())
            .collect::<Vec<_>>();
        log::trace!("page root hash: {:?}", hash);
        log::trace!("end pc: {:?}", pc.to_le_bytes());
        log::trace!("image id: {:?}", image_id);
        (image_id.try_into().unwrap(), hash)
    }
pub fn check_image_id(&mut self, pc: u32, image_id: [u8; 32]) {
// MAIN MEMORY 0 .. 0x80000000
for (page_index, cached_page) in self.pages.clone().iter() {
if *page_index == 0x81020u32 {
let root_page = 0x81020u32;
let hash = match self.pages.get(&root_page) {
None => {
panic!("compute image ID fail")
}
Some(page) => hash_page(&page.borrow().data),
};
let mut final_data = [0u8; 36];
final_data[0..4].copy_from_slice(&pc.to_be_bytes());
final_data[4..36].copy_from_slice(&hash);
let real_image_id_u64s = poseidon(&final_data);
let real_image_id = real_image_id_u64s
.iter()
.flat_map(|&num| num.to_le_bytes())
.collect::<Vec<_>>();
let real_image_id: [u8; 32] = real_image_id.try_into().unwrap();
if image_id != real_image_id {
log::error!("image_id not match {:?} {:?}", image_id, real_image_id);
}
} else {
let hash = hash_page(&cached_page.borrow().data);
let hash_addr = (page_index << 5) + MAX_MEMORY as u32;
let mut saved_hash = [0u8; 32];
saved_hash[0..4].copy_from_slice(&self.get_memory(hash_addr).to_be_bytes());
saved_hash[4..8].copy_from_slice(&self.get_memory(hash_addr + 4).to_be_bytes());
saved_hash[8..12].copy_from_slice(&self.get_memory(hash_addr + 8).to_be_bytes());
saved_hash[12..16].copy_from_slice(&self.get_memory(hash_addr + 12).to_be_bytes());
saved_hash[16..20].copy_from_slice(&self.get_memory(hash_addr + 16).to_be_bytes());
saved_hash[20..24].copy_from_slice(&self.get_memory(hash_addr + 20).to_be_bytes());
saved_hash[24..28].copy_from_slice(&self.get_memory(hash_addr + 24).to_be_bytes());
saved_hash[28..32].copy_from_slice(&self.get_memory(hash_addr + 28).to_be_bytes());
if hash != saved_hash {
log::error!(
"{:X} hash not match {:?} {:?}",
page_index,
hash,
saved_hash
);
}
}
}
}
pub fn get_input_image(&mut self) -> BTreeMap<u32, u32> {
let mut image = BTreeMap::<u32, u32>::new();
for (page_index, cached_page) in self.rtrace.iter() {
let addr = page_index << 12;
for i in 0..(PAGE_SIZE / 4) {
let mut bytes = [0u8; 4];
bytes.copy_from_slice(&cached_page[i << 2..(i << 2) + 4]);
image.insert(addr + (i << 2) as u32, u32::from_le_bytes(bytes));
}
}
self.rtrace.clear();
image
}
    /// Dump every cached page into a word-addressed image
    /// (address -> little-endian word).
    pub fn get_total_image(&mut self) -> BTreeMap<u32, u32> {
        let mut image = BTreeMap::<u32, u32>::new();
        for (page_index, cached_page) in self.pages.iter() {
            let addr = page_index << 12;
            for i in 0..(PAGE_SIZE / 4) {
                let mut bytes = [0u8; 4];
                bytes.copy_from_slice(&cached_page.borrow().data[i << 2..(i << 2) + 4]);
                image.insert(addr + (i << 2) as u32, u32::from_le_bytes(bytes));
            }
        }
        // NOTE(review): clearing `rtrace` here looks copy-pasted from
        // `get_input_image` (this method reads `pages`, not `rtrace`) —
        // confirm the clear is intended for a *total* dump.
        self.rtrace.clear();
        image
    }
}
/// Streams a memory window previously armed by `read_memory_range`
/// (`self.addr` / `self.count`). Each `read` serves at most the remainder
/// of the current page; unallocated pages read as zeroes.
impl Read for Memory {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        if self.count == 0 {
            return Ok(0usize);
        }
        let end_addr = self.addr + self.count;
        let page_index = self.addr >> PAGE_ADDR_SIZE;
        // todo: fix bug, read too much
        // `end` defaults to the page boundary; if the whole remaining window
        // fits in this page, clamp it to the window's end offset instead.
        let (start, mut end) = (self.addr & (PAGE_ADDR_MASK as u32), PAGE_SIZE as u32);
        if page_index == (end_addr >> PAGE_ADDR_SIZE) {
            end = end_addr & (PAGE_ADDR_MASK as u32);
        }
        let cached_page: Option<Rc<RefCell<CachedPage>>> = self.page_lookup(page_index);
        let n = match cached_page {
            None => {
                // Page never allocated: serve zeroes.
                let size = buf.len().min((end - start) as usize);
                for (_, element) in buf.iter_mut().enumerate().take(size) {
                    *element = 0;
                }
                size
            }
            Some(cached_page) => {
                let page = cached_page.borrow_mut();
                let size = buf.len().min((end - start) as usize);
                buf[0..size].copy_from_slice(&page.data[(start as usize)..(start as usize + size)]);
                size
            }
        };
        self.addr += n as u32;
        self.count -= n as u32;
        Ok(n)
    }
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/emulator/src/state.rs | emulator/src/state.rs | use crate::memory::{Memory, INIT_SP, POSEIDON_RATE_BYTES};
use crate::page::{PAGE_ADDR_MASK, PAGE_SIZE};
use elf::abi::{PT_LOAD, PT_TLS};
use elf::endian::AnyEndian;
use log::{trace, warn};
use serde::de::DeserializeOwned;
use serde::{Deserialize, Serialize};
use std::collections::BTreeMap;
use std::fmt::{Display, Formatter};
use std::fs;
use std::fs::File;
use std::io::BufReader;
use std::io::{stderr, stdout, Read, Write};
use std::path::Path;
// Well-known file descriptors recognized by the syscall layer.
pub const FD_STDIN: u32 = 0;
pub const FD_STDOUT: u32 = 1;
pub const FD_STDERR: u32 = 2;
pub const FD_PUBLIC_VALUES: u32 = 3;
pub const FD_HINT: u32 = 4;
/// MIPS errno for "bad file descriptor".
pub const MIPS_EBADF: u32 = 9;
/// Address of the serialized register image inside the root page
/// (see `State::get_registers_bytes` for the 39-word layout).
pub const REGISTERS_START: u32 = 0x81020400u32;
// Cycle-cost model used for segment sizing.
pub const PAGE_LOAD_CYCLES: u64 = 128;
pub const PAGE_HASH_CYCLES: u64 = 1;
pub const PAGE_CYCLES: u64 = PAGE_LOAD_CYCLES + PAGE_HASH_CYCLES;
pub const IMAGE_ID_CYCLES: u64 = 3;
/// Worst-case cycles a single instruction may cost (touches up to 6 pages).
pub const MAX_INSTRUCTION_CYCLES: u64 = PAGE_CYCLES * 6; //TOFIX
pub const RESERVE_CYCLES: u64 = IMAGE_ID_CYCLES + MAX_INSTRUCTION_CYCLES;
use keccak_hash::keccak;
// image_id = keccak(page_hash_root || end_pc)
/// One serialized execution segment: the memory image and control state
/// needed to resume at `pc` plus the expected post-state identifiers.
#[derive(Serialize, Deserialize, Debug, PartialEq, Eq, Clone, Default)]
pub struct Segment {
    /// Word-addressed memory image (address -> word) at segment start.
    pub mem_image: BTreeMap<u32, u32>,
    /// Program counter at segment start.
    pub pc: u32,
    pub segment_id: u32,
    /// Image id / page-hash root at segment start.
    pub pre_image_id: [u8; 32],
    pub pre_hash_root: [u8; 32],
    /// Image id / page-hash root expected at segment end.
    pub image_id: [u8; 32],
    pub page_hash_root: [u8; 32],
    pub end_pc: u32,
    /// Total step count recorded when this segment was cut.
    pub step: u64,
    // Host I/O stream snapshots so execution can resume mid-stream.
    pub input_stream: Vec<Vec<u8>>,
    pub input_stream_ptr: usize,
    pub public_values_stream: Vec<u8>,
    pub public_values_stream_ptr: usize,
}
/// Full architectural + host state of the MIPS emulator.
pub struct State {
    /// Paged, hash-traced memory backing the machine.
    pub memory: Box<Memory>,
    /// the 32 general purpose registers of MIPS.
    pub registers: [u32; 32],
    /// the pc register stores the current execution instruction address.
    pub pc: u32,
    /// the next pc stores the next execution instruction address.
    next_pc: u32,
    /// the hi register stores the multiplier/divider result high(remainder) part.
    hi: u32,
    /// the low register stores the multiplier/divider result low(quotient) part.
    lo: u32,
    /// heap handles the mmap syscall.
    heap: u32,
    /// brk handles the brk syscall
    brk: u32,
    /// tlb addr
    local_user: u32,
    /// step tracks the total step has been executed.
    pub step: u64,
    /// Steps accumulated across all previously completed segments.
    pub total_step: u64,
    /// cycle tracks the total cycle has been executed.
    pub cycle: u64,
    /// Cycles accumulated across all previously completed segments.
    pub total_cycle: u64,
    /// A stream of input values (global to the entire program).
    pub input_stream: Vec<Vec<u8>>,
    /// A ptr to the current position in the input stream incremented by HINT_READ opcode.
    pub input_stream_ptr: usize,
    /// A stream of public values from the program (global to entire program).
    pub public_values_stream: Vec<u8>,
    /// A ptr to the current position in the public values stream, incremented when reading from public_values_stream.
    pub public_values_stream_ptr: usize,
    /// Set when the program issues an exit syscall; `exit_code` carries its status.
    pub exited: bool,
    pub exit_code: u8,
    // Debug flag toggled on syscalls; not part of the architectural state.
    dump_info: bool,
}
/// Reading from a `State` drains its public-values stream; the underlying
/// slice copy panics (via `read_public_values_slice`) if too few bytes remain.
impl Read for State {
    fn read(&mut self, buf: &mut [u8]) -> std::io::Result<usize> {
        let requested = buf.len();
        self.read_public_values_slice(buf);
        // The slice read either fills `buf` completely or panics.
        Ok(requested)
    }
}
/// Multi-line diagnostic rendering of the machine state (registers plus a
/// memory-usage summary); intended for logs, not machine parsing.
impl Display for State {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(
            f,
            "State {{ \n pc: 0x{:x}, next_pc: 0x{:x}, hi: {}, lo: {}, heap: 0x{:x}, step: {}, exited: {}, \
            \n registers: {:?} \
            \n memory: {} \n}}",
            self.pc, self.next_pc, self.hi, self.lo, self.heap, self.step, self.exited, self.registers, self.memory.usage()
        )
    }
}
impl State {
pub fn new() -> Box<Self> {
Box::new(Self {
memory: Box::new(Memory::new()),
registers: Default::default(),
pc: 0,
next_pc: 4,
hi: 0,
lo: 0,
heap: 0,
local_user: 0,
step: 0,
total_step: 0,
cycle: 0,
total_cycle: 0,
brk: 0,
input_stream: Vec::new(),
input_stream_ptr: 0,
public_values_stream: Vec::new(),
public_values_stream_ptr: 0,
exited: false,
exit_code: 0,
dump_info: false,
})
}
    /// Restore a `State` from a serialized `Segment` JSON file.
    ///
    /// Returns the state plus the total step count recorded in the segment.
    /// Registers are recovered from the image at `REGISTERS_START` using the
    /// 39-slot layout written by `get_registers_bytes`
    /// (0..32 = GPRs, 32 = lo, 33 = hi, 34 = heap, 36 = next_pc, 37 = brk,
    /// 38 = local_user); slot 35 (pc) is taken from `segment.pc` instead.
    /// `.to_be()` byte-swaps each word back to host order, since the image
    /// stores register words big-endian.
    pub fn load_seg(seg_path: &str) -> (Box<Self>, u64) {
        let reader = BufReader::new(File::open(seg_path).unwrap());
        let segment: Segment = serde_json::from_reader(reader).unwrap();
        let mut s = Box::new(Self {
            memory: Box::new(Memory::new()),
            registers: Default::default(),
            pc: segment.pc,
            next_pc: 0,
            hi: 0,
            lo: 0,
            heap: 0,
            local_user: 0,
            step: 0,
            total_step: 0,
            cycle: 0,
            total_cycle: 0,
            brk: 0,
            input_stream: segment.input_stream,
            input_stream_ptr: segment.input_stream_ptr,
            public_values_stream: segment.public_values_stream,
            public_values_stream_ptr: segment.public_values_stream_ptr,
            exited: false,
            exit_code: 0,
            dump_info: false,
        });
        let image = segment.mem_image;
        for i in 0..32 {
            let data = image.get(&(REGISTERS_START + (i << 2) as u32)).unwrap();
            s.registers[i] = data.to_be();
        }
        s.lo = image
            .get(&(REGISTERS_START + (32 << 2) as u32))
            .unwrap()
            .to_be();
        s.hi = image
            .get(&(REGISTERS_START + (33 << 2) as u32))
            .unwrap()
            .to_be();
        s.heap = image
            .get(&(REGISTERS_START + (34 << 2) as u32))
            .unwrap()
            .to_be();
        s.next_pc = image
            .get(&(REGISTERS_START + (36 << 2) as u32))
            .unwrap()
            .to_be();
        s.brk = image
            .get(&(REGISTERS_START + (37 << 2) as u32))
            .unwrap()
            .to_be();
        s.local_user = image
            .get(&(REGISTERS_START + (38 << 2) as u32))
            .unwrap()
            .to_be();
        for (addr, data) in image {
            s.memory.init_memory(addr, data);
        }
        (s, segment.step)
    }
pub fn load_elf(f: &elf::ElfBytes<AnyEndian>) -> Box<Self> {
let mut s = Box::new(Self {
memory: Box::new(Memory::new()),
registers: Default::default(),
pc: f.ehdr.e_entry as u32,
next_pc: f.ehdr.e_entry as u32 + 4,
hi: 0,
lo: 0,
heap: 0x20000000,
local_user: 0,
step: 0,
total_step: 0,
cycle: 0,
total_cycle: 0,
brk: 0,
input_stream: Vec::new(),
input_stream_ptr: 0,
public_values_stream: Vec::new(),
public_values_stream_ptr: 0,
exited: false,
exit_code: 0,
dump_info: false,
});
let mut hiaddr = 0u32;
let segments = f
.segments()
.expect("invalid ELF cause failed to parse segments.");
for segment in segments {
if segment.p_type == 0x70000003 {
continue;
}
let r = f
.segment_data(&segment)
.expect("failed to parse segment data");
let mut r = Vec::from(r);
if segment.p_filesz != segment.p_memsz {
if segment.p_type == PT_LOAD || segment.p_type == PT_TLS {
if segment.p_filesz < segment.p_memsz {
let diff = (segment.p_memsz - segment.p_filesz) as usize;
r.extend_from_slice(vec![0u8; diff].as_slice());
} else {
panic!(
"invalid PT_LOAD program segment, file size ({}) > mem size ({})",
segment.p_filesz, segment.p_memsz
);
}
} else {
panic!("has different file size ({}) than mem size ({}): filling for non PT_LOAD segments is not supported",
segment.p_filesz, segment.p_memsz);
}
}
if segment.p_vaddr + segment.p_memsz >= 1u64 << 32 {
panic!(
"program %d out of 32-bit mem range: {:x} -{:x} (size: {:x})",
segment.p_vaddr, segment.p_memsz, segment.p_memsz
);
}
let a = (segment.p_vaddr + segment.p_memsz) as u32;
if a > hiaddr {
hiaddr = a;
}
let r: Box<&[u8]> = Box::new(r.as_slice());
s.memory
.set_memory_range(segment.p_vaddr as u32, r)
.expect("failed to set memory range");
}
s.brk = hiaddr - (hiaddr & (PAGE_ADDR_MASK as u32)) + PAGE_SIZE as u32;
s
}
    /// Neutralize runtime symbols that the emulator cannot (or should not)
    /// execute — Go runtime checks, Prometheus initializers, Rust
    /// `sys_common::thread_info` — by overwriting them in memory.
    /// 0x03e00008 is MIPS `jr $ra`; with the trailing nop delay slot the
    /// patched function returns immediately.
    pub fn patch_elf(&mut self, f: &elf::ElfBytes<AnyEndian>) {
        let symbols = f
            .symbol_table()
            .expect("failed to read symbols table, cannot patch program")
            .expect("failed to parse symbols table, cannot patch program");
        for symbol in symbols.0 {
            match symbols.1.get(symbol.st_name as usize) {
                Ok(name) => match name {
                    "runtime.gcenable"
                    | "runtime.init.5"
                    | "runtime.main.func1"
                    | "runtime.deductSweepCredit"
                    | "runtime.(*gcControllerState).commit"
                    | "github.com/prometheus/client_golang/prometheus.init"
                    | "github.com/prometheus/client_golang/prometheus.init.0"
                    | "github.com/prometheus/procfs.init"
                    | "github.com/prometheus/common/model.init"
                    | "github.com/prometheus/client_model/go.init"
                    | "github.com/prometheus/client_model/go.init.0"
                    | "github.com/prometheus/client_model/go.init.1"
                    | "flag.init"
                    | "runtime.check"
                    | "runtime.checkfds"
                    | "_dl_discover_osversion" => {
                        // Stub the whole function: jr $ra + nop.
                        log::debug!("patch {} at {:X}", name, symbol.st_value);
                        let r: Vec<u8> = vec![0x03, 0xe0, 0x00, 0x08, 0, 0, 0, 0];
                        let r = Box::new(r.as_slice());
                        self.memory
                            .set_memory_range(symbol.st_value as u32, r)
                            .expect("set memory range failed");
                    }
                    "runtime.MemProfileRate" => {
                        // Zero the variable to disable Go memory profiling.
                        let r: Vec<u8> = vec![0, 0, 0, 0];
                        let r = Box::new(r.as_slice());
                        self.memory
                            .set_memory_range(symbol.st_value as u32, r)
                            .expect("set memory range failed");
                    }
                    _ => {
                        if name.contains("sys_common") && name.contains("thread_info") {
                            log::debug!("patch {}", name);
                            let r: Vec<u8> = vec![0x03, 0xe0, 0x00, 0x08, 0, 0, 0, 0];
                            let r = Box::new(r.as_slice());
                            self.memory
                                .set_memory_range(symbol.st_value as u32, r)
                                .expect("set memory range failed");
                        }
                    }
                },
                Err(e) => {
                    warn!("parse symbol failed, {}", e);
                    continue;
                }
            }
        }
    }
    /// We define the input[0] as the public input, and input[1] as the private input
    ///
    /// Lays out the initial process stack at `INIT_SP`: argc, argv pointers,
    /// an empty envp, a minimal auxv (page size, uids, AT_RANDOM, ...), the
    /// 16 fixed "random" bytes, and finally the argument strings themselves.
    pub fn patch_stack(&mut self, input: Vec<&str>) {
        assert!(input.len() <= 2);
        // TODO: check the arg size should less than one page??
        // setup stack pointer
        let sp: u32 = INIT_SP;
        // allocate 1 page for the initial stack data, and 16kb = 4 pages for the stack to grow
        let r: Vec<u8> = vec![0; 5 * PAGE_SIZE];
        let r: Box<&[u8]> = Box::new(r.as_slice());
        let addr = sp - 4 * PAGE_SIZE as u32;
        self.memory
            .set_memory_range(addr, r)
            .expect("failed to set memory range");
        // Register 29 is the MIPS stack pointer ($sp).
        self.registers[29] = sp;
        // Helper: store one big-endian word at `addr`.
        let mut store_mem = |addr: u32, v: u32| {
            let mut dat = [0u8; 4];
            dat.copy_from_slice(&v.to_be_bytes());
            let r = Box::new(dat.as_slice());
            self.memory
                .set_memory_range(addr, r)
                .expect("failed to set memory range");
        };
        let mut items: BTreeMap<u32, &str> = BTreeMap::new();
        let mut index = 0;
        for item in input {
            items.insert(index, item);
            index += 1u32;
        }
        log::debug!("count {} items {:?}", index, items);
        // init argc, argv, aux on stack
        store_mem(sp, index);
        // argv pointer slots are filled later by store_mem_str; skip past them.
        let mut cur_sp = sp + 4 * (index + 1);
        store_mem(cur_sp, 0x00); // argv[n] = 0 (terminating argv)
        cur_sp += 4;
        store_mem(cur_sp, 0x00); // envp[term] = 0 (no env vars)
        cur_sp += 4;
        store_mem(cur_sp, 0x06); // auxv[0] = _AT_PAGESZ = 6 (key)
        store_mem(cur_sp + 4, 0x1000); // auxv[1] = page size of 4 KiB (value)
        cur_sp += 8;
        store_mem(cur_sp, 0x0b); // auxv[0] = AT_UID = 11 (key)
        store_mem(cur_sp + 4, 0x3e8); // auxv[1] = Real uid (value)
        cur_sp += 8;
        store_mem(cur_sp, 0x0c); // auxv[0] = AT_EUID = 12 (key)
        store_mem(cur_sp + 4, 0x3e8); // auxv[1] = Effective uid (value)
        cur_sp += 8;
        store_mem(cur_sp, 0x0d); // auxv[0] = AT_GID = 13 (key)
        store_mem(cur_sp + 4, 0x3e8); // auxv[1] = Real gid (value)
        cur_sp += 8;
        store_mem(cur_sp, 0x0e); // auxv[0] = AT_EGID = 14 (key)
        store_mem(cur_sp + 4, 0x3e8); // auxv[1] = Effective gid (value)
        cur_sp += 8;
        store_mem(cur_sp, 0x10); // auxv[0] = AT_HWCAP = 16 (key)
        store_mem(cur_sp + 4, 0x00); // auxv[1] = arch dependent hints at CPU capabilities (value)
        cur_sp += 8;
        store_mem(cur_sp, 0x11); // auxv[0] = AT_CLKTCK = 17 (key)
        store_mem(cur_sp + 4, 0x64); // auxv[1] = Frequency of times() (value)
        cur_sp += 8;
        store_mem(cur_sp, 0x17); // auxv[0] = AT_SECURE = 23 (key)
        store_mem(cur_sp + 4, 0x00); // auxv[1] = secure mode boolean (value)
        cur_sp += 8;
        store_mem(cur_sp, 0x19); // auxv[4] = AT_RANDOM = 25 (key)
        store_mem(cur_sp + 4, cur_sp + 12); // auxv[5] = address of 16 bytes containing random value
        cur_sp += 8;
        store_mem(cur_sp, 0); // auxv[term] = 0
        cur_sp += 4;
        // The 16 fixed bytes AT_RANDOM points at (deterministic for proving).
        store_mem(cur_sp, 0x5f28df1d); // auxv[term] = 0
        store_mem(cur_sp + 4, 0x2cd1002a); // auxv[term] = 0
        store_mem(cur_sp + 8, 0x5ff9f682); // auxv[term] = 0
        store_mem(cur_sp + 12, 0xd4d8d538); // auxv[term] = 0
        cur_sp += 16;
        store_mem(cur_sp, 0x00); // auxv[term] = 0
        cur_sp += 4;
        // Helper: write argv pointer `daddr` at `paddr`, then the string bytes at `daddr`.
        let mut store_mem_str = |paddr: u32, daddr: u32, str: &str| {
            let mut dat = [0u8; 4];
            dat.copy_from_slice(&daddr.to_be_bytes());
            let r = Box::new(dat.as_slice());
            self.memory
                .set_memory_range(paddr, r)
                .expect("failed to set memory range");
            let r = Box::new(str.as_bytes());
            log::debug!("Write inputs: {} {:?}", daddr, r);
            self.memory
                .set_memory_range(daddr, r)
                .expect("failed to set memory range");
        };
        for (ind, inp) in items.iter() {
            let index = *ind;
            store_mem_str(sp + 4 * (index + 1), cur_sp, inp);
            // +1 leaves a NUL terminator between strings (memory is pre-zeroed).
            cur_sp += inp.len() as u32 + 1;
        }
    }
pub fn add_input_stream<T: Serialize>(&mut self, input: &T) {
let mut buf = Vec::new();
bincode::serialize_into(&mut buf, input).expect("serialization failed");
self.input_stream.push(buf);
}
pub fn read_public_values<T: DeserializeOwned>(&mut self) -> T {
let result = bincode::deserialize_from::<_, T>(self);
result.unwrap()
}
pub fn read_public_values_slice(&mut self, buf: &mut [u8]) {
let len = buf.len();
let start = self.public_values_stream_ptr;
let end = start + len;
assert!(end <= self.public_values_stream.len());
buf.copy_from_slice(&self.public_values_stream[start..end]);
self.public_values_stream_ptr = end;
}
pub fn load_preimage(&mut self, blockpath: String) {
let mut hash_bytes = [0u8; 32];
for i in 0..8 {
hash_bytes[i * 4..i * 4 + 4].copy_from_slice(
self.memory
.get_memory((0x30001000 + i * 4) as u32)
.to_be_bytes()
.as_ref(),
)
}
let hex_string = hex::encode(hash_bytes);
let mut preiamge_path = blockpath.clone();
preiamge_path.push_str("0x");
preiamge_path.push_str(hex_string.as_str());
let data = fs::read(preiamge_path).expect("could not read file");
let data: Box<&[u8]> = Box::new(data.as_slice());
log::debug!("load preimage {}", data.len());
let data_len = data.len();
self.memory.set_memory(0x31000000, data_len as u32);
self.memory
.set_memory_range(0x31000004, data)
.expect("set memory range failed");
self.cycle += (data_len as u64 + 35) / 32;
let len = data_len & 3;
let end = data_len % POSEIDON_RATE_BYTES;
if len != 0 {
let mut bytes = [0u8; 4];
let final_addr = 0x31000004 + data_len - len;
let word = self.memory.get_memory(final_addr as u32);
bytes[0..len].copy_from_slice(&word.to_be_bytes()[0..len]);
bytes[len] = 1;
if end + 4 > POSEIDON_RATE_BYTES {
bytes[3] |= 0b10000000;
}
self.memory
.set_memory(final_addr as u32, u32::from_be_bytes(bytes));
}
}
pub fn load_input(&mut self, blockpath: &str) {
let input_path = Path::new(blockpath).join("input");
log::trace!("load input: {:?}", input_path);
let data = fs::read(input_path).expect("could not read file");
let data: Box<&[u8]> = Box::new(data.as_slice());
self.memory
.set_memory_range(0x30000000, data)
.expect("set memory range failed");
}
pub fn get_registers_bytes(&mut self) -> [u8; 39 * 4] {
let mut regs_bytes_be = [0u8; 39 * 4];
for i in 0..32 {
regs_bytes_be[i * 4..i * 4 + 4].copy_from_slice(&self.registers[i].to_be_bytes());
}
regs_bytes_be[32 * 4..32 * 4 + 4].copy_from_slice(&self.lo.to_be_bytes());
regs_bytes_be[33 * 4..33 * 4 + 4].copy_from_slice(&self.hi.to_be_bytes());
regs_bytes_be[34 * 4..34 * 4 + 4].copy_from_slice(&self.heap.to_be_bytes());
regs_bytes_be[35 * 4..35 * 4 + 4].copy_from_slice(&self.pc.to_be_bytes());
regs_bytes_be[36 * 4..36 * 4 + 4].copy_from_slice(&self.next_pc.to_be_bytes());
regs_bytes_be[37 * 4..37 * 4 + 4].copy_from_slice(&self.brk.to_be_bytes());
regs_bytes_be[38 * 4..38 * 4 + 4].copy_from_slice(&self.local_user.to_be_bytes());
regs_bytes_be
}
}
pub struct InstrumentedState {
    /// state stores the state of the MIPS emulator
    pub state: Box<State>,
    /// writer for stdout
    stdout_writer: Box<dyn Write>,
    /// writer for stderr
    stderr_writer: Box<dyn Write>,
    /// Id of the most recently emitted segment.
    pub pre_segment_id: u32,
    // Snapshots taken at the previous segment boundary, used when the next
    // segment is cut. (Presumably mirrors the `pre_*` fields of `Segment` —
    // confirm against split_segment, which is defined elsewhere.)
    pre_pc: u32,
    pre_image_id: [u8; 32],
    pre_hash_root: [u8; 32],
    /// Directory used to resolve preimages/inputs for this block.
    block_path: String,
    pre_input: Vec<Vec<u8>>,
    pre_input_ptr: usize,
    pre_public_values: Vec<u8>,
    pre_public_values_ptr: usize,
}
/// Delegates to the inner machine state's `Display`.
impl Display for InstrumentedState {
    fn fmt(&self, f: &mut Formatter<'_>) -> std::fmt::Result {
        write!(f, "state: {}", self.state)
    }
}
/// SHA-256 round constants K[0..64] (FIPS 180-4): the first 32 bits of the
/// fractional parts of the cube roots of the first 64 primes. Used by the
/// SHA_COMPRESS syscall handler.
pub const SHA_COMPRESS_K: [u32; 64] = [
    0x428a2f98, 0x71374491, 0xb5c0fbcf, 0xe9b5dba5, 0x3956c25b, 0x59f111f1, 0x923f82a4, 0xab1c5ed5,
    0xd807aa98, 0x12835b01, 0x243185be, 0x550c7dc3, 0x72be5d74, 0x80deb1fe, 0x9bdc06a7, 0xc19bf174,
    0xe49b69c1, 0xefbe4786, 0x0fc19dc6, 0x240ca1cc, 0x2de92c6f, 0x4a7484aa, 0x5cb0a9dc, 0x76f988da,
    0x983e5152, 0xa831c66d, 0xb00327c8, 0xbf597fc7, 0xc6e00bf3, 0xd5a79147, 0x06ca6351, 0x14292967,
    0x27b70a85, 0x2e1b2138, 0x4d2c6dfc, 0x53380d13, 0x650a7354, 0x766a0abb, 0x81c2c92e, 0x92722c85,
    0xa2bfe8a1, 0xa81a664b, 0xc24b8b70, 0xc76c51a3, 0xd192e819, 0xd6990624, 0xf40e3585, 0x106aa070,
    0x19a4c116, 0x1e376c08, 0x2748774c, 0x34b0bcb5, 0x391c0cb3, 0x4ed8aa4a, 0x5b9cca4f, 0x682e6ff3,
    0x748f82ee, 0x78a5636f, 0x84c87814, 0x8cc70208, 0x90befffa, 0xa4506ceb, 0xbef9a3f7, 0xc67178f2,
];
impl InstrumentedState {
pub fn new(state: Box<State>, block_path: String) -> Box<Self> {
Box::new(Self {
state,
stdout_writer: Box::new(stdout()),
stderr_writer: Box::new(stderr()),
block_path,
pre_pc: 0u32,
pre_image_id: [0u8; 32],
pre_hash_root: [0u8; 32],
pre_segment_id: 0u32,
pre_input: Vec::new(),
pre_input_ptr: 0,
pre_public_values: Vec::new(),
pre_public_values_ptr: 0,
})
}
fn handle_syscall(&mut self) {
let syscall_num = self.state.registers[2]; // v0
let mut v0 = 0u32;
let mut v1 = 0u32;
let a0 = self.state.registers[4];
let a1 = self.state.registers[5];
let a2 = self.state.registers[6];
self.state.dump_info = true;
log::debug!("syscall {} {} {} {}", syscall_num, a0, a1, a2);
match syscall_num {
0x300105 => {
// SHA_EXTEND
let w_ptr = a0;
assert!(a1 == 0, "arg2 must be 0");
for i in 16..64 {
// Read w[i-15].
let w_i_minus_15 = self.state.memory.get_memory(w_ptr + (i - 15) * 4);
// Compute `s0`.
let s0 = w_i_minus_15.rotate_right(7)
^ w_i_minus_15.rotate_right(18)
^ (w_i_minus_15 >> 3);
// Read w[i-2].
let w_i_minus_2 = self.state.memory.get_memory(w_ptr + (i - 2) * 4);
// Compute `s1`.
let s1 = w_i_minus_2.rotate_right(17)
^ w_i_minus_2.rotate_right(19)
^ (w_i_minus_2 >> 10);
// Read w[i-16].
let w_i_minus_16 = self.state.memory.get_memory(w_ptr + (i - 16) * 4);
// Read w[i-7].
let w_i_minus_7 = self.state.memory.get_memory(w_ptr + (i - 7) * 4);
// Compute `w_i`.
let w_i = s1
.wrapping_add(w_i_minus_16)
.wrapping_add(s0)
.wrapping_add(w_i_minus_7);
// Write w[i].
log::debug!(
"{:X}, {:X}, {:X} {:X} {:X} {:X}",
s1,
s0,
w_i_minus_16,
w_i_minus_7,
w_i_minus_15,
w_i_minus_2
);
self.state.memory.set_memory(w_ptr + i * 4, w_i);
log::debug!("extend write {:X} {:X}", w_ptr + i * 4, w_i);
}
}
0x010106 => {
// SHA_COMPRESS
let w_ptr = a0;
let h_ptr = a1;
let mut hx = [0u32; 8];
for (i, hx_item) in hx.iter_mut().enumerate() {
*hx_item = self.state.memory.get_memory(h_ptr + i as u32 * 4);
}
let mut original_w = Vec::new();
// Execute the "compress" phase.
let mut a = hx[0];
let mut b = hx[1];
let mut c = hx[2];
let mut d = hx[3];
let mut e = hx[4];
let mut f = hx[5];
let mut g = hx[6];
let mut h = hx[7];
for i in 0..64 {
let s1 = e.rotate_right(6) ^ e.rotate_right(11) ^ e.rotate_right(25);
let ch = (e & f) ^ (!e & g);
let w_i = self.state.memory.get_memory(w_ptr + i * 4);
original_w.push(w_i);
let temp1 = h
.wrapping_add(s1)
.wrapping_add(ch)
.wrapping_add(SHA_COMPRESS_K[i as usize])
.wrapping_add(w_i);
let s0 = a.rotate_right(2) ^ a.rotate_right(13) ^ a.rotate_right(22);
let maj = (a & b) ^ (a & c) ^ (b & c);
let temp2 = s0.wrapping_add(maj);
h = g;
g = f;
f = e;
e = d.wrapping_add(temp1);
d = c;
c = b;
b = a;
a = temp1.wrapping_add(temp2);
}
// Execute the "finalize" phase.
let v = [a, b, c, d, e, f, g, h];
for i in 0..8 {
self.state
.memory
.set_memory(h_ptr + i as u32 * 4, hx[i].wrapping_add(v[i]));
log::debug!(
"write {:X} {:X}",
h_ptr + i as u32 * 4,
hx[i].wrapping_add(v[i])
);
}
}
0x010109 => {
//keccak
assert!((a0 & 3) == 0);
assert!((a2 & 3) == 0);
let bytes = (0..a1)
.map(|i| self.state.memory.byte(a0 + i))
.collect::<Vec<u8>>();
log::debug!("keccak {:X?}", bytes);
let result = keccak(&bytes).0;
log::debug!("result {:X?}", result);
let result: [u32; 8] = core::array::from_fn(|i| {
u32::from_be_bytes(core::array::from_fn(|j| result[i * 4 + j]))
});
assert!(result.len() == 8);
for (i, data) in result.iter().enumerate() {
self.state.memory.set_memory(a2 + ((i << 2) as u32), *data);
}
}
0xF0 => {
if self.state.input_stream_ptr >= self.state.input_stream.len() {
panic!("not enough vecs in hint input stream");
}
log::debug!(
"hint len {:X}",
self.state.input_stream[self.state.input_stream_ptr].len()
);
v0 = self.state.input_stream[self.state.input_stream_ptr].len() as u32
}
0xF1 => {
log::debug!("{:X} {:X} {:X}", a0, a1, a2);
if self.state.input_stream_ptr >= self.state.input_stream.len() {
warn!("not enough vecs in hint input stream");
}
let vec: &Vec<u8> = &self.state.input_stream[self.state.input_stream_ptr];
self.state.input_stream_ptr += 1;
assert_eq!(
vec.len() as u32,
a1,
"hint input stream read length mismatch"
);
log::debug!("input: {:?}", vec);
assert_eq!(a0 % 4, 0, "hint read address not aligned to 4 bytes");
if a1 >= 1 {
self.state.cycle += (a1 as u64).div_ceil(32);
}
for i in (0..a1).step_by(4) {
// Get each byte in the chunk
let b1 = vec[i as usize];
// In case the vec is not a multiple of 4, right-pad with 0s. This is fine because we
// are assuming the word is uninitialized, so filling it with 0s makes sense.
let b2 = vec.get(i as usize + 1).copied().unwrap_or(0);
let b3 = vec.get(i as usize + 2).copied().unwrap_or(0);
let b4 = vec.get(i as usize + 3).copied().unwrap_or(0);
let word = u32::from_be_bytes([b1, b2, b3, b4]);
// Save the data into runtime state so the runtime will use the desired data instead of
// 0 when first reading/writing from this address.
self.state.memory.set_memory(a0 + i, word);
}
v0 = a2
}
4020 => {
// read preimage (getpid)
self.state.load_preimage(self.block_path.clone())
}
4210 | 4090 => {
// mmap
// args: a0 = heap/hint, indicates mmap heap or hint. a1 = size
let mut size = a1;
if size & (PAGE_ADDR_MASK as u32) != 0 {
// adjust size to align with page size
size += PAGE_SIZE as u32 - (size & (PAGE_ADDR_MASK as u32));
}
if a0 == 0 {
v0 = self.state.heap;
self.state.heap += size;
trace!("mmap heap {:x?} size {:x?}", v0, size);
} else {
v0 = a0;
trace!("mmap hint {:x?} size {:x?}", v0, size);
}
}
4045 => {
// brk
if a0 > self.state.brk {
v0 = a0;
} else {
v0 = self.state.brk;
}
}
4120 => {
// clone
v0 = 1;
}
4246 => {
// exit group
self.state.exited = true;
self.state.exit_code = a0 as u8;
}
4003 => {
// read
// args: a0 = fd, a1 = addr, a2 = count
// returns: v0 = read, v1 = err code
match a0 {
FD_STDIN => {
// leave v0 and v1 zero: read nothing, no error
}
_ => {
v0 = 0xffffffff;
v1 = MIPS_EBADF;
}
}
}
4004 => {
// write
// args: a0 = fd, a1 = addr, a2 = count
// returns: v0 = written, v1 = err code
let bytes = (0..a2)
.map(|i| self.state.memory.byte(a1 + i))
.collect::<Vec<u8>>();
let slice = bytes.as_slice();
match a0 {
// todo: track memory read
FD_STDOUT => {
self.state.memory.read_memory_range(a1, a2);
if let Err(e) =
std::io::copy(self.state.memory.as_mut(), self.stdout_writer.as_mut())
{
panic!("read range from memory failed {}", e);
}
v0 = a2;
}
FD_STDERR => {
self.state.memory.read_memory_range(a1, a2);
if let Err(e) =
std::io::copy(self.state.memory.as_mut(), self.stderr_writer.as_mut())
{
panic!("read range from memory failed {}", e);
}
v0 = a2;
}
FD_PUBLIC_VALUES => {
log::debug!("commit {:X?}", slice);
self.state.public_values_stream.extend_from_slice(slice);
v0 = a2;
}
FD_HINT => {
self.state.input_stream.push(slice.to_vec());
v0 = a2;
}
_ => {
v0 = 0xffffffff;
v1 = MIPS_EBADF;
}
}
}
4055 => {
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | true |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/emulator/src/opcode_id.rs | emulator/src/opcode_id.rs | pub enum OpcodeId {
    // Arithmetic Logic Unit
    ADD,
    ADDU,
    SUB,
    SUBU,
    ADDI,
    ADDIU,
    AND,
    ANDI,
    XOR,
    XORI,
    OR,
    ORI,
    NOR,
    LUI,
    SLT,
    SLTI,
    SLTIU,
    SLTU,
    // Shifter
    SLL,
    SLLV,
    SRA,
    SRAV,
    SRL,
    SRLV,
    // Multiply / divide and their hi/lo accumulator moves
    MULT,
    MULTU,
    DIV,
    DIVU,
    MFHI,
    MFLO,
    MTHI,
    MTLO,
    // Branch, jump, and trap-style control transfer
    BEQ,
    BGEZ,
    BGEZAL,
    BGTZ,
    BLEZ,
    BLTZ,
    BLTZAL,
    BNE,
    J,
    JAL,
    JALR,
    JR,
    // System call (grouped with control transfers: it redirects execution)
    SYSCALL,
    // Memory Access
    LB,
    LBU,
    LH,
    LHU,
    LW,
    SB,
    SH,
    SW,
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/emulator/src/utils.rs | emulator/src/utils.rs | use crate::state::{InstrumentedState, State};
use elf::{endian::AnyEndian, ElfBytes};
use std::fs;
use std::fs::File;
/// Default number of emulator steps per segment (2^16).
pub const SEGMENT_STEPS: usize = 65536;
/// From the minigeth's rule, the `block` starts with `0_`:
/// builds `<basedir>/0_<block>/<file>`.
pub fn get_block_path(basedir: &str, block: &str, file: &str) -> String {
    [basedir, "/0_", block, "/", file].concat()
}
/// Read and parse the ELF at `elf_path`, apply the runtime symbol patches,
/// and lay out the initial stack with `args`. Panics on I/O or parse failure.
pub fn load_elf_with_patch(elf_path: &str, args: Vec<&str>) -> Box<State> {
    let raw = fs::read(elf_path).expect("could not read file");
    let elf = ElfBytes::<AnyEndian>::minimal_parse(&raw).expect("opening elf file failed");
    let mut state = State::load_elf(&elf);
    state.patch_elf(&elf);
    state.patch_stack(args);
    state
}
/// Run `state` to completion, cutting a new segment file into `seg_path`
/// whenever the per-segment cycle count exceeds `seg_size - 1`.
///
/// Returns `(total_steps, last_segment_id, final_state)`.
pub fn split_prog_into_segs(
    state: Box<State>,
    seg_path: &str,
    block_path: &str,
    seg_size: usize,
) -> (usize, usize, Box<State>) {
    let mut instrumented_state = InstrumentedState::new(state, block_path.to_string());
    std::fs::create_dir_all(seg_path).unwrap();
    // First call records the initial boundary without writing a file.
    let new_writer = |_: &str| -> Option<std::fs::File> { None };
    instrumented_state.split_segment(false, seg_path, new_writer);
    let new_writer = |name: &str| -> Option<std::fs::File> { File::create(name).ok() };
    loop {
        let cycles = instrumented_state.step();
        if instrumented_state.state.exited {
            break;
        }
        // NOTE(review): with seg_size == 0 this cast yields u64::MAX, so no
        // segment is ever cut — presumably callers always pass seg_size >= 1.
        if cycles > (seg_size as isize - 1) as u64 {
            instrumented_state.split_segment(true, seg_path, new_writer);
        }
    }
    // Flush the final (partial) segment.
    instrumented_state.split_segment(true, seg_path, new_writer);
    log::info!(
        "Split done {} : {}",
        instrumented_state.state.total_step,
        instrumented_state.state.total_cycle
    );
    instrumented_state.dump_memory();
    (
        instrumented_state.state.total_step as usize,
        instrumented_state.pre_segment_id as usize,
        instrumented_state.state,
    )
}
/// Deserialize a segment file back into a `State`, together with the step
/// counter stored with it (used by `split_seg_into_segs` as the replay
/// stopping point).
pub fn load_segment(seg_file: &str) -> (Box<State>, u64) {
    State::load_seg(seg_file)
}
/// Replay a previously dumped segment and re-split it into smaller segments
/// of roughly `seg_size` cycles each, written under `seg_path`.
///
/// Returns `(total_steps, last_segment_id, final_state)`.
pub fn split_seg_into_segs(
    seg_file: &str,
    seg_path: &str,
    block_path: &str,
    seg_size: usize,
) -> (usize, usize, Box<State>) {
    let (state, final_step) = load_segment(seg_file);
    let mut instrumented_state = InstrumentedState::new(state, block_path.to_string());
    log::debug!("start pc: {:X} {}", instrumented_state.state.pc, final_step);
    std::fs::create_dir_all(seg_path).unwrap();
    // Record the initial boundary without emitting a file.
    let new_writer = |_: &str| -> Option<std::fs::File> { None };
    instrumented_state.split_segment(false, seg_path, new_writer);
    let new_writer = |name: &str| -> Option<std::fs::File> { File::create(name).ok() };
    loop {
        let cycles = instrumented_state.step();
        // Stop once we have replayed exactly up to the step count recorded
        // in the source segment, rather than waiting for program exit.
        if instrumented_state.state.total_step + instrumented_state.state.step == final_step {
            break;
        }
        if cycles > (seg_size as isize - 1) as u64 {
            instrumented_state.split_segment(true, seg_path, new_writer);
            log::debug!(
                "Split at {} : {} into {}",
                instrumented_state.state.total_step,
                instrumented_state.state.total_cycle,
                instrumented_state.pre_segment_id
            );
        }
    }
    // Flush the final (possibly partial) segment.
    instrumented_state.split_segment(true, seg_path, new_writer);
    log::info!(
        "Split done {} : {} into {}",
        instrumented_state.state.total_step,
        instrumented_state.state.total_cycle,
        instrumented_state.pre_segment_id
    );
    instrumented_state.dump_memory();
    (
        instrumented_state.state.total_step as usize,
        instrumented_state.pre_segment_id as usize,
        instrumented_state.state,
    )
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/recursion/build.rs | recursion/build.rs | use std::path::PathBuf;
use std::process::Command;
/// Build script: compile the Go gnark wrapper as a static C archive and
/// instruct Cargo to link it.
fn main() -> Result<(), Box<dyn std::error::Error>> {
    // Re-run this script when the Go sources change. The Go package lives
    // under `src/snark/libsnark` (the directory passed to `current_dir`
    // below); the previous value `go` pointed at a path that does not match
    // the build input, defeating (or constantly triggering) change tracking.
    println!("cargo:rerun-if-changed=src/snark/libsnark");

    let out_dir = std::env::var("OUT_DIR").unwrap();
    let dest_path = PathBuf::from(&out_dir);
    let lib_name = "zkmgnark";
    let dest = dest_path.join(format!("lib{}.a", lib_name));

    // Compile the Go package in c-archive mode so it can be linked
    // statically into the Rust crate. CGO must be enabled for that.
    let status = Command::new("go")
        .current_dir("src/snark/libsnark")
        .env("CGO_ENABLED", "1")
        .args([
            "build",
            "-tags=debug",
            "-o",
            dest.to_str().unwrap(),
            "-buildmode=c-archive",
            ".",
        ])
        .status()
        .expect("Failed to build Go library");
    if !status.success() {
        panic!("Go build failed");
    }

    // Link the Go library
    println!("cargo:rustc-link-search=native={}", dest_path.display());
    println!("cargo:rustc-link-lib=static={}", lib_name);
    Ok(())
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/recursion/src/lib.rs | recursion/src/lib.rs | mod snark;
pub use snark::*;
use std::env;
use plonky2::field::goldilocks_field::GoldilocksField;
use plonky2::plonk::config::PoseidonGoldilocksConfig;
use plonky2::util::timing::TimingTree;
use plonky2x::backend::circuit::Groth16WrapperParameters;
use plonky2x::backend::wrapper::wrap::WrappedCircuit;
use plonky2x::frontend::builder::CircuitBuilder as WrapperBuilder;
use plonky2x::prelude::DefaultParameters;
use std::marker::PhantomData;
use std::ops::Range;
use std::time::Duration;
use plonky2::plonk::circuit_data::CircuitData;
use plonky2::util::serialization::{DefaultGateSerializer, DefaultGeneratorSerializer};
use zkm_prover::all_stark::AllStark;
use zkm_prover::config::StarkConfig;
use zkm_prover::fixed_recursive_verifier::AllRecursiveCircuits;
use zkm_prover::generation::state::Receipt;
type F = GoldilocksField;
const D: usize = 2;
type C = PoseidonGoldilocksConfig;
type InnerParameters = DefaultParameters;
type OuterParameters = Groth16WrapperParameters;
/// This can be used for all external host program, like zkm-project-template and zkm-proof-network etc.
pub const DEFAULT_DEGREE_BITS_RANGE: [Range<usize>; 12] = [
10..21,
12..22,
11..21,
8..21,
6..10,
6..10,
6..16,
6..16,
6..16,
6..16,
6..21,
13..23,
];
pub const RANGE_TABLES: [&str; 12] = [
"ARITHMETIC",
"CPU",
"POSEIDON",
"POSEIDON_SPONGE",
"KECCAK",
"KECCAK_SPONGE",
"SHA_EXTEND",
"SHA_EXTEND_SPONGE",
"SHA_COMPRESS",
"SHA_COMPRESS_SPONGE",
"LOGIC",
"MEMORY",
];
const PUBLIC_INPUT_PATH: &str = "public_values.json";
const BLOCK_PUBLIC_INPUTS_PATH: &str = "block_public_inputs.json";
/// Build the full set of recursive STARK verifier circuits.
///
/// Degree-bit ranges come from the environment when set (see
/// `degree_from_env`), falling back to `DEFAULT_DEGREE_BITS_RANGE`.
pub fn create_recursive_circuit() -> AllRecursiveCircuits<F, C, D> {
    let degree_bits_range = degree_from_env();
    let timing = TimingTree::new("agg init all_circuits", log::Level::Info);
    let all_stark = AllStark::<F, D>::default();
    let config = StarkConfig::standard_fast_config();
    // NOTE: restored `&degree_bits_range` here — the text had been mangled
    // into `°ree_bits_range` by an HTML-entity decoding pass (`&deg` -> °).
    let all_circuits =
        AllRecursiveCircuits::<F, C, D>::new(&all_stark, &degree_bits_range, &config);
    timing.filter(Duration::from_millis(100)).print();
    all_circuits
}
/// Aggregate two receipts into a single aggregation receipt.
///
/// `is_left_agg` / `is_right_agg` tell the circuit whether each input is
/// itself an aggregation receipt rather than a root (segment) receipt.
/// The new receipt is verified before being returned.
pub fn aggregate_proof(
    all_circuits: &AllRecursiveCircuits<F, C, D>,
    left: Receipt<F, C, D>,
    right: Receipt<F, C, D>,
    is_left_agg: bool,
    is_right_agg: bool,
) -> anyhow::Result<Receipt<F, C, D>> {
    let timing = TimingTree::new("agg agg", log::Level::Info);
    // We can duplicate the proofs here because the state hasn't mutated.
    let new_agg_receipt =
        all_circuits.prove_aggregation(is_left_agg, &left, is_right_agg, &right)?;
    timing.filter(Duration::from_millis(100)).print();
    all_circuits.verify_aggregation(&new_agg_receipt)?;
    Ok(new_agg_receipt)
}
/// Prove the final "block" layer over an aggregation receipt and wrap the
/// resulting proof into a BN254-friendly circuit, writing the wrapped proof
/// plus its public values/inputs as JSON into `output_dir`.
pub fn wrap_stark_bn254(
    all_circuits: &AllRecursiveCircuits<F, C, D>,
    new_agg_receipt: Receipt<F, C, D>,
    output_dir: &str,
) -> anyhow::Result<()> {
    let mut timing = TimingTree::new("agg prove_block", log::Level::Info);
    let block_receipt = all_circuits.prove_block(None, &new_agg_receipt)?;
    all_circuits.verify_block(&block_receipt)?;
    timing.filter(Duration::from_millis(100)).print();
    timing = TimingTree::new("agg circuit_data", log::Level::Info);
    let gate_serializer = DefaultGateSerializer;
    let generator_serializer = DefaultGeneratorSerializer {
        _phantom: PhantomData::<C>,
    };
    // Round-trip the block circuit through its byte serialization to obtain
    // an owned `CircuitData` that can be handed to the wrapper circuit.
    let circuit_data = all_circuits
        .block
        .circuit
        .to_bytes(&gate_serializer, &generator_serializer)
        .unwrap();
    let circuit_data = CircuitData::<F, C, D>::from_bytes(
        circuit_data.as_slice(),
        &gate_serializer,
        &generator_serializer,
    )
    .unwrap();
    let builder = WrapperBuilder::<DefaultParameters, 2>::new();
    let mut circuit = builder.build();
    circuit.set_data(circuit_data);
    // Bit widths of the public inputs handed to the Groth16 wrapper:
    // 16 x 32-bit, then 32 x 8-bit, then 68 x 64-bit values.
    // NOTE(review): presumably mirrors the block circuit's public-input
    // layout — confirm against the circuit definition before changing.
    let mut bit_size = vec![32usize; 16];
    bit_size.extend(vec![8; 32]);
    bit_size.extend(vec![64; 68]);
    let wrapped_circuit = WrappedCircuit::<InnerParameters, OuterParameters, D>::build(
        circuit,
        Some((vec![], bit_size)),
    );
    std::fs::create_dir_all(output_dir)?;
    let wrapped_proof = wrapped_circuit.prove(&block_receipt.proof()).unwrap();
    wrapped_proof.save(output_dir)?;
    let src_public_inputs = match &block_receipt {
        Receipt::Segments(receipt) => &receipt.proof.public_inputs,
        Receipt::Composite(recepit) => &recepit.program_receipt.proof.public_inputs,
    };
    // Persist public values and raw public inputs for downstream tooling.
    let outdir_path = std::path::Path::new(&output_dir);
    let public_values_file = outdir_path.join(PUBLIC_INPUT_PATH);
    std::fs::write(
        public_values_file,
        serde_json::to_string(&block_receipt.values())?,
    )?;
    let block_public_inputs = serde_json::json!({
        "public_inputs": src_public_inputs,
    });
    let block_public_inputs_file = outdir_path.join(BLOCK_PUBLIC_INPUTS_PATH);
    std::fs::write(
        block_public_inputs_file,
        serde_json::to_string(&block_public_inputs)?,
    )?;
    timing.filter(Duration::from_millis(100)).print();
    Ok(())
}
// TODO: all the wrapped proof and groth16 proof are written into the disk, which is not friendly for distribution across the cloud
/// Convert the wrapped proof found in `input_dir` into a Groth16 proof
/// written to `output_dir`, using the keys under `key_path`.
pub fn as_groth16(key_path: &str, input_dir: &str, output_dir: &str) -> anyhow::Result<()> {
    snark::prove_snark(key_path, input_dir, output_dir)
}
// TODO: should setup the output path
/// Run the Groth16 trusted setup for the circuit in `input_dir` and
/// generate the Solidity verifier contract.
pub fn groth16_setup(input_dir: &str) -> anyhow::Result<()> {
    snark::setup_and_generate_sol_verifier(input_dir)
}
/// Read per-table degree-bit ranges from the environment.
///
/// For each STARK table named in `RANGE_TABLES`, an env var of the same
/// name may hold a range literal such as `10..21`. A missing or malformed
/// value falls back to the matching entry of `DEFAULT_DEGREE_BITS_RANGE`.
fn degree_from_env() -> [Range<usize>; 12] {
    let mut index = 0;
    RANGE_TABLES.map(|table| {
        // `array::map` visits elements in order, so tracking the index here
        // replaces the previous O(n) `position()` re-scan per fallback.
        let i = index;
        index += 1;
        env::var(table)
            .ok()
            .and_then(|val| {
                // Accept exactly "start..end" with optional surrounding
                // whitespace around each bound.
                let bounds: Vec<usize> = val
                    .split("..")
                    .map(|s| s.trim().parse().ok())
                    .collect::<Option<Vec<usize>>>()?;
                if bounds.len() == 2 {
                    Some(bounds[0]..bounds[1])
                } else {
                    None
                }
            })
            .unwrap_or_else(|| DEFAULT_DEGREE_BITS_RANGE[i].clone())
    })
}
#[allow(dead_code)]
#[cfg(test)]
pub mod tests {
use super::*;
use ethers::utils::hex::hex;
use std::fs::File;
use std::io::BufReader;
use zkm_emulator::utils::{load_elf_with_patch, split_prog_into_segs};
use zkm_prover::cpu::kernel::assembler::segment_kernel;
const ELF_PATH: &str = "./elf-files/sha2-elf";
#[test]
#[ignore]
fn sha2_test_e2e() -> anyhow::Result<()> {
env_logger::try_init().unwrap_or_default();
let seg_path = "/tmp/output";
let seg_size: usize = 8192;
let mut state = load_elf_with_patch(ELF_PATH, vec![]);
let public_input: Vec<u8> =
hex::decode("711e9609339e92b03ddc0a211827dba421f38f9ed8b9d806e1ffdd8c15ffa03d")?;
state.add_input_stream(&public_input);
let private_input = "world!".as_bytes().to_vec();
state.add_input_stream(&private_input);
let (_total_steps, seg_num, _state) = split_prog_into_segs(state, seg_path, "", seg_size);
let all_stark = AllStark::<F, D>::default();
let config = StarkConfig::standard_fast_config();
let all_circuits = create_recursive_circuit();
let seg_start_id = 0;
let assumptions = vec![];
let seg_file = format!("{}/{}", seg_path, seg_start_id);
log::info!("Process segment {}", seg_file);
let seg_reader = BufReader::new(File::open(seg_file)?);
let input_first = segment_kernel("", "", "", seg_reader);
let mut timing = TimingTree::new("prove root first", log::Level::Info);
let mut agg_receipt = all_circuits.prove_root_with_assumption(
&all_stark,
&input_first,
&config,
&mut timing,
assumptions.clone(),
)?;
let mut base_seg = seg_start_id + 1;
let seg_file_number = seg_num;
let mut seg_num = seg_file_number - 1;
let mut is_agg = false;
println!("seg_file_number: {:?}", seg_file_number);
if seg_file_number % 2 == 0 {
let seg_file = format!("{}/{}", seg_path, seg_start_id + 1);
log::info!("Process segment {}", seg_file);
let seg_reader = BufReader::new(File::open(seg_file)?);
let input = segment_kernel("", "", "", seg_reader);
timing = TimingTree::new("prove root second", log::Level::Info);
let receipt = all_circuits.prove_root_with_assumption(
&all_stark,
&input,
&config,
&mut timing,
assumptions.clone(),
)?;
timing.filter(Duration::from_millis(100)).print();
all_circuits.verify_root(receipt.clone())?;
// We can duplicate the proofs here because the state hasn't mutated.
agg_receipt = aggregate_proof(&all_circuits, agg_receipt, receipt, false, false)?;
is_agg = true;
base_seg = seg_start_id + 2;
seg_num -= 1;
}
for i in 0..seg_num / 2 {
let seg_file = format!("{}/{}", seg_path, base_seg + (i << 1));
log::info!("Process segment {}", seg_file);
let seg_reader = BufReader::new(File::open(&seg_file)?);
let input_first = segment_kernel("", "", "", seg_reader);
let mut timing = TimingTree::new("prove root first", log::Level::Info);
let root_receipt_first = all_circuits.prove_root_with_assumption(
&all_stark,
&input_first,
&config,
&mut timing,
assumptions.clone(),
)?;
timing.filter(Duration::from_millis(100)).print();
all_circuits.verify_root(root_receipt_first.clone())?;
let seg_file = format!("{}/{}", seg_path, base_seg + (i << 1) + 1);
log::info!("Process segment {}", seg_file);
let seg_reader = BufReader::new(File::open(&seg_file)?);
let input = segment_kernel("", "", "", seg_reader);
let mut timing = TimingTree::new("prove root second", log::Level::Info);
let root_receipt = all_circuits.prove_root_with_assumption(
&all_stark,
&input,
&config,
&mut timing,
assumptions.clone(),
)?;
timing.filter(Duration::from_millis(100)).print();
all_circuits.verify_root(root_receipt.clone())?;
// We can duplicate the proofs here because the state hasn't mutated.
let new_agg_receipt = aggregate_proof(
&all_circuits,
root_receipt_first,
root_receipt,
false,
false,
)?;
// We can duplicate the proofs here because the state hasn't mutated.
agg_receipt =
aggregate_proof(&all_circuits, agg_receipt, new_agg_receipt, is_agg, true)?;
is_agg = true;
}
log::info!(
"proof size: {:?}",
serde_json::to_string(&agg_receipt.proof().proof)
.unwrap()
.len()
);
if seg_file_number > 1 {
wrap_stark_bn254(&all_circuits, agg_receipt, "/tmp/input")?;
}
log::info!("build finish");
groth16_setup("/tmp/input")?;
as_groth16("/tmp/input", "/tmp/input", "/tmp/output")?;
Ok(())
}
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/recursion/src/snark/snarks.rs | recursion/src/snark/snarks.rs | extern crate libc;
use anyhow::bail;
use libc::c_int;
use std::os::raw::c_char;
use std::path::Path;
extern "C" {
fn Stark2Snark(
keypath: *const c_char,
inputdir: *const c_char,
outputdir: *const c_char,
result: *mut *mut libc::c_char,
) -> c_int;
fn SetupAndGenerateSolVerifier(
inputdir: *const c_char,
result: *mut *mut libc::c_char,
) -> c_int;
}
/// Generate a Groth16 SNARK from the wrapped STARK proof via the Go FFI.
///
/// `keypath` must contain `proving.key` and `verifying.key` from a prior
/// trusted setup; `inputdir` holds the wrapped proof and `outputdir`
/// receives the SNARK artifacts.
///
/// # Panics
/// Panics if the proving/verifying keys are missing from `keypath`.
pub fn prove_snark(keypath: &str, inputdir: &str, outputdir: &str) -> anyhow::Result<()> {
    let path = Path::new(keypath);
    let pk_file = path.join("proving.key");
    let vk_file = path.join("verifying.key");
    if !pk_file.exists() || !vk_file.exists() {
        // Report the directory that was actually checked (`keypath`); the
        // message previously interpolated `inputdir`, pointing users at the
        // wrong location.
        panic!(
            "The vk or pk doesn't exist in the path: {}. Please first set the SNARK_SETUP=true to finish the trust setup.", keypath
        );
    }
    let keypath = std::ffi::CString::new(keypath).unwrap();
    let inputdir = std::ffi::CString::new(inputdir).unwrap();
    let outputdir = std::ffi::CString::new(outputdir).unwrap();
    let mut result: *mut libc::c_char = std::ptr::null_mut();
    // SAFETY: all three arguments are valid NUL-terminated C strings that
    // outlive the call, and `result` is a valid out-pointer.
    let ret = unsafe {
        Stark2Snark(
            keypath.as_ptr(),
            inputdir.as_ptr(),
            outputdir.as_ptr(),
            &mut result,
        )
    };
    if ret == 0 {
        Ok(())
    } else {
        // Copy the message into an owned String BEFORE freeing the buffer.
        // Previously the borrowed `Cow` from `to_string_lossy()` was used
        // after `libc::free(result)` — a use-after-free.
        // SAFETY: on failure the FFI hands back a C string; it is assumed to
        // be allocated with the C allocator (it is freed with `libc::free`
        // below) — TODO confirm on the Go side.
        let error_str = unsafe { std::ffi::CStr::from_ptr(result).to_string_lossy().into_owned() };
        // Free the allocated C string
        unsafe { libc::free(result as *mut libc::c_void) };
        bail!("prove_snark error: {}", error_str)
    }
}
pub fn setup_and_generate_sol_verifier(inputdir: &str) -> anyhow::Result<()> {
let inputdir = std::ffi::CString::new(inputdir).unwrap();
let mut result: *mut libc::c_char = std::ptr::null_mut();
let ret = unsafe { SetupAndGenerateSolVerifier(inputdir.as_ptr(), &mut result) };
if ret == 0 {
Ok(())
} else {
let error_str = unsafe { std::ffi::CStr::from_ptr(result).to_string_lossy() };
// Free the allocated C string
unsafe { libc::free(result as *mut libc::c_void) };
bail!("prove_snark error: {}", error_str)
}
}
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
ProjectZKM/zkm | https://github.com/ProjectZKM/zkm/blob/eb09cf6c81bed9717bc98e4a4c9a61f54787a351/recursion/src/snark/mod.rs | recursion/src/snark/mod.rs | mod snarks;
pub use snarks::*;
| rust | MIT | eb09cf6c81bed9717bc98e4a4c9a61f54787a351 | 2026-01-04T20:21:15.281276Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/alpha_bleed.rs | src/alpha_bleed.rs | //! Changes pixels in an image that are totally transparent to the color of
//! their nearest non-transparent neighbor. This fixes artifacting when images
//! are resized in some contexts.
use std::collections::VecDeque;
use crate::image::{Image, Pixel};
/// Bleed opaque colors outward into fully-transparent pixels (alpha == 0),
/// breadth-first from the opaque boundary, so resampling does not blend in
/// undefined RGB values. Only RGB of transparent pixels changes; alpha is
/// left at 0 and opaque pixels are untouched.
pub(crate) fn alpha_bleed(image: &mut Image) {
    let (w, h) = image.size();
    // Tells whether a given position has been touched by the bleeding algorithm
    // yet and is safe to sample colors from. In the first pass, we'll set all
    // pixels that aren't totally transparent since this algorithm won't mutate
    // them.
    let mut can_be_sampled = Mask2::new(w, h);
    // The set of images that we've already visited and don't need to queue if
    // traversed again.
    let mut visited = Mask2::new(w, h);
    // A queue of pixels to blend with surrounding pixels with next.
    //
    // Populated initially with all pixels that border opaque pixels. We'll use
    // it to blend outwards from each opaque pixel breadth-first.
    let mut to_visit = VecDeque::new();
    // An iterator of in-bounds positions adjacent to the given one.
    let adjacent_positions = |x, y| {
        DIRECTIONS
            .into_iter()
            .filter_map(move |(x_offset, y_offset)| {
                let x_source = (x as i32) + x_offset;
                let y_source = (y as i32) + y_offset;
                if x_source < 0 || y_source < 0 || x_source >= w as i32 || y_source >= h as i32 {
                    return None;
                }
                Some((x_source as u32, y_source as u32))
            })
    };
    // Populate the set of initial positions to visit as well as positions that
    // are valid to sample from.
    for y in 0..h {
        for x in 0..w {
            let pixel = image.get_pixel((x, y));
            if pixel.a != 0 {
                // This pixel is not totally transparent, so we don't need to
                // modify it. We'll add it to the `can_be_sampled` set to
                // indicate it's okay to sample from this pixel.
                can_be_sampled.set(x, y);
                visited.set(x, y);
                continue;
            }
            // Check if any adjacent pixels have non-zero alpha.
            let borders_opaque = adjacent_positions(x, y).any(|(x_source, y_source)| {
                let source = image.get_pixel((x_source, y_source));
                source.a != 0
            });
            if borders_opaque {
                // This pixel is totally transparent, but borders at least one
                // opaque pixel. We'll add it to the initial set of positions to
                // visit.
                visited.set(x, y);
                to_visit.push_back((x, y));
            }
        }
    }
    while let Some((x, y)) = to_visit.pop_front() {
        // Compute the average color from all surrounding pixels that are
        // eligible to be sampled from.
        //
        // u16 accumulators are sufficient: at most 8 neighbors * 255 = 2040.
        let mut new_color = (0, 0, 0);
        let mut contributing = 0;
        for (x_source, y_source) in adjacent_positions(x, y) {
            if can_be_sampled.get(x_source, y_source) {
                let source = image.get_pixel((x_source, y_source));
                contributing += 1;
                new_color.0 += source.r as u16;
                new_color.1 += source.g as u16;
                new_color.2 += source.b as u16;
            } else if !visited.get(x_source, y_source) {
                visited.set(x_source, y_source);
                to_visit.push_back((x_source, y_source));
            }
        }
        // `contributing` is never 0 here: a pixel only enters the queue when
        // it borders an opaque pixel (initial pass) or was pushed by a
        // neighbor that became sampleable before this pixel is popped, since
        // the queue is FIFO.
        let new_color = Pixel::new(
            (new_color.0 / contributing) as u8,
            (new_color.1 / contributing) as u8,
            (new_color.2 / contributing) as u8,
            0,
        );
        image.set_pixel((x, y), new_color);
        // Now that we've bled this pixel, it's eligible to be sampled from for
        // future iterations.
        can_be_sampled.set(x, y);
    }
}
/// The eight neighbor offsets (including diagonals), as (dx, dy) pairs.
const DIRECTIONS: &[(i32, i32)] = &[
    (1, 0),
    (1, 1),
    (0, 1),
    (-1, 1),
    (-1, 0),
    (-1, -1),
    (0, -1),
    (1, -1),
];
// TODO: We could use a more efficient bit vec here instead of Vec<bool> to cut
// our memory cost by 8x.
/// Dense 2D boolean mask addressed by (x, y), stored row-major.
struct Mask2 {
    size: (u32, u32),
    data: Vec<bool>,
}

impl Mask2 {
    /// Create a `w` x `h` mask with every cell cleared.
    fn new(w: u32, h: u32) -> Self {
        let cells = (w * h) as usize;
        Mask2 {
            size: (w, h),
            data: vec![false; cells],
        }
    }

    /// Row-major offset of (x, y) into `data`.
    fn offset(&self, x: u32, y: u32) -> usize {
        (y * self.size.0 + x) as usize
    }

    /// Whether the cell at (x, y) is set.
    fn get(&self, x: u32, y: u32) -> bool {
        let i = self.offset(x, y);
        self.data[i]
    }

    /// Mark the cell at (x, y).
    fn set(&mut self, x: u32, y: u32) {
        let i = self.offset(x, y);
        self.data[i] = true;
    }
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/sync_backend.rs | src/sync_backend.rs | use std::{borrow::Cow, io, path::Path, thread, time::Duration};
use crate::roblox_web_api::{RobloxApiClient, RobloxApiError, IMAGE};
use crate::roblox_web_api_types::{
ImageUploadData, ImageUploadMetadata, RobloxAuthenticationError,
};
use fs_err as fs;
use reqwest::StatusCode;
use thiserror::Error;
pub trait SyncBackend {
fn upload(&mut self, data: UploadInfo) -> Result<UploadResponse, Error>;
}
#[derive(Clone, Debug, PartialEq, Eq)]
pub struct UploadResponse {
pub id: u64,
}
#[derive(Clone, Debug)]
pub struct UploadInfo {
pub name: String,
pub contents: Vec<u8>,
pub hash: String,
}
pub struct RobloxSyncBackend<'a> {
api_client: &'a mut RobloxApiClient,
upload_to_group_id: Option<u64>,
}
impl<'a> RobloxSyncBackend<'a> {
pub fn new(api_client: &'a mut RobloxApiClient, upload_to_group_id: Option<u64>) -> Self {
Self {
api_client,
upload_to_group_id,
}
}
}
impl<'a> SyncBackend for RobloxSyncBackend<'a> {
fn upload(&mut self, data: UploadInfo) -> Result<UploadResponse, Error> {
log::info!("Uploading {} to Roblox", &data.name);
let upload_data = ImageUploadData {
image_data: Cow::Owned(data.contents),
image_metadata: ImageUploadMetadata::new(
IMAGE.to_string(),
data.name.to_string(),
"Uploaded by Tarmac.".to_string(),
None,
self.upload_to_group_id,
)?,
};
let result = self
.api_client
.upload_image_with_moderation_retry(upload_data);
match result {
Ok(response) => {
log::info!("Uploaded {} to ID {}", &data.name, response.asset_id);
Ok(UploadResponse {
id: response.asset_id,
})
}
Err(RobloxApiError::ResponseError {
status: StatusCode::TOO_MANY_REQUESTS,
..
}) => Err(Error::RateLimited),
Err(err) => Err(err.into()),
}
}
}
pub struct NoneSyncBackend;
impl SyncBackend for NoneSyncBackend {
fn upload(&mut self, _data: UploadInfo) -> Result<UploadResponse, Error> {
Err(Error::NoneBackend)
}
}
pub struct DebugSyncBackend {
last_id: u64,
}
impl DebugSyncBackend {
pub fn new() -> Self {
Self { last_id: 0 }
}
}
impl SyncBackend for DebugSyncBackend {
fn upload(&mut self, data: UploadInfo) -> Result<UploadResponse, Error> {
log::info!("Copying {} to local folder", &data.name);
self.last_id += 1;
let id = self.last_id;
let path = Path::new(".tarmac-debug");
fs::create_dir_all(path)?;
let file_path = path.join(id.to_string());
fs::write(&file_path, &data.contents)?;
Ok(UploadResponse { id })
}
}
/// Performs the retry logic for rate limitation errors. The struct wraps a SyncBackend so that
/// when a RateLimited error occurs, the thread sleeps for a moment and then tries to reupload the
/// data.
pub struct RetryBackend<InnerSyncBackend> {
inner: InnerSyncBackend,
delay: Duration,
attempts: usize,
}
impl<InnerSyncBackend> RetryBackend<InnerSyncBackend> {
/// Creates a new backend from another SyncBackend. The max_retries parameter gives the number
/// of times the backend will try again (so given 0, it acts just as the original SyncBackend).
/// The delay parameter provides the amount of time to wait between each upload attempt.
pub fn new(inner: InnerSyncBackend, max_retries: usize, delay: Duration) -> Self {
Self {
inner,
delay,
attempts: max_retries + 1,
}
}
}
impl<InnerSyncBackend: SyncBackend> SyncBackend for RetryBackend<InnerSyncBackend> {
fn upload(&mut self, data: UploadInfo) -> Result<UploadResponse, Error> {
for index in 0..self.attempts {
if index != 0 {
log::info!(
"tarmac is being rate limited, retrying upload ({}/{})",
index,
self.attempts - 1
);
thread::sleep(self.delay);
}
let result = self.inner.upload(data.clone());
match result {
Err(Error::RateLimited) => {}
_ => return result,
}
}
Err(Error::RateLimited)
}
}
#[derive(Debug, Error)]
pub enum Error {
#[error("Cannot upload assets with the 'none' target.")]
NoneBackend,
#[error("Tarmac was rate-limited trying to upload assets. Try again in a little bit.")]
RateLimited,
#[error(transparent)]
Io {
#[from]
source: io::Error,
},
#[error(transparent)]
RobloxError {
#[from]
source: RobloxApiError,
},
#[error(transparent)]
RobloxAuthenticationError {
#[from]
source: RobloxAuthenticationError,
},
}
#[cfg(test)]
mod test {
use super::*;
#[allow(unused_must_use)]
mod test_retry_backend {
use super::*;
struct CountUploads<'a> {
counter: &'a mut usize,
results: Vec<Result<UploadResponse, Error>>,
}
impl<'a> CountUploads<'a> {
fn new(counter: &'a mut usize) -> Self {
Self {
counter,
results: Vec::new(),
}
}
fn with_results(mut self, results: Vec<Result<UploadResponse, Error>>) -> Self {
self.results = results;
self.results.reverse();
self
}
}
impl<'a> SyncBackend for CountUploads<'a> {
fn upload(&mut self, _data: UploadInfo) -> Result<UploadResponse, Error> {
(*self.counter) += 1;
self.results.pop().unwrap_or(Err(Error::NoneBackend))
}
}
fn any_upload_info() -> UploadInfo {
UploadInfo {
name: "foo".to_owned(),
contents: Vec::new(),
hash: "hash".to_owned(),
}
}
fn retry_duration() -> Duration {
Duration::from_millis(1)
}
#[test]
fn upload_at_least_once() {
let mut counter = 0;
let mut backend =
RetryBackend::new(CountUploads::new(&mut counter), 0, retry_duration());
backend.upload(any_upload_info());
assert_eq!(counter, 1);
}
#[test]
fn upload_again_if_rate_limited() {
let mut counter = 0;
let inner = CountUploads::new(&mut counter).with_results(vec![
Err(Error::RateLimited),
Err(Error::RateLimited),
Err(Error::NoneBackend),
]);
let mut backend = RetryBackend::new(inner, 5, retry_duration());
backend.upload(any_upload_info());
assert_eq!(counter, 3);
}
#[test]
fn upload_returns_first_success_result() {
let mut counter = 0;
let success = UploadResponse { id: 10 };
let inner = CountUploads::new(&mut counter).with_results(vec![
Err(Error::RateLimited),
Err(Error::RateLimited),
Ok(success.clone()),
]);
let mut backend = RetryBackend::new(inner, 5, retry_duration());
let upload_result = backend.upload(any_upload_info()).unwrap();
assert_eq!(counter, 3);
assert_eq!(upload_result, success);
}
#[test]
fn upload_returns_rate_limited_when_retries_exhausted() {
let mut counter = 0;
let inner = CountUploads::new(&mut counter).with_results(vec![
Err(Error::RateLimited),
Err(Error::RateLimited),
Err(Error::RateLimited),
Err(Error::RateLimited),
]);
let mut backend = RetryBackend::new(inner, 2, retry_duration());
let upload_result = backend.upload(any_upload_info()).unwrap_err();
assert_eq!(counter, 3);
assert!(match upload_result {
Error::RateLimited => true,
_ => false,
});
}
}
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/image.rs | src/image.rs | //! Simple containers to track images and perform operations on them.
use std::io::{Read, Write};
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum ImageFormat {
Rgba8,
}
impl ImageFormat {
fn stride(&self) -> u32 {
match self {
ImageFormat::Rgba8 => 4,
}
}
}
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(crate) struct Pixel {
pub r: u8,
pub g: u8,
pub b: u8,
pub a: u8,
}
impl Pixel {
pub fn new(r: u8, g: u8, b: u8, a: u8) -> Self {
Self { r, g, b, a }
}
}
#[derive(Debug, Clone)]
pub(crate) struct Image {
size: (u32, u32),
data: Vec<u8>,
format: ImageFormat,
}
impl Image {
pub fn new_rgba8<D: Into<Vec<u8>>>(size: (u32, u32), data: D) -> Self {
let data = data.into();
let format = ImageFormat::Rgba8;
assert!(data.len() == (size.0 * size.1 * format.stride()) as usize);
Self { size, data, format }
}
pub fn new_empty_rgba8(size: (u32, u32)) -> Self {
let data = vec![0; (size.0 * size.1 * ImageFormat::Rgba8.stride()) as usize];
Self::new_rgba8(size, data)
}
pub fn decode_png<R: Read>(input: R) -> Result<Self, png::DecodingError> {
let decoder = png::Decoder::new(input);
// Get the metadata we need from the image and read its data into a
// buffer for processing by the sprite packing algorithm
let (info, mut reader) = decoder.read_info()?;
// TODO: Transcode images to RGBA if possible
if info.color_type != png::ColorType::RGBA {
return Err(png::DecodingError::Other(
format!(
"Color format {:?} is not supported by Tarmac.",
info.color_type
)
.into(),
));
}
let mut data = vec![0; info.buffer_size()];
reader.next_frame(&mut data)?;
let size = (info.width, info.height);
Ok(Self::new_rgba8(size, data))
}
pub fn encode_png<W: Write>(&self, output: W) -> Result<(), png::EncodingError> {
let mut encoder = png::Encoder::new(output, self.size.0, self.size.1);
match self.format {
ImageFormat::Rgba8 => {
encoder.set_color(png::ColorType::RGBA);
encoder.set_depth(png::BitDepth::Eight);
}
}
let mut output_writer = encoder.write_header()?;
output_writer.write_image_data(&self.data)?;
// On drop, output_writer will write the last chunk of the PNG file.
Ok(())
}
pub fn size(&self) -> (u32, u32) {
self.size
}
pub fn blit(&mut self, other: &Image, pos: (u32, u32)) {
assert!(self.format == ImageFormat::Rgba8 && other.format == ImageFormat::Rgba8);
let stride = self.format.stride();
let other_width_bytes = other.size.0 * stride;
let other_rows = other.data.chunks_exact((other_width_bytes) as usize);
for (other_y, other_row) in other_rows.enumerate() {
let self_y = pos.1 + other_y as u32;
let start_px = pos.0 + self.size.0 * self_y;
let start_in_bytes = (stride * start_px) as usize;
let end_in_bytes = start_in_bytes + other_row.len();
let self_row = &mut self.data[start_in_bytes..end_in_bytes];
self_row.copy_from_slice(other_row);
}
}
pub fn get_pixel(&self, pos: (u32, u32)) -> Pixel {
assert!(pos.0 < self.size.0);
assert!(pos.1 < self.size.1);
let stride = self.format.stride() as usize;
let start = stride * (pos.0 + pos.1 * self.size.0) as usize;
Pixel {
r: self.data[start],
g: self.data[start + 1],
b: self.data[start + 2],
a: self.data[start + 3],
}
}
pub fn set_pixel(&mut self, pos: (u32, u32), pixel: Pixel) {
assert!(pos.0 < self.size.0);
assert!(pos.1 < self.size.1);
let stride = self.format.stride() as usize;
let start = stride * (pos.0 + pos.1 * self.size.0) as usize;
self.data[start] = pixel.r;
self.data[start + 1] = pixel.g;
self.data[start + 2] = pixel.b;
self.data[start + 3] = pixel.a;
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn blit_zero() {
let source = Image::new_empty_rgba8((17, 20));
let mut target = Image::new_empty_rgba8((17, 20));
target.blit(&source, (0, 0));
}
#[test]
fn blit_corner() {
let source = Image::new_empty_rgba8((4, 4));
let mut target = Image::new_empty_rgba8((8, 8));
target.blit(&source, (4, 4));
}
#[test]
fn get_pixel() {
let source = Image::new_empty_rgba8((4, 4));
assert_eq!(source.get_pixel((0, 0)), Pixel::new(0, 0, 0, 0));
assert_eq!(source.get_pixel((3, 3)), Pixel::new(0, 0, 0, 0));
}
#[test]
fn set_pixel() {
let mut source = Image::new_empty_rgba8((3, 3));
source.set_pixel((0, 0), Pixel::new(1, 2, 3, 4));
assert_eq!(source.get_pixel((0, 0)), Pixel::new(1, 2, 3, 4));
source.set_pixel((2, 2), Pixel::new(5, 6, 7, 8));
assert_eq!(source.get_pixel((2, 2)), Pixel::new(5, 6, 7, 8));
assert_eq!(&source.data[0..4], &[1, 2, 3, 4]);
assert_eq!(&source.data[(source.data.len() - 4)..], &[5, 6, 7, 8]);
}
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/lua_ast.rs | src/lua_ast.rs | //! Defines part of a Lua AST, used for generating human-readable code in a
//! composable way for Tarmac.
//!
//! Eventually, it might be a good idea to replace this module with something
//! like full-moon (https://github.com/Kampfkarren/full-moon) or another real
//! Lua AST library.
use std::fmt::{self, Write};
/// Trait that helps turn a type into an equivalent Lua snippet.
///
/// Designed to be similar to the `Display` trait from Rust's std.
trait FmtLua {
fn fmt_lua(&self, output: &mut LuaStream<'_>) -> fmt::Result;
/// Used to override how this type will appear when used as a table key.
/// Some types, like strings, can have a shorter representation as a table
/// key than the default, safe approach.
fn fmt_table_key(&self, output: &mut LuaStream<'_>) -> fmt::Result {
write!(output, "[")?;
self.fmt_lua(output)?;
write!(output, "]")
}
}
/// A small wrapper macro to implement Display using a type's FmtLua
/// implementation. We can apply this to values that we want to stringify
/// directly.
macro_rules! proxy_display {
( $target: ty ) => {
impl fmt::Display for $target {
fn fmt(&self, output: &mut fmt::Formatter) -> fmt::Result {
let mut stream = LuaStream::new(output);
FmtLua::fmt_lua(self, &mut stream)
}
}
};
}
pub(crate) struct Block {
pub statements: Vec<Statement>,
}
impl From<Statement> for Block {
fn from(statement: Statement) -> Self {
Self {
statements: vec![statement],
}
}
}
impl FmtLua for Block {
fn fmt_lua(&self, output: &mut LuaStream<'_>) -> fmt::Result {
for statement in &self.statements {
statement.fmt_lua(output)?;
writeln!(output)?;
}
Ok(())
}
}
proxy_display!(Block);
pub(crate) enum Statement {
Return(Expression),
If(IfBlock),
}
impl FmtLua for Statement {
fn fmt_lua(&self, output: &mut LuaStream<'_>) -> fmt::Result {
match self {
Self::Return(literal) => {
write!(output, "return ")?;
literal.fmt_lua(output)
}
Self::If(if_block) => {
write!(output, "if ")?;
if_block.condition.fmt_lua(output)?;
writeln!(output, " then")?;
output.indent();
if_block.body.fmt_lua(output)?;
output.unindent();
for (condition, block) in &if_block.else_if_blocks {
write!(output, "elseif ")?;
condition.fmt_lua(output)?;
writeln!(output, " then")?;
output.indent();
block.fmt_lua(output)?;
output.unindent();
}
if let Some(block) = &if_block.else_block {
writeln!(output, "else")?;
output.indent();
block.fmt_lua(output)?;
output.unindent();
}
write!(output, "end")
}
}
}
}
proxy_display!(Statement);
pub(crate) struct IfBlock {
pub condition: Expression,
pub body: Block,
pub else_if_blocks: Vec<(Expression, Block)>,
pub else_block: Option<Block>,
}
impl IfBlock {
pub fn new<E: Into<Expression>, B: Into<Block>>(condition: E, body: B) -> Self {
Self {
condition: condition.into(),
body: body.into(),
else_if_blocks: Vec::new(),
else_block: None,
}
}
}
pub(crate) enum Expression {
String(String),
Table(Table),
Function(Function),
/// Used as a catch-all for when this module doesn't define a primitive we
/// need for codegen.
Raw(String),
}
impl Expression {
pub fn table(entries: Vec<(Expression, Expression)>) -> Self {
Self::Table(Table { entries })
}
}
impl FmtLua for Expression {
fn fmt_lua(&self, output: &mut LuaStream<'_>) -> fmt::Result {
match self {
Self::Table(inner) => inner.fmt_lua(output),
Self::String(inner) => inner.fmt_lua(output),
Self::Function(inner) => inner.fmt_lua(output),
Self::Raw(inner) => output.write_str(inner),
}
}
fn fmt_table_key(&self, output: &mut LuaStream<'_>) -> fmt::Result {
match self {
Self::Table(inner) => inner.fmt_table_key(output),
Self::String(inner) => inner.fmt_table_key(output),
Self::Function(inner) => inner.fmt_table_key(output),
Self::Raw(inner) => output.write_str(inner),
}
}
}
impl From<String> for Expression {
fn from(value: String) -> Self {
Self::String(value)
}
}
impl From<&'_ String> for Expression {
fn from(value: &String) -> Self {
Self::String(value.clone())
}
}
impl From<&'_ str> for Expression {
fn from(value: &str) -> Self {
Self::String(value.to_owned())
}
}
impl From<Table> for Expression {
fn from(value: Table) -> Self {
Self::Table(value)
}
}
impl FmtLua for String {
fn fmt_lua(&self, output: &mut LuaStream<'_>) -> fmt::Result {
write!(output, "\"{}\"", self)
}
fn fmt_table_key(&self, output: &mut LuaStream<'_>) -> fmt::Result {
if is_valid_ident(self) {
write!(output, "{}", self)
} else {
write!(output, "[\"{}\"]", self)
}
}
}
pub(crate) struct Table {
pub entries: Vec<(Expression, Expression)>,
}
impl Table {
pub fn new() -> Self {
Self {
entries: Vec::new(),
}
}
pub fn add_entry<K: Into<Expression>, V: Into<Expression>>(&mut self, key: K, value: V) {
self.entries.push((key.into(), value.into()));
}
}
impl FmtLua for Table {
fn fmt_lua(&self, output: &mut LuaStream<'_>) -> fmt::Result {
writeln!(output, "{{")?;
output.indent();
for (key, value) in &self.entries {
key.fmt_table_key(output)?;
write!(output, " = ")?;
value.fmt_lua(output)?;
writeln!(output, ",")?;
}
output.unindent();
write!(output, "}}")
}
}
fn is_valid_ident_char_start(value: char) -> bool {
value.is_ascii_alphabetic() || value == '_'
}
fn is_valid_ident_char(value: char) -> bool {
value.is_ascii_alphanumeric() || value == '_'
}
/// Tells whether the given string is a valid Lua identifier.
fn is_valid_ident(value: &str) -> bool {
let mut chars = value.chars();
match chars.next() {
Some(first) => {
if !is_valid_ident_char_start(first) {
return false;
}
}
None => return false,
}
chars.all(is_valid_ident_char)
}
pub(crate) struct Function {
pub args: String,
pub body: Vec<Statement>,
}
impl Function {
pub fn new(args: String, body: Vec<Statement>) -> Self {
Self { args, body }
}
}
impl FmtLua for Function {
fn fmt_lua(&self, output: &mut LuaStream<'_>) -> fmt::Result {
writeln!(output, "function({})", self.args)?;
output.indent();
for statement in &self.body {
statement.fmt_lua(output)?;
writeln!(output)?;
}
output.unindent();
write!(output, "end")
}
}
/// Wraps a `fmt::Write` with additional tracking to do pretty-printing of Lua.
///
/// Behaves similarly to `fmt::Formatter`. This trait's relationship to `LuaFmt`
/// is very similar to `Formatter`'s relationship to `Display`.
struct LuaStream<'a> {
indent_level: usize,
is_start_of_line: bool,
inner: &'a mut (dyn fmt::Write + 'a),
}
impl fmt::Write for LuaStream<'_> {
/// Method to support the `write!` and `writeln!` macros. Instead of using a
/// trait directly, these macros just call `write_str` on their first
/// argument.
///
/// This method is also available on `io::Write` and `fmt::Write`.
fn write_str(&mut self, value: &str) -> fmt::Result {
let mut is_first_line = true;
for line in value.split('\n') {
if is_first_line {
is_first_line = false;
} else {
self.line()?;
}
if !line.is_empty() {
if self.is_start_of_line {
self.is_start_of_line = false;
let indentation = "\t".repeat(self.indent_level);
self.inner.write_str(&indentation)?;
}
self.inner.write_str(line)?;
}
}
Ok(())
}
}
impl<'a> LuaStream<'a> {
fn new(inner: &'a mut (dyn fmt::Write + 'a)) -> Self {
LuaStream {
indent_level: 0,
is_start_of_line: true,
inner,
}
}
fn indent(&mut self) {
self.indent_level += 1;
}
fn unindent(&mut self) {
assert!(self.indent_level > 0);
self.indent_level -= 1;
}
fn line(&mut self) -> fmt::Result {
self.is_start_of_line = true;
self.inner.write_str("\n")
}
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/dpi_scale.rs | src/dpi_scale.rs | use std::path::{Path, PathBuf};
use regex::Regex;
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct DpiAwarePathInfo {
pub(crate) path_without_dpi_scale: PathBuf,
pub(crate) dpi_scale: u32,
}
impl DpiAwarePathInfo {
#[cfg(test)]
fn new(path_without_dpi_scale: &str, dpi_scale: u32) -> Self {
let path_without_dpi_scale = PathBuf::from(path_without_dpi_scale);
Self {
path_without_dpi_scale,
dpi_scale,
}
}
}
/// Given a path, extracts its intended DPI scale and constructs a path without
/// DPI scale information in it. This can be used to group together multiple
/// versions of the same image.
pub(crate) fn extract_path_info<P: AsRef<Path>>(path: P) -> DpiAwarePathInfo {
lazy_static::lazy_static! {
static ref DPI_PATTERN: Regex = Regex::new(r"^(.+?)@(\d+)x(.+?)$").unwrap();
}
let path = path.as_ref();
let file_name = match path.file_name().unwrap().to_str() {
Some(name) => name,
// If the filename isn't valid Unicode, this is an error.
None => {
panic!("Path {} had invalid Unicode", path.display());
}
};
match DPI_PATTERN.captures(file_name) {
Some(captures) => {
let file_stem = captures.get(1).unwrap().as_str().to_owned();
let scale_str = captures.get(2).unwrap().as_str();
let suffix = captures.get(3).unwrap().as_str();
let dpi_scale = scale_str.parse().unwrap();
let file_name_without_dpi_scale = format!("{}{}", file_stem, suffix);
let path_without_dpi_scale = path.with_file_name(&file_name_without_dpi_scale);
DpiAwarePathInfo {
path_without_dpi_scale,
dpi_scale,
}
}
None => DpiAwarePathInfo {
path_without_dpi_scale: path.to_owned(),
dpi_scale: 1,
},
}
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn no_attached_scale() {
assert_eq!(
extract_path_info("foo.png"),
DpiAwarePathInfo::new("foo.png", 1)
);
assert_eq!(
extract_path_info("foo.blah.png"),
DpiAwarePathInfo::new("foo.blah.png", 1)
);
assert_eq!(
extract_path_info("foo/bar/baz/hello.png"),
DpiAwarePathInfo::new("foo/bar/baz/hello.png", 1)
);
}
#[test]
fn explicit_1x() {
assert_eq!(
extract_path_info("layerify@1x.png"),
DpiAwarePathInfo::new("layerify.png", 1)
);
assert_eq!(
extract_path_info("layerify.blah@1x.png"),
DpiAwarePathInfo::new("layerify.blah.png", 1)
);
assert_eq!(
extract_path_info("layerify@1x.png.bak"),
DpiAwarePathInfo::new("layerify.png.bak", 1)
);
assert_eq!(
extract_path_info("some/path/to/image/nice@1x.png"),
DpiAwarePathInfo::new("some/path/to/image/nice.png", 1)
);
}
#[test]
fn explicit_not_1x() {
assert_eq!(
extract_path_info("cool-company@2x.png"),
DpiAwarePathInfo::new("cool-company.png", 2)
);
assert_eq!(
extract_path_info("engineers@10x.png"),
DpiAwarePathInfo::new("engineers.png", 10)
);
assert_eq!(
extract_path_info("we.like.dots@3x.png"),
DpiAwarePathInfo::new("we.like.dots.png", 3)
);
assert_eq!(
extract_path_info("backup-your-stuff@4x.png.bak"),
DpiAwarePathInfo::new("backup-your-stuff.png.bak", 4)
);
}
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/glob.rs | src/glob.rs | //! Wrapper around globset's Glob type that has better serialization
//! characteristics by coupling Glob and GlobMatcher into a single type.
use std::{
fmt,
path::{Path, PathBuf},
};
use globset::{Glob as InnerGlob, GlobMatcher};
use serde::{de::Error as _, Deserialize, Deserializer, Serialize, Serializer};
pub use globset::Error;
#[derive(Debug, Clone)]
pub struct Glob {
inner: InnerGlob,
matcher: GlobMatcher,
}
impl Glob {
pub fn new(glob: &str) -> Result<Self, Error> {
let inner = InnerGlob::new(glob)?;
let matcher = inner.compile_matcher();
Ok(Glob { inner, matcher })
}
pub fn is_match<P: AsRef<Path>>(&self, path: P) -> bool {
self.matcher.is_match(path)
}
pub fn get_prefix(&self) -> PathBuf {
get_non_pattern_prefix(Path::new(self.inner.glob()))
}
}
impl PartialEq for Glob {
fn eq(&self, other: &Self) -> bool {
self.inner == other.inner
}
}
impl Eq for Glob {}
impl Serialize for Glob {
fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
serializer.serialize_str(self.inner.glob())
}
}
impl<'de> Deserialize<'de> for Glob {
fn deserialize<D: Deserializer<'de>>(deserializer: D) -> Result<Self, D::Error> {
let glob = <&str as Deserialize>::deserialize(deserializer)?;
Glob::new(glob).map_err(D::Error::custom)
}
}
impl fmt::Display for Glob {
fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
self.inner.fmt(f)
}
}
// A basic set of characters that might indicate the use of glob pattern syntax.
// This is to distinguish portions of a glob that are fixed paths (e.g.
// "foo.png") from ones that are leveraging patterns (e.g. "*.png").
//
// This approach has false positives, as it will treat escape sequences like
// `[*]` as pattern syntax, but those should be rare enough to be acceptable
//
// Glob syntax described here: https://docs.rs/globset/0.4.4/globset/#syntax
const GLOB_PATTERN_CHARACTERS: &str = "*?{}[]";
fn get_non_pattern_prefix(glob_path: &Path) -> PathBuf {
let mut prefix = PathBuf::new();
for component in glob_path.iter() {
let component_str = component.to_str().unwrap();
if GLOB_PATTERN_CHARACTERS
.chars()
.any(|special_char| component_str.contains(special_char))
{
break;
}
prefix.push(component);
}
prefix
}
#[cfg(test)]
mod test {
use super::*;
#[test]
fn simple_prefix() {
assert_eq!(
get_non_pattern_prefix(Path::new("a/b/**/*.png")),
PathBuf::from("a/b")
);
}
#[test]
fn prefix_only() {
assert_eq!(
get_non_pattern_prefix(Path::new("a/**/b/*.png")),
PathBuf::from("a")
);
}
#[test]
fn no_prefix() {
assert_eq!(
get_non_pattern_prefix(Path::new("**/b/*.png")),
PathBuf::from("")
);
}
#[test]
fn whole_path() {
assert_eq!(
get_non_pattern_prefix(Path::new("a/b/foo.png")),
PathBuf::from("a/b/foo.png")
)
}
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/roblox_web_api_types.rs | src/roblox_web_api_types.rs | use serde::{Deserialize, Serialize};
use std::borrow::Cow;
use thiserror::Error;
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct CreationContext {
pub creator: Creator,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase", untagged)]
pub enum Creator {
#[serde(rename_all = "camelCase")]
User { user_id: String },
#[serde(rename_all = "camelCase")]
Group { group_id: String },
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RawOperationStatusResponse {
#[serde(flatten)]
pub common: RawOperationStatusResponseCommon,
pub response: Option<RawOperationStatusResponseVariants>,
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(untagged)]
pub enum RawOperationStatusResponseVariants {
#[serde(rename_all = "camelCase")]
Success {
path: String,
revision_id: String,
revision_create_time: String,
asset_id: String,
display_name: String,
description: String,
asset_type: String,
creation_context: CreationContext,
moderation_result: ModerationResult,
state: String,
},
// InProgress, InProgress is represented by None
#[serde(rename_all = "camelCase")]
Failure { code: String, message: String },
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct RawOperationStatusResponseCommon {
path: String,
operation_id: String,
done: bool,
}
#[derive(Debug, Deserialize, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ModerationResult {
moderation_state: String,
}
pub struct ImageUploadData<'a> {
pub image_data: Cow<'a, [u8]>,
pub image_metadata: ImageUploadMetadata,
}
#[derive(Debug, Serialize)]
#[serde(rename_all = "camelCase")]
pub struct ImageUploadMetadata {
pub asset_type: String,
pub display_name: String,
pub description: String,
pub creation_context: CreationContext,
}
impl ImageUploadMetadata {
pub fn new(
asset_type: String,
display_name: String,
description: String,
user_id: Option<u64>,
group_id: Option<u64>,
) -> Result<Self, RobloxAuthenticationError> {
let creator = match (user_id, group_id) {
(Some(user_id), None) => Creator::User {
user_id: user_id.to_string(),
},
(None, Some(group_id)) => Creator::Group {
group_id: group_id.to_string(),
},
_ => return Err(RobloxAuthenticationError::InvalidCreatorIdProvided),
};
Ok(Self {
asset_type: asset_type.to_string(),
display_name: display_name.to_string(),
description: description.to_string(),
creation_context: CreationContext { creator },
})
}
}
#[derive(Debug, Serialize, Deserialize)]
#[serde(rename_all = "PascalCase")]
pub struct UploadResponse {
pub asset_id: u64,
}
/// Internal representation of what the asset upload endpoint returns, before
/// we've handled any errors.
#[derive(Debug, Deserialize)]
#[serde(rename_all = "camelCase", untagged)]
pub enum RawUploadResponse {
#[serde(rename_all = "camelCase")]
Success {
path: String,
operation_id: String,
done: bool,
},
#[serde(rename_all = "camelCase")]
Error { code: String, message: String },
}
#[derive(Debug, Error)]
pub enum RobloxAuthenticationError {
#[error("Exactly one of user_id or group_id must be provided")]
InvalidCreatorIdProvided,
#[error("Exactly one of api_key or auth must be provided")]
InvalidAuthProvided,
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/roblox_web_api.rs | src/roblox_web_api.rs | use crate::auth_cookie::get_auth_cookie;
use crate::roblox_web_api_types::{
ImageUploadData, ImageUploadMetadata, RawOperationStatusResponse,
RawOperationStatusResponseVariants, RawUploadResponse, RobloxAuthenticationError,
UploadResponse,
};
use log;
use reqwest::{
header::{HeaderValue, COOKIE},
multipart, Client, Request, Response, StatusCode,
};
use std::{
fmt::{self},
time::Duration,
};
use thiserror::Error;
const OPEN_CLOUD_ASSET_UPLOAD_USER_AUTH: &str =
"https://apis.roblox.com/assets/user-auth/v1/assets";
const OPEN_CLOUD_ASSET_UPLOAD: &str = "https://apis.roblox.com/assets/v1/assets";
const OPEN_CLOUD_ASSET_OPERATIONS_USER_AUTH: &str =
"https://apis.roblox.com/assets/user-auth/v1/operations";
const OPEN_CLOUD_ASSET_OPERATIONS: &str = "https://apis.roblox.com/assets/v1/operations";
const OPEN_CLOUD_API_KEY_HEADER: &str = "X-API-Key";
pub const IMAGE: &str = "Image";
pub struct RobloxOpenCloudCredentials {
auth: RobloxOpenCloudAuth,
}
enum RobloxOpenCloudAuth {
Cookie(String),
ApiKey(String),
None,
}
impl RobloxOpenCloudCredentials {
pub fn get_credentials(
cookie: Option<String>,
api_key: Option<String>,
) -> Result<Self, RobloxAuthenticationError> {
let auth = match (cookie, api_key) {
(Some(_), Some(_)) => Err(RobloxAuthenticationError::InvalidAuthProvided),
(Some(cookie), None) => Ok(RobloxOpenCloudAuth::Cookie(cookie)),
(None, Some(api_key)) => Ok(RobloxOpenCloudAuth::ApiKey(api_key)),
(None, None) => {
log::debug!("No authentication provided, attempting to get cookie...");
if let Some(cookie) = get_auth_cookie() {
log::debug!("Cookie found");
Ok(RobloxOpenCloudAuth::Cookie(cookie))
} else {
log::debug!("No authentication provided, and failed to get cookie");
Ok(RobloxOpenCloudAuth::None)
}
}
}?;
Ok(Self { auth })
}
}
pub struct RobloxApiClient {
credentials: RobloxOpenCloudCredentials,
csrf_token: Option<HeaderValue>,
client: Client,
}
impl fmt::Debug for RobloxApiClient {
fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
write!(formatter, "RobloxApiClient")
}
}
impl RobloxApiClient {
pub fn new(credentials: RobloxOpenCloudCredentials) -> Self {
Self {
credentials,
csrf_token: None,
client: Client::new(),
}
}
pub fn download_image(&mut self, id: u64) -> Result<Vec<u8>, RobloxApiError> {
let url = format!("https://roblox.com/asset?id={}", id);
let mut response =
self.execute_with_csrf_retry(|client| Ok(client.get(&url).build()?))?;
let mut buffer = Vec::new();
response.copy_to(&mut buffer)?;
Ok(buffer)
}
/// Upload an image, retrying if the asset endpoint determines that the
/// asset's name is inappropriate. The asset's name will be replaced with a
/// generic known-good string.
pub fn upload_image_with_moderation_retry(
&mut self,
data: ImageUploadData,
) -> Result<UploadResponse, RobloxApiError> {
let response = self.upload_image_raw(&data)?;
match response {
RawUploadResponse::Success { operation_id, .. } => {
let asset_id = self.poll_operation_until_complete(operation_id.as_str())?;
Ok(UploadResponse {
asset_id: asset_id.parse::<u64>().unwrap(),
})
}
RawUploadResponse::Error { code: _, message } => {
if message.contains("fully moderated") {
log::warn!(
"Image name '{}' was moderated, retrying with different name...",
data.image_metadata.display_name
);
let new_data = ImageUploadData {
image_data: data.image_data,
image_metadata: ImageUploadMetadata {
display_name: "image".to_owned(),
..data.image_metadata
},
};
self.upload_image(new_data)
} else {
Err(RobloxApiError::ApiError { message })
}
}
}
}
/// Upload an image, returning an error if anything goes wrong.
pub fn upload_image(
&mut self,
data: ImageUploadData,
) -> Result<UploadResponse, RobloxApiError> {
let response = self.upload_image_raw(&data)?;
match response {
RawUploadResponse::Success {
path: _,
operation_id,
done: _,
} => {
let asset_id = self.poll_operation_until_complete(operation_id.as_str())?;
Ok(UploadResponse {
asset_id: asset_id.parse::<u64>().unwrap(),
})
}
RawUploadResponse::Error { code: _, message } => {
Err(RobloxApiError::ApiError { message })
}
}
}
/// Upload an image, returning the raw response returned by the endpoint,
/// which may have further failures to handle.
fn upload_image_raw(
&mut self,
data: &ImageUploadData,
) -> Result<RawUploadResponse, RobloxApiError> {
let url = match self.credentials.auth {
RobloxOpenCloudAuth::Cookie(_) => OPEN_CLOUD_ASSET_UPLOAD_USER_AUTH,
RobloxOpenCloudAuth::ApiKey(_) => OPEN_CLOUD_ASSET_UPLOAD,
RobloxOpenCloudAuth::None => {
return Err(RobloxApiError::ApiError {
message: "No authentication provided".to_string(),
})
}
};
let mut response = self.execute_with_csrf_retry(|client| {
let metadata = serde_json::to_string(&data.image_metadata).unwrap();
let form = multipart::Form::new().text("request", metadata).part(
"fileContent",
multipart::Part::bytes(data.image_data.clone().into_owned()).file_name("image"),
);
let request = client.post(url).multipart(form).build()?;
Ok(request)
})?;
let body = response.text()?;
// Some errors will be reported through HTTP status codes, handled here.
if response.status().is_success() {
match serde_json::from_str(&body) {
Ok(response) => Ok(response),
Err(source) => Err(RobloxApiError::BadResponseJson { body, source }),
}
} else {
Err(RobloxApiError::ResponseError {
status: response.status(),
body,
})
}
}
/// Execute a request generated by the given function, retrying if the
/// endpoint requests that the user refreshes their CSRF token.
fn execute_with_csrf_retry<F>(&mut self, make_request: F) -> Result<Response, RobloxApiError>
where
F: Fn(&Client) -> Result<Request, RobloxApiError>,
{
let mut request = make_request(&self.client)?;
self.attach_headers(&mut request);
let response = self.client.execute(request)?;
match response.status() {
StatusCode::FORBIDDEN => {
if let Some(csrf) = response.headers().get("X-CSRF-Token") {
log::debug!("Retrying request with X-CSRF-Token...");
self.csrf_token = Some(csrf.clone());
let mut new_request = make_request(&self.client)?;
self.attach_headers(&mut new_request);
Ok(self.client.execute(new_request)?)
} else {
// If the response did not return a CSRF token for us to
// retry with, this request was likely forbidden for other
// reasons.
Ok(response)
}
}
_ => Ok(response),
}
}
/// Attach required headers to a request object before sending it to a
/// Roblox API, like authentication and CSRF protection.
fn attach_headers(&self, request: &mut Request) {
let credentials = &self.credentials;
match &credentials.auth {
RobloxOpenCloudAuth::Cookie(cookie) => {
let cookie_value = format!(".ROBLOSECURITY={}", cookie);
request.headers_mut().insert(
COOKIE,
HeaderValue::from_bytes(cookie_value.as_bytes()).unwrap(),
);
}
RobloxOpenCloudAuth::ApiKey(api_key) => {
request.headers_mut().insert(
OPEN_CLOUD_API_KEY_HEADER,
HeaderValue::from_bytes(api_key.as_bytes()).unwrap(),
);
}
RobloxOpenCloudAuth::None => {}
};
if let Some(csrf) = &self.csrf_token {
request.headers_mut().insert("X-CSRF-Token", csrf.clone());
}
}
fn poll_operation_until_complete(
&mut self,
operation_id: &str,
) -> Result<String, RobloxApiError> {
let base_url = match self.credentials.auth {
RobloxOpenCloudAuth::Cookie(_) => OPEN_CLOUD_ASSET_OPERATIONS_USER_AUTH,
RobloxOpenCloudAuth::ApiKey(_) => OPEN_CLOUD_ASSET_OPERATIONS,
RobloxOpenCloudAuth::None => {
return Err(RobloxApiError::ApiError {
message: "No authentication provided".to_string(),
})
}
};
let url = format!("{}/{}", base_url, operation_id);
const FIRST_TRY: u32 = 1;
const MAX_RETRIES: u32 = 5;
const BASE_DELAY: Duration = Duration::from_millis(2000);
const STEP_DELAY: Duration = Duration::from_millis(50);
const EXPONENTIAL_BACKOFF: u32 = 2;
log::debug!("Polling operation until complete: {}", operation_id);
for attempt in 0..FIRST_TRY + MAX_RETRIES {
let mut response =
self.execute_with_csrf_retry(|client| Ok(client.get(url.as_str()).build()?))?;
let body = response.text()?;
let operation_status_response: RawOperationStatusResponse = serde_json::from_str(&body)
.map_err(|source| RobloxApiError::BadResponseJson {
body: body.clone(),
source,
})?;
match operation_status_response.response {
Some(variants) => match variants {
RawOperationStatusResponseVariants::Success { asset_id, .. } => {
return Ok(asset_id);
}
RawOperationStatusResponseVariants::Failure { code, message } => {
return Err(RobloxApiError::ApiError {
message: format!("Operation failed: {}: {}", code, message),
})
}
},
None => {
let delay = BASE_DELAY + STEP_DELAY * (attempt.pow(EXPONENTIAL_BACKOFF));
std::thread::sleep(delay);
}
};
}
Err(RobloxApiError::ApiError {
message: format!(
"polling operation: {} did not complete in time",
operation_id
),
})
}
}
#[derive(Debug, Error)]
pub enum RobloxApiError {
#[error("Roblox API HTTP error")]
Http {
#[from]
source: reqwest::Error,
},
#[error("Roblox API error: {message}")]
ApiError { message: String },
#[error("Roblox API returned success, but had malformed JSON response: {body}")]
BadResponseJson {
body: String,
source: serde_json::Error,
},
#[error("Roblox API returned HTTP {status} with body: {body}")]
ResponseError { status: StatusCode, body: String },
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/auth_cookie.rs | src/auth_cookie.rs | //! Implementation of automatically fetching authentication cookie from a Roblox
//! Studio installation.
#[cfg(windows)]
pub fn get_auth_cookie() -> Option<String> {
use winreg::{enums::HKEY_CURRENT_USER, RegKey};
let hkcu = RegKey::predef(HKEY_CURRENT_USER);
let cookies = hkcu
.open_subkey("Software\\Roblox\\RobloxStudioBrowser\\roblox.com")
.ok()?;
let entry: String = cookies.get_value(".ROBLOSECURITY").ok()?;
let mut cookie = None;
for kv_pair in entry.split(',') {
let mut pieces = kv_pair.split("::");
if let Some("COOK") = pieces.next() {
let value = match pieces.next() {
Some(value) => value,
None => {
log::warn!("Unrecognized Roblox Studio cookie value: missing COOK value");
return None;
}
};
if !value.starts_with('<') || !value.ends_with('>') {
log::warn!("Unrecognized Roblox Studio cookie value: was not wrapped in <>");
return None;
}
let value = &value[1..value.len() - 1];
cookie = Some(value);
}
}
cookie.map(Into::into)
}
#[cfg(not(windows))]
pub fn get_auth_cookie() -> Option<String> {
None
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/options.rs | src/options.rs | use std::{path::PathBuf, str::FromStr};
use structopt::StructOpt;
#[derive(Debug, StructOpt)]
#[structopt(about = env!("CARGO_PKG_DESCRIPTION"))]
pub struct Options {
#[structopt(flatten)]
pub global: GlobalOptions,
#[structopt(subcommand)]
pub command: Subcommand,
}
#[derive(Debug, StructOpt)]
pub struct GlobalOptions {
/// The authentication cookie for Tarmac to use. If not specified, Tarmac
/// will attempt to use the cookie from the Roblox Studio installation on
/// the system.
#[structopt(long, global(true))]
pub auth: Option<String>,
/// The Open Cloud API key tarmac will use to upload assets.
#[structopt(long, global(true))]
pub api_key: Option<String>,
/// Sets verbosity level. Can be specified multiple times.
#[structopt(long = "verbose", short, global(true), parse(from_occurrences))]
pub verbosity: u8,
}
#[derive(Debug, StructOpt)]
pub enum Subcommand {
/// Upload a single image to the Roblox cloud. Prints the asset ID of the
/// resulting Image asset to stdout.
UploadImage(UploadImageOptions),
/// Sync your Tarmac project, uploading any assets that have changed.
Sync(SyncOptions),
/// Downloads any packed spritesheets, then generates a file mapping asset
/// IDs to file paths.
CreateCacheMap(CreateCacheMapOptions),
/// Creates a file that lists all assets required by the project.
AssetList(AssetListOptions),
}
#[derive(Debug, StructOpt)]
pub struct UploadImageOptions {
/// The path to the image to upload.
pub path: PathBuf,
/// The name to give to the resulting Decal asset.
#[structopt(long)]
pub name: String,
/// The description to give to the resulting Decal asset.
#[structopt(long, default_value = "Uploaded by Tarmac.")]
pub description: String,
/// The ID of the user to upload to. Not compatible with `group_id`.
#[structopt(long)]
pub group_id: Option<u64>,
/// The ID of the group to upload to. Not compatible with `user_id`.
#[structopt(long)]
pub user_id: Option<u64>,
}
#[derive(Debug, StructOpt)]
pub struct SyncOptions {
/// Where Tarmac should sync the project.
///
/// Options:
///
/// - roblox: Upload to Roblox.com
///
/// - none: Do not upload. Tarmac will exit with an error if there are any
/// unsynced assets.
///
/// - debug: Copy to local debug directory for debugging output
#[structopt(long)]
pub target: SyncTarget,
/// When provided, Tarmac will upload again at most the given number of times
/// when it encounters rate limitation errors.
#[structopt(long)]
pub retry: Option<usize>,
/// The number of seconds to wait between each re-upload attempts.
#[structopt(long, default_value = "60")]
pub retry_delay: u64,
/// The path to a Tarmac config, or a folder containing a Tarmac project.
pub config_path: Option<PathBuf>,
}
#[derive(Debug, Clone, Copy)]
pub enum SyncTarget {
Roblox,
None,
Debug,
}
impl FromStr for SyncTarget {
type Err = String;
fn from_str(value: &str) -> Result<SyncTarget, Self::Err> {
match value {
"roblox" => Ok(SyncTarget::Roblox),
"none" => Ok(SyncTarget::None),
"debug" => Ok(SyncTarget::Debug),
_ => Err(String::from(
"Invalid sync target. Valid options are roblox, none, and debug.",
)),
}
}
}
#[derive(Debug, StructOpt)]
pub struct CreateCacheMapOptions {
pub project_path: Option<PathBuf>,
/// A path to a directory to put any downloaded packed images.
#[structopt(long = "cache-dir")]
pub cache_dir: PathBuf,
/// A path to a file to contain the cache mapping.
#[structopt(long = "index-file")]
pub index_file: PathBuf,
}
#[derive(Debug, StructOpt)]
pub struct AssetListOptions {
pub project_path: Option<PathBuf>,
/// A path to a file to put the asset list.
#[structopt(long = "output")]
pub output: PathBuf,
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/codegen.rs | src/codegen.rs | //! Defines how Tarmac generates Lua code for linking to assets.
//!
//! Tarmac uses a small Lua AST to build up generated code.
use std::{
collections::BTreeMap,
io::{self, Write},
path::{self, Path},
};
use fs_err::File;
use crate::{
data::ImageSlice,
data::SyncInput,
lua_ast::{Block, Expression, Function, IfBlock, Statement, Table},
};
const CODEGEN_HEADER: &str =
"-- This file was @generated by Tarmac. It is not intended for manual editing.";
pub fn perform_codegen(output_path: Option<&Path>, inputs: &[&SyncInput]) -> io::Result<()> {
if let Some(path) = output_path {
codegen_grouped(path, inputs)
} else {
codegen_individual(inputs)
}
}
/// Tree used to track and group inputs hierarchically, before turning them into
/// Lua tables.
enum GroupedItem<'a> {
Folder {
children_by_name: BTreeMap<String, GroupedItem<'a>>,
},
InputGroup {
inputs_by_dpi_scale: BTreeMap<u32, &'a SyncInput>,
},
}
/// Perform codegen for a group of inputs who have `codegen_path` defined.
///
/// We'll build up a Lua file containing nested tables that match the structure
/// of the input's path with its base path stripped away.
fn codegen_grouped(output_path: &Path, inputs: &[&SyncInput]) -> io::Result<()> {
    let mut root_folder: BTreeMap<String, GroupedItem<'_>> = BTreeMap::new();
    // First, collect all of the inputs and group them together into a tree
    // according to their relative paths.
    for &input in inputs {
        // Not all inputs will be marked for codegen. We can eliminate those
        // right away.
        if !input.config.codegen {
            continue;
        }
        // The extension portion of the path is not useful for code generation.
        // By stripping it off, we generate the names that users expect.
        let mut path_without_extension = input.path_without_dpi_scale.clone();
        path_without_extension.set_extension("");
        // If we can't construct a relative path, there isn't a sensible name
        // that we can use to refer to this input.
        let relative_path = path_without_extension
            .strip_prefix(&input.config.codegen_base_path)
            .expect("Input base path was not a base path for input");
        // Collapse `..` path segments so that we can map this path onto our
        // tree of inputs.
        let mut segments = Vec::new();
        for component in relative_path.components() {
            match component {
                path::Component::Prefix(_)
                | path::Component::RootDir
                | path::Component::Normal(_) => {
                    segments.push(component.as_os_str().to_str().unwrap())
                }
                // `.` contributes nothing; `..` cancels the previous segment.
                path::Component::CurDir => {}
                path::Component::ParentDir => assert!(segments.pop().is_some()),
            }
        }
        // Navigate down the tree, creating any folder entries that don't exist
        // yet.
        let mut current_dir = &mut root_folder;
        for (i, &segment) in segments.iter().enumerate() {
            if i == segments.len() - 1 {
                // We assume that the last segment of a path must be a file.
                let input_group = match current_dir.get_mut(segment) {
                    Some(existing) => existing,
                    None => {
                        let input_group = GroupedItem::InputGroup {
                            inputs_by_dpi_scale: BTreeMap::new(),
                        };
                        current_dir.insert(segment.to_owned(), input_group);
                        current_dir.get_mut(segment).unwrap()
                    }
                };
                if let GroupedItem::InputGroup {
                    inputs_by_dpi_scale,
                } = input_group
                {
                    inputs_by_dpi_scale.insert(input.dpi_scale, input);
                } else {
                    unreachable!();
                }
            } else {
                // Interior segment: descend into (or create) a folder node.
                let next_entry =
                    current_dir
                        .entry(segment.to_owned())
                        .or_insert_with(|| GroupedItem::Folder {
                            children_by_name: BTreeMap::new(),
                        });
                if let GroupedItem::Folder { children_by_name } = next_entry {
                    current_dir = children_by_name;
                } else {
                    unreachable!();
                }
            }
        }
    }
    // Recursively lower the grouped tree into Lua expressions. Returns None
    // for leaves that were never uploaded (no ID), pruning them.
    fn build_item(item: &GroupedItem<'_>) -> Option<Expression> {
        match item {
            GroupedItem::Folder { children_by_name } => {
                let entries = children_by_name
                    .iter()
                    .filter_map(|(name, child)| build_item(child).map(|item| (name.into(), item)))
                    .collect();
                Some(Expression::table(entries))
            }
            GroupedItem::InputGroup {
                inputs_by_dpi_scale,
            } => {
                if inputs_by_dpi_scale.len() == 1 {
                    // If there is exactly one input in this group, we can
                    // generate code knowing that there are no high DPI variants
                    // to choose from.
                    let input = inputs_by_dpi_scale.values().next().unwrap();
                    match (input.id, input.slice) {
                        (Some(id), Some(slice)) => Some(codegen_url_and_slice(id, slice)),
                        (Some(id), None) => Some(codegen_just_asset_url(id)),
                        _ => None,
                    }
                } else {
                    // In this case, we have the same asset in multiple
                    // different DPI scales. We can generate code to pick
                    // between them at runtime.
                    Some(codegen_with_high_dpi_options(inputs_by_dpi_scale))
                }
            }
        }
    }
    let root_item = build_item(&GroupedItem::Folder {
        children_by_name: root_folder,
    })
    .unwrap();
    // Emit `return <table>` prefixed with the generated-file header.
    let ast = Statement::Return(root_item);
    let mut file = File::create(output_path)?;
    writeln!(file, "{}", CODEGEN_HEADER)?;
    write!(file, "{}", ast)?;
    Ok(())
}
/// Perform codegen for a group of inputs that don't have `codegen_path`
/// defined, and so generate individual files.
fn codegen_individual(inputs: &[&SyncInput]) -> io::Result<()> {
    for input in inputs {
        // Inputs that were never uploaded have no asset ID and produce no
        // output file.
        let id = match input.id {
            Some(id) => id,
            None => continue,
        };
        let expression = match input.slice {
            Some(slice) => codegen_url_and_slice(id, slice),
            None => codegen_just_asset_url(id),
        };
        // Each generated file lives next to its input, with a .lua extension.
        let output = input.path.with_extension("lua");
        let mut file = File::create(output)?;
        writeln!(file, "{}", CODEGEN_HEADER)?;
        write!(file, "{}", Statement::Return(expression))?;
    }
    Ok(())
}
/// Build a Lua table literal carrying the asset URL plus the rect offset and
/// size needed to index into a packed spritesheet.
fn codegen_url_and_slice(id: u64, slice: ImageSlice) -> Expression {
    let min = slice.min();
    let dims = slice.size();
    let mut entries = Table::new();
    entries.add_entry("Image", format!("rbxassetid://{}", id));
    entries.add_entry(
        "ImageRectOffset",
        Expression::Raw(format!("Vector2.new({}, {})", min.0, min.1)),
    );
    entries.add_entry(
        "ImageRectSize",
        Expression::Raw(format!("Vector2.new({}, {})", dims.0, dims.1)),
    );
    Expression::Table(entries)
}
/// Build a bare string expression holding the asset URL for `id`.
fn codegen_just_asset_url(id: u64) -> Expression {
    let url = format!("rbxassetid://{}", id);
    Expression::String(url)
}
/// Build one `(condition, body)` pair for the DPI-selection if/else chain:
/// the condition tests `dpiScale`, the body returns this variant's value.
fn codegen_dpi_option(input: &SyncInput) -> (Expression, Block) {
    // FIXME: We should probably pull data out of SyncInput at the start of
    // codegen so that we can handle invariants like this.
    let id = input.id.unwrap();
    let value = if let Some(slice) = input.slice {
        codegen_url_and_slice(id, slice)
    } else {
        codegen_just_asset_url(id)
    };
    let condition = Expression::Raw(format!("dpiScale >= {}", input.dpi_scale));
    (condition, Statement::Return(value).into())
}
/// Generate a Lua function that picks between DPI-scale variants of the same
/// asset at runtime, based on a `dpiScale` argument.
fn codegen_with_high_dpi_options(inputs: &BTreeMap<u32, &SyncInput>) -> Expression {
    let args = "dpiScale".to_owned();
    // BTreeMap iterates in ascending key order; reversed, we visit variants
    // from highest DPI scale to lowest so the best match wins first.
    let mut options_high_to_low = inputs.values().rev().peekable();
    // Callers only reach this function with multiple variants, so there is
    // always a first option.
    let highest_dpi_option = options_high_to_low.next().unwrap();
    let (highest_cond, highest_body) = codegen_dpi_option(highest_dpi_option);
    let mut if_block = IfBlock::new(highest_cond, highest_body);
    // `while let` (rather than `for`) is required here: we still need to
    // `peek()` the iterator inside the loop body.
    while let Some(dpi_option) = options_high_to_low.next() {
        let (cond, body) = codegen_dpi_option(dpi_option);
        if options_high_to_low.peek().is_some() {
            if_block.else_if_blocks.push((cond, body));
        } else {
            // The lowest-DPI variant becomes the unconditional `else` branch.
            if_block.else_block = Some(body);
        }
    }
    let statements = vec![Statement::If(if_block)];
    Expression::Function(Function::new(args, statements))
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/asset_name.rs | src/asset_name.rs | use std::{
fmt,
path::{self, Path},
sync::Arc,
};
use serde::{Deserialize, Serialize};
/// Represents a disambiguated and cleaned up path to an asset from a Tarmac
/// project.
///
/// This is really just a string, but by making it have an explicit type with
/// known conversions, we can avoid some kinds of error trying to use Tarmac
/// APIs.
#[derive(Debug, Clone, PartialEq, Eq, Hash, PartialOrd, Ord, Serialize, Deserialize)]
#[serde(transparent)]
// Stored as `Arc<str>` so cloning an AssetName is a cheap refcount bump.
pub struct AssetName(Arc<str>);
impl AssetName {
    /// Build an `AssetName` by taking `asset_path` relative to `root_path`.
    ///
    /// Panics if `asset_path` does not live underneath `root_path`.
    pub fn from_paths(root_path: &Path, asset_path: &Path) -> Self {
        let relative = asset_path
            .strip_prefix(root_path)
            .expect("AssetName::from_paths expects asset_path to have root_path as a prefix.");
        let mut displayed = relative.display().to_string();
        // Keep names stable across platforms by always using `/` as the path
        // separator, even on platforms where it isn't the native one.
        if path::MAIN_SEPARATOR != '/' {
            displayed = displayed.replace(path::MAIN_SEPARATOR, "/");
        }
        AssetName(displayed.into())
    }

    /// Test-only constructor wrapping an arbitrary string.
    #[cfg(test)]
    pub(crate) fn new<S: AsRef<str>>(inner: S) -> Self {
        Self(inner.as_ref().into())
    }
}
impl AsRef<str> for AssetName {
    /// Borrow the underlying string slice.
    fn as_ref(&self) -> &str {
        self.0.as_ref()
    }
}
impl fmt::Display for AssetName {
    /// Display an asset name as its raw string content.
    fn fmt(&self, formatter: &mut fmt::Formatter) -> fmt::Result {
        formatter.write_str(&self.0)
    }
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/main.rs | src/main.rs | mod alpha_bleed;
mod asset_name;
mod auth_cookie;
mod codegen;
mod commands;
mod data;
mod dpi_scale;
mod glob;
mod image;
mod lua_ast;
mod options;
mod roblox_web_api;
mod roblox_web_api_types;
mod sync_backend;
use std::{env, panic, process};
use backtrace::Backtrace;
use structopt::StructOpt;
use crate::options::{Options, Subcommand};
/// Dispatch the selected subcommand, forwarding the global options to it.
fn run(options: Options) -> Result<(), anyhow::Error> {
    match options.command {
        Subcommand::UploadImage(upload_options) => {
            commands::upload_image(options.global, upload_options)?
        }
        Subcommand::Sync(sync_options) => commands::sync(options.global, sync_options)?,
        Subcommand::CreateCacheMap(sub_options) => {
            commands::create_cache_map(options.global, sub_options)?
        }
        Subcommand::AssetList(sub_options) => commands::asset_list(options.global, sub_options)?,
    }
    Ok(())
}
fn main() {
    // Install a custom panic hook so crashes produce a friendly, log-formatted
    // report that points users at the issue tracker.
    panic::set_hook(Box::new(|panic_info| {
        // PanicInfo's payload is usually a &'static str or String.
        // See: https://doc.rust-lang.org/beta/std/panic/struct.PanicInfo.html#method.payload
        let message = match panic_info.payload().downcast_ref::<&str>() {
            Some(&message) => message.to_string(),
            None => match panic_info.payload().downcast_ref::<String>() {
                Some(message) => message.clone(),
                None => "<no message>".to_string(),
            },
        };
        log::error!("Tarmac crashed!");
        log::error!("This is probably a Tarmac bug.");
        log::error!("");
        log::error!(
            "Please consider filing an issue: {}/issues",
            env!("CARGO_PKG_REPOSITORY")
        );
        log::error!("");
        log::error!("If you can reproduce this crash, try adding the -v, -vv, or -vvv flags.");
        log::error!("This might give you more information to figure out what went wrong!");
        log::error!("");
        log::error!("Details: {}", message);
        if let Some(location) = panic_info.location() {
            log::error!("in file {} on line {}", location.file(), location.line());
        }
        // When using the backtrace crate, we need to check the RUST_BACKTRACE
        // environment variable ourselves. Once we switch to the (currently
        // unstable) std::backtrace module, we won't need to do this anymore.
        let should_backtrace = env::var("RUST_BACKTRACE")
            .map(|var| var == "1")
            .unwrap_or(false);
        if should_backtrace {
            eprintln!("{:?}", Backtrace::new());
        } else {
            eprintln!(
                "note: run with `RUST_BACKTRACE=1` environment variable to display a backtrace."
            );
        }
        process::exit(1);
    }));
    let options = Options::from_args();
    // Map -v/-vv/-vvv to progressively noisier log filters; Tarmac's own logs
    // open up before the rest of the dependency tree does.
    let log_filter = match options.global.verbosity {
        0 => "info",
        1 => "info,tarmac=debug",
        2 => "info,tarmac=trace",
        _ => "trace",
    };
    let log_env = env_logger::Env::default().default_filter_or(log_filter);
    env_logger::Builder::from_env(log_env)
        .format_module_path(false)
        .format_timestamp(None)
        // Indent following lines equal to the log level label, like `[ERROR] `
        .format_indent(Some(8))
        .init();
    // Any error bubbling out of a subcommand is logged and turned into a
    // non-zero exit code.
    if let Err(err) = run(options) {
        log::error!("{:?}", err);
        process::exit(1);
    }
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/commands/sync.rs | src/commands/sync.rs | use std::{
collections::{BTreeMap, BTreeSet, HashMap, HashSet, VecDeque},
env,
io::{self, BufWriter, Write},
path::{Path, PathBuf},
time::Duration,
};
use fs_err as fs;
use packos::{InputItem, SimplePacker};
use thiserror::Error;
use walkdir::WalkDir;
use crate::{
alpha_bleed::alpha_bleed,
asset_name::AssetName,
codegen::perform_codegen,
data::{Config, ConfigError, ImageSlice, InputManifest, Manifest, ManifestError, SyncInput},
dpi_scale,
image::Image,
options::{GlobalOptions, SyncOptions, SyncTarget},
roblox_web_api::{RobloxApiClient, RobloxApiError, RobloxOpenCloudCredentials},
roblox_web_api_types::RobloxAuthenticationError,
sync_backend::{
DebugSyncBackend, Error as SyncBackendError, NoneSyncBackend, RetryBackend,
RobloxSyncBackend, SyncBackend, UploadInfo,
},
};
/// Run the sync against `backend`, optionally wrapping it in a retrying
/// backend when `--retry` was requested.
fn sync_session<B: SyncBackend>(session: &mut SyncSession, options: &SyncOptions, mut backend: B) {
    match options.retry {
        Some(retry) => {
            let delay = Duration::from_secs(options.retry_delay);
            let mut wrapped = RetryBackend::new(backend, retry, delay);
            session.sync_with_backend(&mut wrapped);
        }
        None => session.sync_with_backend(&mut backend),
    }
}
/// Entry point for `tarmac sync`: discover configs and inputs, sync them to
/// the chosen target, then write the manifest, generated code, asset list,
/// and asset cache.
pub fn sync(global: GlobalOptions, options: SyncOptions) -> Result<(), SyncError> {
    let fuzzy_config_path = match &options.config_path {
        Some(v) => v.to_owned(),
        None => env::current_dir()?,
    };
    let credentials = RobloxOpenCloudCredentials::get_credentials(global.auth, global.api_key)?;
    let mut api_client = RobloxApiClient::new(credentials);
    let mut session = SyncSession::new(&fuzzy_config_path)?;
    session.discover_configs()?;
    session.discover_inputs()?;
    // Pick a backend matching the requested target: real uploads, a no-op,
    // or a local debug backend.
    match &options.target {
        SyncTarget::Roblox => {
            let group_id = session.root_config().upload_to_group_id;
            sync_session(
                &mut session,
                &options,
                RobloxSyncBackend::new(&mut api_client, group_id),
            );
        }
        SyncTarget::None => {
            sync_session(&mut session, &options, NoneSyncBackend);
        }
        SyncTarget::Debug => {
            sync_session(&mut session, &options, DebugSyncBackend::new());
        }
    }
    session.write_manifest()?;
    session.codegen()?;
    session.write_asset_list()?;
    session.populate_asset_cache(&mut api_client)?;
    // Per-asset errors were collected instead of aborting; surface them as a
    // single failure at the end.
    if session.sync_errors.is_empty() {
        Ok(())
    } else {
        Err(SyncError::HadErrors {
            error_count: session.sync_errors.len(),
        })
    }
}
/// A sync session holds all of the state for a single run of the 'tarmac sync'
/// command.
#[derive(Debug)]
struct SyncSession {
    /// The set of all configs known by the sync session.
    ///
    /// This list is always at least one element long. The first entry is the
    /// root config where the sync session was started; use
    /// SyncSession::root_config to retrieve it.
    configs: Vec<Config>,
    /// The manifest file that was present as of the beginning of the sync
    /// operation.
    original_manifest: Manifest,
    /// All of the inputs discovered so far in the current sync.
    /// Keyed by asset name; BTreeMap keeps iteration order deterministic.
    inputs: BTreeMap<AssetName, SyncInput>,
    /// Errors encountered during syncing that we ignored at the time.
    /// A non-empty list fails the overall sync once everything has run.
    sync_errors: Vec<anyhow::Error>,
}
/// Contains information to help Tarmac batch process different kinds of assets.
///
/// Inputs are only grouped into the same batch when all of these properties
/// match.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
struct InputKind {
    // Whether the input may be packed into a shared spritesheet.
    packable: bool,
    // DPI scale of the input, as extracted by `dpi_scale::extract_path_info`.
    dpi_scale: u32,
}
/// A generated spritesheet along with where each contributing asset landed
/// inside of it.
struct PackedImage {
    image: Image,
    // Maps each contributing input to its rectangle within `image`.
    slices: HashMap<AssetName, ImageSlice>,
}
impl SyncSession {
    /// Create a new sync session rooted at the config found at (or inside)
    /// `fuzzy_config_path`, loading the previous sync's manifest if present.
    fn new(fuzzy_config_path: &Path) -> Result<Self, SyncError> {
        log::trace!("Starting new sync session");
        let root_config = Config::read_from_folder_or_file(&fuzzy_config_path)?;
        log::trace!("Starting from config \"{}\"", root_config.name);
        // A missing manifest just means this is the first sync; any other
        // error is real and propagated.
        let original_manifest = match Manifest::read_from_folder(root_config.folder()) {
            Ok(manifest) => manifest,
            Err(err) if err.is_not_found() => Manifest::default(),
            other => other?,
        };
        Ok(Self {
            configs: vec![root_config],
            original_manifest,
            inputs: BTreeMap::new(),
            sync_errors: Vec::new(),
        })
    }
/// Raise a sync error that will fail the sync process at a later point.
fn raise_error(&mut self, error: impl Into<anyhow::Error>) {
let error = error.into();
log::error!("{:?}", error);
self.sync_errors.push(error);
}
    /// The config that this sync session was started from.
    ///
    /// `configs` is guaranteed non-empty (see `SyncSession::new`), so indexing
    /// the first element cannot panic in practice.
    fn root_config(&self) -> &Config {
        &self.configs[0]
    }
    /// Locate all of the configs connected to our root config.
    ///
    /// Tarmac config files can include each other via the `includes` field,
    /// which will search the given path for other config files and use them as
    /// part of the sync.
    fn discover_configs(&mut self) -> Result<(), SyncError> {
        // Breadth-first traversal over every path named by an `includes` list.
        let mut to_search = VecDeque::new();
        to_search.extend(self.root_config().includes.iter().cloned());
        while let Some(search_path) = to_search.pop_front() {
            let search_meta = fs::metadata(&search_path)?;
            if search_meta.is_file() {
                // This is a file that's explicitly named by a config. We'll
                // check that it's a Tarmac config and include it.
                let config = Config::read_from_file(&search_path)?;
                // Include any configs that this config references.
                to_search.extend(config.includes.iter().cloned());
                self.configs.push(config);
            } else {
                // If this directory contains a config file, we can stop
                // traversing this branch.
                match Config::read_from_folder(&search_path) {
                    Ok(config) => {
                        // We found a config, we're done here.
                        // Append config include paths from this config
                        to_search.extend(config.includes.iter().cloned());
                        self.configs.push(config);
                    }
                    Err(err) if err.is_not_found() => {
                        // We didn't find a config, keep searching down this
                        // branch of the filesystem.
                        let children = fs::read_dir(&search_path)?;
                        for entry in children {
                            let entry = entry?;
                            let entry_path = entry.path();
                            // DirEntry has a metadata method, but in the case
                            // of symlinks, it returns metadata about the
                            // symlink and not the file or folder.
                            let entry_meta = fs::metadata(&entry_path)?;
                            if entry_meta.is_dir() {
                                to_search.push_back(entry_path);
                            }
                        }
                    }
                    Err(err) => {
                        // Any other error (I/O, parse failure) is fatal.
                        return Err(err.into());
                    }
                }
            }
        }
        Ok(())
    }
    /// Find all files on the filesystem referenced as inputs by our configs.
    fn discover_inputs(&mut self) -> Result<(), SyncError> {
        let inputs = &mut self.inputs;
        // Asset names are always made relative to the *root* config's folder,
        // even for inputs contributed by included configs.
        let root_config_path = &self.configs[0].folder();
        // Starting with our root config, iterate over all configs and find all
        // relevant inputs
        for config in &self.configs {
            let config_path = config.folder();
            for input_config in &config.inputs {
                let base_path = config_path.join(input_config.glob.get_prefix());
                log::trace!(
                    "Searching for inputs in '{}' matching '{}'",
                    base_path.display(),
                    input_config.glob,
                );
                let filtered_paths = WalkDir::new(base_path)
                    .into_iter()
                    // TODO: Properly handle WalkDir errors
                    .filter_map(Result::ok)
                    .filter(|entry| {
                        let match_path = entry.path().strip_prefix(config_path).unwrap();
                        input_config.glob.is_match(match_path)
                    });
                for matching in filtered_paths {
                    let path = matching.into_path();
                    let name = AssetName::from_paths(&root_config_path, &path);
                    log::trace!("Found input {}", name);
                    let path_info = dpi_scale::extract_path_info(&path);
                    let contents = fs::read(&path)?;
                    let hash = generate_asset_hash(&contents);
                    // If this input was known during the last sync operation,
                    // pull the information we knew about it out.
                    let (id, slice) = match self.original_manifest.inputs.get(&name) {
                        Some(original) => (original.id, original.slice),
                        None => (None, None),
                    };
                    let already_found = inputs.insert(
                        name.clone(),
                        SyncInput {
                            name,
                            path,
                            path_without_dpi_scale: path_info.path_without_dpi_scale,
                            dpi_scale: path_info.dpi_scale,
                            config: input_config.clone(),
                            contents,
                            hash,
                            id,
                            slice,
                        },
                    );
                    // Two globs matching the same file would make the sync
                    // ambiguous, so bail out with an error.
                    if let Some(existing) = already_found {
                        return Err(SyncError::OverlappingGlobs {
                            path: existing.path,
                        });
                    }
                }
            }
        }
        Ok(())
    }
fn sync_with_backend<S: SyncBackend>(&mut self, backend: &mut S) {
let mut compatible_input_groups = BTreeMap::new();
for (input_name, input) in &self.inputs {
if !is_image_asset(&input.path) {
log::warn!(
"Asset '{}' is not recognized by Tarmac.",
input.path.display()
);
continue;
}
let kind = InputKind {
packable: input.config.packable,
dpi_scale: input.dpi_scale,
};
let input_group = compatible_input_groups.entry(kind).or_insert_with(Vec::new);
input_group.push(input_name.clone());
}
'outer: for (kind, group) in compatible_input_groups {
if kind.packable {
if let Err(err) = self.sync_packable_images(backend, group) {
let rate_limited = err.is_rate_limited();
println!("{}: {:#?}", rate_limited, err);
self.raise_error(err);
if rate_limited {
break 'outer;
}
}
} else {
for input_name in group {
if let Err(err) = self.sync_unpackable_image(backend, &input_name) {
let rate_limited = err.is_rate_limited();
self.raise_error(err);
if rate_limited {
break 'outer;
}
}
}
}
}
// TODO: Clean up output of inputs that were present in the previous
// sync but are no longer present.
}
    /// Pack a group of compatible inputs into spritesheets, alpha-bleed them,
    /// and upload the results, skipping all work when nothing changed.
    fn sync_packable_images<S: SyncBackend>(
        &mut self,
        backend: &mut S,
        group: Vec<AssetName>,
    ) -> Result<(), SyncError> {
        // Repacking is all-or-nothing: any changed input invalidates the
        // whole spritesheet layout.
        if self.are_inputs_unchanged(&group) {
            log::info!("Skipping image packing as all inputs are unchanged.");
            return Ok(());
        }
        log::trace!("Packing images...");
        let mut packed_images = self.pack_images(&group)?;
        log::trace!("Alpha-bleeding {} packed images...", packed_images.len());
        for (i, packed_image) in packed_images.iter_mut().enumerate() {
            log::trace!("Bleeding image {}", i);
            alpha_bleed(&mut packed_image.image);
        }
        log::trace!("Syncing packed images...");
        for packed_image in &packed_images {
            self.sync_packed_image(backend, packed_image)?;
        }
        Ok(())
    }
fn are_inputs_unchanged(&self, group: &[AssetName]) -> bool {
for name in group {
if let Some(manifest) = self.original_manifest.inputs.get(name) {
let input = &self.inputs[name];
let unchanged = input.is_unchanged_since_last_sync(manifest);
if !unchanged {
log::trace!("Input {} changed since last sync", name);
return false;
}
} else {
log::trace!(
"Input {} was not present last sync, need to re-pack spritesheets",
name
);
return false;
}
}
true
}
    /// Decode every input in `group` and pack them into one or more
    /// spritesheets using packos, recording each asset's slice rectangle.
    fn pack_images(&self, group: &[AssetName]) -> Result<Vec<PackedImage>, SyncError> {
        let mut packos_inputs = Vec::new();
        // Remember which decoded image corresponds to each packos item so we
        // can blit it into the right place after packing.
        let mut images_by_id = HashMap::new();
        for name in group {
            let input = &self.inputs[&name];
            let image = Image::decode_png(input.contents.as_slice())?;
            let input = InputItem::new(image.size());
            images_by_id.insert(input.id(), (name, image));
            packos_inputs.push(input);
        }
        let packer = SimplePacker::new()
            .max_size(self.root_config().max_spritesheet_size)
            .padding(1);
        let pack_results = packer.pack(packos_inputs);
        // Each bucket becomes one output spritesheet.
        let mut packed_images = Vec::new();
        for bucket in pack_results.buckets() {
            let mut image = Image::new_empty_rgba8(bucket.size());
            let mut slices: HashMap<AssetName, _> = HashMap::new();
            for item in bucket.items() {
                let (name, sprite_image) = &images_by_id[&item.id()];
                image.blit(sprite_image, item.position());
                let slice = ImageSlice::new(item.position(), item.max());
                slices.insert((*name).clone(), slice);
            }
            packed_images.push(PackedImage { image, slices });
        }
        Ok(packed_images)
    }
    /// Encode one packed spritesheet, upload it, and write the resulting
    /// asset ID and slice back onto every contributing input.
    fn sync_packed_image<S: SyncBackend>(
        &mut self,
        backend: &mut S,
        packed_image: &PackedImage,
    ) -> Result<(), SyncError> {
        let mut encoded_image = Vec::new();
        packed_image.image.encode_png(&mut encoded_image)?;
        let hash = generate_asset_hash(&encoded_image);
        let upload_data = UploadInfo {
            name: "spritesheet".to_owned(),
            contents: encoded_image,
            hash: hash.clone(),
        };
        let id = backend.upload(upload_data)?.id;
        // Apply resolved metadata back to the inputs
        for (asset_name, slice) in &packed_image.slices {
            let input = self.inputs.get_mut(asset_name).unwrap();
            input.id = Some(id);
            input.slice = Some(*slice);
        }
        Ok(())
    }
    /// Upload a single non-packable input, skipping the upload entirely when
    /// nothing relevant changed since the last recorded sync.
    fn sync_unpackable_image<S: SyncBackend>(
        &mut self,
        backend: &mut S,
        input_name: &AssetName,
    ) -> Result<(), SyncError> {
        let input = self.inputs.get_mut(input_name).unwrap();
        let upload_data = UploadInfo {
            name: input.human_name(),
            contents: input.contents.clone(),
            hash: input.hash.clone(),
        };
        let id = if let Some(input_manifest) = self.original_manifest.inputs.get(&input_name) {
            // This input existed during our last sync operation. We'll compare
            // the current state with the previous one to see if we need to take
            // action.
            if input_manifest.hash != input.hash {
                // The file's contents have been edited since the last sync.
                log::trace!("Contents changed...");
                backend.upload(upload_data)?.id
            } else if input.id.is_some() {
                // The file's contents are the same as the previous sync and
                // this image has been uploaded previously.
                if input_manifest.packable != input.config.packable {
                    // Only the file's config has changed.
                    //
                    // TODO: We might not need to reupload this image?
                    log::trace!("Config changed...");
                    backend.upload(upload_data)?.id
                } else {
                    // Nothing has changed, we're good to go!
                    log::trace!("Input is unchanged.");
                    return Ok(());
                }
            } else {
                // This image has never been uploaded, but its hash is present
                // in the manifest.
                log::trace!("Image has never been uploaded...");
                backend.upload(upload_data)?.id
            }
        } else {
            // This input was added since the last sync, if there was one.
            log::trace!("Image was added since last sync...");
            backend.upload(upload_data)?.id
        };
        input.id = Some(id);
        Ok(())
    }
fn write_manifest(&self) -> Result<(), SyncError> {
log::trace!("Generating new manifest");
let mut manifest = Manifest::default();
manifest.inputs = self
.inputs
.iter()
.map(|(name, input)| {
(
name.clone(),
InputManifest {
hash: input.hash.clone(),
id: input.id,
slice: input.slice,
packable: input.config.packable,
},
)
})
.collect();
manifest.write_to_folder(self.root_config().folder())?;
Ok(())
}
    /// Run code generation for all inputs, grouping together those that share
    /// an output path so they land in the same generated file.
    fn codegen(&self) -> Result<(), SyncError> {
        log::trace!("Starting codegen");
        // Inputs are compatible for codegen purposes when they share the same
        // (possibly absent) codegen output path.
        #[derive(Debug, Clone, PartialEq, Eq, Hash)]
        struct CodegenCompatibility<'a> {
            output_path: Option<&'a Path>,
        }
        let mut compatible_codegen_groups = HashMap::new();
        for (input_name, input) in &self.inputs {
            let output_path = input
                .config
                .codegen_path
                .as_ref()
                .map(|path| path.as_path());
            let compat = CodegenCompatibility { output_path };
            let group = compatible_codegen_groups
                .entry(compat)
                .or_insert_with(Vec::new);
            group.push(input_name.clone());
        }
        for (compat, names) in compatible_codegen_groups {
            let inputs: Vec<_> = names.iter().map(|name| &self.inputs[name]).collect();
            let output_path = compat.output_path;
            perform_codegen(output_path, &inputs)?;
        }
        Ok(())
    }
fn write_asset_list(&self) -> Result<(), SyncError> {
let list_path = match &self.root_config().asset_list_path {
Some(path) => path,
None => return Ok(()),
};
log::debug!("Writing asset list");
let list_parent = list_path.parent().unwrap();
fs_err::create_dir_all(list_parent)?;
let mut file = BufWriter::new(fs_err::File::create(list_path)?);
let known_ids: BTreeSet<u64> = self.inputs.values().filter_map(|input| input.id).collect();
for id in known_ids {
writeln!(file, "rbxassetid://{}", id)?;
}
file.flush()?;
Ok(())
}
fn populate_asset_cache(&self, api_client: &mut RobloxApiClient) -> Result<(), SyncError> {
let cache_path = match &self.root_config().asset_cache_path {
Some(path) => path,
None => return Ok(()),
};
log::debug!("Populating asset cache");
fs_err::create_dir_all(&cache_path)?;
let known_ids: HashSet<u64> = self.inputs.values().filter_map(|input| input.id).collect();
// Clean up cache items that aren't present in our current project.
for entry in fs_err::read_dir(&cache_path)? {
let entry = entry?;
let path = entry.path();
let metadata = fs_err::metadata(&path)?;
let name_as_id: Option<u64> = path
.file_name()
.and_then(|name| name.to_str())
.and_then(|name| name.parse().ok());
let should_clean_up;
if metadata.is_dir() {
// Tarmac never generates directories, so we should clean this.
should_clean_up = true;
} else if let Some(id) = name_as_id {
// This file looks like an ID. If it's not present in this
// project, we assume it's from an old sync and clean it up.
should_clean_up = !known_ids.contains(&id);
} else {
// This is some other file that we should clean up.
should_clean_up = true;
}
if should_clean_up {
if metadata.is_dir() {
fs_err::remove_dir_all(&path)?;
} else {
fs_err::remove_file(&path)?;
}
}
}
for input in self.inputs.values() {
if let Some(id) = input.id {
let input_path = cache_path.join(format!("{}", id));
match fs_err::metadata(&input_path) {
Ok(_) => {
// This asset is already downloaded, we can skip it.
continue;
}
Err(err) => {
if err.kind() != io::ErrorKind::NotFound {
return Err(err.into());
}
}
}
log::debug!("Downloading asset ID {}", id);
let contents = api_client.download_image(id)?;
fs_err::write(input_path, contents)?;
}
}
Ok(())
}
}
/// Returns true when `path` has a file extension Tarmac recognizes as an
/// image it can process.
fn is_image_asset(path: &Path) -> bool {
    // TODO: Expand the definition of images?
    // `matches!` replaces the match-returning-bool boilerplate (clippy's
    // `match_like_matches_macro`).
    matches!(
        path.extension().and_then(|ext| ext.to_str()),
        Some("png") | Some("jpg")
    )
}
/// Hash asset contents with BLAKE3, producing the stable hex string Tarmac
/// uses to detect changes between syncs.
fn generate_asset_hash(content: &[u8]) -> String {
    // `.to_string()` instead of `format!("{}", ...)` — same output via
    // Display, without the format-machinery detour.
    blake3::hash(content).to_hex().to_string()
}
/// Every error that `tarmac sync` can produce.
#[derive(Debug, Error)]
pub enum SyncError {
    /// Two different input globs matched the same file, making the sync
    /// ambiguous.
    #[error("Path {} was described by more than one glob", .path.display())]
    OverlappingGlobs { path: PathBuf },
    /// Per-asset errors were collected during the run; details were logged as
    /// they happened.
    #[error("'tarmac sync' completed, but with {error_count} error(s)")]
    HadErrors { error_count: usize },
    // The remaining variants transparently wrap errors from the libraries and
    // modules the sync drives.
    #[error(transparent)]
    WalkDir {
        #[from]
        source: walkdir::Error,
    },
    #[error(transparent)]
    Config {
        #[from]
        source: ConfigError,
    },
    #[error(transparent)]
    Backend {
        #[from]
        source: SyncBackendError,
    },
    #[error(transparent)]
    Manifest {
        #[from]
        source: ManifestError,
    },
    #[error(transparent)]
    Io {
        #[from]
        source: io::Error,
    },
    #[error(transparent)]
    PngDecode {
        #[from]
        source: png::DecodingError,
    },
    #[error(transparent)]
    PngEncode {
        #[from]
        source: png::EncodingError,
    },
    #[error(transparent)]
    RobloxApi {
        #[from]
        source: RobloxApiError,
    },
    #[error(transparent)]
    RobloxAuthenticationError {
        #[from]
        source: RobloxAuthenticationError,
    },
}
impl SyncError {
    /// Whether this error was caused by backend rate limiting; the sync loop
    /// uses this to abort early instead of hammering the endpoint.
    pub fn is_rate_limited(&self) -> bool {
        // `matches!` replaces the match-returning-bool boilerplate (clippy's
        // `match_like_matches_macro`).
        matches!(
            self,
            Self::Backend {
                source: SyncBackendError::RateLimited,
            }
        )
    }
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/commands/asset_list.rs | src/commands/asset_list.rs | use std::collections::BTreeSet;
use std::env;
use std::io::{BufWriter, Write};
use fs_err as fs;
use crate::data::Manifest;
use crate::options::{AssetListOptions, GlobalOptions};
/// Write every uploaded asset ID from the project's manifest into the file
/// named by `--output`, one ID per line.
pub fn asset_list(_global: GlobalOptions, options: AssetListOptions) -> anyhow::Result<()> {
    let project_path = match options.project_path {
        Some(path) => path,
        None => env::current_dir()?,
    };
    let manifest = Manifest::read_from_folder(&project_path)?;
    // Collecting into a BTreeSet both dedupes the IDs and sorts the output.
    let ids: BTreeSet<u64> = manifest
        .inputs
        .values()
        .filter_map(|input_manifest| input_manifest.id)
        .collect();
    let mut file = BufWriter::new(fs::File::create(&options.output)?);
    for id in ids {
        writeln!(file, "{}", id)?;
    }
    file.flush()?;
    Ok(())
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/commands/mod.rs | src/commands/mod.rs | mod asset_list;
mod create_cache_map;
mod sync;
mod upload_image;
pub use asset_list::*;
pub use create_cache_map::*;
pub use sync::*;
pub use upload_image::*;
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/commands/upload_image.rs | src/commands/upload_image.rs | use fs_err as fs;
use crate::{
options::{GlobalOptions, UploadImageOptions},
roblox_web_api::{RobloxApiClient, RobloxOpenCloudCredentials, IMAGE},
roblox_web_api_types::{ImageUploadData, ImageUploadMetadata},
};
/// Upload a single image file to Roblox and print the new asset ID to stdout.
pub fn upload_image(
    global: GlobalOptions,
    options: UploadImageOptions,
) -> Result<(), anyhow::Error> {
    // Propagate read failures instead of panicking: this function already
    // returns Result, so the caller can report the error cleanly.
    let image_data = fs::read(options.path)?;
    let credentials = RobloxOpenCloudCredentials::get_credentials(global.auth, global.api_key)?;
    let mut client = RobloxApiClient::new(credentials);
    let upload_data = ImageUploadData {
        image_data: image_data.into(),
        image_metadata: ImageUploadMetadata::new(
            IMAGE.to_string(),
            options.name.to_string(),
            options.description.to_string(),
            options.user_id,
            options.group_id,
        )?,
    };
    let response = client.upload_image(upload_data)?;
    // Status goes to stderr so stdout carries only the machine-readable ID.
    eprintln!("Image uploaded successfully!");
    println!("{}", response.asset_id);
    Ok(())
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/commands/create_cache_map.rs | src/commands/create_cache_map.rs | use std::collections::BTreeMap;
use std::env;
use std::io::{BufWriter, Write};
use fs_err as fs;
use crate::asset_name::AssetName;
use crate::data::Manifest;
use crate::options::{CreateCacheMapOptions, GlobalOptions};
use crate::roblox_web_api::{RobloxApiClient, RobloxOpenCloudCredentials};
/// Build an on-disk index mapping each uploaded asset ID to a local file:
/// either the single source file that produced it, or a downloaded copy of
/// the combined image when several inputs share one asset.
pub fn create_cache_map(
    global: GlobalOptions,
    options: CreateCacheMapOptions,
) -> anyhow::Result<()> {
    let credentials = RobloxOpenCloudCredentials::get_credentials(global.auth, global.api_key)?;
    let mut api_client = RobloxApiClient::new(credentials);
    let project_path = match options.project_path {
        Some(path) => path.clone(),
        None => env::current_dir()?,
    };
    let manifest = Manifest::read_from_folder(&project_path)?;
    let index_dir = options.index_file.parent().unwrap();
    fs::create_dir_all(index_dir)?;
    fs::create_dir_all(&options.cache_dir)?;
    // Group inputs by the asset ID they were uploaded under; packed
    // spritesheets are shared by several inputs.
    let mut uploaded_inputs: BTreeMap<u64, Vec<&AssetName>> = BTreeMap::new();
    for (name, input_manifest) in &manifest.inputs {
        if let Some(id) = input_manifest.id {
            let paths = uploaded_inputs.entry(id).or_default();
            paths.push(name);
        }
    }
    let mut index: BTreeMap<u64, String> = BTreeMap::new();
    for (id, contributing_assets) in uploaded_inputs {
        if contributing_assets.len() == 1 {
            // Exactly one source file: point the index at it directly.
            index.insert(id, contributing_assets[0].to_string());
        } else {
            // Multiple contributors: download the combined image into the
            // cache directory and point the index there instead.
            let contents = api_client.download_image(id)?;
            let path = options.cache_dir.join(id.to_string());
            fs::write(&path, contents)?;
            index.insert(id, path.display().to_string());
        }
    }
    let mut file = BufWriter::new(fs::File::create(&options.index_file)?);
    serde_json::to_writer_pretty(&mut file, &index)?;
    file.flush()?;
    Ok(())
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/data/config.rs | src/data/config.rs | use std::{
io,
path::{Path, PathBuf},
};
use fs_err as fs;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::glob::Glob;
static CONFIG_FILENAME: &str = "tarmac.toml";
/// Configuration for Tarmac, contained in a tarmac.toml file.
///
/// Tarmac is started from a top-level tarmac.toml file. Config files can
/// include other config files.
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub struct Config {
/// The name of the project, currently only used in debugging.
pub name: String,
/// The maximum size that any packed spritesheets should be. Only applies if
/// this config is the root config file.
#[serde(default = "default_max_spritesheet_size")]
pub max_spritesheet_size: (u32, u32),
/// A path to a folder where any assets contained in the project should be
/// stored. Each asset's name will match its asset ID.
pub asset_cache_path: Option<PathBuf>,
/// A path to a file where Tarmac will write a list of all of the asset URLs
/// referred to by this project.
pub asset_list_path: Option<PathBuf>,
/// If specified, requires that all uploaded assets are uploaded to the
/// given group. Attempting to sync will fail if the authenticated user does
/// not have access to create assets on the group.
pub upload_to_group_id: Option<u64>,
/// A list of paths that Tarmac should search in to find other Tarmac
/// projects.
///
/// Any found projects will have their inputs merged into this project.
#[serde(default)]
pub includes: Vec<PathBuf>,
/// A list of input glob paths and options that Tarmac should use to
/// discover assets that it should manage.
#[serde(default)]
pub inputs: Vec<InputConfig>,
/// The path that this config came from. Paths from this config should be
/// relative to the folder containing this file.
#[serde(skip)]
pub file_path: PathBuf,
}
impl Config {
pub fn read_from_folder_or_file<P: AsRef<Path>>(path: P) -> Result<Self, ConfigError> {
let path = path.as_ref();
let meta = fs::metadata(path)?;
if meta.is_file() {
Self::read_from_file(path)
} else {
Self::read_from_folder(path)
}
}
pub fn read_from_folder<P: AsRef<Path>>(folder_path: P) -> Result<Self, ConfigError> {
let folder_path = folder_path.as_ref();
let file_path = &folder_path.join(CONFIG_FILENAME);
Self::read_from_file(file_path)
}
pub fn read_from_file<P: AsRef<Path>>(path: P) -> Result<Self, ConfigError> {
let path = path.as_ref();
let contents = fs::read(path)?;
let mut config: Self = toml::from_slice(&contents).map_err(|source| ConfigError::Toml {
source,
path: path.to_owned(),
})?;
config.file_path = path.to_owned();
config.make_paths_absolute();
Ok(config)
}
/// The path that paths in this Config should be considered relative to.
pub fn folder(&self) -> &Path {
self.file_path.parent().unwrap()
}
/// Turn all relative paths referenced from this config into absolute paths.
fn make_paths_absolute(&mut self) {
let base = self.file_path.parent().unwrap();
if let Some(list_path) = self.asset_list_path.as_mut() {
make_absolute(list_path, base);
}
if let Some(cache_path) = self.asset_cache_path.as_mut() {
make_absolute(cache_path, base);
}
for include in &mut self.includes {
make_absolute(include, base);
}
for input in &mut self.inputs {
if let Some(codegen_path) = input.codegen_path.as_mut() {
make_absolute(codegen_path, base);
}
make_absolute(&mut input.codegen_base_path, base);
}
}
}
fn default_max_spritesheet_size() -> (u32, u32) {
(1024, 1024)
}
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(deny_unknown_fields, rename_all = "kebab-case")]
pub struct InputConfig {
/// A glob that will match all files that should be considered for this
/// group of inputs.
pub glob: Glob,
/// Defines whether Tarmac should generate code to import the assets
/// associated with this group of inputs.
#[serde(default)]
pub codegen: bool,
/// If specified, batches together all of the generated code for this group
/// of inputs into a single file created at this path.
#[serde(default)]
pub codegen_path: Option<PathBuf>,
#[serde(default)]
pub codegen_base_path: PathBuf,
/// Whether the assets affected by this config are allowed to be packed into
/// spritesheets.
///
/// This isn't enabled by default because special considerations need to be
/// made in order to correctly handle spritesheets. Not all images are able
/// to be pre-packed into spritesheets, like images used in `Decal`
/// instances.
#[serde(default)]
pub packable: bool,
}
#[derive(Debug, Error)]
pub enum ConfigError {
#[error("Error deserializing TOML from path {}", .path.display())]
Toml {
path: PathBuf,
source: toml::de::Error,
},
#[error(transparent)]
Io {
#[from]
source: io::Error,
},
}
impl ConfigError {
/// Tells whether this ConfigError originated because of a path not
/// existing.
///
/// This is intended for use with methods like `Config::read_from_folder` in
/// order to avoid needing to check if a file with the right name exists.
pub fn is_not_found(&self) -> bool {
match self {
ConfigError::Io { source } => source.kind() == io::ErrorKind::NotFound,
_ => false,
}
}
}
/// Utility to make a path absolute if it is not absolute already.
fn make_absolute(path: &mut PathBuf, base: &Path) {
if path.is_relative() {
let new_path = base.join(&*path);
*path = new_path;
}
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/data/sync.rs | src/data/sync.rs | use std::path::PathBuf;
use crate::{
asset_name::AssetName,
data::{ImageSlice, InputConfig, InputManifest},
};
/// In-memory representation of a Tarmac Input during the sync process.
///
/// SyncInput structs are gradually created and filled in from the filesystem,
/// results of network I/O, and from the previous Tarmac manifest file.
#[derive(Debug)]
pub struct SyncInput {
/// A unique name for this asset in the project.
pub name: AssetName,
/// The path on disk to the file this input originated from.
pub path: PathBuf,
/// The input's path with DPI scale information stripped away. This is used
/// to group inputs that are just DPI variations of eachother.
pub path_without_dpi_scale: PathBuf,
/// The DPI scale of this input, if it makes sense for this input type.
pub dpi_scale: u32,
/// The configuration that applied to this input when it was discovered.
pub config: InputConfig,
/// The contents of the file this input originated from.
pub contents: Vec<u8>,
/// A hash of `contents`.
pub hash: String,
/// If this input has been part of an upload to Roblox.com, contains the
/// asset ID that contains the data from this input.
pub id: Option<u64>,
/// If this input has been packed into a spritesheet, contains the slice of
/// the spritesheet that this input is located in.
pub slice: Option<ImageSlice>,
}
impl SyncInput {
pub fn is_unchanged_since_last_sync(&self, old_manifest: &InputManifest) -> bool {
self.hash == old_manifest.hash && self.config.packable == old_manifest.packable
}
/// Creates a non-unique, human-friendly name to refer to this input.
pub fn human_name(&self) -> String {
let file_stem = self
.path_without_dpi_scale
.file_stem()
.unwrap()
.to_str()
.unwrap();
if self.path == self.path_without_dpi_scale {
file_stem.to_owned()
} else {
format!("{} ({}x)", file_stem, self.dpi_scale)
}
}
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/data/manifest.rs | src/data/manifest.rs | use std::{
collections::BTreeMap,
io,
path::{Path, PathBuf},
};
use fs_err as fs;
use serde::{Deserialize, Serialize};
use thiserror::Error;
use crate::asset_name::AssetName;
static MANIFEST_FILENAME: &str = "tarmac-manifest.toml";
/// Tracks the status of all configuration, inputs, and outputs as of the last
/// sync operation.
#[derive(Debug, Clone, Default, Serialize, Deserialize)]
pub struct Manifest {
pub inputs: BTreeMap<AssetName, InputManifest>,
}
impl Manifest {
pub fn read_from_folder<P: AsRef<Path>>(folder_path: P) -> Result<Self, ManifestError> {
let folder_path = folder_path.as_ref();
let file_path = &folder_path.join(MANIFEST_FILENAME);
let contents = fs::read(file_path)?;
let config =
toml::from_slice(&contents).map_err(|source| ManifestError::DeserializeToml {
source,
file_path: file_path.to_owned(),
})?;
Ok(config)
}
pub fn write_to_folder<P: AsRef<Path>>(&self, folder_path: P) -> Result<(), ManifestError> {
let folder_path = folder_path.as_ref();
let file_path = &folder_path.join(MANIFEST_FILENAME);
let serialized = toml::to_vec(self)?;
fs::write(file_path, serialized)?;
log::trace!("Saved manifest to {}", file_path.display());
Ok(())
}
}
#[derive(Debug, Clone, Serialize, Deserialize)]
#[serde(rename_all = "kebab-case")]
pub struct InputManifest {
/// The hexadecimal encoded hash of the contents of this input the last time
/// it was part of an upload.
pub hash: String,
/// The asset ID that contains this input the last time it was uploaded.
pub id: Option<u64>,
/// If the asset is an image that was packed into a spritesheet, contains
/// the portion of the uploaded image that contains this input.
pub slice: Option<ImageSlice>,
/// Whether the config applied to this input asked for it to be packed into
/// a spritesheet.
pub packable: bool,
}
#[derive(Debug, Clone, Copy, Serialize, Deserialize)]
#[serde(transparent)]
pub struct ImageSlice {
coordinates: ((u32, u32), (u32, u32)),
}
impl ImageSlice {
pub fn new(min: (u32, u32), max: (u32, u32)) -> Self {
Self {
coordinates: (min, max),
}
}
pub fn min(&self) -> (u32, u32) {
self.coordinates.0
}
pub fn max(&self) -> (u32, u32) {
self.coordinates.1
}
pub fn size(&self) -> (u32, u32) {
let (x1, y1) = self.min();
let (x2, y2) = self.max();
(x2 - x1, y2 - y1)
}
}
#[derive(Debug, Error)]
pub enum ManifestError {
#[error("Error deserializing TOML from path {}", .file_path.display())]
DeserializeToml {
file_path: PathBuf,
source: toml::de::Error,
},
#[error(transparent)]
SerializeToml {
#[from]
source: toml::ser::Error,
},
#[error(transparent)]
Io {
#[from]
source: io::Error,
},
}
impl ManifestError {
pub fn is_not_found(&self) -> bool {
match self {
ManifestError::Io { source } => source.kind() == io::ErrorKind::NotFound,
_ => false,
}
}
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/src/data/mod.rs | src/data/mod.rs | mod config;
mod manifest;
mod sync;
pub use config::*;
pub use manifest::*;
pub use sync::*;
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/packos/src/packer.rs | packos/src/packer.rs | use std::{borrow::Borrow, cmp::Reverse};
use crate::{
geometry::Rect,
types::{Bucket, InputItem, OutputItem, PackOutput},
};
/// A configurable rectangle packer using a simple packing algorithm.
#[derive(Debug, Clone)]
pub struct SimplePacker {
min_size: (u32, u32),
max_size: (u32, u32),
padding: u32,
}
impl Default for SimplePacker {
fn default() -> Self {
Self::new()
}
}
impl SimplePacker {
/// Constructs a new `SimplePacker` with the default configuration:
/// * `min_size` of 128x128
/// * `max_size` of 1024x1024
/// * `padding` of 0
pub fn new() -> Self {
Self {
min_size: (128, 128),
max_size: (1024, 1024),
padding: 0,
}
}
pub fn min_size(self, min_size: (u32, u32)) -> Self {
Self { min_size, ..self }
}
pub fn max_size(self, max_size: (u32, u32)) -> Self {
Self { max_size, ..self }
}
pub fn padding(self, padding: u32) -> Self {
Self { padding, ..self }
}
/// Pack a group of input rectangles into zero or more buckets.
///
/// Accepts any type that can turn into an iterator of anything that can
/// borrow as an `InputItem`. This helps make sure that types like
/// `Vec<InputItem>`, `&[InputItem]`, and iterators that return either
/// `InputItem` or `&InputItem` can be valid inputs.
pub fn pack<Iter, Item>(&self, items: Iter) -> PackOutput
where
Iter: IntoIterator<Item = Item>,
Item: Borrow<InputItem>,
{
let mut remaining_items: Vec<_> = items.into_iter().map(|item| *item.borrow()).collect();
remaining_items.sort_by_key(|input| Reverse(input.area()));
for item in &mut remaining_items {
item.size = (item.size.0 + self.padding, item.size.1 + self.padding);
}
let num_items = remaining_items.len();
log::trace!("Packing {} items", num_items);
let mut buckets = Vec::new();
while !remaining_items.is_empty() {
// TODO: Compute minimum size from total area of remaining images,
// rounded up to nearest po2 and clamped to max_size.
let mut current_size = self.min_size;
loop {
let (bucket, next_remaining) =
Self::pack_one_bucket(&remaining_items, current_size);
// If this size was large enough to contain the rest of the
// images, we're done packing!
if next_remaining.is_empty() {
buckets.push(bucket);
remaining_items = next_remaining;
break;
}
// Otherwise, we can try to re-pack this set of images into a
// larger bucket to try to minimize the total number of buckets
// we use.
if current_size.0 < self.max_size.0 || current_size.1 < self.max_size.1 {
current_size = (
(current_size.0 * 2).min(self.max_size.0),
(current_size.1 * 2).min(self.max_size.1),
);
} else {
// We're already at the max bucket size, so this is the
// smallest number of buckets we'll get.
buckets.push(bucket);
remaining_items = next_remaining;
break;
}
}
}
for bucket in &mut buckets {
for item in &mut bucket.items {
item.rect.size = (
item.rect.size.0 - self.padding,
item.rect.size.1 - self.padding,
);
}
}
log::trace!(
"Finished packing {} items into {} buckets",
num_items,
buckets.len()
);
PackOutput { buckets }
}
fn pack_one_bucket(
remaining_items: &[InputItem],
bucket_size: (u32, u32),
) -> (Bucket, Vec<InputItem>) {
log::trace!(
"Trying to pack {} remaining items into bucket of size {:?}",
remaining_items.len(),
bucket_size
);
let mut anchors = vec![(0, 0)];
let mut items: Vec<OutputItem> = Vec::new();
let mut unpacked_items = Vec::new();
for input_item in remaining_items {
log::trace!(
"For item {:?} ({}x{}), evaluating these anchors: {:?}",
input_item.id(),
input_item.size.0,
input_item.size.1,
anchors
);
let fit_anchor = anchors.iter().copied().position(|anchor| {
let potential_rect = Rect {
pos: anchor,
size: input_item.size,
};
let fits_with_others = items
.iter()
.all(|packed_item| !potential_rect.intersects(&packed_item.rect));
let max = potential_rect.max();
let fits_in_bucket = max.0 < bucket_size.0 && max.1 < bucket_size.1;
fits_with_others && fits_in_bucket
});
if let Some(index) = fit_anchor {
let anchor = anchors.remove(index);
log::trace!("Fit at anchor {:?}", anchor);
let new_anchor_hor = (anchor.0 + input_item.size.0, anchor.1);
if new_anchor_hor.0 < bucket_size.0 && new_anchor_hor.1 < bucket_size.1 {
anchors.push(new_anchor_hor);
}
let new_anchor_ver = (anchor.0, anchor.1 + input_item.size.1);
if new_anchor_ver.0 < bucket_size.0 && new_anchor_ver.1 < bucket_size.1 {
anchors.push(new_anchor_ver);
}
let output_item = OutputItem {
id: input_item.id(),
rect: Rect {
pos: anchor,
size: input_item.size,
},
};
items.push(output_item);
} else {
log::trace!("Did not fit in this bucket.");
unpacked_items.push(*input_item);
}
}
let bucket = Bucket {
size: bucket_size,
items,
};
(bucket, unpacked_items)
}
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/packos/src/lib.rs | packos/src/lib.rs | //! Packos is a small library for packing rectangles. It was built for
//! [Tarmac](https://github.com/Roblox/tarmac), a tool that manages assets for
//! Roblox projects, including packing images into spritesheets.
//!
//! Packos currently exposes a single packing implementation,
//! [`SimplePacker`][SimplePacker]. More algorithms can be added in the future
//! using the same basic types that Packos uses.
//!
//! ## Example
//! ```
//! use packos::{InputItem, SimplePacker};
//!
//! // First, transform the rectangles you want to pack into the Packos
//! // InputItem type.
//! let my_items = &[
//! InputItem::new((128, 64)),
//! InputItem::new((64, 64)),
//! InputItem::new((1, 300)),
//! ];
//!
//! // Construct a packer and configure it with your constraints
//! let packer = SimplePacker::new().max_size((512, 512));
//!
//! // Compute a solution.
//! // SimplePacker::pack accepts anything that can turn into an iterator of
//! // InputItem or &InputItem.
//! let output = packer.pack(my_items);
//! ```
//!
//! [SimplePacker]: struct.SimplePacker.html
mod geometry;
mod id;
mod packer;
mod types;
pub use id::*;
pub use packer::*;
pub use types::*;
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/packos/src/id.rs | packos/src/id.rs | use std::{
num::NonZeroUsize,
sync::atomic::{AtomicUsize, Ordering},
};
static LAST_ID: AtomicUsize = AtomicUsize::new(1);
/// Represents an item tracked by Packos.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Hash)]
pub struct Id(NonZeroUsize);
impl Id {
pub(crate) fn new() -> Self {
let id = LAST_ID.fetch_add(1, Ordering::SeqCst);
Id(NonZeroUsize::new(id).unwrap())
}
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/packos/src/geometry.rs | packos/src/geometry.rs | #[derive(Debug, Clone, Copy)]
pub(crate) struct Rect {
pub pos: (u32, u32),
pub size: (u32, u32),
}
impl Rect {
pub fn intersects(&self, other: &Rect) -> bool {
let self_max = self.max();
let other_max = other.max();
let x_intersect = self.pos.0 < other_max.0 && self_max.0 > other.pos.0;
let y_intersect = self.pos.1 < other_max.1 && self_max.1 > other.pos.1;
x_intersect && y_intersect
}
pub fn max(&self) -> (u32, u32) {
(self.pos.0 + self.size.0, self.pos.1 + self.size.1)
}
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/packos/src/types.rs | packos/src/types.rs | use crate::{geometry::Rect, id::Id};
/// An input to the rectangle packing routines.
///
/// `InputItem` is just a 2D size and a Packos-generated unique identifier. It's
/// expected that consumers will assign meaning to the given IDs and then use
/// them to associate the packing results back to the application's own objects.
#[derive(Debug, Clone, Copy)]
pub struct InputItem {
pub(crate) id: Id,
pub(crate) size: (u32, u32),
}
impl InputItem {
#[inline]
pub fn new(size: (u32, u32)) -> Self {
Self {
id: Id::new(),
size,
}
}
#[inline]
pub fn size(&self) -> (u32, u32) {
self.size
}
#[inline]
pub fn id(&self) -> Id {
self.id
}
pub(crate) fn area(&self) -> u32 {
self.size.0 * self.size.1
}
}
/// An item that was placed by a packing function.
///
/// `OutputItem` corresponds 1:1 to `InputItem` objects that were passed into
/// the packing function. They expose the ID from the input, as well as position
/// and size.
#[derive(Debug, Clone, Copy)]
pub struct OutputItem {
pub(crate) id: Id,
pub(crate) rect: Rect,
}
impl OutputItem {
#[inline]
pub fn id(&self) -> Id {
self.id
}
#[inline]
pub fn position(&self) -> (u32, u32) {
self.rect.pos
}
#[inline]
pub fn size(&self) -> (u32, u32) {
self.rect.size
}
#[inline]
pub fn min(&self) -> (u32, u32) {
self.rect.pos
}
#[inline]
pub fn max(&self) -> (u32, u32) {
self.rect.max()
}
}
/// The results from running a packing function.
///
/// Currently only exposes the list of buckets that inputs were grouped into. In
/// the future, this struct may also have information about inputs that didn't
/// fit and how efficient the result is.
#[derive(Debug, Clone)]
pub struct PackOutput {
pub(crate) buckets: Vec<Bucket>,
}
impl PackOutput {
#[inline]
pub fn buckets(&self) -> &[Bucket] {
&self.buckets
}
}
/// Contains a set of `OutputItem` values that were packed together into the
/// same fixed-size containers.
#[derive(Debug, Clone)]
pub struct Bucket {
pub(crate) size: (u32, u32),
pub(crate) items: Vec<OutputItem>,
}
impl Bucket {
#[inline]
pub fn size(&self) -> (u32, u32) {
self.size
}
#[inline]
pub fn items(&self) -> &[OutputItem] {
&self.items
}
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
Roblox/tarmac | https://github.com/Roblox/tarmac/blob/01fa334ebc5dc5b72a17dd5182c500ffca7b2b00/packos/examples/simple-uniform.rs | packos/examples/simple-uniform.rs | use packos::{InputItem, SimplePacker};
fn main() {
env_logger::init();
let inputs: Vec<_> = (0..5).map(|_| InputItem::new((128, 128))).collect();
let packer = SimplePacker::new().max_size((256, 256));
let result = packer.pack(inputs);
println!("Pack result: {:#?}", result);
}
| rust | MIT | 01fa334ebc5dc5b72a17dd5182c500ffca7b2b00 | 2026-01-04T20:22:35.804591Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/lib.rs | imap-proto/src/lib.rs | pub mod builders;
pub mod parser;
pub mod types;
pub use parser::ParseResult;
pub use types::*;
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/types.rs | imap-proto/src/types.rs | use std::borrow::Cow;
use std::collections::HashMap;
use std::ops::RangeInclusive;
pub mod acls;
pub use acls::*;
fn to_owned_cow<T: ?Sized + ToOwned>(c: Cow<'_, T>) -> Cow<'static, T> {
Cow::Owned(c.into_owned())
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct Request<'a>(pub Cow<'a, [u8]>, pub Cow<'a, [u8]>);
#[derive(Copy, Clone, Debug, Eq, PartialEq)]
pub enum AttrMacro {
All,
Fast,
Full,
}
#[derive(Debug, Eq, PartialEq)]
#[non_exhaustive]
pub enum Response<'a> {
Capabilities(Vec<Capability<'a>>),
Continue {
code: Option<ResponseCode<'a>>,
information: Option<Cow<'a, str>>,
},
Done {
tag: RequestId,
status: Status,
code: Option<ResponseCode<'a>>,
information: Option<Cow<'a, str>>,
},
Data {
status: Status,
code: Option<ResponseCode<'a>>,
information: Option<Cow<'a, str>>,
},
Expunge(u32),
Vanished {
earlier: bool,
uids: Vec<std::ops::RangeInclusive<u32>>,
},
Fetch(u32, Vec<AttributeValue<'a>>),
MailboxData(MailboxDatum<'a>),
Quota(Quota<'a>),
QuotaRoot(QuotaRoot<'a>),
Id(Option<HashMap<Cow<'a, str>, Cow<'a, str>>>),
Acl(Acl<'a>),
ListRights(ListRights<'a>),
MyRights(MyRights<'a>),
}
impl<'a> Response<'a> {
pub fn from_bytes(buf: &'a [u8]) -> crate::ParseResult<'a> {
crate::parser::parse_response(buf)
}
pub fn into_owned(self) -> Response<'static> {
match self {
Response::Capabilities(capabilities) => Response::Capabilities(
capabilities
.into_iter()
.map(Capability::into_owned)
.collect(),
),
Response::Continue { code, information } => Response::Continue {
code: code.map(ResponseCode::into_owned),
information: information.map(to_owned_cow),
},
Response::Done {
tag,
status,
code,
information,
} => Response::Done {
tag,
status,
code: code.map(ResponseCode::into_owned),
information: information.map(to_owned_cow),
},
Response::Data {
status,
code,
information,
} => Response::Data {
status,
code: code.map(ResponseCode::into_owned),
information: information.map(to_owned_cow),
},
Response::Expunge(seq) => Response::Expunge(seq),
Response::Vanished { earlier, uids } => Response::Vanished { earlier, uids },
Response::Fetch(seq, attrs) => Response::Fetch(
seq,
attrs.into_iter().map(AttributeValue::into_owned).collect(),
),
Response::MailboxData(datum) => Response::MailboxData(datum.into_owned()),
Response::Quota(quota) => Response::Quota(quota.into_owned()),
Response::QuotaRoot(quota_root) => Response::QuotaRoot(quota_root.into_owned()),
Response::Id(map) => Response::Id(map.map(|m| {
m.into_iter()
.map(|(k, v)| (to_owned_cow(k), to_owned_cow(v)))
.collect()
})),
Response::Acl(acl_list) => Response::Acl(acl_list.into_owned()),
Response::ListRights(rights) => Response::ListRights(rights.into_owned()),
Response::MyRights(rights) => Response::MyRights(rights.into_owned()),
}
}
}
#[derive(Debug, Eq, PartialEq)]
pub enum Status {
Ok,
No,
Bad,
PreAuth,
Bye,
}
#[derive(Debug, Eq, PartialEq)]
#[non_exhaustive]
pub enum ResponseCode<'a> {
Alert,
BadCharset(Option<Vec<Cow<'a, str>>>),
Capabilities(Vec<Capability<'a>>),
HighestModSeq(u64), // RFC 4551, section 3.1.1
Parse,
PermanentFlags(Vec<Cow<'a, str>>),
ReadOnly,
ReadWrite,
TryCreate,
UidNext(u32),
UidValidity(u32),
Unseen(u32),
AppendUid(u32, Vec<UidSetMember>),
CopyUid(u32, Vec<UidSetMember>, Vec<UidSetMember>),
UidNotSticky,
MetadataLongEntries(u64), // RFC 5464, section 4.2.1
MetadataMaxSize(u64), // RFC 5464, section 4.3
MetadataTooMany, // RFC 5464, section 4.3
MetadataNoPrivate, // RFC 5464, section 4.3
}
#[derive(Clone, Debug, Eq, PartialEq)]
pub enum UidSetMember {
UidRange(RangeInclusive<u32>),
Uid(u32),
}
impl From<RangeInclusive<u32>> for UidSetMember {
fn from(x: RangeInclusive<u32>) -> Self {
UidSetMember::UidRange(x)
}
}
impl From<u32> for UidSetMember {
fn from(x: u32) -> Self {
UidSetMember::Uid(x)
}
}
impl<'a> ResponseCode<'a> {
pub fn into_owned(self) -> ResponseCode<'static> {
match self {
ResponseCode::Alert => ResponseCode::Alert,
ResponseCode::BadCharset(v) => {
ResponseCode::BadCharset(v.map(|vs| vs.into_iter().map(to_owned_cow).collect()))
}
ResponseCode::Capabilities(v) => {
ResponseCode::Capabilities(v.into_iter().map(Capability::into_owned).collect())
}
ResponseCode::HighestModSeq(v) => ResponseCode::HighestModSeq(v),
ResponseCode::Parse => ResponseCode::Parse,
ResponseCode::PermanentFlags(v) => {
ResponseCode::PermanentFlags(v.into_iter().map(to_owned_cow).collect())
}
ResponseCode::ReadOnly => ResponseCode::ReadOnly,
ResponseCode::ReadWrite => ResponseCode::ReadWrite,
ResponseCode::TryCreate => ResponseCode::TryCreate,
ResponseCode::UidNext(v) => ResponseCode::UidNext(v),
ResponseCode::UidValidity(v) => ResponseCode::UidValidity(v),
ResponseCode::Unseen(v) => ResponseCode::Unseen(v),
ResponseCode::AppendUid(a, b) => ResponseCode::AppendUid(a, b),
ResponseCode::CopyUid(a, b, c) => ResponseCode::CopyUid(a, b, c),
ResponseCode::UidNotSticky => ResponseCode::UidNotSticky,
ResponseCode::MetadataLongEntries(v) => ResponseCode::MetadataLongEntries(v),
ResponseCode::MetadataMaxSize(v) => ResponseCode::MetadataMaxSize(v),
ResponseCode::MetadataTooMany => ResponseCode::MetadataTooMany,
ResponseCode::MetadataNoPrivate => ResponseCode::MetadataNoPrivate,
}
}
}
#[derive(Debug, Eq, PartialEq, Clone)]
#[non_exhaustive]
pub enum StatusAttribute {
HighestModSeq(u64), // RFC 4551
Messages(u32),
Recent(u32),
UidNext(u32),
UidValidity(u32),
Unseen(u32),
}
#[derive(Debug, Eq, PartialEq, Clone)]
pub struct Metadata {
pub entry: String,
pub value: Option<String>,
}
#[derive(Debug, Eq, PartialEq, Clone)]
#[non_exhaustive]
pub enum MailboxDatum<'a> {
Exists(u32),
Flags(Vec<Cow<'a, str>>),
List {
name_attributes: Vec<NameAttribute<'a>>,
delimiter: Option<Cow<'a, str>>,
name: Cow<'a, str>,
},
Search(Vec<u32>),
Sort(Vec<u32>),
Status {
mailbox: Cow<'a, str>,
status: Vec<StatusAttribute>,
},
Recent(u32),
MetadataSolicited {
mailbox: Cow<'a, str>,
values: Vec<Metadata>,
},
MetadataUnsolicited {
mailbox: Cow<'a, str>,
values: Vec<Cow<'a, str>>,
},
GmailLabels(Vec<Cow<'a, str>>),
GmailMsgId(u64),
GmailThrId(u64),
}
impl<'a> MailboxDatum<'a> {
pub fn into_owned(self) -> MailboxDatum<'static> {
match self {
MailboxDatum::Exists(seq) => MailboxDatum::Exists(seq),
MailboxDatum::Flags(flags) => {
MailboxDatum::Flags(flags.into_iter().map(to_owned_cow).collect())
}
MailboxDatum::List {
name_attributes,
delimiter,
name,
} => MailboxDatum::List {
name_attributes: name_attributes
.into_iter()
.map(|named_attribute| named_attribute.into_owned())
.collect(),
delimiter: delimiter.map(to_owned_cow),
name: to_owned_cow(name),
},
MailboxDatum::Search(seqs) => MailboxDatum::Search(seqs),
MailboxDatum::Sort(seqs) => MailboxDatum::Sort(seqs),
MailboxDatum::Status { mailbox, status } => MailboxDatum::Status {
mailbox: to_owned_cow(mailbox),
status,
},
MailboxDatum::Recent(seq) => MailboxDatum::Recent(seq),
MailboxDatum::MetadataSolicited { mailbox, values } => {
MailboxDatum::MetadataSolicited {
mailbox: to_owned_cow(mailbox),
values,
}
}
MailboxDatum::MetadataUnsolicited { mailbox, values } => {
MailboxDatum::MetadataUnsolicited {
mailbox: to_owned_cow(mailbox),
values: values.into_iter().map(to_owned_cow).collect(),
}
}
MailboxDatum::GmailLabels(labels) => {
MailboxDatum::GmailLabels(labels.into_iter().map(to_owned_cow).collect())
}
MailboxDatum::GmailMsgId(msgid) => MailboxDatum::GmailMsgId(msgid),
MailboxDatum::GmailThrId(thrid) => MailboxDatum::GmailThrId(thrid),
}
}
}
#[derive(Debug, Eq, PartialEq, Hash)]
pub enum Capability<'a> {
Imap4rev1,
Auth(Cow<'a, str>),
Atom(Cow<'a, str>),
}
impl<'a> Capability<'a> {
pub fn into_owned(self) -> Capability<'static> {
match self {
Capability::Imap4rev1 => Capability::Imap4rev1,
Capability::Auth(v) => Capability::Auth(to_owned_cow(v)),
Capability::Atom(v) => Capability::Atom(to_owned_cow(v)),
}
}
}
#[derive(Debug, Eq, PartialEq)]
#[non_exhaustive]
pub enum Attribute {
Body,
Envelope,
Flags,
InternalDate,
ModSeq, // RFC 4551, section 3.3.2
Rfc822,
Rfc822Size,
Rfc822Text,
Uid,
/// https://developers.google.com/gmail/imap/imap-extensions#access_to_gmail_labels_x-gm-labels
GmailLabels,
GmailMsgId,
GmailThrId,
}
#[derive(Debug, Eq, PartialEq)]
pub enum MessageSection {
Header,
Mime,
Text,
}
#[derive(Debug, Eq, PartialEq)]
pub enum SectionPath {
Full(MessageSection),
Part(Vec<u32>, Option<MessageSection>),
}
/// A single attribute value as returned in a FETCH response.
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Eq, PartialEq)]
#[non_exhaustive]
pub enum AttributeValue<'a> {
    BodySection {
        section: Option<SectionPath>,
        index: Option<u32>,
        data: Option<Cow<'a, [u8]>>,
    },
    BodyStructure(BodyStructure<'a>),
    Envelope(Box<Envelope<'a>>),
    Flags(Vec<Cow<'a, str>>),
    InternalDate(Cow<'a, str>),
    ModSeq(u64), // RFC 4551, section 3.3.2
    Rfc822(Option<Cow<'a, [u8]>>),
    Rfc822Header(Option<Cow<'a, [u8]>>),
    Rfc822Size(u32),
    Rfc822Text(Option<Cow<'a, [u8]>>),
    Uid(u32),
    /// https://developers.google.com/gmail/imap/imap-extensions#access_to_gmail_labels_x-gm-labels
    GmailLabels(Vec<Cow<'a, str>>),
    GmailMsgId(u64),
    GmailThrId(u64),
}
impl<'a> AttributeValue<'a> {
    /// Convert into a value that owns all of its data; borrowed `Cow`s
    /// are promoted, nested values converted recursively.
    pub fn into_owned(self) -> AttributeValue<'static> {
        match self {
            AttributeValue::BodySection {
                section,
                index,
                data,
            } => AttributeValue::BodySection {
                section,
                index,
                data: data.map(to_owned_cow),
            },
            AttributeValue::BodyStructure(body) => AttributeValue::BodyStructure(body.into_owned()),
            AttributeValue::Envelope(e) => AttributeValue::Envelope(Box::new(e.into_owned())),
            AttributeValue::Flags(v) => {
                AttributeValue::Flags(v.into_iter().map(to_owned_cow).collect())
            }
            AttributeValue::InternalDate(v) => AttributeValue::InternalDate(to_owned_cow(v)),
            AttributeValue::ModSeq(v) => AttributeValue::ModSeq(v),
            AttributeValue::Rfc822(v) => AttributeValue::Rfc822(v.map(to_owned_cow)),
            AttributeValue::Rfc822Header(v) => AttributeValue::Rfc822Header(v.map(to_owned_cow)),
            AttributeValue::Rfc822Size(v) => AttributeValue::Rfc822Size(v),
            AttributeValue::Rfc822Text(v) => AttributeValue::Rfc822Text(v.map(to_owned_cow)),
            AttributeValue::Uid(v) => AttributeValue::Uid(v),
            AttributeValue::GmailLabels(v) => {
                AttributeValue::GmailLabels(v.into_iter().map(to_owned_cow).collect())
            }
            AttributeValue::GmailMsgId(v) => AttributeValue::GmailMsgId(v),
            AttributeValue::GmailThrId(v) => AttributeValue::GmailThrId(v),
        }
    }
}
/// The parsed body structure of a message (single part, text, nested
/// message, or multipart).
#[allow(clippy::large_enum_variant)]
#[derive(Debug, Eq, PartialEq)]
pub enum BodyStructure<'a> {
    Basic {
        common: BodyContentCommon<'a>,
        other: BodyContentSinglePart<'a>,
        extension: Option<BodyExtension<'a>>,
    },
    Text {
        common: BodyContentCommon<'a>,
        other: BodyContentSinglePart<'a>,
        lines: u32,
        extension: Option<BodyExtension<'a>>,
    },
    Message {
        common: BodyContentCommon<'a>,
        other: BodyContentSinglePart<'a>,
        envelope: Envelope<'a>,
        body: Box<BodyStructure<'a>>,
        lines: u32,
        extension: Option<BodyExtension<'a>>,
    },
    Multipart {
        common: BodyContentCommon<'a>,
        bodies: Vec<BodyStructure<'a>>,
        extension: Option<BodyExtension<'a>>,
    },
}
impl<'a> BodyStructure<'a> {
    /// Recursively convert into a structure that owns all of its data.
    pub fn into_owned(self) -> BodyStructure<'static> {
        match self {
            BodyStructure::Basic {
                common,
                other,
                extension,
            } => BodyStructure::Basic {
                common: common.into_owned(),
                other: other.into_owned(),
                extension: extension.map(|v| v.into_owned()),
            },
            BodyStructure::Text {
                common,
                other,
                lines,
                extension,
            } => BodyStructure::Text {
                common: common.into_owned(),
                other: other.into_owned(),
                lines,
                extension: extension.map(|v| v.into_owned()),
            },
            BodyStructure::Message {
                common,
                other,
                envelope,
                body,
                lines,
                extension,
            } => BodyStructure::Message {
                common: common.into_owned(),
                other: other.into_owned(),
                envelope: envelope.into_owned(),
                body: Box::new(body.into_owned()),
                lines,
                extension: extension.map(|v| v.into_owned()),
            },
            BodyStructure::Multipart {
                common,
                bodies,
                extension,
            } => BodyStructure::Multipart {
                common: common.into_owned(),
                bodies: bodies.into_iter().map(|v| v.into_owned()).collect(),
                extension: extension.map(|v| v.into_owned()),
            },
        }
    }
}
/// Fields shared by every body-structure variant: content type,
/// disposition, language, and location.
#[derive(Debug, Eq, PartialEq)]
pub struct BodyContentCommon<'a> {
    pub ty: ContentType<'a>,
    pub disposition: Option<ContentDisposition<'a>>,
    pub language: Option<Vec<Cow<'a, str>>>,
    pub location: Option<Cow<'a, str>>,
}
impl<'a> BodyContentCommon<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> BodyContentCommon<'static> {
        BodyContentCommon {
            ty: self.ty.into_owned(),
            disposition: self.disposition.map(|v| v.into_owned()),
            language: self
                .language
                .map(|v| v.into_iter().map(to_owned_cow).collect()),
            location: self.location.map(to_owned_cow),
        }
    }
}
/// Fields specific to single-part bodies: id, md5, description,
/// transfer encoding, and size in octets.
#[derive(Debug, Eq, PartialEq)]
pub struct BodyContentSinglePart<'a> {
    pub id: Option<Cow<'a, str>>,
    pub md5: Option<Cow<'a, str>>,
    pub description: Option<Cow<'a, str>>,
    pub transfer_encoding: ContentEncoding<'a>,
    pub octets: u32,
}
impl<'a> BodyContentSinglePart<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> BodyContentSinglePart<'static> {
        BodyContentSinglePart {
            id: self.id.map(to_owned_cow),
            md5: self.md5.map(to_owned_cow),
            description: self.description.map(to_owned_cow),
            transfer_encoding: self.transfer_encoding.into_owned(),
            octets: self.octets,
        }
    }
}
/// A MIME content type: type, subtype, and optional parameters.
#[derive(Debug, Eq, PartialEq)]
pub struct ContentType<'a> {
    pub ty: Cow<'a, str>,
    pub subtype: Cow<'a, str>,
    pub params: BodyParams<'a>,
}
impl<'a> ContentType<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> ContentType<'static> {
        ContentType {
            ty: to_owned_cow(self.ty),
            subtype: to_owned_cow(self.subtype),
            params: body_param_owned(self.params),
        }
    }
}
/// A content disposition: disposition type and optional parameters.
#[derive(Debug, Eq, PartialEq)]
pub struct ContentDisposition<'a> {
    pub ty: Cow<'a, str>,
    pub params: BodyParams<'a>,
}
impl<'a> ContentDisposition<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> ContentDisposition<'static> {
        ContentDisposition {
            ty: to_owned_cow(self.ty),
            params: body_param_owned(self.params),
        }
    }
}
/// Content transfer encoding of a single-part body; unrecognised
/// encodings are kept verbatim in `Other`.
#[derive(Debug, Eq, PartialEq)]
pub enum ContentEncoding<'a> {
    SevenBit,
    EightBit,
    Binary,
    Base64,
    QuotedPrintable,
    Other(Cow<'a, str>),
}
impl<'a> ContentEncoding<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> ContentEncoding<'static> {
        match self {
            ContentEncoding::SevenBit => ContentEncoding::SevenBit,
            ContentEncoding::EightBit => ContentEncoding::EightBit,
            ContentEncoding::Binary => ContentEncoding::Binary,
            ContentEncoding::Base64 => ContentEncoding::Base64,
            ContentEncoding::QuotedPrintable => ContentEncoding::QuotedPrintable,
            ContentEncoding::Other(v) => ContentEncoding::Other(to_owned_cow(v)),
        }
    }
}
/// A body-structure extension item: number, optional string, or a
/// nested list of further extensions.
#[derive(Debug, Eq, PartialEq)]
pub enum BodyExtension<'a> {
    Num(u32),
    Str(Option<Cow<'a, str>>),
    List(Vec<BodyExtension<'a>>),
}
impl<'a> BodyExtension<'a> {
    /// Recursively convert into a value that owns all of its data.
    pub fn into_owned(self) -> BodyExtension<'static> {
        match self {
            BodyExtension::Num(v) => BodyExtension::Num(v),
            BodyExtension::Str(v) => BodyExtension::Str(v.map(to_owned_cow)),
            BodyExtension::List(v) => {
                BodyExtension::List(v.into_iter().map(|v| v.into_owned()).collect())
            }
        }
    }
}
/// Optional list of (name, value) body parameter pairs.
pub type BodyParams<'a> = Option<Vec<(Cow<'a, str>, Cow<'a, str>)>>;
/// Convert a borrowed parameter list into one that owns all its strings.
fn body_param_owned(v: BodyParams<'_>) -> BodyParams<'static> {
    v.map(|v| {
        v.into_iter()
            .map(|(k, v)| (to_owned_cow(k), to_owned_cow(v)))
            .collect()
    })
}
/// An RFC 2822 envelope
///
/// See https://datatracker.ietf.org/doc/html/rfc2822#section-3.6 for more details.
///
/// All string-ish fields are kept as raw bytes (`Cow<[u8]>`); no header
/// decoding is performed here.
#[derive(Debug, Eq, PartialEq)]
pub struct Envelope<'a> {
    pub date: Option<Cow<'a, [u8]>>,
    pub subject: Option<Cow<'a, [u8]>>,
    /// Author of the message; mailbox responsible for writing the message
    pub from: Option<Vec<Address<'a>>>,
    /// Mailbox of the agent responsible for the message's transmission
    pub sender: Option<Vec<Address<'a>>>,
    /// Mailbox that the author of the message suggests replies be sent to
    pub reply_to: Option<Vec<Address<'a>>>,
    pub to: Option<Vec<Address<'a>>>,
    pub cc: Option<Vec<Address<'a>>>,
    pub bcc: Option<Vec<Address<'a>>>,
    pub in_reply_to: Option<Cow<'a, [u8]>>,
    pub message_id: Option<Cow<'a, [u8]>>,
}
impl<'a> Envelope<'a> {
pub fn into_owned(self) -> Envelope<'static> {
Envelope {
date: self.date.map(to_owned_cow),
subject: self.subject.map(to_owned_cow),
from: self
.from
.map(|v| v.into_iter().map(|v| v.into_owned()).collect()),
sender: self
.sender
.map(|v| v.into_iter().map(|v| v.into_owned()).collect()),
reply_to: self
.reply_to
.map(|v| v.into_iter().map(|v| v.into_owned()).collect()),
to: self
.to
.map(|v| v.into_iter().map(|v| v.into_owned()).collect()),
cc: self
.cc
.map(|v| v.into_iter().map(|v| v.into_owned()).collect()),
bcc: self
.bcc
.map(|v| v.into_iter().map(|v| v.into_owned()).collect()),
in_reply_to: self.in_reply_to.map(to_owned_cow),
message_id: self.message_id.map(to_owned_cow),
}
}
}
/// A single address from an envelope address list; every field is
/// optional raw bytes.
#[derive(Debug, Eq, PartialEq)]
pub struct Address<'a> {
    pub name: Option<Cow<'a, [u8]>>,
    pub adl: Option<Cow<'a, [u8]>>,
    pub mailbox: Option<Cow<'a, [u8]>>,
    pub host: Option<Cow<'a, [u8]>>,
}
impl<'a> Address<'a> {
    /// Convert into an address that owns all of its data.
    pub fn into_owned(self) -> Address<'static> {
        Address {
            name: self.name.map(to_owned_cow),
            adl: self.adl.map(to_owned_cow),
            mailbox: self.mailbox.map(to_owned_cow),
            host: self.host.map(to_owned_cow),
        }
    }
}
/// Identifier tag for a request/response exchange.
#[derive(Clone, Debug, Eq, PartialEq)]
pub struct RequestId(pub String);
impl RequestId {
    /// The identifier as raw bytes.
    pub fn as_bytes(&self) -> &[u8] {
        self.0.as_bytes()
    }
}
/// Connection states of an IMAP session.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub enum State {
    NotAuthenticated,
    Authenticated,
    Selected,
    Logout,
}
// Body Structure
/// Parsed body fields: parameters, id, description, encoding, and
/// octet count.
pub struct BodyFields<'a> {
    pub param: BodyParams<'a>,
    pub id: Option<Cow<'a, str>>,
    pub description: Option<Cow<'a, str>>,
    pub transfer_encoding: ContentEncoding<'a>,
    pub octets: u32,
}
impl<'a> BodyFields<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> BodyFields<'static> {
        BodyFields {
            param: body_param_owned(self.param),
            id: self.id.map(to_owned_cow),
            description: self.description.map(to_owned_cow),
            transfer_encoding: self.transfer_encoding.into_owned(),
            octets: self.octets,
        }
    }
}
/// Extension data of a single-part body: md5, disposition, language,
/// location, and trailing extension items.
pub struct BodyExt1Part<'a> {
    pub md5: Option<Cow<'a, str>>,
    pub disposition: Option<ContentDisposition<'a>>,
    pub language: Option<Vec<Cow<'a, str>>>,
    pub location: Option<Cow<'a, str>>,
    pub extension: Option<BodyExtension<'a>>,
}
impl<'a> BodyExt1Part<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> BodyExt1Part<'static> {
        BodyExt1Part {
            md5: self.md5.map(to_owned_cow),
            disposition: self.disposition.map(|v| v.into_owned()),
            language: self
                .language
                .map(|v| v.into_iter().map(to_owned_cow).collect()),
            location: self.location.map(to_owned_cow),
            extension: self.extension.map(|v| v.into_owned()),
        }
    }
}
/// Extension data of a multipart body: parameters, disposition,
/// language, location, and trailing extension items.
pub struct BodyExtMPart<'a> {
    pub param: BodyParams<'a>,
    pub disposition: Option<ContentDisposition<'a>>,
    pub language: Option<Vec<Cow<'a, str>>>,
    pub location: Option<Cow<'a, str>>,
    pub extension: Option<BodyExtension<'a>>,
}
impl<'a> BodyExtMPart<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> BodyExtMPart<'static> {
        BodyExtMPart {
            param: body_param_owned(self.param),
            disposition: self.disposition.map(|v| v.into_owned()),
            language: self
                .language
                .map(|v| v.into_iter().map(to_owned_cow).collect()),
            location: self.location.map(to_owned_cow),
            extension: self.extension.map(|v| v.into_owned()),
        }
    }
}
/// The name attributes are returned as part of a LIST response described in
/// [RFC 3501 section 7.2.2](https://tools.ietf.org/html/rfc3501#section-7.2.2).
///
/// This enumeration additionally includes values from the extension Special-Use
/// Mailboxes [RFC 6154 section 2](https://tools.ietf.org/html/rfc6154#section-2).
#[derive(Debug, Eq, PartialEq, Clone)]
#[non_exhaustive]
pub enum NameAttribute<'a> {
    /// From [RFC 3501 section 7.2.2](https://tools.ietf.org/html/rfc3501#section-7.2.2):
    ///
    /// > It is not possible for any child levels of hierarchy to exist
    /// > under this name; no child levels exist now and none can be
    /// > created in the future.
    NoInferiors,
    /// From [RFC 3501 section 7.2.2](https://tools.ietf.org/html/rfc3501#section-7.2.2):
    ///
    /// > It is not possible to use this name as a selectable mailbox.
    NoSelect,
    /// From [RFC 3501 section 7.2.2](https://tools.ietf.org/html/rfc3501#section-7.2.2):
    ///
    /// > The mailbox has been marked "interesting" by the server; the
    /// > mailbox probably contains messages that have been added since
    /// > the last time the mailbox was selected.
    Marked,
    /// From [RFC 3501 section 7.2.2](https://tools.ietf.org/html/rfc3501#section-7.2.2):
    ///
    /// > The mailbox does not contain any additional messages since the
    /// > last time the mailbox was selected.
    Unmarked,
    /// From [RFC 6154 section 2](https://tools.ietf.org/html/rfc6154#section-2):
    ///
    /// > This mailbox presents all messages in the user's message store.
    /// > Implementations MAY omit some messages, such as, perhaps, those
    /// > in \Trash and \Junk.  When this special use is supported, it is
    /// > almost certain to represent a virtual mailbox.
    All,
    /// From [RFC 6154 section 2](https://tools.ietf.org/html/rfc6154#section-2):
    ///
    /// > This mailbox is used to archive messages.  The meaning of an
    /// > "archival" mailbox is server-dependent; typically, it will be
    /// > used to get messages out of the inbox, or otherwise keep them
    /// > out of the user's way, while still making them accessible.
    Archive,
    /// From [RFC 6154 section 2](https://tools.ietf.org/html/rfc6154#section-2):
    ///
    /// > This mailbox is used to hold draft messages -- typically,
    /// > messages that are being composed but have not yet been sent.  In
    /// > some server implementations, this might be a virtual mailbox,
    /// > containing messages from other mailboxes that are marked with
    /// > the "\Draft" message flag.  Alternatively, this might just be
    /// > advice that a client put drafts here.
    Drafts,
    /// From [RFC 6154 section 2](https://tools.ietf.org/html/rfc6154#section-2):
    ///
    /// > This mailbox presents all messages marked in some way as
    /// > "important".  When this special use is supported, it is likely
    /// > to represent a virtual mailbox collecting messages (from other
    /// > mailboxes) that are marked with the "\Flagged" message flag.
    Flagged,
    /// From [RFC 6154 section 2](https://tools.ietf.org/html/rfc6154#section-2):
    ///
    /// > This mailbox is where messages deemed to be junk mail are held.
    /// > Some server implementations might put messages here
    /// > automatically.  Alternatively, this might just be advice to a
    /// > client-side spam filter.
    Junk,
    /// From [RFC 6154 section 2](https://tools.ietf.org/html/rfc6154#section-2):
    ///
    /// > This mailbox is used to hold copies of messages that have been
    /// > sent.  Some server implementations might put messages here
    /// > automatically.  Alternatively, this might just be advice that a
    /// > client save sent messages here.
    Sent,
    /// From [RFC 6154 section 2](https://tools.ietf.org/html/rfc6154#section-2)
    ///
    /// > This mailbox is used to hold messages that have been deleted or
    /// > marked for deletion.  In some server implementations, this might
    /// > be a virtual mailbox, containing messages from other mailboxes
    /// > that are marked with the "\Deleted" message flag.
    /// > Alternatively, this might just be advice that a client that
    /// > chooses not to use the IMAP "\Deleted" model should use this as
    /// > its trash location.  In server implementations that strictly
    /// > expect the IMAP "\Deleted" model, this special use is likely not
    /// > to be supported.
    Trash,
    /// A name attribute not defined in [RFC 3501 section 7.2.2](https://tools.ietf.org/html/rfc3501#section-7.2.2)
    /// or any supported extension.
    Extension(Cow<'a, str>),
}
impl<'a> NameAttribute<'a> {
    /// Convert into a value that owns all of its data; only the
    /// `Extension` variant actually carries borrowed data.
    pub fn into_owned(self) -> NameAttribute<'static> {
        match self {
            // RFC 3501
            NameAttribute::NoInferiors => NameAttribute::NoInferiors,
            NameAttribute::NoSelect => NameAttribute::NoSelect,
            NameAttribute::Marked => NameAttribute::Marked,
            NameAttribute::Unmarked => NameAttribute::Unmarked,
            // RFC 6154
            NameAttribute::All => NameAttribute::All,
            NameAttribute::Archive => NameAttribute::Archive,
            NameAttribute::Drafts => NameAttribute::Drafts,
            NameAttribute::Flagged => NameAttribute::Flagged,
            NameAttribute::Junk => NameAttribute::Junk,
            NameAttribute::Sent => NameAttribute::Sent,
            NameAttribute::Trash => NameAttribute::Trash,
            // Extensions not supported by this crate
            NameAttribute::Extension(s) => NameAttribute::Extension(to_owned_cow(s)),
        }
    }
}
// IMAP4 QUOTA extension (rfc2087)
/// https://tools.ietf.org/html/rfc2087#section-3
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub enum QuotaResourceName<'a> {
    /// Sum of messages' RFC822.SIZE, in units of 1024 octets
    Storage,
    /// Number of messages
    Message,
    Atom(Cow<'a, str>),
}
impl<'a> QuotaResourceName<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> QuotaResourceName<'static> {
        match self {
            QuotaResourceName::Message => QuotaResourceName::Message,
            QuotaResourceName::Storage => QuotaResourceName::Storage,
            QuotaResourceName::Atom(v) => QuotaResourceName::Atom(to_owned_cow(v)),
        }
    }
}
/// 5.1. QUOTA Response (https://tools.ietf.org/html/rfc2087#section-5.1)
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub struct QuotaResource<'a> {
    pub name: QuotaResourceName<'a>,
    /// current usage of the resource
    pub usage: u64,
    /// resource limit
    pub limit: u64,
}
impl<'a> QuotaResource<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> QuotaResource<'static> {
        QuotaResource {
            name: self.name.into_owned(),
            usage: self.usage,
            limit: self.limit,
        }
    }
}
/// 5.1. QUOTA Response (https://tools.ietf.org/html/rfc2087#section-5.1)
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub struct Quota<'a> {
    /// quota root name
    pub root_name: Cow<'a, str>,
    pub resources: Vec<QuotaResource<'a>>,
}
impl<'a> Quota<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> Quota<'static> {
        Quota {
            root_name: to_owned_cow(self.root_name),
            resources: self.resources.into_iter().map(|r| r.into_owned()).collect(),
        }
    }
}
/// 5.2. QUOTAROOT Response (https://tools.ietf.org/html/rfc2087#section-5.2)
#[derive(Debug, Eq, PartialEq, Hash, Clone)]
pub struct QuotaRoot<'a> {
    /// mailbox name
    pub mailbox_name: Cow<'a, str>,
    /// zero or more quota root names
    pub quota_root_names: Vec<Cow<'a, str>>,
}
impl<'a> QuotaRoot<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> QuotaRoot<'static> {
        QuotaRoot {
            mailbox_name: to_owned_cow(self.mailbox_name),
            quota_root_names: self
                .quota_root_names
                .into_iter()
                .map(to_owned_cow)
                .collect(),
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    /// Tests that the [`NameAttribute::into_owned`] method returns the
    /// same value (the ownership should only change).
    #[test]
    fn test_name_attribute_into_owned() {
        // One value per variant; `Extension` carries a borrowed Cow so
        // it actually exercises the promotion-to-owned path.
        let name_attributes = [
            // RFC 3501
            NameAttribute::NoInferiors,
            NameAttribute::NoSelect,
            NameAttribute::Marked,
            NameAttribute::Unmarked,
            // RFC 6154
            NameAttribute::All,
            NameAttribute::Archive,
            NameAttribute::Drafts,
            NameAttribute::Flagged,
            NameAttribute::Junk,
            NameAttribute::Sent,
            NameAttribute::Trash,
            // Extensions not supported by this crate
            NameAttribute::Extension(Cow::Borrowed("Foobar")),
        ];
        for name_attribute in name_attributes {
            let owned_name_attribute = name_attribute.clone().into_owned();
            assert_eq!(name_attribute, owned_name_attribute);
        }
    }
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/types/acls.rs | imap-proto/src/types/acls.rs | use super::to_owned_cow;
use std::borrow::Cow;
// IMAP4 ACL Extension 4313/2086
/// An ACL response: a mailbox and its access-control entries.
#[derive(Debug, Eq, PartialEq)]
pub struct Acl<'a> {
    pub mailbox: Cow<'a, str>,
    pub acls: Vec<AclEntry<'a>>,
}
impl<'a> Acl<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> Acl<'static> {
        Acl {
            mailbox: to_owned_cow(self.mailbox),
            acls: self.acls.into_iter().map(AclEntry::into_owned).collect(),
        }
    }
}
/// One identifier/rights pair within an ACL.
#[derive(Debug, Eq, PartialEq)]
pub struct AclEntry<'a> {
    pub identifier: Cow<'a, str>,
    pub rights: Vec<AclRight>,
}
impl<'a> AclEntry<'a> {
    /// Convert into a value that owns all of its data; `rights` is
    /// already owned and moves through unchanged.
    pub fn into_owned(self) -> AclEntry<'static> {
        AclEntry {
            identifier: to_owned_cow(self.identifier),
            rights: self.rights,
        }
    }
}
/// A LISTRIGHTS response: required and optional rights for an
/// identifier on a mailbox.
#[derive(Debug, Eq, PartialEq)]
pub struct ListRights<'a> {
    pub mailbox: Cow<'a, str>,
    pub identifier: Cow<'a, str>,
    pub required: Vec<AclRight>,
    pub optional: Vec<AclRight>,
}
impl<'a> ListRights<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> ListRights<'static> {
        ListRights {
            mailbox: to_owned_cow(self.mailbox),
            identifier: to_owned_cow(self.identifier),
            required: self.required,
            optional: self.optional,
        }
    }
}
/// A MYRIGHTS response: the rights held on a mailbox.
#[derive(Debug, Eq, PartialEq)]
pub struct MyRights<'a> {
    pub mailbox: Cow<'a, str>,
    pub rights: Vec<AclRight>,
}
impl<'a> MyRights<'a> {
    /// Convert into a value that owns all of its data.
    pub fn into_owned(self) -> MyRights<'static> {
        MyRights {
            mailbox: to_owned_cow(self.mailbox),
            rights: self.rights,
        }
    }
}
#[derive(Copy, Clone, Debug, Eq, PartialEq, Hash)]
pub enum AclRight {
    /// l - lookup (mailbox is visible to LIST/LSUB commands, SUBSCRIBE
    ///     mailbox)
    Lookup,
    /// r - read (SELECT the mailbox, perform STATUS)
    Read,
    /// s - keep seen/unseen information across sessions (set or clear
    ///     \SEEN flag via STORE, also set \SEEN during APPEND/COPY/
    ///     FETCH BODY[...])
    Seen,
    /// w - write (set or clear flags other than \SEEN and \DELETED via
    ///     STORE, also set them during APPEND/COPY)
    Write,
    /// i - insert (perform APPEND, COPY into mailbox)
    Insert,
    /// p - post (send mail to submission address for mailbox,
    ///     not enforced by IMAP4 itself)
    Post,
    /// k - create mailboxes (CREATE new sub-mailboxes in any
    ///     implementation-defined hierarchy, parent mailbox for the new
    ///     mailbox name in RENAME)
    CreateMailbox,
    /// x - delete mailbox (DELETE mailbox, old mailbox name in RENAME)
    DeleteMailbox,
    /// t - delete messages (set or clear \DELETED flag via STORE, set
    ///     \DELETED flag during APPEND/COPY)
    DeleteMessage,
    /// e - perform EXPUNGE and expunge as a part of CLOSE
    Expunge,
    /// a - administer (perform SETACL/DELETEACL/GETACL/LISTRIGHTS)
    Administer,
    /// n - ability to write .shared annotations values
    ///     From RFC 5257
    Annotation,
    /// c - old (deprecated) create. Do not use. Read RFC 4314 for more information.
    OldCreate,
    /// d - old (deprecated) delete. Do not use. Read RFC 4314 for more information.
    OldDelete,
    /// A custom right
    Custom(char),
}
/// Table pairing every standard right with its character. Shared by
/// both `From` conversions so the two directions cannot drift apart.
const STANDARD_RIGHTS: [(char, AclRight); 14] = [
    ('l', AclRight::Lookup),
    ('r', AclRight::Read),
    ('s', AclRight::Seen),
    ('w', AclRight::Write),
    ('i', AclRight::Insert),
    ('p', AclRight::Post),
    ('k', AclRight::CreateMailbox),
    ('x', AclRight::DeleteMailbox),
    ('t', AclRight::DeleteMessage),
    ('e', AclRight::Expunge),
    ('a', AclRight::Administer),
    ('n', AclRight::Annotation),
    ('c', AclRight::OldCreate),
    ('d', AclRight::OldDelete),
];
impl From<char> for AclRight {
    /// Map a rights character to its `AclRight`; unknown characters
    /// become `Custom`.
    fn from(c: char) -> Self {
        STANDARD_RIGHTS
            .iter()
            .find(|(ch, _)| *ch == c)
            .map(|(_, right)| *right)
            .unwrap_or(AclRight::Custom(c))
    }
}
impl From<AclRight> for char {
    /// Map an `AclRight` back to its rights character.
    fn from(right: AclRight) -> Self {
        if let AclRight::Custom(c) = right {
            return c;
        }
        STANDARD_RIGHTS
            .iter()
            .find(|(_, r)| *r == right)
            .map(|(ch, _)| *ch)
            .expect("every non-custom right is listed in STANDARD_RIGHTS")
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    // The two From conversions must agree in both directions.
    #[test]
    fn test_char_to_acl_right() {
        assert_eq!(Into::<AclRight>::into('l'), AclRight::Lookup);
        assert_eq!(Into::<AclRight>::into('c'), AclRight::OldCreate);
        assert_eq!(Into::<AclRight>::into('k'), AclRight::CreateMailbox);
        assert_eq!(Into::<AclRight>::into('0'), AclRight::Custom('0'));
    }
    #[test]
    fn test_acl_right_to_char() {
        assert_eq!(Into::<char>::into(AclRight::Lookup), 'l');
        assert_eq!(Into::<char>::into(AclRight::OldCreate), 'c');
        assert_eq!(Into::<char>::into(AclRight::CreateMailbox), 'k');
        assert_eq!(Into::<char>::into(AclRight::Custom('0')), '0');
    }
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/builders/command.rs | imap-proto/src/builders/command.rs | use std::borrow::Cow;
use std::marker::PhantomData;
use std::ops::{RangeFrom, RangeInclusive};
use std::str;
use crate::types::{AttrMacro, Attribute, State};
/// Entry points for building IMAP client commands.
pub struct CommandBuilder {}
impl CommandBuilder {
    /// Build a CHECK command.
    pub fn check() -> Command {
        let args = b"CHECK".to_vec();
        Command {
            args,
            next_state: None,
        }
    }
    /// Build a CLOSE command; on success the session moves back to the
    /// authenticated state.
    pub fn close() -> Command {
        let args = b"CLOSE".to_vec();
        Command {
            args,
            next_state: Some(State::Authenticated),
        }
    }
    /// Build an EXAMINE command for the given mailbox; select
    /// parameters can be appended before converting into a `Command`.
    ///
    /// NOTE(review): panics if `mailbox` contains CR or LF, because
    /// `quoted_string` returns Err for those and the result is unwrapped.
    pub fn examine(mailbox: &str) -> SelectCommand<select::NoParams> {
        let args = format!("EXAMINE \"{}\"", quoted_string(mailbox).unwrap()).into_bytes();
        SelectCommand {
            args,
            state: PhantomData,
        }
    }
    /// Start a FETCH command; message numbers/ranges are added next.
    pub fn fetch() -> FetchCommand<fetch::Empty> {
        FetchCommand {
            args: b"FETCH ".to_vec(),
            state: PhantomData,
        }
    }
    /// Build a LIST command with the given reference and mailbox glob.
    pub fn list(reference: &str, glob: &str) -> Command {
        let args = format!(
            "LIST \"{}\" \"{}\"",
            quoted_string(reference).unwrap(),
            quoted_string(glob).unwrap()
        )
        .into_bytes();
        Command {
            args,
            next_state: None,
        }
    }
    /// Build a LOGIN command; on success the session becomes
    /// authenticated. Panics if either argument contains CR/LF.
    pub fn login(user_name: &str, password: &str) -> Command {
        let args = format!(
            "LOGIN \"{}\" \"{}\"",
            quoted_string(user_name).unwrap(),
            quoted_string(password).unwrap()
        )
        .into_bytes();
        Command {
            args,
            next_state: Some(State::Authenticated),
        }
    }
    /// Build a SELECT command for the given mailbox.
    pub fn select(mailbox: &str) -> SelectCommand<select::NoParams> {
        let args = format!("SELECT \"{}\"", quoted_string(mailbox).unwrap()).into_bytes();
        SelectCommand {
            args,
            state: PhantomData,
        }
    }
    /// Start a UID FETCH command; message numbers/ranges are added next.
    pub fn uid_fetch() -> FetchCommand<fetch::Empty> {
        FetchCommand {
            args: b"UID FETCH ".to_vec(),
            state: PhantomData,
        }
    }
}
/// A fully assembled command: the raw argument bytes plus the state the
/// session enters if the command succeeds (if any).
pub struct Command {
    pub args: Vec<u8>,
    pub next_state: Option<State>,
}
/// Builder for SELECT/EXAMINE commands; the type parameter tracks
/// whether a select-parameter list has been opened.
pub struct SelectCommand<T> {
    args: Vec<u8>,
    state: PhantomData<T>,
}
impl SelectCommand<select::NoParams> {
    // RFC 4551 CONDSTORE parameter (based on RFC 4466 `select-param`)
    pub fn cond_store(mut self) -> SelectCommand<select::Params> {
        // Opens the parameter list; the closing ')' is added by
        // `From<SelectCommand<select::Params>> for Command`.
        self.args.extend(b" (CONDSTORE");
        SelectCommand {
            args: self.args,
            state: PhantomData,
        }
    }
}
impl From<SelectCommand<select::NoParams>> for Command {
    fn from(cmd: SelectCommand<select::NoParams>) -> Command {
        Command {
            args: cmd.args,
            next_state: Some(State::Selected),
        }
    }
}
impl From<SelectCommand<select::Params>> for Command {
    // Closes the parameter list opened by `cond_store`.
    fn from(mut cmd: SelectCommand<select::Params>) -> Command {
        cmd.args.push(b')');
        Command {
            args: cmd.args,
            next_state: Some(State::Selected),
        }
    }
}
/// Type-state markers for `SelectCommand`.
pub mod select {
    pub struct NoParams;
    pub struct Params;
}
/// Type-state markers for `FetchCommand`.
pub mod fetch {
    pub struct Empty;
    pub struct Messages;
    pub struct Attributes;
    pub struct Modifiers;
}
/// Builder for FETCH commands; the type parameter tracks build progress
/// (message set, then attributes, then modifiers).
pub struct FetchCommand<T> {
    args: Vec<u8>,
    state: PhantomData<T>,
}
impl FetchCommand<fetch::Empty> {
    /// Start the message set with a single sequence number.
    pub fn num(mut self, num: u32) -> FetchCommand<fetch::Messages> {
        sequence_num(&mut self.args, num);
        FetchCommand {
            args: self.args,
            state: PhantomData,
        }
    }
    /// Start the message set with an inclusive `start:end` range.
    pub fn range(mut self, range: RangeInclusive<u32>) -> FetchCommand<fetch::Messages> {
        sequence_range(&mut self.args, range);
        FetchCommand {
            args: self.args,
            state: PhantomData,
        }
    }
    /// Start the message set with an open-ended `start:*` range.
    pub fn range_from(mut self, range: RangeFrom<u32>) -> FetchCommand<fetch::Messages> {
        range_from(&mut self.args, range);
        FetchCommand {
            args: self.args,
            state: PhantomData,
        }
    }
}
impl FetchCommand<fetch::Messages> {
    /// Append another sequence number to the message set.
    pub fn num(mut self, num: u32) -> FetchCommand<fetch::Messages> {
        self.args.extend(b",");
        sequence_num(&mut self.args, num);
        self
    }
    /// Append another inclusive range to the message set.
    pub fn range(mut self, range: RangeInclusive<u32>) -> FetchCommand<fetch::Messages> {
        self.args.extend(b",");
        sequence_range(&mut self.args, range);
        self
    }
    /// Append another open-ended range to the message set.
    pub fn range_from(mut self, range: RangeFrom<u32>) -> FetchCommand<fetch::Messages> {
        self.args.extend(b",");
        range_from(&mut self.args, range);
        self
    }
    /// Request a named attribute macro (ALL/FAST/FULL) instead of an
    /// explicit attribute list.
    pub fn attr_macro(mut self, named: AttrMacro) -> FetchCommand<fetch::Modifiers> {
        self.args.push(b' ');
        self.args.extend(
            match named {
                AttrMacro::All => "ALL",
                AttrMacro::Fast => "FAST",
                AttrMacro::Full => "FULL",
            }
            .as_bytes(),
        );
        FetchCommand {
            args: self.args,
            state: PhantomData,
        }
    }
    /// Open an explicit attribute list with a first attribute; the list
    /// is closed when converting into a `Command` or by `changed_since`.
    pub fn attr(mut self, attr: Attribute) -> FetchCommand<fetch::Attributes> {
        self.args.extend(b" (");
        push_attr(&mut self.args, attr);
        FetchCommand {
            args: self.args,
            state: PhantomData,
        }
    }
}
/// Append a single message sequence number to `cmd`.
fn sequence_num(cmd: &mut Vec<u8>, num: u32) {
    let text = num.to_string();
    cmd.extend_from_slice(text.as_bytes());
}
/// Append an inclusive `start:end` sequence range to `cmd`.
fn sequence_range(cmd: &mut Vec<u8>, range: RangeInclusive<u32>) {
    let rendered = format!("{}:{}", range.start(), range.end());
    cmd.extend_from_slice(rendered.as_bytes());
}
/// Append an open-ended `start:*` sequence range to `cmd`.
fn range_from(cmd: &mut Vec<u8>, range: RangeFrom<u32>) {
    let rendered = format!("{}:*", range.start);
    cmd.extend_from_slice(rendered.as_bytes());
}
impl FetchCommand<fetch::Attributes> {
    /// Append another attribute to the open attribute list.
    pub fn attr(mut self, attr: Attribute) -> FetchCommand<fetch::Attributes> {
        self.args.push(b' ');
        push_attr(&mut self.args, attr);
        self
    }
    /// Close the attribute list and append a CHANGEDSINCE modifier.
    pub fn changed_since(mut self, seq: u64) -> FetchCommand<fetch::Modifiers> {
        self.args.push(b')');
        changed_since(&mut self.args, seq);
        FetchCommand {
            args: self.args,
            state: PhantomData,
        }
    }
}
/// Append the wire name of `attr` to `cmd`.
fn push_attr(cmd: &mut Vec<u8>, attr: Attribute) {
    cmd.extend(
        match attr {
            Attribute::Body => "BODY",
            Attribute::Envelope => "ENVELOPE",
            Attribute::Flags => "FLAGS",
            Attribute::InternalDate => "INTERNALDATE",
            Attribute::ModSeq => "MODSEQ",
            Attribute::Rfc822 => "RFC822",
            Attribute::Rfc822Size => "RFC822.SIZE",
            Attribute::Rfc822Text => "RFC822.TEXT",
            Attribute::Uid => "UID",
            Attribute::GmailLabels => "X-GM-LABELS",
            Attribute::GmailMsgId => "X-GM-MSGID",
            Attribute::GmailThrId => "X-GM-THRID",
        }
        .as_bytes(),
    );
}
impl From<FetchCommand<fetch::Attributes>> for Command {
    // Closes the attribute list opened by `attr`.
    fn from(mut cmd: FetchCommand<fetch::Attributes>) -> Command {
        cmd.args.push(b')');
        Command {
            args: cmd.args,
            next_state: None,
        }
    }
}
impl From<FetchCommand<fetch::Modifiers>> for Command {
    fn from(cmd: FetchCommand<fetch::Modifiers>) -> Command {
        Command {
            args: cmd.args,
            next_state: None,
        }
    }
}
impl FetchCommand<fetch::Modifiers> {
    /// Append a further CHANGEDSINCE modifier.
    pub fn changed_since(mut self, seq: u64) -> FetchCommand<fetch::Modifiers> {
        changed_since(&mut self.args, seq);
        self
    }
}
/// Append a ` (CHANGEDSINCE <seq>)` fetch modifier to `cmd`.
fn changed_since(cmd: &mut Vec<u8>, seq: u64) {
    let modifier = format!(" (CHANGEDSINCE {})", seq);
    cmd.extend_from_slice(modifier.as_bytes());
}
/// Returns an escaped string if necessary for use as a "quoted" string per
/// the IMAPv4 RFC. Return value does not include surrounding quote characters.
/// Will return Err if the argument contains illegal characters.
///
/// Relevant definitions from RFC 3501 formal syntax:
///
/// string = quoted / literal [literal elided here]
/// quoted = DQUOTE *QUOTED-CHAR DQUOTE
/// QUOTED-CHAR = <any TEXT-CHAR except quoted-specials> / "\" quoted-specials
/// quoted-specials = DQUOTE / "\"
/// TEXT-CHAR = <any CHAR except CR and LF>
fn quoted_string(s: &str) -> Result<Cow<'_, str>, &'static str> {
    // CR and LF may never appear in a quoted string, escaped or not.
    if s.contains(|c| c == '\r' || c == '\n') {
        return Err("CR and LF not allowed in quoted strings");
    }
    // Fast path: nothing needs escaping, hand back a borrow.
    if !s.contains(|c| c == '\\' || c == '"') {
        return Ok(Cow::Borrowed(s));
    }
    // Slow path: copy the string, prefixing each quoted-special with '\'.
    let mut escaped = String::with_capacity(s.len() + 2);
    for c in s.chars() {
        if c == '\\' || c == '"' {
            escaped.push('\\');
        }
        escaped.push(c);
    }
    Ok(Cow::Owned(escaped))
}
#[cfg(test)]
mod tests {
    use super::{quoted_string, Attribute, Command, CommandBuilder};
    #[test]
    fn login() {
        assert_eq!(
            CommandBuilder::login("djc", "s3cr3t").args,
            b"LOGIN \"djc\" \"s3cr3t\""
        );
        // Backslash in the password must be escaped on the wire.
        assert_eq!(
            CommandBuilder::login("djc", "domain\\password").args,
            b"LOGIN \"djc\" \"domain\\\\password\""
        );
    }
    #[test]
    fn select() {
        let cmd = Command::from(CommandBuilder::select("INBOX"));
        assert_eq!(&cmd.args, br#"SELECT "INBOX""#);
        // cond_store opens a parameter list that From<...> must close.
        let cmd = Command::from(CommandBuilder::examine("INBOX").cond_store());
        assert_eq!(&cmd.args, br#"EXAMINE "INBOX" (CONDSTORE)"#);
    }
    #[test]
    fn fetch() {
        let cmd: Command = CommandBuilder::fetch()
            .range_from(1..)
            .attr(Attribute::Uid)
            .attr(Attribute::ModSeq)
            .changed_since(13)
            .into();
        assert_eq!(cmd.args, &b"FETCH 1:* (UID MODSEQ) (CHANGEDSINCE 13)"[..]);
        let cmd: Command = CommandBuilder::fetch()
            .num(1)
            .num(2)
            .attr(Attribute::Uid)
            .attr(Attribute::ModSeq)
            .into();
        assert_eq!(cmd.args, &b"FETCH 1,2 (UID MODSEQ)"[..]);
    }
    #[test]
    fn test_quoted_string() {
        assert_eq!(quoted_string("a").unwrap(), "a");
        assert_eq!(quoted_string("").unwrap(), "");
        assert_eq!(quoted_string("a\"b\\c").unwrap(), "a\\\"b\\\\c");
        assert_eq!(quoted_string("\"foo\\").unwrap(), "\\\"foo\\\\");
        assert!(quoted_string("\n").is_err());
    }
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/builders/mod.rs | imap-proto/src/builders/mod.rs | pub mod command;
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/rfc7162.rs | imap-proto/src/parser/rfc7162.rs | //!
//!
//! https://tools.ietf.org/html/rfc7162
//!
//! The IMAP QRESYNC Extensions
//!
use nom::{
bytes::streaming::tag_no_case, character::streaming::space1, combinator::opt, sequence::tuple,
IResult,
};
use crate::parser::core::sequence_set;
use crate::types::*;
// The VANISHED response reports that the specified UIDs have been
// permanently removed from the mailbox.  This response is similar to
// the EXPUNGE response (RFC3501); however, it can return information
// about multiple messages, and it returns UIDs instead of message
// numbers.
// [RFC7162 - VANISHED RESPONSE](https://tools.ietf.org/html/rfc7162#section-3.2.10)
//
// The optional "(EARLIER)" tag is surfaced as the boolean `earlier`.
pub(crate) fn resp_vanished(i: &[u8]) -> IResult<&[u8], Response<'_>> {
    let (rest, (_, earlier, _, uids)) = tuple((
        tag_no_case("VANISHED"),
        opt(tuple((space1, tag_no_case("(EARLIER)")))),
        space1,
        sequence_set,
    ))(i)?;
    Ok((
        rest,
        Response::Vanished {
            earlier: earlier.is_some(),
            uids,
        },
    ))
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/rfc2971.rs | imap-proto/src/parser/rfc2971.rs | //!
//!
//! https://tools.ietf.org/html/rfc2971
//!
//! The IMAP4 ID extension
//!
use std::{borrow::Cow, collections::HashMap};
use nom::{
branch::alt,
bytes::complete::tag_no_case,
character::complete::{char, space0, space1},
combinator::map,
multi::many0,
sequence::{preceded, separated_pair, tuple},
IResult,
};
use crate::{
parser::core::{nil, nstring_utf8, string_utf8},
Response,
};
// A single id parameter (field and value).
// Format: string SPACE nstring
// [RFC2971 - Formal Syntax](https://tools.ietf.org/html/rfc2971#section-4)
fn id_param(i: &[u8]) -> IResult<&[u8], (&str, Option<&str>)> {
separated_pair(string_utf8, space1, nstring_utf8)(i)
}
// The non-nil case of id parameter list.
// Format: "(" #(string SPACE nstring) ")"
// [RFC2971 - Formal Syntax](https://tools.ietf.org/html/rfc2971#section-4)
fn id_param_list_not_nil(i: &[u8]) -> IResult<&[u8], HashMap<&str, &str>> {
map(
tuple((
char('('),
id_param,
many0(tuple((space1, id_param))),
preceded(space0, char(')')),
)),
|(_, first_param, rest_params, _)| {
let mut params = vec![first_param];
for (_, p) in rest_params {
params.push(p)
}
params
.into_iter()
.filter(|(_k, v)| v.is_some())
.map(|(k, v)| (k, v.unwrap()))
.collect()
},
)(i)
}
// The id parameter list of all cases
// id_params_list ::= "(" #(string SPACE nstring) ")" / nil
// [RFC2971 - Formal Syntax](https://tools.ietf.org/html/rfc2971#section-4)
fn id_param_list(i: &[u8]) -> IResult<&[u8], Option<HashMap<&str, &str>>> {
alt((map(id_param_list_not_nil, Some), map(nil, |_| None)))(i)
}
// id_response ::= "ID" SPACE id_params_list
// [RFC2971 - Formal Syntax](https://tools.ietf.org/html/rfc2971#section-4)
pub(crate) fn resp_id(i: &[u8]) -> IResult<&[u8], Response<'_>> {
let (rest, map) = map(
tuple((tag_no_case("ID"), space1, id_param_list)),
|(_id, _sp, p)| p,
)(i)?;
Ok((
rest,
Response::Id(map.map(|m| {
m.into_iter()
.map(|(k, v)| (Cow::Borrowed(k), Cow::Borrowed(v)))
.collect()
})),
))
}
#[cfg(test)]
mod tests {
use super::*;
use assert_matches::assert_matches;
#[test]
fn test_id_param() {
assert_matches!(
id_param(br#""name" "Cyrus""#),
Ok((_, (name, value))) => {
assert_eq!(name, "name");
assert_eq!(value, Some("Cyrus"));
}
);
assert_matches!(
id_param(br#""name" NIL"#),
Ok((_, (name, value))) => {
assert_eq!(name, "name");
assert_eq!(value, None);
}
);
}
#[test]
fn test_id_param_list_not_nil() {
assert_matches!(
id_param_list_not_nil(br#"("name" "Cyrus" "version" "1.5" "os" "sunos" "os-version" "5.5" "support-url" "mailto:cyrus-bugs+@andrew.cmu.edu")"#),
Ok((_, params)) => {
assert_eq!(
params,
vec![
("name", "Cyrus"),
("version", "1.5"),
("os", "sunos"),
("os-version", "5.5"),
("support-url", "mailto:cyrus-bugs+@andrew.cmu.edu"),
].into_iter()
.collect()
);
}
);
}
#[test]
fn test_id_param_list() {
assert_matches!(
id_param_list(br#"("name" "Cyrus" "version" "1.5" "os" "sunos" "os-version" "5.5" "support-url" "mailto:cyrus-bugs+@andrew.cmu.edu")"#),
Ok((_, Some(params))) => {
assert_eq!(
params,
vec![
("name", "Cyrus"),
("version", "1.5"),
("os", "sunos"),
("os-version", "5.5"),
("support-url", "mailto:cyrus-bugs+@andrew.cmu.edu"),
].into_iter()
.collect()
);
}
);
assert_matches!(
id_param_list(br##"NIL"##),
Ok((_, params)) => {
assert_eq!(params, None);
}
);
}
#[test]
fn test_resp_id() {
assert_matches!(
resp_id(br#"ID ("name" "Cyrus" "version" "1.5" "os" "sunos" "os-version" "5.5" "support-url" "mailto:cyrus-bugs+@andrew.cmu.edu")"#),
Ok((_, Response::Id(Some(id_info)))) => {
assert_eq!(
id_info,
vec![
("name", "Cyrus"),
("version", "1.5"),
("os", "sunos"),
("os-version", "5.5"),
("support-url", "mailto:cyrus-bugs+@andrew.cmu.edu"),
].into_iter()
.map(|(k, v)| (Cow::Borrowed(k), Cow::Borrowed(v)))
.collect()
);
}
);
// Test that NILs inside parameter list don't crash the parser.
// RFC2971 allows NILs as parameter values.
assert_matches!(
resp_id(br#"ID ("name" "Cyrus" "version" "1.5" "os" NIL "os-version" NIL "support-url" "mailto:cyrus-bugs+@andrew.cmu.edu")"#),
Ok((_, Response::Id(Some(id_info)))) => {
assert_eq!(
id_info,
vec![
("name", "Cyrus"),
("version", "1.5"),
("support-url", "mailto:cyrus-bugs+@andrew.cmu.edu"),
].into_iter()
.map(|(k, v)| (Cow::Borrowed(k), Cow::Borrowed(v)))
.collect()
);
}
);
assert_matches!(
resp_id(br##"ID NIL"##),
Ok((_, Response::Id(id_info))) => {
assert_eq!(id_info, None);
}
);
assert_matches!(
resp_id(br#"ID ("name" "Archiveopteryx" "version" "3.2.0" "compile-time" "Feb 6 2023 19:59:14" "homepage-url" "http://archiveopteryx.org" "release-url" "http://archiveopteryx.org/3.2.0" )"#),
Ok((_, Response::Id(Some(id_info)))) => {
assert_eq!(
id_info,
vec![
("name", "Archiveopteryx"),
("version", "3.2.0"),
("compile-time", "Feb 6 2023 19:59:14"),
("homepage-url", "http://archiveopteryx.org"),
("release-url", "http://archiveopteryx.org/3.2.0"),
].into_iter()
.map(|(k, v)| (Cow::Borrowed(k), Cow::Borrowed(v)))
.collect()
);
}
);
}
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/rfc5464.rs | imap-proto/src/parser/rfc5464.rs | //!
//! https://tools.ietf.org/html/rfc5464
//!
//! IMAP METADATA extension
//!
use nom::{
branch::alt,
bytes::streaming::{tag, tag_no_case},
combinator::map,
multi::separated_list0,
sequence::tuple,
IResult,
};
use std::borrow::Cow;
use crate::{parser::core::*, types::*};
fn is_entry_component_char(c: u8) -> bool {
c < 0x80 && c > 0x19 && c != b'*' && c != b'%' && c != b'/'
}
enum EntryParseStage<'a> {
PrivateShared,
Admin(usize),
VendorComment(usize),
Path(usize),
Done(usize),
Fail(nom::Err<&'a [u8]>),
}
fn check_private_shared(i: &[u8]) -> EntryParseStage<'_> {
if i.starts_with(b"/private") {
EntryParseStage::VendorComment(8)
} else if i.starts_with(b"/shared") {
EntryParseStage::Admin(7)
} else {
EntryParseStage::Fail(nom::Err::Error(
b"Entry Name doesn't start with /private or /shared",
))
}
}
fn check_admin(i: &[u8], l: usize) -> EntryParseStage<'_> {
if i[l..].starts_with(b"/admin") {
EntryParseStage::Path(l + 6)
} else {
EntryParseStage::VendorComment(l)
}
}
fn check_vendor_comment(i: &[u8], l: usize) -> EntryParseStage<'_> {
if i[l..].starts_with(b"/comment") {
EntryParseStage::Path(l + 8)
} else if i[l..].starts_with(b"/vendor") {
//make sure vendor name is present
if i.len() < l + 9 || i[l + 7] != b'/' || !is_entry_component_char(i[l + 8]) {
EntryParseStage::Fail(nom::Err::Incomplete(nom::Needed::Unknown))
} else {
EntryParseStage::Path(l + 7)
}
} else {
EntryParseStage::Fail(nom::Err::Error(
b"Entry name is not continued with /admin, /vendor or /comment",
))
}
}
fn check_path(i: &[u8], l: usize) -> EntryParseStage<'_> {
if i.len() == l || i[l] == b' ' || i[l] == b'\r' {
return EntryParseStage::Done(l);
} else if i[l] != b'/' {
return EntryParseStage::Fail(nom::Err::Error(b"Entry name path is corrupted"));
}
for j in 1..(i.len() - l) {
if !is_entry_component_char(i[l + j]) {
return EntryParseStage::Path(l + j);
}
}
EntryParseStage::Done(i.len())
}
fn check_entry_name(i: &[u8]) -> IResult<&[u8], &[u8]> {
let mut stage = EntryParseStage::PrivateShared;
loop {
match stage {
EntryParseStage::PrivateShared => {
stage = check_private_shared(i);
}
EntryParseStage::Admin(l) => {
stage = check_admin(i, l);
}
EntryParseStage::VendorComment(l) => {
stage = check_vendor_comment(i, l);
}
EntryParseStage::Path(l) => {
stage = check_path(i, l);
}
EntryParseStage::Done(l) => {
return Ok((&i[l..], &i[..l]));
}
EntryParseStage::Fail(nom::Err::Error(err_msg)) => {
return std::result::Result::Err(nom::Err::Error(nom::error::Error::new(
err_msg,
nom::error::ErrorKind::Verify,
)));
}
EntryParseStage::Fail(nom::Err::Incomplete(reason)) => {
return std::result::Result::Err(nom::Err::Incomplete(reason));
}
_ => panic!("Entry name verification failure"),
}
}
}
fn entry_name(i: &[u8]) -> IResult<&[u8], &[u8]> {
let astring_res = astring(i)?;
check_entry_name(astring_res.1)?;
Ok(astring_res)
}
fn slice_to_str(i: &[u8]) -> &str {
std::str::from_utf8(i).unwrap()
}
fn nil_value(i: &[u8]) -> IResult<&[u8], Option<String>> {
map(tag_no_case("NIL"), |_| None)(i)
}
fn string_value(i: &[u8]) -> IResult<&[u8], Option<String>> {
map(alt((quoted, literal)), |s| {
Some(slice_to_str(s).to_string())
})(i)
}
fn keyval_list(i: &[u8]) -> IResult<&[u8], Vec<Metadata>> {
parenthesized_nonempty_list(map(
tuple((
map(entry_name, slice_to_str),
tag(" "),
alt((nil_value, string_value)),
)),
|(key, _, value)| Metadata {
entry: key.to_string(),
value,
},
))(i)
}
fn entry_list(i: &[u8]) -> IResult<&[u8], Vec<Cow<'_, str>>> {
separated_list0(tag(" "), map(map(entry_name, slice_to_str), Cow::Borrowed))(i)
}
fn metadata_common(i: &[u8]) -> IResult<&[u8], &[u8]> {
let (i, (_, mbox, _)) = tuple((tag_no_case("METADATA "), quoted, tag(" ")))(i)?;
Ok((i, mbox))
}
// [RFC5464 - 4.4.1 METADATA Response with values]
pub(crate) fn metadata_solicited(i: &[u8]) -> IResult<&[u8], Response<'_>> {
let (i, (mailbox, values)) = tuple((metadata_common, keyval_list))(i)?;
Ok((
i,
Response::MailboxData(MailboxDatum::MetadataSolicited {
mailbox: Cow::Borrowed(slice_to_str(mailbox)),
values,
}),
))
}
// [RFC5464 - 4.4.2 Unsolicited METADATA Response without values]
pub(crate) fn metadata_unsolicited(i: &[u8]) -> IResult<&[u8], Response<'_>> {
let (i, (mailbox, values)) = tuple((metadata_common, entry_list))(i)?;
Ok((
i,
Response::MailboxData(MailboxDatum::MetadataUnsolicited {
mailbox: Cow::Borrowed(slice_to_str(mailbox)),
values,
}),
))
}
// There are any entries with values larger than the MAXSIZE limit given to GETMETADATA.
// Extends resp-test-code defined in rfc3501.
// [RFC5464 - 4.2.1 MAXSIZE GETMETADATA Command Option](https://tools.ietf.org/html/rfc5464#section-4.2.1)
// [RFC5464 - 5. Formal Syntax - resp-text-code](https://tools.ietf.org/html/rfc5464#section-5)
pub(crate) fn resp_text_code_metadata_long_entries(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
let (i, (_, num)) = tuple((tag_no_case("METADATA LONGENTRIES "), number_64))(i)?;
Ok((i, ResponseCode::MetadataLongEntries(num)))
}
// Server is unable to set an annotation because the size of its value is too large.
// Extends resp-test-code defined in rfc3501.
// [RFC5464 - 4.3 SETMETADATA Command](https://tools.ietf.org/html/rfc5464#section-4.3)
// [RFC5464 - 5. Formal Syntax - resp-text-code](https://tools.ietf.org/html/rfc5464#section-5)
pub(crate) fn resp_text_code_metadata_max_size(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
let (i, (_, num)) = tuple((tag_no_case("METADATA MAXSIZE "), number_64))(i)?;
Ok((i, ResponseCode::MetadataMaxSize(num)))
}
// Server is unable to set a new annotation because the maximum number of allowed annotations has already been reached.
// Extends resp-test-code defined in rfc3501.
// [RFC5464 - 4.3 SETMETADATA Command](https://tools.ietf.org/html/rfc5464#section-4.3)
// [RFC5464 - 5. Formal Syntax - resp-text-code](https://tools.ietf.org/html/rfc5464#section-5)
pub(crate) fn resp_text_code_metadata_too_many(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
let (i, _) = tag_no_case("METADATA TOOMANY")(i)?;
Ok((i, ResponseCode::MetadataTooMany))
}
// Server is unable to set a new annotation because it does not support private annotations on one of the specified mailboxes.
// Extends resp-test-code defined in rfc3501.
// [RFC5464 - 4.3 SETMETADATA Command](https://tools.ietf.org/html/rfc5464#section-4.3)
// [RFC5464 - 5. Formal Syntax - resp-text-code](https://tools.ietf.org/html/rfc5464#section-5)
pub(crate) fn resp_text_code_metadata_no_private(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
let (i, _) = tag_no_case("METADATA NOPRIVATE")(i)?;
Ok((i, ResponseCode::MetadataNoPrivate))
}
#[cfg(test)]
mod tests {
use super::{metadata_solicited, metadata_unsolicited};
use crate::types::*;
use std::borrow::Cow;
#[test]
fn test_solicited_fail_1() {
match metadata_solicited(b"METADATA \"\" (/asdfg \"asdf\")\r\n") {
Err(_) => {}
_ => panic!("Error required when entry name is not starting with /private or /shared"),
}
}
#[test]
fn test_solicited_fail_2() {
match metadata_solicited(b"METADATA \"\" (/shared/asdfg \"asdf\")\r\n") {
Err(_) => {}
_ => panic!(
"Error required when in entry name /shared \
is not continued with /admin, /comment or /vendor"
),
}
}
#[test]
fn test_solicited_fail_3() {
match metadata_solicited(b"METADATA \"\" (/private/admin \"asdf\")\r\n") {
Err(_) => {}
_ => panic!(
"Error required when in entry name /private \
is not continued with /comment or /vendor"
),
}
}
#[test]
fn test_solicited_fail_4() {
match metadata_solicited(b"METADATA \"\" (/shared/vendor \"asdf\")\r\n") {
Err(_) => {}
_ => panic!("Error required when vendor name is not provided."),
}
}
#[test]
fn test_solicited_success() {
match metadata_solicited(
b"METADATA \"mbox\" (/shared/vendor/vendorname \"asdf\" \
/private/comment/a \"bbb\")\r\n",
) {
Ok((i, Response::MailboxData(MailboxDatum::MetadataSolicited { mailbox, values }))) => {
assert_eq!(mailbox, "mbox");
assert_eq!(i, b"\r\n");
assert_eq!(values.len(), 2);
assert_eq!(values[0].entry, "/shared/vendor/vendorname");
assert_eq!(
values[0]
.value
.as_ref()
.expect("None value is not expected"),
"asdf"
);
assert_eq!(values[1].entry, "/private/comment/a");
assert_eq!(
values[1]
.value
.as_ref()
.expect("None value is not expected"),
"bbb"
);
}
_ => panic!("Correct METADATA response is not parsed properly."),
}
}
#[test]
fn test_literal_success() {
// match metadata_solicited(b"METADATA \"\" (/shared/vendor/vendor.coi/a \"AAA\")\r\n")
match metadata_solicited(b"METADATA \"\" (/shared/vendor/vendor.coi/a {3}\r\nAAA)\r\n") {
Ok((i, Response::MailboxData(MailboxDatum::MetadataSolicited { mailbox, values }))) => {
assert_eq!(mailbox, "");
assert_eq!(i, b"\r\n");
assert_eq!(values.len(), 1);
assert_eq!(values[0].entry, "/shared/vendor/vendor.coi/a");
assert_eq!(
values[0]
.value
.as_ref()
.expect("None value is not expected"),
"AAA"
);
}
Err(e) => panic!("ERR: {e:?}"),
_ => panic!("Strange failure"),
}
}
#[test]
fn test_nil_success() {
match metadata_solicited(b"METADATA \"\" (/shared/comment NIL /shared/admin NIL)\r\n") {
Ok((i, Response::MailboxData(MailboxDatum::MetadataSolicited { mailbox, values }))) => {
assert_eq!(mailbox, "");
assert_eq!(i, b"\r\n");
assert_eq!(values.len(), 2);
assert_eq!(values[0].entry, "/shared/comment");
assert_eq!(values[0].value, None);
assert_eq!(values[1].entry, "/shared/admin");
assert_eq!(values[1].value, None);
}
Err(e) => panic!("ERR: {e:?}"),
_ => panic!("Strange failure"),
}
}
#[test]
fn test_unsolicited_success() {
match metadata_unsolicited(b"METADATA \"theBox\" /shared/admin/qwe /private/comment/a\r\n")
{
Ok((
i,
Response::MailboxData(MailboxDatum::MetadataUnsolicited { mailbox, values }),
)) => {
assert_eq!(i, b"\r\n");
assert_eq!(mailbox, "theBox");
assert_eq!(values.len(), 2);
assert_eq!(values[0], "/shared/admin/qwe");
assert_eq!(values[1], "/private/comment/a");
}
_ => panic!("Correct METADATA response is not parsed properly."),
}
}
#[test]
fn test_response_codes() {
use crate::parser::parse_response;
match parse_response(b"* OK [METADATA LONGENTRIES 123] Some entries omitted.\r\n") {
Ok((
_,
Response::Data {
status: Status::Ok,
code: Some(ResponseCode::MetadataLongEntries(123)),
information: Some(Cow::Borrowed("Some entries omitted.")),
},
)) => {}
rsp => panic!("unexpected response {rsp:?}"),
}
match parse_response(b"* NO [METADATA MAXSIZE 123] Annotation too large.\r\n") {
Ok((
_,
Response::Data {
status: Status::No,
code: Some(ResponseCode::MetadataMaxSize(123)),
information: Some(Cow::Borrowed("Annotation too large.")),
},
)) => {}
rsp => panic!("unexpected response {rsp:?}"),
}
match parse_response(b"* NO [METADATA TOOMANY] Too many annotations.\r\n") {
Ok((
_,
Response::Data {
status: Status::No,
code: Some(ResponseCode::MetadataTooMany),
information: Some(Cow::Borrowed("Too many annotations.")),
},
)) => {}
rsp => panic!("unexpected response {rsp:?}"),
}
match parse_response(b"* NO [METADATA NOPRIVATE] Private annotations not supported.\r\n") {
Ok((
_,
Response::Data {
status: Status::No,
code: Some(ResponseCode::MetadataNoPrivate),
information: Some(Cow::Borrowed("Private annotations not supported.")),
},
)) => {}
rsp => panic!("unexpected response {rsp:?}"),
}
}
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/tests.rs | imap-proto/src/parser/tests.rs | use super::{bodystructure::BodyStructParser, parse_response};
use crate::types::*;
use std::borrow::Cow;
use std::num::NonZeroUsize;
#[test]
fn test_mailbox_data_response() {
match parse_response(b"* LIST (\\HasNoChildren) \".\" INBOX.Tests\r\n") {
Ok((_, Response::MailboxData(_))) => {}
rsp => panic!("unexpected response {rsp:?}"),
}
}
/// Test that the name attributes in [RFC 3501 Section 7.2.2](https://datatracker.ietf.org/doc/html/rfc3501#section-7.2.2)
/// and extensions can be parsed.
#[test]
fn test_name_attributes() {
match parse_response(
b"* LIST (\\Noinferiors \\Noselect \\Marked \\Unmarked \\All \\Archive \\Drafts \\Flagged \\Junk \\Sent \\Trash \\Foobar) \".\" INBOX.Tests\r\n",
) {
Ok((
_,
Response::MailboxData(MailboxDatum::List {
name_attributes, ..
}),
)) => {
assert_eq!(
name_attributes,
vec![
// RFC 3501
NameAttribute::NoInferiors,
NameAttribute::NoSelect,
NameAttribute::Marked,
NameAttribute::Unmarked,
// RFC 6154
NameAttribute::All,
NameAttribute::Archive,
NameAttribute::Drafts,
NameAttribute::Flagged,
NameAttribute::Junk,
NameAttribute::Sent,
NameAttribute::Trash,
// Extensions not supported by this crate
NameAttribute::Extension(Cow::Borrowed("\\Foobar")),
]
);
}
rsp => panic!("unexpected response {rsp:?}"),
}
}
/// Test the ACL response from RFC 4314/2086
#[test]
fn test_acl_response() {
match parse_response(b"* ACL INBOX user lrswipkxtecdan\r\n") {
Ok((_, Response::Acl(_))) => {}
rsp => panic!("unexpected response {rsp:?}"),
}
}
#[test]
fn test_acl_attributes() {
// no rights
match parse_response(b"* ACL INBOX\r\n") {
Ok((_, Response::Acl(acl))) => {
assert_eq!(
acl,
Acl {
mailbox: Cow::Borrowed("INBOX"),
acls: vec![],
}
)
}
rsp => panic!("unexpected response {rsp:?}"),
}
// one right pair
match parse_response(b"* ACL INBOX user lrswipkxtecdan\r\n") {
Ok((_, Response::Acl(acl))) => {
assert_eq!(
acl,
Acl {
mailbox: Cow::Borrowed("INBOX"),
acls: vec![AclEntry {
identifier: Cow::Borrowed("user"),
rights: vec![
AclRight::Lookup,
AclRight::Read,
AclRight::Seen,
AclRight::Write,
AclRight::Insert,
AclRight::Post,
AclRight::CreateMailbox,
AclRight::DeleteMailbox,
AclRight::DeleteMessage,
AclRight::Expunge,
AclRight::OldCreate,
AclRight::OldDelete,
AclRight::Administer,
AclRight::Annotation,
],
},],
}
)
}
rsp => panic!("unexpected response {rsp:?}"),
}
// with custom rights
match parse_response(b"* ACL INBOX user lr0123\r\n") {
Ok((_, Response::Acl(acl))) => {
assert_eq!(
acl,
Acl {
mailbox: Cow::Borrowed("INBOX"),
acls: vec![AclEntry {
identifier: Cow::Borrowed("user"),
rights: vec![
AclRight::Lookup,
AclRight::Read,
AclRight::Custom('0'),
AclRight::Custom('1'),
AclRight::Custom('2'),
AclRight::Custom('3'),
],
},],
}
)
}
rsp => panic!("unexpected response {rsp:?}"),
}
// multiple right pairs
match parse_response(b"* ACL INBOX user lrswipkxtecdan user2 lr\r\n") {
Ok((_, Response::Acl(acl))) => {
assert_eq!(
acl,
Acl {
mailbox: Cow::Borrowed("INBOX"),
acls: vec![
AclEntry {
identifier: Cow::Borrowed("user"),
rights: vec![
AclRight::Lookup,
AclRight::Read,
AclRight::Seen,
AclRight::Write,
AclRight::Insert,
AclRight::Post,
AclRight::CreateMailbox,
AclRight::DeleteMailbox,
AclRight::DeleteMessage,
AclRight::Expunge,
AclRight::OldCreate,
AclRight::OldDelete,
AclRight::Administer,
AclRight::Annotation,
],
},
AclEntry {
identifier: Cow::Borrowed("user2"),
rights: vec![AclRight::Lookup, AclRight::Read],
},
],
}
)
}
rsp => panic!("unexpected response {rsp:?}"),
}
// quoted mailbox
match parse_response(b"* ACL \"My folder\" user lrswipkxtecdan\r\n") {
Ok((_, Response::Acl(acl))) => {
assert_eq!(
acl,
Acl {
mailbox: Cow::Borrowed("My folder"),
acls: vec![AclEntry {
identifier: Cow::Borrowed("user"),
rights: vec![
AclRight::Lookup,
AclRight::Read,
AclRight::Seen,
AclRight::Write,
AclRight::Insert,
AclRight::Post,
AclRight::CreateMailbox,
AclRight::DeleteMailbox,
AclRight::DeleteMessage,
AclRight::Expunge,
AclRight::OldCreate,
AclRight::OldDelete,
AclRight::Administer,
AclRight::Annotation,
],
},],
}
)
}
rsp => panic!("unexpected response {rsp:?}"),
}
// quoted identifier
match parse_response(b"* ACL Trash \"user name\" lrswipkxtecdan\r\n") {
Ok((_, Response::Acl(acl))) => {
assert_eq!(
acl,
Acl {
mailbox: Cow::Borrowed("Trash"),
acls: vec![AclEntry {
identifier: Cow::Borrowed("user name"),
rights: vec![
AclRight::Lookup,
AclRight::Read,
AclRight::Seen,
AclRight::Write,
AclRight::Insert,
AclRight::Post,
AclRight::CreateMailbox,
AclRight::DeleteMailbox,
AclRight::DeleteMessage,
AclRight::Expunge,
AclRight::OldCreate,
AclRight::OldDelete,
AclRight::Administer,
AclRight::Annotation,
],
},],
}
)
}
rsp => panic!("unexpected response {rsp:?}"),
}
}
/// Test the LISTRIGHTS response from RFC 4314/2086
#[test]
fn test_list_rights_response() {
match parse_response(b"* LISTRIGHTS INBOX user lkxca r s w i p t e d n\r\n") {
Ok((_, Response::ListRights(_))) => {}
rsp => panic!("unexpected response {rsp:?}"),
}
}
#[test]
fn test_list_rights_attributes() {
// no required/always rights, and no optional rights
match parse_response(b"* LISTRIGHTS INBOX user \"\"\r\n") {
Ok((_, Response::ListRights(rights))) => {
assert_eq!(
rights,
ListRights {
mailbox: Cow::Borrowed("INBOX"),
identifier: Cow::Borrowed("user"),
required: vec![],
optional: vec![],
}
)
}
rsp => panic!("unexpected response {rsp:?}"),
}
// no required/always rights, and with optional rights
match parse_response(b"* LISTRIGHTS INBOX user \"\" l k x c\r\n") {
Ok((_, Response::ListRights(rights))) => {
assert_eq!(
rights,
ListRights {
mailbox: Cow::Borrowed("INBOX"),
identifier: Cow::Borrowed("user"),
required: vec![],
optional: vec![
AclRight::Lookup,
AclRight::CreateMailbox,
AclRight::DeleteMailbox,
AclRight::OldCreate,
],
}
)
}
rsp => panic!("unexpected response {rsp:?}"),
}
// with required/always rights, and with optional rights
match parse_response(b"* LISTRIGHTS INBOX user lkr x c\r\n") {
Ok((_, Response::ListRights(rights))) => {
assert_eq!(
rights,
ListRights {
mailbox: Cow::Borrowed("INBOX"),
identifier: Cow::Borrowed("user"),
required: vec![AclRight::Lookup, AclRight::CreateMailbox, AclRight::Read],
optional: vec![AclRight::DeleteMailbox, AclRight::OldCreate],
}
)
}
rsp => panic!("unexpected response {rsp:?}"),
}
// with required/always rights, and no optional rights
match parse_response(b"* LISTRIGHTS INBOX user lkr\r\n") {
Ok((_, Response::ListRights(rights))) => {
assert_eq!(
rights,
ListRights {
mailbox: Cow::Borrowed("INBOX"),
identifier: Cow::Borrowed("user"),
required: vec![AclRight::Lookup, AclRight::CreateMailbox, AclRight::Read],
optional: vec![],
}
)
}
rsp => panic!("unexpected response {rsp:?}"),
}
// with mailbox with spaces
match parse_response(b"* LISTRIGHTS \"My Folder\" user lkr x c\r\n") {
Ok((_, Response::ListRights(rights))) => {
assert_eq!(
rights,
ListRights {
mailbox: Cow::Borrowed("My Folder"),
identifier: Cow::Borrowed("user"),
required: vec![AclRight::Lookup, AclRight::CreateMailbox, AclRight::Read],
optional: vec![AclRight::DeleteMailbox, AclRight::OldCreate],
}
)
}
rsp => panic!("unexpected response {rsp:?}"),
}
}
/// Test the MYRIGHTS response from RFC 4314/2086
#[test]
fn test_my_rights_response() {
match parse_response(b"* MYRIGHTS INBOX lkxca\r\n") {
Ok((_, Response::MyRights(_))) => {}
rsp => panic!("unexpected response {rsp:?}"),
}
}
#[test]
fn test_my_rights_attributes() {
// with rights
match parse_response(b"* MYRIGHTS INBOX lkr\r\n") {
Ok((_, Response::MyRights(rights))) => {
assert_eq!(
rights,
MyRights {
mailbox: Cow::Borrowed("INBOX"),
rights: vec![AclRight::Lookup, AclRight::CreateMailbox, AclRight::Read],
}
)
}
rsp => panic!("unexpected response {rsp:?}"),
}
// with space in mailbox
match parse_response(b"* MYRIGHTS \"My Folder\" lkr\r\n") {
Ok((_, Response::MyRights(rights))) => {
assert_eq!(
rights,
MyRights {
mailbox: Cow::Borrowed("My Folder"),
rights: vec![AclRight::Lookup, AclRight::CreateMailbox, AclRight::Read],
}
)
}
rsp => panic!("unexpected response {rsp:?}"),
}
}
#[test]
fn test_number_overflow() {
match parse_response(b"* 2222222222222222222222222222222222222222222C\r\n") {
Err(_) => {}
_ => panic!("error required for integer overflow"),
}
}
#[test]
fn test_unseen() {
match parse_response(b"* OK [UNSEEN 3] Message 3 is first unseen\r\n").unwrap() {
(
_,
Response::Data {
status: Status::Ok,
code: Some(ResponseCode::Unseen(3)),
information: Some(Cow::Borrowed("Message 3 is first unseen")),
},
) => {}
rsp => panic!("unexpected response {rsp:?}"),
}
}
#[test]
fn test_body_text() {
match parse_response(b"* 2 FETCH (BODY[TEXT] {3}\r\nfoo)\r\n") {
Ok((_, Response::Fetch(_, attrs))) => {
let body = &attrs[0];
assert_eq!(
body,
&AttributeValue::BodySection {
section: Some(SectionPath::Full(MessageSection::Text)),
index: None,
data: Some(Cow::Borrowed(b"foo")),
},
"body = {body:?}"
);
}
rsp => panic!("unexpected response {rsp:?}"),
}
}
#[test]
fn test_body_structure() {
const RESPONSE: &[u8] = b"* 15 FETCH (BODYSTRUCTURE (\"TEXT\" \"PLAIN\" (\"CHARSET\" \"iso-8859-1\") NIL NIL \"QUOTED-PRINTABLE\" 1315 42 NIL NIL NIL NIL))\r\n";
match parse_response(RESPONSE) {
Ok((_, Response::Fetch(_, attrs))) => {
let body = &attrs[0];
assert!(
matches!(*body, AttributeValue::BodyStructure(_)),
"body = {body:?}"
);
}
rsp => panic!("unexpected response {rsp:?}"),
}
}
#[test]
fn test_status() {
match parse_response(b"* STATUS blurdybloop (MESSAGES 231 UIDNEXT 44292)\r\n") {
Ok((_, Response::MailboxData(MailboxDatum::Status { mailbox, status }))) => {
assert_eq!(mailbox, "blurdybloop");
assert_eq!(
status,
[
StatusAttribute::Messages(231),
StatusAttribute::UidNext(44292),
]
);
}
rsp => panic!("unexpected response {rsp:?}"),
}
// Outlook server sends a STATUS response with a space in the end.
match parse_response(b"* STATUS Sent (UIDNEXT 107) \r\n") {
Ok((_, Response::MailboxData(MailboxDatum::Status { mailbox, status }))) => {
assert_eq!(mailbox, "Sent");
assert_eq!(status, [StatusAttribute::UidNext(107),]);
}
rsp => panic!("unexpected response {rsp:?}"),
}
// mail.163.com sends a STATUS response with an empty list when asked for (UIDNEXT)
match parse_response(b"* STATUS \"INBOX\" ()\r\n") {
Ok((_, Response::MailboxData(MailboxDatum::Status { mailbox, status }))) => {
assert_eq!(mailbox, "INBOX");
assert_eq!(status, []);
}
rsp => panic!("unexpected response {rsp:?}"),
}
}
#[test]
fn test_notify() {
match parse_response(b"* 3501 EXPUNGE\r\n") {
Ok((_, Response::Expunge(3501))) => {}
rsp => panic!("unexpected response {rsp:?}"),
}
match parse_response(b"* 3501 EXISTS\r\n") {
Ok((_, Response::MailboxData(MailboxDatum::Exists(3501)))) => {}
rsp => panic!("unexpected response {rsp:?}"),
}
match parse_response(b"+ idling\r\n") {
Ok((
_,
Response::Continue {
code: None,
information: Some(Cow::Borrowed("idling")),
},
)) => {}
rsp => panic!("unexpected response {rsp:?}"),
}
}
#[test]
fn test_search() {
// also allow trailing whitespace in SEARCH responses
for empty_response in &["* SEARCH\r\n", "* SEARCH \r\n"] {
match parse_response(empty_response.as_bytes()) {
Ok((_, Response::MailboxData(MailboxDatum::Search(ids)))) => {
assert!(ids.is_empty());
}
rsp => panic!("unexpected response {rsp:?}"),
}
}
for response in &["* SEARCH 12345 67890\r\n", "* SEARCH 12345 67890 \r\n"] {
match parse_response(response.as_bytes()) {
Ok((_, Response::MailboxData(MailboxDatum::Search(ids)))) => {
assert_eq!(ids[0], 12345);
assert_eq!(ids[1], 67890);
}
rsp => panic!("unexpected response {rsp:?}"),
}
}
}
#[test]
fn test_sort() {
// also allow trailing whitespace in SEARCH responses
for empty_response in &["* SORT\r\n", "* SORT \r\n"] {
match parse_response(empty_response.as_bytes()) {
Ok((_, Response::MailboxData(MailboxDatum::Sort(ids)))) => {
assert!(ids.is_empty());
}
rsp => panic!("unexpected response {rsp:?}"),
}
}
for response in &["* SORT 12345 67890\r\n", "* SORT 12345 67890 \r\n"] {
match parse_response(response.as_bytes()) {
Ok((_, Response::MailboxData(MailboxDatum::Sort(ids)))) => {
assert_eq!(ids[0], 12345);
assert_eq!(ids[1], 67890);
}
rsp => panic!("unexpected response {rsp:?}"),
}
}
}
#[test]
fn test_uid_fetch() {
match parse_response(b"* 4 FETCH (UID 71372 RFC822.HEADER {10275}\r\n") {
Err(nom::Err::Incomplete(nom::Needed::Size(size))) => {
assert_eq!(size, NonZeroUsize::new(10275).unwrap());
}
rsp => panic!("unexpected response {rsp:?}"),
}
}
#[test]
fn test_uid_fetch_extra_space() {
// DavMail inserts an extra space after RFC822.HEADER
match parse_response(b"* 4 FETCH (UID 71372 RFC822.HEADER {10275}\r\n") {
Err(nom::Err::Incomplete(nom::Needed::Size(size))) => {
assert_eq!(size, NonZeroUsize::new(10275).unwrap());
}
rsp => panic!("unexpected response {rsp:?}"),
}
}
#[test]
fn test_header_fields() {
const RESPONSE: &[u8] = b"* 1 FETCH (UID 1 BODY[HEADER.FIELDS (CHAT-VERSION)] {21}\r\nChat-Version: 1.0\r\n\r\n)\r\n";
match parse_response(RESPONSE) {
Ok((_, Response::Fetch(_, _))) => {}
rsp => panic!("unexpected response {rsp:?}"),
}
}
#[test]
fn test_response_codes() {
    // Exercises the bracketed resp-text-code variants, including the fallback
    // where a malformed code degrades into plain information text.
    // OK with a bare ALERT code.
    match parse_response(b"* OK [ALERT] Alert!\r\n") {
        Ok((
            _,
            Response::Data {
                status: Status::Ok,
                code: Some(ResponseCode::Alert),
                information: Some(Cow::Borrowed("Alert!")),
            },
        )) => {}
        rsp => panic!("unexpected response {rsp:?}"),
    }
    // NO with a PARSE code.
    match parse_response(b"* NO [PARSE] Something\r\n") {
        Ok((
            _,
            Response::Data {
                status: Status::No,
                code: Some(ResponseCode::Parse),
                information: Some(Cow::Borrowed("Something")),
            },
        )) => {}
        rsp => panic!("unexpected response {rsp:?}"),
    }
    // CAPABILITY list with IMAP4rev1 first.
    match parse_response(b"* OK [CAPABILITY IMAP4rev1 IDLE] Logged in\r\n") {
        Ok((
            _,
            Response::Data {
                status: Status::Ok,
                code: Some(ResponseCode::Capabilities(c)),
                information: Some(Cow::Borrowed("Logged in")),
            },
        )) => {
            assert_eq!(c.len(), 2);
            assert_eq!(c[0], Capability::Imap4rev1);
            assert_eq!(c[1], Capability::Atom(Cow::Borrowed("IDLE")));
        }
        rsp => panic!("unexpected response {rsp:?}"),
    }
    // CAPABILITY list with IMAP4rev1 in a middle position.
    match parse_response(b"* OK [CAPABILITY UIDPLUS IMAP4rev1 IDLE] Logged in\r\n") {
        Ok((
            _,
            Response::Data {
                status: Status::Ok,
                code: Some(ResponseCode::Capabilities(c)),
                information: Some(Cow::Borrowed("Logged in")),
            },
        )) => {
            assert_eq!(c.len(), 3);
            assert_eq!(c[0], Capability::Atom(Cow::Borrowed("UIDPLUS")));
            assert_eq!(c[1], Capability::Imap4rev1);
            assert_eq!(c[2], Capability::Atom(Cow::Borrowed("IDLE")));
        }
        rsp => panic!("unexpected response {rsp:?}"),
    }
    // Missing IMAP4rev1: not recognized as a capability code, so the whole
    // bracketed span is kept verbatim in the information text (code: None).
    match parse_response(b"* OK [CAPABILITY UIDPLUS IDLE] Logged in\r\n") {
        Ok((
            _,
            Response::Data {
                status: Status::Ok,
                code: None,
                information: Some(Cow::Borrowed("[CAPABILITY UIDPLUS IDLE] Logged in")),
            },
        )) => {}
        rsp => panic!("unexpected response {rsp:?}"),
    }
    // BADCHARSET without the optional charset list.
    match parse_response(b"* NO [BADCHARSET] error\r\n") {
        Ok((
            _,
            Response::Data {
                status: Status::No,
                code: Some(ResponseCode::BadCharset(None)),
                information: Some(Cow::Borrowed("error")),
            },
        )) => {}
        rsp => panic!("unexpected response {rsp:?}"),
    }
    // BADCHARSET with a parenthesized charset list.
    match parse_response(b"* NO [BADCHARSET (utf-8 latin1)] error\r\n") {
        Ok((
            _,
            Response::Data {
                status: Status::No,
                code: Some(ResponseCode::BadCharset(Some(v))),
                information: Some(Cow::Borrowed("error")),
            },
        )) => {
            assert_eq!(v.len(), 2);
            assert_eq!(v[0], "utf-8");
            assert_eq!(v[1], "latin1");
        }
        rsp => panic!("unexpected response {rsp:?}"),
    }
    // BADCHARSET with an empty list is rejected as a code and degrades to
    // plain information text, same as the bad CAPABILITY case above.
    match parse_response(b"* NO [BADCHARSET ()] error\r\n") {
        Ok((
            _,
            Response::Data {
                status: Status::No,
                code: None,
                information: Some(Cow::Borrowed("[BADCHARSET ()] error")),
            },
        )) => {}
        rsp => panic!("unexpected response {rsp:?}"),
    }
}
#[test]
fn test_incomplete_fetch() {
    // A FETCH cut off mid-attribute must surface as Incomplete (more bytes
    // needed), never as a hard parse Error.
    match parse_response(b"* 4644 FETCH (UID ") {
        Err(nom::Err::Incomplete(_)) => {}
        rsp => panic!("should be incomplete: {rsp:?}"),
    }
}
#[test]
fn test_continuation() {
    // Continuation requests ("+") with no code and no text.
    // regular RFC compliant
    match parse_response(b"+ \r\n") {
        Ok((
            _,
            Response::Continue {
                code: None,
                information: None,
            },
        )) => {}
        rsp => panic!("unexpected response {rsp:?}"),
    }
    // short version, sent by yandex (no space after '+')
    match parse_response(b"+\r\n") {
        Ok((
            _,
            Response::Continue {
                code: None,
                information: None,
            },
        )) => {}
        rsp => panic!("unexpected response {rsp:?}"),
    }
}
#[test]
fn test_enabled() {
    // An ENABLED response is mapped onto the same Response::Capabilities
    // variant used for CAPABILITY lists.
    match parse_response(b"* ENABLED QRESYNC X-GOOD-IDEA\r\n") {
        Ok((_, capabilities)) => assert_eq!(
            capabilities,
            Response::Capabilities(vec![
                Capability::Atom(Cow::Borrowed("QRESYNC")),
                Capability::Atom(Cow::Borrowed("X-GOOD-IDEA")),
            ])
        ),
        rsp => panic!("Unexpected response: {rsp:?}"),
    }
}
#[test]
fn test_flags() {
    // Lenient FLAGS parsing: two technically-invalid server responses that the
    // parser deliberately accepts for interoperability.
    // Invalid response (FLAGS can't include \*) from Zoho Mail server.
    //
    // As a workaround, such response is parsed without error.
    match parse_response(b"* FLAGS (\\Answered \\Flagged \\Deleted \\Seen \\Draft \\*)\r\n") {
        Ok((_, capabilities)) => assert_eq!(
            capabilities,
            Response::MailboxData(MailboxDatum::Flags(vec![
                Cow::Borrowed("\\Answered"),
                Cow::Borrowed("\\Flagged"),
                Cow::Borrowed("\\Deleted"),
                Cow::Borrowed("\\Seen"),
                Cow::Borrowed("\\Draft"),
                Cow::Borrowed("\\*")
            ]))
        ),
        rsp => panic!("Unexpected response: {rsp:?}"),
    }
    // Invalid response (FLAGS can't include ']') from some unknown providers.
    //
    // As a workaround, such response is parsed without error.
    match parse_response(b"* FLAGS (OIB-Seen-[Gmail]/All)\r\n") {
        Ok((_, capabilities)) => assert_eq!(
            capabilities,
            Response::MailboxData(MailboxDatum::Flags(vec![Cow::Borrowed(
                "OIB-Seen-[Gmail]/All"
            )]))
        ),
        rsp => panic!("Unexpected response: {rsp:?}"),
    }
}
#[test]
fn test_vanished() {
    // VANISHED responses: UID sets are parsed into inclusive ranges, a lone
    // number n becoming n..=n; "(EARLIER)" toggles the `earlier` flag.
    match parse_response(b"* VANISHED (EARLIER) 1,2,3:8\r\n") {
        Ok((_, Response::Vanished { earlier, uids })) => {
            assert!(earlier);
            assert_eq!(uids.len(), 3);
            let v = &uids[0];
            assert_eq!(*v.start(), 1);
            assert_eq!(*v.end(), 1);
            let v = &uids[1];
            assert_eq!(*v.start(), 2);
            assert_eq!(*v.end(), 2);
            let v = &uids[2];
            assert_eq!(*v.start(), 3);
            assert_eq!(*v.end(), 8);
        }
        rsp => panic!("Unexpected response: {rsp:?}"),
    }
    match parse_response(b"* VANISHED 1,2,3:8,10\r\n") {
        Ok((_, Response::Vanished { earlier, uids })) => {
            assert!(!earlier);
            assert_eq!(uids.len(), 4);
        }
        rsp => panic!("Unexpected response: {rsp:?}"),
    }
    match parse_response(b"* VANISHED (EARLIER) 1\r\n") {
        Ok((_, Response::Vanished { earlier, uids })) => {
            assert!(earlier);
            assert_eq!(uids.len(), 1);
            assert_eq!(uids[0].clone().collect::<Vec<u32>>(), vec![1]);
        }
        rsp => panic!("Unexpected response: {rsp:?}"),
    }
    match parse_response(b"* VANISHED 1\r\n") {
        Ok((_, Response::Vanished { earlier, uids })) => {
            assert!(!earlier);
            assert_eq!(uids.len(), 1);
        }
        rsp => panic!("Unexpected response: {rsp:?}"),
    }
    // An empty UID set is a parse error in both forms.
    assert!(parse_response(b"* VANISHED \r\n").is_err());
    assert!(parse_response(b"* VANISHED (EARLIER) \r\n").is_err());
}
#[test]
fn test_uidplus() {
    // UIDPLUS response codes: APPENDUID, COPYUID and UIDNOTSTICKY.
    // NOTE(review): the dbg! wrappers print on every run; consider removing.
    // APPENDUID with a single destination UID.
    match dbg!(parse_response(
        b"* OK [APPENDUID 38505 3955] APPEND completed\r\n"
    )) {
        Ok((
            _,
            Response::Data {
                status: Status::Ok,
                code: Some(ResponseCode::AppendUid(38505, uid_set)),
                information: Some(Cow::Borrowed("APPEND completed")),
            },
        )) if uid_set == [3955.into()] => {}
        rsp => panic!("Unexpected response: {rsp:?}"),
    }
    // COPYUID with source and destination UID sets.
    match dbg!(parse_response(
        b"* OK [COPYUID 38505 304,319:320 3956:3958] Done\r\n"
    )) {
        Ok((
            _,
            Response::Data {
                status: Status::Ok,
                code: Some(ResponseCode::CopyUid(38505, uid_set_src, uid_set_dst)),
                information: Some(Cow::Borrowed("Done")),
            },
        )) if uid_set_src == [304.into(), (319..=320).into()]
            && uid_set_dst == [(3956..=3958).into()] => {}
        rsp => panic!("Unexpected response: {rsp:?}"),
    }
    // UIDNOTSTICKY advisory code.
    match dbg!(parse_response(
        b"* NO [UIDNOTSTICKY] Non-persistent UIDs\r\n"
    )) {
        Ok((
            _,
            Response::Data {
                status: Status::No,
                code: Some(ResponseCode::UidNotSticky),
                information: Some(Cow::Borrowed("Non-persistent UIDs")),
            },
        )) => {}
        rsp => panic!("Unexpected response: {rsp:?}"),
    }
}
#[test]
fn test_imap_body_structure() {
    // Deeply nested multipart BODYSTRUCTURE:
    //   MIXED( RELATED( ALTERNATIVE(TEXT/PLAIN, TEXT/HTML), IMAGE/GIF ),
    //          APPLICATION/PDF )
    // The backslash line-continuations keep the on-the-wire bytes contiguous.
    let test = b"\
        * 1569 FETCH (\
        BODYSTRUCTURE (\
        (\
        (\
        (\
        \"TEXT\" \"PLAIN\" \
        (\"CHARSET\" \"ISO-8859-1\") NIL NIL \
        \"QUOTED-PRINTABLE\" 833 30 NIL NIL NIL\
        )\
        (\
        \"TEXT\" \"HTML\" \
        (\"CHARSET\" \"ISO-8859-1\") NIL NIL \
        \"QUOTED-PRINTABLE\" 3412 62 NIL \
        (\"INLINE\" NIL) NIL\
        ) \
        \"ALTERNATIVE\" (\"BOUNDARY\" \"2__=fgrths\") NIL NIL\
        )\
        (\
        \"IMAGE\" \"GIF\" \
        (\"NAME\" \"485039.gif\") \"<2__=lgkfjr>\" NIL \
        \"BASE64\" 64 NIL (\"INLINE\" (\"FILENAME\" \"485039.gif\")) \
        NIL\
        ) \
        \"RELATED\" (\"BOUNDARY\" \"1__=fgrths\") NIL NIL\
        )\
        (\
        \"APPLICATION\" \"PDF\" \
        (\"NAME\" \"title.pdf\") \
        \"<1__=lgkfjr>\" NIL \"BASE64\" 333980 NIL \
        (\"ATTACHMENT\" (\"FILENAME\" \"title.pdf\")) NIL\
        ) \
        \"MIXED\" (\"BOUNDARY\" \"0__=fgrths\") NIL NIL\
        )\
        )\r\n";
    let (_, resp) = parse_response(test).unwrap();
    match resp {
        Response::Fetch(_, f) => {
            // Pick the first BodyStructure attribute out of the FETCH list.
            let bodystructure = f
                .iter()
                .flat_map(|f| match f {
                    AttributeValue::BodyStructure(e) => Some(e),
                    _ => None,
                })
                .next()
                .unwrap();
            // The APPLICATION/PDF leaf sits at path [2] (third child of the
            // outermost multipart).
            let parser = BodyStructParser::new(bodystructure);
            let element = parser.search(|b: &BodyStructure| {
                matches!(b, BodyStructure::Basic { ref common, .. } if common.ty.ty == "APPLICATION")
            });
            assert_eq!(element, Some(vec![2]));
        }
        _ => panic!("invalid FETCH command test"),
    };
}
#[test]
fn test_parsing_of_quota_capability_in_login_response() {
    // QUOTA must come through as a plain capability atom in the login banner.
    match parse_response(b"* OK [CAPABILITY IMAP4rev1 IDLE QUOTA] Logged in\r\n") {
        Ok((
            _,
            Response::Data {
                status: Status::Ok,
                code: Some(ResponseCode::Capabilities(c)),
                information: Some(Cow::Borrowed("Logged in")),
            },
        )) => {
            assert_eq!(c.len(), 3);
            assert_eq!(c[0], Capability::Imap4rev1);
            assert_eq!(c[1], Capability::Atom(Cow::Borrowed("IDLE")));
            assert_eq!(c[2], Capability::Atom(Cow::Borrowed("QUOTA")));
        }
        rsp => panic!("unexpected response {rsp:?}"),
    }
}
#[test]
fn test_parsing_of_bye_response() {
    // BYE with no trailing text yields information: None...
    match parse_response(b"* BYE\r\n") {
        Ok((
            _,
            Response::Data {
                status: Status::Bye,
                code: None,
                information: None,
            },
        )) => {}
        rsp => panic!("unexpected response {rsp:?}"),
    };
    // ...while a BYE with explanatory text keeps it verbatim.
    match parse_response(b"* BYE Autologout; idle for too long\r\n") {
        Ok((
            _,
            Response::Data {
                status: Status::Bye,
                code: None,
                information: Some(Cow::Borrowed("Autologout; idle for too long")),
            },
        )) => {}
        rsp => panic!("unexpected response {rsp:?}"),
    };
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/rfc4551.rs | imap-proto/src/parser/rfc4551.rs | //!
//! https://tools.ietf.org/html/rfc4551
//!
//! IMAP Extension for Conditional STORE Operation
//! or Quick Flag Changes Resynchronization
//!
use nom::{bytes::streaming::tag_no_case, sequence::tuple, IResult};
use crate::{
parser::core::{number_64, paren_delimited},
types::*,
};
// The highest mod-sequence value of all messages in the mailbox.
// Extends resp-test-code defined in rfc3501.
// [RFC4551 - 3.6 HIGHESTMODSEQ Status Data Items](https://tools.ietf.org/html/rfc4551#section-3.6)
// [RFC4551 - 4. Formal Syntax - resp-text-code](https://tools.ietf.org/html/rfc4551#section-4)
pub(crate) fn resp_text_code_highest_mod_seq(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
let (i, (_, num)) = tuple((tag_no_case("HIGHESTMODSEQ "), number_64))(i)?;
Ok((i, ResponseCode::HighestModSeq(num)))
}
// Extends status-att/status-att-list defined in rfc3501
// [RFC4551 - 3.6 - HIGHESTMODSEQ Status Data Items](https://tools.ietf.org/html/rfc4551#section-3.6)
// [RFC4551 - 4. Formal Syntax - status-att-val](https://tools.ietf.org/html/rfc4551#section-4)
pub(crate) fn status_att_val_highest_mod_seq(i: &[u8]) -> IResult<&[u8], StatusAttribute> {
    // Keyword prefix followed by the 64-bit HIGHESTMODSEQ value.
    let (rest, (_keyword, value)) = tuple((tag_no_case("HIGHESTMODSEQ "), number_64))(i)?;
    Ok((rest, StatusAttribute::HighestModSeq(value)))
}
// [RFC4551 - 4. Formal Syntax - fetch-mod-resp](https://tools.ietf.org/html/rfc4551#section-4)
pub(crate) fn msg_att_mod_seq(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
    // "MODSEQ " followed by a parenthesized 64-bit value, e.g. "MODSEQ (624)".
    let (i, _) = tag_no_case("MODSEQ ")(i)?;
    let (i, num) = paren_delimited(number_64)(i)?;
    Ok((i, AttributeValue::ModSeq(num)))
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/rfc5256.rs | imap-proto/src/parser/rfc5256.rs | //!
//! https://tools.ietf.org/html/rfc5256
//!
//! SORT extension
//!
use nom::{
bytes::streaming::{tag, tag_no_case},
combinator::{map, opt},
multi::many0,
sequence::{preceded, terminated},
IResult,
};
use crate::{parser::core::number, types::MailboxDatum};
/// BASE.7.2.SORT. SORT Response
///
/// Data: zero or more numbers
///
/// The SORT response occurs as a result of a SORT or UID SORT
/// command. The number(s) refer to those messages that match the
/// search criteria. For SORT, these are message sequence numbers;
/// for UID SORT, these are unique identifiers. Each number is
/// delimited by a space.
///
/// Example:
///
/// ```ignore
/// S: * SORT 2 3 6
/// ```
///
/// [RFC5256 - 4 Additional Responses](https://tools.ietf.org/html/rfc5256#section-4)
pub(crate) fn mailbox_data_sort(i: &[u8]) -> IResult<&[u8], MailboxDatum<'_>> {
    // "SORT" followed by zero or more space-separated message numbers/UIDs.
    map(
        // Technically, trailing whitespace is not allowed for the SEARCH command,
        // but multiple email servers in the wild seem to have it anyway (see #34, #108).
        // Since the SORT command extends the SEARCH command, the trailing whitespace
        // is exceptionally allowed here (as for the SEARCH command).
        terminated(
            preceded(tag_no_case(b"SORT"), many0(preceded(tag(" "), number))),
            opt(tag(" ")),
        ),
        MailboxDatum::Sort,
    )(i)
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/core.rs | imap-proto/src/parser/core.rs | use nom::{
branch::alt,
bytes::streaming::{escaped, tag, tag_no_case, take, take_while, take_while1},
character::streaming::{char, digit1, one_of},
combinator::{map, map_res, opt},
multi::{separated_list0, separated_list1},
sequence::{delimited, preceded, tuple},
IResult,
};
use std::str::{from_utf8, FromStr};
// ----- number -----
// number = 1*DIGIT
// ; Unsigned 32-bit integer
// ; (0 <= n < 4,294,967,296)
pub fn number(i: &[u8]) -> IResult<&[u8], u32> {
let (i, bytes) = digit1(i)?;
match from_utf8(bytes).ok().and_then(|s| u32::from_str(s).ok()) {
Some(v) => Ok((i, v)),
None => Err(nom::Err::Error(nom::error::make_error(
i,
nom::error::ErrorKind::MapRes,
))),
}
}
// same as `number` but 64-bit
pub fn number_64(i: &[u8]) -> IResult<&[u8], u64> {
let (i, bytes) = digit1(i)?;
match from_utf8(bytes).ok().and_then(|s| u64::from_str(s).ok()) {
Some(v) => Ok((i, v)),
None => Err(nom::Err::Error(nom::error::make_error(
i,
nom::error::ErrorKind::MapRes,
))),
}
}
// seq-range = seq-number ":" seq-number
// ; two seq-number values and all values between
// ; these two regardless of order.
// ; seq-number is a nz-number
pub fn sequence_range(i: &[u8]) -> IResult<&[u8], std::ops::RangeInclusive<u32>> {
map(tuple((number, tag(":"), number)), |(s, _, e)| s..=e)(i)
}
// sequence-set = (seq-number / seq-range) *("," sequence-set)
// ; set of seq-number values, regardless of order.
// ; Servers MAY coalesce overlaps and/or execute the
// ; sequence in any order.
pub fn sequence_set(i: &[u8]) -> IResult<&[u8], Vec<std::ops::RangeInclusive<u32>>> {
    // Non-empty comma-separated list; a lone number n is normalized to n..=n.
    // NOTE: `sequence_range` must be tried before `number`, otherwise "2:8"
    // would match just "2" and leave ":8" unconsumed.
    separated_list1(tag(","), alt((sequence_range, map(number, |n| n..=n))))(i)
}
// ----- string -----
// string = quoted / literal
pub fn string(i: &[u8]) -> IResult<&[u8], &[u8]> {
    // Either a DQUOTE-delimited quoted string or a {n}CRLF-prefixed literal.
    alt((quoted, literal))(i)
}
// string bytes as utf8
// string bytes as utf8; fails with MapRes if the bytes are not valid UTF-8
pub fn string_utf8(i: &[u8]) -> IResult<&[u8], &str> {
    map_res(string, from_utf8)(i)
}
// quoted = DQUOTE *QUOTED-CHAR DQUOTE
pub fn quoted(i: &[u8]) -> IResult<&[u8], &[u8]> {
    // Returns the raw bytes between the quotes; the \" and \\ escapes are
    // validated but NOT unescaped (see test_quoted below for the consequence).
    delimited(
        char('"'),
        escaped(
            // A run of text chars excluding the quote/backslash specials;
            // `escaped` then permits backslash followed by '"' or '\'.
            take_while1(|byte| is_text_char(byte) && !is_quoted_specials(byte)),
            '\\',
            one_of("\\\""),
        ),
        char('"'),
    )(i)
}
// quoted bytes as utf8
// quoted bytes as utf8; fails with MapRes if the bytes are not valid UTF-8
pub fn quoted_utf8(i: &[u8]) -> IResult<&[u8], &str> {
    map_res(quoted, from_utf8)(i)
}
// quoted-specials = DQUOTE / "\"
pub fn is_quoted_specials(c: u8) -> bool {
    matches!(c, b'"' | b'\\')
}
/// literal = "{" number "}" CRLF *CHAR8
/// ; Number represents the number of CHAR8s
pub fn literal(input: &[u8]) -> IResult<&[u8], &[u8]> {
    // Parse the "{n}\r\n" header, then take exactly n payload bytes
    // (streaming: returns Incomplete if fewer than n bytes are available).
    let (after_header, (_, count, _, _)) = tuple((tag(b"{"), number, tag(b"}"), tag("\r\n")))(input)?;
    take(count)(after_header)
}
// ----- astring ----- atom (roughly) or string
// astring = 1*ASTRING-CHAR / string
pub fn astring(i: &[u8]) -> IResult<&[u8], &[u8]> {
    // Try a bare run of astring chars first, fall back to quoted/literal.
    alt((take_while1(is_astring_char), string))(i)
}
// astring bytes as utf8
// astring bytes as utf8; fails with MapRes if the bytes are not valid UTF-8
pub fn astring_utf8(i: &[u8]) -> IResult<&[u8], &str> {
    map_res(astring, from_utf8)(i)
}
// ASTRING-CHAR = ATOM-CHAR / resp-specials
// ASTRING-CHAR = ATOM-CHAR / resp-specials
pub fn is_astring_char(c: u8) -> bool {
    // astrings additionally admit ']' (the only resp-special).
    is_atom_char(c) || is_resp_specials(c)
}
// ATOM-CHAR = <any CHAR except atom-specials>
// ATOM-CHAR = <any CHAR except atom-specials>
pub fn is_atom_char(c: u8) -> bool {
    is_char(c) && !is_atom_specials(c)
}
// atom-specials = "(" / ")" / "{" / SP / CTL / list-wildcards / quoted-specials / resp-specials
pub fn is_atom_specials(c: u8) -> bool {
    // Literal specials and CTL bytes (< 0x20) first, then the shared classes.
    matches!(c, b'(' | b')' | b'{' | b' ')
        || c < 32
        || is_list_wildcards(c)
        || is_quoted_specials(c)
        || is_resp_specials(c)
}
// resp-specials = "]"
pub fn is_resp_specials(c: u8) -> bool {
    matches!(c, b']')
}
// atom = 1*ATOM-CHAR
// atom = 1*ATOM-CHAR
pub fn atom(i: &[u8]) -> IResult<&[u8], &str> {
    // Non-empty run of atom chars, returned as &str (always ASCII here,
    // since is_atom_char restricts to 0x01-0x7F).
    map_res(take_while1(is_atom_char), from_utf8)(i)
}
// ----- nstring ----- nil or string
// nstring = string / nil
// nstring = string / nil
pub fn nstring(i: &[u8]) -> IResult<&[u8], Option<&[u8]>> {
    // NIL must be tried first so "NIL" is not mistaken for content.
    alt((map(nil, |_| None), map(string, Some)))(i)
}
// nstring bytes as utf8
// nstring bytes as utf8
pub fn nstring_utf8(i: &[u8]) -> IResult<&[u8], Option<&str>> {
    alt((map(nil, |_| None), map(string_utf8, Some)))(i)
}
// nil = "NIL"
// nil = "NIL" (case-insensitive)
pub fn nil(i: &[u8]) -> IResult<&[u8], &[u8]> {
    tag_no_case("NIL")(i)
}
// ----- text -----
// text = 1*TEXT-CHAR
pub fn text(i: &[u8]) -> IResult<&[u8], &str> {
    // NOTE(review): the grammar above says 1*TEXT-CHAR, but `take_while`
    // also accepts an empty run — presumably deliberate leniency (e.g. a
    // bare "* BYE\r\n"); confirm before tightening to take_while1.
    map_res(take_while(is_text_char), from_utf8)(i)
}
// TEXT-CHAR = <any CHAR except CR and LF>
pub fn is_text_char(c: u8) -> bool {
    is_char(c) && !matches!(c, b'\r' | b'\n')
}
// CHAR = %x01-7F
// ; any 7-bit US-ASCII character,
// ; excluding NUL
// From RFC5234
pub fn is_char(c: u8) -> bool {
    (0x01..=0x7F).contains(&c)
}
// ----- others -----
// list-wildcards = "%" / "*"
pub fn is_list_wildcards(c: u8) -> bool {
    matches!(c, b'%' | b'*')
}
// Runs `f` between a '(' and a ')' and returns f's output.
pub fn paren_delimited<'a, F, O, E>(f: F) -> impl FnMut(&'a [u8]) -> IResult<&'a [u8], O, E>
where
    F: FnMut(&'a [u8]) -> IResult<&'a [u8], O, E>,
    E: nom::error::ParseError<&'a [u8]>,
{
    delimited(char('('), f, char(')'))
}
// One-or-more space-separated `f` items inside parentheses; "()" is rejected.
pub fn parenthesized_nonempty_list<'a, F, O, E>(
    f: F,
) -> impl FnMut(&'a [u8]) -> IResult<&'a [u8], Vec<O>, E>
where
    F: FnMut(&'a [u8]) -> IResult<&'a [u8], O, E>,
    E: nom::error::ParseError<&'a [u8]>,
{
    delimited(char('('), separated_list1(char(' '), f), char(')'))
}
// Possibly-empty space-separated list of `f` items inside parentheses.
pub fn parenthesized_list<'a, F, O, E>(f: F) -> impl FnMut(&'a [u8]) -> IResult<&'a [u8], Vec<O>, E>
where
    F: FnMut(&'a [u8]) -> IResult<&'a [u8], O, E>,
    E: nom::error::ParseError<&'a [u8]>,
{
    delimited(
        char('('),
        separated_list0(char(' '), f),
        preceded(
            opt(char(' ')), // Surgemail sometimes sends a space before the closing bracket.
            char(')'),
        ),
    )
}
// Like `nom::combinator::opt`, but for parsers that already yield an Option:
// a recoverable Error becomes Ok(None) at the original position, while
// Incomplete and Failure are propagated unchanged.
pub fn opt_opt<'a, F, O, E>(mut f: F) -> impl FnMut(&'a [u8]) -> IResult<&'a [u8], Option<O>, E>
where
    F: FnMut(&'a [u8]) -> IResult<&'a [u8], Option<O>, E>,
{
    move |i: &[u8]| match f(i) {
        Ok((i, o)) => Ok((i, o)),
        Err(nom::Err::Error(_)) => Ok((i, None)),
        Err(e) => Err(e),
    }
}
#[cfg(test)]
mod tests {
use super::*;
use assert_matches::assert_matches;
#[test]
fn test_quoted() {
    // `quoted` validates escapes but returns the raw (still-escaped) bytes.
    let (rem, val) = quoted(br#""Hello"???"#).unwrap();
    assert_eq!(rem, b"???");
    assert_eq!(val, b"Hello");
    // Allowed escapes...
    assert_eq!(
        quoted(br#""Hello \" "???"#),
        Ok((&b"???"[..], &br#"Hello \" "#[..]))
    );
    assert_eq!(
        quoted(br#""Hello \\ "???"#),
        Ok((&b"???"[..], &br#"Hello \\ "#[..]))
    );
    // Not allowed escapes...
    assert!(quoted(br#""Hello \a "???"#).is_err());
    assert!(quoted(br#""Hello \z "???"#).is_err());
    assert!(quoted(br#""Hello \? "???"#).is_err());
    let (rem, val) = quoted(br#""Hello \"World\""???"#).unwrap();
    assert_eq!(rem, br#"???"#);
    // Should it be this (Hello \"World\") ...
    assert_eq!(val, br#"Hello \"World\""#);
    // ... or this (Hello "World")?
    //assert_eq!(val, br#"Hello "World""#); // fails
    // Test Incomplete (streaming parser: unterminated input needs more bytes)
    assert_matches!(quoted(br#""#), Err(nom::Err::Incomplete(_)));
    assert_matches!(quoted(br#""\"#), Err(nom::Err::Incomplete(_)));
    assert_matches!(quoted(br#""Hello "#), Err(nom::Err::Incomplete(_)));
    // Test Error (input that cannot start a quoted string)
    assert_matches!(quoted(br"\"), Err(nom::Err::Error(_)));
}
#[test]
fn test_string_literal() {
    // `string` accepts the {n}CRLF literal form.
    match string(b"{3}\r\nXYZ") {
        Ok((_, value)) => {
            assert_eq!(value, b"XYZ");
        }
        rsp => panic!("unexpected response {rsp:?}"),
    }
}
#[test]
fn test_string_literal_containing_null() {
    // Literal payloads are length-delimited raw bytes — NUL is allowed.
    match string(b"{5}\r\nX\0Y\0Z") {
        Ok((_, value)) => {
            assert_eq!(value, b"X\0Y\0Z");
        }
        rsp => panic!("unexpected response {rsp:?}"),
    }
}
#[test]
fn test_astring() {
    // A bare atom run stops at the space, leaving it unconsumed.
    match astring(b"text ") {
        Ok((_, value)) => {
            assert_eq!(value, b"text");
        }
        rsp => panic!("unexpected response {rsp:?}"),
    }
}
#[test]
fn test_sequence_range() {
    // "23:28" parses into the inclusive range 23..=28.
    match sequence_range(b"23:28 ") {
        Ok((_, value)) => {
            assert_eq!(*value.start(), 23);
            assert_eq!(*value.end(), 28);
            assert_eq!(value.collect::<Vec<u32>>(), vec![23, 24, 25, 26, 27, 28]);
        }
        rsp => panic!("Unexpected response {rsp:?}"),
    }
}
#[test]
fn test_sequence_set() {
    // Mixed list: lone numbers become degenerate n..=n ranges.
    match sequence_set(b"1,2:8,10,15:30 ") {
        Ok((_, value)) => {
            assert_eq!(value.len(), 4);
            let v = &value[0];
            assert_eq!(*v.start(), 1);
            assert_eq!(*v.end(), 1);
            let v = &value[1];
            assert_eq!(*v.start(), 2);
            assert_eq!(*v.end(), 8);
            let v = &value[2];
            assert_eq!(*v.start(), 10);
            assert_eq!(*v.end(), 10);
            let v = &value[3];
            assert_eq!(*v.start(), 15);
            assert_eq!(*v.end(), 30);
        }
        rsp => panic!("Unexpected response {rsp:?}"),
    }
}
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/gmail.rs | imap-proto/src/parser/gmail.rs | use std::borrow::Cow;
use nom::branch::alt;
use nom::bytes::streaming::tag_no_case;
use nom::combinator::map;
use nom::sequence::preceded;
use nom::IResult;
use crate::{AttributeValue, MailboxDatum};
use super::core::{number_64, parenthesized_list, quoted_utf8};
use super::rfc3501::flag;
// "X-GM-LABELS (...)" — each label is either a flag-like atom (\Inbox) or a
// quoted string ("Muy Importante").
pub(crate) fn gmail_label_list(i: &[u8]) -> IResult<&[u8], Vec<Cow<'_, str>>> {
    preceded(
        tag_no_case("X-GM-LABELS "),
        parenthesized_list(map(alt((flag, quoted_utf8)), Cow::Borrowed)),
    )(i)
}
pub(crate) fn msg_att_gmail_labels(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
map(gmail_label_list, AttributeValue::GmailLabels)(i)
}
// Same label list, wrapped as mailbox data instead of a FETCH attribute.
pub(crate) fn mailbox_data_gmail_labels(i: &[u8]) -> IResult<&[u8], MailboxDatum<'_>> {
    map(gmail_label_list, MailboxDatum::GmailLabels)(i)
}
// "X-GM-MSGID <u64>" — Gmail's stable per-message identifier.
pub(crate) fn gmail_msgid(i: &[u8]) -> IResult<&[u8], u64> {
    preceded(tag_no_case("X-GM-MSGID "), number_64)(i)
}
// FETCH-attribute wrapper around `gmail_msgid`.
pub(crate) fn msg_att_gmail_msgid(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
    map(gmail_msgid, AttributeValue::GmailMsgId)(i)
}
// Mailbox-datum wrapper around `gmail_msgid`.
pub(crate) fn mailbox_data_gmail_msgid(i: &[u8]) -> IResult<&[u8], MailboxDatum<'_>> {
    map(gmail_msgid, MailboxDatum::GmailMsgId)(i)
}
// "X-GM-THRID <u64>" — Gmail's thread (conversation) identifier.
pub(crate) fn gmail_thrid(i: &[u8]) -> IResult<&[u8], u64> {
    preceded(tag_no_case("X-GM-THRID "), number_64)(i)
}
// FETCH-attribute wrapper around `gmail_thrid`.
pub(crate) fn msg_att_gmail_thrid(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
    map(gmail_thrid, AttributeValue::GmailThrId)(i)
}
// Mailbox-datum wrapper around `gmail_thrid`.
pub(crate) fn mailbox_data_gmail_thrid(i: &[u8]) -> IResult<&[u8], MailboxDatum<'_>> {
    map(gmail_thrid, MailboxDatum::GmailThrId)(i)
}
#[cfg(test)]
mod tests {
use crate::types::*;
#[test]
fn test_gmail_labels() {
    // Labels mix backslash-prefixed flags and a quoted multi-word label;
    // quoting is stripped but the leading backslashes are kept.
    let env = br#"X-GM-LABELS (\Inbox \Sent Important "Muy Importante") "#;
    match super::msg_att_gmail_labels(env) {
        Ok((_, AttributeValue::GmailLabels(labels))) => {
            println!("{labels:?}");
            assert_eq!(
                ["\\Inbox", "\\Sent", "Important", "Muy Importante"].to_vec(),
                labels
            );
        }
        rsp => {
            // On failure, print the unparsed remainder before panicking.
            let e = rsp.unwrap_err();
            if let nom::Err::Error(i) = &e {
                println!("{:?}", std::str::from_utf8(i.input));
            }
            panic!("unexpected response {e:?}");
        }
    }
}
#[test]
fn test_gmail_msgid() {
    // X-GM-MSGID is a bare 64-bit integer.
    let env = br#"X-GM-MSGID 1278455344230334865 "#;
    match super::msg_att_gmail_msgid(env) {
        Ok((_, AttributeValue::GmailMsgId(msgid))) => {
            println!("{msgid:?}");
            assert_eq!(1278455344230334865u64, msgid);
        }
        rsp => {
            // On failure, print the unparsed remainder before panicking.
            let e = rsp.unwrap_err();
            if let nom::Err::Error(i) = &e {
                println!("{:?}", std::str::from_utf8(i.input));
            }
            panic!("unexpected response {e:?}");
        }
    }
}
#[test]
fn test_gmail_thrid() {
    // X-GM-THRID is a bare 64-bit integer, same shape as X-GM-MSGID.
    let env = br#"X-GM-THRID 1278455344230334865 "#;
    match super::msg_att_gmail_thrid(env) {
        Ok((_, AttributeValue::GmailThrId(thrid))) => {
            println!("{thrid:?}");
            assert_eq!(1278455344230334865, thrid);
        }
        rsp => {
            // On failure, print the unparsed remainder before panicking.
            let e = rsp.unwrap_err();
            if let nom::Err::Error(i) = &e {
                println!("{:?}", std::str::from_utf8(i.input));
            }
            panic!("unexpected response {e:?}");
        }
    }
}
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/mod.rs | imap-proto/src/parser/mod.rs | use crate::types::Response;
use nom::{branch::alt, IResult};
pub mod core;
pub mod bodystructure;
pub mod gmail;
pub mod rfc2087;
pub mod rfc2971;
pub mod rfc3501;
pub mod rfc4314;
pub mod rfc4315;
pub mod rfc4551;
pub mod rfc5161;
pub mod rfc5256;
pub mod rfc5464;
pub mod rfc7162;
#[cfg(test)]
mod tests;
// Parse a single server response line (plus any literal continuation bytes).
// Tries the three top-level productions in order: continuation request
// ("+ ..."), untagged response data ("* ..."), then a tagged completion.
pub fn parse_response(msg: &[u8]) -> ParseResult<'_> {
    alt((
        rfc3501::continue_req,
        rfc3501::response_data,
        rfc3501::response_tagged,
    ))(msg)
}
pub type ParseResult<'a> = IResult<&'a [u8], Response<'a>>;
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.