repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/src/builtin_parser/number.rs | src/builtin_parser/number.rs | use std::fmt::Display;
use std::ops::*;
use bevy::reflect::Reflect;
use logos::Span;
use super::{EvalError, SpanExtension, Spanned};
/// An enum that contains any type of number.
///
/// The [`Integer`](Number::Integer) and [`Float`](Number::Float) types
/// are generic types that then get downcasted when they first interact
/// with a concrete type. (i.e. calling a function, etc)
#[derive(Debug, Clone, Copy)]
#[allow(missing_docs, non_camel_case_types)]
pub enum Number {
/// Generic integer that can get downcasted.
Integer(i128),
/// Generic float that can get downcasted to a [`f64`] and [`f32`]
Float(f64),
u8(u8),
u16(u16),
u32(u32),
u64(u64),
usize(usize),
i8(i8),
i16(i16),
i32(i32),
i64(i64),
isize(isize),
f32(f32),
f64(f64),
}
impl Number {
/// Converts this into a [`Box<dyn Reflect>`](Reflect).
pub fn reflect(self, span: Span, ty: &str) -> Result<Box<dyn Reflect>, EvalError> {
match self {
Number::u8(number) => Ok(Box::new(number)),
Number::u16(number) => Ok(Box::new(number)),
Number::u32(number) => Ok(Box::new(number)),
Number::u64(number) => Ok(Box::new(number)),
Number::usize(number) => Ok(Box::new(number)),
Number::i8(number) => Ok(Box::new(number)),
Number::i16(number) => Ok(Box::new(number)),
Number::i32(number) => Ok(Box::new(number)),
Number::i64(number) => Ok(Box::new(number)),
Number::isize(number) => Ok(Box::new(number)),
Number::f32(number) => Ok(Box::new(number)),
Number::f64(number) => Ok(Box::new(number)),
Number::Integer(number) => match ty {
"u8" => Ok(Box::new(number as u8)),
"u16" => Ok(Box::new(number as u16)),
"u32" => Ok(Box::new(number as u32)),
"u64" => Ok(Box::new(number as u64)),
"usize" => Ok(Box::new(number as usize)),
"i8" => Ok(Box::new(number as i8)),
"i16" => Ok(Box::new(number as i16)),
"i32" => Ok(Box::new(number as i32)),
"i64" => Ok(Box::new(number as i64)),
"isize" => Ok(Box::new(number as isize)),
ty => Err(EvalError::IncompatibleReflectTypes {
expected: "integer".to_string(),
actual: ty.to_string(),
span,
}),
},
Number::Float(number) => match ty {
"f32" => Ok(Box::new(number as f32)),
"f64" => Ok(Box::new(number)),
ty => Err(EvalError::IncompatibleReflectTypes {
expected: "float".to_string(),
actual: ty.to_string(),
span,
}),
},
}
}
/// Returns the kind of [`Number`] as a [string slice](str).
/// You may want to use [`natural_kind`](Self::natural_kind)
/// instead for more natural sounding error messages
pub const fn kind(&self) -> &'static str {
match self {
Number::Float(_) => "float",
Number::Integer(_) => "integer",
Number::u8(_) => "u8",
Number::u16(_) => "u16",
Number::u32(_) => "u32",
Number::u64(_) => "u64",
Number::usize(_) => "usize",
Number::i8(_) => "i8",
Number::i16(_) => "i16",
Number::i32(_) => "i32",
Number::i64(_) => "i64",
Number::isize(_) => "usize",
Number::f32(_) => "f32",
Number::f64(_) => "f64",
}
}
/// Returns the kind of [`Number`] as a [string slice](str) with an `a` or `an` prepended to it.
/// Used for more natural sounding error messages.
pub const fn natural_kind(&self) -> &'static str {
match self {
Number::Float(_) => "a float",
Number::Integer(_) => "an integer",
Number::u8(_) => "a u8",
Number::u16(_) => "a u16",
Number::u32(_) => "a u32",
Number::u64(_) => "a u64",
Number::usize(_) => "a usize",
Number::i8(_) => "a i8",
Number::i16(_) => "a i16",
Number::i32(_) => "a i32",
Number::i64(_) => "a i64",
Number::isize(_) => "a usize",
Number::f32(_) => "a f32",
Number::f64(_) => "a f64",
}
}
}
impl Display for Number {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Number::Float(number) => write!(f, "{number} (float)"),
Number::Integer(number) => write!(f, "{number} (integer)"),
Number::u8(number) => write!(f, "{number} (u8)"),
Number::u16(number) => write!(f, "{number} (u16)"),
Number::u32(number) => write!(f, "{number} (u32)"),
Number::u64(number) => write!(f, "{number} (u64)"),
Number::usize(number) => write!(f, "{number} (usize)"),
Number::i8(number) => write!(f, "{number} (i8)"),
Number::i16(number) => write!(f, "{number} (i16)"),
Number::i32(number) => write!(f, "{number} (i32)"),
Number::i64(number) => write!(f, "{number} (i64)"),
Number::isize(number) => write!(f, "{number} (isize)"),
Number::f32(number) => write!(f, "{number} (f32)"),
Number::f64(number) => write!(f, "{number} (f64)"),
}
}
}
macro_rules! impl_op {
($fn:ident, $op:tt, $checked:ident)=> {
impl Number {
#[doc = concat!("Performs the `", stringify!($op), "` calculation.")]
pub fn $fn(left: Number, right: Number, span: Span) -> Result<Number, EvalError> {
let op_err = || EvalError::InvalidOperation {
left,
right,
operation: stringify!($fn),
span: span.clone(),
};
match (left, right) {
(Number::u8(left), Number::u8(right)) => Ok(Number::u8(left.$checked(right).ok_or_else(op_err)?)),
(Number::u16(left), Number::u16(right)) => Ok(Number::u16(left.$checked(right).ok_or_else(op_err)?)),
(Number::u32(left), Number::u32(right)) => Ok(Number::u32(left.$checked(right).ok_or_else(op_err)?)),
(Number::u64(left), Number::u64(right)) => Ok(Number::u64(left.$checked(right).ok_or_else(op_err)?)),
(Number::usize(left), Number::usize(right)) => Ok(Number::usize(left.$checked(right).ok_or_else(op_err)?)),
(Number::i8(left), Number::i8(right)) => Ok(Number::i8(left.$checked(right).ok_or_else(op_err)?)),
(Number::i16(left), Number::i16(right)) => Ok(Number::i16(left.$checked(right).ok_or_else(op_err)?)),
(Number::i32(left), Number::i32(right)) => Ok(Number::i32(left.$checked(right).ok_or_else(op_err)?)),
(Number::i64(left), Number::i64(right)) => Ok(Number::i64(left.$checked(right).ok_or_else(op_err)?)),
(Number::isize(left), Number::isize(right)) => Ok(Number::isize(left.$checked(right).ok_or_else(op_err)?)),
(Number::f32(left), Number::f32(right)) => Ok(Number::f32(left $op right)),
(Number::f64(left), Number::f64(right)) => Ok(Number::f64(left $op right)),
(Number::Integer(left), Number::u8(right)) => Ok(Number::u8((left as u8).$checked(right).ok_or_else(op_err)?)),
(Number::Integer(left), Number::u16(right)) => Ok(Number::u16((left as u16).$checked(right).ok_or_else(op_err)?)),
(Number::Integer(left), Number::u32(right)) => Ok(Number::u32((left as u32).$checked(right).ok_or_else(op_err)?)),
(Number::Integer(left), Number::u64(right)) => Ok(Number::u64((left as u64).$checked(right).ok_or_else(op_err)?)),
(Number::Integer(left), Number::usize(right)) => Ok(Number::usize((left as usize).$checked(right).ok_or_else(op_err)?)),
(Number::Integer(left), Number::i8(right)) => Ok(Number::i8((left as i8).$checked(right).ok_or_else(op_err)?)),
(Number::Integer(left), Number::i16(right)) => Ok(Number::i16((left as i16).$checked(right).ok_or_else(op_err)?)),
(Number::Integer(left), Number::i32(right)) => Ok(Number::i32((left as i32).$checked(right).ok_or_else(op_err)?)),
(Number::Integer(left), Number::i64(right)) => Ok(Number::i64((left as i64).$checked(right).ok_or_else(op_err)?)),
(Number::Integer(left), Number::isize(right)) => Ok(Number::isize((left as isize).$checked(right).ok_or_else(op_err)?)),
(Number::Integer(left), Number::Integer(right)) => Ok(Number::Integer(left.$checked(right).ok_or_else(op_err)?)),
(Number::u8(left), Number::Integer(right)) => Ok(Number::u8(left.$checked(right as u8).ok_or_else(op_err)?)),
(Number::u16(left), Number::Integer(right)) => Ok(Number::u16(left.$checked(right as u16).ok_or_else(op_err)?)),
(Number::u32(left), Number::Integer(right)) => Ok(Number::u32(left.$checked(right as u32).ok_or_else(op_err)?)),
(Number::u64(left), Number::Integer(right)) => Ok(Number::u64(left.$checked(right as u64).ok_or_else(op_err)?)),
(Number::usize(left), Number::Integer(right)) => Ok(Number::usize(left.$checked(right as usize).ok_or_else(op_err)?)),
(Number::i8(left), Number::Integer(right)) => Ok(Number::i8(left.$checked(right as i8).ok_or_else(op_err)?)),
(Number::i16(left), Number::Integer(right)) => Ok(Number::i16(left.$checked(right as i16).ok_or_else(op_err)?)),
(Number::i32(left), Number::Integer(right)) => Ok(Number::i32(left.$checked(right as i32).ok_or_else(op_err)?)),
(Number::i64(left), Number::Integer(right)) => Ok(Number::i64(left.$checked(right as i64).ok_or_else(op_err)?)),
(Number::isize(left), Number::Integer(right)) => Ok(Number::isize(left.$checked(right as isize).ok_or_else(op_err)?)),
(Number::Float(left), Number::f32(right)) => Ok(Number::f32(left as f32 $op right)),
(Number::Float(left), Number::f64(right)) => Ok(Number::f64(left as f64 $op right)),
(Number::Float(left), Number::Float(right)) => Ok(Number::Float(left $op right)),
(Number::f32(left), Number::Float(right)) => Ok(Number::f32(left $op right as f32)),
(Number::f64(left), Number::Float(right)) => Ok(Number::f64(left $op right as f64)),
_ => Err(EvalError::IncompatibleNumberTypes {
left: left.natural_kind(),
right: right.natural_kind(),
span
})
}
}
}
};
}
impl_op!(add, +, checked_add);
impl_op!(sub, -, checked_sub);
impl_op!(mul, *, checked_mul);
impl_op!(div, /, checked_div);
impl_op!(rem, %, checked_rem);
macro_rules! impl_op_spanned {
($trait:ident, $method:ident) => {
impl $trait<Self> for Spanned<Number> {
type Output = Result<Number, EvalError>;
fn $method(self, rhs: Self) -> Self::Output {
let span = self.span.join(rhs.span);
Number::$method(self.value, rhs.value, span)
}
}
};
}
impl_op_spanned!(Add, add);
impl_op_spanned!(Sub, sub);
impl_op_spanned!(Mul, mul);
impl_op_spanned!(Rem, rem);
impl Number {
/// Performs the unary `-` operation.
pub fn neg(self, span: Span) -> Result<Number, EvalError> {
match self {
Number::u8(_) => Err(EvalError::CannotNegateUnsignedInteger(Spanned {
span,
value: self,
})),
Number::u16(_) => Err(EvalError::CannotNegateUnsignedInteger(Spanned {
span,
value: self,
})),
Number::u32(_) => Err(EvalError::CannotNegateUnsignedInteger(Spanned {
span,
value: self,
})),
Number::u64(_) => Err(EvalError::CannotNegateUnsignedInteger(Spanned {
span,
value: self,
})),
Number::usize(_) => Err(EvalError::CannotNegateUnsignedInteger(Spanned {
span,
value: self,
})),
Number::i8(number) => Ok(Number::i8(-number)),
Number::i16(number) => Ok(Number::i16(-number)),
Number::i32(number) => Ok(Number::i32(-number)),
Number::i64(number) => Ok(Number::i64(-number)),
Number::isize(number) => Ok(Number::isize(-number)),
Number::f32(number) => Ok(Number::f32(-number)),
Number::f64(number) => Ok(Number::f64(-number)),
Number::Float(number) => Ok(Number::Float(-number)),
Number::Integer(number) => Ok(Number::Integer(-number)),
}
}
}
macro_rules! from_primitive {
($primitive:ident) => {
impl From<$primitive> for Number {
fn from(value: $primitive) -> Self {
Number::$primitive(value)
}
}
};
}
from_primitive!(u8);
from_primitive!(u16);
from_primitive!(u32);
from_primitive!(u64);
from_primitive!(i8);
from_primitive!(i16);
from_primitive!(i32);
from_primitive!(i64);
from_primitive!(f32);
from_primitive!(f64);
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/src/builtin_parser/runner.rs | src/builtin_parser/runner.rs | //! Executes the abstract syntax tree.
use environment::Environment;
use std::collections::HashMap;
use bevy::prelude::*;
use bevy::reflect::{
DynamicEnum, DynamicTuple, ReflectMut, TypeInfo, TypeRegistration, VariantInfo,
};
use crate::ui::COMMAND_RESULT_NAME;
use self::error::EvalError;
use self::member::{eval_member_expression, eval_path, Path};
use self::reflection::{object_to_dynamic_struct, CreateRegistration, IntoResource};
use self::unique_rc::UniqueRc;
use super::parser::{Ast, Expression, Operator};
use super::{Number, SpanExtension, Spanned};
pub(super) mod environment;
pub(super) mod error;
pub(super) mod member;
pub(super) mod reflection;
pub(super) mod stdlib;
pub(super) mod unique_rc;
pub(super) mod value;
pub use value::Value;
/// Temporary macro that prevents panicking by replacing the [`todo!`] panic with an error message.
macro_rules! todo_error {
() => {
Err(EvalError::Custom {
text: concat!("todo error invoked at ", file!(), ":", line!(), ":", column!()).into(),
span: 0..0
})?
};
($($arg:tt)+) => {
Err(EvalError::Custom {
text: format!(concat!("todo error invoked at ", file!(), ":", line!(), ":", column!(), " : {}"), format_args!($($arg)+)).into(),
span: 0..0
})?
};
}
// This makes `todo_error` accessible to the runners submodules
use todo_error;
/// Container for every value needed by evaluation functions.
pub struct EvalParams<'world, 'env, 'reg> {
world: &'world mut World,
environment: &'env mut Environment,
registrations: &'reg [&'reg TypeRegistration],
}
#[derive(Debug)]
pub enum ExecutionError {
NoEnvironment,
NoTypeRegistry,
Eval(EvalError),
}
impl std::fmt::Display for ExecutionError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
match self {
Self::NoEnvironment => write!(
f,
"Environment resource doesn't exist, not executing command."
),
Self::NoTypeRegistry => write!(
f,
"The AppTypeRegistry doesn't exist, not executing command. "
),
Self::Eval(run_error) => <EvalError as std::fmt::Display>::fmt(run_error, f),
}
}
}
impl std::error::Error for ExecutionError {
fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
match self {
ExecutionError::Eval(eval) => Some(eval),
_ => None,
}
}
}
impl From<EvalError> for ExecutionError {
fn from(value: EvalError) -> Self {
Self::Eval(value)
}
}
pub fn run(ast: Ast, world: &mut World) -> Result<(), ExecutionError> {
// Temporarily remove the [`Environment`] resource to gain
// mutability without needing a mutable reference.
let mut environment = world
.remove_non_send_resource::<Environment>()
.ok_or(ExecutionError::NoEnvironment)?;
// Same thing here (this time we are doing it because we are passing a `&mut World` to `eval_expression`)
let Some(registry) = world.remove_resource::<AppTypeRegistry>() else {
// Make sure to re-insert the resource on failure
world.insert_non_send_resource(environment);
return Err(ExecutionError::NoTypeRegistry);
};
let result = (|| {
let registry_read = registry.read();
let registrations: Vec<_> = registry_read
.iter()
.filter(|registration| {
world
.components()
.get_resource_id(registration.type_id())
.is_some()
})
.collect();
for mut statement in ast {
// Automatically borrow variables
statement.value = match statement.value {
Expression::Variable(variable) => Expression::Borrow(Box::new(Spanned {
span: statement.span.clone(),
value: Expression::Variable(variable),
})),
expr => expr,
};
let span = statement.span.clone();
let value = eval_expression(
statement,
EvalParams {
world,
environment: &mut environment,
registrations: ®istrations,
},
)?;
match value {
Value::None => {}
value => {
let value = value.try_format(span, world, ®istrations)?;
info!(name: COMMAND_RESULT_NAME, "{}{value}", crate::ui::COMMAND_RESULT_PREFIX);
}
}
}
Ok(())
})();
// Add back the resources
world.insert_resource(registry);
world.insert_non_send_resource(environment);
result
}
fn eval_expression(
expr: Spanned<Expression>,
EvalParams {
world,
environment,
registrations,
}: EvalParams,
) -> Result<Value, EvalError> {
match expr.value {
Expression::VarAssign {
name,
value: value_expr,
} => match eval_path(
*name,
EvalParams {
world,
environment,
registrations,
},
)?
.value
{
Path::Variable(variable) => {
let value = eval_expression(
*value_expr,
EvalParams {
world,
environment,
registrations,
},
)?;
match variable.upgrade() {
Some(strong) => *strong.borrow_mut() = value,
None => todo_error!("cannot "),
}
Ok(Value::Reference(variable))
}
Path::NewVariable(variable) => {
let value = eval_expression(
*value_expr,
EvalParams {
world,
environment,
registrations,
},
)?;
let rc = UniqueRc::new(value);
let weak = rc.borrow();
environment.set(variable, rc);
Ok(Value::Reference(weak))
}
Path::Resource(resource) => {
let registration = registrations.create_registration(resource.id);
let mut dyn_reflect = resource.mut_dyn_reflect(world, registration);
let reflect = dyn_reflect
.reflect_path_mut(resource.path.as_str())
.unwrap();
match reflect.reflect_mut() {
ReflectMut::Enum(dyn_enum) => {
let TypeInfo::Enum(enum_info) = registration.type_info() else {
unreachable!()
};
let Spanned { span, value } = *value_expr;
match value {
Expression::Variable(name) => {
let variant_info = match enum_info.variant(&name) {
Some(variant_info) => variant_info,
None => {
return Err(EvalError::EnumVariantNotFound(span.wrap(name)))
}
};
let VariantInfo::Unit(_) = variant_info else {
return todo_error!("{variant_info:?}");
};
let new_enum = DynamicEnum::new(name, ());
dyn_enum.apply(&new_enum);
}
Expression::StructObject { name, map } => {
let variant_info = match enum_info.variant(&name) {
Some(variant_info) => variant_info,
None => {
return Err(EvalError::EnumVariantNotFound(span.wrap(name)))
}
};
let VariantInfo::Struct(variant_info) = variant_info else {
return todo_error!("{variant_info:?}");
};
let map: HashMap<_, _> = map
.into_iter()
.map(|(k, v)| {
let ty = match variant_info.field(&k) {
Some(field) => Ok(field.type_path_table().short_path()),
None => {
Err(EvalError::EnumVariantStructFieldNotFound {
field_name: k.clone(),
variant_name: name.clone(),
span: span.clone(),
})
}
}?;
let span = v.span.clone();
Ok((
k,
(
eval_expression(
v,
EvalParams {
world,
environment,
registrations,
},
)?,
span,
ty,
),
))
})
.collect::<Result<_, _>>()?;
let new_enum =
DynamicEnum::new(name, object_to_dynamic_struct(map)?);
let mut dyn_reflect =
resource.mut_dyn_reflect(world, registrations);
let dyn_enum = dyn_reflect
.reflect_path_mut(resource.path.as_str())
.unwrap();
dyn_enum.apply(&new_enum);
}
Expression::StructTuple { name, tuple } => {
let variant_info = match enum_info.variant(&name) {
Some(variant_info) => variant_info,
None => {
return Err(EvalError::EnumVariantNotFound(span.wrap(name)))
}
};
let VariantInfo::Tuple(variant_info) = variant_info else {
return todo_error!("{variant_info:?}");
};
let tuple = eval_tuple(
tuple,
EvalParams {
world,
environment,
registrations,
},
)?;
let mut dynamic_tuple = DynamicTuple::default();
for (index, element) in tuple.into_vec().into_iter().enumerate() {
let ty = match variant_info.field_at(index) {
Some(field) => Ok(field.type_path_table().short_path()),
None => Err(EvalError::EnumVariantTupleFieldNotFound {
field_index: index,
variant_name: name.clone(),
span: span.clone(),
}),
}?;
dynamic_tuple.insert_boxed(
element.value.into_inner().reflect(element.span, ty)?,
);
}
let new_enum = DynamicEnum::new(name, dynamic_tuple);
let mut dyn_reflect =
resource.mut_dyn_reflect(world, registrations);
let dyn_enum = dyn_reflect
.reflect_path_mut(resource.path.as_str())
.unwrap();
dyn_enum.apply(&new_enum);
}
_ => todo_error!(),
}
}
_ => {
let span = value_expr.span.clone();
let ty = reflect.reflect_short_type_path().to_owned();
let value = eval_expression(
*value_expr,
EvalParams {
world,
environment,
registrations,
},
)?;
let value_reflect = value.reflect(span.clone(), &ty)?;
let mut dyn_reflect = resource.mut_dyn_reflect(world, registrations);
let reflect = dyn_reflect
.reflect_path_mut(resource.path.as_str())
.unwrap();
reflect.set(value_reflect).map_err(|value_reflect| {
EvalError::IncompatibleReflectTypes {
span,
expected: reflect.reflect_type_path().to_string(),
actual: value_reflect.reflect_type_path().to_string(),
}
})?;
}
}
Ok(Value::Resource(resource))
}
},
Expression::String(string) => Ok(Value::String(string)),
Expression::Number(number) => Ok(Value::Number(number)),
Expression::Variable(variable) => {
if registrations
.iter()
.any(|v| v.type_info().type_path_table().short_path() == variable)
{
Err(EvalError::CannotMoveOutOfResource(Spanned {
span: expr.span,
value: variable,
}))
} else {
environment.move_var(&variable, expr.span)
}
}
Expression::StructObject { name, map } => {
let hashmap = eval_object(
map,
EvalParams {
world,
environment,
registrations,
},
)?;
Ok(Value::StructObject { name, map: hashmap })
}
Expression::Object(map) => {
let hashmap = eval_object(
map,
EvalParams {
world,
environment,
registrations,
},
)?;
Ok(Value::Object(hashmap))
}
Expression::Tuple(tuple) => {
let tuple = eval_tuple(
tuple,
EvalParams {
world,
environment,
registrations,
},
)?;
Ok(Value::Tuple(tuple))
}
Expression::StructTuple { name, tuple } => {
let tuple = eval_tuple(
tuple,
EvalParams {
world,
environment,
registrations,
},
)?;
Ok(Value::StructTuple { name, tuple })
}
Expression::BinaryOp {
left,
operator,
right,
} => {
let left = eval_expression(
*left,
EvalParams {
world,
environment,
registrations,
},
)?;
let right = eval_expression(
*right,
EvalParams {
world,
environment,
registrations,
},
)?;
match (left, right) {
(Value::Number(left), Value::Number(right)) => Ok(Value::Number(match operator {
Operator::Add => Number::add(left, right, expr.span)?,
Operator::Sub => Number::sub(left, right, expr.span)?,
Operator::Mul => Number::mul(left, right, expr.span)?,
Operator::Div => Number::div(left, right, expr.span)?,
Operator::Mod => Number::rem(left, right, expr.span)?,
})),
(left, right) => todo_error!("{left:#?}, {right:#?}"),
}
}
Expression::ForLoop {
index_name,
loop_count,
block,
} => todo_error!("for loop {index_name}, {loop_count}, {block:#?}"),
Expression::Member { left, right } => eval_member_expression(
*left,
right,
EvalParams {
world,
environment,
registrations,
},
),
Expression::UnaryOp(sub_expr) => {
let span = sub_expr.span.clone();
let value = eval_expression(
*sub_expr,
EvalParams {
world,
environment,
registrations,
},
)?;
if let Value::Number(number) = value {
Ok(Value::Number(number.neg(span)?))
} else {
Err(EvalError::ExpectedNumberAfterUnaryOperator(Spanned {
span,
value,
}))
}
}
Expression::Dereference(inner) => {
if let Expression::Variable(variable) = inner.value {
let var = environment.get(&variable, inner.span)?;
match &*var.borrow_inner().borrow() {
Value::Reference(reference) => {
let reference = reference
.upgrade()
.ok_or(EvalError::ReferenceToMovedData(expr.span))?;
let owned = reference.borrow().clone();
Ok(owned)
}
value => Ok(value.clone()),
}
} else {
Err(EvalError::CannotDereferenceValue(
expr.span.wrap(inner.value.kind()),
))
}
}
Expression::Borrow(inner) => {
if let Expression::Variable(variable) = inner.value {
if let Some(registration) = registrations
.iter()
.find(|v| v.type_info().type_path_table().short_path() == variable)
{
Ok(Value::Resource(IntoResource::new(registration.type_id())))
} else {
let rc = environment.get(&variable, inner.span)?;
let weak = rc.borrow();
Ok(Value::Reference(weak))
}
} else {
Err(EvalError::CannotBorrowValue(
expr.span.wrap(inner.value.kind()),
))
}
}
Expression::None => Ok(Value::None),
Expression::Boolean(bool) => Ok(Value::Boolean(bool)),
Expression::Function { name, arguments } => {
environment.function_scope(&name, move |environment, function| {
(function.body)(
arguments,
EvalParams {
world,
environment,
registrations,
},
)
})
}
}
}
fn eval_object(
map: HashMap<String, Spanned<Expression>>,
EvalParams {
world,
environment,
registrations,
}: EvalParams,
) -> Result<HashMap<String, UniqueRc<Value>>, EvalError> {
let map = map
.into_iter()
.map(
|(key, expr)| -> Result<(String, UniqueRc<Value>), EvalError> {
Ok((
key,
UniqueRc::new(eval_expression(
expr,
EvalParams {
world,
environment,
registrations,
},
)?),
))
},
)
.collect::<Result<_, _>>()?;
Ok(map)
}
fn eval_tuple(
tuple: Vec<Spanned<Expression>>,
EvalParams {
world,
environment,
registrations,
}: EvalParams,
) -> Result<Box<[Spanned<UniqueRc<Value>>]>, EvalError> {
tuple
.into_iter()
.map(|expr| {
let span = expr.span.clone();
let value = UniqueRc::new(eval_expression(
expr,
EvalParams {
world,
environment,
registrations,
},
)?);
Ok(span.wrap(value))
})
.collect::<Result<_, _>>()
}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/src/builtin_parser/parser.rs | src/builtin_parser/parser.rs | //! Generates an abstract syntax tree from a list of tokens.
use logos::Span;
use std::collections::HashMap;
use std::num::IntErrorKind;
use crate::command::{CommandHint, CommandHintColor};
use super::lexer::{FailedToLexCharacter, Token, TokenStream};
use super::number::Number;
use super::runner::environment::Function;
use super::{Environment, SpanExtension, Spanned};
/// An [Abstract Syntax Tree](https://en.wikipedia.org/wiki/Abstract_syntax_tree).
///
/// This type represents a list of expressions, which is what makes up a command.
pub type Ast = Vec<Spanned<Expression>>;
macro_rules! expect {
($tokens:ident, $($token:tt)+) => {
match $tokens.next() {
Some(Ok($($token)+)) => ($($token)+) ,
Some(Ok(token)) => {
return Err(ParseError::ExpectedTokenButGot {
expected: $($token)+,
got: token,
span: $tokens.span(),
})
}
Some(Err(FailedToLexCharacter)) => {
return Err(ParseError::FailedToLexCharacters($tokens.span().wrap($tokens.slice().to_string())))
}
None => return Err(ParseError::ExpectedMoreTokens($tokens.span())),
}
};
}
/// A type that represents an expression.
#[derive(Debug, Clone)]
pub enum Expression {
// Primitives
None,
Boolean(bool),
Number(Number),
Variable(String),
String(String),
Borrow(Box<Spanned<Expression>>),
Dereference(Box<Spanned<Expression>>),
Object(HashMap<String, Spanned<Expression>>),
StructObject {
name: String,
map: HashMap<String, Spanned<Expression>>,
},
Tuple(Vec<Spanned<Expression>>),
StructTuple {
name: String,
tuple: Vec<Spanned<Expression>>,
},
// Expressions
BinaryOp {
left: Box<Spanned<Expression>>,
operator: Operator,
right: Box<Spanned<Expression>>,
},
UnaryOp(Box<Spanned<Expression>>),
Member {
left: Box<Spanned<Expression>>,
right: Spanned<Access>,
},
// Statement-like
VarAssign {
name: Box<Spanned<Expression>>,
value: Box<Spanned<Expression>>,
},
Function {
name: String,
arguments: Vec<Spanned<Expression>>,
},
ForLoop {
index_name: String,
loop_count: u64,
block: Ast,
},
}
/// A singular element access within a [`Expression::Member`].
///
/// Based on `bevy_reflect`'s `Access`.
#[derive(Debug, Clone)]
pub enum Access {
/// A name-based field access on a struct.
Field(String),
/// An index-based access on a tuple.
TupleIndex(usize),
// /// An index-based access on a list.
// ListIndex(usize),
}
pub enum AccessKind {
Field,
TupleIndex,
}
impl Access {
pub const fn kind(&self) -> AccessKind {
match self {
Access::Field(_) => AccessKind::Field,
Access::TupleIndex(_) => AccessKind::TupleIndex,
}
}
/// Returns the kind of [`Access`] as a [string slice](str) with an `a` or `an` prepended to it.
/// Used for more natural sounding error messages.
pub const fn natural_kind(&self) -> &'static str {
self.kind().natural()
}
}
impl AccessKind {
/// Returns the kind of [`Access`] as a [string slice](str) with an `a` or `an` prepended to it.
/// Used for more natural sounding error messages.
pub const fn natural(&self) -> &'static str {
match self {
AccessKind::Field => "a field",
AccessKind::TupleIndex => "a tuple",
}
}
}
/// Get the access if its of a certain type, if not, return a [`EvalError`](super::runner::error::EvalError).
///
/// For examples, take a look at existing uses in the code.
macro_rules! access_unwrap {
($expected:literal, $($variant:ident($variant_inner:ident))|+ = $val:expr => $block:block) => {{
let val = $val;
if let $(Access::$variant($variant_inner))|+ = val.value $block else {
use $crate::builtin_parser::parser::AccessKind;
use $crate::builtin_parser::runner::error::EvalError;
// We have to put this in a `const` first to avoid a
// `temporary value dropped while borrowed` error.
const EXPECTED_ACCESS: &[&str] = &[$(AccessKind::$variant.natural()),+];
Err(EvalError::IncorrectAccessOperation {
span: val.span,
expected_access: EXPECTED_ACCESS,
expected_type: $expected,
got: val.value,
})?
}
}};
}
pub(crate) use access_unwrap;
impl Expression {
pub const fn kind(&self) -> &'static str {
match self {
Expression::None => "nothing",
Expression::Boolean(..) => "a boolean",
Expression::Number(..) => "a number",
Expression::Variable(..) => "a variable name",
Expression::String(..) => "a string",
Expression::Borrow(..) => "a borrow",
Expression::Dereference(..) => "a dereference",
Expression::Object(..) => "an object",
Expression::StructObject { .. } => "a struct object",
Expression::Tuple(..) => "a tuple",
Expression::StructTuple { .. } => "a struct tuple",
Expression::BinaryOp { .. } => "a binary operation",
Expression::UnaryOp(..) => "a unary operation",
Expression::Member { .. } => "a member expression",
Expression::VarAssign { .. } => "a variable assignment",
Expression::Function { .. } => "a function call",
Expression::ForLoop { .. } => "a for loop",
}
}
}
#[derive(Debug, Clone)]
pub enum Operator {
Add,
Sub,
Mul,
Div,
Mod,
}
#[derive(Debug)]
pub enum ParseError {
FailedToLexCharacters(Spanned<String>),
ExpectedMoreTokens(Span),
ExpectedTokenButGot {
expected: Token,
got: Token,
span: Span,
},
ExpectedEndline(Spanned<Token>),
ExpectedLiteral(Spanned<Token>),
InvalidSuffixForFloat(Spanned<String>),
NegativeIntOverflow {
span: Span,
number: String,
number_kind: &'static str,
},
PositiveIntOverflow {
span: Span,
number: String,
number_kind: &'static str,
},
ExpectedObjectContinuation(Spanned<Option<Result<Token, FailedToLexCharacter>>>),
ExpectedIndexer {
got: Token,
span: Span,
},
UnsupportedLoop {
ty: &'static str,
span: Span,
},
}
impl ParseError {
pub fn span(&self) -> Span {
use ParseError as E;
match self {
E::FailedToLexCharacters(Spanned { span, value: _ }) => span,
E::ExpectedMoreTokens(span) => span,
E::ExpectedTokenButGot { span, .. } => span,
E::ExpectedEndline(Spanned { span, value: _ }) => span,
E::ExpectedLiteral(Spanned { span, value: _ }) => span,
E::InvalidSuffixForFloat(Spanned { span, value: _ }) => span,
E::PositiveIntOverflow { span, .. } => span,
E::NegativeIntOverflow { span, .. } => span,
E::ExpectedObjectContinuation(Spanned { span, value: _ }) => span,
E::ExpectedIndexer { got: _, span } => span,
E::UnsupportedLoop { ty: _, span } => span,
}
.clone()
}
pub fn hint(&self) -> CommandHint {
CommandHint {
color: CommandHintColor::Error,
span: self.span(),
description: self.to_string().into(),
}
}
}
impl std::fmt::Display for ParseError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
use ParseError as E;
match self {
E::FailedToLexCharacters(Spanned { span: _, value }) => write!(f, "Invalid character(s) \"{value}\" (Did you mean to use a string?)"),
E::ExpectedMoreTokens(_) => write!(f, "Expected more tokens, got nothing."),
E::ExpectedTokenButGot {
expected,
got,
span: _,
} => write!(f, "Expected token {expected:?}, got token {got:?} instead."),
E::ExpectedEndline(_) => write!(f, "Expected a semicolon or endline after a complete statement, but got more tokens than expected."),
E::ExpectedLiteral(Spanned { span: _, value }) => write!(f, "Expected a literal token, got {value:?} which is not a valid literal."),
E::InvalidSuffixForFloat(Spanned { span: _, value: suffix }) => write!(f, r#""{suffix}" is an invalid suffix for a float. The only valid suffixes are "f32" and "f64"."#),
E::NegativeIntOverflow { span: _, number, number_kind } => write!(f, "{number} cannot be represented as a {number_kind} as it is too small."),
E::PositiveIntOverflow { span: _, number, number_kind } => write!(f, "{number} cannot be represented as a {number_kind} as it is too large."),
E::ExpectedObjectContinuation(Spanned { span: _, value: got }) => write!(f, "Expected a continuation to the object declaration (such as a comma or a closing bracket), but got {got:?} instead."),
E::ExpectedIndexer { got, span: _ } => write!(f, "Expected an identifier or integer when accessing member of variable, got {got:?} instead."),
E::UnsupportedLoop { ty, span : _} => write!(f, "{ty} loops are not yet supported. See issue #8.")
}
}
}
impl std::error::Error for ParseError {}
// Justifications used with `expect`/`unreachable!` below: the lexer has
// already validated the characters, so these failure paths indicate a bug.
const FLOAT_PARSE_EXPECT_REASON: &str =
    "Float parsing errors are handled by the lexer, and floats cannot overflow.";
const NUMBER_TYPE_WILDCARD_UNREACHABLE_REASON: &str =
    "Lexer guarantees `NumberType`'s slice to be included one of the match arms.";
/// Parses a whole token stream into an [`Ast`]: a sequence of expressions
/// separated by semicolons (the final semicolon is optional).
pub fn parse(tokens: &mut TokenStream, environment: &Environment) -> Result<Ast, ParseError> {
    let mut statements = Vec::new();
    // Keep consuming statements until the token stream is exhausted.
    while tokens.peek().is_some() {
        statements.push(parse_expression(tokens, environment)?);
        // Every statement must be followed by a semicolon, or be the last one.
        match tokens.next() {
            None => break,
            Some(Ok(Token::SemiColon)) => {}
            Some(Ok(token)) => return Err(ParseError::ExpectedEndline(tokens.span().wrap(token))),
            Some(Err(FailedToLexCharacter)) => {
                let slice = tokens.slice().to_string();
                return Err(ParseError::FailedToLexCharacters(tokens.span().wrap(slice)));
            }
        }
    }
    Ok(statements)
}
/// Parses a single expression: rejects (not-yet-supported) loops, then parses
/// an additive expression, optionally followed by `= value` (assignment).
fn parse_expression(
    tokens: &mut TokenStream,
    environment: &Environment,
) -> Result<Spanned<Expression>, ParseError> {
    // Loops aren't implemented yet; report them with a dedicated error.
    let loop_kind = match tokens.peek() {
        Some(Ok(Token::Loop)) => Some("infinite"),
        Some(Ok(Token::While)) => Some("while"),
        Some(Ok(Token::For)) => Some("for"),
        _ => None,
    };
    if let Some(ty) = loop_kind {
        return Err(ParseError::UnsupportedLoop {
            ty,
            span: tokens.peek_span(),
        });
    }
    match tokens.peek() {
        Some(Ok(_)) => {
            let expression = parse_additive(tokens, environment)?;
            // A trailing `=` turns the expression into a variable assignment.
            if let Some(Ok(Token::Equals)) = tokens.peek() {
                parse_var_assign(expression, tokens, environment)
            } else {
                Ok(expression)
            }
        }
        Some(Err(FailedToLexCharacter)) => Err(ParseError::FailedToLexCharacters(
            tokens.peek_span().wrap(tokens.slice().to_string()),
        )),
        None => Err(ParseError::ExpectedMoreTokens(tokens.peek_span())),
    }
}
/// Parses a `{ ... }` block of statements into an [`Ast`].
/// Currently unused (hence the leading underscore) — presumably kept for
/// future block-expression support; confirm before removing.
fn _parse_block(tokens: &mut TokenStream, environment: &Environment) -> Result<Ast, ParseError> {
    expect!(tokens, Token::LeftBracket);
    let ast = parse(tokens, environment)?;
    expect!(tokens, Token::RightBracket);
    Ok(ast)
}
/// Parses a left-associative chain of `+`/`-` operations over
/// multiplicative sub-expressions.
fn parse_additive(
    tokens: &mut TokenStream,
    environment: &Environment,
) -> Result<Spanned<Expression>, ParseError> {
    let mut left = parse_multiplicitive(tokens, environment)?;
    loop {
        // Decide the operator from the peeked token; stop on anything else.
        let operator = match tokens.peek() {
            Some(Ok(Token::Plus)) => Operator::Add,
            Some(Ok(Token::Minus)) => Operator::Sub,
            _ => break,
        };
        tokens.next(); // consume the operator token
        let right = parse_multiplicitive(tokens, environment)?;
        let span = left.span.start..right.span.end;
        left = Spanned {
            span,
            value: Expression::BinaryOp {
                left: Box::new(left),
                operator,
                right: Box::new(right),
            },
        };
    }
    Ok(left)
}
/// Parses a left-associative chain of `*`, `/` and `%` operations over
/// value sub-expressions. Binds tighter than [`parse_additive`].
fn parse_multiplicitive(
    tokens: &mut TokenStream,
    environment: &Environment,
) -> Result<Spanned<Expression>, ParseError> {
    let mut left = parse_value(tokens, environment)?;
    loop {
        // Decide the operator from the peeked token; stop on anything else.
        let operator = match tokens.peek() {
            Some(Ok(Token::Asterisk)) => Operator::Mul,
            Some(Ok(Token::Slash)) => Operator::Div,
            Some(Ok(Token::Modulo)) => Operator::Mod,
            _ => break,
        };
        tokens.next(); // consume the operator token
        let right = parse_value(tokens, environment)?;
        let span = left.span.start..right.span.end;
        left = Spanned {
            span,
            value: Expression::BinaryOp {
                left: Box::new(left),
                operator,
                right: Box::new(right),
            },
        };
    }
    Ok(left)
}
/// Parses a "value": a literal, possibly followed by a chain of member
/// accesses (`expr.field` / `expr.0`).
///
/// Fixes relative to the previous revision:
/// - removed a leftover `dbg!` debug print in the function-call path;
/// - `InvalidSuffixForFloat` now reports the suffix (`peek_slice`/`peek_span`)
///   instead of the float literal before it;
/// - the span of a suffixed float now starts at the number's start
///   (`span().start`), matching the `start_span` variable's intent.
fn parse_value(
    tokens: &mut TokenStream,
    environment: &Environment,
) -> Result<Spanned<Expression>, ParseError> {
    /// Parses a literal (value without member expressions)
    fn parse_literal(
        tokens: &mut TokenStream,
        environment: &Environment,
    ) -> Result<Spanned<Expression>, ParseError> {
        match tokens.next() {
            // `()` is the none value, `(expr)` is grouping, and
            // `(a, b, ...)` is a tuple.
            Some(Ok(Token::LeftParen)) => {
                let start = tokens.span().start;
                if let Some(Ok(Token::RightParen)) = tokens.peek() {
                    tokens.next();
                    Ok(Spanned {
                        span: start..tokens.span().end,
                        value: Expression::None,
                    })
                } else {
                    let expr = parse_expression(tokens, environment)?;
                    if let Some(Ok(Token::Comma)) = tokens.peek() {
                        let mut tuple = vec![expr];
                        while let Some(Ok(Token::Comma)) = tokens.peek() {
                            tokens.next();
                            let expr = parse_expression(tokens, environment)?;
                            tuple.push(expr);
                        }
                        expect!(tokens, Token::RightParen);
                        Ok(Spanned {
                            span: start..tokens.span().end,
                            value: Expression::Tuple(tuple),
                        })
                    } else {
                        expect!(tokens, Token::RightParen);
                        Ok(expr)
                    }
                }
            }
            // An identifier may start a struct tuple `Name(...)`, a struct
            // object `Name { ... }`, a function call, or a plain variable.
            Some(Ok(Token::Identifier)) => {
                let start = tokens.span().start;
                let name = tokens.slice().to_string();
                match tokens.peek() {
                    Some(Ok(Token::LeftParen)) => {
                        tokens.next();
                        let expr = parse_expression(tokens, environment)?;
                        let mut tuple = vec![expr];
                        while let Some(Ok(Token::Comma)) = tokens.peek() {
                            tokens.next();
                            let expr = parse_expression(tokens, environment)?;
                            tuple.push(expr);
                        }
                        expect!(tokens, Token::RightParen);
                        Ok(Spanned {
                            span: start..tokens.span().end,
                            value: Expression::StructTuple { name, tuple },
                        })
                    }
                    Some(Ok(Token::LeftBracket)) => {
                        tokens.next();
                        let map = parse_object(tokens, environment)?;
                        // NOTE(review): this span only covers the final token;
                        // `start..tokens.span().end` may have been intended.
                        Ok(Spanned {
                            span: tokens.span(),
                            value: Expression::StructObject { name, map },
                        })
                    }
                    _ => {
                        // Calls to registered functions are parenthesis-free:
                        // consume as many expressions as the function's arity.
                        if let Some(Function { argument_count, .. }) =
                            environment.get_function(&name)
                        {
                            let mut arguments = Vec::new();
                            for _ in 0..(*argument_count) {
                                let expr = parse_expression(tokens, environment)?;
                                arguments.push(expr);
                            }
                            Ok(Spanned {
                                span: start..tokens.span().end,
                                value: Expression::Function { name, arguments },
                            })
                        } else {
                            Ok(tokens.span().wrap(Expression::Variable(name)))
                        }
                    }
                }
            }
            Some(Ok(Token::LeftBracket)) => {
                let map = parse_object(tokens, environment)?;
                Ok(Spanned {
                    span: tokens.span(),
                    value: Expression::Object(map),
                })
            }
            Some(Ok(Token::String)) => {
                // Strip the surrounding quote characters from the lexed slice.
                let slice = tokens.slice();
                let string = slice[1..slice.len() - 1].to_string();
                Ok(tokens.span().wrap(Expression::String(string)))
            }
            Some(Ok(Token::Minus)) => {
                let expr = parse_literal(tokens, environment)?;
                Ok(tokens.span().wrap(Expression::UnaryOp(Box::new(expr))))
            }
            Some(Ok(Token::Ampersand)) => {
                let expr = parse_literal(tokens, environment)?;
                Ok(tokens.span().wrap(Expression::Borrow(Box::new(expr))))
            }
            Some(Ok(Token::Asterisk)) => {
                let expr = parse_literal(tokens, environment)?;
                Ok(tokens.span().wrap(Expression::Dereference(Box::new(expr))))
            }
            Some(Ok(Token::IntegerNumber)) => {
                parse_number(tokens).map(|s| s.map(Expression::Number))
            }
            Some(Ok(Token::FloatNumber)) => {
                // Floats only accept `f32`/`f64` suffixes; integer suffixes
                // are reported as errors.
                if let Some(Ok(Token::NumberType)) = tokens.peek() {
                    let number: Number = match tokens.peek_slice() {
                        "u8" | "u16" | "u32" | "u64" | "usize" | "i8" | "i16" | "i32" | "i64"
                        | "isize" => Err(ParseError::InvalidSuffixForFloat(
                            // Report the suffix itself, not the float before it.
                            tokens.peek_span().wrap(tokens.peek_slice().to_string()),
                        ))?,
                        "f32" => {
                            Number::f32(tokens.slice().parse().expect(FLOAT_PARSE_EXPECT_REASON))
                        }
                        "f64" => {
                            Number::f64(tokens.slice().parse().expect(FLOAT_PARSE_EXPECT_REASON))
                        }
                        _ => unreachable!("{NUMBER_TYPE_WILDCARD_UNREACHABLE_REASON}"),
                    };
                    // Span the whole literal: from the number to its suffix.
                    let start_span = tokens.span().start;
                    tokens.next();
                    Ok(Spanned {
                        span: start_span..tokens.span().end,
                        value: Expression::Number(number),
                    })
                } else {
                    let number = Number::Float(tokens.slice().parse().unwrap());
                    Ok(Spanned {
                        span: tokens.span(),
                        value: Expression::Number(number),
                    })
                }
            }
            Some(Ok(Token::True)) => Ok(tokens.span().wrap(Expression::Boolean(true))),
            Some(Ok(Token::False)) => Ok(tokens.span().wrap(Expression::Boolean(false))),
            Some(Ok(token)) => Err(ParseError::ExpectedLiteral(tokens.span().wrap(token))),
            Some(Err(FailedToLexCharacter)) => Err(ParseError::FailedToLexCharacters(
                tokens.span().wrap(tokens.slice().to_string()),
            )),
            None => Err(ParseError::ExpectedMoreTokens(tokens.span())),
        }
    }
    let mut expr = parse_literal(tokens, environment)?;
    // If there's a dot after the expression, do a member expression:
    while let Some(Ok(Token::Dot)) = tokens.peek() {
        tokens.next(); // Skip the dot
        match tokens.next() {
            Some(Ok(Token::Identifier)) => {
                let right = tokens.slice().to_string();
                expr = Spanned {
                    span: expr.span.start..tokens.span().end,
                    value: Expression::Member {
                        left: Box::new(expr),
                        right: tokens.span().wrap(Access::Field(right)),
                    },
                };
            }
            Some(Ok(Token::IntegerNumber)) => {
                let right = tokens.slice().parse().map_err(map_parseint_error(
                    tokens.span(),
                    tokens.slice(),
                    "usize",
                ))?;
                expr = Spanned {
                    span: expr.span.start..tokens.span().end,
                    value: Expression::Member {
                        left: Box::new(expr),
                        right: tokens.span().wrap(Access::TupleIndex(right)),
                    },
                };
            }
            Some(Ok(token)) => {
                return Err(ParseError::ExpectedIndexer {
                    got: token,
                    span: tokens.span(),
                })
            }
            Some(Err(FailedToLexCharacter)) => {
                return Err(ParseError::FailedToLexCharacters(
                    tokens.span().wrap(tokens.slice().to_string()),
                ))
            }
            None => return Err(ParseError::ExpectedMoreTokens(tokens.span())),
        }
    }
    Ok(expr)
}
/// Builds a closure that converts a [`std::num::ParseIntError`] into a
/// spanned [`ParseError`], attributing it to `slice` (the offending text)
/// and `number_kind` (the target integer type's name).
///
/// Only overflow kinds are convertible: the lexer only produces digit
/// sequences, so `Empty`/`InvalidDigit`/`Zero` indicate a bug.
fn map_parseint_error<'s>(
    span: Span,
    slice: &'s str,
    number_kind: &'static str,
) -> impl FnOnce(std::num::ParseIntError) -> ParseError + 's {
    move |error| match error.kind() {
        IntErrorKind::PosOverflow => ParseError::PositiveIntOverflow {
            span,
            number: slice.to_string(),
            number_kind,
        },
        IntErrorKind::NegOverflow => ParseError::NegativeIntOverflow {
            span,
            number: slice.to_string(),
            number_kind,
        },
        IntErrorKind::Empty | IntErrorKind::InvalidDigit | IntErrorKind::Zero => unreachable!(
            "Lexer makes sure other errors aren't possible. Create a bevy_dev_console issue!"
        ),
        _ => unimplemented!(), // Required due to IntErrorKind being #[non_exhaustive]
    }
}
/// Parses an integer literal, honoring an optional explicit type suffix
/// (`42u8`, `7i64`, `1f32`, ...). Without a suffix, the value is kept as a
/// generic [`Number::Integer`] for later downcasting.
///
/// Fixes relative to the previous revision: the `"i64"` suffix arm was
/// missing (it would hit `unreachable!` even though the lexer produces it
/// and [`Number::i64`] exists), and the literal's span now starts at the
/// number (`span().start`) instead of at its end.
///
/// # Errors
/// Returns an overflow [`ParseError`] when the literal does not fit in the
/// suffixed type.
fn parse_number(tokens: &mut TokenStream) -> Result<Spanned<Number>, ParseError> {
    if let Some(Ok(Token::NumberType)) = tokens.peek() {
        let number: Number = match tokens.peek_slice() {
            "u8" => Number::u8(tokens.slice().parse().map_err(map_parseint_error(
                tokens.span(),
                tokens.slice(),
                "u8",
            ))?),
            "u16" => Number::u16(tokens.slice().parse().map_err(map_parseint_error(
                tokens.span(),
                tokens.slice(),
                "u16",
            ))?),
            "u32" => Number::u32(tokens.slice().parse().map_err(map_parseint_error(
                tokens.span(),
                tokens.slice(),
                "u32",
            ))?),
            "u64" => Number::u64(tokens.slice().parse().map_err(map_parseint_error(
                tokens.span(),
                tokens.slice(),
                "u64",
            ))?),
            "usize" => Number::usize(tokens.slice().parse().map_err(map_parseint_error(
                tokens.span(),
                tokens.slice(),
                "usize",
            ))?),
            "i8" => Number::i8(tokens.slice().parse().map_err(map_parseint_error(
                tokens.span(),
                tokens.slice(),
                "i8",
            ))?),
            "i16" => Number::i16(tokens.slice().parse().map_err(map_parseint_error(
                tokens.span(),
                tokens.slice(),
                "i16",
            ))?),
            "i32" => Number::i32(tokens.slice().parse().map_err(map_parseint_error(
                tokens.span(),
                tokens.slice(),
                "i32",
            ))?),
            "i64" => Number::i64(tokens.slice().parse().map_err(map_parseint_error(
                tokens.span(),
                tokens.slice(),
                "i64",
            ))?),
            "isize" => Number::isize(tokens.slice().parse().map_err(map_parseint_error(
                tokens.span(),
                tokens.slice(),
                "isize",
            ))?),
            "f32" => Number::f32(tokens.slice().parse().expect(FLOAT_PARSE_EXPECT_REASON)),
            "f64" => Number::f64(tokens.slice().parse().expect(FLOAT_PARSE_EXPECT_REASON)),
            _ => unreachable!("{}", NUMBER_TYPE_WILDCARD_UNREACHABLE_REASON),
        };
        // Span the whole literal: from the number to its suffix.
        let start_span = tokens.span().start;
        tokens.next();
        Ok(Spanned {
            span: start_span..tokens.span().end,
            value: number,
        })
    } else {
        let number = Number::Integer(tokens.slice().parse().unwrap());
        Ok(Spanned {
            span: tokens.span(),
            value: number,
        })
    }
}
/// Parses the right-hand side of a variable assignment. The caller has
/// already parsed `name` and peeked an `=` token.
fn parse_var_assign(
    name: Spanned<Expression>,
    tokens: &mut TokenStream<'_>,
    environment: &Environment,
) -> Result<Spanned<Expression>, ParseError> {
    // Consume the `=` the caller peeked.
    tokens.next();
    let value = parse_additive(tokens, environment)?;
    let span = name.span.start..value.span.end;
    Ok(Spanned {
        span,
        value: Expression::VarAssign {
            name: Box::new(name),
            value: Box::new(value),
        },
    })
}
/// Parses the body of an object literal (the opening `{` has already been
/// consumed by the caller).
///
/// - `{}`
/// - `{test: 4}`
/// - `{str: "sup!", num: -6.2}`
fn parse_object(
    tokens: &mut TokenStream,
    environment: &Environment,
) -> Result<HashMap<String, Spanned<Expression>>, ParseError> {
    let mut map = HashMap::new();
    // Each entry is `identifier: expression`, separated by commas.
    while let Some(Ok(Token::Identifier)) = tokens.peek() {
        tokens.next();
        let key = tokens.slice().to_string();
        expect!(tokens, Token::Colon);
        let value = parse_expression(tokens, environment)?;
        map.insert(key, value);
        match tokens.peek() {
            Some(Ok(Token::Comma)) => {
                tokens.next();
            }
            Some(Ok(Token::RightBracket)) => break,
            token => {
                return Err(ParseError::ExpectedObjectContinuation(
                    tokens.span().wrap(token.clone()),
                ))
            }
        }
    }
    expect!(tokens, Token::RightBracket);
    Ok(map)
}
#[cfg(test)]
mod tests {
    use super::super::lexer::TokenStream;
    use super::super::Environment;
    use super::parse;
    /// A chained arithmetic assignment should parse without errors.
    #[test]
    fn var_assign() {
        let mut tokens = TokenStream::new("x = 1 + 2 - 30 + y");
        let env = Environment::default();
        assert!(parse(&mut tokens, &env).is_ok());
        // TODO: figure out how to assert ast
    }
}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/src/builtin_parser/completions.rs | src/builtin_parser/completions.rs | use bevy::prelude::*;
use super::runner::environment::Variable;
use super::Environment;
/// Stores the names of variables and functions for fast async access.
#[derive(Resource)]
pub struct EnvironmentCache {
    /// Names of every function registered in the environment.
    pub function_names: Vec<String>,
    /// Names of every (unmoved) variable in the environment.
    pub variable_names: Vec<String>,
}
impl FromWorld for EnvironmentCache {
    /// Snapshots the environment's names if the [`Environment`] resource
    /// exists; otherwise starts with an empty cache.
    fn from_world(world: &mut World) -> Self {
        match world.get_non_send_resource::<Environment>() {
            Some(environment) => store_in_cache(environment),
            None => Self::empty(),
        }
    }
}
impl EnvironmentCache {
    /// Creates a cache with no function or variable names recorded.
    pub const fn empty() -> Self {
        EnvironmentCache {
            variable_names: Vec::new(),
            function_names: Vec::new(),
        }
    }
}
pub fn store_in_cache(environment: &Environment) -> EnvironmentCache {
let mut function_names = Vec::new();
let mut variable_names = Vec::new();
store_in_cache_vec(environment, &mut function_names, &mut variable_names);
EnvironmentCache {
function_names,
variable_names,
}
}
/// Collects function and variable names from `environment` and all of its
/// parent scopes into the two output vectors. Moved variables are skipped.
fn store_in_cache_vec(
    environment: &Environment,
    function_names: &mut Vec<String>,
    variable_names: &mut Vec<String>,
) {
    // Walk the scope chain iteratively, recording names at every level.
    let mut scope = environment;
    loop {
        for (name, variable) in &scope.variables {
            match variable {
                Variable::Function(_) => function_names.push(name.clone()),
                Variable::Unmoved(_) => variable_names.push(name.clone()),
                Variable::Moved => {}
            }
        }
        match &scope.parent {
            Some(parent) => scope = parent,
            None => break,
        }
    }
}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/src/builtin_parser/runner/environment.rs | src/builtin_parser/runner/environment.rs | //! Environment and function registration
use std::collections::HashMap;
use std::fmt::Debug;
use crate::builtin_parser::SpanExtension;
use bevy::ecs::world::World;
use bevy::log::warn;
use bevy::reflect::TypeRegistration;
use logos::Span;
use super::super::parser::Expression;
use super::super::Spanned;
use super::error::EvalError;
use super::unique_rc::UniqueRc;
use super::{eval_expression, stdlib, EvalParams, Value};
/// Macro for mass registering functions.
///
/// ```
/// fn a() {}
/// fn b() {}
/// fn c() {}
///
/// # use bevy_dev_console::register;
/// # let mut environment = bevy_dev_console::builtin_parser::Environment::default();
/// register!(environment => {
///     fn a;
///     fn b;
///     fn c;
/// });
/// ```
#[macro_export]
macro_rules! register {
    // Single-function form: `register!(env => fn name;)`.
    {
        $environment:expr => fn $fn_name:ident;
    } => {
        $environment
            .register_fn(stringify!($fn_name), $fn_name)
    };
    // Batch form: each entry may be renamed with `as "alias"`.
    {
        $environment:expr => {
            $(
                fn $fn_name:ident $(as $renamed:expr)?;
            )*
        }
    } => {
        $(
            #[allow(unused_mut, unused_assignments)]
            let mut name = stringify!($fn_name);
            $(name = $renamed;)?
            $environment.register_fn(name, $fn_name);
        )*
    };
}
/// Newtype around [`Result`] that sidesteps trait-coherence conflicts when
/// converting function return values (plain values and `Result`s alike)
/// into `Result<Value, EvalError>`.
pub(super) struct ResultContainer<T, E>(pub Result<T, E>);
// A plain value converts into a successful container.
impl<T: Into<Value>> From<T> for ResultContainer<Value, EvalError> {
    fn from(value: T) -> Self {
        ResultContainer(Ok(value.into()))
    }
}
// Unwrap the newtype back into an ordinary `Result`.
impl<T, E> From<ResultContainer<T, E>> for Result<T, E> {
    fn from(ResultContainer(result): ResultContainer<T, E>) -> Self {
        result
    }
}
// A `Result` converts by mapping its success value into `Value`.
impl<T: Into<Value>, E> From<Result<T, E>> for ResultContainer<Value, E> {
    fn from(result: Result<T, E>) -> Self {
        ResultContainer(result.map(|v| v.into()))
    }
}
/// A parameter in a [`Function`].
pub trait FunctionParam: Sized {
    /// TODO: Add `Self` as default when <https://github.com/rust-lang/rust/issues/29661> gets merged
    type Item<'world, 'env, 'reg>;
    /// Whether this parameter requires a [`Spanned<Value>`].
    /// If `false` then `FunctionParam::get`'s `value` will be [`None`], and vice versa.
    const USES_VALUE: bool;
    /// Extracts this parameter from the evaluation context and, when
    /// [`USES_VALUE`](Self::USES_VALUE) is `true`, from an evaluated argument.
    fn get<'world, 'env, 'reg>(
        value: Option<Spanned<Value>>,
        world: &mut Option<&'world mut World>,
        environment: &mut Option<&'env mut Environment>,
        registrations: &'reg [&'reg TypeRegistration],
    ) -> Result<Self::Item<'world, 'env, 'reg>, EvalError>;
}
/// The type-erased closure form of a registered function: takes the raw
/// argument expressions plus the evaluation context.
pub type FunctionType = dyn FnMut(Vec<Spanned<Expression>>, EvalParams) -> Result<Value, EvalError>;
/// A function registered in the [`Environment`].
pub struct Function {
    // Number of arguments the function consumes from the parser.
    pub argument_count: usize,
    // The boxed, type-erased callable body.
    pub body: Box<FunctionType>,
}
impl Debug for Function {
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        // `body` is an opaque closure, so only the arity is shown.
        let mut builder = f.debug_struct("Function");
        builder.field("argument_count", &self.argument_count);
        builder.finish_non_exhaustive()
    }
}
/// Trait that represents a [`Fn`] that can be turned into a [`Function`].
pub trait IntoFunction<T> {
    /// Performs the conversion into a [`Function`].
    fn into_function(self) -> Function;
}
// Generates an `IntoFunction` impl for closures of N parameters, where each
// parameter implements `FunctionParam`. The generated body evaluates the
// argument expressions eagerly, then extracts each parameter from either an
// evaluated value or the evaluation context.
macro_rules! impl_into_function {
    (
        $($(
            $params:ident
        ),+)?
    ) => {
        #[allow(non_snake_case)]
        impl<F: 'static $(, $($params: FunctionParam),+ )?, R> IntoFunction<( $($($params,)+)? )> for F
        where
            for<'a, 'world, 'env, 'reg> &'a mut F:
                FnMut( $($($params),*)? ) -> R +
                FnMut( $($(<$params as FunctionParam>::Item<'world, 'env, 'reg>),*)? ) -> R,
            R: Into<ResultContainer<Value, EvalError>>,
        {
            fn into_function(mut self) -> Function {
                #[allow(unused_variables, unused_mut)]
                let body = Box::new(move |args: Vec<Spanned<Expression>>, params: EvalParams| {
                    let EvalParams {
                        world,
                        environment,
                        registrations,
                    } = params;
                    // Evaluate every argument expression up front, keeping spans.
                    let mut args = args.into_iter().map(|expr| {
                        Ok(Spanned {
                            span: expr.span.clone(),
                            value: eval_expression(
                                expr,
                                EvalParams {
                                    world,
                                    environment,
                                    registrations,
                                }
                            )?
                        })
                    }).collect::<Result<Vec<_>, EvalError>>()?.into_iter();
                    let world = &mut Some(world);
                    let environment = &mut Some(environment);
                    // Helper that pins down the `&mut F` closure's parameter types.
                    #[allow(clippy::too_many_arguments)]
                    fn call_inner<R: Into<ResultContainer<Value, EvalError>>, $($($params),*)?>(
                        mut f: impl FnMut($($($params),*)?) -> R,
                        $($($params: $params),*)?
                    ) -> R {
                        f($($($params),*)?)
                    }
                    call_inner(
                        &mut self,
                        $($({
                            // Value-consuming params take the next evaluated arg;
                            // context params (world/env) take `None`.
                            let arg = if $params::USES_VALUE {
                                Some(args.next().unwrap())
                            } else {
                                None
                            };
                            let res = $params::get(
                                arg,
                                world,
                                environment,
                                registrations
                            )?;
                            res
                        }),+)?
                    )
                    .into().into()
                });
                // Arity = number of params that consume an evaluated value.
                let argument_count = $($(
                    $params::USES_VALUE as usize +
                )+)? 0;
                Function { body, argument_count }
            }
        }
    }
}
// Generate `IntoFunction` impls for functions of 0 through 8 parameters.
impl_into_function!();
impl_into_function!(T1);
impl_into_function!(T1, T2);
impl_into_function!(T1, T2, T3);
impl_into_function!(T1, T2, T3, T4);
impl_into_function!(T1, T2, T3, T4, T5);
impl_into_function!(T1, T2, T3, T4, T5, T6);
impl_into_function!(T1, T2, T3, T4, T5, T6, T7);
impl_into_function!(T1, T2, T3, T4, T5, T6, T7, T8);
/// A variable inside the [`Environment`].
#[derive(Debug)]
pub enum Variable {
    /// A value that still owns its data and can be read or moved out.
    Unmoved(UniqueRc<Value>),
    /// A value whose data has been moved out; further use is an error.
    Moved,
    /// A registered function rather than a plain value.
    Function(Function),
}
/// The environment stores all variables and functions.
pub struct Environment {
    /// The enclosing scope, if any; name lookups fall back to it.
    pub(crate) parent: Option<Box<Environment>>,
    /// Variables and functions declared in this scope, keyed by name.
    pub(crate) variables: HashMap<String, Variable>,
}
impl Default for Environment {
    /// A fresh root scope, pre-populated with the standard library.
    fn default() -> Self {
        let mut environment = Self {
            variables: HashMap::new(),
            parent: None,
        };
        stdlib::register(&mut environment);
        environment
    }
}
impl Environment {
    /// Set a variable.
    pub fn set(&mut self, name: impl Into<String>, value: UniqueRc<Value>) {
        self.variables.insert(name.into(), Variable::Unmoved(value));
    }
    /// Returns a reference to a function if it exists.
    pub fn get_function(&self, name: &str) -> Option<&Function> {
        // The span is irrelevant here; any lookup failure maps to `None`.
        let (env, _) = self.resolve(name, 0..0).ok()?;
        match env.variables.get(name) {
            Some(Variable::Function(function)) => Some(function),
            _ => None,
        }
    }
    /// Runs `function` with mutable access to both the environment and the
    /// registered function `name`. The function is temporarily swapped out
    /// (marked `Moved`) so the environment can be borrowed mutably at the
    /// same time, and is restored afterwards. Panics if `name` is not a
    /// registered function.
    pub(crate) fn function_scope<T>(
        &mut self,
        name: &str,
        function: impl FnOnce(&mut Self, &mut Function) -> T,
    ) -> T {
        let (env, _) = self.resolve_mut(name, 0..0).unwrap();
        let return_result;
        let var = env.variables.get_mut(name);
        let fn_obj = match var {
            Some(Variable::Function(_)) => {
                // Take the function out so `env` can be handed to the closure.
                let Variable::Function(mut fn_obj) =
                    std::mem::replace(var.unwrap(), Variable::Moved)
                else {
                    unreachable!()
                };
                return_result = function(env, &mut fn_obj);
                fn_obj
            }
            _ => unreachable!(),
        };
        // Put the function back where it was taken from.
        let var = env.variables.get_mut(name);
        let _ = std::mem::replace(var.unwrap(), Variable::Function(fn_obj));
        return_result
    }
    /// Returns a reference to a variable.
    pub fn get(&self, name: &str, span: Span) -> Result<&UniqueRc<Value>, EvalError> {
        let (env, span) = self.resolve(name, span)?;
        match env.variables.get(name) {
            Some(Variable::Unmoved(value)) => Ok(value),
            Some(Variable::Moved) => Err(EvalError::VariableMoved(span.wrap(name.to_string()))),
            Some(Variable::Function(_)) => Err(EvalError::ExpectedVariableGotFunction(
                span.wrap(name.to_owned()),
            )),
            None => Err(EvalError::VariableNotFound(span.wrap(name.to_string()))),
        }
    }
    /// "Moves" a variable, giving you ownership over it.
    ///
    /// However it will no longer be able to be used unless it's a [`Value::None`],
    /// [`Value::Boolean`], or [`Value::Number`] in which case it will be copied.
    pub fn move_var(&mut self, name: &str, span: Span) -> Result<Value, EvalError> {
        let (env, span) = self.resolve_mut(name, span)?;
        match env.variables.get_mut(name) {
            Some(Variable::Moved) => Err(EvalError::VariableMoved(span.wrap(name.to_string()))),
            Some(Variable::Function(_)) => Err(EvalError::ExpectedVariableGotFunction(
                span.wrap(name.to_owned()),
            )),
            Some(variable_reference) => {
                let Variable::Unmoved(reference) = variable_reference else {
                    unreachable!()
                };
                // This is a pretty bad way of handling something similar to rust's [`Copy`] trait but whatever.
                match &*reference.borrow_inner().borrow() {
                    Value::None => return Ok(Value::None),
                    Value::Boolean(bool) => return Ok(Value::Boolean(*bool)),
                    Value::Number(number) => return Ok(Value::Number(*number)),
                    _ => {}
                };
                // Non-copyable values are genuinely moved out of the scope.
                let Variable::Unmoved(value) =
                    std::mem::replace(variable_reference, Variable::Moved)
                else {
                    unreachable!()
                };
                Ok(value.into_inner())
            }
            None => Err(EvalError::VariableNotFound(span.wrap(name.to_string()))),
        }
    }
    /// Finds the innermost scope (starting from `self`) that declares `name`.
    fn resolve(&self, name: &str, span: Span) -> Result<(&Self, Span), EvalError> {
        if self.variables.contains_key(name) {
            return Ok((self, span));
        }
        match &self.parent {
            Some(parent) => parent.resolve(name, span),
            None => Err(EvalError::VariableNotFound(span.wrap(name.to_string()))),
        }
    }
    /// Mutable counterpart of [`resolve`](Self::resolve).
    fn resolve_mut(&mut self, name: &str, span: Span) -> Result<(&mut Self, Span), EvalError> {
        if self.variables.contains_key(name) {
            return Ok((self, span));
        }
        match &mut self.parent {
            Some(parent) => parent.resolve_mut(name, span),
            None => Err(EvalError::VariableNotFound(span.wrap(name.to_string()))),
        }
    }
    /// Registers a function for use inside the language.
    ///
    /// All parameters must implement [`FunctionParam`].
    /// There is a limit of 8 parameters.
    ///
    /// The return value of the function must implement [`Into<Value>`]
    ///
    /// You should take a look at the [Standard Library](super::stdlib) for examples.
    pub fn register_fn<T>(
        &mut self,
        name: impl Into<String>,
        function: impl IntoFunction<T>,
    ) -> &mut Self {
        let name = name.into();
        // Re-registration silently overwrites; warn so it's noticeable.
        if self.variables.contains_key(&name) {
            warn!("Function {name} declared twice.");
        }
        self.variables
            .insert(name, Variable::Function(function.into_function()));
        self
    }
    /// Iterate over all the variables and functions in the current scope of the environment.
    ///
    /// Does not include variables and functions from higher scopes.
    pub fn iter(&self) -> std::collections::hash_map::Iter<String, Variable> {
        self.variables.iter()
    }
}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/src/builtin_parser/runner/stdlib.rs | src/builtin_parser/runner/stdlib.rs | use crate::builtin_parser::runner::environment::Variable;
use crate::register;
use bevy::ecs::world::World;
use bevy::log::info;
use bevy::reflect::TypeRegistration;
use std::cell::Ref;
use std::ops::Range;
mod math;
use super::error::EvalError;
use super::{Environment, Spanned, Value};
/// The `print` builtin: logs a value at info level. Strings are printed
/// verbatim; everything else goes through [`Value::try_format`].
fn print(
    value: Spanned<Value>,
    world: &mut World,
    registrations: &[&TypeRegistration],
) -> Result<(), EvalError> {
    match value.value {
        Value::String(string) => info!("{string}"),
        other => {
            let formatted = other.try_format(value.span, world, registrations)?;
            info!("{formatted}");
        }
    }
    Ok(())
}
/// The `dbg` builtin: logs the raw [`Value`] enum representation of `any`.
fn dbg(any: Value) {
    info!("Value::{any:?}");
}
/// The `ref_depth` builtin: counts how many layers of [`Value::Reference`]
/// must be followed before reaching a non-reference value (0 for a
/// non-reference).
///
/// The previous revision duplicated the reference-following logic between
/// the owned entry point and the borrowed recursive helper; both now share
/// one `&Value`-taking helper.
///
/// # Errors
/// Returns [`EvalError::ReferenceToMovedData`] if any reference in the
/// chain points to moved-out data.
fn ref_depth(Spanned { span, value }: Spanned<Value>) -> Result<usize, EvalError> {
    // Shared by the owned entry point and each recursive step.
    fn depth_of(value: &Value, span: Range<usize>) -> Result<usize, EvalError> {
        match value {
            Value::Reference(reference) => {
                let target = reference
                    .upgrade()
                    .ok_or(EvalError::ReferenceToMovedData(span.clone()))?;
                let borrowed = target.borrow();
                Ok(depth_of(&borrowed, span)? + 1)
            }
            _ => Ok(0),
        }
    }
    depth_of(&value, span)
}
/// The `print_env` builtin: logs every variable in the current scope of
/// `env` along with its (debug-formatted) value. Functions are skipped.
fn print_env(env: &mut Environment) {
    for (name, variable) in env.iter() {
        match variable {
            Variable::Function(_) => {}
            Variable::Unmoved(value) => info!("{name}: {:?}", value.borrow_inner().borrow()),
            Variable::Moved => info!("{name}: Moved"),
        }
    }
}
/// Implements the `typeof` builtin: the kind of a [`Value`] as text.
fn typeof_value(value: Value) -> String {
    String::from(value.kind())
}
/// Disposes of a [`Value`].
/// Takes ownership of the value and immediately discards it.
fn drop(_: Value) {}
/// Registers the standard library's builtin functions on `environment`.
pub fn register(environment: &mut Environment) {
    // Math builtins live in their own module.
    math::register(environment);
    register!(environment => {
        fn print;
        fn dbg;
        fn ref_depth;
        fn drop;
        fn print_env;
        fn typeof_value as "typeof";
    });
}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/src/builtin_parser/runner/value.rs | src/builtin_parser/runner/value.rs | use std::collections::HashMap;
use std::fmt::Debug;
use crate::builtin_parser::number::Number;
use crate::builtin_parser::{Environment, StrongRef, UniqueRc};
use super::super::Spanned;
use super::environment::FunctionParam;
use super::error::EvalError;
use super::reflection::{CreateRegistration, IntoResource};
use super::unique_rc::WeakRef;
use bevy::ecs::world::World;
use bevy::reflect::{
DynamicStruct, DynamicTuple, GetPath, Reflect, ReflectRef, TypeInfo, TypeRegistration,
VariantInfo, VariantType,
};
use logos::Span;
/// A runtime value
#[derive(Debug, Clone)]
pub enum Value {
    /// Nothing at all
    None,
    /// A number.
    Number(Number),
    /// `true` or `false`.
    Boolean(bool),
    /// An owned string.
    String(String),
    /// A reference.
    ///
    /// References are very similar to rust's ownership and borrowing.
    /// We achieve this by storing every variable as a [`UniqueRc<T>`]
    /// (which is essentially just [`Rc<RefCell<T>>`] but having only
    /// the owner of the value have a strong reference, while every
    /// other value has a weak reference. This causes
    /// [`Rc::try_unwrap`] to succeed every time)
    Reference(WeakRef<Value>),
    /// A dynamic [`HashMap`].
    Object(HashMap<String, UniqueRc<Value>>),
    /// An [`Object`](Value::Object) with a name attached to it.
    StructObject {
        /// The name of the struct
        name: String,
        /// The [`Object`](Value::Object) [`HashMap`].
        map: HashMap<String, UniqueRc<Value>>,
    },
    /// A fixed size list of values that can have different types.
    Tuple(Box<[Spanned<UniqueRc<Value>>]>),
    /// A [`Tuple`](Value::Tuple) with a name attached to it.
    StructTuple {
        /// The name of the tuple
        name: String,
        /// The [`Object`](Value::Object) slice.
        tuple: Box<[Spanned<UniqueRc<Value>>]>,
    },
    /// A handle to a reflected resource (see [`IntoResource`]).
    Resource(IntoResource),
}
impl Value {
    /// Converts this value into a [`Box<dyn Reflect>`].
    ///
    /// `ty` is used for type inference.
    ///
    /// # Errors
    /// References and resources cannot be reflected and return dedicated
    /// [`EvalError`] variants.
    pub fn reflect(self, span: Span, ty: &str) -> Result<Box<dyn Reflect>, EvalError> {
        match self {
            Value::None => Ok(Box::new(())),
            Value::Number(number) => number.reflect(span, ty),
            Value::Boolean(boolean) => Ok(Box::new(boolean)),
            Value::String(string) => Ok(Box::new(string)),
            Value::Reference(_reference) => Err(EvalError::CannotReflectReference(span)),
            // Objects (named or not) become dynamic structs.
            Value::Object(object) | Value::StructObject { map: object, .. } => {
                let mut dyn_struct = DynamicStruct::default();
                for (name, value) in object {
                    dyn_struct.insert_boxed(&name, value.into_inner().reflect(span.clone(), ty)?);
                }
                Ok(Box::new(dyn_struct))
            }
            // Tuples (named or not) become dynamic tuples.
            Value::Tuple(tuple) | Value::StructTuple { tuple, .. } => {
                let mut dyn_tuple = DynamicTuple::default();
                for element in Vec::from(tuple).into_iter() {
                    dyn_tuple.insert_boxed(element.value.into_inner().reflect(element.span, ty)?);
                }
                Ok(Box::new(dyn_tuple))
            }
            Value::Resource(_) => Err(EvalError::CannotReflectResource(span)),
        }
    }
    /// Attempts to format this [`Value`].
    ///
    /// Returns an error if the [`Value`] is a reference to moved data.
    pub fn try_format(
        &self,
        span: Span,
        world: &World,
        registrations: &[&TypeRegistration],
    ) -> Result<String, EvalError> {
        const TAB: &str = " ";
        match self {
            // Fixed literal; no `format!` machinery needed.
            Value::None => Ok("()".to_string()),
            Value::Number(number) => Ok(format!("{number}")),
            Value::Boolean(bool) => Ok(format!("{bool}")),
            Value::String(string) => Ok(format!("\"{string}\"")),
            // Follow the reference if its target is still alive.
            Value::Reference(reference) => {
                if let Some(rc) = reference.upgrade() {
                    Ok(rc.borrow().try_format(span, world, registrations)?)
                } else {
                    Err(EvalError::ReferenceToMovedData(span))
                }
            }
            Value::Object(map) => {
                let mut string = String::new();
                string.push('{');
                for (key, value) in map {
                    string += &format!(
                        "\n{TAB}{key}: {},",
                        value.borrow_inner().borrow().try_format(
                            span.clone(),
                            world,
                            registrations
                        )?
                    );
                }
                if !map.is_empty() {
                    string.push('\n');
                }
                string.push('}');
                Ok(string)
            }
            Value::StructObject { name, map } => {
                let mut string = String::new();
                string += &format!("{name} {{");
                for (key, value) in map {
                    string += &format!(
                        "\n{TAB}{key}: {},",
                        value.borrow_inner().borrow().try_format(
                            span.clone(),
                            world,
                            registrations
                        )?
                    );
                }
                if !map.is_empty() {
                    string.push('\n');
                }
                string.push('}');
                Ok(string)
            }
            Value::Tuple(tuple) => {
                let mut string = String::new();
                string.push('(');
                for element in tuple.iter() {
                    string += &format!(
                        "\n{TAB}{},",
                        element.value.borrow_inner().borrow().try_format(
                            span.clone(),
                            world,
                            registrations
                        )?
                    );
                }
                if !tuple.is_empty() {
                    string.push('\n');
                }
                string.push(')');
                Ok(string)
            }
            Value::StructTuple { name, tuple } => {
                let mut string = String::new();
                string.push_str(name);
                string.push('(');
                for element in tuple.iter() {
                    string += &format!(
                        "\n{TAB}{},",
                        element.value.borrow_inner().borrow().try_format(
                            span.clone(),
                            world,
                            registrations
                        )?
                    );
                }
                if !tuple.is_empty() {
                    string.push('\n');
                }
                string.push(')');
                Ok(string)
            }
            Value::Resource(resource) => Ok(fancy_debug_print(resource, world, registrations)),
        }
    }
    /// Returns the kind of [`Value`] as a [string slice](str).
    /// You may want to use [`natural_kind`](Self::natural_kind)
    /// instead for more natural sounding error messages
    pub const fn kind(&self) -> &'static str {
        match self {
            Value::None => "none",
            Value::Number(number) => number.kind(),
            Value::Boolean(..) => "boolean",
            Value::String(..) => "string",
            Value::Reference(..) => "reference",
            Value::Object(..) => "object",
            Value::StructObject { .. } => "struct object",
            Value::Tuple(..) => "tuple",
            Value::StructTuple { .. } => "struct tuple",
            Value::Resource(..) => "resource",
        }
    }
    /// Returns the kind of [`Value`] as a [string slice](str) with an `a` or `an` prepended to it.
    /// Used for more natural sounding error messages.
    pub const fn natural_kind(&self) -> &'static str {
        match self {
            Value::None => "nothing",
            Value::Number(number) => number.natural_kind(),
            Value::Boolean(..) => "a boolean",
            Value::String(..) => "a string",
            Value::Reference(..) => "a reference",
            // Grammar fix: "an object", not "a object".
            Value::Object(..) => "an object",
            Value::StructObject { .. } => "a struct object",
            Value::Tuple(..) => "a tuple",
            Value::StructTuple { .. } => "a struct tuple",
            Value::Resource(..) => "a resource",
        }
    }
}
/// A massive function that takes in a type registration and the world and then
/// does all the hard work of printing out the type nicely.
fn fancy_debug_print(
resource: &IntoResource,
world: &World,
registrations: &[&TypeRegistration],
) -> String {
const TAB: &str = " ";
let registration = registrations.create_registration(resource.id);
let dyn_reflect = resource.ref_dyn_reflect(world, registration);
let reflect = dyn_reflect.reflect_path(resource.path.as_str()).unwrap();
fn debug_subprint(reflect: &dyn Reflect, indentation: usize) -> String {
let mut f = String::new();
let reflect_ref = reflect.reflect_ref();
let indentation_string = TAB.repeat(indentation);
match reflect_ref {
ReflectRef::Struct(struct_info) => {
f += "{\n";
for i in 0..struct_info.field_len() {
let field = struct_info.field_at(i).unwrap();
let field_name = struct_info.name_at(i).unwrap();
let field_value = debug_subprint(field, indentation + 1);
f += &format!(
"{indentation_string}{TAB}{field_name}: {} = {field_value},\n",
field.reflect_short_type_path(),
);
}
f += &indentation_string;
f += "}";
}
ReflectRef::TupleStruct(_) => todo!(),
ReflectRef::Tuple(tuple_info) => {
f += "(\n";
for field in tuple_info.iter_fields() {
let field_value = debug_subprint(field, indentation + 1);
f += &format!("{indentation_string}{TAB}{field_value},\n",);
}
f += &indentation_string;
f += ")";
}
ReflectRef::List(_) => todo!(),
ReflectRef::Array(_) => todo!(),
ReflectRef::Map(_) => todo!(),
ReflectRef::Enum(variant) => {
// Print out the enum types
f += variant.variant_name();
match variant.variant_type() {
VariantType::Struct => {
f += " {\n";
for field in variant.iter_fields() {
f += &format!(
"{indentation_string}{TAB}{}: {} = {},\n",
field.name().unwrap(),
field.value().reflect_short_type_path(),
debug_subprint(field.value(), indentation + 1)
);
}
f += &indentation_string;
f += "}";
}
VariantType::Tuple => {
f += "(\n";
for field in variant.iter_fields() {
f += &format!(
"{indentation_string}{TAB}{} = {},\n",
field.value().reflect_short_type_path(),
debug_subprint(field.value(), indentation + 1)
);
}
f += &indentation_string;
f += ")";
}
VariantType::Unit => {}
}
}
ReflectRef::Value(_) => {
f += &format!("{reflect:?}");
}
}
f
}
let mut f = String::new();
let reflect_ref = reflect.reflect_ref();
match reflect_ref {
ReflectRef::Struct(struct_info) => {
f += &format!("struct {} {{\n", struct_info.reflect_short_type_path());
for i in 0..struct_info.field_len() {
let field = struct_info.field_at(i).unwrap();
let field_name = struct_info.name_at(i).unwrap();
let field_value = debug_subprint(field, 1);
f += &format!(
"{TAB}{}: {} = {},\n",
field_name,
field.reflect_short_type_path(),
field_value
);
}
f += "}";
}
ReflectRef::TupleStruct(_) => todo!(),
ReflectRef::Tuple(_) => todo!(),
ReflectRef::List(_) => todo!(),
ReflectRef::Array(_) => todo!(),
ReflectRef::Map(_) => todo!(),
ReflectRef::Enum(set_variant_info) => {
// Print out the enum types
f += &format!("enum {} {{\n", set_variant_info.reflect_short_type_path());
let TypeInfo::Enum(enum_info) = registration.type_info() else {
unreachable!()
};
for variant in enum_info.iter() {
f += "\t";
f += variant.name();
match variant {
VariantInfo::Struct(variant) => {
f += " {\n";
for field in variant.iter() {
f += &format!(
"{TAB}{TAB}{}: {},\n",
field.name(),
field.type_path_table().short_path()
);
}
f += TAB;
f += "}";
}
VariantInfo::Tuple(variant) => {
f += "(";
let mut iter = variant.iter();
if let Some(first) = iter.next() {
f += &format!("{}", first.type_path_table().short_path());
for field in iter {
f += &format!(", {}", field.type_path_table().short_path());
}
}
f += ")";
}
VariantInfo::Unit(_) => {}
}
f += ",\n";
}
// Print out the current value
f += "} = ";
f += set_variant_info.variant_name();
match set_variant_info.variant_type() {
VariantType::Struct => {
f += " {\n";
for field in set_variant_info.iter_fields() {
f += &format!("{TAB}{}: {:?},\n", field.name().unwrap(), field.value());
}
f += "}";
}
VariantType::Tuple => {
f += "(\n";
for field in set_variant_info.iter_fields() {
f += &format!("{TAB}{:?},\n", field.value());
}
f += ")";
}
VariantType::Unit => {}
}
}
ReflectRef::Value(value) => {
f += &format!("{value:?}");
}
}
f
}
impl From<()> for Value {
fn from((): ()) -> Self {
Value::None
}
}
macro_rules! from_t {
(impl $type:ty: $var:ident => $expr:expr) => {
impl From<$type> for Value {
fn from($var: $type) -> Self {
$expr
}
}
};
}
macro_rules! from_number {
($($number:ident),*$(,)?) => {
$(
from_t!(impl $number: number => Value::Number(Number::$number(number)));
)*
};
}
from_number!(u8, u16, u32, u64, usize, i8, i16, i32, i64, isize, f32, f64);
from_t!(impl String: string => Value::String(string));
from_t!(impl bool: bool => Value::Boolean(bool));
from_t!(impl Number: number => Value::Number(number));
from_t!(impl HashMap<String, UniqueRc<Value>>: hashmap => Value::Object(hashmap));
from_t!(impl HashMap<String, Value>: hashmap => Value::Object(
hashmap
.into_iter()
.map(|(k, v)| (k, UniqueRc::new(v)))
.collect(),
));
impl FunctionParam for Spanned<Value> {
type Item<'world, 'env, 'reg> = Self;
const USES_VALUE: bool = true;
fn get<'world, 'env, 'reg>(
value: Option<Spanned<Value>>,
_: &mut Option<&'world mut World>,
_: &mut Option<&'env mut Environment>,
_: &'reg [&'reg TypeRegistration],
) -> Result<Self::Item<'world, 'env, 'reg>, EvalError> {
Ok(value.unwrap())
}
}
impl<T: TryFrom<Spanned<Value>, Error = EvalError>> FunctionParam for Spanned<T> {
type Item<'world, 'env, 'reg> = Self;
const USES_VALUE: bool = true;
fn get<'world, 'env, 'reg>(
value: Option<Spanned<Value>>,
_: &mut Option<&'world mut World>,
_: &mut Option<&'env mut Environment>,
_: &'reg [&'reg TypeRegistration],
) -> Result<Self::Item<'world, 'env, 'reg>, EvalError> {
let value = value.unwrap();
Ok(Spanned {
span: value.span.clone(),
value: T::try_from(value)?,
})
}
}
impl FunctionParam for Value {
type Item<'world, 'env, 'reg> = Self;
const USES_VALUE: bool = true;
fn get<'world, 'env, 'reg>(
value: Option<Spanned<Value>>,
_: &mut Option<&'world mut World>,
_: &mut Option<&'env mut Environment>,
_: &'reg [&'reg TypeRegistration],
) -> Result<Self::Item<'world, 'env, 'reg>, EvalError> {
Ok(value.unwrap().value)
}
}
macro_rules! impl_function_param_for_value {
(impl $type:ty: $value_pattern:pat => $return:expr) => {
impl FunctionParam for $type {
type Item<'world, 'env, 'reg> = Self;
const USES_VALUE: bool = true;
fn get<'world, 'env, 'reg>(
value: Option<Spanned<Value>>,
_: &mut Option<&'world mut World>,
_: &mut Option<&'env mut Environment>,
_: &'reg [&'reg TypeRegistration],
) -> Result<Self::Item<'world, 'env, 'reg>, EvalError> {
let value = value.unwrap();
if let $value_pattern = value.value {
Ok($return)
} else {
Err(EvalError::IncompatibleFunctionParameter {
expected: stringify!($type),
actual: value.value.natural_kind(),
span: value.span,
})
}
}
}
impl TryFrom<Spanned<Value>> for $type {
type Error = EvalError;
fn try_from(value: Spanned<Value>) -> Result<Self, Self::Error> {
if let $value_pattern = value.value {
Ok($return)
} else {
todo!()
}
}
}
};
}
macro_rules! impl_function_param_for_numbers {
($generic:ident ($($number:ident),*$(,)?)) => {
$(
impl FunctionParam for $number {
type Item<'world, 'env, 'reg> = Self;
const USES_VALUE: bool = true;
fn get<'world, 'env, 'reg>(
value: Option<Spanned<Value>>,
_: &mut Option<&'world mut World>,
_: &mut Option<&'env mut Environment>,
_: &'reg [&'reg TypeRegistration],
) -> Result<Self::Item<'world, 'env, 'reg>, EvalError> {
let value = value.unwrap();
match value.value {
Value::Number(Number::$number(value)) => Ok(value),
Value::Number(Number::$generic(value)) => Ok(value as $number),
_ => Err(EvalError::IncompatibleFunctionParameter {
expected: concat!("a ", stringify!($number)),
actual: value.value.natural_kind(),
span: value.span,
})
}
}
}
impl TryFrom<Spanned<Value>> for $number {
type Error = EvalError;
fn try_from(value: Spanned<Value>) -> Result<Self, Self::Error> {
match value.value {
Value::Number(Number::$number(value)) => Ok(value),
Value::Number(Number::$generic(value)) => Ok(value as $number),
_ => Err(EvalError::IncompatibleFunctionParameter {
expected: concat!("a ", stringify!($number)),
actual: value.value.natural_kind(),
span: value.span
})
}
}
}
)*
};
}
impl_function_param_for_numbers!(Float(f32, f64));
impl_function_param_for_numbers!(Integer(u8, u16, u32, u64, usize, i8, i16, i32, i64, isize));
impl_function_param_for_value!(impl bool: Value::Boolean(boolean) => boolean);
impl_function_param_for_value!(impl Number: Value::Number(number) => number);
impl_function_param_for_value!(impl String: Value::String(string) => string);
// impl_function_param_for_value!(impl HashMap<String, UniqueRc<Value>>: Value::Object(object) => object);
impl_function_param_for_value!(impl HashMap<String, Value>: Value::Object(object) => {
object.into_iter().map(|(k, v)| (k, v.into_inner())).collect()
});
impl_function_param_for_value!(impl StrongRef<Value>: Value::Reference(reference) => reference.upgrade().unwrap());
impl FunctionParam for &mut World {
type Item<'world, 'env, 'reg> = &'world mut World;
const USES_VALUE: bool = false;
fn get<'world, 'env, 'reg>(
_: Option<Spanned<Value>>,
world: &mut Option<&'world mut World>,
_: &mut Option<&'env mut Environment>,
_: &'reg [&'reg TypeRegistration],
) -> Result<Self::Item<'world, 'env, 'reg>, EvalError> {
let Some(world) = world.take() else {
// make this unreachable by checking the function when it gets registered
todo!("world borrowed twice");
};
Ok(world)
}
}
// This probably isn't a good idea. But eh who cares, more power to the user.
impl FunctionParam for &mut Environment {
type Item<'world, 'env, 'reg> = &'env mut Environment;
const USES_VALUE: bool = false;
fn get<'world, 'env, 'reg>(
_: Option<Spanned<Value>>,
_: &mut Option<&'world mut World>,
environment: &mut Option<&'env mut Environment>,
_: &'reg [&'reg TypeRegistration],
) -> Result<Self::Item<'world, 'env, 'reg>, EvalError> {
Ok(environment.take().unwrap())
}
}
impl FunctionParam for &[&TypeRegistration] {
type Item<'world, 'env, 'reg> = &'reg [&'reg TypeRegistration];
const USES_VALUE: bool = false;
fn get<'world, 'env, 'reg>(
_: Option<Spanned<Value>>,
_: &mut Option<&'world mut World>,
_: &mut Option<&'env mut Environment>,
registrations: &'reg [&'reg TypeRegistration],
) -> Result<Self::Item<'world, 'env, 'reg>, EvalError> {
Ok(registrations)
}
}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/src/builtin_parser/runner/error.rs | src/builtin_parser/runner/error.rs | use std::borrow::Cow;
use logos::Span;
use crate::builtin_parser::number::Number;
use crate::builtin_parser::parser::Access;
use crate::builtin_parser::Spanned;
use crate::command::{CommandHint, CommandHintColor};
use super::Value;
/// An error occurring during the while executing the [`AST`](Ast) of the command.
#[derive(Debug)]
#[allow(missing_docs)]
pub enum EvalError {
/// A custom text message. Contains very little contextual information, try to find an existing error instead.
Custom {
/// The text of the message
text: Cow<'static, str>,
span: Span,
},
InvalidOperation {
left: Number,
right: Number,
operation: &'static str,
span: Span,
},
VariableNotFound(Spanned<String>),
ExpectedNumberAfterUnaryOperator(Spanned<Value>),
CannotIndexValue(Spanned<Value>),
ReferenceToMovedData(Span),
VariableMoved(Spanned<String>),
CannotDereferenceValue(Spanned<&'static str>),
CannotBorrowValue(Spanned<&'static str>),
IncompatibleReflectTypes {
expected: String,
actual: String,
span: Span,
},
EnumVariantNotFound(Spanned<String>),
CannotMoveOutOfResource(Spanned<String>),
CannotNegateUnsignedInteger(Spanned<Number>),
IncompatibleNumberTypes {
left: &'static str,
right: &'static str,
span: Span,
},
IncompatibleFunctionParameter {
expected: &'static str,
actual: &'static str,
span: Span,
},
EnumVariantStructFieldNotFound {
field_name: String,
variant_name: String,
span: Span,
},
ExpectedVariableGotFunction(Spanned<String>),
CannotReflectReference(Span),
CannotReflectResource(Span),
EnumVariantTupleFieldNotFound {
span: Span,
field_index: usize,
variant_name: String,
},
IncorrectAccessOperation {
span: Span,
expected_access: &'static [&'static str],
expected_type: &'static str,
got: Access,
},
FieldNotFoundInStruct(Spanned<String>),
FieldNotFoundInTuple {
span: Span,
field_index: usize,
tuple_size: usize,
},
}
impl EvalError {
/// Get all the locations of the error in the source.
pub fn spans(&self) -> Vec<Span> {
use EvalError as E;
match self {
E::Custom { span, .. } => vec![span.clone()],
E::VariableNotFound(Spanned { span, .. }) => vec![span.clone()],
E::ExpectedNumberAfterUnaryOperator(Spanned { span, .. }) => vec![span.clone()],
E::CannotIndexValue(Spanned { span, .. }) => vec![span.clone()],
E::FieldNotFoundInStruct(Spanned { span, value: _ }) => vec![span.clone()],
E::CannotDereferenceValue(Spanned { span, .. }) => vec![span.clone()],
E::ReferenceToMovedData(span) => vec![span.clone()],
E::VariableMoved(Spanned { span, .. }) => vec![span.clone()],
E::CannotBorrowValue(Spanned { span, .. }) => vec![span.clone()],
E::IncompatibleReflectTypes { span, .. } => vec![span.clone()],
E::EnumVariantNotFound(Spanned { span, .. }) => vec![span.clone()],
E::EnumVariantStructFieldNotFound { span, .. } => vec![span.clone()],
E::EnumVariantTupleFieldNotFound { span, .. } => vec![span.clone()],
E::CannotMoveOutOfResource(Spanned { span, .. }) => vec![span.clone()],
E::CannotNegateUnsignedInteger(Spanned { span, .. }) => vec![span.clone()],
E::IncompatibleNumberTypes { span, .. } => vec![span.clone()],
E::IncompatibleFunctionParameter { span, .. } => vec![span.clone()],
E::ExpectedVariableGotFunction(Spanned { span, .. }) => vec![span.clone()],
E::CannotReflectReference(span) => vec![span.clone()],
E::CannotReflectResource(span) => vec![span.clone()],
E::InvalidOperation { span, .. } => vec![span.clone()],
E::IncorrectAccessOperation { span, .. } => vec![span.clone()],
E::FieldNotFoundInTuple { span, .. } => vec![span.clone()],
}
}
/// Returns all the hints for this error.
pub fn hints(&self) -> Vec<CommandHint> {
self.spans()
.into_iter()
.map(|span| CommandHint::new(span, CommandHintColor::Error, self.to_string()))
.collect()
}
}
impl std::fmt::Display for EvalError {
fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
use EvalError as E;
match self {
E::Custom { text, .. } => f.write_str(text),
E::VariableNotFound(Spanned { value, .. }) => {
write!(f, "Variable `{value}` not found.")
}
E::ExpectedNumberAfterUnaryOperator(Spanned { value, .. }) => write!(
f,
"Expected a number after unary operator (-) but got {} instead.",
value.natural_kind()
),
E::CannotIndexValue(Spanned { span: _, value }) => {
write!(f, "Cannot index {} with a member expression.", value.kind())
}
E::ReferenceToMovedData(_) => write!(f, "Cannot access reference to moved data."),
E::VariableMoved(Spanned { value, .. }) => {
write!(f, "Variable `{value}` was moved.")
}
E::CannotDereferenceValue(Spanned { value: kind, .. }) => {
write!(f, "Cannot dereference {kind}.")
}
E::CannotBorrowValue(Spanned { value: kind, .. }) => {
write!(f, "Cannot borrow {kind}. Only variables can be borrowed.")
}
E::IncompatibleReflectTypes {
expected, actual, ..
} => write!(
f,
"Cannot set incompatible reflect types. Expected `{expected}`, got `{actual}`"
),
E::EnumVariantNotFound(Spanned { value: name, .. }) => {
write!(f, "Enum variant `{name}` was not found.")
}
E::EnumVariantStructFieldNotFound {
field_name,
variant_name,
..
} => write!(
f,
"Field `{field_name}` doesn't exist on struct variant `{variant_name}`."
),
E::EnumVariantTupleFieldNotFound {
field_index,
variant_name,
..
} => write!(
f,
"Field `{field_index}` doesn't exist on tuple variant `{variant_name}`."
),
E::CannotMoveOutOfResource(Spanned { value, .. }) => write!(
f,
"Cannot move out of resource `{value}`, try borrowing it instead."
),
E::CannotNegateUnsignedInteger(Spanned { value, .. }) => write!(
f,
"Unsigned integers cannot be negated. (Type: {})",
value.natural_kind()
),
E::IncompatibleNumberTypes { left, right, .. } => write!(
f,
"Incompatible number types; `{left}` and `{right}` are incompatible."
),
E::IncompatibleFunctionParameter {
expected, actual, ..
} => write!(
f,
"Mismatched function parameter type. Expected {expected} but got {actual}"
),
E::ExpectedVariableGotFunction(Spanned { value, .. }) => write!(
f,
"Expected `{value}` to be a variable, but got a function instead."
),
E::CannotReflectReference(_) => {
write!(
f,
"Cannot reflect a reference. Try dereferencing it instead."
)
}
E::CannotReflectResource(_) => {
write!(
f,
"Cannot reflecting resources is not possible at the moment."
)
}
E::InvalidOperation {
left,
right,
operation,
span: _,
} => write!(f, "Invalid operation: Cannot {operation} {left} by {right}"),
E::IncorrectAccessOperation {
expected_access,
expected_type,
got,
span: _,
} => write!(
f,
"Expected {} access to access {expected_type} but got {}",
expected_access.join(" and "),
got.natural_kind()
),
E::FieldNotFoundInStruct(Spanned { span: _, value }) => {
write!(f, "Field {value} not found in struct")
}
E::FieldNotFoundInTuple {
field_index,
tuple_size,
span: _,
} => write!(
f,
"Field {field_index} is out of bounds for tuple of size {tuple_size}"
),
}
}
}
impl std::error::Error for EvalError {}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/src/builtin_parser/runner/reflection.rs | src/builtin_parser/runner/reflection.rs | use std::any::TypeId;
use std::collections::HashMap;
use bevy::prelude::*;
use bevy::reflect::{DynamicStruct, ReflectFromPtr, TypeRegistration};
use logos::Span;
use crate::builtin_parser::EvalError;
use super::Value;
/// [`IntoResource`] allows `bevy_dev_console` to create a long-lasting resource "reference"
/// that can be "unwrapped" into the appropriate resource.
#[derive(Debug, Clone)]
pub struct IntoResource {
pub id: TypeId,
pub path: String,
}
impl IntoResource {
pub const fn new(id: TypeId) -> Self {
Self {
id,
path: String::new(),
}
}
pub fn ref_dyn_reflect<'a>(
&self,
world: &'a World,
registration: impl CreateRegistration,
) -> &'a dyn Reflect {
let registration = registration.create_registration(self.id);
let ref_dyn_reflect = ref_dyn_reflect(world, registration).unwrap();
ref_dyn_reflect
}
pub fn mut_dyn_reflect<'a>(
&self,
world: &'a mut World,
registration: impl CreateRegistration,
) -> Mut<'a, dyn Reflect> {
let registration = registration.create_registration(self.id);
let ref_dyn_reflect = mut_dyn_reflect(world, registration).unwrap();
ref_dyn_reflect
}
}
pub fn object_to_dynamic_struct(
hashmap: HashMap<String, (Value, Span, &'static str)>,
) -> Result<DynamicStruct, EvalError> {
let mut dynamic_struct = DynamicStruct::default();
for (key, (value, span, reflect)) in hashmap {
dynamic_struct.insert_boxed(&key, value.reflect(span, reflect)?);
}
Ok(dynamic_struct)
}
pub fn mut_dyn_reflect<'a>(
world: &'a mut World,
registration: &TypeRegistration,
) -> Option<Mut<'a, dyn Reflect>> {
let Some(component_id) = world.components().get_resource_id(registration.type_id()) else {
error!(
"Couldn't get the component id of the {} resource.",
registration.type_info().type_path()
);
return None;
};
let resource = world.get_resource_mut_by_id(component_id).unwrap();
let reflect_from_ptr = registration.data::<ReflectFromPtr>().unwrap();
// SAFETY: from the context it is known that `ReflectFromPtr` was made for the type of the `MutUntyped`
let val: Mut<dyn Reflect> =
resource.map_unchanged(|ptr| unsafe { reflect_from_ptr.as_reflect_mut(ptr) });
Some(val)
}
pub fn ref_dyn_reflect<'a>(
world: &'a World,
registration: &TypeRegistration,
) -> Option<&'a dyn Reflect> {
let Some(component_id) = world.components().get_resource_id(registration.type_id()) else {
error!(
"Couldn't get the component id of the {} resource.",
registration.type_info().type_path()
);
return None;
};
let resource = world.get_resource_by_id(component_id).unwrap();
let reflect_from_ptr = registration.data::<ReflectFromPtr>().unwrap();
// SAFETY: from the context it is known that `ReflectFromPtr` was made for the type of the `MutUntyped`
let val: &dyn Reflect = unsafe { reflect_from_ptr.as_reflect(resource) };
Some(val)
}
pub trait CreateRegistration {
fn create_registration(&self, type_id: TypeId) -> &TypeRegistration;
}
impl CreateRegistration for &TypeRegistration {
fn create_registration(&self, type_id: TypeId) -> &TypeRegistration {
assert!(self.type_id() == type_id);
self
}
}
impl CreateRegistration for &[&TypeRegistration] {
fn create_registration(&self, type_id: TypeId) -> &TypeRegistration {
self.iter()
.find(|reg| reg.type_id() == type_id)
.expect("registration no longer exists")
}
}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/src/builtin_parser/runner/member.rs | src/builtin_parser/runner/member.rs | //! Evaluation for member expressions and paths
use crate::builtin_parser::parser::{access_unwrap, Access, Expression};
use crate::builtin_parser::{EvalError, SpanExtension, Spanned, WeakRef};
use super::reflection::IntoResource;
use super::{eval_expression, todo_error, EvalParams, Value};
/// Evaluate a member expression.
///
/// A member expression allows indexing of values to access their inner fields.
///
/// # Examples
///
/// ```text
/// $ {a: 5}.a
/// > 5
/// $ x = {key: "hi"}
/// $ &x.key
/// > "hi"
/// $ x
/// > {key: "hi"}
/// $ x.key
/// > "hi"
/// $ x
/// Error: Variable moved
/// ```
pub fn eval_member_expression(
left: Spanned<Expression>,
right: Spanned<Access>,
EvalParams {
world,
environment,
registrations,
}: EvalParams,
) -> Result<Value, EvalError> {
let left_span = left.span.clone();
let span = left.span.start..right.span.end;
let left = eval_expression(
left,
EvalParams {
world,
environment,
registrations,
},
)?;
match left {
Value::Reference(reference) => {
let Some(strong) = reference.upgrade() else {
return Err(EvalError::ReferenceToMovedData(left_span));
};
let reference = strong.borrow();
match &&*reference {
Value::Object(map) | Value::StructObject { map, .. } => {
access_unwrap!("an object reference", Field(field) = right => {
let value = map.get(&field).ok_or(EvalError::FieldNotFoundInStruct(span.wrap(field)))?;
Ok(Value::Reference(value.borrow()))
})
}
Value::Tuple(tuple) | Value::StructTuple { tuple, .. } => {
access_unwrap!("a tuple reference", TupleIndex(index) = right => {
let Spanned { span: _, value } =
tuple.get(index).ok_or(EvalError::FieldNotFoundInTuple {
span,
field_index: index,
tuple_size: tuple.len(),
})?;
Ok(Value::Reference(value.borrow()))
})
}
Value::Resource(resource) => {
access_unwrap!("a resource", Field(field) = right => {
let mut resource = resource.clone();
resource.path.push('.');
resource.path += &field;
Ok(Value::Resource(resource))
})
}
var => Err(EvalError::CannotIndexValue(left_span.wrap((*var).clone()))),
}
}
Value::Object(mut map) | Value::StructObject { mut map, .. } => {
access_unwrap!("an object", Field(field) = right => {
let value = map
.remove(&field)
.ok_or(EvalError::FieldNotFoundInStruct(span.wrap(field)))?;
Ok(value.into_inner())
})
}
Value::Tuple(tuple) | Value::StructTuple { tuple, .. } => {
access_unwrap!("a tuple reference", TupleIndex(field_index) = right => {
let tuple_size = tuple.len();
let Spanned { span: _, value } =
tuple
.into_vec()
.into_iter()
.nth(field_index)
.ok_or(EvalError::FieldNotFoundInTuple {
span,
field_index,
tuple_size,
})?;
Ok(value.into_inner())
})
}
Value::Resource(mut resource) => {
access_unwrap!("a resource", Field(field) = right => {
resource.path.push('.');
resource.path += &field;
Ok(Value::Resource(resource))
})
}
_ => Err(EvalError::CannotIndexValue(left_span.wrap(left))),
}
}
pub enum Path {
Variable(WeakRef<Value>),
NewVariable(String),
Resource(IntoResource),
}
/// Evaluate a path expression.
///
/// A path expression, in contrast to a member expression, is for creating new variables or assigning to existing ones.
///
/// # Examples
///
/// ```text
/// a -> Path::NewVariable("a")
/// a.b -> if a.b exists, returns Path::Variable(a.b)
/// MyResource.field -> appends "field" to the IntoResource path and returns Resource
/// a.b.c -> if a.b.c, returns Path::Variable(a.b.c) (wow look its recursive)
/// ```
pub fn eval_path(
expr: Spanned<Expression>,
EvalParams {
world,
environment,
registrations,
}: EvalParams,
) -> Result<Spanned<Path>, EvalError> {
match expr.value {
Expression::Variable(variable) => {
if let Some(registration) = registrations
.iter()
.find(|v| v.type_info().type_path_table().short_path() == variable)
{
Ok(Spanned {
span: expr.span,
value: Path::Resource(IntoResource::new(registration.type_id())),
})
} else if let Ok(variable) = environment.get(&variable, expr.span.clone()) {
Ok(Spanned {
span: expr.span,
value: Path::Variable(variable.borrow()),
})
} else {
Ok(Spanned {
span: expr.span,
value: Path::NewVariable(variable),
})
}
}
Expression::Member { left, right } => {
let left = eval_path(
*left,
EvalParams {
world,
environment,
registrations,
},
)?;
match left.value {
Path::Variable(variable) => match &*variable.upgrade().unwrap().borrow() {
Value::Resource(resource) => {
access_unwrap!("a resource", Field(field) = right => {
let mut resource = resource.clone();
resource.path.push('.');
resource.path += &field;
Ok(left.span.wrap(Path::Resource(resource)))
})
}
Value::Object(object) | Value::StructObject { map: object, .. } => {
let span = left.span.start..right.span.end;
access_unwrap!("an object", Field(field) = right => {
let weak = match object.get(&field) {
Some(rc) => rc.borrow(),
None => {
return Err(EvalError::FieldNotFoundInStruct(span.wrap(field)))
}
};
Ok(span.wrap(Path::Variable(weak)))
})
}
Value::Tuple(tuple) | Value::StructTuple { tuple, .. } => {
let span = left.span.start..right.span.end;
access_unwrap!("a tuple", TupleIndex(index) = right => {
let weak = match tuple.get(index) {
Some(Spanned { value: rc, span: _ }) => rc.borrow(),
None => {
return Err(EvalError::FieldNotFoundInTuple {
span,
field_index: index,
tuple_size: tuple.len(),
})
}
};
Ok(span.wrap(Path::Variable(weak)))
})
}
value => todo_error!("{value:?}"),
},
Path::Resource(mut resource) => {
access_unwrap!("a resource", Field(field) = right => {
resource.path.push('.');
resource.path += &field;
Ok(left.span.wrap(Path::Resource(resource)))
})
}
Path::NewVariable(name) => Err(EvalError::VariableNotFound(left.span.wrap(name))),
}
}
Expression::Dereference(inner) => {
let path = eval_path(
*inner,
EvalParams {
world,
environment,
registrations,
},
)?;
match path.value {
Path::Variable(value) => {
let strong = value
.upgrade()
.ok_or(EvalError::ReferenceToMovedData(path.span))?;
let borrow = strong.borrow();
if let Value::Reference(ref reference) = &*borrow {
Ok(expr.span.wrap(Path::Variable(reference.clone())))
} else {
Err(EvalError::CannotDereferenceValue(
expr.span.wrap(borrow.natural_kind()),
))
}
}
Path::NewVariable(_) => todo_error!(),
Path::Resource(_) => todo_error!(),
}
}
expr => todo_error!("can't eval path of this expr: {expr:#?}"),
}
}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/src/builtin_parser/runner/unique_rc.rs | src/builtin_parser/runner/unique_rc.rs | use std::cell::{Ref, RefCell, RefMut};
use std::fmt::Debug;
use std::ops::{Deref, DerefMut};
use std::rc::{Rc, Weak};
/// A uniquely owned [`Rc`] with interior mutability. Interior mutability is abstracted away with [`WeakRef`].
///
/// This represents an [`Rc`] that is known to be uniquely owned -- that is, have exactly one strong
/// reference.
///
/// **TODO:** This is actually going to be a standard library feature. Use [`alloc::rc::UniqueRc`] when it is stabilized.
#[derive(Debug)]
pub struct UniqueRc<T: ?Sized>(Rc<RefCell<T>>);
impl<T: ?Sized> UniqueRc<T> {
/// Get a reference to the inner [`Rc`] of [`UniqueRc`].
///
/// # Safety
///
/// This function is unsafe because it allows direct access to the [`Rc`].
/// If cloned then the guarantee that there is only ever one strong reference is no longer satisfied.
const unsafe fn get_rc(&self) -> &Rc<RefCell<T>> {
&self.0
}
pub(crate) fn borrow_inner(&self) -> &RefCell<T> {
&self.0
}
/// Create a new weak pointer to this [`UniqueRc`].
pub fn borrow(&self) -> WeakRef<T> {
WeakRef::new(self)
}
}
impl<T> UniqueRc<T> {
/// Create a new [`UniqueRc`].
pub fn new(value: T) -> UniqueRc<T> {
UniqueRc(Rc::new(RefCell::new(value)))
}
/// Get the inner value (`T`) of this [`UniqueRc<T>`].
pub fn into_inner(self) -> T {
Rc::try_unwrap(self.0)
.unwrap_or_else(|rc| {
panic!(
"There are {} strong pointers to a UniqueRc!",
Rc::strong_count(&rc)
)
})
.into_inner()
}
}
impl<T: ?Sized + Clone> Clone for UniqueRc<T> {
fn clone(&self) -> Self {
let t = self.borrow_inner().clone().into_inner();
Self::new(t)
}
}
impl<T> Deref for UniqueRc<T> {
type Target = RefCell<T>;
fn deref(&self) -> &Self::Target {
self.0.as_ref()
}
}
impl<T> DerefMut for UniqueRc<T> {
fn deref_mut(&mut self) -> &mut Self::Target {
Rc::get_mut(&mut self.0).unwrap()
}
}
/// A weak reference to a [`UniqueRc`] that may or may not exist.
#[derive(Debug)]
pub struct WeakRef<T: ?Sized> {
reference: Weak<RefCell<T>>,
}
impl<T: ?Sized> Clone for WeakRef<T> {
fn clone(&self) -> Self {
Self {
reference: self.reference.clone(),
}
}
}
impl<T: ?Sized> WeakRef<T> {
fn new(unique_rc: &UniqueRc<T>) -> Self {
// SAFETY: We are not cloning the `Rc`, so this is fine.
let rc = unsafe { unique_rc.get_rc() };
Self {
reference: Rc::downgrade(rc),
}
}
/// Converts this [`WeakRef`] into a [`StrongRef`] (may be unsafe, see [`StrongRef`]'s documentation).
pub fn upgrade(&self) -> Option<StrongRef<T>> {
Some(StrongRef(self.reference.upgrade()?))
}
}
/// A strong reference to value `T`.
///
/// This value is *technically* unsafe due to [`UniqueRc`] expecting only one strong reference to its inner value.
/// However in practice the only way you could obtain it is by having it passed into a custom function.
/// In which case it is safe (probably).
///
/// ```
/// use bevy_dev_console::builtin_parser::{Value, StrongRef};
///
/// fn add_to_reference(my_reference: StrongRef<Value>, add: String) {
/// // currently you can only do it with `Value` (TODO)
/// if let Value::String(string) = &mut *my_reference.borrow_mut() {
/// *string += &add;
/// } else {
/// todo!();
/// }
/// }
/// ```
#[derive(Debug)]
pub struct StrongRef<T: ?Sized>(Rc<RefCell<T>>);
impl<T: ?Sized> StrongRef<T> {
/// Immutably borrows the wrapped value.
pub fn borrow(&self) -> Ref<T> {
self.0.borrow()
}
/// Mutably borrows the wrapped value.
pub fn borrow_mut(&self) -> RefMut<T> {
self.0.borrow_mut()
}
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
#[should_panic]
fn strong_ref_panic() {
let rc = UniqueRc::new(0);
let weak = rc.borrow();
let strong = weak.upgrade().unwrap();
println!("{}", rc.into_inner()); // Panic!
println!("{}", strong.borrow());
}
}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/src/builtin_parser/runner/stdlib/math.rs | src/builtin_parser/runner/stdlib/math.rs | use crate::builtin_parser::{Environment, EvalError, Number, Spanned};
use crate::register;
macro_rules! float_calc_op {
($fn:ident, $name:expr) => {
fn $fn(number: Spanned<Number>) -> Result<Number, EvalError> {
match number.value {
Number::Float(number) => Ok(Number::Float(number.$fn())),
Number::f32(number) => Ok(Number::f32(number.$fn())),
Number::f64(number) => Ok(Number::f64(number.$fn())),
_ => Err(EvalError::Custom {
text: concat!("Cannot calculate the ", $name, " of a non-float value").into(),
span: number.span,
}),
}
}
};
}
float_calc_op!(sqrt, "square root");
float_calc_op!(sin, "sine");
float_calc_op!(cos, "cosine");
float_calc_op!(tan, "tangent");
float_calc_op!(abs, "absolute value");
float_calc_op!(ceil, "rounded-up value");
float_calc_op!(floor, "rounded-down value");
float_calc_op!(round, "rounded value");
float_calc_op!(trunc, "truncuated value");
/// Registers the built-in math functions into the console [`Environment`].
pub fn register(env: &mut Environment) {
    register!(env => {
        fn sqrt;
        fn sin;
        fn cos;
        fn tan;
        fn abs;
        fn ceil;
        fn floor;
        fn round;
        fn trunc;
    });
}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/src/ui/completions.rs | src/ui/completions.rs | use bevy::ecs::system::Commands;
use bevy_egui::egui;
use crate::command::{AutoCompletions, CompletionSuggestion, UpdateAutoComplete};
use crate::prelude::ConsoleConfig;
use super::ConsoleUiState;
/// The maximum number of completion suggestions shown at once in the
/// autocomplete popup.
pub const MAX_COMPLETION_SUGGESTIONS: usize = 6;
pub fn completions(
text_edit: egui::text_edit::TextEditOutput,
text_edit_id: egui::Id,
state: &mut ConsoleUiState,
ui: &mut egui::Ui,
commands: &mut Commands,
completions: &AutoCompletions,
config: &ConsoleConfig,
) {
let text_edit_complete_id = ui.make_persistent_id("text_edit_complete");
if let Some(cursor_range) = text_edit.state.cursor.char_range() {
let [primary, secondary] = cursor_range.sorted();
fn non_keyword(character: char) -> bool {
!(character.is_alphanumeric() || character == '_')
}
let cursor_index = (|| {
// Convert the cursor's char index into a byte index
// aswell as returning the character at the cursor's position position
let (primary_index, char) = state
.command
.char_indices()
.nth(primary.index.saturating_sub(1))?;
if non_keyword(char) {
return None;
}
Some(primary_index)
})();
if text_edit.response.changed() {
state.selected_completion = 0;
}
// todo check cursor position changed https://github.com/emilk/egui/discussions/4540
// if text_edit.response.changed() {
if true {
if let Some(cursor_index) = cursor_index {
ui.memory_mut(|mem| {
if !completions.is_empty() {
mem.open_popup(text_edit_complete_id)
}
});
let before_cursor = &state.command[..=cursor_index];
let keyword_before = match before_cursor.rfind(non_keyword) {
// If found, return the slice from the end of the non-alphanumeric character to the cursor position
Some(index) => &before_cursor[(index + 1)..],
// If not found, the whole substring is a word
None => before_cursor,
};
commands.add(UpdateAutoComplete(keyword_before.to_owned()));
} else {
ui.memory_mut(|mem| {
if mem.is_popup_open(text_edit_complete_id) {
mem.close_popup();
}
});
}
}
if let Some(cursor_index) = cursor_index {
if ui.input(|i| i.key_pressed(egui::Key::Tab)) {
// Remove the old text
let before_cursor = &state.command[..=cursor_index];
let index_before = match before_cursor.rfind(non_keyword) {
Some(index) => index + 1,
None => 0,
};
let after_cursor = &state.command[cursor_index..];
match after_cursor.find(non_keyword) {
Some(characters_after) => state
.command
.drain(index_before..cursor_index + characters_after),
None => state.command.drain(index_before..),
};
// Add the completed text
let completed_text = &completions.0[state.selected_completion].suggestion;
state.command.insert_str(index_before, completed_text);
// Set the cursor position
let mut text_edit_state = text_edit.state;
let mut cursor_range = egui::text::CCursorRange::two(primary, secondary);
cursor_range.primary.index +=
completed_text.len() - (cursor_index - index_before) - 1;
cursor_range.secondary.index +=
completed_text.len() - (cursor_index - index_before) - 1;
text_edit_state.cursor.set_char_range(Some(cursor_range));
egui::TextEdit::store_state(ui.ctx(), text_edit_id, text_edit_state);
}
}
}
egui::popup_below_widget(
ui,
text_edit_complete_id,
&text_edit.response,
egui::PopupCloseBehavior::CloseOnClickOutside,
|ui| {
ui.vertical(|ui| {
for (
i,
CompletionSuggestion {
suggestion,
highlighted_indices,
},
) in completions.iter().take(6).enumerate()
{
let mut layout = egui::text::LayoutJob::default();
for (i, _) in suggestion.char_indices() {
layout.append(
&suggestion[i..=i],
0.0,
if highlighted_indices.contains(&i) {
config.theme.format_bold()
} else {
config.theme.format_text()
},
);
}
let res = ui.label(layout);
if i == state.selected_completion {
res.highlight();
}
}
})
},
);
}
/// Moves the completion selection up or down.
///
/// Also consumes the up and down arrow keys.
pub fn change_selected_completion(
    ui: &mut egui::Ui,
    state: &mut ConsoleUiState,
    completions: &[CompletionSuggestion],
) {
    if ui.input_mut(|i| i.consume_key(egui::Modifiers::NONE, egui::Key::ArrowUp)) {
        state.selected_completion = state.selected_completion.saturating_sub(1);
    }
    if ui.input_mut(|i| i.consume_key(egui::Modifiers::NONE, egui::Key::ArrowDown)) {
        // `saturating_sub` prevents the underflow panic the previous
        // `completions.len() - 1` caused when `completions` was empty.
        state.selected_completion = state
            .selected_completion
            .saturating_add(1)
            .min(completions.len().saturating_sub(1));
    }
}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/examples/resource.rs | examples/resource.rs | //! Example of modifying resources via the console via reflection.
//!
//! **Warning:** This is very experimental, might not work.
use bevy::log::LogPlugin;
use bevy::prelude::*;
use bevy_dev_console::prelude::*;
/// Example resource enum with unit, struct, and tuple variants, so all three
/// variant shapes can be inspected/modified from the console via reflection.
#[derive(Resource, Reflect, Default, Debug)]
enum MyEnum {
    #[default]
    None,
    Numero1,
    Structio {
        a: f64,
        b: String,
    },
    Tupleo(String, f64),
}
/// Example resource struct mixing primitives, a nested struct, and a tuple,
/// to exercise reflection-based access from the console.
#[derive(Resource, Reflect, Default, Debug)]
struct MyStruct {
    number: f64,
    string: String,
    struct_in_struct: SubStruct,
    tuple: (i32, u8),
}
/// Nested struct stored inside [`MyStruct`]; not a resource itself.
#[derive(Reflect, Default, Debug)]
struct SubStruct {
    boolean: bool,
    enume: MyEnum,
}
fn main() {
    App::new()
        // Register and default-initialize the enum resource so the console
        // can find and mutate it by type name.
        .register_type::<MyEnum>()
        .init_resource::<MyEnum>()
        // Insert a struct resource with non-default values for inspection.
        .insert_resource(MyStruct {
            number: 5.6,
            string: "hi there :)".to_string(),
            struct_in_struct: SubStruct {
                boolean: false,
                enume: MyEnum::Tupleo("nooo".to_string(), 5.),
            },
            tuple: (-5, 255),
        })
        .register_type::<MyStruct>()
        .add_plugins((
            // The console requires its custom log layer to capture output.
            DefaultPlugins.set(LogPlugin {
                custom_layer: custom_log_layer,
                ..default()
            }),
            DevConsolePlugin,
        ))
        .run();
}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/examples/custom_functions.rs | examples/custom_functions.rs | //! An example showing how to create custom functions
use bevy::log::LogPlugin;
use bevy::prelude::*;
use bevy_dev_console::builtin_parser::{Environment, EvalError, Number, Spanned, StrongRef, Value};
use bevy_dev_console::prelude::*;
use bevy_dev_console::register;
use web_time as time;
// Declare the functions we want to create:
/// Basic function
///
/// Logs how many whole seconds have elapsed since the unix epoch, using the
/// `web_time` crate (aliased as `time` above).
fn time_since_epoch() {
    let time = time::SystemTime::now()
        .duration_since(time::UNIX_EPOCH)
        .unwrap();
    info!("The unix epoch was {} seconds ago", time.as_secs());
}
/// Function with parameters and return value.
///
/// Note that this will cause an error if an integer is passed onto this function.
fn add(num1: f64, num2: f64) -> f64 {
    let total = num1 + num2;
    total
}
/// Function with any value + span
///
/// Logs the argument's source span within the typed command alongside its
/// debug representation.
fn print_debug_info(value: Spanned<Value>) {
    info!(
        "Location in command: {:?}, Value: {:?}",
        value.span, value.value
    )
}
// Counter resource mutated by `increment_global_counter` below.
#[derive(Resource)]
struct MyCounter(u32);
/// Function with [`World`]
///
/// Increments the [`MyCounter`] resource and returns its new value.
fn increment_global_counter(world: &mut World) -> u32 {
    // `resource_scope` temporarily removes the resource, giving mutable
    // access to it alongside the rest of the world.
    world.resource_scope(|_, mut counter: Mut<MyCounter>| {
        counter.0 += 1;
        counter.0
    })
}
// Function with reference (Syntax subject to change soon)
//
// Adds 1 to the number behind the reference, or returns an error when the
// referenced value is not a number.
fn increment_number(number: Spanned<StrongRef<Value>>) -> Result<(), EvalError> {
    let span = number.span;
    let mut reference = number.value.borrow_mut();
    if let Value::Number(number) = &mut *reference {
        // NOTE(review): `unwrap` assumes adding Integer(1) to any `Number`
        // cannot fail — confirm against `Number::add`'s error cases.
        *number = Number::add(*number, Number::Integer(1), span).unwrap();
        Ok(())
    } else {
        Err(EvalError::Custom {
            text: "Oh nooo".into(),
            span,
        })
    }
}
// For more examples take a look at the standard library.
// Register our functions by creating and inserting our own environment
/// Builds a console [`Environment`] containing the default bindings plus the
/// custom functions declared above.
fn custom_environment() -> Environment {
    let mut environment = Environment::default();
    // The register macro allows us to easily add functions to the environment.
    register!(&mut environment => {
        fn time_since_epoch;
        fn add;
        fn print_debug_info;
        fn increment_global_counter;
        fn increment_number;
    });
    environment
}
fn main() {
    App::new()
        .insert_resource(MyCounter(0))
        // Insert our new environment. It is a non-send resource because the
        // environment type is not `Send`.
        .insert_non_send_resource(custom_environment())
        .add_plugins((
            // The console requires its custom log layer to capture output.
            DefaultPlugins.set(LogPlugin {
                custom_layer: custom_log_layer,
                ..default()
            }),
            DevConsolePlugin,
        ))
        .run();
}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
doonv/bevy_dev_console | https://github.com/doonv/bevy_dev_console/blob/98355f16b7d83ad0db983f2204426079780aa2ae/examples/basic.rs | examples/basic.rs | //! A simple example showing how to setup the developer console plugin.
use bevy::log::LogPlugin;
use bevy::prelude::*;
use bevy_dev_console::prelude::*;
fn main() {
    App::new()
        .add_plugins((
            // Add the log plugin with the custom log layer
            DefaultPlugins.set(LogPlugin {
                custom_layer: custom_log_layer,
                // Add a filter to the log plugin that shows all log levels from this example
                filter: format!("wgpu=error,naga=warn,{}=trace", module_path!()),
                ..default()
            }),
            // Add the dev console plugin itself.
            DevConsolePlugin,
        ))
        // Emit one message per log level at startup (see `test` below).
        .add_systems(Startup, test)
        .run();
}
/// Emits one log message at every level so each severity shows in the console.
fn test() {
    trace!("tracing");
    debug!("solving issues...");
    info!("hello :)");
    warn!("spooky warning");
    error!("scary error");
}
| rust | Apache-2.0 | 98355f16b7d83ad0db983f2204426079780aa2ae | 2026-01-04T20:24:35.637367Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-build/src/lib.rs | prost-reflect-build/src/lib.rs | //! `prost-reflect-build` contains [`Builder`] to configure [`prost_build::Config`]
//! to derive [`prost_reflect::ReflectMessage`] for all messages in protocol buffers.
//!
//! The simplest way to generate [`prost_reflect::ReflectMessage`] is:
//!
//! ```no_run
//! // build.rs
//! use prost_reflect_build::Builder;
//!
//! Builder::new()
//! .descriptor_pool("crate::DESCRIPTOR_POOL")
//! .compile_protos(&["path/to/protobuf.proto"], &["path/to/include"])
//! .expect("Failed to compile protos");
//! ```
//!
//! Either [`Builder::descriptor_pool`] or [`Builder::file_descriptor_set_bytes`] must be set to an expression giving the implementation access to descriptors.
//! For example when using `descriptor_pool` a static instance of [`DescriptorPool`] must be available:
//!
//! ```ignore
//! static DESCRIPTOR_POOL: Lazy<DescriptorPool> = Lazy::new(|| DescriptorPool::decode(
//! include_bytes!(concat!(env!("OUT_DIR"), "file_descriptor_set.bin")).as_ref()
//! ).unwrap());
//!
//! // `include!` generated code may appear anywhere in the crate.
//! include!(concat!(env!("OUT_DIR"), "protobuf.rs"));
//! ```
#![warn(missing_debug_implementations, missing_docs)]
use std::{
env, fs, io,
path::{Path, PathBuf},
};
use prost_reflect::DescriptorPool;
/// Configuration builder for prost-reflect code generation.
///
/// The simplest way to generate prost APIs deriving [`prost_reflect::ReflectMessage`]:
///
/// ```no_run
/// # use prost_reflect_build::Builder;
/// Builder::new()
/// .compile_protos(&["path/to/protobuf.proto"], &["path/to/include"])
/// .unwrap();
/// ```
#[derive(Debug, Clone)]
pub struct Builder {
    // Where the encoded `FileDescriptorSet` is written during compilation.
    file_descriptor_set_path: PathBuf,
    // Expression resolving to a `DescriptorPool`, used in the derive attribute.
    descriptor_pool_expr: Option<String>,
    // Expression resolving to the raw descriptor-set bytes (alternative to the pool).
    file_descriptor_set_bytes_expr: Option<String>,
}
impl Default for Builder {
fn default() -> Self {
let file_descriptor_set_path = env::var_os("OUT_DIR")
.map(PathBuf::from)
.unwrap_or_else(|| PathBuf::from("."))
.join("file_descriptor_set.bin");
Self {
file_descriptor_set_path,
descriptor_pool_expr: None,
file_descriptor_set_bytes_expr: None,
}
}
}
impl Builder {
    /// Create a new builder with default parameters.
    pub fn new() -> Self {
        Self::default()
    }
    /// Set the path where the encoded file descriptor set is created.
    /// By default, it is created at `$OUT_DIR/file_descriptor_set.bin`.
    ///
    /// This overrides the path specified by
    /// [`prost_build::Config::file_descriptor_set_path`].
    pub fn file_descriptor_set_path<P>(&mut self, path: P) -> &mut Self
    where
        P: Into<PathBuf>,
    {
        self.file_descriptor_set_path = path.into();
        self
    }
    /// Set the file descriptor expression for reflection.
    ///
    /// This should resolve to an instance of `DescriptorPool`. For example, if this
    /// value is set to `crate::DescriptorPool`, then `lib.rs` should contain the following
    ///
    /// ```ignore
    /// static DESCRIPTOR_POOL: Lazy<DescriptorPool> = Lazy::new(||
    ///     DescriptorPool::decode(include_bytes!(
    ///         concat!(env!("OUT_DIR"), "/file_descriptor_set.bin")
    ///     ).as_ref()).unwrap()
    /// );
    /// ```
    pub fn descriptor_pool<P>(&mut self, expr: P) -> &mut Self
    where
        P: Into<String>,
    {
        self.descriptor_pool_expr = Some(expr.into());
        self
    }
    /// Set the file descriptor bytes to use for reflection.
    ///
    /// This should typically be the contents of the file at `file_descriptor_set_path`. For example,
    /// if this value is set to `crate::FILE_DESCRIPTOR_SET_BYTES`, then `lib.rs` should contain the following
    ///
    /// ```ignore
    /// const FILE_DESCRIPTOR_SET_BYTES: &'static [u8] = include_bytes!(concat!(env!("OUT_DIR"), "/file_descriptor_set.bin"));
    /// ```
    pub fn file_descriptor_set_bytes<P>(&mut self, expr: P) -> &mut Self
    where
        P: Into<String>,
    {
        self.file_descriptor_set_bytes_expr = Some(expr.into());
        self
    }
    /// Configure `config` to derive [`prost_reflect::ReflectMessage`] for all messages included in `protos`.
    /// This method does not generate prost-reflect compatible code,
    /// but `config` may be used later to compile protocol buffers independently of [`Builder`].
    /// `protos` and `includes` should be the same when [`prost_build::Config::compile_protos`] is called on `config`.
    ///
    /// ```ignore
    /// let mut config = Config::new();
    ///
    /// // Customize config here
    ///
    /// Builder::new()
    ///     .configure(&mut config, &["path/to/protobuf.proto"], &["path/to/include"])
    ///     .expect("Failed to configure for reflection");
    ///
    /// // Custom compilation process with `config`
    /// config.compile_protos(&["path/to/protobuf.proto"], &["path/to/includes"])
    ///     .expect("Failed to compile protocol buffers");
    /// ```
    pub fn configure(
        &mut self,
        config: &mut prost_build::Config,
        protos: &[impl AsRef<Path>],
        includes: &[impl AsRef<Path>],
    ) -> io::Result<()> {
        // Run protoc once here so the descriptor set file exists on disk,
        // then read it back to enumerate the message types.
        config
            .file_descriptor_set_path(&self.file_descriptor_set_path)
            .compile_protos(protos, includes)?;
        let buf = fs::read(&self.file_descriptor_set_path)?;
        let descriptor = DescriptorPool::decode(buf.as_ref()).expect("Invalid file descriptor");
        // Exactly one of the two expressions must be configured; it becomes
        // the `#[prost_reflect(...)]` attribute on every generated message.
        let pool_attribute = if let Some(descriptor_pool) = &self.descriptor_pool_expr {
            format!(r#"#[prost_reflect(descriptor_pool = "{descriptor_pool}")]"#,)
        } else if let Some(file_descriptor_set_bytes) = &self.file_descriptor_set_bytes_expr {
            format!(
                r#"#[prost_reflect(file_descriptor_set_bytes = "{file_descriptor_set_bytes}")]"#,
            )
        } else {
            return Err(io::Error::other(
                "either 'descriptor_pool' or 'file_descriptor_set_bytes' must be set",
            ));
        };
        // Attach the derive and attributes to every message in the set.
        for message in descriptor.all_messages() {
            let full_name = message.full_name();
            config
                .type_attribute(full_name, "#[derive(::prost_reflect::ReflectMessage)]")
                .type_attribute(
                    full_name,
                    format!(r#"#[prost_reflect(message_name = "{full_name}")]"#),
                )
                .type_attribute(full_name, &pool_attribute);
        }
        Ok(())
    }
    /// Compile protocol buffers into Rust with given [`prost_build::Config`].
    pub fn compile_protos_with_config(
        &mut self,
        mut config: prost_build::Config,
        protos: &[impl AsRef<Path>],
        includes: &[impl AsRef<Path>],
    ) -> io::Result<()> {
        // `configure` already ran protoc to produce the descriptor set; skip
        // the second protoc run and only generate code from it.
        self.configure(&mut config, protos, includes)?;
        config.skip_protoc_run().compile_protos(protos, includes)
    }
    /// Compile protocol buffers into Rust.
    pub fn compile_protos(
        &mut self,
        protos: &[impl AsRef<Path>],
        includes: &[impl AsRef<Path>],
    ) -> io::Result<()> {
        self.compile_protos_with_config(prost_build::Config::new(), protos, includes)
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    #[test]
    fn test_config() {
        let mut config = prost_build::Config::new();
        let mut builder = Builder::new();
        let tmpdir = std::env::temp_dir();
        config.out_dir(tmpdir.clone());
        builder
            .file_descriptor_set_path(tmpdir.join("file_descriptor_set.bin"))
            .descriptor_pool("crate::DESCRIPTOR_POOL")
            .compile_protos_with_config(config, &["src/test.proto"], &["src"])
            .unwrap();
        // The generated file is named after the proto package ("my.test").
        assert!(tmpdir.join("my.test.rs").exists());
        let buf = fs::read_to_string(tmpdir.join("my.test.rs")).unwrap();
        // One derive is emitted per message type; NOTE(review): assumes
        // src/test.proto defines exactly 3 messages.
        let num_derive = buf
            .lines()
            .filter(|line| line.trim_start() == "#[derive(::prost_reflect::ReflectMessage)]")
            .count();
        assert_eq!(num_derive, 3);
    }
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/lib.rs | prost-reflect/src/lib.rs | #![doc = include_str!("../doc/intro.md")]
#![doc = "# Example - decoding"]
#![doc = include_str!("../doc/decoding.md")]
#![cfg_attr(feature = "serde", doc = "# Example - JSON mapping")]
#![cfg_attr(feature = "serde", doc = include_str!("../doc/json.md"))]
#![cfg_attr(feature = "derive", doc = "# Implementing [`ReflectMessage`]")]
#![cfg_attr(feature = "derive", doc = include_str!("../doc/reflect.md"))]
#![cfg_attr(docsrs, feature(doc_cfg))]
#![warn(missing_debug_implementations, missing_docs)]
#![deny(unsafe_code)]
mod descriptor;
mod dynamic;
mod reflect;
pub use {prost, prost::bytes, prost_types};
pub use self::descriptor::{
Cardinality, DescriptorError, DescriptorPool, EnumDescriptor, EnumValueDescriptor,
ExtensionDescriptor, FieldDescriptor, FileDescriptor, Kind, MessageDescriptor,
MethodDescriptor, OneofDescriptor, ServiceDescriptor, Syntax,
};
pub use self::dynamic::{DynamicMessage, MapKey, SetFieldError, UnknownField, Value};
pub use self::reflect::ReflectMessage;
#[cfg(feature = "serde")]
pub use self::dynamic::{DeserializeOptions, SerializeOptions};
#[cfg(feature = "derive")]
#[cfg_attr(docsrs, doc(cfg(feature = "derive")))]
pub use prost_reflect_derive::ReflectMessage;
#[cfg(feature = "text-format")]
pub use self::dynamic::text_format;
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/unknown.rs | prost-reflect/src/dynamic/unknown.rs | use std::{fmt, slice, vec};
use prost::{
bytes::{Buf, BufMut, Bytes},
encoding::{self, DecodeContext, WireType},
DecodeError, Message,
};
use crate::dynamic::text_format;
/// An unknown field found when deserializing a protobuf message.
///
/// A field is unknown if the message descriptor does not contain a field with the given number. This is often the
/// result of a new field being added to the message definition.
///
/// The [`Message`](prost::Message) implementation of [`DynamicMessage`](crate::DynamicMessage) will preserve any unknown
/// fields.
#[derive(Debug, Clone, PartialEq)]
pub struct UnknownField {
    // The field number as read from the wire.
    number: u32,
    // The decoded payload, tagged by wire type.
    value: UnknownFieldValue,
}
/// The value of an unknown field in a protobuf message.
#[derive(Debug, Clone, PartialEq)]
pub(crate) enum UnknownFieldValue {
    /// An unknown field with the `Varint` wire type.
    Varint(u64),
    /// An unknown field with the `SixtyFourBit` wire type.
    SixtyFourBit([u8; 8]),
    /// An unknown field with the `LengthDelimited` wire type.
    LengthDelimited(Bytes),
    /// An unknown field with the group wire type.
    Group(UnknownFieldSet),
    /// An unknown field with the `ThirtyTwoBit` wire type.
    ThirtyTwoBit([u8; 4]),
}
// An ordered collection of unknown fields, preserved in decode order.
#[derive(Debug, Default, Clone, PartialEq)]
pub(crate) struct UnknownFieldSet {
    fields: Vec<UnknownField>,
}
impl UnknownField {
    /// The number of this field as found during decoding.
    pub fn number(&self) -> u32 {
        self.number
    }
    /// The wire type of this field as found during decoding.
    pub fn wire_type(&self) -> WireType {
        match &self.value {
            UnknownFieldValue::Varint(_) => WireType::Varint,
            UnknownFieldValue::SixtyFourBit(_) => WireType::SixtyFourBit,
            UnknownFieldValue::LengthDelimited(_) => WireType::LengthDelimited,
            // Groups are reported by their start tag.
            UnknownFieldValue::Group(_) => WireType::StartGroup,
            UnknownFieldValue::ThirtyTwoBit(_) => WireType::ThirtyTwoBit,
        }
    }
    pub(crate) fn value(&self) -> &UnknownFieldValue {
        &self.value
    }
    /// Encodes this field into its byte representation.
    pub fn encode<B>(&self, buf: &mut B)
    where
        B: BufMut,
    {
        match &self.value {
            UnknownFieldValue::Varint(value) => {
                encoding::encode_key(self.number, WireType::Varint, buf);
                encoding::encode_varint(*value, buf);
            }
            UnknownFieldValue::SixtyFourBit(value) => {
                encoding::encode_key(self.number, WireType::SixtyFourBit, buf);
                // Bytes are already stored in wire (little-endian) order.
                buf.put_slice(value);
            }
            UnknownFieldValue::LengthDelimited(value) => {
                encoding::bytes::encode(self.number, value, buf);
            }
            UnknownFieldValue::Group(value) => {
                encoding::group::encode(self.number, value, buf);
            }
            UnknownFieldValue::ThirtyTwoBit(value) => {
                encoding::encode_key(self.number, WireType::ThirtyTwoBit, buf);
                buf.put_slice(value);
            }
        }
    }
    /// Decodes an unknown field from the given buffer.
    ///
    /// This method will read the field number and wire type from the buffer. Normally, it is useful to know
    /// the field number before deciding whether to treat a field as unknown. See [`decode_value`](UnknownField::decode_value)
    /// if you have already read the number.
    ///
    /// # Examples
    ///
    /// ```
    /// # use prost_reflect::{DescriptorPool, UnknownField};
    /// # use prost::encoding::{DecodeContext, WireType};
    /// # let pool = DescriptorPool::decode(include_bytes!("../file_descriptor_set.bin").as_ref()).unwrap();
    /// # let message_descriptor = pool.get_message_by_name("google.protobuf.Empty").unwrap();
    /// let unknown_field = UnknownField::decode(&mut b"\x1a\x02\x10\x42".as_ref(), DecodeContext::default()).unwrap();
    /// assert_eq!(unknown_field.number(), 3);
    /// assert_eq!(unknown_field.wire_type(), WireType::LengthDelimited);
    /// ```
    pub fn decode<B>(buf: &mut B, ctx: DecodeContext) -> Result<Self, DecodeError>
    where
        B: Buf,
    {
        let (number, wire_type) = encoding::decode_key(buf)?;
        Self::decode_value(number, wire_type, buf, ctx)
    }
    /// Given a field number and wire type, decodes the value of an unknown field.
    ///
    /// This method assumes the field number and wire type have already been read from the buffer.
    /// See also [`decode`](UnknownField::decode).
    ///
    /// # Examples
    ///
    /// ```
    /// # use prost_reflect::{DescriptorPool, UnknownField};
    /// # use prost::encoding::{DecodeContext, WireType};
    /// # let pool = DescriptorPool::decode(include_bytes!("../file_descriptor_set.bin").as_ref()).unwrap();
    /// # let message_descriptor = pool.get_message_by_name("google.protobuf.Empty").unwrap();
    /// let unknown_field = UnknownField::decode_value(3, WireType::LengthDelimited, &mut b"\x02\x10\x42".as_ref(), DecodeContext::default()).unwrap();
    /// assert_eq!(unknown_field.number(), 3);
    /// assert_eq!(unknown_field.wire_type(), WireType::LengthDelimited);
    ///
    /// let mut buf = Vec::new();
    /// unknown_field.encode(&mut buf);
    /// assert_eq!(buf, b"\x1a\x02\x10\x42");
    /// ```
    pub fn decode_value<B>(
        number: u32,
        wire_type: WireType,
        buf: &mut B,
        ctx: DecodeContext,
    ) -> Result<Self, DecodeError>
    where
        B: Buf,
    {
        let value = match wire_type {
            WireType::Varint => {
                let value = encoding::decode_varint(buf)?;
                UnknownFieldValue::Varint(value)
            }
            WireType::SixtyFourBit => {
                let mut value = 0u64;
                encoding::fixed64::merge(wire_type, &mut value, buf, ctx)?;
                UnknownFieldValue::SixtyFourBit(value.to_le_bytes())
            }
            WireType::LengthDelimited => {
                let mut value = Bytes::default();
                encoding::bytes::merge(wire_type, &mut value, buf, ctx)?;
                UnknownFieldValue::LengthDelimited(value)
            }
            WireType::StartGroup => {
                let mut value = UnknownFieldSet::default();
                encoding::group::merge(number, wire_type, &mut value, buf, ctx)?;
                UnknownFieldValue::Group(value)
            }
            WireType::EndGroup => {
                // An end-group tag without a matching start tag is malformed.
                // This check always fails here, returning the decode error, so
                // the `unreachable!` below is never executed.
                encoding::check_wire_type(WireType::StartGroup, wire_type)?;
                unreachable!()
            }
            WireType::ThirtyTwoBit => {
                let mut value = 0u32;
                encoding::fixed32::merge(wire_type, &mut value, buf, ctx)?;
                UnknownFieldValue::ThirtyTwoBit(value.to_le_bytes())
            }
        };
        Ok(UnknownField { number, value })
    }
    /// Gets the length of this field when encoded to its byte representation.
    pub fn encoded_len(&self) -> usize {
        match &self.value {
            UnknownFieldValue::Varint(value) => {
                encoding::key_len(self.number) + encoding::encoded_len_varint(*value)
            }
            UnknownFieldValue::SixtyFourBit(value) => encoding::key_len(self.number) + value.len(),
            UnknownFieldValue::LengthDelimited(value) => {
                encoding::bytes::encoded_len(self.number, value)
            }
            UnknownFieldValue::Group(value) => encoding::group::encoded_len(self.number, value),
            UnknownFieldValue::ThirtyTwoBit(value) => encoding::key_len(self.number) + value.len(),
        }
    }
}
impl fmt::Display for UnknownField {
    /// Formats this unknown field using the protobuf text format.
    ///
    /// The protobuf format does not include type information, so the formatter will attempt to infer types.
    ///
    /// # Examples
    ///
    /// ```
    /// # use prost_reflect::{DescriptorPool, UnknownField};
    /// # use prost::encoding::DecodeContext;
    /// # let pool = DescriptorPool::decode(include_bytes!("../file_descriptor_set.bin").as_ref()).unwrap();
    /// # let message_descriptor = pool.get_message_by_name("google.protobuf.Empty").unwrap();
    /// let unknown_field = UnknownField::decode(&mut b"\x1a\x02\x10\x42".as_ref(), DecodeContext::default()).unwrap();
    /// assert_eq!(format!("{}", unknown_field), "3{2:66}");
    /// // The alternate format specifier may be used to indent the output
    /// assert_eq!(format!("{:#}", unknown_field), "3 {\n  2: 66\n}");
    /// ```
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Delegate to the text-format writer; `{:#}` enables pretty output.
        text_format::Writer::new(text_format::FormatOptions::new().pretty(f.alternate()), f)
            .fmt_unknown_field(self)
    }
}
impl UnknownFieldSet {
    // Returns `true` when the set contains no fields.
    pub(crate) fn is_empty(&self) -> bool {
        self.fields.is_empty()
    }
    // Iterates fields by reference, in insertion (decode) order.
    pub(crate) fn iter(&self) -> slice::Iter<'_, UnknownField> {
        self.fields.iter()
    }
    // Consumes the set, yielding owned fields in insertion order.
    pub(crate) fn into_iter(self) -> vec::IntoIter<UnknownField> {
        self.fields.into_iter()
    }
    // Appends a field; duplicate field numbers are kept, not merged.
    pub(crate) fn insert(&mut self, unknown: UnknownField) {
        self.fields.push(unknown);
    }
}
impl Message for UnknownFieldSet {
    /// Writes every field to `buf` in insertion order.
    fn encode_raw(&self, buf: &mut impl BufMut)
    where
        Self: Sized,
    {
        self.fields.iter().for_each(|field| field.encode(buf));
    }
    /// Decodes a single field value and appends it to the set.
    fn merge_field(
        &mut self,
        number: u32,
        wire_type: WireType,
        buf: &mut impl Buf,
        ctx: DecodeContext,
    ) -> Result<(), DecodeError>
    where
        Self: Sized,
    {
        self.fields
            .push(UnknownField::decode_value(number, wire_type, buf, ctx)?);
        Ok(())
    }
    /// Sum of the encoded lengths of all contained fields.
    fn encoded_len(&self) -> usize {
        self.fields.iter().map(UnknownField::encoded_len).sum()
    }
    /// Removes all fields from the set.
    fn clear(&mut self) {
        self.fields.clear();
    }
}
impl FromIterator<UnknownField> for UnknownFieldSet {
    /// Collects unknown fields into a new set, preserving iteration order.
    fn from_iter<T>(iter: T) -> Self
    where
        T: IntoIterator<Item = UnknownField>,
    {
        let fields: Vec<UnknownField> = iter.into_iter().collect();
        UnknownFieldSet { fields }
    }
}
#[cfg(test)]
mod tests {
    use prost::{
        bytes::Bytes,
        encoding::{DecodeContext, WireType},
    };
    use super::{UnknownField, UnknownFieldSet, UnknownFieldValue};
    // Re-encodes `value` and checks both the bytes and the reported length
    // match the original encoding.
    fn assert_roundtrip(expected: &[u8], value: &UnknownField) {
        assert_eq!(expected.len(), value.encoded_len());
        let mut actual = Vec::with_capacity(expected.len());
        value.encode(&mut actual);
        assert_eq!(expected, actual.as_slice());
    }
    // Each fixture ends in b"tail" to verify decoding stops exactly at the
    // value boundary and leaves trailing bytes in the buffer.
    #[test]
    fn sixty_four_bit() {
        let bytes = b"\x09\x9a\x99\x99\x99\x99\x99\xf1\x3ftail";
        let mut buf = bytes.as_ref();
        let value = UnknownField::decode(&mut buf, DecodeContext::default()).unwrap();
        assert_eq!(value.number(), 1);
        assert_eq!(value.wire_type(), WireType::SixtyFourBit);
        assert_eq!(
            value.value(),
            &UnknownFieldValue::SixtyFourBit(*b"\x9a\x99\x99\x99\x99\x99\xf1\x3f")
        );
        assert_eq!(buf, b"tail");
        assert_roundtrip(bytes.strip_suffix(buf).unwrap(), &value);
    }
    #[test]
    fn thirty_two_bit() {
        let bytes = b"\x15\xcd\xcc\x0c\x40tail";
        let mut buf = bytes.as_ref();
        let value = UnknownField::decode(&mut buf, DecodeContext::default()).unwrap();
        assert_eq!(value.number(), 2);
        assert_eq!(value.wire_type(), WireType::ThirtyTwoBit);
        assert_eq!(
            value.value(),
            &UnknownFieldValue::ThirtyTwoBit(*b"\xcd\xcc\x0c\x40")
        );
        assert_eq!(buf, b"tail");
        assert_roundtrip(bytes.strip_suffix(buf).unwrap(), &value);
    }
    #[test]
    fn varint() {
        let bytes = b"\x18\x03tail";
        let mut buf = bytes.as_ref();
        let value = UnknownField::decode(&mut buf, DecodeContext::default()).unwrap();
        assert_eq!(value.number(), 3);
        assert_eq!(value.wire_type(), WireType::Varint);
        assert_eq!(value.value(), &UnknownFieldValue::Varint(3));
        assert_eq!(buf, b"tail");
        assert_roundtrip(bytes.strip_suffix(buf).unwrap(), &value);
    }
    #[test]
    fn length_delimited() {
        let bytes = b"\x7a\x07\x69\xa6\xbe\x6d\xb6\xff\x58tail";
        let mut buf = bytes.as_ref();
        let value = UnknownField::decode(&mut buf, DecodeContext::default()).unwrap();
        assert_eq!(value.number(), 15);
        assert_eq!(value.wire_type(), WireType::LengthDelimited);
        assert_eq!(
            value.value(),
            &UnknownFieldValue::LengthDelimited(Bytes::from_static(
                b"\x69\xa6\xbe\x6d\xb6\xff\x58"
            ))
        );
        assert_eq!(buf, b"tail");
        assert_roundtrip(bytes.strip_suffix(buf).unwrap(), &value);
    }
    #[test]
    fn group() {
        // A group containing one length-delimited field and two varints,
        // including a repeated field number (2).
        let bytes = b"\x1b\x0a\x05\x68\x65\x6c\x6c\x6f\x10\x0a\x10\x0b\x1ctail";
        let mut buf = bytes.as_ref();
        let value = UnknownField::decode(&mut buf, DecodeContext::default()).unwrap();
        assert_eq!(value.number(), 3);
        assert_eq!(value.wire_type(), WireType::StartGroup);
        assert_eq!(
            value.value(),
            &UnknownFieldValue::Group(UnknownFieldSet::from_iter([
                UnknownField {
                    number: 1,
                    value: UnknownFieldValue::LengthDelimited(Bytes::from_static(b"hello"))
                },
                UnknownField {
                    number: 2,
                    value: UnknownFieldValue::Varint(10)
                },
                UnknownField {
                    number: 2,
                    value: UnknownFieldValue::Varint(11)
                },
            ]))
        );
        assert_eq!(buf, b"tail");
        assert_roundtrip(bytes.strip_suffix(buf).unwrap(), &value);
    }
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/fields.rs | prost-reflect/src/dynamic/fields.rs | use std::{
borrow::Cow,
collections::btree_map::{self, BTreeMap},
fmt,
mem::replace,
};
use crate::{
ExtensionDescriptor, FieldDescriptor, Kind, MessageDescriptor, OneofDescriptor, Value,
};
use super::{
unknown::{UnknownField, UnknownFieldSet},
Either,
};
// Abstraction over `FieldDescriptor` and `ExtensionDescriptor` so the dynamic
// field set can treat regular fields and extensions uniformly.
pub(crate) trait FieldDescriptorLike: fmt::Debug {
    #[cfg(feature = "text-format")]
    fn text_name(&self) -> &str;
    fn number(&self) -> u32;
    fn default_value(&self) -> Value;
    fn is_default_value(&self, value: &Value) -> bool;
    fn is_valid(&self, value: &Value) -> bool;
    fn containing_oneof(&self) -> Option<OneofDescriptor>;
    fn supports_presence(&self) -> bool;
    fn kind(&self) -> Kind;
    fn is_group(&self) -> bool;
    fn is_list(&self) -> bool;
    fn is_map(&self) -> bool;
    fn is_packed(&self) -> bool;
    fn is_packable(&self) -> bool;
    fn has(&self, value: &Value) -> bool {
        // Fields with explicit presence count as set once stored; otherwise a
        // field is only considered set when it differs from its default.
        self.supports_presence() || !self.is_default_value(value)
    }
}
/// The set of fields stored in a dynamic message (regular fields, extensions,
/// and unknown fields), keyed by field number.
#[derive(Default, Debug, Clone, PartialEq)]
pub(super) struct DynamicMessageFieldSet {
    fields: BTreeMap<u32, ValueOrUnknown>,
}
#[derive(Debug, Clone, PartialEq)]
pub(super) enum ValueOrUnknown {
    /// Placeholder for a moved-out slot; used to implement draining iterators.
    Taken,
    /// A protobuf value with known field type.
    Value(Value),
    /// One or more unknown fields.
    Unknown(UnknownFieldSet),
}
// A field value paired with how it is described: a regular field descriptor,
// an extension descriptor, or raw unknown fields with no descriptor.
pub(super) enum ValueAndDescriptor<'a> {
    Field(Cow<'a, Value>, FieldDescriptor),
    Extension(Cow<'a, Value>, ExtensionDescriptor),
    Unknown(&'a UnknownFieldSet),
}
impl DynamicMessageFieldSet {
    /// Looks up the typed value stored for field `number`.
    ///
    /// Returns `None` if the entry is absent, holds unknown fields, or was
    /// taken by a draining iterator.
    fn get_value(&self, number: u32) -> Option<&Value> {
        match self.fields.get(&number) {
            Some(ValueOrUnknown::Value(value)) => Some(value),
            Some(ValueOrUnknown::Unknown(_) | ValueOrUnknown::Taken) | None => None,
        }
    }
    /// Returns `true` if the field is populated (see [`FieldDescriptorLike::has`]).
    pub(super) fn has(&self, desc: &impl FieldDescriptorLike) -> bool {
        self.get_value(desc.number())
            .map(|value| desc.has(value))
            .unwrap_or(false)
    }
    /// Gets the value of the field, or an owned default (without inserting it)
    /// when the field is unset.
    pub(super) fn get(&self, desc: &impl FieldDescriptorLike) -> Cow<'_, Value> {
        match self.get_value(desc.number()) {
            Some(value) => Cow::Borrowed(value),
            None => Cow::Owned(desc.default_value()),
        }
    }
    /// Gets a mutable reference to the field's value, inserting the default
    /// value if it is not already set. Other members of the containing oneof
    /// (if any) are cleared first.
    pub(super) fn get_mut(&mut self, desc: &impl FieldDescriptorLike) -> &mut Value {
        self.clear_oneof_fields(desc);
        match self.fields.entry(desc.number()) {
            btree_map::Entry::Occupied(entry) => match entry.into_mut() {
                ValueOrUnknown::Value(value) => value,
                // Unknown/taken entries are overwritten with the default value.
                value => {
                    *value = ValueOrUnknown::Value(desc.default_value());
                    value.unwrap_value_mut()
                }
            },
            btree_map::Entry::Vacant(entry) => entry
                .insert(ValueOrUnknown::Value(desc.default_value()))
                .unwrap_value_mut(),
        }
    }
    /// Sets the value of the field, clearing any other members of its oneof.
    ///
    /// Debug builds assert that `value` is valid for the field's type.
    pub(super) fn set(&mut self, desc: &impl FieldDescriptorLike, value: Value) {
        debug_assert!(
            desc.is_valid(&value),
            "invalid value {value:?} for field {desc:?}",
        );
        self.clear_oneof_fields(desc);
        self.fields
            .insert(desc.number(), ValueOrUnknown::Value(value));
    }
    /// Removes all other fields belonging to the same oneof as `desc`, so that
    /// at most one member of a oneof is ever set.
    fn clear_oneof_fields(&mut self, desc: &impl FieldDescriptorLike) {
        if let Some(oneof_desc) = desc.containing_oneof() {
            for oneof_field in oneof_desc.fields() {
                if oneof_field.number() != desc.number() {
                    self.clear(&oneof_field);
                }
            }
        }
    }
    /// Records an unknown field for `number`, appending to any unknown fields
    /// already stored under that number.
    ///
    /// # Panics
    ///
    /// Panics if a known value is already stored for `number`.
    pub(crate) fn add_unknown(&mut self, number: u32, unknown: UnknownField) {
        match self.fields.entry(number) {
            btree_map::Entry::Occupied(mut entry) => match entry.get_mut() {
                ValueOrUnknown::Value(_) => {
                    panic!("expected no field to be found with number {number}")
                }
                value @ ValueOrUnknown::Taken => {
                    *value = ValueOrUnknown::Unknown(UnknownFieldSet::from_iter([unknown]))
                }
                ValueOrUnknown::Unknown(unknowns) => unknowns.insert(unknown),
            },
            btree_map::Entry::Vacant(entry) => {
                entry.insert(ValueOrUnknown::Unknown(UnknownFieldSet::from_iter([
                    unknown,
                ])));
            }
        }
    }
    /// Removes the field's entry entirely.
    pub(super) fn clear(&mut self, desc: &impl FieldDescriptorLike) {
        self.fields.remove(&desc.number());
    }
    /// Removes the field and returns its value if it was populated.
    pub(crate) fn take(&mut self, desc: &impl FieldDescriptorLike) -> Option<Value> {
        match self.fields.remove(&desc.number()) {
            Some(ValueOrUnknown::Value(value)) if desc.has(&value) => Some(value),
            _ => None,
        }
    }
    /// Iterates over the fields in the message.
    ///
    /// If `include_default` is `true`, fields with their default value will be included.
    /// If `index_order` is `true`, fields will be iterated in the order they were defined in the source code. Otherwise, they will be iterated in field number order.
    pub(crate) fn iter<'a>(
        &'a self,
        message: &'a MessageDescriptor,
        include_default: bool,
        index_order: bool,
    ) -> impl Iterator<Item = ValueAndDescriptor<'a>> + 'a {
        let field_descriptors = if index_order {
            Either::Left(message.fields_in_index_order())
        } else {
            Either::Right(message.fields())
        };
        let fields = field_descriptors
            .filter(move |f| {
                if include_default {
                    // Even with `include_default`, fields with explicit
                    // presence are only yielded when actually set.
                    !f.supports_presence() || self.has(f)
                } else {
                    self.has(f)
                }
            })
            .map(|f| ValueAndDescriptor::Field(self.get(&f), f));
        // Extension and unknown fields are appended after the regular fields,
        // in field number order.
        let extensions_unknowns =
            self.fields
                .iter()
                .filter_map(move |(&number, value)| match value {
                    ValueOrUnknown::Value(value) => {
                        if let Some(extension) = message.get_extension(number) {
                            if extension.has(value) {
                                Some(ValueAndDescriptor::Extension(
                                    Cow::Borrowed(value),
                                    extension,
                                ))
                            } else {
                                None
                            }
                        } else {
                            None
                        }
                    }
                    ValueOrUnknown::Unknown(unknown) => Some(ValueAndDescriptor::Unknown(unknown)),
                    ValueOrUnknown::Taken => None,
                });
        fields.chain(extensions_unknowns)
    }
    /// Iterates over populated regular fields as `(descriptor, value)` pairs.
    pub(crate) fn iter_fields<'a>(
        &'a self,
        message: &'a MessageDescriptor,
    ) -> impl Iterator<Item = (FieldDescriptor, &'a Value)> + 'a {
        self.fields.iter().filter_map(move |(&number, value)| {
            let value = match value {
                ValueOrUnknown::Value(value) => value,
                _ => return None,
            };
            let field = match message.get_field(number) {
                Some(field) => field,
                _ => return None,
            };
            if field.has(value) {
                Some((field, value))
            } else {
                None
            }
        })
    }
    /// Iterates over populated extension fields as `(descriptor, value)` pairs.
    pub(crate) fn iter_extensions<'a>(
        &'a self,
        message: &'a MessageDescriptor,
    ) -> impl Iterator<Item = (ExtensionDescriptor, &'a Value)> + 'a {
        self.fields.iter().filter_map(move |(&number, value)| {
            let value = match value {
                ValueOrUnknown::Value(value) => value,
                _ => return None,
            };
            let field = match message.get_extension(number) {
                Some(field) => field,
                _ => return None,
            };
            if field.has(value) {
                Some((field, value))
            } else {
                None
            }
        })
    }
    /// Iterates over all unknown fields, in field number order.
    pub(super) fn iter_unknown(&self) -> impl Iterator<Item = &'_ UnknownField> {
        self.fields.values().flat_map(move |value| match value {
            // Both arms must produce the same iterator type, hence the
            // empty-slice iterator for non-unknown entries.
            ValueOrUnknown::Taken | ValueOrUnknown::Value(_) => [].iter(),
            ValueOrUnknown::Unknown(unknowns) => unknowns.iter(),
        })
    }
    /// Iterates over populated regular fields, yielding mutable value references.
    pub(crate) fn iter_fields_mut<'a>(
        &'a mut self,
        message: &'a MessageDescriptor,
    ) -> impl Iterator<Item = (FieldDescriptor, &'a mut Value)> + 'a {
        self.fields.iter_mut().filter_map(move |(&number, value)| {
            let value = match value {
                ValueOrUnknown::Value(value) => value,
                _ => return None,
            };
            let field = match message.get_field(number) {
                Some(field) => field,
                _ => return None,
            };
            if field.has(value) {
                Some((field, value))
            } else {
                None
            }
        })
    }
    /// Iterates over populated extension fields, yielding mutable value references.
    pub(crate) fn iter_extensions_mut<'a>(
        &'a mut self,
        message: &'a MessageDescriptor,
    ) -> impl Iterator<Item = (ExtensionDescriptor, &'a mut Value)> + 'a {
        self.fields.iter_mut().filter_map(move |(&number, value)| {
            let value = match value {
                ValueOrUnknown::Value(value) => value,
                _ => return None,
            };
            let field = match message.get_extension(number) {
                Some(field) => field,
                _ => return None,
            };
            if field.has(value) {
                Some((field, value))
            } else {
                None
            }
        })
    }
    /// Drains populated regular fields, yielding owned `(descriptor, value)` pairs.
    ///
    /// Entries are replaced with `Taken` (rather than removed) so the value can
    /// be moved out while iterating with `iter_mut`.
    pub(crate) fn take_fields<'a>(
        &'a mut self,
        message: &'a MessageDescriptor,
    ) -> impl Iterator<Item = (FieldDescriptor, Value)> + 'a {
        self.fields
            .iter_mut()
            .filter_map(move |(&number, value_or_unknown)| {
                let value = match value_or_unknown {
                    ValueOrUnknown::Value(value) => value,
                    _ => return None,
                };
                let field = match message.get_field(number) {
                    Some(field) => field,
                    _ => return None,
                };
                if field.has(value) {
                    Some((
                        field,
                        replace(value_or_unknown, ValueOrUnknown::Taken).unwrap_value(),
                    ))
                } else {
                    None
                }
            })
    }
    /// Drains populated extension fields, yielding owned `(descriptor, value)` pairs.
    ///
    /// Entries are replaced with `Taken` (rather than removed) so the value can
    /// be moved out while iterating with `iter_mut`.
    pub(crate) fn take_extensions<'a>(
        &'a mut self,
        message: &'a MessageDescriptor,
    ) -> impl Iterator<Item = (ExtensionDescriptor, Value)> + 'a {
        self.fields
            .iter_mut()
            .filter_map(move |(&number, value_or_unknown)| {
                let value = match value_or_unknown {
                    ValueOrUnknown::Value(value) => value,
                    _ => return None,
                };
                let field = match message.get_extension(number) {
                    Some(field) => field,
                    _ => return None,
                };
                if field.has(value) {
                    Some((
                        field,
                        replace(value_or_unknown, ValueOrUnknown::Taken).unwrap_value(),
                    ))
                } else {
                    None
                }
            })
    }
    /// Drains all unknown fields, yielding them by value.
    pub(crate) fn take_unknown(&mut self) -> impl Iterator<Item = UnknownField> + '_ {
        self.fields
            .values_mut()
            .flat_map(move |value_or_unknown| match value_or_unknown {
                ValueOrUnknown::Unknown(_) => replace(value_or_unknown, ValueOrUnknown::Taken)
                    .unwrap_unknown()
                    .into_iter(),
                // Empty `Vec` iterator matches the type of the arm above.
                _ => vec![].into_iter(),
            })
    }
    /// Removes every entry, including unknown fields.
    pub(super) fn clear_all(&mut self) {
        self.fields.clear();
    }
}
impl ValueOrUnknown {
    /// Borrows the contained [`Value`] mutably.
    ///
    /// Callers must guarantee the entry holds the `Value` variant; anything
    /// else is a logic error.
    fn unwrap_value_mut(&mut self) -> &mut Value {
        if let ValueOrUnknown::Value(value) = self {
            value
        } else {
            unreachable!()
        }
    }
    /// Consumes the entry and returns the contained [`Value`].
    ///
    /// Callers must guarantee the entry holds the `Value` variant.
    fn unwrap_value(self) -> Value {
        if let ValueOrUnknown::Value(value) = self {
            value
        } else {
            unreachable!()
        }
    }
    /// Consumes the entry and returns the contained [`UnknownFieldSet`].
    ///
    /// Callers must guarantee the entry holds the `Unknown` variant.
    fn unwrap_unknown(self) -> UnknownFieldSet {
        if let ValueOrUnknown::Unknown(unknowns) = self {
            unknowns
        } else {
            unreachable!()
        }
    }
}
// Regular message fields: every method delegates to the inherent method of the
// same name on `FieldDescriptor` (inherent methods take precedence over these
// trait methods at the call sites below).
impl FieldDescriptorLike for FieldDescriptor {
    #[cfg(feature = "text-format")]
    fn text_name(&self) -> &str {
        // Regular fields are identified by their plain name in the text format.
        self.name()
    }
    fn number(&self) -> u32 {
        self.number()
    }
    fn default_value(&self) -> Value {
        Value::default_value_for_field(self)
    }
    fn is_default_value(&self, value: &Value) -> bool {
        value.is_default_for_field(self)
    }
    fn is_valid(&self, value: &Value) -> bool {
        value.is_valid_for_field(self)
    }
    fn containing_oneof(&self) -> Option<OneofDescriptor> {
        self.containing_oneof()
    }
    fn supports_presence(&self) -> bool {
        self.supports_presence()
    }
    fn kind(&self) -> Kind {
        self.kind()
    }
    fn is_group(&self) -> bool {
        self.is_group()
    }
    fn is_list(&self) -> bool {
        self.is_list()
    }
    fn is_map(&self) -> bool {
        self.is_map()
    }
    fn is_packed(&self) -> bool {
        self.is_packed()
    }
    fn is_packable(&self) -> bool {
        self.is_packable()
    }
}
// Extension fields: delegation mirrors the `FieldDescriptor` impl, except for
// the text-format name and oneof membership.
impl FieldDescriptorLike for ExtensionDescriptor {
    #[cfg(feature = "text-format")]
    fn text_name(&self) -> &str {
        // NOTE(review): extensions use `json_name()` here, unlike regular
        // fields which use `name()` — presumably to match the text-format
        // bracketed extension syntax; confirm against the text_format module.
        self.json_name()
    }
    fn number(&self) -> u32 {
        self.number()
    }
    fn default_value(&self) -> Value {
        Value::default_value_for_extension(self)
    }
    fn is_default_value(&self, value: &Value) -> bool {
        value.is_default_for_extension(self)
    }
    fn is_valid(&self, value: &Value) -> bool {
        value.is_valid_for_extension(self)
    }
    fn containing_oneof(&self) -> Option<OneofDescriptor> {
        // Extensions are declared outside the message body and therefore can
        // never be members of a oneof.
        None
    }
    fn supports_presence(&self) -> bool {
        self.supports_presence()
    }
    fn kind(&self) -> Kind {
        self.kind()
    }
    fn is_group(&self) -> bool {
        self.is_group()
    }
    fn is_list(&self) -> bool {
        self.is_list()
    }
    fn is_map(&self) -> bool {
        self.is_map()
    }
    fn is_packed(&self) -> bool {
        self.is_packed()
    }
    fn is_packable(&self) -> bool {
        self.is_packable()
    }
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/mod.rs | prost-reflect/src/dynamic/mod.rs | /// Parsing and formatting for the protobuf [text format](https://developers.google.com/protocol-buffers/docs/text-format-spec).
///
/// This module contains options for customizing the text format output. See the associated functions [`DynamicMessage::parse_text_format()`] and
/// [`DynamicMessage::to_text_format()`].
#[cfg(feature = "text-format")]
#[cfg_attr(docsrs, doc(cfg(feature = "text-format")))]
pub mod text_format;
mod fields;
mod message;
#[cfg(feature = "serde")]
mod serde;
#[cfg(not(feature = "text-format"))]
mod text_format;
mod unknown;
use std::{borrow::Cow, collections::HashMap, error::Error, fmt};
#[cfg(feature = "serde")]
pub use self::serde::{DeserializeOptions, SerializeOptions};
pub use self::unknown::UnknownField;
pub(crate) use self::fields::FieldDescriptorLike;
use prost::{
bytes::{Buf, Bytes},
DecodeError, Message,
};
use self::fields::DynamicMessageFieldSet;
use crate::{
descriptor::Kind, ExtensionDescriptor, FieldDescriptor, MessageDescriptor, ReflectMessage,
};
/// [`DynamicMessage`] provides encoding, decoding and reflection of a protobuf message.
///
/// It wraps a [`MessageDescriptor`] and the [`Value`] for each field of the message, and implements
/// [`Message`][`prost::Message`].
#[derive(Debug, Clone, PartialEq)]
pub struct DynamicMessage {
    /// Descriptor of the message type this instance conforms to.
    desc: MessageDescriptor,
    /// Values for regular, extension and unknown fields, keyed by field number.
    fields: DynamicMessageFieldSet,
}
/// A dynamically-typed protobuf value.
///
/// Note this type may map to multiple possible protobuf wire formats, so it must be
/// serialized as part of a DynamicMessage.
#[derive(Debug, Clone, PartialEq)]
pub enum Value {
    /// A boolean value, encoded as the `bool` protobuf type.
    Bool(bool),
    /// A 32-bit signed integer, encoded as one of the `int32`, `sint32` or `sfixed32` protobuf types.
    I32(i32),
    /// A 64-bit signed integer, encoded as one of the `int64`, `sint64` or `sfixed64` protobuf types.
    I64(i64),
    /// A 32-bit unsigned integer, encoded as one of the `uint32` or `ufixed32` protobuf types.
    U32(u32),
    /// A 64-bit unsigned integer, encoded as one of the `uint64` or `ufixed64` protobuf types.
    U64(u64),
    /// A 32-bit floating point number, encoded as the `float` protobuf type.
    F32(f32),
    /// A 64-bit floating point number, encoded as the `double` protobuf type.
    F64(f64),
    /// A string, encoded as the `string` protobuf type.
    String(String),
    /// A byte string, encoded as the `bytes` protobuf type.
    Bytes(Bytes),
    /// An enumeration value, encoded as a protobuf enum.
    ///
    /// The number is not required to match one of the enum's defined values
    /// (see [`Value::is_valid_for_field`]).
    EnumNumber(i32),
    /// A protobuf message.
    Message(DynamicMessage),
    /// A list of values, encoded as a protobuf repeated field.
    List(Vec<Value>),
    /// A map of values, encoded as a protobuf map field.
    Map(HashMap<MapKey, Value>),
}
/// A dynamically-typed key for a protobuf map.
///
/// The variants mirror the types protobuf permits as map keys (integral,
/// boolean and string types); floating point, bytes and message keys are not
/// representable.
#[derive(Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum MapKey {
    /// A boolean value, encoded as the `bool` protobuf type.
    Bool(bool),
    /// A 32-bit signed integer, encoded as one of the `int32`, `sint32` or `sfixed32` protobuf types.
    I32(i32),
    /// A 64-bit signed integer, encoded as one of the `int64`, `sint64` or `sfixed64` protobuf types.
    I64(i64),
    /// A 32-bit unsigned integer, encoded as one of the `uint32` or `ufixed32` protobuf types.
    U32(u32),
    /// A 64-bit unsigned integer, encoded as one of the `uint64` or `ufixed64` protobuf types.
    U64(u64),
    /// A string, encoded as the `string` protobuf type.
    String(String),
}
/// Error type returned by [`DynamicMessage::try_set_field()`] and its
/// by-name/by-number variants.
#[derive(Debug, Clone, PartialEq)]
pub enum SetFieldError {
    /// The field was not found.
    NotFound,
    /// The value type was not compatible with the field type (see [`Value::is_valid_for_field`]).
    InvalidType {
        /// The descriptor for the field which could not be set.
        field: FieldDescriptor,
        /// The invalid value.
        value: Value,
    },
}
impl DynamicMessage {
    /// Creates a new, empty instance of [`DynamicMessage`] for the message type specified by the [`MessageDescriptor`].
    pub fn new(desc: MessageDescriptor) -> Self {
        DynamicMessage {
            fields: DynamicMessageFieldSet::default(),
            desc,
        }
    }
    /// Decodes an instance of the message type specified by the [`MessageDescriptor`] from the buffer and merges it into a
    /// new instance of [`DynamicMessage`].
    ///
    /// # Examples
    ///
    /// ```
    /// # use prost::Message;
    /// # use prost_types::FileDescriptorSet;
    /// # use prost_reflect::{DynamicMessage, DescriptorPool, Value};
    /// # let pool = DescriptorPool::decode(include_bytes!("../file_descriptor_set.bin").as_ref()).unwrap();
    /// # let message_descriptor = pool.get_message_by_name("package.MyMessage").unwrap();
    /// let dynamic_message = DynamicMessage::decode(message_descriptor, b"\x08\x96\x01".as_ref()).unwrap();
    /// assert_eq!(dynamic_message.get_field_by_name("foo").unwrap().as_ref(), &Value::I32(150));
    /// ```
    pub fn decode<B>(desc: MessageDescriptor, buf: B) -> Result<Self, DecodeError>
    where
        B: Buf,
    {
        // Start from an empty message and merge the wire bytes into it.
        let mut message = DynamicMessage::new(desc);
        message.merge(buf)?;
        Ok(message)
    }
    /// Returns `true` if this message has the given field set.
    ///
    /// If the field type supports distinguishing whether a value has been set (see [`supports_presence`][FieldDescriptor::supports_presence]),
    /// such as for messages, then this method returns `true` only if a value has been set. For
    /// other types, such as integers, it returns `true` if the value is set to a non-default value.
    ///
    /// If this method returns `false`, then the field will not be included in the encoded bytes
    /// of this message.
    ///
    /// # Examples
    ///
    /// This example uses the following message definition:
    ///
    /// ```lang-protobuf
    /// message MyMessage {
    ///     int32 foo = 1;
    ///
    ///     oneof optional {
    ///         int32 bar = 2;
    ///     }
    /// }
    /// ```
    ///
    /// ```
    /// # use prost::Message;
    /// # use prost_types::FileDescriptorSet;
    /// # use prost_reflect::{DynamicMessage, DescriptorPool, Value};
    /// # let pool = DescriptorPool::decode(include_bytes!("../file_descriptor_set.bin").as_ref()).unwrap();
    /// # let message_descriptor = pool.get_message_by_name("package.MyMessage").unwrap();
    /// let foo = message_descriptor.get_field_by_name("foo").unwrap();
    /// let bar = message_descriptor.get_field_by_name("bar").unwrap();
    ///
    /// assert!(!foo.supports_presence());
    /// assert!(bar.supports_presence());
    ///
    /// let mut dynamic_message = DynamicMessage::new(message_descriptor);
    /// assert!(!dynamic_message.has_field(&foo));
    /// assert!(!dynamic_message.has_field(&bar));
    ///
    /// dynamic_message.set_field(&foo, Value::I32(0));
    /// dynamic_message.set_field(&bar, Value::I32(0));
    /// assert!(!dynamic_message.has_field(&foo));
    /// assert!(dynamic_message.has_field(&bar));
    ///
    /// dynamic_message.set_field(&foo, Value::I32(5));
    /// dynamic_message.set_field(&bar, Value::I32(6));
    /// assert!(dynamic_message.has_field(&foo));
    /// assert!(dynamic_message.has_field(&bar));
    /// ```
    pub fn has_field(&self, field_desc: &FieldDescriptor) -> bool {
        self.fields.has(field_desc)
    }
    /// Gets the value of the given field, or the default value if it is unset.
    pub fn get_field(&self, field_desc: &FieldDescriptor) -> Cow<'_, Value> {
        self.fields.get(field_desc)
    }
    /// Gets a mutable reference to the value of the given field. If the field is not set,
    /// it is inserted with its default value.
    pub fn get_field_mut(&mut self, field_desc: &FieldDescriptor) -> &mut Value {
        self.fields.get_mut(field_desc)
    }
    /// Sets the value of the given field.
    ///
    /// # Panics
    ///
    /// This method may panic if the value type is not compatible with the field type, as defined
    /// by [`Value::is_valid_for_field`]. Consider using [`try_set_field()`](DynamicMessage::try_set_field)
    /// for a non-panicking version.
    pub fn set_field(&mut self, field_desc: &FieldDescriptor, value: Value) {
        self.try_set_field(field_desc, value).unwrap()
    }
    /// Tries to set the value of the given field, returning an error if the value is an invalid type.
    ///
    /// # Examples
    ///
    /// ```
    /// # use prost::Message;
    /// # use prost_types::FileDescriptorSet;
    /// # use prost_reflect::{DynamicMessage, DescriptorPool, Value, SetFieldError};
    /// # let pool = DescriptorPool::decode(include_bytes!("../file_descriptor_set.bin").as_ref()).unwrap();
    /// # let message_descriptor = pool.get_message_by_name("package.MyMessage").unwrap();
    /// let mut dynamic_message = DynamicMessage::new(message_descriptor.clone());
    /// let foo = message_descriptor.get_field_by_name("foo").unwrap();
    /// assert_eq!(dynamic_message.try_set_field(&foo, Value::I32(5)), Ok(()));
    /// assert_eq!(dynamic_message.try_set_field(&foo, Value::String("bar".to_owned())), Err(SetFieldError::InvalidType {
    ///     field: foo,
    ///     value: Value::String("bar".to_owned()),
    /// }));
    /// ```
    pub fn try_set_field(
        &mut self,
        field_desc: &FieldDescriptor,
        value: Value,
    ) -> Result<(), SetFieldError> {
        if value.is_valid_for_field(field_desc) {
            self.fields.set(field_desc, value);
            Ok(())
        } else {
            Err(SetFieldError::InvalidType {
                field: field_desc.clone(),
                value,
            })
        }
    }
    /// Clears the given field.
    ///
    /// After calling this method, `has_field` will return false for the field,
    /// and it will not be included in the encoded bytes of this message.
    pub fn clear_field(&mut self, field_desc: &FieldDescriptor) {
        self.fields.clear(field_desc);
    }
    /// Returns `true` if this message has a field set with the given number.
    ///
    /// See [`has_field`][Self::has_field] for more details.
    pub fn has_field_by_number(&self, number: u32) -> bool {
        self.desc
            .get_field(number)
            .is_some_and(|field_desc| self.has_field(&field_desc))
    }
    /// Gets the value of the field with the given number, or the default value if it is unset.
    ///
    /// If the message has no field with the given number, `None` is returned.
    ///
    /// See [`get_field`][Self::get_field] for more details.
    pub fn get_field_by_number(&self, number: u32) -> Option<Cow<'_, Value>> {
        self.desc
            .get_field(number)
            .map(|field_desc| self.get_field(&field_desc))
    }
    /// Gets a mutable reference to the value of the field with the given number. If the field
    /// is not set, it is inserted with its default value.
    ///
    /// If the message has no field with the given number, `None` is returned.
    ///
    /// See [`get_field_mut`][Self::get_field_mut] for more details.
    pub fn get_field_by_number_mut(&mut self, number: u32) -> Option<&mut Value> {
        self.desc
            .get_field(number)
            .map(move |field_desc| self.get_field_mut(&field_desc))
    }
    /// Sets the value of the field with number `number`.
    ///
    /// If no field with the given number exists, this method does nothing.
    ///
    /// See [`set_field`][Self::set_field] for more details.
    pub fn set_field_by_number(&mut self, number: u32, value: Value) {
        if let Some(field_desc) = self.desc.get_field(number) {
            self.set_field(&field_desc, value)
        }
    }
    /// Tries to set the value of the field with number `number`, returning an error if the value is an invalid type or does not exist.
    ///
    /// # Examples
    ///
    /// ```
    /// # use prost::Message;
    /// # use prost_types::FileDescriptorSet;
    /// # use prost_reflect::{DynamicMessage, DescriptorPool, Value, SetFieldError};
    /// # let pool = DescriptorPool::decode(include_bytes!("../file_descriptor_set.bin").as_ref()).unwrap();
    /// # let message_descriptor = pool.get_message_by_name("package.MyMessage").unwrap();
    /// let mut dynamic_message = DynamicMessage::new(message_descriptor.clone());
    /// assert_eq!(dynamic_message.try_set_field_by_number(1, Value::I32(5)), Ok(()));
    /// assert_eq!(dynamic_message.try_set_field_by_number(1, Value::String("bar".to_owned())), Err(SetFieldError::InvalidType {
    ///     field: message_descriptor.get_field(1).unwrap(),
    ///     value: Value::String("bar".to_owned()),
    /// }));
    /// assert_eq!(dynamic_message.try_set_field_by_number(42, Value::I32(5)), Err(SetFieldError::NotFound));
    /// ```
    pub fn try_set_field_by_number(
        &mut self,
        number: u32,
        value: Value,
    ) -> Result<(), SetFieldError> {
        if let Some(field_desc) = self.desc.get_field(number) {
            self.try_set_field(&field_desc, value)
        } else {
            Err(SetFieldError::NotFound)
        }
    }
    /// Clears the field with the given number.
    ///
    /// If no field with the given number exists, this method does nothing.
    ///
    /// See [`clear_field`][Self::clear_field] for more details.
    pub fn clear_field_by_number(&mut self, number: u32) {
        if let Some(field_desc) = self.desc.get_field(number) {
            self.clear_field(&field_desc);
        }
    }
    /// Returns `true` if this message has a field set with the given name.
    ///
    /// See [`has_field`][Self::has_field] for more details.
    pub fn has_field_by_name(&self, name: &str) -> bool {
        self.desc
            .get_field_by_name(name)
            .is_some_and(|field_desc| self.has_field(&field_desc))
    }
    /// Gets the value of the field with the given name, or the default value if it is unset.
    ///
    /// If the message has no field with the given name, `None` is returned.
    ///
    /// See [`get_field`][Self::get_field] for more details.
    pub fn get_field_by_name(&self, name: &str) -> Option<Cow<'_, Value>> {
        self.desc
            .get_field_by_name(name)
            .map(|field_desc| self.get_field(&field_desc))
    }
    /// Gets a mutable reference to the value of the field with the given name. If the field
    /// is not set, it is inserted with its default value.
    ///
    /// If the message has no field with the given name, `None` is returned.
    ///
    /// See [`get_field_mut`][Self::get_field_mut] for more details.
    pub fn get_field_by_name_mut(&mut self, name: &str) -> Option<&mut Value> {
        self.desc
            .get_field_by_name(name)
            .map(move |field_desc| self.get_field_mut(&field_desc))
    }
    /// Sets the value of the field with name `name`.
    ///
    /// If no field with the given name exists, this method does nothing.
    ///
    /// See [`set_field`][Self::set_field] for more details.
    pub fn set_field_by_name(&mut self, name: &str, value: Value) {
        if let Some(field_desc) = self.desc.get_field_by_name(name) {
            self.set_field(&field_desc, value)
        }
    }
    /// Tries to set the value of the field with name `name`, returning an error if the value is an invalid type or does not exist.
    ///
    /// # Examples
    ///
    /// ```
    /// # use prost::Message;
    /// # use prost_types::FileDescriptorSet;
    /// # use prost_reflect::{DynamicMessage, DescriptorPool, Value, SetFieldError};
    /// # let pool = DescriptorPool::decode(include_bytes!("../file_descriptor_set.bin").as_ref()).unwrap();
    /// # let message_descriptor = pool.get_message_by_name("package.MyMessage").unwrap();
    /// let mut dynamic_message = DynamicMessage::new(message_descriptor.clone());
    /// assert_eq!(dynamic_message.try_set_field_by_name("foo", Value::I32(5)), Ok(()));
    /// assert_eq!(dynamic_message.try_set_field_by_name("foo", Value::String("bar".to_owned())), Err(SetFieldError::InvalidType {
    ///     field: message_descriptor.get_field_by_name("foo").unwrap(),
    ///     value: Value::String("bar".to_owned()),
    /// }));
    /// assert_eq!(dynamic_message.try_set_field_by_name("notfound", Value::I32(5)), Err(SetFieldError::NotFound));
    /// ```
    pub fn try_set_field_by_name(&mut self, name: &str, value: Value) -> Result<(), SetFieldError> {
        if let Some(field_desc) = self.desc.get_field_by_name(name) {
            self.try_set_field(&field_desc, value)
        } else {
            Err(SetFieldError::NotFound)
        }
    }
    /// Clears the field with the given name.
    ///
    /// If no field with the given name exists, this method does nothing.
    ///
    /// See [`clear_field`][Self::clear_field] for more details.
    pub fn clear_field_by_name(&mut self, name: &str) {
        if let Some(field_desc) = self.desc.get_field_by_name(name) {
            self.clear_field(&field_desc);
        }
    }
    /// Clears the value for the given field, and returns it.
    ///
    /// Returns the value if [`has_field`](Self::has_field) was `true`, or `None` otherwise.
    pub fn take_field(&mut self, field_desc: &FieldDescriptor) -> Option<Value> {
        self.fields.take(field_desc)
    }
    /// Clears the value for the field with the given name, and returns it.
    ///
    /// Returns the value if [`has_field_by_name`](Self::has_field_by_name) was `true`, or `None` otherwise.
    pub fn take_field_by_name(&mut self, name: &str) -> Option<Value> {
        if let Some(field_desc) = self.desc.get_field_by_name(name) {
            self.fields.take(&field_desc)
        } else {
            None
        }
    }
    /// Clears the value for the field with the given number, and returns it.
    ///
    /// Returns the value if [`has_field_by_number`](Self::has_field_by_number) was `true`, or `None` otherwise.
    pub fn take_field_by_number(&mut self, number: u32) -> Option<Value> {
        if let Some(field_desc) = self.desc.get_field(number) {
            self.fields.take(&field_desc)
        } else {
            None
        }
    }
    /// Returns `true` if this message has the given extension field set.
    ///
    /// See [`has_field`][Self::has_field] for more details.
    pub fn has_extension(&self, extension_desc: &ExtensionDescriptor) -> bool {
        self.fields.has(extension_desc)
    }
    /// Gets the value of the given extension field, or the default value if it is unset.
    ///
    /// See [`get_field`][Self::get_field] for more details.
    pub fn get_extension(&self, extension_desc: &ExtensionDescriptor) -> Cow<'_, Value> {
        self.fields.get(extension_desc)
    }
    /// Gets a mutable reference to the value of the given extension field. If the
    /// field is not set, it is inserted with its default value.
    ///
    /// See [`get_field_mut`][Self::get_field_mut] for more details.
    pub fn get_extension_mut(&mut self, extension_desc: &ExtensionDescriptor) -> &mut Value {
        self.fields.get_mut(extension_desc)
    }
    /// Sets the value of the given extension field.
    ///
    /// See [`set_field`][Self::set_field] for more details.
    pub fn set_extension(&mut self, extension_desc: &ExtensionDescriptor, value: Value) {
        self.fields.set(extension_desc, value)
    }
    /// Clears the given extension field.
    ///
    /// See [`clear_field`][Self::clear_field] for more details.
    pub fn clear_extension(&mut self, extension_desc: &ExtensionDescriptor) {
        self.fields.clear(extension_desc)
    }
    /// Clears the value for the given extension field, and returns it.
    ///
    /// Returns the value if [`has_extension`](Self::has_extension) was `true`, or `None` otherwise.
    pub fn take_extension(&mut self, extension_desc: &ExtensionDescriptor) -> Option<Value> {
        self.fields.take(extension_desc)
    }
    /// Gets an iterator over all fields of this message.
    ///
    /// The iterator will yield all fields for which [`has_field`](Self::has_field) returns `true`.
    pub fn fields(&self) -> impl Iterator<Item = (FieldDescriptor, &'_ Value)> {
        self.fields.iter_fields(&self.desc)
    }
    /// Gets an iterator returning mutable references to all fields of this message.
    ///
    /// The iterator will yield all fields for which [`has_field`](Self::has_field) returns `true`.
    pub fn fields_mut(&mut self) -> impl Iterator<Item = (FieldDescriptor, &'_ mut Value)> {
        self.fields.iter_fields_mut(&self.desc)
    }
    /// Clears all fields from the message and returns an iterator yielding the values.
    ///
    /// The iterator will yield all fields for which [`has_field`](Self::has_field) returns `true`.
    ///
    /// If the iterator is dropped before completing the iteration, it is unspecified how many fields are removed.
    pub fn take_fields(&mut self) -> impl Iterator<Item = (FieldDescriptor, Value)> + '_ {
        self.fields.take_fields(&self.desc)
    }
    /// Gets an iterator over all extension fields of this message.
    ///
    /// The iterator will yield all extension fields for which [`has_extension`](Self::has_extension) returns `true`.
    pub fn extensions(&self) -> impl Iterator<Item = (ExtensionDescriptor, &'_ Value)> {
        self.fields.iter_extensions(&self.desc)
    }
    /// Gets an iterator returning mutable references to all extension fields of this message.
    ///
    /// The iterator will yield all extension fields for which [`has_extension`](Self::has_extension) returns `true`.
    pub fn extensions_mut(&mut self) -> impl Iterator<Item = (ExtensionDescriptor, &'_ mut Value)> {
        self.fields.iter_extensions_mut(&self.desc)
    }
    /// Clears all extension fields from the message and returns an iterator yielding the values.
    ///
    /// The iterator will yield all extension fields for which [`has_extension`](Self::has_extension) returns `true`.
    ///
    /// If the iterator is dropped before completing the iteration, it is unspecified how many fields are removed.
    pub fn take_extensions(&mut self) -> impl Iterator<Item = (ExtensionDescriptor, Value)> + '_ {
        self.fields.take_extensions(&self.desc)
    }
    /// Gets an iterator over unknown fields for this message.
    ///
    /// A field is unknown if the message descriptor does not contain a field with the given number. This is often the
    /// result of a new field being added to the message definition.
    ///
    /// Unknown fields are preserved when decoding and re-encoding a message.
    pub fn unknown_fields(&self) -> impl Iterator<Item = &'_ UnknownField> {
        self.fields.iter_unknown()
    }
    /// Clears all unknown fields from the message and returns an iterator yielding the values.
    ///
    /// If the iterator is dropped before completing the iteration, it is unspecified how many fields are removed.
    pub fn take_unknown_fields(&mut self) -> impl Iterator<Item = UnknownField> + '_ {
        self.fields.take_unknown()
    }
    /// Merge a strongly-typed message into this one.
    ///
    /// The message should be compatible with the type specified by
    /// [`descriptor`][Self::descriptor], or the merge will likely fail with
    /// a [`DecodeError`].
    pub fn transcode_from<T>(&mut self, value: &T) -> Result<(), DecodeError>
    where
        T: Message,
    {
        // Round-trip through the protobuf wire format: encode the typed
        // message, then merge the bytes into this dynamic message.
        let buf = value.encode_to_vec();
        self.merge(buf.as_slice())
    }
    /// Convert this dynamic message into a strongly typed value.
    ///
    /// The message should be compatible with the type specified by
    /// [`descriptor`][Self::descriptor], or the conversion will likely fail with
    /// a [`DecodeError`].
    pub fn transcode_to<T>(&self) -> Result<T, DecodeError>
    where
        T: Message + Default,
    {
        // Round-trip through the protobuf wire format: encode this dynamic
        // message, then decode the bytes as the target type.
        let buf = self.encode_to_vec();
        T::decode(buf.as_slice())
    }
}
impl ReflectMessage for DynamicMessage {
    fn descriptor(&self) -> MessageDescriptor {
        self.desc.clone()
    }
    fn transcode_to_dynamic(&self) -> DynamicMessage
    where
        Self: Sized,
    {
        // Already dynamic, so no wire-format round trip is needed.
        self.clone()
    }
}
impl Value {
    /// Returns the default value for the given protobuf field.
    ///
    /// See [`FieldDescriptor::default_value`] for more details.
    pub fn default_value_for_field(field_desc: &FieldDescriptor) -> Self {
        field_desc.default_value()
    }
    /// Returns the default value for the given protobuf extension field.
    ///
    /// See [`ExtensionDescriptor::default_value`] for more details.
    pub fn default_value_for_extension(extension_desc: &ExtensionDescriptor) -> Self {
        extension_desc.default_value()
    }
    /// Returns the default value for the given protobuf type `kind`.
    ///
    /// See [`Kind::default_value`] for more details.
    pub fn default_value(kind: &Kind) -> Self {
        kind.default_value()
    }
/// Returns `true` if this is the default value for the given protobuf field.
pub fn is_default_for_field(&self, field_desc: &FieldDescriptor) -> bool {
*self == Value::default_value_for_field(field_desc)
}
/// Returns `true` if this is the default value for the given protobuf extension field.
pub fn is_default_for_extension(&self, extension_desc: &ExtensionDescriptor) -> bool {
*self == Value::default_value_for_extension(extension_desc)
}
/// Returns `true` if this is the default value for the given protobuf type `kind`.
pub fn is_default(&self, kind: &Kind) -> bool {
*self == kind.default_value()
}
/// Returns `true` if this value can be set for a given field.
///
/// Note this only checks if the value can be successfully encoded. It doesn't
/// check, for example, that enum values are in the defined range.
pub fn is_valid_for_field(&self, field_desc: &FieldDescriptor) -> bool {
match (self, field_desc.kind()) {
(Value::List(list), kind) if field_desc.is_list() => {
list.iter().all(|value| value.is_valid(&kind))
}
(Value::Map(map), Kind::Message(message_desc)) if field_desc.is_map() => {
let key_desc = message_desc.map_entry_key_field().kind();
let value_desc = message_desc.map_entry_value_field();
map.iter().all(|(key, value)| {
key.is_valid(&key_desc) && value.is_valid_for_field(&value_desc)
})
}
(value, kind) => value.is_valid(&kind),
}
}
/// Returns `true` if this value can be set for a given extension field.
///
/// See [`is_valid_for_field`][Value::is_valid_for_field] for more details.
pub fn is_valid_for_extension(&self, extension_desc: &ExtensionDescriptor) -> bool {
match (self, extension_desc.kind()) {
(Value::List(list), kind) if extension_desc.is_list() => {
list.iter().all(|value| value.is_valid(&kind))
}
(Value::Map(map), Kind::Message(message_desc)) if extension_desc.is_map() => {
let key_desc = message_desc.map_entry_key_field().kind();
let value_desc = message_desc.map_entry_value_field();
map.iter().all(|(key, value)| {
key.is_valid(&key_desc) && value.is_valid_for_field(&value_desc)
})
}
(value, kind) => value.is_valid(&kind),
}
}
/// Returns `true` if this value can be encoded as the given [`Kind`].
///
/// Unlike [`is_valid_for_field`](Value::is_valid_for_field), this method does not
/// look at field cardinality, so it will never return `true` for lists or maps.
pub fn is_valid(&self, kind: &Kind) -> bool {
matches!(
(self, kind),
(Value::Bool(_), Kind::Bool)
| (Value::I32(_), Kind::Int32 | Kind::Sint32 | Kind::Sfixed32)
| (Value::I64(_), Kind::Int64 | Kind::Sint64 | Kind::Sfixed64)
| (Value::U32(_), Kind::Uint32 | Kind::Fixed32)
| (Value::U64(_), Kind::Uint64 | Kind::Fixed64)
| (Value::F32(_), Kind::Float)
| (Value::F64(_), Kind::Double)
| (Value::String(_), Kind::String)
| (Value::Bytes(_), Kind::Bytes)
| (Value::EnumNumber(_), Kind::Enum(_))
| (Value::Message(_), Kind::Message(_))
)
}
/// Returns the value if it is a `Value::Bool`, or `None` if it is any other type.
pub fn as_bool(&self) -> Option<bool> {
match *self {
Value::Bool(value) => Some(value),
_ => None,
}
}
/// Returns a mutable reference to the value if it is a `Value::Bool`, or `None` if it is any other type.
pub fn as_bool_mut(&mut self) -> Option<&mut bool> {
match self {
Value::Bool(value) => Some(value),
_ => None,
}
}
/// Returns the value if it is a `Value::U32`, or `None` if it is any other type.
pub fn as_u32(&self) -> Option<u32> {
match *self {
Value::U32(value) => Some(value),
_ => None,
}
}
/// Returns a mutable reference to the value if it is a `Value::U32`, or `None` if it is any other type.
pub fn as_u32_mut(&mut self) -> Option<&mut u32> {
match self {
Value::U32(value) => Some(value),
_ => None,
}
}
/// Returns the value if it is a `Value::U64`, or `None` if it is any other type.
pub fn as_u64(&self) -> Option<u64> {
match *self {
Value::U64(value) => Some(value),
_ => None,
}
}
/// Returns a mutable reference to the value if it is a `Value::U64`, or `None` if it is any other type.
pub fn as_u64_mut(&mut self) -> Option<&mut u64> {
match self {
Value::U64(value) => Some(value),
_ => None,
}
}
/// Returns the value if it is a `Value::I64`, or `None` if it is any other type.
pub fn as_i64(&self) -> Option<i64> {
match *self {
Value::I64(value) => Some(value),
_ => None,
}
}
/// Returns a mutable reference to the value if it is a `Value::I64`, or `None` if it is any other type.
pub fn as_i64_mut(&mut self) -> Option<&mut i64> {
match self {
Value::I64(value) => Some(value),
_ => None,
}
}
/// Returns the value if it is a `Value::I32`, or `None` if it is any other type.
pub fn as_i32(&self) -> Option<i32> {
match *self {
Value::I32(value) => Some(value),
_ => None,
}
}
/// Returns a mutable reference to the value if it is a `Value::I32`, or `None` if it is any other type.
pub fn as_i32_mut(&mut self) -> Option<&mut i32> {
match self {
Value::I32(value) => Some(value),
_ => None,
}
}
/// Returns the value if it is a `Value::F32`, or `None` if it is any other type.
pub fn as_f32(&self) -> Option<f32> {
match *self {
Value::F32(value) => Some(value),
_ => None,
}
}
/// Returns a mutable reference to the value if it is a `Value::F32`, or `None` if it is any other type.
pub fn as_f32_mut(&mut self) -> Option<&mut f32> {
match self {
Value::F32(value) => Some(value),
_ => None,
}
}
/// Returns the value if it is a `Value::F64`, or `None` if it is any other type.
pub fn as_f64(&self) -> Option<f64> {
match *self {
Value::F64(value) => Some(value),
_ => None,
}
}
/// Returns a mutable reference to the value if it is a `Value::F64`, or `None` if it is any other type.
pub fn as_f64_mut(&mut self) -> Option<&mut f64> {
match self {
Value::F64(value) => Some(value),
_ => None,
}
}
/// Returns the value if it is a `Value::EnumNumber`, or `None` if it is any other type.
pub fn as_enum_number(&self) -> Option<i32> {
match *self {
Value::EnumNumber(value) => Some(value),
_ => None,
}
}
/// Returns a mutable reference to the value if it is a `Value::EnumNumber`, or `None` if it is any other type.
pub fn as_enum_number_mut(&mut self) -> Option<&mut i32> {
match self {
Value::EnumNumber(value) => Some(value),
_ => None,
}
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | true |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/message.rs | prost-reflect/src/dynamic/message.rs | use prost::{
bytes::{Buf, BufMut},
encoding::{DecodeContext, WireType},
DecodeError, Message,
};
use crate::{
descriptor::{FieldDescriptor, Kind, MAP_ENTRY_KEY_NUMBER, MAP_ENTRY_VALUE_NUMBER},
DynamicMessage, MapKey, Value,
};
use super::{
fields::{FieldDescriptorLike, ValueAndDescriptor},
unknown::UnknownField,
};
impl Message for DynamicMessage {
    fn encode_raw(&self, buf: &mut impl BufMut)
    where
        Self: Sized,
    {
        // Encode every populated field, extension and retained unknown field,
        // in the order produced by the field map.
        // NOTE(review): the meaning of the two boolean arguments to `iter` is
        // defined in fields.rs — confirm there.
        for field in self.fields.iter(&self.desc, false, false) {
            match field {
                ValueAndDescriptor::Field(value, field_desc) => {
                    value.encode_field(&field_desc, buf)
                }
                ValueAndDescriptor::Extension(value, extension_desc) => {
                    value.encode_field(&extension_desc, buf)
                }
                // Unknown fields captured during decoding are replayed
                // verbatim, so round-tripping preserves them.
                ValueAndDescriptor::Unknown(unknowns) => unknowns.encode_raw(buf),
            }
        }
    }
    fn merge_field(
        &mut self,
        number: u32,
        wire_type: WireType,
        buf: &mut impl Buf,
        ctx: DecodeContext,
    ) -> Result<(), DecodeError>
    where
        Self: Sized,
    {
        // Dispatch on the field number: known field first, then extension,
        // otherwise store the raw bytes as an unknown field.
        if let Some(field_desc) = self.desc.get_field(number) {
            self.get_field_mut(&field_desc)
                .merge_field(&field_desc, wire_type, buf, ctx)
        } else if let Some(extension_desc) = self.desc.get_extension(number) {
            self.get_extension_mut(&extension_desc).merge_field(
                &extension_desc,
                wire_type,
                buf,
                ctx,
            )
        } else {
            let field = UnknownField::decode_value(number, wire_type, buf, ctx)?;
            self.fields.add_unknown(number, field);
            Ok(())
        }
    }
    fn encoded_len(&self) -> usize {
        // Mirrors the iteration in `encode_raw` so the computed length always
        // matches what would actually be written.
        let mut len = 0;
        for field in self.fields.iter(&self.desc, false, false) {
            match field {
                ValueAndDescriptor::Field(value, field_desc) => {
                    len += value.encoded_len(&field_desc);
                }
                ValueAndDescriptor::Extension(value, extension_desc) => {
                    len += value.encoded_len(&extension_desc);
                }
                ValueAndDescriptor::Unknown(unknowns) => len += unknowns.encoded_len(),
            }
        }
        len
    }
    fn clear(&mut self) {
        // Removes all set fields, extensions and unknown fields.
        self.fields.clear_all();
    }
}
impl Value {
    /// Encodes this value onto `buf` as field `number` of a message, using
    /// the wire representation implied by the field's [`Kind`].
    pub(super) fn encode_field<B>(&self, field_desc: &impl FieldDescriptorLike, buf: &mut B)
    where
        B: BufMut,
    {
        // Fields without explicit presence tracking are skipped entirely
        // when set to their default value.
        if !field_desc.supports_presence() && field_desc.is_default_value(self) {
            return;
        }
        let number = field_desc.number();
        match (self, field_desc.kind()) {
            (Value::Bool(value), Kind::Bool) => prost::encoding::bool::encode(number, value, buf),
            (Value::I32(value), Kind::Int32) => prost::encoding::int32::encode(number, value, buf),
            (Value::I32(value), Kind::Sint32) => {
                prost::encoding::sint32::encode(number, value, buf)
            }
            (Value::I32(value), Kind::Sfixed32) => {
                prost::encoding::sfixed32::encode(number, value, buf)
            }
            (Value::I64(value), Kind::Int64) => prost::encoding::int64::encode(number, value, buf),
            (Value::I64(value), Kind::Sint64) => {
                prost::encoding::sint64::encode(number, value, buf)
            }
            (Value::I64(value), Kind::Sfixed64) => {
                prost::encoding::sfixed64::encode(number, value, buf)
            }
            (Value::U32(value), Kind::Uint32) => {
                prost::encoding::uint32::encode(number, value, buf)
            }
            (Value::U32(value), Kind::Fixed32) => {
                prost::encoding::fixed32::encode(number, value, buf)
            }
            (Value::U64(value), Kind::Uint64) => {
                prost::encoding::uint64::encode(number, value, buf)
            }
            (Value::U64(value), Kind::Fixed64) => {
                prost::encoding::fixed64::encode(number, value, buf)
            }
            (Value::F32(value), Kind::Float) => prost::encoding::float::encode(number, value, buf),
            (Value::F64(value), Kind::Double) => {
                prost::encoding::double::encode(number, value, buf)
            }
            (Value::String(value), Kind::String) => {
                prost::encoding::string::encode(number, value, buf)
            }
            (Value::Bytes(value), Kind::Bytes) => {
                prost::encoding::bytes::encode(number, value, buf)
            }
            // Enums are encoded as their numeric value (int32 varint).
            (Value::EnumNumber(value), Kind::Enum(_)) => {
                prost::encoding::int32::encode(number, value, buf)
            }
            (Value::Message(message), Kind::Message(_)) => {
                // Groups use start/end tags instead of a length prefix.
                if field_desc.is_group() {
                    prost::encoding::group::encode(number, message, buf)
                } else {
                    prost::encoding::message::encode(number, message, buf)
                }
            }
            (Value::List(values), _) if field_desc.is_list() => {
                if field_desc.is_packed() {
                    // Packed lists are written as one length-delimited field
                    // containing the concatenated element encodings; each arm
                    // supplies the per-element encoder and length function.
                    match field_desc.kind() {
                        Kind::Enum(_) => encode_packed_list(
                            number,
                            values
                                .iter()
                                .map(|v| v.as_enum_number().expect("expected enum number")),
                            buf,
                            |v, b| prost::encoding::encode_varint(v as u64, b),
                            |v| prost::encoding::encoded_len_varint(v as u64),
                        ),
                        Kind::Double => encode_packed_list(
                            number,
                            values.iter().map(|v| v.as_f64().expect("expected double")),
                            buf,
                            |v, b| b.put_f64_le(v),
                            |_| 8,
                        ),
                        Kind::Float => encode_packed_list(
                            number,
                            values.iter().map(|v| v.as_f32().expect("expected float")),
                            buf,
                            |v, b| b.put_f32_le(v),
                            |_| 4,
                        ),
                        Kind::Int32 => encode_packed_list(
                            number,
                            values.iter().map(|v| v.as_i32().expect("expected i32")),
                            buf,
                            |v, b| prost::encoding::encode_varint(v as u64, b),
                            |v| prost::encoding::encoded_len_varint(v as u64),
                        ),
                        Kind::Int64 => encode_packed_list(
                            number,
                            values.iter().map(|v| v.as_i64().expect("expected i64")),
                            buf,
                            |v, b| prost::encoding::encode_varint(v as u64, b),
                            |v| prost::encoding::encoded_len_varint(v as u64),
                        ),
                        Kind::Uint32 => encode_packed_list(
                            number,
                            values.iter().map(|v| v.as_u32().expect("expected u32")),
                            buf,
                            |v, b| prost::encoding::encode_varint(v as u64, b),
                            |v| prost::encoding::encoded_len_varint(v as u64),
                        ),
                        Kind::Uint64 => encode_packed_list(
                            number,
                            values.iter().map(|v| v.as_u64().expect("expected u64")),
                            buf,
                            |v, b| prost::encoding::encode_varint(v, b),
                            prost::encoding::encoded_len_varint,
                        ),
                        // sint32/sint64 use ZigZag encoding before the varint.
                        Kind::Sint32 => encode_packed_list(
                            number,
                            values.iter().map(|v| v.as_i32().expect("expected i32")),
                            buf,
                            |v, b| prost::encoding::encode_varint(from_sint32(v) as u64, b),
                            |v| prost::encoding::encoded_len_varint(from_sint32(v) as u64),
                        ),
                        Kind::Sint64 => encode_packed_list(
                            number,
                            values.iter().map(|v| v.as_i64().expect("expected i64")),
                            buf,
                            |v, b| prost::encoding::encode_varint(from_sint64(v), b),
                            |v| prost::encoding::encoded_len_varint(from_sint64(v)),
                        ),
                        Kind::Fixed32 => encode_packed_list(
                            number,
                            values.iter().map(|v| v.as_u32().expect("expected u32")),
                            buf,
                            |v, b| b.put_u32_le(v),
                            |_| 4,
                        ),
                        Kind::Fixed64 => encode_packed_list(
                            number,
                            values.iter().map(|v| v.as_u64().expect("expected u64")),
                            buf,
                            |v, b| b.put_u64_le(v),
                            |_| 8,
                        ),
                        Kind::Sfixed32 => encode_packed_list(
                            number,
                            values.iter().map(|v| v.as_i32().expect("expected i32")),
                            buf,
                            |v, b| b.put_i32_le(v),
                            |_| 4,
                        ),
                        Kind::Sfixed64 => encode_packed_list(
                            number,
                            values.iter().map(|v| v.as_i64().expect("expected i64")),
                            buf,
                            |v, b| b.put_i64_le(v),
                            |_| 8,
                        ),
                        Kind::Bool => encode_packed_list(
                            number,
                            values.iter().map(|v| v.as_bool().expect("expected bool")),
                            buf,
                            |v, b| prost::encoding::encode_varint(v as u64, b),
                            |v| prost::encoding::encoded_len_varint(v as u64),
                        ),
                        _ => panic!("invalid type for packed field in DynamicMessage"),
                    }
                } else {
                    // Unpacked: each element is encoded as its own tagged field.
                    for value in values {
                        value.encode_field(field_desc, buf);
                    }
                }
            }
            (Value::Map(values), Kind::Message(map_entry)) if field_desc.is_map() => {
                // Each map entry is a nested message with key = field 1 and
                // value = field 2, length-prefixed like any message field.
                let key_desc = map_entry.get_field(MAP_ENTRY_KEY_NUMBER).unwrap();
                let value_desc = map_entry.get_field(MAP_ENTRY_VALUE_NUMBER).unwrap();
                for (key, value) in values {
                    let len = key.encoded_len(&key_desc) + value.encoded_len(&value_desc);
                    prost::encoding::encode_key(number, WireType::LengthDelimited, buf);
                    prost::encoding::encode_varint(len as u64, buf);
                    key.encode_field(&key_desc, buf);
                    value.encode_field(&value_desc, buf);
                }
            }
            (value, ty) => {
                panic!("mismatch between DynamicMessage value {value:?} and type {ty:?}")
            }
        }
    }
    /// Merges a single occurrence of this field in wire format from `buf`
    /// into this value.
    pub(super) fn merge_field<B>(
        &mut self,
        field_desc: &impl FieldDescriptorLike,
        wire_type: WireType,
        buf: &mut B,
        ctx: DecodeContext,
    ) -> Result<(), DecodeError>
    where
        B: Buf,
    {
        match (self, field_desc.kind()) {
            (Value::Bool(value), Kind::Bool) => {
                prost::encoding::bool::merge(wire_type, value, buf, ctx)
            }
            (Value::I32(value), Kind::Int32) => {
                prost::encoding::int32::merge(wire_type, value, buf, ctx)
            }
            (Value::I32(value), Kind::Sint32) => {
                prost::encoding::sint32::merge(wire_type, value, buf, ctx)
            }
            (Value::I32(value), Kind::Sfixed32) => {
                prost::encoding::sfixed32::merge(wire_type, value, buf, ctx)
            }
            (Value::I64(value), Kind::Int64) => {
                prost::encoding::int64::merge(wire_type, value, buf, ctx)
            }
            (Value::I64(value), Kind::Sint64) => {
                prost::encoding::sint64::merge(wire_type, value, buf, ctx)
            }
            (Value::I64(value), Kind::Sfixed64) => {
                prost::encoding::sfixed64::merge(wire_type, value, buf, ctx)
            }
            (Value::U32(value), Kind::Uint32) => {
                prost::encoding::uint32::merge(wire_type, value, buf, ctx)
            }
            (Value::U32(value), Kind::Fixed32) => {
                prost::encoding::fixed32::merge(wire_type, value, buf, ctx)
            }
            (Value::U64(value), Kind::Uint64) => {
                prost::encoding::uint64::merge(wire_type, value, buf, ctx)
            }
            (Value::U64(value), Kind::Fixed64) => {
                prost::encoding::fixed64::merge(wire_type, value, buf, ctx)
            }
            (Value::F32(value), Kind::Float) => {
                prost::encoding::float::merge(wire_type, value, buf, ctx)
            }
            (Value::F64(value), Kind::Double) => {
                prost::encoding::double::merge(wire_type, value, buf, ctx)
            }
            (Value::String(value), Kind::String) => {
                prost::encoding::string::merge(wire_type, value, buf, ctx)
            }
            (Value::Bytes(value), Kind::Bytes) => {
                prost::encoding::bytes::merge(wire_type, value, buf, ctx)
            }
            (Value::EnumNumber(value), Kind::Enum(_)) => {
                prost::encoding::int32::merge(wire_type, value, buf, ctx)
            }
            (Value::Message(message), Kind::Message(_)) => {
                // Groups need the field number to recognize the end tag.
                if field_desc.is_group() {
                    prost::encoding::group::merge(field_desc.number(), wire_type, message, buf, ctx)
                } else {
                    prost::encoding::message::merge(wire_type, message, buf, ctx)
                }
            }
            (Value::List(values), field_kind) if field_desc.is_list() => {
                // A packable field arriving length-delimited is a packed run:
                // decode elements until the chunk is exhausted. Otherwise
                // decode one element and append it.
                if wire_type == WireType::LengthDelimited && field_desc.is_packable() {
                    prost::encoding::merge_loop(values, buf, ctx, |values, buf, ctx| {
                        let mut value = Value::default_value(&field_kind);
                        value.merge_field(field_desc, field_kind.wire_type(), buf, ctx)?;
                        values.push(value);
                        Ok(())
                    })
                } else {
                    let mut value = Value::default_value(&field_kind);
                    value.merge_field(field_desc, wire_type, buf, ctx)?;
                    values.push(value);
                    Ok(())
                }
            }
            (Value::Map(values), Kind::Message(map_entry)) if field_desc.is_map() => {
                // Decode one map-entry message (key = field 1, value = field
                // 2), skipping any other field numbers, then insert the pair.
                let key_desc = map_entry.get_field(MAP_ENTRY_KEY_NUMBER).unwrap();
                let value_desc = map_entry.get_field(MAP_ENTRY_VALUE_NUMBER).unwrap();
                let mut key = MapKey::default_value(&key_desc.kind());
                let mut value = Value::default_value_for_field(&value_desc);
                prost::encoding::merge_loop(
                    &mut (&mut key, &mut value),
                    buf,
                    ctx,
                    |(key, value), buf, ctx| {
                        let (number, wire_type) = prost::encoding::decode_key(buf)?;
                        match number {
                            MAP_ENTRY_KEY_NUMBER => key.merge_field(&key_desc, wire_type, buf, ctx),
                            MAP_ENTRY_VALUE_NUMBER => {
                                value.merge_field(&value_desc, wire_type, buf, ctx)
                            }
                            _ => prost::encoding::skip_field(wire_type, number, buf, ctx),
                        }
                    },
                )?;
                values.insert(key, value);
                Ok(())
            }
            (value, ty) => {
                panic!("mismatch between DynamicMessage value {value:?} and type {ty:?}")
            }
        }
    }
    /// Returns the number of bytes `encode_field` would write for this value;
    /// the two methods must stay in sync arm-for-arm.
    pub(super) fn encoded_len(&self, field_desc: &impl FieldDescriptorLike) -> usize {
        // Same skip-default rule as `encode_field`.
        if !field_desc.supports_presence() && field_desc.is_default_value(self) {
            return 0;
        }
        let number = field_desc.number();
        match (self, field_desc.kind()) {
            (Value::Bool(value), Kind::Bool) => prost::encoding::bool::encoded_len(number, value),
            (Value::I32(value), Kind::Int32) => prost::encoding::int32::encoded_len(number, value),
            (Value::I32(value), Kind::Sint32) => {
                prost::encoding::sint32::encoded_len(number, value)
            }
            (Value::I32(value), Kind::Sfixed32) => {
                prost::encoding::sfixed32::encoded_len(number, value)
            }
            (Value::I64(value), Kind::Int64) => prost::encoding::int64::encoded_len(number, value),
            (Value::I64(value), Kind::Sint64) => {
                prost::encoding::sint64::encoded_len(number, value)
            }
            (Value::I64(value), Kind::Sfixed64) => {
                prost::encoding::sfixed64::encoded_len(number, value)
            }
            (Value::U32(value), Kind::Uint32) => {
                prost::encoding::uint32::encoded_len(number, value)
            }
            (Value::U32(value), Kind::Fixed32) => {
                prost::encoding::fixed32::encoded_len(number, value)
            }
            (Value::U64(value), Kind::Uint64) => {
                prost::encoding::uint64::encoded_len(number, value)
            }
            (Value::U64(value), Kind::Fixed64) => {
                prost::encoding::fixed64::encoded_len(number, value)
            }
            (Value::F32(value), Kind::Float) => prost::encoding::float::encoded_len(number, value),
            (Value::F64(value), Kind::Double) => {
                prost::encoding::double::encoded_len(number, value)
            }
            (Value::String(value), Kind::String) => {
                prost::encoding::string::encoded_len(number, value)
            }
            (Value::Bytes(value), Kind::Bytes) => {
                prost::encoding::bytes::encoded_len(number, value)
            }
            (Value::EnumNumber(value), Kind::Enum(_)) => {
                prost::encoding::int32::encoded_len(number, value)
            }
            (Value::Message(message), Kind::Message(_)) => {
                if field_desc.is_group() {
                    prost::encoding::group::encoded_len(number, message)
                } else {
                    prost::encoding::message::encoded_len(number, message)
                }
            }
            (Value::List(values), _) if field_desc.is_list() => {
                if field_desc.is_packed() {
                    // Per-element length functions mirror the encoders used in
                    // `encode_field`.
                    match field_desc.kind() {
                        Kind::Enum(_) => packed_list_encoded_len(
                            number,
                            values
                                .iter()
                                .map(|v| v.as_enum_number().expect("expected enum number")),
                            |v| prost::encoding::encoded_len_varint(v as u64),
                        ),
                        Kind::Double => packed_list_encoded_len(
                            number,
                            values.iter().map(|v| v.as_f64().expect("expected double")),
                            |_| 8,
                        ),
                        Kind::Float => packed_list_encoded_len(
                            number,
                            values.iter().map(|v| v.as_f32().expect("expected float")),
                            |_| 4,
                        ),
                        Kind::Int32 => packed_list_encoded_len(
                            number,
                            values.iter().map(|v| v.as_i32().expect("expected i32")),
                            |v| prost::encoding::encoded_len_varint(v as u64),
                        ),
                        Kind::Int64 => packed_list_encoded_len(
                            number,
                            values.iter().map(|v| v.as_i64().expect("expected i64")),
                            |v| prost::encoding::encoded_len_varint(v as u64),
                        ),
                        Kind::Uint32 => packed_list_encoded_len(
                            number,
                            values.iter().map(|v| v.as_u32().expect("expected u32")),
                            |v| prost::encoding::encoded_len_varint(v as u64),
                        ),
                        Kind::Uint64 => packed_list_encoded_len(
                            number,
                            values.iter().map(|v| v.as_u64().expect("expected u64")),
                            prost::encoding::encoded_len_varint,
                        ),
                        Kind::Sint32 => packed_list_encoded_len(
                            number,
                            values.iter().map(|v| v.as_i32().expect("expected i32")),
                            |v| prost::encoding::encoded_len_varint(from_sint32(v) as u64),
                        ),
                        Kind::Sint64 => packed_list_encoded_len(
                            number,
                            values.iter().map(|v| v.as_i64().expect("expected i64")),
                            |v| prost::encoding::encoded_len_varint(from_sint64(v)),
                        ),
                        Kind::Fixed32 => packed_list_encoded_len(
                            number,
                            values.iter().map(|v| v.as_u32().expect("expected u32")),
                            |_| 4,
                        ),
                        Kind::Fixed64 => packed_list_encoded_len(
                            number,
                            values.iter().map(|v| v.as_u64().expect("expected u64")),
                            |_| 8,
                        ),
                        Kind::Sfixed32 => packed_list_encoded_len(
                            number,
                            values.iter().map(|v| v.as_i32().expect("expected i32")),
                            |_| 4,
                        ),
                        Kind::Sfixed64 => packed_list_encoded_len(
                            number,
                            values.iter().map(|v| v.as_i64().expect("expected i64")),
                            |_| 8,
                        ),
                        Kind::Bool => packed_list_encoded_len(
                            number,
                            values.iter().map(|v| v.as_bool().expect("expected bool")),
                            |v| prost::encoding::encoded_len_varint(v as u64),
                        ),
                        _ => panic!("invalid type for packed field in DynamicMessage"),
                    }
                } else {
                    values
                        .iter()
                        .map(|value| value.encoded_len(field_desc))
                        .sum()
                }
            }
            (Value::Map(values), Kind::Message(map_entry)) if field_desc.is_map() => {
                let key_desc = map_entry.map_entry_key_field();
                let value_desc = map_entry.map_entry_value_field();
                let key_len = prost::encoding::key_len(number);
                // Each entry costs its tag, a varint length prefix, and the
                // encoded key/value pair.
                values
                    .iter()
                    .map(|(key, value)| {
                        let len = key.encoded_len(&key_desc) + value.encoded_len(&value_desc);
                        key_len + prost::encoding::encoded_len_varint(len as u64) + len
                    })
                    .sum::<usize>()
            }
            (value, ty) => {
                panic!("mismatch between DynamicMessage value {value:?} and type {ty:?}")
            }
        }
    }
}
impl MapKey {
    /// Encodes this map key as field `number` (field 1 of a map-entry
    /// message) onto `buf`.
    fn encode_field<B>(&self, field_desc: &FieldDescriptor, buf: &mut B)
    where
        B: BufMut,
    {
        // Keys equal to the default value are omitted for fields without
        // explicit presence, matching `Value::encode_field`.
        if !field_desc.supports_presence() && self.is_default(&field_desc.kind()) {
            return;
        }
        let number = field_desc.number();
        match (self, field_desc.kind()) {
            (MapKey::Bool(value), Kind::Bool) => prost::encoding::bool::encode(number, value, buf),
            (MapKey::I32(value), Kind::Int32) => prost::encoding::int32::encode(number, value, buf),
            (MapKey::I32(value), Kind::Sint32) => {
                prost::encoding::sint32::encode(number, value, buf)
            }
            (MapKey::I32(value), Kind::Sfixed32) => {
                prost::encoding::sfixed32::encode(number, value, buf)
            }
            (MapKey::I64(value), Kind::Int64) => prost::encoding::int64::encode(number, value, buf),
            (MapKey::I64(value), Kind::Sint64) => {
                prost::encoding::sint64::encode(number, value, buf)
            }
            (MapKey::I64(value), Kind::Sfixed64) => {
                prost::encoding::sfixed64::encode(number, value, buf)
            }
            (MapKey::U32(value), Kind::Uint32) => {
                prost::encoding::uint32::encode(number, value, buf)
            }
            (MapKey::U32(value), Kind::Fixed32) => {
                prost::encoding::fixed32::encode(number, value, buf)
            }
            (MapKey::U64(value), Kind::Uint64) => {
                prost::encoding::uint64::encode(number, value, buf)
            }
            (MapKey::U64(value), Kind::Fixed64) => {
                prost::encoding::fixed64::encode(number, value, buf)
            }
            (MapKey::String(value), Kind::String) => {
                prost::encoding::string::encode(number, value, buf)
            }
            (value, ty) => {
                panic!("mismatch between DynamicMessage value {value:?} and type {ty:?}")
            }
        }
    }
    /// Merges a map key in wire format from `buf` into this value.
    fn merge_field<B>(
        &mut self,
        field_desc: &FieldDescriptor,
        wire_type: WireType,
        buf: &mut B,
        ctx: DecodeContext,
    ) -> Result<(), DecodeError>
    where
        B: Buf,
    {
        match (self, field_desc.kind()) {
            (MapKey::Bool(value), Kind::Bool) => {
                prost::encoding::bool::merge(wire_type, value, buf, ctx)
            }
            (MapKey::I32(value), Kind::Int32) => {
                prost::encoding::int32::merge(wire_type, value, buf, ctx)
            }
            (MapKey::I32(value), Kind::Sint32) => {
                prost::encoding::sint32::merge(wire_type, value, buf, ctx)
            }
            (MapKey::I32(value), Kind::Sfixed32) => {
                prost::encoding::sfixed32::merge(wire_type, value, buf, ctx)
            }
            (MapKey::I64(value), Kind::Int64) => {
                prost::encoding::int64::merge(wire_type, value, buf, ctx)
            }
            (MapKey::I64(value), Kind::Sint64) => {
                prost::encoding::sint64::merge(wire_type, value, buf, ctx)
            }
            (MapKey::I64(value), Kind::Sfixed64) => {
                prost::encoding::sfixed64::merge(wire_type, value, buf, ctx)
            }
            (MapKey::U32(value), Kind::Uint32) => {
                prost::encoding::uint32::merge(wire_type, value, buf, ctx)
            }
            (MapKey::U32(value), Kind::Fixed32) => {
                prost::encoding::fixed32::merge(wire_type, value, buf, ctx)
            }
            (MapKey::U64(value), Kind::Uint64) => {
                prost::encoding::uint64::merge(wire_type, value, buf, ctx)
            }
            (MapKey::U64(value), Kind::Fixed64) => {
                prost::encoding::fixed64::merge(wire_type, value, buf, ctx)
            }
            (MapKey::String(value), Kind::String) => {
                prost::encoding::string::merge(wire_type, value, buf, ctx)
            }
            (value, ty) => {
                panic!("mismatch between DynamicMessage value {value:?} and type {ty:?}")
            }
        }
    }
    /// Returns the number of bytes `encode_field` would write for this key.
    fn encoded_len(&self, field_desc: &FieldDescriptor) -> usize {
        // Same skip-default rule as `encode_field`.
        if !field_desc.supports_presence() && self.is_default(&field_desc.kind()) {
            return 0;
        }
        let number = field_desc.number();
        match (self, field_desc.kind()) {
            (MapKey::Bool(value), Kind::Bool) => prost::encoding::bool::encoded_len(number, value),
            (MapKey::I32(value), Kind::Int32) => prost::encoding::int32::encoded_len(number, value),
            (MapKey::I32(value), Kind::Sint32) => {
                prost::encoding::sint32::encoded_len(number, value)
            }
            (MapKey::I32(value), Kind::Sfixed32) => {
                prost::encoding::sfixed32::encoded_len(number, value)
            }
            (MapKey::I64(value), Kind::Int64) => prost::encoding::int64::encoded_len(number, value),
            (MapKey::I64(value), Kind::Sint64) => {
                prost::encoding::sint64::encoded_len(number, value)
            }
            (MapKey::I64(value), Kind::Sfixed64) => {
                prost::encoding::sfixed64::encoded_len(number, value)
            }
            (MapKey::U32(value), Kind::Uint32) => {
                prost::encoding::uint32::encoded_len(number, value)
            }
            (MapKey::U32(value), Kind::Fixed32) => {
                prost::encoding::fixed32::encoded_len(number, value)
            }
            (MapKey::U64(value), Kind::Uint64) => {
                prost::encoding::uint64::encoded_len(number, value)
            }
            (MapKey::U64(value), Kind::Fixed64) => {
                prost::encoding::fixed64::encoded_len(number, value)
            }
            (MapKey::String(value), Kind::String) => {
                prost::encoding::string::encoded_len(number, value)
            }
            (value, ty) => {
                panic!("mismatch between DynamicMessage value {value:?} and type {ty:?}")
            }
        }
    }
}
/// Writes a packed repeated field: one key, a varint byte-length prefix, and
/// the concatenated element encodings. `iter` must be cloneable because the
/// elements are walked twice — once to size the payload, once to write it.
fn encode_packed_list<T, I, B, E, L>(number: u32, iter: I, buf: &mut B, encode: E, encoded_len: L)
where
    I: IntoIterator<Item = T> + Clone,
    B: BufMut,
    E: Fn(T, &mut B),
    L: Fn(T) -> usize,
{
    prost::encoding::encode_key(number, WireType::LengthDelimited, buf);
    let payload_len: usize = iter.clone().into_iter().map(encoded_len).sum();
    prost::encoding::encode_varint(payload_len as u64, buf);
    iter.into_iter().for_each(|item| encode(item, buf));
}
/// Returns the total size of a packed repeated field: key, varint length
/// prefix, and the payload bytes themselves.
fn packed_list_encoded_len<T, I, L>(number: u32, iter: I, encoded_len: L) -> usize
where
    I: IntoIterator<Item = T>,
    L: Fn(T) -> usize,
{
    let payload_len: usize = iter.into_iter().map(encoded_len).sum();
    prost::encoding::key_len(number)
        + prost::encoding::encoded_len_varint(payload_len as u64)
        + payload_len
}
/// ZigZag-encodes a signed 32-bit integer so small-magnitude values (positive
/// or negative) map to small unsigned values: 0 → 0, -1 → 1, 1 → 2, ...
fn from_sint32(value: i32) -> u32 {
    ((value >> 31) ^ (value << 1)) as u32
}
// fn to_sint32(value: u32) -> i32 {
// ((value >> 1) as i32) ^ (-((value & 1) as i32))
// }
/// ZigZag-encodes a signed 64-bit integer so small-magnitude values (positive
/// or negative) map to small unsigned values: 0 → 0, -1 → 1, 1 → 2, ...
fn from_sint64(value: i64) -> u64 {
    ((value >> 63) ^ (value << 1)) as u64
}
// fn to_sint64(value: u64) -> i64 {
// ((value >> 1) as i64) ^ (-((value & 1) as i64))
// }
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/serde/case.rs | prost-reflect/src/dynamic/serde/case.rs | /// Convert `src` from snake case to camel case, returning an error if roundtripping
/// Convert `src` from snake case to camel case, appending onto `dst`.
///
/// Returns an error if converting the result back to snake case would not
/// reproduce `src`: i.e. `src` contains an uppercase character, an underscore
/// not followed by a lowercasable character (digit, another underscore), or a
/// trailing underscore.
pub(crate) fn snake_case_to_camel_case(dst: &mut String, src: &str) -> Result<(), ()> {
    let mut ucase_next = false;
    for ch in src.chars() {
        if ch.is_ascii_uppercase() {
            // An uppercase char in the input would be indistinguishable from a
            // converted `_x` pair when round-tripping, so reject it.
            return Err(());
        }
        if ucase_next {
            let upper_ch = ch.to_ascii_uppercase();
            if upper_ch == ch {
                // `ch` has no distinct uppercase form (digit, underscore, ...),
                // so the dropped underscore could not be recovered.
                return Err(());
            }
            dst.push(upper_ch);
            ucase_next = false;
        } else if ch == '_' {
            ucase_next = true;
        } else {
            dst.push(ch);
        }
    }
    if ucase_next {
        // Fix: previously a trailing underscore was silently dropped ("foo_"
        // became "foo"), which is not round-trippable either.
        return Err(());
    }
    Ok(())
}
/// Convert `part` from camel case to snake case, appending onto `result`.
///
/// Returns an error if `part` contains an underscore, since the conversion
/// could not be round-tripped in that case.
pub(crate) fn camel_case_to_snake_case(result: &mut String, part: &str) -> Result<(), ()> {
    for ch in part.chars() {
        match ch {
            '_' => return Err(()),
            c if c.is_ascii_uppercase() => {
                result.push('_');
                result.push(c.to_ascii_lowercase());
            }
            c => result.push(c),
        }
    }
    Ok(())
}
#[cfg(test)]
mod tests {
    use proptest::prelude::*;
    use super::*;
    // Basic forward conversions from snake case.
    #[test]
    fn snake_to_camel() {
        let mut buf = String::new();
        snake_case_to_camel_case(&mut buf, "foo").unwrap();
        assert_eq!(&buf, "foo");
        buf.clear();
        snake_case_to_camel_case(&mut buf, "foo_bar").unwrap();
        assert_eq!(&buf, "fooBar");
        buf.clear();
    }
    // Basic reverse conversions from camel case.
    #[test]
    fn camel_to_snake() {
        let mut buf = String::new();
        camel_case_to_snake_case(&mut buf, "foo").unwrap();
        assert_eq!(&buf, "foo");
        buf.clear();
        camel_case_to_snake_case(&mut buf, "fooBar").unwrap();
        assert_eq!(&buf, "foo_bar");
        buf.clear();
    }
    // Inputs that must be rejected because snake → camel → snake would not
    // reproduce them.
    #[test]
    fn bad_roundtrips() {
        let mut buf = String::new();
        assert!(snake_case_to_camel_case(&mut buf, "fooBar").is_err());
        assert!(snake_case_to_camel_case(&mut buf, "foo_3_bar").is_err());
        assert!(snake_case_to_camel_case(&mut buf, "foo__bar").is_err());
    }
    proptest! {
        // Property: whenever the snake → camel conversion succeeds, converting
        // back must reproduce the original input exactly.
        #[test]
        fn roundtrip_cases(snake_case in "[a-zA-Z0-9]+") {
            let mut camel_case = String::new();
            if snake_case_to_camel_case(&mut camel_case, &snake_case).is_ok() {
                let mut roundtripped_snake_case = String::new();
                camel_case_to_snake_case(&mut roundtripped_snake_case, &camel_case).unwrap();
                prop_assert_eq!(snake_case, roundtripped_snake_case);
            }
        }
    }
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/serde/mod.rs | prost-reflect/src/dynamic/serde/mod.rs | mod case;
mod de;
mod ser;
use serde::{
de::{DeserializeSeed, Deserializer},
ser::{Serialize, Serializer},
};
use crate::{DynamicMessage, MessageDescriptor};
/// Options to control serialization of messages.
///
/// Used by [`DynamicMessage::serialize_with_options()`].
#[derive(Debug, Clone)]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
pub struct SerializeOptions {
    // NOTE(review): field semantics inferred from their names — confirm
    // against the builder methods defined elsewhere in this module.
    // Whether 64-bit integers are written as JSON strings rather than numbers.
    stringify_64_bit_integers: bool,
    // Whether enum values are written by number instead of by name.
    use_enum_numbers: bool,
    // Whether field names are written as declared in the proto file rather
    // than converted to lowerCamelCase.
    use_proto_field_name: bool,
    // Whether fields set to their default value are omitted from the output.
    skip_default_fields: bool,
}
/// Options to control deserialization of messages.
///
/// Used by [`DynamicMessage::deserialize_with_options()`].
#[derive(Debug, Clone)]
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
pub struct DeserializeOptions {
    // Whether unrecognized field names cause an error instead of being
    // skipped. NOTE(review): inferred from the name — confirm in de.rs.
    deny_unknown_fields: bool,
}
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl Serialize for DynamicMessage {
    /// Serialize this message into `serializer` using the [canonical JSON encoding](https://developers.google.com/protocol-buffers/docs/proto3#json).
    ///
    /// # Examples
    ///
    /// ```
    /// # use prost::Message;
    /// # use prost_types::FileDescriptorSet;
    /// # use prost_reflect::{DynamicMessage, DescriptorPool, Value};
    /// # use serde::Serialize;
    /// # let pool = DescriptorPool::decode(include_bytes!("../../file_descriptor_set.bin").as_ref()).unwrap();
    /// # let message_descriptor = pool.get_message_by_name("package.MyMessage").unwrap();
    /// let dynamic_message = DynamicMessage::decode(message_descriptor, b"\x08\x96\x01".as_ref()).unwrap();
    /// let mut serializer = serde_json::Serializer::new(vec![]);
    /// dynamic_message.serialize(&mut serializer).unwrap();
    /// assert_eq!(serializer.into_inner(), b"{\"foo\":150}");
    /// ```
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        // Forward to the options-taking variant with default options.
        self.serialize_with_options(serializer, &Default::default())
    }
}
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
impl<'de> DeserializeSeed<'de> for MessageDescriptor {
    type Value = DynamicMessage;
    /// Deserialize a [`DynamicMessage`] from `deserializer` using the [canonical JSON encoding](https://developers.google.com/protocol-buffers/docs/proto3#json).
    ///
    /// # Examples
    ///
    /// ```
    /// # use prost::Message;
    /// # use prost_reflect::{DynamicMessage, DescriptorPool, Value};
    /// # let pool = DescriptorPool::decode(include_bytes!("../../file_descriptor_set.bin").as_ref()).unwrap();
    /// # let message_descriptor = pool.get_message_by_name("package.MyMessage").unwrap();
    /// use serde::de::DeserializeSeed;
    ///
    /// let json = r#"{ "foo": 150 }"#;
    /// let mut deserializer = serde_json::de::Deserializer::from_str(json);
    /// let dynamic_message = message_descriptor.deserialize(&mut deserializer).unwrap();
    /// deserializer.end().unwrap();
    ///
    /// assert_eq!(dynamic_message.get_field_by_name("foo").unwrap().as_ref(), &Value::I32(150));
    /// ```
    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: Deserializer<'de>,
    {
        // The descriptor (the seed) selects which message type to decode.
        DynamicMessage::deserialize(self, deserializer)
    }
}
impl DynamicMessage {
/// Serialize this message into `serializer` using the encoding specified by `options`.
///
/// # Examples
///
/// ```
/// # use prost::Message;
/// # use prost_types::FileDescriptorSet;
/// # use prost_reflect::{DynamicMessage, DescriptorPool, Value, SerializeOptions};
/// # use serde::Serialize;
/// # let pool = DescriptorPool::decode(include_bytes!("../../file_descriptor_set.bin").as_ref()).unwrap();
/// # let message_descriptor = pool.get_message_by_name("package.MyMessage").unwrap();
/// let dynamic_message = DynamicMessage::new(message_descriptor);
/// let mut serializer = serde_json::Serializer::new(vec![]);
/// let mut options = SerializeOptions::new().skip_default_fields(false);
/// dynamic_message.serialize_with_options(&mut serializer, &options).unwrap();
/// assert_eq!(serializer.into_inner(), b"{\"foo\":0}");
/// ```
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
pub fn serialize_with_options<S>(
&self,
serializer: S,
options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
ser::serialize_message(self, serializer, options)
}
/// Deserialize an instance of the message type described by `desc` from `deserializer`.
///
/// # Examples
///
/// ```
/// # use prost::Message;
/// # use prost_reflect::{DynamicMessage, DescriptorPool, Value};
/// # let pool = DescriptorPool::decode(include_bytes!("../../file_descriptor_set.bin").as_ref()).unwrap();
/// # let message_descriptor = pool.get_message_by_name("package.MyMessage").unwrap();
/// let json = r#"{ "foo": 150 }"#;
/// let mut deserializer = serde_json::de::Deserializer::from_str(json);
/// let dynamic_message = DynamicMessage::deserialize(message_descriptor, &mut deserializer).unwrap();
/// deserializer.end().unwrap();
///
/// assert_eq!(dynamic_message.get_field_by_name("foo").unwrap().as_ref(), &Value::I32(150));
/// ```
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
pub fn deserialize<'de, D>(desc: MessageDescriptor, deserializer: D) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
Self::deserialize_with_options(desc, deserializer, &Default::default())
}
/// Deserialize an instance of the message type described by `desc` from `deserializer`, using
/// the encoding specified by `options`.
///
/// # Examples
///
/// ```
/// # use prost::Message;
/// # use prost_reflect::{DynamicMessage, DescriptorPool, Value, DeserializeOptions};
/// # let pool = DescriptorPool::decode(include_bytes!("../../file_descriptor_set.bin").as_ref()).unwrap();
/// # let message_descriptor = pool.get_message_by_name("package.MyMessage").unwrap();
/// let json = r#"{ "foo": 150, "unknown": true }"#;
/// let mut deserializer = serde_json::de::Deserializer::from_str(json);
/// let options = DeserializeOptions::new().deny_unknown_fields(false);
/// let dynamic_message = DynamicMessage::deserialize_with_options(message_descriptor, &mut deserializer, &options).unwrap();
/// deserializer.end().unwrap();
///
/// assert_eq!(dynamic_message.get_field_by_name("foo").unwrap().as_ref(), &Value::I32(150));
/// ```
#[cfg_attr(docsrs, doc(cfg(feature = "serde")))]
pub fn deserialize_with_options<'de, D>(
desc: MessageDescriptor,
deserializer: D,
options: &DeserializeOptions,
) -> Result<Self, D::Error>
where
D: Deserializer<'de>,
{
de::deserialize_message(&desc, deserializer, options)
}
}
impl DeserializeOptions {
/// Creates a new instance of [`DeserializeOptions`], with the default options chosen to conform to
/// the standard JSON mapping.
pub const fn new() -> Self {
DeserializeOptions {
deny_unknown_fields: true,
}
}
/// Whether to error during deserialization when encountering unknown message fields.
///
/// The default value is `true`.
pub const fn deny_unknown_fields(mut self, yes: bool) -> Self {
self.deny_unknown_fields = yes;
self
}
}
impl Default for DeserializeOptions {
fn default() -> Self {
Self::new()
}
}
impl SerializeOptions {
/// Creates a new instance of [`SerializeOptions`], with the default options chosen to conform to
/// the standard JSON mapping.
pub const fn new() -> Self {
SerializeOptions {
stringify_64_bit_integers: true,
use_enum_numbers: false,
use_proto_field_name: false,
skip_default_fields: true,
}
}
/// Whether to encode 64-bit integral types as strings.
///
/// The spec requires encoding 64-bit integers as strings, to prevent loss of precision in JSON
/// when the value cannot be represented exactly by a double. If this option is disabled, all
/// numbers will be serialized as their corresponding serde types instead.
///
/// The default value is `true`.
pub const fn stringify_64_bit_integers(mut self, yes: bool) -> Self {
self.stringify_64_bit_integers = yes;
self
}
/// Whether to encode enum values as their numeric value.
///
/// If `true`, enum values will be serialized as their integer values. Otherwise, they will be
/// serialized as the string value specified in the proto file.
///
/// The default value is `false`.
pub const fn use_enum_numbers(mut self, yes: bool) -> Self {
self.use_enum_numbers = yes;
self
}
/// Whether to use the proto field name instead of the lowerCamelCase name in JSON field names.
///
/// The default value is `false`.
pub const fn use_proto_field_name(mut self, yes: bool) -> Self {
self.use_proto_field_name = yes;
self
}
/// Whether to skip fields which have their default value.
///
/// If `true`, any fields for which [`has_field`][DynamicMessage::has_field] returns `false` will
/// not be serialized. If `false`, they will be serialized with their default value.
///
/// The default value is `true`.
pub const fn skip_default_fields(mut self, yes: bool) -> Self {
self.skip_default_fields = yes;
self
}
}
impl Default for SerializeOptions {
fn default() -> Self {
Self::new()
}
}
/// Maximum magnitude of `google.protobuf.Duration.seconds` (±10,000 years, per the
/// well-known-type definition).
const MAX_DURATION_SECONDS: u64 = 315_576_000_000;
/// Maximum magnitude of `google.protobuf.Duration.nanos` (strictly less than one second).
const MAX_DURATION_NANOS: u32 = 999_999_999;
/// Unix seconds of `0001-01-01T00:00:00Z`, the smallest timestamp the JSON mapping allows.
const MIN_TIMESTAMP_SECONDS: i64 = -62135596800;
/// Unix seconds of `9999-12-31T23:59:59Z`, the largest timestamp the JSON mapping allows.
const MAX_TIMESTAMP_SECONDS: i64 = 253402300799;
/// Returns `true` if `full_name` names one of the well-known types that receive
/// special treatment in the JSON mapping.
fn is_well_known_type(full_name: &str) -> bool {
    const WELL_KNOWN_TYPES: [&str; 17] = [
        "google.protobuf.Any",
        "google.protobuf.Timestamp",
        "google.protobuf.Duration",
        "google.protobuf.Struct",
        "google.protobuf.FloatValue",
        "google.protobuf.DoubleValue",
        "google.protobuf.Int32Value",
        "google.protobuf.Int64Value",
        "google.protobuf.UInt32Value",
        "google.protobuf.UInt64Value",
        "google.protobuf.BoolValue",
        "google.protobuf.StringValue",
        "google.protobuf.BytesValue",
        "google.protobuf.FieldMask",
        "google.protobuf.ListValue",
        "google.protobuf.Value",
        "google.protobuf.Empty",
    ];
    WELL_KNOWN_TYPES.contains(&full_name)
}
/// Validates that `duration` is within the range allowed by `google.protobuf.Duration`.
fn check_duration(duration: &prost_types::Duration) -> Result<(), &'static str> {
    // Both components are compared by magnitude; the sign is validated elsewhere.
    let seconds_ok = duration.seconds.unsigned_abs() <= MAX_DURATION_SECONDS;
    let nanos_ok = duration.nanos.unsigned_abs() <= MAX_DURATION_NANOS;
    if seconds_ok && nanos_ok {
        Ok(())
    } else {
        Err("duration out of range")
    }
}
/// Validates that `timestamp` is within the range allowed by `google.protobuf.Timestamp`.
fn check_timestamp(timestamp: &prost_types::Timestamp) -> Result<(), &'static str> {
    if (MIN_TIMESTAMP_SECONDS..=MAX_TIMESTAMP_SECONDS).contains(&timestamp.seconds) {
        Ok(())
    } else {
        Err("timestamp out of range")
    }
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/serde/de/kind.rs | prost-reflect/src/dynamic/serde/de/kind.rs | use std::{borrow::Cow, collections::HashMap, convert::TryInto, fmt, str::FromStr};
use prost::bytes::Bytes;
use serde::de::{DeserializeSeed, Deserializer, Error, IgnoredAny, MapAccess, SeqAccess, Visitor};
use crate::{
dynamic::{serde::DeserializeOptions, DynamicMessage, MapKey, Value},
EnumDescriptor, Kind, MessageDescriptor, ReflectMessage,
};
use super::{
deserialize_enum, deserialize_message, FieldDescriptorSeed, OptionalFieldDescriptorSeed,
};
pub struct KindSeed<'a>(pub &'a Kind, pub &'a DeserializeOptions);
impl<'de> DeserializeSeed<'de> for KindSeed<'_> {
    // `None` signals "drop this value" (currently only produced for unrecognized
    // enum names when unknown fields are allowed — see `EnumVisitor::visit_str`).
    type Value = Option<Value>;

    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Dispatch on the protobuf kind, mapping each scalar kind to its dedicated
        // visitor; sint/sfixed variants share the plain int visitors since the JSON
        // representation is identical.
        match self.0 {
            Kind::Double => Ok(Some(Value::F64(
                deserializer.deserialize_any(DoubleVisitor)?,
            ))),
            Kind::Float => Ok(Some(Value::F32(
                deserializer.deserialize_any(FloatVisitor)?,
            ))),
            Kind::Int32 | Kind::Sint32 | Kind::Sfixed32 => Ok(Some(Value::I32(
                deserializer.deserialize_any(Int32Visitor)?,
            ))),
            Kind::Int64 | Kind::Sint64 | Kind::Sfixed64 => Ok(Some(Value::I64(
                deserializer.deserialize_any(Int64Visitor)?,
            ))),
            Kind::Uint32 | Kind::Fixed32 => Ok(Some(Value::U32(
                deserializer.deserialize_any(Uint32Visitor)?,
            ))),
            Kind::Uint64 | Kind::Fixed64 => Ok(Some(Value::U64(
                deserializer.deserialize_any(Uint64Visitor)?,
            ))),
            Kind::Bool => Ok(Some(Value::Bool(
                deserializer.deserialize_any(BoolVisitor)?,
            ))),
            Kind::String => Ok(Some(Value::String(
                deserializer.deserialize_string(StringVisitor)?,
            ))),
            Kind::Bytes => Ok(Some(Value::Bytes(
                deserializer.deserialize_str(BytesVisitor)?,
            ))),
            // Nested messages recurse through the free function so well-known types
            // get their special handling.
            Kind::Message(desc) => Ok(Some(Value::Message(deserialize_message(
                desc,
                deserializer,
                self.1,
            )?))),
            // `deserialize_enum` may yield `None` for an unknown enum name.
            Kind::Enum(desc) => {
                Ok(deserialize_enum(desc, deserializer, self.1)?.map(Value::EnumNumber))
            }
        }
    }
}
/// Visitor for a repeated field; `.0` is the element kind.
pub struct ListVisitor<'a>(pub &'a Kind, pub &'a DeserializeOptions);
/// Visitor for a map field; `.0` is the kind of the synthetic map-entry message.
pub struct MapVisitor<'a>(pub &'a Kind, pub &'a DeserializeOptions);
/// Visitor for `double` fields.
pub struct DoubleVisitor;
/// Visitor for `float` fields.
pub struct FloatVisitor;
/// Visitor for `int32`/`sint32`/`sfixed32` fields.
pub struct Int32Visitor;
/// Visitor for `uint32`/`fixed32` fields.
pub struct Uint32Visitor;
/// Visitor for `int64`/`sint64`/`sfixed64` fields.
pub struct Int64Visitor;
/// Visitor for `uint64`/`fixed64` fields.
pub struct Uint64Visitor;
/// Visitor for `string` fields.
pub struct StringVisitor;
/// Visitor for `bool` fields.
pub struct BoolVisitor;
/// Visitor for `bytes` fields (base64-encoded strings in JSON).
pub struct BytesVisitor;
/// Visitor that builds a fresh [`DynamicMessage`] for the descriptor in `.0`.
pub struct MessageVisitor<'a>(pub &'a MessageDescriptor, pub &'a DeserializeOptions);
/// Visitor that populates an existing [`DynamicMessage`] in place.
pub struct MessageVisitorInner<'a>(pub &'a mut DynamicMessage, pub &'a DeserializeOptions);
/// Visitor for enum fields (accepts names or numbers).
pub struct EnumVisitor<'a>(pub &'a EnumDescriptor, pub &'a DeserializeOptions);
impl<'de> Visitor<'de> for ListVisitor<'_> {
    type Value = Vec<Value>;

    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "a list")
    }

    #[inline]
    fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
    where
        A: SeqAccess<'de>,
    {
        let mut values = Vec::with_capacity(seq.size_hint().unwrap_or(0));
        while let Some(element) = seq.next_element_seed(KindSeed(self.0, self.1))? {
            // `KindSeed` yields `None` for values that should be dropped (e.g. an
            // unrecognized enum name when unknown values are permitted); `extend`
            // over the `Option` silently skips those.
            values.extend(element);
        }
        Ok(values)
    }
}
impl<'de> Visitor<'de> for MapVisitor<'_> {
type Value = HashMap<MapKey, Value>;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "a map")
}
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
where
A: MapAccess<'de>,
{
let mut result = HashMap::with_capacity(map.size_hint().unwrap_or(0));
let map_entry_message = self.0.as_message().unwrap();
let key_kind = map_entry_message.map_entry_key_field().kind();
let value_desc = map_entry_message.map_entry_value_field();
while let Some(key_str) = map.next_key::<Cow<str>>()? {
let key = match key_kind {
Kind::Int32 | Kind::Sint32 | Kind::Sfixed32 => {
MapKey::I32(i32::from_str(key_str.as_ref()).map_err(Error::custom)?)
}
Kind::Int64 | Kind::Sint64 | Kind::Sfixed64 => {
MapKey::I64(i64::from_str(key_str.as_ref()).map_err(Error::custom)?)
}
Kind::Uint32 | Kind::Fixed32 => {
MapKey::U32(u32::from_str(key_str.as_ref()).map_err(Error::custom)?)
}
Kind::Uint64 | Kind::Fixed64 => {
MapKey::U64(u64::from_str(key_str.as_ref()).map_err(Error::custom)?)
}
Kind::Bool => {
MapKey::Bool(bool::from_str(key_str.as_ref()).map_err(Error::custom)?)
}
Kind::String => MapKey::String(key_str.into_owned()),
_ => unreachable!("invalid type for map key"),
};
let value = map.next_value_seed(FieldDescriptorSeed(&value_desc, self.1))?;
if let Some(value) = value {
result.insert(key, value);
}
}
Ok(result)
}
}
impl Visitor<'_> for DoubleVisitor {
type Value = f64;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "a 64-bit floating point value")
}
#[inline]
fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E>
where
E: Error,
{
Ok(v)
}
#[inline]
fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
where
E: Error,
{
Ok(v as Self::Value)
}
#[inline]
fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
where
E: Error,
{
Ok(v as Self::Value)
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: Error,
{
match f64::from_str(v) {
Ok(value) => Ok(value),
Err(_) if v == "Infinity" => Ok(f64::INFINITY),
Err(_) if v == "-Infinity" => Ok(f64::NEG_INFINITY),
Err(_) if v == "NaN" => Ok(f64::NAN),
Err(err) => Err(Error::custom(err)),
}
}
}
impl Visitor<'_> for FloatVisitor {
type Value = f32;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "a 32-bit floating point value")
}
#[inline]
fn visit_f32<E>(self, v: f32) -> Result<Self::Value, E>
where
E: Error,
{
Ok(v)
}
#[inline]
fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E>
where
E: Error,
{
if v < (f32::MIN as f64) || v > (f32::MAX as f64) {
Err(Error::custom("float value out of range"))
} else {
Ok(v as f32)
}
}
#[inline]
fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
where
E: Error,
{
Ok(v as Self::Value)
}
#[inline]
fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
where
E: Error,
{
Ok(v as Self::Value)
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: Error,
{
match f32::from_str(v) {
Ok(value) => Ok(value),
Err(_) if v == "Infinity" => Ok(f32::INFINITY),
Err(_) if v == "-Infinity" => Ok(f32::NEG_INFINITY),
Err(_) if v == "NaN" => Ok(f32::NAN),
Err(err) => Err(Error::custom(err)),
}
}
}
impl Visitor<'_> for Int32Visitor {
type Value = i32;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "a 32-bit signed integer")
}
#[inline]
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: Error,
{
v.parse().map_err(Error::custom)
}
#[inline]
fn visit_i32<E>(self, v: i32) -> Result<Self::Value, E>
where
E: Error,
{
Ok(v)
}
#[inline]
fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
where
E: Error,
{
v.try_into().map_err(Error::custom)
}
#[inline]
fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
where
E: Error,
{
v.try_into().map_err(Error::custom)
}
fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E>
where
E: Error,
{
if v.fract() != 0.0 {
return Err(Error::custom("expected integer value"));
}
if v < (i32::MIN as f64) || v > (i32::MAX as f64) {
return Err(Error::custom("float value out of range"));
}
Ok(v as i32)
}
}
impl Visitor<'_> for Uint32Visitor {
type Value = u32;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "a 32-bit unsigned integer or decimal string")
}
#[inline]
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: Error,
{
v.parse().map_err(Error::custom)
}
#[inline]
fn visit_u32<E>(self, v: u32) -> Result<Self::Value, E>
where
E: Error,
{
Ok(v)
}
#[inline]
fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
where
E: Error,
{
v.try_into().map_err(Error::custom)
}
#[inline]
fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
where
E: Error,
{
v.try_into().map_err(Error::custom)
}
fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E>
where
E: Error,
{
if v.fract() != 0.0 {
return Err(Error::custom("expected integer value"));
}
if v < (u32::MIN as f64) || v > (u32::MAX as f64) {
return Err(Error::custom("float value out of range"));
}
Ok(v as u32)
}
}
impl Visitor<'_> for Int64Visitor {
    type Value = i64;

    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "a 64-bit signed integer or decimal string")
    }

    /// The JSON mapping also allows 64-bit integers encoded as decimal strings.
    #[inline]
    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: Error,
    {
        v.parse().map_err(Error::custom)
    }

    #[inline]
    fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
    where
        E: Error,
    {
        Ok(v)
    }

    #[inline]
    fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
    where
        E: Error,
    {
        v.try_into().map_err(Error::custom)
    }

    fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E>
    where
        E: Error,
    {
        // 2^63: the smallest value strictly greater than every i64. Note that
        // `i64::MAX as f64` rounds *up* to exactly this value, so the previous
        // check `v > i64::MAX as f64` wrongly accepted 9223372036854775808.0,
        // which then saturated to i64::MAX on conversion. Using an exclusive
        // upper bound rejects it instead. `i64::MIN as f64` (-2^63) is exact,
        // so the inclusive lower bound is correct as-is.
        const UPPER_EXCLUSIVE: f64 = 9_223_372_036_854_775_808.0;
        if v.fract() != 0.0 {
            return Err(Error::custom("expected integer value"));
        }
        if v < (i64::MIN as f64) || v >= UPPER_EXCLUSIVE {
            return Err(Error::custom("float value out of range"));
        }
        Ok(v as i64)
    }
}
impl Visitor<'_> for Uint64Visitor {
    type Value = u64;

    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "a 64-bit unsigned integer or decimal string")
    }

    /// The JSON mapping also allows 64-bit integers encoded as decimal strings.
    #[inline]
    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: Error,
    {
        v.parse().map_err(Error::custom)
    }

    #[inline]
    fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
    where
        E: Error,
    {
        Ok(v)
    }

    #[inline]
    fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
    where
        E: Error,
    {
        v.try_into().map_err(Error::custom)
    }

    fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E>
    where
        E: Error,
    {
        // 2^64: the smallest value strictly greater than every u64. Note that
        // `u64::MAX as f64` rounds *up* to exactly this value, so the previous
        // check `v > u64::MAX as f64` wrongly accepted 18446744073709551616.0,
        // which then saturated to u64::MAX on conversion. Using an exclusive
        // upper bound rejects it instead.
        const UPPER_EXCLUSIVE: f64 = 18_446_744_073_709_551_616.0;
        if v.fract() != 0.0 {
            return Err(Error::custom("expected integer value"));
        }
        if v < (u64::MIN as f64) || v >= UPPER_EXCLUSIVE {
            return Err(Error::custom("float value out of range"));
        }
        Ok(v as u64)
    }
}
impl Visitor<'_> for StringVisitor {
    type Value = String;

    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "a string")
    }

    // Take ownership directly when the deserializer already holds an owned string.
    #[inline]
    fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
    where
        E: Error,
    {
        Ok(v)
    }

    #[inline]
    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: Error,
    {
        Ok(String::from(v))
    }
}
impl Visitor<'_> for BoolVisitor {
    type Value = bool;

    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "a boolean")
    }

    // Only JSON `true`/`false` are accepted; no string or numeric forms.
    #[inline]
    fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
    where
        E: Error,
    {
        Ok(v)
    }
}
impl Visitor<'_> for BytesVisitor {
    type Value = Bytes;

    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "a base64-encoded string")
    }

    #[inline]
    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: Error,
    {
        use base64::{
            alphabet,
            engine::DecodePaddingMode,
            engine::{GeneralPurpose, GeneralPurposeConfig},
            DecodeError, Engine,
        };
        // The JSON mapping accepts base64 with or without padding, in either the
        // standard or the URL-safe alphabet.
        const CONFIG: GeneralPurposeConfig = GeneralPurposeConfig::new()
            .with_decode_allow_trailing_bits(true)
            .with_decode_padding_mode(DecodePaddingMode::Indifferent);
        const STANDARD: GeneralPurpose = GeneralPurpose::new(&alphabet::STANDARD, CONFIG);
        const URL_SAFE: GeneralPurpose = GeneralPurpose::new(&alphabet::URL_SAFE, CONFIG);

        let mut decoded = Vec::new();
        match STANDARD.decode_vec(v, &mut decoded) {
            Ok(()) => Ok(decoded.into()),
            // '-' or '_' indicates the URL-safe alphabet; retry with it before
            // reporting an error.
            Err(DecodeError::InvalidByte(_, b'-' | b'_')) => {
                decoded.clear();
                match URL_SAFE.decode_vec(v, &mut decoded) {
                    Ok(()) => Ok(decoded.into()),
                    Err(err) => Err(Error::custom(format!("invalid base64: {err}"))),
                }
            }
            Err(err) => Err(Error::custom(format!("invalid base64: {err}"))),
        }
    }
}
impl<'de> Visitor<'de> for MessageVisitor<'_> {
    type Value = DynamicMessage;

    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "a map")
    }

    fn visit_map<A>(self, map: A) -> Result<Self::Value, A::Error>
    where
        A: MapAccess<'de>,
    {
        // Allocate an empty message for the descriptor, then delegate the actual
        // field population to the in-place visitor.
        let mut target = DynamicMessage::new(self.0.clone());
        let inner = MessageVisitorInner(&mut target, self.1);
        inner.visit_map(map)?;
        Ok(target)
    }
}
impl<'de> Visitor<'de> for MessageVisitorInner<'_> {
    type Value = ();

    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "a map")
    }

    // Populates the message in `self.0` from a JSON object, one key at a time.
    fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
    where
        A: MapAccess<'de>,
    {
        let desc = self.0.descriptor();
        while let Some(key) = map.next_key::<Cow<str>>()? {
            // Fields may be addressed by their lowerCamelCase JSON name or by
            // their original proto name.
            if let Some(field) = desc
                .get_field_by_json_name(key.as_ref())
                .or_else(|| desc.get_field_by_name(key.as_ref()))
            {
                if let Some(value) =
                    map.next_value_seed(OptionalFieldDescriptorSeed(&field, self.1))?
                {
                    // Setting two members of the same oneof in one document is an
                    // error: reject if any sibling of this field is already set.
                    if let Some(oneof_desc) = field.containing_oneof() {
                        for oneof_field in oneof_desc.fields() {
                            if self.0.has_field(&oneof_field) {
                                return Err(Error::custom(format!(
                                    "multiple fields provided for oneof '{}'",
                                    oneof_desc.name()
                                )));
                            }
                        }
                    }
                    self.0.set_field(&field, value);
                }
            } else if let Some(extension_desc) = desc.get_extension_by_json_name(key.as_ref()) {
                // Extensions are looked up by JSON name only.
                if let Some(value) =
                    map.next_value_seed(OptionalFieldDescriptorSeed(&extension_desc, self.1))?
                {
                    self.0.set_extension(&extension_desc, value);
                }
            } else if self.1.deny_unknown_fields {
                return Err(Error::custom(format!("unrecognized field name '{key}'")));
            } else {
                // Unknown field tolerated: consume and discard its value.
                let _ = map.next_value::<IgnoredAny>()?;
            }
        }
        Ok(())
    }
}
impl Visitor<'_> for EnumVisitor<'_> {
type Value = Option<i32>;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "a string or integer")
}
#[inline]
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: Error,
{
match self.0.get_value_by_name(v) {
Some(e) => Ok(Some(e.number())),
None => {
if self.1.deny_unknown_fields {
Err(Error::custom(format!("unrecognized enum value '{v}'")))
} else {
Ok(None)
}
}
}
}
#[inline]
fn visit_i32<E>(self, v: i32) -> Result<Self::Value, E>
where
E: Error,
{
Ok(Some(v))
}
#[inline]
fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
where
E: Error,
{
self.visit_i32(v.try_into().map_err(Error::custom)?)
}
#[inline]
fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
where
E: Error,
{
self.visit_i32(v.try_into().map_err(Error::custom)?)
}
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/serde/de/mod.rs | prost-reflect/src/dynamic/serde/de/mod.rs | mod kind;
mod wkt;
use std::fmt;
use prost::Message;
use serde::de::{DeserializeSeed, Deserializer, Error, Visitor};
use crate::{
dynamic::{fields::FieldDescriptorLike, serde::DeserializeOptions, DynamicMessage, Value},
EnumDescriptor, Kind, MessageDescriptor,
};
pub(super) fn deserialize_message<'de, D>(
desc: &MessageDescriptor,
deserializer: D,
options: &DeserializeOptions,
) -> Result<DynamicMessage, D::Error>
where
D: Deserializer<'de>,
{
match desc.full_name() {
"google.protobuf.Any" => deserializer
.deserialize_any(wkt::GoogleProtobufAnyVisitor(desc.parent_pool(), options))
.and_then(|timestamp| make_message(desc, timestamp)),
"google.protobuf.Timestamp" => deserializer
.deserialize_str(wkt::GoogleProtobufTimestampVisitor)
.and_then(|timestamp| make_message(desc, timestamp)),
"google.protobuf.Duration" => deserializer
.deserialize_str(wkt::GoogleProtobufDurationVisitor)
.and_then(|duration| make_message(desc, duration)),
"google.protobuf.FloatValue" => deserializer
.deserialize_any(kind::FloatVisitor)
.and_then(|v| make_message(desc, v)),
"google.protobuf.DoubleValue" => deserializer
.deserialize_any(kind::DoubleVisitor)
.and_then(|v| make_message(desc, v)),
"google.protobuf.Int32Value" => deserializer
.deserialize_any(kind::Int32Visitor)
.and_then(|v| make_message(desc, v)),
"google.protobuf.Int64Value" => deserializer
.deserialize_any(kind::Int64Visitor)
.and_then(|v| make_message(desc, v)),
"google.protobuf.UInt32Value" => deserializer
.deserialize_any(kind::Uint32Visitor)
.and_then(|v| make_message(desc, v)),
"google.protobuf.UInt64Value" => deserializer
.deserialize_any(kind::Uint64Visitor)
.and_then(|v| make_message(desc, v)),
"google.protobuf.BoolValue" => deserializer
.deserialize_any(kind::BoolVisitor)
.and_then(|v| make_message(desc, v)),
"google.protobuf.StringValue" => deserializer
.deserialize_any(kind::StringVisitor)
.and_then(|v| make_message(desc, v)),
"google.protobuf.BytesValue" => deserializer
.deserialize_any(kind::BytesVisitor)
.and_then(|v| make_message(desc, v)),
"google.protobuf.FieldMask" => deserializer
.deserialize_str(wkt::GoogleProtobufFieldMaskVisitor)
.and_then(|field_mask| make_message(desc, field_mask)),
"google.protobuf.Struct" => deserializer
.deserialize_map(wkt::GoogleProtobufStructVisitor)
.and_then(|value| make_message(desc, value)),
"google.protobuf.ListValue" => deserializer
.deserialize_seq(wkt::GoogleProtobufListVisitor)
.and_then(|list| make_message(desc, list)),
"google.protobuf.Value" => deserializer
.deserialize_any(wkt::GoogleProtobufValueVisitor)
.and_then(|value| make_message(desc, value)),
"google.protobuf.Empty" => deserializer
.deserialize_map(wkt::GoogleProtobufEmptyVisitor)
.and_then(|empty| make_message(desc, empty)),
_ => deserializer.deserialize_map(kind::MessageVisitor(desc, options)),
}
}
fn deserialize_enum<'de, D>(
desc: &EnumDescriptor,
deserializer: D,
options: &DeserializeOptions,
) -> Result<Option<i32>, D::Error>
where
D: Deserializer<'de>,
{
match desc.full_name() {
"google.protobuf.NullValue" => {
deserializer.deserialize_any(wkt::GoogleProtobufNullVisitor(options))
}
_ => deserializer.deserialize_any(kind::EnumVisitor(desc, options)),
}
}
/// [`DeserializeSeed`] that decodes a whole message for the descriptor in `.0`.
struct MessageSeed<'a>(&'a MessageDescriptor, &'a DeserializeOptions);

impl<'de> DeserializeSeed<'de> for MessageSeed<'_> {
    type Value = DynamicMessage;

    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: Deserializer<'de>,
    {
        let MessageSeed(desc, options) = self;
        deserialize_message(desc, deserializer, options)
    }
}
/// [`DeserializeSeed`] for the value of a single field (or extension), taking its
/// cardinality (singular, repeated or map) into account.
struct FieldDescriptorSeed<'a, T>(&'a T, &'a DeserializeOptions);

impl<'de, T> DeserializeSeed<'de> for FieldDescriptorSeed<'_, T>
where
    T: FieldDescriptorLike,
{
    type Value = Option<Value>;

    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: Deserializer<'de>,
    {
        if self.0.is_list() {
            let elements = deserializer.deserialize_any(kind::ListVisitor(&self.0.kind(), self.1))?;
            Ok(Some(Value::List(elements)))
        } else if self.0.is_map() {
            let entries = deserializer.deserialize_any(kind::MapVisitor(&self.0.kind(), self.1))?;
            Ok(Some(Value::Map(entries)))
        } else {
            // Singular values may come back as `None` (dropped unknown enum names).
            kind::KindSeed(&self.0.kind(), self.1).deserialize(deserializer)
        }
    }
}
/// Like [`FieldDescriptorSeed`], but additionally handles an explicit JSON `null`.
struct OptionalFieldDescriptorSeed<'a, T>(&'a T, &'a DeserializeOptions);

impl<'de, T> DeserializeSeed<'de> for OptionalFieldDescriptorSeed<'_, T>
where
    T: FieldDescriptorLike,
{
    type Value = Option<Value>;

    fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Route through `deserialize_option` so null/missing is observed by the
        // `Visitor` impl below.
        deserializer.deserialize_option(self)
    }
}
impl<'de, T> Visitor<'de> for OptionalFieldDescriptorSeed<'_, T>
where
    T: FieldDescriptorLike,
{
    type Value = Option<Value>;

    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "option")
    }

    #[inline]
    fn visit_unit<E>(self) -> Result<Self::Value, E>
    where
        E: Error,
    {
        self.visit_none()
    }

    // JSON `null` is usually "field absent" (`None`), except for the two
    // well-known types whose canonical encoding of null is a concrete value.
    #[inline]
    fn visit_none<E>(self) -> Result<Self::Value, E>
    where
        E: Error,
    {
        match self.0.kind() {
            // `google.protobuf.Value`: null maps to a Value holding NullValue.
            Kind::Message(message_desc) if message_desc.full_name() == "google.protobuf.Value" => {
                make_message(
                    &message_desc,
                    prost_types::Value {
                        kind: Some(prost_types::value::Kind::NullValue(0)),
                    },
                )
                .map(|v| Some(Value::Message(v)))
            }
            // `google.protobuf.NullValue`: null maps to enum number 0.
            Kind::Enum(enum_desc) if enum_desc.full_name() == "google.protobuf.NullValue" => {
                Ok(Some(Value::EnumNumber(0)))
            }
            _ => Ok(None),
        }
    }

    #[inline]
    fn visit_some<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
    where
        D: Deserializer<'de>,
    {
        // Non-null values are handled by the cardinality-aware seed.
        FieldDescriptorSeed(self.0, self.1).deserialize(deserializer)
    }
}
fn make_message<E: Error, T: Message>(
desc: &MessageDescriptor,
message: T,
) -> Result<DynamicMessage, E> {
let mut dynamic = DynamicMessage::new(desc.clone());
dynamic
.transcode_from(&message)
.map_err(|err| Error::custom(format!("error decoding: {err}")))?;
Ok(dynamic)
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/serde/de/wkt.rs | prost-reflect/src/dynamic/serde/de/wkt.rs | use std::{
borrow::Cow,
collections::{BTreeMap, HashMap},
fmt,
marker::PhantomData,
};
use prost::Message;
use serde::de::{
DeserializeSeed, Deserializer, Error, IgnoredAny, IntoDeserializer, MapAccess, SeqAccess,
Visitor,
};
use crate::{
dynamic::{
get_type_url_message_name,
serde::{
case::camel_case_to_snake_case, check_duration, check_timestamp, is_well_known_type,
DeserializeOptions,
},
DynamicMessage,
},
DescriptorPool,
};
use super::{deserialize_message, kind::MessageVisitorInner, MessageSeed};
/// Visitor for `google.protobuf.Any`; resolves the payload type via the pool in `.0`.
pub struct GoogleProtobufAnyVisitor<'a>(pub &'a DescriptorPool, pub &'a DeserializeOptions);
/// Visitor for `google.protobuf.NullValue` (JSON null or the string "NULL_VALUE").
pub struct GoogleProtobufNullVisitor<'a>(pub &'a DeserializeOptions);
/// Visitor for `google.protobuf.Timestamp` (RFC 3339 string).
pub struct GoogleProtobufTimestampVisitor;
/// Visitor for `google.protobuf.Duration` (decimal seconds with `s` suffix).
pub struct GoogleProtobufDurationVisitor;
/// Visitor for `google.protobuf.FieldMask` (comma-separated lowerCamelCase paths).
pub struct GoogleProtobufFieldMaskVisitor;
/// Visitor for `google.protobuf.ListValue` (JSON array).
pub struct GoogleProtobufListVisitor;
/// Visitor for `google.protobuf.Struct` (JSON object).
pub struct GoogleProtobufStructVisitor;
/// Visitor for `google.protobuf.Value` (any JSON value).
pub struct GoogleProtobufValueVisitor;
/// Visitor for `google.protobuf.Empty` (empty JSON object).
pub struct GoogleProtobufEmptyVisitor;
impl<'de> Visitor<'de> for GoogleProtobufAnyVisitor<'_> {
    type Value = prost_types::Any;

    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "a map")
    }

    fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
    where
        A: MapAccess<'de>,
    {
        // `@type` may appear anywhere in the object. `find_field` (defined elsewhere
        // in this module) scans forward for the requested key, stashing any other
        // entries it passes into `buffered_entries` so they can be replayed later.
        let mut buffered_entries = HashMap::new();
        let type_url = find_field(
            &mut map,
            &mut buffered_entries,
            "@type",
            PhantomData::<String>,
        )?;
        let message_name = get_type_url_message_name(&type_url).map_err(Error::custom)?;
        let message_desc = self
            .0
            .get_message_by_name(message_name)
            .ok_or_else(|| Error::custom(format!("message '{message_name}' not found")))?;
        let payload_message = if is_well_known_type(message_name) {
            // Well-known payloads are wrapped as `{"@type": ..., "value": <payload>}`.
            // The "value" entry may already have been buffered while locating "@type".
            let payload_message = match buffered_entries.remove("value") {
                Some(value) => {
                    deserialize_message(&message_desc, value, self.1).map_err(Error::custom)?
                }
                None => find_field(
                    &mut map,
                    &mut buffered_entries,
                    "value",
                    MessageSeed(&message_desc, self.1),
                )?,
            };
            // Any key other than "@type"/"value" is unexpected here; either reject
            // it or drain-and-ignore depending on the options.
            if self.1.deny_unknown_fields {
                if let Some(key) = buffered_entries.keys().next() {
                    return Err(Error::custom(format!("unrecognized field name '{key}'")));
                }
                if let Some(key) = map.next_key::<Cow<str>>()? {
                    return Err(Error::custom(format!("unrecognized field name '{key}'")));
                }
            } else {
                drop(buffered_entries);
                while map.next_entry::<IgnoredAny, IgnoredAny>()?.is_some() {}
            }
            payload_message
        } else {
            // Ordinary payloads: the remaining keys are the message's own fields.
            // Replay the buffered entries first, then consume the rest of the map.
            let mut payload_message = DynamicMessage::new(message_desc);
            buffered_entries
                .into_deserializer()
                .deserialize_map(MessageVisitorInner(&mut payload_message, self.1))
                .map_err(Error::custom)?;
            MessageVisitorInner(&mut payload_message, self.1).visit_map(map)?;
            payload_message
        };
        // Re-encode the payload to protobuf bytes for the `Any.value` field.
        let value = payload_message.encode_to_vec();
        Ok(prost_types::Any { type_url, value })
    }
}
impl Visitor<'_> for GoogleProtobufNullVisitor<'_> {
type Value = Option<i32>;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "null")
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: Error,
{
if v == "NULL_VALUE" {
Ok(Some(0))
} else if self.0.deny_unknown_fields {
Err(Error::custom("expected null"))
} else {
Ok(None)
}
}
#[inline]
fn visit_unit<E>(self) -> Result<Self::Value, E>
where
E: Error,
{
Ok(Some(0))
}
}
impl Visitor<'_> for GoogleProtobufTimestampVisitor {
    type Value = prost_types::Timestamp;

    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "a rfc3339 timestamp string")
    }

    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: Error,
    {
        // `prost_types::Timestamp::from_str` is more permissive than the JSON
        // mapping requires, so the input is first validated against strict
        // RFC 3339 (helper defined elsewhere in this module), then parsed, then
        // range-checked against the allowed 0001..=9999 year span.
        validate_strict_rfc3339(v).map_err(Error::custom)?;
        let timestamp: prost_types::Timestamp = v.parse().map_err(Error::custom)?;
        check_timestamp(&timestamp).map_err(Error::custom)?;
        Ok(timestamp)
    }
}
impl Visitor<'_> for GoogleProtobufDurationVisitor {
    type Value = prost_types::Duration;

    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "a duration string")
    }

    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: Error,
    {
        // Parse via `prost_types::Duration::from_str`, then enforce the ±10,000
        // year bound required by the well-known-type definition.
        let duration: prost_types::Duration = v.parse().map_err(Error::custom)?;
        check_duration(&duration).map_err(Error::custom)?;
        Ok(duration)
    }
}
impl Visitor<'_> for GoogleProtobufFieldMaskVisitor {
    type Value = prost_types::FieldMask;

    fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "a field mask string")
    }

    fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
    where
        E: Error,
    {
        // The JSON form is a comma-separated list of dotted lowerCamelCase paths;
        // each segment is converted back to its snake_case proto field name.
        let paths = v
            .split(',')
            .filter(|path| !path.is_empty())
            .map(|path| {
                let mut converted = String::new();
                for (index, segment) in path.split('.').enumerate() {
                    if index > 0 {
                        converted.push('.');
                    }
                    camel_case_to_snake_case(&mut converted, segment)?;
                }
                Ok(converted)
            })
            .collect::<Result<_, ()>>()
            .map_err(|()| Error::custom("invalid field mask"))?;
        Ok(prost_types::FieldMask { paths })
    }
}
impl<'de> DeserializeSeed<'de> for GoogleProtobufValueVisitor {
type Value = prost_types::Value;
fn deserialize<D>(self, deserializer: D) -> Result<Self::Value, D::Error>
where
D: Deserializer<'de>,
{
deserializer.deserialize_any(self)
}
}
impl<'de> Visitor<'de> for GoogleProtobufListVisitor {
type Value = prost_types::ListValue;
fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
where
A: SeqAccess<'de>,
{
let mut values = Vec::with_capacity(seq.size_hint().unwrap_or(0));
while let Some(value) = seq.next_element_seed(GoogleProtobufValueVisitor)? {
values.push(value);
}
Ok(prost_types::ListValue { values })
}
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "a list")
}
}
impl<'de> Visitor<'de> for GoogleProtobufStructVisitor {
type Value = prost_types::Struct;
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
where
A: MapAccess<'de>,
{
let mut fields = BTreeMap::new();
while let Some(key) = map.next_key::<String>()? {
let value = map.next_value_seed(GoogleProtobufValueVisitor)?;
fields.insert(key, value);
}
Ok(prost_types::Struct { fields })
}
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "a map")
}
}
impl<'de> Visitor<'de> for GoogleProtobufValueVisitor {
type Value = prost_types::Value;
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "a value")
}
fn visit_bool<E>(self, v: bool) -> Result<Self::Value, E>
where
E: Error,
{
Ok(prost_types::Value {
kind: Some(prost_types::value::Kind::BoolValue(v)),
})
}
fn visit_i64<E>(self, v: i64) -> Result<Self::Value, E>
where
E: Error,
{
self.visit_f64(v as f64)
}
fn visit_u64<E>(self, v: u64) -> Result<Self::Value, E>
where
E: Error,
{
self.visit_f64(v as f64)
}
fn visit_f64<E>(self, v: f64) -> Result<Self::Value, E>
where
E: Error,
{
Ok(prost_types::Value {
kind: Some(prost_types::value::Kind::NumberValue(v)),
})
}
fn visit_str<E>(self, v: &str) -> Result<Self::Value, E>
where
E: Error,
{
self.visit_string(v.to_owned())
}
fn visit_string<E>(self, v: String) -> Result<Self::Value, E>
where
E: Error,
{
Ok(prost_types::Value {
kind: Some(prost_types::value::Kind::StringValue(v)),
})
}
#[inline]
fn visit_unit<E>(self) -> Result<Self::Value, E>
where
E: Error,
{
Ok(prost_types::Value {
kind: Some(prost_types::value::Kind::NullValue(0)),
})
}
fn visit_seq<A>(self, seq: A) -> Result<Self::Value, A::Error>
where
A: SeqAccess<'de>,
{
GoogleProtobufListVisitor
.visit_seq(seq)
.map(|l| prost_types::Value {
kind: Some(prost_types::value::Kind::ListValue(l)),
})
}
fn visit_map<A>(self, map: A) -> Result<Self::Value, A::Error>
where
A: MapAccess<'de>,
{
GoogleProtobufStructVisitor
.visit_map(map)
.map(|s| prost_types::Value {
kind: Some(prost_types::value::Kind::StructValue(s)),
})
}
}
impl<'de> Visitor<'de> for GoogleProtobufEmptyVisitor {
type Value = ();
fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
where
A: MapAccess<'de>,
{
if map.next_entry::<IgnoredAny, IgnoredAny>()?.is_some() {
return Err(Error::custom("unexpected value in map"));
}
Ok(())
}
fn expecting(&self, f: &mut fmt::Formatter) -> fmt::Result {
write!(f, "an empty map")
}
}
fn find_field<'de, A, D>(
map: &mut A,
buffered_entries: &mut HashMap<Cow<str>, serde_value::Value>,
expected: &str,
value_seed: D,
) -> Result<D::Value, A::Error>
where
A: MapAccess<'de>,
D: DeserializeSeed<'de>,
{
loop {
match map.next_key::<Cow<str>>()? {
Some(key) if key == expected => return map.next_value_seed(value_seed),
Some(key) => {
buffered_entries.insert(key, map.next_value()?);
}
None => return Err(Error::custom(format!("expected '{expected}' field"))),
}
}
}
/// Validates the string is a valid RFC3339 timestamp, requiring upper-case
/// 'T' and 'Z' characters as recommended by the conformance tests.
fn validate_strict_rfc3339(v: &str) -> Result<(), String> {
use std::{ascii, iter::Peekable, str::Bytes};
fn pop_digit(bytes: &mut Peekable<Bytes>) -> bool {
bytes.next_if(u8::is_ascii_digit).is_some()
}
fn pop_digits(bytes: &mut Peekable<Bytes>, n: usize) -> bool {
(0..n).all(|_| pop_digit(bytes))
}
fn pop_char(p: &mut Peekable<Bytes>, c: u8) -> bool {
p.next_if_eq(&c).is_some()
}
fn fmt_next(p: &mut Peekable<Bytes>) -> String {
match p.peek() {
Some(&ch) => format!("'{}'", ascii::escape_default(ch)),
None => "end of string".to_owned(),
}
}
let mut v = v.bytes().peekable();
if !(pop_digits(&mut v, 4)
&& pop_char(&mut v, b'-')
&& pop_digits(&mut v, 2)
&& pop_char(&mut v, b'-')
&& pop_digits(&mut v, 2))
{
return Err("invalid rfc3339 timestamp: invalid date".to_owned());
}
if !pop_char(&mut v, b'T') {
return Err(format!(
"invalid rfc3339 timestamp: expected 'T' but found {}",
fmt_next(&mut v)
));
}
if !(pop_digits(&mut v, 2)
&& pop_char(&mut v, b':')
&& pop_digits(&mut v, 2)
&& pop_char(&mut v, b':')
&& pop_digits(&mut v, 2))
{
return Err("invalid rfc3339 timestamp: invalid time".to_owned());
}
if pop_char(&mut v, b'.') {
if !pop_digit(&mut v) {
return Err("invalid rfc3339 timestamp: empty fractional seconds".to_owned());
}
while pop_digit(&mut v) {}
}
if v.next_if(|&ch| ch == b'+' || ch == b'-').is_some() {
if !(pop_digits(&mut v, 2) && pop_char(&mut v, b':') && pop_digits(&mut v, 2)) {
return Err("invalid rfc3339 timestamp: invalid offset".to_owned());
}
} else if !pop_char(&mut v, b'Z') {
return Err(format!(
"invalid rfc3339 timestamp: expected 'Z', '+' or '-' but found {}",
fmt_next(&mut v)
));
}
if v.peek().is_some() {
return Err(format!(
"invalid rfc3339 timestamp: expected end of string but found {}",
fmt_next(&mut v)
));
}
Ok(())
}
#[cfg(test)]
mod tests {
use super::*;
#[test]
fn test_validate_strict_rfc3339() {
macro_rules! case {
($s:expr => Ok) => {
assert_eq!(validate_strict_rfc3339($s), Ok(()))
};
($s:expr => Err($e:expr)) => {
assert_eq!(validate_strict_rfc3339($s).unwrap_err().to_string(), $e)
};
}
case!("1972-06-30T23:59:60Z" => Ok);
case!("2019-03-26T14:00:00.9Z" => Ok);
case!("2019-03-26T14:00:00.4999Z" => Ok);
case!("2019-03-26T14:00:00.4999+10:00" => Ok);
case!("2019-03-26t14:00Z" => Err("invalid rfc3339 timestamp: expected 'T' but found 't'"));
case!("2019-03-26T14:00z" => Err("invalid rfc3339 timestamp: invalid time"));
case!("2019-03-26T14:00:00,999Z" => Err("invalid rfc3339 timestamp: expected 'Z', '+' or '-' but found ','"));
case!("2019-03-26T10:00-04" => Err("invalid rfc3339 timestamp: invalid time"));
case!("2019-03-26T14:00.9Z" => Err("invalid rfc3339 timestamp: invalid time"));
case!("20190326T1400Z" => Err("invalid rfc3339 timestamp: invalid date"));
case!("2019-02-30" => Err("invalid rfc3339 timestamp: expected 'T' but found end of string"));
case!("2019-03-25T24:01Z" => Err("invalid rfc3339 timestamp: invalid time"));
case!("2019-03-26T14:00+24:00" => Err("invalid rfc3339 timestamp: invalid time"));
case!("2019-03-26Z" => Err("invalid rfc3339 timestamp: expected 'T' but found 'Z'"));
case!("2019-03-26+01:00" => Err("invalid rfc3339 timestamp: expected 'T' but found '+'"));
case!("2019-03-26-04:00" => Err("invalid rfc3339 timestamp: expected 'T' but found '-'"));
case!("2019-03-26T10:00-0400" => Err("invalid rfc3339 timestamp: invalid time"));
case!("+0002019-03-26T14:00Z" => Err("invalid rfc3339 timestamp: invalid date"));
case!("+2019-03-26T14:00Z" => Err("invalid rfc3339 timestamp: invalid date"));
case!("002019-03-26T14:00Z" => Err("invalid rfc3339 timestamp: invalid date"));
case!("019-03-26T14:00Z" => Err("invalid rfc3339 timestamp: invalid date"));
case!("2019-03-26T10:00Q" => Err("invalid rfc3339 timestamp: invalid time"));
case!("2019-03-26T10:00T" => Err("invalid rfc3339 timestamp: invalid time"));
case!("2019-03-26Q" => Err("invalid rfc3339 timestamp: expected 'T' but found 'Q'"));
case!("2019-03-26T" => Err("invalid rfc3339 timestamp: invalid time"));
case!("2019-03-26 14:00Z" => Err("invalid rfc3339 timestamp: expected 'T' but found ' '"));
case!("2019-03-26T14:00:00." => Err("invalid rfc3339 timestamp: empty fractional seconds"));
}
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/serde/ser/mod.rs | prost-reflect/src/dynamic/serde/ser/mod.rs | mod wkt;
use base64::{display::Base64Display, prelude::BASE64_STANDARD};
use serde::ser::{Serialize, SerializeMap, SerializeSeq, Serializer};
use crate::{
descriptor::Kind,
dynamic::{fields::ValueAndDescriptor, serde::SerializeOptions, DynamicMessage, MapKey, Value},
ReflectMessage,
};
struct SerializeWrapper<'a, T> {
value: &'a T,
options: &'a SerializeOptions,
}
pub(super) fn serialize_message<S>(
message: &DynamicMessage,
serializer: S,
options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
SerializeWrapper {
value: message,
options,
}
.serialize(serializer)
}
impl Serialize for SerializeWrapper<'_, DynamicMessage> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let message_desc = self.value.descriptor();
if let Some(serialize) = wkt::get_well_known_type_serializer(message_desc.full_name()) {
serialize(self.value, serializer, self.options)
} else {
let mut map = serializer.serialize_map(None)?;
serialize_dynamic_message_fields(&mut map, self.value, self.options)?;
map.end()
}
}
}
fn serialize_dynamic_message_fields<S>(
map: &mut S,
value: &DynamicMessage,
options: &SerializeOptions,
) -> Result<(), S::Error>
where
S: SerializeMap,
{
let fields = value
.fields
.iter(&value.desc, !options.skip_default_fields, false);
for field in fields {
let (name, value, ref kind) = match field {
ValueAndDescriptor::Field(value, ref field_desc) => {
let name = if options.use_proto_field_name {
field_desc.name()
} else {
field_desc.json_name()
};
(name, value, field_desc.kind())
}
ValueAndDescriptor::Extension(value, ref extension_desc) => {
(extension_desc.json_name(), value, extension_desc.kind())
}
ValueAndDescriptor::Unknown(_) => continue,
};
map.serialize_entry(
name,
&SerializeWrapper {
value: &ValueAndKind {
value: value.as_ref(),
kind,
},
options,
},
)?;
}
Ok(())
}
struct ValueAndKind<'a> {
value: &'a Value,
kind: &'a Kind,
}
impl<'a> Serialize for SerializeWrapper<'a, ValueAndKind<'a>> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match self.value.value {
Value::Bool(value) => serializer.serialize_bool(*value),
Value::I32(value) => serializer.serialize_i32(*value),
Value::I64(value) => {
if self.options.stringify_64_bit_integers {
serializer.collect_str(value)
} else {
serializer.serialize_i64(*value)
}
}
Value::U32(value) => serializer.serialize_u32(*value),
Value::U64(value) => {
if self.options.stringify_64_bit_integers {
serializer.collect_str(value)
} else {
serializer.serialize_u64(*value)
}
}
Value::F32(value) => {
if value.is_finite() {
serializer.serialize_f32(*value)
} else if *value == f32::INFINITY {
serializer.serialize_str("Infinity")
} else if *value == f32::NEG_INFINITY {
serializer.serialize_str("-Infinity")
} else {
debug_assert!(value.is_nan());
serializer.serialize_str("NaN")
}
}
Value::F64(value) => {
if value.is_finite() {
serializer.serialize_f64(*value)
} else if *value == f64::INFINITY {
serializer.serialize_str("Infinity")
} else if *value == f64::NEG_INFINITY {
serializer.serialize_str("-Infinity")
} else {
debug_assert!(value.is_nan());
serializer.serialize_str("NaN")
}
}
Value::String(value) => serializer.serialize_str(value),
Value::Bytes(value) => {
serializer.collect_str(&Base64Display::new(value, &BASE64_STANDARD))
}
Value::EnumNumber(number) => {
let enum_ty = match self.value.kind {
Kind::Enum(enum_ty) => enum_ty,
_ => panic!(
"mismatch between DynamicMessage value {:?} and type {:?}",
self.value.value, self.value.kind
),
};
if enum_ty.full_name() == "google.protobuf.NullValue" {
serializer.serialize_none()
} else if self.options.use_enum_numbers {
serializer.serialize_i32(*number)
} else if let Some(enum_value) = enum_ty.get_value(*number) {
serializer.serialize_str(enum_value.name())
} else {
serializer.serialize_i32(*number)
}
}
Value::Message(message) => message.serialize_with_options(serializer, self.options),
Value::List(values) => {
let mut list = serializer.serialize_seq(Some(values.len()))?;
for value in values {
list.serialize_element(&SerializeWrapper {
value: &ValueAndKind {
value,
kind: self.value.kind,
},
options: self.options,
})?;
}
list.end()
}
Value::Map(values) => {
let value_kind = match self.value.kind {
Kind::Message(message) if message.is_map_entry() => {
message.map_entry_value_field().kind()
}
_ => panic!(
"mismatch between DynamicMessage value {:?} and type {:?}",
self.value.value, self.value.kind
),
};
let mut map = serializer.serialize_map(Some(values.len()))?;
for (key, value) in values {
map.serialize_entry(
&SerializeWrapper {
value: key,
options: self.options,
},
&SerializeWrapper {
value: &ValueAndKind {
value,
kind: &value_kind,
},
options: self.options,
},
)?;
}
map.end()
}
}
}
}
impl Serialize for SerializeWrapper<'_, MapKey> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match self.value {
MapKey::Bool(value) => serializer.collect_str(value),
MapKey::I32(value) => serializer.collect_str(value),
MapKey::I64(value) => serializer.collect_str(value),
MapKey::U32(value) => serializer.collect_str(value),
MapKey::U64(value) => serializer.collect_str(value),
MapKey::String(value) => serializer.serialize_str(value),
}
}
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/serde/ser/wkt.rs | prost-reflect/src/dynamic/serde/ser/wkt.rs | use base64::{display::Base64Display, prelude::BASE64_STANDARD};
use prost::{DecodeError, Message};
use serde::ser::{Error, Serialize, SerializeMap, SerializeSeq, Serializer};
use crate::{
dynamic::{
get_type_url_message_name,
serde::{
case::snake_case_to_camel_case, check_duration, check_timestamp, is_well_known_type,
SerializeOptions,
},
DynamicMessage,
},
ReflectMessage,
};
use super::{serialize_dynamic_message_fields, SerializeWrapper};
#[allow(type_alias_bounds)]
type WellKnownTypeSerializer<S: Serializer> =
fn(&DynamicMessage, S, &SerializeOptions) -> Result<S::Ok, S::Error>;
pub fn get_well_known_type_serializer<S>(full_name: &str) -> Option<WellKnownTypeSerializer<S>>
where
S: Serializer,
{
match full_name {
"google.protobuf.Any" => Some(serialize_any),
"google.protobuf.Timestamp" => Some(serialize_timestamp),
"google.protobuf.Duration" => Some(serialize_duration),
"google.protobuf.Struct" => Some(serialize_struct),
"google.protobuf.FloatValue" => Some(serialize_float),
"google.protobuf.DoubleValue" => Some(serialize_double),
"google.protobuf.Int32Value" => Some(serialize_int32),
"google.protobuf.Int64Value" => Some(serialize_int64),
"google.protobuf.UInt32Value" => Some(serialize_uint32),
"google.protobuf.UInt64Value" => Some(serialize_uint64),
"google.protobuf.BoolValue" => Some(serialize_bool),
"google.protobuf.StringValue" => Some(serialize_string),
"google.protobuf.BytesValue" => Some(serialize_bytes),
"google.protobuf.FieldMask" => Some(serialize_field_mask),
"google.protobuf.ListValue" => Some(serialize_list),
"google.protobuf.Value" => Some(serialize_value),
"google.protobuf.Empty" => Some(serialize_empty),
_ => {
debug_assert!(!is_well_known_type(full_name));
None
}
}
}
fn serialize_any<S>(
msg: &DynamicMessage,
serializer: S,
options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let raw: prost_types::Any = msg.transcode_to().map_err(decode_to_ser_err)?;
let message_name = get_type_url_message_name(&raw.type_url).map_err(Error::custom)?;
let message_desc = msg
.descriptor()
.parent_pool()
.get_message_by_name(message_name)
.ok_or_else(|| Error::custom(format!("message '{message_name}' not found")))?;
let mut payload_message = DynamicMessage::new(message_desc);
payload_message
.merge(raw.value.as_ref())
.map_err(decode_to_ser_err)?;
if is_well_known_type(message_name) {
let mut map = serializer.serialize_map(Some(2))?;
map.serialize_entry("@type", &raw.type_url)?;
map.serialize_entry(
"value",
&SerializeWrapper {
value: &payload_message,
options,
},
)?;
map.end()
} else {
let mut map = serializer.serialize_map(None)?;
map.serialize_entry("@type", &raw.type_url)?;
serialize_dynamic_message_fields(&mut map, &payload_message, options)?;
map.end()
}
}
fn serialize_timestamp<S>(
msg: &DynamicMessage,
serializer: S,
_options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let timestamp: prost_types::Timestamp = msg.transcode_to().map_err(decode_to_ser_err)?;
check_timestamp(×tamp).map_err(Error::custom)?;
serializer.collect_str(×tamp)
}
fn serialize_duration<S>(
msg: &DynamicMessage,
serializer: S,
_options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let duration: prost_types::Duration = msg.transcode_to().map_err(decode_to_ser_err)?;
check_duration(&duration).map_err(Error::custom)?;
serializer.collect_str(&duration)
}
fn serialize_float<S>(
msg: &DynamicMessage,
serializer: S,
_options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let raw: f32 = msg.transcode_to().map_err(decode_to_ser_err)?;
serializer.serialize_f32(raw)
}
fn serialize_double<S>(
msg: &DynamicMessage,
serializer: S,
_options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let raw: f64 = msg.transcode_to().map_err(decode_to_ser_err)?;
serializer.serialize_f64(raw)
}
fn serialize_int32<S>(
msg: &DynamicMessage,
serializer: S,
_options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let raw: i32 = msg.transcode_to().map_err(decode_to_ser_err)?;
serializer.serialize_i32(raw)
}
fn serialize_int64<S>(
msg: &DynamicMessage,
serializer: S,
options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let raw: i64 = msg.transcode_to().map_err(decode_to_ser_err)?;
if options.stringify_64_bit_integers {
serializer.collect_str(&raw)
} else {
serializer.serialize_i64(raw)
}
}
fn serialize_uint32<S>(
msg: &DynamicMessage,
serializer: S,
_options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let raw: u32 = msg.transcode_to().map_err(decode_to_ser_err)?;
serializer.serialize_u32(raw)
}
fn serialize_uint64<S>(
msg: &DynamicMessage,
serializer: S,
options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let raw: u64 = msg.transcode_to().map_err(decode_to_ser_err)?;
if options.stringify_64_bit_integers {
serializer.collect_str(&raw)
} else {
serializer.serialize_u64(raw)
}
}
fn serialize_bool<S>(
msg: &DynamicMessage,
serializer: S,
_options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let raw: bool = msg.transcode_to().map_err(decode_to_ser_err)?;
serializer.serialize_bool(raw)
}
fn serialize_string<S>(
msg: &DynamicMessage,
serializer: S,
_options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let raw: String = msg.transcode_to().map_err(decode_to_ser_err)?;
serializer.serialize_str(&raw)
}
fn serialize_bytes<S>(
msg: &DynamicMessage,
serializer: S,
_options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let raw: Vec<u8> = msg.transcode_to().map_err(decode_to_ser_err)?;
serializer.collect_str(&Base64Display::new(&raw, &BASE64_STANDARD))
}
fn serialize_field_mask<S>(
msg: &DynamicMessage,
serializer: S,
_options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let raw: prost_types::FieldMask = msg.transcode_to().map_err(decode_to_ser_err)?;
let mut result = String::new();
for path in raw.paths {
if !result.is_empty() {
result.push(',');
}
let mut first = true;
for part in path.split('.') {
if !first {
result.push('.');
}
snake_case_to_camel_case(&mut result, part)
.map_err(|()| Error::custom("cannot roundtrip field name through camelcase"))?;
first = false;
}
}
serializer.serialize_str(&result)
}
fn serialize_empty<S>(
_: &DynamicMessage,
serializer: S,
_options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serializer.collect_map(std::iter::empty::<((), ())>())
}
fn serialize_value<S>(
msg: &DynamicMessage,
serializer: S,
options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let raw: prost_types::Value = msg.transcode_to().map_err(decode_to_ser_err)?;
serialize_value_inner(&raw, serializer, options)
}
fn serialize_struct<S>(
msg: &DynamicMessage,
serializer: S,
options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let raw: prost_types::Struct = msg.transcode_to().map_err(decode_to_ser_err)?;
serialize_struct_inner(&raw, serializer, options)
}
fn serialize_list<S>(
msg: &DynamicMessage,
serializer: S,
options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let raw: prost_types::ListValue = msg.transcode_to().map_err(decode_to_ser_err)?;
serialize_list_inner(&raw, serializer, options)
}
impl Serialize for SerializeWrapper<'_, prost_types::Value> {
fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
serialize_value_inner(self.value, serializer, self.options)
}
}
fn serialize_value_inner<S>(
raw: &prost_types::Value,
serializer: S,
options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
match &raw.kind {
None | Some(prost_types::value::Kind::NullValue(_)) => serializer.serialize_none(),
Some(prost_types::value::Kind::BoolValue(value)) => serializer.serialize_bool(*value),
Some(prost_types::value::Kind::NumberValue(number)) => {
if number.is_finite() {
serializer.serialize_f64(*number)
} else {
Err(Error::custom(
"cannot serialize non-finite double in google.protobuf.Value",
))
}
}
Some(prost_types::value::Kind::StringValue(value)) => serializer.serialize_str(value),
Some(prost_types::value::Kind::ListValue(value)) => {
serialize_list_inner(value, serializer, options)
}
Some(prost_types::value::Kind::StructValue(value)) => {
serialize_struct_inner(value, serializer, options)
}
}
}
fn serialize_struct_inner<S>(
raw: &prost_types::Struct,
serializer: S,
options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut map = serializer.serialize_map(Some(raw.fields.len()))?;
for (key, value) in &raw.fields {
map.serialize_entry(key, &SerializeWrapper { value, options })?;
}
map.end()
}
fn serialize_list_inner<S>(
raw: &prost_types::ListValue,
serializer: S,
options: &SerializeOptions,
) -> Result<S::Ok, S::Error>
where
S: Serializer,
{
let mut list = serializer.serialize_seq(Some(raw.values.len()))?;
for value in &raw.values {
list.serialize_element(&SerializeWrapper { value, options })?;
}
list.end()
}
fn decode_to_ser_err<E>(err: DecodeError) -> E
where
E: Error,
{
Error::custom(format!("error decoding: {err}"))
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/text_format/mod.rs | prost-reflect/src/dynamic/text_format/mod.rs | mod format;
#[cfg(feature = "text-format")]
mod parse;
#[cfg(feature = "text-format")]
pub use self::parse::ParseError;
#[cfg(feature = "text-format")]
use crate::{DynamicMessage, MessageDescriptor};
pub(super) use self::format::Writer;
/// Options to control printing of the protobuf text format.
///
/// Used by [`DynamicMessage::to_text_format_with_options()`].
#[derive(Debug, Clone)]
#[cfg_attr(docsrs, doc(cfg(feature = "text-format")))]
pub struct FormatOptions {
pretty: bool,
skip_unknown_fields: bool,
expand_any: bool,
skip_default_fields: bool,
print_message_fields_in_index_order: bool,
}
#[cfg(feature = "text-format")]
impl DynamicMessage {
/// Parse a [`DynamicMessage`] from the given message encoded using the [text format](https://developers.google.com/protocol-buffers/docs/text-format-spec).
///
/// # Examples
///
/// ```
/// # use prost::Message;
/// # use prost_reflect::{DynamicMessage, DescriptorPool, Value};
/// # let pool = DescriptorPool::decode(include_bytes!("../../file_descriptor_set.bin").as_ref()).unwrap();
/// # let message_descriptor = pool.get_message_by_name("package.MyMessage").unwrap();
/// let dynamic_message = DynamicMessage::parse_text_format(message_descriptor, "foo: 150").unwrap();
/// assert_eq!(dynamic_message.get_field_by_name("foo").unwrap().as_ref(), &Value::I32(150));
/// ```
#[cfg_attr(docsrs, doc(cfg(feature = "text-format")))]
pub fn parse_text_format(desc: MessageDescriptor, input: &str) -> Result<Self, ParseError> {
let mut message = DynamicMessage::new(desc);
message.merge_text_format(input)?;
Ok(message)
}
/// Merges the given message encoded using the [text format](https://developers.google.com/protocol-buffers/docs/text-format-spec) into this message.
///
/// # Examples
///
/// ```
/// # use prost::Message;
/// # use prost_reflect::{DynamicMessage, DescriptorPool, Value};
/// # let pool = DescriptorPool::decode(include_bytes!("../../file_descriptor_set.bin").as_ref()).unwrap();
/// # let message_descriptor = pool.get_message_by_name("package.MyMessage").unwrap();
/// let mut dynamic_message = DynamicMessage::new(message_descriptor);
/// dynamic_message.merge_text_format("foo: 150").unwrap();
/// assert_eq!(dynamic_message.get_field_by_name("foo").unwrap().as_ref(), &Value::I32(150));
/// ```
#[cfg_attr(docsrs, doc(cfg(feature = "text-format")))]
pub fn merge_text_format(&mut self, input: &str) -> Result<(), ParseError> {
parse::Parser::new(input)
.parse_message(self)
.map_err(|kind| ParseError::new(kind, input))
}
/// Formats this dynamic message using the protobuf text format, with default options.
///
/// # Examples
///
/// ```
/// # use prost::Message;
/// # use prost_types::FileDescriptorSet;
/// # use prost_reflect::{DynamicMessage, DescriptorPool, Value, text_format::FormatOptions};
/// # let pool = DescriptorPool::decode(include_bytes!("../../file_descriptor_set.bin").as_ref()).unwrap();
/// # let message_descriptor = pool.get_message_by_name("package.MyMessage").unwrap();
/// let dynamic_message = DynamicMessage::decode(message_descriptor, b"\x08\x96\x01\x1a\x02\x10\x42".as_ref()).unwrap();
/// assert_eq!(dynamic_message.to_text_format(), "foo:150,nested{bar:66}");
/// ```
#[cfg_attr(docsrs, doc(cfg(feature = "text-format")))]
pub fn to_text_format(&self) -> String {
self.to_text_format_with_options(&FormatOptions::new())
}
/// Formats this dynamic message using the protobuf text format, with custom options.
///
/// # Examples
///
/// ```
/// # use prost::Message;
/// # use prost_types::FileDescriptorSet;
/// # use prost_reflect::{DynamicMessage, DescriptorPool, Value, text_format::FormatOptions};
/// # let pool = DescriptorPool::decode(include_bytes!("../../file_descriptor_set.bin").as_ref()).unwrap();
/// # let message_descriptor = pool.get_message_by_name("package.MyMessage").unwrap();
/// let dynamic_message = DynamicMessage::decode(message_descriptor, b"\x08\x96\x01\x1a\x02\x10\x42".as_ref()).unwrap();
/// let options = FormatOptions::new().pretty(true);
/// assert_eq!(dynamic_message.to_text_format_with_options(&options), "foo: 150\nnested {\n bar: 66\n}");
/// ```
#[cfg_attr(docsrs, doc(cfg(feature = "text-format")))]
pub fn to_text_format_with_options(&self, options: &FormatOptions) -> String {
let mut result = String::new();
format::Writer::new(options.clone(), &mut result)
.fmt_message(self)
.expect("writing to string cannot fail");
result
}
}
impl FormatOptions {
/// Creates new instance of [`FormatOptions`] with default options.
pub fn new() -> Self {
FormatOptions::default()
}
/// Whether to prettify the format output.
///
/// If set to `true`, each field will be printed on a new line, and nested messages will be indented.
///
/// The default value is `false`.
pub fn pretty(mut self, yes: bool) -> Self {
self.pretty = yes;
self
}
/// Whether to include unknown fields in the output.
///
/// If set to `false`, unknown fields will be printed. The protobuf format does not include type information,
/// so the formatter will attempt to infer types.
///
/// The default value is `true`.
///
/// # Examples
///
/// ```
/// # use prost::Message;
/// # use prost_types::FileDescriptorSet;
/// # use prost_reflect::{DynamicMessage, DescriptorPool, Value, text_format::FormatOptions};
/// # let pool = DescriptorPool::decode(include_bytes!("../../file_descriptor_set.bin").as_ref()).unwrap();
/// # let message_descriptor = pool.get_message_by_name("google.protobuf.Empty").unwrap();
/// let dynamic_message = DynamicMessage::decode(message_descriptor, b"\x08\x96\x01\x1a\x02\x10\x42".as_ref()).unwrap();
/// assert_eq!(dynamic_message.to_text_format(), "");
/// let options = FormatOptions::new().skip_unknown_fields(false);
/// assert_eq!(dynamic_message.to_text_format_with_options(&options), "1:150,3{2:66}");
/// ```
#[cfg(feature = "text-format")]
pub fn skip_unknown_fields(mut self, yes: bool) -> Self {
self.skip_unknown_fields = yes;
self
}
/// Whether to skip fields which have their default value.
///
/// If `true`, any fields for which [`has_field`][DynamicMessage::has_field] returns `false` will
/// not be included. If `false`, they will be included with their default value.
///
/// The default value is `true`.
#[cfg(feature = "text-format")]
pub fn skip_default_fields(mut self, yes: bool) -> Self {
self.skip_default_fields = yes;
self
}
/// Whether to print message fields in the order they were defined in source code.
///
/// If set to `true`, message fields will be printed in the order they were defined in the source code.
/// Otherwise, they will be printed in field number order.
///
/// The default value is `false`.
#[cfg(feature = "text-format")]
pub fn print_message_fields_in_index_order(mut self, yes: bool) -> Self {
self.print_message_fields_in_index_order = yes;
self
}
/// Controls whether `google.protobuf.Any` messages are printed in their
/// expanded form.
///
/// When `true` (the default), the payload is decoded and written as:
///
/// ```textproto
/// [type.googleapis.com/package.MyMessage] {
///   foo: 150
/// }
/// ```
///
/// When `false`, the message is written like any other, as its raw
/// `type_url` and `value` fields:
///
/// ```textproto
/// type_url: "type.googleapis.com/package.MyMessage"
/// value: "\x08\x96\x01"
/// ```
///
/// # Examples
///
/// ```
/// # use prost::Message;
/// # use prost_types::FileDescriptorSet;
/// # use prost_reflect::{DynamicMessage, DescriptorPool, Value, text_format::FormatOptions, bytes::Bytes};
/// # let pool = DescriptorPool::decode(include_bytes!("../../file_descriptor_set.bin").as_ref()).unwrap();
/// let message_descriptor = pool.get_message_by_name("google.protobuf.Any").unwrap();
/// let mut dynamic_message = DynamicMessage::new(message_descriptor);
/// dynamic_message.set_field_by_name("type_url", Value::String("type.googleapis.com/package.MyMessage".to_owned()));
/// dynamic_message.set_field_by_name("value", Value::Bytes(Bytes::from_static(b"\x08\x96\x01\x1a\x02\x10\x42".as_ref())));
///
/// assert_eq!(dynamic_message.to_text_format(), "[type.googleapis.com/package.MyMessage]{foo:150,nested{bar:66}}");
/// let options = FormatOptions::new().expand_any(false);
/// assert_eq!(dynamic_message.to_text_format_with_options(&options), r#"type_url:"type.googleapis.com/package.MyMessage",value:"\010\226\001\032\002\020B""#);
/// ```
#[cfg(feature = "text-format")]
pub fn expand_any(self, yes: bool) -> Self {
    Self {
        expand_any: yes,
        ..self
    }
}
}
impl Default for FormatOptions {
    /// Returns the default options: compact output, unknown and
    /// default-valued fields skipped, `Any` expansion enabled, and fields
    /// ordered by field number.
    fn default() -> Self {
        FormatOptions {
            pretty: false,
            expand_any: true,
            skip_unknown_fields: true,
            skip_default_fields: true,
            print_message_fields_in_index_order: false,
        }
    }
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/text_format/format.rs | prost-reflect/src/dynamic/text_format/format.rs | use std::fmt::{self, Write};
use prost::Message;
use crate::{
dynamic::{
fields::ValueAndDescriptor,
fmt_string, get_type_url_message_name,
text_format::FormatOptions,
unknown::{UnknownField, UnknownFieldSet, UnknownFieldValue},
},
DynamicMessage, Kind, MapKey, Value,
};
/// Streams the text-format rendering of a message into a `fmt::Write` sink.
pub(in crate::dynamic) struct Writer<'a, W> {
    // Formatting flags (pretty-printing, skipping, Any expansion, ...).
    options: FormatOptions,
    // Destination for all output.
    f: &'a mut W,
    // Current indentation depth in spaces; only consulted when `options.pretty`.
    indent_level: u32,
}
impl<'a, W> Writer<'a, W>
where
W: Write,
{
/// Creates a writer that renders into `f` according to `options`,
/// starting at zero indentation.
pub fn new(options: FormatOptions, f: &'a mut W) -> Self {
    let indent_level = 0;
    Writer {
        f,
        options,
        indent_level,
    }
}
/// Writes the top-level fields of `message`, with no surrounding braces.
pub fn fmt_message(&mut self, message: &DynamicMessage) -> fmt::Result {
    // When enabled, a decodable `google.protobuf.Any` is printed in the
    // expanded `[type_url]{...}` form instead of its raw fields.
    if self.options.expand_any {
        if let Some((type_url, body)) = as_any(message) {
            self.f.write_char('[')?;
            self.f.write_str(&type_url)?;
            self.f.write_str("]")?;
            // No descriptor kind is passed for the synthesized body value.
            self.fmt_field_value(&Value::Message(body), None)?;
            return Ok(());
        }
    }
    let fields = message.fields.iter(
        &message.desc,
        !self.options.skip_default_fields,
        self.options.print_message_fields_in_index_order,
    );
    if self.options.skip_unknown_fields {
        self.fmt_delimited(
            fields.filter(|f| !matches!(f, ValueAndDescriptor::Unknown(..))),
            Writer::fmt_message_field,
        )
    } else {
        self.fmt_delimited(fields, Writer::fmt_message_field)
    }
}
/// Writes a single value. `kind` supplies schema information where the
/// value alone is ambiguous (resolving enum numbers to names, and finding
/// the value type of map entries).
pub fn fmt_value(&mut self, value: &Value, kind: Option<&Kind>) -> fmt::Result {
    match value {
        Value::Bool(value) => write!(self.f, "{value}"),
        Value::I32(value) => write!(self.f, "{value}"),
        Value::I64(value) => write!(self.f, "{value}"),
        Value::U32(value) => write!(self.f, "{value}"),
        Value::U64(value) => write!(self.f, "{value}"),
        // Whole-number floats get a trailing `.0` so they read back as
        // floating point rather than integers.
        Value::F32(value) => {
            if value.fract() == 0.0 {
                write!(self.f, "{value:.1}")
            } else {
                write!(self.f, "{value}")
            }
        }
        Value::F64(value) => {
            if value.fract() == 0.0 {
                write!(self.f, "{value:.1}")
            } else {
                write!(self.f, "{value}")
            }
        }
        Value::String(s) => self.fmt_string(s.as_bytes()),
        Value::Bytes(s) => self.fmt_string(s.as_ref()),
        Value::EnumNumber(value) => {
            // Prefer the symbolic name when the enum descriptor knows it;
            // otherwise fall back to the raw number.
            if let Some(Kind::Enum(desc)) = kind {
                if let Some(value) = desc.get_value(*value) {
                    return self.f.write_str(value.name());
                }
            }
            write!(self.f, "{value}")
        }
        Value::Message(message) => {
            let mut fields = message.fields.iter(
                &message.desc,
                !self.options.skip_default_fields,
                self.options.print_message_fields_in_index_order,
            );
            // If nothing will be printed (no fields, or only unknown fields
            // which are being skipped), collapse to `{}`.
            if fields.all(|f| {
                self.options.skip_unknown_fields && matches!(f, ValueAndDescriptor::Unknown(..))
            }) {
                self.f.write_str("{}")
            } else if self.options.pretty {
                self.f.write_char('{')?;
                self.indent_level += 2;
                self.fmt_newline()?;
                self.fmt_message(message)?;
                self.indent_level -= 2;
                self.fmt_newline()?;
                self.f.write_char('}')
            } else {
                self.f.write_char('{')?;
                self.fmt_message(message)?;
                self.f.write_char('}')
            }
        }
        Value::List(list) => {
            self.fmt_list(list.iter(), |this, value| this.fmt_value(value, kind))
        }
        Value::Map(map) => {
            // Maps render as a list of synthetic `{key: k, value: v}` entries.
            let value_kind = kind
                .and_then(|k| k.as_message())
                .map(|m| m.map_entry_value_field().kind());
            self.fmt_list(map.iter(), |this, (key, value)| {
                if this.options.pretty {
                    this.f.write_str("{")?;
                    this.indent_level += 2;
                    this.fmt_newline()?;
                    this.f.write_str("key: ")?;
                    this.fmt_map_key(key)?;
                    this.fmt_newline()?;
                    this.f.write_str("value")?;
                    this.fmt_field_value(value, value_kind.as_ref())?;
                    this.indent_level -= 2;
                    this.fmt_newline()?;
                    this.f.write_char('}')
                } else {
                    this.f.write_str("{key:")?;
                    this.fmt_map_key(key)?;
                    this.f.write_str(",value")?;
                    this.fmt_field_value(value, value_kind.as_ref())?;
                    this.f.write_char('}')
                }
            })
        }
    }
}
/// Writes a map key; keys are always scalars, never messages.
fn fmt_map_key(&mut self, value: &MapKey) -> fmt::Result {
    match *value {
        MapKey::Bool(v) => write!(self.f, "{v}"),
        MapKey::I32(v) => write!(self.f, "{v}"),
        MapKey::I64(v) => write!(self.f, "{v}"),
        MapKey::U32(v) => write!(self.f, "{v}"),
        MapKey::U64(v) => write!(self.f, "{v}"),
        MapKey::String(ref s) => self.fmt_string(s.as_bytes()),
    }
}
/// Writes one field of a message: its name followed by its value.
fn fmt_message_field(&mut self, field: ValueAndDescriptor) -> fmt::Result {
    match field {
        ValueAndDescriptor::Field(value, desc) => {
            // Groups are named by their message type, not the field name.
            if desc.is_group() {
                write!(self.f, "{}", desc.kind().as_message().unwrap().name())?;
            } else {
                write!(self.f, "{}", desc.name())?;
            }
            self.fmt_field_value(&value, Some(&desc.kind()))
        }
        ValueAndDescriptor::Extension(value, desc) => {
            // Extensions use their fully-qualified name in brackets.
            write!(self.f, "[{}]", desc.full_name())?;
            self.fmt_field_value(&value, Some(&desc.kind()))
        }
        ValueAndDescriptor::Unknown(values) => {
            self.fmt_delimited(values.iter(), Writer::fmt_unknown_field)
        }
    }
}
/// Writes the separator and value portion of a field. Message values are
/// written as `name{...}` with no colon; all other kinds as `name: value`.
fn fmt_field_value(&mut self, value: &Value, kind: Option<&Kind>) -> fmt::Result {
    if !matches!(value, Value::Message(_)) {
        self.f.write_char(':')?;
    }
    self.fmt_padding()?;
    self.fmt_value(value, kind)
}
/// Writes an unknown field as `number: value`, choosing a representation
/// from its wire type since no schema information is available.
pub fn fmt_unknown_field(&mut self, field: &UnknownField) -> fmt::Result {
    write!(self.f, "{}", field.number())?;
    match field.value() {
        UnknownFieldValue::Varint(int) => {
            self.f.write_char(':')?;
            self.fmt_padding()?;
            write!(self.f, "{int}")
        }
        // Fixed-width values print as hex: their interpretation (int vs
        // float, signedness) cannot be known without a descriptor.
        UnknownFieldValue::ThirtyTwoBit(bytes) => {
            self.f.write_char(':')?;
            self.fmt_padding()?;
            write!(self.f, "0x{:08x}", u32::from_le_bytes(*bytes))
        }
        UnknownFieldValue::SixtyFourBit(bytes) => {
            self.f.write_char(':')?;
            self.fmt_padding()?;
            write!(self.f, "0x{:016x}", u64::from_le_bytes(*bytes))
        }
        UnknownFieldValue::LengthDelimited(bytes) => {
            // If the bytes decode as a field set, render as a nested
            // message; otherwise fall back to a string literal.
            if !bytes.is_empty() {
                if let Ok(set) = UnknownFieldSet::decode(bytes.clone()) {
                    self.fmt_padding()?;
                    return self.fmt_unknown_field_set(&set);
                }
            }
            self.f.write_char(':')?;
            self.fmt_padding()?;
            self.fmt_string(bytes.as_ref())
        }
        UnknownFieldValue::Group(set) => {
            self.fmt_padding()?;
            self.fmt_unknown_field_set(set)
        }
    }
}
/// Writes a set of unknown fields wrapped in braces.
fn fmt_unknown_field_set(&mut self, set: &UnknownFieldSet) -> fmt::Result {
    if set.is_empty() {
        return self.f.write_str("{}");
    }
    self.f.write_char('{')?;
    if self.options.pretty {
        self.indent_level += 2;
        self.fmt_newline()?;
        self.fmt_delimited(set.iter(), Writer::fmt_unknown_field)?;
        self.indent_level -= 2;
        self.fmt_newline()?;
    } else {
        self.fmt_delimited(set.iter(), Writer::fmt_unknown_field)?;
    }
    self.f.write_char('}')
}
/// Writes `bytes` as a quoted, escaped string literal.
fn fmt_string(&mut self, bytes: &[u8]) -> fmt::Result {
    fmt_string(&mut self.f, bytes)
}
/// Writes each item yielded by `iter` using `f`, separating consecutive
/// items with a newline (pretty mode) or a comma (compact mode).
fn fmt_delimited<T>(
    &mut self,
    iter: impl Iterator<Item = T>,
    f: impl Fn(&mut Self, T) -> fmt::Result,
) -> fmt::Result {
    for (index, item) in iter.enumerate() {
        // No separator before the first item.
        if index > 0 {
            if self.options.pretty {
                self.fmt_newline()?;
            } else {
                self.f.write_char(',')?;
            }
        }
        f(self, item)?;
    }
    Ok(())
}
/// Writes items as a bracketed, comma-separated list (with a space after
/// each comma in pretty mode).
fn fmt_list<I>(
    &mut self,
    iter: impl Iterator<Item = I>,
    f: impl Fn(&mut Self, I) -> fmt::Result,
) -> fmt::Result {
    self.f.write_char('[')?;
    for (index, item) in iter.enumerate() {
        if index > 0 {
            self.f.write_char(',')?;
            self.fmt_padding()?;
        }
        f(self, item)?;
    }
    self.f.write_char(']')
}
/// Writes a single space, but only when pretty-printing.
fn fmt_padding(&mut self) -> fmt::Result {
    match self.options.pretty {
        true => self.f.write_char(' '),
        false => Ok(()),
    }
}
/// Starts a new line and writes the current indentation as spaces.
fn fmt_newline(&mut self) -> fmt::Result {
    self.f.write_char('\n')?;
    (0..self.indent_level).try_for_each(|_| self.f.write_char(' '))
}
}
/// If `message` is a well-formed `google.protobuf.Any` whose payload type
/// is registered in the descriptor pool, returns its type URL and the
/// decoded payload message; otherwise `None`.
fn as_any(message: &DynamicMessage) -> Option<(String, DynamicMessage)> {
    if message.desc.full_name() != "google.protobuf.Any" {
        return None;
    }
    let any = message.transcode_to::<prost_types::Any>().ok()?;
    let message_name = get_type_url_message_name(&any.type_url).ok()?;
    let desc = message
        .desc
        .parent_pool()
        .get_message_by_name(message_name)?;
    let body = DynamicMessage::decode(desc, any.value.as_slice()).ok()?;
    Some((any.type_url, body))
}
#[cfg(test)]
#[cfg(feature = "text-format")]
mod tests {
use super::*;
use crate::ReflectMessage;
/// Renders `value` in compact form with unknown fields included.
fn fmt_unknown(value: &UnknownFieldSet) -> String {
    let mut out = String::new();
    let options = FormatOptions::new().skip_unknown_fields(false);
    Writer::new(options, &mut out)
        .fmt_delimited(value.iter(), Writer::fmt_unknown_field)
        .unwrap();
    out
}
/// Renders `value` in pretty form with unknown fields included.
fn fmt_unknown_pretty(value: &UnknownFieldSet) -> String {
    let mut out = String::new();
    let options = FormatOptions::new().skip_unknown_fields(false).pretty(true);
    Writer::new(options, &mut out)
        .fmt_delimited(value.iter(), Writer::fmt_unknown_field)
        .unwrap();
    out
}
#[test]
fn fmt_unknown_scalar() {
let value = UnknownFieldSet::decode(b"\x09\x9a\x99\x99\x99\x99\x99\xf1\x3f\x15\xcd\xcc\x0c\x40\x18\x03\x20\x04\x28\x05\x30\x06\x38\x0e\x40\x10\x4d\x09\x00\x00\x00\x51\x0a\x00\x00\x00\x00\x00\x00\x00\x5d\x0b\x00\x00\x00\x61\x0c\x00\x00\x00\x00\x00\x00\x00\x68\x01\x72\x01\x35\x7a\x07\x69\xa6\xbe\x6d\xb6\xff\x58".as_ref()).unwrap();
assert_eq!(
fmt_unknown(&value),
r#"1:0x3ff199999999999a,2:0x400ccccd,3:3,4:4,5:5,6:6,7:14,8:16,9:0x00000009,10:0x000000000000000a,11:0x0000000b,12:0x000000000000000c,13:1,14:"5",15:"i\246\276m\266\377X""#
);
assert_eq!(
fmt_unknown_pretty(&value),
r#"1: 0x3ff199999999999a
2: 0x400ccccd
3: 3
4: 4
5: 5
6: 6
7: 14
8: 16
9: 0x00000009
10: 0x000000000000000a
11: 0x0000000b
12: 0x000000000000000c
13: 1
14: "5"
15: "i\246\276m\266\377X""#
);
}
#[test]
fn fmt_unknown_complex_type() {
let value = UnknownFieldSet::decode(b"\x0a\x15\x0a\x01\x31\x12\x10\x09\x9a\x99\x99\x99\x99\x99\xf1\x3f\x15\xcd\xcc\x0c\x40\x18\x03\x12\x0d\x08\x03\x12\x09\x38\x0e\x40\x10\x4d\x09\x00\x00\x00\x1a\x16\x5d\x0b\x00\x00\x00\x61\x0c\x00\x00\x00\x00\x00\x00\x00\x68\x01\x72\x01\x35\x7a\x01\x36\x22\x0e\x00\x01\x02\x03\xfc\xff\xff\xff\xff\xff\xff\xff\xff\x01\x28\x01".as_ref()).unwrap();
assert_eq!(
fmt_unknown(&value),
r#"1{1:"1",2{1:0x3ff199999999999a,2:0x400ccccd,3:3}},2{1:3,2{7:14,8:16,9:0x00000009}},3{11:0x0000000b,12:0x000000000000000c,13:1,14:"5",15:"6"},4:"\000\001\002\003\374\377\377\377\377\377\377\377\377\001",5:1"#
);
assert_eq!(
fmt_unknown_pretty(&value),
r#"1 {
1: "1"
2 {
1: 0x3ff199999999999a
2: 0x400ccccd
3: 3
}
}
2 {
1: 3
2 {
7: 14
8: 16
9: 0x00000009
}
}
3 {
11: 0x0000000b
12: 0x000000000000000c
13: 1
14: "5"
15: "6"
}
4: "\000\001\002\003\374\377\377\377\377\377\377\377\377\001"
5: 1"#
);
}
#[test]
fn fmt_unknown_group() {
let value = UnknownFieldSet::decode(b"\x0b\x0a\x03\x62\x61\x72\x0c\x13\x0a\x03\x66\x6f\x6f\x10\xfb\xff\xff\xff\xff\xff\xff\xff\xff\x01\x14\x1b\x0a\x00\x1c\x1b\x0a\x05\x68\x65\x6c\x6c\x6f\x10\x0a\x1c".as_ref()).unwrap();
assert_eq!(
fmt_unknown(&value),
r#"1{1:"bar"},2{1:"foo",2:18446744073709551611},3{1:""},3{1:"hello",2:10}"#
);
assert_eq!(
fmt_unknown_pretty(&value),
r#"1 {
1: "bar"
}
2 {
1: "foo"
2: 18446744073709551611
}
3 {
1: ""
}
3 {
1: "hello"
2: 10
}"#
);
}
// Default-valued fields are emitted when skip_default_fields is disabled.
#[test]
fn fmt_include_default() {
    let timestamp: prost_types::Timestamp = Default::default();
    let message = timestamp.transcode_to_dynamic();
    let mut string = String::new();
    Writer::new(FormatOptions::new().skip_default_fields(false), &mut string)
        .fmt_message(&message)
        .unwrap();
    assert_eq!(string, "seconds:0,nanos:0");
}
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/text_format/parse/error.rs | prost-reflect/src/dynamic/text_format/parse/error.rs | use logos::Span;
use std::{
error::Error,
fmt::{self, Display},
};
/// An error that may occur while parsing the protobuf text format.
#[derive(Debug)]
#[cfg_attr(docsrs, doc(cfg(feature = "text-format")))]
pub struct ParseError {
    // The specific failure; boxed so ParseError stays small in Results.
    kind: Box<ParseErrorKind>,
    // The original input text, retained so miette can render labeled spans.
    #[cfg(feature = "miette")]
    source: String,
}
impl ParseError {
    // With the `miette` feature, the source text is kept so diagnostics can
    // point at the offending span.
    #[cfg(feature = "miette")]
    pub(crate) fn new(kind: ParseErrorKind, source: &str) -> Self {
        ParseError {
            kind: Box::new(kind),
            source: source.to_owned(),
        }
    }
    // Without `miette`, the source text is unused and discarded.
    #[cfg(not(feature = "miette"))]
    pub(crate) fn new(kind: ParseErrorKind, _: &str) -> Self {
        ParseError {
            kind: Box::new(kind),
        }
    }
}
#[derive(Debug, Clone, PartialEq)]
#[cfg_attr(feature = "miette", derive(miette::Diagnostic))]
pub(crate) enum ParseErrorKind {
InvalidToken {
#[cfg_attr(feature = "miette", label("found here"))]
span: Span,
},
InvalidStringCharacters {
#[cfg_attr(feature = "miette", label("invalid characters"))]
span: Span,
},
InvalidStringEscape {
#[cfg_attr(feature = "miette", label("defined here"))]
span: Span,
},
InvalidUtf8String {
#[cfg_attr(feature = "miette", label("defined here"))]
span: Span,
},
NoSpaceBetweenIntAndIdent {
#[cfg_attr(feature = "miette", label("found here"))]
span: Span,
},
UnexpectedToken {
expected: String,
found: String,
#[cfg_attr(feature = "miette", label("found here"))]
span: Span,
},
UnexpectedEof {
expected: String,
},
#[cfg_attr(
feature = "miette",
diagnostic(help("the value must be between {min} and {max} inclusive"))
)]
IntegerValueOutOfRange {
expected: String,
actual: String,
min: String,
max: String,
#[cfg_attr(feature = "miette", label("defined here"))]
span: Span,
},
FieldNotFound {
field_name: String,
message_name: String,
#[cfg_attr(feature = "miette", label("set here"))]
span: Span,
},
FieldAlreadySet {
field_name: String,
#[cfg_attr(feature = "miette", label("set here"))]
span: Span,
},
OneofAlreadySet {
oneof_name: String,
#[cfg_attr(feature = "miette", label("set here"))]
span: Span,
},
ExtensionNotFound {
extension_name: String,
message_name: String,
#[cfg_attr(feature = "miette", label("set here"))]
span: Span,
},
MessageNotFound {
message_name: String,
#[cfg_attr(feature = "miette", label("used here"))]
span: Span,
},
EnumValueNotFound {
value_name: String,
enum_name: String,
#[cfg_attr(feature = "miette", label("used here"))]
span: Span,
},
InvalidTypeForAny {
#[cfg_attr(feature = "miette", label("used here"))]
span: Span,
},
InvalidMapKey,
}
impl Display for ParseErrorKind {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
ParseErrorKind::InvalidToken { .. } => write!(f, "invalid token"),
ParseErrorKind::InvalidStringCharacters { .. } => write!(f, "invalid string character"),
ParseErrorKind::InvalidStringEscape { .. } => write!(f, "invalid string escape"),
ParseErrorKind::InvalidUtf8String { .. } => write!(f, "string is not valid utf-8"),
ParseErrorKind::NoSpaceBetweenIntAndIdent { .. } => write!(
f,
"whitespace is required between an integer literal and an identifier"
),
ParseErrorKind::UnexpectedToken {
expected, found, ..
} => write!(f, "expected {expected}, but found '{found}'"),
ParseErrorKind::UnexpectedEof { expected, .. } => {
write!(f, "expected {expected}, but reached end of input")
}
ParseErrorKind::IntegerValueOutOfRange {
expected, actual, ..
} => write!(
f,
"expected value to be {expected}, but the value {actual} is out of range"
),
ParseErrorKind::FieldNotFound {
field_name,
message_name,
..
} => write!(
f,
"field '{field_name}' not found for message '{message_name}'"
),
ParseErrorKind::FieldAlreadySet { field_name, .. } => {
write!(f, "'{field_name}' is already set")
}
ParseErrorKind::OneofAlreadySet { oneof_name, .. } => {
write!(f, "a value is already set for oneof '{oneof_name}'")
}
ParseErrorKind::ExtensionNotFound {
extension_name,
message_name,
..
} => {
write!(
f,
"extension '{extension_name}' not found for message '{message_name}'"
)
}
ParseErrorKind::MessageNotFound { message_name, .. } => {
write!(f, "message type '{message_name}' not found")
}
ParseErrorKind::EnumValueNotFound {
value_name,
enum_name,
..
} => {
write!(
f,
"value '{value_name}' was not found for enum '{enum_name}'"
)
}
ParseErrorKind::InvalidTypeForAny { .. } => write!(
f,
"the field type must be 'google.protobuf.Any' to use Any expansion syntax"
),
ParseErrorKind::InvalidMapKey => write!(f, "invalid value type for map key"),
}
}
}
impl Error for ParseErrorKind {}
impl Display for ParseError {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.kind.fmt(f)
}
}
impl Error for ParseError {}
// Forwards every `Diagnostic` method to the inner kind, except
// `source_code`, which serves the stored input text so that the kind's
// labeled spans can be rendered against it.
#[cfg(feature = "miette")]
#[cfg_attr(docsrs, doc(cfg(feature = "miette")))]
impl miette::Diagnostic for ParseError {
    fn code<'a>(&'a self) -> Option<Box<dyn Display + 'a>> {
        self.kind.code()
    }
    fn severity(&self) -> Option<miette::Severity> {
        self.kind.severity()
    }
    fn help<'a>(&'a self) -> Option<Box<dyn Display + 'a>> {
        self.kind.help()
    }
    fn url<'a>(&'a self) -> Option<Box<dyn Display + 'a>> {
        self.kind.url()
    }
    fn source_code(&self) -> Option<&dyn miette::SourceCode> {
        Some(&self.source)
    }
    fn labels(&self) -> Option<Box<dyn Iterator<Item = miette::LabeledSpan> + '_>> {
        self.kind.labels()
    }
    fn related<'a>(&'a self) -> Option<Box<dyn Iterator<Item = &'a dyn miette::Diagnostic> + 'a>> {
        self.kind.related()
    }
    fn diagnostic_source(&self) -> Option<&dyn miette::Diagnostic> {
        self.kind.diagnostic_source()
    }
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/text_format/parse/mod.rs | prost-reflect/src/dynamic/text_format/parse/mod.rs | mod error;
mod lex;
use std::{borrow::Cow, convert::TryFrom, iter::once};
use logos::{Lexer, Logos, Span};
use prost::Message;
pub use self::error::ParseError;
use self::{
error::ParseErrorKind,
lex::{Int, Token},
};
use crate::{
descriptor::{MAP_ENTRY_KEY_NUMBER, MAP_ENTRY_VALUE_NUMBER},
dynamic::fields::FieldDescriptorLike,
DynamicMessage, EnumDescriptor, FieldDescriptor, Kind, MapKey, MessageDescriptor, Value,
};
/// A recursive-descent parser for the protobuf text format.
pub(in crate::dynamic::text_format) struct Parser<'a> {
    // The underlying tokenizer over the input string.
    lexer: Lexer<'a, Token<'a>>,
    // Single-token lookahead buffer; `None` means the next token has not
    // yet been pulled from the lexer.
    peek: Option<Result<(Token<'a>, Span), ParseErrorKind>>,
}
/// A parsed field name, distinguishing the three syntactic forms.
enum FieldName {
    // A plain field name, e.g. `foo`.
    Ident(String),
    // An extension name in brackets, e.g. `[my.package.ext]`.
    Extension(String),
    // An `Any` expansion name, e.g. `[type.googleapis.com/my.Message]`,
    // split into (domain, message name).
    Any(String, String),
}
impl<'a> Parser<'a> {
/// Creates a parser reading tokens from `input`, with no lookahead buffered.
pub fn new(input: &'a str) -> Self {
    let lexer = Token::lexer(input);
    Parser { lexer, peek: None }
}
/// Parses fields until the input is exhausted, merging each into `message`.
pub fn parse_message(&mut self, message: &mut DynamicMessage) -> Result<(), ParseErrorKind> {
    loop {
        match self.peek()? {
            Some(_) => self.parse_field(message)?,
            None => return Ok(()),
        }
    }
}
/// Parses a delimited message value, accepting either `{ ... }` or
/// `< ... >` delimiters, and returns the span of the whole construct.
fn parse_message_value(
    &mut self,
    message: &mut DynamicMessage,
) -> Result<Span, ParseErrorKind> {
    // The closing delimiter must match the opening one.
    let (terminator, start) = match self.peek()? {
        Some((Token::LeftBrace, _)) => (Token::RightBrace, self.bump()),
        Some((Token::LeftAngleBracket, _)) => (Token::RightAngleBracket, self.bump()),
        _ => self.unexpected_token("'{' or '<'")?,
    };
    loop {
        match self.peek()? {
            Some((Token::Ident(_) | Token::LeftBracket, _)) => self.parse_field(message)?,
            Some((tok, _)) if tok == terminator => {
                let end = self.bump();
                return Ok(join_span(start, end));
            }
            _ => self.unexpected_token(format!("'{terminator}' or a field name"))?,
        }
    }
}
fn parse_field(&mut self, message: &mut DynamicMessage) -> Result<(), ParseErrorKind> {
let (name, span) = self.parse_field_name()?;
match self.peek()? {
Some((Token::Colon, _)) => {
self.bump();
}
Some((Token::LeftBrace | Token::LeftAngleBracket, _)) => (),
_ => self.unexpected_token("':' or a message value")?,
};
match name {
FieldName::Ident(field_name) => {
let field = find_field(&message.desc, &field_name).ok_or_else(|| {
ParseErrorKind::FieldNotFound {
field_name,
message_name: message.desc.full_name().to_owned(),
span,
}
})?;
self.parse_field_value(message, &field)?;
}
FieldName::Extension(extension_name) => {
let extension = message
.desc
.get_extension_by_full_name(&extension_name)
.ok_or_else(|| ParseErrorKind::ExtensionNotFound {
extension_name,
message_name: message.desc.full_name().to_owned(),
span,
})?;
self.parse_field_value(message, &extension)?;
}
FieldName::Any(domain, message_name) => {
let value_message = match message
.desc
.parent_pool()
.get_message_by_name(&message_name)
{
Some(msg) => msg,
None => return Err(ParseErrorKind::MessageNotFound { message_name, span }),
};
let mut value = DynamicMessage::new(value_message);
self.parse_message_value(&mut value)?;
let type_url = format!("{domain}/{message_name}");
let value = value.encode_to_vec();
if !(message.desc.full_name() == "google.protobuf.Any"
&& message
.try_set_field_by_number(1, Value::String(type_url))
.is_ok()
&& message
.try_set_field_by_number(2, Value::Bytes(value.into()))
.is_ok())
{
return Err(ParseErrorKind::InvalidTypeForAny { span });
}
}
}
if matches!(self.peek()?, Some((Token::Comma | Token::Semicolon, _))) {
self.bump();
}
Ok(())
}
/// Parses a field name: either a plain identifier, a bracketed extension
/// name `[pkg.ext]`, or a bracketed `Any` type URL `[domain/pkg.Message]`.
fn parse_field_name(&mut self) -> Result<(FieldName, Span), ParseErrorKind> {
    match self.peek()? {
        Some((Token::Ident(ident), _)) => Ok((FieldName::Ident(ident.to_owned()), self.bump())),
        Some((Token::LeftBracket, _)) => {
            let start = self.bump();
            // Until we see `/` or `]` we cannot tell whether this is an
            // extension name or the domain part of an Any type URL.
            let name_or_domain = self
                .parse_full_ident(&[Token::RightBracket, Token::ForwardSlash])?
                .into_owned();
            match self.peek()? {
                Some((Token::RightBracket, _)) => {
                    let end = self.bump();
                    Ok((FieldName::Extension(name_or_domain), join_span(start, end)))
                }
                Some((Token::ForwardSlash, _)) => {
                    self.bump();
                    let type_name = self.parse_full_ident(&[Token::RightBracket])?;
                    let end = self.expect(Token::RightBracket)?;
                    Ok((
                        FieldName::Any(name_or_domain, type_name.into_owned()),
                        join_span(start, end),
                    ))
                }
                _ => self.unexpected_token("']' or '/'")?,
            }
        }
        _ => self.unexpected_token("a field name")?,
    }
}
fn parse_field_value(
&mut self,
message: &mut DynamicMessage,
field: &impl FieldDescriptorLike,
) -> Result<(), ParseErrorKind> {
if field.is_list() {
let (value, _) = self.parse_repeated_value(&field.kind())?;
let result = message.fields.get_mut(field).as_list_mut().unwrap();
if let Value::List(values) = value {
result.extend(values);
} else {
result.push(value);
}
Ok(())
} else if field.is_map() {
fn unpack(value: Value) -> Result<(MapKey, Value), ParseErrorKind> {
match value {
Value::Message(msg) => {
let key = msg
.get_field_by_number(MAP_ENTRY_KEY_NUMBER)
.unwrap()
.into_owned()
.into_map_key()
.ok_or(ParseErrorKind::InvalidMapKey)?;
let value = msg
.get_field_by_number(MAP_ENTRY_VALUE_NUMBER)
.unwrap()
.into_owned();
Ok((key, value))
}
_ => panic!("map entry must be message"),
}
}
let (value, _) = self.parse_repeated_value(&field.kind())?;
let result = message.fields.get_mut(field).as_map_mut().unwrap();
if let Value::List(values) = value {
for value in values {
let (key, value) = unpack(value)?;
result.insert(key, value);
}
} else {
let (key, value) = unpack(value)?;
result.insert(key, value);
}
Ok(())
} else {
let kind = field.kind();
let (value, span) = self.parse_value(&kind)?;
if message.fields.has(field) {
return Err(ParseErrorKind::FieldAlreadySet {
field_name: field.text_name().to_owned(),
span,
});
} else if let Some(oneof) = field.containing_oneof() {
for oneof_field in oneof.fields() {
if message.has_field(&oneof_field) {
return Err(ParseErrorKind::OneofAlreadySet {
oneof_name: oneof.name().to_owned(),
span,
});
}
}
}
message.fields.set(field, value);
Ok(())
}
}
/// Parses either a bracketed list `[a, b, c]` of values of `kind`, or a
/// single bare value.
fn parse_repeated_value(&mut self, kind: &Kind) -> Result<(Value, Span), ParseErrorKind> {
    match self.peek()? {
        Some((Token::LeftBracket, _)) => {
            let start = self.bump();
            let mut result = Vec::new();
            // Check for empty list first
            if let Some((Token::RightBracket, _)) = self.peek()? {
                let end = self.bump();
                return Ok((Value::List(result), join_span(start, end)));
            }
            result.push(self.parse_value(kind)?.0);
            loop {
                match self.peek()? {
                    Some((Token::Comma, _)) => {
                        self.bump();
                        result.push(self.parse_value(kind)?.0);
                    }
                    Some((Token::RightBracket, _)) => {
                        let end = self.bump();
                        return Ok((Value::List(result), join_span(start, end)));
                    }
                    _ => self.unexpected_token("',' or ']'")?,
                }
            }
        }
        // Not a list; parse a single value.
        _ => self.parse_value(kind),
    }
}
fn parse_value(&mut self, kind: &Kind) -> Result<(Value, Span), ParseErrorKind> {
match kind {
Kind::Float => {
let (value, span) = self.parse_float()?;
Ok((Value::F32(value as f32), span))
}
Kind::Double => {
let (value, span) = self.parse_float()?;
Ok((Value::F64(value), span))
}
Kind::Int32 | Kind::Sint32 | Kind::Sfixed32 => {
let (value, span) = self.parse_i32()?;
Ok((Value::I32(value), span))
}
Kind::Int64 | Kind::Sint64 | Kind::Sfixed64 => {
let (value, span) = self.parse_i64()?;
Ok((Value::I64(value), span))
}
Kind::Uint32 | Kind::Fixed32 => {
let (value, span) = self.parse_u32()?;
Ok((Value::U32(value), span))
}
Kind::Uint64 | Kind::Fixed64 => {
let (value, span) = self.parse_u64()?;
Ok((Value::U64(value), span))
}
Kind::Bool => {
let (value, span) = self.parse_bool()?;
Ok((Value::Bool(value), span))
}
Kind::String => {
let (value, span) = self.parse_bytes()?;
match String::from_utf8(value) {
Ok(value) => Ok((Value::String(value), span)),
Err(_) => Err(ParseErrorKind::InvalidUtf8String { span }),
}
}
Kind::Bytes => {
let (value, span) = self.parse_bytes()?;
Ok((Value::Bytes(value.into()), span))
}
Kind::Message(desc) => {
let mut message = DynamicMessage::new(desc.clone());
let span = self.parse_message_value(&mut message)?;
Ok((Value::Message(message), span))
}
Kind::Enum(desc) => {
let (value, span) = self.parse_enum(desc)?;
Ok((Value::EnumNumber(value), span))
}
}
}
/// Parses a floating-point literal, accepting an optional leading minus,
/// decimal integer literals, and the special `inf`/`infinity`/`nan`
/// identifiers (case-insensitive).
fn parse_float(&mut self) -> Result<(f64, Span), ParseErrorKind> {
    let (negative, start) = match self.peek()? {
        Some((Token::Minus, _)) => (true, self.bump()),
        Some((_, span)) => (false, span),
        None => self.unexpected_token("a number")?,
    };
    let (value, end) = match self.peek()? {
        Some((Token::FloatLiteral(value), _)) => (value, self.bump()),
        // A base-10 integer literal is also a valid float value.
        Some((Token::IntLiteral(Int { value, radix: 10 }), _)) => {
            (value.parse().unwrap(), self.bump())
        }
        Some((Token::Ident(value), _))
            if value.eq_ignore_ascii_case("inf") || value.eq_ignore_ascii_case("infinity") =>
        {
            (f64::INFINITY, self.bump())
        }
        Some((Token::Ident(value), _)) if value.eq_ignore_ascii_case("nan") => {
            (f64::NAN, self.bump())
        }
        _ => self.unexpected_token("a number")?,
    };
    if negative {
        Ok((-value, join_span(start, end)))
    } else {
        Ok((value, join_span(start, end)))
    }
}
/// Parses a signed 32-bit integer literal, reporting an out-of-range error
/// with the valid bounds on overflow.
fn parse_i32(&mut self) -> Result<(i32, Span), ParseErrorKind> {
    let (negative, int, span) = self.parse_int()?;
    let converted_value = if negative {
        // Parse the magnitude as unsigned so that `-2147483648` (whose
        // magnitude does not fit in i32) is accepted as i32::MIN.
        u32::from_str_radix(int.value, int.radix)
            .ok()
            .and_then(|value| {
                if value == (i32::MAX as u32 + 1) {
                    Some(i32::MIN)
                } else {
                    i32::try_from(value).map(|value| -value).ok()
                }
            })
    } else {
        i32::from_str_radix(int.value, int.radix).ok()
    };
    match converted_value {
        Some(value) => Ok((value, span)),
        None => Err(ParseErrorKind::IntegerValueOutOfRange {
            expected: "a signed 32-bit integer".to_owned(),
            actual: if negative {
                format!("-{}", int.value)
            } else {
                int.value.to_owned()
            },
            min: i32::MIN.to_string(),
            max: i32::MAX.to_string(),
            span,
        }),
    }
}
fn parse_i64(&mut self) -> Result<(i64, Span), ParseErrorKind> {
let (negative, int, span) = self.parse_int()?;
let converted_value = if negative {
u64::from_str_radix(int.value, int.radix)
.ok()
.and_then(|value| {
if value == (i64::MAX as u64 + 1) {
Some(i64::MIN)
} else {
i64::try_from(value).map(|value| -value).ok()
}
})
} else {
i64::from_str_radix(int.value, int.radix).ok()
};
match converted_value {
Some(value) => Ok((value, span)),
None => Err(ParseErrorKind::IntegerValueOutOfRange {
expected: "a signed 64-bit integer".to_owned(),
actual: if negative {
format!("-{}", int.value)
} else {
int.value.to_owned()
},
min: i64::MIN.to_string(),
max: i64::MAX.to_string(),
span,
}),
}
}
/// Parses an unsigned 32-bit integer literal; any negative value is
/// rejected as out of range.
fn parse_u32(&mut self) -> Result<(u32, Span), ParseErrorKind> {
    let (negative, int, span) = self.parse_int()?;
    if !negative {
        if let Ok(value) = u32::from_str_radix(int.value, int.radix) {
            return Ok((value, span));
        }
    }
    Err(ParseErrorKind::IntegerValueOutOfRange {
        expected: "an unsigned 32-bit integer".to_owned(),
        actual: if negative {
            format!("-{}", int.value)
        } else {
            int.value.to_string()
        },
        min: u32::MIN.to_string(),
        max: u32::MAX.to_string(),
        span,
    })
}
/// Parses an unsigned 64-bit integer literal; any negative value is
/// rejected as out of range.
fn parse_u64(&mut self) -> Result<(u64, Span), ParseErrorKind> {
    let (negative, int, span) = self.parse_int()?;
    if !negative {
        if let Ok(value) = u64::from_str_radix(int.value, int.radix) {
            return Ok((value, span));
        }
    }
    Err(ParseErrorKind::IntegerValueOutOfRange {
        expected: "an unsigned 64-bit integer".to_owned(),
        actual: if negative {
            format!("-{}", int.value)
        } else {
            int.value.to_string()
        },
        min: u64::MIN.to_string(),
        max: u64::MAX.to_string(),
        span,
    })
}
/// Parses an optional leading minus followed by an integer literal,
/// returning the sign, the raw literal (digits plus radix) and the span.
fn parse_int(&mut self) -> Result<(bool, Int<'a>, Span), ParseErrorKind> {
    let (negative, start) = match self.peek()? {
        Some((Token::Minus, _)) => (true, self.bump()),
        Some((_, span)) => (false, span),
        None => self.unexpected_token("an integer")?,
    };
    let (value, end) = match self.peek()? {
        Some((Token::IntLiteral(value), _)) => (value, self.bump()),
        _ => self.unexpected_token("an integer")?,
    };
    Ok((negative, value, join_span(start, end)))
}
/// Parses a boolean value: `true`/`True`/`t`/`1` or `false`/`False`/`f`/`0`.
fn parse_bool(&mut self) -> Result<(bool, Span), ParseErrorKind> {
    match self.peek()? {
        Some((Token::Ident("false"), _))
        | Some((Token::Ident("False"), _))
        | Some((Token::Ident("f"), _)) => Ok((false, self.bump())),
        Some((Token::Ident("true"), _))
        | Some((Token::Ident("True"), _))
        | Some((Token::Ident("t"), _)) => Ok((true, self.bump())),
        Some((Token::IntLiteral(v), _)) => {
            // Numeric form: only values evaluating to exactly 0 or 1 are valid.
            let value = match u8::from_str_radix(v.value, v.radix) {
                Ok(v) => v,
                Err(_e) => return self.unexpected_token("0 or 1"),
            };
            if value == 1 {
                Ok((true, self.bump()))
            } else if value == 0 {
                Ok((false, self.bump()))
            } else {
                self.unexpected_token("0 or 1")
            }
        }
        _ => self.unexpected_token("'true' or 'false'"),
    }
}
/// Parses one or more adjacent string literals, concatenating their byte
/// contents (C-style implicit string concatenation).
fn parse_bytes(&mut self) -> Result<(Vec<u8>, Span), ParseErrorKind> {
    let (mut result, mut span) = match self.peek()? {
        Some((Token::StringLiteral(value), _)) => (value, self.bump()),
        _ => self.unexpected_token("a string")?,
    };
    while let Some((Token::StringLiteral(value), _)) = self.peek()? {
        result.extend_from_slice(&value);
        span = join_span(span, self.bump());
    }
    Ok((result, span))
}
/// Parses an enum value: either a value name looked up on `desc`, or a
/// (possibly negative) raw number.
fn parse_enum(&mut self, desc: &EnumDescriptor) -> Result<(i32, Span), ParseErrorKind> {
    match self.peek()? {
        Some((Token::Ident(name), _)) => {
            let span = self.bump();
            if let Some(value) = desc.get_value_by_name(name) {
                Ok((value.number(), span))
            } else {
                Err(ParseErrorKind::EnumValueNotFound {
                    value_name: name.to_owned(),
                    enum_name: desc.full_name().to_owned(),
                    span,
                })
            }
        }
        // Numeric enum values are not validated against the descriptor.
        Some((Token::Minus | Token::IntLiteral(_), _)) => self.parse_i32(),
        _ => self.unexpected_token("an enum value")?,
    }
}
fn parse_full_ident(&mut self, terminators: &[Token]) -> Result<Cow<'a, str>, ParseErrorKind> {
let mut result = match self.peek()? {
Some((Token::Ident(ident), _)) => Cow::Borrowed(ident),
_ => self.unexpected_token("an identifier")?,
};
self.bump();
loop {
match self.peek()? {
Some((Token::Dot, _)) => {
self.bump();
}
Some((tok, _)) if terminators.contains(&tok) => return Ok(result),
_ => self.unexpected_token(fmt_expected(
once(Token::Dot).chain(terminators.iter().cloned()),
))?,
}
match self.peek()? {
Some((Token::Ident(ident), _)) => {
let result = result.to_mut();
result.push('.');
result.push_str(ident);
self.bump();
}
_ => self.unexpected_token("an identifier")?,
};
}
}
/// Consumes the next token if it equals `expected` and returns its span;
/// otherwise reports an unexpected-token (or end-of-input) error.
fn expect(&mut self, expected: Token) -> Result<Span, ParseErrorKind> {
    match self.peek()? {
        Some((tok, _)) if tok == expected => Ok(self.bump()),
        _ => self.unexpected_token(expected),
    }
}
/// Consumes the buffered lookahead token and returns its span.
///
/// Panics if `peek` was not called first or if the lookahead holds an
/// error; callers always inspect `peek` before bumping.
fn bump(&mut self) -> Span {
    let (_, span) = self
        .peek
        .take()
        .expect("called bump without peek returning Some()")
        .expect("called bump on invalid token");
    span
}
/// Returns the next token without consuming it, pulling one from the
/// lexer on first use. `Ok(None)` signals end of input.
fn peek(&mut self) -> Result<Option<(Token<'a>, Span)>, ParseErrorKind> {
    if self.peek.is_none() {
        self.peek = self.next();
    }
    self.peek.clone().transpose()
}
/// Pulls the next token directly from the lexer, translating lexer
/// failures into `ParseErrorKind`. Must not be called while a token is
/// buffered in `peek`.
fn next(&mut self) -> Option<Result<(Token<'a>, Span), ParseErrorKind>> {
    debug_assert!(self.peek.is_none());
    match self.lexer.next() {
        // The lexer may record a more specific error in its extras; fall
        // back to a generic invalid-token error if it did not.
        Some(Err(())) => Some(Err(self.lexer.extras.error.take().unwrap_or_else(|| {
            ParseErrorKind::InvalidToken {
                span: self.lexer.span(),
            }
        }))),
        Some(Ok(tok)) => Some(Ok((tok, self.lexer.span()))),
        None => None,
    }
}
/// Builds an unexpected-token error for the current lookahead (or an
/// end-of-input error if there is none). Always returns `Err`.
fn unexpected_token<T>(&mut self, expected: impl ToString) -> Result<T, ParseErrorKind> {
    match self.peek()? {
        Some((found, span)) => Err(ParseErrorKind::UnexpectedToken {
            expected: expected.to_string(),
            found: found.to_string(),
            span,
        }),
        None => Err(ParseErrorKind::UnexpectedEof {
            expected: expected.to_string(),
        }),
    }
}
}
/// Looks up a field of `desc` by its text-format name.
///
/// Non-group fields match directly by field name. Group fields are stored
/// under the lowercased group name, but the text format refers to them by the
/// group's message type name, so a second lookup checks that case.
fn find_field(desc: &MessageDescriptor, name: &str) -> Option<FieldDescriptor> {
    // Direct hit on a regular (non-group) field.
    match desc.get_field_by_name(name) {
        Some(field) if !field.is_group() => return Some(field),
        _ => {}
    }
    // Group fields: the field name is the lowercased group name, and the
    // text-format name must equal the group's message type name exactly.
    desc.get_field_by_name(&name.to_ascii_lowercase())
        .filter(|field| field.is_group() && name == field.kind().as_message().unwrap().name())
}
/// Formats a set of expected tokens for an error message: `'a'`,
/// `'a' or 'b'`, or `'a', 'b' or 'c'`.
///
/// Generalized to any `Display` item (existing `Token` callers are unchanged),
/// and returns an empty string for an empty iterator instead of panicking on
/// the previous unconditional `ts[0]` index.
fn fmt_expected<T: std::fmt::Display>(ts: impl Iterator<Item = T>) -> String {
    use std::fmt::Write;
    let ts: Vec<_> = ts.collect();
    let mut s = String::with_capacity(32);
    if let Some((first, rest)) = ts.split_first() {
        write!(s, "'{first}'").unwrap();
        // Middle items are comma-separated; the final item gets " or ".
        if let Some((last, middle)) = rest.split_last() {
            for t in middle {
                write!(s, ", '{t}'").unwrap();
            }
            write!(s, " or '{last}'").unwrap();
        }
    }
    s
}
/// Merges two spans into one running from the start of `start` to the end of
/// `end` (assumes `start` begins no later than `end` ends).
fn join_span(start: Span, end: Span) -> Span {
    start.start..end.end
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/dynamic/text_format/parse/lex.rs | prost-reflect/src/dynamic/text_format/parse/lex.rs | use std::{ascii, convert::TryInto, fmt};
use logos::{Lexer, Logos};
use super::error::ParseErrorKind;
/// A lexical token of the protobuf text format.
///
/// Whitespace and `#`-to-end-of-line comments are skipped by the lexer.
#[derive(Debug, Clone, Logos, PartialEq)]
#[logos(extras = TokenExtras)]
#[logos(skip r"[\t\v\f\r\n ]+")]
#[logos(skip r"#[^\n]*\n?")]
#[logos(subpattern exponent = r"[eE][+\-]?[0-9]+")]
pub(crate) enum Token<'a> {
    /// A bare identifier (also covers keywords like `true`/`false`).
    #[regex("[A-Za-z_][A-Za-z0-9_]*")]
    Ident(&'a str),
    /// An integer literal in decimal, octal (leading `0`) or hex (`0x`) form;
    /// digits are kept unparsed so overflow handling is deferred to the parser.
    #[regex("0", |lex| int(lex, 10, 0))]
    #[regex("[1-9][0-9]*", |lex| int(lex, 10, 0))]
    #[regex("0[0-7]+", |lex| int(lex, 8, 1))]
    #[regex("0[xX][0-9A-Fa-f]+", |lex| int(lex, 16, 2))]
    IntLiteral(Int<'a>),
    /// A float literal, optionally carrying an `f`/`F` suffix.
    #[regex("0[fF]", float)]
    #[regex("[1-9][0-9]*[fF]", float)]
    #[regex(r#"[0-9]+\.[0-9]*(?&exponent)?[fF]?"#, float)]
    #[regex(r#"[0-9]+(?&exponent)[fF]?"#, float)]
    #[regex(r#"\.[0-9]+(?&exponent)?[fF]?"#, float)]
    FloatLiteral(f64),
    /// A single- or double-quoted string, unescaped into raw bytes by the
    /// `string` callback.
    #[regex(r#"'|""#, string)]
    StringLiteral(Vec<u8>),
    #[token(".")]
    Dot,
    #[token("-")]
    Minus,
    #[token("{")]
    LeftBrace,
    #[token("}")]
    RightBrace,
    #[token("[")]
    LeftBracket,
    #[token("]")]
    RightBracket,
    #[token("<")]
    LeftAngleBracket,
    #[token(">")]
    RightAngleBracket,
    #[token(",")]
    Comma,
    #[token(":")]
    Colon,
    #[token(";")]
    Semicolon,
    #[token("/")]
    ForwardSlash,
}
/// An integer literal: the raw digit characters (radix prefix already
/// stripped) together with the radix they should be parsed in.
#[derive(Debug, Clone, PartialEq)]
pub(crate) struct Int<'a> {
    // Digit characters only, e.g. "2A" for the source text `0x2A`.
    pub value: &'a str,
    // 10, 8 or 16, depending on the literal's prefix.
    pub radix: u32,
}
/// Extra lexer state: a side channel for a detailed error set by a token
/// callback just before it returns `Err(())`.
#[derive(Default)]
pub(crate) struct TokenExtras {
    // Consumed by the parser to report a specific error instead of a generic
    // "invalid token".
    pub error: Option<ParseErrorKind>,
}
impl fmt::Display for Token<'_> {
    /// Renders the token roughly as it appeared in the source, for use in
    /// error messages.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Value-carrying tokens are written out directly; punctuation tokens
        // all reduce to a fixed string written once at the end.
        let punct = match self {
            Token::Ident(value) => return write!(f, "{value}"),
            Token::IntLiteral(value) => return write!(f, "{}", value.value),
            Token::FloatLiteral(value) => {
                // Whole-number floats keep one decimal place so they still
                // read as floats (e.g. "2.0" rather than "2").
                return if value.fract() == 0.0 {
                    write!(f, "{value:.1}")
                } else {
                    write!(f, "{value}")
                };
            }
            Token::StringLiteral(bytes) => {
                write!(f, "\"")?;
                for &ch in bytes {
                    write!(f, "{}", ascii::escape_default(ch))?;
                }
                return write!(f, "\"");
            }
            Token::Dot => ".",
            Token::Minus => "-",
            Token::LeftBrace => "{",
            Token::RightBrace => "}",
            Token::LeftBracket => "[",
            Token::RightBracket => "]",
            Token::LeftAngleBracket => "<",
            Token::RightAngleBracket => ">",
            Token::Comma => ",",
            Token::Colon => ":",
            Token::Semicolon => ";",
            Token::ForwardSlash => "/",
        };
        f.write_str(punct)
    }
}
/// Lexer callback for integer literals.
///
/// `prefix_len` is the number of leading characters to strip (2 for `0x`, 1
/// for octal's leading `0`), and `radix` is recorded for later parsing. The
/// digits are returned unparsed so overflow handling happens in the parser.
fn int<'a>(lex: &mut Lexer<'a, Token<'a>>, radix: u32, prefix_len: usize) -> Result<Int<'a>, ()> {
    debug_assert!(lex.slice().len() > prefix_len);
    let span = lex.span().start + prefix_len..lex.span().end;
    // Reject an identifier glued onto a number (e.g. `10bar`): the text
    // format requires whitespace between them.
    if matches!(lex.remainder().chars().next(), Some(ch) if ch.is_ascii_alphabetic() || ch == '_') {
        // Extend the reported span over the trailing alphabetic run so the
        // error covers the whole `10bar`-style blob.
        let mut end = span.end + 1;
        while end < lex.source().len() && lex.source().as_bytes()[end].is_ascii_alphabetic() {
            end += 1;
        }
        lex.extras.error = Some(ParseErrorKind::NoSpaceBetweenIntAndIdent {
            span: span.start..end,
        });
        return Err(());
    }
    Ok(Int {
        value: &lex.source()[span],
        radix,
    })
}
/// Lexer callback for float literals: drops an optional trailing `f`/`F`
/// suffix and parses the remaining text as an `f64`.
fn float<'a>(lex: &mut Lexer<'a, Token<'a>>) -> f64 {
    let slice = lex.slice();
    let digits = slice
        .strip_suffix(|c| c == 'f' || c == 'F')
        .unwrap_or(slice);
    digits.parse().expect("failed to parse float")
}
/// Lexer callback for string literals.
///
/// Invoked after the opening quote has been matched; lexes the remainder of
/// the input with a dedicated sub-lexer until the matching quote, unescaping
/// as it goes, then bumps the outer lexer past the consumed text. Returns the
/// unescaped bytes (strings may contain arbitrary, possibly non-UTF-8 bytes).
/// On failure, stores a detailed error in `lex.extras` and returns `Err(())`.
fn string<'a>(lex: &mut Lexer<'a, Token<'a>>) -> Result<Vec<u8>, ()> {
    // Sub-lexer over the pieces of a string body.
    #[derive(Logos)]
    #[logos(subpattern hex = r"[0-9A-Fa-f]")]
    enum Component<'a> {
        // A run of plain characters (NUL, newline, backslash and quotes excluded).
        #[regex(r#"[^\x00\n\\'"]+"#)]
        Unescaped(&'a str),
        // A quote character: ends the string only if it matches the opening quote.
        #[regex(r#"['"]"#, terminator)]
        Terminator(u8),
        // Escapes that produce a single byte: hex, octal, or named escapes.
        #[regex(r#"\\[xX](?&hex)(?&hex)?"#, hex_escape)]
        #[regex(r#"\\[0-7][0-7]?[0-7]?"#, oct_escape)]
        #[regex(r#"\\[abfnrtv?\\'"]"#, char_escape)]
        Byte(u8),
        // Unicode escapes, encoded to UTF-8 when appended.
        #[regex(r#"\\u(?&hex)(?&hex)(?&hex)(?&hex)"#, unicode_escape)]
        #[regex(
            r#"\\U(?&hex)(?&hex)(?&hex)(?&hex)(?&hex)(?&hex)(?&hex)(?&hex)"#,
            unicode_escape
        )]
        Char(char),
    }
    // Returns the quote character itself.
    fn terminator<'a>(lex: &mut Lexer<'a, Component<'a>>) -> u8 {
        debug_assert_eq!(lex.slice().len(), 1);
        lex.slice().bytes().next().unwrap()
    }
    // `\xNN` — one or two hex digits, always fits in a byte.
    fn hex_escape<'a>(lex: &mut Lexer<'a, Component<'a>>) -> u8 {
        u32::from_str_radix(&lex.slice()[2..], 16)
            .expect("expected valid hex escape")
            .try_into()
            .expect("two-digit hex escape should be valid byte")
    }
    // `\NNN` — up to three octal digits; can exceed 255, which is an error.
    fn oct_escape<'a>(lex: &mut Lexer<'a, Component<'a>>) -> Result<u8, ()> {
        u32::from_str_radix(&lex.slice()[1..], 8)
            .expect("expected valid oct escape")
            .try_into()
            .map_err(drop)
    }
    // Named single-character escapes (`\n`, `\t`, ...).
    fn char_escape<'a>(lex: &mut Lexer<'a, Component<'a>>) -> u8 {
        match lex.slice().as_bytes()[1] {
            b'a' => b'\x07',
            b'b' => b'\x08',
            b'f' => b'\x0c',
            b'n' => b'\n',
            b'r' => b'\r',
            b't' => b'\t',
            b'v' => b'\x0b',
            b'?' => b'?',
            b'\\' => b'\\',
            b'\'' => b'\'',
            b'"' => b'"',
            _ => panic!("failed to parse char escape"),
        }
    }
    // `\uNNNN` / `\UNNNNNNNN` — None (lex error) for invalid scalar values.
    fn unicode_escape<'a>(lex: &mut Lexer<'a, Component<'a>>) -> Option<char> {
        let value = u32::from_str_radix(&lex.slice()[2..], 16).expect("expected valid hex escape");
        char::from_u32(value)
    }
    let mut result = Vec::new();
    let mut char_lexer = Component::lexer(lex.remainder());
    // The opening quote determines which quote character terminates the string.
    let terminator = lex.slice().as_bytes()[0];
    loop {
        match char_lexer.next() {
            Some(Ok(Component::Unescaped(s))) => result.extend_from_slice(s.as_bytes()),
            Some(Ok(Component::Terminator(t))) if t == terminator => {
                break;
            }
            // The other quote kind is just a literal character inside the string.
            Some(Ok(Component::Terminator(ch) | Component::Byte(ch))) => result.push(ch),
            Some(Ok(Component::Char(ch))) => {
                let mut buf = [0; 4];
                result.extend_from_slice(ch.encode_utf8(&mut buf).as_bytes());
            }
            Some(Err(())) => {
                // Translate the sub-lexer's span into offsets of the full source.
                let start = lex.span().end + char_lexer.span().start;
                let end = lex.span().end + char_lexer.span().end;
                if char_lexer.slice().starts_with('\\') {
                    lex.extras.error =
                        Some(ParseErrorKind::InvalidStringEscape { span: start..end });
                } else {
                    lex.extras.error =
                        Some(ParseErrorKind::InvalidStringCharacters { span: start..end });
                }
                return Err(());
            }
            None => {
                lex.extras.error = Some(ParseErrorKind::UnexpectedEof {
                    expected: "string terminator".to_owned(),
                });
                return Err(());
            }
        }
    }
    // Advance the outer lexer past everything the sub-lexer consumed.
    lex.bump(char_lexer.span().end);
    Ok(result)
}
// Lexer tests. Expected values are exact; error spans are byte offsets into
// the test source.
#[cfg(test)]
mod tests {
    use super::*;
    // One example of every token kind, including escapes, comments and a
    // non-ASCII string.
    #[test]
    fn simple_tokens() {
        let source = r#"hell0 052 42 0x2A 5. 0.5 0.42e+2 2e-4 .2e+3 52e3 true
        false "hello \a\b\f\n\r\t\v\?\\\'\" \052 \x2a" #comment
        'hello 😀' _foo"#;
        let mut lexer = Token::lexer(source);
        assert_eq!(lexer.next().unwrap(), Ok(Token::Ident("hell0")));
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::IntLiteral(Int {
                value: "52",
                radix: 8,
            }))
        );
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::IntLiteral(Int {
                value: "42",
                radix: 10,
            }))
        );
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::IntLiteral(Int {
                value: "2A",
                radix: 16,
            }))
        );
        assert_eq!(lexer.next().unwrap(), Ok(Token::FloatLiteral(5.)));
        assert_eq!(lexer.next().unwrap(), Ok(Token::FloatLiteral(0.5)));
        assert_eq!(lexer.next().unwrap(), Ok(Token::FloatLiteral(0.42e+2)));
        assert_eq!(lexer.next().unwrap(), Ok(Token::FloatLiteral(2e-4)));
        assert_eq!(lexer.next().unwrap(), Ok(Token::FloatLiteral(0.2e+3)));
        assert_eq!(lexer.next().unwrap(), Ok(Token::FloatLiteral(52e3)));
        assert_eq!(lexer.next().unwrap(), Ok(Token::Ident("true")));
        assert_eq!(lexer.next().unwrap(), Ok(Token::Ident("false")));
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::StringLiteral(
                b"hello \x07\x08\x0c\n\r\t\x0b?\\'\" * *".as_ref().into()
            ))
        );
        assert_eq!(
            lexer.next().unwrap(),
            Ok(Token::StringLiteral(
                b"hello \xF0\x9F\x98\x80".as_ref().into()
            ))
        );
        assert_eq!(lexer.next().unwrap(), Ok(Token::Ident("_foo")));
        assert_eq!(lexer.next(), None);
        assert_eq!(lexer.extras.error, None);
    }
    // Overflowing digits still lex successfully; overflow is a parser concern.
    #[test]
    fn integer_overflow() {
        let source = "99999999999999999999999999999999999999";
        let mut lexer = Token::lexer(source);
        assert_eq!(
            lexer.next(),
            Some(Ok(Token::IntLiteral(Int {
                value: "99999999999999999999999999999999999999",
                radix: 10,
            })))
        );
        assert_eq!(lexer.next(), None);
        assert_eq!(lexer.extras.error, None);
    }
    // The `f`/`F` suffix is accepted and stripped on every float form.
    #[test]
    fn float_suffix() {
        let source = "10f 5.f 0.5f 0.42e+2f 2e-4f .2e+3f";
        let mut lexer = Token::lexer(source);
        assert_eq!(lexer.next().unwrap(), Ok(Token::FloatLiteral(10.)));
        assert_eq!(lexer.next().unwrap(), Ok(Token::FloatLiteral(5.)));
        assert_eq!(lexer.next().unwrap(), Ok(Token::FloatLiteral(0.5)));
        assert_eq!(lexer.next().unwrap(), Ok(Token::FloatLiteral(0.42e+2)));
        assert_eq!(lexer.next().unwrap(), Ok(Token::FloatLiteral(2e-4)));
        assert_eq!(lexer.next().unwrap(), Ok(Token::FloatLiteral(0.2e+3)));
        assert_eq!(lexer.next(), None);
        assert_eq!(lexer.extras.error, None);
    }
    // A plain lexer error leaves no detailed error in `extras`.
    #[test]
    fn invalid_token() {
        let source = "@ foo";
        let mut lexer = Token::lexer(source);
        assert_eq!(lexer.next(), Some(Err(())));
        assert_eq!(lexer.extras.error, None);
    }
    #[test]
    fn invalid_string_char() {
        let source = "\"\x00\" foo";
        let mut lexer = Token::lexer(source);
        assert_eq!(lexer.next(), Some(Err(())));
        assert_eq!(
            lexer.extras.error,
            Some(ParseErrorKind::InvalidStringCharacters { span: 1..2 })
        );
    }
    // A newline inside a string is reported as an invalid character.
    #[test]
    fn unterminated_string() {
        let source = "\"hello \n foo";
        let mut lexer = Token::lexer(source);
        assert_eq!(lexer.next(), Some(Err(())));
        assert_eq!(
            lexer.extras.error,
            Some(ParseErrorKind::InvalidStringCharacters { span: 7..8 })
        );
    }
    #[test]
    fn invalid_string_escape() {
        let source = r#""\m""#;
        let mut lexer = Token::lexer(source);
        assert_eq!(lexer.next(), Some(Err(())));
        assert_eq!(
            lexer.extras.error,
            Some(ParseErrorKind::InvalidStringEscape { span: 1..2 })
        );
    }
    // Hex escapes may produce bytes that are not valid UTF-8.
    #[test]
    fn string_escape_invalid_utf8() {
        let source = r#""\xFF""#;
        let mut lexer = Token::lexer(source);
        assert_eq!(
            lexer.next(),
            Some(Ok(Token::StringLiteral([0xff].as_ref().into())))
        );
        assert_eq!(lexer.next(), None);
    }
    #[test]
    fn string_unicode_escape() {
        let source = r"'\u0068\u0065\u006c\u006c\u006f\u0020\U0001f600'";
        let mut lexer = Token::lexer(source);
        assert_eq!(
            lexer.next(),
            Some(Ok(Token::StringLiteral(
                b"hello \xF0\x9F\x98\x80".as_ref().into()
            )))
        );
        assert_eq!(lexer.next(), None);
        assert_eq!(lexer.extras.error, None);
    }
    // \U escapes beyond the Unicode scalar range are rejected.
    #[test]
    fn string_invalid_unicode_escape() {
        let mut lexer = Token::lexer(r"'\Uffffffff'");
        assert_eq!(lexer.next(), Some(Err(())));
        assert_eq!(
            lexer.extras.error,
            Some(ParseErrorKind::InvalidStringEscape { span: 1..11 })
        );
    }
    // Whitespace handling, including the glued `10bar` / `20_foo` error cases.
    #[test]
    fn whitespace() {
        assert_eq!(
            Token::lexer("value: -2.0").collect::<Vec<_>>(),
            vec![
                Ok(Token::Ident("value")),
                Ok(Token::Colon),
                Ok(Token::Minus),
                Ok(Token::FloatLiteral(2.0)),
            ]
        );
        assert_eq!(
            Token::lexer("value: - 2.0").collect::<Vec<_>>(),
            vec![
                Ok(Token::Ident("value")),
                Ok(Token::Colon),
                Ok(Token::Minus),
                Ok(Token::FloatLiteral(2.0)),
            ]
        );
        assert_eq!(
            Token::lexer("value: -\n #comment\n 2.0").collect::<Vec<_>>(),
            vec![
                Ok(Token::Ident("value")),
                Ok(Token::Colon),
                Ok(Token::Minus),
                Ok(Token::FloatLiteral(2.0)),
            ]
        );
        assert_eq!(
            Token::lexer("value: 2 . 0").collect::<Vec<_>>(),
            vec![
                Ok(Token::Ident("value")),
                Ok(Token::Colon),
                Ok(Token::IntLiteral(Int {
                    value: "2",
                    radix: 10,
                })),
                Ok(Token::Dot),
                Ok(Token::IntLiteral(Int {
                    value: "0",
                    radix: 10,
                })),
            ]
        );
        assert_eq!(
            Token::lexer("foo: 10 bar: 20").collect::<Vec<_>>(),
            vec![
                Ok(Token::Ident("foo")),
                Ok(Token::Colon),
                Ok(Token::IntLiteral(Int {
                    value: "10",
                    radix: 10,
                })),
                Ok(Token::Ident("bar")),
                Ok(Token::Colon),
                Ok(Token::IntLiteral(Int {
                    value: "20",
                    radix: 10,
                })),
            ]
        );
        assert_eq!(
            Token::lexer("foo: 10,bar: 20").collect::<Vec<_>>(),
            vec![
                Ok(Token::Ident("foo")),
                Ok(Token::Colon),
                Ok(Token::IntLiteral(Int {
                    value: "10",
                    radix: 10,
                })),
                Ok(Token::Comma),
                Ok(Token::Ident("bar")),
                Ok(Token::Colon),
                Ok(Token::IntLiteral(Int {
                    value: "20",
                    radix: 10,
                })),
            ]
        );
        assert_eq!(
            Token::lexer("foo: 10[com.foo.ext]: 20").collect::<Vec<_>>(),
            vec![
                Ok(Token::Ident("foo")),
                Ok(Token::Colon),
                Ok(Token::IntLiteral(Int {
                    value: "10",
                    radix: 10,
                })),
                Ok(Token::LeftBracket),
                Ok(Token::Ident("com")),
                Ok(Token::Dot),
                Ok(Token::Ident("foo")),
                Ok(Token::Dot),
                Ok(Token::Ident("ext")),
                Ok(Token::RightBracket),
                Ok(Token::Colon),
                Ok(Token::IntLiteral(Int {
                    value: "20",
                    radix: 10,
                })),
            ]
        );
        let mut lexer = Token::lexer("foo: 10bar: 20");
        assert_eq!(lexer.next(), Some(Ok(Token::Ident("foo"))));
        assert_eq!(lexer.next(), Some(Ok(Token::Colon)));
        assert_eq!(lexer.next(), Some(Err(())));
        assert_eq!(
            lexer.extras.error,
            Some(ParseErrorKind::NoSpaceBetweenIntAndIdent { span: 5..10 })
        );
        let mut lexer = Token::lexer("bar: 20_foo");
        assert_eq!(lexer.next(), Some(Ok(Token::Ident("bar"))));
        assert_eq!(lexer.next(), Some(Ok(Token::Colon)));
        assert_eq!(lexer.next(), Some(Err(())));
        assert_eq!(
            lexer.extras.error,
            Some(ParseErrorKind::NoSpaceBetweenIntAndIdent { span: 5..11 })
        );
    }
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/reflect/mod.rs | prost-reflect/src/reflect/mod.rs | mod wkt;
pub(crate) use wkt::make_wkt_descriptor_pool;
use prost::Message;
use crate::{DynamicMessage, MessageDescriptor};
/// Trait for message types that support reflection.
pub trait ReflectMessage: Message {
    /// Gets a [`MessageDescriptor`] describing the type of this message.
    fn descriptor(&self) -> MessageDescriptor;
    /// Converts this message into an instance of [`DynamicMessage`] by going
    /// through the byte representation.
    ///
    /// # Panics
    ///
    /// Panics if [`descriptor`](ReflectMessage::descriptor) returns a
    /// descriptor that does not match this message's serialized bytes.
    fn transcode_to_dynamic(&self) -> DynamicMessage
    where
        Self: Sized,
    {
        let mut message = DynamicMessage::new(self.descriptor());
        // This can only fail if `self.descriptor` returns a descriptor incompatible with the
        // actual serialized bytes.
        message
            .transcode_from(self)
            .expect("error converting to dynamic message");
        message
    }
}
impl<M> ReflectMessage for Box<M>
where
    M: ReflectMessage,
{
    /// Delegates to the boxed message's descriptor.
    fn descriptor(&self) -> MessageDescriptor {
        self.as_ref().descriptor()
    }
}
// Compile-time check that `ReflectMessage` remains usable as a trait object.
#[test]
fn assert_object_safe() {
    fn _foo(_: Box<dyn ReflectMessage>) {}
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/reflect/wkt.rs | prost-reflect/src/reflect/wkt.rs | use crate::descriptor::types::descriptor_proto::ExtensionRange;
use crate::descriptor::types::descriptor_proto::ReservedRange;
use crate::descriptor::types::field_descriptor_proto::Label::*;
use crate::descriptor::types::field_descriptor_proto::Type::*;
use crate::descriptor::types::DescriptorProto;
use crate::descriptor::types::EnumDescriptorProto;
use crate::descriptor::types::EnumValueDescriptorProto;
use crate::descriptor::types::FieldDescriptorProto;
use crate::descriptor::types::FieldOptions;
use crate::descriptor::types::FileDescriptorProto;
use crate::descriptor::types::FileDescriptorSet;
use crate::descriptor::types::FileOptions;
use crate::descriptor::types::MessageOptions;
use crate::descriptor::types::OneofDescriptorProto;
use crate::descriptor::types::Options;
use crate::prost_types::file_options::OptimizeMode::Speed;
use crate::DescriptorError;
use crate::{DescriptorPool, MessageDescriptor, ReflectMessage};
#[allow(deprecated)]
fn make_descriptor() -> FileDescriptorSet {
FileDescriptorSet {
file: vec![
FileDescriptorProto {
name: Some("google/protobuf/any.proto".into()),
package: Some("google.protobuf".into()),
dependency: vec![],
public_dependency: vec![],
weak_dependency: vec![],
message_type: vec![DescriptorProto {
name: Some("Any".into()),
field: vec![
FieldDescriptorProto {
name: Some("type_url".into()),
number: Some(1),
label: Some(Optional.into()),
r#type: Some(String.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("typeUrl".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("value".into()),
number: Some(2),
label: Some(Optional.into()),
r#type: Some(Bytes.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("value".into()),
options: None,
proto3_optional: None,
},
],
extension: vec![],
nested_type: vec![],
enum_type: vec![],
extension_range: vec![],
oneof_decl: vec![],
options: None,
reserved_range: vec![],
reserved_name: vec![],
}],
enum_type: vec![],
service: vec![],
extension: vec![],
options: Some(Options::from_prost(FileOptions {
java_package: Some("com.google.protobuf".into()),
java_outer_classname: Some("AnyProto".into()),
java_multiple_files: Some(true),
java_generate_equals_and_hash: None,
java_string_check_utf8: None,
optimize_for: None,
go_package: Some("google.golang.org/protobuf/types/known/anypb".into()),
cc_generic_services: None,
java_generic_services: None,
py_generic_services: None,
php_generic_services: None,
deprecated: None,
cc_enable_arenas: None,
objc_class_prefix: Some("GPB".into()),
csharp_namespace: Some("Google.Protobuf.WellKnownTypes".into()),
swift_prefix: None,
php_class_prefix: None,
php_namespace: None,
php_metadata_namespace: None,
ruby_package: None,
uninterpreted_option: vec![],
})),
source_code_info: None,
syntax: Some("proto3".into()),
},
FileDescriptorProto {
name: Some("google/protobuf/source_context.proto".into()),
package: Some("google.protobuf".into()),
dependency: vec![],
public_dependency: vec![],
weak_dependency: vec![],
message_type: vec![DescriptorProto {
name: Some("SourceContext".into()),
field: vec![FieldDescriptorProto {
name: Some("file_name".into()),
number: Some(1),
label: Some(Optional.into()),
r#type: Some(String.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("fileName".into()),
options: None,
proto3_optional: None,
}],
extension: vec![],
nested_type: vec![],
enum_type: vec![],
extension_range: vec![],
oneof_decl: vec![],
options: None,
reserved_range: vec![],
reserved_name: vec![],
}],
enum_type: vec![],
service: vec![],
extension: vec![],
options: Some(Options::from_prost(FileOptions {
java_package: Some("com.google.protobuf".into()),
java_outer_classname: Some("SourceContextProto".into()),
java_multiple_files: Some(true),
java_generate_equals_and_hash: None,
java_string_check_utf8: None,
optimize_for: None,
go_package: Some(
"google.golang.org/protobuf/types/known/sourcecontextpb".into(),
),
cc_generic_services: None,
java_generic_services: None,
py_generic_services: None,
php_generic_services: None,
deprecated: None,
cc_enable_arenas: None,
objc_class_prefix: Some("GPB".into()),
csharp_namespace: Some("Google.Protobuf.WellKnownTypes".into()),
swift_prefix: None,
php_class_prefix: None,
php_namespace: None,
php_metadata_namespace: None,
ruby_package: None,
uninterpreted_option: vec![],
})),
source_code_info: None,
syntax: Some("proto3".into()),
},
FileDescriptorProto {
name: Some("google/protobuf/type.proto".into()),
package: Some("google.protobuf".into()),
dependency: vec![
"google/protobuf/any.proto".into(),
"google/protobuf/source_context.proto".into(),
],
public_dependency: vec![],
weak_dependency: vec![],
message_type: vec![
DescriptorProto {
name: Some("Type".into()),
field: vec![
FieldDescriptorProto {
name: Some("name".into()),
number: Some(1),
label: Some(Optional.into()),
r#type: Some(String.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("name".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("fields".into()),
number: Some(2),
label: Some(Repeated.into()),
r#type: Some(Message.into()),
type_name: Some(".google.protobuf.Field".into()),
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("fields".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("oneofs".into()),
number: Some(3),
label: Some(Repeated.into()),
r#type: Some(String.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("oneofs".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("options".into()),
number: Some(4),
label: Some(Repeated.into()),
r#type: Some(Message.into()),
type_name: Some(".google.protobuf.Option".into()),
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("options".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("source_context".into()),
number: Some(5),
label: Some(Optional.into()),
r#type: Some(Message.into()),
type_name: Some(".google.protobuf.SourceContext".into()),
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("sourceContext".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("syntax".into()),
number: Some(6),
label: Some(Optional.into()),
r#type: Some(Enum.into()),
type_name: Some(".google.protobuf.Syntax".into()),
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("syntax".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("edition".to_owned()),
number: Some(7),
label: Some(Optional.into()),
r#type: Some(String.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("edition".to_owned()),
options: None,
proto3_optional: None,
},
],
extension: vec![],
nested_type: vec![],
enum_type: vec![],
extension_range: vec![],
oneof_decl: vec![],
options: None,
reserved_range: vec![],
reserved_name: vec![],
},
DescriptorProto {
name: Some("Field".into()),
field: vec![
FieldDescriptorProto {
name: Some("kind".into()),
number: Some(1),
label: Some(Optional.into()),
r#type: Some(Enum.into()),
type_name: Some(".google.protobuf.Field.Kind".into()),
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("kind".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("cardinality".into()),
number: Some(2),
label: Some(Optional.into()),
r#type: Some(Enum.into()),
type_name: Some(".google.protobuf.Field.Cardinality".into()),
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("cardinality".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("number".into()),
number: Some(3),
label: Some(Optional.into()),
r#type: Some(Int32.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("number".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("name".into()),
number: Some(4),
label: Some(Optional.into()),
r#type: Some(String.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("name".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("type_url".into()),
number: Some(6),
label: Some(Optional.into()),
r#type: Some(String.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("typeUrl".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("oneof_index".into()),
number: Some(7),
label: Some(Optional.into()),
r#type: Some(Int32.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("oneofIndex".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("packed".into()),
number: Some(8),
label: Some(Optional.into()),
r#type: Some(Bool.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("packed".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("options".into()),
number: Some(9),
label: Some(Repeated.into()),
r#type: Some(Message.into()),
type_name: Some(".google.protobuf.Option".into()),
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("options".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("json_name".into()),
number: Some(10),
label: Some(Optional.into()),
r#type: Some(String.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("jsonName".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("default_value".into()),
number: Some(11),
label: Some(Optional.into()),
r#type: Some(String.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("defaultValue".into()),
options: None,
proto3_optional: None,
},
],
extension: vec![],
nested_type: vec![],
enum_type: vec![
EnumDescriptorProto {
name: Some("Kind".into()),
value: vec![
EnumValueDescriptorProto {
name: Some("TYPE_UNKNOWN".into()),
number: Some(0),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_DOUBLE".into()),
number: Some(1),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_FLOAT".into()),
number: Some(2),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_INT64".into()),
number: Some(3),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_UINT64".into()),
number: Some(4),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_INT32".into()),
number: Some(5),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_FIXED64".into()),
number: Some(6),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_FIXED32".into()),
number: Some(7),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_BOOL".into()),
number: Some(8),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_STRING".into()),
number: Some(9),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_GROUP".into()),
number: Some(10),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_MESSAGE".into()),
number: Some(11),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_BYTES".into()),
number: Some(12),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_UINT32".into()),
number: Some(13),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_ENUM".into()),
number: Some(14),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_SFIXED32".into()),
number: Some(15),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_SFIXED64".into()),
number: Some(16),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_SINT32".into()),
number: Some(17),
options: None,
},
EnumValueDescriptorProto {
name: Some("TYPE_SINT64".into()),
number: Some(18),
options: None,
},
],
options: None,
reserved_range: vec![],
reserved_name: vec![],
},
EnumDescriptorProto {
name: Some("Cardinality".into()),
value: vec![
EnumValueDescriptorProto {
name: Some("CARDINALITY_UNKNOWN".into()),
number: Some(0),
options: None,
},
EnumValueDescriptorProto {
name: Some("CARDINALITY_OPTIONAL".into()),
number: Some(1),
options: None,
},
EnumValueDescriptorProto {
name: Some("CARDINALITY_REQUIRED".into()),
number: Some(2),
options: None,
},
EnumValueDescriptorProto {
name: Some("CARDINALITY_REPEATED".into()),
number: Some(3),
options: None,
},
],
options: None,
reserved_range: vec![],
reserved_name: vec![],
},
],
extension_range: vec![],
oneof_decl: vec![],
options: None,
reserved_range: vec![],
reserved_name: vec![],
},
DescriptorProto {
name: Some("Enum".into()),
field: vec![
FieldDescriptorProto {
name: Some("name".into()),
number: Some(1),
label: Some(Optional.into()),
r#type: Some(String.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("name".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("enumvalue".into()),
number: Some(2),
label: Some(Repeated.into()),
r#type: Some(Message.into()),
type_name: Some(".google.protobuf.EnumValue".into()),
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("enumvalue".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("options".into()),
number: Some(3),
label: Some(Repeated.into()),
r#type: Some(Message.into()),
type_name: Some(".google.protobuf.Option".into()),
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("options".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("source_context".into()),
number: Some(4),
label: Some(Optional.into()),
r#type: Some(Message.into()),
type_name: Some(".google.protobuf.SourceContext".into()),
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("sourceContext".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("syntax".into()),
number: Some(5),
label: Some(Optional.into()),
r#type: Some(Enum.into()),
type_name: Some(".google.protobuf.Syntax".into()),
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("syntax".into()),
options: None,
proto3_optional: None,
},
FieldDescriptorProto {
name: Some("edition".to_owned()),
number: Some(6),
label: Some(Optional.into()),
r#type: Some(String.into()),
type_name: None,
extendee: None,
default_value: None,
oneof_index: None,
json_name: Some("edition".to_owned()),
options: None,
proto3_optional: None,
},
],
extension: vec![],
nested_type: vec![],
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | true |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/descriptor/global.rs | prost-reflect/src/descriptor/global.rs | use std::sync::{Mutex, MutexGuard, OnceLock};
use prost::bytes::Buf;
use prost_types::FileDescriptorProto;
use crate::{DescriptorError, DescriptorPool};
static INSTANCE: OnceLock<Mutex<DescriptorPool>> = OnceLock::new();
/// Returns a locked handle to the global descriptor pool, initializing it
/// with the google well-known types on first use.
fn instance() -> MutexGuard<'static, DescriptorPool> {
    let pool: &'static Mutex<DescriptorPool> =
        INSTANCE.get_or_init(|| Mutex::new(crate::reflect::make_wkt_descriptor_pool().unwrap()));
    pool.lock().unwrap()
}
impl DescriptorPool {
    /// Gets a copy of the global descriptor pool. By default, this just contains the google well-known types.
    ///
    /// The global descriptor pool is typically used as a convenient place to store descriptors for `ReflectMessage` implementations.
    ///
    /// The returned pool is a clone, so modifying it will not affect the global pool — use
    /// [`decode_global_file_descriptor_set`](DescriptorPool::decode_global_file_descriptor_set) or
    /// [`add_global_file_descriptor_proto`](DescriptorPool::add_global_file_descriptor_proto) to modify the global pool.
    pub fn global() -> DescriptorPool {
        instance().clone()
    }
    /// Decodes a set of file descriptors and adds them to the global pool.
    ///
    /// See [`DescriptorPool::decode_file_descriptor_set`] for more details.
    pub fn decode_global_file_descriptor_set<B>(bytes: B) -> Result<(), DescriptorError>
    where
        B: Buf,
    {
        instance().decode_file_descriptor_set(bytes).map(drop)
    }
    /// Adds a single file descriptor to the global pool.
    ///
    /// See [`DescriptorPool::add_file_descriptor_proto`] for more details.
    pub fn add_global_file_descriptor_proto(
        file: FileDescriptorProto,
    ) -> Result<(), DescriptorError> {
        instance().add_file_descriptor_proto(file).map(drop)
    }
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/descriptor/tests.rs | prost-reflect/src/descriptor/tests.rs | use prost_types::{
field_descriptor_proto::{self, Label, Type},
source_code_info::Location,
DescriptorProto, EnumDescriptorProto, EnumValueDescriptorProto, FieldDescriptorProto,
FileDescriptorProto, FileDescriptorSet, MethodDescriptorProto, ServiceDescriptorProto,
SourceCodeInfo,
};
use crate::DescriptorPool;
#[test]
fn resolve_service_name() {
    // A service method that names `MyMessage` once relatively and once
    // fully-qualified (leading dot); both must resolve to the same message.
    let method = MethodDescriptorProto {
        name: Some("my_method".to_owned()),
        input_type: Some("MyMessage".to_owned()),
        output_type: Some(".my.package.MyMessage".to_owned()),
        ..Default::default()
    };
    let file = FileDescriptorProto {
        name: Some("myfile.proto".to_owned()),
        package: Some("my.package".to_owned()),
        syntax: Some("proto3".to_owned()),
        service: vec![ServiceDescriptorProto {
            name: Some("MyService".to_owned()),
            method: vec![method],
            ..Default::default()
        }],
        message_type: vec![DescriptorProto {
            name: Some("MyMessage".to_owned()),
            ..Default::default()
        }],
        ..Default::default()
    };

    let pool =
        DescriptorPool::from_file_descriptor_set(FileDescriptorSet { file: vec![file] }).unwrap();
    let service = pool.services().next().unwrap();
    let method = service.methods().next().unwrap();

    assert_eq!(method.input().full_name(), "my.package.MyMessage");
    assert_eq!(method.output().full_name(), "my.package.MyMessage");
}
#[test]
fn resolve_service_name_other_package() {
    // Types referenced across a file import: `myfile.proto` refers to
    // `other.package.MyMessage` declared in `myfile2.proto`, using both the
    // relative and the fully-qualified (leading-dot) form.
    let file_descriptor_set = FileDescriptorSet {
        file: vec![
            FileDescriptorProto {
                name: Some("myfile.proto".to_owned()),
                package: Some("my.package".to_owned()),
                dependency: vec!["myfile2.proto".to_owned()],
                syntax: Some("proto3".to_owned()),
                service: vec![ServiceDescriptorProto {
                    name: Some("MyService".to_owned()),
                    method: vec![MethodDescriptorProto {
                        name: Some("my_method".to_owned()),
                        input_type: Some("other.package.MyMessage".to_owned()),
                        output_type: Some(".other.package.MyMessage".to_owned()),
                        ..Default::default()
                    }],
                    ..Default::default()
                }],
                ..Default::default()
            },
            FileDescriptorProto {
                name: Some("myfile2.proto".to_owned()),
                package: Some("other.package".to_owned()),
                syntax: Some("proto3".to_owned()),
                message_type: vec![DescriptorProto {
                    name: Some("MyMessage".to_owned()),
                    ..Default::default()
                }],
                ..Default::default()
            },
        ],
    };
    let descriptor_pool = DescriptorPool::from_file_descriptor_set(file_descriptor_set).unwrap();
    let service = descriptor_pool.services().next().unwrap();
    let method = service.methods().next().unwrap();
    assert_eq!(method.input().full_name(), "other.package.MyMessage");
    assert_eq!(method.output().full_name(), "other.package.MyMessage");
}
#[test]
fn resolve_message_name() {
    // A message field whose `type_name` is a bare (package-relative) name
    // must resolve to the sibling message in the same package.
    let file_descriptor_set = FileDescriptorSet {
        file: vec![FileDescriptorProto {
            name: Some("myfile.proto".to_owned()),
            package: Some("my.package".to_owned()),
            syntax: Some("proto3".to_owned()),
            message_type: vec![
                DescriptorProto {
                    name: Some("MyFieldMessage".to_owned()),
                    ..Default::default()
                },
                DescriptorProto {
                    name: Some("MyMessage".to_owned()),
                    field: vec![FieldDescriptorProto {
                        name: Some("my_field".to_owned()),
                        number: Some(1),
                        label: Some(Label::Optional as i32),
                        r#type: Some(Type::Message as i32),
                        type_name: Some("MyFieldMessage".to_owned()),
                        json_name: Some("myfield".to_owned()),
                        ..Default::default()
                    }],
                    ..Default::default()
                },
            ],
            ..Default::default()
        }],
    };
    let descriptor_pool = DescriptorPool::from_file_descriptor_set(file_descriptor_set).unwrap();
    let message = descriptor_pool
        .get_message_by_name("my.package.MyMessage")
        .unwrap();
    let field = message.get_field_by_name("my_field").unwrap();
    assert_eq!(
        field.kind().as_message().unwrap().full_name(),
        "my.package.MyFieldMessage"
    );
}
#[test]
fn resolve_message_name_conflict_with_field_name() {
    // A field named identically to its own message type ("MyMessage") must
    // not shadow the type during name resolution.
    let file_descriptor_set = FileDescriptorSet {
        file: vec![FileDescriptorProto {
            name: Some("myfile.proto".to_owned()),
            package: Some("my.package".to_owned()),
            syntax: Some("proto3".to_owned()),
            message_type: vec![DescriptorProto {
                name: Some("MyMessage".to_owned()),
                field: vec![FieldDescriptorProto {
                    name: Some("MyMessage".to_owned()),
                    number: Some(1),
                    label: Some(Label::Optional as i32),
                    r#type: Some(Type::Message as i32),
                    type_name: Some("MyMessage".to_owned()),
                    json_name: Some("mymessage".to_owned()),
                    ..Default::default()
                }],
                ..Default::default()
            }],
            ..Default::default()
        }],
    };
    let descriptor_pool = DescriptorPool::from_file_descriptor_set(file_descriptor_set).unwrap();
    let message = descriptor_pool
        .get_message_by_name("my.package.MyMessage")
        .unwrap();
    let field = message.get_field_by_name("MyMessage").unwrap();
    assert_eq!(
        field.kind().as_message().unwrap().full_name(),
        "my.package.MyMessage"
    );
}
#[test]
fn resolve_message_name_nested() {
    // A relative `type_name` must resolve to a *nested* message declared
    // inside the referencing message before falling back to outer scopes.
    let file_descriptor_set = FileDescriptorSet {
        file: vec![FileDescriptorProto {
            name: Some("myfile.proto".to_owned()),
            package: Some("my.package".to_owned()),
            syntax: Some("proto3".to_owned()),
            message_type: vec![DescriptorProto {
                name: Some("MyMessage".to_owned()),
                field: vec![FieldDescriptorProto {
                    name: Some("my_field".to_owned()),
                    number: Some(1),
                    label: Some(Label::Optional as i32),
                    r#type: Some(Type::Message as i32),
                    type_name: Some("MyFieldMessage".to_owned()),
                    json_name: Some("myfield".to_owned()),
                    ..Default::default()
                }],
                nested_type: vec![DescriptorProto {
                    name: Some("MyFieldMessage".to_owned()),
                    ..Default::default()
                }],
                ..Default::default()
            }],
            ..Default::default()
        }],
    };
    let descriptor_pool = DescriptorPool::from_file_descriptor_set(file_descriptor_set).unwrap();
    let message = descriptor_pool
        .get_message_by_name("my.package.MyMessage")
        .unwrap();
    let field = message.get_field_by_name("my_field").unwrap();
    assert_eq!(
        field.kind().as_message().unwrap().full_name(),
        "my.package.MyMessage.MyFieldMessage"
    );
}
#[test]
fn message_field_type_not_set() {
    // When `r#type` is absent, the kind must be inferred from `type_name`
    // alone (here: a fully-qualified message reference).
    let file_descriptor_set = FileDescriptorSet {
        file: vec![FileDescriptorProto {
            name: Some("myfile.proto".to_owned()),
            package: Some("my.package".to_owned()),
            syntax: Some("proto3".to_owned()),
            message_type: vec![
                DescriptorProto {
                    name: Some("MyFieldMessage".to_owned()),
                    ..Default::default()
                },
                DescriptorProto {
                    name: Some("MyMessage".to_owned()),
                    field: vec![FieldDescriptorProto {
                        name: Some("my_field".to_owned()),
                        number: Some(1),
                        label: Some(Label::Optional as i32),
                        r#type: None,
                        type_name: Some(".my.package.MyFieldMessage".to_owned()),
                        json_name: Some("myfield".to_owned()),
                        ..Default::default()
                    }],
                    ..Default::default()
                },
            ],
            ..Default::default()
        }],
    };
    let descriptor_pool = DescriptorPool::from_file_descriptor_set(file_descriptor_set).unwrap();
    let message = descriptor_pool
        .get_message_by_name("my.package.MyMessage")
        .unwrap();
    let field = message.get_field_by_name("my_field").unwrap();
    assert_eq!(
        field.kind().as_message().unwrap().full_name(),
        "my.package.MyFieldMessage"
    );
}
#[test]
fn reference_type_in_previously_added_file() {
    // Two descriptor sets added in sequence: the second imports the first and
    // references one of its messages. Also exercises the file-level accessors
    // (name, package, dependencies, parent_file).
    let file_descriptor_set1 = FileDescriptorSet {
        file: vec![FileDescriptorProto {
            name: Some("myfile1.proto".to_owned()),
            package: Some("my.package1".to_owned()),
            syntax: Some("proto3".to_owned()),
            message_type: vec![DescriptorProto {
                name: Some("MyFieldMessage".to_owned()),
                ..Default::default()
            }],
            ..Default::default()
        }],
    };
    let file_descriptor_set2 = FileDescriptorSet {
        file: vec![FileDescriptorProto {
            name: Some("myfile2.proto".to_owned()),
            package: Some("my.package2".to_owned()),
            syntax: Some("proto3".to_owned()),
            dependency: vec!["myfile1.proto".to_owned()],
            public_dependency: vec![0],
            message_type: vec![DescriptorProto {
                name: Some("MyMessage".to_owned()),
                field: vec![FieldDescriptorProto {
                    name: Some("my_field".to_owned()),
                    number: Some(1),
                    label: Some(Label::Optional as i32),
                    r#type: None,
                    type_name: Some(".my.package1.MyFieldMessage".to_owned()),
                    json_name: Some("myfield".to_owned()),
                    ..Default::default()
                }],
                ..Default::default()
            }],
            ..Default::default()
        }],
    };
    let mut pool = DescriptorPool::new();
    pool.add_file_descriptor_set(file_descriptor_set1).unwrap();
    pool.add_file_descriptor_set(file_descriptor_set2).unwrap();
    assert_eq!(pool.get_file_by_name("notfound"), None);
    let file1 = pool.get_file_by_name("myfile1.proto").unwrap();
    let file2 = pool.get_file_by_name("myfile2.proto").unwrap();
    assert_ne!(file1, file2);
    assert_eq!(file1.dependencies().collect::<Vec<_>>(), vec![]);
    assert_eq!(
        file2.dependencies().collect::<Vec<_>>(),
        vec![file1.clone()]
    );
    assert_eq!(file1.name(), "myfile1.proto");
    assert_eq!(file1.package_name(), "my.package1");
    assert_eq!(file2.name(), "myfile2.proto");
    assert_eq!(file2.package_name(), "my.package2");
    let message = pool.get_message_by_name("my.package2.MyMessage").unwrap();
    assert_eq!(message.parent_file(), file2);
    let field = message.get_field_by_name("my_field").unwrap();
    assert_eq!(
        field.kind().as_message().unwrap().full_name(),
        "my.package1.MyFieldMessage"
    );
    // The resolved field type must live in the *first* file, not the referencer.
    assert_eq!(field.kind().as_message().unwrap().parent_file(), file1);
}
#[test]
fn add_duplicate_file() {
    // Adding a byte-identical file a second time must be a no-op.
    let file = FileDescriptorProto {
        name: Some("myfile.proto".to_owned()),
        package: Some("my.package".to_owned()),
        syntax: Some("proto3".to_owned()),
        message_type: vec![DescriptorProto {
            name: Some("MyMessage".to_owned()),
            ..Default::default()
        }],
        ..Default::default()
    };
    let set = FileDescriptorSet { file: vec![file] };

    let mut pool = DescriptorPool::new();
    pool.add_file_descriptor_set(set.clone()).unwrap();
    pool.add_file_descriptor_set(set).unwrap();

    assert_eq!(pool.file_descriptor_protos().len(), 1);
}
#[test]
fn add_file_rollback_on_error() {
    // A file whose service references an undefined message. Adding it must
    // fail, and the failure must leave the pool unchanged: neither the file
    // nor the (otherwise valid) `MyMessage` it declared may remain behind.
    let bad_file_descriptor_set = FileDescriptorSet {
        file: vec![FileDescriptorProto {
            name: Some("myfile.proto".to_owned()),
            package: Some("my.package".to_owned()),
            syntax: Some("proto3".to_owned()),
            service: vec![ServiceDescriptorProto {
                name: Some("MyService".to_owned()),
                method: vec![MethodDescriptorProto {
                    name: Some("my_method".to_owned()),
                    input_type: Some(".my.package.NopeMessage".to_owned()),
                    output_type: Some(".my.package.NopeMessage".to_owned()),
                    ..Default::default()
                }],
                ..Default::default()
            }],
            message_type: vec![DescriptorProto {
                name: Some("MyMessage".to_owned()),
                ..Default::default()
            }],
            ..Default::default()
        }],
    };
    let mut pool = DescriptorPool::new();
    let err = pool
        .add_file_descriptor_set(bad_file_descriptor_set)
        .unwrap_err();
    assert_eq!(
        err.to_string(),
        "name '.my.package.NopeMessage' is not defined"
    );
    assert_eq!(pool.file_descriptor_protos().count(), 0);
    // Use the non-dotted form accepted by `get_message_by_name` (as every
    // successful lookup in this file does): with a leading dot the lookup
    // returns `None` unconditionally, which would let this assertion pass
    // even if rollback were broken.
    assert_eq!(pool.get_message_by_name("my.package.MyMessage"), None);
}
#[test]
fn add_file_missing_dependency() {
    // Importing a file that was never added must be rejected with a clear error.
    let file = FileDescriptorProto {
        name: Some("myfile.proto".to_owned()),
        package: Some("my.package".to_owned()),
        dependency: vec!["notfound.proto".to_owned()],
        public_dependency: vec![0],
        syntax: Some("proto3".to_owned()),
        ..Default::default()
    };

    let mut pool = DescriptorPool::new();
    let err = pool
        .add_file_descriptor_set(FileDescriptorSet { file: vec![file] })
        .unwrap_err();

    assert_eq!(
        err.to_string(),
        "imported file 'notfound.proto' has not been added"
    );
}
#[test]
fn service_method_type_not_message() {
    // A method input/output that resolves to an *enum* (not a message) must
    // fail descriptor construction with a type error.
    let file_descriptor_set = FileDescriptorSet {
        file: vec![FileDescriptorProto {
            name: Some("myfile.proto".to_owned()),
            package: Some("my.package".to_owned()),
            syntax: Some("proto3".to_owned()),
            service: vec![ServiceDescriptorProto {
                name: Some("MyService".to_owned()),
                method: vec![MethodDescriptorProto {
                    name: Some("my_method".to_owned()),
                    input_type: Some(".my.package.MyMessage".to_owned()),
                    output_type: Some(".my.package.MyMessage".to_owned()),
                    ..Default::default()
                }],
                ..Default::default()
            }],
            enum_type: vec![EnumDescriptorProto {
                name: Some("MyMessage".to_owned()),
                value: vec![EnumValueDescriptorProto {
                    name: Some("DEFAULT".to_owned()),
                    ..Default::default()
                }],
                ..Default::default()
            }],
            ..Default::default()
        }],
    };
    let err = DescriptorPool::from_file_descriptor_set(file_descriptor_set).unwrap_err();
    assert_eq!(
        err.to_string(),
        "'my.package.MyMessage' is not a message type"
    );
}
#[test]
fn extension_extendee_type_not_message() {
    // An extension whose `extendee` resolves to an enum rather than a message
    // must be rejected with the same "not a message type" error.
    let file_descriptor_set = FileDescriptorSet {
        file: vec![FileDescriptorProto {
            name: Some("myfile.proto".to_owned()),
            package: Some("my.package".to_owned()),
            syntax: Some("proto3".to_owned()),
            extension: vec![FieldDescriptorProto {
                name: Some("my_extension".to_owned()),
                number: Some(1),
                label: Some(Label::Optional as i32),
                r#type: Some(field_descriptor_proto::Type::Int32 as i32),
                extendee: Some("my.package.MyMessage".to_owned()),
                json_name: Some("myExtension".to_owned()),
                ..Default::default()
            }],
            enum_type: vec![EnumDescriptorProto {
                name: Some("MyMessage".to_owned()),
                value: vec![EnumValueDescriptorProto {
                    name: Some("DEFAULT".to_owned()),
                    ..Default::default()
                }],
                ..Default::default()
            }],
            ..Default::default()
        }],
    };
    let err = DescriptorPool::from_file_descriptor_set(file_descriptor_set).unwrap_err();
    assert_eq!(
        err.to_string(),
        "'my.package.MyMessage' is not a message type"
    );
}
#[test]
fn error_source_location() {
    // Two messages named `Foo` with attached `source_code_info`: the error
    // must point at the *second* definition's span. `line()`/`column()` are
    // 0-based, while the `Debug` rendering is 1-based ("2:9" below).
    let file_descriptor_set = FileDescriptorSet {
        file: vec![FileDescriptorProto {
            name: Some("myfile.proto".to_owned()),
            package: Some("my.package".to_owned()),
            syntax: Some("proto3".to_owned()),
            message_type: vec![
                DescriptorProto {
                    name: Some("Foo".to_owned()),
                    ..Default::default()
                },
                DescriptorProto {
                    name: Some("Foo".to_owned()),
                    ..Default::default()
                },
            ],
            source_code_info: Some(SourceCodeInfo {
                location: vec![
                    Location {
                        // path [4, 0, 1] = message_type[0].name
                        path: vec![4, 0, 1],
                        span: vec![0, 8, 11],
                        ..Default::default()
                    },
                    Location {
                        // path [4, 1, 1] = message_type[1].name
                        path: vec![4, 1, 1],
                        span: vec![1, 8, 11],
                        ..Default::default()
                    },
                ],
            }),
            ..Default::default()
        }],
    };
    let err = DescriptorPool::from_file_descriptor_set(file_descriptor_set).unwrap_err();
    assert_eq!(err.file(), Some("myfile.proto"));
    assert_eq!(err.line(), Some(1));
    assert_eq!(err.column(), Some(8));
    assert_eq!(err.to_string(), "name 'my.package.Foo' is defined twice");
    assert_eq!(
        format!("{err:?}"),
        "myfile.proto:2:9: name 'my.package.Foo' is defined twice"
    );
}
#[test]
fn field_is_required_proto2() {
    // In proto2, `required` is a real label: `is_required()` must distinguish
    // required from optional fields.
    let file_descriptor_set = FileDescriptorSet {
        file: vec![FileDescriptorProto {
            name: Some("myfile.proto".to_owned()),
            package: Some("my.package".to_owned()),
            syntax: Some("proto2".to_owned()),
            message_type: vec![DescriptorProto {
                name: Some("MyMessage".to_owned()),
                field: vec![
                    FieldDescriptorProto {
                        name: Some("required_field".to_owned()),
                        number: Some(1),
                        label: Some(Label::Required as i32),
                        r#type: Some(Type::String as i32),
                        ..Default::default()
                    },
                    FieldDescriptorProto {
                        name: Some("optional_field".to_owned()),
                        number: Some(2),
                        label: Some(Label::Optional as i32),
                        r#type: Some(Type::String as i32),
                        ..Default::default()
                    },
                ],
                ..Default::default()
            }],
            ..Default::default()
        }],
    };
    let descriptor_pool = DescriptorPool::from_file_descriptor_set(file_descriptor_set).unwrap();
    let message = descriptor_pool
        .get_message_by_name("my.package.MyMessage")
        .unwrap();
    let required_field = message.get_field_by_name("required_field").unwrap();
    let optional_field = message.get_field_by_name("optional_field").unwrap();
    assert!(required_field.is_required());
    assert!(!optional_field.is_required());
}
#[test]
fn field_is_required_proto3() {
    // Proto3 has no `required` label, so no field may report itself required.
    let make_field = |name: &str, number: i32, label: Label| FieldDescriptorProto {
        name: Some(name.to_owned()),
        number: Some(number),
        label: Some(label as i32),
        r#type: Some(Type::String as i32),
        ..Default::default()
    };
    let file_descriptor_set = FileDescriptorSet {
        file: vec![FileDescriptorProto {
            name: Some("myfile.proto".to_owned()),
            package: Some("my.package".to_owned()),
            syntax: Some("proto3".to_owned()),
            message_type: vec![DescriptorProto {
                name: Some("MyMessage".to_owned()),
                field: vec![
                    make_field("optional_field", 1, Label::Optional),
                    make_field("repeated_field", 2, Label::Repeated),
                ],
                ..Default::default()
            }],
            ..Default::default()
        }],
    };

    let pool = DescriptorPool::from_file_descriptor_set(file_descriptor_set).unwrap();
    let message = pool.get_message_by_name("my.package.MyMessage").unwrap();

    assert!(!message
        .get_field_by_name("optional_field")
        .unwrap()
        .is_required());
    assert!(!message
        .get_field_by_name("repeated_field")
        .unwrap()
        .is_required());
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/descriptor/error.rs | prost-reflect/src/descriptor/error.rs | use std::{
fmt,
ops::{Range, RangeInclusive},
};
use crate::descriptor::{FileDescriptorInner, FileIndex};
/// An error that may occur while creating a [`DescriptorPool`][crate::DescriptorPool].
pub struct DescriptorError {
errors: Box<[DescriptorErrorKind]>,
#[cfg(feature = "miette")]
source: Option<miette::NamedSource<String>>,
}
// One specific validation failure encountered while building a descriptor
// pool. Each variant carries `Label`s pointing at the offending location(s)
// in the source `FileDescriptorProto`; fields marked with the `miette`
// cfg_attr are only read when rendering miette diagnostics.
#[derive(Debug)]
pub(super) enum DescriptorErrorKind {
    // --- structural / decoding problems ---
    MissingRequiredField {
        label: Label,
    },
    UnknownSyntax {
        syntax: String,
        found: Label,
    },
    DuplicateFileName {
        name: String,
    },
    FileNotFound {
        name: String,
        found: Label,
    },
    InvalidImportIndex,
    InvalidOneofIndex,
    // --- name/number conflicts; `first` is the prior definition, `second`
    // the conflicting one ---
    DuplicateName {
        name: String,
        first: Label,
        second: Label,
    },
    DuplicateFieldNumber {
        number: u32,
        #[cfg_attr(not(feature = "miette"), allow(dead_code))]
        first: Label,
        second: Label,
    },
    DuplicateFieldJsonName {
        name: String,
        #[cfg_attr(not(feature = "miette"), allow(dead_code))]
        first: Label,
        second: Label,
    },
    DuplicateFieldCamelCaseName {
        first_name: String,
        second_name: String,
        #[cfg_attr(not(feature = "miette"), allow(dead_code))]
        first: Label,
        second: Label,
    },
    InvalidFieldNumber {
        number: i32,
        found: Label,
    },
    FieldNumberInReservedRange {
        number: i32,
        range: Range<i32>,
        #[cfg_attr(not(feature = "miette"), allow(dead_code))]
        defined: Label,
        found: Label,
    },
    FieldNumberInExtensionRange {
        number: i32,
        range: Range<i32>,
        #[cfg_attr(not(feature = "miette"), allow(dead_code))]
        defined: Label,
        found: Label,
    },
    ExtensionNumberOutOfRange {
        number: i32,
        message: String,
        found: Label,
    },
    // --- name resolution failures ---
    NameNotFound {
        name: String,
        found: Label,
        #[cfg_attr(not(feature = "miette"), allow(dead_code))]
        help: Option<String>,
    },
    NameShadowed {
        name: String,
        shadowed_name: String,
        found: Label,
        #[cfg_attr(not(feature = "miette"), allow(dead_code))]
        help: Option<String>,
    },
    InvalidType {
        name: String,
        expected: String,
        found: Label,
        #[cfg_attr(not(feature = "miette"), allow(dead_code))]
        defined: Label,
    },
    InvalidFieldDefault {
        value: String,
        kind: String,
        found: Label,
    },
    // --- enum validation ---
    EmptyEnum {
        found: Label,
    },
    InvalidProto3EnumDefault {
        found: Label,
    },
    DuplicateEnumNumber {
        number: i32,
        #[cfg_attr(not(feature = "miette"), allow(dead_code))]
        first: Label,
        second: Label,
    },
    EnumNumberInReservedRange {
        number: i32,
        range: RangeInclusive<i32>,
        #[cfg_attr(not(feature = "miette"), allow(dead_code))]
        defined: Label,
        found: Label,
    },
    // --- option validation ---
    OptionNotFound {
        name: String,
        found: Label,
    },
    InvalidOptionType {
        name: String,
        ty: String,
        value: String,
        is_last: bool,
        found: Label,
    },
    InvalidOptionExtendee {
        name: String,
        expected_extendee: String,
        actual_extendee: String,
        found: Label,
    },
    #[cfg(feature = "text-format")]
    InvalidMessageOption {
        name: String,
        ty: String,
        found: Label,
        err: crate::text_format::ParseError,
    },
    DuplicateOption {
        name: String,
        found: Label,
    },
    DecodeFileDescriptorSet {
        err: prost::DecodeError,
    },
}
// A location in a named proto file, used to point an error at the offending
// part of a `FileDescriptorProto`.
#[derive(Debug)]
pub(super) struct Label {
    // Name of the file the error occurred in.
    file: String,
    // Path into the FileDescriptorProto, as in `SourceCodeInfo.Location.path`.
    path: Box<[i32]>,
    // 0-based span from source_code_info, if available. Accessors read
    // `span[0]` as start line and `span[1]` as start column; presumably the
    // full layout is [start_line, start_col, end_line, end_col] — confirm.
    span: Option<[i32; 4]>,
    #[cfg(feature = "miette")]
    message: String,
    // Byte-offset span resolved against attached source text, for miette.
    #[cfg(feature = "miette")]
    resolved: Option<miette::SourceSpan>,
}
impl DescriptorError {
    // Wraps a non-empty list of errors; the first becomes the primary error
    // reported by `Display`, `file()`, `line()`, `column()` and `path()`.
    pub(super) fn new(errors: Vec<DescriptorErrorKind>) -> DescriptorError {
        debug_assert!(!errors.is_empty());
        DescriptorError {
            errors: errors.into(),
            #[cfg(feature = "miette")]
            source: None,
        }
    }
    /// The primary file in which this error occurred.
    pub fn file(&self) -> Option<&str> {
        self.first().label().map(|l| l.file.as_str())
    }
    /// The 0-based line number at which this error occurred, if available.
    ///
    /// This field may be `None` if the error is not associated with a particular source location, or the
    /// [`source_code_info`](prost_types::FileDescriptorProto::source_code_info) field was not populated for the input file.
    pub fn line(&self) -> Option<usize> {
        self.first()
            .label()
            .and_then(|l| l.span)
            .map(|s| s[0] as usize)
    }
    /// The 0-based column number at which this error occurred, if available.
    ///
    /// This field may be `None` if the error is not associated with a particular source location, or the
    /// [`source_code_info`](prost_types::FileDescriptorProto::source_code_info) field was not populated for the input file.
    pub fn column(&self) -> Option<usize> {
        self.first()
            .label()
            .and_then(|l| l.span)
            .map(|s| s[1] as usize)
    }
    /// Gets the path where this error occurred in the [`FileDescriptorProto`][prost_types::FileDescriptorProto], if available.
    ///
    /// See [`path`][prost_types::source_code_info::Location::path] for more details on the structure of the path.
    pub fn path(&self) -> Option<&[i32]> {
        self.first().label().map(|l| l.path.as_ref())
    }
    #[cfg(feature = "miette")]
    #[cfg_attr(docsrs, doc(cfg(feature = "miette")))]
    /// Provide source code information for this error.
    ///
    /// The source should correspond to the contents of [`file()`][DescriptorError::file].
    pub fn with_source_code(mut self, source: &str) -> Self {
        // Only attach source when the primary error names a file; every error
        // is then given a chance to resolve its spans against that file.
        if let Some(file) = self.file() {
            let file = file.to_owned();
            self.source = Some(miette::NamedSource::new(&file, source.to_owned()));
            for error in self.errors.as_mut() {
                error.add_source_code(&file, source);
            }
        }
        self
    }
    // The primary error; safe because `new` asserts the list is non-empty.
    fn first(&self) -> &DescriptorErrorKind {
        &self.errors[0]
    }
}
impl std::error::Error for DescriptorError {
    // Delegates to the primary error's source (e.g. a prost decode error).
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        self.first().source()
    }
}
impl fmt::Display for DescriptorError {
    // Only the primary (first) error is rendered; further errors are exposed
    // through the miette `related()` diagnostics.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        self.first().fmt(f)
    }
}
impl fmt::Debug for DescriptorError {
    // Renders "file:line:col: message". `line()`/`column()` are 0-based, so
    // +1 converts to the conventional 1-based editor coordinates.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        if let Some(file) = self.file() {
            write!(f, "{file}:")?;
            if let (Some(line), Some(column)) = (self.line(), self.column()) {
                write!(f, "{}:{}:", line + 1, column + 1)?;
            }
            write!(f, " ")?;
        }
        write!(f, "{self}")
    }
}
#[cfg(feature = "miette")]
#[cfg_attr(docsrs, doc(cfg(feature = "miette")))]
impl miette::Diagnostic for DescriptorError {
fn code<'a>(&'a self) -> Option<Box<dyn fmt::Display + 'a>> {
self.first().code()
}
fn severity(&self) -> Option<miette::Severity> {
self.first().severity()
}
fn help<'a>(&'a self) -> Option<Box<dyn fmt::Display + 'a>> {
self.first().help()
}
fn url<'a>(&'a self) -> Option<Box<dyn fmt::Display + 'a>> {
self.first().url()
}
fn source_code(&self) -> Option<&dyn miette::SourceCode> {
match &self.source {
Some(source) => Some(source),
None => None,
}
}
fn labels(&self) -> Option<Box<dyn Iterator<Item = miette::LabeledSpan> + '_>> {
self.first().labels()
}
fn related<'a>(&'a self) -> Option<Box<dyn Iterator<Item = &'a dyn miette::Diagnostic> + 'a>> {
if self.errors.len() > 1 {
Some(Box::new(
self.errors
.iter()
.map(|e| e as &dyn miette::Diagnostic)
.skip(1),
))
} else {
None
}
}
fn diagnostic_source(&self) -> Option<&dyn miette::Diagnostic> {
self.first().diagnostic_source()
}
}
impl DescriptorErrorKind {
    // The primary label (location) for this error, if it has one. For
    // duplicate-definition variants this is the *second* (conflicting)
    // occurrence; variants without a source location return `None`.
    fn label(&self) -> Option<&Label> {
        match self {
            DescriptorErrorKind::MissingRequiredField { label } => Some(label),
            DescriptorErrorKind::UnknownSyntax { found, .. } => Some(found),
            DescriptorErrorKind::DuplicateFileName { .. } => None,
            DescriptorErrorKind::FileNotFound { found, .. } => Some(found),
            DescriptorErrorKind::InvalidImportIndex => None,
            DescriptorErrorKind::InvalidOneofIndex => None,
            DescriptorErrorKind::DuplicateName { second, .. } => Some(second),
            DescriptorErrorKind::DuplicateFieldNumber { second, .. } => Some(second),
            DescriptorErrorKind::DuplicateFieldJsonName { second, .. } => Some(second),
            DescriptorErrorKind::DuplicateFieldCamelCaseName { second, .. } => Some(second),
            DescriptorErrorKind::InvalidFieldNumber { found, .. } => Some(found),
            DescriptorErrorKind::FieldNumberInReservedRange { found, .. } => Some(found),
            DescriptorErrorKind::FieldNumberInExtensionRange { found, .. } => Some(found),
            DescriptorErrorKind::ExtensionNumberOutOfRange { found, .. } => Some(found),
            DescriptorErrorKind::NameNotFound { found, .. } => Some(found),
            DescriptorErrorKind::NameShadowed { found, .. } => Some(found),
            DescriptorErrorKind::InvalidType { found, .. } => Some(found),
            DescriptorErrorKind::InvalidFieldDefault { found, .. } => Some(found),
            DescriptorErrorKind::EmptyEnum { found } => Some(found),
            DescriptorErrorKind::InvalidProto3EnumDefault { found } => Some(found),
            DescriptorErrorKind::DuplicateEnumNumber { second, .. } => Some(second),
            DescriptorErrorKind::EnumNumberInReservedRange { found, .. } => Some(found),
            DescriptorErrorKind::OptionNotFound { found, .. } => Some(found),
            DescriptorErrorKind::InvalidOptionType { found, .. } => Some(found),
            DescriptorErrorKind::InvalidOptionExtendee { found, .. } => Some(found),
            #[cfg(feature = "text-format")]
            DescriptorErrorKind::InvalidMessageOption { found, .. } => Some(found),
            DescriptorErrorKind::DuplicateOption { found, .. } => Some(found),
            DescriptorErrorKind::DecodeFileDescriptorSet { .. } => None,
        }
    }
    // Resolves every label this variant carries against the given file's
    // source text (byte offsets for miette). Variants with two labels
    // (duplicate/reserved-range errors) resolve both.
    #[cfg(feature = "miette")]
    fn add_source_code(&mut self, file: &str, source: &str) {
        match self {
            DescriptorErrorKind::MissingRequiredField { label } => {
                label.resolve_span(file, source);
            }
            DescriptorErrorKind::UnknownSyntax { found, .. } => {
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::DuplicateFileName { .. } => {}
            DescriptorErrorKind::FileNotFound { found, .. } => {
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::InvalidImportIndex => {}
            DescriptorErrorKind::InvalidOneofIndex => {}
            DescriptorErrorKind::DuplicateName { first, second, .. } => {
                first.resolve_span(file, source);
                second.resolve_span(file, source);
            }
            DescriptorErrorKind::DuplicateFieldNumber { first, second, .. } => {
                first.resolve_span(file, source);
                second.resolve_span(file, source);
            }
            DescriptorErrorKind::DuplicateFieldJsonName { first, second, .. } => {
                first.resolve_span(file, source);
                second.resolve_span(file, source);
            }
            DescriptorErrorKind::DuplicateFieldCamelCaseName { first, second, .. } => {
                first.resolve_span(file, source);
                second.resolve_span(file, source);
            }
            DescriptorErrorKind::InvalidFieldNumber { found, .. } => {
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::FieldNumberInReservedRange { defined, found, .. } => {
                defined.resolve_span(file, source);
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::FieldNumberInExtensionRange { defined, found, .. } => {
                defined.resolve_span(file, source);
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::ExtensionNumberOutOfRange { found, .. } => {
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::NameNotFound { found, .. } => {
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::NameShadowed { found, .. } => {
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::InvalidType { found, defined, .. } => {
                found.resolve_span(file, source);
                defined.resolve_span(file, source);
            }
            DescriptorErrorKind::InvalidFieldDefault { found, .. } => {
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::EmptyEnum { found } => {
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::InvalidProto3EnumDefault { found } => {
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::DuplicateEnumNumber { first, second, .. } => {
                first.resolve_span(file, source);
                second.resolve_span(file, source);
            }
            DescriptorErrorKind::EnumNumberInReservedRange { defined, found, .. } => {
                found.resolve_span(file, source);
                defined.resolve_span(file, source);
            }
            DescriptorErrorKind::OptionNotFound { found, .. } => {
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::InvalidOptionType { found, .. } => {
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::InvalidOptionExtendee { found, .. } => {
                found.resolve_span(file, source);
            }
            #[cfg(feature = "text-format")]
            DescriptorErrorKind::InvalidMessageOption { found, .. } => {
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::DuplicateOption { found, .. } => {
                found.resolve_span(file, source);
            }
            DescriptorErrorKind::DecodeFileDescriptorSet { .. } => {}
        }
    }
}
impl std::error::Error for DescriptorErrorKind {
    // Only the variants wrapping an underlying error expose a source.
    fn source(&self) -> Option<&(dyn std::error::Error + 'static)> {
        match self {
            DescriptorErrorKind::DecodeFileDescriptorSet { err } => Some(err),
            #[cfg(feature = "text-format")]
            DescriptorErrorKind::InvalidMessageOption { err, .. } => Some(err),
            _ => None,
        }
    }
}
// Human-readable one-line message per variant. These strings are part of the
// observable API (asserted verbatim by the tests in descriptor/tests.rs), so
// wording changes here are breaking.
impl fmt::Display for DescriptorErrorKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self {
            DescriptorErrorKind::MissingRequiredField { label } => {
                write!(f, "missing required field at {:?}", label.path)
            }
            DescriptorErrorKind::UnknownSyntax { syntax, .. } => {
                write!(f, "unknown syntax '{syntax}'")
            }
            DescriptorErrorKind::DuplicateFileName { name, .. } => {
                write!(f, "a different file named '{name}' has already been added")
            }
            DescriptorErrorKind::FileNotFound { name, .. } => {
                write!(f, "imported file '{name}' has not been added")
            }
            DescriptorErrorKind::InvalidImportIndex => {
                write!(f, "invalid import index")
            }
            DescriptorErrorKind::InvalidOneofIndex => {
                write!(f, "invalid oneof index")
            }
            DescriptorErrorKind::DuplicateName {
                name,
                first,
                second,
            } => {
                // Same-file duplicates get the short message; cross-file
                // duplicates name the file of the first definition.
                if first.file == second.file {
                    write!(f, "name '{name}' is defined twice")
                } else {
                    write!(
                        f,
                        "name '{}' is already defined in file '{}'",
                        name, first.file
                    )
                }
            }
            DescriptorErrorKind::DuplicateFieldNumber { number, .. } => {
                write!(f, "field number '{number}' is already used")
            }
            DescriptorErrorKind::DuplicateFieldJsonName { name, .. } => {
                write!(f, "a field with JSON name '{name}' is already defined")
            }
            DescriptorErrorKind::DuplicateFieldCamelCaseName {
                first_name,
                second_name,
                ..
            } => {
                write!(
                    f,
                    "camel-case name of field '{first_name}' conflicts with field '{second_name}'"
                )
            }
            DescriptorErrorKind::InvalidFieldNumber { number, .. } => {
                write!(f, "invalid field number '{number}'")
            }
            // Half-open `Range`: display as an inclusive range via `end - 1`.
            DescriptorErrorKind::FieldNumberInReservedRange { number, range, .. } => {
                write!(
                    f,
                    "field number '{}' conflicts with reserved range '{} to {}'",
                    number,
                    range.start,
                    range.end - 1
                )
            }
            DescriptorErrorKind::FieldNumberInExtensionRange { number, range, .. } => {
                write!(
                    f,
                    "field number '{}' conflicts with extension range '{} to {}'",
                    number,
                    range.start,
                    range.end - 1
                )
            }
            DescriptorErrorKind::ExtensionNumberOutOfRange {
                number, message, ..
            } => {
                write!(
                    f,
                    "message '{message}' does not define '{number}' as an extension number"
                )
            }
            DescriptorErrorKind::NameNotFound { name, .. } => {
                write!(f, "name '{name}' is not defined")
            }
            DescriptorErrorKind::NameShadowed {
                name,
                shadowed_name,
                ..
            } => {
                write!(
                    f,
                    "'{name}' resolves to '{shadowed_name}', which is not defined",
                )
            }
            DescriptorErrorKind::InvalidType { name, expected, .. } => {
                write!(f, "'{name}' is not {expected}")
            }
            DescriptorErrorKind::InvalidFieldDefault { value, kind, .. } => {
                write!(f, "invalid default value '{value}' for type '{kind}'")
            }
            DescriptorErrorKind::EmptyEnum { .. } => {
                write!(f, "enums must have at least one value")
            }
            DescriptorErrorKind::InvalidProto3EnumDefault { .. } => {
                write!(f, "the first value for proto3 enums must be 0")
            }
            DescriptorErrorKind::DuplicateEnumNumber { number, .. } => {
                write!(f, "enum number '{number}' has already been used")
            }
            // `RangeInclusive` is already inclusive — no `- 1` adjustment here.
            DescriptorErrorKind::EnumNumberInReservedRange { number, range, .. } => {
                write!(
                    f,
                    "enum number '{}' conflicts with reserved range '{} to {}'",
                    number,
                    range.start(),
                    range.end()
                )
            }
            DescriptorErrorKind::OptionNotFound { name, .. } => {
                write!(f, "option field '{name}' is not defined")
            }
            DescriptorErrorKind::InvalidOptionType {
                name,
                ty,
                value,
                is_last,
                ..
            } => {
                // `is_last` distinguishes a bad leaf value from an attempt to
                // descend into a scalar mid-path.
                if *is_last {
                    write!(
                        f,
                        "expected a value of type '{ty}' for option '{name}', but found '{value}'"
                    )
                } else {
                    write!(
                        f,
                        "cannot set field for option '{name}' value of type '{ty}'"
                    )
                }
            }
            DescriptorErrorKind::InvalidOptionExtendee {
                name,
                expected_extendee,
                actual_extendee,
                ..
            } => {
                write!(
                    f,
                    "expected an extension to type '{expected_extendee}', but '{name}' extends '{actual_extendee}'"
                )
            }
            #[cfg(feature = "text-format")]
            DescriptorErrorKind::InvalidMessageOption { name, ty, .. } => {
                write!(f, "invalid value of type '{ty}' for option '{name}'")
            }
            DescriptorErrorKind::DuplicateOption { name, .. } => {
                write!(f, "option field '{name}' has already been set")
            }
            DescriptorErrorKind::DecodeFileDescriptorSet { .. } => {
                write!(f, "failed to decode file descriptor set")
            }
        }
    }
}
#[cfg(feature = "miette")]
#[cfg_attr(docsrs, doc(cfg(feature = "miette")))]
impl miette::Diagnostic for DescriptorErrorKind {
    /// Returns an optional hint with advice on how to resolve this error.
    ///
    /// Only a handful of kinds have actionable advice; the rest return `None`
    /// and rely on the `Display` message alone.
    fn help<'a>(&'a self) -> Option<Box<dyn fmt::Display + 'a>> {
        use crate::descriptor::{RESERVED_MESSAGE_FIELD_NUMBERS, VALID_MESSAGE_FIELD_NUMBERS};
        match self {
            DescriptorErrorKind::MissingRequiredField { .. } => None,
            DescriptorErrorKind::UnknownSyntax { .. } => {
                Some(Box::new("valid values are 'proto2' and 'proto3'"))
            }
            DescriptorErrorKind::DuplicateFileName { .. } => None,
            DescriptorErrorKind::FileNotFound { .. } => None,
            DescriptorErrorKind::InvalidImportIndex => None,
            DescriptorErrorKind::InvalidOneofIndex => None,
            DescriptorErrorKind::DuplicateName { .. } => None,
            DescriptorErrorKind::DuplicateFieldNumber { .. } => None,
            DescriptorErrorKind::InvalidFieldNumber { number, .. } => {
                // Distinguish "outside the valid range" from "inside the
                // reserved range" so the hint can quote the relevant bounds.
                if !VALID_MESSAGE_FIELD_NUMBERS.contains(number) {
                    Some(Box::new(format!(
                        "field numbers must be between {} and {}",
                        VALID_MESSAGE_FIELD_NUMBERS.start,
                        VALID_MESSAGE_FIELD_NUMBERS.end - 1
                    )))
                } else if RESERVED_MESSAGE_FIELD_NUMBERS.contains(number) {
                    Some(Box::new(format!(
                        "field numbers {} to {} are reserved",
                        RESERVED_MESSAGE_FIELD_NUMBERS.start,
                        RESERVED_MESSAGE_FIELD_NUMBERS.end - 1
                    )))
                } else {
                    None
                }
            }
            DescriptorErrorKind::FieldNumberInReservedRange { .. } => None,
            DescriptorErrorKind::FieldNumberInExtensionRange { .. } => None,
            DescriptorErrorKind::DuplicateFieldJsonName { .. } => None,
            DescriptorErrorKind::DuplicateFieldCamelCaseName { .. } => None,
            // Name-resolution errors carry an optional, pre-computed help string.
            DescriptorErrorKind::NameNotFound { help, .. }
            | DescriptorErrorKind::NameShadowed { help, .. } => help
                .as_ref()
                .map(|h| -> Box<dyn fmt::Display> { Box::new(h.clone()) }),
            DescriptorErrorKind::InvalidType { .. } => None,
            DescriptorErrorKind::InvalidFieldDefault { .. } => None,
            DescriptorErrorKind::EmptyEnum { .. } => None,
            DescriptorErrorKind::InvalidProto3EnumDefault { .. } => None,
            // NOTE(review): this message reads "option allow re-using" and
            // appears to be missing the word "to". Left unchanged here because
            // it is a runtime string, not a comment.
            DescriptorErrorKind::DuplicateEnumNumber { .. } => Some(Box::new(
                "set the 'allow_alias' option allow re-using enum numbers",
            )),
            DescriptorErrorKind::EnumNumberInReservedRange { .. } => None,
            DescriptorErrorKind::OptionNotFound { .. } => None,
            DescriptorErrorKind::InvalidOptionType { .. } => None,
            DescriptorErrorKind::InvalidOptionExtendee { .. } => None,
            #[cfg(feature = "text-format")]
            DescriptorErrorKind::InvalidMessageOption { .. } => None,
            DescriptorErrorKind::DuplicateOption { .. } => None,
            DescriptorErrorKind::DecodeFileDescriptorSet { .. } => None,
            DescriptorErrorKind::ExtensionNumberOutOfRange { .. } => None,
        }
    }
    /// Collects source-code labels pointing at the location(s) relevant to
    /// this error.
    ///
    /// Labels whose spans were never resolved against source text produce no
    /// entries (`Label::to_span` returns `None` for them), so the method
    /// returns `None` when no label resolved to a usable span.
    fn labels(&self) -> Option<Box<dyn Iterator<Item = miette::LabeledSpan> + '_>> {
        let mut spans = Vec::new();
        match self {
            DescriptorErrorKind::MissingRequiredField { label } => spans.extend(label.to_span()),
            DescriptorErrorKind::UnknownSyntax { found: defined, .. } => {
                spans.extend(defined.to_span());
            }
            DescriptorErrorKind::DuplicateFileName { .. } => {}
            DescriptorErrorKind::FileNotFound { found, .. } => {
                spans.extend(found.to_span());
            }
            DescriptorErrorKind::InvalidImportIndex => {}
            DescriptorErrorKind::InvalidOneofIndex => {}
            // Duplicate-style errors point at both the first definition and
            // the conflicting second one.
            DescriptorErrorKind::DuplicateName { first, second, .. } => {
                spans.extend(first.to_span());
                spans.extend(second.to_span());
            }
            DescriptorErrorKind::DuplicateFieldNumber { first, second, .. } => {
                spans.extend(first.to_span());
                spans.extend(second.to_span());
            }
            DescriptorErrorKind::DuplicateFieldJsonName { first, second, .. } => {
                spans.extend(first.to_span());
                spans.extend(second.to_span());
            }
            DescriptorErrorKind::DuplicateFieldCamelCaseName { first, second, .. } => {
                spans.extend(first.to_span());
                spans.extend(second.to_span());
            }
            DescriptorErrorKind::NameNotFound { found, .. }
            | DescriptorErrorKind::NameShadowed { found, .. } => {
                spans.extend(found.to_span());
            }
            DescriptorErrorKind::InvalidFieldNumber { found, .. } => {
                spans.extend(found.to_span());
            }
            DescriptorErrorKind::FieldNumberInReservedRange { defined, found, .. } => {
                spans.extend(defined.to_span());
                spans.extend(found.to_span());
            }
            DescriptorErrorKind::FieldNumberInExtensionRange { defined, found, .. } => {
                spans.extend(defined.to_span());
                spans.extend(found.to_span());
            }
            DescriptorErrorKind::ExtensionNumberOutOfRange { found, .. } => {
                spans.extend(found.to_span());
            }
            DescriptorErrorKind::InvalidType { found, defined, .. } => {
                spans.extend(found.to_span());
                spans.extend(defined.to_span());
            }
            DescriptorErrorKind::InvalidFieldDefault { found, .. } => {
                spans.extend(found.to_span());
            }
            DescriptorErrorKind::EmptyEnum { found } => {
                spans.extend(found.to_span());
            }
            DescriptorErrorKind::InvalidProto3EnumDefault { found, .. } => {
                spans.extend(found.to_span());
            }
            DescriptorErrorKind::DuplicateEnumNumber { first, second, .. } => {
                spans.extend(first.to_span());
                spans.extend(second.to_span());
            }
            DescriptorErrorKind::EnumNumberInReservedRange { defined, found, .. } => {
                // NOTE(review): pushes `found` before `defined`, unlike
                // `FieldNumberInReservedRange` above which pushes `defined`
                // first — confirm whether this ordering is intentional.
                spans.extend(found.to_span());
                spans.extend(defined.to_span());
            }
            DescriptorErrorKind::OptionNotFound { found, .. } => {
                spans.extend(found.to_span());
            }
            DescriptorErrorKind::InvalidOptionType { found, .. } => {
                spans.extend(found.to_span());
            }
            DescriptorErrorKind::InvalidOptionExtendee { found, .. } => {
                spans.extend(found.to_span());
            }
            #[cfg(feature = "text-format")]
            DescriptorErrorKind::InvalidMessageOption { found, .. } => {
                spans.extend(found.to_span());
            }
            DescriptorErrorKind::DuplicateOption { found, .. } => {
                spans.extend(found.to_span());
            }
            DescriptorErrorKind::DecodeFileDescriptorSet { .. } => {}
        }
        if spans.is_empty() {
            None
        } else {
            Some(Box::new(spans.into_iter()))
        }
    }
    /// Exposes the underlying text-format parse error as a nested diagnostic,
    /// when this error wraps one.
    fn diagnostic_source(&self) -> Option<&dyn miette::Diagnostic> {
        match self {
            #[cfg(feature = "text-format")]
            DescriptorErrorKind::InvalidMessageOption { err, .. } => Some(err),
            _ => None,
        }
    }
}
impl Label {
    /// Creates a label pointing at the AST node identified by `path` within
    /// the file at index `file`.
    ///
    /// The line/column span is looked up in the file's `SourceCodeInfo`, if
    /// present. A protobuf location span has either 3 elements
    /// (`[start_line, start_col, end_col]`, a single-line span) or 4 elements
    /// (`[start_line, start_col, end_line, end_col]`); both forms are
    /// normalized to the 4-element form here.
    pub fn new(
        files: &[FileDescriptorInner],
        #[cfg_attr(not(feature = "miette"), allow(unused_variables))] message: impl ToString,
        file: FileIndex,
        path: Box<[i32]>,
    ) -> Self {
        let file = &files[file as usize].raw;
        let span = file
            .source_code_info
            .as_ref()
            .and_then(|s| s.location.iter().find(|l| *l.path == *path))
            .and_then(|l| match *l.span {
                [start_line, start_col, end_col] => {
                    Some([start_line, start_col, start_line, end_col])
                }
                [start_line, start_col, end_line, end_col] => {
                    Some([start_line, start_col, end_line, end_col])
                }
                // Spans of any other length are malformed; ignore them rather
                // than failing.
                _ => None,
            });
        Label {
            file: file.name().to_owned(),
            span,
            path,
            #[cfg(feature = "miette")]
            message: message.to_string(),
            #[cfg(feature = "miette")]
            resolved: None,
        }
    }
    /// Resolves the stored line/column span into a byte-offset
    /// [`miette::SourceSpan`] against `source`, if `file` names the file this
    /// label refers to. Does nothing otherwise, or if no span was found.
    #[cfg(feature = "miette")]
    pub fn resolve_span(&mut self, file: &str, source: &str) {
        if file == self.file {
            if let Some([start_line, start_col, end_line, end_col]) = self.span {
                // Protobuf spans are zero-based while miette locations are
                // one-based, hence the `saturating_add(1)` conversions.
                let start = miette::SourceOffset::from_location(
                    source,
                    start_line.saturating_add(1) as _,
                    start_col.saturating_add(1) as _,
                )
                .offset();
                let end = miette::SourceOffset::from_location(
                    source,
                    end_line.saturating_add(1) as _,
                    end_col.saturating_add(1) as _,
                )
                .offset();
                self.resolved = Some(miette::SourceSpan::from(start..end));
            }
        }
    }
    /// Converts this label into a [`miette::LabeledSpan`], returning `None`
    /// when the span was never resolved or resolved to an empty range.
    #[cfg(feature = "miette")]
    fn to_span(&self) -> Option<miette::LabeledSpan> {
        match self.resolved {
            Some(span) if !span.is_empty() => Some(miette::LabeledSpan::new_with_span(
                Some(self.message.clone()),
                span,
            )),
            _ => None,
        }
    }
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/descriptor/api.rs | prost-reflect/src/descriptor/api.rs | use std::{
borrow::Cow,
collections::HashMap,
fmt, iter,
ops::{Range, RangeInclusive},
sync::Arc,
};
use prost::{
bytes::{Buf, BufMut, Bytes},
encoding::{self, WireType},
DecodeError, EncodeError, Message,
};
use prost_types::{
DescriptorProto, EnumDescriptorProto, EnumValueDescriptorProto, FieldDescriptorProto,
FileDescriptorProto, FileDescriptorSet, MethodDescriptorProto, OneofDescriptorProto,
ServiceDescriptorProto,
};
use crate::{
descriptor::{
error::DescriptorErrorKind,
find_enum_proto, find_message_proto, tag, to_index,
types::{self, Options},
Definition, DefinitionKind, DescriptorIndex, EnumDescriptorInner, EnumValueDescriptorInner,
ExtensionDescriptorInner, FieldDescriptorInner, FileDescriptorInner, KindIndex,
MessageDescriptorInner, MethodDescriptorInner, OneofDescriptorInner,
ServiceDescriptorInner, MAP_ENTRY_KEY_NUMBER, MAP_ENTRY_VALUE_NUMBER,
},
Cardinality, DescriptorError, DescriptorPool, DynamicMessage, EnumDescriptor,
EnumValueDescriptor, ExtensionDescriptor, FieldDescriptor, FileDescriptor, Kind,
MessageDescriptor, MethodDescriptor, OneofDescriptor, ServiceDescriptor, Syntax, Value,
};
impl fmt::Debug for Syntax {
    /// Formats the syntax as the identifier used in `.proto` files
    /// (`proto2` or `proto3`).
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            Syntax::Proto2 => "proto2",
            Syntax::Proto3 => "proto3",
        };
        f.write_str(name)
    }
}
impl Kind {
    /// Converts the pool-internal [`KindIndex`] representation into a public
    /// [`Kind`], attaching a handle to `pool` for message and enum types.
    fn new(pool: &DescriptorPool, kind: KindIndex) -> Self {
        match kind {
            KindIndex::Message(index) | KindIndex::Group(index) => {
                Kind::Message(MessageDescriptor {
                    pool: pool.clone(),
                    index,
                })
            }
            KindIndex::Enum(index) => Kind::Enum(EnumDescriptor {
                pool: pool.clone(),
                index,
            }),
            KindIndex::Double => Kind::Double,
            KindIndex::Float => Kind::Float,
            KindIndex::Int32 => Kind::Int32,
            KindIndex::Int64 => Kind::Int64,
            KindIndex::Uint32 => Kind::Uint32,
            KindIndex::Uint64 => Kind::Uint64,
            KindIndex::Sint32 => Kind::Sint32,
            KindIndex::Sint64 => Kind::Sint64,
            KindIndex::Fixed32 => Kind::Fixed32,
            KindIndex::Fixed64 => Kind::Fixed64,
            KindIndex::Sfixed32 => Kind::Sfixed32,
            KindIndex::Sfixed64 => Kind::Sfixed64,
            KindIndex::Bool => Kind::Bool,
            KindIndex::String => Kind::String,
            KindIndex::Bytes => Kind::Bytes,
        }
    }
    /// Gets a reference to the [`MessageDescriptor`] if this is a message type,
    /// or `None` otherwise.
    pub fn as_message(&self) -> Option<&MessageDescriptor> {
        if let Kind::Message(message) = self {
            Some(message)
        } else {
            None
        }
    }
    /// Gets a reference to the [`EnumDescriptor`] if this is an enum type,
    /// or `None` otherwise.
    pub fn as_enum(&self) -> Option<&EnumDescriptor> {
        if let Kind::Enum(enumeration) = self {
            Some(enumeration)
        } else {
            None
        }
    }
    /// Returns the [`WireType`] used to encode this type.
    ///
    /// Note: [`Kind::Message`] returns [`WireType::LengthDelimited`], as
    /// [groups are deprecated](https://protobuf.dev/programming-guides/encoding/#groups).
    pub fn wire_type(&self) -> WireType {
        match self {
            Kind::String | Kind::Bytes | Kind::Message(_) => WireType::LengthDelimited,
            Kind::Double | Kind::Fixed64 | Kind::Sfixed64 => WireType::SixtyFourBit,
            Kind::Float | Kind::Fixed32 | Kind::Sfixed32 => WireType::ThirtyTwoBit,
            Kind::Bool
            | Kind::Int32
            | Kind::Int64
            | Kind::Uint32
            | Kind::Uint64
            | Kind::Sint32
            | Kind::Sint64
            | Kind::Enum(_) => WireType::Varint,
        }
    }
    /// Returns the default value for the given protobuf type `kind`.
    ///
    /// Unlike [`FieldDescriptor::default_value`], this method does not
    /// look at field cardinality, so it will never return a list or map.
    pub fn default_value(&self) -> Value {
        match self {
            Kind::Bool => Value::Bool(false),
            Kind::Int32 | Kind::Sint32 | Kind::Sfixed32 => Value::I32(0),
            Kind::Int64 | Kind::Sint64 | Kind::Sfixed64 => Value::I64(0),
            Kind::Uint32 | Kind::Fixed32 => Value::U32(0),
            Kind::Uint64 | Kind::Fixed64 => Value::U64(0),
            Kind::Float => Value::F32(0.0),
            Kind::Double => Value::F64(0.0),
            Kind::String => Value::String(String::default()),
            Kind::Bytes => Value::Bytes(Bytes::default()),
            Kind::Message(message) => Value::Message(DynamicMessage::new(message.clone())),
            Kind::Enum(enumeration) => Value::EnumNumber(enumeration.default_value().number()),
        }
    }
}
impl fmt::Debug for Kind {
    /// Formats scalar kinds as their `.proto` keyword, and message/enum kinds
    /// as their fully-qualified name.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let keyword = match self {
            Self::Message(message) => return write!(f, "{}", message.full_name()),
            Self::Enum(enumeration) => return write!(f, "{}", enumeration.full_name()),
            Self::Double => "double",
            Self::Float => "float",
            Self::Int32 => "int32",
            Self::Int64 => "int64",
            Self::Uint32 => "uint32",
            Self::Uint64 => "uint64",
            Self::Sint32 => "sint32",
            Self::Sint64 => "sint64",
            Self::Fixed32 => "fixed32",
            Self::Fixed64 => "fixed64",
            Self::Sfixed32 => "sfixed32",
            Self::Sfixed64 => "sfixed64",
            Self::Bool => "bool",
            Self::String => "string",
            Self::Bytes => "bytes",
        };
        f.write_str(keyword)
    }
}
impl DescriptorPool {
    /// Creates a new, empty [`DescriptorPool`].
    ///
    /// For the common case of creating a `DescriptorPool` from a single [`FileDescriptorSet`], see
    /// [`DescriptorPool::from_file_descriptor_set`] or [`DescriptorPool::decode`].
    pub fn new() -> Self {
        DescriptorPool::default()
    }
    /// Creates a [`DescriptorPool`] from a [`FileDescriptorSet`].
    ///
    /// A file descriptor set may be generated by running the protobuf compiler with the
    /// `--descriptor_set_out` flag. If you are using [`prost-build`](https://crates.io/crates/prost-build),
    /// then [`Config::file_descriptor_set_path`](https://docs.rs/prost-build/latest/prost_build/struct.Config.html#method.file_descriptor_set_path)
    /// is a convenient way to generate it as part of your build.
    pub fn from_file_descriptor_set(
        file_descriptor_set: FileDescriptorSet,
    ) -> Result<Self, DescriptorError> {
        let mut pool = DescriptorPool::new();
        pool.add_file_descriptor_set(file_descriptor_set)?;
        Ok(pool)
    }
    /// Decodes and adds a set of file descriptors to the pool.
    ///
    /// A file descriptor set may be generated by running the protobuf compiler with the
    /// `--descriptor_set_out` flag. If you are using [`prost-build`](https://crates.io/crates/prost-build),
    /// then [`Config::file_descriptor_set_path`](https://docs.rs/prost-build/latest/prost_build/struct.Config.html#method.file_descriptor_set_path)
    /// is a convenient way to generate it as part of your build.
    ///
    /// Unlike when using [`DescriptorPool::from_file_descriptor_set`], any extension options
    /// defined in the file descriptors are preserved.
    ///
    /// # Errors
    ///
    /// Returns an error if the given bytes are not a valid protobuf-encoded file descriptor set, or if the descriptor set itself
    /// is invalid. When using a file descriptor set generated by the protobuf compiler, this method will always succeed.
    pub fn decode<B>(bytes: B) -> Result<Self, DescriptorError>
    where
        B: Buf,
    {
        // Decode into the crate's own descriptor types (rather than
        // prost-types) so unknown extension options survive the round-trip.
        let file_descriptor_set = types::FileDescriptorSet::decode(bytes).map_err(|err| {
            DescriptorError::new(vec![DescriptorErrorKind::DecodeFileDescriptorSet { err }])
        })?;
        let mut pool = DescriptorPool::new();
        pool.build_files(file_descriptor_set.file.into_iter())?;
        Ok(pool)
    }
    /// Adds a new [`FileDescriptorSet`] to this [`DescriptorPool`].
    ///
    /// A file descriptor set may be generated by running the protobuf compiler with the
    /// `--descriptor_set_out` flag. If you are using [`prost-build`](https://crates.io/crates/prost-build),
    /// then [`Config::file_descriptor_set_path`](https://docs.rs/prost-build/latest/prost_build/struct.Config.html#method.file_descriptor_set_path)
    /// is a convenient way to generate it as part of your build.
    ///
    /// Any duplicates of files already in the pool will be skipped. Note this may cause issues when trying to add two different versions of a file with the same name.
    ///
    /// # Errors
    ///
    /// Returns an error if the descriptor set is invalid, for example if it references types not yet added
    /// to the pool. When using a file descriptor set generated by the protobuf compiler, this method will
    /// always succeed.
    pub fn add_file_descriptor_set(
        &mut self,
        file_descriptor_set: FileDescriptorSet,
    ) -> Result<(), DescriptorError> {
        self.add_file_descriptor_protos(file_descriptor_set.file)
    }
    /// Adds a collection of file descriptors to this pool.
    ///
    /// The file descriptors may be provided in any order, however all types referenced must be defined
    /// either in one of the files provided, or in a file previously added to the pool.
    ///
    /// Any duplicates of files already in the pool will be skipped. Note this may cause issues when trying to add two different versions of a file with the same name.
    ///
    /// # Errors
    ///
    /// Returns an error if any of the given file descriptor is invalid, for example if they reference
    /// types not yet added to the pool.
    pub fn add_file_descriptor_protos<I>(&mut self, files: I) -> Result<(), DescriptorError>
    where
        I: IntoIterator<Item = FileDescriptorProto>,
    {
        self.build_files(
            files
                .into_iter()
                .map(types::FileDescriptorProto::from_prost),
        )
    }
    /// Add a single file descriptor to the pool.
    ///
    /// All types referenced by the file must be defined either in the file itself, or in a file
    /// previously added to the pool.
    ///
    /// If the file is a duplicate of a file already in the pool, it will be skipped. Note this may cause issues when trying to add two different versions of a file with the same name.
    ///
    /// # Errors
    ///
    /// Returns an error if the given file descriptor is invalid, for example if it references types not yet added
    /// to the pool.
    pub fn add_file_descriptor_proto(
        &mut self,
        file: FileDescriptorProto,
    ) -> Result<(), DescriptorError> {
        self.add_file_descriptor_protos(iter::once(file))
    }
    /// Decode and add a single file descriptor to the pool.
    ///
    /// All types referenced by the file must be defined either in the file itself, or in a file
    /// previously added to the pool.
    ///
    /// Unlike when using [`add_file_descriptor_proto()`][DescriptorPool::add_file_descriptor_proto], any extension options
    /// defined in the file descriptor are preserved.
    ///
    /// If the file is a duplicate of a file already in the pool, it will be skipped. Note this may cause issues when trying to add two different versions of a file with the same name.
    ///
    /// # Errors
    ///
    /// Returns an error if the given bytes are not a valid protobuf-encoded file descriptor, or if the file descriptor itself
    /// is invalid, for example if it references types not yet added to the pool.
    pub fn decode_file_descriptor_proto<B>(&mut self, bytes: B) -> Result<(), DescriptorError>
    where
        B: Buf,
    {
        let file = types::FileDescriptorProto::decode(bytes).map_err(|err| {
            DescriptorError::new(vec![DescriptorErrorKind::DecodeFileDescriptorSet { err }])
        })?;
        self.build_files(iter::once(file))
    }
    /// Decode and add a set of file descriptors to the pool.
    ///
    /// A file descriptor set may be generated by running the protobuf compiler with the
    /// `--descriptor_set_out` flag. If you are using [`prost-build`](https://crates.io/crates/prost-build),
    /// then [`Config::file_descriptor_set_path`](https://docs.rs/prost-build/latest/prost_build/struct.Config.html#method.file_descriptor_set_path)
    /// is a convenient way to generate it as part of your build.
    ///
    /// Unlike when using [`add_file_descriptor_set()`][DescriptorPool::add_file_descriptor_set], any extension options
    /// defined in the file descriptors are preserved.
    ///
    /// Any duplicates of files already in the pool will be skipped. Note this may cause issues when trying to add two different versions of a file with the same name.
    ///
    /// # Errors
    ///
    /// Returns an error if the given bytes are not a valid protobuf-encoded file descriptor set, or if the descriptor set itself
    /// is invalid. When using a file descriptor set generated by the protobuf compiler, this method will always succeed.
    pub fn decode_file_descriptor_set<B>(&mut self, bytes: B) -> Result<(), DescriptorError>
    where
        B: Buf,
    {
        let file = types::FileDescriptorSet::decode(bytes).map_err(|err| {
            DescriptorError::new(vec![DescriptorErrorKind::DecodeFileDescriptorSet { err }])
        })?;
        self.build_files(file.file)
    }
    /// Gets an iterator over the file descriptors added to this pool.
    pub fn files(&self) -> impl ExactSizeIterator<Item = FileDescriptor> + '_ {
        indices(&self.inner.files).map(|index| FileDescriptor {
            pool: self.clone(),
            index,
        })
    }
    /// Gets a file descriptor by its name, or `None` if no such file has been added.
    pub fn get_file_by_name(&self, name: &str) -> Option<FileDescriptor> {
        if let Some(&index) = self.inner.file_names.get(name) {
            Some(FileDescriptor {
                pool: self.clone(),
                index,
            })
        } else {
            None
        }
    }
    /// Gets an iterator over the raw [`FileDescriptorProto`] instances wrapped by this [`DescriptorPool`].
    pub fn file_descriptor_protos(
        &self,
    ) -> impl ExactSizeIterator<Item = &FileDescriptorProto> + '_ {
        indices(&self.inner.files).map(|index| &self.inner.files[index as usize].prost)
    }
    /// Encodes the files contained within this [`DescriptorPool`] to their byte representation.
    ///
    /// The encoded message is equivalent to a [`FileDescriptorSet`], however also includes
    /// any extension options that were defined.
    pub fn encode<B>(&self, buf: B) -> Result<(), EncodeError>
    where
        B: BufMut,
    {
        use prost::encoding::{encoded_len_varint, DecodeContext};
        // Borrowing wrapper that encodes the pool's files with the wire
        // layout of a `google.protobuf.FileDescriptorSet`, without cloning
        // the files into an owned message first.
        struct FileDescriptorSet<'a> {
            files: &'a [FileDescriptorInner],
        }
        impl fmt::Debug for FileDescriptorSet<'_> {
            fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
                f.debug_struct("FileDescriptorSet").finish_non_exhaustive()
            }
        }
        impl Message for FileDescriptorSet<'_> {
            fn encode_raw(&self, buf: &mut impl BufMut)
            where
                Self: Sized,
            {
                // Each file is encoded as one occurrence of the repeated
                // `file` field.
                for file in self.files {
                    encoding::message::encode(
                        tag::file_descriptor_set::FILE as u32,
                        &file.raw,
                        buf,
                    );
                }
            }
            fn encoded_len(&self) -> usize {
                // One key per file, plus each file's length-prefixed payload.
                encoding::key_len(tag::file_descriptor_set::FILE as u32) * self.files.len()
                    + self
                        .files
                        .iter()
                        .map(|f| &f.raw)
                        .map(Message::encoded_len)
                        .map(|len| len + encoded_len_varint(len as u64))
                        .sum::<usize>()
            }
            // This wrapper is encode-only; decoding is never performed on it.
            fn merge_field(
                &mut self,
                _: u32,
                _: WireType,
                _: &mut impl Buf,
                _: DecodeContext,
            ) -> Result<(), DecodeError>
            where
                Self: Sized,
            {
                unimplemented!()
            }
            fn clear(&mut self) {
                unimplemented!()
            }
        }
        let mut buf = buf;
        FileDescriptorSet {
            files: &self.inner.files,
        }
        .encode(&mut buf)
    }
    /// Encodes the files contained within this [`DescriptorPool`] to a newly allocated buffer.
    ///
    /// The encoded message is equivalent to a [`FileDescriptorSet`], however also includes
    /// any extension options that were defined.
    pub fn encode_to_vec(&self) -> Vec<u8> {
        let mut buf = Vec::new();
        self.encode(&mut buf).expect("vec should have capacity");
        buf
    }
    /// Gets an iterator over the services defined in these protobuf files.
    pub fn services(&self) -> impl ExactSizeIterator<Item = ServiceDescriptor> + '_ {
        indices(&self.inner.services).map(|index| ServiceDescriptor {
            pool: self.clone(),
            index,
        })
    }
    /// Gets an iterator over all message types defined in these protobuf files.
    ///
    /// The iterator includes nested messages defined in another message.
    pub fn all_messages(&self) -> impl ExactSizeIterator<Item = MessageDescriptor> + '_ {
        indices(&self.inner.messages).map(|index| MessageDescriptor {
            pool: self.clone(),
            index,
        })
    }
    /// Gets an iterator over all enum types defined in these protobuf files.
    ///
    /// The iterator includes nested enums defined in another message.
    pub fn all_enums(&self) -> impl ExactSizeIterator<Item = EnumDescriptor> + '_ {
        indices(&self.inner.enums).map(|index| EnumDescriptor {
            pool: self.clone(),
            index,
        })
    }
    /// Gets an iterator over all extension fields defined in these protobuf files.
    ///
    /// The iterator includes nested extension fields defined in another message.
    pub fn all_extensions(&self) -> impl ExactSizeIterator<Item = ExtensionDescriptor> + '_ {
        indices(&self.inner.extensions).map(|index| ExtensionDescriptor {
            pool: self.clone(),
            index,
        })
    }
    /// Gets a [`MessageDescriptor`] by its fully qualified name, for example `my.package.MessageName`.
    pub fn get_message_by_name(&self, name: &str) -> Option<MessageDescriptor> {
        match self.inner.get_by_name(name) {
            Some(&Definition {
                kind: DefinitionKind::Message(index),
                ..
            }) => Some(MessageDescriptor {
                pool: self.clone(),
                index,
            }),
            _ => None,
        }
    }
    /// Gets an [`EnumDescriptor`] by its fully qualified name, for example `my.package.EnumName`.
    pub fn get_enum_by_name(&self, name: &str) -> Option<EnumDescriptor> {
        match self.inner.get_by_name(name) {
            Some(&Definition {
                kind: DefinitionKind::Enum(index),
                ..
            }) => Some(EnumDescriptor {
                pool: self.clone(),
                index,
            }),
            _ => None,
        }
    }
    /// Gets an [`ExtensionDescriptor`] by its fully qualified name, for example `my.package.my_extension`.
    pub fn get_extension_by_name(&self, name: &str) -> Option<ExtensionDescriptor> {
        match self.inner.get_by_name(name) {
            Some(&Definition {
                kind: DefinitionKind::Extension(index),
                ..
            }) => Some(ExtensionDescriptor {
                pool: self.clone(),
                index,
            }),
            _ => None,
        }
    }
    /// Gets a [`ServiceDescriptor`] by its fully qualified name, for example `my.package.MyService`.
    pub fn get_service_by_name(&self, name: &str) -> Option<ServiceDescriptor> {
        match self.inner.get_by_name(name) {
            Some(&Definition {
                kind: DefinitionKind::Service(index),
                ..
            }) => Some(ServiceDescriptor {
                pool: self.clone(),
                index,
            }),
            _ => None,
        }
    }
}
impl fmt::Debug for DescriptorPool {
    /// Debug-formats the pool by listing its files, services, and all
    /// (including nested) messages, enums, and extensions.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let mut builder = f.debug_struct("DescriptorPool");
        builder.field("files", &debug_fmt_iter(self.files()));
        builder.field("services", &debug_fmt_iter(self.services()));
        builder.field("all_messages", &debug_fmt_iter(self.all_messages()));
        builder.field("all_enums", &debug_fmt_iter(self.all_enums()));
        builder.field("all_extensions", &debug_fmt_iter(self.all_extensions()));
        builder.finish()
    }
}
impl PartialEq for DescriptorPool {
    // Pools compare by identity: two pools are equal only if they share the
    // same underlying `Arc`, not merely if they contain the same files.
    fn eq(&self, other: &Self) -> bool {
        Arc::ptr_eq(&self.inner, &other.inner)
    }
}
impl Eq for DescriptorPool {}
impl FileDescriptor {
    /// Create a new [`FileDescriptor`] referencing the file at `index` within the given [`DescriptorPool`].
    ///
    /// # Panics
    ///
    /// Panics if `index` is out-of-bounds.
    pub fn new(descriptor_pool: DescriptorPool, index: usize) -> Self {
        debug_assert!(index < descriptor_pool.files().len());
        Self {
            pool: descriptor_pool,
            index: to_index(index),
        }
    }
    /// Gets a reference to the [`DescriptorPool`] this file is included in.
    pub fn parent_pool(&self) -> &DescriptorPool {
        &self.pool
    }
    /// Gets the unique name of this file relative to the root of the source tree,
    /// e.g. `path/to/my_package.proto`.
    pub fn name(&self) -> &str {
        self.inner().prost.name()
    }
    /// Gets the name of the package specifier for a file, e.g. `my.package`.
    ///
    /// Returns the empty string when no package is set.
    pub fn package_name(&self) -> &str {
        self.inner().prost.package()
    }
    /// Gets the index of this file within the parent [`DescriptorPool`].
    pub fn index(&self) -> usize {
        self.index as usize
    }
    /// Gets the syntax of this protobuf file.
    pub fn syntax(&self) -> Syntax {
        self.inner().syntax
    }
    /// Gets the dependencies of this file.
    ///
    /// This corresponds to the [`FileDescriptorProto::dependency`] field.
    pub fn dependencies(&self) -> impl ExactSizeIterator<Item = FileDescriptor> + '_ {
        let pool = self.parent_pool();
        let proto = self.file_descriptor_proto();
        proto
            .dependency
            .iter()
            .map(move |dependency| pool.get_file_by_name(dependency).expect("file not found"))
    }
    /// Gets the public dependencies of this file.
    ///
    /// This corresponds to the [`FileDescriptorProto::public_dependency`] field.
    pub fn public_dependencies(&self) -> impl ExactSizeIterator<Item = FileDescriptor> + '_ {
        let pool = self.parent_pool();
        let proto = self.file_descriptor_proto();
        // `public_dependency` holds indices into the `dependency` list.
        proto.public_dependency.iter().map(move |&dependency| {
            pool.get_file_by_name(&proto.dependency[dependency as usize])
                .expect("file not found")
        })
    }
    /// Gets the top-level message types defined within this file.
    ///
    /// Nested messages defined inside another message are not included.
    pub fn messages(&self) -> impl ExactSizeIterator<Item = MessageDescriptor> + '_ {
        let pool = self.parent_pool();
        let proto = self.file_descriptor_proto();
        proto.message_type.iter().map(move |message| {
            pool.get_message_by_name(join_name(proto.package(), message.name()).as_ref())
                .expect("message not found")
        })
    }
    /// Gets the top-level enum types defined within this file.
    ///
    /// Nested enums defined inside a message are not included.
    pub fn enums(&self) -> impl ExactSizeIterator<Item = EnumDescriptor> + '_ {
        let pool = self.parent_pool();
        let proto = self.file_descriptor_proto();
        proto.enum_type.iter().map(move |enumeration| {
            pool.get_enum_by_name(join_name(proto.package(), enumeration.name()).as_ref())
                .expect("enum not found")
        })
    }
    /// Gets the top-level extension fields defined within this file.
    ///
    /// Nested extensions defined inside a message are not included.
    pub fn extensions(&self) -> impl ExactSizeIterator<Item = ExtensionDescriptor> + '_ {
        let pool = self.parent_pool();
        let proto = self.file_descriptor_proto();
        proto.extension.iter().map(move |extension| {
            pool.get_extension_by_name(join_name(proto.package(), extension.name()).as_ref())
                .expect("extension not found")
        })
    }
    /// Gets the services defined within this file.
    pub fn services(&self) -> impl ExactSizeIterator<Item = ServiceDescriptor> + '_ {
        let pool = self.parent_pool();
        let proto = self.file_descriptor_proto();
        proto.service.iter().map(move |service| {
            pool.get_service_by_name(join_name(proto.package(), service.name()).as_ref())
                .expect("service not found")
        })
    }
    /// Gets a reference to the raw [`FileDescriptorProto`] wrapped by this [`FileDescriptor`].
    pub fn file_descriptor_proto(&self) -> &FileDescriptorProto {
        &self.inner().prost
    }
    /// Encodes this file descriptor to its byte representation.
    ///
    /// The encoded message is equivalent to a [`FileDescriptorProto`], however also includes
    /// any extension options that were defined.
    pub fn encode<B>(&self, mut buf: B) -> Result<(), EncodeError>
    where
        B: BufMut,
    {
        self.inner().raw.encode(&mut buf)
    }
    /// Encodes this file descriptor to a newly allocated buffer.
    ///
    /// The encoded message is equivalent to a [`FileDescriptorProto`], however also includes
    /// any extension options that were defined.
    pub fn encode_to_vec(&self) -> Vec<u8> {
        let mut buf = Vec::new();
        self.encode(&mut buf).expect("vec should have capacity");
        buf
    }
    /// Decodes the options defined for this [`FileDescriptor`], including any extension options.
    pub fn options(&self) -> DynamicMessage {
        decode_options(
            self.parent_pool(),
            "google.protobuf.FileOptions",
            &self.inner().raw.options,
        )
    }
    /// Shared access to the pool-internal data for this file.
    fn inner(&self) -> &FileDescriptorInner {
        &self.pool.inner.files[self.index as usize]
    }
}
impl fmt::Debug for FileDescriptor {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
f.debug_struct("FileDescriptor")
.field("name", &self.name())
.field("package_name", &self.package_name())
.finish()
}
}
impl MessageDescriptor {
/// Gets a reference to the [`DescriptorPool`] this message is defined in.
pub fn parent_pool(&self) -> &DescriptorPool {
&self.pool
}
/// Gets the [`FileDescriptor`] this message is defined in.
pub fn parent_file(&self) -> FileDescriptor {
FileDescriptor {
pool: self.pool.clone(),
index: self.inner().id.file,
}
}
/// Gets the parent message type if this message type is nested inside a another message, or `None` otherwise
pub fn parent_message(&self) -> Option<MessageDescriptor> {
self.inner().parent.map(|index| MessageDescriptor {
pool: self.pool.clone(),
index,
})
}
/// Gets the short name of the message type, e.g. `MyMessage`.
pub fn name(&self) -> &str {
self.inner().id.name()
}
/// Gets the full name of the message type, e.g. `my.package.MyMessage`.
pub fn full_name(&self) -> &str {
self.inner().id.full_name()
}
/// Gets the name of the package this message type is defined in, e.g. `my.package`.
///
/// If no package name is set, an empty string is returned.
pub fn package_name(&self) -> &str {
self.raw_file().package()
}
/// Gets the path where this message is defined within the [`FileDescriptorProto`][FileDescriptorProto], e.g. `[4, 0]`.
///
/// See [`path`][prost_types::source_code_info::Location::path] for more details on the structure of the path.
pub fn path(&self) -> &[i32] {
&self.inner().id.path
}
/// Gets a reference to the [`FileDescriptorProto`] in which this message is defined.
pub fn parent_file_descriptor_proto(&self) -> &FileDescriptorProto {
&self.pool.inner.files[self.inner().id.file as usize].prost
}
/// Gets a reference to the raw [`DescriptorProto`] wrapped by this [`MessageDescriptor`].
pub fn descriptor_proto(&self) -> &DescriptorProto {
find_message_proto_prost(self.parent_file_descriptor_proto(), self.path())
}
/// Decodes the options defined for this [`MessageDescriptor`], including any extension options.
pub fn options(&self) -> DynamicMessage {
decode_options(
self.parent_pool(),
"google.protobuf.MessageOptions",
&self.raw().options,
)
}
/// Gets an iterator yielding a [`FieldDescriptor`] for each field defined in this message.
pub fn fields(&self) -> impl ExactSizeIterator<Item = FieldDescriptor> + '_ {
self.inner()
.field_numbers
.values()
.map(|&index| FieldDescriptor {
message: self.clone(),
index,
})
}
pub(crate) fn fields_in_index_order(
&self,
) -> impl ExactSizeIterator<Item = FieldDescriptor> + '_ {
self.inner()
.fields
.iter()
.enumerate()
.map(|(index, _)| FieldDescriptor {
message: self.clone(),
index: index as u32,
})
}
/// Gets an iterator yielding a [`OneofDescriptor`] for each oneof field defined in this message.
pub fn oneofs(&self) -> impl ExactSizeIterator<Item = OneofDescriptor> + '_ {
indices(&self.inner().oneofs).map(|index| OneofDescriptor {
message: self.clone(),
index,
})
}
/// Gets the nested message types defined within this message.
pub fn child_messages(&self) -> impl ExactSizeIterator<Item = MessageDescriptor> + '_ {
let pool = self.parent_pool();
let namespace = self.full_name();
let raw_message = self.descriptor_proto();
raw_message.nested_type.iter().map(move |raw_message| {
pool.get_message_by_name(join_name(namespace, raw_message.name()).as_ref())
.expect("message not found")
})
}
/// Gets the nested enum types defined within this message.
pub fn child_enums(&self) -> impl ExactSizeIterator<Item = EnumDescriptor> + '_ {
let pool = self.parent_pool();
let namespace = self.full_name();
let raw_message = self.descriptor_proto();
raw_message.enum_type.iter().map(move |raw_enum| {
pool.get_enum_by_name(join_name(namespace, raw_enum.name()).as_ref())
.expect("enum not found")
})
}
/// Gets the nested extension fields defined within this message.
///
/// Note this only returns extensions defined nested within this message. See
/// [`MessageDescriptor::extensions`] to get fields defined anywhere that extend this message.
pub fn child_extensions(&self) -> impl ExactSizeIterator<Item = ExtensionDescriptor> + '_ {
let pool = self.parent_pool();
let namespace = self.full_name();
let raw_message = self.descriptor_proto();
raw_message.extension.iter().map(move |raw_extension| {
pool.get_extension_by_name(join_name(namespace, raw_extension.name()).as_ref())
.expect("extension not found")
})
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | true |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/descriptor/types.rs | prost-reflect/src/descriptor/types.rs | use std::fmt;
use prost::{
bytes::{Buf, BufMut},
encoding::{encode_key, skip_field, DecodeContext, WireType},
DecodeError, Message,
};
pub(crate) use prost_types::{
enum_descriptor_proto, field_descriptor_proto, uninterpreted_option, EnumOptions,
EnumValueOptions, ExtensionRangeOptions, FieldOptions, FileOptions, MessageOptions,
MethodOptions, OneofOptions, ServiceOptions, SourceCodeInfo, UninterpretedOption,
};
#[derive(Clone, PartialEq, Message)]
pub(crate) struct FileDescriptorSet {
#[prost(message, repeated, tag = "1")]
pub file: Vec<FileDescriptorProto>,
}
#[derive(Clone, PartialEq, Message)]
pub(crate) struct FileDescriptorProto {
#[prost(string, optional, tag = "1")]
pub name: Option<String>,
#[prost(string, optional, tag = "2")]
pub package: Option<String>,
#[prost(string, repeated, tag = "3")]
pub dependency: Vec<String>,
#[prost(int32, repeated, packed = "false", tag = "10")]
pub public_dependency: Vec<i32>,
#[prost(int32, repeated, packed = "false", tag = "11")]
pub weak_dependency: Vec<i32>,
#[prost(message, repeated, tag = "4")]
pub message_type: Vec<DescriptorProto>,
#[prost(message, repeated, tag = "5")]
pub(crate) enum_type: Vec<EnumDescriptorProto>,
#[prost(message, repeated, tag = "6")]
pub service: Vec<ServiceDescriptorProto>,
#[prost(message, repeated, tag = "7")]
pub extension: Vec<FieldDescriptorProto>,
#[prost(message, optional, tag = "8")]
pub options: Option<Options<FileOptions>>,
#[prost(message, optional, tag = "9")]
pub source_code_info: Option<SourceCodeInfo>,
#[prost(string, optional, tag = "12")]
pub syntax: Option<String>,
}
#[derive(Clone, PartialEq, Message)]
pub(crate) struct DescriptorProto {
#[prost(string, optional, tag = "1")]
pub name: Option<String>,
#[prost(message, repeated, tag = "2")]
pub field: Vec<FieldDescriptorProto>,
#[prost(message, repeated, tag = "6")]
pub extension: Vec<FieldDescriptorProto>,
#[prost(message, repeated, tag = "3")]
pub nested_type: Vec<DescriptorProto>,
#[prost(message, repeated, tag = "4")]
pub(crate) enum_type: Vec<EnumDescriptorProto>,
#[prost(message, repeated, tag = "5")]
pub extension_range: Vec<descriptor_proto::ExtensionRange>,
#[prost(message, repeated, tag = "8")]
pub oneof_decl: Vec<OneofDescriptorProto>,
#[prost(message, optional, tag = "7")]
pub options: Option<Options<MessageOptions>>,
#[prost(message, repeated, tag = "9")]
pub reserved_range: Vec<descriptor_proto::ReservedRange>,
#[prost(string, repeated, tag = "10")]
pub reserved_name: Vec<String>,
}
pub(crate) mod descriptor_proto {
pub(crate) use prost_types::descriptor_proto::ReservedRange;
use super::*;
#[derive(Clone, PartialEq, Message)]
pub(crate) struct ExtensionRange {
#[prost(int32, optional, tag = "1")]
pub start: Option<i32>,
#[prost(int32, optional, tag = "2")]
pub end: Option<i32>,
#[prost(message, optional, tag = "3")]
pub options: Option<Options<ExtensionRangeOptions>>,
}
}
#[derive(Clone, PartialEq, Message)]
pub(crate) struct FieldDescriptorProto {
#[prost(string, optional, tag = "1")]
pub name: Option<String>,
#[prost(int32, optional, tag = "3")]
pub number: Option<i32>,
#[prost(enumeration = "field_descriptor_proto::Label", optional, tag = "4")]
pub label: Option<i32>,
#[prost(enumeration = "field_descriptor_proto::Type", optional, tag = "5")]
pub r#type: Option<i32>,
#[prost(string, optional, tag = "6")]
pub type_name: Option<String>,
#[prost(string, optional, tag = "2")]
pub extendee: Option<String>,
#[prost(string, optional, tag = "7")]
pub default_value: Option<String>,
#[prost(int32, optional, tag = "9")]
pub oneof_index: Option<i32>,
#[prost(string, optional, tag = "10")]
pub json_name: Option<String>,
#[prost(message, optional, tag = "8")]
pub options: Option<Options<FieldOptions>>,
#[prost(bool, optional, tag = "17")]
pub proto3_optional: Option<bool>,
}
#[derive(Clone, PartialEq, Message)]
pub(crate) struct OneofDescriptorProto {
#[prost(string, optional, tag = "1")]
pub name: Option<String>,
#[prost(message, optional, tag = "2")]
pub options: Option<Options<OneofOptions>>,
}
#[derive(Clone, PartialEq, Message)]
pub(crate) struct EnumDescriptorProto {
#[prost(string, optional, tag = "1")]
pub name: Option<String>,
#[prost(message, repeated, tag = "2")]
pub value: Vec<EnumValueDescriptorProto>,
#[prost(message, optional, tag = "3")]
pub options: Option<Options<EnumOptions>>,
#[prost(message, repeated, tag = "4")]
pub reserved_range: Vec<enum_descriptor_proto::EnumReservedRange>,
#[prost(string, repeated, tag = "5")]
pub reserved_name: Vec<String>,
}
#[derive(Clone, PartialEq, Message)]
pub(crate) struct EnumValueDescriptorProto {
#[prost(string, optional, tag = "1")]
pub name: Option<String>,
#[prost(int32, optional, tag = "2")]
pub number: Option<i32>,
#[prost(message, optional, tag = "3")]
pub options: Option<Options<EnumValueOptions>>,
}
#[derive(Clone, PartialEq, Message)]
pub(crate) struct ServiceDescriptorProto {
#[prost(string, optional, tag = "1")]
pub name: Option<String>,
#[prost(message, repeated, tag = "2")]
pub method: Vec<MethodDescriptorProto>,
#[prost(message, optional, tag = "3")]
pub options: Option<Options<ServiceOptions>>,
}
#[derive(Clone, PartialEq, Message)]
pub(crate) struct MethodDescriptorProto {
#[prost(string, optional, tag = "1")]
pub name: Option<String>,
#[prost(string, optional, tag = "2")]
pub input_type: Option<String>,
#[prost(string, optional, tag = "3")]
pub output_type: Option<String>,
#[prost(message, optional, tag = "4")]
pub options: Option<Options<MethodOptions>>,
#[prost(bool, optional, tag = "5", default = "false")]
pub client_streaming: Option<bool>,
#[prost(bool, optional, tag = "6", default = "false")]
pub server_streaming: Option<bool>,
}
#[derive(Clone, Default, PartialEq)]
pub(crate) struct Options<T> {
pub(crate) encoded: Vec<u8>,
pub(crate) value: T,
}
impl FileDescriptorProto {
pub(crate) fn from_prost(file: prost_types::FileDescriptorProto) -> FileDescriptorProto {
FileDescriptorProto {
name: file.name,
package: file.package,
dependency: file.dependency,
public_dependency: file.public_dependency,
weak_dependency: file.weak_dependency,
message_type: file
.message_type
.into_iter()
.map(DescriptorProto::from_prost)
.collect(),
enum_type: file
.enum_type
.into_iter()
.map(EnumDescriptorProto::from_prost)
.collect(),
service: file
.service
.into_iter()
.map(ServiceDescriptorProto::from_prost)
.collect(),
extension: file
.extension
.into_iter()
.map(FieldDescriptorProto::from_prost)
.collect(),
options: file.options.map(Options::from_prost),
source_code_info: file.source_code_info,
syntax: file.syntax,
}
}
pub(crate) fn to_prost(&self) -> prost_types::FileDescriptorProto {
prost_types::FileDescriptorProto {
name: self.name.clone(),
package: self.package.clone(),
dependency: self.dependency.clone(),
public_dependency: self.public_dependency.clone(),
weak_dependency: self.weak_dependency.clone(),
message_type: self
.message_type
.iter()
.map(DescriptorProto::to_prost)
.collect(),
enum_type: self
.enum_type
.iter()
.map(EnumDescriptorProto::to_prost)
.collect(),
service: self
.service
.iter()
.map(ServiceDescriptorProto::to_prost)
.collect(),
extension: self
.extension
.iter()
.map(FieldDescriptorProto::to_prost)
.collect(),
options: self.options.as_ref().map(Options::to_prost),
source_code_info: self.source_code_info.clone(),
syntax: self.syntax.clone(),
}
}
}
impl DescriptorProto {
pub(crate) fn from_prost(file: prost_types::DescriptorProto) -> DescriptorProto {
DescriptorProto {
name: file.name,
field: file
.field
.into_iter()
.map(FieldDescriptorProto::from_prost)
.collect(),
extension: file
.extension
.into_iter()
.map(FieldDescriptorProto::from_prost)
.collect(),
nested_type: file
.nested_type
.into_iter()
.map(DescriptorProto::from_prost)
.collect(),
enum_type: file
.enum_type
.into_iter()
.map(EnumDescriptorProto::from_prost)
.collect(),
extension_range: file
.extension_range
.into_iter()
.map(descriptor_proto::ExtensionRange::from_prost)
.collect(),
oneof_decl: file
.oneof_decl
.into_iter()
.map(OneofDescriptorProto::from_prost)
.collect(),
options: file.options.map(Options::from_prost),
reserved_range: file.reserved_range,
reserved_name: file.reserved_name,
}
}
pub(crate) fn to_prost(&self) -> prost_types::DescriptorProto {
prost_types::DescriptorProto {
name: self.name.clone(),
field: self
.field
.iter()
.map(FieldDescriptorProto::to_prost)
.collect(),
extension: self
.extension
.iter()
.map(FieldDescriptorProto::to_prost)
.collect(),
nested_type: self
.nested_type
.iter()
.map(DescriptorProto::to_prost)
.collect(),
enum_type: self
.enum_type
.iter()
.map(EnumDescriptorProto::to_prost)
.collect(),
extension_range: self
.extension_range
.iter()
.map(descriptor_proto::ExtensionRange::to_prost)
.collect(),
oneof_decl: self
.oneof_decl
.iter()
.map(OneofDescriptorProto::to_prost)
.collect(),
options: self.options.as_ref().map(Options::to_prost),
reserved_range: self.reserved_range.clone(),
reserved_name: self.reserved_name.clone(),
}
}
}
impl FieldDescriptorProto {
pub(crate) fn from_prost(file: prost_types::FieldDescriptorProto) -> FieldDescriptorProto {
FieldDescriptorProto {
name: file.name,
number: file.number,
label: file.label,
r#type: file.r#type,
type_name: file.type_name,
extendee: file.extendee,
default_value: file.default_value,
oneof_index: file.oneof_index,
json_name: file.json_name,
options: file.options.map(Options::from_prost),
proto3_optional: file.proto3_optional,
}
}
pub(crate) fn to_prost(&self) -> prost_types::FieldDescriptorProto {
prost_types::FieldDescriptorProto {
name: self.name.clone(),
number: self.number,
label: self.label,
r#type: self.r#type,
type_name: self.type_name.clone(),
extendee: self.extendee.clone(),
default_value: self.default_value.clone(),
oneof_index: self.oneof_index,
json_name: self.json_name.clone(),
options: self.options.as_ref().map(Options::to_prost),
proto3_optional: self.proto3_optional,
}
}
}
impl OneofDescriptorProto {
pub(crate) fn from_prost(file: prost_types::OneofDescriptorProto) -> OneofDescriptorProto {
OneofDescriptorProto {
name: file.name,
options: file.options.map(Options::from_prost),
}
}
pub(crate) fn to_prost(&self) -> prost_types::OneofDescriptorProto {
prost_types::OneofDescriptorProto {
name: self.name.clone(),
options: self.options.as_ref().map(Options::to_prost),
}
}
}
impl descriptor_proto::ExtensionRange {
pub(crate) fn from_prost(
file: prost_types::descriptor_proto::ExtensionRange,
) -> descriptor_proto::ExtensionRange {
descriptor_proto::ExtensionRange {
start: file.start,
end: file.end,
options: file.options.map(Options::from_prost),
}
}
pub(crate) fn to_prost(&self) -> prost_types::descriptor_proto::ExtensionRange {
prost_types::descriptor_proto::ExtensionRange {
start: self.start,
end: self.end,
options: self.options.as_ref().map(Options::to_prost),
}
}
}
impl EnumDescriptorProto {
pub(crate) fn from_prost(file: prost_types::EnumDescriptorProto) -> EnumDescriptorProto {
EnumDescriptorProto {
name: file.name,
value: file
.value
.into_iter()
.map(EnumValueDescriptorProto::from_prost)
.collect(),
options: file.options.map(Options::from_prost),
reserved_range: file.reserved_range,
reserved_name: file.reserved_name,
}
}
pub(crate) fn to_prost(&self) -> prost_types::EnumDescriptorProto {
prost_types::EnumDescriptorProto {
name: self.name.clone(),
value: self
.value
.iter()
.map(EnumValueDescriptorProto::to_prost)
.collect(),
options: self.options.as_ref().map(Options::to_prost),
reserved_range: self.reserved_range.clone(),
reserved_name: self.reserved_name.clone(),
}
}
}
impl EnumValueDescriptorProto {
pub(crate) fn from_prost(
file: prost_types::EnumValueDescriptorProto,
) -> EnumValueDescriptorProto {
EnumValueDescriptorProto {
name: file.name,
number: file.number,
options: file.options.map(Options::from_prost),
}
}
pub(crate) fn to_prost(&self) -> prost_types::EnumValueDescriptorProto {
prost_types::EnumValueDescriptorProto {
name: self.name.clone(),
number: self.number,
options: self.options.as_ref().map(Options::to_prost),
}
}
}
impl ServiceDescriptorProto {
pub(crate) fn from_prost(file: prost_types::ServiceDescriptorProto) -> ServiceDescriptorProto {
ServiceDescriptorProto {
name: file.name,
method: file
.method
.into_iter()
.map(MethodDescriptorProto::from_prost)
.collect(),
options: file.options.map(Options::from_prost),
}
}
pub(crate) fn to_prost(&self) -> prost_types::ServiceDescriptorProto {
prost_types::ServiceDescriptorProto {
name: self.name.clone(),
method: self
.method
.iter()
.map(MethodDescriptorProto::to_prost)
.collect(),
options: self.options.as_ref().map(Options::to_prost),
}
}
}
impl MethodDescriptorProto {
pub(crate) fn from_prost(file: prost_types::MethodDescriptorProto) -> MethodDescriptorProto {
MethodDescriptorProto {
name: file.name,
input_type: file.input_type,
output_type: file.output_type,
options: file.options.map(Options::from_prost),
client_streaming: file.client_streaming,
server_streaming: file.server_streaming,
}
}
pub(crate) fn to_prost(&self) -> prost_types::MethodDescriptorProto {
prost_types::MethodDescriptorProto {
name: self.name.clone(),
input_type: self.input_type.clone(),
output_type: self.output_type.clone(),
options: self.options.as_ref().map(Options::to_prost),
client_streaming: self.client_streaming,
server_streaming: self.server_streaming,
}
}
}
impl<T> Options<T>
where
T: Message + Clone,
{
pub(crate) fn from_prost(options: T) -> Self {
Options {
encoded: options.encode_to_vec(),
value: options,
}
}
fn to_prost(&self) -> T {
self.value.clone()
}
}
impl<T> fmt::Debug for Options<T>
where
T: fmt::Debug,
{
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
self.value.fmt(f)
}
}
impl<T> Message for Options<T>
where
T: Message + Default,
{
fn encode_raw(&self, buf: &mut impl BufMut)
where
Self: Sized,
{
buf.put(self.encoded.as_slice());
}
fn merge_field(
&mut self,
tag: u32,
wire_type: WireType,
buf: &mut impl Buf,
ctx: DecodeContext,
) -> Result<(), DecodeError>
where
Self: Sized,
{
struct CopyBufAdapter<'a, B> {
dest: &'a mut Vec<u8>,
src: &'a mut B,
}
impl<B> Buf for CopyBufAdapter<'_, B>
where
B: Buf,
{
fn advance(&mut self, cnt: usize) {
self.dest.put((&mut self.src).take(cnt));
}
fn chunk(&self) -> &[u8] {
self.src.chunk()
}
fn remaining(&self) -> usize {
self.src.remaining()
}
}
encode_key(tag, wire_type, &mut self.encoded);
let start = self.encoded.len();
skip_field(
wire_type,
tag,
&mut CopyBufAdapter {
dest: &mut self.encoded,
src: buf,
},
ctx.clone(),
)?;
self.value
.merge_field(tag, wire_type, &mut &self.encoded[start..], ctx)?;
Ok(())
}
fn encoded_len(&self) -> usize {
self.encoded.len()
}
fn clear(&mut self) {
self.encoded.clear();
self.value.clear();
}
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/descriptor/mod.rs | prost-reflect/src/descriptor/mod.rs | mod api;
mod build;
mod error;
mod global;
mod tag;
#[cfg(test)]
mod tests;
pub(crate) mod types;
pub use self::error::DescriptorError;
use self::types::{DescriptorProto, EnumDescriptorProto};
use std::{
collections::{BTreeMap, HashMap, HashSet},
convert::TryInto,
fmt,
ops::Range,
sync::Arc,
};
use crate::{descriptor::types::FileDescriptorProto, Value};
pub(crate) const MAP_ENTRY_KEY_NUMBER: u32 = 1;
pub(crate) const MAP_ENTRY_VALUE_NUMBER: u32 = 2;
pub(crate) const RESERVED_MESSAGE_FIELD_NUMBERS: Range<i32> = 19_000..20_000;
pub(crate) const VALID_MESSAGE_FIELD_NUMBERS: Range<i32> = 1..536_870_912;
/// Cardinality determines whether a field is optional, required, or repeated.
#[derive(Debug, Clone, Copy, PartialEq, Eq, PartialOrd, Ord, Hash)]
pub enum Cardinality {
/// The field appears zero or one times.
Optional,
/// The field appears exactly one time. This cardinality is invalid with Proto3.
Required,
/// The field appears zero or more times.
Repeated,
}
/// The syntax of a proto file.
#[derive(Copy, Clone, PartialEq, Eq, Hash)]
pub enum Syntax {
/// The `proto2` syntax.
Proto2,
/// The `proto3` syntax.
Proto3,
}
/// The type of a protobuf message field.
#[derive(Clone, PartialEq, Eq)]
pub enum Kind {
/// The protobuf `double` type.
Double,
/// The protobuf `float` type.
Float,
/// The protobuf `int32` type.
Int32,
/// The protobuf `int64` type.
Int64,
/// The protobuf `uint32` type.
Uint32,
/// The protobuf `uint64` type.
Uint64,
/// The protobuf `sint32` type.
Sint32,
/// The protobuf `sint64` type.
Sint64,
/// The protobuf `fixed32` type.
Fixed32,
/// The protobuf `fixed64` type.
Fixed64,
/// The protobuf `sfixed32` type.
Sfixed32,
/// The protobuf `sfixed64` type.
Sfixed64,
/// The protobuf `bool` type.
Bool,
/// The protobuf `string` type.
String,
/// The protobuf `bytes` type.
Bytes,
/// A protobuf message type.
Message(MessageDescriptor),
/// A protobuf enum type.
Enum(EnumDescriptor),
}
#[derive(Copy, Clone)]
enum KindIndex {
Double,
Float,
Int32,
Int64,
Uint32,
Uint64,
Sint32,
Sint64,
Fixed32,
Fixed64,
Sfixed32,
Sfixed64,
Bool,
String,
Bytes,
Message(MessageIndex),
Enum(EnumIndex),
Group(MessageIndex),
}
type DescriptorIndex = u32;
type FileIndex = DescriptorIndex;
type ServiceIndex = DescriptorIndex;
type MethodIndex = DescriptorIndex;
type MessageIndex = DescriptorIndex;
type FieldIndex = DescriptorIndex;
type OneofIndex = DescriptorIndex;
type ExtensionIndex = DescriptorIndex;
type EnumIndex = DescriptorIndex;
type EnumValueIndex = DescriptorIndex;
/// A `DescriptorPool` is a collection of related descriptors. Typically it will be created from
/// a [`FileDescriptorSet`][prost_types::FileDescriptorSet] output by the protobuf compiler
/// (see [`DescriptorPool::from_file_descriptor_set`]) but it may also be built up by adding files individually.
///
/// Methods like [`MessageDescriptor::extensions`] will be scoped to just the files contained within the parent
/// `DescriptorPool`.
///
/// This type uses reference counting internally so it is cheap to clone. Modifying an instance of a
/// pool will not update any existing clones of the instance.
#[derive(Clone, Default)]
pub struct DescriptorPool {
inner: Arc<DescriptorPoolInner>,
}
#[derive(Clone, Default)]
struct DescriptorPoolInner {
names: HashMap<Box<str>, Definition>,
file_names: HashMap<Box<str>, FileIndex>,
files: Vec<FileDescriptorInner>,
messages: Vec<MessageDescriptorInner>,
enums: Vec<EnumDescriptorInner>,
extensions: Vec<ExtensionDescriptorInner>,
services: Vec<ServiceDescriptorInner>,
}
#[derive(Clone)]
struct Identity {
file: FileIndex,
path: Box<[i32]>,
full_name: Box<str>,
name_index: usize,
}
#[derive(Clone, Debug)]
struct Definition {
file: FileIndex,
path: Box<[i32]>,
kind: DefinitionKind,
}
#[derive(Copy, Clone, Debug)]
enum DefinitionKind {
Package,
Message(MessageIndex),
Field(MessageIndex),
Oneof(MessageIndex),
Service(ServiceIndex),
Method(ServiceIndex),
Enum(EnumIndex),
EnumValue(EnumIndex),
Extension(ExtensionIndex),
}
/// A single source file containing protobuf messages and services.
#[derive(Clone, PartialEq, Eq)]
pub struct FileDescriptor {
pool: DescriptorPool,
index: FileIndex,
}
#[derive(Clone)]
struct FileDescriptorInner {
syntax: Syntax,
raw: FileDescriptorProto,
prost: prost_types::FileDescriptorProto,
dependencies: Vec<FileIndex>,
transitive_dependencies: HashSet<FileIndex>,
}
/// A protobuf message definition.
#[derive(Clone, PartialEq, Eq)]
pub struct MessageDescriptor {
pool: DescriptorPool,
index: MessageIndex,
}
#[derive(Clone)]
struct MessageDescriptorInner {
id: Identity,
parent: Option<MessageIndex>,
extensions: Vec<ExtensionIndex>,
fields: Vec<FieldDescriptorInner>,
field_numbers: BTreeMap<u32, FieldIndex>,
field_names: HashMap<Box<str>, FieldIndex>,
field_json_names: HashMap<Box<str>, FieldIndex>,
oneofs: Vec<OneofDescriptorInner>,
}
/// A oneof field in a protobuf message.
#[derive(Clone, PartialEq, Eq)]
pub struct OneofDescriptor {
message: MessageDescriptor,
index: OneofIndex,
}
#[derive(Clone)]
struct OneofDescriptorInner {
id: Identity,
fields: Vec<FieldIndex>,
}
/// A protobuf message definition.
#[derive(Clone, PartialEq, Eq)]
pub struct FieldDescriptor {
message: MessageDescriptor,
index: FieldIndex,
}
#[derive(Clone)]
struct FieldDescriptorInner {
id: Identity,
number: u32,
json_name: Box<str>,
kind: KindIndex,
oneof: Option<OneofIndex>,
is_packed: bool,
supports_presence: bool,
cardinality: Cardinality,
default: Option<Value>,
}
/// A protobuf extension field definition.
#[derive(Clone, PartialEq, Eq)]
pub struct ExtensionDescriptor {
pool: DescriptorPool,
index: ExtensionIndex,
}
#[derive(Clone)]
pub struct ExtensionDescriptorInner {
id: Identity,
parent: Option<MessageIndex>,
number: u32,
json_name: Box<str>,
extendee: MessageIndex,
kind: KindIndex,
is_packed: bool,
cardinality: Cardinality,
default: Option<Value>,
}
/// A protobuf enum type.
#[derive(Clone, PartialEq, Eq)]
pub struct EnumDescriptor {
pool: DescriptorPool,
index: EnumIndex,
}
#[derive(Clone)]
struct EnumDescriptorInner {
id: Identity,
parent: Option<MessageIndex>,
values: Vec<EnumValueDescriptorInner>,
value_numbers: Vec<(i32, EnumValueIndex)>,
value_names: HashMap<Box<str>, EnumValueIndex>,
allow_alias: bool,
}
/// A value in a protobuf enum type.
#[derive(Clone, PartialEq, Eq)]
pub struct EnumValueDescriptor {
parent: EnumDescriptor,
index: EnumValueIndex,
}
#[derive(Clone)]
struct EnumValueDescriptorInner {
id: Identity,
number: i32,
}
/// A protobuf service definition.
#[derive(Clone, PartialEq, Eq)]
pub struct ServiceDescriptor {
pool: DescriptorPool,
index: ServiceIndex,
}
#[derive(Clone)]
struct ServiceDescriptorInner {
id: Identity,
methods: Vec<MethodDescriptorInner>,
}
/// A method definition for a [`ServiceDescriptor`].
#[derive(Clone, PartialEq, Eq)]
pub struct MethodDescriptor {
service: ServiceDescriptor,
index: MethodIndex,
}
#[derive(Clone)]
struct MethodDescriptorInner {
id: Identity,
input: MessageIndex,
output: MessageIndex,
}
impl Identity {
fn new(file: FileIndex, path: &[i32], full_name: &str, name: &str) -> Identity {
debug_assert!(full_name.ends_with(name));
let name_index = full_name.len() - name.len();
debug_assert!(name_index == 0 || full_name.as_bytes()[name_index - 1] == b'.');
Identity {
file,
path: path.into(),
full_name: full_name.into(),
name_index,
}
}
fn full_name(&self) -> &str {
&self.full_name
}
fn name(&self) -> &str {
&self.full_name[self.name_index..]
}
}
impl KindIndex {
fn is_packable(&self) -> bool {
match self {
KindIndex::Double
| KindIndex::Float
| KindIndex::Int32
| KindIndex::Int64
| KindIndex::Uint32
| KindIndex::Uint64
| KindIndex::Sint32
| KindIndex::Sint64
| KindIndex::Fixed32
| KindIndex::Fixed64
| KindIndex::Sfixed32
| KindIndex::Sfixed64
| KindIndex::Bool
| KindIndex::Enum(_) => true,
KindIndex::String | KindIndex::Bytes | KindIndex::Message(_) | KindIndex::Group(_) => {
false
}
}
}
fn is_message(&self) -> bool {
matches!(self, KindIndex::Message(_) | KindIndex::Group(_))
}
}
impl fmt::Debug for KindIndex {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
match self {
KindIndex::Double => write!(f, "double"),
KindIndex::Float => write!(f, "float"),
KindIndex::Int32 => write!(f, "int32"),
KindIndex::Int64 => write!(f, "int64"),
KindIndex::Uint32 => write!(f, "uint32"),
KindIndex::Uint64 => write!(f, "uint64"),
KindIndex::Sint32 => write!(f, "sint32"),
KindIndex::Sint64 => write!(f, "sint64"),
KindIndex::Fixed32 => write!(f, "fixed32"),
KindIndex::Fixed64 => write!(f, "fixed64"),
KindIndex::Sfixed32 => write!(f, "sfixed32"),
KindIndex::Sfixed64 => write!(f, "sfixed64"),
KindIndex::Bool => write!(f, "bool"),
KindIndex::String => write!(f, "string"),
KindIndex::Bytes => write!(f, "bytes"),
KindIndex::Message(_) | KindIndex::Group(_) => write!(f, "message"),
KindIndex::Enum(_) => write!(f, "enum"),
}
}
}
impl DefinitionKind {
fn is_parent(&self) -> bool {
match self {
DefinitionKind::Package => true,
DefinitionKind::Message(_) => true,
DefinitionKind::Field(_) => false,
DefinitionKind::Oneof(_) => false,
DefinitionKind::Service(_) => true,
DefinitionKind::Method(_) => false,
DefinitionKind::Enum(_) => true,
DefinitionKind::EnumValue(_) => false,
DefinitionKind::Extension(_) => false,
}
}
}
impl DescriptorPoolInner {
fn get_by_name(&self, name: &str) -> Option<&Definition> {
let name = name.strip_prefix('.').unwrap_or(name);
self.names.get(name)
}
}
fn to_index(i: usize) -> DescriptorIndex {
i.try_into().expect("index too large")
}
fn find_message_proto<'a>(file: &'a FileDescriptorProto, path: &[i32]) -> &'a DescriptorProto {
debug_assert_ne!(path.len(), 0);
debug_assert_eq!(path.len() % 2, 0);
let mut message: Option<&'a types::DescriptorProto> = None;
for part in path.chunks(2) {
match part[0] {
tag::file::MESSAGE_TYPE => message = Some(&file.message_type[part[1] as usize]),
tag::message::NESTED_TYPE => {
message = Some(&message.unwrap().nested_type[part[1] as usize])
}
_ => panic!("invalid message path"),
}
}
message.unwrap()
}
fn find_enum_proto<'a>(file: &'a FileDescriptorProto, path: &[i32]) -> &'a EnumDescriptorProto {
debug_assert_ne!(path.len(), 0);
debug_assert_eq!(path.len() % 2, 0);
if path.len() == 2 {
debug_assert_eq!(path[0], tag::file::ENUM_TYPE);
&file.enum_type[path[1] as usize]
} else {
let message = find_message_proto(file, &path[..path.len() - 2]);
debug_assert_eq!(path[path.len() - 2], tag::message::ENUM_TYPE);
&message.enum_type[path[path.len() - 1] as usize]
}
}
#[test]
fn assert_descriptor_send_sync() {
fn test_send_sync<T: Send + Sync>() {}
test_send_sync::<DescriptorPool>();
test_send_sync::<Kind>();
test_send_sync::<DescriptorError>();
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/descriptor/tag.rs | prost-reflect/src/descriptor/tag.rs | #![allow(dead_code)]
pub(crate) const UNINTERPRETED_OPTION: i32 = 999;
pub(crate) mod file_descriptor_set {
pub(crate) const FILE: i32 = 1;
}
pub(crate) mod file {
pub(crate) const PACKAGE: i32 = 2;
pub(crate) const DEPENDENCY: i32 = 3;
pub(crate) const PUBLIC_DEPENDENCY: i32 = 10;
pub(crate) const WEAK_DEPENDENCY: i32 = 11;
pub(crate) const MESSAGE_TYPE: i32 = 4;
pub(crate) const ENUM_TYPE: i32 = 5;
pub(crate) const SERVICE: i32 = 6;
pub(crate) const EXTENSION: i32 = 7;
pub(crate) const OPTIONS: i32 = 8;
pub(crate) const SYNTAX: i32 = 12;
pub(crate) mod options {
pub(crate) const JAVA_PACKAGE: i32 = 1;
pub(crate) const JAVA_OUTER_CLASSNAME: i32 = 8;
pub(crate) const JAVA_MULTIPLE_FILES: i32 = 10;
pub(crate) const JAVA_GENERATE_EQUALS_AND_HASH: i32 = 20;
pub(crate) const JAVA_STRING_CHECK_UTF8: i32 = 27;
pub(crate) const OPTIMIZE_FOR: i32 = 9;
pub(crate) const GO_PACKAGE: i32 = 11;
pub(crate) const CC_GENERIC_SERVICES: i32 = 16;
pub(crate) const JAVA_GENERIC_SERVICES: i32 = 17;
pub(crate) const PY_GENERIC_SERVICES: i32 = 18;
pub(crate) const PHP_GENERIC_SERVICES: i32 = 42;
pub(crate) const DEPRECATED: i32 = 23;
pub(crate) const CC_ENABLE_ARENAS: i32 = 31;
pub(crate) const OBJC_CLASS_PREFIX: i32 = 36;
pub(crate) const CSHARP_NAMESPACE: i32 = 37;
pub(crate) const SWIFT_PREFIX: i32 = 39;
pub(crate) const PHP_CLASS_PREFIX: i32 = 40;
pub(crate) const PHP_NAMESPACE: i32 = 41;
pub(crate) const PHP_METADATA_NAMESPACE: i32 = 44;
pub(crate) const RUBY_PACKAGE: i32 = 45;
pub(crate) const FILE_UNINTERPRETED_OPTION: i32 = 999;
}
}
pub(crate) mod message {
pub(crate) const NAME: i32 = 1;
pub(crate) const FIELD: i32 = 2;
pub(crate) const EXTENSION: i32 = 6;
pub(crate) const NESTED_TYPE: i32 = 3;
pub(crate) const ENUM_TYPE: i32 = 4;
pub(crate) const EXTENSION_RANGE: i32 = 5;
pub(crate) const OPTIONS: i32 = 7;
pub(crate) const ONEOF_DECL: i32 = 8;
pub(crate) const RESERVED_RANGE: i32 = 9;
pub(crate) const RESERVED_NAME: i32 = 10;
pub(crate) mod extension_range {
pub(crate) const START: i32 = 1;
pub(crate) const END: i32 = 2;
pub(crate) const OPTIONS: i32 = 3;
}
pub(crate) mod reserved_range {
pub(crate) const START: i32 = 1;
pub(crate) const END: i32 = 2;
}
pub(crate) mod options {
pub(crate) const MESSAGE_SET_WIRE_FORMAT: i32 = 1;
pub(crate) const NO_STANDARD_DESCRIPTOR_ACCESSOR: i32 = 2;
pub(crate) const DEPRECATED: i32 = 3;
pub(crate) const MAP_ENTRY: i32 = 7;
pub(crate) const UNINTERPRETED_OPTION: i32 = 999;
}
}
pub(crate) mod field {
pub(crate) const NAME: i32 = 1;
pub(crate) const EXTENDEE: i32 = 2;
pub(crate) const NUMBER: i32 = 3;
pub(crate) const LABEL: i32 = 4;
pub(crate) const TYPE: i32 = 5;
pub(crate) const TYPE_NAME: i32 = 6;
pub(crate) const DEFAULT_VALUE: i32 = 7;
pub(crate) const JSON_NAME: i32 = 10;
pub(crate) const OPTIONS: i32 = 8;
pub(crate) mod options {
pub(crate) const CTYPE: i32 = 1;
pub(crate) const PACKED: i32 = 2;
pub(crate) const JSTYPE: i32 = 6;
pub(crate) const LAZY: i32 = 5;
pub(crate) const DEPRECATED: i32 = 3;
pub(crate) const WEAK: i32 = 10;
pub(crate) const UNINTERPRETED_OPTION: i32 = 999;
}
}
pub(crate) mod oneof {
pub(crate) const NAME: i32 = 1;
pub(crate) const OPTIONS: i32 = 2;
}
pub(crate) mod enum_ {
pub(crate) const NAME: i32 = 1;
pub(crate) const VALUE: i32 = 2;
pub(crate) const OPTIONS: i32 = 3;
pub(crate) const RESERVED_RANGE: i32 = 4;
pub(crate) const RESERVED_NAME: i32 = 5;
pub(crate) mod reserved_range {
pub(crate) const START: i32 = 1;
pub(crate) const END: i32 = 2;
}
pub(crate) mod options {
pub(crate) const ALLOW_ALIAS: i32 = 2;
pub(crate) const DEPRECATED: i32 = 3;
pub(crate) const UNINTERPRETED_OPTION: i32 = 999;
}
}
pub(crate) mod enum_value {
pub(crate) const NAME: i32 = 1;
pub(crate) const NUMBER: i32 = 2;
pub(crate) const OPTIONS: i32 = 3;
pub(crate) mod options {
pub(crate) const DEPRECATED: i32 = 1;
pub(crate) const UNINTERPRETED_OPTION: i32 = 999;
}
}
pub(crate) mod service {
pub(crate) const NAME: i32 = 1;
pub(crate) const METHOD: i32 = 2;
pub(crate) const OPTIONS: i32 = 3;
pub(crate) mod options {
pub(crate) const DEPRECATED: i32 = 33;
pub(crate) const UNINTERPRETED_OPTION: i32 = 999;
}
}
pub(crate) mod method {
pub(crate) const NAME: i32 = 1;
pub(crate) const INPUT_TYPE: i32 = 2;
pub(crate) const OUTPUT_TYPE: i32 = 3;
pub(crate) const OPTIONS: i32 = 4;
pub(crate) const CLIENT_STREAMING: i32 = 5;
pub(crate) const SERVER_STREAMING: i32 = 6;
pub(crate) mod options {
pub(crate) const DEPRECATED: i32 = 33;
pub(crate) const IDEMPOTENCY_LEVEL: i32 = 34;
pub(crate) const UNINTERPRETED_OPTION: i32 = 999;
}
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/descriptor/build/names.rs | prost-reflect/src/descriptor/build/names.rs | use std::collections::{hash_map, BTreeMap, HashMap, HashSet};
use crate::{
descriptor::{
build::{
join_path,
options::option_to_bool,
visit::{visit, Visitor},
DescriptorPoolOffsets,
},
error::{DescriptorError, DescriptorErrorKind, Label},
tag, to_index,
types::{
DescriptorProto, EnumDescriptorProto, EnumValueDescriptorProto, FieldDescriptorProto,
FileDescriptorProto, MethodDescriptorProto, OneofDescriptorProto,
ServiceDescriptorProto,
},
Definition, DefinitionKind, DescriptorPoolInner, EnumDescriptorInner, EnumIndex,
EnumValueDescriptorInner, EnumValueIndex, ExtensionIndex, FieldIndex, FileDescriptorInner,
FileIndex, Identity, MessageDescriptorInner, MessageIndex, MethodIndex,
OneofDescriptorInner, OneofIndex, ServiceIndex,
},
Syntax,
};
impl DescriptorPoolInner {
    /// Walks every definition in `files` and registers its fully-qualified
    /// name in this pool's name table.
    ///
    /// All problems found during the walk (duplicate names, unknown syntax,
    /// empty enums, ...) are collected and reported together as a single
    /// [`DescriptorError`].
    pub(super) fn collect_names(
        &mut self,
        offsets: DescriptorPoolOffsets,
        files: &[FileDescriptorProto],
    ) -> Result<(), DescriptorError> {
        let mut collector = NameVisitor {
            pool: self,
            errors: Vec::new(),
        };
        visit(offsets, files, &mut collector);
        match collector.errors {
            errors if errors.is_empty() => Ok(()),
            errors => Err(DescriptorError::new(errors)),
        }
    }
}
/// Visitor that records the fully-qualified name of every definition into
/// the pool, collecting any errors encountered along the way.
struct NameVisitor<'a> {
    // Pool being populated; names are inserted into `pool.names`.
    pool: &'a mut DescriptorPoolInner,
    // Accumulated errors, reported together once the walk completes.
    errors: Vec<DescriptorErrorKind>,
}
impl Visitor for NameVisitor<'_> {
fn visit_file(&mut self, path: &[i32], index: FileIndex, file: &FileDescriptorProto) {
debug_assert_eq!(to_index(self.pool.files.len()), index);
let syntax = match file.syntax.as_deref() {
None | Some("proto2") => Syntax::Proto2,
Some("proto3") => Syntax::Proto3,
Some(syntax) => {
self.errors.push(DescriptorErrorKind::UnknownSyntax {
syntax: syntax.to_owned(),
found: Label::new(
&self.pool.files,
"found here",
index,
join_path(path, &[tag::file::SYNTAX]),
),
});
return;
}
};
if self
.pool
.file_names
.insert(file.name().into(), index)
.is_some()
{
self.errors.push(DescriptorErrorKind::DuplicateFileName {
name: file.name().to_owned(),
});
}
self.pool.files.push(FileDescriptorInner {
syntax,
raw: file.clone(),
prost: Default::default(), // the prost descriptor is initialized from the internal descriptor once resolution is complete, to avoid needing to duplicate all modifications
dependencies: Vec::with_capacity(file.dependency.len()),
transitive_dependencies: HashSet::default(),
});
if !file.package().is_empty() {
for (i, _) in file.package().match_indices('.') {
self.add_name(
index,
&file.package()[..i],
path,
&[tag::file::PACKAGE],
DefinitionKind::Package,
);
}
self.add_name(
index,
file.package(),
path,
&[tag::file::PACKAGE],
DefinitionKind::Package,
);
}
}
fn visit_message(
&mut self,
path: &[i32],
full_name: &str,
file: FileIndex,
parent: Option<MessageIndex>,
index: MessageIndex,
message: &DescriptorProto,
) {
self.add_name(
file,
full_name,
path,
&[tag::message::NAME],
DefinitionKind::Message(index),
);
debug_assert_eq!(to_index(self.pool.messages.len()), index);
self.pool.messages.push(MessageDescriptorInner {
id: Identity::new(file, path, full_name, message.name()),
fields: Vec::with_capacity(message.field.len()),
field_numbers: BTreeMap::new(),
field_names: HashMap::with_capacity(message.field.len()),
field_json_names: HashMap::with_capacity(message.field.len()),
oneofs: Vec::with_capacity(message.oneof_decl.len()),
extensions: Vec::new(),
parent,
});
if self.pool.files[file as usize].syntax != Syntax::Proto2 {
self.check_message_field_camel_case_names(file, path, message);
}
}
fn visit_field(
&mut self,
path: &[i32],
full_name: &str,
file: FileIndex,
message: MessageIndex,
_: FieldIndex,
_: &FieldDescriptorProto,
) {
self.add_name(
file,
full_name,
path,
&[tag::field::NAME],
DefinitionKind::Field(message),
);
}
fn visit_oneof(
&mut self,
path: &[i32],
full_name: &str,
file: FileIndex,
message: MessageIndex,
index: OneofIndex,
oneof: &OneofDescriptorProto,
) {
self.add_name(
file,
full_name,
path,
&[tag::oneof::NAME],
DefinitionKind::Oneof(message),
);
debug_assert_eq!(
to_index(self.pool.messages[message as usize].oneofs.len()),
index
);
self.pool.messages[message as usize]
.oneofs
.push(OneofDescriptorInner {
id: Identity::new(file, path, full_name, oneof.name()),
fields: Vec::new(),
});
}
fn visit_service(
&mut self,
path: &[i32],
full_name: &str,
file: FileIndex,
index: ServiceIndex,
_: &ServiceDescriptorProto,
) {
self.add_name(
file,
full_name,
path,
&[tag::service::NAME],
DefinitionKind::Service(index),
);
}
fn visit_method(
&mut self,
path: &[i32],
full_name: &str,
file: FileIndex,
service: ServiceIndex,
_: MethodIndex,
_: &MethodDescriptorProto,
) {
self.add_name(
file,
full_name,
path,
&[tag::service::NAME],
DefinitionKind::Method(service),
);
}
fn visit_enum(
&mut self,
path: &[i32],
full_name: &str,
file: FileIndex,
parent: Option<MessageIndex>,
index: EnumIndex,
enum_: &EnumDescriptorProto,
) {
self.add_name(
file,
full_name,
path,
&[tag::enum_::NAME],
DefinitionKind::Enum(index),
);
if enum_.value.is_empty() {
self.errors.push(DescriptorErrorKind::EmptyEnum {
found: Label::new(&self.pool.files, "enum defined here", file, path.into()),
});
} else if self.pool.files[file as usize].syntax != Syntax::Proto2
&& enum_.value[0].number() != 0
{
self.errors
.push(DescriptorErrorKind::InvalidProto3EnumDefault {
found: Label::new(
&self.pool.files,
"defined here",
file,
join_path(path, &[tag::enum_::VALUE, 0, tag::enum_value::NUMBER]),
),
});
}
let allow_alias = enum_.options.as_ref().is_some_and(|o| {
o.value.allow_alias()
|| o.value.uninterpreted_option.iter().any(|u| {
u.name.len() == 1
&& u.name[0].name_part == "allow_alias"
&& !u.name[0].is_extension
&& option_to_bool(u).unwrap_or(false)
})
});
debug_assert_eq!(to_index(self.pool.enums.len()), index);
self.pool.enums.push(EnumDescriptorInner {
id: Identity::new(file, path, full_name, enum_.name()),
parent,
values: Vec::with_capacity(enum_.value.len()),
value_numbers: Vec::with_capacity(enum_.value.len()),
value_names: HashMap::with_capacity(enum_.value.len()),
allow_alias,
});
}
fn visit_enum_value(
&mut self,
path: &[i32],
full_name: &str,
file: FileIndex,
enum_: EnumIndex,
index: EnumValueIndex,
value: &EnumValueDescriptorProto,
) {
self.add_name(
file,
full_name,
path,
&[tag::enum_value::NAME],
DefinitionKind::EnumValue(enum_),
);
debug_assert_eq!(
to_index(self.pool.enums[enum_ as usize].values.len()),
index
);
self.pool.enums[enum_ as usize]
.values
.push(EnumValueDescriptorInner {
id: Identity::new(file, path, full_name, value.name()),
number: value.number(),
});
}
fn visit_extension(
&mut self,
path: &[i32],
full_name: &str,
file: FileIndex,
_: Option<MessageIndex>,
index: ExtensionIndex,
_: &FieldDescriptorProto,
) {
self.add_name(
file,
full_name,
path,
&[tag::field::NAME],
DefinitionKind::Extension(index),
);
}
}
impl NameVisitor<'_> {
fn add_name(
&mut self,
file: FileIndex,
name: &str,
path1: &[i32],
path2: &[i32],
kind: DefinitionKind,
) {
let path = join_path(path1, path2);
match self.pool.names.entry(name.into()) {
hash_map::Entry::Vacant(entry) => {
entry.insert(Definition { file, kind, path });
}
hash_map::Entry::Occupied(_) => {
let entry = &self.pool.names[name];
if matches!(kind, DefinitionKind::Package)
&& matches!(entry.kind, DefinitionKind::Package)
{
return;
}
self.errors.push(DescriptorErrorKind::DuplicateName {
name: name.to_owned(),
first: Label::new(
&self.pool.files,
"first defined here",
entry.file,
entry.path.clone(),
),
second: Label::new(&self.pool.files, "defined again here", file, path),
})
}
}
}
fn check_message_field_camel_case_names(
&mut self,
file: FileIndex,
path: &[i32],
message: &DescriptorProto,
) {
let mut names: HashMap<String, (&str, i32)> = HashMap::new();
for (index, field) in message.field.iter().enumerate() {
let name = field.name();
let index = index as i32;
match names.entry(to_lower_without_underscores(name)) {
hash_map::Entry::Occupied(entry) => {
self.errors
.push(DescriptorErrorKind::DuplicateFieldCamelCaseName {
first_name: entry.get().0.to_owned(),
first: Label::new(
&self.pool.files,
"first defined here",
file,
join_path(
path,
&[tag::message::FIELD, entry.get().1, tag::field::NAME],
),
),
second_name: name.to_owned(),
second: Label::new(
&self.pool.files,
"defined again here",
file,
join_path(path, &[tag::message::FIELD, index, tag::field::NAME]),
),
})
}
hash_map::Entry::Vacant(entry) => {
entry.insert((name, index));
}
}
}
}
}
/// Normalizes a field name for JSON (camelCase) collision checks: drops
/// underscores and lower-cases ASCII letters; other characters pass through
/// unchanged.
fn to_lower_without_underscores(name: &str) -> String {
    let mut result = String::with_capacity(name.len());
    for ch in name.chars() {
        if ch != '_' {
            result.push(ch.to_ascii_lowercase());
        }
    }
    result
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/descriptor/build/visit.rs | prost-reflect/src/descriptor/build/visit.rs | use crate::descriptor::{
build::DescriptorPoolOffsets,
tag, to_index,
types::{
DescriptorProto, EnumDescriptorProto, EnumValueDescriptorProto, FieldDescriptorProto,
FileDescriptorProto, MethodDescriptorProto, OneofDescriptorProto, ServiceDescriptorProto,
},
EnumIndex, EnumValueIndex, ExtensionIndex, FieldIndex, FileIndex, MessageIndex, MethodIndex,
OneofIndex, ServiceIndex,
};
/// Callbacks invoked by [`visit`] for each kind of definition encountered
/// while walking a set of `FileDescriptorProto`s.
///
/// Every method has an empty default implementation, so implementors only
/// override the definitions they care about. Each callback receives the
/// source-code-info `path` of the definition, its fully-qualified name
/// (where applicable), the indices of its ancestors, its own pool-wide
/// index, and the raw proto.
pub(super) trait Visitor {
    /// Called once per file, before any of the file's contents.
    fn visit_file(&mut self, _path: &[i32], _index: FileIndex, _file: &FileDescriptorProto) {}
    /// Called for every message, including nested ones.
    fn visit_message(
        &mut self,
        _path: &[i32],
        _full_name: &str,
        _file: FileIndex,
        _parent_message: Option<MessageIndex>,
        _index: MessageIndex,
        _message: &DescriptorProto,
    ) {
    }
    /// Called for every non-extension field of a message.
    fn visit_field(
        &mut self,
        _path: &[i32],
        _full_name: &str,
        _file: FileIndex,
        _message: MessageIndex,
        _index: FieldIndex,
        _field: &FieldDescriptorProto,
    ) {
    }
    /// Called for every oneof declaration.
    fn visit_oneof(
        &mut self,
        _path: &[i32],
        _full_name: &str,
        _file: FileIndex,
        _message: MessageIndex,
        _index: OneofIndex,
        _oneof: &OneofDescriptorProto,
    ) {
    }
    /// Called for every service.
    fn visit_service(
        &mut self,
        _path: &[i32],
        _full_name: &str,
        _file: FileIndex,
        _index: ServiceIndex,
        _service: &ServiceDescriptorProto,
    ) {
    }
    /// Called for every method of a service.
    fn visit_method(
        &mut self,
        _path: &[i32],
        _full_name: &str,
        _file: FileIndex,
        _service: ServiceIndex,
        _index: MethodIndex,
        _method: &MethodDescriptorProto,
    ) {
    }
    /// Called for every enum, including ones nested in messages.
    fn visit_enum(
        &mut self,
        _path: &[i32],
        _full_name: &str,
        _file: FileIndex,
        _parent_message: Option<MessageIndex>,
        _index: EnumIndex,
        _enum: &EnumDescriptorProto,
    ) {
    }
    /// Called for every value of an enum.
    fn visit_enum_value(
        &mut self,
        _path: &[i32],
        _full_name: &str,
        _file: FileIndex,
        _enum_: EnumIndex,
        _index: EnumValueIndex,
        _value: &EnumValueDescriptorProto,
    ) {
    }
    /// Called for every extension field, at file or message scope.
    fn visit_extension(
        &mut self,
        _path: &[i32],
        _full_name: &str,
        _file: FileIndex,
        _parent_message: Option<MessageIndex>,
        _index: ExtensionIndex,
        _extension: &FieldDescriptorProto,
    ) {
    }
}
/// Walks every definition in `files` in declaration order, invoking the
/// matching `visitor` callback for each one. `offsets` seeds the running
/// indices assigned to files, messages, enums, services and extensions.
pub(super) fn visit(
    offsets: DescriptorPoolOffsets,
    files: &[FileDescriptorProto],
    visitor: &mut dyn Visitor,
) {
    let mut context = Context {
        path: Vec::new(),
        scope: String::new(),
        offsets,
    };
    files
        .iter()
        .for_each(|file| context.visit_file(file, visitor));
}
/// Mutable walk state threaded through all `visit_*` helpers.
struct Context {
    // Current source-code-info location path (tag numbers and list indices).
    path: Vec<i32>,
    // Current dot-separated fully-qualified name prefix.
    scope: String,
    // Running counters used to assign pool-wide indices to definitions.
    offsets: DescriptorPoolOffsets,
}
impl Context {
    /// Visits a file and everything declared in it. The package name (if
    /// any) scopes every name in the file.
    fn visit_file(&mut self, file: &FileDescriptorProto, visitor: &mut dyn Visitor) {
        if !file.package().is_empty() {
            self.push_scope(file.package());
        }
        let index = post_inc(&mut self.offsets.file);
        visitor.visit_file(&self.path, index, file);
        self.push_path(tag::file::MESSAGE_TYPE);
        for (i, message) in file.message_type.iter().enumerate() {
            self.push_path(i as i32);
            self.visit_message(message, visitor, index, None);
            self.pop_path();
        }
        self.pop_path();
        self.push_path(tag::file::ENUM_TYPE);
        for (i, enum_) in file.enum_type.iter().enumerate() {
            self.push_path(i as i32);
            self.visit_enum(enum_, visitor, index, None);
            self.pop_path();
        }
        self.pop_path();
        self.push_path(tag::file::SERVICE);
        for (i, service) in file.service.iter().enumerate() {
            self.push_path(i as i32);
            self.visit_service(service, visitor, index);
            self.pop_path();
        }
        self.pop_path();
        self.push_path(tag::file::EXTENSION);
        for (i, extension) in file.extension.iter().enumerate() {
            self.push_path(i as i32);
            self.visit_extension(extension, visitor, index, None);
            self.pop_path();
        }
        self.pop_path();
        if !file.package().is_empty() {
            self.pop_scope(file.package());
        }
    }

    /// Visits a message and, recursively, everything nested inside it.
    fn visit_message(
        &mut self,
        message: &DescriptorProto,
        visitor: &mut dyn Visitor,
        file: FileIndex,
        parent_message: Option<MessageIndex>,
    ) {
        self.push_scope(message.name());
        let index = post_inc(&mut self.offsets.message);
        visitor.visit_message(
            &self.path,
            &self.scope,
            file,
            parent_message,
            index,
            message,
        );
        // Oneofs are visited before fields so that field visitors can refer
        // to already-registered oneofs.
        self.push_path(tag::message::ONEOF_DECL);
        for (i, oneof) in message.oneof_decl.iter().enumerate() {
            self.push_path(i as i32);
            self.visit_oneof(oneof, visitor, file, index, to_index(i));
            self.pop_path();
        }
        self.pop_path();
        self.push_path(tag::message::FIELD);
        for (i, field) in message.field.iter().enumerate() {
            self.push_path(i as i32);
            self.visit_field(field, visitor, file, index, to_index(i));
            self.pop_path();
        }
        self.pop_path();
        self.push_path(tag::message::NESTED_TYPE);
        for (i, nested) in message.nested_type.iter().enumerate() {
            self.push_path(i as i32);
            self.visit_message(nested, visitor, file, Some(index));
            self.pop_path();
        }
        self.pop_path();
        self.push_path(tag::message::ENUM_TYPE);
        for (i, enum_) in message.enum_type.iter().enumerate() {
            self.push_path(i as i32);
            self.visit_enum(enum_, visitor, file, Some(index));
            self.pop_path();
        }
        self.pop_path();
        self.push_path(tag::message::EXTENSION);
        for (i, extension) in message.extension.iter().enumerate() {
            self.push_path(i as i32);
            self.visit_extension(extension, visitor, file, Some(index));
            self.pop_path();
        }
        self.pop_path();
        self.pop_scope(message.name());
    }

    /// Visits a single field of `message`.
    fn visit_field(
        &mut self,
        field: &FieldDescriptorProto,
        visitor: &mut dyn Visitor,
        file: FileIndex,
        message: MessageIndex,
        index: FieldIndex,
    ) {
        self.push_scope(field.name());
        visitor.visit_field(&self.path, &self.scope, file, message, index, field);
        self.pop_scope(field.name());
    }

    /// Visits a single oneof of `message`.
    fn visit_oneof(
        &mut self,
        oneof: &OneofDescriptorProto,
        visitor: &mut dyn Visitor,
        file: FileIndex,
        message: MessageIndex,
        index: OneofIndex,
    ) {
        self.push_scope(oneof.name());
        visitor.visit_oneof(&self.path, &self.scope, file, message, index, oneof);
        self.pop_scope(oneof.name());
    }

    /// Visits a service and each of its methods.
    fn visit_service(
        &mut self,
        service: &ServiceDescriptorProto,
        visitor: &mut dyn Visitor,
        file: FileIndex,
    ) {
        self.push_scope(service.name());
        let index = post_inc(&mut self.offsets.service);
        visitor.visit_service(&self.path, &self.scope, file, index, service);
        self.push_path(tag::service::METHOD);
        for (i, method) in service.method.iter().enumerate() {
            self.push_path(i as i32);
            self.visit_method(method, visitor, file, index, to_index(i));
            self.pop_path();
        }
        self.pop_path();
        self.pop_scope(service.name());
    }

    /// Visits a single method of a service.
    fn visit_method(
        &mut self,
        method: &MethodDescriptorProto,
        visitor: &mut dyn Visitor,
        file: FileIndex,
        service: ServiceIndex,
        index: MethodIndex,
    ) {
        self.push_scope(method.name());
        visitor.visit_method(&self.path, &self.scope, file, service, index, method);
        self.pop_scope(method.name());
    }

    /// Visits an enum and each of its values.
    fn visit_enum(
        &mut self,
        enum_: &EnumDescriptorProto,
        visitor: &mut dyn Visitor,
        file: FileIndex,
        parent_message: Option<MessageIndex>,
    ) {
        self.push_scope(enum_.name());
        let index = post_inc(&mut self.offsets.enum_);
        visitor.visit_enum(&self.path, &self.scope, file, parent_message, index, enum_);
        // The enum's name is popped *before* its values are visited: value
        // names are scoped to the enum's enclosing scope, not the enum
        // itself.
        self.pop_scope(enum_.name());
        self.push_path(tag::enum_::VALUE);
        // Loop variable renamed from the misleading `method` to `value`.
        for (i, value) in enum_.value.iter().enumerate() {
            self.push_path(i as i32);
            self.visit_enum_value(value, visitor, file, index, to_index(i));
            self.pop_path();
        }
        self.pop_path();
    }

    /// Visits a single enum value.
    fn visit_enum_value(
        &mut self,
        value: &EnumValueDescriptorProto,
        visitor: &mut dyn Visitor,
        file: FileIndex,
        enum_: EnumIndex,
        index: EnumValueIndex,
    ) {
        self.push_scope(value.name());
        visitor.visit_enum_value(&self.path, &self.scope, file, enum_, index, value);
        self.pop_scope(value.name());
    }

    /// Visits a single extension field.
    fn visit_extension(
        &mut self,
        extension: &FieldDescriptorProto,
        visitor: &mut dyn Visitor,
        file: FileIndex,
        parent_message: Option<MessageIndex>,
    ) {
        self.push_scope(extension.name());
        let index = post_inc(&mut self.offsets.extension);
        visitor.visit_extension(
            &self.path,
            &self.scope,
            file,
            parent_message,
            index,
            extension,
        );
        self.pop_scope(extension.name());
    }

    fn push_path(&mut self, path: i32) {
        self.path.push(path);
    }

    fn pop_path(&mut self) {
        self.path.pop().unwrap();
    }

    /// Appends `scope` to the current fully-qualified name, dot-separated.
    fn push_scope(&mut self, scope: &str) {
        if !self.scope.is_empty() {
            self.scope.push('.');
        }
        self.scope.push_str(scope);
    }

    /// Removes a previously-pushed `scope`; the `saturating_sub(1)` also
    /// drops the joining '.' when one was added.
    fn pop_scope(&mut self, scope: &str) {
        debug_assert!(self.scope.ends_with(scope));
        self.scope
            .truncate((self.scope.len() - scope.len()).saturating_sub(1));
    }
}
/// Returns the current value of `index` and then increments it — the
/// classic post-increment.
fn post_inc(index: &mut u32) -> u32 {
    let current = *index;
    *index += 1;
    current
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/descriptor/build/options.rs | prost-reflect/src/descriptor/build/options.rs | use std::sync::Arc;
use prost::{bytes::Bytes, Message};
use crate::{
descriptor::{
build::{
join_path, resolve_name,
visit::{visit, Visitor},
DescriptorPoolOffsets, ResolveNameFilter,
},
error::{DescriptorErrorKind, Label},
tag,
types::{
uninterpreted_option, DescriptorProto, EnumDescriptorProto, EnumValueDescriptorProto,
FieldDescriptorProto, FileDescriptorProto, MethodDescriptorProto, OneofDescriptorProto,
Options, ServiceDescriptorProto, UninterpretedOption,
},
Definition, DefinitionKind, EnumIndex, EnumValueIndex, ExtensionIndex, FieldIndex,
FileIndex, MessageIndex, MethodIndex, OneofIndex, ServiceIndex, MAP_ENTRY_KEY_NUMBER,
MAP_ENTRY_VALUE_NUMBER,
},
dynamic::{fmt_string, FieldDescriptorLike},
Cardinality, DescriptorError, DescriptorPool, DynamicMessage, EnumDescriptor,
ExtensionDescriptor, MapKey, MessageDescriptor, ReflectMessage, Value,
};
impl DescriptorPool {
    /// Interprets all `uninterpreted_option` entries in `files`, writing the
    /// re-encoded options back into the stored file protos and fixing up
    /// `SourceCodeInfo` location paths to match.
    pub(super) fn resolve_options(
        &mut self,
        offsets: DescriptorPoolOffsets,
        files: &[FileDescriptorProto],
    ) -> Result<(), DescriptorError> {
        // The pool must not be shared yet: we mutate it in place below.
        debug_assert_eq!(Arc::strong_count(&self.inner), 1);
        let mut visitor = OptionsVisitor {
            pool: self,
            errors: Vec::new(),
            options: Vec::new(),
            locations: Vec::new(),
        };
        visit(offsets, files, &mut visitor);
        if !visitor.errors.is_empty() {
            return Err(DescriptorError::new(visitor.errors));
        }
        debug_assert_eq!(Arc::strong_count(&visitor.pool.inner), 1);
        let inner = Arc::get_mut(&mut visitor.pool.inner).unwrap();
        // Write every re-encoded options message back into its file proto.
        for (file, path, encoded) in visitor.options {
            let file = &mut inner.files[file as usize].raw;
            set_file_option(file, &path, &encoded);
        }
        // Rewrite SourceCodeInfo paths whose prefix pointed at an
        // uninterpreted option so they point at the resolved field instead.
        for (file, from, to) in visitor.locations {
            let file = &mut inner.files[file as usize].raw;
            if let Some(source_code_info) = &mut file.source_code_info {
                for location in &mut source_code_info.location {
                    if location.path.starts_with(&from) {
                        location.path.splice(..from.len(), to.iter().copied());
                    }
                }
            }
        }
        Ok(())
    }
}
/// Visitor that interprets `uninterpreted_option` entries for every
/// definition, accumulating the results to be applied once the walk
/// completes successfully.
struct OptionsVisitor<'a> {
    pool: &'a mut DescriptorPool,
    // Errors found while interpreting options.
    errors: Vec<DescriptorErrorKind>,
    // (file index, path to the options field, re-encoded option bytes).
    options: Vec<(FileIndex, Box<[i32]>, Vec<u8>)>,
    // (file index, old SourceCodeInfo path prefix, replacement prefix).
    #[allow(clippy::type_complexity)]
    locations: Vec<(FileIndex, Box<[i32]>, Box<[i32]>)>,
}
/// Each callback follows the same pattern: if the definition carries an
/// options message, resolve its uninterpreted options against the matching
/// `google.protobuf.*Options` descriptor and queue the re-encoded bytes,
/// keyed by the path of the options field within the file.
impl Visitor for OptionsVisitor<'_> {
    fn visit_file(&mut self, path: &[i32], index: FileIndex, file: &FileDescriptorProto) {
        if let Some(options) = &file.options {
            let path = join_path(path, &[tag::file::OPTIONS]);
            let encoded = self.resolve_options(
                "google.protobuf.FileOptions",
                options,
                &options.value.uninterpreted_option,
                // Options in a file are resolved relative to its package.
                file.package(),
                index,
                &path,
            );
            self.options.push((index, path, encoded));
        }
    }

    fn visit_message(
        &mut self,
        path: &[i32],
        full_name: &str,
        file: FileIndex,
        _: Option<MessageIndex>,
        _: MessageIndex,
        message: &DescriptorProto,
    ) {
        if let Some(options) = &message.options {
            let path = join_path(path, &[tag::message::OPTIONS]);
            let encoded = self.resolve_options(
                "google.protobuf.MessageOptions",
                options,
                &options.value.uninterpreted_option,
                full_name,
                file,
                &path,
            );
            self.options.push((file, path, encoded));
        }
        // Extension ranges carry their own options message.
        for (i, extension_range) in message.extension_range.iter().enumerate() {
            let path = join_path(
                path,
                &[
                    tag::message::EXTENSION_RANGE,
                    i as i32,
                    tag::message::extension_range::OPTIONS,
                ],
            );
            if let Some(options) = &extension_range.options {
                let encoded = self.resolve_options(
                    "google.protobuf.ExtensionRangeOptions",
                    options,
                    &options.value.uninterpreted_option,
                    full_name,
                    file,
                    &path,
                );
                self.options.push((file, path, encoded));
            }
        }
    }

    fn visit_field(
        &mut self,
        path: &[i32],
        full_name: &str,
        file: FileIndex,
        _: MessageIndex,
        _: FieldIndex,
        field: &FieldDescriptorProto,
    ) {
        if let Some(options) = &field.options {
            let path = join_path(path, &[tag::field::OPTIONS]);
            let encoded = self.resolve_options(
                "google.protobuf.FieldOptions",
                options,
                &options.value.uninterpreted_option,
                full_name,
                file,
                &path,
            );
            self.options.push((file, path, encoded));
        }
    }

    fn visit_oneof(
        &mut self,
        path: &[i32],
        full_name: &str,
        file: FileIndex,
        _: MessageIndex,
        _: OneofIndex,
        oneof: &OneofDescriptorProto,
    ) {
        if let Some(options) = &oneof.options {
            let path = join_path(path, &[tag::oneof::OPTIONS]);
            let encoded = self.resolve_options(
                "google.protobuf.OneofOptions",
                options,
                &options.value.uninterpreted_option,
                full_name,
                file,
                &path,
            );
            self.options.push((file, path, encoded));
        }
    }

    fn visit_service(
        &mut self,
        path: &[i32],
        full_name: &str,
        file: FileIndex,
        _: ServiceIndex,
        service: &ServiceDescriptorProto,
    ) {
        if let Some(options) = &service.options {
            let path = join_path(path, &[tag::service::OPTIONS]);
            let encoded = self.resolve_options(
                "google.protobuf.ServiceOptions",
                options,
                &options.value.uninterpreted_option,
                full_name,
                file,
                &path,
            );
            self.options.push((file, path, encoded));
        }
    }

    fn visit_method(
        &mut self,
        path: &[i32],
        full_name: &str,
        file: FileIndex,
        _: ServiceIndex,
        _: MethodIndex,
        method: &MethodDescriptorProto,
    ) {
        if let Some(options) = &method.options {
            let path = join_path(path, &[tag::method::OPTIONS]);
            let encoded = self.resolve_options(
                "google.protobuf.MethodOptions",
                options,
                &options.value.uninterpreted_option,
                full_name,
                file,
                &path,
            );
            self.options.push((file, path, encoded));
        }
    }

    fn visit_enum(
        &mut self,
        path: &[i32],
        full_name: &str,
        file: FileIndex,
        _: Option<MessageIndex>,
        _: EnumIndex,
        enum_: &EnumDescriptorProto,
    ) {
        if let Some(options) = &enum_.options {
            let path = join_path(path, &[tag::enum_::OPTIONS]);
            let encoded = self.resolve_options(
                "google.protobuf.EnumOptions",
                options,
                &options.value.uninterpreted_option,
                full_name,
                file,
                &path,
            );
            self.options.push((file, path, encoded));
        }
    }

    fn visit_enum_value(
        &mut self,
        path: &[i32],
        full_name: &str,
        file: FileIndex,
        _: EnumIndex,
        _: EnumValueIndex,
        value: &EnumValueDescriptorProto,
    ) {
        if let Some(options) = &value.options {
            let path = join_path(path, &[tag::enum_value::OPTIONS]);
            let encoded = self.resolve_options(
                "google.protobuf.EnumValueOptions",
                options,
                &options.value.uninterpreted_option,
                full_name,
                file,
                &path,
            );
            self.options.push((file, path, encoded));
        }
    }

    fn visit_extension(
        &mut self,
        path: &[i32],
        full_name: &str,
        file: FileIndex,
        _: Option<MessageIndex>,
        _: ExtensionIndex,
        extension: &FieldDescriptorProto,
    ) {
        if let Some(options) = &extension.options {
            let path = join_path(path, &[tag::field::OPTIONS]);
            let encoded = self.resolve_options(
                "google.protobuf.FieldOptions",
                options,
                &options.value.uninterpreted_option,
                full_name,
                file,
                &path,
            );
            self.options.push((file, path, encoded));
        }
    }
}
impl OptionsVisitor<'_> {
    /// Decodes the raw options bytes as a dynamic `desc_name` message,
    /// applies every uninterpreted option to it, strips the
    /// `uninterpreted_option` field and returns the re-encoded bytes.
    /// Errors are accumulated in `self.errors`; on decode failure an empty
    /// buffer is returned.
    fn resolve_options<T>(
        &mut self,
        desc_name: &str,
        options: &Options<T>,
        uninterpreted: &[UninterpretedOption],
        scope: &str,
        file: FileIndex,
        path: &[i32],
    ) -> Vec<u8> {
        // Prefer a locally-defined descriptor (the pool may redefine the
        // well-known options types); otherwise fall back to the global pool.
        let desc = self.pool.get_message_by_name(desc_name).unwrap_or_else(|| {
            DescriptorPool::global()
                .get_message_by_name(desc_name)
                .unwrap()
        });
        let mut message = match DynamicMessage::decode(desc, options.encoded.as_slice()) {
            Ok(message) => message,
            Err(err) => {
                self.errors
                    .push(DescriptorErrorKind::DecodeFileDescriptorSet { err });
                return Vec::new();
            }
        };
        for (i, option) in uninterpreted.iter().enumerate() {
            if let Err(err) = self.set_option(
                &mut message,
                option,
                scope,
                file,
                join_path(path, &[tag::UNINTERPRETED_OPTION, i as i32]),
            ) {
                self.errors.push(err);
            }
        }
        // The options have now been interpreted; drop the raw entries.
        message.clear_field_by_number(tag::UNINTERPRETED_OPTION as u32);
        message.encode_to_vec()
    }

    /// Applies a single uninterpreted option to `message`, descending
    /// through each dotted name part (plain fields and `(extension)` parts)
    /// and setting the value on the final field. Also records a location
    /// rewrite mapping the option's path to the resolved field path.
    #[allow(clippy::result_large_err)]
    fn set_option(
        &mut self,
        mut message: &mut DynamicMessage,
        option: &UninterpretedOption,
        scope: &str,
        file: FileIndex,
        path: Box<[i32]>,
    ) -> Result<(), DescriptorErrorKind> {
        // `path` ends with [UNINTERPRETED_OPTION, i]; the resolved path
        // replaces that suffix with the chain of resolved field numbers.
        let mut resolved_path = Vec::with_capacity(path.len() - 2 + option.name.len());
        resolved_path.extend_from_slice(&path[..path.len() - 2]);
        for (i, part) in option.name.iter().enumerate() {
            let is_last = i == option.name.len() - 1;
            let desc = message.descriptor();
            if part.is_extension {
                let extension_desc =
                    self.find_extension(scope, &part.name_part, file, &path, &desc)?;
                resolved_path.push(extension_desc.number() as i32);
                if is_last {
                    // A non-repeated option may only be set once.
                    if extension_desc.cardinality() != Cardinality::Repeated
                        && message.has_extension(&extension_desc)
                    {
                        return Err(DescriptorErrorKind::DuplicateOption {
                            name: fmt_option_name(&option.name),
                            found: Label::new(&self.pool.inner.files, "found here", file, path),
                        });
                    } else {
                        self.set_field_value(
                            message.get_extension_mut(&extension_desc),
                            &mut resolved_path,
                            &extension_desc,
                            option,
                            file,
                            &path,
                        )?;
                    }
                } else if let Value::Message(submessage) =
                    message.get_extension_mut(&extension_desc)
                {
                    // Not the last part: descend into the sub-message.
                    message = submessage;
                } else {
                    return Err(DescriptorErrorKind::InvalidOptionType {
                        name: fmt_option_name(&option.name[..i + 1]),
                        ty: fmt_field_ty(&extension_desc),
                        value: fmt_value(option),
                        is_last,
                        found: Label::new(&self.pool.inner.files, "found here", file, path),
                    });
                }
            } else {
                match desc.get_field_by_name(&part.name_part) {
                    Some(field_desc) => {
                        resolved_path.push(field_desc.number() as i32);
                        if is_last {
                            // A non-repeated option may only be set once.
                            if field_desc.cardinality() != Cardinality::Repeated
                                && message.has_field(&field_desc)
                            {
                                return Err(DescriptorErrorKind::DuplicateOption {
                                    name: fmt_option_name(&option.name),
                                    found: Label::new(
                                        &self.pool.inner.files,
                                        "found here",
                                        file,
                                        path,
                                    ),
                                });
                            } else {
                                self.set_field_value(
                                    message.get_field_mut(&field_desc),
                                    &mut resolved_path,
                                    &field_desc,
                                    option,
                                    file,
                                    &path,
                                )?;
                            }
                        } else if let Value::Message(submessage) =
                            message.get_field_mut(&field_desc)
                        {
                            // Not the last part: descend into the sub-message.
                            message = submessage;
                        } else {
                            return Err(DescriptorErrorKind::InvalidOptionType {
                                name: fmt_option_name(&option.name[..i + 1]),
                                ty: fmt_field_ty(&field_desc),
                                value: fmt_value(option),
                                is_last,
                                found: Label::new(&self.pool.inner.files, "found here", file, path),
                            });
                        }
                    }
                    None => {
                        return Err(DescriptorErrorKind::OptionNotFound {
                            name: fmt_option_name(&option.name[..i + 1]),
                            found: Label::new(&self.pool.inner.files, "found here", file, path),
                        })
                    }
                }
            }
        }
        self.locations.push((file, path, resolved_path.into()));
        Ok(())
    }

    /// Writes the option's value into `value`, converting it to the field's
    /// type. Repeated fields append (recording the new element's index in
    /// `resolved_path`); map fields insert a parsed entry.
    #[allow(clippy::result_large_err)]
    fn set_field_value(
        &self,
        value: &mut Value,
        resolved_path: &mut Vec<i32>,
        desc: &impl FieldDescriptorLike,
        option: &UninterpretedOption,
        file: FileIndex,
        path: &[i32],
    ) -> Result<(), DescriptorErrorKind> {
        // Generic "value does not fit this field type" error.
        let err = |()| DescriptorErrorKind::InvalidOptionType {
            name: fmt_option_name(&option.name),
            ty: fmt_field_ty(desc),
            value: fmt_value(option),
            is_last: true,
            found: Label::new(&self.pool.inner.files, "found here", file, path.into()),
        };
        // Message-typed values may additionally fail with a text-format
        // parse error, reported separately when that feature is enabled.
        let parse_err = |parse_err| match parse_err {
            #[cfg(feature = "text-format")]
            Some(parse_err) => DescriptorErrorKind::InvalidMessageOption {
                name: fmt_option_name(&option.name),
                ty: fmt_field_ty(desc),
                found: Label::new(&self.pool.inner.files, "found here", file, path.into()),
                err: parse_err,
            },
            _ => err(()),
        };
        match value {
            Value::Bool(value) => *value = option_to_bool(option).map_err(err)?,
            Value::I32(value) => *value = option_to_int(option).map_err(err)?,
            Value::I64(value) => *value = option_to_int(option).map_err(err)?,
            Value::U32(value) => *value = option_to_int(option).map_err(err)?,
            Value::U64(value) => *value = option_to_int(option).map_err(err)?,
            Value::F32(value) => *value = option_to_float(option).map_err(err)? as f32,
            Value::F64(value) => *value = option_to_float(option).map_err(err)?,
            Value::String(value) => *value = option_to_string(option).map_err(err)?,
            Value::Bytes(value) => *value = option_to_bytes(option).map_err(err)?,
            Value::EnumNumber(value) => {
                *value = option_to_enum(option, desc.kind().as_enum().unwrap()).map_err(err)?
            }
            Value::Message(value) => {
                *value = option_to_message(option, desc.kind().as_message().unwrap())
                    .map_err(parse_err)?
            }
            Value::List(value) => {
                // Record the index of the element about to be appended, then
                // recurse to convert the scalar value itself.
                resolved_path.push(value.len() as i32);
                let mut entry = Value::default_value(&desc.kind());
                self.set_field_value(&mut entry, resolved_path, desc, option, file, path)?;
                value.push(entry);
            }
            Value::Map(value) => {
                let (entry_key, entry_value) =
                    option_to_map_entry(option, desc.kind().as_message().unwrap())
                        .map_err(parse_err)?;
                value.insert(entry_key, entry_value);
            }
        }
        Ok(())
    }

    /// Resolves the extension named `name` relative to `scope`, checking
    /// that it actually extends `extendee`.
    #[allow(clippy::result_large_err)]
    fn find_extension(
        &self,
        scope: &str,
        name: &str,
        file: FileIndex,
        path: &[i32],
        extendee: &MessageDescriptor,
    ) -> Result<ExtensionDescriptor, DescriptorErrorKind> {
        let (_, def) = resolve_name(
            &self.pool.inner.files[file as usize].transitive_dependencies,
            &self.pool.inner.names,
            scope,
            name,
            ResolveNameFilter::Extension,
        )
        .into_result(name, &self.pool.inner.files, file, path, &[])?;
        // The filter above only matches extensions, so any other definition
        // kind here would be a logic error.
        let &Definition {
            kind: DefinitionKind::Extension(index),
            ..
        } = def
        else {
            unreachable!()
        };
        let desc = ExtensionDescriptor {
            pool: self.pool.clone(),
            index,
        };
        if desc.containing_message() == *extendee {
            Ok(desc)
        } else {
            Err(DescriptorErrorKind::InvalidOptionExtendee {
                name: desc.full_name().to_owned(),
                expected_extendee: extendee.full_name().to_owned(),
                actual_extendee: desc.containing_message().full_name().to_owned(),
                found: Label::new(&self.pool.inner.files, "found here", file, path.into()),
            })
        }
    }
}
/// Renders an option name for error messages: parts are dot-separated and
/// extension parts are wrapped in parentheses, e.g. `a.(my.ext).b`.
fn fmt_option_name(parts: &[uninterpreted_option::NamePart]) -> String {
    let mut result = String::new();
    for part in parts {
        if !result.is_empty() {
            result.push('.');
        }
        if part.is_extension {
            result.push('(');
        }
        result.push_str(&part.name_part);
        if part.is_extension {
            result.push(')');
        }
    }
    result
}
/// Interprets an uninterpreted option as a bool: only the identifiers
/// `true` and `false` are accepted.
pub(super) fn option_to_bool(option: &UninterpretedOption) -> Result<bool, ()> {
    if let Some(ident) = option.identifier_value.as_deref() {
        if ident == "true" {
            return Ok(true);
        }
        if ident == "false" {
            return Ok(false);
        }
    }
    Err(())
}
/// Interprets an uninterpreted option as an integer of type `T`, preferring
/// the positive representation when both are somehow present. Fails if
/// neither integer field is set or the value does not fit in `T`.
pub(super) fn option_to_int<T>(option: &UninterpretedOption) -> Result<T, ()>
where
    T: TryFrom<u64> + TryFrom<i64>,
{
    match (option.positive_int_value, option.negative_int_value) {
        (Some(positive), _) => T::try_from(positive).map_err(drop),
        (None, Some(negative)) => T::try_from(negative).map_err(drop),
        (None, None) => Err(()),
    }
}
/// Interprets an uninterpreted option as a float, also accepting integer
/// literals (checked in the order double, positive int, negative int).
pub(super) fn option_to_float(option: &UninterpretedOption) -> Result<f64, ()> {
    option
        .double_value
        .or_else(|| option.positive_int_value.map(|int| int as f64))
        .or_else(|| option.negative_int_value.map(|int| int as f64))
        .ok_or(())
}
/// Interprets an uninterpreted option as a UTF-8 string; fails if the
/// string field is absent or the bytes are not valid UTF-8.
pub(super) fn option_to_string(option: &UninterpretedOption) -> Result<String, ()> {
    match &option.string_value {
        Some(bytes) => String::from_utf8(bytes.clone()).map_err(drop),
        None => Err(()),
    }
}
/// Interprets an uninterpreted option as raw bytes.
pub(super) fn option_to_bytes(option: &UninterpretedOption) -> Result<Bytes, ()> {
    option
        .string_value
        .as_deref()
        .map(Bytes::copy_from_slice)
        .ok_or(())
}
/// Interprets an uninterpreted option as an enum value of `desc`, looking
/// the identifier up by name and returning its number.
pub(super) fn option_to_enum(
    option: &UninterpretedOption,
    desc: &EnumDescriptor,
) -> Result<i32, ()> {
    let ident = option.identifier_value.as_deref().ok_or(())?;
    desc.get_value_by_name(ident)
        .map(|value| value.number())
        .ok_or(())
}
#[cfg(feature = "text-format")]
type ParseError = crate::text_format::ParseError;
#[cfg(not(feature = "text-format"))]
type ParseError = ();
#[cfg(feature = "text-format")]
pub(super) fn option_to_message(
option: &UninterpretedOption,
desc: &MessageDescriptor,
) -> Result<DynamicMessage, Option<ParseError>> {
if let Some(text_format) = &option.aggregate_value {
DynamicMessage::parse_text_format(desc.clone(), text_format).map_err(Some)
} else {
Err(None)
}
}
#[cfg(not(feature = "text-format"))]
pub(super) fn option_to_message(
option: &UninterpretedOption,
desc: &MessageDescriptor,
) -> Result<DynamicMessage, Option<ParseError>> {
if option.aggregate_value.is_some() {
Ok(DynamicMessage::new(desc.clone()))
} else {
Err(None)
}
}
/// Interprets an uninterpreted option as a single map entry: parses the
/// aggregate value as the map-entry message `desc`, then extracts its key
/// and value fields. Any missing piece yields `Err(None)`.
pub(super) fn option_to_map_entry(
    option: &UninterpretedOption,
    desc: &MessageDescriptor,
) -> Result<(MapKey, Value), Option<ParseError>> {
    debug_assert!(desc.is_map_entry());
    let entry = option_to_message(option, desc)?;
    let key = entry
        .get_field_by_number(MAP_ENTRY_KEY_NUMBER)
        .ok_or(None)?
        .into_owned()
        .into_map_key()
        .ok_or(None)?;
    let value = entry
        .get_field_by_number(MAP_ENTRY_VALUE_NUMBER)
        .ok_or(None)?
        .into_owned();
    Ok((key, value))
}
/// Renders the type of a field for use in error messages, e.g.
/// `map<string, int32>`, `repeated uint64` or `bool`.
fn fmt_field_ty(field: &impl FieldDescriptorLike) -> String {
    if field.is_map() {
        let kind = field.kind();
        let entry = kind.as_message().unwrap();
        let key = entry.map_entry_key_field().kind();
        let value = entry.map_entry_value_field().kind();
        format!("map<{key:?}, {value:?}>")
    } else if field.is_list() {
        format!("repeated {:?}", field.kind())
    } else {
        format!("{:?}", field.kind())
    }
}
/// Renders the raw value of an uninterpreted option for use in error
/// messages, checking each possible value field in turn.
fn fmt_value(option: &UninterpretedOption) -> String {
    if let Some(value) = &option.identifier_value {
        return value.clone();
    }
    if let Some(value) = &option.positive_int_value {
        return value.to_string();
    }
    if let Some(value) = &option.negative_int_value {
        return value.to_string();
    }
    if let Some(value) = &option.double_value {
        return value.to_string();
    }
    if let Some(value) = &option.string_value {
        let mut string = String::new();
        fmt_string(&mut string, value).unwrap();
        return string;
    }
    if let Some(value) = &option.aggregate_value {
        return value.clone();
    }
    String::new()
}
/// Writes a freshly-encoded options message into `file` at the location
/// described by `path` (a sequence of field tags and indices, as used in
/// `SourceCodeInfo` paths).
///
/// Panics on a path element that does not lead to an options field; paths are
/// produced internally, so that would indicate a bug.
fn set_file_option(file: &mut FileDescriptorProto, path: &[i32], encoded: &[u8]) {
    match path[0] {
        tag::file::OPTIONS => {
            debug_assert_eq!(path.len(), 1);
            file.options = Some(Options::decode(encoded).unwrap());
        }
        // Messages and enums may nest further, so recurse with the remaining path.
        tag::file::MESSAGE_TYPE => {
            let message = &mut file.message_type[path[1] as usize];
            set_message_option(message, &path[2..], encoded);
        }
        tag::file::ENUM_TYPE => {
            let enum_ = &mut file.enum_type[path[1] as usize];
            set_enum_option(enum_, &path[2..], encoded);
        }
        tag::file::SERVICE => {
            let service = &mut file.service[path[1] as usize];
            match path[2] {
                tag::service::OPTIONS => service.options = Some(Options::decode(encoded).unwrap()),
                tag::service::METHOD => {
                    debug_assert_eq!(path.len(), 5);
                    debug_assert_eq!(path[4], tag::method::OPTIONS);
                    let value = &mut service.method[path[3] as usize];
                    value.options = Some(Options::decode(encoded).unwrap());
                }
                p => panic!("unknown path element {p}"),
            }
        }
        tag::file::EXTENSION => {
            debug_assert_eq!(path.len(), 3);
            debug_assert_eq!(path[2], tag::field::OPTIONS);
            let field = &mut file.extension[path[1] as usize];
            field.options = Some(Options::decode(encoded).unwrap());
        }
        p => panic!("unknown path element {p}"),
    }
}
/// Writes a freshly-encoded options message into `message` (or one of its
/// nested fields, oneofs, extension ranges, nested types or extensions) at
/// the location described by `path`.
///
/// Panics on a path element that does not lead to an options field.
fn set_message_option(message: &mut DescriptorProto, path: &[i32], encoded: &[u8]) {
    match path[0] {
        tag::message::OPTIONS => {
            debug_assert_eq!(path.len(), 1);
            message.options = Some(Options::decode(encoded).unwrap());
        }
        tag::message::EXTENSION_RANGE => {
            debug_assert_eq!(path.len(), 3);
            debug_assert_eq!(path[2], tag::message::extension_range::OPTIONS);
            let extension_range = &mut message.extension_range[path[1] as usize];
            extension_range.options = Some(Options::decode(encoded).unwrap());
        }
        tag::message::FIELD => {
            debug_assert_eq!(path.len(), 3);
            debug_assert_eq!(path[2], tag::field::OPTIONS);
            let field = &mut message.field[path[1] as usize];
            field.options = Some(Options::decode(encoded).unwrap());
        }
        tag::message::ONEOF_DECL => {
            debug_assert_eq!(path.len(), 3);
            debug_assert_eq!(path[2], tag::oneof::OPTIONS);
            let field = &mut message.oneof_decl[path[1] as usize];
            field.options = Some(Options::decode(encoded).unwrap());
        }
        // Nested messages and enums recurse with the remaining path.
        tag::message::NESTED_TYPE => {
            let nested_message = &mut message.nested_type[path[1] as usize];
            set_message_option(nested_message, &path[2..], encoded);
        }
        tag::message::ENUM_TYPE => {
            let enum_ = &mut message.enum_type[path[1] as usize];
            set_enum_option(enum_, &path[2..], encoded);
        }
        tag::message::EXTENSION => {
            debug_assert_eq!(path.len(), 3);
            debug_assert_eq!(path[2], tag::field::OPTIONS);
            let field = &mut message.extension[path[1] as usize];
            field.options = Some(Options::decode(encoded).unwrap());
        }
        p => panic!("unknown path element {p}"),
    }
}
/// Writes a freshly-encoded options message into `enum_` or one of its values
/// at the location described by `path`.
///
/// Panics on a path element that does not lead to an options field.
fn set_enum_option(enum_: &mut EnumDescriptorProto, path: &[i32], encoded: &[u8]) {
    match path[0] {
        tag::enum_::OPTIONS => enum_.options = Some(Options::decode(encoded).unwrap()),
        tag::enum_::VALUE => {
            debug_assert_eq!(path.len(), 3);
            debug_assert_eq!(path[2], tag::enum_value::OPTIONS);
            let value = &mut enum_.value[path[1] as usize];
            value.options = Some(Options::decode(encoded).unwrap());
        }
        p => panic!("unknown path element {p}"),
    }
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/descriptor/build/mod.rs | prost-reflect/src/descriptor/build/mod.rs | mod names;
mod options;
mod resolve;
mod visit;
use core::fmt;
use std::{
borrow::Cow,
collections::{HashMap, HashSet},
iter,
sync::Arc,
};
use crate::{
descriptor::{
error::{DescriptorErrorKind, Label},
to_index,
types::FileDescriptorProto,
Definition, DefinitionKind, DescriptorPoolInner, EnumIndex, ExtensionIndex,
FileDescriptorInner, FileIndex, MessageIndex, ServiceIndex,
},
DescriptorError, DescriptorPool,
};
/// Snapshot of the lengths of each pool collection, taken before adding new
/// files so a failed `build_files` call can be rolled back.
#[derive(Clone, Copy)]
struct DescriptorPoolOffsets {
    file: FileIndex,
    message: MessageIndex,
    enum_: EnumIndex,
    service: ServiceIndex,
    extension: ExtensionIndex,
}
/// Restricts which kinds of definition a name lookup may resolve to.
#[derive(Copy, Clone, Debug)]
enum ResolveNameFilter {
    Message,
    Extension,
    // Either a message or an enum, as allowed for field types.
    FieldType,
}
/// Outcome of resolving a (possibly relative) name against the pool.
enum ResolveNameResult<'a, 'b> {
    // The name resolved to a definition matching the filter.
    Found {
        name: Cow<'b, str>,
        def: &'a Definition,
    },
    // The name resolved, but to a definition of the wrong kind for `filter`.
    InvalidType {
        name: Cow<'b, str>,
        def: &'a Definition,
        filter: ResolveNameFilter,
    },
    // The name exists, but in a file not imported by the referencing file.
    NotImported {
        name: Cow<'b, str>,
        file: FileIndex,
    },
    // A relative lookup matched an inner scope that does not actually contain
    // the full name, shadowing an outer definition.
    Shadowed {
        name: Cow<'b, str>,
        shadowed_name: Cow<'b, str>,
    },
    NotFound,
}
impl DescriptorPoolOffsets {
    /// Records the current length of every pool collection.
    fn new(pool: &DescriptorPoolInner) -> Self {
        DescriptorPoolOffsets {
            file: to_index(pool.files.len()),
            message: to_index(pool.messages.len()),
            enum_: to_index(pool.enums.len()),
            service: to_index(pool.services.len()),
            extension: to_index(pool.extensions.len()),
        }
    }
    /// Undoes a partially-applied `build_files` call by truncating every
    /// collection back to the recorded lengths and dropping the names and
    /// cross-references added since the snapshot.
    fn rollback(&self, pool: &mut DescriptorPoolInner) {
        pool.files.truncate(self.file as usize);
        pool.messages.truncate(self.message as usize);
        pool.enums.truncate(self.enum_ as usize);
        pool.extensions.truncate(self.extension as usize);
        pool.services.truncate(self.service as usize);
        pool.names.retain(|name, definition| match definition.kind {
            // Keep a package entry only while some surviving file still lives
            // in that package (or a sub-package of it).
            DefinitionKind::Package => pool.files.iter().any(|f| {
                f.prost.package().starts_with(name.as_ref())
                    && matches!(
                        f.prost.package().as_bytes().get(name.len()),
                        None | Some(&b'.')
                    )
            }),
            // All other entries survive iff their index predates the snapshot.
            DefinitionKind::Message(message)
            | DefinitionKind::Field(message)
            | DefinitionKind::Oneof(message) => message < self.message,
            DefinitionKind::Service(service) | DefinitionKind::Method(service) => {
                service < self.service
            }
            DefinitionKind::Enum(enum_) | DefinitionKind::EnumValue(enum_) => enum_ < self.enum_,
            DefinitionKind::Extension(extension) => extension < self.extension,
        });
        pool.file_names.retain(|_, &mut file| file < self.file);
        for message in &mut pool.messages {
            message.extensions.retain(|&message| message < self.message);
        }
    }
}
impl DescriptorPool {
    /// Adds a set of files to the pool, skipping any whose name is already
    /// registered.
    ///
    /// On error, the pool is rolled back to its previous state, so a failed
    /// call has no observable effect.
    pub(crate) fn build_files<I>(&mut self, files: I) -> Result<(), DescriptorError>
    where
        I: IntoIterator<Item = FileDescriptorProto>,
    {
        let offsets = DescriptorPoolOffsets::new(&self.inner);
        let deduped_files: Vec<_> = files
            .into_iter()
            .filter(|f| !self.inner.file_names.contains_key(f.name()))
            .collect();
        let result = self.build_files_deduped(offsets, &deduped_files);
        if result.is_err() {
            // The build passes should not have left any extra handles alive.
            debug_assert_eq!(Arc::strong_count(&self.inner), 1);
            offsets.rollback(Arc::get_mut(&mut self.inner).unwrap());
        }
        result
    }
    /// Runs the build passes (name collection, name resolution, option
    /// resolution) over the new files, then regenerates the cached prost form
    /// of each file added by this call.
    fn build_files_deduped(
        &mut self,
        offsets: DescriptorPoolOffsets,
        deduped_files: &[FileDescriptorProto],
    ) -> Result<(), DescriptorError> {
        if deduped_files.is_empty() {
            return Ok(());
        }
        let inner = Arc::make_mut(&mut self.inner);
        inner.collect_names(offsets, deduped_files)?;
        inner.resolve_names(offsets, deduped_files)?;
        self.resolve_options(offsets, deduped_files)?;
        debug_assert_eq!(Arc::strong_count(&self.inner), 1);
        let inner = Arc::get_mut(&mut self.inner).unwrap();
        for file in &mut inner.files[offsets.file as usize..] {
            file.prost = file.raw.to_prost();
        }
        Ok(())
    }
}
impl ResolveNameFilter {
    /// Returns whether `def` is an acceptable resolution under this filter.
    fn is_match(&self, def: &DefinitionKind) -> bool {
        match (self, def) {
            (ResolveNameFilter::Message, DefinitionKind::Message(_)) => true,
            (ResolveNameFilter::Extension, DefinitionKind::Extension(_)) => true,
            (
                ResolveNameFilter::FieldType,
                DefinitionKind::Message(_) | DefinitionKind::Enum(_),
            ) => true,
            _ => false,
        }
    }
}
impl fmt::Display for ResolveNameFilter {
    /// Human-readable description of the expected definition kind, used in
    /// "invalid type" error messages.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let text = match self {
            ResolveNameFilter::Message => "a message type",
            ResolveNameFilter::Extension => "an extension",
            ResolveNameFilter::FieldType => "a message or enum type",
        };
        f.write_str(text)
    }
}
impl<'a, 'b> ResolveNameResult<'a, 'b> {
    /// Looks up `name` in `names`, classifying the outcome: not found, found
    /// but defined in a file outside `dependencies`, found but of the wrong
    /// kind for `filter`, or found.
    fn new(
        dependencies: &HashSet<FileIndex>,
        names: &'a HashMap<Box<str>, Definition>,
        name: impl Into<Cow<'b, str>>,
        filter: ResolveNameFilter,
    ) -> Self {
        let name = name.into();
        if let Some(def) = names.get(name.as_ref()) {
            if !dependencies.contains(&def.file) {
                ResolveNameResult::NotImported {
                    name,
                    file: def.file,
                }
            } else if !filter.is_match(&def.kind) {
                ResolveNameResult::InvalidType { name, def, filter }
            } else {
                ResolveNameResult::Found { name, def }
            }
        } else {
            ResolveNameResult::NotFound
        }
    }
    /// Detaches the result from the borrowed name (`'b`) by cloning any
    /// borrowed strings into owned `Cow`s.
    fn into_owned(self) -> ResolveNameResult<'a, 'static> {
        match self {
            ResolveNameResult::Found { name, def } => ResolveNameResult::Found {
                name: Cow::Owned(name.into_owned()),
                def,
            },
            ResolveNameResult::InvalidType { name, def, filter } => {
                ResolveNameResult::InvalidType {
                    name: Cow::Owned(name.into_owned()),
                    def,
                    filter,
                }
            }
            ResolveNameResult::NotImported { name, file } => ResolveNameResult::NotImported {
                name: Cow::Owned(name.into_owned()),
                file,
            },
            ResolveNameResult::Shadowed {
                name,
                shadowed_name,
            } => ResolveNameResult::Shadowed {
                name: Cow::Owned(name.into_owned()),
                shadowed_name: Cow::Owned(shadowed_name.into_owned()),
            },
            ResolveNameResult::NotFound => ResolveNameResult::NotFound,
        }
    }
    fn is_found(&self) -> bool {
        matches!(self, ResolveNameResult::Found { .. })
    }
    /// Converts the lookup outcome into a `Result`, building a detailed
    /// `DescriptorErrorKind` (with source labels) for every failure variant.
    #[allow(clippy::result_large_err)]
    fn into_result(
        self,
        orig_name: impl Into<String>,
        files: &[FileDescriptorInner],
        found_file: FileIndex,
        found_path1: &[i32],
        found_path2: &[i32],
    ) -> Result<(Cow<'b, str>, &'a Definition), DescriptorErrorKind> {
        match self {
            ResolveNameResult::Found { name, def } => Ok((name, def)),
            ResolveNameResult::InvalidType { name, def, filter } => {
                Err(DescriptorErrorKind::InvalidType {
                    name: name.into_owned(),
                    expected: filter.to_string(),
                    found: Label::new(
                        files,
                        "found here",
                        found_file,
                        join_path(found_path1, found_path2),
                    ),
                    defined: Label::new(files, "defined here", def.file, def.path.clone()),
                })
            }
            ResolveNameResult::NotImported { name, file } => {
                let root_name = files[found_file as usize].raw.name();
                let dep_name = files[file as usize].raw.name();
                Err(DescriptorErrorKind::NameNotFound {
                    found: Label::new(
                        files,
                        "found here",
                        found_file,
                        join_path(found_path1, found_path2),
                    ),
                    help: Some(format!(
                        "'{name}' is defined in '{dep_name}', which is not imported by '{root_name}'"
                    )),
                    name: name.into_owned(),
                })
            }
            ResolveNameResult::NotFound => Err(DescriptorErrorKind::NameNotFound {
                name: orig_name.into(),
                found: Label::new(
                    files,
                    "found here",
                    found_file,
                    join_path(found_path1, found_path2),
                ),
                help: None,
            }),
            ResolveNameResult::Shadowed { name, shadowed_name } => Err(DescriptorErrorKind::NameShadowed {
                found: Label::new(
                    files,
                    "found here",
                    found_file,
                    join_path(found_path1, found_path2),
                ),
                help: Some(format!(
                    "The innermost scope is searched first in name resolution. Consider using a leading '.' (i.e., '.{name}') to start from the outermost scope.",
                )),
                name: name.into_owned(),
                shadowed_name: shadowed_name.into_owned(),
            }),
        }
    }
}
/// Derives the JSON name of a field from its snake_case proto name:
/// underscores are removed and the character following each underscore is
/// uppercased (e.g. `foo_bar` becomes `fooBar`).
fn to_json_name(name: &str) -> String {
    let mut result = String::with_capacity(name.len());
    let mut capitalize = false;
    for ch in name.chars() {
        match ch {
            '_' => capitalize = true,
            _ if capitalize => {
                result.push(ch.to_ascii_uppercase());
                capitalize = false;
            }
            _ => result.push(ch),
        }
    }
    result
}
/// Looks up `name` among `names`: absolutely when it starts with `.`,
/// otherwise relative to `scope`, restricted to definitions matching `filter`
/// and visible from the files in `dependencies`.
fn resolve_name<'a, 'b>(
    dependencies: &HashSet<FileIndex>,
    names: &'a HashMap<Box<str>, Definition>,
    scope: &str,
    name: &'b str,
    filter: ResolveNameFilter,
) -> ResolveNameResult<'a, 'b> {
    if let Some(absolute) = name.strip_prefix('.') {
        ResolveNameResult::new(dependencies, names, absolute, filter)
    } else if scope.is_empty() {
        ResolveNameResult::new(dependencies, names, name, filter)
    } else {
        resolve_relative_name(dependencies, names, scope, name, filter)
    }
}
/// Resolves a relative name by searching enclosing scopes from innermost to
/// outermost (mirroring protobuf's C++ scoping rules), reporting shadowing
/// when an inner scope matches the first component but not the full name.
fn resolve_relative_name<'a, 'b>(
    dependencies: &HashSet<FileIndex>,
    names: &'a HashMap<Box<str>, Definition>,
    scope: &str,
    relative_name: &'b str,
    filter: ResolveNameFilter,
) -> ResolveNameResult<'a, 'b> {
    let mut err = ResolveNameResult::NotFound;
    let relative_first_part = relative_name.split('.').next().unwrap_or_default();
    for candidate_parent in resolve_relative_candidate_parents(scope) {
        let candidate = match candidate_parent {
            "" => Cow::Borrowed(relative_first_part),
            _ => Cow::Owned(format!("{candidate_parent}.{relative_first_part}")),
        };
        if relative_first_part.len() == relative_name.len() {
            // Looking up a simple name e.g. `Foo`
            let res = ResolveNameResult::new(dependencies, names, candidate, filter);
            if res.is_found() {
                return res.into_owned();
            } else if matches!(err, ResolveNameResult::NotFound) {
                // Remember the first non-NotFound failure for better errors.
                err = res;
            }
        } else {
            // Looking up a name including a namespace e.g. `foo.Foo`. First determine the scope using the first component of the name.
            match names.get(candidate.as_ref()) {
                Some(def) if def.kind.is_parent() => {
                    let candidate_full = match candidate_parent {
                        "" => Cow::Borrowed(relative_name),
                        _ => Cow::Owned(format!("{candidate_parent}.{relative_name}")),
                    };
                    let res =
                        ResolveNameResult::new(dependencies, names, candidate_full.clone(), filter);
                    if matches!(res, ResolveNameResult::NotFound) {
                        // The first component matched this scope, so the
                        // search stops here: the full name is shadowed.
                        return ResolveNameResult::Shadowed {
                            name: Cow::Borrowed(relative_name),
                            shadowed_name: candidate_full,
                        };
                    } else {
                        return res;
                    }
                }
                _ => continue,
            }
        }
    }
    err.into_owned()
}
/// Yields the scopes to search when resolving a relative name, from the
/// innermost scope outwards, ending with the root scope `""`. For example,
/// `"a.b"` yields `"a.b"`, `"a"`, `""`.
fn resolve_relative_candidate_parents(scope: &str) -> impl Iterator<Item = &str> {
    let ancestors = scope.rmatch_indices('.').map(move |(i, _)| &scope[..i]);
    iter::once(scope).chain(ancestors).chain(iter::once(""))
}
/// Concatenates two descriptor paths into a single boxed slice.
fn join_path(path1: &[i32], path2: &[i32]) -> Box<[i32]> {
    path1.iter().chain(path2.iter()).copied().collect()
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/src/descriptor/build/resolve.rs | prost-reflect/src/descriptor/build/resolve.rs | use std::collections::HashSet;
use prost::bytes::Bytes;
use crate::{
descriptor::{
build::{
join_path, resolve_name, to_json_name,
visit::{visit, Visitor},
DescriptorPoolOffsets, ResolveNameFilter,
},
error::{DescriptorError, DescriptorErrorKind, Label},
find_enum_proto, find_message_proto, tag, to_index,
types::{
field_descriptor_proto, DescriptorProto, EnumValueDescriptorProto,
FieldDescriptorProto, FileDescriptorProto, MethodDescriptorProto,
ServiceDescriptorProto,
},
Definition, DefinitionKind, DescriptorPoolInner, EnumIndex, EnumValueIndex,
ExtensionDescriptorInner, ExtensionIndex, FieldDescriptorInner, FieldIndex, FileIndex,
Identity, KindIndex, MessageIndex, MethodDescriptorInner, MethodIndex, OneofIndex,
ServiceDescriptorInner, ServiceIndex, RESERVED_MESSAGE_FIELD_NUMBERS,
VALID_MESSAGE_FIELD_NUMBERS,
},
Cardinality, Syntax, Value,
};
impl DescriptorPoolInner {
    /// Name-resolution build pass: visits every new file with a
    /// [`ResolveVisitor`], accumulating all errors before reporting them
    /// together.
    pub(super) fn resolve_names(
        &mut self,
        offsets: DescriptorPoolOffsets,
        files: &[FileDescriptorProto],
    ) -> Result<(), DescriptorError> {
        let mut visitor = ResolveVisitor {
            pool: self,
            errors: Vec::new(),
        };
        visit(offsets, files, &mut visitor);
        match visitor.errors.is_empty() {
            true => Ok(()),
            false => Err(DescriptorError::new(visitor.errors)),
        }
    }
}
/// Visitor implementing the name-resolution pass; collects every error rather
/// than stopping at the first.
struct ResolveVisitor<'a> {
    pool: &'a mut DescriptorPoolInner,
    errors: Vec<DescriptorErrorKind>,
}
impl Visitor for ResolveVisitor<'_> {
    /// Resolves the imports of `file`: records direct dependency indices,
    /// computes the transitive closure of publicly-imported files, and
    /// validates the `public_dependency`/`weak_dependency` index lists.
    fn visit_file(&mut self, path: &[i32], index: FileIndex, file: &FileDescriptorProto) {
        // A file can always refer to its own definitions.
        let mut transitive_dependencies = HashSet::with_capacity(file.dependency.len() + 1);
        transitive_dependencies.insert(index);
        for (i, dependency) in file.dependency.iter().enumerate() {
            if let Some(&dependency_index) = self.pool.file_names.get(dependency.as_str()) {
                self.pool.files[index as usize]
                    .dependencies
                    .push(dependency_index);
                transitive_dependencies.insert(dependency_index);
                self.resolve_public_dependencies(&mut transitive_dependencies, dependency_index);
            } else {
                self.errors.push(DescriptorErrorKind::FileNotFound {
                    name: dependency.clone(),
                    found: Label::new(
                        &self.pool.files,
                        "found here",
                        index,
                        join_path(path, &[tag::file::DEPENDENCY, i as i32]),
                    ),
                });
            }
        }
        self.pool.files[index as usize].transitive_dependencies = transitive_dependencies;
        // `public_dependency` and `weak_dependency` entries are indices into
        // the `dependency` list and must be in range.
        for &public_dependency in &file.public_dependency {
            if !matches!(usize::try_from(public_dependency), Ok(i) if i < file.dependency.len()) {
                self.errors.push(DescriptorErrorKind::InvalidImportIndex);
            }
        }
        for &weak_dependency in &file.weak_dependency {
            if !matches!(usize::try_from(weak_dependency), Ok(i) if i < file.dependency.len()) {
                self.errors.push(DescriptorErrorKind::InvalidImportIndex);
            }
        }
    }
    /// Resolves a message field: validates its number, resolves its type and
    /// JSON name, computes derived flags (packed, presence), parses its
    /// default, links it into its oneof, and checks for duplicate
    /// numbers/names/JSON names within the message.
    fn visit_field(
        &mut self,
        path: &[i32],
        full_name: &str,
        file: FileIndex,
        message: MessageIndex,
        index: FieldIndex,
        field: &FieldDescriptorProto,
    ) {
        debug_assert_eq!(
            to_index(self.pool.messages[message as usize].fields.len()),
            index
        );
        let syntax = self.pool.files[file as usize].syntax;
        self.check_field_number(message, field, file, path);
        let cardinality = match field.label() {
            field_descriptor_proto::Label::Optional => Cardinality::Optional,
            field_descriptor_proto::Label::Required => Cardinality::Required,
            field_descriptor_proto::Label::Repeated => Cardinality::Repeated,
        };
        let kind =
            self.resolve_field_type(field.r#type(), field.type_name(), full_name, file, path);
        let json_name: Box<str> = self.resolve_field_json_name(field, file, path).into();
        // Repeated packable scalars default to packed under proto3; otherwise
        // the explicit `packed` option decides.
        let is_packed = cardinality == Cardinality::Repeated
            && kind.is_some_and(|k| k.is_packable())
            && (field
                .options
                .as_ref()
                .map_or(syntax == Syntax::Proto3, |o| o.value.packed()));
        let supports_presence = field.proto3_optional()
            || field.oneof_index.is_some()
            || (cardinality != Cardinality::Repeated
                && (kind.is_some_and(|k| k.is_message()) || syntax == Syntax::Proto2));
        let default = kind.and_then(|kind| {
            self.parse_field_default_value(kind, field.default_value.as_deref(), file, path)
        });
        let message = &mut self.pool.messages[message as usize];
        // Link the field into its containing oneof, if the index is valid.
        let oneof = field.oneof_index.and_then(|oneof_index| {
            if oneof_index < 0 || oneof_index as usize >= message.oneofs.len() {
                self.errors.push(DescriptorErrorKind::InvalidOneofIndex);
                None
            } else {
                message.oneofs[oneof_index as usize].fields.push(index);
                Some(oneof_index as OneofIndex)
            }
        });
        message.fields.push(FieldDescriptorInner {
            id: Identity::new(file, path, full_name, field.name()),
            number: field.number() as u32,
            // Fall back to a placeholder kind on resolution failure; the
            // error has already been recorded.
            kind: kind.unwrap_or(KindIndex::Double),
            oneof,
            is_packed,
            supports_presence,
            json_name: json_name.clone(),
            cardinality,
            default,
        });
        if let Some(existing) = message.field_numbers.insert(field.number() as u32, index) {
            self.errors.push(DescriptorErrorKind::DuplicateFieldNumber {
                number: field.number() as u32,
                first: Label::new(
                    &self.pool.files,
                    "first defined here",
                    file,
                    join_path(
                        &message.fields[existing as usize].id.path,
                        &[tag::field::NUMBER],
                    ),
                ),
                second: Label::new(
                    &self.pool.files,
                    "defined again here",
                    file,
                    join_path(path, &[tag::field::NUMBER]),
                ),
            });
        }
        if let Some(existing) = message.field_names.insert(field.name().into(), index) {
            self.errors.push(DescriptorErrorKind::DuplicateName {
                name: full_name.to_owned(),
                first: Label::new(
                    &self.pool.files,
                    "first defined here",
                    file,
                    join_path(
                        &message.fields[existing as usize].id.path,
                        &[tag::field::NAME],
                    ),
                ),
                second: Label::new(
                    &self.pool.files,
                    "defined again here",
                    file,
                    join_path(path, &[tag::field::NAME]),
                ),
            });
        }
        if let Some(existing) = message.field_json_names.insert(json_name, index) {
            self.errors
                .push(DescriptorErrorKind::DuplicateFieldJsonName {
                    name: field.json_name().to_owned(),
                    first: Label::new(
                        &self.pool.files,
                        "first defined here",
                        file,
                        join_path(
                            &message.fields[existing as usize].id.path,
                            &[tag::field::NAME],
                        ),
                    ),
                    second: Label::new(
                        &self.pool.files,
                        "defined again here",
                        file,
                        join_path(path, &[tag::field::NAME]),
                    ),
                });
        }
    }
fn visit_service(
&mut self,
path: &[i32],
full_name: &str,
file: FileIndex,
index: ServiceIndex,
service: &ServiceDescriptorProto,
) {
debug_assert_eq!(to_index(self.pool.services.len()), index);
self.pool.services.push(ServiceDescriptorInner {
id: Identity::new(file, path, full_name, service.name()),
methods: Vec::with_capacity(service.method.len()),
});
}
    /// Resolves the input and output message types of a method and records it
    /// on its owning service. Unresolvable types are reported through
    /// `self.errors` and stored as `MessageIndex::MAX` placeholders.
    fn visit_method(
        &mut self,
        path: &[i32],
        full_name: &str,
        file: FileIndex,
        service: ServiceIndex,
        index: MethodIndex,
        method: &MethodDescriptorProto,
    ) {
        debug_assert_eq!(
            to_index(self.pool.services[service as usize].methods.len()),
            index
        );
        let input = self
            .find_message(
                full_name,
                method.input_type(),
                file,
                path,
                tag::method::INPUT_TYPE,
            )
            .unwrap_or(MessageIndex::MAX);
        let output = self
            .find_message(
                full_name,
                method.output_type(),
                file,
                path,
                tag::method::OUTPUT_TYPE,
            )
            .unwrap_or(MessageIndex::MAX);
        self.pool.services[service as usize]
            .methods
            .push(MethodDescriptorInner {
                id: Identity::new(file, path, full_name, method.name()),
                input,
                output,
            });
    }
    /// Resolves an enum value: validates its number against reserved ranges,
    /// inserts it into the (sorted) number index — reporting duplicates
    /// unless `allow_alias` is set — and checks for duplicate value names.
    fn visit_enum_value(
        &mut self,
        path: &[i32],
        full_name: &str,
        file: FileIndex,
        enum_index: EnumIndex,
        index: EnumValueIndex,
        value: &EnumValueDescriptorProto,
    ) {
        self.check_enum_number(enum_index, value, file, path);
        let enum_ = &mut self.pool.enums[enum_index as usize];
        // `value_numbers` is kept sorted by number so lookups can use binary
        // search; find the insertion point (or the clashing entry).
        let value_numbers_index = match enum_
            .value_numbers
            .binary_search_by(|(number, _)| number.cmp(&value.number()))
        {
            Ok(existing_index) => {
                if !enum_.allow_alias {
                    let existing = enum_.value_numbers[existing_index].1;
                    self.errors.push(DescriptorErrorKind::DuplicateEnumNumber {
                        number: value.number(),
                        first: Label::new(
                            &self.pool.files,
                            "first defined here",
                            file,
                            join_path(
                                &enum_.values[existing as usize].id.path,
                                &[tag::enum_value::NUMBER],
                            ),
                        ),
                        second: Label::new(
                            &self.pool.files,
                            "defined again here",
                            file,
                            join_path(path, &[tag::enum_value::NUMBER]),
                        ),
                    });
                }
                existing_index
            }
            Err(index) => index,
        };
        enum_
            .value_numbers
            .insert(value_numbers_index, (value.number(), index));
        if let Some(existing) = enum_.value_names.insert(value.name().into(), index) {
            self.errors.push(DescriptorErrorKind::DuplicateName {
                name: full_name.to_owned(),
                first: Label::new(
                    &self.pool.files,
                    "first defined here",
                    file,
                    join_path(
                        &enum_.values[existing as usize].id.path,
                        &[tag::enum_value::NAME],
                    ),
                ),
                second: Label::new(
                    &self.pool.files,
                    "defined again here",
                    file,
                    join_path(path, &[tag::enum_value::NAME]),
                ),
            });
        }
    }
    /// Resolves an extension field: resolves its extendee and type, validates
    /// its number against the extendee's ranges, and registers the extension
    /// in the pool and on the extended message.
    fn visit_extension(
        &mut self,
        path: &[i32],
        full_name: &str,
        file: FileIndex,
        parent_message: Option<MessageIndex>,
        index: ExtensionIndex,
        extension: &FieldDescriptorProto,
    ) {
        debug_assert_eq!(to_index(self.pool.extensions.len()), index);
        let extendee = self.find_message(
            full_name,
            extension.extendee(),
            file,
            path,
            tag::field::EXTENDEE,
        );
        if let Some(extendee) = extendee {
            self.pool.messages[extendee as usize].extensions.push(index);
            self.check_field_number(extendee, extension, file, path);
        }
        let syntax = self.pool.files[file as usize].syntax;
        let cardinality = match extension.label() {
            field_descriptor_proto::Label::Optional => Cardinality::Optional,
            field_descriptor_proto::Label::Required => Cardinality::Required,
            field_descriptor_proto::Label::Repeated => Cardinality::Repeated,
        };
        let kind = self.resolve_field_type(
            extension.r#type(),
            extension.type_name(),
            full_name,
            file,
            path,
        );
        // Called for its side effect of caching the derived JSON name; the
        // stored json_name below uses the extension's full name instead.
        self.resolve_field_json_name(extension, file, path);
        let is_packed = cardinality == Cardinality::Repeated
            && kind.is_some_and(|k| k.is_packable())
            && (extension
                .options
                .as_ref()
                .map_or(syntax == Syntax::Proto3, |o| o.value.packed()));
        let default = kind.and_then(|kind| {
            self.parse_field_default_value(kind, extension.default_value.as_deref(), file, path)
        });
        self.pool.extensions.push(ExtensionDescriptorInner {
            id: Identity::new(file, path, full_name, extension.name()),
            parent: parent_message,
            number: extension.number() as u32,
            json_name: format!("[{full_name}]").into(),
            extendee: extendee.unwrap_or(MessageIndex::MAX),
            kind: kind.unwrap_or(KindIndex::Double),
            is_packed,
            cardinality,
            default,
        });
    }
}
impl ResolveVisitor<'_> {
fn resolve_public_dependencies(&self, dependencies: &mut HashSet<FileIndex>, index: FileIndex) {
let file = &self.pool.files[index as usize];
for (i, dependency) in file.raw.dependency.iter().enumerate() {
if let Some(&dependency_index) = self.pool.file_names.get(dependency.as_str()) {
if file.raw.public_dependency.contains(&(i as i32))
&& !dependencies.insert(dependency_index)
{
self.resolve_public_dependencies(dependencies, dependency_index);
}
}
}
}
    /// Validates the number of a field or extension against the overall valid
    /// range and the reserved/extension ranges of its containing message.
    fn check_field_number(
        &mut self,
        message: MessageIndex,
        field: &FieldDescriptorProto,
        file: FileIndex,
        path: &[i32],
    ) {
        if !VALID_MESSAGE_FIELD_NUMBERS.contains(&field.number())
            || RESERVED_MESSAGE_FIELD_NUMBERS.contains(&field.number())
        {
            self.errors.push(DescriptorErrorKind::InvalidFieldNumber {
                number: field.number(),
                found: Label::new(
                    &self.pool.files,
                    "defined here",
                    file,
                    join_path(path, &[tag::field::NUMBER]),
                ),
            });
        }
        let message = &self.pool.messages[message as usize];
        let message_proto = find_message_proto(
            &self.pool.files[message.id.file as usize].raw,
            &message.id.path,
        );
        // Message reserved ranges are end-exclusive.
        for (i, range) in message_proto.reserved_range.iter().enumerate() {
            if range.start() <= field.number() && field.number() < range.end() {
                self.errors
                    .push(DescriptorErrorKind::FieldNumberInReservedRange {
                        number: field.number(),
                        range: range.start()..range.end(),
                        defined: Label::new(
                            &self.pool.files,
                            "reserved range defined here",
                            message.id.file,
                            join_path(&message.id.path, &[tag::message::RESERVED_RANGE, i as i32]),
                        ),
                        found: Label::new(
                            &self.pool.files,
                            "defined here",
                            file,
                            join_path(path, &[tag::field::NUMBER]),
                        ),
                    });
            }
        }
        let extension_range = message_proto
            .extension_range
            .iter()
            .enumerate()
            .find(|(_, range)| range.start() <= field.number() && field.number() < range.end());
        // Ordinary fields (no extendee) must not use numbers inside an
        // extension range; extensions must use numbers inside one.
        match (&field.extendee, extension_range) {
            (None, None) | (Some(_), Some(_)) => (),
            (None, Some((i, range))) => {
                self.errors
                    .push(DescriptorErrorKind::FieldNumberInExtensionRange {
                        number: field.number(),
                        range: range.start()..range.end(),
                        defined: Label::new(
                            &self.pool.files,
                            "extension range defined here",
                            message.id.file,
                            join_path(&message.id.path, &[tag::message::EXTENSION_RANGE, i as i32]),
                        ),
                        found: Label::new(
                            &self.pool.files,
                            "defined here",
                            file,
                            join_path(path, &[tag::field::NUMBER]),
                        ),
                    });
            }
            (Some(_), None) => {
                self.errors
                    .push(DescriptorErrorKind::ExtensionNumberOutOfRange {
                        number: field.number(),
                        message: message.id.full_name().to_owned(),
                        found: Label::new(
                            &self.pool.files,
                            "defined here",
                            file,
                            join_path(path, &[tag::field::NUMBER]),
                        ),
                    });
            }
        }
    }
    /// Validates an enum value's number against the enum's reserved ranges,
    /// which — unlike message reserved ranges — are end-inclusive (note the
    /// `<=` comparison and the `..=` range in the error).
    fn check_enum_number(
        &mut self,
        enum_: EnumIndex,
        value: &EnumValueDescriptorProto,
        file: FileIndex,
        path: &[i32],
    ) {
        let enum_ = &self.pool.enums[enum_ as usize];
        let enum_proto =
            find_enum_proto(&self.pool.files[enum_.id.file as usize].raw, &enum_.id.path);
        for (i, range) in enum_proto.reserved_range.iter().enumerate() {
            if range.start() <= value.number() && value.number() <= range.end() {
                self.errors
                    .push(DescriptorErrorKind::EnumNumberInReservedRange {
                        number: value.number(),
                        range: range.start()..=range.end(),
                        defined: Label::new(
                            &self.pool.files,
                            "reserved range defined here",
                            enum_.id.file,
                            join_path(&enum_.id.path, &[tag::enum_::RESERVED_RANGE, i as i32]),
                        ),
                        found: Label::new(
                            &self.pool.files,
                            "defined here",
                            file,
                            join_path(path, &[tag::field::NUMBER]),
                        ),
                    });
            }
        }
    }
    /// Returns the JSON name of `field`, deriving it from the field name and
    /// caching it in the stored raw proto when it was not set explicitly.
    fn resolve_field_json_name<'b>(
        &'b mut self,
        field: &'b FieldDescriptorProto,
        file: FileIndex,
        path: &[i32],
    ) -> &'b str {
        if let Some(json_name) = &field.json_name {
            json_name
        } else {
            // Write the derived name back into the pool's copy of the file so
            // it is preserved for later lookups.
            let field = find_file_field_proto_mut(&mut self.pool.files[file as usize].raw, path);
            field.json_name.insert(to_json_name(field.name()))
        }
    }
    /// Determines the kind of a field: scalar kinds map directly from the
    /// proto type, while message/group/enum kinds are resolved by looking up
    /// `ty_name` relative to `scope`. Returns `None` (after recording an
    /// error) on failure.
    fn resolve_field_type(
        &mut self,
        ty: field_descriptor_proto::Type,
        ty_name: &str,
        scope: &str,
        file: FileIndex,
        path: &[i32],
    ) -> Option<KindIndex> {
        if ty_name.is_empty() {
            match ty {
                field_descriptor_proto::Type::Double => Some(KindIndex::Double),
                field_descriptor_proto::Type::Float => Some(KindIndex::Float),
                field_descriptor_proto::Type::Int64 => Some(KindIndex::Int64),
                field_descriptor_proto::Type::Uint64 => Some(KindIndex::Uint64),
                field_descriptor_proto::Type::Int32 => Some(KindIndex::Int32),
                field_descriptor_proto::Type::Fixed64 => Some(KindIndex::Fixed64),
                field_descriptor_proto::Type::Fixed32 => Some(KindIndex::Fixed32),
                field_descriptor_proto::Type::Bool => Some(KindIndex::Bool),
                field_descriptor_proto::Type::String => Some(KindIndex::String),
                field_descriptor_proto::Type::Bytes => Some(KindIndex::Bytes),
                field_descriptor_proto::Type::Uint32 => Some(KindIndex::Uint32),
                field_descriptor_proto::Type::Sfixed32 => Some(KindIndex::Sfixed32),
                field_descriptor_proto::Type::Sfixed64 => Some(KindIndex::Sfixed64),
                field_descriptor_proto::Type::Sint32 => Some(KindIndex::Sint32),
                field_descriptor_proto::Type::Sint64 => Some(KindIndex::Sint64),
                // Non-scalar types require a `type_name` to resolve.
                field_descriptor_proto::Type::Group
                | field_descriptor_proto::Type::Message
                | field_descriptor_proto::Type::Enum => {
                    self.add_missing_required_field_error(
                        file,
                        join_path(path, &[tag::field::TYPE_NAME]),
                    );
                    None
                }
            }
        } else {
            let def = self.resolve_name(
                scope,
                ty_name,
                file,
                path,
                tag::field::TYPE_NAME,
                ResolveNameFilter::FieldType,
            )?;
            match def.kind {
                DefinitionKind::Message(message) => {
                    if ty == field_descriptor_proto::Type::Group {
                        Some(KindIndex::Group(message))
                    } else {
                        Some(KindIndex::Message(message))
                    }
                }
                DefinitionKind::Enum(enum_) => Some(KindIndex::Enum(enum_)),
                // `ResolveNameFilter::FieldType` only admits messages and enums.
                _ => unreachable!(),
            }
        }
    }
    /// Parses a field's textual `default_value` into a typed [`Value`].
    ///
    /// Scalars are parsed directly, enum defaults are resolved by value name,
    /// and message/group kinds are rejected (they cannot have explicit
    /// defaults). Returns `None` (after recording an error) on failure or
    /// when no default is set.
    fn parse_field_default_value(
        &mut self,
        kind: KindIndex,
        default_value: Option<&str>,
        file: FileIndex,
        path: &[i32],
    ) -> Option<Value> {
        let default_value = default_value?;
        match kind {
            KindIndex::Double
            | KindIndex::Float
            | KindIndex::Int32
            | KindIndex::Int64
            | KindIndex::Uint32
            | KindIndex::Uint64
            | KindIndex::Sint32
            | KindIndex::Sint64
            | KindIndex::Fixed32
            | KindIndex::Fixed64
            | KindIndex::Sfixed32
            | KindIndex::Sfixed64
            | KindIndex::Bool
            | KindIndex::String
            | KindIndex::Bytes => match parse_simple_value(kind, default_value) {
                Ok(value) => Some(value),
                Err(_) => {
                    self.errors.push(DescriptorErrorKind::InvalidFieldDefault {
                        value: default_value.to_owned(),
                        kind: format!("{kind:?}"),
                        found: Label::new(
                            &self.pool.files,
                            "found here",
                            file,
                            join_path(path, &[tag::field::DEFAULT_VALUE]),
                        ),
                    });
                    None
                }
            },
            KindIndex::Enum(enum_) => {
                let enum_ = &self.pool.enums[enum_ as usize];
                if let Some(value) = enum_.values.iter().find(|v| v.id.name() == default_value) {
                    Some(Value::EnumNumber(value.number))
                } else {
                    self.errors.push(DescriptorErrorKind::InvalidFieldDefault {
                        value: default_value.to_owned(),
                        kind: enum_.id.full_name().to_owned(),
                        found: Label::new(
                            &self.pool.files,
                            "found here",
                            file,
                            join_path(path, &[tag::field::DEFAULT_VALUE]),
                        ),
                    });
                    None
                }
            }
            _ => {
                self.errors.push(DescriptorErrorKind::InvalidFieldDefault {
                    value: default_value.to_owned(),
                    kind: "message type".to_owned(),
                    found: Label::new(
                        &self.pool.files,
                        "found here",
                        file,
                        join_path(path, &[tag::field::DEFAULT_VALUE]),
                    ),
                });
                None
            }
        }
    }
fn find_message(
&mut self,
scope: &str,
name: &str,
file: FileIndex,
path1: &[i32],
path2: i32,
) -> Option<MessageIndex> {
let def = self.resolve_name(scope, name, file, path1, path2, ResolveNameFilter::Message)?;
match def.kind {
DefinitionKind::Message(message) => Some(message),
_ => unreachable!(),
}
}
    /// Resolves `name` through the free function `resolve_name` and, on
    /// success, rewrites the stored `type_name` to its fully-qualified form
    /// (with a leading `.`) and fixes the raw field's type to message or enum
    /// accordingly. Failures are recorded via `self.errors`.
    fn resolve_name(
        &mut self,
        scope: &str,
        name: &str,
        file: FileIndex,
        path: &[i32],
        tag: i32,
        filter: ResolveNameFilter,
    ) -> Option<&Definition> {
        let (type_name, def) = match resolve_name(
            &self.pool.files[file as usize].transitive_dependencies,
            &self.pool.names,
            scope,
            name,
            filter,
        )
        .into_result(name, &self.pool.files, file, path, &[tag])
        {
            Ok((type_name, def)) => (type_name, def),
            Err(err) => {
                self.errors.push(err);
                return None;
            }
        };
        let ty = if matches!(
            def,
            Definition {
                kind: DefinitionKind::Message(_),
                ..
            }
        ) {
            field_descriptor_proto::Type::Message
        } else {
            field_descriptor_proto::Type::Enum
        };
        set_type_name(
            &mut self.pool.files[file as usize].raw,
            path,
            tag,
            format!(".{type_name}"),
            ty,
        );
        Some(def)
    }
fn add_missing_required_field_error(&mut self, file: FileIndex, path: Box<[i32]>) {
self.errors.push(DescriptorErrorKind::MissingRequiredField {
label: Label::new(&self.pool.files, "found here", file, path),
});
}
}
fn parse_simple_value(
kind: KindIndex,
value: &str,
) -> Result<Value, Box<dyn std::error::Error + Send + Sync>> {
let value = match kind {
KindIndex::Double => value.parse().map(Value::F64)?,
KindIndex::Float => value.parse().map(Value::F32)?,
KindIndex::Int32 | KindIndex::Sint32 | KindIndex::Sfixed32 => {
value.parse().map(Value::I32)?
}
KindIndex::Int64 | KindIndex::Sint64 | KindIndex::Sfixed64 => {
value.parse().map(Value::I64)?
}
KindIndex::Uint32 | KindIndex::Fixed32 => value.parse().map(Value::U32)?,
KindIndex::Uint64 | KindIndex::Fixed64 => value.parse().map(Value::U64)?,
KindIndex::Bool => value.parse().map(Value::Bool)?,
KindIndex::String => Value::String(value.to_owned()),
KindIndex::Bytes => unescape_c_escape_string(value).map(Value::Bytes)?,
KindIndex::Enum(_) | KindIndex::Message(_) | KindIndex::Group(_) => unreachable!(),
};
Ok(value)
}
/// From https://github.com/tokio-rs/prost/blob/c3b7037a7f2c56cef327b41ca32a8c4e9ce5a41c/prost-build/src/code_generator.rs#L887
/// Based on [`google::protobuf::UnescapeCEscapeString`][1]
/// [1]: https://github.com/google/protobuf/blob/3.3.x/src/google/protobuf/stubs/strutil.cc#L312-L322
fn unescape_c_escape_string(s: &str) -> Result<Bytes, &'static str> {
let src = s.as_bytes();
let len = src.len();
let mut dst = Vec::new();
let mut p = 0;
while p < len {
if src[p] != b'\\' {
dst.push(src[p]);
p += 1;
} else {
p += 1;
if p == len {
return Err("missing escape character");
}
match src[p] {
b'a' => {
dst.push(0x07);
p += 1;
}
b'b' => {
dst.push(0x08);
p += 1;
}
b'f' => {
dst.push(0x0C);
p += 1;
}
b'n' => {
dst.push(0x0A);
p += 1;
}
b'r' => {
dst.push(0x0D);
p += 1;
}
b't' => {
dst.push(0x09);
p += 1;
}
b'v' => {
dst.push(0x0B);
p += 1;
}
b'\\' => {
dst.push(0x5C);
p += 1;
}
b'?' => {
dst.push(0x3F);
p += 1;
}
b'\'' => {
dst.push(0x27);
p += 1;
}
b'"' => {
dst.push(0x22);
p += 1;
}
b'0'..=b'7' => {
let mut octal = 0;
for _ in 0..3 {
if p < len && src[p] >= b'0' && src[p] <= b'7' {
octal = octal * 8 + (src[p] - b'0');
p += 1;
} else {
break;
}
}
dst.push(octal);
}
b'x' | b'X' => {
if p + 3 > len {
return Err("hex escape must contain two characters");
}
match u8::from_str_radix(&s[p + 1..p + 3], 16) {
Ok(b) => dst.push(b),
_ => return Err("invalid hex escape"),
}
p += 3;
}
_ => return Err("invalid escape character"),
}
}
}
Ok(dst.into())
}
fn set_type_name(
file: &mut FileDescriptorProto,
path: &[i32],
tag: i32,
type_name: String,
ty: field_descriptor_proto::Type,
) {
match path[0] {
tag::file::SERVICE => {
debug_assert_eq!(path.len(), 4);
let service = &mut file.service[path[1] as usize];
debug_assert_eq!(path[2], tag::service::METHOD);
let method = &mut service.method[path[3] as usize];
match tag {
tag::method::INPUT_TYPE => method.input_type = Some(type_name),
tag::method::OUTPUT_TYPE => method.output_type = Some(type_name),
p => panic!("unknown path element {p}"),
}
}
tag::file::MESSAGE_TYPE | tag::file::EXTENSION => {
let field = find_file_field_proto_mut(file, path);
match tag {
tag::field::TYPE_NAME => {
field.type_name = Some(type_name);
if field.r#type() != field_descriptor_proto::Type::Group {
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | true |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect/tests/main.rs | prost-reflect/tests/main.rs | use std::{
env, fs,
path::{Path, PathBuf},
};
use insta::assert_yaml_snapshot;
use miette::JSONReportHandler;
use prost::Message;
use prost_reflect::{DescriptorError, DescriptorPool, DynamicMessage, ReflectMessage};
use prost_types::FileDescriptorSet;
fn test_data_dir() -> PathBuf {
PathBuf::from(env::var_os("CARGO_MANIFEST_DIR").unwrap()).join("tests/data")
}
fn read_file_descriptor_set(path: impl AsRef<Path>) -> DynamicMessage {
let yaml_bytes = fs::read(test_data_dir().join(path)).unwrap();
let deserializer = serde_yaml::Deserializer::from_slice(&yaml_bytes);
DynamicMessage::deserialize(FileDescriptorSet::default().descriptor(), deserializer).unwrap()
}
fn check(name: &str, add_wkt: bool) -> Result<DescriptorPool, DescriptorError> {
let input = read_file_descriptor_set(format!("{}.yml", name));
let proto_bytes = input.encode_to_vec();
let mut pool = if add_wkt {
FileDescriptorSet::default()
.descriptor()
.parent_pool()
.clone()
} else {
DescriptorPool::new()
};
pool.decode_file_descriptor_set(proto_bytes.as_slice())?;
Ok(pool)
}
fn check_ok(name: &str, add_wkt: bool) {
let pool = check(name, add_wkt).unwrap();
let set_desc = pool
.get_message_by_name("google.protobuf.FileDescriptorSet")
.unwrap_or_else(|| FileDescriptorSet::default().descriptor());
let mut actual = DynamicMessage::decode(set_desc, pool.encode_to_vec().as_slice()).unwrap();
if add_wkt {
actual
.get_field_by_name_mut("file")
.unwrap()
.as_list_mut()
.unwrap()
.retain(|f| {
!f.as_message()
.unwrap()
.get_field_by_name("package")
.unwrap()
.as_str()
.unwrap()
.starts_with("google.protobuf")
});
}
insta::with_settings!({ sort_maps => true }, {
assert_yaml_snapshot!(name, actual);
});
}
fn check_err(name: &str, add_wkt: bool) {
let actual_err = check(name, add_wkt).unwrap_err();
let mut actual_json = String::new();
JSONReportHandler::new()
.render_report(&mut actual_json, &actual_err)
.unwrap();
let actual = serde_json::from_str::<serde_json::Value>(&actual_json).unwrap();
insta::with_settings!({ sort_maps => true }, {
assert_yaml_snapshot!(name, actual);
});
}
macro_rules! check_ok {
($name:ident) => {
#[test]
fn $name() {
check_ok(stringify!($name), false);
}
};
($name:ident, add_wkt: true) => {
#[test]
fn $name() {
check_ok(stringify!($name), true);
}
};
}
macro_rules! check_err {
($name:ident) => {
#[test]
fn $name() {
check_err(stringify!($name), false);
}
};
($name:ident, add_wkt: true) => {
#[test]
fn $name() {
check_err(stringify!($name), true);
}
};
}
check_err!(name_conflict_in_imported_files);
check_err!(name_conflict_with_import);
check_err!(name_conflict_package1);
check_err!(name_conflict_package2);
check_ok!(name_conflict_package3);
check_err!(name_conflict_field_camel_case1);
check_err!(name_conflict_field_camel_case2);
check_ok!(name_conflict_field_camel_case3);
check_err!(name_conflict1);
check_err!(name_conflict2);
check_err!(name_conflict3);
check_err!(invalid_message_number1);
check_err!(invalid_message_number2);
check_err!(generate_map_entry_message_name_conflict);
check_err!(generate_group_message_name_conflict);
check_err!(generate_synthetic_oneof_name_conflict);
check_err!(invalid_service_type1);
check_err!(invalid_service_type2);
check_err!(invalid_service_type3);
check_err!(name_resolution1);
check_err!(name_resolution2);
check_ok!(name_resolution3);
check_err!(name_resolution4);
check_ok!(name_resolution5);
check_ok!(name_resolution6);
check_err!(name_collision1);
check_err!(name_collision2);
check_err!(name_collision3);
check_err!(name_collision4);
check_err!(name_collision5);
check_err!(field_default_value1);
check_ok!(field_default_value2);
check_ok!(field_set_json_name);
check_err!(enum_field_invalid_default1);
check_err!(enum_field_invalid_default2);
check_err!(enum_field_invalid_default3);
check_err!(enum_field_invalid_default4);
check_ok!(enum_field_invalid_default5);
check_err!(enum_field_invalid_default6);
check_ok!(enum_field_invalid_default7);
check_err!(enum_field_invalid_default8);
check_ok!(enum_field_invalid_default9);
check_err!(field_default_invalid_type1);
check_err!(field_default_invalid_type2);
check_err!(field_default_invalid_type3);
check_err!(message_field_duplicate_number1);
check_err!(message_field_duplicate_number2);
check_err!(message_reserved_range_overlap_with_field1);
check_err!(message_reserved_range_overlap_with_field2);
check_ok!(message_reserved_range_message_set1);
check_ok!(message_reserved_range_message_set2);
check_ok!(extend_group_field);
check_err!(extend_field_number_not_in_extensions1);
check_err!(extend_field_number_not_in_extensions2);
check_ok!(oneof_group_field);
check_err!(enum_reserved_range_overlap_with_value1);
check_err!(enum_reserved_range_overlap_with_value2);
check_err!(enum_reserved_range_overlap_with_value3);
check_err!(enum_duplicate_number1);
check_err!(enum_duplicate_number2);
check_ok!(enum_duplicate_number3);
check_ok!(enum_default1);
check_err!(enum_default2);
check_ok!(enum_default3);
check_err!(option_unknown_field);
check_err!(option_unknown_extension);
check_err!(option_extension_dependency_not_imported, add_wkt: true);
check_ok!(option_extension_dependency_transitive, add_wkt: true);
check_err!(option_extension_wrong_extendee, add_wkt: true);
check_err!(option_extension_invalid_type);
check_err!(option_already_set);
check_ok!(option_map_entry_set_explicitly);
check_ok!(option_resolution1, add_wkt: true);
check_ok!(option_resolution2, add_wkt: true);
check_ok!(option_resolution3, add_wkt: true);
check_ok!(option_resolution4, add_wkt: true);
check_ok!(option_resolution5, add_wkt: true);
check_ok!(option_resolution6, add_wkt: true);
check_ok!(option_resolution7, add_wkt: true);
check_ok!(option_resolution8, add_wkt: true);
check_ok!(option_resolution9, add_wkt: true);
check_ok!(option_resolution10, add_wkt: true);
check_ok!(option_resolution11, add_wkt: true);
check_ok!(option_resolution12, add_wkt: true);
check_ok!(option_resolution13, add_wkt: true);
check_ok!(option_resolution14, add_wkt: true);
check_ok!(option_resolution15, add_wkt: true);
check_ok!(option_resolution16, add_wkt: true);
check_ok!(option_resolution17, add_wkt: true);
check_err!(option_resolution18, add_wkt: true);
check_ok!(option_resolution19);
check_ok!(option_resolution20, add_wkt: true);
check_ok!(option_resolution21, add_wkt: true);
check_ok!(option_resolution22, add_wkt: true);
check_ok!(option_resolution23, add_wkt: true);
check_ok!(option_resolution24, add_wkt: true);
check_err!(dependency_not_imported);
check_ok!(dependency_resolution_transitive);
check_ok!(dependency_resolution_transitive2);
check_err!(dependency_resolution_transitive3);
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-tests/build.rs | prost-reflect-tests/build.rs | use std::io;
fn main() -> io::Result<()> {
let mut config = prost_build::Config::new();
config
.type_attribute(".test.Scalars", "#[cfg_attr(test, derive(::proptest_derive::Arbitrary))]")
.type_attribute(".test.ScalarArrays", "#[cfg_attr(test, derive(::proptest_derive::Arbitrary))]")
.type_attribute(".test.ComplexType", "#[cfg_attr(test, derive(::proptest_derive::Arbitrary))]")
.type_attribute(".test.WellKnownTypes", "#[cfg_attr(test, derive(::proptest_derive::Arbitrary))]")
.field_attribute(
".test.WellKnownTypes.timestamp",
"#[cfg_attr(test, proptest(strategy = \"::proptest::option::of(crate::arbitrary::timestamp())\"))]",
)
.field_attribute(
".test.WellKnownTypes.duration",
"#[cfg_attr(test, proptest(strategy = \"::proptest::option::of(crate::arbitrary::duration())\"))]",
)
.field_attribute(
".test.WellKnownTypes.struct",
"#[cfg_attr(test, proptest(strategy = \"::proptest::option::of(crate::arbitrary::struct_())\"))]",
)
.field_attribute(
".test.WellKnownTypes.list",
"#[cfg_attr(test, proptest(strategy = \"::proptest::option::of(crate::arbitrary::list())\"))]",
)
.field_attribute(
".test.WellKnownTypes.mask",
"#[cfg_attr(test, proptest(strategy = \"::proptest::option::of(crate::arbitrary::mask())\"))]",
)
.field_attribute(
".test.WellKnownTypes.empty",
"#[cfg_attr(test, proptest(strategy = \"::proptest::option::of(::proptest::strategy::Just(()))\"))]",
)
.field_attribute(".test.WellKnownTypes.null", "#[cfg_attr(test, proptest(value= \"0\"))]");
prost_reflect_build::Builder::new()
.file_descriptor_set_bytes("crate::DESCRIPTOR_POOL_BYTES")
.compile_protos_with_config(
config,
&[
"src/test.proto",
"src/test2.proto",
"src/desc.proto",
"src/desc2.proto",
"src/desc_no_package.proto",
"src/imports.proto",
"src/ext.proto",
"src/options.proto",
],
&["src/"],
)?;
Ok(())
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-tests/src/lib.rs | prost-reflect-tests/src/lib.rs | use prost_reflect::{DescriptorPool, ReflectMessage};
use proto::Scalars;
#[cfg(test)]
mod arbitrary;
#[cfg(test)]
mod decode;
#[cfg(test)]
mod desc;
#[cfg(test)]
mod json;
#[cfg(test)]
mod text_format;
pub mod proto {
#![allow(clippy::all)]
include!(concat!(env!("OUT_DIR"), "/test.rs"));
include!(concat!(env!("OUT_DIR"), "/test2.rs"));
pub mod options {
include!(concat!(env!("OUT_DIR"), "/custom.options.rs"));
}
}
const DESCRIPTOR_POOL_BYTES: &[u8] =
include_bytes!(concat!(env!("OUT_DIR"), "/file_descriptor_set.bin"));
pub fn test_file_descriptor() -> DescriptorPool {
// Ensure global pool is populated with test descriptors.
let _ = Scalars::default().descriptor();
DescriptorPool::global()
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-tests/src/desc.rs | prost-reflect-tests/src/desc.rs | use std::collections::HashMap;
use prost::{bytes::Bytes, Message};
use prost_reflect::{DescriptorPool, DynamicMessage, MapKey, ReflectMessage, Syntax, Value};
use crate::{
proto::{self, ComplexType, Scalars},
test_file_descriptor, DESCRIPTOR_POOL_BYTES,
};
#[test]
fn test_descriptor_methods() {
let file_desc = test_file_descriptor()
.get_file_by_name("desc.proto")
.unwrap();
assert_eq!(file_desc.name(), "desc.proto");
assert_eq!(file_desc.package_name(), "my.package");
assert_eq!(file_desc.syntax(), Syntax::Proto3);
let message_desc = test_file_descriptor()
.get_message_by_name("my.package.MyMessage")
.unwrap();
assert_eq!(message_desc.name(), "MyMessage");
assert_eq!(message_desc.full_name(), "my.package.MyMessage");
assert_eq!(message_desc.parent_file(), file_desc);
assert_eq!(message_desc.parent_message(), None);
assert_eq!(message_desc.package_name(), "my.package");
assert_eq!(
message_desc.reserved_ranges().flatten().collect::<Vec<_>>(),
vec![2, 15, 9, 10, 11]
);
assert_eq!(
message_desc.reserved_names().collect::<Vec<_>>(),
vec!["foo", "bar"]
);
assert_eq!(message_desc.extension_ranges().count(), 0,);
let field_desc = message_desc.get_field_by_name("my_field").unwrap();
assert_eq!(field_desc.name(), "my_field");
assert_eq!(field_desc.full_name(), "my.package.MyMessage.my_field");
let nested_message_desc = test_file_descriptor()
.get_message_by_name("my.package.MyMessage.MyNestedMessage")
.unwrap();
assert_eq!(nested_message_desc.name(), "MyNestedMessage");
assert_eq!(
nested_message_desc.full_name(),
"my.package.MyMessage.MyNestedMessage"
);
assert_eq!(
nested_message_desc.parent_message(),
Some(message_desc.clone())
);
assert_eq!(nested_message_desc.package_name(), "my.package");
let enum_desc = test_file_descriptor()
.get_enum_by_name("my.package.MyEnum")
.unwrap();
assert_eq!(enum_desc.name(), "MyEnum");
assert_eq!(enum_desc.full_name(), "my.package.MyEnum");
assert_eq!(enum_desc.parent_message(), None);
assert_eq!(enum_desc.package_name(), "my.package");
assert_eq!(
enum_desc.reserved_ranges().flatten().collect::<Vec<_>>(),
vec![-2, 15, 9, 10, 11]
);
assert_eq!(
enum_desc.reserved_names().collect::<Vec<_>>(),
vec!["FOO", "BAR"]
);
let enum_value_desc = enum_desc.get_value_by_name("MY_VALUE").unwrap();
assert_eq!(enum_value_desc.name(), "MY_VALUE");
assert_eq!(enum_value_desc.full_name(), "my.package.MY_VALUE");
let nested_enum_desc = test_file_descriptor()
.get_enum_by_name("my.package.MyMessage.MyNestedEnum")
.unwrap();
assert_eq!(nested_enum_desc.name(), "MyNestedEnum");
assert_eq!(
nested_enum_desc.full_name(),
"my.package.MyMessage.MyNestedEnum"
);
assert_eq!(nested_enum_desc.parent_message(), Some(message_desc));
assert_eq!(nested_enum_desc.package_name(), "my.package");
let service_desc = test_file_descriptor()
.services()
.find(|s| s.full_name() == "my.package.MyService")
.unwrap();
assert_eq!(service_desc.name(), "MyService");
assert_eq!(service_desc.full_name(), "my.package.MyService");
assert_eq!(service_desc.package_name(), "my.package");
let method_desc = service_desc
.methods()
.find(|m| m.name() == "MyMethod")
.unwrap();
assert_eq!(method_desc.name(), "MyMethod");
assert_eq!(method_desc.full_name(), "my.package.MyService.MyMethod");
}
#[test]
fn test_descriptor_methods_proto2() {
let file_desc = test_file_descriptor()
.get_file_by_name("desc2.proto")
.unwrap();
assert_eq!(file_desc.name(), "desc2.proto");
assert_eq!(file_desc.package_name(), "my.package2");
assert_eq!(file_desc.syntax(), Syntax::Proto2);
let message_desc = test_file_descriptor()
.get_message_by_name("my.package2.MyMessage")
.unwrap();
assert_eq!(message_desc.name(), "MyMessage");
assert_eq!(message_desc.full_name(), "my.package2.MyMessage");
assert_eq!(message_desc.parent_file(), file_desc);
assert_eq!(message_desc.parent_message(), None);
assert_eq!(message_desc.package_name(), "my.package2");
assert_eq!(
message_desc
.extension_ranges()
.flatten()
.collect::<Vec<_>>(),
vec![100, 110, 111, 112, 113, 114, 115],
);
let mut extensions: Vec<_> = test_file_descriptor()
.all_extensions()
.filter(|ext| ext.parent_file() == file_desc)
.collect();
extensions.sort_by_key(|e| e.full_name().to_owned());
assert_eq!(extensions.len(), 3);
assert_eq!(
extensions[0].full_name(),
"my.package2.MyMessage.in_extendee"
);
assert_eq!(
extensions[0].parent_message().unwrap().full_name(),
"my.package2.MyMessage"
);
assert_eq!(extensions[0].parent_file(), file_desc);
assert_eq!(
extensions[0].containing_message().full_name(),
"my.package2.MyMessage"
);
assert_eq!(
extensions[0].json_name(),
"[my.package2.MyMessage.in_extendee]"
);
assert_eq!(
extensions[1].full_name(),
"my.package2.OtherMessage.in_other"
);
assert_eq!(
extensions[1].parent_message().unwrap().full_name(),
"my.package2.OtherMessage"
);
assert_eq!(extensions[1].parent_file(), file_desc);
assert_eq!(
extensions[1].containing_message().full_name(),
"my.package2.MyMessage"
);
assert_eq!(
extensions[1].json_name(),
"[my.package2.OtherMessage.in_other]"
);
assert_eq!(extensions[2].full_name(), "my.package2.in_file");
assert!(extensions[2].parent_message().is_none());
assert_eq!(extensions[2].parent_file(), file_desc);
assert_eq!(
extensions[2].containing_message().full_name(),
"my.package2.MyMessage"
);
assert_eq!(extensions[2].json_name(), "[my.package2.in_file]");
}
#[test]
fn test_descriptor_names_no_package() {
let message_desc = test_file_descriptor()
.get_message_by_name("MyMessage")
.unwrap();
assert_eq!(message_desc.name(), "MyMessage");
assert_eq!(message_desc.full_name(), "MyMessage");
assert_eq!(message_desc.parent_message(), None);
assert_eq!(message_desc.package_name(), "");
let field_desc = message_desc.get_field_by_name("my_field").unwrap();
assert_eq!(field_desc.name(), "my_field");
assert_eq!(field_desc.full_name(), "MyMessage.my_field");
let nested_message_desc = test_file_descriptor()
.get_message_by_name("MyMessage.MyNestedMessage")
.unwrap();
assert_eq!(nested_message_desc.name(), "MyNestedMessage");
assert_eq!(nested_message_desc.full_name(), "MyMessage.MyNestedMessage");
assert_eq!(
nested_message_desc.parent_message(),
Some(message_desc.clone())
);
assert_eq!(nested_message_desc.package_name(), "");
let enum_desc = test_file_descriptor().get_enum_by_name("MyEnum").unwrap();
assert_eq!(enum_desc.name(), "MyEnum");
assert_eq!(enum_desc.full_name(), "MyEnum");
assert_eq!(enum_desc.parent_message(), None);
assert_eq!(enum_desc.package_name(), "");
let enum_value_desc = enum_desc.get_value_by_name("MY_VALUE").unwrap();
assert_eq!(enum_value_desc.name(), "MY_VALUE");
assert_eq!(enum_value_desc.full_name(), "MY_VALUE");
let nested_enum_desc = test_file_descriptor()
.get_enum_by_name("MyMessage.MyNestedEnum")
.unwrap();
assert_eq!(nested_enum_desc.name(), "MyNestedEnum");
assert_eq!(nested_enum_desc.full_name(), "MyMessage.MyNestedEnum");
assert_eq!(nested_enum_desc.parent_message(), Some(message_desc));
assert_eq!(nested_enum_desc.package_name(), "");
let service_desc = test_file_descriptor()
.services()
.find(|s| s.full_name() == "MyService")
.unwrap();
assert_eq!(service_desc.name(), "MyService");
assert_eq!(service_desc.full_name(), "MyService");
assert_eq!(service_desc.package_name(), "");
let method_desc = service_desc
.methods()
.find(|m| m.name() == "MyMethod")
.unwrap();
assert_eq!(method_desc.name(), "MyMethod");
assert_eq!(method_desc.full_name(), "MyService.MyMethod");
}
#[test]
fn test_debug_impls() {
// Check none of the debug impls accidentally recurse infinitely
let _ = format!("{:?}", test_file_descriptor());
for service in test_file_descriptor().services() {
let _ = format!("{service:?}");
for method in service.methods() {
let _ = format!("{method:?}");
}
}
for file in test_file_descriptor().files() {
let _ = format!("{file:?}");
}
for message in test_file_descriptor().all_messages() {
let _ = format!("{message:?}");
for field in message.fields() {
let _ = format!("{field:?}");
}
for oneof in message.oneofs() {
let _ = format!("{oneof:?}");
}
}
for enum_ in test_file_descriptor().all_enums() {
let _ = format!("{enum_:?}");
for value in enum_.values() {
let _ = format!("{value:?}");
}
}
for extension in test_file_descriptor().all_extensions() {
let _ = format!("{extension:?}");
}
}
#[test]
fn test_raw_getters() {
let _ = format!("{:?}", test_file_descriptor());
for file in test_file_descriptor().files() {
assert_eq!(file.file_descriptor_proto().name(), file.name());
assert!(file.messages().eq(test_file_descriptor()
.all_messages()
.filter(|m| m.parent_message().is_none() && m.parent_file() == file)));
assert!(file.enums().eq(test_file_descriptor()
.all_enums()
.filter(|m| m.parent_message().is_none() && m.parent_file() == file)));
assert!(file.extensions().eq(test_file_descriptor()
.all_extensions()
.filter(|m| m.parent_message().is_none() && m.parent_file() == file)));
assert!(file.services().eq(test_file_descriptor()
.services()
.filter(|m| m.parent_file() == file)));
}
for service in test_file_descriptor().services() {
assert_eq!(service.service_descriptor_proto().name(), service.name());
for method in service.methods() {
assert_eq!(method.method_descriptor_proto().name(), method.name());
}
}
for message in test_file_descriptor().all_messages() {
assert_eq!(message.descriptor_proto().name(), message.name());
for field in message.fields() {
assert_eq!(field.field_descriptor_proto().name(), field.name());
}
for oneof in message.oneofs() {
assert_eq!(oneof.oneof_descriptor_proto().name(), oneof.name());
}
assert!(message.extensions().eq(test_file_descriptor()
.all_extensions()
.filter(|m| m.containing_message() == message)));
assert!(message.child_messages().eq(test_file_descriptor()
.all_messages()
.filter(|m| m.parent_message() == Some(message.clone()))));
assert!(message.child_enums().eq(test_file_descriptor()
.all_enums()
.filter(|m| m.parent_message() == Some(message.clone()))));
assert!(message.child_extensions().eq(test_file_descriptor()
.all_extensions()
.filter(|m| m.parent_message() == Some(message.clone()))));
}
for enum_ in test_file_descriptor().all_enums() {
assert_eq!(enum_.enum_descriptor_proto().name(), enum_.name());
for value in enum_.values() {
assert_eq!(value.enum_value_descriptor_proto().name(), value.name());
}
}
for extension in test_file_descriptor().all_extensions() {
assert_eq!(extension.field_descriptor_proto().name(), extension.name());
}
}
#[test]
fn descriptor_pool_add_individual_files() {
let original = test_file_descriptor();
let mut roundtripped = DescriptorPool::new();
// These should be sorted into topological order by the protobuf compiler.
for file in original.file_descriptor_protos() {
roundtripped
.add_file_descriptor_proto(file.clone())
.unwrap();
}
assert_ne!(original, roundtripped);
assert!(original
.all_messages()
.map(|m| m.full_name().to_owned())
.eq(roundtripped
.all_messages()
.map(|m| m.full_name().to_owned())));
let message_desc = roundtripped
.get_message_by_name("my.package.MyMessage")
.unwrap();
assert_eq!(message_desc.name(), "MyMessage");
assert_eq!(message_desc.full_name(), "my.package.MyMessage");
assert_eq!(message_desc.parent_pool(), &roundtripped);
assert_eq!(message_desc.parent_message(), None);
assert_eq!(message_desc.package_name(), "my.package");
}
#[test]
fn test_enum_alias() {
let enum_desc = test_file_descriptor()
.get_enum_by_name("test.EnumWithAlias")
.unwrap();
assert_eq!(enum_desc.name(), "EnumWithAlias");
assert_eq!(enum_desc.full_name(), "test.EnumWithAlias");
assert_eq!(enum_desc.parent_message(), None);
assert_eq!(enum_desc.package_name(), "test");
assert_eq!(enum_desc.get_value_by_name("FOO").unwrap().number(), 0);
assert_eq!(enum_desc.get_value_by_name("BAR").unwrap().number(), 0);
assert_eq!(enum_desc.get_value_by_name("A").unwrap().number(), 1);
assert_eq!(enum_desc.get_value_by_name("B").unwrap().number(), 1);
assert_eq!(enum_desc.get_value_by_name("C").unwrap().number(), 1);
assert_eq!(enum_desc.get_value_by_name("TWO").unwrap().number(), 2);
assert_eq!(enum_desc.get_value(0).unwrap().number(), 0);
assert!(matches!(
enum_desc.get_value(0).unwrap().name(),
"FOO" | "BAR"
));
assert_eq!(enum_desc.get_value(1).unwrap().number(), 1);
assert!(matches!(
enum_desc.get_value(1).unwrap().name(),
"A" | "B" | "C"
));
assert_eq!(enum_desc.get_value(2).unwrap().number(), 2);
assert_eq!(enum_desc.get_value(2).unwrap().name(), "TWO");
assert_eq!(enum_desc.get_value(3), None);
}
#[test]
fn test_get_extension() {
let file_descriptor_set = test_file_descriptor()
.get_message_by_name("google.protobuf.FileDescriptorSet")
.unwrap();
let mut dynamic_message = prost_reflect::DynamicMessage::new(file_descriptor_set);
dynamic_message.merge(DESCRIPTOR_POOL_BYTES).unwrap();
let extension = test_file_descriptor()
.get_message_by_name("google.protobuf.EnumValueOptions")
.unwrap()
.get_extension_by_full_name("demo.len")
.unwrap();
assert_eq!(
dynamic_message
.get_field_by_name("file")
.unwrap()
.as_list()
.unwrap()
.iter()
.map(|f| f.as_message().unwrap())
.find(|f| f.get_field_by_name("name").unwrap().as_str() == Some("ext.proto"))
.unwrap()
.get_field_by_name("enum_type")
.unwrap()
.as_list()
.unwrap()[0]
.as_message()
.unwrap()
.get_field_by_name("value")
.unwrap()
.as_list()
.unwrap()[1]
.as_message()
.unwrap()
.get_field_by_name("options")
.unwrap()
.as_message()
.unwrap()
.get_extension(&extension)
.as_ref(),
&Value::U32(1)
);
let e = test_file_descriptor().get_enum_by_name("demo.Foo").unwrap();
assert!(e.get_value(0).unwrap().options().has_extension(&extension));
assert_eq!(
e.get_value(0)
.unwrap()
.options()
.get_extension(&extension)
.as_ref(),
&Value::U32(0)
);
assert!(e.get_value(1).unwrap().options().has_extension(&extension));
assert_eq!(
e.get_value(1)
.unwrap()
.options()
.get_extension(&extension)
.as_ref(),
&Value::U32(1)
);
assert!(e.get_value(2).unwrap().options().has_extension(&extension));
assert_eq!(
e.get_value(2)
.unwrap()
.options()
.get_extension(&extension)
.as_ref(),
&Value::U32(2)
);
}
#[test]
fn test_file_extension_options() {
let pool = test_file_descriptor();
let file = pool.get_file_by_name("options.proto").unwrap();
let file_ext = pool.get_extension_by_name("custom.options.file").unwrap();
assert_eq!(
file.options().get_extension(&file_ext).as_ref(),
&Value::I32(-1)
);
}
#[test]
fn test_message_extension_options() {
let pool = test_file_descriptor();
let message = pool
.get_message_by_name("custom.options.Aggregate")
.unwrap();
let message_ext = pool
.get_extension_by_name("custom.options.message")
.unwrap();
assert_eq!(
message.options().get_extension(&message_ext).as_ref(),
&Value::String("abc".into())
);
let field = message.get_field_by_name("a").unwrap();
let field_ext = pool.get_extension_by_name("custom.options.field").unwrap();
assert_eq!(
field.options().get_extension(&field_ext).as_ref(),
&Value::Bytes(b"\x08".as_ref().into())
);
let oneof = message.oneofs().find(|o| o.name() == "O").unwrap();
let oneof_ext = pool.get_extension_by_name("custom.options.oneof").unwrap();
assert_eq!(
oneof.options().get_extension(&oneof_ext).as_ref(),
&Value::List(vec![Value::F32(5.5), Value::F32(-5.0), Value::F32(5.0)]),
);
}
#[test]
fn test_extension_extension_options() {
let pool = test_file_descriptor();
let ext = pool.get_extension_by_name("custom.options.field").unwrap();
assert_eq!(
ext.options().get_extension(&ext).as_ref(),
&Value::Bytes("extension".into())
);
}
#[test]
fn test_service_extension_options() {
let pool = test_file_descriptor();
let service = pool.get_service_by_name("custom.options.Service").unwrap();
let service_ext = pool
.get_extension_by_name("custom.options.service")
.unwrap();
assert_eq!(
service.options().get_extension(&service_ext).as_ref(),
&Value::Bool(true)
);
let method = service.methods().next().unwrap();
let method_ext = pool.get_extension_by_name("custom.options.method").unwrap();
assert_eq!(
method.options().get_extension(&method_ext).as_ref(),
&Value::U64(6)
);
}
#[test]
fn test_enum_extension_options() {
let pool = test_file_descriptor();
let enum_ = pool.get_enum_by_name("custom.options.Enum").unwrap();
let enum_ext = pool.get_extension_by_name("custom.options.enum").unwrap();
assert_eq!(
enum_.options().get_extension(&enum_ext).as_ref(),
&Value::Message(
proto::options::Aggregate {
a: 32,
o: Some(proto::options::aggregate::O::B("abc".into()))
}
.transcode_to_dynamic()
),
);
let value = enum_.get_value_by_name("VALUE").unwrap();
let value_ext = pool.get_extension_by_name("custom.options.value").unwrap();
assert_eq!(
value.options().get_extension(&value_ext).as_ref(),
&Value::EnumNumber(1)
);
}
#[test]
fn message_default_value_presence() {
let mut message = Scalars::default().transcode_to_dynamic();
let field = message.descriptor().get_field_by_name("int32").unwrap();
assert_eq!(message.fields().count(), 0);
assert!(!message.has_field(&field));
assert_eq!(message.get_field(&field).as_ref(), &Value::I32(0));
message.set_field(&field, Value::I32(0));
assert_eq!(message.fields().count(), 0);
assert!(!message.has_field(&field));
assert_eq!(message.get_field(&field).as_ref(), &Value::I32(0));
message.get_field_mut(&field);
assert_eq!(message.fields().count(), 0);
assert!(!message.has_field(&field));
assert_eq!(message.get_field(&field).as_ref(), &Value::I32(0));
}
#[test]
fn message_list_fields_scalars() {
let message = Scalars {
double: 0.0,
float: 2.2,
int32: 3,
int64: 0,
uint32: 5,
uint64: 6,
sint32: 7,
sint64: 0,
fixed32: 9,
fixed64: 0,
sfixed32: 11,
sfixed64: 12,
r#bool: false,
string: "5".to_owned(),
bytes: b"6".to_vec(),
}
.transcode_to_dynamic();
assert_eq!(
message.fields().collect::<Vec<_>>(),
vec![
(
message.descriptor().get_field_by_name("float").unwrap(),
&Value::F32(2.2)
),
(
message.descriptor().get_field_by_name("int32").unwrap(),
&Value::I32(3)
),
(
message.descriptor().get_field_by_name("uint32").unwrap(),
&Value::U32(5)
),
(
message.descriptor().get_field_by_name("uint64").unwrap(),
&Value::U64(6)
),
(
message.descriptor().get_field_by_name("sint32").unwrap(),
&Value::I32(7)
),
(
message.descriptor().get_field_by_name("fixed32").unwrap(),
&Value::U32(9)
),
(
message.descriptor().get_field_by_name("sfixed32").unwrap(),
&Value::I32(11)
),
(
message.descriptor().get_field_by_name("sfixed64").unwrap(),
&Value::I64(12)
),
(
message.descriptor().get_field_by_name("string").unwrap(),
&Value::String("5".to_owned())
),
(
message.descriptor().get_field_by_name("bytes").unwrap(),
&Value::Bytes(Bytes::from_static(b"6"))
),
]
);
}
#[test]
fn message_list_extensions() {
    // Regular fields and extension fields are iterated through separate
    // accessors; exercise both the shared and mutable iterator variants.
    let desc = test_file_descriptor()
        .get_message_by_name("my.package2.MyMessage")
        .unwrap();
    let int_field = desc.get_field_by_name("int").unwrap();
    let ext = desc.get_extension(113).unwrap();

    let mut msg = DynamicMessage::new(desc.clone());

    // Empty message: no entries visible from any of the four iterators.
    assert_eq!(msg.fields().count(), 0);
    assert_eq!(msg.extensions().count(), 0);
    assert_eq!(msg.fields_mut().count(), 0);
    assert_eq!(msg.extensions_mut().count(), 0);

    msg.set_field(&int_field, Value::I32(0));
    msg.set_extension(&ext, Value::F64(42.0));

    assert!(msg.fields().eq([(int_field.clone(), &Value::I32(0))]));
    assert!(msg.extensions().eq([(ext.clone(), &Value::F64(42.0))]));
    assert!(msg.fields_mut().eq([(int_field, &mut Value::I32(0))]));
    assert!(msg.extensions_mut().eq([(ext, &mut Value::F64(42.0))]));
}
#[test]
fn message_take_field() {
    // `take_field_by_name` removes a populated field and returns its owned
    // value; unpopulated fields and unknown names yield `None`.
    let mut message = ComplexType {
        string_map: HashMap::from_iter([("foo".to_owned(), Scalars::default())]),
        nested: Some(Scalars {
            int32: 3,
            ..Default::default()
        }),
        optional_enum: 3,
        ..Default::default()
    }
    .transcode_to_dynamic();
    // Expected dynamic values for the three populated fields above.
    let map = Value::Map(HashMap::from_iter([(
        MapKey::String("foo".to_owned()),
        Value::Message(Scalars::default().transcode_to_dynamic()),
    )]));
    let nested = Value::Message(
        Scalars {
            int32: 3,
            ..Default::default()
        }
        .transcode_to_dynamic(),
    );
    let num = Value::EnumNumber(3);
    // Sanity check: exactly these three fields are reported as populated.
    assert!(message.fields().eq([
        (
            message
                .descriptor()
                .get_field_by_name("string_map")
                .unwrap(),
            &map
        ),
        (
            message.descriptor().get_field_by_name("nested").unwrap(),
            &nested
        ),
        (
            message
                .descriptor()
                .get_field_by_name("optional_enum")
                .unwrap(),
            &num
        )
    ]));
    // `int_map` exists but is unpopulated; `notfound` is not a field at all.
    assert_eq!(message.take_field_by_name("int_map"), None);
    assert_eq!(message.take_field_by_name("notfound"), None);
    // Taking each populated field returns its owned value...
    assert_eq!(message.take_field_by_name("string_map"), Some(map));
    assert_eq!(message.take_field_by_name("nested"), Some(nested));
    assert_eq!(message.take_field_by_name("optional_enum"), Some(num));
    // ...and afterwards no populated fields remain.
    assert!(message.fields().eq([]));
}
#[test]
fn message_take_fields() {
    // `take_fields`/`take_extensions` drain the populated entries, yielding
    // owned (descriptor, value) pairs and leaving the message empty.
    let desc = test_file_descriptor()
        .get_message_by_name("my.package2.MyMessage")
        .unwrap();
    let int_field = desc.get_field_by_name("int").unwrap();
    let ext = desc.get_extension(113).unwrap();

    let mut msg = DynamicMessage::new(desc.clone());
    assert_eq!(msg.fields().count(), 0);
    assert_eq!(msg.extensions().count(), 0);
    assert_eq!(msg.fields_mut().count(), 0);
    assert_eq!(msg.extensions_mut().count(), 0);

    msg.set_field(&int_field, Value::I32(0));
    msg.set_extension(&ext, Value::F64(42.0));

    // Draining yields the owned entries...
    assert!(msg.take_fields().eq([(int_field, Value::I32(0))]));
    assert!(msg.take_extensions().eq([(ext, Value::F64(42.0))]));

    // ...and leaves nothing behind.
    assert_eq!(msg.fields().count(), 0);
    assert_eq!(msg.extensions().count(), 0);
    assert_eq!(msg.fields_mut().count(), 0);
    assert_eq!(msg.extensions_mut().count(), 0);
}
#[test]
fn oneof_not_synthetic() {
    // A oneof declared explicitly in the .proto source must not be reported
    // as synthetic.
    let desc = test_file_descriptor()
        .get_message_by_name("test.MessageWithOneof")
        .unwrap();
    assert_eq!(desc.oneofs().len(), 1);

    let oneof = desc.oneofs().next().unwrap();
    assert_eq!(oneof.name(), "test_oneof");
    assert!(!oneof.is_synthetic());
}
#[test]
fn proto3_optional_field() {
    // A proto3 `optional` field is backed by a synthetic oneof named after
    // the field with a leading underscore, containing exactly that field.
    let desc = test_file_descriptor()
        .get_message_by_name("test.MessageWithOptionalEnum")
        .unwrap();
    let field = desc.get_field_by_name("optional_enum").unwrap();
    let oneof = field.containing_oneof().unwrap();

    assert!(field.supports_presence());
    assert_eq!(oneof.name(), "_optional_enum");
    assert!(oneof.is_synthetic());
    assert!(oneof.fields().eq([field.clone()]));

    // The synthetic oneof is the only oneof on this message.
    assert_eq!(desc.oneofs().len(), 1);
    assert!(desc.oneofs().eq([oneof.clone()]));
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-tests/src/decode.rs | prost-reflect-tests/src/decode.rs | use std::{
collections::{BTreeMap, HashMap},
fmt::Debug,
iter::FromIterator,
};
use proptest::{prelude::*, test_runner::TestCaseError};
use prost::{bytes::Bytes, encoding::WireType, Message};
use prost_reflect::{DynamicMessage, MapKey, ReflectMessage, Value};
use prost_types::FileDescriptorSet;
use crate::{
proto::{
contains_group, message_with_oneof, ComplexType, ContainsGroup, MessageWithOneof,
ScalarArrays, Scalars, WellKnownTypes,
},
test_file_descriptor,
};
#[test]
fn clear_message() {
    // `clear` must reset every populated field, producing a message that
    // serializes to zero bytes.
    let mut dynamic = Scalars {
        double: 1.1,
        float: 2.2,
        int32: 3,
        int64: 4,
        uint32: 5,
        uint64: 6,
        sint32: 7,
        sint64: 8,
        fixed32: 9,
        fixed64: 10,
        sfixed32: 11,
        sfixed64: 12,
        r#bool: true,
        string: "5".to_owned(),
        bytes: b"6".to_vec(),
    }
    .transcode_to_dynamic();

    dynamic.clear();

    // No field may report presence after clearing.
    for name in [
        "double", "float", "int32", "int64", "uint32", "uint64", "sint32", "sint64", "fixed32",
        "fixed64", "sfixed32", "sfixed64", "bool", "string", "bytes",
    ] {
        assert!(!dynamic.has_field_by_name(name));
    }

    // A cleared message encodes to an empty byte string.
    assert!(dynamic.encode_to_vec().is_empty());
}
#[test]
#[should_panic(expected = "InvalidType")]
fn set_field_validates_type() {
    // Writing a value whose kind doesn't match the declared field type
    // (U32 into a double field) must panic with an InvalidType error.
    let mut dynamic = Scalars::default().transcode_to_dynamic();
    dynamic.set_field_by_name("double", Value::U32(5));
}
#[test]
fn try_set_field_validates_type() {
    // The fallible setter reports a descriptive type-mismatch error instead
    // of panicking.
    let mut dynamic = Scalars::default().transcode_to_dynamic();
    let err = dynamic
        .try_set_field_by_name("double", Value::U32(5))
        .unwrap_err();
    assert_eq!(
        err.to_string(),
        "expected a value of type 'double', but found '5'"
    );
}
#[test]
fn decode_scalars() {
    // Transcode a fully-populated scalar message and read every field back
    // through its typed accessor.
    let dynamic = Scalars {
        double: 1.1,
        float: 2.2,
        int32: 3,
        int64: 4,
        uint32: 5,
        uint64: 6,
        sint32: 7,
        sint64: 8,
        fixed32: 9,
        fixed64: 10,
        sfixed32: 11,
        sfixed64: 12,
        r#bool: true,
        string: "5".to_owned(),
        bytes: b"6".to_vec(),
    }
    .transcode_to_dynamic();

    // Shorthand: fetch a populated field's value by name.
    let get = |name: &str| dynamic.get_field_by_name(name).unwrap();

    assert_eq!(get("double").as_f64(), Some(1.1));
    assert_eq!(get("float").as_f32(), Some(2.2));
    assert_eq!(get("int32").as_i32(), Some(3));
    assert_eq!(get("int64").as_i64(), Some(4));
    assert_eq!(get("uint32").as_u32(), Some(5));
    assert_eq!(get("uint64").as_u64(), Some(6));
    assert_eq!(get("sint32").as_i32(), Some(7));
    assert_eq!(get("sint64").as_i64(), Some(8));
    assert_eq!(get("fixed32").as_u32(), Some(9));
    assert_eq!(get("fixed64").as_u64(), Some(10));
    assert_eq!(get("sfixed32").as_i32(), Some(11));
    assert_eq!(get("sfixed64").as_i64(), Some(12));
    assert_eq!(get("bool").as_bool(), Some(true));
    assert_eq!(get("string").as_str(), Some("5"));
    assert_eq!(get("bytes").as_bytes(), Some(&Bytes::from_static(b"6")));
}
#[test]
fn decode_scalar_arrays() {
    // Repeated scalar fields surface as `Value` lists; check the element
    // values (including signs for the signed encodings) of each one.
    let dynamic = ScalarArrays {
        double: vec![1.1, 2.2],
        float: vec![3.3f32, 4.4f32],
        int32: vec![5, -6],
        int64: vec![7, -8],
        uint32: vec![9, 10],
        uint64: vec![11, 12],
        sint32: vec![13, -14],
        sint64: vec![15, -16],
        fixed32: vec![17, 18],
        fixed64: vec![19, 20],
        sfixed32: vec![21, -22],
        sfixed64: vec![23, -24],
        r#bool: vec![true, false],
        string: vec!["25".to_owned(), "26".to_owned()],
        bytes: vec![b"27".to_vec(), b"28".to_vec()],
    }
    .transcode_to_dynamic();

    // Shorthand: fetch a populated field's value by name.
    let get = |name: &str| dynamic.get_field_by_name(name).unwrap();

    assert_eq!(
        get("double").as_list(),
        Some([Value::F64(1.1), Value::F64(2.2)].as_ref())
    );
    assert_eq!(
        get("float").as_list(),
        Some([Value::F32(3.3f32), Value::F32(4.4f32)].as_ref())
    );
    assert_eq!(
        get("int32").as_list(),
        Some([Value::I32(5), Value::I32(-6)].as_ref())
    );
    assert_eq!(
        get("int64").as_list(),
        Some([Value::I64(7), Value::I64(-8)].as_ref())
    );
    assert_eq!(
        get("uint32").as_list(),
        Some([Value::U32(9), Value::U32(10)].as_ref())
    );
    assert_eq!(
        get("uint64").as_list(),
        Some([Value::U64(11), Value::U64(12)].as_ref())
    );
    assert_eq!(
        get("sint32").as_list(),
        Some([Value::I32(13), Value::I32(-14)].as_ref())
    );
    assert_eq!(
        get("sint64").as_list(),
        Some([Value::I64(15), Value::I64(-16)].as_ref())
    );
    assert_eq!(
        get("fixed32").as_list(),
        Some([Value::U32(17), Value::U32(18)].as_ref())
    );
    assert_eq!(
        get("fixed64").as_list(),
        Some([Value::U64(19), Value::U64(20)].as_ref())
    );
    assert_eq!(
        get("sfixed32").as_list(),
        Some([Value::I32(21), Value::I32(-22)].as_ref())
    );
    assert_eq!(
        get("sfixed64").as_list(),
        Some([Value::I64(23), Value::I64(-24)].as_ref())
    );
    assert_eq!(
        get("bool").as_list(),
        Some([Value::Bool(true), Value::Bool(false)].as_ref())
    );
    assert_eq!(
        get("string").as_list(),
        Some(
            [
                Value::String("25".to_owned()),
                Value::String("26".to_owned())
            ]
            .as_ref()
        )
    );
    assert_eq!(
        get("bytes").as_list(),
        Some(
            [
                Value::Bytes(Bytes::from_static(b"27")),
                Value::Bytes(Bytes::from_static(b"28"))
            ]
            .as_ref()
        )
    );
}
#[test]
fn decode_complex_type() {
    // Transcode a message containing maps, a nested message, repeated enums
    // and an optional enum, then verify each field's dynamic representation.
    let dynamic = ComplexType {
        string_map: HashMap::from_iter([
            (
                "1".to_owned(),
                Scalars {
                    double: 1.1,
                    float: 2.2,
                    int32: 3,
                    ..Default::default()
                },
            ),
            (
                "2".to_owned(),
                Scalars {
                    int64: 4,
                    uint32: 5,
                    uint64: 6,
                    ..Default::default()
                },
            ),
        ]),
        int_map: HashMap::from_iter([
            (
                3,
                Scalars {
                    sint32: 7,
                    sint64: 8,
                    fixed32: 9,
                    ..Default::default()
                },
            ),
            (
                4,
                Scalars {
                    sint64: 8,
                    fixed32: 9,
                    fixed64: 10,
                    ..Default::default()
                },
            ),
        ]),
        nested: Some(Scalars {
            sfixed32: 11,
            sfixed64: 12,
            r#bool: true,
            string: "5".to_owned(),
            bytes: b"6".to_vec(),
            ..Default::default()
        }),
        my_enum: vec![0, 1, 2, 3, -4],
        optional_enum: 1,
        enum_map: HashMap::from_iter([(1, 1), (2, 2)]),
    }
    .transcode_to_dynamic();
    // Helper: a fresh, empty dynamic test.Scalars used to build expected
    // map/message values below.
    fn empty_scalars() -> DynamicMessage {
        DynamicMessage::new(
            test_file_descriptor()
                .get_message_by_name(".test.Scalars")
                .unwrap(),
        )
    }
    // String-keyed map: values are messages with only the populated fields set.
    assert_eq!(
        dynamic.get_field_by_name("string_map").unwrap().as_map(),
        Some(&HashMap::from_iter([
            (MapKey::String("1".to_owned()), {
                let mut msg = empty_scalars();
                msg.set_field_by_name("double", Value::F64(1.1));
                msg.set_field_by_name("float", Value::F32(2.2));
                msg.set_field_by_name("int32", Value::I32(3));
                Value::Message(msg)
            }),
            (MapKey::String("2".to_owned()), {
                let mut msg = empty_scalars();
                msg.set_field_by_name("int64", Value::I64(4));
                msg.set_field_by_name("uint32", Value::U32(5));
                msg.set_field_by_name("uint64", Value::U64(6));
                Value::Message(msg)
            })
        ])),
    );
    // Integer-keyed map.
    assert_eq!(
        dynamic.get_field_by_name("int_map").unwrap().as_map(),
        Some(&HashMap::from_iter([
            (MapKey::I32(3), {
                let mut msg = empty_scalars();
                msg.set_field_by_name("sint32", Value::I32(7));
                msg.set_field_by_name("sint64", Value::I64(8));
                msg.set_field_by_name("fixed32", Value::U32(9));
                Value::Message(msg)
            }),
            (MapKey::I32(4), {
                let mut msg = empty_scalars();
                msg.set_field_by_name("sint64", Value::I64(8));
                msg.set_field_by_name("fixed32", Value::U32(9));
                msg.set_field_by_name("fixed64", Value::U64(10));
                Value::Message(msg)
            })
        ])),
    );
    // Singular nested message.
    assert_eq!(
        dynamic.get_field_by_name("nested").unwrap().as_message(),
        Some(&{
            let mut msg = empty_scalars();
            msg.set_field_by_name("sfixed32", Value::I32(11));
            msg.set_field_by_name("sfixed64", Value::I64(12));
            msg.set_field_by_name("bool", Value::Bool(true));
            msg.set_field_by_name("string", Value::String("5".to_owned()));
            msg.set_field_by_name("bytes", Value::Bytes(Bytes::from_static(b"6")));
            msg
        })
    );
    // Repeated enum field preserves raw numbers, including negatives.
    assert_eq!(
        dynamic.get_field_by_name("my_enum").unwrap().as_list(),
        Some(
            [
                Value::EnumNumber(0),
                Value::EnumNumber(1),
                Value::EnumNumber(2),
                Value::EnumNumber(3),
                Value::EnumNumber(-4),
            ]
            .as_ref()
        ),
    );
    assert_eq!(
        dynamic
            .get_field_by_name("optional_enum")
            .unwrap()
            .as_enum_number(),
        Some(1),
    );
    // Map with enum values.
    assert_eq!(
        dynamic.get_field_by_name("enum_map").unwrap().as_map(),
        Some(&HashMap::from_iter([
            (MapKey::I32(1), Value::EnumNumber(1)),
            (MapKey::I32(2), Value::EnumNumber(2)),
        ])),
    );
}
#[test]
fn decode_default_values() {
    // An empty message whose fields declare custom defaults must report
    // those defaults (not zero values) through the typed accessors.
    let dynamic = DynamicMessage::new(
        test_file_descriptor()
            .get_message_by_name(".test2.DefaultValues")
            .unwrap(),
    );

    // Shorthand: fetch a field's (default) value by name.
    let get = |name: &str| dynamic.get_field_by_name(name).unwrap();

    assert_eq!(get("double").as_f64(), Some(1.1));
    assert_eq!(get("float").as_f32(), Some(2.2));
    assert_eq!(get("int32").as_i32(), Some(-3));
    assert_eq!(get("int64").as_i64(), Some(4));
    assert_eq!(get("uint32").as_u32(), Some(5));
    assert_eq!(get("uint64").as_u64(), Some(6));
    assert_eq!(get("sint32").as_i32(), Some(-7));
    assert_eq!(get("sint64").as_i64(), Some(8));
    assert_eq!(get("fixed32").as_u32(), Some(9));
    assert_eq!(get("fixed64").as_u64(), Some(10));
    assert_eq!(get("sfixed32").as_i32(), Some(-11));
    assert_eq!(get("sfixed64").as_i64(), Some(12));
    assert_eq!(get("bool").as_bool(), Some(true));
    assert_eq!(get("string").as_str(), Some("hello"));
    // The bytes default exercises the full range of proto escape sequences.
    assert_eq!(
        get("bytes").as_bytes(),
        Some(&Bytes::from_static(
            b"\0\x01\x07\x08\x0C\n\r\t\x0B\\\'\"\xFE"
        ))
    );
    assert_eq!(get("defaulted_enum").as_enum_number(), Some(3));
    assert_eq!(get("enum").as_enum_number(), Some(2));
}
#[test]
fn set_oneof() {
    // Setting one member of a oneof clears any previously-set member.
    let mut msg = DynamicMessage::new(
        test_file_descriptor()
            .get_message_by_name(".test.MessageWithOneof")
            .unwrap(),
    );
    assert_eq!(
        msg.descriptor().oneofs().next().unwrap().name(),
        "test_oneof"
    );

    // Initially neither member is set.
    assert!(!msg.has_field_by_name("oneof_field_1"));
    assert!(!msg.has_field_by_name("oneof_field_2"));

    msg.set_field_by_name("oneof_field_1", Value::String("hello".to_owned()));
    assert!(msg.has_field_by_name("oneof_field_1"));
    assert!(!msg.has_field_by_name("oneof_field_2"));

    // Setting the sibling evicts the first member.
    msg.set_field_by_name("oneof_field_2", Value::I32(5));
    assert!(msg.has_field_by_name("oneof_field_2"));
    assert!(!msg.has_field_by_name("oneof_field_1"));
}
#[test]
fn set_oneof_to_default() {
    // Even when the value written is the field's default ("" / 0), setting a
    // oneof member marks it present and clears the sibling.
    let mut msg = DynamicMessage::new(
        test_file_descriptor()
            .get_message_by_name(".test.MessageWithOneof")
            .unwrap(),
    );
    assert_eq!(
        msg.descriptor().oneofs().next().unwrap().name(),
        "test_oneof"
    );
    assert!(!msg.has_field_by_name("oneof_field_1"));
    assert!(!msg.has_field_by_name("oneof_field_2"));

    msg.set_field_by_name("oneof_field_1", Value::String("".to_owned()));
    assert!(msg.has_field_by_name("oneof_field_1"));
    assert!(!msg.has_field_by_name("oneof_field_2"));

    msg.set_field_by_name("oneof_field_2", Value::I32(0));
    assert!(msg.has_field_by_name("oneof_field_2"));
    assert!(!msg.has_field_by_name("oneof_field_1"));
}
#[test]
fn roundtrip_scalars() {
    // A fully-populated scalar message must survive dynamic transcoding
    // (see the `roundtrip` helper for the exact paths exercised).
    let message = Scalars {
        double: 1.1,
        float: 2.2,
        int32: 3,
        int64: 4,
        uint32: 5,
        uint64: 6,
        sint32: 7,
        sint64: 8,
        fixed32: 9,
        fixed64: 10,
        sfixed32: 11,
        sfixed64: 12,
        r#bool: true,
        string: "5".to_owned(),
        bytes: b"6".to_vec(),
    };
    roundtrip(&message).unwrap();
}
#[test]
fn roundtrip_scalar_arrays() {
    // Repeated scalar fields must survive dynamic transcoding losslessly.
    let message = ScalarArrays {
        double: vec![1.1, 2.2],
        float: vec![3.3f32, 4.4f32],
        int32: vec![5, -6],
        int64: vec![7, -8],
        uint32: vec![9, 10],
        uint64: vec![11, 12],
        sint32: vec![13, -14],
        sint64: vec![15, -16],
        fixed32: vec![17, 18],
        fixed64: vec![19, 20],
        sfixed32: vec![21, -22],
        sfixed64: vec![23, 24],
        r#bool: vec![true, false],
        string: vec!["25".to_owned(), "26".to_owned()],
        bytes: vec![b"27".to_vec(), b"28".to_vec()],
    };
    roundtrip(&message).unwrap();
}
#[test]
fn roundtrip_complex_type() {
    // Maps, a nested message, repeated enums and an optional enum must all
    // survive dynamic transcoding (see the `roundtrip` helper for the exact
    // paths exercised).
    roundtrip(&ComplexType {
        string_map: HashMap::from_iter([
            (
                "1".to_owned(),
                Scalars {
                    double: 1.1,
                    float: 2.2,
                    int32: 3,
                    ..Default::default()
                },
            ),
            (
                "2".to_owned(),
                Scalars {
                    int64: 4,
                    uint32: 5,
                    uint64: 6,
                    ..Default::default()
                },
            ),
        ]),
        int_map: HashMap::from_iter([
            (
                3,
                Scalars {
                    sint32: 7,
                    sint64: 8,
                    fixed32: 9,
                    ..Default::default()
                },
            ),
            (
                4,
                Scalars {
                    sint64: 8,
                    fixed32: 9,
                    fixed64: 10,
                    ..Default::default()
                },
            ),
        ]),
        nested: Some(Scalars {
            sfixed32: 11,
            sfixed64: 12,
            r#bool: true,
            string: "5".to_owned(),
            bytes: b"6".to_vec(),
            ..Default::default()
        }),
        // Includes an out-of-range negative enum number (-4).
        my_enum: vec![0, 1, 2, 3, -4],
        optional_enum: 1,
        enum_map: HashMap::from_iter([(1, 1), (2, 2)]),
    })
    .unwrap();
}
#[test]
fn roundtrip_well_known_types() {
    // Well-known types (Timestamp, Duration, Struct, wrapper types,
    // FieldMask, ListValue, NullValue, Empty) must survive dynamic
    // transcoding (see the `roundtrip` helper for the exact paths).
    roundtrip(&WellKnownTypes {
        timestamp: Some(prost_types::Timestamp {
            seconds: 63_108_020,
            nanos: 21_000_000,
        }),
        duration: Some(prost_types::Duration {
            seconds: 1,
            nanos: 340_012,
        }),
        r#struct: Some(prost_types::Struct {
            fields: BTreeMap::from_iter([
                (
                    "number".to_owned(),
                    prost_types::Value {
                        kind: Some(prost_types::value::Kind::NumberValue(42.)),
                    },
                ),
                (
                    "null".to_owned(),
                    prost_types::Value {
                        kind: Some(prost_types::value::Kind::NullValue(0)),
                    },
                ),
            ]),
        }),
        float: Some(42.1),
        double: Some(12.4),
        int32: Some(1),
        int64: Some(-2),
        uint32: Some(3),
        uint64: Some(4),
        bool: Some(false),
        string: Some("hello".to_owned()),
        bytes: Some(b"hello".to_vec()),
        mask: Some(prost_types::FieldMask {
            paths: vec!["field_one".to_owned(), "field_two.b.d".to_owned()],
        }),
        list: Some(prost_types::ListValue {
            values: vec![
                prost_types::Value {
                    kind: Some(prost_types::value::Kind::StringValue("foo".to_owned())),
                },
                prost_types::Value {
                    kind: Some(prost_types::value::Kind::BoolValue(false)),
                },
            ],
        }),
        null: 0,
        empty: Some(()),
    })
    .unwrap();
}
// Property-based checks: arbitrary instances of each generated message type
// must survive the `roundtrip` helper. The case count is capped at 32 to
// keep the suite fast.
proptest! {
    #![proptest_config(ProptestConfig {
        cases: 32,
        .. ProptestConfig::default()
    })]
    #[test]
    fn roundtrip_arb_scalars(message: Scalars) {
        roundtrip(&message)?;
    }
    #[test]
    fn roundtrip_arb_scalar_arrays(message: ScalarArrays) {
        roundtrip(&message)?;
    }
    #[test]
    fn roundtrip_arb_complex_type(message: ComplexType) {
        roundtrip(&message)?;
    }
    #[test]
    fn roundtrip_arb_well_known_types(message: WellKnownTypes) {
        roundtrip(&message)?;
    }
}
#[test]
fn unpacked_fields_accept_packed_bytes() {
    // Parsers must accept a repeated scalar field in packed wire encoding
    // even when the field is declared unpacked.
    let desc = test_file_descriptor()
        .get_message_by_name("test2.UnpackedScalarArray")
        .unwrap();
    // Sanity-check the descriptor: repeated, but not packed.
    assert!(desc.get_field_by_name("unpacked_double").unwrap().is_list());
    assert!(!desc
        .get_field_by_name("unpacked_double")
        .unwrap()
        .is_packed());
    let mut message = DynamicMessage::new(desc);
    // Packed encoding of four little-endian doubles (bytes in octal):
    // tag varint 0o322 0o2 = field 42, wire type 2; length b' ' = 32 bytes.
    message
        .merge(
            [
                0o322, 0o2, b' ', 0, 0, 0, 0, 0, 0, 0, 0, 0o232, 0o231, 0o231, 0o231, 0o231, 0o231,
                0o271, b'?', 0o377, 0o377, 0o377, 0o377, 0o377, 0o377, 0o357, 0o177, 0, 0, 0, 0, 0,
                0, 0o20, 0,
            ]
            .as_ref(),
        )
        .unwrap();
    // Expected elements: 0.0, 0.1, then f64::MAX and the smallest positive
    // normal f64 written out in full decimal.
    assert_eq!(
        message
            .get_field_by_name("unpacked_double")
            .unwrap()
            .as_list(),
        Some([
            Value::F64(0.0),
            Value::F64(0.1),
            Value::F64(179769313486231570000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000.0),
            Value::F64(0.000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000000022250738585072014),
        ].as_ref())
    );
}
#[test]
fn unknown_fields_are_roundtripped() {
    // Field 1, varint 150 — not a member of google.protobuf.Empty, so it
    // must be preserved as an unknown field and re-emitted byte-for-byte.
    const BYTES: &[u8] = b"\x08\x96\x01";
    let desc = test_file_descriptor()
        .get_message_by_name("google.protobuf.Empty")
        .unwrap();
    let mut message = DynamicMessage::new(desc);
    message.merge(BYTES).unwrap();
    assert_eq!(&message.encode_to_vec(), BYTES);
    // Inspect the retained unknown field: number 1, varint wire type,
    // encoding back to the original 3 bytes.
    let unknown_fields = message.unknown_fields().cloned().collect::<Vec<_>>();
    assert_eq!(unknown_fields.len(), 1);
    assert_eq!(unknown_fields[0].number(), 1);
    assert_eq!(unknown_fields[0].wire_type(), WireType::Varint);
    assert_eq!(unknown_fields[0].encoded_len(), 3);
    let mut field_buf = Vec::new();
    unknown_fields[0].encode(&mut field_buf);
    assert_eq!(field_buf, BYTES);
    // Taking the unknown fields drains them; the message then encodes empty.
    assert!(message.take_unknown_fields().eq(unknown_fields));
    assert!(message.encode_to_vec().is_empty());
    assert_eq!(message.unknown_fields().count(), 0);
}
#[test]
fn proto3_default_fields_are_not_encoded() {
    // A message whose fields all hold their default values must encode to
    // zero bytes.
    let msg = ComplexType {
        optional_enum: 0,
        ..Default::default()
    }
    .transcode_to_dynamic();
    let encoded = msg.encode_to_vec();
    assert!(encoded.is_empty());
}
#[test]
fn oneof_set_multiple_values() {
    // When a oneof appears multiple times in the input bytes, the last
    // occurrence wins, and re-encoding emits only that occurrence.
    let mut buf = Vec::new();
    MessageWithOneof {
        test_oneof: Some(message_with_oneof::TestOneof::OneofField1(
            "hello".to_owned(),
        )),
    }
    .encode(&mut buf)
    .unwrap();
    MessageWithOneof {
        test_oneof: Some(message_with_oneof::TestOneof::OneofField2(5)),
    }
    .encode(&mut buf)
    .unwrap();

    let decoded = DynamicMessage::decode(
        test_file_descriptor()
            .get_message_by_name("test.MessageWithOneof")
            .unwrap(),
        buf.as_ref(),
    )
    .unwrap();

    // Only the second (last) member survives.
    assert!(!decoded.has_field_by_name("oneof_field_1"));
    assert!(decoded.has_field_by_name("oneof_field_2"));
    assert_eq!(decoded.encode_to_vec().as_slice(), b"\x10\x05");
}
#[test]
fn roundtrip_extension() {
    // An extension value must survive an encode/decode cycle, and the
    // extension must be discoverable via its JSON name.
    let desc = test_file_descriptor()
        .get_message_by_name("my.package2.MyMessage")
        .unwrap();
    let ext = desc.get_extension(113).unwrap();
    assert_eq!(
        desc.get_extension_by_json_name(ext.json_name()),
        Some(ext.clone())
    );

    let mut msg = DynamicMessage::new(desc.clone());
    msg.set_extension(&ext, Value::F64(42.0));

    let bytes = msg.encode_to_vec();
    let decoded = DynamicMessage::decode(desc, bytes.as_ref()).unwrap();
    assert!(decoded.has_extension(&ext));
    assert_eq!(decoded.get_extension(&ext).as_ref(), &Value::F64(42.0));
}
#[test]
fn roundtrip_file_descriptor_set() {
    // The test pool's own FileDescriptorSet must survive dynamic
    // transcoding.
    let file = test_file_descriptor()
        .file_descriptor_protos()
        .cloned()
        .collect();
    roundtrip(&FileDescriptorSet { file }).unwrap();
}
#[test]
fn roundtrip_group() {
    // Groups: check the descriptor flags (is_group, and is_list for the
    // repeated one), then roundtrip required/optional/repeated group fields.
    let message = test_file_descriptor()
        .get_message_by_name("test2.ContainsGroup")
        .unwrap();
    assert!(message
        .get_field_by_name("requiredgroup")
        .unwrap()
        .is_group());
    assert!(message
        .get_field_by_name("optionalgroup")
        .unwrap()
        .is_group());
    assert!(message
        .get_field_by_name("repeatedgroup")
        .unwrap()
        .is_group());
    assert!(message
        .get_field_by_name("repeatedgroup")
        .unwrap()
        .is_list());
    roundtrip(&ContainsGroup {
        requiredgroup: Some(contains_group::RequiredGroup {
            a: "bar".to_string(),
            b: None,
        }),
        optionalgroup: Some(contains_group::OptionalGroup {
            c: "foo".to_string(),
            d: Some(-5),
        }),
        // One default-valued and one populated repeated group entry.
        repeatedgroup: vec![
            contains_group::RepeatedGroup {
                ..Default::default()
            },
            contains_group::RepeatedGroup {
                e: "hello".to_string(),
                f: Some(10),
            },
        ],
    })
    .unwrap();
}
/// Asserts that `message` survives three transcoding paths unchanged:
///
/// 1. transcode to a `DynamicMessage` and back to `T`;
/// 2. transcode into `google.protobuf.Empty` (where every field lands in
///    unknown fields) and back to `T`;
/// 3. transcode one dynamic message into a fresh one, which must equal the
///    source dynamic message.
///
/// Returns a `TestCaseError` (via `prop_assert_eq!`) so it can be driven by
/// both plain `#[test]`s and `proptest!` property tests.
fn roundtrip<T>(message: &T) -> Result<(), TestCaseError>
where
    T: PartialEq + Debug + ReflectMessage + Default,
{
    let dynamic_message = message.transcode_to_dynamic();
    let roundtripped_message: T = dynamic_message.transcode_to().unwrap();
    prop_assert_eq!(message, &roundtripped_message);
    // Check roundtripping through unknown fields works
    let mut empty_message = DynamicMessage::new(
        test_file_descriptor()
            .get_message_by_name("google.protobuf.Empty")
            .unwrap(),
    );
    empty_message.transcode_from(message).unwrap();
    let unknown_roundtripped_message: T = empty_message.transcode_to().unwrap();
    prop_assert_eq!(
        message,
        &unknown_roundtripped_message,
        "roundtrip through unknown fields failed"
    );
    // Check that transcoding to a new dynamic message is equivalent to just cloning it.
    let mut duplicate_message = DynamicMessage::new(dynamic_message.descriptor());
    duplicate_message.transcode_from(&dynamic_message).unwrap();
    assert_eq!(dynamic_message, duplicate_message);
    Ok(())
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-tests/src/text_format.rs | prost-reflect-tests/src/text_format.rs | use std::{
collections::{BTreeMap, HashMap},
fmt::Debug,
iter::FromIterator,
};
use proptest::prelude::*;
use prost::Message;
use prost_reflect::{text_format::FormatOptions, DynamicMessage, ReflectMessage, Value};
use crate::{
proto::{
contains_group, ComplexType, ContainsGroup, IndexOrder, MessageWithAliasedEnum, Point,
ScalarArrays, Scalars, WellKnownTypes,
},
test_file_descriptor,
};
#[test]
fn fmt_scalars() {
    // Compact text format joins `name:value` pairs with commas; pretty mode
    // puts one `name: value` per line. Non-printable bytes are escaped with
    // three-digit octal sequences.
    let value = Scalars {
        double: 1.1,
        float: 2.2,
        int32: 3,
        int64: 4,
        uint32: 5,
        uint64: 6,
        sint32: 7,
        sint64: 8,
        fixed32: 9,
        fixed64: 10,
        sfixed32: 11,
        sfixed64: 12,
        r#bool: true,
        string: "5".to_owned(),
        // Deliberately includes high (non-ASCII) bytes to exercise escaping.
        bytes: b"i\xa6\xbem\xb6\xffX".to_vec(),
    }
    .transcode_to_dynamic();
    assert_eq!(
        value.to_text_format(),
        r#"double:1.1,float:2.2,int32:3,int64:4,uint32:5,uint64:6,sint32:7,sint64:8,fixed32:9,fixed64:10,sfixed32:11,sfixed64:12,bool:true,string:"5",bytes:"i\246\276m\266\377X""#
    );
    assert_eq!(value.to_text_format_with_options(&FormatOptions::new().pretty(true)), "double: 1.1\nfloat: 2.2\nint32: 3\nint64: 4\nuint32: 5\nuint64: 6\nsint32: 7\nsint64: 8\nfixed32: 9\nfixed64: 10\nsfixed32: 11\nsfixed64: 12\nbool: true\nstring: \"5\"\nbytes: \"i\\246\\276m\\266\\377X\"");
}
#[test]
fn fmt_scalars_float_extrema() {
    // Non-finite floats render as inf / -inf / NaN in both compact and
    // pretty text format.
    let make = |f: f32, d: f64| {
        Scalars {
            float: f,
            double: d,
            ..Default::default()
        }
        .transcode_to_dynamic()
    };
    let pretty = FormatOptions::new().pretty(true);

    let inf = make(f32::INFINITY, f64::INFINITY);
    assert_eq!(inf.to_text_format(), "double:inf,float:inf");
    assert_eq!(
        inf.to_text_format_with_options(&pretty),
        "double: inf\nfloat: inf"
    );

    let neg_inf = make(f32::NEG_INFINITY, f64::NEG_INFINITY);
    assert_eq!(neg_inf.to_text_format(), "double:-inf,float:-inf");
    assert_eq!(
        neg_inf.to_text_format_with_options(&pretty),
        "double: -inf\nfloat: -inf"
    );

    let nan = make(f32::NAN, f64::NAN);
    assert_eq!(nan.to_text_format(), "double:NaN,float:NaN");
    assert_eq!(
        nan.to_text_format_with_options(&pretty),
        "double: NaN\nfloat: NaN"
    );
}
#[test]
fn fmt_scalars_default() {
    // A message with no populated fields formats as the empty string in
    // both compact and pretty modes.
    let msg = Scalars::default().transcode_to_dynamic();
    assert_eq!(msg.to_text_format(), "");
    let pretty = FormatOptions::new().pretty(true);
    assert_eq!(msg.to_text_format_with_options(&pretty), "");
}
#[test]
fn fmt_scalar_array() {
    // Repeated fields render as bracketed lists: comma-separated in compact
    // mode, comma-space-separated in pretty mode.
    let value = ScalarArrays {
        double: vec![1.1, 2.2],
        float: vec![3.3f32, 4.4f32],
        int32: vec![5, -6],
        int64: vec![7, -8],
        uint32: vec![9, 10],
        uint64: vec![11, 12],
        sint32: vec![13, -14],
        sint64: vec![15, -16],
        fixed32: vec![17, 18],
        fixed64: vec![19, 20],
        sfixed32: vec![21, -22],
        sfixed64: vec![23, 24],
        r#bool: vec![true, false],
        string: vec!["25".to_owned(), "26".to_owned()],
        bytes: vec![b"27".to_vec(), b"28".to_vec()],
    }
    .transcode_to_dynamic();
    assert_eq!(value.to_text_format(), "double:[1.1,2.2],float:[3.3,4.4],int32:[5,-6],int64:[7,-8],uint32:[9,10],uint64:[11,12],sint32:[13,-14],sint64:[15,-16],fixed32:[17,18],fixed64:[19,20],sfixed32:[21,-22],sfixed64:[23,24],bool:[true,false],string:[\"25\",\"26\"],bytes:[\"27\",\"28\"]");
    assert_eq!(value.to_text_format_with_options(&FormatOptions::new().pretty(true)), "double: [1.1, 2.2]\nfloat: [3.3, 4.4]\nint32: [5, -6]\nint64: [7, -8]\nuint32: [9, 10]\nuint64: [11, 12]\nsint32: [13, -14]\nsint64: [15, -16]\nfixed32: [17, 18]\nfixed64: [19, 20]\nsfixed32: [21, -22]\nsfixed64: [23, 24]\nbool: [true, false]\nstring: [\"25\", \"26\"]\nbytes: [\"27\", \"28\"]");
}
#[test]
fn fmt_complex_type() {
    // Maps render as lists of {key,value} entries; nested messages as braced
    // blocks. Enum numbers with a matching variant print symbolically
    // (DEFAULT, FOO, BAR, NEG); numbers without one (2) print numerically.
    let value = ComplexType {
        string_map: HashMap::from_iter([(
            "1".to_owned(),
            Scalars {
                double: 1.1,
                float: 2.2,
                int32: 3,
                ..Default::default()
            },
        )]),
        int_map: HashMap::from_iter([(
            3,
            Scalars {
                sint32: 7,
                sint64: 8,
                fixed32: 9,
                ..Default::default()
            },
        )]),
        nested: Some(Scalars {
            sfixed32: 11,
            sfixed64: 12,
            r#bool: true,
            string: "5".to_owned(),
            bytes: b"6".to_vec(),
            ..Default::default()
        }),
        my_enum: vec![0, 1, 2, 3, -4],
        optional_enum: 1,
        enum_map: HashMap::from_iter([(1, 1)]),
    }
    .transcode_to_dynamic();
    assert_eq!(value.to_text_format(), "string_map:[{key:\"1\",value{double:1.1,float:2.2,int32:3}}],int_map:[{key:3,value{sint32:7,sint64:8,fixed32:9}}],nested{sfixed32:11,sfixed64:12,bool:true,string:\"5\",bytes:\"6\"},my_enum:[DEFAULT,FOO,2,BAR,NEG],optional_enum:FOO,enum_map:[{key:1,value:FOO}]");
    assert_eq!(value.to_text_format_with_options(&FormatOptions::new().pretty(true)), "string_map: [{\n  key: \"1\"\n  value {\n    double: 1.1\n    float: 2.2\n    int32: 3\n  }\n}]\nint_map: [{\n  key: 3\n  value {\n    sint32: 7\n    sint64: 8\n    fixed32: 9\n  }\n}]\nnested {\n  sfixed32: 11\n  sfixed64: 12\n  bool: true\n  string: \"5\"\n  bytes: \"6\"\n}\nmy_enum: [DEFAULT, FOO, 2, BAR, NEG]\noptional_enum: FOO\nenum_map: [{\n  key: 1\n  value: FOO\n}]");
}
#[test]
fn fmt_well_known_types() {
    // Well-known types format through their regular message fields (no JSON
    // special-casing here): wrappers print as `{value:...}`, an empty
    // wrapper/Empty as `{}`.
    let value = WellKnownTypes {
        timestamp: Some(prost_types::Timestamp {
            seconds: 63_108_020,
            nanos: 21_000_000,
        }),
        duration: Some(prost_types::Duration {
            seconds: 1,
            nanos: 340_012,
        }),
        r#struct: Some(prost_types::Struct {
            fields: BTreeMap::from_iter([(
                "number".to_owned(),
                prost_types::Value {
                    kind: Some(prost_types::value::Kind::NumberValue(42.)),
                },
            )]),
        }),
        float: Some(42.1),
        double: Some(12.4),
        int32: Some(1),
        int64: Some(-2),
        uint32: Some(3),
        uint64: Some(4),
        bool: Some(false),
        string: Some("hello".to_owned()),
        bytes: Some(b"hello".to_vec()),
        mask: Some(prost_types::FieldMask {
            paths: vec!["field_one".to_owned(), "field_two.b.d".to_owned()],
        }),
        list: Some(prost_types::ListValue {
            values: vec![
                prost_types::Value {
                    kind: Some(prost_types::value::Kind::StringValue("foo".to_owned())),
                },
                prost_types::Value {
                    kind: Some(prost_types::value::Kind::BoolValue(false)),
                },
            ],
        }),
        null: 0,
        empty: Some(()),
    }
    .transcode_to_dynamic();
    assert_eq!(value.to_text_format(), "timestamp{seconds:63108020,nanos:21000000},duration{seconds:1,nanos:340012},struct{fields:[{key:\"number\",value{number_value:42.0}}]},float{value:42.1},double{value:12.4},int32{value:1},int64{value:-2},uint32{value:3},uint64{value:4},bool{},string{value:\"hello\"},bytes{value:\"hello\"},mask{paths:[\"field_one\",\"field_two.b.d\"]},list{values:[{string_value:\"foo\"},{bool_value:false}]},empty{}");
    assert_eq!(value.to_text_format_with_options(&FormatOptions::new().pretty(true)), "timestamp {\n  seconds: 63108020\n  nanos: 21000000\n}\nduration {\n  seconds: 1\n  nanos: 340012\n}\nstruct {\n  fields: [{\n    key: \"number\"\n    value {\n      number_value: 42.0\n    }\n  }]\n}\nfloat {\n  value: 42.1\n}\ndouble {\n  value: 12.4\n}\nint32 {\n  value: 1\n}\nint64 {\n  value: -2\n}\nuint32 {\n  value: 3\n}\nuint64 {\n  value: 4\n}\nbool {}\nstring {\n  value: \"hello\"\n}\nbytes {\n  value: \"hello\"\n}\nmask {\n  paths: [\"field_one\", \"field_two.b.d\"]\n}\nlist {\n  values: [{\n    string_value: \"foo\"\n  }, {\n    bool_value: false\n  }]\n}\nempty {}");
}
#[test]
fn fmt_empty() {
    // The unit message has no fields, so it formats as the empty string in
    // both compact and pretty modes.
    let msg = ().transcode_to_dynamic();
    assert_eq!(msg.to_text_format(), "");
    let pretty = FormatOptions::new().pretty(true);
    assert_eq!(msg.to_text_format_with_options(&pretty), "");
}
// An `Any` whose payload type is resolvable in the test pool renders inline as
// `[type_url]{...decoded payload fields...}`.
#[test]
fn fmt_any() {
    let value = transcode_any(&prost_types::Any {
        type_url: "type.googleapis.com/test.Point".to_owned(),
        value: Point {
            longitude: 1,
            latitude: 2,
        }
        .encode_to_vec(),
    });
    assert_eq!(
        value.to_text_format(),
        "[type.googleapis.com/test.Point]{latitude:2,longitude:1}"
    );
    assert_eq!(
        value.to_text_format_with_options(&FormatOptions::new().pretty(true)),
        "[type.googleapis.com/test.Point] {\n latitude: 2\n longitude: 1\n}"
    );
}
// An `Any` wrapping a well-known wrapper type (Int32Value) also renders with its
// decoded payload inline.
#[test]
fn fmt_any_wkt() {
    let value = transcode_any(&prost_types::Any {
        type_url: "type.googleapis.com/google.protobuf.Int32Value".to_owned(),
        value: 5i32.encode_to_vec(),
    });
    assert_eq!(
        value.to_text_format(),
        r#"[type.googleapis.com/google.protobuf.Int32Value]{value:5}"#
    );
    assert_eq!(
        value.to_text_format_with_options(&FormatOptions::new().pretty(true)),
        "[type.googleapis.com/google.protobuf.Int32Value] {\n value: 5\n}"
    );
}
// An `Any` with an empty payload renders as `[type_url]{}`.
#[test]
fn fmt_any_empty() {
    let value = transcode_any(&prost_types::Any {
        type_url: "type.googleapis.com/google.protobuf.Empty".to_owned(),
        value: vec![],
    });
    assert_eq!(
        value.to_text_format(),
        r#"[type.googleapis.com/google.protobuf.Empty]{}"#
    );
    assert_eq!(
        value.to_text_format_with_options(&FormatOptions::new().pretty(true)),
        "[type.googleapis.com/google.protobuf.Empty] {}"
    );
}
// If the type URL has no '/'-separated type name, the formatter falls back to
// printing the raw `type_url` field instead of expanding the payload.
#[test]
fn fmt_any_invalid_type_name() {
    let value = transcode_any(&prost_types::Any {
        type_url: "hello".to_owned(),
        value: vec![],
    });
    assert_eq!(value.to_text_format(), "type_url:\"hello\"",);
    assert_eq!(
        value.to_text_format_with_options(&FormatOptions::new().pretty(true)),
        "type_url: \"hello\"",
    );
}
// If the type name in the URL cannot be resolved, the formatter falls back to the
// raw `type_url` field rather than failing.
#[test]
fn fmt_any_type_name_not_found() {
    let value = transcode_any(&prost_types::Any {
        type_url: "type.googleapis.com/NotFound".to_owned(),
        value: vec![],
    });
    assert_eq!(
        value.to_text_format(),
        "type_url:\"type.googleapis.com/NotFound\"",
    );
    assert_eq!(
        value.to_text_format_with_options(&FormatOptions::new().pretty(true)),
        "type_url: \"type.googleapis.com/NotFound\"",
    );
}
// If the payload bytes do not decode as the named type, both raw fields
// (`type_url` and `value`) are printed instead of the expanded form.
#[test]
fn fmt_any_invalid_bytes() {
    let value = transcode_any(&prost_types::Any {
        type_url: "type.googleapis.com/google.protobuf.Empty".to_owned(),
        value: b"hello".to_vec(),
    });
    assert_eq!(
        value.to_text_format(),
        "type_url:\"type.googleapis.com/google.protobuf.Empty\",value:\"hello\"",
    );
    assert_eq!(
        value.to_text_format_with_options(&FormatOptions::new().pretty(true)),
        "type_url: \"type.googleapis.com/google.protobuf.Empty\"\nvalue: \"hello\""
    );
}
// Proto2 groups render under their CamelCase group name (e.g. `RequiredGroup`),
// with repeated groups formatted as a list.
#[test]
fn fmt_group() {
    let value = ContainsGroup {
        requiredgroup: Some(contains_group::RequiredGroup {
            a: "bar".to_owned(),
            b: None,
        }),
        optionalgroup: Some(contains_group::OptionalGroup {
            c: "foo".to_owned(),
            d: Some(-5),
        }),
        repeatedgroup: vec![
            contains_group::RepeatedGroup {
                ..Default::default()
            },
            contains_group::RepeatedGroup {
                e: "hello".to_owned(),
                f: Some(10),
            },
        ],
    }
    .transcode_to_dynamic();
    assert_eq!(
        value.to_text_format(),
        "RequiredGroup{a:\"bar\"},OptionalGroup{c:\"foo\",d:-5},RepeatedGroup:[{e:\"\"},{e:\"hello\",f:10}]",
    );
    assert_eq!(
        value.to_text_format_with_options(&FormatOptions::new().pretty(true)),
        "RequiredGroup {\n a: \"bar\"\n}\nOptionalGroup {\n c: \"foo\"\n d: -5\n}\nRepeatedGroup: [{\n e: \"\"\n}, {\n e: \"hello\"\n f: 10\n}]"
    );
}
#[test]
fn fmt_index_order() {
    // Output field order is controlled by `print_message_fields_in_index_order`:
    // true follows field numbers, false follows declaration order.
    let message = IndexOrder { a: 1, b: 2, c: 3 }.transcode_to_dynamic();
    let render = |index_order: bool| {
        message.to_text_format_with_options(
            &FormatOptions::new().print_message_fields_in_index_order(index_order),
        )
    };
    assert_eq!(render(true), "a:1,b:2,c:3");
    assert_eq!(render(false), "c:3,b:2,a:1");
}
// Groups must be parsed via their CamelCase group name; the lowercase field name
// is rejected with a field-not-found error.
#[test]
fn parse_group() {
    let value = ContainsGroup {
        requiredgroup: Some(contains_group::RequiredGroup {
            a: "bar".to_owned(),
            b: None,
        }),
        optionalgroup: Some(contains_group::OptionalGroup {
            c: "foo".to_owned(),
            d: Some(-5),
        }),
        repeatedgroup: vec![
            contains_group::RepeatedGroup {
                ..Default::default()
            },
            contains_group::RepeatedGroup {
                e: "hello".to_owned(),
                f: Some(10),
            },
        ],
    }
    .transcode_to_dynamic();
    assert_eq!(
        DynamicMessage::parse_text_format(value.descriptor(), "RequiredGroup{a:\"bar\"},OptionalGroup{c:\"foo\",d:-5},RepeatedGroup:[{e:\"\"},{e:\"hello\",f:10}]").unwrap(),
        value,
    );
    assert_eq!(
        DynamicMessage::parse_text_format(value.descriptor(), "requiredgroup{a:\"bar\"}")
            .unwrap_err()
            .to_string(),
        "field 'requiredgroup' not found for message 'test2.ContainsGroup'"
    );
}
// Round-trips every scalar field kind through the text-format parser, including
// C-style escape sequences (octal, hex, control characters) in the bytes field.
#[test]
fn parse_scalars() {
    let value: Scalars = from_text(
        "
        double: 1.1,
        float: 2.2f,
        int32: 3,
        int64: 4,
        uint32: 5,
        uint64: 6,
        sint32: 7,
        sint64: 8,
        fixed32: 9,
        fixed64: 10,
        sfixed32: 11,
        sfixed64: 12,
        bool: true,
        string: \"5\",
        bytes: \"abc\\366\\xFE\\a\\b\\f\\n\\r\\t\\v\\\\\\'\\\"\\x00\",
    ",
    );
    assert_eq!(
        value,
        Scalars {
            double: 1.1,
            float: 2.2,
            int32: 3,
            int64: 4,
            uint32: 5,
            uint64: 6,
            sint32: 7,
            sint64: 8,
            fixed32: 9,
            fixed64: 10,
            sfixed32: 11,
            sfixed64: 12,
            r#bool: true,
            string: "5".to_owned(),
            // Decoded form of the escape sequences above.
            bytes: b"abc\xf6\xfe\x07\x08\x0c\n\r\t\x0b\\'\"\x00".to_vec(),
        },
    );
}
// Exercises every boolean spelling accepted by the text format.
#[test]
fn parse_bool() {
    // Parses a text-format message containing only the `bool` field.
    fn parse(s: &str) -> bool {
        from_text::<Scalars>(s).bool
    }
    // Falsey spellings.
    assert!(!parse("bool: false"));
    assert!(!parse("bool: False"));
    assert!(!parse("bool: f"));
    assert!(!parse("bool: 0"));
    assert!(!parse("bool: 00"));
    assert!(!parse("bool: 0x0"));
    // Truthy spellings. (The original test asserted `bool: t` twice; the
    // duplicate has been removed.)
    assert!(parse("bool: true"));
    assert!(parse("bool: True"));
    assert!(parse("bool: t"));
    assert!(parse("bool: 1"));
    assert!(parse("bool: 01"));
    assert!(parse("bool: 0x1"));
}
// Non-finite floats parse case-insensitively ("infinity", "INF", "NaN", "nan").
#[test]
fn parse_scalars_float_extrema() {
    assert_eq!(
        from_text::<Scalars>("double: infinity, float: -INF"),
        Scalars {
            double: f64::INFINITY,
            float: f32::NEG_INFINITY,
            ..Default::default()
        }
    );
    // NaN != NaN, so check with is_nan() rather than equality.
    let nan: Scalars = from_text("double: NaN, float: -nan");
    assert!(nan.float.is_nan());
    assert!(nan.double.is_nan());
}
#[test]
fn parse_scalars_empty() {
    // Parsing the empty string yields a default message.
    assert_eq!(from_text::<Scalars>(""), Scalars::default());
}
#[test]
fn parse_aliased_enum() {
    // Both enum name aliases decode to the same numeric value.
    for text in ["aliased: A", "aliased: B"] {
        let parsed: MessageWithAliasedEnum = from_text(text);
        assert_eq!(parsed, MessageWithAliasedEnum { aliased: 1 });
    }
}
// Repeated fields accept bracketed lists, repeated occurrences that append, and
// empty lists; ',' and ';' are interchangeable separators.
#[test]
fn parse_array() {
    let value: ScalarArrays = from_text("double: [1.1, 2f] , float: 3 ; float: inf ; int32: [ ]");
    assert_eq!(
        value,
        ScalarArrays {
            double: vec![1.1, 2.0],
            float: vec![3.0, f32::INFINITY],
            ..Default::default()
        },
    );
}
// End-to-end parse of maps, nested messages, enums (by name and number), string
// concatenation ("2" "3"), and both `{}` and `<>` message delimiters.
#[test]
fn parse_complex_type() {
    let value: ComplexType = from_text(
        "
        string_map: [{
            key: '1';
            value: {
                double: 1.1;
                float: 2.2f;
                int32: 3;
            };
        }, {
            key: \"2\" \"3\";
            value: {
                int64: 4,
                uint32: 5,
                uint64: 6,
            };
        }],
        int_map: [{
            key: 3,
            value: <
                sint32: 7
                sint64: 8
                fixed32: 9
            >
        }, {
            key: 4,
            value: <
                sint64: 8
                fixed32: 9
                fixed64: 10
            >
        }],
        nested {
            sfixed32: 11
            sfixed64: 12
            bool: true
            string: '5'
            bytes: \"6\"
        },
        my_enum: [DEFAULT, FOO]
        my_enum: [2, BAR]
        my_enum: NEG
        my_enum: []
        optional_enum: FOO
        enum_map: [{
            key: 1
            value: FOO
        }, {
            key: 2
            value: 2
        }]
    ",
    );
    assert_eq!(
        value,
        ComplexType {
            string_map: HashMap::from_iter([
                (
                    "1".to_owned(),
                    Scalars {
                        double: 1.1,
                        float: 2.2,
                        int32: 3,
                        ..Default::default()
                    },
                ),
                (
                    // Adjacent string literals "2" "3" concatenate to "23".
                    "23".to_owned(),
                    Scalars {
                        int64: 4,
                        uint32: 5,
                        uint64: 6,
                        ..Default::default()
                    },
                ),
            ]),
            int_map: HashMap::from_iter([
                (
                    3,
                    Scalars {
                        sint32: 7,
                        sint64: 8,
                        fixed32: 9,
                        ..Default::default()
                    },
                ),
                (
                    4,
                    Scalars {
                        sint64: 8,
                        fixed32: 9,
                        fixed64: 10,
                        ..Default::default()
                    },
                ),
            ]),
            nested: Some(Scalars {
                sfixed32: 11,
                sfixed64: 12,
                r#bool: true,
                string: "5".to_owned(),
                bytes: b"6".to_vec(),
                ..Default::default()
            }),
            // Repeated occurrences of `my_enum` append in order.
            my_enum: vec![0, 1, 2, 3, -4],
            optional_enum: 1,
            enum_map: HashMap::from_iter([(1, 1), (2, 2),]),
        }
    );
}
// Parsing an `Any` expansion (`[type_url]: {...}`) from text format re-encodes the
// payload into the `value` bytes field.
#[test]
fn deserialize_any() {
    let desc = test_file_descriptor()
        .get_message_by_name("google.protobuf.Any")
        .unwrap();
    let value: prost_types::Any = DynamicMessage::parse_text_format(
        desc,
        "[type.googleapis.com/test.Point]: {
            longitude: 1,
            latitude: 2,
        }",
    )
    .unwrap()
    .transcode_to()
    .unwrap();
    assert_eq!(
        value,
        prost_types::Any {
            type_url: "type.googleapis.com/test.Point".to_owned(),
            value: Point {
                longitude: 1,
                latitude: 2,
            }
            .encode_to_vec(),
        }
    );
}
// Exhaustive check of parser error messages for malformed input: wrong token
// kinds, out-of-range integers, bad escapes, unknown fields/extensions, and
// invalid enum/Any values.
#[test]
fn parse_error() {
    // Error text from parsing into test.Scalars.
    fn error(s: &str) -> String {
        let desc = test_file_descriptor()
            .get_message_by_name("test.Scalars")
            .unwrap();
        DynamicMessage::parse_text_format(desc, s)
            .unwrap_err()
            .to_string()
    }
    // Error text from parsing into google.protobuf.Any.
    fn any_error(s: &str) -> String {
        let desc = test_file_descriptor()
            .get_message_by_name("google.protobuf.Any")
            .unwrap();
        DynamicMessage::parse_text_format(desc, s)
            .unwrap_err()
            .to_string()
    }
    // Error text from parsing into test.ComplexType (for enum cases).
    fn ct_error(s: &str) -> String {
        let desc = test_file_descriptor()
            .get_message_by_name("test.ComplexType")
            .unwrap();
        DynamicMessage::parse_text_format(desc, s)
            .unwrap_err()
            .to_string()
    }
    assert_eq!(
        error(r#"string: -'string'"#),
        "expected a string, but found '-'"
    );
    assert_eq!(
        error(r#"float: -"#),
        "expected a number, but reached end of input"
    );
    assert_eq!(
        error(r#"double"#),
        "expected ':' or a message value, but reached end of input"
    );
    assert_eq!(error(r#"int32: {"#), "expected an integer, but found '{'");
    assert_eq!(
        error(r#"float: {} foo: 10f"#),
        "expected a number, but found '{'"
    );
    assert_eq!(error(r#"uint32: <"#), "expected an integer, but found '<'");
    assert_eq!(
        error(r#"sfixed64 'foo'"#),
        "expected ':' or a message value, but found '\"foo\"'"
    );
    assert_eq!(error(r#"sfixed64 =="#), "invalid token");
    assert_eq!(error(r#"string: "\xFF""#), "string is not valid utf-8");
    // Out-of-range integer literals.
    assert_eq!(error(r#"int32: 3074457345618258432"#), "expected value to be a signed 32-bit integer, but the value 3074457345618258432 is out of range");
    assert_eq!(
        error(r#"uint32: -7483648"#),
        "expected value to be an unsigned 32-bit integer, but the value -7483648 is out of range"
    );
    assert_eq!(error(r#"int64: -18446744073709551615"#), "expected value to be a signed 64-bit integer, but the value -18446744073709551615 is out of range");
    assert_eq!(
        error(r#"uint64: -1"#),
        "expected value to be an unsigned 64-bit integer, but the value -1 is out of range"
    );
    assert_eq!(error("double: '1'"), "expected a number, but found '\"1\"'");
    assert_eq!(error("double: FOO"), "expected a number, but found 'FOO'");
    assert_eq!(
        error("int32: '1'"),
        "expected an integer, but found '\"1\"'"
    );
    assert_eq!(
        error("fixed64: BAR"),
        "expected an integer, but found 'BAR'"
    );
    // Booleans are case-sensitive apart from the accepted spellings.
    assert_eq!(
        error("bool: TRUE"),
        "expected 'true' or 'false', but found 'TRUE'"
    );
    assert_eq!(
        error("bool: tRuE"),
        "expected 'true' or 'false', but found 'tRuE'"
    );
    assert_eq!(error("bool: 3"), "expected 0 or 1, but found '3'");
    assert_eq!(
        error("bool: 99999999999999"),
        "expected 0 or 1, but found '99999999999999'"
    );
    assert_eq!(error("bytes: 1.2"), "expected a string, but found '1.2'");
    assert_eq!(error("bytes: TRUE"), "expected a string, but found 'TRUE'");
    assert_eq!(error("bytes: '\\x'"), "invalid string escape");
    assert_eq!(error("bytes {}"), "expected a string, but found '{'");
    assert_eq!(
        error("notfound: 5"),
        "field 'notfound' not found for message 'test.Scalars'"
    );
    assert_eq!(
        error("string: '5' ; string: '6'"),
        "'string' is already set"
    );
    assert_eq!(
        error("[my . ext]: '5'"),
        "extension 'my.ext' not found for message 'test.Scalars'"
    );
    assert_eq!(
        any_error("[type.googleapis.com/namespace.NotFound] {}"),
        "message type 'namespace.NotFound' not found"
    );
    assert_eq!(
        any_error("[type.googleapis.com/test.Scalars]: 5"),
        "expected '{' or '<', but found '5'"
    );
    assert_eq!(
        ct_error("my_enum: NOTFOUND"),
        "value 'NOTFOUND' was not found for enum 'test.ComplexType.MyEnum'"
    );
    assert_eq!(
        ct_error("my_enum: 4.2"),
        "expected an enum value, but found '4.2'"
    );
    assert_eq!(
        ct_error("my_enum: 'nope'"),
        "expected an enum value, but found '\"nope\"'"
    );
}
// Setting two fields of the same oneof is an error; setting one succeeds.
#[test]
fn duplicate_oneof_field() {
    let desc = test_file_descriptor()
        .get_message_by_name("test.MessageWithOneof")
        .unwrap();
    assert_eq!(
        DynamicMessage::parse_text_format(desc.clone(), "oneof_field_1: 'hello', oneof_field_2: 5")
            .unwrap_err()
            .to_string(),
        "a value is already set for oneof 'test_oneof'"
    );
    let d = DynamicMessage::parse_text_format(desc, "oneof_field_1: 'hello'").unwrap();
    assert_eq!(
        d.get_field_by_name("oneof_field_1").unwrap().as_ref(),
        &Value::String("hello".to_owned())
    );
}
// Property tests: arbitrary messages must survive a text-format round trip
// (both compact and pretty renderings; see `roundtrip_text`).
proptest! {
    #![proptest_config(ProptestConfig {
        // Keep runtime reasonable; round-tripping is comparatively expensive.
        cases: 32,
        .. ProptestConfig::default()
    })]
    #[test]
    fn roundtrip_arb_scalars(message: Scalars) {
        roundtrip_text(&message);
    }
    #[test]
    fn roundtrip_arb_scalar_arrays(message: ScalarArrays) {
        roundtrip_text(&message);
    }
    #[test]
    fn roundtrip_arb_complex_type(message: ComplexType) {
        roundtrip_text(&message);
    }
    #[test]
    fn roundtrip_arb_well_known_types(message: WellKnownTypes) {
        roundtrip_text(&message);
    }
    // TODO Disabled for now due to logos bug: https://github.com/maciejhirsz/logos/issues/255
    // #[test]
    // fn deserialize_error_scalars(s in ".{2,256}") {
    //     let _: Scalars = from_text(&s);
    // }
    // #[test]
    // fn deserialize_error_scalar_arrays(s in ".{2,256}") {
    //     let _: Scalars = from_text(&s);
    // }
    // #[test]
    // fn deserialize_error_complex_type(s in ".{2,256}") {
    //     let _: Scalars = from_text(&s);
    // }
    // #[test]
    // fn deserialize_error_well_known_types(s in ".{2,256}") {
    //     let _: Scalars = from_text(&s);
    // }
}
/// Parses `text` as the text format of message `T`, then transcodes the
/// resulting `DynamicMessage` back into a concrete `T`.
///
/// Panics (at the caller's location, via `#[track_caller]`) if parsing or
/// transcoding fails.
#[track_caller]
fn from_text<T>(text: &str) -> T
where
    T: PartialEq + Debug + ReflectMessage + Default,
{
    DynamicMessage::parse_text_format(T::default().descriptor(), text)
        .unwrap()
        .transcode_to()
        .unwrap()
}
/// Asserts that `value` survives a round trip through the text format, using
/// both the compact and the pretty renderings.
#[track_caller]
fn roundtrip_text<T>(value: &T)
where
    T: PartialEq + Debug + ReflectMessage + Default,
{
    let dynamic = value.transcode_to_dynamic();
    // Compact rendering.
    let text = dynamic.to_text_format();
    let parsed_text: T = from_text(&text);
    assert_eq!(value, &parsed_text);
    // Pretty (multi-line) rendering.
    let pretty = dynamic.to_text_format_with_options(&FormatOptions::new().pretty(true));
    let parsed_pretty: T = from_text(&pretty);
    assert_eq!(value, &parsed_pretty);
}
/// Converts an `Any` into a `DynamicMessage` described by the test descriptor
/// pool. Panics if the `Any` descriptor is missing or transcoding fails.
fn transcode_any(t: &prost_types::Any) -> DynamicMessage {
    // Look up the type in the test pool instead of the global pool used for google types,
    // so we can find the payload.
    let desc = test_file_descriptor()
        .get_message_by_name(t.descriptor().full_name())
        .unwrap();
    let mut message = DynamicMessage::new(desc);
    message.transcode_from(t).unwrap();
    message
}
use std::{
collections::{BTreeMap, HashMap},
fmt::Debug,
iter::FromIterator,
};
use proptest::{prelude::*, test_runner::TestCaseError};
use prost::Message;
use prost_reflect::{DeserializeOptions, DynamicMessage, ReflectMessage, SerializeOptions};
use prost_types::FileDescriptorSet;
use serde_json::json;
use crate::{
arbitrary,
proto::{
contains_group, message_with_oneof, ComplexType, ContainsGroup, MessageWithAliasedEnum,
MessageWithOneof, Point, ScalarArrays, Scalars, WellKnownTypes,
},
test_file_descriptor,
};
// Per the proto3 JSON mapping: 64-bit integers serialize as strings, 32-bit as
// numbers, and bytes as base64.
#[test]
fn serialize_scalars() {
    let value = to_json(&Scalars {
        double: 1.1,
        float: 2.2,
        int32: 3,
        int64: 4,
        uint32: 5,
        uint64: 6,
        sint32: 7,
        sint64: 8,
        fixed32: 9,
        fixed64: 10,
        sfixed32: 11,
        sfixed64: 12,
        r#bool: true,
        string: "5".to_owned(),
        bytes: b"i\xa6\xbem\xb6\xffX".to_vec(),
    });
    assert_eq!(
        value,
        json!({
            "double": 1.1,
            "float": 2.2f32,
            "int32": 3,
            "int64": "4",
            "uint32": 5,
            "uint64": "6",
            "sint32": 7,
            "sint64": "8",
            "fixed32": 9,
            "fixed64": "10",
            "sfixed32": 11,
            "sfixed64": "12",
            "bool": true,
            "string": "5",
            // Standard (padded, '+'/'/') base64 of the bytes above.
            "bytes": "aaa+bbb/WA==",
        })
    );
}
#[test]
fn serialize_scalars_float_extrema() {
    // Non-finite floats serialize as the JSON strings "Infinity"/"-Infinity"/"NaN".
    let render = |float: f32, double: f64| {
        to_json(&Scalars {
            float,
            double,
            ..Default::default()
        })
    };
    assert_eq!(
        render(f32::INFINITY, f64::INFINITY),
        json!({
            "double": "Infinity",
            "float": "Infinity",
        })
    );
    assert_eq!(
        render(f32::NEG_INFINITY, f64::NEG_INFINITY),
        json!({
            "double": "-Infinity",
            "float": "-Infinity",
        })
    );
    assert_eq!(
        render(f32::NAN, f64::NAN),
        json!({
            "double": "NaN",
            "float": "NaN",
        })
    );
}
#[test]
fn serialize_scalars_default() {
    // Default (zero-valued) fields are skipped, leaving an empty JSON object.
    assert_eq!(to_json(&Scalars::default()), json!({}));
}
#[test]
fn serialize_array() {
    // Repeated fields serialize as JSON arrays; unset fields are omitted.
    let message = ScalarArrays {
        double: vec![1.1, 2.2],
        ..Default::default()
    };
    let serialized = to_json(&message);
    assert_eq!(serialized, json!({ "double": [1.1, 2.2] }));
}
// Maps keyed by integers get string keys in JSON; field names become camelCase;
// known enum numbers serialize by name, unknown ones stay numeric.
#[test]
fn serialize_complex_type() {
    let value = to_json(&ComplexType {
        string_map: HashMap::from_iter([
            (
                "1".to_owned(),
                Scalars {
                    double: 1.1,
                    float: 2.2,
                    int32: 3,
                    ..Default::default()
                },
            ),
            (
                "2".to_owned(),
                Scalars {
                    int64: 4,
                    uint32: 5,
                    uint64: 6,
                    ..Default::default()
                },
            ),
        ]),
        int_map: HashMap::from_iter([
            (
                3,
                Scalars {
                    sint32: 7,
                    sint64: 8,
                    fixed32: 9,
                    ..Default::default()
                },
            ),
            (
                4,
                Scalars {
                    sint64: 8,
                    fixed32: 9,
                    fixed64: 10,
                    ..Default::default()
                },
            ),
        ]),
        nested: Some(Scalars {
            sfixed32: 11,
            sfixed64: 12,
            r#bool: true,
            string: "5".to_owned(),
            bytes: b"6".to_vec(),
            ..Default::default()
        }),
        my_enum: vec![0, 1, 2, 3, -4],
        optional_enum: 1,
        enum_map: HashMap::from_iter([(1, 1), (2, 2)]),
    });
    assert_eq!(
        value,
        json!({
            "stringMap": {
                "1": {
                    "double": 1.1,
                    "float": 2.2f32,
                    "int32": 3,
                },
                "2": {
                    "int64": "4",
                    "uint32": 5,
                    "uint64": "6",
                },
            },
            "intMap": {
                "3": {
                    "sint32": 7,
                    "sint64": "8",
                    "fixed32": 9,
                },
                "4": {
                    "sint64": "8",
                    "fixed32": 9,
                    "fixed64": "10",
                },
            },
            "nested": {
                "sfixed32": 11,
                "sfixed64": "12",
                "bool": true,
                "string": "5",
                "bytes": "Ng==",
            },
            // 2 has no name in MyEnum, so it stays numeric.
            "myEnum": ["DEFAULT", "FOO", 2, "BAR", "NEG"],
            "optionalEnum": "FOO",
            "enumMap": {
                "1": "FOO",
                "2": 2,
            },
        })
    );
}
// Well-known types use their special JSON forms: RFC 3339 timestamps, "Ns"
// durations, raw Struct/ListValue JSON, camelCase FieldMask strings, etc.
#[test]
fn serialize_well_known_types() {
    let value = to_json(&WellKnownTypes {
        timestamp: Some(prost_types::Timestamp {
            seconds: 63_108_020,
            nanos: 21_000_000,
        }),
        duration: Some(prost_types::Duration {
            seconds: 1,
            nanos: 340_012,
        }),
        r#struct: Some(prost_types::Struct {
            fields: BTreeMap::from_iter([
                (
                    "number".to_owned(),
                    prost_types::Value {
                        kind: Some(prost_types::value::Kind::NumberValue(42.)),
                    },
                ),
                (
                    "null".to_owned(),
                    prost_types::Value {
                        kind: Some(prost_types::value::Kind::NullValue(0)),
                    },
                ),
            ]),
        }),
        float: Some(42.1),
        double: Some(12.4),
        int32: Some(1),
        int64: Some(-2),
        uint32: Some(3),
        uint64: Some(4),
        bool: Some(false),
        string: Some("hello".to_owned()),
        bytes: Some(b"hello".to_vec()),
        mask: Some(prost_types::FieldMask {
            paths: vec!["field_one".to_owned(), "field_two.b.d".to_owned()],
        }),
        list: Some(prost_types::ListValue {
            values: vec![
                prost_types::Value {
                    kind: Some(prost_types::value::Kind::StringValue("foo".to_owned())),
                },
                prost_types::Value {
                    kind: Some(prost_types::value::Kind::BoolValue(false)),
                },
            ],
        }),
        null: 0,
        empty: Some(()),
    });
    assert_eq!(
        value,
        json!({
            "timestamp": "1972-01-01T10:00:20.021Z",
            "duration": "1.000340012s",
            "struct": {
                "number": 42.0,
                "null": null,
            },
            "float": 42.1f32,
            "double": 12.4,
            "int32": 1,
            "int64": "-2",
            "uint32": 3,
            "uint64": "4",
            "bool": false,
            "string": "hello",
            "bytes": "aGVsbG8=",
            // FieldMask paths are joined with ',' and converted to camelCase.
            "mask": "fieldOne,fieldTwo.b.d",
            "list": ["foo", false],
            "empty": {}
        })
    );
}
// With `stringify_64_bit_integers(false)`, 64-bit fields serialize as JSON
// numbers instead of strings.
#[test]
fn serialize_no_stringify_64_bit_integers() {
    let value = to_json_with_options(
        &Scalars {
            int32: 3,
            int64: -4,
            uint32: 5,
            uint64: 6,
            sint32: 7,
            sint64: -8,
            fixed32: 9,
            fixed64: 10,
            sfixed32: 11,
            sfixed64: -12,
            ..Default::default()
        },
        &SerializeOptions::new().stringify_64_bit_integers(false),
    );
    assert_eq!(
        value,
        json!({
            "int32": 3,
            "int64": -4,
            "uint32": 5,
            "uint64": 6,
            "sint32": 7,
            "sint64": -8,
            "fixed32": 9,
            "fixed64": 10,
            "sfixed32": 11,
            "sfixed64": -12,
        })
    );
}
#[test]
fn serialize_use_proto_field_name() {
    // With `use_proto_field_name(true)`, keys keep their snake_case proto names
    // instead of being converted to camelCase.
    let message = ComplexType {
        my_enum: vec![0, 1, 2, 3, -4],
        ..Default::default()
    };
    let options = SerializeOptions::new().use_proto_field_name(true);
    assert_eq!(
        to_json_with_options(&message, &options),
        json!({
            "my_enum": ["DEFAULT", "FOO", 2, "BAR", "NEG"],
        })
    );
}
#[test]
fn serialize_use_enum_numbers() {
    // With `use_enum_numbers(true)`, enum values serialize as their numbers
    // rather than their names.
    let message = ComplexType {
        my_enum: vec![0, 1, 2, 3, -4],
        ..Default::default()
    };
    let options = SerializeOptions::new().use_enum_numbers(true);
    assert_eq!(
        to_json_with_options(&message, &options),
        json!({
            "myEnum": [0, 1, 2, 3, -4],
        })
    );
}
// With `skip_default_fields(false)`, every field is emitted explicitly — but only
// within populated messages; unset singular messages (`nested`) remain absent.
#[test]
fn serialize_skip_default_fields() {
    let value = to_json_with_options(
        &ComplexType {
            string_map: HashMap::from_iter([(
                "1".to_owned(),
                Scalars {
                    ..Default::default()
                },
            )]),
            int_map: HashMap::default(),
            nested: None,
            my_enum: vec![],
            optional_enum: 0,
            enum_map: HashMap::default(),
        },
        &SerializeOptions::new().skip_default_fields(false),
    );
    assert_eq!(
        value,
        json!({
            "stringMap": {
                "1": {
                    "double": 0.0,
                    "float": 0.0,
                    "int32": 0,
                    "int64": "0",
                    "uint32": 0,
                    "uint64": "0",
                    "sint32": 0,
                    "sint64": "0",
                    "fixed32": 0,
                    "fixed64": "0",
                    "sfixed32": 0,
                    "sfixed64": "0",
                    "bool": false,
                    "string": "",
                    "bytes": "",
                },
            },
            "intMap": {},
            "myEnum": [],
            "optionalEnum": "DEFAULT",
            "enumMap": {}
        })
    );
}
// Serializing straight to a string writer with `skip_default_fields(false)`
// produces explicit zero-valued fields.
#[test]
fn serialize_string_skip_default_fields() {
    let value = Point::default();
    let mut dynamic = DynamicMessage::new(value.descriptor());
    dynamic.transcode_from(&value).unwrap();
    let mut s = serde_json::Serializer::new(vec![]);
    dynamic
        .serialize_with_options(&mut s, &SerializeOptions::new().skip_default_fields(false))
        .unwrap();
    assert_eq!(
        String::from_utf8(s.into_inner()).unwrap(),
        "{\"latitude\":0,\"longitude\":0}"
    );
}
// Mirror of `serialize_scalars`: canonical JSON forms decode back into the
// expected scalar values (64-bit ints from strings, bytes from base64).
#[test]
fn deserialize_scalars() {
    let value: Scalars = from_json(
        json!({
            "double": 1.1,
            "float": 2.2f32,
            "int32": 3,
            "int64": "4",
            "uint32": 5,
            "uint64": "6",
            "sint32": 7,
            "sint64": "8",
            "fixed32": 9,
            "fixed64": "10",
            "sfixed32": 11,
            "sfixed64": "12",
            "bool": true,
            "string": "5",
            "bytes": "aaa+bbb/WA==",
        }),
        "test.Scalars",
    );
    assert_eq!(
        value,
        Scalars {
            double: 1.1,
            float: 2.2,
            int32: 3,
            int64: 4,
            uint32: 5,
            uint64: 6,
            sint32: 7,
            sint64: 8,
            fixed32: 9,
            fixed64: 10,
            sfixed32: 11,
            sfixed64: 12,
            r#bool: true,
            string: "5".to_owned(),
            bytes: b"i\xa6\xbem\xb6\xffX".to_vec(),
        },
    );
}
// The JSON strings "Infinity"/"-Infinity"/"NaN" decode to the corresponding
// non-finite float values.
#[test]
fn deserialize_scalars_float_extrema() {
    let inf: Scalars = from_json(
        json!({
            "double": "Infinity",
            "float": "Infinity",
        }),
        "test.Scalars",
    );
    let neg_inf: Scalars = from_json(
        json!({
            "double": "-Infinity",
            "float": "-Infinity",
        }),
        "test.Scalars",
    );
    let nan: Scalars = from_json(
        json!({
            "double": "NaN",
            "float": "NaN",
        }),
        "test.Scalars",
    );
    assert_eq!(
        inf,
        Scalars {
            float: f32::INFINITY,
            double: f64::INFINITY,
            ..Default::default()
        },
    );
    assert_eq!(
        neg_inf,
        Scalars {
            float: f32::NEG_INFINITY,
            double: f64::NEG_INFINITY,
            ..Default::default()
        },
    );
    // NaN != NaN, so check via is_nan() rather than equality.
    assert!(nan.float.is_nan());
    assert!(nan.double.is_nan());
}
#[test]
fn deserialize_scalars_empty() {
    // An empty JSON object deserializes to the default message.
    let parsed: Scalars = from_json(json!({}), "test.Scalars");
    assert_eq!(parsed, Scalars::default());
}
// Unknown JSON keys are rejected by default (`deny_unknown_fields` is on).
#[test]
#[should_panic(expected = "unrecognized field name 'unknown_field'")]
fn deserialize_deny_unknown_fields() {
    from_json_with_options::<Scalars>(
        json!({
            "unknown_field": 123,
        }),
        "test.Scalars",
        &DeserializeOptions::new(),
    );
}
#[test]
fn deserialize_allow_unknown_fields() {
    // With `deny_unknown_fields(false)`, unknown keys are silently ignored.
    let options = DeserializeOptions::new().deny_unknown_fields(false);
    let parsed: Scalars = from_json_with_options(
        json!({
            "unknown_field": 123,
        }),
        "test.Scalars",
        &options,
    );
    assert_eq!(parsed, Default::default());
}
// An explicit JSON null for a scalar field is treated as "unset", leaving the
// field at its default value.
#[test]
fn deserialize_scalars_null() {
    let value: Scalars = from_json(
        json!({
            "double": null,
            "float": null,
            "int32": null,
            "int64": null,
            "uint32": null,
            "uint64": null,
            "sint32": null,
            "sint64": null,
            "fixed32": null,
            "fixed64": null,
            "sfixed32": null,
            "sfixed64": null,
            "bool": null,
            "string": null,
            "bytes": null,
        }),
        "test.Scalars",
    );
    assert_eq!(value, Scalars::default());
}
// Alternative accepted JSON encodings: numbers as strings, 64-bit ints as
// numbers, and URL-safe base64 ('-'/'_') for bytes.
#[test]
fn deserialize_scalars_alt() {
    let value: Scalars = from_json(
        json!({
            "double": "1.1",
            "float": "2.2",
            "int32": "3",
            "int64": 4,
            "uint32": "5",
            "uint64": 6,
            "sint32": "7",
            "sint64": 8,
            "fixed32": "9",
            "fixed64": 10,
            "sfixed32": "11",
            "sfixed64": 12,
            "bool": true,
            "string": "5",
            "bytes": "aaa-bbb_WA==",
        }),
        "test.Scalars",
    );
    assert_eq!(
        value,
        Scalars {
            double: 1.1,
            float: 2.2,
            int32: 3,
            int64: 4,
            uint32: 5,
            uint64: 6,
            sint32: 7,
            sint64: 8,
            fixed32: 9,
            fixed64: 10,
            sfixed32: 11,
            sfixed64: 12,
            r#bool: true,
            string: "5".to_owned(),
            bytes: b"i\xa6\xbem\xb6\xffX".to_vec(),
        },
    );
}
#[test]
fn deserialize_aliased_enum() {
    // Aliased enum names "A" and "B" both decode to the same numeric value.
    for name in ["A", "B"] {
        let parsed: MessageWithAliasedEnum = from_json(
            json!({
                "aliased": name
            }),
            "test.MessageWithAliasedEnum",
        );
        assert_eq!(parsed, MessageWithAliasedEnum { aliased: 1 });
    }
}
// An unrecognized enum name is an error under the default (strict) options.
#[test]
#[should_panic = "unrecognized enum value 'UNKNOWN'"]
fn deserialize_enum_unknown_value() {
    from_json::<ComplexType>(
        json!({
            "optionalEnum": "UNKNOWN",
        }),
        ".test.ComplexType",
    );
}
// With `deny_unknown_fields(false)`, an unrecognized enum name is dropped and the
// field keeps its default value.
#[test]
fn deserialize_enum_unknown_value_allow_unknown_fields() {
    let value: ComplexType = from_json_with_options(
        json!({
            "optionalEnum": "UNKNOWN",
        }),
        ".test.ComplexType",
        &DeserializeOptions::new().deny_unknown_fields(false),
    );
    assert_eq!(value.optional_enum, 0);
}
// A google.protobuf.NullValue field only accepts JSON null under strict options.
#[test]
#[should_panic = "expected null"]
fn deserialize_null_value_unknown_value() {
    from_json::<WellKnownTypes>(
        json!({
            "null": "UNKNOWN",
        }),
        ".test.WellKnownTypes",
    );
}
// With lenient options, an invalid NullValue entry is ignored and the field
// stays at its default.
#[test]
fn deserialize_null_value_unknown_value_allow_unknown_fields() {
    let value: WellKnownTypes = from_json_with_options(
        json!({
            "null": "UNKNOWN",
        }),
        ".test.WellKnownTypes",
        &DeserializeOptions::new().deny_unknown_fields(false),
    );
    assert_eq!(value.null, 0);
}
// With lenient options, unknown enum names inside a repeated field are dropped
// while the known entries are kept.
#[test]
fn deserialize_enum_in_array_unknown_value_allow_unknown_fields() {
    let value: ComplexType = from_json_with_options(
        json!({
            "myEnum": ["FOO", "UNKNOWN", "BAR"],
        }),
        ".test.ComplexType",
        &DeserializeOptions::new().deny_unknown_fields(false),
    );
    assert_eq!(value.my_enum.as_slice(), &[1, 3]);
}
// With lenient options, map entries whose enum value is unknown are dropped
// while the known entries are kept.
#[test]
fn deserialize_enum_in_map_unknown_value_allow_unknown_fields() {
    let value: ComplexType = from_json_with_options(
        json!({
            "enumMap": {
                "1": "FOO",
                "2": "UNKNOWN",
                "3": "BAR",
            },
        }),
        ".test.ComplexType",
        &DeserializeOptions::new().deny_unknown_fields(false),
    );
    assert_eq!(value.enum_map, HashMap::from_iter([(1, 1), (3, 3),]));
}
#[test]
fn deserialize_array() {
    // A JSON array populates the repeated field; missing fields stay default.
    let parsed: ScalarArrays = from_json(
        json!({
            "double": [1.1, 2.2],
        }),
        ".test.ScalarArrays",
    );
    let expected = ScalarArrays {
        double: vec![1.1, 2.2],
        ..Default::default()
    };
    assert_eq!(parsed, expected);
}
// Mirror of `serialize_complex_type`: maps, nested messages, and enums (by name
// or number) decode back to the expected values.
#[test]
fn deserialize_complex_type() {
    let value: ComplexType = from_json(
        json!({
            "stringMap": {
                "1": {
                    "double": 1.1,
                    "float": 2.2f32,
                    "int32": 3,
                },
                "2": {
                    "int64": "4",
                    "uint32": 5,
                    "uint64": "6",
                },
            },
            "intMap": {
                "3": {
                    "sint32": 7,
                    "sint64": "8",
                    "fixed32": 9,
                },
                "4": {
                    "sint64": "8",
                    "fixed32": 9,
                    "fixed64": "10",
                },
            },
            "nested": {
                "sfixed32": 11,
                "sfixed64": "12",
                "bool": true,
                "string": "5",
                "bytes": "Ng==",
            },
            "myEnum": ["DEFAULT", "FOO", 2, "BAR", "NEG"],
            "optionalEnum": "FOO",
            "enumMap": {
                "1": "FOO",
                "2": 2,
            },
        }),
        ".test.ComplexType",
    );
    assert_eq!(
        value,
        ComplexType {
            string_map: HashMap::from_iter([
                (
                    "1".to_owned(),
                    Scalars {
                        double: 1.1,
                        float: 2.2,
                        int32: 3,
                        ..Default::default()
                    },
                ),
                (
                    "2".to_owned(),
                    Scalars {
                        int64: 4,
                        uint32: 5,
                        uint64: 6,
                        ..Default::default()
                    },
                ),
            ]),
            int_map: HashMap::from_iter([
                (
                    3,
                    Scalars {
                        sint32: 7,
                        sint64: 8,
                        fixed32: 9,
                        ..Default::default()
                    },
                ),
                (
                    4,
                    Scalars {
                        sint64: 8,
                        fixed32: 9,
                        fixed64: 10,
                        ..Default::default()
                    },
                ),
            ]),
            nested: Some(Scalars {
                sfixed32: 11,
                sfixed64: 12,
                r#bool: true,
                string: "5".to_owned(),
                bytes: b"6".to_vec(),
                ..Default::default()
            }),
            my_enum: vec![0, 1, 2, 3, -4],
            optional_enum: 1,
            enum_map: HashMap::from_iter([(1, 1), (2, 2),]),
        }
    );
}
// Mirror of `serialize_well_known_types`: the special JSON forms (RFC 3339
// timestamp, "Ns" duration, camelCase FieldMask, raw Struct/ListValue JSON)
// decode back into the well-known types.
#[test]
fn deserialize_well_known_types() {
    let value: WellKnownTypes = from_json(
        json!({
            "timestamp": "1972-01-01T10:00:20.021Z",
            "duration": "1.000340012s",
            "struct": {
                "number": 42.0,
                "null": null,
            },
            "float": 42.1f32,
            "double": 12.4,
            "int32": 1,
            "int64": "-2",
            "uint32": 3,
            "uint64": "4",
            "bool": false,
            "string": "hello",
            "bytes": "aGVsbG8=",
            "mask": "fieldOne,fieldTwo.b.d",
            "list": ["foo", false],
            "empty": {}
        }),
        ".test.WellKnownTypes",
    );
    assert_eq!(
        value,
        WellKnownTypes {
            timestamp: Some(prost_types::Timestamp {
                seconds: 63_108_020,
                nanos: 21_000_000,
            }),
            duration: Some(prost_types::Duration {
                seconds: 1,
                nanos: 340_012,
            }),
            r#struct: Some(prost_types::Struct {
                fields: BTreeMap::from_iter([
                    (
                        "number".to_owned(),
                        prost_types::Value {
                            kind: Some(prost_types::value::Kind::NumberValue(42.)),
                        },
                    ),
                    (
                        "null".to_owned(),
                        prost_types::Value {
                            kind: Some(prost_types::value::Kind::NullValue(0)),
                        },
                    ),
                ]),
            }),
            float: Some(42.1),
            double: Some(12.4),
            int32: Some(1),
            int64: Some(-2),
            uint32: Some(3),
            uint64: Some(4),
            bool: Some(false),
            string: Some("hello".to_owned()),
            bytes: Some(b"hello".to_vec()),
            mask: Some(prost_types::FieldMask {
                // camelCase mask paths map back to snake_case.
                paths: vec!["field_one".to_owned(), "field_two.b.d".to_owned()],
            }),
            list: Some(prost_types::ListValue {
                values: vec![
                    prost_types::Value {
                        kind: Some(prost_types::value::Kind::StringValue("foo".to_owned())),
                    },
                    prost_types::Value {
                        kind: Some(prost_types::value::Kind::BoolValue(false)),
                    },
                ],
            }),
            null: 0,
            empty: Some(()),
        }
    );
}
// An `Any` serializes as the payload's JSON with an added "@type" key.
#[test]
fn serialize_any() {
    let json = wkt_to_json(
        &prost_types::Any {
            type_url: "type.googleapis.com/test.Point".to_owned(),
            value: Point {
                longitude: 1,
                latitude: 2,
            }
            .encode_to_vec(),
        },
        "google.protobuf.Any",
    );
    assert_eq!(
        json,
        json!({
            "@type": "type.googleapis.com/test.Point",
            "longitude": 1,
            "latitude": 2,
        })
    );
}
// An `Any` wrapping a well-known type uses the "@type" + "value" form, where
// "value" holds the type's special JSON representation.
#[test]
fn serialize_any_wkt() {
    let json = wkt_to_json(
        &prost_types::Any {
            type_url: "type.googleapis.com/google.protobuf.Int32Value".to_owned(),
            value: 5i32.encode_to_vec(),
        },
        "google.protobuf.Any",
    );
    assert_eq!(
        json,
        json!({
            "@type": "type.googleapis.com/google.protobuf.Int32Value",
            "value": 5,
        })
    );
}
// Deserializing an "@type"-tagged JSON object re-encodes the payload into the
// `Any`'s `value` bytes.
#[test]
fn deserialize_any() {
    let value: prost_types::Any = from_json(
        json!({
            "@type": "type.googleapis.com/test.Point",
            "longitude": 1,
            "latitude": 2,
        }),
        "google.protobuf.Any",
    );
    assert_eq!(
        value,
        prost_types::Any {
            type_url: "type.googleapis.com/test.Point".to_owned(),
            value: Point {
                longitude: 1,
                latitude: 2,
            }
            .encode_to_vec(),
        }
    );
}
// "@type" need not come first: earlier fields are buffered until the type is
// known, then decoded.
#[test]
fn deserialize_any_buffer_fields() {
    let value: prost_types::Any = from_json(
        json!({
            "longitude": 1,
            "latitude": 2,
            "@type": "type.googleapis.com/test.Point",
        }),
        "google.protobuf.Any",
    );
    assert_eq!(
        value,
        prost_types::Any {
            type_url: "type.googleapis.com/test.Point".to_owned(),
            value: Point {
                longitude: 1,
                latitude: 2,
            }
            .encode_to_vec(),
        }
    );
}
// The "@type" + "value" form decodes well-known-type payloads.
#[test]
fn deserialize_any_wkt() {
    let value: prost_types::Any = from_json(
        json!({
            "@type": "type.googleapis.com/google.protobuf.Int32Value",
            "value": 5,
        }),
        "google.protobuf.Any",
    );
    assert_eq!(
        value,
        prost_types::Any {
            type_url: "type.googleapis.com/google.protobuf.Int32Value".to_owned(),
            value: 5i32.encode_to_vec(),
        }
    );
}
// A well-known-type `Any` without its "value" key is an error.
#[test]
#[should_panic(expected = "expected 'value' field")]
fn deserialize_any_wkt_missing_value() {
    from_json::<prost_types::Any>(
        json!({
            "@type": "type.googleapis.com/google.protobuf.Int32Value"
        }),
        "google.protobuf.Any",
    );
}
// Unknown keys inside an `Any` payload are rejected under strict options.
#[test]
#[should_panic(expected = "unrecognized field name 'unknown'")]
fn deserialize_any_deny_unknown_fields() {
    from_json::<prost_types::Any>(
        json!({
            "@type": "type.googleapis.com/google.protobuf.Int32Value",
            "value": 5,
            "unknown": "hello",
        }),
        "google.protobuf.Any",
    );
}
// Unknown keys are still rejected when they precede "@type" and so were buffered.
#[test]
#[should_panic(expected = "unrecognized field name 'unknown'")]
fn deserialize_any_deny_unknown_buffered_fields() {
    from_json::<prost_types::Any>(
        json!({
            "value": 5,
            "unknown": "hello",
            "@type": "type.googleapis.com/google.protobuf.Int32Value",
        }),
        "google.protobuf.Any",
    );
}
// With lenient options, unknown (buffered) keys inside an `Any` are ignored.
#[test]
fn deserialize_any_allow_unknown_fields() {
    let value: prost_types::Any = from_json_with_options(
        json!({
            "value": 5,
            "unknown": "hello",
            "@type": "type.googleapis.com/google.protobuf.Int32Value",
        }),
        "google.protobuf.Any",
        &DeserializeOptions::new().deny_unknown_fields(false),
    );
    assert_eq!(
        value,
        prost_types::Any {
            type_url: "type.googleapis.com/google.protobuf.Int32Value".to_owned(),
            value: 5i32.encode_to_vec(),
        }
    );
}
// Type URLs without a host prefix (e.g. "/test.Point") are accepted and
// preserved verbatim.
#[test]
fn deserialize_any_custom_type_url() {
    let value: prost_types::Any = from_json(
        json!({
            "@type": "/test.Point",
            "longitude": 1,
            "latitude": 2,
        }),
        "google.protobuf.Any",
    );
    assert_eq!(
        value,
        prost_types::Any {
            type_url: "/test.Point".to_owned(),
            value: Point {
                longitude: 1,
                latitude: 2,
            }
            .encode_to_vec(),
        }
    );
}
// Fractional seconds with fewer than nine digits are scaled up to
// nanoseconds: ".00034" -> 340_000 ns, not 34 ns.
#[test]
fn deserialize_duration_fraction_digits() {
let value: prost_types::Duration = from_json(json!("1.00034s"), "google.protobuf.Duration");
assert_eq!(
value,
prost_types::Duration {
seconds: 1,
nanos: 340_000,
}
);
}
// A negative duration keeps both seconds and nanos non-positive, per the
// protobuf Duration convention (-15.000340123s -> seconds: -15, nanos: -340_123).
//
// NOTE: renamed from `deserialize_duration_out_of_range` — this value is well
// within range; the out-of-range error path is exercised by the neighbouring
// test that parses a seconds component too large for an i64.
#[test]
fn deserialize_duration_negative_with_nanos() {
let value: prost_types::Duration =
from_json(json!("-15.000340123s"), "google.protobuf.Duration");
assert_eq!(
value,
prost_types::Duration {
seconds: -15,
nanos: -340_123,
}
);
}
// The seconds component here (u64::MAX) cannot fit in the Duration's i64
// seconds field, so deserialization must fail with a parse error.
//
// NOTE: renamed from `deserialize_negative_duration` — the value happens to be
// negative, but what this test actually exercises is the out-of-range error
// path; the in-range negative case is covered by a separate test.
#[test]
#[should_panic(expected = "failed to parse duration")]
fn deserialize_duration_seconds_out_of_range() {
from_json::<prost_types::Duration>(
json!("-18446744073709551615.000340123s"),
"google.protobuf.Duration",
);
}
// Integer fields written as floats are accepted as long as the fractional
// part is all zeros (JSON emitters often produce "2.000" for 2).
#[test]
fn ints_allow_trailing_zeros() {
let json = r#"{
"int32": -1.000,
"uint32": 2.000,
"int64": -3.000,
"uint64": 4.000
}"#;
let mut s = serde_json::de::Deserializer::from_str(json);
let dynamic_message = DynamicMessage::deserialize(
test_file_descriptor()
.get_message_by_name("test.Scalars")
.unwrap(),
&mut s,
)
.unwrap();
// Ensure the deserializer consumed the entire input.
s.end().unwrap();
assert_eq!(
dynamic_message.transcode_to::<Scalars>().unwrap(),
Scalars {
int32: -1,
uint32: 2,
int64: -3,
uint64: 4,
..Default::default()
}
);
}
// A genuinely fractional value for an integer field is rejected. (The
// deserializer errors on the value itself, before reaching the trailing
// comma in the input.)
#[test]
#[should_panic(expected = "expected integer value")]
fn ints_deny_fractional() {
let json = r#"{
"int32": -1.01,
}"#;
let mut s = serde_json::de::Deserializer::from_str(json);
let _ = DynamicMessage::deserialize(
test_file_descriptor()
.get_message_by_name("test.Scalars")
.unwrap(),
&mut s,
)
.unwrap();
s.end().unwrap();
}
// A oneof member of type google.protobuf.NullValue is *set* (to 0) by an
// explicit JSON null, rather than being treated as absent.
#[test]
fn null_in_oneof() {
let json = json!({ "oneofNull": null });
let value: MessageWithOneof = from_json(json, "test.MessageWithOneof");
assert_eq!(
value.test_oneof,
Some(message_with_oneof::TestOneof::OneofNull(0))
);
}
// A null for an ordinary (non-NullValue) oneof member is ignored, so the
// other, non-null member wins and no "multiple fields" error is raised.
// NOTE(review): the "_null_first"/"_null_second" suffixes don't obviously
// match the key order in the JSON below — confirm the intended naming.
#[test]
fn oneof_duplicate_field_null_first() {
let json = json!({
"oneofField1": "hello",
"oneofField2": null,
});
let value: MessageWithOneof = from_json(json, "test.MessageWithOneof");
assert_eq!(
value.test_oneof,
Some(message_with_oneof::TestOneof::OneofField1(
"hello".to_owned()
))
);
}
// Mirror of the previous case: the null member is skipped regardless of
// its position, and the non-null member is taken.
#[test]
fn oneof_duplicate_field_null_second() {
let json = json!({
"oneofField1": null,
"oneofField2": 5,
});
let value: MessageWithOneof = from_json(json, "test.MessageWithOneof");
assert_eq!(
value.test_oneof,
Some(message_with_oneof::TestOneof::OneofField2(5))
);
}
// A null for a NullValue member *does* count as setting the oneof, so
// combining it with another set member is an error.
#[test]
#[should_panic(expected = "multiple fields provided for oneof 'test_oneof'")]
fn duplicate_oneof_field_json_value_null() {
let json = json!({
"oneofField1": "hello",
"oneofNull": null,
});
let _: MessageWithOneof = from_json(json, "test.MessageWithOneof");
}
// Serializing with skip_default_fields(false) and deserializing back must
// round-trip a default message without error.
#[test]
fn roundtrip_oneof_field_with_options() {
roundtrip_json_with_options(
&MessageWithOneof::default(),
&SerializeOptions::new().skip_default_fields(false),
&DeserializeOptions::new(),
)
.unwrap();
}
// A oneof member of type google.protobuf.Value set to JSON null becomes a
// Value holding Kind::NullValue (Value can represent null explicitly).
#[test]
fn value_null_in_oneof() {
let json = json!({ "oneofValueNull": null });
let value: MessageWithOneof = from_json(json, "test.MessageWithOneof");
assert_eq!(
value.test_oneof,
Some(message_with_oneof::TestOneof::OneofValueNull(
prost_types::Value {
kind: Some(prost_types::value::Kind::NullValue(0)),
}
)),
);
}
// The legacy enum-name spelling "NULL_VALUE" is still accepted for
// NullValue fields, in addition to JSON null.
#[test]
fn null_old_format() {
let json = json!({ "null": "NULL_VALUE" });
let value: WellKnownTypes = from_json(json, "test.WellKnownTypes");
assert_eq!(
value,
WellKnownTypes {
null: 0,
..Default::default()
}
);
}
// JSON null on a scalar field means "absent": the field must not be marked
// as set afterwards. Checked for a proto2 message...
#[test]
fn null_scalar_field_proto2() {
let json = json!({ "float": null }).to_string();
let value =
try_from_json_string_with_options(&json, "test.Scalars", &DeserializeOptions::new())
.unwrap();
let field = value.descriptor().get_field_by_name("float").unwrap();
assert!(!value.has_field(&field));
}
// ...and the same behaviour for a proto3 message.
#[test]
fn null_scalar_field_proto3() {
let json = json!({ "float": null }).to_string();
let value =
try_from_json_string_with_options(&json, "test2.Scalars2", &DeserializeOptions::new())
.unwrap();
let field = value.descriptor().get_field_by_name("float").unwrap();
assert!(!value.has_field(&field));
}
#[test]
#[should_panic(expected = "unrecognized field name 'unknown_field'")]
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | true |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-tests/src/arbitrary.rs | prost-reflect-tests/src/arbitrary.rs | use std::time::SystemTime;
use std::{fmt::Write, iter::FromIterator};
use proptest::prelude::*;
use prost_types::{value::Kind, Duration, FieldMask, ListValue, Struct, Timestamp, Value};
pub fn timestamp() -> impl Strategy<Value = Timestamp> {
any::<SystemTime>().prop_map(Into::into)
}
// Strategy yielding arbitrary, normalized `Duration`s. The seconds range is
// +/-315_576_000_000 (approximately +/-10,000 years) and nanos stay within
// one second; `normalize` then reconciles the signs of the two components.
prop_compose! {
pub fn duration()(
seconds in (-315_576_000_000i64..=315_576_000_000),
nanos in (-999_999_999i32..=999_999_999),
) -> Duration {
let mut duration = Duration { seconds, nanos };
duration.normalize();
duration
}
}
/// Strategy yielding arbitrary `Struct`s with recursively generated values.
pub fn struct_() -> impl Strategy<Value = Struct> {
struct_inner(value().boxed())
}
/// Like [`struct_`], but parameterized over the value strategy so it can be
/// used inside `prop_recursive` without infinite recursion.
pub fn struct_inner(value_strat: BoxedStrategy<Value>) -> impl Strategy<Value = Struct> {
prop::collection::btree_map(any::<String>(), value_strat, 0..4)
.prop_map(|fields| Struct { fields })
}
/// Strategy yielding arbitrary `ListValue`s with recursively generated values.
pub fn list() -> impl Strategy<Value = ListValue> {
list_inner(value().boxed())
}
/// Like [`list`], but parameterized over the value strategy (see [`struct_inner`]).
pub fn list_inner(value_strat: BoxedStrategy<Value>) -> impl Strategy<Value = ListValue> {
prop::collection::vec(value_strat, 0..4).prop_map(|values| ListValue { values })
}
// Strategy yielding only finite f64s: the flag set deliberately excludes the
// INFINITE and NAN classes (both signs, normal/subnormal/zero are included).
fn arb_finite_float() -> impl Strategy<Value = f64> {
use prop::num::f64::*;
POSITIVE | NEGATIVE | NORMAL | SUBNORMAL | ZERO
}
/// Strategy yielding arbitrary `Value`s: leaves are null/number/string/bool,
/// and `prop_recursive` layers lists and structs on top (depth up to 4,
/// total size hint 10, up to 4 children per node).
pub fn value() -> impl Strategy<Value = Value> {
prop_oneof![
Just(Kind::NullValue(0)),
arb_finite_float().prop_map(Kind::NumberValue),
any::<String>().prop_map(Kind::StringValue),
prop::bool::ANY.prop_map(Kind::BoolValue),
]
.prop_map(|kind| Value { kind: Some(kind) })
.prop_recursive(4, 10, 4, |value| {
prop_oneof![
list_inner(value.clone()).prop_map(Kind::ListValue),
struct_inner(value).prop_map(Kind::StructValue)
]
.prop_map(|kind| Value { kind: Some(kind) })
})
}
/// Strategy yielding arbitrary `FieldMask`s: 0-3 paths, each built from 1-3
/// dot-separated snake_case segments matching `([a-z]{1,3}_){0,3}[a-z]{1,3}`.
pub fn mask() -> impl Strategy<Value = FieldMask> {
    let parts = prop::collection::vec("([a-z]{1,3}_){0,3}[a-z]{1,3}", 1..4);
    let paths = prop::collection::vec(parts, 0..4);
    paths.prop_map(|paths| FieldMask {
        paths: paths
            .into_iter()
            // `join` replaces the original hand-rolled separator loop that
            // used `write!` + `unwrap` — same output, no panic path.
            .map(|parts| parts.join("."))
            .collect(),
    })
}
/// Strategy yielding arbitrary JSON documents as strings, for fuzzing the
/// JSON deserializer. Keys are drawn from real `test.WellKnownTypes` field
/// names so that generated documents often partially match the schema and
/// exercise interesting error paths.
pub fn json() -> impl Strategy<Value = String> {
fn arb_json_key() -> impl Strategy<Value = String> {
// Use real field names to make the deserialization error test more interesting
// (weights 2 for common scalars, 1 for the rest).
prop_oneof![
2 => Just("float".to_owned()),
2 => Just("double".to_owned()),
2 => Just("int32".to_owned()),
2 => Just("int64".to_owned()),
2 => Just("uint32".to_owned()),
2 => Just("uint64".to_owned()),
2 => Just("bool".to_owned()),
2 => Just("string".to_owned()),
2 => Just("bytes".to_owned()),
1 => Just("string_map".to_owned()),
1 => Just("int_map".to_owned()),
1 => Just("nested".to_owned()),
1 => Just("my_enum".to_owned()),
1 => Just("optional_enum".to_owned()),
1 => Just("timestamp".to_owned()),
1 => Just("duration".to_owned()),
1 => Just("struct".to_owned()),
1 => Just("mask".to_owned()),
1 => Just("list".to_owned()),
1 => Just("null".to_owned()),
1 => Just("empty".to_owned()),
1 => Just("sint32".to_owned()),
1 => Just("sint64".to_owned()),
1 => Just("fixed32".to_owned()),
1 => Just("fixed64".to_owned()),
1 => Just("sfixed32".to_owned()),
1 => Just("sfixed64".to_owned()),
]
}
// Arbitrary JSON values: null/bool/number/string leaves, with arrays and
// objects layered recursively on top (objects reuse the schema-like keys).
fn arb_json_value() -> impl Strategy<Value = serde_json::Value> {
let leaf = prop_oneof![
Just(serde_json::Value::Null),
any::<bool>().prop_map(serde_json::Value::from),
any::<f64>().prop_map(serde_json::Value::from),
".*".prop_map(serde_json::Value::from),
];
leaf.prop_recursive(4, 32, 4, |inner| {
prop_oneof![
prop::collection::vec(inner.clone(), 0..4).prop_map(serde_json::Value::Array),
prop::collection::hash_map(arb_json_key(), inner, 0..4)
.prop_map(|map| serde_json::Map::from_iter(map).into()),
]
})
}
prop::collection::hash_map(arb_json_key(), arb_json_value(), 0..10)
.prop_map(|map| serde_json::Value::Object(map.into_iter().collect()).to_string())
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-tests/benches/decode.rs | prost-reflect-tests/benches/decode.rs | use std::{collections::BTreeMap, iter::FromIterator};
use criterion::{criterion_group, criterion_main, Criterion};
use prost::Message;
use prost_reflect::{DynamicMessage, ReflectMessage};
use prost_reflect_tests::proto::WellKnownTypes;
use rayon::iter::{IntoParallelIterator, ParallelIterator};
/// Builds a fixed `WellKnownTypes` fixture populating every field (timestamp,
/// duration, struct, scalars, field mask, list, null, empty) so the benchmarks
/// below exercise all well-known-type code paths.
fn sample_wkt() -> WellKnownTypes {
WellKnownTypes {
timestamp: Some(prost_types::Timestamp {
seconds: 63_108_020,
nanos: 21_000_000,
}),
duration: Some(prost_types::Duration {
seconds: 1,
nanos: 340_012,
}),
r#struct: Some(prost_types::Struct {
fields: BTreeMap::from_iter([
(
"number".to_owned(),
prost_types::Value {
kind: Some(prost_types::value::Kind::NumberValue(42.)),
},
),
(
"null".to_owned(),
prost_types::Value {
kind: Some(prost_types::value::Kind::NullValue(0)),
},
),
]),
}),
float: Some(42.1),
double: Some(12.4),
int32: Some(1),
int64: Some(-2),
uint32: Some(3),
uint64: Some(4),
bool: Some(false),
string: Some("hello".to_owned()),
bytes: Some(b"hello".to_vec()),
mask: Some(prost_types::FieldMask {
paths: vec!["field_one".to_owned(), "field_two.b.d".to_owned()],
}),
list: Some(prost_types::ListValue {
values: vec![
prost_types::Value {
kind: Some(prost_types::value::Kind::StringValue("foo".to_owned())),
},
prost_types::Value {
kind: Some(prost_types::value::Kind::BoolValue(false)),
},
],
}),
null: 0,
empty: Some(()),
}
}
fn decode_wkt(c: &mut Criterion) {
let value = sample_wkt().transcode_to_dynamic();
c.bench_function("decode_wkt", |b| b.iter(|| value.encode_to_vec()));
}
/// Benchmarks *encoding* a `DynamicMessage` to the wire format. The original
/// body measured `DynamicMessage::decode` — its body had been swapped with
/// `decode_wkt`'s; this restores the encode measurement so the benchmark
/// matches its name.
fn encode_wkt(c: &mut Criterion) {
    // Build the dynamic message once up front; only the encode is timed.
    let value = sample_wkt().transcode_to_dynamic();
    c.bench_function("encode_wkt", |b| b.iter(|| value.encode_to_vec()));
}
fn decode_wkt_multithread(c: &mut Criterion) {
let value = sample_wkt().transcode_to_dynamic();
c.bench_function("decode_wkt_multithread", |b| {
b.iter(|| {
(0i32..256)
.into_par_iter()
.for_each(|_| drop(criterion::black_box(value.encode_to_vec())))
})
});
}
/// Multithreaded variant of `encode_wkt`: 256 parallel encodes per iteration
/// via rayon. The original body measured decoding (swapped with
/// `decode_wkt_multithread`); fixed to match the name.
fn encode_wkt_multithread(c: &mut Criterion) {
    let value = sample_wkt().transcode_to_dynamic();
    c.bench_function("encode_wkt_multithread", |b| {
        b.iter(|| {
            (0i32..256)
                .into_par_iter()
                // black_box prevents the optimizer from eliding the encode.
                .for_each(|_| drop(criterion::black_box(value.encode_to_vec())))
        })
    });
}
// Register all four benchmarks; sample_size(500) is larger than criterion's
// default for more stable timings on these fast operations.
criterion_group! {
name = benches;
config = Criterion::default().sample_size(500);
targets = decode_wkt, encode_wkt, decode_wkt_multithread, encode_wkt_multithread
}
criterion_main!(benches);
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-conformance-tests/build.rs | prost-reflect-conformance-tests/build.rs | // This build script is based on the script here: https://github.com/tokio-rs/prost/blob/master/protobuf/build.rs
use std::env;
use std::fs;
use std::path::{Path, PathBuf};
use std::process::Command;
use anyhow::{Context, Result};
/// Build script entry point: builds protoc and the protobuf conformance test
/// runner from the vendored `protobuf/` submodule (cached in OUT_DIR keyed by
/// the submodule's `git describe`), then compiles the conformance and test
/// protos with that protoc.
fn main() -> Result<()> {
let out_dir =
&PathBuf::from(env::var("OUT_DIR").expect("OUT_DIR environment variable not set"));
let src_dir = PathBuf::from("protobuf");
// Fail early with a helpful message if the submodule is missing.
if !src_dir.join("cmake").exists() {
anyhow::bail!(
"protobuf sources are not checked out; Try `git submodule update --init --recursive`"
)
}
let version = git_describe(&src_dir)?;
let protobuf_dir = &out_dir.join(format!("protobuf-{version}"));
// Expensive cmake build: only run it when this protobuf version has not
// been installed into OUT_DIR yet.
if !protobuf_dir.exists() {
let build_dir = &out_dir.join(format!("build-protobuf-{version}"));
fs::create_dir_all(build_dir).expect("failed to create build directory");
// Install into a temp dir first, then atomically rename into place so a
// half-finished install is never mistaken for a completed one.
let tempdir = tempfile::Builder::new()
.prefix("protobuf")
.tempdir_in(out_dir)
.expect("failed to create temporary directory");
let prefix_dir = &tempdir.path().join("prefix");
fs::create_dir(prefix_dir).expect("failed to create prefix directory");
install_protoc_and_conformance_test_runner(&src_dir, build_dir, prefix_dir)?;
fs::rename(prefix_dir, protobuf_dir).context("failed to move protobuf dir")?;
}
let protoc_executable = protobuf_dir.join("bin").join("protoc");
let conformance_proto_dir = src_dir.join("conformance");
prost_build::Config::new()
.protoc_executable(&protoc_executable)
.compile_protos(
&[conformance_proto_dir.join("conformance.proto")],
&[conformance_proto_dir],
)
.unwrap();
let proto_dir = src_dir.join("src");
// Generate BTreeMap fields for all messages. This forces encoded output to be consistent, so
// that encode/decode roundtrips can use encoded output for comparison. Otherwise trying to
// compare based on the Rust PartialEq implementations is difficult, due to presence of NaN
// values.
prost_build::Config::new()
.protoc_executable(&protoc_executable)
.btree_map(["."])
.file_descriptor_set_path(out_dir.join("test_messages.bin"))
.compile_protos(
&[
proto_dir.join("google/protobuf/test_messages_proto2.proto"),
proto_dir.join("google/protobuf/test_messages_proto3.proto"),
proto_dir.join("google/protobuf/unittest.proto"),
],
&[proto_dir],
)
.unwrap();
// Emit an environment variable with the path to the build so that it can be located in the
// main crate.
println!("cargo:rustc-env=PROTOBUF={}", protobuf_dir.display());
Ok(())
}
/// Returns `git describe --tags --always` for the repository at `src_dir`,
/// used as a cache key for the protobuf build.
///
/// # Errors
/// Fails if git cannot be spawned or exits unsuccessfully; the error message
/// includes git's stderr in the latter case.
fn git_describe(src_dir: &Path) -> Result<String> {
    let describe = Command::new("git")
        .args(["describe", "--tags", "--always"])
        .current_dir(src_dir)
        .output()
        .context("Unable to describe protobuf git repo")?;
    anyhow::ensure!(
        describe.status.success(),
        "Unable to describe protobuf git repo: {}",
        String::from_utf8_lossy(&describe.stderr)
    );
    Ok(String::from_utf8_lossy(&describe.stdout).trim().to_string())
}
/// Runs the protobuf cmake build in `build_dir` and installs protoc, the
/// libraries, and (on non-Windows platforms) the conformance test runner into
/// `prefix_dir`.
fn install_protoc_and_conformance_test_runner(
src_dir: &Path,
build_dir: &Path,
prefix_dir: &Path,
) -> Result<()> {
// The protobuf conformance test runner does not support Windows [1].
// [1]: https://github.com/protocolbuffers/protobuf/tree/master/conformance#portability
let build_conformance = !cfg!(windows);
// Build and install protoc, the protobuf libraries, and the conformance test runner.
cmake::Config::new(src_dir)
.define("CMAKE_CXX_STANDARD", "14")
.define("ABSL_PROPAGATE_CXX_STD", "ON")
.define("CMAKE_INSTALL_PREFIX", prefix_dir)
.define(
"protobuf_BUILD_CONFORMANCE",
if build_conformance { "ON" } else { "OFF" },
)
.define("protobuf_BUILD_TESTS", "OFF")
.out_dir(build_dir)
.build();
if build_conformance {
// Install the conformance-test-runner binary, since it isn't done automatically.
fs::copy(
build_dir.join("build").join("conformance_test_runner"),
prefix_dir.join("bin").join("conformance-test-runner"),
)
.context("failed to copy conformance-test-runner")?;
}
Ok(())
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-conformance-tests/src/lib.rs | prost-reflect-conformance-tests/src/lib.rs | pub mod conformance {
use std::path::Path;
pub fn test_runner() -> &'static Path {
Path::new(concat!(env!("PROTOBUF"), "/bin/conformance-test-runner"))
}
include!(concat!(env!("OUT_DIR"), "/conformance.rs"));
}
/// Message types generated by the build script from the protobuf conformance
/// suite's test_messages protos, split by syntax version.
pub mod test_messages {
/// Generated types for `protobuf_test_messages.proto2`.
pub mod proto2 {
include!(concat!(
env!("OUT_DIR"),
"/protobuf_test_messages.proto2.rs"
));
}
/// Generated types for `protobuf_test_messages.proto3`.
pub mod proto3 {
include!(concat!(
env!("OUT_DIR"),
"/protobuf_test_messages.proto3.rs"
));
}
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-conformance-tests/src/main.rs | prost-reflect-conformance-tests/src/main.rs | use std::io::{self, Read, Write};
use once_cell::sync::Lazy;
use prost::{
bytes::{Buf, BufMut},
Message,
};
use prost_reflect::{text_format, DescriptorPool, DeserializeOptions, DynamicMessage};
use prost_reflect_conformance_tests::conformance::{
conformance_request, conformance_response, ConformanceRequest, ConformanceResponse,
TestCategory, WireFormat,
};
// Descriptor set produced by the build script for the conformance-suite
// message types; decoded lazily into a shared pool.
const TEST_MESSAGES_DESCRIPTOR_POOL_SET_BYTES: &[u8] =
include_bytes!(concat!(env!("OUT_DIR"), "/test_messages.bin"));
static TEST_MESSAGES_DESCRIPTOR_POOL: Lazy<DescriptorPool> =
Lazy::new(|| DescriptorPool::decode(TEST_MESSAGES_DESCRIPTOR_POOL_SET_BYTES).unwrap());
/// Conformance testee entry point. Speaks the conformance runner's framing
/// protocol over stdin/stdout: each message is a little-endian u32 length
/// prefix followed by that many bytes of encoded ConformanceRequest, and each
/// reply is a length-prefixed ConformanceResponse. Loops until stdin closes.
fn main() -> io::Result<()> {
env_logger::init();
// `bytes` is reused as both the read and write buffer across iterations.
let mut bytes = vec![0; 4];
loop {
bytes.resize(4, 0);
if io::stdin().read_exact(&mut bytes).is_err() {
// No more test cases.
return Ok(());
}
let len = bytes.as_slice().get_u32_le() as usize;
bytes.resize(len, 0);
io::stdin().read_exact(&mut bytes)?;
// A malformed request is reported as a ParseError result rather than
// aborting the whole run.
let result = match ConformanceRequest::decode(&*bytes) {
Ok(request) => handle_request(request),
Err(error) => conformance_response::Result::ParseError(format!("{error:?}")),
};
let response = ConformanceResponse {
result: Some(result),
};
let len = response.encoded_len();
bytes.clear();
bytes.put_u32_le(len as u32);
response.encode(&mut bytes)?;
// Sanity check: buffer holds exactly prefix + payload.
assert_eq!(len + 4, bytes.len());
let mut stdout = io::stdout();
stdout.lock().write_all(&bytes)?;
stdout.flush()?;
}
}
/// Executes one conformance request: parses the payload (protobuf, JSON, or
/// text format) into a `DynamicMessage` and re-serializes it in the requested
/// output format. Unsupported categories/formats (JSPB) are reported as
/// Skipped; all parse/serialize failures become error results rather than
/// panics.
fn handle_request(request: ConformanceRequest) -> conformance_response::Result {
let message_desc =
match TEST_MESSAGES_DESCRIPTOR_POOL.get_message_by_name(&request.message_type) {
Some(message_desc) => message_desc,
None => {
return conformance_response::Result::ParseError(format!(
"unknown message type: {}",
request.message_type
));
}
};
// The test category only affects JSON deserialization strictness.
let mut json_deserialize_options = DeserializeOptions::new();
match request.test_category() {
TestCategory::UnspecifiedTest => (),
TestCategory::BinaryTest => (),
TestCategory::JsonTest => (),
TestCategory::TextFormatTest => (),
TestCategory::JsonIgnoreUnknownParsingTest => {
json_deserialize_options = json_deserialize_options.deny_unknown_fields(false);
}
TestCategory::JspbTest => {
return conformance_response::Result::Skipped("unsupported test category".to_string())
}
}
let output = request.requested_output_format();
// Parse the input payload according to its wire/JSON/text encoding.
let dynamic_message = match request.payload {
None => return conformance_response::Result::ParseError("no payload".to_string()),
Some(conformance_request::Payload::ProtobufPayload(buf)) => {
let mut dynamic_message = DynamicMessage::new(message_desc);
match dynamic_message.merge(buf.as_ref()) {
Ok(()) => (),
Err(error) => return conformance_response::Result::ParseError(error.to_string()),
}
dynamic_message
}
Some(conformance_request::Payload::JsonPayload(json)) => {
let mut deserializer = serde_json::de::Deserializer::from_str(&json);
match DynamicMessage::deserialize_with_options(
message_desc,
&mut deserializer,
&json_deserialize_options,
) {
Ok(message) => message,
Err(error) => return conformance_response::Result::ParseError(error.to_string()),
}
}
Some(conformance_request::Payload::JspbPayload(_)) => {
return conformance_response::Result::Skipped(
"jspb payload is not supported".to_string(),
);
}
Some(conformance_request::Payload::TextPayload(text)) => {
match DynamicMessage::parse_text_format(message_desc, &text) {
Ok(message) => message,
Err(error) => return conformance_response::Result::ParseError(error.to_string()),
}
}
};
// Re-serialize in the requested output format.
match output {
WireFormat::Unspecified => {
conformance_response::Result::ParseError("output format unspecified".to_string())
}
WireFormat::Jspb => {
conformance_response::Result::Skipped("JSPB output is not supported".to_string())
}
WireFormat::TextFormat => {
let options = text_format::FormatOptions::new()
.skip_unknown_fields(!request.print_unknown_fields);
conformance_response::Result::TextPayload(
dynamic_message.to_text_format_with_options(&options),
)
}
WireFormat::Json => match serde_json::to_string(&dynamic_message) {
Ok(s) => conformance_response::Result::JsonPayload(s),
Err(err) => conformance_response::Result::SerializeError(err.to_string()),
},
WireFormat::Protobuf => {
conformance_response::Result::ProtobufPayload(dynamic_message.encode_to_vec())
}
}
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-conformance-tests/tests/conformance.rs | prost-reflect-conformance-tests/tests/conformance.rs | #![cfg(not(target_os = "windows"))]
use std::env;
use std::process::Command;
use prost_reflect_conformance_tests::conformance;
/// Runs the protobuf conformance test. This must be done in an integration test
/// so that Cargo will build the proto-conformance binary.
#[test]
fn test_conformance() {
// Get the path to the proto-conformance binary. Adapted from
// https://github.com/rust-lang/cargo/blob/19fdb308cdbb25faf4f1e25a71351d8d603fa447/tests/cargotest/support/mod.rs#L306.
let proto_conformance = env::current_exe()
.map(|mut path| {
// The test executable lives in target/<profile>/deps/; the testee
// binary lives one directory up in target/<profile>/.
path.pop();
if path.ends_with("deps") {
path.pop();
}
path.join("prost-reflect-conformance-tests")
})
.unwrap();
// Drive the testee binary with the runner built by the build script;
// known failures are listed in the two failure-list files.
let status = Command::new(conformance::test_runner())
.arg("--enforce_recommended")
.arg("--failure_list")
.arg("failure_list.txt")
.arg("--text_format_failure_list")
.arg("text_format_failure_list.txt")
.arg(proto_conformance)
.status()
.expect("failed to execute conformance-test-runner");
assert!(status.success(), "proto conformance test failed");
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-derive/src/lib.rs | prost-reflect-derive/src/lib.rs | //! This crate provides the [`ReflectMessage`](https://docs.rs/prost-reflect/latest/prost_reflect/derive.ReflectMessage.html) derive macro
//!
//! For documentation, see the example in the [`prost-reflect` crate docs](https://docs.rs/prost-reflect/latest/prost_reflect/index.html#deriving-reflectmessage).
use proc_macro::TokenStream;
use proc_macro2::Span;
use quote::{quote, ToTokens};
use syn::spanned::Spanned;
/// A derive macro for the [`ReflectMessage`](https://docs.rs/prost-reflect/latest/prost_reflect/trait.ReflectMessage.html) trait.
///
/// For documentation, see the example in the [`prost-reflect` crate docs](https://docs.rs/prost-reflect/latest/prost_reflect/index.html#deriving-reflectmessage).
#[proc_macro_derive(ReflectMessage, attributes(prost_reflect))]
pub fn reflect_message(input: TokenStream) -> TokenStream {
    // Parse the annotated item, then lower the outcome to tokens: either the
    // generated impl, or a compile_error! carrying the diagnostic.
    let input = syn::parse_macro_input!(input as syn::DeriveInput);
    reflect_message_impl(input).map_or_else(|err| err.to_compile_error().into(), Into::into)
}
/// Parsed contents of the `#[prost_reflect(...)]` attribute(s) on the item.
struct Args {
// Combined span of all `#[prost_reflect]` attributes, used for error reporting.
args_span: Span,
// `message_name = "..."`: fully-qualified protobuf name of the message.
message_name: Option<syn::Lit>,
// `descriptor_pool = "..."`: string containing an expression that yields the pool.
descriptor_pool: Option<syn::LitStr>,
// `file_descriptor_set_bytes = "..."`: string containing an expression that
// yields the encoded file descriptor set bytes.
file_descriptor_set: Option<syn::LitStr>,
}
/// Generates the `ReflectMessage` impl for a struct annotated with
/// `#[prost_reflect(...)]`. Enums and unions expand to nothing.
fn reflect_message_impl(input: syn::DeriveInput) -> Result<proc_macro2::TokenStream, syn::Error> {
    // Guard clause instead of a match: only structs get an implementation.
    if !matches!(&input.data, syn::Data::Struct(_)) {
        return Ok(Default::default());
    }

    let args = Args::parse(input.ident.span(), &input.attrs)?;
    let name = &input.ident;
    let descriptor_pool = args.descriptor_pool()?;
    let message_name = args.message_name()?;

    Ok(quote! {
        impl ::prost_reflect::ReflectMessage for #name {
            fn descriptor(&self) -> ::prost_reflect::MessageDescriptor {
                #descriptor_pool
                    .get_message_by_name(#message_name)
                    .expect(concat!("descriptor for message type `", #message_name, "` not found"))
            }
        }
    })
}
/// Returns true if `attr` is a `#[prost_reflect(...)]` attribute.
fn is_prost_reflect_attribute(attr: &syn::Attribute) -> bool {
attr.path().is_ident("prost_reflect")
}
impl Args {
/// Collects all `#[prost_reflect]` attributes on the item and parses their
/// `name = "value"` arguments. Errors if no such attribute is present or an
/// unknown argument name is used.
fn parse(input_span: proc_macro2::Span, attrs: &[syn::Attribute]) -> Result<Args, syn::Error> {
let reflect_attrs: Vec<_> = attrs
.iter()
.filter(|attr| is_prost_reflect_attribute(attr))
.collect();
if reflect_attrs.is_empty() {
return Err(syn::Error::new(
input_span,
"missing #[prost_reflect] attribute",
));
}
let mut args = Args {
// Join the spans of every attribute so errors underline all of them;
// falls back to the first span if joining is unsupported.
args_span: reflect_attrs
.iter()
.map(|a| a.span())
.reduce(|l, r| l.join(r).unwrap_or(l))
.unwrap(),
message_name: None,
descriptor_pool: None,
file_descriptor_set: None,
};
// Later attributes overwrite earlier ones for the same argument name.
for attr in reflect_attrs {
attr.parse_nested_meta(|nested| {
if nested.path.is_ident("descriptor_pool") {
args.descriptor_pool = nested.value()?.parse()?;
Ok(())
} else if nested.path.is_ident("file_descriptor_set_bytes") {
args.file_descriptor_set = nested.value()?.parse()?;
Ok(())
} else if nested.path.is_ident("message_name") {
args.message_name = nested.value()?.parse()?;
Ok(())
} else {
Err(syn::Error::new(
nested.path.span(),
"unknown argument (expected 'descriptor_pool', 'file_descriptor_set_bytes' or 'message_name')",
))
}
})?;
}
Ok(args)
}
/// Returns the token stream evaluating to the `DescriptorPool`: either the
/// user-supplied `descriptor_pool` expression, or (for
/// `file_descriptor_set_bytes`) a one-time registration into the global pool.
fn descriptor_pool(&self) -> Result<proc_macro2::TokenStream, syn::Error> {
if let Some(descriptor_pool) = &self.descriptor_pool {
let expr: syn::Expr = syn::parse_str(&descriptor_pool.value())?;
Ok(expr.to_token_stream())
} else if let Some(file_descriptor_set) = &self.file_descriptor_set {
let expr: syn::Expr = syn::parse_str(&file_descriptor_set.value())?;
// `Once` guarantees the descriptor set is decoded into the global
// pool exactly once, no matter how many messages derive the trait.
Ok(quote!({
static INIT: ::std::sync::Once = ::std::sync::Once::new();
INIT.call_once(|| ::prost_reflect::DescriptorPool::decode_global_file_descriptor_set(#expr).unwrap());
::prost_reflect::DescriptorPool::global()
}))
} else {
Err(syn::Error::new(
self.args_span,
"missing required argument 'descriptor_pool'",
))
}
}
/// Returns the `message_name` literal, erroring if it was not supplied.
fn message_name(&self) -> Result<proc_macro2::TokenStream, syn::Error> {
if let Some(message_name) = &self.message_name {
Ok(message_name.to_token_stream())
} else {
Err(syn::Error::new(
self.args_span,
"missing required argument 'message_name'",
))
}
}
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-derive/tests/attr_unknown_field.rs | prost-reflect-derive/tests/attr_unknown_field.rs | use prost_reflect_derive::ReflectMessage;
#[derive(ReflectMessage)]
#[prost_reflect(foo = 123)]
pub struct MyMessage {}
fn main() {}
// trybuild compile-fail fixture (see tests/tests.rs): 'foo' is not a
// recognized #[prost_reflect] argument, so the derive must report "unknown
// argument". Comments are added only below the code so the line numbers
// recorded in the expected .stderr output are unchanged.
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-derive/tests/file_descriptor_set.rs | prost-reflect-derive/tests/file_descriptor_set.rs | use prost::Message;
use prost_reflect::ReflectMessage;
// Encoded FileDescriptorSet used by the derive below. The explicit `'static`
// lifetime was redundant on a const (clippy::redundant_static_lifetimes) and
// has been dropped.
const FILE_DESCRIPTOR_SET_BYTES: &[u8] = include_bytes!("file_descriptor_set.bin");
// trybuild pass fixture: deriving via `file_descriptor_set_bytes` registers
// the descriptors in the global pool and resolves the message by name.
#[derive(Message, ReflectMessage)]
#[prost_reflect(file_descriptor_set_bytes = "FILE_DESCRIPTOR_SET_BYTES")]
#[prost_reflect(message_name = "package.MyMessage")]
pub struct MyNestedMessage {}
fn main() {
    assert_eq!(
        MyNestedMessage {}.descriptor().full_name(),
        "package.MyMessage"
    );
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-derive/tests/tests.rs | prost-reflect-derive/tests/tests.rs | #[test]
fn tests() {
let tests = trybuild::TestCases::new();
tests.pass("tests/basic.rs");
tests.pass("tests/ignore_enum.rs");
tests.pass("tests/multiple_attr.rs");
tests.pass("tests/file_descriptor_set.rs");
tests.compile_fail("tests/attr_unknown_field.rs");
tests.compile_fail("tests/missing_attr.rs");
tests.compile_fail("tests/missing_name.rs");
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-derive/tests/ignore_enum.rs | prost-reflect-derive/tests/ignore_enum.rs | use prost_reflect_derive::ReflectMessage;
// trybuild pass fixture: the derive silently ignores enums (expands to
// nothing), so this must compile even though no impl is generated.
#[derive(ReflectMessage)]
#[prost_reflect(descriptor_pool = "DESCRIPTOR_POOL", message_name = "msg")]
pub enum MyMessage {}
fn main() {}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-derive/tests/multiple_attr.rs | prost-reflect-derive/tests/multiple_attr.rs | use once_cell::sync::Lazy;
use prost::Message;
use prost_reflect::{DescriptorPool, ReflectMessage};
// Pool decoded lazily from the checked-in descriptor set.
static DESCRIPTOR_POOL: Lazy<DescriptorPool> = Lazy::new(|| {
DescriptorPool::decode(include_bytes!("file_descriptor_set.bin").as_ref()).unwrap()
});
// trybuild pass fixture: arguments split across *two* #[prost_reflect]
// attributes must be merged by the derive.
#[derive(Message, ReflectMessage)]
#[prost_reflect(descriptor_pool = "DESCRIPTOR_POOL")]
#[prost_reflect(message_name = "package.MyMessage")]
pub struct MyNestedMessage {}
fn main() {
assert_eq!(
MyNestedMessage {}.descriptor().full_name(),
"package.MyMessage"
);
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-derive/tests/missing_name.rs | prost-reflect-derive/tests/missing_name.rs | use prost_reflect_derive::ReflectMessage;
#[derive(ReflectMessage)]
#[prost_reflect(descriptor_pool = "DESCRIPTOR_POOL")]
pub struct MyMessage {}
fn main() {}
// trybuild compile-fail fixture (see tests/tests.rs): 'message_name' is a
// required argument, so the derive must report it as missing. Comments are
// added only below the code so the line numbers in the expected .stderr
// output are unchanged.
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-derive/tests/missing_attr.rs | prost-reflect-derive/tests/missing_attr.rs | use prost_reflect_derive::ReflectMessage;
#[derive(ReflectMessage)]
pub struct MyMessage {}
fn main() {}
// trybuild compile-fail fixture (see tests/tests.rs): a struct deriving
// ReflectMessage without any #[prost_reflect] attribute must be rejected.
// Comments are added only below the code so the line numbers in the expected
// .stderr output are unchanged.
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andrewhickman/prost-reflect | https://github.com/andrewhickman/prost-reflect/blob/f911a16f61099791557f2c1d0c2b9b78fdf5deb4/prost-reflect-derive/tests/basic.rs | prost-reflect-derive/tests/basic.rs | use once_cell::sync::Lazy;
use prost::Message;
use prost_reflect::{DescriptorPool, ReflectMessage};
// Pool decoded lazily from the checked-in descriptor set.
static DESCRIPTOR_POOL: Lazy<DescriptorPool> = Lazy::new(|| {
DescriptorPool::decode(include_bytes!("file_descriptor_set.bin").as_ref()).unwrap()
});
// trybuild pass fixture: the basic single-attribute form with both
// required arguments.
#[derive(Message, ReflectMessage)]
#[prost_reflect(
descriptor_pool = "DESCRIPTOR_POOL",
message_name = "package.MyMessage"
)]
pub struct MyMessage {}
fn main() {
assert_eq!(MyMessage {}.descriptor().full_name(), "package.MyMessage");
}
| rust | Apache-2.0 | f911a16f61099791557f2c1d0c2b9b78fdf5deb4 | 2026-01-04T20:24:37.264985Z | false |
andraantariksa/Anime4K-rs | https://github.com/andraantariksa/Anime4K-rs/blob/a47a8ac21f81d6a3bcbdf6fc338b6546f1a51d29/src/test.rs | src/test.rs | use super::*;
/// Runs the Anime4K upscaling pipeline on `input_filename` and writes the
/// result to `output_filename`.
///
/// * `scale` — multiplier applied to both image dimensions before refinement.
/// * `iteration` — number of push-color / push-gradient refinement passes.
/// * `push_color_strength` / `push_gradient_strength` — strengths in [0, 1],
///   mapped to the kernel's 0-255 scale.
///
/// Panics if the input image cannot be opened or the output cannot be saved.
fn anime4k(
    input_filename: &str,
    output_filename: &str,
    scale: f64,
    iteration: u32,
    push_color_strength: f64,
    push_gradient_strength: f64,
) {
    let source = image::open(input_filename).expect("Can't open image.");
    let mut kernel = image_kernel::ImageKernel::from_image(source);

    // Resize first; the refinement passes operate on the upscaled image.
    let target_width = (kernel.width() as f64 * scale) as u32;
    let target_height = (kernel.height() as f64 * scale) as u32;
    kernel.scale(target_width, target_height);

    for _ in 0..iteration {
        kernel.compute_luminance();
        let color_strength = (push_color_strength * 255.0) as u16;
        kernel.push_color(image_kernel::clamp(color_strength, 0, 0xFFFF));
        kernel.compute_gradient();
        let gradient_strength = (push_gradient_strength * 255.0) as u16;
        kernel.push_gradient(image_kernel::clamp(gradient_strength, 0, 0xFFFF));
    }

    kernel
        .save(output_filename)
        .expect("Can't save image.");
}
// Smoke tests: each runs the full pipeline on a bundled asset and writes the
// output next to it. They assert nothing about pixel values — they only verify
// the pipeline completes without panicking.
#[test]
fn test_small_eye_image_default() {
    anime4k("assets/eye-in.png", "assets/eye-out.png", 2.0, 1, 0.0, 1.0);
}
#[test]
fn test_small_eye_image_more_color_strength() {
    anime4k("assets/eye-in.png", "assets/eye-more-color-strength-out.png", 2.0, 1, 1.0, 1.0);
}
#[test]
fn test_small_eye_image_more_iteration() {
    anime4k("assets/eye-in.png", "assets/eye-more-iteration-out.png", 2.0, 2, 0.0, 1.0);
}
#[test]
fn test_small_eye_image_more_scale() {
    anime4k("assets/eye-in.png", "assets/eye-more-scale-out.png", 4.0, 1, 0.0, 1.0);
}
#[test]
fn test_people_default() {
    anime4k("assets/people-in.png", "assets/people-out.png", 2.0, 1, 0.0, 1.0);
}
#[test]
fn test_scenery_image_default() {
    anime4k("assets/scenery-in.png", "assets/scenery-out.png", 2.0, 1, 0.0, 1.0);
}
| rust | MIT | a47a8ac21f81d6a3bcbdf6fc338b6546f1a51d29 | 2026-01-04T20:24:27.719086Z | false |
andraantariksa/Anime4K-rs | https://github.com/andraantariksa/Anime4K-rs/blob/a47a8ac21f81d6a3bcbdf6fc338b6546f1a51d29/src/main.rs | src/main.rs | extern crate clap;
extern crate image;
extern crate raster;
use clap::{App, Arg};
mod image_kernel;
#[cfg(test)]
mod test;
/// CLI entry point: parses arguments with clap, then runs the same
/// upscale + iterate(push_color / push_gradient) pipeline as the test helper
/// in `test.rs`.
fn main() {
    let matches = App::new("Anime4K-rs")
        .version("0.1")
        .author("Andra Antariksa <andra.antariksa@gmail.com>")
        .about("A High-Quality Real Time Upscaler for Anime Video")
        .arg(
            Arg::with_name("INPUT")
                .help("Sets the input file to use")
                .required(true),
        )
        .arg(
            Arg::with_name("OUTPUT")
                .help("Sets the output file")
                .required(true),
        )
        .arg(
            Arg::with_name("scale")
                .short("s")
                .long("scale")
                .default_value("2")
                .help("Sets the scaling factor"),
        )
        .arg(
            Arg::with_name("iteration")
                .short("i")
                .long("iteration")
                .default_value("1")
                .help("Sets how many the iteration to do"),
        )
        .arg(
            Arg::with_name("push-color-strength")
                .long("pcs")
                .default_value("0")
                .help("Sets the push color strength"),
        )
        .arg(
            Arg::with_name("push-gradient-strength")
                .long("pgs")
                .default_value("1")
                .help("Sets push gradient strength"),
        )
        .get_matches();
    // Positional arguments are marked required, so clap guarantees presence;
    // the expects are defensive.
    let input_filename = matches
        .value_of("INPUT")
        .expect("Error: Please specify input and output png files.");
    let output_filename = matches
        .value_of("OUTPUT")
        .expect("Error: Please specify input and output png files.");
    let scale = matches
        .value_of("scale")
        .unwrap()
        .parse::<f64>()
        .expect("Error on parsing scale to f64");
    let iteration = matches
        .value_of("iteration")
        .unwrap()
        .parse::<u8>()
        .expect("Error on parsing iteration to u8");
    let push_color_strength = matches
        .value_of("push-color-strength")
        .unwrap()
        .parse::<f64>()
        .expect("Error on parsing push-color-strength to f64");
    let push_gradient_strength = matches
        .value_of("push-gradient-strength")
        .unwrap()
        .parse::<f64>()
        .expect("Error on parsing push-gradient-strength to f64");
    let image = image::open(&input_filename).expect("Can't open image.");
    let mut kernel_instance = image_kernel::ImageKernel::from_image(image);
    // Upscale first; the refinement passes below run on the scaled image.
    kernel_instance.scale(
        (kernel_instance.width() as f64 * scale) as u32,
        (kernel_instance.height() as f64 * scale) as u32,
    );
    for _ in 0..iteration {
        kernel_instance.compute_luminance();
        // Fractional strengths are scaled by 255 into the kernels' u16 range.
        kernel_instance.push_color(image_kernel::clamp(
            (push_color_strength * 255.0) as u16,
            0,
            0xFFFF,
        ));
        kernel_instance.compute_gradient();
        kernel_instance.push_gradient(image_kernel::clamp(
            (push_gradient_strength * 255.0) as u16,
            0,
            0xFFFF,
        ));
    }
    kernel_instance
        .save(output_filename)
        .expect("Can't save image.");
}
| rust | MIT | a47a8ac21f81d6a3bcbdf6fc338b6546f1a51d29 | 2026-01-04T20:24:27.719086Z | false |
andraantariksa/Anime4K-rs | https://github.com/andraantariksa/Anime4K-rs/blob/a47a8ac21f81d6a3bcbdf6fc338b6546f1a51d29/src/image_kernel.rs | src/image_kernel.rs | use std::cmp::{max, min, PartialOrd};
/// Clamps `val` into `[min, max]`.
///
/// The lower bound is checked first, so for a pathological `min > max`
/// a value below `min` still maps to `min` (matching the original behavior).
pub fn clamp<T: PartialOrd>(val: T, min: T, max: T) -> T {
    match val {
        v if v < min => min,
        v if v > max => max,
        v => v,
    }
}
/// Splits an RGBA pixel into its `(r, g, b, a)` channel values.
#[inline]
pub fn extract_pixel_rgba(pixel: image::Rgba<u8>) -> (u8, u8, u8, u8) {
    (pixel[0], pixel[1], pixel[2], pixel[3])
}
// https://stackoverflow.com/a/596241/3894179
/// Fast perceptual brightness approximation: `(2r + 3g + b) / 6`,
/// always in `0..=255` for `u8` inputs.
#[inline]
pub fn get_brightness(r: u8, g: u8, b: u8) -> u32 {
    let (r, g, b) = (r as u32, g as u32, b as u32);
    (2 * r + 3 * g + b) / 6
}
pub fn get_largest_alpha_avg(
cc: image::Rgba<u8>,
lightest_color: image::Rgba<u8>,
a: image::Rgba<u8>,
b: image::Rgba<u8>,
c: image::Rgba<u8>,
strength: u16,
) -> image::Rgba<u8> {
let new_color_r = ((cc[0] as u32 * (0xFF - strength) as u32
+ ((a[0] as u32 + b[0] as u32 + c[0] as u32) / 3) * strength as u32)
/ 0xFF) as u8;
let new_color_g = ((cc[1] as u32 * (0xFF - strength) as u32
+ ((a[1] as u32 + b[1] as u32 + c[1] as u32) / 3) * strength as u32)
/ 0xFF) as u8;
let new_color_b = ((cc[2] as u32 * (0xFF - strength) as u32
+ ((a[2] as u32 + b[2] as u32 + c[2] as u32) / 3) * strength as u32)
/ 0xFF) as u8;
let new_color_a = ((cc[3] as u32 * (0xFF - strength) as u32
+ ((a[3] as u32 + b[3] as u32 + c[3] as u32) / 3) * strength as u32)
/ 0xFF) as u8;
let new_color = image::Rgba::<u8>([new_color_r, new_color_g, new_color_b, new_color_a]);
if new_color[3] > lightest_color[3] {
new_color
} else {
lightest_color
}
}
/// Blends each channel of `cc` toward the average of the corresponding
/// channels of `a`, `b`, `c`, weighted by `strength` out of 0xFF:
/// `(cc * (0xFF - strength) + avg(a, b, c) * strength) / 0xFF`.
pub fn get_alpha_avg(
    cc: image::Rgba<u8>,
    a: image::Rgba<u8>,
    b: image::Rgba<u8>,
    c: image::Rgba<u8>,
    strength: u16,
) -> image::Rgba<u8> {
    // Per-channel blend; identical arithmetic for r, g, b and a.
    let blend = |i: usize| -> u8 {
        let kept = cc[i] as u32 * (0xFF - strength) as u32;
        let pushed = ((a[i] as u32 + b[i] as u32 + c[i] as u32) / 3) * strength as u32;
        ((kept + pushed) / 0xFF) as u8
    };
    image::Rgba::<u8>([blend(0), blend(1), blend(2), blend(3)])
}
/// Wraps an RGBA8 image buffer for the Anime4K passes. The alpha channel is
/// reused by those passes as a scratch luminance/gradient channel.
pub struct ImageKernel {
    pub image: image::ImageBuffer<image::Rgba<u8>, Vec<u8>>,
}
impl ImageKernel {
    /// Builds a kernel from any decoded image, converting it to RGBA8.
    pub fn from_image(image: image::DynamicImage) -> ImageKernel {
        ImageKernel {
            image: image.to_rgba(),
        }
    }
    /// Current buffer width in pixels.
    pub fn width(&self) -> u32 {
        self.image.width()
    }
    /// Current buffer height in pixels.
    pub fn height(&self) -> u32 {
        self.image.height()
    }
    /// Resizes the buffer to `width` x `height` with bicubic resampling,
    /// round-tripping the raw bytes through the `raster` crate's resampler.
    pub fn scale(&mut self, width: u32, height: u32) {
        let mut raster_image = raster::Image {
            width: self.image.width() as i32,
            height: self.image.height() as i32,
            bytes: self.image.clone().into_raw(),
        };
        let mode = raster::interpolate::InterpolationMode::Bicubic;
        raster::interpolate::resample(&mut raster_image, width as i32, height as i32, mode)
            .expect("Scale error");
        self.image = image::ImageBuffer::from_raw(width, height, raster_image.bytes)
            .expect("Load from raw raster image error");
    }
pub fn compute_luminance(&mut self) {
for y in 0..self.image.height() {
for x in 0..self.image.width() {
let pixel = self.image.get_pixel_mut(x, y);
let (r, g, b, _) = extract_pixel_rgba(*pixel);
let brightness = get_brightness(r, g, b);
let luminance_value = clamp(brightness, 0, 0xFF);
pixel[0] = r;
pixel[1] = g;
pixel[2] = b;
pixel[3] = luminance_value as u8;
}
}
}
    /// Sobel edge detection over the alpha (luminance) channel: writes
    /// `0xFF - gradient_magnitude` into alpha (saturating at 0 when the
    /// magnitude exceeds 255), so strong edges become dark in alpha.
    /// The one-pixel border is not processed — it keeps whatever
    /// `new_rgba8` initializes it to (presumably zeros — TODO confirm).
    pub fn compute_gradient(&mut self) {
        let sobelx = [[-1, 0, 1], [-2, 0, 2], [-1, 0, 1]];
        let sobely = [[-1, -2, -1], [0, 0, 0], [1, 2, 1]];
        let mut temp_image =
            image::DynamicImage::new_rgba8(self.image.width(), self.image.height()).to_rgba();
        for y in 1..self.image.height() - 1 {
            for x in 1..self.image.width() - 1 {
                // Horizontal derivative of the 3x3 alpha neighborhood.
                let dx = self.image.get_pixel(x - 1, y - 1)[3] as i32 * sobelx[0][0]
                    + self.image.get_pixel(x, y - 1)[3] as i32 * sobelx[0][1]
                    + self.image.get_pixel(x + 1, y - 1)[3] as i32 * sobelx[0][2]
                    + self.image.get_pixel(x - 1, y)[3] as i32 * sobelx[1][0]
                    + self.image.get_pixel(x, y)[3] as i32 * sobelx[1][1]
                    + self.image.get_pixel(x + 1, y)[3] as i32 * sobelx[1][2]
                    + self.image.get_pixel(x - 1, y + 1)[3] as i32 * sobelx[2][0]
                    + self.image.get_pixel(x, y + 1)[3] as i32 * sobelx[2][1]
                    + self.image.get_pixel(x + 1, y + 1)[3] as i32 * sobelx[2][2];
                // Vertical derivative.
                let dy = self.image.get_pixel(x - 1, y - 1)[3] as i32 * sobely[0][0]
                    + self.image.get_pixel(x, y - 1)[3] as i32 * sobely[0][1]
                    + self.image.get_pixel(x + 1, y - 1)[3] as i32 * sobely[0][2]
                    + self.image.get_pixel(x - 1, y)[3] as i32 * sobely[1][0]
                    + self.image.get_pixel(x, y)[3] as i32 * sobely[1][1]
                    + self.image.get_pixel(x + 1, y)[3] as i32 * sobely[1][2]
                    + self.image.get_pixel(x - 1, y + 1)[3] as i32 * sobely[2][0]
                    + self.image.get_pixel(x, y + 1)[3] as i32 * sobely[2][1]
                    + self.image.get_pixel(x + 1, y + 1)[3] as i32 * sobely[2][2];
                // Gradient magnitude.
                let derivata = (((dx * dx) + (dy * dy)) as f64).sqrt() as u32;
                let pixel = self.image.get_pixel(x, y);
                if derivata > 255 {
                    temp_image.put_pixel(
                        x,
                        y,
                        image::Rgba::<u8>([pixel[0], pixel[1], pixel[2], 0]),
                    );
                } else {
                    temp_image.put_pixel(
                        x,
                        y,
                        image::Rgba::<u8>([pixel[0], pixel[1], pixel[2], (0xFF - derivata) as u8]),
                    );
                }
            }
        }
        self.image = temp_image;
    }
    /// "Push color" pass: for each pixel, tests 8 directional 3-pixel kernels
    /// and, where one side of the pixel is uniformly brighter in alpha (as set
    /// by `compute_luminance`), blends the pixel toward that brighter side.
    /// `strength` is the blend weight out of 0xFF.
    pub fn push_color(&mut self, strength: u16) {
        let mut temp_image =
            image::DynamicImage::new_rgba8(self.image.width(), self.image.height()).to_rgba();
        for y in 0..self.image.height() {
            for x in 0..self.image.width() {
                /*
                 * Kernel definition:
                 * --------------
                 * [tl] [tc] [tr]
                 * [ml] [mc] [mr]
                 * [bl] [bc] [br]
                 * --------------
                 */
                // Neighbor offsets, clamped to 0 at the image edges so border
                // pixels reuse themselves instead of reading out of bounds.
                let mut x_r: i32 = 1;
                let mut x_l: i32 = -1;
                let mut y_b: i32 = 1;
                let mut y_t: i32 = -1;
                if x == 0 {
                    x_l = 0;
                } else if x == self.image.width() - 1 {
                    x_r = 0;
                }
                if y == 0 {
                    y_t = 0;
                } else if y == self.image.height() - 1 {
                    y_b = 0;
                }
                // Top row
                let tl = *self
                    .image
                    .get_pixel((x as i32 + x_l) as u32, (y as i32 + y_t) as u32);
                let tc = *self.image.get_pixel(x, (y as i32 + y_t) as u32);
                let tr = *self
                    .image
                    .get_pixel((x as i32 + x_r) as u32, (y as i32 + y_t) as u32);
                // Middle row
                let ml = *self.image.get_pixel((x as i32 + x_l) as u32, y);
                let mc = *self.image.get_pixel(x, y);
                let mr = *self.image.get_pixel((x as i32 + x_r) as u32, y);
                // Bottom row
                let bl = *self
                    .image
                    .get_pixel((x as i32 + x_l) as u32, (y as i32 + y_b) as u32);
                let bc = *self.image.get_pixel(x, (y as i32 + y_b) as u32);
                let br = *self
                    .image
                    .get_pixel((x as i32 + x_r) as u32, (y as i32 + y_b) as u32);
                let mut lightest_color = mc;
                // Kernel 0 and 4: top row vs bottom row
                let mut max_dark = max(bl[3], max(bc[3], br[3]));
                let mut min_light = min(tl[3], min(tc[3], tr[3]));
                if min_light > mc[3] && min_light > max_dark {
                    lightest_color =
                        get_largest_alpha_avg(mc, lightest_color, tl, tc, tr, strength);
                } else {
                    max_dark = max(tl[3], max(tc[3], tr[3]));
                    min_light = min(br[3], min(bc[3], bl[3]));
                    if min_light > mc[3] && min_light > max_dark {
                        lightest_color =
                            get_largest_alpha_avg(mc, lightest_color, br, bc, bl, strength);
                    }
                }
                // Kernel 1 and 5: upper-right L vs lower-left L
                max_dark = max(mc[3], max(ml[3], bc[3]));
                min_light = min(mr[3], min(tc[3], tr[3]));
                if min_light > max_dark {
                    lightest_color =
                        get_largest_alpha_avg(mc, lightest_color, mr, tc, tr, strength);
                } else {
                    max_dark = max(mc[3], max(mr[3], tc[3]));
                    min_light = min(bl[3], min(ml[3], bc[3]));
                    if min_light > max_dark {
                        lightest_color =
                            get_largest_alpha_avg(mc, lightest_color, bl, ml, bc, strength);
                    }
                }
                // Kernel 2 and 6: right column vs left column
                max_dark = max(ml[3], max(tl[3], bl[3]));
                min_light = min(mr[3], min(tr[3], br[3]));
                if min_light > mc[3] && min_light > max_dark {
                    lightest_color =
                        get_largest_alpha_avg(mc, lightest_color, mr, br, tr, strength);
                } else {
                    max_dark = max(mr[3], max(tr[3], br[3]));
                    min_light = min(ml[3], min(tl[3], bl[3]));
                    if min_light > mc[3] && min_light > max_dark {
                        lightest_color =
                            get_largest_alpha_avg(mc, lightest_color, ml, tl, bl, strength);
                    }
                }
                // Kernel 3 and 7: lower-right L vs upper-left L
                max_dark = max(mc[3], max(ml[3], tc[3]));
                min_light = min(mr[3], min(br[3], bc[3]));
                if min_light > max_dark {
                    lightest_color =
                        get_largest_alpha_avg(mc, lightest_color, mr, br, bc, strength);
                } else {
                    max_dark = max(mc[3], max(mr[3], bc[3]));
                    min_light = min(tc[3], min(ml[3], tl[3]));
                    if min_light > max_dark {
                        lightest_color =
                            get_largest_alpha_avg(mc, lightest_color, tc, ml, tl, strength);
                    }
                }
                temp_image.put_pixel(x, y, lightest_color);
            }
        }
        self.image = temp_image;
    }
    /// "Push gradient" pass: same 8-kernel neighborhood logic as `push_color`,
    /// but blends unconditionally via `get_alpha_avg` (no largest-alpha
    /// comparison) and finally resets alpha to 255, discarding the scratch
    /// gradient channel so the output is opaque.
    pub fn push_gradient(&mut self, strength: u16) {
        let mut temp_image =
            image::DynamicImage::new_rgba8(self.image.width(), self.image.height()).to_rgba();
        for y in 0..self.image.height() {
            for x in 0..self.image.width() {
                /*
                 * Kernel definition:
                 * --------------
                 * [tl] [tc] [tr]
                 * [ml] [mc] [mr]
                 * [bl] [bc] [br]
                 * --------------
                 */
                // Neighbor offsets, clamped to 0 at the image edges.
                let mut x_r: i32 = 1;
                let mut x_l: i32 = -1;
                let mut y_b: i32 = 1;
                let mut y_t: i32 = -1;
                if x == 0 {
                    x_l = 0;
                } else if x == self.image.width() - 1 {
                    x_r = 0;
                }
                if y == 0 {
                    y_t = 0;
                } else if y == self.image.height() - 1 {
                    y_b = 0;
                }
                // Top row
                let tl = *self
                    .image
                    .get_pixel((x as i32 + x_l) as u32, (y as i32 + y_t) as u32);
                let tc = *self.image.get_pixel(x, (y as i32 + y_t) as u32);
                let tr = *self
                    .image
                    .get_pixel((x as i32 + x_r) as u32, (y as i32 + y_t) as u32);
                // Middle row
                let ml = *self.image.get_pixel((x as i32 + x_l) as u32, y);
                let mc = *self.image.get_pixel(x, y);
                let mr = *self.image.get_pixel((x as i32 + x_r) as u32, y);
                // Bottom row
                let bl = *self
                    .image
                    .get_pixel((x as i32 + x_l) as u32, (y as i32 + y_b) as u32);
                let bc = *self.image.get_pixel(x, (y as i32 + y_b) as u32);
                let br = *self
                    .image
                    .get_pixel((x as i32 + x_r) as u32, (y as i32 + y_b) as u32);
                let mut lightest_color = mc;
                // Kernel 0 and 4: top row vs bottom row
                let mut max_dark = max(bl[3], max(bc[3], br[3]));
                let mut min_light = min(tl[3], min(tc[3], tr[3]));
                if min_light > mc[3] && min_light > max_dark {
                    lightest_color = get_alpha_avg(mc, tl, tc, tr, strength);
                } else {
                    max_dark = max(tl[3], max(tc[3], tr[3]));
                    min_light = min(br[3], min(bc[3], bl[3]));
                    if min_light > mc[3] && min_light > max_dark {
                        lightest_color = get_alpha_avg(mc, br, bc, bl, strength);
                    }
                }
                // Kernel 1 and 5: upper-right L vs lower-left L
                max_dark = max(mc[3], max(ml[3], bc[3]));
                min_light = min(mr[3], min(tc[3], tr[3]));
                if min_light > max_dark {
                    lightest_color = get_alpha_avg(mc, mr, tc, tr, strength);
                } else {
                    max_dark = max(mc[3], max(mr[3], tc[3]));
                    min_light = min(bl[3], min(ml[3], bc[3]));
                    if min_light > max_dark {
                        lightest_color = get_alpha_avg(mc, bl, ml, bc, strength);
                    }
                }
                // Kernel 2 and 6: right column vs left column
                max_dark = max(ml[3], max(tl[3], bl[3]));
                min_light = min(mr[3], min(tr[3], br[3]));
                if min_light > mc[3] && min_light > max_dark {
                    lightest_color = get_alpha_avg(mc, mr, br, tr, strength);
                } else {
                    max_dark = max(mr[3], max(tr[3], br[3]));
                    min_light = min(ml[3], min(tl[3], bl[3]));
                    if min_light > mc[3] && min_light > max_dark {
                        lightest_color = get_alpha_avg(mc, ml, tl, bl, strength);
                    }
                }
                // Kernel 3 and 7: lower-right L vs upper-left L
                max_dark = max(mc[3], max(ml[3], tc[3]));
                min_light = min(mr[3], min(br[3], bc[3]));
                if min_light > max_dark {
                    lightest_color = get_alpha_avg(mc, mr, br, bc, strength);
                } else {
                    max_dark = max(mc[3], max(mr[3], bc[3]));
                    min_light = min(tc[3], min(ml[3], tl[3]));
                    if min_light > max_dark {
                        lightest_color = get_alpha_avg(mc, tc, ml, tl, strength);
                    }
                }
                // The scratch luminance/gradient channel is no longer needed:
                // make the output fully opaque.
                lightest_color[3] = 255;
                temp_image.put_pixel(x, y, lightest_color);
            }
        }
        self.image = temp_image;
    }
    /// Writes the buffer to `filename`; the format is inferred from the
    /// file extension by the `image` crate.
    pub fn save(&self, filename: &str) -> std::result::Result<(), std::io::Error> {
        self.image.save(filename)
    }
}
| rust | MIT | a47a8ac21f81d6a3bcbdf6fc338b6546f1a51d29 | 2026-01-04T20:24:27.719086Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/build.rs | build.rs | use std::{path::Path, process::Command};
/// Build script: embeds the current git revision into the binary via the
/// GIT_HASH environment variable, honoring a caller-provided override and
/// appending "(dirty)" when the working tree has uncommitted changes.
fn main() {
    // Specify re-run conditions
    // 1. Rerun build script if we pass a new GIT_HASH
    println!("cargo:rerun-if-env-changed=GIT_HASH");
    // 2. Only do git based reruns if git directory exists
    if Path::new(".git").exists() {
        // If we change the branch, rerun
        println!("cargo:rerun-if-changed=.git/HEAD");
        if let Ok(r) = std::fs::read_to_string(".git/HEAD") {
            if let Some(stripped) = r.strip_prefix("ref: ") {
                // If the HEAD is detached it will be a commit hash
                // so the HEAD changed directive above will pick it up,
                // otherwise it will point to a ref in the refs directory
                println!("cargo:rerun-if-changed=.git/{}", stripped);
            }
        }
    }
    // Don't fetch the git hash if it's already provided in the environment.
    let existing = std::env::var("GIT_HASH").unwrap_or_else(|_| String::new());
    if !existing.is_empty() {
        return;
    }
    // Get git hash from git and don't do anything if the command fails
    // (e.g. building from a tarball with no .git directory).
    if let Some(rev_parse) = cmd("git", &["rev-parse", "--short", "HEAD"]) {
        // Add (dirty) to the GIT_HASH if the git status isn't clean
        // This includes untracked files
        let dirty = cmd("git", &["status", "--short"]).expect("git command works");
        // Ignore Dockerfile deletion as it is expected in Docker buildx builds
        let git_hash = if dirty.is_empty() || dirty.trim() == "D Dockerfile" {
            rev_parse
        } else {
            format!("{}(dirty)", rev_parse.trim())
        };
        println!("cargo:rustc-env=GIT_HASH={}", git_hash.trim());
    }
}
// Helper function, Command is verbose...
/// Runs `name` with `args`; returns captured stdout on success, or `None`
/// when the command cannot be spawned, exits non-zero, or produces
/// non-UTF-8 output.
fn cmd(name: &str, args: &[&str]) -> Option<String> {
    let output = Command::new(name).args(args).output().ok()?;
    if !output.status.success() {
        return None;
    }
    String::from_utf8(output.stdout).ok()
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/config.rs | src/config.rs | use clap::{App, Arg};
use dirs::home_dir;
use std::fs;
use std::net::SocketAddr;
use std::net::ToSocketAddrs;
use std::path::{Path, PathBuf};
use std::sync::{Arc, OnceLock};
use stderrlog;
use crate::chain::Network;
use crate::daemon::CookieGetter;
use crate::errors::*;
#[cfg(feature = "liquid")]
use bitcoin::Network as BNetwork;
pub(crate) const APP_NAME: &str = "mempool-electrs";
pub(crate) const ELECTRS_VERSION: &str = env!("CARGO_PKG_VERSION");
// Injected by build.rs when building from a git checkout; None otherwise.
pub(crate) const GIT_HASH: Option<&str> = option_env!("GIT_HASH");
// This will be set only once in the Daemon::new() constructor at startup
pub(crate) static BITCOIND_SUBVER: OnceLock<String> = OnceLock::new();
lazy_static! {
    // Human-readable "<app> <version>[-<git hash>]" banner, used for the
    // --version output and the default Electrum welcome banner.
    pub(crate) static ref VERSION_STRING: String = {
        if let Some(hash) = GIT_HASH {
            format!("{} {}-{}", APP_NAME, ELECTRS_VERSION, hash)
        } else {
            format!("{} {}", APP_NAME, ELECTRS_VERSION)
        }
    };
}
#[derive(Debug, Clone)]
pub struct Config {
    // Runtime options parsed from the command line. The authoritative
    // description of each field is the matching clap `.help(...)` string
    // in `Config::from_args`.
    pub log: stderrlog::StdErrLog,
    pub network_type: Network,
    pub magic: Option<u32>,
    // Filesystem layout
    pub db_path: PathBuf,
    pub daemon_dir: PathBuf,
    pub blocks_dir: PathBuf,
    // Bitcoind RPC connection
    pub daemon_rpc_addr: SocketAddr,
    pub cookie: Option<String>,
    // Listening endpoints (TCP addresses and optional unix sockets)
    pub electrum_rpc_addr: SocketAddr,
    pub http_addr: SocketAddr,
    pub http_socket_file: Option<PathBuf>,
    pub rpc_socket_file: Option<PathBuf>,
    pub monitoring_addr: SocketAddr,
    // Indexing behavior
    pub jsonrpc_import: bool,
    pub light_mode: bool,
    pub main_loop_delay: u64,
    pub address_search: bool,
    pub index_unspendables: bool,
    pub cors: Option<String>,
    pub precache_scripts: Option<String>,
    pub precache_threads: usize,
    // Query limits and pagination defaults
    pub utxos_limit: usize,
    pub electrum_txs_limit: usize,
    pub electrum_banner: String,
    pub mempool_backlog_stats_ttl: u64,
    pub mempool_recent_txs_size: usize,
    pub rest_default_block_limit: usize,
    pub rest_default_chain_txs_per_page: usize,
    pub rest_default_max_mempool_txs: usize,
    pub rest_default_max_address_summary_txs: usize,
    pub rest_max_mempool_page_size: usize,
    pub rest_max_mempool_txid_page_size: usize,
    // Liquid/Elements-specific options
    #[cfg(feature = "liquid")]
    pub parent_network: BNetwork,
    #[cfg(feature = "liquid")]
    pub asset_db_path: Option<PathBuf>,
    // Electrum server-discovery options
    #[cfg(feature = "electrum-discovery")]
    pub electrum_public_hosts: Option<crate::electrum::ServerHosts>,
    #[cfg(feature = "electrum-discovery")]
    pub electrum_announce: bool,
    #[cfg(feature = "electrum-discovery")]
    pub tor_proxy: Option<std::net::SocketAddr>,
}
/// Resolves `address` to a `SocketAddr`, panicking with a message naming
/// `what` when resolution fails. When the name resolves to several
/// addresses, the last one wins (same as the original collect-then-pop).
fn str_to_socketaddr(address: &str, what: &str) -> SocketAddr {
    address
        .to_socket_addrs()
        .unwrap_or_else(|_| panic!("unable to resolve {} address", what))
        .last()
        .unwrap()
}
impl Config {
pub fn from_args() -> Config {
let network_help = format!("Select network type ({})", Network::names().join(", "));
let args = App::new("Mempool Electrum Rust Server")
.version(crate_version!())
.arg(
Arg::with_name("version")
.long("version")
.help("Print out the version of this app and quit immediately."),
)
.arg(
Arg::with_name("verbosity")
.short("v")
.multiple(true)
.help("Increase logging verbosity"),
)
.arg(
Arg::with_name("timestamp")
.long("timestamp")
.help("Prepend log lines with a timestamp"),
)
.arg(
Arg::with_name("db_dir")
.long("db-dir")
.help("Directory to store index database (default: ./db/)")
.takes_value(true),
)
.arg(
Arg::with_name("daemon_dir")
.long("daemon-dir")
.help("Data directory of Bitcoind (default: ~/.bitcoin/)")
.takes_value(true),
)
.arg(
Arg::with_name("blocks_dir")
.long("blocks-dir")
.help("Analogous to bitcoind's -blocksdir option, this specifies the directory containing the raw blocks files (blk*.dat) (default: ~/.bitcoin/blocks/)")
.takes_value(true),
)
.arg(
Arg::with_name("cookie")
.long("cookie")
.help("JSONRPC authentication cookie ('USER:PASSWORD', default: read from ~/.bitcoin/.cookie)")
.takes_value(true),
)
.arg(
Arg::with_name("network")
.long("network")
.help(&network_help)
.takes_value(true),
)
.arg(
Arg::with_name("magic")
.long("magic")
.default_value("")
.takes_value(true),
)
.arg(
Arg::with_name("electrum_rpc_addr")
.long("electrum-rpc-addr")
.help("Electrum server JSONRPC 'addr:port' to listen on (default: '127.0.0.1:50001' for mainnet, '127.0.0.1:60001' for testnet and '127.0.0.1:60401' for regtest)")
.takes_value(true),
)
.arg(
Arg::with_name("http_addr")
.long("http-addr")
.help("HTTP server 'addr:port' to listen on (default: '127.0.0.1:3000' for mainnet, '127.0.0.1:3001' for testnet and '127.0.0.1:3002' for regtest)")
.takes_value(true),
)
.arg(
Arg::with_name("daemon_rpc_addr")
.long("daemon-rpc-addr")
.help("Bitcoin daemon JSONRPC 'addr:port' to connect (default: 127.0.0.1:8332 for mainnet, 127.0.0.1:18332 for testnet and 127.0.0.1:18443 for regtest)")
.takes_value(true),
)
.arg(
Arg::with_name("monitoring_addr")
.long("monitoring-addr")
.help("Prometheus monitoring 'addr:port' to listen on (default: 127.0.0.1:4224 for mainnet, 127.0.0.1:14224 for testnet and 127.0.0.1:24224 for regtest)")
.takes_value(true),
)
.arg(
Arg::with_name("jsonrpc_import")
.long("jsonrpc-import")
.help("Use JSONRPC instead of directly importing blk*.dat files. Useful for remote full node or low memory system"),
)
.arg(
Arg::with_name("light_mode")
.long("lightmode")
.help("Enable light mode for reduced storage")
)
.arg(
Arg::with_name("main_loop_delay")
.long("main-loop-delay")
.help("The number of milliseconds the main loop will wait between loops. (Can be shortened with SIGUSR1)")
.default_value("500")
)
.arg(
Arg::with_name("address_search")
.long("address-search")
.help("Enable prefix address search")
)
.arg(
Arg::with_name("index_unspendables")
.long("index-unspendables")
.help("Enable indexing of provably unspendable outputs")
)
.arg(
Arg::with_name("cors")
.long("cors")
.help("Origins allowed to make cross-site requests")
.takes_value(true)
)
.arg(
Arg::with_name("precache_scripts")
.long("precache-scripts")
.help("Path to file with list of scripts to pre-cache")
.takes_value(true)
)
.arg(
Arg::with_name("precache_threads")
.long("precache-threads")
.help("Non-zero number of threads to use for precache threadpool. [default: 4 * CORE_COUNT]")
.takes_value(true)
)
.arg(
Arg::with_name("utxos_limit")
.long("utxos-limit")
.help("Maximum number of utxos to process per address. Lookups for addresses with more utxos will fail. Applies to the Electrum and HTTP APIs.")
.default_value("500")
)
.arg(
Arg::with_name("mempool_backlog_stats_ttl")
.long("mempool-backlog-stats-ttl")
.help("The number of seconds that need to pass before Mempool::update will update the latency histogram again.")
.default_value("10")
)
.arg(
Arg::with_name("mempool_recent_txs_size")
.long("mempool-recent-txs-size")
.help("The number of transactions that mempool will keep in its recents queue. This is returned by mempool/recent endpoint.")
.default_value("10")
)
.arg(
Arg::with_name("rest_default_block_limit")
.long("rest-default-block-limit")
.help("The default number of blocks returned from the blocks/[start_height] endpoint.")
.default_value("10")
)
.arg(
Arg::with_name("rest_default_chain_txs_per_page")
.long("rest-default-chain-txs-per-page")
.help("The default number of on-chain transactions returned by the txs endpoints.")
.default_value("25")
)
.arg(
Arg::with_name("rest_default_max_mempool_txs")
.long("rest-default-max-mempool-txs")
.help("The default number of mempool transactions returned by the txs endpoints.")
.default_value("50")
)
.arg(
Arg::with_name("rest_default_max_address_summary_txs")
.long("rest-default-max-address-summary-txs")
.help("The default number of transactions returned by the address summary endpoints.")
.default_value("5000")
)
.arg(
Arg::with_name("rest_max_mempool_page_size")
.long("rest-max-mempool-page-size")
.help("The maximum number of transactions returned by the paginated /internal/mempool/txs endpoint.")
.default_value("1000")
)
.arg(
Arg::with_name("rest_max_mempool_txid_page_size")
.long("rest-max-mempool-txid-page-size")
.help("The maximum number of transactions returned by the paginated /mempool/txids/page endpoint.")
.default_value("10000")
)
.arg(
Arg::with_name("electrum_txs_limit")
.long("electrum-txs-limit")
.help("Maximum number of transactions returned by Electrum history queries. Lookups with more results will fail.")
.default_value("500")
).arg(
Arg::with_name("electrum_banner")
.long("electrum-banner")
.help("Welcome banner for the Electrum server, shown in the console to clients.")
.takes_value(true)
);
#[cfg(unix)]
let args = args.arg(
Arg::with_name("http_socket_file")
.long("http-socket-file")
.help("HTTP server 'unix socket file' to listen on (default disabled, enabling this disables the http server)")
.takes_value(true),
);
#[cfg(unix)]
let args = args.arg(
Arg::with_name("rpc_socket_file")
.long("rpc-socket-file")
.help("Electrum RPC 'unix socket file' to listen on (default disabled, enabling this ignores the electrum_rpc_addr arg)")
.takes_value(true),
);
#[cfg(feature = "liquid")]
let args = args
.arg(
Arg::with_name("parent_network")
.long("parent-network")
.help("Select parent network type (mainnet, testnet, regtest)")
.takes_value(true),
)
.arg(
Arg::with_name("asset_db_path")
.long("asset-db-path")
.help("Directory for liquid/elements asset db")
.takes_value(true),
);
#[cfg(feature = "electrum-discovery")]
let args = args.arg(
Arg::with_name("electrum_public_hosts")
.long("electrum-public-hosts")
.help("A dictionary of hosts where the Electrum server can be reached at. Required to enable server discovery. See https://electrumx.readthedocs.io/en/latest/protocol-methods.html#server-features")
.takes_value(true)
).arg(
Arg::with_name("electrum_announce")
.long("electrum-announce")
.help("Announce the Electrum server to other servers")
).arg(
Arg::with_name("tor_proxy")
.long("tor-proxy")
.help("ip:addr of socks proxy for accessing onion hosts")
.takes_value(true),
);
let m = args.get_matches();
if m.is_present("version") {
eprintln!("{}", *VERSION_STRING);
std::process::exit(0);
}
let network_name = m.value_of("network").unwrap_or("mainnet");
let network_type = Network::from(network_name);
let magic: Option<u32> = m
.value_of("magic")
.filter(|s| !s.is_empty())
.map(|s| u32::from_str_radix(s, 16).expect("invalid network magic"));
let db_dir = Path::new(m.value_of("db_dir").unwrap_or("./db"));
let db_path = db_dir.join(network_name);
#[cfg(feature = "liquid")]
let parent_network = m
.value_of("parent_network")
.map(|s| s.parse().expect("invalid parent network"))
.unwrap_or_else(|| match network_type {
Network::Liquid => BNetwork::Bitcoin,
// XXX liquid testnet/regtest don't have a parent chain
Network::LiquidTestnet | Network::LiquidRegtest => BNetwork::Regtest,
});
#[cfg(feature = "liquid")]
let asset_db_path = m.value_of("asset_db_path").map(PathBuf::from);
let default_daemon_port = match network_type {
#[cfg(not(feature = "liquid"))]
Network::Bitcoin => 8332,
#[cfg(not(feature = "liquid"))]
Network::Testnet => 18332,
#[cfg(not(feature = "liquid"))]
Network::Regtest => 18443,
#[cfg(not(feature = "liquid"))]
Network::Signet => 38332,
#[cfg(not(feature = "liquid"))]
Network::Testnet4 => 48332,
#[cfg(feature = "liquid")]
Network::Liquid => 7041,
#[cfg(feature = "liquid")]
Network::LiquidTestnet | Network::LiquidRegtest => 7040,
};
let default_electrum_port = match network_type {
#[cfg(not(feature = "liquid"))]
Network::Bitcoin => 50001,
#[cfg(not(feature = "liquid"))]
Network::Testnet => 60001,
#[cfg(not(feature = "liquid"))]
Network::Testnet4 => 40001,
#[cfg(not(feature = "liquid"))]
Network::Regtest => 60401,
#[cfg(not(feature = "liquid"))]
Network::Signet => 60601,
#[cfg(feature = "liquid")]
Network::Liquid => 51000,
#[cfg(feature = "liquid")]
Network::LiquidTestnet => 51301,
#[cfg(feature = "liquid")]
Network::LiquidRegtest => 51401,
};
let default_http_port = match network_type {
#[cfg(not(feature = "liquid"))]
Network::Bitcoin => 3000,
#[cfg(not(feature = "liquid"))]
Network::Testnet => 3001,
#[cfg(not(feature = "liquid"))]
Network::Regtest => 3002,
#[cfg(not(feature = "liquid"))]
Network::Signet => 3003,
#[cfg(not(feature = "liquid"))]
Network::Testnet4 => 3004,
#[cfg(feature = "liquid")]
Network::Liquid => 3000,
#[cfg(feature = "liquid")]
Network::LiquidTestnet => 3001,
#[cfg(feature = "liquid")]
Network::LiquidRegtest => 3002,
};
let default_monitoring_port = match network_type {
#[cfg(not(feature = "liquid"))]
Network::Bitcoin => 4224,
#[cfg(not(feature = "liquid"))]
Network::Testnet => 14224,
#[cfg(not(feature = "liquid"))]
Network::Regtest => 24224,
#[cfg(not(feature = "liquid"))]
Network::Testnet4 => 44224,
#[cfg(not(feature = "liquid"))]
Network::Signet => 54224,
#[cfg(feature = "liquid")]
Network::Liquid => 34224,
#[cfg(feature = "liquid")]
Network::LiquidTestnet => 44324,
#[cfg(feature = "liquid")]
Network::LiquidRegtest => 44224,
};
let daemon_rpc_addr: SocketAddr = str_to_socketaddr(
m.value_of("daemon_rpc_addr")
.unwrap_or(&format!("127.0.0.1:{}", default_daemon_port)),
"Bitcoin RPC",
);
let electrum_rpc_addr: SocketAddr = str_to_socketaddr(
m.value_of("electrum_rpc_addr")
.unwrap_or(&format!("127.0.0.1:{}", default_electrum_port)),
"Electrum RPC",
);
let http_addr: SocketAddr = str_to_socketaddr(
m.value_of("http_addr")
.unwrap_or(&format!("127.0.0.1:{}", default_http_port)),
"HTTP Server",
);
let http_socket_file: Option<PathBuf> = m.value_of("http_socket_file").map(PathBuf::from);
let rpc_socket_file: Option<PathBuf> = m.value_of("rpc_socket_file").map(PathBuf::from);
let monitoring_addr: SocketAddr = str_to_socketaddr(
m.value_of("monitoring_addr")
.unwrap_or(&format!("127.0.0.1:{}", default_monitoring_port)),
"Prometheus monitoring",
);
let mut daemon_dir = m
.value_of("daemon_dir")
.map(PathBuf::from)
.unwrap_or_else(|| {
let mut default_dir = home_dir().expect("no homedir");
default_dir.push(".bitcoin");
default_dir
});
match network_type {
#[cfg(not(feature = "liquid"))]
Network::Bitcoin => (),
#[cfg(not(feature = "liquid"))]
Network::Testnet => daemon_dir.push("testnet3"),
#[cfg(not(feature = "liquid"))]
Network::Testnet4 => daemon_dir.push("testnet4"),
#[cfg(not(feature = "liquid"))]
Network::Regtest => daemon_dir.push("regtest"),
#[cfg(not(feature = "liquid"))]
Network::Signet => daemon_dir.push("signet"),
#[cfg(feature = "liquid")]
Network::Liquid => daemon_dir.push("liquidv1"),
#[cfg(feature = "liquid")]
Network::LiquidTestnet => daemon_dir.push("liquidtestnet"),
#[cfg(feature = "liquid")]
Network::LiquidRegtest => daemon_dir.push("liquidregtest"),
}
let blocks_dir = m
.value_of("blocks_dir")
.map(PathBuf::from)
.unwrap_or_else(|| daemon_dir.join("blocks"));
let cookie = m.value_of("cookie").map(|s| s.to_owned());
let electrum_banner = m
.value_of("electrum_banner")
.map_or_else(|| format!("Welcome to {}", *VERSION_STRING), |s| s.into());
#[cfg(feature = "electrum-discovery")]
let electrum_public_hosts = m
.value_of("electrum_public_hosts")
.map(|s| serde_json::from_str(s).expect("invalid --electrum-public-hosts"));
let mut log = stderrlog::new();
log.verbosity(m.occurrences_of("verbosity") as usize);
log.timestamp(if m.is_present("timestamp") {
stderrlog::Timestamp::Millisecond
} else {
stderrlog::Timestamp::Off
});
log.init().expect("logging initialization failed");
let config = Config {
log,
network_type,
magic,
db_path,
daemon_dir,
blocks_dir,
daemon_rpc_addr,
cookie,
utxos_limit: value_t_or_exit!(m, "utxos_limit", usize),
electrum_rpc_addr,
electrum_txs_limit: value_t_or_exit!(m, "electrum_txs_limit", usize),
electrum_banner,
http_addr,
http_socket_file,
rpc_socket_file,
monitoring_addr,
mempool_backlog_stats_ttl: value_t_or_exit!(m, "mempool_backlog_stats_ttl", u64),
mempool_recent_txs_size: value_t_or_exit!(m, "mempool_recent_txs_size", usize),
rest_default_block_limit: value_t_or_exit!(m, "rest_default_block_limit", usize),
rest_default_chain_txs_per_page: value_t_or_exit!(
m,
"rest_default_chain_txs_per_page",
usize
),
rest_default_max_mempool_txs: value_t_or_exit!(
m,
"rest_default_max_mempool_txs",
usize
),
rest_default_max_address_summary_txs: value_t_or_exit!(
m,
"rest_default_max_address_summary_txs",
usize
),
rest_max_mempool_page_size: value_t_or_exit!(m, "rest_max_mempool_page_size", usize),
rest_max_mempool_txid_page_size: value_t_or_exit!(
m,
"rest_max_mempool_txid_page_size",
usize
),
jsonrpc_import: m.is_present("jsonrpc_import"),
light_mode: m.is_present("light_mode"),
main_loop_delay: value_t_or_exit!(m, "main_loop_delay", u64),
address_search: m.is_present("address_search"),
index_unspendables: m.is_present("index_unspendables"),
cors: m.value_of("cors").map(|s| s.to_string()),
precache_scripts: m.value_of("precache_scripts").map(|s| s.to_string()),
precache_threads: m.value_of("precache_threads").map_or_else(
|| {
std::thread::available_parallelism()
.expect("Can't get core count")
.get()
* 4
},
|s| match s.parse::<usize>() {
Ok(v) if v > 0 => v,
_ => clap::Error::value_validation_auto(format!(
"The argument '{}' isn't a valid value",
s
))
.exit(),
},
),
#[cfg(feature = "liquid")]
parent_network,
#[cfg(feature = "liquid")]
asset_db_path,
#[cfg(feature = "electrum-discovery")]
electrum_public_hosts,
#[cfg(feature = "electrum-discovery")]
electrum_announce: m.is_present("electrum_announce"),
#[cfg(feature = "electrum-discovery")]
tor_proxy: m.value_of("tor_proxy").map(|s| s.parse().unwrap()),
};
eprintln!("{:?}", config);
config
}
pub fn cookie_getter(&self) -> Arc<dyn CookieGetter> {
if let Some(ref value) = self.cookie {
Arc::new(StaticCookie {
value: value.as_bytes().to_vec(),
})
} else {
Arc::new(CookieFile {
daemon_dir: self.daemon_dir.clone(),
})
}
}
}
/// Cookie provider backed by a fixed value supplied via configuration.
struct StaticCookie {
    value: Vec<u8>,
}
impl CookieGetter for StaticCookie {
    /// Return the configured cookie bytes verbatim.
    fn get(&self) -> Result<Vec<u8>> {
        let bytes = self.value.clone();
        Ok(bytes)
    }
}
/// Cookie provider that reads bitcoind's `.cookie` file from `daemon_dir`
/// on every call (the cookie changes whenever bitcoind restarts).
struct CookieFile {
    daemon_dir: PathBuf,
}
impl CookieGetter for CookieFile {
    /// Read `<daemon_dir>/.cookie`, wrapping any I/O failure in a
    /// `Connection` error that names the path.
    fn get(&self) -> Result<Vec<u8>> {
        let cookie_path = self.daemon_dir.join(".cookie");
        fs::read(&cookie_path).chain_err(|| {
            ErrorKind::Connection(format!("failed to read cookie from {:?}", cookie_path))
        })
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/app.rs | src/app.rs | use bitcoin::hashes::sha256d::Hash as Sha256dHash;
use std::sync::{Arc, Mutex};
use crate::{daemon, index, signal::Waiter, store};
use crate::errors::*;
/// Shared application state tying together the store, the index and the
/// bitcoind RPC connection.
pub struct App {
    store: store::DBStore,
    index: index::Index,
    daemon: daemon::Daemon,
    // Best block hash observed by the previous `update()` call.
    tip: Mutex<Sha256dHash>,
}
impl App {
    /// Build the shared application state; verifies bitcoind connectivity
    /// up-front by reconnecting the daemon handle.
    pub fn new(
        store: store::DBStore,
        index: index::Index,
        daemon: daemon::Daemon,
    ) -> Result<Arc<App>> {
        Ok(Arc::new(App {
            store,
            index,
            daemon: daemon.reconnect()?,
            tip: Mutex::new(Sha256dHash::default()),
        }))
    }
    /// The store viewed as a write handle (used by `update()`).
    fn write_store(&self) -> &dyn store::WriteStore {
        &self.store
    }
    // TODO: use index for queries.
    /// The store viewed as a read-only handle.
    pub fn read_store(&self) -> &dyn store::ReadStore {
        &self.store
    }
    pub fn index(&self) -> &index::Index {
        &self.index
    }
    pub fn daemon(&self) -> &daemon::Daemon {
        &self.daemon
    }
    /// Poll bitcoind for a new best block; if the tip moved, run indexing
    /// and remember the new tip. Returns whether a new block was seen.
    pub fn update(&self, signal: &Waiter) -> Result<bool> {
        let mut tip = self.tip.lock().expect("failed to lock tip");
        let new_block = *tip != self.daemon().getbestblockhash()?;
        if new_block {
            // `signal` was previously passed as `&signal` (a needless `&&Waiter`).
            *tip = self.index().update(self.write_store(), signal)?;
        }
        Ok(new_block)
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/errors.rs | src/errors.rs | #![allow(unexpected_cfgs)]
error_chain! {
    types {
        Error, ErrorKind, ResultExt, Result;
    }

    errors {
        // RPC/cookie connectivity problems with bitcoind.
        Connection(msg: String) {
            description("Connection error")
            display("Connection error: {}", msg)
        }

        // Raised when a shutdown signal (e.g. SIGINT/SIGTERM) is received.
        Interrupt(sig: i32) {
            description("Interruption by external signal")
            // Fixed typo in the user-facing message: "Iterrupted" -> "Interrupted".
            display("Interrupted by signal {}", sig)
        }

        TooManyUtxos(limit: usize) {
            description("Too many unspent transaction outputs. Contact support to raise limits.")
            display("Too many unspent transaction outputs (>{}). Contact support to raise limits.", limit)
        }

        TooManyTxs(limit: usize) {
            description("Too many history transactions. Contact support to raise limits.")
            display("Too many history transactions (>{}). Contact support to raise limits.", limit)
        }

        #[cfg(feature = "electrum-discovery")]
        ElectrumClient(e: electrum_client::Error) {
            description("Electrum client error")
            display("Electrum client error: {:?}", e)
        }
    }
}
#[cfg(feature = "electrum-discovery")]
impl From<electrum_client::Error> for Error {
    /// Wrap an electrum client failure into the error-chain `Error` type.
    fn from(e: electrum_client::Error) -> Self {
        ErrorKind::ElectrumClient(e).into()
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/rest.rs | src/rest.rs | use crate::chain::{address, BlockHash, Network, OutPoint, Script, Transaction, TxIn, TxOut, Txid};
use crate::config::{Config, BITCOIND_SUBVER, VERSION_STRING};
use crate::errors;
use crate::metrics::Metrics;
use crate::new_index::{compute_script_hash, Query, SpendingInput, Utxo};
use crate::util::{
create_socket, electrum_merkle, extract_tx_prevouts, full_hash, get_innerscripts, get_tx_fee,
has_prevout, is_coinbase, transaction_sigop_count, BlockHeaderMeta, BlockId, FullHash,
ScriptToAddr, ScriptToAsm, TransactionStatus,
};
#[cfg(not(feature = "liquid"))]
use {bitcoin::consensus::encode, std::str::FromStr};
use bitcoin::blockdata::opcodes;
use bitcoin::hashes::hex::{FromHex, ToHex};
use bitcoin::hashes::Error as HashError;
use hex::{self, FromHexError};
use hyper::{
header::HeaderValue,
service::{make_service_fn, service_fn},
};
use hyper::{Body, Method, Response, Server, StatusCode};
use prometheus::{HistogramOpts, HistogramVec};
use rayon::iter::ParallelIterator;
use tokio::sync::oneshot;
use hyperlocal::UnixServerExt;
use std::{cmp, fs};
#[cfg(feature = "liquid")]
use {
crate::elements::{peg::PegoutValue, AssetSorting, IssuanceValue},
elements::{
confidential::{Asset, Nonce, Value},
encode, AssetId,
},
};
use serde::Serialize;
use serde_json;
use std::collections::HashMap;
use std::num::ParseIntError;
use std::os::unix::fs::FileTypeExt;
use std::sync::Arc;
use std::thread;
use url::form_urlencoded;
const ADDRESS_SEARCH_LIMIT: usize = 10;
// Limit to 300 addresses
const MULTI_ADDRESS_LIMIT: usize = 300;
#[cfg(feature = "liquid")]
const ASSETS_PER_PAGE: usize = 25;
#[cfg(feature = "liquid")]
const ASSETS_MAX_PER_PAGE: usize = 100;
// Cache-Control max-age values, in seconds.
const TTL_LONG: u32 = 157_784_630; // ttl for static resources (5 years)
const TTL_SHORT: u32 = 10; // ttl for volatile resources
const TTL_MEMPOOL_RECENT: u32 = 5; // ttl for GET /mempool/recent
const CONF_FINAL: usize = 10; // reorgs deeper than this are considered unlikely
// internal api prefix
const INTERNAL_PREFIX: &str = "internal";
/// JSON-serializable block summary returned by the block REST endpoints.
#[derive(Serialize, Deserialize)]
struct BlockValue {
    id: String,
    height: u32,
    version: u32,
    timestamp: u32,
    tx_count: u32,
    size: u32,
    weight: u32,
    merkle_root: String,
    // None for the genesis block (its prev hash is all-zero).
    previousblockhash: Option<String>,
    mediantime: u32,
    // Proof-of-work fields only exist on Bitcoin-family networks.
    #[cfg(not(feature = "liquid"))]
    nonce: u32,
    #[cfg(not(feature = "liquid"))]
    bits: u32,
    #[cfg(not(feature = "liquid"))]
    difficulty: f64,
    // Elements/Liquid block-header extension data.
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    ext: Option<serde_json::Value>,
}
impl BlockValue {
    /// Build the REST representation of a block from its header entry and
    /// indexed metadata (tx count, size, weight, median-time-past).
    #[cfg_attr(feature = "liquid", allow(unused_variables))]
    fn new(blockhm: BlockHeaderMeta) -> Self {
        let header = blockhm.header_entry.header();
        BlockValue {
            id: header.block_hash().to_hex(),
            height: blockhm.header_entry.height() as u32,
            version: {
                #[allow(clippy::unnecessary_cast)]
                {
                    header.version as u32
                }
            },
            timestamp: header.time,
            tx_count: blockhm.meta.tx_count,
            size: blockhm.meta.size,
            weight: blockhm.meta.weight,
            merkle_root: header.merkle_root.to_hex(),
            // Genesis has an all-zero prev hash; report it as None.
            previousblockhash: if header.prev_blockhash != BlockHash::default() {
                Some(header.prev_blockhash.to_hex())
            } else {
                None
            },
            mediantime: blockhm.mtp,
            #[cfg(not(feature = "liquid"))]
            bits: header.bits,
            #[cfg(not(feature = "liquid"))]
            nonce: header.nonce,
            #[cfg(not(feature = "liquid"))]
            difficulty: difficulty_new(header),
            #[cfg(feature = "liquid")]
            ext: Some(json!(header.ext)),
        }
    }
}
/// Difficulty of a block header, computed the way Bitcoin Core does it.
///
/// Port of:
/// https://github.com/bitcoin/bitcoin/blob/v25.0/src/rpc/blockchain.cpp#L75-L97
#[cfg_attr(feature = "liquid", allow(dead_code))]
fn difficulty_new(bh: &bitcoin::BlockHeader) -> f64 {
    let compact = bh.bits;
    let exponent = (compact >> 24) & 0xff;
    let mut difficulty = (0x0000ffff as f64) / ((compact & 0x00ffffff) as f64);
    // Normalize the compact exponent to 29, scaling by 256 per step
    // (each multiply/divide by 256.0 is exact in f64, matching Core).
    if exponent < 29 {
        for _ in exponent..29 {
            difficulty *= 256.0;
        }
    } else {
        for _ in 29..exponent {
            difficulty /= 256.0;
        }
    }
    difficulty
}
/// JSON-serializable transaction representation used by the tx REST endpoints.
#[derive(Serialize, Deserialize)]
struct TransactionValue {
    txid: Txid,
    version: u32,
    locktime: u32,
    vin: Vec<TxInValue>,
    vout: Vec<TxOutValue>,
    size: u32,
    weight: u32,
    sigops: u32,
    fee: u64,
    // Confirmation status; omitted from the JSON when None.
    #[serde(skip_serializing_if = "Option::is_none")]
    status: Option<TransactionStatus>,
}
impl TransactionValue {
    /// Assemble the REST view of a transaction.
    ///
    /// `txos` must contain the previous outputs spent by `tx`; resolution
    /// failures propagate as errors (callers like `prepare_txs` drop such
    /// transactions rather than serving incomplete data).
    fn new(
        tx: Transaction,
        blockid: Option<BlockId>,
        txos: &HashMap<OutPoint, TxOut>,
        config: &Config,
    ) -> Result<Self, errors::Error> {
        let prevouts = extract_tx_prevouts(&tx, txos)?;
        let sigops = transaction_sigop_count(&tx, &prevouts)
            .map_err(|_| errors::Error::from("Couldn't count sigops"))? as u32;
        // Each input is paired with its prevout (keyed by input index).
        let vins: Vec<TxInValue> = tx
            .input
            .iter()
            .enumerate()
            .map(|(index, txin)| {
                TxInValue::new(txin, prevouts.get(&(index as u32)).cloned(), config)
            })
            .collect();
        let vouts: Vec<TxOutValue> = tx
            .output
            .iter()
            .map(|txout| TxOutValue::new(txout, config))
            .collect();
        let fee = get_tx_fee(&tx, &prevouts, config.network_type);
        #[allow(clippy::unnecessary_cast)]
        Ok(TransactionValue {
            txid: tx.txid(),
            version: tx.version as u32,
            locktime: tx.lock_time,
            vin: vins,
            vout: vouts,
            size: tx.size() as u32,
            weight: tx.weight() as u32,
            sigops,
            fee,
            status: Some(TransactionStatus::from(blockid)),
        })
    }
}
/// JSON-serializable transaction input, including its resolved prevout
/// when available.
#[derive(Serialize, Deserialize, Clone)]
struct TxInValue {
    // Outpoint being spent (txid:vout of the previous output).
    txid: Txid,
    vout: u32,
    prevout: Option<TxOutValue>,
    scriptsig: Script,
    scriptsig_asm: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    witness: Option<Vec<String>>,
    is_coinbase: bool,
    sequence: u32,
    // Decoded inner scripts (P2SH redeem script / witness script), when present.
    #[serde(skip_serializing_if = "Option::is_none")]
    inner_redeemscript_asm: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    inner_witnessscript_asm: Option<String>,
    #[cfg(feature = "liquid")]
    is_pegin: bool,
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    issuance: Option<IssuanceValue>,
}
impl TxInValue {
    /// Build the REST view of a tx input. `prevout` is the output this input
    /// spends, if it could be resolved.
    fn new(txin: &TxIn, prevout: Option<&TxOut>, config: &Config) -> Self {
        let witness = &txin.witness;
        // Elements wraps the script witness inside a larger witness struct.
        #[cfg(feature = "liquid")]
        let witness = &witness.script_witness;
        // Empty witnesses are omitted from the JSON entirely.
        let witness = if !witness.is_empty() {
            Some(witness.iter().map(hex::encode).collect())
        } else {
            None
        };
        let is_coinbase = is_coinbase(txin);
        let innerscripts = prevout.map(|prevout| get_innerscripts(txin, prevout));
        TxInValue {
            txid: txin.previous_output.txid,
            vout: txin.previous_output.vout,
            prevout: prevout.map(|prevout| TxOutValue::new(prevout, config)),
            scriptsig_asm: txin.script_sig.to_asm(),
            witness,
            inner_redeemscript_asm: innerscripts
                .as_ref()
                .and_then(|i| i.redeem_script.as_ref())
                .map(ScriptToAsm::to_asm),
            inner_witnessscript_asm: innerscripts
                .as_ref()
                .and_then(|i| i.witness_script.as_ref())
                .map(ScriptToAsm::to_asm),
            is_coinbase,
            sequence: txin.sequence,
            #[cfg(feature = "liquid")]
            is_pegin: txin.is_pegin,
            #[cfg(feature = "liquid")]
            issuance: if txin.has_issuance() {
                Some(IssuanceValue::from(txin))
            } else {
                None
            },
            scriptsig: txin.script_sig.clone(),
        }
    }
}
/// JSON-serializable transaction output.
///
/// On Liquid, amounts/assets may be confidential: the explicit fields are
/// then None and the corresponding commitment fields carry the blinded data.
#[derive(Serialize, Deserialize, Clone)]
struct TxOutValue {
    scriptpubkey: Script,
    scriptpubkey_asm: String,
    scriptpubkey_type: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    scriptpubkey_address: Option<String>,
    #[cfg(not(feature = "liquid"))]
    value: u64,
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    value: Option<u64>,
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    valuecommitment: Option<String>,
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    asset: Option<String>,
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    assetcommitment: Option<String>,
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pegout: Option<PegoutValue>,
}
impl TxOutValue {
    /// Build the REST view of a tx output, classifying its script type and
    /// (on Liquid) splitting explicit vs. confidential value/asset data.
    fn new(txout: &TxOut, config: &Config) -> Self {
        #[cfg(not(feature = "liquid"))]
        let value = txout.value;
        #[cfg(feature = "liquid")]
        let value = txout.value.explicit();
        #[cfg(feature = "liquid")]
        let valuecommitment = match txout.value {
            Value::Confidential(..) => Some(hex::encode(encode::serialize(&txout.value))),
            _ => None,
        };
        #[cfg(feature = "liquid")]
        let asset = match txout.asset {
            Asset::Explicit(value) => Some(value.to_hex()),
            _ => None,
        };
        #[cfg(feature = "liquid")]
        let assetcommitment = match txout.asset {
            Asset::Confidential(..) => Some(hex::encode(encode::serialize(&txout.asset))),
            _ => None,
        };
        // Fee outputs only exist on Elements/Liquid.
        #[cfg(not(feature = "liquid"))]
        let is_fee = false;
        #[cfg(feature = "liquid")]
        let is_fee = txout.is_fee();
        let script = &txout.script_pubkey;
        let script_asm = script.to_asm();
        let script_addr = script.to_address_str(config.network_type);
        // TODO: should the following be something to put inside the rust-elements lib?
        // First matching predicate wins; order matters (e.g. anchor before
        // provably_unspendable).
        let script_type = if is_fee {
            "fee"
        } else if script.is_empty() {
            "empty"
        } else if script.is_op_return() {
            "op_return"
        } else if script.is_p2pk() {
            "p2pk"
        } else if script.is_p2pkh() {
            "p2pkh"
        } else if script.is_p2sh() {
            "p2sh"
        } else if script.is_v0_p2wpkh() {
            "v0_p2wpkh"
        } else if script.is_v0_p2wsh() {
            "v0_p2wsh"
        } else if is_v1_p2tr(script) {
            "v1_p2tr"
        } else if is_anchor(script) {
            "anchor"
        } else if script.is_provably_unspendable() {
            "provably_unspendable"
        } else if is_bare_multisig(script) {
            "multisig"
        } else {
            "unknown"
        };
        #[cfg(feature = "liquid")]
        let pegout = PegoutValue::from_txout(txout, config.network_type, config.parent_network);
        TxOutValue {
            scriptpubkey: script.clone(),
            scriptpubkey_asm: script_asm,
            scriptpubkey_address: script_addr,
            scriptpubkey_type: script_type.to_string(),
            value,
            #[cfg(feature = "liquid")]
            valuecommitment,
            #[cfg(feature = "liquid")]
            asset,
            #[cfg(feature = "liquid")]
            assetcommitment,
            #[cfg(feature = "liquid")]
            pegout,
        }
    }
}
/// Whether the script is a witness v1 taproot output:
/// OP_1 followed by a 32-byte push (34 bytes total).
fn is_v1_p2tr(script: &Script) -> bool {
    if script.len() != 34 {
        return false;
    }
    script[0] == opcodes::all::OP_PUSHNUM_1.into_u8()
        && script[1] == opcodes::all::OP_PUSHBYTES_32.into_u8()
}
/// Heuristic check for a bare multisig script of the shape
/// `OP_M <pubkeys...> OP_N OP_CHECKMULTISIG`.
///
/// The pubkeys themselves are not validated; we only check the frame:
/// the smallest case (1-of-1) is 37 bytes, OP_N caps at 15 keys, and
/// M must not exceed N (a 4-of-2 makes no sense).
fn is_bare_multisig(script: &Script) -> bool {
    let len = script.len();
    if len < 37 {
        return false;
    }
    let op_m = script[0];
    let op_n = script[len - 2];
    script[len - 1] == opcodes::all::OP_CHECKMULTISIG.into_u8()
        && op_n >= opcodes::all::OP_PUSHNUM_1.into_u8()
        && op_n <= opcodes::all::OP_PUSHNUM_15.into_u8()
        && op_m >= opcodes::all::OP_PUSHNUM_1.into_u8()
        && op_m <= op_n
}
/// Whether the script is the 4-byte anchor output pattern:
/// OP_1 OP_PUSHBYTES_2 0x4e 0x73.
fn is_anchor(script: &Script) -> bool {
    if script.len() != 4 {
        return false;
    }
    script[0] == opcodes::all::OP_PUSHNUM_1.into_u8()
        && script[1] == opcodes::all::OP_PUSHBYTES_2.into_u8()
        && script[2] == 0x4e
        && script[3] == 0x73
}
/// JSON-serializable unspent transaction output.
///
/// On Liquid, value/asset/nonce may each be either explicit or a
/// confidential commitment (mutually exclusive Option pairs).
#[derive(Serialize)]
struct UtxoValue {
    txid: Txid,
    vout: u32,
    status: TransactionStatus,
    #[cfg(not(feature = "liquid"))]
    value: u64,
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    value: Option<u64>,
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    valuecommitment: Option<String>,
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    asset: Option<String>,
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    assetcommitment: Option<String>,
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    nonce: Option<String>,
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Option::is_none")]
    noncecommitment: Option<String>,
    // Range/surjection proofs are hex-encoded and omitted when empty.
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Vec::is_empty", with = "crate::util::serde_hex")]
    surjection_proof: Vec<u8>,
    #[cfg(feature = "liquid")]
    #[serde(skip_serializing_if = "Vec::is_empty", with = "crate::util::serde_hex")]
    range_proof: Vec<u8>,
}
impl From<Utxo> for UtxoValue {
    /// Convert an indexed UTXO to its REST representation, splitting
    /// explicit vs. confidential fields on Liquid.
    fn from(utxo: Utxo) -> Self {
        UtxoValue {
            txid: utxo.txid,
            vout: utxo.vout,
            status: TransactionStatus::from(utxo.confirmed),
            #[cfg(not(feature = "liquid"))]
            value: utxo.value,
            #[cfg(feature = "liquid")]
            value: match utxo.value {
                Value::Explicit(value) => Some(value),
                _ => None,
            },
            #[cfg(feature = "liquid")]
            valuecommitment: match utxo.value {
                Value::Confidential(..) => Some(hex::encode(encode::serialize(&utxo.value))),
                _ => None,
            },
            #[cfg(feature = "liquid")]
            asset: match utxo.asset {
                Asset::Explicit(asset) => Some(asset.to_hex()),
                _ => None,
            },
            #[cfg(feature = "liquid")]
            assetcommitment: match utxo.asset {
                Asset::Confidential(..) => Some(hex::encode(encode::serialize(&utxo.asset))),
                _ => None,
            },
            #[cfg(feature = "liquid")]
            nonce: match utxo.nonce {
                Nonce::Explicit(nonce) => Some(nonce.to_hex()),
                _ => None,
            },
            #[cfg(feature = "liquid")]
            noncecommitment: match utxo.nonce {
                Nonce::Confidential(..) => Some(hex::encode(encode::serialize(&utxo.nonce))),
                _ => None,
            },
            // Missing proofs serialize as empty vectors (and are skipped).
            #[cfg(feature = "liquid")]
            surjection_proof: utxo
                .witness
                .surjection_proof
                .map_or(vec![], |p| (*p).serialize()),
            #[cfg(feature = "liquid")]
            range_proof: utxo.witness.rangeproof.map_or(vec![], |p| (*p).serialize()),
        }
    }
}
/// Spend status of an output; the `Default` derive yields the
/// not-spent variant (`spent: false`, all other fields omitted).
#[derive(Serialize, Default)]
struct SpendingValue {
    spent: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    txid: Option<Txid>,
    #[serde(skip_serializing_if = "Option::is_none")]
    vin: Option<u32>,
    #[serde(skip_serializing_if = "Option::is_none")]
    status: Option<TransactionStatus>,
}
impl From<SpendingInput> for SpendingValue {
fn from(spend: SpendingInput) -> Self {
SpendingValue {
spent: true,
txid: Some(spend.txid),
vin: Some(spend.vin),
status: Some(TransactionStatus::from(spend.confirmed)),
}
}
}
/// Cache TTL for a resource confirmed at `height`: long-lived once buried
/// at least `CONF_FINAL` blocks deep (reorg unlikely), short otherwise
/// (including unconfirmed, i.e. `height == None`).
fn ttl_by_depth(height: Option<usize>, query: &Query) -> u32 {
    match height {
        Some(height) if query.chain().best_height() - height >= CONF_FINAL => TTL_LONG,
        _ => TTL_SHORT,
    }
}
/// Where a txid was found when resolving `after_txid`-style lookups.
enum TxidLocation {
    Mempool,
    Chain(u32), // contains height
    None,
}
/// Locate a txid: the mempool takes priority, then the confirmed chain
/// index (reporting the confirming block's height), else `None`.
#[inline]
fn find_txid(
    txid: &Txid,
    mempool: &crate::new_index::Mempool,
    chain: &crate::new_index::ChainQuery,
) -> TxidLocation {
    if mempool.lookup_txn(txid).is_some() {
        return TxidLocation::Mempool;
    }
    match chain.tx_confirming_block(txid) {
        Some(block) => TxidLocation::Chain(block.height as u32),
        None => TxidLocation::None,
    }
}
/// Convert `(tx, blockid)` pairs into their REST representations.
///
/// Transactions whose prevouts cannot be resolved are dropped from the
/// response rather than being returned with incorrect data.
fn prepare_txs(
    txs: Vec<(Transaction, Option<BlockId>)>,
    query: &Query,
    config: &Config,
) -> Vec<TransactionValue> {
    // Collect every spent outpoint (skipping inputs without a prevout) so
    // all prevouts can be fetched with a single lookup.
    let outpoints = txs
        .iter()
        .flat_map(|(tx, _)| tx.input.iter())
        .filter(|txin| has_prevout(txin))
        .map(|txin| txin.previous_output)
        .collect();
    let prevouts = query.lookup_txos(&outpoints);
    let mut values = Vec::with_capacity(txs.len());
    for (tx, blockid) in txs {
        if let Ok(value) = TransactionValue::new(tx, blockid, &prevouts, config) {
            values.push(value);
        }
    }
    values
}
/// Tokio entry point for the REST server.
///
/// Serves either on a TCP address or, when `http_socket_file` is set, on a
/// unix socket; shuts down gracefully once `rx` fires.
#[tokio::main]
async fn run_server(
    config: Arc<Config>,
    query: Arc<Query>,
    rx: oneshot::Receiver<()>,
    metric: HistogramVec,
) {
    let addr = &config.http_addr;
    let socket_file = &config.http_socket_file;
    let config = Arc::clone(&config);
    let query = Arc::clone(&query);
    // Factory producing one hyper service per connection; each request
    // handler captures its own Arc clones of the shared state.
    let make_service_fn_inn = || {
        let query = Arc::clone(&query);
        let config = Arc::clone(&config);
        let metric = metric.clone();
        async move {
            Ok::<_, hyper::Error>(service_fn(move |req| {
                let query = Arc::clone(&query);
                let config = Arc::clone(&config);
                // Times the full request, labeled "all_methods".
                let timer = metric.with_label_values(&["all_methods"]).start_timer();
                async move {
                    let method = req.method().clone();
                    let uri = req.uri().clone();
                    let body = hyper::body::to_bytes(req.into_body()).await?;
                    // handle_request is synchronous; block_in_place keeps it
                    // from stalling the async executor thread.
                    let mut resp = tokio::task::block_in_place(|| {
                        handle_request(method, uri, body, &query, &config)
                    })
                    // Handler errors become plain-text responses with the
                    // status code they carry.
                    .unwrap_or_else(|err| {
                        warn!("{:?}", err);
                        Response::builder()
                            .status(err.0)
                            .header("Content-Type", "text/plain")
                            .body(Body::from(err.1))
                            .unwrap()
                    });
                    resp.headers_mut()
                        .insert("X-Powered-By", HeaderValue::from_static(&VERSION_STRING));
                    if let Some(ref origins) = config.cors {
                        resp.headers_mut()
                            .insert("Access-Control-Allow-Origin", origins.parse().unwrap());
                    }
                    if let Some(subver) = BITCOIND_SUBVER.get() {
                        resp.headers_mut()
                            .insert("X-Bitcoin-Version", HeaderValue::from_static(subver));
                    }
                    timer.observe_duration();
                    Ok::<_, hyper::Error>(resp)
                }
            }))
        }
    };
    let server = match socket_file {
        None => {
            info!("REST server running on {}", addr);
            let socket = create_socket(addr);
            socket.listen(511).expect("setting backlog failed");
            Server::from_tcp(socket.into())
                .expect("Server::from_tcp failed")
                .serve(make_service_fn(move |_| make_service_fn_inn()))
                .with_graceful_shutdown(async {
                    rx.await.ok();
                })
                .await
        }
        Some(path) => {
            if let Ok(meta) = fs::metadata(path) {
                // Cleanup socket file left by previous execution
                if meta.file_type().is_socket() {
                    fs::remove_file(path).ok();
                }
            }
            info!("REST server running on unix socket {}", path.display());
            Server::bind_unix(path)
                .expect("Server::bind_unix failed")
                .serve(make_service_fn(move |_| make_service_fn_inn()))
                .with_graceful_shutdown(async {
                    rx.await.ok();
                })
                .await
        }
    };
    if let Err(e) = server {
        eprintln!("server error: {}", e);
    }
}
/// Spawn the REST server on its own OS thread and return a `Handle`
/// that can shut it down.
pub fn start(config: Arc<Config>, query: Arc<Query>, metrics: &Metrics) -> Handle {
    let (tx, rx) = oneshot::channel::<()>();
    let response_timer = metrics.histogram_vec(
        HistogramOpts::new("electrs_rest_api", "Electrs REST API response timings"),
        &["method"],
    );
    let thread = crate::util::spawn_thread("rest-server", move || {
        run_server(config, query, rx, response_timer);
    });
    Handle { tx, thread }
}
/// Shutdown channel + join handle for the running REST server thread.
pub struct Handle {
    tx: oneshot::Sender<()>,
    thread: thread::JoinHandle<()>,
}
impl Handle {
pub fn stop(self) {
self.tx.send(()).expect("failed to send shutdown signal");
self.thread.join().expect("REST server failed");
}
}
fn handle_request(
method: Method,
uri: hyper::Uri,
body: hyper::body::Bytes,
query: &Query,
config: &Config,
) -> Result<Response<Body>, HttpError> {
// TODO it looks hyper does not have routing and query parsing :(
let path: Vec<&str> = uri.path().split('/').skip(1).collect();
let query_params = match uri.query() {
Some(value) => form_urlencoded::parse(value.as_bytes())
.into_owned()
.collect::<HashMap<String, String>>(),
None => HashMap::new(),
};
info!("handle {:?} {:?}", method, uri);
match (
&method,
path.first(),
path.get(1),
path.get(2),
path.get(3),
path.get(4),
) {
(&Method::GET, Some(&"blocks"), Some(&"tip"), Some(&"hash"), None, None) => http_message(
StatusCode::OK,
query.chain().best_hash().to_hex(),
TTL_SHORT,
),
(&Method::GET, Some(&"blocks"), Some(&"tip"), Some(&"height"), None, None) => http_message(
StatusCode::OK,
query.chain().best_height().to_string(),
TTL_SHORT,
),
(&Method::GET, Some(&"blocks"), start_height, None, None, None) => {
let start_height = start_height.and_then(|height| height.parse::<usize>().ok());
blocks(query, config, start_height)
}
(&Method::GET, Some(&"block-height"), Some(height), None, None, None) => {
let height = height.parse::<usize>()?;
let header = query
.chain()
.header_by_height(height)
.ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
let ttl = ttl_by_depth(Some(height), query);
http_message(StatusCode::OK, header.hash().to_hex(), ttl)
}
(&Method::GET, Some(&"block"), Some(hash), None, None, None) => {
let hash = BlockHash::from_hex(hash)?;
let blockhm = query
.chain()
.get_block_with_meta(&hash)
.ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
let block_value = BlockValue::new(blockhm);
json_response(block_value, TTL_LONG)
}
(&Method::GET, Some(&"block"), Some(hash), Some(&"status"), None, None) => {
let hash = BlockHash::from_hex(hash)?;
let status = query.chain().get_block_status(&hash);
let ttl = ttl_by_depth(status.height, query);
json_response(status, ttl)
}
(&Method::GET, Some(&"block"), Some(hash), Some(&"txids"), None, None) => {
let hash = BlockHash::from_hex(hash)?;
let txids = query
.chain()
.get_block_txids(&hash)
.ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
json_response(txids, TTL_LONG)
}
(&Method::GET, Some(&INTERNAL_PREFIX), Some(&"block"), Some(hash), Some(&"txs"), None) => {
let hash = BlockHash::from_hex(hash)?;
let block_id = query.chain().blockid_by_hash(&hash);
let txs = query
.chain()
.get_block_txs(&hash)
.ok_or_else(|| HttpError::not_found("Block not found".to_string()))?
.into_iter()
.map(|tx| (tx, block_id.clone()))
.collect();
let ttl = ttl_by_depth(block_id.map(|b| b.height), query);
json_response(prepare_txs(txs, query, config), ttl)
}
(&Method::GET, Some(&"block"), Some(hash), Some(&"header"), None, None) => {
let hash = BlockHash::from_hex(hash)?;
let header = query
.chain()
.get_block_header(&hash)
.ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
let header_hex = hex::encode(encode::serialize(&header));
http_message(StatusCode::OK, header_hex, TTL_LONG)
}
(&Method::GET, Some(&"block"), Some(hash), Some(&"raw"), None, None) => {
let hash = BlockHash::from_hex(hash)?;
let raw = query
.chain()
.get_block_raw(&hash)
.ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
Ok(Response::builder()
.status(StatusCode::OK)
.header("Content-Type", "application/octet-stream")
.header("Cache-Control", format!("public, max-age={:}", TTL_LONG))
.body(Body::from(raw))
.unwrap())
}
(&Method::GET, Some(&"block"), Some(hash), Some(&"txid"), Some(index), None) => {
let hash = BlockHash::from_hex(hash)?;
let index: usize = index.parse()?;
let txids = query
.chain()
.get_block_txids(&hash)
.ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
if index >= txids.len() {
bail!(HttpError::not_found("tx index out of range".to_string()));
}
http_message(StatusCode::OK, txids[index].to_hex(), TTL_LONG)
}
(&Method::GET, Some(&"block"), Some(hash), Some(&"txs"), start_index, None) => {
let hash = BlockHash::from_hex(hash)?;
let txids = query
.chain()
.get_block_txids(&hash)
.ok_or_else(|| HttpError::not_found("Block not found".to_string()))?;
let start_index = start_index.map_or(0u32, |el| el.parse().unwrap_or(0)) as usize;
if start_index >= txids.len() {
bail!(HttpError::not_found("start index out of range".to_string()));
} else if start_index % config.rest_default_chain_txs_per_page != 0 {
bail!(HttpError::from(format!(
"start index must be a multipication of {}",
config.rest_default_chain_txs_per_page
)));
}
// blockid_by_hash() only returns the BlockId for non-orphaned blocks,
// or None for orphaned
let confirmed_blockid = query.chain().blockid_by_hash(&hash);
let txs = txids
.iter()
.skip(start_index)
.take(config.rest_default_chain_txs_per_page)
.map(|txid| {
query
.lookup_txn(txid)
.map(|tx| (tx, confirmed_blockid.clone()))
.ok_or_else(|| "missing tx".to_string())
})
.collect::<Result<Vec<(Transaction, Option<BlockId>)>, _>>()?;
// XXX orphraned blocks alway get TTL_SHORT
let ttl = ttl_by_depth(confirmed_blockid.map(|b| b.height), query);
json_response(prepare_txs(txs, query, config), ttl)
}
(&Method::GET, Some(script_type @ &"address"), Some(script_str), None, None, None)
| (&Method::GET, Some(script_type @ &"scripthash"), Some(script_str), None, None, None) => {
let script_hash = to_scripthash(script_type, script_str, config.network_type)?;
let stats = query.stats(&script_hash[..]);
json_response(
json!({
*script_type: script_str,
"chain_stats": stats.0,
"mempool_stats": stats.1,
}),
TTL_SHORT,
)
}
(
&Method::GET,
Some(script_type @ &"address"),
Some(script_str),
Some(&"txs"),
None,
None,
)
| (
&Method::GET,
Some(script_type @ &"scripthash"),
Some(script_str),
Some(&"txs"),
None,
None,
) => {
let script_hash = to_scripthash(script_type, script_str, config.network_type)?;
let max_txs = query_params
.get("max_txs")
.and_then(|s| s.parse::<usize>().ok())
.unwrap_or(config.rest_default_max_mempool_txs);
let after_txid = query_params
.get("after_txid")
.and_then(|s| s.parse::<Txid>().ok());
let mut txs = vec![];
let after_txid_location = if let Some(txid) = &after_txid {
find_txid(txid, &query.mempool(), query.chain())
} else {
TxidLocation::Mempool
};
let confirmed_block_height = match after_txid_location {
TxidLocation::Mempool => {
txs.extend(
query
.mempool()
.history(&script_hash[..], after_txid.as_ref(), max_txs)
.into_iter()
.map(|tx| (tx, None)),
);
None
}
TxidLocation::None => {
return Err(HttpError(
StatusCode::UNPROCESSABLE_ENTITY,
String::from("after_txid not found"),
));
}
TxidLocation::Chain(height) => Some(height),
};
if txs.len() < max_txs {
let after_txid_ref = if !txs.is_empty() {
// If there are any txs, we know mempool found the
// after_txid IF it exists... so always return None.
None
} else {
after_txid.as_ref()
};
let mut confirmed_txs = query
.chain()
.history(
&script_hash[..],
after_txid_ref,
confirmed_block_height,
max_txs - txs.len(),
)
.map(|res| {
res.map(|(tx, blockid, tx_position)| (tx, Some(blockid), tx_position))
})
.collect::<Result<Vec<_>, _>>()?;
confirmed_txs.sort_unstable_by(
|(_, blockid1, tx_position1), (_, blockid2, tx_position2)| {
blockid2
.as_ref()
.map(|b| b.height)
.cmp(&blockid1.as_ref().map(|b| b.height))
.then_with(|| tx_position2.cmp(tx_position1))
},
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | true |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/lib.rs | src/lib.rs | #![recursion_limit = "1024"]
extern crate rocksdb;
#[macro_use]
extern crate clap;
#[macro_use]
extern crate arrayref;
#[macro_use]
extern crate error_chain;
#[macro_use]
extern crate log;
#[macro_use]
extern crate serde_derive;
#[macro_use]
extern crate serde_json;
#[macro_use]
extern crate lazy_static;
pub mod chain;
pub mod config;
pub mod daemon;
pub mod electrum;
pub mod errors;
pub mod metrics;
pub mod new_index;
pub mod rest;
pub mod signal;
pub mod util;
#[cfg(feature = "liquid")]
pub mod elements;
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/signal.rs | src/signal.rs | use crossbeam_channel as channel;
use crossbeam_channel::RecvTimeoutError;
use std::time::{Duration, Instant};
use signal_hook::consts::{SIGINT, SIGTERM, SIGUSR1};
use crate::errors::*;
/// Receiver side of the process-signal channel; cloneable so several
/// threads can wait on signals independently.
#[derive(Clone)] // so multiple threads could wait on signals
pub struct Waiter {
    receiver: channel::Receiver<i32>,
}
/// Register a handler for `signals` and forward each received signal
/// into a bounded channel from a dedicated notifier thread.
fn notify(signals: &[i32]) -> channel::Receiver<i32> {
    let (sender, receiver) = channel::bounded(1);
    let mut registered =
        signal_hook::iterator::Signals::new(signals).expect("failed to register signal hook");
    crate::util::spawn_thread("signal-notifier", move || {
        for sig in registered.forever() {
            sender
                .send(sig)
                .unwrap_or_else(|_| panic!("failed to send signal {}", sig));
        }
    });
    receiver
}
impl Waiter {
    /// Creates a waiter listening for SIGINT/SIGTERM (shutdown) and SIGUSR1
    /// (external wakeup).
    pub fn start() -> Waiter {
        Waiter {
            receiver: notify(&[
                SIGINT, SIGTERM,
                SIGUSR1, // allow external triggering (e.g. via bitcoind `blocknotify`)
            ]),
        }
    }

    /// Sleeps for up to `duration`. Returns `Ok(())` on timeout, or early on
    /// SIGUSR1 when `accept_sigusr` is set; SIGINT/SIGTERM yield an
    /// `Interrupt` error.
    pub fn wait(&self, duration: Duration, accept_sigusr: bool) -> Result<()> {
        // Determine the deadline time based on the duration, so that it doesn't
        // get pushed back when wait_deadline() recurses
        self.wait_deadline(Instant::now() + duration, accept_sigusr)
    }

    fn wait_deadline(&self, deadline: Instant, accept_sigusr: bool) -> Result<()> {
        match self.receiver.recv_deadline(deadline) {
            Ok(sig) if sig == SIGUSR1 => {
                trace!("notified via SIGUSR1");
                if accept_sigusr {
                    Ok(())
                } else {
                    // Ignore SIGUSR1 and keep waiting until the same deadline.
                    self.wait_deadline(deadline, accept_sigusr)
                }
            }
            Ok(sig) => bail!(ErrorKind::Interrupt(sig)),
            Err(RecvTimeoutError::Timeout) => Ok(()),
            Err(RecvTimeoutError::Disconnected) => bail!("signal hook channel disconnected"),
        }
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/chain.rs | src/chain.rs | use std::str::FromStr;
#[cfg(not(feature = "liquid"))] // use regular Bitcoin data structures
pub use bitcoin::{
blockdata::{opcodes, script, witness::Witness},
consensus::deserialize,
hashes,
util::address,
Block, BlockHash, BlockHeader, OutPoint, Script, Transaction, TxIn, TxOut, Txid,
};
#[cfg(feature = "liquid")]
pub use {
crate::elements::asset,
elements::{
address, confidential, encode::deserialize, hashes, opcodes, script, Address, AssetId,
Block, BlockHash, BlockHeader, OutPoint, Script, Transaction, TxIn, TxInWitness as Witness,
TxOut, Txid,
},
};
use bitcoin::blockdata::constants::genesis_block;
pub use bitcoin::network::constants::Network as BNetwork;
#[cfg(not(feature = "liquid"))]
pub type Value = u64;
#[cfg(feature = "liquid")]
pub use confidential::Value;
#[derive(Debug, Copy, Clone, PartialEq, Hash, Serialize, Ord, PartialOrd, Eq)]
pub enum Network {
#[cfg(not(feature = "liquid"))]
Bitcoin,
#[cfg(not(feature = "liquid"))]
Testnet,
#[cfg(not(feature = "liquid"))]
Testnet4,
#[cfg(not(feature = "liquid"))]
Regtest,
#[cfg(not(feature = "liquid"))]
Signet,
#[cfg(feature = "liquid")]
Liquid,
#[cfg(feature = "liquid")]
LiquidTestnet,
#[cfg(feature = "liquid")]
LiquidRegtest,
}
#[cfg(feature = "liquid")]
pub const LIQUID_TESTNET_PARAMS: address::AddressParams = address::AddressParams {
p2pkh_prefix: 36,
p2sh_prefix: 19,
blinded_prefix: 23,
bech_hrp: "tex",
blech_hrp: "tlq",
};
/// Magic for testnet4, 0x1c163f28 (from BIP94) with flipped endianness.
#[cfg(not(feature = "liquid"))]
const TESTNET4_MAGIC: u32 = 0x283f161c;
impl Network {
#[cfg(not(feature = "liquid"))]
pub fn magic(self) -> u32 {
match self {
Self::Testnet4 => TESTNET4_MAGIC,
_ => BNetwork::from(self).magic(),
}
}
#[cfg(feature = "liquid")]
pub fn magic(self) -> u32 {
match self {
Network::Liquid | Network::LiquidRegtest => 0xDAB5_BFFA,
Network::LiquidTestnet => 0x62DD_0E41,
}
}
pub fn is_regtest(self) -> bool {
match self {
#[cfg(not(feature = "liquid"))]
Network::Regtest => true,
#[cfg(feature = "liquid")]
Network::LiquidRegtest => true,
_ => false,
}
}
#[cfg(feature = "liquid")]
pub fn address_params(self) -> &'static address::AddressParams {
// Liquid regtest uses elements's address params
match self {
Network::Liquid => &address::AddressParams::LIQUID,
Network::LiquidRegtest => &address::AddressParams::ELEMENTS,
Network::LiquidTestnet => &LIQUID_TESTNET_PARAMS,
}
}
#[cfg(feature = "liquid")]
pub fn native_asset(self) -> &'static AssetId {
match self {
Network::Liquid => &asset::NATIVE_ASSET_ID,
Network::LiquidTestnet => &asset::NATIVE_ASSET_ID_TESTNET,
Network::LiquidRegtest => &asset::NATIVE_ASSET_ID_REGTEST,
}
}
#[cfg(feature = "liquid")]
pub fn pegged_asset(self) -> Option<&'static AssetId> {
match self {
Network::Liquid => Some(&*asset::NATIVE_ASSET_ID),
Network::LiquidTestnet | Network::LiquidRegtest => None,
}
}
pub fn names() -> Vec<String> {
#[cfg(not(feature = "liquid"))]
return vec![
"mainnet".to_string(),
"testnet".to_string(),
"regtest".to_string(),
"signet".to_string(),
];
#[cfg(feature = "liquid")]
return vec![
"liquid".to_string(),
"liquidtestnet".to_string(),
"liquidregtest".to_string(),
];
}
}
/// Returns the genesis block hash for `network`, dispatching to the bitcoin
/// or liquid implementation depending on the compile-time feature.
pub fn genesis_hash(network: Network) -> BlockHash {
    #[cfg(not(feature = "liquid"))]
    return bitcoin_genesis_hash(network);
    #[cfg(feature = "liquid")]
    return liquid_genesis_hash(network);
}
/// Returns the (lazily cached) bitcoin-side genesis block hash for `network`.
///
/// Most hashes are computed from rust-bitcoin's `genesis_block`; Testnet4's
/// is hardcoded (see BIP94). Under the "liquid" feature, each liquid network
/// maps to the corresponding bitcoin network's genesis hash.
pub fn bitcoin_genesis_hash(network: Network) -> bitcoin::BlockHash {
    lazy_static! {
        static ref BITCOIN_GENESIS: bitcoin::BlockHash =
            genesis_block(BNetwork::Bitcoin).block_hash();
        static ref TESTNET_GENESIS: bitcoin::BlockHash =
            genesis_block(BNetwork::Testnet).block_hash();
        static ref TESTNET4_GENESIS: bitcoin::BlockHash = bitcoin::BlockHash::from_str(
            "00000000da84f2bafbbc53dee25a72ae507ff4914b867c565be350b0da8bf043"
        )
        .unwrap();
        static ref REGTEST_GENESIS: bitcoin::BlockHash =
            genesis_block(BNetwork::Regtest).block_hash();
        static ref SIGNET_GENESIS: bitcoin::BlockHash =
            genesis_block(BNetwork::Signet).block_hash();
    }
    #[cfg(not(feature = "liquid"))]
    match network {
        Network::Bitcoin => *BITCOIN_GENESIS,
        Network::Testnet => *TESTNET_GENESIS,
        Network::Testnet4 => *TESTNET4_GENESIS,
        Network::Regtest => *REGTEST_GENESIS,
        Network::Signet => *SIGNET_GENESIS,
    }
    #[cfg(feature = "liquid")]
    match network {
        Network::Liquid => *BITCOIN_GENESIS,
        Network::LiquidTestnet => *TESTNET_GENESIS,
        Network::LiquidRegtest => *REGTEST_GENESIS,
    }
}
#[cfg(feature = "liquid")]
pub fn liquid_genesis_hash(network: Network) -> elements::BlockHash {
lazy_static! {
static ref LIQUID_GENESIS: BlockHash =
"1466275836220db2944ca059a3a10ef6fd2ea684b0688d2c379296888a206003"
.parse()
.unwrap();
}
match network {
Network::Liquid => *LIQUID_GENESIS,
// The genesis block for liquid regtest chains varies based on the chain configuration.
// This instead uses an all zeroed-out hash, which doesn't matter in practice because its
// only used for Electrum server discovery, which isn't active on regtest.
_ => Default::default(),
}
}
impl From<&str> for Network {
    /// Parses a network name (as supplied on the command line).
    ///
    /// Panics on unknown names. NOTE(review): "testnet4" is accepted here but
    /// is not listed by `Network::names()` — confirm whether that is intended.
    fn from(network_name: &str) -> Self {
        match network_name {
            #[cfg(not(feature = "liquid"))]
            "mainnet" => Network::Bitcoin,
            #[cfg(not(feature = "liquid"))]
            "testnet" => Network::Testnet,
            #[cfg(not(feature = "liquid"))]
            "testnet4" => Network::Testnet4,
            #[cfg(not(feature = "liquid"))]
            "regtest" => Network::Regtest,
            #[cfg(not(feature = "liquid"))]
            "signet" => Network::Signet,
            #[cfg(feature = "liquid")]
            "liquid" => Network::Liquid,
            #[cfg(feature = "liquid")]
            "liquidtestnet" => Network::LiquidTestnet,
            #[cfg(feature = "liquid")]
            "liquidregtest" => Network::LiquidRegtest,
            _ => panic!("unsupported Bitcoin network: {:?}", network_name),
        }
    }
}
#[cfg(not(feature = "liquid"))]
impl From<Network> for BNetwork {
fn from(network: Network) -> Self {
match network {
Network::Bitcoin => BNetwork::Bitcoin,
Network::Testnet => BNetwork::Testnet,
Network::Testnet4 => BNetwork::Testnet,
Network::Regtest => BNetwork::Regtest,
Network::Signet => BNetwork::Signet,
}
}
}
#[cfg(not(feature = "liquid"))]
impl From<BNetwork> for Network {
    fn from(network: BNetwork) -> Self {
        // NOTE(review): rust-bitcoin's Network has no Testnet4 variant, so
        // Network::Testnet4 cannot round-trip through BNetwork — converting
        // back lands on Network::Testnet.
        match network {
            BNetwork::Bitcoin => Network::Bitcoin,
            BNetwork::Testnet => Network::Testnet,
            BNetwork::Regtest => Network::Regtest,
            BNetwork::Signet => Network::Signet,
        }
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/daemon.rs | src/daemon.rs | use std::collections::{HashMap, HashSet};
use std::io::{BufRead, BufReader, Lines, Write};
use std::net::{SocketAddr, TcpStream};
use std::path::PathBuf;
use std::sync::{Arc, Mutex};
use std::time::Duration;
use base64;
use bitcoin::hashes::hex::{FromHex, ToHex};
use glob;
use hex;
use itertools::Itertools;
use serde_json::{from_str, from_value, Value};
#[cfg(not(feature = "liquid"))]
use bitcoin::consensus::encode::{deserialize, serialize};
#[cfg(feature = "liquid")]
use elements::encode::{deserialize, serialize};
use crate::chain::{Block, BlockHash, BlockHeader, Network, Transaction, Txid};
use crate::config::BITCOIND_SUBVER;
use crate::metrics::{HistogramOpts, HistogramVec, Metrics};
use crate::signal::Waiter;
use crate::util::HeaderList;
use crate::errors::*;
/// Parses a JSON string value into a hex-decodable hash type `T`.
fn parse_hash<T>(value: &Value) -> Result<T>
where
    T: FromHex,
{
    T::from_hex(
        value
            .as_str()
            .chain_err(|| format!("non-string value: {}", value))?,
    )
    .chain_err(|| format!("non-hex value: {}", value))
}
/// Decodes a hex-encoded block header from a JSON string value.
fn header_from_value(value: Value) -> Result<BlockHeader> {
    let header_hex = value
        .as_str()
        .chain_err(|| format!("non-string header: {}", value))?;
    let header_bytes = hex::decode(header_hex).chain_err(|| "non-hex header")?;
    deserialize(&header_bytes).chain_err(|| format!("failed to parse header {}", header_hex))
}

/// Decodes a hex-encoded block from a JSON string value.
fn block_from_value(value: Value) -> Result<Block> {
    let block_hex = value.as_str().chain_err(|| "non-string block")?;
    let block_bytes = hex::decode(block_hex).chain_err(|| "non-hex block")?;
    deserialize(&block_bytes).chain_err(|| format!("failed to parse block {}", block_hex))
}

/// Decodes a hex-encoded transaction from a JSON string value.
fn tx_from_value(value: Value) -> Result<Transaction> {
    let tx_hex = value.as_str().chain_err(|| "non-string tx")?;
    let tx_bytes = hex::decode(tx_hex).chain_err(|| "non-hex tx")?;
    deserialize(&tx_bytes).chain_err(|| format!("failed to parse tx {}", tx_hex))
}
/// Parse JSONRPC error code, if exists.
fn parse_error_code(err: &Value) -> Option<i64> {
err.as_object()?.get("code")?.as_i64()
}
/// Validates one JSONRPC reply object and extracts its `result` field.
///
/// A non-null `error` field becomes an error; code -28 (RPC_IN_WARMUP) is
/// mapped to a `Connection` error so the caller reconnects and retries.
/// The reply's `id` must match `expected_id`.
fn parse_jsonrpc_reply(mut reply: Value, method: &str, expected_id: u64) -> Result<Value> {
    if let Some(reply_obj) = reply.as_object_mut() {
        if let Some(err) = reply_obj.get("error") {
            if !err.is_null() {
                if let Some(code) = parse_error_code(err) {
                    match code {
                        // RPC_IN_WARMUP -> retry by later reconnection
                        -28 => bail!(ErrorKind::Connection(err.to_string())),
                        _ => bail!("{} RPC error: {}", method, err),
                    }
                }
            }
        }
        let id = reply_obj
            .get("id")
            .chain_err(|| format!("no id in reply: {:?}", reply_obj))?
            .clone();
        if id != expected_id {
            bail!(
                "wrong {} response id {}, expected {}",
                method,
                id,
                expected_id
            );
        }
        // take() moves the result out, leaving Null behind in the reply.
        if let Some(result) = reply_obj.get_mut("result") {
            return Ok(result.take());
        }
        bail!("no result in reply: {:?}", reply_obj);
    }
    bail!("non-object reply: {:?}", reply);
}
#[derive(Serialize, Deserialize, Debug)]
pub struct BlockchainInfo {
pub chain: String,
pub blocks: u32,
pub headers: u32,
pub bestblockhash: String,
pub pruned: bool,
pub verificationprogress: f32,
pub initialblockdownload: Option<bool>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct MempoolInfo {
pub loaded: bool,
}
#[derive(Serialize, Deserialize, Debug)]
struct NetworkInfo {
version: u64,
subversion: String,
relayfee: f64, // in BTC/kB
}
#[derive(Serialize, Deserialize, Debug)]
struct MempoolFees {
base: f64,
#[serde(rename = "effective-feerate")]
effective_feerate: f64,
#[serde(rename = "effective-includes")]
effective_includes: Vec<String>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct MempoolAcceptResult {
txid: String,
wtxid: String,
allowed: Option<bool>,
vsize: Option<u32>,
fees: Option<MempoolFees>,
#[serde(rename = "reject-reason")]
reject_reason: Option<String>,
}
#[derive(Serialize, Deserialize, Debug)]
struct MempoolFeesSubmitPackage {
base: f64,
#[serde(rename = "effective-feerate")]
effective_feerate: Option<f64>,
#[serde(rename = "effective-includes")]
effective_includes: Option<Vec<String>>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct SubmitPackageResult {
package_msg: String,
#[serde(rename = "tx-results")]
tx_results: HashMap<String, TxResult>,
#[serde(rename = "replaced-transactions")]
replaced_transactions: Option<Vec<String>>,
}
#[derive(Serialize, Deserialize, Debug)]
pub struct TxResult {
txid: String,
#[serde(rename = "other-wtxid")]
other_wtxid: Option<String>,
vsize: Option<u32>,
fees: Option<MempoolFeesSubmitPackage>,
error: Option<String>,
}
pub trait CookieGetter: Send + Sync {
fn get(&self) -> Result<Vec<u8>>;
}
struct Connection {
tx: TcpStream,
rx: Lines<BufReader<TcpStream>>,
cookie_getter: Arc<dyn CookieGetter>,
addr: SocketAddr,
signal: Waiter,
}
/// Connects to `addr`, retrying every 3 seconds until the daemon is
/// reachable; aborts early if a shutdown signal arrives while waiting.
fn tcp_connect(addr: SocketAddr, signal: &Waiter) -> Result<TcpStream> {
    loop {
        match TcpStream::connect(addr) {
            Ok(conn) => return Ok(conn),
            Err(err) => {
                warn!("failed to connect daemon at {}: {}", addr, err);
                signal.wait(Duration::from_secs(3), false)?;
                continue;
            }
        }
    }
}
impl Connection {
    /// Opens a TCP connection to the daemon's JSONRPC endpoint (retrying via
    /// `tcp_connect` until it succeeds or a shutdown signal arrives).
    fn new(
        addr: SocketAddr,
        cookie_getter: Arc<dyn CookieGetter>,
        signal: Waiter,
    ) -> Result<Connection> {
        let conn = tcp_connect(addr, &signal)?;
        // Clone the stream so reads and writes use independent handles.
        let reader = BufReader::new(
            conn.try_clone()
                .chain_err(|| format!("failed to clone {:?}", conn))?,
        );
        Ok(Connection {
            tx: conn,
            rx: reader.lines(),
            cookie_getter,
            addr,
            signal,
        })
    }

    /// Re-establishes the connection with the same address and credentials.
    fn reconnect(&self) -> Result<Connection> {
        Connection::new(self.addr, self.cookie_getter.clone(), self.signal.clone())
    }

    /// Sends a JSONRPC request body as a minimal HTTP/1.1 POST with basic
    /// auth taken from the cookie getter.
    fn send(&mut self, request: &str) -> Result<()> {
        let cookie = &self.cookie_getter.get()?;
        let msg = format!(
            "POST / HTTP/1.1\nAuthorization: Basic {}\nContent-Length: {}\n\n{}",
            base64::encode(cookie),
            request.len(),
            request,
        );
        self.tx.write_all(msg.as_bytes()).chain_err(|| {
            ErrorKind::Connection("disconnected from daemon while sending".to_owned())
        })
    }

    /// Reads one HTTP response and returns its body.
    ///
    /// Only 200 and 500 statuses are accepted (a 500 body is expected to
    /// carry a JSONRPC error object); anything else is an error.
    fn recv(&mut self) -> Result<String> {
        // TODO: use proper HTTP parser.
        let mut in_header = true;
        let mut contents: Option<String> = None;
        let iter = self.rx.by_ref();
        let status = iter
            .next()
            .chain_err(|| {
                ErrorKind::Connection("disconnected from daemon while receiving".to_owned())
            })?
            .chain_err(|| ErrorKind::Connection("failed to read status".to_owned()))?;
        let mut headers = HashMap::new();
        for line in iter {
            let line = line.chain_err(|| ErrorKind::Connection("failed to read".to_owned()))?;
            if line.is_empty() {
                in_header = false; // next line should contain the actual response.
            } else if in_header {
                let parts: Vec<&str> = line.splitn(2, ": ").collect();
                if parts.len() == 2 {
                    headers.insert(parts[0].to_owned(), parts[1].to_owned());
                } else {
                    warn!("invalid header: {:?}", line);
                }
            } else {
                contents = Some(line);
                break;
            }
        }
        let contents =
            contents.chain_err(|| ErrorKind::Connection("no reply from daemon".to_owned()))?;
        let contents_length: &str = headers
            .get("Content-Length")
            .chain_err(|| format!("Content-Length is missing: {:?}", headers))?;
        let contents_length: usize = contents_length
            .parse()
            .chain_err(|| format!("invalid Content-Length: {:?}", contents_length))?;
        // The line iterator consumes the trailing EOL, so the body should be
        // one byte shorter than Content-Length. Use checked_sub so a malformed
        // "Content-Length: 0" yields an error instead of an integer-underflow
        // panic (or silent wraparound in release builds).
        let expected_length = contents_length
            .checked_sub(1)
            .chain_err(|| format!("invalid Content-Length: {:?}", contents_length))?;
        if expected_length != contents.len() {
            bail!(ErrorKind::Connection(format!(
                "expected {} bytes, got {}",
                expected_length,
                contents.len()
            )));
        }
        Ok(if status == "HTTP/1.1 200 OK" {
            contents
        } else if status == "HTTP/1.1 500 Internal Server Error" {
            warn!("HTTP status: {}", status);
            contents // the contents should have a JSONRPC error field
        } else {
            bail!(
                "request failed {:?}: {:?} = {:?}",
                status,
                headers,
                contents
            );
        })
    }
}
/// Thread-safe, monotonically increasing counter (used for JSONRPC ids).
struct Counter {
    value: Mutex<u64>,
}

impl Counter {
    /// Starts counting from zero; the first call to `next()` returns 1.
    fn new() -> Self {
        Counter {
            value: Mutex::new(0),
        }
    }

    /// Increments the counter and returns the new value.
    fn next(&self) -> u64 {
        let mut current = self.value.lock().unwrap();
        *current += 1;
        *current
    }
}
pub struct Daemon {
daemon_dir: PathBuf,
blocks_dir: PathBuf,
network: Network,
magic: Option<u32>,
conn: Mutex<Connection>,
message_id: Counter, // for monotonic JSONRPC 'id'
signal: Waiter,
// monitoring
latency: HistogramVec,
size: HistogramVec,
}
impl Daemon {
#[allow(clippy::too_many_arguments)]
pub fn new(
daemon_dir: PathBuf,
blocks_dir: PathBuf,
daemon_rpc_addr: SocketAddr,
cookie_getter: Arc<dyn CookieGetter>,
network: Network,
magic: Option<u32>,
signal: Waiter,
metrics: &Metrics,
) -> Result<Daemon> {
let daemon = Daemon {
daemon_dir,
blocks_dir,
network,
magic,
conn: Mutex::new(Connection::new(
daemon_rpc_addr,
cookie_getter,
signal.clone(),
)?),
message_id: Counter::new(),
signal: signal.clone(),
latency: metrics.histogram_vec(
HistogramOpts::new("daemon_rpc", "Bitcoind RPC latency (in seconds)"),
&["method"],
),
size: metrics.histogram_vec(
HistogramOpts::new("daemon_bytes", "Bitcoind RPC size (in bytes)"),
&["method", "dir"],
),
};
let network_info = daemon.getnetworkinfo()?;
info!("{:?}", network_info);
if network_info.version < 16_00_00 {
bail!(
"{} is not supported - please use bitcoind 0.16+",
network_info.subversion,
)
}
// Insert the subversion (/Satoshi xx.xx.xx(comment)/) string from bitcoind
_ = BITCOIND_SUBVER.set(network_info.subversion);
let blockchain_info = daemon.getblockchaininfo()?;
info!("{:?}", blockchain_info);
if blockchain_info.pruned {
bail!("pruned node is not supported (use '-prune=0' bitcoind flag)".to_owned())
}
loop {
let info = daemon.getblockchaininfo()?;
let mempool = daemon.getmempoolinfo()?;
let ibd_done = if network.is_regtest() {
info.blocks == info.headers
} else {
!info.initialblockdownload.unwrap_or(false)
};
if mempool.loaded && ibd_done && info.blocks == info.headers {
break;
}
warn!(
"waiting for bitcoind sync and mempool load to finish: {}/{} blocks, verification progress: {:.3}%, mempool loaded: {}",
info.blocks,
info.headers,
info.verificationprogress * 100.0,
mempool.loaded
);
signal.wait(Duration::from_secs(5), false)?;
}
Ok(daemon)
}
pub fn reconnect(&self) -> Result<Daemon> {
Ok(Daemon {
daemon_dir: self.daemon_dir.clone(),
blocks_dir: self.blocks_dir.clone(),
network: self.network,
magic: self.magic,
conn: Mutex::new(self.conn.lock().unwrap().reconnect()?),
message_id: Counter::new(),
signal: self.signal.clone(),
latency: self.latency.clone(),
size: self.size.clone(),
})
}
pub fn list_blk_files(&self) -> Result<Vec<PathBuf>> {
let path = self.blocks_dir.join("blk*.dat");
debug!("listing block files at {:?}", path);
let mut paths: Vec<PathBuf> = glob::glob(path.to_str().unwrap())
.chain_err(|| "failed to list blk*.dat files")?
.map(|res| res.unwrap())
.collect();
paths.sort();
Ok(paths)
}
pub fn magic(&self) -> u32 {
self.magic.unwrap_or_else(|| self.network.magic())
}
fn call_jsonrpc(&self, method: &str, request: &Value) -> Result<Value> {
let mut conn = self.conn.lock().unwrap();
let timer = self.latency.with_label_values(&[method]).start_timer();
let request = request.to_string();
conn.send(&request)?;
self.size
.with_label_values(&[method, "send"])
.observe(request.len() as f64);
let response = conn.recv()?;
let result: Value = from_str(&response).chain_err(|| "invalid JSON")?;
timer.observe_duration();
self.size
.with_label_values(&[method, "recv"])
.observe(response.len() as f64);
Ok(result)
}
    /// Sends `method` with each params value in `params_list` as JSONRPC
    /// batch requests, chunked to at most 50k calls per HTTP roundtrip.
    ///
    /// Individual call failures are tolerated up to
    /// `failure_threshold * params_list.len()` (rounded); past that, the last
    /// error is returned. Failed calls are omitted from the results, so the
    /// output may be shorter than the input.
    fn handle_request_batch(
        &self,
        method: &str,
        params_list: &[Value],
        failure_threshold: f64,
    ) -> Result<Vec<Value>> {
        // The same id is reused for every request in this batch;
        // parse_jsonrpc_reply checks each reply against it.
        let id = self.message_id.next();
        let chunks = params_list
            .iter()
            .map(|params| json!({"method": method, "params": params, "id": id}))
            .chunks(50_000); // Max Amount of batched requests
        let mut results = vec![];
        let total_requests = params_list.len();
        let mut failed_requests: u64 = 0;
        let threshold = (failure_threshold * total_requests as f64).round() as u64;
        let mut n = 0;
        for chunk in &chunks {
            let reqs = chunk.collect();
            let mut replies = self.call_jsonrpc(method, &reqs)?;
            if let Some(replies_vec) = replies.as_array_mut() {
                for reply in replies_vec {
                    n += 1;
                    match parse_jsonrpc_reply(reply.take(), method, id) {
                        Ok(parsed_reply) => results.push(parsed_reply),
                        Err(e) => {
                            failed_requests += 1;
                            warn!(
                                "batch request {} {}/{} failed: {}",
                                method,
                                n,
                                total_requests,
                                e.to_string()
                            );
                            // abort and return the last error once a threshold number of requests have failed
                            if failed_requests > threshold {
                                return Err(e);
                            }
                        }
                    }
                }
            } else {
                bail!("non-array replies: {:?}", replies);
            }
        }
        Ok(results)
    }
    /// Runs `handle_request_batch`, transparently reconnecting and retrying
    /// (after a 3-second pause) on connection-level errors; all other errors
    /// are returned as-is.
    fn retry_request_batch(
        &self,
        method: &str,
        params_list: &[Value],
        failure_threshold: f64,
    ) -> Result<Vec<Value>> {
        loop {
            match self.handle_request_batch(method, params_list, failure_threshold) {
                Err(Error(ErrorKind::Connection(msg), _)) => {
                    warn!("reconnecting to bitcoind: {}", msg);
                    self.signal.wait(Duration::from_secs(3), false)?;
                    let mut conn = self.conn.lock().unwrap();
                    *conn = conn.reconnect()?;
                    continue;
                }
                result => return result,
            }
        }
    }

    /// Sends a single JSONRPC request (no tolerated failures) and returns its
    /// result.
    fn request(&self, method: &str, params: Value) -> Result<Value> {
        let mut values = self.retry_request_batch(method, &[params], 0.0)?;
        assert_eq!(values.len(), 1);
        Ok(values.remove(0))
    }

    /// Sends a batch of JSONRPC requests where every call must succeed.
    fn requests(&self, method: &str, params_list: &[Value]) -> Result<Vec<Value>> {
        self.retry_request_batch(method, params_list, 0.0)
    }
// bitcoind JSONRPC API:
pub fn getblockchaininfo(&self) -> Result<BlockchainInfo> {
let info: Value = self.request("getblockchaininfo", json!([]))?;
from_value(info).chain_err(|| "invalid blockchain info")
}
fn getmempoolinfo(&self) -> Result<MempoolInfo> {
let info: Value = self.request("getmempoolinfo", json!([]))?;
from_value(info).chain_err(|| "invalid mempool info")
}
fn getnetworkinfo(&self) -> Result<NetworkInfo> {
let info: Value = self.request("getnetworkinfo", json!([]))?;
from_value(info).chain_err(|| "invalid network info")
}
pub fn getbestblockhash(&self) -> Result<BlockHash> {
parse_hash(&self.request("getbestblockhash", json!([]))?)
}
pub fn getblockheader(&self, blockhash: &BlockHash) -> Result<BlockHeader> {
header_from_value(self.request(
"getblockheader",
json!([blockhash.to_hex(), /*verbose=*/ false]),
)?)
}
pub fn getblockheaders(&self, heights: &[usize]) -> Result<Vec<BlockHeader>> {
let heights: Vec<Value> = heights.iter().map(|height| json!([height])).collect();
let params_list: Vec<Value> = self
.requests("getblockhash", &heights)?
.into_iter()
.map(|hash| json!([hash, /*verbose=*/ false]))
.collect();
let mut result = vec![];
for h in self.requests("getblockheader", ¶ms_list)? {
result.push(header_from_value(h)?);
}
Ok(result)
}
pub fn getblock(&self, blockhash: &BlockHash) -> Result<Block> {
let block = block_from_value(
self.request("getblock", json!([blockhash.to_hex(), /*verbose=*/ false]))?,
)?;
assert_eq!(block.block_hash(), *blockhash);
Ok(block)
}
pub fn getblock_raw(&self, blockhash: &BlockHash, verbose: u32) -> Result<Value> {
self.request("getblock", json!([blockhash.to_hex(), verbose]))
}
pub fn getblocks(&self, blockhashes: &[BlockHash]) -> Result<Vec<Block>> {
let params_list: Vec<Value> = blockhashes
.iter()
.map(|hash| json!([hash.to_hex(), /*verbose=*/ false]))
.collect();
let values = self.requests("getblock", ¶ms_list)?;
let mut blocks = vec![];
for value in values {
blocks.push(block_from_value(value)?);
}
Ok(blocks)
}
pub fn gettransactions(&self, txhashes: &[&Txid]) -> Result<Vec<Transaction>> {
let params_list: Vec<Value> = txhashes
.iter()
.map(|txhash| json!([txhash.to_hex(), /*verbose=*/ false]))
.collect();
let values = self.retry_request_batch("getrawtransaction", ¶ms_list, 0.25)?;
let mut txs = vec![];
for value in values {
txs.push(tx_from_value(value)?);
}
// missing transactions are skipped, so the number of txs returned may be less than the number of txids requested
Ok(txs)
}
pub fn gettransaction_raw(
&self,
txid: &Txid,
blockhash: &BlockHash,
verbose: bool,
) -> Result<Value> {
self.request(
"getrawtransaction",
json!([txid.to_hex(), verbose, blockhash]),
)
}
pub fn getmempooltx(&self, txhash: &Txid) -> Result<Transaction> {
let value = self.request(
"getrawtransaction",
json!([txhash.to_hex(), /*verbose=*/ false]),
)?;
tx_from_value(value)
}
pub fn getmempooltxids(&self) -> Result<HashSet<Txid>> {
let res = self.request("getrawmempool", json!([/*verbose=*/ false]))?;
serde_json::from_value(res).chain_err(|| "invalid getrawmempool reply")
}
pub fn broadcast(&self, tx: &Transaction) -> Result<Txid> {
self.broadcast_raw(&hex::encode(serialize(tx)))
}
pub fn broadcast_raw(&self, txhex: &str) -> Result<Txid> {
let txid = self.request("sendrawtransaction", json!([txhex]))?;
Txid::from_hex(txid.as_str().chain_err(|| "non-string txid")?)
.chain_err(|| "failed to parse txid")
}
pub fn test_mempool_accept(
&self,
txhex: Vec<String>,
maxfeerate: Option<f64>,
) -> Result<Vec<MempoolAcceptResult>> {
let params = match maxfeerate {
Some(rate) => json!([txhex, format!("{:.8}", rate)]),
None => json!([txhex]),
};
let result = self.request("testmempoolaccept", params)?;
serde_json::from_value::<Vec<MempoolAcceptResult>>(result)
.chain_err(|| "invalid testmempoolaccept reply")
}
pub fn submit_package(
&self,
txhex: Vec<String>,
maxfeerate: Option<f64>,
maxburnamount: Option<f64>,
) -> Result<SubmitPackageResult> {
let params = match (maxfeerate, maxburnamount) {
(Some(rate), Some(burn)) => {
json!([txhex, format!("{:.8}", rate), format!("{:.8}", burn)])
}
(Some(rate), None) => json!([txhex, format!("{:.8}", rate)]),
(None, Some(burn)) => json!([txhex, null, format!("{:.8}", burn)]),
(None, None) => json!([txhex]),
};
let result = self.request("submitpackage", params)?;
serde_json::from_value::<SubmitPackageResult>(result)
.chain_err(|| "invalid submitpackage reply")
}
// Get estimated feerates for the provided confirmation targets using a batch RPC request
// Missing estimates are logged but do not cause a failure, whatever is available is returned
#[allow(clippy::float_cmp)]
pub fn estimatesmartfee_batch(&self, conf_targets: &[u16]) -> Result<HashMap<u16, f64>> {
let params_list: Vec<Value> = conf_targets.iter().map(|t| json!([t])).collect();
Ok(self
.requests("estimatesmartfee", ¶ms_list)?
.iter()
.zip(conf_targets)
.filter_map(|(reply, target)| {
if !reply["errors"].is_null() {
warn!(
"failed estimating fee for target {}: {:?}",
target, reply["errors"]
);
return None;
}
let feerate = reply["feerate"]
.as_f64()
.unwrap_or_else(|| panic!("invalid estimatesmartfee response: {:?}", reply));
if feerate == -1f64 {
warn!("not enough data to estimate fee for target {}", target);
return None;
}
// from BTC/kB to sat/b
Some((*target, feerate * 100_000f64))
})
.collect())
}
fn get_all_headers(&self, tip: &BlockHash) -> Result<Vec<BlockHeader>> {
let info: Value = self.request("getblockheader", json!([tip.to_hex()]))?;
let tip_height = info
.get("height")
.expect("missing height")
.as_u64()
.expect("non-numeric height") as usize;
let all_heights: Vec<usize> = (0..=tip_height).collect();
let chunk_size = 100_000;
let mut result = vec![];
for heights in all_heights.chunks(chunk_size) {
trace!("downloading {} block headers", heights.len());
let mut headers = self.getblockheaders(heights)?;
assert!(headers.len() == heights.len());
result.append(&mut headers);
}
let mut blockhash = BlockHash::default();
for header in &result {
assert_eq!(header.prev_blockhash, blockhash);
blockhash = header.block_hash();
}
assert_eq!(blockhash, *tip);
Ok(result)
}
// Returns a list of BlockHeaders in ascending height (i.e. the tip is last).
pub fn get_new_headers(
&self,
indexed_headers: &HeaderList,
bestblockhash: &BlockHash,
) -> Result<Vec<BlockHeader>> {
// Iterate back over headers until known blockash is found:
if indexed_headers.is_empty() {
debug!("downloading all block headers up to {}", bestblockhash);
return self.get_all_headers(bestblockhash);
}
debug!(
"downloading new block headers ({} already indexed) from {}",
indexed_headers.len(),
bestblockhash,
);
let mut new_headers = vec![];
let null_hash = BlockHash::default();
let mut blockhash = *bestblockhash;
while blockhash != null_hash {
if indexed_headers.header_by_blockhash(&blockhash).is_some() {
break;
}
let header = self
.getblockheader(&blockhash)
.chain_err(|| format!("failed to get {} header", blockhash))?;
blockhash = header.prev_blockhash;
new_headers.push(header);
}
trace!("downloaded {} block headers", new_headers.len());
new_headers.reverse(); // so the tip is the last vector entry
Ok(new_headers)
}
pub fn get_relayfee(&self) -> Result<f64> {
let relayfee = self.getnetworkinfo()?.relayfee;
// from BTC/kB to sat/b
Ok(relayfee * 100_000f64)
}
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/metrics.rs | src/metrics.rs | use page_size;
use prometheus::{self, Encoder};
use std::fs;
use std::io;
use std::net::SocketAddr;
use std::thread;
use std::time::Duration;
use tiny_http;
pub use prometheus::{
GaugeVec, Histogram, HistogramOpts, HistogramTimer, HistogramVec, IntCounter as Counter,
IntCounterVec as CounterVec, IntGauge as Gauge, Opts as MetricOpts,
};
use crate::util::spawn_thread;
use crate::errors::*;
pub struct Metrics {
reg: prometheus::Registry,
addr: SocketAddr,
}
impl Metrics {
pub fn new(addr: SocketAddr) -> Metrics {
Metrics {
reg: prometheus::Registry::new(),
addr,
}
}
pub fn counter(&self, opts: prometheus::Opts) -> Counter {
let c = Counter::with_opts(opts).unwrap();
self.reg.register(Box::new(c.clone())).unwrap();
c
}
pub fn counter_vec(&self, opts: prometheus::Opts, labels: &[&str]) -> CounterVec {
let c = CounterVec::new(opts, labels).unwrap();
self.reg.register(Box::new(c.clone())).unwrap();
c
}
pub fn gauge(&self, opts: prometheus::Opts) -> Gauge {
let g = Gauge::with_opts(opts).unwrap();
self.reg.register(Box::new(g.clone())).unwrap();
g
}
pub fn gauge_vec(&self, opts: prometheus::Opts, labels: &[&str]) -> GaugeVec {
let g = GaugeVec::new(opts, labels).unwrap();
self.reg.register(Box::new(g.clone())).unwrap();
g
}
pub fn histogram(&self, opts: prometheus::HistogramOpts) -> Histogram {
let h = Histogram::with_opts(opts).unwrap();
self.reg.register(Box::new(h.clone())).unwrap();
h
}
pub fn histogram_vec(&self, opts: prometheus::HistogramOpts, labels: &[&str]) -> HistogramVec {
let h = HistogramVec::new(opts, labels).unwrap();
self.reg.register(Box::new(h.clone())).unwrap();
h
}
pub fn start(&self) {
let server = tiny_http::Server::http(self.addr)
.unwrap_or_else(|_| panic!("failed to start monitoring HTTP server at {}", self.addr));
start_process_exporter(self);
let reg = self.reg.clone();
spawn_thread("metrics", move || loop {
if let Err(e) = handle_request(®, server.recv()) {
error!("http error: {}", e);
}
});
}
}
fn handle_request(
reg: &prometheus::Registry,
request: io::Result<tiny_http::Request>,
) -> io::Result<()> {
let request = request?;
let mut buffer = vec![];
prometheus::TextEncoder::new()
.encode(®.gather(), &mut buffer)
.unwrap();
let response = tiny_http::Response::from_data(buffer);
request.respond(response)
}
struct Stats {
utime: f64,
rss: u64,
fds: usize,
}
/// Returns the kernel clock tick rate (`_SC_CLK_TCK`), used to convert
/// /proc utime ticks into seconds.
fn get_ticks_per_second() -> Result<f64> {
    // Safety: This code is taken directly from sysconf
    match unsafe { libc::sysconf(libc::_SC_CLK_TCK) } {
        -1 => Err("Clock Tick unsupported".into()),
        ret => Ok(ret as f64),
    }
}
/// Reads process stats (user CPU seconds, RSS bytes, open fd count) from
/// procfs. On macOS (no /proc) all-zero stats are returned instead.
fn parse_stats() -> Result<Stats> {
    if cfg!(target_os = "macos") {
        return Ok(Stats {
            utime: 0f64,
            rss: 0u64,
            fds: 0usize,
        });
    }
    let value = fs::read_to_string("/proc/self/stat").chain_err(|| "failed to read stats")?;
    let parts: Vec<&str> = value.split_whitespace().collect();
    let page_size = page_size::get() as u64;
    let ticks_per_second = get_ticks_per_second().expect("failed to get _SC_CLK_TCK");
    // Parses the whitespace-separated field at `index`, with `name` used for
    // error context only.
    let parse_part = |index: usize, name: &str| -> Result<u64> {
        parts
            .get(index)
            .chain_err(|| format!("missing {}: {:?}", name, parts))?
            .parse::<u64>()
            .chain_err(|| format!("invalid {}: {:?}", name, parts))
    };
    // For details, see '/proc/[pid]/stat' section at `man 5 proc`:
    let utime = parse_part(13, "utime")? as f64 / ticks_per_second; // ticks -> seconds
    let rss = parse_part(23, "rss")? * page_size; // pages -> bytes
    let fds = fs::read_dir("/proc/self/fd")
        .chain_err(|| "failed to read fd directory")?
        .count();
    Ok(Stats { utime, rss, fds })
}
/// Registers process gauges (RSS, CPU time, fd count) and spawns a thread
/// that refreshes them from procfs every 5 seconds.
fn start_process_exporter(metrics: &Metrics) {
    let rss = metrics.gauge(MetricOpts::new(
        "process_memory_rss",
        "Resident memory size [bytes]",
    ));
    let cpu = metrics.gauge_vec(
        MetricOpts::new("process_cpu_usage", "CPU usage by this process [seconds]"),
        &["type"],
    );
    let fds = metrics.gauge(MetricOpts::new("process_fs_fds", "# of file descriptors"));
    spawn_thread("exporter", move || loop {
        match parse_stats() {
            Ok(stats) => {
                cpu.with_label_values(&["utime"]).set(stats.utime);
                rss.set(stats.rss as i64);
                fds.set(stats.fds as i64);
            }
            // Export failures are logged but never kill the exporter thread.
            Err(e) => warn!("failed to export stats: {}", e),
        }
        thread::sleep(Duration::from_secs(5));
    });
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/util/electrum_merkle.rs | src/util/electrum_merkle.rs | use crate::chain::{BlockHash, Txid};
use crate::errors::*;
use crate::new_index::ChainQuery;
use bitcoin::hashes::{sha256d::Hash as Sha256dHash, Hash};
/// Compute the Electrum-style merkle proof for `tx_hash` within the
/// block `block_hash`.
///
/// Returns the branch (sibling hashes from leaf toward the root) and
/// the transaction's position within the block.
///
/// # Errors
///
/// Fails when the block's txids are not indexed, or when the txid is
/// not part of the block.
pub fn get_tx_merkle_proof(
    chain: &ChainQuery,
    tx_hash: &Txid,
    block_hash: &BlockHash,
) -> Result<(Vec<Sha256dHash>, usize)> {
    let txids = chain
        .get_block_txids(block_hash)
        .chain_err(|| format!("missing block txids for #{}", block_hash))?;
    let pos = txids
        .iter()
        .position(|txid| txid == tx_hash)
        .chain_err(|| format!("missing txid {}", tx_hash))?;
    let txids = txids.into_iter().map(Sha256dHash::from).collect();
    let (branch, _root) = create_merkle_branch_and_root(txids, pos);
    Ok((branch, pos))
}
/// Compute the merkle proof for the block header at `height` against a
/// checkpoint at `cp_height` (Electrum `blockchain.block.header` proof).
///
/// The tree leaves are the header hashes of heights `0..=cp_height`.
/// Returns the branch for `height` plus the resulting merkle root.
///
/// # Errors
///
/// Fails when `cp_height < height`, when the checkpoint is above the
/// current best height, or when any header hash in range is missing.
pub fn get_header_merkle_proof(
    chain: &ChainQuery,
    height: usize,
    cp_height: usize,
) -> Result<(Vec<Sha256dHash>, Sha256dHash)> {
    if cp_height < height {
        bail!("cp_height #{} < height #{}", cp_height, height);
    }
    let best_height = chain.best_height();
    if best_height < cp_height {
        bail!(
            "cp_height #{} above best block height #{}",
            cp_height,
            best_height
        );
    }
    let heights: Vec<usize> = (0..=cp_height).collect();
    // Any None (missing header) collapses the whole collect to None.
    let header_hashes: Vec<BlockHash> = heights
        .into_iter()
        .map(|height| chain.hash_by_height(height))
        .collect::<Option<Vec<BlockHash>>>()
        .chain_err(|| "missing block headers")?;
    let header_hashes = header_hashes.into_iter().map(Sha256dHash::from).collect();
    Ok(create_merkle_branch_and_root(header_hashes, height))
}
/// Look up the txid at position `tx_pos` in the block at `height`
/// (Electrum `blockchain.transaction.id_from_pos`).
///
/// When `want_merkle` is set, also returns the transaction's merkle
/// branch; otherwise the branch is empty.
///
/// # Errors
///
/// Fails when the block, its txid index, or the requested position is
/// missing.
pub fn get_id_from_pos(
    chain: &ChainQuery,
    height: usize,
    tx_pos: usize,
    want_merkle: bool,
) -> Result<(Txid, Vec<Sha256dHash>)> {
    let header_hash = chain
        .hash_by_height(height)
        .chain_err(|| format!("missing block #{}", height))?;
    let txids = chain
        .get_block_txids(&header_hash)
        .chain_err(|| format!("missing block txids #{}", height))?;
    let txid = *txids
        .get(tx_pos)
        .chain_err(|| format!("No tx in position #{} in block #{}", tx_pos, height))?;
    let txids = txids.into_iter().map(Sha256dHash::from).collect();
    let branch = if want_merkle {
        create_merkle_branch_and_root(txids, tx_pos).0
    } else {
        vec![]
    };
    Ok((txid, branch))
}
/// Hash the concatenation of two sibling nodes to produce their merkle
/// parent (double-SHA256 over `left || right`).
fn merklize(left: Sha256dHash, right: Sha256dHash) -> Sha256dHash {
    // Two 32-byte hashes; preallocate the exact 64-byte buffer.
    let mut data = Vec::with_capacity(64);
    data.extend_from_slice(&left[..]);
    data.extend_from_slice(&right[..]);
    Sha256dHash::hash(&data)
}
/// Build the merkle branch for the leaf at `index`, plus the merkle
/// root of `hashes`, using Bitcoin's convention of duplicating the last
/// node on levels with an odd number of entries.
///
/// Panics if `hashes` is empty or `index` is out of range.
fn create_merkle_branch_and_root(
    mut hashes: Vec<Sha256dHash>,
    mut index: usize,
) -> (Vec<Sha256dHash>, Sha256dHash) {
    let mut merkle = vec![];
    while hashes.len() > 1 {
        // Odd-sized level: duplicate the last hash so every node pairs up.
        if hashes.len() % 2 != 0 {
            let last = *hashes.last().unwrap();
            hashes.push(last);
        }
        // Record the sibling of the current node in the branch...
        index = if index % 2 == 0 { index + 1 } else { index - 1 };
        merkle.push(hashes[index]);
        // ...then move to the parent's position on the next level up.
        index /= 2;
        hashes = hashes
            .chunks(2)
            .map(|pair| merklize(pair[0], pair[1]))
            .collect()
    }
    (merkle, hashes[0])
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/util/block.rs | src/util/block.rs | use crate::chain::{BlockHash, BlockHeader};
use crate::errors::*;
use crate::new_index::BlockEntry;
use std::collections::HashMap;
use std::fmt;
use std::iter::FromIterator;
use std::slice;
use time::format_description::well_known::Rfc3339;
use time::OffsetDateTime as DateTime;
const MTP_SPAN: usize = 11;
/// Minimal identification of a confirmed block: height, hash and the
/// header's timestamp.
#[derive(Debug, Serialize, Deserialize, Clone)]
pub struct BlockId {
    pub height: usize,
    pub hash: BlockHash,
    pub time: u32,
}
impl From<&HeaderEntry> for BlockId {
    fn from(header: &HeaderEntry) -> Self {
        BlockId {
            height: header.height(),
            hash: *header.hash(),
            time: header.header().time,
        }
    }
}
/// A block header together with its position in the chain: the height
/// and the (pre-computed) block hash.
#[derive(Eq, PartialEq, Clone)]
pub struct HeaderEntry {
    height: usize,
    hash: BlockHash,
    header: BlockHeader,
}
impl HeaderEntry {
    /// The block's hash.
    pub fn hash(&self) -> &BlockHash {
        &self.hash
    }
    /// The raw block header.
    pub fn header(&self) -> &BlockHeader {
        &self.header
    }
    /// The block's height in the chain.
    pub fn height(&self) -> usize {
        self.height
    }
}
impl fmt::Debug for HeaderEntry {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        // Render the header timestamp as RFC 3339 for readable logs.
        let last_block_time = DateTime::from_unix_timestamp(self.header().time as i64).unwrap();
        write!(
            f,
            "hash={} height={} @ {}",
            self.hash(),
            self.height(),
            last_block_time.format(&Rfc3339).unwrap(),
        )
    }
}
/// An in-memory index of the best chain's headers: ordered by height,
/// with a hash→height map and the current tip hash kept in sync.
pub struct HeaderList {
    // headers[h] is the entry at height h.
    headers: Vec<HeaderEntry>,
    // Maps a block hash to its height. NOTE: entries are inserted but
    // never removed, so after a reorg stale hashes may remain; lookups
    // re-check the hash (see header_by_blockhash).
    heights: HashMap<BlockHash, usize>,
    // Hash of the current best tip (all-zero when empty).
    tip: BlockHash,
}
impl HeaderList {
    /// An empty chain with the tip set to the all-zero hash.
    pub fn empty() -> HeaderList {
        HeaderList {
            headers: vec![],
            heights: HashMap::new(),
            tip: BlockHash::default(),
        }
    }
    /// Build a `HeaderList` by walking `prev_blockhash` links backwards
    /// from `tip_hash` until the null (genesis parent) hash.
    ///
    /// Headers in `headers_map` that are not on the tip's chain are
    /// ignored (logged as orphans). Panics if a header on the chain is
    /// missing from the map.
    pub fn new(
        mut headers_map: HashMap<BlockHash, BlockHeader>,
        tip_hash: BlockHash,
    ) -> HeaderList {
        trace!(
            "processing {} headers, tip at {:?}",
            headers_map.len(),
            tip_hash
        );
        let mut blockhash = tip_hash;
        let mut headers_chain: Vec<BlockHeader> = vec![];
        let null_hash = BlockHash::default();
        // Walk back from the tip; remove() consumes each entry, so a
        // cycle in the input would panic rather than loop forever.
        while blockhash != null_hash {
            let header = headers_map.remove(&blockhash).unwrap_or_else(|| {
                panic!(
                    "missing expected blockhash in headers map: {:?}, pointed from: {:?}",
                    blockhash,
                    headers_chain.last().map(|h| h.block_hash())
                )
            });
            blockhash = header.prev_blockhash;
            headers_chain.push(header);
        }
        // Collected tip-first; order() expects oldest-first.
        headers_chain.reverse();
        trace!(
            "{} chained headers ({} orphan blocks left)",
            headers_chain.len(),
            headers_map.len()
        );
        let mut headers = HeaderList::empty();
        headers.apply(headers.order(headers_chain));
        headers
    }
    /// Assign heights to a contiguous run of raw headers, anchoring the
    /// first header's parent either at the null hash (height 0) or at a
    /// header already present in this list.
    ///
    /// Panics if the run is not internally contiguous or the anchor is
    /// unknown.
    pub fn order(&self, new_headers: Vec<BlockHeader>) -> Vec<HeaderEntry> {
        // header[i] -> header[i-1] (i.e. header.last() is the tip)
        struct HashedHeader {
            blockhash: BlockHash,
            header: BlockHeader,
        }
        let hashed_headers =
            Vec::<HashedHeader>::from_iter(new_headers.into_iter().map(|header| HashedHeader {
                blockhash: header.block_hash(),
                header,
            }));
        for i in 1..hashed_headers.len() {
            assert_eq!(
                hashed_headers[i].header.prev_blockhash,
                hashed_headers[i - 1].blockhash
            );
        }
        let prev_blockhash = match hashed_headers.first() {
            Some(h) => h.header.prev_blockhash,
            None => return vec![], // hashed_headers is empty
        };
        let null_hash = BlockHash::default();
        let new_height: usize = if prev_blockhash == null_hash {
            0
        } else {
            self.header_by_blockhash(&prev_blockhash)
                .unwrap_or_else(|| panic!("{} is not part of the blockchain", prev_blockhash))
                .height()
                + 1
        };
        (new_height..)
            .zip(hashed_headers)
            .map(|(height, hashed_header)| HeaderEntry {
                height,
                hash: hashed_header.blockhash,
                header: hashed_header.header,
            })
            .collect()
    }
    /// Returns any rolled back blocks in order from old tip first and first block in the fork is last
    /// It also returns the blockhash of the post-rollback tip.
    pub fn apply(
        &mut self,
        new_headers: Vec<HeaderEntry>,
    ) -> (Vec<HeaderEntry>, Option<BlockHash>) {
        // new_headers[i] -> new_headers[i - 1] (i.e. new_headers.last() is the tip)
        for i in 1..new_headers.len() {
            assert_eq!(new_headers[i - 1].height() + 1, new_headers[i].height());
            assert_eq!(
                *new_headers[i - 1].hash(),
                new_headers[i].header().prev_blockhash
            );
        }
        // Validate that the first new header connects to what we already
        // have at height - 1 (or to the null hash for genesis).
        let new_height = match new_headers.first() {
            Some(entry) => {
                let height = entry.height();
                let expected_prev_blockhash = if height > 0 {
                    *self.headers[height - 1].hash()
                } else {
                    BlockHash::default()
                };
                assert_eq!(entry.header().prev_blockhash, expected_prev_blockhash);
                height
            }
            None => return (vec![], None),
        };
        debug!(
            "applying {} new headers from height {}",
            new_headers.len(),
            new_height
        );
        let mut removed = self.headers.split_off(new_height); // keep [0..new_height) entries
        // If we reorged, we should return the last blockhash before adding the new chain's blockheaders.
        let reorged_tip = if !removed.is_empty() {
            self.headers.last().map(|be| be.hash()).cloned()
        } else {
            None
        };
        for new_header in new_headers {
            let height = new_header.height();
            assert_eq!(height, self.headers.len());
            self.tip = *new_header.hash();
            self.headers.push(new_header);
            self.heights.insert(self.tip, height);
        }
        // Reverse so the old tip comes first (see doc comment above).
        removed.reverse();
        (removed, reorged_tip)
    }
    /// Look up a header by hash, only if it is still on the best chain.
    pub fn header_by_blockhash(&self, blockhash: &BlockHash) -> Option<&HeaderEntry> {
        let height = self.heights.get(blockhash)?;
        let header = self.headers.get(*height)?;
        // `heights` may hold stale entries from before a reorg; confirm
        // the entry at that height still carries this hash.
        if *blockhash == *header.hash() {
            Some(header)
        } else {
            None
        }
    }
    /// Look up a header by height on the best chain.
    pub fn header_by_height(&self, height: usize) -> Option<&HeaderEntry> {
        self.headers.get(height).inspect(|entry| {
            assert_eq!(entry.height(), height);
        })
    }
    /// Two lists are considered equal when they share the same last
    /// (tip) entry.
    pub fn equals(&self, other: &HeaderList) -> bool {
        self.headers.last() == other.headers.last()
    }
    /// The best tip's hash (all-zero when the list is empty).
    pub fn tip(&self) -> &BlockHash {
        assert_eq!(
            self.tip,
            self.headers.last().map(|h| *h.hash()).unwrap_or_default()
        );
        &self.tip
    }
    /// Number of headers (i.e. best height + 1).
    pub fn len(&self) -> usize {
        self.headers.len()
    }
    pub fn is_empty(&self) -> bool {
        self.headers.is_empty()
    }
    /// Iterate headers from genesis to tip.
    pub fn iter(&self) -> slice::Iter<'_, HeaderEntry> {
        self.headers.iter()
    }
    /// Get the Median Time Past
    // Returns 0 for heights above the tip. NOTE(review): assumes a
    // non-empty list — `self.len() - 1` would underflow and `first().unwrap()`
    // would panic when called on an empty HeaderList; confirm callers
    // only invoke this after the chain is loaded.
    pub fn get_mtp(&self, height: usize) -> u32 {
        // Use the timestamp as the mtp of the genesis block.
        // Matches bitcoind's behaviour: bitcoin-cli getblock `bitcoin-cli getblockhash 0` | jq '.time == .mediantime'
        if height == 0 {
            self.headers.first().unwrap().header.time
        } else if height > self.len() - 1 {
            0
        } else {
            // Median of the timestamps of the last MTP_SPAN blocks
            // (fewer near the start of the chain).
            let mut timestamps = (height.saturating_sub(MTP_SPAN - 1)..=height)
                .map(|p_height| self.headers.get(p_height).unwrap().header.time)
                .collect::<Vec<_>>();
            timestamps.sort_unstable();
            timestamps[timestamps.len() / 2]
        }
    }
}
/// Confirmation status of a block, as exposed over the HTTP API.
#[derive(Serialize, Deserialize)]
pub struct BlockStatus {
    pub in_best_chain: bool,
    // Height on the best chain; None when orphaned.
    pub height: Option<usize>,
    // Hash of the next block on the best chain, if any.
    pub next_best: Option<BlockHash>,
}
impl BlockStatus {
    /// Status for a block that is part of the best chain.
    pub fn confirmed(height: usize, next_best: Option<BlockHash>) -> BlockStatus {
        BlockStatus {
            in_best_chain: true,
            height: Some(height),
            next_best,
        }
    }
    /// Status for a block that is not on the best chain.
    pub fn orphaned() -> BlockStatus {
        BlockStatus {
            in_best_chain: false,
            height: None,
            next_best: None,
        }
    }
}
/// Summary statistics of a block's contents.
#[derive(Serialize, Deserialize, Debug)]
pub struct BlockMeta {
    // "nTx" is the field name used by bitcoind's getblock RPC.
    #[serde(alias = "nTx")]
    pub tx_count: u32,
    pub size: u32,
    pub weight: u32,
}
/// A header entry bundled with the block's stats and its Median Time Past.
pub struct BlockHeaderMeta {
    pub header_entry: HeaderEntry,
    pub meta: BlockMeta,
    pub mtp: u32,
}
impl From<&BlockEntry> for BlockMeta {
    fn from(b: &BlockEntry) -> BlockMeta {
        BlockMeta {
            tx_count: b.block.txdata.len() as u32,
            weight: b.block.weight() as u32,
            size: b.size,
        }
    }
}
impl BlockMeta {
    /// Extract block stats from a bitcoind `getblock` JSON response.
    ///
    /// Values are read via `as_f64` (which accepts both integral and
    /// float JSON numbers) and truncated to `u32`.
    ///
    /// # Errors
    ///
    /// Fails when any of `nTx`, `size` or `weight` is missing or not a
    /// number.
    pub fn parse_getblock(val: ::serde_json::Value) -> Result<BlockMeta> {
        Ok(BlockMeta {
            tx_count: val
                .get("nTx")
                .chain_err(|| "missing nTx")?
                .as_f64()
                .chain_err(|| "nTx not a number")? as u32,
            size: val
                .get("size")
                .chain_err(|| "missing size")?
                .as_f64()
                .chain_err(|| "size not a number")? as u32,
            weight: val
                .get("weight")
                .chain_err(|| "missing weight")?
                .as_f64()
                .chain_err(|| "weight not a number")? as u32,
        })
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/util/bincode_util.rs | src/util/bincode_util.rs | //! This module creates two sets of serialize and deserialize for bincode.
//! They explicitly spell out the bincode settings so that switching to
//! new versions in the future is less error prone.
//!
//! This is a list of all the row types and their settings for bincode.
//! +--------------+--------+------------+----------------+------------+
//! | | Endian | Int Length | Allow Trailing | Byte Limit |
//! +--------------+--------+------------+----------------+------------+
//! | TxHistoryRow | big | fixed | allow | unlimited |
//! | All others | little | fixed | allow | unlimited |
//! +--------------+--------+------------+----------------+------------+
// We only want people to use bincode_util
use bincode::Options;
use bincode_do_not_use_directly as bincode;
/// Serialize with the big-endian profile (used by `TxHistoryRow`, whose
/// keys must sort correctly as raw bytes).
pub fn serialize_big<T>(value: &T) -> Result<Vec<u8>, bincode::Error>
where
    T: ?Sized + serde::Serialize,
{
    big_endian().serialize(value)
}
/// Deserialize data written by [`serialize_big`].
pub fn deserialize_big<'a, T>(bytes: &'a [u8]) -> Result<T, bincode::Error>
where
    T: serde::Deserialize<'a>,
{
    big_endian().deserialize(bytes)
}
/// Serialize with the little-endian profile (used by all other rows).
pub fn serialize_little<T>(value: &T) -> Result<Vec<u8>, bincode::Error>
where
    T: ?Sized + serde::Serialize,
{
    little_endian().serialize(value)
}
/// Deserialize data written by [`serialize_little`].
pub fn deserialize_little<'a, T>(bytes: &'a [u8]) -> Result<T, bincode::Error>
where
    T: serde::Deserialize<'a>,
{
    little_endian().deserialize(bytes)
}
/// This is the default settings for Options,
/// but all explicitly spelled out, except for endianness.
/// The following functions will add endianness.
/// (fixed-length ints + trailing bytes allowed match bincode v1's
/// legacy `serialize`/`deserialize` free-function behavior.)
#[inline]
fn options() -> impl Options {
    bincode::options()
        .with_fixint_encoding()
        .with_no_limit()
        .allow_trailing_bytes()
}
/// Adding the endian flag for big endian
#[inline]
fn big_endian() -> impl Options {
    options().with_big_endian()
}
/// Adding the endian flag for little endian
#[inline]
fn little_endian() -> impl Options {
    options().with_little_endian()
}
#[cfg(test)]
#[path = "./bincode_tests.rs"]
mod bincode_tests;
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/util/mod.rs | src/util/mod.rs | mod block;
mod script;
mod transaction;
pub mod bincode_util;
pub mod electrum_merkle;
pub mod fees;
pub use self::block::{BlockHeaderMeta, BlockId, BlockMeta, BlockStatus, HeaderEntry, HeaderList};
pub use self::fees::get_tx_fee;
pub use self::script::{get_innerscripts, ScriptToAddr, ScriptToAsm};
pub use self::transaction::{
extract_tx_prevouts, has_prevout, is_coinbase, is_spendable, serialize_outpoint,
sigops::transaction_sigop_count, TransactionStatus, TxInput,
};
use std::collections::HashMap;
use std::sync::atomic::AtomicUsize;
use std::sync::mpsc::{channel, Receiver, Sender};
use std::sync::Mutex;
use std::thread::{self, ThreadId};
use crate::chain::BlockHeader;
use bitcoin::hashes::sha256d::Hash as Sha256dHash;
use socket2::{Domain, Protocol, Socket, Type};
use std::net::SocketAddr;
pub type Bytes = Vec<u8>;
pub type HeaderMap = HashMap<Sha256dHash, BlockHeader>;
// TODO: consolidate serialization/deserialize code for bincode/bitcoin.
/// Length of a (double-)SHA256 hash in bytes.
const HASH_LEN: usize = 32;

/// Raw 32-byte hash value.
pub type FullHash = [u8; HASH_LEN];

/// Copy the first `HASH_LEN` bytes of `hash` into an owned array.
///
/// Uses the standard library instead of the `array_ref!` macro; the
/// contract is unchanged.
///
/// # Panics
///
/// Panics if `hash` is shorter than `HASH_LEN` bytes.
pub fn full_hash(hash: &[u8]) -> FullHash {
    let mut out = [0u8; HASH_LEN];
    // Slicing first panics clearly on short input; `copy_from_slice`
    // then requires the lengths to match exactly.
    out.copy_from_slice(&hash[..HASH_LEN]);
    out
}
/// A bounded crossbeam channel whose endpoints can be handed out
/// separately, and later dropped together via [`SyncChannel::close`].
pub struct SyncChannel<T> {
    // Option so close() can drop the endpoints in place.
    tx: Option<crossbeam_channel::Sender<T>>,
    rx: Option<crossbeam_channel::Receiver<T>>,
}
impl<T> SyncChannel<T> {
    /// Create a channel with capacity `size`.
    pub fn new(size: usize) -> SyncChannel<T> {
        let (tx, rx) = crossbeam_channel::bounded(size);
        SyncChannel {
            tx: Some(tx),
            rx: Some(rx),
        }
    }
    /// Clone a sending handle. Panics if the channel was closed.
    pub fn sender(&self) -> crossbeam_channel::Sender<T> {
        self.tx.as_ref().expect("No Sender").clone()
    }
    /// Borrow the receiving endpoint. Panics if the channel was closed.
    pub fn receiver(&self) -> &crossbeam_channel::Receiver<T> {
        self.rx.as_ref().expect("No Receiver")
    }
    /// Consume the wrapper, taking ownership of the receiver.
    pub fn into_receiver(self) -> crossbeam_channel::Receiver<T> {
        self.rx.expect("No Receiver")
    }
    /// This drops the sender and receiver, causing all other methods to panic.
    ///
    /// Use only when you know that the channel will no longer be used.
    /// ie. shutdown.
    pub fn close(&mut self) -> Option<crossbeam_channel::Receiver<T>> {
        self.tx.take();
        self.rx.take()
    }
}
/// A thin wrapper over an unbounded `std::sync::mpsc` channel that keeps
/// both endpoints together until a consumer claims one of them.
pub struct Channel<T> {
    tx: Sender<T>,
    rx: Receiver<T>,
}

impl<T> Channel<T> {
    /// Create a channel with no capacity bound.
    pub fn unbounded() -> Self {
        let (tx, rx) = channel();
        Self { tx, rx }
    }

    /// Obtain a fresh sending handle; may be called any number of times.
    pub fn sender(&self) -> Sender<T> {
        Sender::clone(&self.tx)
    }

    /// Borrow the single receiving endpoint.
    pub fn receiver(&self) -> &Receiver<T> {
        &self.rx
    }

    /// Consume the wrapper, yielding ownership of the receiver.
    pub fn into_receiver(self) -> Receiver<T> {
        self.rx
    }
}
/// This static HashMap contains all the threads spawned with [`spawn_thread`] with their name.
///
/// Runs `f` with exclusive access to the registry of live spawned
/// threads (ThreadId -> thread name).
#[inline]
pub fn with_spawned_threads<F>(f: F)
where
    F: FnOnce(&mut HashMap<ThreadId, String>),
{
    lazy_static! {
        static ref SPAWNED_THREADS: Mutex<HashMap<ThreadId, String>> = Mutex::new(HashMap::new());
    }
    let mut lock = match SPAWNED_THREADS.lock() {
        Ok(threads) => threads,
        // There's no possible broken state
        // (the map is only ever inserted into / removed from), so a
        // poisoned lock is recovered rather than propagated.
        Err(threads) => {
            warn!("SPAWNED_THREADS is in a poisoned state! Be wary of incorrect logs!");
            threads.into_inner()
        }
    };
    f(&mut lock)
}
/// Spawn a named thread (`"{prefix}-{N}"`, N from a global counter) and
/// register it in the spawned-thread registry for the duration of its
/// work, so diagnostics can map thread ids back to names.
///
/// Panics if the OS fails to spawn the thread.
pub fn spawn_thread<F, T>(prefix: &str, do_work: F) -> thread::JoinHandle<T>
where
    F: FnOnce() -> T,
    F: Send + 'static,
    T: Send + 'static,
{
    // Monotonic suffix shared by all spawn_thread callers.
    static THREAD_COUNTER: AtomicUsize = AtomicUsize::new(0);
    let counter = THREAD_COUNTER.fetch_add(1, std::sync::atomic::Ordering::AcqRel);
    thread::Builder::new()
        .name(format!("{}-{}", prefix, counter))
        .spawn(move || {
            let thread = std::thread::current();
            // Name is always present: Builder::name was set above.
            let name = thread.name().unwrap();
            let id = thread.id();
            trace!("[THREAD] GETHASHMAP INSERT | {name} {id:?}");
            with_spawned_threads(|threads| {
                threads.insert(id, name.to_owned());
            });
            trace!("[THREAD] START WORK | {name} {id:?}");
            let result = do_work();
            trace!("[THREAD] FINISHED WORK | {name} {id:?}");
            trace!("[THREAD] GETHASHMAP REMOVE | {name} {id:?}");
            // Deregister on the way out; a panic in do_work skips this,
            // leaving the entry behind (registry is best-effort).
            with_spawned_threads(|threads| {
                threads.remove(&id);
            });
            trace!("[THREAD] HASHMAP REMOVED | {name} {id:?}");
            result
        })
        .unwrap()
}
// Similar to https://doc.rust-lang.org/std/primitive.bool.html#method.then (nightly only),
// but with a function that returns an `Option<T>` instead of `T`. Adding something like
// this to std is being discussed: https://github.com/rust-lang/rust/issues/64260
pub trait BoolThen {
    /// Evaluate `f` and return its result when `self` is true; `None`
    /// otherwise (so `f` is never run on `false`).
    fn and_then<T>(self, f: impl FnOnce() -> Option<T>) -> Option<T>;
}
impl BoolThen for bool {
    fn and_then<T>(self, f: impl FnOnce() -> Option<T>) -> Option<T> {
        // `bool::then` produces Option<Option<T>>; flatten collapses it.
        self.then(f).flatten()
    }
}
/// Create a TCP socket bound to `addr`, with `SO_REUSEPORT` enabled on
/// Unix so multiple processes can share the listening port.
///
/// Panics on any socket-creation, option or bind failure.
pub fn create_socket(addr: &SocketAddr) -> Socket {
    // Pick the address family matching the requested bind address.
    let domain = match &addr {
        SocketAddr::V4(_) => Domain::IPV4,
        SocketAddr::V6(_) => Domain::IPV6,
    };
    let socket =
        Socket::new(domain, Type::STREAM, Some(Protocol::TCP)).expect("creating socket failed");
    #[cfg(unix)]
    socket
        .set_reuse_port(true)
        .expect("cannot enable SO_REUSEPORT");
    socket.bind(&(*addr).into()).expect("cannot bind");
    socket
}
/// A module used for serde serialization of bytes in hexadecimal format.
///
/// The module is compatible with the serde attribute.
///
/// Copied from https://github.com/rust-bitcoin/rust-bitcoincore-rpc/blob/master/json/src/lib.rs
pub mod serde_hex {
    use bitcoin::hashes::hex::{FromHex, ToHex};
    use serde::de::Error;
    use serde::{Deserializer, Serializer};
    /// Serialize a byte slice as a lowercase hex string.
    pub fn serialize<S: Serializer>(b: &[u8], s: S) -> Result<S::Ok, S::Error> {
        s.serialize_str(&b.to_hex())
    }
    /// Deserialize a hex string into bytes.
    pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result<Vec<u8>, D::Error> {
        let hex_str: String = ::serde::Deserialize::deserialize(d)?;
        FromHex::from_hex(&hex_str).map_err(D::Error::custom)
    }
    /// Same as the parent module, but for `Option<Vec<u8>>` fields.
    pub mod opt {
        use bitcoin::hashes::hex::{FromHex, ToHex};
        use serde::de::Error;
        use serde::{Deserializer, Serializer};
        pub fn serialize<S: Serializer>(b: &Option<Vec<u8>>, s: S) -> Result<S::Ok, S::Error> {
            match *b {
                None => s.serialize_none(),
                Some(ref b) => s.serialize_str(&b.to_hex()),
            }
        }
        // NOTE(review): always yields Some(...) when the field is present;
        // a JSON null would fail the String deserialization rather than
        // map to None — confirm this matches how callers use it.
        pub fn deserialize<'de, D: Deserializer<'de>>(d: D) -> Result<Option<Vec<u8>>, D::Error> {
            let hex_str: String = ::serde::Deserialize::deserialize(d)?;
            Ok(Some(FromHex::from_hex(&hex_str).map_err(D::Error::custom)?))
        }
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/util/script.rs | src/util/script.rs | #[cfg(feature = "liquid")]
use elements::address as elements_address;
use crate::chain::{script, Network, Script, TxIn, TxOut};
use script::Instruction::PushBytes;
/// The scripts wrapped inside a spending input: the p2sh redeemScript
/// and/or the (p2wsh / p2sh-p2wsh / taproot leaf) witnessScript, when
/// present.
pub struct InnerScripts {
    pub redeem_script: Option<Script>,
    pub witness_script: Option<Script>,
}
/// Render a script as ASM by stripping the wrapper from its `Debug`
/// representation.
pub trait ScriptToAsm: std::fmt::Debug {
    fn to_asm(&self) -> String {
        // NOTE(review): relies on the Debug output having a fixed 7-char
        // prefix ("Script(") and a 1-char suffix (")") — verify against
        // the Debug impls of the script types below.
        let asm = format!("{:?}", self);
        asm[7..asm.len() - 1].to_string()
    }
}
impl ScriptToAsm for bitcoin::Script {}
#[cfg(feature = "liquid")]
impl ScriptToAsm for elements::Script {}
/// Convert a scriptPubKey to an address string for the given network,
/// when the script has a standard address form.
pub trait ScriptToAddr {
    fn to_address_str(&self, network: Network) -> Option<String>;
}
#[cfg(not(feature = "liquid"))]
impl ScriptToAddr for bitcoin::Script {
    fn to_address_str(&self, network: Network) -> Option<String> {
        bitcoin::Address::from_script(self, network.into()).map(|s| s.to_string())
    }
}
#[cfg(feature = "liquid")]
impl ScriptToAddr for elements::Script {
    fn to_address_str(&self, network: Network) -> Option<String> {
        // No blinding key: produces the unconfidential address form.
        elements_address::Address::from_script(self, None, network.address_params())
            .map(|a| a.to_string())
    }
}
// Returns the witnessScript in the case of p2wsh, or the redeemScript in the case of p2sh.
//
// For taproot (p2tr) outputs the "witness script" slot is filled with the
// script-path leaf script, when the input is a script-path spend.
pub fn get_innerscripts(txin: &TxIn, prevout: &TxOut) -> InnerScripts {
    // Wrapped redeemScript for P2SH spends
    // (by consensus it is the last data push of the scriptSig).
    let redeem_script = if prevout.script_pubkey.is_p2sh() {
        if let Some(Ok(PushBytes(redeemscript))) = txin.script_sig.instructions().last() {
            Some(Script::from(redeemscript.to_vec()))
        } else {
            None
        }
    } else {
        None
    };
    // Wrapped witnessScript for P2WSH or P2SH-P2WSH spends
    let witness_script = if prevout.script_pubkey.is_v0_p2wsh()
        || prevout.script_pubkey.is_v1_p2tr()
        || redeem_script.as_ref().is_some_and(|s| s.is_v0_p2wsh())
    {
        let witness = &txin.witness;
        #[cfg(feature = "liquid")]
        let witness = &witness.script_witness;
        // rust-bitcoin returns witness items as a [u8] slice, while rust-elements returns a Vec<u8>
        #[cfg(not(feature = "liquid"))]
        let wit_to_vec = Vec::from;
        #[cfg(feature = "liquid")]
        let wit_to_vec = Clone::clone;
        let inner_script_slice = if prevout.script_pubkey.is_v1_p2tr() {
            // Witness stack is potentially very large
            // so we avoid to_vec() or iter().collect() for performance
            let w_len = witness.len();
            witness
                .last()
                // Get the position of the script spend script (if it exists)
                .map(|last_elem| {
                    // From BIP341:
                    // If there are at least two witness elements, and the first byte of
                    // the last element is 0x50, this last element is called annex a
                    // and is removed from the witness stack.
                    if w_len >= 2 && last_elem.first().filter(|&&v| v == 0x50).is_some() {
                        // account for the extra item removed from the end
                        3
                    } else {
                        // otherwise script is 2nd from last
                        2
                    }
                })
                // Convert to None if not script spend
                // Note: Option doesn't have filter_map() method
                .filter(|&script_pos_from_last| w_len >= script_pos_from_last)
                .and_then(|script_pos_from_last| {
                    // Can't use second_to_last() since it might be 3rd to last
                    #[allow(clippy::iter_nth)]
                    witness.iter().nth(w_len - script_pos_from_last)
                })
        } else {
            // v0 p2wsh: the witnessScript is always the last stack item.
            witness.last()
        };
        inner_script_slice.map(wit_to_vec).map(Script::from)
    } else {
        None
    };
    InnerScripts {
        redeem_script,
        witness_script,
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/util/bincode_tests.rs | src/util/bincode_tests.rs | /*
The tests below show us the following defaults for each method of using bincode.
1. Using bincode::[de]serialize() directly: "function"
2. Using bincode::config().[de]serialize(): "Config" (deprecated)
3. Using bincode::options().[de]serialize(): "Options" (currently recommended for v1.3.3)
```
+----------+--------+------------+----------------+------------+
| | Endian | Int Length | Allow Trailing | Byte Limit |
+----------+--------+------------+----------------+------------+
| function | little | fixed | allow | unlimited |
| Config | little | fixed | allow | unlimited |
| Options | little | variable * | reject * | unlimited |
+----------+--------+------------+----------------+------------+
```
Thus we only need to change the int length from variable to fixed,
and allow trailing to allow in order to match the previous behavior.
(note: TxHistory was using Big Endian by explicitly setting it to big.)
*/
use bincode_do_not_use_directly as bincode;
#[test]
fn bincode_settings() {
    let value = TestStruct::new();
    // Oversized buffer: the trailing zero bytes exercise the
    // "allow trailing bytes" behavior of each deserializer.
    let mut large = [0_u8; 4096];
    // Expected little-endian, fixed-int encoding of TestStruct::new().
    let decoded = [
        8_u8, 7, 6, 5, 4, 3, 2, 1, 1, 2, 3, 4, 5, 6, 7, 8, 0, 0, 0, 0, 8, 7, 6, 5, 4, 3, 2, 1, 1,
        2, 3, 4, 5, 6, 7, 8, 12, 0, 0, 0, 0, 0, 0, 0, 72, 101, 108, 108, 111, 32, 87, 111, 114,
        108, 100, 33,
    ];
    large[0..56].copy_from_slice(&decoded);
    // Using functions: Little endian, Fixint, Allow trailing, Unlimited
    assert_eq!(bincode::serialize(&value).unwrap(), &decoded);
    assert_eq!(bincode::deserialize::<TestStruct>(&large).unwrap(), value);
    // Using Config (deprecated)
    // Little endian, fixint, Allow trailing, Unlimited
    #[allow(deprecated)]
    {
        assert_eq!(bincode::config().serialize(&value).unwrap(), &decoded);
        assert_eq!(
            bincode::config().deserialize::<TestStruct>(&large).unwrap(),
            value
        );
    }
    // Using Options
    // Little endian, VARINT (different), Reject trailing (different), unlimited
    // — so fixint + allow_trailing must be set explicitly to match.
    use bincode::Options;
    assert_eq!(
        bincode::options()
            .with_fixint_encoding()
            .allow_trailing_bytes()
            .serialize(&value)
            .unwrap(),
        &decoded
    );
    assert_eq!(
        bincode::options()
            .with_fixint_encoding()
            .allow_trailing_bytes()
            .deserialize::<TestStruct>(&large)
            .unwrap(),
        value
    );
}
/// Fixture covering ints, arrays, a nested enum and a String, so the
/// encoding of each is pinned by the test above.
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
struct TestStruct {
    a: u64,
    b: [u8; 8],
    c: TestData,
    d: String,
}
/// Two-variant enum: exercises the 4-byte variant tag in the encoding.
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
enum TestData {
    Foo(FooStruct),
    Bar(BarStruct),
}
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
struct FooStruct {
    a: u64,
    b: [u8; 8],
}
#[derive(Debug, Serialize, Deserialize, PartialEq, Eq)]
struct BarStruct {
    a: u64,
    b: [u8; 8],
}
impl TestStruct {
    /// The fixed fixture value whose encoding is asserted byte-for-byte.
    fn new() -> Self {
        Self {
            a: 0x0102030405060708,
            b: [1, 2, 3, 4, 5, 6, 7, 8],
            c: TestData::Foo(FooStruct {
                a: 0x0102030405060708,
                b: [1, 2, 3, 4, 5, 6, 7, 8],
            }),
            d: String::from("Hello World!"),
        }
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/util/transaction.rs | src/util/transaction.rs | use crate::chain::{BlockHash, OutPoint, Transaction, TxIn, TxOut, Txid};
use crate::errors;
use crate::util::BlockId;
use std::collections::HashMap;
#[cfg(feature = "liquid")]
use bitcoin::hashes::hex::FromHex;
#[cfg(feature = "liquid")]
lazy_static! {
static ref REGTEST_INITIAL_ISSUANCE_PREVOUT: Txid =
Txid::from_hex("50cdc410c9d0d61eeacc531f52d2c70af741da33af127c364e52ac1ee7c030a5").unwrap();
static ref TESTNET_INITIAL_ISSUANCE_PREVOUT: Txid =
Txid::from_hex("0c52d2526a5c9f00e9fb74afd15dd3caaf17c823159a514f929ae25193a43a52").unwrap();
}
/// Confirmation status of a transaction as exposed over the HTTP API;
/// the block fields are omitted from JSON while unconfirmed.
#[derive(Serialize, Deserialize)]
pub struct TransactionStatus {
    pub confirmed: bool,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub block_height: Option<usize>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub block_hash: Option<BlockHash>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub block_time: Option<u32>,
}
impl From<Option<BlockId>> for TransactionStatus {
    // Some(block) -> confirmed in that block; None -> unconfirmed.
    fn from(blockid: Option<BlockId>) -> TransactionStatus {
        match blockid {
            Some(b) => TransactionStatus {
                confirmed: true,
                block_height: Some(b.height),
                block_hash: Some(b.hash),
                block_time: Some(b.time),
            },
            None => TransactionStatus {
                confirmed: false,
                block_height: None,
                block_hash: None,
                block_time: None,
            },
        }
    }
}
/// Reference to a specific input of a transaction.
#[derive(Serialize, Deserialize)]
pub struct TxInput {
    pub txid: Txid,
    pub vin: u32,
}
/// Whether this input is a coinbase input (Liquid also counts its own
/// coinbase-like inputs via `TxIn::is_coinbase`).
pub fn is_coinbase(txin: &TxIn) -> bool {
    #[cfg(not(feature = "liquid"))]
    return txin.previous_output.is_null();
    #[cfg(feature = "liquid")]
    return txin.is_coinbase();
}
/// Whether this input spends an actual previous output that can be
/// looked up (false for coinbase, and on Liquid also for peg-ins and
/// the hard-coded initial-issuance prevouts).
pub fn has_prevout(txin: &TxIn) -> bool {
    #[cfg(not(feature = "liquid"))]
    return !txin.previous_output.is_null();
    #[cfg(feature = "liquid")]
    return !txin.is_coinbase()
        && !txin.is_pegin
        && txin.previous_output.txid != *REGTEST_INITIAL_ISSUANCE_PREVOUT
        && txin.previous_output.txid != *TESTNET_INITIAL_ISSUANCE_PREVOUT;
}
/// Whether this output can ever be spent (excludes provably
/// unspendable scripts, and on Liquid the explicit fee outputs).
pub fn is_spendable(txout: &TxOut) -> bool {
    #[cfg(not(feature = "liquid"))]
    return !txout.script_pubkey.is_provably_unspendable();
    #[cfg(feature = "liquid")]
    return !txout.is_fee() && !txout.script_pubkey.is_provably_unspendable();
}
/// Extract the previous TxOuts of a Transaction's TxIns
///
/// Returns a map from input index to its prevout; inputs without a
/// prevout (coinbase / peg-in, per [`has_prevout`]) are skipped.
///
/// # Errors
///
/// This function MUST NOT return an error variant when allow_missing is true.
/// If allow_missing is false, it will return an error when any Outpoint is
/// missing from the keys of the txos argument's HashMap.
pub fn extract_tx_prevouts<'a>(
    tx: &Transaction,
    txos: &'a HashMap<OutPoint, TxOut>,
) -> Result<HashMap<u32, &'a TxOut>, errors::Error> {
    tx.input
        .iter()
        .enumerate()
        .filter(|(_, txi)| has_prevout(txi))
        .map(|(index, txi)| {
            Ok((
                index as u32,
                match txos.get(&txi.previous_output) {
                    Some(txo) => txo,
                    None => {
                        return Err(format!("missing outpoint {:?}", txi.previous_output).into());
                    }
                },
            ))
        })
        .collect()
}
/// Serde helper: serialize an `OutPoint` as `{ "txid": ..., "vout": ... }`
/// (for use with `#[serde(serialize_with = ...)]`).
pub fn serialize_outpoint<S>(outpoint: &OutPoint, serializer: S) -> Result<S::Ok, S::Error>
where
    S: serde::ser::Serializer,
{
    use serde::ser::SerializeStruct;
    let mut s = serializer.serialize_struct("OutPoint", 2)?;
    s.serialize_field("txid", &outpoint.txid)?;
    s.serialize_field("vout", &outpoint.vout)?;
    s.end()
}
pub(super) mod sigops {
use crate::chain::{
hashes::hex::FromHex,
opcodes::{
all::{OP_CHECKMULTISIG, OP_CHECKMULTISIGVERIFY, OP_CHECKSIG, OP_CHECKSIGVERIFY},
All,
},
script::{self, Instruction},
Transaction, TxOut, Witness,
};
use std::collections::HashMap;
    /// Get sigop count for transaction. prevout_map must have all the prevouts.
    ///
    /// The map is keyed by input index; an incomplete map produces an
    /// `EarlyEndOfScript` error (reused here to mean "missing prevout").
    pub fn transaction_sigop_count(
        tx: &Transaction,
        prevout_map: &HashMap<u32, &TxOut>,
    ) -> Result<usize, script::Error> {
        let input_count = tx.input.len();
        let mut prevouts = Vec::with_capacity(input_count);
        #[cfg(not(feature = "liquid"))]
        let is_coinbase_or_pegin = tx.is_coin_base();
        #[cfg(feature = "liquid")]
        let is_coinbase_or_pegin = tx.is_coinbase() || tx.input.iter().any(|input| input.is_pegin);
        // Collect prevouts in input order; skipped entirely for coinbase
        // (and Liquid peg-in) transactions, which have none.
        if !is_coinbase_or_pegin {
            for idx in 0..input_count {
                prevouts.push(
                    *prevout_map
                        .get(&(idx as u32))
                        .ok_or(script::Error::EarlyEndOfScript)?,
                );
            }
        }
        // coinbase tx won't use prevouts so it can be empty.
        get_sigop_cost(tx, &prevouts, true, true)
    }
fn decode_pushnum(op: &All) -> Option<u8> {
// 81 = OP_1, 96 = OP_16
// 81 -> 1, so... 81 - 80 -> 1
let self_u8 = op.into_u8();
match self_u8 {
81..=96 => Some(self_u8 - 80),
_ => None,
}
}
    /// Count signature operations in a single script.
    ///
    /// With `accurate` set, an OP_CHECKMULTISIG(VERIFY) immediately
    /// preceded by OP_1..OP_16 counts as that many sigops; otherwise
    /// (and in legacy "inaccurate" mode) it counts as 20.
    fn count_sigops(script: &script::Script, accurate: bool) -> usize {
        let mut n = 0;
        // Remembers the pushnum value of the previous instruction, if any.
        let mut pushnum_cache = None;
        for inst in script.instructions() {
            match inst {
                Ok(Instruction::Op(opcode)) => {
                    match opcode {
                        OP_CHECKSIG | OP_CHECKSIGVERIFY => {
                            n += 1;
                        }
                        OP_CHECKMULTISIG | OP_CHECKMULTISIGVERIFY => {
                            match (accurate, pushnum_cache) {
                                (true, Some(pushnum)) => {
                                    // Add the number of pubkeys in the multisig as sigop count
                                    n += usize::from(pushnum);
                                }
                                _ => {
                                    // MAX_PUBKEYS_PER_MULTISIG from Bitcoin Core
                                    // https://github.com/bitcoin/bitcoin/blob/v25.0/src/script/script.h#L29-L30
                                    n += 20;
                                }
                            }
                        }
                        _ => {
                            pushnum_cache = decode_pushnum(&opcode);
                        }
                    }
                }
                // We ignore errors as well as pushdatas
                _ => {
                    pushnum_cache = None;
                }
            }
        }
        n
    }
/// Get the sigop count for legacy transactions
fn get_legacy_sigop_count(tx: &Transaction) -> usize {
let mut n = 0;
for input in &tx.input {
n += count_sigops(&input.script_sig, false);
}
for output in &tx.output {
n += count_sigops(&output.script_pubkey, false);
}
n
}
    /// Accurate sigop count of the redeemScripts of this transaction's
    /// P2SH inputs (zero for coinbase). `previous_outputs` must align
    /// index-for-index with `tx.input`.
    fn get_p2sh_sigop_count(tx: &Transaction, previous_outputs: &[&TxOut]) -> usize {
        #[cfg(not(feature = "liquid"))]
        if tx.is_coin_base() {
            return 0;
        }
        #[cfg(feature = "liquid")]
        if tx.is_coinbase() {
            return 0;
        }
        let mut n = 0;
        for (input, prevout) in tx.input.iter().zip(previous_outputs.iter()) {
            if prevout.script_pubkey.is_p2sh() {
                // The redeemScript is the final data push of the scriptSig.
                if let Some(Ok(script::Instruction::PushBytes(redeem))) =
                    input.script_sig.instructions().last()
                {
                    let script =
                        script::Script::from_byte_iter(redeem.iter().map(|v| Ok(*v))).unwrap(); // I only return Ok, so it won't error
                    n += count_sigops(&script, true);
                }
            }
        }
        n
    }
    /// Sigop count contributed by witness data: 1 per p2wpkh input and
    /// an accurate count of the witnessScript for p2wsh (including
    /// p2sh-wrapped p2wsh) inputs. `previous_outputs` must align
    /// index-for-index with `tx.input`.
    fn get_witness_sigop_count(tx: &Transaction, previous_outputs: &[&TxOut]) -> usize {
        let mut n = 0;
        // True iff the script consists solely of data pushes.
        #[inline]
        fn is_push_only(script: &script::Script) -> bool {
            for inst in script.instructions() {
                match inst {
                    Err(_) => return false,
                    Ok(Instruction::Op(_)) => return false,
                    Ok(Instruction::PushBytes(_)) => {}
                }
            }
            true
        }
        // Bytes of the script's final data push, if it is one.
        #[inline]
        fn last_pushdata(script: &script::Script) -> Option<&[u8]> {
            match script.instructions().last() {
                Some(Ok(Instruction::PushBytes(bytes))) => Some(bytes),
                _ => None,
            }
        }
        // Count sigops for one input, resolving p2sh wrapping first.
        #[inline]
        fn count_with_prevout(
            prevout: &TxOut,
            script_sig: &script::Script,
            witness: &Witness,
        ) -> usize {
            let mut n = 0;
            // The effective witness program: either the prevout itself,
            // or the redeemScript pushed by a p2sh-wrapped spend.
            let script = if prevout.script_pubkey.is_witness_program() {
                prevout.script_pubkey.clone()
            } else if prevout.script_pubkey.is_p2sh()
                && is_push_only(script_sig)
                && !script_sig.is_empty()
            {
                script::Script::from_byte_iter(
                    last_pushdata(script_sig).unwrap().iter().map(|v| Ok(*v)),
                )
                .unwrap()
            } else {
                return 0;
            };
            if script.is_v0_p2wsh() {
                // Accurately count sigops in the witnessScript.
                let bytes = script.as_bytes();
                n += sig_ops(witness, bytes[0], &bytes[2..]);
            } else if script.is_v0_p2wpkh() {
                n += 1;
            }
            n
        }
        for (input, prevout) in tx.input.iter().zip(previous_outputs.iter()) {
            n += count_with_prevout(prevout, &input.script_sig, &input.witness);
        }
        n
    }
/// Get the sigop cost for this transaction.
///
/// Legacy and P2SH sigops are weighted 4x; witness sigops count 1x.
/// Returns an error if the prevout count does not match the input count, or
/// if witness verification is requested without P2SH verification.
fn get_sigop_cost(
    tx: &Transaction,
    previous_outputs: &[&TxOut],
    verify_p2sh: bool,
    verify_witness: bool,
) -> Result<usize, script::Error> {
    let mut n_sigop_cost = get_legacy_sigop_count(tx) * 4;
    // Coinbase (and Liquid peg-in) inputs have no spendable prevouts;
    // only the legacy component applies.
    #[cfg(not(feature = "liquid"))]
    if tx.is_coin_base() {
        return Ok(n_sigop_cost);
    }
    #[cfg(feature = "liquid")]
    if tx.is_coinbase() || tx.input.iter().any(|input| input.is_pegin) {
        return Ok(n_sigop_cost);
    }
    // One prevout must be supplied per input.
    if tx.input.len() != previous_outputs.len() {
        return Err(script::Error::EarlyEndOfScript);
    }
    // Witness verification requires P2SH verification to also be enabled.
    if verify_witness && !verify_p2sh {
        return Err(script::Error::EarlyEndOfScript);
    }
    if verify_p2sh {
        n_sigop_cost += get_p2sh_sigop_count(tx, previous_outputs) * 4;
    }
    if verify_witness {
        n_sigop_cost += get_witness_sigop_count(tx, previous_outputs);
    }
    Ok(n_sigop_cost)
}
/// Get sigops for the Witness
///
/// witness_version is the raw opcode. OP_0 is 0, OP_1 is 81, etc.
fn sig_ops(witness: &Witness, witness_version: u8, witness_program: &[u8]) -> usize {
    #[cfg(feature = "liquid")]
    let last_witness = witness.script_witness.last();
    #[cfg(not(feature = "liquid"))]
    let last_witness = witness.last();
    match (witness_version, witness_program.len()) {
        // v0, 20-byte program (P2WPKH): exactly one implicit CHECKSIG.
        (0, 20) => 1,
        // v0, 32-byte program (P2WSH): count accurately inside the witness
        // script, which is the last witness stack item.
        (0, 32) => last_witness
            .map(|sl| sl.iter().map(|v| Ok(*v)))
            .map(script::Script::from_byte_iter)
            // I only return Ok 2 lines up, so there is no way to error
            .map(|s| count_sigops(&s.unwrap(), true))
            .unwrap_or_default(),
        // Other witness versions don't contribute sigops.
        _ => 0,
    }
}
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/util/fees.rs | src/util/fees.rs | use crate::chain::{Network, Transaction, TxOut};
use std::collections::HashMap;
const VSIZE_BIN_WIDTH: u32 = 50_000; // in vbytes
/// Precomputed fee information for a transaction.
pub struct TxFeeInfo {
    pub fee: u64,   // in satoshis
    pub vsize: u32, // in virtual bytes (= weight/4)
    pub fee_per_vbyte: f32,
}
impl TxFeeInfo {
    /// Compute fee information for `tx` given the prevouts it spends.
    pub fn new(tx: &Transaction, prevouts: &HashMap<u32, &TxOut>, network: Network) -> Self {
        let fee = get_tx_fee(tx, prevouts, network);
        // NOTE(review): integer division floors the vsize here — confirm
        // whether round-up (Core-style) was intended; kept as-is.
        let vsize = tx.weight() / 4;
        let fee_per_vbyte = fee as f32 / vsize as f32;
        TxFeeInfo {
            fee,
            vsize: vsize as u32,
            fee_per_vbyte,
        }
    }
}
/// Fee of a (non-Liquid) transaction: total spent prevout value minus total
/// output value. Coinbase transactions pay no fee.
#[cfg(not(feature = "liquid"))]
pub fn get_tx_fee(tx: &Transaction, prevouts: &HashMap<u32, &TxOut>, _network: Network) -> u64 {
    if tx.is_coin_base() {
        return 0;
    }
    let total_in = prevouts
        .values()
        .fold(0u64, |acc, prevout| acc + prevout.value);
    let total_out = tx.output.iter().fold(0u64, |acc, vout| acc + vout.value);
    total_in - total_out
}
/// Fee of a Liquid transaction: the explicit fee output denominated in the
/// network's native asset (prevouts are not needed).
#[cfg(feature = "liquid")]
pub fn get_tx_fee(tx: &Transaction, _prevouts: &HashMap<u32, &TxOut>, network: Network) -> u64 {
    tx.fee_in(*network.native_asset())
}
/// Build the Electrum-style fee histogram: descending fee-rate buckets, each
/// covering at least `VSIZE_BIN_WIDTH` vbytes, as `(fee_rate, bin_vsize)`
/// pairs where `bin_vsize` is the vsize of transactions paying >= `fee_rate`.
pub fn make_fee_histogram(mut entries: Vec<&TxFeeInfo>) -> Vec<(f32, u32)> {
    // total_cmp is a total order over f32, so sorting cannot panic on NaN
    // (the previous partial_cmp().unwrap() would).
    entries.sort_unstable_by(|e1, e2| e1.fee_per_vbyte.total_cmp(&e2.fee_per_vbyte));
    let mut histogram = vec![];
    let mut bin_size = 0;
    let mut last_fee_rate = 0.0;
    // Walk from the highest fee rate downward, closing a bucket once it
    // exceeds the bin width and the fee rate changes (so equal rates never
    // straddle two buckets).
    for e in entries.iter().rev() {
        if bin_size > VSIZE_BIN_WIDTH && last_fee_rate != e.fee_per_vbyte {
            // vsize of transactions paying >= last_fee_rate
            histogram.push((last_fee_rate, bin_size));
            bin_size = 0;
        }
        last_fee_rate = e.fee_per_vbyte;
        bin_size += e.vsize;
    }
    // Flush the final partial bucket.
    if bin_size > 0 {
        histogram.push((last_fee_rate, bin_size));
    }
    histogram
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/electrum/discovery.rs | src/electrum/discovery.rs | use std::cmp::Ordering;
use std::collections::{hash_map::Entry, BinaryHeap, HashMap, HashSet};
use std::convert::TryInto;
use std::fmt;
use std::net::{IpAddr, SocketAddr, ToSocketAddrs};
use std::str::FromStr;
use std::sync::{Arc, RwLock};
use std::thread;
use std::time::{Duration, Instant};
use electrum_client::ElectrumApi;
use crate::chain::Network;
use crate::electrum::{Client, Hostname, Port, ProtocolVersion, ServerFeatures};
use crate::errors::{Result, ResultExt};
use crate::util::spawn_thread;
mod default_servers;
use default_servers::add_default_servers;
const HEALTH_CHECK_FREQ: Duration = Duration::from_secs(3600); // check servers every hour
const JOB_INTERVAL: Duration = Duration::from_secs(1); // run one health check job every second
const MAX_CONSECUTIVE_FAILURES: usize = 24; // drop servers after 24 consecutive failing attempts (~24 hours)
const MAX_QUEUE_SIZE: usize = 500; // refuse accepting new servers if we have that many health check jobs
const MAX_SERVERS_PER_REQUEST: usize = 3; // maximum number of server hosts added per server.add_peer call
const MAX_SERVICES_PER_REQUEST: usize = 6; // maximum number of services added per server.add_peer call
#[derive(Debug)]
pub struct DiscoveryManager {
    /// A queue of scheduled health check jobs, including for healthy, unhealthy and untested servers
    queue: RwLock<BinaryHeap<HealthCheck>>,
    /// A list of servers that were found to be healthy on their last health check
    healthy: RwLock<HashMap<ServerAddr, Server>>,
    /// Used to test for protocol version compatibility
    our_version: ProtocolVersion,
    /// So that we don't list ourselves
    our_addrs: HashSet<ServerAddr>,
    /// For advertising ourself to other servers
    our_features: ServerFeatures,
    /// Whether we should announce ourselves to the servers we're connecting to
    announce: bool,
    /// Optional, will not support onion hosts without this
    tor_proxy: Option<SocketAddr>,
}
/// A Server corresponds to a single IP address or onion hostname, with one or more services
/// exposed on different ports.
#[derive(Debug)]
struct Server {
    services: HashSet<Service>,
    hostname: Hostname,
    features: ServerFeatures,
    // the `ServerAddr` isn't kept here directly, but is also available next to `Server` as the key for
    // the `healthy` field on `DiscoveryManager`
}
/// A server address: either a clearnet IP or a Tor onion hostname.
#[derive(Eq, PartialEq, Hash, Clone, Debug)]
enum ServerAddr {
    Clearnet(IpAddr),
    Onion(Hostname),
}
/// A single exposed service: transport type plus port.
#[derive(Eq, PartialEq, Hash, Copy, Clone, Debug)]
pub enum Service {
    Tcp(Port),
    Ssl(Port),
    // unimplemented: Ws and Wss
}
/// A queued health check job, one per service/port (and not per server)
#[derive(Eq, Debug)]
struct HealthCheck {
    addr: ServerAddr,
    hostname: Hostname,
    service: Service,
    // Default (hard-coded) servers get extra leniency before being dropped.
    is_default: bool,
    #[allow(dead_code)]
    added_by: Option<IpAddr>,
    // Timestamps of the most recent attempt and most recent success.
    last_check: Option<Instant>,
    last_healthy: Option<Instant>,
    consecutive_failures: usize,
}
/// The server entry format returned from server.peers.subscribe
#[derive(Serialize)]
pub struct ServerEntry(ServerAddr, Hostname, Vec<String>);
impl DiscoveryManager {
    /// Create a manager, resolve our own hostnames (so we can skip ourselves
    /// later) and seed the queue with the network's default servers.
    pub fn new(
        our_network: Network,
        our_features: ServerFeatures,
        our_version: ProtocolVersion,
        announce: bool,
        tor_proxy: Option<SocketAddr>,
    ) -> Self {
        let our_addrs = our_features
            .hosts
            .keys()
            .filter_map(|hostname| {
                ServerAddr::resolve(hostname)
                    .map_err(|e| warn!("failed resolving own hostname {}: {:?}", hostname, e))
                    .ok()
            })
            .collect();
        let discovery = Self {
            our_addrs,
            our_version,
            our_features,
            announce,
            tor_proxy,
            healthy: Default::default(),
            queue: Default::default(),
        };
        add_default_servers(&discovery, our_network);
        discovery
    }
    /// Add a server requested via `server.add_peer`
    pub fn add_server_request(&self, added_by: IpAddr, features: ServerFeatures) -> Result<()> {
        self.verify_compatibility(&features)?;
        let mut queue = self.queue.write().unwrap();
        ensure!(queue.len() < MAX_QUEUE_SIZE, "queue size exceeded");
        // TODO optimize
        // Index the already-queued services so duplicates aren't re-added.
        let mut existing_services: HashMap<ServerAddr, HashSet<Service>> = HashMap::new();
        for job in queue.iter() {
            existing_services
                .entry(job.addr.clone())
                .or_default()
                .insert(job.service);
        }
        // collect HealthChecks for candidate services
        let jobs = features
            .hosts
            .iter()
            .take(MAX_SERVERS_PER_REQUEST)
            .filter_map(|(hostname, ports)| {
                let hostname = hostname.to_lowercase();
                if hostname.len() > 100 {
                    warn!("skipping invalid hostname");
                    return None;
                }
                let addr = match ServerAddr::resolve(&hostname) {
                    Ok(addr) => addr,
                    Err(e) => {
                        warn!("failed resolving {}: {:?}", hostname, e);
                        return None;
                    }
                };
                if !is_remote_addr(&addr) || self.our_addrs.contains(&addr) {
                    warn!("skipping own or non-remote server addr");
                    return None;
                }
                // ensure the server address matches the ip that advertised it to us.
                // onion hosts are exempt.
                if let ServerAddr::Clearnet(ip) = addr {
                    if ip != added_by {
                        warn!(
                            "server ip does not match source ip ({}, {} != {})",
                            hostname, ip, added_by
                        );
                        return None;
                    }
                }
                Some((addr, hostname, ports))
            })
            .flat_map(|(addr, hostname, ports)| {
                let tcp_service = ports.tcp_port.into_iter().map(Service::Tcp);
                let ssl_service = ports.ssl_port.into_iter().map(Service::Ssl);
                let services = tcp_service.chain(ssl_service).collect::<HashSet<Service>>();
                services
                    .into_iter()
                    .filter(|service| {
                        existing_services
                            .get(&addr)
                            .is_none_or(|s| !s.contains(service))
                    })
                    .map(|service| {
                        HealthCheck::new(addr.clone(), hostname.clone(), service, Some(added_by))
                    })
                    .collect::<Vec<_>>()
            })
            .take(MAX_SERVICES_PER_REQUEST)
            .collect::<Vec<_>>();
        ensure!(
            queue.len() + jobs.len() <= MAX_QUEUE_SIZE,
            "queue size exceeded"
        );
        queue.extend(jobs);
        Ok(())
    }
    /// Add a default server. Default servers are exempt from limits and given more leniency
    /// before being removed due to unavailability.
    pub fn add_default_server(&self, hostname: Hostname, services: Vec<Service>) -> Result<()> {
        let addr = ServerAddr::resolve(&hostname)?;
        let mut queue = self.queue.write().unwrap();
        queue.extend(
            services
                .into_iter()
                .map(|service| HealthCheck::new(addr.clone(), hostname.clone(), service, None)),
        );
        Ok(())
    }
    /// Get the list of healthy servers formatted for `servers.peers.subscribe`
    pub fn get_servers(&self) -> Vec<ServerEntry> {
        // XXX return a random sample instead of everything?
        self.healthy
            .read()
            .unwrap()
            .iter()
            .map(|(addr, server)| {
                ServerEntry(addr.clone(), server.hostname.clone(), server.feature_strs())
            })
            .collect()
    }
    /// Our own advertised features (for `server.features`).
    pub fn our_features(&self) -> &ServerFeatures {
        &self.our_features
    }
    /// Run the next health check in the queue (a single one)
    fn run_health_check(&self) -> Result<()> {
        // abort if there are no entries in the queue, or its still too early for the next one up
        if self.queue.read().unwrap().peek().is_none_or(|next| {
            next.last_check
                .is_some_and(|t| t.elapsed() < HEALTH_CHECK_FREQ)
        }) {
            return Ok(());
        }
        let mut job = self.queue.write().unwrap().pop().unwrap();
        debug!("processing {:?}", job);
        let was_healthy = job.is_healthy();
        match self.check_server(&job.addr, &job.hostname, job.service) {
            Ok(features) => {
                debug!("{} {:?} is available", job.hostname, job.service);
                // Only (re-)register the service on a health transition.
                if !was_healthy {
                    self.save_healthy_service(&job, features);
                }
                // XXX update features?
                job.last_check = Some(Instant::now());
                job.last_healthy = job.last_check;
                job.consecutive_failures = 0;
                // schedule the next health check
                self.queue.write().unwrap().push(job);
                Ok(())
            }
            Err(e) => {
                debug!("{} {:?} is unavailable: {:?}", job.hostname, job.service, e);
                if was_healthy {
                    // XXX should we assume the server's other services are down too?
                    self.remove_unhealthy_service(&job);
                }
                job.last_check = Some(Instant::now());
                job.consecutive_failures += 1;
                // Re-queue unless the retry budget is exhausted.
                if job.should_retry() {
                    self.queue.write().unwrap().push(job);
                } else {
                    debug!("giving up on {:?}", job);
                }
                Err(e)
            }
        }
    }
    /// Upsert the server/service into the healthy set
    fn save_healthy_service(&self, job: &HealthCheck, features: ServerFeatures) {
        let addr = job.addr.clone();
        let mut healthy = self.healthy.write().unwrap();
        healthy
            .entry(addr)
            .or_insert_with(|| Server::new(job.hostname.clone(), features))
            .services
            .insert(job.service);
    }
    /// Remove the service, and remove the server entirely if it has no other remaining healthy services
    fn remove_unhealthy_service(&self, job: &HealthCheck) {
        let addr = job.addr.clone();
        let mut healthy = self.healthy.write().unwrap();
        if let Entry::Occupied(mut entry) = healthy.entry(addr) {
            let server = entry.get_mut();
            assert!(server.services.remove(&job.service));
            if server.services.is_empty() {
                entry.remove_entry();
            }
        } else {
            // only called for services previously marked healthy
            unreachable!("missing expected server, corrupted state");
        }
    }
    /// Connect to the service, fetch its features, and verify compatibility
    /// (optionally announcing ourselves in return).
    fn check_server(
        &self,
        addr: &ServerAddr,
        hostname: &Hostname,
        service: Service,
    ) -> Result<ServerFeatures> {
        debug!("checking service {:?} {:?}", addr, service);
        // SSL connections to clearnet hosts use the hostname for cert checks.
        let server_url = match (addr, service) {
            (ServerAddr::Clearnet(ip), Service::Tcp(port)) => format!("tcp://{}:{}", ip, port),
            (ServerAddr::Clearnet(_), Service::Ssl(port)) => format!("ssl://{}:{}", hostname, port),
            (ServerAddr::Onion(onion_host), Service::Tcp(port)) => {
                format!("tcp://{}:{}", onion_host, port)
            }
            (ServerAddr::Onion(onion_host), Service::Ssl(port)) => {
                format!("ssl://{}:{}", onion_host, port)
            }
        };
        let mut config = electrum_client::ConfigBuilder::new();
        // Onion hosts are only reachable through the configured Tor SOCKS5 proxy.
        if let ServerAddr::Onion(_) = addr {
            let socks = electrum_client::Socks5Config::new(
                self.tor_proxy
                    .chain_err(|| "no tor proxy configured, onion hosts are unsupported")?,
            );
            config = config.socks5(Some(socks)).unwrap()
        }
        let client = Client::from_config(&server_url, config.build())?;
        let features = client.server_features()?.try_into()?;
        self.verify_compatibility(&features)?;
        if self.announce {
            // XXX should we require the other side to reciprocate?
            ensure!(
                client.server_add_peer(&self.our_features)?,
                "server does not reciprocate"
            );
        }
        Ok(features)
    }
    /// Check network, protocol version range and hash function compatibility.
    fn verify_compatibility(&self, features: &ServerFeatures) -> Result<()> {
        ensure!(
            features.genesis_hash == self.our_features.genesis_hash,
            "incompatible networks"
        );
        ensure!(
            features.protocol_min <= self.our_version && features.protocol_max >= self.our_version,
            "incompatible protocol versions"
        );
        ensure!(
            features.hash_function == "sha256",
            "incompatible hash function"
        );
        Ok(())
    }
    /// Spawn the background thread that drains the health-check queue.
    pub fn spawn_jobs_thread(manager: Arc<DiscoveryManager>) {
        spawn_thread("discovery-jobs", move || loop {
            if let Err(e) = manager.run_health_check() {
                debug!("health check failed: {:?}", e);
            }
            // XXX use a dynamic JOB_INTERVAL, adjusted according to the queue size and HEALTH_CHECK_FREQ?
            thread::sleep(JOB_INTERVAL);
        });
    }
}
impl Server {
    /// Create a server entry with no known-healthy services yet.
    fn new(hostname: Hostname, features: ServerFeatures) -> Self {
        Server {
            services: HashSet::new(),
            hostname,
            features,
        }
    }
    /// Features and services in the compact string-array format used by
    /// `servers.peers.subscribe`: "v<protocol_max>", optional "p<pruning>",
    /// then one "t<port>"/"s<port>" entry per service.
    fn feature_strs(&self) -> Vec<String> {
        let mut entries = Vec::with_capacity(self.services.len() + 1);
        entries.push(format!("v{}", self.features.protocol_max));
        if let Some(pruning) = self.features.pruning {
            entries.push(format!("p{}", pruning));
        }
        for service in self.services.iter() {
            entries.push(service.to_string());
        }
        entries
    }
}
impl ServerAddr {
    /// Resolve a host string: `.onion` names are kept as-is, IP literals are
    /// parsed directly, anything else goes through DNS (first result wins).
    fn resolve(host: &str) -> Result<Self> {
        Ok(if host.ends_with(".onion") {
            ServerAddr::Onion(host.into())
        } else if let Ok(ip) = IpAddr::from_str(host) {
            ServerAddr::Clearnet(ip)
        } else {
            // ToSocketAddrs requires a port; ":1" is a dummy for resolution only.
            let ip = format!("{}:1", host)
                .to_socket_addrs()
                .chain_err(|| "hostname resolution failed")?
                .next()
                .chain_err(|| "hostname resolution failed")?
                .ip();
            ServerAddr::Clearnet(ip)
        })
    }
}
impl fmt::Display for ServerAddr {
    /// Clearnet addresses display as the bare IP, onion ones as the hostname.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let displayed: &dyn fmt::Display = match self {
            ServerAddr::Clearnet(ip) => ip,
            ServerAddr::Onion(hostname) => hostname,
        };
        write!(f, "{}", displayed)
    }
}
impl serde::Serialize for ServerAddr {
    /// Serialize as the plain display string (IP or onion hostname).
    fn serialize<S>(&self, serializer: S) -> std::result::Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.serialize_str(&self.to_string())
    }
}
impl HealthCheck {
    fn new(
        addr: ServerAddr,
        hostname: Hostname,
        service: Service,
        added_by: Option<IpAddr>,
    ) -> Self {
        HealthCheck {
            addr,
            hostname,
            service,
            // default servers are the ones not added via a peer request
            is_default: added_by.is_none(),
            added_by,
            last_check: None,
            last_healthy: None,
            consecutive_failures: 0,
        }
    }
    /// A job is healthy iff its most recent check succeeded.
    fn is_healthy(&self) -> bool {
        match (self.last_check, self.last_healthy) {
            (Some(last_check), Some(last_healthy)) => last_check == last_healthy,
            _ => false,
        }
    }
    // allow the server to fail up to MAX_CONSECUTIVE_FAILURES times before giving up on it.
    // if its a non-default server and the very first attempt fails, give up immediately.
    fn should_retry(&self) -> bool {
        (self.last_healthy.is_some() || self.is_default)
            && self.consecutive_failures < MAX_CONSECUTIVE_FAILURES
    }
}
impl PartialEq for HealthCheck {
    /// Jobs are identified by (hostname, service); the resolved address and
    /// check timestamps are not compared.
    fn eq(&self, other: &Self) -> bool {
        self.hostname == other.hostname && self.service == other.service
    }
}
impl Ord for HealthCheck {
    /// Reversed comparison on `last_check`, so the max-heap `queue` pops the
    /// least recently checked job first (never-checked jobs sort earliest).
    fn cmp(&self, other: &Self) -> Ordering {
        self.last_check.cmp(&other.last_check).reverse()
    }
}
impl PartialOrd for HealthCheck {
    /// Delegates to the total order in `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl fmt::Display for Service {
    /// Compact service notation: "t<port>" for TCP, "s<port>" for SSL.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let (prefix, port) = match self {
            Service::Tcp(port) => ('t', port),
            Service::Ssl(port) => ('s', port),
        };
        write!(f, "{}{}", prefix, port)
    }
}
/// Whether `addr` looks like a publicly-routable server address that other
/// peers could plausibly reach. Onion services are always considered remote.
fn is_remote_addr(addr: &ServerAddr) -> bool {
    match addr {
        ServerAddr::Onion(_) => true,
        ServerAddr::Clearnet(ip) => {
            !ip.is_loopback()
                && !ip.is_unspecified()
                && !ip.is_multicast()
                && !match ip {
                    IpAddr::V4(ipv4) => ipv4.is_private(),
                    // Previously every IPv6 address passed this check; also
                    // reject unique-local (fc00::/7, RFC 4193) and link-local
                    // (fe80::/10) ranges, which are not publicly routable.
                    IpAddr::V6(ipv6) => {
                        let seg0 = ipv6.segments()[0];
                        (seg0 & 0xfe00) == 0xfc00 || (seg0 & 0xffc0) == 0xfe80
                    }
                }
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use crate::chain::genesis_hash;
    use crate::chain::Network;
    use std::time;
    use crate::config::VERSION_STRING;
    const PROTOCOL_VERSION: ProtocolVersion = ProtocolVersion::new(1, 4);
    #[test]
    #[ignore = "This test requires external connection to server that no longer exists"]
    fn test() -> Result<()> {
        stderrlog::new().verbosity(4).init().unwrap();
        #[cfg(feature = "liquid")]
        let testnet = Network::LiquidTestnet;
        #[cfg(not(feature = "liquid"))]
        let testnet = Network::Testnet;
        // Advertise a dummy host so the manager has "own" features to compare against.
        let features = ServerFeatures {
            hosts: serde_json::from_str("{\"test.foobar.example\":{\"tcp_port\":60002}}").unwrap(),
            server_version: VERSION_STRING.clone(),
            genesis_hash: genesis_hash(testnet),
            protocol_min: PROTOCOL_VERSION,
            protocol_max: PROTOCOL_VERSION,
            hash_function: "sha256".into(),
            pruning: None,
        };
        let discovery = Arc::new(DiscoveryManager::new(
            testnet,
            features,
            PROTOCOL_VERSION,
            false,
            None,
        ));
        discovery
            .add_default_server(
                "electrum.blockstream.info".into(),
                vec![Service::Tcp(60001)],
            )
            .unwrap();
        discovery
            .add_default_server("testnet.hsmiths.com".into(), vec![Service::Ssl(53012)])
            .unwrap();
        discovery
            .add_default_server(
                "tn.not.fyi".into(),
                vec![Service::Tcp(55001), Service::Ssl(55002)],
            )
            .unwrap();
        discovery
            .add_default_server(
                "electrum.blockstream.info".into(),
                vec![Service::Tcp(60001), Service::Ssl(60002)],
            )
            .unwrap();
        discovery
            .add_default_server(
                "explorerzydxu5ecjrkwceayqybizmpjjznk5izmitf2modhcusuqlid.onion".into(),
                vec![Service::Tcp(143)],
            )
            .unwrap();
        debug!("{:#?}", discovery);
        // Drive a few health-check iterations manually instead of spawning
        // the background jobs thread.
        for _ in 0..12 {
            discovery
                .run_health_check()
                .map_err(|e| warn!("{:?}", e))
                .ok();
            thread::sleep(time::Duration::from_secs(1));
        }
        debug!("{:#?}", discovery);
        info!("{}", json!(discovery.get_servers()));
        Ok(())
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/electrum/client.rs | src/electrum/client.rs | use std::collections::HashMap;
use std::convert::TryFrom;
use bitcoin::hashes::Hash;
pub use electrum_client::client::Client;
pub use electrum_client::ServerFeaturesRes;
use crate::chain::BlockHash;
use crate::electrum::ServerFeatures;
use crate::errors::{Error, ResultExt};
// Convert from electrum-client's server features struct to ours. We're using a different struct because
// the electrum-client's one doesn't support the "hosts" key.
impl TryFrom<ServerFeaturesRes> for ServerFeatures {
    type Error = Error;
    fn try_from(mut features: ServerFeaturesRes) -> Result<Self, Self::Error> {
        // NOTE(review): electrum-client appears to hand back the genesis hash
        // in reversed (display) byte order, hence the flip — confirm against
        // the electrum-client docs.
        features.genesis_hash.reverse();
        Ok(ServerFeatures {
            // electrum-client doesn't retain the hosts map data, but we already have it from the add_peer request
            hosts: HashMap::new(),
            genesis_hash: BlockHash::from_inner(features.genesis_hash),
            server_version: features.server_version,
            protocol_min: features
                .protocol_min
                .parse()
                .chain_err(|| "invalid protocol_min")?,
            protocol_max: features
                .protocol_max
                .parse()
                .chain_err(|| "invalid protocol_max")?,
            pruning: features.pruning.map(|pruning| pruning as usize),
            hash_function: features
                .hash_function
                .chain_err(|| "missing hash_function")?,
        })
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/electrum/mod.rs | src/electrum/mod.rs | mod server;
pub use server::RPC;
#[cfg(feature = "electrum-discovery")]
mod client;
#[cfg(feature = "electrum-discovery")]
mod discovery;
#[cfg(feature = "electrum-discovery")]
pub use {client::Client, discovery::DiscoveryManager};
use std::cmp::Ordering;
use std::collections::HashMap;
use std::str::FromStr;
use serde::{de, Deserialize, Deserializer, Serialize};
use crate::chain::BlockHash;
use crate::errors::ResultExt;
use crate::util::BlockId;
/// Electrum protocol height encoding: confirmed transactions report their
/// block height, mempool transactions report 0, and mempool transactions
/// with unconfirmed parents report -1.
pub fn get_electrum_height(blockid: Option<BlockId>, has_unconfirmed_parents: bool) -> isize {
    if let Some(blockid) = blockid {
        blockid.height as isize
    } else if has_unconfirmed_parents {
        -1
    } else {
        0
    }
}
/// TCP/SSL port number.
pub type Port = u16;
/// DNS hostname, IP literal string, or .onion address.
pub type Hostname = String;
/// Map of hostname -> exposed ports, as used in the features "hosts" key.
pub type ServerHosts = HashMap<Hostname, ServerPorts>;
/// Payload of the Electrum `server.features` call.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ServerFeatures {
    pub hosts: ServerHosts,
    pub genesis_hash: BlockHash,
    pub server_version: String,
    pub protocol_min: ProtocolVersion,
    pub protocol_max: ProtocolVersion,
    pub pruning: Option<usize>,
    pub hash_function: String,
}
/// Ports a host exposes, per transport.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct ServerPorts {
    tcp_port: Option<Port>,
    ssl_port: Option<Port>,
}
/// Electrum protocol version ("major.minor").
#[derive(Eq, PartialEq, Debug, Clone, Default)]
pub struct ProtocolVersion {
    major: usize,
    minor: usize,
}
impl ProtocolVersion {
    /// Const constructor, usable in constant contexts.
    pub const fn new(major: usize, minor: usize) -> Self {
        Self { major, minor }
    }
}
impl Ord for ProtocolVersion {
    /// Lexicographic ordering: major first, then minor.
    fn cmp(&self, other: &Self) -> Ordering {
        (self.major, self.minor).cmp(&(other.major, other.minor))
    }
}
impl PartialOrd for ProtocolVersion {
    /// Delegates to the total order in `Ord`.
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
impl FromStr for ProtocolVersion {
    type Err = crate::errors::Error;
    /// Parse a "major.minor" string. NOTE(review): any extra dotted
    /// components (e.g. "1.4.2") are silently ignored — confirm intended.
    fn from_str(s: &str) -> Result<Self, Self::Err> {
        let mut iter = s.split('.');
        Ok(Self {
            major: iter
                .next()
                .chain_err(|| "missing major")?
                .parse()
                .chain_err(|| "invalid major")?,
            minor: iter
                .next()
                .chain_err(|| "missing minor")?
                .parse()
                .chain_err(|| "invalid minor")?,
        })
    }
}
impl std::fmt::Display for ProtocolVersion {
    /// Formats as "major.minor", matching the `FromStr` representation.
    fn fmt(&self, f: &mut std::fmt::Formatter<'_>) -> std::fmt::Result {
        write!(f, "{}.{}", self.major, self.minor)
    }
}
impl Serialize for ProtocolVersion {
    /// Serialized as the "major.minor" display string.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: serde::Serializer,
    {
        serializer.collect_str(&self)
    }
}
impl<'de> Deserialize<'de> for ProtocolVersion {
    /// Deserialized from a "major.minor" string via `FromStr`.
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        let s = String::deserialize(deserializer)?;
        FromStr::from_str(&s).map_err(de::Error::custom)
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/electrum/server.rs | src/electrum/server.rs | use std::collections::HashMap;
use std::convert::TryInto;
use std::fs;
use std::io::{BufRead, BufReader, Read, Write};
#[cfg(feature = "electrum-discovery")]
use std::net::IpAddr;
use std::net::{Shutdown, SocketAddr, TcpListener, TcpStream};
use std::os::unix::fs::FileTypeExt;
use std::os::unix::net::{UnixListener, UnixStream};
use std::path::Path;
use std::sync::atomic::AtomicBool;
use std::sync::mpsc::{Receiver, Sender};
use std::sync::{Arc, Mutex};
use std::thread;
use bitcoin::hashes::sha256d::Hash as Sha256dHash;
use error_chain::ChainedError;
use hex;
use serde_json::{from_str, Value};
use sha2::{Digest, Sha256};
#[cfg(not(feature = "liquid"))]
use bitcoin::consensus::encode::serialize;
#[cfg(feature = "liquid")]
use elements::encode::serialize;
use crate::chain::Txid;
use crate::config::{Config, VERSION_STRING};
use crate::electrum::{get_electrum_height, ProtocolVersion};
use crate::errors::*;
use crate::metrics::{Gauge, HistogramOpts, HistogramVec, MetricOpts, Metrics};
use crate::new_index::{Query, Utxo};
use crate::util::electrum_merkle::{get_header_merkle_proof, get_id_from_pos, get_tx_merkle_proof};
use crate::util::{
create_socket, full_hash, spawn_thread, BlockId, BoolThen, Channel, FullHash, HeaderEntry,
SyncChannel,
};
const PROTOCOL_VERSION: ProtocolVersion = ProtocolVersion::new(1, 4);
const MAX_HEADERS: usize = 2016;
#[cfg(feature = "electrum-discovery")]
use crate::electrum::{DiscoveryManager, ServerFeatures};
// TODO: Sha256dHash should be a generic hash-container (since script hash is single SHA256)
/// Extract a hex-encoded hash given as a JSON string parameter.
fn hash_from_value(val: Option<&Value>) -> Result<Sha256dHash> {
    let raw = val.chain_err(|| "missing hash")?;
    let hex_str = raw.as_str().chain_err(|| "non-string hash")?;
    hex_str.parse().chain_err(|| "non-hex hash")
}
/// Extract an unsigned integer JSON parameter, naming it in any error.
fn usize_from_value(val: Option<&Value>, name: &str) -> Result<usize> {
    let value = val.chain_err(|| format!("missing {}", name))?;
    let number = value.as_u64().chain_err(|| format!("non-integer {}", name))?;
    Ok(number as usize)
}
/// Like `usize_from_value`, but a missing parameter yields `default`.
fn usize_from_value_or(val: Option<&Value>, name: &str, default: usize) -> Result<usize> {
    match val {
        None => Ok(default),
        some => usize_from_value(some, name),
    }
}
/// Extract a boolean JSON parameter, naming it in any error.
fn bool_from_value(val: Option<&Value>, name: &str) -> Result<bool> {
    val.chain_err(|| format!("missing {}", name))?
        .as_bool()
        .chain_err(|| format!("not a bool {}", name))
}
/// Like `bool_from_value`, but a missing parameter yields `default`.
fn bool_from_value_or(val: Option<&Value>, name: &str, default: bool) -> Result<bool> {
    match val {
        None => Ok(default),
        some => bool_from_value(some, name),
    }
}
// TODO: implement caching and delta updates
/// Electrum scripthash status: sha256 over the concatenation of
/// "txid:height:" entries, or None for an empty history.
fn get_status_hash(txs: Vec<(Txid, Option<BlockId>)>, query: &Query) -> Option<FullHash> {
    if txs.is_empty() {
        return None;
    }
    let mut hasher = Sha256::new();
    for (txid, blockid) in txs {
        // Unconfirmed-parent lookup only applies to mempool transactions.
        let has_unconfirmed_parents =
            blockid.is_none() && query.has_unconfirmed_parents(&txid);
        let height = get_electrum_height(blockid, has_unconfirmed_parents);
        hasher.update(format!("{}:{}:", txid, height).as_bytes());
    }
    let digest = hasher.finalize();
    Some(digest[..].try_into().expect("SHA256 size is 32 bytes"))
}
/// JSON-RPC 2.0 reserved error codes
/// (https://www.jsonrpc.org/specification#error_object).
#[repr(i16)]
#[derive(Clone, Copy, PartialEq, Eq)]
enum JsonRpcV2Error {
    ParseError = -32700,
    InvalidRequest = -32600,
    MethodNotFound = -32601,
    InternalError = -32603,
}
impl JsonRpcV2Error {
    /// Numeric code for inclusion in a JSON-RPC error response.
    #[inline]
    fn into_i16(self) -> i16 {
        self as i16
    }
}
/// Per-client Electrum RPC connection state.
struct Connection {
    query: Arc<Query>,
    // last tip reported to this client via blockchain.headers.subscribe
    last_header_entry: Option<HeaderEntry>,
    status_hashes: HashMap<Sha256dHash, Value>, // ScriptHash -> StatusHash
    stream: ConnectionStream,
    chan: SyncChannel<Message>,
    stats: Arc<Stats>,
    // limit passed to get_history for scripthash queries
    txs_limit: usize,
    die_please: Option<Receiver<()>>,
    #[cfg(feature = "electrum-discovery")]
    discovery: Option<Arc<DiscoveryManager>>,
}
impl Connection {
/// Build the state for a single client connection; `txs_limit` is passed
/// as the limit to scripthash history lookups.
pub fn new(
    query: Arc<Query>,
    stream: ConnectionStream,
    stats: Arc<Stats>,
    txs_limit: usize,
    die_please: Receiver<()>,
    #[cfg(feature = "electrum-discovery")] discovery: Option<Arc<DiscoveryManager>>,
) -> Connection {
    Connection {
        query,
        last_header_entry: None, // disable header subscription for now
        status_hashes: HashMap::new(),
        stream,
        chan: SyncChannel::new(10),
        stats,
        txs_limit,
        die_please: Some(die_please),
        #[cfg(feature = "electrum-discovery")]
        discovery,
    }
}
/// `blockchain.headers.subscribe`: report the current tip and remember it
/// in `last_header_entry` as the last tip sent to this client.
fn blockchain_headers_subscribe(&mut self) -> Result<Value> {
    let entry = self.query.chain().best_header();
    let hex_header = hex::encode(serialize(entry.header()));
    let result = json!({"hex": hex_header, "height": entry.height()});
    self.last_header_entry = Some(entry);
    Ok(result)
}
/// `server.version`: [software version, protocol version] pair.
fn server_version(&self) -> Result<Value> {
    Ok(json!([VERSION_STRING.as_str(), PROTOCOL_VERSION]))
}
/// `server.banner`: the static banner string from the configuration.
fn server_banner(&self) -> Result<Value> {
    Ok(json!(self.query.config().electrum_banner.clone()))
}
/// `server.features`: our advertised features (discovery builds only).
#[cfg(feature = "electrum-discovery")]
fn server_features(&self) -> Result<Value> {
    let discovery = self
        .discovery
        .as_ref()
        .chain_err(|| "discovery is disabled")?;
    Ok(json!(discovery.our_features()))
}
/// `server.donation_address`: none configured — always null.
fn server_donation_address(&self) -> Result<Value> {
    Ok(Value::Null)
}
/// `server.peers.subscribe`: healthy peers when discovery is enabled,
/// otherwise an empty list.
fn server_peers_subscribe(&self) -> Result<Value> {
    #[cfg(feature = "electrum-discovery")]
    let servers = self
        .discovery
        .as_ref()
        .map_or_else(|| json!([]), |d| json!(d.get_servers()));
    #[cfg(not(feature = "electrum-discovery"))]
    let servers = json!([]);
    Ok(servers)
}
/// `server.add_peer`: queue the advertised server for health checking.
/// The peer's source IP is required to validate its advertised addresses.
#[cfg(feature = "electrum-discovery")]
fn server_add_peer(&self, params: &[Value]) -> Result<Value> {
    let ip = self
        .stream
        .ip()
        .ok_or(Error::from("Can't add peer with Unix sockets enabled"))?;
    let discovery = self
        .discovery
        .as_ref()
        .chain_err(|| "discovery is disabled")?;
    let features = params
        .first()
        .chain_err(|| "missing features param")?
        .clone();
    let features = serde_json::from_value(features).chain_err(|| "invalid features")?;
    discovery.add_server_request(ip, features)?;
    Ok(json!(true))
}
/// `mempool.get_fee_histogram`: precomputed by the mempool backlog stats.
fn mempool_get_fee_histogram(&self) -> Result<Value> {
    Ok(json!(&self.query.mempool().backlog_stats().fee_histogram))
}
/// `blockchain.block.header`: raw header at `height`; with `cp_height` > 0
/// a merkle proof up to that checkpoint is included.
fn blockchain_block_header(&self, params: &[Value]) -> Result<Value> {
    let height = usize_from_value(params.first(), "height")?;
    let cp_height = usize_from_value_or(params.get(1), "cp_height", 0)?;
    let raw_header_hex: String = self
        .query
        .chain()
        .header_by_height(height)
        .map(|entry| hex::encode(serialize(entry.header())))
        .chain_err(|| "missing header")?;
    // Without a checkpoint the bare hex string is the whole response.
    if cp_height == 0 {
        return Ok(json!(raw_header_hex));
    }
    let (branch, root) = get_header_merkle_proof(self.query.chain(), height, cp_height)?;
    Ok(json!({
        "header": raw_header_hex,
        "root": root,
        "branch": branch
    }))
}
/// `blockchain.block.headers`: up to MAX_HEADERS concatenated raw headers
/// starting at `start_height`; with `cp_height` > 0, include a merkle proof
/// for the last header in the range.
fn blockchain_block_headers(&self, params: &[Value]) -> Result<Value> {
    let start_height = usize_from_value(params.first(), "start_height")?;
    let count = MAX_HEADERS.min(usize_from_value(params.get(1), "count")?);
    let cp_height = usize_from_value_or(params.get(2), "cp_height", 0)?;
    // Iterate the height range directly instead of materializing an
    // intermediate Vec<usize> (avoids a needless allocation).
    let headers: Vec<String> = (start_height..(start_height + count))
        .filter_map(|height| {
            // Heights past the tip simply yield no header.
            self.query
                .chain()
                .header_by_height(height)
                .map(|entry| hex::encode(serialize(entry.header())))
        })
        .collect();
    if count == 0 || cp_height == 0 {
        return Ok(json!({
            "count": headers.len(),
            "hex": headers.join(""),
            "max": MAX_HEADERS,
        }));
    }
    let (branch, root) =
        get_header_merkle_proof(self.query.chain(), start_height + (count - 1), cp_height)?;
    Ok(json!({
        "count": headers.len(),
        "hex": headers.join(""),
        "max": MAX_HEADERS,
        "root": root,
        "branch" : branch,
    }))
}
/// `blockchain.estimatefee`: fee rate for confirmation within N blocks.
fn blockchain_estimatefee(&self, params: &[Value]) -> Result<Value> {
    let conf_target = usize_from_value(params.first(), "blocks_count")?;
    let fee_rate = self
        .query
        .estimate_fee(conf_target as u16)
        .chain_err(|| format!("cannot estimate fee for {} blocks", conf_target))?;
    // convert from sat/b to BTC/kB, as expected by Electrum clients
    Ok(json!(fee_rate / 100_000f64))
}
/// `blockchain.relayfee`: the daemon's minimum relay fee.
fn blockchain_relayfee(&self) -> Result<Value> {
    let relayfee = self.query.get_relayfee()?;
    // convert from sat/b to BTC/kB, as expected by Electrum clients
    Ok(json!(relayfee / 100_000f64))
}
/// `blockchain.scripthash.subscribe`: register the scripthash for status
/// notifications and return its current status hash (null when no history).
fn blockchain_scripthash_subscribe(&mut self, params: &[Value]) -> Result<Value> {
    let script_hash = hash_from_value(params.first()).chain_err(|| "bad script_hash")?;
    let history_txids = get_history(&self.query, &script_hash[..], self.txs_limit)?;
    let status_hash = get_status_hash(history_txids, &self.query)
        .map_or(Value::Null, |h| json!(hex::encode(full_hash(&h[..]))));
    // Only bump the subscription gauge on the first subscription for this hash.
    if self
        .status_hashes
        .insert(script_hash, status_hash.clone())
        .is_none()
    {
        self.stats.subscriptions.inc();
    }
    Ok(status_hash)
}
/// `blockchain.scripthash.unsubscribe`: drop a scripthash subscription.
/// Replies `true` when the connection was actually subscribed to it.
fn blockchain_scripthash_unsubscribe(&mut self, params: &[Value]) -> Result<Value> {
    let script_hash = hash_from_value(params.first()).chain_err(|| "bad script_hash")?;
    match self.status_hashes.remove(&script_hash) {
        Some(_) => {
            // One fewer live subscription on this connection.
            self.stats.subscriptions.dec();
            Ok(Value::Bool(true))
        }
        None => Ok(Value::Bool(false)),
    }
}
#[cfg(not(feature = "liquid"))]
/// `blockchain.scripthash.get_balance` (single-asset chains only):
/// confirmed and unconfirmed balances of a scripthash.
fn blockchain_scripthash_get_balance(&self, params: &[Value]) -> Result<Value> {
    let script_hash = hash_from_value(params.first()).chain_err(|| "bad script_hash")?;
    let (chain_stats, mempool_stats) = self.query.stats(&script_hash[..]);
    Ok(json!({
        "confirmed": chain_stats.funded_txo_sum - chain_stats.spent_txo_sum,
        // The mempool delta can be negative (spends of confirmed outputs),
        // hence the signed arithmetic.
        "unconfirmed": mempool_stats.funded_txo_sum as i64 - mempool_stats.spent_txo_sum as i64,
    }))
}
/// `blockchain.scripthash.get_history`: confirmed and mempool history
/// entries for a scripthash, with fees attached to mempool entries.
fn blockchain_scripthash_get_history(&self, params: &[Value]) -> Result<Value> {
    let script_hash = hash_from_value(params.first()).chain_err(|| "bad script_hash")?;
    let history_txids = get_history(&self.query, &script_hash[..], self.txs_limit)?;
    Ok(json!(history_txids
        .into_iter()
        .map(|(txid, blockid)| {
            let is_mempool = blockid.is_none();
            // `bool` has no `and_then`; use `bool::then` so the mempool
            // lookups run only for unconfirmed transactions.
            let fee = is_mempool
                .then(|| self.query.get_mempool_tx_fee(&txid))
                .flatten();
            let has_unconfirmed_parents = is_mempool
                .then(|| self.query.has_unconfirmed_parents(&txid))
                .unwrap_or(false);
            let height = get_electrum_height(blockid, has_unconfirmed_parents);
            GetHistoryResult { txid, height, fee }
        })
        .collect::<Vec<_>>()))
}
/// `blockchain.scripthash.listunspent`: UTXOs of a scripthash.
/// Unconfirmed outputs are reported with height 0.
fn blockchain_scripthash_listunspent(&self, params: &[Value]) -> Result<Value> {
    let script_hash = hash_from_value(params.first()).chain_err(|| "bad script_hash")?;
    let utxos = self.query.utxo(&script_hash[..])?;
    let to_json = |utxo: Utxo| {
        let json = json!({
            "height": utxo.confirmed.map_or(0, |b| b.height),
            "tx_pos": utxo.vout,
            "tx_hash": utxo.txid,
            "value": utxo.value,
        });
        // On Liquid builds, augment each entry with its asset id and nonce.
        #[cfg(feature = "liquid")]
        let json = {
            let mut json = json;
            json["asset"] = json!(utxo.asset);
            json["nonce"] = json!(utxo.nonce);
            json
        };
        json
    };
    Ok(json!(Value::Array(
        utxos.into_iter().map(to_json).collect()
    )))
}
/// `blockchain.transaction.broadcast`: relay a raw transaction (hex) and
/// reply with its txid; also nudges the periodic-update loop so the
/// client's subscriptions reflect the new mempool state promptly.
fn blockchain_transaction_broadcast(&self, params: &[Value]) -> Result<Value> {
    let raw_tx = params
        .first()
        .chain_err(|| "missing tx")?
        .as_str()
        .chain_err(|| "non-string tx")?
        .to_string();
    let txid = self.query.broadcast_raw(&raw_tx)?;
    // Best-effort: a full channel just means an update is already queued.
    if let Err(e) = self.chan.sender().try_send(Message::PeriodicUpdate) {
        warn!("failed to issue PeriodicUpdate after broadcast: {}", e);
    }
    Ok(json!(txid))
}
/// `blockchain.transaction.get`: raw transaction hex by txid.
/// The Electrum `verbose` flag is not implemented and yields an error.
fn blockchain_transaction_get(&self, params: &[Value]) -> Result<Value> {
    let tx_hash = Txid::from(hash_from_value(params.first()).chain_err(|| "bad tx_hash")?);
    let verbose = match params.get(1) {
        Some(value) => value.as_bool().chain_err(|| "non-bool verbose value")?,
        None => false,
    };
    // FIXME: implement verbose support
    if verbose {
        bail!("verbose transactions are currently unsupported");
    }
    let tx = self
        .query
        .lookup_raw_txn(&tx_hash)
        .chain_err(|| "missing transaction")?;
    Ok(json!(hex::encode(tx)))
}
/// `blockchain.transaction.get_merkle`: merkle branch proving inclusion of
/// `tx_hash` in the block at the client-supplied `height`.
fn blockchain_transaction_get_merkle(&self, params: &[Value]) -> Result<Value> {
    let txid = Txid::from(hash_from_value(params.first()).chain_err(|| "bad tx_hash")?);
    let height = usize_from_value(params.get(1), "height")?;
    let blockid = self
        .query
        .chain()
        .tx_confirming_block(&txid)
        .ok_or("tx not found or is unconfirmed")?;
    // The client's height must match the actual confirming block.
    if blockid.height != height {
        bail!("invalid confirmation height provided");
    }
    let (merkle, pos) = get_tx_merkle_proof(self.query.chain(), &txid, &blockid.hash)
        .chain_err(|| "cannot create merkle proof")?;
    Ok(json!({
        "block_height": blockid.height,
        "merkle": merkle,
        "pos": pos}))
}
/// `blockchain.transaction.id_from_pos`: txid at `(height, tx_pos)`,
/// optionally with a merkle branch when the `merkle` flag is set.
fn blockchain_transaction_id_from_pos(&self, params: &[Value]) -> Result<Value> {
    let height = usize_from_value(params.first(), "height")?;
    let tx_pos = usize_from_value(params.get(1), "tx_pos")?;
    let want_merkle = bool_from_value_or(params.get(2), "merkle", false)?;
    let (txid, merkle) = get_id_from_pos(self.query.chain(), height, tx_pos, want_merkle)?;
    if want_merkle {
        Ok(json!({
            "tx_hash": txid,
            "merkle" : merkle}))
    } else {
        Ok(json!(txid))
    }
}
/// Dispatch one JSON-RPC call to its handler, timing it under the method's
/// latency label, and wrap the outcome in a JSON-RPC 2.0 response object.
///
/// Always returns `Ok`: handler failures (and unknown methods) are encoded
/// as JSON-RPC error responses rather than propagated.
fn handle_command(&mut self, method: &str, params: &[Value], id: &Value) -> Result<Value> {
    let timer = self
        .stats
        .latency
        .with_label_values(&[method])
        .start_timer();
    let result = match method {
        "blockchain.block.header" => self.blockchain_block_header(params),
        "blockchain.block.headers" => self.blockchain_block_headers(params),
        "blockchain.estimatefee" => self.blockchain_estimatefee(params),
        "blockchain.headers.subscribe" => self.blockchain_headers_subscribe(),
        "blockchain.relayfee" => self.blockchain_relayfee(),
        // get_balance is only meaningful on single-asset (non-Liquid) chains.
        #[cfg(not(feature = "liquid"))]
        "blockchain.scripthash.get_balance" => self.blockchain_scripthash_get_balance(params),
        "blockchain.scripthash.get_history" => self.blockchain_scripthash_get_history(params),
        "blockchain.scripthash.listunspent" => self.blockchain_scripthash_listunspent(params),
        "blockchain.scripthash.subscribe" => self.blockchain_scripthash_subscribe(params),
        "blockchain.scripthash.unsubscribe" => self.blockchain_scripthash_unsubscribe(params),
        "blockchain.transaction.broadcast" => self.blockchain_transaction_broadcast(params),
        "blockchain.transaction.get" => self.blockchain_transaction_get(params),
        "blockchain.transaction.get_merkle" => self.blockchain_transaction_get_merkle(params),
        "blockchain.transaction.id_from_pos" => self.blockchain_transaction_id_from_pos(params),
        "mempool.get_fee_histogram" => self.mempool_get_fee_histogram(),
        "server.banner" => self.server_banner(),
        "server.donation_address" => self.server_donation_address(),
        "server.peers.subscribe" => self.server_peers_subscribe(),
        "server.ping" => Ok(Value::Null),
        "server.version" => self.server_version(),
        #[cfg(feature = "electrum-discovery")]
        "server.features" => self.server_features(),
        #[cfg(feature = "electrum-discovery")]
        "server.add_peer" => self.server_add_peer(params),
        &_ => {
            warn!("rpc unknown method #{} {} {:?}", id, method, params);
            return Ok(json_rpc_error(
                format!("Method {method} not found"),
                Some(id),
                JsonRpcV2Error::MethodNotFound,
            ));
        }
    };
    timer.observe_duration();
    // TODO: return application errors should be sent to the client
    Ok(match result {
        Ok(result) => json!({"jsonrpc": "2.0", "id": id, "result": result}),
        Err(e) => {
            warn!(
                "rpc #{} {} {:?} failed: {}",
                id,
                method,
                params,
                e.display_chain()
            );
            json_rpc_error(e, Some(id), JsonRpcV2Error::InternalError)
        }
    })
}
/// Compute pending push-notifications for this connection.
///
/// Emits a `blockchain.headers.subscribe` message when the chain tip moved
/// since the last check, plus one `blockchain.scripthash.subscribe` message
/// per subscribed scripthash whose status hash changed. Time spent is
/// recorded under the "periodic_update" latency label.
fn update_subscriptions(&mut self) -> Result<Vec<Value>> {
    let timer = self
        .stats
        .latency
        .with_label_values(&["periodic_update"])
        .start_timer();
    let mut result = vec![];
    // Header notifications only apply once the client has called
    // blockchain.headers.subscribe (last_header_entry is Some).
    if let Some(ref mut last_entry) = self.last_header_entry {
        let entry = self.query.chain().best_header();
        if *last_entry != entry {
            *last_entry = entry;
            let hex_header = hex::encode(serialize(last_entry.header()));
            let header = json!({"hex": hex_header, "height": last_entry.height()});
            result.push(json!({
                "jsonrpc": "2.0",
                "method": "blockchain.headers.subscribe",
                "params": [header]}));
        }
    }
    for (script_hash, status_hash) in self.status_hashes.iter_mut() {
        let history_txids = get_history(&self.query, &script_hash[..], self.txs_limit)?;
        let new_status_hash = get_status_hash(history_txids, &self.query)
            .map_or(Value::Null, |h| json!(hex::encode(full_hash(&h[..]))));
        if new_status_hash == *status_hash {
            continue;
        }
        result.push(json!({
            "jsonrpc": "2.0",
            "method": "blockchain.scripthash.subscribe",
            "params": [script_hash, new_status_hash]}));
        // Remember what we just notified to avoid duplicate pushes.
        *status_hash = new_status_hash;
    }
    timer.observe_duration();
    Ok(result)
}
/// Write each JSON value to the client as one newline-terminated line.
fn send_values(&mut self, values: &[Value]) -> Result<()> {
    for value in values {
        let mut line = value.to_string();
        line.push('\n');
        self.stream
            .write_all(line.as_bytes())
            .chain_err(|| format!("failed to send {}", value))?;
    }
    Ok(())
}
/// Connection event loop: serve queued requests and periodic updates until
/// the peer disconnects (`Message::Done`) or a shutdown signal arrives.
fn handle_replies(&mut self, shutdown: crossbeam_channel::Receiver<()>) -> Result<()> {
    loop {
        crossbeam_channel::select! {
            recv(self.chan.receiver()) -> msg => {
                let msg = msg.chain_err(|| "channel closed")?;
                trace!("RPC {:?}", msg);
                match msg {
                    // One raw request line from the reader thread.
                    Message::Request(line) => {
                        let result = self.handle_line(&line);
                        self.send_values(&[result])?
                    }
                    // Push subscription updates (new tip / status changes).
                    Message::PeriodicUpdate => {
                        let values = self
                            .update_subscriptions()
                            .chain_err(|| "failed to update subscriptions")?;
                        self.send_values(&values)?
                    }
                    Message::Done => {
                        self.chan.close();
                        return Ok(());
                    }
                }
            }
            // Server-wide shutdown requested.
            recv(shutdown) -> _ => {
                self.chan.close();
                return Ok(());
            }
        }
    }
}
#[inline]
/// Parse one raw request line and dispatch it.
///
/// Supports both a single JSON-RPC request object and a batch array (each
/// element is answered in place); unparseable input yields a JSON-RPC
/// "parse error" response with a null id.
///
/// Takes `&str` instead of `&String` (clippy `ptr_arg`); the existing
/// `self.handle_line(&line)` call site still compiles via deref coercion.
fn handle_line(&mut self, line: &str) -> Value {
    if let Ok(json_value) = from_str(line) {
        match json_value {
            Value::Array(mut arr) => {
                for cmd in &mut arr {
                    // Replace each cmd with its response in-memory.
                    *cmd = self.handle_value(cmd);
                }
                Value::Array(arr)
            }
            cmd => self.handle_value(&cmd),
        }
    } else {
        // serde_json was unable to parse
        json_rpc_error(
            format!("Invalid JSON: {line}"),
            None,
            JsonRpcV2Error::ParseError,
        )
    }
}
#[inline]
/// Validate and dispatch a single parsed JSON-RPC request value.
///
/// A well-formed request has a string `method`, an optional array `params`
/// (defaulting to `[]`) and an `id`; anything else is answered with an
/// "invalid request" error, echoing the `id` when one is present.
fn handle_value(&mut self, value: &Value) -> Value {
    match (
        value.get("method"),
        value.get("params").unwrap_or(&json!([])),
        value.get("id"),
    ) {
        (Some(Value::String(method)), Value::Array(params), Some(id)) => self
            .handle_command(method, params, id)
            .unwrap_or_else(|err| {
                json_rpc_error(
                    format!("{method} RPC error: {err}"),
                    Some(id),
                    JsonRpcV2Error::InternalError,
                )
            }),
        (_, _, Some(id)) => json_rpc_error(value, Some(id), JsonRpcV2Error::InvalidRequest),
        _ => json_rpc_error(value, None, JsonRpcV2Error::InvalidRequest),
    }
}
/// Reader-thread body: pull newline-delimited requests off the stream and
/// forward them to the connection's channel until EOF or a malformed line.
fn handle_requests(
    mut reader: BufReader<ConnectionStream>,
    tx: crossbeam_channel::Sender<Message>,
) -> Result<()> {
    loop {
        let mut raw_line = Vec::<u8>::new();
        reader
            .read_until(b'\n', &mut raw_line)
            .chain_err(|| "failed to read a request")?;
        // An empty read means the peer closed its side of the connection.
        if raw_line.is_empty() {
            tx.send(Message::Done).chain_err(|| "channel closed")?;
            return Ok(());
        }
        // (very) naive SSL handshake detection
        if raw_line.starts_with(&[22, 3, 1]) {
            let _ = tx.send(Message::Done);
            bail!("invalid request - maybe SSL-encrypted data?: {:?}", raw_line)
        }
        match String::from_utf8(raw_line) {
            Ok(request) => tx
                .send(Message::Request(request))
                .chain_err(|| "channel closed")?,
            Err(err) => {
                let _ = tx.send(Message::Done);
                bail!("invalid UTF8: {}", err)
            }
        }
    }
}
/// Drive a single client connection to completion.
///
/// Spawns a reader thread feeding request lines into the channel and a
/// watchdog thread that force-closes the socket on shutdown, then runs the
/// reply loop on the current thread. On exit, updates the client/subscription
/// gauges, closes the stream and joins the reader thread.
pub fn run(mut self) {
    self.stats.clients.inc();
    let reader = BufReader::new(self.stream.try_clone().expect("failed to clone TcpStream"));
    let tx = self.chan.sender();
    let die_please = self.die_please.take().unwrap();
    let (reply_killer, reply_receiver) = crossbeam_channel::unbounded();
    // We create a clone of the stream and put it in an Arc
    // This will drop at the end of the function.
    let arc_stream = Arc::new(self.stream.try_clone().expect("failed to clone TcpStream"));
    // We don't want to keep the stream alive until SIGINT
    // It should drop (close) no matter what.
    let maybe_stream = Arc::downgrade(&arc_stream);
    spawn_thread("properly-die", move || {
        let _ = die_please.recv();
        let _ = maybe_stream.upgrade().map(|s| s.shutdown(Shutdown::Both));
        let _ = reply_killer.send(());
    });
    let child = spawn_thread("reader", || Connection::handle_requests(reader, tx));
    if let Err(e) = self.handle_replies(reply_receiver) {
        error!(
            "[{}] connection handling failed: {}",
            self.stream.addr_string(),
            e.display_chain().to_string()
        );
    }
    self.stats.clients.dec();
    // Remove this connection's subscriptions from the global gauge.
    self.stats
        .subscriptions
        .sub(self.status_hashes.len() as i64);
    let addr = self.stream.addr_string();
    debug!("[{}] shutting down connection", addr);
    // Drop the Arc so that the stream properly closes.
    drop(arc_stream);
    let _ = self.stream.shutdown(Shutdown::Both);
    if let Err(err) = child.join().expect("receiver panicked") {
        error!("[{}] receiver failed: {}", addr, err);
    }
}
}
#[inline]
/// Build a JSON-RPC 2.0 error response with the given code and message,
/// attaching the request `id` when one is known.
fn json_rpc_error(
    input: impl core::fmt::Display,
    id: Option<&Value>,
    code: JsonRpcV2Error,
) -> Value {
    let mut response = json!({
        "error": {
            "code": code.into_i16(),
            "message": input.to_string()
        },
        "jsonrpc": "2.0"
    });
    if let Some(id) = id {
        if let Some(obj) = response.as_object_mut() {
            obj.insert("id".to_owned(), id.clone());
        }
    }
    response
}
/// Fetch the (confirmed + mempool) history txids of a scripthash, failing
/// with `TooManyTxs` when the configured per-scripthash limit is exceeded.
fn get_history(
    query: &Query,
    scripthash: &[u8],
    txs_limit: usize,
) -> Result<Vec<(Txid, Option<BlockId>)>> {
    // to avoid silently truncating history entries, ask for one extra more than the limit and fail if it exists
    let history_txids = query.history_txids(scripthash, txs_limit + 1);
    ensure!(
        history_txids.len() <= txs_limit,
        ErrorKind::TooManyTxs(txs_limit)
    );
    Ok(history_txids)
}
/// One entry of a `blockchain.scripthash.get_history` reply.
#[derive(Serialize, Debug)]
struct GetHistoryResult {
    #[serde(rename = "tx_hash")]
    txid: Txid,
    // Electrum-style height encoding computed by get_electrum_height;
    // NOTE(review): presumably <=0 for mempool txs — confirm in that helper.
    height: isize,
    // Only populated for mempool transactions.
    #[serde(skip_serializing_if = "Option::is_none")]
    fee: Option<u64>,
}
/// Messages consumed by a connection's reply loop.
#[derive(Debug)]
pub enum Message {
    /// One raw request line read from the client.
    Request(String),
    /// Ask the connection to push subscription updates.
    PeriodicUpdate,
    /// The client disconnected or the reader thread is stopping.
    Done,
}
/// Server-wide notifications fanned out to all connections.
pub enum Notification {
    /// Trigger a periodic update pass on every connection.
    Periodic,
    /// Shut the server down.
    Exit,
}
/// Handle to the running Electrum RPC server.
pub struct RPC {
    // Sender used to broadcast Periodic/Exit notifications.
    notification: Sender<Notification>,
    server: Option<thread::JoinHandle<()>>, // so we can join the server while dropping this object
}
/// Prometheus metrics shared by all Electrum connections.
struct Stats {
    // Per-method request latency histogram.
    latency: HistogramVec,
    // Number of currently connected clients.
    clients: Gauge,
    // Number of active scripthash subscriptions across all clients.
    subscriptions: Gauge,
}
impl RPC {
/// Spawn the thread that fans `Notification`s out to all live connections.
///
/// `Periodic` forwards a `PeriodicUpdate` to each per-connection sender,
/// dropping senders whose connection has gone away; `Exit` stops the
/// acceptor and ends this thread.
fn start_notifier(
    notification: Channel<Notification>,
    senders: Arc<Mutex<Vec<crossbeam_channel::Sender<Message>>>>,
    acceptor: Sender<Option<ConnectionStream>>,
    acceptor_shutdown: Sender<()>,
) {
    spawn_thread("notification", move || {
        for msg in notification.receiver().iter() {
            let mut senders = senders.lock().unwrap();
            match msg {
                Notification::Periodic => {
                    // Drain the sender list and re-insert only the senders
                    // whose receiving connection is still alive.
                    for sender in senders.split_off(0) {
                        if let Err(crossbeam_channel::TrySendError::Disconnected(_)) =
                            sender.try_send(Message::PeriodicUpdate)
                        {
                            continue;
                        }
                        senders.push(sender);
                    }
                }
                Notification::Exit => {
                    acceptor_shutdown.send(()).unwrap(); // Stop the acceptor itself
                    acceptor.send(None).unwrap(); // mark acceptor as done
                    break;
                }
            }
        }
    });
}
/// Spawn the listener thread and return the channel on which accepted
/// connections are delivered (`None` marks the end of the stream).
fn start_acceptor(
    config: Arc<Config>,
    shutdown_channel: Channel<()>,
) -> Channel<Option<ConnectionStream>> {
    let chan = Channel::unbounded();
    let acceptor = chan.sender();
    spawn_thread("acceptor", move || {
        let addr = config.electrum_rpc_addr;
        // Listen on a unix socket when configured, otherwise on TCP.
        let listener = if let Some(path) = config.rpc_socket_file.as_ref() {
            // We can leak this Path because we know that this function is only
            // called once on startup.
            let path: &'static Path = Box::leak(path.clone().into_boxed_path());
            ConnectionListener::new_unix(path)
        } else {
            ConnectionListener::new_tcp(&addr)
        };
        listener.run(acceptor, shutdown_channel);
    });
    chan
}
pub fn start(config: Arc<Config>, query: Arc<Query>, metrics: &Metrics) -> RPC {
let stats = Arc::new(Stats {
latency: metrics.histogram_vec(
HistogramOpts::new("electrum_rpc", "Electrum RPC latency (seconds)"),
&["method"],
),
clients: metrics.gauge(MetricOpts::new("electrum_clients", "# of Electrum clients")),
subscriptions: metrics.gauge(MetricOpts::new(
"electrum_subscriptions",
"# of Electrum subscriptions",
)),
});
stats.clients.set(0);
stats.subscriptions.set(0);
let notification = Channel::unbounded();
// Discovery is enabled when electrum-public-hosts is set
#[cfg(feature = "electrum-discovery")]
let discovery = config.electrum_public_hosts.clone().map(|hosts| {
use crate::chain::genesis_hash;
let features = ServerFeatures {
hosts,
server_version: VERSION_STRING.clone(),
genesis_hash: genesis_hash(config.network_type),
protocol_min: PROTOCOL_VERSION,
protocol_max: PROTOCOL_VERSION,
hash_function: "sha256".into(),
pruning: None,
};
let discovery = Arc::new(DiscoveryManager::new(
config.network_type,
features,
PROTOCOL_VERSION,
config.electrum_announce,
config.tor_proxy,
));
DiscoveryManager::spawn_jobs_thread(Arc::clone(&discovery));
discovery
});
let txs_limit = config.electrum_txs_limit;
RPC {
notification: notification.sender(),
server: Some(spawn_thread("rpc", move || {
let senders =
Arc::new(Mutex::new(Vec::<crossbeam_channel::Sender<Message>>::new()));
let acceptor_shutdown = Channel::unbounded();
let acceptor_shutdown_sender = acceptor_shutdown.sender();
let acceptor = RPC::start_acceptor(config, acceptor_shutdown);
RPC::start_notifier(
notification,
senders.clone(),
acceptor.sender(),
acceptor_shutdown_sender,
);
let mut threads = HashMap::new();
let (garbage_sender, garbage_receiver) = crossbeam_channel::unbounded();
while let Some(stream) = acceptor.receiver().recv().unwrap() {
let addr = stream.addr_string();
// explicitely scope the shadowed variables for the new thread
let query = Arc::clone(&query);
let senders = Arc::clone(&senders);
let stats = Arc::clone(&stats);
let garbage_sender = garbage_sender.clone();
// Kill the peers properly
let (killer, peace_receiver) = std::sync::mpsc::channel();
let killer_clone = killer.clone();
#[cfg(feature = "electrum-discovery")]
let discovery = discovery.clone();
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | true |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/electrum/discovery/default_servers.rs | src/electrum/discovery/default_servers.rs | use crate::chain::Network;
#[allow(unused_imports)]
use crate::electrum::discovery::{DiscoveryManager, Service};
#[allow(unused_variables)]
pub fn add_default_servers(discovery: &DiscoveryManager, network: Network) {
match network {
#[cfg(not(feature = "liquid"))]
Network::Bitcoin => {
discovery
.add_default_server(
"3smoooajg7qqac2y.onion".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"81-7-10-251.blue.kundencontroller.de".into(),
vec![Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"E-X.not.fyi".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"VPS.hsmiths.com".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"b.ooze.cc".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"bauerjda5hnedjam.onion".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"bauerjhejlv6di7s.onion".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"bitcoin.corgi.party".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"bitcoin3nqy3db7c.onion".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"bitcoins.sk".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"btc.cihar.com".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"btc.xskyx.net".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"currentlane.lovebitco.in".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"daedalus.bauerj.eu".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"electrum.jochen-hoenicke.de".into(),
vec![Service::Tcp(50003), Service::Ssl(50005)],
)
.ok();
discovery
.add_default_server(
"dragon085.startdedicated.de".into(),
vec![Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"e-1.claudioboxx.com".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"e.keff.org".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"electrum-server.ninja".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"electrum-unlimited.criptolayer.net".into(),
vec![Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"electrum.eff.ro".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"electrum.festivaldelhumor.org".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"electrum.hsmiths.com".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"electrum.leblancnet.us".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server("electrum.mindspot.org".into(), vec![Service::Ssl(50002)])
.ok();
discovery
.add_default_server(
"electrum.qtornado.com".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server("electrum.taborsky.cz".into(), vec![Service::Ssl(50002)])
.ok();
discovery
.add_default_server(
"electrum.villocq.com".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"electrum2.eff.ro".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"electrum2.villocq.com".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"electrumx.bot.nu".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"electrumx.ddns.net".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server("electrumx.ftp.sh".into(), vec![Service::Ssl(50002)])
.ok();
discovery
.add_default_server(
"electrumx.ml".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"electrumx.soon.it".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server("electrumxhqdsmlu.onion".into(), vec![Service::Tcp(50001)])
.ok();
discovery
.add_default_server(
"elx01.knas.systems".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"enode.duckdns.org".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"fedaykin.goip.de".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"fn.48.org".into(),
vec![Service::Tcp(50003), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"helicarrier.bauerj.eu".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"hsmiths4fyqlw5xw.onion".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"hsmiths5mjk6uijs.onion".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"icarus.tetradrachm.net".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"electrum.emzy.de".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"ndnd.selfhost.eu".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server("ndndword5lpb7eex.onion".into(), vec![Service::Tcp(50001)])
.ok();
discovery
.add_default_server(
"orannis.com".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"ozahtqwp25chjdjd.onion".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"qtornadoklbgdyww.onion".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server("rbx.curalle.ovh".into(), vec![Service::Ssl(50002)])
.ok();
discovery
.add_default_server("s7clinmo4cazmhul.onion".into(), vec![Service::Tcp(50001)])
.ok();
discovery
.add_default_server(
"tardis.bauerj.eu".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server("technetium.network".into(), vec![Service::Ssl(50002)])
.ok();
discovery
.add_default_server(
"tomscryptos.com".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"ulrichard.ch".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"vmd27610.contaboserver.net".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"vmd30612.contaboserver.net".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"wsw6tua3xl24gsmi264zaep6seppjyrkyucpsmuxnjzyt3f3j6swshad.onion".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"xray587.startdedicated.de".into(),
vec![Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"yuio.top".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"bitcoin.dragon.zone".into(),
vec![Service::Tcp(50003), Service::Ssl(50004)],
)
.ok();
discovery
.add_default_server(
"ecdsa.net".into(),
vec![Service::Tcp(50001), Service::Ssl(110)],
)
.ok();
discovery
.add_default_server("btc.usebsv.com".into(), vec![Service::Ssl(50006)])
.ok();
discovery
.add_default_server(
"e2.keff.org".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server("electrum.hodlister.co".into(), vec![Service::Ssl(50002)])
.ok();
discovery
.add_default_server("electrum3.hodlister.co".into(), vec![Service::Ssl(50002)])
.ok();
discovery
.add_default_server("electrum5.hodlister.co".into(), vec![Service::Ssl(50002)])
.ok();
discovery
.add_default_server(
"electrumx.electricnewyear.net".into(),
vec![Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"fortress.qtornado.com".into(),
vec![Service::Tcp(50001), Service::Ssl(443)],
)
.ok();
discovery
.add_default_server(
"green-gold.westeurope.cloudapp.azure.com".into(),
vec![Service::Tcp(56001), Service::Ssl(56002)],
)
.ok();
discovery
.add_default_server(
"electrumx.erbium.eu".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
}
#[cfg(not(feature = "liquid"))]
Network::Testnet => {
discovery
.add_default_server(
"hsmithsxurybd7uh.onion".into(),
vec![Service::Tcp(53011), Service::Ssl(53012)],
)
.ok();
discovery
.add_default_server(
"testnet.hsmiths.com".into(),
vec![Service::Tcp(53011), Service::Ssl(53012)],
)
.ok();
discovery
.add_default_server(
"testnet.qtornado.com".into(),
vec![Service::Tcp(51001), Service::Ssl(51002)],
)
.ok();
discovery
.add_default_server(
"testnet1.bauerj.eu".into(),
vec![Service::Tcp(50001), Service::Ssl(50002)],
)
.ok();
discovery
.add_default_server(
"tn.not.fyi".into(),
vec![Service::Tcp(55001), Service::Ssl(55002)],
)
.ok();
discovery
.add_default_server(
"bitcoin.cluelessperson.com".into(),
vec![Service::Tcp(51001), Service::Ssl(51002)],
)
.ok();
}
_ => (),
}
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/elements/peg.rs | src/elements/peg.rs | use bitcoin::hashes::hex::ToHex;
use elements::{confidential::Asset, PeginData, PegoutData, TxIn, TxOut};
use crate::chain::{bitcoin_genesis_hash, BNetwork, Network};
use crate::util::{FullHash, ScriptToAsm};
/// Extract peg-in data from a transaction input, keeping it only when the
/// peg-in is for this network's pegged asset; returns `None` otherwise
/// (including on networks without a pegged asset).
///
/// The parameter was renamed from the misleading `txout` — it is a `TxIn`.
pub fn get_pegin_data(txin: &TxIn, network: Network) -> Option<PeginData<'_>> {
    let pegged_asset_id = network.pegged_asset()?;
    txin.pegin_data()
        .filter(|pegin| pegin.asset == Asset::Explicit(*pegged_asset_id))
}
/// Extract peg-out data from an output, keeping it only when it pegs out
/// this network's pegged asset to the expected parent-chain genesis hash.
pub fn get_pegout_data(
    txout: &TxOut,
    network: Network,
    parent_network: BNetwork,
) -> Option<PegoutData<'_>> {
    let pegged_asset_id = network.pegged_asset()?;
    txout.pegout_data().filter(|pegout| {
        pegout.asset == Asset::Explicit(*pegged_asset_id)
            // NOTE(review): the match maps the parent chain back to its
            // corresponding Liquid network before asking for the bitcoin
            // genesis hash; Signet has no Liquid counterpart, so such
            // pegouts are rejected. Confirm bitcoin_genesis_hash semantics.
            && pegout.genesis_hash
                == bitcoin_genesis_hash(match parent_network {
                    BNetwork::Bitcoin => Network::Liquid,
                    BNetwork::Testnet => Network::LiquidTestnet,
                    BNetwork::Signet => return false,
                    BNetwork::Regtest => Network::LiquidRegtest,
                })
    })
}
// API representation of pegout data associated with an output
#[derive(Serialize, Deserialize, Clone)]
pub struct PegoutValue {
    pub genesis_hash: String,
    pub scriptpubkey: bitcoin::Script,
    pub scriptpubkey_asm: String,
    // Omitted from JSON when the script has no standard address form.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub scriptpubkey_address: Option<String>,
}
impl PegoutValue {
    /// Build the API representation from an output; `None` unless the output
    /// is a peg-out of this network's pegged asset (see get_pegout_data).
    pub fn from_txout(txout: &TxOut, network: Network, parent_network: BNetwork) -> Option<Self> {
        let pegoutdata = get_pegout_data(txout, network, parent_network)?;
        // pending https://github.com/ElementsProject/rust-elements/pull/69 being merged
        let scriptpubkey = bitcoin::Script::from(pegoutdata.script_pubkey.into_bytes());
        let address = bitcoin::Address::from_script(&scriptpubkey, parent_network);
        Some(PegoutValue {
            genesis_hash: pegoutdata.genesis_hash.to_hex(),
            scriptpubkey_asm: scriptpubkey.to_asm(),
            scriptpubkey_address: address.map(|s| s.to_string()),
            scriptpubkey,
        })
    }
}
// Inner type for the indexer TxHistoryInfo::Pegin variant
#[derive(Serialize, Deserialize, Debug)]
#[cfg_attr(test, derive(PartialEq, Eq))]
pub struct PeginInfo {
    pub txid: FullHash,
    // Index of the input carrying the peg-in.
    pub vin: u32,
    pub value: u64,
}
// Inner type for the indexer TxHistoryInfo::Pegout variant
#[derive(Serialize, Deserialize, Debug)]
#[cfg_attr(test, derive(PartialEq, Eq))]
pub struct PegoutInfo {
    pub txid: FullHash,
    // Index of the output performing the peg-out.
    pub vout: u32,
    pub value: u64,
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/elements/asset.rs | src/elements/asset.rs | use std::collections::{HashMap, HashSet};
use std::sync::{Arc, RwLock, RwLockReadGuard};
use bitcoin::hashes::{hex::FromHex, sha256, Hash};
use elements::confidential::{Asset, Value};
use elements::encode::{deserialize, serialize};
use elements::secp256k1_zkp::ZERO_TWEAK;
use elements::{issuance::ContractHash, AssetId, AssetIssuance, OutPoint, Transaction, TxIn};
use crate::chain::{BNetwork, BlockHash, Network, Txid};
use crate::elements::peg::{get_pegin_data, get_pegout_data, PeginInfo, PegoutInfo};
use crate::elements::registry::{AssetMeta, AssetRegistry};
use crate::errors::*;
use crate::new_index::schema::{Operation, TxHistoryInfo, TxHistoryKey, TxHistoryRow};
use crate::new_index::{db::DBFlush, ChainQuery, DBRow, Mempool, Query};
use crate::util::{bincode_util, full_hash, Bytes, FullHash, TransactionStatus, TxInput};
lazy_static! {
    // Asset ids of the pegged native asset (L-BTC) on each Liquid chain,
    // used to tell the native asset apart from issued ones.
    // NOTE(review): values assumed to match each network's genesis-derived
    // asset id — confirm against the Elements reference.
    pub static ref NATIVE_ASSET_ID: AssetId =
        AssetId::from_hex("6f0279e9ed041c3d710a9f57d0c02928416460c4b722ae3457a11eec381c526d")
            .unwrap();
    pub static ref NATIVE_ASSET_ID_TESTNET: AssetId =
        AssetId::from_hex("144c654344aa716d6f3abcc1ca90e5641e4e2a7f633bc09fe3baf64585819a49")
            .unwrap();
    pub static ref NATIVE_ASSET_ID_REGTEST: AssetId =
        AssetId::from_hex("5ac9f65c0efcc4775e0baec4ec03abdde22473cd3cf33c0419ca290e0751b225")
            .unwrap();
}
/// Parse a stored 32-byte hash into an `AssetId`; panics on malformed
/// input, which indicates index corruption rather than a user error.
fn parse_asset_id(sl: &[u8]) -> AssetId {
    AssetId::from_slice(sl).expect("failed to parse AssetId")
}
/// Any Liquid asset: either an issued asset or the pegged native one.
/// Serialized untagged, so the JSON shape is that of the inner struct.
#[derive(Serialize)]
#[serde(untagged)]
#[allow(clippy::large_enum_variant)]
pub enum LiquidAsset {
    Issued(IssuedAsset),
    Native(PeggedAsset),
}
/// API representation of the network's pegged (native) asset and its stats.
#[derive(Serialize)]
pub struct PeggedAsset {
    pub asset_id: AssetId,
    pub chain_stats: PeggedAssetStats,
    pub mempool_stats: PeggedAssetStats,
}
/// API representation of an issued asset: issuance provenance, stats and
/// optional registry metadata.
#[derive(Serialize)]
pub struct IssuedAsset {
    pub asset_id: AssetId,
    /// The input that performed the initial issuance.
    pub issuance_txin: TxInput,
    #[serde(serialize_with = "crate::util::serialize_outpoint")]
    pub issuance_prevout: OutPoint,
    pub reissuance_token: AssetId,
    /// Contract hash committed in the issuance entropy, when non-zero.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub contract_hash: Option<ContractHash>,
    // the confirmation status of the initial issuance transaction
    pub status: TransactionStatus,
    pub chain_stats: IssuedAssetStats,
    pub mempool_stats: IssuedAssetStats,
    // optional metadata from registry
    #[serde(flatten)]
    pub meta: Option<AssetMeta>,
}
// DB representation (issued assets only)
#[derive(Serialize, Deserialize, Debug)]
pub struct AssetRow {
    // Transaction/input that issued the asset.
    pub issuance_txid: FullHash,
    pub issuance_vin: u32,
    // Outpoint spent by the issuing input.
    pub prev_txid: FullHash,
    pub prev_vout: u32,
    pub issuance: Bytes, // bincode does not like dealing with AssetIssuance, deserialization fails with "invalid type: sequence, expected a struct"
    pub reissuance_token: FullHash,
}
impl IssuedAsset {
    /// Assemble the API representation from a DB row plus precomputed
    /// (chain, mempool) stats, optional registry metadata and the issuance
    /// transaction's confirmation status.
    pub fn new(
        asset_id: &AssetId,
        asset: &AssetRow,
        (chain_stats, mempool_stats): (IssuedAssetStats, IssuedAssetStats),
        meta: Option<AssetMeta>,
        status: TransactionStatus,
    ) -> Self {
        let issuance: AssetIssuance =
            deserialize(&asset.issuance).expect("failed parsing AssetIssuance");
        let reissuance_token = parse_asset_id(&asset.reissuance_token);
        // An all-zero entropy means no contract hash was committed.
        let contract_hash = if issuance.asset_entropy != [0u8; 32] {
            Some(ContractHash::from_inner(issuance.asset_entropy))
        } else {
            None
        };
        Self {
            asset_id: *asset_id,
            issuance_txin: TxInput {
                txid: deserialize(&asset.issuance_txid).unwrap(),
                vin: asset.issuance_vin,
            },
            issuance_prevout: OutPoint {
                txid: deserialize(&asset.prev_txid).unwrap(),
                vout: asset.prev_vout,
            },
            contract_hash,
            reissuance_token,
            status,
            chain_stats,
            mempool_stats,
            meta,
        }
    }
}
impl LiquidAsset {
    /// Total circulating supply.
    ///
    /// Native asset: peg-ins minus peg-outs and burns (confirmed plus
    /// mempool). Issued assets: issued minus burned, or `None` when any
    /// issuance is blinded (the true supply is then unknowable).
    ///
    /// NOTE(review): the u64 arithmetic is evaluated left-to-right and
    /// assumes outflows never exceed the running sum of inflows; an
    /// underflow would panic in debug builds — presumably guaranteed by
    /// consensus rules, confirm before reordering these expressions.
    pub fn supply(&self) -> Option<u64> {
        match self {
            LiquidAsset::Native(asset) => Some(
                asset.chain_stats.peg_in_amount
                    - asset.chain_stats.peg_out_amount
                    - asset.chain_stats.burned_amount
                    + asset.mempool_stats.peg_in_amount
                    - asset.mempool_stats.peg_out_amount
                    - asset.mempool_stats.burned_amount,
            ),
            LiquidAsset::Issued(asset) => {
                if asset.chain_stats.has_blinded_issuances
                    || asset.mempool_stats.has_blinded_issuances
                {
                    None
                } else {
                    Some(
                        asset.chain_stats.issued_amount - asset.chain_stats.burned_amount
                            + asset.mempool_stats.issued_amount
                            - asset.mempool_stats.burned_amount,
                    )
                }
            }
        }
    }
    /// Display precision (decimal places): 8 for the native asset,
    /// otherwise taken from registry metadata (0 when unregistered).
    pub fn precision(&self) -> u8 {
        match self {
            LiquidAsset::Native(_) => 8,
            LiquidAsset::Issued(asset) => asset.meta.as_ref().map_or(0, |m| m.precision),
        }
    }
}
/// History entry for an issuance or reissuance input.
#[derive(Serialize, Deserialize, Debug)]
#[cfg_attr(test, derive(PartialEq, Eq))]
pub struct IssuingInfo {
    pub txid: FullHash,
    pub vin: u32,
    pub is_reissuance: bool,
    // None for blinded issuances
    pub issued_amount: Option<u64>,
    pub token_amount: Option<u64>,
}
/// History entry for a burn: an explicit amount sent to a provably
/// unspendable output.
#[derive(Serialize, Deserialize, Debug)]
#[cfg_attr(test, derive(PartialEq, Eq))]
pub struct BurningInfo {
    pub txid: FullHash,
    pub vout: u32,
    pub value: u64,
}
// Index confirmed transaction issuances and save as db rows
//
// Appends the tx's per-asset history entries as `I`-prefixed rows, plus an
// `i<asset>` lookup row for every initial issuance. When running under
// `Operation::DeleteBlocksWithHistory`, each affected asset hash is also
// reported through the operation's channel.
pub fn index_confirmed_tx_assets(
    tx: &Transaction,
    confirmed_height: u32,
    tx_position: u16,
    network: Network,
    parent_network: BNetwork,
    rows: &mut Vec<DBRow>,
    op: &Operation,
) {
    let (history, issuances) = index_tx_assets(tx, network, parent_network);
    rows.extend(history.into_iter().map(|(asset_id, info)| {
        let history_row = asset_history_row(&asset_id, confirmed_height, tx_position, info);
        if let Operation::DeleteBlocksWithHistory(tx) = op {
            tx.send(history_row.key.hash)
                .expect("unbounded channel won't fail");
        }
        history_row.into_row()
    }));
    // the initial issuance is kept twice: once in the history index under I<asset><height><txid:vin>,
    // and once separately under i<asset> for asset lookup with some more associated metadata.
    // reissuances are only kept under the history index.
    rows.extend(issuances.into_iter().map(|(asset_id, asset_row)| DBRow {
        key: [b"i", &asset_id.into_inner()[..]].concat(),
        value: bincode_util::serialize_little(&asset_row).unwrap(),
    }));
}
// Index mempool transaction issuances and save to in-memory store
//
// Extracts the tx's asset history entries and initial issuances, then
// merges them into the caller-provided mempool maps.
pub fn index_mempool_tx_assets(
    tx: &Transaction,
    network: Network,
    parent_network: BNetwork,
    asset_history: &mut HashMap<AssetId, Vec<TxHistoryInfo>>,
    asset_issuance: &mut HashMap<AssetId, AssetRow>,
) {
    let (history, issuances) = index_tx_assets(tx, network, parent_network);
    history.into_iter().for_each(|(asset_id, info)| {
        asset_history.entry(asset_id).or_default().push(info);
    });
    asset_issuance.extend(issuances);
}
// Remove mempool transaction issuances from in-memory store
//
// Drops every history entry and issuance row whose txid appears in
// `to_remove`, pruning history map entries that become empty.
pub fn remove_mempool_tx_assets(
    to_remove: &HashSet<&Txid>,
    asset_history: &mut HashMap<AssetId, Vec<TxHistoryInfo>>,
    asset_issuance: &mut HashMap<AssetId, AssetRow>,
) {
    // TODO optimize
    asset_history.retain(|_assethash, entries| {
        entries.retain(|entry| !to_remove.contains(&entry.get_txid()));
        !entries.is_empty()
    });
    asset_issuance.retain(|_assethash, issuance| {
        let txid: Txid = deserialize(&issuance.issuance_txid).unwrap();
        !to_remove.contains(&txid)
    });
}
// (per-asset history entries, per-asset rows for initial issuances)
type HistoryAndIssuances = (Vec<(AssetId, TxHistoryInfo)>, Vec<(AssetId, AssetRow)>);
// Internal utility function, index a transaction and return its history entries and issuances
//
// Scans outputs for pegouts and provably-unspendable burns, and inputs for
// pegins and (re)issuances. Only *initial* issuances produce an AssetRow;
// reissuances are recorded in the history entries alone.
fn index_tx_assets(
    tx: &Transaction,
    network: Network,
    parent_network: BNetwork,
) -> HistoryAndIssuances {
    let mut history = vec![];
    let mut issuances = vec![];
    let txid = full_hash(&tx.txid()[..]);
    for (txo_index, txo) in tx.output.iter().enumerate() {
        if let Some(pegout) = get_pegout_data(txo, network, parent_network) {
            history.push((
                pegout.asset.explicit().unwrap(),
                TxHistoryInfo::Pegout(PegoutInfo {
                    txid,
                    vout: txo_index as u32,
                    value: pegout.value,
                }),
            ));
        } else if txo.script_pubkey.is_provably_unspendable() && !txo.is_fee() {
            // burn: an explicit, non-zero amount sent to an unspendable
            // script (fee outputs excluded)
            if let (Asset::Explicit(asset_id), Value::Explicit(value)) = (txo.asset, txo.value) {
                if value > 0 {
                    history.push((
                        asset_id,
                        TxHistoryInfo::Burning(BurningInfo {
                            txid,
                            vout: txo_index as u32,
                            value,
                        }),
                    ));
                }
            }
        }
    }
    for (txi_index, txi) in tx.input.iter().enumerate() {
        if let Some(pegin) = get_pegin_data(txi, network) {
            history.push((
                pegin.asset.explicit().unwrap(),
                TxHistoryInfo::Pegin(PeginInfo {
                    txid,
                    vin: txi_index as u32,
                    value: pegin.value,
                }),
            ));
        } else if txi.has_issuance() {
            // a non-zero blinding nonce marks a reissuance
            let is_reissuance = txi.asset_issuance.asset_blinding_nonce != ZERO_TWEAK;
            let asset_entropy = get_issuance_entropy(txi).expect("invalid issuance");
            let asset_id = AssetId::from_entropy(asset_entropy);
            // Explicit amounts are kept as-is, Null becomes 0, and
            // confidential (blinded) amounts become None.
            let issued_amount = match txi.asset_issuance.amount {
                Value::Explicit(amount) => Some(amount),
                Value::Null => Some(0),
                _ => None,
            };
            let token_amount = match txi.asset_issuance.inflation_keys {
                Value::Explicit(amount) => Some(amount),
                Value::Null => Some(0),
                _ => None,
            };
            history.push((
                asset_id,
                TxHistoryInfo::Issuing(IssuingInfo {
                    txid,
                    vin: txi_index as u32,
                    is_reissuance,
                    issued_amount,
                    token_amount,
                }),
            ));
            if !is_reissuance {
                // initial issuances additionally get an AssetRow so the
                // asset can later be looked up by id
                let is_confidential =
                    matches!(txi.asset_issuance.inflation_keys, Value::Confidential(..));
                let reissuance_token =
                    AssetId::reissuance_token_from_entropy(asset_entropy, is_confidential);
                issuances.push((
                    asset_id,
                    AssetRow {
                        issuance_txid: txid,
                        issuance_vin: txi_index as u32,
                        prev_txid: full_hash(&txi.previous_output.txid[..]),
                        prev_vout: txi.previous_output.vout,
                        issuance: serialize(&txi.asset_issuance),
                        reissuance_token: full_hash(&reissuance_token.into_inner()[..]),
                    },
                ));
            }
        }
    }
    (history, issuances)
}
/// Build the `I`-prefixed history-index row for one asset history entry.
fn asset_history_row(
    asset_id: &AssetId,
    confirmed_height: u32,
    tx_position: u16,
    txinfo: TxHistoryInfo,
) -> TxHistoryRow {
    TxHistoryRow {
        key: TxHistoryKey {
            code: b'I',
            hash: full_hash(&asset_id.into_inner()[..]),
            confirmed_height,
            tx_position,
            txinfo,
        },
    }
}
/// Access to the asset registry for `lookup_asset`: either a shared handle
/// (locked on demand) or a read guard the caller already holds.
pub enum AssetRegistryLock<'a> {
    RwLock(&'a Arc<RwLock<AssetRegistry>>),
    RwLockReadGuard(&'a RwLockReadGuard<'a, AssetRegistry>),
}
/// Look up an asset by id.
///
/// The network's pegged asset is answered from peg stats alone. Any other
/// asset is resolved via its `i<asset>` row in the history db, falling back
/// to unconfirmed mempool issuances; `Ok(None)` when the asset is unknown.
/// Registry metadata is taken from `meta` when provided, otherwise fetched
/// through the optional `registry` handle.
pub fn lookup_asset(
    query: &Query,
    registry: Option<AssetRegistryLock>,
    asset_id: &AssetId,
    meta: Option<&AssetMeta>, // may optionally be provided if already known
) -> Result<Option<LiquidAsset>> {
    if query.network().pegged_asset() == Some(asset_id) {
        let (chain_stats, mempool_stats) = pegged_asset_stats(query, asset_id);
        return Ok(Some(LiquidAsset::Native(PeggedAsset {
            asset_id: *asset_id,
            chain_stats,
            mempool_stats,
        })));
    }
    let history_db = query.chain().store().history_db();
    let mempool = query.mempool();
    let mempool_issuances = &mempool.asset_issuance;
    // confirmed issuance row, if any
    let chain_row = history_db
        .get(&[b"i", &asset_id.into_inner()[..]].concat())
        .map(|row| {
            bincode_util::deserialize_little::<AssetRow>(&row).expect("failed parsing AssetRow")
        });
    // prefer the confirmed row over an unconfirmed mempool issuance
    let row = chain_row
        .as_ref()
        .or_else(|| mempool_issuances.get(asset_id));
    Ok(if let Some(row) = row {
        let reissuance_token = parse_asset_id(&row.reissuance_token);
        let meta = meta.cloned().or_else(|| match registry {
            Some(AssetRegistryLock::RwLock(rwlock)) => {
                rwlock.read().unwrap().get(asset_id).cloned()
            }
            Some(AssetRegistryLock::RwLockReadGuard(guard)) => guard.get(asset_id).cloned(),
            None => None,
        });
        let stats = issued_asset_stats(query.chain(), &mempool, asset_id, &reissuance_token);
        let status = query.get_tx_status(&deserialize(&row.issuance_txid).unwrap());
        let asset = IssuedAsset::new(asset_id, row, stats, meta, status);
        Some(LiquidAsset::Issued(asset))
    } else {
        None
    })
}
/// Compute the issuance entropy of an input, failing when the input has no
/// issuance or its entropy bytes are malformed.
pub fn get_issuance_entropy(txin: &TxIn) -> Result<sha256::Midstate> {
    if !txin.has_issuance {
        bail!("input has no issuance");
    }
    if txin.asset_issuance.asset_blinding_nonce == ZERO_TWEAK {
        // initial issuance: derive the entropy from the spent outpoint and
        // the contract hash carried in the entropy field
        let contract_hash = ContractHash::from_slice(&txin.asset_issuance.asset_entropy)
            .chain_err(|| "invalid entropy (contract hash)")?;
        Ok(AssetId::generate_asset_entropy(
            txin.previous_output,
            contract_hash,
        ))
    } else {
        // reissuance: the entropy field carries the entropy itself
        sha256::Midstate::from_slice(&txin.asset_issuance.asset_entropy)
            .chain_err(|| "invalid entropy (reissuance)")
    }
}
//
// Asset stats
//
/// Aggregate stats for an issued asset (one instance each for the
/// confirmed and mempool sides), folded by `apply_issued_asset_stats`.
#[derive(Serialize, Deserialize, Debug, Default)]
pub struct IssuedAssetStats {
    pub tx_count: usize,
    pub issuance_count: usize,
    pub issued_amount: u64,
    pub burned_amount: u64,
    pub has_blinded_issuances: bool,
    pub reissuance_tokens: Option<u64>, // none if confidential
    pub burned_reissuance_tokens: u64,
}
/// Aggregate stats for the pegged asset (one instance each for the
/// confirmed and mempool sides), folded by `apply_pegged_asset_stats`.
#[derive(Serialize, Deserialize, Debug, Default)]
pub struct PeggedAssetStats {
    pub tx_count: usize,
    pub peg_in_count: usize,
    pub peg_in_amount: u64,
    pub peg_out_count: usize,
    pub peg_out_amount: u64,
    pub burn_count: usize,
    pub burned_amount: u64,
}
// Signature shared by the per-entry stats folding functions
// (`apply_issued_asset_stats` / `apply_pegged_asset_stats`); the HashSet
// tracks txids already counted towards `tx_count`.
type AssetStatApplyFn<T> = fn(&TxHistoryInfo, &mut T, &mut HashSet<Txid>);
// Cache-db key (`z` prefix) under which an asset's stats snapshot is kept.
fn asset_cache_key(asset_id: &AssetId) -> Bytes {
    [b"z", &asset_id.into_inner()[..]].concat()
}
/// Build the cache-db row storing `stats` as computed up to `blockhash`.
fn asset_cache_row<T>(asset_id: &AssetId, stats: &T, blockhash: &BlockHash) -> DBRow
where
    T: serde::Serialize,
{
    DBRow {
        key: asset_cache_key(asset_id),
        value: bincode_util::serialize_little(&(stats, blockhash)).unwrap(),
    }
}
// Get stats for the network's pegged asset
// Returns (confirmed chain stats, mempool stats).
fn pegged_asset_stats(query: &Query, asset_id: &AssetId) -> (PeggedAssetStats, PeggedAssetStats) {
    (
        chain_asset_stats(query.chain(), asset_id, apply_pegged_asset_stats),
        mempool_asset_stats(&query.mempool(), asset_id, apply_pegged_asset_stats),
    )
}
// Get stats for issued assets
//
// Returns (confirmed, mempool) stats. The burned-reissuance-token tallies
// come from running the same stats pass over the reissuance token's own
// history and taking its burned amount.
fn issued_asset_stats(
    chain: &ChainQuery,
    mempool: &Mempool,
    asset_id: &AssetId,
    reissuance_token: &AssetId,
) -> (IssuedAssetStats, IssuedAssetStats) {
    let afn = apply_issued_asset_stats;
    let mut chain_stats = chain_asset_stats(chain, asset_id, afn);
    chain_stats.burned_reissuance_tokens =
        chain_asset_stats(chain, reissuance_token, afn).burned_amount;
    let mut mempool_stats = mempool_asset_stats(mempool, asset_id, afn);
    mempool_stats.burned_reissuance_tokens =
        mempool_asset_stats(mempool, reissuance_token, afn).burned_amount;
    (chain_stats, mempool_stats)
}
// Get on-chain confirmed asset stats (issued or the pegged asset)
fn chain_asset_stats<T>(chain: &ChainQuery, asset_id: &AssetId, apply_fn: AssetStatApplyFn<T>) -> T
where
T: Default + serde::Serialize + serde::de::DeserializeOwned,
{
// get the last known stats and the blockhash they are updated for.
// invalidates the cache if the block was orphaned.
let cache: Option<(T, usize)> = chain
.store()
.cache_db()
.get(&asset_cache_key(asset_id))
.map(|c| bincode_util::deserialize_little(&c).unwrap())
.and_then(|(stats, blockhash)| {
chain
.height_by_hash(&blockhash)
.map(|height| (stats, height))
});
// update stats with new transactions since
let (newstats, lastblock) = cache.map_or_else(
|| chain_asset_stats_delta(chain, asset_id, T::default(), 0, apply_fn),
|(oldstats, blockheight)| {
chain_asset_stats_delta(chain, asset_id, oldstats, blockheight + 1, apply_fn)
},
);
// save updated stats to cache
if let Some(lastblock) = lastblock {
chain.store().cache_db().write(
vec![asset_cache_row(asset_id, &newstats, &lastblock)],
DBFlush::Enable,
);
}
newstats
}
// Update the asset stats with the delta of confirmed txs since start_height
fn chain_asset_stats_delta<T>(
chain: &ChainQuery,
asset_id: &AssetId,
init_stats: T,
start_height: usize,
apply_fn: AssetStatApplyFn<T>,
) -> (T, Option<BlockHash>) {
let history_iter = chain
.history_iter_scan(b'I', &asset_id.into_inner()[..], start_height)
.map(TxHistoryRow::from_row)
.filter_map(|history| {
chain
.tx_confirming_block(&history.get_txid())
.map(|blockid| (history, blockid))
});
let mut stats = init_stats;
let mut seen_txids = HashSet::new();
let mut lastblock = None;
for (row, blockid) in history_iter {
if lastblock != Some(blockid.hash) {
seen_txids.clear();
}
apply_fn(&row.key.txinfo, &mut stats, &mut seen_txids);
lastblock = Some(blockid.hash);
}
(stats, lastblock)
}
// Get mempool asset stats (issued or the pegged asset)
pub fn mempool_asset_stats<T>(
mempool: &Mempool,
asset_id: &AssetId,
apply_fn: AssetStatApplyFn<T>,
) -> T
where
T: Default,
{
let mut stats = T::default();
if let Some(history) = mempool.asset_history.get(asset_id) {
let mut seen_txids = HashSet::new();
for info in history {
apply_fn(info, &mut stats, &mut seen_txids)
}
}
stats
}
/// Fold one history entry into `IssuedAssetStats`.
///
/// `seen_txids` de-duplicates `tx_count` across multiple entries of the
/// same transaction. Funding/Spending and Pegin/Pegout entries are never
/// recorded for issued assets, hence `unreachable!`.
fn apply_issued_asset_stats(
    info: &TxHistoryInfo,
    stats: &mut IssuedAssetStats,
    seen_txids: &mut HashSet<Txid>,
) {
    if seen_txids.insert(info.get_txid()) {
        stats.tx_count += 1;
    }
    match info {
        TxHistoryInfo::Issuing(issuance) => {
            stats.issuance_count += 1;
            // None marks a blinded issuance amount
            match issuance.issued_amount {
                Some(amount) => stats.issued_amount += amount,
                None => stats.has_blinded_issuances = true,
            }
            // reissuance tokens are created by the initial issuance only
            if !issuance.is_reissuance {
                stats.reissuance_tokens = issuance.token_amount;
            }
        }
        TxHistoryInfo::Burning(info) => {
            stats.burned_amount += info.value;
        }
        TxHistoryInfo::Funding(_) | TxHistoryInfo::Spending(_) => {
            // we don't keep funding/spending entries for assets
            unreachable!();
        }
        TxHistoryInfo::Pegin(_) | TxHistoryInfo::Pegout(_) => {
            // issued assets cannot have pegins/pegouts
            unreachable!();
        }
    }
}
/// Fold one history entry into `PeggedAssetStats`.
///
/// `seen_txids` de-duplicates `tx_count` across multiple entries of the
/// same transaction. Issuances of the native asset are ignored with a
/// warning; Funding/Spending entries are never kept for it.
fn apply_pegged_asset_stats(
    info: &TxHistoryInfo,
    stats: &mut PeggedAssetStats,
    seen_txids: &mut HashSet<Txid>,
) {
    if seen_txids.insert(info.get_txid()) {
        stats.tx_count += 1;
    }
    match info {
        TxHistoryInfo::Pegin(info) => {
            stats.peg_in_count += 1;
            stats.peg_in_amount += info.value;
        }
        TxHistoryInfo::Pegout(info) => {
            stats.peg_out_count += 1;
            stats.peg_out_amount += info.value;
        }
        TxHistoryInfo::Burning(info) => {
            stats.burn_count += 1;
            stats.burned_amount += info.value;
        }
        TxHistoryInfo::Issuing(_) => {
            warn!("encountered issuance of native asset, ignoring (possibly freeinitialcoins?)");
        }
        TxHistoryInfo::Funding(_) | TxHistoryInfo::Spending(_) => {
            // these history entries variants are never kept for native assets
            unreachable!();
        }
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/elements/registry.rs | src/elements/registry.rs | use std::collections::HashMap;
use std::sync::{Arc, RwLock};
use std::time::{Duration, SystemTime};
use std::{cmp, fs, path, thread};
use serde_json::Value as JsonValue;
use bitcoin::hashes::hex::FromHex;
use elements::AssetId;
use crate::errors::*;
// length of asset id prefix to use for sub-directory partitioning
// (in number of hex characters, not bytes)
const DIR_PARTITION_LEN: usize = 2;
/// File-backed registry of asset metadata with an in-memory cache, keyed
/// by asset id and refreshed via file modification times (see `fs_sync`).
pub struct AssetRegistry {
    // root directory holding <prefix>/<asset-id>.json files
    directory: path::PathBuf,
    // asset id -> (file mtime when loaded, parsed metadata)
    assets_cache: HashMap<AssetId, (SystemTime, AssetMeta)>,
}
/// A borrowed (asset id, metadata) pair, as returned by `AssetRegistry::list`.
pub type AssetEntry<'a> = (&'a AssetId, &'a AssetMeta);
impl AssetRegistry {
    /// Create a registry rooted at `directory` with an empty cache.
    pub fn new(directory: path::PathBuf) -> Self {
        Self {
            directory,
            assets_cache: Default::default(),
        }
    }
    /// Cached metadata for `asset_id`, if any.
    pub fn get(&self, asset_id: &AssetId) -> Option<&AssetMeta> {
        self.assets_cache
            .get(asset_id)
            .map(|(_, metadata)| metadata)
    }
    /// Page through all cached assets sorted per `sorting`; returns the
    /// total asset count alongside the requested page.
    pub fn list(
        &self,
        start_index: usize,
        limit: usize,
        sorting: AssetSorting,
    ) -> (usize, Vec<AssetEntry<'_>>) {
        let mut assets: Vec<AssetEntry> = self
            .assets_cache
            .iter()
            .map(|(asset_id, (_, metadata))| (asset_id, metadata))
            .collect();
        assets.sort_by(sorting.as_comparator());
        (
            assets.len(),
            assets.into_iter().skip(start_index).take(limit).collect(),
        )
    }
    /// Sync the cache with the on-disk registry: scan the two-hex-char
    /// partition subdirectories for `<asset-id>.json` files and (re)load
    /// any file whose mtime differs from the cached one.
    ///
    /// Note: cache entries whose backing file was deleted are not evicted.
    pub fn fs_sync(&mut self) -> Result<()> {
        for entry in fs::read_dir(&self.directory).chain_err(|| "failed reading asset dir")? {
            let entry = entry.chain_err(|| "invalid fh")?;
            let filetype = entry.file_type().chain_err(|| "failed getting file type")?;
            // only descend into the DIR_PARTITION_LEN-character partition dirs
            if !filetype.is_dir() || entry.file_name().len() != DIR_PARTITION_LEN {
                continue;
            }
            for file_entry in
                fs::read_dir(entry.path()).chain_err(|| "failed reading asset subdir")?
            {
                let file_entry = file_entry.chain_err(|| "invalid fh")?;
                let path = file_entry.path();
                if path.extension().and_then(|e| e.to_str()) != Some("json") {
                    continue;
                }
                // the file stem is the asset id in hex
                let asset_id = AssetId::from_hex(
                    path.file_stem()
                        .unwrap() // cannot fail if extension() succeeded
                        .to_str()
                        .chain_err(|| "invalid filename")?,
                )
                .chain_err(|| "invalid filename")?;
                let modified = file_entry
                    .metadata()
                    .chain_err(|| "failed reading metadata")?
                    .modified()
                    .chain_err(|| "metadata modified failed")?;
                // skip files unchanged since the last sync
                if let Some((last_update, _)) = self.assets_cache.get(&asset_id) {
                    if *last_update == modified {
                        continue;
                    }
                }
                let metadata: AssetMeta = serde_json::from_str(
                    &fs::read_to_string(path).chain_err(|| "failed reading file")?,
                )
                .chain_err(|| "failed parsing file")?;
                self.assets_cache.insert(asset_id, (modified, metadata));
            }
        }
        Ok(())
    }
    /// Spawn a background thread re-running `fs_sync` every 15 seconds,
    /// logging (but not propagating) errors.
    pub fn spawn_sync(asset_db: Arc<RwLock<AssetRegistry>>) -> thread::JoinHandle<()> {
        crate::util::spawn_thread("asset-registry", move || loop {
            if let Err(e) = asset_db.write().unwrap().fs_sync() {
                error!("registry fs_sync failed: {:?}", e);
            }
            thread::sleep(Duration::from_secs(15));
            // TODO handle shutdown
        })
    }
}
/// Asset metadata as loaded from a registry JSON file.
#[derive(Serialize, Deserialize, Clone, Debug)]
pub struct AssetMeta {
    // free-form contract JSON; omitted from output when null
    #[serde(skip_serializing_if = "JsonValue::is_null")]
    pub contract: JsonValue,
    // issuer entity JSON; `domain()` reads its "domain" field
    #[serde(skip_serializing_if = "JsonValue::is_null")]
    pub entity: JsonValue,
    pub precision: u8,
    pub name: String,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub ticker: Option<String>,
}
impl AssetMeta {
fn domain(&self) -> Option<&str> {
self.entity["domain"].as_str()
}
}
/// Sorting settings for asset listings: a field and a direction.
pub struct AssetSorting(AssetSortField, AssetSortDir);
pub enum AssetSortField {
    Name,
    Domain,
    Ticker,
}
pub enum AssetSortDir {
    Descending,
    Ascending,
}
// boxed comparison function over two asset entries
type Comparator = Box<dyn Fn(&AssetEntry, &AssetEntry) -> cmp::Ordering>;
impl AssetSorting {
    /// Turn the sorting settings into a boxed comparator for `sort_by`.
    #[allow(clippy::wrong_self_convention)]
    fn as_comparator(self) -> Comparator {
        let sort_fn: Comparator = match self.0 {
            AssetSortField::Name => {
                // Order by name first, use asset id as a tie breaker. the other sorting fields
                // don't require this because they're guaranteed to be unique.
                Box::new(|a, b| lc_cmp(&a.1.name, &b.1.name).then_with(|| a.0.cmp(b.0)))
            }
            AssetSortField::Domain => Box::new(|a, b| a.1.domain().cmp(&b.1.domain())),
            AssetSortField::Ticker => Box::new(|a, b| lc_cmp_opt(&a.1.ticker, &b.1.ticker)),
        };
        match self.1 {
            AssetSortDir::Ascending => sort_fn,
            // descending order is the ascending comparator, reversed
            AssetSortDir::Descending => Box::new(move |a, b| sort_fn(a, b).reverse()),
        }
    }
    /// Parse the `sort_field` / `sort_dir` query-string parameters,
    /// defaulting to ticker ascending; errors on unrecognized values.
    pub fn from_query_params(query: &HashMap<String, String>) -> Result<Self> {
        let field = match query.get("sort_field").map(String::as_str) {
            None => AssetSortField::Ticker,
            Some("name") => AssetSortField::Name,
            Some("domain") => AssetSortField::Domain,
            Some("ticker") => AssetSortField::Ticker,
            _ => bail!("invalid sort field"),
        };
        let dir = match query.get("sort_dir").map(String::as_str) {
            None => AssetSortDir::Ascending,
            Some("asc") => AssetSortDir::Ascending,
            Some("desc") => AssetSortDir::Descending,
            _ => bail!("invalid sort direction"),
        };
        Ok(Self(field, dir))
    }
}
/// Compare two strings case-insensitively, i.e. by their Unicode-lowercased
/// forms.
fn lc_cmp(a: &str, b: &str) -> cmp::Ordering {
    let a_lower = a.to_lowercase();
    let b_lower = b.to_lowercase();
    a_lower.cmp(&b_lower)
}
/// Compare two optional strings case-insensitively; `None` sorts before
/// `Some` (matching `Option`'s derived ordering).
fn lc_cmp_opt(a: &Option<String>, b: &Option<String>) -> cmp::Ordering {
    match (a, b) {
        (Some(a), Some(b)) => a.to_lowercase().cmp(&b.to_lowercase()),
        (Some(_), None) => cmp::Ordering::Greater,
        (None, Some(_)) => cmp::Ordering::Less,
        (None, None) => cmp::Ordering::Equal,
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
mempool/electrs | https://github.com/mempool/electrs/blob/3000bd13e76e2f33d0844a30489108846954d0a3/src/elements/mod.rs | src/elements/mod.rs | use bitcoin::hashes::{hex::ToHex, Hash};
use elements::secp256k1_zkp::ZERO_TWEAK;
use elements::{confidential::Value, encode::serialize, issuance::ContractHash, AssetId, TxIn};
pub mod asset;
pub mod peg;
mod registry;
use asset::get_issuance_entropy;
pub use asset::{lookup_asset, LiquidAsset};
pub use registry::{AssetRegistry, AssetSorting};
/// Serializable view of a tx input's issuance: explicit amounts when known,
/// hex-encoded commitments when blinded.
#[derive(Serialize, Deserialize, Clone)]
pub struct IssuanceValue {
    pub asset_id: String,
    pub is_reissuance: bool,
    // only set for reissuances
    #[serde(skip_serializing_if = "Option::is_none")]
    pub asset_blinding_nonce: Option<String>,
    // only set for initial issuances
    #[serde(skip_serializing_if = "Option::is_none")]
    pub contract_hash: Option<String>,
    pub asset_entropy: String,
    // explicit issued amount (0 for Null); None when confidential
    #[serde(skip_serializing_if = "Option::is_none")]
    pub assetamount: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub assetamountcommitment: Option<String>,
    // explicit reissuance-token amount (0 for Null); None when confidential
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tokenamount: Option<u64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tokenamountcommitment: Option<String>,
}
impl From<&TxIn> for IssuanceValue {
    /// Build the serializable issuance view for an input.
    ///
    /// Panics when the input has no issuance or its entropy is malformed —
    /// callers are expected to check `has_issuance` first.
    fn from(txin: &TxIn) -> Self {
        let issuance = &txin.asset_issuance;
        // a non-zero blinding nonce marks a reissuance
        let is_reissuance = issuance.asset_blinding_nonce != ZERO_TWEAK;
        let asset_entropy = get_issuance_entropy(txin).expect("invalid issuance");
        let asset_id = AssetId::from_entropy(asset_entropy);
        // NOTE(review): unlike asset::IssuedAsset::new, an all-zero entropy
        // field is not special-cased here and still yields Some contract
        // hash — confirm whether that asymmetry is intentional.
        let contract_hash = if !is_reissuance {
            Some(ContractHash::from_slice(&issuance.asset_entropy).expect("invalid asset entropy"))
        } else {
            None
        };
        IssuanceValue {
            asset_id: asset_id.to_hex(),
            asset_entropy: asset_entropy.to_hex(),
            contract_hash: contract_hash.map(|h| h.to_hex()),
            is_reissuance,
            asset_blinding_nonce: if is_reissuance {
                Some(hex::encode(issuance.asset_blinding_nonce.as_ref()))
            } else {
                None
            },
            // explicit amounts go out as numbers (Null as 0); blinded
            // amounts go out as hex commitments instead
            assetamount: match issuance.amount {
                Value::Explicit(value) => Some(value),
                Value::Null => Some(0),
                Value::Confidential(..) => None,
            },
            assetamountcommitment: match issuance.amount {
                Value::Confidential(..) => Some(hex::encode(serialize(&issuance.amount))),
                _ => None,
            },
            tokenamount: match issuance.inflation_keys {
                Value::Explicit(value) => Some(value),
                Value::Null => Some(0),
                Value::Confidential(..) => None,
            },
            tokenamountcommitment: match issuance.inflation_keys {
                Value::Confidential(..) => Some(hex::encode(serialize(&issuance.inflation_keys))),
                _ => None,
            },
        }
    }
}
| rust | MIT | 3000bd13e76e2f33d0844a30489108846954d0a3 | 2026-01-04T20:24:15.088141Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.