repo stringlengths 6 65 | file_url stringlengths 81 311 | file_path stringlengths 6 227 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-04 15:31:58 2026-01-04 20:25:31 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/rfc4315.rs | imap-proto/src/parser/rfc4315.rs | //!
//! https://tools.ietf.org/html/rfc4315
//!
//! The IMAP UIDPLUS Extension
//!
use nom::{
branch::alt,
bytes::streaming::{tag, tag_no_case},
combinator::map,
multi::separated_list1,
sequence::{preceded, tuple},
IResult,
};
use crate::parser::core::number;
use crate::types::*;
/// Extends resp-text-code as follows:
///
/// ```ignore
/// resp-text-code =/ resp-code-apnd
/// resp-code-apnd = "APPENDUID" SP nz-number SP append-uid
/// append-uid =/ uid-set
/// ; only permitted if client uses [MULTIAPPEND]
/// ; to append multiple messages.
/// ```
///
/// [RFC4315 - 3 Additional Response Codes](https://tools.ietf.org/html/rfc4315#section-3)
pub(crate) fn resp_text_code_append_uid(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
    map(
        preceded(
            // Case-insensitive keyword, then a single space separator.
            tag_no_case(b"APPENDUID "),
            // nz-number (UIDVALIDITY) SP append-uid; uid-set also covers the
            // multi-message MULTIAPPEND case.
            tuple((number, tag(" "), uid_set)),
        ),
        |(fst, _, snd)| ResponseCode::AppendUid(fst, snd),
    )(i)
}
/// Extends resp-text-code as follows:
///
/// ```ignore
/// resp-text-code =/ resp-code-copy
/// resp-code-copy = "COPYUID" SP nz-number SP uid-set
/// ```
///
/// [RFC4315 - 3 Additional Response Codes](https://tools.ietf.org/html/rfc4315#section-3)
pub(crate) fn resp_text_code_copy_uid(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
    map(
        preceded(
            tag_no_case(b"COPYUID "),
            // UIDVALIDITY SP source uid-set SP destination uid-set.
            tuple((number, tag(" "), uid_set, tag(" "), uid_set)),
        ),
        |(fst, _, snd, _, trd)| ResponseCode::CopyUid(fst, snd, trd),
    )(i)
}
/// Extends resp-text-code as follows:
///
/// ```ignore
/// resp-text-code =/ "UIDNOTSTICKY"
/// ```
///
/// [RFC4315 - 3 Additional Response Codes](https://tools.ietf.org/html/rfc4315#section-3)
pub(crate) fn resp_text_code_uid_not_sticky(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
    // Bare keyword, no arguments.
    map(tag_no_case(b"UIDNOTSTICKY"), |_| ResponseCode::UidNotSticky)(i)
}
/// Parses the uid-set nonterminal:
///
/// ```ignore
/// uid-set = (uniqueid / uid-range) *("," uid-set)
/// ```
///
/// [RFC4315 - 4 Formal Syntax](https://tools.ietf.org/html/rfc4315#section-4)
fn uid_set(i: &[u8]) -> IResult<&[u8], Vec<UidSetMember>> {
    // One or more comma-separated members; each member is either a
    // range ("2:4") or a single unique id.
    separated_list1(tag(","), alt((uid_range, map(number, From::from))))(i)
}
/// Parses the uid-set nonterminal:
///
/// ```ignore
/// uid-range = (uniqueid ":" uniqueid)
/// ; two uniqueid values and all values
/// ; between these two regards of order.
/// ; Example: 2:4 and 4:2 are equivalent.
/// ```
///
/// [RFC4315 - 4 Formal Syntax](https://tools.ietf.org/html/rfc4315#section-4)
fn uid_range(i: &[u8]) -> IResult<&[u8], UidSetMember> {
map(
nom::sequence::separated_pair(number, tag(":"), number),
|(fst, snd)| if fst <= snd { fst..=snd } else { snd..=fst }.into(),
)(i)
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/rfc4314.rs | imap-proto/src/parser/rfc4314.rs | //!
//! Current
//! https://tools.ietf.org/html/rfc4314
//!
//! Original
//! https://tools.ietf.org/html/rfc2086
//!
//! The IMAP ACL Extension
//!
use std::borrow::Cow;
use nom::{
bytes::streaming::tag_no_case,
character::complete::{space0, space1},
combinator::map,
multi::separated_list0,
sequence::{preceded, separated_pair, tuple},
IResult,
};
use crate::parser::core::astring_utf8;
use crate::parser::rfc3501::mailbox;
use crate::types::*;
/// 3.6. ACL Response
/// ```ignore
/// acl_response ::= "ACL" SP mailbox SP acl_list
/// ```
pub(crate) fn acl(i: &[u8]) -> IResult<&[u8], Response<'_>> {
    let (rest, (_, _, mailbox, acls)) = tuple((
        tag_no_case("ACL"),
        space1,
        map(mailbox, Cow::Borrowed),
        // acl_list consumes its own leading whitespace (see `acl_list`).
        acl_list,
    ))(i)?;
    Ok((rest, Response::Acl(Acl { mailbox, acls })))
}
/// ```ignore
/// acl_list ::= *(SP acl_entry)
/// ```
fn acl_list(i: &[u8]) -> IResult<&[u8], Vec<AclEntry<'_>>> {
    // Zero or more entries; the grammar's leading SP is folded into `space0`
    // so an empty list is also accepted.
    preceded(space0, separated_list0(space1, acl_entry))(i)
}
/// ```ignore
/// acl_entry ::= SP identifier SP rights
/// ```
fn acl_entry(i: &[u8]) -> IResult<&[u8], AclEntry<'_>> {
    let (rest, (identifier, rights)) = separated_pair(
        map(astring_utf8, Cow::Borrowed),
        space1,
        // Rights arrive as one word; each character maps to one AclRight.
        map(astring_utf8, map_text_to_rights),
    )(i)?;
    Ok((rest, AclEntry { identifier, rights }))
}
/// 3.7. LISTRIGHTS Response
/// ```ignore
/// list_rights_response ::= "LISTRIGHTS" SP mailbox SP identifier SP required_rights *(SP optional_rights)
/// ```
pub(crate) fn list_rights(i: &[u8]) -> IResult<&[u8], Response<'_>> {
    let (rest, (_, _, mailbox, _, identifier, _, required, optional)) = tuple((
        tag_no_case("LISTRIGHTS"),
        space1,
        map(mailbox, Cow::Borrowed),
        space1,
        // Identifier (user) the listed rights apply to.
        map(astring_utf8, Cow::Borrowed),
        space1,
        // Rights that are always granted to this identifier.
        map(astring_utf8, map_text_to_rights),
        // Rights that may additionally be granted; flattened (see below).
        list_rights_optional,
    ))(i)?;
    Ok((
        rest,
        Response::ListRights(ListRights {
            mailbox,
            identifier,
            required,
            optional,
        }),
    ))
}
fn list_rights_optional(i: &[u8]) -> IResult<&[u8], Vec<AclRight>> {
let (rest, items) = preceded(space0, separated_list0(space1, astring_utf8))(i)?;
Ok((
rest,
items
.into_iter()
.flat_map(|s| s.chars().map(|c| c.into()))
.collect(),
))
}
/// 3.7. MYRIGHTS Response
/// ```ignore
/// my_rights_response ::= "MYRIGHTS" SP mailbox SP rights
/// ```
pub(crate) fn my_rights(i: &[u8]) -> IResult<&[u8], Response<'_>> {
    let (rest, (_, _, mailbox, _, rights)) = tuple((
        tag_no_case("MYRIGHTS"),
        space1,
        map(mailbox, Cow::Borrowed),
        space1,
        // One word of rights; each character maps to one AclRight.
        map(astring_utf8, map_text_to_rights),
    ))(i)?;
    Ok((rest, Response::MyRights(MyRights { mailbox, rights })))
}
/// helper routine to map a string to a vec of AclRights
fn map_text_to_rights(i: &str) -> Vec<AclRight> {
i.chars().map(|c| c.into()).collect()
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/rfc5161.rs | imap-proto/src/parser/rfc5161.rs | //!
//! https://tools.ietf.org/html/rfc5161
//!
//! The IMAP ENABLE Extension
//!
use nom::{
bytes::streaming::tag_no_case,
character::streaming::char,
combinator::map,
multi::many0,
sequence::{preceded, tuple},
IResult,
};
use std::borrow::Cow;
use crate::parser::core::atom;
use crate::types::*;
// The ENABLED response lists capabilities that were enabled in response
// to a ENABLE command.
// [RFC5161 - 3.2 The ENABLED Response](https://tools.ietf.org/html/rfc5161#section-3.2)
pub(crate) fn resp_enabled(i: &[u8]) -> IResult<&[u8], Response<'_>> {
    // ENABLED data has the same shape as a capability list, so it is
    // reported through the Capabilities response variant.
    map(enabled_data, Response::Capabilities)(i)
}
/// Parses the payload of an ENABLED response: the case-insensitive
/// "ENABLED" keyword followed by zero or more space-prefixed capabilities.
fn enabled_data(i: &[u8]) -> IResult<&[u8], Vec<Capability<'_>>> {
    preceded(
        tag_no_case("ENABLED"),
        many0(preceded(char(' '), capability)),
    )(i)
}
/// Parses a single capability name; any atom is accepted and kept as a
/// borrowed string.
fn capability(i: &[u8]) -> IResult<&[u8], Capability<'_>> {
    map(atom, |name| Capability::Atom(Cow::Borrowed(name)))(i)
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/rfc2087.rs | imap-proto/src/parser/rfc2087.rs | //!
//! https://tools.ietf.org/html/rfc2087
//!
//! IMAP4 QUOTA extension
//!
use std::borrow::Cow;
use nom::{
branch::alt,
bytes::streaming::{tag, tag_no_case},
character::streaming::space1,
combinator::map,
multi::many0,
multi::separated_list0,
sequence::{delimited, preceded, tuple},
IResult,
};
use crate::parser::core::astring_utf8;
use crate::types::*;
use super::core::number_64;
/// 5.1. QUOTA Response
/// ```ignore
/// quota_response ::= "QUOTA" SP astring SP quota_list
/// ```
pub(crate) fn quota(i: &[u8]) -> IResult<&[u8], Response<'_>> {
    let (rest, (_, _, root_name, _, resources)) = tuple((
        tag_no_case("QUOTA"),
        space1,
        // Quota root name, e.g. "" for the default root.
        map(astring_utf8, Cow::Borrowed),
        space1,
        quota_list,
    ))(i)?;
    Ok((
        rest,
        Response::Quota(Quota {
            root_name,
            resources,
        }),
    ))
}
/// ```ignore
/// quota_list ::= "(" #quota_resource ")"
/// ```
pub(crate) fn quota_list(i: &[u8]) -> IResult<&[u8], Vec<QuotaResource<'_>>> {
    // Parenthesized, space-separated resource triples; may be empty.
    // `space1` matches one *or more* spaces, which also tolerates servers
    // (e.g. Archiveopteryx) that emit double spaces between resources.
    delimited(tag("("), separated_list0(space1, quota_resource), tag(")"))(i)
}
/// ```ignore
/// quota_resource ::= atom SP number SP number
/// ```
pub(crate) fn quota_resource(i: &[u8]) -> IResult<&[u8], QuotaResource<'_>> {
    // name SP current-usage SP limit; both numbers parsed as 64-bit.
    let (rest, (name, _, usage, _, limit)) =
        tuple((quota_resource_name, space1, number_64, space1, number_64))(i)?;
    Ok((rest, QuotaResource { name, usage, limit }))
}
/// Parses a quota resource name: the well-known STORAGE/MESSAGE names map
/// to dedicated variants, anything else is preserved verbatim as an atom.
pub(crate) fn quota_resource_name(i: &[u8]) -> IResult<&[u8], QuotaResourceName<'_>> {
    alt((
        map(tag_no_case("STORAGE"), |_| QuotaResourceName::Storage),
        map(tag_no_case("MESSAGE"), |_| QuotaResourceName::Message),
        map(map(astring_utf8, Cow::Borrowed), QuotaResourceName::Atom),
    ))(i)
}
/// 5.2. QUOTAROOT Response
/// ```ignore
/// quotaroot_response ::= "QUOTAROOT" SP astring *(SP astring)
/// ```
pub(crate) fn quota_root(i: &[u8]) -> IResult<&[u8], Response<'_>> {
    let (rest, (_, _, mailbox_name, quota_root_names)) = tuple((
        tag_no_case("QUOTAROOT"),
        space1,
        map(astring_utf8, Cow::Borrowed),
        // A mailbox may belong to zero or more quota roots.
        many0(preceded(space1, map(astring_utf8, Cow::Borrowed))),
    ))(i)?;
    Ok((
        rest,
        Response::QuotaRoot(QuotaRoot {
            mailbox_name,
            quota_root_names,
        }),
    ))
}
// Unit tests for the RFC 2087 QUOTA/QUOTAROOT parsers.
#[cfg(test)]
mod tests {
    use super::*;
    use assert_matches::assert_matches;
    use std::borrow::Cow;

    #[test]
    fn test_quota() {
        assert_matches!(
            quota(b"QUOTA \"\" (STORAGE 10 512)"),
            Ok((_, r)) => {
                assert_eq!(
                    r,
                    Response::Quota(Quota {
                        root_name: Cow::Borrowed(""),
                        resources: vec![QuotaResource {
                            name: QuotaResourceName::Storage,
                            usage: 10,
                            limit: 512
                        }]
                    })
                );
            }
        );
    }

    #[test]
    fn test_quota_spaces() {
        // Archiveopteryx 3.2.0 generates QUOTA resources with double space.
        // This is a test of a workaround for such incorrect implementation of QUOTA.
        assert_matches!(
            quota(b"QUOTA \"\" (STORAGE 0  2147483647 MESSAGE 0  2147483647)"),
            Ok((_, r)) => {
                assert_eq!(
                    r,
                    Response::Quota(Quota {
                        root_name: Cow::Borrowed(""),
                        resources: vec![QuotaResource {
                            name: QuotaResourceName::Storage,
                            usage: 0,
                            limit: 2147483647
                        }, QuotaResource {
                            name: QuotaResourceName::Message,
                            usage: 0,
                            limit: 2147483647
                        }]
                    })
                );
            }
        );
    }

    #[test]
    fn test_quota_response_data() {
        // End-to-end: the QUOTA parser is reachable from the generic
        // response_data entry point.
        assert_matches!(
            crate::parser::rfc3501::response_data(b"* QUOTA \"\" (STORAGE 10 512)\r\n"),
            Ok((_, r)) => {
                assert_eq!(
                    r,
                    Response::Quota(Quota {
                        root_name: Cow::Borrowed(""),
                        resources: vec![QuotaResource {
                            name: QuotaResourceName::Storage,
                            usage: 10,
                            limit: 512
                        }]
                    })
                );
            }
        );
    }

    #[test]
    fn test_quota_list() {
        assert_matches!(
            quota_list(b"(STORAGE 10 512)"),
            Ok((_, r)) => {
                assert_eq!(
                    r,
                    vec![QuotaResource {
                        name: QuotaResourceName::Storage,
                        usage: 10,
                        limit: 512
                    }]
                );
            }
        );
        assert_matches!(
            quota_list(b"(MESSAGE 100 512)"),
            Ok((_, r)) => {
                assert_eq!(
                    r,
                    vec![QuotaResource {
                        name: QuotaResourceName::Message,
                        usage: 100,
                        limit: 512
                    }]
                );
            }
        );
        // Unknown resource names fall through to the Atom variant.
        assert_matches!(
            quota_list(b"(DAILY 55 200)"),
            Ok((_, r)) => {
                assert_eq!(
                    r,
                    vec![QuotaResource {
                        name: QuotaResourceName::Atom(Cow::Borrowed("DAILY")),
                        usage: 55,
                        limit: 200
                    }]
                );
            }
        );
    }

    #[test]
    fn test_quota_root_response_data() {
        assert_matches!(
            crate::parser::rfc3501::response_data("* QUOTAROOT INBOX \"\"\r\n".as_bytes()),
            Ok((_, r)) => {
                assert_eq!(
                    r,
                    Response::QuotaRoot(QuotaRoot{
                        mailbox_name: Cow::Borrowed("INBOX"),
                        quota_root_names: vec![Cow::Borrowed("")]
                    })
                );
            }
        );
    }

    // quota_root is a streaming parser; terminate the input so the trailing
    // many0 knows the list has ended.
    fn terminated_quota_root(i: &[u8]) -> IResult<&[u8], Response<'_>> {
        nom::sequence::terminated(quota_root, nom::bytes::streaming::tag("\r\n"))(i)
    }

    #[test]
    fn test_quota_root_without_root_names() {
        assert_matches!(
            terminated_quota_root(b"QUOTAROOT comp.mail.mime\r\n"),
            Ok((_, r)) => {
                assert_eq!(
                    r,
                    Response::QuotaRoot(QuotaRoot{
                        mailbox_name: Cow::Borrowed("comp.mail.mime"),
                        quota_root_names: vec![]
                    })
                );
            }
        );
    }

    #[test]
    fn test_quota_root2() {
        assert_matches!(
            terminated_quota_root(b"QUOTAROOT INBOX HU\r\n"),
            Ok((_, r)) => {
                assert_eq!(
                    r,
                    Response::QuotaRoot(QuotaRoot{
                        mailbox_name: Cow::Borrowed("INBOX"),
                        quota_root_names: vec![Cow::Borrowed("HU")]
                    })
                );
            }
        );
        assert_matches!(
            terminated_quota_root(b"QUOTAROOT INBOX \"\"\r\n"),
            Ok((_, r)) => {
                assert_eq!(
                    r,
                    Response::QuotaRoot(QuotaRoot{
                        mailbox_name: Cow::Borrowed("INBOX"),
                        quota_root_names: vec![Cow::Borrowed("")]
                    })
                );
            }
        );
        assert_matches!(
            terminated_quota_root(b"QUOTAROOT \"Inbox\" \"#Account\"\r\n"),
            Ok((_, r)) => {
                assert_eq!(
                    r,
                    Response::QuotaRoot(QuotaRoot{
                        mailbox_name: Cow::Borrowed("Inbox"),
                        quota_root_names: vec![Cow::Borrowed("#Account")]
                    })
                );
            }
        );
        assert_matches!(
            terminated_quota_root(b"QUOTAROOT \"Inbox\" \"#Account\" \"#Mailbox\"\r\n"),
            Ok((_, r)) => {
                assert_eq!(
                    r,
                    Response::QuotaRoot(QuotaRoot{
                        mailbox_name: Cow::Borrowed("Inbox"),
                        quota_root_names: vec![Cow::Borrowed("#Account"), Cow::Borrowed("#Mailbox")]
                    })
                );
            }
        );
    }
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/bodystructure.rs | imap-proto/src/parser/bodystructure.rs | use std::collections::HashMap;
use crate::types::BodyStructure;
/// An utility parser helping to find the appropriate
/// section part from a FETCH response.
pub struct BodyStructParser<'a> {
    // Root node of the parsed BODYSTRUCTURE tree.
    root: &'a BodyStructure<'a>,
    // Section path (part numbers) of the node currently being visited.
    prefix: Vec<u32>,
    // Running part counter used while walking a multipart's children.
    iter: u32,
    // Section path -> node index built by `parse`.
    map: HashMap<Vec<u32>, &'a BodyStructure<'a>>,
}
impl<'a> BodyStructParser<'a> {
    /// Returns a new parser, eagerly indexing every node of `root` by its
    /// IMAP section path (e.g. `[1, 2]` for part 1.2).
    ///
    /// # Arguments
    ///
    /// * `root` - The root of the `BodyStructure` response.
    pub fn new(root: &'a BodyStructure<'a>) -> Self {
        let mut parser = BodyStructParser {
            root,
            prefix: vec![],
            iter: 1,
            map: HashMap::new(),
        };
        parser.parse(parser.root);
        parser
    }

    /// Search particular element within the bodystructure.
    ///
    /// Returns the section path of a matching node, or `None` if no node
    /// matches. NOTE(review): if several nodes match, which one is returned
    /// depends on `HashMap` iteration order and is therefore unspecified.
    ///
    /// # Arguments
    ///
    /// * `func` - The filter used to search elements within the bodystructure.
    pub fn search<F>(&self, func: F) -> Option<Vec<u32>>
    where
        F: Fn(&'a BodyStructure<'a>) -> bool,
    {
        let elem: Vec<_> = self
            .map
            .iter()
            .filter_map(|(k, v)| {
                if func(v) {
                    let slice: &[u32] = k;
                    Some(slice)
                } else {
                    None
                }
            })
            .collect();
        elem.first().map(|a| a.to_vec())
    }

    /// Recursively records `node` (and, for multiparts, all of its children)
    /// in the section-path map.
    fn parse(&mut self, node: &'a BodyStructure) {
        match node {
            BodyStructure::Multipart { bodies, .. } => {
                let vec = self.prefix.clone();
                self.map.insert(vec, node);
                for (i, n) in bodies.iter().enumerate() {
                    // IMAP section parts are numbered 1, 2, 3, ...
                    // The previous `self.iter += i as u32` skipped numbers as
                    // soon as a multipart had three or more children
                    // (producing 1, 2, 4, 7, ...); assign the 1-based child
                    // index directly instead.
                    self.iter = i as u32 + 1;
                    self.prefix.push(self.iter);
                    self.parse(n);
                    self.prefix.pop();
                }
                self.iter = 1;
            }
            _ => {
                let vec = self.prefix.clone();
                self.map.insert(vec, node);
            }
        };
    }
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/rfc3501/body_structure.rs | imap-proto/src/parser/rfc3501/body_structure.rs | use nom::{
branch::alt,
bytes::streaming::{tag, tag_no_case},
character::streaming::char,
combinator::{map, opt},
multi::many1,
sequence::{delimited, preceded, tuple},
IResult,
};
use std::borrow::Cow;
use crate::{
parser::{core::*, rfc3501::envelope},
types::*,
};
// body-fields = body-fld-param SP body-fld-id SP body-fld-desc SP
// body-fld-enc SP body-fld-octets
// Parses the common field quintet shared by all single-part bodies.
fn body_fields(i: &[u8]) -> IResult<&[u8], BodyFields<'_>> {
    let (i, (param, _, id, _, description, _, transfer_encoding, _, octets)) = tuple((
        body_param,
        tag(" "),
        // body id seems to refer to the Message-ID or possibly Content-ID header, which
        // by the definition in RFC 2822 seems to resolve to all ASCII characters (through
        // a large amount of indirection which I did not have the patience to fully explore)
        nstring_utf8,
        tag(" "),
        // Per https://tools.ietf.org/html/rfc2045#section-8, description should be all ASCII
        nstring_utf8,
        tag(" "),
        body_encoding,
        tag(" "),
        number,
    ))(i)?;
    Ok((
        i,
        BodyFields {
            param,
            id: id.map(Cow::Borrowed),
            description: description.map(Cow::Borrowed),
            transfer_encoding,
            octets,
        },
    ))
}
// body-ext-1part = body-fld-md5 [SP body-fld-dsp [SP body-fld-lang
// [SP body-fld-loc *(SP body-extension)]]]
// ; MUST NOT be returned on non-extensible
// ; "BODY" fetch
// Parses the optional extension fields of a single-part body.
// Each field is optional and may only appear if the previous one did,
// hence the nested-option flattening via `opt_opt`.
fn body_ext_1part(i: &[u8]) -> IResult<&[u8], BodyExt1Part<'_>> {
    let (i, (md5, disposition, language, location, extension)) = tuple((
        // Per RFC 1864, MD5 values are base64-encoded
        opt_opt(preceded(tag(" "), nstring_utf8)),
        opt_opt(preceded(tag(" "), body_disposition)),
        opt_opt(preceded(tag(" "), body_lang)),
        // Location appears to reference a URL, which by RFC 1738 (section 2.2) should be ASCII
        opt_opt(preceded(tag(" "), nstring_utf8)),
        opt(preceded(tag(" "), body_extension)),
    ))(i)?;
    Ok((
        i,
        BodyExt1Part {
            md5: md5.map(Cow::Borrowed),
            disposition,
            language,
            location: location.map(Cow::Borrowed),
            extension,
        },
    ))
}
// body-ext-mpart = body-fld-param [SP body-fld-dsp [SP body-fld-lang
// [SP body-fld-loc *(SP body-extension)]]]
// ; MUST NOT be returned on non-extensible
// ; "BODY" fetch
// Parses the optional extension fields of a multipart body.
// Like body_ext_1part, but the first extension field is the parameter
// list rather than an MD5 value.
fn body_ext_mpart(i: &[u8]) -> IResult<&[u8], BodyExtMPart<'_>> {
    let (i, (param, disposition, language, location, extension)) = tuple((
        opt_opt(preceded(tag(" "), body_param)),
        opt_opt(preceded(tag(" "), body_disposition)),
        opt_opt(preceded(tag(" "), body_lang)),
        // Location appears to reference a URL, which by RFC 1738 (section 2.2) should be ASCII
        opt_opt(preceded(tag(" "), nstring_utf8)),
        opt(preceded(tag(" "), body_extension)),
    ))(i)?;
    Ok((
        i,
        BodyExtMPart {
            param,
            disposition,
            language,
            location: location.map(Cow::Borrowed),
            extension,
        },
    ))
}
// Parses a quoted content-transfer-encoding value. The well-known encodings
// map to dedicated variants; any other quoted string is preserved verbatim.
fn body_encoding(i: &[u8]) -> IResult<&[u8], ContentEncoding<'_>> {
    alt((
        delimited(
            char('"'),
            alt((
                map(tag_no_case("7BIT"), |_| ContentEncoding::SevenBit),
                map(tag_no_case("8BIT"), |_| ContentEncoding::EightBit),
                map(tag_no_case("BINARY"), |_| ContentEncoding::Binary),
                map(tag_no_case("BASE64"), |_| ContentEncoding::Base64),
                map(tag_no_case("QUOTED-PRINTABLE"), |_| {
                    ContentEncoding::QuotedPrintable
                }),
            )),
            char('"'),
        ),
        map(string_utf8, |enc| {
            ContentEncoding::Other(Cow::Borrowed(enc))
        }),
    ))(i)
}
// Parses body-fld-lang: either a single (possibly NIL) language string or a
// parenthesized, non-empty list of language strings.
fn body_lang(i: &[u8]) -> IResult<&[u8], Option<Vec<Cow<'_, str>>>> {
    alt((
        // body language seems to refer to RFC 3066 language tags, which should be ASCII-only
        map(nstring_utf8, |v| v.map(|s| vec![Cow::Borrowed(s)])),
        map(
            parenthesized_nonempty_list(map(string_utf8, Cow::Borrowed)),
            Option::from,
        ),
    ))(i)
}
// Parses body-fld-param: NIL, or a parenthesized non-empty list of
// space-separated key/value string pairs.
fn body_param(i: &[u8]) -> IResult<&[u8], BodyParams<'_>> {
    alt((
        map(nil, |_| None),
        map(
            parenthesized_nonempty_list(map(
                tuple((string_utf8, tag(" "), string_utf8)),
                |(key, _, val)| (Cow::Borrowed(key), Cow::Borrowed(val)),
            )),
            Option::from,
        ),
    ))(i)
}
// Parses a body-extension value: a number, an (optionally NIL) string, or a
// parenthesized non-empty list of nested extensions (recursive).
fn body_extension(i: &[u8]) -> IResult<&[u8], BodyExtension<'_>> {
    alt((
        map(number, BodyExtension::Num),
        // Cannot find documentation on character encoding for body extension values.
        // So far, assuming UTF-8 seems fine, please report if you run into issues here.
        map(nstring_utf8, |v| BodyExtension::Str(v.map(Cow::Borrowed))),
        map(
            parenthesized_nonempty_list(body_extension),
            BodyExtension::List,
        ),
    ))(i)
}
// Parses body-fld-dsp: NIL, or "(type params)" where params is a body
// parameter list (e.g. ("attachment" ("FILENAME" "a.pdf"))).
fn body_disposition(i: &[u8]) -> IResult<&[u8], Option<ContentDisposition<'_>>> {
    alt((
        map(nil, |_| None),
        paren_delimited(map(
            tuple((string_utf8, tag(" "), body_param)),
            |(ty, _, params)| {
                Some(ContentDisposition {
                    ty: Cow::Borrowed(ty),
                    params,
                })
            },
        )),
    ))(i)
}
// Parses a generic single-part body (any type/subtype that is not handled
// by the more specific TEXT or MESSAGE/RFC822 grammars).
fn body_type_basic(i: &[u8]) -> IResult<&[u8], BodyStructure<'_>> {
    map(
        tuple((
            string_utf8,
            tag(" "),
            string_utf8,
            tag(" "),
            body_fields,
            body_ext_1part,
        )),
        |(ty, _, subtype, _, fields, ext)| BodyStructure::Basic {
            common: BodyContentCommon {
                ty: ContentType {
                    ty: Cow::Borrowed(ty),
                    subtype: Cow::Borrowed(subtype),
                    params: fields.param,
                },
                disposition: ext.disposition,
                language: ext.language,
                location: ext.location,
            },
            other: BodyContentSinglePart {
                id: fields.id,
                md5: ext.md5,
                octets: fields.octets,
                description: fields.description,
                transfer_encoding: fields.transfer_encoding,
            },
            extension: ext.extension,
        },
    )(i)
}
// Parses a TEXT/* body: like the basic form, plus a trailing line count.
fn body_type_text(i: &[u8]) -> IResult<&[u8], BodyStructure<'_>> {
    map(
        tuple((
            tag_no_case("\"TEXT\""),
            tag(" "),
            string_utf8,
            tag(" "),
            body_fields,
            tag(" "),
            // Number of text lines in the body.
            number,
            body_ext_1part,
        )),
        |(_, _, subtype, _, fields, _, lines, ext)| BodyStructure::Text {
            common: BodyContentCommon {
                ty: ContentType {
                    ty: Cow::Borrowed("TEXT"),
                    subtype: Cow::Borrowed(subtype),
                    params: fields.param,
                },
                disposition: ext.disposition,
                language: ext.language,
                location: ext.location,
            },
            other: BodyContentSinglePart {
                id: fields.id,
                md5: ext.md5,
                octets: fields.octets,
                description: fields.description,
                transfer_encoding: fields.transfer_encoding,
            },
            lines,
            extension: ext.extension,
        },
    )(i)
}
// Parses a MESSAGE/RFC822 body: the basic fields plus the embedded
// message's envelope, its (recursive) body structure, and a line count.
fn body_type_message(i: &[u8]) -> IResult<&[u8], BodyStructure<'_>> {
    map(
        tuple((
            tag_no_case("\"MESSAGE\" \"RFC822\""),
            tag(" "),
            body_fields,
            tag(" "),
            envelope,
            tag(" "),
            body,
            tag(" "),
            number,
            body_ext_1part,
        )),
        |(_, _, fields, _, envelope, _, body, _, lines, ext)| BodyStructure::Message {
            common: BodyContentCommon {
                ty: ContentType {
                    ty: Cow::Borrowed("MESSAGE"),
                    subtype: Cow::Borrowed("RFC822"),
                    params: fields.param,
                },
                disposition: ext.disposition,
                language: ext.language,
                location: ext.location,
            },
            other: BodyContentSinglePart {
                id: fields.id,
                md5: ext.md5,
                octets: fields.octets,
                description: fields.description,
                transfer_encoding: fields.transfer_encoding,
            },
            envelope,
            // Boxed to keep the recursive enum finite in size.
            body: Box::new(body),
            lines,
            extension: ext.extension,
        },
    )(i)
}
// Parses a multipart body: one or more nested bodies, then the subtype
// (e.g. "ALTERNATIVE") and the multipart extension data.
fn body_type_multipart(i: &[u8]) -> IResult<&[u8], BodyStructure<'_>> {
    map(
        tuple((many1(body), tag(" "), string_utf8, body_ext_mpart)),
        |(bodies, _, subtype, ext)| BodyStructure::Multipart {
            common: BodyContentCommon {
                ty: ContentType {
                    ty: Cow::Borrowed("MULTIPART"),
                    subtype: Cow::Borrowed(subtype),
                    params: ext.param,
                },
                disposition: ext.disposition,
                language: ext.language,
                location: ext.location,
            },
            bodies,
            extension: ext.extension,
        },
    )(i)
}
// Parses a parenthesized body structure of any kind. The specific TEXT and
// MESSAGE/RFC822 grammars must be tried before the generic basic form,
// since their inputs would otherwise also match `body_type_basic`.
pub(crate) fn body(i: &[u8]) -> IResult<&[u8], BodyStructure<'_>> {
    paren_delimited(alt((
        body_type_text,
        body_type_message,
        body_type_basic,
        body_type_multipart,
    )))(i)
}
pub(crate) fn msg_att_body_structure(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
map(tuple((tag_no_case("BODYSTRUCTURE "), body)), |(_, body)| {
AttributeValue::BodyStructure(body)
})(i)
}
// Unit tests for the BODYSTRUCTURE parsers.
#[cfg(test)]
mod tests {
    use super::*;
    use assert_matches::assert_matches;

    const EMPTY: &[u8] = &[];

    // body-fld-param SP body-fld-id SP body-fld-desc SP body-fld-enc SP body-fld-octets
    const BODY_FIELDS: &str = r#"("foo" "bar") "id" "desc" "7BIT" 1337"#;
    const BODY_FIELD_PARAM_PAIR: (Cow<'_, str>, Cow<'_, str>) =
        (Cow::Borrowed("foo"), Cow::Borrowed("bar"));
    const BODY_FIELD_ID: Option<Cow<'_, str>> = Some(Cow::Borrowed("id"));
    const BODY_FIELD_DESC: Option<Cow<'_, str>> = Some(Cow::Borrowed("desc"));
    const BODY_FIELD_ENC: ContentEncoding = ContentEncoding::SevenBit;
    const BODY_FIELD_OCTETS: u32 = 1337;

    // Returns a TEXT/PLAIN body string together with its expected parse.
    fn mock_body_text() -> (String, BodyStructure<'static>) {
        (
            format!(r#"("TEXT" "PLAIN" {BODY_FIELDS} 42)"#),
            BodyStructure::Text {
                common: BodyContentCommon {
                    ty: ContentType {
                        ty: Cow::Borrowed("TEXT"),
                        subtype: Cow::Borrowed("PLAIN"),
                        params: Some(vec![BODY_FIELD_PARAM_PAIR]),
                    },
                    disposition: None,
                    language: None,
                    location: None,
                },
                other: BodyContentSinglePart {
                    md5: None,
                    transfer_encoding: BODY_FIELD_ENC,
                    octets: BODY_FIELD_OCTETS,
                    id: BODY_FIELD_ID,
                    description: BODY_FIELD_DESC,
                },
                lines: 42,
                extension: None,
            },
        )
    }

    #[test]
    fn test_body_param_data() {
        assert_matches!(body_param(br#"NIL"#), Ok((EMPTY, None)));
        assert_matches!(
            body_param(br#"("foo" "bar")"#),
            Ok((EMPTY, Some(param))) => {
                assert_eq!(param, vec![(Cow::Borrowed("foo"), Cow::Borrowed("bar"))]);
            }
        );
    }

    #[test]
    fn test_body_lang_data() {
        assert_matches!(
            body_lang(br#""bob""#),
            Ok((EMPTY, Some(langs))) => {
                assert_eq!(langs, vec!["bob"]);
            }
        );
        assert_matches!(
            body_lang(br#"("one" "two")"#),
            Ok((EMPTY, Some(langs))) => {
                assert_eq!(langs, vec!["one", "two"]);
            }
        );
        assert_matches!(body_lang(br#"NIL"#), Ok((EMPTY, None)));
    }

    #[test]
    fn test_body_extension_data() {
        assert_matches!(
            body_extension(br#""blah""#),
            Ok((EMPTY, BodyExtension::Str(Some(Cow::Borrowed("blah")))))
        );
        assert_matches!(
            body_extension(br#"NIL"#),
            Ok((EMPTY, BodyExtension::Str(None)))
        );
        assert_matches!(
            body_extension(br#"("hello")"#),
            Ok((EMPTY, BodyExtension::List(list))) => {
                assert_eq!(list, vec![BodyExtension::Str(Some(Cow::Borrowed("hello")))]);
            }
        );
        assert_matches!(
            body_extension(br#"(1337)"#),
            Ok((EMPTY, BodyExtension::List(list))) => {
                assert_eq!(list, vec![BodyExtension::Num(1337)]);
            }
        );
    }

    #[test]
    fn test_body_disposition_data() {
        assert_matches!(body_disposition(br#"NIL"#), Ok((EMPTY, None)));
        assert_matches!(
            body_disposition(br#"("attachment" ("FILENAME" "pages.pdf"))"#),
            Ok((EMPTY, Some(disposition))) => {
                assert_eq!(disposition, ContentDisposition {
                    ty: Cow::Borrowed("attachment"),
                    params: Some(vec![
                        (Cow::Borrowed("FILENAME"), Cow::Borrowed("pages.pdf"))
                    ])
                });
            }
        );
    }

    #[test]
    fn test_body_structure_text() {
        let (body_str, body_struct) = mock_body_text();
        assert_matches!(
            body(body_str.as_bytes()),
            Ok((_, text)) => {
                assert_eq!(text, body_struct);
            }
        );
    }

    #[test]
    fn test_body_structure_text_with_ext() {
        // All-NIL extension data parses to the same structure as no
        // extension data at all.
        let body_str = format!(r#"("TEXT" "PLAIN" {BODY_FIELDS} 42 NIL NIL NIL NIL)"#);
        let (_, text_body_struct) = mock_body_text();
        assert_matches!(
            body(body_str.as_bytes()),
            Ok((_, text)) => {
                assert_eq!(text, text_body_struct)
            }
        );
    }

    #[test]
    fn test_body_structure_basic() {
        const BODY: &[u8] = br#"("APPLICATION" "PDF" ("NAME" "pages.pdf") NIL NIL "BASE64" 38838 NIL ("attachment" ("FILENAME" "pages.pdf")) NIL NIL)"#;
        assert_matches!(
            body(BODY),
            Ok((_, basic)) => {
                assert_eq!(basic, BodyStructure::Basic {
                    common: BodyContentCommon {
                        ty: ContentType {
                            ty: Cow::Borrowed("APPLICATION"),
                            subtype: Cow::Borrowed("PDF"),
                            params: Some(vec![(Cow::Borrowed("NAME"), Cow::Borrowed("pages.pdf"))])
                        },
                        disposition: Some(ContentDisposition {
                            ty: Cow::Borrowed("attachment"),
                            params: Some(vec![(Cow::Borrowed("FILENAME"), Cow::Borrowed("pages.pdf"))])
                        }),
                        language: None,
                        location: None,
                    },
                    other: BodyContentSinglePart {
                        transfer_encoding: ContentEncoding::Base64,
                        octets: 38838,
                        id: None,
                        md5: None,
                        description: None,
                    },
                    extension: None,
                })
            }
        );
    }

    #[test]
    fn test_body_structure_message() {
        let (text_body_str, _) = mock_body_text();
        let envelope_str = r#"("Wed, 17 Jul 1996 02:23:25 -0700 (PDT)" "IMAP4rev1 WG mtg summary and minutes" (("Terry Gray" NIL "gray" "cac.washington.edu")) (("Terry Gray" NIL "gray" "cac.washington.edu")) (("Terry Gray" NIL "gray" "cac.washington.edu")) ((NIL NIL "imap" "cac.washington.edu")) ((NIL NIL "minutes" "CNRI.Reston.VA.US") ("John Klensin" NIL "KLENSIN" "MIT.EDU")) NIL NIL "<B27397-0100000@cac.washington.edu>")"#;
        let body_str =
            format!(r#"("MESSAGE" "RFC822" {BODY_FIELDS} {envelope_str} {text_body_str} 42)"#);
        assert_matches!(
            body(body_str.as_bytes()),
            Ok((_, BodyStructure::Message { .. }))
        );
    }

    #[test]
    fn test_body_structure_multipart() {
        let (text_body_str1, text_body_struct1) = mock_body_text();
        let (text_body_str2, text_body_struct2) = mock_body_text();
        let body_str =
            format!(r#"({text_body_str1}{text_body_str2} "ALTERNATIVE" NIL NIL NIL NIL)"#);
        assert_matches!(
            body(body_str.as_bytes()),
            Ok((_, multipart)) => {
                assert_eq!(multipart, BodyStructure::Multipart {
                    common: BodyContentCommon {
                        ty: ContentType {
                            ty: Cow::Borrowed("MULTIPART"),
                            subtype: Cow::Borrowed("ALTERNATIVE"),
                            params: None
                        },
                        language: None,
                        location: None,
                        disposition: None,
                    },
                    bodies: vec![
                        text_body_struct1,
                        text_body_struct2,
                    ],
                    extension: None
                });
            }
        );
    }
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/rfc3501/mod.rs | imap-proto/src/parser/rfc3501/mod.rs | //!
//! https://tools.ietf.org/html/rfc3501
//!
//! INTERNET MESSAGE ACCESS PROTOCOL
//!
use std::borrow::Cow;
use std::str::from_utf8;
use nom::{
branch::alt,
bytes::streaming::{tag, tag_no_case, take_while, take_while1},
character::streaming::char,
combinator::{map, map_res, opt, recognize, value},
multi::{many0, many1},
sequence::{delimited, pair, preceded, terminated, tuple},
IResult,
};
use crate::{
parser::{
core::*, rfc2087, rfc2971, rfc3501::body::*, rfc3501::body_structure::*, rfc4314, rfc4315,
rfc4551, rfc5161, rfc5256, rfc5464, rfc7162,
},
types::*,
};
use super::gmail;
pub mod body;
pub mod body_structure;
/// Returns true for characters allowed in an IMAP command tag:
/// any astring character except '+'.
fn is_tag_char(c: u8) -> bool {
    is_astring_char(c) && c != b'+'
}
/// Parses the "OK" response condition keyword.
fn status_ok(i: &[u8]) -> IResult<&[u8], Status> {
    let (rest, _) = tag_no_case("OK")(i)?;
    Ok((rest, Status::Ok))
}
/// Parses the "NO" response condition keyword.
fn status_no(i: &[u8]) -> IResult<&[u8], Status> {
    let (rest, _) = tag_no_case("NO")(i)?;
    Ok((rest, Status::No))
}
/// Parses the "BAD" response condition keyword.
fn status_bad(i: &[u8]) -> IResult<&[u8], Status> {
    let (rest, _) = tag_no_case("BAD")(i)?;
    Ok((rest, Status::Bad))
}
/// Parses the "PREAUTH" greeting keyword.
fn status_preauth(i: &[u8]) -> IResult<&[u8], Status> {
    let (rest, _) = tag_no_case("PREAUTH")(i)?;
    Ok((rest, Status::PreAuth))
}
/// Parses the "BYE" response keyword.
fn status_bye(i: &[u8]) -> IResult<&[u8], Status> {
    let (rest, _) = tag_no_case("BYE")(i)?;
    Ok((rest, Status::Bye))
}
// Parses any response status keyword, trying each alternative in turn.
fn status(i: &[u8]) -> IResult<&[u8], Status> {
    alt((status_ok, status_no, status_bad, status_preauth, status_bye))(i)
}
pub(crate) fn mailbox(i: &[u8]) -> IResult<&[u8], &str> {
    map(astring_utf8, |s| {
        // "INBOX" is case-insensitive per RFC 3501 section 5.1;
        // normalize its spelling so callers can compare directly.
        if s.eq_ignore_ascii_case("INBOX") {
            "INBOX"
        } else {
            s
        }
    })(i)
}
// Parses a system/extension flag such as "\Seen"; the returned slice
// includes the leading backslash.
fn flag_extension(i: &[u8]) -> IResult<&[u8], &str> {
    map_res(
        recognize(pair(tag(b"\\"), take_while(is_atom_char))),
        from_utf8,
    )(i)
}
/// Parses a single flag (system flag or keyword), with a lenience
/// workaround for non-conforming servers.
pub(crate) fn flag(i: &[u8]) -> IResult<&[u8], &str> {
    // Correct code is
    //     alt((flag_extension, atom))(i)
    //
    // Unfortunately, some unknown providers send the following response:
    //     * FLAGS (OIB-Seen-[Gmail]/All)
    //
    // As a workaround, ']' (resp-specials) is allowed here.
    alt((
        flag_extension,
        map_res(take_while1(is_astring_char), from_utf8),
    ))(i)
}
/// Parses a parenthesized flag list, with lenience workarounds for
/// non-conforming servers (see below).
fn flag_list(i: &[u8]) -> IResult<&[u8], Vec<Cow<'_, str>>> {
    // Correct code is
    //     parenthesized_list(flag)(i)
    //
    // Unfortunately, Zoho Mail Server (imap.zoho.com) sends the following response:
    //     * FLAGS (\Answered \Flagged \Deleted \Seen \Draft \*)
    //
    // As a workaround, "\*" is allowed here (via `flag_perm`).
    //
    // Also, surgemail sends an additional space before the closing bracket:
    //     * FLAGS (\Answered \Flagged \Deleted \Draft \Seen $Forwarded )
    //
    // As a workaround, optional spaces before the closing bracket are allowed.
    parenthesized_list(map(flag_perm, Cow::Borrowed))(i)
}
// Parses a flag as used in PERMANENTFLAGS: either the special "\*"
// (meaning clients may create new keywords) or a normal flag.
fn flag_perm(i: &[u8]) -> IResult<&[u8], &str> {
    alt((map_res(tag(b"\\*"), from_utf8), flag))(i)
}
/// Parses the ALERT response code (bare keyword, no payload).
fn resp_text_code_alert(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
    let (rest, _) = tag_no_case(b"ALERT")(i)?;
    Ok((rest, ResponseCode::Alert))
}
fn resp_text_code_badcharset(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
    map(
        preceded(
            tag_no_case(b"BADCHARSET"),
            // The parenthesized list of supported charsets is optional.
            opt(preceded(
                tag(b" "),
                parenthesized_nonempty_list(map(astring_utf8, Cow::Borrowed)),
            )),
        ),
        ResponseCode::BadCharset,
    )(i)
}
// Parses an inline CAPABILITY response code (same grammar as the
// standalone CAPABILITY response).
fn resp_text_code_capability(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
    map(capability_data, ResponseCode::Capabilities)(i)
}
/// Parses the PARSE response code (bare keyword, no payload).
fn resp_text_code_parse(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
    let (rest, _) = tag_no_case(b"PARSE")(i)?;
    Ok((rest, ResponseCode::Parse))
}
fn resp_text_code_permanent_flags(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
    map(
        preceded(
            tag_no_case(b"PERMANENTFLAGS "),
            // flag_perm also admits the special "\*" entry.
            parenthesized_list(map(flag_perm, Cow::Borrowed)),
        ),
        ResponseCode::PermanentFlags,
    )(i)
}
/// Parses the READ-ONLY response code (bare keyword, no payload).
fn resp_text_code_read_only(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
    let (rest, _) = tag_no_case(b"READ-ONLY")(i)?;
    Ok((rest, ResponseCode::ReadOnly))
}
/// Parses the READ-WRITE response code (bare keyword, no payload).
fn resp_text_code_read_write(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
    let (rest, _) = tag_no_case(b"READ-WRITE")(i)?;
    Ok((rest, ResponseCode::ReadWrite))
}
/// Parses the TRYCREATE response code (bare keyword, no payload).
fn resp_text_code_try_create(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
    let (rest, _) = tag_no_case(b"TRYCREATE")(i)?;
    Ok((rest, ResponseCode::TryCreate))
}
// Parses "UIDVALIDITY <n>".
fn resp_text_code_uid_validity(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
    map(
        preceded(tag_no_case(b"UIDVALIDITY "), number),
        ResponseCode::UidValidity,
    )(i)
}
fn resp_text_code_uid_next(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
map(
preceded(tag_no_case(b"UIDNEXT "), number),
ResponseCode::UidNext,
)(i)
}
fn resp_text_code_unseen(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
map(
preceded(tag_no_case(b"UNSEEN "), number),
ResponseCode::Unseen,
)(i)
}
fn resp_text_code(i: &[u8]) -> IResult<&[u8], ResponseCode<'_>> {
// Per the spec, the closing tag should be "] ".
// See `resp_text` for more on why this is done differently.
delimited(
tag(b"["),
alt((
resp_text_code_alert,
resp_text_code_badcharset,
resp_text_code_capability,
resp_text_code_parse,
resp_text_code_permanent_flags,
resp_text_code_uid_validity,
resp_text_code_uid_next,
resp_text_code_unseen,
resp_text_code_read_only,
resp_text_code_read_write,
resp_text_code_try_create,
rfc4551::resp_text_code_highest_mod_seq,
rfc4315::resp_text_code_append_uid,
rfc4315::resp_text_code_copy_uid,
rfc4315::resp_text_code_uid_not_sticky,
rfc5464::resp_text_code_metadata_long_entries,
rfc5464::resp_text_code_metadata_max_size,
rfc5464::resp_text_code_metadata_too_many,
rfc5464::resp_text_code_metadata_no_private,
)),
tag(b"]"),
)(i)
}
fn capability(i: &[u8]) -> IResult<&[u8], Capability<'_>> {
alt((
map(tag_no_case(b"IMAP4rev1"), |_| Capability::Imap4rev1),
map(
map(preceded(tag_no_case(b"AUTH="), atom), Cow::Borrowed),
Capability::Auth,
),
map(map(atom, Cow::Borrowed), Capability::Atom),
))(i)
}
/// Validates a parsed capability list: RFC 3501 (section 7.2.1) requires
/// the server to advertise IMAP4rev1, so a list without it is rejected.
fn ensure_capabilities_contains_imap4rev(
    capabilities: Vec<Capability<'_>>,
) -> Result<Vec<Capability<'_>>, ()> {
    match capabilities
        .iter()
        .any(|cap| *cap == Capability::Imap4rev1)
    {
        true => Ok(capabilities),
        false => Err(()),
    }
}
/// capability-data = "CAPABILITY" *(SP capability)
///
/// Fails the parse when the advertised list does not include IMAP4rev1,
/// which RFC 3501 mandates.
fn capability_data(i: &[u8]) -> IResult<&[u8], Vec<Capability<'_>>> {
    map_res(
        preceded(
            tag_no_case(b"CAPABILITY"),
            many0(preceded(char(' '), capability)),
        ),
        ensure_capabilities_contains_imap4rev,
    )(i)
}
/// SEARCH response: "SEARCH" *(SP nz-number).
fn mailbox_data_search(i: &[u8]) -> IResult<&[u8], MailboxDatum<'_>> {
    map(
        // Technically, trailing whitespace is not allowed here, but multiple
        // email servers in the wild seem to have it anyway (see #34, #108).
        terminated(
            preceded(tag_no_case(b"SEARCH"), many0(preceded(tag(" "), number))),
            opt(tag(" ")),
        ),
        MailboxDatum::Search,
    )(i)
}

/// "FLAGS" SP flag-list.
fn mailbox_data_flags(i: &[u8]) -> IResult<&[u8], MailboxDatum<'_>> {
    map(
        preceded(tag_no_case("FLAGS "), flag_list),
        MailboxDatum::Flags,
    )(i)
}

/// number SP "EXISTS" — number of messages in the mailbox.
fn mailbox_data_exists(i: &[u8]) -> IResult<&[u8], MailboxDatum<'_>> {
    map(
        terminated(number, tag_no_case(" EXISTS")),
        MailboxDatum::Exists,
    )(i)
}

/// One mailbox name attribute from a LIST/LSUB response. Known RFC 3501
/// and RFC 6154 attributes get dedicated variants; any other
/// backslash-prefixed atom is captured as `NameAttribute::Extension`.
/// The catch-all must stay last in the `alt` or it would shadow the
/// specific variants.
fn name_attribute(i: &[u8]) -> IResult<&[u8], NameAttribute<'_>> {
    alt((
        // RFC 3501
        value(NameAttribute::NoInferiors, tag_no_case(b"\\Noinferiors")),
        value(NameAttribute::NoSelect, tag_no_case(b"\\Noselect")),
        value(NameAttribute::Marked, tag_no_case(b"\\Marked")),
        value(NameAttribute::Unmarked, tag_no_case(b"\\Unmarked")),
        // RFC 6154
        value(NameAttribute::All, tag_no_case(b"\\All")),
        value(NameAttribute::Archive, tag_no_case(b"\\Archive")),
        value(NameAttribute::Drafts, tag_no_case(b"\\Drafts")),
        value(NameAttribute::Flagged, tag_no_case(b"\\Flagged")),
        value(NameAttribute::Junk, tag_no_case(b"\\Junk")),
        value(NameAttribute::Sent, tag_no_case(b"\\Sent")),
        value(NameAttribute::Trash, tag_no_case(b"\\Trash")),
        // Extensions not supported by this crate
        map(
            map_res(
                recognize(pair(tag(b"\\"), take_while(is_atom_char))),
                from_utf8,
            ),
            |s| NameAttribute::Extension(Cow::Borrowed(s)),
        ),
    ))(i)
}

/// mailbox-list: "(" attributes ")" SP delimiter SP mailbox.
/// Returns (name attributes, optional hierarchy delimiter, mailbox name);
/// the delimiter is `None` when the server sent NIL.
#[allow(clippy::type_complexity)]
fn mailbox_list(i: &[u8]) -> IResult<&[u8], (Vec<NameAttribute<'_>>, Option<&str>, &str)> {
    map(
        tuple((
            parenthesized_list(name_attribute),
            tag(b" "),
            alt((map(quoted_utf8, Some), map(nil, |_| None))),
            tag(b" "),
            mailbox,
        )),
        |(name_attributes, _, delimiter, _, name)| (name_attributes, delimiter, name),
    )(i)
}

/// "LIST" SP mailbox-list.
fn mailbox_data_list(i: &[u8]) -> IResult<&[u8], MailboxDatum<'_>> {
    map(preceded(tag_no_case("LIST "), mailbox_list), |data| {
        MailboxDatum::List {
            name_attributes: data.0,
            delimiter: data.1.map(Cow::Borrowed),
            name: Cow::Borrowed(data.2),
        }
    })(i)
}

/// "LSUB" SP mailbox-list.
/// NOTE(review): LSUB results are folded into `MailboxDatum::List` — no
/// dedicated variant distinguishes them; confirm callers do not need to
/// tell LIST and LSUB apart.
fn mailbox_data_lsub(i: &[u8]) -> IResult<&[u8], MailboxDatum<'_>> {
    map(preceded(tag_no_case("LSUB "), mailbox_list), |data| {
        MailboxDatum::List {
            name_attributes: data.0,
            delimiter: data.1.map(Cow::Borrowed),
            name: Cow::Borrowed(data.2),
        }
    })(i)
}

// Unlike `status_att` in the RFC syntax, this includes the value,
// so that it can return a valid enum object instead of just a key.
fn status_att(i: &[u8]) -> IResult<&[u8], StatusAttribute> {
    alt((
        rfc4551::status_att_val_highest_mod_seq,
        map(
            preceded(tag_no_case("MESSAGES "), number),
            StatusAttribute::Messages,
        ),
        map(
            preceded(tag_no_case("RECENT "), number),
            StatusAttribute::Recent,
        ),
        map(
            preceded(tag_no_case("UIDNEXT "), number),
            StatusAttribute::UidNext,
        ),
        map(
            preceded(tag_no_case("UIDVALIDITY "), number),
            StatusAttribute::UidValidity,
        ),
        map(
            preceded(tag_no_case("UNSEEN "), number),
            StatusAttribute::Unseen,
        ),
    ))(i)
}

/// Parenthesized (possibly empty) list of STATUS attributes.
fn status_att_list(i: &[u8]) -> IResult<&[u8], Vec<StatusAttribute>> {
    // RFC 3501 specifies that the list is non-empty in the formal grammar
    // status-att-list = status-att SP number *(SP status-att SP number)
    // but mail.163.com sends an empty list in STATUS response anyway.
    parenthesized_list(status_att)(i)
}

/// "STATUS" SP mailbox SP "(" status-att-list ")".
fn mailbox_data_status(i: &[u8]) -> IResult<&[u8], MailboxDatum<'_>> {
    map(
        tuple((tag_no_case("STATUS "), mailbox, tag(" "), status_att_list)),
        |(_, mailbox, _, status)| MailboxDatum::Status {
            mailbox: Cow::Borrowed(mailbox),
            status,
        },
    )(i)
}

/// number SP "RECENT" — count of recent messages.
fn mailbox_data_recent(i: &[u8]) -> IResult<&[u8], MailboxDatum<'_>> {
    map(
        terminated(number, tag_no_case(" RECENT")),
        MailboxDatum::Recent,
    )(i)
}

/// mailbox-data: any of the mailbox-level responses above, plus the
/// Gmail and RFC 5256 (SORT) extensions.
fn mailbox_data(i: &[u8]) -> IResult<&[u8], MailboxDatum<'_>> {
    alt((
        mailbox_data_flags,
        mailbox_data_exists,
        mailbox_data_list,
        mailbox_data_lsub,
        mailbox_data_status,
        mailbox_data_recent,
        mailbox_data_search,
        gmail::mailbox_data_gmail_labels,
        gmail::mailbox_data_gmail_msgid,
        gmail::mailbox_data_gmail_thrid,
        rfc5256::mailbox_data_sort,
    ))(i)
}
// An address structure is a parenthesized list that describes an
// electronic mail address.
//
// address = "(" addr-name SP addr-adl SP addr-mailbox SP addr-host ")"
// Each field is an nstring, so any of them may be NIL (mapped to None).
fn address(i: &[u8]) -> IResult<&[u8], Address<'_>> {
    paren_delimited(map(
        tuple((
            nstring,
            tag(" "),
            nstring,
            tag(" "),
            nstring,
            tag(" "),
            nstring,
        )),
        |(name, _, adl, _, mailbox, _, host)| Address {
            name: name.map(Cow::Borrowed),
            adl: adl.map(Cow::Borrowed),
            mailbox: mailbox.map(Cow::Borrowed),
            host: host.map(Cow::Borrowed),
        },
    ))(i)
}

// "(" 1*address ")" / NIL — an optional address list; the separating
// space between addresses is optional (some servers omit it, see tests).
fn opt_addresses(i: &[u8]) -> IResult<&[u8], Option<Vec<Address<'_>>>> {
    alt((
        map(nil, |_s| None),
        map(
            paren_delimited(many1(terminated(address, opt(char(' '))))),
            Some,
        ),
    ))(i)
}

// envelope = "(" env-date SP env-subject SP env-from SP
// env-sender SP env-reply-to SP env-to SP env-cc SP
// env-bcc SP env-in-reply-to SP env-message-id ")"
//
// env-bcc = "(" 1*address ")" / nil
//
// env-cc = "(" 1*address ")" / nil
//
// env-date = nstring
//
// env-from = "(" 1*address ")" / nil
//
// env-in-reply-to = nstring
//
// env-message-id = nstring
//
// env-reply-to = "(" 1*address ")" / nil
//
// env-sender = "(" 1*address ")" / nil
//
// env-subject = nstring
//
// env-to = "(" 1*address ")" / nil
//
// The ten fields are strictly positional; the long tuple below mirrors
// the grammar order exactly, so keep the destructuring in sync with it.
pub(crate) fn envelope(i: &[u8]) -> IResult<&[u8], Envelope<'_>> {
    paren_delimited(map(
        tuple((
            nstring,
            tag(" "),
            nstring,
            tag(" "),
            opt_addresses,
            tag(" "),
            opt_addresses,
            tag(" "),
            opt_addresses,
            tag(" "),
            opt_addresses,
            tag(" "),
            opt_addresses,
            tag(" "),
            opt_addresses,
            tag(" "),
            nstring,
            tag(" "),
            nstring,
        )),
        |(
            date,
            _,
            subject,
            _,
            from,
            _,
            sender,
            _,
            reply_to,
            _,
            to,
            _,
            cc,
            _,
            bcc,
            _,
            in_reply_to,
            _,
            message_id,
        )| Envelope {
            date: date.map(Cow::Borrowed),
            subject: subject.map(Cow::Borrowed),
            from,
            sender,
            reply_to,
            to,
            cc,
            bcc,
            in_reply_to: in_reply_to.map(Cow::Borrowed),
            message_id: message_id.map(Cow::Borrowed),
        },
    ))(i)
}
/// "ENVELOPE" SP envelope. The envelope is boxed, keeping the
/// `AttributeValue` variant pointer-sized.
fn msg_att_envelope(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
    map(preceded(tag_no_case("ENVELOPE "), envelope), |envelope| {
        AttributeValue::Envelope(Box::new(envelope))
    })(i)
}
fn msg_att_internal_date(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
map(
preceded(tag_no_case("INTERNALDATE "), nstring_utf8),
|date| AttributeValue::InternalDate(Cow::Borrowed(date.unwrap())),
)(i)
}
/// "FLAGS" SP flag-list inside a FETCH response.
fn msg_att_flags(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
    map(
        preceded(tag_no_case("FLAGS "), flag_list),
        AttributeValue::Flags,
    )(i)
}

/// "RFC822" SP nstring — the full raw message (NIL maps to None).
fn msg_att_rfc822(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
    map(preceded(tag_no_case("RFC822 "), nstring), |v| {
        AttributeValue::Rfc822(v.map(Cow::Borrowed))
    })(i)
}

/// "RFC822.HEADER" SP nstring — the raw header block.
fn msg_att_rfc822_header(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
    // extra space workaround for DavMail
    map(
        tuple((tag_no_case("RFC822.HEADER "), opt(tag(b" ")), nstring)),
        |(_, _, raw)| AttributeValue::Rfc822Header(raw.map(Cow::Borrowed)),
    )(i)
}

/// "RFC822.SIZE" SP number — message size in octets.
fn msg_att_rfc822_size(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
    map(
        preceded(tag_no_case("RFC822.SIZE "), number),
        AttributeValue::Rfc822Size,
    )(i)
}

/// "RFC822.TEXT" SP nstring — the raw message body text.
fn msg_att_rfc822_text(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
    map(preceded(tag_no_case("RFC822.TEXT "), nstring), |v| {
        AttributeValue::Rfc822Text(v.map(Cow::Borrowed))
    })(i)
}

/// "UID" SP uniqueid.
fn msg_att_uid(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
    map(preceded(tag_no_case("UID "), number), AttributeValue::Uid)(i)
}
// msg-att = "(" (msg-att-dynamic / msg-att-static)
// *(SP (msg-att-dynamic / msg-att-static)) ")"
//
// msg-att-dynamic = "FLAGS" SP "(" [flag-fetch *(SP flag-fetch)] ")"
// ; MAY change for a message
//
// msg-att-static = "ENVELOPE" SP envelope / "INTERNALDATE" SP date-time /
// "RFC822" [".HEADER" / ".TEXT"] SP nstring /
// "RFC822.SIZE" SP number /
// "BODY" ["STRUCTURE"] SP body /
// "BODY" section ["<" number ">"] SP nstring /
// "UID" SP uniqueid
// ; MUST NOT change for a message
//
// Order matters inside the alt: e.g. RFC822.HEADER/.SIZE/.TEXT are tried
// after plain RFC822, which only matches when followed by a space.
fn msg_att(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
    alt((
        msg_att_body_section,
        msg_att_body_structure,
        msg_att_envelope,
        msg_att_internal_date,
        msg_att_flags,
        rfc4551::msg_att_mod_seq,
        msg_att_rfc822,
        msg_att_rfc822_header,
        msg_att_rfc822_size,
        msg_att_rfc822_text,
        msg_att_uid,
        gmail::msg_att_gmail_labels,
        gmail::msg_att_gmail_msgid,
        gmail::msg_att_gmail_thrid,
    ))(i)
}

/// Parenthesized, non-empty list of FETCH attributes.
fn msg_att_list(i: &[u8]) -> IResult<&[u8], Vec<AttributeValue<'_>>> {
    parenthesized_nonempty_list(msg_att)(i)
}

// message-data = nz-number SP ("EXPUNGE" / ("FETCH" SP msg-att))
fn message_data_fetch(i: &[u8]) -> IResult<&[u8], Response<'_>> {
    map(
        tuple((number, tag_no_case(" FETCH "), msg_att_list)),
        |(num, _, attrs)| Response::Fetch(num, attrs),
    )(i)
}

// message-data = nz-number SP ("EXPUNGE" / ("FETCH" SP msg-att))
fn message_data_expunge(i: &[u8]) -> IResult<&[u8], u32> {
    terminated(number, tag_no_case(" EXPUNGE"))(i)
}

// tag = 1*<any ASTRING-CHAR except "+">
fn imap_tag(i: &[u8]) -> IResult<&[u8], RequestId> {
    map(map_res(take_while1(is_tag_char), from_utf8), |s| {
        RequestId(s.to_string())
    })(i)
}

// This is not quite according to spec, which mandates the following:
// ["[" resp-text-code "]" SP] text
// However, examples in RFC 4551 (Conditional STORE) counteract this by giving
// examples of `resp-text` that do not include the trailing space and text.
fn resp_text(i: &[u8]) -> IResult<&[u8], (Option<ResponseCode<'_>>, Option<&str>)> {
    map(tuple((opt(resp_text_code), text)), |(code, text)| {
        let res = if text.is_empty() {
            None
        } else if code.is_some() {
            // `resp_text_code` consumed only up to "]", so `text` still
            // starts with the separating space — drop it here.
            Some(&text[1..])
        } else {
            Some(text)
        };
        (code, res)
    })(i)
}

// A resp-text at the end of a response. Empty text is then allowed
// without the normally needed trailing space.
fn trailing_resp_text(i: &[u8]) -> IResult<&[u8], (Option<ResponseCode<'_>>, Option<&str>)> {
    map(opt(tuple((tag(b" "), resp_text))), |resptext| {
        resptext.map(|(_, tuple)| tuple).unwrap_or((None, None))
    })(i)
}

// continue-req = "+" SP (resp-text / base64) CRLF
pub(crate) fn continue_req(i: &[u8]) -> IResult<&[u8], Response<'_>> {
    // Some servers do not send the space :/
    // TODO: base64
    map(
        tuple((tag("+"), opt(tag(" ")), resp_text, tag("\r\n"))),
        |(_, _, text, _)| Response::Continue {
            code: text.0,
            information: text.1.map(Cow::Borrowed),
        },
    )(i)
}
// response-tagged = tag SP resp-cond-state CRLF
//
// resp-cond-state = ("OK" / "NO" / "BAD") SP resp-text
// ; Status condition
//
// A tagged command-completion response; the tag ties it back to the
// request that initiated the command.
pub(crate) fn response_tagged(i: &[u8]) -> IResult<&[u8], Response<'_>> {
    map(
        tuple((
            imap_tag,
            tag(b" "),
            status,
            trailing_resp_text,
            tag(b"\r\n"),
        )),
        |(tag, _, status, text, _)| Response::Done {
            tag,
            status,
            code: text.0,
            information: text.1.map(Cow::Borrowed),
        },
    )(i)
}

// resp-cond-auth = ("OK" / "PREAUTH") SP resp-text
// ; Authentication condition
//
// resp-cond-bye = "BYE" SP resp-text
//
// resp-cond-state = ("OK" / "NO" / "BAD") SP resp-text
// ; Status condition
//
// One parser covers all three conditions; `status` distinguishes the
// actual status keyword.
fn resp_cond(i: &[u8]) -> IResult<&[u8], Response<'_>> {
    map(tuple((status, trailing_resp_text)), |(status, text)| {
        Response::Data {
            status,
            code: text.0,
            information: text.1.map(Cow::Borrowed),
        }
    })(i)
}

// response-data = "*" SP (resp-cond-state / resp-cond-bye /
// mailbox-data / message-data / capability-data / quota) CRLF
//
// Untagged ("* ") server data, covering the core grammar plus the
// extension RFCs implemented in the sibling modules.
pub(crate) fn response_data(i: &[u8]) -> IResult<&[u8], Response<'_>> {
    delimited(
        tag(b"* "),
        alt((
            resp_cond,
            map(mailbox_data, Response::MailboxData),
            map(message_data_expunge, Response::Expunge),
            message_data_fetch,
            map(capability_data, Response::Capabilities),
            rfc5161::resp_enabled,
            rfc5464::metadata_solicited,
            rfc5464::metadata_unsolicited,
            rfc7162::resp_vanished,
            rfc2087::quota,
            rfc2087::quota_root,
            rfc2971::resp_id,
            rfc4314::acl,
            rfc4314::list_rights,
            rfc4314::my_rights,
        )),
        preceded(
            many0(tag(b" ")), // Outlook server sometimes sends whitespace at the end of STATUS response.
            tag(b"\r\n"),
        ),
    )(i)
}
#[cfg(test)]
mod tests {
    use crate::types::*;
    use assert_matches::assert_matches;
    use std::borrow::Cow;

    // The special-cased INBOX name is matched case-insensitively and
    // normalized to upper case.
    #[test]
    fn test_list() {
        match super::mailbox(b"iNboX ") {
            Ok((_, mb)) => {
                assert_eq!(mb, "INBOX");
            }
            rsp => panic!("unexpected response {rsp:?}"),
        }
    }

    // A full ENVELOPE attribute (example from RFC 3501) parses into an
    // AttributeValue::Envelope.
    #[test]
    fn test_envelope() {
        let env = br#"ENVELOPE ("Wed, 17 Jul 1996 02:23:25 -0700 (PDT)" "IMAP4rev1 WG mtg summary and minutes" (("Terry Gray" NIL "gray" "cac.washington.edu")) (("Terry Gray" NIL "gray" "cac.washington.edu")) (("Terry Gray" NIL "gray" "cac.washington.edu")) ((NIL NIL "imap" "cac.washington.edu")) ((NIL NIL "minutes" "CNRI.Reston.VA.US") ("John Klensin" NIL "KLENSIN" "MIT.EDU")) NIL NIL "<B27397-0100000@cac.washington.edu>") "#;
        match super::msg_att_envelope(env) {
            Ok((_, AttributeValue::Envelope(_))) => {}
            rsp => panic!("unexpected response {rsp:?}"),
        }
    }

    // A space-separated address list parses.
    #[test]
    fn test_opt_addresses() {
        let addr = b"((NIL NIL \"minutes\" \"CNRI.Reston.VA.US\") (\"John Klensin\" NIL \"KLENSIN\" \"MIT.EDU\")) ";
        match super::opt_addresses(addr) {
            Ok((_, _addresses)) => {}
            rsp => panic!("unexpected response {rsp:?}"),
        }
    }

    // Some servers omit the space between addresses; that must parse too.
    #[test]
    fn test_opt_addresses_no_space() {
        let addr =
            br#"((NIL NIL "test" "example@example.com")(NIL NIL "test" "example@example.com"))"#;
        match super::opt_addresses(addr) {
            Ok((_, _addresses)) => {}
            rsp => panic!("unexpected response {rsp:?}"),
        }
    }

    #[test]
    fn test_addresses() {
        match super::address(b"(\"John Klensin\" NIL \"KLENSIN\" \"MIT.EDU\") ") {
            Ok((_, _address)) => {}
            rsp => panic!("unexpected response {rsp:?}"),
        }
        // Literal non-UTF8 address
        match super::address(b"({12}\r\nJoh\xff Klensin NIL \"KLENSIN\" \"MIT.EDU\") ") {
            Ok((_, _address)) => {}
            rsp => panic!("unexpected response {rsp:?}"),
        }
    }

    // Capability lists parse, and lists without IMAP4rev1 are rejected.
    #[test]
    fn test_capability_data() {
        // Minimal capabilities
        assert_matches!(
            super::capability_data(b"CAPABILITY IMAP4rev1\r\n"),
            Ok((_, capabilities)) => {
                assert_eq!(capabilities, vec![Capability::Imap4rev1])
            }
        );

        assert_matches!(
            super::capability_data(b"CAPABILITY XPIG-LATIN IMAP4rev1 STARTTLS AUTH=GSSAPI\r\n"),
            Ok((_, capabilities)) => {
                assert_eq!(capabilities, vec![
                    Capability::Atom(Cow::Borrowed("XPIG-LATIN")),
                    Capability::Imap4rev1,
                    Capability::Atom(Cow::Borrowed("STARTTLS")),
                    Capability::Auth(Cow::Borrowed("GSSAPI")),
                ])
            }
        );

        assert_matches!(
            super::capability_data(b"CAPABILITY IMAP4rev1 AUTH=GSSAPI AUTH=PLAIN\r\n"),
            Ok((_, capabilities)) => {
                assert_eq!(capabilities, vec![
                    Capability::Imap4rev1,
                    Capability::Auth(Cow::Borrowed("GSSAPI")),
                    Capability::Auth(Cow::Borrowed("PLAIN")),
                ])
            }
        );

        // Capability command must contain IMAP4rev1
        assert_matches!(
            super::capability_data(b"CAPABILITY AUTH=GSSAPI AUTH=PLAIN\r\n"),
            Err(_)
        );
    }

    #[test]
    fn test_surgemail_select_flags() {
        // Tests workaround for surgemail with space before closing bracket
        assert_matches!(
            super::flag_list(b"(\\Answered \\Flagged \\Deleted \\Draft \\Seen $Forwarded )"),
            Ok(([], flags)) => {
                assert_eq!(flags, vec![
                    "\\Answered",
                    "\\Flagged",
                    "\\Deleted",
                    "\\Draft",
                    "\\Seen",
                    "$Forwarded"
                ])
            }
        );
    }
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/src/parser/rfc3501/body.rs | imap-proto/src/parser/rfc3501/body.rs | use nom::{
branch::alt,
bytes::streaming::{tag, tag_no_case},
character::streaming::char,
combinator::{map, opt},
multi::many0,
sequence::{delimited, preceded, tuple},
IResult,
};
use std::borrow::Cow;
use crate::{parser::core::*, types::*};
pub fn section_part(i: &[u8]) -> IResult<&[u8], Vec<u32>> {
let (i, (part, mut rest)) = tuple((number, many0(preceded(char('.'), number))))(i)?;
rest.insert(0, part);
Ok((i, rest))
}
/// section-msgtext = "HEADER" / "HEADER.FIELDS" [".NOT"] SP header-list / "TEXT"
///
/// Note that for HEADER.FIELDS[.NOT] the requested field-name list is
/// parsed but discarded — all header variants collapse to
/// `MessageSection::Header`.
pub fn section_msgtext(i: &[u8]) -> IResult<&[u8], MessageSection> {
    alt((
        map(
            tuple((
                tag_no_case("HEADER.FIELDS"),
                opt(tag_no_case(".NOT")),
                tag(" "),
                parenthesized_list(astring),
            )),
            |_| MessageSection::Header,
        ),
        map(tag_no_case("HEADER"), |_| MessageSection::Header),
        map(tag_no_case("TEXT"), |_| MessageSection::Text),
    ))(i)
}

/// section-text = section-msgtext / "MIME"
pub fn section_text(i: &[u8]) -> IResult<&[u8], MessageSection> {
    alt((
        section_msgtext,
        map(tag_no_case("MIME"), |_| MessageSection::Mime),
    ))(i)
}

/// section-spec = section-msgtext / (section-part ["." section-text])
pub fn section_spec(i: &[u8]) -> IResult<&[u8], SectionPath> {
    alt((
        map(section_msgtext, SectionPath::Full),
        map(
            tuple((section_part, opt(preceded(char('.'), section_text)))),
            |(part, text)| SectionPath::Part(part, text),
        ),
    ))(i)
}

/// section = "[" [section-spec] "]" — `None` for the empty section `[]`.
pub fn section(i: &[u8]) -> IResult<&[u8], Option<SectionPath>> {
    delimited(char('['), opt(section_spec), char(']'))(i)
}

/// "BODY" section ["<" number ">"] SP nstring — one fetched body section,
/// with the optional partial-fetch origin octet as `index`.
pub fn msg_att_body_section(i: &[u8]) -> IResult<&[u8], AttributeValue<'_>> {
    map(
        tuple((
            tag_no_case("BODY"),
            section,
            opt(delimited(char('<'), number, char('>'))),
            tag(" "),
            nstring,
        )),
        |(_, section, index, _, data)| AttributeValue::BodySection {
            section,
            index,
            data: data.map(Cow::Borrowed),
        },
    )(i)
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/fuzz/fuzz_targets/utf8_parse_response.rs | imap-proto/fuzz/fuzz_targets/utf8_parse_response.rs | #![no_main]
#[macro_use] extern crate libfuzzer_sys;
extern crate imap_proto;
// UTF-8
// Fuzz entry point: feed arbitrary bytes to the response parser and
// ensure it never panics; the parse result itself is deliberately ignored.
fuzz_target!(|data: &[u8]| {
    let _ = imap_proto::Response::from_bytes(data);
});
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/imap-proto/examples/parse_response.rs | imap-proto/examples/parse_response.rs | use imap_proto::Response;
use std::io::Write;
/// Interactive REPL: reads one IMAP response per line from stdin,
/// normalizes the line ending to CRLF (the parser requires it), and
/// pretty-prints the parse result. Exits on the first parse error.
fn main() -> std::io::Result<()> {
    loop {
        let line = {
            print!("Enter IMAP4REV1 response: ");
            // Flush so the prompt appears before blocking on stdin.
            std::io::stdout().flush().unwrap();
            let mut line = String::new();
            std::io::stdin().read_line(&mut line)?;
            line
        };

        match Response::from_bytes(line.replace('\n', "\r\n").as_bytes()) {
            Ok((remaining, command)) => {
                println!("{command:#?}");
                if !remaining.is_empty() {
                    println!("Remaining data in buffer: {remaining:?}");
                }
            }
            Err(_) => {
                println!("Error parsing the response. Is it correct? Exiting.");
                break;
            }
        }
    }
    Ok(())
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/tokio-imap/src/lib.rs | tokio-imap/src/lib.rs | mod client;
mod codec;
pub use crate::client::{Client, TlsClient};
pub use crate::codec::ResponseData;
/// Re-exports the command builders from `imap-proto` under a stable path.
pub mod builders {
    pub use imap_proto::builders::command::{fetch, CommandBuilder, FetchCommand};
}

/// Re-exports the protocol types from `imap-proto` under a stable path.
pub mod types {
    pub use imap_proto::types::*;
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/tokio-imap/src/codec.rs | tokio-imap/src/codec.rs | use std::io;
use std::mem;
use bytes::{BufMut, Bytes, BytesMut};
use nom::{self, Needed};
use tokio_util::codec::{Decoder, Encoder};
use imap_proto::types::{Request, RequestId, Response};
/// Tokio codec for IMAP: decodes server responses from the wire and
/// encodes client requests onto it.
#[derive(Default)]
pub struct ImapCodec {
    // When a previous decode attempt returned `Incomplete` with a known
    // minimum size, skip re-parsing until at least that many bytes have
    // been buffered.
    decode_need_message_bytes: usize,
}

impl Decoder for ImapCodec {
    type Item = ResponseData;
    type Error = io::Error;

    /// Attempts to parse one complete response from `buf`. Returns
    /// `Ok(None)` when more bytes are needed, `Err` on a protocol parse
    /// failure.
    fn decode(&mut self, buf: &mut BytesMut) -> Result<Option<Self::Item>, io::Error> {
        // Fast path: we already know the buffer is too short.
        if self.decode_need_message_bytes > buf.len() {
            return Ok(None);
        }

        let (response, rsp_len) = match imap_proto::Response::from_bytes(buf) {
            Ok((remaining, response)) => {
                // This SHOULD be acceptable/safe: BytesMut storage memory is
                // allocated on the heap and should not move. It will not be
                // freed as long as we keep a reference alive, which we do
                // by retaining a reference to the split buffer, below.
                let response =
                    unsafe { mem::transmute::<Response<'_>, Response<'static>>(response) };
                (response, buf.len() - remaining.len())
            }
            Err(nom::Err::Incomplete(Needed::Size(min))) => {
                // Remember the known minimum so we don't re-parse until
                // enough bytes have arrived.
                self.decode_need_message_bytes = min.get();
                return Ok(None);
            }
            Err(nom::Err::Incomplete(_)) => {
                return Ok(None);
            }
            Err(nom::Err::Error(nom::error::Error { code, .. }))
            | Err(nom::Err::Failure(nom::error::Error { code, .. })) => {
                return Err(io::Error::new(
                    io::ErrorKind::Other,
                    format!("{code:?} during parsing of {buf:?}"),
                ));
            }
        };

        // Split off the consumed bytes; `raw` keeps the backing storage
        // alive for the transmuted `response` borrows.
        let raw = buf.split_to(rsp_len).freeze();
        self.decode_need_message_bytes = 0;
        Ok(Some(ResponseData { raw, response }))
    }
}

impl<'a> Encoder<&'a Request<'a>> for ImapCodec {
    type Error = io::Error;

    /// Serializes a request as `<tag> SP <args> CRLF`.
    fn encode(&mut self, msg: &Request, dst: &mut BytesMut) -> Result<(), io::Error> {
        dst.put(&*msg.0);
        dst.put_u8(b' ');
        dst.put_slice(&msg.1);
        dst.put_slice(b"\r\n");
        Ok(())
    }
}

/// A parsed server response together with the raw bytes it borrows from.
#[derive(Debug)]
pub struct ResponseData {
    #[allow(dead_code)] // Contains data that `response` borrows
    raw: Bytes,
    // This reference is really scoped to the lifetime of the `raw`
    // member, but unfortunately Rust does not allow that yet. It
    // is transmuted to `'static` by the `Decoder`, instead, and
    // references returned to callers of `ResponseData` are limited
    // to the lifetime of the `ResponseData` struct.
    //
    // `raw` is never mutated during the lifetime of `ResponseData`,
    // and `Response` does not not implement any specific drop glue.
    response: Response<'static>,
}

impl ResponseData {
    /// The tag of a tagged (`Done`) response; `None` for untagged data.
    pub fn request_id(&self) -> Option<&RequestId> {
        match self.response {
            Response::Done { ref tag, .. } => Some(tag),
            _ => None,
        }
    }

    /// Borrows the parsed response, re-scoping the internal `'static`
    /// lifetime down to `&self` (see the comment on `response`).
    #[allow(clippy::needless_lifetimes)]
    pub fn parsed<'a>(&'a self) -> &'a Response<'a> {
        &self.response
    }
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/tokio-imap/src/client.rs | tokio-imap/src/client.rs | use std::borrow::Cow;
use std::io;
use std::net::ToSocketAddrs;
use std::pin::Pin;
use std::sync::Arc;
use std::task::{ready, Context, Poll};
use futures_sink::Sink;
use futures_util::{Stream, StreamExt};
use pin_project_lite::pin_project;
use rustls_pki_types::ServerName;
use tokio::io::{AsyncRead, AsyncWrite};
use tokio::net::TcpStream;
use tokio_rustls::rustls::ClientConfig;
use tokio_rustls::{client::TlsStream, TlsConnector};
use tokio_util::codec::{Decoder, Framed};
use crate::codec::{ImapCodec, ResponseData};
use imap_proto::builders::command::Command;
use imap_proto::{Request, RequestId, State};
/// Convenience alias for a client over a TLS-encrypted TCP stream.
pub type TlsClient = Client<TlsStream<TcpStream>>;

/// An IMAP client: a framed transport plus tracked protocol state and a
/// generator for unique request tags.
pub struct Client<T> {
    transport: Framed<T, ImapCodec>,
    // Connection state; updated when a completed command carries a
    // `next_state`.
    state: State,
    request_ids: IdGenerator,
}

impl TlsClient {
    /// Connects to `server` on port 993 over TLS (webpki roots), waits
    /// for the server greeting, and returns it with the ready client.
    ///
    /// # Errors
    /// Fails on DNS resolution, TCP/TLS connection errors, or when the
    /// server closes the stream before sending a greeting.
    pub async fn connect(server: &str) -> io::Result<(ResponseData, Self)> {
        let addr = (server, 993).to_socket_addrs()?.next().ok_or_else(|| {
            io::Error::new(
                io::ErrorKind::Other,
                format!("no IP addresses found for {server}"),
            )
        })?;

        let mut roots = tokio_rustls::rustls::RootCertStore::empty();
        roots.extend(webpki_roots::TLS_SERVER_ROOTS.iter().cloned());
        let connector = TlsConnector::from(Arc::new(
            ClientConfig::builder()
                .with_root_certificates(roots)
                .with_no_client_auth(),
        ));

        let stream = TcpStream::connect(&addr).await?;
        let stream = connector
            .connect(ServerName::try_from(server).unwrap().to_owned(), stream)
            .await?;

        let mut transport = ImapCodec::default().framed(stream);
        // The server speaks first: read the untagged greeting.
        let greeting = match transport.next().await {
            Some(greeting) => Ok(greeting),
            None => Err(io::Error::new(io::ErrorKind::Other, "no greeting found")),
        }?;

        let client = Client {
            transport,
            state: State::NotAuthenticated,
            request_ids: IdGenerator::new(),
        };
        greeting.map(|greeting| (greeting, client))
    }

    /// Sends `cmd` and returns a stream that yields every response up to
    /// and including the matching tagged completion.
    pub fn call<C: Into<Command>>(&mut self, cmd: C) -> ResponseStream<'_, TlsStream<TcpStream>> {
        let request_id = self.request_ids.next().unwrap(); // safe: never returns Err,
        ResponseStream {
            client: self,
            request_id,
            cmd: cmd.into(),
            state: ResponseStreamState::Start,
        }
    }
}

pin_project! {
    /// An in-flight command: borrows the client exclusively and drives a
    /// small send/flush/receive state machine as it is polled.
    pub struct ResponseStream<'a, T> {
        #[pin]
        client: &'a mut Client<T>,
        request_id: RequestId,
        cmd: Command,
        state: ResponseStreamState,
    }
}

impl<'a, T> Stream for ResponseStream<'a, T>
where
    T: AsyncRead + AsyncWrite + Unpin,
{
    type Item = Result<ResponseData, io::Error>;

    /// Drives Start -> Sending -> Receiving -> Done. Untagged responses
    /// (and responses for other tags) are yielded without ending the
    /// stream; the response whose tag matches `request_id` completes it.
    fn poll_next(self: Pin<&mut Self>, cx: &mut Context) -> Poll<Option<Self::Item>> {
        let mut me = self.project();
        loop {
            match me.state {
                ResponseStreamState::Start => {
                    // Wait for sink readiness, then enqueue the request.
                    ready!(Pin::new(&mut me.client.transport).poll_ready(cx))?;
                    let pinned = Pin::new(&mut me.client.transport);
                    pinned.start_send(&Request(
                        Cow::Borrowed(me.request_id.as_bytes()),
                        Cow::Borrowed(&me.cmd.args),
                    ))?;
                    *me.state = ResponseStreamState::Sending;
                }
                ResponseStreamState::Sending => {
                    let pinned = Pin::new(&mut me.client.transport);
                    ready!(pinned.poll_flush(cx))?;
                    *me.state = ResponseStreamState::Receiving;
                }
                ResponseStreamState::Receiving => {
                    match ready!(Pin::new(&mut me.client.transport).poll_next(cx)) {
                        Some(Ok(rsp)) => {
                            match rsp.request_id() {
                                Some(req_id) if req_id == me.request_id => {}
                                Some(_) | None => return Poll::Ready(Some(Ok(rsp))),
                            }

                            // Our tagged completion: advance the client's
                            // protocol state if the command defines one.
                            if let Some(next_state) = me.cmd.next_state.as_ref() {
                                me.client.state = *next_state;
                            }
                            *me.state = ResponseStreamState::Done;
                            return Poll::Ready(Some(Ok(rsp)));
                        }
                        Some(Err(e)) => return Poll::Ready(Some(Err(e))),
                        None => {
                            return Poll::Ready(Some(Err(io::Error::new(
                                io::ErrorKind::Other,
                                "stream ended before command completion",
                            ))))
                        }
                    }
                }
                ResponseStreamState::Done => {
                    return Poll::Ready(None);
                }
            }
        }
    }
}

// Phases of a single command's lifecycle; see `ResponseStream::poll_next`.
enum ResponseStreamState {
    Start,
    Sending,
    Receiving,
    Done,
}

/// Generates sequential IMAP request tags: A0001, A0002, ...
pub struct IdGenerator {
    next: u64,
}

impl IdGenerator {
    pub fn new() -> Self {
        Self { next: 0 }
    }
}

impl Default for IdGenerator {
    fn default() -> Self {
        Self::new()
    }
}

impl Iterator for IdGenerator {
    type Item = RequestId;
    // Infinite iterator (never returns None). Tags are four digits, so
    // after 10_000 commands the sequence wraps and tags repeat.
    fn next(&mut self) -> Option<Self::Item> {
        self.next += 1;
        Some(RequestId(format!("A{:04}", self.next % 10_000)))
    }
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
djc/tokio-imap | https://github.com/djc/tokio-imap/blob/82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c/tokio-imap/examples/basic.rs | tokio-imap/examples/basic.rs | use std::error::Error;
use std::fmt::{self, Display, Formatter};
use std::io;
use futures_util::stream::TryStreamExt;
use tokio_imap::builders::CommandBuilder;
use tokio_imap::types::{Attribute, AttributeValue, Response};
use tokio_imap::ResponseData;
use tokio_imap::TlsClient;
// Example CLI: connects to a server, logs in, selects a mailbox, fetches
// all messages by UID, and prints their UIDs and sizes.
// Usage: basic <server> <login> <password> <mailbox>
#[tokio::main]
async fn main() {
    let mut args = std::env::args();
    let _ = args.next();
    let server = args.next().expect("no server provided");
    let login = args.next().expect("no login provided");
    let password = args.next().expect("no password provided");
    let mailbox = args.next().expect("no mailbox provided");
    if let Err(cause) = imap_fetch(&server, login, password, mailbox).await {
        eprintln!("Fatal error: {cause}");
    }
}

/// Runs the full session: connect, LOGIN, SELECT, UID FETCH, CLOSE.
/// Each phase's I/O error is wrapped in the corresponding `ImapError`
/// variant.
async fn imap_fetch(
    server: &str,
    login: String,
    password: String,
    mailbox: String,
) -> Result<(), ImapError> {
    eprintln!("Will connect to {server}");
    let (_, mut tls_client) = TlsClient::connect(server)
        .await
        .map_err(|e| ImapError::Connect { cause: e })?;

    let responses = tls_client
        .call(CommandBuilder::login(&login, &password))
        .try_collect::<Vec<_>>()
        .await
        .map_err(|e| ImapError::Login { cause: e })?;

    // On success the server replies with its capabilities; a `Done`
    // response here means the LOGIN itself failed.
    match responses[0].parsed() {
        Response::Capabilities(_) => {}
        Response::Done { information, .. } => {
            if let Some(info) = information {
                eprintln!("Login failed: {info:?}");
            }
            return Err(ImapError::Login {
                cause: io::Error::new(io::ErrorKind::Other, "login failed"),
            });
        }
        _ => unimplemented!(),
    }

    let _ = tls_client
        .call(CommandBuilder::select(&mailbox))
        .try_collect::<Vec<_>>()
        .await
        .map_err(|e| ImapError::Select { cause: e })?;

    // Fetch UID and raw RFC822 body for every message (UIDs 1..).
    let cmd = CommandBuilder::uid_fetch()
        .range_from(1_u32..)
        .attr(Attribute::Uid)
        .attr(Attribute::Rfc822);

    tls_client
        .call(cmd)
        .try_for_each(process_email)
        .await
        .map_err(|e| ImapError::UidFetch { cause: e })?;

    let _ = tls_client
        .call(CommandBuilder::close())
        .try_collect::<Vec<_>>()
        .await
        .map_err(|e| ImapError::Close { cause: e })?;

    eprintln!("Finished fetching messages");
    Ok(())
}

/// Prints the UID and raw message length of a single FETCH response;
/// ignores every other response kind.
async fn process_email(response_data: ResponseData) -> Result<(), io::Error> {
    if let Response::Fetch(_, ref attr_vals) = *response_data.parsed() {
        for val in attr_vals {
            match val {
                AttributeValue::Uid(u) => {
                    eprintln!("Message UID: {u}");
                }
                AttributeValue::Rfc822(Some(src)) => {
                    eprintln!("Message length: {}", src.to_vec().len());
                }
                _ => (),
            }
        }
    }
    Ok(())
}
/// Error type for the example: tags the underlying I/O error with the
/// phase of the IMAP session in which it occurred.
#[derive(Debug)]
pub enum ImapError {
    Connect { cause: io::Error },
    Login { cause: io::Error },
    Select { cause: io::Error },
    UidFetch { cause: io::Error },
    Close { cause: io::Error },
}
impl Error for ImapError {
    /// Exposes the wrapped I/O error through the modern `source` hook.
    ///
    /// The previous impl provided the deprecated `cause` (and a
    /// `description` returning `""`); implementing `source` instead is
    /// backward-compatible because the default `cause` delegates to
    /// `source`, so `err.cause()` still yields the inner error.
    fn source(&self) -> Option<&(dyn Error + 'static)> {
        match *self {
            ImapError::Connect { ref cause }
            | ImapError::Login { ref cause }
            | ImapError::Select { ref cause }
            | ImapError::UidFetch { ref cause }
            | ImapError::Close { ref cause } => Some(cause),
        }
    }
}
impl Display for ImapError {
    // Human-readable message: the failing phase plus the underlying cause.
    fn fmt(&self, f: &mut Formatter<'_>) -> fmt::Result {
        match *self {
            ImapError::Connect { ref cause } => write!(f, "Connect failed: {cause}"),
            ImapError::Login { ref cause } => write!(f, "Login failed: {cause}"),
            ImapError::Select { ref cause } => write!(f, "Mailbox selection failed: {cause}"),
            ImapError::UidFetch { ref cause } => write!(f, "Fetching messages failed: {cause}"),
            ImapError::Close { ref cause } => write!(f, "Closing failed: {cause}"),
        }
    }
}
| rust | Apache-2.0 | 82c6967ad529e5f9cfa6b0c59aecb2f0b5f6bd8c | 2026-01-04T20:22:37.594708Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/config.rs | src/config.rs | use crate::test::{self, SecretValue};
use chrono::Local;
use log::error;
use serde::{Deserialize, Serialize};
use std::{collections::BTreeMap, env, path::Path};
/// Fully-resolved runtime configuration.
#[derive(PartialEq, Serialize, Deserialize, Clone, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Config {
    pub settings: Settings,
    // User-defined global variables (merged with built-ins in
    // `generate_global_variables`).
    pub globals: BTreeMap<String, String>,
    // Secret values; excluded from serialization so they are never
    // written back out.
    #[serde(skip_serializing)]
    pub secrets: BTreeMap<String, String>,
}

/// Resolved runner settings.
#[derive(PartialEq, Serialize, Deserialize, Clone, Debug)]
#[serde(rename_all = "camelCase")]
pub struct Settings {
    pub continue_on_failure: bool,
    pub bypass_cert_verification: bool,
    pub project: Option<String>,
    pub environment: Option<String>,
    // Never serialized: keep the API key out of any emitted config.
    #[serde(skip_serializing)]
    pub api_key: Option<String>,
    pub dev_mode: Option<bool>,
}

/// On-disk configuration file shape; everything is optional so partial
/// files from multiple sources can be layered.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct File {
    pub settings: Option<FileSettings>,
    pub globals: Option<BTreeMap<String, String>>,
    // NOTE(review): `skip_serializing` has no effect here since `File`
    // only derives `Deserialize` — confirm before removing.
    #[serde(skip_serializing)]
    pub secrets: Option<BTreeMap<String, String>>,
}

/// Settings section of a configuration file; all fields optional.
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct FileSettings {
    pub continue_on_failure: Option<bool>,
    pub bypass_cert_verification: Option<bool>,
    pub api_key: Option<String>,
    pub dev_mode: Option<bool>,
    pub project: Option<String>,
    pub environment: Option<String>,
}
impl Config {
    /// Builds the full set of global test variables: the built-in date/time
    /// variables (TODAY, TODAY_UTC, NOW, NOW_UTC), then user globals, then
    /// secrets. Built-ins and globals become plain `Value` variables; secrets
    /// are wrapped in `SecretValue` so they are redacted when printed.
    pub fn generate_global_variables(&self) -> Vec<test::Variable> {
        let now = Local::now();
        let now_utc = now.to_utc();

        let mut builtin = BTreeMap::new();
        builtin.insert("TODAY".to_string(), format!("{}", now.format("%Y-%m-%d")));
        builtin.insert(
            "TODAY_UTC".to_string(),
            format!("{}", now_utc.format("%Y-%m-%d")),
        );
        builtin.insert(
            "NOW".to_string(),
            format!("{}", now.format("%Y-%m-%d %H:%M:%S.%3f")),
        );
        builtin.insert(
            "NOW_UTC".to_string(),
            format!("{}", now_utc.format("%Y-%m-%d %H:%M:%S.%3f")),
        );

        // Built-ins and user globals share the same plain-value representation.
        let plain = builtin.iter().chain(self.globals.iter()).map(|(name, val)| {
            test::Variable {
                name: name.to_string(),
                value: test::ValueOrDatumOrFileOrSecret::Value {
                    value: serde_json::Value::from(val.to_string()),
                },
                source_path: "./".to_string(),
            }
        });

        let secret = self.secrets.iter().map(|(name, val)| test::Variable {
            name: name.clone(),
            value: test::ValueOrDatumOrFileOrSecret::Secret {
                value: SecretValue::new(val),
            },
            source_path: "/".to_string(),
        });

        plain.chain(secret).collect()
    }
}
impl Default for Config {
    /// Baseline configuration: all flags off, nothing set, empty maps.
    fn default() -> Config {
        let settings = Settings {
            continue_on_failure: false,
            bypass_cert_verification: false,
            project: None,
            environment: None,
            api_key: None,
            dev_mode: None,
        };
        Config {
            settings,
            globals: BTreeMap::default(),
            secrets: BTreeMap::default(),
        }
    }
}
/// Loads and layers configuration from (lowest to highest priority): the
/// home-directory `.jikken` file, the given project file (default `.jikken`),
/// and `JIKKEN_*` environment variables.
pub async fn get_config(file: Option<String>) -> Config {
    let project_file = file.unwrap_or_else(|| ".jikken".to_string());
    let sources_ascending_priority = vec![
        load_home_file().await,
        load_config_file(project_file.as_str()).await,
        Some(load_config_from_environment_variables_as_file()),
    ];
    get_config_impl(sources_ascending_priority)
}
fn get_config_impl(config_sources_ascending_priority: Vec<Option<File>>) -> Config {
let specified_config = config_sources_ascending_priority
.into_iter()
.fold(None, combine_config_files);
apply_config_file(Config::default(), specified_config)
}
/// Reads and parses one TOML config file. Returns `None` (after logging) if
/// the path is missing, unreadable, or fails to parse.
async fn load_config_file(file: &str) -> Option<File> {
    let path = Path::new(file);
    if !path.exists() || !path.is_file() {
        return None;
    }
    let data = match tokio::fs::read_to_string(file).await {
        Ok(d) => d,
        Err(e) => {
            error!("unable to load config file ({}): {}", file, e);
            return None;
        }
    };
    match toml::from_str::<File>(&data) {
        Ok(config) => Some(config),
        Err(e) => {
            error!("unable to load config file ({}): {}", file, e);
            None
        }
    }
}
/// Loads `~/.jikken` if the home directory can be determined.
async fn load_home_file() -> Option<File> {
    let path = dirs::home_dir()?.join(".jikken");
    load_config_file(path.to_str()?).await
}
/// Collects every environment variable whose name starts with `prefix`,
/// keyed by the remainder of the name after the prefix.
fn gather_env_vars_with_prefix(prefix: &str) -> BTreeMap<String, String> {
    env::vars()
        .filter_map(|(key, value)| {
            key.strip_prefix(prefix)
                .map(|stripped| (stripped.to_string(), value))
        })
        .collect()
}
/// Builds a config "file" from `JIKKEN_*` environment variables. Boolean
/// variables that are present but unparseable are treated as unset.
fn load_config_from_environment_variables_as_file() -> File {
    // Shared parsing for the boolean flags.
    let parse_bool_var = |name: &str| {
        env::var(name)
            .ok()
            .and_then(|cfg| cfg.parse::<bool>().ok())
    };
    File {
        settings: Some(FileSettings {
            api_key: env::var("JIKKEN_API_KEY").ok(),
            dev_mode: parse_bool_var("JIKKEN_DEV_MODE"),
            continue_on_failure: parse_bool_var("JIKKEN_CONTINUE_ON_FAILURE"),
            bypass_cert_verification: parse_bool_var("JIKKEN_BYPASS_CERT_VERIFICATION"),
            project: env::var("JIKKEN_PROJECT").ok(),
            environment: env::var("JIKKEN_ENVIRONMENT").ok(),
        }),
        globals: Some(gather_env_vars_with_prefix("JIKKEN_GLOBAL_")),
        secrets: Some(gather_env_vars_with_prefix("JIKKEN_SECRET_")),
    }
}
/// Merges `rhs` (if any) into `lhs`; on duplicate keys the `rhs` value wins.
fn merge_btrees(
    lhs: BTreeMap<String, String>,
    rhs: Option<BTreeMap<String, String>>,
) -> BTreeMap<String, String> {
    let mut merged = lhs;
    if let Some(overrides) = rhs {
        merged.extend(overrides);
    }
    merged
}
/// Applies one (optional) layered config file on top of `config`. Maps are
/// merged with the file winning on duplicates; scalar settings fall back to
/// `config` when the file leaves them unset.
fn apply_config_file(config: Config, file_opt: Option<File>) -> Config {
    let file = match file_opt {
        Some(f) => f,
        None => return config,
    };
    let globals = merge_btrees(config.globals, file.globals);
    let secrets = merge_btrees(config.secrets, file.secrets);
    let settings = match file.settings {
        None => config.settings,
        Some(overrides) => Settings {
            continue_on_failure: overrides
                .continue_on_failure
                .unwrap_or(config.settings.continue_on_failure),
            bypass_cert_verification: overrides
                .bypass_cert_verification
                .unwrap_or(config.settings.bypass_cert_verification),
            api_key: overrides.api_key.or(config.settings.api_key),
            dev_mode: overrides.dev_mode.or(config.settings.dev_mode),
            project: overrides.project.or(config.settings.project),
            environment: overrides.environment.or(config.settings.environment),
        },
    };
    Config {
        settings,
        globals,
        secrets,
    }
}
/// Combines two optional config sources; values from `rhs` take priority.
/// Globals and secrets are key-merged (rhs wins); settings fields fall back
/// to `lhs` when `rhs` leaves them unset.
fn combine_config_files(lhs: Option<File>, rhs: Option<File>) -> Option<File> {
    match (lhs, rhs) {
        (None, None) => None,
        (Some(only), None) | (None, Some(only)) => Some(only),
        (Some(existing), Some(overlay)) => {
            let merged_globals = merge_btrees(
                existing.globals.unwrap_or_default(),
                overlay.globals,
            );
            let merged_secrets = merge_btrees(
                existing.secrets.unwrap_or_default(),
                overlay.secrets,
            );
            let base = existing.settings;
            let settings = match overlay.settings {
                None => base,
                Some(over) => Some(FileSettings {
                    continue_on_failure: over
                        .continue_on_failure
                        .or(base.as_ref().and_then(|s| s.continue_on_failure)),
                    bypass_cert_verification: over
                        .bypass_cert_verification
                        .or(base.as_ref().and_then(|s| s.bypass_cert_verification)),
                    api_key: over
                        .api_key
                        .or(base.as_ref().and_then(|s| s.api_key.clone())),
                    dev_mode: over
                        .dev_mode
                        .or(base.as_ref().and_then(|s| s.dev_mode)),
                    project: over
                        .project
                        .or(base.as_ref().and_then(|s| s.project.clone())),
                    environment: over
                        .environment
                        .or(base.as_ref().and_then(|s| s.environment.clone())),
                }),
            };
            Some(File {
                settings,
                globals: Some(merged_globals),
                // BUG FIX: previously, when the higher-priority file had no
                // [settings] section, merged secrets were discarded
                // (`secrets: None`) even though globals were kept. Secrets now
                // merge consistently regardless of the settings section.
                secrets: Some(merged_secrets),
            })
        }
    }
}
#[cfg(test)]
mod tests {
    use super::*;
    use std::{fs::OpenOptions, io::Write};
    use tempfile::tempdir;

    /// Creates `path` and writes `contents` into it.
    fn write_config(path: &str, contents: &str) {
        _ = std::fs::File::create(path);
        let mut f = OpenOptions::new()
            .write(true)
            .open(path)
            .expect("create failed");
        f.write_all(contents.as_bytes()).unwrap();
    }

    #[test]
    fn no_overrides_yields_default_config() {
        assert_eq!(Config::default(), get_config_impl(vec![None, None]));
    }

    #[tokio::test]
    async fn one_overrides_yields_correct_combination() {
        let tmp_dir = tempdir().unwrap();
        let override_path = tmp_dir.path().join("foo.jikken");
        let override_path_str = override_path.to_str().unwrap();
        write_config(
            override_path_str,
            "[settings]\n\
             continueOnFailure=true\n\
             [globals]\n\
             my_override_global=\"foo\"\n\
             [secrets]\n\
             my_override_secret=\"bar\"\n",
        );

        let actual = get_config_impl(vec![load_config_file(override_path_str).await, None]);

        let expected = Config {
            settings: Settings {
                continue_on_failure: true,
                ..Config::default().settings
            },
            globals: BTreeMap::from([(
                String::from("my_override_global"),
                String::from("foo"),
            )]),
            secrets: BTreeMap::from([(
                String::from("my_override_secret"),
                String::from("bar"),
            )]),
        };
        assert_eq!(expected, actual);
    }

    #[tokio::test]
    async fn two_overrides_yields_correct_combination() {
        let tmp_dir = tempdir().unwrap();
        let first_path = tmp_dir.path().join("foo.jikken");
        let first_path_str = first_path.to_str().unwrap();
        let second_path = tmp_dir.path().join("foo2.jikken");
        let second_path_str = second_path.to_str().unwrap();

        write_config(
            first_path_str,
            "[settings]\n\
             continueOnFailure=true\n\
             apiKey=\"key\"\n\
             devMode=true\n\
             [globals]\n\
             my_override_global=\"foo\"\n\
             my_override_global2=\"bar\"\n\
             [secrets]\n\
             my_override_secret=\"foo2\"\n\
             my_override_secret2=\"bar2\"\n",
        );
        write_config(
            second_path_str,
            "[settings]\n\
             continueOnFailure=false\n\
             project=\"my_proj\"\n\
             environment=\"magic\"\n\
             [globals]\n\
             my_override_global=\"bar\"\n\
             my_override_global3=\"car\"\n\
             [secrets]\n\
             my_override_secret=\"bar2\"\n\
             my_override_secret3=\"car\"\n",
        );

        let actual = get_config_impl(vec![
            load_config_file(first_path_str).await,
            load_config_file(second_path_str).await,
        ]);

        let expected = Config {
            settings: Settings {
                continue_on_failure: false,
                bypass_cert_verification: false,
                api_key: Some(String::from("key")),
                dev_mode: Some(true),
                project: Some(String::from("my_proj")),
                environment: Some(String::from("magic")),
            },
            globals: BTreeMap::from([
                (String::from("my_override_global"), String::from("bar")),
                (String::from("my_override_global2"), String::from("bar")),
                (String::from("my_override_global3"), String::from("car")),
            ]),
            secrets: BTreeMap::from([
                (String::from("my_override_secret"), String::from("bar2")),
                (String::from("my_override_secret2"), String::from("bar2")),
                (String::from("my_override_secret3"), String::from("car")),
            ]),
        };
        assert_eq!(expected, actual);
    }
} // mod tests
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/errors.rs | src/errors.rs | use std::fmt;
/// Error raised when a test definition fails validation.
#[derive(Debug, Clone)]
pub struct ValidationError {
    pub reason: String,
}

impl std::error::Error for ValidationError {}

impl fmt::Display for ValidationError {
    /// Prints the recorded reason, or a generic "test validation" marker
    /// when no reason was supplied.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let text = if self.reason.is_empty() {
            "test validation"
        } else {
            self.reason.as_str()
        };
        f.write_str(text)
    }
}
/// Error raised when a test run does not meet its expectations.
#[derive(Debug, Clone)]
pub struct TestFailure {
    pub reason: String,
}

impl std::error::Error for TestFailure {}

impl fmt::Display for TestFailure {
    /// Prints the recorded reason, or "test failed" when none was supplied.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let text = if self.reason.is_empty() {
            "test failed"
        } else {
            self.reason.as_str()
        };
        f.write_str(text)
    }
}
/// Error raised when reporting telemetry fails.
#[derive(Debug, Clone)]
pub struct TelemetryError {
    pub reason: String,
}

impl std::error::Error for TelemetryError {}

impl fmt::Display for TelemetryError {
    /// Prints the recorded reason, or "telemetry failed" when none was supplied.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let text = if self.reason.is_empty() {
            "telemetry failed"
        } else {
            self.reason.as_str()
        };
        f.write_str(text)
    }
}
/// Catch-all error type with a free-form reason.
#[derive(Debug, Clone)]
pub struct GenericError {
    pub reason: String,
}

impl std::error::Error for GenericError {}

impl fmt::Display for GenericError {
    /// Prints the recorded reason, or "generic error" when none was supplied.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        let text = if self.reason.is_empty() {
            "generic error"
        } else {
            self.reason.as_str()
        };
        f.write_str(text)
    }
}
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/test.rs | src/test.rs | pub mod definition;
pub mod file;
pub mod http;
pub mod template;
pub mod validation;
pub mod variable;
use crate::test::file::BodyOrSchema;
use self::file::{generate_value_from_schema, UnvalidatedRequest, UnvalidatedResponse};
use crate::test::{
definition::RequestBody,
file::{
DatumSchema, FloatSpecification, IntegerSpecification, NameSpecification,
UnvalidatedVariable, ValueOrDatumOrFile,
},
};
use file::{
DateSpecification, DateTimeSpecification, EmailSpecification, SequenceSpecification,
StringSpecification, UnvalidatedDatumSchemaVariable,
};
use log::{debug, error, trace};
use regex::Regex;
use serde::{Deserialize, Serialize, Serializer};
use std::{
collections::{HashMap, HashSet},
fmt::{
Debug, {self},
},
hash::{Hash, Hasher},
path::Path,
};
use ulid::Ulid;
#[derive(Deserialize, PartialEq)]
/// Newtype around a secret string. Display/Debug/Serialize all emit a
/// redacted form (see `redacted_value`) so the raw value is never printed
/// or serialized; only `redact` and `Hash` touch the raw bytes.
pub struct SecretValue(String);
impl SecretValue {
    const REDACTED_VALUE: &'static str = "******";

    /// Masked rendering of the secret. Short secrets (<= 20 bytes) are fully
    /// masked; longer ones keep their first and last four characters around
    /// the mask so they remain identifiable in logs.
    fn redacted_value(&self) -> String {
        let len = self.0.len();
        if len <= 20 {
            return Self::REDACTED_VALUE.to_string();
        }
        // BUG FIX: the previous byte-index slicing (`&self.0[0..4]` and
        // `&self.0[len - 4..len]`) panics when an index lands inside a
        // multi-byte UTF-8 sequence. Collect characters instead, which is
        // byte-identical for ASCII secrets and safe for everything else.
        let prefix: String = self.0.chars().take(4).collect();
        let char_count = self.0.chars().count();
        let suffix: String = self.0.chars().skip(char_count.saturating_sub(4)).collect();
        format!("{}{}{}", prefix, Self::REDACTED_VALUE, suffix)
    }

    /// Wraps a raw secret string.
    pub fn new(v: &str) -> Self {
        Self(v.to_string())
    }

    /// Returns `s` with every occurrence of the raw secret replaced by the
    /// redacted rendering.
    fn redact(&self, s: &str) -> String {
        s.replace(&self.0, &self.redacted_value())
    }
}
impl Serialize for SecretValue {
    /// Serializes only the redacted form — the raw secret never leaves the
    /// process through serde.
    fn serialize<S>(&self, serializer: S) -> Result<S::Ok, S::Error>
    where
        S: Serializer,
    {
        let masked = self.redacted_value();
        serializer.serialize_str(masked.as_str())
    }
}
impl fmt::Display for SecretValue {
    /// Displays the redacted form only.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.redacted_value().as_str())
    }
}
impl fmt::Debug for SecretValue {
    /// Debug output is redacted too, so `{:?}` dumps stay safe.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(self.redacted_value().as_str())
    }
}
impl Hash for SecretValue {
    /// Hashes the raw secret (not the redacted form) so equal secrets hash
    /// equally, consistent with `PartialEq` on the inner string.
    fn hash<H: Hasher>(&self, state: &mut H) {
        Hash::hash(&self.0, state);
    }
}
impl Clone for SecretValue {
fn clone(&self) -> Self {
SecretValue(self.0.clone())
}
}
/// A variable's resolved payload. Deserialized `untagged`, so serde tries
/// each variant top-to-bottom and keeps the first that matches.
/// NOTE(review): `File { value: String }` precedes `Value`, so a JSON object
/// whose `value` is a plain string will deserialize as `File` — confirm this
/// ordering is intentional.
#[derive(Debug, Serialize, Clone, Deserialize, PartialEq)]
#[serde(untagged)]
pub enum ValueOrDatumOrFileOrSecret {
    // Path to a file whose contents supply the value (see `generate_value`).
    File {
        value: String,
    },
    // Secret material; rendered redacted everywhere.
    Secret {
        value: SecretValue,
    },
    // A data-generation schema.
    Schema {
        value: DatumSchema,
    },
    // A literal JSON value.
    Value {
        value: serde_json::Value,
    },
    // A set of values rotated across test iterations (index = iteration % len).
    #[serde(rename_all = "camelCase")]
    ValueSet {
        value_set: serde_json::Value,
    },
}
/// Converts a raw, file-sourced variable into the validated payload enum.
/// Only the `Datum` arm can actually fail (schema re-validation).
impl TryFrom<UnvalidatedVariable> for ValueOrDatumOrFileOrSecret {
    type Error = String;
    fn try_from(value: UnvalidatedVariable) -> Result<Self, Self::Error> {
        match value {
            // \todo : check if file is valid?
            // we could, but will we ever store responses to file?
            // basically a TOCTOU question
            UnvalidatedVariable::File(f) => Ok(ValueOrDatumOrFileOrSecret::File { value: f.file }),
            UnvalidatedVariable::Simple(s) => {
                Ok(ValueOrDatumOrFileOrSecret::Value { value: s.value })
            }
            // Schema variables are re-validated through DatumSchema's TryFrom.
            UnvalidatedVariable::Datum(ds) => TryInto::<DatumSchema>::try_into(ds)
                .map(|a| ValueOrDatumOrFileOrSecret::Schema { value: a }),
            // Value sets are stored as a JSON array for iteration-based rotation.
            UnvalidatedVariable::ValueSet(vs) => Ok(ValueOrDatumOrFileOrSecret::ValueSet {
                value_set: serde_json::Value::Array(vs.value_set),
            }),
        }
    }
}
impl TryFrom<ValueOrDatumOrFile> for ValueOrDatumOrFileOrSecret {
type Error = String;
fn try_from(value: ValueOrDatumOrFile) -> Result<Self, Self::Error> {
trace!("try_from({:?})", value);
match value {
ValueOrDatumOrFile::Value { value } => Ok(ValueOrDatumOrFileOrSecret::Value { value }),
// \todo : check if file is valid?
// we could, but will we ever store responses to file?
// basically a TOCTOU question
ValueOrDatumOrFile::ValueSet { value_set } => {
Ok(ValueOrDatumOrFileOrSecret::ValueSet {
value_set: serde_json::Value::Array(value_set),
})
}
ValueOrDatumOrFile::File { file } => {
Ok(ValueOrDatumOrFileOrSecret::File { value: file })
}
ValueOrDatumOrFile::Schema(schema) => {
match schema {
DatumSchema::Name { specification } => specification
.map(|s| {
NameSpecification::new(s.specification).map(|s| {
ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::Name {
specification: Some(s),
},
}
})
})
.unwrap_or(Ok(ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::Email {
specification: None,
},
})),
// \todo: Should recursively validate
DatumSchema::Object { schema } => Ok(ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::Object { schema },
}),
DatumSchema::List { specification } => specification
.map(|s| {
SequenceSpecification::new(
s.schema,
s.length,
s.min_length,
s.max_length,
)
.map(|s| {
ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::List {
specification: Some(s),
},
}
})
})
.unwrap_or(Ok(ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::List {
specification: None,
},
})),
DatumSchema::Email { specification } => specification
.map(|s| {
EmailSpecification::new(s.specification).map(|s| {
ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::Email {
specification: Some(s),
},
}
})
})
.unwrap_or(Ok(ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::Email {
specification: None,
},
})),
DatumSchema::Boolean { specification } => {
Ok(ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::Boolean { specification },
})
}
DatumSchema::Float { specification } => specification
.map(|s| {
FloatSpecification::new(s.specification, s.min, s.max).map(|s| {
ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::Float {
specification: Some(s),
},
}
})
})
.unwrap_or(Ok(ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::Float {
specification: None,
},
})),
DatumSchema::Integer { specification } => specification
.map(|s| {
IntegerSpecification::new(s.specification, s.min, s.max).map(|s| {
ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::Integer {
specification: Some(s),
},
}
})
})
.unwrap_or(Ok(ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::Integer {
specification: None,
},
})),
DatumSchema::String { specification } => specification
.map(|s| {
StringSpecification::new(
s.specification,
s.length,
s.min_length,
s.max_length,
s.pattern,
)
.map(|s| {
ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::String {
specification: Some(s),
},
}
})
})
.unwrap_or(Ok(ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::String {
specification: None,
},
})),
DatumSchema::Date { specification } => specification
.map(|ds| {
DateSpecification::new(
ds.specification,
ds.min,
ds.max,
ds.format,
ds.modifier,
)
.map(|s| {
ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::Date {
specification: Some(s),
},
}
})
})
.unwrap_or(Ok(ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::Date {
specification: None,
},
})),
DatumSchema::DateTime { specification } => specification
.map(|ds| {
DateTimeSpecification::new(
ds.specification,
ds.min,
ds.max,
ds.format,
ds.modifier,
)
.map(|s| {
ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::DateTime {
specification: Some(s),
},
}
})
})
.unwrap_or(Ok(ValueOrDatumOrFileOrSecret::Schema {
value: DatumSchema::Integer {
specification: None,
},
})),
}
}
}
}
}
/// On-disk representation of a test file (as authored by the user), prior to
/// validation into a `Definition`. Every section is optional.
#[derive(Debug, Clone, Serialize, Deserialize, Hash, PartialEq)]
#[serde(deny_unknown_fields)]
pub struct File {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub name: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub id: Option<String>,
    // Platform-assigned identifier (a ULID by default, see `Default`).
    #[serde(rename = "platformId")]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub platform_id: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub description: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub disabled: Option<bool>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub project: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub env: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub tags: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub requires: Option<String>,
    // Number of iterations to run the test for.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub iterate: Option<u32>,
    // Request/response lifecycle sections.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub setup: Option<file::UnvalidatedRequestResponse>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub request: Option<file::UnvalidatedRequest>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub compare: Option<file::UnvalidatedCompareRequest>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub response: Option<file::UnvalidatedResponse>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub stages: Option<Vec<file::UnvalidatedStage>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub cleanup: Option<file::UnvalidatedCleanup>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub variables: Option<Vec<file::UnvalidatedVariable>>,
    // Populated by the loader; not part of the file format itself.
    #[serde(skip_serializing, skip_deserializing)]
    pub filename: String,
}
impl Default for File {
fn default() -> Self {
Self {
filename: "".to_string(),
name: Some("".to_string()),
id: None,
platform_id: Some(Ulid::new().to_string()),
project: None,
env: None,
tags: None,
requires: None,
iterate: None,
setup: None,
request: Some(UnvalidatedRequest::default()),
compare: None,
response: Some(UnvalidatedResponse::default()),
stages: None,
cleanup: None,
variables: None,
disabled: None,
description: None,
}
}
}
/// A validated, named test variable plus the directory it came from (used to
/// resolve relative file paths in `generate_value`).
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
#[serde(rename_all = "camelCase")]
pub struct Variable {
    pub name: String,
    // Flattened so the payload's own fields appear inline in (de)serialization.
    #[serde(flatten)]
    pub value: ValueOrDatumOrFileOrSecret,
    // Directory of the defining file; runtime-only.
    #[serde(skip_serializing)]
    pub source_path: String,
}
impl Variable {
    /// Validates an unvalidated variable and builds a `Variable`.
    ///
    /// Fails when the variable has no name, when the name contains characters
    /// outside `[A-Za-z0-9_-]`, or when the payload's own validation fails.
    pub fn new(
        variable: file::UnvalidatedVariable,
        source_path: &str,
    ) -> Result<Variable, validation::Error> {
        // Pull the (optional) name out of whichever payload shape we got.
        let name = match &variable {
            UnvalidatedVariable::File(f) => Some(f.name.clone()),
            UnvalidatedVariable::Simple(s) => Some(s.name.clone()),
            UnvalidatedVariable::Datum(d) => match d {
                UnvalidatedDatumSchemaVariable::Boolean(b) => b.name.clone(),
                UnvalidatedDatumSchemaVariable::Date(d) => d.name.clone(),
                UnvalidatedDatumSchemaVariable::DateTime(d) => d.name.clone(),
                UnvalidatedDatumSchemaVariable::Email(e) => e.name.clone(),
                UnvalidatedDatumSchemaVariable::Float(f) => f.name.clone(),
                UnvalidatedDatumSchemaVariable::Integer(i) => i.name.clone(),
                UnvalidatedDatumSchemaVariable::Name(n) => n.name.clone(),
                UnvalidatedDatumSchemaVariable::String(s) => s.name.clone(),
                UnvalidatedDatumSchemaVariable::List(l) => l.name.clone(),
                UnvalidatedDatumSchemaVariable::Object { name, schema: _ } => name.clone(),
            },
            UnvalidatedVariable::ValueSet(vs) => Some(vs.name.clone()),
        };
        return match name {
            None => Err(validation::Error {
                reason: "Name must be provided for variables".to_string(),
            }),
            Some(n) => {
                // Case-insensitive: letters, digits, hyphens, underscores only.
                let regex = Regex::new(r"(?i)^[a-z0-9-_]+$").unwrap();
                if !regex.is_match(n.as_str()) {
                    debug!("variable name '{}' is invalid", n);
                    return Err(validation::Error{reason: "name is invalid - may only contain alphanumeric characters, hyphens, and underscores".to_string()});
                }
                // Payload validation happens in the TryInto impl.
                TryInto::<ValueOrDatumOrFileOrSecret>::try_into(variable)
                    .map(|vdfs| Variable {
                        name: n,
                        source_path: source_path.to_string(),
                        value: vdfs,
                    })
                    .map_err(|e| validation::Error { reason: e })
            }
        };
    }

    /// Validates an optional list of variables, collecting every failure into
    /// one comma-joined error instead of stopping at the first.
    pub fn validate_variables_opt(
        variables: Option<Vec<file::UnvalidatedVariable>>,
        source_path: &str,
    ) -> Result<Vec<Variable>, validation::Error> {
        let mut errors: Vec<String> = vec![];
        let ret = variables
            .map(|vars| {
                vars.into_iter()
                    .map(|f| Variable::new(f, source_path))
                    .filter_map(|v| match v {
                        Ok(x) => Some(x),
                        Err(e) => {
                            errors.push(format!("variable error: {}", e));
                            None
                        }
                    })
                    .collect::<Vec<Variable>>()
            })
            .unwrap_or_default();
        if !errors.is_empty() {
            return Err(validation::Error {
                reason: errors.join(","),
            });
        }
        Ok(ret)
    }

    /// Produces the string value of this variable for a given iteration.
    ///
    /// - `File`: reads file contents (path taken as-is if it exists, else
    ///   resolved relative to `source_path`); empty string on read error.
    /// - `Secret`: resolves nested variable references inside the raw secret.
    /// - `Value`: serializes to JSON, resolves nested references, strips
    ///   surrounding quotes.
    /// - `ValueSet`: rotates through the set by `iteration % len` (empty set
    ///   yields an empty string).
    /// - `Schema`: resolves references inside the schema, then generates a
    ///   random conforming value.
    pub fn generate_value(
        &self,
        definition: &Definition,
        iteration: u32,
        global_variables: &[Variable],
    ) -> String {
        match &self.value {
            ValueOrDatumOrFileOrSecret::File { value: file } => {
                let file_path = if Path::new(file).exists() {
                    file.clone()
                } else {
                    // Fall back to a path relative to the defining file.
                    format!(
                        "{}{}{}",
                        self.source_path,
                        std::path::MAIN_SEPARATOR_STR,
                        file
                    )
                };
                match std::fs::read_to_string(&file_path) {
                    Ok(file_data) => file_data.trim().to_string(),
                    Err(e) => {
                        error!("error loading file ({}) content: {}", file_path, e);
                        "".to_string()
                    }
                }
            }
            ValueOrDatumOrFileOrSecret::Secret { value: secret } => definition.resolve_variables(
                secret.0.as_str(),
                &HashMap::new(),
                global_variables,
                iteration,
            ),
            ValueOrDatumOrFileOrSecret::Value { value: v } => serde_json::to_string(v)
                .map(|jv| {
                    let ret = definition.resolve_variables(
                        jv.as_str(),
                        &HashMap::new(),
                        global_variables,
                        iteration,
                    );
                    ret
                })
                .unwrap_or_default()
                .trim_matches('"')
                .to_string(),
            ValueOrDatumOrFileOrSecret::ValueSet { value_set: v } => {
                let length = v.as_array().unwrap_or(&Vec::new()).len();
                if length == 0 {
                    // Empty set: bail out before the modulo below divides by zero.
                    return "".to_string();
                }
                // Rotate deterministically through the set per iteration.
                let index = iteration % length as u32;
                serde_json::to_string(
                    v.get(index as usize)
                        .unwrap_or(&serde_json::Value::from("")),
                )
                .map(|jv| {
                    let ret = definition.resolve_variables(
                        jv.as_str(),
                        &HashMap::new(),
                        global_variables,
                        iteration,
                    );
                    ret
                })
                .unwrap_or_default()
                .trim_matches('"')
                .to_string()
            }
            ValueOrDatumOrFileOrSecret::Schema { value: d } => serde_json::to_string(d)
                .map(|jv| {
                    definition.resolve_variables(
                        jv.as_str(),
                        &HashMap::new(),
                        global_variables,
                        iteration,
                    )
                })
                // Re-parse the resolved schema, then generate a value from it
                // (depth limit 10); any failure collapses to an empty string.
                .and_then(|rs| serde_json::from_str::<DatumSchema>(rs.as_str()))
                .ok()
                .and_then(|ds| generate_value_from_schema(&ds, 10))
                .and_then(|v| serde_json::to_string(&v).ok())
                .unwrap_or_default()
                .trim_matches('"')
                .to_string(),
        }
    }
}
/// A fully validated test definition, produced from a [`File`].
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct Definition {
    pub name: Option<String>,
    pub description: Option<String>,
    pub id: Option<String>,
    pub platform_id: Option<String>,
    pub project: Option<String>,
    pub environment: Option<String>,
    pub requires: Option<String>,
    pub tags: Vec<String>,
    // Number of iterations this test runs.
    pub iterate: u32,
    // Test-scoped variables, plus the globals they may reference.
    pub variables: Vec<Variable>,
    pub global_variables: Vec<Variable>,
    pub stages: Vec<definition::StageDescriptor>,
    pub setup: Option<definition::RequestResponseDescriptor>,
    pub cleanup: definition::CleanupDescriptor,
    pub disabled: bool,
    // Original file contents and position; runtime-only bookkeeping.
    #[serde(skip_serializing, skip_deserializing)]
    pub file_data: File,
    #[serde(skip_serializing, skip_deserializing)]
    pub index: usize,
}
// TODO: add validation logic to verify the descriptor is valid
// TODO: Validation should be type driven for compile time correctness
impl Definition {
//Instead of iterating could we create a regex from all secret values
//and do it in 1 call. Introduce encapsulation to easily do that in future
pub fn redact_secrets(&self, s: &str) -> String {
self.global_variables
.iter()
.filter_map(|v| match &v.value {
ValueOrDatumOrFileOrSecret::Secret { value: secret } => Some(secret),
_ => None,
})
.fold(s.to_string(), |acc, secret| secret.redact(acc.as_str()))
}
fn update_request_variables(request: &definition::RequestDescriptor, var_pattern: &str) {
for header in request.headers.iter() {
if header.matches_variable.get() {
continue;
}
if header.value.contains(var_pattern) {
header.matches_variable.set(true);
debug!("setting match true: {}", header.header);
}
}
for param in request.params.iter() {
if param.matches_variable.get() {
continue;
}
if param.value.contains(var_pattern) {
param.matches_variable.set(true);
debug!("setting match true: {}", param.param);
}
}
if let Some(body) = request.body.as_ref() {
let body_data = match serde_json::to_string(&body.data) {
Ok(s) => Some(s),
Err(_) => None,
};
if let Some(b) = &body_data {
if b.contains(var_pattern) {
body.matches_variable.set(true);
debug!("request body match true: {}", var_pattern);
}
}
}
}
fn update_compare_variables(compare: &definition::CompareDescriptor, var_pattern: &str) {
for header in compare.headers.iter() {
if header.matches_variable.get() {
continue;
}
if header.value.contains(var_pattern) {
header.matches_variable.set(true);
debug!("setting match true: {}", header.header);
}
}
for param in compare.params.iter() {
if param.matches_variable.get() {
continue;
}
if param.value.contains(var_pattern) {
param.matches_variable.set(true);
debug!("setting match true: {}", param.param);
}
}
if let Some(body) = &compare.body {
let body_data = match serde_json::to_string(&body.data) {
Ok(s) => Some(s),
Err(_) => None,
};
if let Some(b) = &body_data {
if b.contains(var_pattern) {
body.matches_variable.set(true);
debug!("compare body match true: {}", var_pattern);
}
}
}
}
fn update_response_variables(response: &definition::ResponseDescriptor, var_pattern: &str) {
for header in response.headers.iter() {
if header.matches_variable.get() {
continue;
}
if header.value.contains(var_pattern) {
header.matches_variable.set(true);
debug!("setting match true: {}", header.header);
}
}
if let Some(body) = response.body.as_ref() {
let body_data = match serde_json::to_string(&body.data) {
Ok(s) => Some(s),
Err(_) => None,
};
if let Some(b) = &body_data {
if b.contains(var_pattern) {
body.matches_variable.set(true);
debug!("response body match true: {}", var_pattern);
}
}
}
}
fn update_variable_matching(&self) {
trace!("scanning test definition for variable pattern matches");
for variable in self.variables.iter().chain(self.global_variables.iter()) {
let var_pattern = format!("${{{}}}", variable.name.trim());
// debug!("pattern: {}", var_pattern);
if let Some(setup) = self.setup.as_ref() {
Definition::update_request_variables(&setup.request, var_pattern.as_str());
if let Some(response) = &setup.response {
Definition::update_response_variables(response, var_pattern.as_str());
}
}
if let Some(always) = &self.cleanup.always {
Definition::update_request_variables(always, var_pattern.as_str());
}
if let Some(onsuccess) = &self.cleanup.onsuccess {
Definition::update_request_variables(onsuccess, var_pattern.as_str());
}
if let Some(onfailure) = &self.cleanup.onfailure {
Definition::update_request_variables(onfailure, var_pattern.as_str());
}
}
for stage in self.stages.iter() {
for variable in stage
.variables
.iter()
.chain(self.variables.iter().chain(self.global_variables.iter()))
{
let var_pattern = format!("${{{}}}", variable.name.trim());
// debug!("pattern: {}", var_pattern);
Definition::update_request_variables(&stage.request, var_pattern.as_str());
if let Some(compare) = &stage.compare {
Definition::update_compare_variables(compare, var_pattern.as_str());
}
if let Some(response) = &stage.response {
Definition::update_response_variables(response, var_pattern.as_str());
}
}
}
}
pub fn get_url(
&self,
iteration: u32,
url: &str,
params: &[http::Parameter],
state_variables: &HashMap<String, String>,
variables: &[Variable],
) -> String {
let joined: Vec<_> = params
.iter()
.map(|param| {
let p = self.get_processed_param(param, iteration);
format!("{}={}", p.0, p.1)
})
.collect();
let modified_url = if url.contains('$') {
let mut replaced_url = url.to_string();
let state_vars: Vec<(String, &String)> = state_variables
.iter()
.map(|(k, v)| (format!("${{{}}}", k), v))
.collect();
for (var_pattern, value) in &state_vars {
if !replaced_url.contains(var_pattern) {
continue;
}
debug!("state variable match: {}", var_pattern);
replaced_url
.clone_from(&replaced_url.replace(var_pattern.as_str(), value.as_str()));
}
for variable in variables.iter().chain(self.global_variables.iter()) {
let var_pattern = format!("${{{}}}", variable.name);
if !replaced_url.contains(var_pattern.as_str()) {
continue;
}
let replacement = variable.generate_value(self, iteration, &self.global_variables);
replaced_url
.clone_from(&replaced_url.replace(var_pattern.as_str(), replacement.as_str()))
}
replaced_url
} else {
url.to_string()
};
if !joined.is_empty() {
format!("{}?{}", modified_url, joined.join("&"))
} else {
modified_url
}
}
fn get_processed_param(&self, parameter: &http::Parameter, iteration: u32) -> (String, String) {
if parameter.matches_variable.get() {
for variable in self.variables.iter().chain(self.global_variables.iter()) {
let var_pattern = format!("${{{}}}", variable.name);
if !parameter.value.contains(var_pattern.as_str()) {
continue;
}
let replacement = variable.generate_value(self, iteration, &self.global_variables);
return (
parameter.param.clone(),
parameter
.value
.replace(var_pattern.as_str(), replacement.as_str()),
);
}
}
(parameter.param.clone(), parameter.value.clone())
}
fn get_processed_header(&self, header: &http::Header, iteration: u32) -> (String, String) {
for variable in self.variables.iter().chain(self.global_variables.iter()) {
let var_pattern = format!("${{{}}}", variable.name);
if !header.value.contains(var_pattern.as_str()) {
continue;
}
let replacement = variable.generate_value(self, iteration, &self.global_variables);
return (
header.header.clone(),
header
.value
.replace(var_pattern.as_str(), replacement.as_str()),
);
}
(String::from(""), String::from(""))
}
pub fn get_setup_request_headers(&self, iteration: u32) -> Vec<(String, String)> {
match self.setup.as_ref() {
Some(setup) => setup
.request
.headers
.iter()
.map(|h| {
if h.matches_variable.get() {
let header = self.get_processed_header(h, iteration);
(header.0, header.1)
} else {
(h.header.clone(), h.value.clone())
}
})
.collect(),
None => Vec::new(),
}
}
pub fn get_headers(&self, headers: &[http::Header], iteration: u32) -> Vec<(String, String)> {
headers
.iter()
.map(|h| {
if h.matches_variable.get() {
let header = self.get_processed_header(h, iteration);
(header.0, header.1)
} else {
(h.header.clone(), h.value.clone())
}
})
.collect()
}
pub fn get_cleanup_request_headers(&self, iteration: u32) -> Vec<(String, String)> {
match &self.cleanup.always {
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | true |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/logger.rs | src/logger.rs | use log::{Level, Log, Metadata, Record};
use std::{
io::{self, Write},
sync::Mutex,
};
pub struct SimpleLogger {
pub level: Level,
pub disabled: bool,
pub buffered: bool,
queue: Mutex<Vec<(Level, String)>>,
}
impl SimpleLogger {
pub fn new(level: Level, disabled: bool, buffered: bool) -> SimpleLogger {
SimpleLogger {
level,
disabled,
buffered,
queue: Mutex::new(vec![]),
}
}
fn print(&self, level: &Level, message: &String) {
match level {
Level::Info => {
print!("{}", message);
_ = io::stdout().flush();
}
Level::Warn => {
println!("\x1b[33m{}\x1b[0m", message);
}
Level::Error => {
println!("\x1b[31m{}\x1b[0m", message);
}
_ => println!("{}", message),
}
}
}
impl Log for SimpleLogger {
fn enabled(&self, metadata: &Metadata) -> bool {
metadata.level() <= self.level
}
fn log(&self, record: &Record) {
if self.disabled {
return;
}
if !self.enabled(record.metadata()) {
return;
}
if !self.buffered {
self.print(&record.level(), &format!("{}", record.args()));
return;
}
match record.level() {
Level::Info => {
self.print(&Level::Info, &format!("{}", record.args()));
}
_ => {
let mut q = self.queue.lock().unwrap();
q.push((record.level(), format!("{}", record.args())));
}
}
}
fn flush(&self) {
let mut queue = self.queue.lock().unwrap();
queue.iter().for_each(|(level, message)| {
self.print(level, message);
});
queue.clear();
}
}
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/telemetry.rs | src/telemetry.rs | use crate::{
config,
errors::TelemetryError,
executor,
executor::ResultDetails,
machine, test,
test::{definition::RequestDescriptor, http::Header, Definition},
};
use bytes::{Bytes, BytesMut};
use http_body_util::{BodyExt, Full};
use hyper::{header::HeaderValue, Request};
use hyper_rustls::{HttpsConnector, HttpsConnectorBuilder};
use hyper_util::{
client::legacy::{connect::HttpConnector, Client},
rt::TokioExecutor,
};
use log::{debug, trace};
use serde::{Deserialize, Serialize};
use std::{collections::HashSet, env, error::Error};
use ulid::Ulid;
use url::Url;
use uuid::Uuid;
const TELEMETRY_BASE_URL: &str = "https://ingestion.jikken.io/v1";
const TELEMETRY_DEV_BASE_URL: &str = "https://dev-ingestion.jikken.io/v1";
#[derive(Clone)]
pub struct Session {
pub token: Uuid,
pub session_id: Uuid,
// pub start_time: chrono::DateTime<chrono::Utc>,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct SessionPost<'a> {
pub version: String,
pub os: String,
pub machine_id: String,
pub tests: u32,
pub args: serde_json::Value,
pub validation: serde_json::Value,
pub config: serde_json::Value,
pub environments: Vec<&'a str>,
pub projects: Vec<&'a str>,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct SessionCompletedPost {
pub runtime: u32,
pub status: u32,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct SessionResponse {
pub session_id: String,
#[serde(rename = "identifier")]
pub _identifier: Option<ulid::Ulid>,
}
#[derive(Clone)]
pub struct Test {
pub test_id: Uuid,
pub session: Session,
// pub start_time: chrono::DateTime<chrono::Utc>,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct TestPost {
pub session_id: String,
pub identifier: Option<String>,
pub platform_id: ulid::Ulid,
pub definition: serde_json::Value,
}
#[derive(Serialize)]
#[serde(rename_all = "camelCase")]
struct TestCompletedPost {
pub session_id: String,
pub iteration: u32,
pub stage: u32,
pub stage_type: u32,
#[serde(skip_serializing_if = "Option::is_none")]
pub stage_name: Option<String>,
pub status: u32,
pub runtime: u32,
#[serde(skip_serializing_if = "Option::is_none")]
pub details: Option<serde_json::Value>,
#[serde(skip_serializing_if = "Option::is_none")]
pub project: Option<String>,
#[serde(skip_serializing_if = "Option::is_none")]
pub environment: Option<String>,
}
#[derive(Deserialize)]
#[serde(rename_all = "camelCase")]
struct TestResponse {
pub test_id: String,
}
fn redact_string(_val: &str) -> String {
static REDACTED_VALUE: &str = "*********";
REDACTED_VALUE.to_string()
}
fn redact_headers(headers: &mut [Header]) {
let should_redact = |s: &str| s.to_lowercase() == "authorization";
headers
.iter_mut()
.filter(|h| should_redact(h.header.as_str()))
.for_each(|h| {
h.value = redact_string(&h.value);
});
}
fn redact_request(req: &mut RequestDescriptor) {
redact_headers(req.headers.as_mut())
}
//temporal values will result in a new hash for test
//definition which means a new test
fn remove_temporal_values(td: test::Definition) -> test::Definition {
test::Definition {
global_variables: vec![],
..td
}
}
fn prune_definition(td: test::Definition) -> test::Definition {
remove_temporal_values(td)
}
fn redact_definition(mut td: test::Definition) -> test::Definition {
_ = td.setup.as_mut().map(|s| redact_request(&mut s.request));
_ = td.cleanup.always.as_mut().map(redact_request);
_ = td.cleanup.onfailure.as_mut().map(redact_request);
_ = td.cleanup.onsuccess.as_mut().map(redact_request);
td.stages.iter_mut().for_each(|s| {
redact_request(&mut s.request);
if let Some(c) = s.compare.as_mut() {
redact_headers(c.headers.as_mut())
};
});
td
}
fn redact_result_details(mut rd: ResultDetails) -> ResultDetails {
redact_headers(rd.request.headers.as_mut());
if let Some(c) = rd.compare_request.as_mut() {
redact_headers(c.headers.as_mut())
}
rd
}
fn get_url(url: &str, config: &config::Config) -> String {
format!(
"{}{}",
if config.settings.dev_mode.unwrap_or(false) {
TELEMETRY_DEV_BASE_URL
} else {
TELEMETRY_BASE_URL
},
url
)
}
fn get_config(config: &config::Config) -> serde_json::Value {
let global_variables = config.generate_global_variables();
let mut config_json = serde_json::to_value(config).unwrap();
config_json.as_object_mut().unwrap().insert(
"globals".to_string(),
serde_json::to_value(&global_variables).unwrap(),
);
config_json
}
pub fn get_connector() -> HttpsConnector<HttpConnector> {
HttpsConnectorBuilder::new()
.with_platform_verifier()
.https_or_http()
.enable_all_versions()
.build()
}
pub async fn create_session(
token: Uuid,
tests: Vec<&Definition>,
args_json: Box<serde_json::Value>,
config: &config::Config,
) -> Result<Session, Box<dyn Error + Send + Sync>> {
let client: Client<_, Full<Bytes>> =
Client::builder(TokioExecutor::new()).build(get_connector());
let uri = get_url("/sessions", config);
trace!("telemetry session url({})", uri);
match Url::parse(&uri) {
Ok(_) => {}
Err(error) => {
return Err(Box::from(format!("invalid telemetry url: {}", error)));
}
}
let validation_json = serde_json::json!({}); // todo: add validation report once validation is implemented
let config_json = get_config(config);
let m = machine::new();
let machine_id = m.generate_machine_id();
let mut test_count: u32 = 0;
let mut environments: HashSet<&str> = HashSet::new();
let mut projects: HashSet<&str> = HashSet::new();
tests.iter().for_each(|t| {
test_count += t.iterate;
if let Some(env) = &t.environment {
environments.insert(env);
}
if let Some(project) = &t.project {
projects.insert(project);
}
});
let post_body = SessionPost {
version: crate::VERSION.to_string(),
os: env::consts::OS.to_string(),
machine_id,
tests: test_count,
args: *args_json,
validation: validation_json,
config: config_json,
environments: Vec::from_iter(environments),
projects: Vec::from_iter(projects),
};
let post_string = serde_json::to_string(&post_body)?;
trace!("telemetry_body: {}", post_string);
let request = Request::builder()
.uri(uri)
.method("POST")
.header("Authorization", token.to_string())
.header("Content-Type", HeaderValue::from_static("application/json"))
.body(Full::from(post_string));
if let Ok(req) = request {
let response = client.request(req).await?;
let status = response.status();
if status.as_u16() != 201 {
// session creation failed
debug!("session creation failed: status({})", status);
return Err(Box::from(TelemetryError {
reason: "session creation failed".to_string(),
}));
}
let (_, mut body) = response.into_parts();
let mut body_bytes = BytesMut::new();
while let Some(next) = body.frame().await {
let frame = next.unwrap();
if let Some(chunk) = frame.data_ref() {
body_bytes.extend(chunk);
}
}
let response: SessionResponse = serde_json::from_slice(body_bytes.as_ref())?;
let session_id = uuid::Uuid::parse_str(&response.session_id)?;
return Ok(Session {
token,
session_id,
// start_time: chrono::Utc::now(),
});
}
Err(Box::from(TelemetryError {
reason: "invalid session request".to_string(),
}))
}
pub async fn create_test(
session: &Session,
definition: test::Definition,
config: &config::Config,
) -> Result<Test, Box<dyn Error + Send + Sync>> {
let client: Client<_, Full<Bytes>> =
Client::builder(TokioExecutor::new()).build(get_connector());
let uri = get_url("/tests", config);
trace!("telemetry test url({})", uri);
match Url::parse(&uri) {
Ok(_) => {}
Err(error) => {
return Err(Box::from(format!("invalid telemetry url: {}", error)));
}
}
let redacted_definition = redact_definition(definition.clone());
let pruned_definition = prune_definition(redacted_definition);
let definition_json = serde_json::to_value(&pruned_definition)?;
let ulid = Ulid::from_string(&definition.platform_id.expect("Platform ID is required"))
.expect("Platform ID must be valid");
let post_body = TestPost {
session_id: session.session_id.to_string(),
identifier: pruned_definition.id,
platform_id: ulid,
definition: definition_json,
};
let post_string = serde_json::to_string(&post_body)?;
trace!("telemetry_body: {}", post_string);
let request = Request::builder()
.uri(uri)
.method("POST")
.header("Authorization", session.token.to_string())
.header("Content-Type", HeaderValue::from_static("application/json"))
.body(Full::from(post_string));
if let Ok(req) = request {
let response = client.request(req).await?;
let status = response.status();
if status.as_u16() != 201 {
// session creation failed
debug!("test creation failed: status({})", status);
return Err(Box::from(TelemetryError {
reason: "test creation failed".to_string(),
}));
}
let (_, mut body) = response.into_parts();
let mut body_bytes = BytesMut::new();
while let Some(next) = body.frame().await {
let frame = next.unwrap();
if let Some(chunk) = frame.data_ref() {
body_bytes.extend(chunk);
}
}
let response: TestResponse = serde_json::from_slice(body_bytes.as_ref())?;
let test_id = uuid::Uuid::parse_str(&response.test_id)?;
return Ok(Test {
test_id,
session: session.clone(),
// start_time: chrono::Utc::now(),
});
}
Err(Box::from(TelemetryError {
reason: "invalid test request".to_string(),
}))
}
pub async fn complete_stage(
td: &test::Definition,
test: &Test,
iteration: u32,
stage: &executor::StageResult,
config: &config::Config,
) -> Result<(), Box<dyn Error + Send + Sync>> {
let client: Client<_, Full<Bytes>> =
Client::builder(TokioExecutor::new()).build(get_connector());
let uri = get_url(&format!("/tests/{}/completed", test.test_id), config);
trace!("telemetry test url({})", uri);
match Url::parse(&uri) {
Ok(_) => {}
Err(error) => {
return Err(Box::from(format!("invalid telemetry url: {}", error)));
}
}
let details_json = serde_json::to_value(redact_result_details(stage.details.clone()))?;
let post_data = TestCompletedPost {
session_id: test.session.session_id.to_string(),
iteration,
runtime: stage.total_runtime,
stage: stage.stage,
stage_type: stage.stage_type.clone() as u32,
stage_name: stage.stage_name.clone(),
status: stage.status.clone() as u32,
details: Some(details_json),
project: stage.project.clone(),
environment: stage.environment.clone(),
};
let post_body = serde_json::to_value(post_data)?;
let mut post_string = serde_json::to_string(&post_body)?;
post_string = td.redact_secrets(&post_string);
trace!("telemetry_body: {}", post_string);
let request = Request::builder()
.uri(&uri)
.method("POST")
.header("Authorization", test.session.token.to_string())
.header("Content-Type", HeaderValue::from_static("application/json"))
.body(Full::from(post_string));
if let Ok(req) = request {
let response = client.request(req).await?;
let status = response.status();
if status.as_u16() != 201 {
// session creation failed
debug!("test stage completion failed: status({})", status);
return Err(Box::from(TelemetryError {
reason: "test stage completion failed".to_string(),
}));
}
} else {
return Err(Box::from(TelemetryError {
reason: "invalid test request".to_string(),
}));
}
Ok(())
}
pub async fn complete_stage_skipped(
test: &Test,
test_definition: &test::Definition,
config: &config::Config,
) -> Result<(), Box<dyn Error + Send + Sync>> {
let client: Client<_, Full<Bytes>> =
Client::builder(TokioExecutor::new()).build(get_connector());
let uri = get_url(&format!("/tests/{}/completed", test.test_id), config);
trace!("telemetry complete stage url: {}", uri);
if let Err(error) = Url::parse(&uri) {
return Err(Box::from(format!("invalid telemetry url: {}", error)));
}
let post_data = TestCompletedPost {
session_id: test.session.session_id.to_string(),
iteration: 0,
stage: 0,
stage_type: 2, // Normal
stage_name: None,
status: 5, // Skipped
runtime: 0,
details: None,
project: test_definition.project.clone(),
environment: test_definition.environment.clone(),
};
let post_body = serde_json::to_value(post_data)?;
let post_string = serde_json::to_string(&post_body)?;
trace!("telemetry body: {}", post_string);
let request = Request::builder()
.uri(&uri)
.method("POST")
.header("Authorization", test.session.token.to_string())
.header("Content-Type", HeaderValue::from_static("application/json"))
.body(Full::from(post_string));
if let Ok(req) = request {
let response = client.request(req).await?;
let status = response.status();
if status.as_u16() != 201 {
debug!("test stage completion failed: status({})", status);
return Err(Box::from(TelemetryError {
reason: "test stage completion failed".to_string(),
}));
}
} else {
return Err(Box::from(TelemetryError {
reason: "invalid test request".to_string(),
}));
}
Ok(())
}
pub async fn complete_session(
session: &Session,
runtime: u32,
status: u32,
config: &config::Config,
) -> Result<(), Box<dyn Error + Send + Sync>> {
let client: Client<_, Full<Bytes>> =
Client::builder(TokioExecutor::new()).build(get_connector());
let uri = get_url(
&format!("/sessions/{}/completed", session.session_id),
config,
);
trace!("telemetry session url({})", uri);
match Url::parse(&uri) {
Ok(_) => {}
Err(error) => {
return Err(Box::from(format!("invalid telemetry url: {}", error)));
}
}
let post_body = SessionCompletedPost { runtime, status };
let post_string = serde_json::to_string(&post_body)?;
trace!("telemetry_body: {}", post_string);
let request = Request::builder()
.uri(uri)
.method("POST")
.header("Authorization", session.token.to_string())
.header("Content-Type", HeaderValue::from_static("application/json"))
.body(Full::from(post_string));
if let Ok(req) = request {
let response = client.request(req).await?;
let status = response.status();
if status.as_u16() != 200 {
// session creation failed
debug!("session completion failed: status({})", status);
return Err(Box::from(TelemetryError {
reason: "session completion failed".to_string(),
}));
}
}
Ok(())
}
#[derive(PartialEq, Eq)]
pub enum PlatformIdFailure {
Missing,
Invalid,
Duplicate,
}
pub fn validate_platform_ids(
definitions: Vec<&Definition>,
) -> Result<(), Vec<(&Definition, PlatformIdFailure)>> {
let mut failures: Vec<(&Definition, PlatformIdFailure)> = Vec::new();
let mut duplicate_check: HashSet<Ulid> = HashSet::new();
for definition in definitions {
let Some(id_raw) = definition.platform_id.as_ref() else {
failures.push((definition, PlatformIdFailure::Missing));
continue;
};
let Ok(id) = Ulid::from_string(id_raw) else {
failures.push((definition, PlatformIdFailure::Invalid));
continue;
};
if duplicate_check.contains(&id) {
failures.push((definition, PlatformIdFailure::Duplicate));
}
duplicate_check.insert(id);
}
if !failures.is_empty() {
return Err(failures);
}
Ok(())
}
#[cfg(test)]
mod tests {
use test::{File, Variable};
use crate::{
executor::ExpectedResultData,
test::definition::{CompareDescriptor, RequestResponseDescriptor, StageDescriptor},
};
use self::executor::RequestDetails;
use super::*;
fn headers_factory() -> Vec<Header> {
vec![
Header {
header: "AuthorIzation".to_string(), //dumb casing on purpose :)
value: "super_secret_key".to_string(),
matches_variable: false.into(),
},
Header {
header: "Foo".to_string(),
value: "Bar".to_string(),
matches_variable: false.into(),
},
]
}
#[test]
fn redact_string_redacts() {
assert_eq!("*********", redact_string("foo"));
}
#[test]
fn redact_headers_redacts() {
let mut headers = headers_factory();
redact_headers(&mut headers);
assert!(!headers.iter().any(|h| h.value == "super_secret_key"));
}
#[test]
fn redact_definition_redacts() {
let request = RequestDescriptor {
method: test::http::Verb::Get,
body: None,
headers: headers_factory(),
params: vec![],
url: "foo".to_string(),
};
let td = test::Definition {
name: None,
description: None,
id: None,
platform_id: None,
project: None,
environment: None,
requires: None,
tags: Vec::new(),
iterate: 0,
variables: Vec::new(),
global_variables: Vec::new(),
stages: vec![StageDescriptor {
name: None,
response: None,
// source_path: "".to_string(),
variables: vec![],
request: request.clone(),
compare: Some(CompareDescriptor {
add_headers: vec![],
method: test::http::Verb::Get,
add_params: vec![],
body: None,
headers: headers_factory(),
url: "foo2".to_string(),
ignore_headers: vec![],
ignore_params: vec![],
params: vec![],
strict: true,
}),
delay: None,
}],
setup: Some(RequestResponseDescriptor {
response: None,
request: request.clone(),
}),
cleanup: test::definition::CleanupDescriptor {
onsuccess: Some(request.clone()),
onfailure: Some(request.clone()),
always: Some(request.clone()),
},
disabled: false,
file_data: File::default(),
index: 0,
};
let get_all_headers = |td: test::Definition| -> Vec<Header> {
td.cleanup
.always
.unwrap()
.headers
.into_iter()
.chain(td.cleanup.onfailure.unwrap().headers.into_iter())
.chain(td.cleanup.onsuccess.unwrap().headers.into_iter())
.chain(
td.stages
.into_iter()
.map(|s| {
s.request
.headers
.into_iter()
.chain(s.compare.unwrap().headers.into_iter())
})
.flatten(),
)
.chain(td.setup.unwrap().request.headers.into_iter())
.collect()
};
let redacted_td = redact_definition(td.clone());
let redacted_headers = get_all_headers(redacted_td);
let nonredacted_headers = get_all_headers(td);
assert!(nonredacted_headers
.iter()
.any(|h| h.value == "super_secret_key"));
assert!(!redacted_headers
.iter()
.any(|h| h.value == "super_secret_key"));
}
#[test]
fn redact_resultdetails_redacts() {
let rd = RequestDetails {
body: serde_json::Value::Null,
headers: headers_factory(),
url: "".to_string(),
method: test::http::Verb::Get.as_method(),
};
let redacted = redact_result_details(ResultDetails {
actual: None,
request: rd.clone(),
compare_actual: None,
compare_request_runtime: None,
request_runtime: 100,
expected: ExpectedResultData {
body: None,
headers: vec![],
status: None,
..ExpectedResultData::new()
},
compare_request: Some(rd.clone()),
});
assert!(!redacted
.request
.headers
.iter()
.chain(redacted.compare_request.unwrap().headers.iter())
.any(|h| h.value == "super_secret_key"));
}
#[test]
fn remove_temporal_values_has_globals() {
let request = RequestDescriptor {
method: test::http::Verb::Get,
body: None,
headers: headers_factory(),
params: vec![],
url: "foo".to_string(),
};
let td = test::Definition {
name: None,
description: None,
id: None,
platform_id: None,
project: None,
environment: None,
requires: None,
tags: Vec::new(),
iterate: 0,
variables: Vec::new(),
global_variables: vec![Variable {
name: "my_global".to_string(),
source_path: "".to_string(),
value: test::ValueOrDatumOrFileOrSecret::Value {
value: serde_json::Value::from("hello"),
},
}],
stages: vec![StageDescriptor {
name: None,
response: None,
// source_path: "".to_string(),
variables: vec![],
request: request.clone(),
compare: Some(CompareDescriptor {
add_headers: vec![],
method: test::http::Verb::Get,
add_params: vec![],
body: None,
headers: headers_factory(),
url: "foo2".to_string(),
ignore_headers: vec![],
ignore_params: vec![],
params: vec![],
strict: true,
}),
delay: None,
}],
setup: Some(RequestResponseDescriptor {
response: None,
request: request.clone(),
}),
cleanup: test::definition::CleanupDescriptor {
onsuccess: Some(request.clone()),
onfailure: Some(request.clone()),
always: Some(request.clone()),
},
disabled: false,
file_data: File::default(),
index: 0,
};
let before = td.clone();
let pruned = remove_temporal_values(td);
assert_eq!(
Definition {
global_variables: vec![],
..before
},
pruned
)
}
#[test]
fn remove_temporal_values_has_no_globals() {
let request = RequestDescriptor {
method: test::http::Verb::Get,
body: None,
headers: headers_factory(),
params: vec![],
url: "foo".to_string(),
};
let td = test::Definition {
name: None,
description: None,
id: None,
platform_id: None,
project: None,
environment: None,
requires: None,
tags: Vec::new(),
iterate: 0,
variables: Vec::new(),
global_variables: Vec::new(),
stages: vec![StageDescriptor {
name: None,
response: None,
// source_path: "".to_string(),
variables: vec![],
request: request.clone(),
compare: Some(CompareDescriptor {
add_headers: vec![],
method: test::http::Verb::Get,
add_params: vec![],
body: None,
headers: headers_factory(),
url: "foo2".to_string(),
ignore_headers: vec![],
ignore_params: vec![],
params: vec![],
strict: true,
}),
delay: None,
}],
setup: Some(RequestResponseDescriptor {
response: None,
request: request.clone(),
}),
cleanup: test::definition::CleanupDescriptor {
onsuccess: Some(request.clone()),
onfailure: Some(request.clone()),
always: Some(request.clone()),
},
disabled: false,
file_data: File::default(),
index: 0,
};
let before = td.clone();
let pruned = remove_temporal_values(td);
assert_eq!(
Definition {
global_variables: vec![],
..before
},
pruned
)
}
}
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/validated.rs | src/validated.rs | use nonempty_collections::NEVec;
use validated::{
Validated,
Validated::{Fail, Good},
};
pub trait ValidatedExt<T, E> {
fn map5<U, V, W, X, Z, F>(
self,
vu: Validated<U, E>,
vv: Validated<V, E>,
vw: Validated<W, E>,
vx: Validated<X, E>,
f: F,
) -> Validated<Z, E>
where
F: FnOnce(T, U, V, W, X) -> Z;
}
impl<T, E> ValidatedExt<T, E> for Validated<T, E> {
/// Maps a function over five `Validated`, but only if all five are of the
/// `Good` variant. If any failed, then their errors are concatenated.
fn map5<U, V, W, X, Z, F>(
self,
vu: Validated<U, E>,
vv: Validated<V, E>,
vw: Validated<W, E>,
vx: Validated<X, E>,
f: F,
) -> Validated<Z, E>
where
F: FnOnce(T, U, V, W, X) -> Z,
{
match (self, vu, vv, vw, vx) {
(Good(t), Good(u), Good(v), Good(w), Good(x)) => Good(f(t, u, v, w, x)),
(Good(_), Good(_), Good(_), Good(_), Fail(e)) => Fail(e),
(Good(_), Good(_), Good(_), Fail(e), Good(_)) => Fail(e),
(Good(_), Good(_), Fail(e), Good(_), Good(_)) => Fail(e),
(Good(_), Fail(e), Good(_), Good(_), Good(_)) => Fail(e),
(Fail(e), Good(_), Good(_), Good(_), Good(_)) => Fail(e),
(Good(_), Good(_), Good(_), Fail(e0), Fail(e1)) => Fail(nons(e0, Some(e1).into_iter())),
(Good(_), Good(_), Fail(e0), Good(_), Fail(e1)) => Fail(nons(e0, Some(e1).into_iter())),
(Good(_), Fail(e0), Good(_), Good(_), Fail(e1)) => Fail(nons(e0, Some(e1).into_iter())),
(Fail(e0), Good(_), Good(_), Good(_), Fail(e1)) => Fail(nons(e0, Some(e1).into_iter())),
(Good(_), Good(_), Fail(e0), Fail(e1), Good(_)) => Fail(nons(e0, Some(e1).into_iter())),
(Good(_), Fail(e0), Good(_), Fail(e1), Good(_)) => Fail(nons(e0, Some(e1).into_iter())),
(Fail(e0), Good(_), Good(_), Fail(e1), Good(_)) => Fail(nons(e0, Some(e1).into_iter())),
(Good(_), Fail(e0), Fail(e1), Good(_), Good(_)) => Fail(nons(e0, Some(e1).into_iter())),
(Fail(e0), Good(_), Fail(e1), Good(_), Good(_)) => Fail(nons(e0, Some(e1).into_iter())),
(Fail(e0), Fail(e1), Good(_), Good(_), Good(_)) => Fail(nons(e0, Some(e1).into_iter())),
(Good(_), Good(_), Fail(e0), Fail(e1), Fail(e2)) => {
Fail(nons(e0, vec![e1, e2].into_iter()))
}
(Good(_), Fail(e0), Good(_), Fail(e1), Fail(e2)) => {
Fail(nons(e0, vec![e1, e2].into_iter()))
}
(Fail(e0), Good(_), Good(_), Fail(e1), Fail(e2)) => {
Fail(nons(e0, vec![e1, e2].into_iter()))
}
(Good(_), Fail(e0), Fail(e1), Good(_), Fail(e2)) => {
Fail(nons(e0, vec![e1, e2].into_iter()))
}
(Fail(e0), Good(_), Fail(e1), Good(_), Fail(e2)) => {
Fail(nons(e0, vec![e1, e2].into_iter()))
}
(Fail(e0), Fail(e1), Good(_), Good(_), Fail(e2)) => {
Fail(nons(e0, vec![e1, e2].into_iter()))
}
(Good(_), Fail(e0), Fail(e1), Fail(e2), Good(_)) => {
Fail(nons(e0, vec![e1, e2].into_iter()))
}
(Fail(e0), Good(_), Fail(e1), Fail(e2), Good(_)) => {
Fail(nons(e0, vec![e1, e2].into_iter()))
}
(Fail(e0), Fail(e1), Good(_), Fail(e2), Good(_)) => {
Fail(nons(e0, vec![e1, e2].into_iter()))
}
(Fail(e0), Fail(e1), Fail(e2), Good(_), Good(_)) => {
Fail(nons(e0, vec![e1, e2].into_iter()))
}
(Good(_), Fail(e0), Fail(e1), Fail(e2), Fail(e3)) => {
Fail(nons(e0, vec![e1, e2, e3].into_iter()))
}
(Fail(e0), Good(_), Fail(e1), Fail(e2), Fail(e3)) => {
Fail(nons(e0, vec![e1, e2, e3].into_iter()))
}
(Fail(e0), Fail(e1), Good(_), Fail(e2), Fail(e3)) => {
Fail(nons(e0, vec![e1, e2, e3].into_iter()))
}
(Fail(e0), Fail(e1), Fail(e2), Good(_), Fail(e3)) => {
Fail(nons(e0, vec![e1, e2, e3].into_iter()))
}
(Fail(e0), Fail(e1), Fail(e2), Fail(e3), Good(_)) => {
Fail(nons(e0, vec![e1, e2, e3].into_iter()))
}
(Fail(e0), Fail(e1), Fail(e2), Fail(e3), Fail(e4)) => {
Fail(nons(e0, vec![e1, e2, e3, e4].into_iter()))
}
}
}
}
/// Fuse some `NEVec`s together.
fn nons<E, I>(mut a: NEVec<E>, rest: I) -> NEVec<E>
where
I: Iterator<Item = NEVec<E>>,
{
for mut i in rest {
a.push(i.head);
a.append(&mut i.tail)
}
a
}
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/json.rs | src/json.rs | pub mod extractor;
pub mod filter;
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/executor.rs | src/executor.rs | use crate::{
config,
json::extractor::extract_json,
telemetry, test,
test::{
definition,
definition::ResponseDescriptor,
file::{
BodyOrSchema, BodyOrSchemaChecker, Checker, NumericSpecification,
ValueOrNumericSpecification,
},
http,
http::Header,
validation, Definition, Variable,
},
TagMode,
};
use bytes::{Bytes, BytesMut};
use http_body_util::{BodyExt, Full};
use hyper::{body::Incoming, header::HeaderValue, Request};
use hyper_rustls::HttpsConnectorBuilder;
use hyper_util::{client::legacy::Client, rt::TokioExecutor};
use log::{debug, error, info, trace, warn};
use rustls::ClientConfig;
use rustls_platform_verifier::Verifier;
use serde::Serialize;
use std::{
collections::{BTreeMap, BTreeSet, HashMap, HashSet},
error::Error,
fmt,
io::Write,
sync::Arc,
time::Instant,
vec,
};
use url::Url;
use validated::Validated::{self, Good};
#[derive(Default)]
pub struct Report {
pub test_files: u16,
pub run: u16,
pub passed: u16,
pub failed: u16,
pub skipped: u16,
pub runtime: u32,
}
impl From<ExecutionResult> for Report {
fn from(execution_result: ExecutionResult) -> Self {
let test_files = execution_result.test_results.len();
let totals = execution_result
.test_results
.into_iter()
.map(|tr| {
if tr.iteration_results.is_empty() {
(0, 0, 1)
} else {
tr.iteration_results.into_iter().fold(
(0, 0, 0),
|(passed, failed, skipped), iteration_result| match iteration_result.status
{
TestStatus::Failed => (passed, failed + 1, skipped),
TestStatus::Passed => (passed + 1, failed, skipped),
TestStatus::Skipped => (passed, failed, skipped + 1),
},
)
}
})
.fold(
(0, 0, 0),
|(total_passed, total_failed, total_skipped), (passed, failed, skipped)| {
(
total_passed + passed,
total_failed + failed,
total_skipped + skipped,
)
},
);
Report {
skipped: totals.2,
failed: totals.1,
passed: totals.0,
test_files: test_files as u16,
run: totals.1 + totals.0,
runtime: execution_result.runtime,
}
}
}
pub struct IterationResult {
pub iteration_number: u32,
pub status: TestStatus,
pub stage_results: Option<Result<(bool, Vec<StageResult>), Box<dyn Error + Send + Sync>>>,
}
impl IterationResult {
pub fn new(
iteration_number: u32,
stage_results: Result<(bool, Vec<StageResult>), Box<dyn Error + Send + Sync>>,
) -> Self {
//Determine test status here and store in the status field
let passed = *stage_results
.as_ref()
.map(|(passed, _)| passed)
.unwrap_or(&false);
Self {
iteration_number,
status: if passed {
TestStatus::Passed
} else {
TestStatus::Failed
},
stage_results: Some(stage_results),
}
}
pub fn new_skipped(iteration_number: u32) -> Self {
Self {
iteration_number,
status: TestStatus::Skipped,
stage_results: None,
}
}
}
pub struct TestResult {
pub test_name: String,
pub iteration_results: Vec<IterationResult>,
}
pub struct ExecutionResult {
pub test_results: Vec<TestResult>,
pub runtime: u32,
}
struct FormattedExecutionResult(String);
impl fmt::Display for FormattedExecutionResult {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
write!(f, "{}", self.0)
}
}
trait ExecutionResultFormatter {
fn format(&self, res: &ExecutionResult) -> FormattedExecutionResult;
}
fn formatted_result_to_file<T: ExecutionResultFormatter>(
formatter: T,
execution_result: &ExecutionResult,
file: &str,
) -> Result<(), std::io::Error> {
std::fs::OpenOptions::new()
.write(true)
.create_new(true)
.open(file)
.and_then(|mut f| f.write(formatter.format(execution_result).0.as_bytes()))
.map(|_| ())
}
struct JunitResultFormatter;
impl ExecutionResultFormatter for JunitResultFormatter {
fn format(&self, res: &ExecutionResult) -> FormattedExecutionResult {
let mut lines: Vec<String> = Vec::new();
lines.push(r#"<?xml version="1.0" encoding="UTF-8"?>"#.to_string());
lines.push(r#"<testsuites>"#.to_string());
for test in res.test_results.iter() {
lines.push(format!(r#"<testsuite name="{}">"#, test.test_name));
for iteration_result in test.iteration_results.iter() {
let test_iteration_name = format!(
"{}.Iterations.{}",
test.test_name.as_str(),
iteration_result.iteration_number + 1
);
lines.push(format!(
r#"<testsuite name="{}">"#,
test_iteration_name.as_str(),
));
for stage_result in iteration_result.stage_results.iter() {
match &stage_result {
Ok((_passed, stage_results)) => {
for (stage_number, stage_result) in stage_results.iter().enumerate() {
if stage_result.status == TestStatus::Passed {
lines.push(format!(
r#"<testcase name="stage_{}" classname="{}"/>"#,
stage_number + 1,
test_iteration_name.as_str()
));
} else {
lines.push(format!(
r#"<testcase name="stage_{}" classname="{}">"#,
stage_number + 1,
test_iteration_name.as_str()
));
if let validated::Validated::Fail(nec) =
&stage_result.validation
{
for i in nec {
lines.push(format!(
r#"<failure message="{}" type="AssertionError"/>"#,
i
));
}
}
lines.push(r#"</testcase>"#.to_string());
}
}
}
Err(_) => {
lines.push(
r#"<testcase name="Initial" classname="Initial" />"#.to_string(),
);
}
}
}
lines.push("</testsuite>".to_string());
}
lines.push("</testsuite>".to_string());
}
lines.push("</testsuites>".to_string());
FormattedExecutionResult(lines.join("\n"))
}
}
/// Strategy interface for how a test definition is executed (or skipped) —
/// implemented by the dry-run and actual-run policies below.
trait ExecutionPolicy {
    /// Human-readable label used in progress log lines (e.g. "Running").
    fn name(&self) -> String;
    /// Whether the progress line should end with a newline before results.
    fn new_line(&self) -> bool;
    /// Runs one iteration of `test`; returns overall pass/fail plus the
    /// per-stage results.
    async fn execute(
        &mut self,
        state: &mut State,
        telemetry: &Option<telemetry::Session>,
        test: &test::Definition,
        iteration: u32,
        config: &config::Config,
    ) -> Result<(bool, Vec<StageResult>), Box<dyn Error + Send + Sync>>;
    /// Records that `test` was skipped (disabled, or an earlier failure
    /// with continue-on-failure off).
    async fn skip(
        &mut self,
        telemetry: &Option<telemetry::Session>,
        test: &test::Definition,
        config: &config::Config,
    ) -> Result<(), Box<dyn Error + Send + Sync>>;
}
struct DryRunExecutionPolicy;
impl ExecutionPolicy for DryRunExecutionPolicy {
    fn name(&self) -> String {
        String::from("Dry Run")
    }
    fn new_line(&self) -> bool {
        true
    }
    /// Delegates to `dry_run`; a dry run never produces stage results.
    async fn execute(
        &mut self,
        state: &mut State,
        _telemetry: &Option<telemetry::Session>,
        test: &test::Definition,
        iteration: u32,
        _config: &config::Config,
    ) -> Result<(bool, Vec<StageResult>), Box<dyn Error + Send + Sync>> {
        let passed = dry_run(state, test, iteration).await?;
        Ok((passed, Vec::new()))
    }
    /// Nothing to record for a skipped dry run.
    async fn skip(
        &mut self,
        _telemetry: &Option<telemetry::Session>,
        _test: &test::Definition,
        _config: &config::Config,
    ) -> Result<(), Box<dyn Error + Send + Sync>> {
        Ok(())
    }
}
struct ActualRunExecutionPolicy;
impl ExecutionPolicy for ActualRunExecutionPolicy {
    fn name(&self) -> String {
        "Running".to_string()
    }
    fn new_line(&self) -> bool {
        false
    }
    /// Executes the test for real. When a telemetry session exists, a
    /// telemetry test record is created first; failure to create it is
    /// logged and the run proceeds without telemetry.
    async fn execute(
        &mut self,
        state: &mut State,
        telemetry: &Option<telemetry::Session>,
        test: &test::Definition,
        iteration: u32,
        config: &config::Config,
    ) -> Result<(bool, Vec<StageResult>), Box<dyn Error + Send + Sync>> {
        let telemetry_test = if let Some(s) = &telemetry {
            match telemetry::create_test(s, test.clone(), config).await {
                Ok(t) => Some(t),
                Err(e) => {
                    debug!("telemetry failed: {}", e);
                    None
                }
            }
        } else {
            None
        };
        run(state, test, iteration, telemetry_test, config).await
    }
    /// Marks the test as skipped in telemetry. Unlike `execute`, a missing
    /// session or a telemetry error here is propagated to the caller.
    async fn skip(
        &mut self,
        telemetry: &Option<telemetry::Session>,
        test: &test::Definition,
        config: &config::Config,
    ) -> Result<(), Box<dyn Error + Send + Sync>> {
        let session = match &telemetry {
            Some(session) => session,
            None => {
                debug!("missing telemetry session");
                return Err(Box::from("missing telemetry session"));
            }
        };
        match telemetry::create_test(session, test.clone(), config).await {
            Ok(telemetry_test) => {
                match telemetry::complete_stage_skipped(&telemetry_test, test, config).await {
                    Ok(_) => Ok(()),
                    Err(error) => {
                        debug!("telemetry test completion failed: {}", error);
                        Err(error)
                    }
                }
            }
            Err(error) => {
                debug!("telemetry test creation failed: {}", error);
                Err(error)
            }
        }
    }
}
/// Decorator around another policy that remembers whether the last
/// execution failed.
struct FailurePolicy<T: ExecutionPolicy> {
    wrapped_policy: T,
    failed: bool,
}
impl<T: ExecutionPolicy> FailurePolicy<T> {
    /// Wraps `policy`, starting in the not-failed state.
    fn new(policy: T) -> FailurePolicy<T> {
        Self {
            failed: false,
            wrapped_policy: policy,
        }
    }
}
impl<T: ExecutionPolicy> ExecutionPolicy for FailurePolicy<T> {
    fn name(&self) -> String {
        self.wrapped_policy.name()
    }
    fn new_line(&self) -> bool {
        self.wrapped_policy.new_line()
    }
    /// Delegates to the wrapped policy and records whether the execution
    /// passed.
    async fn execute(
        &mut self,
        state: &mut State,
        telemetry: &Option<telemetry::Session>,
        test: &test::Definition,
        iteration: u32,
        config: &config::Config,
    ) -> Result<(bool, Vec<StageResult>), Box<dyn Error + Send + Sync>> {
        let ret = self
            .wrapped_policy
            .execute(state, telemetry, test, iteration, config)
            .await;
        // An Err result counts as not passed (unwrap_or_default -> false).
        let passed = ret.as_ref().map(|(passed, _)| *passed).unwrap_or_default();
        // NOTE(review): this overwrites the flag on every call, so `failed`
        // reflects only the most recent execution. Confirm an accumulating
        // `self.failed |= !passed` isn't what callers expect.
        self.failed = !passed;
        ret
    }
    async fn skip(
        &mut self,
        telemetry: &Option<telemetry::Session>,
        test: &test::Definition,
        config: &config::Config,
    ) -> Result<(), Box<dyn Error + Send + Sync>> {
        self.wrapped_policy.skip(telemetry, test, config).await
    }
}
/// Formats a millisecond duration as "Xm Ys Zms", dropping leading units
/// that are zero ("Ys Zms", or just "Zms").
pub fn runtime_formatter(time_ms: u32) -> String {
    let milliseconds = time_ms % 1000;
    let seconds = (time_ms / 1000) % 60;
    let minutes = time_ms / 60_000;
    if minutes > 0 {
        format!("{minutes}m {seconds}s {milliseconds}ms")
    } else if seconds > 0 {
        format!("{seconds}s {milliseconds}ms")
    } else {
        format!("{milliseconds}ms")
    }
}
/// Runs every test (already flattened from dependency-ordered jobs)
/// through `exec_policy`, logging progress per iteration and collecting
/// per-test results. Respects `continue_on_failure` and per-test
/// `disabled` flags, and closes the telemetry session at the end.
async fn run_tests<T: ExecutionPolicy>(
    tests: Vec<Vec<test::Definition>>,
    telemetry: Option<telemetry::Session>,
    mut exec_policy: T,
    config: &config::Config,
) -> ExecutionResult {
    // NOTE(review): flattening discards the job grouping, so tests run
    // strictly sequentially here — confirm that is intended.
    let flattened_tests: Vec<test::Definition> = tests.into_iter().flatten().collect();
    let total_count = flattened_tests.len();
    let mut results: Vec<TestResult> = Vec::new();
    // Shared mutable state (captured variables, cookies) threaded through
    // every test in the session.
    let mut state = State {
        variables: HashMap::new(),
        cookies: HashMap::new(),
        bypass_cert_verification: config.settings.bypass_cert_verification,
    };
    let start_time = Instant::now();
    let mut any_failures = false;
    // Ensures the "skipping remaining tests" warning is printed only once.
    let mut message_displayed = false;
    for (i, test) in flattened_tests.into_iter().enumerate() {
        if any_failures && !config.settings.continue_on_failure && !message_displayed {
            warn!("Skipping remaining tests due to continueOnFailure setting.");
            log::logger().flush();
            message_displayed = true;
        }
        //let mut test_result: Vec<Result<(bool, Vec<StageResult>), Box<dyn Error + Send + Sync>>> =
        //    Vec::new();
        let mut iteration_results: Vec<IterationResult> = Vec::new();
        let test_name = test.name.clone().unwrap_or(format!("Test{}", i + 1));
        for iteration in 0..test.iterate {
            // TODO: clean this up based on policies
            // I don't see a clean way to access it without refactoring
            if any_failures && !config.settings.continue_on_failure {
                // Record a single skipped entry for the whole test, then
                // stop iterating it.
                if iteration == 0 {
                    info!(
                        "{} Test ({}/{}) `{}` ... \x1b[33mSKIPPED\x1b[0m\n",
                        exec_policy.name(),
                        i + 1,
                        total_count,
                        &test_name,
                    );
                    let _ = exec_policy.skip(&telemetry, &test, config).await;
                    iteration_results.push(IterationResult::new_skipped(iteration));
                }
                break;
            }
            if test.disabled {
                info!(
                    "{} Test ({}/{}) `{}` ... \x1b[33mDISABLED\x1b[0m\n",
                    exec_policy.name(),
                    i + 1,
                    total_count,
                    &test_name,
                );
                let _ = exec_policy.skip(&telemetry, &test, config).await;
                iteration_results.push(IterationResult::new_skipped(iteration));
                break;
            }
            let new_line = if exec_policy.new_line() { "\n" } else { "" };
            info!(
                "{} Test ({}/{}) `{}` Iteration({}/{}){}",
                exec_policy.name(),
                i + 1,
                total_count,
                &test_name,
                iteration + 1,
                test.iterate,
                new_line,
            );
            let result = exec_policy
                .execute(&mut state, &telemetry, &test, iteration, config)
                .await;
            match &result {
                Ok(p) => {
                    // Sum of per-stage runtimes, in milliseconds.
                    let total_runtime: u32 = p.1.iter().map(|r| r.total_runtime).sum();
                    let runtime_label = runtime_formatter(total_runtime);
                    if p.0 {
                        info!(" Runtime({}) ... \x1b[32mPASSED\x1b[0m\n", runtime_label);
                    } else {
                        any_failures = true;
                        info!(" Runtime({}) ... \x1b[31mFAILED\x1b[0m\n", runtime_label);
                    }
                }
                Err(e) => {
                    any_failures = true;
                    info!(" ... \x1b[31mFAILED\x1b[0m\n");
                    error!("{}", e);
                }
            }
            log::logger().flush();
            iteration_results.push(IterationResult::new(iteration, result));
        }
        results.push(TestResult {
            test_name,
            iteration_results,
        });
    }
    let runtime = start_time.elapsed().as_millis() as u32;
    if let Some(s) = &telemetry {
        // Session status: 1 = passed, 2 = failed — NOTE(review): confirm
        // these codes against the telemetry API.
        let status = if any_failures { 2 } else { 1 };
        _ = telemetry::complete_session(s, runtime, status, config).await;
    }
    ExecutionResult {
        test_results: results,
        runtime,
    }
}
/// A single cookie captured from a response, with the subset of
/// attributes this runner tracks.
struct StateCookie {
    domain: String,
    path: String,
    key: String,
    value: String,
    secure: bool,
}
impl StateCookie {
    /// Parses a `Set-Cookie`-style string ("key=value; Attr=...; ...").
    /// Always returns `Some`; the `Option` return is kept for existing
    /// call sites.
    pub fn new(data: String) -> Option<StateCookie> {
        debug!("cookie new: {}", &data);
        let segments: Vec<&str> = data.split(';').collect();
        // Split the name/value pair on the FIRST '=' only so cookie values
        // that themselves contain '=' (e.g. base64 payloads) are preserved
        // intact. A plain `split('=')` + `last()` would keep only the text
        // after the final '='.
        let cookie_value: Vec<&str> = segments
            .first()
            .expect("cookie should have segments")
            .splitn(2, '=')
            .collect();
        let key: String = cookie_value.first().unwrap_or(&"").trim().to_string();
        let value: String = cookie_value.last().unwrap_or(&"").trim().to_string();
        let mut domain: String = "".to_string();
        let mut path: String = "/".to_string();
        let mut secure: bool = false;
        // NOTE(review): attribute names are matched case-sensitively here
        // ("Domain", "Path", "Secure"); RFC 6265 treats them as
        // case-insensitive — confirm inputs are normalized upstream.
        for s in segments {
            let key_value: Vec<&str> = s.splitn(2, '=').collect();
            let k = key_value.first().unwrap_or(&"").trim();
            let v = key_value.last().unwrap_or(&"").trim().to_string();
            match k {
                "Domain" => domain = v,
                "Path" => path = v,
                "Secure" => secure = true,
                &_ => {}
            }
        }
        Some(StateCookie {
            domain,
            path,
            key,
            value,
            secure,
        })
    }
    /// Replaces the stored value, keeping domain/path/flags unchanged.
    pub fn update(&mut self, new_cookie: StateCookie) {
        self.value = new_cookie.value;
    }
}
/// Mutable state threaded through a test run.
struct State {
    // Captured/extracted variables available for substitution in later stages.
    variables: HashMap<String, String>,
    // Cookie jar: outer key presumably the cookie domain, inner key the
    // cookie name — TODO confirm at the call sites that populate it.
    cookies: HashMap<String, HashMap<String, StateCookie>>,
    // Mirrors config.settings.bypass_cert_verification for request building.
    bypass_cert_verification: bool,
}
/// Kind of stage within a test (explicit discriminants 1-3).
#[derive(PartialEq, Eq, Clone)]
pub enum StageType {
    Setup = 1,
    Normal = 2,
    Cleanup = 3,
}
/// Outcome of a stage or test.
// NOTE(review): the discriminant jumps from 2 to 5 — presumably matching
// an external (telemetry) status code; confirm before renumbering.
#[derive(Debug, PartialEq, Eq, Clone)]
pub enum TestStatus {
    Passed = 1,
    Failed = 2,
    Skipped = 5,
}
/// The actual HTTP response observed for a stage: status, headers, and the
/// body parsed as JSON (`Value::Null` when the body was not valid JSON).
#[derive(Debug, Clone, Serialize, PartialEq, Eq)]
pub struct ResponseResultData {
    pub headers: Vec<http::Header>,
    pub status: u16,
    pub body: serde_json::Value,
}
impl ResponseResultData {
    /// Drains a hyper response into headers, status, and a JSON body.
    /// A body that fails to parse as JSON is recorded as `Value::Null`;
    /// a transport error while reading the body yields `None`.
    pub async fn from_response(resp: hyper::Response<Incoming>) -> Option<ResponseResultData> {
        debug!("Received response : {resp:?}");
        let response_status = resp.status();
        // TODO: We'll have to revisit this to support non-ASCII headers
        let headers = resp
            .headers()
            .iter()
            .map(|h| http::Header::new(h.0.to_string(), h.1.to_str().unwrap_or("").to_string()))
            .collect();
        let (_, mut body) = resp.into_parts();
        let mut response_bytes = BytesMut::new();
        while let Some(next) = body.frame().await {
            // A transport error mid-body previously panicked (unwrap);
            // treat it as an unreadable response instead.
            let frame = match next {
                Ok(frame) => frame,
                Err(e) => {
                    debug!("failed to read response frame: {}", e);
                    return None;
                }
            };
            if let Some(chunk) = frame.data_ref() {
                response_bytes.extend(chunk);
            }
        }
        match serde_json::from_slice(&response_bytes) {
            Ok(data) => {
                debug!("Body is {data}");
                Some(ResponseResultData {
                    headers,
                    status: response_status.as_u16(),
                    body: data,
                })
            }
            Err(e) => {
                // TODO: add support for non JSON responses
                debug!("response is not valid JSON data: {}", e);
                debug!("{}", std::str::from_utf8(&response_bytes).unwrap_or(""));
                Some(ResponseResultData {
                    headers,
                    status: response_status.as_u16(),
                    body: serde_json::Value::Null,
                })
            }
        }
    }
}
/// The expected response for a stage; `None` fields were not specified in
/// the test definition.
#[derive(Debug, Clone, PartialEq, Serialize)]
pub struct ExpectedResultData {
    pub headers: Vec<http::Header>,
    pub status: Option<ValueOrNumericSpecification<u16>>,
    pub response_time: Option<NumericSpecification<u32>>,
    pub body: Option<BodyOrSchema>,
    // NOTE(review): strictness semantics are enforced by the comparison
    // code elsewhere — confirm exact meaning there.
    pub strict: bool,
}
impl ExpectedResultData {
    /// An empty expectation: no header/status/time/body checks, strict on.
    pub fn new() -> Self {
        ExpectedResultData {
            headers: Vec::new(),
            status: None,
            response_time: None,
            body: None,
            strict: true,
        }
    }
    //Consider making get_body a static method that
    //accepts the global vars. Passing the Definition seems wrong
    /// Builds the expectation from an optional response descriptor,
    /// resolving the expected body via the test definition's variables.
    /// A missing descriptor yields the empty expectation.
    pub fn from_request(
        req: Option<ResponseDescriptor>,
        td: &test::Definition,
        state_variables: &HashMap<String, String>,
        variables: &[Variable],
        iteration: u32,
    ) -> ExpectedResultData {
        match req {
            Some(r) => ExpectedResultData {
                headers: r.headers,
                status: r.status,
                response_time: r.response_time,
                body: td.get_expected_request_body(&r.body, state_variables, variables, iteration),
                strict: r.strict,
            },
            None => ExpectedResultData::new(),
        }
    }
}
/// The concrete HTTP request that was issued for a stage.
#[derive(Debug, Clone, Serialize)]
pub struct RequestDetails {
    pub headers: Vec<http::Header>,
    pub url: String,
    pub method: http::Method,
    pub body: serde_json::Value,
}
/// Request/response pair for a stage, plus the optional secondary
/// "compare" request/response when the stage performs a comparison.
#[derive(Debug, Clone, Serialize)]
pub struct ResultDetails {
    pub request: RequestDetails,
    pub request_runtime: u32,
    pub expected: ExpectedResultData,
    pub actual: Option<ResponseResultData>,
    pub compare_request: Option<RequestDetails>,
    pub compare_request_runtime: Option<u32>,
    pub compare_actual: Option<ResponseResultData>,
}
/// Outcome of a single stage execution, carrying everything needed for
/// reporting (JUnit output, telemetry, console summaries).
#[derive(Clone)]
pub struct StageResult {
    pub stage: u32,
    pub stage_type: StageType,
    pub stage_name: Option<String>,
    // Runtime in milliseconds (summed and fed to runtime_formatter).
    pub total_runtime: u32,
    pub status: TestStatus,
    pub details: ResultDetails,
    // Pass when all assertions validated; Fail carries the error messages.
    pub validation: Validated<Vec<()>, String>,
    pub project: Option<String>,
    pub environment: Option<String>,
}
fn load_test_from_path(filename: &str) -> Option<test::File> {
let load_result = test::file::load(filename);
match load_result {
Ok(file) => Some(file),
Err(e) => {
error!("unable to load test file ({}) data: {}", filename, e);
None
}
}
}
fn validate_test_file(
test_file: test::File,
global_variables: &[test::Variable],
project: Option<String>,
environment: Option<String>,
index: usize,
) -> Option<test::Definition> {
let name = test_file
.name
.clone()
.unwrap_or_else(|| test_file.filename.clone());
let res = validation::validate_file(test_file, global_variables, project, environment, index);
match res {
Ok(file) => Some(file),
Err(e) => {
error!("Test \"{}\" failed validation: {}.", name, e);
None
}
}
}
//consider using a set for tags and leveraging set operations
//instead of raw loops
/// Returns true when `test_definition` should be skipped under the given
/// tag filter: OR mode skips tests matching none of the tags, AND mode
/// skips tests missing any tag. The skip reason is logged at debug level.
fn ignored_due_to_tag_filter(
    test_definition: &test::Definition,
    tags: &[String],
    tag_mode: &TagMode,
) -> bool {
    let test_name = test_definition
        .name
        .clone()
        .unwrap_or("UKNOWN_NAME".to_string());
    match tag_mode {
        TagMode::OR => {
            if tags.iter().any(|t| test_definition.tags.contains(t)) {
                return false;
            }
            debug!(
                "test `{}` doesn't match any tags: {}",
                test_name,
                tags.join(", ")
            );
            true
        }
        TagMode::AND => match tags.iter().find(|t| !test_definition.tags.contains(*t)) {
            Some(missing) => {
                debug!("test `{}` is missing tag: {}", test_name, missing);
                true
            }
            None => false,
        },
    }
}
/// One round of topological scheduling: returns the nodes that are not
/// yet scheduled and are not the target of an edge from any unscheduled
/// node (i.e. all their prerequisites have already been scheduled).
fn schedule_impl(
    graph: &BTreeMap<usize, BTreeSet<usize>>,
    scheduled_nodes: &BTreeSet<usize>,
) -> BTreeSet<usize> {
    // Everything already scheduled, plus every dependent of a node that
    // is still pending, is blocked from this round.
    let mut blocked: BTreeSet<usize> = scheduled_nodes.clone();
    for (node, edges) in graph.iter() {
        if !scheduled_nodes.contains(node) {
            blocked.extend(edges.iter().copied());
        }
    }
    graph
        .keys()
        .filter(|node| !blocked.contains(*node))
        .copied()
        .collect()
}
/// Orders tests into sequential "jobs" by topologically scheduling the
/// `requires` dependency graph; tests inside the same job have no
/// ordering constraints between them.
fn construct_test_execution_graph_v2(
    tests_to_run: Vec<test::Definition>,
    tests_to_ignore: Vec<test::Definition>,
) -> Vec<Vec<Definition>> {
    // Ignored tests are indexed too so a `requires` reference to a
    // tag-filtered test can still be resolved.
    let tests_by_id: HashMap<String, test::Definition> = tests_to_run
        .clone()
        .into_iter()
        .chain(tests_to_ignore)
        .filter(|td| td.id.is_some())
        .map(|td| (td.id.clone().unwrap(), td))
        .collect();
    trace!("determine test execution order based on dependency graph");
    //Nodes are IDs ; Directed edges imply ordering; i.e. A -> B; B depends on A
    let mut graph: BTreeMap<usize, BTreeSet<usize>> = BTreeMap::new();
    tests_to_run
        .iter()
        .map(|td| (td.id.clone(), td))
        .for_each(|(id, definition)| {
            if let Some(required_id) = definition.requires.as_ref() {
                // Unresolvable requirement: the test is left out of the
                // graph entirely and reported as missing after scheduling.
                let Some(required_def) = tests_by_id.get(required_id) else {
                    return;
                };
                if required_def.disabled {
                    warn!(
                        "Test \"{}\" requires a disabled test: \"{}\"",
                        definition
                            .name
                            .as_deref()
                            .unwrap_or(&id.clone().unwrap_or(definition.index.to_string())),
                        required_id
                    );
                    //should we do transitive disablement?
                }
                if let Some(edges) = graph.get_mut(&required_def.index) {
                    edges.insert(definition.index);
                } else {
                    graph.insert(required_def.index, BTreeSet::from([definition.index]));
                }
            }
            let node_for_index = graph.get(&definition.index);
            if node_for_index.is_none() {
                graph.insert(definition.index, BTreeSet::new());
            }
            //intuition: if it already has a dependent, it's simply a test
            //depended on by multiple other tests and not a duplicate ID made in error
            else if node_for_index.unwrap().is_empty() {
                warn!(
                    "Skipping test, found duplicate test id: {}",
                    &id.clone().unwrap()
                );
            }
        });
    // Repeatedly peel off the set of nodes whose prerequisites are all
    // scheduled; each peel becomes one job.
    let mut jobs: Vec<BTreeSet<usize>> = Vec::new();
    let mut scheduled_nodes: BTreeSet<usize> = BTreeSet::new();
    while graph.len() != scheduled_nodes.len() {
        let job = schedule_impl(&graph, &scheduled_nodes);
        job.iter().for_each(|n| _ = scheduled_nodes.insert(*n));
        jobs.push(job);
    }
    // NOTE(review): this indexes tests_to_run by `definition.index`,
    // assuming index always equals the test's position — confirm.
    let job_definitions: Vec<Vec<Definition>> = jobs
        .into_iter()
        .map(|hs| {
            hs.into_iter()
                .map(|index| tests_to_run.get(index).unwrap().clone())
                .collect::<Vec<Definition>>()
        })
        .collect();
    let flattened_jobs = job_definitions
        .iter()
        .flatten()
        .collect::<Vec<&Definition>>();
    // Any test that never made it into a job had a `requires` we could
    // not resolve — warn with the set difference of display names.
    if tests_to_run.len() != flattened_jobs.len() {
        //not smart enough on rust to write generic lambda in order to not repeat myself here
        let s1: HashSet<String> = tests_to_run
            .iter()
            .map(|td| {
                td.name
                    .clone()
                    .unwrap_or(td.id.clone().unwrap_or(td.index.to_string()))
            })
            .collect();
        let s2: HashSet<String> = flattened_jobs
            .iter()
            .map(|td| {
                td.name
                    .clone()
                    .unwrap_or(td.id.clone().unwrap_or(td.index.to_string()))
            })
            .collect();
        let missing_tests = (&s1 - &s2)
            .into_iter()
            .filter(|s| !s.is_empty())
            .map(|s| format!("\"{}\"", s))
            .collect::<Vec<String>>()
            .join(",");
        if !missing_tests.is_empty() {
            warn!("Warning: Required tests not found.");
            warn!(
                "Check the 'requires' tag in the following test definition(s): {}.\n\n",
                missing_tests
            );
        }
    }
    for (count, job) in job_definitions.iter().enumerate() {
        trace!(
            "Job {count}: {}",
            job.iter()
                .enumerate()
                .map(|(i, t)| t
                    .name
                    .clone()
                    .unwrap_or(t.id.clone().unwrap_or(i.to_string())))
                .collect::<Vec<String>>()
                .join(", ")
        )
    }
    job_definitions
}
/// Loads, validates, and tag-filters the given test files.
/// Returns `(tests to run, tests filtered out by tags)`; files that fail
/// to load or validate are logged and dropped entirely.
pub fn tests_from_files(
    config: &config::Config,
    files: Vec<String>,
    tags: Vec<String>,
    project: Option<String>,
    environment: Option<String>,
    tag_mode: TagMode,
) -> (Vec<test::Definition>, Vec<test::Definition>) {
    let global_variables = config.generate_global_variables();
    let mut tests_to_ignore: Vec<test::Definition> = Vec::new();
    let tests_to_run: Vec<test::Definition> = files
        .into_iter()
        .filter_map(|s| load_test_from_path(s.as_str()))
        .enumerate()
        .filter_map(|(i, f)| {
            validate_test_file(
                f,
                &global_variables,
                project.clone(),
                environment.clone(),
                i,
            )
        })
        // Tag-filtered tests are kept separately so dependency resolution
        // can still see them as `requires` targets.
        .filter_map(|f| {
            if !ignored_due_to_tag_filter(&f, &tags, &tag_mode) {
                Some(f)
            } else {
                tests_to_ignore.push(f);
                None
            }
        })
        .collect();
    (tests_to_run, tests_to_ignore)
}
pub fn print_validation_failures(
failures: Vec<(&Definition, telemetry::PlatformIdFailure)>,
as_errors: bool,
) {
let messages: Vec<String> = failures.iter().map(|(definition, failure)| {
match failure {
telemetry::PlatformIdFailure::Missing => {
format!("Test at path \"{}\" does not have a platformId. This field is required to stream telemetry data.", definition.file_data.filename)
},
telemetry::PlatformIdFailure::Invalid => {
format!("Test at path \"{}\" has an invalid platformId ({}). This field must be a valid ULID.", definition.file_data.filename, definition.platform_id.clone().expect("PlatformId should have a value"))
},
telemetry::PlatformIdFailure::Duplicate => {
format!("Test at path \"{}\" has a duplicate platformId ({}). This field must be a valid ULID and unique across all files.", definition.file_data.filename, definition.platform_id.clone().expect("PlatformId should have a value"))
},
}
}).collect();
if as_errors {
messages.iter().for_each(|m| error!("{}", m));
} else {
messages.iter().for_each(|m| warn!("{}", m));
}
}
pub async fn execute_tests(
config: config::Config,
tests_to_run: Vec<test::Definition>,
mode_dryrun: bool,
tests_to_ignore: Vec<test::Definition>,
junit_file: Option<String>,
cli_args: Box<serde_json::Value>,
) -> Report {
if !tests_to_ignore.is_empty() {
trace!("filtering out tests which don't match the tag pattern")
}
trace!("determine test execution order based on dependency graph");
let tests_to_run_with_dependencies =
construct_test_execution_graph_v2(tests_to_run.clone(), tests_to_ignore.clone());
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | true |
mod config;
mod errors;
mod executor;
mod json;
mod logger;
mod machine;
mod new;
mod telemetry;
mod test;
mod updater;
mod validated;
use clap::{Parser, Subcommand};
use glob::{glob_with, MatchOptions};
use log::{debug, error, info, warn, Level, LevelFilter};
use logger::SimpleLogger;
use serde::{Deserialize, Serialize};
use std::{
error::Error,
path::{Path, PathBuf},
};
use telemetry::PlatformIdFailure;
use tokio::{fs, io::AsyncWriteExt};
use ulid::Ulid;
// Crate version baked in from Cargo.toml at compile time.
const VERSION: &str = env!("CARGO_PKG_VERSION");
// Name of the per-directory ignore file (patterns, one per line).
const IGNORE_FILE: &str = ".jikkenignore";
/// Top-level operation selected by the CLI subcommand.
#[derive(PartialEq, Eq)]
pub enum ExecutionMode {
    Run,
    Dryrun,
    List,
    Format,
    // bool: whether to generate and insert missing platform IDs
    Validate(bool),
}
/// How multiple `--tag` filters combine: all must match (AND) or any (OR).
pub enum TagMode {
    AND,
    OR,
}
// Top-level CLI definition (clap derive). The `///` doc comments on the
// fields double as --help text, so they are left user-facing; only `//`
// comments are used for maintainer notes.
#[derive(Parser, Serialize, Deserialize)]
#[command(author, version, about, long_about = None)]
pub struct Cli {
    #[command(subcommand)]
    command: Commands,
    /// Indicate which environment tests are executing against
    /// {n}This is not used unless tests are reporting to the Jikken.IO platform via an API Key
    #[arg(short, long = "env", name = "env")]
    environment: Option<String>,
    /// Indicate which project tests belong to
    /// {n}This is not used unless tests are reporting to the Jikken.IO platform via an API Key
    #[arg(short, long = "project", name = "proj")]
    project: Option<String>,
    /// Specify path to a Jikken configuration file
    /// {n}By default, optional ".jikken" files can be placed in the current directory
    /// {n}and the user's home directory. This option instructs jikken to use the
    /// {n}provided path instead of the optional .jikken file in the current directory
    #[arg(short, long = "config_file", name = "config_file")]
    config_file: Option<String>,
    /// Enable quiet mode, suppresses all console output
    #[arg(short, long, default_value_t = false)]
    quiet: bool,
    /// Enable verbose mode, provides more detailed console output
    #[arg(short, long, default_value_t = false)]
    verbose: bool,
    /// Enable trace mode, provides exhaustive console output
    #[arg(long, default_value_t = false)]
    trace: bool,
}
// CLI subcommands (clap derive). The `///` doc comments are the --help
// text. Run/DryRun/List/Format/Validate all share the same path /
// recursive / tag selection options.
#[derive(Subcommand, Serialize, Deserialize)]
pub enum Commands {
    /// Execute tests
    #[command(name = "run", alias = "r")]
    Run {
        /// The path(s) to search for test files
        /// {n}By default, the current path is used
        #[arg(name = "path")]
        paths: Vec<String>,
        /// Recursively search for test files
        #[arg(short)]
        recursive: bool,
        /// Select tests to run based on tags
        /// {n}By default, tests must match all given tags to be selected
        #[arg(short, long = "tag", name = "tag")]
        tags: Vec<String>,
        /// Toggle tag matching logic to select tests matching any of the given tags
        #[arg(long, default_value_t = false)]
        tags_or: bool,
        /// Output results in junit format to specified file
        #[arg(long = "junit", name = "junit_file")]
        junit: Option<String>,
    },
    /// Execute tests without calling API endpoints
    #[command(name = "dryrun", alias = "dr")]
    DryRun {
        /// The path(s) to search for test files
        /// {n}By default, the current path is used
        #[arg(name = "path")]
        paths: Vec<String>,
        /// Recursively search for test files
        #[arg(short)]
        recursive: bool,
        /// Select tests to run based on tags
        /// {n}By default, tests must match all given tags to be selected
        #[arg(short, long = "tag", name = "tag")]
        tags: Vec<String>,
        /// Toggle tag matching logic to select tests matching any of the given tags
        #[arg(long, default_value_t = false)]
        tags_or: bool,
        /// Output results in junit format to specified file
        #[arg(long = "junit", name = "junit_file")]
        junit: Option<String>,
    },
    /// List test files
    #[command(name = "list")]
    List {
        /// The path(s) to search for test files
        /// {n}By default, the current path is used
        #[arg(name = "path")]
        paths: Vec<String>,
        /// Recursively search for test files
        #[arg(short)]
        recursive: bool,
        /// Select tests to list based on tags
        /// {n}By default, tests must match all given tags to be selected
        #[arg(short, long = "tag", name = "tag")]
        tags: Vec<String>,
        /// Toggle tag matching logic to select tests matching any of the given tags
        #[arg(long, default_value_t = false)]
        tags_or: bool,
    },
    /// Format test files
    #[command(name = "format", alias = "fmt")]
    Format {
        /// The path(s) to search for test files
        /// {n}By default, the current path is used
        #[arg(name = "path")]
        paths: Vec<String>,
        /// Recursively search for test files
        #[arg(short)]
        recursive: bool,
        /// Select tests to format based on tags
        /// {n}By default, tests must match all given tags to be selected
        #[arg(short, long = "tag", name = "tag")]
        tags: Vec<String>,
        /// Toggle tag matching logic to select tests matching any of the given tags
        #[arg(long, default_value_t = false)]
        tags_or: bool,
    },
    /// Validate test files
    Validate {
        /// The path(s) to search for test files
        /// {n}By default, the current path is used
        #[arg(name = "path")]
        paths: Vec<String>,
        /// Recursively search for test files
        #[arg(short)]
        recursive: bool,
        /// Select tests to validate based on tags
        /// {n}By default, tests must match all given tags to be selected
        #[arg(short, long = "tag", name = "tag")]
        tags: Vec<String>,
        /// Toggle tag matching logic to select tests matching any of the given tags
        #[arg(long, default_value_t = false)]
        tags_or: bool,
        /// Automatically generate and insert platform IDs in tests that don't have one
        #[arg(long, default_value_t = false)]
        generate_platform_ids: bool,
    },
    /// Create a new test
    New {
        /// The name of the test file to be created
        name: Option<String>,
        /// OpenApi spec to derive tests from
        #[arg(long = "from_openapi", name = "from_openapi")]
        openapi_spec_path: Option<String>,
        /// Generate a test template with all available options
        #[arg(short, long = "full", name = "full")]
        full: bool,
        /// Generate a multi-stage test template
        #[arg(short = 'm', long = "multistage", name = "multistage")]
        multistage: bool,
        /// Output template to the console instead of saving to a file
        #[arg(short = 'o')]
        output: bool,
    },
    /// Update Jikken (if a newer version exists)
    Update,
}
/// Expands `glob_string` and returns the matching paths that end in
/// `.jkt`. Unreadable matches are silently skipped.
// NOTE(review): an invalid glob pattern panics via unwrap — consider
// surfacing it through the Result instead.
fn glob_walk(glob_string: &str) -> Result<Vec<String>, Box<dyn Error + Send + Sync>> {
    let matches: Vec<String> = glob_with(glob_string, MatchOptions::default())
        .unwrap()
        .flatten()
        .filter_map(|path| path.to_str().map(String::from))
        .filter(|s| s.ends_with(".jkt"))
        .collect();
    Ok(matches)
}
/// True when `file_name` matches the optional pattern; no pattern means
/// everything matches.
fn satisfies_potential_glob_filter(glob_pattern: &Option<glob::Pattern>, file_name: &str) -> bool {
    glob_pattern
        .as_ref()
        .map_or(true, |p| p.matches_with(file_name, MatchOptions::default()))
}
/// Warns if an explicitly supplied config file path does not exist.
fn check_supplied_config_file_existence(config_file: &Option<String>) {
    let Some(file) = config_file else { return };
    check_supplied_file_existence("config", &PathBuf::from(file));
}
/// Warns (does not fail) when `path` cannot be confirmed to exist.
fn check_supplied_file_existence(file_description: &str, path: &Path) {
    if path.try_exists().unwrap_or(false) {
        return;
    }
    warn!(
        "Supplied {} file does not exist: {}",
        file_description,
        path.as_os_str().to_str().unwrap_or_default()
    );
}
// Consider how to approach feedback to user when supplied pattern
// is invalid
/// Builds the walkdir entry filter: admit directories (so traversal can
/// descend) and `.jkt` files that satisfy the optional glob pattern.
fn create_top_level_filter(glob_pattern: &Option<String>) -> impl Fn(&walkdir::DirEntry) -> bool {
    // Compile the optional pattern once; a malformed pattern is treated
    // the same as no pattern being present at all.
    let pattern = glob_pattern
        .as_deref()
        .and_then(|s| glob::Pattern::new(s).ok());
    move |e: &walkdir::DirEntry| -> bool {
        // Entries with non-UTF-8 names are rejected (and thus pruned).
        let Some(name) = e.file_name().to_str() else {
            return false;
        };
        if e.file_type().is_dir() {
            return true;
        }
        e.file_type().is_file()
            && name.ends_with(".jkt")
            && satisfies_potential_glob_filter(&pattern, name)
    }
}
/// Walks `path` (depth 1 unless `recursive`) collecting `.jkt` file
/// paths, optionally restricted by a glob pattern on the file name.
async fn search_directory(
    path: &str,
    recursive: bool,
    glob_pattern: Option<String>,
) -> Result<Vec<String>, Box<dyn Error + Send + Sync>> {
    let mut ret: Vec<String> = Vec::new();
    let entry_is_file = |e: &walkdir::DirEntry| e.metadata().map(|e| e.is_file()).unwrap_or(false);
    walkdir::WalkDir::new(path)
        .max_depth(if recursive { usize::MAX } else { 1 })
        .into_iter()
        // filter_entry prunes whole subtrees, so the filter must admit
        // directories as well as matching .jkt files.
        .filter_entry(create_top_level_filter(&glob_pattern))
        .filter_map(|e| e.ok())
        .filter(entry_is_file)
        .for_each(|e| {
            if let Some(s) = e.path().to_str() {
                ret.push(String::from(s))
            }
        });
    Ok(ret)
}
/// Reads the ignore file into one entry per line. An absent or
/// unreadable ignore file simply means "ignore nothing".
async fn get_ignore_files(ignore_file: &std::path::Path) -> Vec<String> {
    match tokio::fs::read_to_string(ignore_file).await {
        Ok(contents) => contents.split('\n').map(str::to_string).collect(),
        Err(_) => Vec::new(),
    }
}
/// True when `file` is covered by one `.jikkenignore` entry. An entry
/// naming an existing file matches that file exactly; one naming an
/// existing directory matches files directly inside it; anything else is
/// interpreted as a glob pattern.
fn ignore_matches<'a>(ignore_pattern: &'a str, file: &'a str) -> bool {
    let ignore_pattern_path = std::path::Path::new(ignore_pattern);
    let file_path = std::path::Path::new(file);
    if ignore_pattern_path.is_file() {
        file_path == ignore_pattern_path
    } else if ignore_pattern_path.is_dir() {
        let parent = file_path.parent().and_then(Path::to_str).unwrap_or_default();
        parent == ignore_pattern_path.to_str().unwrap_or_default()
    } else {
        glob::Pattern::new(ignore_pattern)
            .map(|p| p.matches_path(file_path))
            .unwrap_or_default()
    }
}
/// Resolves CLI path arguments into a concrete list of test files.
/// Each argument may be an existing file, an existing directory, or — if
/// the path does not exist — a glob pattern. Entries matching the ignore
/// file's patterns are filtered out of the final list.
async fn get_files(
    paths: Vec<String>,
    ignore_file: &std::path::Path,
    recursive: bool,
) -> Result<Vec<String>, Box<dyn Error + Send + Sync>> {
    let mut results: Vec<String> = Vec::new();
    for path in paths {
        let exists = fs::try_exists(&path).await.unwrap_or(false);
        let is_file = exists && fs::metadata(&path).await?.is_file();
        // A non-existent path is treated as a glob pattern.
        let glob_pattern = if !exists {
            Some(String::from(path.as_str()))
        } else {
            None
        };
        if is_file {
            results.push(path);
        } else if !exists && !recursive {
            results.append(glob_walk(&path).unwrap_or_default().as_mut());
        } else {
            // Existing directory, or glob pattern with recursion: walk the
            // tree (rooted at "." for patterns).
            results.append(
                search_directory(
                    if exists { path.as_str() } else { "." },
                    recursive,
                    glob_pattern,
                )
                .await
                .unwrap_or(Vec::new())
                .as_mut(),
            );
        }
    }
    let ignore_patterns = get_ignore_files(ignore_file).await;
    results.retain(|f| {
        !ignore_patterns
            .iter()
            .any(|ignore| ignore_matches(ignore.as_str(), f.as_str()))
    });
    for r in results.clone() {
        debug!("file: {}", r);
    }
    Ok(results)
}
/// Prints a three-column table (path, test name, tags) of the given
/// tests, sorted by filename, with each column padded to its widest cell.
fn print_test_info(mut tests: Vec<test::Definition>) {
    let mut path_column = vec!["PATH".to_string()];
    let mut name_column = vec!["TEST NAME".to_string()];
    let mut tags_column = vec!["TAGS".to_string()];
    tests.sort_by_key(|td| td.file_data.filename.clone());
    for td in tests {
        name_column.push(td.name.unwrap_or("<none>".to_string()));
        let tags = if td.tags.is_empty() {
            "<none>".to_string()
        } else {
            td.tags.join(", ")
        };
        tags_column.push(tags);
        path_column.push(td.file_data.filename);
    }
    let column_width = |v: &Vec<String>| v.iter().map(String::len).max().unwrap_or(0);
    let max_name_size = column_width(&name_column);
    let max_tags_size = column_width(&tags_column);
    let max_path_size = column_width(&path_column);
    for ((n, t), p) in name_column.into_iter().zip(tags_column).zip(path_column) {
        info!(
            "{: <max_path_size$} {: <max_name_size$} {: <max_tags_size$} \n",
            p, n, t
        );
    }
}
/// End-to-end driver for the run/dryrun/list/format/validate modes:
/// resolves files, loads config, filters by tags, then dispatches on
/// `execution_mode`. Non-executing modes return an empty report.
async fn run_tests(
    paths: Vec<String>,
    tags: Vec<String>,
    tags_or: bool,
    execution_mode: ExecutionMode,
    recursive: bool,
    project: Option<String>,
    environment: Option<String>,
    config_file: Option<String>,
    junit_file: Option<String>,
    cli_args: Box<serde_json::Value>,
) -> Result<executor::Report, Box<dyn Error + Send + Sync>> {
    let mut cli_paths = paths;
    // No paths given: search the current directory.
    if cli_paths.is_empty() {
        cli_paths.push(".".to_string())
    }
    let cli_tag_mode = if tags_or { TagMode::OR } else { TagMode::AND };
    let config = config::get_config(config_file).await;
    let files = get_files(cli_paths, std::path::Path::new(IGNORE_FILE), recursive).await?;
    // Pluralizes nouns in user-facing messages ("1 test" vs "2 tests").
    let plurality_policy = |count: usize| match count {
        1 => "",
        _ => "s",
    };
    // CLI flags take precedence over config-file settings.
    let project = project.or(config.clone().settings.project);
    let environment = environment.or(config.clone().settings.environment);
    if config.settings.bypass_cert_verification {
        warn!("WARNING: SSL certificate verification is disabled.\nIf this is not intentional please adjust your config settings.\nFor more information please check our docs: https://www.jikken.io/docs/configuration/");
        log::logger().flush();
    }
    info!(
        "Jikken found {} test file{}.\n\n",
        files.len(),
        plurality_policy(files.len())
    );
    if files.is_empty() {
        return Ok(executor::Report::default());
    }
    let filters_specified = !tags.is_empty();
    let (tests_to_run, tests_to_ignore) =
        executor::tests_from_files(&config, files, tags, project, environment, cli_tag_mode);
    if execution_mode == ExecutionMode::List {
        let number_of_tests_to_run = tests_to_run.len();
        print_test_info(tests_to_run);
        if filters_specified {
            info!(
                "\n{} test{} matched provided filters.\n",
                number_of_tests_to_run,
                plurality_policy(number_of_tests_to_run)
            );
        }
        return Ok(executor::Report::default());
    }
    if execution_mode == ExecutionMode::Format {
        // Rewrite each test file from its parsed representation,
        // normalizing the YAML formatting.
        for td in &tests_to_run {
            let mut file = fs::File::create(&td.file_data.filename).await?;
            let file_data = serde_yaml::to_string(&td.file_data).unwrap();
            file.write_all(file_data.as_bytes()).await?;
        }
        info!(
            "Successfully formatted {} test files.\n",
            &tests_to_run.len()
        );
        return Ok(executor::Report::default());
    }
    if let ExecutionMode::Validate(generate) = execution_mode {
        // Platform-ID validation only applies with a well-formed API key.
        if let Some(token) = &config.settings.api_key {
            if uuid::Uuid::parse_str(token).is_ok() {
                let validation_results =
                    telemetry::validate_platform_ids(tests_to_run.iter().collect());
                let mut has_missing = false;
                if let Err(failures) = validation_results {
                    for failure in failures {
                        if generate && failure.1 == PlatformIdFailure::Missing {
                            // Mint a fresh ULID and rewrite the test file with it.
                            let platform_id = Ulid::new().to_string();
                            let mut test = tests_to_run[failure.0.index].clone();
                            test.file_data.platform_id = Some(platform_id.clone());
                            let mut file = fs::File::create(&test.file_data.filename).await?;
                            let file_data = serde_yaml::to_string(&test.file_data).unwrap();
                            file.write_all(file_data.as_bytes()).await?;
                            info!(
                                "Successfully updated test at path \"{}\" with platform ID {}.\n",
                                test.file_data.filename, platform_id
                            );
                        } else {
                            if !has_missing && failure.1 == PlatformIdFailure::Missing {
                                has_missing = true;
                            }
                            executor::print_validation_failures(vec![failure], true);
                        }
                    }
                    if has_missing {
                        warn!("\nRun the validate command with the --generate-platform-ids option to automatically generate and insert missing platform IDs.");
                    }
                }
            } else {
                error!("API Key {} is invalid", &token);
            }
        }
        return Ok(executor::Report::default());
    }
    let report = executor::execute_tests(
        config,
        tests_to_run,
        execution_mode == ExecutionMode::Dryrun,
        tests_to_ignore,
        junit_file,
        cli_args,
    )
    .await;
    let runtime_label = executor::runtime_formatter(report.runtime);
    if report.skipped > 0 {
        info!(
            "Jikken executed {} test{} from {} file{} with {} passed, {} skipped, and {} failed in {}.\n",
            report.run,
            plurality_policy(report.run.into()),
            report.test_files,
            plurality_policy(report.test_files as usize),
            report.passed,
            report.skipped,
            report.failed,
            runtime_label,
        );
    } else {
        info!(
            "Jikken executed {} test{} from {} file{} with {} passed and {} failed in {}.\n",
            report.run,
            plurality_policy(report.run.into()),
            report.test_files,
            plurality_policy(report.test_files as usize),
            report.passed,
            report.failed,
            runtime_label,
        );
    }
    Ok(report)
}
/*
Result is converted to an exit code implicitly,
but it prints a message we don't like. So we're
forced to resort to this
*/
/// Maps a `Result` onto a process exit code, optionally printing the error.
///
/// Any `Ok` yields `ExitCode::SUCCESS`; any `Err` yields `ExitCode::FAILURE`,
/// writing the error to stderr first when `print` is set.
fn result_to_exit_code<T>(
    res: Result<T, Box<dyn Error + Send + Sync>>,
    print: bool,
) -> std::process::ExitCode {
    if let Err(e) = res {
        if print {
            eprintln!("Error: {}", e);
        }
        return std::process::ExitCode::FAILURE;
    }
    std::process::ExitCode::SUCCESS
}
/// Converts the outcome of a test run into an exit code: an outright error,
/// or any failed test in the report, maps to a failing exit status.
fn result_report_to_exit_code(
    res: Result<executor::Report, Box<dyn Error + Send + Sync>>,
) -> std::process::ExitCode {
    match res {
        Ok(r) => report_to_exit_code(r),
        Err(_) => result_to_exit_code(res, true),
    }
}
/// Success only when no tests in the report failed.
fn report_to_exit_code(report: executor::Report) -> std::process::ExitCode {
    match report.failed {
        0 => std::process::ExitCode::SUCCESS,
        _ => std::process::ExitCode::FAILURE,
    }
}
#[tokio::main]
async fn main() -> std::process::ExitCode {
    // Best effort: enables ANSI escape handling (matters on Windows consoles).
    let _ = enable_ansi_support::enable_ansi_support();
    let cli = Cli::parse();
    // Serialized copy of the CLI invocation, forwarded to the executor layer.
    let cli_args = Box::new(serde_json::to_value(&cli).unwrap());
    // --verbose (Debug) takes precedence over --trace when both are supplied.
    let log_level = if cli.verbose {
        Level::Debug
    } else if cli.trace {
        Level::Trace
    } else {
        Level::Info
    };
    // Third argument marks the test-executing commands (Run/DryRun);
    // presumably toggles run-mode output in SimpleLogger — see its ctor.
    let my_logger = SimpleLogger::new(
        log_level,
        cli.quiet,
        matches!(cli.command, Commands::Run { .. } | Commands::DryRun { .. }),
    );
    if let Err(e) = log::set_boxed_logger(Box::new(my_logger)) {
        error!("Error creating logger: {}", e);
        panic!("unable to create logger");
    }
    // Let every record through the global filter; SimpleLogger applies the
    // effective level itself.
    log::set_max_level(LevelFilter::Trace);
    let cli_project = cli.project;
    let cli_environment = cli.environment;
    let exit_code: std::process::ExitCode = match cli.command {
        Commands::Run {
            tags,
            tags_or,
            recursive,
            paths,
            junit,
        } => {
            updater::check_for_updates().await;
            log::logger().flush();
            check_supplied_config_file_existence(&cli.config_file);
            result_report_to_exit_code(
                run_tests(
                    paths,
                    tags,
                    tags_or,
                    ExecutionMode::Run,
                    recursive,
                    cli_project,
                    cli_environment,
                    cli.config_file,
                    junit,
                    cli_args,
                )
                .await,
            )
        }
        Commands::DryRun {
            tags,
            tags_or,
            recursive,
            paths,
            junit,
        } => {
            // \todo create a runner function that takes an Fn trait and
            // eliminates the duplicated code
            updater::check_for_updates().await;
            log::logger().flush();
            check_supplied_config_file_existence(&cli.config_file);
            result_report_to_exit_code(
                run_tests(
                    paths,
                    tags,
                    tags_or,
                    ExecutionMode::Dryrun,
                    recursive,
                    cli_project,
                    cli_environment,
                    cli.config_file,
                    junit,
                    // NOTE(review): unlike `Run`, this passes Null rather than
                    // `cli_args` — confirm this is intentional.
                    Box::new(serde_json::Value::Null),
                )
                .await,
            )
        }
        Commands::List {
            tags,
            tags_or,
            recursive,
            paths,
        } => {
            updater::check_for_updates().await;
            check_supplied_config_file_existence(&cli.config_file);
            result_report_to_exit_code(
                run_tests(
                    paths,
                    tags,
                    tags_or,
                    ExecutionMode::List,
                    recursive,
                    cli_project,
                    cli_environment,
                    cli.config_file,
                    // List/Format/Validate never produce junit output.
                    None,
                    cli_args,
                )
                .await,
            )
        }
        Commands::Format {
            tags,
            tags_or,
            recursive,
            paths,
        } => {
            updater::check_for_updates().await;
            check_supplied_config_file_existence(&cli.config_file);
            result_report_to_exit_code(
                run_tests(
                    paths,
                    tags,
                    tags_or,
                    ExecutionMode::Format,
                    recursive,
                    cli_project,
                    cli_environment,
                    cli.config_file,
                    None,
                    cli_args,
                )
                .await,
            )
        }
        Commands::Validate {
            tags,
            tags_or,
            recursive,
            paths,
            generate_platform_ids,
        } => {
            updater::check_for_updates().await;
            check_supplied_config_file_existence(&cli.config_file);
            result_report_to_exit_code(
                run_tests(
                    paths,
                    tags,
                    tags_or,
                    ExecutionMode::Validate(generate_platform_ids),
                    recursive,
                    cli_project,
                    cli_environment,
                    cli.config_file,
                    None,
                    cli_args,
                )
                .await,
            )
        }
        Commands::New {
            full,
            openapi_spec_path,
            multistage,
            output,
            name,
        } => {
            updater::check_for_updates().await;
            // With a spec path, scaffold tests from the OpenAPI document;
            // otherwise emit a blank template.
            match openapi_spec_path {
                Some(path) => result_to_exit_code(
                    new::create_tests_from_openapi_spec(path.as_str(), full, multistage, name),
                    false,
                ),
                None => result_to_exit_code(
                    new::create_test_template(full, multistage, output, name).await,
                    false,
                ),
            }
        }
        Commands::Update => {
            updater::try_updating().await;
            std::process::ExitCode::SUCCESS
        }
    };
    // Ensure buffered log output is written before the process exits.
    log::logger().flush();
    exit_code
}
//------------------TESTS---------------------------------
//mod file_capture{
#[cfg(test)]
mod tests {
use std::io::Write;
// Note this useful idiom: importing names from outer (for mod tests) scope.
use super::*;
use std::fs::File;
use tempfile::tempdir;
struct DirectoryFixture {
pub temp_dir: tempfile::TempDir,
}
//todo: support arbitrary depth
impl DirectoryFixture {
fn new(file_names: &[&str]) -> DirectoryFixture {
let tmp_dir = tempdir().unwrap();
let tmp_path = tmp_dir.path();
let _: Vec<std::fs::File> = file_names
.into_iter()
.map(|f| File::create(tmp_path.join(f)).unwrap())
.collect();
DirectoryFixture { temp_dir: tmp_dir }
}
fn path_string(&self) -> String {
self.temp_dir.path().to_str().unwrap().to_string()
}
fn create_ignore_file(&self, file_names: &[&str]) {
let ignore_file = self.temp_dir.path().join(".jikkenignore");
let str_to_write = file_names.join("\n");
_ = File::create(&ignore_file)
.unwrap()
.write(str_to_write.as_bytes());
}
async fn get_files(&self, recursive: bool, path: Option<String>) -> Vec<String> {
get_files(
vec![path.unwrap_or(self.path_string())],
self.temp_dir.path().join(".jikkenignore").as_path(),
recursive,
)
.await
.unwrap()
}
}
#[tokio::test]
async fn get_files_with_glob_ignore() {
let dir_fixture = DirectoryFixture::new(
vec![
"random_file",
"my_test.jkt",
"something_else",
"my_test_2.jkt",
]
.as_slice(),
);
dir_fixture.create_ignore_file(
vec![dir_fixture
.temp_dir
.path()
.join("my_test*")
.to_str()
.unwrap_or_default()]
.as_slice(),
);
assert_eq!(0, dir_fixture.get_files(true, None).await.len());
}
#[tokio::test]
async fn get_files_with_dir_ignore() {
let dir_fixture = DirectoryFixture::new(
vec![
"random_file",
"my_test.jkt",
"something_else",
"my_test_2.jkt",
]
.as_slice(),
);
dir_fixture.create_ignore_file(
vec![dir_fixture.temp_dir.path().to_str().unwrap_or_default()].as_slice(),
);
assert_eq!(0, dir_fixture.get_files(true, None).await.len());
}
#[tokio::test]
async fn get_files_with_filename_ignore() {
let dir_fixture = DirectoryFixture::new(
vec![
"random_file",
"my_test.jkt",
"something_else",
"my_test_2.jkt",
]
.as_slice(),
);
dir_fixture.create_ignore_file(
vec![
dir_fixture
.temp_dir
.path()
.join("my_test.jkt")
.to_str()
.unwrap_or_default(),
dir_fixture
.temp_dir
.path()
.join("my_test_2.jkt")
.to_str()
.unwrap_or_default(),
]
.as_slice(),
);
assert_eq!(0, dir_fixture.get_files(true, None).await.len());
}
#[tokio::test]
async fn get_files_with_one_level_of_depth_recursively() {
let dir_fixture = DirectoryFixture::new(
vec![
"random_file",
"my_test.jkt",
"something_else",
"my_test_2.jkt",
]
.as_slice(),
);
assert_eq!(2, dir_fixture.get_files(true, None).await.len());
}
#[tokio::test]
async fn get_files_with_one_level_of_depth_non_recursively() {
let dir_fixture = DirectoryFixture::new(
vec![
"random_file",
"my_test.jkt",
"something_else",
"my_test_2.jkt",
]
.as_slice(),
);
assert_eq!(2, dir_fixture.get_files(false, None).await.len());
}
#[tokio::test]
async fn get_files_with_one_level_of_depth_non_recursively_globbing() {
let dir_fixture = DirectoryFixture::new(
vec![
"random_file",
"my_test.jkt",
"something_else",
"my_test_2.jkt",
]
.as_slice(),
);
assert_eq!(
1,
dir_fixture
.get_files(
false,
Some(
dir_fixture
.temp_dir
.path()
.join("*_2*")
.to_str()
.unwrap_or_default()
.to_string()
)
)
.await
.len()
);
}
#[tokio::test]
async fn get_files_with_recursive_globbing() {
let tmp_dir = tempdir().unwrap();
let tmp_path = tmp_dir.path();
let glob_path_str = "*_2*";
_ = std::env::set_current_dir(tmp_path);
{
//Begin Scope
let _: Vec<std::fs::File> = vec![
"random_file",
"my_test.jkt",
"something_else",
"my_test_2.jkt",
]
.iter()
.map(|f| File::create(tmp_path.join(f)).unwrap())
.collect();
let found_files = get_files(
vec![String::from(glob_path_str)],
std::path::Path::new(".does_not_exist"),
true,
)
.await;
assert_eq!(1, found_files.unwrap().len());
} //End Scope
_ = tmp_dir.close();
}
} // mod tests
//}
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/machine.rs | src/machine.rs | use log::trace;
use sha2::{Digest, Sha256};
use sysinfo::{CpuExt, CpuRefreshKind, System, SystemExt};
/// Builds a `Machine` with CPU metadata pre-loaded. Frequency and usage
/// sampling are explicitly skipped — only static CPU details are needed to
/// derive the machine id.
pub fn new() -> Machine {
    let mut system = System::new();
    system.refresh_cpu_specifics(
        CpuRefreshKind::everything()
            .without_frequency()
            .without_cpu_usage(),
    );
    Machine { system }
}
/// Wrapper around `sysinfo::System`, used to derive a stable, anonymized
/// identifier for the current host.
pub struct Machine {
    system: sysinfo::System,
}
impl Machine {
    /// Derives a stable identifier for the current host by hashing a
    /// colon-separated summary of CPU count, CPU brand, OS, kernel, and
    /// hostname. Returns the SHA-256 of that summary as hex.
    pub fn generate_machine_id(&self) -> String {
        let cpus = self.system.cpus();
        // Empty brand when no CPU information is available.
        let cpu_vendor = cpus.first().map(|c| c.brand()).unwrap_or("");
        let machine_id = [
            cpus.len().to_string(),
            cpu_vendor.to_string(),
            self.system.long_os_version().unwrap_or_default(),
            self.system.kernel_version().unwrap_or_default(),
            self.system.host_name().unwrap_or_default(),
        ]
        .join(":");
        trace!("machine_id: {}", machine_id);
        let hashed = hash(&machine_id);
        trace!("hashed machine_id: {}", hashed);
        hashed
    }
}
/// SHA-256 of `value`, rendered as a lowercase hex string.
fn hash(value: &str) -> String {
    hex::encode(Sha256::digest(value.as_bytes()))
}
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/updater.rs | src/updater.rs | use bytes::{Bytes, BytesMut};
use http_body_util::{BodyExt, Empty};
use hyper::Request;
use hyper_util::{client::legacy::Client, rt::TokioExecutor};
use log::{debug, error, info, warn};
use regex::Regex;
use remove_dir_all::remove_dir_all;
use serde::Deserialize;
use std::{
cmp::Ordering,
env,
error::Error,
io::{stdout, Cursor, Write},
};
use tokio::io::AsyncWriteExt;
const UPDATE_URL: &str = "https://api.jikken.io/v1/latest_version";
const VERSION: &str = env!("CARGO_PKG_VERSION");
/// Payload returned by the Jikken release endpoint describing the newest
/// available build for this platform/channel.
#[derive(Deserialize)]
pub struct ReleaseResponse {
    /// Version string of the release, e.g. "0.7.1".
    pub version: String,
    /// Download URL of the platform-specific archive.
    pub url: String,
}
/// A dotted version string (e.g. "0.7.0"), compared after normalization so
/// that extraneous trailing ".0" components do not affect equality/ordering.
#[derive(Eq)]
pub struct Version(String);
impl Version {
    /// Returns the version with all trailing ".0" components removed, so
    /// e.g. "0.6.1.0" normalizes to "0.6.1" and "1.0.0" to "1".
    ///
    /// The previous implementation used the regex `(.0)?$` whose dot was
    /// unescaped: it matched ANY character followed by '0', so "1.10"
    /// wrongly normalized to "1.", and only a single trailing ".0" was
    /// stripped. A plain `strip_suffix` loop fixes both and needs no regex.
    fn normalized(&self) -> String {
        let mut normalized = self.0.as_str();
        while let Some(stripped) = normalized.strip_suffix(".0") {
            normalized = stripped;
        }
        normalized.to_string()
    }
}
impl PartialEq for Version {
    fn eq(&self, other: &Self) -> bool {
        self.normalized() == other.normalized()
    }
}
impl Ord for Version {
    // Lexicographic comparison of the normalized strings (same ordering the
    // previous `<`/`>` String comparison produced).
    fn cmp(&self, other: &Self) -> Ordering {
        self.normalized().cmp(&other.normalized())
    }
}
impl PartialOrd for Version {
    fn partial_cmp(&self, other: &Self) -> Option<Ordering> {
        Some(self.cmp(other))
    }
}
/// Downloads the release archive at `url`, extracts it, and replaces the
/// currently running executable with the new binary.
///
/// # Errors
/// Returns an error when the URL has no file-name component, the download or
/// save fails, extraction fails, or the executable cannot be replaced.
/// Propagating `Err` (instead of logging and returning `Ok(())` as before)
/// matters: callers such as `try_updating` report "update completed" for any
/// `Ok` result, which previously masked failures.
async fn update(url: &str) -> Result<(), Box<dyn Error + Send + Sync>> {
    info!("Jikken is updating to the latest version...");
    stdout().flush().unwrap();
    // The archive name is the last path segment of the download URL.
    let file_name = url.split('/').last().ok_or("error: invalid url")?;
    let tmp_dir = tempfile::Builder::new().tempdir_in(::std::env::current_dir()?)?;
    let tmp_tarball_path = tmp_dir.path().join(file_name);
    let mut tmp_tarball = tokio::fs::File::create(&tmp_tarball_path).await?;
    let response = reqwest::get(url).await?;
    let mut content = Cursor::new(response.bytes().await?);
    // (typo fix: was `save_file_reuslts`)
    let save_file_results = tmp_tarball.write_all_buf(&mut content).await;
    if let Err(error) = save_file_results {
        return Err(format!("error saving downloaded file: {}", error).into());
    }
    // Windows releases ship as .zip; everything else as .tar.gz.
    if env::consts::OS == "windows" {
        self_update::Extract::from_source(&tmp_tarball_path)
            .archive(self_update::ArchiveKind::Zip)
            .extract_into(tmp_dir.path())?;
    } else {
        self_update::Extract::from_source(&tmp_tarball_path)
            .archive(self_update::ArchiveKind::Tar(Some(
                self_update::Compression::Gz,
            )))
            .extract_into(tmp_dir.path())?;
    }
    let tmp_file = tmp_dir.path().join("replacement_tmp");
    let bin_path = match env::consts::OS {
        "windows" => tmp_dir.path().join("jk.exe"),
        _ => tmp_dir.path().join("jk"),
    };
    // Swap the new binary into place via a temp file so the running
    // executable is replaced as atomically as the platform allows.
    self_update::Move::from_source(&bin_path)
        .replace_using_temp(&tmp_file)
        .to_dest(&::std::env::current_exe()?)?;
    drop(tmp_tarball);
    _ = remove_dir_all(tmp_dir);
    Ok(())
}
/// True when `new_version` is strictly newer than the running build's version.
fn has_newer_version(new_version: Version) -> bool {
    new_version > Version(crate::VERSION.to_string())
}
/// Queries the release endpoint for the newest stable build for this platform.
///
/// Returns `Ok(Some(_))` only when the advertised version is strictly newer
/// than the running build; `Ok(None)` otherwise (including when the response
/// body fails to parse as a `ReleaseResponse`).
pub async fn get_latest_version() -> Result<Option<ReleaseResponse>, Box<dyn Error + Send + Sync>> {
    let client: Client<_, Empty<Bytes>> =
        Client::builder(TokioExecutor::new()).build(crate::telemetry::get_connector());
    let req = Request::builder()
        .uri(format!(
            "{}?channel=stable&platform={}",
            UPDATE_URL,
            env::consts::OS
        ))
        .body(Empty::new())?;
    let resp = client.request(req).await?;
    let (_, mut body) = resp.into_parts();
    // Accumulate the streamed body frames into one contiguous buffer.
    let mut body_bytes = BytesMut::new();
    while let Some(next) = body.frame().await {
        // NOTE(review): a transport error mid-stream panics here via unwrap —
        // consider propagating the error instead.
        let frame = next.unwrap();
        if let Some(chunk) = frame.data_ref() {
            body_bytes.extend(chunk);
        }
    }
    if let Ok(r) = serde_json::from_slice::<ReleaseResponse>(&body_bytes) {
        if has_newer_version(Version(r.version.clone())) {
            return Ok(Some(r));
        }
    }
    Ok(None)
}
/// Fetches the latest release and, when one is newer than the running build,
/// downloads and installs it. Any path that does not attempt an update falls
/// through to the final "unable to find an update" message.
pub async fn try_updating() {
    let latest_version = get_latest_version().await;
    match latest_version {
        Ok(lv_opt) => {
            if let Some(lv) = lv_opt {
                match update(&lv.url).await {
                    Ok(_) => {
                        info!("update completed\n");
                    }
                    Err(error) => {
                        error!(
                            "Jikken encountered an error when trying to update itself: {}",
                            error
                        );
                    }
                }
                // An update was attempted (successfully or not), so skip the
                // "no update found" message below.
                return;
            }
        }
        Err(error) => {
            debug!("error checking for updates: {}", error);
        }
    }
    error!("Jikken was unable to find an update for this platform and release channel");
}
pub async fn check_for_updates() {
match get_latest_version().await {
Ok(latest_version) => {
if let Some(latest) = latest_version {
warn!(
"Jikken found new version ({}), currently running version ({})",
latest.version, VERSION
);
warn!("Run command: `jk update` to update jikken or update using your package manager");
}
}
Err(error) => {
debug!("error checking for updates: {}", error);
}
}
}
#[cfg(test)]
mod tests {
    use super::*;
    // Idiom fix: the explicit `-> ()` return annotations were removed —
    // clippy's `unused_unit` flags them and `fn name()` is the standard form.
    #[test]
    fn version_less() {
        assert!(Version("0.6.1".to_string()) < Version("0.7.0".to_string()));
    }
    #[test]
    fn version_greater() {
        assert!(Version("0.7.1".to_string()) > Version("0.7.0".to_string()));
    }
    #[test]
    fn version_greater_or_equal() {
        assert!(Version("0.7.1".to_string()) >= Version("0.7.0".to_string()));
    }
    #[test]
    fn version_less_or_equal() {
        assert!(Version("0.6.0.0".to_string()) <= Version("0.7.0".to_string()));
    }
    #[test]
    fn version_equal() {
        assert!(Version("0.6.1".to_string()) == Version("0.6.1".to_string()));
        assert!(Version("0.6.1".to_string()) == Version("0.6.1.0".to_string()));
    }
    #[test]
    fn version_not_equal() {
        assert!(Version("0.6.1".to_string()) != Version("0.6.2".to_string()));
    }
    #[test]
    fn newer_version_checkings() {
        assert!(!has_newer_version(Version("0.6.1".to_string())));
    }
} //mod tests
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/new.rs | src/new.rs | use super::{errors::GenericError, test::template};
use log::{error, info};
use crate::test::{
file::{NumericSpecification, ValueOrNumericSpecification},
http, File,
};
use std::{error::Error, io::Write};
use tokio::{fs, io::AsyncWriteExt};
/// Joins tags into a comma-separated string; `None` when there are no tags.
fn create_tags(tags: &[String]) -> Option<String> {
    match tags {
        [] => None,
        _ => Some(tags.join(",")),
    }
}
/// Maps an OpenAPI status-code pattern onto a response matcher: the wildcard
/// "2XX" becomes a 200..=299 range specification, an exact numeric code
/// becomes a literal value, and anything unparsable yields `None`.
fn create_status_code(status_code_pattern: &str) -> Option<ValueOrNumericSpecification<u16>> {
    match status_code_pattern {
        "2XX" => {
            let range = NumericSpecification {
                specification: None,
                min: Some(200),
                max: Some(299),
            };
            Some(ValueOrNumericSpecification::Schema(range))
        }
        exact => exact.parse().ok().map(ValueOrNumericSpecification::Value),
    }
}
/// Builds a relative output filename from a URL path and HTTP verb: the
/// non-empty path segments become directories (platform separators) and the
/// file is named "{verb:?}.jkt". An empty path falls back to "ROOT".
fn create_filename(path_string: &str, verb: &http::Verb) -> String {
    let segments: Vec<&str> = path_string.split('/').filter(|s| !s.is_empty()).collect();
    let dir = if segments.is_empty() {
        "ROOT".to_string()
    } else {
        segments.join(std::path::MAIN_SEPARATOR_STR)
    };
    std::path::PathBuf::from(dir)
        .join(format!("{:?}.jkt", verb))
        .to_str()
        .unwrap()
        .to_string()
}
mod openapi_legacy {
use super::*;
use crate::{
test,
test::{
file,
file::{
generate_value_from_schema, DatumSchema, FloatSpecification, IntegerSpecification,
SimpleValueVariable, Specification, StringSpecification, UnvalidatedRequest,
UnvalidatedResponse, UnvalidatedVariableNameOrDatumSchema,
UnvalidatedVariableNameOrValue, ValueOrDatumSchema,
},
},
};
use openapiv3::{
IndexMap, OpenAPI, Operation, Parameter, PathItem, RefOr, Responses, Schema, Server,
VariantOrUnknownOrEmpty, VersionedOpenAPI,
};
use std::{
collections::{hash_map::RandomState, BTreeMap},
io::BufReader,
};
fn create_headers(
headers: &IndexMap<String, RefOr<openapiv3::Header>, RandomState>,
) -> Option<Vec<test::http::Header>> {
let ret: Vec<test::http::Header> = headers
.iter()
.map(|(name, _)| test::http::Header {
header: name.clone(),
matches_variable: std::cell::Cell::new(false),
value: String::default(),
})
.collect();
if !ret.is_empty() {
Some(ret)
} else {
None
}
}
    /// Builds the expected-response section from an operation's 2xx
    /// responses. When several 2xx entries exist, only the last one is kept
    /// (see `.last()` below). The JSON body example is generated from the
    /// response schema when one is declared.
    fn create_response(responses: &Responses, spec: &OpenAPI) -> Option<UnvalidatedResponse> {
        responses
            .responses
            .iter()
            .map(|(sc, obj_or_ref)| (sc.to_string(), obj_or_ref))
            .filter(|(status_code_pattern, _)| status_code_pattern.starts_with('2'))
            .map(|(status_code_pattern, obj_or_ref)| {
                obj_or_ref.resolve(spec).ok().map(|t| {
                    // Pair of (generated example body, schema) from the
                    // application/json content, when resolvable.
                    let body_stuff = t.content.get("application/json").and_then(|content| {
                        content
                            .schema
                            .as_ref()
                            .and_then(|s| schema_to_datum(s.resolve(spec), spec))
                            .map(|ds| {
                                (
                                    generate_value_from_schema(&ds, 10)
                                        .map(UnvalidatedVariableNameOrValue::Component),
                                    UnvalidatedVariableNameOrDatumSchema::Component(ds),
                                )
                            })
                    });
                    UnvalidatedResponse {
                        status: create_status_code(status_code_pattern.as_str()),
                        time: None,
                        headers: create_headers(&t.headers),
                        extract: None,
                        ignore: None,
                        strict: None,
                        body: body_stuff.clone().and_then(|(v, _)| v),
                        // Schema emission is intentionally disabled here.
                        body_schema: None, //body_stuff.map(|(_, ds)| ds),
                    }
                })
            })
            .last()
            .flatten()
    }
fn schema_to_datum(schema: &Schema, spec: &OpenAPI) -> Option<DatumSchema> {
match &schema.kind {
openapiv3::SchemaKind::Type(t) => match t {
openapiv3::Type::Array(a) => {
let f = a
.items
.as_ref()
.and_then(|s| schema_to_datum(s.resolve(spec), spec));
Some(DatumSchema::List {
specification: Some(file::SequenceSpecification {
schema: f.map(|ds| {
file::ValuesOrSchema::Schemas(
Specification::<Box<DatumSchema>>::Value(Box::from(ds)),
)
}),
length: Option::None,
min_length: a.min_items.map(|s| s as i64),
max_length: a.max_items.map(|s| s as i64),
}),
})
}
openapiv3::Type::Boolean {} => Some(DatumSchema::Boolean {
specification: None,
}),
openapiv3::Type::Integer(int) => Some(DatumSchema::Integer {
specification: Some(IntegerSpecification {
max: int.maximum.map(|v| v + int.exclusive_maximum as i64),
min: int.minimum.map(|v| v + int.exclusive_minimum as i64),
..Default::default()
}),
}),
openapiv3::Type::Number(num) => Some(DatumSchema::Float {
specification: Some(FloatSpecification {
max: num.maximum.map(|v| v + num.exclusive_maximum as i16 as f64),
min: num.minimum.map(|v| v + num.exclusive_minimum as i16 as f64),
..Default::default()
}),
}),
openapiv3::Type::Object(obj) => {
let f = &obj
.properties
.iter()
.map(|(name, prop)| {
(name.clone(), schema_to_datum(prop.resolve(spec), spec))
})
.filter(|(_, s)| s.is_some())
.map(|(n, s)| (n, ValueOrDatumSchema::Datum(s.unwrap())))
.collect::<BTreeMap<String, ValueOrDatumSchema>>();
if f.is_empty() {
Some(DatumSchema::Object { schema: None })
} else {
Some(DatumSchema::Object {
schema: Some(f.clone()),
})
}
}
openapiv3::Type::String(string) => {
let string_spec = StringSpecification {
length: Option::None,
max_length: string.max_length.map(|s| s as i64),
min_length: string.min_length.map(|s| s as i64),
pattern: string.pattern.clone(),
..Default::default()
};
match &string.format {
VariantOrUnknownOrEmpty::<openapiv3::StringFormat>::Item(s) => match s {
openapiv3::StringFormat::Date => Some(DatumSchema::Date {
specification: None,
}),
openapiv3::StringFormat::DateTime => Some(DatumSchema::DateTime {
specification: None,
}),
_ => Some(DatumSchema::String {
specification: Some(string_spec),
}),
},
_ => Some(DatumSchema::String {
specification: Some(string_spec),
}),
}
}
},
_ => None,
}
}
    /// Builds the request section of a generated test: query parameters and
    /// headers become empty placeholders for the user to fill in; path and
    /// cookie parameters are skipped. A JSON request body example is
    /// generated from the declared schema when present.
    fn create_request(
        url: &str,
        verb: test::http::Verb,
        op: &openapiv3::Operation,
        spec: &OpenAPI,
    ) -> UnvalidatedRequest {
        let mut headers: Vec<test::http::Header> = vec![];
        let mut parameters: Vec<test::http::Parameter> = vec![];
        op.parameters.iter().for_each(|f| {
            if let Ok(Parameter { data, kind }) = f.resolve(spec) {
                match &kind {
                    openapiv3::ParameterKind::Query { .. } => {
                        parameters.push(test::http::Parameter {
                            param: data.name.clone(),
                            value: String::default(),
                            matches_variable: std::cell::Cell::new(false),
                        })
                    }
                    openapiv3::ParameterKind::Header { .. } => headers.push(test::http::Header {
                        header: data.name.clone(),
                        value: String::default(),
                        matches_variable: std::cell::Cell::new(false),
                    }),
                    openapiv3::ParameterKind::Path { .. } => (), //user will have to do this themselves, based upon generated template
                    openapiv3::ParameterKind::Cookie { .. } => (), //no cookie support
                }
            }
        });
        // (generated example body, schema) pair from the application/json
        // request body, when declared and resolvable.
        let body_schema = op.request_body.as_ref().and_then(|maybe_request| {
            maybe_request.resolve(spec).ok().and_then(|r| {
                r.content.get("application/json").and_then(|content| {
                    content
                        .schema
                        .as_ref()
                        .and_then(|s| schema_to_datum(s.resolve(spec), spec))
                        .map(|ds| {
                            (
                                generate_value_from_schema(&ds, 10)
                                    .map(UnvalidatedVariableNameOrValue::Component),
                                UnvalidatedVariableNameOrDatumSchema::Component(ds),
                            )
                        })
                })
            })
        });
        UnvalidatedRequest {
            body: body_schema.clone().and_then(|(v, _)| v),
            method: Some(verb),
            url: url.to_string(),
            headers: if headers.is_empty() {
                None
            } else {
                Some(headers)
            },
            params: if parameters.is_empty() {
                None
            } else {
                Some(parameters)
            },
        }
    }
    /// Chooses the server URLs generated tests should target: path-level
    /// servers win, then root-level, then operation-level, and finally the
    /// `fallback` literal when no servers are declared anywhere.
    ///
    /// NOTE(review): the OpenAPI spec resolves server precedence as
    /// operation > path > root; here operation-level servers are consulted
    /// last — confirm this ordering is intentional.
    pub fn get_test_paths(
        root_servers: &[Server],
        path_servers: &[Server],
        op_servers: &[Server],
        fallback: &str,
    ) -> Vec<String> {
        // Some(urls) only when the slice is non-empty, enabling `.or` chaining.
        let url_extractor = |servers: &[Server]| -> Option<Vec<String>> {
            if servers.is_empty() {
                None
            } else {
                Some(servers.iter().map(|s| s.url.clone()).collect())
            }
        };
        url_extractor(path_servers)
            .or(url_extractor(root_servers))
            .or(url_extractor(op_servers))
            .unwrap_or(vec![fallback.to_string()])
    }
    /// Generates one test file per applicable server URL for a single HTTP
    /// operation; returns an empty vec when the operation is absent. The
    /// "{url}" fallback becomes a `${url}` jikken variable downstream.
    fn create_tests_for_op(
        op: &Option<Operation>,
        path: &PathItem,
        path_string: &str,
        verb: test::http::Verb,
        full: bool,
        multistage: bool,
        spec: &OpenAPI,
    ) -> Vec<File> {
        op.clone()
            .map(|op| {
                get_test_paths(&spec.servers, &path.servers, &op.servers, "{url}")
                    .iter()
                    .flat_map(|url| {
                        create_test(
                            format!("{}{}", url, path_string).as_str(),
                            &op,
                            verb,
                            full,
                            multistage,
                            path_string,
                            spec,
                        )
                    })
                    .collect::<Vec<File>>()
            })
            .unwrap_or_default()
    }
fn create_variables(
op: &openapiv3::Operation,
spec: &OpenAPI,
) -> Option<Vec<test::file::UnvalidatedVariable>> {
let ret = op
.parameters
.iter()
.filter_map(|p_or_ref| {
p_or_ref
.resolve(spec)
.map(|t| {
test::file::UnvalidatedVariable::Simple(SimpleValueVariable {
name: t.name.clone(),
value: serde_json::Value::from("value".to_string()),
})
})
.ok()
})
.collect::<Vec<file::UnvalidatedVariable>>();
if ret.is_empty() {
None
} else {
Some(ret)
}
}
    /// Assembles one test `File` for an operation. The template defaults
    /// come from the `full`/`multistage` flags; `{param}` segments in the
    /// path are rewritten to `${param}` jikken variables. Delete operations
    /// and multistage tests use the staged layout; everything else uses the
    /// flat request/response layout.
    fn create_test(
        path: &str,
        op: &openapiv3::Operation,
        verb: test::http::Verb,
        full: bool,
        multistage: bool,
        path_string: &str,
        spec: &OpenAPI,
    ) -> Option<File> {
        let default = if full {
            test::template::template_full().unwrap()
        } else if multistage {
            test::template::template_staged().unwrap()
        } else {
            test::File::default()
        };
        // Turn OpenAPI path templating ("{id}") into jikken variables ("${id}").
        let resolved_path = path.replace('{', "${").to_string();
        let request = create_request(resolved_path.as_str(), verb, op, spec);
        let response =
            create_response(&op.responses, spec).or(Some(UnvalidatedResponse::default()));
        let variables = create_variables(op, spec);
        if multistage || verb == test::http::Verb::Delete {
            Some(File {
                name: op.summary.clone().or(default.name),
                description: op.description.clone(),
                id: op.operation_id.clone().or(default.id),
                tags: create_tags(&op.tags),
                stages: Some(vec![test::file::UnvalidatedStage {
                    request,
                    compare: None,
                    response,
                    variables,
                    name: None,
                    delay: None,
                }]),
                filename: create_filename(path_string, &verb),
                ..default
            })
        } else {
            Some(File {
                name: op.summary.clone().or(default.name),
                description: op.description.clone(),
                id: op.operation_id.clone().or(default.id),
                tags: create_tags(&op.tags),
                response,
                request: Some(request),
                filename: create_filename(path_string, &verb),
                variables,
                ..default
            })
        }
    }
fn create_tests(
path_string: &str,
path: &PathItem,
full: bool,
multistage: bool,
spec: &OpenAPI,
) -> Vec<File> {
let stuff: [(&Option<Operation>, test::http::Verb); 5] = [
(&path.get, test::http::Verb::Get),
(&path.post, test::http::Verb::Post),
(&path.delete, test::http::Verb::Delete),
(&path.patch, test::http::Verb::Patch),
(&path.put, test::http::Verb::Put),
];
stuff
.into_iter()
.flat_map(|(op, verb)| {
create_tests_for_op(op, path, path_string, verb, full, multistage, spec)
})
.collect()
}
    /// Parses a JSON OpenAPI document (any supported version, upgraded to
    /// the latest in-memory model) and generates test files for every inline
    /// path item. Referenced (non-inline) path items are skipped.
    ///
    /// # Errors
    /// Returns an error when the file cannot be opened or does not parse as
    /// a versioned OpenAPI document.
    pub fn create_tests_from_openapi_spec(
        file: &str,
        full: bool,
        multistage: bool,
    ) -> Result<Vec<File>, Box<dyn std::error::Error + Send + Sync>> {
        let file = std::fs::File::open(file)?;
        let reader = BufReader::new(file);
        let versioned_openapi: Result<VersionedOpenAPI, serde_json::Error> =
            serde_json::from_reader(reader);
        match versioned_openapi {
            Err(e) => Err(Box::from(e)),
            Ok(v) => {
                let openapi = v.upgrade();
                Ok(openapi
                    .paths
                    .iter()
                    .flat_map(|(path_string, ref_or_path)| match ref_or_path {
                        RefOr::Item(path) => {
                            create_tests(path_string, path, full, multistage, &openapi)
                        }
                        RefOr::Reference { .. } => Vec::default(),
                    })
                    .collect())
            }
        }
    }
}
mod openapi_v31 {
use super::*;
use crate::{
test,
test::file::{
DateSpecification, DateTimeSpecification, DatumSchema, EmailSpecification,
FloatSpecification, IntegerSpecification, SimpleValueVariable, Specification,
StringSpecification, UnvalidatedDateSpecification, UnvalidatedDatumSchemaVariable,
UnvalidatedFloatSpecification, UnvalidatedIntegerSpecification, UnvalidatedRequest,
UnvalidatedResponse, UnvalidatedStringSpecification, UnvalidatedValueOrDatumSchema,
UnvalidatedValuesOrSchema, UnvalidatedVariable, UnvalidatedVariableNameOrComponent,
UnvalidatedVariableNameOrDatumSchema, ValueOrDatumSchema, VariableName,
},
};
use oas3::spec::{Header, ObjectOrReference, Operation, PathItem, Response, Server, Spec};
use std::collections::BTreeMap;
use test::file::{SequenceSpecification, ValuesOrSchema};
    /// OpenAPI 3.1 twin of `openapi_legacy::get_test_paths`: path-level
    /// servers win, then root-level, then operation-level, then `fallback`.
    ///
    /// NOTE(review): the OpenAPI spec resolves server precedence as
    /// operation > path > root; operation-level servers are consulted last
    /// here — confirm this ordering is intentional.
    pub fn get_test_paths(
        root_servers: &[Server],
        path_servers: &[Server],
        op_servers: &[Server],
        fallback: &str,
    ) -> Vec<String> {
        // Some(urls) only when the slice is non-empty, enabling `.or` chaining.
        let url_extractor = |servers: &[Server]| -> Option<Vec<String>> {
            if servers.is_empty() {
                None
            } else {
                Some(servers.iter().map(|s| s.url.clone()).collect())
            }
        };
        url_extractor(path_servers)
            .or(url_extractor(root_servers))
            .or(url_extractor(op_servers))
            .unwrap_or(vec![fallback.to_string()])
    }
    /// Converts OpenAPI 3.1 response headers into placeholder test headers
    /// (names only, empty values); `None` when there are no headers.
    fn create_headers(
        headers: &BTreeMap<String, ObjectOrReference<Header>>,
    ) -> Option<Vec<test::http::Header>> {
        let ret: Vec<test::http::Header> = headers
            .iter()
            .map(|(name, _)| test::http::Header {
                header: name.clone(),
                matches_variable: std::cell::Cell::new(false),
                value: String::default(),
            })
            .collect();
        if !ret.is_empty() {
            Some(ret)
        } else {
            None
        }
    }
    /// Builds the expected-response section from an operation's 2xx
    /// responses (OpenAPI 3.1). When several 2xx entries exist only the last
    /// one is kept. Unlike the legacy path, no example body is generated —
    /// only the schema is attached.
    fn create_response(
        responses: &BTreeMap<String, ObjectOrReference<Response>>,
        spec: &Spec,
    ) -> Option<UnvalidatedResponse> {
        responses
            .iter()
            .filter(|(status_code_pattern, _)| status_code_pattern.starts_with('2'))
            .map(|(status_code_pattern, obj_or_ref)| {
                obj_or_ref.resolve(spec).ok().map(|t| UnvalidatedResponse {
                    status: create_status_code(status_code_pattern),
                    time: None,
                    body: None,
                    headers: create_headers(&t.headers),
                    extract: None,
                    ignore: None,
                    strict: None,
                    body_schema: t.content.get("application/json").and_then(|c| {
                        c.schema(spec).ok().and_then(|s| {
                            schema_to_datum(s, spec)
                                .map(UnvalidatedVariableNameOrDatumSchema::Component)
                        })
                    }),
                })
            })
            .last()
            .flatten()
    }
fn schema_to_datum(schema: oas3::Schema, spec: &Spec) -> Option<DatumSchema> {
schema.schema_type.map(|t| match t {
oas3::spec::SchemaType::Array => DatumSchema::List {
specification: Some(SequenceSpecification {
schema: schema.items.and_then(|items| {
items.resolve(spec).ok().and_then(|s| {
schema_to_datum(s, spec).map(|ds| {
ValuesOrSchema::Schemas(Specification::<Box<DatumSchema>>::Value(
Box::from(ds),
))
})
})
}),
length: Option::None,
max_length: schema.max_items.map(|n| n as i64),
min_length: schema.min_items.map(|n| n as i64),
}),
},
oas3::spec::SchemaType::Boolean => DatumSchema::Boolean {
specification: None,
},
oas3::spec::SchemaType::Integer => DatumSchema::Integer {
specification: Some(IntegerSpecification {
max: schema.maximum.and_then(|n| {
n.as_i64()
.map(|n| n + schema.exclusive_maximum.unwrap_or_default() as i64)
}),
min: schema.minimum.and_then(|n| {
n.as_i64()
.map(|n| n + schema.exclusive_minimum.unwrap_or_default() as i64)
}),
..Default::default()
}),
},
oas3::spec::SchemaType::Number => DatumSchema::Float {
specification: Some(FloatSpecification {
max: schema.maximum.and_then(|n| {
n.as_f64()
.map(|n| n + schema.exclusive_maximum.unwrap_or_default() as i64 as f64)
}),
min: schema.minimum.and_then(|n| {
n.as_f64()
.map(|n| n + schema.exclusive_minimum.unwrap_or_default() as i64 as f64)
}),
..Default::default()
}),
},
oas3::spec::SchemaType::Object => DatumSchema::Object {
schema: Some(
schema
.properties
.iter()
.filter_map(|(k, maybe_schema)| {
maybe_schema
.resolve(spec)
.ok()
.map(|v| {
(
k.clone(),
schema_to_datum(v, spec).map(ValueOrDatumSchema::Datum),
)
})
.filter(|(_, ds)| ds.is_some())
.map(|(n, ds)| (n, ds.unwrap()))
})
.collect::<BTreeMap<String, ValueOrDatumSchema>>(),
),
},
oas3::spec::SchemaType::String => {
let string_spec = StringSpecification {
pattern: schema.pattern,
length: Option::None,
max_length: schema.max_length.map(|n| n as i64),
min_length: schema.min_length.map(|n| n as i64),
..Default::default()
};
match schema.format.unwrap_or_default().as_str() {
"date" => DatumSchema::Date {
specification: Some(DateSpecification {
..Default::default()
}),
},
"date-time" => DatumSchema::DateTime {
specification: Some(DateTimeSpecification {
..Default::default()
}),
},
"email" => DatumSchema::Email {
specification: Some(EmailSpecification {
specification: string_spec,
}),
},
_ => DatumSchema::String {
specification: Some(string_spec),
},
}
}
})
}
/// Converts a resolved OpenAPI schema into an `UnvalidatedDatumSchemaVariable`,
/// recursing through array items and object properties. Returns `None` when
/// the schema carries no `type` information.
///
/// NOTE(review): `exclusive_maximum`/`exclusive_minimum` are boolean flags that
/// tighten the inclusive bounds by one. The previous code *added* 1 to the
/// maximum when `exclusive_maximum` was set, which loosened the bound instead
/// of tightening it: an exclusive maximum of M admits values <= M - 1, just as
/// an exclusive minimum of m admits values >= m + 1 (which the min branch
/// already handles by adding 1). The maximum is now decremented accordingly.
fn schema_to_unvalidated_datum(
    schema: oas3::Schema,
    spec: &Spec,
    name: Option<String>,
) -> Option<UnvalidatedDatumSchemaVariable> {
    schema.schema_type.map(|t| match t {
        oas3::spec::SchemaType::Array => {
            UnvalidatedDatumSchemaVariable::List(test::file::UnvalidatedSequenceSpecification {
                name,
                // Element schema (if resolvable) becomes an untagged nested schema.
                schema: schema.items.and_then(|items| {
                    items.resolve(spec).ok().and_then(|s| {
                        schema_to_unvalidated_datum(s, spec, None)
                            .map(|ds| UnvalidatedValuesOrSchema::UntaggedSchema(Box::from(ds)))
                    })
                }),
                length: None,
                max_length: schema.max_items.map(|n| n as i64),
                min_length: schema.min_items.map(|n| n as i64),
            })
        }
        oas3::spec::SchemaType::Boolean => {
            UnvalidatedDatumSchemaVariable::Boolean(test::file::UnvalidatedSpecification {
                name,
                value: None,
                any_of: None,
                one_of: None,
                none_of: None,
            })
        }
        oas3::spec::SchemaType::Integer => {
            UnvalidatedDatumSchemaVariable::Integer(UnvalidatedIntegerSpecification {
                // Exclusive max M -> inclusive max M - 1.
                max: schema.maximum.and_then(|n| {
                    n.as_i64()
                        .map(|n| n - schema.exclusive_maximum.unwrap_or_default() as i64)
                }),
                // Exclusive min m -> inclusive min m + 1.
                min: schema.minimum.and_then(|n| {
                    n.as_i64()
                        .map(|n| n + schema.exclusive_minimum.unwrap_or_default() as i64)
                }),
                name,
                ..Default::default()
            })
        }
        oas3::spec::SchemaType::Number => {
            UnvalidatedDatumSchemaVariable::Float(UnvalidatedFloatSpecification {
                // NOTE(review): shifting a float bound by 1.0 only approximates
                // exclusivity; kept symmetrical with the integer handling.
                max: schema.maximum.and_then(|n| {
                    n.as_f64()
                        .map(|n| n - schema.exclusive_maximum.unwrap_or_default() as i64 as f64)
                }),
                min: schema.minimum.and_then(|n| {
                    n.as_f64()
                        .map(|n| n + schema.exclusive_minimum.unwrap_or_default() as i64 as f64)
                }),
                name,
                ..Default::default()
            })
        }
        oas3::spec::SchemaType::Object => UnvalidatedDatumSchemaVariable::Object {
            name,
            // Properties that fail to resolve or convert are silently dropped.
            schema: Some(
                schema
                    .properties
                    .iter()
                    .filter_map(|(k, maybe_schema)| {
                        maybe_schema
                            .resolve(spec)
                            .ok()
                            .map(|v| {
                                (
                                    k.clone(),
                                    schema_to_unvalidated_datum(v, spec, None)
                                        .map(UnvalidatedValueOrDatumSchema::Datum),
                                )
                            })
                            .filter(|(_, ds)| ds.is_some())
                            .map(|(n, ds)| (n, ds.unwrap()))
                    })
                    .collect::<BTreeMap<String, UnvalidatedValueOrDatumSchema>>(),
            ),
        },
        oas3::spec::SchemaType::String => {
            let string_spec = UnvalidatedStringSpecification {
                pattern: schema.pattern,
                length: Option::None,
                max_length: schema.max_length.map(|n| n as i64),
                min_length: schema.min_length.map(|n| n as i64),
                ..Default::default()
            };
            // The OpenAPI `format` refines plain strings into richer datum kinds.
            match schema.format.unwrap_or_default().as_str() {
                "date" => UnvalidatedDatumSchemaVariable::Date(UnvalidatedDateSpecification {
                    name,
                    ..Default::default()
                }),
                "date-time" => {
                    UnvalidatedDatumSchemaVariable::DateTime(UnvalidatedDateSpecification {
                        name,
                        ..Default::default()
                    })
                }
                "email" => {
                    UnvalidatedDatumSchemaVariable::Email(UnvalidatedStringSpecification {
                        name,
                        ..string_spec
                    })
                }
                _ => UnvalidatedDatumSchemaVariable::String(UnvalidatedStringSpecification {
                    name,
                    ..string_spec
                }),
            }
        }
    })
}
/// Builds an `UnvalidatedRequest` template for one OpenAPI operation, plus an
/// optional `${body}` variable derived from the operation's JSON request body.
///
/// Query parameters and headers declared on the operation become empty-valued
/// placeholders; path and cookie parameters are intentionally skipped (see the
/// inline notes).
fn create_request(
    url: &str,
    verb: test::http::Verb,
    op: &oas3::spec::Operation,
    spec: &Spec,
) -> (UnvalidatedRequest, Option<UnvalidatedVariable>) {
    let mut headers: Vec<test::http::Header> = vec![];
    let mut parameters: Vec<test::http::Parameter> = vec![];
    op.parameters.iter().for_each(|f| {
        // Parameters whose $ref cannot be resolved are silently skipped.
        if let Ok(t) = f.resolve(spec) {
            match t.location.as_str() {
                "query" => parameters.push(test::http::Parameter {
                    param: t.name.clone(),
                    value: String::default(),
                    matches_variable: std::cell::Cell::new(false),
                }),
                "header" => headers.push(test::http::Header {
                    header: t.name.clone(),
                    value: String::default(),
                    matches_variable: std::cell::Cell::new(false),
                }),
                "path" => (), //user will have to do this themselves, based upon generated template
                "cookie" => (), //These will get picked up automatically as state vars
                _ => (),
            }
        }
    });
    // Only "application/json" bodies are templated: the schema becomes a datum
    // variable named "body", referenced from the request as ${body}.
    let maybe_variable = op.request_body.as_ref().and_then(|body| {
        body.resolve(spec).ok().and_then(|b| {
            b.content.get("application/json").and_then(|c| {
                c.schema(spec).ok().and_then(|s| {
                    schema_to_unvalidated_datum(s, spec, Some("body".to_string()))
                        .map(UnvalidatedVariable::Datum)
                })
            })
        })
    });
    (
        UnvalidatedRequest {
            // The body references the generated variable only when one exists.
            body: maybe_variable.as_ref().map(|_| {
                UnvalidatedVariableNameOrComponent::VariableName(VariableName(
                    "${body}".to_string(),
                ))
            }),
            method: Some(verb),
            url: url.to_string(),
            headers: if headers.is_empty() {
                None
            } else {
                Some(headers)
            },
            params: if parameters.is_empty() {
                None
            } else {
                Some(parameters)
            },
        },
        maybe_variable,
    )
}
fn create_variables(op: &Operation, spec: &Spec) -> Vec<test::file::UnvalidatedVariable> {
op.parameters
.iter()
.map(|p_or_ref| {
p_or_ref.resolve(spec).ok().map(|t| {
test::file::UnvalidatedVariable::Simple(SimpleValueVariable {
name: t.name.clone(),
value: serde_json::Value::from("".to_string()),
})
})
})
.filter(Option::is_some)
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | true |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/test/variable.rs | src/test/variable.rs | use serde::{Deserialize, Serialize};
use serde_json::Value;
use std::hash::Hash;
/// A transformation applied to a value (used e.g. by date/date-time
/// specifications): an operation name, an operand, and the operand's unit.
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize)]
pub struct Modifier {
    // Operation name, e.g. "add" or "subtract"; other strings pass through
    // `get_inverse` unchanged.
    pub operation: String,
    pub value: Value,
    pub unit: String,
}
impl Hash for Modifier {
    // Hash via the serialized JSON form so the untyped `Value` field (which has
    // no `Hash` impl) can participate.
    fn hash<H: std::hash::Hasher>(&self, state: &mut H) {
        serde_json::to_string(self).unwrap().hash(state);
    }
}
impl Modifier {
//We need an enum for modifier
pub fn get_inverse(&self) -> Modifier {
match self.operation.as_str() {
"add" => Self {
operation: "subtract".to_string(),
value: self.value.clone(),
unit: self.unit.clone(),
},
"subtract" => Self {
operation: "add".to_string(),
value: self.value.clone(),
unit: self.unit.clone(),
},
_ => self.clone(),
}
}
}
/// Returns the directory portion of `path`, always terminated with a '/'.
/// A path containing no '/' separator yields "./".
pub fn parse_source_path(path: &str) -> String {
    // Everything up to (excluding) the last separator; "./" when none exists.
    let dir = match path.rfind('/') {
        Some(i) => &path[..i],
        None => "./",
    };
    if dir.ends_with('/') {
        dir.to_string()
    } else {
        format!("{}/", dir)
    }
}
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/test/http.rs | src/test/http.rs | use serde::{Deserialize, Serialize, Serializer};
use std::{
cell::Cell,
fmt,
hash::{Hash, Hasher},
};
/// A single HTTP header (name/value pair) as written in a test file.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq, Eq)]
#[serde(deny_unknown_fields)]
pub struct Header {
    pub header: String,
    pub value: String,
    // Transient flag set during variable resolution; never (de)serialized.
    #[serde(skip_serializing, skip_deserializing)]
    pub matches_variable: Cell<bool>,
}
impl Header {
    /// Creates a header with the variable-match flag cleared.
    pub fn new(header: String, value: String) -> Header {
        Header {
            header,
            value,
            matches_variable: Cell::from(false),
        }
    }
}
// Hash only the header name and value; `matches_variable` is transient state
// (also skipped by serde) and must not influence the hash.
impl Hash for Header {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.header.hash(state);
        self.value.hash(state);
    }
    // The former manual `hash_slice` override was removed: it reproduced the
    // trait's default behavior (hash each element in order) verbatim.
}
/// A single query-string parameter (name/value pair) as written in a test file.
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(deny_unknown_fields)]
pub struct Parameter {
    pub param: String,
    pub value: String,
    // Transient flag set during variable resolution; never (de)serialized.
    #[serde(skip_serializing, skip_deserializing)]
    pub matches_variable: Cell<bool>,
}
// Hash only the parameter name and value; `matches_variable` is transient
// state (also skipped by serde) and must not influence the hash.
impl Hash for Parameter {
    fn hash<H: Hasher>(&self, state: &mut H) {
        self.param.hash(state);
        self.value.hash(state);
    }
    // The former manual `hash_slice` override was removed: it reproduced the
    // trait's default behavior (hash each element in order) verbatim.
}
/// HTTP verbs accepted in test files; serde aliases allow lower- and
/// upper-case spellings. `Undefined` is the fallback when no verb is given.
#[derive(Debug, Clone, Copy, PartialEq, Eq, Serialize, Deserialize, Hash)]
pub enum Verb {
    #[serde(alias = "get", alias = "GET")]
    Get,
    #[serde(alias = "post", alias = "POST")]
    Post,
    #[serde(alias = "put", alias = "PUT")]
    Put,
    #[serde(alias = "patch", alias = "PATCH")]
    Patch,
    #[serde(alias = "delete", alias = "DELETE")]
    Delete,
    Undefined,
}
impl Verb {
pub fn as_method(&self) -> Method {
match &self {
Verb::Post => Method(hyper::Method::POST),
Verb::Put => Method(hyper::Method::PUT),
Verb::Patch => Method(hyper::Method::PATCH),
Verb::Delete => Method(hyper::Method::DELETE),
_ => Method(hyper::Method::GET),
}
}
}
/// Thin newtype around `hyper::Method` so serde/Display impls can be attached.
#[derive(Debug, Clone)]
pub struct Method(hyper::Method);
impl Method {
    /// Returns a clone of the wrapped hyper method.
    pub fn to_hyper(&self) -> hyper::Method {
        self.0.clone()
    }
}
impl Serialize for Method {
    // Serialize as the method's string form (e.g. "GET").
    fn serialize<S: Serializer>(&self, serializer: S) -> Result<S::Ok, S::Error> {
        serializer.serialize_str(self.0.as_str())
    }
}
impl fmt::Display for Method {
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        write!(f, "{}", self.0)
    }
}
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/test/validation.rs | src/test/validation.rs | use crate::{
test,
test::{definition, validation, variable},
};
use log::warn;
use regex::Regex;
use std::{fmt, path::PathBuf};
use ulid::Ulid;
/// Error produced while validating a test file.
#[derive(Debug, Clone)]
pub struct Error {
    pub reason: String,
}
impl std::error::Error for Error {}
impl fmt::Display for Error {
    // Falls back to a generic message when no reason was recorded.
    fn fmt(&self, f: &mut fmt::Formatter) -> fmt::Result {
        if self.reason.is_empty() {
            write!(f, "test validation")
        } else {
            write!(f, "{}", self.reason)
        }
    }
}
/// Performs basic sanity checks on a parsed test file:
/// - warns (without failing) when `platform_id` is present but not a valid ULID
/// - fails when `id` contains anything besides alphanumerics, hyphens, and
///   underscores
///
/// `_global_variables` is currently unused but kept for interface stability.
fn validate_test_file(
    file: &test::File,
    _global_variables: &[test::Variable],
) -> Result<bool, Error> {
    if !file
        .platform_id
        .clone()
        .map(|ulid| Ulid::from_string(&ulid).is_ok())
        .unwrap_or(true)
    {
        warn!("Test file ({}) has invalid platform identifier ({}). PlatformId must be empty or a valid ULID.", file.filename, file.platform_id.clone().unwrap_or_default());
    }
    // Case-insensitive: letters, digits, '-' and '_' only.
    let regex = Regex::new(r"(?i)^[a-z0-9-_]+$").unwrap();
    if !file
        .id
        .clone()
        .map(|id| regex.is_match(id.as_str()))
        .unwrap_or(true)
    {
        // `format!` already yields a String; the former trailing `.to_string()`
        // was a redundant allocation and has been dropped.
        return Err(validation::Error {
            reason: format!(
                "id '{}' is invalid - may only contain alphanumeric characters, hyphens, and underscores",
                file.id.clone().unwrap()
            ),
        });
    }
    Ok(true)
}
// this method is intended to do basic validation of the test file and convert it into a TestDefinition if it passes
/// Validates `file` and resolves it into a `test::Definition`.
///
/// `project`/`environment` act as fallbacks when the file does not declare its
/// own; `index` is the file's position in the run order.
///
/// # Errors
/// Propagates failures from the file-level checks, variable validation, and
/// stage/setup/cleanup construction.
pub fn validate_file(
    file: test::File,
    global_variables: &[test::Variable],
    project: Option<String>,
    environment: Option<String>,
    index: usize,
) -> Result<test::Definition, Error> {
    validate_test_file(&file, global_variables)?;
    // Tags are whitespace-separated and normalized to lowercase.
    let new_tags = if let Some(tags) = file.tags.as_ref() {
        tags.to_lowercase()
            .split_whitespace()
            .map(|s| s.to_string())
            .collect()
    } else {
        Vec::new()
    };
    // Variables resolve relative to the directory containing the file.
    let variables = test::Variable::validate_variables_opt(
        file.clone().variables,
        PathBuf::from(&file.filename)
            .parent()
            .and_then(|p| p.to_str())
            .unwrap_or(&file.filename),
    )?;
    let td = test::Definition {
        file_data: file.clone(),
        name: file.name,
        description: file.description,
        id: file.id.map(|i| i.to_lowercase()),
        platform_id: file.platform_id,
        project: file.project.or(project),
        environment: file.env.or(environment),
        requires: file.requires,
        tags: new_tags,
        iterate: file.iterate.unwrap_or(1),
        variables: variables.clone(),
        global_variables: global_variables.to_vec(),
        stages: definition::StageDescriptor::validate_stages_opt(
            file.request,
            file.compare,
            file.response,
            file.stages,
            &variable::parse_source_path(&file.filename),
            &variables,
        )?,
        setup: definition::RequestResponseDescriptor::new_opt(file.setup, &variables)?,
        cleanup: definition::CleanupDescriptor::new(file.cleanup, &variables)?,
        disabled: file.disabled.unwrap_or_default(),
        index,
    };
    // Wire up variable-reference flags now that everything is resolved.
    td.update_variable_matching();
    Ok(td)
}
// this method is intended to do a thorough validation of rules and logic in the resolved test definition
// Placeholder: deep semantic validation is not implemented yet.
pub fn _validate_definition(_test_definition: &test::Definition) -> Result<bool, Error> {
    Ok(true)
}
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/test/template.rs | src/test/template.rs | use crate::{
test,
test::{definition, file, http},
};
use std::{cell::Cell, error::Error};
use ulid::Ulid;
use super::{file::SimpleValueVariable, File};
/// Returns the minimal (all-defaults) test-file template.
pub fn template() -> Result<test::File, Box<dyn Error + Send + Sync>> {
    Ok(test::File::default())
}
/// Returns a template that uses a single stage in place of the top-level
/// request/response pair.
pub fn template_staged() -> Result<test::File, Box<dyn Error + Send + Sync>> {
    Ok(test::File {
        request: None,
        response: None,
        stages: Some(vec![new_stage()]),
        ..File::default()
    })
}
/// Returns a template with every optional section populated, serving as a
/// reference for the complete file format.
pub fn template_full() -> Result<test::File, Box<dyn Error + Send + Sync>> {
    //Do not use default as a basis, as opposed to the others.
    //Compilation error will serve as a reminder to init to "full" state
    Ok(test::File {
        filename: "".to_string(),
        name: Some("".to_string()),
        id: Some("".to_string()),
        platform_id: Some(Ulid::new().to_string()),
        project: Some("".to_string()),
        env: Some("".to_string()),
        tags: Some("".to_string()),
        requires: Some("".to_string()),
        iterate: Some(1),
        setup: Some(new_full_request_response()?),
        request: Some(new_full_request()?),
        compare: Some(new_full_compare()?),
        response: Some(new_full_response()?),
        stages: Some(vec![new_full_stage()?]),
        cleanup: Some(new_full_cleanup()?),
        variables: Some(vec![new_full_variables()?]),
        disabled: Some(false),
        description: Some("".to_string()),
    })
}
/// Builds a cleanup section with every hook populated.
fn new_full_cleanup() -> Result<file::UnvalidatedCleanup, Box<dyn Error + Send + Sync>> {
    let onsuccess = new_full_request()?;
    let onfailure = new_full_request()?;
    let always = new_full_request()?;
    Ok(file::UnvalidatedCleanup {
        onsuccess: Some(onsuccess),
        onfailure: Some(onfailure),
        always: Some(always),
    })
}
/// Builds a fully-populated request/response pair.
fn new_full_request_response(
) -> Result<file::UnvalidatedRequestResponse, Box<dyn Error + Send + Sync>> {
    let request = new_full_request()?;
    let response = new_full_response()?;
    Ok(file::UnvalidatedRequestResponse {
        request,
        response: Some(response),
    })
}
// A minimal stage: default request, default expected response, nothing else.
fn new_stage() -> file::UnvalidatedStage {
    file::UnvalidatedStage {
        request: new_request(),
        compare: None,
        response: Some(new_response()),
        variables: None,
        name: None,
        delay: None,
    }
}
/// Builds a stage with request, compare, response, and variables populated
/// (name/delay stay unset).
fn new_full_stage() -> Result<file::UnvalidatedStage, Box<dyn Error + Send + Sync>> {
    let request = new_full_request()?;
    let compare = new_full_compare()?;
    let response = new_full_response()?;
    let variables = vec![new_full_variables()?];
    Ok(file::UnvalidatedStage {
        request,
        compare: Some(compare),
        response: Some(response),
        variables: Some(variables),
        name: None,
        delay: None,
    })
}
//Do we want to create a variable of every type as part of the full template?
// Currently a single simple variable with empty name and empty string value.
fn new_full_variables() -> Result<file::UnvalidatedVariable, Box<dyn Error + Send + Sync>> {
    Ok(file::UnvalidatedVariable::Simple(SimpleValueVariable {
        name: "".to_string(),
        value: serde_json::Value::from("".to_string()),
    }))
}
// Default expected-response section.
fn new_response() -> file::UnvalidatedResponse {
    file::UnvalidatedResponse::default()
}
// Fully-populated expected-response section (status 200, time 500, placeholder
// header/body/ignore/extract entries).
fn new_full_response() -> Result<file::UnvalidatedResponse, Box<dyn Error + Send + Sync>> {
    Ok(file::UnvalidatedResponse {
        status: Some(test::file::ValueOrNumericSpecification::Value(200)),
        time: Some(test::file::ValueOrNumericSpecification::Value(500)),
        headers: Some(vec![new_header()]),
        body: Some(serde_json::from_str("{}")?),
        ignore: Some(vec!["".to_string()]),
        extract: Some(vec![definition::ResponseExtraction::new()]),
        strict: Some(true),
        body_schema: None,
    })
}
// Fully-populated compare section: every override/add/ignore list present.
fn new_full_compare() -> Result<file::UnvalidatedCompareRequest, Box<dyn Error + Send + Sync>> {
    Ok(file::UnvalidatedCompareRequest {
        method: Some(http::Verb::Get),
        url: "".to_string(),
        params: Some(vec![new_parameter()]),
        add_params: Some(vec![new_parameter()]),
        ignore_params: Some(vec!["".to_string()]),
        headers: Some(vec![new_header()]),
        add_headers: Some(vec![new_header()]),
        ignore_headers: Some(vec!["".to_string()]),
        body: Some(serde_json::from_str("{}")?),
        strict: Some(true),
    })
}
// Default request section.
fn new_request() -> file::UnvalidatedRequest {
    file::UnvalidatedRequest::default()
}
// Fully-populated request section with one placeholder param and header.
fn new_full_request() -> Result<file::UnvalidatedRequest, Box<dyn Error + Send + Sync>> {
    Ok(file::UnvalidatedRequest {
        method: Some(http::Verb::Get),
        url: "".to_string(),
        params: Some(vec![new_parameter()]),
        headers: Some(vec![new_header()]),
        body: Some(serde_json::from_str("{}")?),
    })
}
/// An empty header placeholder for templates.
fn new_header() -> http::Header {
    http::Header {
        header: String::new(),
        value: String::new(),
        matches_variable: Cell::new(false),
    }
}
/// An empty query-parameter placeholder for templates.
fn new_parameter() -> http::Parameter {
    http::Parameter {
        param: String::new(),
        value: String::new(),
        matches_variable: Cell::new(false),
    }
}
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/test/file.rs | src/test/file.rs | use crate::{
json::filter::filter_json,
test,
test::{definition, file::Validated::Good, http, variable, variable::Modifier},
validated::ValidatedExt,
};
use chrono::{
DateTime, Datelike, Days, Duration, Local, Months, NaiveDate, NaiveDateTime, ParseError,
TimeZone,
};
use log::{debug, error, trace};
use nonempty_collections::{IntoNonEmptyIterator, NonEmptyIterator};
use num::{Num, Signed};
use rand::{distributions::uniform::SampleUniform, rngs::ThreadRng, Rng};
use regex::Regex;
use serde::{de::Visitor, Deserialize, Deserializer, Serialize};
use serde_json::{Map, Value};
use std::{
cmp::{max, min},
collections::BTreeMap,
error::Error,
fmt::{self, Debug, Display},
fs,
hash::{Hash, Hasher},
};
use validated::Validated;
// Sample given names used when generating name/email test data.
const GIVEN_NAMES: [&str; 20] = [
    "James", "Michael", "Robert", "John", "David", "William", "Richard", "Joseph", "Thomas",
    "Christopher", "Mary", "Patricia", "Jennifer", "Linda", "Elizabeth", "Barbara", "Susan",
    "Jessica", "Karen", "Sarah",
];
// Sample surnames used when generating name/email test data.
const SURNAMES: [&str; 20] = [
    "Smith", "Johnson", "Williams", "Brown", "Jones", "Miller", "Davis", "Wilson", "Anderson",
    "Thomas", "Taylor", "Moore", "Jackson", "Martin", "Lee", "Thompson", "Harris", "Clark",
    "Lewis", "Robinson",
];
// Reserved example domains for generated email addresses.
const EMAIL_DOMAINS: [&str; 3] = ["example.com", "example.net", "example.org"];
// Allow comparing a VariableName directly against a String.
impl std::cmp::PartialEq<String> for VariableName {
    fn eq(&self, other: &String) -> bool {
        self.0 == *other
    }
}
/// Newtype for variable references; deserialization enforces a leading '$'
/// (see `VariableNameVisitor`).
#[derive(PartialEq, Eq, Debug, Serialize, Clone)]
pub struct VariableName(pub String);
impl VariableName {
    // Returns an owned copy of the underlying name.
    pub fn val(&self) -> String {
        self.0.clone()
    }
}
// serde visitor that accepts only strings shaped like variable references.
struct VariableNameVisitor;
impl<'de> Visitor<'de> for VariableNameVisitor {
    type Value = VariableName;
    fn expecting(&self, formatter: &mut std::fmt::Formatter) -> std::fmt::Result {
        formatter.write_str("a string starting with a $")
    }
    // Accepts `$name` or the quoted `"$"` form; anything else is rejected.
    fn visit_str<E>(self, value: &str) -> Result<Self::Value, E>
    where
        E: serde::de::Error,
    {
        if !value.starts_with('$') && !value.starts_with("\"$\"") {
            return Err(E::custom("expecting identifier starting with $"));
        }
        Ok(VariableName(value.to_string()))
    }
}
impl<'de> Deserialize<'de> for VariableName {
    fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
    where
        D: Deserializer<'de>,
    {
        deserializer.deserialize_str(VariableNameVisitor)
    }
}
//aka RefOrT , where Ref should refer to a variable
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(untagged)]
pub enum UnvalidatedVariableNameOrComponent<T> {
    VariableName(VariableName),
    Component(T),
}
/// A tagged constraint on a value: any-of / exactly-one-of / none-of a list,
/// or equality with a single value (the value form may appear untagged in the
/// file format).
#[derive(Serialize, Debug, Clone, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub enum Specification<T> {
    AnyOf(Vec<T>),
    OneOf(Vec<T>),
    NoneOf(Vec<T>),
    Value(T),
    #[serde(untagged)]
    UnTaggedValue(T),
}
impl<T> Default for Specification<T> {
    // An empty NoneOf is the least restrictive constraint: nothing is excluded.
    fn default() -> Self {
        Specification::NoneOf(vec![])
    }
}
impl<T> TryFrom<UnvalidatedSpecification<T>> for Option<Specification<T>> {
    type Error = String;

    /// Collapses the four mutually-exclusive constraint fields of an
    /// unvalidated specification (`value`, `anyOf`, `oneOf`, `noneOf`) into at
    /// most one tagged `Specification`.
    ///
    /// # Errors
    /// Fails when more than one of the four constraint fields is populated.
    fn try_from(unvalidated: UnvalidatedSpecification<T>) -> Result<Self, Self::Error> {
        // Count the populated list-style constraints without allocating a Vec
        // of references (the former `vec![&a, &b, &c]` approach).
        let specified = [
            unvalidated.any_of.is_some(),
            unvalidated.one_of.is_some(),
            unvalidated.none_of.is_some(),
        ]
        .iter()
        .filter(|&&present| present)
        .count();
        if specified > 1 || (specified == 1 && unvalidated.value.is_some()) {
            return Err(
                "can only specify one of the following constraints: oneOf, anyOf, noneOf, or value"
                    .to_string(),
            );
        }
        // At most one of the four fields is populated at this point, so the
        // first matching arm wins; `return` before a tail expression removed.
        match (
            unvalidated.value,
            unvalidated.any_of,
            unvalidated.one_of,
            unvalidated.none_of,
        ) {
            (Some(v), _, _, _) => Ok(Some(Specification::Value(v))),
            (_, Some(vs), _, _) => Ok(Some(Specification::AnyOf(vs))),
            (_, _, Some(vs), _) => Ok(Some(Specification::OneOf(vs))),
            (_, _, _, Some(vs)) => Ok(Some(Specification::NoneOf(vs))),
            _ => Ok(None),
        }
    }
}
/// Numeric constraint: an optional tagged specification plus optional min/max
/// bounds.
#[derive(Serialize, Debug, Clone, Default, Deserialize, PartialEq)]
#[serde(rename_all = "camelCase")]
pub struct NumericSpecification<T: std::fmt::Display + Clone + PartialOrd> {
    #[serde(flatten, skip_serializing_if = "Option::is_none")]
    pub specification: Option<Specification<T>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub min: Option<T>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max: Option<T>,
}
// Concrete aliases for the specification types used throughout the schema.
pub type BooleanSpecification = Specification<bool>;
pub type FloatSpecification = NumericSpecification<f64>;
pub type IntegerSpecification = NumericSpecification<i64>;
/// String constraint: tagged value specification plus exact/min/max length and
/// an optional regex pattern.
#[derive(Hash, Serialize, Debug, Clone, Deserialize, PartialEq, Default)]
#[serde(rename_all = "camelCase")]
pub struct StringSpecification {
    #[serde(flatten, skip_serializing_if = "Option::is_none")]
    pub specification: Option<Specification<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub length: Option<i64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub min_length: Option<i64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_length: Option<i64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub pattern: Option<String>,
}
/// Either a full `DatumSchema` constraint or a literal JSON value.
#[derive(Debug, Serialize, Clone, Deserialize, PartialEq)]
#[serde(untagged)]
pub enum ValueOrDatumSchema {
    Datum(DatumSchema),
    Values(Value),
}
impl Hash for ValueOrDatumSchema {
    // Hash via serialized JSON, since serde_json::Value has no Hash impl.
    fn hash<H: Hasher>(&self, state: &mut H) {
        serde_json::to_string(self).unwrap().hash(state)
    }
}
/// Constraint on array contents: either schema-based (elements checked against
/// `DatumSchema`s) or value-based (the whole array compared against literal
/// arrays).
#[derive(Debug, Serialize, Clone, Deserialize, PartialEq)]
#[serde(untagged)]
pub enum ValuesOrSchema {
    Schemas(Specification<Box<DatumSchema>>),
    Values(Specification<Vec<Value>>),
}
impl ValuesOrSchema {
    /// Generates a sample array value when the underlying specification is
    /// constrained enough to derive one; `None` otherwise.
    pub fn generate_if_constrained(&self, rng: &mut ThreadRng) -> Option<Value> {
        trace!("generate_if_constrained()");
        match self {
            ValuesOrSchema::Schemas(schema) => schema.schema_generate_if_constrained(rng),
            ValuesOrSchema::Values(vals) => vals
                .generate_if_constrained(rng)
                .map(serde_json::Value::from),
        }
    }
    // Validates `val` against whichever constraint form is present; failures
    // are rendered through `formatter`.
    fn check(
        &self,
        val: &Vec<Value>,
        strict: bool,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Vec<Validated<(), String>> {
        match &self {
            ValuesOrSchema::Schemas(schema) => schema.schema_check(val, strict, formatter),
            ValuesOrSchema::Values(vals) => vals.vec_check(val, strict, formatter),
        }
    }
}
/// Constraints on an array: exact/min/max length plus an optional content
/// schema.
#[derive(Serialize, Debug, Clone, Deserialize, PartialEq, Default)]
#[serde(rename_all = "camelCase")]
pub struct SequenceSpecification {
    #[serde(skip_serializing_if = "Option::is_none")]
    pub length: Option<i64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub min_length: Option<i64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max_length: Option<i64>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub schema: Option<ValuesOrSchema>,
}
/// Constraints on a date value: tagged specification, min/max, an optional
/// format string, and an optional `variable::Modifier`.
#[derive(Hash, Default, Serialize, Debug, Clone, Deserialize, PartialEq)]
pub struct DateSpecification {
    #[serde(flatten, skip_serializing_if = "Option::is_none")]
    pub specification: Option<Specification<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub min: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub format: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub modifier: Option<variable::Modifier>,
}
/// Same shape as `DateSpecification`, for date-time values.
#[derive(Hash, Default, Serialize, Debug, Clone, Deserialize, PartialEq)]
pub struct DateTimeSpecification {
    #[serde(flatten)]
    #[serde(skip_serializing_if = "Option::is_none")]
    pub specification: Option<Specification<String>>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub min: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub max: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub format: Option<String>,
    #[serde(skip_serializing_if = "Option::is_none")]
    pub modifier: Option<variable::Modifier>,
}
/// A person-name constraint; delegates entirely to the inner string spec.
#[derive(Hash, Default, Serialize, Debug, Clone, Deserialize, PartialEq)]
pub struct NameSpecification {
    #[serde(flatten)]
    pub specification: StringSpecification,
}
/// An email constraint; format-checked, then delegated to the inner string
/// spec (see `Checker` impl).
#[derive(Hash, Default, Serialize, Debug, Clone, Deserialize, PartialEq)]
pub struct EmailSpecification {
    #[serde(flatten)]
    pub specification: StringSpecification,
}
/// Checks an actual value against a specification; `formatter` is called with
/// the expected and actual renderings to build failure messages.
pub trait Checker {
    type Item;
    fn check(
        &self,
        val: &Self::Item,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Vec<Validated<(), String>>;
}
impl Checker for NameSpecification {
    type Item = String;
    // Name values are validated purely as strings.
    fn check(
        &self,
        val: &Self::Item,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Vec<Validated<(), String>> {
        self.specification.check(val, formatter)
    }
}
impl Checker for EmailSpecification {
    type Item = String;
    // Validates the email format first; only format-valid values are then
    // checked against the inner string specification.
    // NOTE(review): the regex is compiled on every call; hoisting it into a
    // lazily-initialized static would avoid repeated compilation.
    fn check(
        &self,
        val: &Self::Item,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Vec<Validated<(), String>> {
        //standard browser regex for email
        let email_regex: Regex = Regex::new(
            r"^[a-zA-Z0-9.!#$%&'*+/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$",
        )
        .unwrap();
        let matches = email_regex.is_match(val);
        if !matches {
            trace!("failed email regex");
            vec![Validated::fail(formatter("email format", val))]
        } else {
            self.specification.check(val, formatter)
        }
    }
}
// Constraint checking when the specification's operands are schemas: each
// element of the actual array is validated against the schema operand(s)
// rather than compared for literal equality.
impl Specification<Box<DatumSchema>> {
    // Dispatches to the variant-specific check; always yields exactly one
    // Validated finding.
    fn schema_check(
        &self,
        vals: &Vec<Value>,
        strict: bool,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Vec<Validated<(), String>> {
        let findings = match self {
            Specification::NoneOf(none_ofs) => {
                self.schema_check_none_of(vals, none_ofs, strict, formatter)
            }
            Specification::AnyOf(any_ofs) => {
                self.schema_any_one_of(vals, any_ofs, strict, formatter)
            }
            Specification::OneOf(one_ofs) => {
                self.schema_check_one_of(vals, one_ofs, strict, formatter)
            }
            Specification::Value(val) => self.schema_check_val(vals, val, strict, formatter),
            Specification::UnTaggedValue(val) => {
                self.schema_check_val(vals, val, strict, formatter)
            }
        };
        vec![findings]
    }
    // Produces a generated value satisfying the constraint when possible;
    // NoneOf is unconstrained and yields None.
    // NOTE(review): `gen_range(0..len)` panics when a OneOf/AnyOf list is
    // empty — presumably ruled out upstream; confirm.
    fn schema_generate_if_constrained(&self, rng: &mut ThreadRng) -> Option<Value> {
        trace!("schema_generate_if_constrained()");
        match &self {
            Specification::Value(v) => generate_value_from_schema(v, 1),
            Specification::UnTaggedValue(v) => generate_value_from_schema(v, 1),
            Specification::OneOf(oneofs) => oneofs
                .get(rng.gen_range(0..oneofs.len()))
                .and_then(|s| generate_value_from_schema(s, 1)),
            Specification::AnyOf(oneofs) => oneofs
                .get(rng.gen_range(0..oneofs.len()))
                .and_then(|s| generate_value_from_schema(s, 1)),
            _ => None,
        }
    }
    // Value form: every element must satisfy the single schema.
    fn schema_check_val(
        &self,
        actuals: &Vec<Value>,
        specified_value: &DatumSchema,
        strict: bool,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Validated<(), String> {
        if actuals.iter().all(|actual| {
            specified_value
                .check(actual, strict, formatter)
                .iter()
                .all(|v| v.is_good())
        }) {
            Good(())
        } else {
            Validated::fail(formatter(
                format!("{:?}", specified_value).as_str(),
                format!("{:?}", actuals).as_str(),
            ))
        }
    }
    // OneOf: exactly one candidate schema must be satisfied by all elements.
    fn schema_check_one_of(
        &self,
        actuals: &Vec<Value>,
        specified_values: &[Box<DatumSchema>],
        strict: bool,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Validated<(), String> {
        let matchers = specified_values
            .iter()
            .filter(|v| {
                actuals.iter().all(|actual| {
                    v.check(actual, strict, formatter)
                        .iter()
                        .all(|validation| validation.is_good())
                })
            })
            .collect::<Vec<&Box<DatumSchema>>>();
        if matchers.len() == 1 {
            Good(())
        } else {
            Validated::fail(formatter(
                format!("one of {:?}", specified_values).as_str(),
                format!("{:?}", actuals).as_str(),
            ))
        }
    }
    // AnyOf: each element must satisfy at least one candidate schema.
    fn schema_any_one_of(
        &self,
        actuals: &Vec<Value>,
        specified_values: &[Box<DatumSchema>],
        strict: bool,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Validated<(), String> {
        if actuals.iter().all(|actual| {
            specified_values.iter().any(|x| {
                x.check(actual, strict, formatter)
                    .iter()
                    .all(|v| v.is_good())
            })
        }) {
            Good(())
        } else {
            Validated::fail(formatter(
                format!("one of {:?}", specified_values).as_str(),
                format!("{:?}", actuals).as_str(),
            ))
        }
    }
    // NoneOf: no element may satisfy any candidate schema.
    fn schema_check_none_of(
        &self,
        actuals: &Vec<Value>,
        specified_values: &[Box<DatumSchema>],
        strict: bool,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Validated<(), String> {
        if actuals.iter().all(|actual| {
            !specified_values.iter().any(|x| {
                x.check(actual, strict, formatter)
                    .iter()
                    .all(|v| v.is_good())
            })
        }) {
            Good(())
        } else {
            Validated::fail(formatter(
                format!("none of {:?}", specified_values).as_str(),
                format!("{:?}", actuals).as_str(),
            ))
        }
    }
}
// Constraint checking when the operands are literal arrays: the actual array
// is compared as a whole against each candidate array.
impl Specification<Vec<Value>> {
    //Strict currently isn't supported for the arrays
    //We can add later, but assert_json_diff::CompareMode::Inclusive produces
    //weird results for arrays
    // Dispatches to the variant-specific comparison; always yields exactly one
    // Validated finding.
    fn vec_check(
        &self,
        vals: &Vec<Value>,
        strict: bool,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Vec<Validated<(), String>> {
        let findings = match self {
            Specification::NoneOf(none_ofs) => {
                self.vec_check_none_of(vals, none_ofs, strict, formatter)
            }
            Specification::AnyOf(any_ofs) => {
                self.vec_check_any_of(vals, any_ofs, strict, formatter)
            }
            Specification::OneOf(one_ofs) => {
                self.vec_check_one_of(vals, one_ofs, strict, formatter)
            }
            Specification::Value(val) => self.vec_check_val(vals, val, strict, formatter),
            Specification::UnTaggedValue(val) => self.vec_check_val(vals, val, strict, formatter),
        };
        vec![findings]
    }
    // Exact whole-array equality.
    fn vec_check_val(
        &self,
        actual: &Vec<Value>,
        specified_value: &Vec<Value>,
        _strict: bool,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Validated<(), String> {
        if specified_value == actual {
            Good(())
        } else {
            Validated::fail(formatter(
                format!("{:?}", specified_value).as_str(),
                format!("{:?}", actual).as_str(),
            ))
        }
    }
    // Exactly one candidate array must equal the actual array.
    fn vec_check_one_of(
        &self,
        actual: &Vec<Value>,
        specified_values: &[Vec<Value>],
        _strict: bool,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Validated<(), String> {
        let hits = specified_values
            .iter()
            .filter(|v| *v == actual)
            .collect::<Vec<&Vec<Value>>>();
        if hits.len() == 1 {
            Good(())
        } else {
            Validated::fail(formatter(
                format!("one of {:?}", specified_values).as_str(),
                format!("{:?}", actual).as_str(),
            ))
        }
    }
    // At least one candidate array must equal the actual array.
    fn vec_check_any_of(
        &self,
        actual: &Vec<Value>,
        specified_values: &[Vec<Value>],
        _strict: bool,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Validated<(), String> {
        if specified_values.contains(actual) {
            Good(())
        } else {
            Validated::fail(formatter(
                format!("one of {:?}", specified_values).as_str(),
                format!("{:?}", actual).as_str(),
            ))
        }
    }
    // No candidate array may equal the actual array.
    fn vec_check_none_of(
        &self,
        actual: &Vec<Value>,
        specified_values: &[Vec<Value>],
        _strict: bool,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Validated<(), String> {
        if !specified_values.contains(actual) {
            Good(())
        } else {
            Validated::fail(formatter(
                format!("none of {:?}", specified_values).as_str(),
                format!("{:?}", actual).as_str(),
            ))
        }
    }
}
// Generic scalar-constraint checking: a single actual value compared against
// the specification's candidate values.
impl<T> Specification<T>
where
    T: PartialEq,
    T: fmt::Debug,
    T: Clone,
{
    // Produces a concrete value when the constraint pins one down; NoneOf
    // constrains nothing and yields None.
    // NOTE(review): `gen_range(0..len)` panics when a OneOf/AnyOf list is
    // empty — presumably ruled out upstream; confirm.
    fn generate_if_constrained(&self, rng: &mut ThreadRng) -> Option<T> {
        trace!("generate_if_constrained{:?}", &self);
        match &self {
            Specification::Value(v) => Some(v.clone()),
            Specification::UnTaggedValue(v) => Some(v.clone()),
            Specification::OneOf(oneofs) => oneofs.get(rng.gen_range(0..oneofs.len())).cloned(),
            Specification::AnyOf(anyofs) => anyofs.get(rng.gen_range(0..anyofs.len())).cloned(),
            Specification::NoneOf(_) => None,
        }
    }
    // Exact equality.
    fn check_val(
        &self,
        actual: &T,
        specified_value: &T,
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Validated<(), String> {
        if specified_value == actual {
            Good(())
        } else {
            Validated::fail(formatter(
                format!("{:?}", specified_value).as_str(),
                format!("{:?}", actual).as_str(),
            ))
        }
    }
    // Exactly one candidate must equal the actual value (duplicated candidates
    // therefore fail the one-of check).
    fn check_one_of(
        &self,
        actual: &T,
        specified_values: &[T],
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Validated<(), String> {
        let hits = specified_values
            .iter()
            .filter(|v| *v == actual)
            .collect::<Vec<&T>>();
        if hits.len() == 1 {
            Good(())
        } else {
            Validated::fail(formatter(
                format!("one of {:?}", specified_values).as_str(),
                format!("{:?}", actual).as_str(),
            ))
        }
    }
    // At least one candidate must equal the actual value.
    fn check_any_of(
        &self,
        actual: &T,
        specified_values: &[T],
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Validated<(), String> {
        if specified_values.contains(actual) {
            Good(())
        } else {
            Validated::fail(formatter(
                format!("one of {:?}", specified_values).as_str(),
                format!("{:?}", actual).as_str(),
            ))
        }
    }
    // No candidate may equal the actual value.
    fn check_none_of(
        &self,
        actual: &T,
        specified_values: &[T],
        formatter: &impl Fn(&str, &str) -> String,
    ) -> Validated<(), String> {
        if !specified_values.contains(actual) {
            Good(())
        } else {
            Validated::fail(formatter(
                format!("none of {:?}", specified_values).as_str(),
                format!("{:?}", actual).as_str(),
            ))
        }
    }
}
// The Specification family hashes via serialized JSON because the generic
// payloads (and serde_json::Value) do not implement Hash themselves.
impl<T> Hash for Specification<T>
where
    T: PartialEq,
    T: Display,
    T: PartialOrd,
    T: fmt::Debug,
    T: Display,
    T: Serialize,
    T: Clone,
{
    fn hash<H: Hasher>(&self, state: &mut H) {
        serde_json::to_string(self).unwrap().hash(state)
    }
}
impl Hash for SequenceSpecification {
    fn hash<H: Hasher>(&self, state: &mut H) {
        serde_json::to_string(self).unwrap().hash(state)
    }
}
impl<T> Hash for NumericSpecification<T>
where
    T: PartialEq,
    T: Display,
    T: PartialOrd,
    T: fmt::Debug,
    T: Serialize,
    T: Clone,
{
    fn hash<H: Hasher>(&self, state: &mut H) {
        serde_json::to_string(self).unwrap().hash(state)
    }
}
impl<T> Hash for UnvalidatedNumericSpecification<T>
where
    T: PartialEq,
    T: Display,
    T: PartialOrd,
    T: fmt::Debug,
    T: Serialize,
    T: Clone,
{
    fn hash<H: Hasher>(&self, state: &mut H) {
        serde_json::to_string(self).unwrap().hash(state)
    }
}
fn validate1<T: Copy>(
pred: &impl Fn(&T) -> bool,
val: &Option<T>,
message: String,
) -> Validated<Option<T>, String> {
val.map(|v| {
if !pred(&v) {
Validated::fail(message)
} else {
Good(val.clone())
}
})
.unwrap_or(Validated::Good(None))
}
fn validate2<T>(
pred: &impl Fn(&T, &T) -> bool,
val: &Option<T>,
val2: &Option<T>,
message: String,
) -> Validated<(), String> {
val.as_ref()
.zip(val2.as_ref())
.map(|(v, v2)| {
if !pred(v, v2) {
Validated::fail(message)
} else {
Good(())
}
})
.unwrap_or(Good(()))
}
fn non_negative_validator<T: Signed + Copy>(
num: &Option<T>,
variable_name: &str,
) -> Validated<Option<T>, String> {
validate1::<T>(
&|val| val.is_positive() || val.is_zero(),
num,
format!("negative value provided for {variable_name}"),
)
}
fn less_than_or_equal_validator<T: PartialOrd>(
lhs: &Option<T>,
rhs: &Option<T>,
lhs_variable_name: &str,
rhs_variable_name: &str,
) -> Validated<(), String> {
validate2::<T>(
&|lhs, rhs| lhs <= rhs,
lhs,
rhs,
format!("{lhs_variable_name} must be less than or equal to {rhs_variable_name}"),
)
}
impl<T> NumericSpecification<T>
where
T: PartialEq,
T: Display,
T: PartialOrd,
T: fmt::Debug,
T: Clone,
{
pub fn new(
specification: Option<Specification<T>>,
min: Option<T>,
max: Option<T>,
) -> Result<Self, String> {
let is_none_of = match specification.as_ref() {
Some(Specification::NoneOf(_)) => true,
_ => false,
};
let violation =
specification.is_some() && !is_none_of && min.as_ref().or(max.as_ref()).is_some();
if violation {
return Err(
"Cannot specify min or max alongside either of oneOf, anyOf, or value".to_string(),
);
}
less_than_or_equal_validator(&min, &max, "min", "max")
.map(|_| Self {
specification,
min,
max,
})
.ok()
.map_err(|nev| {
nev.into_nonempty_iter()
.reduce(|acc, e| format!("{},{}", acc, e))
})
}
fn check_min(
&self,
actual: &T,
formatter: &impl Fn(&str, &str) -> String,
) -> Validated<(), String> {
match &self.min {
Some(t) => {
if t <= actual {
Good(())
} else {
Validated::fail(formatter(
format!("minimum of {}", t).as_str(),
format!("{}", actual).as_str(),
))
}
}
None => Good(()),
}
}
fn check_max(
&self,
actual: &T,
formatter: &impl Fn(&str, &str) -> String,
) -> Validated<(), String> {
match &self.max {
Some(t) => {
if t >= actual {
Good(())
} else {
Validated::fail(formatter(
format!("maximum of {}", t).as_str(),
format!("{}", actual).as_str(),
))
}
}
None => Good(()),
}
}
}
impl<T> TryFrom<UnvalidatedNumericSpecification<T>> for NumericSpecification<T>
where
T: PartialEq,
T: Display,
T: PartialOrd,
T: fmt::Debug,
T: Clone,
{
type Error = String;
fn try_from(
unvalidated_numeric: UnvalidatedNumericSpecification<T>,
) -> Result<Self, Self::Error> {
let unvalidated_spec = UnvalidatedSpecification::<T> {
name: unvalidated_numeric.name,
value: unvalidated_numeric.value,
any_of: unvalidated_numeric.any_of,
one_of: unvalidated_numeric.one_of,
none_of: unvalidated_numeric.none_of,
};
let maybe_spec = TryInto::<Option<Specification<T>>>::try_into(unvalidated_spec);
maybe_spec
.and_then(|spec| Self::new(spec, unvalidated_numeric.min, unvalidated_numeric.max))
}
}
impl<T> Checker for Specification<T>
where
T: PartialEq,
T: fmt::Debug,
T: Clone,
{
type Item = T;
fn check(
&self,
val: &T,
formatter: &impl Fn(&str, &str) -> String,
) -> Vec<Validated<(), String>> {
vec![match self {
Specification::NoneOf(nones) => self.check_none_of(val, nones, formatter),
Specification::OneOf(oneofs) => self.check_one_of(val, oneofs, formatter),
Specification::AnyOf(anyofs) => self.check_any_of(val, anyofs, formatter),
Specification::Value(specified_value) => {
self.check_val(val, specified_value, formatter)
}
Specification::UnTaggedValue(specified_value) => {
self.check_val(val, specified_value, formatter)
}
}]
}
}
impl<T> Checker for NumericSpecification<T>
where
T: PartialEq,
T: Display,
T: PartialOrd,
T: fmt::Debug,
T: Clone,
{
type Item = T;
fn check(
&self,
val: &T,
formatter: &impl Fn(&str, &str) -> String,
) -> Vec<Validated<(), String>> {
let mut ret = vec![
self.check_min(val, formatter),
self.check_max(val, formatter),
];
ret.append(
self.specification
.as_ref()
.map(|s| s.check(val, formatter))
.unwrap_or_default()
.as_mut(),
);
ret
}
}
impl StringSpecification {
pub fn new(
specification: Option<Specification<String>>,
length: Option<i64>,
min_length: Option<i64>,
max_length: Option<i64>,
pattern: Option<String>,
) -> Result<Self, String> {
let is_none_of = match specification.as_ref() {
Some(Specification::NoneOf(_)) => true,
_ => false,
};
let violation = specification.is_some()
&& !is_none_of
&& length
.as_ref()
.or(min_length.as_ref())
.or(max_length.as_ref())
.or(pattern.as_ref().map(|_| &25)) //random value to make iti64
.is_some();
if violation {
return Err("Cannot specify minLength, maxLength, or pattern alongside either of oneOf, anyOf, or value".to_string());
}
let negative_validation_length = non_negative_validator(&length, "length");
let negative_validation_max = non_negative_validator(&max_length, "maxLength");
let negative_validation_min = non_negative_validator(&min_length, "minLength");
let relation_validation =
less_than_or_equal_validator(&min_length, &max_length, "minLength", "maxLength");
let length_not_combined_with_min_or_max_validation =
if length.and(min_length).is_some() || length.and(max_length).is_some() {
Validated::fail(
"length cannot be specified alongside minLength or maxLength".to_string(),
)
} else {
Good(())
};
let pattern_validation = pattern
.map(|p| {
Regex::new(p.as_str())
.map(|_| Good(Some(p)))
.unwrap_or_else(|e| {
Validated::fail(format!("invalid regex supplied for pattern: {}", e))
})
})
.unwrap_or(Good(None));
negative_validation_max
.map5(
length_not_combined_with_min_or_max_validation
.map2(negative_validation_length, |_, _| length),
negative_validation_min,
relation_validation,
pattern_validation,
|_, _, _, _, p| Self {
length,
max_length,
min_length,
specification,
pattern: p,
},
)
.ok()
.map_err(|nev| {
nev.into_nonempty_iter()
.reduce(|acc, e| format!("{},{}", acc, e))
})
}
fn check_pattern(
&self,
actual: &str,
formatter: &impl Fn(&str, &str) -> String,
) -> Validated<(), String> {
match &self.pattern {
Some(p) => {
let result = Regex::new(p).map(|re| {
if re.is_match(actual) {
Good(())
} else {
Validated::fail(formatter(format!("pattern of {}", p).as_str(), actual))
}
});
result.unwrap_or_else(|e| {
Validated::fail(formatter("valid regex", format!("{:?}", e).as_str()))
})
}
_ => Good(()),
}
}
fn check_length(
&self,
actual: &str,
formatter: &impl Fn(&str, &str) -> String,
) -> Validated<(), String> {
match &self.length {
Some(t) => {
if *t == actual.len() as i64 {
Good(())
} else {
Validated::fail(formatter(format!("length of {}", t).as_str(), actual))
}
}
None => Good(()),
}
}
fn check_min_length(
&self,
actual: &str,
formatter: &impl Fn(&str, &str) -> String,
) -> Validated<(), String> {
match &self.min_length {
Some(t) => {
if *t <= actual.len() as i64 {
Good(())
} else {
Validated::fail(formatter(
format!("minimum length of {}", t).as_str(),
actual,
))
}
}
None => Good(()),
}
}
fn check_max_length(
&self,
actual: &str,
formatter: &impl Fn(&str, &str) -> String,
) -> Validated<(), String> {
match &self.max_length {
Some(t) => {
if *t >= actual.len() as i64 {
Good(())
} else {
Validated::fail(formatter(
format!("maximum length of {}", t).as_str(),
actual,
))
}
}
None => Good(()),
}
}
}
impl TryFrom<UnvalidatedStringSpecification> for StringSpecification {
type Error = String;
fn try_from(unvalidated_string: UnvalidatedStringSpecification) -> Result<Self, Self::Error> {
let unvalidated_spec = UnvalidatedSpecification::<String> {
name: unvalidated_string.name,
value: unvalidated_string.value,
any_of: unvalidated_string.any_of,
one_of: unvalidated_string.one_of,
none_of: unvalidated_string.none_of,
};
let maybe_spec = TryInto::<Option<Specification<String>>>::try_into(unvalidated_spec);
maybe_spec.and_then(|spec| {
Self::new(
spec,
unvalidated_string.length,
unvalidated_string.min_length,
unvalidated_string.max_length,
unvalidated_string.pattern,
)
})
}
}
impl SequenceSpecification {
pub fn new(
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | true |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/test/definition.rs | src/test/definition.rs | use crate::{
test,
test::{
file,
file::{NumericSpecification, ValueOrNumericSpecification},
http, validation,
},
};
use log::trace;
use serde::{Deserialize, Serialize};
use std::{cell::Cell, collections::HashSet};
use super::file::BodyOrSchema;
use crate::test::Variable;
#[derive(Debug, Clone, PartialEq, Serialize, Deserialize)]
pub struct RequestBody {
pub data: BodyOrSchema,
#[serde(skip_serializing, skip_deserializing)]
pub matches_variable: Cell<bool>,
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct RequestDescriptor {
pub method: http::Verb,
pub url: String,
pub params: Vec<http::Parameter>,
pub headers: Vec<http::Header>,
pub body: Option<RequestBody>,
}
impl RequestDescriptor {
pub fn new(
request: file::UnvalidatedRequest,
variables: &[Variable],
) -> Result<RequestDescriptor, validation::Error> {
trace!("RequestDescriptor::new({:?})", request);
let validated_params = match request.params {
Some(params) => params
.iter()
.map(|v| http::Parameter {
param: v.param.clone(),
value: v.value.clone(),
matches_variable: Cell::from(false),
})
.collect(),
None => Vec::new(),
};
let validated_headers = match request.headers {
Some(headers) => headers
.iter()
.map(|h| http::Header {
header: h.header.clone(),
value: h.value.clone(),
matches_variable: Cell::from(false),
})
.collect(),
None => Vec::new(),
};
let request_body = request
.body
.and_then(|variable_name_or_value| match variable_name_or_value {
file::UnvalidatedVariableNameOrComponent::Component(v) => {
Some(BodyOrSchema::Body(v))
}
file::UnvalidatedVariableNameOrComponent::VariableName(name) => variables
.iter()
.find(|v| name == format!("${{{}}}", v.name))
.and_then(|v| match &v.value {
test::ValueOrDatumOrFileOrSecret::Value { value: v } => {
Some(BodyOrSchema::Body(v.clone()))
}
_ => None,
})
.or_else(|| Some(BodyOrSchema::Body(serde_json::Value::from(name.val())))),
})
.map(|b| RequestBody {
data: b,
matches_variable: Cell::from(false),
});
Ok(RequestDescriptor {
method: request.method.unwrap_or(http::Verb::Get),
url: request.url,
params: validated_params,
headers: validated_headers,
body: request_body,
})
}
pub fn new_opt(
request_opt: Option<file::UnvalidatedRequest>,
variables: &[Variable],
) -> Result<Option<RequestDescriptor>, validation::Error> {
match request_opt {
Some(request) => Ok(Some(RequestDescriptor::new(request, variables)?)),
None => Ok(None),
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct CompareDescriptor {
pub method: http::Verb,
pub url: String,
pub params: Vec<http::Parameter>,
pub add_params: Vec<http::Parameter>,
pub ignore_params: Vec<String>,
pub headers: Vec<http::Header>,
pub add_headers: Vec<http::Header>,
pub ignore_headers: Vec<String>,
pub body: Option<RequestBody>,
pub strict: bool,
}
impl CompareDescriptor {
pub fn new_opt(
request_opt: Option<file::UnvalidatedCompareRequest>,
variables: &[Variable],
) -> Result<Option<CompareDescriptor>, validation::Error> {
match request_opt {
Some(request) => {
let validated_params = match request.params {
Some(params) => params
.iter()
.map(|p| http::Parameter {
param: p.param.clone(),
value: p.value.clone(),
matches_variable: Cell::from(false),
})
.collect(),
None => Vec::new(),
};
let mut validated_add_params = Vec::new();
let mut validated_ignore_params = Vec::new();
if validated_params.is_empty() {
validated_add_params = match request.add_params {
Some(params) => params
.iter()
.map(|p| http::Parameter {
param: p.param.clone(),
value: p.value.clone(),
matches_variable: Cell::from(false),
})
.collect(),
None => Vec::new(),
};
validated_ignore_params = match request.ignore_params {
Some(params) => params.to_vec(),
None => Vec::new(),
};
}
let validated_headers = match request.headers {
Some(headers) => headers
.iter()
.map(|h| http::Header {
header: h.header.clone(),
value: h.value.clone(),
matches_variable: Cell::from(false),
})
.collect(),
None => Vec::new(),
};
let mut validated_add_headers = Vec::new();
let mut validated_ignore_headers = Vec::new();
if validated_headers.is_empty() {
validated_add_headers = match request.add_headers {
Some(headers) => headers
.iter()
.map(|h| http::Header {
header: h.header.clone(),
value: h.value.clone(),
matches_variable: Cell::from(false),
})
.collect(),
None => Vec::new(),
};
validated_ignore_headers = match request.ignore_headers {
Some(headers) => headers.to_vec(),
None => Vec::new(),
};
}
let compare_body = request
.body
.and_then(|variable_name_or_value| match variable_name_or_value {
file::UnvalidatedVariableNameOrComponent::Component(v) => {
Some(BodyOrSchema::Body(v))
}
file::UnvalidatedVariableNameOrComponent::VariableName(name) => variables
.iter()
.find(|v| name == format!("${{{}}}", v.name))
.and_then(|v| match &v.value {
test::ValueOrDatumOrFileOrSecret::Value { value: v } => {
Some(BodyOrSchema::Body(v.clone()))
}
_ => None,
})
.or_else(|| {
Some(BodyOrSchema::Body(serde_json::Value::from(name.val())))
}),
})
.map(|b| RequestBody {
data: b,
matches_variable: Cell::from(false),
});
Ok(Some(CompareDescriptor {
method: request.method.unwrap_or(http::Verb::Get),
url: request.url,
params: validated_params,
add_params: validated_add_params,
ignore_params: validated_ignore_params,
headers: validated_headers,
add_headers: validated_add_headers,
ignore_headers: validated_ignore_headers,
body: compare_body,
strict: request.strict.unwrap_or(true),
}))
}
None => Ok(None),
}
}
}
#[derive(Debug, Clone, PartialEq, Eq, Serialize, Deserialize, Hash)]
#[serde(deny_unknown_fields)]
pub struct ResponseExtraction {
pub name: String,
pub field: String,
}
impl ResponseExtraction {
pub fn new() -> ResponseExtraction {
ResponseExtraction {
name: "".to_string(),
field: "".to_string(),
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
#[serde(deny_unknown_fields)]
pub struct ResponseDescriptor {
pub status: Option<ValueOrNumericSpecification<u16>>,
pub response_time: Option<NumericSpecification<u32>>,
pub headers: Vec<http::Header>,
pub body: Option<RequestBody>,
pub ignore: Vec<String>,
pub extract: Vec<ResponseExtraction>,
pub strict: bool,
}
// TODO: add validation logic to verify the descriptor is valid
impl ResponseDescriptor {
pub fn new_opt(
response: Option<file::UnvalidatedResponse>,
variables: &[Variable],
) -> Result<Option<ResponseDescriptor>, validation::Error> {
match response {
Some(res) => {
let validated_headers = match res.headers {
Some(headers) => headers
.iter()
.map(|h| http::Header {
header: h.header.clone(),
value: h.value.clone(),
matches_variable: Cell::from(false),
})
.collect(),
None => Vec::new(),
};
//Value in this case means max. So we do a translation
let validated_response_time =
res.time.map(|value_or_numeric| match value_or_numeric {
ValueOrNumericSpecification::Value(v) => NumericSpecification {
max: Some(v),
..Default::default()
},
ValueOrNumericSpecification::Schema(s) => s,
});
let validated_ignore = res.ignore.unwrap_or_default();
let validated_extraction: Vec<ResponseExtraction> = res.extract.unwrap_or_default();
if res.body.is_some() && res.body_schema.is_some() {
return Err(validation::Error {
reason: "Responses can contain a body OR a bodySchema. Not both"
.to_string(),
});
}
let maybe_body_or_schema = res
.body
.and_then(|variable_name_or_value| match variable_name_or_value {
file::UnvalidatedVariableNameOrComponent::Component(v) => {
Some(BodyOrSchema::Body(v))
}
file::UnvalidatedVariableNameOrComponent::VariableName(name) => variables
.iter()
.find(|v| name == format!("${{{}}}", v.name))
.and_then(|v| match &v.value {
test::ValueOrDatumOrFileOrSecret::Value { value: v } => {
Some(BodyOrSchema::Body(v.clone()))
}
//In theory, we could try to read from a file variable and
//inject the contents... \todo
_ => None,
}),
})
.or(res.body_schema.and_then(|s| match s {
file::UnvalidatedVariableNameOrComponent::Component(ds) => {
Some(BodyOrSchema::Schema(ds))
}
file::UnvalidatedVariableNameOrComponent::VariableName(name) => variables
.iter()
.find(|v| name == format!("${{{}}}", v.name))
.and_then(|v| match &v.value {
test::ValueOrDatumOrFileOrSecret::Schema { value: ds } => {
Some(BodyOrSchema::Schema(ds.clone()))
}
_ => None,
}),
}));
let response_body = maybe_body_or_schema.map(|b| RequestBody {
data: b,
matches_variable: Cell::from(false),
});
Ok(Some(ResponseDescriptor {
status: res.status,
response_time: validated_response_time,
headers: validated_headers,
body: response_body,
ignore: validated_ignore,
extract: validated_extraction,
strict: res.strict.unwrap_or(true),
}))
}
None => Ok(None),
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct StageDescriptor {
pub request: RequestDescriptor,
pub compare: Option<CompareDescriptor>,
pub response: Option<ResponseDescriptor>,
pub variables: Vec<test::Variable>,
pub name: Option<String>,
//I would prefer to do this is Option<chrono::duration>
//But it requires too much effort in serialization/deserialization
pub delay: Option<u64>,
//#[serde(skip_serializing)]
//pub source_path: String,
}
impl StageDescriptor {
pub fn new(
stage: file::UnvalidatedStage,
source_path: &str,
variables: &[Variable],
) -> Result<StageDescriptor, validation::Error> {
Ok(StageDescriptor {
request: RequestDescriptor::new(stage.request, variables)?,
compare: CompareDescriptor::new_opt(stage.compare, variables)?,
response: ResponseDescriptor::new_opt(stage.response, variables)?,
variables: test::Variable::validate_variables_opt(stage.variables, source_path)?,
// source_path: source_path.to_string(),
name: stage.name,
delay: stage.delay,
})
}
pub fn validate_stages_opt(
request_opt: Option<file::UnvalidatedRequest>,
compare_opt: Option<file::UnvalidatedCompareRequest>,
response_opt: Option<file::UnvalidatedResponse>,
stages_opt: Option<Vec<file::UnvalidatedStage>>,
source_path: &str,
variables: &[Variable],
) -> Result<Vec<StageDescriptor>, validation::Error> {
let mut results = Vec::new();
let mut count = 0;
if let Some(request) = request_opt {
results.push(StageDescriptor {
request: RequestDescriptor::new(request, variables)?,
compare: CompareDescriptor::new_opt(compare_opt, variables)?,
response: ResponseDescriptor::new_opt(response_opt, variables)?,
variables: Vec::new(),
// source_path: source_path.to_string(),
name: None,
delay: None,
});
count += 1;
}
match stages_opt {
None => Ok(results),
Some(stages) => {
count += stages.len();
results.append(
&mut stages
.into_iter()
.map(|s| StageDescriptor::new(s, source_path, variables))
.filter_map(|v| match v {
Ok(x) => Some(x),
Err(_) => None,
})
.collect::<Vec<StageDescriptor>>(),
);
if results.len() != count {
Err(validation::Error {
reason: "blah".to_string(),
})
} else {
Ok(results)
}
}
}
}
pub fn get_compare_parameters(&self) -> Vec<http::Parameter> {
if let Some(c) = &self.compare {
if !c.params.is_empty() {
return c.params.clone();
}
let ignore_lookup: HashSet<String> = c.ignore_params.iter().cloned().collect();
return self
.request
.clone()
.params
.into_iter()
.filter(|p| !ignore_lookup.contains(&p.param))
.chain(c.add_params.clone())
.collect();
}
Vec::new()
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct RequestResponseDescriptor {
pub request: RequestDescriptor,
pub response: Option<ResponseDescriptor>,
}
impl RequestResponseDescriptor {
pub fn new_opt(
reqresp_opt: Option<file::UnvalidatedRequestResponse>,
variables: &[Variable],
) -> Result<Option<RequestResponseDescriptor>, validation::Error> {
match reqresp_opt {
Some(reqresp) => Ok(Some(RequestResponseDescriptor {
request: RequestDescriptor::new(reqresp.request, variables)?,
response: ResponseDescriptor::new_opt(reqresp.response, variables)?,
})),
None => Ok(None),
}
}
}
pub struct ResolvedRequest {
pub url: String,
pub method: http::Method,
pub headers: Vec<(String, String)>,
pub body: Option<serde_json::Value>,
}
impl ResolvedRequest {
pub fn new(
url: String,
method: http::Method,
headers: Vec<(String, String)>,
body: Option<serde_json::Value>,
) -> ResolvedRequest {
ResolvedRequest {
url,
method,
headers,
body,
}
}
}
#[derive(Debug, Clone, Serialize, Deserialize, PartialEq)]
pub struct CleanupDescriptor {
pub onsuccess: Option<RequestDescriptor>,
pub onfailure: Option<RequestDescriptor>,
pub always: Option<RequestDescriptor>,
}
impl CleanupDescriptor {
pub fn new(
cleanup_opt: Option<file::UnvalidatedCleanup>,
variables: &[Variable],
) -> Result<CleanupDescriptor, validation::Error> {
match cleanup_opt {
Some(cleanup) => Ok(CleanupDescriptor {
onsuccess: RequestDescriptor::new_opt(cleanup.onsuccess, variables)?,
onfailure: RequestDescriptor::new_opt(cleanup.onfailure, variables)?,
always: RequestDescriptor::new_opt(cleanup.always, variables)?,
}),
None => Ok(CleanupDescriptor {
onsuccess: None,
onfailure: None,
always: None,
}),
}
}
}
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/json/extractor.rs | src/json/extractor.rs | use serde_json::{json, Map, Value};
use std::error::Error;
pub fn extract_json(
path: &str,
depth: usize,
json: serde_json::Value,
) -> Result<serde_json::Value, Box<dyn Error + Send + Sync>> {
let path_segments: Vec<&str> = path.split('.').collect();
// println!("path ({}), depth({}), json({})", path, depth, json);
if depth + 1 > path_segments.len() {
return Ok(json);
}
if path_segments.len() == depth + 1 {
let segment = path_segments[depth];
// println!("segment ({})", segment);
match json {
serde_json::Value::Object(_) => {
let map: Map<String, Value> = serde_json::from_value(json)?;
if map.contains_key(segment) {
return Ok(map.get(segment).unwrap().to_owned());
}
return Err(Box::from("path not found".to_string()));
}
serde_json::Value::Array(a) => {
let mut results = Vec::new();
for item in a.into_iter() {
if let serde_json::Value::Object(_) = item {
results.push(extract_json(path, depth, item)?);
}
}
return Ok(json!(results));
}
_ => return Ok(json.clone()),
}
}
let current_segment = path_segments[depth];
// println!("current_segment ({})", current_segment);
match json {
serde_json::Value::Object(_) => {
let map: Map<String, Value> = serde_json::from_value(json)?;
if map.contains_key(current_segment) {
return extract_json(
path,
depth + 1,
map.get(current_segment)
.unwrap_or(&serde_json::Value::Null)
.clone(),
);
}
Err(Box::from("path not found".to_string()))
}
serde_json::Value::Array(a) => {
let mut results: Vec<serde_json::Value> = Vec::new();
for item in a.into_iter() {
if let serde_json::Value::Object(_) = item {
if let Ok(r) = extract_json(path, depth, item) {
results.push(r)
}
}
}
let mut flattened = Vec::new();
for r in results.iter().cloned() {
if let serde_json::Value::Array(a) = r {
for i in a {
flattened.push(i);
}
}
}
if !flattened.is_empty() {
return Ok(json!(flattened));
}
if !results.is_empty() {
return Ok(json!(results));
}
Err(Box::from("path not found".to_string()))
}
_ => Err(Box::from("path not found".to_string())),
}
}
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/json/filter.rs | src/json/filter.rs | use serde_json::{json, Map, Value};
use std::error::Error;
pub fn filter_json(
path: &str,
depth: usize,
json: serde_json::Value,
) -> Result<serde_json::Value, Box<dyn Error + Send + Sync>> {
let path_segments: Vec<&str> = path.split('.').collect();
if depth + 1 > path_segments.len() {
return Ok(json);
}
if path_segments.len() == depth + 1 {
let segment = path_segments[depth];
match json {
serde_json::Value::Object(_) => {
let mut map: Map<String, Value> = serde_json::from_value(json)?;
if map.contains_key(segment) {
map.remove(segment);
}
return Ok(json!(map));
}
serde_json::Value::Array(a) => {
let mut results = Vec::new();
for item in a.into_iter() {
match item {
serde_json::Value::Object(_) => {
results.push(filter_json(path, depth, item)?);
}
_ => results.push(item),
}
}
return Ok(json!(results));
}
_ => return Ok(json),
}
}
let current_segment = path_segments[depth];
match json {
serde_json::Value::Object(_) => {
let mut map: Map<String, Value> = serde_json::from_value(json)?;
if map.contains_key(current_segment) {
let result = filter_json(
path,
depth + 1,
map.get(current_segment)
.unwrap_or(&serde_json::Value::Null)
.clone(),
)?;
map.remove(current_segment);
map.insert(current_segment.to_string(), result);
}
Ok(json!(map))
}
serde_json::Value::Array(a) => {
let mut results = Vec::new();
for item in a.into_iter() {
match item {
serde_json::Value::Object(_) => {
results.push(filter_json(path, depth, item)?);
}
_ => results.push(item),
}
}
Ok(json!(results))
}
_ => Ok(json),
}
}
#[cfg(test)]
mod test {
use crate::json::filter::filter_json;
#[tokio::test]
async fn object() {
let input_data = r#"{
"test": "name",
"items": [{
"one": 1,
"two": 2
},
{
"one": 3,
"two": 4
},
{
"one": 5,
"two": 6
}]
}"#;
let expected_data = r#"{
"test": "name"
}"#;
let result = filter_json("items", 0, serde_json::from_str(input_data).unwrap()).unwrap();
let expected_result: serde_json::Value = serde_json::from_str(expected_data).unwrap();
assert_eq!(result, expected_result);
}
#[tokio::test]
async fn array_object() {
let input_data = r#"[{
"test": "name",
"items": [{
"one": 1,
"two": 2
},
{
"one": 3,
"two": 4
},
{
"one": 5,
"two": 6
}]
},{
"test": "name2",
"items": []
}]"#;
let expected_data = r#"[{
"test": "name"
},{
"test": "name2"
}]"#;
let result = filter_json("items", 0, serde_json::from_str(input_data).unwrap()).unwrap();
let expected_result: serde_json::Value = serde_json::from_str(expected_data).unwrap();
assert_eq!(result, expected_result);
}
#[tokio::test]
async fn object_array_object() {
let input_data = r#"{
"test": "name",
"items": [{
"one": 1,
"two": 2
},
{
"one": 3,
"two": 4
},
{
"one": 5,
"two": 6
}]
}"#;
let expected_data = r#"{
"test": "name",
"items": [{
"one": 1
},
{
"one": 3
},
{
"one": 5
}]
}"#;
let result =
filter_json("items.two", 0, serde_json::from_str(input_data).unwrap()).unwrap();
let expected_result: serde_json::Value = serde_json::from_str(expected_data).unwrap();
assert_eq!(result, expected_result);
}
#[tokio::test]
async fn array_object_array_object() {
let input_data = r#"[{
"test": "name",
"items": [{
"one": 1,
"two": 2
},
{
"one": 3,
"two": 4
},
{
"one": 5,
"two": 6
}]
},{
"test": "name2",
"items": [{
"one": 1,
"two": 10
}]
}]"#;
let expected_data = r#"[{
"test": "name",
"items": [{
"one": 1
},
{
"one": 3
},
{
"one": 5
}]
},{
"test": "name2",
"items": [{
"one": 1
}]
}]"#;
let result =
filter_json("items.two", 0, serde_json::from_str(input_data).unwrap()).unwrap();
let expected_result: serde_json::Value = serde_json::from_str(expected_data).unwrap();
assert_eq!(result, expected_result);
}
#[tokio::test]
async fn no_matches() {
let input_data = r#"[{
"test": "name",
"items": [{
"one": 1,
"two": 2
},
{
"one": 3,
"two": 4
},
{
"one": 5,
"two": 6
}]
},{
"test": "name2",
"items": [{
"one": 1,
"two": 10
}]
}]"#;
let result =
filter_json("items.three", 0, serde_json::from_str(input_data).unwrap()).unwrap();
let expected_result: serde_json::Value = serde_json::from_str(input_data).unwrap();
assert_eq!(result, expected_result);
}
}
| rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
jikkenio/jikken | https://github.com/jikkenio/jikken/blob/1849e4e0f36a7d7f90c4ce21503a035e75a72ada/src/executor/report.rs | src/executor/report.rs | pub struct Report {
run: u16,
passed: u16,
failed: u16,
} | rust | MIT | 1849e4e0f36a7d7f90c4ce21503a035e75a72ada | 2026-01-04T20:22:41.466161Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/serde.rs | crates/core-macros/src/serde.rs | //! Methods and types for (de)serialization.
pub mod as_str_as_ref_str;
pub mod debug_as_ref_str;
pub mod deserialize_from_cow_str;
pub mod display_as_ref_str;
pub mod enum_as_ref_str;
pub mod enum_from_string;
pub mod eq_as_ref_str;
pub mod ord_as_ref_str;
pub mod serialize_as_ref_str;
mod util;
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/lib.rs | crates/core-macros/src/lib.rs | #![doc(html_favicon_url = "https://ruma.dev/favicon.ico")]
#![doc(html_logo_url = "https://ruma.dev/images/logo.png")]
//! Procedural macros used by ruma crates.
//!
//! See the documentation for the individual macros for usage details.
// #![cfg_attr(feature = "__internal_macro_expand", feature(proc_macro_expand))]
#![warn(missing_docs)]
#![allow(unreachable_pub)]
// https://github.com/rust-lang/rust-clippy/issues/9029
#![allow(clippy::derive_partial_eq_without_eq)]
use proc_macro::TokenStream;
use quote::quote;
use palpo_identifiers_validation::{
base64_public_key, event_id, mxc_uri, room_alias_id, room_id, room_version_id, server_name,
server_signing_key_version, user_id,
};
use syn::{DeriveInput, ItemEnum, ItemStruct, parse_macro_input};
mod events;
mod identifiers;
mod serde;
mod util;
use self::{
events::{
event::expand_event,
event_content::expand_event_content,
event_enum::{EventEnumInput, expand_event_enum},
event_enum_from_event::expand_event_enum_from_event,
},
identifiers::{constructor::IdentifierConstructor, id_dst::expand_id_dst},
serde::{
as_str_as_ref_str::expand_as_str_as_ref_str, debug_as_ref_str::expand_debug_as_ref_str,
deserialize_from_cow_str::expand_deserialize_from_cow_str,
display_as_ref_str::expand_display_as_ref_str, enum_as_ref_str::expand_enum_as_ref_str,
enum_from_string::expand_enum_from_string, eq_as_ref_str::expand_eq_as_ref_str,
ord_as_ref_str::expand_ord_as_ref_str, serialize_as_ref_str::expand_serialize_as_ref_str,
},
};
/// Generates enums to represent the various Matrix event types.
///
/// # Generated types
///
/// This generates the following enums for each kind:
///
/// * `Any{Kind}EventContent`
/// * `Any{Kind}Event`
/// * `{Kind}EventType`
///
/// It also generates the following enums:
///
/// * `AnySync{Kind}Event` for the kinds that have a different format in the `/sync` API:
/// `EphemeralRoom`, `MessageLike` and `State`.
/// * `TimelineEventType` which includes the variants from `MessageLikeEventType` and
/// `StateEventType`
/// * And extra enums for the `State` kind:
/// * `AnyInitialStateEvent` for state events sent during room creation.
/// * `AnyStrippedStateEvent` for state events that are in room state previews when receiving
/// invites.
/// * `AnyFullStateEventContent` a helper type to be able to access the `content` and
/// `prev_content` of a state event.
///
/// This macro also implements the following traits for these enums, where it makes sense:
///
/// * `Serialize`
/// * `Deserialize` or `EventContentFromType`
/// * `{Kind}EventContent`
/// * Conversion from event type to enum variant, like:
/// * `From<{event_content_type}> for Any{Kind}EventContent`
/// * `From<{event_type}> for `Any{Kind}Event`
///
/// By default, the enums generated by this macro get a `#[non_exhaustive]` attribute.
///
/// # Syntax
///
/// The basic syntax for using this macro is:
///
/// ```ignore
/// event_enum! {
/// enum Kind {
/// "m.first_event_type" => path::to::first_event,
/// "m.second_event_type" => path::to::second_event,
/// }
///
/// // …
/// }
/// ```
///
/// ## Enum Kind
///
/// The kind must be one of these values, which matches the [`EventContent`] macro:
///
/// * `MessageLike` - A message-like event sent in the timeline
/// * `State` - A state event sent in the timeline
/// * `GlobalAccountData` - Global config event
/// * `RoomAccountData` - Per-room config event
/// * `ToDevice` - Event sent directly to a device
/// * `EphemeralRoom` - Event that is not persistent in the room
///
/// ## Event types
///
/// The first part of the event type declaration, before the arrow, is the string matching the
/// `type` of the event, as defined in the Matrix specification. It must match the `type` attribute
/// of the [`EventContent`] definition. Account data event types can end with `.*` in case the end
/// of the event type changes dynamically, see the docs of [`EventContent`] for more details.
///
/// This type is used by the enums to know which variant to deserialize according to the `type` that
/// can be found in the JSON data.
///
/// This macro supports deserialization from a second event type string with the `alias` attribute,
/// which can be useful to support deserializing an event using both its stable and unstable
/// prefixes, like this:
///
/// ```ignore
/// event_enum! {
/// enum MessageLike {
/// #[palpo_enum(alias = "dev.ruma.unstable.foo")]
/// "m.foo" => path::to::foo,
/// }
/// }
/// ```
///
/// By default, this macro tries to generate the event types names from the event type string. It
/// only recognizes strings that start with the `m.` prefix, which matches stable event types from
/// the Matrix specification. From there it generates a base name by capitalizing every word,
/// assuming that words are separated by `.` or `_`. For example, `m.foo.bar` will have the base
/// name `FooBar`.
///
/// If the base name is incorrect, or the event type string uses an unstable prefix, the base name
/// can be provided with the `ident` attribute, for example:
///
/// ```ignore
/// event_enum! {
/// enum MessageLike {
/// #[palpo_enum(ident = FooBar)]
/// "dev.ruma.foo_bar" => path::to::foo_bar,
/// }
/// }
/// ```
///
/// The second part of the event type declaration, after the arrow, is the path of the module where
/// the event types can be found.
///
/// This macro will then assume that all the necessary types are available in the given module to
/// generate the code for the enums, as if the [`EventContent`] macro was used on a type named
/// `{base_name}EventContent`.
///
/// You can use `cargo doc` to find out more details, its `--document-private-items` flag also lets
/// you generate documentation for binaries or private parts of a library.
///
/// # Example
///
/// ```ignore
/// # // HACK: This is "ignore" because of cyclical dependency drama.
/// use palpo_core_macros::event_enum;
///
/// event_enum! {
/// enum ToDevice {
/// "m.any.event" => super::any_event,
/// #[palpo_enum(alias = "dev.unstable.prefix.other.event")]
/// "m.other.event" => super::other_event,
/// #[cfg(feature = "unstable-mscXXXX")]
/// #[palpo_enum(ident = NewEventEventContent)]
/// "org.matrix.mscXXXX.new_event" => super::new_event,
/// }
///
/// enum State {
/// "m.more.events" => super::more_events,
/// "m.different.event" => super::different_event,
/// }
/// }
/// ```
#[proc_macro]
pub fn event_enum(input: TokenStream) -> TokenStream {
let input = syn::parse_macro_input!(input as EventEnumInput);
expand_event_enum(input).into()
}
/// Generates traits implementations and types for an event content.
///
/// # Trait implementations
///
/// This macro implements the following traits for the type on which it is applied:
///
/// * `{kind}EventContent`
/// * `StaticEventContent`
/// * `StaticStateEventContent`, for the `State` kind.
///
/// # Generated types
///
/// It also generates type aliases and modified clones depending on the kind of event. To generate
/// the base name of those types, the macro simply removes `Content` from the name of the type,
/// which means that to apply this macro to a type, its name must always end with `Content`. And for
/// compatibility with the [`event_enum!`] macro, the name should actually end with `EventContent`.
///
/// Some kinds can generate a modified clone of the event content type. For instance, for an event
/// content type named `FooEventContent`:
///
/// * `RedactedFooEventContent`: the redacted form of the event content, for the `MessageLike` and
/// `State` kinds. It also generates the `RedactContent` implementation which applies the
/// redaction algorithm according to the Matrix specification.
///
/// The generated type implements `Redacted{Kind}EventContent`, `StaticEventContent`, `Serialize`
/// and `Deserialize`.
///
/// The generation only works if the type is a struct with named fields. To keep a field after
/// redaction, the `#[palpo_event(skip_redaction)]` attribute can be applied to that field.
///
/// To skip the generation of this type and trait to implement a custom redaction, or because it
/// is not a struct with named fields, the `#[palpo_event(custom_redacted)]` attribute can be used
/// on the container. The `RedactedFooEventContent` type must still exist and implement the same
/// traits, even if it is only a type alias, and the `RedactContent` trait must still be
/// implemented for those kinds.
/// * `PossiblyRedactedFooEventContent`: the form of the event content that is used when we don't
/// know whether a `State` event is redacted or not. It means that on this type any field that is
/// redacted must be optional, or it must have the `#[serde(default)]` attribute for
/// deserialization.
///
/// The generated type implements `PossiblyRedactedStateEventContent`, `StaticEventContent`,
/// `Serialize` and `Deserialize`.
///
/// The generation uses the rules as the redacted type, using the `#[palpo_event(skip_redaction)]`
/// attribute.
///
/// To skip the generation of this type to use a custom type, the
/// `#[palpo_event(custom_possibly_redacted)]` attribute can be used on the container. The
/// `PossiblyRedactedFooEventContent` type must still exist for the `State` kind and implement the
/// same traits, even if it is only a type alias.
///
/// Event content types of the `MessageLike` kind that use the `Relation` type also need a clone of
/// the event content without the `relates_to` field for use within relations, where nested
/// relations are not meant to be serialized by homeservers. This macro can generate a
/// `FooEventContentWithoutRelation` type if the `#[palpo_event(without_relation)]` attribute is
/// applied on the container. It also generates `From<FooEventContent> for
/// FooEventContentWithoutRelation` and `FooEventContentWithoutRelation::with_relation()`.
///
/// By default, the generated types get a `#[non_exhaustive]` attribute.
///
/// # Type aliases
///
/// All kinds generate at least one type alias for the full event format. For the same example type
/// named `FooEventContent`, the first type alias generated is `type FooEvent =
/// {Kind}Event<FooEventContent>`.
///
/// The only exception for this is if the type has the `GlobalAccountData + RoomAccountData` kinds,
/// it generates two type aliases with prefixes:
///
/// * `type GlobalFooEvent = GlobalAccountDataEvent<FooEventContent>`
/// * `type RoomFooEvent = RoomAccountDataEvent<FooEventContent>`
///
/// Some kinds generate more type aliases:
///
/// * `type SyncFooEvent = Sync{Kind}Event<FooEventContent>`: an event received via the `/sync` API,
/// for the `MessageLike`, `State` and `EphemeralRoom` kinds
/// * `type OriginalFooEvent = Original{Kind}Event<FooEventContent>`, a non-redacted event, for the
/// `MessageLike` and `State` kinds
/// * `type OriginalSyncFooEvent = OriginalSync{Kind}Event<FooEventContent>`, a non-redacted event
/// received via the `/sync` API, for the `MessageLike` and `State` kinds
/// * `type RedactedFooEvent = Redacted{Kind}Event<RedactedFooEventContent>`, a redacted event, for
/// the `MessageLike` and `State` kinds
/// * `type RedactedSyncFooEvent = RedactedSync{Kind}Event<RedactedFooEventContent>`, a redacted
/// event received via the `/sync` API, for the `MessageLike` and `State` kinds
/// * `type InitialFooEvent = InitialStateEvent<FooEventContent>`, an event sent during room
/// creation, for the `State` kind
/// * `type StrippedFooEvent = StrippedStateEvent<PossiblyRedactedFooEventContent>`, an event that
/// is in a room state preview when receiving an invite, for the `State` kind
///
/// You can use `cargo doc` to find out more details, its `--document-private-items` flag also lets
/// you generate documentation for binaries or private parts of a library.
///
/// # Syntax
///
/// The basic syntax for using this macro is:
///
/// ```ignore
/// #[derive(Clone, Debug, Deserialize, Serialize, EventContent)]
/// #[palpo_event(type = "m.foo_bar", kind = MessageLike)]
/// pub struct FooBarEventContent {
/// data: String,
/// }
/// ```
///
/// ## Container attributes
///
/// The following settings can be used on the container, with the `#[palpo_event(_)]` attribute.
/// `type` and `kind` are always required.
///
/// ### `type = "m.event_type"`
///
/// The `type` of the event according to the Matrix specification, always required. This is usually
/// a string with an `m.` prefix.
///
/// Types with an account data kind can also use the `.*` suffix, if the end of the type changes
/// dynamically. It must be associated with a field that has the `#[palpo_event(type_fragment)]`
/// attribute that will store the end of the event type. Those types have the
/// `StaticEventContent::IsPrefix` type set to `True`.
///
/// ### `kind = Kind`
///
/// The kind of the event, always required. It must be one of these values, which matches the
/// [`event_enum!`] macro:
///
/// * `MessageLike` - A message-like (i.e. non-state) event sent in the timeline
/// * `State` - A state event sent in the timeline
/// * `GlobalAccountData` - Global config event
/// * `RoomAccountData` - Per-room config event
/// * `ToDevice` - Event sent directly to a device
/// * `EphemeralRoom` - Event that is not persistent in the room
///
/// It is possible to implement both account data kinds for the same type by using the syntax `kind
/// = GlobalAccountData + RoomAccountData`.
///
/// ### `alias = "m.event_type"`
///
/// An alternate `type` for the event, used during deserialization. It is usually used for
/// deserializing an event type using both its stable and unstable prefix.
///
/// ### `state_key = StringType`
///
/// The type of the state key of the event, required and only supported if the kind is `State`. This
/// type should be a string type like `String`, `EmptyStateKey` or an identifier type generated with
/// the `IdDst` macro.
///
/// ### `unsigned_type = UnsignedType`
///
/// A custom type to use for the `Unsigned` type of the `StaticStateEventContent` implementation if
/// the kind is `State`. Only necessary if the `StateUnsigned` type is not appropriate for this
/// type.
///
/// ### `custom_redacted`
///
/// If the kind requires a `Redacted{}EventContent` type and a `RedactContent` implementation and it
/// is not possible to generate them with the macro, setting this attribute prevents the macro from
/// trying to generate them. The type and trait must be implemented manually.
///
/// ### `custom_possibly_redacted`
///
/// If the kind requires a `PossiblyRedacted{}EventContent` type and it is not possible to generate
/// it with the macro, setting this attribute prevents the macro from trying to generate it. The
/// type must be implemented manually.
///
/// ### `without_relation`
///
/// If this is set, the macro will try to generate an `{}EventContentWithoutRelation` which is a
/// clone of the current type with the `relates_to` field removed.
///
/// ## Field attributes
///
/// The following settings can be used on the fields of a struct, with the `#[palpo_event(_)]`
/// attribute.
///
/// ### `skip_redaction`
///
/// If a `Redacted{}EventContent` type is generated by the macro, this field will be kept after
/// redaction.
///
/// ### `type_fragment`
///
/// If the event content's kind is account data and its type ends with the `.*`, this field is
/// required and will store the end of the event's type.
///
/// # Example
///
/// An example can be found in the docs at the root of `palpo_events`.
#[proc_macro_derive(EventContent, attributes(palpo_event))]
pub fn derive_event_content(input: TokenStream) -> TokenStream {
    let parsed = parse_macro_input!(input as DeriveInput);
    // On failure, emit a `compile_error!` invocation so the user sees the
    // error at the macro call site.
    match expand_event_content(parsed) {
        Ok(tokens) => tokens.into(),
        Err(error) => error.into_compile_error().into(),
    }
}
/// Generates trait implementations for Matrix event types.
///
/// This macro parses the name of the type on which it is applied to decide what to do, which means
/// that it only works on a fixed list of types. It also requires the type to be a struct with named
/// fields, with one of these fields named `content`.
///
/// This macro implements at least `Deserialize` for the type on which it is applied.
///
/// If the type is an `OriginalSync` or `RedactedSync` event, this implements conversion
/// helpers to the non-sync version of the event type. For example if the event type is
/// `OriginalSyncMessageLikeEvent`, this will generate `From<OriginalMessageLikeEvent> for
/// OriginalSyncMessageLikeEvent` and `OriginalSyncMessageLikeEvent::into_full_event()`.
///
/// If the type is a non-stripped timeline event, i.e. a struct with an `event_id` field, this
/// implements `PartialEq`, `Eq`, `PartialOrd` and `Ord` by comparing the `event_id` fields.
///
/// ## Field attributes
///
/// The following settings can be used on the fields of the struct, with the `#[palpo_event(_)]`
/// attribute.
///
/// ### `default`
///
/// If the field is missing, its `Default` implementation is used.
///
/// ### `default_on_error`
///
/// If an error occurs during deserialization of the value of this field, its `Default`
/// implementation is used. The error is logged with the [tracing] crate at the debug level, which
/// means that it must be a dependency of the crate where the macro is used.
///
/// ### `rename = "serialized_name"`
///
/// Use a different name when the field is serialized. The name is used both during serialization
/// and deserialization.
///
/// ### `alias = "alt_name"`
///
/// Allow a different name for the field during deserialization. This can be used several times for
/// different aliases.
///
/// You can use `cargo doc` to find out more details, its `--document-private-items` flag also lets
/// you generate documentation for binaries or private parts of a library.
///
/// [tracing]: https://crates.io/crates/tracing
#[proc_macro_derive(Event, attributes(palpo_event))]
pub fn derive_event(input: TokenStream) -> TokenStream {
    let parsed = parse_macro_input!(input as DeriveInput);
    // Turn expansion failures into a `compile_error!` at the call site.
    match expand_event(parsed) {
        Ok(tokens) => tokens.into(),
        Err(error) => error.into_compile_error().into(),
    }
}
/// Generates `From` implementations for an enum for all its variants.
#[proc_macro_derive(EventEnumFromEvent)]
pub fn derive_from_event_to_enum(input: TokenStream) -> TokenStream {
    let parsed = parse_macro_input!(input as DeriveInput);
    match expand_event_enum_from_event(parsed) {
        Ok(tokens) => tokens.into(),
        Err(error) => error.into_compile_error().into(),
    }
}
/// Generate methods and trait impl's for DST identifier type.
///
/// This macro generates an `Owned*` wrapper type for the identifier type. This wrapper type is
/// variable, by default it'll use [`Box`], but it can be changed at compile time
/// by setting `--cfg=palpo_identifiers_storage=...` using `RUSTFLAGS` or `.cargo/config.toml` (under
/// `[build]` -> `rustflags = ["..."]`). Currently the only supported value is `Arc`, that uses
/// [`Arc`](std::sync::Arc) as a wrapper type.
///
/// This macro implements:
///
/// * Conversions to and from string types, `AsRef<[u8]>` and `AsRef<str>`, as well as `as_str()`
/// and `as_bytes()` methods. The borrowed type can be converted from a borrowed string without
/// allocation.
/// * Conversions to and from borrowed and owned type.
/// * `Deref`, `AsRef` and `Borrow` to the borrowed type for the owned type.
/// * `PartialEq` implementations for testing equality with string types and owned and borrowed
/// types.
///
/// # Attributes
///
/// * `#[palpo_id(validate = PATH)]`: the path to a function to validate the string during parsing
/// and deserialization. By default, the types implement `From` string types, when this is set
/// they implement `TryFrom`.
///
/// # Examples
///
/// ```ignore
/// # // HACK: This is "ignore" because of cyclical dependency drama.
/// use palpo_core_macros::IdDst;
///
/// #[derive(PartialEq, Eq, PartialOrd, Ord, Hash, IdDst)]
/// #[palpo_id(validate = palpo_identifiers_validation::user_id::validate)]
/// pub struct UserId(str);
/// ```
#[proc_macro_derive(IdDst, attributes(palpo_id))]
pub fn derive_id_dst(input: TokenStream) -> TokenStream {
    let item = parse_macro_input!(input as ItemStruct);
    match expand_id_dst(item) {
        Ok(tokens) => tokens.into(),
        Err(error) => error.into_compile_error().into(),
    }
}
/// Compile-time checked `EventId` construction.
#[proc_macro]
pub fn event_id(input: TokenStream) -> TokenStream {
    // Validate the literal at compile time and emit the `&EventId` expression.
    parse_macro_input!(input as IdentifierConstructor)
        .validate_and_expand_str_conversion("&EventId", event_id::validate)
        .into()
}
/// Compile-time checked `RoomAliasId` construction.
#[proc_macro]
pub fn room_alias_id(input: TokenStream) -> TokenStream {
    // Validate the literal at compile time and emit the `&RoomAliasId` expression.
    parse_macro_input!(input as IdentifierConstructor)
        .validate_and_expand_str_conversion("&RoomAliasId", room_alias_id::validate)
        .into()
}
/// Compile-time checked `RoomId` construction.
#[proc_macro]
pub fn room_id(input: TokenStream) -> TokenStream {
    // Validate the literal at compile time and emit the `&RoomId` expression.
    parse_macro_input!(input as IdentifierConstructor)
        .validate_and_expand_str_conversion("&RoomId", room_id::validate)
        .into()
}
/// Compile-time checked `RoomVersionId` construction.
#[proc_macro]
pub fn room_version_id(input: TokenStream) -> TokenStream {
    // Note: this one expands to an owned `RoomVersionId`, not a reference.
    parse_macro_input!(input as IdentifierConstructor)
        .validate_and_expand_str_conversion("RoomVersionId", room_version_id::validate)
        .into()
}
/// Compile-time checked `ServerSigningKeyVersion` construction.
#[proc_macro]
pub fn server_signing_key_version(input: TokenStream) -> TokenStream {
    // Validate the literal at compile time and emit the `&ServerSigningKeyVersion` expression.
    parse_macro_input!(input as IdentifierConstructor)
        .validate_and_expand_str_conversion(
            "&ServerSigningKeyVersion",
            server_signing_key_version::validate,
        )
        .into()
}
/// Compile-time checked `ServerName` construction.
#[proc_macro]
pub fn server_name(input: TokenStream) -> TokenStream {
    // Validate the literal at compile time and emit the `&ServerName` expression.
    parse_macro_input!(input as IdentifierConstructor)
        .validate_and_expand_str_conversion("&ServerName", server_name::validate)
        .into()
}
/// Compile-time checked `MxcUri` construction.
#[proc_macro]
pub fn mxc_uri(input: TokenStream) -> TokenStream {
    // Validate the literal at compile time and emit the `&MxcUri` expression.
    parse_macro_input!(input as IdentifierConstructor)
        .validate_and_expand_str_conversion("&MxcUri", mxc_uri::validate)
        .into()
}
/// Compile-time checked `UserId` construction.
///
/// The user ID is validated using the same rules as `UserId::validate_strict()`.
#[proc_macro]
pub fn user_id(input: TokenStream) -> TokenStream {
    // Validate the literal at compile time and emit the `&UserId` expression.
    parse_macro_input!(input as IdentifierConstructor)
        .validate_and_expand_str_conversion("&UserId", user_id::validate)
        .into()
}
/// Compile-time checked `Base64PublicKey` construction.
#[proc_macro]
pub fn base64_public_key(input: TokenStream) -> TokenStream {
    // Validate the literal at compile time and emit the `&Base64PublicKey` expression.
    parse_macro_input!(input as IdentifierConstructor)
        .validate_and_expand_str_conversion("&Base64PublicKey", base64_public_key::validate)
        .into()
}
/// Derive the `AsRef<str>` trait for an enum.
///
/// The enum can contain unit variants, or tuple or struct variants containing a single field
/// which is a newtype struct around a type implementing `Deref` with a `Target` of `str`.
#[doc = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/docs/palpo_enum_attributes.md"))]
///
/// ## Example
///
/// ```
/// # use palpo_core_macros::AsRefStr;
/// #[derive(AsRefStr)]
/// #[palpo_enum(rename_all = "lowercase")]
/// pub enum MyEnum {
/// Unit,
/// #[palpo_enum(rename = "unstable_other_unit")]
/// OtherUnit,
/// Struct {
/// inner: PrivOwnedStr,
/// },
/// Tuple(PrivOwnedStr),
/// }
///
/// pub struct PrivOwnedStr(Box<str>);
/// ```
#[proc_macro_derive(AsRefStr, attributes(palpo_enum))]
pub fn derive_enum_as_ref_str(input: TokenStream) -> TokenStream {
    let item = parse_macro_input!(input as ItemEnum);
    match expand_enum_as_ref_str(&item) {
        Ok(tokens) => tokens.into(),
        Err(error) => error.into_compile_error().into(),
    }
}
/// Derive the `From<T: AsRef<str> + Into<Box<str>>>` trait for an enum.
///
/// The enum can contain any number of unit variants, and must contain a single tuple or struct
/// variant containing a single field which is a newtype struct around a `Box<str>`. This tuple or
/// struct variant will be used as a fallback to catch any string that doesn't match any of the unit
/// variants.
///
/// The string to convert from must match exactly the expected string representation of a unit
/// variants to be converted to it. If there is a difference of case, it will match the fallback
/// variant instead.
#[doc = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/docs/palpo_enum_attributes.md"))]
/// * `#[palpo_enum(alias = "value")]` - Allow this variant to be converted from a string that is not
/// its main string representation, which is `value`. This attribute can be used several times to
/// match more strings.
///
/// _Note that for this macro, there is no difference between `rename` and `alias`. It only matters
/// when used with [`AsRefStr`]._
///
/// ## Example
///
/// ```
/// # use palpo_core_macros::FromString;
/// #[derive(FromString)]
/// #[palpo_enum(rename_all = "lowercase")]
/// pub enum MyEnum {
/// Unit,
/// #[palpo_enum(rename = "stable_other_unit", alias = "unstable_other_unit")]
/// OtherUnit,
/// #[doc(hidden)]
/// _Custom(PrivOwnedStr),
/// }
///
/// pub struct PrivOwnedStr(Box<str>);
/// ```
#[proc_macro_derive(FromString, attributes(palpo_enum))]
pub fn derive_enum_from_string(input: TokenStream) -> TokenStream {
    let item = parse_macro_input!(input as ItemEnum);
    match expand_enum_from_string(&item) {
        Ok(tokens) => tokens.into(),
        Err(error) => error.into_compile_error().into(),
    }
}
// FIXME: The following macros aren't actually interested in type details beyond name (and possibly
// generics in the future). They probably shouldn't use `DeriveInput`.
/// Derive the `as_str()` method using the `AsRef<str>` implementation of the type.
#[proc_macro_derive(AsStrAsRefStr)]
pub fn derive_as_str_as_ref_str(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
expand_as_str_as_ref_str(&input.ident)
.unwrap_or_else(syn::Error::into_compile_error)
.into()
}
/// Derive the `fmt::Display` trait using the `AsRef<str>` implementation of the type.
#[proc_macro_derive(DisplayAsRefStr)]
pub fn derive_display_as_ref_str(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
expand_display_as_ref_str(&input.ident)
.unwrap_or_else(syn::Error::into_compile_error)
.into()
}
/// Derive the `fmt::Debug` trait using the `AsRef<str>` implementation of the type.
#[proc_macro_derive(DebugAsRefStr)]
pub fn derive_debug_as_ref_str(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
expand_debug_as_ref_str(&input.ident)
.unwrap_or_else(syn::Error::into_compile_error)
.into()
}
/// Derive the `Serialize` trait using the `AsRef<str>` implementation of the type.
#[proc_macro_derive(SerializeAsRefStr)]
pub fn derive_serialize_as_ref_str(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
expand_serialize_as_ref_str(&input.ident)
.unwrap_or_else(syn::Error::into_compile_error)
.into()
}
/// Derive the `Deserialize` trait using the `From<Cow<str>>` implementation of the type.
#[proc_macro_derive(DeserializeFromCowStr)]
pub fn derive_deserialize_from_cow_str(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
expand_deserialize_from_cow_str(&input.ident)
.unwrap_or_else(syn::Error::into_compile_error)
.into()
}
/// Derive the `Ord` and `PartialOrd` traits using the `AsRef<str>` implementation of the type.
#[proc_macro_derive(OrdAsRefStr)]
pub fn derive_ord_as_ref_str(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
expand_ord_as_ref_str(&input.ident)
.unwrap_or_else(syn::Error::into_compile_error)
.into()
}
/// Derive the `PartialEq` and `Eq` traits using the `AsRef<str>` implementation of the type.
#[proc_macro_derive(EqAsRefStr)]
pub fn derive_eq_as_ref_str(input: TokenStream) -> TokenStream {
let input = parse_macro_input!(input as DeriveInput);
expand_eq_as_ref_str(&input.ident)
.unwrap_or_else(syn::Error::into_compile_error)
.into()
}
/// Shorthand for the derives [`AsRefStr`], [`FromString`], [`DisplayAsRefStr`], [`DebugAsRefStr`],
/// [`SerializeAsRefStr`], [`DeserializeFromCowStr`], [`EqAsRefStr`] and [`OrdAsRefStr`].
///
/// The enum can contain any number of unit variants, and must contain a single tuple or struct
/// variant containing a single field which is a newtype struct around a `Box<str>`. This tuple or
/// struct variant will be used as a fallback to catch any string that doesn't match any of the unit
/// variants.
///
/// This will generate the following implementations:
///
/// * `AsRef<str>` to convert variants to their string representation, and the following
/// implementations based on it:
/// * `fn as_str(&self) -> &str`
/// * `fmt::Display`
/// * `fmt::Debug`
/// * `Serialize`
/// * `Ord` and `PartialOrd`
/// * `Eq` and `PartialEq`
/// * `From<T: AsRef<str> + Into<Box<str>>>` to convert a string to variants, and a `Deserialize`
/// implementation based on it. The string to convert from must match exactly the expected string
/// representation of a unit variants to be converted to it. If there is a difference of case, it
/// will match the fallback variant instead.
#[doc = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/docs/palpo_enum_attributes.md"))]
/// * `#[palpo_enum(alias = "value")]` - Allow this variant to be converted from a string that is not
/// its main string representation, which is `value`. This attribute can be used several times to
/// match more strings.
///
/// ## Example
///
/// ```ignore
/// # // HACK: This is "ignore" because of cyclical dependency drama.
/// #[derive(StringEnum)]
/// #[palpo_enum(rename_all = "lowercase")]
/// pub enum MyEnum {
/// Unit,
/// #[palpo_enum(rename = "stable_other_unit", alias = "unstable_other_unit")]
/// OtherUnit,
/// #[doc(hidden)]
/// _Custom(PrivOwnedStr),
/// }
///
/// pub struct PrivOwnedStr(Box<str>);
/// ```
#[proc_macro_derive(StringEnum, attributes(palpo_enum))]
pub fn derive_string_enum(input: TokenStream) -> TokenStream {
    // Runs every expansion this shorthand stands for and concatenates the
    // resulting token streams, bailing out on the first error.
    fn expand_all(input: ItemEnum) -> syn::Result<proc_macro2::TokenStream> {
        let mut expanded = expand_enum_as_ref_str(&input)?;
        expanded.extend(expand_enum_from_string(&input)?);
        let ident = &input.ident;
        expanded.extend(expand_as_str_as_ref_str(ident)?);
        expanded.extend(expand_display_as_ref_str(ident)?);
        expanded.extend(expand_debug_as_ref_str(ident)?);
        expanded.extend(expand_serialize_as_ref_str(ident)?);
        expanded.extend(expand_deserialize_from_cow_str(ident)?);
        expanded.extend(expand_eq_as_ref_str(ident)?);
        expanded.extend(expand_ord_as_ref_str(ident)?);
        Ok(expanded)
    }
    let item = parse_macro_input!(input as ItemEnum);
    match expand_all(item) {
        Ok(tokens) => tokens.into(),
        Err(error) => error.into_compile_error().into(),
    }
}
/// A derive macro that generates no code, but registers the serde attribute so both `#[serde(...)]`
/// and `#[cfg_attr(..., serde(...))]` are accepted on the type, its fields and (in case the input
/// is an enum) variants fields.
#[doc(hidden)]
#[proc_macro_derive(_FakeDeriveSerde, attributes(serde))]
pub fn fake_derive_serde(_input: TokenStream) -> TokenStream {
TokenStream::new()
} | rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
//! Methods and types for generating identifiers.
pub(crate) mod constructor;
pub(crate) mod id_dst;
use proc_macro_crate::{FoundCrate, crate_name};
use proc_macro2::TokenStream;
use quote::{ToTokens, format_ident, quote};
use syn::{
Attribute, Field, Ident, LitStr, meta::ParseNestedMeta, punctuated::Punctuated, visit::Visit,
};
/// Helper for resolving crate paths used in generated code.
pub(crate) struct NameSpace;
impl NameSpace {
    /// Resolves the path to the `palpo-core` crate from the caller's point of
    /// view: a renamed dependency (`palpo-core` first, then the `palpo`
    /// facade), falling back to the literal `::palpo_core` path.
    pub(crate) fn palpo_core() -> TokenStream {
        for candidate in ["palpo-core", "palpo"] {
            if let Ok(FoundCrate::Name(name)) = crate_name(candidate) {
                let ident = format_ident!("{name}");
                return quote! { ::#ident };
            }
        }
        quote! { ::palpo_core }
    }
    /// Path to the `serde` re-export inside `palpo-core`.
    pub(crate) fn serde() -> TokenStream {
        let root = Self::palpo_core();
        quote! { #root::__private::serde }
    }
    /// Path to the `serde_json` re-export inside `palpo-core`.
    pub(crate) fn serde_json() -> TokenStream {
        let root = Self::palpo_core();
        quote! { #root::__private::serde_json }
    }
}
/// CamelCase's a field ident like "foo_bar" to "FooBar".
///
/// Empty segments produced by leading, trailing or doubled underscores
/// (e.g. `_foo`, `foo__bar`) are skipped instead of panicking, and the
/// first character of each segment is uppercased via a char iterator so a
/// multi-byte first character no longer breaks the `&s[1..]` byte slicing.
pub(crate) fn to_camel_case(name: &Ident) -> Ident {
    let span = name.span();
    let name = name.to_string();
    let s: String = name
        .split('_')
        .filter(|seg| !seg.is_empty())
        .map(|seg| {
            let mut chars = seg.chars();
            // Safe: empty segments were filtered out above.
            let first = chars.next().expect("segment is non-empty");
            first.to_uppercase().chain(chars).collect::<String>()
        })
        .collect();
    Ident::new(&s, span)
}
/// Splits the given string on `.` and `_` removing the `m.` then camel casing to give a Rust type
/// name.
pub(crate) fn m_prefix_name_to_type_name(name: &LitStr) -> syn::Result<Ident> {
let span = name.span();
let name = name.value();
let name = name.strip_prefix("m.").ok_or_else(|| {
syn::Error::new(
span,
format!("well-known matrix events have to start with `m.` found `{name}`"),
)
})?;
let s: String = name
.strip_suffix(".*")
.unwrap_or(name)
.split(&['.', '_'] as &[char])
.map(|s| s.chars().next().unwrap().to_uppercase().to_string() + &s[1..])
.collect();
Ok(Ident::new(&s, span))
}
/// Wrapper around [`syn::Field`] that emits the field without its visibility,
/// thus making it private.
pub(crate) struct PrivateField<'a>(pub(crate) &'a Field);
impl ToTokens for PrivateField<'_> {
    fn to_tokens(&self, tokens: &mut TokenStream) {
        let field = self.0;
        // Fields carrying a restriction in `mutability` are not supported.
        assert_eq!(field.mutability, syn::FieldMutability::None);
        for attr in &field.attrs {
            attr.to_tokens(tokens);
        }
        // `field.vis` is deliberately omitted — that is what makes the
        // emitted field private.
        field.ident.to_tokens(tokens);
        field.colon_token.to_tokens(tokens);
        field.ty.to_tokens(tokens);
    }
}
// /// Expand the `cfg` and `cfg_attr` attributes on the given struct.
// #[cfg(feature = "__internal_macro_expand")]
// pub(crate) fn cfg_expand_struct(item: &mut syn::ItemStruct) {
// use std::mem;
// use proc_macro2::TokenTree;
// use syn::{Fields, LitBool, Meta, visit_mut::VisitMut};
// fn eval_cfg(cfg_expr: TokenStream) -> Option<bool> {
// let cfg_macro_call = quote! { ::core::cfg!(#cfg_expr) };
// let expanded = match proc_macro::TokenStream::from(cfg_macro_call).expand_expr() {
// Ok(t) => t,
// Err(e) => {
// eprintln!("failed to expand cfg! {e}");
// return None;
// }
// };
// let lit: LitBool = syn::parse(expanded).expect("cfg! must expand to a boolean literal");
// Some(lit.value())
// }
// fn tokentree_not_comma(tree: &TokenTree) -> bool {
// match tree {
// TokenTree::Punct(p) => p.as_char() != ',',
// _ => true,
// }
// }
// struct CfgAttrExpand;
// impl VisitMut for CfgAttrExpand {
// fn visit_attribute_mut(&mut self, attr: &mut Attribute) {
// if attr.meta.path().is_ident("cfg_attr") {
// // Ignore invalid cfg attributes
// let Meta::List(list) = &attr.meta else { return };
// let mut token_iter = list.tokens.clone().into_iter();
// // Take all the tokens until the first toplevel comma.
// // That's the cfg-expression part of cfg_attr.
// let cfg_expr: TokenStream = token_iter
// .by_ref()
// .take_while(tokentree_not_comma)
// .collect();
// let Some(cfg_value) = eval_cfg(cfg_expr) else {
// return;
// };
// if cfg_value {
// // If we had the whole attribute list and could emit more
// // than one attribute, we'd split the remaining arguments to
// // cfg_attr by commas and turn them into regular attributes
// //
// // Because we can emit only one, do the first and error if
// // there's any more after it.
// let attr_tokens: TokenStream = token_iter
// .by_ref()
// .take_while(tokentree_not_comma)
// .collect();
// if attr_tokens.is_empty() {
// // no-op cfg_attr??
// return;
// }
// attr.meta = syn::parse2(attr_tokens)
// .expect("syn must be able to parse cfg-attr arguments as syn::Meta");
// let rest: TokenStream = token_iter.collect();
// assert!(
// rest.is_empty(),
// "cfg_attr's with multiple arguments after the cfg expression are not \
// currently supported by __internal_macro_expand."
// );
// }
// }
// }
// }
// CfgAttrExpand.visit_item_struct_mut(item);
// let Fields::Named(fields) = &mut item.fields else {
// panic!("only named fields are currently supported by __internal_macro_expand");
// };
// // Take out all the fields
// 'fields: for mut field in mem::take(&mut fields.named) {
// // Take out all the attributes
// for attr in mem::take(&mut field.attrs) {
// // For non-cfg attrs, put them back
// if !attr.meta.path().is_ident("cfg") {
// field.attrs.push(attr);
// continue;
// }
// // Also put back / ignore invalid cfg attributes
// let Meta::List(list) = &attr.meta else {
// field.attrs.push(attr);
// continue;
// };
// // Also put back / ignore cfg attributes we can't eval
// let Some(cfg_value) = eval_cfg(list.tokens.clone()) else {
// field.attrs.push(attr);
// continue;
// };
// // Finally, if the cfg is `false`, skip the part where it's put back
// if !cfg_value {
// continue 'fields;
// }
// }
// // If `continue 'fields` above wasn't hit, we didn't find a cfg that
// // evals to false, so put the field back
// fields.named.push(field);
// }
// }
/// Helper trait for a [`syn::Field`] belonging to a `struct`.
pub(crate) trait StructFieldExt {
    /// Get a reference to the `ident` of this field.
    ///
    /// Panics if this is not a named field.
    fn ident(&self) -> &Ident;
    /// Get the `#[cfg]` attributes on this field.
    ///
    /// Only attributes whose path is exactly `cfg` are yielded.
    fn cfg_attrs(&self) -> impl Iterator<Item = &'_ Attribute>;
    /// Get the serde meta items on this field, if it has `#[serde(…)]` attributes.
    ///
    /// Meta items from all `#[serde(…)]` attributes are flattened into one
    /// iterator.
    fn serde_meta_items(&self) -> impl Iterator<Item = syn::Meta>;
    /// Whether this field has a `#[serde(…)]` containing the given meta item.
    fn has_serde_meta_item(&self, meta: SerdeMetaItem) -> bool;
}
impl StructFieldExt for Field {
    fn ident(&self) -> &Ident {
        // Only named (struct-style) fields carry an ident.
        match &self.ident {
            Some(ident) => ident,
            None => panic!("struct field should be named"),
        }
    }
    fn cfg_attrs(&self) -> impl Iterator<Item = &'_ Attribute> {
        self.attrs.iter().filter(|attr| attr.path().is_ident("cfg"))
    }
    fn serde_meta_items(&self) -> impl Iterator<Item = syn::Meta> {
        // Flatten the meta items of every `#[serde(…)]` attribute.
        self.attrs.iter().flat_map(|attr| attr.serde_meta_items())
    }
    fn has_serde_meta_item(&self, meta: SerdeMetaItem) -> bool {
        self.serde_meta_items().any(|item| item == meta)
    }
}
/// Possible meta items for `#[serde(…)]` attributes.
///
/// `Debug` is derived for diagnostics and `PartialEq`/`Eq` so items can be
/// compared directly; both are backward-compatible additions.
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
pub(crate) enum SerdeMetaItem {
    /// `flatten`.
    Flatten,
    /// `default`.
    Default,
    /// `rename`.
    Rename,
    /// `alias`.
    Alias,
}
impl SerdeMetaItem {
/// The string representation of this meta item.
fn as_str(self) -> &'static str {
match self {
Self::Flatten => "flatten",
Self::Default => "default",
Self::Rename => "rename",
Self::Alias => "alias",
}
}
}
impl PartialEq<SerdeMetaItem> for syn::Meta {
    /// A meta matches an item when its path is exactly that item's name.
    fn eq(&self, other: &SerdeMetaItem) -> bool {
        let expected = other.as_str();
        self.path().is_ident(expected)
    }
}
/// Helper trait for a [`syn::Attribute`].
pub(crate) trait AttributeExt {
    /// Get the list of meta items if this is a `#[serde(…)]` attribute.
    ///
    /// Yields nothing for non-`serde` attributes or when the argument list
    /// cannot be parsed as comma-separated meta items.
    fn serde_meta_items(&self) -> impl Iterator<Item = syn::Meta>;
}
impl AttributeExt for Attribute {
    fn serde_meta_items(&self) -> impl Iterator<Item = syn::Meta> {
        // For `#[serde(a, b = "c", …)]`, parse the parenthesized arguments as
        // comma-separated meta items; anything else yields no items.
        let items = match &self.meta {
            syn::Meta::List(list) if self.path().is_ident("serde") => list
                .parse_args_with(Punctuated::<syn::Meta, syn::Token![,]>::parse_terminated)
                .ok(),
            _ => None,
        };
        items.into_iter().flatten()
    }
}
/// Helper trait for a [`syn::Type`].
pub(crate) trait TypeExt {
    /// Get the inner type if this is wrapped in an `Option`.
    ///
    /// Matches on the last path segment being named `Option`, so aliases or
    /// non-std `Option` types would match too.
    fn option_inner_type(&self) -> Option<&syn::Type>;
    /// Whether this type has a lifetime.
    ///
    /// Checks the whole type recursively, including nested generic arguments.
    fn has_lifetime(&self) -> bool;
}
impl TypeExt for syn::Type {
    fn option_inner_type(&self) -> Option<&syn::Type> {
        // Only path types (`foo::bar::Option<T>`) can be an `Option`.
        let segments = match self {
            syn::Type::Path(syn::TypePath {
                path: syn::Path { segments, .. },
                ..
            }) => segments,
            _ => return None,
        };
        let last = segments.last().unwrap();
        if last.ident != "Option" {
            return None;
        }
        // `Option` must be written with angle-bracketed arguments; anything
        // else is a bug in the caller's input.
        let option_args = match &last.arguments {
            syn::PathArguments::AngleBracketed(syn::AngleBracketedGenericArguments {
                args, ..
            }) => args,
            _ => panic!("Option should use angle brackets"),
        };
        match option_args.first().unwrap() {
            syn::GenericArgument::Type(inner_type) => Some(inner_type),
            _ => panic!("Option brackets should contain type"),
        }
    }
    fn has_lifetime(&self) -> bool {
        // Walk the type with a visitor and record whether any lifetime
        // (named, anonymous or 'static) appears anywhere inside it.
        struct LifetimeFinder {
            found: bool,
        }
        impl<'ast> Visit<'ast> for LifetimeFinder {
            fn visit_lifetime(&mut self, _lt: &'ast syn::Lifetime) {
                self.found = true;
            }
        }
        let mut finder = LifetimeFinder { found: false };
        finder.visit_type(self);
        finder.found
    }
}
/// Generate code for a list of struct fields.
///
/// If the fields have `cfg` attributes, they are also used.
///
/// This generates code looking like this for each field:
///
/// ```ignore
/// #[cfg(feature = "my-feature")]
/// ident,
/// ```
pub(crate) fn expand_fields_as_list<'a>(
fields: impl IntoIterator<Item = &'a Field>,
) -> TokenStream {
fields
.into_iter()
.map(|field| {
let ident = field.ident();
let cfg_attrs = field.cfg_attrs();
quote! {
#( #cfg_attrs )*
#ident,
}
})
.collect()
}
/// Extension trait for [`syn::meta::ParseNestedMeta`].
pub(crate) trait ParseNestedMetaExt {
    /// Whether this meta item has a value.
    ///
    /// True when more input follows the item's path and that input is not
    /// immediately the `,` separating it from the next item.
    fn has_value(&self) -> bool;
}
impl ParseNestedMetaExt for ParseNestedMeta<'_> {
    fn has_value(&self) -> bool {
        // At the end of input there can be no value.
        if self.input.is_empty() {
            return false;
        }
        // A `,` right after the path means the next token starts a new item.
        !self.input.peek(syn::Token![,])
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/events.rs | crates/core-macros/src/events.rs | //! Methods and types for generating events.
pub mod common;
pub mod event;
pub mod event_content;
pub mod event_enum;
pub mod event_enum_from_event;
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/identifiers/id_dst.rs | crates/core-macros/src/identifiers/id_dst.rs | //! Implementation of the `IdDst` derive macro.
use proc_macro2::{Span, TokenStream};
use quote::quote;
use syn::parse_quote;
use crate::util::NameSpace;
mod parse;
/// Generate the `Owned` version of an identifier and various trait implementations.
///
/// Entry point of the `IdDst` derive: parses the annotated struct and emits
/// `Clone`/`From` conversions plus the impl groups produced by the
/// `expand_*` helpers below.
pub(crate) fn expand_id_dst(input: syn::ItemStruct) -> syn::Result<TokenStream> {
    let id_dst = IdDst::parse(input)?;
    let borrowed_type = &id_dst.borrowed_type;
    let box_type = &id_dst.box_type;
    let arc_type = &id_dst.arc_type;
    let impl_generics = &id_dst.impl_generics;
    // Each helper expands one group of impls; all groups are concatenated at
    // the end of the generated output.
    let as_str_and_bytes_impls = id_dst.expand_as_str_and_bytes_impls();
    let to_string_impls = id_dst.expand_to_string_impls(borrowed_type);
    let unchecked_from_str_impls = id_dst.expand_unchecked_from_str_impls();
    let owned_id_struct = id_dst.expand_owned_id_struct();
    // At most one of these two is `Some`, depending on whether the macro
    // input declared a validation function.
    let fallible_from_str_impls = id_dst.expand_fallible_from_str_impls();
    let infallible_from_str_impls = id_dst.expand_infallible_from_str_impls();
    let partial_eq_impls = id_dst.expand_partial_eq_impls();
    let diesel_impls = id_dst.expand_diesel_impls();
    Ok(quote! {
        #[automatically_derived]
        impl #impl_generics ::std::clone::Clone for #box_type {
            fn clone(&self) -> Self {
                (**self).into()
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<&#borrowed_type> for #box_type {
            fn from(id: &#borrowed_type) -> Self {
                <#borrowed_type>::from_box(id.as_str().into())
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<&#borrowed_type> for ::std::rc::Rc<#borrowed_type> {
            fn from(s: &#borrowed_type) -> Self {
                let rc = ::std::rc::Rc::<::std::primitive::str>::from(s.as_str());
                <#borrowed_type>::from_rc(rc)
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<&#borrowed_type> for #arc_type {
            fn from(s: &#borrowed_type) -> Self {
                let arc = ::std::sync::Arc::<::std::primitive::str>::from(s.as_str());
                <#borrowed_type>::from_arc(arc)
            }
        }
        #as_str_and_bytes_impls
        #to_string_impls
        #unchecked_from_str_impls
        #owned_id_struct
        #fallible_from_str_impls
        #infallible_from_str_impls
        #partial_eq_impls
        #diesel_impls
    })
}
/// The parsed input of the `IdDst` macro.
struct IdDst {
    /// The name of the borrowed type.
    ident: syn::Ident,
    /// The borrowed type with generics, if any.
    borrowed_type: syn::Type,
    /// The name of the owned type.
    owned_ident: syn::Ident,
    /// The owned type with generics, if any.
    owned_type: syn::Type,
    /// The type wrapped in a `Box` with generics, if any.
    box_type: syn::Type,
    /// The type wrapped in a `Arc` with generics, if any.
    arc_type: syn::Type,
    /// The generics on the borrowed type.
    generics: syn::Generics,
    /// The declaration of the generics of the borrowed type to use on `impl` blocks.
    impl_generics: TokenStream,
    /// The path to the function to use to validate the identifier.
    ///
    /// `None` means the identifier accepts any string and infallible
    /// conversions are generated instead of fallible ones.
    validate: Option<syn::Path>,
    /// The index of the `str` field.
    ///
    /// This is assumed to be the last field of the tuple struct.
    str_field_index: syn::Index,
    /// The path to use imports from the palpo-core crate.
    palpo_core: TokenStream,
}
impl IdDst {
/// Generate `AsRef<str>` and `AsRef<[u8]>` implementations and string conversions for this
/// identifier.
///
/// Also emits the inherent `as_str`/`as_bytes` accessors on the borrowed
/// type and `From` conversions into `String`/`Box<str>`.
fn expand_as_str_and_bytes_impls(&self) -> TokenStream {
    let ident = &self.ident;
    let borrowed_type = &self.borrowed_type;
    let box_type = &self.box_type;
    let impl_generics = &self.impl_generics;
    let str_field_index = &self.str_field_index;
    // Doc strings are interpolated into the generated `#[doc = …]` items.
    let as_str_docs = format!("Extracts a string slice from this `{ident}`.");
    let as_bytes_docs = format!("Extracts a byte slice from this `{ident}`.");
    quote! {
        impl #impl_generics #borrowed_type {
            #[doc = #as_str_docs]
            #[inline]
            pub fn as_str(&self) -> &::std::primitive::str {
                &self.#str_field_index
            }
            #[doc = #as_bytes_docs]
            #[inline]
            pub fn as_bytes(&self) -> &[::std::primitive::u8] {
                self.as_str().as_bytes()
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::AsRef<#borrowed_type> for #borrowed_type {
            fn as_ref(&self) -> &#borrowed_type {
                self
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::AsRef<::std::primitive::str> for #borrowed_type {
            fn as_ref(&self) -> &str {
                self.as_str()
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::AsRef<::std::primitive::str> for #box_type {
            fn as_ref(&self) -> &str {
                self.as_str()
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::AsRef<[::std::primitive::u8]> for #borrowed_type {
            fn as_ref(&self) -> &[::std::primitive::u8] {
                self.as_bytes()
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::AsRef<[::std::primitive::u8]> for #box_type {
            fn as_ref(&self) -> &[::std::primitive::u8] {
                self.as_bytes()
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<&#borrowed_type> for ::std::string::String {
            fn from(id: &#borrowed_type) -> Self {
                id.as_str().to_owned()
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<#box_type> for ::std::boxed::Box<::std::primitive::str> {
            fn from(id: #box_type) -> Self {
                // Reinterprets the Box: the DST is a transparent wrapper over `str`.
                unsafe { ::std::boxed::Box::from_raw(std::boxed::Box::into_raw(id) as _) }
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<#box_type> for ::std::string::String {
            fn from(id: #box_type) -> Self {
                ::std::boxed::Box::<::std::primitive::str>::from(id).into()
            }
        }
    }
}
/// Generate unchecked private methods to convert a string type to the identifier.
///
/// These bypass validation and are only reachable from within the generated
/// module (`pub(super)`); public entry points must validate first.
fn expand_unchecked_from_str_impls(&self) -> TokenStream {
    let borrowed_type = &self.borrowed_type;
    let impl_generics = &self.impl_generics;
    quote! {
        #[automatically_derived]
        impl #impl_generics #borrowed_type {
            pub(super) const fn from_borrowed(s: &::std::primitive::str) -> &Self {
                // SAFETY: presumably the identifier is a #[repr(transparent)]
                // wrapper over `str`, making the reference cast sound — the
                // struct definition is supplied by the macro caller, so this
                // must be upheld there.
                unsafe { ::std::mem::transmute(s) }
            }
            pub(super) fn from_box(s: ::std::boxed::Box<::std::primitive::str>) -> ::std::boxed::Box<Self> {
                // SAFETY: same layout argument; ownership transfers through
                // the raw-pointer round-trip without double-free.
                unsafe { ::std::boxed::Box::from_raw(::std::boxed::Box::into_raw(s) as _) }
            }
            pub(super) fn from_rc(s: ::std::rc::Rc<::std::primitive::str>) -> ::std::rc::Rc<Self> {
                // SAFETY: see `from_box`; the refcount is preserved across
                // `into_raw`/`from_raw`.
                unsafe { ::std::rc::Rc::from_raw(::std::rc::Rc::into_raw(s) as _) }
            }
            pub(super) fn from_arc(s: ::std::sync::Arc<::std::primitive::str>) -> ::std::sync::Arc<Self> {
                // SAFETY: see `from_rc`.
                unsafe { ::std::sync::Arc::from_raw(::std::sync::Arc::into_raw(s) as _) }
            }
        }
    }
}
/// Generate `FromStr` and other fallible string conversions implementations for this
/// identifier, if it has a validation function.
///
/// The error returned during conversion is `crate::IdParseError`.
///
/// Returns `None` when no validation function was declared; the infallible
/// variants are generated instead (see `expand_infallible_from_str_impls`).
fn expand_fallible_from_str_impls(&self) -> Option<TokenStream> {
    let validate = self.validate.as_ref()?;
    let ident = &self.ident;
    let owned_ident = &self.owned_ident;
    let borrowed_type = &self.borrowed_type;
    let owned_type = &self.owned_type;
    let box_type = &self.box_type;
    let generic_params = &self.generics.params;
    let impl_generics = &self.impl_generics;
    let palpo_core = NameSpace::palpo_core();
    let serde = NameSpace::serde();
    let parse_doc_header = format!("Try parsing a `&str` into an `{owned_ident}`.");
    let parse_box_doc_header = format!("Try parsing a `&str` into a `Box<{ident}>`.");
    let parse_rc_docs = format!("Try parsing a `&str` into an `Rc<{ident}>`.");
    let parse_arc_docs = format!("Try parsing a `&str` into an `Arc<{ident}>`.");
    let ref_str_type: syn::Type = parse_quote!(&::std::primitive::str);
    let string_type: syn::Type = parse_quote!(::std::string::String);
    // Generate `TryFrom<{from_type}> for {for_type}` which uses the given `parse_fn` from the
    // borrowed type.
    let expand_try_from_impl =
        |for_type: &syn::Type, from_type: &syn::Type, parse_fn: &syn::Ident| {
            quote! {
                #[automatically_derived]
                impl #impl_generics ::std::convert::TryFrom<#from_type> for #for_type {
                    type Error = #palpo_core::IdParseError;
                    fn try_from(s: #from_type) -> ::std::result::Result<Self, Self::Error> {
                        <#borrowed_type>::#parse_fn(s)
                    }
                }
            }
        };
    // Generate `FromStr` `TryFrom<&str>` and `TryFrom<String>` implementations for the given
    // type, which use the given `parse_fn` from the borrowed type.
    let expand_from_str_impls = |ty: &syn::Type, parse_fn: &syn::Ident| -> TokenStream {
        let try_from_ref_str_impl = expand_try_from_impl(ty, &ref_str_type, parse_fn);
        let try_from_string_impl = expand_try_from_impl(ty, &string_type, parse_fn);
        quote! {
            #[automatically_derived]
            impl #impl_generics ::std::str::FromStr for #ty {
                type Err = #palpo_core::IdParseError;
                fn from_str(s: &::std::primitive::str) -> ::std::result::Result<Self, Self::Err> {
                    <#borrowed_type>::#parse_fn(s)
                }
            }
            #try_from_ref_str_impl
            #try_from_string_impl
        }
    };
    // Boxed values parse via `parse_box`, owned values via `parse`.
    let box_type_from_str_impls =
        expand_from_str_impls(box_type, &syn::Ident::new("parse_box", Span::call_site()));
    let owned_type_from_str_impls =
        expand_from_str_impls(owned_type, &syn::Ident::new("parse", Span::call_site()));
    Some(quote! {
        #[automatically_derived]
        impl #impl_generics #borrowed_type {
            #[doc = #parse_doc_header]
            ///
            /// The same can also be done using `FromStr`, `TryFrom` or `TryInto`.
            /// This function is simply more constrained and thus useful in generic contexts.
            pub fn parse(
                s: impl ::std::convert::AsRef<::std::primitive::str>,
            ) -> ::std::result::Result<#owned_type, #palpo_core::IdParseError> {
                let s = s.as_ref();
                #validate(s)?;
                ::std::result::Result::Ok(#ident::from_borrowed(s).to_owned())
            }
            #[doc = #parse_box_doc_header]
            ///
            /// The same can also be done using `FromStr`, `TryFrom` or `TryInto`.
            /// This function is simply more constrained and thus useful in generic contexts.
            pub fn parse_box(
                s: impl ::std::convert::AsRef<::std::primitive::str> + ::std::convert::Into<::std::boxed::Box<::std::primitive::str>>,
            ) -> ::std::result::Result<::std::boxed::Box<Self>, #palpo_core::IdParseError> {
                #validate(s.as_ref())?;
                ::std::result::Result::Ok(#ident::from_box(s.into()))
            }
            #[doc = #parse_rc_docs]
            pub fn parse_rc(
                s: impl ::std::convert::AsRef<::std::primitive::str> + ::std::convert::Into<::std::rc::Rc<::std::primitive::str>>,
            ) -> ::std::result::Result<::std::rc::Rc<Self>, #palpo_core::IdParseError> {
                #validate(s.as_ref())?;
                ::std::result::Result::Ok(#ident::from_rc(s.into()))
            }
            #[doc = #parse_arc_docs]
            pub fn parse_arc(
                s: impl ::std::convert::AsRef<::std::primitive::str> + ::std::convert::Into<std::sync::Arc<::std::primitive::str>>,
            ) -> ::std::result::Result<::std::sync::Arc<Self>, #palpo_core::IdParseError> {
                #validate(s.as_ref())?;
                ::std::result::Result::Ok(#ident::from_arc(s.into()))
            }
        }
        #[automatically_derived]
        impl<'de, #generic_params> #serde::Deserialize<'de> for #box_type {
            fn deserialize<D>(deserializer: D) -> ::std::result::Result<Self, D::Error>
            where
                D: #serde::Deserializer<'de>,
            {
                use #serde::de::Error;
                let s = #palpo_core::serde::deserialize_cow_str(deserializer)?;
                #ident::parse_box(s.as_ref()).map_err(D::Error::custom)
            }
        }
        #[automatically_derived]
        impl<'de, #generic_params> #serde::Deserialize<'de> for #owned_type {
            fn deserialize<D>(deserializer: D) -> ::std::result::Result<Self, D::Error>
            where
                D: #serde::Deserializer<'de>,
            {
                use #serde::de::Error;
                let s = #palpo_core::serde::deserialize_cow_str(deserializer)?;
                #ident::parse(s.as_ref()).map_err(D::Error::custom)
            }
        }
        #[automatically_derived]
        impl<'a, #generic_params> ::std::convert::TryFrom<&'a ::std::primitive::str> for &'a #borrowed_type {
            type Error = #palpo_core::IdParseError;
            fn try_from(s: &'a ::std::primitive::str) -> ::std::result::Result<Self, Self::Error> {
                #validate(s)?;
                ::std::result::Result::Ok(<#borrowed_type>::from_borrowed(s))
            }
        }
        #box_type_from_str_impls
        #owned_type_from_str_impls
    })
}
/// Generate `From<&str>` and other infallible string conversions implementations for this
/// identifier, if it doesn't have a validation function.
///
/// Counterpart of `expand_fallible_from_str_impls`: without validation,
/// every string converts directly.
fn expand_infallible_from_str_impls(&self) -> Option<TokenStream> {
    if self.validate.is_some() {
        return None;
    }
    let borrowed_type = &self.borrowed_type;
    let owned_type = &self.owned_type;
    let box_type = &self.box_type;
    let impl_generics = &self.impl_generics;
    let generic_params = &self.generics.params;
    let serde = NameSpace::serde();
    Some(quote! {
        #[automatically_derived]
        impl<'a, #generic_params> ::std::convert::From<&'a ::std::primitive::str> for &'a #borrowed_type {
            fn from(s: &'a ::std::primitive::str) -> Self {
                <#borrowed_type>::from_borrowed(s)
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<&::std::primitive::str> for #owned_type {
            fn from(s: &::std::primitive::str) -> Self {
                <&#borrowed_type>::from(s).into()
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<::std::boxed::Box<::std::primitive::str>> for #owned_type {
            fn from(s: ::std::boxed::Box<::std::primitive::str>) -> Self {
                <&#borrowed_type>::from(&*s).into()
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<::std::string::String> for #owned_type {
            fn from(s: ::std::string::String) -> Self {
                <&#borrowed_type>::from(s.as_str()).into()
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<&::std::primitive::str> for #box_type {
            fn from(s: &::std::primitive::str) -> Self {
                <#borrowed_type>::from_box(s.into())
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<::std::boxed::Box<::std::primitive::str>> for #box_type {
            fn from(s: ::std::boxed::Box<::std::primitive::str>) -> Self {
                <#borrowed_type>::from_box(s)
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<::std::string::String> for #box_type {
            fn from(s: String) -> Self {
                <#borrowed_type>::from_box(s.into())
            }
        }
        #[automatically_derived]
        impl<'de, #generic_params> #serde::Deserialize<'de> for #box_type {
            fn deserialize<D>(deserializer: D) -> ::std::result::Result<Self, D::Error>
            where
                D: #serde::Deserializer<'de>,
            {
                ::std::boxed::Box::<::std::primitive::str>::deserialize(deserializer).map(<#borrowed_type>::from_box)
            }
        }
        #[automatically_derived]
        impl<'de, #generic_params> #serde::Deserialize<'de> for #owned_type {
            fn deserialize<D>(deserializer: D) -> ::std::result::Result<Self, D::Error>
            where
                D: #serde::Deserializer<'de>,
            {
                // FIXME: Deserialize inner, convert that
                ::std::boxed::Box::<::std::primitive::str>::deserialize(deserializer).map(<#borrowed_type>::from_box).map(::std::convert::Into::into)
            }
        }
    })
}
/// Generate the `Owned{ident}` type and its implementations.
///
/// The owned type currently always stores an `Arc` internally (the
/// `palpo_identifiers_storage` cfg selection is commented out below), along
/// with conversions, comparisons, salvo schema and diesel SQL impls.
fn expand_owned_id_struct(&self) -> TokenStream {
    let ident = &self.ident;
    let owned_ident = &self.owned_ident;
    let borrowed_type = &self.borrowed_type;
    let owned_type = &self.owned_type;
    let box_type = &self.box_type;
    let arc_type = &self.arc_type;
    let generics = &self.generics;
    let impl_generics = &self.impl_generics;
    let doc_header = format!("Owned variant of [`{ident}`]");
    let to_string_impls = self.expand_to_string_impls(owned_type);
    quote! {
        #[doc = #doc_header]
        ///
        /// The wrapper type for this type is variable, by default it'll use [`Box`],
        /// but you can change that by setting "`--cfg=palpo_identifiers_storage=...`" using
        /// `RUSTFLAGS` or `.cargo/config.toml` (under `[build]` -> `rustflags = ["..."]`)
        /// to the following;
        /// - `palpo_identifiers_storage="Arc"` to use [`Arc`](std::sync::Arc) as a wrapper type.
        // NOTE(review): the generated doc above still describes the cfg-based
        // storage selection, but the field below is hard-wired to Arc —
        // confirm whether the doc or the cfg machinery should be updated.
        #[derive(diesel::deserialize::FromSqlRow, diesel::expression::AsExpression)]
        #[diesel(sql_type = diesel::sql_types::Text)]
        pub struct #owned_ident #generics {
            // #[cfg(not(any(palpo_identifiers_storage = "Arc")))]
            // inner: #box_type,
            // #[cfg(palpo_identifiers_storage = "Arc")]
            // inner: #arc_type,
            inner: #arc_type,
        }
        #[automatically_derived]
        impl #impl_generics ::std::borrow::ToOwned for #borrowed_type {
            type Owned = #owned_type;
            fn to_owned(&self) -> Self::Owned {
                #owned_ident {
                    // #[cfg(not(any(palpo_identifiers_storage = "Arc")))]
                    // inner: #ident::from_box(self.as_str().into()),
                    // #[cfg(palpo_identifiers_storage = "Arc")]
                    inner: #ident::from_arc(self.as_str().into()),
                }
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::AsRef<#borrowed_type> for #owned_type {
            fn as_ref(&self) -> &#borrowed_type {
                &*self.inner
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::AsRef<::std::primitive::str> for #owned_type {
            fn as_ref(&self) -> &::std::primitive::str {
                self.inner.as_str()
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::AsRef<[::std::primitive::u8]> for #owned_type {
            fn as_ref(&self) -> &[::std::primitive::u8] {
                self.inner.as_bytes()
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<#owned_type> for ::std::string::String {
            fn from(id: #owned_type) -> Self {
                // #[cfg(not(any(palpo_identifiers_storage = "Arc")))]
                // { id.inner.into() }
                // #[cfg(palpo_identifiers_storage = "Arc")]
                { id.inner.as_ref().into() }
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::clone::Clone for #owned_type {
            fn clone(&self) -> Self {
                Self { inner: self.inner.clone() }
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::ops::Deref for #owned_type {
            type Target = #borrowed_type;
            fn deref(&self) -> &Self::Target {
                &self.inner
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::borrow::Borrow<#borrowed_type> for #owned_type {
            fn borrow(&self) -> &#borrowed_type {
                self.as_ref()
            }
        }
        #to_string_impls
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<&#borrowed_type> for #owned_type {
            fn from(id: &#borrowed_type) -> Self {
                Self { inner: id.into() }
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<&#owned_type> for #owned_type {
            fn from(id: &#owned_type) -> Self {
                id.to_owned()
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<#box_type> for #owned_type {
            fn from(b: #box_type) -> Self {
                Self { inner: b.into() }
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<#arc_type> for #owned_type {
            fn from(a: #arc_type) -> Self {
                Self {
                    // #[cfg(not(any(palpo_identifiers_storage = "Arc")))]
                    // inner: a.as_ref().into(),
                    // #[cfg(palpo_identifiers_storage = "Arc")]
                    inner: a,
                }
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<#owned_type> for #box_type {
            fn from(a: #owned_type) -> Self {
                // #[cfg(not(any(palpo_identifiers_storage = "Arc")))]
                // { a.inner }
                // #[cfg(palpo_identifiers_storage = "Arc")]
                { a.inner.as_ref().into() }
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::convert::From<#owned_type> for #arc_type {
            fn from(a: #owned_type) -> Self {
                // #[cfg(not(any(palpo_identifiers_storage = "Arc")))]
                // { a.inner.into() }
                // #[cfg(palpo_identifiers_storage = "Arc")]
                { a.inner }
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::cmp::PartialEq for #owned_type {
            fn eq(&self, other: &Self) -> bool {
                self.as_str() == other.as_str()
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::cmp::Eq for #owned_type {}
        #[automatically_derived]
        impl #impl_generics ::std::cmp::PartialOrd for #owned_type {
            fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> {
                Some(self.cmp(other))
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::cmp::Ord for #owned_type {
            fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
                self.as_str().cmp(other.as_str())
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::hash::Hash for #owned_type {
            fn hash<H>(&self, state: &mut H)
            where
                H: ::std::hash::Hasher,
            {
                self.as_str().hash(state)
            }
        }
        impl #impl_generics salvo::oapi::ToSchema for #owned_type {
            fn to_schema(components: &mut salvo::oapi::Components) -> salvo::oapi::RefOr<salvo::oapi::Schema>{
                <String>::to_schema(components)
            }
        }
        impl #impl_generics diesel::deserialize::FromSql<diesel::sql_types::Text, diesel::pg::Pg> for #owned_type {
            fn from_sql(bytes: diesel::pg::PgValue<'_>) -> diesel::deserialize::Result<Self> {
                let value = <String as diesel::deserialize::FromSql<diesel::sql_types::Text, diesel::pg::Pg>>::from_sql(bytes)?;
                Ok(Self::try_from(value)?)
            }
        }
        impl #impl_generics diesel::serialize::ToSql<diesel::sql_types::Text, diesel::pg::Pg> for #owned_type {
            fn to_sql(&self, out: &mut diesel::serialize::Output<'_, '_, diesel::pg::Pg>) -> diesel::serialize::Result {
                diesel::serialize::ToSql::<diesel::sql_types::Text, diesel::pg::Pg>::to_sql(self.as_str(), &mut out.reborrow())
            }
        }
    }
}
/// Generate `std::fmt::Display`, `std::fmt::Debug` or `serde::Serialize` traits
/// implementations, using its `.as_str()` function.
///
/// `ty` is either the borrowed or the owned type; both expose `as_str`.
fn expand_to_string_impls(&self, ty: &syn::Type) -> TokenStream {
    let serde = NameSpace::serde();
    let impl_generics = &self.impl_generics;
    quote! {
        #[automatically_derived]
        impl #impl_generics ::std::fmt::Display for #ty {
            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                write!(f, "{}", self.as_str())
            }
        }
        #[automatically_derived]
        impl #impl_generics ::std::fmt::Debug for #ty {
            fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                // Debug delegates to str's Debug, i.e. a quoted string.
                <str as ::std::fmt::Debug>::fmt(self.as_str(), f)
            }
        }
        #[automatically_derived]
        impl #impl_generics #serde::Serialize for #ty {
            fn serialize<S>(&self, serializer: S) -> ::std::result::Result<S::Ok, S::Error>
            where
                S: #serde::Serializer,
            {
                serializer.serialize_str(self.as_str())
            }
        }
    }
}
/// Generate `std::cmp::PartialEq` implementations by comparing strings.
///
/// Emits reciprocal impls (`A == B` and `B == A`) between the borrowed,
/// boxed and owned identifier types and `str`/`&str`/`String`.
fn expand_partial_eq_impls(&self) -> TokenStream {
    let impl_generics = &self.impl_generics;
    let borrowed_type = &self.borrowed_type;
    let ref_borrowed_type: syn::Type = parse_quote! { &#borrowed_type };
    let str_type: syn::Type = parse_quote! { ::std::primitive::str };
    let ref_str_type: syn::Type = parse_quote! { &#str_type };
    let string_type: syn::Type = parse_quote! { ::std::string::String };
    // These idents are interpolated into the generated comparison bodies.
    let self_ident = syn::Ident::new("self", Span::call_site());
    let other_ident = syn::Ident::new("other", Span::call_site());
    // Get the string representation of the type.
    let as_str_impl = |ty: &syn::Type, ident: &syn::Ident| {
        // `str`/`&str` have no `as_str` method, so go through `AsRef<str>`.
        if *ty == str_type || *ty == ref_str_type {
            quote! { ::std::convert::AsRef::<::std::primitive::str>::as_ref(#ident) }
        } else {
            quote! { #ident.as_str() }
        }
    };
    // Implement `PartialEq` with the given lhs and rhs types.
    let expand_partial_eq = |lhs: &syn::Type, rhs: &syn::Type| {
        let self_as_str = as_str_impl(lhs, &self_ident);
        let other_as_str = as_str_impl(rhs, &other_ident);
        quote! {
            #[automatically_derived]
            impl #impl_generics ::std::cmp::PartialEq<#rhs> for #lhs {
                fn eq(&self, other: &#rhs) -> bool {
                    #self_as_str == #other_as_str
                }
            }
        }
    };
    // Implement reciprocal `PartialEq` implementation for the given type with the given other
    // types.
    let expand_partial_eq_impls_for_type =
        |ty: &syn::Type, others: &[&syn::Type]| -> TokenStream {
            others
                .iter()
                .flat_map(|other| [expand_partial_eq(ty, other), expand_partial_eq(other, ty)])
                .collect()
        };
    [
        expand_partial_eq_impls_for_type(
            borrowed_type,
            &[&str_type, &ref_str_type, &string_type],
        ),
        expand_partial_eq_impls_for_type(
            &self.box_type,
            &[
                &str_type,
                &ref_str_type,
                &string_type,
                borrowed_type,
                &ref_borrowed_type,
            ],
        ),
        expand_partial_eq_impls_for_type(
            &self.owned_type,
            &[
                &str_type,
                &ref_str_type,
                &string_type,
                borrowed_type,
                &ref_borrowed_type,
                &self.box_type,
                &self.arc_type,
            ],
        ),
    ]
    .into_iter()
    .collect()
}
/// Generate `diesel` implementations.
///
/// Lets a borrowed identifier be bound directly to `Text` and
/// `Nullable<Text>` columns on PostgreSQL without converting to `String`.
fn expand_diesel_impls(&self) -> TokenStream {
    let impl_generics = &self.impl_generics;
    let borrowed_type = &self.borrowed_type;
    quote! {
        impl #impl_generics diesel::serialize::ToSql<diesel::sql_types::Text, diesel::pg::Pg> for &'_ #borrowed_type {
            fn to_sql(&self, out: &mut diesel::serialize::Output<'_, '_, diesel::pg::Pg>) -> diesel::serialize::Result {
                diesel::serialize::ToSql::<diesel::sql_types::Text, diesel::pg::Pg>::to_sql(self.as_str(), &mut out.reborrow())
            }
        }
        impl #impl_generics diesel::serialize::ToSql<diesel::sql_types::Nullable<diesel::sql_types::Text>, diesel::pg::Pg> for &'_ #borrowed_type {
            fn to_sql(&self, out: &mut diesel::serialize::Output<'_, '_, diesel::pg::Pg>) -> diesel::serialize::Result {
                diesel::serialize::ToSql::<diesel::sql_types::Nullable<diesel::sql_types::Text>, diesel::pg::Pg>::to_sql(self.as_str(), &mut out.reborrow())
            }
        }
    }
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/identifiers/constructor.rs | crates/core-macros/src/identifiers/constructor.rs | use proc_macro2::{Span, TokenStream};
use quote::quote;
use syn::parse::{Parse, ParseStream};
/// A parsed identifier constructor input.
///
/// This validates the identifier at compile time and constructs it at runtime.
///
/// This is meant to be used in a `macro_rules!` macro like this:
///
/// ```
/// macro_rules! identifier {
///     ($str:literal) => {
///         $crate::proc_macros::identifier!($crate, $str)
///     };
/// }
/// ```
pub(crate) struct IdentifierConstructor {
    /// The crate where the identifier is located.
    ///
    /// Typically `$crate`, forwarded by the wrapping `macro_rules!` macro.
    src_crate: syn::Path,
    /// The string value of the identifier.
    str: syn::LitStr,
}
impl IdentifierConstructor {
/// Validate the input string and generate its conversion from the string value.
///
/// The conversion uses the `TryFrom<&str>` implementation of the given type at
/// runtime.
///
/// ## Parameters
///
/// * `id_type`: The type of the identifier to convert to. The type must be located at the root
/// of the `src_crate` and may be preceded with a `&` if the output is a borrowed type.
/// * `error_message`: The message to present if the compile-time validation fails.
/// * `validate_fn`: The function to use to validate the string value.
///
/// Panics with the given error message if the validation fails.
pub(crate) fn validate_and_expand_str_conversion<F, T, E>(
&self,
id_type: &str,
validate_fn: F,
) -> TokenStream
where
F: FnOnce(&str) -> Result<T, E>,
{
let (id_type, is_ref) = if let Some(id_type) = id_type.strip_prefix('&') {
(id_type, true)
} else {
(id_type, false)
};
assert!(validate_fn(&self.str.value()).is_ok(), "Invalid {id_type}");
let src_crate = &self.src_crate;
let str = &self.str;
let ampersand = is_ref.then(|| quote! { & });
let ident = syn::Ident::new(id_type, Span::call_site());
quote! {
<#ampersand #src_crate::#ident as ::std::convert::TryFrom<&str>>::try_from(#str).unwrap()
}
}
}
impl Parse for IdentifierConstructor {
fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
let src_crate = input.parse()?;
let _: syn::Token![,] = input.parse()?;
let str = input.parse()?;
Ok(Self { src_crate, str })
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/identifiers/id_dst/parse.rs | crates/core-macros/src/identifiers/id_dst/parse.rs | //! Implementations and types to parse the `IdDst` macro input.
use as_variant::as_variant;
use proc_macro2::Span;
use quote::{format_ident, quote};
use syn::{meta::ParseNestedMeta, parse_quote};
use super::IdDst;
use crate::util::NameSpace;
impl IdDst {
/// Parse the given `IdDst` macro input.
pub(super) fn parse(input: syn::ItemStruct) -> syn::Result<Self> {
let mut id_dst_attrs = IdDstAttrs::default();
for attr in &input.attrs {
if !attr.path().is_ident("palpo_id") {
continue;
}
attr.parse_nested_meta(|meta| id_dst_attrs.try_merge(meta, attr))?;
}
let IdDstAttrs { validate } = id_dst_attrs;
if validate.is_none() && !input.generics.params.is_empty() {
return Err(syn::Error::new(
Span::call_site(),
"IDs without validation and with generics are not supported",
));
}
if input.generics.where_clause.is_some() {
// So we don't have to insert #where_clause everywhere when it is always None in
// practice.
return Err(syn::Error::new(
Span::call_site(),
"where clauses on IDs are not supported",
));
}
let str_field_index = as_variant!(
&input.fields,
syn::Fields::Unnamed(syn::FieldsUnnamed { unnamed, .. }) => unnamed
)
.and_then(|unnamed| unnamed.len().checked_sub(1))
.ok_or_else(|| {
syn::Error::new(
Span::call_site(),
"Only tuple structs with a `str` as the last field are supported",
)
})?
.into();
let generics = input.generics;
let (impl_generics, type_generics, _where_clause) = generics.split_for_impl();
let impl_generics = quote! { #impl_generics };
let ident = input.ident;
let borrowed_type = parse_quote! { #ident #type_generics };
let owned_ident = format_ident!("Owned{ident}");
let owned_type = parse_quote! { #owned_ident #type_generics };
let box_type = parse_quote! { ::std::boxed::Box<#borrowed_type> };
let arc_type = parse_quote! { ::std::sync::Arc<#borrowed_type> };
Ok(Self {
ident,
borrowed_type,
owned_ident,
owned_type,
box_type,
arc_type,
generics,
impl_generics,
validate,
str_field_index,
palpo_core: NameSpace::palpo_core(),
})
}
}
/// The parsed attributes of the [`IdDst`].
#[derive(Default)]
struct IdDstAttrs {
/// The path to the function to use to validate the identifier.
validate: Option<syn::Path>,
}
impl IdDstAttrs {
/// Set the path to the function to use to validate the identifier.
///
/// Returns an error if it is already set.
fn set_validate(&mut self, validate: syn::Path, attr: &syn::Attribute) -> syn::Result<()> {
if self.validate.is_some() {
return Err(syn::Error::new_spanned(
attr,
"cannot have multiple values for `validate` attribute",
));
}
self.validate = Some(validate);
Ok(())
}
/// Try to parse the given meta item and merge it into this `IdDstAttrs`.
///
/// Returns an error if an unknown `palpo_id` attribute is encountered, or if an attribute
/// that accepts a single value appears several times.
fn try_merge(&mut self, meta: ParseNestedMeta<'_>, attr: &syn::Attribute) -> syn::Result<()> {
if meta.path.is_ident("validate") {
return self.set_validate(meta.value()?.parse()?, attr);
}
Err(meta.error("unsupported `palpo_id` attribute"))
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/serde/ord_as_ref_str.rs | crates/core-macros/src/serde/ord_as_ref_str.rs | use proc_macro2::{Ident, TokenStream};
use quote::quote;
/// Generate the `std::cmp::Ord` and `std::cmp::PartialOrd` implementations for the type with the
/// given ident, using its `AsRef<str>` implementation.
pub fn expand_ord_as_ref_str(ident: &Ident) -> syn::Result<TokenStream> {
Ok(quote! {
#[automatically_derived]
impl ::std::cmp::Ord for #ident {
fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
let other = ::std::convert::AsRef::<::std::primitive::str>::as_ref(other);
::std::convert::AsRef::<::std::primitive::str>::as_ref(self).cmp(other)
}
}
#[automatically_derived]
impl ::std::cmp::PartialOrd for #ident {
fn partial_cmp(&self, other: &Self) -> ::std::option::Option<::std::cmp::Ordering> {
let other = ::std::convert::AsRef::<::std::primitive::str>::as_ref(other);
::std::convert::AsRef::<::std::primitive::str>::as_ref(self).partial_cmp(other)
}
}
})
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/serde/eq_as_ref_str.rs | crates/core-macros/src/serde/eq_as_ref_str.rs | use proc_macro2::{Ident, TokenStream};
use quote::quote;
/// Generate the `std::cmp::PartialEq` and `std::cmp::Eq` implementations for the type with the
/// given ident, using its `AsRef<str>` implementation.
pub fn expand_eq_as_ref_str(ident: &Ident) -> syn::Result<TokenStream> {
Ok(quote! {
#[automatically_derived]
impl ::std::cmp::PartialEq for #ident {
fn eq(&self, other: &Self) -> bool {
let other = ::std::convert::AsRef::<::std::primitive::str>::as_ref(other);
::std::convert::AsRef::<::std::primitive::str>::as_ref(self) == other
}
}
#[automatically_derived]
#[allow(deprecated)]
impl ::std::cmp::Eq for #ident {}
})
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/serde/display_as_ref_str.rs | crates/core-macros/src/serde/display_as_ref_str.rs | use proc_macro2::{Ident, TokenStream};
use quote::quote;
/// Generate the `std::fmt::Display` implementation for the type with the given ident, using its
/// `AsRef<str>` implementation.
pub fn expand_display_as_ref_str(ident: &Ident) -> syn::Result<TokenStream> {
Ok(quote! {
#[automatically_derived]
impl ::std::fmt::Display for #ident {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
f.write_str(::std::convert::AsRef::<::std::primitive::str>::as_ref(self))
}
}
})
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/serde/util.rs | crates/core-macros/src/serde/util.rs | use proc_macro2::TokenStream;
use quote::quote;
mod parse;
/// Parsed `palpo_enum` attributes on a container.
#[derive(Default)]
pub(super) struct PalpoEnumAttrs {
/// The global renaming rule for the variants.
pub(super) rename_all: RenameAll,
}
/// A parsed unit variant of an enum with `palpo_enum` attributes.
pub(super) struct UnitVariant {
/// The name of the variant.
pub(super) ident: syn::Ident,
/// The custom string representation for the variant.
rename: Option<syn::LitStr>,
/// Alternative string representations for the variant.
pub(super) aliases: Vec<syn::LitStr>,
}
impl UnitVariant {
/// The string representation of this variant.
pub(super) fn string_representation(&self, rename_all: &RenameAll) -> String {
if let Some(rename) = &self.rename {
rename.value()
} else {
rename_all.apply(&self.ident.to_string())
}
}
}
/// A parsed tuple or struct variant with a single field.
pub(super) struct VariantWithSingleField {
/// The name of the variant.
pub(super) ident: syn::Ident,
/// The data field of the variant.
pub(super) field: syn::Field,
}
impl VariantWithSingleField {
/// Generate the code to extract or set the inner value of this variant into or from a variable
/// called `inner`.
///
/// The generated code looks like:
///
/// ```ignore
/// Ident(inner)
/// ```
///
/// or
///
/// ```ignore
/// Ident { field: inner }
/// ```
pub(super) fn expand_variable(&self) -> TokenStream {
let ident = &self.ident;
match &self.field.ident {
Some(field) => quote! { #ident { #field: inner } },
None => quote! { #ident (inner) },
}
}
}
/// The transformations to apply to all unit variants without a custom string representation in an
/// enum.
#[derive(Default)]
pub(super) struct RenameAll {
/// The prefix to add to the variants.
prefix: Option<syn::LitStr>,
/// The transformation to apply to the variants.
rule: RenameRule,
}
impl RenameAll {
/// Apply this transformation to the given enum variant.
pub(super) fn apply(&self, variant: &str) -> String {
let mut renamed = self.rule.apply(variant);
if let Some(prefix) = &self.prefix {
renamed = format!("{}{renamed}", prefix.value());
}
renamed
}
}
/// The different ways to change the string representation of unit variants in an enum.
#[derive(Copy, Clone, Default, PartialEq)]
pub(super) enum RenameRule {
/// Don't apply a default rename rule.
#[default]
None,
/// Convert to "lowercase" style.
LowerCase,
/// Convert to "UPPERCASE" style.
Uppercase,
/// Convert to "camelCase" style.
CamelCase,
/// Convert to "snake_case" style, as commonly used for fields.
SnakeCase,
/// Convert to "SCREAMING_SNAKE_CASE" style, as commonly used for constants.
ScreamingSnakeCase,
/// Convert to "kebab-case" style.
KebabCase,
}
impl RenameRule {
/// Split the given variant name at the uppercase letters by adding the given separator.
///
/// The uppercase letters are transformed to lowercase.
fn split_variant_name(variant: &str, separator: char) -> String {
let mut s = String::with_capacity(variant.len());
for (i, ch) in variant.char_indices() {
if i > 0 && ch.is_uppercase() {
s.push(separator);
}
s.push(ch.to_ascii_lowercase());
}
s
}
/// Apply this rule to the given variant.
pub(super) fn apply(&self, variant: &str) -> String {
match *self {
Self::None => variant.to_owned(),
Self::LowerCase => variant.to_ascii_lowercase(),
Self::Uppercase => variant.to_ascii_uppercase(),
Self::CamelCase => variant[..1].to_ascii_lowercase() + &variant[1..],
Self::SnakeCase => Self::split_variant_name(variant, '_'),
Self::ScreamingSnakeCase => Self::SnakeCase.apply(variant).to_ascii_uppercase(),
Self::KebabCase => Self::split_variant_name(variant, '-'),
}
}
}
#[cfg(test)]
mod tests {
use syn::parse_quote;
use super::{RenameAll, RenameRule};
#[test]
fn rename_all_apply() {
let variant = "VeryTasty";
// Default, no change.
let rename_all = RenameAll::default();
assert_eq!(rename_all.apply(variant), variant);
// Only rule.
let rename_all = RenameAll {
rule: RenameRule::ScreamingSnakeCase,
..Default::default()
};
assert_eq!(rename_all.apply(variant), "VERY_TASTY");
let rename_all = RenameAll {
rule: RenameRule::SnakeCase,
..Default::default()
};
assert_eq!(rename_all.apply(variant), "very_tasty");
// Only prefix.
let rename_all = RenameAll {
prefix: Some(parse_quote!("m.rule.")),
..Default::default()
};
assert_eq!(rename_all.apply(variant), "m.rule.VeryTasty");
let rename_all = RenameAll {
prefix: Some(parse_quote!("M_")),
..Default::default()
};
assert_eq!(rename_all.apply(variant), "M_VeryTasty");
// Rule and prefix.
let rename_all = RenameAll {
prefix: Some(parse_quote!("m.rule.")),
rule: RenameRule::ScreamingSnakeCase,
};
assert_eq!(rename_all.apply(variant), "m.rule.VERY_TASTY");
let rename_all = RenameAll {
prefix: Some(parse_quote!("m.rule.")),
rule: RenameRule::SnakeCase,
};
assert_eq!(rename_all.apply(variant), "m.rule.very_tasty");
let rename_all = RenameAll {
prefix: Some(parse_quote!("M_")),
rule: RenameRule::ScreamingSnakeCase,
};
assert_eq!(rename_all.apply(variant), "M_VERY_TASTY");
let rename_all = RenameAll {
prefix: Some(parse_quote!("M_")),
rule: RenameRule::SnakeCase,
};
assert_eq!(rename_all.apply(variant), "M_very_tasty");
}
#[test]
fn rename_rule_apply() {
for &(original, lower, upper, camel, snake, screaming, kebab) in &[
(
"Outcome", "outcome", "OUTCOME", "outcome", "outcome", "OUTCOME", "outcome",
),
(
"VeryTasty",
"verytasty",
"VERYTASTY",
"veryTasty",
"very_tasty",
"VERY_TASTY",
"very-tasty",
),
("A", "a", "A", "a", "a", "A", "a"),
("Z42", "z42", "Z42", "z42", "z42", "Z42", "z42"),
] {
assert_eq!(RenameRule::None.apply(original), original);
assert_eq!(RenameRule::LowerCase.apply(original), lower);
assert_eq!(RenameRule::Uppercase.apply(original), upper);
assert_eq!(RenameRule::CamelCase.apply(original), camel);
assert_eq!(RenameRule::SnakeCase.apply(original), snake);
assert_eq!(RenameRule::ScreamingSnakeCase.apply(original), screaming);
assert_eq!(RenameRule::KebabCase.apply(original), kebab);
}
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/serde/enum_as_ref_str.rs | crates/core-macros/src/serde/enum_as_ref_str.rs | use proc_macro2::TokenStream;
use quote::quote;
use super::util::{RenameAll, PalpoEnumAttrs, UnitVariant, VariantWithSingleField};
/// Generate the `AsRef<str>` implementation for the given enum.
pub fn expand_enum_as_ref_str(input: &syn::ItemEnum) -> syn::Result<TokenStream> {
let palpo_enum = PalpoEnumWithAnyVariants::try_from(input)?;
let ident = &input.ident;
let unit_variants = palpo_enum.unit_variants_data().map(|(variant, string)| {
quote! {
Self::#variant => #string,
}
});
let field_variants = palpo_enum.expand_field_variants_variables().map(|variant| {
quote! {
Self::#variant => &inner.0,
}
});
Ok(quote! {
#[automatically_derived]
#[allow(deprecated)]
impl ::std::convert::AsRef<::std::primitive::str> for #ident {
fn as_ref(&self) -> &::std::primitive::str {
match self {
#( #unit_variants )*
#( #field_variants )*
}
}
}
})
}
/// A parsed enum with `palpo_enum` attributes and any [`UnitVariant`] or [`VariantWithSingleField`].
pub(crate) struct PalpoEnumWithAnyVariants {
/// The unit variants of the enum.
unit_variants: Vec<UnitVariant>,
/// The variants of the enum containing a single field.
field_variants: Vec<VariantWithSingleField>,
/// The global renaming rule for the variants.
rename_all: RenameAll,
}
impl PalpoEnumWithAnyVariants {
/// The names and string representations of the unit variants.
pub(super) fn unit_variants_data(&self) -> impl Iterator<Item = (&syn::Ident, String)> {
self.unit_variants.iter().map(|variant| {
(
&variant.ident,
variant.string_representation(&self.rename_all),
)
})
}
/// Generate the code to extract or set the inner value of the field variants into or from a
/// variable called `inner`.
pub(super) fn expand_field_variants_variables(&self) -> impl Iterator<Item = TokenStream> {
self.field_variants
.iter()
.map(|variant| variant.expand_variable())
}
}
impl TryFrom<&syn::ItemEnum> for PalpoEnumWithAnyVariants {
type Error = syn::Error;
fn try_from(input: &syn::ItemEnum) -> Result<Self, Self::Error> {
let enum_attrs = PalpoEnumAttrs::parse(&input.attrs)?;
let mut field_variants = Vec::new();
let mut unit_variants = Vec::new();
// Parse enum variants.
for variant in &input.variants {
match &variant.fields {
syn::Fields::Named(_) | syn::Fields::Unnamed(_) => {
field_variants.push(VariantWithSingleField::try_from(variant)?);
}
syn::Fields::Unit => {
unit_variants.push(UnitVariant::try_from(variant)?);
}
}
}
Ok(Self {
unit_variants,
field_variants,
rename_all: enum_attrs.rename_all,
})
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/serde/deserialize_from_cow_str.rs | crates/core-macros/src/serde/deserialize_from_cow_str.rs | use proc_macro2::{Ident, TokenStream};
use quote::quote;
use crate::util::NameSpace;
/// Generate the `serde::de:Deserialize` implementation for the type with the given ident, using its
/// `From<Cow<'a, str>>` implementation.
pub fn expand_deserialize_from_cow_str(ident: &Ident) -> syn::Result<TokenStream> {
let palpo_core = NameSpace::palpo_core();
let serde = NameSpace::serde();
Ok(quote! {
#[automatically_derived]
#[allow(deprecated)]
impl<'de> #serde::de::Deserialize<'de> for #ident {
fn deserialize<D>(deserializer: D) -> ::std::result::Result<Self, D::Error>
where
D: #serde::de::Deserializer<'de>,
{
type CowStr<'a> = ::std::borrow::Cow<'a, ::std::primitive::str>;
let cow = #palpo_core::serde::deserialize_cow_str(deserializer)?;
Ok(::std::convert::From::<CowStr<'_>>::from(cow))
}
}
})
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/serde/serialize_as_ref_str.rs | crates/core-macros/src/serde/serialize_as_ref_str.rs | use proc_macro2::{Ident, TokenStream};
use quote::quote;
use crate::util::NameSpace;
/// Generate the `serde::ser::Serialize` implementation for the type with the given ident, using its
/// `AsRef<str>` implementation.
pub fn expand_serialize_as_ref_str(ident: &Ident) -> syn::Result<TokenStream> {
let palpo_core = NameSpace::palpo_core();
Ok(quote! {
#[automatically_derived]
#[allow(deprecated)]
impl #palpo_core::__private::serde::ser::Serialize for #ident {
fn serialize<S>(&self, serializer: S) -> ::std::result::Result<S::Ok, S::Error>
where
S: #palpo_core::__private::serde::ser::Serializer,
{
::std::convert::AsRef::<::std::primitive::str>::as_ref(self).serialize(serializer)
}
}
})
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/serde/as_str_as_ref_str.rs | crates/core-macros/src/serde/as_str_as_ref_str.rs | use proc_macro2::{Ident, TokenStream};
use quote::quote;
/// Generate a `pub fn as_str(&self) -> &str` method for the type with the given ident, using its
/// `AsRef<str>` implementation.
pub fn expand_as_str_as_ref_str(ident: &Ident) -> syn::Result<TokenStream> {
let as_str_doc = format!("Creates a string slice from this `{ident}`.");
Ok(quote! {
#[automatically_derived]
impl #ident {
#[doc = #as_str_doc]
pub fn as_str(&self) -> &str {
self.as_ref()
}
}
})
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/serde/enum_from_string.rs | crates/core-macros/src/serde/enum_from_string.rs | use proc_macro2::{Span, TokenStream};
use quote::quote;
use super::util::{RenameAll, PalpoEnumAttrs, UnitVariant, VariantWithSingleField};
/// Generate the `From<T> where T: AsRef<str> + Into<Box<str>>` implementation for the given enum.
pub fn expand_enum_from_string(input: &syn::ItemEnum) -> syn::Result<TokenStream> {
let palpo_enum = PalpoEnumWithFallbackVariant::try_from(input)?;
let ident = &input.ident;
let unit_variants = palpo_enum
.unit_variants_data()
.map(|(variant, string, aliases)| {
quote! {
#string #( | #aliases )* => Self::#variant,
}
});
let fallback_variant = {
let variant = palpo_enum.fallback_variant.expand_variable();
let field_ty = &palpo_enum.fallback_variant.field.ty;
quote! {
_ => {
let inner = #field_ty(s.into());
Self::#variant
}
}
};
Ok(quote! {
#[automatically_derived]
#[allow(deprecated)]
impl<T> ::std::convert::From<T> for #ident
where
T: ::std::convert::AsRef<::std::primitive::str>
+ ::std::convert::Into<::std::boxed::Box<::std::primitive::str>>
{
fn from(s: T) -> Self {
match s.as_ref() {
#( #unit_variants )*
#fallback_variant
}
}
}
})
}
/// A parsed enum with `palpo_enum` attributes and a single fallback variant.
pub(crate) struct PalpoEnumWithFallbackVariant {
/// The unit variants of the enum.
unit_variants: Vec<UnitVariant>,
/// The fallback variant of the enum.
fallback_variant: VariantWithSingleField,
/// The global renaming rule for the variants.
rename_all: RenameAll,
}
impl PalpoEnumWithFallbackVariant {
/// The names, string representations and aliases of the unit variants.
pub(super) fn unit_variants_data(
&self,
) -> impl Iterator<Item = (&syn::Ident, String, &[syn::LitStr])> {
self.unit_variants.iter().map(|variant| {
(
&variant.ident,
variant.string_representation(&self.rename_all),
variant.aliases.as_slice(),
)
})
}
}
impl TryFrom<&syn::ItemEnum> for PalpoEnumWithFallbackVariant {
type Error = syn::Error;
fn try_from(input: &syn::ItemEnum) -> Result<Self, Self::Error> {
let enum_attrs = PalpoEnumAttrs::parse(&input.attrs)?;
let mut fallback_variant = None;
let mut unit_variants = Vec::new();
// Parse enum variants.
for variant in &input.variants {
match &variant.fields {
syn::Fields::Named(_) | syn::Fields::Unnamed(_) => {
if fallback_variant.is_some() {
return Err(syn::Error::new_spanned(
variant,
"cannot have multiple fallback variants",
));
}
fallback_variant = Some(VariantWithSingleField::try_from(variant)?);
}
syn::Fields::Unit => {
unit_variants.push(UnitVariant::try_from(variant)?);
}
}
}
let palpo_enum = Self {
unit_variants,
fallback_variant: fallback_variant.ok_or_else(|| {
syn::Error::new(Span::call_site(), "required fallback variant not found")
})?,
rename_all: enum_attrs.rename_all,
};
Ok(palpo_enum)
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/serde/debug_as_ref_str.rs | crates/core-macros/src/serde/debug_as_ref_str.rs | use proc_macro2::{Ident, TokenStream};
use quote::quote;
/// Generate the `std::fmt::Debug` implementation for the type with the given ident, using its
/// `AsRef<str>` implementation.
pub fn expand_debug_as_ref_str(ident: &Ident) -> syn::Result<TokenStream> {
Ok(quote! {
#[automatically_derived]
impl ::std::fmt::Debug for #ident {
fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
<::std::primitive::str as ::std::fmt::Debug>::fmt(
::std::convert::AsRef::<::std::primitive::str>::as_ref(self),
f,
)
}
}
})
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/serde/util/parse.rs | crates/core-macros/src/serde/util/parse.rs | use proc_macro2::Span;
use syn::{meta::ParseNestedMeta, parse::Parse};
use super::{RenameAll, RenameRule, PalpoEnumAttrs, UnitVariant, VariantWithSingleField};
impl PalpoEnumAttrs {
/// Try to parse the given meta item and merge it into this `PalpoEnumAttrs`.
///
/// Returns an error if parsing the meta item fails, or if it sets a field that was already set.
fn try_merge(&mut self, meta: ParseNestedMeta<'_>) -> syn::Result<()> {
if meta.path.is_ident("rename_all") {
return self.rename_all.try_merge(meta);
}
Err(meta.error("unsupported `palpo_enum` attribute"))
}
}
impl PalpoEnumAttrs {
/// Try to parse the `palpo_enum` attributes from the list.
pub(crate) fn parse(attrs: &[syn::Attribute]) -> syn::Result<Self> {
let mut enum_attrs = Self::default();
for attr in attrs {
if !attr.path().is_ident("palpo_enum") {
continue;
}
attr.parse_nested_meta(|meta| enum_attrs.try_merge(meta))?;
}
Ok(enum_attrs)
}
}
impl RenameAll {
/// Set the prefix of this attribute.
///
/// Returns an error if the prefix was already set.
fn set_prefix(&mut self, prefix: syn::LitStr) -> syn::Result<()> {
if self.prefix.is_some() {
return Err(syn::Error::new(
Span::call_site(),
"cannot have multiple values for `#[palpo_enum(rename_all)]`'s prefix",
));
}
self.prefix = Some(prefix);
Ok(())
}
/// Set the rule of this attribute.
///
/// Returns an error if the rule was already set.
fn set_rule(&mut self, rule: RenameRule) -> syn::Result<()> {
if self.rule != RenameRule::None {
return Err(syn::Error::new(
Span::call_site(),
"cannot have multiple values for `#[palpo_enum(rename_all)]`'s rule",
));
}
self.rule = rule;
Ok(())
}
/// Try to merge the values of the attributes in the given meta in this one.
///
/// Returns an error if parsing the meta fails or a value is set twice.
fn try_merge(&mut self, meta: ParseNestedMeta<'_>) -> syn::Result<()> {
if meta.input.peek(syn::Token![=]) {
self.set_rule(meta.value()?.parse()?)?;
return Ok(());
}
if meta.input.peek(syn::token::Paren) {
meta.parse_nested_meta(|nested_meta| {
if nested_meta.path.is_ident("prefix") {
self.set_prefix(nested_meta.value()?.parse()?)?;
return Ok(());
}
if nested_meta.path.is_ident("rule") {
self.set_rule(nested_meta.value()?.parse()?)?;
return Ok(());
}
Err(nested_meta.error(
"unsupported `rename_all` attribute, expected one of `prefix` or `rule`",
))
})?;
return Ok(());
}
Err(meta.error(
"unexpected syntax for `rename_all` attribute, \
expected `rename_all = \"rule\"` or \
`rename_all(prefix = \"m.\", rule = \"rule\")`",
))
}
}
impl Parse for RenameRule {
fn parse(input: syn::parse::ParseStream<'_>) -> syn::Result<Self> {
let str: syn::LitStr = input.parse()?;
match str.value().as_str() {
"lowercase" => Ok(Self::LowerCase),
"UPPERCASE" => Ok(Self::Uppercase),
"camelCase" => Ok(Self::CamelCase),
"snake_case" => Ok(Self::SnakeCase),
"SCREAMING_SNAKE_CASE" => Ok(Self::ScreamingSnakeCase),
"kebab-case" => Ok(Self::KebabCase),
_ => Err(syn::Error::new_spanned(
str,
"unsupported value for `#[palpo_enum(rename_all)]`'s rule",
)),
}
}
}
/// The parsed attributes of a [`PalpoEnum`] unit variant.
#[derive(Default)]
struct UnitVariantAttrs {
/// The custom string representation for the variant.
rename: Option<syn::LitStr>,
/// Alternative string representations for the variant.
aliases: Vec<syn::LitStr>,
}
impl UnitVariantAttrs {
/// Set custom string representation for the variant.
///
/// Returns an error if the custom string representation was already set.
fn set_rename(&mut self, rename: syn::LitStr, variant: &syn::Variant) -> syn::Result<()> {
if self.rename.is_some() {
return Err(syn::Error::new_spanned(
variant,
"cannot have multiple `#[palpo_enum(rename)]` attributes",
));
}
self.rename = Some(rename);
Ok(())
}
/// Push the given alternative string representations for the variant.
fn push_alias(&mut self, alias: syn::LitStr) {
self.aliases.push(alias);
}
/// Try to merge the values of the attributes in the given meta in this one.
///
/// Returns an error if parsing the meta fails or a value is set twice.
fn try_merge(&mut self, meta: ParseNestedMeta<'_>, variant: &syn::Variant) -> syn::Result<()> {
if meta.path.is_ident("rename") {
return self.set_rename(meta.value()?.parse()?, variant);
}
if meta.path.is_ident("alias") {
self.push_alias(meta.value()?.parse()?);
return Ok(());
}
Err(meta.error("unsupported `palpo_enum` attribute"))
}
}
impl TryFrom<&syn::Variant> for UnitVariant {
type Error = syn::Error;
fn try_from(variant: &syn::Variant) -> Result<Self, Self::Error> {
if !matches!(variant.fields, syn::Fields::Unit) {
return Err(syn::Error::new_spanned(
variant,
"UnitVariant can only be parsed from unit enum variants",
));
}
let mut variant_attrs = UnitVariantAttrs::default();
for attr in &variant.attrs {
if !attr.path().is_ident("palpo_enum") {
continue;
}
attr.parse_nested_meta(|meta| variant_attrs.try_merge(meta, variant))?;
}
Ok(Self {
ident: variant.ident.clone(),
rename: variant_attrs.rename,
aliases: variant_attrs.aliases,
})
}
}
impl TryFrom<&syn::Variant> for VariantWithSingleField {
type Error = syn::Error;
fn try_from(variant: &syn::Variant) -> Result<Self, Self::Error> {
if variant
.attrs
.iter()
.any(|attr| attr.path().is_ident("palpo_enum"))
{
return Err(syn::Error::new_spanned(
variant,
"struct or tuple variant doesn't support any `#[palpo_enum]` attribute",
));
}
let fields = match &variant.fields {
syn::Fields::Named(syn::FieldsNamed { named: fields, .. })
| syn::Fields::Unnamed(syn::FieldsUnnamed {
unnamed: fields, ..
}) => fields,
syn::Fields::Unit => {
return Err(syn::Error::new_spanned(
variant,
"VariantWithSingleField can only be parsed from tuple or struct enum variants",
));
}
};
match fields.len() {
0 => {
return Err(syn::Error::new_spanned(
fields,
"struct or tuple variant must have one field",
));
}
1 => {}
_ => {
return Err(syn::Error::new_spanned(
fields,
"struct or tuple variant cannot have multiple fields",
));
}
}
Ok(Self {
ident: variant.ident.clone(),
field: fields
.iter()
.next()
.cloned()
.expect("struct or tuple variant should have at least one field"),
})
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/events/event_enum_from_event.rs | crates/core-macros/src/events/event_enum_from_event.rs | //! Implementation of the `EventEnumFromEvent` derive macro.
use proc_macro2::{Span, TokenStream};
use quote::quote;
/// `EventEnumFromEvent` derive macro code generation.
pub(crate) fn expand_event_enum_from_event(input: syn::DeriveInput) -> syn::Result<TokenStream> {
let event_enum_from_event = EventEnumFromEvent::parse(input)?;
Ok(event_enum_from_event.expand_from_impls())
}
/// The parsed `EventEnumFromEvent` container data.
struct EventEnumFromEvent {
/// The name of the event enum.
ident: syn::Ident,
/// The unit variants of the enum.
variants: Vec<EventEnumFromEventVariant>,
}
impl EventEnumFromEvent {
/// Parse the given input as an `EventEnumFromEvent`.
fn parse(input: syn::DeriveInput) -> syn::Result<Self> {
let syn::Data::Enum(syn::DataEnum { variants, .. }) = input.data else {
return Err(syn::Error::new_spanned(
input,
"the `EventEnumFromEvent` derive macro only works on enums",
));
};
let variants = variants
.into_iter()
.map(EventEnumFromEventVariant::parse)
.collect::<syn::Result<_>>()?;
Ok(Self {
ident: input.ident,
variants,
})
}
/// Generate the `From<{variant_inner_type}> for {ident}` implementations.
fn expand_from_impls(&self) -> TokenStream {
let ident = &self.ident;
self.variants
.iter()
.map(|variant| {
let variant_ident = &variant.ident;
let variant_inner_type = &variant.field.ty;
quote! {
#[automatically_derived]
impl ::std::convert::From<#variant_inner_type> for #ident {
fn from(c: #variant_inner_type) -> Self {
Self::#variant_ident(c)
}
}
}
})
.collect()
}
}
/// A parsed unit variant of [`EventEnumFromEvent`].
struct EventEnumFromEventVariant {
/// The name of the variant.
ident: syn::Ident,
/// The field of the variant.
field: syn::Field,
}
impl EventEnumFromEventVariant {
    /// Parse the given variant as an `EventEnumFromEventVariant`.
    ///
    /// Returns an error unless the variant is a newtype variant, i.e. a tuple
    /// variant with exactly one unnamed field.
    fn parse(variant: syn::Variant) -> syn::Result<Self> {
        if let syn::Fields::Unnamed(fields) = variant.fields
            && fields.unnamed.len() == 1
        {
            Ok(Self {
                ident: variant.ident,
                field: fields
                    .unnamed
                    .into_iter()
                    .next()
                    .expect("variant should have one unnamed field"),
            })
        } else {
            // The check above accepts only single-field tuple (newtype)
            // variants; the previous error message misleadingly claimed the
            // macro works "with unit variants".
            Err(syn::Error::new(
                Span::call_site(),
                "the `EventEnumFromEvent` derive macro only works with newtype variants \
                 (tuple variants with exactly one field)",
            ))
        }
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/events/event.rs | crates/core-macros/src/events/event.rs | //! Implementation of the `Event` derive macro.
use std::{borrow::Cow, fmt};
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use syn::parse_quote;
mod parse;
use super::common::{CommonEventKind, EventVariation};
use crate::util::{NameSpace, to_camel_case};
/// `Event` derive macro code generation.
pub(crate) fn expand_event(input: syn::DeriveInput) -> syn::Result<TokenStream> {
let event = Event::parse(input)?;
let deserialize_impl = event.expand_deserialize_impl();
let sync_from_and_into_full = event.expand_sync_from_and_into_full();
let eq_and_ord_impl = event.expand_eq_and_ord_impl();
Ok(quote! {
#deserialize_impl
#sync_from_and_into_full
#eq_and_ord_impl
})
}
/// The parsed `Event` container data.
struct Event {
    /// The name of the event struct.
    ident: syn::Ident,
    /// The generics on the event struct.
    generics: syn::Generics,
    /// The kind of event.
    kind: EventKind,
    /// The variation of the event struct.
    variation: EventVariation,
    /// The fields of the struct.
    fields: Vec<EventField>,
    /// The path to use for imports from the `palpo_core` crate (interpolated
    /// as `#palpo_core::events::...` in the generated code).
    palpo_core: TokenStream,
}
impl Event {
    /// Generate the `serde::Deserialize` implementation for this struct.
    ///
    /// The implementation is hand-rolled (rather than derived) because a
    /// generic `content` field must be deserialized through
    /// `EventContentFromType::from_parts` using the event's `type` field, and
    /// several fields have variation-specific fallback behavior.
    fn expand_deserialize_impl(&self) -> TokenStream {
        let palpo_core = &self.palpo_core;
        let serde = NameSpace::serde();
        let serde_json = NameSpace::serde_json();
        let ident = &self.ident;
        let is_content_generic = !self.generics.params.is_empty();
        let (impl_generics, ty_gen, where_clause) = self.generics.split_for_impl();
        // Names of the struct fields, reused throughout the generated code.
        let field_idents = self
            .fields
            .iter()
            .map(EventField::ident)
            .collect::<Vec<_>>();
        let serialized_field_names = self
            .fields
            .iter()
            .map(EventField::serialized_name)
            .collect::<Vec<_>>();
        // One generated `Field` enum variant per struct field, in CamelCase.
        let enum_variants = field_idents
            .iter()
            .copied()
            .map(to_camel_case)
            .collect::<Vec<_>>();
        let enum_variants_serde_attributes = self.fields.iter().map(EventField::serde_attribute);
        // Get the type of each field to deserialize to.
        let field_types = self.fields.iter().map(|field| {
            let field_ident = field.ident();
            if *field_ident == "content" && is_content_generic {
                // Deserialize the content to a `Box<RawValue>` so we can use the
                // `EventContentFromType` implementation later.
                quote! { ::std::boxed::Box<#serde_json::value::RawValue> }
            } else if *field_ident == "state_key" && self.variation == EventVariation::Initial {
                // Because the state key is allowed to be missing if it is empty when sending an
                // initial state event during creation, we default to deserializing a string first
                // so we can default to an empty string if it is missing.
                quote! { ::std::string::String }
            } else {
                let field_type = &field.inner.ty;
                quote! { #field_type }
            }
        });
        // Validate the deserialized values of the fields.
        let validate_field_values = self
            .fields
            .iter()
            .zip(&serialized_field_names)
            .map(|(field, serialized_name)| {
                let field_ident = field.ident();
                if *field_ident == "content" && is_content_generic {
                    // Return an error if the content is missing, and use the `EventContentFromType`
                    // implementation to deserialize the `RawValue`.
                    quote! {
                        let content = {
                            let json = content
                                .ok_or_else(|| #serde::de::Error::missing_field("content"))?;
                            C::from_parts(&event_type, &json).map_err(#serde::de::Error::custom)?
                        };
                    }
                } else if field.default
                    || (*field_ident == "unsigned" && !self.variation.is_redacted())
                {
                    // The field is allowed to be missing, and uses its `Default` implementation.
                    quote! {
                        let #field_ident = #field_ident.unwrap_or_default();
                    }
                } else if *field_ident == "state_key" && self.variation == EventVariation::Initial {
                    // The state key is allowed to be missing if it is empty, when sending an
                    // initial state event during creation.
                    let field_type = &field.inner.ty;
                    quote! {
                        let state_key = <#field_type as #serde::de::Deserialize>::deserialize(
                            #serde::de::IntoDeserializer::<A::Error>::into_deserializer(
                                state_key.unwrap_or_default(),
                            ),
                        )?;
                    }
                } else {
                    // The default behavior is to return an error if the field is missing.
                    quote! {
                        let #field_ident = #field_ident.ok_or_else(|| {
                            #serde::de::Error::missing_field(#serialized_name)
                        })?;
                    }
                }
            })
            .collect::<Vec<_>>();
        // Handle deserialization errors for the fields.
        let field_deserialize_error_handlers = self.fields.iter().map(|field| {
            if field.default_on_error {
                // Just log the deserialization error and use the `Default` implementation instead.
                quote! {
                    .map_err(|error| {
                        tracing::debug!("deserialization error, using default value: {error}");
                    })
                    .unwrap_or_default()
                }
            } else {
                // Just forward the deserialization error.
                quote! { ? }
            }
        });
        // Add the deserialization lifetime to the list of generics.
        let deserialize_generics = if is_content_generic {
            let generic_params = &self.generics.params;
            quote! { <'de, #generic_params> }
        } else {
            quote! { <'de> }
        };
        // If the struct has generics, it needs to be forwarded to the `EventVisitor` as
        // `PhantomData`.
        let visitor_phantom_type = if is_content_generic {
            quote! { ::std::marker::PhantomData }
        } else {
            quote! {}
        };
        // If the content is generic, we must add a bound for the `EventContentFromType`
        // implementation.
        let where_clause = if is_content_generic {
            let predicate = parse_quote! { C: #palpo_core::events::EventContentFromType };
            let where_clause = if let Some(mut where_clause) = where_clause.cloned() {
                where_clause.predicates.push(predicate);
                where_clause
            } else {
                parse_quote! { where #predicate }
            };
            Some(Cow::Owned(where_clause))
        } else {
            where_clause.map(Cow::Borrowed)
        };
        quote! {
            #[automatically_derived]
            impl #deserialize_generics #serde::de::Deserialize<'de> for #ident #ty_gen #where_clause {
                fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
                where
                    D: #serde::de::Deserializer<'de>,
                {
                    #[derive(#serde::Deserialize)]
                    #[serde(field_identifier, rename_all = "snake_case")]
                    enum Field {
                        // This field is hidden as the content type in Palpo, but it is always a
                        // valid field and we need to extract it to deserialize the content type.
                        Type,
                        #( #enum_variants_serde_attributes #enum_variants, )*
                        #[serde(other)]
                        Unknown,
                    }
                    /// Visits the fields of an event struct, in particular to handle deserialization of
                    /// the `content` field.
                    struct EventVisitor #impl_generics (#visitor_phantom_type #ty_gen);
                    #[automatically_derived]
                    impl #deserialize_generics #serde::de::Visitor<'de>
                        for EventVisitor #ty_gen #where_clause
                    {
                        type Value = #ident #ty_gen;
                        fn expecting(
                            &self,
                            formatter: &mut ::std::fmt::Formatter<'_>,
                        ) -> ::std::fmt::Result {
                            write!(formatter, "a key-value map")
                        }
                        fn visit_map<A>(self, mut map: A) -> Result<Self::Value, A::Error>
                        where
                            A: #serde::de::MapAccess<'de>,
                        {
                            let mut event_type: Option<String> = None;
                            #( let mut #field_idents: Option<#field_types> = None; )*
                            while let Some(key) = map.next_key()? {
                                match key {
                                    // We ignore unknown fields, for forwards compatibility.
                                    Field::Unknown => {
                                        let _: #serde::de::IgnoredAny = map.next_value()?;
                                    },
                                    Field::Type => {
                                        if event_type.is_some() {
                                            return Err(#serde::de::Error::duplicate_field("type"));
                                        }
                                        event_type = Some(map.next_value()?);
                                    }
                                    #(
                                        Field::#enum_variants => {
                                            if #field_idents.is_some() {
                                                return Err(#serde::de::Error::duplicate_field(
                                                    #serialized_field_names,
                                                ));
                                            }
                                            #field_idents = Some(map.next_value() #field_deserialize_error_handlers);
                                        }
                                    )*
                                }
                            }
                            let event_type =
                                event_type.ok_or_else(|| #serde::de::Error::missing_field("type"))?;
                            #( #validate_field_values )*
                            Ok(#ident {
                                #( #field_idents ),*
                            })
                        }
                    }
                    deserializer.deserialize_map(EventVisitor(#visitor_phantom_type))
                }
            }
        }
    }
    /// Generate `From<{full_event}>` and `.into_full_event()` implementations if this is a "sync"
    /// event struct.
    ///
    /// Returns `None` when the variation has no "full" counterpart
    /// (`variation.to_full()` is `None`).
    fn expand_sync_from_and_into_full(&self) -> Option<TokenStream> {
        let full_ident = self.kind.to_event_ident(self.variation.to_full()?);
        let palpo_core = &self.palpo_core;
        let ident = &self.ident;
        let (impl_generics, ty_gen, where_clause) = self.generics.split_for_impl();
        let field_idents = self
            .fields
            .iter()
            .map(EventField::ident)
            .collect::<Vec<_>>();
        Some(quote! {
            #[automatically_derived]
            impl #impl_generics ::std::convert::From<#full_ident #ty_gen>
                for #ident #ty_gen #where_clause
            {
                fn from(event: #full_ident #ty_gen) -> Self {
                    let #full_ident { #( #field_idents, )* .. } = event;
                    Self { #( #field_idents, )* }
                }
            }
            #[automatically_derived]
            impl #impl_generics #ident #ty_gen #where_clause {
                /// Convert this sync event into a full event, one with a `room_id` field.
                pub fn into_full_event(
                    self,
                    room_id: #palpo_core::OwnedRoomId,
                ) -> #full_ident #ty_gen {
                    let Self { #( #field_idents, )* } = self;
                    #full_ident {
                        #( #field_idents, )*
                        room_id,
                    }
                }
            }
        })
    }
    /// Implement `std::cmp::PartialEq`, `std::cmp::Eq`, `std::cmp::PartialOrd`, `std::cmp::Ord` for
    /// this event struct by comparing the `event_id`, if this field is present.
    ///
    /// Returns `None` when the kind/variation combination has no `event_id`
    /// field (see `EventKind::is_event_id_present`).
    fn expand_eq_and_ord_impl(&self) -> Option<TokenStream> {
        if !self.kind.is_event_id_present(self.variation) {
            return None;
        }
        let ident = &self.ident;
        let (impl_gen, ty_gen, where_clause) = self.generics.split_for_impl();
        Some(quote! {
            #[automatically_derived]
            impl #impl_gen ::std::cmp::PartialEq for #ident #ty_gen #where_clause {
                /// Checks if the `EventId`s of the events are equal.
                fn eq(&self, other: &Self) -> ::std::primitive::bool {
                    self.event_id == other.event_id
                }
            }
            #[automatically_derived]
            impl #impl_gen ::std::cmp::Eq for #ident #ty_gen #where_clause {}
            #[automatically_derived]
            impl #impl_gen ::std::cmp::PartialOrd for #ident #ty_gen #where_clause {
                /// Compares the `EventId`s of the events and orders them lexicographically.
                fn partial_cmp(&self, other: &Self) -> ::std::option::Option<::std::cmp::Ordering> {
                    self.event_id.partial_cmp(&other.event_id)
                }
            }
            #[automatically_derived]
            impl #impl_gen ::std::cmp::Ord for #ident #ty_gen #where_clause {
                /// Compares the `EventId`s of the events and orders them lexicographically.
                fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
                    self.event_id.cmp(&other.event_id)
                }
            }
        })
    }
}
/// All the supported [`Event`] struct kinds.
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
enum EventKind {
    /// Global account data.
    ///
    /// This is user data for the whole account.
    GlobalAccountData,
    /// Room account data.
    ///
    /// This is user data specific to a room.
    RoomAccountData,
    /// Ephemeral room data.
    ///
    /// This is data associated to a room and that is not persisted.
    EphemeralRoom,
    /// Message-like event.
    ///
    /// This is an event that can occur in the timeline and that doesn't have a state key.
    MessageLike,
    /// State event.
    ///
    /// This is an event that can occur in the timeline and that has a state key.
    State,
    /// A to-device event.
    ///
    /// This is an event that is sent directly to another device.
    ToDevice,
    /// `m.room.redaction` event.
    ///
    /// This kind has no [`CommonEventKind`] counterpart (see `common_kind`).
    RoomRedaction,
    /// `m.space.child` event in the format returned at the space hierarchy endpoint.
    ///
    /// This kind has no [`CommonEventKind`] counterpart (see `common_kind`).
    HierarchySpaceChild,
    /// Decrypted event.
    ///
    /// This kind has no [`CommonEventKind`] counterpart (see `common_kind`).
    Decrypted,
}
impl EventKind {
    /// Whether this kind can be found in a room's timeline.
    fn is_timeline(self) -> bool {
        matches!(self, Self::MessageLike | Self::State | Self::RoomRedaction)
    }
    /// The common kind matching this kind, if any.
    ///
    /// Returns `None` for the `RoomRedaction`, `HierarchySpaceChild` and
    /// `Decrypted` variants, which have no [`CommonEventKind`] counterpart.
    fn common_kind(self) -> Option<CommonEventKind> {
        Some(match self {
            Self::GlobalAccountData => CommonEventKind::GlobalAccountData,
            Self::RoomAccountData => CommonEventKind::RoomAccountData,
            Self::EphemeralRoom => CommonEventKind::EphemeralRoom,
            Self::MessageLike => CommonEventKind::MessageLike,
            Self::State => CommonEventKind::State,
            Self::ToDevice => CommonEventKind::ToDevice,
            _ => return None,
        })
    }
    /// Get the name of the event type (struct or enum) for this kind and the given variation.
    ///
    /// The name is the concatenation of the `Display` output of the variation
    /// and of this kind.
    fn to_event_ident(self, variation: EventVariation) -> syn::Ident {
        format_ident!("{variation}{self}")
    }
    /// Whether the `event_id` field is present with this kind and the given variation.
    fn is_event_id_present(self, variation: EventVariation) -> bool {
        self.is_timeline()
            && matches!(
                variation,
                EventVariation::None
                    | EventVariation::Sync
                    | EventVariation::Original
                    | EventVariation::OriginalSync
                    | EventVariation::Redacted
                    | EventVariation::RedactedSync
            )
    }
}
impl fmt::Display for EventKind {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Kinds with a common kind reuse its `Display` implementation; the
        // remaining kinds are spelled out below.
        if let Some(common_kind) = self.common_kind() {
            fmt::Display::fmt(&common_kind, f)
        } else {
            match self {
                Self::RoomRedaction => write!(f, "RoomRedactionEvent"),
                Self::HierarchySpaceChild => write!(f, "HierarchySpaceChildEvent"),
                // NOTE(review): `Decrypted` also returns `None` from
                // `common_kind()` and would hit this `unreachable!()` — confirm
                // that it is never formatted (e.g. via `to_event_ident`).
                _ => unreachable!(),
            }
        }
    }
}
/// A parsed field of an [`Event`].
struct EventField {
    /// The parsed field, without the `palpo_event` attributes.
    inner: syn::Field,
    /// Whether this field should deserialize to the default value if it is missing.
    default: bool,
    /// Whether this field should deserialize to the default value if an error occurs during
    /// deserialization.
    default_on_error: bool,
    /// The name to use when (de)serializing this field.
    ///
    /// If this is not set, the name of the field will be used.
    /// Emitted as a serde `rename` attribute (see `serde_attribute`).
    rename: Option<syn::LitStr>,
    /// The alternate names to recognize when deserializing this field.
    ///
    /// Emitted as serde `alias` attributes (see `serde_attribute`).
    aliases: Vec<syn::LitStr>,
}
impl EventField {
    /// The ident of this field.
    ///
    /// # Panics
    ///
    /// Panics if the field is unnamed; parsing is expected to have rejected
    /// tuple structs already.
    fn ident(&self) -> &syn::Ident {
        self.inner.ident.as_ref().expect(
            "all fields of Event struct should be named; \
             this should have been checked during parsing",
        )
    }
    /// The name of this field in its serialized form.
    ///
    /// This is the `rename` value when one was configured, otherwise the
    /// field's own name turned into a string literal.
    fn serialized_name(&self) -> Cow<'_, syn::LitStr> {
        if let Some(rename) = &self.rename {
            Cow::Borrowed(rename)
        } else {
            let ident = self.ident();
            Cow::Owned(syn::LitStr::new(&ident.to_string(), ident.span()))
        }
    }
    /// The serde attribute to apply to this field.
    ///
    /// Returns `None` when neither a rename nor any aliases were configured.
    fn serde_attribute(&self) -> Option<TokenStream> {
        let rename = self.rename.iter().map(|rename| quote! { rename = #rename });
        let aliases = self.aliases.iter().map(|alias| quote! { alias = #alias });
        let attrs = rename.chain(aliases).collect::<Vec<_>>();

        if attrs.is_empty() {
            None
        } else {
            Some(quote! { #[serde(#( #attrs ),*)] })
        }
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/events/event_content.rs | crates/core-macros/src/events/event_content.rs | //! Implementation of the `EventContent` derive macro.
use std::borrow::Cow;
use as_variant::as_variant;
use proc_macro2::TokenStream;
use quote::{ToTokens, format_ident, quote};
use syn::parse_quote;
mod parse;
use super::common::{
CommonEventKind, EventContentTraitVariation, EventType, EventTypes, EventVariation,
};
use crate::util::{
PrivateField, SerdeMetaItem, StructFieldExt,
TypeExt, NameSpace
};
/// `EventContent` derive macro code generation.
pub(crate) fn expand_event_content(input: syn::DeriveInput) -> syn::Result<TokenStream> {
    let content = EventContent::parse(input)?;

    // Alternate variations of the content type.
    let redacted = content.expand_redacted_event_content();
    let possibly_redacted = content.expand_possibly_redacted_event_content();
    let without_relation = content.expand_event_content_without_relation();

    // Trait implementations for the original variation.
    let content_impl = content.expand_event_content_impl(
        EventContentVariation::Original,
        &content.ident,
        content.fields.as_ref(),
    );
    let static_content_impl = content.expand_static_event_content_impl(&content.ident);
    let json_castable = generate_json_castable_impl(&content.ident, &[]);

    // Type aliases for the different event variations.
    let type_aliases = content.expand_event_type_aliases();

    Ok(quote! {
        #redacted
        #possibly_redacted
        #without_relation
        #content_impl
        #static_content_impl
        #json_castable
        #type_aliases
    })
}
/// Parsed `EventContent` container data.
struct EventContent {
    /// The name of the event content type.
    ident: syn::Ident,
    /// The visibility of the event content type.
    vis: syn::Visibility,
    /// The fields of the event content type, if it is a struct.
    ///
    /// `None` presumably means the content type is not a struct — TODO confirm
    /// against the `parse` implementation.
    fields: Option<Vec<EventContentField>>,
    /// The event types.
    types: EventTypes,
    /// The event kind.
    kind: EventContentKind,
    /// Whether this macro should generate an `*EventContentWithoutRelation` type.
    has_without_relation: bool,
    /// The path for imports from the palpo_core::events crate.
    palpo_core: TokenStream,
}
impl EventContent {
    /// The name of the field that contains the type fragment of the struct, if any.
    fn type_fragment_field(&self) -> Option<&syn::Ident> {
        self.fields
            .as_ref()?
            .iter()
            .find(|field| field.is_type_fragment)
            .and_then(|field| field.inner.ident.as_ref())
    }
    /// Generate the `Redacted*EventContent` variation of this struct, if it needs one.
    fn expand_redacted_event_content(&self) -> Option<TokenStream> {
        if !self.kind.should_generate_redacted() {
            return None;
        }
        let palpo_core = &self.palpo_core;
        let serde = NameSpace::serde();
        let ident = &self.ident;
        let vis = &self.vis;
        let redacted_doc = format!("Redacted form of [`{ident}`]");
        let redacted_ident = EventContentVariation::Redacted.variation_ident(ident);
        // Only fields marked with `skip_redaction` survive redaction.
        let redacted_fields = self
            .fields
            .iter()
            .flatten()
            .filter(|field| field.skip_redaction)
            .collect::<Vec<_>>();
        let redacted_fields_idents = redacted_fields.iter().flat_map(|field| &field.inner.ident);
        let constructor = redacted_fields.is_empty().then(|| {
            let constructor_doc = format!("Creates an empty {redacted_ident}.");
            quote! {
                impl #redacted_ident {
                    #[doc = #constructor_doc]
                    #vis fn new() -> Self {
                        Self {}
                    }
                }
            }
        });
        let redacted_event_content = self.expand_event_content_impl(
            EventContentVariation::Redacted,
            &redacted_ident,
            Some(redacted_fields.iter().copied()),
        );
        let static_event_content_impl = self.expand_static_event_content_impl(&redacted_ident);
        let json_castable_impl = generate_json_castable_impl(&redacted_ident, &[ident]);
        Some(quote! {
            // `RedactContent` is implemented on the *non-redacted* content type.
            #[automatically_derived]
            impl #palpo_core::events::RedactContent for #ident {
                type Redacted = #redacted_ident;
                fn redact(self, _rules: &#palpo_core::room_version_rules::RedactionRules) -> #redacted_ident {
                    #redacted_ident {
                        #( #redacted_fields_idents: self.#redacted_fields_idents, )*
                    }
                }
            }
            #[doc = #redacted_doc]
            #[derive(salvo::oapi::ToSchema, #serde::Deserialize, #serde::Serialize, Clone, Debug)]
            #vis struct #redacted_ident {
                #( #redacted_fields, )*
            }
            #constructor
            #redacted_event_content
            #static_event_content_impl
            #json_castable_impl
        })
    }
    /// Generate the `PossiblyRedacted*EventContent` variation of this struct, if it needs one.
    fn expand_possibly_redacted_event_content(&self) -> Option<TokenStream> {
        if !self.kind.should_generate_possibly_redacted() {
            return None;
        }
        let serde = NameSpace::serde();
        let ident = &self.ident;
        let vis = &self.vis;
        let possibly_redacted_doc = format!(
            "The possibly redacted form of [`{ident}`].\n\n\
            This type is used when it's not obvious whether the content is redacted or not."
        );
        let possibly_redacted_ident =
            EventContentVariation::PossiblyRedacted.variation_ident(ident);
        let mut field_changed = false;
        let possibly_redacted_fields = self
            .fields
            .iter()
            .flatten()
            .map(|field| {
                if field.keep_in_possibly_redacted() {
                    return Cow::Borrowed(field);
                }
                // Otherwise, change the field to an `Option`.
                field_changed = true;
                let mut field = field.clone();
                let wrapped_type = &field.inner.ty;
                field.inner.ty = parse_quote! { Option<#wrapped_type> };
                field
                    .inner
                    .attrs
                    .push(parse_quote! { #[serde(skip_serializing_if = "Option::is_none")] });
                Cow::Owned(field)
            })
            .collect::<Vec<_>>();
        // If at least one field needs to change, generate a new struct, else use a type alias.
        if field_changed {
            let possibly_redacted_event_content = self.expand_event_content_impl(
                EventContentVariation::PossiblyRedacted,
                &possibly_redacted_ident,
                Some(possibly_redacted_fields.iter().map(|field| field.as_ref())),
            );
            let static_event_content_impl =
                self.expand_static_event_content_impl(&possibly_redacted_ident);
            let json_castable_impl = if self.kind.should_generate_redacted() {
                // Use the `Redacted` variation ident here: the possibly redacted type
                // must be castable from both the original and the redacted type.
                // (This previously constructed the `PossiblyRedacted` ident, which made
                // the type "castable" from itself and omitted the redacted type.)
                let redacted_ident = EventContentVariation::Redacted.variation_ident(ident);
                generate_json_castable_impl(&possibly_redacted_ident, &[ident, &redacted_ident])
            } else {
                generate_json_castable_impl(&possibly_redacted_ident, &[ident])
            };
            Some(quote! {
                #[doc = #possibly_redacted_doc]
                #[derive(salvo::oapi::ToSchema, #serde::Deserialize, #serde::Serialize, Clone, Debug)]
                #vis struct #possibly_redacted_ident {
                    #( #possibly_redacted_fields, )*
                }
                #possibly_redacted_event_content
                #static_event_content_impl
                #json_castable_impl
            })
        } else {
            let event_content_kind_trait_impl = self.expand_event_content_kind_trait_impl(
                EventContentTraitVariation::PossiblyRedacted,
                ident,
            );
            Some(quote! {
                #[doc = #possibly_redacted_doc]
                #vis type #possibly_redacted_ident = #ident;
                #event_content_kind_trait_impl
            })
        }
    }
    /// Generate the `*EventContentWithoutRelation` variation of the type.
    fn expand_event_content_without_relation(&self) -> Option<TokenStream> {
        if !self.has_without_relation {
            return None;
        }
        let serde = NameSpace::serde();
        let ident = &self.ident;
        let vis = &self.vis;
        let without_relation_doc = format!(
            "Form of [`{ident}`] without relation.\n\n\
            To construct this type, construct a [`{ident}`] and then use one of its `::from()` / `.into()` methods."
        );
        let without_relation_ident = format_ident!("{ident}WithoutRelation");
        let with_relation_fn_doc =
            format!("Convert `self` into a [`{ident}`] with the given relation.");
        let (relates_to_field, without_relation_fields) = self
            .fields
            .iter()
            .flatten()
            .partition::<Vec<_>, _>(|field| {
                field
                    .inner
                    .ident
                    .as_ref()
                    .is_some_and(|ident| *ident == "relates_to")
            });
        let relates_to_type = relates_to_field
            .first()
            .map(|field| &field.inner.ty)
            .expect(
                "event content type without relation should have a `relates_to` field; \
                 this should have been checked during parsing",
            );
        let without_relation_fields_idents = without_relation_fields
            .iter()
            .flat_map(|field| &field.inner.ident)
            .collect::<Vec<_>>();
        let without_relation_struct_definition = if without_relation_fields.is_empty() {
            quote! { ; }
        } else {
            quote! {
                { #( #without_relation_fields, )* }
            }
        };
        let json_castable_impl = generate_json_castable_impl(&without_relation_ident, &[ident]);
        Some(quote! {
            #[allow(unused_qualifications)]
            #[automatically_derived]
            impl ::std::convert::From<#ident> for #without_relation_ident {
                fn from(c: #ident) -> Self {
                    Self {
                        #( #without_relation_fields_idents: c.#without_relation_fields_idents, )*
                    }
                }
            }
            #[doc = #without_relation_doc]
            #[derive(salvo::oapi::ToSchema, #serde::Deserialize, #serde::Serialize, Clone, Debug)]
            #vis struct #without_relation_ident #without_relation_struct_definition
            impl #without_relation_ident {
                #[doc = #with_relation_fn_doc]
                #vis fn with_relation(self, relates_to: #relates_to_type) -> #ident {
                    #ident {
                        #( #without_relation_fields_idents: self.#without_relation_fields_idents, )*
                        relates_to,
                    }
                }
            }
            #json_castable_impl
        })
    }
    /// Generate the `palpo_events::*EventContent` trait implementations for this kind and the given
    /// event content variation with the given ident and fields.
    fn expand_event_content_impl<'a>(
        &self,
        variation: EventContentVariation,
        ident: &syn::Ident,
        fields: Option<impl IntoIterator<Item = &'a EventContentField>>,
    ) -> TokenStream {
        let event_content_kind_trait_impl =
            self.expand_event_content_kind_trait_impl(variation.into(), ident);
        let static_state_event_content_impl =
            self.expand_static_state_event_content_impl(variation, ident);
        let event_content_from_type_impl = self.expand_event_content_from_type_impl(ident, fields);
        quote! {
            #event_content_from_type_impl
            #event_content_kind_trait_impl
            #static_state_event_content_impl
        }
    }
    /// Generate the `palpo_events::*EventContent` trait implementations for this kind and the given
    /// variation with the given ident.
    fn expand_event_content_kind_trait_impl(
        &self,
        variation: EventContentTraitVariation,
        ident: &syn::Ident,
    ) -> TokenStream {
        let palpo_core = &self.palpo_core;
        let event_type = self.types.ev_type.without_wildcard();
        // When the event type has a `.*` suffix, the runtime type is the static
        // prefix plus the value of the type fragment field.
        let event_type_fn_impl = if let Some(field) = self.type_fragment_field() {
            let format = event_type.to_owned() + "{}";
            quote! {
                ::std::convert::From::from(::std::format!(#format, self.#field))
            }
        } else {
            quote! { ::std::convert::From::from(#event_type) }
        };
        let state_key =
            as_variant!(&self.kind, EventContentKind::State { state_key_type, .. } => state_key_type).map(|state_key_type| {
                quote! {
                    type StateKey = #state_key_type;
                }
            });
        self.kind
            .as_event_type_enums_and_content_kind_traits(variation)
            .into_iter()
            .map(|(event_type_enum, event_content_kind_trait)| {
                quote! {
                    #[automatically_derived]
                    impl #palpo_core::events::#event_content_kind_trait for #ident {
                        #state_key
                        fn event_type(&self) -> #palpo_core::events::#event_type_enum {
                            #event_type_fn_impl
                        }
                    }
                }
            })
            .collect()
    }
    /// Generate the `palpo_events::StaticStateEventContent` trait implementation for this kind and
    /// the given variation with the given ident, if it needs one.
    fn expand_static_state_event_content_impl(
        &self,
        variation: EventContentVariation,
        ident: &syn::Ident,
    ) -> Option<TokenStream> {
        let EventContentKind::State { unsigned_type, .. } = &self.kind else {
            // Only the `State` kind can implement this trait.
            return None;
        };
        if variation != EventContentVariation::Original {
            // Only the original variation can implement this trait.
            return None;
        }
        let palpo_core = &self.palpo_core;
        let possibly_redacted_ident =
            EventContentVariation::PossiblyRedacted.variation_ident(ident);
        Some(quote! {
            #[automatically_derived]
            impl #palpo_core::events::StaticStateEventContent for #ident {
                type PossiblyRedacted = #possibly_redacted_ident;
                type Unsigned = #unsigned_type;
            }
        })
    }
    /// Generate the `StaticEventContent` trait implementation for the given ident.
    fn expand_static_event_content_impl(&self, ident: &syn::Ident) -> TokenStream {
        let palpo_core = &self.palpo_core;
        let static_event_type = self.types.ev_type.without_wildcard();
        let is_prefix = if self.types.is_prefix() {
            quote! { #palpo_core::events::True }
        } else {
            quote! { #palpo_core::events::False }
        };
        quote! {
            impl #palpo_core::events::StaticEventContent for #ident {
                const TYPE: &'static ::std::primitive::str = #static_event_type;
                type IsPrefix = #is_prefix;
            }
        }
    }
    /// Generate the `palpo_events::EventContentFromType` trait implementation for the given ident
    /// with the given fields, if this event type has a type fragment.
    fn expand_event_content_from_type_impl<'a>(
        &self,
        ident: &syn::Ident,
        fields: Option<impl IntoIterator<Item = &'a EventContentField>>,
    ) -> Option<TokenStream> {
        let type_fragment_field = self.type_fragment_field()?;
        let fields = fields.expect(
            "event content with `.*` type suffix should be a struct; \
            this should have been checked during parsing",
        );
        let palpo_core = &self.palpo_core;
        let serde = NameSpace::serde();
        let serde_json = NameSpace::serde_json();
        let type_prefixes = self.types.iter().map(EventType::without_wildcard);
        let type_prefixes = quote! {
            [#( #type_prefixes, )*]
        };
        let fields_without_type_fragment = fields
            .into_iter()
            .filter(|field| !field.is_type_fragment)
            .map(|field| PrivateField(&field.inner))
            .collect::<Vec<_>>();
        let fields_ident_without_type_fragment = fields_without_type_fragment
            .iter()
            .filter_map(|f| f.0.ident.as_ref());
        Some(quote! {
            impl #palpo_core::events::EventContentFromType for #ident {
                fn from_parts(
                    ev_type: &::std::primitive::str,
                    content: &#serde_json::value::RawValue,
                ) -> #serde_json::Result<Self> {
                    #[derive(#serde::Deserialize)]
                    struct WithoutTypeFragment {
                        #( #fields_without_type_fragment, )*
                    }
                    if let ::std::option::Option::Some(type_fragment) =
                        #type_prefixes.iter().find_map(|prefix| ev_type.strip_prefix(prefix))
                    {
                        let c: WithoutTypeFragment = #serde_json::from_str(content.get())?;
                        ::std::result::Result::Ok(Self {
                            #(
                                #fields_ident_without_type_fragment:
                                    c.#fields_ident_without_type_fragment,
                            )*
                            #type_fragment_field: type_fragment.to_owned(),
                        })
                    } else {
                        ::std::result::Result::Err(#serde::de::Error::custom(
                            ::std::format!(
                                "expected event type starting with one of `{:?}`, found `{}`",
                                #type_prefixes, ev_type,
                            )
                        ))
                    }
                }
            }
        })
    }
    /// Generate the type aliases for the event.
    fn expand_event_type_aliases(&self) -> Option<TokenStream> {
        // The redaction module has its own event types.
        if self.ident == "RoomRedactionEventContent" {
            return None;
        }
        let palpo_core = &self.palpo_core;
        let event_type = &self.types.ev_type;
        let ident = &self.ident;
        let ident_s = ident.to_string();
        let ev_type_s = ident_s.strip_suffix("Content").expect(
            "event content struct name should end with `Content`; \
            this should have been checked during parsing",
        );
        let vis = &self.vis;
        Some(
            self.kind
                .event_variations()
                .iter()
                .flat_map(|&variation| {
                    std::iter::repeat(variation)
                        .zip(self.kind.as_event_idents(variation).into_iter().flatten())
                })
                .map(|(variation, (type_kind_prefix, event_ident))| {
                    let type_alias_ident =
                        format_ident!("{variation}{type_kind_prefix}{ev_type_s}");
                    // Details about the variation added at the end of the sentence.
                    let doc_suffix = match variation {
                        EventVariation::None | EventVariation::Original => "",
                        EventVariation::Sync | EventVariation::OriginalSync => {
                            " from a `sync_events` response"
                        }
                        EventVariation::Stripped => " from an invited room preview",
                        EventVariation::Redacted => " that has been redacted",
                        EventVariation::RedactedSync => {
                            " from a `sync_events` response that has been redacted"
                        }
                        EventVariation::Initial => " for creating a room",
                    };
                    let type_alias_doc = if type_kind_prefix.is_empty() {
                        format!("An `{event_type}` event{doc_suffix}.")
                    } else {
                        format!(
                            "A {} `{event_type}` event{doc_suffix}.",
                            type_kind_prefix.to_lowercase()
                        )
                    };
                    let content_ident = if variation.is_redacted() {
                        EventContentVariation::Redacted.variation_ident(ident)
                    } else if let EventVariation::Stripped = variation {
                        EventContentVariation::PossiblyRedacted.variation_ident(ident)
                    } else {
                        EventContentVariation::Original.variation_ident(ident)
                    };
                    quote! {
                        #[doc = #type_alias_doc]
                        #vis type #type_alias_ident = #palpo_core::events::#event_ident<#content_ident>;
                    }
                })
                .collect(),
        )
    }
}
/// A parsed field of an event content struct.
#[derive(Clone)]
struct EventContentField {
    /// The inner field, with the `palpo_enum` attributes stripped.
    ///
    /// NOTE(review): the sibling `EventField` type talks about `palpo_event`
    /// attributes — confirm which attribute name is actually stripped here.
    inner: syn::Field,
    /// Whether this field should be kept during redaction.
    skip_redaction: bool,
    /// Whether this field represents the suffix of the event type.
    is_type_fragment: bool,
}
impl EventContentField {
    /// Whether to keep this field as-is when generating the
    /// `PossiblyRedacted*EventContent` variation.
    ///
    /// A field stays untouched when it survives redaction anyway
    /// (`skip_redaction`), when its type is already an `Option`, or when serde
    /// falls back to a default value for it (`#[serde(default)]`).
    fn keep_in_possibly_redacted(&self) -> bool {
        if self.skip_redaction {
            return true;
        }
        if self.inner.ty.option_inner_type().is_some() {
            return true;
        }
        self.inner.has_serde_meta_item(SerdeMetaItem::Default)
    }
}
impl ToTokens for EventContentField {
    // Delegate to the inner `syn::Field`, so the parsed field can be
    // interpolated directly inside `quote!` when generating struct definitions.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.inner.to_tokens(tokens);
    }
}
/// The possible kinds of event content and their settings.
#[derive(Clone)]
#[allow(clippy::large_enum_variant)]
enum EventContentKind {
    /// Global account data.
    ///
    /// This is user data for the whole account.
    GlobalAccountData,
    /// Room account data.
    ///
    /// This is user data specific to a room.
    RoomAccountData,
    /// Both account data kinds.
    ///
    /// This is data usable as both global and room account data.
    BothAccountData,
    /// Ephemeral room data.
    ///
    /// This is data associated to a room and that is not persisted.
    EphemeralRoom,
    /// Message-like event.
    ///
    /// This is an event that can occur in the timeline and that doesn't have a state key.
    MessageLike {
        /// Whether the `Redacted*EventContent` type is implemented manually rather than generated
        /// by this macro.
        has_custom_redacted: bool,
    },
    /// State event.
    ///
    /// This is an event that can occur in the timeline and that has a state key.
    State {
        /// The type of the state key.
        state_key_type: syn::Type,
        /// The type of the unsigned data.
        unsigned_type: syn::Type,
        /// Whether the `Redacted*EventContent` type is implemented manually rather than generated
        /// by this macro.
        has_custom_redacted: bool,
        /// Whether the `PossiblyRedacted*EventContent` type is implemented manually rather than
        /// generated by this macro.
        has_custom_possibly_redacted: bool,
    },
    /// A to-device event.
    ///
    /// This is an event that is sent directly to another device.
    ToDevice,
}
impl EventContentKind {
    /// The [`CommonEventKind`] matching this event content kind, if there is a single one.
    ///
    /// Returns `None` for [`EventContentKind::BothAccountData`].
    fn event_kind(&self) -> Option<CommonEventKind> {
        Some(match self {
            Self::GlobalAccountData => CommonEventKind::GlobalAccountData,
            Self::RoomAccountData => CommonEventKind::RoomAccountData,
            Self::BothAccountData => return None,
            Self::EphemeralRoom => CommonEventKind::EphemeralRoom,
            Self::MessageLike { .. } => CommonEventKind::MessageLike,
            Self::State { .. } => CommonEventKind::State,
            Self::ToDevice => CommonEventKind::ToDevice,
        })
    }

    /// Whether this matches an account data kind.
    fn is_account_data(&self) -> bool {
        matches!(self, Self::BothAccountData)
            || self.event_kind().is_some_and(|event_kind| {
                matches!(
                    event_kind,
                    CommonEventKind::GlobalAccountData | CommonEventKind::RoomAccountData
                )
            })
    }

    /// Whether we should generate a `Redacted*EventContent` variation for this kind.
    fn should_generate_redacted(&self) -> bool {
        // We only generate redacted content structs for state and message-like events.
        matches!(self, Self::MessageLike { has_custom_redacted, .. } | Self::State { has_custom_redacted, .. } if !*has_custom_redacted)
    }

    /// Whether we should generate a `PossiblyRedacted*EventContent` variation for this kind.
    fn should_generate_possibly_redacted(&self) -> bool {
        // We only generate possibly redacted content structs for state events.
        matches!(self, Self::State { has_custom_possibly_redacted, .. } if !*has_custom_possibly_redacted)
    }

    /// Get the list of variations for an event type (struct or enum) for this kind.
    fn event_variations(&self) -> &'static [EventVariation] {
        if let Some(event_kind) = self.event_kind() {
            event_kind.event_variations()
        } else {
            // Both account data types have the same variations.
            CommonEventKind::GlobalAccountData.event_variations()
        }
    }

    /// Get the idents of the event struct for these kinds and the given variation.
    ///
    /// Returns a list of `(type_prefix, event_ident)` if the variation is supported for these
    /// kinds.
    fn as_event_idents(
        &self,
        variation: EventVariation,
    ) -> Option<Vec<(&'static str, syn::Ident)>> {
        if let Some(event_kind) = self.event_kind() {
            event_kind
                .to_event_ident(variation)
                .map(|event_ident| vec![("", event_ident)])
        } else {
            // `BothAccountData`: emit one entry per account data kind, each with a
            // distinguishing type prefix.
            let first_event_ident = CommonEventKind::GlobalAccountData
                .to_event_ident(variation)
                .map(|event_ident| ("Global", event_ident));
            let second_event_ident = CommonEventKind::RoomAccountData
                .to_event_ident(variation)
                .map(|event_ident| ("Room", event_ident));
            if first_event_ident.is_none() && second_event_ident.is_none() {
                None
            } else {
                Some(
                    first_event_ident
                        .into_iter()
                        .chain(second_event_ident)
                        .collect(),
                )
            }
        }
    }

    /// Get the idents of the `*EventType` enums and `*EventContent` traits for this kind and the
    /// given variation.
    ///
    /// Returns a list of `(event_type_enum, event_content_trait)`.
    fn as_event_type_enums_and_content_kind_traits(
        &self,
        variation: EventContentTraitVariation,
    ) -> Vec<(syn::Ident, syn::Ident)> {
        if let Some(event_kind) = self.event_kind() {
            vec![(
                event_kind.to_event_type_enum(),
                event_kind.to_content_kind_trait(variation),
            )]
        } else {
            // `BothAccountData`: one pair for each account data kind.
            [
                CommonEventKind::GlobalAccountData,
                CommonEventKind::RoomAccountData,
            ]
            .iter()
            .map(|event_kind| {
                (
                    event_kind.to_event_type_enum(),
                    event_kind.to_content_kind_trait(variation),
                )
            })
            .collect()
        }
    }
}
/// Implement `JsonCastable<JsonObject> for {ident}` and `JsonCastable<{ident}> for {other}`.
fn generate_json_castable_impl(ident: &syn::Ident, others: &[&syn::Ident]) -> TokenStream {
    let palpo_core = NameSpace::palpo_core();

    // One impl per "other" type, allowing it to be cast to `ident`.
    let other_impls = others.iter().map(|other| {
        quote! {
            #[automatically_derived]
            impl #palpo_core::serde::JsonCastable<#ident> for #other {}
        }
    });

    // `ident` itself is always castable to a plain JSON object.
    let mut output = quote! {
        #[automatically_derived]
        impl #palpo_core::serde::JsonCastable<#palpo_core::serde::JsonObject> for #ident {}
    };
    output.extend(other_impls);
    output
}
/// The possible variations of an event content type.
///
/// See [`EventContentVariation::variation_ident`] for how each variation maps to a
/// type-name prefix.
#[derive(Clone, Copy, PartialEq)]
enum EventContentVariation {
    /// The original, non-redacted, event content.
    Original,
    /// The redacted event content.
    Redacted,
    /// Event content that might be redacted or not.
    PossiblyRedacted,
}
impl EventContentVariation {
    /// Get the ident for this variation, based on the given ident.
    ///
    /// The original variation borrows the ident unchanged; the other variations
    /// allocate a new ident carrying the matching prefix.
    fn variation_ident(self, ident: &syn::Ident) -> Cow<'_, syn::Ident> {
        let prefix = match self {
            Self::Original => return Cow::Borrowed(ident),
            Self::Redacted => "Redacted",
            Self::PossiblyRedacted => "PossiblyRedacted",
        };
        Cow::Owned(format_ident!("{prefix}{ident}"))
    }
}
// The content variations are a strict subset of the trait variations (which also
// include `Static`), so this conversion is total.
impl From<EventContentVariation> for EventContentTraitVariation {
    fn from(value: EventContentVariation) -> Self {
        match value {
            EventContentVariation::Original => Self::Original,
            EventContentVariation::Redacted => Self::Redacted,
            EventContentVariation::PossiblyRedacted => Self::PossiblyRedacted,
        }
    }
}
impl CommonEventKind {
    /// Get the name of the event type (struct or enum) for this kind and the given variation, if
    /// any is supported.
    ///
    /// Returns `None` when the kind does not support the variation.
    fn to_event_ident(self, variation: EventVariation) -> Option<syn::Ident> {
        self.event_variations()
            .contains(&variation)
            .then(|| format_ident!("{variation}{self}"))
    }

    /// Get the name of the `*EventType` enum for this kind.
    fn to_event_type_enum(self) -> syn::Ident {
        format_ident!("{self}Type")
    }

    /// Get the name of the `{variation}{kind}Content` trait for this kind and the given
    /// variation.
    fn to_content_kind_trait(self, variation: EventContentTraitVariation) -> syn::Ident {
        format_ident!("{variation}{self}Content")
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/events/event_enum.rs | crates/core-macros/src/events/event_enum.rs | //! Implementation of the `event_enum!` macro.
use std::fmt;
use proc_macro2::TokenStream;
use quote::{format_ident, quote};
mod event_kind_enum;
mod event_type;
mod parse;
mod util;
use self::{
event_kind_enum::EventEnum, event_type::EventTypeEnum, util::expand_json_castable_impl,
};
use super::common::{
CommonEventField, CommonEventKind, EventContentTraitVariation, EventTypes, EventVariation,
};
use crate::util::NameSpace;
/// Generates enums to represent the various Matrix event types.
///
/// Expands every declared event-kind enum, then synthesizes the `Timeline` kind from
/// the message-like and state entries, and finally emits the matching `*EventType`
/// enums for all kinds.
pub(crate) fn expand_event_enum(input: EventEnumInput) -> TokenStream {
    let palpo_core = NameSpace::palpo_core();
    let mut event_enums_data = input.enums;
    let mut tokens = TokenStream::new();
    let mut timeline_data = None;
    for data in &event_enums_data {
        tokens.extend(
            EventEnum::new(data, &palpo_core)
                .expand()
                .unwrap_or_else(syn::Error::into_compile_error),
        );
        // Create the Timeline kind if there are events to put in it. The `Any*TimelineEvent` enums
        // are implemented manually so we don't need to generate them.
        if data.kind.is_timeline() {
            timeline_data
                .get_or_insert_with(|| EventEnumData {
                    attrs: Vec::new(),
                    kind: EventEnumKind::Timeline,
                    events: Vec::new(),
                })
                .events
                .extend(data.events.iter().cloned());
        }
    }
    // Handle the Timeline kind if necessary.
    if let Some(mut data) = timeline_data {
        // Deduplicate event variants, in case there are some with the same `type` in the timeline
        // kinds. This is necessary for the `m.room.encrypted` state event type from MSC4362.
        let mut deduped_events: Vec<EventEnumEntry> = Vec::new();
        for event in data.events {
            if let Some(idx) = deduped_events
                .iter()
                .position(|deduped_event| deduped_event.types.ev_type == event.types.ev_type)
            {
                // If there is a variant without config attributes use that.
                if deduped_events[idx].attrs != event.attrs && event.attrs.is_empty() {
                    deduped_events[idx] = event;
                }
            } else {
                deduped_events.push(event);
            }
        }
        data.events = deduped_events;
        // Generate `JsonCastable` implementations for `Any*TimelineEvent` enums.
        tokens.extend(data.kind.event_enum_variations().iter().map(|variation| {
            let ident = data.kind.to_event_enum_ident(*variation);
            expand_json_castable_impl(&ident, data.kind, *variation, &palpo_core)
        }));
        event_enums_data.push(data);
    }
    // Emit the `*EventType` enums last, once the Timeline kind (if any) has been added.
    tokens.extend(
        event_enums_data
            .iter()
            .map(|data| EventTypeEnum::new(data, &palpo_core).expand()),
    );
    tokens
}
/// The parsed `event_enum!` macro.
pub(crate) struct EventEnumInput {
    /// The parsed enums, one per declared event kind.
    enums: Vec<EventEnumData>,
}
/// The parsed data for a specific [`EventEnumKind`] in the `event_enum!` macro.
struct EventEnumData {
    /// Outer attributes on the declaration, such as docstrings.
    attrs: Vec<syn::Attribute>,
    /// The event enum kind.
    kind: EventEnumKind,
    /// The event types for this kind.
    events: Vec<EventEnumEntry>,
}
/// All the possible [`EventEnum`] kinds.
///
/// This mirrors `CommonEventKind` with one extra variant, [`EventEnumKind::Timeline`],
/// which is synthesized from the message-like and state kinds.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
enum EventEnumKind {
    /// Global account data.
    ///
    /// This is user data for the whole account.
    GlobalAccountData,
    /// Room account data.
    ///
    /// This is user data specific to a room.
    RoomAccountData,
    /// Ephemeral room data.
    ///
    /// This is data associated to a room and that is not persisted.
    EphemeralRoom,
    /// Message-like event.
    ///
    /// This is an event that can occur in the timeline and that doesn't have a state key.
    MessageLike,
    /// State event.
    ///
    /// This is an event that can occur in the timeline and that has a state key.
    State,
    /// Timeline event.
    ///
    /// This is any event that can occur in the timeline, so this includes message-like and state
    /// events.
    Timeline,
    /// A to-device event.
    ///
    /// This is an event that is sent directly to another device.
    ToDevice,
}
impl EventEnumKind {
    /// Whether this kind can be found in a room's timeline.
    fn is_timeline(self) -> bool {
        matches!(self, Self::MessageLike | Self::State)
    }

    /// The common kind matching this kind, if any.
    ///
    /// Returns `None` for the [`EventEnumKind::Timeline`] variant.
    fn common_kind(self) -> Option<CommonEventKind> {
        Some(match self {
            Self::GlobalAccountData => CommonEventKind::GlobalAccountData,
            Self::RoomAccountData => CommonEventKind::RoomAccountData,
            Self::EphemeralRoom => CommonEventKind::EphemeralRoom,
            Self::MessageLike => CommonEventKind::MessageLike,
            Self::State => CommonEventKind::State,
            Self::ToDevice => CommonEventKind::ToDevice,
            Self::Timeline => return None,
        })
    }

    /// Get the name of the event type (struct or enum) for this kind and the given variation.
    fn to_event_ident(self, variation: EventVariation) -> syn::Ident {
        format_ident!("{variation}{self}")
    }

    /// Get the name of the `*EventType` enum for this kind.
    fn to_event_type_enum(self) -> syn::Ident {
        format_ident!("{self}Type")
    }

    /// Get the name of the `{variation}{kind}Content` trait for this kind and the given variation.
    fn to_content_kind_trait(self, variation: EventContentTraitVariation) -> syn::Ident {
        format_ident!("{variation}{self}Content")
    }

    /// Get the list of variations for an event type (struct or enum) for this kind.
    fn event_variations(self) -> &'static [EventVariation] {
        if let Some(common_kind) = self.common_kind() {
            common_kind.event_variations()
        } else {
            // The Timeline kind has no variations.
            &[]
        }
    }

    /// Get the list of variations for an event enum for this kind.
    fn event_enum_variations(self) -> &'static [EventVariation] {
        match self {
            Self::GlobalAccountData | Self::RoomAccountData | Self::ToDevice => {
                &[EventVariation::None]
            }
            Self::EphemeralRoom => &[EventVariation::Sync],
            Self::MessageLike | Self::Timeline => &[EventVariation::None, EventVariation::Sync],
            Self::State => &[
                EventVariation::None,
                EventVariation::Sync,
                EventVariation::Stripped,
                EventVariation::Initial,
            ],
        }
    }

    /// Whether the given field is present in this kind and variation.
    fn field_is_present(self, field: CommonEventField, var: EventVariation) -> bool {
        match field {
            // Timestamps and event IDs only exist on timeline kinds, and only in the
            // listed (non-stripped, non-initial) variations.
            CommonEventField::OriginServerTs | CommonEventField::EventId => {
                self.is_timeline()
                    && matches!(
                        var,
                        EventVariation::None
                            | EventVariation::Sync
                            | EventVariation::Original
                            | EventVariation::OriginalSync
                            | EventVariation::Redacted
                            | EventVariation::RedactedSync
                    )
            }
            // The room ID only appears on room-scoped kinds, in the non-sync variations.
            CommonEventField::RoomId => {
                matches!(self, Self::MessageLike | Self::State | Self::EphemeralRoom)
                    && matches!(
                        var,
                        EventVariation::None | EventVariation::Original | EventVariation::Redacted
                    )
            }
            CommonEventField::Sender => {
                matches!(self, Self::MessageLike | Self::State | Self::ToDevice)
                    && var != EventVariation::Initial
            }
        }
    }
}
// Every common kind has a matching enum kind; only `EventEnumKind::Timeline` has no
// common counterpart, so this conversion is total.
impl From<CommonEventKind> for EventEnumKind {
    fn from(value: CommonEventKind) -> Self {
        match value {
            CommonEventKind::GlobalAccountData => Self::GlobalAccountData,
            CommonEventKind::RoomAccountData => Self::RoomAccountData,
            CommonEventKind::EphemeralRoom => Self::EphemeralRoom,
            CommonEventKind::MessageLike => Self::MessageLike,
            CommonEventKind::State => Self::State,
            CommonEventKind::ToDevice => Self::ToDevice,
        }
    }
}
impl fmt::Display for EventEnumKind {
    /// Delegate to the common kind's name; only the `Timeline` kind has no common
    /// counterpart and gets its name spelled out here.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        match self.common_kind() {
            Some(common_kind) => fmt::Display::fmt(&common_kind, f),
            None => f.write_str("TimelineEvent"),
        }
    }
}
/// An entry for an event type in the `event_enum!` macro.
#[derive(Clone)]
struct EventEnumEntry {
    /// The attributes on the event type.
    attrs: Vec<syn::Attribute>,
    /// The types of the event.
    types: EventTypes,
    /// The path to the module containing the event.
    ev_path: syn::Path,
    /// The name of the variant.
    ident: syn::Ident,
    /// Whether this event represents both global and room account data.
    ///
    /// Set after parsing, when the same entry appears in both account data kinds.
    both_account_data: bool,
}
impl EventEnumEntry {
    /// Whether this entry has a type fragment.
    ///
    /// True when the main event type ends with a `.*` wildcard.
    fn has_type_fragment(&self) -> bool {
        self.types.ev_type.is_prefix()
    }

    /// Get or generate the path of the event type for this entry.
    ///
    /// The generated last segment is `{variation}{type_prefix}{ident}Event`.
    fn to_event_path(&self, kind: EventEnumKind, var: EventVariation) -> syn::Path {
        let type_prefix = match kind {
            EventEnumKind::ToDevice => "ToDevice",
            // Special case event types that represent both account data kinds.
            EventEnumKind::GlobalAccountData if self.both_account_data => "Global",
            EventEnumKind::RoomAccountData if self.both_account_data => "Room",
            // Special case encrypted state event for MSC4362.
            EventEnumKind::State
                if self
                    .types
                    .stable_type()
                    .is_some_and(|ev_type| ev_type.as_str() == "m.room.encrypted") =>
            {
                "State"
            }
            _ => "",
        };
        let event_name = format_ident!("{var}{type_prefix}{}Event", self.ident);
        let mut path = self.ev_path.clone();
        path.segments.push(event_name.into());
        path
    }

    /// Get or generate the path of the event content type for this entry.
    ///
    /// The generated last segment is `{type_prefix}{ident}EventContent`.
    fn to_event_content_path(&self, kind: EventEnumKind) -> syn::Path {
        let type_prefix = match kind {
            EventEnumKind::ToDevice => "ToDevice",
            // Special case encrypted state event for MSC4362.
            EventEnumKind::State
                if self
                    .types
                    .stable_type()
                    .is_some_and(|ev_type| ev_type.as_str() == "m.room.encrypted") =>
            {
                "State"
            }
            _ => "",
        };
        let content_name = format_ident!("{type_prefix}{}EventContent", self.ident);
        let mut path = self.ev_path.clone();
        path.segments.push(content_name.into());
        path
    }

    /// Generate the docs for this entry.
    ///
    /// Documents the main (stable) type, and mentions the unstable type and any
    /// deserialization aliases when present.
    fn docs(&self) -> TokenStream {
        let main_type = self.types.main_type();
        let mut doc = quote! {
            #[doc = #main_type]
        };
        if self.types.ev_type != *main_type {
            let unstable_name = format!(
                "This variant uses the unstable type `{}`.",
                self.types.ev_type
            );
            doc.extend(quote! {
                #[doc = ""]
                #[doc = #unstable_name]
            });
        }
        let aliases = &self.types.aliases;
        match aliases.len() {
            0 => {}
            1 => {
                let alias = format!(
                    "This variant can also be deserialized from the `{}` type.",
                    aliases[0]
                );
                doc.extend(quote! {
                    #[doc = ""]
                    #[doc = #alias]
                });
            }
            _ => {
                let aliases = format!(
                    "This variant can also be deserialized from the following types: {}.",
                    aliases
                        .iter()
                        .map(|alias| format!("`{alias}`"))
                        .collect::<Vec<_>>()
                        .join(", ")
                );
                doc.extend(quote! {
                    #[doc = ""]
                    #[doc = #aliases]
                });
            }
        }
        doc
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/events/common.rs | crates/core-macros/src/events/common.rs | //! Common types for event macros.
use std::fmt;
use proc_macro2::{Span, TokenStream};
use quote::{ToTokens, format_ident, quote};
use syn::parse::{Parse, ParseStream};
use crate::util::m_prefix_name_to_type_name;
/// All the common event kinds.
///
/// Shared between the event-content and event-enum macros; the `Ord` derive gives the
/// kinds a stable ordering when stored in sorted collections.
#[derive(Clone, Copy, Debug, Eq, PartialEq, PartialOrd, Ord)]
pub(super) enum CommonEventKind {
    /// Global account data.
    ///
    /// This is user data for the whole account.
    GlobalAccountData,
    /// Room account data.
    ///
    /// This is user data specific to a room.
    RoomAccountData,
    /// Ephemeral room data.
    ///
    /// This is data associated to a room and that is not persisted.
    EphemeralRoom,
    /// Message-like event.
    ///
    /// This is an event that can occur in the timeline and that doesn't have a state key.
    MessageLike,
    /// State event.
    ///
    /// This is an event that can occur in the timeline and that has a state key.
    State,
    /// A to-device event.
    ///
    /// This is an event that is sent directly to another device.
    ToDevice,
}
impl CommonEventKind {
    /// Get the list of variations for an event type (struct or enum) for this kind.
    ///
    /// Kinds that cannot be redacted only come in the plain (and possibly sync) form;
    /// message-like and state events additionally get the original/redacted variations.
    pub(super) fn event_variations(self) -> &'static [EventVariation] {
        match self {
            Self::GlobalAccountData | Self::RoomAccountData | Self::ToDevice => {
                &[EventVariation::None]
            }
            Self::EphemeralRoom => &[EventVariation::None, EventVariation::Sync],
            Self::MessageLike => &[
                EventVariation::None,
                EventVariation::Original,
                EventVariation::Redacted,
                EventVariation::Sync,
                EventVariation::OriginalSync,
                EventVariation::RedactedSync,
            ],
            Self::State => &[
                EventVariation::None,
                EventVariation::Original,
                EventVariation::Redacted,
                EventVariation::Sync,
                EventVariation::OriginalSync,
                EventVariation::RedactedSync,
                EventVariation::Stripped,
                EventVariation::Initial,
            ],
        }
    }
}
impl fmt::Display for CommonEventKind {
    /// Write the Rust type-name stem for this kind (e.g. `StateEvent`), used when
    /// assembling generated type names.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let name = match self {
            Self::GlobalAccountData => "GlobalAccountDataEvent",
            Self::RoomAccountData => "RoomAccountDataEvent",
            Self::EphemeralRoom => "EphemeralRoomEvent",
            Self::MessageLike => "MessageLikeEvent",
            Self::State => "StateEvent",
            Self::ToDevice => "ToDeviceEvent",
        };
        f.write_str(name)
    }
}
impl Parse for CommonEventKind {
    /// Parse a kind from its bare identifier (e.g. `MessageLike`), erroring with the
    /// list of valid kinds on anything else.
    fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
        let ident: syn::Ident = input.parse()?;
        Ok(match ident.to_string().as_str() {
            "GlobalAccountData" => Self::GlobalAccountData,
            "RoomAccountData" => Self::RoomAccountData,
            "EphemeralRoom" => Self::EphemeralRoom,
            "MessageLike" => Self::MessageLike,
            "State" => Self::State,
            "ToDevice" => Self::ToDevice,
            id => {
                return Err(syn::Error::new_spanned(
                    ident,
                    format!(
                        "valid event kinds are GlobalAccountData, RoomAccountData, EphemeralRoom, \
                         MessageLike, State, ToDevice; found `{id}`",
                    ),
                ));
            }
        })
    }
}
/// All the possible event variations.
///
/// "Sync" variations are the formats received via the `/sync` endpoint (without a
/// `room_id`); "Original"/"Redacted" distinguish whether the event was redacted.
#[derive(Clone, Copy, Debug, Eq, PartialEq)]
pub(super) enum EventVariation {
    /// The full format of an event.
    ///
    /// Either the event cannot be redacted, or the type contains variants for the original and
    /// redacted variations.
    None,
    /// The sync format of an event.
    ///
    /// Either the event cannot be redacted, or the type contains variants for the original and
    /// redacted variations.
    Sync,
    /// The full format of an event that can be redacted.
    Original,
    /// The sync format of an event that can be redacted.
    OriginalSync,
    /// The stripped format of an event.
    Stripped,
    /// The format of an event passed during room creation.
    Initial,
    /// The full format of an event that was redacted.
    Redacted,
    /// The sync format of an event that was redacted.
    RedactedSync,
}
impl EventVariation {
    /// Whether this variation was redacted.
    pub(super) fn is_redacted(self) -> bool {
        matches!(self, Self::Redacted | Self::RedactedSync)
    }

    /// Whether this variation was received via the `/sync` endpoint.
    pub(super) fn is_sync(self) -> bool {
        matches!(self, Self::Sync | Self::OriginalSync | Self::RedactedSync)
    }

    /// Convert this "sync" variation to one which contains a `room_id`, if possible.
    ///
    /// Returns `None` if this is not a "sync" variation.
    pub(super) fn to_full(self) -> Option<Self> {
        match self {
            Self::Sync => Some(Self::None),
            Self::OriginalSync => Some(Self::Original),
            Self::RedactedSync => Some(Self::Redacted),
            _ => None,
        }
    }

    /// Whether this variation can implement `JsonCastable` for the other variation, if both are
    /// available for a kind.
    ///
    /// A variation can be cast to another variation when that other variation includes the same
    /// fields or less.
    pub(super) fn is_json_castable_to(self, other: Self) -> bool {
        // For each source variation, the set of variations with a subset of its fields.
        let targets: &[Self] = match self {
            Self::None | Self::OriginalSync | Self::RedactedSync => &[Self::Sync, Self::Stripped],
            Self::Original => &[Self::None, Self::Sync, Self::OriginalSync, Self::Stripped],
            Self::Redacted => &[Self::None, Self::Sync, Self::RedactedSync, Self::Stripped],
            Self::Sync | Self::Stripped | Self::Initial => &[],
        };
        targets.contains(&other)
    }
}
impl fmt::Display for EventVariation {
    /// Write the type-name prefix for this variation; the plain variation contributes
    /// no prefix at all.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let prefix = match self {
            Self::None => "",
            Self::Sync => "Sync",
            Self::Original => "Original",
            Self::OriginalSync => "OriginalSync",
            Self::Stripped => "Stripped",
            Self::Initial => "Initial",
            Self::Redacted => "Redacted",
            Self::RedactedSync => "RedactedSync",
        };
        f.write_str(prefix)
    }
}
/// The possible variations of an event content trait.
#[derive(Clone, Copy, PartialEq)]
pub(super) enum EventContentTraitVariation {
    /// An event content that wasn't redacted.
    Original,
    /// An event content that was redacted.
    Redacted,
    /// An event content that might have been redacted.
    PossiblyRedacted,
    /// Static data about an event content that wasn't redacted.
    Static,
}
impl fmt::Display for EventContentTraitVariation {
    /// Write the trait-name prefix for this variation; `Original` contributes no
    /// prefix.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        let prefix = match self {
            Self::Original => "",
            Self::Redacted => "Redacted",
            Self::PossiblyRedacted => "PossiblyRedacted",
            Self::Static => "Static",
        };
        f.write_str(prefix)
    }
}
/// An event type.
///
/// Keeps both the original `syn::LitStr` (for spans and re-emission) and the
/// extracted string value.
#[derive(Debug, Clone)]
pub(super) struct EventType {
    /// The source of the event type.
    source: syn::LitStr,
    /// Whether this event type is a prefix (declared with a trailing `.*`).
    is_prefix: bool,
    /// The value of the event type.
    value: String,
}
impl EventType {
    /// Whether this event type is a prefix.
    pub(super) fn is_prefix(&self) -> bool {
        self.is_prefix
    }

    /// Access the inner string of this event type.
    pub(super) fn as_str(&self) -> &str {
        &self.value
    }

    /// Access the inner string of this event type and remove the final `*` if this is a prefix.
    pub(super) fn without_wildcard(&self) -> &str {
        if !self.is_prefix {
            return &self.value;
        }
        self.value.trim_end_matches('*')
    }

    /// Whether this event type is stable.
    ///
    /// A stable event type starts with `m.`.
    pub(super) fn is_stable(&self) -> bool {
        self.value.starts_with("m.")
    }

    /// Get the `match` arm representation of this event type.
    ///
    /// Prefix types produce a guard matching any string with that prefix; plain types
    /// produce a literal pattern.
    pub(super) fn as_match_arm(&self) -> TokenStream {
        let stripped = self.without_wildcard();
        if self.is_prefix() {
            quote! { t if t.starts_with(#stripped) }
        } else {
            quote! { #stripped }
        }
    }
}
// Manual impl: equality is decided by the prefix flag and the string value only —
// the `source` literal (which carries span information) is deliberately ignored.
impl PartialEq for EventType {
    fn eq(&self, other: &Self) -> bool {
        self.is_prefix == other.is_prefix && self.value == other.value
    }
}
impl Eq for EventType {}
impl From<syn::LitStr> for EventType {
    /// Build an `EventType` from a string literal, detecting the `.*` wildcard suffix.
    fn from(source: syn::LitStr) -> Self {
        let value = source.value();
        let is_prefix = value.ends_with(".*");
        Self {
            source,
            is_prefix,
            value,
        }
    }
}
impl Parse for EventType {
    /// Parse a string literal and convert it via `From<syn::LitStr>`.
    fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
        Ok(input.parse::<syn::LitStr>()?.into())
    }
}
impl fmt::Display for EventType {
    /// Write the raw event type string, including any trailing wildcard.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        f.write_str(&self.value)
    }
}
impl ToTokens for EventType {
    /// Emit the original string literal so generated code keeps its span.
    fn to_tokens(&self, tokens: &mut TokenStream) {
        self.source.to_tokens(tokens);
    }
}
/// All the event types supported by an event.
#[derive(Clone)]
pub(super) struct EventTypes {
    /// The main event type.
    pub(super) ev_type: EventType,
    /// The alternate event types.
    pub(super) aliases: Vec<EventType>,
}
impl EventTypes {
    /// Try to construct an `EventTypes` from the given default event type and aliases.
    ///
    /// This performs the following validation on the event types:
    ///
    /// - `*` cannot be used anywhere in the event type but as a wildcard at the end.
    /// - If one event type ends with `.*`, all event types must end with it.
    pub(super) fn try_from_parts(ev_type: EventType, aliases: Vec<EventType>) -> syn::Result<Self> {
        if ev_type.without_wildcard().contains('*') {
            return Err(syn::Error::new_spanned(
                ev_type,
                "event type may only contain `*` as part of a `.*` suffix",
            ));
        }
        let is_prefix = ev_type.is_prefix();
        for alias in &aliases {
            if alias.without_wildcard().contains('*') {
                return Err(syn::Error::new_spanned(
                    alias,
                    "alias may only contain `*` as part of a `.*` suffix",
                ));
            }
            // All types must agree on being (or not being) a prefix.
            if alias.is_prefix() != is_prefix {
                return Err(syn::Error::new_spanned(
                    alias,
                    "aliases should have the same `.*` suffix, or lack thereof, as the main event type",
                ));
            }
        }
        Ok(Self { ev_type, aliases })
    }

    /// Get an iterator over all the event types, main type first.
    pub(super) fn iter(&self) -> impl Iterator<Item = &EventType> {
        std::iter::once(&self.ev_type).chain(&self.aliases)
    }

    /// Whether the default event type is a prefix.
    ///
    /// If one event type is a prefix, all event types are prefixes.
    pub(super) fn is_prefix(&self) -> bool {
        self.ev_type.is_prefix
    }

    /// Get the stable event type, if any.
    ///
    /// A stable type is a type beginning with `m.`.
    pub(super) fn stable_type(&self) -> Option<&EventType> {
        self.iter().find(|ev_type| ev_type.is_stable())
    }

    /// Get the main event type.
    ///
    /// It is the stable event type or the default event type as a fallback.
    pub(super) fn main_type(&self) -> &EventType {
        self.stable_type().unwrap_or(&self.ev_type)
    }

    /// Get the type name for these event types.
    ///
    /// Returns an error if none of these types are the stable type.
    pub(super) fn as_event_ident(&self) -> syn::Result<syn::Ident> {
        let stable_type = self.stable_type().ok_or_else(|| {
            syn::Error::new(
                Span::call_site(),
                format!(
                    "A matrix event must declare a well-known type that starts with `m.` \
                     either as the main type or as an alias, or must declare the ident that \
                     should be used if it is only an unstable type, found main type `{}`",
                    self.ev_type
                ),
            )
        })?;
        Ok(m_prefix_name_to_type_name(&stable_type.source)
            .expect("we already checked that the event type is stable"))
    }
}
/// Common fields in event types.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
pub(super) enum CommonEventField {
    /// `origin_server_ts`.
    OriginServerTs,
    /// `room_id`.
    RoomId,
    /// `event_id`.
    EventId,
    /// `sender`.
    Sender,
}
impl CommonEventField {
    /// All the variants of this enum.
    pub(super) const ALL: &[Self] = &[
        Self::OriginServerTs,
        Self::RoomId,
        Self::EventId,
        Self::Sender,
    ];

    /// Get the string representation of this field, i.e. its JSON key.
    pub(super) fn as_str(self) -> &'static str {
        match self {
            Self::OriginServerTs => "origin_server_ts",
            Self::RoomId => "room_id",
            Self::EventId => "event_id",
            Self::Sender => "sender",
        }
    }

    /// This field as a [`syn::Ident`].
    pub(super) fn ident(self) -> syn::Ident {
        format_ident!("{}", self.as_str())
    }

    /// Get the type of this field.
    ///
    /// Returns a `(type, is_reference)` tuple; `is_reference` is `true` for the fields
    /// that are exposed as borrowed ID types rather than owned values.
    pub(super) fn ty(self, palpo_core: &TokenStream) -> (TokenStream, bool) {
        match self {
            Self::OriginServerTs => (quote! { #palpo_core::UnixMillis }, false),
            Self::RoomId => (quote! { &#palpo_core::RoomId }, true),
            Self::EventId => (quote! { &#palpo_core::EventId }, true),
            Self::Sender => (quote! { &#palpo_core::UserId }, true),
        }
    }
}
impl fmt::Display for CommonEventField {
    /// Write the field's JSON key.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        write!(f, "{}", self.as_str())
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/events/event_enum/parse.rs | crates/core-macros/src/events/event_enum/parse.rs | //! Parsing helpers specific to the `event_enum!` macro.
use std::collections::BTreeMap;
use proc_macro2::Span;
use syn::{
meta::ParseNestedMeta,
parse::{Parse, ParseStream},
};
use super::{EventEnumData, EventEnumEntry, EventEnumInput};
use crate::events::common::{CommonEventKind, EventType, EventTypes};
impl Parse for EventEnumInput {
    /// Parse the whole `event_enum!` body: a sequence of `enum Kind { "type" => path, ... }`
    /// declarations, collected per kind and rejecting duplicate kinds.
    fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
        // Keyed by kind so duplicates are detected and output order is stable.
        let mut enums_map = BTreeMap::new();
        while !input.is_empty() {
            let attrs = input.call(syn::Attribute::parse_outer)?;
            let _: syn::Token![enum] = input.parse()?;
            let kind: CommonEventKind = input.parse()?;
            let content;
            syn::braced!(content in input);
            let events = content.parse_terminated(EventEnumEntry::parse, syn::Token![,])?;
            let events = events.into_iter().collect();
            if enums_map
                .insert(
                    kind,
                    EventEnumData {
                        attrs,
                        kind: kind.into(),
                        events,
                    },
                )
                .is_some()
            {
                return Err(syn::Error::new(
                    Span::call_site(),
                    format!("duplicate definition for kind `{kind:?}`"),
                ));
            }
        }
        // Mark event types which are declared for both account data kinds, because they use a
        // different name for the event struct.
        let mut room_account_data = enums_map.remove(&CommonEventKind::RoomAccountData);
        if let Some((global_account_data, room_account_data)) = enums_map
            .get_mut(&CommonEventKind::GlobalAccountData)
            .zip(room_account_data.as_mut())
        {
            for global_event in global_account_data.events.iter_mut() {
                // An entry counts as "both" only if type, path and ident all match.
                if let Some(room_event) = room_account_data.events.iter_mut().find(|room_event| {
                    room_event.types.ev_type == global_event.types.ev_type
                        && room_event.ev_path == global_event.ev_path
                        && room_event.ident == global_event.ident
                }) {
                    global_event.both_account_data = true;
                    room_event.both_account_data = true;
                }
            }
        }
        if let Some(room_account_data) = room_account_data {
            enums_map.insert(CommonEventKind::RoomAccountData, room_account_data);
        }
        Ok(EventEnumInput {
            enums: enums_map.into_values().collect(),
        })
    }
}
impl Parse for EventEnumEntry {
    /// Parse one `"m.type" => path::to::module` entry, splitting off `palpo_enum`
    /// attributes (which configure the entry) from the attributes that are forwarded
    /// to the generated variant.
    fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
        let (palpo_enum_attrs, attrs) = input
            .call(syn::Attribute::parse_outer)?
            .into_iter()
            .partition::<Vec<_>, _>(|attr| attr.path().is_ident("palpo_enum"));
        let ev_type: EventType = input.parse()?;
        let _: syn::Token![=>] = input.parse()?;
        let ev_path = input.call(syn::Path::parse_mod_style)?;
        let mut entry_attrs = EventEnumEntryAttrs::default();
        for attr in palpo_enum_attrs {
            attr.parse_nested_meta(|meta| entry_attrs.try_merge(meta, &attr))?;
        }
        let types = EventTypes::try_from_parts(ev_type, entry_attrs.aliases)?;
        // We will need the name of the event type so compute it right now to make sure that we have
        // enough data for it.
        let ident = if let Some(ident) = entry_attrs.ident {
            ident
        } else {
            types.as_event_ident()?
        };
        Ok(Self {
            attrs,
            types,
            ev_path,
            ident,
            both_account_data: false,
        })
    }
}
/// Parsed attributes on an event entry in the `event_enum!` macro.
#[derive(Default)]
struct EventEnumEntryAttrs {
    /// The custom name of the variant, from `#[palpo_enum(ident = ...)]`.
    ident: Option<syn::Ident>,
    /// The alternative event types, from `#[palpo_enum(alias = "...")]`.
    aliases: Vec<EventType>,
}
impl EventEnumEntryAttrs {
/// Set the name of the Rust event type.
///
/// Returns an error if the name is already set.
fn set_ident(&mut self, ident: syn::Ident, attr: &syn::Attribute) -> syn::Result<()> {
if self.ident.is_some() {
return Err(syn::Error::new_spanned(
attr,
"cannot have multiple values for `ident` attribute",
));
}
self.ident = Some(ident);
Ok(())
}
/// Try to parse the given meta item and merge it into this `EventEnumEntryAttrs`.
///
/// Returns an error if parsing the meta item fails, or if it sets a field that was already set.
pub(crate) fn try_merge(
&mut self,
meta: ParseNestedMeta<'_>,
attr: &syn::Attribute,
) -> syn::Result<()> {
if meta.path.is_ident("ident") {
return self.set_ident(meta.value()?.parse()?, attr);
}
if meta.path.is_ident("alias") {
self.aliases.push(meta.value()?.parse()?);
return Ok(());
}
Err(meta.error("unsupported `palpo_enum` attribute"))
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/events/event_enum/event_type.rs | crates/core-macros/src/events/event_enum/event_type.rs | //! Functions to generate the `*EventType` enums.
use std::ops::Deref;
use proc_macro2::TokenStream;
use quote::quote;
use super::{EventEnumData, EventEnumKind};
use crate::util::NameSpace;
/// Data to generate an `*EventType` enum.
pub(super) struct EventTypeEnum<'a> {
    /// The data for the enum.
    data: &'a EventEnumData,
    /// The token path used to refer to the palpo-core crate in generated code.
    palpo_core: &'a TokenStream,
    /// The token path used to refer to the serde crate in generated code.
    serde: TokenStream,
    /// The name of the event type enum.
    ident: syn::Ident,
}
impl<'a> EventTypeEnum<'a> {
    /// Create an `EventTypeEnum` with the given data.
    ///
    /// Derives the enum name from the event kind and caches the serde crate path.
    pub(super) fn new(data: &'a EventEnumData, palpo_core: &'a TokenStream) -> Self {
        Self {
            ident: data.kind.to_event_type_enum(),
            serde: NameSpace::serde(),
            data,
            palpo_core,
        }
    }
}
impl EventTypeEnum<'_> {
    /// Generate the `*EventType` enum and its implementations.
    pub(super) fn expand(&self) -> TokenStream {
        let ident = &self.ident;
        let enum_doc = format!("The type of `{}` this is.", self.kind);
        // One variant per known event type. Variants whose type has a wildcard
        // suffix ("type fragment") carry the dynamic suffix as a `String` payload.
        let variants = self.events.iter().map(|event| {
            let variant = &event.ident;
            let variant_attrs = &event.attrs;
            let variant_docs = event.docs();
            if event.has_type_fragment() {
                quote! {
                    #variant_docs
                    #( #variant_attrs )*
                    #variant(::std::string::String),
                }
            } else {
                quote! {
                    #variant_docs
                    #( #variant_attrs )*
                    #variant,
                }
            }
        });
        let ord_impl = self.expand_ord_impl();
        let to_string_impl = self.expand_to_string_impl();
        let from_string_impl = self.expand_from_string_impl();
        let into_timeline_event_type_impl = self.expand_into_timeline_event_type_impl();
        quote! {
            #[doc = #enum_doc]
            ///
            /// This type can hold an arbitrary string. To build events with a custom type, convert it
            /// from a string with `::from()` / `.into()`. To check for events that are not available as a
            /// documented variant here, use its string representation, obtained through `.to_string()`.
            #[derive(salvo::oapi::ToSchema, Clone, PartialEq, Eq, Hash, diesel::deserialize::FromSqlRow, diesel::expression::AsExpression)]
            #[diesel(sql_type = diesel::sql_types::Text)]
            pub enum #ident {
                #( #variants )*
                #[doc(hidden)]
                _Custom(crate::PrivOwnedStr),
            }
            #ord_impl
            #to_string_impl
            #from_string_impl
            #into_timeline_event_type_impl
        }
    }
    /// Generate the `Ord` and `PartialOrd` implementations for the event type enum.
    ///
    /// To compare event types we need to compare the static event type first, and then the "type
    /// fragment" if there is one.
    fn expand_ord_impl(&self) -> TokenStream {
        let ident = &self.ident;
        // Match arms mapping every variant to its static `type` string (wildcard
        // types are mapped to their prefix without the wildcard).
        let event_type_str_match_arms = self.events.iter().map(|event| {
            let variant = &event.ident;
            let variant_attrs = &event.attrs;
            let ev_type = &event.types.ev_type;
            if ev_type.is_prefix() {
                let ev_type = ev_type.without_wildcard();
                quote! {
                    #( #variant_attrs )*
                    Self::#variant(_s) => #ev_type,
                }
            } else {
                quote! {
                    #( #variant_attrs )*
                    Self::#variant => #ev_type,
                }
            }
        });
        let mut type_fragment_match_arms = self
            .events
            .iter()
            // We only need to compare types with fragment, others will be equal.
            .filter(|event| event.has_type_fragment())
            .map(|event| {
                let variant = &event.ident;
                let variant_attrs = &event.attrs;
                quote! {
                    #( #variant_attrs )*
                    (Self::#variant(this), Self::#variant(other)) => this.cmp(other),
                }
            })
            .peekable();
        let cmp_type_fragment_impl = if type_fragment_match_arms.peek().is_none() {
            // If there are no type fragments, all variants are equal.
            quote! { ::std::cmp::Ordering::Equal }
        } else {
            quote! {
                match (self, other) {
                    #( #type_fragment_match_arms )*
                    _ => ::std::cmp::Ordering::Equal,
                }
            }
        };
        quote! {
            #[allow(deprecated)]
            impl #ident {
                fn event_type_str(&self) -> &::std::primitive::str {
                    match self {
                        #( #event_type_str_match_arms )*
                        Self::_Custom(crate::PrivOwnedStr(s)) => s,
                    }
                }
                fn cmp_type_fragment(&self, other: &Self) -> ::std::cmp::Ordering {
                    #cmp_type_fragment_impl
                }
            }
            impl ::std::cmp::Ord for #ident {
                fn cmp(&self, other: &Self) -> ::std::cmp::Ordering {
                    let event_type_cmp = self.event_type_str().cmp(&other.event_type_str());
                    if event_type_cmp.is_eq() {
                        self.cmp_type_fragment(other)
                    } else {
                        event_type_cmp
                    }
                }
            }
            impl ::std::cmp::PartialOrd for #ident {
                fn partial_cmp(&self, other: &Self) -> Option<::std::cmp::Ordering> {
                    Some(self.cmp(other))
                }
            }
        }
    }
    /// Generate the `std::fmt::Display`, `std::fmt::Debug` and `serde::Serialize` implementations
    /// for the event type enum.
    ///
    /// All of these, plus the diesel `ToSql` impl, funnel through a generated
    /// `to_cow_str()` helper: static types borrow their literal, wildcard types
    /// format `prefix + fragment` into an owned string.
    fn expand_to_string_impl(&self) -> TokenStream {
        let ident = &self.ident;
        let serde = &self.serde;
        let match_arms = self.events
            .iter()
            .map(|event| {
                let variant = &event.ident;
                let variant_attrs = &event.attrs;
                let ev_type = &event.types.ev_type;
                if ev_type.is_prefix() {
                    let format_str = ev_type.without_wildcard().to_owned() + "{}";
                    quote! {
                        #( #variant_attrs )*
                        Self::#variant(_s) => ::std::borrow::Cow::Owned(::std::format!(#format_str, _s)),
                    }
                } else {
                    quote! {
                        #( #variant_attrs )*
                        Self::#variant => ::std::borrow::Cow::Borrowed(#ev_type),
                    }
                }
            });
        quote! {
            #[allow(deprecated)]
            impl #ident {
                fn to_cow_str(&self) -> ::std::borrow::Cow<'_, ::std::primitive::str> {
                    match self {
                        #( #match_arms )*
                        Self::_Custom(crate::PrivOwnedStr(s)) => ::std::borrow::Cow::Borrowed(s),
                    }
                }
            }
            #[allow(deprecated)]
            impl ::std::fmt::Display for #ident {
                fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                    self.to_cow_str().fmt(f)
                }
            }
            #[allow(deprecated)]
            impl ::std::fmt::Debug for #ident {
                fn fmt(&self, f: &mut ::std::fmt::Formatter<'_>) -> ::std::fmt::Result {
                    <str as ::std::fmt::Debug>::fmt(&self.to_cow_str(), f)
                }
            }
            #[allow(deprecated)]
            impl #serde::Serialize for #ident {
                fn serialize<S>(&self, serializer: S) -> ::std::result::Result<S::Ok, S::Error>
                where
                    S: #serde::Serializer,
                {
                    self.to_cow_str().serialize(serializer)
                }
            }
            impl diesel::deserialize::FromSql<diesel::sql_types::Text, diesel::pg::Pg> for #ident {
                fn from_sql(bytes: diesel::pg::PgValue<'_>) -> diesel::deserialize::Result<Self> {
                    let value = <String as diesel::deserialize::FromSql<diesel::sql_types::Text, diesel::pg::Pg>>::from_sql(bytes)?;
                    Ok(Self::from(value))
                }
            }
            impl diesel::serialize::ToSql<diesel::sql_types::Text, diesel::pg::Pg> for #ident {
                fn to_sql(&self, out: &mut diesel::serialize::Output<'_, '_, diesel::pg::Pg>) -> diesel::serialize::Result {
                    diesel::serialize::ToSql::<diesel::sql_types::Text, diesel::pg::Pg>::to_sql(self.to_cow_str().as_ref(), &mut out.reborrow())
                }
            }
        }
    }
    /// Generate the `From<&str>`, `From<String>` and `serde::Deserialize` implementations for the
    /// event type enum.
    ///
    /// Unknown strings fall through to the hidden `_Custom` variant, so conversion
    /// from a string is infallible.
    fn expand_from_string_impl(&self) -> TokenStream {
        let ident = &self.ident;
        let palpo_core = self.palpo_core;
        let serde = &self.serde;
        let from_str_match_arms = self.events.iter().map(|event| {
            let variant = &event.ident;
            let variant_attrs = &event.attrs;
            let ev_types = event.types.iter();
            if event.has_type_fragment() {
                // One prefix-guarded arm per (aliased) wildcard type; the matched
                // suffix becomes the variant's `String` payload.
                ev_types.map(|ev_type| {
                    let prefix = ev_type.without_wildcard();
                    quote! {
                        #( #variant_attrs )*
                        // Use if-let guard once available
                        s if s.starts_with(#prefix) => {
                            Self::#variant(::std::convert::From::from(s.strip_prefix(#prefix).unwrap()))
                        }
                    }
                }).collect()
            } else {
                quote! {
                    #( #variant_attrs )*
                    #( #ev_types )|* => Self::#variant,
                }
            }
        });
        quote! {
            #[allow(deprecated)]
            impl ::std::convert::From<&::std::primitive::str> for #ident {
                fn from(s: &::std::primitive::str) -> Self {
                    match s {
                        #( #from_str_match_arms )*
                        _ => Self::_Custom(crate::PrivOwnedStr(::std::convert::From::from(s))),
                    }
                }
            }
            #[allow(deprecated)]
            impl ::std::convert::From<::std::string::String> for #ident {
                fn from(s: ::std::string::String) -> Self {
                    ::std::convert::From::from(s.as_str())
                }
            }
            #[allow(deprecated)]
            impl<'de> #serde::Deserialize<'de> for #ident {
                fn deserialize<D>(deserializer: D) -> ::std::result::Result<Self, D::Error>
                where
                    D: #serde::Deserializer<'de>
                {
                    let s = #palpo_core::serde::deserialize_cow_str(deserializer)?;
                    Ok(::std::convert::From::from(&s[..]))
                }
            }
        }
    }
    /// Generate the `From<{ident}> for TimelineEventType` implementation for the timeline kinds.
    ///
    /// Returns `None` for non-timeline kinds and for the `Timeline` kind itself
    /// (which would be a `From<Self>` impl).
    fn expand_into_timeline_event_type_impl(&self) -> Option<TokenStream> {
        if !self.kind.is_timeline() || self.kind == EventEnumKind::Timeline {
            return None;
        }
        let ident = &self.ident;
        let match_arms = self.events.iter().map(|event| {
            let variant = &event.ident;
            let variant_attrs = &event.attrs;
            if event.has_type_fragment() {
                quote! {
                    #( #variant_attrs )*
                    #ident::#variant(s) => Self::#variant(s),
                }
            } else {
                quote! {
                    #( #variant_attrs )*
                    #ident::#variant => Self::#variant,
                }
            }
        });
        Some(quote! {
            #[allow(deprecated)]
            impl ::std::convert::From<#ident> for TimelineEventType {
                fn from(s: #ident) -> Self {
                    match s {
                        #( #match_arms )*
                        #ident ::_Custom(_s) => Self::_Custom(_s),
                    }
                }
            }
        })
    }
}
impl Deref for EventTypeEnum<'_> {
    type Target = EventEnumData;

    /// Forward field access to the wrapped [`EventEnumData`].
    fn deref(&self) -> &Self::Target {
        let Self { data, .. } = self;
        *data
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/events/event_enum/util.rs | crates/core-macros/src/events/event_enum/util.rs | use proc_macro2::TokenStream;
use quote::{format_ident, quote};
use super::EventEnumKind;
use crate::{
events::common::{EventContentTraitVariation, EventVariation},
util::NameSpace,
};
/// Generate `palpo_core::serde::JsonCastable` implementations for all compatible types.
///
/// `ident` is the name of the `Any*Event` enum being generated, `kind`/`variation`
/// identify the event kind and variation it covers, and `palpo_core` is the token
/// path used to reference the palpo-core crate in the generated code.
pub(super) fn expand_json_castable_impl(
    ident: &syn::Ident,
    kind: EventEnumKind,
    variation: EventVariation,
    palpo_core: &TokenStream,
) -> TokenStream {
    // All event types are represented as objects in JSON.
    let mut json_castable_impls = quote! {
        #[automatically_derived]
        impl #palpo_core::serde::JsonCastable<#palpo_core::serde::JsonObject> for #ident {}
    };
    // The event type kinds in this enum.
    let mut event_kinds = vec![kind];
    event_kinds.extend(kind.extra_enum_kinds());
    for event_kind in event_kinds {
        let event_variations = event_kind.event_variations();
        // Matching event types (structs or enums) can be cast to this event enum.
        json_castable_impls.extend(
            event_variations
                .iter()
                // Filter variations that can't be cast from.
                .filter(|event_variation| event_variation.is_json_castable_to(variation))
                // All enum variations can also be cast from event structs from the same variation.
                .chain(event_variations.contains(&variation).then_some(&variation))
                .map(|event_variation| {
                    let EventWithBounds { type_with_generics, impl_generics, where_clause } =
                        EventWithBounds::new(event_kind, *event_variation, palpo_core);
                    quote! {
                        #[automatically_derived]
                        impl #impl_generics #palpo_core::serde::JsonCastable<#ident> for #type_with_generics
                        #where_clause
                        {}
                    }
                }),
        );
        // Matching event enums can be cast to this one, e.g. `AnyMessageLikeEvent` can be cast to
        // `AnyTimelineEvent`.
        let event_enum_variations = event_kind.event_enum_variations();
        json_castable_impls.extend(
            event_enum_variations
                .iter()
                // Filter variations that can't be cast from.
                .filter(|event_enum_variation| event_enum_variation.is_json_castable_to(variation))
                // All enum variations can also be cast from other event enums from the same
                // variation.
                .chain(
                    // `event_kind != kind` avoids emitting `JsonCastable<Self> for Self`.
                    (event_kind != kind && event_enum_variations.contains(&variation))
                        .then_some(&variation),
                )
                .map(|event_enum_variation| {
                    let other_ident = event_kind.to_event_enum_ident(*event_enum_variation);
                    quote! {
                        #[automatically_derived]
                        impl #palpo_core::serde::JsonCastable<#ident> for #other_ident {}
                    }
                }),
        );
    }
    json_castable_impls
}
impl EventEnumKind {
    /// Get the name of the `Any*Event` enum for this kind and the given variation.
    pub(super) fn to_event_enum_ident(self, variation: EventVariation) -> syn::Ident {
        format_ident!("Any{variation}{self}")
    }
    /// Get the list of extra event kinds that are part of the event enum for this kind.
    ///
    /// Only `Timeline` aggregates other kinds (message-like and state events);
    /// all other kinds stand alone.
    fn extra_enum_kinds(self) -> &'static [Self] {
        match self {
            Self::Timeline => &[Self::MessageLike, Self::State],
            Self::GlobalAccountData
            | Self::RoomAccountData
            | Self::EphemeralRoom
            | Self::MessageLike
            | Self::State
            | Self::ToDevice => &[],
        }
    }
}
/// An event type (struct or enum) with its bounds.
///
/// Produced by [`EventWithBounds::new`] and destructured into the pieces of a
/// generated `impl` in `expand_json_castable_impl`.
struct EventWithBounds {
    /// The type name with its generics.
    type_with_generics: TokenStream,
    /// The generics declaration, or `None` when the impl takes no generics.
    impl_generics: Option<TokenStream>,
    /// The `where` clause with the event bounds, or `None` when no extra bounds are needed.
    where_clause: Option<TokenStream>,
}
impl EventWithBounds {
    /// Compute the generic event type and its bounds for the given kind and variation.
    ///
    /// Panics on `EventEnumKind::Timeline`, which must never reach this code path.
    fn new(kind: EventEnumKind, variation: EventVariation, palpo_core: &TokenStream) -> Self {
        let ident = kind.to_event_ident(variation);
        // Pick the `*EventContent` trait variant that bounds the `C` generic.
        let event_content_trait = match variation {
            EventVariation::None
            | EventVariation::Sync
            | EventVariation::Original
            | EventVariation::OriginalSync
            | EventVariation::Initial => {
                // `State` event structs have a `StaticStateEventContent` bound.
                if kind == EventEnumKind::State {
                    kind.to_content_kind_trait(EventContentTraitVariation::Static)
                } else {
                    kind.to_content_kind_trait(EventContentTraitVariation::Original)
                }
            }
            EventVariation::Stripped => {
                kind.to_content_kind_trait(EventContentTraitVariation::PossiblyRedacted)
            }
            EventVariation::Redacted | EventVariation::RedactedSync => {
                kind.to_content_kind_trait(EventContentTraitVariation::Redacted)
            }
        };
        let (type_with_generics, impl_generics, where_clause) = match kind {
            EventEnumKind::MessageLike | EventEnumKind::State
                if matches!(variation, EventVariation::None | EventVariation::Sync) =>
            {
                // `MessageLike` and `State` event kinds have an extra `RedactContent` bound with a
                // `where` clause on the variations that match enum types.
                let redacted_trait =
                    kind.to_content_kind_trait(EventContentTraitVariation::Redacted);
                (
                    quote! { #palpo_core::events::#ident<C> },
                    Some(
                        quote! { <C: #palpo_core::events::#event_content_trait + #palpo_core::events::RedactContent> },
                    ),
                    Some(quote! {
                        where
                            C::Redacted: #palpo_core::events::#redacted_trait,
                    }),
                )
            }
            EventEnumKind::GlobalAccountData
            | EventEnumKind::RoomAccountData
            | EventEnumKind::EphemeralRoom
            | EventEnumKind::MessageLike
            | EventEnumKind::State
            | EventEnumKind::ToDevice => (
                quote! { #palpo_core::events::#ident<C> },
                Some(quote! { <C: #palpo_core::events::#event_content_trait> }),
                None,
            ),
            EventEnumKind::Timeline => {
                panic!("Timeline kind should not generate JsonCastable implementations")
            }
        };
        Self {
            impl_generics,
            type_with_generics,
            where_clause,
        }
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/events/event_enum/event_kind_enum.rs | crates/core-macros/src/events/event_enum/event_kind_enum.rs | use std::ops::Deref;
use proc_macro2::{Span, TokenStream};
use quote::{format_ident, quote};
mod content;
use super::{EventEnumData, EventEnumEntry, EventEnumKind, util::expand_json_castable_impl};
use crate::{
events::common::{CommonEventField, EventContentTraitVariation, EventType, EventVariation},
util::NameSpace,
};
/// Cache for [`EventEnum`] data that is used in several places.
pub(super) struct EventEnum<'a> {
    /// The data for the enum.
    data: &'a EventEnumData,
    /// The token path used to refer to the palpo-core crate in generated code.
    palpo_core: &'a TokenStream,
    /// The attributes of the event entries.
    variant_attrs: Vec<&'a [syn::Attribute]>,
    /// The names of the variants of the entries.
    variants: Vec<&'a syn::Ident>,
    /// The docs for the variants.
    variant_docs: Vec<TokenStream>,
    /// The match arms for the entries' `type` string.
    event_type_string_match_arms: Vec<Vec<TokenStream>>,
    /// The paths to the `*EventContent` types of the entries.
    event_content_types: Vec<syn::Path>,
    /// The name of the enum that contains the content of the events for this kind.
    content_enum: syn::Ident,
    /// The name of the enum that contains the "full" content of the events for this kind.
    full_content_enum: syn::Ident,
    /// The name of the `*EventType` enum for this kind.
    event_type_enum: syn::Ident,
}
impl<'a> EventEnum<'a> {
    /// Construct a new `EventEnum`, pre-computing the per-variant data and the
    /// derived enum names that are reused across the generated items.
    pub(super) fn new(data: &'a EventEnumData, palpo_core: &'a TokenStream) -> Self {
        let kind = data.kind;
        let events = &data.events;
        Self {
            variant_attrs: events.iter().map(|event| event.attrs.as_slice()).collect(),
            variants: events.iter().map(|event| &event.ident).collect(),
            variant_docs: events.iter().map(EventEnumEntry::docs).collect(),
            event_content_types: events
                .iter()
                .map(|event| event.to_event_content_path(kind))
                .collect(),
            event_type_string_match_arms: events
                .iter()
                .map(|event| event.types.iter().map(EventType::as_match_arm).collect())
                .collect(),
            content_enum: format_ident!("Any{kind}Content"),
            full_content_enum: format_ident!("AnyFull{kind}Content"),
            event_type_enum: kind.to_event_type_enum(),
            data,
            palpo_core,
        }
    }
}
impl EventEnum<'_> {
    /// Generate the `Any*Event(Content)` enums and their implementations.
    ///
    /// Errors if the kind declares no enum variations; per-variation expansion
    /// errors are converted to compile errors embedded in the output instead.
    pub(super) fn expand(&self) -> syn::Result<TokenStream> {
        let variations = self.kind.event_enum_variations();
        if variations.is_empty() {
            return Err(syn::Error::new(
                Span::call_site(),
                format!("The {:?} kind is not supported", self.kind),
            ));
        }
        // Generate the `Any*EventContent` enum.
        let mut tokens = self.expand_content_enum()?;
        let has_full = variations.contains(&EventVariation::None);
        // Generate the `Any*Event` enums for all the variations.
        for variation in variations {
            tokens.extend(
                EventEnumVariation::new(self, *variation)
                    .expand_event_kind_enum()
                    .unwrap_or_else(syn::Error::into_compile_error),
            );
            // Sync <-> full conversions only make sense when both variations exist.
            if variation.is_sync() && has_full {
                tokens.extend(self.expand_sync_from_into_full());
            }
        }
        // Generate the `AnyFull*EventContent` enum.
        if matches!(self.kind, EventEnumKind::State) {
            tokens.extend(self.expand_full_content_enum());
        }
        Ok(tokens)
    }
    /// Implement `From<Any*Event>` and `.into_full_event()` for an `AnySync*Event` enum.
    fn expand_sync_from_into_full(&self) -> TokenStream {
        let palpo_core = self.palpo_core;
        let sync = self.kind.to_event_enum_ident(EventVariation::Sync);
        let full = self.kind.to_event_enum_ident(EventVariation::None);
        let variants = &self.variants;
        let variant_attrs = &self.variant_attrs;
        quote! {
            #[automatically_derived]
            impl ::std::convert::From<#full> for #sync {
                fn from(event: #full) -> Self {
                    match event {
                        #(
                            #( #variant_attrs )*
                            #full::#variants(event) => {
                                Self::#variants(::std::convert::From::from(event))
                            }
                        )*
                        #full::_Custom(event) => {
                            Self::_Custom(::std::convert::From::from(event))
                        },
                    }
                }
            }
            #[automatically_derived]
            impl #sync {
                /// Convert this sync event into a full event (one with a `room_id` field).
                pub fn into_full_event(self, room_id: #palpo_core::OwnedRoomId) -> #full {
                    match self {
                        #(
                            #( #variant_attrs )*
                            Self::#variants(event) => {
                                #full::#variants(event.into_full_event(room_id))
                            }
                        )*
                        Self::_Custom(event) => {
                            #full::_Custom(event.into_full_event(room_id))
                        },
                    }
                }
            }
        }
    }
    /// Implement `From<{event_type}>` for all the variants of the given enum.
    ///
    /// `event_types` must be parallel to `self.variants` (one path per variant).
    fn expand_from_impl(&self, ident: &syn::Ident, event_types: &[syn::Path]) -> TokenStream {
        let variants = &self.variants;
        let variant_attrs = &self.variant_attrs;
        quote! {
            #(
                #[allow(unused_qualifications)]
                #[automatically_derived]
                #( #variant_attrs )*
                impl ::std::convert::From<#event_types> for #ident {
                    fn from(c: #event_types) -> Self {
                        Self::#variants(c)
                    }
                }
            )*
        }
    }
}
impl Deref for EventEnum<'_> {
    type Target = EventEnumData;

    /// Forward field access to the wrapped [`EventEnumData`].
    fn deref(&self) -> &Self::Target {
        let Self { data, .. } = self;
        *data
    }
}
/// A variation of an event enum.
///
/// Borrows the shared [`EventEnum`] cache and adds the data specific to one
/// [`EventVariation`] (e.g. `Sync`, `Stripped`, ...).
struct EventEnumVariation<'a> {
    /// The event enum data.
    inner: &'a EventEnum<'a>,
    /// The variation of this enum.
    variation: EventVariation,
    /// The name of this enum.
    ident: syn::Ident,
    /// The name of the struct used for the events for this variation.
    event_struct: syn::Ident,
    /// The paths to the event types of the variants of this enum.
    event_types: Vec<syn::Path>,
}
impl<'a> EventEnumVariation<'a> {
    /// Build the per-variation view over the shared `EventEnum` data.
    fn new(inner: &'a EventEnum<'a>, variation: EventVariation) -> Self {
        let kind = inner.kind;
        Self {
            ident: kind.to_event_enum_ident(variation),
            event_struct: kind.to_event_ident(variation),
            event_types: inner
                .events
                .iter()
                .map(|event| event.to_event_path(kind, variation))
                .collect(),
            inner,
            variation,
        }
    }
}
impl EventEnumVariation<'_> {
/// Whether the content in the variants of this enum can be redacted.
fn maybe_redacted(&self) -> bool {
self.kind.is_timeline()
&& matches!(self.variation, EventVariation::None | EventVariation::Sync)
}
/// Generate this `Any*Event` enum.
fn expand_event_kind_enum(&self) -> syn::Result<TokenStream> {
let palpo_core = self.palpo_core;
let ident = &self.ident;
let event_struct = &self.event_struct;
let attrs = &self.attrs;
let variants = &self.variants;
let variant_attrs = &self.variant_attrs;
let variant_docs = &self.variant_docs;
let event_types = &self.event_types;
let kind = self.kind;
let custom_content_ty = format_ident!("Custom{kind}Content");
let deserialize_impl = self.expand_deserialize_impl();
let field_accessor_impl = self.expand_accessor_methods()?;
let from_impl = self.expand_from_impl(ident, event_types);
let json_castable_impl =
expand_json_castable_impl(ident, kind, self.variation, palpo_core);
Ok(quote! {
#( #attrs )*
#[derive(salvo::oapi::ToSchema, Clone, Debug)]
#[allow(clippy::large_enum_variant, unused_qualifications)]
pub enum #ident {
#(
#variant_docs
#( #variant_attrs )*
#variants(#event_types),
)*
/// An event not defined by the Matrix specification
#[doc(hidden)]
_Custom(
#palpo_core::events::#event_struct<
#palpo_core::events::_custom::#custom_content_ty
>,
),
}
#deserialize_impl
#field_accessor_impl
#from_impl
#json_castable_impl
})
}
/// Generate the `serde::de::Deserialize` implementation for this enum.
fn expand_deserialize_impl(&self) -> TokenStream {
let palpo_core = self.palpo_core;
let serde = NameSpace::serde();
let serde_json = NameSpace::serde_json();
let ident = &self.ident;
let variants = &self.variants;
let variant_attrs = &self.variant_attrs;
let event_type_string_match_arms = &self.event_type_string_match_arms;
let event_types = &self.event_types;
quote! {
#[allow(unused_qualifications)]
impl<'de> #serde::de::Deserialize<'de> for #ident {
fn deserialize<D>(deserializer: D) -> Result<Self, D::Error>
where
D: #serde::de::Deserializer<'de>,
{
use #serde::de::Error as _;
let json = Box::<#serde_json::value::RawValue>::deserialize(deserializer)?;
let #palpo_core::events::EventTypeDeHelper { ev_type, .. } =
#palpo_core::serde::from_raw_json_value(&json)?;
match &*ev_type {
#(
#( #variant_attrs )*
#( #event_type_string_match_arms )|* => {
let event = #serde_json::from_str::<#event_types>(json.get())
.map_err(D::Error::custom)?;
Ok(Self::#variants(event))
},
)*
_ => {
let event = #serde_json::from_str(json.get()).map_err(D::Error::custom)?;
Ok(Self::_Custom(event))
},
}
}
}
}
}
/// Implement accessors for the common fields of an `Any*Event` enum.
fn expand_accessor_methods(&self) -> syn::Result<TokenStream> {
let palpo_core = self.palpo_core;
let ident = &self.ident;
let event_type_enum = &self.event_type_enum;
let variants = &self.variants;
let variant_attrs = &self.variant_attrs;
let event_type_match_arms = if self.maybe_redacted() {
quote! {
#(
#( #variant_attrs )*
Self::#variants(event) => event.event_type(),
)*
Self::_Custom(event) => event.event_type(),
}
} else if self.variation == EventVariation::Stripped {
let possibly_redacted_event_content_kind_trait = self
.kind
.to_content_kind_trait(EventContentTraitVariation::PossiblyRedacted);
quote! {
#(
#( #variant_attrs )*
Self::#variants(event) =>
#palpo_core::events::#possibly_redacted_event_content_kind_trait::event_type(&event.content),
)*
Self::_Custom(event) => ::std::convert::From::from(
#palpo_core::events::#possibly_redacted_event_content_kind_trait::event_type(&event.content),
),
}
} else {
let original_event_content_kind_trait = self
.kind
.to_content_kind_trait(EventContentTraitVariation::Original);
quote! {
#(
#( #variant_attrs )*
Self::#variants(event) =>
#palpo_core::events::#original_event_content_kind_trait::event_type(&event.content),
)*
Self::_Custom(event) => ::std::convert::From::from(
#palpo_core::events::#original_event_content_kind_trait::event_type(&event.content),
),
}
};
let content_accessor = self.expand_content_accessors();
let field_accessors = self.expand_event_field_accessors();
let state_key_accessor = self.expand_state_key_accessor();
let relations_accessor = self.expand_relations_accessor();
let transaction_id_accessor = self.expand_transaction_id_accessor();
Ok(quote! {
#[automatically_derived]
impl #ident {
/// Returns the `type` of this event.
pub fn event_type(&self) -> #palpo_core::events::#event_type_enum {
match self { #event_type_match_arms }
}
#content_accessor
#( #field_accessors )*
#relations_accessor
#state_key_accessor
#transaction_id_accessor
}
})
}
/// Generate accessors for the `content` field for this enum.
///
/// The code that is generated depends on the (kind, variation) tuple:
///
/// * `pub fn original_content(&self) -> Option<ContentEnum>` and `pub fn is_redacted(&self)` ->
/// bool` for kinds and variations that return `true` in
/// [`maybe_redacted()`](Self::maybe_redacted). It also generates `pub fn content(&self) ->
/// FullContentEnum` for state events.
/// * An empty `TokenStream` for the stripped variation.
/// * `pub fn content(&self) -> ContentEnum` for the others.
fn expand_content_accessors(&self) -> TokenStream {
let palpo_core = self.palpo_core;
let content_enum = &self.content_enum;
let event_struct = &self.event_struct;
let variants = &self.variants;
let variant_attrs = &self.variant_attrs;
let original_event_content_kind_trait = self
.kind
.to_content_kind_trait(EventContentTraitVariation::Original);
if self.maybe_redacted() {
let mut accessors = quote! {
/// Returns the content for this event if it is not redacted, or `None` if it is.
pub fn original_content(&self) -> Option<#content_enum> {
match self {
#(
#( #variant_attrs )*
Self::#variants(event) => {
event.as_original().map(|ev| #content_enum::#variants(ev.content.clone()))
}
)*
Self::_Custom(event) => event.as_original().map(|ev| {
#content_enum::_Custom {
event_type: crate::PrivOwnedStr(
::std::convert::From::from(
::std::string::ToString::to_string(
&#palpo_core::events::#original_event_content_kind_trait::event_type(
&ev.content,
),
),
),
),
}
}),
}
}
/// Returns whether this event is redacted.
pub fn is_redacted(&self) -> bool {
match self {
#(
#( #variant_attrs )*
Self::#variants(event) => {
event.as_original().is_none()
}
)*
Self::_Custom(event) => event.as_original().is_none(),
}
}
};
if self.kind == EventEnumKind::State {
let full_content_enum = &self.full_content_enum;
let redacted_event_content_kind_trait_name = self
.kind
.to_content_kind_trait(EventContentTraitVariation::Redacted);
accessors.extend(quote! {
/// Returns the content of this state event.
pub fn content(&self) -> #full_content_enum {
match self {
#(
#( #variant_attrs )*
Self::#variants(event) => match event {
#palpo_core::events::#event_struct::Original(ev) => #full_content_enum::#variants(
#palpo_core::events::FullStateEventContent::Original {
content: ev.content.clone(),
prev_content: ev.unsigned.prev_content.clone()
}
),
#palpo_core::events::#event_struct::Redacted(ev) => #full_content_enum::#variants(
#palpo_core::events::FullStateEventContent::Redacted(
ev.content.clone()
)
),
},
)*
Self::_Custom(event) => match event {
#palpo_core::events::#event_struct::Original(ev) => {
#full_content_enum::_Custom {
event_type: crate::PrivOwnedStr(
::std::string::ToString::to_string(
&#palpo_core::events::#original_event_content_kind_trait::event_type(
&ev.content,
),
).into_boxed_str(),
),
redacted: false,
}
}
#palpo_core::events::#event_struct::Redacted(ev) => {
#full_content_enum::_Custom {
event_type: crate::PrivOwnedStr(
::std::string::ToString::to_string(
&#palpo_core::events::#redacted_event_content_kind_trait_name::event_type(
&ev.content,
),
).into_boxed_str(),
),
redacted: true,
}
}
},
}
}
});
}
accessors
} else if self.variation == EventVariation::Stripped {
// There is no content enum for possibly-redacted content types (yet)
TokenStream::new()
} else {
quote! {
/// Returns the content for this event.
pub fn content(&self) -> #content_enum {
match self {
#(
#( #variant_attrs )*
Self::#variants(event) => #content_enum::#variants(event.content.clone()),
)*
Self::_Custom(event) => #content_enum::_Custom {
event_type: crate::PrivOwnedStr(
::std::convert::From::from(
::std::string::ToString::to_string(
&#palpo_core::events::#original_event_content_kind_trait::event_type(&event.content)
)
),
),
},
}
}
}
}
}
/// Generate accessors for the [`EventField`]s that are present for this enum.
fn expand_event_field_accessors(&self) -> impl Iterator<Item = TokenStream> {
CommonEventField::ALL
.iter()
.filter(|field| self.kind.field_is_present(**field, self.variation))
.map(|field| {
let variants = &self.variants;
let variant_attrs = &self.variant_attrs;
let docs = format!("Returns this event's `{field}` field.");
let ident = field.ident();
let palpo_core = self.palpo_core;
// Field types that don't implement `Copy` must be accessedd via a reference.
let (field_type, is_ref) = field.ty(&palpo_core);
let ampersand = is_ref.then(|| quote! { & });
// If this content might be redacted, the field is available through an accessor on
// the inner content enum.
let call_parens = self.maybe_redacted().then(|| quote! { () });
quote! {
#[doc = #docs]
pub fn #ident(&self) -> #field_type {
match self {
#(
#( #variant_attrs )*
Self::#variants(event) => #ampersand event.#ident #call_parens,
)*
Self::_Custom(event) => #ampersand event.#ident #call_parens,
}
}
}
})
}
/// Generate an accessor for the `state_key` field for this enum, if present.
fn expand_state_key_accessor(&self) -> Option<TokenStream> {
if self.kind != EventEnumKind::State {
return None;
}
let variants = &self.variants;
let variant_attrs = &self.variant_attrs;
// If this content might be redacted, the field is available through an accessor on
// the inner content enum.
let call_parens = self.maybe_redacted().then(|| quote! { () });
Some(quote! {
/// Returns this event's `state_key` field.
pub fn state_key(&self) -> &::std::primitive::str {
match self {
#(
#( #variant_attrs )*
Self::#variants(event) => event.state_key #call_parens .as_ref(),
)*
Self::_Custom(event) => event.state_key #call_parens .as_ref(),
}
}
})
}
    /// Generate an accessor for the `unsigned.relations` field for this enum, if present.
    ///
    /// Returns `None` unless this is the MessageLike kind. In the generated accessor,
    /// redacted events (where `as_original()` is `None`) yield the default (empty)
    /// `BundledMessageLikeRelations`.
    fn expand_relations_accessor(&self) -> Option<TokenStream> {
        if self.kind != EventEnumKind::MessageLike {
            return None;
        }
        let palpo_core = self.palpo_core;
        let variants = &self.variants;
        let variant_attrs = &self.variant_attrs;
        // Known variants convert via `From`; the `_Custom` variant is wrapped explicitly
        // into `AnySyncMessageLikeEvent::_Custom`.
        Some(quote! {
            /// Returns this event's `relations` from inside `unsigned`.
            pub fn relations(
                &self,
            ) -> #palpo_core::events::BundledMessageLikeRelations<AnySyncMessageLikeEvent> {
                match self {
                    #(
                        #( #variant_attrs )*
                        Self::#variants(event) => event.as_original().map_or_else(
                            ::std::default::Default::default,
                            |ev| ev.unsigned.relations.clone().map_replace(|r| {
                                ::std::convert::From::from(r.into_maybe_redacted())
                            }),
                        ),
                    )*
                    Self::_Custom(event) => event.as_original().map_or_else(
                        ::std::default::Default::default,
                        |ev| ev.unsigned.relations.clone().map_replace(|r| {
                            AnySyncMessageLikeEvent::_Custom(r.into_maybe_redacted())
                        }),
                    ),
                }
            }
        })
    }
    /// Generate an accessor for the `unsigned.transaction_id` field for this enum, if present.
    ///
    /// Only generated for variations that might be redacted; in the generated accessor,
    /// redacted events (where `as_original()` is `None`) yield `None`.
    fn expand_transaction_id_accessor(&self) -> Option<TokenStream> {
        if !self.maybe_redacted() {
            return None;
        }
        let palpo_core = &self.palpo_core;
        let variants = &self.variants;
        let variant_attrs = &self.variant_attrs;
        Some(quote! {
            /// Returns this event's `transaction_id` from inside `unsigned`, if there is one.
            pub fn transaction_id(&self) -> Option<&#palpo_core::TransactionId> {
                match self {
                    #(
                        #( #variant_attrs )*
                        Self::#variants(event) => {
                            event.as_original().and_then(|ev| ev.unsigned.transaction_id.as_deref())
                        }
                    )*
                    Self::_Custom(event) => {
                        event.as_original().and_then(|ev| ev.unsigned.transaction_id.as_deref())
                    }
                }
            }
        })
    }
}
// `EventEnumVariation` holds a reference to an `EventEnum`; delegate field and method
// access to the wrapped enum so callers can use it interchangeably.
impl<'a> Deref for EventEnumVariation<'a> {
    type Target = EventEnum<'a>;
    fn deref(&self) -> &Self::Target {
        self.inner
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/events/event_enum/event_kind_enum/content.rs | crates/core-macros/src/events/event_enum/event_kind_enum/content.rs | //! Functions to generate `Any*EventContent` enums.
use proc_macro2::TokenStream;
use quote::quote;
use crate::{
events::{
common::EventContentTraitVariation,
event_enum::{EventEnum, EventEnumKind},
},
util::NameSpace,
};
impl EventEnum<'_> {
    /// Generate the `Any*EventContent` enum for this kind.
    ///
    /// The generated enum has one variant per known event content type plus a hidden
    /// `_Custom` variant holding the raw event type string for unknown events. It is
    /// accompanied by `EventContentFromType`, the kind's `*EventContent` trait, `From`
    /// conversions and `JsonCastable` implementations.
    pub(super) fn expand_content_enum(&self) -> syn::Result<TokenStream> {
        let palpo_core = NameSpace::palpo_core();
        let serde = NameSpace::serde();
        let attrs = &self.attrs;
        let ident = &self.content_enum;
        let variants = &self.variants;
        let variant_attrs = &self.variant_attrs;
        let variant_docs = &self.variant_docs;
        let event_content_types = &self.event_content_types;
        let event_content_from_type_impl = self.expand_content_enum_event_content_from_type_impl();
        let event_content_kind_trait_impl =
            self.expand_content_enum_event_content_kind_trait_impl();
        let from_impl = self.expand_from_impl(ident, event_content_types);
        let json_castable_impl = self.expand_content_enum_json_castable_impl();
        // We need this path as a string for the `serialize_with` attribute below.
        let serialize_custom_event_error_path =
            quote! { #palpo_core::events::serialize_custom_event_error }.to_string();
        Ok(quote! {
            #( #attrs )*
            #[derive(salvo::oapi::ToSchema, #serde::Serialize, Clone, Debug)]
            #[serde(untagged)]
            #[allow(clippy::large_enum_variant, unused_qualifications)]
            pub enum #ident {
                #(
                    #variant_docs
                    #( #variant_attrs )*
                    #variants(#event_content_types),
                )*
                #[doc(hidden)]
                #[serde(serialize_with = #serialize_custom_event_error_path)]
                _Custom {
                    event_type: crate::PrivOwnedStr,
                },
            }
            #event_content_from_type_impl
            #event_content_kind_trait_impl
            #from_impl
            #json_castable_impl
        })
    }
    /// Generate the `palpo_events::EventContentFromType` implementation for the
    /// `Any*EventContent` enum.
    fn expand_content_enum_event_content_from_type_impl(&self) -> TokenStream {
        let palpo_core = self.palpo_core;
        let serde_json = NameSpace::serde_json();
        let ident = &self.content_enum;
        let variants = &self.variants;
        let event_attrs = &self.variant_attrs;
        let event_content_types = &self.event_content_types;
        let event_type_string_match_arms = &self.event_type_string_match_arms;
        // One deserialization expression per known event, consumed positionally by the
        // repetition in the generated `match` below.
        let deserialize_event_contents = self.events.iter().zip(event_content_types.iter()).map(
            |(event, event_content_type)| {
                if event.has_type_fragment() {
                    // If the event has a type fragment, then it implements EventContentFromType
                    // itself; see `generate_event_content_impl` which does that. In this case,
                    // forward to its implementation.
                    quote! {
                        #event_content_type::from_parts(event_type, json)?
                    }
                } else {
                    // The event doesn't have a type fragment, so it *should* implement
                    // Deserialize: use that here.
                    quote! {
                        #serde_json::from_str(json.get())?
                    }
                }
            },
        );
        // NOTE: the return type uses the resolved `#serde_json` path (previously a bare
        // `serde_json::Result`), so the generated code doesn't depend on the caller
        // having the literal `serde_json` crate name in scope.
        quote! {
            #[automatically_derived]
            impl #palpo_core::events::EventContentFromType for #ident {
                fn from_parts(event_type: &str, json: &#serde_json::value::RawValue) -> #serde_json::Result<Self> {
                    match event_type {
                        #(
                            #( #event_attrs )*
                            #( #event_type_string_match_arms )|* => {
                                let content = #deserialize_event_contents;
                                Ok(Self::#variants(content))
                            },
                        )*
                        _ => {
                            Ok(Self::_Custom {
                                event_type: crate::PrivOwnedStr(
                                    ::std::convert::From::from(event_type.to_owned())
                                )
                            })
                        }
                    }
                }
            }
        }
    }
    /// Generate the `palpo_events::{kind}EventContent` trait implementation for the
    /// `Any*EventContent` enum.
    fn expand_content_enum_event_content_kind_trait_impl(&self) -> TokenStream {
        let palpo_core = self.palpo_core;
        let ident = &self.content_enum;
        let event_type_enum = &self.event_type_enum;
        let variants = &self.variants;
        let variant_attrs = &self.variant_attrs;
        let event_content_kind_trait = self
            .kind
            .to_content_kind_trait(EventContentTraitVariation::Original);
        // The State content trait additionally requires an associated `StateKey` type.
        let extra_event_content_impl = (self.kind == EventEnumKind::State).then(|| {
            quote! {
                type StateKey = String;
            }
        });
        quote! {
            #[automatically_derived]
            impl #palpo_core::events::#event_content_kind_trait for #ident {
                #extra_event_content_impl
                fn event_type(&self) -> #palpo_core::events::#event_type_enum {
                    match self {
                        #(
                            #( #variant_attrs )*
                            Self::#variants(content) => content.event_type(),
                        )*
                        Self::_Custom { event_type } => ::std::convert::From::from(&event_type.0[..]),
                    }
                }
            }
        }
    }
    /// Generate an `AnyFull*EventContent` enum.
    ///
    /// Unlike the plain content enum, each variant wraps its content type in
    /// `FullStateEventContent`, and the `_Custom` variant additionally records whether
    /// the event was redacted.
    pub(super) fn expand_full_content_enum(&self) -> syn::Result<TokenStream> {
        let palpo_core = self.palpo_core;
        let attrs = &self.attrs;
        let ident = &self.full_content_enum;
        let event_type_enum = &self.event_type_enum;
        let variants = &self.variants;
        let variant_attrs = &self.variant_attrs;
        let variant_docs = &self.variant_docs;
        let event_content_types = &self.event_content_types;
        Ok(quote! {
            #( #attrs )*
            #[derive(salvo::oapi::ToSchema, Clone, Debug)]
            #[allow(clippy::large_enum_variant, unused_qualifications)]
            pub enum #ident {
                #(
                    #variant_docs
                    #( #variant_attrs )*
                    #variants(#palpo_core::events::FullStateEventContent<#event_content_types>),
                )*
                #[doc(hidden)]
                _Custom {
                    event_type: crate::PrivOwnedStr,
                    redacted: bool,
                },
            }
            impl #ident {
                /// Get the event’s type, like `m.room.create`.
                pub fn event_type(&self) -> #palpo_core::events::#event_type_enum {
                    match self {
                        #(
                            #( #variant_attrs )*
                            Self::#variants(content) => content.event_type(),
                        )*
                        Self::_Custom { event_type, .. } => ::std::convert::From::from(&event_type.0[..]),
                    }
                }
            }
        })
    }
    /// Implement `JsonCastable<{enum}>` for all the variants and `JsonCastable<JsonObject>` for the
    /// given event content enum.
    fn expand_content_enum_json_castable_impl(&self) -> TokenStream {
        // NOTE: a previously-bound `NameSpace::serde()` local was never interpolated in
        // the generated code below and has been removed.
        let palpo_core = self.palpo_core;
        let ident = &self.content_enum;
        // All event content types are represented as objects in JSON.
        let mut json_castable_impls = quote! {
            #[automatically_derived]
            impl #palpo_core::serde::JsonCastable<#palpo_core::serde::JsonObject> for #ident {}
        };
        json_castable_impls.extend(
            self.event_content_types
                .iter()
                .zip(self.variant_attrs.iter())
                .map(|(event_content_type, variant_attrs)| {
                    quote! {
                        #[allow(unused_qualifications)]
                        #[automatically_derived]
                        #( #variant_attrs )*
                        impl #palpo_core::serde::JsonCastable<#ident> for #event_content_type {}
                    }
                }),
        );
        json_castable_impls
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/events/event_content/parse.rs | crates/core-macros/src/events/event_content/parse.rs | //! Parsing helpers for the `EventContent` derive macro.
use as_variant::as_variant;
use proc_macro2::{Span, TokenStream};
use syn::{
meta::ParseNestedMeta,
parse::{Parse, ParseStream},
parse_quote,
};
use super::{EventContent, EventContentField, EventContentKind};
use crate::{
events::common::{CommonEventKind, EventType, EventTypes},
util::{NameSpace, ParseNestedMetaExt, SerdeMetaItem, StructFieldExt},
};
impl EventContent {
    /// Parse the given derive input into an `EventContent`.
    ///
    /// Collects all `#[palpo_event(...)]` container attributes, resolves the event
    /// type(s), fields and kind, then validates the assembled `EventContent`.
    pub(super) fn parse(input: syn::DeriveInput) -> syn::Result<Self> {
        let palpo_core = NameSpace::palpo_core();
        let mut event_content_attrs = EventContentAttrs::default();
        // Fold every `#[palpo_event(...)]` attribute into a single parsed attribute set;
        // `try_merge` rejects duplicates and unknown attributes.
        for attr in &input.attrs {
            if !attr.path().is_ident("palpo_event") {
                continue;
            }
            attr.parse_nested_meta(|meta| event_content_attrs.try_merge(meta, attr))?;
        }
        let EventContentAttrs {
            event_type,
            aliases,
            kind,
            state_key_type,
            unsigned_type,
            has_custom_redacted,
            has_custom_possibly_redacted,
            has_without_relation,
        } = event_content_attrs;
        // The `type` attribute is mandatory.
        let event_type = event_type.ok_or_else(|| {
            syn::Error::new(
                Span::call_site(),
                "missing `type` attribute, \
                add `#[palpo_event(type = \"m.event_type\", kind = Kind)]` \
                below the event content derive",
            )
        })?;
        let types = EventTypes::try_from_parts(event_type, aliases)?;
        // Only structs have fields to convert; other data shapes leave this as `None`.
        let fields =
            as_variant!(input.data, syn::Data::Struct(syn::DataStruct { fields, ..}) => fields)
                .map(|fields| {
                    fields
                        .into_iter()
                        .map(|field| field.try_into())
                        .collect::<syn::Result<_>>()
                })
                .transpose()?;
        let kind = EventContentKind::try_from_parts(
            kind,
            state_key_type,
            unsigned_type,
            has_custom_redacted,
            has_custom_possibly_redacted,
            &palpo_core,
        )?;
        let event_content = Self {
            types,
            ident: input.ident,
            vis: input.vis,
            fields,
            kind,
            has_without_relation,
            palpo_core,
        };
        // Reject inconsistent attribute/field combinations early.
        event_content.validate()?;
        Ok(event_content)
    }
}
impl EventContent {
    /// Validate the data inside this `EventContent`.
    ///
    /// Checks the struct name, the `.*` type-suffix / `type_fragment` pairing, and the
    /// preconditions for the types this macro generates (`Redacted*`,
    /// `PossiblyRedacted*`, `*WithoutRelation`).
    fn validate(&self) -> syn::Result<()> {
        // Ident check.
        if !self.ident.to_string().ends_with("Content") {
            return Err(syn::Error::new_spanned(
                &self.ident,
                "event content struct name must end with `Content`",
            ));
        }
        // Type suffix checks.
        // An event type ending in `.*` acts as a prefix; the concrete suffix comes from a
        // `#[palpo_event(type_fragment)]` field.
        let has_type_suffix = self.types.is_prefix();
        if has_type_suffix && !self.kind.is_account_data() {
            return Err(syn::Error::new_spanned(
                &self.types.ev_type,
                "only account data events may contain a `.*` suffix",
            ));
        }
        if let Some(fields) = &self.fields {
            let type_fragment_fields_count =
                fields.iter().filter(|field| field.is_type_fragment).count();
            // A `.*` suffix and a `type_fragment` field must come in pairs, and the
            // fragment field must be unique.
            if has_type_suffix && type_fragment_fields_count == 0 {
                return Err(syn::Error::new_spanned(
                    &self.types.ev_type,
                    "event type with a `.*` suffix requires there to be a \
                    `#[palpo_event(type_fragment)]` field",
                ));
            }
            if !has_type_suffix && type_fragment_fields_count > 0 {
                return Err(syn::Error::new(
                    Span::call_site(),
                    "`#[palpo_event(type_fragment)]` field is only valid for an event type with a `.*` suffix",
                ));
            }
            if type_fragment_fields_count > 1 {
                return Err(syn::Error::new(
                    Span::call_site(),
                    "There can only be one `#[palpo_event(type_fragment)]` field",
                ));
            }
        } else if has_type_suffix {
            return Err(syn::Error::new_spanned(
                &self.types.ev_type,
                "event type with a `.*` suffix is required to be a struct",
            ));
        }
        // Checks for generated structs.
        if self.kind.should_generate_redacted() && self.fields.is_none() {
            return Err(syn::Error::new(
                Span::call_site(),
                "To generate a redacted event content, \
                the event content type needs to be a struct. \
                Disable this with the `custom_redacted` attribute",
            ));
        }
        if self.kind.should_generate_possibly_redacted() {
            // The possibly-redacted struct is rebuilt field by field, so named fields are
            // required.
            if self
                .fields
                .as_ref()
                .is_none_or(|fields| fields.iter().any(|field| field.inner.ident.is_none()))
            {
                return Err(syn::Error::new(
                    Span::call_site(),
                    "To generate a possibly redacted event content, \
                    the event content type needs to be a struct with named fields. \
                    Disable this with the `custom_possibly_redacted` attribute",
                ));
            }
            // Fields that get redacted may only carry serde attributes the rewrite can
            // reproduce (`rename`/`alias`); anything else must be handled manually.
            if let Some(fields) = &self.fields
                && let Some(field_with_unsupported_serde_attribute) = fields.iter().find(|field| {
                    if field.keep_in_possibly_redacted() {
                        return false;
                    }
                    field.inner.serde_meta_items().any(|serde_meta| {
                        serde_meta != SerdeMetaItem::Rename && serde_meta != SerdeMetaItem::Alias
                    })
                })
            {
                return Err(syn::Error::new_spanned(
                    field_with_unsupported_serde_attribute,
                    "To generate a possibly redacted event content, \
                    the fields that are redacted must either use the `default` \
                    serde attribute with any other attribute, or only the \
                    following serde attributes: `rename` or `alias`. \
                    Disable this with the `custom_possibly_redacted` attribute",
                ));
            }
        }
        // `without_relation` strips a `relates_to` field, so one must exist.
        if self.has_without_relation
            && self.fields.as_ref().is_none_or(|fields| {
                !fields.iter().any(|field| {
                    field
                        .inner
                        .ident
                        .as_ref()
                        .is_some_and(|ident| *ident == "relates_to")
                })
            })
        {
            return Err(syn::Error::new(
                Span::call_site(),
                "To generate an event content without relation, \
                the event content type needs to be a struct with a field named `relates_to`. \
                Disable this by removing the `without_relation` attribute",
            ));
        }
        Ok(())
    }
}
/// Parsed container attributes for the `EventContent` macro.
///
/// Populated incrementally by `try_merge` from `#[palpo_event(...)]` container
/// attributes; `event_type` and `kind` are mandatory and their absence is reported in
/// `EventContent::parse` / `EventContentKind::try_from_parts`.
#[derive(Default)]
pub struct EventContentAttrs {
    /// The main event type.
    event_type: Option<EventType>,
    /// The alternative event types.
    aliases: Vec<EventType>,
    /// The event content kind.
    kind: Option<EventContentKindAttr>,
    /// The type of the state key.
    ///
    /// Only valid for the State kind.
    state_key_type: Option<syn::Type>,
    /// The type of the unsigned data.
    ///
    /// Only valid for the State kind.
    unsigned_type: Option<syn::Type>,
    /// Whether the `Redacted*EventContent` type is implemented manually rather than generated by
    /// this macro.
    has_custom_redacted: bool,
    /// Whether the `PossiblyRedacted*EventContent` type is implemented manually rather than
    /// generated by this macro.
    has_custom_possibly_redacted: bool,
    /// Whether this macro should generate an `*EventContentWithoutRelation` type.
    has_without_relation: bool,
}
impl EventContentAttrs {
/// Set the main event type.
///
/// Returns an error if it is already set.
fn set_event_type(&mut self, event_type: EventType, attr: &syn::Attribute) -> syn::Result<()> {
if self.event_type.is_some() {
return Err(syn::Error::new_spanned(
attr,
"cannot have multiple values for `type` attribute",
));
}
self.event_type = Some(event_type);
Ok(())
}
/// Add an alternative event type.
fn push_alias(&mut self, event_type: EventType) {
self.aliases.push(event_type);
}
/// Set the event content kind.
///
/// Returns an error if it is already set.
fn set_kind(&mut self, kind: EventContentKindAttr, attr: &syn::Attribute) -> syn::Result<()> {
if self.kind.is_some() {
return Err(syn::Error::new_spanned(
attr,
"cannot have multiple values for `kind` attribute",
));
}
self.kind = Some(kind);
Ok(())
}
/// Set the type of the state key.
///
/// Returns an error if it is already set.
fn set_state_key_type(
&mut self,
state_key_type: syn::Type,
attr: &syn::Attribute,
) -> syn::Result<()> {
if self.state_key_type.is_some() {
return Err(syn::Error::new_spanned(
attr,
"cannot have multiple values for `state_key_type` attribute",
));
}
self.state_key_type = Some(state_key_type);
Ok(())
}
/// Set the type of the unsigned data.
///
/// Returns an error if it is already set.
fn set_unsigned_type(
&mut self,
unsigned_type: syn::Type,
attr: &syn::Attribute,
) -> syn::Result<()> {
if self.unsigned_type.is_some() {
return Err(syn::Error::new_spanned(
attr,
"cannot have multiple values for `unsigned_type` attribute",
));
}
self.unsigned_type = Some(unsigned_type);
Ok(())
}
/// Set that the `Redacted*EventContent` type is implemented manually rather than generated by
/// this macro.
///
/// Returns an error if it is already set.
fn set_custom_redacted(&mut self, attr: &syn::Attribute) -> syn::Result<()> {
if self.has_custom_redacted {
return Err(syn::Error::new_spanned(
attr,
"cannot have multiple occurrences of `custom_redacted` attribute",
));
}
self.has_custom_redacted = true;
Ok(())
}
/// Set that the `PossiblyRedacted*EventContent` type is implemented manually rather than
/// generated by this macro.
///
/// Returns an error if it is already set.
fn set_custom_possibly_redacted(&mut self, attr: &syn::Attribute) -> syn::Result<()> {
if self.has_custom_possibly_redacted {
return Err(syn::Error::new_spanned(
attr,
"cannot have multiple occurrences of `custom_possibly_redacted` attribute",
));
}
self.has_custom_possibly_redacted = true;
Ok(())
}
/// Set that this macro should generate an `*EventContentWithoutRelation` type.
///
/// Returns an error if it is already set.
fn set_without_relation(&mut self, attr: &syn::Attribute) -> syn::Result<()> {
if self.has_without_relation {
return Err(syn::Error::new_spanned(
attr,
"cannot have multiple occurrences of `without_relation` attribute",
));
}
self.has_without_relation = true;
Ok(())
}
fn try_merge(&mut self, meta: ParseNestedMeta<'_>, attr: &syn::Attribute) -> syn::Result<()> {
if meta.path.is_ident("type") {
return self.set_event_type(meta.value()?.parse()?, attr);
}
if meta.path.is_ident("alias") {
self.push_alias(meta.value()?.parse()?);
return Ok(());
}
if meta.path.is_ident("kind") {
return self.set_kind(meta.value()?.parse()?, attr);
}
if meta.path.is_ident("state_key_type") {
return self.set_state_key_type(meta.value()?.parse()?, attr);
}
if meta.path.is_ident("unsigned_type") {
return self.set_unsigned_type(meta.value()?.parse()?, attr);
}
if meta.path.is_ident("custom_redacted") {
if meta.has_value() {
return Err(meta.error("`custom_redacted` attribute doesn't expect a value"));
}
return self.set_custom_redacted(attr);
}
if meta.path.is_ident("custom_possibly_redacted") {
if meta.has_value() {
return Err(
meta.error("`custom_possibly_redacted` attribute doesn't expect a value")
);
}
return self.set_custom_possibly_redacted(attr);
}
if meta.path.is_ident("without_relation") {
if meta.has_value() {
return Err(meta.error("`without_relation` attribute doesn't expect a value"));
}
return self.set_without_relation(attr);
}
Err(meta.error("unsupported `palpo_event` attribute"))
}
}
impl TryFrom<syn::Field> for EventContentField {
    type Error = syn::Error;
    /// Convert a raw struct field into an `EventContentField`, extracting and validating
    /// its `#[palpo_event(...)]` attributes.
    fn try_from(mut inner: syn::Field) -> Result<Self, Self::Error> {
        let mut field_attrs = EventContentFieldAttrs::default();
        // Strip the `palpo_event` attributes from the field while parsing them.
        for attr in inner
            .attrs
            .extract_if(.., |attr| attr.path().is_ident("palpo_event"))
        {
            attr.parse_nested_meta(|meta| field_attrs.try_merge(meta, &attr))?;
        }
        let EventContentFieldAttrs {
            skip_redaction,
            is_type_fragment,
        } = field_attrs;
        // The type-fragment field is always kept, so `skip_redaction` is redundant on it.
        if skip_redaction && is_type_fragment {
            return Err(syn::Error::new_spanned(
                inner,
                "the `skip_redaction` attribute is not valid on a field with the `type_fragment` attribute",
            ));
        }
        Ok(Self {
            inner,
            skip_redaction,
            is_type_fragment,
        })
    }
}
/// Parsed field attributes for the `EventContent` macro.
///
/// Populated incrementally by `try_merge` from `#[palpo_event(...)]` field attributes.
#[derive(Default)]
struct EventContentFieldAttrs {
    /// Whether this field should be kept during redaction.
    skip_redaction: bool,
    /// Whether this field represents the suffix of the event type.
    is_type_fragment: bool,
}
impl EventContentFieldAttrs {
/// Set that this field should be kept during redaction.
///
/// Returns an error if it is already set.
fn set_skip_redaction(&mut self, attr: &syn::Attribute) -> syn::Result<()> {
if self.skip_redaction {
return Err(syn::Error::new_spanned(
attr,
"cannot have multiple occurrences of `skip_redaction` attribute",
));
}
self.skip_redaction = true;
Ok(())
}
/// Set that this field represents the suffix of the event type.
///
/// Returns an error if it is already set.
fn set_type_fragment(&mut self, attr: &syn::Attribute) -> syn::Result<()> {
if self.is_type_fragment {
return Err(syn::Error::new_spanned(
attr,
"cannot have multiple occurrences of `type_fragment` attribute",
));
}
self.is_type_fragment = true;
Ok(())
}
fn try_merge(&mut self, meta: ParseNestedMeta<'_>, attr: &syn::Attribute) -> syn::Result<()> {
if meta.path.is_ident("skip_redaction") {
if meta.has_value() {
return Err(meta.error("`skip_redaction` attribute doesn't expect a value"));
}
return self.set_skip_redaction(attr);
}
if meta.path.is_ident("type_fragment") {
if meta.has_value() {
return Err(meta.error("`type_fragment` attribute doesn't expect a value"));
}
return self.set_type_fragment(attr);
}
Err(meta.error("unsupported `palpo_event` attribute"))
}
}
impl EventContentKind {
    /// Build an `EventContentKind` from the parsed container attributes.
    ///
    /// Validates that kind-specific attributes (`state_key_type`, `unsigned_type`,
    /// `custom_redacted`, `custom_possibly_redacted`) are only used with kinds that
    /// support them, and fills in the default `unsigned_type` for state events.
    fn try_from_parts(
        kind: Option<EventContentKindAttr>,
        state_key_type: Option<syn::Type>,
        unsigned_type: Option<syn::Type>,
        has_custom_redacted: bool,
        has_custom_possibly_redacted: bool,
        palpo_core: &TokenStream,
    ) -> syn::Result<Self> {
        // The `kind` attribute is mandatory.
        let kind = kind.ok_or_else(|| {
            syn::Error::new(
                Span::call_site(),
                "missing `kind` attribute, \
                add `#[palpo_event(type = \"m.event_type\", kind = Kind)]` \
                below the event content derive",
            )
        })?;
        let is_state = matches!(kind, EventContentKindAttr::Single(CommonEventKind::State));
        let is_message_like = matches!(
            kind,
            EventContentKindAttr::Single(CommonEventKind::MessageLike)
        );
        // State-only attributes.
        if let Some(state_key_type) = &state_key_type
            && !is_state
        {
            return Err(syn::Error::new_spanned(
                state_key_type,
                "the `state_key_type` attribute is only valid for the State kind",
            ));
        }
        if let Some(unsigned_type) = &unsigned_type
            && !is_state
        {
            return Err(syn::Error::new_spanned(
                unsigned_type,
                "the `unsigned_type` attribute is only valid for the State kind",
            ));
        }
        // Redaction-related flags only apply to redactable kinds.
        if has_custom_redacted && !is_state && !is_message_like {
            return Err(syn::Error::new(
                Span::call_site(),
                "the `custom_redacted` attribute is only valid for the State and MessageLike kinds",
            ));
        }
        if has_custom_possibly_redacted && !is_state {
            return Err(syn::Error::new(
                Span::call_site(),
                "the `custom_possibly_redacted` attribute is only valid for the State kind",
            ));
        }
        Ok(match kind {
            EventContentKindAttr::Single(kind) => match kind {
                CommonEventKind::GlobalAccountData => EventContentKind::GlobalAccountData,
                CommonEventKind::RoomAccountData => EventContentKind::RoomAccountData,
                CommonEventKind::EphemeralRoom => EventContentKind::EphemeralRoom,
                CommonEventKind::MessageLike => EventContentKind::MessageLike {
                    has_custom_redacted,
                },
                CommonEventKind::State => {
                    // `state_key_type` is mandatory for state events...
                    let state_key_type = state_key_type.ok_or_else(|| {
                        syn::Error::new(Span::call_site(), "missing `state_key_type` attribute")
                    })?;
                    // ...but `unsigned_type` falls back to the standard `StateUnsigned`.
                    let unsigned_type = unsigned_type.unwrap_or_else(|| {
                        parse_quote! {
                            #palpo_core::events::StateUnsigned<Self::PossiblyRedacted>
                        }
                    });
                    EventContentKind::State {
                        state_key_type,
                        unsigned_type,
                        has_custom_redacted,
                        has_custom_possibly_redacted,
                    }
                }
                CommonEventKind::ToDevice => EventContentKind::ToDevice,
            },
            EventContentKindAttr::BothAccountData => EventContentKind::BothAccountData,
        })
    }
}
/// The possible values of the `kind` attribute of an event content.
///
/// This is a wrapper around [`CommonEventKind`] that allows providing two kinds for the same event
/// content.
#[derive(Clone, Copy)]
enum EventContentKindAttr {
    /// The event content has a single kind.
    Single(CommonEventKind),
    /// The event content is of the two account data kinds.
    BothAccountData,
}
impl Parse for EventContentKindAttr {
    /// Parse `Kind` or `Kind + Kind`.
    ///
    /// Only the two account data kinds may be combined with `+`, in either order.
    fn parse(input: ParseStream<'_>) -> syn::Result<Self> {
        let first_event_kind: CommonEventKind = input.parse()?;
        // An optional `+ SecondKind` may follow the first kind.
        let second_event_kind = input
            .peek(syn::Token![+])
            .then(|| {
                let _: syn::Token![+] = input.parse()?;
                input.parse::<CommonEventKind>()
            })
            .transpose()?;
        match (first_event_kind, second_event_kind) {
            (event_kind, None) => Ok(Self::Single(event_kind)),
            (CommonEventKind::GlobalAccountData, Some(CommonEventKind::RoomAccountData))
            | (CommonEventKind::RoomAccountData, Some(CommonEventKind::GlobalAccountData)) => {
                Ok(Self::BothAccountData)
            }
            _ => Err(syn::Error::new(
                Span::call_site(),
                "only account data can have two kinds",
            )),
        }
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core-macros/src/events/event/parse.rs | crates/core-macros/src/events/event/parse.rs | //! Parsing helpers specific to the `Event` derive macro.
use proc_macro2::Span;
use syn::meta::ParseNestedMeta;
use super::{Event, EventField, EventKind};
use crate::{
events::common::EventVariation,
util::{NameSpace, ParseNestedMetaExt},
};
impl Event {
    /// Parse the given input as an `Event`.
    ///
    /// The struct's name determines both the event kind and the variation; only structs
    /// with named fields are supported.
    pub(super) fn parse(input: syn::DeriveInput) -> syn::Result<Self> {
        let (kind, variation) = event_ident_to_kind_and_variation(&input.ident)?;
        let syn::Data::Struct(syn::DataStruct {
            fields: syn::Fields::Named(syn::FieldsNamed { named, .. }),
            ..
        }) = input.data
        else {
            return Err(syn::Error::new_spanned(
                &input.ident,
                "the `Event` derive only supports structs with named fields",
            ));
        };
        let fields = named
            .into_iter()
            .map(EventField::parse)
            .collect::<Result<_, _>>()?;
        let event = Self {
            ident: input.ident,
            generics: input.generics,
            kind,
            variation,
            fields,
            palpo_core: NameSpace::palpo_core(),
        };
        event.validate()?;
        Ok(event)
    }
    /// Validate the fields of this `Event`.
    ///
    /// Requires a `content` field, and — if the struct is generic — a type parameter `C`
    /// used as the `content` field's type.
    fn validate(&self) -> syn::Result<()> {
        let Some(content_field) = self.fields.iter().find(|field| *field.ident() == "content")
        else {
            return Err(syn::Error::new(
                Span::call_site(),
                "`Event` struct must contain at least a `content` field",
            ));
        };
        // We assume that if there are generics, they must be at least for the `content` type, and
        // its generic should be named `C`.
        if !self.generics.params.is_empty() && (!self.generics.params.iter().any(|param| matches!(param, syn::GenericParam::Type(syn::TypeParam { ident, ..}) if *ident == "C")) || !matches!(&content_field.inner.ty, syn::Type::Path(syn::TypePath{ path, ..}) if path.is_ident("C"))) {
            return Err(syn::Error::new(
                Span::call_site(),
                "Generics on `Event` struct are only supported for \
                the `content` field and must be named `C`",
            ));
        }
        Ok(())
    }
}
/// Get the event kind and variation from the given struct name.
///
/// Returns an error if the value of `ident` is not a supported `Event` struct name.
fn event_ident_to_kind_and_variation(
    ident: &syn::Ident,
) -> syn::Result<(EventKind, EventVariation)> {
    Ok(match ident.to_string().as_str() {
        "GlobalAccountDataEvent" => (EventKind::GlobalAccountData, EventVariation::None),
        "RoomAccountDataEvent" => (EventKind::RoomAccountData, EventVariation::None),
        "EphemeralRoomEvent" => (EventKind::EphemeralRoom, EventVariation::None),
        "SyncEphemeralRoomEvent" => (EventKind::EphemeralRoom, EventVariation::Sync),
        "OriginalMessageLikeEvent" => (EventKind::MessageLike, EventVariation::Original),
        "OriginalSyncMessageLikeEvent" => (EventKind::MessageLike, EventVariation::OriginalSync),
        "RedactedMessageLikeEvent" => (EventKind::MessageLike, EventVariation::Redacted),
        "RedactedSyncMessageLikeEvent" => (EventKind::MessageLike, EventVariation::RedactedSync),
        "OriginalStateEvent" => (EventKind::State, EventVariation::Original),
        "OriginalSyncStateEvent" => (EventKind::State, EventVariation::OriginalSync),
        "StrippedStateEvent" => (EventKind::State, EventVariation::Stripped),
        "InitialStateEvent" => (EventKind::State, EventVariation::Initial),
        "RedactedStateEvent" => (EventKind::State, EventVariation::Redacted),
        "RedactedSyncStateEvent" => (EventKind::State, EventVariation::RedactedSync),
        "ToDeviceEvent" => (EventKind::ToDevice, EventVariation::None),
        "HierarchySpaceChildEvent" => (EventKind::HierarchySpaceChild, EventVariation::Stripped),
        "OriginalRoomRedactionEvent" => (EventKind::RoomRedaction, EventVariation::None),
        "OriginalSyncRoomRedactionEvent" => {
            (EventKind::RoomRedaction, EventVariation::OriginalSync)
        }
        "RedactedRoomRedactionEvent" => (EventKind::RoomRedaction, EventVariation::Redacted),
        "RedactedSyncRoomRedactionEvent" => {
            (EventKind::RoomRedaction, EventVariation::RedactedSync)
        }
        // Both decrypted event structs share a single kind/variation pair.
        "DecryptedOlmV1Event" | "DecryptedMegolmV1Event" => {
            (EventKind::Decrypted, EventVariation::None)
        }
        _ => {
            return Err(syn::Error::new_spanned(
                ident,
                "not a supported `Event` struct identifier",
            ));
        }
    })
}
impl EventField {
    /// Parse the given field to construct an `EventField`.
    ///
    /// Returns an error if an unknown `palpo_event` attribute is encountered, or if an attribute
    /// that accepts a single value appears several times.
    fn parse(mut inner: syn::Field) -> syn::Result<Self> {
        let mut field_attrs = EventFieldAttrs::default();
        // Strip the `palpo_event` attributes from the field while parsing them.
        for attr in inner
            .attrs
            .extract_if(.., |attribute| attribute.path().is_ident("palpo_event"))
        {
            attr.parse_nested_meta(|meta| field_attrs.try_merge(meta, &attr))?;
        }
        let EventFieldAttrs {
            default,
            default_on_error,
            rename,
            aliases,
        } = field_attrs;
        Ok(Self {
            inner,
            default,
            default_on_error,
            rename,
            aliases,
        })
    }
}
/// The parsed attributes of a field of an [`Event`].
///
/// Populated incrementally by `try_merge` from `#[palpo_event(...)]` field attributes.
#[derive(Default)]
struct EventFieldAttrs {
    /// Whether this field should deserialize to the default value if it is missing.
    default: bool,
    /// Whether this field should deserialize to the default value if an error occurs during
    /// deserialization.
    default_on_error: bool,
    /// The name to use when (de)serializing this field.
    ///
    /// If this is not set, the name of the field will be used.
    rename: Option<syn::LitStr>,
    /// The alternate names to recognize when deserializing this field.
    aliases: Vec<syn::LitStr>,
}
impl EventFieldAttrs {
/// Set that this field should deserialize to the default value if it is missing.
///
/// Returns an error if it is already set.
fn set_default(&mut self, attr: &syn::Attribute) -> syn::Result<()> {
if self.default {
return Err(syn::Error::new_spanned(
attr,
"cannot have multiple occurrences of `default` attribute",
));
}
self.default = true;
Ok(())
}
/// Set that this field should deserialize to the default value if an error occurs during
/// deserialization.
///
/// Returns an error if it is already set.
fn set_default_on_error(&mut self, attr: &syn::Attribute) -> syn::Result<()> {
if self.default_on_error {
return Err(syn::Error::new_spanned(
attr,
"cannot have multiple occurrences of `default_on_error` attribute",
));
}
self.default_on_error = true;
Ok(())
}
/// Set the name to use when (de)serializing this field.
///
/// Returns an error if it is already set.
fn set_rename(&mut self, rename: syn::LitStr, attr: &syn::Attribute) -> syn::Result<()> {
if self.rename.is_some() {
return Err(syn::Error::new_spanned(
attr,
"cannot have multiple values for `rename` attribute",
));
}
self.rename = Some(rename);
Ok(())
}
/// Try to parse the given meta item and merge it into this `EventFieldAttrs`.
///
/// Returns an error if an unknown `palpo_event` attribute is encountered, or if an attribute
/// that accepts a single value appears several times.
fn try_merge(&mut self, meta: ParseNestedMeta<'_>, attr: &syn::Attribute) -> syn::Result<()> {
if meta.path.is_ident("default") {
if meta.has_value() {
return Err(meta.error("`default` attribute doesn't expect a value"));
}
return self.set_default(attr);
}
if meta.path.is_ident("default_on_error") {
if meta.has_value() {
return Err(meta.error("`default_on_error` attribute doesn't expect a value"));
}
return self.set_default_on_error(attr);
}
if meta.path.is_ident("rename") {
return self.set_rename(meta.value()?.parse()?, attr);
}
if meta.path.is_ident("alias") {
self.aliases.push(meta.value()?.parse()?);
return Ok(());
}
Err(meta.error("unsupported `palpo_event` attribute"))
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/room_id.rs | crates/identifiers-validation/src/room_id.rs | use crate::{Error, validate_id};
/// Validate that `s` is a well-formed room ID.
///
/// Room IDs use the `!` sigil; delegates to the shared `validate_id` helper.
pub fn validate(s: &str) -> Result<(), Error> {
    validate_id(s, b'!')
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/user_id.rs | crates/identifiers-validation/src/user_id.rs | use crate::{Error, parse_id};
/// Validate that `s` is a well-formed user ID.
///
/// Checks the `@` sigil and server-name delimiter via `parse_id`, then validates the
/// localpart (everything between the sigil and the first `:`).
pub fn validate(s: &str) -> Result<(), Error> {
    let colon_idx = parse_id(s, b'@')?;
    // Skip the leading `@` sigil; the localpart ends at the colon found by `parse_id`.
    let localpart = &s[1..colon_idx];
    // Historical (non-fully-conforming) localparts are accepted here; only outright
    // invalid ones produce an error.
    let _ = localpart_is_fully_conforming(localpart)?;
    Ok(())
}
/// Check whether the given user id localpart is valid and fully conforming
///
/// Returns an `Err` for invalid user ID localparts, `Ok(false)` for historical user ID localparts
/// and `Ok(true)` for fully conforming user ID localparts.
///
/// With the `compat` feature enabled, this will also return `Ok(false)` for invalid user ID
/// localparts. User IDs that don't even meet the historical user ID restrictions exist in the wild
/// due to Synapse allowing them over federation. This will likely be fixed in an upcoming room
/// version; see [MSC2828](https://github.com/matrix-org/matrix-spec-proposals/pull/2828).
pub fn localpart_is_fully_conforming(localpart: &str) -> Result<bool, Error> {
// See https://spec.matrix.org/latest/appendices/#user-identifiers
let is_fully_conforming = !localpart.is_empty()
&& localpart.bytes().all(
|b| matches!(b, b'0'..=b'9' | b'a'..=b'z' | b'-' | b'.' | b'=' | b'_' | b'/' | b'+'),
);
if !is_fully_conforming {
// If it's not fully conforming, check if it contains characters that are also disallowed
// for historical user IDs, or is empty. If that's the case, return an error.
// See https://spec.matrix.org/latest/appendices/#historical-user-ids
// #[cfg(not(feature = "compat-user-id"))]
// let is_invalid = localpart.is_empty() || localpart.bytes().any(|b| b < 0x21 || b == b':' || b > 0x7E);
// In compat mode, allow anything except `:` to match Synapse. The `:` check is only needed
// because this function can be called through `UserId::parse_with_servername`, otherwise
// it would be impossible for the input to contain a `:`.
// #[cfg(feature = "compat-user-id")]
let is_invalid = localpart.as_bytes().contains(&b':');
if is_invalid {
return Err(Error::InvalidCharacters);
}
}
Ok(is_fully_conforming)
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/lib.rs | crates/identifiers-validation/src/lib.rs | #![doc(html_favicon_url = "https://palpo.io/favicon.ico")]
#![doc(html_logo_url = "https://palpo.io/images/logo.png")]
pub mod base64_public_key;
pub mod client_secret;
pub mod device_key_id;
pub mod error;
pub mod event_id;
pub mod key_id;
pub mod mxc_uri;
pub mod room_alias_id;
pub mod room_id;
pub mod room_id_or_alias_id;
pub mod room_version_id;
pub mod server_name;
pub mod server_signing_key_version;
pub mod space_child_order;
pub mod user_id;
pub mod voip_version_id;
pub use error::Error;
/// All identifiers must be 255 bytes or less.
pub const MAX_BYTES: usize = 255;
/// Checks if an identifier is valid.
fn validate_id(id: &str, first_byte: u8) -> Result<(), Error> {
#[cfg(not(feature = "compat-arbitrary-length-ids"))]
if id.len() > MAX_BYTES {
return Err(Error::MaximumLengthExceeded);
}
if id.as_bytes().first() != Some(&first_byte) {
return Err(Error::MissingLeadingSigil);
}
Ok(())
}
/// Checks an identifier that contains a localpart and hostname for validity.
fn parse_id(id: &str, first_byte: u8) -> Result<usize, Error> {
validate_id(id, first_byte)?;
let colon_idx = id.find(':').ok_or(Error::MissingColon)?;
server_name::validate(&id[colon_idx + 1..])?;
Ok(colon_idx)
}
/// Check an identifier that contains a localpart and hostname for validity,
/// discarding the colon position that `parse_id` reports.
fn validate_delimited_id(id: &str, first_byte: u8) -> Result<(), Error> {
    parse_id(id, first_byte).map(|_| ())
}
/// Helper trait to validate the name of a key.
///
/// Implementors supply the character-set rules for their particular kind of
/// key name; `key_id::validate` applies this to the part after the `:`.
pub trait KeyName: AsRef<str> {
    /// Validate the given string for this name.
    fn validate(s: &str) -> Result<(), Error>;
}
/// Check whether the Matrix identifier localpart is [allowed over federation].
///
/// According to the spec, localparts can consist of any legal non-surrogate
/// Unicode code points except for `:` and `NUL` (`U+0000`).
///
/// [allowed over federation]: https://spec.matrix.org/latest/appendices/#historical-user-ids
pub fn localpart_is_backwards_compatible(localpart: &str) -> Result<(), Error> {
    if localpart.contains(':') || localpart.contains('\0') {
        Err(Error::InvalidCharacters)
    } else {
        Ok(())
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/server_name.rs | crates/identifiers-validation/src/server_name.rs | use crate::error::Error;
/// Validate a Matrix server name: either a bracketed IPv6 literal (`[::1]`) or
/// a hostname of ASCII alphanumerics, `-` and `.`, optionally followed by
/// `:port` where the port fits in a `u16`.
pub fn validate(server_name: &str) -> Result<(), Error> {
    use std::net::Ipv6Addr;
    if server_name.is_empty() {
        return Err(Error::InvalidServerName);
    }
    // `end_of_host` is the byte index one past the host part (the `]` is
    // included for IPv6 literals), i.e. where an optional `:port` would begin.
    let end_of_host = if server_name.starts_with('[') {
        let end_of_ipv6 = match server_name.find(']') {
            Some(idx) => idx,
            None => return Err(Error::InvalidServerName),
        };
        // The text between the brackets must parse as an IPv6 address.
        if server_name[1..end_of_ipv6].parse::<Ipv6Addr>().is_err() {
            return Err(Error::InvalidServerName);
        }
        end_of_ipv6 + 1
    } else {
        #[allow(clippy::unnecessary_lazy_evaluations)]
        let end_of_host = server_name.find(':').unwrap_or_else(|| server_name.len());
        // Hostnames are restricted to ASCII letters, digits, `-` and `.`.
        if server_name[..end_of_host]
            .bytes()
            .any(|byte| !(byte.is_ascii_alphanumeric() || byte == b'-' || byte == b'.'))
        {
            return Err(Error::InvalidServerName);
        }
        end_of_host
    };
    // NOTE(review): `u16::from_str` also accepts a leading `+` (e.g. ":+80"),
    // which presumably isn't an intended port syntax — confirm whether
    // digit-only parsing is wanted here.
    if server_name.len() != end_of_host
        && (
            // hostname is followed by something other than ":port"
            server_name.as_bytes()[end_of_host] != b':'
            // the remaining characters after ':' are not a valid port
            || server_name[end_of_host + 1..].parse::<u16>().is_err()
        )
    {
        Err(Error::InvalidServerName)
    } else {
        Ok(())
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/base64_public_key.rs | crates/identifiers-validation/src/base64_public_key.rs | use crate::Error;
pub fn validate(s: &str) -> Result<(), Error> {
if s.is_empty() {
return Err(Error::Empty);
} else if !s
.chars()
.all(|c| c.is_alphanumeric() || matches!(c, '+' | '/' | '='))
{
return Err(Error::InvalidCharacters);
}
Ok(())
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/voip_version_id.rs | crates/identifiers-validation/src/voip_version_id.rs | use crate::{Error, error::VoipVersionIdError};
/// Validate a VoIP version given as an integer: only `0` is accepted.
pub fn validate(u: u64) -> Result<(), Error> {
    match u {
        0 => Ok(()),
        _ => Err(VoipVersionIdError::WrongUintValue.into()),
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/server_signing_key_version.rs | crates/identifiers-validation/src/server_signing_key_version.rs | use crate::Error;
pub fn validate(s: &str) -> Result<(), Error> {
if s.is_empty() {
Err(Error::Empty)
} else if !s.chars().all(|c| c.is_alphanumeric() || c == '_') {
Err(Error::InvalidCharacters)
} else {
Ok(())
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/room_alias_id.rs | crates/identifiers-validation/src/room_alias_id.rs | use crate::{Error, validate_delimited_id};
/// Validate a room alias ID: a leading `#` sigil plus a valid server name
/// after the first `:` (via `validate_delimited_id`).
pub fn validate(s: &str) -> Result<(), Error> {
    validate_delimited_id(s, b'#')
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/space_child_order.rs | crates/identifiers-validation/src/space_child_order.rs | use crate::Error;
/// Validate the `order` of an [`m.space.child`] event.
///
/// Per the specification, the order:
///
/// > Must consist of ASCII characters within the range `\x20` (space) and
/// > `\x7E` (~), inclusive. Must not exceed 50 characters.
///
/// Returns `Ok(())` when the order passes validation, or an error when it
/// violates these rules and therefore cannot be used for ordering.
///
/// [`m.space.child`]: https://spec.matrix.org/latest/client-server-api/#mspacechild
pub fn validate(s: &str) -> Result<(), Error> {
    if s.len() > 50 {
        return Err(Error::MaximumLengthExceeded);
    }
    // Printable ASCII only: space (0x20) through tilde (0x7E).
    if s.bytes().any(|byte| !matches!(byte, b' '..=b'~')) {
        return Err(Error::InvalidCharacters);
    }
    Ok(())
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/client_secret.rs | crates/identifiers-validation/src/client_secret.rs | use crate::Error;
/// Validate a client secret: 1 to 255 bytes of alphanumerics or `.`, `=`, `_`,
/// `-`.
pub fn validate(s: &str) -> Result<(), Error> {
    if s.len() > 255 {
        return Err(Error::MaximumLengthExceeded);
    }
    if !s.chars().all(|c| c.is_alphanumeric() || matches!(c, '.' | '=' | '_' | '-')) {
        return Err(Error::InvalidCharacters);
    }
    if s.is_empty() {
        return Err(Error::Empty);
    }
    Ok(())
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/error.rs | crates/identifiers-validation/src/error.rs | //! Error conditions.
use std::str::Utf8Error;
/// An error encountered when trying to parse an invalid ID string.
///
/// The more specific sub-error types below are converted into this type via
/// the `#[from]` implementations that `thiserror` generates.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, thiserror::Error)]
#[non_exhaustive]
pub enum Error {
    /// The identifier or a required part of it is empty.
    #[error("identifier or required part of it is empty")]
    Empty,
    /// The identifier contains invalid characters.
    #[error("identifier contains invalid characters")]
    InvalidCharacters,
    /// The string isn't a valid Matrix ID.
    #[error("invalid matrix ID: {0}")]
    InvalidMatrixId(#[from] MatrixIdError),
    /// The string isn't a valid Matrix.to URI.
    #[error("invalid matrix.to URI: {0}")]
    InvalidMatrixToUri(#[from] MatrixToError),
    /// The string isn't a valid Matrix URI.
    #[error("invalid matrix URI: {0}")]
    InvalidMatrixUri(#[from] MatrixUriError),
    /// The mxc:// isn't a valid Matrix Content URI.
    #[error("invalid Matrix Content URI: {0}")]
    InvalidMxcUri(#[from] MxcUriError),
    /// The value isn't a valid VoIP version Id.
    #[error("invalid VoIP version ID: {0}")]
    InvalidVoipVersionId(#[from] VoipVersionIdError),
    /// The server name part of the the ID string is not a valid server name.
    #[error("server name is not a valid IP address or domain name")]
    InvalidServerName,
    /// The string isn't valid UTF-8.
    #[error("invalid UTF-8")]
    InvalidUtf8,
    /// The ID exceeds 255 bytes (or 32 codepoints for a room version ID).
    #[error("ID exceeds 255 bytes")]
    MaximumLengthExceeded,
    /// The ID is missing the colon delimiter between localpart and server name, or between key
    /// algorithm and key name / version.
    #[error("required colon is missing")]
    MissingColon,
    /// The ID is missing the correct leading sigil.
    #[error("leading sigil is incorrect or missing")]
    MissingLeadingSigil,
}
impl From<Utf8Error> for Error {
fn from(_: Utf8Error) -> Self {
Self::InvalidUtf8
}
}
/// An error occurred while validating an MXC URI.
///
/// Converted into [`Error::InvalidMxcUri`] via `#[from]`.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, thiserror::Error)]
#[non_exhaustive]
pub enum MxcUriError {
    /// MXC URI did not start with `mxc://`.
    #[error("MXC URI schema was not mxc://")]
    WrongSchema,
    /// MXC URI did not have first slash, required for `server.name/media_id`.
    #[error("MXC URI does not have first slash")]
    MissingSlash,
    /// Media identifier malformed due to invalid characters detected.
    ///
    /// Valid characters are (in regex notation) `[A-Za-z0-9_-]+`.
    /// See [here](https://spec.matrix.org/v1.17/client-server-api/#security-considerations-5) for more details.
    #[error("Media Identifier malformed, invalid characters")]
    MediaIdMalformed,
    /// Server identifier malformed: invalid IP or domain name.
    #[error("invalid Server Name")]
    ServerNameMalformed,
}
/// An error occurred while validating a `MatrixId`.
///
/// Converted into [`Error::InvalidMatrixId`] via `#[from]`.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, thiserror::Error)]
#[non_exhaustive]
pub enum MatrixIdError {
    /// The string contains an invalid number of parts.
    #[error("invalid number of parts")]
    InvalidPartsNumber,
    /// The string is missing a room ID or alias.
    #[error("missing room ID or alias")]
    MissingRoom,
    /// The string contains no identifier.
    #[error("no identifier")]
    NoIdentifier,
    /// The string contains too many identifiers.
    #[error("too many identifiers")]
    TooManyIdentifiers,
    /// The string contains an unknown identifier.
    #[error("unknown identifier")]
    UnknownIdentifier,
    /// The string contains two identifiers that cannot be paired.
    #[error("unknown identifier pair")]
    UnknownIdentifierPair,
    /// The string contains an unknown identifier type.
    #[error("unknown identifier type")]
    UnknownType,
}
/// An error occurred while validating a `matrix.to` URI.
///
/// Converted into [`Error::InvalidMatrixToUri`] via `#[from]`.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, thiserror::Error)]
#[non_exhaustive]
pub enum MatrixToError {
    /// String is not a valid URI.
    #[error("given string is not a valid URL")]
    InvalidUrl,
    /// String did not start with `https://matrix.to/#/`.
    #[error("base URL is not https://matrix.to/#/")]
    WrongBaseUrl,
    /// String has an unknown additional argument.
    #[error("unknown additional argument")]
    UnknownArgument,
}
/// An error occurred while validating a `MatrixURI`.
///
/// Converted into [`Error::InvalidMatrixUri`] via `#[from]`.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, thiserror::Error)]
#[non_exhaustive]
pub enum MatrixUriError {
    /// The string does not start with `matrix:`.
    #[error("scheme is not 'matrix:'")]
    WrongScheme,
    /// The string contains too many actions.
    #[error("too many actions")]
    TooManyActions,
    /// The string contains an unknown query item.
    #[error("unknown query item")]
    UnknownQueryItem,
}
/// An error occurred while validating a `VoipVersionId`.
///
/// Converted into [`Error::InvalidVoipVersionId`] via `#[from]`.
#[derive(Copy, Clone, Debug, Eq, Hash, PartialEq, thiserror::Error)]
#[non_exhaustive]
pub enum VoipVersionIdError {
    /// The value of the `u64` is not 0.
    #[error("u64 value is not 0")]
    WrongUintValue,
}
#[cfg(test)]
mod tests {
    use std::mem::size_of;
    use super::Error;
    /// Guard against accidentally growing `Error`: every validator returns
    /// `Result<_, Error>`, so the enum should stay at most pointer-sized.
    #[test]
    fn small_error_type() {
        assert!(size_of::<Error>() <= 8);
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/event_id.rs | crates/identifiers-validation/src/event_id.rs | use crate::{Error, validate_delimited_id};
pub fn validate(s: &str) -> Result<(), Error> {
if s.contains(':') {
validate_delimited_id(s, b'$')?;
} else if !s.starts_with('$') {
return Err(Error::MissingLeadingSigil);
}
Ok(())
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/room_id_or_alias_id.rs | crates/identifiers-validation/src/room_id_or_alias_id.rs | use crate::Error;
pub fn validate(s: &str) -> Result<(), Error> {
match s.as_bytes().first() {
Some(b'#') => crate::room_alias_id::validate(s),
Some(b'!') => crate::room_id::validate(s),
_ => Err(Error::MissingLeadingSigil),
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/key_id.rs | crates/identifiers-validation/src/key_id.rs | use std::num::NonZeroU8;
use crate::{Error, KeyName};
pub fn validate<K: KeyName + ?Sized>(s: &str) -> Result<NonZeroU8, Error> {
let colon_idx =
NonZeroU8::new(s.find(':').ok_or(Error::MissingColon)? as u8).ok_or(Error::MissingColon)?;
K::validate(&s[colon_idx.get() as usize + 1..])?;
Ok(colon_idx)
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/room_version_id.rs | crates/identifiers-validation/src/room_version_id.rs | use crate::Error;
/// Room version identifiers cannot be more than 32 code points.
const MAX_CODE_POINTS: usize = 32;
pub fn validate(s: &str) -> Result<(), Error> {
if s.is_empty() {
Err(Error::Empty)
} else if s.chars().count() > MAX_CODE_POINTS {
Err(Error::MaximumLengthExceeded)
} else if !s.chars().all(|c| c.is_alphanumeric() || ".-".contains(c)) {
Err(Error::InvalidCharacters)
} else {
Ok(())
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/device_key_id.rs | crates/identifiers-validation/src/device_key_id.rs | use crate::Error;
pub fn validate(s: &str) -> Result<(), Error> {
let colon_idx = s.find(':').ok_or(Error::MissingColon)?;
if colon_idx == 0 {
Err(Error::Empty)
} else {
// Any non-empty string is accepted as a key algorithm for forwards compatibility
Ok(())
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/identifiers-validation/src/mxc_uri.rs | crates/identifiers-validation/src/mxc_uri.rs | use std::num::NonZeroU8;
use crate::{error::MxcUriError, server_name};
const PROTOCOL: &str = "mxc://";
/// Validate an `mxc://<server-name>/<media-id>` URI.
///
/// On success, returns the byte index (within the full URI) of the `/` that
/// separates the server name from the media ID.
///
/// The previous implementation computed that index with an unchecked `as u8`
/// cast and `NonZeroU8::new(...).unwrap()`: a server name longer than 249
/// bytes silently wrapped the offset, and an offset that was a multiple of 256
/// panicked. Such URIs are now rejected as `ServerNameMalformed`.
pub fn validate(uri: &str) -> Result<NonZeroU8, MxcUriError> {
    let rest = match uri.strip_prefix(PROTOCOL) {
        Some(rest) => rest,
        None => return Err(MxcUriError::WrongSchema),
    };
    let index = match rest.find('/') {
        Some(index) => index,
        None => return Err(MxcUriError::MissingSlash),
    };
    let server_name = &rest[..index];
    let media_id = &rest[index + 1..];
    // Restricted media ID grammar, see:
    // https://spec.matrix.org/v1.17/client-server-api/#security-considerations-5
    let media_id_is_valid = media_id
        .bytes()
        .all(|b| matches!(b, b'0'..=b'9' | b'a'..=b'z' | b'A'..=b'Z' | b'-' ));
    if !media_id_is_valid {
        return Err(MxcUriError::MediaIdMalformed);
    }
    if server_name::validate(server_name).is_err() {
        return Err(MxcUriError::ServerNameMalformed);
    }
    // The slash offset must fit in a `NonZeroU8`; reject instead of wrapping.
    let slash_offset = u8::try_from(index + PROTOCOL.len())
        .map_err(|_| MxcUriError::ServerNameMalformed)?;
    Ok(NonZeroU8::new(slash_offset).expect("offset is at least PROTOCOL.len() > 0"))
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/user.rs | crates/core/src/user.rs | use salvo::oapi::{ToParameters, ToSchema};
use serde::{Deserialize, Serialize};
use crate::{
OwnedMxcUri, OwnedRoomId, OwnedUserId, PrivOwnedStr, events::GlobalAccountDataEventType,
serde::StringEnum,
};
/// Profile fields to specify in query.
///
/// This type can hold an arbitrary string. To build this with a custom value,
/// convert it from a string with `::from()` / `.into()`. To check for values
/// that are not available as a documented variant here, use its string
/// representation, obtained through [`.as_str()`](Self::as_str()).
#[derive(ToSchema, Clone, StringEnum)]
#[non_exhaustive]
pub enum ProfileField {
    /// Display name of the user.
    #[palpo_enum(rename = "displayname")]
    DisplayName,
    /// Avatar URL for the user's avatar.
    #[palpo_enum(rename = "avatar_url")]
    AvatarUrl,
    // Fallback variant holding any field name without a dedicated variant.
    #[doc(hidden)]
    #[salvo(schema(value_type = String))]
    _Custom(PrivOwnedStr),
}
/// Path arguments for endpoints scoped to a user and a room.
#[derive(ToParameters, Deserialize, Debug)]
pub struct UserRoomReqArgs {
    /// The user whose tags will be retrieved.
    #[salvo(parameter(parameter_in = Path))]
    pub user_id: OwnedUserId,
    /// The room from which tags will be retrieved.
    #[salvo(parameter(parameter_in = Path))]
    pub room_id: OwnedRoomId,
}
/// Path arguments for global (account-wide) account-data endpoints.
#[derive(ToParameters, Deserialize, Debug)]
pub struct UserEventTypeReqArgs {
    /// The ID of the user to set account_data for.
    ///
    /// The access token must be authorized to make requests for this user ID.
    #[salvo(parameter(parameter_in = Path))]
    pub user_id: OwnedUserId,
    /// The event type of the account_data to set.
    ///
    /// Custom types should be namespaced to avoid clashes.
    #[salvo(parameter(parameter_in = Path))]
    pub event_type: GlobalAccountDataEventType,
}
/// Path arguments for room-scoped account-data endpoints.
#[derive(ToParameters, Deserialize, Debug)]
pub struct UserRoomEventTypeReqArgs {
    /// The ID of the user to set account_data for.
    ///
    /// The access token must be authorized to make requests for this user ID.
    #[salvo(parameter(parameter_in = Path))]
    pub user_id: OwnedUserId,
    // Presumably the room the account data is scoped to — confirm against the
    // handlers using this struct.
    #[salvo(parameter(parameter_in = Path))]
    pub room_id: OwnedRoomId,
    /// The event type of the account_data to set.
    ///
    /// Custom types should be namespaced to avoid clashes.
    #[salvo(parameter(parameter_in = Path))]
    pub event_type: GlobalAccountDataEventType,
}
/// Path arguments for downloading a previously-uploaded filter.
#[derive(ToParameters, Deserialize, Debug)]
pub struct UserFilterReqArgs {
    /// The user ID to download a filter for.
    #[salvo(parameter(parameter_in = Path))]
    pub user_id: OwnedUserId,
    /// The ID of the filter to download.
    #[salvo(parameter(parameter_in = Path))]
    pub filter_id: String,
}
// /// GET /_matrix/federation/v1/query/profile
// /// `GET /_matrix/client/*/profile/{user_id}`
// ///
// /// Get all profile information of an user.
// /// `/v3/` ([spec])
// ///
// /// [spec]: https://spec.matrix.org/latest/client-server-api/#get_matrixclientv3profileuser_id
// const METADATA: Metadata = metadata! {
// method: GET,
// rate_limited: false,
// authentication: None,
// history: {
// 1.0 => "/_matrix/client/r0/profile/:user_id",
// 1.1 => "/_matrix/client/v3/profile/:user_id",
// }
// };
// /// Request type for the `get_profile` endpoint.
/// Response type for the `get_profile` endpoint.
#[derive(ToSchema, Deserialize, Serialize, Default, Debug)]
pub struct ProfileResBody {
    /// The user's avatar URL, if set.
    ///
    /// An empty string in incoming JSON deserializes to `None` (via
    /// `empty_string_as_none`).
    #[serde(
        skip_serializing_if = "Option::is_none",
        default,
        deserialize_with = "crate::serde::empty_string_as_none"
    )]
    pub avatar_url: Option<OwnedMxcUri>,
    /// The user's display name, if set.
    #[serde(skip_serializing_if = "Option::is_none", rename = "displayname")]
    pub display_name: Option<String>,
    /// The [BlurHash](https://blurha.sh) for the avatar pointed to by `avatar_url`.
    ///
    /// This uses the unstable prefix in
    /// [MSC2448](https://github.com/matrix-org/matrix-spec-proposals/pull/2448).
    #[serde(
        rename = "xyz.amorgan.blurhash",
        skip_serializing_if = "Option::is_none"
    )]
    pub blurhash: Option<String>,
}
impl ProfileResBody {
    /// Create a response carrying the given avatar URL and display name; the
    /// blurhash is left unset.
    pub fn new(avatar_url: Option<OwnedMxcUri>, display_name: Option<String>) -> Self {
        Self {
            avatar_url,
            display_name,
            ..Default::default()
        }
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/third_party_invite.rs | crates/core/src/third_party_invite.rs | //! Common types for [third-party invites].
//!
//! [third-party invites]: https://spec.matrix.org/latest/client-server-api/#third-party-invites
use std::ops::Deref;
use salvo::oapi::ToSchema;
use serde::{Deserialize, Serialize};
use crate::serde::{
Base64, Base64DecodeError,
base64::{Standard, UrlSafe},
};
/// A base64-encoded public key from an [identity server].
///
/// This type supports both standard and URL-safe base64, for [compatibility with Sydent].
///
/// No validation is done on the inner string during deserialization, this type is used for its
/// semantic value and for providing a helper to decode it.
///
/// The key can be decoded by calling [`IdentityServerBase64PublicKey::decode()`].
///
/// [identity server]: https://spec.matrix.org/latest/identity-service-api/
/// [compatibility with Sydent]: https://github.com/matrix-org/sydent/issues/593
#[derive(ToSchema, Debug, Clone, PartialEq, Eq, PartialOrd, Ord, Hash, Serialize, Deserialize)]
#[allow(clippy::exhaustive_structs)]
pub struct IdentityServerBase64PublicKey(/* unvalidated base64 text */ pub String);
impl IdentityServerBase64PublicKey {
    /// Encode the given bytes as unpadded standard base64.
    pub fn new(bytes: &[u8]) -> Self {
        Self(Base64::<Standard, &[u8]>::new(bytes).encode())
    }
    /// Decode the inner base64 string.
    ///
    /// The alphabet is auto-detected: the URL-safe alphabet is assumed
    /// whenever a `-` or `_` occurs in the string, otherwise the standard
    /// alphabet is used.
    pub fn decode(&self) -> Result<Vec<u8>, Base64DecodeError> {
        if self.0.contains(['-', '_']) {
            Ok(Base64::<UrlSafe>::parse(&self.0)?.into_inner())
        } else {
            Ok(Base64::<Standard>::parse(&self.0)?.into_inner())
        }
    }
}
impl From<String> for IdentityServerBase64PublicKey {
fn from(value: String) -> Self {
Self(value)
}
}
impl AsRef<str> for IdentityServerBase64PublicKey {
fn as_ref(&self) -> &str {
&self.0
}
}
impl Deref for IdentityServerBase64PublicKey {
type Target = str;
fn deref(&self) -> &Self::Target {
self.as_ref()
}
}
impl PartialEq<String> for IdentityServerBase64PublicKey {
fn eq(&self, other: &String) -> bool {
self.0.eq(other)
}
}
impl<'a> PartialEq<&'a str> for IdentityServerBase64PublicKey {
fn eq(&self, other: &&'a str) -> bool {
self.0.eq(other)
}
}
impl PartialEq<str> for IdentityServerBase64PublicKey {
fn eq(&self, other: &str) -> bool {
self.0.eq(other)
}
}
#[cfg(test)]
mod tests {
    use super::IdentityServerBase64PublicKey;
    /// `new` encodes with unpadded standard base64, and `decode` round-trips it.
    #[test]
    fn identity_server_base64_public_key_encode_then_decode() {
        let original = b"foobar";
        let encoded = IdentityServerBase64PublicKey::new(original);
        assert_eq!(encoded, "Zm9vYmFy");
        assert_eq!(encoded.decode().unwrap(), original);
    }
    /// The same payload decodes from both alphabets; `-` forces the URL-safe one.
    #[test]
    fn identity_server_base64_public_key_decode_standard_and_url_safe() {
        let original = &[60, 98, 62, 77, 68, 78, 60, 47, 98, 62];
        let standard_base64 = IdentityServerBase64PublicKey("PGI+TUROPC9iPg".to_owned());
        assert_eq!(standard_base64.decode().unwrap(), original);
        let urlsafe_base64 = IdentityServerBase64PublicKey("PGI-TUROPC9iPg".to_owned());
        assert_eq!(urlsafe_base64.decode().unwrap(), original);
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/serde.rs | crates/core/src/serde.rs | //! (De)serialization helpers for other Palpo use crate::s.
//!
//! Part of that is a fork of [serde_urlencoded], with support for sequences in
//! `Deserialize` / `Serialize` structs (e.g. `Vec<Something>`) that are
//! (de)serialized as `field=val1&field=val2`.
//!
//! [serde_urlencoded]: https://github.com/nox/serde_urlencoded
use std::{fmt, marker::PhantomData};
use serde::{
Deserialize, Deserializer,
de::{self, SeqAccess, DeserializeOwned, Visitor},
};
pub use serde_json::{
json,
value::{RawValue as RawJsonValue, Value as JsonValue, to_raw_value as to_raw_json_value},
};
pub mod base64;
mod buf;
pub mod can_be_empty;
pub mod canonical_json;
mod cow;
pub mod duration;
pub mod json_string;
pub(crate) mod pdu_process_response;
mod raw_json;
pub mod single_element_seq;
mod strings;
pub mod test;
pub use canonical_json::{
CanonicalJsonError, CanonicalJsonObject, CanonicalJsonValue, from_canonical_value,
to_canonical_object, to_canonical_value, validate_canonical_json,
};
pub use self::{
base64::{Base64, Base64DecodeError},
buf::{json_to_buf, slice_to_buf},
can_be_empty::{CanBeEmpty, is_empty},
cow::deserialize_cow_str,
raw_json::{JsonCastable, RawJson},
strings::{
btreemap_deserialize_v1_power_level_values, deserialize_as_f64_or_string,
deserialize_as_optional_f64_or_string, deserialize_v1_power_level, empty_string_as_none,
none_as_empty_string, vec_deserialize_int_power_level_values,
vec_deserialize_v1_power_level_values,
},
};
/// The inner type of [`JsonValue::Object`]: a string-keyed JSON map.
pub type JsonObject = serde_json::Map<String, JsonValue>;
/// Check whether a value is equal to its type's default value.
///
/// Handy for `#[serde(skip_serializing_if = "crate::serde::is_default")]`.
pub fn is_default<T: Default + PartialEq>(val: &T) -> bool {
    val == &T::default()
}
/// Deserialize a `T` via `Option<T>`, falling back to `T::default()`.
///
/// An explicit `null` deserializes to `None` and is therefore mapped to the
/// default value instead of failing.
pub fn none_as_default<'de, D, T>(deserializer: D) -> Result<T, D::Error>
where
    D: Deserializer<'de>,
    T: Default + Deserialize<'de>,
{
    Ok(Option::deserialize(deserializer)?.unwrap_or_default())
}
/// Returns `true`; for use in `#[serde(default = ...)]` attributes.
pub fn default_true() -> bool {
    true
}
/// Returns `false`; for use in `#[serde(default = ...)]` attributes.
pub fn default_false() -> bool {
    false
}
/// Dereferences the given bool; for `#[serde(skip_serializing_if = ...)]`.
#[allow(clippy::trivially_copy_pass_by_ref)]
pub fn is_true(b: &bool) -> bool {
    *b
}
/// Deserialize an `Option<T>`, treating an empty JSON object (`{}`) as `None`.
///
/// The raw JSON is first parsed as `Option<T>`; when that fails, the value is
/// re-checked against an empty struct and mapped to `None` if it is exactly
/// `{}`, otherwise the original error is returned.
pub fn empty_as_none<'de, D: Deserializer<'de>, T: for<'a> Deserialize<'a>>(
    deserializer: D,
) -> Result<Option<T>, D::Error> {
    let json = Box::<RawJsonValue>::deserialize(deserializer)?;
    let res = serde_json::from_str::<Option<T>>(json.get()).map_err(de::Error::custom);
    match res {
        Ok(a) => Ok(a),
        Err(e) => {
            // `deny_unknown_fields` makes this only match a literal `{}`.
            #[derive(Deserialize)]
            #[serde(deny_unknown_fields)]
            struct Empty {}
            if let Ok(Empty {}) = serde_json::from_str(json.get()) {
                Ok(None)
            } else {
                Err(e)
            }
        }
    }
}
/// Helper function for `serde_json::value::RawValue` deserialization.
///
/// Parses the raw JSON into `T`, converting any `serde_json` error into the
/// caller's error type via `E::custom`.
pub fn from_raw_json_value<'a, T, E>(val: &'a RawJsonValue) -> Result<T, E>
where
    T: Deserialize<'a>,
    E: de::Error,
{
    serde_json::from_str(val.get()).map_err(E::custom)
}
/// Helper function for returning a default value if deserialization of the type fails.
///
/// Assumes that the content being deserialized is JSON.
///
/// Used as `#[serde(deserialize_with = "default_on_error")]`.
pub fn default_on_error<'de, D, T>(deserializer: D) -> Result<T, D::Error>
where
    D: Deserializer<'de>,
    T: DeserializeOwned + Default,
{
    // First capture the raw JSON; if even that fails, log and use the default.
    let value = match Box::<RawJsonValue>::deserialize(deserializer) {
        Ok(value) => value,
        Err(error) => {
            debug!("deserialization error, using default value: {error}");
            return Ok(T::default());
        }
    };
    // Then try the real target type, again falling back (with a debug log).
    Ok(from_raw_json_value(&value).unwrap_or_else(|error: D::Error| {
        debug!("deserialization error, using default value: {error}");
        T::default()
    }))
}
/// Helper function for ignoring invalid items in a `Vec`, instead of letting
/// them cause the entire `Vec` to fail deserialization.
pub fn ignore_invalid_vec_items<'de, D, T>(deserializer: D) -> Result<Vec<T>, D::Error>
where
    D: Deserializer<'de>,
    T: Deserialize<'de>,
{
    /// Visitor that keeps successfully-deserialized elements and drops the rest.
    struct SkipInvalid<T>(PhantomData<T>);
    impl<'de, T> Visitor<'de> for SkipInvalid<T>
    where
        T: Deserialize<'de>,
    {
        type Value = Vec<T>;
        fn expecting(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
            formatter.write_str("Vec with possibly invalid items")
        }
        fn visit_seq<A>(self, mut seq: A) -> Result<Self::Value, A::Error>
        where
            A: SeqAccess<'de>,
        {
            let mut vec = Vec::new();
            // `transpose` flips `Result<Option<T>, E>` into `Option<Result<T, E>>`,
            // so the loop ends at the end of the sequence while an `Err` item is
            // simply skipped by the `continue` below.
            while let Some(result) = seq.next_element::<T>().transpose() {
                let Ok(elem) = result else {
                    continue;
                };
                vec.push(elem);
            }
            Ok(vec)
        }
    }
    deserializer.deserialize_seq(SkipInvalid(PhantomData))
}
pub use crate::macros::{
AsRefStr, DebugAsRefStr, DeserializeFromCowStr, DisplayAsRefStr, EqAsRefStr, FromString,
OrdAsRefStr, SerializeAsRefStr, StringEnum,
};
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/percent_encode.rs | crates/core/src/percent_encode.rs | use percent_encoding::{AsciiSet, CONTROLS};
/// The [path percent-encode set] as defined in the WHATWG URL standard + `/`
/// since we always encode single segments of the path.
///
/// [path percent-encode set]: https://url.spec.whatwg.org/#path-percent-encode-set
// NOTE(review): this builds on the percent-encoding crate's `CONTROLS` set —
// confirm it matches the WHATWG C0-control set expected by the standard.
pub(crate) const PATH_PERCENT_ENCODE_SET: &AsciiSet = &CONTROLS
    .add(b' ')
    .add(b'"')
    .add(b'#')
    .add(b'<')
    .add(b'>')
    .add(b'?')
    .add(b'`')
    .add(b'{')
    .add(b'}')
    .add(b'/');
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/html.rs | crates/core/src/html.rs | //! Opinionated HTML parsing and manipulating library.
//!
//! Like the rest of the Palpo crates, this crate is primarily meant to be used for
//! the Matrix protocol. It should be able to be used to interact with any HTML
//! document but will offer APIs focused on specificities of HTML in the Matrix
//! specification..
//!
//! # Features
//!
//! * `matrix` - Allow to convert HTML elements data into enums with variants for elements and
//! attributes [suggested by the Matrix Specification][spec].
//!
//! [spec]: https://spec.matrix.org/latest/client-server-api/#mroommessage-msgtypes
#![warn(missing_docs)]
#![cfg_attr(docsrs, feature(doc_cfg))]
use std::{
cell::RefCell,
collections::BTreeSet,
fmt, io,
iter::FusedIterator,
rc::{Rc, Weak},
};
use as_variant::as_variant;
use html5ever::serialize::{Serialize, SerializeOpts, Serializer, TraversalScope, serialize};
use html5ever::tendril::TendrilSink;
use html5ever::tree_builder::{NodeOrText, TreeSink};
pub use html5ever::{Attribute, LocalName, Namespace, Prefix, QualName, tendril::StrTendril};
use html5ever::{ParseOpts, local_name, ns, parse_fragment};
mod helpers;
mod sanitizer_config;
pub mod matrix;
pub use self::{helpers::*, sanitizer_config::*};
/// What [HTML elements and attributes] should be kept by the sanitizer.
///
/// [HTML elements and attributes]: https://spec.matrix.org/latest/client-server-api/#mroommessage-msgtypes
#[derive(Clone, Copy, Debug, PartialEq, Eq)]
#[allow(clippy::exhaustive_enums)]
pub enum HtmlSanitizerMode {
/// Keep only the elements and attributes suggested in the Matrix specification.
///
/// In addition to filtering elements and attributes listed in the Matrix specification, it
/// also removes elements that are nested more than 100 levels deep.
///
/// Deprecated elements and attributes are also replaced when applicable.
Strict,
/// Like `Strict` mode, with additional elements and attributes that are not yet included in
/// the spec, but are reasonable to keep.
///
/// Differences with `Strict` mode:
///
/// * The `matrix` scheme is allowed in links.
Compat,
}
/// An HTML fragment.
///
/// To get the serialized HTML, use its `Display` implementation. Due to the fact that the HTML is
/// parsed, note that malformed HTML and comments will be stripped from the output.
#[derive(Debug)]
pub struct Html {
document: NodeRef,
}
impl Html {
    /// Construct a new `Html` by parsing the given string.
    ///
    /// This is infallible, any error encountered while parsing the HTML is logged with
    /// `tracing::debug!`.
    pub fn parse(string: &str) -> Self {
        let sink = Self::default();
        // Parse as a fragment with a synthetic `div` context element, so plain
        // body-level markup round-trips without `html`/`head`/`body` wrappers.
        let mut parser = parse_fragment(
            sink,
            ParseOpts::default(),
            QualName::new(None, ns!(html), local_name!("div")),
            Vec::new(),
            true,
        );
        parser.process(string.into());
        parser.finish()
    }
    /// Sanitize this HTML according to the Matrix specification.
    ///
    /// This is equivalent to calling [`Self::sanitize_with()`] with a `config` value of
    /// `SanitizerConfig::compat().remove_reply_fallback()`.
    pub fn sanitize(&self) {
        let conf = SanitizerConfig::compat().remove_reply_fallback();
        self.sanitize_with(&conf);
    }
    /// Sanitize this HTML according to the given configuration.
    pub fn sanitize_with(&self, conf: &SanitizerConfig) {
        conf.clean(self);
    }
    /// Get the root node of the HTML.
    fn root(&self) -> NodeRef {
        // The fragment parser always puts one root element under the document node.
        self.document
            .first_child()
            .expect("html should always have a root node")
    }
    /// Whether the root node of the HTML has children.
    pub fn has_children(&self) -> bool {
        self.root().has_children()
    }
    /// The first child node of the root node of the HTML.
    ///
    /// Returns `None` if the root node has no children.
    pub fn first_child(&self) -> Option<NodeRef> {
        self.root().first_child()
    }
    /// The last child node of the root node of the HTML.
    ///
    /// Returns `None` if the root node has no children.
    pub fn last_child(&self) -> Option<NodeRef> {
        self.root().last_child()
    }
    /// Iterate through the children of the root node of the HTML.
    pub fn children(&self) -> Children {
        Children::new(self.first_child())
    }
}
impl Default for Html {
fn default() -> Self {
Self {
document: NodeRef::new(NodeData::Document),
}
}
}
// `html5ever` drives parsing through this sink, building the `NodeRef` tree.
impl TreeSink for Html {
    type Handle = NodeRef;
    type Output = Self;
    type ElemName<'a> = html5ever::ExpandedName<'a>;
    fn finish(self) -> Self::Output {
        self
    }
    // Parse errors are non-fatal; they are only logged.
    fn parse_error(&self, msg: std::borrow::Cow<'static, str>) {
        debug!("HTML parse error: {msg}");
    }
    fn get_document(&self) -> Self::Handle {
        self.document.clone()
    }
    fn elem_name<'a>(&'a self, target: &'a Self::Handle) -> html5ever::ExpandedName<'a> {
        // The parser only asks for names of element handles it created itself.
        target.as_element().expect("not an element").name.expanded()
    }
    fn create_element(
        &self,
        name: QualName,
        attrs: Vec<Attribute>,
        _flags: html5ever::tree_builder::ElementFlags,
    ) -> Self::Handle {
        NodeRef::new(NodeData::Element(ElementData {
            name,
            // Attributes are kept in a sorted set rather than the parser's Vec.
            attrs: RefCell::new(attrs.into_iter().collect()),
        }))
    }
    // Comments and processing instructions are deliberately dropped: they become
    // `NodeData::Other`, which the serializer skips.
    fn create_comment(&self, _text: StrTendril) -> Self::Handle {
        NodeRef::new(NodeData::Other)
    }
    fn create_pi(&self, _target: StrTendril, _data: StrTendril) -> Self::Handle {
        NodeRef::new(NodeData::Other)
    }
    fn append(&self, parent: &Self::Handle, child: NodeOrText<Self::Handle>) {
        match child {
            NodeOrText::AppendNode(node) => parent.append_child(node),
            NodeOrText::AppendText(text) => {
                // If the previous sibling is also text, add this text to it.
                if let Some(prev_text) = parent
                    .last_child()
                    .as_ref()
                    .and_then(|sibling| sibling.as_text())
                {
                    prev_text.borrow_mut().push_tendril(&text);
                } else {
                    let node = NodeRef::new(NodeData::Text(text.into()));
                    parent.append_child(node);
                }
            }
        }
    }
    fn append_based_on_parent_node(
        &self,
        element: &Self::Handle,
        prev_element: &Self::Handle,
        child: NodeOrText<Self::Handle>,
    ) {
        // Per the TreeSink contract: insert before `element` if it is attached,
        // otherwise append to `prev_element`.
        if element.0.parent.borrow().is_some() {
            self.append_before_sibling(element, child);
        } else {
            self.append(prev_element, child);
        }
    }
    // Doctypes are ignored; this sink only deals with fragments.
    fn append_doctype_to_document(
        &self,
        _name: StrTendril,
        _public_id: StrTendril,
        _system_id: StrTendril,
    ) {
    }
    fn get_template_contents(&self, target: &Self::Handle) -> Self::Handle {
        target.clone()
    }
    fn same_node(&self, x: &Self::Handle, y: &Self::Handle) -> bool {
        Rc::ptr_eq(&x.0, &y.0)
    }
    fn set_quirks_mode(&self, _mode: html5ever::tree_builder::QuirksMode) {}
    fn append_before_sibling(&self, sibling: &Self::Handle, new_node: NodeOrText<Self::Handle>) {
        match new_node {
            NodeOrText::AppendNode(node) => node.insert_before_sibling(sibling),
            NodeOrText::AppendText(text) => {
                // If the previous sibling is also text, add this text to it.
                if let Some(prev_text) = sibling
                    .prev_sibling()
                    .as_ref()
                    .and_then(|prev_sibling| prev_sibling.as_text())
                {
                    prev_text.borrow_mut().push_tendril(&text);
                } else {
                    let node = NodeRef::new(NodeData::Text(text.into()));
                    node.insert_before_sibling(sibling);
                }
            }
        }
    }
    fn add_attrs_if_missing(&self, target: &Self::Handle, attrs: Vec<Attribute>) {
        let element = target.as_element().unwrap();
        // NOTE(review): the set deduplicates by the whole `Attribute` — an attribute
        // with the same name but a different value is added alongside the old one,
        // not replaced. Confirm this matches the intended "if missing" semantics.
        element.attrs.borrow_mut().extend(attrs);
    }
    fn remove_from_parent(&self, target: &Self::Handle) {
        target.detach();
    }
    fn reparent_children(&self, node: &Self::Handle, new_parent: &Self::Handle) {
        // `take()` empties the old parent's child list in one move, then each
        // child's back-pointer is cleared before re-attaching.
        for child in node.0.children.take() {
            child.0.parent.take();
            new_parent.append_child(child);
        }
    }
}
impl Serialize for Html {
    fn serialize<S>(&self, serializer: &mut S, traversal_scope: TraversalScope) -> io::Result<()>
    where
        S: Serializer,
    {
        // The virtual document node has no markup of its own, so only the
        // `IncludeNode` scope produces output: its children, in order.
        if let TraversalScope::ChildrenOnly(_) = traversal_scope {
            return Ok(());
        }
        for child in self.children() {
            child.serialize(serializer)?;
        }
        Ok(())
    }
}
impl fmt::Display for Html {
fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
let mut u8_vec = Vec::new();
serialize(
&mut u8_vec,
self,
SerializeOpts {
traversal_scope: TraversalScope::IncludeNode,
..Default::default()
},
)
.unwrap();
f.write_str(&String::from_utf8(u8_vec).unwrap())?;
Ok(())
}
}
/// An HTML node.
#[derive(Debug)]
#[non_exhaustive]
struct Node {
parent: RefCell<Option<Weak<Node>>>,
children: RefCell<Vec<NodeRef>>,
data: NodeData,
}
impl Node {
/// Constructs a new `NodeRef` with the given data.
fn new(data: NodeData) -> Self {
Self {
parent: Default::default(),
children: Default::default(),
data,
}
}
/// Returns the data of this `Node` if it is an Element (aka an HTML tag).
fn as_element(&self) -> Option<&ElementData> {
as_variant!(&self.data, NodeData::Element)
}
/// Returns the text content of this `Node`, if it is a `NodeData::Text`.
fn as_text(&self) -> Option<&RefCell<StrTendril>> {
as_variant!(&self.data, NodeData::Text)
}
/// Whether this is the root node of the HTML document.
fn is_root(&self) -> bool {
// The root node is the `html` element.
matches!(&self.data, NodeData::Element(element_data) if element_data.name.local.as_bytes() == b"html")
}
/// The parent of this node, if any.
fn parent(&self) -> Option<NodeRef> {
self.parent.borrow().as_ref()?.upgrade().map(NodeRef)
}
}
/// The data of a `Node`.
#[derive(Debug, Clone)]
#[allow(clippy::exhaustive_enums)]
pub enum NodeData {
/// The root node of the `Html`.
Document,
/// A text node.
Text(RefCell<StrTendril>),
/// An HTML element (aka a tag).
Element(ElementData),
/// Other types (comment, processing instruction, …).
Other,
}
/// The data of an HTML element.
#[derive(Debug, Clone)]
#[allow(clippy::exhaustive_structs)]
pub struct ElementData {
/// The qualified name of the element.
pub name: QualName,
/// The attributes of the element.
pub attrs: RefCell<BTreeSet<Attribute>>,
}
impl ElementData {
    /// Convert this element data to typed data as [suggested by the Matrix Specification][spec].
    ///
    /// [spec]: https://spec.matrix.org/latest/client-server-api/#mroommessage-msgtypes
    pub fn to_matrix(&self) -> matrix::MatrixElementData {
        let attrs = self.attrs.borrow();
        matrix::MatrixElementData::parse(&self.name, &attrs)
    }
}
/// A reference to an HTML node.
#[derive(Debug, Clone)]
#[non_exhaustive]
pub struct NodeRef(Rc<Node>);
impl NodeRef {
    /// Constructs a new `NodeRef` with the given data.
    fn new(data: NodeData) -> Self {
        Self(Node::new(data).into())
    }
    /// Detach this node from the tree, if it has a parent.
    pub(crate) fn detach(&self) {
        if let Some((parent, index)) = self.parent_and_index() {
            // Remove from the parent's child list first, then clear the back-pointer.
            parent.0.children.borrow_mut().remove(index);
            self.0.parent.take();
        }
    }
    /// Append the given child node to this node.
    ///
    /// The child node is detached from its previous position.
    fn append_child(&self, child: NodeRef) {
        child.detach();
        // Parent links are `Weak` to avoid `Rc` reference cycles.
        child.0.parent.replace(Some(Rc::downgrade(&self.0)));
        self.0.children.borrow_mut().push(child);
    }
    /// If this node has a parent, get it and the node's position in the parent's children.
    fn parent_and_index(&self) -> Option<(NodeRef, usize)> {
        let parent = self.0.parent()?;
        // Linear scan by pointer identity; a node with a parent must appear in
        // that parent's child list, hence the `expect`.
        let i = parent
            .0
            .children
            .borrow()
            .iter()
            .position(|child| Rc::ptr_eq(&child.0, &self.0))
            .expect("child should be in parent's children");
        Some((parent, i))
    }
    /// Insert this node before the given sibling.
    ///
    /// This node is detached from its previous position.
    pub(crate) fn insert_before_sibling(&self, sibling: &NodeRef) {
        self.detach();
        let (parent, index) = sibling
            .parent_and_index()
            .expect("sibling should have parent");
        self.0.parent.replace(Some(Rc::downgrade(&parent.0)));
        parent.0.children.borrow_mut().insert(index, self.clone());
    }
    /// Constructs a new element `NodeRef` with the same data as this one, but with a different
    /// element name and use it to replace this one in the parent.
    ///
    /// Panics if this node is not in the tree and is not an element node.
    pub(crate) fn replace_with_element_name(self, name: LocalName) -> NodeRef {
        let mut element_data = self.as_element().unwrap().clone();
        element_data.name.local = name;
        let new_node = NodeRef::new(NodeData::Element(element_data));
        // Moving each child detaches it from `self`; the `Children` iterator has
        // already captured the next sibling before the move, so iteration is safe.
        for child in self.children() {
            new_node.append_child(child);
        }
        new_node.insert_before_sibling(&self);
        self.detach();
        new_node
    }
    /// The data of the node.
    pub fn data(&self) -> &NodeData {
        &self.0.data
    }
    /// Returns the data of this `Node` if it is an Element (aka an HTML tag).
    pub fn as_element(&self) -> Option<&ElementData> {
        self.0.as_element()
    }
    /// Returns the text content of this `Node`, if it is a `NodeData::Text`.
    pub fn as_text(&self) -> Option<&RefCell<StrTendril>> {
        self.0.as_text()
    }
    /// The parent node of this node.
    ///
    /// Returns `None` if the parent is the root node.
    pub fn parent(&self) -> Option<NodeRef> {
        let parent = self.0.parent()?;
        // We don't want users to be able to navigate to the root.
        if parent.0.is_root() {
            return None;
        }
        Some(parent)
    }
    /// The next sibling node of this node.
    ///
    /// Returns `None` if this is the last of its siblings.
    pub fn next_sibling(&self) -> Option<NodeRef> {
        let (parent, index) = self.parent_and_index()?;
        // `checked_add` guards against overflow; `get` handles "last sibling".
        let index = index.checked_add(1)?;
        parent.0.children.borrow().get(index).cloned()
    }
    /// The previous sibling node of this node.
    ///
    /// Returns `None` if this is the first of its siblings.
    pub fn prev_sibling(&self) -> Option<NodeRef> {
        let (parent, index) = self.parent_and_index()?;
        let index = index.checked_sub(1)?;
        parent.0.children.borrow().get(index).cloned()
    }
    /// Whether this node has children.
    pub fn has_children(&self) -> bool {
        !self.0.children.borrow().is_empty()
    }
    /// The first child node of this node.
    ///
    /// Returns `None` if this node has no children.
    pub fn first_child(&self) -> Option<NodeRef> {
        self.0.children.borrow().first().cloned()
    }
    /// The last child node of this node.
    ///
    /// Returns `None` if this node has no children.
    pub fn last_child(&self) -> Option<NodeRef> {
        self.0.children.borrow().last().cloned()
    }
    /// Get an iterator through the children of this node.
    pub fn children(&self) -> Children {
        Children::new(self.first_child())
    }
    /// Recursively serialize this node and its subtree.
    ///
    /// Comments and other `NodeData::Other` nodes produce no output.
    pub(crate) fn serialize<S>(&self, serializer: &mut S) -> io::Result<()>
    where
        S: Serializer,
    {
        match self.data() {
            NodeData::Element(data) => {
                serializer.start_elem(
                    data.name.clone(),
                    data.attrs
                        .borrow()
                        .iter()
                        .map(|attr| (&attr.name, &*attr.value)),
                )?;
                for child in self.children() {
                    child.serialize(serializer)?;
                }
                serializer.end_elem(data.name.clone())?;
                Ok(())
            }
            NodeData::Document => {
                // The document node itself has no markup; emit only children.
                for child in self.children() {
                    child.serialize(serializer)?;
                }
                Ok(())
            }
            NodeData::Text(text) => serializer.write_text(&text.borrow()),
            _ => Ok(()),
        }
    }
}
/// An iterator through the children of a node.
///
/// Can be constructed with [`Html::children()`] or [`NodeRef::children()`].
#[derive(Debug, Clone)]
pub struct Children {
next: Option<NodeRef>,
}
impl Children {
/// Construct a `Children` starting from the given node.
fn new(start_node: Option<NodeRef>) -> Self {
Self { next: start_node }
}
}
impl Iterator for Children {
    type Item = NodeRef;

    fn next(&mut self) -> Option<Self::Item> {
        let current = self.next.take()?;
        // Capture the following sibling before handing the current node out,
        // so callers may detach/move `current` without breaking iteration.
        self.next = current.next_sibling();
        Some(current)
    }
}
impl FusedIterator for Children {}
#[cfg(test)]
mod tests {
use super::Html;
#[test]
fn sanity() {
let html = "\
<h1>Title</h1>\
<div>\
<p>This is some <em>text</em></p>\
</div>\
";
assert_eq!(Html::parse(html).to_string(), html);
assert_eq!(Html::parse("").to_string(), "");
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/power_levels.rs | crates/core/src/power_levels.rs | //! Common types for the [`m.room.power_levels` event][power_levels].
//!
//! [power_levels]: https://spec.matrix.org/latest/client-server-api/#mroompower_levels
use salvo::prelude::*;
use serde::{Deserialize, Serialize};
/// The power level requirements for specific notification types.
#[derive(ToSchema, Deserialize, Serialize, Clone, Debug)]
pub struct NotificationPowerLevels {
/// The level required to trigger an `@room` notification.
#[serde(
default = "default_power_level",
deserialize_with = "crate::serde::deserialize_v1_power_level"
)]
pub room: i64,
}
impl NotificationPowerLevels {
/// Create a new `NotificationPowerLevels` with all-default values.
pub fn new() -> Self {
Self {
room: default_power_level(),
}
}
/// Value associated with the given `key`.
pub fn get(&self, key: &str) -> Option<&i64> {
match key {
"room" => Some(&self.room),
_ => None,
}
}
/// Whether all fields have their default values.
pub fn is_default(&self) -> bool {
self.room == default_power_level()
}
}
impl Default for NotificationPowerLevels {
fn default() -> Self {
Self::new()
}
}
/// Used to default power levels to 50 during deserialization.
/// Used to default power levels to 50 during deserialization.
pub fn default_power_level() -> i64 {
    const DEFAULT: i64 = 50;
    DEFAULT
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/lib.rs | crates/core/src/lib.rs | #![allow(missing_docs, dead_code)]
pub mod appservice;
pub mod authentication;
pub mod client;
pub mod device;
pub mod directory;
pub mod encryption;
pub mod events;
pub mod federation;
#[cfg(feature = "html")]
pub mod html;
pub mod identifiers;
pub mod metadata;
mod percent_encode;
pub mod power_levels;
pub mod presence;
pub mod push;
pub mod room;
pub mod room_version_rules;
pub mod serde;
pub mod signatures;
pub mod space;
pub mod third_party;
pub mod third_party_invite;
mod time;
pub mod to_device;
pub use metadata::{MatrixVersion, SupportedVersions};
pub mod error;
pub use error::{MatrixError, UnknownVersionError};
#[macro_use]
pub mod sending;
#[macro_use]
extern crate tracing;
pub mod auth_scheme;
pub mod http_headers;
pub mod media;
// pub mod path_builder;
pub mod state;
pub mod user;
pub mod utils;
pub use palpo_core_macros as macros;
// https://github.com/bkchr/proc-macro-crate/issues/10
extern crate self as palpo_core;
use std::fmt;
use ::serde::{Deserialize, Serialize};
use salvo::oapi::{Components, RefOr, Schema, ToSchema};
pub use self::identifiers::*;
pub use self::time::{UnixMillis, UnixSeconds};
pub type Seqnum = i64;
pub type MatrixResult<T> = Result<T, MatrixError>;
// Wrapper around `Box<str>` that cannot be used in a meaningful way outside of
// this crate:: Used for string enums because their `_Custom` variant can't be
// truly private (only `#[doc(hidden)]`).
#[doc(hidden)]
#[derive(Clone, PartialEq, Eq, PartialOrd, Deserialize, Ord, Hash)]
pub struct PrivOwnedStr(Box<str>);
impl fmt::Debug for PrivOwnedStr {
    // Delegate to the inner string's `Debug`, keeping the wrapper invisible.
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        fmt::Debug::fmt(&self.0, f)
    }
}
impl ToSchema for PrivOwnedStr {
    // Document the wrapper as a plain string in the OpenAPI schema.
    fn to_schema(components: &mut Components) -> RefOr<Schema> {
        String::to_schema(components)
    }
}
/// The direction to return events from.
#[derive(ToSchema, Clone, Copy, Debug, Default, PartialEq, Eq, Deserialize, Serialize)]
#[allow(clippy::exhaustive_enums)]
pub enum Direction {
/// Return events backwards in time from the requested `from` token.
#[default]
#[serde(rename = "b")]
Backward,
/// Return events forwards in time from the requested `from` token.
#[serde(rename = "f")]
Forward,
}
/// A boolean whose `false` case carries a reason of type `T`.
pub enum ReasonBool<T> {
    True,
    False(T),
}

impl<T> ReasonBool<T> {
    /// Returns `true` iff this is the `True` variant.
    fn value(&self) -> bool {
        match self {
            ReasonBool::True => true,
            ReasonBool::False(_) => false,
        }
    }
}
/// Re-__private used by macro-generated code.
///
/// It is not considered part of this module's public API.
#[doc(hidden)]
pub mod __private {
pub use crate::macros;
pub use bytes;
pub use http;
pub use serde;
pub use serde_json;
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/device.rs | crates/core/src/device.rs | use std::collections::BTreeMap;
use salvo::prelude::*;
use serde::{self, Deserialize, Serialize};
use crate::{
OwnedDeviceId, OwnedTransactionId, OwnedUserId,
encryption::DeviceKeys,
events::{AnyToDeviceEventContent, ToDeviceEventType},
serde::RawJson,
to_device::DeviceIdOrAllDevices,
};
/// Information on E2E device updates.
#[derive(ToSchema, Clone, Debug, Default, Deserialize, Serialize)]
pub struct DeviceLists {
/// List of users who have updated their device identity keys or who now
/// share an encrypted room with the client since the previous sync.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub changed: Vec<OwnedUserId>,
/// List of users who no longer share encrypted rooms since the previous
/// sync response.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub left: Vec<OwnedUserId>,
}
impl DeviceLists {
/// Creates an empty `DeviceLists`.
pub fn new() -> Self {
Default::default()
}
/// Returns true if there are no device list updates.
pub fn is_empty(&self) -> bool {
self.changed.is_empty() && self.left.is_empty()
}
}
/// The description of the direct-to- device message.
#[derive(ToSchema, Deserialize, Serialize, Clone, Debug)]
pub struct DeviceListUpdateContent {
/// The user ID who owns the device.
pub user_id: OwnedUserId,
/// The ID of the device whose details are changing.
pub device_id: OwnedDeviceId,
/// The public human-readable name of this device.
///
/// Will be absent if the device has no name.
#[serde(skip_serializing_if = "Option::is_none")]
pub device_display_name: Option<String>,
/// An ID sent by the server for this update, unique for a given user_id.
pub stream_id: u64,
/// The stream_ids of any prior m.device_list_update EDUs sent for this user
/// which have not been referred to already in an EDU's prev_id field.
#[serde(default, skip_serializing_if = "Vec::is_empty")]
pub prev_id: Vec<u64>,
/// True if the server is announcing that this device has been deleted.
#[serde(skip_serializing_if = "Option::is_none")]
pub deleted: Option<bool>,
/// The updated identity keys (if any) for this device.
#[serde(skip_serializing_if = "Option::is_none")]
pub keys: Option<RawJson<DeviceKeys>>,
}
impl DeviceListUpdateContent {
/// Create a new `DeviceListUpdateContent` with the given `user_id`,
/// `device_id` and `stream_id`.
pub fn new(user_id: OwnedUserId, device_id: OwnedDeviceId, stream_id: u64) -> Self {
Self {
user_id,
device_id,
device_display_name: None,
stream_id,
prev_id: vec![],
deleted: None,
keys: None,
}
}
}
/// The description of the direct-to- device message.
#[derive(ToSchema, Deserialize, Serialize, Clone, Debug)]
pub struct DirectDeviceContent {
/// The user ID of the sender.
pub sender: OwnedUserId,
/// Event type for the message.
#[serde(rename = "type")]
pub ev_type: ToDeviceEventType,
/// Unique utf8 string ID for the message, used for idempotency.
pub message_id: OwnedTransactionId,
/// The contents of the messages to be sent.
///
/// These are arranged in a map of user IDs to a map of device IDs to
/// message bodies. The device ID may also be *, meaning all known
/// devices for the user.
#[salvo(schema(value_type = Object, additional_properties = true))]
pub messages: DirectDeviceMessages,
}
impl DirectDeviceContent {
/// Creates a new `DirectDeviceContent` with the given `sender, `ev_type`
/// and `message_id`.
pub fn new(
sender: OwnedUserId,
ev_type: ToDeviceEventType,
message_id: OwnedTransactionId,
) -> Self {
Self {
sender,
ev_type,
message_id,
messages: DirectDeviceMessages::new(),
}
}
}
/// Direct device message contents.
///
/// Represented as a map of `{ user-ids => { device-ids => message-content } }`.
pub type DirectDeviceMessages =
BTreeMap<OwnedUserId, BTreeMap<DeviceIdOrAllDevices, RawJson<AnyToDeviceEventContent>>>;
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/sending.rs | crates/core/src/sending.rs | use std::{
future::Future,
ops::{Deref, DerefMut},
};
use reqwest::{Client as ReqwestClient, ClientBuilder, Request as ReqwestRequest};
use salvo::http::{HeaderName, HeaderValue, Method, header::CONTENT_TYPE};
use serde::Deserialize;
use thiserror::Error;
use url::{ParseError, Url};
/// Build a `reqwest` client with default configuration.
///
/// # Panics
///
/// Panics if the default TLS backend cannot be initialized.
///
/// NOTE(review): this constructs a new client — and a new connection pool — on
/// every call; call sites issuing many requests may want to share one client.
pub fn client() -> ReqwestClient {
    ClientBuilder::new()
        .build()
        .expect("default reqwest client configuration should always build")
}
#[derive(Debug)]
pub struct SendRequest {
inner: ReqwestRequest,
}
#[macro_export]
macro_rules! json_body_modifier {
($name:ident) => {
impl $crate::sending::SendModifier for $name {
fn modify(
self,
request: &mut $crate::sending::SendRequest,
) -> Result<(), $crate::sending::SendError> {
let bytes = serde_json::to_vec(&self)?;
*request.body_mut() = Some(bytes.into());
Ok(())
}
}
};
}
macro_rules! method {
($name:ident, $method:ident) => {
pub fn $name(url: Url) -> SendRequest {
SendRequest {
inner: ReqwestRequest::new(Method::$method, url),
}
}
};
}
method!(get, GET);
method!(patch, PATCH);
method!(put, PUT);
method!(post, POST);
method!(delete, DELETE);
#[derive(Error, Debug)]
pub enum SendError {
#[error("parse url: `{0}`")]
Url(#[from] ParseError),
#[error("reqwest: `{0}`")]
Reqwest(#[from] reqwest::Error),
#[error("json: `{0}`")]
Json(#[from] serde_json::Error),
#[error("other: `{0}`")]
Other(String),
}
impl SendError {
pub fn other(msg: impl Into<String>) -> Self {
Self::Other(msg.into())
}
}
pub type SendResult<T> = Result<T, SendError>;
impl SendRequest {
    // Associated constructors for each HTTP verb, e.g. `SendRequest::get(url)`.
    // These mirror the free functions of the same names at module level.
    method!(get, GET);
    method!(patch, PATCH);
    method!(put, PUT);
    method!(post, POST);
    method!(delete, DELETE);
    /// Unwrap into the underlying `reqwest::Request`.
    pub fn into_inner(self) -> reqwest::Request {
        self.inner
    }
    /// Apply `modifier` to the request (e.g. set a JSON body or add a header),
    /// then default the `Content-Type` header to `application/json` if the
    /// modifier did not set one.
    pub fn stuff(mut self, modifier: impl SendModifier) -> Result<Self, SendError> {
        modifier.modify(&mut self)?;
        if !self.headers().contains_key(CONTENT_TYPE) {
            self.headers_mut()
                .insert(CONTENT_TYPE, "application/json".parse().unwrap());
        }
        Ok(self)
    }
    /// Execute the request with a freshly built client and deserialize the
    /// response body as JSON into `R`.
    pub async fn load<R>(self) -> Result<R, SendError>
    where
        R: for<'de> Deserialize<'de>,
    {
        let res = client().execute(self.inner).await?;
        res.json().await.map_err(SendError::Reqwest)
    }
    /// Like [`Self::load`], but reuses the given client.
    pub async fn load_by_client<R>(self, client: ReqwestClient) -> Result<R, SendError>
    where
        R: for<'de> Deserialize<'de>,
    {
        let res = client.execute(self.inner).await?;
        res.json().await.map_err(SendError::Reqwest)
    }
    // NOTE(review): `send` is byte-for-byte identical to `load` (and
    // `send_by_client` to `load_by_client`); consider consolidating to one pair.
    pub async fn send<R>(self) -> Result<R, SendError>
    where
        R: for<'de> Deserialize<'de>,
    {
        let res = client().execute(self.inner).await?;
        res.json().await.map_err(SendError::Reqwest)
    }
    pub async fn send_by_client<R>(self, client: ReqwestClient) -> Result<R, SendError>
    where
        R: for<'de> Deserialize<'de>,
    {
        let res = client.execute(self.inner).await?;
        res.json().await.map_err(SendError::Reqwest)
    }
    /// Execute the request and return the raw `reqwest::Response` future.
    pub fn exec(self) -> impl Future<Output = Result<reqwest::Response, reqwest::Error>> {
        client().execute(self.inner)
    }
}
// Deref to the inner `reqwest::Request` so callers can use its accessors
// (`headers()`, `url()`, ...) directly on `SendRequest`.
impl Deref for SendRequest {
    type Target = ReqwestRequest;
    fn deref(&self) -> &Self::Target {
        &self.inner
    }
}
impl DerefMut for SendRequest {
    fn deref_mut(&mut self) -> &mut Self::Target {
        &mut self.inner
    }
}
pub trait SendModifier {
fn modify(self, request: &mut SendRequest) -> Result<(), SendError>;
}
// A `(name, value)` pair used as a modifier appends itself as a header.
impl SendModifier for (HeaderName, HeaderValue) {
    fn modify(self, request: &mut SendRequest) -> Result<(), SendError> {
        let (name, value) = self;
        request.headers_mut().append(name, value);
        Ok(())
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/presence.rs | crates/core/src/presence.rs | //! Common types for the [presence module][presence].
//!
//! [presence]: https://spec.matrix.org/latest/client-server-api/#presence
use salvo::prelude::*;
use serde::{Deserialize, Serialize};
use crate::{OwnedUserId, PrivOwnedStr, serde::StringEnum};
/// A description of a user's connectivity and availability for chat.
#[doc = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/src/doc/string_enum.md"))]
#[derive(ToSchema, Clone, Default, StringEnum)]
#[palpo_enum(rename_all = "snake_case")]
#[non_exhaustive]
pub enum PresenceState {
/// Disconnected from the service.
Offline,
/// Connected to the service.
#[default]
Online,
/// Connected to the service but not available for chat.
Unavailable,
#[doc(hidden)]
#[salvo(schema(value_type = String))]
_Custom(PrivOwnedStr),
}
// Mirrors `PresenceState`'s own `Default` (`Online`) for contexts that work
// with references instead of owned values.
impl Default for &'_ PresenceState {
    fn default() -> Self {
        &PresenceState::Online
    }
}
/// The content for "m.presence" Edu.
#[derive(ToSchema, Deserialize, Serialize, Clone, Debug)]
pub struct PresenceContent {
/// A list of presence updates that the receiving server is likely to be
/// interested in.
pub push: Vec<PresenceUpdate>,
}
impl PresenceContent {
/// Creates a new `PresenceContent`.
pub fn new(push: Vec<PresenceUpdate>) -> Self {
Self { push }
}
}
/// An update to the presence of a user.
#[derive(ToSchema, Deserialize, Serialize, Clone, Debug)]
pub struct PresenceUpdate {
/// The user ID this presence EDU is for.
pub user_id: OwnedUserId,
/// The presence of the user.
pub presence: PresenceState,
/// An optional description to accompany the presence.
#[serde(skip_serializing_if = "Option::is_none")]
pub status_msg: Option<String>,
/// The number of milliseconds that have elapsed since the user last did
/// something.
pub last_active_ago: u64,
/// Whether or not the user is currently active.
///
/// Defaults to false.
#[serde(default, skip_serializing_if = "crate::serde::is_default")]
pub currently_active: bool,
}
impl PresenceUpdate {
/// Creates a new `PresenceUpdate` with the given `user_id`, `presence` and
/// `last_activity`.
pub fn new(user_id: OwnedUserId, presence: PresenceState, last_activity: u64) -> Self {
Self {
user_id,
presence,
last_active_ago: last_activity,
status_msg: None,
currently_active: false,
}
}
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/identifiers.rs | crates/core/src/identifiers.rs | //! Types for [Matrix](https://matrix.org/) identifiers for devices, events, keys, rooms, servers,
//! users and URIs.
// FIXME: Remove once lint doesn't trigger on std::convert::TryFrom in identifiers/macros.rs anymore
#![allow(unused_qualifications)]
pub use palpo_identifiers_validation::KeyName;
#[doc(inline)]
pub use palpo_identifiers_validation::error::{
Error as IdParseError, MatrixIdError, MatrixToError, MatrixUriError, MxcUriError,
VoipVersionIdError,
};
use serde::de::{self, Deserializer, Unexpected};
#[doc(inline)]
pub use self::{
base64_public_key::{Base64PublicKey, OwnedBase64PublicKey},
base64_public_key_or_device_id::{Base64PublicKeyOrDeviceId, OwnedBase64PublicKeyOrDeviceId},
client_secret::{ClientSecret, OwnedClientSecret},
crypto_algorithms::{
DeviceKeyAlgorithm, EventEncryptionAlgorithm, KeyDerivationAlgorithm, OneTimeKeyAlgorithm,
SigningKeyAlgorithm,
},
device_id::{DeviceId, OwnedDeviceId},
event_id::{EventId, OwnedEventId},
key_id::{
AnyKeyName, DeviceKeyId, DeviceSigningKeyId, KeyId, OwnedDeviceKeyId,
OwnedDeviceSigningKeyId, OwnedKeyId, OwnedServerSigningKeyId, OwnedSigningKeyId,
ServerSigningKeyId, SigningKeyId,
},
matrix_uri::{MatrixToUri, MatrixUri},
mxc_uri::{Mxc, MxcUri, OwnedMxcUri},
one_time_key_name::OneTimeKeyName,
room_alias_id::{OwnedRoomAliasId, RoomAliasId},
room_id::{OwnedRoomId, RoomId},
room_or_alias_id::{OwnedRoomOrAliasId, RoomOrAliasId},
room_version_id::RoomVersionId,
server_name::{OwnedServerName, ServerName},
server_signing_key_version::{OwnedServerSigningKeyVersion, ServerSigningKeyVersion},
session_id::{OwnedSessionId, SessionId},
signatures::{
CrossSigningOrDeviceSignatures, DeviceSignatures, EntitySignatures, ServerSignatures,
Signatures,
},
space_child_order::{OwnedSpaceChildOrder, SpaceChildOrder},
transaction_id::{OwnedTransactionId, TransactionId},
user_id::{OwnedUserId, UserId},
voip_id::{OwnedVoipId, VoipId},
voip_version_id::VoipVersionId,
};
pub mod matrix_uri;
pub mod user_id;
mod base64_public_key;
mod base64_public_key_or_device_id;
mod client_secret;
mod crypto_algorithms;
mod device_id;
mod event_id;
mod key_id;
mod mxc_uri;
mod one_time_key_name;
mod room_alias_id;
mod room_id;
mod room_or_alias_id;
mod room_version_id;
mod server_name;
mod server_signing_key_version;
mod session_id;
mod signatures;
mod space_child_order;
mod transaction_id;
mod voip_id;
mod voip_version_id;
/// Generates a random identifier localpart.
fn generate_localpart(length: usize) -> Box<str> {
    use rand::Rng as _;
    // Draw `length` alphanumeric ASCII characters from the thread-local RNG.
    let localpart: String = rand::rng()
        .sample_iter(&rand::distr::Alphanumeric)
        .map(char::from)
        .take(length)
        .collect();
    localpart.into_boxed_str()
}
/// Deserializes any type of id using the provided `TryFrom` implementation.
///
/// This is a helper function to reduce the boilerplate of the `Deserialize`
/// implementations.
///
/// `expected_str` is a human-readable description of the expected format,
/// reported in the serde error when validation fails.
fn deserialize_id<'de, D, T>(deserializer: D, expected_str: &str) -> Result<T, D::Error>
where
    D: Deserializer<'de>,
    T: for<'a> TryFrom<&'a str>,
{
    // Borrow the input as a `Cow<str>` (avoids copying when the deserializer
    // can hand out borrowed data), then run the id type's own validation.
    crate::serde::deserialize_cow_str(deserializer).and_then(|v| {
        // The concrete `TryFrom` error is discarded; the serde error instead
        // reports the offending string together with `expected_str`.
        T::try_from(&v).map_err(|_| de::Error::invalid_value(Unexpected::Str(&v), &expected_str))
    })
}
/// Shorthand for `<&DeviceId>::from`.
///
/// Unlike the `:literal` macros below, this takes any expression and performs
/// a plain (infallible) `From` conversion — no compile-time validation.
#[macro_export]
macro_rules! device_id {
    ($s:expr) => {
        <&$crate::DeviceId as ::std::convert::From<_>>::from($s)
    };
}
/// Shorthand for `OwnedDeviceId::from`.
///
/// Owned counterpart of [`device_id!`]; likewise an infallible conversion.
#[macro_export]
macro_rules! owned_device_id {
    ($s:expr) => {
        <$crate::OwnedDeviceId as ::std::convert::From<_>>::from($s)
    };
}
#[doc(hidden)]
pub mod __private_macros {
    // Re-export the compile-time-checking macros so the `macro_rules!`
    // wrappers below can reach them via `$crate::__private_macros::...` from
    // downstream crates. Implementation detail, not public API.
    pub use crate::macros::{
        event_id, mxc_uri, room_alias_id, room_id, room_version_id, server_name,
        server_signing_key_version, user_id,
    };
}
/// Compile-time checked [`DeviceKeyId`] construction.
///
/// Accepts a string literal and validates it during macro expansion.
// NOTE(review): `device_key_id` is not among the visible `__private_macros`
// re-exports above — confirm it is actually re-exported, otherwise this
// macro cannot resolve at its use sites.
#[macro_export]
macro_rules! device_key_id {
    ($s:literal) => {
        $crate::__private_macros::device_key_id!($crate, $s)
    };
}
/// Compile-time checked [`OwnedDeviceKeyId`] construction.
///
/// Validates via [`device_key_id!`], then converts to the owned type.
#[macro_export]
macro_rules! owned_device_key_id {
    ($s:literal) => {
        $crate::device_key_id!($s).to_owned()
    };
}
/// Compile-time checked [`EventId`] construction.
#[macro_export]
macro_rules! event_id {
    ($s:literal) => {
        $crate::__private_macros::event_id!($crate, $s)
    };
}
/// Compile-time checked [`OwnedEventId`] construction.
#[macro_export]
macro_rules! owned_event_id {
    ($s:literal) => {
        $crate::event_id!($s).to_owned()
    };
}
/// Compile-time checked [`RoomAliasId`] construction.
#[macro_export]
macro_rules! room_alias_id {
    ($s:literal) => {
        $crate::__private_macros::room_alias_id!($crate, $s)
    };
}
/// Compile-time checked [`OwnedRoomAliasId`] construction.
#[macro_export]
macro_rules! owned_room_alias_id {
    ($s:literal) => {
        $crate::room_alias_id!($s).to_owned()
    };
}
/// Compile-time checked [`RoomId`] construction.
#[macro_export]
macro_rules! room_id {
    ($s:literal) => {
        $crate::__private_macros::room_id!($crate, $s)
    };
}
/// Compile-time checked [`OwnedRoomId`] construction.
#[macro_export]
macro_rules! owned_room_id {
    ($s:literal) => {
        $crate::room_id!($s).to_owned()
    };
}
/// Compile-time checked [`RoomVersionId`] construction.
#[macro_export]
macro_rules! room_version_id {
    ($s:literal) => {
        $crate::__private_macros::room_version_id!($crate, $s)
    };
}
/// Compile-time checked [`ServerSigningKeyVersion`] construction.
#[macro_export]
macro_rules! server_signing_key_version {
    ($s:literal) => {
        $crate::__private_macros::server_signing_key_version!($crate, $s)
    };
}
/// Compile-time checked [`OwnedServerSigningKeyId`] construction.
// NOTE(review): this expands to `$crate::server_signing_key_id!`, but no
// `server_signing_key_id!` macro is defined in the visible portion of this
// file — verify it exists elsewhere in the crate.
#[macro_export]
macro_rules! owned_server_signing_key_id {
    ($s:literal) => {
        $crate::server_signing_key_id!($s).to_owned()
    };
}
/// Compile-time checked [`ServerName`] construction.
#[macro_export]
macro_rules! server_name {
    ($s:literal) => {
        $crate::__private_macros::server_name!($crate, $s)
    };
}
/// Compile-time checked [`OwnedServerName`] construction.
#[macro_export]
macro_rules! owned_server_name {
    ($s:literal) => {
        $crate::server_name!($s).to_owned()
    };
}
/// Compile-time checked [`SessionId`] construction.
///
/// Unlike the proc-macro-backed macros above, validation happens through a
/// `const` item: a parse failure panics during constant evaluation, which
/// surfaces as a compile-time error.
#[macro_export]
macro_rules! session_id {
    ($s:literal) => {{
        const SESSION_ID: &$crate::SessionId = match $crate::SessionId::_priv_const_new($s) {
            Ok(id) => id,
            Err(e) => panic!("{}", e),
        };
        SESSION_ID
    }};
}
/// Compile-time checked [`OwnedSessionId`] construction.
#[macro_export]
macro_rules! owned_session_id {
    ($s:literal) => {
        $crate::session_id!($s).to_owned()
    };
}
/// Compile-time checked [`MxcUri`] construction.
#[macro_export]
macro_rules! mxc_uri {
    ($s:literal) => {
        $crate::__private_macros::mxc_uri!($crate, $s)
    };
}
/// Compile-time checked [`OwnedMxcUri`] construction.
#[macro_export]
macro_rules! owned_mxc_uri {
    ($s:literal) => {
        $crate::mxc_uri!($s).to_owned()
    };
}
/// Compile-time checked [`UserId`] construction.
#[macro_export]
macro_rules! user_id {
    ($s:literal) => {
        $crate::__private_macros::user_id!($crate, $s)
    };
}
/// Compile-time checked [`OwnedUserId`] construction.
#[macro_export]
macro_rules! owned_user_id {
    ($s:literal) => {
        $crate::user_id!($s).to_owned()
    };
}
/// Compile-time checked [`Base64PublicKey`] construction.
// NOTE(review): `base64_public_key` is not among the visible
// `__private_macros` re-exports — confirm it is re-exported.
#[macro_export]
macro_rules! base64_public_key {
    ($s:literal) => {
        $crate::__private_macros::base64_public_key!($crate, $s)
    };
}
/// Compile-time checked [`OwnedBase64PublicKey`] construction.
#[macro_export]
macro_rules! owned_base64_public_key {
    ($s:literal) => {
        $crate::base64_public_key!($s).to_owned()
    };
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/push.rs | crates/core/src/push.rs | //! Common types for the [push notifications module][push].
//!
//! [push]: https://spec.matrix.org/latest/client-server-api/#push-notifications
//!
//! ## Understanding the types of this module
//!
//! Push rules are grouped in `RuleSet`s, and are grouped in five kinds (for
//! more details about the different kind of rules, see the `Ruleset`
//! documentation, or the specification). These five kinds are, by order of
//! priority:
//!
//! - override rules
//! - content rules
//! - room rules
//! - sender rules
//! - underride rules
mod action;
mod condition;
mod conditional_push_rule;
mod error;
mod iter;
mod patterned_push_rule;
mod predefined;
pub mod push_gateway;
mod push_rule;
mod pusher;
mod ruleset;
mod simple_push_rule;
use std::hash::Hash;
pub use conditional_push_rule::*;
pub use error::*;
use indexmap::{Equivalent, IndexSet};
pub use patterned_push_rule::*;
pub use push_rule::{RuleKind, *};
pub use pusher::*;
pub use ruleset::Ruleset;
use salvo::oapi::ToSchema;
pub use simple_push_rule::*;
#[cfg(feature = "unstable-msc3932")]
pub use self::condition::RoomVersionFeature;
pub use self::{
action::{Action, Tweak},
condition::{
_CustomPushCondition, ComparisonOperator, FlattenedJson, FlattenedJsonValue, PushCondition,
PushConditionPowerLevelsCtx, PushConditionRoomCtx, RoomMemberCountIs, ScalarJsonValue,
},
iter::{AnyPushRule, AnyPushRuleRef, RulesetIntoIter, RulesetIter},
predefined::{
PredefinedContentRuleId, PredefinedOverrideRuleId, PredefinedRuleId,
PredefinedUnderrideRuleId,
},
};
use crate::{PrivOwnedStr, serde::StringEnum};
/// A special format that the homeserver should use when sending notifications
/// to a Push Gateway. Currently, only `event_id_only` is supported, see the
/// [Push Gateway API][spec].
///
/// [spec]: https://spec.matrix.org/latest/push-gateway-api/#homeserver-behaviour
#[doc = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/src/doc/string_enum.md"))]
#[derive(ToSchema, Clone, StringEnum)]
#[palpo_enum(rename_all = "snake_case")]
#[non_exhaustive]
pub enum PushFormat {
    /// Require the homeserver to only send a reduced set of fields in the push.
    ///
    /// Serialized as `event_id_only` (per the `rename_all` attribute above).
    EventIdOnly,

    /// An unrecognized format string, kept verbatim for forward compatibility.
    #[doc(hidden)]
    #[salvo(schema(skip))]
    _Custom(PrivOwnedStr),
}
/// Insert the rule in the given indexset and move it to the given position.
///
/// The target position is determined as follows:
///
/// * Start from `default_position`.
/// * If `after` is set, place the rule right after the rule with that ID.
/// * If `before` is set, place the rule at the index of the rule with that
///   ID, which must not come before the position computed from `after`.
///
/// # Errors
///
/// * [`InsertPushRuleError::UnknownRuleId`] if `after` or `before` names a
///   rule that is not in `set`.
/// * [`InsertPushRuleError::BeforeHigherThanAfter`] if the `before` rule is
///   positioned before the `after` rule.
pub fn insert_and_move_rule<T>(
    set: &mut IndexSet<T>,
    rule: T,
    default_position: usize,
    after: Option<&str>,
    before: Option<&str>,
) -> Result<(), InsertPushRuleError>
where
    T: Hash + Eq,
    str: Equivalent<T>,
{
    // `replace_full` keeps the position of an already-present equal rule:
    // `from` is that index and `replaced` the old value. If the rule is new,
    // it is appended at the end and `replaced` is `None`.
    let (from, replaced) = set.replace_full(rule);
    let mut to = default_position;
    if let Some(rule_id) = after {
        let idx = set
            .get_index_of(rule_id)
            .ok_or(InsertPushRuleError::UnknownRuleId)?;
        // Immediately after the `after` rule.
        to = idx + 1;
    }
    if let Some(rule_id) = before {
        let idx = set
            .get_index_of(rule_id)
            .ok_or(InsertPushRuleError::UnknownRuleId)?;
        if idx < to {
            return Err(InsertPushRuleError::BeforeHigherThanAfter);
        }
        to = idx;
    }
    // Only move the item if it's new or if it was positioned.
    // (Note: `move_index` shifts the elements between `from` and `to`.)
    if replaced.is_none() || after.is_some() || before.is_some() {
        set.move_index(from, to);
    }
    Ok(())
}
// #[cfg(test)]
// mod tests {
// use std::collections::BTreeMap;
// use assert_matches2::assert_matches;
// use serde_json::{from_value as from_json_value, json, to_value as
// to_json_value};
// use super::{
// action::{Action, Tweak},
// condition::{PushCondition, PushConditionPowerLevelsCtx,
// PushConditionRoomCtx, RoomMemberCountIs}, AnyPushRule,
// ConditionalPushRule, PatternedPushRule, Ruleset, SimplePushRule, };
// use crate::{
// owned_room_id, owned_user_id,
// power_levels::NotificationPowerLevels,
// push::{PredefinedContentRuleId, PredefinedOverrideRuleId},
// serde::RawJson,
// user_id, JsonValue, RawJsonValue,
// };
// fn example_ruleset() -> Ruleset {
// let mut set = Ruleset::new();
// set.override_.insert(ConditionalPushRule {
// conditions: vec![PushCondition::EventMatch {
// key: "type".into(),
// pattern: "m.call.invite".into(),
// }],
// actions: vec![Action::Notify,
// Action::SetTweak(Tweak::Highlight(true))], rule_id:
// ".m.rule.call".into(), enabled: true,
// default: true,
// });
// set
// }
// fn power_levels() -> PushConditionPowerLevelsCtx {
// PushConditionPowerLevelsCtx {
// users: BTreeMap::new(),
// users_default: 50,
// notifications: NotificationPowerLevels { room: 50 },
// }
// }
// #[test]
// fn iter() {
// let mut set = example_ruleset();
// let added = set.override_.insert(ConditionalPushRule {
// conditions: vec![PushCondition::EventMatch {
// key: "room_id".into(),
// pattern: "!roomid:matrix.org".into(),
// }],
// actions: vec![],
// rule_id: "!roomid:matrix.org".into(),
// enabled: true,
// default: false,
// });
// assert!(added);
// let added = set.override_.insert(ConditionalPushRule {
// conditions: vec![],
// actions: vec![],
// rule_id: ".m.rule.suppress_notices".into(),
// enabled: false,
// default: true,
// });
// assert!(added);
// let mut iter = set.into_iter();
// let rule_opt = iter.next();
// assert!(rule_opt.is_some());
// assert_matches!(
// rule_opt.unwrap(),
// AnyPushRule::Override(ConditionalPushRule { rule_id, .. })
// );
// assert_eq!(rule_id, ".m.rule.call");
// let rule_opt = iter.next();
// assert!(rule_opt.is_some());
// assert_matches!(
// rule_opt.unwrap(),
// AnyPushRule::Override(ConditionalPushRule { rule_id, .. })
// );
// assert_eq!(rule_id, "!roomid:matrix.org");
// let rule_opt = iter.next();
// assert!(rule_opt.is_some());
// assert_matches!(
// rule_opt.unwrap(),
// AnyPushRule::Override(ConditionalPushRule { rule_id, .. })
// );
// assert_eq!(rule_id, ".m.rule.suppress_notices");
// assert_matches!(iter.next(), None);
// }
// #[test]
// fn serialize_conditional_push_rule() {
// let rule = ConditionalPushRule {
// actions: vec![Action::Notify,
// Action::SetTweak(Tweak::Highlight(true))], default: true,
// enabled: true,
// rule_id: ".m.rule.call".into(),
// conditions: vec![
// PushCondition::EventMatch {
// key: "type".into(),
// pattern: "m.call.invite".into(),
// },
// PushCondition::ContainsDisplayName,
// PushCondition::RoomMemberCount {
// is: RoomMemberCountIs::gt(2),
// },
// PushCondition::SenderNotificationPermission { key:
// "room".into() }, ],
// };
// let rule_value: JsonValue = to_json_value(rule).unwrap();
// assert_eq!(
// rule_value,
// json!({
// "conditions": [
// {
// "kind": "event_match",
// "key": "type",
// "pattern": "m.call.invite"
// },
// {
// "kind": "contains_display_name"
// },
// {
// "kind": "room_member_count",
// "is": ">2"
// },
// {
// "kind": "sender_notification_permission",
// "key": "room"
// }
// ],
// "actions": [
// "notify",
// {
// "set_tweak": "highlight"
// }
// ],
// "rule_id": ".m.rule.call",
// "default": true,
// "enabled": true
// })
// );
// }
// #[test]
// fn serialize_simple_push_rule() {
// let rule = SimplePushRule {
// actions: vec![Action::Notify],
// default: false,
// enabled: false,
// rule_id: owned_room_id!("!roomid:server.name"),
// };
// let rule_value: JsonValue = to_json_value(rule).unwrap();
// assert_eq!(
// rule_value,
// json!({
// "actions": [
// "notify"
// ],
// "rule_id": "!roomid:server.name",
// "default": false,
// "enabled": false
// })
// );
// }
// #[test]
// fn serialize_patterned_push_rule() {
// let rule = PatternedPushRule {
// actions: vec![
// Action::Notify,
// Action::SetTweak(Tweak::Sound("default".into())),
// Action::SetTweak(Tweak::Custom {
// name: "dance".into(),
// value: RawJsonValue::from_string("true".into()).unwrap(),
// }),
// ],
// default: true,
// enabled: true,
// pattern: "user_id".into(),
// rule_id: ".m.rule.contains_user_name".into(),
// };
// let rule_value: JsonValue = to_json_value(rule).unwrap();
// assert_eq!(
// rule_value,
// json!({
// "actions": [
// "notify",
// {
// "set_tweak": "sound",
// "value": "default"
// },
// {
// "set_tweak": "dance",
// "value": true
// }
// ],
// "pattern": "user_id",
// "rule_id": ".m.rule.contains_user_name",
// "default": true,
// "enabled": true
// })
// );
// }
// #[test]
// fn serialize_ruleset() {
// let mut set = example_ruleset();
// set.override_.insert(ConditionalPushRule {
// conditions: vec![
// PushCondition::RoomMemberCount {
// is: RoomMemberCountIs::from(2),
// },
// PushCondition::EventMatch {
// key: "type".into(),
// pattern: "m.room.message".into(),
// },
// ],
// actions: vec![
// Action::Notify,
// Action::SetTweak(Tweak::Sound("default".into())),
// Action::SetTweak(Tweak::Highlight(false)),
// ],
// rule_id: ".m.rule.room_one_to_one".into(),
// enabled: true,
// default: true,
// });
// set.content.insert(PatternedPushRule {
// actions: vec![
// Action::Notify,
// Action::SetTweak(Tweak::Sound("default".into())),
// Action::SetTweak(Tweak::Highlight(true)),
// ],
// rule_id: ".m.rule.contains_user_name".into(),
// pattern: "user_id".into(),
// enabled: true,
// default: true,
// });
// let set_value: JsonValue = to_json_value(set).unwrap();
// assert_eq!(
// set_value,
// json!({
// "override": [
// {
// "actions": [
// "notify",
// {
// "set_tweak": "highlight",
// },
// ],
// "conditions": [
// {
// "kind": "event_match",
// "key": "type",
// "pattern": "m.call.invite"
// },
// ],
// "rule_id": ".m.rule.call",
// "default": true,
// "enabled": true,
// },
// {
// "conditions": [
// {
// "kind": "room_member_count",
// "is": "2"
// },
// {
// "kind": "event_match",
// "key": "type",
// "pattern": "m.room.message"
// }
// ],
// "actions": [
// "notify",
// {
// "set_tweak": "sound",
// "value": "default"
// },
// {
// "set_tweak": "highlight",
// "value": false
// }
// ],
// "rule_id": ".m.rule.room_one_to_one",
// "default": true,
// "enabled": true
// },
// ],
// "content": [
// {
// "actions": [
// "notify",
// {
// "set_tweak": "sound",
// "value": "default"
// },
// {
// "set_tweak": "highlight"
// }
// ],
// "pattern": "user_id",
// "rule_id": ".m.rule.contains_user_name",
// "default": true,
// "enabled": true
// }
// ],
// })
// );
// }
// #[test]
// fn deserialize_patterned_push_rule() {
// let rule = from_json_value::<PatternedPushRule>(json!({
// "actions": [
// "notify",
// {
// "set_tweak": "sound",
// "value": "default"
// },
// {
// "set_tweak": "highlight",
// "value": true
// }
// ],
// "pattern": "user_id",
// "rule_id": ".m.rule.contains_user_name",
// "default": true,
// "enabled": true
// }))
// .unwrap();
// assert!(rule.default);
// assert!(rule.enabled);
// assert_eq!(rule.pattern, "user_id");
// assert_eq!(rule.rule_id, ".m.rule.contains_user_name");
// let mut iter = rule.actions.iter();
// assert_matches!(iter.next(), Some(Action::Notify));
// assert_matches!(iter.next(),
// Some(Action::SetTweak(Tweak::Sound(sound)))); assert_eq!(sound,
// "default"); assert_matches!(iter.next(),
// Some(Action::SetTweak(Tweak::Highlight(true)))); assert_matches!
// (iter.next(), None); }
// #[test]
// fn deserialize_ruleset() {
// let set: Ruleset = from_json_value(json!({
// "override": [
// {
// "actions": [],
// "conditions": [],
// "rule_id": "!roomid:server.name",
// "default": false,
// "enabled": true
// },
// {
// "actions": [],
// "conditions": [],
// "rule_id": ".m.rule.call",
// "default": true,
// "enabled": true
// },
// ],
// "underride": [
// {
// "actions": [],
// "conditions": [],
// "rule_id": ".m.rule.room_one_to_one",
// "default": true,
// "enabled": true
// },
// ],
// "room": [
// {
// "actions": [],
// "rule_id": "!roomid:server.name",
// "default": false,
// "enabled": false
// }
// ],
// "sender": [],
// "content": [
// {
// "actions": [],
// "pattern": "user_id",
// "rule_id": ".m.rule.contains_user_name",
// "default": true,
// "enabled": true
// },
// {
// "actions": [],
// "pattern": "palpo",
// "rule_id": "palpo",
// "default": false,
// "enabled": true
// }
// ]
// }))
// .unwrap();
// let mut iter = set.into_iter();
// let rule_opt = iter.next();
// assert!(rule_opt.is_some());
// assert_matches!(
// rule_opt.unwrap(),
// AnyPushRule::Override(ConditionalPushRule { rule_id, .. })
// );
// assert_eq!(rule_id, "!roomid:server.name");
// let rule_opt = iter.next();
// assert!(rule_opt.is_some());
// assert_matches!(
// rule_opt.unwrap(),
// AnyPushRule::Override(ConditionalPushRule { rule_id, .. })
// );
// assert_eq!(rule_id, ".m.rule.call");
// let rule_opt = iter.next();
// assert!(rule_opt.is_some());
// assert_matches!(
// rule_opt.unwrap(),
// AnyPushRule::Content(PatternedPushRule { rule_id, .. })
// );
// assert_eq!(rule_id, ".m.rule.contains_user_name");
// let rule_opt = iter.next();
// assert!(rule_opt.is_some());
// assert_matches!(
// rule_opt.unwrap(),
// AnyPushRule::Content(PatternedPushRule { rule_id, .. })
// );
// assert_eq!(rule_id, "palpo");
// let rule_opt = iter.next();
// assert!(rule_opt.is_some());
// assert_matches!(rule_opt.unwrap(), AnyPushRule::Room(SimplePushRule {
// rule_id, .. })); assert_eq!(rule_id, "!roomid:server.name");
// let rule_opt = iter.next();
// assert!(rule_opt.is_some());
// assert_matches!(
// rule_opt.unwrap(),
// AnyPushRule::Underride(ConditionalPushRule { rule_id, .. })
// );
// assert_eq!(rule_id, ".m.rule.room_one_to_one");
// assert_matches!(iter.next(), None);
// }
// #[test]
// fn default_ruleset_applies() {
// let set =
// Ruleset::server_default(user_id!("@jolly_jumper:server.name"));
// let context_one_to_one = &PushConditionRoomCtx {
// room_id: owned_room_id!("!dm:server.name"),
// member_count: u2,
// user_id: owned_user_id!("@jj:server.name"),
// user_display_name: "Jolly Jumper".into(),
// power_levels: Some(power_levels()),
// supported_features: Default::default(),
// };
// let context_public_room = &PushConditionRoomCtx {
// room_id: owned_room_id!("!far_west:server.name"),
// member_count: u100,
// user_id: owned_user_id!("@jj:server.name"),
// user_display_name: "Jolly Jumper".into(),
// power_levels: Some(power_levels()),
// supported_features: Default::default(),
// };
// let message = serde_json::from_str::<RawJson<JsonValue>>(
// r#"{
// "type": "m.room.message"
// }"#,
// )
// .unwrap();
// assert_matches!(
// set.get_actions(&message, context_one_to_one),
// [
// Action::Notify,
// Action::SetTweak(Tweak::Sound(_)),
// Action::SetTweak(Tweak::Highlight(false))
// ]
// );
// assert_matches!(
// set.get_actions(&message, context_public_room),
// [Action::Notify, Action::SetTweak(Tweak::Highlight(false))]
// );
// let user_name = serde_json::from_str::<RawJson<JsonValue>>(
// r#"{
// "type": "m.room.message",
// "content": {
// "body": "Hi jolly_jumper!"
// }
// }"#,
// )
// .unwrap();
// assert_matches!(
// set.get_actions(&user_name, context_one_to_one),
// [
// Action::Notify,
// Action::SetTweak(Tweak::Sound(_)),
// Action::SetTweak(Tweak::Highlight(true)),
// ]
// );
// assert_matches!(
// set.get_actions(&user_name, context_public_room),
// [
// Action::Notify,
// Action::SetTweak(Tweak::Sound(_)),
// Action::SetTweak(Tweak::Highlight(true)),
// ]
// );
// let notice = serde_json::from_str::<RawJson<JsonValue>>(
// r#"{
// "type": "m.room.message",
// "content": {
// "msgtype": "m.notice"
// }
// }"#,
// )
// .unwrap();
// assert_matches!(set.get_actions(¬ice, context_one_to_one), []);
// let at_room = serde_json::from_str::<RawJson<JsonValue>>(
// r#"{
// "type": "m.room.message",
// "sender": "@rantanplan:server.name",
// "content": {
// "body": "@room Attention please!",
// "msgtype": "m.text"
// }
// }"#,
// )
// .unwrap();
// assert_matches!(
// set.get_actions(&at_room, context_public_room),
// [Action::Notify, Action::SetTweak(Tweak::Highlight(true)),]
// );
// let empty =
// serde_json::from_str::<RawJson<JsonValue>>(r#"{}"#).unwrap();
// assert_matches!(set.get_actions(&empty, context_one_to_one), []);
// }
// #[test]
// fn custom_ruleset_applies() {
// let context_one_to_one = &PushConditionRoomCtx {
// room_id: owned_room_id!("!dm:server.name"),
// member_count: u2,
// user_id: owned_user_id!("@jj:server.name"),
// user_display_name: "Jolly Jumper".into(),
// power_levels: Some(power_levels()),
// supported_features: Default::default(),
// };
// let message = serde_json::from_str::<RawJson<JsonValue>>(
// r#"{
// "sender": "@rantanplan:server.name",
// "type": "m.room.message",
// "content": {
// "msgtype": "m.text",
// "body": "Great joke!"
// }
// }"#,
// )
// .unwrap();
// let mut set = Ruleset::new();
// let disabled = ConditionalPushRule {
// actions: vec![Action::Notify],
// default: false,
// enabled: false,
// rule_id: "disabled".into(),
// conditions: vec![PushCondition::RoomMemberCount {
// is: RoomMemberCountIs::from(2),
// }],
// };
// set.underride.insert(disabled);
// let test_set = set.clone();
// assert_matches!(test_set.get_actions(&message, context_one_to_one),
// []);
// let no_conditions = ConditionalPushRule {
// actions: vec![Action::SetTweak(Tweak::Highlight(true))],
// default: false,
// enabled: true,
// rule_id: "no.conditions".into(),
// conditions: vec![],
// };
// set.underride.insert(no_conditions);
// let test_set = set.clone();
// assert_matches!(
// test_set.get_actions(&message, context_one_to_one),
// [Action::SetTweak(Tweak::Highlight(true))]
// );
// let sender = SimplePushRule {
// actions: vec![Action::Notify],
// default: false,
// enabled: true,
// rule_id: owned_user_id!("@rantanplan:server.name"),
// };
// set.sender.insert(sender);
// let test_set = set.clone();
// assert_matches!(test_set.get_actions(&message, context_one_to_one),
// [Action::Notify]);
// let room = SimplePushRule {
// actions: vec![Action::SetTweak(Tweak::Highlight(true))],
// default: false,
// enabled: true,
// rule_id: owned_room_id!("!dm:server.name"),
// };
// set.room.insert(room);
// let test_set = set.clone();
// assert_matches!(
// test_set.get_actions(&message, context_one_to_one),
// [Action::SetTweak(Tweak::Highlight(true))]
// );
// let content = PatternedPushRule {
// actions: vec![Action::SetTweak(Tweak::Sound("content".into()))],
// default: false,
// enabled: true,
// rule_id: "content".into(),
// pattern: "joke".into(),
// };
// set.content.insert(content);
// let test_set = set.clone();
// assert_matches!(
// test_set.get_actions(&message, context_one_to_one),
// [Action::SetTweak(Tweak::Sound(sound))]
// );
// assert_eq!(sound, "content");
// let three_conditions = ConditionalPushRule {
// actions: vec![Action::SetTweak(Tweak::Sound("three".into()))],
// default: false,
// enabled: true,
// rule_id: "three.conditions".into(),
// conditions: vec![
// PushCondition::RoomMemberCount {
// is: RoomMemberCountIs::from(2),
// },
// PushCondition::ContainsDisplayName,
// PushCondition::EventMatch {
// key: "room_id".into(),
// pattern: "!dm:server.name".into(),
// },
// ],
// };
// set.override_.insert(three_conditions);
// assert_matches!(
// set.get_actions(&message, context_one_to_one),
// [Action::SetTweak(Tweak::Sound(sound))]
// );
// assert_eq!(sound, "content");
// let new_message = serde_json::from_str::<RawJson<JsonValue>>(
// r#"{
// "sender": "@rantanplan:server.name",
// "type": "m.room.message",
// "content": {
// "msgtype": "m.text",
// "body": "Tell me another one, Jolly Jumper!"
// }
// }"#,
// )
// .unwrap();
// assert_matches!(
// set.get_actions(&new_message, context_one_to_one),
// [Action::SetTweak(Tweak::Sound(sound))]
// );
// assert_eq!(sound, "three");
// }
// #[test]
// fn intentional_mentions_apply() {
// let set =
// Ruleset::server_default(user_id!("@jolly_jumper:server.name"));
// let context = &PushConditionRoomCtx {
// room_id: owned_room_id!("!far_west:server.name"),
// member_count: u100,
// user_id: owned_user_id!("@jj:server.name"),
// user_display_name: "Jolly Jumper".into(),
// power_levels: Some(power_levels()),
// supported_features: Default::default(),
// };
// let message = serde_json::from_str::<RawJson<JsonValue>>(
// r#"{
// "content": {
// "body": "Hey jolly_jumper!",
// "m.mentions": {
// "user_ids": ["@jolly_jumper:server.name"]
// }
// },
// "sender": "@admin:server.name",
// "type": "m.room.message"
// }"#,
// )
// .unwrap();
// assert_eq!(
// set.get_match(&message, context).unwrap().rule_id(),
// PredefinedOverrideRuleId::IsUserMention.as_ref()
// );
// let message = serde_json::from_str::<RawJson<JsonValue>>(
// r#"{
// "content": {
// "body": "Listen room!",
// "m.mentions": {
// "room": true
// }
// },
// "sender": "@admin:server.name",
// "type": "m.room.message"
// }"#,
// )
// .unwrap();
// assert_eq!(
// set.get_match(&message, context).unwrap().rule_id(),
// PredefinedOverrideRuleId::IsRoomMention.as_ref()
// );
// }
// #[test]
// fn invite_for_me_applies() {
// let set =
// Ruleset::server_default(user_id!("@jolly_jumper:server.name"));
// let context = &PushConditionRoomCtx {
// room_id: owned_room_id!("!far_west:server.name"),
// member_count: u100,
// user_id: owned_user_id!("@jj:server.name"),
// user_display_name: "Jolly Jumper".into(),
// // `invite_state` usually doesn't include the power levels.
// power_levels: None,
// supported_features: Default::default(),
// };
// let message = serde_json::from_str::<RawJson<JsonValue>>(
// r#"{
// "content": {
// "membership": "invite"
// },
// "state_key": "@jolly_jumper:server.name",
// "sender": "@admin:server.name",
// "type": "m.room.member"
// }"#,
// )
// .unwrap();
// assert_eq!(
// set.get_match(&message, context).unwrap().rule_id(),
// PredefinedOverrideRuleId::InviteForMe.as_ref()
// );
// }
// }
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/signatures.rs | crates/core/src/signatures.rs | //! Digital signatures according to the [Matrix](https://matrix.org/) specification.
//!
//! Digital signatures are used by Matrix homeservers to verify the authenticity
//! of events in the Matrix system, as well as requests between homeservers for
//! federation. Each homeserver has one or more signing key pairs (sometimes
//! referred to as "verify keys") which it uses to sign all events and
//! federation requests. Matrix clients and other Matrix homeservers can ask the
//! homeserver for its public keys and use those keys to verify the signed data.
//!
//! Each signing key pair has an identifier, which consists of the name of the
//! digital signature algorithm it uses and a "version" string, separated by a
//! colon. The version is an arbitrary identifier used to distinguish key pairs
//! using the same algorithm from the same homeserver.
//!
//! Arbitrary JSON objects can be signed as well as JSON representations of
//! Matrix events. In both cases, the signatures are stored within the JSON
//! object itself under a `signatures` key. Events are also required to contain
//! hashes of their content, which are similarly stored within the hashed JSON
//! object under a `hashes` key.
//!
//! In JSON representations, both signatures and hashes appear as base64-encoded
//! strings, using the standard character set, without padding.
//!
//! # Signing and hashing
//!
//! To sign an arbitrary JSON object, use the `sign_json` function. See the
//! documentation of this function for more details and a full example of use.
//!
//! Signing an event uses a more complicated process than signing arbitrary
//! JSON, because events can be redacted, and signatures need to remain valid
//! even if data is removed from an event later. Homeservers are required to
//! generate hashes of event contents as well as signing events before
//! exchanging them with other homeservers. Although the algorithm for hashing
//! and signing an event is more complicated than for signing arbitrary JSON,
//! the interface to a user of palpo-signatures is the same. To hash and sign an
//! event, use the `hash_and_sign_event` function. See the documentation of this
//! function for more details and a full example of use.
//!
//! # Verifying signatures and hashes
//!
//! When a homeserver receives data from another homeserver via the federation,
//! it's necessary to verify the authenticity and integrity of the data by
//! verifying their signatures.
//!
//! To verify a signature on arbitrary JSON, use the `verify_json` function. To
//! verify the signatures and hashes on an event, use the `verify_event`
//! function. See the documentation for these respective functions for more
//! details and full examples of use.
pub use self::{
error::{Error, JsonError, ParseError, VerificationError},
functions::{
canonical_json, content_hash, hash_and_sign_event, reference_hash, required_keys,
sign_json, verify_canonical_json_bytes, verify_event, verify_json,
},
keys::{Ed25519KeyPair, KeyPair, PublicKeyMap, PublicKeySet},
verification::Verified,
};
use crate::serde::{Base64, base64::Standard};
use crate::{AnyKeyName, IdParseError, OwnedSigningKeyId, SigningKeyAlgorithm, SigningKeyId};
mod error;
mod functions;
mod keys;
mod verification;
/// A digital signature.
///
/// Pairs the ID of the signing key with the signature bytes.
#[derive(Clone, Debug, Eq, Hash, PartialEq)]
pub struct Signature {
    /// The ID of the key used to generate this signature.
    pub(crate) key_id: OwnedSigningKeyId<AnyKeyName>,
    /// The signature data.
    ///
    /// Stored as raw bytes; use [`Signature::base64`] for the encoded form.
    pub(crate) signature: Vec<u8>,
}
impl Signature {
    /// Creates a signature from raw bytes.
    ///
    /// This constructor will ensure that the key ID has the correct
    /// `algorithm:key_name` format.
    ///
    /// # Parameters
    ///
    /// * `id`: A key identifier, e.g. `ed25519:1`.
    /// * `bytes`: The digital signature, as a series of bytes.
    ///
    /// # Errors
    ///
    /// Returns an error if the key ID is malformed.
    pub fn new(id: &str, bytes: &[u8]) -> Result<Self, IdParseError> {
        SigningKeyId::<AnyKeyName>::parse(id).map(|key_id| Self {
            key_id,
            signature: bytes.to_vec(),
        })
    }

    /// The algorithm used to generate the signature.
    pub fn algorithm(&self) -> SigningKeyAlgorithm {
        self.key_id.algorithm()
    }

    /// The raw bytes of the signature.
    pub fn as_bytes(&self) -> &[u8] {
        &self.signature
    }

    /// A base64 encoding of the signature, using the standard character set
    /// with no padding.
    pub fn base64(&self) -> String {
        let encoder = Base64::<Standard, _>::new(self.signature.as_slice());
        encoder.encode()
    }

    /// The key identifier: the signature algorithm and the key "version"
    /// separated by a colon, e.g. `ed25519:1`.
    pub fn id(&self) -> String {
        self.key_id.to_string()
    }

    /// The "version" of the key used for this signature.
    ///
    /// Versions distinguish signatures generated from different keys using
    /// the same algorithm on the same homeserver.
    pub fn version(&self) -> &str {
        self.key_id.key_name().as_ref()
    }

    /// Split this `Signature` into its key identifier and bytes.
    pub fn into_parts(self) -> (OwnedSigningKeyId<AnyKeyName>, Vec<u8>) {
        let Self { key_id, signature } = self;
        (key_id, signature)
    }
}
#[cfg(test)]
mod tests {
    use super::Signature;
    use crate::SigningKeyAlgorithm;

    // A well-formed `algorithm:version` id parses and round-trips its parts.
    #[test]
    fn valid_key_id() {
        let signature = Signature::new("ed25519:abcdef", &[]).unwrap();
        assert_eq!(signature.algorithm(), SigningKeyAlgorithm::Ed25519);
        assert_eq!(signature.version(), "abcdef");
    }

    // Unknown algorithm names are accepted and kept verbatim.
    #[test]
    fn unknown_key_id_algorithm() {
        let signature = Signature::new("foobar:abcdef", &[]).unwrap();
        assert_eq!(signature.algorithm().as_str(), "foobar");
        assert_eq!(signature.version(), "abcdef");
    }

    // An id without the `:` separator is rejected.
    #[test]
    fn invalid_key_id_format() {
        Signature::new("ed25519", &[]).unwrap_err();
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/third_party.rs | crates/core/src/third_party.rs | //! Common types for the [third party networks module][thirdparty].
//!
//! [thirdparty]: https://spec.matrix.org/latest/client-server-api/#third-party-networks
use std::collections::BTreeMap;
use salvo::prelude::*;
use serde::{Deserialize, Serialize};
use crate::{OwnedRoomAliasId, OwnedUserId, PrivOwnedStr, UnixMillis, serde::StringEnum};
/// Metadata about a third party protocol, as returned by the `thirdparty/protocols`
/// family of endpoints.
#[derive(ToSchema, Deserialize, Serialize, Clone, Default, Debug)]
pub struct Protocol {
    /// Fields which may be used to identify a third party user.
    pub user_fields: Vec<String>,
    /// Fields which may be used to identify a third party location.
    pub location_fields: Vec<String>,
    /// A content URI representing an icon for the third party protocol.
    ///
    /// Defaults to an empty string when the field is absent from the serialized form.
    #[serde(default)]
    pub icon: String,
    /// The type definitions for the fields defined in `user_fields` and
    /// `location_fields`.
    pub field_types: BTreeMap<String, FieldType>,
    /// A list of objects representing independent instances of configuration.
    pub instances: Vec<ProtocolInstance>,
}
/// Metadata about an instance of a third party protocol.
#[derive(ToSchema, Deserialize, Serialize, Clone, Debug)]
pub struct ProtocolInstance {
    /// A human-readable description for the protocol, such as the name.
    pub desc: String,
    /// An optional content URI representing the protocol.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub icon: Option<String>,
    /// Preset values for `fields` the client may use to search by.
    pub fields: BTreeMap<String, String>,
    /// A unique identifier across all instances.
    pub network_id: String,
    /// A unique identifier across all instances.
    ///
    /// See [matrix-spec#833](https://github.com/matrix-org/matrix-spec/issues/833).
    #[cfg(feature = "unstable-unspecified")]
    pub instance_id: String,
}
/// A type definition for a field used to identify third party users or
/// locations.
#[derive(ToSchema, Deserialize, Serialize, Clone, Debug)]
pub struct FieldType {
    /// A regular expression for validation of a field's value.
    pub regexp: String,
    /// A placeholder serving as a valid example of the field value.
    pub placeholder: String,
}
/// A third party network location, mapping a Matrix room alias onto a location in the
/// remote network.
#[derive(ToSchema, Deserialize, Serialize, Clone, Debug)]
pub struct Location {
    /// An alias for a Matrix room.
    pub alias: OwnedRoomAliasId,
    /// The protocol ID that the third party location is a part of.
    pub protocol: String,
    /// Information used to identify this third party location.
    pub fields: BTreeMap<String, String>,
}
impl Location {
    /// Creates a new `Location` with the given alias, protocol and fields.
    pub fn new(
        alias: OwnedRoomAliasId,
        protocol: String,
        fields: BTreeMap<String, String>,
    ) -> Self {
        Self { fields, protocol, alias }
    }
}
/// A third party network user, mapping a Matrix user ID onto a user in the remote network.
#[derive(ToSchema, Deserialize, Serialize, Clone, Debug)]
pub struct User {
    /// A Matrix user ID representing a third party user.
    pub user_id: OwnedUserId,
    /// The protocol ID that the third party user is a part of.
    pub protocol: String,
    /// Information used to identify this third party user.
    pub fields: BTreeMap<String, String>,
}
impl User {
    /// Creates a new `User` with the given user_id, protocol and fields.
    pub fn new(user_id: OwnedUserId, protocol: String, fields: BTreeMap<String, String>) -> Self {
        Self { fields, protocol, user_id }
    }
}
/// The medium of a third party identifier.
#[doc = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/src/doc/string_enum.md"))]
#[derive(ToSchema, Clone, StringEnum)]
#[palpo_enum(rename_all = "lowercase")]
#[non_exhaustive]
pub enum Medium {
    /// Email address identifier.
    Email,
    /// Phone number identifier.
    Msisdn,
    // Fallback for media not known to this crate; hidden from docs per the StringEnum pattern.
    #[doc(hidden)]
    #[salvo(schema(value_type = String))]
    _Custom(PrivOwnedStr),
}
/// An identifier external to Matrix (e.g. an email address or phone number) that has been
/// associated with a Matrix account.
#[derive(ToSchema, Deserialize, Serialize, Clone, Debug)]
#[cfg_attr(test, derive(PartialEq))]
pub struct ThirdPartyIdentifier {
    /// The third party identifier address.
    pub address: String,
    /// The medium of third party identifier.
    pub medium: Medium,
    /// The time when the identifier was validated by the identity server.
    pub validated_at: UnixMillis,
    /// The time when the homeserver associated the third party identifier with
    /// the user.
    pub added_at: UnixMillis,
}
// `GET /_matrix/client/*/thirdparty/protocols`
//
// Fetches the overall metadata about protocols supported by the homeserver.
// `/v3/` ([spec])
//
// [spec]: https://spec.matrix.org/latest/client-server-api/#get_matrixclientv3thirdpartyprotocols
// const METADATA: Metadata = metadata! {
// method: GET,
// rate_limited: false,
// authentication: AccessToken,
// history: {
// 1.0 => "/_matrix/client/r0/thirdparty/protocols",
// 1.1 => "/_matrix/client/v3/thirdparty/protocols",
// }
// };
/// Response type for the `get_protocols` endpoint.
#[derive(ToSchema, Serialize, Default, Debug)]
pub struct ProtocolsResBody(
    /// Metadata about protocols supported by the homeserver.
    ///
    /// Public for consistency with the other `*ResBody` tuple structs in this module
    /// (`ProtocolResBody`, `LocationsResBody`, `UsersResBody`), so callers can read the
    /// map without serializing.
    pub BTreeMap<String, Protocol>,
);
impl ProtocolsResBody {
    /// Creates a new `ProtocolsResBody` with the given protocols.
    pub fn new(protocols: BTreeMap<String, Protocol>) -> Self {
        Self(protocols)
    }
}
// `GET /_matrix/client/*/thirdparty/protocol/{protocol}`
//
// Fetches the metadata from the homeserver about a particular third party
// protocol. `/v3/` ([spec])
//
// [spec]: https://spec.matrix.org/latest/client-server-api/#get_matrixclientv3thirdpartyprotocolprotocol
// const METADATA: Metadata = metadata! {
// method: GET,
// rate_limited: false,
// authentication: AccessToken,
// history: {
// 1.0 => "/_matrix/client/r0/thirdparty/protocol/:protocol",
// 1.1 => "/_matrix/client/v3/thirdparty/protocol/:protocol",
// }
// };
// Request type for the `get_protocol` endpoint.
// pub struct ProtocolReqBody {
// /// The name of the protocol.
// #[salvo(parameter(parameter_in = Path))]
// pub protocol: String,
// }
/// Response type for the `get_protocol` endpoint.
#[derive(ToSchema, Serialize, Clone, Default, Debug)]
pub struct ProtocolResBody(
    /// Metadata about the protocol.
    pub Protocol,
);
impl ProtocolResBody {
    /// Creates a new `ProtocolResBody` wrapping the given protocol metadata.
    pub fn new(protocol: Protocol) -> Self {
        Self(protocol)
    }
}
// `GET /_matrix/client/*/thirdparty/location/{protocol}`
//
// Fetches third party locations for a protocol.
// `/v3/` ([spec])
//
// [spec]: https://spec.matrix.org/latest/client-server-api/#get_matrixclientv3thirdpartylocationprotocol
// const METADATA: Metadata = metadata! {
// method: GET,
// rate_limited: false,
// authentication: AccessToken,
// history: {
// 1.0 => "/_matrix/client/r0/thirdparty/location/:protocol",
// 1.1 => "/_matrix/client/v3/thirdparty/location/:protocol",
// }
// };
// /// Request type for the `get_location_for_protocol` endpoint.
// pub struct LocationForProtocolReqBody {
// /// The protocol used to communicate to the third party network.
// #[salvo(parameter(parameter_in = Path))]
// pub protocol: String,
// /// One or more custom fields to help identify the third party location.
// // The specification is incorrect for this parameter. See [matrix-spec#560](https://github.com/matrix-org/matrix-spec/issues/560).
//
// pub fields: BTreeMap<String, String>,
// }
/// Response type for the `get_location_for_protocol` endpoint.
#[derive(ToSchema, Serialize, Default, Debug)]
pub struct LocationsResBody(
    /// List of matched third party locations.
    pub Vec<Location>,
);
impl LocationsResBody {
    /// Creates a new `LocationsResBody` with the given locations.
    pub fn new(locations: Vec<Location>) -> Self {
        Self(locations)
    }
}
// `GET /_matrix/client/*/thirdparty/location`
//
// Retrieve an array of third party network locations from a Matrix room alias.
// `/v3/` ([spec])
//
// [spec]: https://spec.matrix.org/latest/client-server-api/#get_matrixclientv3thirdpartylocation
// const METADATA: Metadata = metadata! {
// method: GET,
// rate_limited: false,
// authentication: AccessToken,
// history: {
// 1.0 => "/_matrix/client/r0/thirdparty/location",
// 1.1 => "/_matrix/client/v3/thirdparty/location",
// }
// };
// /// Request type for the `get_location_for_room_alias` endpoint.
// pub struct LocationReqBody {
// /// The Matrix room alias to look up.
// #[salvo(parameter(parameter_in = Query))]
// pub alias: OwnedRoomAliasId,
// }
// /// Response type for the `get_location_for_room_alias` endpoint.
// #[derive(ToSchema, Serialize, Debug)]
//
// pub struct LocationResBody {
// /// List of matched third party locations.
// pub locations: Vec<Location>,
// }
//
// impl LocationResBody {
// /// Creates a new `Response` with the given locations.
// pub fn new(locations: Vec<Location>) -> Self {
// Self { locations }
// }
// }
// /// `GET /_matrix/client/*/thirdparty/user/{protocol}`
// ///
// /// Fetches third party users for a protocol.
// /// `/v3/` ([spec])
// ///
// /// [spec]: https://spec.matrix.org/latest/client-server-api/#get_matrixclientv3thirdpartyuserprotocol
// /// const METADATA: Metadata = metadata! {
// /// method: GET,
// /// rate_limited: false,
// /// authentication: AccessToken,
// /// history: {
// /// 1.0 => "/_matrix/client/r0/thirdparty/user/:protocol",
// /// 1.1 => "/_matrix/client/v3/thirdparty/user/:protocol",
// /// }
// /// };
//
// /// Request type for the `get_user_for_protocol` endpoint.
// pub struct UserForProtocolReqBody {
// /// The protocol used to communicate to the third party network.
// #[salvo(parameter(parameter_in = Path))]
// pub protocol: String,
// /// One or more custom fields that are passed to the AS to help identify the user.
// // The specification is incorrect for this parameter. See [matrix-spec#560](https://github.com/matrix-org/matrix-spec/issues/560).
//
// pub fields: BTreeMap<String, String>,
// }
// `GET /_matrix/client/*/thirdparty/user`
//
// Retrieve an array of third party users from a Matrix User ID.
// `/v3/` ([spec])
//
// [spec]: https://spec.matrix.org/latest/client-server-api/#get_matrixclientv3thirdpartyuser
// const METADATA: Metadata = metadata! {
//     method: GET,
//     rate_limited: false,
//     authentication: AccessToken,
//     history: {
//         1.0 => "/_matrix/client/r0/thirdparty/user",
//         1.1 => "/_matrix/client/v3/thirdparty/user",
//     }
// };
/// Response type for the `get_user_for_protocol` endpoint.
#[derive(ToSchema, Serialize, Default, Debug)]
pub struct UsersResBody(
    /// List of matched third party users.
    pub Vec<User>,
);
impl UsersResBody {
    /// Creates a new `UsersResBody` with the given users.
    pub fn new(users: Vec<User>) -> Self {
        Self(users)
    }
}
#[cfg(test)]
mod tests {
    use serde_json::{from_value as from_json_value, json, to_value as to_json_value};
    use super::{Medium, ThirdPartyIdentifier};
    use crate::UnixMillis;
    // Round-trips a `ThirdPartyIdentifier` through serde_json in both directions and checks
    // the exact wire shape (lowercase medium, integer millisecond timestamps).
    #[test]
    fn third_party_identifier_serde() {
        let third_party_id = ThirdPartyIdentifier {
            address: "monkey@banana.island".into(),
            medium: Medium::Email,
            validated_at: UnixMillis(1_535_176_800_000_u64.try_into().unwrap()),
            added_at: UnixMillis(1_535_336_848_756_u64.try_into().unwrap()),
        };
        let third_party_id_serialized = json!({
            "medium": "email",
            "address": "monkey@banana.island",
            "validated_at": 1_535_176_800_000_u64,
            "added_at": 1_535_336_848_756_u64
        });
        assert_eq!(
            to_json_value(third_party_id.clone()).unwrap(),
            third_party_id_serialized
        );
        assert_eq!(
            third_party_id,
            from_json_value(third_party_id_serialized).unwrap()
        );
    }
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/path_builder.rs | crates/core/src/path_builder.rs | //! The `PathBuilder` trait used to construct the path used to query endpoints and the types that
//! implement it.
use std::{
borrow::Cow,
collections::BTreeSet,
fmt::{Display, Write},
};
use konst::{iter, slice, string};
use percent_encoding::utf8_percent_encode;
use tracing::warn;
use super::{FeatureFlag, MatrixVersion, SupportedVersions, error::IntoHttpError};
use crate::percent_encode::PATH_PERCENT_ENCODE_SET;
/// Trait implemented by types providing a method to construct the path used to query an endpoint.
///
/// Types implementing this must enforce that all possible paths returned from `select_path()` must
/// contain the same number of variables.
pub trait PathBuilder: Sized {
    /// The input necessary to generate the endpoint URL.
    type Input<'a>;
    /// Pick the right path according to the given input.
    ///
    /// Returns an error if no path could be selected for the given input.
    fn select_path(&self, input: Self::Input<'_>) -> Result<&'static str, IntoHttpError>;
    /// Generate the endpoint URL for this data, considering the given input.
    ///
    /// ## Arguments
    ///
    /// * `input` - The input necessary to select the path.
    /// * `base_url` - The base URL (i.e. the scheme and host) to which the endpoint path will be
    ///   appended. Since all paths begin with a slash, it is not necessary for this to have a
    ///   trailing slash. If it has one however, it will be ignored.
    /// * `path_args` - The values of the variables in the endpoint's path. The order and number
    ///   must match the order and number of the variables in the path.
    /// * `query_string` - The serialized query string to append to the URL.
    ///
    /// ## Errors
    ///
    /// Returns an error if the `PathBuilder::select_path()` implementation returns an error.
    ///
    /// Panics if the number of `path_args` doesn't match the number of variables in the path
    /// returned by `PathBuilder::select_path()`.
    fn make_endpoint_url(
        &self,
        input: Self::Input<'_>,
        base_url: &str,
        path_args: &[&dyn Display],
        query_string: &str,
    ) -> Result<String, IntoHttpError> {
        let path_with_placeholders = self.select_path(input)?;
        // Drop at most one trailing slash so the joined URL never has `//`.
        let mut res = base_url.strip_suffix('/').unwrap_or(base_url).to_owned();
        let mut segments = path_with_placeholders.split('/');
        let mut path_args = path_args.iter();
        // Paths must start with '/', so the first split segment must be the empty string.
        let first_segment = segments.next().expect("split iterator is never empty");
        assert!(first_segment.is_empty(), "endpoint paths must start with '/'");
        for segment in segments {
            if extract_endpoint_path_segment_variable(segment).is_some() {
                // `{var}` placeholder: consume the next caller-supplied argument and
                // percent-encode it so it is safe as a single path segment.
                let arg = path_args
                    .next()
                    .expect("number of placeholders must match number of arguments")
                    .to_string();
                let arg = utf8_percent_encode(&arg, PATH_PERCENT_ENCODE_SET);
                write!(res, "/{arg}").expect("writing to a String using fmt::Write can't fail");
            } else {
                // Literal segment: copy verbatim.
                res.reserve(segment.len() + 1);
                res.push('/');
                res.push_str(segment);
            }
        }
        if !query_string.is_empty() {
            res.push('?');
            res.push_str(query_string);
        }
        Ok(res)
    }
    /// All the possible paths used by the endpoint in canon form.
    ///
    /// This is meant to be used to register paths in server routers.
    fn all_paths(&self) -> impl Iterator<Item = &'static str>;
    /// The list of path parameters in the URL.
    ///
    /// Used for `#[test]`s generated by the API macros.
    #[doc(hidden)]
    fn _path_parameters(&self) -> Vec<&'static str>;
}
/// The complete history of this endpoint as far as Palpo knows, together with all variants on
/// versions stable and unstable.
///
/// The amount and positioning of path variables are the same over all path variants; this
/// invariant is enforced at compile time by [`VersionHistory::new`].
#[derive(Clone, Debug, PartialEq, Eq)]
#[allow(clippy::exhaustive_structs)]
pub struct VersionHistory {
    /// A list of unstable paths over this endpoint's history, mapped to optional unstable
    /// features.
    ///
    /// For endpoint querying purposes, the last item will be used as a fallback.
    unstable_paths: &'static [(Option<&'static str>, &'static str)],
    /// A list of stable paths, mapped to selectors.
    ///
    /// Sorted (ascending) by Matrix version.
    stable_paths: &'static [(StablePathSelector, &'static str)],
    /// The Matrix version that deprecated this endpoint.
    ///
    /// Deprecation often precedes one Matrix version before removal.
    ///
    /// This will make [`try_into_http_request`](super::OutgoingRequest::try_into_http_request)
    /// emit a warning, see the corresponding documentation for more information.
    deprecated: Option<MatrixVersion>,
    /// The Matrix version that removed this endpoint.
    ///
    /// This will make [`try_into_http_request`](super::OutgoingRequest::try_into_http_request)
    /// emit an error, see the corresponding documentation for more information.
    removed: Option<MatrixVersion>,
}
impl VersionHistory {
    /// Constructs an instance of [`VersionHistory`], erroring on compilation if it does not
    /// pass invariants.
    ///
    /// Specifically, this checks the following invariants:
    ///
    /// * Path arguments are equal (in order, amount, and argument name) in all path strings
    /// * In `stable_paths`:
    ///   * Matrix versions are in ascending order
    ///   * No matrix version is referenced twice
    /// * `deprecated`'s version comes after the latest version mentioned in `stable_paths`, except
    ///   for version 1.0, and only if any stable path is defined
    /// * `removed` comes after `deprecated`, or after the latest referenced `stable_paths`, like
    ///   `deprecated`
    ///
    /// ## Arguments
    ///
    /// * `unstable_paths` - List of unstable paths for the endpoint, mapped to optional unstable
    ///   features.
    /// * `stable_paths` - List of stable paths for the endpoint, mapped to selectors.
    /// * `deprecated` - The Matrix version that deprecated the endpoint, if any.
    /// * `removed` - The Matrix version that removed the endpoint, if any.
    pub const fn new(
        unstable_paths: &'static [(Option<&'static str>, &'static str)],
        stable_paths: &'static [(StablePathSelector, &'static str)],
        deprecated: Option<MatrixVersion>,
        removed: Option<MatrixVersion>,
    ) -> Self {
        // Compares the `{var}` segments of two paths pairwise; panics (at const eval) on any
        // name or count mismatch. Uses `konst` iterators since `str` methods aren't const.
        const fn check_path_args_equal(first: &'static str, second: &'static str) {
            let mut second_iter = string::split(second, "/").next();
            iter::for_each!(first_s in string::split(first, '/') => {
                if let Some(first_arg) = extract_endpoint_path_segment_variable(first_s) {
                    // Advance `second_iter` to its next variable segment, if any.
                    let second_next_arg: Option<&'static str> = loop {
                        let Some((second_s, second_n_iter)) = second_iter else {
                            break None;
                        };
                        let maybe_second_arg = extract_endpoint_path_segment_variable(second_s);
                        second_iter = second_n_iter.next();
                        if let Some(second_arg) = maybe_second_arg {
                            break Some(second_arg);
                        }
                    };
                    if let Some(second_next_arg) = second_next_arg {
                        if !string::eq_str(second_next_arg, first_arg) {
                            panic!("names of endpoint path segment variables do not match");
                        }
                    } else {
                        panic!("counts of endpoint path segment variables do not match");
                    }
                }
            });
            // If second iterator still has some values, empty first.
            while let Some((second_s, second_n_iter)) = second_iter {
                if extract_endpoint_path_segment_variable(second_s).is_some() {
                    panic!("counts of endpoint path segment variables do not match");
                }
                second_iter = second_n_iter.next();
            }
        }
        // The path we're going to use to compare all other paths with
        let ref_path: &str = if let Some((_, s)) = unstable_paths.first() {
            s
        } else if let Some((_, s)) = stable_paths.first() {
            s
        } else {
            panic!("no endpoint paths supplied")
        };
        iter::for_each!(unstable_path in slice::iter(unstable_paths) => {
            check_path_is_valid(unstable_path.1);
            check_path_args_equal(ref_path, unstable_path.1);
        });
        let mut prev_seen_version: Option<MatrixVersion> = None;
        iter::for_each!(version_path in slice::iter(stable_paths) => {
            check_path_is_valid(version_path.1);
            check_path_args_equal(ref_path, version_path.1);
            if let Some(current_version) = version_path.0.version() {
                if let Some(prev_seen_version) = prev_seen_version {
                    let cmp_result = current_version.const_ord(&prev_seen_version);
                    if cmp_result.is_eq() {
                        // Found a duplicate, current == previous
                        panic!("duplicate matrix version in stable paths")
                    } else if cmp_result.is_lt() {
                        // Found an older version, current < previous
                        panic!("stable paths are not in ascending order")
                    }
                }
                prev_seen_version = Some(current_version);
            }
        });
        if let Some(deprecated) = deprecated {
            if let Some(prev_seen_version) = prev_seen_version {
                let ord_result = prev_seen_version.const_ord(&deprecated);
                if !deprecated.is_legacy() && ord_result.is_eq() {
                    // prev_seen_version == deprecated, except for 1.0.
                    // It is possible that an endpoint was both made stable and deprecated in the
                    // legacy versions.
                    panic!("deprecated version is equal to latest stable path version")
                } else if ord_result.is_gt() {
                    // prev_seen_version > deprecated
                    panic!("deprecated version is older than latest stable path version")
                }
            } else {
                panic!("defined deprecated version while no stable path exists")
            }
        }
        if let Some(removed) = removed {
            if let Some(deprecated) = deprecated {
                let ord_result = deprecated.const_ord(&removed);
                if ord_result.is_eq() {
                    // deprecated == removed
                    panic!("removed version is equal to deprecated version")
                } else if ord_result.is_gt() {
                    // deprecated > removed
                    panic!("removed version is older than deprecated version")
                }
            } else {
                panic!("defined removed version while no deprecated version exists")
            }
        }
        Self { unstable_paths, stable_paths, deprecated, removed }
    }
    /// Whether the homeserver advertises support for a path in this [`VersionHistory`].
    ///
    /// Returns `true` if any version or feature in the given [`SupportedVersions`] matches a path
    /// in this history, unless the endpoint was removed.
    ///
    /// Note that this is likely to return false negatives, since some endpoints don't specify a
    /// stable or unstable feature, and homeservers should not advertise support for a Matrix
    /// version unless they support all of its features.
    pub fn is_supported(&self, considering: &SupportedVersions) -> bool {
        match self.versioning_decision_for(&considering.versions) {
            VersioningDecision::Removed => false,
            VersioningDecision::Version { .. } => true,
            VersioningDecision::Feature => self.feature_path(&considering.features).is_some(),
        }
    }
    /// Decide which kind of endpoint to use given the supported versions of a homeserver.
    ///
    /// Returns:
    ///
    /// - `Removed` if the endpoint is removed in all supported versions.
    /// - `Version` if the endpoint is stable or deprecated in at least one supported version.
    /// - `Feature` in all other cases, to look if a feature path is supported, or use the last
    ///   unstable path as a fallback.
    ///
    /// If the resulting [`VersioningDecision`] is `Version`, it will also detail if any version
    /// denoted deprecation or removal.
    pub fn versioning_decision_for(
        &self,
        versions: &BTreeSet<MatrixVersion>,
    ) -> VersioningDecision {
        let is_superset_any =
            |version: MatrixVersion| versions.iter().any(|v| v.is_superset_of(version));
        let is_superset_all =
            |version: MatrixVersion| versions.iter().all(|v| v.is_superset_of(version));
        // Check if all versions removed this endpoint.
        if self.removed.is_some_and(is_superset_all) {
            return VersioningDecision::Removed;
        }
        // Check if *any* version marks this endpoint as stable.
        if self.added_in().is_some_and(is_superset_any) {
            let all_deprecated = self.deprecated.is_some_and(is_superset_all);
            return VersioningDecision::Version {
                any_deprecated: all_deprecated || self.deprecated.is_some_and(is_superset_any),
                all_deprecated,
                any_removed: self.removed.is_some_and(is_superset_any),
            };
        }
        VersioningDecision::Feature
    }
    /// Returns the *first* version this endpoint was added in.
    ///
    /// Is `None` when this endpoint is unstable/unreleased.
    pub fn added_in(&self) -> Option<MatrixVersion> {
        self.stable_paths.iter().find_map(|(v, _)| v.version())
    }
    /// Returns the Matrix version that deprecated this endpoint, if any.
    pub fn deprecated_in(&self) -> Option<MatrixVersion> {
        self.deprecated
    }
    /// Returns the Matrix version that removed this endpoint, if any.
    pub fn removed_in(&self) -> Option<MatrixVersion> {
        self.removed
    }
    /// Picks the last unstable path, if it exists.
    pub fn unstable(&self) -> Option<&'static str> {
        self.unstable_paths.last().map(|(_, path)| *path)
    }
    /// Returns all unstable path variants in canon form, with optional corresponding feature.
    pub fn unstable_paths(&self) -> impl Iterator<Item = (Option<&'static str>, &'static str)> {
        self.unstable_paths.iter().copied()
    }
    /// Returns all version path variants in canon form, with corresponding selector.
    pub fn stable_paths(&self) -> impl Iterator<Item = (StablePathSelector, &'static str)> {
        self.stable_paths.iter().copied()
    }
    /// The path that should be used to query the endpoint, given a set of supported versions.
    ///
    /// Picks the latest path that the versions accept.
    ///
    /// Returns an endpoint in the following format;
    /// - `/_matrix/client/versions`
    /// - `/_matrix/client/hello/{world}` (`{world}` is a path replacement parameter)
    ///
    /// Note: This doesn't handle endpoint removals, check with
    /// [`versioning_decision_for`](VersionHistory::versioning_decision_for) to see if this endpoint
    /// is still available.
    pub fn version_path(&self, versions: &BTreeSet<MatrixVersion>) -> Option<&'static str> {
        let version_paths = self
            .stable_paths
            .iter()
            .filter_map(|(selector, path)| selector.version().map(|version| (version, path)));
        // Reverse the iterator, to check the "latest" version first.
        for (ver, path) in version_paths.rev() {
            // Check if any of the versions are equal or greater than the version the path needs.
            if versions.iter().any(|v| v.is_superset_of(ver)) {
                return Some(path);
            }
        }
        None
    }
    /// The path that should be used to query the endpoint, given a list of supported features.
    pub fn feature_path(&self, supported_features: &BTreeSet<FeatureFlag>) -> Option<&'static str> {
        let unstable_feature_paths = self
            .unstable_paths
            .iter()
            .filter_map(|(feature, path)| feature.map(|feature| (feature, path)));
        let stable_feature_paths = self
            .stable_paths
            .iter()
            .filter_map(|(selector, path)| selector.feature().map(|feature| (feature, path)));
        // Reverse the iterator, to check the "latest" features first.
        for (feature, path) in unstable_feature_paths.chain(stable_feature_paths).rev() {
            // Return the path of the first supported feature.
            if supported_features.iter().any(|supported| supported.as_str() == feature) {
                return Some(path);
            }
        }
        None
    }
}
impl PathBuilder for VersionHistory {
    type Input<'a> = Cow<'a, SupportedVersions>;
    /// Pick the right path according to the given input.
    ///
    /// This will fail if, for every version in `input`;
    /// - The endpoint is too old, and has been removed in all versions.
    ///   ([`EndpointRemoved`](super::error::IntoHttpError::EndpointRemoved))
    /// - The endpoint is too new, and no unstable path is known for this endpoint.
    ///   ([`NoUnstablePath`](super::error::IntoHttpError::NoUnstablePath))
    ///
    /// Finally, this will emit a warning through [`tracing`] if it detects that any version in
    /// `input` has deprecated this endpoint.
    fn select_path(
        &self,
        input: Cow<'_, SupportedVersions>,
    ) -> Result<&'static str, IntoHttpError> {
        match self.versioning_decision_for(&input.versions) {
            VersioningDecision::Removed => Err(IntoHttpError::EndpointRemoved(
                self.removed.expect("VersioningDecision::Removed implies metadata.removed"),
            )),
            VersioningDecision::Version { any_deprecated, all_deprecated, any_removed } => {
                // Warn with the most specific message; removal implies deprecation.
                if any_removed {
                    if all_deprecated {
                        warn!(
                            "endpoint is removed in some (and deprecated in ALL) \
                             of the following versions: {:?}",
                            input.versions
                        );
                    } else if any_deprecated {
                        warn!(
                            "endpoint is removed (and deprecated) in some of the \
                             following versions: {:?}",
                            input.versions
                        );
                    } else {
                        unreachable!("any_removed implies *_deprecated");
                    }
                } else if all_deprecated {
                    warn!(
                        "endpoint is deprecated in ALL of the following versions: {:?}",
                        input.versions
                    );
                } else if any_deprecated {
                    warn!(
                        "endpoint is deprecated in some of the following versions: {:?}",
                        input.versions
                    );
                }
                Ok(self
                    .version_path(&input.versions)
                    .expect("VersioningDecision::Version implies that a version path exists"))
            }
            // Fall back to the last unstable path when no feature path matches.
            VersioningDecision::Feature => self
                .feature_path(&input.features)
                .or_else(|| self.unstable())
                .ok_or(IntoHttpError::NoUnstablePath),
        }
    }
    // Unstable variants first, then stable ones, each in declaration order.
    fn all_paths(&self) -> impl Iterator<Item = &'static str> {
        self.unstable_paths().map(|(_, path)| path).chain(self.stable_paths().map(|(_, path)| path))
    }
    // Any path works here: `new()` guarantees all variants share the same variables.
    fn _path_parameters(&self) -> Vec<&'static str> {
        let path = self.all_paths().next().unwrap();
        path.split('/').filter_map(extract_endpoint_path_segment_variable).collect()
    }
}
/// A versioning "decision" derived from a set of Matrix versions.
#[derive(Clone, Copy, Debug, PartialEq, Eq, Hash)]
#[allow(clippy::exhaustive_enums)]
pub enum VersioningDecision {
    /// A feature path should be used, or a fallback.
    Feature,
    /// A path with a Matrix version should be used.
    Version {
        /// If any version denoted deprecation.
        any_deprecated: bool,
        /// If *all* versions denoted deprecation.
        all_deprecated: bool,
        /// If any version denoted removal.
        any_removed: bool,
    },
    /// This endpoint was removed in all versions, it should not be used.
    Removed,
}
/// A selector for a stable path of an endpoint: by stable feature, by Matrix version, or both.
#[derive(Debug, Clone, Copy, PartialEq, Eq)]
#[allow(clippy::exhaustive_enums)]
pub enum StablePathSelector {
    /// The path is available with the given stable feature.
    Feature(&'static str),
    /// The path was added in the given Matrix version.
    Version(MatrixVersion),
    /// The path is available via a stable feature and was added in a Matrix version.
    FeatureAndVersion {
        /// The stable feature that adds support for the path.
        feature: &'static str,
        /// The Matrix version when the path was added.
        version: MatrixVersion,
    },
}
impl StablePathSelector {
    /// The feature that adds support for the path, if any.
    pub const fn feature(self) -> Option<&'static str> {
        match self {
            Self::Feature(feature) => Some(feature),
            Self::FeatureAndVersion { feature, .. } => Some(feature),
            Self::Version(_) => None,
        }
    }
    /// The Matrix version when the path was added, if any.
    pub const fn version(self) -> Option<MatrixVersion> {
        match self {
            Self::Version(version) => Some(version),
            Self::FeatureAndVersion { version, .. } => Some(version),
            Self::Feature(_) => None,
        }
    }
}
impl From<MatrixVersion> for StablePathSelector {
    /// Converts a bare Matrix version into a version-only selector.
    fn from(value: MatrixVersion) -> Self {
        Self::Version(value)
    }
}
/// The endpoint has a single path.
///
/// This means that the endpoint has no path history, or the Matrix spec has no way to manage path
/// history in the API that it is a part of.
#[derive(Clone, Debug, PartialEq, Eq)]
#[allow(clippy::exhaustive_structs)]
pub struct SinglePath(&'static str);
impl SinglePath {
    /// Construct a new `SinglePath` for the given path.
    ///
    /// Panics during const evaluation if the path contains invalid characters or malformed
    /// `{var}` segments.
    pub const fn new(path: &'static str) -> Self {
        check_path_is_valid(path);
        // Check that path variables are valid; the extractor panics on malformed braces.
        iter::for_each!(segment in string::split(path, '/') => {
            extract_endpoint_path_segment_variable(segment);
        });
        Self(path)
    }
    /// The path of the endpoint.
    pub fn path(&self) -> &'static str {
        self.0
    }
}
impl PathBuilder for SinglePath {
    // A single path needs no input to be selected.
    type Input<'a> = ();
    fn select_path(&self, _input: ()) -> Result<&'static str, IntoHttpError> {
        Ok(self.0)
    }
    fn all_paths(&self) -> impl Iterator<Item = &'static str> {
        [self.0].into_iter()
    }
    fn _path_parameters(&self) -> Vec<&'static str> {
        self.0
            .split('/')
            .filter_map(extract_endpoint_path_segment_variable)
            .collect()
    }
}
/// Check that the given path is valid.
///
/// Panics if the path contains invalid (non-ascii or whitespace) characters.
const fn check_path_is_valid(path: &'static str) {
    // Plain const-compatible byte scan; accepts exactly the printable ASCII range
    // excluding space (0x21..=0x7E), like the original range match.
    let bytes = path.as_bytes();
    let mut i = 0;
    while i < bytes.len() {
        if bytes[i] < 0x21 || bytes[i] > 0x7E {
            panic!("path contains invalid (non-ascii or whitespace) characters");
        }
        i += 1;
    }
}
/// Extract the variable of the given endpoint path segment.
///
/// The supported syntax for an endpoint path segment variable is `{var}`.
///
/// Returns the name of the variable if one was found in the segment, `None` if no variable was
/// found.
///
/// Panics if:
///
/// * The segment begins with `{` but doesn't end with `}`.
/// * The segment ends with `}` but doesn't begin with `{`.
/// * The segment begins with `:`, which matches the old syntax for endpoint path segment variables.
pub const fn extract_endpoint_path_segment_variable(segment: &str) -> Option<&str> {
    // `konst` string helpers are used because `str` methods are not `const fn` on stable.
    if string::starts_with(segment, ':') {
        panic!("endpoint paths syntax has changed and segment variables must be wrapped by `{{}}`");
    }
    if let Some(s) = string::strip_prefix(segment, '{') {
        let var = string::strip_suffix(s, '}')
            .expect("endpoint path segment variable braces mismatch: missing ending `}`");
        return Some(var);
    }
    if string::ends_with(segment, '}') {
        panic!("endpoint path segment variable braces mismatch: missing starting `{{`");
    }
    None
}
#[cfg(test)]
mod tests {
use std::{
borrow::Cow,
collections::{BTreeMap, BTreeSet},
};
use assert_matches2::assert_matches;
use super::{PathBuilder, StablePathSelector, VersionHistory};
use crate::api::{
MatrixVersion::{self, V1_0, V1_1, V1_2, V1_3},
SupportedVersions,
error::IntoHttpError,
};
fn stable_only_history(
stable_paths: &'static [(StablePathSelector, &'static str)],
) -> VersionHistory {
VersionHistory { unstable_paths: &[], stable_paths, deprecated: None, removed: None }
}
fn version_only_supported(versions: &[MatrixVersion]) -> SupportedVersions {
SupportedVersions {
versions: versions.iter().copied().collect(),
features: BTreeSet::new(),
}
}
// TODO add test that can hook into tracing and verify the deprecation warning is emitted
#[test]
fn make_simple_endpoint_url() {
let history = stable_only_history(&[(StablePathSelector::Version(V1_0), "/s")]);
let url = history
.make_endpoint_url(
Cow::Owned(version_only_supported(&[V1_0])),
"https://example.org",
&[],
"",
)
.unwrap();
assert_eq!(url, "https://example.org/s");
}
#[test]
fn make_endpoint_url_with_path_args() {
let history = stable_only_history(&[(StablePathSelector::Version(V1_0), "/s/{x}")]);
let url = history
.make_endpoint_url(
Cow::Owned(version_only_supported(&[V1_0])),
"https://example.org",
&[&"123"],
"",
)
.unwrap();
assert_eq!(url, "https://example.org/s/123");
}
#[test]
fn make_endpoint_url_with_path_args_with_dash() {
let history = stable_only_history(&[(StablePathSelector::Version(V1_0), "/s/{x}")]);
let url = history
.make_endpoint_url(
Cow::Owned(version_only_supported(&[V1_0])),
"https://example.org",
&[&"my-path"],
"",
)
.unwrap();
assert_eq!(url, "https://example.org/s/my-path");
}
#[test]
fn make_endpoint_url_with_path_args_with_reserved_char() {
let history = stable_only_history(&[(StablePathSelector::Version(V1_0), "/s/{x}")]);
let url = history
.make_endpoint_url(
Cow::Owned(version_only_supported(&[V1_0])),
"https://example.org",
&[&"#path"],
"",
)
.unwrap();
assert_eq!(url, "https://example.org/s/%23path");
}
#[test]
fn make_endpoint_url_with_query() {
let history = stable_only_history(&[(StablePathSelector::Version(V1_0), "/s/")]);
let url = history
.make_endpoint_url(
Cow::Owned(version_only_supported(&[V1_0])),
"https://example.org",
&[],
"foo=bar",
)
.unwrap();
assert_eq!(url, "https://example.org/s/?foo=bar");
}
#[test]
#[should_panic]
fn make_endpoint_url_wrong_num_path_args() {
let history = stable_only_history(&[(StablePathSelector::Version(V1_0), "/s/{x}")]);
_ = history.make_endpoint_url(
Cow::Owned(version_only_supported(&[V1_0])),
"https://example.org",
&[],
"",
);
}
const EMPTY: VersionHistory =
VersionHistory { unstable_paths: &[], stable_paths: &[], deprecated: None, removed: None };
#[test]
fn select_version() {
let version_supported = version_only_supported(&[V1_0, V1_1]);
let superset_supported = version_only_supported(&[V1_1]);
// With version only.
let hist =
VersionHistory { stable_paths: &[(StablePathSelector::Version(V1_0), "/s")], ..EMPTY };
assert_matches!(hist.select_path(Cow::Borrowed(&version_supported)), Ok("/s"));
assert!(hist.is_supported(&version_supported));
assert_matches!(hist.select_path(Cow::Borrowed(&superset_supported)), Ok("/s"));
assert!(hist.is_supported(&superset_supported));
// With feature and version.
let hist = VersionHistory {
stable_paths: &[(
StablePathSelector::FeatureAndVersion { feature: "org.boo.stable", version: V1_0 },
"/s",
)],
..EMPTY
};
assert_matches!(hist.select_path(Cow::Borrowed(&version_supported)), Ok("/s"));
assert!(hist.is_supported(&version_supported));
assert_matches!(hist.select_path(Cow::Borrowed(&superset_supported)), Ok("/s"));
assert!(hist.is_supported(&superset_supported));
// Select latest stable version.
let hist = VersionHistory {
stable_paths: &[
(StablePathSelector::Version(V1_0), "/s_v1"),
(StablePathSelector::Version(V1_1), "/s_v2"),
],
..EMPTY
};
assert_matches!(hist.select_path(Cow::Borrowed(&version_supported)), Ok("/s_v2"));
assert!(hist.is_supported(&version_supported));
// With unstable feature.
let unstable_supported = SupportedVersions {
versions: [V1_0].into(),
features: ["org.boo.unstable".into()].into(),
};
let hist = VersionHistory {
unstable_paths: &[(Some("org.boo.unstable"), "/u")],
stable_paths: &[(StablePathSelector::Version(V1_0), "/s")],
..EMPTY
};
assert_matches!(hist.select_path(Cow::Borrowed(&unstable_supported)), Ok("/s"));
assert!(hist.is_supported(&unstable_supported));
}
#[test]
fn select_stable_feature() {
let supported = SupportedVersions {
versions: [V1_1].into(),
features: ["org.boo.unstable".into(), "org.boo.stable".into()].into(),
};
// With feature only.
let hist = VersionHistory {
unstable_paths: &[(Some("org.boo.unstable"), "/u")],
stable_paths: &[(StablePathSelector::Feature("org.boo.stable"), "/s")],
..EMPTY
};
assert_matches!(hist.select_path(Cow::Borrowed(&supported)), Ok("/s"));
assert!(hist.is_supported(&supported));
// With feature and version.
let hist = VersionHistory {
unstable_paths: &[(Some("org.boo.unstable"), "/u")],
stable_paths: &[(
StablePathSelector::FeatureAndVersion { feature: "org.boo.stable", version: V1_3 },
"/s",
)],
..EMPTY
};
assert_matches!(hist.select_path(Cow::Borrowed(&supported)), Ok("/s"));
assert!(hist.is_supported(&supported));
}
#[test]
fn select_unstable_feature() {
let supported = SupportedVersions {
versions: [V1_1].into(),
features: ["org.boo.unstable".into()].into(),
};
let hist = VersionHistory {
unstable_paths: &[(Some("org.boo.unstable"), "/u")],
stable_paths: &[(
StablePathSelector::FeatureAndVersion { feature: "org.boo.stable", version: V1_3 },
"/s",
)],
..EMPTY
};
assert_matches!(hist.select_path(Cow::Borrowed(&supported)), Ok("/u"));
assert!(hist.is_supported(&supported));
}
#[test]
fn select_unstable_fallback() {
let supported = version_only_supported(&[V1_0]);
let hist = VersionHistory { unstable_paths: &[(None, "/u")], ..EMPTY };
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | true |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/encryption.rs | crates/core/src/encryption.rs | //! Common types for [encryption] related tasks.
//!
//! [encryption]: https://spec.matrix.org/latest/client-server-api/#end-to-end-encryption
use std::collections::BTreeMap;
use salvo::prelude::*;
use serde::{Deserialize, Serialize};
use crate::{
EventEncryptionAlgorithm, OwnedDeviceId, OwnedDeviceKeyId, OwnedUserId, PrivOwnedStr,
serde::{Base64, StringEnum},
};
/// Identity keys for a device.
#[derive(ToSchema, Deserialize, Serialize, Clone, Debug)]
pub struct DeviceKeys {
    /// The ID of the user the device belongs to.
    ///
    /// Must match the user ID used when logging in.
    pub user_id: OwnedUserId,
    /// The ID of the device these keys belong to.
    ///
    /// Must match the device ID used when logging in.
    pub device_id: OwnedDeviceId,
    /// The encryption algorithms supported by this device.
    pub algorithms: Vec<EventEncryptionAlgorithm>,
    /// Public identity keys.
    pub keys: BTreeMap<OwnedDeviceKeyId, String>,
    /// Signatures for the device key object.
    ///
    /// Maps the signing user's ID to a map from key ID to signature string.
    /// Defaults to empty when absent from the wire format.
    #[serde(default)]
    pub signatures: BTreeMap<OwnedUserId, BTreeMap<OwnedDeviceKeyId, String>>,
    /// Additional data added to the device key information by intermediate
    /// servers, and not covered by the signatures.
    ///
    /// Omitted from serialization while empty.
    #[serde(default, skip_serializing_if = "UnsignedDeviceInfo::is_empty")]
    pub unsigned: UnsignedDeviceInfo,
}
impl DeviceKeys {
/// Creates a new `DeviceKeys` from the given user id, device id,
/// algorithms, keys and signatures.
pub fn new(
user_id: OwnedUserId,
device_id: OwnedDeviceId,
algorithms: Vec<EventEncryptionAlgorithm>,
keys: BTreeMap<OwnedDeviceKeyId, String>,
signatures: BTreeMap<OwnedUserId, BTreeMap<OwnedDeviceKeyId, String>>,
) -> Self {
Self {
user_id,
device_id,
algorithms,
keys,
signatures,
unsigned: Default::default(),
}
}
}
/// Additional data added to device key information by intermediate servers.
#[derive(ToSchema, Clone, Debug, Default, Deserialize, Serialize)]
pub struct UnsignedDeviceInfo {
    /// The display name which the user set on the device.
    ///
    /// Omitted from serialization when `None`.
    #[serde(skip_serializing_if = "Option::is_none")]
    pub device_display_name: Option<String>,
}
impl UnsignedDeviceInfo {
    /// Creates an empty `UnsignedDeviceInfo`.
    pub fn new() -> Self {
        // Spelled out explicitly; equivalent to `Default::default()`.
        Self {
            device_display_name: None,
        }
    }

    /// Checks whether all fields are empty / `None`.
    pub fn is_empty(&self) -> bool {
        self.device_display_name.is_none()
    }
}
/// Signatures for a `SignedKey` object.
///
/// Maps the signing user's ID to a map from signing key ID to signature string.
pub type SignedKeySignatures = BTreeMap<OwnedUserId, BTreeMap<OwnedDeviceKeyId, String>>;
/// A key for the SignedCurve25519 algorithm
#[derive(ToSchema, Debug, Clone, Serialize, Deserialize)]
pub struct SignedKey {
    /// Base64-encoded 32-byte Curve25519 public key.
    #[salvo(schema(value_type = String))]
    pub key: Base64,
    /// Signatures for the key object.
    pub signatures: SignedKeySignatures,
    /// Is this key considered to be a fallback key, defaults to false.
    ///
    /// Skipped during serialization while it holds the default (`false`) value.
    #[serde(default, skip_serializing_if = "crate::serde::is_default")]
    pub fallback: bool,
}
impl SignedKey {
    /// Creates a new `SignedKey` with the given key and signatures.
    pub fn new(key: Base64, signatures: SignedKeySignatures) -> Self {
        Self {
            fallback: false,
            key,
            signatures,
        }
    }

    /// Creates a new fallback `SignedKey` with the given key and signatures.
    pub fn new_fallback(key: Base64, signatures: SignedKeySignatures) -> Self {
        // Same as `new`, but flagged as a fallback key.
        Self {
            fallback: true,
            ..Self::new(key, signatures)
        }
    }
}
/// A one-time public key for "pre-key" messages.
// `untagged`: deserialization tries the variants in order, so a JSON object
// becomes `SignedKey` and a plain JSON string becomes `Key`.
#[derive(ToSchema, Debug, Clone, Serialize, Deserialize)]
#[serde(untagged)]
pub enum OneTimeKey {
    /// A key containing signatures, for the SignedCurve25519 algorithm.
    SignedKey(SignedKey),
    /// A string-valued key, for the Ed25519 and Curve25519 algorithms.
    Key(String),
}
/// Signatures for a `CrossSigningKey` object.
///
/// Maps the signing user's ID to a map from signing key ID to signature string.
pub type CrossSigningKeySignatures = BTreeMap<OwnedUserId, BTreeMap<OwnedDeviceKeyId, String>>;
/// A cross signing key.
#[derive(ToSchema, Deserialize, Serialize, PartialEq, Eq, Clone, Debug)]
pub struct CrossSigningKey {
    /// The ID of the user the key belongs to.
    pub user_id: OwnedUserId,
    /// What the key is used for.
    pub usage: Vec<KeyUsage>,
    /// The public key.
    ///
    /// The object must have exactly one property.
    pub keys: BTreeMap<OwnedDeviceKeyId, String>,
    /// Signatures of the key.
    ///
    /// Only optional for master key.
    ///
    /// Defaults to empty and is omitted from serialization while empty.
    #[serde(default, skip_serializing_if = "BTreeMap::is_empty")]
    pub signatures: CrossSigningKeySignatures,
}
impl CrossSigningKey {
/// Creates a new `CrossSigningKey` with the given user ID, usage, keys and
/// signatures.
pub fn new(
user_id: OwnedUserId,
usage: Vec<KeyUsage>,
keys: BTreeMap<OwnedDeviceKeyId, String>,
signatures: CrossSigningKeySignatures,
) -> Self {
Self {
user_id,
usage,
keys,
signatures,
}
}
}
/// The usage of a cross signing key.
#[doc = include_str!(concat!(env!("CARGO_MANIFEST_DIR"), "/src/doc/string_enum.md"))]
#[derive(ToSchema, Clone, StringEnum)]
#[palpo_enum(rename_all = "snake_case")]
pub enum KeyUsage {
/// Master key.
Master,
/// Self-signing key.
SelfSigning,
/// User-signing key.
UserSigning,
#[doc(hidden)]
#[salvo(schema(skip))]
_Custom(PrivOwnedStr),
}
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
palpo-im/palpo | https://github.com/palpo-im/palpo/blob/066b5b15ce094d4e9f6d28484cbc9cb8bd913e67/crates/core/src/time.rs | crates/core/src/time.rs | use std::{
fmt,
time::{Duration, SystemTime, UNIX_EPOCH},
};
use diesel::{AsExpression, FromSqlRow, deserialize::FromSql, pg, serialize::ToSql, sql_types};
use salvo::prelude::*;
use serde::{Deserialize, Serialize};
/// A timestamp represented as the number of milliseconds since the unix epoch.
#[derive(
    ToSchema,
    FromSqlRow,
    AsExpression,
    Default,
    Clone,
    Copy,
    Hash,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Deserialize,
    Serialize,
)]
// Stored in the database as a signed BIGINT; see the FromSql/ToSql impls.
#[diesel(sql_type = sql_types::Bigint)]
#[allow(clippy::exhaustive_structs)]
// Serializes as the bare integer rather than a wrapping object.
#[serde(transparent)]
pub struct UnixMillis(pub u64);
impl fmt::Display for UnixMillis {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Identical to `write!(f, "{}", self.0)` — this is exactly what that
        // macro expands to: the raw count with default formatting options.
        f.write_fmt(format_args!("{}", self.0))
    }
}
impl UnixMillis {
    /// Creates a new `UnixMillis` from the given `SystemTime`, if it is not
    /// before the unix epoch, or too large to be represented.
    pub fn from_system_time(time: SystemTime) -> Option<Self> {
        let since_epoch = time.duration_since(UNIX_EPOCH).ok()?;
        // `as_millis` yields a u128; return `None` rather than truncate.
        u64::try_from(since_epoch.as_millis()).ok().map(Self)
    }

    /// The current system time in milliseconds since the unix epoch.
    pub fn now() -> Self {
        Self::from_system_time(SystemTime::now()).expect("date out of range")
    }

    /// Creates a new `SystemTime` from `self`, if it can be represented.
    pub fn to_system_time(self) -> Option<SystemTime> {
        let offset = Duration::from_millis(self.0);
        UNIX_EPOCH.checked_add(offset)
    }

    /// Get the time since the unix epoch in milliseconds.
    pub fn get(&self) -> u64 {
        self.0
    }

    /// Get time since the unix epoch in seconds.
    pub fn as_secs(&self) -> u64 {
        self.0 / 1000
    }
}
impl fmt::Debug for UnixMillis {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Keep the value on one line even under `{:#?}` — the derived Debug
        // would put the inner value on its own line in alternate mode, which
        // bloats debug strings unnecessarily.
        f.write_fmt(format_args!("UnixMillis({})", self.0))
    }
}
impl FromSql<sql_types::BigInt, pg::Pg> for UnixMillis {
fn from_sql(bytes: diesel::pg::PgValue<'_>) -> diesel::deserialize::Result<Self> {
let value = <i64 as diesel::deserialize::FromSql<
diesel::sql_types::BigInt,
diesel::pg::Pg,
>>::from_sql(bytes)?;
Ok(Self(value as u64))
}
}
impl ToSql<sql_types::BigInt, pg::Pg> for UnixMillis {
    fn to_sql(
        &self,
        out: &mut diesel::serialize::Output<'_, '_, pg::Pg>,
    ) -> diesel::serialize::Result {
        // Reinterpret as i64 and defer to the BIGINT serializer.
        let as_signed = self.0 as i64;
        <i64 as ToSql<sql_types::BigInt, pg::Pg>>::to_sql(&as_signed, &mut out.reborrow())
    }
}
/// A timestamp represented as the number of seconds since the unix epoch.
#[derive(
    ToSchema,
    FromSqlRow,
    AsExpression,
    Clone,
    Copy,
    Hash,
    PartialEq,
    Eq,
    PartialOrd,
    Ord,
    Deserialize,
    Serialize,
)]
// Stored in the database as a signed BIGINT; see the FromSql/ToSql impls.
#[diesel(sql_type = sql_types::Bigint)]
#[allow(clippy::exhaustive_structs)]
// Serializes as the bare integer rather than a wrapping object.
#[serde(transparent)]
pub struct UnixSeconds(pub u64);
impl UnixSeconds {
    /// Creates a new `UnixSeconds` from the given `SystemTime`, if it is not
    /// before the unix epoch.
    ///
    /// (Unlike `UnixMillis`, no overflow check is needed here:
    /// `Duration::as_secs` already returns a `u64`.)
    pub fn from_system_time(time: SystemTime) -> Option<Self> {
        let duration = time.duration_since(UNIX_EPOCH).ok()?;
        Some(Self(duration.as_secs()))
    }

    /// The current system-time as seconds since the unix epoch.
    pub fn now() -> Self {
        Self::from_system_time(SystemTime::now()).expect("date out of range")
    }

    /// Creates a new `SystemTime` from `self`, if it can be represented.
    pub fn to_system_time(self) -> Option<SystemTime> {
        UNIX_EPOCH.checked_add(Duration::from_secs(self.0))
    }

    /// Get time since the unix epoch in seconds.
    pub fn get(&self) -> u64 {
        self.0
    }
}
impl fmt::Debug for UnixSeconds {
    fn fmt(&self, f: &mut fmt::Formatter<'_>) -> fmt::Result {
        // Keep the value on one line even under `{:#?}` — the derived Debug
        // would put the inner value on its own line in alternate mode, which
        // bloats debug strings unnecessarily.
        f.write_fmt(format_args!("UnixSeconds({})", self.0))
    }
}
impl FromSql<sql_types::BigInt, pg::Pg> for UnixSeconds {
fn from_sql(bytes: diesel::pg::PgValue<'_>) -> diesel::deserialize::Result<Self> {
let value = <i64 as diesel::deserialize::FromSql<
diesel::sql_types::BigInt,
diesel::pg::Pg,
>>::from_sql(bytes)?;
Ok(Self(value as u64))
}
}
impl ToSql<sql_types::BigInt, pg::Pg> for UnixSeconds {
    fn to_sql(
        &self,
        out: &mut diesel::serialize::Output<'_, '_, pg::Pg>,
    ) -> diesel::serialize::Result {
        // Reinterpret as i64 and defer to the BIGINT serializer.
        let as_signed = self.0 as i64;
        <i64 as ToSql<sql_types::BigInt, pg::Pg>>::to_sql(&as_signed, &mut out.reborrow())
    }
}
// #[cfg(test)]
// mod tests {
// use std::time::{Duration, UNIX_EPOCH};
// use serde::{Deserialize, Serialize};
// use serde_json::json;
// use super::{UnixMillis, UnixSeconds};
// #[derive(Clone, Debug, Deserialize, Serialize)]
// struct SystemTimeTest {
// millis: UnixMillis,
// secs: UnixSeconds,
// }
// #[test]
// fn deserialize() {
// let json = json!({ "millis": 3000, "secs": 60 });
// let time = serde_json::from_value::<SystemTimeTest>(json).unwrap();
// assert_eq!(
// time.millis.to_system_time(),
// Some(UNIX_EPOCH + Duration::from_millis(3000))
// );
//     assert_eq!(time.secs.to_system_time(), Some(UNIX_EPOCH + Duration::from_secs(60)));
// }
// #[test]
// fn serialize() {
// let request = SystemTimeTest {
//         millis: UnixMillis::from_system_time(UNIX_EPOCH + Duration::new(2, 0)).unwrap(),
//         secs: UnixSeconds(0),
// };
// assert_eq!(
// serde_json::to_value(request).unwrap(),
// json!({ "millis": 2000, "secs": 0 })
// );
// }
// }
| rust | Apache-2.0 | 066b5b15ce094d4e9f6d28484cbc9cb8bd913e67 | 2026-01-04T20:22:21.242775Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.